/linux/include/crypto/

sha256_base.h
    44  unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;  in sha256_base_do_update()
    48  if (unlikely((partial + len) >= SHA256_BLOCK_SIZE)) {  in sha256_base_do_update()
    52  int p = SHA256_BLOCK_SIZE - partial;  in sha256_base_do_update()
    61  blocks = len / SHA256_BLOCK_SIZE;  in sha256_base_do_update()
    62  len %= SHA256_BLOCK_SIZE;  in sha256_base_do_update()
    66  data += blocks * SHA256_BLOCK_SIZE;  in sha256_base_do_update()
    79  const int bit_offset = SHA256_BLOCK_SIZE - sizeof(__be64);  in sha256_base_do_finalize()
    82  unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;  in sha256_base_do_finalize()
    86  memset(sctx->buf + partial, 0x0, SHA256_BLOCK_SIZE - partial);  in sha256_base_do_finalize()
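Taken together, the sha256_base.h hits trace the generic partial-block update: buffer a partial block across calls, flush it once topped up, hash whole blocks straight from the input, and stash the tail. A minimal userspace sketch of that bookkeeping follows; the struct, typedef, and function names are illustrative, not the kernel's:

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    #define SHA256_BLOCK_SIZE 64

    struct sha_ctx {
        uint64_t count;                   /* total bytes fed in so far */
        uint8_t  buf[SHA256_BLOCK_SIZE];  /* partial block carried between calls */
    };

    /* block_fn consumes `blocks` whole 64-byte blocks starting at `data`. */
    typedef void (*block_fn_t)(struct sha_ctx *ctx, const uint8_t *data,
                               size_t blocks);

    static void do_update(struct sha_ctx *ctx, const uint8_t *data, size_t len,
                          block_fn_t block_fn)
    {
        size_t partial = ctx->count % SHA256_BLOCK_SIZE;

        ctx->count += len;

        if (partial + len >= SHA256_BLOCK_SIZE) {
            if (partial) {
                /* Top up and flush the buffered partial block first. */
                size_t p = SHA256_BLOCK_SIZE - partial;

                memcpy(ctx->buf + partial, data, p);
                data += p;
                len -= p;
                block_fn(ctx, ctx->buf, 1);
                partial = 0;
            }

            /* Hash as many whole blocks as possible straight from the input. */
            size_t blocks = len / SHA256_BLOCK_SIZE;

            len %= SHA256_BLOCK_SIZE;
            if (blocks) {
                block_fn(ctx, data, blocks);
                data += blocks * SHA256_BLOCK_SIZE;
            }
        }
        if (len)
            memcpy(ctx->buf + partial, data, len);  /* stash the remainder */
    }

Deferring the tail into ctx->buf is what lets callers feed data in arbitrary-sized pieces while the block function only ever sees whole 64-byte blocks.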
sha2.h
    15  #define SHA256_BLOCK_SIZE 64  (macro definition)
    70  u8 buf[SHA256_BLOCK_SIZE];
/linux/net/mptcp/

crypto.c
    46  u8 input[SHA256_BLOCK_SIZE + SHA256_DIGEST_SIZE];  in mptcp_crypto_hmac_sha()
    58  memset(input, 0x36, SHA256_BLOCK_SIZE);  in mptcp_crypto_hmac_sha()
    64  memcpy(&input[SHA256_BLOCK_SIZE], msg, len);  in mptcp_crypto_hmac_sha()
    69  sha256(input, SHA256_BLOCK_SIZE + len, &input[SHA256_BLOCK_SIZE]);  in mptcp_crypto_hmac_sha()
    72  memset(input, 0x5C, SHA256_BLOCK_SIZE);  in mptcp_crypto_hmac_sha()
    78  sha256(input, SHA256_BLOCK_SIZE + SHA256_DIGEST_SIZE, hmac);  in mptcp_crypto_hmac_sha()
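crypto.c shows MPTCP assembling HMAC-SHA256 by hand with the standard 0x36/0x5C pads instead of going through the crypto API. A self-contained sketch of the same construction, assuming a one-shot sha256() helper of this shape; the explicit key-XOR loops and the size limits are spelled out here for clarity, and the limits hold for the fixed-size inputs MPTCP feeds it:

    #include <stdint.h>
    #include <string.h>

    #define SHA256_BLOCK_SIZE  64
    #define SHA256_DIGEST_SIZE 32

    /* Assumed one-shot helper: writes SHA-256(data[0..len)) to out. */
    void sha256(const uint8_t *data, size_t len, uint8_t *out);

    /*
     * HMAC-SHA256 in the same shape as mptcp_crypto_hmac_sha():
     *   inner = H((K ^ ipad) || msg)
     *   hmac  = H((K ^ opad) || inner)
     * Assumes key_len <= SHA256_BLOCK_SIZE and len <= SHA256_DIGEST_SIZE.
     */
    static void hmac_sha256(const uint8_t *key, size_t key_len,
                            const uint8_t *msg, size_t len, uint8_t *hmac)
    {
        uint8_t input[SHA256_BLOCK_SIZE + SHA256_DIGEST_SIZE];
        size_t i;

        memset(input, 0x36, SHA256_BLOCK_SIZE);           /* ipad */
        for (i = 0; i < key_len; i++)
            input[i] ^= key[i];                           /* K ^ ipad */
        memcpy(&input[SHA256_BLOCK_SIZE], msg, len);

        /* Write the inner hash just past the pad block, so the outer
         * message (K ^ opad || inner) can be assembled in place. */
        sha256(input, SHA256_BLOCK_SIZE + len, &input[SHA256_BLOCK_SIZE]);

        memset(input, 0x5C, SHA256_BLOCK_SIZE);           /* opad */
        for (i = 0; i < key_len; i++)
            input[i] ^= key[i];                           /* K ^ opad */

        sha256(input, SHA256_BLOCK_SIZE + SHA256_DIGEST_SIZE, hmac);
    }

Reusing one buffer for both passes, with the inner digest written just past the pad block, is the same in-place trick visible at lines 64-78 above.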
/linux/arch/sparc/crypto/

sha256_glue.c
    68  done = SHA256_BLOCK_SIZE - partial;  in __sha256_sparc64_update()
    72  if (len - done >= SHA256_BLOCK_SIZE) {  in __sha256_sparc64_update()
    73  const unsigned int rounds = (len - done) / SHA256_BLOCK_SIZE;  in __sha256_sparc64_update()
    76  done += rounds * SHA256_BLOCK_SIZE;  in __sha256_sparc64_update()
    86  unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;  in sha256_sparc64_update()
    89  if (partial + len < SHA256_BLOCK_SIZE) {  in sha256_sparc64_update()
   104  static const u8 padding[SHA256_BLOCK_SIZE] = { 0x80, };  in sha256_sparc64_final()
   109  index = sctx->count % SHA256_BLOCK_SIZE;  in sha256_sparc64_final()
   110  padlen = (index < 56) ? (56 - index) : ((SHA256_BLOCK_SIZE+56) - index);  in sha256_sparc64_final()
   172  .cra_blocksize = SHA256_BLOCK_SIZE,
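The final() hits show the classic MD-style padding computation: append 0x80 and zeros up to byte 56 of a block, leaving room for the 8-byte bit count. Restated as a standalone helper (the function name is mine):

    #define SHA256_BLOCK_SIZE 64

    /*
     * Padding bytes (0x80 then zeros) needed so that, after the 8-byte
     * bit count is appended, the message ends exactly on a 64-byte
     * boundary. index is count % SHA256_BLOCK_SIZE; this matches the
     * ternary at line 110 above.
     */
    static unsigned int sha256_pad_len(unsigned int index)
    {
        return (index < 56) ? (56 - index)
                            : ((SHA256_BLOCK_SIZE + 56) - index);
    }

    /* sha256_pad_len(0)  == 56 (message ends on a boundary: one pad block),
     * sha256_pad_len(56) == 64 (padding spills into a second block),
     * sha256_pad_len(63) == 57 */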
/linux/drivers/crypto/nx/

nx-sha256.c
    22  u8 buf[SHA256_BLOCK_SIZE];
    73  u64 buf_len = (sctx->count % SHA256_BLOCK_SIZE);  in nx_sha256_update()
    81  total = (sctx->count % SHA256_BLOCK_SIZE) + len;  in nx_sha256_update()
    82  if (total < SHA256_BLOCK_SIZE) {  in nx_sha256_update()
   134  to_process = to_process & ~(SHA256_BLOCK_SIZE - 1);  in nx_sha256_update()
   168  } while (leftover >= SHA256_BLOCK_SIZE);  in nx_sha256_update()
   201  if (sctx->count >= SHA256_BLOCK_SIZE) {  in nx_sha256_final()
   214  len = sctx->count & (SHA256_BLOCK_SIZE - 1);  in nx_sha256_final()
   218  if (len != (sctx->count & (SHA256_BLOCK_SIZE - 1))) {  in nx_sha256_final()
   282  .cra_blocksize = SHA256_BLOCK_SIZE,
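nx_sha256_update() trims to_process down to a block multiple with a mask rather than a division (line 134), and final() extracts the residue the same way (line 214). A sketch of the identical arithmetic (helper name is mine):

    #define SHA256_BLOCK_SIZE 64

    /*
     * Round a byte count down to a whole number of blocks with a mask,
     * equivalent to n - (n % SHA256_BLOCK_SIZE). Only valid because the
     * block size is a power of two.
     */
    static unsigned long round_down_to_block(unsigned long n)
    {
        return n & ~(unsigned long)(SHA256_BLOCK_SIZE - 1);
    }

    /* round_down_to_block(200) == 192, round_down_to_block(64) == 64,
     * round_down_to_block(63) == 0 */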
/linux/arch/mips/cavium-octeon/crypto/

octeon-sha256.c
   107  partial = sctx->count % SHA256_BLOCK_SIZE;  in __octeon_sha256_update()
   112  if ((partial + len) >= SHA256_BLOCK_SIZE) {  in __octeon_sha256_update()
   116  done + SHA256_BLOCK_SIZE);  in __octeon_sha256_update()
   122  done += SHA256_BLOCK_SIZE;  in __octeon_sha256_update()
   124  } while (done + SHA256_BLOCK_SIZE <= len);  in __octeon_sha256_update()
   143  if ((sctx->count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE)  in octeon_sha256_update()
   238  .cra_blocksize = SHA256_BLOCK_SIZE,
/linux/arch/arm64/crypto/

sha256-glue.c
    77  .base.cra_blocksize = SHA256_BLOCK_SIZE,
   111  chunk + sctx->count % SHA256_BLOCK_SIZE > SHA256_BLOCK_SIZE)  in sha256_update_neon()
   112  chunk = SHA256_BLOCK_SIZE -  in sha256_update_neon()
   113  sctx->count % SHA256_BLOCK_SIZE;  in sha256_update_neon()
   157  .base.cra_blocksize = SHA256_BLOCK_SIZE,
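The statement spanning lines 111-113 shrinks any chunk that would run past the current block boundary, so each kernel-mode NEON section covers a bounded amount of input before the unit is released. The capping arithmetic, restated standalone (function name and form are mine):

    #define SHA256_BLOCK_SIZE 64

    /*
     * Cap one iteration's chunk so it never extends past the end of the
     * current 64-byte block: the glue then holds the NEON unit for at
     * most one block per section instead of across the whole input.
     */
    static unsigned int next_chunk(unsigned int len, unsigned int count)
    {
        unsigned int chunk = len;
        unsigned int partial = count % SHA256_BLOCK_SIZE;

        if (chunk + partial > SHA256_BLOCK_SIZE)
            chunk = SHA256_BLOCK_SIZE - partial;
        return chunk;
    }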
sha2-ce-glue.c
    46  src += (blocks - rem) * SHA256_BLOCK_SIZE;  in __sha2_ce_transform()
    83  bool finalize = !sctx->sst.count && !(len % SHA256_BLOCK_SIZE) && len;  in sha256_ce_finup()
   150  .cra_blocksize = SHA256_BLOCK_SIZE,
   167  .cra_blocksize = SHA256_BLOCK_SIZE,
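sha256_ce_finup() can hand finalization to the hardware in the same pass when the entire message arrives at once as whole blocks: nothing hashed before this call, a non-zero length, and an exact block multiple. That predicate from line 83, restated (name is mine):

    #include <stdbool.h>
    #include <stdint.h>

    #define SHA256_BLOCK_SIZE 64

    /*
     * The transform can pad and finalize in one pass only when no prior
     * data was hashed, the length is non-zero, and it is an exact
     * multiple of the block size.
     */
    static bool can_finalize_in_one_pass(uint64_t prior_count, unsigned int len)
    {
        return !prior_count && !(len % SHA256_BLOCK_SIZE) && len;
    }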
/linux/arch/arm/crypto/

sha2-ce-glue.c
    36  (sctx->count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE)  in sha2_ce_update()
    79  .cra_blocksize = SHA256_BLOCK_SIZE,
    93  .cra_blocksize = SHA256_BLOCK_SIZE,
sha256_neon_glue.c
    33  (sctx->count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE)  in crypto_sha256_neon_update()
    77  .cra_blocksize = SHA256_BLOCK_SIZE,
sha256_glue.c
    68  .cra_blocksize = SHA256_BLOCK_SIZE,
/linux/arch/x86/crypto/

sha256_ssse3_glue.c
    52  (sctx->count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE)  in _sha256_update()
   112  .cra_blocksize = SHA256_BLOCK_SIZE,
   177  .cra_blocksize = SHA256_BLOCK_SIZE,
   253  .cra_blocksize = SHA256_BLOCK_SIZE,
   328  .cra_blocksize = SHA256_BLOCK_SIZE,
/linux/drivers/crypto/

padlock-sha.c
   155  leftover = ((state.count - 1) & (SHA256_BLOCK_SIZE - 1)) + 1;  in padlock_sha256_finup()
   156  space = SHA256_BLOCK_SIZE - leftover;  in padlock_sha256_finup()
   261  .cra_blocksize = SHA256_BLOCK_SIZE,
   381  if ((partial + len) >= SHA256_BLOCK_SIZE) {  in padlock_sha256_update_nano()
   387  done + SHA256_BLOCK_SIZE);  in padlock_sha256_update_nano()
   392  done += SHA256_BLOCK_SIZE;  in padlock_sha256_update_nano()
   397  if (len - done >= SHA256_BLOCK_SIZE) {  in padlock_sha256_update_nano()
   488  .cra_blocksize = SHA256_BLOCK_SIZE,
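padlock_sha256_finup() computes the leftover with a subtract-mask-add trick that yields 1..64 instead of the usual 0..63, so a count that lands exactly on a block boundary keeps the last block whole rather than treating it as empty. Restated standalone (name is mine):

    #define SHA256_BLOCK_SIZE 64

    /*
     * Leftover bytes in the current block, mapped into 1..64 rather than
     * 0..63: when count is a non-zero multiple of the block size, the
     * final block is kept whole instead of being treated as empty.
     */
    static unsigned int leftover_one_based(unsigned long count)
    {
        return ((count - 1) & (SHA256_BLOCK_SIZE - 1)) + 1;
    }

    /* leftover_one_based(64)  == 64 (not 0),
     * leftover_one_based(65)  == 1,
     * leftover_one_based(130) == 2 */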
sahara.c
    35  #define SAHARA_MAX_SHA_BLOCK_SIZE SHA256_BLOCK_SIZE  (macro definition)
  1175  SHA_BUFFER_LEN + SHA256_BLOCK_SIZE);  in sahara_sha_cra_init()
  1262  .cra_blocksize = SHA256_BLOCK_SIZE,
omap-sham.c
   419  d = SHA256_BLOCK_SIZE;  in get_block_size()
   977  bs = SHA256_BLOCK_SIZE;  in omap_sham_init()
  1559  .cra_blocksize = SHA256_BLOCK_SIZE,
  1606  .cra_blocksize = SHA256_BLOCK_SIZE,
/linux/drivers/crypto/qce/

sha.h
    16  #define QCE_SHA_MAX_BLOCKSIZE SHA256_BLOCK_SIZE  (macro definition)
sha.c
   426  .blocksize = SHA256_BLOCK_SIZE,
   444  .blocksize = SHA256_BLOCK_SIZE,
/linux/crypto/

sha256_generic.c
    84  .cra_blocksize = SHA256_BLOCK_SIZE,
/linux/arch/s390/crypto/

sha256_s390.c
    74  .cra_blocksize = SHA256_BLOCK_SIZE,
/linux/drivers/crypto/inside-secure/

safexcel_hash.c
  1275  req->block_sz = SHA256_BLOCK_SIZE;  in safexcel_sha256_init()
  1311  .cra_blocksize = SHA256_BLOCK_SIZE,
  1332  req->block_sz = SHA256_BLOCK_SIZE;  in safexcel_sha224_init()
  1395  req->len = SHA256_BLOCK_SIZE;  in safexcel_hmac_sha224_init()
  1396  req->processed = SHA256_BLOCK_SIZE;  in safexcel_hmac_sha224_init()
  1402  req->block_sz = SHA256_BLOCK_SIZE;  in safexcel_hmac_sha224_init()
  1467  req->len = SHA256_BLOCK_SIZE;  in safexcel_hmac_sha256_init()
  1468  req->processed = SHA256_BLOCK_SIZE;  in safexcel_hmac_sha256_init()
  1474  req->block_sz = SHA256_BLOCK_SIZE;  in safexcel_hmac_sha256_init()
  1512  .cra_blocksize = SHA256_BLOCK_SIZE,
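The safexcel HMAC inits set both req->len and req->processed to one block even though the caller has supplied no data yet: an HMAC state loaded from a precomputed ipad digest already accounts for one hashed key block. A hypothetical mirror of that bookkeeping; the struct and field semantics here are my reading of the listing, not the driver's actual layout:

    #include <stdint.h>

    #define SHA256_BLOCK_SIZE 64

    /* Hypothetical mirror of the driver's request bookkeeping. */
    struct hmac_req_state {
        uint64_t len;        /* length the final padding will encode */
        uint64_t processed;  /* bytes already absorbed into the loaded state */
    };

    /*
     * Starting from a precomputed ipad digest means one full key block
     * has already been hashed, so both counters begin at one block,
     * not at zero.
     */
    static void hmac_init_from_precomputed(struct hmac_req_state *req)
    {
        req->len = SHA256_BLOCK_SIZE;
        req->processed = SHA256_BLOCK_SIZE;
    }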
/linux/drivers/crypto/axis/

artpec6_crypto.c
   291  char partial_buffer[SHA256_BLOCK_SIZE];
   292  char partial_buffer_out[SHA256_BLOCK_SIZE];
   293  char key_buffer[SHA256_BLOCK_SIZE];
   294  char pad_buffer[SHA256_BLOCK_SIZE + 32];
   305  char partial_buffer[SHA256_BLOCK_SIZE];
   314  char hmac_key[SHA256_BLOCK_SIZE];
  2660  .cra_blocksize = SHA256_BLOCK_SIZE,
  2685  .cra_blocksize = SHA256_BLOCK_SIZE,
/linux/drivers/crypto/keembay/

keembay-ocs-hcu-core.c
   585  rctx->blk_sz = SHA256_BLOCK_SIZE;  in kmb_ocs_hcu_init()
   955  .cra_blocksize = SHA256_BLOCK_SIZE,
   980  .cra_blocksize = SHA256_BLOCK_SIZE,
/linux/fs/verity/

hash_algs.c
    18  .block_size = SHA256_BLOCK_SIZE,
/linux/arch/powerpc/crypto/

sha256-spe-glue.c
   228  .cra_blocksize = SHA256_BLOCK_SIZE,
/linux/drivers/crypto/stm32/

stm32-hash.c
   109  #define HASH_MAX_KEY_SIZE (SHA256_BLOCK_SIZE * 8)  (macro definition)
  1298  .cra_blocksize = SHA256_BLOCK_SIZE,
  1324  .cra_blocksize = SHA256_BLOCK_SIZE,