
Searched refs:SHA256_BLOCK_SIZE (Results 1 – 25 of 46) sorted by relevance

/linux-6.1.9/include/crypto/
sha256_base.h
44 unsigned int partial = sctx->count % SHA256_BLOCK_SIZE; in sha256_base_do_update()
48 if (unlikely((partial + len) >= SHA256_BLOCK_SIZE)) { in sha256_base_do_update()
52 int p = SHA256_BLOCK_SIZE - partial; in sha256_base_do_update()
61 blocks = len / SHA256_BLOCK_SIZE; in sha256_base_do_update()
62 len %= SHA256_BLOCK_SIZE; in sha256_base_do_update()
66 data += blocks * SHA256_BLOCK_SIZE; in sha256_base_do_update()
79 const int bit_offset = SHA256_BLOCK_SIZE - sizeof(__be64); in sha256_base_do_finalize()
82 unsigned int partial = sctx->count % SHA256_BLOCK_SIZE; in sha256_base_do_finalize()
86 memset(sctx->buf + partial, 0x0, SHA256_BLOCK_SIZE - partial); in sha256_base_do_finalize()
sha2.h
15 #define SHA256_BLOCK_SIZE 64 macro
70 u8 buf[SHA256_BLOCK_SIZE];
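The two include/crypto hits above are the core of the generic SHA-256 update path: sha2.h fixes SHA256_BLOCK_SIZE at 64 bytes, and sha256_base_do_update() buffers input until a whole block is available, hashes full blocks straight from the caller's data, and carries the remainder over in sctx->buf. The following standalone C sketch illustrates that buffering pattern; toy_sha256_state and toy_block_fn are hypothetical stand-ins, not the kernel's actual types.

#include <stddef.h>
#include <stdint.h>
#include <string.h>

#define SHA256_BLOCK_SIZE 64

struct toy_sha256_state {               /* hypothetical state type */
        uint64_t count;                 /* total bytes fed so far */
        uint8_t  buf[SHA256_BLOCK_SIZE];
};

typedef void (*toy_block_fn)(struct toy_sha256_state *st,
                             const uint8_t *data, size_t blocks);

static void toy_sha256_update(struct toy_sha256_state *st,
                              const uint8_t *data, size_t len,
                              toy_block_fn block_fn)
{
        size_t partial = st->count % SHA256_BLOCK_SIZE;

        st->count += len;

        /* Not enough for a full block yet: just buffer the input. */
        if (partial + len < SHA256_BLOCK_SIZE) {
                memcpy(st->buf + partial, data, len);
                return;
        }

        /* Top up the partial block first, then hash it. */
        if (partial) {
                size_t p = SHA256_BLOCK_SIZE - partial;

                memcpy(st->buf + partial, data, p);
                data += p;
                len -= p;
                block_fn(st, st->buf, 1);
        }

        /* Hash as many whole blocks as possible directly from the input. */
        if (len >= SHA256_BLOCK_SIZE) {
                size_t blocks = len / SHA256_BLOCK_SIZE;

                block_fn(st, data, blocks);
                data += blocks * SHA256_BLOCK_SIZE;
                len %= SHA256_BLOCK_SIZE;
        }

        /* Keep the remainder for the next call. */
        memcpy(st->buf, data, len);
}

The same count % SHA256_BLOCK_SIZE bookkeeping recurs in the sparc, octeon, arm, arm64 and x86 glue code listed below.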
/linux-6.1.9/net/mptcp/
crypto.c
46 u8 input[SHA256_BLOCK_SIZE + SHA256_DIGEST_SIZE]; in mptcp_crypto_hmac_sha()
58 memset(input, 0x36, SHA256_BLOCK_SIZE); in mptcp_crypto_hmac_sha()
64 memcpy(&input[SHA256_BLOCK_SIZE], msg, len); in mptcp_crypto_hmac_sha()
69 sha256(input, SHA256_BLOCK_SIZE + len, &input[SHA256_BLOCK_SIZE]); in mptcp_crypto_hmac_sha()
72 memset(input, 0x5C, SHA256_BLOCK_SIZE); in mptcp_crypto_hmac_sha()
78 sha256(input, SHA256_BLOCK_SIZE + SHA256_DIGEST_SIZE, hmac); in mptcp_crypto_hmac_sha()
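The mptcp/crypto.c hits show HMAC-SHA256 built by hand at block-size granularity: the key is XORed into a block of 0x36 bytes (ipad) for the inner hash and into a block of 0x5C bytes (opad) for the outer hash. The sketch below reproduces that construction with an assumed one-shot sha256(data, len, out) helper of the same call shape as above; like the mptcp buffer layout implies, it requires the key to fit in one block and the message to fit in the SHA256_DIGEST_SIZE tail of the scratch buffer.

#include <stddef.h>
#include <stdint.h>
#include <string.h>

#define SHA256_BLOCK_SIZE  64
#define SHA256_DIGEST_SIZE 32

/* Assumed one-shot helper: hashes len bytes of data into out. */
void sha256(const uint8_t *data, size_t len, uint8_t *out);

/* Requires key_len <= SHA256_BLOCK_SIZE and msg_len <= SHA256_DIGEST_SIZE;
 * longer keys would first be hashed down to one block (omitted here). */
static void toy_hmac_sha256(const uint8_t *key, size_t key_len,
                            const uint8_t *msg, size_t msg_len,
                            uint8_t *hmac)
{
        uint8_t input[SHA256_BLOCK_SIZE + SHA256_DIGEST_SIZE];
        uint8_t inner[SHA256_DIGEST_SIZE];
        size_t i;

        /* Inner hash: H((key ^ ipad) || msg), ipad = repeated 0x36. */
        memset(input, 0x36, SHA256_BLOCK_SIZE);
        for (i = 0; i < key_len; i++)
                input[i] ^= key[i];
        memcpy(&input[SHA256_BLOCK_SIZE], msg, msg_len);
        sha256(input, SHA256_BLOCK_SIZE + msg_len, inner);

        /* Outer hash: H((key ^ opad) || inner), opad = repeated 0x5C. */
        memset(input, 0x5C, SHA256_BLOCK_SIZE);
        for (i = 0; i < key_len; i++)
                input[i] ^= key[i];
        memcpy(&input[SHA256_BLOCK_SIZE], inner, SHA256_DIGEST_SIZE);
        sha256(input, SHA256_BLOCK_SIZE + SHA256_DIGEST_SIZE, hmac);
}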
/linux-6.1.9/arch/sparc/crypto/
sha256_glue.c
37 done = SHA256_BLOCK_SIZE - partial; in __sha256_sparc64_update()
41 if (len - done >= SHA256_BLOCK_SIZE) { in __sha256_sparc64_update()
42 const unsigned int rounds = (len - done) / SHA256_BLOCK_SIZE; in __sha256_sparc64_update()
45 done += rounds * SHA256_BLOCK_SIZE; in __sha256_sparc64_update()
55 unsigned int partial = sctx->count % SHA256_BLOCK_SIZE; in sha256_sparc64_update()
58 if (partial + len < SHA256_BLOCK_SIZE) { in sha256_sparc64_update()
73 static const u8 padding[SHA256_BLOCK_SIZE] = { 0x80, }; in sha256_sparc64_final()
78 index = sctx->count % SHA256_BLOCK_SIZE; in sha256_sparc64_final()
79 padlen = (index < 56) ? (56 - index) : ((SHA256_BLOCK_SIZE+56) - index); in sha256_sparc64_final()
141 .cra_blocksize = SHA256_BLOCK_SIZE,
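Lines 73-79 of the sparc glue show the usual Merkle-Damgard finalization arithmetic: append a 0x80 byte, then zeros, so that after the 8-byte bit count the padded message ends on a SHA256_BLOCK_SIZE boundary. A small sketch of just that length calculation (sha256_padlen() is a hypothetical helper name):

#include <stdint.h>

#define SHA256_BLOCK_SIZE 64

static unsigned int sha256_padlen(uint64_t count)
{
        unsigned int index = count % SHA256_BLOCK_SIZE;

        /* 56 = SHA256_BLOCK_SIZE - 8: space reserved for the 8-byte
         * big-endian bit count appended after the padding. */
        return (index < 56) ? (56 - index)
                            : ((SHA256_BLOCK_SIZE + 56) - index);
}

For count = 3 this gives 53 (3 + 53 + 8 = 64); for count = 60 it gives 60 (60 + 60 + 8 = 128), i.e. the padding spills into a second block when fewer than 9 bytes of the current block remain.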
/linux-6.1.9/arch/mips/cavium-octeon/crypto/
octeon-sha256.c
74 partial = sctx->count % SHA256_BLOCK_SIZE; in __octeon_sha256_update()
79 if ((partial + len) >= SHA256_BLOCK_SIZE) { in __octeon_sha256_update()
83 done + SHA256_BLOCK_SIZE); in __octeon_sha256_update()
89 done += SHA256_BLOCK_SIZE; in __octeon_sha256_update()
91 } while (done + SHA256_BLOCK_SIZE <= len); in __octeon_sha256_update()
110 if ((sctx->count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE) in octeon_sha256_update()
205 .cra_blocksize = SHA256_BLOCK_SIZE,
/linux-6.1.9/drivers/crypto/nx/
nx-sha256.c
22 u8 buf[SHA256_BLOCK_SIZE];
73 u64 buf_len = (sctx->count % SHA256_BLOCK_SIZE); in nx_sha256_update()
81 total = (sctx->count % SHA256_BLOCK_SIZE) + len; in nx_sha256_update()
82 if (total < SHA256_BLOCK_SIZE) { in nx_sha256_update()
134 to_process = to_process & ~(SHA256_BLOCK_SIZE - 1); in nx_sha256_update()
168 } while (leftover >= SHA256_BLOCK_SIZE); in nx_sha256_update()
201 if (sctx->count >= SHA256_BLOCK_SIZE) { in nx_sha256_final()
214 len = sctx->count & (SHA256_BLOCK_SIZE - 1); in nx_sha256_final()
218 if (len != (sctx->count & (SHA256_BLOCK_SIZE - 1))) { in nx_sha256_final()
282 .cra_blocksize = SHA256_BLOCK_SIZE,
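The nx-sha256.c hits rely on SHA256_BLOCK_SIZE being a power of two: count & (SHA256_BLOCK_SIZE - 1) is the remainder modulo 64, and to_process & ~(SHA256_BLOCK_SIZE - 1) rounds down to a whole number of blocks. A quick standalone check of that equivalence:

#include <assert.h>
#include <stddef.h>

#define SHA256_BLOCK_SIZE 64

int main(void)
{
        size_t x = 200;   /* arbitrary example length */

        /* Remainder and round-down via masks, valid because 64 is 2^6. */
        assert((x & (SHA256_BLOCK_SIZE - 1)) == x % SHA256_BLOCK_SIZE);  /* 8 */
        assert((x & ~(size_t)(SHA256_BLOCK_SIZE - 1)) ==
               x - x % SHA256_BLOCK_SIZE);                               /* 192 */
        return 0;
}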
/linux-6.1.9/arch/arm64/crypto/
sha256-glue.c
77 .base.cra_blocksize = SHA256_BLOCK_SIZE,
111 chunk + sctx->count % SHA256_BLOCK_SIZE > SHA256_BLOCK_SIZE) in sha256_update_neon()
112 chunk = SHA256_BLOCK_SIZE - in sha256_update_neon()
113 sctx->count % SHA256_BLOCK_SIZE; in sha256_update_neon()
157 .base.cra_blocksize = SHA256_BLOCK_SIZE,
sha2-ce-glue.c
46 src += (blocks - rem) * SHA256_BLOCK_SIZE; in __sha2_ce_transform()
83 bool finalize = !sctx->sst.count && !(len % SHA256_BLOCK_SIZE) && len; in sha256_ce_finup()
150 .cra_blocksize = SHA256_BLOCK_SIZE,
167 .cra_blocksize = SHA256_BLOCK_SIZE,
/linux-6.1.9/arch/arm/crypto/
sha2-ce-glue.c
36 (sctx->count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE) in sha2_ce_update()
79 .cra_blocksize = SHA256_BLOCK_SIZE,
93 .cra_blocksize = SHA256_BLOCK_SIZE,
sha256_neon_glue.c
33 (sctx->count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE) in crypto_sha256_neon_update()
77 .cra_blocksize = SHA256_BLOCK_SIZE,
sha256_glue.c
68 .cra_blocksize = SHA256_BLOCK_SIZE,
/linux-6.1.9/arch/x86/crypto/
sha256_ssse3_glue.c
52 (sctx->count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE) in _sha256_update()
112 .cra_blocksize = SHA256_BLOCK_SIZE,
177 .cra_blocksize = SHA256_BLOCK_SIZE,
253 .cra_blocksize = SHA256_BLOCK_SIZE,
328 .cra_blocksize = SHA256_BLOCK_SIZE,
/linux-6.1.9/drivers/crypto/
padlock-sha.c
155 leftover = ((state.count - 1) & (SHA256_BLOCK_SIZE - 1)) + 1; in padlock_sha256_finup()
156 space = SHA256_BLOCK_SIZE - leftover; in padlock_sha256_finup()
261 .cra_blocksize = SHA256_BLOCK_SIZE,
381 if ((partial + len) >= SHA256_BLOCK_SIZE) { in padlock_sha256_update_nano()
387 done + SHA256_BLOCK_SIZE); in padlock_sha256_update_nano()
392 done += SHA256_BLOCK_SIZE; in padlock_sha256_update_nano()
397 if (len - done >= SHA256_BLOCK_SIZE) { in padlock_sha256_update_nano()
488 .cra_blocksize = SHA256_BLOCK_SIZE,
/linux-6.1.9/drivers/crypto/qce/
sha.h
16 #define QCE_SHA_MAX_BLOCKSIZE SHA256_BLOCK_SIZE
sha.c
428 .blocksize = SHA256_BLOCK_SIZE,
446 .blocksize = SHA256_BLOCK_SIZE,
/linux-6.1.9/crypto/
sha256_generic.c
72 .cra_blocksize = SHA256_BLOCK_SIZE,
/linux-6.1.9/arch/s390/crypto/
sha256_s390.c
74 .cra_blocksize = SHA256_BLOCK_SIZE,
/linux-6.1.9/drivers/crypto/allwinner/sun8i-ss/
sun8i-ss.h
244 u8 key[SHA256_BLOCK_SIZE];
/linux-6.1.9/arch/powerpc/crypto/
sha256-spe-glue.c
195 .cra_blocksize = SHA256_BLOCK_SIZE,
/linux-6.1.9/drivers/crypto/inside-secure/
safexcel_hash.c
1275 req->block_sz = SHA256_BLOCK_SIZE; in safexcel_sha256_init()
1311 .cra_blocksize = SHA256_BLOCK_SIZE,
1332 req->block_sz = SHA256_BLOCK_SIZE; in safexcel_sha224_init()
1395 req->len = SHA256_BLOCK_SIZE; in safexcel_hmac_sha224_init()
1396 req->processed = SHA256_BLOCK_SIZE; in safexcel_hmac_sha224_init()
1402 req->block_sz = SHA256_BLOCK_SIZE; in safexcel_hmac_sha224_init()
1467 req->len = SHA256_BLOCK_SIZE; in safexcel_hmac_sha256_init()
1468 req->processed = SHA256_BLOCK_SIZE; in safexcel_hmac_sha256_init()
1474 req->block_sz = SHA256_BLOCK_SIZE; in safexcel_hmac_sha256_init()
1512 .cra_blocksize = SHA256_BLOCK_SIZE,
/linux-6.1.9/drivers/crypto/axis/
artpec6_crypto.c
291 char partial_buffer[SHA256_BLOCK_SIZE];
292 char partial_buffer_out[SHA256_BLOCK_SIZE];
293 char key_buffer[SHA256_BLOCK_SIZE];
294 char pad_buffer[SHA256_BLOCK_SIZE + 32];
305 char partial_buffer[SHA256_BLOCK_SIZE];
314 char hmac_key[SHA256_BLOCK_SIZE];
2660 .cra_blocksize = SHA256_BLOCK_SIZE,
2685 .cra_blocksize = SHA256_BLOCK_SIZE,
/linux-6.1.9/drivers/crypto/keembay/
keembay-ocs-hcu-core.c
585 rctx->blk_sz = SHA256_BLOCK_SIZE; in kmb_ocs_hcu_init()
955 .cra_blocksize = SHA256_BLOCK_SIZE,
980 .cra_blocksize = SHA256_BLOCK_SIZE,
/linux-6.1.9/fs/verity/
hash_algs.c
18 .block_size = SHA256_BLOCK_SIZE,
/linux-6.1.9/drivers/crypto/aspeed/
aspeed-hace-hash.c
706 rctx->block_size = SHA256_BLOCK_SIZE; in aspeed_sham_init()
967 .cra_blocksize = SHA256_BLOCK_SIZE,
1091 .cra_blocksize = SHA256_BLOCK_SIZE,
/linux-6.1.9/drivers/crypto/stm32/
stm32-hash.c
109 #define HASH_MAX_KEY_SIZE (SHA256_BLOCK_SIZE * 8)
1298 .cra_blocksize = SHA256_BLOCK_SIZE,
1324 .cra_blocksize = SHA256_BLOCK_SIZE,