
Searched refs:SHA256_BLOCK_SIZE (Results 1 – 25 of 40) sorted by relevance

/Linux-v5.4/include/crypto/
sha256_base.h
41 unsigned int partial = sctx->count % SHA256_BLOCK_SIZE; in sha256_base_do_update()
45 if (unlikely((partial + len) >= SHA256_BLOCK_SIZE)) { in sha256_base_do_update()
49 int p = SHA256_BLOCK_SIZE - partial; in sha256_base_do_update()
58 blocks = len / SHA256_BLOCK_SIZE; in sha256_base_do_update()
59 len %= SHA256_BLOCK_SIZE; in sha256_base_do_update()
63 data += blocks * SHA256_BLOCK_SIZE; in sha256_base_do_update()
76 const int bit_offset = SHA256_BLOCK_SIZE - sizeof(__be64); in sha256_base_do_finalize()
79 unsigned int partial = sctx->count % SHA256_BLOCK_SIZE; in sha256_base_do_finalize()
83 memset(sctx->buf + partial, 0x0, SHA256_BLOCK_SIZE - partial); in sha256_base_do_finalize()
sha.h
18 #define SHA256_BLOCK_SIZE 64 macro
87 u8 buf[SHA256_BLOCK_SIZE];
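
The sha256_base.h and sha.h hits above show the generic partial-block buffering that most of the drivers further down reuse: bytes that do not fill a whole 64-byte block are kept in sctx->buf until enough data arrives. The following is a minimal userspace sketch of that pattern, not the kernel code itself; struct sketch_ctx, process_blocks() and sketch_update() are hypothetical names, and only SHA256_BLOCK_SIZE = 64 and the buffering arithmetic come from the lines shown.

/* Hedged sketch of the sha256_base_do_update()-style buffering. */
#include <stddef.h>
#include <stdint.h>
#include <string.h>

#define SHA256_BLOCK_SIZE 64

struct sketch_ctx {
	uint64_t count;                    /* total bytes hashed so far */
	uint8_t  buf[SHA256_BLOCK_SIZE];   /* holds an incomplete block */
};

/* Placeholder for the arch-specific compression routine. */
static void process_blocks(struct sketch_ctx *ctx,
			   const uint8_t *data, size_t blocks)
{
	(void)ctx; (void)data; (void)blocks;
}

static void sketch_update(struct sketch_ctx *ctx,
			  const uint8_t *data, size_t len)
{
	size_t partial = ctx->count % SHA256_BLOCK_SIZE;

	ctx->count += len;

	if (partial + len >= SHA256_BLOCK_SIZE) {
		/* Top up a previously buffered partial block first. */
		if (partial) {
			size_t p = SHA256_BLOCK_SIZE - partial;

			memcpy(ctx->buf + partial, data, p);
			process_blocks(ctx, ctx->buf, 1);
			data += p;
			len -= p;
			partial = 0;
		}

		/* Hash as many whole blocks as possible straight from data. */
		if (len >= SHA256_BLOCK_SIZE) {
			size_t blocks = len / SHA256_BLOCK_SIZE;

			process_blocks(ctx, data, blocks);
			data += blocks * SHA256_BLOCK_SIZE;
			len %= SHA256_BLOCK_SIZE;
		}
	}

	/* Whatever is left is less than one block; keep it for next time. */
	if (len)
		memcpy(ctx->buf + partial, data, len);
}
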
/Linux-v5.4/arch/sparc/crypto/
sha256_glue.c
69 done = SHA256_BLOCK_SIZE - partial; in __sha256_sparc64_update()
73 if (len - done >= SHA256_BLOCK_SIZE) { in __sha256_sparc64_update()
74 const unsigned int rounds = (len - done) / SHA256_BLOCK_SIZE; in __sha256_sparc64_update()
77 done += rounds * SHA256_BLOCK_SIZE; in __sha256_sparc64_update()
87 unsigned int partial = sctx->count % SHA256_BLOCK_SIZE; in sha256_sparc64_update()
90 if (partial + len < SHA256_BLOCK_SIZE) { in sha256_sparc64_update()
105 static const u8 padding[SHA256_BLOCK_SIZE] = { 0x80, }; in sha256_sparc64_final()
110 index = sctx->count % SHA256_BLOCK_SIZE; in sha256_sparc64_final()
111 padlen = (index < 56) ? (56 - index) : ((SHA256_BLOCK_SIZE+56) - index); in sha256_sparc64_final()
173 .cra_blocksize = SHA256_BLOCK_SIZE,
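
The sha256_sparc64_final() hits above show the usual MD-style padding arithmetic: a 0x80 byte, zeros, and then 8 bytes of bit count at the end of a block. A hedged sketch of just that length computation, with sketch_padlen() as a hypothetical helper name:

#include <stdint.h>

#define SHA256_BLOCK_SIZE 64

/* Bytes of padding needed so the 8-byte length lands at the block's end. */
static unsigned int sketch_padlen(uint64_t count)
{
	unsigned int index = count % SHA256_BLOCK_SIZE;

	return (index < 56) ? (56 - index)
			    : ((SHA256_BLOCK_SIZE + 56) - index);
}
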
/Linux-v5.4/arch/mips/cavium-octeon/crypto/
octeon-sha256.c
107 partial = sctx->count % SHA256_BLOCK_SIZE; in __octeon_sha256_update()
112 if ((partial + len) >= SHA256_BLOCK_SIZE) { in __octeon_sha256_update()
116 done + SHA256_BLOCK_SIZE); in __octeon_sha256_update()
122 done += SHA256_BLOCK_SIZE; in __octeon_sha256_update()
124 } while (done + SHA256_BLOCK_SIZE <= len); in __octeon_sha256_update()
143 if ((sctx->count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE) in octeon_sha256_update()
238 .cra_blocksize = SHA256_BLOCK_SIZE,
/Linux-v5.4/drivers/crypto/nx/
nx-sha256.c
68 u64 buf_len = (sctx->count % SHA256_BLOCK_SIZE); in nx_sha256_update()
76 total = (sctx->count % SHA256_BLOCK_SIZE) + len; in nx_sha256_update()
77 if (total < SHA256_BLOCK_SIZE) { in nx_sha256_update()
129 to_process = to_process & ~(SHA256_BLOCK_SIZE - 1); in nx_sha256_update()
163 } while (leftover >= SHA256_BLOCK_SIZE); in nx_sha256_update()
196 if (sctx->count >= SHA256_BLOCK_SIZE) { in nx_sha256_final()
209 len = sctx->count & (SHA256_BLOCK_SIZE - 1); in nx_sha256_final()
213 if (len != (sctx->count & (SHA256_BLOCK_SIZE - 1))) { in nx_sha256_final()
277 .cra_blocksize = SHA256_BLOCK_SIZE,
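
The nx_sha256_update()/nx_sha256_final() lines above rely on SHA256_BLOCK_SIZE being a power of two: masking with ~(SHA256_BLOCK_SIZE - 1) rounds a byte count down to whole blocks, and masking with (SHA256_BLOCK_SIZE - 1) yields the leftover, equivalent to % SHA256_BLOCK_SIZE. A small sketch with hypothetical helper names:

#include <stdint.h>

#define SHA256_BLOCK_SIZE 64

/* Round down to a whole number of 64-byte blocks. */
static uint64_t round_down_to_blocks(uint64_t nbytes)
{
	return nbytes & ~(uint64_t)(SHA256_BLOCK_SIZE - 1);
}

/* Bytes left over after the last whole block. */
static uint64_t leftover_bytes(uint64_t nbytes)
{
	return nbytes & (SHA256_BLOCK_SIZE - 1);
}
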
/Linux-v5.4/arch/arm64/crypto/
sha256-glue.c
67 .base.cra_blocksize = SHA256_BLOCK_SIZE,
101 chunk + sctx->count % SHA256_BLOCK_SIZE > SHA256_BLOCK_SIZE) in sha256_update_neon()
102 chunk = SHA256_BLOCK_SIZE - in sha256_update_neon()
103 sctx->count % SHA256_BLOCK_SIZE; in sha256_update_neon()
150 .base.cra_blocksize = SHA256_BLOCK_SIZE,
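
The sha256_update_neon() lines above appear to trim each chunk so that the data handed to the NEON block function ends on a block boundary relative to what is already buffered. A hedged sketch of that clamping arithmetic only, with clamp_chunk() as a hypothetical name:

#include <stddef.h>
#include <stdint.h>

#define SHA256_BLOCK_SIZE 64

/* Shrink chunk so chunk + buffered bytes does not exceed one block. */
static size_t clamp_chunk(uint64_t count, size_t chunk)
{
	size_t partial = count % SHA256_BLOCK_SIZE;

	if (chunk + partial > SHA256_BLOCK_SIZE)
		chunk = SHA256_BLOCK_SIZE - partial;

	return chunk;
}
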
sha2-ce-glue.c
60 bool finalize = !sctx->sst.count && !(len % SHA256_BLOCK_SIZE) && len; in sha256_ce_finup()
115 .cra_blocksize = SHA256_BLOCK_SIZE,
129 .cra_blocksize = SHA256_BLOCK_SIZE,
/Linux-v5.4/arch/arm/crypto/
sha2-ce-glue.c
36 (sctx->count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE) in sha2_ce_update()
79 .cra_blocksize = SHA256_BLOCK_SIZE,
93 .cra_blocksize = SHA256_BLOCK_SIZE,
sha256_neon_glue.c
34 (sctx->count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE) in crypto_sha256_neon_update()
78 .cra_blocksize = SHA256_BLOCK_SIZE,
sha256_glue.c
69 .cra_blocksize = SHA256_BLOCK_SIZE,
/Linux-v5.4/arch/x86/crypto/
sha256_ssse3_glue.c
54 (sctx->count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE) in _sha256_update()
113 .cra_blocksize = SHA256_BLOCK_SIZE,
179 .cra_blocksize = SHA256_BLOCK_SIZE,
261 .cra_blocksize = SHA256_BLOCK_SIZE,
341 .cra_blocksize = SHA256_BLOCK_SIZE,
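
Several of the accelerated update paths above (octeon_sha256_update(), sha2_ce_update(), crypto_sha256_neon_update(), _sha256_update()) share the same early-out test: if the new data still fits into the buffered partial block, no block function needs to run yet and the bytes are just appended. A minimal sketch of that predicate, with fits_in_partial_block() as a hypothetical name:

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

#define SHA256_BLOCK_SIZE 64

/* True if count%block + len still fits within one 64-byte block. */
static bool fits_in_partial_block(uint64_t count, size_t len)
{
	return (count % SHA256_BLOCK_SIZE) + len < SHA256_BLOCK_SIZE;
}
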
/Linux-v5.4/drivers/crypto/
padlock-sha.c
154 leftover = ((state.count - 1) & (SHA256_BLOCK_SIZE - 1)) + 1; in padlock_sha256_finup()
155 space = SHA256_BLOCK_SIZE - leftover; in padlock_sha256_finup()
264 .cra_blocksize = SHA256_BLOCK_SIZE,
386 if ((partial + len) >= SHA256_BLOCK_SIZE) { in padlock_sha256_update_nano()
392 done + SHA256_BLOCK_SIZE); in padlock_sha256_update_nano()
397 done += SHA256_BLOCK_SIZE; in padlock_sha256_update_nano()
402 if (len - done >= SHA256_BLOCK_SIZE) { in padlock_sha256_update_nano()
493 .cra_blocksize = SHA256_BLOCK_SIZE,
sahara.c
34 #define SAHARA_MAX_SHA_BLOCK_SIZE SHA256_BLOCK_SIZE
1187 SHA_BUFFER_LEN + SHA256_BLOCK_SIZE); in sahara_sha_cra_init()
1280 .cra_blocksize = SHA256_BLOCK_SIZE,
omap-sham.c
432 d = SHA256_BLOCK_SIZE; in get_block_size()
963 bs = SHA256_BLOCK_SIZE; in omap_sham_init()
1581 .cra_blocksize = SHA256_BLOCK_SIZE,
1626 .cra_blocksize = SHA256_BLOCK_SIZE,
/Linux-v5.4/drivers/crypto/qce/
sha.h
15 #define QCE_SHA_MAX_BLOCKSIZE SHA256_BLOCK_SIZE
sha.c
445 .blocksize = SHA256_BLOCK_SIZE,
463 .blocksize = SHA256_BLOCK_SIZE,
/Linux-v5.4/drivers/crypto/inside-secure/
safexcel_hash.c
1128 req->block_sz = SHA256_BLOCK_SIZE; in safexcel_sha256_init()
1163 .cra_blocksize = SHA256_BLOCK_SIZE,
1183 req->block_sz = SHA256_BLOCK_SIZE; in safexcel_sha224_init()
1245 req->len = SHA256_BLOCK_SIZE; in safexcel_hmac_sha224_init()
1246 req->processed = SHA256_BLOCK_SIZE; in safexcel_hmac_sha224_init()
1251 req->block_sz = SHA256_BLOCK_SIZE; in safexcel_hmac_sha224_init()
1315 req->len = SHA256_BLOCK_SIZE; in safexcel_hmac_sha256_init()
1316 req->processed = SHA256_BLOCK_SIZE; in safexcel_hmac_sha256_init()
1321 req->block_sz = SHA256_BLOCK_SIZE; in safexcel_hmac_sha256_init()
1358 .cra_blocksize = SHA256_BLOCK_SIZE,
/Linux-v5.4/crypto/
sha256_generic.c
80 .cra_blocksize = SHA256_BLOCK_SIZE,
/Linux-v5.4/arch/s390/crypto/
sha256_s390.c
74 .cra_blocksize = SHA256_BLOCK_SIZE,
/Linux-v5.4/drivers/crypto/axis/
artpec6_crypto.c
290 char partial_buffer[SHA256_BLOCK_SIZE];
291 char partial_buffer_out[SHA256_BLOCK_SIZE];
292 char key_buffer[SHA256_BLOCK_SIZE];
293 char pad_buffer[SHA256_BLOCK_SIZE + 32];
304 char partial_buffer[SHA256_BLOCK_SIZE];
313 char hmac_key[SHA256_BLOCK_SIZE];
2667 .cra_blocksize = SHA256_BLOCK_SIZE,
2691 .cra_blocksize = SHA256_BLOCK_SIZE,
/Linux-v5.4/fs/verity/
hash_algs.c
18 .block_size = SHA256_BLOCK_SIZE,
/Linux-v5.4/arch/powerpc/crypto/
sha256-spe-glue.c
229 .cra_blocksize = SHA256_BLOCK_SIZE,
/Linux-v5.4/drivers/crypto/mediatek/
mtk-sha.c
392 ctx->bs = SHA256_BLOCK_SIZE; in mtk_sha_init()
980 .cra_blocksize = SHA256_BLOCK_SIZE,
1057 .cra_blocksize = SHA256_BLOCK_SIZE,
/Linux-v5.4/drivers/crypto/stm32/
stm32-hash.c
107 #define HASH_MAX_KEY_SIZE (SHA256_BLOCK_SIZE * 8)
1295 .cra_blocksize = SHA256_BLOCK_SIZE,
1321 .cra_blocksize = SHA256_BLOCK_SIZE,
/Linux-v5.4/drivers/crypto/rockchip/
rk3288_crypto_ahash.c
363 .cra_blocksize = SHA256_BLOCK_SIZE,
