Searched refs: crypto_tfm_ctx_alignment (Results 1 – 22 of 22) sorted by relevance

/Linux-v4.19/crypto/

D | cmac.c
    270  ALIGN(sizeof(struct cmac_desc_ctx), crypto_tfm_ctx_alignment())  in cmac_create()
    271  + (alignmask & ~(crypto_tfm_ctx_alignment() - 1))  in cmac_create()
    275  ALIGN(sizeof(struct cmac_tfm_ctx), crypto_tfm_ctx_alignment())  in cmac_create()
    277  ~(crypto_tfm_ctx_alignment() - 1))  in cmac_create()

D | hmac.c
    42  crypto_tfm_ctx_alignment());  in hmac_ctx()
    54  crypto_tfm_ctx_alignment());  in hmac_setkey()
    230  ALIGN(ss * 2, crypto_tfm_ctx_alignment());  in hmac_create()
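
Note: the cmac.c and hmac.c hits above (and the xcbc.c ones below) all sit in the template's create() routine, where cra_ctxsize is computed: the template's own state is rounded up to crypto_tfm_ctx_alignment() so the wrapped algorithm's context starts on an aligned boundary, and (alignmask & ~(crypto_tfm_ctx_alignment() - 1)) extra bytes are reserved in case the child needs stricter alignment than the context area guarantees. A minimal userspace sketch of that arithmetic follows; the ALIGN macro mirrors the kernel's, but all sizes and alignments are invented for the example.

/* Sketch of the cra_ctxsize arithmetic in template create() routines.
 * Build with: cc -o ctxsize ctxsize.c && ./ctxsize
 */
#include <stdio.h>

/* Mirrors the kernel's ALIGN(): round x up to the power-of-two boundary a. */
#define ALIGN(x, a) (((x) + (a) - 1) & ~((a) - 1))

int main(void)
{
    unsigned int ctx_align = 8;    /* pretend crypto_tfm_ctx_alignment()     */
    unsigned int alignmask = 15;   /* child algorithm's cra_alignmask        */
    unsigned int own_state = 20;   /* sizeof() of the template's own tfm ctx */
    unsigned int child_ctx = 104;  /* child transform's context size         */

    /* Own state rounded up so the child context lands on a ctx-aligned
     * boundary, plus slack so the child can be bumped to (alignmask + 1). */
    unsigned int ctxsize = ALIGN(own_state, ctx_align)
                         + (alignmask & ~(ctx_align - 1))
                         + child_ctx;

    printf("cra_ctxsize = %u\n", ctxsize);
    return 0;
}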

D | xcbc.c
    239  crypto_tfm_ctx_alignment()) +  in xcbc_create()
    241  ~(crypto_tfm_ctx_alignment() - 1)) +  in xcbc_create()

D | cts.c
    303  crypto_tfm_ctx_alignment()) +  in crypto_cts_init_tfm()
    304  (align & ~(crypto_tfm_ctx_alignment() - 1)) + bsize;  in crypto_cts_init_tfm()

D | gcm.c
    566  align &= ~(crypto_tfm_ctx_alignment() - 1);  in crypto_gcm_init_tfm()
    881  align &= ~(crypto_tfm_ctx_alignment() - 1);  in crypto_rfc4106_init_tfm()
    885  ALIGN(crypto_aead_reqsize(aead), crypto_tfm_ctx_alignment()) +  in crypto_rfc4106_init_tfm()
    1113  align &= ~(crypto_tfm_ctx_alignment() - 1);  in crypto_rfc4543_init_tfm()
    1117  ALIGN(crypto_aead_reqsize(aead), crypto_tfm_ctx_alignment()) +  in crypto_rfc4543_init_tfm()
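
Note: the gcm.c init_tfm hits (and the ccm.c, ctr.c, cts.c and chacha20poly1305.c ones elsewhere in this list) apply the same idea to the request size rather than the context: the wrapper reserves room for the child's request at an offset rounded up to crypto_tfm_ctx_alignment(), plus slack for whatever the child's alignmask demands beyond that. A hedged sketch of that reqsize computation; it loosely follows the rfc4106-style wrappers and every number is a placeholder.

/* Sketch of the reqsize arithmetic in wrapper templates such as
 * rfc4106(gcm(aes)): parent request state || padding || child request.
 */
#include <stdio.h>

#define ALIGN(x, a) (((x) + (a) - 1) & ~((a) - 1))

int main(void)
{
    unsigned int ctx_align     = 8;   /* pretend crypto_tfm_ctx_alignment() */
    unsigned int child_mask    = 63;  /* child's crypto_aead_alignmask()    */
    unsigned int parent_state  = 64;  /* wrapper's own per-request context  */
    unsigned int child_reqsize = 192; /* crypto_aead_reqsize(child)         */

    /* Slack only for the part of the alignmask that the context alignment
     * does not already guarantee (the "align &= ~(...)" lines above). */
    unsigned int align = child_mask & ~(ctx_align - 1);

    unsigned int reqsize = parent_state
                         + ALIGN(child_reqsize, ctx_align)
                         + align;

    printf("wrapper reqsize = %u\n", reqsize);
    return 0;
}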

D | ablkcipher.c
    155  (alignmask & ~(crypto_tfm_ctx_alignment() - 1)));  in ablkcipher_next_slow()
    193  size += alignmask & ~(crypto_tfm_ctx_alignment() - 1);  in ablkcipher_copy_iv()

D | blkcipher.c
    160  (alignmask & ~(crypto_tfm_ctx_alignment() - 1));  in blkcipher_next_slow()
    283  size += walk->alignmask & ~(crypto_tfm_ctx_alignment() - 1);  in blkcipher_copy_iv()

D | ccm.c
    426  align &= ~(crypto_tfm_ctx_alignment() - 1);  in crypto_ccm_init_tfm()
    743  align &= ~(crypto_tfm_ctx_alignment() - 1);  in crypto_rfc4309_init_tfm()
    747  ALIGN(crypto_aead_reqsize(aead), crypto_tfm_ctx_alignment()) +  in crypto_rfc4309_init_tfm()

D | skcipher.c
    239  a = crypto_tfm_ctx_alignment() - 1;  in skcipher_next_slow()
    394  unsigned a = crypto_tfm_ctx_alignment() - 1;  in skcipher_copy_iv()
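
Note: the *_copy_iv() and *_next_slow() hits in ablkcipher.c, blkcipher.c and skcipher.c (and the temporary-buffer helpers in ahash.c and shash.c below) bounce data through a kmalloc()'d scratch buffer. kmalloc() is only assumed to give crypto_tfm_ctx_alignment(), so the allocation is padded by (alignmask & ~(crypto_tfm_ctx_alignment() - 1)) and the pointer is then rounded up to the algorithm's alignmask by hand. A small userspace model of that over-allocate-then-realign trick; malloc() stands in for kmalloc() and the alignments are made up.

/* Model of the over-allocate-and-realign trick in the cipher walk code:
 * allocate enough slack that a pointer with only the allocator's alignment
 * can be rounded up to whatever the algorithm's alignmask demands.
 */
#include <stdio.h>
#include <stdint.h>
#include <stdlib.h>

#define PTR_ALIGN(p, a) ((void *)(((uintptr_t)(p) + (a) - 1) & ~(uintptr_t)((a) - 1)))

int main(void)
{
    size_t ivsize      = 16;
    unsigned alignmask = 63;   /* algorithm wants 64-byte aligned data        */
    unsigned ctx_align = 16;   /* alignment the allocator is assumed to give  */

    /* Extra bytes: only the part of alignmask not already guaranteed. */
    size_t size = ivsize + (alignmask & ~(ctx_align - 1));

    unsigned char *buf = malloc(size);
    if (!buf)
        return 1;

    unsigned char *iv = PTR_ALIGN(buf, alignmask + 1);

    printf("buf=%p aligned iv=%p slack used=%td\n",
           (void *)buf, (void *)iv, iv - buf);
    free(buf);
    return 0;
}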

D | api.c
    332  len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);  in crypto_ctxsize()

D | ctr.c
    330  align &= ~(crypto_tfm_ctx_alignment() - 1);  in crypto_rfc3686_init_tfm()

D | ahash.c
    221  return len + (mask & ~(crypto_tfm_ctx_alignment() - 1));  in ahash_align_buffer_size()

D | shash.c
    44  absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));  in shash_setkey_unaligned()

D | chacha20poly1305.c
    546  align &= ~(crypto_tfm_ctx_alignment() - 1);  in chachapoly_init()

D | algapi.c
    1045  (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1));  in crypto_alg_extsize()
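
Note: crypto_ctxsize() in api.c (above) and crypto_alg_extsize() in algapi.c are where the tfm allocation itself gets its slack bytes: the context area already comes out of kmalloc() aligned to crypto_tfm_ctx_alignment(), so only the part of cra_alignmask beyond that needs extra room, and these are the bytes that helpers like aes_ctx() further down consume when they realign the pointer. A short model of that sizing; the function name and values are illustrative, not the kernel's.

/* Model of crypto_alg_extsize()/crypto_ctxsize()-style sizing: context
 * bytes plus only the slack needed to realign beyond what the allocator
 * already guarantees.
 */
#include <stdio.h>

static unsigned int extsize(unsigned int cra_ctxsize,
                            unsigned int cra_alignmask,
                            unsigned int ctx_align)
{
    return cra_ctxsize + (cra_alignmask & ~(ctx_align - 1));
}

int main(void)
{
    unsigned int ctx_align = 8;  /* pretend crypto_tfm_ctx_alignment() */

    /* An algorithm content with the default alignment needs no slack;
     * one that wants 16-byte data gets 8 extra bytes here. */
    printf("alignmask 0:  %u bytes\n", extsize(100, 0, ctx_align));
    printf("alignmask 15: %u bytes\n", extsize(100, 15, ctx_align));
    return 0;
}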

/Linux-v4.19/net/ipv4/

D | ah4.c
    31  ~(crypto_tfm_ctx_alignment() - 1));  in ah_alloc_tmp()
    33  len = ALIGN(len, crypto_tfm_ctx_alignment());  in ah_alloc_tmp()
    60  crypto_tfm_ctx_alignment());  in ah_tmp_req()

D | esp4.c
    55  ~(crypto_tfm_ctx_alignment() - 1);  in esp_alloc_tmp()
    56  len = ALIGN(len, crypto_tfm_ctx_alignment());  in esp_alloc_tmp()
    84  crypto_tfm_ctx_alignment());  in esp_tmp_req()
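
Note: the IPsec hits in ah4.c/esp4.c (and their identical IPv6 counterparts below) use crypto_tfm_ctx_alignment() to carve a single kmalloc()'d scratch buffer into sections: IV with alignmask slack, then the request rounded up to the context alignment, then the scatterlist array. A userspace sketch of that layout arithmetic, loosely modelled on esp_alloc_tmp(); every size below is a placeholder.

/* Sketch of the esp_alloc_tmp()-style layout: one buffer holding
 * [IV slack | IV | aead request + request ctx | scatterlist array].
 */
#include <stdio.h>

#define ALIGN(x, a) (((x) + (a) - 1) & ~((a) - 1))

int main(void)
{
    unsigned int ctx_align = 8;   /* pretend crypto_tfm_ctx_alignment()     */
    unsigned int aead_mask = 15;  /* crypto_aead_alignmask() placeholder    */
    unsigned int ivsize    = 8;
    unsigned int reqsize   = 64;  /* sizeof(struct aead_request) + req ctx  */
    unsigned int sg_size   = 32;  /* sizeof(struct scatterlist) placeholder */
    unsigned int nfrags    = 3;

    unsigned int len = ivsize;

    /* Slack so the IV can be bumped to the AEAD's alignmask, then round up
     * so the request that follows starts on a ctx-aligned boundary. */
    len += aead_mask & ~(ctx_align - 1);
    len = ALIGN(len, ctx_align);

    /* Request, then the scatterlist array at its own natural alignment. */
    len += reqsize;
    len = ALIGN(len, 16);   /* stand-in for __alignof__(struct scatterlist) */
    len += sg_size * nfrags;

    printf("scratch buffer length = %u\n", len);
    return 0;
}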

/Linux-v4.19/net/ipv6/

D | ah6.c
    68  ~(crypto_tfm_ctx_alignment() - 1));  in ah_alloc_tmp()
    70  len = ALIGN(len, crypto_tfm_ctx_alignment());  in ah_alloc_tmp()
    102  crypto_tfm_ctx_alignment());  in ah_tmp_req()

D | esp6.c
    77  ~(crypto_tfm_ctx_alignment() - 1);  in esp_alloc_tmp()
    78  len = ALIGN(len, crypto_tfm_ctx_alignment());  in esp_alloc_tmp()
    106  crypto_tfm_ctx_alignment());  in esp_tmp_req()

/Linux-v4.19/arch/x86/crypto/

D | aesni-intel_glue.c
    324  if (align <= crypto_tfm_ctx_alignment())  in aesni_rfc4106_gcm_ctx_get()
    334  if (align <= crypto_tfm_ctx_alignment())  in generic_gcmaes_ctx_get()
    345  if (align <= crypto_tfm_ctx_alignment())  in aes_ctx()
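
Note: the aesni-intel_glue.c hits (and the padlock-aes.c one below) are the consumer side of the slack bytes: a driver that wants, say, 16-byte aligned key material first checks whether crypto_tfm_ctx_alignment() already provides that; if so it skips the fix-up, otherwise it rounds the raw context pointer up itself. A hedged sketch of that aes_ctx()-style helper; DRIVER_ALIGN stands in for AESNI_ALIGN/PADLOCK_ALIGNMENT and the call site is invented.

/* Sketch of the aes_ctx()-style helper: only realign the context pointer
 * when the base context alignment is weaker than what the driver needs.
 */
#include <stdio.h>
#include <stdint.h>

#define DRIVER_ALIGN 16   /* stands in for AESNI_ALIGN / PADLOCK_ALIGNMENT */

static void *aes_ctx(void *raw_ctx, unsigned int ctx_align)
{
    unsigned long align = DRIVER_ALIGN;

    /* If the context area is already aligned strongly enough, an
     * "alignment" of 1 makes the rounding below a no-op. */
    if (align <= ctx_align)
        align = 1;

    return (void *)(((uintptr_t)raw_ctx + align - 1) & ~(uintptr_t)(align - 1));
}

int main(void)
{
    unsigned char buf[64];

    printf("ctx_align 8:  %p -> %p\n", (void *)buf, aes_ctx(buf, 8));
    printf("ctx_align 16: %p -> %p\n", (void *)buf, aes_ctx(buf, 16));
    return 0;
}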

/Linux-v4.19/drivers/crypto/

D | padlock-aes.c
    89  if (align <= crypto_tfm_ctx_alignment())  in aes_ctx_common()

/Linux-v4.19/include/linux/

D | crypto.h
    720  static inline unsigned int crypto_tfm_ctx_alignment(void)  in crypto_tfm_ctx_alignment() function
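
Note: the last hit is the definition itself. In include/linux/crypto.h the helper simply reports the compile-time alignment of the __crt_ctx member at the end of struct crypto_tfm, i.e. the alignment every tfm context area is guaranteed to have. A stripped-down userspace rendering of the same idea; the struct layout and the CRYPTO_MINALIGN value are simplified stand-ins for the kernel's (which derives it from ARCH_KMALLOC_MINALIGN).

/* Stripped-down rendering of crypto_tfm_ctx_alignment(): report the
 * alignment attribute of the flexible context member at the end of the
 * tfm structure, which is what every context allocation is guaranteed.
 */
#include <stdio.h>

#define CRYPTO_MINALIGN 8   /* stand-in; the kernel uses ARCH_KMALLOC_MINALIGN */

struct crypto_tfm {
    unsigned int crt_flags;
    void *__crt_ctx[] __attribute__((__aligned__(CRYPTO_MINALIGN)));
};

static inline unsigned int crypto_tfm_ctx_alignment(void)
{
    struct crypto_tfm *tfm;

    /* __alignof__ is a compile-time query; tfm is never dereferenced. */
    return __alignof__(tfm->__crt_ctx);
}

int main(void)
{
    printf("crypto_tfm_ctx_alignment() = %u\n", crypto_tfm_ctx_alignment());
    return 0;
}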