Searched refs:crypto_tfm_ctx_alignment (Results 1 – 22 of 22) sorted by relevance
| /Linux-v5.4/crypto/ |
| D | cmac.c | 265 ALIGN(sizeof(struct cmac_desc_ctx), crypto_tfm_ctx_alignment()) in cmac_create()
|   |        | 266 + (alignmask & ~(crypto_tfm_ctx_alignment() - 1)) in cmac_create()
|   |        | 270 ALIGN(sizeof(struct cmac_tfm_ctx), crypto_tfm_ctx_alignment()) in cmac_create()
|   |        | 272 ~(crypto_tfm_ctx_alignment() - 1)) in cmac_create()
|
| D | hmac.c | 37 crypto_tfm_ctx_alignment()); in hmac_ctx()
|   |        | 49 crypto_tfm_ctx_alignment()); in hmac_setkey()
|   |        | 218 ALIGN(ss * 2, crypto_tfm_ctx_alignment()); in hmac_create()
|
| D | xcbc.c | 229 crypto_tfm_ctx_alignment()) + in xcbc_create()
|   |        | 231 ~(crypto_tfm_ctx_alignment() - 1)) + in xcbc_create()
|
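The cmac.c, hmac.c and xcbc.c hits above all sit in a template's ->create() routine, where the per-request descriptor size and the per-tfm context size are padded: the fixed struct is rounded up to crypto_tfm_ctx_alignment(), and extra slack of `alignmask & ~(crypto_tfm_ctx_alignment() - 1)` is reserved so the data stored after it can later be bumped to the cipher's stricter alignmask. Below is a minimal user-space sketch of that arithmetic only; ALIGN is a local macro and every size in it is invented, not taken from the kernel.

```c
#include <stdio.h>

/* local stand-in for the kernel's ALIGN() macro */
#define ALIGN(x, a)	(((x) + (a) - 1) & ~((a) - 1))

int main(void)
{
	unsigned int ctx_align  = 8;	/* assumed crypto_tfm_ctx_alignment() */
	unsigned int alignmask  = 15;	/* assumed cipher cra_alignmask       */
	unsigned int fixed_part = 20;	/* e.g. sizeof(struct cmac_desc_ctx), hypothetical */
	unsigned int blocksize  = 16;

	/*
	 * Round the fixed part up to the context alignment, then add only
	 * the slack that ctx_align does not already guarantee; blocksize * 2
	 * is an invented example of the variable data stored after it.
	 */
	unsigned int descsize = ALIGN(fixed_part, ctx_align)
			      + (alignmask & ~(ctx_align - 1))
			      + blocksize * 2;

	printf("descsize = %u\n", descsize);	/* 24 + 8 + 32 = 64 */
	return 0;
}
```

The masking step is the key point: only the part of the alignmask that the baseline context alignment does not already cover costs extra bytes.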
| D | ablkcipher.c | 150 (alignmask & ~(crypto_tfm_ctx_alignment() - 1))); in ablkcipher_next_slow()
|   |              | 188 size += alignmask & ~(crypto_tfm_ctx_alignment() - 1); in ablkcipher_copy_iv()
|
| D | cts.c | 307 crypto_tfm_ctx_alignment()) + in crypto_cts_init_tfm()
|   |       | 308 (align & ~(crypto_tfm_ctx_alignment() - 1)) + bsize; in crypto_cts_init_tfm()
|
| D | gcm.c | 550 align &= ~(crypto_tfm_ctx_alignment() - 1); in crypto_gcm_init_tfm()
|   |       | 843 align &= ~(crypto_tfm_ctx_alignment() - 1); in crypto_rfc4106_init_tfm()
|   |       | 847 ALIGN(crypto_aead_reqsize(aead), crypto_tfm_ctx_alignment()) + in crypto_rfc4106_init_tfm()
|   |       | 1071 align &= ~(crypto_tfm_ctx_alignment() - 1); in crypto_rfc4543_init_tfm()
|   |       | 1075 ALIGN(crypto_aead_reqsize(aead), crypto_tfm_ctx_alignment()) + in crypto_rfc4543_init_tfm()
|
| D | blkcipher.c | 155 (alignmask & ~(crypto_tfm_ctx_alignment() - 1)); in blkcipher_next_slow()
|   |             | 278 size += walk->alignmask & ~(crypto_tfm_ctx_alignment() - 1); in blkcipher_copy_iv()
|
| D | ccm.c | 424 align &= ~(crypto_tfm_ctx_alignment() - 1); in crypto_ccm_init_tfm()
|   |       | 721 align &= ~(crypto_tfm_ctx_alignment() - 1); in crypto_rfc4309_init_tfm()
|   |       | 725 ALIGN(crypto_aead_reqsize(aead), crypto_tfm_ctx_alignment()) + in crypto_rfc4309_init_tfm()
|
| D | ctr.c | 239 align &= ~(crypto_tfm_ctx_alignment() - 1); in crypto_rfc3686_init_tfm()
|
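The init_tfm hits above (crypto_gcm_init_tfm(), crypto_rfc4106_init_tfm(), crypto_rfc4543_init_tfm(), crypto_ccm_init_tfm(), crypto_rfc4309_init_tfm(), crypto_rfc3686_init_tfm(), and chachapoly_init() further down) share one idiom: the child algorithm's alignmask is masked with `~(crypto_tfm_ctx_alignment() - 1)` so only the alignment not already provided by the request context is added as padding when the wrapper sets its reqsize. A hedged sketch of that computation follows; all sizes are invented and the names are local to the sketch.

```c
#include <stdio.h>

#define ALIGN(x, a)	(((x) + (a) - 1) & ~((a) - 1))

int main(void)
{
	unsigned int ctx_align     = 8;	  /* assumed crypto_tfm_ctx_alignment() */
	unsigned int align         = 15;  /* assumed child alignmask            */
	unsigned int own_ctx       = 48;  /* wrapper's own request state, hypothetical */
	unsigned int child_reqsize = 96;  /* child aead/skcipher reqsize, hypothetical */

	/* keep only the alignment the request context does not already give */
	align &= ~(ctx_align - 1);

	unsigned int reqsize = own_ctx
			     + ALIGN(child_reqsize, ctx_align)
			     + align;

	printf("reqsize = %u\n", reqsize);	/* 48 + 96 + 8 = 152 */
	return 0;
}
```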
| D | api.c | 327 len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1); in crypto_ctxsize()
|
| D | skcipher.c | 243 a = crypto_tfm_ctx_alignment() - 1; in skcipher_next_slow()
|   |            | 398 unsigned a = crypto_tfm_ctx_alignment() - 1; in skcipher_copy_iv()
|
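ablkcipher_copy_iv(), blkcipher_copy_iv() and skcipher_copy_iv() above use the same slack to bounce an unaligned IV into aligned scratch memory; shash_setkey_unaligned() and ahash_align_buffer_size() below apply the identical formula to temporary key and hash buffers. The sketch below imitates the idea in plain C. It assumes malloc() returns memory at least ctx_align-aligned (as kmalloc() does via CRYPTO_MINALIGN in the kernel); PTR_ALIGN is a local stand-in and copy_iv_aligned() is a hypothetical helper, not a kernel function.

```c
#include <stdint.h>
#include <stdlib.h>
#include <string.h>

/* local stand-in for the kernel's PTR_ALIGN() */
#define PTR_ALIGN(p, a) \
	((void *)(((uintptr_t)(p) + (a) - 1) & ~(uintptr_t)((a) - 1)))

/*
 * Copy 'iv' into freshly allocated scratch aligned to (alignmask + 1).
 * The raw allocation is returned through *raw so the caller can free it;
 * the aligned copy is the return value.
 */
static void *copy_iv_aligned(const void *iv, size_t ivsize,
			     size_t alignmask, size_t ctx_align, void **raw)
{
	/* only the alignment ctx_align does not already give needs slack */
	size_t slack = alignmask & ~(ctx_align - 1);
	unsigned char *buf = malloc(slack + ivsize);
	void *aligned;

	if (!buf)
		return NULL;
	aligned = PTR_ALIGN(buf, alignmask + 1);
	memcpy(aligned, iv, ivsize);
	*raw = buf;
	return aligned;
}

int main(void)
{
	unsigned char iv[16] = { 0 };
	void *raw, *aligned;

	/* e.g. a cipher with alignmask 63, baseline context alignment 8 */
	aligned = copy_iv_aligned(iv, sizeof(iv), 63, 8, &raw);
	if (aligned)
		free(raw);
	return 0;
}
```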
| D | shash.c | 39 absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1)); in shash_setkey_unaligned()
|
| D | ahash.c | 227 return len + (mask & ~(crypto_tfm_ctx_alignment() - 1)); in ahash_align_buffer_size()
|
| D | chacha20poly1305.c | 531 align &= ~(crypto_tfm_ctx_alignment() - 1); in chachapoly_init()
|
| D | algapi.c | 1023 (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1)); in crypto_alg_extsize()
|
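crypto_ctxsize() in api.c and crypto_alg_extsize() in algapi.c are the allocation side of the same scheme: the tfm context is allocated with `cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1)` extra bytes so that a more strictly aligned pointer can always be carved out of it later. A tiny sketch of that sizing, with invented numbers:

```c
#include <stdio.h>

int main(void)
{
	unsigned int ctx_align     = 8;	 /* assumed crypto_tfm_ctx_alignment() */
	unsigned int cra_alignmask = 63; /* assumed algorithm alignmask        */
	unsigned int cra_ctxsize   = 128;

	/* extra bytes reserved on top of cra_ctxsize, as in the hits above */
	unsigned int extsize = cra_ctxsize
			     + (cra_alignmask & ~(ctx_align - 1));

	printf("allocate %u bytes of context\n", extsize);	/* 128 + 56 = 184 */
	return 0;
}
```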
| /Linux-v5.4/net/ipv4/ |
| D | ah4.c | 32 ~(crypto_tfm_ctx_alignment() - 1)); in ah_alloc_tmp()
|   |       | 34 len = ALIGN(len, crypto_tfm_ctx_alignment()); in ah_alloc_tmp()
|   |       | 61 crypto_tfm_ctx_alignment()); in ah_tmp_req()
|
| D | esp4.c | 54 ~(crypto_tfm_ctx_alignment() - 1); in esp_alloc_tmp()
|   |        | 55 len = ALIGN(len, crypto_tfm_ctx_alignment()); in esp_alloc_tmp()
|   |        | 83 crypto_tfm_ctx_alignment()); in esp_tmp_req()
|
| /Linux-v5.4/net/ipv6/ |
| D | ah6.c | 56 ~(crypto_tfm_ctx_alignment() - 1)); in ah_alloc_tmp()
|   |       | 58 len = ALIGN(len, crypto_tfm_ctx_alignment()); in ah_alloc_tmp()
|   |       | 90 crypto_tfm_ctx_alignment()); in ah_tmp_req()
|
| D | esp6.c | 63 ~(crypto_tfm_ctx_alignment() - 1); in esp_alloc_tmp()
|   |        | 64 len = ALIGN(len, crypto_tfm_ctx_alignment()); in esp_alloc_tmp()
|   |        | 92 crypto_tfm_ctx_alignment()); in esp_tmp_req()
|
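ah_alloc_tmp()/esp_alloc_tmp() and their IPv6 twins above size a single scratch blob that holds differently aligned pieces: some raw bytes (ICV, IV, sequence hi) followed by a hash/aead request whose start must be realigned to crypto_tfm_ctx_alignment(). That is why the length is padded by `alignmask & ~(crypto_tfm_ctx_alignment() - 1)` and then rounded with ALIGN(). The sketch below is a simplified, non-kernel imitation of that sizing and of the ah_tmp_req()-style lookup; every size and the layout are assumptions for illustration only.

```c
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#define ALIGN(x, a)	(((x) + (a) - 1) & ~((a) - 1))
#define PTR_ALIGN(p, a) \
	((void *)(((uintptr_t)(p) + (a) - 1) & ~(uintptr_t)((a) - 1)))

int main(void)
{
	unsigned int ctx_align = 8;	/* assumed crypto_tfm_ctx_alignment() */
	unsigned int alignmask = 15;	/* assumed ahash alignmask            */
	unsigned int icv_len   = 12;	/* raw bytes stored before the request, hypothetical */
	unsigned int reqsize   = 96;	/* assumed ahash request size          */

	/* ah_alloc_tmp()-style sizing: raw area plus slack, rounded to the
	 * context alignment, then the request itself */
	unsigned int len = icv_len + (alignmask & ~(ctx_align - 1));
	len = ALIGN(len, ctx_align);
	len += sizeof(void *) /* stands in for the request header */ + reqsize;

	unsigned char *blob = malloc(len);
	if (!blob)
		return 1;

	/* ah_tmp_req()-style lookup: skip the raw area and realign to find
	 * where the request lives inside the blob */
	void *req = PTR_ALIGN(blob + icv_len, ctx_align);

	printf("blob %u bytes, request at offset %td\n",
	       len, (unsigned char *)req - blob);
	free(blob);
	return 0;
}
```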
| /Linux-v5.4/arch/x86/crypto/ |
| D | aesni-intel_glue.c | 291 if (align <= crypto_tfm_ctx_alignment()) in aesni_rfc4106_gcm_ctx_get()
|   |                    | 301 if (align <= crypto_tfm_ctx_alignment()) in generic_gcmaes_ctx_get()
|   |                    | 312 if (align <= crypto_tfm_ctx_alignment()) in aes_ctx()
|
| /Linux-v5.4/drivers/crypto/ |
| D | padlock-aes.c | 90 if (align <= crypto_tfm_ctx_alignment()) in aes_ctx_common()
|
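aes_ctx() in aesni-intel_glue.c and aes_ctx_common() in padlock-aes.c are the consumer side: they take the raw context pointer and bump it up to the alignment the hardware needs, skipping the bump entirely when crypto_tfm_ctx_alignment() already guarantees enough (the `if (align <= crypto_tfm_ctx_alignment())` hits above). A hedged user-space imitation of that getter, with a local PTR_ALIGN, a stand-in ctx_alignment() helper and a plain buffer in place of the tfm context:

```c
#include <stdint.h>
#include <stdio.h>

#define PTR_ALIGN(p, a) \
	((void *)(((uintptr_t)(p) + (a) - 1) & ~(uintptr_t)((a) - 1)))

/* hypothetical stand-in for crypto_tfm_ctx_alignment() */
static unsigned int ctx_alignment(void) { return 8; }

/* aes_ctx()-style getter: realign the context only if the hardware
 * needs more than the context buffer already guarantees */
static void *aes_ctx(void *raw_ctx, unsigned int hw_align)
{
	unsigned int align = hw_align;

	if (align <= ctx_alignment())
		align = 1;	/* baseline alignment is already enough */
	return PTR_ALIGN(raw_ctx, align);
}

int main(void)
{
	/* pretend this is the slack-padded tfm context */
	static unsigned char ctx_buf[64 + 16];

	void *ctx = aes_ctx(ctx_buf, 16);	/* e.g. a 16-byte hardware requirement */
	printf("offset into buffer: %td\n",
	       (unsigned char *)ctx - ctx_buf);
	return 0;
}
```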
| /Linux-v5.4/include/linux/ |
| D | crypto.h | 888 static inline unsigned int crypto_tfm_ctx_alignment(void) in crypto_tfm_ctx_alignment() function
|
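For reference, the single include/linux/crypto.h hit is the helper's definition. In v5.4 it simply reports the alignment of the __crt_ctx member of struct crypto_tfm, i.e. the minimum alignment every context buffer gets for free. Roughly, paraphrased from memory (see crypto.h line 888 for the authoritative version):

```c
/* Paraphrased from include/linux/crypto.h (Linux v5.4); not a verbatim copy.
 * __crt_ctx is declared with CRYPTO_MINALIGN_ATTR, so this returns the
 * minimum alignment every tfm context buffer already has. */
static inline unsigned int crypto_tfm_ctx_alignment(void)
{
	struct crypto_tfm *tfm;

	return __alignof__(tfm->__crt_ctx);
}
```

Every other hit in this listing is either reserving slack on top of, or masking off, exactly this baseline alignment.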