
Searched refs: AES_BLOCK_SIZE (results 1–25 of 90, sorted by relevance)

/Linux-v4.19/arch/arm64/crypto/
aes-glue.c
102 u8 dg[AES_BLOCK_SIZE];
142 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_encrypt()
147 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_encrypt()
162 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_decrypt()
167 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_decrypt()
182 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in cbc_encrypt()
187 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in cbc_encrypt()
202 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in cbc_decrypt()
207 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in cbc_decrypt()
222 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ctr_encrypt()
[all …]
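
Note on the pattern: every hit in aes-glue.c is the same skcipher-walk idiom. Process as many whole AES blocks as the walk currently exposes, then hand the sub-block remainder back to skcipher_walk_done(), which carries it into the next pass. A minimal sketch against the v4.19 skcipher API, with the hypothetical my_ecb_encrypt_blocks() standing in for the NEON core:

    #include <crypto/aes.h>
    #include <crypto/internal/skcipher.h>

    /* Sketch only; my_ecb_encrypt_blocks() is hypothetical. */
    static int ecb_encrypt_sketch(struct skcipher_request *req)
    {
            struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
            struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
            struct skcipher_walk walk;
            unsigned int blocks;
            int err;

            err = skcipher_walk_virt(&walk, req, false);

            while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
                    /* encrypt 'blocks' whole blocks from the walk buffer */
                    my_ecb_encrypt_blocks(walk.dst.virt.addr,
                                          walk.src.virt.addr,
                                          ctx->key_enc, blocks);
                    /* return the partial tail; the walk re-presents it */
                    err = skcipher_walk_done(&walk,
                                             walk.nbytes % AES_BLOCK_SIZE);
            }
            return err;
    }
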
aes-neonbs-glue.c
54 u8 rk[13 * (8 * AES_BLOCK_SIZE) + 32];
56 } __aligned(AES_BLOCK_SIZE);
104 while (walk.nbytes >= AES_BLOCK_SIZE) { in __ecb_crypt()
105 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in __ecb_crypt()
109 walk.stride / AES_BLOCK_SIZE); in __ecb_crypt()
116 walk.nbytes - blocks * AES_BLOCK_SIZE); in __ecb_crypt()
163 while (walk.nbytes >= AES_BLOCK_SIZE) { in cbc_encrypt()
164 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in cbc_encrypt()
172 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in cbc_encrypt()
186 while (walk.nbytes >= AES_BLOCK_SIZE) { in cbc_decrypt()
[all …]
ghash-ce-glue.c
268 __aes_arm64_encrypt(ctx->aes_key.key_enc, key, (u8[AES_BLOCK_SIZE]){}, in gcm_setkey()
355 u8 mac[AES_BLOCK_SIZE]; in gcm_final()
366 crypto_xor(tag, mac, AES_BLOCK_SIZE); in gcm_final()
374 u8 iv[AES_BLOCK_SIZE]; in gcm_encrypt()
375 u8 ks[2 * AES_BLOCK_SIZE]; in gcm_encrypt()
376 u8 tag[AES_BLOCK_SIZE]; in gcm_encrypt()
389 if (likely(may_use_simd() && walk.total >= 2 * AES_BLOCK_SIZE)) { in gcm_encrypt()
397 pmull_gcm_encrypt_block(ks + AES_BLOCK_SIZE, iv, NULL, nrounds); in gcm_encrypt()
401 int blocks = walk.nbytes / (2 * AES_BLOCK_SIZE) * 2; in gcm_encrypt()
412 walk.nbytes % (2 * AES_BLOCK_SIZE)); in gcm_encrypt()
[all …]
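
The PMULL GCM path above works on pairs of blocks, hence all the 2 * AES_BLOCK_SIZE strides. The block-count line from gcm_encrypt() rounds down to an even count; for example, with 95 bytes in the walk:

    int blocks = walk.nbytes / (2 * AES_BLOCK_SIZE) * 2;
    /* 95 / 32 * 2 == 4 blocks consumed; 95 % 32 == 31 bytes of tail
     * are reported back via skcipher_walk_done(). */
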
aes-ce-ccm-glue.c
74 __be32 *n = (__be32 *)&maciv[AES_BLOCK_SIZE - 8]; in ccm_init_mac()
92 memcpy(maciv, req->iv, AES_BLOCK_SIZE - l); in ccm_init_mac()
105 memset(&req->iv[AES_BLOCK_SIZE - l], 0, l); in ccm_init_mac()
118 if (*macp > 0 && *macp < AES_BLOCK_SIZE) { in ccm_update_mac()
119 int added = min(abytes, AES_BLOCK_SIZE - *macp); in ccm_update_mac()
128 while (abytes > AES_BLOCK_SIZE) { in ccm_update_mac()
131 crypto_xor(mac, in, AES_BLOCK_SIZE); in ccm_update_mac()
133 in += AES_BLOCK_SIZE; in ccm_update_mac()
134 abytes -= AES_BLOCK_SIZE; in ccm_update_mac()
191 u8 buf[AES_BLOCK_SIZE]; in ccm_crypt_fallback()
[all …]
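
ccm_update_mac() above is a CBC-MAC absorb step: top up a partially filled MAC block first, then fold in whole blocks by encrypt-then-XOR, keeping the final chunk buffered for the finalization pass. A simplified sketch, assuming a hypothetical one-block aes_encrypt_block() in place of __aes_arm64_encrypt():

    static void cbc_mac_update(u8 mac[AES_BLOCK_SIZE], u32 *macp,
                               const u8 *in, u32 abytes)
    {
            /* top up a partially filled MAC block */
            if (*macp > 0 && *macp < AES_BLOCK_SIZE) {
                    u32 added = min(abytes, AES_BLOCK_SIZE - *macp);

                    crypto_xor(&mac[*macp], in, added);
                    *macp += added;
                    in += added;
                    abytes -= added;
            }

            /* absorb whole blocks; the strict '>' keeps the last
             * chunk buffered, as the arm64 code does */
            while (abytes > AES_BLOCK_SIZE) {
                    aes_encrypt_block(mac);      /* hypothetical core */
                    crypto_xor(mac, in, AES_BLOCK_SIZE);
                    in += AES_BLOCK_SIZE;
                    abytes -= AES_BLOCK_SIZE;
            }

            if (abytes > 0) {
                    aes_encrypt_block(mac);
                    crypto_xor(mac, in, abytes);
                    *macp = abytes;
            }
    }
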
aes-ctr-fallback.h
21 u8 buf[AES_BLOCK_SIZE]; in aes_ctr_encrypt_fallback()
33 nbytes = round_down(nbytes, AES_BLOCK_SIZE); in aes_ctr_encrypt_fallback()
34 tail = walk.nbytes % AES_BLOCK_SIZE; in aes_ctr_encrypt_fallback()
38 int bsize = min(nbytes, AES_BLOCK_SIZE); in aes_ctr_encrypt_fallback()
43 crypto_inc(walk.iv, AES_BLOCK_SIZE); in aes_ctr_encrypt_fallback()
45 dst += AES_BLOCK_SIZE; in aes_ctr_encrypt_fallback()
46 src += AES_BLOCK_SIZE; in aes_ctr_encrypt_fallback()
47 nbytes -= AES_BLOCK_SIZE; in aes_ctr_encrypt_fallback()
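
aes-ctr-fallback.h above is the textbook counter-mode loop: encrypt the counter into a keystream buffer, XOR it into the data, and increment the big-endian counter. A condensed sketch of the inner loop (the full header also rounds nbytes down per walk pass and reports the tail to skcipher_walk_done()):

    u8 buf[AES_BLOCK_SIZE];

    while (nbytes > 0) {
            int bsize = min(nbytes, AES_BLOCK_SIZE); /* short final block */

            /* keystream block = AES-Enc(key, counter) */
            __aes_arm64_encrypt(ctx->key_enc, buf, walk.iv,
                                6 + ctx->key_length / 4);
            crypto_xor_cpy(dst, src, buf, bsize);   /* dst = src ^ keystream */
            crypto_inc(walk.iv, AES_BLOCK_SIZE);    /* big-endian counter++ */

            dst += AES_BLOCK_SIZE;
            src += AES_BLOCK_SIZE;
            nbytes -= AES_BLOCK_SIZE;
    }
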
/Linux-v4.19/arch/arm/crypto/
aes-neonbs-glue.c
47 u8 rk[13 * (8 * AES_BLOCK_SIZE) + 32] __aligned(AES_BLOCK_SIZE);
92 while (walk.nbytes >= AES_BLOCK_SIZE) { in __ecb_crypt()
93 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in __ecb_crypt()
97 walk.stride / AES_BLOCK_SIZE); in __ecb_crypt()
102 walk.nbytes - blocks * AES_BLOCK_SIZE); in __ecb_crypt()
161 while (walk.nbytes >= AES_BLOCK_SIZE) { in cbc_decrypt()
162 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in cbc_decrypt()
166 walk.stride / AES_BLOCK_SIZE); in cbc_decrypt()
172 walk.nbytes - blocks * AES_BLOCK_SIZE); in cbc_decrypt()
200 u8 buf[AES_BLOCK_SIZE]; in ctr_encrypt()
[all …]
aes-ce-glue.c
50 u8 b[AES_BLOCK_SIZE];
188 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_encrypt()
191 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_encrypt()
208 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_decrypt()
211 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_decrypt()
228 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in cbc_encrypt()
232 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in cbc_encrypt()
249 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in cbc_decrypt()
253 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in cbc_decrypt()
269 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ctr_encrypt()
[all …]
/Linux-v4.19/net/mac80211/
fils_aead.c
31 u8 d[AES_BLOCK_SIZE], tmp[AES_BLOCK_SIZE] = {}; in aes_s2v()
38 crypto_shash_digest(desc, tmp, AES_BLOCK_SIZE, d); in aes_s2v()
44 crypto_xor(d, tmp, AES_BLOCK_SIZE); in aes_s2v()
49 if (len[i] >= AES_BLOCK_SIZE) { in aes_s2v()
52 crypto_shash_update(desc, addr[i], len[i] - AES_BLOCK_SIZE); in aes_s2v()
53 crypto_xor(d, addr[i] + len[i] - AES_BLOCK_SIZE, in aes_s2v()
54 AES_BLOCK_SIZE); in aes_s2v()
63 crypto_shash_finup(desc, d, AES_BLOCK_SIZE, v); in aes_s2v()
74 u8 v[AES_BLOCK_SIZE]; in aes_siv_encrypt()
109 memcpy(out, v, AES_BLOCK_SIZE); in aes_siv_encrypt()
[all …]
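
aes_s2v() above implements S2V from RFC 5297 (AES-SIV): between CMAC passes the accumulator d is doubled in GF(2^128). A sketch of that doubling step, assuming the standard big-endian bit order and reduction constant 0x87:

    /* d = dbl(d): shift left one bit; if the MSB fell off, reduce by
     * XORing 0x87 into the low byte (x^128 = x^7 + x^2 + x + 1). */
    static void gf128_dbl(u8 d[AES_BLOCK_SIZE])
    {
            bool carry = d[0] & 0x80;
            int i;

            for (i = 0; i < AES_BLOCK_SIZE - 1; i++)
                    d[i] = (d[i] << 1) | (d[i + 1] >> 7);
            d[AES_BLOCK_SIZE - 1] <<= 1;

            if (carry)
                    d[AES_BLOCK_SIZE - 1] ^= 0x87;
    }
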
/Linux-v4.19/drivers/crypto/nx/
nx-aes-xcbc.c
35 u8 state[AES_BLOCK_SIZE];
37 u8 buffer[AES_BLOCK_SIZE];
76 u8 keys[2][AES_BLOCK_SIZE]; in nx_xcbc_empty()
83 memcpy(key, csbcpb->cpb.aes_xcbc.key, AES_BLOCK_SIZE); in nx_xcbc_empty()
84 memcpy(csbcpb->cpb.aes_ecb.key, key, AES_BLOCK_SIZE); in nx_xcbc_empty()
120 memcpy(csbcpb->cpb.aes_ecb.key, keys[0], AES_BLOCK_SIZE); in nx_xcbc_empty()
127 len = AES_BLOCK_SIZE; in nx_xcbc_empty()
131 if (len != AES_BLOCK_SIZE) in nx_xcbc_empty()
146 memcpy(csbcpb->cpb.aes_xcbc.key, key, AES_BLOCK_SIZE); in nx_xcbc_empty()
203 if (total <= AES_BLOCK_SIZE) { in nx_xcbc_update()
[all …]
nx-aes-gcm.c
124 if (nbytes <= AES_BLOCK_SIZE) { in nx_gca()
167 AES_BLOCK_SIZE); in nx_gca()
176 memcpy(out, csbcpb_aead->cpb.aes_gca.out_pat, AES_BLOCK_SIZE); in nx_gca()
205 memcpy(csbcpb->cpb.aes_gcm.iv_or_cnt, desc->info, AES_BLOCK_SIZE); in gmac()
237 csbcpb->cpb.aes_gcm.out_pat_or_mac, AES_BLOCK_SIZE); in gmac()
239 csbcpb->cpb.aes_gcm.out_s0, AES_BLOCK_SIZE); in gmac()
262 char out[AES_BLOCK_SIZE]; in gcm_empty()
280 len = AES_BLOCK_SIZE; in gcm_empty()
286 if (len != AES_BLOCK_SIZE) in gcm_empty()
392 memcpy(desc.info, csbcpb->cpb.aes_gcm.out_cnt, AES_BLOCK_SIZE); in gcm_aes_nx_crypt()
[all …]
/Linux-v4.19/arch/s390/crypto/
paes_s390.c
103 while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) { in ecb_paes_crypt()
105 n = nbytes & ~(AES_BLOCK_SIZE - 1); in ecb_paes_crypt()
143 .cra_blocksize = AES_BLOCK_SIZE,
197 u8 iv[AES_BLOCK_SIZE]; in cbc_paes_crypt()
202 memcpy(param.iv, walk->iv, AES_BLOCK_SIZE); in cbc_paes_crypt()
204 while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) { in cbc_paes_crypt()
206 n = nbytes & ~(AES_BLOCK_SIZE - 1); in cbc_paes_crypt()
217 memcpy(walk->iv, param.iv, AES_BLOCK_SIZE); in cbc_paes_crypt()
246 .cra_blocksize = AES_BLOCK_SIZE,
255 .ivsize = AES_BLOCK_SIZE,
[all …]
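
The s390 loops above round byte counts down to whole blocks with a mask instead of a division, which works because AES_BLOCK_SIZE (16) is a power of two:

    /* largest multiple of 16 not exceeding nbytes, e.g. 100 -> 96
     * (six blocks; the 4 leftover bytes are handled next pass) */
    n = nbytes & ~(AES_BLOCK_SIZE - 1);
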
aes_s390.c
65 u8 buf[AES_BLOCK_SIZE];
119 cpacf_km(sctx->fc, &sctx->key, out, in, AES_BLOCK_SIZE); in aes_encrypt()
131 &sctx->key, out, in, AES_BLOCK_SIZE); in aes_decrypt()
165 .cra_blocksize = AES_BLOCK_SIZE,
265 while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) { in ecb_aes_crypt()
267 n = nbytes & ~(AES_BLOCK_SIZE - 1); in ecb_aes_crypt()
335 .cra_blocksize = AES_BLOCK_SIZE,
380 u8 iv[AES_BLOCK_SIZE]; in cbc_aes_crypt()
385 memcpy(param.iv, walk->iv, AES_BLOCK_SIZE); in cbc_aes_crypt()
387 while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) { in cbc_aes_crypt()
[all …]
/Linux-v4.19/drivers/crypto/ccree/
cc_cipher.c
99 if (size >= AES_BLOCK_SIZE && in validate_data_size()
100 IS_ALIGNED(size, AES_BLOCK_SIZE)) in validate_data_size()
104 if (size >= AES_BLOCK_SIZE) in validate_data_size()
114 if (IS_ALIGNED(size, AES_BLOCK_SIZE)) in validate_data_size()
611 u8 *pos = (ctr + AES_BLOCK_SIZE); in cc_update_ctr()
616 for (size = AES_BLOCK_SIZE; size; size--) { in cc_update_ctr()
658 len = ALIGN(req->cryptlen, AES_BLOCK_SIZE) / AES_BLOCK_SIZE; in cc_cipher_complete()
806 .blocksize = AES_BLOCK_SIZE,
813 .ivsize = AES_BLOCK_SIZE,
822 .blocksize = AES_BLOCK_SIZE,
[all …]
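
validate_data_size() above encodes per-mode length rules: block modes need an exact multiple of AES_BLOCK_SIZE, ciphertext-stealing needs at least one full block, and stream modes take any length. A compressed, hedged sketch (the flag names are illustrative, not the driver's enum):

    if (stream_mode)                                /* e.g. CTR, OFB */
            return 0;                               /* any length */
    if (cts_mode && size >= AES_BLOCK_SIZE)
            return 0;                               /* tail is stolen */
    if (size >= AES_BLOCK_SIZE && IS_ALIGNED(size, AES_BLOCK_SIZE))
            return 0;                               /* whole blocks only */
    return -EINVAL;
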
cc_aead.h
17 #define CCM_CONFIG_BUF_SIZE (AES_BLOCK_SIZE * 3)
52 u8 ctr_iv[AES_BLOCK_SIZE] ____cacheline_aligned;
55 u8 gcm_iv_inc1[AES_BLOCK_SIZE] ____cacheline_aligned;
56 u8 gcm_iv_inc2[AES_BLOCK_SIZE] ____cacheline_aligned;
57 u8 hkey[AES_BLOCK_SIZE] ____cacheline_aligned;
/Linux-v4.19/drivers/crypto/ccp/
ccp-crypto.h
110 u8 k1[AES_BLOCK_SIZE];
111 u8 k2[AES_BLOCK_SIZE];
116 u8 iv[AES_BLOCK_SIZE];
119 u8 tag[AES_BLOCK_SIZE];
123 u8 rfc3686_iv[AES_BLOCK_SIZE];
141 u8 iv[AES_BLOCK_SIZE];
145 u8 buf[AES_BLOCK_SIZE];
149 u8 pad[AES_BLOCK_SIZE];
157 u8 iv[AES_BLOCK_SIZE];
160 u8 buf[AES_BLOCK_SIZE];
[all …]
ccp-crypto-aes.c
35 memcpy(req->info, rctx->iv, AES_BLOCK_SIZE); in ccp_aes_complete()
84 (req->nbytes & (AES_BLOCK_SIZE - 1))) in ccp_aes_crypt()
91 memcpy(rctx->iv, req->info, AES_BLOCK_SIZE); in ccp_aes_crypt()
93 iv_len = AES_BLOCK_SIZE; in ccp_aes_crypt()
223 .cra_blocksize = AES_BLOCK_SIZE,
276 .blocksize = AES_BLOCK_SIZE,
285 .blocksize = AES_BLOCK_SIZE,
286 .ivsize = AES_BLOCK_SIZE,
294 .blocksize = AES_BLOCK_SIZE,
295 .ivsize = AES_BLOCK_SIZE,
[all …]
ccp-crypto-aes-xts.c
73 memcpy(req->info, rctx->iv, AES_BLOCK_SIZE); in ccp_aes_xts_complete()
122 if (req->nbytes & (AES_BLOCK_SIZE - 1)) in ccp_aes_xts_crypt()
170 memcpy(rctx->iv, req->info, AES_BLOCK_SIZE); in ccp_aes_xts_crypt()
171 sg_init_one(&rctx->iv_sg, rctx->iv, AES_BLOCK_SIZE); in ccp_aes_xts_crypt()
183 rctx->cmd.u.xts.iv_len = AES_BLOCK_SIZE; in ccp_aes_xts_crypt()
253 alg->cra_blocksize = AES_BLOCK_SIZE; in ccp_register_aes_xts_alg()
262 alg->cra_ablkcipher.ivsize = AES_BLOCK_SIZE; in ccp_register_aes_xts_alg()
/Linux-v4.19/drivers/crypto/vmx/
aes_ctr.c
94 u8 keystream[AES_BLOCK_SIZE]; in p8_aes_ctr_final()
108 crypto_inc(ctrblk, AES_BLOCK_SIZE); in p8_aes_ctr_final()
130 ret = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE); in p8_aes_ctr_crypt()
131 while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) { in p8_aes_ctr_crypt()
139 AES_BLOCK_SIZE, in p8_aes_ctr_crypt()
147 inc = (nbytes & AES_BLOCK_MASK) / AES_BLOCK_SIZE; in p8_aes_ctr_crypt()
150 crypto_inc(walk.iv, AES_BLOCK_SIZE); in p8_aes_ctr_crypt()
152 nbytes &= AES_BLOCK_SIZE - 1; in p8_aes_ctr_crypt()
177 .ivsize = AES_BLOCK_SIZE,
/Linux-v4.19/drivers/crypto/
padlock-aes.c
33 #define ecb_fetch_bytes (ecb_fetch_blocks * AES_BLOCK_SIZE)
37 #define cbc_fetch_bytes (cbc_fetch_blocks * AES_BLOCK_SIZE)
214 u8 buf[AES_BLOCK_SIZE * (MAX_ECB_FETCH_BLOCKS - 1) + PADLOCK_ALIGNMENT - 1]; in ecb_crypt_copy()
217 memcpy(tmp, in, count * AES_BLOCK_SIZE); in ecb_crypt_copy()
228 u8 buf[AES_BLOCK_SIZE * (MAX_CBC_FETCH_BLOCKS - 1) + PADLOCK_ALIGNMENT - 1]; in cbc_crypt_copy()
231 memcpy(tmp, in, count * AES_BLOCK_SIZE); in cbc_crypt_copy()
325 .cra_blocksize = AES_BLOCK_SIZE,
356 nbytes / AES_BLOCK_SIZE); in ecb_aes_encrypt()
357 nbytes &= AES_BLOCK_SIZE - 1; in ecb_aes_encrypt()
382 nbytes / AES_BLOCK_SIZE); in ecb_aes_decrypt()
[all …]
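
ecb_crypt_copy() above uses the standard trick for an aligned bounce buffer on the stack: over-allocate by PADLOCK_ALIGNMENT - 1 bytes, then align a pointer inside the array:

    u8 buf[AES_BLOCK_SIZE * (MAX_ECB_FETCH_BLOCKS - 1) +
           PADLOCK_ALIGNMENT - 1];
    u8 *tmp = PTR_ALIGN(&buf[0], PADLOCK_ALIGNMENT); /* aligned view */

    memcpy(tmp, in, count * AES_BLOCK_SIZE);  /* bounce copy, then run
                                               * the engine on tmp */
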
atmel-aes.c
123 u32 iv[AES_BLOCK_SIZE / sizeof(u32)];
135 u32 j0[AES_BLOCK_SIZE / sizeof(u32)];
136 u32 tag[AES_BLOCK_SIZE / sizeof(u32)];
137 u32 ghash[AES_BLOCK_SIZE / sizeof(u32)];
160 u32 lastc[AES_BLOCK_SIZE / sizeof(u32)];
396 atmel_aes_read_n(dd, offset, value, SIZE_IN_WORDS(AES_BLOCK_SIZE)); in atmel_aes_read_block()
402 atmel_aes_write_n(dd, offset, value, SIZE_IN_WORDS(AES_BLOCK_SIZE)); in atmel_aes_write_block()
584 dd->datalen -= AES_BLOCK_SIZE; in atmel_aes_cpu_transfer()
586 if (dd->datalen < AES_BLOCK_SIZE) in atmel_aes_cpu_transfer()
615 size_t padlen = atmel_aes_padlen(len, AES_BLOCK_SIZE); in atmel_aes_cpu_start()
[all …]
omap-aes-gcm.c
49 alen = ALIGN(dd->assoc_len, AES_BLOCK_SIZE); in omap_aes_gcm_done_task()
50 clen = ALIGN(dd->total, AES_BLOCK_SIZE); in omap_aes_gcm_done_task()
110 alen = ALIGN(assoclen, AES_BLOCK_SIZE); in omap_aes_gcm_copy_buffers()
111 clen = ALIGN(cryptlen, AES_BLOCK_SIZE); in omap_aes_gcm_copy_buffers()
121 AES_BLOCK_SIZE, dd->in_sgl, in omap_aes_gcm_copy_buffers()
133 AES_BLOCK_SIZE, &dd->in_sgl[nsg], in omap_aes_gcm_copy_buffers()
156 AES_BLOCK_SIZE, &dd->out_sgl, in omap_aes_gcm_copy_buffers()
195 sg_init_one(&iv_sg, iv, AES_BLOCK_SIZE); in do_encrypt_iv()
196 sg_init_one(&tag_sg, tag, AES_BLOCK_SIZE); in do_encrypt_iv()
200 skcipher_request_set_crypt(sk_req, &iv_sg, &tag_sg, AES_BLOCK_SIZE, in do_encrypt_iv()
/Linux-v4.19/arch/powerpc/crypto/
aes-spe-glue.c
200 nbytes - MAX_BYTES : nbytes & (AES_BLOCK_SIZE - 1); in ppc_ecb_encrypt()
228 nbytes - MAX_BYTES : nbytes & (AES_BLOCK_SIZE - 1); in ppc_ecb_decrypt()
256 nbytes - MAX_BYTES : nbytes & (AES_BLOCK_SIZE - 1); in ppc_cbc_encrypt()
284 nbytes - MAX_BYTES : nbytes & (AES_BLOCK_SIZE - 1); in ppc_cbc_decrypt()
308 err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE); in ppc_ctr_crypt()
313 nbytes : pbytes & ~(AES_BLOCK_SIZE - 1); in ppc_ctr_crypt()
344 nbytes - MAX_BYTES : nbytes & (AES_BLOCK_SIZE - 1); in ppc_xts_encrypt()
375 nbytes - MAX_BYTES : nbytes & (AES_BLOCK_SIZE - 1); in ppc_xts_decrypt()
403 .cra_blocksize = AES_BLOCK_SIZE,
421 .cra_blocksize = AES_BLOCK_SIZE,
[all …]
/Linux-v4.19/arch/sparc/crypto/
aes_glue.c
213 #define AES_BLOCK_MASK (~(AES_BLOCK_SIZE-1))
237 nbytes &= AES_BLOCK_SIZE - 1; in ecb_encrypt()
267 nbytes &= AES_BLOCK_SIZE - 1; in ecb_decrypt()
297 nbytes &= AES_BLOCK_SIZE - 1; in cbc_encrypt()
328 nbytes &= AES_BLOCK_SIZE - 1; in cbc_decrypt()
340 u64 keystream[AES_BLOCK_SIZE / sizeof(u64)]; in ctr_crypt_final()
346 keystream, AES_BLOCK_SIZE); in ctr_crypt_final()
348 crypto_inc(ctrblk, AES_BLOCK_SIZE); in ctr_crypt_final()
360 err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE); in ctr_crypt()
364 while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) { in ctr_crypt()
[all …]
/Linux-v4.19/drivers/crypto/cavium/nitrox/
nitrox_algs.c
297 .cra_blocksize = AES_BLOCK_SIZE,
304 .ivsize = AES_BLOCK_SIZE,
316 .cra_blocksize = AES_BLOCK_SIZE,
323 .ivsize = AES_BLOCK_SIZE,
335 .cra_blocksize = AES_BLOCK_SIZE,
342 .ivsize = AES_BLOCK_SIZE,
354 .cra_blocksize = AES_BLOCK_SIZE,
361 .ivsize = AES_BLOCK_SIZE,
392 .cra_blocksize = AES_BLOCK_SIZE,
400 .ivsize = AES_BLOCK_SIZE,
/Linux-v4.19/drivers/crypto/mediatek/
mtk-aes.c
22 & ~(AES_BLOCK_SIZE - 1))
24 AES_BLOCK_SIZE * 2)
126 u32 iv[AES_BLOCK_SIZE / sizeof(u32)];
185 len &= AES_BLOCK_SIZE - 1; in mtk_aes_padlen()
186 return len ? AES_BLOCK_SIZE - len : 0; in mtk_aes_padlen()
194 if (!IS_ALIGNED(len, AES_BLOCK_SIZE)) in mtk_aes_check_aligned()
202 if (!IS_ALIGNED(len, AES_BLOCK_SIZE)) in mtk_aes_check_aligned()
211 if (!IS_ALIGNED(sg->length, AES_BLOCK_SIZE)) in mtk_aes_check_aligned()
323 res->hdr += AES_BLOCK_SIZE; in mtk_aes_xmit()
447 AES_BLOCK_SIZE); in mtk_aes_info_init()
[all …]
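
mtk_aes_padlen() above computes the pad needed to reach the next block boundary. Worked example: len = 20 gives 20 & 15 == 4, so the pad is 16 - 4 = 12; an exact multiple pads by 0:

    static unsigned int padlen(unsigned int len)
    {
            len &= AES_BLOCK_SIZE - 1;              /* remainder mod 16 */
            return len ? AES_BLOCK_SIZE - len : 0;  /* bytes to boundary */
    }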
