Lines Matching refs:AES_BLOCK_SIZE
47 u8 rk[13 * (8 * AES_BLOCK_SIZE) + 32] __aligned(AES_BLOCK_SIZE);
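
The match at source line 47 is the key-schedule buffer of the bit-sliced AES context; together with the function names below, it points at a NEON bit-sliced (aes-neonbs style) glue driver. A minimal sketch of the struct that declaration sits in, assuming the usual aes-neonbs layout — the struct name and the rounds field are assumptions, only the rk[] line is confirmed by the match:

	#include <crypto/aes.h>		/* AES_BLOCK_SIZE, AES_MIN/MAX_KEY_SIZE */

	/*
	 * The bit-sliced NEON code processes 8 blocks in parallel, so each
	 * expanded round key occupies 8 * AES_BLOCK_SIZE = 128 bytes; 13 of
	 * those plus 32 bytes of regular-form key material gives 1696 bytes.
	 */
	struct aesbs_ctx {
		int	rounds;		/* assumed companion field */
		u8	rk[13 * (8 * AES_BLOCK_SIZE) + 32] __aligned(AES_BLOCK_SIZE);
	};
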
92 while (walk.nbytes >= AES_BLOCK_SIZE) { in __ecb_crypt()
93 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in __ecb_crypt()
97 walk.stride / AES_BLOCK_SIZE); in __ecb_crypt()
102 walk.nbytes - blocks * AES_BLOCK_SIZE); in __ecb_crypt()
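
Source lines 92-102 are the skcipher walk loop of __ecb_crypt(). A sketch of the loop those fragments come from, assuming the standard skcipher walk pattern; the fn function-pointer parameter is an assumption suggested by the double-underscore helper name. This and the sketches below also assume <asm/neon.h>, <crypto/internal/skcipher.h>, <linux/kernel.h> and <linux/module.h> are included:

	static int __ecb_crypt(struct skcipher_request *req,
			       void (*fn)(u8 out[], u8 const in[], u8 const rk[],
					  int rounds, int blocks))
	{
		struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
		struct aesbs_ctx *ctx = crypto_skcipher_ctx(tfm);
		struct skcipher_walk walk;
		int err;

		err = skcipher_walk_virt(&walk, req, false);

		while (walk.nbytes >= AES_BLOCK_SIZE) {
			unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;

			/* On all but the final chunk, round down to the stride */
			if (walk.nbytes < walk.total)
				blocks = round_down(blocks,
						    walk.stride / AES_BLOCK_SIZE);

			kernel_neon_begin();
			fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->rk,
			   ctx->rounds, blocks);
			kernel_neon_end();
			err = skcipher_walk_done(&walk,
						 walk.nbytes - blocks * AES_BLOCK_SIZE);
		}

		return err;
	}

skcipher_walk_done() receives the bytes left unprocessed (source line 102), so a tail shorter than the stride is simply carried over into the next walk chunk.
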
161 while (walk.nbytes >= AES_BLOCK_SIZE) { in cbc_decrypt()
162 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in cbc_decrypt()
166 walk.stride / AES_BLOCK_SIZE); in cbc_decrypt()
172 walk.nbytes - blocks * AES_BLOCK_SIZE); in cbc_decrypt()
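
cbc_decrypt() (source lines 161-172) repeats the same eight-block loop; the only visible difference is that the NEON routine must also consume and update the chaining IV. A compact sketch under that assumption — aesbs_cbc_decrypt() and struct aesbs_cbc_ctx are assumed names:

	static int cbc_decrypt(struct skcipher_request *req)
	{
		struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
		struct aesbs_cbc_ctx *ctx = crypto_skcipher_ctx(tfm); /* assumed type */
		struct skcipher_walk walk;
		int err;

		err = skcipher_walk_virt(&walk, req, false);

		while (walk.nbytes >= AES_BLOCK_SIZE) {
			unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;

			if (walk.nbytes < walk.total)
				blocks = round_down(blocks,
						    walk.stride / AES_BLOCK_SIZE);

			kernel_neon_begin();
			/* assumed asm helper: decrypts `blocks` blocks, updates IV */
			aesbs_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
					  ctx->key.rk, ctx->key.rounds, blocks,
					  walk.iv);
			kernel_neon_end();
			err = skcipher_walk_done(&walk,
						 walk.nbytes - blocks * AES_BLOCK_SIZE);
		}

		return err;
	}
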
200 u8 buf[AES_BLOCK_SIZE]; in ctr_encrypt()
207 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in ctr_encrypt()
208 u8 *final = (walk.total % AES_BLOCK_SIZE) ? buf : NULL; in ctr_encrypt()
212 walk.stride / AES_BLOCK_SIZE); in ctr_encrypt()
220 u8 *dst = walk.dst.virt.addr + blocks * AES_BLOCK_SIZE; in ctr_encrypt()
221 u8 *src = walk.src.virt.addr + blocks * AES_BLOCK_SIZE; in ctr_encrypt()
224 walk.total % AES_BLOCK_SIZE); in ctr_encrypt()
230 walk.nbytes - blocks * AES_BLOCK_SIZE); in ctr_encrypt()
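
The ctr_encrypt() matches (source lines 200-230) expose the partial-final-block handling: a stack buffer receives the last keystream block, and crypto_xor_cpy() applies only walk.total % AES_BLOCK_SIZE bytes of it to the tail. A sketch reconstructed around those fragments; aesbs_ctr_encrypt() is an assumed helper name:

	static int ctr_encrypt(struct skcipher_request *req)
	{
		struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
		struct aesbs_ctx *ctx = crypto_skcipher_ctx(tfm);
		struct skcipher_walk walk;
		u8 buf[AES_BLOCK_SIZE];
		int err;

		err = skcipher_walk_virt(&walk, req, false);

		while (walk.nbytes > 0) {
			unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;
			u8 *final = (walk.total % AES_BLOCK_SIZE) ? buf : NULL;

			if (walk.nbytes < walk.total) {
				blocks = round_down(blocks,
						    walk.stride / AES_BLOCK_SIZE);
				final = NULL;	/* tail not in this chunk yet */
			}

			kernel_neon_begin();
			/* assumed asm helper: also writes one extra keystream
			 * block to `final` when it is non-NULL */
			aesbs_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
					  ctx->rk, ctx->rounds, blocks, walk.iv,
					  final);
			kernel_neon_end();

			if (final) {
				u8 *dst = walk.dst.virt.addr + blocks * AES_BLOCK_SIZE;
				u8 *src = walk.src.virt.addr + blocks * AES_BLOCK_SIZE;

				crypto_xor_cpy(dst, src, final,
					       walk.total % AES_BLOCK_SIZE);

				err = skcipher_walk_done(&walk, 0);
				break;
			}
			err = skcipher_walk_done(&walk,
						 walk.nbytes - blocks * AES_BLOCK_SIZE);
		}

		return err;
	}

final is only non-NULL on the last walk chunk of a request whose total length is not block-aligned; intermediate chunks clear it so the tail is deferred until the walk actually reaches it.
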
285 while (walk.nbytes >= AES_BLOCK_SIZE) { in __xts_crypt()
286 unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE; in __xts_crypt()
290 walk.stride / AES_BLOCK_SIZE); in __xts_crypt()
295 walk.nbytes - blocks * AES_BLOCK_SIZE); in __xts_crypt()
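
__xts_crypt() (source lines 285-295) walks the data exactly like __ecb_crypt(), threading the tweak through walk.iv. What the matches don't show is the tweak setup, sketched here on the assumption that the IV is first encrypted with a separate tweak key; struct aesbs_xts_ctx and its tweak_tfm field are assumed names:

	static int __xts_crypt(struct skcipher_request *req,
			       void (*fn)(u8 out[], u8 const in[], u8 const rk[],
					  int rounds, int blocks, u8 iv[]))
	{
		struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
		struct aesbs_xts_ctx *ctx = crypto_skcipher_ctx(tfm); /* assumed type */
		struct skcipher_walk walk;
		int err;

		err = skcipher_walk_virt(&walk, req, false);

		/* assumed tweak derivation: encrypt the IV with the tweak key */
		crypto_cipher_encrypt_one(ctx->tweak_tfm, walk.iv, walk.iv);

		while (walk.nbytes >= AES_BLOCK_SIZE) {
			unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;

			if (walk.nbytes < walk.total)
				blocks = round_down(blocks,
						    walk.stride / AES_BLOCK_SIZE);

			kernel_neon_begin();
			fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->key.rk,
			   ctx->key.rounds, blocks, walk.iv);
			kernel_neon_end();
			err = skcipher_walk_done(&walk,
						 walk.nbytes - blocks * AES_BLOCK_SIZE);
		}

		return err;
	}
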
316 .base.cra_blocksize = AES_BLOCK_SIZE,
323 .walksize = 8 * AES_BLOCK_SIZE,
331 .base.cra_blocksize = AES_BLOCK_SIZE,
340 .walksize = 8 * AES_BLOCK_SIZE,
341 .ivsize = AES_BLOCK_SIZE,
356 .chunksize = AES_BLOCK_SIZE,
357 .walksize = 8 * AES_BLOCK_SIZE,
358 .ivsize = AES_BLOCK_SIZE,
366 .base.cra_blocksize = AES_BLOCK_SIZE,
375 .walksize = 8 * AES_BLOCK_SIZE,
376 .ivsize = AES_BLOCK_SIZE,
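
Source lines 316-376 are from the skcipher_alg registration array. Two things stand out: every mode advertises .walksize = 8 * AES_BLOCK_SIZE, matching the eight-block stride of the bit-sliced code, and the CTR entry shows no .base.cra_blocksize match, consistent with a stream cipher registering blocksize 1 and expressing its keystream granularity via .chunksize instead. A sketch of what the CTR entry might look like under those assumptions; names, priority, and callbacks are illustrative:

	/* One representative entry; the real array also holds ecb, cbc and xts. */
	static struct skcipher_alg aes_algs[] = { {
		.base.cra_name		= "ctr(aes)",
		.base.cra_driver_name	= "ctr-aes-neonbs",	/* illustrative */
		.base.cra_priority	= 250,			/* illustrative */
		.base.cra_blocksize	= 1,	/* stream cipher, hence no match above */
		.base.cra_ctxsize	= sizeof(struct aesbs_ctx),
		.base.cra_module	= THIS_MODULE,

		.min_keysize		= AES_MIN_KEY_SIZE,
		.max_keysize		= AES_MAX_KEY_SIZE,
		.chunksize		= AES_BLOCK_SIZE,	/* keystream unit (line 356) */
		.walksize		= 8 * AES_BLOCK_SIZE,	/* feed NEON 8 blocks at once */
		.ivsize			= AES_BLOCK_SIZE,	/* the counter block */
		.setkey			= aesbs_setkey,		/* assumed */
		.encrypt		= ctr_encrypt,
		.decrypt		= ctr_encrypt,	/* CTR en/decrypt are identical */
	} };
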