Lines Matching refs:AES_BLOCK_SIZE

 74  __be32 *n = (__be32 *)&maciv[AES_BLOCK_SIZE - 8];  in ccm_init_mac()
 92  memcpy(maciv, req->iv, AES_BLOCK_SIZE - l);  in ccm_init_mac()
105  memset(&req->iv[AES_BLOCK_SIZE - l], 0, l);  in ccm_init_mac()
118  if (*macp > 0 && *macp < AES_BLOCK_SIZE) {  in ccm_update_mac()
119  int added = min(abytes, AES_BLOCK_SIZE - *macp);  in ccm_update_mac()
128  while (abytes > AES_BLOCK_SIZE) {  in ccm_update_mac()
131  crypto_xor(mac, in, AES_BLOCK_SIZE);  in ccm_update_mac()
133  in += AES_BLOCK_SIZE;  in ccm_update_mac()
134  abytes -= AES_BLOCK_SIZE;  in ccm_update_mac()
191  u8 buf[AES_BLOCK_SIZE];  in ccm_crypt_fallback()
195  int blocks = walk->nbytes / AES_BLOCK_SIZE;  in ccm_crypt_fallback()
196  u32 tail = walk->nbytes % AES_BLOCK_SIZE;  in ccm_crypt_fallback()
207  u32 bsize = AES_BLOCK_SIZE;  in ccm_crypt_fallback()
209  if (nbytes < AES_BLOCK_SIZE)  in ccm_crypt_fallback()
212  crypto_inc(walk->iv, AES_BLOCK_SIZE);  in ccm_crypt_fallback()
233  crypto_xor(mac, buf, AES_BLOCK_SIZE);  in ccm_crypt_fallback()
243  u8 __aligned(8) mac[AES_BLOCK_SIZE];  in ccm_encrypt()
244  u8 buf[AES_BLOCK_SIZE];  in ccm_encrypt()
256  memcpy(buf, req->iv, AES_BLOCK_SIZE);  in ccm_encrypt()
262  u32 tail = walk.nbytes % AES_BLOCK_SIZE;  in ccm_encrypt()
301  u8 __aligned(8) mac[AES_BLOCK_SIZE];  in ccm_decrypt()
302  u8 buf[AES_BLOCK_SIZE];  in ccm_decrypt()
314  memcpy(buf, req->iv, AES_BLOCK_SIZE);  in ccm_decrypt()
320  u32 tail = walk.nbytes % AES_BLOCK_SIZE;  in ccm_decrypt()
366  .ivsize = AES_BLOCK_SIZE,
367  .chunksize = AES_BLOCK_SIZE,
368  .maxauthsize = AES_BLOCK_SIZE,
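
Two idioms account for most of the matches above: ccm_update_mac (lines 118-134) folds data into a CBC-MAC one AES_BLOCK_SIZE chunk at a time, with *macp tracking a buffered partial block, and ccm_crypt_fallback (line 212) steps the counter IV with crypto_inc(). The following is a minimal userspace sketch of those two patterns, not the kernel implementation: mac_update, ctr_inc, xor_bytes, and the aes_block_fn cipher callback are hypothetical stand-ins for the kernel helpers (crypto_xor, crypto_inc, the AES primitive), and the key schedule is elided.

/*
 * Sketch of the CBC-MAC absorb pattern and the big-endian counter
 * increment visible in the matches above.  All names here are
 * illustrative stand-ins, not the kernel API.
 */
#include <stddef.h>
#include <stdint.h>

#define AES_BLOCK_SIZE 16
#define MIN(a, b) ((a) < (b) ? (a) : (b))

/* Caller-supplied single-block AES encryption in place. */
typedef void (*aes_block_fn)(uint8_t block[AES_BLOCK_SIZE]);

/* Plays the role of crypto_xor(): XOR src into dst. */
static void xor_bytes(uint8_t *dst, const uint8_t *src, size_t n)
{
	while (n--)
		*dst++ ^= *src++;
}

/* Plays the role of crypto_inc(): big-endian +1 across the block. */
static void ctr_inc(uint8_t ctr[AES_BLOCK_SIZE])
{
	for (int i = AES_BLOCK_SIZE - 1; i >= 0; i--)
		if (++ctr[i])
			break;
}

/*
 * Fold abytes of input into the running CBC-MAC.  *macp counts how
 * many bytes of an open partial block are already XORed into mac[].
 */
static void mac_update(uint8_t mac[AES_BLOCK_SIZE], uint32_t *macp,
		       const uint8_t *in, size_t abytes, aes_block_fn encrypt)
{
	/* Top up a previously buffered partial block first. */
	if (*macp > 0 && *macp < AES_BLOCK_SIZE) {
		size_t added = MIN(abytes, (size_t)AES_BLOCK_SIZE - *macp);

		xor_bytes(&mac[*macp], in, added);
		*macp += added;
		in += added;
		abytes -= added;

		if (*macp < AES_BLOCK_SIZE)
			return;		/* input exhausted, block still open */
		encrypt(mac);
		*macp = 0;
	}

	/* Absorb whole blocks: XOR into the MAC, then encrypt in place. */
	while (abytes >= AES_BLOCK_SIZE) {
		xor_bytes(mac, in, AES_BLOCK_SIZE);
		encrypt(mac);
		in += AES_BLOCK_SIZE;
		abytes -= AES_BLOCK_SIZE;
	}

	/* Stash any trailing partial block for the next call. */
	xor_bytes(mac, in, abytes);
	*macp = (uint32_t)abytes;
}

One visible difference from the kernel code: the loop at line 128 uses a strict >, presumably so the final full block can be handled together with the tail, whereas this sketch absorbs every completed block eagerly for clarity.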