Lines Matching refs:AES_BLOCK_SIZE (apparently the Linux Atmel AES driver, drivers/crypto/atmel-aes.c)
119 __be32 iv[AES_BLOCK_SIZE / sizeof(u32)];
132 __be32 j0[AES_BLOCK_SIZE / sizeof(u32)];
133 u32 tag[AES_BLOCK_SIZE / sizeof(u32)];
134 __be32 ghash[AES_BLOCK_SIZE / sizeof(u32)];
157 u8 lastc[AES_BLOCK_SIZE];
393 atmel_aes_read_n(dd, offset, value, SIZE_IN_WORDS(AES_BLOCK_SIZE)); in atmel_aes_read_block()
399 atmel_aes_write_n(dd, offset, value, SIZE_IN_WORDS(AES_BLOCK_SIZE)); in atmel_aes_write_block()
537 crypto_inc((u8 *)ctx->iv, AES_BLOCK_SIZE); in atmel_aes_ctr_update_req_iv()
620 dd->datalen -= AES_BLOCK_SIZE; in atmel_aes_cpu_transfer()
622 if (dd->datalen < AES_BLOCK_SIZE) in atmel_aes_cpu_transfer()
651 size_t padlen = atmel_aes_padlen(len, AES_BLOCK_SIZE); in atmel_aes_cpu_start()
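atmel_aes_padlen() itself does not contain the string AES_BLOCK_SIZE, so its body is not listed above; from its uses at lines 651, 1497 and 1557 it evidently returns how many bytes are needed to round a length up to the next block boundary (0 when already aligned). A minimal standalone sketch of that arithmetic, assuming the block size is a power of two; the behaviour here is inferred from the call sites, not copied from the driver:

/* Sketch: pad-length arithmetic assumed to match atmel_aes_padlen().
 * Returns the number of bytes needed to reach the next block boundary. */
#include <stddef.h>
#include <stdio.h>

#define AES_BLOCK_SIZE 16

static size_t padlen(size_t len, size_t block_size)
{
	size_t rem = len & (block_size - 1);	/* block_size must be a power of two */

	return rem ? block_size - rem : 0;
}

int main(void)
{
	printf("%zu %zu %zu\n",
	       padlen(0, AES_BLOCK_SIZE),	/* 0 */
	       padlen(13, AES_BLOCK_SIZE),	/* 3 */
	       padlen(32, AES_BLOCK_SIZE));	/* 0 */
	return 0;
}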
892 case AES_BLOCK_SIZE: in atmel_aes_dma_start()
992 dd->ctx->block_size != AES_BLOCK_SIZE); in atmel_aes_start()
1028 ctx->blocks = DIV_ROUND_UP(datalen, AES_BLOCK_SIZE); in atmel_aes_ctr_transfer()
1037 datalen = AES_BLOCK_SIZE * (0x10000 - start); in atmel_aes_ctr_transfer()
1056 crypto_inc((u8 *)ctx->iv, AES_BLOCK_SIZE); in atmel_aes_ctr_transfer()
1080 memcpy(ctx->iv, req->iv, AES_BLOCK_SIZE); in atmel_aes_ctr_start()
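The CTR path (lines 1028-1080) works in whole 16-byte blocks: DIV_ROUND_UP() counts them, crypto_inc() advances the IV as one big-endian counter spanning the full AES_BLOCK_SIZE, and the 0x10000 - start expression at line 1037 apparently caps a fragment at the number of blocks left before the low 16 bits of that counter wrap. A self-contained sketch of the big-endian increment, written from crypto_inc()'s documented behaviour rather than copied from the kernel:

/* Sketch: big-endian counter increment with the same effect as
 * crypto_inc(counter, AES_BLOCK_SIZE): carry propagates from the
 * last byte toward the first. */
#include <stdint.h>
#include <stdio.h>

#define AES_BLOCK_SIZE 16

static void ctr_inc(uint8_t *counter, unsigned int size)
{
	for (int i = size - 1; i >= 0; i--) {
		if (++counter[i] != 0)		/* stop once a byte did not wrap */
			break;
	}
}

int main(void)
{
	uint8_t iv[AES_BLOCK_SIZE] = { [14] = 0xff, [15] = 0xff };

	ctr_inc(iv, AES_BLOCK_SIZE);		/* ...00 ff ff -> ...01 00 00 */
	printf("%02x %02x %02x\n", iv[13], iv[14], iv[15]);	/* 01 00 00 */
	return 0;
}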
1111 ctx->block_size = AES_BLOCK_SIZE; in atmel_aes_crypt()
1266 .base.cra_blocksize = AES_BLOCK_SIZE,
1279 .base.cra_blocksize = AES_BLOCK_SIZE,
1288 .ivsize = AES_BLOCK_SIZE,
1293 .base.cra_blocksize = AES_BLOCK_SIZE,
1302 .ivsize = AES_BLOCK_SIZE,
1307 .base.cra_blocksize = AES_BLOCK_SIZE,
1316 .ivsize = AES_BLOCK_SIZE,
1330 .ivsize = AES_BLOCK_SIZE,
1344 .ivsize = AES_BLOCK_SIZE,
1358 .ivsize = AES_BLOCK_SIZE,
1372 .ivsize = AES_BLOCK_SIZE,
1388 .ivsize = AES_BLOCK_SIZE,
1456 dd->datalen -= AES_BLOCK_SIZE; in atmel_aes_gcm_ghash_finalize()
1497 padlen = atmel_aes_padlen(ivsize, AES_BLOCK_SIZE); in atmel_aes_gcm_start()
1498 datalen = ivsize + padlen + AES_BLOCK_SIZE; in atmel_aes_gcm_start()
1557 padlen = atmel_aes_padlen(req->assoclen, AES_BLOCK_SIZE); in atmel_aes_gcm_length()
1580 dd->datalen -= AES_BLOCK_SIZE; in atmel_aes_gcm_data()
1638 return atmel_aes_gcm_ghash(dd, (const u32 *)data, AES_BLOCK_SIZE, in atmel_aes_gcm_tag_init()
1697 ctx->block_size = AES_BLOCK_SIZE; in atmel_aes_gcm_crypt()
1759 .maxauthsize = AES_BLOCK_SIZE,
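For GCM, j0 (line 132) holds the pre-counter block J0. Per the GCM specification (NIST SP 800-38D), a 12-byte IV is used directly as IV || 0x00000001, while any other IV length is zero-padded to a block boundary and run through GHASH together with a trailing 16-byte length block, which is what the ivsize + padlen + AES_BLOCK_SIZE buffer at line 1498 appears to hold. The sketch below covers the 96-bit fast path only; the GHASH multiplications themselves are done by the hardware in this driver, so they are not modelled here:

/* Sketch: J0 construction for the common 96-bit (12-byte) IV case,
 * per the GCM spec: J0 = IV || 0x00000001.  Other IV lengths go
 * through GHASH instead (handled by the hardware in this driver). */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define AES_BLOCK_SIZE 16
#define GCM_AES_IV_SIZE 12

static void gcm_j0_96bit(uint8_t j0[AES_BLOCK_SIZE],
			 const uint8_t iv[GCM_AES_IV_SIZE])
{
	memcpy(j0, iv, GCM_AES_IV_SIZE);
	j0[12] = 0;
	j0[13] = 0;
	j0[14] = 0;
	j0[15] = 1;				/* 32-bit big-endian counter = 1 */
}

int main(void)
{
	uint8_t iv[GCM_AES_IV_SIZE] = { 0 };
	uint8_t j0[AES_BLOCK_SIZE];

	gcm_j0_96bit(j0, iv);
	printf("last word: %02x %02x %02x %02x\n", j0[12], j0[13], j0[14], j0[15]);
	return 0;
}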
1810 u32 tweak[AES_BLOCK_SIZE / sizeof(u32)]; in atmel_aes_xts_process_data()
1811 static const __le32 one[AES_BLOCK_SIZE / sizeof(u32)] = {cpu_to_le32(1), }; in atmel_aes_xts_process_data()
1822 for (i = 0; i < AES_BLOCK_SIZE/2; ++i) { in atmel_aes_xts_process_data()
1823 u8 tmp = tweak_bytes[AES_BLOCK_SIZE - 1 - i]; in atmel_aes_xts_process_data()
1825 tweak_bytes[AES_BLOCK_SIZE - 1 - i] = tweak_bytes[i]; in atmel_aes_xts_process_data()
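The XTS loop at lines 1822-1825 reverses the byte order of the 16-byte tweak in place (the write-back of tmp into tweak_bytes[i] sits on a line that does not mention AES_BLOCK_SIZE, so it is absent from the matches). A standalone sketch of the same in-place reversal:

/* Sketch: in-place byte reversal of a 16-byte XTS tweak, mirroring the
 * swap loop at lines 1822-1825. */
#include <stdint.h>
#include <stdio.h>

#define AES_BLOCK_SIZE 16

static void swap_block_bytes(uint8_t tweak_bytes[AES_BLOCK_SIZE])
{
	for (int i = 0; i < AES_BLOCK_SIZE / 2; ++i) {
		uint8_t tmp = tweak_bytes[AES_BLOCK_SIZE - 1 - i];

		tweak_bytes[AES_BLOCK_SIZE - 1 - i] = tweak_bytes[i];
		tweak_bytes[i] = tmp;		/* write-back not shown in the matches above */
	}
}

int main(void)
{
	uint8_t t[AES_BLOCK_SIZE];

	for (int i = 0; i < AES_BLOCK_SIZE; i++)
		t[i] = i;
	swap_block_bytes(t);
	printf("%u ... %u\n", t[0], t[AES_BLOCK_SIZE - 1]);	/* 15 ... 0 */
	return 0;
}

Swapping from both ends toward the middle touches each byte exactly once, which is why the loop stops at AES_BLOCK_SIZE/2.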
1882 .base.cra_blocksize = AES_BLOCK_SIZE,
1887 .ivsize = AES_BLOCK_SIZE,
1962 __be32 iv[AES_BLOCK_SIZE / sizeof(u32)]; in atmel_aes_authenc_transfer()
2146 ctx->block_size = AES_BLOCK_SIZE; in atmel_aes_authenc_crypt()
2173 .ivsize = AES_BLOCK_SIZE,
2179 .cra_blocksize = AES_BLOCK_SIZE,
2189 .ivsize = AES_BLOCK_SIZE,
2195 .cra_blocksize = AES_BLOCK_SIZE,
2205 .ivsize = AES_BLOCK_SIZE,
2211 .cra_blocksize = AES_BLOCK_SIZE,
2221 .ivsize = AES_BLOCK_SIZE,
2227 .cra_blocksize = AES_BLOCK_SIZE,
2237 .ivsize = AES_BLOCK_SIZE,
2243 .cra_blocksize = AES_BLOCK_SIZE,
2256 dd->buflen &= ~(AES_BLOCK_SIZE - 1); in atmel_aes_buff_init()
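Finally, dd->buflen &= ~(AES_BLOCK_SIZE - 1) at line 2256 rounds the bounce-buffer length down to a whole number of AES blocks; the mask trick relies on AES_BLOCK_SIZE being a power of two. A tiny sketch of the same rounding:

/* Sketch: round a buffer length down to a multiple of AES_BLOCK_SIZE
 * using the power-of-two mask from line 2256. */
#include <stddef.h>
#include <stdio.h>

#define AES_BLOCK_SIZE 16

int main(void)
{
	size_t buflen = 1000;

	buflen &= ~((size_t)AES_BLOCK_SIZE - 1);	/* 1000 -> 992 (62 blocks) */
	printf("%zu\n", buflen);
	return 0;
}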