Lines matching refs: AES_BLOCK_SIZE
128 u8 dg[AES_BLOCK_SIZE];
183 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_encrypt()
188 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_encrypt()
203 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_decrypt()
208 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_decrypt()
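
The ecb_encrypt()/ecb_decrypt() refs above show the standard skcipher-walk pattern, which recurs in the CBC, ESSIV and CTR refs below: consume as many whole AES blocks as the current walk chunk holds, then report the sub-block remainder back through skcipher_walk_done(). A minimal userspace sketch of the same bookkeeping; process_blocks() is a hypothetical stand-in for the accelerated ECB routine, not a kernel API:

#include <stddef.h>
#include <stdio.h>

#define AES_BLOCK_SIZE 16

/* Hypothetical stand-in for the accelerated ECB routine. */
static void process_blocks(const unsigned char *src, size_t blocks)
{
	(void)src;
	printf("processing %zu block(s)\n", blocks);
}

/* Same arithmetic as the walk loop: consume whole blocks, report
 * the sub-block tail back to the walk machinery. */
static size_t walk_step(const unsigned char *buf, size_t nbytes)
{
	size_t blocks = nbytes / AES_BLOCK_SIZE;

	if (blocks)
		process_blocks(buf, blocks);

	return nbytes % AES_BLOCK_SIZE;	/* always 0 for valid ECB input */
}

int main(void)
{
	unsigned char buf[80] = { 0 };

	/* 80 bytes = 5 whole blocks, no tail. */
	printf("leftover: %zu\n", walk_step(buf, sizeof(buf)));
	return 0;
}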
221 while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) { in cbc_encrypt_walk()
226 err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE); in cbc_encrypt_walk()
250 while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) { in cbc_decrypt_walk()
255 err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE); in cbc_decrypt_walk()
276 int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2; in cts_cbc_encrypt()
286 if (req->cryptlen <= AES_BLOCK_SIZE) { in cts_cbc_encrypt()
287 if (req->cryptlen < AES_BLOCK_SIZE) in cts_cbc_encrypt()
294 cbc_blocks * AES_BLOCK_SIZE, in cts_cbc_encrypt()
302 if (req->cryptlen == AES_BLOCK_SIZE) in cts_cbc_encrypt()
313 req->cryptlen - cbc_blocks * AES_BLOCK_SIZE, in cts_cbc_encrypt()
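
cts_cbc_encrypt() sizes its plain-CBC pass as DIV_ROUND_UP(cryptlen, AES_BLOCK_SIZE) - 2, holding back the last two (possibly ragged) blocks for the ciphertext-stealing step; the refs at 286-302 show that anything shorter than a block is rejected and exactly one block degenerates to plain CBC. A runnable sketch of that arithmetic, with DIV_ROUND_UP expanded the way the kernel macro behaves:

#include <stdio.h>

#define AES_BLOCK_SIZE 16
#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

int main(void)
{
	/* Exact multiple, ragged tail, just over one block, one block. */
	unsigned lens[] = { 64, 37, 20, 16 };

	for (unsigned i = 0; i < 4; i++) {
		unsigned cryptlen = lens[i];
		int cbc_blocks = (int)DIV_ROUND_UP(cryptlen, AES_BLOCK_SIZE) - 2;
		unsigned cbc_bytes;

		if (cryptlen <= AES_BLOCK_SIZE) {
			/* Shorter than a block is invalid; exactly one
			 * block is plain CBC with no stealing step. */
			printf("len %2u: no CTS step\n", cryptlen);
			continue;
		}
		cbc_bytes = (cbc_blocks > 0 ? (unsigned)cbc_blocks : 0) *
			    AES_BLOCK_SIZE;
		printf("len %2u: %u byte(s) plain CBC, %u byte(s) via CTS\n",
		       cryptlen, cbc_bytes, cryptlen - cbc_bytes);
	}
	return 0;
}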
333 int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2; in cts_cbc_decrypt()
343 if (req->cryptlen <= AES_BLOCK_SIZE) { in cts_cbc_decrypt()
344 if (req->cryptlen < AES_BLOCK_SIZE) in cts_cbc_decrypt()
351 cbc_blocks * AES_BLOCK_SIZE, in cts_cbc_decrypt()
359 if (req->cryptlen == AES_BLOCK_SIZE) in cts_cbc_decrypt()
370 req->cryptlen - cbc_blocks * AES_BLOCK_SIZE, in cts_cbc_decrypt()
411 blocks = walk.nbytes / AES_BLOCK_SIZE; in essiv_cbc_encrypt()
418 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in essiv_cbc_encrypt()
433 blocks = walk.nbytes / AES_BLOCK_SIZE; in essiv_cbc_decrypt()
440 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in essiv_cbc_decrypt()
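
The essiv_cbc_encrypt()/essiv_cbc_decrypt() refs walk whole blocks exactly like plain CBC; only the IV differs. ESSIV encrypts the caller-supplied IV (typically a sector number) under a second key derived by hashing the data key. A self-contained sketch of that derivation; toy_hash() and toy_encrypt() are placeholders for the real SHA-256/AES, present only so the sketch runs:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define AES_BLOCK_SIZE 16

/* Placeholder primitives: NOT real SHA-256/AES. */
static void toy_hash(const uint8_t *msg, size_t len, uint8_t out[32])
{
	memset(out, 0, 32);
	for (size_t i = 0; i < len; i++)
		out[i % 32] ^= msg[i];
}

static void toy_encrypt(const uint8_t key[32],
			const uint8_t in[AES_BLOCK_SIZE],
			uint8_t out[AES_BLOCK_SIZE])
{
	for (int i = 0; i < AES_BLOCK_SIZE; i++)
		out[i] = in[i] ^ key[i] ^ key[16 + i];
}

int main(void)
{
	uint8_t key[32] = { 0x42 };		/* data key K */
	uint8_t essiv_key[32];
	uint8_t iv[AES_BLOCK_SIZE] = { 0 };	/* e.g. sector number */
	uint8_t essiv[AES_BLOCK_SIZE];

	toy_hash(key, sizeof(key), essiv_key);	/* K_s = H(K)        */
	toy_encrypt(essiv_key, iv, essiv);	/* IV' = E_{K_s}(IV) */

	printf("essiv[0]=%02x\n", essiv[0]);
	/* CBC then proceeds with IV' exactly as in cbc_encrypt_walk(). */
	return 0;
}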
455 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ctr_encrypt()
460 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in ctr_encrypt()
463 u8 __aligned(8) tail[AES_BLOCK_SIZE]; in ctr_encrypt()
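
The u8 __aligned(8) tail[AES_BLOCK_SIZE] ref above is ctr_encrypt()'s partial-final-block path: one more keystream block is produced into the aligned tail buffer, and only the leftover byte count is XOR-ed into the data, so CTR never needs padding. A sketch of that final XOR, with a placeholder keystream block standing in for the encrypted counter:

#include <stdio.h>
#include <stdint.h>

#define AES_BLOCK_SIZE 16

int main(void)
{
	/* Placeholder keystream block; in the driver this is the
	 * encryption of the final counter value. */
	uint8_t tail[AES_BLOCK_SIZE] = {
		0x1f, 0x2e, 0x3d, 0x4c, 0x5b, 0x6a, 0x79, 0x88,
		0x97, 0xa6, 0xb5, 0xc4, 0xd3, 0xe2, 0xf1, 0x00,
	};
	uint8_t msg[5] = { 'h', 'e', 'l', 'l', 'o' };
	size_t nbytes = sizeof(msg);	/* walk.nbytes % AES_BLOCK_SIZE */

	/* XOR only the bytes actually present; the rest of the
	 * keystream block is simply discarded. */
	for (size_t i = 0; i < nbytes; i++)
		msg[i] ^= tail[i];

	for (size_t i = 0; i < nbytes; i++)
		printf("%02x", msg[i]);
	printf("\n");
	return 0;
}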
512 int tail = req->cryptlen % AES_BLOCK_SIZE; in xts_encrypt()
518 if (req->cryptlen < AES_BLOCK_SIZE) in xts_encrypt()
525 AES_BLOCK_SIZE) - 2; in xts_encrypt()
534 xts_blocks * AES_BLOCK_SIZE, in xts_encrypt()
542 for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) { in xts_encrypt()
546 nbytes &= ~(AES_BLOCK_SIZE - 1); in xts_encrypt()
563 skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail, in xts_encrypt()
584 int tail = req->cryptlen % AES_BLOCK_SIZE; in xts_decrypt()
590 if (req->cryptlen < AES_BLOCK_SIZE) in xts_decrypt()
597 AES_BLOCK_SIZE) - 2; in xts_decrypt()
606 xts_blocks * AES_BLOCK_SIZE, in xts_decrypt()
614 for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) { in xts_decrypt()
618 nbytes &= ~(AES_BLOCK_SIZE - 1); in xts_decrypt()
635 skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail, in xts_decrypt()
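
Both XTS paths compute tail = cryptlen % AES_BLOCK_SIZE; when the tail is nonzero, the bulk walk stops two blocks early (the DIV_ROUND_UP(...) - 2 at 525/597) and a final pass covers AES_BLOCK_SIZE + tail bytes as one ciphertext-stealing unit (563/635). A runnable sketch of that split:

#include <stdio.h>

#define AES_BLOCK_SIZE 16
#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

int main(void)
{
	unsigned lens[] = { 64, 37, 21 };

	for (unsigned i = 0; i < 3; i++) {
		unsigned cryptlen = lens[i];
		unsigned tail = cryptlen % AES_BLOCK_SIZE;

		if (cryptlen < AES_BLOCK_SIZE) {
			printf("len %2u: rejected\n", cryptlen);
			continue;
		}
		if (tail) {
			int xts_blocks = (int)DIV_ROUND_UP(cryptlen,
							   AES_BLOCK_SIZE) - 2;
			unsigned bulk = (xts_blocks > 0 ?
					 (unsigned)xts_blocks : 0) *
					AES_BLOCK_SIZE;

			printf("len %2u: %u byte(s) bulk XTS, %u byte(s) stolen\n",
			       cryptlen, bulk, AES_BLOCK_SIZE + tail);
		} else {
			printf("len %2u: all bulk XTS, no stealing\n",
			       cryptlen);
		}
	}
	return 0;
}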
659 .cra_blocksize = AES_BLOCK_SIZE,
674 .cra_blocksize = AES_BLOCK_SIZE,
680 .ivsize = AES_BLOCK_SIZE,
696 .ivsize = AES_BLOCK_SIZE,
697 .chunksize = AES_BLOCK_SIZE,
712 .ivsize = AES_BLOCK_SIZE,
713 .chunksize = AES_BLOCK_SIZE,
723 .cra_blocksize = AES_BLOCK_SIZE,
729 .ivsize = AES_BLOCK_SIZE,
730 .walksize = 2 * AES_BLOCK_SIZE,
741 .cra_blocksize = AES_BLOCK_SIZE,
747 .ivsize = AES_BLOCK_SIZE,
748 .walksize = 2 * AES_BLOCK_SIZE,
758 .cra_blocksize = AES_BLOCK_SIZE,
764 .ivsize = AES_BLOCK_SIZE,
803 aes_ecb_encrypt(ctx->consts, (u8[AES_BLOCK_SIZE]){}, ctx->key.key_enc, in cmac_setkey()
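
cmac_setkey() encrypts an all-zero block (the (u8[AES_BLOCK_SIZE]){} compound literal above) to obtain the value L, from which the two CMAC subkeys are derived by doubling in GF(2^128), per RFC 4493. A runnable sketch of the doubling step; the zero-block encryption itself is assumed to have already produced L:

#include <stdio.h>
#include <stdint.h>

#define AES_BLOCK_SIZE 16

/* GF(2^128) doubling as used by CMAC (RFC 4493): shift left one bit,
 * and if the top bit fell off, fold in the reduction constant 0x87. */
static void gf128_double(const uint8_t in[AES_BLOCK_SIZE],
			 uint8_t out[AES_BLOCK_SIZE])
{
	uint8_t carry = in[0] >> 7;	/* MSB of the whole block */

	for (int i = 0; i < AES_BLOCK_SIZE - 1; i++)
		out[i] = (uint8_t)((in[i] << 1) | (in[i + 1] >> 7));
	out[AES_BLOCK_SIZE - 1] = (uint8_t)(in[AES_BLOCK_SIZE - 1] << 1);

	if (carry)
		out[AES_BLOCK_SIZE - 1] ^= 0x87;
}

int main(void)
{
	/* L = AES_K(0^128); a placeholder value here, since running
	 * the real cipher is outside the scope of this sketch. */
	uint8_t L[AES_BLOCK_SIZE] = { 0x80 };	/* MSB set: exercises the fold */
	uint8_t k1[AES_BLOCK_SIZE], k2[AES_BLOCK_SIZE];

	gf128_double(L, k1);	/* K1: used when the last block is full   */
	gf128_double(k1, k2);	/* K2: used when the last block is padded */

	printf("k1[15]=%02x k2[15]=%02x\n", k1[15], k2[15]);
	return 0;
}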
816 static u8 const ks[3][AES_BLOCK_SIZE] = { in xcbc_setkey()
817 { [0 ... AES_BLOCK_SIZE - 1] = 0x1 }, in xcbc_setkey()
818 { [0 ... AES_BLOCK_SIZE - 1] = 0x2 }, in xcbc_setkey()
819 { [0 ... AES_BLOCK_SIZE - 1] = 0x3 }, in xcbc_setkey()
824 u8 key[AES_BLOCK_SIZE]; in xcbc_setkey()
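
xcbc_setkey() derives its three keys by encrypting the constant blocks 0x01..., 0x02... and 0x03... under the user key, as the ks[][] table above shows. A self-contained sketch of the same pattern; toy_encrypt() is a placeholder cipher, not real AES, and the range designators are the same GNU C extension the kernel table uses:

#include <stdint.h>
#include <stdio.h>

#define AES_BLOCK_SIZE 16

/* Placeholder cipher: NOT real AES, only here so the sketch runs. */
static void toy_encrypt(const uint8_t key[AES_BLOCK_SIZE],
			const uint8_t in[AES_BLOCK_SIZE],
			uint8_t out[AES_BLOCK_SIZE])
{
	for (int i = 0; i < AES_BLOCK_SIZE; i++)
		out[i] = in[i] ^ key[i];
}

int main(void)
{
	/* Same designated-initializer idiom as the ks[][] table above:
	 * K1, K2, K3 come from encrypting the constants 0x1, 0x2, 0x3. */
	static const uint8_t ks[3][AES_BLOCK_SIZE] = {
		{ [0 ... AES_BLOCK_SIZE - 1] = 0x1 },
		{ [0 ... AES_BLOCK_SIZE - 1] = 0x2 },
		{ [0 ... AES_BLOCK_SIZE - 1] = 0x3 },
	};
	uint8_t user_key[AES_BLOCK_SIZE] = { 0x42 };
	uint8_t derived[3][AES_BLOCK_SIZE];

	for (int i = 0; i < 3; i++)
		toy_encrypt(user_key, ks[i], derived[i]);

	printf("K1..K3 derived, first bytes: %02x %02x %02x\n",
	       derived[0][0], derived[1][0], derived[2][0]);
	return 0;
}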
843 memset(ctx->dg, 0, AES_BLOCK_SIZE); in mac_init()
864 crypto_xor(dg, in, AES_BLOCK_SIZE); in mac_do_update()
865 in += AES_BLOCK_SIZE; in mac_do_update()
881 if ((ctx->len % AES_BLOCK_SIZE) == 0 && in mac_update()
882 (ctx->len + len) > AES_BLOCK_SIZE) { in mac_update()
884 int blocks = len / AES_BLOCK_SIZE; in mac_update()
886 len %= AES_BLOCK_SIZE; in mac_update()
891 p += blocks * AES_BLOCK_SIZE; in mac_update()
894 ctx->len = AES_BLOCK_SIZE; in mac_update()
900 l = min(len, AES_BLOCK_SIZE - ctx->len); in mac_update()
902 if (l <= AES_BLOCK_SIZE) { in mac_update()
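
mac_update() implements the usual streaming-MAC bookkeeping: when the buffered data plus the new input spans more than one block, whole blocks are consumed directly (len / AES_BLOCK_SIZE of them, with one full block kept buffered for finalization), and any remainder tops up the partial block in chunks of min(len, AES_BLOCK_SIZE - ctx->len). A userspace sketch tracking only the lengths; the real code also XORs data into dg[] and runs the cipher per block:

#include <stdio.h>
#include <stddef.h>

#define AES_BLOCK_SIZE 16

struct mac_ctx { unsigned len; };	/* bytes pending in dg[] */

static void mac_update(struct mac_ctx *ctx, size_t len)
{
	do {
		if (ctx->len % AES_BLOCK_SIZE == 0 &&
		    ctx->len + len > AES_BLOCK_SIZE) {
			/* Fast path: whole blocks go straight through. */
			size_t blocks = len / AES_BLOCK_SIZE;

			len %= AES_BLOCK_SIZE;
			printf("  %zu full block(s) processed\n", blocks);
			if (len == 0) {
				/* Keep one full block buffered for
				 * finalization, as at 894. */
				ctx->len = AES_BLOCK_SIZE;
				break;
			}
			ctx->len = 0;
		}
		/* Slow path: top up the buffered partial block. */
		size_t l = len < AES_BLOCK_SIZE - ctx->len
			 ? len : AES_BLOCK_SIZE - ctx->len;

		printf("  buffered %zu byte(s)\n", l);
		ctx->len += l;
		len -= l;
	} while (len);
}

int main(void)
{
	struct mac_ctx ctx = { 0 };

	mac_update(&ctx, 5);	/* partial block only   */
	mac_update(&ctx, 40);	/* spans several blocks */
	return 0;
}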
920 memcpy(out, ctx->dg, AES_BLOCK_SIZE); in cbcmac_final()
931 if (ctx->len != AES_BLOCK_SIZE) { in cmac_final()
933 consts += AES_BLOCK_SIZE; in cmac_final()
938 memcpy(out, ctx->dg, AES_BLOCK_SIZE); in cmac_final()
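
cmac_final() selects which derived constant to fold in: consts (K1) when the message filled the last block exactly, consts + AES_BLOCK_SIZE (K2) after 10* padding when it did not, which is what the ctx->len != AES_BLOCK_SIZE test at 931 decides. A sketch of that selection; cmac_pad_and_pick() is a hypothetical name for just this step, and the final block-cipher call is elided:

#include <stdio.h>
#include <stdint.h>

#define AES_BLOCK_SIZE 16

/* consts[] holds K1 followed by K2, as in the kernel ctx layout. */
static void cmac_pad_and_pick(uint8_t dg[AES_BLOCK_SIZE], unsigned ctxlen,
			      const uint8_t consts[2 * AES_BLOCK_SIZE])
{
	if (ctxlen != AES_BLOCK_SIZE) {
		/* Partial final block: 10* padding, switch to K2. */
		dg[ctxlen] ^= 0x80;
		consts += AES_BLOCK_SIZE;
	}
	for (int i = 0; i < AES_BLOCK_SIZE; i++)
		dg[i] ^= consts[i];
	/* The real code then encrypts dg once more and copies it out. */
}

int main(void)
{
	uint8_t dg[AES_BLOCK_SIZE] = { 0 };
	uint8_t consts[2 * AES_BLOCK_SIZE] = { [0] = 0x11, [16] = 0x22 };

	cmac_pad_and_pick(dg, 7, consts);	/* partial block: uses K2 */
	printf("dg[0]=%02x dg[7]=%02x\n", dg[0], dg[7]);
	return 0;
}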
947 .base.cra_blocksize = AES_BLOCK_SIZE,
949 2 * AES_BLOCK_SIZE,
952 .digestsize = AES_BLOCK_SIZE,
962 .base.cra_blocksize = AES_BLOCK_SIZE,
964 2 * AES_BLOCK_SIZE,
967 .digestsize = AES_BLOCK_SIZE,
981 .digestsize = AES_BLOCK_SIZE,