Lines matching refs:AES_BLOCK_SIZE
128 u8 dg[AES_BLOCK_SIZE];
201 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_encrypt()
206 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_encrypt()
221 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_decrypt()
226 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_decrypt()
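The ecb_encrypt()/ecb_decrypt() hits above are the standard skcipher walk shape: crypt walk.nbytes worth of whole AES blocks, then hand the sub-block remainder back through skcipher_walk_done(). A standalone sketch of just that arithmetic (AES_BLOCK_SIZE pinned to 16, and process_blocks() a hypothetical stand-in, not the kernel function):

    #include <stdio.h>

    #define AES_BLOCK_SIZE 16

    /* Stand-in for one trip through the walk loop: consume whole
     * blocks, report what would go back to skcipher_walk_done(). */
    static unsigned int process_blocks(unsigned int nbytes)
    {
        unsigned int blocks = nbytes / AES_BLOCK_SIZE;  /* full blocks to crypt */
        unsigned int rem = nbytes % AES_BLOCK_SIZE;     /* returned to the walk */

        printf("%u blocks, %u bytes left for the next pass\n", blocks, rem);
        return rem;
    }

    int main(void)
    {
        process_blocks(100);    /* prints: 6 blocks, 4 bytes left for the next pass */
        return 0;
    }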
239 while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) { in cbc_encrypt_walk()
244 err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE); in cbc_encrypt_walk()
268 while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) { in cbc_decrypt_walk()
273 err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE); in cbc_decrypt_walk()
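cbc_encrypt_walk()/cbc_decrypt_walk() iterate the same way, but each block is chained to the previous ciphertext. A minimal sketch of the chaining itself, with a caller-supplied block cipher standing in for the arm64 NEON helper (the function-pointer type and names here are illustrative, not the kernel's):

    #include <stdint.h>
    #include <string.h>

    #define AES_BLOCK_SIZE 16

    typedef void (*block_fn)(uint8_t out[AES_BLOCK_SIZE],
                             const uint8_t in[AES_BLOCK_SIZE]);

    /* Encrypt 'blocks' full blocks in place with CBC chaining. */
    static void cbc_chain(uint8_t *buf, unsigned int blocks,
                          uint8_t iv[AES_BLOCK_SIZE], block_fn encrypt)
    {
        while (blocks--) {
            for (int i = 0; i < AES_BLOCK_SIZE; i++)
                buf[i] ^= iv[i];                 /* XOR with previous ciphertext */
            encrypt(buf, buf);
            memcpy(iv, buf, AES_BLOCK_SIZE);     /* becomes the next block's IV */
            buf += AES_BLOCK_SIZE;
        }
    }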
294 int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2; in cts_cbc_encrypt()
304 if (req->cryptlen <= AES_BLOCK_SIZE) { in cts_cbc_encrypt()
305 if (req->cryptlen < AES_BLOCK_SIZE) in cts_cbc_encrypt()
312 cbc_blocks * AES_BLOCK_SIZE, in cts_cbc_encrypt()
320 if (req->cryptlen == AES_BLOCK_SIZE) in cts_cbc_encrypt()
331 req->cryptlen - cbc_blocks * AES_BLOCK_SIZE, in cts_cbc_encrypt()
351 int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2; in cts_cbc_decrypt()
361 if (req->cryptlen <= AES_BLOCK_SIZE) { in cts_cbc_decrypt()
362 if (req->cryptlen < AES_BLOCK_SIZE) in cts_cbc_decrypt()
369 cbc_blocks * AES_BLOCK_SIZE, in cts_cbc_decrypt()
377 if (req->cryptlen == AES_BLOCK_SIZE) in cts_cbc_decrypt()
388 req->cryptlen - cbc_blocks * AES_BLOCK_SIZE, in cts_cbc_decrypt()
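The cbc_blocks computation above is the heart of the CTS split: everything except the last two blocks runs as plain CBC, and the final full-plus-partial pair gets the ciphertext-stealing treatment (the cryptlen == AES_BLOCK_SIZE and < AES_BLOCK_SIZE matches cover the degenerate short inputs). A worked, standalone example of the arithmetic:

    #include <stdio.h>

    #define AES_BLOCK_SIZE 16
    #define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

    int main(void)
    {
        unsigned int cryptlen = 100;    /* arbitrary example length */
        int cbc_blocks = DIV_ROUND_UP(cryptlen, AES_BLOCK_SIZE) - 2;

        if (cbc_blocks > 0)
            printf("%d blocks (%d bytes) as plain CBC, %u bytes for the CTS tail\n",
                   cbc_blocks, cbc_blocks * AES_BLOCK_SIZE,
                   cryptlen - cbc_blocks * AES_BLOCK_SIZE);
        /* prints: 5 blocks (80 bytes) as plain CBC, 20 bytes for the CTS tail */
        return 0;
    }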
429 blocks = walk.nbytes / AES_BLOCK_SIZE; in essiv_cbc_encrypt()
436 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in essiv_cbc_encrypt()
451 blocks = walk.nbytes / AES_BLOCK_SIZE; in essiv_cbc_decrypt()
458 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in essiv_cbc_decrypt()
473 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ctr_encrypt()
478 err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE); in ctr_encrypt()
481 u8 __aligned(8) tail[AES_BLOCK_SIZE]; in ctr_encrypt()
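The __aligned(8) tail[] buffer at line 481 exists for CTR mode's final partial block: one more keystream block is produced and only the bytes actually present get XORed. A hedged sketch of that step (a plain loop stands in for the kernel's crypto_xor(), and 'keystream' is a placeholder argument, not a real cipher call):

    #include <stdint.h>
    #include <string.h>

    #define AES_BLOCK_SIZE 16

    static void ctr_tail(uint8_t *dst, const uint8_t *src, size_t tail_len,
                         const uint8_t keystream[AES_BLOCK_SIZE])
    {
        uint8_t tail[AES_BLOCK_SIZE];

        /* Copy the keystream block aside, then XOR only tail_len bytes. */
        memcpy(tail, keystream, AES_BLOCK_SIZE);
        for (size_t i = 0; i < tail_len; i++)
            dst[i] = src[i] ^ tail[i];
    }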
530 int tail = req->cryptlen % AES_BLOCK_SIZE; in xts_encrypt()
536 if (req->cryptlen < AES_BLOCK_SIZE) in xts_encrypt()
543 AES_BLOCK_SIZE) - 2; in xts_encrypt()
552 xts_blocks * AES_BLOCK_SIZE, in xts_encrypt()
560 for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) { in xts_encrypt()
564 nbytes &= ~(AES_BLOCK_SIZE - 1); in xts_encrypt()
581 skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail, in xts_encrypt()
602 int tail = req->cryptlen % AES_BLOCK_SIZE; in xts_decrypt()
608 if (req->cryptlen < AES_BLOCK_SIZE) in xts_decrypt()
615 AES_BLOCK_SIZE) - 2; in xts_decrypt()
624 xts_blocks * AES_BLOCK_SIZE, in xts_decrypt()
632 for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) { in xts_decrypt()
636 nbytes &= ~(AES_BLOCK_SIZE - 1); in xts_decrypt()
653 skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail, in xts_decrypt()
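xts_encrypt()/xts_decrypt() use the same two-blocks-short trick as CTS: when cryptlen is not block aligned, the bulk pass stops at xts_blocks and a final pass covers one full block plus the ragged tail (the AES_BLOCK_SIZE + tail passed to skcipher_request_set_crypt() above). Worked standalone example:

    #include <stdio.h>

    #define AES_BLOCK_SIZE 16
    #define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

    int main(void)
    {
        unsigned int cryptlen = 100;          /* example length */
        int tail = cryptlen % AES_BLOCK_SIZE; /* 4 ragged bytes */

        if (tail) {
            int xts_blocks = DIV_ROUND_UP(cryptlen, AES_BLOCK_SIZE) - 2;
            printf("bulk: %d blocks (%d bytes), final pass: %d bytes\n",
                   xts_blocks, xts_blocks * AES_BLOCK_SIZE,
                   AES_BLOCK_SIZE + tail);
            /* prints: bulk: 5 blocks (80 bytes), final pass: 20 bytes */
        }
        return 0;
    }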
677 .cra_blocksize = AES_BLOCK_SIZE,
692 .cra_blocksize = AES_BLOCK_SIZE,
698 .ivsize = AES_BLOCK_SIZE,
714 .ivsize = AES_BLOCK_SIZE,
715 .chunksize = AES_BLOCK_SIZE,
730 .ivsize = AES_BLOCK_SIZE,
731 .chunksize = AES_BLOCK_SIZE,
741 .cra_blocksize = AES_BLOCK_SIZE,
747 .ivsize = AES_BLOCK_SIZE,
748 .walksize = 2 * AES_BLOCK_SIZE,
759 .cra_blocksize = AES_BLOCK_SIZE,
765 .ivsize = AES_BLOCK_SIZE,
766 .walksize = 2 * AES_BLOCK_SIZE,
776 .cra_blocksize = AES_BLOCK_SIZE,
782 .ivsize = AES_BLOCK_SIZE,
826 aes_ecb_encrypt(ctx->consts, (u8[AES_BLOCK_SIZE]){}, ctx->key.key_enc, in cmac_setkey()
839 static u8 const ks[3][AES_BLOCK_SIZE] = { in xcbc_setkey()
840 { [0 ... AES_BLOCK_SIZE - 1] = 0x1 }, in xcbc_setkey()
841 { [0 ... AES_BLOCK_SIZE - 1] = 0x2 }, in xcbc_setkey()
842 { [0 ... AES_BLOCK_SIZE - 1] = 0x3 }, in xcbc_setkey()
847 u8 key[AES_BLOCK_SIZE]; in xcbc_setkey()
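The ks[][] initializers above rely on GCC/Clang range designators: each row is a full AES block filled with the RFC 3566 XCBC constant 0x1, 0x2 or 0x3, which xcbc_setkey() then encrypts to derive K1/K2/K3 (cmac_setkey() plays the same game with an all-zero compound-literal block at line 826). The designator syntax, isolated in a compilable sketch (GNU C extension):

    #include <stdio.h>

    #define AES_BLOCK_SIZE 16

    static const unsigned char ks[3][AES_BLOCK_SIZE] = {
        { [0 ... AES_BLOCK_SIZE - 1] = 0x1 },   /* each row: 16 copies of the constant */
        { [0 ... AES_BLOCK_SIZE - 1] = 0x2 },
        { [0 ... AES_BLOCK_SIZE - 1] = 0x3 },
    };

    int main(void)
    {
        /* In xcbc_setkey() these blocks are AES-encrypted under the user
         * key; here we only show the initializer result. */
        printf("ks[2][15] = 0x%x\n", ks[2][AES_BLOCK_SIZE - 1]);  /* 0x3 */
        return 0;
    }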
866 memset(ctx->dg, 0, AES_BLOCK_SIZE); in mac_init()
887 crypto_xor(dg, in, AES_BLOCK_SIZE); in mac_do_update()
888 in += AES_BLOCK_SIZE; in mac_do_update()
904 if ((ctx->len % AES_BLOCK_SIZE) == 0 && in mac_update()
905 (ctx->len + len) > AES_BLOCK_SIZE) { in mac_update()
907 int blocks = len / AES_BLOCK_SIZE; in mac_update()
909 len %= AES_BLOCK_SIZE; in mac_update()
914 p += blocks * AES_BLOCK_SIZE; in mac_update()
917 ctx->len = AES_BLOCK_SIZE; in mac_update()
923 l = min(len, AES_BLOCK_SIZE - ctx->len); in mac_update()
925 if (l <= AES_BLOCK_SIZE) { in mac_update()
943 memcpy(out, ctx->dg, AES_BLOCK_SIZE); in cbcmac_final()
954 if (ctx->len != AES_BLOCK_SIZE) { in cmac_final()
956 consts += AES_BLOCK_SIZE; in cmac_final()
961 memcpy(out, ctx->dg, AES_BLOCK_SIZE); in cmac_final()
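mac_update()'s min(len, AES_BLOCK_SIZE - ctx->len) line is the partial-block buffering: bytes are XORed into the running digest ctx->dg until a block fills, at which point the real code encrypts it, and cmac_final() later folds in K1 or K2 (consts vs. consts + AES_BLOCK_SIZE) depending on whether the last block was full. A sketch of the buffering step alone, with a plain loop in place of crypto_xor():

    #include <stddef.h>

    #define AES_BLOCK_SIZE 16

    struct mac_ctx {
        unsigned char dg[AES_BLOCK_SIZE];  /* running digest block */
        unsigned int len;                  /* bytes buffered in dg */
    };

    /* XOR up to one block's worth of input into the digest; the caller
     * would encrypt dg once len reaches AES_BLOCK_SIZE. Returns the
     * number of bytes consumed. */
    static size_t mac_buffer(struct mac_ctx *ctx, const unsigned char *p,
                             size_t len)
    {
        size_t l = len < AES_BLOCK_SIZE - ctx->len ? len
                                                   : AES_BLOCK_SIZE - ctx->len;

        for (size_t i = 0; i < l; i++)     /* crypto_xor() equivalent */
            ctx->dg[ctx->len + i] ^= p[i];
        ctx->len += l;
        return l;
    }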
970 .base.cra_blocksize = AES_BLOCK_SIZE,
972 2 * AES_BLOCK_SIZE,
975 .digestsize = AES_BLOCK_SIZE,
985 .base.cra_blocksize = AES_BLOCK_SIZE,
987 2 * AES_BLOCK_SIZE,
990 .digestsize = AES_BLOCK_SIZE,
1004 .digestsize = AES_BLOCK_SIZE,