Lines Matching refs: AES_BLOCK_SIZE

268 	__aes_arm64_encrypt(ctx->aes_key.key_enc, key, (u8[AES_BLOCK_SIZE]){}, in gcm_setkey()
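
(These matches appear to come from the arm64 GHASH/GCM driver, arch/arm64/crypto/ghash-ce-glue.c, judging by the function names.) The gcm_setkey() line derives the GHASH key H by encrypting the all-zero block with the just-expanded AES key: (u8[AES_BLOCK_SIZE]){} is a C99 compound literal, an anonymous zero-initialized 16-byte array used as the plaintext. A minimal standalone sketch of the idiom, with aes_encrypt_block() as a stub standing in for the kernel's __aes_arm64_encrypt() (not real AES):

    #include <stdint.h>

    #define AES_BLOCK_SIZE 16
    typedef uint8_t u8;

    /* Stub cipher for illustration only; the driver calls __aes_arm64_encrypt(). */
    static void aes_encrypt_block(const u8 *rk, u8 *out, const u8 *in)
    {
        for (int i = 0; i < AES_BLOCK_SIZE; i++)
            out[i] = in[i] ^ rk[i];    /* NOT real AES */
    }

    static void derive_ghash_key(const u8 *rk, u8 h[AES_BLOCK_SIZE])
    {
        /* (u8[AES_BLOCK_SIZE]){} is a zero-initialized compound literal,
         * so this computes H = E_K(0^128) as GCM requires. */
        aes_encrypt_block(rk, h, (u8[AES_BLOCK_SIZE]){});
    }
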
355 u8 mac[AES_BLOCK_SIZE]; in gcm_final()
366 crypto_xor(tag, mac, AES_BLOCK_SIZE); in gcm_final()
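
The gcm_final() lines fold the 16-byte GHASH digest (mac) into the encrypted initial counter block to produce the tag, T = E_K(J0) XOR GHASH. crypto_xor() is the kernel's in-place XOR helper; a standalone equivalent of its effect here:

    #include <stddef.h>
    #include <stdint.h>

    /* In-place XOR with the same semantics as the kernel's crypto_xor(). */
    static void xor_in_place(uint8_t *dst, const uint8_t *src, size_t len)
    {
        for (size_t i = 0; i < len; i++)
            dst[i] ^= src[i];
    }
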
374 u8 iv[AES_BLOCK_SIZE]; in gcm_encrypt()
375 u8 ks[2 * AES_BLOCK_SIZE]; in gcm_encrypt()
376 u8 tag[AES_BLOCK_SIZE]; in gcm_encrypt()
389 if (likely(may_use_simd() && walk.total >= 2 * AES_BLOCK_SIZE)) { in gcm_encrypt()
397 pmull_gcm_encrypt_block(ks + AES_BLOCK_SIZE, iv, NULL, nrounds); in gcm_encrypt()
401 int blocks = walk.nbytes / (2 * AES_BLOCK_SIZE) * 2; in gcm_encrypt()
412 walk.nbytes % (2 * AES_BLOCK_SIZE)); in gcm_encrypt()
415 } while (walk.nbytes >= 2 * AES_BLOCK_SIZE); in gcm_encrypt()
420 while (walk.nbytes >= (2 * AES_BLOCK_SIZE)) { in gcm_encrypt()
421 int blocks = walk.nbytes / AES_BLOCK_SIZE; in gcm_encrypt()
428 crypto_xor_cpy(dst, src, ks, AES_BLOCK_SIZE); in gcm_encrypt()
429 crypto_inc(iv, AES_BLOCK_SIZE); in gcm_encrypt()
431 dst += AES_BLOCK_SIZE; in gcm_encrypt()
432 src += AES_BLOCK_SIZE; in gcm_encrypt()
435 ghash_do_update(walk.nbytes / AES_BLOCK_SIZE, dg, in gcm_encrypt()
440 walk.nbytes % (2 * AES_BLOCK_SIZE)); in gcm_encrypt()
445 if (walk.nbytes > AES_BLOCK_SIZE) { in gcm_encrypt()
446 crypto_inc(iv, AES_BLOCK_SIZE); in gcm_encrypt()
448 ks + AES_BLOCK_SIZE, iv, in gcm_encrypt()
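
The gcm_encrypt() matches cover both code paths: the SIMD path (lines 389-415) processes pairs of blocks, which is why ks holds 2 * AES_BLOCK_SIZE bytes of keystream; the scalar fallback (lines 420-440) is plain CTR mode; and the tail (lines 445-448) pre-encrypts one or two extra keystream blocks for the final partial chunk. In the fallback, crypto_xor_cpy() writes dst = src ^ ks and crypto_inc() advances the counter, treating the whole block as a big-endian integer. A self-contained userspace sketch of that fallback loop (encrypt_block() is a placeholder, not real AES):

    #include <stddef.h>
    #include <stdint.h>

    #define AES_BLOCK_SIZE 16

    /* Big-endian counter increment, as the kernel's crypto_inc() does. */
    static void ctr_inc(uint8_t *ctr, size_t size)
    {
        for (size_t i = size; i-- > 0; )
            if (++ctr[i] != 0)
                break;
    }

    /* dst = a ^ b, as the kernel's crypto_xor_cpy() does. */
    static void xor_cpy(uint8_t *dst, const uint8_t *a, const uint8_t *b,
                        size_t len)
    {
        for (size_t i = 0; i < len; i++)
            dst[i] = a[i] ^ b[i];
    }

    /* Placeholder block cipher; the driver calls __aes_arm64_encrypt() here. */
    static void encrypt_block(uint8_t out[AES_BLOCK_SIZE],
                              const uint8_t in[AES_BLOCK_SIZE])
    {
        for (int i = 0; i < AES_BLOCK_SIZE; i++)
            out[i] = in[i] ^ 0xA5;     /* NOT real AES */
    }

    /* CTR keystream application, mirroring the fallback loop above. */
    static void ctr_blocks(uint8_t *dst, const uint8_t *src, int blocks,
                           uint8_t iv[AES_BLOCK_SIZE])
    {
        uint8_t ks[AES_BLOCK_SIZE];

        do {
            encrypt_block(ks, iv);                   /* keystream block */
            xor_cpy(dst, src, ks, AES_BLOCK_SIZE);   /* ct = pt ^ ks    */
            ctr_inc(iv, AES_BLOCK_SIZE);             /* next counter    */
            dst += AES_BLOCK_SIZE;
            src += AES_BLOCK_SIZE;
        } while (--blocks > 0);
    }
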
495 u8 iv[2 * AES_BLOCK_SIZE]; in gcm_decrypt()
496 u8 tag[AES_BLOCK_SIZE]; in gcm_decrypt()
510 if (likely(may_use_simd() && walk.total >= 2 * AES_BLOCK_SIZE)) { in gcm_decrypt()
518 int blocks = walk.nbytes / (2 * AES_BLOCK_SIZE) * 2; in gcm_decrypt()
519 int rem = walk.total - blocks * AES_BLOCK_SIZE; in gcm_decrypt()
529 if (rem < (2 * AES_BLOCK_SIZE)) { in gcm_decrypt()
530 u8 *iv2 = iv + AES_BLOCK_SIZE; in gcm_decrypt()
532 if (rem > AES_BLOCK_SIZE) { in gcm_decrypt()
533 memcpy(iv2, iv, AES_BLOCK_SIZE); in gcm_decrypt()
534 crypto_inc(iv2, AES_BLOCK_SIZE); in gcm_decrypt()
539 if (rem > AES_BLOCK_SIZE) in gcm_decrypt()
547 walk.nbytes % (2 * AES_BLOCK_SIZE)); in gcm_decrypt()
550 } while (walk.nbytes >= 2 * AES_BLOCK_SIZE); in gcm_decrypt()
555 while (walk.nbytes >= (2 * AES_BLOCK_SIZE)) { in gcm_decrypt()
556 int blocks = walk.nbytes / AES_BLOCK_SIZE; in gcm_decrypt()
566 crypto_xor_cpy(dst, src, buf, AES_BLOCK_SIZE); in gcm_decrypt()
567 crypto_inc(iv, AES_BLOCK_SIZE); in gcm_decrypt()
569 dst += AES_BLOCK_SIZE; in gcm_decrypt()
570 src += AES_BLOCK_SIZE; in gcm_decrypt()
574 walk.nbytes % (2 * AES_BLOCK_SIZE)); in gcm_decrypt()
577 if (walk.nbytes > AES_BLOCK_SIZE) { in gcm_decrypt()
578 u8 *iv2 = iv + AES_BLOCK_SIZE; in gcm_decrypt()
580 memcpy(iv2, iv, AES_BLOCK_SIZE); in gcm_decrypt()
581 crypto_inc(iv2, AES_BLOCK_SIZE); in gcm_decrypt()
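
gcm_decrypt() declares its counter buffer two blocks wide (line 495) so that iv2 = iv + AES_BLOCK_SIZE can hold a second counter. When no more than two blocks remain (lines 529-541), and again for a partial tail (lines 577-581), the code clones the counter and increments only the copy, so both tail keystream blocks can be prepared up front while iv itself stays untouched. A sketch of that derivation, reusing the big-endian increment from the previous sketch:

    #include <stdint.h>
    #include <string.h>

    #define AES_BLOCK_SIZE 16

    /* Big-endian counter increment, same semantics as crypto_inc(). */
    static void ctr_inc(uint8_t *ctr, size_t size)
    {
        for (size_t i = size; i-- > 0; )
            if (++ctr[i] != 0)
                break;
    }

    /* Derive the second tail counter the way gcm_decrypt() does:
     * iv2 ends up as iv + 1; iv itself is left unchanged. */
    static void prepare_tail_counters(uint8_t iv[2 * AES_BLOCK_SIZE], int rem)
    {
        uint8_t *iv2 = iv + AES_BLOCK_SIZE;

        if (rem > AES_BLOCK_SIZE) {
            memcpy(iv2, iv, AES_BLOCK_SIZE);
            ctr_inc(iv2, AES_BLOCK_SIZE);
        }
    }
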
630 .chunksize = 2 * AES_BLOCK_SIZE,
631 .maxauthsize = AES_BLOCK_SIZE,
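
The descriptor fields tie the loops together: .chunksize = 2 * AES_BLOCK_SIZE advertises the 32-byte granularity the bulk loops above work in, which is why every loop compares against 2 * AES_BLOCK_SIZE, and .maxauthsize = AES_BLOCK_SIZE caps the tag at GCM's full 16 bytes. The rounding expression on lines 401 and 518 then always yields an even block count; a worked example with a hypothetical walk.nbytes:

    #include <stdio.h>

    #define AES_BLOCK_SIZE 16

    int main(void)
    {
        unsigned int nbytes = 80;    /* hypothetical walk.nbytes */

        /* Round down to an even number of blocks, as lines 401/518 do. */
        int blocks = nbytes / (2 * AES_BLOCK_SIZE) * 2;

        printf("%d blocks (%d bytes), %u bytes deferred\n",
               blocks, blocks * AES_BLOCK_SIZE,
               nbytes % (2 * AES_BLOCK_SIZE));
        /* prints: 4 blocks (64 bytes), 16 bytes deferred */
        return 0;
    }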