Searched refs:GHASH_BLOCK_SIZE (Results 1 – 10 of 10) sorted by relevance
| /Linux-v5.4/arch/s390/crypto/ |
| D | ghash_s390.c |
|      16  #define GHASH_BLOCK_SIZE 16  macro
|      20  u8 key[GHASH_BLOCK_SIZE];
|      24  u8 icv[GHASH_BLOCK_SIZE];
|      25  u8 key[GHASH_BLOCK_SIZE];
|      26  u8 buffer[GHASH_BLOCK_SIZE];
|      36  memcpy(dctx->key, ctx->key, GHASH_BLOCK_SIZE);  in ghash_init()
|      46  if (keylen != GHASH_BLOCK_SIZE) {  in ghash_setkey()
|      51  memcpy(ctx->key, key, GHASH_BLOCK_SIZE);  in ghash_setkey()
|      64  u8 *pos = buf + (GHASH_BLOCK_SIZE - dctx->bytes);  in ghash_update()
|      75  GHASH_BLOCK_SIZE);  in ghash_update()
|      [all …]
|
| D | aes_s390.c |
|      1010  u8 j0[GHASH_BLOCK_SIZE]; /* initial counter value */  in gcm_aes_crypt()
|      1105  .ivsize = GHASH_BLOCK_SIZE - sizeof(u32),
|
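The two aes_s390.c hits above capture a GCM detail worth spelling out: the initial counter block J0 occupies a full GHASH block (16 bytes), while the advertised ivsize is GHASH_BLOCK_SIZE - sizeof(u32) = 12 bytes. Per NIST SP 800-38D, a 96-bit IV is extended to J0 = IV || 0^31 || 1. A minimal sketch of that construction follows (plain userspace C, not the driver code; gcm_build_j0 is a hypothetical name):

    #include <stdint.h>
    #include <string.h>

    #define GHASH_BLOCK_SIZE 16

    /* Hypothetical helper: build GCM's initial counter block J0 from a
     * 96-bit IV, per NIST SP 800-38D: J0 = IV || 0^31 || 1. This is why
     * j0[] is sized GHASH_BLOCK_SIZE while ivsize is only 12 bytes. */
    static void gcm_build_j0(uint8_t j0[GHASH_BLOCK_SIZE],
                             const uint8_t iv[12])
    {
        memcpy(j0, iv, 12);           /* first 96 bits: the IV itself  */
        j0[12] = j0[13] = j0[14] = 0; /* then 31 zero bits ...         */
        j0[15] = 1;                   /* ... and a final 1 bit         */
    }
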
| /Linux-v5.4/crypto/ |
| D | ghash-generic.c |
|      61  if (keylen != GHASH_BLOCK_SIZE) {  in ghash_setkey()
|      69  BUILD_BUG_ON(sizeof(k) != GHASH_BLOCK_SIZE);  in ghash_setkey()
|      70  memcpy(&k, key, GHASH_BLOCK_SIZE); /* avoid violating alignment rules */  in ghash_setkey()
|      72  memzero_explicit(&k, GHASH_BLOCK_SIZE);  in ghash_setkey()
|      89  u8 *pos = dst + (GHASH_BLOCK_SIZE - dctx->bytes);  in ghash_update()
|      101  while (srclen >= GHASH_BLOCK_SIZE) {  in ghash_update()
|      102  crypto_xor(dst, src, GHASH_BLOCK_SIZE);  in ghash_update()
|      104  src += GHASH_BLOCK_SIZE;  in ghash_update()
|      105  srclen -= GHASH_BLOCK_SIZE;  in ghash_update()
|      109  dctx->bytes = GHASH_BLOCK_SIZE - srclen;  in ghash_update()
|      [all …]
|
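The ghash-generic.c hits trace the canonical GHASH update loop: XOR each 16-byte block of input into the running digest, then multiply the digest by the hash subkey H in GF(2^128). The digest and the partial-block accumulator share dctx->buffer, and dctx->bytes counts how many bytes are still needed to complete a buffered block (hence pos = dst + (GHASH_BLOCK_SIZE - dctx->bytes)). Below is a self-contained userspace sketch of that pattern, under stated assumptions: it uses a naive bitwise GF(2^128) multiply where the kernel uses the table-driven gf128mul_4k_lle(), and the struct and function names are illustrative, not the kernel's.

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    #define GHASH_BLOCK_SIZE 16

    /* Naive GF(2^128) multiply in GCM's bit-reflected convention
     * (NIST SP 800-38D). The kernel uses precomputed 4-bit tables via
     * gf128mul_4k_lle(); this is only a readable reference version. */
    static void gf128_mul(uint8_t x[16], const uint8_t y[16])
    {
        uint8_t z[16] = { 0 }, v[16];

        memcpy(v, y, 16);
        for (int i = 0; i < 128; i++) {
            if (x[i / 8] & (0x80 >> (i % 8)))     /* bit i of x, MSB first */
                for (int j = 0; j < 16; j++)
                    z[j] ^= v[j];
            int lsb = v[15] & 1;
            for (int j = 15; j > 0; j--)          /* v >>= 1, 128-bit wide */
                v[j] = (v[j] >> 1) | (v[j - 1] << 7);
            v[0] >>= 1;
            if (lsb)
                v[0] ^= 0xe1;  /* reduce by x^128 + x^7 + x^2 + x + 1 */
        }
        memcpy(x, z, 16);
    }

    struct ghash_sketch {
        uint8_t key[GHASH_BLOCK_SIZE];    /* hash subkey H                */
        uint8_t buffer[GHASH_BLOCK_SIZE]; /* digest / partial accumulator */
        unsigned int bytes;               /* bytes needed to fill a block */
    };

    /* Mirrors the shape of ghash-generic.c's ghash_update(): finish any
     * buffered partial block, process full blocks, stash the tail. */
    static void ghash_update_sketch(struct ghash_sketch *c,
                                    const uint8_t *src, size_t srclen)
    {
        uint8_t *dst = c->buffer;

        if (c->bytes) {   /* complete a previously buffered block */
            size_t n = srclen < c->bytes ? srclen : c->bytes;
            uint8_t *pos = dst + (GHASH_BLOCK_SIZE - c->bytes);

            c->bytes -= n;
            srclen -= n;
            while (n--)
                *pos++ ^= *src++;
            if (!c->bytes)
                gf128_mul(dst, c->key);
        }
        while (srclen >= GHASH_BLOCK_SIZE) {   /* full blocks: XOR, * H */
            for (int i = 0; i < GHASH_BLOCK_SIZE; i++)
                dst[i] ^= src[i];
            gf128_mul(dst, c->key);
            src += GHASH_BLOCK_SIZE;
            srclen -= GHASH_BLOCK_SIZE;
        }
        if (srclen) {                          /* buffer the trailing tail */
            c->bytes = GHASH_BLOCK_SIZE - srclen;
            while (srclen--)
                *dst++ ^= *src++;
        }
    }
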
| /Linux-v5.4/arch/arm64/crypto/ |
| D | ghash-ce-glue.c |
|      29  #define GHASH_BLOCK_SIZE 16  macro
|      44  u8 buf[GHASH_BLOCK_SIZE];
|      103  src += GHASH_BLOCK_SIZE;  in ghash_do_update()
|      106  crypto_xor((u8 *)&dst, in, GHASH_BLOCK_SIZE);  in ghash_do_update()
|      116  #define MAX_BLOCKS (SZ_64K / GHASH_BLOCK_SIZE)
|      126  unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;  in __ghash_update()
|      130  if ((partial + len) >= GHASH_BLOCK_SIZE) {  in __ghash_update()
|      135  int p = GHASH_BLOCK_SIZE - partial;  in __ghash_update()
|      142  blocks = len / GHASH_BLOCK_SIZE;  in __ghash_update()
|      143  len %= GHASH_BLOCK_SIZE;  in __ghash_update()
|      [all …]
|
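The arm64 glue layers two concerns onto the same buffering scheme: partial-block tracking via ctx->count % GHASH_BLOCK_SIZE, and the MAX_BLOCKS cap (SZ_64K / GHASH_BLOCK_SIZE, i.e. at most 64 KiB per call) that bounds how long the NEON-accelerated routine runs with preemption disabled. A rough sketch of that chunking idea follows; simd_begin(), simd_end(), and simd_ghash_blocks() are hypothetical stand-ins for the kernel_neon_begin()/kernel_neon_end() bracket and the PMULL routine.

    #include <stddef.h>
    #include <stdint.h>

    #define SZ_64K           0x10000
    #define GHASH_BLOCK_SIZE 16
    #define MAX_BLOCKS       (SZ_64K / GHASH_BLOCK_SIZE)

    /* Hypothetical stand-ins for the accelerated block routine and the
     * bracket that disables preemption while the SIMD unit is in use. */
    void simd_begin(void);
    void simd_end(void);
    void simd_ghash_blocks(uint8_t digest[16], const uint8_t *src,
                           size_t blocks);

    /* Feed the SIMD routine at most MAX_BLOCKS (64 KiB) at a time, so no
     * single call keeps preemption disabled for an unbounded stretch. */
    static void ghash_do_blocks(uint8_t digest[16], const uint8_t *src,
                                size_t blocks)
    {
        while (blocks) {
            size_t chunk = blocks > MAX_BLOCKS ? MAX_BLOCKS : blocks;

            simd_begin();                 /* preemption off              */
            simd_ghash_blocks(digest, src, chunk);
            simd_end();                   /* let the scheduler run again */

            src += chunk * GHASH_BLOCK_SIZE;
            blocks -= chunk;
        }
    }
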
| /Linux-v5.4/arch/arm/crypto/ |
| D | ghash-ce-glue.c |
|      26  #define GHASH_BLOCK_SIZE 16  macro
|      40  u8 buf[GHASH_BLOCK_SIZE];
|      86  src += GHASH_BLOCK_SIZE;  in ghash_do_update()
|      89  crypto_xor((u8 *)&dst, in, GHASH_BLOCK_SIZE);  in ghash_do_update()
|      102  unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;  in ghash_update()
|      106  if ((partial + len) >= GHASH_BLOCK_SIZE) {  in ghash_update()
|      111  int p = GHASH_BLOCK_SIZE - partial;  in ghash_update()
|      118  blocks = len / GHASH_BLOCK_SIZE;  in ghash_update()
|      119  len %= GHASH_BLOCK_SIZE;  in ghash_update()
|      123  src += blocks * GHASH_BLOCK_SIZE;  in ghash_update()
|      [all …]
|
| /Linux-v5.4/drivers/crypto/vmx/ |
| D | ghash.c |
|      59  if (keylen != GHASH_BLOCK_SIZE)  in p8_ghash_setkey()
|      70  memcpy(&ctx->key, key, GHASH_BLOCK_SIZE);  in p8_ghash_setkey()
|      88  crypto_xor((u8 *)dctx->shash, dctx->buffer, GHASH_BLOCK_SIZE);  in __ghash_block()
|      107  while (srclen >= GHASH_BLOCK_SIZE) {  in __ghash_blocks()
|      108  crypto_xor((u8 *)dctx->shash, src, GHASH_BLOCK_SIZE);  in __ghash_blocks()
|      110  srclen -= GHASH_BLOCK_SIZE;  in __ghash_blocks()
|      111  src += GHASH_BLOCK_SIZE;  in __ghash_blocks()
|      180  .cra_blocksize = GHASH_BLOCK_SIZE,
|
| /Linux-v5.4/arch/x86/crypto/ |
| D | ghash-clmulni-intel_glue.c |
|      23  #define GHASH_BLOCK_SIZE 16  macro
|      40  u8 buffer[GHASH_BLOCK_SIZE];
|      60  if (keylen != GHASH_BLOCK_SIZE) {  in ghash_setkey()
|      88  u8 *pos = dst + (GHASH_BLOCK_SIZE - dctx->bytes);  in ghash_update()
|      106  dctx->bytes = GHASH_BLOCK_SIZE - srclen;  in ghash_update()
|      119  u8 *tmp = dst + (GHASH_BLOCK_SIZE - dctx->bytes);  in ghash_flush()
|      139  memcpy(dst, buf, GHASH_BLOCK_SIZE);  in ghash_final()
|      156  .cra_blocksize = GHASH_BLOCK_SIZE,
|      314  .cra_blocksize = GHASH_BLOCK_SIZE,
|
| /Linux-v5.4/include/crypto/ |
| D | ghash.h |
|      12  #define GHASH_BLOCK_SIZE 16  macro
|      20  u8 buffer[GHASH_BLOCK_SIZE];
|
| /Linux-v5.4/drivers/crypto/chelsio/ |
| D | chcr_crypto.h |
|      39  #define GHASH_BLOCK_SIZE 16  macro
|
| /Linux-v5.4/drivers/crypto/inside-secure/ |
| D | safexcel_cipher.c |
|      2133  ctx->state_sz = GHASH_BLOCK_SIZE;  in safexcel_aead_gcm_cra_init()
|