/Linux-v6.6/drivers/md/
D  dm-crypt.c
   176  unsigned int iv_size;  member
   319  memset(iv, 0, cc->iv_size);  in crypt_iv_plain_gen()
   328  memset(iv, 0, cc->iv_size);  in crypt_iv_plain64_gen()
   337  memset(iv, 0, cc->iv_size);  in crypt_iv_plain64be_gen()
   339  *(__be64 *)&iv[cc->iv_size - sizeof(u64)] = cpu_to_be64(dmreq->iv_sector);  in crypt_iv_plain64be_gen()
   351  memset(iv, 0, cc->iv_size);  in crypt_iv_essiv_gen()
   397  memset(iv, 0, cc->iv_size - sizeof(u64)); /* rest is cleared below */  in crypt_iv_benbi_gen()
   400  put_unaligned(val, (__be64 *)(iv + cc->iv_size - sizeof(u64)));  in crypt_iv_benbi_gen()
   408  memset(iv, 0, cc->iv_size);  in crypt_iv_null_gen()
   524  memcpy(iv, &md5state.hash, cc->iv_size);  in crypt_iv_lmk_one()
   [all …]
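The dm-crypt hits are the driver's per-mode IV generators: each zeroes cc->iv_size bytes and then fills in whatever the mode derives from the sector number (plain/plain64/plain64be), an encrypted sector (essiv), a 64-bit block counter (benbi), or a hash (lmk). A minimal standalone sketch of the plain64be pattern shown at lines 337-339; the real code takes struct crypt_config and struct dm_crypt_request, so the simplified signature below is hypothetical:

#include <stdint.h>
#include <string.h>

/* Zero the IV, then store the 64-bit sector number big-endian in the last
 * eight bytes -- the open-coded equivalent of the cpu_to_be64() store done
 * by crypt_iv_plain64be_gen(). Assumes iv_size >= 8, as a cipher IV is here.
 */
static void plain64be_iv(uint8_t *iv, unsigned int iv_size, uint64_t sector)
{
	memset(iv, 0, iv_size);
	for (int i = 0; i < 8; i++)
		iv[iv_size - 1 - i] = (uint8_t)(sector >> (8 * i));
}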
/Linux-v6.6/drivers/crypto/intel/keembay/
D  keembay-ocs-aes-core.c
   256  int iv_size = crypto_skcipher_ivsize(tfm);  in kmb_ocs_sk_validate_input()  local
   272  if (!req->iv || iv_size != AES_BLOCK_SIZE)  in kmb_ocs_sk_validate_input()
   283  if (!req->iv || iv_size != AES_BLOCK_SIZE)  in kmb_ocs_sk_validate_input()
   293  if (!req->iv || iv_size != AES_BLOCK_SIZE)  in kmb_ocs_sk_validate_input()
   403  int iv_size = crypto_skcipher_ivsize(tfm);  in kmb_ocs_sk_prepare_inplace()  local
   415  req->cryptlen - iv_size, iv_size, 0);  in kmb_ocs_sk_prepare_inplace()
   520  int iv_size = crypto_skcipher_ivsize(tfm);  in kmb_ocs_sk_run()  local
   551  req->cryptlen, req->iv, iv_size);  in kmb_ocs_sk_run()
   571  req->cryptlen - iv_size,  in kmb_ocs_sk_run()
   572  iv_size, 0);  in kmb_ocs_sk_run()
   [all …]

D  ocs-aes.c
   601  const u8 *iv, u32 iv_size,  in ocs_aes_validate_inputs()  argument
   664  if (!iv || iv_size != AES_BLOCK_SIZE)  in ocs_aes_validate_inputs()
   680  if (!iv || iv_size != AES_BLOCK_SIZE)  in ocs_aes_validate_inputs()
   696  if (!iv || iv_size != AES_BLOCK_SIZE)  in ocs_aes_validate_inputs()
   703  if (!iv || iv_size != GCM_AES_IV_SIZE)  in ocs_aes_validate_inputs()
   730  if (!iv || iv_size != AES_BLOCK_SIZE)  in ocs_aes_validate_inputs()
   804  u32 iv_size)  in ocs_aes_op()  argument
   809  rc = ocs_aes_validate_inputs(src_dma_list, src_size, iv, iv_size, 0, 0,  in ocs_aes_op()

D  ocs-aes.h
   78   u32 iv_size);
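ocs_aes_validate_inputs(), and the corresponding checks in keembay-ocs-aes-core.c, enforce the IV length per mode before programming the OCS hardware: the block modes want a full AES_BLOCK_SIZE (16-byte) IV, GCM wants a GCM_AES_IV_SIZE (12-byte) nonce, and ECB takes none. A hedged, self-contained sketch of just that check (the real function also validates DMA lists, AAD and tag sizes; the enum and function name below are illustrative, not the driver's):

#include <errno.h>
#include <stdint.h>

#define AES_BLOCK_SIZE   16   /* IV length for CBC/CTR/CTS */
#define GCM_AES_IV_SIZE  12   /* 96-bit GCM nonce */

enum aes_mode { MODE_ECB, MODE_CBC, MODE_CTR, MODE_CTS, MODE_GCM };

static int validate_iv(enum aes_mode mode, const uint8_t *iv, uint32_t iv_size)
{
	switch (mode) {
	case MODE_ECB:
		return 0;                     /* no IV used */
	case MODE_GCM:
		return (!iv || iv_size != GCM_AES_IV_SIZE) ? -EINVAL : 0;
	default:
		return (!iv || iv_size != AES_BLOCK_SIZE) ? -EINVAL : 0;
	}
}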
/Linux-v6.6/net/tls/
D  tls.h
   313  prot->iv_size);  in tls_advance_record_sn()
   333  size_t pkt_len, iv_size = prot->iv_size;  in tls_fill_prepend()  local
   338  pkt_len += iv_size;  in tls_fill_prepend()
   341  ctx->tx.iv + prot->salt_size, iv_size);  in tls_fill_prepend()

D  tls_sw.c
   523   prot->iv_size + prot->salt_size);  in tls_do_encryption()
   1524  prot->iv_size + prot->salt_size);  in tls_decrypt_sg()
   1528  prot->iv_size);  in tls_decrypt_sg()
   2356  cipher_overhead += prot->iv_size;  in tls_rx_msg_size()
   2691  prot->iv_size = cipher_desc->iv;  in tls_set_sw_offload()

D  tls_device.c
   1096  prot->iv_size = cipher_desc->iv;  in tls_set_device_offload()
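In kTLS, prot->iv_size is the cipher's per-record (explicit) IV length, taken from cipher_desc when software or device offload is set up; tls_fill_prepend() adds it to the on-the-wire record length and copies the IV in right after the salt, and tls_rx_msg_size() counts it as part of the cipher overhead when sizing incoming records. A rough sketch of that accounting, hard-coded to TLS 1.2 AES-GCM-128 for illustration (the kernel reads these sizes from tls_prot_info rather than using fixed constants):

#include <stddef.h>

#define TLS_HEADER_SIZE     5   /* record type + version + length */
#define GCM128_EXPLICIT_IV  8   /* per-record IV carried on the wire */
#define GCM128_TAG_SIZE    16

/* Bytes on the wire for one TLS 1.2 AES-GCM-128 record:
 * header || explicit IV || ciphertext || authentication tag.
 */
static size_t tls12_gcm128_record_len(size_t payload_len)
{
	return TLS_HEADER_SIZE + GCM128_EXPLICIT_IV + payload_len + GCM128_TAG_SIZE;
}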
/Linux-v6.6/drivers/s390/cio/
D  airq.c
   114  static inline unsigned long iv_size(unsigned long bits)  in iv_size()  function
   138  size = iv_size(bits);  in airq_iv_create()
   210  cio_dma_free(iv->vector, iv_size(iv->bits));  in airq_iv_release()
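This iv_size is unrelated to crypto: the "iv" here is an adapter-interrupt bit vector, and the helper returns how many bytes a vector with the given number of bits occupies, so airq_iv_create() and airq_iv_release() allocate and free the same amount of CIO DMA memory. Only the prototype is visible in the hit above; a plausible body, assuming the usual kernel bitmap sizing (BITS_TO_LONGS(bits) * sizeof(unsigned long)), would be:

#include <limits.h>   /* CHAR_BIT */

/* Assumed implementation: round the bit count up to whole machine words.
 * Treat this as illustrative rather than the verbatim s390 code.
 */
static inline unsigned long iv_size(unsigned long bits)
{
	unsigned long bits_per_long = sizeof(unsigned long) * CHAR_BIT;

	return ((bits + bits_per_long - 1) / bits_per_long) * sizeof(unsigned long);
}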
/Linux-v6.6/drivers/gpu/drm/amd/amdgpu/
D  amdgpu_ih.c
   290  uint32_t iv_size = 32;  in amdgpu_ih_decode_iv_ts_helper()  local
   294  rptr += iv_size * offset;  in amdgpu_ih_decode_iv_ts_helper()
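Another non-crypto use: in amdgpu an "IV" is an interrupt vector entry in the IH ring, 32 bytes each, and the timestamp helper steps the read pointer by whole entries to reach the one it wants to decode. A sketch of that fixed-stride ring indexing (names and the power-of-two wrap mask are illustrative; the listing only shows the stride arithmetic):

#include <stdint.h>

static uint32_t ih_entry_offset(uint32_t rptr, uint32_t entry, uint32_t ring_size)
{
	const uint32_t iv_size = 32;   /* bytes per interrupt vector entry */

	/* advance by whole 32-byte entries, wrapping on the ring size */
	return (rptr + iv_size * entry) & (ring_size - 1);
}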
/Linux-v6.6/drivers/net/ethernet/chelsio/inline_crypto/ch_ktls/
D  chcr_ktls.h
   50  u32 iv_size;  member

D  chcr_ktls.c
   89    tx_info->iv_size = TLS_CIPHER_AES_GCM_128_IV_SIZE;  in chcr_ktls_save_keys()
   1187  cipher_start = TLS_HEADER_SIZE + tx_info->iv_size + 1;  in chcr_ktls_xmit_wr_complete()
   1366  (!tls_rec_offset ? TLS_HEADER_SIZE + tx_info->iv_size : 0);  in chcr_ktls_xmit_wr_short()
   1423  memcpy(pos + tx_info->salt_size, &iv_record, tx_info->iv_size);  in chcr_ktls_xmit_wr_short()
   1424  *(__be32 *)(pos + tx_info->salt_size + tx_info->iv_size) =  in chcr_ktls_xmit_wr_short()
   1426  (TLS_HEADER_SIZE + tx_info->iv_size)) / AES_BLOCK_LEN) : 0));  in chcr_ktls_xmit_wr_short()
   1806  if (tls_rec_offset + data_len <= (TLS_HEADER_SIZE + tx_info->iv_size)) {  in chcr_short_record_handler()
   1824  if (tls_rec_offset < (TLS_HEADER_SIZE + tx_info->iv_size)) {  in chcr_short_record_handler()
   1831  (TLS_HEADER_SIZE + tx_info->iv_size))  in chcr_short_record_handler()
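For partial or retransmitted records the Chelsio kTLS path has to rebuild the AES-GCM counter block itself, as lines 1423-1426 show: the 4-byte salt from the key material, the 8-byte per-record IV (TLS_CIPHER_AES_GCM_128_IV_SIZE), then a big-endian 32-bit AES block counter derived from how far into the record transmission resumes. A hedged sketch of just that layout (function name and fixed sizes are illustrative; the starting counter value is left to the caller since it depends on the record offset):

#include <stdint.h>
#include <string.h>

/* Assemble salt || explicit IV || be32 block counter into one 16-byte
 * AES-CTR counter block, using TLS 1.2 AES-GCM-128 sizes.
 */
static void build_gcm_ctr_block(uint8_t out[16], const uint8_t salt[4],
				const uint8_t iv_record[8], uint32_t counter)
{
	memcpy(out, salt, 4);
	memcpy(out + 4, iv_record, 8);
	out[12] = (uint8_t)(counter >> 24);
	out[13] = (uint8_t)(counter >> 16);
	out[14] = (uint8_t)(counter >> 8);
	out[15] = (uint8_t)counter;
}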
/Linux-v6.6/drivers/crypto/hisilicon/sec2/
D  sec_crypto.c
   1391  u32 iv_size = req->ctx->c_ctx.ivsize;  in sec_update_iv()  local
   1411  sz = sg_pcopy_to_buffer(sgl, sg_nents(sgl), iv, iv_size,  in sec_update_iv()
   1412  cryptlen - iv_size);  in sec_update_iv()
   1413  if (unlikely(sz != iv_size))  in sec_update_iv()
   1416  sz = cryptlen / iv_size;  in sec_update_iv()
   1417  if (cryptlen % iv_size)  in sec_update_iv()
   1419  ctr_iv_inc(iv, iv_size, sz);  in sec_update_iv()
   2148  sec_max_key_size, ctx_init, ctx_exit, blk_size, iv_size)\  argument
   2167  .ivsize = iv_size,\
   2171  max_key_size, blk_size, iv_size) \  argument
   [all …]
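sec_update_iv() keeps the software IV in step with the hardware between requests: for CBC it copies the last ciphertext block out of the scatterlist (lines 1411-1413), and for CTR it bumps the counter by the number of blocks just processed, i.e. cryptlen / iv_size rounded up (lines 1416-1419). A hedged sketch of what a ctr_iv_inc(iv, iv_size, nblocks) helper has to do, treating the IV as one big-endian integer (the driver's actual helper may differ in detail):

#include <stdint.h>

static void ctr_iv_inc(uint8_t *iv, uint32_t iv_size, uint32_t nblocks)
{
	uint32_t carry = nblocks;

	/* add from the least significant byte upward, propagating the carry */
	for (int i = (int)iv_size - 1; i >= 0 && carry; i--) {
		uint32_t sum = iv[i] + (carry & 0xff);

		iv[i] = (uint8_t)sum;
		carry = (carry >> 8) + (sum >> 8);
	}
}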
/Linux-v6.6/drivers/crypto/
D  sa2ul.c
   108   u8 iv_size;  member
   585   if (cfg->iv_size)  in sa_format_cmdl_gen()
   586   auth_offset += cfg->iv_size;  in sa_format_cmdl_gen()
   601   if (cfg->iv_size) {  in sa_format_cmdl_gen()
   605   upd_info->enc_iv.size = cfg->iv_size;  in sa_format_cmdl_gen()
   608   SA_CMDL_HEADER_SIZE_BYTES + cfg->iv_size;  in sa_format_cmdl_gen()
   611   (SA_CTX_ENC_AUX2_OFFSET | (cfg->iv_size >> 3));  in sa_format_cmdl_gen()
   612   total += SA_CMDL_HEADER_SIZE_BYTES + cfg->iv_size;  in sa_format_cmdl_gen()
   903   cfg.iv_size = crypto_skcipher_ivsize(tfm);  in sa_cipher_setkey()
   1467  cfg.iv_size = 0;  in sa_sha_setup()
   [all …]
/Linux-v6.6/include/net/
D  tls.h
   217  u16 iv_size;  member
/Linux-v6.6/net/tipc/
D  crypto.c
   690  unsigned int iv_size, req_size;  in tipc_aead_mem_alloc()  local
   694  iv_size = crypto_aead_ivsize(tfm);  in tipc_aead_mem_alloc()
   698  len += iv_size;  in tipc_aead_mem_alloc()
   711  *req = (struct aead_request *)PTR_ALIGN(*iv + iv_size,  in tipc_aead_mem_alloc()
/Linux-v6.6/fs/smb/client/
D  smb2ops.c
   4202  unsigned int iv_size = crypto_aead_ivsize(tfm);  in smb2_aead_req_alloc()  local
   4210  len = iv_size;
   4223  *req = (struct aead_request *)PTR_ALIGN(*iv + iv_size,  in smb2_aead_req_alloc()
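Both tipc_aead_mem_alloc() and smb2_aead_req_alloc() use the same trick: size one allocation for the IV plus an aead_request and let PTR_ALIGN() place the request structure right after the IV bytes. A simplified userspace sketch of that layout (the fixed 8-byte alignment and the stub struct stand in for the kernel's crypto alignment and struct aead_request; the real functions also reserve room for a tfm pointer and other buffers):

#include <stdint.h>
#include <stdlib.h>

struct aead_req_stub { int placeholder; };   /* stand-in for struct aead_request */

static void *alloc_iv_and_req(unsigned int iv_size, unsigned int req_size,
			      uint8_t **iv, struct aead_req_stub **req)
{
	const uintptr_t align = 8;
	uint8_t *buf = malloc(iv_size + align + req_size);   /* one buffer for both */

	if (!buf)
		return NULL;
	*iv = buf;                                            /* IV bytes first */
	*req = (struct aead_req_stub *)                       /* request, aligned up */
		(((uintptr_t)(buf + iv_size) + align - 1) & ~(align - 1));
	return buf;
}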