Searched refs:ivsize (Results 1 – 25 of 133) sorted by relevance

/Linux-v5.4/crypto/
echainiv.c
34 unsigned int ivsize = crypto_aead_ivsize(geniv); in echainiv_encrypt() local
37 if (req->cryptlen < ivsize) in echainiv_encrypt()
65 memcpy(&nseqno, info + ivsize - 8, 8); in echainiv_encrypt()
67 memset(info, 0, ivsize); in echainiv_encrypt()
69 scatterwalk_map_and_copy(info, req->dst, req->assoclen, ivsize, 1); in echainiv_encrypt()
74 memcpy(&a, ctx->salt + ivsize - 8, 8); in echainiv_encrypt()
79 memcpy(info + ivsize - 8, &a, 8); in echainiv_encrypt()
80 } while ((ivsize -= 8)); in echainiv_encrypt()
92 unsigned int ivsize = crypto_aead_ivsize(geniv); in echainiv_decrypt() local
94 if (req->cryptlen < ivsize) in echainiv_decrypt()
[all …]
seqiv.c
58 unsigned int ivsize = 8; in seqiv_aead_encrypt() local
61 if (req->cryptlen < ivsize) in seqiv_aead_encrypt()
87 info = kmemdup(req->iv, ivsize, req->base.flags & in seqiv_aead_encrypt()
99 req->cryptlen - ivsize, info); in seqiv_aead_encrypt()
100 aead_request_set_ad(subreq, req->assoclen + ivsize); in seqiv_aead_encrypt()
102 crypto_xor(info, ctx->salt, ivsize); in seqiv_aead_encrypt()
103 scatterwalk_map_and_copy(info, req->dst, req->assoclen, ivsize, 1); in seqiv_aead_encrypt()
118 unsigned int ivsize = 8; in seqiv_aead_decrypt() local
120 if (req->cryptlen < ivsize + crypto_aead_authsize(geniv)) in seqiv_aead_decrypt()
130 req->cryptlen - ivsize, req->iv); in seqiv_aead_decrypt()
[all …]
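
The seqiv.c hits above show the sequence-IV generator: it rejects requests shorter than its fixed 8-byte IV, XORs a per-transform salt into the sequence-number IV, and writes the result into the destination scatterlist directly behind the associated data. A minimal sketch of that store step, assuming a caller-supplied salt buffer; the helper name is illustrative and, unlike the real seqiv (which duplicates the IV for async completion), it mutates req->iv in place:

#include <crypto/aead.h>
#include <crypto/algapi.h>      /* crypto_xor() */
#include <crypto/scatterwalk.h> /* scatterwalk_map_and_copy() */

static void demo_store_generated_iv(struct aead_request *req,
				    const u8 *salt, unsigned int ivsize)
{
	/* req->iv holds the sequence number; mix in the salt. */
	crypto_xor(req->iv, salt, ivsize);

	/* Place the generated IV in dst right after the AAD (out = 1). */
	scatterwalk_map_and_copy(req->iv, req->dst, req->assoclen,
				 ivsize, 1);
}
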
ablkcipher.c
131 memcpy(req->info, walk->iv, tfm->crt_ablkcipher.ivsize); in ablkcipher_walk_done()
182 unsigned int ivsize = tfm->crt_ablkcipher.ivsize; in ablkcipher_copy_iv() local
184 unsigned int size = aligned_bs * 2 + ivsize + max(aligned_bs, ivsize) - in ablkcipher_copy_iv()
196 iv = ablkcipher_get_spot(iv, ivsize); in ablkcipher_copy_iv()
198 walk->iv = memcpy(iv, walk->iv, ivsize); in ablkcipher_copy_iv()
346 if (alg->ivsize > PAGE_SIZE / 8) in crypto_init_ablkcipher_ops()
353 crt->ivsize = alg->ivsize; in crypto_init_ablkcipher_ops()
371 rblkcipher.ivsize = alg->cra_ablkcipher.ivsize; in crypto_ablkcipher_report()
395 seq_printf(m, "ivsize : %u\n", ablkcipher->ivsize); in crypto_ablkcipher_show()
blkcipher.c
130 memcpy(desc->info, walk->iv, walk->ivsize); in blkcipher_walk_done()
274 walk->ivsize + max(aligned_bs, walk->ivsize) - in blkcipher_copy_iv()
286 iv = blkcipher_get_spot(iv, walk->ivsize); in blkcipher_copy_iv()
288 walk->iv = memcpy(iv, walk->iv, walk->ivsize); in blkcipher_copy_iv()
298 walk->ivsize = crypto_blkcipher_ivsize(desc->tfm); in blkcipher_walk_virt()
310 walk->ivsize = crypto_blkcipher_ivsize(desc->tfm); in blkcipher_walk_phys()
348 walk->ivsize = crypto_blkcipher_ivsize(desc->tfm); in blkcipher_walk_virt_block()
362 walk->ivsize = crypto_aead_ivsize(tfm); in blkcipher_aead_walk_virt_block()
446 cipher->ivsize) { in crypto_blkcipher_ctxsize()
448 len += cipher->ivsize; in crypto_blkcipher_ctxsize()
[all …]
essiv.c
219 int ivsize = crypto_aead_ivsize(tfm); in essiv_aead_crypt() local
220 int ssize = req->assoclen - ivsize; in essiv_aead_crypt()
231 memcpy(iv, req->iv, ivsize); in essiv_aead_crypt()
251 sg_set_buf(rctx->sg + 1, iv, ivsize); in essiv_aead_crypt()
428 int ivsize) in essiv_supported_algorithms() argument
443 if (ivsize != alg->cra_blocksize) in essiv_supported_algorithms()
470 int ivsize; in essiv_create() local
508 ivsize = crypto_skcipher_alg_ivsize(skcipher_alg); in essiv_create()
535 ivsize = aead_alg->ivsize; in essiv_create()
560 ivsize)) { in essiv_create()
[all …]
aead.c
163 raead.ivsize = aead->ivsize; in crypto_aead_report()
184 seq_printf(m, "ivsize : %u\n", aead->ivsize); in crypto_aead_show()
239 unsigned int ivsize; in aead_geniv_alloc() local
270 ivsize = crypto_aead_alg_ivsize(alg); in aead_geniv_alloc()
274 if (ivsize < sizeof(u64)) in aead_geniv_alloc()
296 inst->alg.ivsize = ivsize; in aead_geniv_alloc()
389 if (max3(alg->maxauthsize, alg->ivsize, alg->chunksize) > in aead_prepare_alg()
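aead.c is where IV-generator templates are instantiated (aead_geniv_alloc() insists on ivsize >= sizeof(u64)) and where ivsize is exported through /proc/crypto and netlink. On the user side the same value comes from crypto_aead_ivsize(); a minimal synchronous caller sketch, assuming "gcm(aes)" is available and using placeholder key/scatterlist arguments (the demo_* name is made up):

#include <crypto/aead.h>
#include <linux/crypto.h>       /* DECLARE_CRYPTO_WAIT(), crypto_wait_req() */
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

static int demo_aead_encrypt(const u8 *key, unsigned int keylen,
			     struct scatterlist *sg, unsigned int assoclen,
			     unsigned int ptlen)
{
	struct crypto_aead *tfm;
	struct aead_request *req = NULL;
	DECLARE_CRYPTO_WAIT(wait);
	unsigned int ivsize;
	u8 *iv = NULL;
	int ret;

	tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	ivsize = crypto_aead_ivsize(tfm);   /* 12 for gcm(aes) */
	iv = kzalloc(ivsize, GFP_KERNEL);   /* fill with a real nonce */
	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!iv || !req) {
		ret = -ENOMEM;
		goto out;
	}

	ret = crypto_aead_setkey(tfm, key, keylen);
	if (ret)
		goto out;

	/* sg covers AAD || plaintext and has room for the auth tag. */
	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  crypto_req_done, &wait);
	aead_request_set_ad(req, assoclen);
	aead_request_set_crypt(req, sg, sg, ptlen, iv);
	ret = crypto_wait_req(crypto_aead_encrypt(req), &wait);
out:
	aead_request_free(req);
	kfree(iv);
	crypto_free_aead(tfm);
	return ret;
}
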
skcipher.c
170 memcpy(walk->oiv, walk->iv, walk->ivsize); in skcipher_walk_done()
209 memcpy(walk->oiv, walk->iv, walk->ivsize); in skcipher_walk_complete()
400 unsigned ivsize = walk->ivsize; in skcipher_copy_iv() local
412 size += ivsize; in skcipher_copy_iv()
414 size += aligned_bs + ivsize; in skcipher_copy_iv()
427 walk->iv = memcpy(iv, walk->iv, walk->ivsize); in skcipher_copy_iv()
470 walk->ivsize = crypto_skcipher_ivsize(tfm); in skcipher_walk_skcipher()
541 walk->ivsize = crypto_aead_ivsize(tfm); in skcipher_walk_aead_common()
689 skcipher->ivsize = crypto_blkcipher_ivsize(blkcipher); in crypto_init_skcipher_ops_blkcipher()
787 skcipher->ivsize = crypto_ablkcipher_ivsize(ablkcipher); in crypto_init_skcipher_ops_ablkcipher()
[all …]
/Linux-v5.4/drivers/crypto/caam/
caamalg_desc.h
58 struct alginfo *adata, unsigned int ivsize,
64 struct alginfo *adata, unsigned int ivsize,
70 struct alginfo *adata, unsigned int ivsize,
76 unsigned int ivsize, unsigned int icvsize,
80 unsigned int ivsize, unsigned int icvsize,
84 unsigned int ivsize, unsigned int icvsize,
88 unsigned int ivsize, unsigned int icvsize,
92 unsigned int ivsize, unsigned int icvsize,
96 unsigned int ivsize, unsigned int icvsize,
100 struct alginfo *adata, unsigned int ivsize,
[all …]
caamalg.c
171 unsigned int ivsize = crypto_aead_ivsize(aead); in aead_set_sh_desc() local
241 cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize, in aead_set_sh_desc()
263 cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize, in aead_set_sh_desc()
287 cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize, in aead_set_sh_desc()
312 unsigned int ivsize = crypto_aead_ivsize(aead); in gcm_set_sh_desc() local
334 cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ivsize, ctx->authsize, false); in gcm_set_sh_desc()
351 cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ivsize, ctx->authsize, false); in gcm_set_sh_desc()
377 unsigned int ivsize = crypto_aead_ivsize(aead); in rfc4106_set_sh_desc() local
399 cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ivsize, ctx->authsize, in rfc4106_set_sh_desc()
417 cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ivsize, ctx->authsize, in rfc4106_set_sh_desc()
[all …]
caamalg_qi.c
77 unsigned int ivsize = crypto_aead_ivsize(aead); in aead_set_sh_desc() local
137 ivsize, ctx->authsize, is_rfc3686, nonce, in aead_set_sh_desc()
152 ivsize, ctx->authsize, alg->caam.geniv, in aead_set_sh_desc()
170 ivsize, ctx->authsize, is_rfc3686, nonce, in aead_set_sh_desc()
296 unsigned int ivsize = crypto_aead_ivsize(aead); in gcm_set_sh_desc() local
315 cnstr_shdsc_gcm_encap(ctx->sh_desc_enc, &ctx->cdata, ivsize, in gcm_set_sh_desc()
330 cnstr_shdsc_gcm_decap(ctx->sh_desc_dec, &ctx->cdata, ivsize, in gcm_set_sh_desc()
401 unsigned int ivsize = crypto_aead_ivsize(aead); in rfc4106_set_sh_desc() local
421 cnstr_shdsc_rfc4106_encap(ctx->sh_desc_enc, &ctx->cdata, ivsize, in rfc4106_set_sh_desc()
435 cnstr_shdsc_rfc4106_decap(ctx->sh_desc_dec, &ctx->cdata, ivsize, in rfc4106_set_sh_desc()
[all …]
caamalg_desc.c
289 struct alginfo *adata, unsigned int ivsize, in cnstr_shdsc_aead_encap() argument
315 append_seq_load(desc, ivsize, LDST_CLASS_1_CCB | in cnstr_shdsc_aead_encap()
383 struct alginfo *adata, unsigned int ivsize, in cnstr_shdsc_aead_decap() argument
410 append_seq_load(desc, ivsize, LDST_CLASS_1_CCB | in cnstr_shdsc_aead_decap()
420 ivsize); in cnstr_shdsc_aead_decap()
428 ivsize); in cnstr_shdsc_aead_decap()
442 append_seq_load(desc, ivsize, LDST_CLASS_1_CCB | in cnstr_shdsc_aead_decap()
446 (ctx1_iv_off << MOVE_OFFSET_SHIFT) | ivsize); in cnstr_shdsc_aead_decap()
500 struct alginfo *adata, unsigned int ivsize, in cnstr_shdsc_aead_givencap() argument
528 append_seq_load(desc, ivsize, LDST_CLASS_1_CCB | in cnstr_shdsc_aead_givencap()
[all …]
caamalg_qi2.c
143 int dst_nents, dma_addr_t iv_dma, int ivsize, in caam_unmap() argument
157 dma_unmap_single(dev, iv_dma, ivsize, iv_dir); in caam_unmap()
168 unsigned int ivsize = crypto_aead_ivsize(aead); in aead_set_sh_desc() local
233 ivsize, ctx->authsize, is_rfc3686, in aead_set_sh_desc()
238 ivsize, ctx->authsize, is_rfc3686, nonce, in aead_set_sh_desc()
259 ivsize, ctx->authsize, alg->caam.geniv, in aead_set_sh_desc()
364 int ivsize = 0; in aead_edesc_alloc() local
444 ivsize = crypto_aead_ivsize(aead); in aead_edesc_alloc()
458 qm_sg_nents = 1 + !!ivsize + mapped_src_nents; in aead_edesc_alloc()
463 1 + !!ivsize + in aead_edesc_alloc()
[all …]
/Linux-v5.4/drivers/crypto/cavium/nitrox/
nitrox_skcipher.c
137 static int alloc_src_sglist(struct skcipher_request *skreq, int ivsize) in alloc_src_sglist() argument
144 ret = alloc_src_req_buf(nkreq, nents, ivsize); in alloc_src_sglist()
148 nitrox_creq_copy_iv(nkreq->src, skreq->iv, ivsize); in alloc_src_sglist()
149 nitrox_creq_set_src_sg(nkreq, nents, ivsize, skreq->src, in alloc_src_sglist()
155 static int alloc_dst_sglist(struct skcipher_request *skreq, int ivsize) in alloc_dst_sglist() argument
170 nitrox_creq_set_dst_sg(nkreq, nents, ivsize, skreq->dst, in alloc_dst_sglist()
209 int ivsize = crypto_skcipher_ivsize(cipher); in nitrox_skcipher_crypt() local
226 creq->gph.param2 = cpu_to_be16(ivsize); in nitrox_skcipher_crypt()
232 ret = alloc_src_sglist(skreq, ivsize); in nitrox_skcipher_crypt()
236 ret = alloc_dst_sglist(skreq, ivsize); in nitrox_skcipher_crypt()
[all …]
nitrox_aead.c
78 struct scatterlist *src, char *iv, int ivsize, in alloc_src_sglist() argument
90 ret = alloc_src_req_buf(nkreq, nents, ivsize); in alloc_src_sglist()
94 nitrox_creq_copy_iv(nkreq->src, iv, ivsize); in alloc_src_sglist()
95 nitrox_creq_set_src_sg(nkreq, nents, ivsize, src, buflen); in alloc_src_sglist()
101 struct scatterlist *dst, int ivsize, int buflen) in alloc_dst_sglist() argument
120 nitrox_creq_set_dst_sg(nkreq, nents, ivsize, dst, buflen); in alloc_dst_sglist()
151 creq->gph.param2 = cpu_to_be16(rctx->ivsize + rctx->assoclen); in nitrox_set_creq()
153 param3.auth_offset = rctx->ivsize; in nitrox_set_creq()
159 ret = alloc_src_sglist(&rctx->nkreq, rctx->src, rctx->iv, rctx->ivsize, in nitrox_set_creq()
164 ret = alloc_dst_sglist(&rctx->nkreq, rctx->dst, rctx->ivsize, in nitrox_set_creq()
[all …]
nitrox_req.h
236 int ivsize; member
613 int nents, int ivsize) in alloc_src_req_buf() argument
617 nkreq->src = alloc_req_buf(nents, ivsize, creq->gfp); in alloc_src_req_buf()
629 static inline struct scatterlist *nitrox_creq_src_sg(char *iv, int ivsize) in nitrox_creq_src_sg() argument
631 return (struct scatterlist *)(iv + ivsize); in nitrox_creq_src_sg()
635 int nents, int ivsize, in nitrox_creq_set_src_sg() argument
642 creq->src = nitrox_creq_src_sg(iv, ivsize); in nitrox_creq_set_src_sg()
653 sg = create_single_sg(sg, iv, ivsize); in nitrox_creq_set_src_sg()
693 int nents, int ivsize, in nitrox_creq_set_dst_sg() argument
713 sg = create_single_sg(sg, iv, ivsize); in nitrox_creq_set_dst_sg()
/Linux-v5.4/drivers/crypto/rockchip/
rk3288_crypto_ablkcipher.c
204 u32 ivsize, block, conf_reg = 0; in rk_ablk_hw_init() local
207 ivsize = crypto_ablkcipher_ivsize(cipher); in rk_ablk_hw_init()
214 memcpy_toio(dev->reg + RK_CRYPTO_TDES_IV_0, req->info, ivsize); in rk_ablk_hw_init()
226 memcpy_toio(dev->reg + RK_CRYPTO_AES_IV_0, req->info, ivsize); in rk_ablk_hw_init()
251 u32 ivsize = crypto_ablkcipher_ivsize(tfm); in rk_set_data_start() local
253 dev->sg_src->offset + dev->sg_src->length - ivsize; in rk_set_data_start()
259 memcpy(ctx->iv, src_last_blk, ivsize); in rk_set_data_start()
261 ivsize, dev->total - ivsize); in rk_set_data_start()
299 u32 ivsize = crypto_ablkcipher_ivsize(tfm); in rk_iv_copyback() local
305 dev->sg_dst->length - ivsize, ivsize); in rk_iv_copyback()
[all …]
/Linux-v5.4/drivers/crypto/ccree/
cc_cipher.c
472 unsigned int ivsize, struct cc_hw_desc desc[], in cc_setup_readiv_desc() argument
494 set_dout_dlli(&desc[*seq_size], iv_dma_addr, ivsize, NS_BIT, 1); in cc_setup_readiv_desc()
528 unsigned int ivsize, unsigned int nbytes, in cc_setup_state_desc() argument
558 set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr, ivsize, in cc_setup_state_desc()
800 unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm); in cc_cipher_complete() local
804 cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst); in cc_cipher_complete()
805 memcpy(req->iv, req_ctx->iv, ivsize); in cc_cipher_complete()
818 unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm); in cc_cipher_process() local
853 req_ctx->iv = kmemdup(iv, ivsize, flags); in cc_cipher_process()
875 rc = cc_map_cipher_request(ctx_p->drvdata, req_ctx, ivsize, nbytes, in cc_cipher_process()
[all …]
/Linux-v5.4/drivers/crypto/qce/
ablkcipher.c
66 rctx->ivsize = crypto_ablkcipher_ivsize(ablkcipher); in qce_ablkcipher_async_req_handle()
274 unsigned int ivsize; member
285 .ivsize = AES_BLOCK_SIZE,
294 .ivsize = AES_BLOCK_SIZE,
303 .ivsize = AES_BLOCK_SIZE,
312 .ivsize = AES_BLOCK_SIZE,
321 .ivsize = 0,
330 .ivsize = DES_BLOCK_SIZE,
339 .ivsize = 0,
348 .ivsize = DES3_EDE_BLOCK_SIZE,
[all …]
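
The qce hits are entries of the driver's algorithm template table; each entry's .ivsize is copied into the algorithm the driver registers, which is what crypto_ablkcipher_ivsize()/crypto_skcipher_ivsize() later report to callers. The qce code still registers through the legacy ablkcipher interface; as a hedged illustration only, here is how the same ivsize field is declared on the modern skcipher side, with placeholder callbacks and names rather than the qce driver's real ones:

#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>
#include <linux/module.h>

/* Placeholder callbacks; a real driver programs its hardware here. */
static int demo_setkey(struct crypto_skcipher *tfm, const u8 *key,
		       unsigned int keylen)
{
	return 0;
}

static int demo_encrypt(struct skcipher_request *req)
{
	return 0;
}

static int demo_decrypt(struct skcipher_request *req)
{
	return 0;
}

static struct skcipher_alg demo_cbc_aes_alg = {
	.base.cra_name		= "cbc(aes)",
	.base.cra_driver_name	= "cbc-aes-demo",
	.base.cra_priority	= 100,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.ivsize			= AES_BLOCK_SIZE, /* what crypto_skcipher_ivsize() returns */
	.setkey			= demo_setkey,
	.encrypt		= demo_encrypt,
	.decrypt		= demo_decrypt,
};

/* Registered from the driver's probe/init path with
 * crypto_register_skcipher(&demo_cbc_aes_alg). */
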
common.c
172 static void qce_xts_swapiv(__be32 *dst, const u8 *src, unsigned int ivsize) in qce_xts_swapiv() argument
177 if (ivsize > QCE_AES_IV_LENGTH) in qce_xts_swapiv()
182 for (i = (QCE_AES_IV_LENGTH - ivsize), j = ivsize - 1; in qce_xts_swapiv()
320 unsigned int ivsize = rctx->ivsize; in qce_setup_regs_ablkcipher() local
356 qce_xts_swapiv(enciv, rctx->iv, ivsize); in qce_setup_regs_ablkcipher()
358 qce_cpu_to_be32p_array(enciv, rctx->iv, ivsize); in qce_setup_regs_ablkcipher()
/Linux-v5.4/drivers/crypto/ccp/
ccp-crypto-aes.c
262 unsigned int ivsize; member
273 .ivsize = 0,
282 .ivsize = AES_BLOCK_SIZE,
291 .ivsize = AES_BLOCK_SIZE,
300 .ivsize = AES_BLOCK_SIZE,
309 .ivsize = AES_BLOCK_SIZE,
318 .ivsize = CTR_RFC3686_IV_SIZE,
345 alg->cra_ablkcipher.ivsize = def->ivsize; in ccp_register_aes_alg()
ccp-crypto-aes-galois.c
172 .ivsize = GCM_AES_IV_SIZE,
194 unsigned int ivsize; member
205 .ivsize = AES_BLOCK_SIZE,
232 alg->base.cra_ablkcipher.ivsize = def->ivsize; in ccp_register_aes_aead()
ccp-crypto-des3.c
161 unsigned int ivsize; member
172 .ivsize = 0,
181 .ivsize = DES3_EDE_BLOCK_SIZE,
208 alg->cra_ablkcipher.ivsize = def->ivsize; in ccp_register_des3_alg()
/Linux-v5.4/include/crypto/
skcipher.h
43 unsigned int ivsize; member
121 unsigned int ivsize; member
263 return alg->base.cra_blkcipher.ivsize; in crypto_skcipher_alg_ivsize()
266 return alg->base.cra_ablkcipher.ivsize; in crypto_skcipher_alg_ivsize()
268 return alg->ivsize; in crypto_skcipher_alg_ivsize()
282 return tfm->ivsize; in crypto_skcipher_ivsize()
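
skcipher.h shows where ivsize lives (a member of both struct skcipher_alg and the tfm) and how crypto_skcipher_alg_ivsize()/crypto_skcipher_ivsize() resolve it, falling back to the legacy blkcipher/ablkcipher fields. The portable caller pattern is therefore to size the IV buffer from the tfm rather than hard-coding it; a minimal synchronous sketch, assuming "cbc(aes)" and placeholder key/data buffers (demo_* is a made-up name, and buf must be addressable by sg_init_one, i.e. not on the stack):

#include <crypto/skcipher.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

static int demo_cbc_encrypt(const u8 *key, unsigned int keylen,
			    u8 *buf, unsigned int len)
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req = NULL;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	unsigned int ivsize;
	u8 *iv = NULL;
	int ret;

	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* Ask the tfm; the template, not the caller, decides the IV length. */
	ivsize = crypto_skcipher_ivsize(tfm);
	iv = kzalloc(ivsize, GFP_KERNEL);   /* fill with a real IV */
	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!iv || !req) {
		ret = -ENOMEM;
		goto out;
	}

	ret = crypto_skcipher_setkey(tfm, key, keylen);
	if (ret)
		goto out;

	sg_init_one(&sg, buf, len);
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, &sg, &sg, len, iv);
	ret = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
out:
	skcipher_request_free(req);
	kfree(iv);
	crypto_free_skcipher(tfm);
	return ret;
}
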
/Linux-v5.4/drivers/crypto/
atmel-aes.c
509 int ivsize = crypto_ablkcipher_ivsize(ablkcipher); in atmel_aes_complete() local
513 req->nbytes - ivsize, ivsize, 0); in atmel_aes_complete()
516 memcpy(req->info, rctx->lastc, ivsize); in atmel_aes_complete()
519 req->nbytes - ivsize, ivsize, 0); in atmel_aes_complete()
1128 int ivsize = crypto_ablkcipher_ivsize(ablkcipher); in atmel_aes_crypt() local
1131 (req->nbytes - ivsize), ivsize, 0); in atmel_aes_crypt()
1299 .ivsize = AES_BLOCK_SIZE,
1319 .ivsize = AES_BLOCK_SIZE,
1339 .ivsize = AES_BLOCK_SIZE,
1359 .ivsize = AES_BLOCK_SIZE,
[all …]
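
Besides sizing buffers, several of these drivers use ivsize when handing the "next IV" back to the caller: for CBC-style modes, atmel-aes.c (like the rockchip driver above) copies the last ivsize bytes of the processed data into req->info so a chained request can continue the stream. A generic sketch of the encrypt-side copy-back, assuming the ablkcipher-era request layout these drivers use; the helper name is made up, and on decryption the last ciphertext block has to be saved from the source before it is overwritten in place:

#include <crypto/scatterwalk.h>
#include <linux/crypto.h>

static void demo_cbc_iv_copyback(struct ablkcipher_request *req)
{
	struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
	unsigned int ivsize = crypto_ablkcipher_ivsize(tfm);

	/* Last ciphertext block of dst becomes the IV for the next request. */
	scatterwalk_map_and_copy(req->info, req->dst,
				 req->nbytes - ivsize, ivsize, 0);
}
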
talitos.c
997 unsigned int ivsize = crypto_aead_ivsize(aead); in ipsec_esp_unmap() local
1018 sg_pcopy_to_buffer(areq->dst, dst_nents, ctx->iv, ivsize, in ipsec_esp_unmap()
1019 areq->assoclen + cryptlen - ivsize); in ipsec_esp_unmap()
1032 unsigned int ivsize = crypto_aead_ivsize(authenc); in ipsec_esp_encrypt_done() local
1039 dma_unmap_single(dev, edesc->iv_dma, ivsize, DMA_TO_DEVICE); in ipsec_esp_encrypt_done()
1207 unsigned int ivsize = crypto_aead_ivsize(aead); in ipsec_esp() local
1241 to_talitos_ptr(civ_ptr, edesc->iv_dma, ivsize, is_sec1); in ipsec_esp()
1302 map_single_talitos_ptr(dev, &desc->ptr[6], ivsize, ctx->iv, in ipsec_esp()
1328 unsigned int ivsize, in talitos_edesc_alloc() argument
1396 alloc_len += ivsize; in talitos_edesc_alloc()
[all …]
