Searched refs: in_sg (Results 1 – 20 of 20), sorted by relevance

/Linux-v4.19/drivers/crypto/nx/
nx-aes-xcbc.c
75 struct nx_sg *in_sg, *out_sg; in nx_xcbc_empty() local
93 in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) keys, &len, in nx_xcbc_empty()
105 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); in nx_xcbc_empty()
121 in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) keys[1], &len, in nx_xcbc_empty()
134 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); in nx_xcbc_empty()
186 struct nx_sg *in_sg; in nx_xcbc_update() local
209 in_sg = nx_ctx->in_sg; in nx_xcbc_update()
244 in_sg = nx_build_sg_list(nx_ctx->in_sg, in nx_xcbc_update()
255 in_sg = nx_build_sg_list(in_sg, in nx_xcbc_update()
265 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * in nx_xcbc_update()
[all …]
nx-sha512.c
116 struct nx_sg *in_sg = nx_ctx->in_sg; in nx_sha512_update() local
120 in_sg = nx_build_sg_list(in_sg, in nx_sha512_update()
128 used_sgs = in_sg - nx_ctx->in_sg; in nx_sha512_update()
143 in_sg = nx_build_sg_list(in_sg, (u8 *) data, in nx_sha512_update()
146 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); in nx_sha512_update()
197 struct nx_sg *in_sg, *out_sg; in nx_sha512_final() local
232 in_sg = nx_build_sg_list(nx_ctx->in_sg, sctx->buf, &len, in nx_sha512_final()
244 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); in nx_sha512_final()
nx-sha256.c
116 struct nx_sg *in_sg = nx_ctx->in_sg; in nx_sha256_update() local
120 in_sg = nx_build_sg_list(in_sg, in nx_sha256_update()
129 used_sgs = in_sg - nx_ctx->in_sg; in nx_sha256_update()
144 in_sg = nx_build_sg_list(in_sg, (u8 *) data, in nx_sha256_update()
147 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); in nx_sha256_update()
194 struct nx_sg *in_sg, *out_sg; in nx_sha256_final() local
223 in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) sctx->buf, in nx_sha256_final()
239 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); in nx_sha256_final()
nx-aes-gcm.c
119 struct nx_sg *nx_sg = nx_ctx->in_sg; in nx_gca()
149 nx_sg = nx_walk_and_build(nx_ctx->in_sg, max_sg_len, in nx_gca()
157 nx_ctx->op_aead.inlen = (nx_ctx->in_sg - nx_sg) in nx_gca()
217 nx_sg = nx_walk_and_build(nx_ctx->in_sg, max_sg_len, in gmac()
225 nx_ctx->op.inlen = (nx_ctx->in_sg - nx_sg) in gmac()
263 struct nx_sg *in_sg, *out_sg; in gcm_empty() local
283 in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) desc->info, in gcm_empty()
296 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); in gcm_empty()
nx.c
280 struct nx_sg *nx_insg = nx_ctx->in_sg; in nx_build_sg_lists()
305 nx_ctx->op.inlen = trim_sg_list(nx_ctx->in_sg, nx_insg, delta, nbytes); in nx_build_sg_lists()
325 nx_ctx->op.in = __pa(nx_ctx->in_sg); in nx_ctx_init()
333 nx_ctx->op_aead.in = __pa(nx_ctx->in_sg); in nx_ctx_init()
689 nx_ctx->in_sg = (struct nx_sg *)((u8 *)nx_ctx->csbcpb + NX_PAGE_SIZE); in nx_crypto_ctx_init()
690 nx_ctx->out_sg = (struct nx_sg *)((u8 *)nx_ctx->in_sg + NX_PAGE_SIZE); in nx_crypto_ctx_init()
765 nx_ctx->in_sg = NULL; in nx_crypto_ctx_exit()
nx-aes-ccm.c
176 struct nx_sg *nx_insg = nx_ctx->in_sg; in generate_pat()
264 nx_ctx->op.inlen = (nx_ctx->in_sg - nx_insg) * in generate_pat()
297 nx_insg = nx_walk_and_build(nx_ctx->in_sg, in generate_pat()
311 nx_ctx->op_aead.inlen = (nx_ctx->in_sg - nx_insg) * in generate_pat()
nx.h
137 struct nx_sg *in_sg; /* aligned pointer into kmem to an sg list */ member
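
Every nx/ hit above follows the same shape: the driver keeps a page-sized array of struct nx_sg at nx_ctx->in_sg, appends buffers to it with nx_build_sg_list() (which returns the first unused slot, as the used_sgs lines in nx-sha256.c and nx-sha512.c show), then sizes op.inlen from the pointer distance. A minimal sketch of that pattern, assuming the driver-internal helpers from nx.h; example_fill_in_sg() is illustrative, not a function in the driver:

#include "nx.h"	/* struct nx_crypto_ctx, struct nx_sg, nx_build_sg_list() */

/* Append one buffer to the context's input sg list and size the op. */
static void example_fill_in_sg(struct nx_crypto_ctx *nx_ctx, u8 *data,
			       unsigned int len, u32 max_sg_len)
{
	struct nx_sg *end;

	/* nx_build_sg_list() may shrink len to what fits in the list */
	end = nx_build_sg_list(nx_ctx->in_sg, data, &len, max_sg_len);

	/* entries used = end - head, the same arithmetic as used_sgs */
	nx_ctx->op.inlen = (end - nx_ctx->in_sg) * sizeof(struct nx_sg);
}
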
/Linux-v4.19/lib/
sg_split.c
82 struct scatterlist *in_sg, *out_sg; in sg_split_phys() local
86 in_sg = split->in_sg0; in sg_split_phys()
89 *out_sg = *in_sg; in sg_split_phys()
98 in_sg = sg_next(in_sg); in sg_split_phys()
108 struct scatterlist *in_sg, *out_sg; in sg_split_mapped() local
112 in_sg = split->in_sg0; in sg_split_mapped()
115 sg_dma_address(out_sg) = sg_dma_address(in_sg); in sg_split_mapped()
116 sg_dma_len(out_sg) = sg_dma_len(in_sg); in sg_split_mapped()
121 in_sg = sg_next(in_sg); in sg_split_mapped()
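
sg_split_phys() and sg_split_mapped() are the two halves of this file's exported sg_split(): the first clones page/offset/length triples into the output lists, the second the DMA address/length pairs. A usage sketch, assuming the sg_split() prototype from include/linux/scatterlist.h; the sizes and the example_split() name are illustrative:

#include <linux/scatterlist.h>
#include <linux/slab.h>

/* Split one mapped scatterlist into two pieces of fixed byte sizes. */
static int example_split(struct scatterlist *sgl, int nents)
{
	size_t split_sizes[2] = { 4096, 8192 };
	struct scatterlist *out[2];	/* allocated by sg_split() */
	int out_mapped_nents[2];
	int ret;

	ret = sg_split(sgl, nents, 0 /* skip */, 2, split_sizes,
		       out, out_mapped_nents, GFP_KERNEL);
	if (ret)
		return ret;

	/* ... use out[0] and out[1] ... */
	kfree(out[0]);
	kfree(out[1]);
	return 0;
}
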
/Linux-v4.19/drivers/crypto/
omap-des.c
155 struct scatterlist *in_sg; member
381 struct scatterlist *in_sg, struct scatterlist *out_sg, in omap_des_crypt_dma() argument
391 scatterwalk_start(&dd->in_walk, dd->in_sg); in omap_des_crypt_dma()
400 dma_sync_sg_for_device(dd->dev, dd->in_sg, in_sg_len, DMA_TO_DEVICE); in omap_des_crypt_dma()
419 tx_in = dmaengine_prep_slave_sg(dd->dma_lch_in, in_sg, in_sg_len, in omap_des_crypt_dma()
470 err = dma_map_sg(dd->dev, dd->in_sg, dd->in_sg_len, in omap_des_crypt_dma_start()
485 err = omap_des_crypt_dma(tfm, dd->in_sg, dd->out_sg, dd->in_sg_len, in omap_des_crypt_dma_start()
488 dma_unmap_sg(dd->dev, dd->in_sg, dd->in_sg_len, DMA_TO_DEVICE); in omap_des_crypt_dma_start()
547 dd->in_sg = req->src; in omap_des_prepare_req()
555 ret = omap_crypto_align_sg(&dd->in_sg, dd->total, DES_BLOCK_SIZE, in omap_des_prepare_req()
[all …]
omap-aes.c
272 struct scatterlist *in_sg, in omap_aes_crypt_dma() argument
281 scatterwalk_start(&dd->in_walk, dd->in_sg); in omap_aes_crypt_dma()
290 dma_sync_sg_for_device(dd->dev, dd->in_sg, in_sg_len, DMA_TO_DEVICE); in omap_aes_crypt_dma()
309 tx_in = dmaengine_prep_slave_sg(dd->dma_lch_in, in_sg, in_sg_len, in omap_aes_crypt_dma()
361 err = dma_map_sg(dd->dev, dd->in_sg, dd->in_sg_len, in omap_aes_crypt_dma_start()
376 err = omap_aes_crypt_dma(dd, dd->in_sg, dd->out_sg, dd->in_sg_len, in omap_aes_crypt_dma_start()
379 dma_unmap_sg(dd->dev, dd->in_sg, dd->in_sg_len, DMA_TO_DEVICE); in omap_aes_crypt_dma_start()
436 dd->in_sg = req->src; in omap_aes_prepare_req()
444 ret = omap_crypto_align_sg(&dd->in_sg, dd->total, AES_BLOCK_SIZE, in omap_aes_prepare_req()
456 dd->in_sg_len = sg_nents_for_len(dd->in_sg, dd->total); in omap_aes_prepare_req()
[all …]
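
omap-des.c and omap-aes.c share the same DMA flow around in_sg: map the request's source list for the device, hand it to the dmaengine as a slave-sg transfer, and unmap on failure or completion. A condensed sketch using only generic dmaengine/DMA-mapping calls; the device and channel parameters stand in for the drivers' dd fields:

#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>

static int example_prep_in_dma(struct device *dev, struct dma_chan *chan,
			       struct scatterlist *in_sg, int in_sg_len)
{
	struct dma_async_tx_descriptor *tx;
	int mapped;

	mapped = dma_map_sg(dev, in_sg, in_sg_len, DMA_TO_DEVICE);
	if (!mapped)
		return -EINVAL;

	tx = dmaengine_prep_slave_sg(chan, in_sg, mapped, DMA_MEM_TO_DEV,
				     DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!tx) {
		dma_unmap_sg(dev, in_sg, in_sg_len, DMA_TO_DEVICE);
		return -EINVAL;
	}

	dmaengine_submit(tx);
	dma_async_issue_pending(chan);
	return 0;	/* dma_unmap_sg() again once the transfer completes */
}
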
sahara.c
186 struct scatterlist *in_sg; member
224 struct scatterlist *in_sg; member
473 dev->nb_in_sg = sg_nents_for_len(dev->in_sg, dev->total); in sahara_hw_descriptor_create()
489 ret = dma_map_sg(dev->device, dev->in_sg, dev->nb_in_sg, in sahara_hw_descriptor_create()
504 sg = dev->in_sg; in sahara_hw_descriptor_create()
547 dma_unmap_sg(dev->device, dev->in_sg, dev->nb_in_sg, in sahara_hw_descriptor_create()
568 dev->in_sg = req->src; in sahara_aes_process()
597 dma_unmap_sg(dev->device, dev->in_sg, dev->nb_in_sg, in sahara_aes_process()
806 dev->in_sg = rctx->in_sg; in sahara_sha_hw_links_create()
808 dev->nb_in_sg = sg_nents_for_len(dev->in_sg, rctx->total); in sahara_sha_hw_links_create()
[all …]
atmel-tdes.c
115 struct scatterlist *in_sg; member
329 dma_unmap_sg(dd->dev, dd->in_sg, 1, DMA_TO_DEVICE); in atmel_tdes_crypt_pdc_stop()
516 in = IS_ALIGNED((u32)dd->in_sg->offset, sizeof(u32)) && in atmel_tdes_crypt_start()
517 IS_ALIGNED(dd->in_sg->length, dd->ctx->block_size); in atmel_tdes_crypt_start()
522 if (sg_dma_len(dd->in_sg) != sg_dma_len(dd->out_sg)) in atmel_tdes_crypt_start()
528 count = min_t(size_t, dd->total, sg_dma_len(dd->in_sg)); in atmel_tdes_crypt_start()
531 err = dma_map_sg(dd->dev, dd->in_sg, 1, DMA_TO_DEVICE); in atmel_tdes_crypt_start()
541 dma_unmap_sg(dd->dev, dd->in_sg, 1, in atmel_tdes_crypt_start()
546 addr_in = sg_dma_address(dd->in_sg); in atmel_tdes_crypt_start()
553 count = atmel_tdes_sg_copy(&dd->in_sg, &dd->in_offset, in atmel_tdes_crypt_start()
[all …]
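
The atmel-tdes.c matches show the usual gate in front of such a DMA path: an sg entry is only DMA-able here if its offset is word-aligned and its length is a multiple of the cipher block size, and the input and output entries must pair up in length; anything else drops to the atmel_tdes_sg_copy() bounce path. The check, reduced to a sketch (example_sg_dma_ok() is illustrative):

#include <linux/kernel.h>
#include <linux/scatterlist.h>

static bool example_sg_dma_ok(struct scatterlist *sg, unsigned int block_size)
{
	return IS_ALIGNED(sg->offset, sizeof(u32)) &&
	       IS_ALIGNED(sg->length, block_size);
}
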
omap-aes-gcm.c
37 dd->in_sg = NULL; in omap_aes_gcm_finish_req()
57 dma_unmap_sg(dd->dev, dd->in_sg, dd->in_sg_len, DMA_TO_DEVICE); in omap_aes_gcm_done_task()
141 dd->in_sg = dd->in_sgl; in omap_aes_gcm_copy_buffers()
162 dd->in_sg_len = sg_nents_for_len(dd->in_sg, alen + clen); in omap_aes_gcm_copy_buffers()
omap-aes.h
182 struct scatterlist *in_sg; member
/Linux-v4.19/drivers/crypto/stm32/
stm32-cryp.c
107 #define _walked_in (cryp->in_walk.offset - cryp->in_sg->offset)
152 struct scatterlist *in_sg; member
318 ret = stm32_cryp_check_aligned(cryp->in_sg, cryp->total_in, in stm32_cryp_check_io_aligned()
367 sg_copy_buf(buf_in, cryp->in_sg, 0, cryp->total_in, 0); in stm32_cryp_copy_sgs()
370 cryp->in_sg = &cryp->in_sgl; in stm32_cryp_copy_sgs()
971 cryp->in_sg = req ? req->src : areq->src; in stm32_cryp_prepare_req()
975 cryp->in_sg_len = sg_nents_for_len(cryp->in_sg, cryp->total_in); in stm32_cryp_prepare_req()
993 scatterwalk_start(&cryp->in_walk, cryp->in_sg); in stm32_cryp_prepare_req()
1084 if (unlikely(cryp->in_sg->length == _walked_in)) { in stm32_cryp_next_in()
1085 cryp->in_sg = sg_next(cryp->in_sg); in stm32_cryp_next_in()
[all …]
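
stm32-cryp.c does PIO through a scatter_walk: the _walked_in macro above measures progress inside the current entry, and stm32_cryp_next_in() hops to sg_next() once the entry is exhausted. A sketch of that advance, assuming the scatterwalk API from crypto/scatterwalk.h; example_advance() is illustrative:

#include <crypto/scatterwalk.h>

static void example_advance(struct scatter_walk *walk,
			    struct scatterlist **sgp)
{
	/* same test as _walked_in == in_sg->length above */
	if (walk->offset == (*sgp)->offset + (*sgp)->length) {
		*sgp = sg_next(*sgp);
		if (*sgp)
			scatterwalk_start(walk, *sgp);
	}
}
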
/Linux-v4.19/drivers/crypto/sunxi-ss/
sun4i-ss-hash.c
187 struct scatterlist *in_sg = areq->src; in sun4i_hash() local
248 while (in_sg && i == 1) { in sun4i_hash()
249 if (in_sg->length % 4) in sun4i_hash()
251 in_sg = sg_next(in_sg); in sun4i_hash()
sun4i-ss-cipher.c
133 struct scatterlist *in_sg = areq->src; in sun4i_ss_cipher_poll() local
174 while (in_sg && no_chunk == 1) { in sun4i_ss_cipher_poll()
175 if (in_sg->length % 4) in sun4i_ss_cipher_poll()
177 in_sg = sg_next(in_sg); in sun4i_ss_cipher_poll()
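
Both sun4i-ss files run the same pre-flight pass: the Security System engine only streams 4-byte multiples, so one odd-length sg entry forces the buffered fallback. The loop, isolated into an illustrative helper:

#include <linux/scatterlist.h>

static bool example_sg_word_sized(struct scatterlist *sg)
{
	while (sg) {
		if (sg->length % 4)
			return false;
		sg = sg_next(sg);
	}
	return true;
}
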
/Linux-v4.19/crypto/
rsa-pkcs1pad.c
104 struct scatterlist in_sg[2], out_sg[1]; member
261 pkcs1pad_sg_set_buf(req_ctx->in_sg, req_ctx->in_buf, in pkcs1pad_encrypt()
278 akcipher_request_set_crypt(&req_ctx->child_req, req_ctx->in_sg, in pkcs1pad_encrypt()
427 pkcs1pad_sg_set_buf(req_ctx->in_sg, req_ctx->in_buf, in pkcs1pad_sign()
435 akcipher_request_set_crypt(&req_ctx->child_req, req_ctx->in_sg, in pkcs1pad_sign()
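
rsa-pkcs1pad keeps a two-entry in_sg array in its request context and wraps the padded buffer in it (via its pkcs1pad_sg_set_buf() helper, not shown in these matches) before pointing the child akcipher request at it. A minimal sketch of wrapping a linear buffer with the standard scatterlist helpers; whether a second entry gets chained on depends on the caller, so only the single-entry case is shown:

#include <linux/scatterlist.h>

static void example_set_buf(struct scatterlist sg[2], void *buf, size_t len)
{
	sg_init_table(sg, 1);
	sg_set_buf(sg, buf, len);	/* one entry covering buf[0..len) */
}
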
/Linux-v4.19/net/vmw_vsock/
virtio_transport.c
133 int ret, in_sg = 0, out_sg = 0; in virtio_transport_send_pkt_work() local
158 ret = virtqueue_add_sgs(vq, sgs, out_sg, in_sg, pkt, GFP_KERNEL); in virtio_transport_send_pkt_work()
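
Here in_sg and out_sg are counters, not lists: the transport gathers its driver-to-device entries at the front of sgs[], device-to-driver entries after them, and passes both counts to virtqueue_add_sgs(). A sketch of a send with a header plus optional payload, loosely mirroring virtio_transport_send_pkt_work(); example_send() and its parameters are illustrative:

#include <linux/virtio.h>
#include <linux/scatterlist.h>

static int example_send(struct virtqueue *vq, void *hdr, size_t hdr_len,
			void *buf, size_t buf_len, void *token)
{
	struct scatterlist hdr_sg, buf_sg, *sgs[2];
	int out_sg = 0, in_sg = 0;

	sg_init_one(&hdr_sg, hdr, hdr_len);
	sgs[out_sg++] = &hdr_sg;
	if (buf && buf_len) {
		sg_init_one(&buf_sg, buf, buf_len);
		sgs[out_sg++] = &buf_sg;
	}

	/* token is handed back by virtqueue_get_buf() on completion */
	return virtqueue_add_sgs(vq, sgs, out_sg, in_sg, token, GFP_KERNEL);
}
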
/Linux-v4.19/drivers/usb/wusbcore/
wa-xfer.c
1005 static struct scatterlist *wa_xfer_create_subset_sg(struct scatterlist *in_sg, in wa_xfer_create_subset_sg() argument
1012 struct scatterlist *current_xfer_sg = in_sg; in wa_xfer_create_subset_sg()