
Searched refs:creq (Results 1 – 21 of 21) sorted by relevance

/Linux-v5.4/drivers/crypto/marvell/
hash.c
27 struct mv_cesa_ahash_req *creq = ahash_request_ctx(req); in mv_cesa_ahash_req_iter_init() local
28 unsigned int len = req->nbytes + creq->cache_ptr; in mv_cesa_ahash_req_iter_init()
30 if (!creq->last_req) in mv_cesa_ahash_req_iter_init()
35 iter->src.op_offset = creq->cache_ptr; in mv_cesa_ahash_req_iter_init()
93 struct mv_cesa_ahash_req *creq = ahash_request_ctx(req); in mv_cesa_ahash_dma_last_cleanup() local
95 mv_cesa_ahash_dma_free_padding(&creq->req.dma); in mv_cesa_ahash_dma_last_cleanup()
100 struct mv_cesa_ahash_req *creq = ahash_request_ctx(req); in mv_cesa_ahash_dma_cleanup() local
102 dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, DMA_TO_DEVICE); in mv_cesa_ahash_dma_cleanup()
103 mv_cesa_ahash_dma_free_cache(&creq->req.dma); in mv_cesa_ahash_dma_cleanup()
104 mv_cesa_dma_cleanup(&creq->base); in mv_cesa_ahash_dma_cleanup()
[all …]
cipher.c
59 struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req); in mv_cesa_skcipher_dma_cleanup() local
62 dma_unmap_sg(cesa_dev->dev, req->dst, creq->dst_nents, in mv_cesa_skcipher_dma_cleanup()
64 dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, in mv_cesa_skcipher_dma_cleanup()
67 dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, in mv_cesa_skcipher_dma_cleanup()
70 mv_cesa_dma_cleanup(&creq->base); in mv_cesa_skcipher_dma_cleanup()
75 struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req); in mv_cesa_skcipher_cleanup() local
77 if (mv_cesa_req_get_type(&creq->base) == CESA_DMA_REQ) in mv_cesa_skcipher_cleanup()
83 struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req); in mv_cesa_skcipher_std_step() local
84 struct mv_cesa_skcipher_std_req *sreq = &creq->std; in mv_cesa_skcipher_std_step()
85 struct mv_cesa_engine *engine = creq->base.engine; in mv_cesa_skcipher_std_step()
[all …]
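
The cipher.c cleanup hits above (lines 62-67) show the usual scatterlist DMA teardown: when source and destination lists differ, each is unmapped with its own direction; an in-place request needs a single bidirectional unmap. A minimal sketch of that pattern, with illustrative context and field names rather than the driver's own:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

struct demo_creq {				/* illustrative request context */
	int src_nents;
	int dst_nents;
};

static void demo_dma_cleanup(struct device *dev, struct scatterlist *src,
			     struct scatterlist *dst, struct demo_creq *creq)
{
	if (dst != src) {
		/* separate buffers: unmap each with its own direction */
		dma_unmap_sg(dev, dst, creq->dst_nents, DMA_FROM_DEVICE);
		dma_unmap_sg(dev, src, creq->src_nents, DMA_TO_DEVICE);
	} else {
		/* in-place operation: one bidirectional mapping to undo */
		dma_unmap_sg(dev, src, creq->src_nents, DMA_BIDIRECTIONAL);
	}
}
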
cesa.c
170 struct mv_cesa_req *creq) in mv_cesa_queue_req() argument
173 struct mv_cesa_engine *engine = creq->engine; in mv_cesa_queue_req()
177 if ((mv_cesa_req_get_type(creq) == CESA_DMA_REQ) && in mv_cesa_queue_req()
179 mv_cesa_tdma_chain(engine, creq); in mv_cesa_queue_req()
cesa.h
722 struct mv_cesa_req *creq);
/Linux-v5.4/drivers/crypto/cavium/nitrox/
nitrox_req.h
209 struct se_crypto_request creq; member
615 struct se_crypto_request *creq = &nkreq->creq; in alloc_src_req_buf() local
617 nkreq->src = alloc_req_buf(nents, ivsize, creq->gfp); in alloc_src_req_buf()
640 struct se_crypto_request *creq = &nkreq->creq; in nitrox_creq_set_src_sg() local
642 creq->src = nitrox_creq_src_sg(iv, ivsize); in nitrox_creq_set_src_sg()
643 sg = creq->src; in nitrox_creq_set_src_sg()
662 struct se_crypto_request *creq = &nkreq->creq; in alloc_dst_req_buf() local
664 nkreq->dst = alloc_req_buf(nents, extralen, creq->gfp); in alloc_dst_req_buf()
673 struct se_crypto_request *creq = &nkreq->creq; in nitrox_creq_set_orh() local
675 creq->orh = (u64 *)(nkreq->dst); in nitrox_creq_set_orh()
[all …]
nitrox_aead.c
137 struct se_crypto_request *creq = &rctx->nkreq.creq; in nitrox_set_creq() local
141 creq->flags = rctx->flags; in nitrox_set_creq()
142 creq->gfp = (rctx->flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? GFP_KERNEL : in nitrox_set_creq()
145 creq->ctrl.value = 0; in nitrox_set_creq()
146 creq->opcode = FLEXI_CRYPTO_ENCRYPT_HMAC; in nitrox_set_creq()
147 creq->ctrl.s.arg = rctx->ctrl_arg; in nitrox_set_creq()
149 creq->gph.param0 = cpu_to_be16(rctx->cryptlen); in nitrox_set_creq()
150 creq->gph.param1 = cpu_to_be16(rctx->cryptlen + rctx->assoclen); in nitrox_set_creq()
151 creq->gph.param2 = cpu_to_be16(rctx->ivsize + rctx->assoclen); in nitrox_set_creq()
154 creq->gph.param3 = cpu_to_be16(param3.param); in nitrox_set_creq()
[all …]
nitrox_skcipher.c
210 struct se_crypto_request *creq; in nitrox_skcipher_crypt() local
213 creq = &nkreq->creq; in nitrox_skcipher_crypt()
214 creq->flags = skreq->base.flags; in nitrox_skcipher_crypt()
215 creq->gfp = (skreq->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? in nitrox_skcipher_crypt()
219 creq->ctrl.value = 0; in nitrox_skcipher_crypt()
220 creq->opcode = FLEXI_CRYPTO_ENCRYPT_HMAC; in nitrox_skcipher_crypt()
221 creq->ctrl.s.arg = (enc ? ENCRYPT : DECRYPT); in nitrox_skcipher_crypt()
223 creq->gph.param0 = cpu_to_be16(skreq->cryptlen); in nitrox_skcipher_crypt()
224 creq->gph.param1 = 0; in nitrox_skcipher_crypt()
226 creq->gph.param2 = cpu_to_be16(ivsize); in nitrox_skcipher_crypt()
[all …]
nitrox_reqmgr.c
212 struct se_crypto_request *creq) in softreq_map_iobuf() argument
216 ret = dma_map_inbufs(sr, creq); in softreq_map_iobuf()
220 ret = dma_map_outbufs(sr, creq); in softreq_map_iobuf()
/Linux-v5.4/crypto/
chacha20poly1305.c
129 struct chacha_req *creq = &rctx->u.chacha; in chacha_decrypt() local
136 chacha_iv(creq->iv, req, 1); in chacha_decrypt()
143 skcipher_request_set_callback(&creq->req, rctx->flags, in chacha_decrypt()
145 skcipher_request_set_tfm(&creq->req, ctx->chacha); in chacha_decrypt()
146 skcipher_request_set_crypt(&creq->req, src, dst, in chacha_decrypt()
147 rctx->cryptlen, creq->iv); in chacha_decrypt()
148 err = crypto_skcipher_decrypt(&creq->req); in chacha_decrypt()
367 struct chacha_req *creq = &rctx->u.chacha; in poly_genkey() local
379 sg_init_one(creq->src, rctx->key, sizeof(rctx->key)); in poly_genkey()
381 chacha_iv(creq->iv, req, 0); in poly_genkey()
[all …]
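
The chacha_decrypt() hits show the core idea behind this creq: the outer request hands its data to a child skcipher request on the wrapped ChaCha transform. A minimal hedged sketch of that API usage for a synchronous caller; demo_child_decrypt() and its parameters are illustrative, only the skcipher_request_* and crypto_wait_req() calls are the real kernel API:

#include <crypto/skcipher.h>
#include <linux/crypto.h>
#include <linux/scatterlist.h>

static int demo_child_decrypt(struct crypto_skcipher *tfm,
			      struct scatterlist *src, struct scatterlist *dst,
			      unsigned int len, u8 *iv)
{
	DECLARE_CRYPTO_WAIT(wait);
	struct skcipher_request *creq;
	int err;

	/* child request bound to the wrapped transform */
	creq = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!creq)
		return -ENOMEM;

	skcipher_request_set_callback(creq, CRYPTO_TFM_REQ_MAY_BACKLOG |
				      CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(creq, src, dst, len, iv);

	/* wait for completion even if the backend is asynchronous */
	err = crypto_wait_req(crypto_skcipher_decrypt(creq), &wait);
	skcipher_request_free(creq);
	return err;
}
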
pcrypt.c
88 struct aead_request *creq = pcrypt_request_ctx(preq); in pcrypt_aead_encrypt() local
99 aead_request_set_tfm(creq, ctx->child); in pcrypt_aead_encrypt()
100 aead_request_set_callback(creq, flags & ~CRYPTO_TFM_REQ_MAY_SLEEP, in pcrypt_aead_encrypt()
102 aead_request_set_crypt(creq, req->src, req->dst, in pcrypt_aead_encrypt()
104 aead_request_set_ad(creq, req->assoclen); in pcrypt_aead_encrypt()
130 struct aead_request *creq = pcrypt_request_ctx(preq); in pcrypt_aead_decrypt() local
141 aead_request_set_tfm(creq, ctx->child); in pcrypt_aead_decrypt()
142 aead_request_set_callback(creq, flags & ~CRYPTO_TFM_REQ_MAY_SLEEP, in pcrypt_aead_decrypt()
144 aead_request_set_crypt(creq, req->src, req->dst, in pcrypt_aead_decrypt()
146 aead_request_set_ad(creq, req->assoclen); in pcrypt_aead_decrypt()
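
pcrypt's creq is the same forwarding idea for AEAD: the outer request's src/dst, IV and associated data are copied onto a child aead_request aimed at the wrapped transform. A hedged sketch, where only the aead_request_* calls are the real API and the function name is made up:

#include <crypto/aead.h>

static int demo_forward_encrypt(struct aead_request *req,
				struct aead_request *creq,
				struct crypto_aead *child,
				crypto_completion_t done)
{
	aead_request_set_tfm(creq, child);
	/* completion runs in a context that must not sleep */
	aead_request_set_callback(creq,
				  req->base.flags & ~CRYPTO_TFM_REQ_MAY_SLEEP,
				  done, req);
	aead_request_set_crypt(creq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(creq, req->assoclen);
	return crypto_aead_encrypt(creq);
}
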
/Linux-v5.4/drivers/infiniband/hw/bnxt_re/
qplib_rcfw.c
369 struct bnxt_qplib_hwq *creq = &rcfw->creq; in bnxt_qplib_service_creq() local
376 spin_lock_irqsave(&creq->lock, flags); in bnxt_qplib_service_creq()
377 raw_cons = creq->cons; in bnxt_qplib_service_creq()
379 sw_cons = HWQ_CMP(raw_cons, creq); in bnxt_qplib_service_creq()
380 creq_ptr = (struct creq_base **)creq->pbl_ptr; in bnxt_qplib_service_creq()
382 if (!CREQ_CMP_VALID(creqe, raw_cons, creq->max_elements)) in bnxt_qplib_service_creq()
415 if (creq->cons != raw_cons) { in bnxt_qplib_service_creq()
416 creq->cons = raw_cons; in bnxt_qplib_service_creq()
418 raw_cons, creq->max_elements, in bnxt_qplib_service_creq()
421 spin_unlock_irqrestore(&creq->lock, flags); in bnxt_qplib_service_creq()
[all …]
qplib_rcfw.h
256 struct bnxt_qplib_hwq creq; member
main.c
1387 pg_map = rdev->rcfw.creq.pbl[PBL_LVL_0].pg_map_arr; in bnxt_re_ib_reg()
1388 pages = rdev->rcfw.creq.pbl[rdev->rcfw.creq.level].pg_count; in bnxt_re_ib_reg()
/Linux-v5.4/drivers/net/can/
pch_can.c
123 u32 creq; member
285 pch_can_rw_msg_obj(&priv->regs->ifregs[dir].creq, buff_num); in pch_can_set_rxtx()
301 pch_can_rw_msg_obj(&priv->regs->ifregs[dir].creq, buff_num); in pch_can_set_rxtx()
345 pch_can_rw_msg_obj(&priv->regs->ifregs[0].creq, i); in pch_can_clear_if_buffers()
355 pch_can_rw_msg_obj(&priv->regs->ifregs[0].creq, i); in pch_can_config_rx_tx_buffers()
379 pch_can_rw_msg_obj(&priv->regs->ifregs[0].creq, i); in pch_can_config_rx_tx_buffers()
384 pch_can_rw_msg_obj(&priv->regs->ifregs[1].creq, i); in pch_can_config_rx_tx_buffers()
401 pch_can_rw_msg_obj(&priv->regs->ifregs[1].creq, i); in pch_can_config_rx_tx_buffers()
451 pch_can_rw_msg_obj(&priv->regs->ifregs[0].creq, mask); in pch_can_int_clr()
468 pch_can_rw_msg_obj(&priv->regs->ifregs[1].creq, mask); in pch_can_int_clr()
[all …]
/Linux-v5.4/drivers/crypto/ccree/
cc_request_mgr.c
48 struct cc_crypto_req creq; member
354 struct cc_crypto_req *creq; in cc_proc_backlog() local
368 creq = &bli->creq; in cc_proc_backlog()
369 req = creq->user_arg; in cc_proc_backlog()
376 creq->user_cb(dev, req, -EINPROGRESS); in cc_proc_backlog()
393 rc = cc_do_send_request(drvdata, &bli->creq, bli->desc, in cc_proc_backlog()
400 creq->user_cb(dev, req, rc); in cc_proc_backlog()
446 memcpy(&bli->creq, cc_req, sizeof(*cc_req)); in cc_send_request()
/Linux-v5.4/drivers/net/wireless/marvell/libertas/
cfg.c
1223 struct cfg80211_scan_request *creq = NULL; in _new_connect_scan_req() local
1227 creq = kzalloc(sizeof(*creq) + sizeof(struct cfg80211_ssid) + in _new_connect_scan_req()
1230 if (!creq) in _new_connect_scan_req()
1234 creq->ssids = (void *)&creq->channels[n_channels]; in _new_connect_scan_req()
1235 creq->n_channels = n_channels; in _new_connect_scan_req()
1236 creq->n_ssids = 1; in _new_connect_scan_req()
1252 creq->channels[i] = &wiphy->bands[band]->channels[j]; in _new_connect_scan_req()
1258 creq->n_channels = i; in _new_connect_scan_req()
1261 memcpy(creq->ssids[0].ssid, sme->ssid, sme->ssid_len); in _new_connect_scan_req()
1262 creq->ssids[0].ssid_len = sme->ssid_len; in _new_connect_scan_req()
[all …]
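
The libertas hits (and the cfg80211_wext_siwscan() ones further down) share one allocation trick: a single kzalloc() carries the scan request, its flexible channels[] array, and the cfg80211_ssid placed right behind that array. A hedged sketch of that layout, assuming the caller supplies n_channels and later fills in the SSID and channel pointers:

#include <linux/slab.h>
#include <net/cfg80211.h>

static struct cfg80211_scan_request *demo_alloc_scan_req(int n_channels)
{
	struct cfg80211_scan_request *creq;

	/* request + n_channels channel pointers + one SSID, in one block */
	creq = kzalloc(sizeof(*creq) + sizeof(struct cfg80211_ssid) +
		       n_channels * sizeof(void *), GFP_KERNEL);
	if (!creq)
		return NULL;

	/* the SSID array lives immediately after the channels[] array */
	creq->ssids = (void *)&creq->channels[n_channels];
	creq->n_channels = n_channels;
	creq->n_ssids = 1;
	return creq;
}
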
/Linux-v5.4/drivers/gpu/drm/vmwgfx/
vmwgfx_surface.c
1328 rep->creq = rep_ext.creq.base; in vmw_gb_surface_reference_ioctl()
1733 rep->creq.base.svga3d_flags = SVGA3D_FLAGS_LOWER_32(srf->flags); in vmw_gb_surface_reference_internal()
1734 rep->creq.base.format = srf->format; in vmw_gb_surface_reference_internal()
1735 rep->creq.base.mip_levels = srf->mip_levels[0]; in vmw_gb_surface_reference_internal()
1736 rep->creq.base.drm_surface_flags = 0; in vmw_gb_surface_reference_internal()
1737 rep->creq.base.multisample_count = srf->multisample_count; in vmw_gb_surface_reference_internal()
1738 rep->creq.base.autogen_filter = srf->autogen_filter; in vmw_gb_surface_reference_internal()
1739 rep->creq.base.array_size = srf->array_size; in vmw_gb_surface_reference_internal()
1740 rep->creq.base.buffer_handle = backup_handle; in vmw_gb_surface_reference_internal()
1741 rep->creq.base.base_size = srf->base_size; in vmw_gb_surface_reference_internal()
[all …]
/Linux-v5.4/net/wireless/
scan.c
2118 struct cfg80211_scan_request *creq = NULL; in cfg80211_wext_siwscan() local
2146 creq = kzalloc(sizeof(*creq) + sizeof(struct cfg80211_ssid) + in cfg80211_wext_siwscan()
2149 if (!creq) { in cfg80211_wext_siwscan()
2154 creq->wiphy = wiphy; in cfg80211_wext_siwscan()
2155 creq->wdev = dev->ieee80211_ptr; in cfg80211_wext_siwscan()
2157 creq->ssids = (void *)&creq->channels[n_channels]; in cfg80211_wext_siwscan()
2158 creq->n_channels = n_channels; in cfg80211_wext_siwscan()
2159 creq->n_ssids = 1; in cfg80211_wext_siwscan()
2160 creq->scan_start = jiffies; in cfg80211_wext_siwscan()
2196 creq->channels[i] = &wiphy->bands[band]->channels[j]; in cfg80211_wext_siwscan()
[all …]
/Linux-v5.4/include/uapi/drm/
vmwgfx_drm.h
991 struct drm_vmw_gb_surface_create_req creq; member
1196 struct drm_vmw_gb_surface_create_ext_req creq; member
/Linux-v5.4/drivers/usb/gadget/function/
f_fs.c
3317 const struct usb_ctrlrequest *creq) in ffs_func_setup() argument
3326 pr_vdebug("creq->bRequestType = %02x\n", creq->bRequestType); in ffs_func_setup()
3327 pr_vdebug("creq->bRequest = %02x\n", creq->bRequest); in ffs_func_setup()
3328 pr_vdebug("creq->wValue = %04x\n", le16_to_cpu(creq->wValue)); in ffs_func_setup()
3329 pr_vdebug("creq->wIndex = %04x\n", le16_to_cpu(creq->wIndex)); in ffs_func_setup()
3330 pr_vdebug("creq->wLength = %04x\n", le16_to_cpu(creq->wLength)); in ffs_func_setup()
3345 switch (creq->bRequestType & USB_RECIP_MASK) { in ffs_func_setup()
3347 ret = ffs_func_revmap_intf(func, le16_to_cpu(creq->wIndex)); in ffs_func_setup()
3353 ret = ffs_func_revmap_ep(func, le16_to_cpu(creq->wIndex)); in ffs_func_setup()
3362 ret = le16_to_cpu(creq->wIndex); in ffs_func_setup()
[all …]
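
The ffs_func_setup() hits illustrate that wValue/wIndex/wLength in a struct usb_ctrlrequest arrive little-endian on the wire and are always read through le16_to_cpu(). A small hedged sketch of that decoding; the helper name is illustrative:

#include <linux/usb/ch9.h>
#include <asm/byteorder.h>

static u16 demo_ctrlreq_index(const struct usb_ctrlrequest *creq)
{
	/* wIndex names the interface or endpoint, depending on the recipient */
	switch (creq->bRequestType & USB_RECIP_MASK) {
	case USB_RECIP_INTERFACE:
	case USB_RECIP_ENDPOINT:
		return le16_to_cpu(creq->wIndex);
	default:
		return 0;
	}
}
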
/Linux-v5.4/drivers/crypto/inside-secure/
safexcel_cipher.c
1539 struct safexcel_cipher_req *creq = aead_request_ctx(req); in safexcel_aead_encrypt() local
1541 return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT); in safexcel_aead_encrypt()
1546 struct safexcel_cipher_req *creq = aead_request_ctx(req); in safexcel_aead_decrypt() local
1548 return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT); in safexcel_aead_decrypt()
2263 struct safexcel_cipher_req *creq = aead_request_ctx(req); in safexcel_ccm_encrypt() local
2268 return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT); in safexcel_ccm_encrypt()
2273 struct safexcel_cipher_req *creq = aead_request_ctx(req); in safexcel_ccm_decrypt() local
2278 return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT); in safexcel_ccm_decrypt()