Lines matching refs:lldi (all hits below are in drivers/crypto/chelsio/chcr_algo.c)
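
Every hit below dereferences the cxgb4 lower-layer-driver (LLD) info that chcr caches per adapter in its ULD context. For orientation, a minimal, abridged sketch showing only the fields this listing touches; the authoritative definitions are struct cxgb4_lld_info in drivers/net/ethernet/chelsio/cxgb4/cxgb4_uld.h and struct uld_ctx in drivers/crypto/chelsio/chcr_core.h, and the exact field set varies by kernel version.

    /* Abridged sketch: only the fields used by the call sites below. */
    struct cxgb4_lld_info {
            struct pci_dev *pdev;            /* PCI function; &pdev->dev is the DMA device */
            struct net_device **ports;       /* ports[0] reaches the adapter */
            const unsigned short *rxq_ids;   /* hardware ids of the ULD's Rx queues */
            unsigned short nrxq;             /* number of Rx queues */
            unsigned short ntxq;             /* number of Tx queues */
            unsigned char nchan:4;           /* number of channels */
            /* ... remaining fields omitted ... */
    };

    /* chcr embeds one copy per adapter; every u_ctx->lldi.* and
     * ULD_CTX(...)->lldi.* below resolves to this member. */
    struct uld_ctx {
            struct list_head entry;
            struct cxgb4_lld_info lldi;
            struct chcr_dev *dev;
    };
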
710 int qid = u_ctx->lldi.rxq_ids[ctx->rx_qidx]; in create_wreq()
1107 chcr_cipher_dma_unmap(&ULD_CTX(c_ctx(tfm))->lldi.pdev->dev, in chcr_handle_cipher_resp()
1130 chcr_cipher_dma_unmap(&ULD_CTX(c_ctx(tfm))->lldi.pdev->dev, in chcr_handle_cipher_resp()
1145 wrparam.qid = u_ctx->lldi.rxq_ids[c_ctx(tfm)->rx_qidx]; in chcr_handle_cipher_resp()
1154 skb->dev = u_ctx->lldi.ports[0]; in chcr_handle_cipher_resp()
1161 chcr_cipher_dma_unmap(&ULD_CTX(c_ctx(tfm))->lldi.pdev->dev, req); in chcr_handle_cipher_resp()
1189 chcr_cipher_dma_map(&ULD_CTX(c_ctx(tfm))->lldi.pdev->dev, req); in process_cipher()
1241 chcr_cipher_dma_unmap(&ULD_CTX(c_ctx(tfm))->lldi.pdev->dev, in process_cipher()
1270 chcr_cipher_dma_unmap(&ULD_CTX(c_ctx(tfm))->lldi.pdev->dev, req); in process_cipher()
1282 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], in chcr_aes_encrypt()
1289 err = process_cipher(req, u_ctx->lldi.rxq_ids[c_ctx(tfm)->rx_qidx], in chcr_aes_encrypt()
1293 skb->dev = u_ctx->lldi.ports[0]; in chcr_aes_encrypt()
1306 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], in chcr_aes_decrypt()
1313 err = process_cipher(req, u_ctx->lldi.rxq_ids[c_ctx(tfm)->rx_qidx], in chcr_aes_decrypt()
1317 skb->dev = u_ctx->lldi.ports[0]; in chcr_aes_decrypt()
1340 ntxq = min_not_zero((unsigned int)u_ctx->lldi.nrxq, in chcr_device_init()
1342 rxq_perchan = u_ctx->lldi.nrxq / u_ctx->lldi.nchan; in chcr_device_init()
1343 txq_perchan = ntxq / u_ctx->lldi.nchan; in chcr_device_init()
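
chcr_device_init (lines 1340-1343 above) splits the ULD's queues evenly across the adapter's channels, then pins each transform context to one queue of its channel. A lightly paraphrased sketch of that arithmetic; the exact index rotation differs across kernel versions, and here chan stands for the context's Tx channel and id for the CPU id the driver samples:

    rxq_perchan = u_ctx->lldi.nrxq / u_ctx->lldi.nchan;
    txq_perchan = ntxq / u_ctx->lldi.nchan;
    /* channel n owns rxq_ids[n * rxq_perchan .. (n + 1) * rxq_perchan - 1];
     * contexts are spread across a channel's queues by CPU id. */
    rxq_idx = chan * rxq_perchan + (id % rxq_perchan);
    txq_idx = chan * txq_perchan + (id % txq_perchan);

With nrxq = 8 and nchan = 2, for example, rxq_perchan is 4: channel 0 owns rxq_ids[0..3] and channel 1 owns rxq_ids[4..7].
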
1534 dma_map_single(&u_ctx->lldi.pdev->dev, req_ctx->reqbfr, in create_hash_wr()
1536 if (dma_mapping_error(&u_ctx->lldi.pdev->dev, in create_hash_wr()
1573 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], in chcr_ahash_update()
1590 error = chcr_hash_dma_map(&u_ctx->lldi.pdev->dev, req); in chcr_ahash_update()
1625 skb->dev = u_ctx->lldi.ports[0]; in chcr_ahash_update()
1631 chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req); in chcr_ahash_update()
1692 skb->dev = u_ctx->lldi.ports[0]; in chcr_ahash_final()
1711 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], in chcr_ahash_finup()
1718 error = chcr_hash_dma_map(&u_ctx->lldi.pdev->dev, req); in chcr_ahash_finup()
1770 skb->dev = u_ctx->lldi.ports[0]; in chcr_ahash_finup()
1776 chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req); in chcr_ahash_finup()
1794 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], in chcr_ahash_digest()
1802 error = chcr_hash_dma_map(&u_ctx->lldi.pdev->dev, req); in chcr_ahash_digest()
1851 skb->dev = u_ctx->lldi.ports[0]; in chcr_ahash_digest()
1856 chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req); in chcr_ahash_digest()
1911 skb->dev = u_ctx->lldi.ports[0]; in chcr_ahash_continue()
1939 dma_unmap_single(&u_ctx->lldi.pdev->dev, hctx_wr->dma_addr, in chcr_handle_ahash_resp()
1966 chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req); in chcr_handle_ahash_resp()
2191 chcr_aead_dma_unmap(&u_ctx->lldi.pdev->dev, req, reqctx->op); in chcr_aead_common_exit()
2212 error = chcr_aead_dma_map(&ULD_CTX(a_ctx(tfm))->lldi.pdev->dev, req, in chcr_aead_common_init()
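
All of the *_dma_map/*_dma_unmap and dma_map_single hits above share one convention: the DMA device is always the cxgb4 PCI function's struct device, reached as &u_ctx->lldi.pdev->dev rather than any device owned by chcr itself. A minimal sketch of the map/unmap pairing; buf and len are hypothetical stand-ins for the request buffer:

    struct device *dev = &u_ctx->lldi.pdev->dev;
    dma_addr_t addr;

    addr = dma_map_single(dev, buf, len, DMA_TO_DEVICE);
    if (dma_mapping_error(dev, addr))
            return -ENOMEM;
    /* ... post the work request carrying addr; on completion
     * (e.g. chcr_handle_ahash_resp, line 1939) undo the mapping: */
    dma_unmap_single(dev, addr, len, DMA_TO_DEVICE);
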
3577 if (cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], in chcr_aead_op()
3585 skb = create_wr_fn(req, u_ctx->lldi.rxq_ids[a_ctx(tfm)->rx_qidx], size); in chcr_aead_op()
3590 skb->dev = u_ctx->lldi.ports[0]; in chcr_aead_op()
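
Taken together, the cipher, ahash and AEAD hits trace one shared submit path: a back-pressure check against the crypto queue, work-request construction for this context's Rx queue id, then transmission through the adapter's first port. A condensed skeleton of that path (error handling and the CRYPTO_TFM_REQ_MAY_BACKLOG case are trimmed; create_wr_fn stands for whichever WR builder the call site uses):

    if (cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], ctx->tx_qidx))
            return -ENOSPC;                         /* or backlog the request */

    skb = create_wr_fn(req, u_ctx->lldi.rxq_ids[ctx->rx_qidx], size);
    if (IS_ERR(skb))
            return PTR_ERR(skb);

    skb->dev = u_ctx->lldi.ports[0];                /* route via the adapter */
    set_wr_txq(skb, CPL_PRIORITY_DATA, ctx->tx_qidx);
    chcr_send_wr(skb);                              /* cxgb4_crypto_send() underneath */
    return -EINPROGRESS;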