Lines matching refs: u_ctx
768 struct uld_ctx *u_ctx = ULD_CTX(ctx); in create_wreq() local
774 qid = u_ctx->lldi.rxq_ids[rxqidx]; in create_wreq()
775 fid = u_ctx->lldi.rxq_ids[0]; in create_wreq()
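
In create_wreq() two queue ids come out of the same lldi.rxq_ids array: qid, the response queue selected by the context's per-request rxqidx, and fid, taken from the first entry. A hedged sketch of that selection (rxqidx is computed elsewhere in the driver; this is not the literal create_wreq() body):

/* Hedged sketch of the queue-id selection seen in create_wreq(). */
static void pick_wreq_qids_sketch(struct uld_ctx *u_ctx, unsigned int rxqidx,
				  unsigned int *qid, unsigned int *fid)
{
	*qid = u_ctx->lldi.rxq_ids[rxqidx];	/* response queue for this request */
	*fid = u_ctx->lldi.rxq_ids[0];		/* id of the first rx queue */
}
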
1167 struct uld_ctx *u_ctx = ULD_CTX(c_ctx(tfm)); in chcr_handle_cipher_resp() local
1213 wrparam.qid = u_ctx->lldi.rxq_ids[reqctx->rxqidx]; in chcr_handle_cipher_resp()
1222 skb->dev = u_ctx->lldi.ports[0]; in chcr_handle_cipher_resp()
1373 struct uld_ctx *u_ctx = ULD_CTX(c_ctx(tfm)); in chcr_aes_encrypt() local
1385 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], in chcr_aes_encrypt()
1392 err = process_cipher(req, u_ctx->lldi.rxq_ids[reqctx->rxqidx], in chcr_aes_encrypt()
1396 skb->dev = u_ctx->lldi.ports[0]; in chcr_aes_encrypt()
1415 struct uld_ctx *u_ctx = ULD_CTX(c_ctx(tfm)); in chcr_aes_decrypt() local
1431 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], in chcr_aes_decrypt()
1435 err = process_cipher(req, u_ctx->lldi.rxq_ids[reqctx->rxqidx], in chcr_aes_decrypt()
1439 skb->dev = u_ctx->lldi.ports[0]; in chcr_aes_decrypt()
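
Taken together, the chcr_aes_encrypt()/chcr_aes_decrypt() matches show the common submission shape: back off while the crypto queue behind the request's queue index is full, build the work request against u_ctx->lldi.rxq_ids[reqctx->rxqidx], then aim the skb at the first port so completions return through the offload queues. A hedged, condensed sketch of that shape follows; process_cipher(), c_ctx(), chcr_send_wr() and the request-context type are chcr internals, and the backlog/-EBUSY handling is simplified away.

/*
 * Hedged sketch of the cipher submit pattern above; not the literal
 * driver code.  Error paths and backlog handling are condensed.
 */
static int chcr_cipher_submit_sketch(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct chcr_skcipher_req_ctx *reqctx = skcipher_request_ctx(req);
	struct uld_ctx *u_ctx = ULD_CTX(c_ctx(tfm));
	struct sk_buff *skb = NULL;
	int err;

	/* Back off while the crypto queue for this index is full. */
	if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
					    reqctx->txqidx)))
		return -ENOSPC;

	/* Build the FW work request against the per-request rx queue id. */
	err = process_cipher(req, u_ctx->lldi.rxq_ids[reqctx->rxqidx],
			     &skb, CHCR_ENCRYPT_OP);
	if (err || !skb)
		return err;

	/* Completions come back through the first port's offload queues. */
	skb->dev = u_ctx->lldi.ports[0];
	set_wr_txq(skb, CPL_PRIORITY_DATA, reqctx->txqidx);
	chcr_send_wr(skb);
	return -EINPROGRESS;
}
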
1446 struct uld_ctx *u_ctx = NULL; in chcr_device_init() local
1451 u_ctx = assign_chcr_device(); in chcr_device_init()
1452 if (!u_ctx) { in chcr_device_init()
1457 ctx->dev = &u_ctx->dev; in chcr_device_init()
1458 ntxq = u_ctx->lldi.ntxq; in chcr_device_init()
1459 rxq_perchan = u_ctx->lldi.nrxq / u_ctx->lldi.nchan; in chcr_device_init()
1460 txq_perchan = ntxq / u_ctx->lldi.nchan; in chcr_device_init()
1462 ctx->nrxq = u_ctx->lldi.nrxq; in chcr_device_init()
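
The chcr_device_init() matches show the one-time bookkeeping per tfm context: pick a device, record its lldi parameters, and split the offload rx/tx queues evenly across the adapter's channels. A minimal stand-alone sketch of that split, using hypothetical queue counts in place of the values cxgb4 reports in u_ctx->lldi:

#include <stdio.h>

/* Stand-in for the cxgb4_lld_info fields used above; counts are hypothetical. */
struct lldi_sketch {
	unsigned int ntxq;	/* offload tx queues */
	unsigned int nrxq;	/* offload rx queues */
	unsigned int nchan;	/* ports/channels    */
};

int main(void)
{
	struct lldi_sketch lldi = { .ntxq = 32, .nrxq = 16, .nchan = 2 };

	/* Same split as chcr_device_init(): queues are shared per channel,
	 * and each context later indexes into its channel's slice. */
	unsigned int rxq_perchan = lldi.nrxq / lldi.nchan;
	unsigned int txq_perchan = lldi.ntxq / lldi.nchan;

	printf("rxq_perchan=%u txq_perchan=%u\n", rxq_perchan, txq_perchan);
	return 0;
}
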
1571 struct uld_ctx *u_ctx = ULD_CTX(ctx); in create_hash_wr() local
1632 dma_map_single(&u_ctx->lldi.pdev->dev, req_ctx->reqbfr, in create_hash_wr()
1634 if (dma_mapping_error(&u_ctx->lldi.pdev->dev, in create_hash_wr()
1662 struct uld_ctx *u_ctx = ULD_CTX(h_ctx(rtfm)); in chcr_ahash_update() local
1694 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], in chcr_ahash_update()
1702 error = chcr_hash_dma_map(&u_ctx->lldi.pdev->dev, req); in chcr_ahash_update()
1739 skb->dev = u_ctx->lldi.ports[0]; in chcr_ahash_update()
1744 chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req); in chcr_ahash_update()
1767 struct uld_ctx *u_ctx = ULD_CTX(h_ctx(rtfm)); in chcr_ahash_final() local
1821 skb->dev = u_ctx->lldi.ports[0]; in chcr_ahash_final()
1835 struct uld_ctx *u_ctx = ULD_CTX(h_ctx(rtfm)); in chcr_ahash_finup() local
1853 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], in chcr_ahash_finup()
1860 error = chcr_hash_dma_map(&u_ctx->lldi.pdev->dev, req); in chcr_ahash_finup()
1914 skb->dev = u_ctx->lldi.ports[0]; in chcr_ahash_finup()
1919 chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req); in chcr_ahash_finup()
1930 struct uld_ctx *u_ctx = ULD_CTX(h_ctx(rtfm)); in chcr_ahash_digest() local
1949 if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], in chcr_ahash_digest()
1957 error = chcr_hash_dma_map(&u_ctx->lldi.pdev->dev, req); in chcr_ahash_digest()
2008 skb->dev = u_ctx->lldi.ports[0]; in chcr_ahash_digest()
2013 chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req); in chcr_ahash_digest()
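
chcr_ahash_update(), chcr_ahash_finup() and chcr_ahash_digest() all repeat one DMA pattern around the submit: map the request's source data against the PCI device behind u_ctx->lldi.pdev, unmap on the error path, and otherwise leave the unmap to the response handler. A hedged sketch of that shape; chcr_hash_dma_map()/chcr_hash_dma_unmap() and create_hash_wr() are chcr internals, and create_hash_wr()'s real parameter setup is richer than shown.

/*
 * Hedged sketch of the DMA handling shared by the ahash submit paths;
 * not the literal driver code.
 */
static int chcr_ahash_submit_sketch(struct ahash_request *req)
{
	struct crypto_ahash *rtfm = crypto_ahash_reqtfm(req);
	struct uld_ctx *u_ctx = ULD_CTX(h_ctx(rtfm));
	struct device *dev = &u_ctx->lldi.pdev->dev;
	struct hash_wr_param params = { };	/* filled in by the real code */
	struct sk_buff *skb;
	int error;

	/* Map req->src for the lifetime of the hardware operation. */
	error = chcr_hash_dma_map(dev, req);
	if (error)
		return -ENOMEM;

	skb = create_hash_wr(req, &params);
	if (IS_ERR(skb)) {
		error = PTR_ERR(skb);
		goto unmap;
	}

	skb->dev = u_ctx->lldi.ports[0];
	chcr_send_wr(skb);
	return -EINPROGRESS;	/* unmapped later by the response handler */

unmap:
	chcr_hash_dma_unmap(dev, req);
	return error;
}
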
2025 struct uld_ctx *u_ctx = ULD_CTX(ctx); in chcr_ahash_continue() local
2076 skb->dev = u_ctx->lldi.ports[0]; in chcr_ahash_continue()
2092 struct uld_ctx *u_ctx = ULD_CTX(h_ctx(tfm)); in chcr_handle_ahash_resp() local
2105 dma_unmap_single(&u_ctx->lldi.pdev->dev, hctx_wr->dma_addr, in chcr_handle_ahash_resp()
2132 chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req); in chcr_handle_ahash_resp()
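
Besides the scatterlist mapping above, create_hash_wr() also streams the buffered partial block (req_ctx->reqbfr) with dma_map_single(), and chcr_handle_ahash_resp() undoes that mapping once the completion arrives. A hedged sketch of the two halves; the dma_len field and the param_len argument are assumed names used here for illustration only.

/* Hedged sketch: map the buffered partial block for the work request ... */
static int chcr_map_reqbfr_sketch(struct uld_ctx *u_ctx,
				  struct chcr_ahash_req_ctx *req_ctx,
				  unsigned int param_len)	/* assumed name */
{
	struct device *dev = &u_ctx->lldi.pdev->dev;

	req_ctx->hctx_wr.dma_addr =
		dma_map_single(dev, req_ctx->reqbfr, param_len, DMA_TO_DEVICE);
	if (dma_mapping_error(dev, req_ctx->hctx_wr.dma_addr))
		return -ENOMEM;
	req_ctx->hctx_wr.dma_len = param_len;	/* dma_len is assumed here */
	return 0;
}

/* ... and release it on completion, as chcr_handle_ahash_resp() does. */
static void chcr_unmap_reqbfr_sketch(struct uld_ctx *u_ctx,
				     struct chcr_ahash_req_ctx *req_ctx)
{
	if (req_ctx->hctx_wr.dma_len)
		dma_unmap_single(&u_ctx->lldi.pdev->dev,
				 req_ctx->hctx_wr.dma_addr,
				 req_ctx->hctx_wr.dma_len, DMA_TO_DEVICE);
}
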
2369 struct uld_ctx *u_ctx = ULD_CTX(a_ctx(tfm)); in chcr_aead_common_exit() local
2371 chcr_aead_dma_unmap(&u_ctx->lldi.pdev->dev, req, reqctx->op); in chcr_aead_common_exit()
3731 struct uld_ctx *u_ctx = ULD_CTX(ctx); in chcr_aead_op() local
3748 if (cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], in chcr_aead_op()
3763 skb = create_wr_fn(req, u_ctx->lldi.rxq_ids[reqctx->rxqidx], size); in chcr_aead_op()
3770 skb->dev = u_ctx->lldi.ports[0]; in chcr_aead_op()
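
chcr_aead_op() repeats the cipher-side pattern, but the work-request builder is passed in as a callback so the GCM/CCM/authenc paths can share the submit logic; the builder is always handed the per-request rx queue id from lldi.rxq_ids. A hedged, condensed sketch (create_wr_t, a_ctx(), chcr_send_wr() and the request-context type are chcr internals, and the -EBUSY/backlog handling is omitted):

/* Hedged sketch of the chcr_aead_op() submit flow; not the literal code. */
static int chcr_aead_op_sketch(struct aead_request *req, int size,
			       create_wr_t create_wr_fn)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
	struct uld_ctx *u_ctx = ULD_CTX(a_ctx(tfm));
	struct sk_buff *skb;

	if (cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0], reqctx->txqidx))
		return -ENOSPC;

	/* Mode-specific builder, fed the per-request rx queue id. */
	skb = create_wr_fn(req, u_ctx->lldi.rxq_ids[reqctx->rxqidx], size);
	if (IS_ERR_OR_NULL(skb))
		return PTR_ERR(skb);

	skb->dev = u_ctx->lldi.ports[0];
	set_wr_txq(skb, CPL_PRIORITY_DATA, reqctx->txqidx);
	chcr_send_wr(skb);
	return -EINPROGRESS;
}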