Lines matching refs: qidev
67 struct device *qidev; member
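These hits come from the CAAM queue-interface glue in the kernel's drivers/crypto/caam (the function names match caamalg_qi.c). The lone "member" hit above is the anchor for everything that follows: the per-transform context caches the QI device pointer once, and every DMA-mapping and enqueue call below reaches the device through it. A minimal sketch of such a context; only the qidev field is taken from the listing, the other members are illustrative:

struct caam_ctx {
	struct device *qidev;		/* device used for DMA mapping and QI enqueue */
	struct caam_drv_ctx *drv_ctx[2];	/* illustrative: per-direction driver contexts */
	/* ... shared descriptors, key material, etc. (omitted) ... */
};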
856 drv_ctx = caam_drv_ctx_init(ctx->qidev, &cpu, desc); in get_drv_ctx()
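get_drv_ctx() is where ctx->qidev first earns its keep: a driver context for a shared descriptor is created lazily through the qi.h API. A condensed sketch, assuming caam_drv_ctx_init(qidev, &cpu, sh_desc) returns an ERR_PTR-encoded pointer on failure, and omitting the driver's locking and its encrypt/decrypt bookkeeping:

static struct caam_drv_ctx *get_drv_ctx(struct caam_ctx *ctx, u32 *desc)
{
	int cpu = smp_processor_id();	/* preferred CPU for responses */
	struct caam_drv_ctx *drv_ctx;

	drv_ctx = caam_drv_ctx_init(ctx->qidev, &cpu, desc);
	return drv_ctx;			/* callers test with IS_ERR() */
}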
916 struct device *qidev; in aead_done() local
923 qidev = caam_ctx->qidev; in aead_done()
926 ecode = caam_jr_strstatus(qidev, status); in aead_done()
929 aead_unmap(qidev, edesc, aead_req); in aead_done()
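Lines 916-929 are the response side: the completion callback pulls qidev back out of the context to decode the hardware status word and release the DMA mappings. A sketch of that shape, assuming the drv_req/app_ctx layout from qi.h and condensing the error paths:

static void aead_done(struct caam_drv_req *drv_req, u32 status)
{
	struct aead_edesc *edesc = container_of(drv_req, struct aead_edesc,
						drv_req);
	struct aead_request *aead_req = drv_req->app_ctx;
	struct caam_ctx *caam_ctx =
		crypto_aead_ctx(crypto_aead_reqtfm(aead_req));
	struct device *qidev = caam_ctx->qidev;
	int ecode = 0;

	if (unlikely(status))
		ecode = caam_jr_strstatus(qidev, status); /* decode HW status */

	aead_unmap(qidev, edesc, aead_req);	/* release all DMA mappings */
	qi_cache_free(edesc);
	aead_request_complete(aead_req, ecode);
}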
945 struct device *qidev = ctx->qidev; in aead_edesc_alloc() local
966 dev_err(qidev, "could not allocate extended descriptor\n"); in aead_edesc_alloc()
976 dev_err(qidev, "Insufficient bytes (%d) in src S/G\n", in aead_edesc_alloc()
982 mapped_src_nents = dma_map_sg(qidev, req->src, src_nents, in aead_edesc_alloc()
985 dev_err(qidev, "unable to map source\n"); in aead_edesc_alloc()
995 dev_err(qidev, "Insufficient bytes (%d) in src S/G\n", in aead_edesc_alloc()
1003 dev_err(qidev, "Insufficient bytes (%d) in dst S/G\n", in aead_edesc_alloc()
1010 mapped_src_nents = dma_map_sg(qidev, req->src, in aead_edesc_alloc()
1013 dev_err(qidev, "unable to map source\n"); in aead_edesc_alloc()
1022 mapped_dst_nents = dma_map_sg(qidev, req->dst, in aead_edesc_alloc()
1026 dev_err(qidev, "unable to map destination\n"); in aead_edesc_alloc()
1027 dma_unmap_sg(qidev, req->src, src_nents, in aead_edesc_alloc()
1065 dev_err(qidev, "No space for %d S/G entries and/or %dB IV\n", in aead_edesc_alloc()
1067 caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, 0, in aead_edesc_alloc()
1079 iv_dma = dma_map_single(qidev, iv, ivsize, DMA_TO_DEVICE); in aead_edesc_alloc()
1080 if (dma_mapping_error(qidev, iv_dma)) { in aead_edesc_alloc()
1081 dev_err(qidev, "unable to map IV\n"); in aead_edesc_alloc()
1082 caam_unmap(qidev, req->src, req->dst, src_nents, in aead_edesc_alloc()
1097 edesc->assoclen_dma = dma_map_single(qidev, &edesc->assoclen, 4, in aead_edesc_alloc()
1099 if (dma_mapping_error(qidev, edesc->assoclen_dma)) { in aead_edesc_alloc()
1100 dev_err(qidev, "unable to map assoclen\n"); in aead_edesc_alloc()
1101 caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, in aead_edesc_alloc()
1119 qm_sg_dma = dma_map_single(qidev, sg_table, qm_sg_bytes, DMA_TO_DEVICE); in aead_edesc_alloc()
1120 if (dma_mapping_error(qidev, qm_sg_dma)) { in aead_edesc_alloc()
1121 dev_err(qidev, "unable to map S/G table\n"); in aead_edesc_alloc()
1122 dma_unmap_single(qidev, edesc->assoclen_dma, 4, DMA_TO_DEVICE); in aead_edesc_alloc()
1123 caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, in aead_edesc_alloc()
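The dense run of hits between lines 945 and 1123 is all one discipline: every dma_map_sg()/dma_map_single() against qidev is checked immediately, and a failure unwinds exactly the mappings made so far before returning. Stripped of the driver's caam_unmap() helper (whose full parameter list is truncated in the listing), the ladder reduces to:

mapped_src_nents = dma_map_sg(qidev, req->src, src_nents, DMA_TO_DEVICE);
if (unlikely(!mapped_src_nents)) {
	dev_err(qidev, "unable to map source\n");
	return ERR_PTR(-ENOMEM);	/* nothing mapped yet, nothing to undo */
}

iv_dma = dma_map_single(qidev, iv, ivsize, DMA_TO_DEVICE);
if (dma_mapping_error(qidev, iv_dma)) {	/* single mappings need this check */
	dev_err(qidev, "unable to map IV\n");
	dma_unmap_sg(qidev, req->src, src_nents, DMA_TO_DEVICE); /* undo step 1 */
	return ERR_PTR(-ENOMEM);
}

Note the asymmetry the listing itself shows: scatterlist mappings report failure by returning zero entries, while single mappings must be tested with dma_mapping_error().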
1174 ret = caam_qi_enqueue(ctx->qidev, &edesc->drv_req); in aead_crypt()
1178 aead_unmap(ctx->qidev, edesc, req); in aead_crypt()
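Lines 1174-1178 pair submit with undo: a successful caam_qi_enqueue() means ownership of the request has passed to the hardware and the caller reports -EINPROGRESS; a failed enqueue means the allocation path's DMA work must be unwound on the spot. A sketch of that tail (skcipher_crypt() at lines 1416-1420 repeats the same pattern with its own unmap helper):

ret = caam_qi_enqueue(ctx->qidev, &edesc->drv_req);
if (!ret)
	return -EINPROGRESS;	/* completion will arrive via aead_done() */

aead_unmap(ctx->qidev, edesc, req);	/* enqueue failed: release mappings */
qi_cache_free(edesc);
return ret;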
1213 struct device *qidev = caam_ctx->qidev; in skcipher_done() local
1217 dev_dbg(qidev, "%s %d: status 0x%x\n", __func__, __LINE__, status); in skcipher_done()
1222 ecode = caam_jr_strstatus(qidev, status); in skcipher_done()
1231 skcipher_unmap(qidev, edesc, req); in skcipher_done()
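skcipher_done() (lines 1213-1231) mirrors aead_done() with one addition worth calling out: for chaining modes the IV produced by the operation has to be copied back into req->iv before the request completes. A sketch of just that difference; where the output IV lives inside the edesc is driver-internal, so the accessor below is a hypothetical placeholder:

if (!ecode)
	/* hand the output IV back so the caller can chain requests;
	 * edesc_out_iv() stands in for the driver's buffer arithmetic */
	memcpy(req->iv, edesc_out_iv(edesc), ivsize);

skcipher_unmap(qidev, edesc, req);
qi_cache_free(edesc);
skcipher_request_complete(req, ecode);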
1251 struct device *qidev = ctx->qidev; in skcipher_edesc_alloc() local
1269 dev_err(qidev, "Insufficient bytes (%d) in src S/G\n", in skcipher_edesc_alloc()
1277 dev_err(qidev, "Insufficient bytes (%d) in dst S/G\n", in skcipher_edesc_alloc()
1282 mapped_src_nents = dma_map_sg(qidev, req->src, src_nents, in skcipher_edesc_alloc()
1285 dev_err(qidev, "unable to map source\n"); in skcipher_edesc_alloc()
1289 mapped_dst_nents = dma_map_sg(qidev, req->dst, dst_nents, in skcipher_edesc_alloc()
1292 dev_err(qidev, "unable to map destination\n"); in skcipher_edesc_alloc()
1293 dma_unmap_sg(qidev, req->src, src_nents, DMA_TO_DEVICE); in skcipher_edesc_alloc()
1297 mapped_src_nents = dma_map_sg(qidev, req->src, src_nents, in skcipher_edesc_alloc()
1300 dev_err(qidev, "unable to map source\n"); in skcipher_edesc_alloc()
1324 dev_err(qidev, "No space for %d S/G entries and/or %dB IV\n", in skcipher_edesc_alloc()
1326 caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, 0, in skcipher_edesc_alloc()
1334 dev_err(qidev, "could not allocate extended descriptor\n"); in skcipher_edesc_alloc()
1335 caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, 0, in skcipher_edesc_alloc()
1345 iv_dma = dma_map_single(qidev, iv, ivsize, DMA_BIDIRECTIONAL); in skcipher_edesc_alloc()
1346 if (dma_mapping_error(qidev, iv_dma)) { in skcipher_edesc_alloc()
1347 dev_err(qidev, "unable to map IV\n"); in skcipher_edesc_alloc()
1348 caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, 0, in skcipher_edesc_alloc()
1371 edesc->qm_sg_dma = dma_map_single(qidev, sg_table, edesc->qm_sg_bytes, in skcipher_edesc_alloc()
1373 if (dma_mapping_error(qidev, edesc->qm_sg_dma)) { in skcipher_edesc_alloc()
1374 dev_err(qidev, "unable to map S/G table\n"); in skcipher_edesc_alloc()
1375 caam_unmap(qidev, req->src, req->dst, src_nents, dst_nents, in skcipher_edesc_alloc()
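skcipher_edesc_alloc() (lines 1251-1375) retraces the AEAD ladder with one telling difference: the IV is mapped DMA_BIDIRECTIONAL (line 1345) rather than DMA_TO_DEVICE (line 1079), because the device writes the output IV back into the same buffer. A short sketch of that direction choice (scatterlist directions assume an out-of-place operation):

iv_dma = dma_map_single(qidev, iv, ivsize, DMA_BIDIRECTIONAL);
if (dma_mapping_error(qidev, iv_dma)) {
	dev_err(qidev, "unable to map IV\n");
	/* unwind the scatterlists mapped earlier in the function */
	dma_unmap_sg(qidev, req->src, src_nents, DMA_TO_DEVICE);
	dma_unmap_sg(qidev, req->dst, dst_nents, DMA_FROM_DEVICE);
	return ERR_PTR(-ENOMEM);
}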
1416 ret = caam_qi_enqueue(ctx->qidev, &edesc->drv_req); in skcipher_crypt()
1420 skcipher_unmap(ctx->qidev, edesc, req); in skcipher_crypt()
2451 ctx->qidev = dev; in caam_init_common()
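The single hit at line 2451 closes the loop: at transform init the driver resolves the QI-capable device once and caches it in the context, which is what lets every path above use ctx->qidev without further lookups. A heavily condensed sketch; the real function's signature differs and the rest of its init work (shared-descriptor construction, key setup) is omitted:

static int caam_init_common(struct caam_ctx *ctx, struct device *dev)
{
	ctx->qidev = dev;	/* cached once; all DMA and enqueue paths use it */
	return 0;
}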