Lines matching refs: jrdev

115 struct device *jrdev; member
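
The member hit above is the jrdev field of the driver's per-transform context; every reference below reaches the CAAM job ring through it. A condensed sketch of that context, keeping only the members visible in this listing (ctx->jrdev, ctx->sh_desc_enc_dma, ctx->key_dma, ...); the array sizes are illustrative, not taken from the driver:

#include <linux/types.h>
#include <linux/device.h>
#include <linux/dma-mapping.h>

/* Condensed sketch of the per-tfm context these hits operate on. */
struct caam_ctx_sketch {
        u32 sh_desc_enc[64];           /* CPU copy of the encrypt shared descriptor */
        u8 key[128];                   /* raw or split key material */
        struct device *jrdev;          /* job ring device owned by this tfm */
        dma_addr_t sh_desc_enc_dma;    /* DMA handle of sh_desc_enc */
        dma_addr_t sh_desc_dec_dma;    /* DMA handle of the decrypt descriptor */
        dma_addr_t key_dma;            /* DMA handle of key */
        enum dma_data_direction dir;   /* direction used for descriptor/key syncs */
};
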
135 struct device *jrdev = ctx->jrdev; in aead_null_set_sh_desc() local
136 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent); in aead_null_set_sh_desc()
157 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, in aead_null_set_sh_desc()
176 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, in aead_null_set_sh_desc()
188 struct device *jrdev = ctx->jrdev; in aead_set_sh_desc() local
189 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent); in aead_set_sh_desc()
259 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, in aead_set_sh_desc()
281 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, in aead_set_sh_desc()
305 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, in aead_set_sh_desc()
326 struct device *jrdev = ctx->jrdev; in gcm_set_sh_desc() local
350 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, in gcm_set_sh_desc()
367 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, in gcm_set_sh_desc()
391 struct device *jrdev = ctx->jrdev; in rfc4106_set_sh_desc() local
416 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, in rfc4106_set_sh_desc()
434 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, in rfc4106_set_sh_desc()
459 struct device *jrdev = ctx->jrdev; in rfc4543_set_sh_desc() local
484 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, in rfc4543_set_sh_desc()
502 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, in rfc4543_set_sh_desc()
525 struct device *jrdev = ctx->jrdev; in chachapoly_set_sh_desc() local
535 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, in chachapoly_set_sh_desc()
541 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, in chachapoly_set_sh_desc()
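
All of the *_set_sh_desc() hits above share one pattern: a descriptor constructor rewrites the shared descriptor in the CPU copy, and dma_sync_single_for_device() then pushes it out over the DMA mapping created at init time (see the caam_init_common() hits at the end of this listing). A minimal sketch, shown for the encrypt descriptor only; the decrypt descriptor is handled identically through ctx->sh_desc_dec_dma:

#include <linux/dma-mapping.h>
#include "desc_constr.h"   /* desc_bytes(), CAAM driver internal */

static void set_sh_desc_sketch(struct caam_ctx_sketch *ctx)
{
        u32 *desc = ctx->sh_desc_enc;

        /* ... a cnstr_shdsc_*() constructor rebuilds the descriptor here ... */

        dma_sync_single_for_device(ctx->jrdev, ctx->sh_desc_enc_dma,
                                   desc_bytes(desc), ctx->dir);
}
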
579 struct device *jrdev = ctx->jrdev; in aead_setkey() local
580 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent); in aead_setkey()
587 dev_dbg(jrdev, "keylen %d enckeylen %d authkeylen %d\n", in aead_setkey()
608 dma_sync_single_for_device(jrdev, ctx->key_dma, in aead_setkey()
614 ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey, in aead_setkey()
623 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad + in aead_setkey()
660 struct device *jrdev = ctx->jrdev; in gcm_setkey() local
671 dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir); in gcm_setkey()
681 struct device *jrdev = ctx->jrdev; in rfc4106_setkey() local
698 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen, in rfc4106_setkey()
707 struct device *jrdev = ctx->jrdev; in rfc4543_setkey() local
724 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen, in rfc4543_setkey()
736 struct device *jrdev = ctx->jrdev; in skcipher_setkey() local
752 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, in skcipher_setkey()
759 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, in skcipher_setkey()
836 struct device *jrdev = ctx->jrdev; in xts_skcipher_setkey() local
837 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent); in xts_skcipher_setkey()
843 dev_dbg(jrdev, "key size mismatch\n"); in xts_skcipher_setkey()
863 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, in xts_skcipher_setkey()
869 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, in xts_skcipher_setkey()
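
The setkey hits end the same way: the key lands in the context buffer and is synced to the device through ctx->key_dma (or, where the key is embedded in the descriptors, through the sh_desc_*_dma handles as in skcipher_setkey() and xts_skcipher_setkey()). For authenc algorithms, aead_setkey() first derives a split key on the same ring via gen_split_key() (hit at source line 614). A minimal sketch of the plain-copy case, with error handling trimmed:

#include <linux/string.h>
#include "key_gen.h"   /* gen_split_key(), CAAM driver internal */

static int setkey_sketch(struct caam_ctx_sketch *ctx, const u8 *key,
                         unsigned int keylen)
{
        /* authenc variants would first run gen_split_key(ctx->jrdev, ...)
         * on the same ring; plain ciphers just copy the raw key */
        memcpy(ctx->key, key, keylen);
        dma_sync_single_for_device(ctx->jrdev, ctx->key_dma, keylen, ctx->dir);
        return 0;
}
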
969 static void aead_crypt_done(struct device *jrdev, u32 *desc, u32 err, in aead_crypt_done() argument
974 struct caam_drv_private_jr *jrp = dev_get_drvdata(jrdev); in aead_crypt_done()
979 dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); in aead_crypt_done()
985 ecode = caam_jr_strstatus(jrdev, err); in aead_crypt_done()
987 aead_unmap(jrdev, edesc, req); in aead_crypt_done()
1001 static void skcipher_crypt_done(struct device *jrdev, u32 *desc, u32 err, in skcipher_crypt_done() argument
1008 struct caam_drv_private_jr *jrp = dev_get_drvdata(jrdev); in skcipher_crypt_done()
1013 dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); in skcipher_crypt_done()
1018 ecode = caam_jr_strstatus(jrdev, err); in skcipher_crypt_done()
1020 skcipher_unmap(jrdev, edesc, req); in skcipher_crypt_done()
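
Both done() callbacks above have the four-argument shape that caam_jr_enqueue() expects; they decode the CAAM status word with caam_jr_strstatus(), unmap the extended descriptor, and complete the crypto request. A minimal sketch for the AEAD case, with the unmapping step elided:

#include <linux/device.h>
#include <crypto/internal/aead.h>
#include "error.h"   /* caam_jr_strstatus(), CAAM driver internal */

static void aead_crypt_done_sketch(struct device *jrdev, u32 *desc, u32 err,
                                   void *context)
{
        struct aead_request *req = context;
        int ecode = 0;

        dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);

        if (err)
                ecode = caam_jr_strstatus(jrdev, err);  /* decode CAAM status word */

        /* the real callbacks unmap the extended descriptor here (hits at
         * source lines 987 and 1020) before completing */
        aead_request_complete(req, ecode);
}
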
1189 struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent); in init_authenc_job()
1238 struct device *jrdev = ctx->jrdev; in init_skcipher_job() local
1248 dev_dbg(jrdev, "asked=%d, cryptlen%d\n", in init_skcipher_job()
1294 struct device *jrdev = ctx->jrdev; in aead_edesc_alloc() local
1310 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n", in aead_edesc_alloc()
1317 dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n", in aead_edesc_alloc()
1327 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n", in aead_edesc_alloc()
1334 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents, in aead_edesc_alloc()
1337 dev_err(jrdev, "unable to map source\n"); in aead_edesc_alloc()
1343 mapped_src_nents = dma_map_sg(jrdev, req->src, in aead_edesc_alloc()
1346 dev_err(jrdev, "unable to map source\n"); in aead_edesc_alloc()
1355 mapped_dst_nents = dma_map_sg(jrdev, req->dst, in aead_edesc_alloc()
1359 dev_err(jrdev, "unable to map destination\n"); in aead_edesc_alloc()
1360 dma_unmap_sg(jrdev, req->src, src_nents, in aead_edesc_alloc()
1385 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0, in aead_edesc_alloc()
1415 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg, in aead_edesc_alloc()
1417 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { in aead_edesc_alloc()
1418 dev_err(jrdev, "unable to map S/G table\n"); in aead_edesc_alloc()
1419 aead_unmap(jrdev, edesc, req); in aead_edesc_alloc()
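
The aead_edesc_alloc() hits show the map-then-unwind discipline: source and destination scatterlists are mapped separately (bidirectionally when operating in place), and a failure at any step unmaps everything mapped so far in reverse order; the sec4_sg table mapped with dma_map_single() gets the same treatment. A minimal sketch of the scatterlist part, with the nents counting simplified:

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

static int map_req_sg_sketch(struct device *jrdev,
                             struct scatterlist *src, int src_nents,
                             struct scatterlist *dst, int dst_nents)
{
        int mapped_src, mapped_dst;

        mapped_src = dma_map_sg(jrdev, src, src_nents, DMA_TO_DEVICE);
        if (unlikely(!mapped_src)) {
                dev_err(jrdev, "unable to map source\n");
                return -ENOMEM;
        }

        mapped_dst = dma_map_sg(jrdev, dst, dst_nents, DMA_FROM_DEVICE);
        if (unlikely(!mapped_dst)) {
                dev_err(jrdev, "unable to map destination\n");
                /* unwind in reverse order of mapping */
                dma_unmap_sg(jrdev, src, src_nents, DMA_TO_DEVICE);
                return -ENOMEM;
        }
        return 0;
}
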
1429 static int aead_enqueue_req(struct device *jrdev, struct aead_request *req) in aead_enqueue_req() argument
1431 struct caam_drv_private_jr *jrpriv = dev_get_drvdata(jrdev); in aead_enqueue_req()
1446 ret = caam_jr_enqueue(jrdev, desc, aead_crypt_done, req); in aead_enqueue_req()
1449 aead_unmap(jrdev, edesc, req); in aead_enqueue_req()
1461 struct device *jrdev = ctx->jrdev; in chachapoly_crypt() local
1477 return aead_enqueue_req(jrdev, req); in chachapoly_crypt()
1495 struct device *jrdev = ctx->jrdev; in aead_crypt() local
1511 return aead_enqueue_req(jrdev, req); in aead_crypt()
1534 ret = caam_jr_enqueue(ctx->jrdev, desc, aead_crypt_done, req); in aead_do_one_req()
1537 aead_unmap(ctx->jrdev, rctx->edesc, req); in aead_do_one_req()
1551 struct device *jrdev = ctx->jrdev; in gcm_crypt() local
1567 return aead_enqueue_req(jrdev, req); in gcm_crypt()
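
All three submit paths funnel into aead_enqueue_req(), whose hits at source lines 1446-1449 show the contract: in current kernels caam_jr_enqueue() returns -EINPROGRESS when the job was accepted, and any other value means the descriptor never reached the ring, so the caller must release its own DMA mappings. A minimal sketch (backlog/retry handling varies by kernel version and is elided; aead_edesc and aead_unmap() stand for the driver's extended-descriptor type and unmap helper):

#include <linux/slab.h>
#include "jr.h"   /* caam_jr_enqueue(), CAAM driver internal */

static int enqueue_sketch(struct device *jrdev, struct aead_edesc *edesc,
                          u32 *desc, struct aead_request *req)
{
        int ret;

        ret = caam_jr_enqueue(jrdev, desc, aead_crypt_done_sketch, req);
        if (ret != -EINPROGRESS) {
                /* job never reached the ring: release DMA mappings now */
                aead_unmap(jrdev, edesc, req);
                kfree(edesc);
        }
        return ret;
}
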
1599 struct device *jrdev = ctx->jrdev; in skcipher_edesc_alloc() local
1611 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n", in skcipher_edesc_alloc()
1619 dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n", in skcipher_edesc_alloc()
1626 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents, in skcipher_edesc_alloc()
1629 dev_err(jrdev, "unable to map source\n"); in skcipher_edesc_alloc()
1633 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents, in skcipher_edesc_alloc()
1636 dev_err(jrdev, "unable to map source\n"); in skcipher_edesc_alloc()
1639 mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents, in skcipher_edesc_alloc()
1642 dev_err(jrdev, "unable to map destination\n"); in skcipher_edesc_alloc()
1643 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE); in skcipher_edesc_alloc()
1684 dev_err(jrdev, "could not allocate extended descriptor\n"); in skcipher_edesc_alloc()
1685 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0, in skcipher_edesc_alloc()
1704 iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_BIDIRECTIONAL); in skcipher_edesc_alloc()
1705 if (dma_mapping_error(jrdev, iv_dma)) { in skcipher_edesc_alloc()
1706 dev_err(jrdev, "unable to map IV\n"); in skcipher_edesc_alloc()
1707 caam_unmap(jrdev, req->src, req->dst, src_nents, in skcipher_edesc_alloc()
1732 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg, in skcipher_edesc_alloc()
1735 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { in skcipher_edesc_alloc()
1736 dev_err(jrdev, "unable to map S/G table\n"); in skcipher_edesc_alloc()
1737 caam_unmap(jrdev, req->src, req->dst, src_nents, in skcipher_edesc_alloc()
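
The skcipher allocation path adds one step the AEAD path lacks: the IV is mapped DMA_BIDIRECTIONAL because the CAAM both reads the input IV and writes the next chaining IV back into the same buffer. A minimal sketch of that step; on failure the real code unwinds the already-mapped scatterlists through caam_unmap():

#include <linux/device.h>
#include <linux/dma-mapping.h>

static dma_addr_t map_iv_sketch(struct device *jrdev, void *iv, int ivsize)
{
        dma_addr_t iv_dma;

        iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_BIDIRECTIONAL);
        if (dma_mapping_error(jrdev, iv_dma)) {
                dev_err(jrdev, "unable to map IV\n");
                return DMA_MAPPING_ERROR;   /* caller unwinds S/G mappings */
        }
        return iv_dma;
}
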
1763 ret = caam_jr_enqueue(ctx->jrdev, desc, skcipher_crypt_done, req); in skcipher_do_one_req()
1766 skcipher_unmap(ctx->jrdev, rctx->edesc, req); in skcipher_do_one_req()
1788 struct device *jrdev = ctx->jrdev; in skcipher_crypt() local
1789 struct caam_drv_private_jr *jrpriv = dev_get_drvdata(jrdev); in skcipher_crypt()
1790 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent); in skcipher_crypt()
1840 ret = caam_jr_enqueue(jrdev, desc, skcipher_crypt_done, req); in skcipher_crypt()
1843 skcipher_unmap(jrdev, edesc, req); in skcipher_crypt()
3352 ctx->jrdev = caam_jr_alloc(); in caam_init_common()
3353 if (IS_ERR(ctx->jrdev)) { in caam_init_common()
3355 return PTR_ERR(ctx->jrdev); in caam_init_common()
3358 priv = dev_get_drvdata(ctx->jrdev->parent); in caam_init_common()
3364 dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc, in caam_init_common()
3369 if (dma_mapping_error(ctx->jrdev, dma_addr)) { in caam_init_common()
3370 dev_err(ctx->jrdev, "unable to map key, shared descriptors\n"); in caam_init_common()
3371 caam_jr_free(ctx->jrdev); in caam_init_common()
3407 dev_err(ctx->jrdev, "Failed to allocate %s fallback: %ld\n", in caam_cra_init()
3442 dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma, in caam_exit_common()
3446 caam_jr_free(ctx->jrdev); in caam_exit_common()
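
The lifecycle hits bracket everything above: caam_init_common() takes a job ring with caam_jr_alloc() and maps the descriptor/key block once with DMA_ATTR_SKIP_CPU_SYNC (so later updates only need the syncs shown earlier), and caam_exit_common() releases in reverse order. A minimal sketch of the pairing, with desc_key_bytes standing for the combined size the driver computes:

#include <linux/err.h>
#include <linux/dma-mapping.h>
#include "jr.h"   /* caam_jr_alloc()/caam_jr_free(), CAAM driver internal */

static int ctx_init_sketch(struct caam_ctx_sketch *ctx, size_t desc_key_bytes)
{
        dma_addr_t dma_addr;

        ctx->jrdev = caam_jr_alloc();          /* grab a job ring for this tfm */
        if (IS_ERR(ctx->jrdev))
                return PTR_ERR(ctx->jrdev);

        /* map once, skipping the CPU sync: the CPU copy is rewritten and
         * synced later by the *_set_sh_desc() paths */
        dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc,
                                        desc_key_bytes, ctx->dir,
                                        DMA_ATTR_SKIP_CPU_SYNC);
        if (dma_mapping_error(ctx->jrdev, dma_addr)) {
                dev_err(ctx->jrdev, "unable to map key, shared descriptors\n");
                caam_jr_free(ctx->jrdev);      /* unwind: give the ring back */
                return -ENOMEM;
        }
        ctx->sh_desc_enc_dma = dma_addr;
        return 0;
}

static void ctx_exit_sketch(struct caam_ctx_sketch *ctx, size_t desc_key_bytes)
{
        dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
                               desc_key_bytes, ctx->dir,
                               DMA_ATTR_SKIP_CPU_SYNC);
        caam_jr_free(ctx->jrdev);
}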