Lines matching refs:jrdev (all uses of the CAAM job ring device pointer)
111 struct device *jrdev; member
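Line 111 is the declaration site: jrdev lives in the driver's per-transform context, and every reference below reaches the job ring through it. A minimal sketch of that context, keeping only the fields this listing touches (the real struct caam_ctx carries more, e.g. the descriptor and key buffers themselves):

    #include <linux/device.h>
    #include <linux/dma-mapping.h>

    /* Abridged per-tfm context; field subset inferred from the references below. */
    struct caam_ctx_sketch {
        struct device *jrdev;          /* job ring device, obtained via caam_jr_alloc() */
        dma_addr_t sh_desc_enc_dma;    /* DMA address of the encrypt shared descriptor */
        dma_addr_t sh_desc_dec_dma;    /* DMA address of the decrypt shared descriptor */
        dma_addr_t key_dma;            /* DMA address of the key material */
        enum dma_data_direction dir;   /* direction of the long-lived mapping */
    };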
120 struct device *jrdev = ctx->jrdev; in aead_null_set_sh_desc() local
121 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent); in aead_null_set_sh_desc()
142 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, in aead_null_set_sh_desc()
161 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, in aead_null_set_sh_desc()
173 struct device *jrdev = ctx->jrdev; in aead_set_sh_desc() local
174 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent); in aead_set_sh_desc()
244 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, in aead_set_sh_desc()
266 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, in aead_set_sh_desc()
290 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, in aead_set_sh_desc()
311 struct device *jrdev = ctx->jrdev; in gcm_set_sh_desc() local
335 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, in gcm_set_sh_desc()
352 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, in gcm_set_sh_desc()
376 struct device *jrdev = ctx->jrdev; in rfc4106_set_sh_desc() local
401 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, in rfc4106_set_sh_desc()
419 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, in rfc4106_set_sh_desc()
444 struct device *jrdev = ctx->jrdev; in rfc4543_set_sh_desc() local
469 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, in rfc4543_set_sh_desc()
487 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, in rfc4543_set_sh_desc()
510 struct device *jrdev = ctx->jrdev; in chachapoly_set_sh_desc() local
520 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, in chachapoly_set_sh_desc()
526 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, in chachapoly_set_sh_desc()
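All six *_set_sh_desc() functions above end the same way: after the CPU rewrites a shared descriptor, the streaming mapping is flushed back to the device. A sketch of that recurring pattern, assuming the driver's desc_bytes() helper for the descriptor length:

    static void sync_sh_desc(struct device *jrdev, u32 *sh_desc,
                             dma_addr_t sh_desc_dma,
                             enum dma_data_direction dir)
    {
        /* ... a cnstr_shdsc_*() helper has just rewritten the descriptor words ... */
        dma_sync_single_for_device(jrdev, sh_desc_dma,
                                   desc_bytes(sh_desc), dir);
    }

A sync (rather than a fresh dma_map_single() per update) works because the whole descriptor/key block is mapped once at init time; see the init/exit sketch at the end of the listing.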
566 struct device *jrdev = ctx->jrdev; in aead_setkey() local
567 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent); in aead_setkey()
574 dev_dbg(jrdev, "keylen %d enckeylen %d authkeylen %d\n", in aead_setkey()
595 dma_sync_single_for_device(jrdev, ctx->key_dma, in aead_setkey()
601 ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey, in aead_setkey()
610 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad + in aead_setkey()
648 struct device *jrdev = ctx->jrdev; in gcm_setkey() local
661 dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir); in gcm_setkey()
671 struct device *jrdev = ctx->jrdev; in rfc4106_setkey() local
690 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen, in rfc4106_setkey()
699 struct device *jrdev = ctx->jrdev; in rfc4543_setkey() local
718 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen, in rfc4543_setkey()
730 struct device *jrdev = ctx->jrdev; in skcipher_setkey() local
746 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, in skcipher_setkey()
753 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, in skcipher_setkey()
845 struct device *jrdev = ctx->jrdev; in xts_skcipher_setkey() local
850 dev_err(jrdev, "key size mismatch\n"); in xts_skcipher_setkey()
861 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma, in xts_skcipher_setkey()
867 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma, in xts_skcipher_setkey()
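The setkey paths follow suit, and aead_setkey() (lines 566-610) additionally runs a split-key generation job on the same ring before syncing the result. A condensed sketch of that flow; keys comes from crypto_authenc_extractkeys() earlier in the function, and the error labels are abbreviated:

    /* Derive the split MAC key on the job ring, append the cipher key, sync. */
    ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey,
                        keys.authkeylen, CAAM_MAX_KEY_SIZE - keys.enckeylen);
    if (ret)
        goto badkey;

    memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
    dma_sync_single_for_device(jrdev, ctx->key_dma,
                               ctx->adata.keylen_pad + keys.enckeylen,
                               ctx->dir);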
963 static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err, in aead_encrypt_done() argument
970 dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); in aead_encrypt_done()
975 ecode = caam_jr_strstatus(jrdev, err); in aead_encrypt_done()
977 aead_unmap(jrdev, edesc, req); in aead_encrypt_done()
984 static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err, in aead_decrypt_done() argument
991 dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); in aead_decrypt_done()
996 ecode = caam_jr_strstatus(jrdev, err); in aead_decrypt_done()
998 aead_unmap(jrdev, edesc, req); in aead_decrypt_done()
1005 static void skcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err, in skcipher_encrypt_done() argument
1014 dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); in skcipher_encrypt_done()
1019 ecode = caam_jr_strstatus(jrdev, err); in skcipher_encrypt_done()
1021 skcipher_unmap(jrdev, edesc, req); in skcipher_encrypt_done()
1045 static void skcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err, in skcipher_decrypt_done() argument
1054 dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err); in skcipher_decrypt_done()
1058 ecode = caam_jr_strstatus(jrdev, err); in skcipher_decrypt_done()
1060 skcipher_unmap(jrdev, edesc, req); in skcipher_decrypt_done()
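The four *_done() callbacks (lines 963-1060) share one shape: the job ring hands back the device, the descriptor and the raw CAAM status word, and caam_jr_strstatus() decodes that status into an errno. A condensed AEAD-side sketch; the container_of() lookup reflects one way the driver has recovered the edesc, and the skcipher variants add IV write-back on top:

    static void aead_done_sketch(struct device *jrdev, u32 *desc, u32 err,
                                 void *context)
    {
        struct aead_request *req = context;
        struct aead_edesc *edesc;
        int ecode = 0;

        dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);

        edesc = container_of(desc, struct aead_edesc, hw_desc[0]);

        if (err)
            ecode = caam_jr_strstatus(jrdev, err);  /* CAAM status -> errno */

        aead_unmap(jrdev, edesc, req);              /* release per-request DMA state */
        kfree(edesc);
        aead_request_complete(req, ecode);
    }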
1222 struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent); in init_authenc_job()
1271 struct device *jrdev = ctx->jrdev; in init_skcipher_job() local
1281 dev_dbg(jrdev, "asked=%d, cryptlen%d\n", in init_skcipher_job()
1327 struct device *jrdev = ctx->jrdev; in aead_edesc_alloc() local
1342 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n", in aead_edesc_alloc()
1349 dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n", in aead_edesc_alloc()
1359 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n", in aead_edesc_alloc()
1366 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents, in aead_edesc_alloc()
1369 dev_err(jrdev, "unable to map source\n"); in aead_edesc_alloc()
1375 mapped_src_nents = dma_map_sg(jrdev, req->src, in aead_edesc_alloc()
1378 dev_err(jrdev, "unable to map source\n"); in aead_edesc_alloc()
1387 mapped_dst_nents = dma_map_sg(jrdev, req->dst, in aead_edesc_alloc()
1391 dev_err(jrdev, "unable to map destination\n"); in aead_edesc_alloc()
1392 dma_unmap_sg(jrdev, req->src, src_nents, in aead_edesc_alloc()
1417 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0, in aead_edesc_alloc()
1444 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg, in aead_edesc_alloc()
1446 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { in aead_edesc_alloc()
1447 dev_err(jrdev, "unable to map S/G table\n"); in aead_edesc_alloc()
1448 aead_unmap(jrdev, edesc, req); in aead_edesc_alloc()
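aead_edesc_alloc() (lines 1327-1448) owns the per-request DMA setup, and its error paths show the unwind ordering: a failed destination map must release the already-mapped source before bailing out. A sketch of the distinct-src/dst branch; the src == dst case maps a single list DMA_BIDIRECTIONAL instead:

    mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
    if (unlikely(!mapped_src_nents)) {
        dev_err(jrdev, "unable to map source\n");
        return ERR_PTR(-ENOMEM);
    }

    mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents, DMA_FROM_DEVICE);
    if (unlikely(!mapped_dst_nents)) {
        dev_err(jrdev, "unable to map destination\n");
        dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
        return ERR_PTR(-ENOMEM);
    }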
1463 struct device *jrdev = ctx->jrdev; in gcm_encrypt() local
1481 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req); in gcm_encrypt()
1485 aead_unmap(jrdev, edesc, req); in gcm_encrypt()
1497 struct device *jrdev = ctx->jrdev; in chachapoly_encrypt() local
1514 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req); in chachapoly_encrypt()
1518 aead_unmap(jrdev, edesc, req); in chachapoly_encrypt()
1530 struct device *jrdev = ctx->jrdev; in chachapoly_decrypt() local
1547 ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req); in chachapoly_decrypt()
1551 aead_unmap(jrdev, edesc, req); in chachapoly_decrypt()
1568 struct device *jrdev = ctx->jrdev; in aead_encrypt() local
1587 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req); in aead_encrypt()
1591 aead_unmap(jrdev, edesc, req); in aead_encrypt()
1603 struct device *jrdev = ctx->jrdev; in gcm_decrypt() local
1621 ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req); in gcm_decrypt()
1625 aead_unmap(jrdev, edesc, req); in gcm_decrypt()
1642 struct device *jrdev = ctx->jrdev; in aead_decrypt() local
1665 ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req); in aead_decrypt()
1669 aead_unmap(jrdev, edesc, req); in aead_decrypt()
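Every encrypt/decrypt entry point above funnels into the same submission tail: enqueue the job descriptor with the matching *_done() callback, and unwind locally unless the ring accepted the job. A sketch of gcm_encrypt()'s ending, as the listing suggests:

    ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
    if (ret == -EINPROGRESS)
        return ret;            /* aead_encrypt_done() will complete the request */

    /* the ring rejected the job: release the request's DMA state ourselves */
    aead_unmap(jrdev, edesc, req);
    kfree(edesc);
    return ret;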
1684 struct device *jrdev = ctx->jrdev; in skcipher_edesc_alloc() local
1696 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n", in skcipher_edesc_alloc()
1704 dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n", in skcipher_edesc_alloc()
1711 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents, in skcipher_edesc_alloc()
1714 dev_err(jrdev, "unable to map source\n"); in skcipher_edesc_alloc()
1718 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents, in skcipher_edesc_alloc()
1721 dev_err(jrdev, "unable to map source\n"); in skcipher_edesc_alloc()
1724 mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents, in skcipher_edesc_alloc()
1727 dev_err(jrdev, "unable to map destination\n"); in skcipher_edesc_alloc()
1728 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE); in skcipher_edesc_alloc()
1769 dev_err(jrdev, "could not allocate extended descriptor\n"); in skcipher_edesc_alloc()
1770 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0, in skcipher_edesc_alloc()
1788 iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_BIDIRECTIONAL); in skcipher_edesc_alloc()
1789 if (dma_mapping_error(jrdev, iv_dma)) { in skcipher_edesc_alloc()
1790 dev_err(jrdev, "unable to map IV\n"); in skcipher_edesc_alloc()
1791 caam_unmap(jrdev, req->src, req->dst, src_nents, in skcipher_edesc_alloc()
1816 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg, in skcipher_edesc_alloc()
1819 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) { in skcipher_edesc_alloc()
1820 dev_err(jrdev, "unable to map S/G table\n"); in skcipher_edesc_alloc()
1821 caam_unmap(jrdev, req->src, req->dst, src_nents, in skcipher_edesc_alloc()
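skcipher_edesc_alloc() adds one mapping the AEAD path lacks: a bidirectional IV buffer, since CAAM both consumes the input IV and writes back the output IV for chaining. A sketch around lines 1788-1791; the unwind is simplified to plain dma_unmap_sg() where the driver uses its caam_unmap() helper:

    iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_BIDIRECTIONAL);
    if (dma_mapping_error(jrdev, iv_dma)) {
        dev_err(jrdev, "unable to map IV\n");
        /* simplified unwind; the driver's caam_unmap() covers both lists */
        dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
        dma_unmap_sg(jrdev, req->dst, dst_nents, DMA_FROM_DEVICE);
        kfree(edesc);
        return ERR_PTR(-ENOMEM);
    }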
1842 struct device *jrdev = ctx->jrdev; in skcipher_encrypt() local
1862 ret = caam_jr_enqueue(jrdev, desc, skcipher_encrypt_done, req); in skcipher_encrypt()
1867 skcipher_unmap(jrdev, edesc, req); in skcipher_encrypt()
1879 struct device *jrdev = ctx->jrdev; in skcipher_decrypt() local
1899 ret = caam_jr_enqueue(jrdev, desc, skcipher_decrypt_done, req); in skcipher_decrypt()
1903 skcipher_unmap(jrdev, edesc, req); in skcipher_decrypt()
3414 ctx->jrdev = caam_jr_alloc(); in caam_init_common()
3415 if (IS_ERR(ctx->jrdev)) { in caam_init_common()
3417 return PTR_ERR(ctx->jrdev); in caam_init_common()
3420 priv = dev_get_drvdata(ctx->jrdev->parent); in caam_init_common()
3426 dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc, in caam_init_common()
3430 if (dma_mapping_error(ctx->jrdev, dma_addr)) { in caam_init_common()
3431 dev_err(ctx->jrdev, "unable to map key, shared descriptors\n"); in caam_init_common()
3432 caam_jr_free(ctx->jrdev); in caam_init_common()
3470 dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma, in caam_exit_common()
3473 caam_jr_free(ctx->jrdev); in caam_exit_common()
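Finally, caam_init_common() and caam_exit_common() bracket jrdev's lifetime: allocate a ring per transform, map the shared-descriptor/key block once, and undo both at exit. A condensed sketch; desc_block_size is a hypothetical stand-in for the driver's offsetof()-computed size of that block:

    ctx->jrdev = caam_jr_alloc();
    if (IS_ERR(ctx->jrdev)) {
        pr_err("Job Ring Device allocation for transform failed\n");
        return PTR_ERR(ctx->jrdev);
    }

    /* One long-lived mapping for descriptors + key. CPU syncs happen
     * piecemeal in the functions above, hence DMA_ATTR_SKIP_CPU_SYNC. */
    dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc,
                                    desc_block_size, /* assumption of this sketch */
                                    ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
    if (dma_mapping_error(ctx->jrdev, dma_addr)) {
        dev_err(ctx->jrdev, "unable to map key, shared descriptors\n");
        caam_jr_free(ctx->jrdev);
        return -ENOMEM;
    }

    /* ... and the exit-side teardown: */
    dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
                           desc_block_size, ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
    caam_jr_free(ctx->jrdev);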