Lines matching "+full:j721e +full:-sa2ul" in drivers/crypto/sa2ul.c
// SPDX-License-Identifier: GPL-2.0
/*
 * K3 SA2UL crypto accelerator driver
 *
 * Copyright (C) 2018-2020 Texas Instruments Incorporated - http://www.ti.com
 */

#include <linux/dma-mapping.h>
#include "sa2ul.h"

/* Byte offset for Aux-1 in encryption security context */

/* Make 32-bit word from 4 bytes */
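/*
 * A minimal sketch of the byte-packing helper the comment above
 * introduces (the SA_MK_U32() macro in the mainline driver; reproduced
 * from memory, so treat it as illustrative rather than verbatim):
 */
#define SA_MK_U32(b0, b1, b2, b3) (((b0) << 24) | ((b1) << 16) | \
				   ((b2) << 8) | (b3))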
/**
 * struct sa_cmdl_cfg - Command label configuration descriptor
 */

/**
 * struct algo_data - Crypto algorithm specific data
 */

/* The SA2UL expects the security context to ... */

/* in sa_swiz_128(): */
			in[i + j] = data[15 - j];
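/*
 * A self-contained sketch of the 128-bit swizzle the line above belongs
 * to: each 16-byte group of the security context is byte-reversed in
 * place before being handed to the hardware (body reconstructed around
 * the single surviving line, not verbatim upstream code):
 */
static void sa_swiz_128(u8 *in, u16 len)
{
	u8 data[16];
	int i, j;

	for (i = 0; i < len; i += 16) {
		memcpy(data, &in[i], 16);
		for (j = 0; j < 16; j++)
			in[i + j] = data[15 - j];
	}
}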
/* in sa_export_shash(): */
	result = sha1->state;
	...
	result = sha256->state;

/* in sa_prepare_iopads(): */
	SHASH_DESC_ON_STACK(shash, data->ctx->shash);
	int block_size = crypto_shash_blocksize(data->ctx->shash);
	int digest_size = crypto_shash_digestsize(data->ctx->shash);
	...
	shash->tfm = data->ctx->shash;
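/*
 * sa_prepare_iopads() precomputes the HMAC inner/outer pad state for
 * keyed MACs. A generic sketch of the standard derivation it builds on
 * (helper name assumed; the driver additionally runs one hash block over
 * each pad and exports the intermediate state via sa_export_shash()):
 */
static void hmac_pads_sketch(const u8 *key, int key_sz, int block_size,
			     u8 *ipad, u8 *opad)
{
	int i;

	for (i = 0; i < block_size; i++) {
		u8 k = (i < key_sz) ? key[i] : 0;

		ipad[i] = k ^ 0x36;	/* HMAC inner pad constant */
		opad[i] = k ^ 0x5c;	/* HMAC outer pad constant */
	}
}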
/* Derive the inverse key used in AES-CBC decryption operation */

/* in sa_aes_inv_key(): */
		return -EINVAL;
	...
		key_pos = key_sz + 24 - 4;
	...
		return -EINVAL;
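/*
 * Sketch of how such an inverse (decryption-side) key can be derived
 * with the kernel AES library, consistent with the fragments above:
 * expand the encryption key schedule, then copy the tail of the schedule
 * from a word offset that depends on the key size (the "+ 24 - 4" value
 * above is one of those size-dependent offsets; this body is an
 * assumption, not the upstream function):
 */
#include <crypto/aes.h>

static int sa_inv_key_sketch(u8 *inv_key, const u8 *key, u16 key_sz)
{
	struct crypto_aes_ctx ctx;
	int key_pos;

	if (aes_expandkey(&ctx, key, key_sz))
		return -EINVAL;

	key_pos = key_sz + 24 - 4;	/* size-dependent word offset */
	memcpy(inv_key, &ctx.key_enc[key_pos], key_sz);

	return 0;
}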
/* in sa_set_sc_enc(): */
		mci = ad->mci_enc;
	...
		mci = ad->mci_dec;
	...
	/* For AES-CBC decryption get the inverse key */
	if (ad->inv_key && !enc) {
	...
			return -EINVAL;

/* in sa_set_sc_auth(): */
	sc_buf[1] |= ad->auth_ctrl;
	...
	if (ad->keyed_mac)
		ad->prep_iopad(ad, key, key_sz, ipad, opad);
/* in sa_format_cmdl_gen(): */
	if (cfg->enc_eng_id && cfg->auth_eng_id) {
		if (cfg->enc) {
	...
			enc_next_eng = cfg->auth_eng_id;
	...
			if (cfg->iv_size)
				auth_offset += cfg->iv_size;
	...
			auth_next_eng = cfg->enc_eng_id;
	...
	if (cfg->enc_eng_id) {
		upd_info->flags |= SA_CMDL_UPD_ENC;
		upd_info->enc_size.index = enc_offset >> 2;
		upd_info->enc_offset.index = upd_info->enc_size.index + 1;
	...
		if (cfg->iv_size) {
			upd_info->flags |= SA_CMDL_UPD_ENC_IV;
			upd_info->enc_iv.index =
	...
			upd_info->enc_iv.size = cfg->iv_size;
	...
				SA_CMDL_HEADER_SIZE_BYTES + cfg->iv_size;
	...
				(SA_CTX_ENC_AUX2_OFFSET | (cfg->iv_size >> 3));
			total += SA_CMDL_HEADER_SIZE_BYTES + cfg->iv_size;
	...
	if (cfg->auth_eng_id) {
		upd_info->flags |= SA_CMDL_UPD_AUTH;
		upd_info->auth_size.index = auth_offset >> 2;
		upd_info->auth_offset.index = upd_info->auth_size.index + 1;
/* in sa_update_cmdl(): */
	if (likely(upd_info->flags & SA_CMDL_UPD_ENC)) {
		cmdl[upd_info->enc_size.index] &= ~SA_CMDL_PAYLOAD_LENGTH_MASK;
		cmdl[upd_info->enc_size.index] |= req->enc_size;
		cmdl[upd_info->enc_offset.index] &=
	...
		cmdl[upd_info->enc_offset.index] |=
			((u32)req->enc_offset <<
	...
	if (likely(upd_info->flags & SA_CMDL_UPD_ENC_IV)) {
		__be32 *data = (__be32 *)&cmdl[upd_info->enc_iv.index];
		u32 *enc_iv = (u32 *)req->enc_iv;

		for (j = 0; i < upd_info->enc_iv.size; i += 4, j++) {
	...
	if (likely(upd_info->flags & SA_CMDL_UPD_AUTH)) {
		cmdl[upd_info->auth_size.index] &= ~SA_CMDL_PAYLOAD_LENGTH_MASK;
		cmdl[upd_info->auth_size.index] |= req->auth_size;
		cmdl[upd_info->auth_offset.index] &=
	...
		cmdl[upd_info->auth_offset.index] |=
			((u32)req->auth_offset <<
	...
		if (upd_info->flags & SA_CMDL_UPD_AUTH_IV) {
			sa_copy_iv((void *)&cmdl[upd_info->auth_iv.index],
				   req->auth_iv,
				   (upd_info->auth_iv.size > 8));
	...
		if (upd_info->flags & SA_CMDL_UPD_AUX_KEY) {
			int offset = (req->auth_size & 0xF) ? 4 : 0;

			memcpy(&cmdl[upd_info->aux_key_info.index],
			       &upd_info->aux_key[offset], 16);
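/*
 * Interpretation of the aux-key selection above: when auth_size is not a
 * whole number of 16-byte blocks, the second half of aux_key is patched
 * into the command label, otherwise the first half, mirroring the K1/K2
 * subkey split of CMAC-style MACs. This reading is not stated in the
 * fragments themselves.
 */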
/* in sa_init_sc(): */
	u8 *sc_buf = ctx->sc;
	u16 sc_id = ctx->sc_id;
	...
	if (ad->auth_eng.eng_id) {
	...
			first_engine = ad->enc_eng.eng_id;
	...
			first_engine = ad->auth_eng.eng_id;
	...
		auth_sc_offset = enc_sc_offset + ad->enc_eng.sc_size;
	...
		if (!ad->hash_size)
			return -EINVAL;
		ad->hash_size = roundup(ad->hash_size, 8);
	...
	} else if (ad->enc_eng.eng_id && !ad->auth_eng.eng_id) {
	...
		first_engine = ad->enc_eng.eng_id;
	...
		ad->hash_size = ad->iv_out_size;
	...
	sc_buf[5] = match_data->priv_id;
	sc_buf[6] = match_data->priv;
	...
	if (ad->enc_eng.sc_size) {
	...
			return -EINVAL;
	...
	if (ad->auth_eng.sc_size)
	...
	sa_set_swinfo(first_engine, ctx->sc_id, ctx->sc_phys, 1, 0,
		      SA_SW_INFO_FLAG_EVICT, ad->hash_size, swinfo);

	sa_dump_sc(sc_buf, ctx->sc_phys);
/* in sa_free_ctx_info(): */
	bn = ctx->sc_id - data->sc_id_start;
	spin_lock(&data->scid_lock);
	__clear_bit(bn, data->ctx_bm);
	data->sc_id--;
	spin_unlock(&data->scid_lock);

	if (ctx->sc) {
		dma_pool_free(data->sc_pool, ctx->sc, ctx->sc_phys);
		ctx->sc = NULL;

/* in sa_init_ctx_info(): */
	spin_lock(&data->scid_lock);
	bn = find_first_zero_bit(data->ctx_bm, SA_MAX_NUM_CTX);
	__set_bit(bn, data->ctx_bm);
	data->sc_id++;
	spin_unlock(&data->scid_lock);

	ctx->sc_id = (u16)(data->sc_id_start + bn);

	ctx->sc = dma_pool_alloc(data->sc_pool, GFP_KERNEL, &ctx->sc_phys);
	if (!ctx->sc) {
		dev_err(&data->pdev->dev, "Failed to allocate SC memory\n");
		err = -ENOMEM;
	...
	spin_lock(&data->scid_lock);
	__clear_bit(bn, data->ctx_bm);
	data->sc_id--;
	spin_unlock(&data->scid_lock);
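/*
 * The fragments above amount to a small spinlock-protected bitmap ID
 * allocator. A self-contained sketch of the same pattern (function name
 * and the -ENOSPC check are assumptions, not from the fragments):
 */
static int sa_alloc_ctx_id(struct sa_crypto_data *data)
{
	int bn;

	spin_lock(&data->scid_lock);
	bn = find_first_zero_bit(data->ctx_bm, SA_MAX_NUM_CTX);
	if (bn >= SA_MAX_NUM_CTX) {
		spin_unlock(&data->scid_lock);
		return -ENOSPC;
	}
	__set_bit(bn, data->ctx_bm);
	data->sc_id++;
	spin_unlock(&data->scid_lock);

	return data->sc_id_start + bn;
}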
/* in sa_cipher_cra_exit(): */
	dev_dbg(sa_k3_dev, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
		__func__, tfm, ctx->enc.sc_id, &ctx->enc.sc_phys,
		ctx->dec.sc_id, &ctx->dec.sc_phys);

	sa_free_ctx_info(&ctx->enc, data);
	sa_free_ctx_info(&ctx->dec, data);

	crypto_free_skcipher(ctx->fallback.skcipher);

/* in sa_cipher_cra_init(): */
	const char *name = crypto_tfm_alg_name(&tfm->base);
	...
	ctx->dev_data = data;

	ret = sa_init_ctx_info(&ctx->enc, data);
	...
	ret = sa_init_ctx_info(&ctx->dec, data);
	...
		sa_free_ctx_info(&ctx->enc, data);
	...
	ctx->fallback.skcipher = child;
	...
	dev_dbg(sa_k3_dev, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
		__func__, tfm, ctx->enc.sc_id, &ctx->enc.sc_phys,
		ctx->dec.sc_id, &ctx->dec.sc_phys);
/* in sa_cipher_setkey(): */
	struct crypto_skcipher *child = ctx->fallback.skcipher;
	...
		return -EINVAL;

	ad->enc_eng.eng_id = SA_ENG_ID_EM1;
	ad->enc_eng.sc_size = SA_CTX_ENC_TYPE1_SZ;
	...
	cfg.enc_eng_id = ad->enc_eng.eng_id;
	...
	crypto_skcipher_set_flags(child, tfm->base.crt_flags &
	...
	if (sa_init_sc(&ctx->enc, ctx->dev_data->match_data, key, keylen, NULL, 0,
		       ad, 1, &ctx->enc.epib[1]))
	...
			     (u8 *)ctx->enc.cmdl,
			     &ctx->enc.cmdl_upd_info);
	...
	ctx->enc.cmdl_size = cmdl_len;

	if (sa_init_sc(&ctx->dec, ctx->dev_data->match_data, key, keylen, NULL, 0,
		       ad, 0, &ctx->dec.epib[1]))
	...
	cfg.enc_eng_id = ad->enc_eng.eng_id;
	cmdl_len = sa_format_cmdl_gen(&cfg, (u8 *)ctx->dec.cmdl,
				      &ctx->dec.cmdl_upd_info);
	...
	ctx->dec.cmdl_size = cmdl_len;
	ctx->iv_idx = ad->iv_idx;
	...
	return -EINVAL;

/* in sa_aes_cbc_setkey(): */
	int key_idx = (keylen >> 3) - 2;
	...
		return -EINVAL;

/* in sa_aes_ecb_setkey(): */
	int key_idx = (keylen >> 3) - 2;
	...
		return -EINVAL;
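/*
 * Worked example for the key_idx expression above: (16 >> 3) - 2 = 0 for
 * AES-128, (24 >> 3) - 2 = 1 for AES-192, (32 >> 3) - 2 = 2 for AES-256,
 * i.e. the key length selects a row in the mode-control-instruction
 * tables (see mci_cbc_enc_no_iv_array below); other lengths are
 * rejected with -EINVAL.
 */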
/* in sa_sync_from_device(): */
	if (rxd->mapped_sg[0].dir == DMA_BIDIRECTIONAL)
		sgt = &rxd->mapped_sg[0].sgt;
	else
		sgt = &rxd->mapped_sg[1].sgt;

	dma_sync_sgtable_for_cpu(rxd->ddev, sgt, DMA_FROM_DEVICE);

/* in sa_free_sa_rx_data(): */
	for (i = 0; i < ARRAY_SIZE(rxd->mapped_sg); i++) {
		struct sa_mapped_sg *mapped_sg = &rxd->mapped_sg[i];

		if (mapped_sg->mapped) {
			dma_unmap_sgtable(rxd->ddev, &mapped_sg->sgt,
					  mapped_sg->dir, 0);
			kfree(mapped_sg->split_sg);

/* in sa_aes_dma_in_callback(): */
	req = container_of(rxd->req, struct skcipher_request, base);

	if (req->iv) {
		mdptr = (__be32 *)dmaengine_desc_get_metadata_ptr(rxd->tx_in, &pl,
	...
		result = (u32 *)req->iv;

		for (i = 0; i < (rxd->enc_iv_size / 4); i++)
			result[i] = be32_to_cpu(mdptr[i + rxd->iv_idx]);
/* in sa_run(): */
	struct sa_ctx_info *sa_ctx = req->enc ? &req->ctx->enc : &req->ctx->dec;
	...
	gfp_flags = req->base->flags & CRYPTO_TFM_REQ_MAY_SLEEP ?
	...
		return -ENOMEM;

	if (req->src != req->dst) {
	...
	/*
	 * SA2UL has an interesting feature where the receive DMA channel
	 * ...
	 */
	if (req->size >= 256)
		dma_rx = pdata->dma_rx2;
	else
		dma_rx = pdata->dma_rx1;

	ddev = dmaengine_get_dma_device(pdata->dma_tx);
	rxd->ddev = ddev;

	memcpy(cmdl, sa_ctx->cmdl, sa_ctx->cmdl_size);

	sa_update_cmdl(req, cmdl, &sa_ctx->cmdl_upd_info);

	if (req->type != CRYPTO_ALG_TYPE_AHASH) {
		if (req->enc)
			req->type |=
	...
		else
			req->type |=
	...
	cmdl[sa_ctx->cmdl_size / sizeof(u32)] = req->type;
	...
	src = req->src;
	sg_nents = sg_nents_for_len(src, req->size);

	split_size = req->size;

	mapped_sg = &rxd->mapped_sg[0];
	if (sg_nents == 1 && split_size <= req->src->length) {
		src = &mapped_sg->static_sg;
	...
		sg_set_page(src, sg_page(req->src), split_size,
			    req->src->offset);

		mapped_sg->sgt.sgl = src;
		mapped_sg->sgt.orig_nents = src_nents;
		ret = dma_map_sgtable(ddev, &mapped_sg->sgt, dir_src, 0);
	...
		mapped_sg->dir = dir_src;
		mapped_sg->mapped = true;
	} else {
		mapped_sg->sgt.sgl = req->src;
		mapped_sg->sgt.orig_nents = sg_nents;
		ret = dma_map_sgtable(ddev, &mapped_sg->sgt, dir_src, 0);
	...
		mapped_sg->dir = dir_src;
		mapped_sg->mapped = true;

		ret = sg_split(mapped_sg->sgt.sgl, mapped_sg->sgt.nents, 0, 1,
	...
		src_nents = mapped_sg->sgt.nents;
		src = mapped_sg->sgt.sgl;
	...
		mapped_sg->split_sg = src;
	...
	dma_sync_sgtable_for_device(ddev, &mapped_sg->sgt, DMA_TO_DEVICE);
	...
		dst_nents = sg_nents_for_len(req->dst, req->size);
		mapped_sg = &rxd->mapped_sg[1];

		if (dst_nents == 1 && split_size <= req->dst->length) {
			dst = &mapped_sg->static_sg;
	...
			sg_set_page(dst, sg_page(req->dst), split_size,
				    req->dst->offset);

			mapped_sg->sgt.sgl = dst;
			mapped_sg->sgt.orig_nents = dst_nents;
			ret = dma_map_sgtable(ddev, &mapped_sg->sgt,
	...
			mapped_sg->dir = DMA_FROM_DEVICE;
			mapped_sg->mapped = true;
		} else {
			mapped_sg->sgt.sgl = req->dst;
			mapped_sg->sgt.orig_nents = dst_nents;
			ret = dma_map_sgtable(ddev, &mapped_sg->sgt,
	...
			mapped_sg->dir = DMA_FROM_DEVICE;
			mapped_sg->mapped = true;

			ret = sg_split(mapped_sg->sgt.sgl, mapped_sg->sgt.nents,
	...
			dst_nents = mapped_sg->sgt.nents;
			dst = mapped_sg->sgt.sgl;
	...
			mapped_sg->split_sg = dst;
	...
	rxd->tx_in = dmaengine_prep_slave_sg(dma_rx, dst, dst_nents,
	...
	if (!rxd->tx_in) {
		dev_err(pdata->dev, "IN prep_slave_sg() failed\n");
		ret = -EINVAL;
	...
	rxd->req = (void *)req->base;
	rxd->enc = req->enc;
	rxd->iv_idx = req->ctx->iv_idx;
	rxd->enc_iv_size = sa_ctx->cmdl_upd_info.enc_iv.size;
	rxd->tx_in->callback = req->callback;
	rxd->tx_in->callback_param = rxd;

	tx_out = dmaengine_prep_slave_sg(pdata->dma_tx, src,
	...
		dev_err(pdata->dev, "OUT prep_slave_sg() failed\n");
		ret = -EINVAL;
	...
	sa_prepare_tx_desc(mdptr, (sa_ctx->cmdl_size + (SA_PSDATA_CTX_WORDS *
			   sizeof(u32))), cmdl, sizeof(sa_ctx->epib),
			   sa_ctx->epib);

	ml = sa_ctx->cmdl_size + (SA_PSDATA_CTX_WORDS * sizeof(u32));
	dmaengine_desc_set_metadata_len(tx_out, req->mdata_size);
	...
	dmaengine_submit(rxd->tx_in);
	...
	dma_async_issue_pending(pdata->dma_tx);

	return -EINPROGRESS;
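/*
 * Returning -EINPROGRESS is the kernel crypto API convention for an
 * asynchronous transform: the request completes later through the
 * callback wired to the rx DMA descriptor above (a usage note, not
 * driver text).
 */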
/* in sa_cipher_run(): */
	struct crypto_alg *alg = req->base.tfm->__crt_alg;
	...
	if (!req->cryptlen)
	...
	if (req->cryptlen % alg->cra_blocksize)
		return -EINVAL;
	...
	if (req->cryptlen > SA_MAX_DATA_SZ ||
	    (req->cryptlen >= SA_UNSAFE_DATA_SZ_MIN &&
	     req->cryptlen <= SA_UNSAFE_DATA_SZ_MAX)) {
	...
		skcipher_request_set_tfm(subreq, ctx->fallback.skcipher);
		skcipher_request_set_callback(subreq, req->base.flags,
					      req->base.complete,
					      req->base.data);
		skcipher_request_set_crypt(subreq, req->src, req->dst,
					   req->cryptlen, req->iv);
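	/*
	 * Interpretation of the bounds above: requests larger than
	 * SA_MAX_DATA_SZ, or inside the "unsafe" window
	 * [SA_UNSAFE_DATA_SZ_MIN, SA_UNSAFE_DATA_SZ_MAX], are handed to
	 * the software fallback; the window plausibly brackets the
	 * 256-byte rx1/rx2 channel hand-off seen in sa_run().
	 */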
	...
	sa_req.size = req->cryptlen;
	sa_req.enc_size = req->cryptlen;
	sa_req.src = req->src;
	sa_req.dst = req->dst;
	...
	sa_req.base = &req->base;

/* in sa_encrypt(): */
	return sa_cipher_run(req, req->iv, 1);

/* in sa_decrypt(): */
	return sa_cipher_run(req, req->iv, 0);
/* in sa_sha_dma_in_callback(): */
	req = container_of(rxd->req, struct ahash_request, base);
	...
	mdptr = (__be32 *)dmaengine_desc_get_metadata_ptr(rxd->tx_in, &pl, &ml);
	result = (u32 *)req->result;

/* in zero_message_process(): */
	case SHA1_DIGEST_SIZE:
		memcpy(req->result, sha1_zero_message_hash, sa_digest_size);
		break;
	case SHA256_DIGEST_SIZE:
		memcpy(req->result, sha256_zero_message_hash, sa_digest_size);
		break;
	case SHA512_DIGEST_SIZE:
		memcpy(req->result, sha512_zero_message_hash, sa_digest_size);
		break;
	default:
		return -EINVAL;
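/*
 * The digest of the empty message is a constant, so it can be returned
 * without a hardware round trip; e.g. SHA-256("") is
 * e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855.
 */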
/* in sa_sha_run(): */
	auth_len = req->nbytes;
	...
		struct ahash_request *subreq = &rctx->fallback_req;
	...
		ahash_request_set_tfm(subreq, ctx->fallback.ahash);
		subreq->base.flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
	...
		subreq->nbytes = auth_len;
		subreq->src = req->src;
		subreq->result = req->result;
	...
		subreq->nbytes = 0;
	...
	sa_req.src = req->src;
	sa_req.dst = req->src;
	...
	sa_req.base = &req->base;
/* in sa_sha_setup(): */
	int bs = crypto_shash_blocksize(ctx->shash);
	...
	ad->enc_eng.sc_size = SA_CTX_ENC_TYPE1_SZ;
	ad->auth_eng.eng_id = SA_ENG_ID_AM1;
	ad->auth_eng.sc_size = SA_CTX_AUTH_TYPE2_SZ;

	memset(ctx->authkey, 0, bs);

	cfg.aalg = ad->aalg_id;
	cfg.enc_eng_id = ad->enc_eng.eng_id;
	cfg.auth_eng_id = ad->auth_eng.eng_id;
	...
	ctx->dev_data = dev_get_drvdata(sa_k3_dev);

	if (sa_init_sc(&ctx->enc, ctx->dev_data->match_data, NULL, 0, NULL, 0,
		       ad, 0, &ctx->enc.epib[1]))
	...
			     (u8 *)ctx->enc.cmdl,
			     &ctx->enc.cmdl_upd_info);
	...
	ctx->enc.cmdl_size = cmdl_len;
	...
	return -EINVAL;
/* in sa_sha_cra_init_alg(): */
	ctx->dev_data = data;
	ret = sa_init_ctx_info(&ctx->enc, data);
	...
	ctx->shash = crypto_alloc_shash(alg_base, 0,
	...
	if (IS_ERR(ctx->shash)) {
	...
		return PTR_ERR(ctx->shash);
	...
	ctx->fallback.ahash =
	...
	if (IS_ERR(ctx->fallback.ahash)) {
		dev_err(ctx->dev_data->dev,
	...
		return PTR_ERR(ctx->fallback.ahash);
	...
	dev_dbg(sa_k3_dev, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
		__func__, tfm, ctx->enc.sc_id, &ctx->enc.sc_phys,
		ctx->dec.sc_id, &ctx->dec.sc_phys);
	...
				 crypto_ahash_reqsize(ctx->fallback.ahash));
/* in sa_sha_init(): */
	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback.ahash);
	rctx->fallback_req.base.flags =
		req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;

	return crypto_ahash_init(&rctx->fallback_req);

/* in sa_sha_update(): */
	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback.ahash);
	rctx->fallback_req.base.flags =
		req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
	rctx->fallback_req.nbytes = req->nbytes;
	rctx->fallback_req.src = req->src;

	return crypto_ahash_update(&rctx->fallback_req);

/* in sa_sha_final(): */
	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback.ahash);
	rctx->fallback_req.base.flags =
		req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
	rctx->fallback_req.result = req->result;

	return crypto_ahash_final(&rctx->fallback_req);

/* in sa_sha_finup(): */
	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback.ahash);
	rctx->fallback_req.base.flags =
		req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;

	rctx->fallback_req.nbytes = req->nbytes;
	rctx->fallback_req.src = req->src;
	rctx->fallback_req.result = req->result;

	return crypto_ahash_finup(&rctx->fallback_req);

/* in sa_sha_import(): */
	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback.ahash);
	rctx->fallback_req.base.flags = req->base.flags &
		CRYPTO_TFM_REQ_MAY_SLEEP;

	return crypto_ahash_import(&rctx->fallback_req, in);

/* in sa_sha_export(): */
	struct ahash_request *subreq = &rctx->fallback_req;

	ahash_request_set_tfm(subreq, ctx->fallback.ahash);
	subreq->base.flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
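/*
 * Pattern note: init/update/final/finup/import/export above all
 * delegate to the software fallback ahash, so incremental hashing never
 * touches the accelerator; the hardware path is the one-shot digest
 * handled by sa_sha_run() (an interpretation of the fragments, not
 * driver text).
 */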
/* in sa_sha_cra_exit(): */
	dev_dbg(sa_k3_dev, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
		__func__, tfm, ctx->enc.sc_id, &ctx->enc.sc_phys,
		ctx->dec.sc_id, &ctx->dec.sc_phys);
	...
	sa_free_ctx_info(&ctx->enc, data);

	crypto_free_shash(ctx->shash);
	crypto_free_ahash(ctx->fallback.ahash);
/* in sa_aead_dma_in_callback(): */
	req = container_of(rxd->req, struct aead_request, base);
	...
	start = req->assoclen + req->cryptlen;
	...
	mdptr = (u32 *)dmaengine_desc_get_metadata_ptr(rxd->tx_in, &pl, &ml);
	...
	if (rxd->enc) {
		scatterwalk_map_and_copy(&mdptr[4], req->dst, start, authsize,
	...
	} else {
		start -= authsize;
		scatterwalk_map_and_copy(auth_tag, req->src, start, authsize,
	...
		err = memcmp(&mdptr[4], auth_tag, authsize) ? -EBADMSG : 0;
/* in sa_cra_init_aead(): */
	ctx->dev_data = data;

	ctx->shash = crypto_alloc_shash(hash, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->shash)) {
	...
		return PTR_ERR(ctx->shash);
	...
	ctx->fallback.aead = crypto_alloc_aead(fallback, 0,
	...
	if (IS_ERR(ctx->fallback.aead)) {
	...
		return PTR_ERR(ctx->fallback.aead);
	...
	crypto_aead_set_reqsize(tfm, sizeof(struct aead_request) +
				crypto_aead_reqsize(ctx->fallback.aead));

	ret = sa_init_ctx_info(&ctx->enc, data);
	...
	ret = sa_init_ctx_info(&ctx->dec, data);
	...
		sa_free_ctx_info(&ctx->enc, data);
	...
	dev_dbg(sa_k3_dev, "%s(0x%p) sc-ids(0x%x(0x%pad), 0x%x(0x%pad))\n",
		__func__, tfm, ctx->enc.sc_id, &ctx->enc.sc_phys,
		ctx->dec.sc_id, &ctx->dec.sc_phys);

/* in sa_cra_init_aead_sha1(): */
	return sa_cra_init_aead(tfm, "sha1",
				"authenc(hmac(sha1-ce),cbc(aes-ce))");

/* in sa_cra_init_aead_sha256(): */
	return sa_cra_init_aead(tfm, "sha256",
				"authenc(hmac(sha256-ce),cbc(aes-ce))");

/* in sa_exit_tfm_aead(): */
	crypto_free_shash(ctx->shash);
	crypto_free_aead(ctx->fallback.aead);

	sa_free_ctx_info(&ctx->enc, data);
	sa_free_ctx_info(&ctx->dec, data);
/* in sa_aead_setkey(): */
	...
		return -EINVAL;

	key_idx = (keys.enckeylen >> 3) - 2;
	...
		return -EINVAL;

	ad->ctx = ctx;
	ad->enc_eng.eng_id = SA_ENG_ID_EM1;
	ad->enc_eng.sc_size = SA_CTX_ENC_TYPE1_SZ;
	ad->auth_eng.eng_id = SA_ENG_ID_AM1;
	ad->auth_eng.sc_size = SA_CTX_AUTH_TYPE2_SZ;
	ad->mci_enc = mci_cbc_enc_no_iv_array[key_idx];
	ad->mci_dec = mci_cbc_dec_no_iv_array[key_idx];
	ad->inv_key = true;
	ad->keyed_mac = true;
	ad->ealg_id = SA_EALG_ID_AES_CBC;
	ad->prep_iopad = sa_prepare_iopads;
	...
	cfg.aalg = ad->aalg_id;
	cfg.enc_eng_id = ad->enc_eng.eng_id;
	cfg.auth_eng_id = ad->auth_eng.eng_id;
	...
	if (sa_init_sc(&ctx->enc, ctx->dev_data->match_data, keys.enckey,
	...
		       ad, 1, &ctx->enc.epib[1]))
		return -EINVAL;
	...
			     (u8 *)ctx->enc.cmdl,
			     &ctx->enc.cmdl_upd_info);
	...
		return -EINVAL;

	ctx->enc.cmdl_size = cmdl_len;

	if (sa_init_sc(&ctx->dec, ctx->dev_data->match_data, keys.enckey,
	...
		       ad, 0, &ctx->dec.epib[1]))
		return -EINVAL;
	...
	cmdl_len = sa_format_cmdl_gen(&cfg, (u8 *)ctx->dec.cmdl,
				      &ctx->dec.cmdl_upd_info);
	...
		return -EINVAL;

	ctx->dec.cmdl_size = cmdl_len;

	crypto_aead_clear_flags(ctx->fallback.aead, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(ctx->fallback.aead,
	...
	crypto_aead_setkey(ctx->fallback.aead, key, keylen);

/* in sa_aead_setauthsize(): */
	return crypto_aead_setauthsize(ctx->fallback.aead, authsize);
/* in sa_aead_run(): */
	enc_size = req->cryptlen;
	auth_size = req->assoclen + req->cryptlen;

	if (!enc) {
		enc_size -= crypto_aead_authsize(tfm);
		auth_size -= crypto_aead_authsize(tfm);
	}
	...
		aead_request_set_tfm(subreq, ctx->fallback.aead);
		aead_request_set_callback(subreq, req->base.flags,
					  req->base.complete, req->base.data);
		aead_request_set_crypt(subreq, req->src, req->dst,
				       req->cryptlen, req->iv);
		aead_request_set_ad(subreq, req->assoclen);
	...
	sa_req.enc_offset = req->assoclen;
	...
	sa_req.base = &req->base;
	...
	sa_req.src = req->src;
	sa_req.dst = req->dst;

/* in sa_aead_encrypt(): */
	return sa_aead_run(req, req->iv, 1);

/* in sa_aead_decrypt(): */
	return sa_aead_run(req, req->iv, 0);
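/*
 * Worked example for the size bookkeeping in sa_aead_run() (decrypt,
 * 16-byte tag): with assoclen = 16 and cryptlen = 64 (48 bytes of
 * ciphertext plus the tag), enc_size = 64 - 16 = 48,
 * auth_size = 16 + 64 - 16 = 64 and enc_offset = assoclen = 16, so the
 * tag is excluded from both the decryption and MAC-check ranges.
 */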
/* driver-name excerpts from the algorithm templates: */
	.base.cra_driver_name	= "cbc-aes-sa2ul",
	...
	.base.cra_driver_name	= "ecb-aes-sa2ul",
	...
	.base.cra_driver_name	= "cbc-des3-sa2ul",
	...
	.base.cra_driver_name	= "ecb-des3-sa2ul",
	...
	.cra_driver_name	= "sha1-sa2ul",
	...
	.cra_driver_name	= "sha256-sa2ul",
	...
	.cra_driver_name	= "sha512-sa2ul",
	...
	.cra_driver_name	= "authenc(hmac(sha1),cbc(aes))-sa2ul",
	...
	.cra_driver_name	= "authenc(hmac(sha256),cbc(aes))-sa2ul",
/* in sa_register_algos(): */
	const struct sa_match_data *match_data = dev_data->match_data;
	struct device *dev = dev_data->dev;
	...
		if (!(match_data->supported_algos & BIT(i)))
	...
			"unsupported crypto algorithm (%d)",

/* in sa_init_mem(): */
	struct device *dev = &dev_data->pdev->dev;

	dev_data->sc_pool = dma_pool_create("keystone-sc", dev,
	...
	if (!dev_data->sc_pool) {
	...
		return -ENOMEM;
/* in sa_dma_init(): */
	dd->dma_rx1 = NULL;
	dd->dma_tx = NULL;
	dd->dma_rx2 = NULL;

	ret = dma_coerce_mask_and_coherent(dd->dev, DMA_BIT_MASK(48));
	...
	dd->dma_rx1 = dma_request_chan(dd->dev, "rx1");
	if (IS_ERR(dd->dma_rx1))
		return dev_err_probe(dd->dev, PTR_ERR(dd->dma_rx1),
	...
	dd->dma_rx2 = dma_request_chan(dd->dev, "rx2");
	if (IS_ERR(dd->dma_rx2)) {
		ret = dev_err_probe(dd->dev, PTR_ERR(dd->dma_rx2),
	...
	dd->dma_tx = dma_request_chan(dd->dev, "tx");
	if (IS_ERR(dd->dma_tx)) {
		ret = dev_err_probe(dd->dev, PTR_ERR(dd->dma_tx),
	...
	ret = dmaengine_slave_config(dd->dma_rx1, &cfg);
	if (ret) {
		dev_err(dd->dev, "can't configure IN dmaengine slave: %d\n",
	...
	ret = dmaengine_slave_config(dd->dma_rx2, &cfg);
	if (ret) {
		dev_err(dd->dev, "can't configure IN dmaengine slave: %d\n",
	...
	ret = dmaengine_slave_config(dd->dma_tx, &cfg);
	if (ret) {
		dev_err(dd->dev, "can't configure OUT dmaengine slave: %d\n",
	...
	/* error unwind: release the channels in reverse order */
	dma_release_channel(dd->dma_tx);
	...
	dma_release_channel(dd->dma_rx2);
	...
	dma_release_channel(dd->dma_rx1);
/* in the of_device_id match table: */
	{ .compatible = "ti,j721e-sa2ul", .data = &am654_match_data, },
	{ .compatible = "ti,am654-sa2ul", .data = &am654_match_data, },
	{ .compatible = "ti,am64-sa2ul", .data = &am64_match_data, },
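/*
 * A device tree node matched by this table looks roughly like the
 * following (values illustrative, modelled on the J721E integration;
 * consult the ti,sa2ul DT binding for the authoritative properties):
 *
 *	crypto@4e00000 {
 *		compatible = "ti,j721e-sa2ul";
 *		reg = <0x0 0x4e00000 0x0 0x1200>;
 *		dmas = <&main_udmap 0xc000>, <&main_udmap 0x4000>,
 *		       <&main_udmap 0x4001>;
 *		dma-names = "tx", "rx1", "rx2";
 *	};
 */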
/* in sa_ul_probe(): */
	struct device *dev = &pdev->dev;
	struct device_node *node = dev->of_node;
	...
		return -ENOMEM;

	dev_data->match_data = of_device_get_match_data(dev);
	if (!dev_data->match_data)
		return -ENODEV;
	...
	dev_data->dev = dev;
	dev_data->pdev = pdev;
	dev_data->base = saul_base;
	...
		dev_err(&pdev->dev, "%s: failed to get sync: %d\n", __func__,
	...
	spin_lock_init(&dev_data->scid_lock);
	...
	if (!dev_data->match_data->skip_engine_control) {
	...
	ret = of_platform_populate(node, NULL, NULL, &pdev->dev);
	...
	device_for_each_child(&pdev->dev, &pdev->dev, sa_link_child);
	...
	sa_unregister_algos(&pdev->dev);
	...
	dma_release_channel(dev_data->dma_rx2);
	dma_release_channel(dev_data->dma_rx1);
	dma_release_channel(dev_data->dma_tx);
	...
	dma_pool_destroy(dev_data->sc_pool);
	...
	pm_runtime_put_sync(&pdev->dev);
	pm_runtime_disable(&pdev->dev);

/* in sa_ul_remove(): */
	of_platform_depopulate(&pdev->dev);

	sa_unregister_algos(&pdev->dev);

	dma_release_channel(dev_data->dma_rx2);
	dma_release_channel(dev_data->dma_rx1);
	dma_release_channel(dev_data->dma_tx);

	dma_pool_destroy(dev_data->sc_pool);
	...
	pm_runtime_put_sync(&pdev->dev);
	pm_runtime_disable(&pdev->dev);
/* in the platform_driver definition: */
		.name	= "saul-crypto",