Lines Matching "secure-reg-access"

1 // SPDX-License-Identifier: GPL-2.0
5 * Antoine Tenart <antoine.tenart@free-electrons.com>
10 #include <linux/dma-mapping.h>
45 writel(0, priv->base + EIP197_FLUE_IFC_LUT(i)); in eip197_trc_cache_setupvirt()
51 for (i = 0; i < priv->config.rings; i++) { in eip197_trc_cache_setupvirt()
52 writel(0, priv->base + EIP197_FLUE_CACHEBASE_LO(i)); in eip197_trc_cache_setupvirt()
53 writel(0, priv->base + EIP197_FLUE_CACHEBASE_HI(i)); in eip197_trc_cache_setupvirt()
55 priv->base + EIP197_FLUE_CONFIG(i)); in eip197_trc_cache_setupvirt()
57 writel(0, priv->base + EIP197_FLUE_OFFSETS); in eip197_trc_cache_setupvirt()
58 writel(0, priv->base + EIP197_FLUE_ARC4_OFFSET); in eip197_trc_cache_setupvirt()
69 val = readl(priv->base + EIP197_CS_RAM_CTRL); in eip197_trc_cache_banksel()
72 writel(val, priv->base + EIP197_CS_RAM_CTRL); in eip197_trc_cache_banksel()
89 actbank = min(maxbanks - 1, 0); in eip197_trc_cache_probe()
90 while ((addrhi - addrlo) > 32) { in eip197_trc_cache_probe()
95 priv->base + EIP197_CLASSIFICATION_RAMS + in eip197_trc_cache_probe()
101 priv->base + EIP197_CLASSIFICATION_RAMS + in eip197_trc_cache_probe()
106 val = readl(priv->base + EIP197_CLASSIFICATION_RAMS + in eip197_trc_cache_probe()
132 priv->base + offset); in eip197_trc_cache_clear()
134 val = EIP197_CS_RC_NEXT(i + 1) | EIP197_CS_RC_PREV(i - 1); in eip197_trc_cache_clear()
137 else if (i == cs_rc_max - 1) in eip197_trc_cache_clear()
139 writel(val, priv->base + offset + 4); in eip197_trc_cache_clear()
141 writel(0, priv->base + offset + 8); in eip197_trc_cache_clear()
142 writel(0, priv->base + offset + 12); in eip197_trc_cache_clear()
149 priv->base + EIP197_CLASSIFICATION_RAMS + in eip197_trc_cache_clear()
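
A minimal sketch of the free-chain linking visible in the eip197_trc_cache_clear() lines above: each administration record is linked to its neighbours, and the chain is terminated at the first and last record. RC_NULL_SKETCH and the rc_link_sketch struct are illustrative stand-ins, not the driver's actual null marker or register encoding.

#include <linux/types.h>

#define RC_NULL_SKETCH	0x3ffU		/* hypothetical "no record" index */

struct rc_link_sketch {
	u32 next;
	u32 prev;
};

static struct rc_link_sketch rc_link(u32 i, u32 cs_rc_max)
{
	struct rc_link_sketch l = {
		/* the last record has no successor */
		.next = (i == cs_rc_max - 1) ? RC_NULL_SKETCH : i + 1,
		/* the first record has no predecessor */
		.prev = (i == 0) ? RC_NULL_SKETCH : i - 1,
	};

	return l;
}
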
164 * Enable the record cache memory access and in eip197_trc_cache_init()
167 val = readl(priv->base + EIP197_CS_RAM_CTRL); in eip197_trc_cache_init()
170 writel(val, priv->base + EIP197_CS_RAM_CTRL); in eip197_trc_cache_init()
171 val = readl(priv->base + EIP197_CS_RAM_CTRL); in eip197_trc_cache_init()
175 writel(0, priv->base + EIP197_TRC_ECCCTRL); in eip197_trc_cache_init()
179 * reset. Need data memory access here, not admin access. in eip197_trc_cache_init()
181 val = readl(priv->base + EIP197_TRC_PARAMS); in eip197_trc_cache_init()
183 writel(val, priv->base + EIP197_TRC_PARAMS); in eip197_trc_cache_init()
193 val = readl(priv->base + EIP197_TRC_PARAMS); in eip197_trc_cache_init()
194 /* admin access now */ in eip197_trc_cache_init()
196 writel(val, priv->base + EIP197_TRC_PARAMS); in eip197_trc_cache_init()
202 writel(0, priv->base + EIP197_TRC_ECCCTRL); in eip197_trc_cache_init()
222 cs_ht_sz = __fls(asize - cs_rc_max) - 2; in eip197_trc_cache_init()
226 cs_rc_max = min_t(uint, cs_rc_abs_max, asize - (cs_ht_wc >> 4)); in eip197_trc_cache_init()
231 /* Disable the record cache memory access */ in eip197_trc_cache_init()
232 val = readl(priv->base + EIP197_CS_RAM_CTRL); in eip197_trc_cache_init()
234 writel(val, priv->base + EIP197_CS_RAM_CTRL); in eip197_trc_cache_init()
238 EIP197_TRC_FREECHAIN_TAIL_PTR(cs_rc_max - 1); in eip197_trc_cache_init()
239 writel(val, priv->base + EIP197_TRC_FREECHAIN); in eip197_trc_cache_init()
244 writel(val, priv->base + EIP197_TRC_PARAMS2); in eip197_trc_cache_init()
250 writel(val, priv->base + EIP197_TRC_PARAMS); in eip197_trc_cache_init()
252 dev_info(priv->dev, "TRC init: %dd,%da (%dr,%dh)\n", in eip197_trc_cache_init()
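
The hash-table/record-count sizing visible in lines 222-226 of eip197_trc_cache_init() above can be isolated into a small helper. A minimal sketch, assuming asize (admin RAM size in words), cs_rc_abs_max and the initial record count have already been derived from the probed RAM sizes:

#include <linux/bitops.h>	/* __fls() */
#include <linux/kernel.h>	/* min_t() */
#include <linux/types.h>

static void trc_sizing_sketch(u32 asize, u32 cs_rc_abs_max,
			      u32 *cs_rc_max, u32 *cs_ht_wc)
{
	/* log2 of the hash table size, from the admin RAM not used by records */
	u32 cs_ht_sz = __fls(asize - *cs_rc_max) - 2;

	/* hash table size in 32-bit words */
	*cs_ht_wc = 16 << cs_ht_sz;

	/* refit the record count into whatever admin RAM remains */
	*cs_rc_max = min_t(u32, cs_rc_abs_max, asize - (*cs_ht_wc >> 4));
}
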
261 for (pe = 0; pe < priv->config.pes; pe++) { in eip197_init_firmware()
291 /* Enable access to all IFPP program memories */ in eip197_init_firmware()
301 const u32 *data = (const u32 *)fw->data; in eip197_write_firmware()
305 for (i = 0; i < fw->size / sizeof(u32); i++) in eip197_write_firmware()
307 priv->base + EIP197_CLASSIFICATION_RAMS + i * sizeof(u32)); in eip197_write_firmware()
310 return i - EIP197_FW_TERMINAL_NOPS; in eip197_write_firmware()
327 for (pe = 0; pe < priv->config.pes; pe++) { in poll_fw_ready()
333 pollcnt--; in poll_fw_ready()
336 dev_err(priv->dev, "FW(%d) for PE %d failed to start\n", in poll_fw_ready()
350 for (pe = 0; pe < priv->config.pes; pe++) { in eip197_start_firmware()
351 /* Disable access to all program memory */ in eip197_start_firmware()
358 val = EIP197_PE_ICE_UENG_START_OFFSET((ifppsz - 1) & in eip197_start_firmware()
367 val = EIP197_PE_ICE_UENG_START_OFFSET((ipuesz - 1) & in eip197_start_firmware()
394 if (priv->version == EIP197D_MRVL) in eip197_load_firmwares()
396 else if (priv->version == EIP197B_MRVL || in eip197_load_firmwares()
397 priv->version == EIP197_DEVBRD) in eip197_load_firmwares()
400 return -ENODEV; in eip197_load_firmwares()
404 snprintf(fw_path, 37, "inside-secure/%s/%s", dir, fw_name[i]); in eip197_load_firmwares()
405 ret = firmware_request_nowarn(&fw[i], fw_path, priv->dev); in eip197_load_firmwares()
407 if (minifw || priv->version != EIP197B_MRVL) in eip197_load_firmwares()
414 priv->dev); in eip197_load_firmwares()
424 /* Enable access to IPUE program memories */ in eip197_load_firmwares()
425 for (pe = 0; pe < priv->config.pes; pe++) in eip197_load_firmwares()
432 dev_dbg(priv->dev, "Firmware loaded successfully\n"); in eip197_load_firmwares()
436 ret = -ENODEV; in eip197_load_firmwares()
444 dev_dbg(priv->dev, "Firmware set not (fully) present or init failed, falling back to BCLA mode\n"); in eip197_load_firmwares()
450 dev_dbg(priv->dev, "Firmware load failed.\n"); in eip197_load_firmwares()
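
The firmware lookup in eip197_load_firmwares() above first builds a per-version path under "inside-secure/" and falls back to the bare file name if that fails. A minimal sketch of the pattern; the 37-byte buffer and firmware_request_nowarn() call mirror the matched lines, while the exact fallback call used by the driver is not visible here:

#include <linux/device.h>
#include <linux/firmware.h>
#include <linux/kernel.h>	/* snprintf() */

static int request_eip197_fw_sketch(struct device *dev, const char *dir,
				    const char *fw_name,
				    const struct firmware **fw)
{
	char fw_path[37];
	int ret;

	/* try the per-version directory first, without logging a warning */
	snprintf(fw_path, sizeof(fw_path), "inside-secure/%s/%s", dir, fw_name);
	ret = firmware_request_nowarn(fw, fw_path, dev);
	if (ret)
		/* fall back to the bare file name in the firmware search path */
		ret = firmware_request_nowarn(fw, fw_name, dev);

	return ret;
}
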
460 cd_size_rnd = (priv->config.cd_size + in safexcel_hw_setup_cdesc_rings()
461 (BIT(priv->hwconfig.hwdataw) - 1)) >> in safexcel_hw_setup_cdesc_rings()
462 priv->hwconfig.hwdataw; in safexcel_hw_setup_cdesc_rings()
464 if (priv->flags & SAFEXCEL_HW_EIP197) { in safexcel_hw_setup_cdesc_rings()
466 cd_fetch_cnt = (1 << priv->hwconfig.hwcfsize) / cd_size_rnd; in safexcel_hw_setup_cdesc_rings()
468 (priv->config.pes * EIP197_FETCH_DEPTH)); in safexcel_hw_setup_cdesc_rings()
471 cd_fetch_cnt = ((1 << priv->hwconfig.hwcfsize) / in safexcel_hw_setup_cdesc_rings()
472 cd_size_rnd) - 1; in safexcel_hw_setup_cdesc_rings()
475 for (i = 0; i < priv->config.rings; i++) { in safexcel_hw_setup_cdesc_rings()
477 writel(lower_32_bits(priv->ring[i].cdr.base_dma), in safexcel_hw_setup_cdesc_rings()
479 writel(upper_32_bits(priv->ring[i].cdr.base_dma), in safexcel_hw_setup_cdesc_rings()
482 writel(EIP197_xDR_DESC_MODE_64BIT | (priv->config.cd_offset << 16) | in safexcel_hw_setup_cdesc_rings()
483 priv->config.cd_size, in safexcel_hw_setup_cdesc_rings()
486 (cd_size_rnd << priv->hwconfig.hwdataw)) << 16) | in safexcel_hw_setup_cdesc_rings()
487 (cd_fetch_cnt * priv->config.cd_offset), in safexcel_hw_setup_cdesc_rings()
510 (BIT(priv->hwconfig.hwdataw) - 1)) >> in safexcel_hw_setup_rdesc_rings()
511 priv->hwconfig.hwdataw; in safexcel_hw_setup_rdesc_rings()
512 if (priv->flags & SAFEXCEL_HW_EIP197) { in safexcel_hw_setup_rdesc_rings()
514 rd_fetch_cnt = (1 << priv->hwconfig.hwrfsize) / rd_size_rnd; in safexcel_hw_setup_rdesc_rings()
516 (priv->config.pes * EIP197_FETCH_DEPTH)); in safexcel_hw_setup_rdesc_rings()
519 rd_fetch_cnt = ((1 << priv->hwconfig.hwrfsize) / in safexcel_hw_setup_rdesc_rings()
520 rd_size_rnd) - 1; in safexcel_hw_setup_rdesc_rings()
523 for (i = 0; i < priv->config.rings; i++) { in safexcel_hw_setup_rdesc_rings()
525 writel(lower_32_bits(priv->ring[i].rdr.base_dma), in safexcel_hw_setup_rdesc_rings()
527 writel(upper_32_bits(priv->ring[i].rdr.base_dma), in safexcel_hw_setup_rdesc_rings()
530 writel(EIP197_xDR_DESC_MODE_64BIT | (priv->config.rd_offset << 16) | in safexcel_hw_setup_rdesc_rings()
531 priv->config.rd_size, in safexcel_hw_setup_rdesc_rings()
535 (rd_size_rnd << priv->hwconfig.hwdataw)) << 16) | in safexcel_hw_setup_rdesc_rings()
536 (rd_fetch_cnt * priv->config.rd_offset), in safexcel_hw_setup_rdesc_rings()
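
Both ring-setup functions above use the same arithmetic: round the descriptor size up to the HIA data width (2^hwdataw units) and see how many such descriptors fit in the fetch FIFO. A minimal sketch, with the log2 values taken from the probed hwconfig; the EIP197 branch additionally clamps the result, as the matched lines show:

#include <linux/bits.h>		/* BIT() */
#include <linux/types.h>

static u32 xd_fetch_cnt_sketch(u32 xd_size, u32 hwdataw, u32 hwfsize)
{
	/* descriptor size rounded up to the HW data width, in data-width units */
	u32 xd_size_rnd = (xd_size + (BIT(hwdataw) - 1)) >> hwdataw;

	/* how many rounded descriptors fit in the fetch FIFO (same units) */
	return (1 << hwfsize) / xd_size_rnd;
}
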
564 dev_dbg(priv->dev, "HW init: using %d pipe(s) and %d ring(s)\n", in safexcel_hw_init()
565 priv->config.pes, priv->config.rings); in safexcel_hw_init()
571 if (priv->flags & SAFEXCEL_HW_EIP197) { in safexcel_hw_init()
591 for (pe = 0; pe < priv->config.pes; pe++) { in safexcel_hw_init()
598 if (priv->flags & SAFEXCEL_HW_EIP197) in safexcel_hw_init()
624 if (priv->flags & SAFEXCEL_HW_EIP197) in safexcel_hw_init()
627 GENMASK(priv->config.rings - 1, 0), in safexcel_hw_init()
650 if (priv->flags & SAFEXCEL_HW_EIP197) in safexcel_hw_init()
678 for (i = 0; i < priv->config.rings; i++) { in safexcel_hw_init()
699 writel((EIP197_DEFAULT_RING_SIZE * priv->config.cd_offset) << 2, in safexcel_hw_init()
704 for (i = 0; i < priv->config.rings; i++) { in safexcel_hw_init()
722 writel((EIP197_DEFAULT_RING_SIZE * priv->config.rd_offset) << 2, in safexcel_hw_init()
726 for (pe = 0; pe < priv->config.pes; pe++) { in safexcel_hw_init()
728 writel(EIP197_DxE_THR_CTRL_EN | GENMASK(priv->config.rings - 1, 0), in safexcel_hw_init()
732 writel(EIP197_DxE_THR_CTRL_EN | GENMASK(priv->config.rings - 1, 0), in safexcel_hw_init()
739 if (priv->flags & SAFEXCEL_HW_EIP197) { in safexcel_hw_init()
741 priv->flags |= EIP197_TRC_CACHE; in safexcel_hw_init()
758 int coal = min_t(int, priv->ring[ring].requests, EIP197_MAX_BATCH_SZ); in safexcel_try_push_requests()
778 req = priv->ring[ring].req; in safexcel_dequeue()
779 backlog = priv->ring[ring].backlog; in safexcel_dequeue()
784 spin_lock_bh(&priv->ring[ring].queue_lock); in safexcel_dequeue()
785 backlog = crypto_get_backlog(&priv->ring[ring].queue); in safexcel_dequeue()
786 req = crypto_dequeue_request(&priv->ring[ring].queue); in safexcel_dequeue()
787 spin_unlock_bh(&priv->ring[ring].queue_lock); in safexcel_dequeue()
790 priv->ring[ring].req = NULL; in safexcel_dequeue()
791 priv->ring[ring].backlog = NULL; in safexcel_dequeue()
796 ctx = crypto_tfm_ctx(req->tfm); in safexcel_dequeue()
797 ret = ctx->send(req, ring, &commands, &results); in safexcel_dequeue()
802 backlog->complete(backlog, -EINPROGRESS); in safexcel_dequeue()
818 * the request and the backlog for the next dequeue call (per-ring). in safexcel_dequeue()
820 priv->ring[ring].req = req; in safexcel_dequeue()
821 priv->ring[ring].backlog = backlog; in safexcel_dequeue()
827 spin_lock_bh(&priv->ring[ring].lock); in safexcel_dequeue()
829 priv->ring[ring].requests += nreq; in safexcel_dequeue()
831 if (!priv->ring[ring].busy) { in safexcel_dequeue()
833 priv->ring[ring].busy = true; in safexcel_dequeue()
836 spin_unlock_bh(&priv->ring[ring].lock); in safexcel_dequeue()
839 writel((rdesc * priv->config.rd_offset) << 2, in safexcel_dequeue()
843 writel((cdesc * priv->config.cd_offset) << 2, in safexcel_dequeue()
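
At the end of the dequeue path above, the counts of newly prepared command and result descriptors are converted to byte counts before being written to the rings' PREP_COUNT registers; cd_offset/rd_offset are per-descriptor strides in 32-bit words, hence the extra shift by 2. A minimal sketch of that conversion:

#include <linux/types.h>

static u32 xdr_prep_count_sketch(u32 ndesc, u32 xd_offset_words)
{
	/* descriptors * stride-in-words * 4 bytes per word */
	return (ndesc * xd_offset_words) << 2;
}
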
850 if (likely((!rdesc->descriptor_overflow) && in safexcel_rdesc_check_errors()
851 (!rdesc->buffer_overflow) && in safexcel_rdesc_check_errors()
852 (!rdesc->result_data.error_code))) in safexcel_rdesc_check_errors()
855 if (rdesc->descriptor_overflow) in safexcel_rdesc_check_errors()
856 dev_err(priv->dev, "Descriptor overflow detected"); in safexcel_rdesc_check_errors()
858 if (rdesc->buffer_overflow) in safexcel_rdesc_check_errors()
859 dev_err(priv->dev, "Buffer overflow detected"); in safexcel_rdesc_check_errors()
861 if (rdesc->result_data.error_code & 0x4066) { in safexcel_rdesc_check_errors()
863 dev_err(priv->dev, in safexcel_rdesc_check_errors()
865 rdesc->result_data.error_code); in safexcel_rdesc_check_errors()
866 return -EIO; in safexcel_rdesc_check_errors()
867 } else if (rdesc->result_data.error_code & in safexcel_rdesc_check_errors()
874 return -EINVAL; in safexcel_rdesc_check_errors()
875 } else if (rdesc->result_data.error_code & BIT(9)) { in safexcel_rdesc_check_errors()
877 return -EBADMSG; in safexcel_rdesc_check_errors()
880 /* All other non-fatal errors */ in safexcel_rdesc_check_errors()
881 return -EINVAL; in safexcel_rdesc_check_errors()
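
safexcel_rdesc_check_errors() above maps the result descriptor's error bits onto errno values. A minimal sketch of that mapping; the 0x4066 fatal mask and the BIT(9) authentication-failure bit come from the matched lines, while input_err_mask stands in for the intermediate input-error mask that is elided here:

#include <linux/bits.h>
#include <linux/errno.h>
#include <linux/types.h>

static int rdesc_errno_sketch(u32 error_code, u32 input_err_mask)
{
	if (!error_code)
		return 0;
	if (error_code & 0x4066)
		return -EIO;		/* fatal descriptor/transfer error */
	if (error_code & input_err_mask)
		return -EINVAL;		/* bad input takes priority over auth */
	if (error_code & BIT(9))
		return -EBADMSG;	/* authentication (ICV) failure */

	return -EINVAL;			/* all other non-fatal errors */
}
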
891 priv->ring[ring].rdr_req[i] = req; in safexcel_rdr_req_set()
899 return priv->ring[ring].rdr_req[i]; in safexcel_rdr_req_get()
908 cdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].cdr); in safexcel_complete()
910 dev_err(priv->dev, in safexcel_complete()
914 } while (!cdesc->last_seg); in safexcel_complete()
919 struct safexcel_inv_result *result = req->data; in safexcel_inv_complete()
921 if (error == -EINPROGRESS) in safexcel_inv_complete()
924 result->error = error; in safexcel_inv_complete()
925 complete(&result->completion); in safexcel_inv_complete()
941 cdesc->control_data.type = EIP197_TYPE_EXTENDED; in safexcel_invalidate_cache()
942 cdesc->control_data.options = 0; in safexcel_invalidate_cache()
943 cdesc->control_data.refresh = 0; in safexcel_invalidate_cache()
944 cdesc->control_data.control0 = CONTEXT_CONTROL_INV_TR; in safexcel_invalidate_cache()
959 safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr); in safexcel_invalidate_cache()
984 ctx = crypto_tfm_ctx(req->tfm); in safexcel_handle_result_descriptor()
985 ndesc = ctx->handle_result(priv, ring, req, in safexcel_handle_result_descriptor()
988 dev_err(priv->dev, "failed to handle result (%d)\n", in safexcel_handle_result_descriptor()
995 req->complete(req, ret); in safexcel_handle_result_descriptor()
1006 EIP197_xDR_PROC_xD_COUNT(tot_descs * priv->config.rd_offset), in safexcel_handle_result_descriptor()
1016 spin_lock_bh(&priv->ring[ring].lock); in safexcel_handle_result_descriptor()
1018 priv->ring[ring].requests -= handled; in safexcel_handle_result_descriptor()
1021 if (!priv->ring[ring].requests) in safexcel_handle_result_descriptor()
1022 priv->ring[ring].busy = false; in safexcel_handle_result_descriptor()
1024 spin_unlock_bh(&priv->ring[ring].lock); in safexcel_handle_result_descriptor()
1032 safexcel_dequeue(data->priv, data->ring); in safexcel_dequeue_work()
1043 struct safexcel_crypto_priv *priv = irq_data->priv; in safexcel_irq_ring()
1044 int ring = irq_data->ring, rc = IRQ_NONE; in safexcel_irq_ring()
1061 dev_err(priv->dev, "RDR: fatal error.\n"); in safexcel_irq_ring()
1080 struct safexcel_crypto_priv *priv = irq_data->priv; in safexcel_irq_ring_thread()
1081 int ring = irq_data->ring; in safexcel_irq_ring_thread()
1085 queue_work(priv->ring[ring].workqueue, in safexcel_irq_ring_thread()
1086 &priv->ring[ring].work_data.work); in safexcel_irq_ring_thread()
1103 dev = &pci_pdev->dev; in safexcel_request_ring_irq()
1115 dev = &plf_pdev->dev; in safexcel_request_ring_irq()
1179 safexcel_algs[i]->priv = priv; in safexcel_register_algorithms()
1182 if ((safexcel_algs[i]->algo_mask & priv->hwconfig.algo_flags) != in safexcel_register_algorithms()
1183 safexcel_algs[i]->algo_mask) in safexcel_register_algorithms()
1187 if (safexcel_algs[i]->type == SAFEXCEL_ALG_TYPE_SKCIPHER) in safexcel_register_algorithms()
1188 ret = crypto_register_skcipher(&safexcel_algs[i]->alg.skcipher); in safexcel_register_algorithms()
1189 else if (safexcel_algs[i]->type == SAFEXCEL_ALG_TYPE_AEAD) in safexcel_register_algorithms()
1190 ret = crypto_register_aead(&safexcel_algs[i]->alg.aead); in safexcel_register_algorithms()
1192 ret = crypto_register_ahash(&safexcel_algs[i]->alg.ahash); in safexcel_register_algorithms()
1203 if ((safexcel_algs[j]->algo_mask & priv->hwconfig.algo_flags) != in safexcel_register_algorithms()
1204 safexcel_algs[j]->algo_mask) in safexcel_register_algorithms()
1208 if (safexcel_algs[j]->type == SAFEXCEL_ALG_TYPE_SKCIPHER) in safexcel_register_algorithms()
1209 crypto_unregister_skcipher(&safexcel_algs[j]->alg.skcipher); in safexcel_register_algorithms()
1210 else if (safexcel_algs[j]->type == SAFEXCEL_ALG_TYPE_AEAD) in safexcel_register_algorithms()
1211 crypto_unregister_aead(&safexcel_algs[j]->alg.aead); in safexcel_register_algorithms()
1213 crypto_unregister_ahash(&safexcel_algs[j]->alg.ahash); in safexcel_register_algorithms()
1225 if ((safexcel_algs[i]->algo_mask & priv->hwconfig.algo_flags) != in safexcel_unregister_algorithms()
1226 safexcel_algs[i]->algo_mask) in safexcel_unregister_algorithms()
1230 if (safexcel_algs[i]->type == SAFEXCEL_ALG_TYPE_SKCIPHER) in safexcel_unregister_algorithms()
1231 crypto_unregister_skcipher(&safexcel_algs[i]->alg.skcipher); in safexcel_unregister_algorithms()
1232 else if (safexcel_algs[i]->type == SAFEXCEL_ALG_TYPE_AEAD) in safexcel_unregister_algorithms()
1233 crypto_unregister_aead(&safexcel_algs[i]->alg.aead); in safexcel_unregister_algorithms()
1235 crypto_unregister_ahash(&safexcel_algs[i]->alg.ahash); in safexcel_unregister_algorithms()
1246 if (priv->flags & SAFEXCEL_HW_EIP197) in safexcel_configure()
1253 priv->config.pes = (val >> EIP197_N_PES_OFFSET) & mask; in safexcel_configure()
1255 priv->config.rings = min_t(u32, val & GENMASK(3, 0), max_rings); in safexcel_configure()
1258 mask = BIT(val) - 1; in safexcel_configure()
1260 priv->config.cd_size = (sizeof(struct safexcel_command_desc) / sizeof(u32)); in safexcel_configure()
1261 priv->config.cd_offset = (priv->config.cd_size + mask) & ~mask; in safexcel_configure()
1263 priv->config.rd_size = (sizeof(struct safexcel_result_desc) / sizeof(u32)); in safexcel_configure()
1264 priv->config.rd_offset = (priv->config.rd_size + mask) & ~mask; in safexcel_configure()
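
safexcel_configure() above derives the per-descriptor stride (cd_offset/rd_offset) by rounding the descriptor size, in 32-bit words, up to the hardware data-width alignment, with mask = BIT(align_log2) - 1. A minimal sketch of the rounding:

#include <linux/bits.h>
#include <linux/types.h>

static u32 desc_stride_sketch(u32 desc_size_words, u32 align_log2)
{
	u32 mask = BIT(align_log2) - 1;

	/* round the size up to the next multiple of 2^align_log2 words */
	return (desc_size_words + mask) & ~mask;
}
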
1269 struct safexcel_register_offsets *offsets = &priv->offsets; in safexcel_init_register_offsets()
1271 if (priv->flags & SAFEXCEL_HW_EIP197) { in safexcel_init_register_offsets()
1272 offsets->hia_aic = EIP197_HIA_AIC_BASE; in safexcel_init_register_offsets()
1273 offsets->hia_aic_g = EIP197_HIA_AIC_G_BASE; in safexcel_init_register_offsets()
1274 offsets->hia_aic_r = EIP197_HIA_AIC_R_BASE; in safexcel_init_register_offsets()
1275 offsets->hia_aic_xdr = EIP197_HIA_AIC_xDR_BASE; in safexcel_init_register_offsets()
1276 offsets->hia_dfe = EIP197_HIA_DFE_BASE; in safexcel_init_register_offsets()
1277 offsets->hia_dfe_thr = EIP197_HIA_DFE_THR_BASE; in safexcel_init_register_offsets()
1278 offsets->hia_dse = EIP197_HIA_DSE_BASE; in safexcel_init_register_offsets()
1279 offsets->hia_dse_thr = EIP197_HIA_DSE_THR_BASE; in safexcel_init_register_offsets()
1280 offsets->hia_gen_cfg = EIP197_HIA_GEN_CFG_BASE; in safexcel_init_register_offsets()
1281 offsets->pe = EIP197_PE_BASE; in safexcel_init_register_offsets()
1282 offsets->global = EIP197_GLOBAL_BASE; in safexcel_init_register_offsets()
1284 offsets->hia_aic = EIP97_HIA_AIC_BASE; in safexcel_init_register_offsets()
1285 offsets->hia_aic_g = EIP97_HIA_AIC_G_BASE; in safexcel_init_register_offsets()
1286 offsets->hia_aic_r = EIP97_HIA_AIC_R_BASE; in safexcel_init_register_offsets()
1287 offsets->hia_aic_xdr = EIP97_HIA_AIC_xDR_BASE; in safexcel_init_register_offsets()
1288 offsets->hia_dfe = EIP97_HIA_DFE_BASE; in safexcel_init_register_offsets()
1289 offsets->hia_dfe_thr = EIP97_HIA_DFE_THR_BASE; in safexcel_init_register_offsets()
1290 offsets->hia_dse = EIP97_HIA_DSE_BASE; in safexcel_init_register_offsets()
1291 offsets->hia_dse_thr = EIP97_HIA_DSE_THR_BASE; in safexcel_init_register_offsets()
1292 offsets->hia_gen_cfg = EIP97_HIA_GEN_CFG_BASE; in safexcel_init_register_offsets()
1293 offsets->pe = EIP97_PE_BASE; in safexcel_init_register_offsets()
1294 offsets->global = EIP97_GLOBAL_BASE; in safexcel_init_register_offsets()
1309 struct device *dev = priv->dev; in safexcel_probe_generic()
1313 priv->context_pool = dmam_pool_create("safexcel-context", dev, in safexcel_probe_generic()
1316 if (!priv->context_pool) in safexcel_probe_generic()
1317 return -ENOMEM; in safexcel_probe_generic()
1324 version = readl(priv->base + EIP97_HIA_AIC_BASE + EIP197_HIA_VERSION); in safexcel_probe_generic()
1328 priv->hwconfig.hiaver = EIP197_VERSION_MASK(version); in safexcel_probe_generic()
1330 /* read back byte-swapped, so complement byte swap bits */ in safexcel_probe_generic()
1332 priv->hwconfig.hiaver = EIP197_VERSION_SWAP(version); in safexcel_probe_generic()
1335 version = readl(priv->base + EIP197_HIA_AIC_BASE + in safexcel_probe_generic()
1338 priv->hwconfig.hiaver = EIP197_VERSION_MASK(version); in safexcel_probe_generic()
1339 priv->flags |= SAFEXCEL_HW_EIP197; in safexcel_probe_generic()
1342 /* read back byte-swapped, so complement swap bits */ in safexcel_probe_generic()
1344 priv->hwconfig.hiaver = EIP197_VERSION_SWAP(version); in safexcel_probe_generic()
1345 priv->flags |= SAFEXCEL_HW_EIP197; in safexcel_probe_generic()
1347 return -ENODEV; in safexcel_probe_generic()
1351 /* Now initialize the reg offsets based on the probing info so far */ in safexcel_probe_generic()
1355 * If the version was read byte-swapped, we need to flip the device in safexcel_probe_generic()
1357 * byte-swapped ... in safexcel_probe_generic()
1371 if (((priv->flags & SAFEXCEL_HW_EIP197) && in safexcel_probe_generic()
1373 ((!(priv->flags & SAFEXCEL_HW_EIP197) && in safexcel_probe_generic()
1379 return -ENODEV; in safexcel_probe_generic()
1382 priv->hwconfig.hwver = EIP197_VERSION_MASK(version); in safexcel_probe_generic()
1390 return -ENODEV; in safexcel_probe_generic()
1392 priv->hwconfig.pever = EIP197_VERSION_MASK(version); in safexcel_probe_generic()
1396 if (priv->flags & SAFEXCEL_HW_EIP197) { in safexcel_probe_generic()
1398 priv->hwconfig.hwdataw = (hiaopt >> EIP197_HWDATAW_OFFSET) & in safexcel_probe_generic()
1400 priv->hwconfig.hwcfsize = ((hiaopt >> EIP197_CFSIZE_OFFSET) & in safexcel_probe_generic()
1403 priv->hwconfig.hwrfsize = ((hiaopt >> EIP197_RFSIZE_OFFSET) & in safexcel_probe_generic()
1408 priv->hwconfig.hwdataw = (hiaopt >> EIP197_HWDATAW_OFFSET) & in safexcel_probe_generic()
1410 priv->hwconfig.hwcfsize = (hiaopt >> EIP97_CFSIZE_OFFSET) & in safexcel_probe_generic()
1412 priv->hwconfig.hwrfsize = (hiaopt >> EIP97_RFSIZE_OFFSET) & in safexcel_probe_generic()
1417 priv->hwconfig.algo_flags = readl(EIP197_PE(priv) + in safexcel_probe_generic()
1421 dev_info(priv->dev, "EIP%d:%x(%d)-HIA:%x(%d,%d,%d),PE:%x,alg:%08x\n", in safexcel_probe_generic()
1422 peid, priv->hwconfig.hwver, hwctg, priv->hwconfig.hiaver, in safexcel_probe_generic()
1423 priv->hwconfig.hwdataw, priv->hwconfig.hwcfsize, in safexcel_probe_generic()
1424 priv->hwconfig.hwrfsize, priv->hwconfig.pever, in safexcel_probe_generic()
1425 priv->hwconfig.algo_flags); in safexcel_probe_generic()
1429 if (IS_ENABLED(CONFIG_PCI) && priv->version == EIP197_DEVBRD) { in safexcel_probe_generic()
1431 * Request MSI vectors for global + 1 per ring - in safexcel_probe_generic()
1437 priv->config.rings + 1, in safexcel_probe_generic()
1438 priv->config.rings + 1, in safexcel_probe_generic()
1447 priv->ring = devm_kcalloc(dev, priv->config.rings, in safexcel_probe_generic()
1448 sizeof(*priv->ring), in safexcel_probe_generic()
1450 if (!priv->ring) in safexcel_probe_generic()
1451 return -ENOMEM; in safexcel_probe_generic()
1453 for (i = 0; i < priv->config.rings; i++) { in safexcel_probe_generic()
1459 &priv->ring[i].cdr, in safexcel_probe_generic()
1460 &priv->ring[i].rdr); in safexcel_probe_generic()
1466 priv->ring[i].rdr_req = devm_kcalloc(dev, in safexcel_probe_generic()
1468 sizeof(priv->ring[i].rdr_req), in safexcel_probe_generic()
1470 if (!priv->ring[i].rdr_req) in safexcel_probe_generic()
1471 return -ENOMEM; in safexcel_probe_generic()
1475 return -ENOMEM; in safexcel_probe_generic()
1477 ring_irq->priv = priv; in safexcel_probe_generic()
1478 ring_irq->ring = i; in safexcel_probe_generic()
1491 priv->ring[i].work_data.priv = priv; in safexcel_probe_generic()
1492 priv->ring[i].work_data.ring = i; in safexcel_probe_generic()
1493 INIT_WORK(&priv->ring[i].work_data.work, in safexcel_probe_generic()
1497 priv->ring[i].workqueue = in safexcel_probe_generic()
1499 if (!priv->ring[i].workqueue) in safexcel_probe_generic()
1500 return -ENOMEM; in safexcel_probe_generic()
1502 priv->ring[i].requests = 0; in safexcel_probe_generic()
1503 priv->ring[i].busy = false; in safexcel_probe_generic()
1505 crypto_init_queue(&priv->ring[i].queue, in safexcel_probe_generic()
1508 spin_lock_init(&priv->ring[i].lock); in safexcel_probe_generic()
1509 spin_lock_init(&priv->ring[i].queue_lock); in safexcel_probe_generic()
1512 atomic_set(&priv->ring_used, 0); in safexcel_probe_generic()
1533 for (i = 0; i < priv->config.rings; i++) { in safexcel_hw_reset_rings()
1553 struct device *dev = &pdev->dev; in safexcel_probe()
1559 return -ENOMEM; in safexcel_probe()
1561 priv->dev = dev; in safexcel_probe()
1562 priv->version = (enum safexcel_eip_version)of_device_get_match_data(dev); in safexcel_probe()
1566 priv->base = devm_platform_ioremap_resource(pdev, 0); in safexcel_probe()
1567 if (IS_ERR(priv->base)) { in safexcel_probe()
1569 return PTR_ERR(priv->base); in safexcel_probe()
1572 priv->clk = devm_clk_get(&pdev->dev, NULL); in safexcel_probe()
1573 ret = PTR_ERR_OR_ZERO(priv->clk); in safexcel_probe()
1575 if (ret != -ENOENT) { in safexcel_probe()
1579 ret = clk_prepare_enable(priv->clk); in safexcel_probe()
1586 priv->reg_clk = devm_clk_get(&pdev->dev, "reg"); in safexcel_probe()
1587 ret = PTR_ERR_OR_ZERO(priv->reg_clk); in safexcel_probe()
1589 if (ret != -ENOENT) { in safexcel_probe()
1593 ret = clk_prepare_enable(priv->reg_clk); in safexcel_probe()
1595 dev_err(dev, "unable to enable reg clk (%d)\n", ret); in safexcel_probe()
1612 clk_disable_unprepare(priv->reg_clk); in safexcel_probe()
1614 clk_disable_unprepare(priv->clk); in safexcel_probe()
1626 clk_disable_unprepare(priv->clk); in safexcel_remove()
1628 for (i = 0; i < priv->config.rings; i++) in safexcel_remove()
1629 destroy_workqueue(priv->ring[i].workqueue); in safexcel_remove()
1636 .compatible = "inside-secure,safexcel-eip97ies",
1640 .compatible = "inside-secure,safexcel-eip197b",
1644 .compatible = "inside-secure,safexcel-eip197d",
1649 .compatible = "inside-secure,safexcel-eip97",
1653 .compatible = "inside-secure,safexcel-eip197",
1663 .name = "crypto-safexcel",
1670 /* PCIE devices - i.e. Inside Secure development boards */
1675 struct device *dev = &pdev->dev; in safexcel_pci_probe()
1682 ent->vendor, ent->device, ent->subvendor, in safexcel_pci_probe()
1683 ent->subdevice, ent->driver_data); in safexcel_pci_probe()
1687 return -ENOMEM; in safexcel_pci_probe()
1689 priv->dev = dev; in safexcel_pci_probe()
1690 priv->version = (enum safexcel_eip_version)ent->driver_data; in safexcel_pci_probe()
1707 priv->base = pcim_iomap_table(pdev)[0]; in safexcel_pci_probe()
1709 if (priv->version == EIP197_DEVBRD) { in safexcel_pci_probe()
1710 dev_dbg(dev, "Device identified as FPGA based development board - applying HW reset\n"); in safexcel_pci_probe()
1740 return -ENODEV; in safexcel_pci_probe()
1745 writel(1, priv->base + EIP197_XLX_GPIO_BASE); in safexcel_pci_probe()
1748 writel(0, priv->base + EIP197_XLX_GPIO_BASE); in safexcel_pci_probe()
1767 for (i = 0; i < priv->config.rings; i++) in safexcel_pci_remove()
1768 destroy_workqueue(priv->ring[i].workqueue); in safexcel_pci_remove()
1785 .name = "crypto-safexcel",
1794 int pcireg_rc = -EINVAL; /* Default safe value */
1797 int ofreg_rc = -EINVAL; /* Default safe value */
1820 return -EINVAL; in safexcel_init()
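
safexcel_init() above registers the PCI and OF/platform drivers independently, with each return code defaulting to -EINVAL when the corresponding bus support is compiled out, and only fails module init when both registrations failed. A minimal sketch of that decision:

#include <linux/errno.h>

static int init_result_sketch(int pcireg_rc, int ofreg_rc)
{
	/* succeed if at least one bus driver registered */
	if (pcireg_rc && ofreg_rc)
		return -EINVAL;

	return 0;
}
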
1843 MODULE_AUTHOR("Antoine Tenart <antoine.tenart@free-electrons.com>");