/Linux-v5.15/drivers/crypto/qat/qat_common/

adf_init.c
  59  int adf_dev_init(struct adf_accel_dev *accel_dev)   in adf_dev_init() argument
  63  struct adf_hw_device_data *hw_data = accel_dev->hw_device;   in adf_dev_init()
  67  dev_err(&GET_DEV(accel_dev),   in adf_dev_init()
  72  if (!test_bit(ADF_STATUS_CONFIGURED, &accel_dev->status)) {   in adf_dev_init()
  73  dev_err(&GET_DEV(accel_dev), "Device not configured\n");   in adf_dev_init()
  77  if (adf_init_etr_data(accel_dev)) {   in adf_dev_init()
  78  dev_err(&GET_DEV(accel_dev), "Failed initialize etr\n");   in adf_dev_init()
  82  if (hw_data->init_admin_comms && hw_data->init_admin_comms(accel_dev)) {   in adf_dev_init()
  83  dev_err(&GET_DEV(accel_dev), "Failed initialize admin comms\n");   in adf_dev_init()
  87  if (hw_data->init_arb && hw_data->init_arb(accel_dev)) {   in adf_dev_init()
  [all …]
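
The hits at lines 82 and 87 show how adf_dev_init() drives per-generation hardware ops: each callback in the device's ops table is invoked only if the hardware variant actually provides it, and a non-zero return aborts initialization. Below is a minimal, self-contained sketch of that idiom in plain C; the struct and function names are simplified stand-ins, not the real QAT types.

#include <stdio.h>

/* Simplified stand-in for a per-device ops table in the style of
 * adf_hw_device_data.  Ops a given hardware generation does not need
 * are simply left NULL. */
struct hw_ops {
	int (*init_admin_comms)(void *dev);
	int (*init_arb)(void *dev);
};

static int dev_init(void *dev, const struct hw_ops *ops)
{
	/* Call each op only if the variant provides it, and treat a
	 * non-zero return as a hard failure, mirroring adf_dev_init(). */
	if (ops->init_admin_comms && ops->init_admin_comms(dev)) {
		fprintf(stderr, "Failed to initialize admin comms\n");
		return -1;
	}
	if (ops->init_arb && ops->init_arb(dev)) {
		fprintf(stderr, "Failed to initialize arbiter\n");
		return -1;
	}
	return 0;
}

static int dummy_arb_init(void *dev) { (void)dev; return 0; }

int main(void)
{
	/* This variant has no admin-comms op; the NULL check skips it. */
	struct hw_ops ops = { .init_admin_comms = NULL, .init_arb = dummy_arb_init };

	return dev_init(NULL, &ops);
}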

adf_vf_isr.c
  28  struct adf_accel_dev *accel_dev;   member
  32  void adf_enable_pf2vf_interrupts(struct adf_accel_dev *accel_dev)   in adf_enable_pf2vf_interrupts() argument
  34  struct adf_accel_pci *pci_info = &accel_dev->accel_pci_dev;   in adf_enable_pf2vf_interrupts()
  35  struct adf_hw_device_data *hw_data = accel_dev->hw_device;   in adf_enable_pf2vf_interrupts()
  42  void adf_disable_pf2vf_interrupts(struct adf_accel_dev *accel_dev)   in adf_disable_pf2vf_interrupts() argument
  44  struct adf_accel_pci *pci_info = &accel_dev->accel_pci_dev;   in adf_disable_pf2vf_interrupts()
  45  struct adf_hw_device_data *hw_data = accel_dev->hw_device;   in adf_disable_pf2vf_interrupts()
  53  static int adf_enable_msi(struct adf_accel_dev *accel_dev)   in adf_enable_msi() argument
  55  struct adf_accel_pci *pci_dev_info = &accel_dev->accel_pci_dev;   in adf_enable_msi()
  59  dev_err(&GET_DEV(accel_dev),   in adf_enable_msi()
  [all …]

adf_common_drv.h
  44  int (*event_hld)(struct adf_accel_dev *accel_dev,
  60  int adf_dev_init(struct adf_accel_dev *accel_dev);
  61  int adf_dev_start(struct adf_accel_dev *accel_dev);
  62  void adf_dev_stop(struct adf_accel_dev *accel_dev);
  63  void adf_dev_shutdown(struct adf_accel_dev *accel_dev);
  65  int adf_iov_putmsg(struct adf_accel_dev *accel_dev, u32 msg, u8 vf_nr);
  66  void adf_pf2vf_notify_restarting(struct adf_accel_dev *accel_dev);
  67  int adf_enable_vf2pf_comms(struct adf_accel_dev *accel_dev);
  77  int adf_devmgr_add_dev(struct adf_accel_dev *accel_dev,
  79  void adf_devmgr_rm_dev(struct adf_accel_dev *accel_dev,
  [all …]

adf_accel_engine.c
  10  static int adf_ae_fw_load_images(struct adf_accel_dev *accel_dev, void *fw_addr,   in adf_ae_fw_load_images() argument
  13  struct adf_fw_loader_data *loader_data = accel_dev->fw_loader;   in adf_ae_fw_load_images()
  14  struct adf_hw_device_data *hw_device = accel_dev->hw_device;   in adf_ae_fw_load_images()
  29  dev_err(&GET_DEV(accel_dev),   in adf_ae_fw_load_images()
  34  dev_err(&GET_DEV(accel_dev),   in adf_ae_fw_load_images()
  39  dev_err(&GET_DEV(accel_dev),   in adf_ae_fw_load_images()
  49  adf_ae_fw_release(accel_dev);   in adf_ae_fw_load_images()
  53  int adf_ae_fw_load(struct adf_accel_dev *accel_dev)   in adf_ae_fw_load() argument
  55  struct adf_fw_loader_data *loader_data = accel_dev->fw_loader;   in adf_ae_fw_load()
  56  struct adf_hw_device_data *hw_device = accel_dev->hw_device;   in adf_ae_fw_load()
  [all …]

adf_pf2vf_msg.c
  14  static void __adf_enable_vf2pf_interrupts(struct adf_accel_dev *accel_dev,   in __adf_enable_vf2pf_interrupts() argument
  17  struct adf_hw_device_data *hw_data = accel_dev->hw_device;   in __adf_enable_vf2pf_interrupts()
  19  &GET_BARS(accel_dev)[hw_data->get_misc_bar_id(hw_data)];   in __adf_enable_vf2pf_interrupts()
  38  void adf_enable_vf2pf_interrupts(struct adf_accel_dev *accel_dev, u32 vf_mask)   in adf_enable_vf2pf_interrupts() argument
  42  spin_lock_irqsave(&accel_dev->pf.vf2pf_ints_lock, flags);   in adf_enable_vf2pf_interrupts()
  43  __adf_enable_vf2pf_interrupts(accel_dev, vf_mask);   in adf_enable_vf2pf_interrupts()
  44  spin_unlock_irqrestore(&accel_dev->pf.vf2pf_ints_lock, flags);   in adf_enable_vf2pf_interrupts()
  47  static void __adf_disable_vf2pf_interrupts(struct adf_accel_dev *accel_dev,   in __adf_disable_vf2pf_interrupts() argument
  50  struct adf_hw_device_data *hw_data = accel_dev->hw_device;   in __adf_disable_vf2pf_interrupts()
  52  &GET_BARS(accel_dev)[hw_data->get_misc_bar_id(hw_data)];   in __adf_disable_vf2pf_interrupts()
  [all …]
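
Lines 14 to 44 show the usual kernel split between a double-underscore helper that writes the interrupt-mask CSR with no locking and a public wrapper that takes pf.vf2pf_ints_lock with spin_lock_irqsave(), so the mask can be updated safely from both process and interrupt context. A hedged sketch of that shape, with a hypothetical state struct standing in for the real accel_dev fields:

#include <linux/spinlock.h>
#include <linux/types.h>

/* Hypothetical state; the real driver guards the VF2PF mask CSR with
 * accel_dev->pf.vf2pf_ints_lock in the same way. */
struct vf2pf_state {
	spinlock_t lock;	/* must be set up with spin_lock_init() */
	u32 mask_shadow;
};

/* Unlocked helper: the caller must already hold st->lock. */
static void __vf2pf_enable(struct vf2pf_state *st, u32 vf_mask)
{
	st->mask_shadow |= vf_mask;
	/* ...write the updated mask to the device CSR here... */
}

/* Locked wrapper, usable from both process and interrupt context because
 * it saves and restores the interrupt flags around the critical section. */
static void vf2pf_enable(struct vf2pf_state *st, u32 vf_mask)
{
	unsigned long flags;

	spin_lock_irqsave(&st->lock, flags);
	__vf2pf_enable(st, vf_mask);
	spin_unlock_irqrestore(&st->lock, flags);
}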

adf_sriov.c
  40  static int adf_enable_sriov(struct adf_accel_dev *accel_dev)   in adf_enable_sriov() argument
  42  struct pci_dev *pdev = accel_to_pci_dev(accel_dev);   in adf_enable_sriov()
  44  struct adf_hw_device_data *hw_data = accel_dev->hw_device;   in adf_enable_sriov()
  48  for (i = 0, vf_info = accel_dev->pf.vf_info; i < totalvfs;   in adf_enable_sriov()
  51  vf_info->accel_dev = accel_dev;   in adf_enable_sriov()
  62  hw_data->configure_iov_threads(accel_dev, true);   in adf_enable_sriov()
  66  adf_enable_vf2pf_interrupts(accel_dev, BIT_ULL(totalvfs) - 1);   in adf_enable_sriov()
  85  void adf_disable_sriov(struct adf_accel_dev *accel_dev)   in adf_disable_sriov() argument
  87  struct adf_hw_device_data *hw_data = accel_dev->hw_device;   in adf_disable_sriov()
  88  int totalvfs = pci_sriov_get_totalvfs(accel_to_pci_dev(accel_dev));   in adf_disable_sriov()
  [all …]
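
The hit at line 66 enables VF2PF interrupts for every VF in a single call by building the mask BIT_ULL(totalvfs) - 1, i.e. a bitmap with one bit set per virtual function. The same arithmetic in standalone C, using an example VF count (16 is illustrative, not a value taken from the driver):

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	unsigned int totalvfs = 16;                 /* example value */
	uint64_t vf_mask = (1ULL << totalvfs) - 1;  /* BIT_ULL(totalvfs) - 1 */

	/* Prints vf_mask = 0xffff, i.e. bits 0..15 set, one per VF. */
	printf("vf_mask = 0x%llx\n", (unsigned long long)vf_mask);
	return 0;
}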

adf_isr.c
  26  static int adf_enable_msix(struct adf_accel_dev *accel_dev)   in adf_enable_msix() argument
  28  struct adf_accel_pci *pci_dev_info = &accel_dev->accel_pci_dev;   in adf_enable_msix()
  29  struct adf_hw_device_data *hw_data = accel_dev->hw_device;   in adf_enable_msix()
  33  hw_data->set_msix_rttable(accel_dev);   in adf_enable_msix()
  36  if (!accel_dev->pf.vf_info) {   in adf_enable_msix()
  50  dev_err(&GET_DEV(accel_dev), "Failed to enable MSI-X IRQ(s)\n");   in adf_enable_msix()
  64  struct adf_hw_csr_ops *csr_ops = GET_CSR_OPS(bank->accel_dev);   in adf_msix_isr_bundle()
  74  struct adf_accel_dev *accel_dev = dev_ptr;   in adf_msix_isr_ae() local
  78  if (accel_dev->pf.vf_info) {   in adf_msix_isr_ae()
  79  struct adf_hw_device_data *hw_data = accel_dev->hw_device;   in adf_msix_isr_ae()
  [all …]
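
adf_enable_msix() chooses how many MSI-X vectors to request based on whether the device is hosting VFs (the pf.vf_info check at line 36) and then reports "Failed to enable MSI-X IRQ(s)" if the allocation does not succeed. Purely as an illustration of exact-count MSI-X allocation with the generic PCI helpers, and not a claim about the driver's actual code, a sketch might look like this (the helper name and the caller-supplied vector count are hypothetical):

#include <linux/pci.h>

static int example_enable_msix(struct pci_dev *pdev, unsigned int nvec)
{
	int ret;

	/* Request exactly nvec MSI-X vectors or fail. */
	ret = pci_alloc_irq_vectors(pdev, nvec, nvec, PCI_IRQ_MSIX);
	if (ret < 0) {
		dev_err(&pdev->dev, "Failed to enable MSI-X IRQ(s)\n");
		return ret;
	}

	/* Vector n can later be translated to a Linux IRQ number with
	 * pci_irq_vector(pdev, n) when calling request_irq(). */
	return 0;
}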

adf_aer.c
  17  struct adf_accel_dev *accel_dev = adf_devmgr_pci_to_accel_dev(pdev);   in adf_error_detected() local
  20  if (!accel_dev) {   in adf_error_detected()
  36  struct adf_accel_dev *accel_dev;   member
  41  void adf_reset_sbr(struct adf_accel_dev *accel_dev)   in adf_reset_sbr() argument
  43  struct pci_dev *pdev = accel_to_pci_dev(accel_dev);   in adf_reset_sbr()
  51  dev_info(&GET_DEV(accel_dev),   in adf_reset_sbr()
  54  dev_info(&GET_DEV(accel_dev), "Secondary bus reset\n");   in adf_reset_sbr()
  66  void adf_reset_flr(struct adf_accel_dev *accel_dev)   in adf_reset_flr() argument
  68  pcie_flr(accel_to_pci_dev(accel_dev));   in adf_reset_flr()
  72  void adf_dev_restore(struct adf_accel_dev *accel_dev)   in adf_dev_restore() argument
  [all …]

qat_crypto.c
  21  adf_dev_put(inst->accel_dev);   in qat_crypto_put_instance()
  24  static int qat_crypto_free_instances(struct adf_accel_dev *accel_dev)   in qat_crypto_free_instances() argument
  29  list_for_each_entry_safe(inst, tmp, &accel_dev->crypto_list, list) {   in qat_crypto_free_instances()
  53  struct adf_accel_dev *accel_dev = NULL, *tmp_dev;   in qat_crypto_get_instance_node() local
  66  accel_dev = tmp_dev;   in qat_crypto_get_instance_node()
  72  if (!accel_dev) {   in qat_crypto_get_instance_node()
  78  accel_dev = tmp_dev;   in qat_crypto_get_instance_node()
  84  if (!accel_dev)   in qat_crypto_get_instance_node()
  88  list_for_each_entry(tmp_inst, &accel_dev->crypto_list, list) {   in qat_crypto_get_instance_node()
  98  if (adf_dev_get(accel_dev)) {   in qat_crypto_get_instance_node()
  [all …]

adf_ctl_drv.c
  107  static int adf_add_key_value_data(struct adf_accel_dev *accel_dev,   in adf_add_key_value_data() argument
  115  if (adf_cfg_add_key_value_param(accel_dev, section,   in adf_add_key_value_data()
  118  dev_err(&GET_DEV(accel_dev),   in adf_add_key_value_data()
  123  if (adf_cfg_add_key_value_param(accel_dev, section,   in adf_add_key_value_data()
  126  dev_err(&GET_DEV(accel_dev),   in adf_add_key_value_data()
  134  static int adf_copy_key_value_data(struct adf_accel_dev *accel_dev,   in adf_copy_key_value_data() argument
  146  dev_err(&GET_DEV(accel_dev),   in adf_copy_key_value_data()
  151  if (adf_cfg_section_add(accel_dev, section.name)) {   in adf_copy_key_value_data()
  152  dev_err(&GET_DEV(accel_dev),   in adf_copy_key_value_data()
  162  dev_err(&GET_DEV(accel_dev),   in adf_copy_key_value_data()
  [all …]

adf_admin.c
  109  static int adf_put_admin_msg_sync(struct adf_accel_dev *accel_dev, u32 ae,   in adf_put_admin_msg_sync() argument
  114  struct adf_admin_comms *admin = accel_dev->admin;   in adf_put_admin_msg_sync()
  136  dev_err(&GET_DEV(accel_dev),   in adf_put_admin_msg_sync()
  151  static int adf_send_admin(struct adf_accel_dev *accel_dev,   in adf_send_admin() argument
  159  if (adf_put_admin_msg_sync(accel_dev, ae, req, resp) ||   in adf_send_admin()
  166  static int adf_init_ae(struct adf_accel_dev *accel_dev)   in adf_init_ae() argument
  170  struct adf_hw_device_data *hw_device = accel_dev->hw_device;   in adf_init_ae()
  177  return adf_send_admin(accel_dev, &req, &resp, ae_mask);   in adf_init_ae()
  180  static int adf_set_fw_constants(struct adf_accel_dev *accel_dev)   in adf_set_fw_constants() argument
  184  struct adf_hw_device_data *hw_device = accel_dev->hw_device;   in adf_set_fw_constants()
  [all …]
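
adf_put_admin_msg_sync() writes a request into the admin mailbox and then has to wait for the firmware to consume it before the response can be read back. Purely as an illustration of that kind of bounded wait, and not the driver's actual implementation, the generic iopoll helper can poll a busy bit with a timeout; the register pointer and busy bit below are hypothetical:

#include <linux/iopoll.h>
#include <linux/io.h>
#include <linux/bits.h>
#include <linux/types.h>

/* Hypothetical mailbox busy flag. */
#define EX_MAILBOX_BUSY	BIT(0)

static int example_wait_mailbox_idle(void __iomem *mailbox_csr)
{
	u32 val;

	/* Re-read the register every 100 us, give up after 5 ms. */
	return readl_poll_timeout(mailbox_csr, val,
				  !(val & EX_MAILBOX_BUSY), 100, 5000);
}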

adf_transport.c
  58  struct adf_hw_csr_ops *csr_ops = GET_CSR_OPS(bank->accel_dev);   in adf_enable_ring_irq()
  71  struct adf_hw_csr_ops *csr_ops = GET_CSR_OPS(bank->accel_dev);   in adf_disable_ring_irq()
  82  struct adf_hw_csr_ops *csr_ops = GET_CSR_OPS(ring->bank->accel_dev);   in adf_send_message()
  106  struct adf_hw_csr_ops *csr_ops = GET_CSR_OPS(ring->bank->accel_dev);   in adf_handle_response()
  130  struct adf_hw_csr_ops *csr_ops = GET_CSR_OPS(ring->bank->accel_dev);   in adf_configure_tx_ring()
  141  struct adf_hw_csr_ops *csr_ops = GET_CSR_OPS(ring->bank->accel_dev);   in adf_configure_rx_ring()
  155  struct adf_accel_dev *accel_dev = bank->accel_dev;   in adf_init_ring() local
  156  struct adf_hw_device_data *hw_data = accel_dev->hw_device;   in adf_init_ring()
  157  struct adf_hw_csr_ops *csr_ops = GET_CSR_OPS(accel_dev);   in adf_init_ring()
  163  ring->base_addr = dma_alloc_coherent(&GET_DEV(accel_dev),   in adf_init_ring()
  [all …]
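
The hit at line 163 shows that each transport ring's base memory is allocated with dma_alloc_coherent(), so the CPU and the accelerator share a coherent view of the ring without explicit cache maintenance. A hedged sketch of that allocation and its matching teardown; the struct, field names, and size handling are illustrative, not the real adf_etr_ring_data:

#include <linux/dma-mapping.h>
#include <linux/device.h>
#include <linux/gfp.h>
#include <linux/errno.h>

/* Hypothetical, heavily simplified ring descriptor. */
struct example_ring {
	void *base_addr;
	dma_addr_t dma_addr;
	size_t size;
};

static int example_ring_alloc(struct device *dev, struct example_ring *ring,
			      size_t size)
{
	/* Coherent DMA allocation: returns both a CPU virtual address and
	 * the bus address the device should be programmed with. */
	ring->base_addr = dma_alloc_coherent(dev, size, &ring->dma_addr,
					     GFP_KERNEL);
	if (!ring->base_addr)
		return -ENOMEM;
	ring->size = size;
	return 0;
}

static void example_ring_free(struct device *dev, struct example_ring *ring)
{
	dma_free_coherent(dev, ring->size, ring->base_addr, ring->dma_addr);
	ring->base_addr = NULL;
}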

adf_dev_mgr.c
  139  int adf_devmgr_add_dev(struct adf_accel_dev *accel_dev,   in adf_devmgr_add_dev() argument
  146  dev_err(&GET_DEV(accel_dev), "Only support up to %d devices\n",   in adf_devmgr_add_dev()
  152  atomic_set(&accel_dev->ref_count, 0);   in adf_devmgr_add_dev()
  155  if (!accel_dev->is_vf || !pf) {   in adf_devmgr_add_dev()
  162  if (ptr == accel_dev) {   in adf_devmgr_add_dev()
  168  list_add_tail(&accel_dev->list, &accel_table);   in adf_devmgr_add_dev()
  169  accel_dev->accel_id = adf_find_free_id();   in adf_devmgr_add_dev()
  170  if (accel_dev->accel_id > ADF_MAX_DEVICES) {   in adf_devmgr_add_dev()
  181  map->id = accel_dev->accel_id;   in adf_devmgr_add_dev()
  185  } else if (accel_dev->is_vf && pf) {   in adf_devmgr_add_dev()
  [all …]

adf_accel_devices.h
  150  u32 (*get_accel_cap)(struct adf_accel_dev *accel_dev);
  160  int (*alloc_irq)(struct adf_accel_dev *accel_dev);
  161  void (*free_irq)(struct adf_accel_dev *accel_dev);
  162  void (*enable_error_correction)(struct adf_accel_dev *accel_dev);
  163  int (*init_admin_comms)(struct adf_accel_dev *accel_dev);
  164  void (*exit_admin_comms)(struct adf_accel_dev *accel_dev);
  165  int (*send_admin_init)(struct adf_accel_dev *accel_dev);
  166  int (*init_arb)(struct adf_accel_dev *accel_dev);
  167  void (*exit_arb)(struct adf_accel_dev *accel_dev);
  169  void (*disable_iov)(struct adf_accel_dev *accel_dev);
  [all …]
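
Lines 150 to 169 belong to adf_hw_device_data, the per-generation ops table: a single structure of function pointers describes how each QAT generation handles IRQs, admin comms, arbitration, and IOV, and the common code calls through it instead of branching on device IDs. A reduced, hypothetical version of that shape (only a few ops, with stand-in names and a void pointer in place of the real device type):

/* Hypothetical, heavily reduced ops table in the style of
 * adf_hw_device_data.  Each hardware generation fills in the callbacks
 * it supports; common code calls through the pointers. */
struct example_hw_data {
	int  (*alloc_irq)(void *accel_dev);
	void (*free_irq)(void *accel_dev);
	int  (*init_admin_comms)(void *accel_dev);
	void (*exit_admin_comms)(void *accel_dev);
	int  (*init_arb)(void *accel_dev);
	void (*exit_arb)(void *accel_dev);
};

Common code such as adf_dev_init() then invokes these through the pointer-and-NULL-check idiom sketched earlier, which is what lets one driver core serve several hardware generations.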

adf_cfg.c
  67  int adf_cfg_dev_add(struct adf_accel_dev *accel_dev)   in adf_cfg_dev_add() argument
  76  accel_dev->cfg = dev_cfg_data;   in adf_cfg_dev_add()
  80  accel_dev->debugfs_dir,   in adf_cfg_dev_add()
  89  void adf_cfg_del_all(struct adf_accel_dev *accel_dev)   in adf_cfg_del_all() argument
  91  struct adf_cfg_device_data *dev_cfg_data = accel_dev->cfg;   in adf_cfg_del_all()
  96  clear_bit(ADF_STATUS_CONFIGURED, &accel_dev->status);   in adf_cfg_del_all()
  109  void adf_cfg_dev_remove(struct adf_accel_dev *accel_dev)   in adf_cfg_dev_remove() argument
  111  struct adf_cfg_device_data *dev_cfg_data = accel_dev->cfg;   in adf_cfg_dev_remove()
  121  accel_dev->cfg = NULL;   in adf_cfg_dev_remove()
  170  static struct adf_cfg_section *adf_cfg_sec_find(struct adf_accel_dev *accel_dev,   in adf_cfg_sec_find() argument
  [all …]

adf_hw_arbiter.c
  18  int adf_init_arb(struct adf_accel_dev *accel_dev)   in adf_init_arb() argument
  20  struct adf_hw_device_data *hw_data = accel_dev->hw_device;   in adf_init_arb()
  21  void __iomem *csr = accel_dev->transport->banks[0].csr_addr;   in adf_init_arb()
  50  struct adf_accel_dev *accel_dev = ring->bank->accel_dev;   in adf_update_ring_arb() local
  51  struct adf_hw_device_data *hw_data = accel_dev->hw_device;   in adf_update_ring_arb()
  52  struct adf_hw_csr_ops *csr_ops = GET_CSR_OPS(accel_dev);   in adf_update_ring_arb()
  73  void adf_exit_arb(struct adf_accel_dev *accel_dev)   in adf_exit_arb() argument
  75  struct adf_hw_device_data *hw_data = accel_dev->hw_device;   in adf_exit_arb()
  76  struct adf_hw_csr_ops *csr_ops = GET_CSR_OPS(accel_dev);   in adf_exit_arb()
  86  if (!accel_dev->transport)   in adf_exit_arb()
  [all …]

adf_vf2pf_msg.c
  15  int adf_vf2pf_notify_init(struct adf_accel_dev *accel_dev)   in adf_vf2pf_notify_init() argument
  20  if (adf_iov_putmsg(accel_dev, msg, 0)) {   in adf_vf2pf_notify_init()
  21  dev_err(&GET_DEV(accel_dev),   in adf_vf2pf_notify_init()
  25  set_bit(ADF_STATUS_PF_RUNNING, &accel_dev->status);   in adf_vf2pf_notify_init()
  38  void adf_vf2pf_notify_shutdown(struct adf_accel_dev *accel_dev)   in adf_vf2pf_notify_shutdown() argument
  43  if (test_bit(ADF_STATUS_PF_RUNNING, &accel_dev->status))   in adf_vf2pf_notify_shutdown()
  44  if (adf_iov_putmsg(accel_dev, msg, 0))   in adf_vf2pf_notify_shutdown()
  45  dev_err(&GET_DEV(accel_dev),   in adf_vf2pf_notify_shutdown()
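
adf_vf2pf_notify_init() records a successful handshake by setting ADF_STATUS_PF_RUNNING in accel_dev->status (line 25), and the shutdown path only notifies the PF if that bit is still set (line 43). A small sketch of that long-bitmap status idiom using the kernel bitops; the bit name and structure below are stand-ins:

#include <linux/bitops.h>
#include <linux/types.h>

/* Stand-in status bit; the driver defines its own ADF_STATUS_* numbers. */
#define EXAMPLE_STATUS_PF_RUNNING 0

struct example_dev {
	unsigned long status;	/* bitmap manipulated with set_bit()/test_bit() */
};

static void example_mark_pf_running(struct example_dev *dev)
{
	/* Atomic set: safe even if other paths touch dev->status concurrently. */
	set_bit(EXAMPLE_STATUS_PF_RUNNING, &dev->status);
}

static bool example_pf_running(struct example_dev *dev)
{
	return test_bit(EXAMPLE_STATUS_PF_RUNNING, &dev->status);
}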

/Linux-v5.15/drivers/crypto/qat/qat_c3xxxvf/

adf_drv.c
  37  static void adf_cleanup_pci_dev(struct adf_accel_dev *accel_dev)   in adf_cleanup_pci_dev() argument
  39  pci_release_regions(accel_dev->accel_pci_dev.pci_dev);   in adf_cleanup_pci_dev()
  40  pci_disable_device(accel_dev->accel_pci_dev.pci_dev);   in adf_cleanup_pci_dev()
  43  static void adf_cleanup_accel(struct adf_accel_dev *accel_dev)   in adf_cleanup_accel() argument
  45  struct adf_accel_pci *accel_pci_dev = &accel_dev->accel_pci_dev;   in adf_cleanup_accel()
  56  if (accel_dev->hw_device) {   in adf_cleanup_accel()
  59  adf_clean_hw_data_c3xxxiov(accel_dev->hw_device);   in adf_cleanup_accel()
  64  kfree(accel_dev->hw_device);   in adf_cleanup_accel()
  65  accel_dev->hw_device = NULL;   in adf_cleanup_accel()
  67  adf_cfg_dev_remove(accel_dev);   in adf_cleanup_accel()
  [all …]
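
This cleanup pair (repeated with minor variations in every per-device adf_drv.c below) undoes probe-time setup in reverse order: free the per-generation hw_device data, remove the configuration entries, then release the PCI regions and disable the device. The conventional way to keep that ordering correct on probe() error paths is a goto unwind ladder; the sketch below shows the general pattern and is not the driver's actual probe code (the function and resource names are illustrative):

#include <linux/pci.h>

/* Illustrative probe-style unwind ladder: each failure jumps to the label
 * that releases everything acquired so far, in reverse order. */
static int example_probe(struct pci_dev *pdev)
{
	int ret;

	ret = pci_enable_device(pdev);
	if (ret)
		return ret;

	ret = pci_request_regions(pdev, "example");
	if (ret)
		goto err_disable;

	/* ...allocate driver state, map BARs, register with subsystems... */

	return 0;

err_disable:
	pci_disable_device(pdev);
	return ret;
}

static void example_remove(struct pci_dev *pdev)
{
	/* Tear down in the reverse order of example_probe(). */
	pci_release_regions(pdev);
	pci_disable_device(pdev);
}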

/Linux-v5.15/drivers/crypto/qat/qat_c62xvf/

adf_drv.c
  37  static void adf_cleanup_pci_dev(struct adf_accel_dev *accel_dev)   in adf_cleanup_pci_dev() argument
  39  pci_release_regions(accel_dev->accel_pci_dev.pci_dev);   in adf_cleanup_pci_dev()
  40  pci_disable_device(accel_dev->accel_pci_dev.pci_dev);   in adf_cleanup_pci_dev()
  43  static void adf_cleanup_accel(struct adf_accel_dev *accel_dev)   in adf_cleanup_accel() argument
  45  struct adf_accel_pci *accel_pci_dev = &accel_dev->accel_pci_dev;   in adf_cleanup_accel()
  56  if (accel_dev->hw_device) {   in adf_cleanup_accel()
  59  adf_clean_hw_data_c62xiov(accel_dev->hw_device);   in adf_cleanup_accel()
  64  kfree(accel_dev->hw_device);   in adf_cleanup_accel()
  65  accel_dev->hw_device = NULL;   in adf_cleanup_accel()
  67  adf_cfg_dev_remove(accel_dev);   in adf_cleanup_accel()
  [all …]

/Linux-v5.15/drivers/crypto/qat/qat_dh895xccvf/

adf_drv.c
  37  static void adf_cleanup_pci_dev(struct adf_accel_dev *accel_dev)   in adf_cleanup_pci_dev() argument
  39  pci_release_regions(accel_dev->accel_pci_dev.pci_dev);   in adf_cleanup_pci_dev()
  40  pci_disable_device(accel_dev->accel_pci_dev.pci_dev);   in adf_cleanup_pci_dev()
  43  static void adf_cleanup_accel(struct adf_accel_dev *accel_dev)   in adf_cleanup_accel() argument
  45  struct adf_accel_pci *accel_pci_dev = &accel_dev->accel_pci_dev;   in adf_cleanup_accel()
  56  if (accel_dev->hw_device) {   in adf_cleanup_accel()
  59  adf_clean_hw_data_dh895xcciov(accel_dev->hw_device);   in adf_cleanup_accel()
  64  kfree(accel_dev->hw_device);   in adf_cleanup_accel()
  65  accel_dev->hw_device = NULL;   in adf_cleanup_accel()
  67  adf_cfg_dev_remove(accel_dev);   in adf_cleanup_accel()
  [all …]

/Linux-v5.15/drivers/crypto/qat/qat_c62x/

adf_drv.c
  38  static void adf_cleanup_pci_dev(struct adf_accel_dev *accel_dev)   in adf_cleanup_pci_dev() argument
  40  pci_release_regions(accel_dev->accel_pci_dev.pci_dev);   in adf_cleanup_pci_dev()
  41  pci_disable_device(accel_dev->accel_pci_dev.pci_dev);   in adf_cleanup_pci_dev()
  44  static void adf_cleanup_accel(struct adf_accel_dev *accel_dev)   in adf_cleanup_accel() argument
  46  struct adf_accel_pci *accel_pci_dev = &accel_dev->accel_pci_dev;   in adf_cleanup_accel()
  56  if (accel_dev->hw_device) {   in adf_cleanup_accel()
  59  adf_clean_hw_data_c62x(accel_dev->hw_device);   in adf_cleanup_accel()
  64  kfree(accel_dev->hw_device);   in adf_cleanup_accel()
  65  accel_dev->hw_device = NULL;   in adf_cleanup_accel()
  67  adf_cfg_dev_remove(accel_dev);   in adf_cleanup_accel()
  [all …]

/Linux-v5.15/drivers/crypto/qat/qat_c3xxx/

adf_drv.c
  38  static void adf_cleanup_pci_dev(struct adf_accel_dev *accel_dev)   in adf_cleanup_pci_dev() argument
  40  pci_release_regions(accel_dev->accel_pci_dev.pci_dev);   in adf_cleanup_pci_dev()
  41  pci_disable_device(accel_dev->accel_pci_dev.pci_dev);   in adf_cleanup_pci_dev()
  44  static void adf_cleanup_accel(struct adf_accel_dev *accel_dev)   in adf_cleanup_accel() argument
  46  struct adf_accel_pci *accel_pci_dev = &accel_dev->accel_pci_dev;   in adf_cleanup_accel()
  56  if (accel_dev->hw_device) {   in adf_cleanup_accel()
  59  adf_clean_hw_data_c3xxx(accel_dev->hw_device);   in adf_cleanup_accel()
  64  kfree(accel_dev->hw_device);   in adf_cleanup_accel()
  65  accel_dev->hw_device = NULL;   in adf_cleanup_accel()
  67  adf_cfg_dev_remove(accel_dev);   in adf_cleanup_accel()
  [all …]

/Linux-v5.15/drivers/crypto/qat/qat_dh895xcc/

adf_drv.c
  38  static void adf_cleanup_pci_dev(struct adf_accel_dev *accel_dev)   in adf_cleanup_pci_dev() argument
  40  pci_release_regions(accel_dev->accel_pci_dev.pci_dev);   in adf_cleanup_pci_dev()
  41  pci_disable_device(accel_dev->accel_pci_dev.pci_dev);   in adf_cleanup_pci_dev()
  44  static void adf_cleanup_accel(struct adf_accel_dev *accel_dev)   in adf_cleanup_accel() argument
  46  struct adf_accel_pci *accel_pci_dev = &accel_dev->accel_pci_dev;   in adf_cleanup_accel()
  56  if (accel_dev->hw_device) {   in adf_cleanup_accel()
  59  adf_clean_hw_data_dh895xcc(accel_dev->hw_device);   in adf_cleanup_accel()
  64  kfree(accel_dev->hw_device);   in adf_cleanup_accel()
  65  accel_dev->hw_device = NULL;   in adf_cleanup_accel()
  67  adf_cfg_dev_remove(accel_dev);   in adf_cleanup_accel()
  [all …]

adf_dh895xcc_hw_data.c
  81  static u32 get_accel_cap(struct adf_accel_dev *accel_dev)   in get_accel_cap() argument
  83  struct pci_dev *pdev = accel_dev->accel_pci_dev.pci_dev;   in get_accel_cap()
  134  static void adf_enable_error_correction(struct adf_accel_dev *accel_dev)   in adf_enable_error_correction() argument
  136  struct adf_hw_device_data *hw_device = accel_dev->hw_device;   in adf_enable_error_correction()
  137  struct adf_bar *misc_bar = &GET_BARS(accel_dev)[ADF_DH895XCC_PMISC_BAR];   in adf_enable_error_correction()
  144  for_each_set_bit(i, &ae_mask, GET_MAX_ACCELENGINES(accel_dev)) {   in adf_enable_error_correction()
  164  static void adf_enable_ints(struct adf_accel_dev *accel_dev)   in adf_enable_ints() argument
  168  addr = (&GET_BARS(accel_dev)[ADF_DH895XCC_PMISC_BAR])->virt_addr;   in adf_enable_ints()
  172  accel_dev->pf.vf_info ? 0 :   in adf_enable_ints()
  173  BIT_ULL(GET_MAX_BANKS(accel_dev)) - 1);   in adf_enable_ints()
  [all …]
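
adf_enable_error_correction() touches only the accel engines that actually exist by iterating the device's ae_mask with for_each_set_bit() (line 144). A small sketch of that iteration; the mask value is an example, not something read from the hardware:

#include <linux/bitops.h>
#include <linux/printk.h>

static void example_walk_ae_mask(void)
{
	/* Example mask with engines 0, 1, 3 and 5 present (0x2b = 101011b). */
	unsigned long ae_mask = 0x2b;
	unsigned int i;

	/* Visits set bits only, i.e. 0, 1, 3, 5. */
	for_each_set_bit(i, &ae_mask, BITS_PER_LONG)
		pr_info("enable error correction for AE %u\n", i);
}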

/Linux-v5.15/drivers/crypto/qat/qat_4xxx/

adf_drv.c
  21  static void adf_cleanup_accel(struct adf_accel_dev *accel_dev)   in adf_cleanup_accel() argument
  23  if (accel_dev->hw_device) {   in adf_cleanup_accel()
  24  adf_clean_hw_data_4xxx(accel_dev->hw_device);   in adf_cleanup_accel()
  25  accel_dev->hw_device = NULL;   in adf_cleanup_accel()
  27  adf_cfg_dev_remove(accel_dev);   in adf_cleanup_accel()
  28  debugfs_remove(accel_dev->debugfs_dir);   in adf_cleanup_accel()
  29  adf_devmgr_rm_dev(accel_dev, NULL);   in adf_cleanup_accel()
  32  static int adf_crypto_dev_config(struct adf_accel_dev *accel_dev)   in adf_crypto_dev_config() argument
  35  int banks = GET_MAX_BANKS(accel_dev);   in adf_crypto_dev_config()
  42  if (adf_hw_dev_has_crypto(accel_dev))   in adf_crypto_dev_config()
  [all …]