Home
last modified time | relevance | path

Searched refs: idxd (Results 1 – 15 of 15) sorted by relevance

/Linux-v5.15/drivers/dma/idxd/
Dinit.c71 static int idxd_setup_interrupts(struct idxd_device *idxd) in idxd_setup_interrupts() argument
73 struct pci_dev *pdev = idxd->pdev; in idxd_setup_interrupts()
96 idxd->irq_entries = kcalloc_node(msixcnt, sizeof(struct idxd_irq_entry), in idxd_setup_interrupts()
98 if (!idxd->irq_entries) { in idxd_setup_interrupts()
104 idxd->irq_entries[i].id = i; in idxd_setup_interrupts()
105 idxd->irq_entries[i].idxd = idxd; in idxd_setup_interrupts()
106 idxd->irq_entries[i].vector = pci_irq_vector(pdev, i); in idxd_setup_interrupts()
107 spin_lock_init(&idxd->irq_entries[i].list_lock); in idxd_setup_interrupts()
110 idxd_msix_perm_setup(idxd); in idxd_setup_interrupts()
112 irq_entry = &idxd->irq_entries[0]; in idxd_setup_interrupts()
[all …]
Ddevice.c16 static void idxd_cmd_exec(struct idxd_device *idxd, int cmd_code, u32 operand,
18 static void idxd_device_wqs_clear_state(struct idxd_device *idxd);
22 void idxd_mask_msix_vector(struct idxd_device *idxd, int vec_id) in idxd_mask_msix_vector() argument
24 struct irq_data *data = irq_get_irq_data(idxd->irq_entries[vec_id].vector); in idxd_mask_msix_vector()
29 void idxd_mask_msix_vectors(struct idxd_device *idxd) in idxd_mask_msix_vectors() argument
31 struct pci_dev *pdev = idxd->pdev; in idxd_mask_msix_vectors()
36 idxd_mask_msix_vector(idxd, i); in idxd_mask_msix_vectors()
39 void idxd_unmask_msix_vector(struct idxd_device *idxd, int vec_id) in idxd_unmask_msix_vector() argument
41 struct irq_data *data = irq_get_irq_data(idxd->irq_entries[vec_id].vector); in idxd_unmask_msix_vector()
46 void idxd_unmask_error_interrupts(struct idxd_device *idxd) in idxd_unmask_error_interrupts() argument
[all …]
Dsysfs.c36 struct idxd_device *idxd = engine->idxd; in engine_group_id_store() local
45 if (!test_bit(IDXD_FLAG_CONFIGURABLE, &idxd->flags)) in engine_group_id_store()
48 if (id > idxd->max_groups - 1 || id < -1) in engine_group_id_store()
63 engine->group = idxd->groups[id]; in engine_group_id_store()
102 static void idxd_set_free_tokens(struct idxd_device *idxd) in idxd_set_free_tokens() argument
106 for (i = 0, tokens = 0; i < idxd->max_groups; i++) { in idxd_set_free_tokens()
107 struct idxd_group *g = idxd->groups[i]; in idxd_set_free_tokens()
112 idxd->nr_tokens = idxd->max_tokens - tokens; in idxd_set_free_tokens()
129 struct idxd_device *idxd = group->idxd; in group_tokens_reserved_store() local
137 if (idxd->data->type == IDXD_TYPE_IAX) in group_tokens_reserved_store()
[all …]
Dirq.c22 struct idxd_device *idxd; member
27 struct idxd_device *idxd = container_of(work, struct idxd_device, work); in idxd_device_reinit() local
28 struct device *dev = &idxd->pdev->dev; in idxd_device_reinit()
31 idxd_device_reset(idxd); in idxd_device_reinit()
32 rc = idxd_device_config(idxd); in idxd_device_reinit()
36 rc = idxd_device_enable(idxd); in idxd_device_reinit()
40 for (i = 0; i < idxd->max_wqs; i++) { in idxd_device_reinit()
41 struct idxd_wq *wq = idxd->wqs[i]; in idxd_device_reinit()
55 idxd_device_clear_state(idxd); in idxd_device_reinit()
58 static int process_misc_interrupts(struct idxd_device *idxd, u32 cause) in process_misc_interrupts() argument
[all …]
Dperfmon.c126 struct idxd_device *idxd = idxd_pmu->idxd; in perfmon_assign_hw_event() local
130 hwc->config_base = ioread64(CNTRCFG_REG(idxd, idx)); in perfmon_assign_hw_event()
131 hwc->event_base = ioread64(CNTRCFG_REG(idxd, idx)); in perfmon_assign_hw_event()
200 struct idxd_device *idxd; in perfmon_pmu_event_init() local
203 idxd = event_to_idxd(event); in perfmon_pmu_event_init()
216 if (event->pmu != &idxd->idxd_pmu->pmu) in perfmon_pmu_event_init()
219 event->hw.event_base = ioread64(PERFMON_TABLE_OFFSET(idxd)); in perfmon_pmu_event_init()
220 event->cpu = idxd->idxd_pmu->cpu; in perfmon_pmu_event_init()
225 ret = perfmon_validate_group(idxd->idxd_pmu, event); in perfmon_pmu_event_init()
233 struct idxd_device *idxd; in perfmon_pmu_read_counter() local
[all …]
Dperfmon.h38 return idxd_pmu->idxd; in event_to_idxd()
47 return idxd_pmu->idxd; in pmu_to_idxd()
87 #define PERFMON_REG_OFFSET(idxd, offset) \ argument
88 (PERFMON_TABLE_OFFSET(idxd) + (offset))
90 #define PERFCAP_REG(idxd) (PERFMON_REG_OFFSET(idxd, IDXD_PERFCAP_OFFSET)) argument
91 #define PERFRST_REG(idxd) (PERFMON_REG_OFFSET(idxd, IDXD_PERFRST_OFFSET)) argument
92 #define OVFSTATUS_REG(idxd) (PERFMON_REG_OFFSET(idxd, IDXD_OVFSTATUS_OFFSET)) argument
93 #define PERFFRZ_REG(idxd) (PERFMON_REG_OFFSET(idxd, IDXD_PERFFRZ_OFFSET)) argument
95 #define FLTCFG_REG(idxd, cntr, flt) \ argument
96 (PERFMON_REG_OFFSET(idxd, IDXD_FLTCFG_OFFSET) + ((cntr) * 32) + ((flt) * 4))
[all …]
Didxd.h68 struct idxd_device *idxd; member
82 struct idxd_device *idxd; member
95 struct idxd_device *idxd; member
172 struct idxd_device *idxd; member
207 struct idxd_device *idxd; member
234 struct idxd_device *idxd; member
330 #define idxd_confdev(idxd) &idxd->idxd_dev.conf_dev argument
442 static inline bool device_pasid_enabled(struct idxd_device *idxd) in device_pasid_enabled() argument
444 return test_bit(IDXD_FLAG_PASID_ENABLED, &idxd->flags); in device_pasid_enabled()
447 static inline bool device_swq_supported(struct idxd_device *idxd) in device_swq_supported() argument
[all …]
Dcdev.c48 cdev_ctx = &ictx[wq->idxd->data->type]; in idxd_cdev_dev_release()
75 struct idxd_device *idxd; in idxd_cdev_open() local
83 idxd = wq->idxd; in idxd_cdev_open()
84 dev = &idxd->pdev->dev; in idxd_cdev_open()
102 if (device_pasid_enabled(idxd)) { in idxd_cdev_open()
144 struct idxd_device *idxd = wq->idxd; in idxd_cdev_release() local
145 struct device *dev = &idxd->pdev->dev; in idxd_cdev_release()
153 idxd_device_drain_pasid(idxd, ctx->pasid); in idxd_cdev_release()
155 if (device_pasid_enabled(idxd)) { in idxd_cdev_release()
177 struct device *dev = &wq->idxd->pdev->dev; in check_vma()
[all …]
Ddma.c86 struct idxd_device *idxd = wq->idxd; in idxd_dma_submit_memcpy() local
92 if (len > idxd->max_xfer_bytes) in idxd_dma_submit_memcpy()
112 struct device *dev = &wq->idxd->pdev->dev; in idxd_dma_alloc_chan_resources()
123 struct device *dev = &wq->idxd->pdev->dev; in idxd_dma_free_chan_resources()
169 int idxd_register_dma_device(struct idxd_device *idxd) in idxd_register_dma_device() argument
173 struct device *dev = &idxd->pdev->dev; in idxd_register_dma_device()
188 if (idxd->hw.opcap.bits[0] & IDXD_OPCAP_MEMMOVE) { in idxd_register_dma_device()
204 idxd_dma->idxd = idxd; in idxd_register_dma_device()
209 idxd->idxd_dma = idxd_dma; in idxd_register_dma_device()
213 void idxd_unregister_dma_device(struct idxd_device *idxd) in idxd_unregister_dma_device() argument
[all …]
Dsubmit.c14 struct idxd_device *idxd = wq->idxd; in __get_desc() local
18 memset(desc->completion, 0, idxd->data->compl_size); in __get_desc()
21 if (device_pasid_enabled(idxd)) in __get_desc()
22 desc->hw->pasid = idxd->pasid; in __get_desc()
28 if (!idxd->int_handles) in __get_desc()
31 desc->hw->int_handle = idxd->int_handles[wq->id]; in __get_desc()
39 struct idxd_device *idxd = wq->idxd; in idxd_alloc_desc() local
44 if (idxd->state != IDXD_DEV_ENABLED) in idxd_alloc_desc()
137 struct idxd_device *idxd = wq->idxd; in idxd_submit_desc() local
142 if (idxd->state != IDXD_DEV_ENABLED) { in idxd_submit_desc()
[all …]
DMakefile3 obj-$(CONFIG_INTEL_IDXD) += idxd.o
4 idxd-y := init.o irq.o device.o sysfs.o submit.o dma.o cdev.o
6 idxd-$(CONFIG_INTEL_IDXD_PERFMON) += perfmon.o
/Linux-v5.15/drivers/dma/
DMakefile45 obj-y += idxd/
DKconfig300 bool "Legacy behavior for idxd driver"
317 # support shared virtual memory for the devices supported by idxd.
/Linux-v5.15/Documentation/admin-guide/
Dkernel-parameters.txt1755 idxd.sva= [HW]
1758 support for the idxd driver. By default it is set to
1761 idxd.tc_override= [HW]
/Linux-v5.15/
DMAINTAINERS9437 F: drivers/dma/idxd/*
9438 F: include/uapi/linux/idxd.h