/Linux-v5.4/include/linux/
    async.h
         90  return async_schedule_node(func, dev, dev_to_node(dev));  in async_schedule_dev()
        112  return async_schedule_node_domain(func, dev, dev_to_node(dev), domain);  in async_schedule_dev_domain()
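The two async.h hits are the bodies of the device-aware scheduling helpers: each simply forwards to the node-taking variant, using dev_to_node() to pick the node. A minimal sketch of those static inlines, reconstructed from the two return statements above (the surrounding signatures follow the async_func_t convention and may differ in detail from the header):

static inline async_cookie_t
async_schedule_dev(async_func_t func, struct device *dev)
{
	/* Schedule the async function on the device's home NUMA node. */
	return async_schedule_node(func, dev, dev_to_node(dev));
}

static inline async_cookie_t
async_schedule_dev_domain(async_func_t func, struct device *dev,
			  struct async_domain *domain)
{
	return async_schedule_node_domain(func, dev, dev_to_node(dev), domain);
}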
/Linux-v5.4/drivers/crypto/qat/qat_c3xxx/
    adf_drv.c
        137  if (num_possible_nodes() > 1 && dev_to_node(&pdev->dev) < 0) {  in adf_probe()
        146  dev_to_node(&pdev->dev));  in adf_probe()
        165  dev_to_node(&pdev->dev));  in adf_probe()
/Linux-v5.4/drivers/crypto/qat/qat_c62x/
    adf_drv.c
        137  if (num_possible_nodes() > 1 && dev_to_node(&pdev->dev) < 0) {  in adf_probe()
        146  dev_to_node(&pdev->dev));  in adf_probe()
        165  dev_to_node(&pdev->dev));  in adf_probe()
/Linux-v5.4/drivers/crypto/qat/qat_dh895xcc/
    adf_drv.c
        137  if (num_possible_nodes() > 1 && dev_to_node(&pdev->dev) < 0) {  in adf_probe()
        146  dev_to_node(&pdev->dev));  in adf_probe()
        165  dev_to_node(&pdev->dev));  in adf_probe()
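The three QAT physical-function drivers above share the same probe-time guard: on a system with more than one possible node, a device that reports no NUMA affinity (dev_to_node() < 0) is rejected, since the later per-device allocations all key off that node. A sketch of the check, assuming the usual dev_err()/-EINVAL error path (the message text is not taken from the listing):

static int adf_probe(struct pci_dev *pdev, const struct pci_device_id *ent)
{
	if (num_possible_nodes() > 1 && dev_to_node(&pdev->dev) < 0) {
		/* Multi-node system, but the PCI device has no node assigned. */
		dev_err(&pdev->dev, "Invalid NUMA configuration.\n");
		return -EINVAL;
	}
	/* ... remainder of probe elided ... */
	return 0;
}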
/Linux-v5.4/drivers/crypto/virtio/
    virtio_crypto_mgr.c
        194  if ((node == dev_to_node(&tmp_dev->vdev->dev) ||  in virtcrypto_get_dev_node()
        195  dev_to_node(&tmp_dev->vdev->dev) < 0) &&  in virtcrypto_get_dev_node()
    virtio_crypto_algs.c
        373  dev_to_node(&vcrypto->vdev->dev));  in __virtio_crypto_ablkcipher_do_req()
        378  dev_to_node(&vcrypto->vdev->dev));  in __virtio_crypto_ablkcipher_do_req()
        434  dev_to_node(&vcrypto->vdev->dev));  in __virtio_crypto_ablkcipher_do_req()
    virtio_crypto_core.c
        311  if (num_possible_nodes() > 1 && dev_to_node(&vdev->dev) < 0) {  in virtcrypto_probe()
        322  dev_to_node(&vdev->dev));  in virtcrypto_probe()
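virtcrypto_get_dev_node() (lines 194-195 above) shows the selection idiom that also appears in qat_crypto.c below: when a caller asks for an instance on a given node, accept a device whose dev_to_node() matches, or one that reports no affinity at all. A generic, hypothetical sketch of that pattern (struct my_dev and its list are illustrative, not the driver's types):

#include <linux/list.h>
#include <linux/device.h>

struct my_dev {
	struct list_head list;
	struct device *dev;
};

static struct my_dev *my_get_dev_node(struct list_head *devices, int node)
{
	struct my_dev *tmp_dev;

	list_for_each_entry(tmp_dev, devices, list) {
		/* Either on the requested node, or no NUMA affinity at all. */
		if (node == dev_to_node(tmp_dev->dev) ||
		    dev_to_node(tmp_dev->dev) < 0)
			return tmp_dev;
	}
	return NULL;
}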
/Linux-v5.4/drivers/crypto/qat/qat_common/
    qat_crypto.c
        104  if ((node == dev_to_node(&GET_DEV(tmp_dev)) ||  in qat_crypto_get_instance_node()
        105  dev_to_node(&GET_DEV(tmp_dev)) < 0) &&  in qat_crypto_get_instance_node()
        262  dev_to_node(&GET_DEV(accel_dev)));  in qat_crypto_create_instances()
    adf_transport.c
        418  dev_to_node(&GET_DEV(accel_dev)));  in adf_init_bank()
        469  dev_to_node(&GET_DEV(accel_dev)));  in adf_init_etr_data()
        476  dev_to_node(&GET_DEV(accel_dev)));  in adf_init_etr_data()
    adf_admin.c
        244  dev_to_node(&GET_DEV(accel_dev)));  in adf_init_admin_comms()
    adf_isr.c
        242  GFP_KERNEL, dev_to_node(&GET_DEV(accel_dev)));  in adf_isr_alloc_msix_entry_table()
/Linux-v5.4/drivers/usb/host/
    xhci-mem.c
         38  seg = kzalloc_node(sizeof(*seg), flags, dev_to_node(dev));  in xhci_segment_alloc()
         50  dev_to_node(dev));  in xhci_segment_alloc()
        370  ring = kzalloc_node(sizeof(*ring), flags, dev_to_node(dev));  in xhci_ring_alloc()
        469  ctx = kzalloc_node(sizeof(*ctx), flags, dev_to_node(dev));  in xhci_alloc_container_ctx()
        634  dev_to_node(dev));  in xhci_alloc_stream_info()
        644  dev_to_node(dev));  in xhci_alloc_stream_info()
        851  dev_to_node(dev));  in xhci_alloc_tt_info()
       1657  dev_to_node(dev));  in scratchpad_alloc()
       1668  flags, dev_to_node(dev));  in scratchpad_alloc()
       1738  command = kzalloc_node(sizeof(*command), mem_flags, dev_to_node(dev));  in xhci_alloc_command()
        [all …]
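Every xhci-mem.c hit has the same shape: per-controller bookkeeping (segments, rings, contexts, stream info, scratchpads, commands) is allocated with kzalloc_node() on the node returned by dev_to_node(), so the host controller's data structures stay local to the controller. A minimal sketch of the idiom (struct my_seg is a stand-in, not the xHCI segment type):

#include <linux/slab.h>
#include <linux/device.h>

struct my_seg {
	void *trbs;
};

static struct my_seg *my_segment_alloc(struct device *dev, gfp_t flags)
{
	/* kzalloc_node() falls back to any node when given NUMA_NO_NODE. */
	return kzalloc_node(sizeof(struct my_seg), flags, dev_to_node(dev));
}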
/Linux-v5.4/drivers/base/test/
    test_async_driver_probe.c
         47  if (dev_to_node(dev) != numa_node_id()) {  in test_probe()
         49  dev_to_node(dev), numa_node_id());  in test_probe()
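test_async_driver_probe.c uses dev_to_node() the other way around: instead of steering an allocation, test_probe() checks that the CPU running the asynchronous probe actually sits on the device's node, comparing dev_to_node(dev) against numa_node_id(). A sketch of that check, with the warning text an assumption rather than the test's real message:

#include <linux/platform_device.h>
#include <linux/topology.h>

static int test_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;

	if (dev_to_node(dev) != numa_node_id()) {
		/* Probe ran on a CPU that is not on the device's node. */
		dev_warn(dev, "probe on wrong node: device node %d, cpu node %d\n",
			 dev_to_node(dev), numa_node_id());
	}
	return 0;
}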
/Linux-v5.4/drivers/crypto/qat/qat_c62xvf/
    adf_drv.c
        140  dev_to_node(&pdev->dev));  in adf_probe()
        160  dev_to_node(&pdev->dev));  in adf_probe()
/Linux-v5.4/drivers/crypto/qat/qat_c3xxxvf/
    adf_drv.c
        140  dev_to_node(&pdev->dev));  in adf_probe()
        160  dev_to_node(&pdev->dev));  in adf_probe()
/Linux-v5.4/drivers/crypto/qat/qat_dh895xccvf/
    adf_drv.c
        140  dev_to_node(&pdev->dev));  in adf_probe()
        160  dev_to_node(&pdev->dev));  in adf_probe()
/Linux-v5.4/drivers/nvdimm/
    of_pmem.c
         70  ndr_desc.numa_node = dev_to_node(&pdev->dev);  in of_pmem_region_probe()
    virtio_pmem.c
         35  int nid = dev_to_node(&vdev->dev);  in virtio_pmem_probe()
    pmem.c
        357  int nid = dev_to_node(dev), fua;  in pmem_attach_disk()
        401  q = blk_alloc_queue_node(GFP_KERNEL, dev_to_node(dev));  in pmem_attach_disk()
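In the nvdimm hits, dev_to_node() is consumed as a plain node id: of_pmem.c stores it in the region descriptor (ndr_desc.numa_node), and pmem.c passes it to blk_alloc_queue_node() so the request queue is allocated near the persistent memory it serves. A trimmed sketch of that queue-allocation step, with error handling and the rest of pmem_attach_disk()'s setup elided (the function name here is illustrative):

#include <linux/blkdev.h>
#include <linux/device.h>

static int attach_queue_sketch(struct device *dev)
{
	int nid = dev_to_node(dev);
	struct request_queue *q;

	/* Request queue allocated on the pmem device's node. */
	q = blk_alloc_queue_node(GFP_KERNEL, nid);
	if (!q)
		return -ENOMEM;
	/* ... gendisk and dax setup elided ... */
	return 0;
}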
/Linux-v5.4/kernel/
    iomem.c
        145  dev_to_node(dev));  in devm_memremap()
/Linux-v5.4/drivers/perf/
    thunderx2_pmu.c
        651  tx2_pmu->node = dev_to_node(dev);  in tx2_uncore_pmu_init_dev()
        792  dev_info(dev, "node%d: pmu uncore registered\n", dev_to_node(dev));  in tx2_uncore_probe()
        803  if (tx2_pmu->node == dev_to_node(dev)) {  in tx2_uncore_remove()
/Linux-v5.4/arch/arm64/kernel/
    pci.c
         61  return dev_to_node(&bus->dev);  in pcibus_to_node()
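The single arm64 hit is essentially the whole function: pcibus_to_node() on arm64 just defers to the generic device node of the bus. Reconstructed from the one line above (declaration details may differ slightly from the file):

int pcibus_to_node(struct pci_bus *bus)
{
	/* The bus's NUMA node is whatever its struct device reports. */
	return dev_to_node(&bus->dev);
}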
/Linux-v5.4/arch/powerpc/kernel/
    dma-iommu.c
         46  dev_to_node(dev));  in dma_iommu_alloc_coherent()
/Linux-v5.4/arch/x86/pci/
    intel_mid_pci.c
        258  ioapic_set_alloc_attr(&info, dev_to_node(&dev->dev), 1, polarity);  in intel_mid_pci_irq_enable()
/Linux-v5.4/drivers/pci/
    p2pdma.c
        123  p2p->pool = gen_pool_create(PAGE_SHIFT, dev_to_node(&pdev->dev));  in pci_p2pdma_setup()
        205  resource_size(&pgmap->res), dev_to_node(&pdev->dev),  in pci_p2pdma_add_resource()
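In p2pdma.c the node shows up twice: the per-device gen_pool that hands out peer-to-peer memory is created on the PCI device's node, and line 205 is a fragment of a second call in pci_p2pdma_add_resource() that also passes the device's node. A sketch of the pool-creation half, with the surrounding p2pdma structure simplified away (the helper name is illustrative):

#include <linux/genalloc.h>
#include <linux/pci.h>

static int p2p_pool_create_sketch(struct pci_dev *pdev, struct gen_pool **pool)
{
	/* Page-order pool whose metadata lives on the device's node. */
	*pool = gen_pool_create(PAGE_SHIFT, dev_to_node(&pdev->dev));
	if (!*pool)
		return -ENOMEM;
	return 0;
}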