/Linux-v4.19/drivers/pci/controller/dwc/
  pcie-designware.c
    108  int type, u64 cpu_addr,  in dw_pcie_prog_outbound_atu_unroll() argument
    114  lower_32_bits(cpu_addr));  in dw_pcie_prog_outbound_atu_unroll()
    116  upper_32_bits(cpu_addr));  in dw_pcie_prog_outbound_atu_unroll()
    118  lower_32_bits(cpu_addr + size - 1));  in dw_pcie_prog_outbound_atu_unroll()
    144  u64 cpu_addr, u64 pci_addr, u32 size)  in dw_pcie_prog_outbound_atu() argument
    149  cpu_addr = pci->ops->cpu_addr_fixup(pci, cpu_addr);  in dw_pcie_prog_outbound_atu()
    152  dw_pcie_prog_outbound_atu_unroll(pci, index, type, cpu_addr,  in dw_pcie_prog_outbound_atu()
    160  lower_32_bits(cpu_addr));  in dw_pcie_prog_outbound_atu()
    162  upper_32_bits(cpu_addr));  in dw_pcie_prog_outbound_atu()
    164  lower_32_bits(cpu_addr + size - 1));  in dw_pcie_prog_outbound_atu()
    [all …]
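The DesignWare hits show the recurring outbound-ATU pattern: a 64-bit cpu_addr is split with lower_32_bits()/upper_32_bits() across two registers, and the window limit is programmed as cpu_addr + size - 1. A minimal sketch of that pattern, assuming a hypothetical register layout and a plain writel() accessor rather than the driver's dw_pcie_writel_dbi() helpers:

```c
#include <linux/io.h>
#include <linux/kernel.h>

/* Hypothetical ATU register offsets, for illustration only. */
#define DEMO_ATU_LOWER_BASE	0x08
#define DEMO_ATU_UPPER_BASE	0x0c
#define DEMO_ATU_LIMIT		0x10

/*
 * Program one outbound window: split the 64-bit CPU address across the
 * two 32-bit base registers and write the inclusive upper bound.
 */
static void demo_atu_prog_outbound(void __iomem *atu, u64 cpu_addr, u32 size)
{
	writel(lower_32_bits(cpu_addr), atu + DEMO_ATU_LOWER_BASE);
	writel(upper_32_bits(cpu_addr), atu + DEMO_ATU_UPPER_BASE);
	writel(lower_32_bits(cpu_addr + size - 1), atu + DEMO_ATU_LIMIT);
}
```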
/Linux-v4.19/include/linux/
  dma-mapping.h
    168  void *cpu_addr, size_t size, int *ret);
    172  int dma_mmap_from_global_coherent(struct vm_area_struct *vma, void *cpu_addr,
    192  void *cpu_addr, size_t size,  in dma_mmap_from_global_coherent() argument
    448  void *cpu_addr, dma_addr_t dma_addr, size_t size);
    457  void dma_common_free_remap(void *cpu_addr, size_t size, unsigned long vm_flags);
    473  dma_mmap_attrs(struct device *dev, struct vm_area_struct *vma, void *cpu_addr,  in dma_mmap_attrs() argument
    479  return ops->mmap(dev, vma, cpu_addr, dma_addr, size, attrs);  in dma_mmap_attrs()
    480  return dma_common_mmap(dev, vma, cpu_addr, dma_addr, size);  in dma_mmap_attrs()
    487  void *cpu_addr, dma_addr_t dma_addr, size_t size);
    490  dma_get_sgtable_attrs(struct device *dev, struct sg_table *sgt, void *cpu_addr,  in dma_get_sgtable_attrs() argument
    [all …]
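The dma-mapping.h hits document the public entry point: dma_mmap_attrs() hands a coherent buffer to userspace, dispatching to ops->mmap when the architecture provides one and to dma_common_mmap() otherwise. A minimal sketch of the surrounding driver flow, with illustrative demo_* names (only dma_alloc_coherent(), dma_mmap_attrs() and dma_free_coherent() are from the kernel API):

```c
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/gfp.h>
#include <linux/mm.h>

struct demo_buf {
	void		*cpu_addr;	/* kernel virtual address */
	dma_addr_t	dma_addr;	/* device-visible address */
	size_t		size;
};

/* Allocate a coherent buffer; cpu_addr and dma_addr describe the same memory. */
static int demo_buf_alloc(struct device *dev, struct demo_buf *b, size_t size)
{
	b->size = PAGE_ALIGN(size);
	b->cpu_addr = dma_alloc_coherent(dev, b->size, &b->dma_addr, GFP_KERNEL);
	return b->cpu_addr ? 0 : -ENOMEM;
}

/* Map the same buffer into a userspace vma, e.g. from a file_operations .mmap. */
static int demo_buf_mmap(struct device *dev, struct demo_buf *b,
			 struct vm_area_struct *vma)
{
	return dma_mmap_attrs(dev, vma, b->cpu_addr, b->dma_addr, b->size, 0);
}

static void demo_buf_free(struct device *dev, struct demo_buf *b)
{
	dma_free_coherent(dev, b->size, b->cpu_addr, b->dma_addr);
}
```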
/Linux-v4.19/kernel/dma/
  mapping.c
    205  void *cpu_addr, dma_addr_t handle, size_t size)  in dma_common_get_sgtable() argument
    207  struct page *page = virt_to_page(cpu_addr);  in dma_common_get_sgtable()
    223  void *cpu_addr, dma_addr_t dma_addr, size_t size)  in dma_common_mmap() argument
    233  if (dma_mmap_from_dev_coherent(dev, vma, cpu_addr, size, &ret))  in dma_common_mmap()
    238  page_to_pfn(virt_to_page(cpu_addr)) + off,  in dma_common_mmap()
    317  void dma_common_free_remap(void *cpu_addr, size_t size, unsigned long vm_flags)  in dma_common_free_remap() argument
    319  struct vm_struct *area = find_vm_area(cpu_addr);  in dma_common_free_remap()
    322  WARN(1, "trying to free invalid coherent area: %p\n", cpu_addr);  in dma_common_free_remap()
    326  unmap_kernel_range((unsigned long)cpu_addr, PAGE_ALIGN(size));  in dma_common_free_remap()
    327  vunmap(cpu_addr);  in dma_common_free_remap()
  direct.c
    127  void dma_direct_free(struct device *dev, size_t size, void *cpu_addr,  in dma_direct_free() argument
    134  set_memory_encrypted((unsigned long)cpu_addr, 1 << page_order);  in dma_direct_free()
    135  if (!dma_release_from_contiguous(dev, virt_to_page(cpu_addr), count))  in dma_direct_free()
    136  free_pages((unsigned long)cpu_addr, page_order);  in dma_direct_free()
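The dma_common_mmap() hits above give the generic fallback's shape: check the per-device coherent pool first, then map the buffer by converting cpu_addr to a pfn. A rough sketch of that fallback for a plain contiguous lowmem buffer, assuming the coherent-pool and pgprot handling of the real function are out of scope:

```c
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/mm.h>

/*
 * Map a contiguous kernel buffer into userspace: translate the kernel
 * virtual address to a pfn and let remap_pfn_range() build the PTEs.
 */
static int demo_common_mmap(struct vm_area_struct *vma, void *cpu_addr,
			    size_t size)
{
	unsigned long off = vma->vm_pgoff;
	unsigned long count = PAGE_ALIGN(size) >> PAGE_SHIFT;
	unsigned long user_count = vma_pages(vma);
	unsigned long pfn = page_to_pfn(virt_to_page(cpu_addr)) + off;

	/* Refuse mappings that run past the end of the buffer. */
	if (off >= count || user_count > count - off)
		return -ENXIO;

	return remap_pfn_range(vma, vma->vm_start, pfn,
			       user_count << PAGE_SHIFT, vma->vm_page_prot);
}
```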
/Linux-v4.19/arch/s390/kvm/
  trace.h
    220  TP_PROTO(VCPU_PROTO_COMMON, __u8 order_code, __u16 cpu_addr, \
    222  TP_ARGS(VCPU_ARGS_COMMON, order_code, cpu_addr, parameter),
    227  __field(__u16, cpu_addr)
    234  __entry->cpu_addr = cpu_addr;
    242  __entry->cpu_addr, __entry->parameter)
    246  TP_PROTO(VCPU_PROTO_COMMON, __u8 order_code, __u16 cpu_addr),
    247  TP_ARGS(VCPU_ARGS_COMMON, order_code, cpu_addr),
    252  __field(__u16, cpu_addr)
    258  __entry->cpu_addr = cpu_addr;
    265  __entry->cpu_addr)
  sigp.c
    283  u16 cpu_addr, u32 parameter, u64 *status_reg)  in handle_sigp_dst() argument
    286  struct kvm_vcpu *dst_vcpu = kvm_get_vcpu_by_id(vcpu->kvm, cpu_addr);  in handle_sigp_dst()
    360  u16 cpu_addr)  in handle_sigp_order_in_user_space() argument
    404  order_code, cpu_addr);  in handle_sigp_order_in_user_space()
    414  u16 cpu_addr = vcpu->run->s.regs.gprs[r3];  in kvm_s390_handle_sigp() local
    423  if (handle_sigp_order_in_user_space(vcpu, order_code, cpu_addr))  in kvm_s390_handle_sigp()
    431  trace_kvm_s390_handle_sigp(vcpu, order_code, cpu_addr, parameter);  in kvm_s390_handle_sigp()
    439  rc = handle_sigp_dst(vcpu, order_code, cpu_addr,  in kvm_s390_handle_sigp()
    463  u16 cpu_addr = vcpu->run->s.regs.gprs[r3];  in kvm_s390_handle_sigp_pei() local
    467  trace_kvm_s390_handle_sigp_pei(vcpu, order_code, cpu_addr);  in kvm_s390_handle_sigp_pei()
    [all …]
/Linux-v4.19/arch/mips/mm/
  dma-noncoherent.c
    87  void arch_dma_free(struct device *dev, size_t size, void *cpu_addr,  in arch_dma_free() argument
    91  cpu_addr = (void *)CAC_ADDR((unsigned long)cpu_addr);  in arch_dma_free()
    92  dma_direct_free(dev, size, cpu_addr, dma_addr, attrs);  in arch_dma_free()
    96  void *cpu_addr, dma_addr_t dma_addr, size_t size,  in arch_dma_mmap() argument
    101  unsigned long addr = (unsigned long)cpu_addr;  in arch_dma_mmap()
    116  if (dma_mmap_from_dev_coherent(dev, vma, cpu_addr, size, &ret))  in arch_dma_mmap()
/Linux-v4.19/drivers/pci/controller/
  pcie-cadence.c
    12  u64 cpu_addr, u64 pci_addr, size_t size)  in cdns_pcie_set_outbound_region() argument
    76  cpu_addr -= pcie->mem_res->start;  in cdns_pcie_set_outbound_region()
    78  (lower_32_bits(cpu_addr) & GENMASK(31, 8));  in cdns_pcie_set_outbound_region()
    79  addr1 = upper_32_bits(cpu_addr);  in cdns_pcie_set_outbound_region()
    86  u32 r, u64 cpu_addr)  in cdns_pcie_set_outbound_region_for_normal_msg() argument
    103  cpu_addr -= pcie->mem_res->start;  in cdns_pcie_set_outbound_region_for_normal_msg()
    105  (lower_32_bits(cpu_addr) & GENMASK(31, 8));  in cdns_pcie_set_outbound_region_for_normal_msg()
    106  addr1 = upper_32_bits(cpu_addr);  in cdns_pcie_set_outbound_region_for_normal_msg()
  pci-xgene.c
    369  u64 cpu_addr, u64 pci_addr)  in xgene_pcie_setup_ob_reg() argument
    391  xgene_pcie_writel(port, offset, lower_32_bits(cpu_addr));  in xgene_pcie_setup_ob_reg()
    392  xgene_pcie_writel(port, offset + 0x04, upper_32_bits(cpu_addr));  in xgene_pcie_setup_ob_reg()
    494  u64 cpu_addr = range->cpu_addr;  in xgene_pcie_setup_ib_reg() local
    511  bar_low = pcie_bar_low_val((u32)cpu_addr, flags);  in xgene_pcie_setup_ib_reg()
    517  writel(upper_32_bits(cpu_addr), bar_addr + 0x4);  in xgene_pcie_setup_ib_reg()
    527  xgene_pcie_writel(port, IBAR3L + 0x4, upper_32_bits(cpu_addr));  in xgene_pcie_setup_ib_reg()
    552  u64 end = range.cpu_addr + range.size - 1;  in xgene_pcie_parse_map_dma_ranges()
    555  range.flags, range.cpu_addr, end, range.pci_addr);  in xgene_pcie_parse_map_dma_ranges()
/Linux-v4.19/arch/arm/mm/
  dma-mapping.c
    58  void *cpu_addr;  member
    208  static void arm_coherent_dma_free(struct device *dev, size_t size, void *cpu_addr,
    211  void *cpu_addr, dma_addr_t dma_addr, size_t size,
    378  static void __dma_free_remap(void *cpu_addr, size_t size)  in __dma_free_remap() argument
    380  dma_common_free_remap(cpu_addr, size,  in __dma_free_remap()
    623  void *cpu_addr, size_t size, bool want_vaddr)  in __free_from_contiguous() argument
    627  __dma_free_remap(cpu_addr, size);  in __free_from_contiguous()
    683  __free_from_contiguous(args->dev, args->page, args->cpu_addr,  in cma_allocator_free()
    700  __free_from_pool(args->cpu_addr, args->size);  in pool_allocator_free()
    719  __dma_free_remap(args->cpu_addr, args->size);  in remap_allocator_free()
    [all …]
  dma-mapping-nommu.c
    69  void *cpu_addr, dma_addr_t dma_addr,  in arm_nommu_dma_free() argument
    73  dma_direct_free(dev, size, cpu_addr, dma_addr, attrs);  in arm_nommu_dma_free()
    76  cpu_addr);  in arm_nommu_dma_free()
    85  void *cpu_addr, dma_addr_t dma_addr, size_t size,  in arm_nommu_dma_mmap() argument
    90  if (dma_mmap_from_global_coherent(vma, cpu_addr, size, &ret))  in arm_nommu_dma_mmap()
    93  return dma_common_mmap(dev, vma, cpu_addr, dma_addr, size);  in arm_nommu_dma_mmap()
/Linux-v4.19/arch/arm64/mm/
  dma-mapping.c
    282  void *cpu_addr, dma_addr_t dma_addr, size_t size,  in __swiotlb_mmap() argument
    291  if (dma_mmap_from_dev_coherent(dev, vma, cpu_addr, size, &ret))  in __swiotlb_mmap()
    309  void *cpu_addr, dma_addr_t handle, size_t size,  in __swiotlb_get_sgtable() argument
    428  void *cpu_addr, dma_addr_t dma_addr, size_t size,  in __dummy_mmap() argument
    615  static void __iommu_free_attrs(struct device *dev, size_t size, void *cpu_addr,  in __iommu_free_attrs() argument
    632  if (__in_atomic_pool(cpu_addr, size)) {  in __iommu_free_attrs()
    634  __free_from_pool(cpu_addr, size);  in __iommu_free_attrs()
    636  struct page *page = vmalloc_to_page(cpu_addr);  in __iommu_free_attrs()
    640  dma_common_free_remap(cpu_addr, size, VM_USERMAP);  in __iommu_free_attrs()
    641  } else if (is_vmalloc_addr(cpu_addr)){  in __iommu_free_attrs()
    [all …]
/Linux-v4.19/drivers/gpu/drm/amd/amdgpu/
  amdgpu_fence.c
    100  if (drv->cpu_addr)  in amdgpu_fence_write()
    101  *drv->cpu_addr = cpu_to_le32(seq);  in amdgpu_fence_write()
    117  if (drv->cpu_addr)  in amdgpu_fence_read()
    118  seq = le32_to_cpu(*drv->cpu_addr);  in amdgpu_fence_read()
    380  ring->fence_drv.cpu_addr = &adev->wb.wb[ring->fence_offs];  in amdgpu_fence_driver_start_ring()
    385  ring->fence_drv.cpu_addr = adev->uvd.inst[ring->me].cpu_addr + index;  in amdgpu_fence_driver_start_ring()
    397  ring->fence_drv.gpu_addr, ring->fence_drv.cpu_addr);  in amdgpu_fence_driver_start_ring()
    421  ring->fence_drv.cpu_addr = NULL;  in amdgpu_fence_driver_init_ring()
    681  le32_to_cpu(*(ring->fence_drv.cpu_addr + 2)));  in amdgpu_debugfs_fence_info()
    684  le32_to_cpu(*(ring->fence_drv.cpu_addr + 4)));  in amdgpu_debugfs_fence_info()
    [all …]
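In the amdgpu hits, cpu_addr is the CPU-visible slot that mirrors the GPU fence sequence number, always accessed in little-endian form. A sketch of that read/write pattern with an illustrative demo_fence_drv structure (the real driver keeps the slot in the writeback page or the UVD buffer object):

```c
#include <linux/types.h>
#include <asm/byteorder.h>

struct demo_fence_drv {
	volatile __le32	*cpu_addr;	/* CPU view of the fence slot */
	u32		 sync_seq;	/* last sequence written by the CPU */
};

/* Publish a new sequence number through the CPU-mapped slot. */
static void demo_fence_write(struct demo_fence_drv *drv, u32 seq)
{
	if (drv->cpu_addr)
		*drv->cpu_addr = cpu_to_le32(seq);
}

/* Read back the last signalled sequence, falling back to the CPU copy. */
static u32 demo_fence_read(struct demo_fence_drv *drv)
{
	return drv->cpu_addr ? le32_to_cpu(*drv->cpu_addr) : drv->sync_seq;
}
```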
/Linux-v4.19/drivers/media/common/saa7146/
  saa7146_core.c
    427  dev->d_rps0.cpu_addr = pci_zalloc_consistent(pci, SAA7146_RPS_MEM,  in saa7146_init_one()
    429  if (!dev->d_rps0.cpu_addr)  in saa7146_init_one()
    432  dev->d_rps1.cpu_addr = pci_zalloc_consistent(pci, SAA7146_RPS_MEM,  in saa7146_init_one()
    434  if (!dev->d_rps1.cpu_addr)  in saa7146_init_one()
    437  dev->d_i2c.cpu_addr = pci_zalloc_consistent(pci, SAA7146_RPS_MEM,  in saa7146_init_one()
    439  if (!dev->d_i2c.cpu_addr)  in saa7146_init_one()
    486  pci_free_consistent(pci, SAA7146_RPS_MEM, dev->d_i2c.cpu_addr,  in saa7146_init_one()
    489  pci_free_consistent(pci, SAA7146_RPS_MEM, dev->d_rps1.cpu_addr,  in saa7146_init_one()
    492  pci_free_consistent(pci, SAA7146_RPS_MEM, dev->d_rps0.cpu_addr,  in saa7146_init_one()
    515  { dev->d_i2c.cpu_addr, dev->d_i2c.dma_handle },  in saa7146_remove_one()
    [all …]
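The saa7146 hits pair pci_zalloc_consistent() with pci_free_consistent(): cpu_addr is the zeroed kernel pointer the driver fills in, dma_handle is what the board is programmed with. A sketch of that pairing with illustrative names; the real driver allocates SAA7146_RPS_MEM bytes per area:

```c
#include <linux/errno.h>
#include <linux/pci.h>

/* Allocate one zeroed, DMA-consistent area for the device. */
static int demo_alloc_area(struct pci_dev *pdev, size_t size,
			   void **cpu_addr, dma_addr_t *dma_handle)
{
	*cpu_addr = pci_zalloc_consistent(pdev, size, dma_handle);
	return *cpu_addr ? 0 : -ENOMEM;
}

/* Release it with the same size and both addresses it was created with. */
static void demo_free_area(struct pci_dev *pdev, size_t size,
			   void *cpu_addr, dma_addr_t dma_handle)
{
	pci_free_consistent(pdev, size, cpu_addr, dma_handle);
}
```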
/Linux-v4.19/arch/alpha/kernel/
  pci_iommu.c
    252  pci_map_single_1(struct pci_dev *pdev, void *cpu_addr, size_t size,  in pci_map_single_1() argument
    264  paddr = __pa(cpu_addr);  in pci_map_single_1()
    273  cpu_addr, size, ret, __builtin_return_address(0));  in pci_map_single_1()
    284  cpu_addr, size, ret, __builtin_return_address(0));  in pci_map_single_1()
    318  ret += (unsigned long)cpu_addr & ~PAGE_MASK;  in pci_map_single_1()
    321  cpu_addr, size, npages, ret, __builtin_return_address(0));  in pci_map_single_1()
    440  void *cpu_addr;  in alpha_pci_alloc_coherent() local
    446  cpu_addr = (void *)__get_free_pages(gfp, order);  in alpha_pci_alloc_coherent()
    447  if (! cpu_addr) {  in alpha_pci_alloc_coherent()
    455  memset(cpu_addr, 0, size);  in alpha_pci_alloc_coherent()
    [all …]
/Linux-v4.19/drivers/dma/
  st_fdma.h
    189  readl((fchan)->fdev->slim_rproc->mem[ST_SLIM_DMEM].cpu_addr \
    194  writel((val), (fchan)->fdev->slim_rproc->mem[ST_SLIM_DMEM].cpu_addr \
    201  writel((val), (fchan)->fdev->slim_rproc->mem[ST_SLIM_DMEM].cpu_addr \
    211  readl((fchan)->fdev->slim_rproc->mem[ST_SLIM_DMEM].cpu_addr \
    216  writel((val), (fchan)->fdev->slim_rproc->mem[ST_SLIM_DMEM].cpu_addr \
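The st_fdma macros go through the ioremapped cpu_addr of the SLIM DMEM region plus a register offset. A sketch of the same MMIO accessor pattern with illustrative names:

```c
#include <linux/io.h>
#include <linux/types.h>

struct demo_mem {
	void __iomem	*cpu_addr;	/* ioremapped base of the region */
	size_t		 size;
};

static u32 demo_dmem_readl(struct demo_mem *mem, unsigned long offset)
{
	return readl(mem->cpu_addr + offset);
}

static void demo_dmem_writel(struct demo_mem *mem, unsigned long offset, u32 val)
{
	writel(val, mem->cpu_addr + offset);
}
```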
/Linux-v4.19/drivers/remoteproc/
  imx_rproc.c
    58  void __iomem *cpu_addr;  member
    236  va = (__force void *)(priv->mem[i].cpu_addr + offset);  in imx_rproc_da_to_va()
    270  priv->mem[b].cpu_addr = devm_ioremap(&pdev->dev,  in imx_rproc_addr_init()
    272  if (!priv->mem[b].cpu_addr) {  in imx_rproc_addr_init()
    301  priv->mem[b].cpu_addr = devm_ioremap_resource(&pdev->dev, &res);  in imx_rproc_addr_init()
    302  if (IS_ERR(priv->mem[b].cpu_addr)) {  in imx_rproc_addr_init()
    304  err = PTR_ERR(priv->mem[b].cpu_addr);  in imx_rproc_addr_init()
  wkup_m3_rproc.c
    43  void __iomem *cpu_addr;  member
    106  va = (__force void *)(wkupm3->mem[i].cpu_addr + offset);  in wkup_m3_rproc_da_to_va()
    179  wkupm3->mem[i].cpu_addr = devm_ioremap_resource(dev, res);  in wkup_m3_rproc_probe()
    180  if (IS_ERR(wkupm3->mem[i].cpu_addr)) {  in wkup_m3_rproc_probe()
    183  ret = PTR_ERR(wkupm3->mem[i].cpu_addr);  in wkup_m3_rproc_probe()
  keystone_remoteproc.c
    43  void __iomem *cpu_addr;  member
    279  va = ksproc->mem[i].cpu_addr + offset;  in keystone_rproc_da_to_va()
    287  va = ksproc->mem[i].cpu_addr + offset;  in keystone_rproc_da_to_va()
    321  ksproc->mem[i].cpu_addr = devm_ioremap_resource(dev, res);  in keystone_rproc_of_get_memories()
    322  if (IS_ERR(ksproc->mem[i].cpu_addr)) {  in keystone_rproc_of_get_memories()
    325  return PTR_ERR(ksproc->mem[i].cpu_addr);  in keystone_rproc_of_get_memories()
    333  memset((__force void *)ksproc->mem[i].cpu_addr, 0,  in keystone_rproc_of_get_memories()
  st_slim_rproc.c
    148  fw_rev = readl(slim_rproc->mem[ST_SLIM_DMEM].cpu_addr +  in slim_rproc_start()
    193  va = (__force void *)slim_rproc->mem[i].cpu_addr;  in slim_rproc_da_to_va()
    257  slim_rproc->mem[i].cpu_addr = devm_ioremap_resource(dev, res);  in st_slim_rproc_alloc()
    258  if (IS_ERR(slim_rproc->mem[i].cpu_addr)) {  in st_slim_rproc_alloc()
    260  err = PTR_ERR(slim_rproc->mem[i].cpu_addr);  in st_slim_rproc_alloc()
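All four remoteproc drivers above implement da_to_va() the same way: find the carveout whose device-address window covers the request and return its ioremapped cpu_addr plus the offset. A condensed sketch with illustrative structure names:

```c
#include <linux/io.h>
#include <linux/types.h>

struct demo_rproc_mem {
	void __iomem	*cpu_addr;	/* ioremapped kernel address */
	u32		 dev_addr;	/* address as the remote core sees it */
	size_t		 size;
};

/* Translate a device address into a kernel virtual address, or NULL. */
static void *demo_da_to_va(struct demo_rproc_mem *mem, int nr_mems,
			   u64 da, size_t len)
{
	int i;

	for (i = 0; i < nr_mems; i++) {
		if (da < mem[i].dev_addr ||
		    da + len > mem[i].dev_addr + mem[i].size)
			continue;
		return (__force void *)(mem[i].cpu_addr +
					(da - mem[i].dev_addr));
	}

	return NULL;
}
```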
/Linux-v4.19/arch/microblaze/pci/
  pci-common.c
    443  range.cpu_addr, range.size);  in pci_process_bridge_OF_ranges()
    450  if (range.cpu_addr == OF_BAD_ADDR || range.size == 0)  in pci_process_bridge_OF_ranges()
    458  range.cpu_addr, range.cpu_addr + range.size - 1,  in pci_process_bridge_OF_ranges()
    471  hose->io_base_virt = ioremap(range.cpu_addr,  in pci_process_bridge_OF_ranges()
    482  hose->io_base_phys = range.cpu_addr - range.pci_addr;  in pci_process_bridge_OF_ranges()
    486  range.cpu_addr = range.pci_addr;  in pci_process_bridge_OF_ranges()
    491  range.cpu_addr, range.cpu_addr + range.size - 1,  in pci_process_bridge_OF_ranges()
    503  isa_mb = range.cpu_addr;  in pci_process_bridge_OF_ranges()
    506  isa_mem_base = range.cpu_addr;  in pci_process_bridge_OF_ranges()
    507  hose->isa_mem_phys = range.cpu_addr;  in pci_process_bridge_OF_ranges()
    [all …]
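The pci-common.c hits come from walking the host bridge "ranges" property: each struct of_pci_range carries the CPU-side window start (cpu_addr), the PCI bus address (pci_addr) and the window size. A sketch of that walk that only prints each window; the bridge setup done by pci_process_bridge_OF_ranges() is out of scope:

```c
#include <linux/of.h>
#include <linux/of_address.h>
#include <linux/printk.h>

static void demo_dump_pci_ranges(struct device_node *np)
{
	struct of_pci_range_parser parser;
	struct of_pci_range range;

	if (of_pci_range_parser_init(&parser, np))
		return;

	/* One iteration per window in the bridge node's "ranges" property. */
	for_each_of_pci_range(&parser, &range)
		pr_info("window: cpu %#llx..%#llx -> pci %#llx (flags %#x)\n",
			range.cpu_addr, range.cpu_addr + range.size - 1,
			range.pci_addr, range.flags);
}
```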
/Linux-v4.19/arch/mips/pci/
  pci-legacy.c
    154  range.cpu_addr,  in pci_load_of_ranges()
    155  range.cpu_addr + range.size - 1);  in pci_load_of_ranges()
    157  (unsigned long)ioremap(range.cpu_addr,  in pci_load_of_ranges()
    163  range.cpu_addr,  in pci_load_of_ranges()
    164  range.cpu_addr + range.size - 1);  in pci_load_of_ranges()
/Linux-v4.19/drivers/net/wireless/ath/wcn36xx/
  dxe.c
    177  wcn_ch->cpu_addr = dma_alloc_coherent(dev, size, &wcn_ch->dma_addr,  in wcn36xx_dxe_init_descs()
    179  if (!wcn_ch->cpu_addr)  in wcn36xx_dxe_init_descs()
    182  memset(wcn_ch->cpu_addr, 0, size);  in wcn36xx_dxe_init_descs()
    184  cur_dxe = (struct wcn36xx_dxe_desc *)wcn_ch->cpu_addr;  in wcn36xx_dxe_init_descs()
    234  dma_free_coherent(dev, size,wcn_ch->cpu_addr, wcn_ch->dma_addr);  in wcn36xx_dxe_deinit_descs()
    622  void *cpu_addr;  in wcn36xx_dxe_allocate_mem_pools() local
    631  cpu_addr = dma_alloc_coherent(wcn->dev, s, &wcn->mgmt_mem_pool.phy_addr,  in wcn36xx_dxe_allocate_mem_pools()
    633  if (!cpu_addr)  in wcn36xx_dxe_allocate_mem_pools()
    636  wcn->mgmt_mem_pool.virt_addr = cpu_addr;  in wcn36xx_dxe_allocate_mem_pools()
    637  memset(cpu_addr, 0, s);  in wcn36xx_dxe_allocate_mem_pools()
    [all …]
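The wcn36xx hits allocate a DMA descriptor ring with dma_alloc_coherent(): cpu_addr is the pointer the CPU initialises, dma_addr is what the DMA engine is programmed with, and dma_free_coherent() takes both back. A sketch of that allocate/initialise/free cycle; the demo_desc layout is illustrative, not the wcn36xx_dxe_desc format:

```c
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/gfp.h>
#include <linux/string.h>
#include <linux/types.h>

struct demo_desc {
	__le32 ctrl;
	__le32 src;
	__le32 dst;
	__le32 next;
};

struct demo_ring {
	struct demo_desc *descs;	/* == cpu_addr, the CPU view */
	dma_addr_t	  dma_addr;	/* the device view */
	int		  n_desc;
};

static int demo_ring_alloc(struct device *dev, struct demo_ring *ring, int n)
{
	size_t size = n * sizeof(struct demo_desc);

	ring->descs = dma_alloc_coherent(dev, size, &ring->dma_addr,
					 GFP_KERNEL);
	if (!ring->descs)
		return -ENOMEM;

	memset(ring->descs, 0, size);	/* the 4.19 driver also zeroes by hand */
	ring->n_desc = n;
	return 0;
}

static void demo_ring_free(struct device *dev, struct demo_ring *ring)
{
	dma_free_coherent(dev, ring->n_desc * sizeof(struct demo_desc),
			  ring->descs, ring->dma_addr);
}
```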
/Linux-v4.19/arch/powerpc/mm/
  dma-noncoherent.c
    406  unsigned long __dma_get_coherent_pfn(unsigned long cpu_addr)  in __dma_get_coherent_pfn() argument
    412  pgd_t *pgd = pgd_offset_k(cpu_addr);  in __dma_get_coherent_pfn()
    413  pud_t *pud = pud_offset(pgd, cpu_addr);  in __dma_get_coherent_pfn()
    414  pmd_t *pmd = pmd_offset(pud, cpu_addr);  in __dma_get_coherent_pfn()
    415  pte_t *ptep = pte_offset_kernel(pmd, cpu_addr);  in __dma_get_coherent_pfn()
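The powerpc helper walks the kernel page tables to recover the pfn behind a noncoherent mapping's cpu_addr. The excerpt stops before the return; a sketch of the walk as it reads in the 32-bit powerpc file, assuming the mapping exists and the final step is pte_pfn() (architectures with an unfolded p4d level would need an extra step not shown here):

```c
#include <linux/mm.h>
#include <asm/pgtable.h>

static unsigned long demo_coherent_pfn(unsigned long cpu_addr)
{
	/* Descend pgd -> pud -> pmd -> pte for a kernel virtual address. */
	pgd_t *pgd = pgd_offset_k(cpu_addr);
	pud_t *pud = pud_offset(pgd, cpu_addr);
	pmd_t *pmd = pmd_offset(pud, cpu_addr);
	pte_t *ptep = pte_offset_kernel(pmd, cpu_addr);

	return pte_pfn(*ptep);
}
```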
/Linux-v4.19/arch/arm/include/asm/
  dma-mapping.h
    140  extern void arm_dma_free(struct device *dev, size_t size, void *cpu_addr,
    157  void *cpu_addr, dma_addr_t dma_addr, size_t size,
    213  void *cpu_addr, dma_addr_t dma_addr, size_t size,