| /Linux-v5.4/drivers/iommu/ |
| D | tegra-gart.c |
      58  #define FLUSH_GART_REGS(gart) readl_relaxed((gart)->regs + GART_CONFIG)   argument
      60  #define for_each_gart_pte(gart, iova) \   argument
      61  for (iova = gart->iovmm_base; \
      62  iova < gart->iovmm_end; \
      65  static inline void gart_set_pte(struct gart_device *gart,   in gart_set_pte() argument
      68  writel_relaxed(iova, gart->regs + GART_ENTRY_ADDR);   in gart_set_pte()
      69  writel_relaxed(pte, gart->regs + GART_ENTRY_DATA);   in gart_set_pte()
      72  static inline unsigned long gart_read_pte(struct gart_device *gart,   in gart_read_pte() argument
      77  writel_relaxed(iova, gart->regs + GART_ENTRY_ADDR);   in gart_read_pte()
      78  pte = readl_relaxed(gart->regs + GART_ENTRY_DATA);   in gart_read_pte()
      [all …]
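The hits above capture the Tegra GART's indirect programming model: the IOVA is first written to GART_ENTRY_ADDR, the PTE is then written or read through GART_ENTRY_DATA, and FLUSH_GART_REGS() reads GART_CONFIG back to flush the relaxed (posted) writes. A minimal sketch of how those helpers compose, built only from what the snippet shows; the helper name gart_clear_all() is hypothetical:

    /* Hypothetical helper: walk every IOVA in the aperture, clear its PTE
     * through the indirect ADDR/DATA register pair, then read back
     * GART_CONFIG so the relaxed writes are flushed. */
    static void gart_clear_all(struct gart_device *gart)
    {
            unsigned long iova;

            for_each_gart_pte(gart, iova)
                    gart_set_pte(gart, iova, 0);

            FLUSH_GART_REGS(gart);
    }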
|
| D | Makefile |
      30  obj-$(CONFIG_TEGRA_IOMMU_GART) += tegra-gart.o
|
| /Linux-v5.4/drivers/gpu/drm/radeon/ |
| D | radeon_gart.c |
      75  ptr = pci_alloc_consistent(rdev->pdev, rdev->gart.table_size,   in radeon_gart_table_ram_alloc()
      76  &rdev->gart.table_addr);   in radeon_gart_table_ram_alloc()
      84  rdev->gart.table_size >> PAGE_SHIFT);   in radeon_gart_table_ram_alloc()
      87  rdev->gart.ptr = ptr;   in radeon_gart_table_ram_alloc()
      88  memset((void *)rdev->gart.ptr, 0, rdev->gart.table_size);   in radeon_gart_table_ram_alloc()
     103  if (rdev->gart.ptr == NULL) {   in radeon_gart_table_ram_free()
     109  set_memory_wb((unsigned long)rdev->gart.ptr,   in radeon_gart_table_ram_free()
     110  rdev->gart.table_size >> PAGE_SHIFT);   in radeon_gart_table_ram_free()
     113  pci_free_consistent(rdev->pdev, rdev->gart.table_size,   in radeon_gart_table_ram_free()
     114  (void *)rdev->gart.ptr,   in radeon_gart_table_ram_free()
      [all …]
|
| D | rs400.c |
      85  if (rdev->gart.ptr) {   in rs400_gart_init()
     108  rdev->gart.table_size = rdev->gart.num_gpu_pages * 4;   in rs400_gart_init()
     166  tmp = (u32)rdev->gart.table_addr & 0xfffff000;   in rs400_gart_enable()
     167  tmp |= (upper_32_bits(rdev->gart.table_addr) & 0xff) << 4;   in rs400_gart_enable()
     194  (unsigned long long)rdev->gart.table_addr);   in rs400_gart_enable()
     195  rdev->gart.ready = true;   in rs400_gart_enable()
     238  u32 *gtt = rdev->gart.ptr;   in rs400_gart_set_page()
|
| D | radeon_asic.c |
     167  rdev->asic->gart.tlb_flush = &rv370_pcie_gart_tlb_flush;   in radeon_agp_disable()
     168  rdev->asic->gart.get_page_entry = &rv370_pcie_gart_get_page_entry;   in radeon_agp_disable()
     169  rdev->asic->gart.set_page = &rv370_pcie_gart_set_page;   in radeon_agp_disable()
     173  rdev->asic->gart.tlb_flush = &r100_pci_gart_tlb_flush;   in radeon_agp_disable()
     174  rdev->asic->gart.get_page_entry = &r100_pci_gart_get_page_entry;   in radeon_agp_disable()
     175  rdev->asic->gart.set_page = &r100_pci_gart_set_page;   in radeon_agp_disable()
     209  .gart = {
     277  .gart = {
     373  .gart = {
     441  .gart = {
      [all …]
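The .gart = { … } initializers and the reassignments in radeon_agp_disable() show that radeon treats GART handling as a small per-ASIC vtable: each radeon_asic table carries gart.tlb_flush, gart.get_page_entry and gart.set_page hooks, and disabling AGP simply rebinds those hooks at runtime to the PCIe-GART (rv370_pcie_*) or PCI-GART (r100_pci_*) variants.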
|
| D | rs600.c |
     543  if (rdev->gart.robj) {   in rs600_gart_init()
     552  rdev->gart.table_size = rdev->gart.num_gpu_pages * 8;   in rs600_gart_init()
     561  if (rdev->gart.robj == NULL) {   in rs600_gart_enable()
     598  rdev->gart.table_addr);   in rs600_gart_enable()
     615  (unsigned long long)rdev->gart.table_addr);   in rs600_gart_enable()
     616  rdev->gart.ready = true;   in rs600_gart_enable()
     656  void __iomem *ptr = (void *)rdev->gart.ptr;   in rs600_gart_set_page()
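Note the per-entry size difference between the two IGP GART init paths: rs400_gart_init() sizes the table at 4 bytes per GPU page, while rs600_gart_init() uses 8 bytes per page, i.e. 32-bit versus 64-bit page-table entries. As an illustrative calculation (the aperture size is an assumption, not something taken from the snippets): a 512 MiB aperture of 4 KiB pages is 131072 entries, so the table itself occupies 512 KiB on rs400 but 1 MiB on rs600.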
|
| D | r300.c |
     123  void __iomem *ptr = rdev->gart.ptr;   in rv370_pcie_gart_set_page()
     135  if (rdev->gart.robj) {   in rv370_pcie_gart_init()
     146  rdev->gart.table_size = rdev->gart.num_gpu_pages * 4;   in rv370_pcie_gart_init()
     147  rdev->asic->gart.tlb_flush = &rv370_pcie_gart_tlb_flush;   in rv370_pcie_gart_init()
     148  rdev->asic->gart.get_page_entry = &rv370_pcie_gart_get_page_entry;   in rv370_pcie_gart_init()
     149  rdev->asic->gart.set_page = &rv370_pcie_gart_set_page;   in rv370_pcie_gart_init()
     159  if (rdev->gart.robj == NULL) {   in rv370_pcie_gart_enable()
     174  table_addr = rdev->gart.table_addr;   in rv370_pcie_gart_enable()
     189  rdev->gart.ready = true;   in rv370_pcie_gart_enable()
|
| D | radeon_ttm.c |
    1027  if (p >= rdev->gart.num_cpu_pages)   in radeon_ttm_gtt_read()
    1030  page = rdev->gart.pages[p];   in radeon_ttm_gtt_read()
    1036  kunmap(rdev->gart.pages[p]);   in radeon_ttm_gtt_read()
|
| D | rv770.c |
     902  if (rdev->gart.robj == NULL) {   in rv770_pcie_gart_enable()
     931  WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);   in rv770_pcie_gart_enable()
     942  (unsigned long long)rdev->gart.table_addr);   in rv770_pcie_gart_enable()
     943  rdev->gart.ready = true;   in rv770_pcie_gart_enable()
|
| D | r100.c |
     645  if (rdev->gart.ptr) {   in r100_pci_gart_init()
     653  rdev->gart.table_size = rdev->gart.num_gpu_pages * 4;   in r100_pci_gart_init()
     654  rdev->asic->gart.tlb_flush = &r100_pci_gart_tlb_flush;   in r100_pci_gart_init()
     655  rdev->asic->gart.get_page_entry = &r100_pci_gart_get_page_entry;   in r100_pci_gart_init()
     656  rdev->asic->gart.set_page = &r100_pci_gart_set_page;   in r100_pci_gart_init()
     671  WREG32(RADEON_AIC_PT_BASE, rdev->gart.table_addr);   in r100_pci_gart_enable()
     677  (unsigned long long)rdev->gart.table_addr);   in r100_pci_gart_enable()
     678  rdev->gart.ready = true;   in r100_pci_gart_enable()
     701  u32 *gtt = rdev->gart.ptr;   in r100_pci_gart_set_page()
|
| /Linux-v5.4/drivers/gpu/drm/amd/amdgpu/ |
| D | amdgpu_gart.c |
     118  if (adev->gart.bo == NULL) {   in amdgpu_gart_table_vram_alloc()
     122  bp.size = adev->gart.table_size;   in amdgpu_gart_table_vram_alloc()
     129  r = amdgpu_bo_create(adev, &bp, &adev->gart.bo);   in amdgpu_gart_table_vram_alloc()
     151  r = amdgpu_bo_reserve(adev->gart.bo, false);   in amdgpu_gart_table_vram_pin()
     154  r = amdgpu_bo_pin(adev->gart.bo, AMDGPU_GEM_DOMAIN_VRAM);   in amdgpu_gart_table_vram_pin()
     156  amdgpu_bo_unreserve(adev->gart.bo);   in amdgpu_gart_table_vram_pin()
     159  r = amdgpu_bo_kmap(adev->gart.bo, &adev->gart.ptr);   in amdgpu_gart_table_vram_pin()
     161  amdgpu_bo_unpin(adev->gart.bo);   in amdgpu_gart_table_vram_pin()
     162  amdgpu_bo_unreserve(adev->gart.bo);   in amdgpu_gart_table_vram_pin()
     178  if (adev->gart.bo == NULL) {   in amdgpu_gart_table_vram_unpin()
      [all …]
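Unlike the radeon_gart_table_ram_alloc() path earlier, which keeps the table in coherent system memory, amdgpu allocates its GART table as a VRAM buffer object and CPU-maps it. A condensed sketch of the pin sequence the hits above outline, with the error handling filled in (the function name and exact unwind order are assumptions based only on this snippet):

    /* Sketch: reserve the table BO, pin it into VRAM, then kmap it so
     * adev->gart.ptr points at the table; unpin if the kmap fails and
     * drop the reservation either way. */
    static int gart_table_pin_sketch(struct amdgpu_device *adev)
    {
            int r;

            r = amdgpu_bo_reserve(adev->gart.bo, false);
            if (r)
                    return r;

            r = amdgpu_bo_pin(adev->gart.bo, AMDGPU_GEM_DOMAIN_VRAM);
            if (r) {
                    amdgpu_bo_unreserve(adev->gart.bo);
                    return r;
            }

            r = amdgpu_bo_kmap(adev->gart.bo, &adev->gart.ptr);
            if (r)
                    amdgpu_bo_unpin(adev->gart.bo);
            amdgpu_bo_unreserve(adev->gart.bo);
            return r;
    }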
|
| D | gmc_v10_0.c |
     310  job->vm_pd_addr = amdgpu_gmc_pd_addr(adev->gart.bo);   in gmc_v10_0_flush_gpu_tlb()
     589  if (adev->gart.bo) {   in gmc_v10_0_gart_init()
     599  adev->gart.table_size = adev->gart.num_gpu_pages * 8;   in gmc_v10_0_gart_init()
     600  adev->gart.gart_pte_flags = AMDGPU_PTE_MTYPE_NV10(MTYPE_UC) |   in gmc_v10_0_gart_init()
     771  if (adev->gart.bo == NULL) {   in gmc_v10_0_gart_enable()
     808  (unsigned long long)amdgpu_bo_gpu_offset(adev->gart.bo));   in gmc_v10_0_gart_enable()
     810  adev->gart.ready = true;   in gmc_v10_0_gart_enable()
|
| D | gmc_v6_0.c |
     492  if (adev->gart.bo == NULL) {   in gmc_v6_0_gart_enable()
     500  table_addr = amdgpu_bo_gpu_offset(adev->gart.bo);   in gmc_v6_0_gart_enable()
     578  adev->gart.ready = true;   in gmc_v6_0_gart_enable()
     586  if (adev->gart.bo) {   in gmc_v6_0_gart_init()
     593  adev->gart.table_size = adev->gart.num_gpu_pages * 8;   in gmc_v6_0_gart_init()
     594  adev->gart.gart_pte_flags = 0;   in gmc_v6_0_gart_init()
|
| D | gmc_v9_0.c |
    1049  if (adev->gart.bo) {   in gmc_v9_0_gart_init()
    1057  adev->gart.table_size = adev->gart.num_gpu_pages * 8;   in gmc_v9_0_gart_init()
    1058  adev->gart.gart_pte_flags = AMDGPU_PTE_MTYPE_VG10(MTYPE_UC) |   in gmc_v9_0_gart_init()
    1327  if (adev->gart.bo == NULL) {   in gmc_v9_0_gart_enable()
    1382  (unsigned long long)amdgpu_bo_gpu_offset(adev->gart.bo));   in gmc_v9_0_gart_enable()
    1383  adev->gart.ready = true;   in gmc_v9_0_gart_enable()
|
| D | gmc_v7_0.c |
     588  if (adev->gart.bo == NULL) {   in gmc_v7_0_gart_enable()
     596  table_addr = amdgpu_bo_gpu_offset(adev->gart.bo);   in gmc_v7_0_gart_enable()
     684  adev->gart.ready = true;   in gmc_v7_0_gart_enable()
     692  if (adev->gart.bo) {   in gmc_v7_0_gart_init()
     700  adev->gart.table_size = adev->gart.num_gpu_pages * 8;   in gmc_v7_0_gart_init()
     701  adev->gart.gart_pte_flags = 0;   in gmc_v7_0_gart_init()
|
| D | gmc_v8_0.c |
     815  if (adev->gart.bo == NULL) {   in gmc_v8_0_gart_enable()
     823  table_addr = amdgpu_bo_gpu_offset(adev->gart.bo);   in gmc_v8_0_gart_enable()
     928  adev->gart.ready = true;   in gmc_v8_0_gart_enable()
     936  if (adev->gart.bo) {   in gmc_v8_0_gart_init()
     944  adev->gart.table_size = adev->gart.num_gpu_pages * 8;   in gmc_v8_0_gart_init()
     945  adev->gart.gart_pte_flags = AMDGPU_PTE_EXECUTABLE;   in gmc_v8_0_gart_init()
|
| D | amdgpu_ttm.c |
    1451  flags |= adev->gart.gart_pte_flags;   in amdgpu_ttm_tt_pte_flags()
    1968  dst_addr = amdgpu_bo_gpu_offset(adev->gart.bo);   in amdgpu_map_buffer()
    2029  job->vm_pd_addr = amdgpu_gmc_pd_addr(adev->gart.bo);   in amdgpu_copy_buffer()
    2304  if (p >= adev->gart.num_cpu_pages)   in amdgpu_ttm_gtt_read()
    2307  page = adev->gart.pages[p];   in amdgpu_ttm_gtt_read()
    2313  kunmap(adev->gart.pages[p]);   in amdgpu_ttm_gtt_read()
|
| D | amdgpu_gtt_mgr.c |
     191  lpfn = adev->gart.num_cpu_pages;   in amdgpu_gtt_mgr_alloc()
|
| D | gfxhub_v2_0.c |
      51  uint64_t value = amdgpu_gmc_pd_addr(adev->gart.bo);   in gfxhub_v2_0_init_gart_pt_regs()
|
| D | gfxhub_v1_0.c |
      54  uint64_t pt_base = amdgpu_gmc_pd_addr(adev->gart.bo);   in gfxhub_v1_0_init_gart_aperture_regs()
|
| /Linux-v5.4/include/soc/tegra/ |
| D | mc.h |
     100  int tegra_gart_suspend(struct gart_device *gart);
     101  int tegra_gart_resume(struct gart_device *gart);
     109  static inline int tegra_gart_suspend(struct gart_device *gart)   in tegra_gart_suspend() argument
     114  static inline int tegra_gart_resume(struct gart_device *gart)   in tegra_gart_resume() argument
     168  struct gart_device *gart;   member
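The two pairs of declarations above are the usual Kconfig stub pattern: real prototypes when the Tegra GART IOMMU is built in, and static inline stand-ins otherwise, so the memory-controller core can call tegra_gart_suspend()/tegra_gart_resume() unconditionally (guarded only by the IS_ENABLED() checks visible in mc.c below). The struct gart_device *gart member is the handle in which the MC driver stores the probed GART device (mc->gart in mc.c).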
|
| /Linux-v5.4/drivers/memory/tegra/ |
| D | mc.c |
     712  mc->gart = tegra_gart_probe(&pdev->dev, mc);   in tegra_mc_probe()
     713  if (IS_ERR(mc->gart)) {   in tegra_mc_probe()
     715  PTR_ERR(mc->gart));   in tegra_mc_probe()
     716  mc->gart = NULL;   in tegra_mc_probe()
     728  if (IS_ENABLED(CONFIG_TEGRA_IOMMU_GART) && mc->gart) {   in tegra_mc_suspend()
     729  err = tegra_gart_suspend(mc->gart);   in tegra_mc_suspend()
     742  if (IS_ENABLED(CONFIG_TEGRA_IOMMU_GART) && mc->gart) {   in tegra_mc_resume()
     743  err = tegra_gart_resume(mc->gart);   in tegra_mc_resume()
|
| /Linux-v5.4/Documentation/devicetree/bindings/memory-controllers/ |
| D | nvidia,tegra20-mc.txt |
       4  - compatible : "nvidia,tegra20-mc-gart"
      22  compatible = "nvidia,tegra20-mc-gart";
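The "-gart" suffix in the compatible string reflects that on Tegra20 the GART is part of the memory-controller block rather than a separate device node, which matches the probe flow shown in mc.c above, where tegra_mc_probe() calls tegra_gart_probe() itself instead of binding a standalone GART driver.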
|
| /Linux-v5.4/drivers/gpu/drm/nouveau/ |
| D | nouveau_chan.h |
      18  struct nvif_object gart;   member
|
| D | nouveau_chan.c |
     104  nvif_object_fini(&chan->gart);   in nouveau_channel_del()
     355  nouveau_channel_init(struct nouveau_channel *chan, u32 vram, u32 gart)   in nouveau_channel_init() argument
     415  ret = nvif_object_init(&chan->user, gart, NV_DMA_IN_MEMORY,   in nouveau_channel_init()
     416  &args, sizeof(args), &chan->gart);   in nouveau_channel_init()
|