/Linux-v4.19/drivers/gpu/drm/amd/amdgpu/
amdgpu_vm.c
    197  adev->vm_manager.block_size;  in amdgpu_vm_level_shift()
    222  adev->vm_manager.root_level);  in amdgpu_vm_num_entries()
    224  if (level == adev->vm_manager.root_level)  in amdgpu_vm_num_entries()
    226  return round_up(adev->vm_manager.max_pfn, 1 << shift) >> shift;  in amdgpu_vm_num_entries()
    376  if (level == adev->vm_manager.root_level) {  in amdgpu_vm_clear_bo()
    596  if (eaddr >= adev->vm_manager.max_pfn) {  in amdgpu_vm_alloc_pts()
    598  eaddr, adev->vm_manager.max_pfn);  in amdgpu_vm_alloc_pts()
    603  adev->vm_manager.root_level, ats);  in amdgpu_vm_alloc_pts()
    655  struct amdgpu_vmid_mgr *id_mgr = &adev->vm_manager.id_mgr[vmhub];  in amdgpu_vm_need_pipeline_sync()
    693  struct amdgpu_vmid_mgr *id_mgr = &adev->vm_manager.id_mgr[vmhub];  in amdgpu_vm_flush()
    [all …]

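The arithmetic on line 226 of amdgpu_vm.c rounds max_pfn up to the range covered by one entry at the given level, then shifts it down to get the entry count. A minimal user-space sketch of that calculation; the shift value below is illustrative, since the driver derives the real one from block_size and root_level:

    #include <stdint.h>
    #include <stdio.h>

    /* round x up to the next multiple of the power-of-two align
     * (same result as the kernel's round_up() for power-of-two values) */
    static uint64_t round_up_pow2(uint64_t x, uint64_t align)
    {
            return (x + align - 1) & ~(align - 1);
    }

    int main(void)
    {
            /* illustrative only: a 48-bit VA space of 4 KiB pages and a
             * 9-bit-per-level layout; the driver computes the real shift */
            uint64_t max_pfn = 1ULL << 36;
            unsigned int shift = 9 * 3;   /* three 9-bit levels below the root */

            uint64_t entries = round_up_pow2(max_pfn, 1ULL << shift) >> shift;
            printf("root table needs %llu entries\n", (unsigned long long)entries);
            return 0;
    }
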
amdgpu_ids.c
    203  struct amdgpu_vmid_mgr *id_mgr = &adev->vm_manager.id_mgr[vmhub];  in amdgpu_vmid_grab_idle()
    226  u64 fence_context = adev->vm_manager.fence_context + ring->idx;  in amdgpu_vmid_grab_idle()
    227  unsigned seqno = ++adev->vm_manager.seqno[ring->idx];  in amdgpu_vmid_grab_idle()
    339  struct amdgpu_vmid_mgr *id_mgr = &adev->vm_manager.id_mgr[vmhub];  in amdgpu_vmid_grab_used()
    408  struct amdgpu_vmid_mgr *id_mgr = &adev->vm_manager.id_mgr[vmhub];  in amdgpu_vmid_grab()
    471  id_mgr = &adev->vm_manager.id_mgr[vmhub];  in amdgpu_vmid_alloc_reserved()
    498  struct amdgpu_vmid_mgr *id_mgr = &adev->vm_manager.id_mgr[vmhub];  in amdgpu_vmid_free_reserved()
    521  struct amdgpu_vmid_mgr *id_mgr = &adev->vm_manager.id_mgr[vmhub];  in amdgpu_vmid_reset()
    548  &adev->vm_manager.id_mgr[i];  in amdgpu_vmid_reset_all()
    568  &adev->vm_manager.id_mgr[i];  in amdgpu_vmid_mgr_init()
    [all …]

gfxhub_v1_0.c
    44  + adev->vm_manager.vram_base_offset;  in gfxhub_v1_0_init_gart_pt_regs()
    87  + adev->vm_manager.vram_base_offset;  in gfxhub_v1_0_init_system_aperture_regs()
    196  num_level = adev->vm_manager.num_level;  in gfxhub_v1_0_setup_vmid_config()
    197  block_size = adev->vm_manager.block_size;  in gfxhub_v1_0_setup_vmid_config()
    233  lower_32_bits(adev->vm_manager.max_pfn - 1));  in gfxhub_v1_0_setup_vmid_config()
    235  upper_32_bits(adev->vm_manager.max_pfn - 1));  in gfxhub_v1_0_setup_vmid_config()

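Lines 233 and 235 program a 64-bit end address (max_pfn - 1) into a pair of 32-bit registers using the kernel's lower_32_bits()/upper_32_bits() helpers. A small sketch of that split with an assumed, illustrative max_pfn:

    #include <stdint.h>
    #include <stdio.h>

    /* user-space equivalents of the kernel's lower_32_bits()/upper_32_bits() */
    #define lower_32_bits(n) ((uint32_t)((n) & 0xffffffffu))
    #define upper_32_bits(n) ((uint32_t)((n) >> 32))

    int main(void)
    {
            /* illustrative: max_pfn for a 47-bit VA space of 4 KiB pages */
            uint64_t end = (1ULL << 35) - 1;   /* max_pfn - 1 */

            printf("low 32 bits  = 0x%08x\n", (unsigned)lower_32_bits(end));
            printf("high 32 bits = 0x%08x\n", (unsigned)upper_32_bits(end));
            return 0;
    }
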
vega10_ih.c
    279  spin_lock(&adev->vm_manager.pasid_lock);  in vega10_ih_prescreen_iv()
    280  vm = idr_find(&adev->vm_manager.pasid_idr, pasid);  in vega10_ih_prescreen_iv()
    283  spin_unlock(&adev->vm_manager.pasid_lock);  in vega10_ih_prescreen_iv()
    291  spin_unlock(&adev->vm_manager.pasid_lock);  in vega10_ih_prescreen_iv()
    295  spin_unlock(&adev->vm_manager.pasid_lock);  in vega10_ih_prescreen_iv()

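The vega10_ih_prescreen_iv() hits show the PASID-to-VM lookup pattern: take pasid_lock, resolve the PASID through pasid_idr with idr_find(), and drop the lock on every exit path. A hedged kernel-style sketch of that shape only; the struct and function names below are placeholders rather than the driver's real types, and the real function does considerably more checking:

    #include <linux/idr.h>
    #include <linux/spinlock.h>

    struct my_vm;                    /* placeholder for the driver's VM struct */

    struct my_vm_manager {           /* placeholder for the real manager struct */
            spinlock_t pasid_lock;
            struct idr pasid_idr;
    };

    /* Resolve a PASID to its VM under pasid_lock, mirroring the
     * lock / idr_find() / unlock shape seen in vega10_ih_prescreen_iv(). */
    static struct my_vm *lookup_vm_by_pasid(struct my_vm_manager *mgr,
                                            unsigned int pasid)
    {
            struct my_vm *vm;

            spin_lock(&mgr->pasid_lock);
            vm = idr_find(&mgr->pasid_idr, pasid);
            spin_unlock(&mgr->pasid_lock);

            return vm;
    }
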
mmhub_v1_0.c
    54  adev->vm_manager.vram_base_offset;  in mmhub_v1_0_init_gart_pt_regs()
    98  adev->vm_manager.vram_base_offset;  in mmhub_v1_0_init_system_aperture_regs()
    208  num_level = adev->vm_manager.num_level;  in mmhub_v1_0_setup_vmid_config()
    209  block_size = adev->vm_manager.block_size;  in mmhub_v1_0_setup_vmid_config()
    245  lower_32_bits(adev->vm_manager.max_pfn - 1));  in mmhub_v1_0_setup_vmid_config()
    247  upper_32_bits(adev->vm_manager.max_pfn - 1));  in mmhub_v1_0_setup_vmid_config()

gmc_v6_0.c
    471  uint32_t high = adev->vm_manager.max_pfn -  in gmc_v6_0_set_prt()
    526  field = adev->vm_manager.fragment_size;  in gmc_v6_0_gart_enable()
    550  WREG32(mmVM_CONTEXT1_PAGE_TABLE_END_ADDR, adev->vm_manager.max_pfn - 1);  in gmc_v6_0_gart_enable()
    571  ((adev->vm_manager.block_size - 9)  in gmc_v6_0_gart_enable()
    910  adev->vm_manager.id_mgr[0].num_ids = AMDGPU_NUM_OF_VMIDS;  in gmc_v6_0_sw_init()
    918  adev->vm_manager.vram_base_offset = tmp;  in gmc_v6_0_sw_init()
    920  adev->vm_manager.vram_base_offset = 0;  in gmc_v6_0_sw_init()

gmc_v7_0.c
    568  uint32_t high = adev->vm_manager.max_pfn -  in gmc_v7_0_set_prt()
    636  field = adev->vm_manager.fragment_size;  in gmc_v7_0_gart_enable()
    665  WREG32(mmVM_CONTEXT1_PAGE_TABLE_END_ADDR, adev->vm_manager.max_pfn - 1);  in gmc_v7_0_gart_enable()
    683  adev->vm_manager.block_size - 9);  in gmc_v7_0_gart_enable()
    1056  adev->vm_manager.id_mgr[0].num_ids = AMDGPU_NUM_OF_VMIDS;  in gmc_v7_0_sw_init()
    1064  adev->vm_manager.vram_base_offset = tmp;  in gmc_v7_0_sw_init()
    1066  adev->vm_manager.vram_base_offset = 0;  in gmc_v7_0_sw_init()

gmc_v9_0.c
    513  *addr = adev->vm_manager.vram_base_offset + *addr -  in gmc_v9_0_get_vm_pde()
    704  adev->vm_manager.vram_base_offset = gfxhub_v1_0_get_mc_fb_offset(adev);  in gmc_v9_0_vram_gtt_location()
    864  adev->vm_manager.num_level > 1;  in gmc_v9_0_sw_init()
    937  adev->vm_manager.id_mgr[AMDGPU_GFXHUB].num_ids = AMDGPU_NUM_OF_VMIDS;  in gmc_v9_0_sw_init()
    938  adev->vm_manager.id_mgr[AMDGPU_MMHUB].num_ids = AMDGPU_NUM_OF_VMIDS;  in gmc_v9_0_sw_init()

si_dma.c
    884  if (adev->vm_manager.vm_pte_funcs == NULL) {  in si_dma_set_vm_pte_funcs()
    885  adev->vm_manager.vm_pte_funcs = &si_dma_vm_pte_funcs;  in si_dma_set_vm_pte_funcs()
    887  adev->vm_manager.vm_pte_rings[i] =  in si_dma_set_vm_pte_funcs()
    890  adev->vm_manager.vm_pte_num_rings = adev->sdma.num_instances;  in si_dma_set_vm_pte_funcs()

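si_dma.c and the cik_sdma/sdma_v2_4/sdma_v3_0/sdma_v4_0 entries below all repeat the same registration idiom: install the engine's PTE-update function table into the shared vm_manager only if none is registered yet, then record which rings can service PTE updates. A generic, self-contained sketch of that idiom with illustrative names (not the real amdgpu types):

    #include <stddef.h>
    #include <stdio.h>

    /* illustrative stand-ins for the driver's structures */
    struct pte_funcs {
            void (*write_pte)(unsigned long pe, unsigned long value, unsigned count);
    };

    struct vm_manager {
            const struct pte_funcs *vm_pte_funcs;  /* shared table, installed once */
            void *vm_pte_rings[8];
            unsigned vm_pte_num_rings;
    };

    static void demo_write_pte(unsigned long pe, unsigned long value, unsigned count)
    {
            printf("write %u PTEs at 0x%lx starting with 0x%lx\n", count, pe, value);
    }

    static const struct pte_funcs demo_pte_funcs = { .write_pte = demo_write_pte };

    /* install this engine's table only if no other IP block registered first,
     * then remember the rings that can service PTE updates */
    static void demo_set_vm_pte_funcs(struct vm_manager *mgr, void **rings,
                                      unsigned num_rings)
    {
            unsigned i;

            if (mgr->vm_pte_funcs == NULL) {
                    mgr->vm_pte_funcs = &demo_pte_funcs;
                    for (i = 0; i < num_rings; i++)
                            mgr->vm_pte_rings[i] = rings[i];
                    mgr->vm_pte_num_rings = num_rings;
            }
    }

    int main(void)
    {
            struct vm_manager mgr = { 0 };
            void *rings[2] = { &mgr, &mgr };   /* dummy ring pointers */

            demo_set_vm_pte_funcs(&mgr, rings, 2);
            mgr.vm_pte_funcs->write_pte(0x1000, 0xabcd, 4);
            return 0;
    }
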
gmc_v8_0.c
    773  uint32_t high = adev->vm_manager.max_pfn -  in gmc_v8_0_set_prt()
    842  field = adev->vm_manager.fragment_size;  in gmc_v8_0_gart_enable()
    886  WREG32(mmVM_CONTEXT1_PAGE_TABLE_END_ADDR, adev->vm_manager.max_pfn - 1);  in gmc_v8_0_gart_enable()
    911  adev->vm_manager.block_size - 9);  in gmc_v8_0_gart_enable()
    1160  adev->vm_manager.id_mgr[0].num_ids = AMDGPU_NUM_OF_VMIDS;  in gmc_v8_0_sw_init()
    1168  adev->vm_manager.vram_base_offset = tmp;  in gmc_v8_0_sw_init()
    1170  adev->vm_manager.vram_base_offset = 0;  in gmc_v8_0_sw_init()

amdgpu_amdkfd_gfx_v9.c
    1034  lower_32_bits(adev->vm_manager.max_pfn - 1));  in set_vm_context_page_table_base()
    1036  upper_32_bits(adev->vm_manager.max_pfn - 1));  in set_vm_context_page_table_base()
    1045  lower_32_bits(adev->vm_manager.max_pfn - 1));  in set_vm_context_page_table_base()
    1047  upper_32_bits(adev->vm_manager.max_pfn - 1));  in set_vm_context_page_table_base()

cik_sdma.c
    1391  if (adev->vm_manager.vm_pte_funcs == NULL) {  in cik_sdma_set_vm_pte_funcs()
    1392  adev->vm_manager.vm_pte_funcs = &cik_sdma_vm_pte_funcs;  in cik_sdma_set_vm_pte_funcs()
    1394  adev->vm_manager.vm_pte_rings[i] =  in cik_sdma_set_vm_pte_funcs()
    1397  adev->vm_manager.vm_pte_num_rings = adev->sdma.num_instances;  in cik_sdma_set_vm_pte_funcs()

sdma_v2_4.c
    1317  if (adev->vm_manager.vm_pte_funcs == NULL) {  in sdma_v2_4_set_vm_pte_funcs()
    1318  adev->vm_manager.vm_pte_funcs = &sdma_v2_4_vm_pte_funcs;  in sdma_v2_4_set_vm_pte_funcs()
    1320  adev->vm_manager.vm_pte_rings[i] =  in sdma_v2_4_set_vm_pte_funcs()
    1323  adev->vm_manager.vm_pte_num_rings = adev->sdma.num_instances;  in sdma_v2_4_set_vm_pte_funcs()

amdgpu_vm.h
    49  #define AMDGPU_VM_PTE_COUNT(adev) (1 << (adev)->vm_manager.block_size)

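AMDGPU_VM_PTE_COUNT() is simply 1 << block_size. A worked example, assuming a block_size of 9 (a common value, though it is configurable per ASIC) and the 4 KiB GPU page size:

    #include <stdio.h>

    int main(void)
    {
            unsigned int block_size = 9;                 /* assumed; configurable */
            unsigned int pte_count = 1u << block_size;   /* AMDGPU_VM_PTE_COUNT */
            unsigned long bytes = (unsigned long)pte_count * 4096;

            printf("%u PTEs per block, mapping %lu bytes (2 MiB)\n",
                   pte_count, bytes);
            return 0;
    }
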
sdma_v3_0.c
    1757  if (adev->vm_manager.vm_pte_funcs == NULL) {  in sdma_v3_0_set_vm_pte_funcs()
    1758  adev->vm_manager.vm_pte_funcs = &sdma_v3_0_vm_pte_funcs;  in sdma_v3_0_set_vm_pte_funcs()
    1760  adev->vm_manager.vm_pte_rings[i] =  in sdma_v3_0_set_vm_pte_funcs()
    1763  adev->vm_manager.vm_pte_num_rings = adev->sdma.num_instances;  in sdma_v3_0_set_vm_pte_funcs()

sdma_v4_0.c
    1758  if (adev->vm_manager.vm_pte_funcs == NULL) {  in sdma_v4_0_set_vm_pte_funcs()
    1759  adev->vm_manager.vm_pte_funcs = &sdma_v4_0_vm_pte_funcs;  in sdma_v4_0_set_vm_pte_funcs()
    1761  adev->vm_manager.vm_pte_rings[i] =  in sdma_v4_0_set_vm_pte_funcs()
    1764  adev->vm_manager.vm_pte_num_rings = adev->sdma.num_instances;  in sdma_v4_0_set_vm_pte_funcs()

amdgpu_virt.c
    31  uint64_t addr = adev->vm_manager.max_pfn << AMDGPU_GPU_PAGE_SHIFT;  in amdgpu_csa_vaddr()

amdgpu.h
    1430  struct amdgpu_vm_manager vm_manager;  member
    1709  #define amdgpu_vm_copy_pte(adev, ib, pe, src, count) ((adev)->vm_manager.vm_pte_funcs->copy_pte((ib…
    1710  #define amdgpu_vm_write_pte(adev, ib, pe, value, count, incr) ((adev)->vm_manager.vm_pte_funcs->wri…
    1711  #define amdgpu_vm_set_pte_pde(adev, ib, pe, addr, count, incr, flags) ((adev)->vm_manager.vm_pte_fu…

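The amdgpu.h macros above dispatch through whichever vm_pte_funcs table was installed by an SDMA/DMA block (see the registration sketch after the si_dma.c entry). A compact sketch of that macro-over-function-pointer dispatch style, again with illustrative names:

    #include <stddef.h>
    #include <stdio.h>

    /* illustrative stand-ins, not the real amdgpu definitions */
    struct pte_ops {
            void (*copy_pte)(void *ib, unsigned long pe, unsigned long src,
                             unsigned count);
    };

    struct demo_device {
            struct {
                    const struct pte_ops *vm_pte_funcs;
            } vm_manager;
    };

    /* same dispatch style as the amdgpu_vm_copy_pte() macro */
    #define demo_vm_copy_pte(dev, ib, pe, src, count) \
            ((dev)->vm_manager.vm_pte_funcs->copy_pte((ib), (pe), (src), (count)))

    static void demo_copy_pte(void *ib, unsigned long pe, unsigned long src,
                              unsigned count)
    {
            (void)ib;
            printf("copy %u PTEs from 0x%lx to 0x%lx\n", count, src, pe);
    }

    static const struct pte_ops demo_ops = { .copy_pte = demo_copy_pte };

    int main(void)
    {
            struct demo_device dev = { .vm_manager = { .vm_pte_funcs = &demo_ops } };

            demo_vm_copy_pte(&dev, NULL, 0x2000, 0x1000, 8);
            return 0;
    }
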
amdgpu_amdkfd.c
    156  .gpuvm_size = min(adev->vm_manager.max_pfn  in amdgpu_amdkfd_device_init()

amdgpu_kms.c
    616  vm_size = adev->vm_manager.max_pfn * AMDGPU_GPU_PAGE_SIZE;  in amdgpu_info_ioctl()
    632  dev_info.pte_fragment_size = (1 << adev->vm_manager.fragment_size) * AMDGPU_GPU_PAGE_SIZE;  in amdgpu_info_ioctl()

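Both dev_info fields reported by amdgpu_info_ioctl() are straightforward conversions to bytes. A worked sketch, assuming the 4 KiB AMDGPU_GPU_PAGE_SIZE and a fragment_size of 9; the real values depend on the ASIC and module parameters:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
            const uint64_t gpu_page_size = 4096;   /* AMDGPU_GPU_PAGE_SIZE */
            uint64_t max_pfn = 1ULL << 35;         /* illustrative: 47-bit VA space */
            unsigned int fragment_size = 9;        /* assumed default */

            uint64_t vm_size = max_pfn * gpu_page_size;
            uint64_t pte_fragment_size = (1ULL << fragment_size) * gpu_page_size;

            printf("vm_size           = %llu bytes\n", (unsigned long long)vm_size);
            printf("pte_fragment_size = %llu bytes (2 MiB)\n",
                   (unsigned long long)pte_fragment_size);
            return 0;
    }
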
/Linux-v4.19/drivers/gpu/drm/radeon/
radeon_vm.c
    62  return rdev->vm_manager.max_pfn >> radeon_vm_block_size;  in radeon_vm_num_pdes()
    89  if (!rdev->vm_manager.enabled) {  in radeon_vm_manager_init()
    94  rdev->vm_manager.enabled = true;  in radeon_vm_manager_init()
    110  if (!rdev->vm_manager.enabled)  in radeon_vm_manager_fini()
    114  radeon_fence_unref(&rdev->vm_manager.active[i]);  in radeon_vm_manager_fini()
    116  rdev->vm_manager.enabled = false;  in radeon_vm_manager_fini()
    188  vm_id->last_id_use == rdev->vm_manager.active[vm_id->id])  in radeon_vm_grab_id()
    195  for (i = 1; i < rdev->vm_manager.nvm; ++i) {  in radeon_vm_grab_id()
    196  struct radeon_fence *fence = rdev->vm_manager.active[i];  in radeon_vm_grab_id()
    215  return rdev->vm_manager.active[choices[i]];  in radeon_vm_grab_id()
    [all …]

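radeon_vm_num_pdes() (line 62) is the radeon counterpart of the amdgpu entry-count calculation above: the number of page-directory entries is max_pfn shifted down by the block size. A small sketch with illustrative numbers, since radeon_vm_size and radeon_vm_block_size are module parameters:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
            /* illustrative values, not the driver's guaranteed defaults */
            uint64_t max_pfn = (8ULL << 30) / 4096;   /* 8 GiB VM space, 4 KiB pages */
            unsigned int block_size = 12;             /* 2^12 pages covered per PDE */

            uint64_t num_pdes = max_pfn >> block_size;
            printf("%llu page-directory entries\n", (unsigned long long)num_pdes);
            return 0;
    }
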
ni.c
    1323  rdev->vm_manager.max_pfn - 1);  in cayman_pcie_gart_enable()
    1325  rdev->vm_manager.saved_table_addr[i]);  in cayman_pcie_gart_enable()
    1360  rdev->vm_manager.saved_table_addr[i] = RREG32(  in cayman_pcie_gart_disable()
    2506  rdev->vm_manager.nvm = 8;  in cayman_vm_init()
    2511  rdev->vm_manager.vram_base_offset = tmp;  in cayman_vm_init()
    2513  rdev->vm_manager.vram_base_offset = 0;  in cayman_vm_init()

radeon_gem.c
    607  if (!rdev->vm_manager.enabled) {  in radeon_gem_va_ioctl()

radeon_cs.c
    365  !p->rdev->vm_manager.enabled) {  in radeon_cs_parser_init()

si.c
    4328  WREG32(VM_CONTEXT1_PAGE_TABLE_END_ADDR, rdev->vm_manager.max_pfn - 1);  in si_pcie_gart_enable()
    4336  rdev->vm_manager.saved_table_addr[i]);  in si_pcie_gart_enable()
    4339  rdev->vm_manager.saved_table_addr[i]);  in si_pcie_gart_enable()
    4379  rdev->vm_manager.saved_table_addr[i] = RREG32(reg);  in si_pcie_gart_disable()
    4797  rdev->vm_manager.nvm = 16;  in si_vm_init()
    4799  rdev->vm_manager.vram_base_offset = 0;  in si_vm_init()