Lines matching refs:mman (uses of the adev->mman TTM manager state, listed by source line number and enclosing function)
108 adev->mman.mem_global_referenced = false; in amdgpu_ttm_global_init()
110 global_ref = &adev->mman.mem_global_ref; in amdgpu_ttm_global_init()
122 adev->mman.bo_global_ref.mem_glob = in amdgpu_ttm_global_init()
123 adev->mman.mem_global_ref.object; in amdgpu_ttm_global_init()
124 global_ref = &adev->mman.bo_global_ref.ref; in amdgpu_ttm_global_init()
135 mutex_init(&adev->mman.gtt_window_lock); in amdgpu_ttm_global_init()
137 adev->mman.mem_global_referenced = true; in amdgpu_ttm_global_init()
142 drm_global_item_unref(&adev->mman.mem_global_ref); in amdgpu_ttm_global_init()
149 if (adev->mman.mem_global_referenced) { in amdgpu_ttm_global_fini()
150 mutex_destroy(&adev->mman.gtt_window_lock); in amdgpu_ttm_global_fini()
151 drm_global_item_unref(&adev->mman.bo_global_ref.ref); in amdgpu_ttm_global_fini()
152 drm_global_item_unref(&adev->mman.mem_global_ref); in amdgpu_ttm_global_fini()
153 adev->mman.mem_global_referenced = false; in amdgpu_ttm_global_fini()
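
These amdgpu_ttm_global_init()/amdgpu_ttm_global_fini() hits are the TTM global-state refcounting: take a drm_global reference on the memory-accounting object, parent the BO global object to it, then initialize gtt_window_lock and record the result in mem_global_referenced. Below is a minimal sketch of how the listed assignments plausibly fit together in this pre-5.0 era of TTM; the DRM_GLOBAL_* types and the amdgpu_ttm_mem_global_init/release callback names are assumptions, only the adev->mman accesses come from the listing.

    static int amdgpu_ttm_global_init(struct amdgpu_device *adev)
    {
            struct drm_global_reference *global_ref;
            int r;

            adev->mman.mem_global_referenced = false;

            /* Reference the global TTM memory-accounting object. */
            global_ref = &adev->mman.mem_global_ref;
            global_ref->global_type = DRM_GLOBAL_TTM_MEM;
            global_ref->size = sizeof(struct ttm_mem_global);
            global_ref->init = &amdgpu_ttm_mem_global_init;       /* assumed callback name */
            global_ref->release = &amdgpu_ttm_mem_global_release; /* assumed callback name */
            r = drm_global_item_ref(global_ref);
            if (r)
                    return r;

            /* The BO global object is parented to the memory global object. */
            adev->mman.bo_global_ref.mem_glob = adev->mman.mem_global_ref.object;
            global_ref = &adev->mman.bo_global_ref.ref;
            global_ref->global_type = DRM_GLOBAL_TTM_BO;
            global_ref->size = sizeof(struct ttm_bo_global);
            global_ref->init = &ttm_bo_global_init;
            global_ref->release = &ttm_bo_global_release;
            r = drm_global_item_ref(global_ref);
            if (r) {
                    /* matches hit 142: drop the mem reference on failure */
                    drm_global_item_unref(&adev->mman.mem_global_ref);
                    return r;
            }

            mutex_init(&adev->mman.gtt_window_lock);
            adev->mman.mem_global_referenced = true;
            return 0;
    }

amdgpu_ttm_global_fini() (hits 149-153) simply reverses this under the mem_global_referenced guard: destroy gtt_window_lock, unref the BO reference, unref the memory reference, clear the flag.
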
259 if (!adev->mman.buffer_funcs_enabled) { in amdgpu_evict_flags()
390 struct amdgpu_ring *ring = adev->mman.buffer_funcs_ring; in amdgpu_ttm_copy_mem_to_mem()
399 if (!adev->mman.buffer_funcs_enabled) { in amdgpu_ttm_copy_mem_to_mem()
416 mutex_lock(&adev->mman.gtt_window_lock); in amdgpu_ttm_copy_mem_to_mem()
492 mutex_unlock(&adev->mman.gtt_window_lock); in amdgpu_ttm_copy_mem_to_mem()
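
Hit 259 and hits 390-492 show the two roles mman plays on the move path: buffer_funcs_enabled gates every path that needs the SDMA copy engine, and gtt_window_lock serializes use of the shared GART copy windows while a move is in flight. A deliberately simplified sketch of that shape, assuming source and destination are already GART-addressable offsets; the hypothetical _sketch helper and the eight-argument amdgpu_copy_buffer() call are assumptions of the sketch, not taken from the listing.

    static int amdgpu_ttm_copy_mem_to_mem_sketch(struct amdgpu_device *adev,
                                                 uint64_t from, uint64_t to,
                                                 uint32_t size,
                                                 struct reservation_object *resv,
                                                 struct dma_fence **f)
    {
            struct amdgpu_ring *ring = adev->mman.buffer_funcs_ring;
            int r;

            /* Same gate as in amdgpu_evict_flags(): without the SDMA buffer
             * functions there is no engine to execute the copy. */
            if (!adev->mman.buffer_funcs_enabled) {
                    DRM_ERROR("Trying to move memory with ring turned off.\n");
                    return -EINVAL;
            }

            /* The two GART copy windows are a shared resource; only one
             * mem-to-mem copy may program them at a time. */
            mutex_lock(&adev->mman.gtt_window_lock);
            /* The real code walks both BOs chunk by chunk, mapping pages that
             * are not GART-addressable into the windows before each copy. */
            r = amdgpu_copy_buffer(ring, from, to, size, resv, f, false, true);
            mutex_unlock(&adev->mman.gtt_window_lock);

            return r;
    }
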
680 if (!adev->mman.buffer_funcs_enabled) in amdgpu_bo_move()
749 if (adev->mman.aper_base_kaddr && in amdgpu_ttm_io_mem_reserve()
751 mem->bus.addr = (u8 *)adev->mman.aper_base_kaddr + in amdgpu_ttm_io_mem_reserve()
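
Hit 680 is the matching gate in amdgpu_bo_move(): with buffer functions disabled the driver has to fall back to a CPU copy instead of an SDMA move. Hits 749-751 are the visible-VRAM fast path in amdgpu_ttm_io_mem_reserve(): if amdgpu_ttm_init() managed to ioremap the BAR (aper_base_kaddr, see hit 1755 below), TTM can be handed a ready-made CPU address instead of ioremapping each BO. A hypothetical helper sketching that VRAM branch; the ttm_mem_reg bus fields follow this era's TTM and the contiguity test is an assumption.

    /* Hypothetical helper: the TTM_PL_VRAM branch of amdgpu_ttm_io_mem_reserve(). */
    static int amdgpu_ttm_io_mem_reserve_vram(struct amdgpu_device *adev,
                                              struct ttm_mem_reg *mem)
    {
            struct drm_mm_node *mm_node = mem->mm_node;

            /* bus.size is assumed to have been set by the caller. */
            mem->bus.offset = mem->start << PAGE_SHIFT;
            /* Only the CPU-visible part of VRAM is reachable through the BAR. */
            if ((mem->bus.offset + mem->bus.size) > adev->gmc.visible_vram_size)
                    return -EINVAL;

            /* If the visible aperture is permanently mapped and the BO is
             * physically contiguous, reuse that mapping directly. */
            if (adev->mman.aper_base_kaddr &&
                mm_node->size == mem->num_pages)
                    mem->bus.addr = (u8 *)adev->mman.aper_base_kaddr +
                                    mem->bus.offset;

            mem->bus.base = adev->gmc.aper_base;
            mem->bus.is_iomem = true;
            return 0;
    }
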
1723 r = ttm_bo_device_init(&adev->mman.bdev, in amdgpu_ttm_init()
1724 adev->mman.bo_global_ref.ref.object, in amdgpu_ttm_init()
1733 adev->mman.initialized = true; in amdgpu_ttm_init()
1736 adev->mman.bdev.no_retry = true; in amdgpu_ttm_init()
1739 r = ttm_bo_init_mm(&adev->mman.bdev, TTM_PL_VRAM, in amdgpu_ttm_init()
1755 adev->mman.aper_base_kaddr = ioremap_wc(adev->gmc.aper_base, in amdgpu_ttm_init()
1797 r = ttm_bo_init_mm(&adev->mman.bdev, TTM_PL_TT, gtt_size >> PAGE_SHIFT); in amdgpu_ttm_init()
1817 r = ttm_bo_init_mm(&adev->mman.bdev, AMDGPU_PL_GDS, in amdgpu_ttm_init()
1827 r = ttm_bo_init_mm(&adev->mman.bdev, AMDGPU_PL_GWS, in amdgpu_ttm_init()
1837 r = ttm_bo_init_mm(&adev->mman.bdev, AMDGPU_PL_OA, in amdgpu_ttm_init()
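
The amdgpu_ttm_init() hits give the bring-up order: register the TTM BO device against the globals referenced above, mark the manager initialized, disable TTM's allocation retries, then create one ttm_bo_init_mm() pool per placement (VRAM, GTT, plus the small GDS/GWS/OA on-chip pools) and ioremap the CPU-visible VRAM aperture. A condensed sketch of that sequence; the BO-driver struct, mapping, page offset, DMA32 flag and the gds_size/gws_size/oa_size page counts are assumptions filled in around the calls the listing shows.

    r = ttm_bo_device_init(&adev->mman.bdev,
                           adev->mman.bo_global_ref.ref.object,
                           &amdgpu_bo_driver,                 /* assumed ttm_bo_driver ops */
                           adev->ddev->anon_inode->i_mapping, /* assumed mapping */
                           DRM_FILE_PAGE_OFFSET,              /* assumed offset */
                           adev->need_dma32);                 /* assumed flag */
    if (r)
            return r;
    adev->mman.initialized = true;

    /* Do not let TTM retry (and trigger the OOM killer) on system pages. */
    adev->mman.bdev.no_retry = true;

    /* VRAM pool: all of VRAM, managed in pages. */
    r = ttm_bo_init_mm(&adev->mman.bdev, TTM_PL_VRAM,
                       adev->gmc.real_vram_size >> PAGE_SHIFT);
    if (r)
            return r;

    /* Map the CPU-visible part of VRAM once, for io_mem_reserve() above. */
    adev->mman.aper_base_kaddr = ioremap_wc(adev->gmc.aper_base,
                                            adev->gmc.visible_vram_size);

    /* GTT pool, then the small on-chip pools (page counts are placeholders). */
    r = ttm_bo_init_mm(&adev->mman.bdev, TTM_PL_TT, gtt_size >> PAGE_SHIFT);
    if (r)
            return r;
    r = ttm_bo_init_mm(&adev->mman.bdev, AMDGPU_PL_GDS, gds_size);
    if (r)
            return r;
    r = ttm_bo_init_mm(&adev->mman.bdev, AMDGPU_PL_GWS, gws_size);
    if (r)
            return r;
    return ttm_bo_init_mm(&adev->mman.bdev, AMDGPU_PL_OA, oa_size);
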
1868 if (!adev->mman.initialized) in amdgpu_ttm_fini()
1873 if (adev->mman.aper_base_kaddr) in amdgpu_ttm_fini()
1874 iounmap(adev->mman.aper_base_kaddr); in amdgpu_ttm_fini()
1875 adev->mman.aper_base_kaddr = NULL; in amdgpu_ttm_fini()
1877 ttm_bo_clean_mm(&adev->mman.bdev, TTM_PL_VRAM); in amdgpu_ttm_fini()
1878 ttm_bo_clean_mm(&adev->mman.bdev, TTM_PL_TT); in amdgpu_ttm_fini()
1880 ttm_bo_clean_mm(&adev->mman.bdev, AMDGPU_PL_GDS); in amdgpu_ttm_fini()
1882 ttm_bo_clean_mm(&adev->mman.bdev, AMDGPU_PL_GWS); in amdgpu_ttm_fini()
1884 ttm_bo_clean_mm(&adev->mman.bdev, AMDGPU_PL_OA); in amdgpu_ttm_fini()
1885 ttm_bo_device_release(&adev->mman.bdev); in amdgpu_ttm_fini()
1887 adev->mman.initialized = false; in amdgpu_ttm_fini()
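
Teardown in amdgpu_ttm_fini() mirrors this in reverse, gated by the same initialized flag. A sketch of the ordering the hits imply (the debugfs and firmware-buffer cleanup the real function also performs is omitted):

    if (!adev->mman.initialized)
            return;

    /* Drop the permanent CPU mapping of the visible VRAM aperture. */
    if (adev->mman.aper_base_kaddr)
            iounmap(adev->mman.aper_base_kaddr);
    adev->mman.aper_base_kaddr = NULL;

    /* Tear the placement pools down, then the TTM device itself. */
    ttm_bo_clean_mm(&adev->mman.bdev, TTM_PL_VRAM);
    ttm_bo_clean_mm(&adev->mman.bdev, TTM_PL_TT);
    ttm_bo_clean_mm(&adev->mman.bdev, AMDGPU_PL_GDS);
    ttm_bo_clean_mm(&adev->mman.bdev, AMDGPU_PL_GWS);
    ttm_bo_clean_mm(&adev->mman.bdev, AMDGPU_PL_OA);
    ttm_bo_device_release(&adev->mman.bdev);
    adev->mman.initialized = false;
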
1902 struct ttm_mem_type_manager *man = &adev->mman.bdev.man[TTM_PL_VRAM]; in amdgpu_ttm_set_buffer_funcs_status()
1906 if (!adev->mman.initialized || adev->in_gpu_reset || in amdgpu_ttm_set_buffer_funcs_status()
1907 adev->mman.buffer_funcs_enabled == enable) in amdgpu_ttm_set_buffer_funcs_status()
1914 ring = adev->mman.buffer_funcs_ring; in amdgpu_ttm_set_buffer_funcs_status()
1916 r = drm_sched_entity_init(&adev->mman.entity, &rq, 1, NULL); in amdgpu_ttm_set_buffer_funcs_status()
1923 drm_sched_entity_destroy(&adev->mman.entity); in amdgpu_ttm_set_buffer_funcs_status()
1934 adev->mman.buffer_funcs_enabled = enable; in amdgpu_ttm_set_buffer_funcs_status()
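
amdgpu_ttm_set_buffer_funcs_status() is what actually flips the buffer_funcs_enabled flag checked everywhere above. On enable it creates the scheduler entity that all TTM moves and clears are submitted through; on disable it destroys that entity again. A sketch consistent with the listed calls; the kernel-priority run-queue selection and the visible-vs-real VRAM resize of the manager are assumptions not visible in the listing.

    struct ttm_mem_type_manager *man = &adev->mman.bdev.man[TTM_PL_VRAM];
    int r;

    if (!adev->mman.initialized || adev->in_gpu_reset ||
        adev->mman.buffer_funcs_enabled == enable)
            return;

    if (enable) {
            struct amdgpu_ring *ring = adev->mman.buffer_funcs_ring;
            struct drm_sched_rq *rq =
                    &ring->sched.sched_rq[DRM_SCHED_PRIORITY_KERNEL];

            /* All TTM moves and clears funnel through this kernel-priority entity. */
            r = drm_sched_entity_init(&adev->mman.entity, &rq, 1, NULL);
            if (r) {
                    DRM_ERROR("Failed setting up TTM BO move entity (%d)\n", r);
                    return;
            }
    } else {
            drm_sched_entity_destroy(&adev->mman.entity);
    }

    /* With a copy engine all of VRAM is usable; without one, keep the
     * manager inside the CPU-visible part (assumed detail). */
    man->size = (enable ? adev->gmc.real_vram_size
                        : adev->gmc.visible_vram_size) >> PAGE_SHIFT;
    adev->mman.buffer_funcs_enabled = enable;
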
1950 return ttm_bo_mmap(filp, vma, &adev->mman.bdev); in amdgpu_mmap()
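
The single hit in amdgpu_mmap() shows why mman.bdev also matters to userspace: mmap of a GEM/TTM object is resolved by TTM against this device. The function is essentially a thin wrapper, plausibly along these lines (the private-data plumbing is an assumption):

    int amdgpu_mmap(struct file *filp, struct vm_area_struct *vma)
    {
            struct drm_file *file_priv = filp->private_data;
            struct amdgpu_device *adev = file_priv->minor->dev->dev_private;

            if (!adev)
                    return -EINVAL;

            /* Let TTM translate the fake offset and set up the VMA. */
            return ttm_bo_mmap(filp, vma, &adev->mman.bdev);
    }
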
1970 BUG_ON(adev->mman.buffer_funcs->copy_max_bytes < in amdgpu_map_buffer()
1977 num_dw = adev->mman.buffer_funcs->copy_num_dw; in amdgpu_map_buffer()
2005 r = amdgpu_job_submit(job, &adev->mman.entity, in amdgpu_map_buffer()
2038 max_bytes = adev->mman.buffer_funcs->copy_max_bytes; in amdgpu_copy_buffer()
2040 num_dw = num_loops * adev->mman.buffer_funcs->copy_num_dw; in amdgpu_copy_buffer()
2077 r = amdgpu_job_submit(job, &adev->mman.entity, in amdgpu_copy_buffer()
2096 uint32_t max_bytes = adev->mman.buffer_funcs->fill_max_bytes; in amdgpu_fill_buffer()
2097 struct amdgpu_ring *ring = adev->mman.buffer_funcs_ring; in amdgpu_fill_buffer()
2106 if (!adev->mman.buffer_funcs_enabled) { in amdgpu_fill_buffer()
2127 num_dw = num_loops * adev->mman.buffer_funcs->fill_num_dw; in amdgpu_fill_buffer()
2169 r = amdgpu_job_submit(job, &adev->mman.entity, in amdgpu_fill_buffer()
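
The SDMA helpers all follow one recipe: size the indirect buffer from the engine's buffer_funcs limits (copy_max_bytes/copy_num_dw for copies, fill_max_bytes/fill_num_dw for fills), emit one packet per chunk, and submit the job through the mman.entity set up above. A condensed sketch of the amdgpu_copy_buffer() core, given its usual (ring, src_offset, dst_offset, byte_count, ..., fence) parameters; the fence owner and the omitted reservation sync are assumptions.

    struct amdgpu_device *adev = ring->adev;
    struct amdgpu_job *job;
    uint32_t max_bytes = adev->mman.buffer_funcs->copy_max_bytes;
    unsigned int num_loops = DIV_ROUND_UP(byte_count, max_bytes);
    unsigned int num_dw = num_loops * adev->mman.buffer_funcs->copy_num_dw;
    unsigned int i;
    int r;

    /* Pad so the IB stays large enough after ring alignment. */
    while (num_dw & 0x7)
            num_dw++;

    r = amdgpu_job_alloc_with_ib(adev, num_dw * 4, &job);
    if (r)
            return r;

    for (i = 0; i < num_loops; i++) {
            uint32_t cur_size = min(byte_count, max_bytes);

            /* One copy packet per chunk, emitted via the engine's buffer_funcs. */
            amdgpu_emit_copy_buffer(adev, &job->ibs[0], src_offset,
                                    dst_offset, cur_size);
            src_offset += cur_size;
            dst_offset += cur_size;
            byte_count -= cur_size;
    }

    amdgpu_ring_pad_ib(ring, &job->ibs[0]);
    r = amdgpu_job_submit(job, &adev->mman.entity,
                          AMDGPU_FENCE_OWNER_UNDEFINED, fence);
    if (r)
            amdgpu_job_free(job);
    return r;

amdgpu_fill_buffer() (hits 2096-2169) is the same loop with the fill limits, and amdgpu_map_buffer() (hits 1970-2005) reuses the same entity to write GART window PTEs before a windowed copy.
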
2189 struct ttm_mem_type_manager *man = &adev->mman.bdev.man[ttm_pl]; in amdgpu_mm_dump_table()
2394 if (p->mapping != adev->mman.bdev.dev_mapping) in amdgpu_iomem_read()
2445 if (p->mapping != adev->mman.bdev.dev_mapping) in amdgpu_iomem_write()
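
Hits 2394 and 2445 are an ownership check in the iomem debugfs helpers: before a physical page is read or written, it must actually belong to this device's TTM address space (bdev.dev_mapping), otherwise the access is refused. Hit 2189 reaches into bdev.man[] for a similar reason, to dump the per-placement manager state. A hypothetical helper sketching the read-side check (the kmap/copy_to_user plumbing is assumed):

    /* Hypothetical helper: copy one page of device-backed memory to userspace,
     * refusing pages that are not owned by this device's TTM mapping. */
    static int amdgpu_iomem_copy_page(struct amdgpu_device *adev, phys_addr_t addr,
                                      char __user *buf, loff_t off, size_t bytes)
    {
            struct page *p = pfn_to_page(addr >> PAGE_SHIFT);
            unsigned long not_copied;
            void *ptr;

            if (p->mapping != adev->mman.bdev.dev_mapping)
                    return -EPERM;

            ptr = kmap(p);
            not_copied = copy_to_user(buf, ptr + off, bytes);
            kunmap(p);

            return not_copied ? -EFAULT : 0;
    }
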
2503 adev->mman.debugfs_entries[count] = ent; in amdgpu_ttm_debugfs_init()
2525 debugfs_remove(adev->mman.debugfs_entries[i]); in amdgpu_ttm_debugfs_fini()
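
Finally, the debugfs hooks stash every dentry they create in mman.debugfs_entries[] precisely so the fini path can unwind them one by one. A sketch of the two loops, assuming a driver-local ttm_debugfs_entries[] table with a name and fops per file, and root pointing at the DRM minor's debugfs directory:

    /* init */
    for (count = 0; count < ARRAY_SIZE(ttm_debugfs_entries); count++) {
            ent = debugfs_create_file(ttm_debugfs_entries[count].name,
                                      S_IFREG | S_IRUGO, root, adev,
                                      ttm_debugfs_entries[count].fops);
            if (IS_ERR(ent))
                    return PTR_ERR(ent);
            adev->mman.debugfs_entries[count] = ent;
    }

    /* fini */
    for (i = 0; i < ARRAY_SIZE(ttm_debugfs_entries); i++)
            debugfs_remove(adev->mman.debugfs_entries[i]);
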