Lines matching refs: mman

79 	return ttm_range_man_init(&adev->mman.bdev, type,  in amdgpu_ttm_init_on_chip()
135 if (!adev->mman.buffer_funcs_enabled) { in amdgpu_evict_flags()
199 BUG_ON(adev->mman.buffer_funcs->copy_max_bytes < in amdgpu_ttm_map_buffer()
229 num_dw = ALIGN(adev->mman.buffer_funcs->copy_num_dw, 8); in amdgpu_ttm_map_buffer()
272 r = amdgpu_job_submit(job, &adev->mman.entity, in amdgpu_ttm_map_buffer()
308 struct amdgpu_ring *ring = adev->mman.buffer_funcs_ring; in amdgpu_ttm_copy_mem_to_mem()
313 if (!adev->mman.buffer_funcs_enabled) { in amdgpu_ttm_copy_mem_to_mem()
321 mutex_lock(&adev->mman.gtt_window_lock); in amdgpu_ttm_copy_mem_to_mem()
352 mutex_unlock(&adev->mman.gtt_window_lock); in amdgpu_ttm_copy_mem_to_mem()
528 if (adev->mman.buffer_funcs_enabled) { in amdgpu_bo_move()
589 if (adev->mman.aper_base_kaddr && in amdgpu_ttm_io_mem_reserve()
591 mem->bus.addr = (u8 *)adev->mman.aper_base_kaddr + in amdgpu_ttm_io_mem_reserve()
1087 ret = ttm_pool_alloc(&adev->mman.bdev.pool, ttm, ctx); in amdgpu_ttm_tt_populate()
1126 return ttm_pool_free(&adev->mman.bdev.pool, ttm); in amdgpu_ttm_tt_unpopulate()
1411 if (!adev->mman.sdma_access_ptr) in amdgpu_ttm_access_memory_sdma()
1418 memcpy(adev->mman.sdma_access_ptr, buf, len); in amdgpu_ttm_access_memory_sdma()
1420 num_dw = ALIGN(adev->mman.buffer_funcs->copy_num_dw, 8); in amdgpu_ttm_access_memory_sdma()
1427 dst_addr = amdgpu_bo_gpu_offset(adev->mman.sdma_access_bo); in amdgpu_ttm_access_memory_sdma()
1433 amdgpu_ring_pad_ib(adev->mman.buffer_funcs_ring, &job->ibs[0]); in amdgpu_ttm_access_memory_sdma()
1436 r = amdgpu_job_submit(job, &adev->mman.entity, AMDGPU_FENCE_OWNER_UNDEFINED, &fence); in amdgpu_ttm_access_memory_sdma()
1447 memcpy(buf, adev->mman.sdma_access_ptr, len); in amdgpu_ttm_access_memory_sdma()
1536 amdgpu_bo_free_kernel(&adev->mman.fw_vram_usage_reserved_bo, in amdgpu_ttm_fw_reserve_vram_fini()
1537 NULL, &adev->mman.fw_vram_usage_va); in amdgpu_ttm_fw_reserve_vram_fini()
1551 adev->mman.fw_vram_usage_va = NULL; in amdgpu_ttm_fw_reserve_vram_init()
1552 adev->mman.fw_vram_usage_reserved_bo = NULL; in amdgpu_ttm_fw_reserve_vram_init()
1554 if (adev->mman.fw_vram_usage_size == 0 || in amdgpu_ttm_fw_reserve_vram_init()
1555 adev->mman.fw_vram_usage_size > vram_size) in amdgpu_ttm_fw_reserve_vram_init()
1559 adev->mman.fw_vram_usage_start_offset, in amdgpu_ttm_fw_reserve_vram_init()
1560 adev->mman.fw_vram_usage_size, in amdgpu_ttm_fw_reserve_vram_init()
1562 &adev->mman.fw_vram_usage_reserved_bo, in amdgpu_ttm_fw_reserve_vram_init()
1563 &adev->mman.fw_vram_usage_va); in amdgpu_ttm_fw_reserve_vram_init()
1595 ALIGN((adev->gmc.mc_vram_size - adev->mman.discovery_tmr_size - SZ_1M), SZ_1M); in amdgpu_ttm_training_data_block_init()
1631 adev->mman.discovery_tmr_size = in amdgpu_ttm_reserve_tmr()
1633 if (!adev->mman.discovery_tmr_size) in amdgpu_ttm_reserve_tmr()
1634 adev->mman.discovery_tmr_size = DISCOVERY_TMR_OFFSET; in amdgpu_ttm_reserve_tmr()
1654 adev->gmc.real_vram_size - adev->mman.discovery_tmr_size, in amdgpu_ttm_reserve_tmr()
1655 adev->mman.discovery_tmr_size, in amdgpu_ttm_reserve_tmr()
1657 &adev->mman.discovery_memory, in amdgpu_ttm_reserve_tmr()
1661 amdgpu_bo_free_kernel(&adev->mman.discovery_memory, NULL, NULL); in amdgpu_ttm_reserve_tmr()
1683 mutex_init(&adev->mman.gtt_window_lock); in amdgpu_ttm_init()
1686 r = ttm_device_init(&adev->mman.bdev, &amdgpu_bo_driver, adev->dev, in amdgpu_ttm_init()
1695 adev->mman.initialized = true; in amdgpu_ttm_init()
1715 adev->mman.aper_base_kaddr = ioremap_cache(adev->gmc.aper_base, in amdgpu_ttm_init()
1720 adev->mman.aper_base_kaddr = ioremap_wc(adev->gmc.aper_base, in amdgpu_ttm_init()
1738 if (adev->mman.discovery_bin) { in amdgpu_ttm_init()
1748 r = amdgpu_bo_create_kernel_at(adev, 0, adev->mman.stolen_vga_size, in amdgpu_ttm_init()
1750 &adev->mman.stolen_vga_memory, in amdgpu_ttm_init()
1754 r = amdgpu_bo_create_kernel_at(adev, adev->mman.stolen_vga_size, in amdgpu_ttm_init()
1755 adev->mman.stolen_extended_size, in amdgpu_ttm_init()
1757 &adev->mman.stolen_extended_memory, in amdgpu_ttm_init()
1761 r = amdgpu_bo_create_kernel_at(adev, adev->mman.stolen_reserved_offset, in amdgpu_ttm_init()
1762 adev->mman.stolen_reserved_size, in amdgpu_ttm_init()
1764 &adev->mman.stolen_reserved_memory, in amdgpu_ttm_init()
1830 &adev->mman.sdma_access_bo, NULL, in amdgpu_ttm_init()
1831 &adev->mman.sdma_access_ptr)) in amdgpu_ttm_init()
1843 if (!adev->mman.initialized) in amdgpu_ttm_fini()
1848 amdgpu_bo_free_kernel(&adev->mman.stolen_vga_memory, NULL, NULL); in amdgpu_ttm_fini()
1849 amdgpu_bo_free_kernel(&adev->mman.stolen_extended_memory, NULL, NULL); in amdgpu_ttm_fini()
1851 amdgpu_bo_free_kernel(&adev->mman.discovery_memory, NULL, NULL); in amdgpu_ttm_fini()
1852 if (adev->mman.stolen_reserved_size) in amdgpu_ttm_fini()
1853 amdgpu_bo_free_kernel(&adev->mman.stolen_reserved_memory, in amdgpu_ttm_fini()
1855 amdgpu_bo_free_kernel(&adev->mman.sdma_access_bo, NULL, in amdgpu_ttm_fini()
1856 &adev->mman.sdma_access_ptr); in amdgpu_ttm_fini()
1861 if (adev->mman.aper_base_kaddr) in amdgpu_ttm_fini()
1862 iounmap(adev->mman.aper_base_kaddr); in amdgpu_ttm_fini()
1863 adev->mman.aper_base_kaddr = NULL; in amdgpu_ttm_fini()
1871 ttm_range_man_fini(&adev->mman.bdev, AMDGPU_PL_GDS); in amdgpu_ttm_fini()
1872 ttm_range_man_fini(&adev->mman.bdev, AMDGPU_PL_GWS); in amdgpu_ttm_fini()
1873 ttm_range_man_fini(&adev->mman.bdev, AMDGPU_PL_OA); in amdgpu_ttm_fini()
1874 ttm_device_fini(&adev->mman.bdev); in amdgpu_ttm_fini()
1875 adev->mman.initialized = false; in amdgpu_ttm_fini()
1890 struct ttm_resource_manager *man = ttm_manager_type(&adev->mman.bdev, TTM_PL_VRAM); in amdgpu_ttm_set_buffer_funcs_status()
1894 if (!adev->mman.initialized || amdgpu_in_reset(adev) || in amdgpu_ttm_set_buffer_funcs_status()
1895 adev->mman.buffer_funcs_enabled == enable) in amdgpu_ttm_set_buffer_funcs_status()
1902 ring = adev->mman.buffer_funcs_ring; in amdgpu_ttm_set_buffer_funcs_status()
1904 r = drm_sched_entity_init(&adev->mman.entity, in amdgpu_ttm_set_buffer_funcs_status()
1913 drm_sched_entity_destroy(&adev->mman.entity); in amdgpu_ttm_set_buffer_funcs_status()
1924 adev->mman.buffer_funcs_enabled = enable; in amdgpu_ttm_set_buffer_funcs_status()
1980 max_bytes = adev->mman.buffer_funcs->copy_max_bytes; in amdgpu_copy_buffer()
1982 num_dw = ALIGN(num_loops * adev->mman.buffer_funcs->copy_num_dw, 8); in amdgpu_copy_buffer()
2004 r = amdgpu_job_submit(job, &adev->mman.entity, in amdgpu_copy_buffer()
2030 max_bytes = adev->mman.buffer_funcs->fill_max_bytes; in amdgpu_ttm_fill_mem()
2032 num_dw = ALIGN(num_loops * adev->mman.buffer_funcs->fill_num_dw, 8); in amdgpu_ttm_fill_mem()
2050 r = amdgpu_job_submit(job, &adev->mman.entity, in amdgpu_ttm_fill_mem()
2068 struct amdgpu_ring *ring = adev->mman.buffer_funcs_ring; in amdgpu_fill_buffer()
2073 if (!adev->mman.buffer_funcs_enabled) { in amdgpu_fill_buffer()
2080 mutex_lock(&adev->mman.gtt_window_lock); in amdgpu_fill_buffer()
2104 mutex_unlock(&adev->mman.gtt_window_lock); in amdgpu_fill_buffer()
2131 man = ttm_manager_type(&adev->mman.bdev, mem_type); in amdgpu_ttm_evict_resources()
2138 return ttm_resource_manager_evict_all(&adev->mman.bdev, man); in amdgpu_ttm_evict_resources()
2147 return ttm_pool_debugfs(&adev->mman.bdev.pool, m); in amdgpu_ttm_page_pool_show()
2272 if (p->mapping != adev->mman.bdev.dev_mapping) in amdgpu_iomem_read()
2323 if (p->mapping != adev->mman.bdev.dev_mapping) in amdgpu_iomem_write()
2361 ttm_resource_manager_create_debugfs(ttm_manager_type(&adev->mman.bdev, in amdgpu_ttm_debugfs_init()
2364 ttm_resource_manager_create_debugfs(ttm_manager_type(&adev->mman.bdev, in amdgpu_ttm_debugfs_init()
2367 ttm_resource_manager_create_debugfs(ttm_manager_type(&adev->mman.bdev, in amdgpu_ttm_debugfs_init()
2370 ttm_resource_manager_create_debugfs(ttm_manager_type(&adev->mman.bdev, in amdgpu_ttm_debugfs_init()
2373 ttm_resource_manager_create_debugfs(ttm_manager_type(&adev->mman.bdev, in amdgpu_ttm_debugfs_init()
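Read together, these matches show the recurring adev->mman usage pattern in this file: SDMA-backed paths first check adev->mman.buffer_funcs_enabled, serialize use of the shared GTT copy windows with adev->mman.gtt_window_lock, and submit work through the adev->mman.entity scheduler entity (see the amdgpu_job_submit() calls above). The following is a minimal, illustrative sketch of that pattern only; the helper name amdgpu_ttm_mman_copy_example() is hypothetical, and it assumes the kernel-internal amdgpu headers plus the four-argument amdgpu_job_submit() form visible in the matches.

/* Hypothetical helper condensing the pattern above; not part of amdgpu_ttm.c. */
static int amdgpu_ttm_mman_copy_example(struct amdgpu_device *adev,
					struct amdgpu_job *job,
					struct dma_fence **fence)
{
	int r;

	/* SDMA buffer functions may be disabled (e.g. before init or during teardown). */
	if (!adev->mman.buffer_funcs_enabled) {
		DRM_ERROR("Trying to move memory with ring turned off.\n");
		return -EINVAL;
	}

	/* The GTT copy windows are shared state; serialize access to them. */
	mutex_lock(&adev->mman.gtt_window_lock);

	/* Submit on the dedicated TTM entity, matching the calls listed above. */
	r = amdgpu_job_submit(job, &adev->mman.entity,
			      AMDGPU_FENCE_OWNER_UNDEFINED, fence);

	mutex_unlock(&adev->mman.gtt_window_lock);
	return r;
}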