Lines matching refs: mman (all hits are in functions from drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c)

74 return ttm_range_man_init(&adev->mman.bdev, type,  in amdgpu_ttm_init_on_chip()
124 if (!adev->mman.buffer_funcs_enabled) { in amdgpu_evict_flags()
255 BUG_ON(adev->mman.buffer_funcs->copy_max_bytes < in amdgpu_ttm_map_buffer()
269 num_dw = ALIGN(adev->mman.buffer_funcs->copy_num_dw, 8); in amdgpu_ttm_map_buffer()
320 r = amdgpu_job_submit(job, &adev->mman.entity, in amdgpu_ttm_map_buffer()
360 struct amdgpu_ring *ring = adev->mman.buffer_funcs_ring; in amdgpu_ttm_copy_mem_to_mem()
365 if (!adev->mman.buffer_funcs_enabled) { in amdgpu_ttm_copy_mem_to_mem()
388 mutex_lock(&adev->mman.gtt_window_lock); in amdgpu_ttm_copy_mem_to_mem()
448 mutex_unlock(&adev->mman.gtt_window_lock); in amdgpu_ttm_copy_mem_to_mem()
693 if (!adev->mman.buffer_funcs_enabled) { in amdgpu_bo_move()
763 if (adev->mman.aper_base_kaddr && in amdgpu_ttm_io_mem_reserve()
765 mem->bus.addr = (u8 *)adev->mman.aper_base_kaddr + in amdgpu_ttm_io_mem_reserve()
1734 amdgpu_bo_free_kernel(&adev->mman.fw_vram_usage_reserved_bo, in amdgpu_ttm_fw_reserve_vram_fini()
1735 NULL, &adev->mman.fw_vram_usage_va); in amdgpu_ttm_fw_reserve_vram_fini()
1749 adev->mman.fw_vram_usage_va = NULL; in amdgpu_ttm_fw_reserve_vram_init()
1750 adev->mman.fw_vram_usage_reserved_bo = NULL; in amdgpu_ttm_fw_reserve_vram_init()
1752 if (adev->mman.fw_vram_usage_size == 0 || in amdgpu_ttm_fw_reserve_vram_init()
1753 adev->mman.fw_vram_usage_size > vram_size) in amdgpu_ttm_fw_reserve_vram_init()
1757 adev->mman.fw_vram_usage_start_offset, in amdgpu_ttm_fw_reserve_vram_init()
1758 adev->mman.fw_vram_usage_size, in amdgpu_ttm_fw_reserve_vram_init()
1760 &adev->mman.fw_vram_usage_reserved_bo, in amdgpu_ttm_fw_reserve_vram_init()
1761 &adev->mman.fw_vram_usage_va); in amdgpu_ttm_fw_reserve_vram_init()
1793 ALIGN((adev->gmc.mc_vram_size - adev->mman.discovery_tmr_size - SZ_1M), SZ_1M); in amdgpu_ttm_training_data_block_init()
1832 adev->mman.discovery_tmr_size = in amdgpu_ttm_reserve_tmr()
1834 if (!adev->mman.discovery_tmr_size) in amdgpu_ttm_reserve_tmr()
1835 adev->mman.discovery_tmr_size = DISCOVERY_TMR_OFFSET; in amdgpu_ttm_reserve_tmr()
1855 adev->gmc.real_vram_size - adev->mman.discovery_tmr_size, in amdgpu_ttm_reserve_tmr()
1856 adev->mman.discovery_tmr_size, in amdgpu_ttm_reserve_tmr()
1858 &adev->mman.discovery_memory, in amdgpu_ttm_reserve_tmr()
1862 amdgpu_bo_free_kernel(&adev->mman.discovery_memory, NULL, NULL); in amdgpu_ttm_reserve_tmr()
1884 mutex_init(&adev->mman.gtt_window_lock); in amdgpu_ttm_init()
1887 r = ttm_bo_device_init(&adev->mman.bdev, in amdgpu_ttm_init()
1896 adev->mman.initialized = true; in amdgpu_ttm_init()
1899 adev->mman.bdev.no_retry = true; in amdgpu_ttm_init()
1917 adev->mman.aper_base_kaddr = ioremap_wc(adev->gmc.aper_base, in amdgpu_ttm_init()
1935 if (adev->mman.discovery_bin) { in amdgpu_ttm_init()
1945 r = amdgpu_bo_create_kernel_at(adev, 0, adev->mman.stolen_vga_size, in amdgpu_ttm_init()
1947 &adev->mman.stolen_vga_memory, in amdgpu_ttm_init()
1951 r = amdgpu_bo_create_kernel_at(adev, adev->mman.stolen_vga_size, in amdgpu_ttm_init()
1952 adev->mman.stolen_extended_size, in amdgpu_ttm_init()
1954 &adev->mman.stolen_extended_memory, in amdgpu_ttm_init()
2012 if (!adev->mman.keep_stolen_vga_memory) in amdgpu_ttm_late_init()
2013 amdgpu_bo_free_kernel(&adev->mman.stolen_vga_memory, NULL, NULL); in amdgpu_ttm_late_init()
2014 amdgpu_bo_free_kernel(&adev->mman.stolen_extended_memory, NULL, NULL); in amdgpu_ttm_late_init()
2022 if (!adev->mman.initialized) in amdgpu_ttm_fini()
2027 if (adev->mman.keep_stolen_vga_memory) in amdgpu_ttm_fini()
2028 amdgpu_bo_free_kernel(&adev->mman.stolen_vga_memory, NULL, NULL); in amdgpu_ttm_fini()
2030 amdgpu_bo_free_kernel(&adev->mman.discovery_memory, NULL, NULL); in amdgpu_ttm_fini()
2033 if (adev->mman.aper_base_kaddr) in amdgpu_ttm_fini()
2034 iounmap(adev->mman.aper_base_kaddr); in amdgpu_ttm_fini()
2035 adev->mman.aper_base_kaddr = NULL; in amdgpu_ttm_fini()
2039 ttm_range_man_fini(&adev->mman.bdev, AMDGPU_PL_GDS); in amdgpu_ttm_fini()
2040 ttm_range_man_fini(&adev->mman.bdev, AMDGPU_PL_GWS); in amdgpu_ttm_fini()
2041 ttm_range_man_fini(&adev->mman.bdev, AMDGPU_PL_OA); in amdgpu_ttm_fini()
2042 ttm_bo_device_release(&adev->mman.bdev); in amdgpu_ttm_fini()
2043 adev->mman.initialized = false; in amdgpu_ttm_fini()
2058 struct ttm_resource_manager *man = ttm_manager_type(&adev->mman.bdev, TTM_PL_VRAM); in amdgpu_ttm_set_buffer_funcs_status()
2062 if (!adev->mman.initialized || amdgpu_in_reset(adev) || in amdgpu_ttm_set_buffer_funcs_status()
2063 adev->mman.buffer_funcs_enabled == enable) in amdgpu_ttm_set_buffer_funcs_status()
2070 ring = adev->mman.buffer_funcs_ring; in amdgpu_ttm_set_buffer_funcs_status()
2072 r = drm_sched_entity_init(&adev->mman.entity, in amdgpu_ttm_set_buffer_funcs_status()
2081 drm_sched_entity_destroy(&adev->mman.entity); in amdgpu_ttm_set_buffer_funcs_status()
2092 adev->mman.buffer_funcs_enabled = enable; in amdgpu_ttm_set_buffer_funcs_status()
2103 return ttm_bo_mmap(filp, vma, &adev->mman.bdev); in amdgpu_mmap()
2127 max_bytes = adev->mman.buffer_funcs->copy_max_bytes; in amdgpu_copy_buffer()
2129 num_dw = ALIGN(num_loops * adev->mman.buffer_funcs->copy_num_dw, 8); in amdgpu_copy_buffer()
2165 r = amdgpu_job_submit(job, &adev->mman.entity, in amdgpu_copy_buffer()
2184 uint32_t max_bytes = adev->mman.buffer_funcs->fill_max_bytes; in amdgpu_fill_buffer()
2185 struct amdgpu_ring *ring = adev->mman.buffer_funcs_ring; in amdgpu_fill_buffer()
2194 if (!adev->mman.buffer_funcs_enabled) { in amdgpu_fill_buffer()
2215 num_dw = num_loops * adev->mman.buffer_funcs->fill_num_dw; in amdgpu_fill_buffer()
2260 r = amdgpu_job_submit(job, &adev->mman.entity, in amdgpu_fill_buffer()
2280 struct ttm_resource_manager *man = ttm_manager_type(&adev->mman.bdev, ttm_pl); in amdgpu_mm_dump_table()
2476 if (p->mapping != adev->mman.bdev.dev_mapping) in amdgpu_iomem_read()
2527 if (p->mapping != adev->mman.bdev.dev_mapping) in amdgpu_iomem_write()
2585 adev->mman.debugfs_entries[count] = ent; in amdgpu_ttm_debugfs_init()
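
Every hit above dereferences a field of adev->mman. As a reading aid, here is a hedged sketch of the subset of struct amdgpu_mman (declared in amdgpu_ttm.h) that these lines touch. Only the field names are evidenced by the listing; the types, grouping, and comments are assumptions inferred from how each field is used in the hits (e.g. mutex_init/mutex_lock implies struct mutex), and the real declaration contains additional members not shown here.

/*
 * Hedged sketch, not the upstream declaration: fields of struct amdgpu_mman
 * referenced by the listing above, with types guessed from usage.
 */
struct amdgpu_mman {
	struct ttm_bo_device	bdev;			/* ttm_bo_device_init(), ttm_manager_type() */
	bool			initialized;		/* set in amdgpu_ttm_init(), cleared in amdgpu_ttm_fini() */
	void __iomem		*aper_base_kaddr;	/* ioremap_wc() mapping of the VRAM aperture */

	/* SDMA buffer-move machinery */
	const struct amdgpu_buffer_funcs *buffer_funcs;	/* copy_max_bytes, copy_num_dw, fill_* limits */
	struct amdgpu_ring	*buffer_funcs_ring;
	bool			buffer_funcs_enabled;	/* gates the SDMA copy/fill fast path */
	struct drm_sched_entity	entity;			/* target of amdgpu_job_submit() for moves */
	struct mutex		gtt_window_lock;	/* serializes the GTT copy windows */

	/* firmware-reserved VRAM */
	struct amdgpu_bo	*fw_vram_usage_reserved_bo;
	void			*fw_vram_usage_va;
	u64			fw_vram_usage_start_offset;	/* width assumed */
	u64			fw_vram_usage_size;		/* width assumed */

	/* IP discovery / memory-training TMR */
	void			*discovery_bin;
	u32			discovery_tmr_size;	/* width assumed */
	struct amdgpu_bo	*discovery_memory;

	/* stolen VGA / pre-OS framebuffer reservations */
	bool			keep_stolen_vga_memory;
	u64			stolen_vga_size;	/* width assumed */
	struct amdgpu_bo	*stolen_vga_memory;
	u64			stolen_extended_size;	/* width assumed */
	struct amdgpu_bo	*stolen_extended_memory;

	struct dentry		*debugfs_entries[8];	/* array size is a placeholder assumption */
};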