/Linux-v5.4/drivers/gpu/drm/amd/amdgpu/
gmc_v9_0.c
    270 struct ras_common_if *ras_if = adev->gmc.umc_ras_if; in gmc_v9_0_process_ecc_irq()
    425 adev->gmc.vm_fault.num_types = 1; in gmc_v9_0_set_irq_funcs()
    426 adev->gmc.vm_fault.funcs = &gmc_v9_0_irq_funcs; in gmc_v9_0_set_irq_funcs()
    428 adev->gmc.ecc_irq.num_types = 1; in gmc_v9_0_set_irq_funcs()
    429 adev->gmc.ecc_irq.funcs = &gmc_v9_0_ecc_funcs; in gmc_v9_0_set_irq_funcs()
    493 spin_lock(&adev->gmc.invalidate_lock); in gmc_v9_0_flush_gpu_tlb()
    509 spin_unlock(&adev->gmc.invalidate_lock); in gmc_v9_0_flush_gpu_tlb()
    632 adev->gmc.vram_start; in gmc_v9_0_get_vm_pde()
    635 if (!adev->gmc.translate_further) in gmc_v9_0_get_vm_pde()
    661 adev->gmc.gmc_funcs = &gmc_v9_0_gmc_funcs; in gmc_v9_0_set_gmc_funcs()
    [all …]
|
amdgpu_xgmi.c
    130 return snprintf(buf, PAGE_SIZE, "%llu\n", adev->gmc.xgmi.node_id); in amdgpu_xgmi_show_device_id()
    231 if (!adev->gmc.xgmi.hive_id) in amdgpu_get_xgmi_hive()
    238 if (tmp->hive_id == adev->gmc.xgmi.hive_id) { in amdgpu_get_xgmi_hive()
    259 tmp->hive_id = adev->gmc.xgmi.hive_id; in amdgpu_get_xgmi_hive()
    290 adev->gmc.xgmi.node_id, in amdgpu_xgmi_set_pstate()
    291 adev->gmc.xgmi.hive_id, ret); in amdgpu_xgmi_set_pstate()
    307 adev->gmc.xgmi.node_id, in amdgpu_xgmi_update_topology()
    308 adev->gmc.xgmi.hive_id, ret); in amdgpu_xgmi_update_topology()
    321 if (top->nodes[i].node_id == peer_adev->gmc.xgmi.node_id) in amdgpu_xgmi_get_hops_count()
    335 if (!adev->gmc.xgmi.supported) in amdgpu_xgmi_add_device()
    [all …]
|
gmc_v7_0.c
    159 err = request_firmware(&adev->gmc.fw, fw_name, adev->dev); in gmc_v7_0_init_microcode()
    162 err = amdgpu_ucode_validate(adev->gmc.fw); in gmc_v7_0_init_microcode()
    167 release_firmware(adev->gmc.fw); in gmc_v7_0_init_microcode()
    168 adev->gmc.fw = NULL; in gmc_v7_0_init_microcode()
    189 if (!adev->gmc.fw) in gmc_v7_0_mc_load_microcode()
    192 hdr = (const struct mc_firmware_header_v1_0 *)adev->gmc.fw->data; in gmc_v7_0_mc_load_microcode()
    195 adev->gmc.fw_version = le32_to_cpu(hdr->header.ucode_version); in gmc_v7_0_mc_load_microcode()
    198 (adev->gmc.fw->data + le32_to_cpu(hdr->io_debug_array_offset_bytes)); in gmc_v7_0_mc_load_microcode()
    201 (adev->gmc.fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes)); in gmc_v7_0_mc_load_microcode()
    291 adev->gmc.vram_start >> 12); in gmc_v7_0_mc_program()
    [all …]
|
gmc_v10_0.c
    198 adev->gmc.vm_fault.num_types = 1; in gmc_v10_0_set_irq_funcs()
    199 adev->gmc.vm_fault.funcs = &gmc_v10_0_irq_funcs; in gmc_v10_0_set_irq_funcs()
    443 adev->gmc.vram_start; in gmc_v10_0_get_vm_pde()
    446 if (!adev->gmc.translate_further) in gmc_v10_0_get_vm_pde()
    472 if (adev->gmc.gmc_funcs == NULL) in gmc_v10_0_set_gmc_funcs()
    473 adev->gmc.gmc_funcs = &gmc_v10_0_gmc_funcs; in gmc_v10_0_set_gmc_funcs()
    483 adev->gmc.shared_aperture_start = 0x2000000000000000ULL; in gmc_v10_0_early_init()
    484 adev->gmc.shared_aperture_end = in gmc_v10_0_early_init()
    485 adev->gmc.shared_aperture_start + (4ULL << 30) - 1; in gmc_v10_0_early_init()
    486 adev->gmc.private_aperture_start = 0x1000000000000000ULL; in gmc_v10_0_early_init()
    [all …]
|
gmc_v8_0.c
    276 err = request_firmware(&adev->gmc.fw, fw_name, adev->dev); in gmc_v8_0_init_microcode()
    279 err = amdgpu_ucode_validate(adev->gmc.fw); in gmc_v8_0_init_microcode()
    284 release_firmware(adev->gmc.fw); in gmc_v8_0_init_microcode()
    285 adev->gmc.fw = NULL; in gmc_v8_0_init_microcode()
    314 if (!adev->gmc.fw) in gmc_v8_0_tonga_mc_load_microcode()
    317 hdr = (const struct mc_firmware_header_v1_0 *)adev->gmc.fw->data; in gmc_v8_0_tonga_mc_load_microcode()
    320 adev->gmc.fw_version = le32_to_cpu(hdr->header.ucode_version); in gmc_v8_0_tonga_mc_load_microcode()
    323 (adev->gmc.fw->data + le32_to_cpu(hdr->io_debug_array_offset_bytes)); in gmc_v8_0_tonga_mc_load_microcode()
    326 (adev->gmc.fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes)); in gmc_v8_0_tonga_mc_load_microcode()
    383 if (!adev->gmc.fw) in gmc_v8_0_polaris_mc_load_microcode()
    [all …]
|
gmc_v6_0.c
    145 err = request_firmware(&adev->gmc.fw, fw_name, adev->dev); in gmc_v6_0_init_microcode()
    149 err = amdgpu_ucode_validate(adev->gmc.fw); in gmc_v6_0_init_microcode()
    156 release_firmware(adev->gmc.fw); in gmc_v6_0_init_microcode()
    157 adev->gmc.fw = NULL; in gmc_v6_0_init_microcode()
    170 if (!adev->gmc.fw) in gmc_v6_0_mc_load_microcode()
    173 hdr = (const struct mc_firmware_header_v1_0 *)adev->gmc.fw->data; in gmc_v6_0_mc_load_microcode()
    177 adev->gmc.fw_version = le32_to_cpu(hdr->header.ucode_version); in gmc_v6_0_mc_load_microcode()
    180 (adev->gmc.fw->data + le32_to_cpu(hdr->io_debug_array_offset_bytes)); in gmc_v6_0_mc_load_microcode()
    183 (adev->gmc.fw->data + le32_to_cpu(hdr->header.ucode_array_offset_bytes)); in gmc_v6_0_mc_load_microcode()
    268 adev->gmc.vram_start >> 12); in gmc_v6_0_mc_program()
    [all …]
|
amdgpu_gmc.c
    128 if (ttm->dma_address[0] + PAGE_SIZE >= adev->gmc.agp_size) in amdgpu_gmc_agp_addr()
    131 return adev->gmc.agp_start + ttm->dma_address[0]; in amdgpu_gmc_agp_addr()
    179 u64 max_mc_address = min(adev->gmc.mc_mask, AMDGPU_GMC_HOLE_START - 1); in amdgpu_gmc_gart_location()
    269 struct amdgpu_gmc *gmc = &adev->gmc; in amdgpu_gmc_filter_faults() local
    278 if (gmc->fault_ring[gmc->last_fault].timestamp >= stamp) in amdgpu_gmc_filter_faults()
    283 fault = &gmc->fault_ring[gmc->fault_hash[hash].idx]; in amdgpu_gmc_filter_faults()
    291 fault = &gmc->fault_ring[fault->next]; in amdgpu_gmc_filter_faults()
    299 fault = &gmc->fault_ring[gmc->last_fault]; in amdgpu_gmc_filter_faults()
    304 fault->next = gmc->fault_hash[hash].idx; in amdgpu_gmc_filter_faults()
    305 gmc->fault_hash[hash].idx = gmc->last_fault++; in amdgpu_gmc_filter_faults()
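The amdgpu_gmc_filter_faults() hits above outline how repeated GPU page faults are de-duplicated: recent faults sit in a small ring buffer, a hash table chains ring slots that share a key, and a timestamp window decides when an entry is stale. Below is a minimal, self-contained sketch of that scheme; the type names, sizes, window length and hash are illustrative assumptions, not the driver's definitions.

/* Illustrative fault filter in the spirit of amdgpu_gmc_filter_faults():
 * a ring of recent faults plus a hash table chaining slots with the same
 * key. All sizes, names and the time window below are made up for the
 * sketch; only the overall mechanism mirrors the hits listed above. */
#include <stdbool.h>
#include <stdint.h>

#define FAULT_RING_SIZE 256          /* must fit the uint8_t slot indices */
#define FAULT_HASH_SIZE 64
#define FAULT_WINDOW    1000         /* "recent" threshold, in timestamp units */

struct fault_entry {
	uint64_t key;                /* faulting address + pasid packed together */
	uint64_t timestamp;
	uint8_t  next;               /* older ring slot in the same hash bucket */
};

struct fault_filter {
	struct fault_entry ring[FAULT_RING_SIZE];
	uint8_t hash[FAULT_HASH_SIZE];   /* bucket -> most recent ring slot */
	uint8_t last;                    /* next ring slot to be overwritten */
};

/* Returns true if an equal fault was seen recently and should be dropped. */
static bool fault_filter_seen(struct fault_filter *f, uint64_t key, uint64_t now)
{
	unsigned int bucket = key % FAULT_HASH_SIZE;
	struct fault_entry *e;
	int walk;

	/* If the slot we would overwrite is still recent, the ring is full of
	 * live entries; err on the side of filtering the fault out. */
	if (f->ring[f->last].timestamp + FAULT_WINDOW > now)
		return true;

	/* Walk the chain of recent entries that hashed into this bucket. */
	e = &f->ring[f->hash[bucket]];
	for (walk = 0; walk < FAULT_RING_SIZE; walk++) {
		if (e->timestamp + FAULT_WINDOW <= now)
			break;                    /* rest of the chain is stale */
		if (e->key == key)
			return true;              /* duplicate within the window */
		e = &f->ring[e->next];
	}

	/* Not seen: record it in the next ring slot and link it into the bucket. */
	e = &f->ring[f->last];
	e->key = key;
	e->timestamp = now;
	e->next = f->hash[bucket];
	f->hash[bucket] = f->last++;          /* uint8_t wraps at 256 naturally */
	return false;
}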
|
amdgpu_gmc.h
    184 #define amdgpu_gmc_flush_gpu_tlb(adev, vmid, vmhub, type) ((adev)->gmc.gmc_funcs->flush_gpu_tlb((ad…
    185 #define amdgpu_gmc_emit_flush_gpu_tlb(r, vmid, addr) (r)->adev->gmc.gmc_funcs->emit_flush_gpu_tlb((…
    186 #define amdgpu_gmc_emit_pasid_mapping(r, vmid, pasid) (r)->adev->gmc.gmc_funcs->emit_pasid_mapping(…
    187 #define amdgpu_gmc_get_vm_pde(adev, level, dst, flags) (adev)->gmc.gmc_funcs->get_vm_pde((adev), (l…
    188 #define amdgpu_gmc_get_pte_flags(adev, flags) (adev)->gmc.gmc_funcs->get_vm_pte_flags((adev),(flags…
    198 static inline bool amdgpu_gmc_vram_full_visible(struct amdgpu_gmc *gmc) in amdgpu_gmc_vram_full_visible() argument
    200 WARN_ON(gmc->real_vram_size < gmc->visible_vram_size); in amdgpu_gmc_vram_full_visible()
    202 return (gmc->real_vram_size == gmc->visible_vram_size); in amdgpu_gmc_vram_full_visible()
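The macros above all indirect through adev->gmc.gmc_funcs, a per-ASIC table of callbacks that each generation fills in (compare the gmc_vN_0_set_gmc_funcs() hits in gmc_v9_0.c and gmc_v10_0.c). A minimal sketch of that dispatch pattern follows; the types and names are simplified stand-ins, not the driver's headers.

/* Simplified sketch of the gmc_funcs dispatch pattern; the structures and
 * function names here are illustrative, not the amdgpu definitions. */
#include <stdint.h>
#include <stdio.h>

struct gmc;

struct gmc_funcs {
	void (*flush_gpu_tlb)(struct gmc *gmc, unsigned int vmid);
	uint64_t (*get_vm_pde)(struct gmc *gmc, int level, uint64_t addr);
};

struct gmc {
	const struct gmc_funcs *funcs;   /* filled in by the per-ASIC code */
	uint64_t real_vram_size;
	uint64_t visible_vram_size;
};

/* wrapper macro, analogous to amdgpu_gmc_flush_gpu_tlb() above */
#define gmc_flush_gpu_tlb(g, vmid) ((g)->funcs->flush_gpu_tlb((g), (vmid)))

/* analogous to amdgpu_gmc_vram_full_visible() above */
static inline int gmc_vram_full_visible(const struct gmc *g)
{
	return g->real_vram_size == g->visible_vram_size;
}

/* one hypothetical generation-specific implementation */
static void v9_flush_gpu_tlb(struct gmc *g, unsigned int vmid)
{
	(void)g;
	printf("flush TLB for vmid %u\n", vmid);
}

static const struct gmc_funcs v9_gmc_funcs = {
	.flush_gpu_tlb = v9_flush_gpu_tlb,
};

int main(void)
{
	struct gmc gmc = { .funcs = &v9_gmc_funcs,
			   .real_vram_size = 8ULL << 30,
			   .visible_vram_size = 256ULL << 20 };

	gmc_flush_gpu_tlb(&gmc, 3);
	printf("fully visible: %d\n", gmc_vram_full_visible(&gmc));
	return 0;
}

The gmc_v9_0.c hit at line 661 (adev->gmc.gmc_funcs = &gmc_v9_0_gmc_funcs) is the in-driver counterpart of the table assignment shown in this sketch.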
|
gfxhub_v1_0.c
    59 (u32)(adev->gmc.gart_start >> 12)); in gfxhub_v1_0_init_gart_aperture_regs()
    61 (u32)(adev->gmc.gart_start >> 44)); in gfxhub_v1_0_init_gart_aperture_regs()
    64 (u32)(adev->gmc.gart_end >> 12)); in gfxhub_v1_0_init_gart_aperture_regs()
    66 (u32)(adev->gmc.gart_end >> 44)); in gfxhub_v1_0_init_gart_aperture_regs()
    75 WREG32_SOC15_RLC(GC, 0, mmMC_VM_AGP_BOT, adev->gmc.agp_start >> 24); in gfxhub_v1_0_init_system_aperture_regs()
    76 WREG32_SOC15_RLC(GC, 0, mmMC_VM_AGP_TOP, adev->gmc.agp_end >> 24); in gfxhub_v1_0_init_system_aperture_regs()
    80 min(adev->gmc.fb_start, adev->gmc.agp_start) >> 18); in gfxhub_v1_0_init_system_aperture_regs()
    90 max((adev->gmc.fb_end >> 18) + 0x1, in gfxhub_v1_0_init_system_aperture_regs()
    91 adev->gmc.agp_end >> 18)); in gfxhub_v1_0_init_system_aperture_regs()
    94 max(adev->gmc.fb_end, adev->gmc.agp_end) >> 18); in gfxhub_v1_0_init_system_aperture_regs()
    [all …]
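The gart-aperture hits in gfxhub_v1_0.c (and in the mmhub/gfxhub entries further down) all program a wide GPU address as a pair of 32-bit registers: the page-aligned low part shifted right by 12 and the high part shifted right by 44. A minimal sketch of that split is below, with a stand-in write_reg() instead of the real WREG32_SOC15*() macros; register names and the example addresses are made up for illustration.

/* Sketch of splitting a page-aligned GPU address into a LO32/HI32 register
 * pair, mirroring the >>12 / >>44 shifts in the aperture hits above.
 * write_reg() is a hypothetical stand-in for the MMIO write macros. */
#include <stdint.h>
#include <stdio.h>

static void write_reg(const char *name, uint32_t val)
{
	printf("%-32s <- 0x%08x\n", name, val);
}

static void program_gart_aperture(uint64_t gart_start, uint64_t gart_end)
{
	/* low register: address bits 12..43 (the address is 4 KiB aligned) */
	write_reg("PAGE_TABLE_START_ADDR_LO32", (uint32_t)(gart_start >> 12));
	/* high register: address bits 44 and up */
	write_reg("PAGE_TABLE_START_ADDR_HI32", (uint32_t)(gart_start >> 44));

	write_reg("PAGE_TABLE_END_ADDR_LO32", (uint32_t)(gart_end >> 12));
	write_reg("PAGE_TABLE_END_ADDR_HI32", (uint32_t)(gart_end >> 44));
}

int main(void)
{
	/* example: a 512 MiB aperture placed at the 48-bit offset 0x8000_0000_0000 */
	program_gart_aperture(0x800000000000ULL,
			      0x800000000000ULL + (512ULL << 20) - 1);
	return 0;
}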
|
gfxhub_v1_1.c
    39 adev->gmc.xgmi.num_physical_nodes = max_region + 1; in gfxhub_v1_1_get_xgmi_info()
    40 if (adev->gmc.xgmi.num_physical_nodes > 4) in gfxhub_v1_1_get_xgmi_info()
    43 adev->gmc.xgmi.physical_node_id = in gfxhub_v1_1_get_xgmi_info()
    45 if (adev->gmc.xgmi.physical_node_id > 3) in gfxhub_v1_1_get_xgmi_info()
    47 adev->gmc.xgmi.node_segment_size = REG_GET_FIELD( in gfxhub_v1_1_get_xgmi_info()
|
mmhub_v1_0.c
    52 adev->gmc.fb_start = base; in mmhub_v1_0_get_fb_location()
    53 adev->gmc.fb_end = top; in mmhub_v1_0_get_fb_location()
    79 (u32)(adev->gmc.gart_start >> 12)); in mmhub_v1_0_init_gart_aperture_regs()
    81 (u32)(adev->gmc.gart_start >> 44)); in mmhub_v1_0_init_gart_aperture_regs()
    84 (u32)(adev->gmc.gart_end >> 12)); in mmhub_v1_0_init_gart_aperture_regs()
    86 (u32)(adev->gmc.gart_end >> 44)); in mmhub_v1_0_init_gart_aperture_regs()
    96 WREG32_SOC15(MMHUB, 0, mmMC_VM_AGP_BOT, adev->gmc.agp_start >> 24); in mmhub_v1_0_init_system_aperture_regs()
    97 WREG32_SOC15(MMHUB, 0, mmMC_VM_AGP_TOP, adev->gmc.agp_end >> 24); in mmhub_v1_0_init_system_aperture_regs()
    101 min(adev->gmc.fb_start, adev->gmc.agp_start) >> 18); in mmhub_v1_0_init_system_aperture_regs()
    111 max((adev->gmc.fb_end >> 18) + 0x1, in mmhub_v1_0_init_system_aperture_regs()
    [all …]
|
mmhub_v9_4.c
    50 adev->gmc.fb_start = base; in mmhub_v9_4_get_fb_location()
    51 adev->gmc.fb_end = top; in mmhub_v9_4_get_fb_location()
    87 (u32)(adev->gmc.gart_start >> 12)); in mmhub_v9_4_init_gart_aperture_regs()
    91 (u32)(adev->gmc.gart_start >> 44)); in mmhub_v9_4_init_gart_aperture_regs()
    96 (u32)(adev->gmc.gart_end >> 12)); in mmhub_v9_4_init_gart_aperture_regs()
    100 (u32)(adev->gmc.gart_end >> 44)); in mmhub_v9_4_init_gart_aperture_regs()
    115 adev->gmc.agp_end >> 24); in mmhub_v9_4_init_system_aperture_regs()
    118 adev->gmc.agp_start >> 24); in mmhub_v9_4_init_system_aperture_regs()
    124 min(adev->gmc.fb_start, adev->gmc.agp_start) >> 18); in mmhub_v9_4_init_system_aperture_regs()
    128 max(adev->gmc.fb_end, adev->gmc.agp_end) >> 18); in mmhub_v9_4_init_system_aperture_regs()
    [all …]
|
gfxhub_v2_0.c
    66 (u32)(adev->gmc.gart_start >> 12)); in gfxhub_v2_0_init_gart_aperture_regs()
    68 (u32)(adev->gmc.gart_start >> 44)); in gfxhub_v2_0_init_gart_aperture_regs()
    71 (u32)(adev->gmc.gart_end >> 12)); in gfxhub_v2_0_init_gart_aperture_regs()
    73 (u32)(adev->gmc.gart_end >> 44)); in gfxhub_v2_0_init_gart_aperture_regs()
    87 adev->gmc.vram_start >> 18); in gfxhub_v2_0_init_system_aperture_regs()
    89 adev->gmc.vram_end >> 18); in gfxhub_v2_0_init_system_aperture_regs()
    92 value = adev->vram_scratch.gpu_addr - adev->gmc.vram_start in gfxhub_v2_0_init_system_aperture_regs()
    154 if (adev->gmc.translate_further) { in gfxhub_v2_0_init_cache_regs()
    260 adev->gmc.vram_start >> 24); in gfxhub_v2_0_gart_enable()
    262 adev->gmc.vram_end >> 24); in gfxhub_v2_0_gart_enable()
|
amdgpu_xgmi.h
    50 adev->gmc.xgmi.hive_id && in amdgpu_xgmi_same_hive()
    51 adev->gmc.xgmi.hive_id == bo_adev->gmc.xgmi.hive_id); in amdgpu_xgmi_same_hive()
|
amdgpu_amdkfd.c
    107 amdgpu_amdkfd_total_mem_size += adev->gmc.real_vram_size; in amdgpu_amdkfd_device_probe()
    428 resource_size_t aper_limit = adev->gmc.aper_base + adev->gmc.aper_size; in amdgpu_amdkfd_get_local_mem_info()
    431 if (!(adev->gmc.aper_base & address_mask || aper_limit & address_mask)) { in amdgpu_amdkfd_get_local_mem_info()
    432 mem_info->local_mem_size_public = adev->gmc.visible_vram_size; in amdgpu_amdkfd_get_local_mem_info()
    433 mem_info->local_mem_size_private = adev->gmc.real_vram_size - in amdgpu_amdkfd_get_local_mem_info()
    434 adev->gmc.visible_vram_size; in amdgpu_amdkfd_get_local_mem_info()
    437 mem_info->local_mem_size_private = adev->gmc.real_vram_size; in amdgpu_amdkfd_get_local_mem_info()
    439 mem_info->vram_width = adev->gmc.vram_width; in amdgpu_amdkfd_get_local_mem_info()
    442 &adev->gmc.aper_base, &aper_limit, in amdgpu_amdkfd_get_local_mem_info()
    569 return adev->gmc.xgmi.hive_id; in amdgpu_amdkfd_get_hive_id()
    [all …]
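The amdgpu_amdkfd_get_local_mem_info() hits report VRAM to KFD as a CPU-visible "public" part plus a remaining "private" part. A tiny worked example of that split, using made-up example sizes:

/* Worked example of the public/private VRAM split shown in the
 * amdgpu_amdkfd_get_local_mem_info() hits: visible VRAM is reported as
 * public, the remainder as private. The sizes are example values only. */
#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint64_t real_vram_size    = 8ULL << 30;    /* 8 GiB total VRAM        */
	uint64_t visible_vram_size = 256ULL << 20;  /* 256 MiB CPU-visible BAR */

	uint64_t local_mem_size_public  = visible_vram_size;
	uint64_t local_mem_size_private = real_vram_size - visible_vram_size;

	printf("public:  %llu MiB\n", (unsigned long long)(local_mem_size_public >> 20));
	printf("private: %llu MiB\n", (unsigned long long)(local_mem_size_private >> 20));
	return 0;
}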
|
amdgpu_test.c
    47 n = adev->gmc.gart_size - AMDGPU_IB_POOL_SIZE*64*1024; in amdgpu_do_test_moves()
    158 (gart_addr - adev->gmc.gart_start + in amdgpu_do_test_moves()
    161 (vram_addr - adev->gmc.vram_start + in amdgpu_do_test_moves()
    203 (vram_addr - adev->gmc.vram_start + in amdgpu_do_test_moves()
    206 (gart_addr - adev->gmc.gart_start + in amdgpu_do_test_moves()
    216 gart_addr - adev->gmc.gart_start); in amdgpu_do_test_moves()
|
mmhub_v2_0.c
    50 (u32)(adev->gmc.gart_start >> 12)); in mmhub_v2_0_init_gart_aperture_regs()
    52 (u32)(adev->gmc.gart_start >> 44)); in mmhub_v2_0_init_gart_aperture_regs()
    55 (u32)(adev->gmc.gart_end >> 12)); in mmhub_v2_0_init_gart_aperture_regs()
    57 (u32)(adev->gmc.gart_end >> 44)); in mmhub_v2_0_init_gart_aperture_regs()
    72 adev->gmc.vram_start >> 18); in mmhub_v2_0_init_system_aperture_regs()
    74 adev->gmc.vram_end >> 18); in mmhub_v2_0_init_system_aperture_regs()
    77 value = adev->vram_scratch.gpu_addr - adev->gmc.vram_start + in mmhub_v2_0_init_system_aperture_regs()
    140 if (adev->gmc.translate_further) { in mmhub_v2_0_init_cache_regs()
    250 adev->gmc.vram_start >> 24); in mmhub_v2_0_gart_enable()
    252 adev->gmc.vram_end >> 24); in mmhub_v2_0_gart_enable()
|
amdgpu_vram_mgr.c
    48 return snprintf(buf, PAGE_SIZE, "%llu\n", adev->gmc.real_vram_size); in amdgpu_mem_info_vram_total_show()
    65 return snprintf(buf, PAGE_SIZE, "%llu\n", adev->gmc.visible_vram_size); in amdgpu_mem_info_vis_vram_total_show()
    200 if (start >= adev->gmc.visible_vram_size) in amdgpu_vram_mgr_vis_size()
    203 return (end > adev->gmc.visible_vram_size ? in amdgpu_vram_mgr_vis_size()
    204 adev->gmc.visible_vram_size : end) - start; in amdgpu_vram_mgr_vis_size()
    223 if (amdgpu_gmc_vram_full_visible(&adev->gmc)) in amdgpu_vram_mgr_bo_visible_size()
    226 if (mem->start >= adev->gmc.visible_vram_size >> PAGE_SHIFT) in amdgpu_vram_mgr_bo_visible_size()
    288 if (atomic64_add_return(mem_bytes, &mgr->usage) > adev->gmc.mc_vram_size) { in amdgpu_vram_mgr_new()
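The amdgpu_vram_mgr_vis_size() hits clamp a [start, end) VRAM range against the CPU-visible window. A small sketch of that clamp with one worked value (the window and range sizes are example numbers):

/* Sketch of the clamp in the amdgpu_vram_mgr_vis_size() hits: how many
 * bytes of a [start, end) VRAM range fall inside the CPU-visible window. */
#include <stdint.h>
#include <stdio.h>

static uint64_t vis_size(uint64_t start, uint64_t end, uint64_t visible_vram_size)
{
	if (start >= visible_vram_size)
		return 0;                    /* entirely outside the visible window */

	return (end > visible_vram_size ? visible_vram_size : end) - start;
}

int main(void)
{
	/* example: 256 MiB visible window, buffer spanning [192 MiB, 320 MiB) */
	uint64_t n = vis_size(192ULL << 20, 320ULL << 20, 256ULL << 20);

	printf("visible bytes: %llu MiB\n", (unsigned long long)(n >> 20)); /* 64 MiB */
	return 0;
}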
|
amdgpu_object.c
    134 unsigned visible_pfn = adev->gmc.visible_vram_size >> PAGE_SHIFT; in amdgpu_bo_placement_from_domain()
    520 if (!amdgpu_gmc_vram_full_visible(&adev->gmc) && in amdgpu_bo_do_create()
    522 bo->tbo.mem.start < adev->gmc.visible_vram_size >> PAGE_SHIFT) in amdgpu_bo_do_create()
    1005 arch_io_reserve_memtype_wc(adev->gmc.aper_base, in amdgpu_bo_init()
    1006 adev->gmc.aper_size); in amdgpu_bo_init()
    1009 adev->gmc.vram_mtrr = arch_phys_wc_add(adev->gmc.aper_base, in amdgpu_bo_init()
    1010 adev->gmc.aper_size); in amdgpu_bo_init()
    1012 adev->gmc.mc_vram_size >> 20, in amdgpu_bo_init()
    1013 (unsigned long long)adev->gmc.aper_size >> 20); in amdgpu_bo_init()
    1015 adev->gmc.vram_width, amdgpu_vram_names[adev->gmc.vram_type]); in amdgpu_bo_init()
    [all …]
|
amdgpu_fb.c
    249 tmp = amdgpu_bo_gpu_offset(abo) - adev->gmc.vram_start; in amdgpufb_create()
    250 info->fix.smem_start = adev->gmc.aper_base + tmp; in amdgpufb_create()
    259 info->apertures->ranges[0].size = adev->gmc.aper_size; in amdgpufb_create()
    269 DRM_INFO("vram apper at 0x%lX\n", (unsigned long)adev->gmc.aper_base); in amdgpufb_create()
    326 if (adev->gmc.real_vram_size <= (32*1024*1024)) in amdgpu_fbdev_init()
|
amdgpu_ttm.c
    101 man->gpu_offset = adev->gmc.gart_start; in amdgpu_init_mem_type()
    109 man->gpu_offset = adev->gmc.vram_start; in amdgpu_init_mem_type()
    180 } else if (!amdgpu_gmc_vram_full_visible(&adev->gmc) && in amdgpu_evict_flags()
    191 abo->placements[0].fpfn = adev->gmc.visible_vram_size >> PAGE_SHIFT; in amdgpu_evict_flags()
    608 <= adev->gmc.visible_vram_size; in amdgpu_mem_visible()
    726 if ((mem->bus.offset + mem->bus.size) > adev->gmc.visible_vram_size) in amdgpu_ttm_io_mem_reserve()
    737 mem->bus.base = adev->gmc.aper_base; in amdgpu_ttm_io_mem_reserve()
    1116 placements.lpfn = adev->gmc.gart_size >> PAGE_SHIFT; in amdgpu_ttm_alloc_gart()
    1552 while (len && pos < adev->gmc.mc_vram_size) { in amdgpu_ttm_access_memory()
    1641 u64 vram_size = adev->gmc.visible_vram_size; in amdgpu_ttm_fw_reserve_vram_init()
    [all …]
|
amdgpu_device.c
    754 u64 space_needed = roundup_pow_of_two(adev->gmc.real_vram_size); in amdgpu_device_resize_fb_bar()
    1842 if (adev->gmc.xgmi.num_physical_nodes > 1) in amdgpu_device_ip_init()
    2057 if (adev->gmc.xgmi.num_physical_nodes > 1) in amdgpu_device_ip_fini()
    2590 adev->gmc.gart_size = 512 * 1024 * 1024; in amdgpu_device_init()
    2597 adev->gmc.gmc_funcs = NULL; in amdgpu_device_init()
    3612 list_for_each_entry(tmp_adev, device_list_handle, gmc.xgmi.head) { in amdgpu_do_asic_reset()
    3614 if (tmp_adev->gmc.xgmi.num_physical_nodes > 1) { in amdgpu_do_asic_reset()
    3630 gmc.xgmi.head) { in amdgpu_do_asic_reset()
    3631 if (tmp_adev->gmc.xgmi.num_physical_nodes > 1) { in amdgpu_do_asic_reset()
    3640 gmc.xgmi.head) { in amdgpu_do_asic_reset()
    [all …]
|
amdgpu_gart.c
    370 adev->gart.num_cpu_pages = adev->gmc.gart_size / PAGE_SIZE; in amdgpu_gart_init()
    371 adev->gart.num_gpu_pages = adev->gmc.gart_size / AMDGPU_GPU_PAGE_SIZE; in amdgpu_gart_init()
|
amdgpu_kms.c
    222 fw_info->ver = adev->gmc.fw_version; in amdgpu_firmware_info()
    585 vram_gtt.vram_size = adev->gmc.real_vram_size - in amdgpu_info_ioctl()
    587 vram_gtt.vram_cpu_accessible_size = adev->gmc.visible_vram_size - in amdgpu_info_ioctl()
    599 mem.vram.total_heap_size = adev->gmc.real_vram_size; in amdgpu_info_ioctl()
    600 mem.vram.usable_heap_size = adev->gmc.real_vram_size - in amdgpu_info_ioctl()
    607 adev->gmc.visible_vram_size; in amdgpu_info_ioctl()
    608 mem.cpu_accessible_vram.usable_heap_size = adev->gmc.visible_vram_size - in amdgpu_info_ioctl()
    730 dev_info.vram_type = adev->gmc.vram_type; in amdgpu_info_ioctl()
    731 dev_info.vram_bit_width = adev->gmc.vram_width; in amdgpu_info_ioctl()
|
/Linux-v5.4/drivers/video/fbdev/
w100fb.c
    296 union dp_gui_master_cntl_u gmc; in w100_init_graphic_engine() local
    322 gmc.val = 0; in w100_init_graphic_engine()
    323 gmc.f.gmc_src_pitch_offset_cntl = 1; in w100_init_graphic_engine()
    324 gmc.f.gmc_dst_pitch_offset_cntl = 1; in w100_init_graphic_engine()
    325 gmc.f.gmc_src_clipping = 1; in w100_init_graphic_engine()
    326 gmc.f.gmc_dst_clipping = 1; in w100_init_graphic_engine()
    327 gmc.f.gmc_brush_datatype = GMC_BRUSH_NONE; in w100_init_graphic_engine()
    328 gmc.f.gmc_dst_datatype = 3; /* from DstType_16Bpp_444 */ in w100_init_graphic_engine()
    329 gmc.f.gmc_src_datatype = SRC_DATATYPE_EQU_DST; in w100_init_graphic_engine()
    330 gmc.f.gmc_byte_pix_order = 1; in w100_init_graphic_engine()
    [all …]
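The w100fb.c hits build a GUI-engine control word by filling a bitfield union (dp_gui_master_cntl_u) field by field and then writing the packed .val to hardware. Below is a sketch of that "register as a union of bitfields" idiom; the field layout and widths are illustrative only, not the w100 register definition.

/* Sketch of the "register as union of bitfields" idiom used for
 * dp_gui_master_cntl_u above; the field layout here is illustrative only. */
#include <stdint.h>

union ctrl_reg {
	uint32_t val;                 /* the packed word written to hardware */
	struct {
		uint32_t src_pitch_offset_cntl : 1;
		uint32_t dst_pitch_offset_cntl : 1;
		uint32_t src_clipping          : 1;
		uint32_t dst_clipping          : 1;
		uint32_t brush_datatype        : 4;
		uint32_t dst_datatype          : 4;
		uint32_t src_datatype          : 3;
		uint32_t byte_pix_order        : 1;
		uint32_t reserved              : 16;
	} f;
};

/* fill the fields, then hand the packed value to the register write */
static uint32_t build_gmc_word(void)
{
	union ctrl_reg gmc = { .val = 0 };

	gmc.f.src_pitch_offset_cntl = 1;
	gmc.f.dst_pitch_offset_cntl = 1;
	gmc.f.src_clipping = 1;
	gmc.f.dst_clipping = 1;
	gmc.f.dst_datatype = 3;       /* e.g. a 16 bpp destination format */
	gmc.f.byte_pix_order = 1;

	return gmc.val;
}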
|