Lines matching refs: adev (drivers/gpu/drm/amd/amdgpu/amdgpu_device.c)

146 	struct amdgpu_device *adev = drm_to_adev(ddev);  in amdgpu_device_get_pcie_replay_count()  local
147 uint64_t cnt = amdgpu_asic_get_pcie_replay_count(adev); in amdgpu_device_get_pcie_replay_count()
155 static void amdgpu_device_get_pcie_info(struct amdgpu_device *adev);
171 struct amdgpu_device *adev = drm_to_adev(ddev); in amdgpu_device_get_product_name() local
173 return sysfs_emit(buf, "%s\n", adev->product_name); in amdgpu_device_get_product_name()
193 struct amdgpu_device *adev = drm_to_adev(ddev); in amdgpu_device_get_product_number() local
195 return sysfs_emit(buf, "%s\n", adev->product_number); in amdgpu_device_get_product_number()
215 struct amdgpu_device *adev = drm_to_adev(ddev); in amdgpu_device_get_serial_number() local
217 return sysfs_emit(buf, "%s\n", adev->serial); in amdgpu_device_get_serial_number()
233 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_device_supports_px() local
235 if ((adev->flags & AMD_IS_PX) && !amdgpu_is_atpx_hybrid()) in amdgpu_device_supports_px()
250 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_device_supports_boco() local
252 if (adev->has_pr3 || in amdgpu_device_supports_boco()
253 ((adev->flags & AMD_IS_PX) && amdgpu_is_atpx_hybrid())) in amdgpu_device_supports_boco()
268 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_device_supports_baco() local
270 return amdgpu_asic_supports_baco(adev); in amdgpu_device_supports_baco()
301 void amdgpu_device_mm_access(struct amdgpu_device *adev, loff_t pos, in amdgpu_device_mm_access() argument
310 if (!drm_dev_enter(adev_to_drm(adev), &idx)) in amdgpu_device_mm_access()
315 spin_lock_irqsave(&adev->mmio_idx_lock, flags); in amdgpu_device_mm_access()
330 spin_unlock_irqrestore(&adev->mmio_idx_lock, flags); in amdgpu_device_mm_access()
345 size_t amdgpu_device_aper_access(struct amdgpu_device *adev, loff_t pos, in amdgpu_device_aper_access() argument
353 if (!adev->mman.aper_base_kaddr) in amdgpu_device_aper_access()
356 last = min(pos + size, adev->gmc.visible_vram_size); in amdgpu_device_aper_access()
358 addr = adev->mman.aper_base_kaddr + pos; in amdgpu_device_aper_access()
364 amdgpu_device_flush_hdp(adev, NULL); in amdgpu_device_aper_access()
366 amdgpu_device_invalidate_hdp(adev, NULL); in amdgpu_device_aper_access()
388 void amdgpu_device_vram_access(struct amdgpu_device *adev, loff_t pos, in amdgpu_device_vram_access() argument
394 count = amdgpu_device_aper_access(adev, pos, buf, size, write); in amdgpu_device_vram_access()
400 amdgpu_device_mm_access(adev, pos, buf, size, write); in amdgpu_device_vram_access()
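
The three helpers above split a VRAM access in two: amdgpu_device_aper_access() serves whatever falls inside the CPU-visible aperture, and amdgpu_device_mm_access() takes the remainder through the slower MMIO index/data window. Below is a minimal user-space sketch of that split, assuming a plain array in place of VRAM; the struct and helper names are invented for the example and are not the driver's.

    #include <stdint.h>
    #include <stddef.h>
    #include <string.h>
    #include <stdio.h>

    struct vram_dev {
        uint8_t *vram;          /* backing store standing in for VRAM       */
        size_t   visible_size;  /* bytes reachable through the BAR aperture */
    };

    /* "aperture" path: only the first visible_size bytes are directly mapped */
    static size_t aper_read(struct vram_dev *d, size_t pos, void *buf, size_t size)
    {
        size_t last;

        if (pos >= d->visible_size)
            return 0;
        last = pos + size;
        if (last > d->visible_size)
            last = d->visible_size;
        memcpy(buf, d->vram + pos, last - pos);
        return last - pos;
    }

    /* fallback path: reaches all of VRAM, modelled here as a plain copy */
    static void mm_read(struct vram_dev *d, size_t pos, void *buf, size_t size)
    {
        memcpy(buf, d->vram + pos, size);
    }

    static void vram_read(struct vram_dev *d, size_t pos, void *buf, size_t size)
    {
        size_t count = aper_read(d, pos, buf, size);

        pos  += count;
        buf   = (uint8_t *)buf + count;
        size -= count;
        if (size)                       /* request spilled past the aperture */
            mm_read(d, pos, buf, size);
    }

    int main(void)
    {
        uint8_t backing[64], out[32];
        struct vram_dev d = { backing, 16 };

        for (int i = 0; i < 64; i++)
            backing[i] = (uint8_t)i;
        vram_read(&d, 8, out, sizeof(out));  /* spans visible and hidden VRAM */
        printf("%u %u\n", (unsigned)out[0], (unsigned)out[31]);  /* 8 39 */
        return 0;
    }
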
409 bool amdgpu_device_skip_hw_access(struct amdgpu_device *adev) in amdgpu_device_skip_hw_access() argument
411 if (adev->no_hw_access) in amdgpu_device_skip_hw_access()
427 if (down_read_trylock(&adev->reset_domain->sem)) in amdgpu_device_skip_hw_access()
428 up_read(&adev->reset_domain->sem); in amdgpu_device_skip_hw_access()
430 lockdep_assert_held(&adev->reset_domain->sem); in amdgpu_device_skip_hw_access()
445 uint32_t amdgpu_device_rreg(struct amdgpu_device *adev, in amdgpu_device_rreg() argument
450 if (amdgpu_device_skip_hw_access(adev)) in amdgpu_device_rreg()
453 if ((reg * 4) < adev->rmmio_size) { in amdgpu_device_rreg()
455 amdgpu_sriov_runtime(adev) && in amdgpu_device_rreg()
456 down_read_trylock(&adev->reset_domain->sem)) { in amdgpu_device_rreg()
457 ret = amdgpu_kiq_rreg(adev, reg); in amdgpu_device_rreg()
458 up_read(&adev->reset_domain->sem); in amdgpu_device_rreg()
460 ret = readl(((void __iomem *)adev->rmmio) + (reg * 4)); in amdgpu_device_rreg()
463 ret = adev->pcie_rreg(adev, reg * 4); in amdgpu_device_rreg()
466 trace_amdgpu_device_rreg(adev->pdev->device, reg, ret); in amdgpu_device_rreg()
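
amdgpu_device_rreg() dispatches on the register offset: offsets inside the mapped rmmio window are read directly with readl(), anything beyond it goes through the indirect pcie_rreg callback (the SR-IOV/KIQ detour on lines 455-458 is left out here). A simplified, runnable model of that dispatch follows; the struct and function names are made up for illustration.

    #include <stdint.h>
    #include <stdio.h>

    struct reg_dev {
        uint32_t *rmmio;        /* direct register window        */
        uint32_t  rmmio_size;   /* size of that window in bytes  */
        uint32_t (*pcie_rreg)(struct reg_dev *d, uint32_t byte_off);
    };

    /* stand-in for the indirect index/data path used past the window */
    static uint32_t slow_pcie_rreg(struct reg_dev *d, uint32_t byte_off)
    {
        (void)d;
        return 0xdeadbeef ^ byte_off;
    }

    static uint32_t dev_rreg(struct reg_dev *d, uint32_t reg)
    {
        if ((reg * 4) < d->rmmio_size)
            return d->rmmio[reg];        /* readl() on the real mapping */
        return d->pcie_rreg(d, reg * 4); /* indirect fallback           */
    }

    int main(void)
    {
        uint32_t regs[4] = { 1, 2, 3, 4 };
        struct reg_dev d = { regs, sizeof(regs), slow_pcie_rreg };

        printf("0x%x 0x%x\n", dev_rreg(&d, 2), dev_rreg(&d, 100));
        return 0;
    }
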
485 uint8_t amdgpu_mm_rreg8(struct amdgpu_device *adev, uint32_t offset) in amdgpu_mm_rreg8() argument
487 if (amdgpu_device_skip_hw_access(adev)) in amdgpu_mm_rreg8()
490 if (offset < adev->rmmio_size) in amdgpu_mm_rreg8()
491 return (readb(adev->rmmio + offset)); in amdgpu_mm_rreg8()
510 void amdgpu_mm_wreg8(struct amdgpu_device *adev, uint32_t offset, uint8_t value) in amdgpu_mm_wreg8() argument
512 if (amdgpu_device_skip_hw_access(adev)) in amdgpu_mm_wreg8()
515 if (offset < adev->rmmio_size) in amdgpu_mm_wreg8()
516 writeb(value, adev->rmmio + offset); in amdgpu_mm_wreg8()
531 void amdgpu_device_wreg(struct amdgpu_device *adev, in amdgpu_device_wreg() argument
535 if (amdgpu_device_skip_hw_access(adev)) in amdgpu_device_wreg()
538 if ((reg * 4) < adev->rmmio_size) { in amdgpu_device_wreg()
540 amdgpu_sriov_runtime(adev) && in amdgpu_device_wreg()
541 down_read_trylock(&adev->reset_domain->sem)) { in amdgpu_device_wreg()
542 amdgpu_kiq_wreg(adev, reg, v); in amdgpu_device_wreg()
543 up_read(&adev->reset_domain->sem); in amdgpu_device_wreg()
545 writel(v, ((void __iomem *)adev->rmmio) + (reg * 4)); in amdgpu_device_wreg()
548 adev->pcie_wreg(adev, reg * 4, v); in amdgpu_device_wreg()
551 trace_amdgpu_device_wreg(adev->pdev->device, reg, v); in amdgpu_device_wreg()
563 void amdgpu_mm_wreg_mmio_rlc(struct amdgpu_device *adev, in amdgpu_mm_wreg_mmio_rlc() argument
566 if (amdgpu_device_skip_hw_access(adev)) in amdgpu_mm_wreg_mmio_rlc()
569 if (amdgpu_sriov_fullaccess(adev) && in amdgpu_mm_wreg_mmio_rlc()
570 adev->gfx.rlc.funcs && in amdgpu_mm_wreg_mmio_rlc()
571 adev->gfx.rlc.funcs->is_rlcg_access_range) { in amdgpu_mm_wreg_mmio_rlc()
572 if (adev->gfx.rlc.funcs->is_rlcg_access_range(adev, reg)) in amdgpu_mm_wreg_mmio_rlc()
573 return amdgpu_sriov_wreg(adev, reg, v, 0, 0); in amdgpu_mm_wreg_mmio_rlc()
574 } else if ((reg * 4) >= adev->rmmio_size) { in amdgpu_mm_wreg_mmio_rlc()
575 adev->pcie_wreg(adev, reg * 4, v); in amdgpu_mm_wreg_mmio_rlc()
577 writel(v, ((void __iomem *)adev->rmmio) + (reg * 4)); in amdgpu_mm_wreg_mmio_rlc()
590 u32 amdgpu_mm_rdoorbell(struct amdgpu_device *adev, u32 index) in amdgpu_mm_rdoorbell() argument
592 if (amdgpu_device_skip_hw_access(adev)) in amdgpu_mm_rdoorbell()
595 if (index < adev->doorbell.num_doorbells) { in amdgpu_mm_rdoorbell()
596 return readl(adev->doorbell.ptr + index); in amdgpu_mm_rdoorbell()
613 void amdgpu_mm_wdoorbell(struct amdgpu_device *adev, u32 index, u32 v) in amdgpu_mm_wdoorbell() argument
615 if (amdgpu_device_skip_hw_access(adev)) in amdgpu_mm_wdoorbell()
618 if (index < adev->doorbell.num_doorbells) { in amdgpu_mm_wdoorbell()
619 writel(v, adev->doorbell.ptr + index); in amdgpu_mm_wdoorbell()
634 u64 amdgpu_mm_rdoorbell64(struct amdgpu_device *adev, u32 index) in amdgpu_mm_rdoorbell64() argument
636 if (amdgpu_device_skip_hw_access(adev)) in amdgpu_mm_rdoorbell64()
639 if (index < adev->doorbell.num_doorbells) { in amdgpu_mm_rdoorbell64()
640 return atomic64_read((atomic64_t *)(adev->doorbell.ptr + index)); in amdgpu_mm_rdoorbell64()
657 void amdgpu_mm_wdoorbell64(struct amdgpu_device *adev, u32 index, u64 v) in amdgpu_mm_wdoorbell64() argument
659 if (amdgpu_device_skip_hw_access(adev)) in amdgpu_mm_wdoorbell64()
662 if (index < adev->doorbell.num_doorbells) { in amdgpu_mm_wdoorbell64()
663 atomic64_set((atomic64_t *)(adev->doorbell.ptr + index), v); in amdgpu_mm_wdoorbell64()
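
All four doorbell helpers above share one guard: the doorbell index is checked against num_doorbells before the mapped page is touched. A small sketch of that bounds check with an ordinary array in place of the ioremap()ed doorbell BAR; names are illustrative and the 64-bit atomic variants are omitted.

    #include <stdint.h>
    #include <stdio.h>

    struct doorbell_page {
        uint32_t *ptr;           /* mapped doorbell BAR           */
        uint32_t  num_doorbells; /* number of valid 32-bit slots  */
    };

    static uint32_t rdoorbell(struct doorbell_page *db, uint32_t index)
    {
        if (index < db->num_doorbells)
            return db->ptr[index];
        fprintf(stderr, "reading beyond doorbell aperture: 0x%x\n", index);
        return 0;
    }

    static void wdoorbell(struct doorbell_page *db, uint32_t index, uint32_t v)
    {
        if (index < db->num_doorbells)
            db->ptr[index] = v;
        else
            fprintf(stderr, "writing beyond doorbell aperture: 0x%x\n", index);
    }

    int main(void)
    {
        uint32_t page[8] = { 0 };
        struct doorbell_page db = { page, 8 };

        wdoorbell(&db, 3, 42);
        wdoorbell(&db, 99, 7);               /* rejected by the bounds check */
        printf("%u\n", rdoorbell(&db, 3));   /* 42 */
        return 0;
    }
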
679 u32 amdgpu_device_indirect_rreg(struct amdgpu_device *adev, in amdgpu_device_indirect_rreg() argument
688 spin_lock_irqsave(&adev->pcie_idx_lock, flags); in amdgpu_device_indirect_rreg()
689 pcie_index_offset = (void __iomem *)adev->rmmio + pcie_index * 4; in amdgpu_device_indirect_rreg()
690 pcie_data_offset = (void __iomem *)adev->rmmio + pcie_data * 4; in amdgpu_device_indirect_rreg()
695 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags); in amdgpu_device_indirect_rreg()
710 u64 amdgpu_device_indirect_rreg64(struct amdgpu_device *adev, in amdgpu_device_indirect_rreg64() argument
719 spin_lock_irqsave(&adev->pcie_idx_lock, flags); in amdgpu_device_indirect_rreg64()
720 pcie_index_offset = (void __iomem *)adev->rmmio + pcie_index * 4; in amdgpu_device_indirect_rreg64()
721 pcie_data_offset = (void __iomem *)adev->rmmio + pcie_data * 4; in amdgpu_device_indirect_rreg64()
731 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags); in amdgpu_device_indirect_rreg64()
746 void amdgpu_device_indirect_wreg(struct amdgpu_device *adev, in amdgpu_device_indirect_wreg() argument
754 spin_lock_irqsave(&adev->pcie_idx_lock, flags); in amdgpu_device_indirect_wreg()
755 pcie_index_offset = (void __iomem *)adev->rmmio + pcie_index * 4; in amdgpu_device_indirect_wreg()
756 pcie_data_offset = (void __iomem *)adev->rmmio + pcie_data * 4; in amdgpu_device_indirect_wreg()
762 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags); in amdgpu_device_indirect_wreg()
775 void amdgpu_device_indirect_wreg64(struct amdgpu_device *adev, in amdgpu_device_indirect_wreg64() argument
783 spin_lock_irqsave(&adev->pcie_idx_lock, flags); in amdgpu_device_indirect_wreg64()
784 pcie_index_offset = (void __iomem *)adev->rmmio + pcie_index * 4; in amdgpu_device_indirect_wreg64()
785 pcie_data_offset = (void __iomem *)adev->rmmio + pcie_data * 4; in amdgpu_device_indirect_wreg64()
797 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags); in amdgpu_device_indirect_wreg64()
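
The indirect accessors above implement the classic index/data pattern: the register offset is written to the PCIE index register, then the payload moves through the companion data register, all under pcie_idx_lock; the 64-bit variants simply repeat the sequence for the high dword. A user-space approximation, with a mutex standing in for the spinlock and an array for the hidden register file (all names invented, and the posting-flush reads of the index register are omitted):

    #include <stdint.h>
    #include <stdio.h>
    #include <pthread.h>

    #define NUM_INDIRECT_DWORDS 256

    static uint32_t indirect_space[NUM_INDIRECT_DWORDS]; /* hidden register file  */
    static uint32_t index_reg;                           /* "PCIE index" register */
    static pthread_mutex_t idx_lock = PTHREAD_MUTEX_INITIALIZER;

    /* the "data" register forwards to whatever the index register points at */
    static uint32_t data_reg_read(void)    { return indirect_space[index_reg / 4]; }
    static void data_reg_write(uint32_t v) { indirect_space[index_reg / 4] = v; }

    static uint32_t indirect_rreg(uint32_t reg_addr)
    {
        uint32_t r;

        pthread_mutex_lock(&idx_lock);
        index_reg = reg_addr;   /* writel(reg_addr, pcie_index_offset) */
        r = data_reg_read();    /* readl(pcie_data_offset)             */
        pthread_mutex_unlock(&idx_lock);
        return r;
    }

    static void indirect_wreg(uint32_t reg_addr, uint32_t v)
    {
        pthread_mutex_lock(&idx_lock);
        index_reg = reg_addr;   /* writel(reg_addr, pcie_index_offset) */
        data_reg_write(v);      /* writel(v, pcie_data_offset)         */
        pthread_mutex_unlock(&idx_lock);
    }

    int main(void)
    {
        indirect_wreg(0x40, 0x1234);
        printf("0x%x\n", indirect_rreg(0x40));   /* 0x1234 */
        return 0;
    }
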
810 static uint32_t amdgpu_invalid_rreg(struct amdgpu_device *adev, uint32_t reg) in amdgpu_invalid_rreg() argument
827 static void amdgpu_invalid_wreg(struct amdgpu_device *adev, uint32_t reg, uint32_t v) in amdgpu_invalid_wreg() argument
844 static uint64_t amdgpu_invalid_rreg64(struct amdgpu_device *adev, uint32_t reg) in amdgpu_invalid_rreg64() argument
861 static void amdgpu_invalid_wreg64(struct amdgpu_device *adev, uint32_t reg, uint64_t v) in amdgpu_invalid_wreg64() argument
879 static uint32_t amdgpu_block_invalid_rreg(struct amdgpu_device *adev, in amdgpu_block_invalid_rreg() argument
899 static void amdgpu_block_invalid_wreg(struct amdgpu_device *adev, in amdgpu_block_invalid_wreg() argument
915 static int amdgpu_device_asic_init(struct amdgpu_device *adev) in amdgpu_device_asic_init() argument
917 amdgpu_asic_pre_asic_init(adev); in amdgpu_device_asic_init()
919 if (adev->ip_versions[GC_HWIP][0] >= IP_VERSION(11, 0, 0)) in amdgpu_device_asic_init()
920 return amdgpu_atomfirmware_asic_init(adev, true); in amdgpu_device_asic_init()
922 return amdgpu_atom_asic_init(adev->mode_info.atom_context); in amdgpu_device_asic_init()
933 static int amdgpu_device_vram_scratch_init(struct amdgpu_device *adev) in amdgpu_device_vram_scratch_init() argument
935 return amdgpu_bo_create_kernel(adev, AMDGPU_GPU_PAGE_SIZE, in amdgpu_device_vram_scratch_init()
937 &adev->vram_scratch.robj, in amdgpu_device_vram_scratch_init()
938 &adev->vram_scratch.gpu_addr, in amdgpu_device_vram_scratch_init()
939 (void **)&adev->vram_scratch.ptr); in amdgpu_device_vram_scratch_init()
949 static void amdgpu_device_vram_scratch_fini(struct amdgpu_device *adev) in amdgpu_device_vram_scratch_fini() argument
951 amdgpu_bo_free_kernel(&adev->vram_scratch.robj, NULL, NULL); in amdgpu_device_vram_scratch_fini()
964 void amdgpu_device_program_register_sequence(struct amdgpu_device *adev, in amdgpu_device_program_register_sequence() argument
984 if (adev->family >= AMDGPU_FAMILY_AI) in amdgpu_device_program_register_sequence()
1001 void amdgpu_device_pci_config_reset(struct amdgpu_device *adev) in amdgpu_device_pci_config_reset() argument
1003 pci_write_config_dword(adev->pdev, 0x7c, AMDGPU_ASIC_RESET_DATA); in amdgpu_device_pci_config_reset()
1013 int amdgpu_device_pci_reset(struct amdgpu_device *adev) in amdgpu_device_pci_reset() argument
1015 return pci_reset_function(adev->pdev); in amdgpu_device_pci_reset()
1029 static int amdgpu_device_doorbell_init(struct amdgpu_device *adev) in amdgpu_device_doorbell_init() argument
1033 if (adev->asic_type < CHIP_BONAIRE) { in amdgpu_device_doorbell_init()
1034 adev->doorbell.base = 0; in amdgpu_device_doorbell_init()
1035 adev->doorbell.size = 0; in amdgpu_device_doorbell_init()
1036 adev->doorbell.num_doorbells = 0; in amdgpu_device_doorbell_init()
1037 adev->doorbell.ptr = NULL; in amdgpu_device_doorbell_init()
1041 if (pci_resource_flags(adev->pdev, 2) & IORESOURCE_UNSET) in amdgpu_device_doorbell_init()
1044 amdgpu_asic_init_doorbell_index(adev); in amdgpu_device_doorbell_init()
1047 adev->doorbell.base = pci_resource_start(adev->pdev, 2); in amdgpu_device_doorbell_init()
1048 adev->doorbell.size = pci_resource_len(adev->pdev, 2); in amdgpu_device_doorbell_init()
1050 if (adev->enable_mes) { in amdgpu_device_doorbell_init()
1051 adev->doorbell.num_doorbells = in amdgpu_device_doorbell_init()
1052 adev->doorbell.size / sizeof(u32); in amdgpu_device_doorbell_init()
1054 adev->doorbell.num_doorbells = in amdgpu_device_doorbell_init()
1055 min_t(u32, adev->doorbell.size / sizeof(u32), in amdgpu_device_doorbell_init()
1056 adev->doorbell_index.max_assignment+1); in amdgpu_device_doorbell_init()
1057 if (adev->doorbell.num_doorbells == 0) in amdgpu_device_doorbell_init()
1066 if (adev->asic_type >= CHIP_VEGA10) in amdgpu_device_doorbell_init()
1067 adev->doorbell.num_doorbells += 0x400; in amdgpu_device_doorbell_init()
1070 adev->doorbell.ptr = ioremap(adev->doorbell.base, in amdgpu_device_doorbell_init()
1071 adev->doorbell.num_doorbells * in amdgpu_device_doorbell_init()
1073 if (adev->doorbell.ptr == NULL) in amdgpu_device_doorbell_init()
1086 static void amdgpu_device_doorbell_fini(struct amdgpu_device *adev) in amdgpu_device_doorbell_fini() argument
1088 iounmap(adev->doorbell.ptr); in amdgpu_device_doorbell_fini()
1089 adev->doorbell.ptr = NULL; in amdgpu_device_doorbell_fini()
1108 static void amdgpu_device_wb_fini(struct amdgpu_device *adev) in amdgpu_device_wb_fini() argument
1110 if (adev->wb.wb_obj) { in amdgpu_device_wb_fini()
1111 amdgpu_bo_free_kernel(&adev->wb.wb_obj, in amdgpu_device_wb_fini()
1112 &adev->wb.gpu_addr, in amdgpu_device_wb_fini()
1113 (void **)&adev->wb.wb); in amdgpu_device_wb_fini()
1114 adev->wb.wb_obj = NULL; in amdgpu_device_wb_fini()
1127 static int amdgpu_device_wb_init(struct amdgpu_device *adev) in amdgpu_device_wb_init() argument
1131 if (adev->wb.wb_obj == NULL) { in amdgpu_device_wb_init()
1133 r = amdgpu_bo_create_kernel(adev, AMDGPU_MAX_WB * sizeof(uint32_t) * 8, in amdgpu_device_wb_init()
1135 &adev->wb.wb_obj, &adev->wb.gpu_addr, in amdgpu_device_wb_init()
1136 (void **)&adev->wb.wb); in amdgpu_device_wb_init()
1138 dev_warn(adev->dev, "(%d) create WB bo failed\n", r); in amdgpu_device_wb_init()
1142 adev->wb.num_wb = AMDGPU_MAX_WB; in amdgpu_device_wb_init()
1143 memset(&adev->wb.used, 0, sizeof(adev->wb.used)); in amdgpu_device_wb_init()
1146 memset((char *)adev->wb.wb, 0, AMDGPU_MAX_WB * sizeof(uint32_t) * 8); in amdgpu_device_wb_init()
1161 int amdgpu_device_wb_get(struct amdgpu_device *adev, u32 *wb) in amdgpu_device_wb_get() argument
1163 unsigned long offset = find_first_zero_bit(adev->wb.used, adev->wb.num_wb); in amdgpu_device_wb_get()
1165 if (offset < adev->wb.num_wb) { in amdgpu_device_wb_get()
1166 __set_bit(offset, adev->wb.used); in amdgpu_device_wb_get()
1182 void amdgpu_device_wb_free(struct amdgpu_device *adev, u32 wb) in amdgpu_device_wb_free() argument
1185 if (wb < adev->wb.num_wb) in amdgpu_device_wb_free()
1186 __clear_bit(wb, adev->wb.used); in amdgpu_device_wb_free()
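
amdgpu_device_wb_get()/_free() hand out writeback slots from a bitmap: find the first clear bit, set it, and clear it again on release. A user-space approximation of that allocator, with a single 64-bit word standing in for adev->wb.used and open-coded replacements for find_first_zero_bit()/__set_bit()/__clear_bit():

    #include <stdint.h>
    #include <stdio.h>

    #define NUM_WB 64

    static uint64_t wb_used;   /* one bit per writeback slot, like adev->wb.used */

    static int wb_get(uint32_t *wb)
    {
        for (uint32_t i = 0; i < NUM_WB; i++) {
            if (!(wb_used & ((uint64_t)1 << i))) {   /* find_first_zero_bit() */
                wb_used |= (uint64_t)1 << i;         /* __set_bit()           */
                *wb = i;
                return 0;
            }
        }
        return -1;   /* the driver returns -EINVAL when the bitmap is full */
    }

    static void wb_free(uint32_t wb)
    {
        if (wb < NUM_WB)
            wb_used &= ~((uint64_t)1 << wb);         /* __clear_bit() */
    }

    int main(void)
    {
        uint32_t a = 0, b = 0;

        wb_get(&a);
        wb_get(&b);
        wb_free(a);
        wb_get(&a);
        printf("%u %u\n", a, b);   /* slot 0 is reused: prints 0 1 */
        return 0;
    }
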
1198 int amdgpu_device_resize_fb_bar(struct amdgpu_device *adev) in amdgpu_device_resize_fb_bar() argument
1200 int rbar_size = pci_rebar_bytes_to_size(adev->gmc.real_vram_size); in amdgpu_device_resize_fb_bar()
1208 if (amdgpu_sriov_vf(adev)) in amdgpu_device_resize_fb_bar()
1212 if (adev->gmc.real_vram_size && in amdgpu_device_resize_fb_bar()
1213 (pci_resource_len(adev->pdev, 0) >= adev->gmc.real_vram_size)) in amdgpu_device_resize_fb_bar()
1217 root = adev->pdev->bus; in amdgpu_device_resize_fb_bar()
1232 rbar_size = min(fls(pci_rebar_get_possible_sizes(adev->pdev, 0)) - 1, in amdgpu_device_resize_fb_bar()
1236 pci_read_config_word(adev->pdev, PCI_COMMAND, &cmd); in amdgpu_device_resize_fb_bar()
1237 pci_write_config_word(adev->pdev, PCI_COMMAND, in amdgpu_device_resize_fb_bar()
1241 amdgpu_device_doorbell_fini(adev); in amdgpu_device_resize_fb_bar()
1242 if (adev->asic_type >= CHIP_BONAIRE) in amdgpu_device_resize_fb_bar()
1243 pci_release_resource(adev->pdev, 2); in amdgpu_device_resize_fb_bar()
1245 pci_release_resource(adev->pdev, 0); in amdgpu_device_resize_fb_bar()
1247 r = pci_resize_resource(adev->pdev, 0, rbar_size); in amdgpu_device_resize_fb_bar()
1253 pci_assign_unassigned_bus_resources(adev->pdev->bus); in amdgpu_device_resize_fb_bar()
1258 r = amdgpu_device_doorbell_init(adev); in amdgpu_device_resize_fb_bar()
1259 if (r || (pci_resource_flags(adev->pdev, 0) & IORESOURCE_UNSET)) in amdgpu_device_resize_fb_bar()
1262 pci_write_config_word(adev->pdev, PCI_COMMAND, cmd); in amdgpu_device_resize_fb_bar()
1279 bool amdgpu_device_need_post(struct amdgpu_device *adev) in amdgpu_device_need_post() argument
1283 if (amdgpu_sriov_vf(adev)) in amdgpu_device_need_post()
1286 if (amdgpu_passthrough(adev)) { in amdgpu_device_need_post()
1292 if (adev->asic_type == CHIP_FIJI) { in amdgpu_device_need_post()
1295 err = request_firmware(&adev->pm.fw, "amdgpu/fiji_smc.bin", adev->dev); in amdgpu_device_need_post()
1300 fw_ver = *((uint32_t *)adev->pm.fw->data + 69); in amdgpu_device_need_post()
1307 if (adev->gmc.xgmi.pending_reset) in amdgpu_device_need_post()
1310 if (adev->has_hw_reset) { in amdgpu_device_need_post()
1311 adev->has_hw_reset = false; in amdgpu_device_need_post()
1316 if (adev->asic_type >= CHIP_BONAIRE) in amdgpu_device_need_post()
1317 return amdgpu_atombios_scratch_need_asic_init(adev); in amdgpu_device_need_post()
1320 reg = amdgpu_asic_get_config_memsize(adev); in amdgpu_device_need_post()
1338 bool amdgpu_device_should_use_aspm(struct amdgpu_device *adev) in amdgpu_device_should_use_aspm() argument
1350 return pcie_aspm_enabled(adev->pdev); in amdgpu_device_should_use_aspm()
1366 struct amdgpu_device *adev = drm_to_adev(pci_get_drvdata(pdev)); in amdgpu_device_vga_set_decode() local
1367 amdgpu_asic_set_vga_state(adev, state); in amdgpu_device_vga_set_decode()
1385 static void amdgpu_device_check_block_size(struct amdgpu_device *adev) in amdgpu_device_check_block_size() argument
1394 dev_warn(adev->dev, "VM page table size (%d) too small\n", in amdgpu_device_check_block_size()
1408 static void amdgpu_device_check_vm_size(struct amdgpu_device *adev) in amdgpu_device_check_vm_size() argument
1415 dev_warn(adev->dev, "VM size (%d) too small, min is 1GB\n", in amdgpu_device_check_vm_size()
1421 static void amdgpu_device_check_smu_prv_buffer_size(struct amdgpu_device *adev) in amdgpu_device_check_smu_prv_buffer_size() argument
1451 adev->pm.smu_prv_buffer_size = amdgpu_smu_memory_pool_size << 28; in amdgpu_device_check_smu_prv_buffer_size()
1458 adev->pm.smu_prv_buffer_size = 0; in amdgpu_device_check_smu_prv_buffer_size()
1461 static int amdgpu_device_init_apu_flags(struct amdgpu_device *adev) in amdgpu_device_init_apu_flags() argument
1463 if (!(adev->flags & AMD_IS_APU) || in amdgpu_device_init_apu_flags()
1464 adev->asic_type < CHIP_RAVEN) in amdgpu_device_init_apu_flags()
1467 switch (adev->asic_type) { in amdgpu_device_init_apu_flags()
1469 if (adev->pdev->device == 0x15dd) in amdgpu_device_init_apu_flags()
1470 adev->apu_flags |= AMD_APU_IS_RAVEN; in amdgpu_device_init_apu_flags()
1471 if (adev->pdev->device == 0x15d8) in amdgpu_device_init_apu_flags()
1472 adev->apu_flags |= AMD_APU_IS_PICASSO; in amdgpu_device_init_apu_flags()
1475 if ((adev->pdev->device == 0x1636) || in amdgpu_device_init_apu_flags()
1476 (adev->pdev->device == 0x164c)) in amdgpu_device_init_apu_flags()
1477 adev->apu_flags |= AMD_APU_IS_RENOIR; in amdgpu_device_init_apu_flags()
1479 adev->apu_flags |= AMD_APU_IS_GREEN_SARDINE; in amdgpu_device_init_apu_flags()
1482 adev->apu_flags |= AMD_APU_IS_VANGOGH; in amdgpu_device_init_apu_flags()
1487 if ((adev->pdev->device == 0x13FE) || in amdgpu_device_init_apu_flags()
1488 (adev->pdev->device == 0x143F)) in amdgpu_device_init_apu_flags()
1489 adev->apu_flags |= AMD_APU_IS_CYAN_SKILLFISH2; in amdgpu_device_init_apu_flags()
1506 static int amdgpu_device_check_arguments(struct amdgpu_device *adev) in amdgpu_device_check_arguments() argument
1509 dev_warn(adev->dev, "sched jobs (%d) must be at least 4\n", in amdgpu_device_check_arguments()
1513 dev_warn(adev->dev, "sched jobs (%d) must be a power of 2\n", in amdgpu_device_check_arguments()
1520 dev_warn(adev->dev, "gart size (%d) too small\n", in amdgpu_device_check_arguments()
1527 dev_warn(adev->dev, "gtt size (%d) too small\n", in amdgpu_device_check_arguments()
1535 dev_warn(adev->dev, "valid range is between 4 and 9\n"); in amdgpu_device_check_arguments()
1540 dev_warn(adev->dev, "sched hw submission jobs (%d) must be at least 2\n", in amdgpu_device_check_arguments()
1544 dev_warn(adev->dev, "sched hw submission jobs (%d) must be a power of 2\n", in amdgpu_device_check_arguments()
1550 dev_warn(adev->dev, "invalid option for reset method, reverting to default\n"); in amdgpu_device_check_arguments()
1554 amdgpu_device_check_smu_prv_buffer_size(adev); in amdgpu_device_check_arguments()
1556 amdgpu_device_check_vm_size(adev); in amdgpu_device_check_arguments()
1558 amdgpu_device_check_block_size(adev); in amdgpu_device_check_arguments()
1560 adev->firmware.load_type = amdgpu_ucode_get_load_type(adev, amdgpu_fw_load_type); in amdgpu_device_check_arguments()
1650 struct amdgpu_device *adev = dev; in amdgpu_device_ip_set_clockgating_state() local
1653 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_set_clockgating_state()
1654 if (!adev->ip_blocks[i].status.valid) in amdgpu_device_ip_set_clockgating_state()
1656 if (adev->ip_blocks[i].version->type != block_type) in amdgpu_device_ip_set_clockgating_state()
1658 if (!adev->ip_blocks[i].version->funcs->set_clockgating_state) in amdgpu_device_ip_set_clockgating_state()
1660 r = adev->ip_blocks[i].version->funcs->set_clockgating_state( in amdgpu_device_ip_set_clockgating_state()
1661 (void *)adev, state); in amdgpu_device_ip_set_clockgating_state()
1664 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_ip_set_clockgating_state()
1684 struct amdgpu_device *adev = dev; in amdgpu_device_ip_set_powergating_state() local
1687 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_set_powergating_state()
1688 if (!adev->ip_blocks[i].status.valid) in amdgpu_device_ip_set_powergating_state()
1690 if (adev->ip_blocks[i].version->type != block_type) in amdgpu_device_ip_set_powergating_state()
1692 if (!adev->ip_blocks[i].version->funcs->set_powergating_state) in amdgpu_device_ip_set_powergating_state()
1694 r = adev->ip_blocks[i].version->funcs->set_powergating_state( in amdgpu_device_ip_set_powergating_state()
1695 (void *)adev, state); in amdgpu_device_ip_set_powergating_state()
1698 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_ip_set_powergating_state()
1714 void amdgpu_device_ip_get_clockgating_state(struct amdgpu_device *adev, in amdgpu_device_ip_get_clockgating_state() argument
1719 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_get_clockgating_state()
1720 if (!adev->ip_blocks[i].status.valid) in amdgpu_device_ip_get_clockgating_state()
1722 if (adev->ip_blocks[i].version->funcs->get_clockgating_state) in amdgpu_device_ip_get_clockgating_state()
1723 adev->ip_blocks[i].version->funcs->get_clockgating_state((void *)adev, flags); in amdgpu_device_ip_get_clockgating_state()
1736 int amdgpu_device_ip_wait_for_idle(struct amdgpu_device *adev, in amdgpu_device_ip_wait_for_idle() argument
1741 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_wait_for_idle()
1742 if (!adev->ip_blocks[i].status.valid) in amdgpu_device_ip_wait_for_idle()
1744 if (adev->ip_blocks[i].version->type == block_type) { in amdgpu_device_ip_wait_for_idle()
1745 r = adev->ip_blocks[i].version->funcs->wait_for_idle((void *)adev); in amdgpu_device_ip_wait_for_idle()
1764 bool amdgpu_device_ip_is_idle(struct amdgpu_device *adev, in amdgpu_device_ip_is_idle() argument
1769 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_is_idle()
1770 if (!adev->ip_blocks[i].status.valid) in amdgpu_device_ip_is_idle()
1772 if (adev->ip_blocks[i].version->type == block_type) in amdgpu_device_ip_is_idle()
1773 return adev->ip_blocks[i].version->funcs->is_idle((void *)adev); in amdgpu_device_ip_is_idle()
1789 amdgpu_device_ip_get_ip_block(struct amdgpu_device *adev, in amdgpu_device_ip_get_ip_block() argument
1794 for (i = 0; i < adev->num_ip_blocks; i++) in amdgpu_device_ip_get_ip_block()
1795 if (adev->ip_blocks[i].version->type == type) in amdgpu_device_ip_get_ip_block()
1796 return &adev->ip_blocks[i]; in amdgpu_device_ip_get_ip_block()
1812 int amdgpu_device_ip_block_version_cmp(struct amdgpu_device *adev, in amdgpu_device_ip_block_version_cmp() argument
1816 struct amdgpu_ip_block *ip_block = amdgpu_device_ip_get_ip_block(adev, type); in amdgpu_device_ip_block_version_cmp()
1835 int amdgpu_device_ip_block_add(struct amdgpu_device *adev, in amdgpu_device_ip_block_add() argument
1843 if (adev->harvest_ip_mask & AMD_HARVEST_IP_VCN_MASK) in amdgpu_device_ip_block_add()
1847 if (adev->harvest_ip_mask & AMD_HARVEST_IP_JPEG_MASK) in amdgpu_device_ip_block_add()
1854 DRM_INFO("add ip block number %d <%s>\n", adev->num_ip_blocks, in amdgpu_device_ip_block_add()
1857 adev->ip_blocks[adev->num_ip_blocks++].version = ip_block_version; in amdgpu_device_ip_block_add()
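
amdgpu_device_ip_block_add() appends an IP block descriptor to the device's array unless the block's type has been harvested away (the VCN/JPEG checks on lines 1843-1847). A compact model of that registration step; the enum values, mask bits, array bound, and error handling here are simplified stand-ins, not the driver's definitions.

    #include <stdio.h>

    enum ip_type { IP_COMMON, IP_GMC, IP_VCN, IP_JPEG };

    #define HARVEST_VCN   (1u << 0)
    #define HARVEST_JPEG  (1u << 1)
    #define MAX_IP_BLOCKS 8

    struct device_model {
        unsigned int harvest_mask;
        enum ip_type blocks[MAX_IP_BLOCKS];
        int          num_blocks;
    };

    static int ip_block_add(struct device_model *d, enum ip_type type)
    {
        if ((type == IP_VCN  && (d->harvest_mask & HARVEST_VCN)) ||
            (type == IP_JPEG && (d->harvest_mask & HARVEST_JPEG)))
            return 0;                    /* silently skip harvested IPs     */
        if (d->num_blocks >= MAX_IP_BLOCKS)
            return -1;                   /* bound check for this model only */
        printf("add ip block number %d <%d>\n", d->num_blocks, (int)type);
        d->blocks[d->num_blocks++] = type;
        return 0;
    }

    int main(void)
    {
        struct device_model d = { .harvest_mask = HARVEST_JPEG };

        ip_block_add(&d, IP_COMMON);
        ip_block_add(&d, IP_GMC);
        ip_block_add(&d, IP_JPEG);   /* harvested, so not registered */
        return 0;
    }
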
1874 static void amdgpu_device_enable_virtual_display(struct amdgpu_device *adev) in amdgpu_device_enable_virtual_display() argument
1876 adev->enable_virtual_display = false; in amdgpu_device_enable_virtual_display()
1879 const char *pci_address_name = pci_name(adev->pdev); in amdgpu_device_enable_virtual_display()
1891 adev->enable_virtual_display = true; in amdgpu_device_enable_virtual_display()
1902 adev->mode_info.num_crtc = num_crtc; in amdgpu_device_enable_virtual_display()
1904 adev->mode_info.num_crtc = 1; in amdgpu_device_enable_virtual_display()
1912 adev->enable_virtual_display, adev->mode_info.num_crtc); in amdgpu_device_enable_virtual_display()
1928 static int amdgpu_device_parse_gpu_info_fw(struct amdgpu_device *adev) in amdgpu_device_parse_gpu_info_fw() argument
1935 adev->firmware.gpu_info_fw = NULL; in amdgpu_device_parse_gpu_info_fw()
1937 if (adev->mman.discovery_bin) { in amdgpu_device_parse_gpu_info_fw()
1943 if (adev->asic_type != CHIP_NAVI12) in amdgpu_device_parse_gpu_info_fw()
1947 switch (adev->asic_type) { in amdgpu_device_parse_gpu_info_fw()
1957 if (adev->apu_flags & AMD_APU_IS_RAVEN2) in amdgpu_device_parse_gpu_info_fw()
1959 else if (adev->apu_flags & AMD_APU_IS_PICASSO) in amdgpu_device_parse_gpu_info_fw()
1973 err = request_firmware(&adev->firmware.gpu_info_fw, fw_name, adev->dev); in amdgpu_device_parse_gpu_info_fw()
1975 dev_err(adev->dev, in amdgpu_device_parse_gpu_info_fw()
1980 err = amdgpu_ucode_validate(adev->firmware.gpu_info_fw); in amdgpu_device_parse_gpu_info_fw()
1982 dev_err(adev->dev, in amdgpu_device_parse_gpu_info_fw()
1988 hdr = (const struct gpu_info_firmware_header_v1_0 *)adev->firmware.gpu_info_fw->data; in amdgpu_device_parse_gpu_info_fw()
1995 (const struct gpu_info_firmware_v1_0 *)(adev->firmware.gpu_info_fw->data + in amdgpu_device_parse_gpu_info_fw()
2001 if (adev->asic_type == CHIP_NAVI12) in amdgpu_device_parse_gpu_info_fw()
2004 adev->gfx.config.max_shader_engines = le32_to_cpu(gpu_info_fw->gc_num_se); in amdgpu_device_parse_gpu_info_fw()
2005 adev->gfx.config.max_cu_per_sh = le32_to_cpu(gpu_info_fw->gc_num_cu_per_sh); in amdgpu_device_parse_gpu_info_fw()
2006 adev->gfx.config.max_sh_per_se = le32_to_cpu(gpu_info_fw->gc_num_sh_per_se); in amdgpu_device_parse_gpu_info_fw()
2007 adev->gfx.config.max_backends_per_se = le32_to_cpu(gpu_info_fw->gc_num_rb_per_se); in amdgpu_device_parse_gpu_info_fw()
2008 adev->gfx.config.max_texture_channel_caches = in amdgpu_device_parse_gpu_info_fw()
2010 adev->gfx.config.max_gprs = le32_to_cpu(gpu_info_fw->gc_num_gprs); in amdgpu_device_parse_gpu_info_fw()
2011 adev->gfx.config.max_gs_threads = le32_to_cpu(gpu_info_fw->gc_num_max_gs_thds); in amdgpu_device_parse_gpu_info_fw()
2012 adev->gfx.config.gs_vgt_table_depth = le32_to_cpu(gpu_info_fw->gc_gs_table_depth); in amdgpu_device_parse_gpu_info_fw()
2013 adev->gfx.config.gs_prim_buffer_depth = le32_to_cpu(gpu_info_fw->gc_gsprim_buff_depth); in amdgpu_device_parse_gpu_info_fw()
2014 adev->gfx.config.double_offchip_lds_buf = in amdgpu_device_parse_gpu_info_fw()
2016 adev->gfx.cu_info.wave_front_size = le32_to_cpu(gpu_info_fw->gc_wave_size); in amdgpu_device_parse_gpu_info_fw()
2017 adev->gfx.cu_info.max_waves_per_simd = in amdgpu_device_parse_gpu_info_fw()
2019 adev->gfx.cu_info.max_scratch_slots_per_cu = in amdgpu_device_parse_gpu_info_fw()
2021 adev->gfx.cu_info.lds_size = le32_to_cpu(gpu_info_fw->gc_lds_size); in amdgpu_device_parse_gpu_info_fw()
2024 (const struct gpu_info_firmware_v1_1 *)(adev->firmware.gpu_info_fw->data + in amdgpu_device_parse_gpu_info_fw()
2026 adev->gfx.config.num_sc_per_sh = in amdgpu_device_parse_gpu_info_fw()
2028 adev->gfx.config.num_packer_per_sc = in amdgpu_device_parse_gpu_info_fw()
2039 (const struct gpu_info_firmware_v1_2 *)(adev->firmware.gpu_info_fw->data + in amdgpu_device_parse_gpu_info_fw()
2041 adev->dm.soc_bounding_box = &gpu_info_fw->soc_bounding_box; in amdgpu_device_parse_gpu_info_fw()
2046 dev_err(adev->dev, in amdgpu_device_parse_gpu_info_fw()
2065 static int amdgpu_device_ip_early_init(struct amdgpu_device *adev) in amdgpu_device_ip_early_init() argument
2067 struct drm_device *dev = adev_to_drm(adev); in amdgpu_device_ip_early_init()
2071 amdgpu_device_enable_virtual_display(adev); in amdgpu_device_ip_early_init()
2073 if (amdgpu_sriov_vf(adev)) { in amdgpu_device_ip_early_init()
2074 r = amdgpu_virt_request_full_gpu(adev, true); in amdgpu_device_ip_early_init()
2079 switch (adev->asic_type) { in amdgpu_device_ip_early_init()
2086 adev->family = AMDGPU_FAMILY_SI; in amdgpu_device_ip_early_init()
2087 r = si_set_ip_blocks(adev); in amdgpu_device_ip_early_init()
2098 if (adev->flags & AMD_IS_APU) in amdgpu_device_ip_early_init()
2099 adev->family = AMDGPU_FAMILY_KV; in amdgpu_device_ip_early_init()
2101 adev->family = AMDGPU_FAMILY_CI; in amdgpu_device_ip_early_init()
2103 r = cik_set_ip_blocks(adev); in amdgpu_device_ip_early_init()
2117 if (adev->flags & AMD_IS_APU) in amdgpu_device_ip_early_init()
2118 adev->family = AMDGPU_FAMILY_CZ; in amdgpu_device_ip_early_init()
2120 adev->family = AMDGPU_FAMILY_VI; in amdgpu_device_ip_early_init()
2122 r = vi_set_ip_blocks(adev); in amdgpu_device_ip_early_init()
2127 r = amdgpu_discovery_set_ip_blocks(adev); in amdgpu_device_ip_early_init()
2136 ((adev->flags & AMD_IS_APU) == 0) && in amdgpu_device_ip_early_init()
2138 adev->flags |= AMD_IS_PX; in amdgpu_device_ip_early_init()
2140 if (!(adev->flags & AMD_IS_APU)) { in amdgpu_device_ip_early_init()
2141 parent = pci_upstream_bridge(adev->pdev); in amdgpu_device_ip_early_init()
2142 adev->has_pr3 = parent ? pci_pr3_present(parent) : false; in amdgpu_device_ip_early_init()
2145 amdgpu_amdkfd_device_probe(adev); in amdgpu_device_ip_early_init()
2147 adev->pm.pp_feature = amdgpu_pp_feature_mask; in amdgpu_device_ip_early_init()
2148 if (amdgpu_sriov_vf(adev) || sched_policy == KFD_SCHED_POLICY_NO_HWS) in amdgpu_device_ip_early_init()
2149 adev->pm.pp_feature &= ~PP_GFXOFF_MASK; in amdgpu_device_ip_early_init()
2150 if (amdgpu_sriov_vf(adev) && adev->asic_type == CHIP_SIENNA_CICHLID) in amdgpu_device_ip_early_init()
2151 adev->pm.pp_feature &= ~PP_OVERDRIVE_MASK; in amdgpu_device_ip_early_init()
2153 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_early_init()
2156 i, adev->ip_blocks[i].version->funcs->name); in amdgpu_device_ip_early_init()
2157 adev->ip_blocks[i].status.valid = false; in amdgpu_device_ip_early_init()
2159 if (adev->ip_blocks[i].version->funcs->early_init) { in amdgpu_device_ip_early_init()
2160 r = adev->ip_blocks[i].version->funcs->early_init((void *)adev); in amdgpu_device_ip_early_init()
2162 adev->ip_blocks[i].status.valid = false; in amdgpu_device_ip_early_init()
2165 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_ip_early_init()
2168 adev->ip_blocks[i].status.valid = true; in amdgpu_device_ip_early_init()
2171 adev->ip_blocks[i].status.valid = true; in amdgpu_device_ip_early_init()
2175 if (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_COMMON) { in amdgpu_device_ip_early_init()
2176 r = amdgpu_device_parse_gpu_info_fw(adev); in amdgpu_device_ip_early_init()
2181 if (!amdgpu_get_bios(adev)) in amdgpu_device_ip_early_init()
2184 r = amdgpu_atombios_init(adev); in amdgpu_device_ip_early_init()
2186 dev_err(adev->dev, "amdgpu_atombios_init failed\n"); in amdgpu_device_ip_early_init()
2187 amdgpu_vf_error_put(adev, AMDGIM_ERROR_VF_ATOMBIOS_INIT_FAIL, 0, 0); in amdgpu_device_ip_early_init()
2192 if (amdgpu_sriov_vf(adev)) in amdgpu_device_ip_early_init()
2193 amdgpu_virt_init_data_exchange(adev); in amdgpu_device_ip_early_init()
2198 adev->cg_flags &= amdgpu_cg_mask; in amdgpu_device_ip_early_init()
2199 adev->pg_flags &= amdgpu_pg_mask; in amdgpu_device_ip_early_init()
2204 static int amdgpu_device_ip_hw_init_phase1(struct amdgpu_device *adev) in amdgpu_device_ip_hw_init_phase1() argument
2208 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_hw_init_phase1()
2209 if (!adev->ip_blocks[i].status.sw) in amdgpu_device_ip_hw_init_phase1()
2211 if (adev->ip_blocks[i].status.hw) in amdgpu_device_ip_hw_init_phase1()
2213 if (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_COMMON || in amdgpu_device_ip_hw_init_phase1()
2214 (amdgpu_sriov_vf(adev) && (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_PSP)) || in amdgpu_device_ip_hw_init_phase1()
2215 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_IH) { in amdgpu_device_ip_hw_init_phase1()
2216 r = adev->ip_blocks[i].version->funcs->hw_init(adev); in amdgpu_device_ip_hw_init_phase1()
2219 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_ip_hw_init_phase1()
2222 adev->ip_blocks[i].status.hw = true; in amdgpu_device_ip_hw_init_phase1()
2229 static int amdgpu_device_ip_hw_init_phase2(struct amdgpu_device *adev) in amdgpu_device_ip_hw_init_phase2() argument
2233 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_hw_init_phase2()
2234 if (!adev->ip_blocks[i].status.sw) in amdgpu_device_ip_hw_init_phase2()
2236 if (adev->ip_blocks[i].status.hw) in amdgpu_device_ip_hw_init_phase2()
2238 r = adev->ip_blocks[i].version->funcs->hw_init(adev); in amdgpu_device_ip_hw_init_phase2()
2241 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_ip_hw_init_phase2()
2244 adev->ip_blocks[i].status.hw = true; in amdgpu_device_ip_hw_init_phase2()
2250 static int amdgpu_device_fw_loading(struct amdgpu_device *adev) in amdgpu_device_fw_loading() argument
2256 if (adev->asic_type >= CHIP_VEGA10) { in amdgpu_device_fw_loading()
2257 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_fw_loading()
2258 if (adev->ip_blocks[i].version->type != AMD_IP_BLOCK_TYPE_PSP) in amdgpu_device_fw_loading()
2261 if (!adev->ip_blocks[i].status.sw) in amdgpu_device_fw_loading()
2265 if (adev->ip_blocks[i].status.hw == true) in amdgpu_device_fw_loading()
2268 if (amdgpu_in_reset(adev) || adev->in_suspend) { in amdgpu_device_fw_loading()
2269 r = adev->ip_blocks[i].version->funcs->resume(adev); in amdgpu_device_fw_loading()
2272 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_fw_loading()
2276 r = adev->ip_blocks[i].version->funcs->hw_init(adev); in amdgpu_device_fw_loading()
2279 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_fw_loading()
2284 adev->ip_blocks[i].status.hw = true; in amdgpu_device_fw_loading()
2289 if (!amdgpu_sriov_vf(adev) || adev->asic_type == CHIP_TONGA) in amdgpu_device_fw_loading()
2290 r = amdgpu_pm_load_smu_firmware(adev, &smu_version); in amdgpu_device_fw_loading()
2295 static int amdgpu_device_init_schedulers(struct amdgpu_device *adev) in amdgpu_device_init_schedulers() argument
2301 struct amdgpu_ring *ring = adev->rings[i]; in amdgpu_device_init_schedulers()
2309 timeout = adev->gfx_timeout; in amdgpu_device_init_schedulers()
2312 timeout = adev->compute_timeout; in amdgpu_device_init_schedulers()
2315 timeout = adev->sdma_timeout; in amdgpu_device_init_schedulers()
2318 timeout = adev->video_timeout; in amdgpu_device_init_schedulers()
2324 timeout, adev->reset_domain->wq, in amdgpu_device_init_schedulers()
2326 adev->dev); in amdgpu_device_init_schedulers()
2349 static int amdgpu_device_ip_init(struct amdgpu_device *adev) in amdgpu_device_ip_init() argument
2353 r = amdgpu_ras_init(adev); in amdgpu_device_ip_init()
2357 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_init()
2358 if (!adev->ip_blocks[i].status.valid) in amdgpu_device_ip_init()
2360 r = adev->ip_blocks[i].version->funcs->sw_init((void *)adev); in amdgpu_device_ip_init()
2363 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_ip_init()
2366 adev->ip_blocks[i].status.sw = true; in amdgpu_device_ip_init()
2368 if (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_COMMON) { in amdgpu_device_ip_init()
2370 r = adev->ip_blocks[i].version->funcs->hw_init((void *)adev); in amdgpu_device_ip_init()
2375 adev->ip_blocks[i].status.hw = true; in amdgpu_device_ip_init()
2376 } else if (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_GMC) { in amdgpu_device_ip_init()
2379 if (amdgpu_sriov_vf(adev)) in amdgpu_device_ip_init()
2380 amdgpu_virt_exchange_data(adev); in amdgpu_device_ip_init()
2382 r = amdgpu_device_vram_scratch_init(adev); in amdgpu_device_ip_init()
2387 r = adev->ip_blocks[i].version->funcs->hw_init((void *)adev); in amdgpu_device_ip_init()
2392 r = amdgpu_device_wb_init(adev); in amdgpu_device_ip_init()
2397 adev->ip_blocks[i].status.hw = true; in amdgpu_device_ip_init()
2400 if (amdgpu_mcbp || amdgpu_sriov_vf(adev)) { in amdgpu_device_ip_init()
2401 r = amdgpu_allocate_static_csa(adev, &adev->virt.csa_obj, in amdgpu_device_ip_init()
2412 if (amdgpu_sriov_vf(adev)) in amdgpu_device_ip_init()
2413 amdgpu_virt_init_data_exchange(adev); in amdgpu_device_ip_init()
2415 r = amdgpu_ib_pool_init(adev); in amdgpu_device_ip_init()
2417 dev_err(adev->dev, "IB initialization failed (%d).\n", r); in amdgpu_device_ip_init()
2418 amdgpu_vf_error_put(adev, AMDGIM_ERROR_VF_IB_INIT_FAIL, 0, r); in amdgpu_device_ip_init()
2422 r = amdgpu_ucode_create_bo(adev); /* create ucode bo when sw_init complete*/ in amdgpu_device_ip_init()
2426 r = amdgpu_device_ip_hw_init_phase1(adev); in amdgpu_device_ip_init()
2430 r = amdgpu_device_fw_loading(adev); in amdgpu_device_ip_init()
2434 r = amdgpu_device_ip_hw_init_phase2(adev); in amdgpu_device_ip_init()
2453 r = amdgpu_ras_recovery_init(adev); in amdgpu_device_ip_init()
2460 if (adev->gmc.xgmi.num_physical_nodes > 1) { in amdgpu_device_ip_init()
2461 if (amdgpu_xgmi_add_device(adev) == 0) { in amdgpu_device_ip_init()
2462 if (!amdgpu_sriov_vf(adev)) { in amdgpu_device_ip_init()
2463 struct amdgpu_hive_info *hive = amdgpu_get_xgmi_hive(adev); in amdgpu_device_ip_init()
2473 amdgpu_reset_put_reset_domain(adev->reset_domain); in amdgpu_device_ip_init()
2474 adev->reset_domain = hive->reset_domain; in amdgpu_device_ip_init()
2480 r = amdgpu_device_init_schedulers(adev); in amdgpu_device_ip_init()
2485 if (!adev->gmc.xgmi.pending_reset) in amdgpu_device_ip_init()
2486 amdgpu_amdkfd_device_init(adev); in amdgpu_device_ip_init()
2488 amdgpu_fru_get_product_info(adev); in amdgpu_device_ip_init()
2491 if (amdgpu_sriov_vf(adev)) in amdgpu_device_ip_init()
2492 amdgpu_virt_release_full_gpu(adev, true); in amdgpu_device_ip_init()
2506 static void amdgpu_device_fill_reset_magic(struct amdgpu_device *adev) in amdgpu_device_fill_reset_magic() argument
2508 memcpy(adev->reset_magic, adev->gart.ptr, AMDGPU_RESET_MAGIC_NUM); in amdgpu_device_fill_reset_magic()
2521 static bool amdgpu_device_check_vram_lost(struct amdgpu_device *adev) in amdgpu_device_check_vram_lost() argument
2523 if (memcmp(adev->gart.ptr, adev->reset_magic, in amdgpu_device_check_vram_lost()
2527 if (!amdgpu_in_reset(adev)) in amdgpu_device_check_vram_lost()
2534 switch (amdgpu_asic_reset_method(adev)) { in amdgpu_device_check_vram_lost()
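
The reset-magic pair above snapshots the first AMDGPU_RESET_MAGIC_NUM bytes of the GART page at init and compares them again after a reset; a mismatch is treated as lost VRAM contents. A sketch of that check with plain buffers, assuming 64 as a stand-in for the real constant; the reset-method special cases in the switch above are not modelled.

    #include <string.h>
    #include <stdbool.h>
    #include <stdio.h>

    #define RESET_MAGIC_NUM 64   /* stand-in for AMDGPU_RESET_MAGIC_NUM */

    static unsigned char gart_page[4096];               /* models adev->gart.ptr    */
    static unsigned char reset_magic[RESET_MAGIC_NUM];  /* models adev->reset_magic */

    static void fill_reset_magic(void)
    {
        memcpy(reset_magic, gart_page, RESET_MAGIC_NUM);
    }

    static bool check_vram_lost(void)
    {
        return memcmp(gart_page, reset_magic, RESET_MAGIC_NUM) != 0;
    }

    int main(void)
    {
        memset(gart_page, 0xab, sizeof(gart_page));
        fill_reset_magic();
        printf("%d\n", check_vram_lost());    /* 0: contents intact      */
        memset(gart_page, 0, sizeof(gart_page));
        printf("%d\n", check_vram_lost());    /* 1: treated as VRAM loss */
        return 0;
    }
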
2556 int amdgpu_device_set_cg_state(struct amdgpu_device *adev, in amdgpu_device_set_cg_state() argument
2564 for (j = 0; j < adev->num_ip_blocks; j++) { in amdgpu_device_set_cg_state()
2565 i = state == AMD_CG_STATE_GATE ? j : adev->num_ip_blocks - j - 1; in amdgpu_device_set_cg_state()
2566 if (!adev->ip_blocks[i].status.late_initialized) in amdgpu_device_set_cg_state()
2569 if (adev->in_s0ix && in amdgpu_device_set_cg_state()
2570 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_GFX) in amdgpu_device_set_cg_state()
2573 if (adev->ip_blocks[i].version->type != AMD_IP_BLOCK_TYPE_UVD && in amdgpu_device_set_cg_state()
2574 adev->ip_blocks[i].version->type != AMD_IP_BLOCK_TYPE_VCE && in amdgpu_device_set_cg_state()
2575 adev->ip_blocks[i].version->type != AMD_IP_BLOCK_TYPE_VCN && in amdgpu_device_set_cg_state()
2576 adev->ip_blocks[i].version->type != AMD_IP_BLOCK_TYPE_JPEG && in amdgpu_device_set_cg_state()
2577 adev->ip_blocks[i].version->funcs->set_clockgating_state) { in amdgpu_device_set_cg_state()
2579 r = adev->ip_blocks[i].version->funcs->set_clockgating_state((void *)adev, in amdgpu_device_set_cg_state()
2583 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_set_cg_state()
2592 int amdgpu_device_set_pg_state(struct amdgpu_device *adev, in amdgpu_device_set_pg_state() argument
2600 for (j = 0; j < adev->num_ip_blocks; j++) { in amdgpu_device_set_pg_state()
2601 i = state == AMD_PG_STATE_GATE ? j : adev->num_ip_blocks - j - 1; in amdgpu_device_set_pg_state()
2602 if (!adev->ip_blocks[i].status.late_initialized) in amdgpu_device_set_pg_state()
2605 if (adev->in_s0ix && in amdgpu_device_set_pg_state()
2606 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_GFX) in amdgpu_device_set_pg_state()
2609 if (adev->ip_blocks[i].version->type != AMD_IP_BLOCK_TYPE_UVD && in amdgpu_device_set_pg_state()
2610 adev->ip_blocks[i].version->type != AMD_IP_BLOCK_TYPE_VCE && in amdgpu_device_set_pg_state()
2611 adev->ip_blocks[i].version->type != AMD_IP_BLOCK_TYPE_VCN && in amdgpu_device_set_pg_state()
2612 adev->ip_blocks[i].version->type != AMD_IP_BLOCK_TYPE_JPEG && in amdgpu_device_set_pg_state()
2613 adev->ip_blocks[i].version->funcs->set_powergating_state) { in amdgpu_device_set_pg_state()
2615 r = adev->ip_blocks[i].version->funcs->set_powergating_state((void *)adev, in amdgpu_device_set_pg_state()
2619 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_set_pg_state()
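
Both state setters above walk the IP blocks in opposite directions depending on the target state: gating runs front to back, ungating back to front, which is what the `i = state == ..._GATE ? j : num_ip_blocks - j - 1` expression encodes. A trimmed-down illustration of that index selection; the block names and enum are invented, and the UVD/VCE/VCN/JPEG and s0ix exclusions are left out.

    #include <stdio.h>

    enum cg_state { CG_STATE_GATE, CG_STATE_UNGATE };

    static void set_cg_state(const char **blocks, int n, enum cg_state state)
    {
        for (int j = 0; j < n; j++) {
            /* forward order when gating, reverse order when ungating */
            int i = (state == CG_STATE_GATE) ? j : n - j - 1;

            printf("%sgate %s\n", state == CG_STATE_GATE ? "" : "un", blocks[i]);
        }
    }

    int main(void)
    {
        const char *blocks[] = { "common", "gmc", "gfx", "sdma" };

        set_cg_state(blocks, 4, CG_STATE_GATE);    /* common .. sdma */
        set_cg_state(blocks, 4, CG_STATE_UNGATE);  /* sdma .. common */
        return 0;
    }
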
2630 struct amdgpu_device *adev; in amdgpu_device_enable_mgpu_fan_boost() local
2645 adev = gpu_ins->adev; in amdgpu_device_enable_mgpu_fan_boost()
2646 if (!(adev->flags & AMD_IS_APU) && in amdgpu_device_enable_mgpu_fan_boost()
2648 ret = amdgpu_dpm_enable_mgpu_fan_boost(adev); in amdgpu_device_enable_mgpu_fan_boost()
2674 static int amdgpu_device_ip_late_init(struct amdgpu_device *adev) in amdgpu_device_ip_late_init() argument
2679 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_late_init()
2680 if (!adev->ip_blocks[i].status.hw) in amdgpu_device_ip_late_init()
2682 if (adev->ip_blocks[i].version->funcs->late_init) { in amdgpu_device_ip_late_init()
2683 r = adev->ip_blocks[i].version->funcs->late_init((void *)adev); in amdgpu_device_ip_late_init()
2686 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_ip_late_init()
2690 adev->ip_blocks[i].status.late_initialized = true; in amdgpu_device_ip_late_init()
2693 r = amdgpu_ras_late_init(adev); in amdgpu_device_ip_late_init()
2699 amdgpu_ras_set_error_query_ready(adev, true); in amdgpu_device_ip_late_init()
2701 amdgpu_device_set_cg_state(adev, AMD_CG_STATE_GATE); in amdgpu_device_ip_late_init()
2702 amdgpu_device_set_pg_state(adev, AMD_PG_STATE_GATE); in amdgpu_device_ip_late_init()
2704 amdgpu_device_fill_reset_magic(adev); in amdgpu_device_ip_late_init()
2711 …if (amdgpu_passthrough(adev) && ((adev->asic_type == CHIP_ARCTURUS && adev->gmc.xgmi.num_physical_… in amdgpu_device_ip_late_init()
2712 adev->asic_type == CHIP_ALDEBARAN )) in amdgpu_device_ip_late_init()
2713 amdgpu_dpm_handle_passthrough_sbr(adev, true); in amdgpu_device_ip_late_init()
2715 if (adev->gmc.xgmi.num_physical_nodes > 1) { in amdgpu_device_ip_late_init()
2731 if (mgpu_info.num_dgpu == adev->gmc.xgmi.num_physical_nodes) { in amdgpu_device_ip_late_init()
2734 if (gpu_instance->adev->flags & AMD_IS_APU) in amdgpu_device_ip_late_init()
2737 r = amdgpu_xgmi_set_pstate(gpu_instance->adev, in amdgpu_device_ip_late_init()
2759 static void amdgpu_device_smu_fini_early(struct amdgpu_device *adev) in amdgpu_device_smu_fini_early() argument
2763 if (adev->ip_versions[GC_HWIP][0] > IP_VERSION(9, 0, 0)) in amdgpu_device_smu_fini_early()
2766 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_smu_fini_early()
2767 if (!adev->ip_blocks[i].status.hw) in amdgpu_device_smu_fini_early()
2769 if (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_SMC) { in amdgpu_device_smu_fini_early()
2770 r = adev->ip_blocks[i].version->funcs->hw_fini((void *)adev); in amdgpu_device_smu_fini_early()
2774 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_smu_fini_early()
2776 adev->ip_blocks[i].status.hw = false; in amdgpu_device_smu_fini_early()
2782 static int amdgpu_device_ip_fini_early(struct amdgpu_device *adev) in amdgpu_device_ip_fini_early() argument
2786 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_fini_early()
2787 if (!adev->ip_blocks[i].version->funcs->early_fini) in amdgpu_device_ip_fini_early()
2790 r = adev->ip_blocks[i].version->funcs->early_fini((void *)adev); in amdgpu_device_ip_fini_early()
2793 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_ip_fini_early()
2797 amdgpu_device_set_pg_state(adev, AMD_PG_STATE_UNGATE); in amdgpu_device_ip_fini_early()
2798 amdgpu_device_set_cg_state(adev, AMD_CG_STATE_UNGATE); in amdgpu_device_ip_fini_early()
2800 amdgpu_amdkfd_suspend(adev, false); in amdgpu_device_ip_fini_early()
2803 amdgpu_device_smu_fini_early(adev); in amdgpu_device_ip_fini_early()
2805 for (i = adev->num_ip_blocks - 1; i >= 0; i--) { in amdgpu_device_ip_fini_early()
2806 if (!adev->ip_blocks[i].status.hw) in amdgpu_device_ip_fini_early()
2809 r = adev->ip_blocks[i].version->funcs->hw_fini((void *)adev); in amdgpu_device_ip_fini_early()
2813 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_ip_fini_early()
2816 adev->ip_blocks[i].status.hw = false; in amdgpu_device_ip_fini_early()
2819 if (amdgpu_sriov_vf(adev)) { in amdgpu_device_ip_fini_early()
2820 if (amdgpu_virt_release_full_gpu(adev, false)) in amdgpu_device_ip_fini_early()
2838 static int amdgpu_device_ip_fini(struct amdgpu_device *adev) in amdgpu_device_ip_fini() argument
2842 if (amdgpu_sriov_vf(adev) && adev->virt.ras_init_done) in amdgpu_device_ip_fini()
2843 amdgpu_virt_release_ras_err_handler_data(adev); in amdgpu_device_ip_fini()
2845 if (adev->gmc.xgmi.num_physical_nodes > 1) in amdgpu_device_ip_fini()
2846 amdgpu_xgmi_remove_device(adev); in amdgpu_device_ip_fini()
2848 amdgpu_amdkfd_device_fini_sw(adev); in amdgpu_device_ip_fini()
2850 for (i = adev->num_ip_blocks - 1; i >= 0; i--) { in amdgpu_device_ip_fini()
2851 if (!adev->ip_blocks[i].status.sw) in amdgpu_device_ip_fini()
2854 if (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_GMC) { in amdgpu_device_ip_fini()
2855 amdgpu_ucode_free_bo(adev); in amdgpu_device_ip_fini()
2856 amdgpu_free_static_csa(&adev->virt.csa_obj); in amdgpu_device_ip_fini()
2857 amdgpu_device_wb_fini(adev); in amdgpu_device_ip_fini()
2858 amdgpu_device_vram_scratch_fini(adev); in amdgpu_device_ip_fini()
2859 amdgpu_ib_pool_fini(adev); in amdgpu_device_ip_fini()
2862 r = adev->ip_blocks[i].version->funcs->sw_fini((void *)adev); in amdgpu_device_ip_fini()
2866 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_ip_fini()
2868 adev->ip_blocks[i].status.sw = false; in amdgpu_device_ip_fini()
2869 adev->ip_blocks[i].status.valid = false; in amdgpu_device_ip_fini()
2872 for (i = adev->num_ip_blocks - 1; i >= 0; i--) { in amdgpu_device_ip_fini()
2873 if (!adev->ip_blocks[i].status.late_initialized) in amdgpu_device_ip_fini()
2875 if (adev->ip_blocks[i].version->funcs->late_fini) in amdgpu_device_ip_fini()
2876 adev->ip_blocks[i].version->funcs->late_fini((void *)adev); in amdgpu_device_ip_fini()
2877 adev->ip_blocks[i].status.late_initialized = false; in amdgpu_device_ip_fini()
2880 amdgpu_ras_fini(adev); in amdgpu_device_ip_fini()
2892 struct amdgpu_device *adev = in amdgpu_device_delayed_init_work_handler() local
2896 r = amdgpu_ib_ring_tests(adev); in amdgpu_device_delayed_init_work_handler()
2903 struct amdgpu_device *adev = in amdgpu_device_delay_enable_gfx_off() local
2906 WARN_ON_ONCE(adev->gfx.gfx_off_state); in amdgpu_device_delay_enable_gfx_off()
2907 WARN_ON_ONCE(adev->gfx.gfx_off_req_count); in amdgpu_device_delay_enable_gfx_off()
2909 if (!amdgpu_dpm_set_powergating_by_smu(adev, AMD_IP_BLOCK_TYPE_GFX, true)) in amdgpu_device_delay_enable_gfx_off()
2910 adev->gfx.gfx_off_state = true; in amdgpu_device_delay_enable_gfx_off()
2924 static int amdgpu_device_ip_suspend_phase1(struct amdgpu_device *adev) in amdgpu_device_ip_suspend_phase1() argument
2928 amdgpu_device_set_pg_state(adev, AMD_PG_STATE_UNGATE); in amdgpu_device_ip_suspend_phase1()
2929 amdgpu_device_set_cg_state(adev, AMD_CG_STATE_UNGATE); in amdgpu_device_ip_suspend_phase1()
2936 if (amdgpu_dpm_set_df_cstate(adev, DF_CSTATE_DISALLOW)) in amdgpu_device_ip_suspend_phase1()
2937 dev_warn(adev->dev, "Failed to disallow df cstate"); in amdgpu_device_ip_suspend_phase1()
2939 for (i = adev->num_ip_blocks - 1; i >= 0; i--) { in amdgpu_device_ip_suspend_phase1()
2940 if (!adev->ip_blocks[i].status.valid) in amdgpu_device_ip_suspend_phase1()
2944 if (adev->ip_blocks[i].version->type != AMD_IP_BLOCK_TYPE_DCE) in amdgpu_device_ip_suspend_phase1()
2948 r = adev->ip_blocks[i].version->funcs->suspend(adev); in amdgpu_device_ip_suspend_phase1()
2952 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_ip_suspend_phase1()
2956 adev->ip_blocks[i].status.hw = false; in amdgpu_device_ip_suspend_phase1()
2973 static int amdgpu_device_ip_suspend_phase2(struct amdgpu_device *adev) in amdgpu_device_ip_suspend_phase2() argument
2977 if (adev->in_s0ix) in amdgpu_device_ip_suspend_phase2()
2978 amdgpu_dpm_gfx_state_change(adev, sGpuChangeState_D3Entry); in amdgpu_device_ip_suspend_phase2()
2980 for (i = adev->num_ip_blocks - 1; i >= 0; i--) { in amdgpu_device_ip_suspend_phase2()
2981 if (!adev->ip_blocks[i].status.valid) in amdgpu_device_ip_suspend_phase2()
2984 if (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_DCE) in amdgpu_device_ip_suspend_phase2()
2988 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_PSP) { in amdgpu_device_ip_suspend_phase2()
2989 adev->ip_blocks[i].status.hw = false; in amdgpu_device_ip_suspend_phase2()
2994 if (adev->gmc.xgmi.pending_reset && in amdgpu_device_ip_suspend_phase2()
2995 !(adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_GMC || in amdgpu_device_ip_suspend_phase2()
2996 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_SMC || in amdgpu_device_ip_suspend_phase2()
2997 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_COMMON || in amdgpu_device_ip_suspend_phase2()
2998 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_IH)) { in amdgpu_device_ip_suspend_phase2()
2999 adev->ip_blocks[i].status.hw = false; in amdgpu_device_ip_suspend_phase2()
3008 if (adev->in_s0ix && in amdgpu_device_ip_suspend_phase2()
3009 (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_PSP || in amdgpu_device_ip_suspend_phase2()
3010 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_GFX)) in amdgpu_device_ip_suspend_phase2()
3014 r = adev->ip_blocks[i].version->funcs->suspend(adev); in amdgpu_device_ip_suspend_phase2()
3018 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_ip_suspend_phase2()
3020 adev->ip_blocks[i].status.hw = false; in amdgpu_device_ip_suspend_phase2()
3022 if(!amdgpu_sriov_vf(adev)){ in amdgpu_device_ip_suspend_phase2()
3023 if (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_SMC) { in amdgpu_device_ip_suspend_phase2()
3024 r = amdgpu_dpm_set_mp1_state(adev, adev->mp1_state); in amdgpu_device_ip_suspend_phase2()
3027 adev->mp1_state, r); in amdgpu_device_ip_suspend_phase2()
3048 int amdgpu_device_ip_suspend(struct amdgpu_device *adev) in amdgpu_device_ip_suspend() argument
3052 if (amdgpu_sriov_vf(adev)) { in amdgpu_device_ip_suspend()
3053 amdgpu_virt_fini_data_exchange(adev); in amdgpu_device_ip_suspend()
3054 amdgpu_virt_request_full_gpu(adev, false); in amdgpu_device_ip_suspend()
3057 r = amdgpu_device_ip_suspend_phase1(adev); in amdgpu_device_ip_suspend()
3060 r = amdgpu_device_ip_suspend_phase2(adev); in amdgpu_device_ip_suspend()
3062 if (amdgpu_sriov_vf(adev)) in amdgpu_device_ip_suspend()
3063 amdgpu_virt_release_full_gpu(adev, false); in amdgpu_device_ip_suspend()
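
amdgpu_device_ip_suspend() makes two reverse-order passes over the IP blocks: phase 1 only takes down the display (DCE) block, phase 2 suspends everything that remains, with the PSP, GFX s0ix, and XGMI pending-reset exceptions visible in the listing above. A compact model that keeps just the two-phase DCE split; the block list, field names, and skipped exceptions are illustrative assumptions.

    #include <stdio.h>
    #include <stdbool.h>

    struct ip_block_model {
        const char *name;
        bool        is_dce;   /* display block?                */
        bool        hw;       /* hardware currently brought up */
    };

    static void suspend_pass(struct ip_block_model *b, int n, bool dce_pass)
    {
        for (int i = n - 1; i >= 0; i--) {          /* reverse registration order */
            if (!b[i].hw || b[i].is_dce != dce_pass)
                continue;
            printf("suspend %s\n", b[i].name);
            b[i].hw = false;
        }
    }

    int main(void)
    {
        struct ip_block_model blocks[] = {
            { "common", false, true }, { "gmc", false, true },
            { "dce",    true,  true }, { "gfx", false, true },
        };

        suspend_pass(blocks, 4, true);   /* phase 1: display only           */
        suspend_pass(blocks, 4, false);  /* phase 2: everything that's left */
        return 0;
    }
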
3068 static int amdgpu_device_ip_reinit_early_sriov(struct amdgpu_device *adev) in amdgpu_device_ip_reinit_early_sriov() argument
3079 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_reinit_early_sriov()
3083 block = &adev->ip_blocks[i]; in amdgpu_device_ip_reinit_early_sriov()
3092 r = block->version->funcs->hw_init(adev); in amdgpu_device_ip_reinit_early_sriov()
3103 static int amdgpu_device_ip_reinit_late_sriov(struct amdgpu_device *adev) in amdgpu_device_ip_reinit_late_sriov() argument
3121 for (j = 0; j < adev->num_ip_blocks; j++) { in amdgpu_device_ip_reinit_late_sriov()
3122 block = &adev->ip_blocks[j]; in amdgpu_device_ip_reinit_late_sriov()
3130 r = block->version->funcs->resume(adev); in amdgpu_device_ip_reinit_late_sriov()
3132 r = block->version->funcs->hw_init(adev); in amdgpu_device_ip_reinit_late_sriov()
3156 static int amdgpu_device_ip_resume_phase1(struct amdgpu_device *adev) in amdgpu_device_ip_resume_phase1() argument
3160 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_resume_phase1()
3161 if (!adev->ip_blocks[i].status.valid || adev->ip_blocks[i].status.hw) in amdgpu_device_ip_resume_phase1()
3163 if (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_COMMON || in amdgpu_device_ip_resume_phase1()
3164 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_GMC || in amdgpu_device_ip_resume_phase1()
3165 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_IH || in amdgpu_device_ip_resume_phase1()
3166 (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_PSP && amdgpu_sriov_vf(adev))) { in amdgpu_device_ip_resume_phase1()
3168 r = adev->ip_blocks[i].version->funcs->resume(adev); in amdgpu_device_ip_resume_phase1()
3171 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_ip_resume_phase1()
3174 adev->ip_blocks[i].status.hw = true; in amdgpu_device_ip_resume_phase1()
3194 static int amdgpu_device_ip_resume_phase2(struct amdgpu_device *adev) in amdgpu_device_ip_resume_phase2() argument
3198 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_resume_phase2()
3199 if (!adev->ip_blocks[i].status.valid || adev->ip_blocks[i].status.hw) in amdgpu_device_ip_resume_phase2()
3201 if (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_COMMON || in amdgpu_device_ip_resume_phase2()
3202 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_GMC || in amdgpu_device_ip_resume_phase2()
3203 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_IH || in amdgpu_device_ip_resume_phase2()
3204 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_PSP) in amdgpu_device_ip_resume_phase2()
3206 r = adev->ip_blocks[i].version->funcs->resume(adev); in amdgpu_device_ip_resume_phase2()
3209 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_ip_resume_phase2()
3212 adev->ip_blocks[i].status.hw = true; in amdgpu_device_ip_resume_phase2()
3214 if (adev->in_s0ix && adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_SMC) { in amdgpu_device_ip_resume_phase2()
3218 amdgpu_gfx_off_ctrl(adev, false); in amdgpu_device_ip_resume_phase2()
3239 static int amdgpu_device_ip_resume(struct amdgpu_device *adev) in amdgpu_device_ip_resume() argument
3243 r = amdgpu_amdkfd_resume_iommu(adev); in amdgpu_device_ip_resume()
3247 r = amdgpu_device_ip_resume_phase1(adev); in amdgpu_device_ip_resume()
3251 r = amdgpu_device_fw_loading(adev); in amdgpu_device_ip_resume()
3255 r = amdgpu_device_ip_resume_phase2(adev); in amdgpu_device_ip_resume()
3267 static void amdgpu_device_detect_sriov_bios(struct amdgpu_device *adev) in amdgpu_device_detect_sriov_bios() argument
3269 if (amdgpu_sriov_vf(adev)) { in amdgpu_device_detect_sriov_bios()
3270 if (adev->is_atom_fw) { in amdgpu_device_detect_sriov_bios()
3271 if (amdgpu_atomfirmware_gpu_virtualization_supported(adev)) in amdgpu_device_detect_sriov_bios()
3272 adev->virt.caps |= AMDGPU_SRIOV_CAPS_SRIOV_VBIOS; in amdgpu_device_detect_sriov_bios()
3274 if (amdgpu_atombios_has_gpu_virtualization_table(adev)) in amdgpu_device_detect_sriov_bios()
3275 adev->virt.caps |= AMDGPU_SRIOV_CAPS_SRIOV_VBIOS; in amdgpu_device_detect_sriov_bios()
3278 if (!(adev->virt.caps & AMDGPU_SRIOV_CAPS_SRIOV_VBIOS)) in amdgpu_device_detect_sriov_bios()
3279 amdgpu_vf_error_put(adev, AMDGIM_ERROR_VF_NO_VBIOS, 0, 0); in amdgpu_device_detect_sriov_bios()
3348 bool amdgpu_device_has_dc_support(struct amdgpu_device *adev) in amdgpu_device_has_dc_support() argument
3350 if (amdgpu_sriov_vf(adev) || in amdgpu_device_has_dc_support()
3351 adev->enable_virtual_display || in amdgpu_device_has_dc_support()
3352 (adev->harvest_ip_mask & AMD_HARVEST_IP_DMU_MASK)) in amdgpu_device_has_dc_support()
3355 return amdgpu_device_asic_has_dc_support(adev->asic_type); in amdgpu_device_has_dc_support()
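The DC-support decision is fully visible in the matched lines; a minimal sketch of the same logic, denying display-core support for SR-IOV VFs, virtual display, or a harvested DMU, and otherwise deferring to the per-ASIC table:

	bool has_dc_support_sketch(struct amdgpu_device *adev)
	{
		/* no DC for VFs, emulated displays, or a harvested display IP */
		if (amdgpu_sriov_vf(adev) ||
		    adev->enable_virtual_display ||
		    (adev->harvest_ip_mask & AMD_HARVEST_IP_DMU_MASK))
			return false;

		/* otherwise the answer depends only on the ASIC type */
		return amdgpu_device_asic_has_dc_support(adev->asic_type);
	}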
3360 struct amdgpu_device *adev = in amdgpu_device_xgmi_reset_func() local
3362 struct amdgpu_hive_info *hive = amdgpu_get_xgmi_hive(adev); in amdgpu_device_xgmi_reset_func()
3374 if (amdgpu_asic_reset_method(adev) == AMD_RESET_METHOD_BACO) { in amdgpu_device_xgmi_reset_func()
3377 adev->asic_reset_res = amdgpu_device_baco_enter(adev_to_drm(adev)); in amdgpu_device_xgmi_reset_func()
3379 if (adev->asic_reset_res) in amdgpu_device_xgmi_reset_func()
3383 adev->asic_reset_res = amdgpu_device_baco_exit(adev_to_drm(adev)); in amdgpu_device_xgmi_reset_func()
3385 if (adev->asic_reset_res) in amdgpu_device_xgmi_reset_func()
3388 if (adev->mmhub.ras && adev->mmhub.ras->ras_block.hw_ops && in amdgpu_device_xgmi_reset_func()
3389 adev->mmhub.ras->ras_block.hw_ops->reset_ras_error_count) in amdgpu_device_xgmi_reset_func()
3390 adev->mmhub.ras->ras_block.hw_ops->reset_ras_error_count(adev); in amdgpu_device_xgmi_reset_func()
3394 adev->asic_reset_res = amdgpu_asic_reset(adev); in amdgpu_device_xgmi_reset_func()
3398 if (adev->asic_reset_res) in amdgpu_device_xgmi_reset_func()
3400 adev->asic_reset_res, adev_to_drm(adev)->unique); in amdgpu_device_xgmi_reset_func()
3404 static int amdgpu_device_get_job_timeout_settings(struct amdgpu_device *adev) in amdgpu_device_get_job_timeout_settings() argument
3418 adev->gfx_timeout = msecs_to_jiffies(10000); in amdgpu_device_get_job_timeout_settings()
3419 adev->sdma_timeout = adev->video_timeout = adev->gfx_timeout; in amdgpu_device_get_job_timeout_settings()
3420 if (amdgpu_sriov_vf(adev)) in amdgpu_device_get_job_timeout_settings()
3421 adev->compute_timeout = amdgpu_sriov_is_pp_one_vf(adev) ? in amdgpu_device_get_job_timeout_settings()
3424 adev->compute_timeout = msecs_to_jiffies(60000); in amdgpu_device_get_job_timeout_settings()
3438 dev_warn(adev->dev, "lockup timeout disabled"); in amdgpu_device_get_job_timeout_settings()
3446 adev->gfx_timeout = timeout; in amdgpu_device_get_job_timeout_settings()
3449 adev->compute_timeout = timeout; in amdgpu_device_get_job_timeout_settings()
3452 adev->sdma_timeout = timeout; in amdgpu_device_get_job_timeout_settings()
3455 adev->video_timeout = timeout; in amdgpu_device_get_job_timeout_settings()
3466 adev->sdma_timeout = adev->video_timeout = adev->gfx_timeout; in amdgpu_device_get_job_timeout_settings()
3467 if (amdgpu_sriov_vf(adev) || amdgpu_passthrough(adev)) in amdgpu_device_get_job_timeout_settings()
3468 adev->compute_timeout = adev->gfx_timeout; in amdgpu_device_get_job_timeout_settings()
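The matched lines above imply the default job timeouts applied before the lockup_timeout module parameter is parsed; the parsing loop itself is largely elided from the listing, so the SR-IOV compute case is only partially shown:

	/* defaults sketched from the matches: 10 s for GFX/SDMA/video,
	 * 60 s for compute on bare metal; the SR-IOV one-VF compute
	 * default is not visible in the listing */
	adev->gfx_timeout     = msecs_to_jiffies(10000);
	adev->sdma_timeout    = adev->gfx_timeout;
	adev->video_timeout   = adev->gfx_timeout;
	adev->compute_timeout = msecs_to_jiffies(60000);

When only one value is supplied it is applied to GFX and then propagated to SDMA and video, with compute following GFX under SR-IOV or passthrough, as the last few matched lines show.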
3482 static void amdgpu_device_check_iommu_direct_map(struct amdgpu_device *adev) in amdgpu_device_check_iommu_direct_map() argument
3486 domain = iommu_get_domain_for_dev(adev->dev); in amdgpu_device_check_iommu_direct_map()
3488 adev->ram_is_direct_mapped = true; in amdgpu_device_check_iommu_direct_map()
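A short sketch of the IOMMU direct-map check; the condition itself is not among the matched lines, so the identity-domain test here is an assumption:

	static void check_iommu_direct_map_sketch(struct amdgpu_device *adev)
	{
		struct iommu_domain *domain;

		domain = iommu_get_domain_for_dev(adev->dev);
		/* assumed condition: no translating IOMMU domain in front of RAM */
		if (!domain || domain->type == IOMMU_DOMAIN_IDENTITY)
			adev->ram_is_direct_mapped = true;
	}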
3509 int amdgpu_device_init(struct amdgpu_device *adev, in amdgpu_device_init() argument
3512 struct drm_device *ddev = adev_to_drm(adev); in amdgpu_device_init()
3513 struct pci_dev *pdev = adev->pdev; in amdgpu_device_init()
3518 adev->shutdown = false; in amdgpu_device_init()
3519 adev->flags = flags; in amdgpu_device_init()
3522 adev->asic_type = amdgpu_force_asic_type; in amdgpu_device_init()
3524 adev->asic_type = flags & AMD_ASIC_MASK; in amdgpu_device_init()
3526 adev->usec_timeout = AMDGPU_MAX_USEC_TIMEOUT; in amdgpu_device_init()
3528 adev->usec_timeout *= 10; in amdgpu_device_init()
3529 adev->gmc.gart_size = 512 * 1024 * 1024; in amdgpu_device_init()
3530 adev->accel_working = false; in amdgpu_device_init()
3531 adev->num_rings = 0; in amdgpu_device_init()
3532 RCU_INIT_POINTER(adev->gang_submit, dma_fence_get_stub()); in amdgpu_device_init()
3533 adev->mman.buffer_funcs = NULL; in amdgpu_device_init()
3534 adev->mman.buffer_funcs_ring = NULL; in amdgpu_device_init()
3535 adev->vm_manager.vm_pte_funcs = NULL; in amdgpu_device_init()
3536 adev->vm_manager.vm_pte_num_scheds = 0; in amdgpu_device_init()
3537 adev->gmc.gmc_funcs = NULL; in amdgpu_device_init()
3538 adev->harvest_ip_mask = 0x0; in amdgpu_device_init()
3539 adev->fence_context = dma_fence_context_alloc(AMDGPU_MAX_RINGS); in amdgpu_device_init()
3540 bitmap_zero(adev->gfx.pipe_reserve_bitmap, AMDGPU_MAX_COMPUTE_QUEUES); in amdgpu_device_init()
3542 adev->smc_rreg = &amdgpu_invalid_rreg; in amdgpu_device_init()
3543 adev->smc_wreg = &amdgpu_invalid_wreg; in amdgpu_device_init()
3544 adev->pcie_rreg = &amdgpu_invalid_rreg; in amdgpu_device_init()
3545 adev->pcie_wreg = &amdgpu_invalid_wreg; in amdgpu_device_init()
3546 adev->pciep_rreg = &amdgpu_invalid_rreg; in amdgpu_device_init()
3547 adev->pciep_wreg = &amdgpu_invalid_wreg; in amdgpu_device_init()
3548 adev->pcie_rreg64 = &amdgpu_invalid_rreg64; in amdgpu_device_init()
3549 adev->pcie_wreg64 = &amdgpu_invalid_wreg64; in amdgpu_device_init()
3550 adev->uvd_ctx_rreg = &amdgpu_invalid_rreg; in amdgpu_device_init()
3551 adev->uvd_ctx_wreg = &amdgpu_invalid_wreg; in amdgpu_device_init()
3552 adev->didt_rreg = &amdgpu_invalid_rreg; in amdgpu_device_init()
3553 adev->didt_wreg = &amdgpu_invalid_wreg; in amdgpu_device_init()
3554 adev->gc_cac_rreg = &amdgpu_invalid_rreg; in amdgpu_device_init()
3555 adev->gc_cac_wreg = &amdgpu_invalid_wreg; in amdgpu_device_init()
3556 adev->audio_endpt_rreg = &amdgpu_block_invalid_rreg; in amdgpu_device_init()
3557 adev->audio_endpt_wreg = &amdgpu_block_invalid_wreg; in amdgpu_device_init()
3560 amdgpu_asic_name[adev->asic_type], pdev->vendor, pdev->device, in amdgpu_device_init()
3565 mutex_init(&adev->firmware.mutex); in amdgpu_device_init()
3566 mutex_init(&adev->pm.mutex); in amdgpu_device_init()
3567 mutex_init(&adev->gfx.gpu_clock_mutex); in amdgpu_device_init()
3568 mutex_init(&adev->srbm_mutex); in amdgpu_device_init()
3569 mutex_init(&adev->gfx.pipe_reserve_mutex); in amdgpu_device_init()
3570 mutex_init(&adev->gfx.gfx_off_mutex); in amdgpu_device_init()
3571 mutex_init(&adev->grbm_idx_mutex); in amdgpu_device_init()
3572 mutex_init(&adev->mn_lock); in amdgpu_device_init()
3573 mutex_init(&adev->virt.vf_errors.lock); in amdgpu_device_init()
3574 hash_init(adev->mn_hash); in amdgpu_device_init()
3575 mutex_init(&adev->psp.mutex); in amdgpu_device_init()
3576 mutex_init(&adev->notifier_lock); in amdgpu_device_init()
3577 mutex_init(&adev->pm.stable_pstate_ctx_lock); in amdgpu_device_init()
3578 mutex_init(&adev->benchmark_mutex); in amdgpu_device_init()
3580 amdgpu_device_init_apu_flags(adev); in amdgpu_device_init()
3582 r = amdgpu_device_check_arguments(adev); in amdgpu_device_init()
3586 spin_lock_init(&adev->mmio_idx_lock); in amdgpu_device_init()
3587 spin_lock_init(&adev->smc_idx_lock); in amdgpu_device_init()
3588 spin_lock_init(&adev->pcie_idx_lock); in amdgpu_device_init()
3589 spin_lock_init(&adev->uvd_ctx_idx_lock); in amdgpu_device_init()
3590 spin_lock_init(&adev->didt_idx_lock); in amdgpu_device_init()
3591 spin_lock_init(&adev->gc_cac_idx_lock); in amdgpu_device_init()
3592 spin_lock_init(&adev->se_cac_idx_lock); in amdgpu_device_init()
3593 spin_lock_init(&adev->audio_endpt_idx_lock); in amdgpu_device_init()
3594 spin_lock_init(&adev->mm_stats.lock); in amdgpu_device_init()
3596 INIT_LIST_HEAD(&adev->shadow_list); in amdgpu_device_init()
3597 mutex_init(&adev->shadow_list_lock); in amdgpu_device_init()
3599 INIT_LIST_HEAD(&adev->reset_list); in amdgpu_device_init()
3601 INIT_LIST_HEAD(&adev->ras_list); in amdgpu_device_init()
3603 INIT_DELAYED_WORK(&adev->delayed_init_work, in amdgpu_device_init()
3605 INIT_DELAYED_WORK(&adev->gfx.gfx_off_delay_work, in amdgpu_device_init()
3608 INIT_WORK(&adev->xgmi_reset_work, amdgpu_device_xgmi_reset_func); in amdgpu_device_init()
3610 adev->gfx.gfx_off_req_count = 1; in amdgpu_device_init()
3611 adev->gfx.gfx_off_residency = 0; in amdgpu_device_init()
3612 adev->gfx.gfx_off_entrycount = 0; in amdgpu_device_init()
3613 adev->pm.ac_power = power_supply_is_system_supplied() > 0; in amdgpu_device_init()
3615 atomic_set(&adev->throttling_logging_enabled, 1); in amdgpu_device_init()
3623 ratelimit_state_init(&adev->throttling_logging_rs, (60 - 1) * HZ, 1); in amdgpu_device_init()
3624 ratelimit_set_flags(&adev->throttling_logging_rs, RATELIMIT_MSG_ON_RELEASE); in amdgpu_device_init()
3628 if (adev->asic_type >= CHIP_BONAIRE) { in amdgpu_device_init()
3629 adev->rmmio_base = pci_resource_start(adev->pdev, 5); in amdgpu_device_init()
3630 adev->rmmio_size = pci_resource_len(adev->pdev, 5); in amdgpu_device_init()
3632 adev->rmmio_base = pci_resource_start(adev->pdev, 2); in amdgpu_device_init()
3633 adev->rmmio_size = pci_resource_len(adev->pdev, 2); in amdgpu_device_init()
3637 atomic_set(&adev->pm.pwr_state[i], POWER_STATE_UNKNOWN); in amdgpu_device_init()
3639 adev->rmmio = ioremap(adev->rmmio_base, adev->rmmio_size); in amdgpu_device_init()
3640 if (adev->rmmio == NULL) { in amdgpu_device_init()
3643 DRM_INFO("register mmio base: 0x%08X\n", (uint32_t)adev->rmmio_base); in amdgpu_device_init()
3644 DRM_INFO("register mmio size: %u\n", (unsigned)adev->rmmio_size); in amdgpu_device_init()
3646 amdgpu_device_get_pcie_info(adev); in amdgpu_device_init()
3656 adev->reset_domain = amdgpu_reset_create_reset_domain(SINGLE_DEVICE, "amdgpu-reset-dev"); in amdgpu_device_init()
3657 if (!adev->reset_domain) in amdgpu_device_init()
3661 amdgpu_detect_virtualization(adev); in amdgpu_device_init()
3663 r = amdgpu_device_get_job_timeout_settings(adev); in amdgpu_device_init()
3665 dev_err(adev->dev, "invalid lockup_timeout parameter syntax\n"); in amdgpu_device_init()
3670 r = amdgpu_device_ip_early_init(adev); in amdgpu_device_init()
3675 amdgpu_gmc_tmz_set(adev); in amdgpu_device_init()
3677 amdgpu_gmc_noretry_set(adev); in amdgpu_device_init()
3679 if (adev->gmc.xgmi.supported) { in amdgpu_device_init()
3680 r = adev->gfxhub.funcs->get_xgmi_info(adev); in amdgpu_device_init()
3686 if (amdgpu_sriov_vf(adev)) in amdgpu_device_init()
3687 adev->have_atomics_support = ((struct amd_sriov_msg_pf2vf_info *) in amdgpu_device_init()
3688 adev->virt.fw_reserve.p_pf2vf)->pcie_atomic_ops_support_flags == in amdgpu_device_init()
3691 adev->have_atomics_support = in amdgpu_device_init()
3692 !pci_enable_atomic_ops_to_root(adev->pdev, in amdgpu_device_init()
3695 if (!adev->have_atomics_support) in amdgpu_device_init()
3696 dev_info(adev->dev, "PCIE atomic ops is not supported\n"); in amdgpu_device_init()
3699 amdgpu_device_doorbell_init(adev); in amdgpu_device_init()
3703 emu_soc_asic_init(adev); in amdgpu_device_init()
3707 amdgpu_reset_init(adev); in amdgpu_device_init()
3710 amdgpu_device_detect_sriov_bios(adev); in amdgpu_device_init()
3715 if (!amdgpu_sriov_vf(adev) && amdgpu_asic_need_reset_on_init(adev)) { in amdgpu_device_init()
3716 if (adev->gmc.xgmi.num_physical_nodes) { in amdgpu_device_init()
3717 dev_info(adev->dev, "Pending hive reset.\n"); in amdgpu_device_init()
3718 adev->gmc.xgmi.pending_reset = true; in amdgpu_device_init()
3720 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_init()
3721 if (!adev->ip_blocks[i].status.valid) in amdgpu_device_init()
3723 if (!(adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_GMC || in amdgpu_device_init()
3724 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_COMMON || in amdgpu_device_init()
3725 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_IH || in amdgpu_device_init()
3726 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_SMC)) { in amdgpu_device_init()
3728 adev->ip_blocks[i].version->funcs->name); in amdgpu_device_init()
3729 adev->ip_blocks[i].status.hw = true; in amdgpu_device_init()
3733 r = amdgpu_asic_reset(adev); in amdgpu_device_init()
3735 dev_err(adev->dev, "asic reset on init failed\n"); in amdgpu_device_init()
3741 pci_enable_pcie_error_reporting(adev->pdev); in amdgpu_device_init()
3744 if (amdgpu_device_need_post(adev)) { in amdgpu_device_init()
3745 if (!adev->bios) { in amdgpu_device_init()
3746 dev_err(adev->dev, "no vBIOS found\n"); in amdgpu_device_init()
3751 r = amdgpu_device_asic_init(adev); in amdgpu_device_init()
3753 dev_err(adev->dev, "gpu post error!\n"); in amdgpu_device_init()
3758 if (adev->is_atom_fw) { in amdgpu_device_init()
3760 r = amdgpu_atomfirmware_get_clock_info(adev); in amdgpu_device_init()
3762 dev_err(adev->dev, "amdgpu_atomfirmware_get_clock_info failed\n"); in amdgpu_device_init()
3763 amdgpu_vf_error_put(adev, AMDGIM_ERROR_VF_ATOMBIOS_GET_CLOCK_FAIL, 0, 0); in amdgpu_device_init()
3768 r = amdgpu_atombios_get_clock_info(adev); in amdgpu_device_init()
3770 dev_err(adev->dev, "amdgpu_atombios_get_clock_info failed\n"); in amdgpu_device_init()
3771 amdgpu_vf_error_put(adev, AMDGIM_ERROR_VF_ATOMBIOS_GET_CLOCK_FAIL, 0, 0); in amdgpu_device_init()
3775 if (!amdgpu_device_has_dc_support(adev)) in amdgpu_device_init()
3776 amdgpu_atombios_i2c_init(adev); in amdgpu_device_init()
3781 r = amdgpu_fence_driver_sw_init(adev); in amdgpu_device_init()
3783 dev_err(adev->dev, "amdgpu_fence_driver_sw_init failed\n"); in amdgpu_device_init()
3784 amdgpu_vf_error_put(adev, AMDGIM_ERROR_VF_FENCE_INIT_FAIL, 0, 0); in amdgpu_device_init()
3789 drm_mode_config_init(adev_to_drm(adev)); in amdgpu_device_init()
3791 r = amdgpu_device_ip_init(adev); in amdgpu_device_init()
3794 if (amdgpu_sriov_vf(adev) && in amdgpu_device_init()
3795 !amdgpu_sriov_runtime(adev) && in amdgpu_device_init()
3796 amdgpu_virt_mmio_blocked(adev) && in amdgpu_device_init()
3797 !amdgpu_virt_wait_reset(adev)) { in amdgpu_device_init()
3798 dev_err(adev->dev, "VF exclusive mode timeout\n"); in amdgpu_device_init()
3800 adev->virt.caps &= ~AMDGPU_SRIOV_CAPS_RUNTIME; in amdgpu_device_init()
3801 adev->virt.ops = NULL; in amdgpu_device_init()
3805 dev_err(adev->dev, "amdgpu_device_ip_init failed\n"); in amdgpu_device_init()
3806 amdgpu_vf_error_put(adev, AMDGIM_ERROR_VF_AMDGPU_INIT_FAIL, 0, 0); in amdgpu_device_init()
3810 amdgpu_fence_driver_hw_init(adev); in amdgpu_device_init()
3812 dev_info(adev->dev, in amdgpu_device_init()
3814 adev->gfx.config.max_shader_engines, in amdgpu_device_init()
3815 adev->gfx.config.max_sh_per_se, in amdgpu_device_init()
3816 adev->gfx.config.max_cu_per_sh, in amdgpu_device_init()
3817 adev->gfx.cu_info.number); in amdgpu_device_init()
3819 adev->accel_working = true; in amdgpu_device_init()
3821 amdgpu_vm_check_compute_bug(adev); in amdgpu_device_init()
3829 adev->mm_stats.log2_max_MBps = ilog2(max(1u, max_MBps)); in amdgpu_device_init()
3831 r = amdgpu_pm_sysfs_init(adev); in amdgpu_device_init()
3833 adev->pm_sysfs_en = false; in amdgpu_device_init()
3836 adev->pm_sysfs_en = true; in amdgpu_device_init()
3838 r = amdgpu_ucode_sysfs_init(adev); in amdgpu_device_init()
3840 adev->ucode_sysfs_en = false; in amdgpu_device_init()
3843 adev->ucode_sysfs_en = true; in amdgpu_device_init()
3845 r = amdgpu_psp_sysfs_init(adev); in amdgpu_device_init()
3847 adev->psp_sysfs_en = false; in amdgpu_device_init()
3848 if (!amdgpu_sriov_vf(adev)) in amdgpu_device_init()
3851 adev->psp_sysfs_en = true; in amdgpu_device_init()
3858 amdgpu_register_gpu_instance(adev); in amdgpu_device_init()
3863 if (!adev->gmc.xgmi.pending_reset) { in amdgpu_device_init()
3864 r = amdgpu_device_ip_late_init(adev); in amdgpu_device_init()
3866 dev_err(adev->dev, "amdgpu_device_ip_late_init failed\n"); in amdgpu_device_init()
3867 amdgpu_vf_error_put(adev, AMDGIM_ERROR_VF_AMDGPU_LATE_INIT_FAIL, 0, r); in amdgpu_device_init()
3871 amdgpu_ras_resume(adev); in amdgpu_device_init()
3872 queue_delayed_work(system_wq, &adev->delayed_init_work, in amdgpu_device_init()
3876 if (amdgpu_sriov_vf(adev)) in amdgpu_device_init()
3877 flush_delayed_work(&adev->delayed_init_work); in amdgpu_device_init()
3879 r = sysfs_create_files(&adev->dev->kobj, amdgpu_dev_attributes); in amdgpu_device_init()
3881 dev_err(adev->dev, "Could not create amdgpu device attr\n"); in amdgpu_device_init()
3884 r = amdgpu_pmu_init(adev); in amdgpu_device_init()
3886 dev_err(adev->dev, "amdgpu_pmu_init failed\n"); in amdgpu_device_init()
3889 if (amdgpu_device_cache_pci_state(adev->pdev)) in amdgpu_device_init()
3895 if ((adev->pdev->class >> 8) == PCI_CLASS_DISPLAY_VGA) in amdgpu_device_init()
3896 vga_client_register(adev->pdev, amdgpu_device_vga_set_decode); in amdgpu_device_init()
3900 vga_switcheroo_register_client(adev->pdev, in amdgpu_device_init()
3902 vga_switcheroo_init_domain_pm_ops(adev->dev, &adev->vga_pm_domain); in amdgpu_device_init()
3905 if (adev->gmc.xgmi.pending_reset) in amdgpu_device_init()
3909 amdgpu_device_check_iommu_direct_map(adev); in amdgpu_device_init()
3914 amdgpu_release_ras_context(adev); in amdgpu_device_init()
3917 amdgpu_vf_error_trans_all(adev); in amdgpu_device_init()
3922 static void amdgpu_device_unmap_mmio(struct amdgpu_device *adev) in amdgpu_device_unmap_mmio() argument
3926 unmap_mapping_range(adev->ddev.anon_inode->i_mapping, 0, 0, 1); in amdgpu_device_unmap_mmio()
3929 amdgpu_device_doorbell_fini(adev); in amdgpu_device_unmap_mmio()
3931 iounmap(adev->rmmio); in amdgpu_device_unmap_mmio()
3932 adev->rmmio = NULL; in amdgpu_device_unmap_mmio()
3933 if (adev->mman.aper_base_kaddr) in amdgpu_device_unmap_mmio()
3934 iounmap(adev->mman.aper_base_kaddr); in amdgpu_device_unmap_mmio()
3935 adev->mman.aper_base_kaddr = NULL; in amdgpu_device_unmap_mmio()
3938 if (!adev->gmc.xgmi.connected_to_cpu) { in amdgpu_device_unmap_mmio()
3939 arch_phys_wc_del(adev->gmc.vram_mtrr); in amdgpu_device_unmap_mmio()
3940 arch_io_free_memtype_wc(adev->gmc.aper_base, adev->gmc.aper_size); in amdgpu_device_unmap_mmio()
3952 void amdgpu_device_fini_hw(struct amdgpu_device *adev) in amdgpu_device_fini_hw() argument
3954 dev_info(adev->dev, "amdgpu: finishing device.\n"); in amdgpu_device_fini_hw()
3955 flush_delayed_work(&adev->delayed_init_work); in amdgpu_device_fini_hw()
3956 adev->shutdown = true; in amdgpu_device_fini_hw()
3961 if (amdgpu_sriov_vf(adev)) { in amdgpu_device_fini_hw()
3962 amdgpu_virt_request_full_gpu(adev, false); in amdgpu_device_fini_hw()
3963 amdgpu_virt_fini_data_exchange(adev); in amdgpu_device_fini_hw()
3967 amdgpu_irq_disable_all(adev); in amdgpu_device_fini_hw()
3968 if (adev->mode_info.mode_config_initialized){ in amdgpu_device_fini_hw()
3969 if (!drm_drv_uses_atomic_modeset(adev_to_drm(adev))) in amdgpu_device_fini_hw()
3970 drm_helper_force_disable_all(adev_to_drm(adev)); in amdgpu_device_fini_hw()
3972 drm_atomic_helper_shutdown(adev_to_drm(adev)); in amdgpu_device_fini_hw()
3974 amdgpu_fence_driver_hw_fini(adev); in amdgpu_device_fini_hw()
3976 if (adev->mman.initialized) { in amdgpu_device_fini_hw()
3977 flush_delayed_work(&adev->mman.bdev.wq); in amdgpu_device_fini_hw()
3978 ttm_bo_lock_delayed_workqueue(&adev->mman.bdev); in amdgpu_device_fini_hw()
3981 if (adev->pm_sysfs_en) in amdgpu_device_fini_hw()
3982 amdgpu_pm_sysfs_fini(adev); in amdgpu_device_fini_hw()
3983 if (adev->ucode_sysfs_en) in amdgpu_device_fini_hw()
3984 amdgpu_ucode_sysfs_fini(adev); in amdgpu_device_fini_hw()
3985 if (adev->psp_sysfs_en) in amdgpu_device_fini_hw()
3986 amdgpu_psp_sysfs_fini(adev); in amdgpu_device_fini_hw()
3987 sysfs_remove_files(&adev->dev->kobj, amdgpu_dev_attributes); in amdgpu_device_fini_hw()
3990 amdgpu_ras_pre_fini(adev); in amdgpu_device_fini_hw()
3992 amdgpu_device_ip_fini_early(adev); in amdgpu_device_fini_hw()
3994 amdgpu_irq_fini_hw(adev); in amdgpu_device_fini_hw()
3996 if (adev->mman.initialized) in amdgpu_device_fini_hw()
3997 ttm_device_clear_dma_mappings(&adev->mman.bdev); in amdgpu_device_fini_hw()
3999 amdgpu_gart_dummy_page_fini(adev); in amdgpu_device_fini_hw()
4001 amdgpu_device_unmap_mmio(adev); in amdgpu_device_fini_hw()
4005 void amdgpu_device_fini_sw(struct amdgpu_device *adev) in amdgpu_device_fini_sw() argument
4009 amdgpu_fence_driver_sw_fini(adev); in amdgpu_device_fini_sw()
4010 amdgpu_device_ip_fini(adev); in amdgpu_device_fini_sw()
4011 release_firmware(adev->firmware.gpu_info_fw); in amdgpu_device_fini_sw()
4012 adev->firmware.gpu_info_fw = NULL; in amdgpu_device_fini_sw()
4013 adev->accel_working = false; in amdgpu_device_fini_sw()
4014 dma_fence_put(rcu_dereference_protected(adev->gang_submit, true)); in amdgpu_device_fini_sw()
4016 amdgpu_reset_fini(adev); in amdgpu_device_fini_sw()
4019 if (!amdgpu_device_has_dc_support(adev)) in amdgpu_device_fini_sw()
4020 amdgpu_i2c_fini(adev); in amdgpu_device_fini_sw()
4023 amdgpu_atombios_fini(adev); in amdgpu_device_fini_sw()
4025 kfree(adev->bios); in amdgpu_device_fini_sw()
4026 adev->bios = NULL; in amdgpu_device_fini_sw()
4027 if (amdgpu_device_supports_px(adev_to_drm(adev))) { in amdgpu_device_fini_sw()
4028 vga_switcheroo_unregister_client(adev->pdev); in amdgpu_device_fini_sw()
4029 vga_switcheroo_fini_domain_pm_ops(adev->dev); in amdgpu_device_fini_sw()
4031 if ((adev->pdev->class >> 8) == PCI_CLASS_DISPLAY_VGA) in amdgpu_device_fini_sw()
4032 vga_client_unregister(adev->pdev); in amdgpu_device_fini_sw()
4034 if (drm_dev_enter(adev_to_drm(adev), &idx)) { in amdgpu_device_fini_sw()
4036 iounmap(adev->rmmio); in amdgpu_device_fini_sw()
4037 adev->rmmio = NULL; in amdgpu_device_fini_sw()
4038 amdgpu_device_doorbell_fini(adev); in amdgpu_device_fini_sw()
4043 amdgpu_pmu_fini(adev); in amdgpu_device_fini_sw()
4044 if (adev->mman.discovery_bin) in amdgpu_device_fini_sw()
4045 amdgpu_discovery_fini(adev); in amdgpu_device_fini_sw()
4047 amdgpu_reset_put_reset_domain(adev->reset_domain); in amdgpu_device_fini_sw()
4048 adev->reset_domain = NULL; in amdgpu_device_fini_sw()
4050 kfree(adev->pci_state); in amdgpu_device_fini_sw()
4063 static int amdgpu_device_evict_resources(struct amdgpu_device *adev) in amdgpu_device_evict_resources() argument
4068 if ((adev->in_s3 || adev->in_s0ix) && (adev->flags & AMD_IS_APU)) in amdgpu_device_evict_resources()
4071 ret = amdgpu_ttm_evict_resources(adev, TTM_PL_VRAM); in amdgpu_device_evict_resources()
4092 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_device_suspend() local
4098 adev->in_suspend = true; in amdgpu_device_suspend()
4100 if (amdgpu_sriov_vf(adev)) { in amdgpu_device_suspend()
4101 amdgpu_virt_fini_data_exchange(adev); in amdgpu_device_suspend()
4102 r = amdgpu_virt_request_full_gpu(adev, false); in amdgpu_device_suspend()
4113 drm_fb_helper_set_suspend_unlocked(adev_to_drm(adev)->fb_helper, true); in amdgpu_device_suspend()
4115 cancel_delayed_work_sync(&adev->delayed_init_work); in amdgpu_device_suspend()
4117 amdgpu_ras_suspend(adev); in amdgpu_device_suspend()
4119 amdgpu_device_ip_suspend_phase1(adev); in amdgpu_device_suspend()
4121 if (!adev->in_s0ix) in amdgpu_device_suspend()
4122 amdgpu_amdkfd_suspend(adev, adev->in_runpm); in amdgpu_device_suspend()
4124 r = amdgpu_device_evict_resources(adev); in amdgpu_device_suspend()
4128 amdgpu_fence_driver_hw_fini(adev); in amdgpu_device_suspend()
4130 amdgpu_device_ip_suspend_phase2(adev); in amdgpu_device_suspend()
4132 if (amdgpu_sriov_vf(adev)) in amdgpu_device_suspend()
4133 amdgpu_virt_release_full_gpu(adev, false); in amdgpu_device_suspend()
4150 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_device_resume() local
4153 if (amdgpu_sriov_vf(adev)) { in amdgpu_device_resume()
4154 r = amdgpu_virt_request_full_gpu(adev, true); in amdgpu_device_resume()
4162 if (adev->in_s0ix) in amdgpu_device_resume()
4163 amdgpu_dpm_gfx_state_change(adev, sGpuChangeState_D0Entry); in amdgpu_device_resume()
4166 if (amdgpu_device_need_post(adev)) { in amdgpu_device_resume()
4167 r = amdgpu_device_asic_init(adev); in amdgpu_device_resume()
4169 dev_err(adev->dev, "amdgpu asic init failed\n"); in amdgpu_device_resume()
4172 r = amdgpu_device_ip_resume(adev); in amdgpu_device_resume()
4175 if (amdgpu_sriov_vf(adev)) { in amdgpu_device_resume()
4176 amdgpu_virt_init_data_exchange(adev); in amdgpu_device_resume()
4177 amdgpu_virt_release_full_gpu(adev, true); in amdgpu_device_resume()
4181 dev_err(adev->dev, "amdgpu_device_ip_resume failed (%d).\n", r); in amdgpu_device_resume()
4184 amdgpu_fence_driver_hw_init(adev); in amdgpu_device_resume()
4186 r = amdgpu_device_ip_late_init(adev); in amdgpu_device_resume()
4190 queue_delayed_work(system_wq, &adev->delayed_init_work, in amdgpu_device_resume()
4193 if (!adev->in_s0ix) { in amdgpu_device_resume()
4194 r = amdgpu_amdkfd_resume(adev, adev->in_runpm); in amdgpu_device_resume()
4200 flush_delayed_work(&adev->delayed_init_work); in amdgpu_device_resume()
4202 if (adev->in_s0ix) { in amdgpu_device_resume()
4206 amdgpu_gfx_off_ctrl(adev, true); in amdgpu_device_resume()
4210 drm_fb_helper_set_suspend_unlocked(adev_to_drm(adev)->fb_helper, false); in amdgpu_device_resume()
4214 amdgpu_ras_resume(adev); in amdgpu_device_resume()
4228 if (!amdgpu_device_has_dc_support(adev)) in amdgpu_device_resume()
4235 adev->in_suspend = false; in amdgpu_device_resume()
4253 static bool amdgpu_device_ip_check_soft_reset(struct amdgpu_device *adev) in amdgpu_device_ip_check_soft_reset() argument
4258 if (amdgpu_sriov_vf(adev)) in amdgpu_device_ip_check_soft_reset()
4261 if (amdgpu_asic_need_full_reset(adev)) in amdgpu_device_ip_check_soft_reset()
4264 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_check_soft_reset()
4265 if (!adev->ip_blocks[i].status.valid) in amdgpu_device_ip_check_soft_reset()
4267 if (adev->ip_blocks[i].version->funcs->check_soft_reset) in amdgpu_device_ip_check_soft_reset()
4268 adev->ip_blocks[i].status.hang = in amdgpu_device_ip_check_soft_reset()
4269 adev->ip_blocks[i].version->funcs->check_soft_reset(adev); in amdgpu_device_ip_check_soft_reset()
4270 if (adev->ip_blocks[i].status.hang) { in amdgpu_device_ip_check_soft_reset()
4271 dev_info(adev->dev, "IP block:%s is hung!\n", adev->ip_blocks[i].version->funcs->name); in amdgpu_device_ip_check_soft_reset()
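The hang-detection pattern is visible in the loop above: every valid IP block that implements check_soft_reset() is polled, the result is cached in status.hang, and hung blocks are reported. The early returns and the final return value are not part of the matched lines and are assumptions based on how the result is used in amdgpu_device_should_recover_gpu() further down:

	static bool ip_check_soft_reset_sketch(struct amdgpu_device *adev)
	{
		bool asic_hang = false;
		int i;

		/* assumed: SR-IOV VFs and ASICs that demand a full reset
		 * short-circuit to "needs recovery" */
		if (amdgpu_sriov_vf(adev) || amdgpu_asic_need_full_reset(adev))
			return true;

		for (i = 0; i < adev->num_ip_blocks; i++) {
			if (!adev->ip_blocks[i].status.valid)
				continue;
			if (adev->ip_blocks[i].version->funcs->check_soft_reset)
				adev->ip_blocks[i].status.hang =
					adev->ip_blocks[i].version->funcs->check_soft_reset(adev);
			if (adev->ip_blocks[i].status.hang) {
				dev_info(adev->dev, "IP block:%s is hung!\n",
					 adev->ip_blocks[i].version->funcs->name);
				asic_hang = true;
			}
		}
		return asic_hang;
	}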
4289 static int amdgpu_device_ip_pre_soft_reset(struct amdgpu_device *adev) in amdgpu_device_ip_pre_soft_reset() argument
4293 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_pre_soft_reset()
4294 if (!adev->ip_blocks[i].status.valid) in amdgpu_device_ip_pre_soft_reset()
4296 if (adev->ip_blocks[i].status.hang && in amdgpu_device_ip_pre_soft_reset()
4297 adev->ip_blocks[i].version->funcs->pre_soft_reset) { in amdgpu_device_ip_pre_soft_reset()
4298 r = adev->ip_blocks[i].version->funcs->pre_soft_reset(adev); in amdgpu_device_ip_pre_soft_reset()
4316 static bool amdgpu_device_ip_need_full_reset(struct amdgpu_device *adev) in amdgpu_device_ip_need_full_reset() argument
4320 if (amdgpu_asic_need_full_reset(adev)) in amdgpu_device_ip_need_full_reset()
4323 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_need_full_reset()
4324 if (!adev->ip_blocks[i].status.valid) in amdgpu_device_ip_need_full_reset()
4326 if ((adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_GMC) || in amdgpu_device_ip_need_full_reset()
4327 (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_SMC) || in amdgpu_device_ip_need_full_reset()
4328 (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_ACP) || in amdgpu_device_ip_need_full_reset()
4329 (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_DCE) || in amdgpu_device_ip_need_full_reset()
4330 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_PSP) { in amdgpu_device_ip_need_full_reset()
4331 if (adev->ip_blocks[i].status.hang) { in amdgpu_device_ip_need_full_reset()
4332 dev_info(adev->dev, "Some block need full reset!\n"); in amdgpu_device_ip_need_full_reset()
4351 static int amdgpu_device_ip_soft_reset(struct amdgpu_device *adev) in amdgpu_device_ip_soft_reset() argument
4355 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_soft_reset()
4356 if (!adev->ip_blocks[i].status.valid) in amdgpu_device_ip_soft_reset()
4358 if (adev->ip_blocks[i].status.hang && in amdgpu_device_ip_soft_reset()
4359 adev->ip_blocks[i].version->funcs->soft_reset) { in amdgpu_device_ip_soft_reset()
4360 r = adev->ip_blocks[i].version->funcs->soft_reset(adev); in amdgpu_device_ip_soft_reset()
4380 static int amdgpu_device_ip_post_soft_reset(struct amdgpu_device *adev) in amdgpu_device_ip_post_soft_reset() argument
4384 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_post_soft_reset()
4385 if (!adev->ip_blocks[i].status.valid) in amdgpu_device_ip_post_soft_reset()
4387 if (adev->ip_blocks[i].status.hang && in amdgpu_device_ip_post_soft_reset()
4388 adev->ip_blocks[i].version->funcs->post_soft_reset) in amdgpu_device_ip_post_soft_reset()
4389 r = adev->ip_blocks[i].version->funcs->post_soft_reset(adev); in amdgpu_device_ip_post_soft_reset()
4409 static int amdgpu_device_recover_vram(struct amdgpu_device *adev) in amdgpu_device_recover_vram() argument
4416 if (amdgpu_sriov_runtime(adev)) in amdgpu_device_recover_vram()
4421 dev_info(adev->dev, "recover vram bo from shadow start\n"); in amdgpu_device_recover_vram()
4422 mutex_lock(&adev->shadow_list_lock); in amdgpu_device_recover_vram()
4423 list_for_each_entry(vmbo, &adev->shadow_list, shadow_list) { in amdgpu_device_recover_vram()
4450 mutex_unlock(&adev->shadow_list_lock); in amdgpu_device_recover_vram()
4457 dev_err(adev->dev, "recover vram bo from shadow failed, r is %ld, tmo is %ld\n", r, tmo); in amdgpu_device_recover_vram()
4461 dev_info(adev->dev, "recover vram bo from shadow done\n"); in amdgpu_device_recover_vram()
4475 static int amdgpu_device_reset_sriov(struct amdgpu_device *adev, in amdgpu_device_reset_sriov() argument
4483 amdgpu_amdkfd_pre_reset(adev); in amdgpu_device_reset_sriov()
4486 r = amdgpu_virt_request_full_gpu(adev, true); in amdgpu_device_reset_sriov()
4488 r = amdgpu_virt_reset_gpu(adev); in amdgpu_device_reset_sriov()
4493 r = amdgpu_device_ip_reinit_early_sriov(adev); in amdgpu_device_reset_sriov()
4497 amdgpu_virt_init_data_exchange(adev); in amdgpu_device_reset_sriov()
4499 r = amdgpu_device_fw_loading(adev); in amdgpu_device_reset_sriov()
4504 r = amdgpu_device_ip_reinit_late_sriov(adev); in amdgpu_device_reset_sriov()
4508 hive = amdgpu_get_xgmi_hive(adev); in amdgpu_device_reset_sriov()
4510 if (hive && adev->gmc.xgmi.num_physical_nodes > 1) in amdgpu_device_reset_sriov()
4511 r = amdgpu_xgmi_update_topology(hive, adev); in amdgpu_device_reset_sriov()
4517 amdgpu_irq_gpu_reset_resume_helper(adev); in amdgpu_device_reset_sriov()
4518 r = amdgpu_ib_ring_tests(adev); in amdgpu_device_reset_sriov()
4520 amdgpu_amdkfd_post_reset(adev); in amdgpu_device_reset_sriov()
4524 if (!r && adev->virt.gim_feature & AMDGIM_FEATURE_GIM_FLR_VRAMLOST) { in amdgpu_device_reset_sriov()
4525 amdgpu_inc_vram_lost(adev); in amdgpu_device_reset_sriov()
4526 r = amdgpu_device_recover_vram(adev); in amdgpu_device_reset_sriov()
4528 amdgpu_virt_release_full_gpu(adev, true); in amdgpu_device_reset_sriov()
4548 bool amdgpu_device_has_job_running(struct amdgpu_device *adev) in amdgpu_device_has_job_running() argument
4554 struct amdgpu_ring *ring = adev->rings[i]; in amdgpu_device_has_job_running()
4577 bool amdgpu_device_should_recover_gpu(struct amdgpu_device *adev) in amdgpu_device_should_recover_gpu() argument
4583 if (!amdgpu_device_ip_check_soft_reset(adev)) { in amdgpu_device_should_recover_gpu()
4584 dev_info(adev->dev,"Timeout, but no hardware hang detected.\n"); in amdgpu_device_should_recover_gpu()
4588 if (amdgpu_sriov_vf(adev)) in amdgpu_device_should_recover_gpu()
4592 switch (adev->asic_type) { in amdgpu_device_should_recover_gpu()
4617 dev_info(adev->dev, "GPU recovery disabled.\n"); in amdgpu_device_should_recover_gpu()
4621 int amdgpu_device_mode1_reset(struct amdgpu_device *adev) in amdgpu_device_mode1_reset() argument
4626 amdgpu_atombios_scratch_regs_engine_hung(adev, true); in amdgpu_device_mode1_reset()
4628 dev_info(adev->dev, "GPU mode1 reset\n"); in amdgpu_device_mode1_reset()
4631 pci_clear_master(adev->pdev); in amdgpu_device_mode1_reset()
4633 amdgpu_device_cache_pci_state(adev->pdev); in amdgpu_device_mode1_reset()
4635 if (amdgpu_dpm_is_mode1_reset_supported(adev)) { in amdgpu_device_mode1_reset()
4636 dev_info(adev->dev, "GPU smu mode1 reset\n"); in amdgpu_device_mode1_reset()
4637 ret = amdgpu_dpm_mode1_reset(adev); in amdgpu_device_mode1_reset()
4639 dev_info(adev->dev, "GPU psp mode1 reset\n"); in amdgpu_device_mode1_reset()
4640 ret = psp_gpu_reset(adev); in amdgpu_device_mode1_reset()
4644 dev_err(adev->dev, "GPU mode1 reset failed\n"); in amdgpu_device_mode1_reset()
4646 amdgpu_device_load_pci_state(adev->pdev); in amdgpu_device_mode1_reset()
4649 for (i = 0; i < adev->usec_timeout; i++) { in amdgpu_device_mode1_reset()
4650 u32 memsize = adev->nbio.funcs->get_memsize(adev); in amdgpu_device_mode1_reset()
4657 amdgpu_atombios_scratch_regs_engine_hung(adev, false); in amdgpu_device_mode1_reset()
4661 int amdgpu_device_pre_asic_reset(struct amdgpu_device *adev, in amdgpu_device_pre_asic_reset() argument
4669 if (reset_context->reset_req_dev == adev) in amdgpu_device_pre_asic_reset()
4672 if (amdgpu_sriov_vf(adev)) { in amdgpu_device_pre_asic_reset()
4674 amdgpu_virt_fini_data_exchange(adev); in amdgpu_device_pre_asic_reset()
4677 amdgpu_fence_driver_isr_toggle(adev, true); in amdgpu_device_pre_asic_reset()
4681 struct amdgpu_ring *ring = adev->rings[i]; in amdgpu_device_pre_asic_reset()
4694 amdgpu_fence_driver_isr_toggle(adev, false); in amdgpu_device_pre_asic_reset()
4699 r = amdgpu_reset_prepare_hwcontext(adev, reset_context); in amdgpu_device_pre_asic_reset()
4707 if (!amdgpu_sriov_vf(adev)) { in amdgpu_device_pre_asic_reset()
4710 need_full_reset = amdgpu_device_ip_need_full_reset(adev); in amdgpu_device_pre_asic_reset()
4713 amdgpu_device_ip_pre_soft_reset(adev); in amdgpu_device_pre_asic_reset()
4714 r = amdgpu_device_ip_soft_reset(adev); in amdgpu_device_pre_asic_reset()
4715 amdgpu_device_ip_post_soft_reset(adev); in amdgpu_device_pre_asic_reset()
4716 if (r || amdgpu_device_ip_check_soft_reset(adev)) { in amdgpu_device_pre_asic_reset()
4717 dev_info(adev->dev, "soft reset failed, will fallback to full reset!\n"); in amdgpu_device_pre_asic_reset()
4723 r = amdgpu_device_ip_suspend(adev); in amdgpu_device_pre_asic_reset()
4734 static int amdgpu_reset_reg_dumps(struct amdgpu_device *adev) in amdgpu_reset_reg_dumps() argument
4738 lockdep_assert_held(&adev->reset_domain->sem); in amdgpu_reset_reg_dumps()
4740 for (i = 0; i < adev->num_regs; i++) { in amdgpu_reset_reg_dumps()
4741 adev->reset_dump_reg_value[i] = RREG32(adev->reset_dump_reg_list[i]); in amdgpu_reset_reg_dumps()
4742 trace_amdgpu_reset_reg_dumps(adev->reset_dump_reg_list[i], in amdgpu_reset_reg_dumps()
4743 adev->reset_dump_reg_value[i]); in amdgpu_reset_reg_dumps()
4754 struct amdgpu_device *adev = data; in amdgpu_devcoredump_read() local
4768 drm_printf(&p, "time: %lld.%09ld\n", adev->reset_time.tv_sec, adev->reset_time.tv_nsec); in amdgpu_devcoredump_read()
4769 if (adev->reset_task_info.pid) in amdgpu_devcoredump_read()
4771 adev->reset_task_info.process_name, in amdgpu_devcoredump_read()
4772 adev->reset_task_info.pid); in amdgpu_devcoredump_read()
4774 if (adev->reset_vram_lost) in amdgpu_devcoredump_read()
4776 if (adev->num_regs) { in amdgpu_devcoredump_read()
4779 for (i = 0; i < adev->num_regs; i++) in amdgpu_devcoredump_read()
4781 adev->reset_dump_reg_list[i], in amdgpu_devcoredump_read()
4782 adev->reset_dump_reg_value[i]); in amdgpu_devcoredump_read()
4792 static void amdgpu_reset_capture_coredumpm(struct amdgpu_device *adev) in amdgpu_reset_capture_coredumpm() argument
4794 struct drm_device *dev = adev_to_drm(adev); in amdgpu_reset_capture_coredumpm()
4796 ktime_get_ts64(&adev->reset_time); in amdgpu_reset_capture_coredumpm()
4797 dev_coredumpm(dev->dev, THIS_MODULE, adev, 0, GFP_KERNEL, in amdgpu_reset_capture_coredumpm()
4998 static void amdgpu_device_set_mp1_state(struct amdgpu_device *adev) in amdgpu_device_set_mp1_state() argument
5001 switch (amdgpu_asic_reset_method(adev)) { in amdgpu_device_set_mp1_state()
5003 adev->mp1_state = PP_MP1_STATE_SHUTDOWN; in amdgpu_device_set_mp1_state()
5006 adev->mp1_state = PP_MP1_STATE_RESET; in amdgpu_device_set_mp1_state()
5009 adev->mp1_state = PP_MP1_STATE_NONE; in amdgpu_device_set_mp1_state()
5014 static void amdgpu_device_unset_mp1_state(struct amdgpu_device *adev) in amdgpu_device_unset_mp1_state() argument
5016 amdgpu_vf_error_trans_all(adev); in amdgpu_device_unset_mp1_state()
5017 adev->mp1_state = PP_MP1_STATE_NONE; in amdgpu_device_unset_mp1_state()
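The MP1 state handling above switches on the ASIC reset method; the case labels themselves are elided in the listing, so the mapping below (mode1 to SHUTDOWN, mode2 to RESET, everything else to NONE) is an assumption:

	static void set_mp1_state_sketch(struct amdgpu_device *adev)
	{
		switch (amdgpu_asic_reset_method(adev)) {
		case AMD_RESET_METHOD_MODE1:	/* assumed case label */
			adev->mp1_state = PP_MP1_STATE_SHUTDOWN;
			break;
		case AMD_RESET_METHOD_MODE2:	/* assumed case label */
			adev->mp1_state = PP_MP1_STATE_RESET;
			break;
		default:
			adev->mp1_state = PP_MP1_STATE_NONE;
			break;
		}
	}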
5020 static void amdgpu_device_resume_display_audio(struct amdgpu_device *adev) in amdgpu_device_resume_display_audio() argument
5024 p = pci_get_domain_bus_and_slot(pci_domain_nr(adev->pdev->bus), in amdgpu_device_resume_display_audio()
5025 adev->pdev->bus->number, 1); in amdgpu_device_resume_display_audio()
5032 static int amdgpu_device_suspend_display_audio(struct amdgpu_device *adev) in amdgpu_device_suspend_display_audio() argument
5042 reset_method = amdgpu_asic_reset_method(adev); in amdgpu_device_suspend_display_audio()
5047 p = pci_get_domain_bus_and_slot(pci_domain_nr(adev->pdev->bus), in amdgpu_device_suspend_display_audio()
5048 adev->pdev->bus->number, 1); in amdgpu_device_suspend_display_audio()
5067 dev_warn(adev->dev, "failed to suspend display audio\n"); in amdgpu_device_suspend_display_audio()
5079 struct amdgpu_device *adev, struct list_head *device_list_handle, in amdgpu_device_recheck_guilty_jobs() argument
5085 struct amdgpu_ring *ring = adev->rings[i]; in amdgpu_device_recheck_guilty_jobs()
5112 amdgpu_fence_driver_isr_toggle(adev, true); in amdgpu_device_recheck_guilty_jobs()
5117 amdgpu_fence_driver_isr_toggle(adev, false); in amdgpu_device_recheck_guilty_jobs()
5127 amdgpu_reset_prepare_hwcontext(adev, reset_context); in amdgpu_device_recheck_guilty_jobs()
5130 if (amdgpu_sriov_vf(adev)) { in amdgpu_device_recheck_guilty_jobs()
5131 amdgpu_virt_fini_data_exchange(adev); in amdgpu_device_recheck_guilty_jobs()
5132 r = amdgpu_device_reset_sriov(adev, false); in amdgpu_device_recheck_guilty_jobs()
5134 adev->asic_reset_res = r; in amdgpu_device_recheck_guilty_jobs()
5148 atomic_inc(&adev->gpu_reset_counter); in amdgpu_device_recheck_guilty_jobs()
5166 static inline void amdgpu_device_stop_pending_resets(struct amdgpu_device *adev) in amdgpu_device_stop_pending_resets() argument
5168 struct amdgpu_ras *con = amdgpu_ras_get_context(adev); in amdgpu_device_stop_pending_resets()
5171 if (!amdgpu_sriov_vf(adev)) in amdgpu_device_stop_pending_resets()
5172 cancel_work(&adev->reset_work); in amdgpu_device_stop_pending_resets()
5175 if (adev->kfd.dev) in amdgpu_device_stop_pending_resets()
5176 cancel_work(&adev->kfd.reset_work); in amdgpu_device_stop_pending_resets()
5178 if (amdgpu_sriov_vf(adev)) in amdgpu_device_stop_pending_resets()
5179 cancel_work(&adev->virt.flr_work); in amdgpu_device_stop_pending_resets()
5181 if (con && adev->ras_enabled) in amdgpu_device_stop_pending_resets()
5198 int amdgpu_device_gpu_recover(struct amdgpu_device *adev, in amdgpu_device_gpu_recover() argument
5219 need_emergency_restart = amdgpu_ras_need_emergency_restart(adev); in amdgpu_device_gpu_recover()
5225 if (need_emergency_restart && amdgpu_ras_get_context(adev)->reboot) { in amdgpu_device_gpu_recover()
5232 dev_info(adev->dev, "GPU %s begin!\n", in amdgpu_device_gpu_recover()
5235 if (!amdgpu_sriov_vf(adev)) in amdgpu_device_gpu_recover()
5236 hive = amdgpu_get_xgmi_hive(adev); in amdgpu_device_gpu_recover()
5248 if (!amdgpu_sriov_vf(adev) && (adev->gmc.xgmi.num_physical_nodes > 1)) { in amdgpu_device_gpu_recover()
5251 if (gpu_reset_for_dev_remove && adev->shutdown) in amdgpu_device_gpu_recover()
5254 if (!list_is_first(&adev->reset_list, &device_list)) in amdgpu_device_gpu_recover()
5255 list_rotate_to_front(&adev->reset_list, &device_list); in amdgpu_device_gpu_recover()
5258 list_add_tail(&adev->reset_list, &device_list); in amdgpu_device_gpu_recover()
5330 dev_info(adev->dev, "Guilty job already signaled, skipping HW reset"); in amdgpu_device_gpu_recover()
5355 tmp_vram_lost_counter = atomic_read(&((adev)->vram_lost_counter)); in amdgpu_device_gpu_recover()
5358 if (amdgpu_sriov_vf(adev)) { in amdgpu_device_gpu_recover()
5359 r = amdgpu_device_reset_sriov(adev, job ? false : true); in amdgpu_device_gpu_recover()
5361 adev->asic_reset_res = r; in amdgpu_device_gpu_recover()
5364 if (adev->ip_versions[GC_HWIP][0] == IP_VERSION(9, 4, 2)) in amdgpu_device_gpu_recover()
5365 amdgpu_ras_resume(adev); in amdgpu_device_gpu_recover()
5388 !(tmp_vram_lost_counter < atomic_read(&adev->vram_lost_counter))) in amdgpu_device_gpu_recover()
5405 if (adev->enable_mes && adev->ip_versions[GC_HWIP][0] != IP_VERSION(11, 0, 3)) in amdgpu_device_gpu_recover()
5437 if (!adev->kfd.init_complete) in amdgpu_device_gpu_recover()
5438 amdgpu_amdkfd_device_init(adev); in amdgpu_device_gpu_recover()
5457 dev_info(adev->dev, "GPU reset end with ret = %d\n", r); in amdgpu_device_gpu_recover()
5459 atomic_set(&adev->reset_domain->reset_res, r); in amdgpu_device_gpu_recover()
5472 static void amdgpu_device_get_pcie_info(struct amdgpu_device *adev) in amdgpu_device_get_pcie_info() argument
5479 adev->pm.pcie_gen_mask = amdgpu_pcie_gen_cap; in amdgpu_device_get_pcie_info()
5482 adev->pm.pcie_mlw_mask = amdgpu_pcie_lane_cap; in amdgpu_device_get_pcie_info()
5485 if (pci_is_root_bus(adev->pdev->bus)) { in amdgpu_device_get_pcie_info()
5486 if (adev->pm.pcie_gen_mask == 0) in amdgpu_device_get_pcie_info()
5487 adev->pm.pcie_gen_mask = AMDGPU_DEFAULT_PCIE_GEN_MASK; in amdgpu_device_get_pcie_info()
5488 if (adev->pm.pcie_mlw_mask == 0) in amdgpu_device_get_pcie_info()
5489 adev->pm.pcie_mlw_mask = AMDGPU_DEFAULT_PCIE_MLW_MASK; in amdgpu_device_get_pcie_info()
5493 if (adev->pm.pcie_gen_mask && adev->pm.pcie_mlw_mask) in amdgpu_device_get_pcie_info()
5496 pcie_bandwidth_available(adev->pdev, NULL, in amdgpu_device_get_pcie_info()
5499 if (adev->pm.pcie_gen_mask == 0) { in amdgpu_device_get_pcie_info()
5501 pdev = adev->pdev; in amdgpu_device_get_pcie_info()
5504 adev->pm.pcie_gen_mask |= (CAIL_ASIC_PCIE_LINK_SPEED_SUPPORT_GEN1 | in amdgpu_device_get_pcie_info()
5509 adev->pm.pcie_gen_mask |= (CAIL_ASIC_PCIE_LINK_SPEED_SUPPORT_GEN1 | in amdgpu_device_get_pcie_info()
5515 adev->pm.pcie_gen_mask |= (CAIL_ASIC_PCIE_LINK_SPEED_SUPPORT_GEN1 | in amdgpu_device_get_pcie_info()
5520 adev->pm.pcie_gen_mask |= (CAIL_ASIC_PCIE_LINK_SPEED_SUPPORT_GEN1 | in amdgpu_device_get_pcie_info()
5524 adev->pm.pcie_gen_mask |= (CAIL_ASIC_PCIE_LINK_SPEED_SUPPORT_GEN1 | in amdgpu_device_get_pcie_info()
5527 adev->pm.pcie_gen_mask |= CAIL_ASIC_PCIE_LINK_SPEED_SUPPORT_GEN1; in amdgpu_device_get_pcie_info()
5531 adev->pm.pcie_gen_mask |= (CAIL_PCIE_LINK_SPEED_SUPPORT_GEN1 | in amdgpu_device_get_pcie_info()
5535 adev->pm.pcie_gen_mask |= (CAIL_PCIE_LINK_SPEED_SUPPORT_GEN1 | in amdgpu_device_get_pcie_info()
5541 adev->pm.pcie_gen_mask |= (CAIL_PCIE_LINK_SPEED_SUPPORT_GEN1 | in amdgpu_device_get_pcie_info()
5546 adev->pm.pcie_gen_mask |= (CAIL_PCIE_LINK_SPEED_SUPPORT_GEN1 | in amdgpu_device_get_pcie_info()
5550 adev->pm.pcie_gen_mask |= (CAIL_PCIE_LINK_SPEED_SUPPORT_GEN1 | in amdgpu_device_get_pcie_info()
5553 adev->pm.pcie_gen_mask |= CAIL_PCIE_LINK_SPEED_SUPPORT_GEN1; in amdgpu_device_get_pcie_info()
5557 if (adev->pm.pcie_mlw_mask == 0) { in amdgpu_device_get_pcie_info()
5559 adev->pm.pcie_mlw_mask |= AMDGPU_DEFAULT_PCIE_MLW_MASK; in amdgpu_device_get_pcie_info()
5563 adev->pm.pcie_mlw_mask = (CAIL_PCIE_LINK_WIDTH_SUPPORT_X32 | in amdgpu_device_get_pcie_info()
5572 adev->pm.pcie_mlw_mask = (CAIL_PCIE_LINK_WIDTH_SUPPORT_X16 | in amdgpu_device_get_pcie_info()
5580 adev->pm.pcie_mlw_mask = (CAIL_PCIE_LINK_WIDTH_SUPPORT_X12 | in amdgpu_device_get_pcie_info()
5587 adev->pm.pcie_mlw_mask = (CAIL_PCIE_LINK_WIDTH_SUPPORT_X8 | in amdgpu_device_get_pcie_info()
5593 adev->pm.pcie_mlw_mask = (CAIL_PCIE_LINK_WIDTH_SUPPORT_X4 | in amdgpu_device_get_pcie_info()
5598 adev->pm.pcie_mlw_mask = (CAIL_PCIE_LINK_WIDTH_SUPPORT_X2 | in amdgpu_device_get_pcie_info()
5602 adev->pm.pcie_mlw_mask = CAIL_PCIE_LINK_WIDTH_SUPPORT_X1; in amdgpu_device_get_pcie_info()
5621 bool amdgpu_device_is_peer_accessible(struct amdgpu_device *adev, in amdgpu_device_is_peer_accessible() argument
5628 adev->gmc.aper_base + adev->gmc.aper_size - 1; in amdgpu_device_is_peer_accessible()
5630 !adev->gmc.xgmi.connected_to_cpu && in amdgpu_device_is_peer_accessible()
5631 !(pci_p2pdma_distance(adev->pdev, peer_adev->dev, false) < 0); in amdgpu_device_is_peer_accessible()
5633 return pcie_p2p && p2p_access && (adev->gmc.visible_vram_size && in amdgpu_device_is_peer_accessible()
5634 adev->gmc.real_vram_size == adev->gmc.visible_vram_size && in amdgpu_device_is_peer_accessible()
5635 !(adev->gmc.aper_base & address_mask || in amdgpu_device_is_peer_accessible()
5644 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_device_baco_enter() local
5645 struct amdgpu_ras *ras = amdgpu_ras_get_context(adev); in amdgpu_device_baco_enter()
5647 if (!amdgpu_device_supports_baco(adev_to_drm(adev))) in amdgpu_device_baco_enter()
5650 if (ras && adev->ras_enabled && in amdgpu_device_baco_enter()
5651 adev->nbio.funcs->enable_doorbell_interrupt) in amdgpu_device_baco_enter()
5652 adev->nbio.funcs->enable_doorbell_interrupt(adev, false); in amdgpu_device_baco_enter()
5654 return amdgpu_dpm_baco_enter(adev); in amdgpu_device_baco_enter()
5659 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_device_baco_exit() local
5660 struct amdgpu_ras *ras = amdgpu_ras_get_context(adev); in amdgpu_device_baco_exit()
5663 if (!amdgpu_device_supports_baco(adev_to_drm(adev))) in amdgpu_device_baco_exit()
5666 ret = amdgpu_dpm_baco_exit(adev); in amdgpu_device_baco_exit()
5670 if (ras && adev->ras_enabled && in amdgpu_device_baco_exit()
5671 adev->nbio.funcs->enable_doorbell_interrupt) in amdgpu_device_baco_exit()
5672 adev->nbio.funcs->enable_doorbell_interrupt(adev, true); in amdgpu_device_baco_exit()
5674 if (amdgpu_passthrough(adev) && in amdgpu_device_baco_exit()
5675 adev->nbio.funcs->clear_doorbell_interrupt) in amdgpu_device_baco_exit()
5676 adev->nbio.funcs->clear_doorbell_interrupt(adev); in amdgpu_device_baco_exit()
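The BACO entry/exit pairing is clear from the matches: doorbell interrupts are masked around BACO when RAS is enabled, re-enabled on exit, and additionally cleared in passthrough mode. A sketch of the entry side; the error code returned for unsupported ASICs is not in the matched lines and is assumed:

	int baco_enter_sketch(struct drm_device *dev)
	{
		struct amdgpu_device *adev = drm_to_adev(dev);
		struct amdgpu_ras *ras = amdgpu_ras_get_context(adev);

		if (!amdgpu_device_supports_baco(adev_to_drm(adev)))
			return -ENOTSUPP;	/* assumed error code */

		/* quiesce RAS doorbell interrupts before entering BACO */
		if (ras && adev->ras_enabled &&
		    adev->nbio.funcs->enable_doorbell_interrupt)
			adev->nbio.funcs->enable_doorbell_interrupt(adev, false);

		return amdgpu_dpm_baco_enter(adev);
	}

The exit path mirrors this: amdgpu_dpm_baco_exit() runs first, then the doorbell interrupt is re-enabled and, under passthrough, cleared.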
5693 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_pci_error_detected() local
5698 if (adev->gmc.xgmi.num_physical_nodes > 1) { in amdgpu_pci_error_detected()
5703 adev->pci_channel_state = state; in amdgpu_pci_error_detected()
5714 amdgpu_device_lock_reset_domain(adev->reset_domain); in amdgpu_pci_error_detected()
5715 amdgpu_device_set_mp1_state(adev); in amdgpu_pci_error_detected()
5722 struct amdgpu_ring *ring = adev->rings[i]; in amdgpu_pci_error_detected()
5729 atomic_inc(&adev->gpu_reset_counter); in amdgpu_pci_error_detected()
5769 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_pci_slot_reset() local
5780 list_add_tail(&adev->reset_list, &device_list); in amdgpu_pci_slot_reset()
5789 for (i = 0; i < adev->usec_timeout; i++) { in amdgpu_pci_slot_reset()
5790 memsize = amdgpu_asic_get_config_memsize(adev); in amdgpu_pci_slot_reset()
5802 reset_context.reset_req_dev = adev; in amdgpu_pci_slot_reset()
5806 adev->no_hw_access = true; in amdgpu_pci_slot_reset()
5807 r = amdgpu_device_pre_asic_reset(adev, &reset_context); in amdgpu_pci_slot_reset()
5808 adev->no_hw_access = false; in amdgpu_pci_slot_reset()
5816 if (amdgpu_device_cache_pci_state(adev->pdev)) in amdgpu_pci_slot_reset()
5817 pci_restore_state(adev->pdev); in amdgpu_pci_slot_reset()
5822 amdgpu_device_unset_mp1_state(adev); in amdgpu_pci_slot_reset()
5823 amdgpu_device_unlock_reset_domain(adev->reset_domain); in amdgpu_pci_slot_reset()
5839 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_pci_resume() local
5846 if (adev->pci_channel_state != pci_channel_io_frozen) in amdgpu_pci_resume()
5850 struct amdgpu_ring *ring = adev->rings[i]; in amdgpu_pci_resume()
5860 amdgpu_device_unset_mp1_state(adev); in amdgpu_pci_resume()
5861 amdgpu_device_unlock_reset_domain(adev->reset_domain); in amdgpu_pci_resume()
5867 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_device_cache_pci_state() local
5872 kfree(adev->pci_state); in amdgpu_device_cache_pci_state()
5874 adev->pci_state = pci_store_saved_state(pdev); in amdgpu_device_cache_pci_state()
5876 if (!adev->pci_state) { in amdgpu_device_cache_pci_state()
5891 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_device_load_pci_state() local
5894 if (!adev->pci_state) in amdgpu_device_load_pci_state()
5897 r = pci_load_saved_state(pdev, adev->pci_state); in amdgpu_device_load_pci_state()
5909 void amdgpu_device_flush_hdp(struct amdgpu_device *adev, in amdgpu_device_flush_hdp() argument
5913 if ((adev->flags & AMD_IS_APU) && !amdgpu_passthrough(adev)) in amdgpu_device_flush_hdp()
5916 if (adev->gmc.xgmi.connected_to_cpu) in amdgpu_device_flush_hdp()
5922 amdgpu_asic_flush_hdp(adev, ring); in amdgpu_device_flush_hdp()
5925 void amdgpu_device_invalidate_hdp(struct amdgpu_device *adev, in amdgpu_device_invalidate_hdp() argument
5929 if ((adev->flags & AMD_IS_APU) && !amdgpu_passthrough(adev)) in amdgpu_device_invalidate_hdp()
5932 if (adev->gmc.xgmi.connected_to_cpu) in amdgpu_device_invalidate_hdp()
5935 amdgpu_asic_invalidate_hdp(adev, ring); in amdgpu_device_invalidate_hdp()
5938 int amdgpu_in_reset(struct amdgpu_device *adev) in amdgpu_in_reset() argument
5940 return atomic_read(&adev->reset_domain->in_gpu_reset); in amdgpu_in_reset()
5963 void amdgpu_device_halt(struct amdgpu_device *adev) in amdgpu_device_halt() argument
5965 struct pci_dev *pdev = adev->pdev; in amdgpu_device_halt()
5966 struct drm_device *ddev = adev_to_drm(adev); in amdgpu_device_halt()
5970 amdgpu_irq_disable_all(adev); in amdgpu_device_halt()
5972 amdgpu_fence_driver_hw_fini(adev); in amdgpu_device_halt()
5974 adev->no_hw_access = true; in amdgpu_device_halt()
5976 amdgpu_device_unmap_mmio(adev); in amdgpu_device_halt()
5982 u32 amdgpu_device_pcie_port_rreg(struct amdgpu_device *adev, in amdgpu_device_pcie_port_rreg() argument
5988 address = adev->nbio.funcs->get_pcie_port_index_offset(adev); in amdgpu_device_pcie_port_rreg()
5989 data = adev->nbio.funcs->get_pcie_port_data_offset(adev); in amdgpu_device_pcie_port_rreg()
5991 spin_lock_irqsave(&adev->pcie_idx_lock, flags); in amdgpu_device_pcie_port_rreg()
5995 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags); in amdgpu_device_pcie_port_rreg()
5999 void amdgpu_device_pcie_port_wreg(struct amdgpu_device *adev, in amdgpu_device_pcie_port_wreg() argument
6004 address = adev->nbio.funcs->get_pcie_port_index_offset(adev); in amdgpu_device_pcie_port_wreg()
6005 data = adev->nbio.funcs->get_pcie_port_data_offset(adev); in amdgpu_device_pcie_port_wreg()
6007 spin_lock_irqsave(&adev->pcie_idx_lock, flags); in amdgpu_device_pcie_port_wreg()
6012 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags); in amdgpu_device_pcie_port_wreg()
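The PCIe port accessors above follow the classic index/data pattern: the NBIO callbacks supply an index port and a data port, and each access is serialized with pcie_idx_lock. The register accesses between lock and unlock are not among the matched lines, so the body below is a sketch under that assumption:

	u32 pcie_port_rreg_sketch(struct amdgpu_device *adev, u32 reg)
	{
		unsigned long flags;
		u32 address, data, r;

		address = adev->nbio.funcs->get_pcie_port_index_offset(adev);
		data = adev->nbio.funcs->get_pcie_port_data_offset(adev);

		spin_lock_irqsave(&adev->pcie_idx_lock, flags);
		WREG32(address, reg);	/* assumed: select the target register */
		(void)RREG32(address);	/* assumed: post the index write */
		r = RREG32(data);	/* assumed: read the selected register */
		spin_unlock_irqrestore(&adev->pcie_idx_lock, flags);

		return r;
	}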
6024 struct dma_fence *amdgpu_device_switch_gang(struct amdgpu_device *adev, in amdgpu_device_switch_gang() argument
6032 old = dma_fence_get_rcu_safe(&adev->gang_submit); in amdgpu_device_switch_gang()
6041 } while (cmpxchg((struct dma_fence __force **)&adev->gang_submit, in amdgpu_device_switch_gang()
6048 bool amdgpu_device_has_display_hardware(struct amdgpu_device *adev) in amdgpu_device_has_display_hardware() argument
6050 switch (adev->asic_type) { in amdgpu_device_has_display_hardware()
6082 if (!adev->ip_versions[DCE_HWIP][0] || in amdgpu_device_has_display_hardware()
6083 (adev->harvest_ip_mask & AMD_HARVEST_IP_DMU_MASK)) in amdgpu_device_has_display_hardware()