Lines matching refs:adev
Each entry below gives the source line number, the matched line, and the enclosing function; "local" and "argument" mark how adev is bound in that function.
156 struct amdgpu_device *adev = drm_to_adev(ddev); in amdgpu_device_get_pcie_replay_count() local
157 uint64_t cnt = amdgpu_asic_get_pcie_replay_count(adev); in amdgpu_device_get_pcie_replay_count()
165 static void amdgpu_device_get_pcie_info(struct amdgpu_device *adev);
178 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_device_supports_px() local
180 if ((adev->flags & AMD_IS_PX) && !amdgpu_is_atpx_hybrid()) in amdgpu_device_supports_px()
195 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_device_supports_boco() local
197 if (adev->has_pr3 || in amdgpu_device_supports_boco()
198 ((adev->flags & AMD_IS_PX) && amdgpu_is_atpx_hybrid())) in amdgpu_device_supports_boco()
213 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_device_supports_baco() local
215 return amdgpu_asic_supports_baco(adev); in amdgpu_device_supports_baco()
246 void amdgpu_device_mm_access(struct amdgpu_device *adev, loff_t pos, in amdgpu_device_mm_access() argument
255 if (!drm_dev_enter(adev_to_drm(adev), &idx)) in amdgpu_device_mm_access()
260 spin_lock_irqsave(&adev->mmio_idx_lock, flags); in amdgpu_device_mm_access()
275 spin_unlock_irqrestore(&adev->mmio_idx_lock, flags); in amdgpu_device_mm_access()
290 size_t amdgpu_device_aper_access(struct amdgpu_device *adev, loff_t pos, in amdgpu_device_aper_access() argument
298 if (!adev->mman.aper_base_kaddr) in amdgpu_device_aper_access()
301 last = min(pos + size, adev->gmc.visible_vram_size); in amdgpu_device_aper_access()
303 addr = adev->mman.aper_base_kaddr + pos; in amdgpu_device_aper_access()
312 amdgpu_device_flush_hdp(adev, NULL); in amdgpu_device_aper_access()
314 amdgpu_device_invalidate_hdp(adev, NULL); in amdgpu_device_aper_access()
339 void amdgpu_device_vram_access(struct amdgpu_device *adev, loff_t pos, in amdgpu_device_vram_access() argument
345 count = amdgpu_device_aper_access(adev, pos, buf, size, write); in amdgpu_device_vram_access()
351 amdgpu_device_mm_access(adev, pos, buf, size, write); in amdgpu_device_vram_access()
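The amdgpu_device_vram_access() entries above outline a two-path copy: service whatever falls inside the CPU-visible aperture via amdgpu_device_aper_access(), then push the remainder through the MM index/data window via amdgpu_device_mm_access(). A minimal userspace sketch of that split, with a simulated VRAM buffer and made-up sizes (the real paths use an ioremapped aperture, HDP flush/invalidate, and mmio_idx_lock):

#include <stdint.h>
#include <string.h>
#include <stdio.h>

#define VISIBLE_VRAM (1u << 12)  /* pretend 4 KiB of VRAM is CPU-visible */

static uint8_t vram[1 << 13];    /* full (simulated) VRAM */

/* Copy through the visible aperture; returns bytes handled. */
static size_t aper_access(size_t pos, void *buf, size_t size, int write)
{
    if (pos >= VISIBLE_VRAM)
        return 0;
    size_t last = pos + size < VISIBLE_VRAM ? pos + size : VISIBLE_VRAM;
    size_t count = last - pos;
    if (write)
        memcpy(vram + pos, buf, count);
    else
        memcpy(buf, vram + pos, count);
    return count;
}

/* Fallback path; in the kernel this goes through the MM_INDEX/MM_DATA
 * register pair one dword at a time under mmio_idx_lock. */
static void mm_access(size_t pos, void *buf, size_t size, int write)
{
    if (write)
        memcpy(vram + pos, buf, size);
    else
        memcpy(buf, vram + pos, size);
}

static void vram_access(size_t pos, void *buf, size_t size, int write)
{
    size_t count = aper_access(pos, buf, size, write);
    size -= count;
    pos += count;
    buf = (uint8_t *)buf + count;
    if (size)
        mm_access(pos, buf, size, write);
}

int main(void)
{
    uint8_t in[16] = { 1, 2, 3 }, out[16] = { 0 };
    vram_access(VISIBLE_VRAM - 8, in, sizeof(in), 1); /* straddles the aperture */
    vram_access(VISIBLE_VRAM - 8, out, sizeof(out), 0);
    printf("round trip ok: %d\n", memcmp(in, out, sizeof(in)) == 0);
    return 0;
}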
360 bool amdgpu_device_skip_hw_access(struct amdgpu_device *adev) in amdgpu_device_skip_hw_access() argument
362 if (adev->no_hw_access) in amdgpu_device_skip_hw_access()
378 if (down_read_trylock(&adev->reset_domain->sem)) in amdgpu_device_skip_hw_access()
379 up_read(&adev->reset_domain->sem); in amdgpu_device_skip_hw_access()
381 lockdep_assert_held(&adev->reset_domain->sem); in amdgpu_device_skip_hw_access()
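amdgpu_device_skip_hw_access() probes the reset-domain semaphore: if down_read_trylock() succeeds, no reset is in flight and the lock is dropped again; otherwise lockdep_assert_held() checks that the caller already owns it. A pthread rwlock sketch of that probe (illustrative only; the kernel uses an rw_semaphore plus lockdep):

#include <pthread.h>
#include <stdio.h>

static pthread_rwlock_t reset_sem = PTHREAD_RWLOCK_INITIALIZER;

static void probe_reset_domain(void)
{
    if (pthread_rwlock_tryrdlock(&reset_sem) == 0) {
        /* no writer (reset) active; release immediately */
        pthread_rwlock_unlock(&reset_sem);
        puts("no reset in progress");
    } else {
        /* a reset holds the lock for write; in the kernel,
         * lockdep_assert_held() verifies the caller owns it */
        puts("reset in progress or lock already held");
    }
}

int main(void)
{
    probe_reset_domain();
    return 0;
}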
396 uint32_t amdgpu_device_rreg(struct amdgpu_device *adev, in amdgpu_device_rreg() argument
401 if (amdgpu_device_skip_hw_access(adev)) in amdgpu_device_rreg()
404 if ((reg * 4) < adev->rmmio_size) { in amdgpu_device_rreg()
406 amdgpu_sriov_runtime(adev) && in amdgpu_device_rreg()
407 down_read_trylock(&adev->reset_domain->sem)) { in amdgpu_device_rreg()
408 ret = amdgpu_kiq_rreg(adev, reg); in amdgpu_device_rreg()
409 up_read(&adev->reset_domain->sem); in amdgpu_device_rreg()
411 ret = readl(((void __iomem *)adev->rmmio) + (reg * 4)); in amdgpu_device_rreg()
414 ret = adev->pcie_rreg(adev, reg * 4); in amdgpu_device_rreg()
417 trace_amdgpu_device_rreg(adev->pdev->device, reg, ret); in amdgpu_device_rreg()
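amdgpu_device_rreg() dispatches on the register offset: offsets inside the rmmio aperture are read directly with readl() (or routed through KIQ under SR-IOV runtime), and everything beyond rmmio_size goes through the indirect pcie_rreg callback. A minimal userspace model of that dispatch, with invented fake_dev fields standing in for the kernel's types:

#include <stdint.h>
#include <stdio.h>

struct fake_dev {
    uint32_t *rmmio;        /* mapped register aperture (simulated) */
    size_t    rmmio_size;   /* aperture size in bytes */
    uint32_t (*pcie_rreg)(struct fake_dev *dev, uint32_t byte_off);
};

static uint32_t fake_pcie_rreg(struct fake_dev *dev, uint32_t byte_off)
{
    (void)dev;
    return 0xDEAD0000u | (byte_off & 0xFFFFu); /* stand-in for indirect read */
}

static uint32_t fake_rreg(struct fake_dev *dev, uint32_t reg /* dword index */)
{
    if ((reg * 4) < dev->rmmio_size)
        return dev->rmmio[reg];          /* direct MMIO read (readl in kernel) */
    return dev->pcie_rreg(dev, reg * 4); /* indirect path for high offsets */
}

int main(void)
{
    uint32_t regs[4] = { 1, 2, 3, 4 };
    struct fake_dev dev = { regs, sizeof(regs), fake_pcie_rreg };
    printf("%#x %#x\n", fake_rreg(&dev, 1), fake_rreg(&dev, 100));
    return 0;
}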
435 uint8_t amdgpu_mm_rreg8(struct amdgpu_device *adev, uint32_t offset) in amdgpu_mm_rreg8() argument
437 if (amdgpu_device_skip_hw_access(adev)) in amdgpu_mm_rreg8()
440 if (offset < adev->rmmio_size) in amdgpu_mm_rreg8()
441 return (readb(adev->rmmio + offset)); in amdgpu_mm_rreg8()
460 void amdgpu_mm_wreg8(struct amdgpu_device *adev, uint32_t offset, uint8_t value) in amdgpu_mm_wreg8() argument
462 if (amdgpu_device_skip_hw_access(adev)) in amdgpu_mm_wreg8()
465 if (offset < adev->rmmio_size) in amdgpu_mm_wreg8()
466 writeb(value, adev->rmmio + offset); in amdgpu_mm_wreg8()
481 void amdgpu_device_wreg(struct amdgpu_device *adev, in amdgpu_device_wreg() argument
485 if (amdgpu_device_skip_hw_access(adev)) in amdgpu_device_wreg()
488 if ((reg * 4) < adev->rmmio_size) { in amdgpu_device_wreg()
490 amdgpu_sriov_runtime(adev) && in amdgpu_device_wreg()
491 down_read_trylock(&adev->reset_domain->sem)) { in amdgpu_device_wreg()
492 amdgpu_kiq_wreg(adev, reg, v); in amdgpu_device_wreg()
493 up_read(&adev->reset_domain->sem); in amdgpu_device_wreg()
495 writel(v, ((void __iomem *)adev->rmmio) + (reg * 4)); in amdgpu_device_wreg()
498 adev->pcie_wreg(adev, reg * 4, v); in amdgpu_device_wreg()
501 trace_amdgpu_device_wreg(adev->pdev->device, reg, v); in amdgpu_device_wreg()
513 void amdgpu_mm_wreg_mmio_rlc(struct amdgpu_device *adev, in amdgpu_mm_wreg_mmio_rlc() argument
517 if (amdgpu_device_skip_hw_access(adev)) in amdgpu_mm_wreg_mmio_rlc()
520 if (amdgpu_sriov_fullaccess(adev) && in amdgpu_mm_wreg_mmio_rlc()
521 adev->gfx.rlc.funcs && in amdgpu_mm_wreg_mmio_rlc()
522 adev->gfx.rlc.funcs->is_rlcg_access_range) { in amdgpu_mm_wreg_mmio_rlc()
523 if (adev->gfx.rlc.funcs->is_rlcg_access_range(adev, reg)) in amdgpu_mm_wreg_mmio_rlc()
524 return amdgpu_sriov_wreg(adev, reg, v, 0, 0, xcc_id); in amdgpu_mm_wreg_mmio_rlc()
525 } else if ((reg * 4) >= adev->rmmio_size) { in amdgpu_mm_wreg_mmio_rlc()
526 adev->pcie_wreg(adev, reg * 4, v); in amdgpu_mm_wreg_mmio_rlc()
528 writel(v, ((void __iomem *)adev->rmmio) + (reg * 4)); in amdgpu_mm_wreg_mmio_rlc()
540 u32 amdgpu_device_indirect_rreg(struct amdgpu_device *adev, in amdgpu_device_indirect_rreg() argument
548 pcie_index = adev->nbio.funcs->get_pcie_index_offset(adev); in amdgpu_device_indirect_rreg()
549 pcie_data = adev->nbio.funcs->get_pcie_data_offset(adev); in amdgpu_device_indirect_rreg()
551 spin_lock_irqsave(&adev->pcie_idx_lock, flags); in amdgpu_device_indirect_rreg()
552 pcie_index_offset = (void __iomem *)adev->rmmio + pcie_index * 4; in amdgpu_device_indirect_rreg()
553 pcie_data_offset = (void __iomem *)adev->rmmio + pcie_data * 4; in amdgpu_device_indirect_rreg()
558 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags); in amdgpu_device_indirect_rreg()
563 u32 amdgpu_device_indirect_rreg_ext(struct amdgpu_device *adev, in amdgpu_device_indirect_rreg_ext() argument
572 pcie_index = adev->nbio.funcs->get_pcie_index_offset(adev); in amdgpu_device_indirect_rreg_ext()
573 pcie_data = adev->nbio.funcs->get_pcie_data_offset(adev); in amdgpu_device_indirect_rreg_ext()
574 if (adev->nbio.funcs->get_pcie_index_hi_offset) in amdgpu_device_indirect_rreg_ext()
575 pcie_index_hi = adev->nbio.funcs->get_pcie_index_hi_offset(adev); in amdgpu_device_indirect_rreg_ext()
579 spin_lock_irqsave(&adev->pcie_idx_lock, flags); in amdgpu_device_indirect_rreg_ext()
580 pcie_index_offset = (void __iomem *)adev->rmmio + pcie_index * 4; in amdgpu_device_indirect_rreg_ext()
581 pcie_data_offset = (void __iomem *)adev->rmmio + pcie_data * 4; in amdgpu_device_indirect_rreg_ext()
583 pcie_index_hi_offset = (void __iomem *)adev->rmmio + in amdgpu_device_indirect_rreg_ext()
600 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags); in amdgpu_device_indirect_rreg_ext()
613 u64 amdgpu_device_indirect_rreg64(struct amdgpu_device *adev, in amdgpu_device_indirect_rreg64() argument
621 pcie_index = adev->nbio.funcs->get_pcie_index_offset(adev); in amdgpu_device_indirect_rreg64()
622 pcie_data = adev->nbio.funcs->get_pcie_data_offset(adev); in amdgpu_device_indirect_rreg64()
624 spin_lock_irqsave(&adev->pcie_idx_lock, flags); in amdgpu_device_indirect_rreg64()
625 pcie_index_offset = (void __iomem *)adev->rmmio + pcie_index * 4; in amdgpu_device_indirect_rreg64()
626 pcie_data_offset = (void __iomem *)adev->rmmio + pcie_data * 4; in amdgpu_device_indirect_rreg64()
636 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags); in amdgpu_device_indirect_rreg64()
649 void amdgpu_device_indirect_wreg(struct amdgpu_device *adev, in amdgpu_device_indirect_wreg() argument
656 pcie_index = adev->nbio.funcs->get_pcie_index_offset(adev); in amdgpu_device_indirect_wreg()
657 pcie_data = adev->nbio.funcs->get_pcie_data_offset(adev); in amdgpu_device_indirect_wreg()
659 spin_lock_irqsave(&adev->pcie_idx_lock, flags); in amdgpu_device_indirect_wreg()
660 pcie_index_offset = (void __iomem *)adev->rmmio + pcie_index * 4; in amdgpu_device_indirect_wreg()
661 pcie_data_offset = (void __iomem *)adev->rmmio + pcie_data * 4; in amdgpu_device_indirect_wreg()
667 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags); in amdgpu_device_indirect_wreg()
670 void amdgpu_device_indirect_wreg_ext(struct amdgpu_device *adev, in amdgpu_device_indirect_wreg_ext() argument
678 pcie_index = adev->nbio.funcs->get_pcie_index_offset(adev); in amdgpu_device_indirect_wreg_ext()
679 pcie_data = adev->nbio.funcs->get_pcie_data_offset(adev); in amdgpu_device_indirect_wreg_ext()
680 if (adev->nbio.funcs->get_pcie_index_hi_offset) in amdgpu_device_indirect_wreg_ext()
681 pcie_index_hi = adev->nbio.funcs->get_pcie_index_hi_offset(adev); in amdgpu_device_indirect_wreg_ext()
685 spin_lock_irqsave(&adev->pcie_idx_lock, flags); in amdgpu_device_indirect_wreg_ext()
686 pcie_index_offset = (void __iomem *)adev->rmmio + pcie_index * 4; in amdgpu_device_indirect_wreg_ext()
687 pcie_data_offset = (void __iomem *)adev->rmmio + pcie_data * 4; in amdgpu_device_indirect_wreg_ext()
689 pcie_index_hi_offset = (void __iomem *)adev->rmmio + in amdgpu_device_indirect_wreg_ext()
707 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags); in amdgpu_device_indirect_wreg_ext()
718 void amdgpu_device_indirect_wreg64(struct amdgpu_device *adev, in amdgpu_device_indirect_wreg64() argument
725 pcie_index = adev->nbio.funcs->get_pcie_index_offset(adev); in amdgpu_device_indirect_wreg64()
726 pcie_data = adev->nbio.funcs->get_pcie_data_offset(adev); in amdgpu_device_indirect_wreg64()
728 spin_lock_irqsave(&adev->pcie_idx_lock, flags); in amdgpu_device_indirect_wreg64()
729 pcie_index_offset = (void __iomem *)adev->rmmio + pcie_index * 4; in amdgpu_device_indirect_wreg64()
730 pcie_data_offset = (void __iomem *)adev->rmmio + pcie_data * 4; in amdgpu_device_indirect_wreg64()
742 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags); in amdgpu_device_indirect_wreg64()
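The amdgpu_device_indirect_* entries all follow the classic index/data pattern: write the target address to the pcie_index register, then read or write the pcie_data register, all under pcie_idx_lock. A small simulation of the mechanism (the register slots and backing array are made up):

#include <stdint.h>
#include <stdio.h>

#define PCIE_INDEX 0   /* simulated pcie_index register slot */
#define PCIE_DATA  1   /* simulated pcie_data register slot */

static uint32_t mmio[2];          /* stand-in for the mapped aperture */
static uint32_t backing[256];     /* registers reachable only indirectly */

/* Model the hardware: a write to DATA lands at the address in INDEX. */
static void hw_write(int slot, uint32_t v)
{
    mmio[slot] = v;
    if (slot == PCIE_DATA)
        backing[mmio[PCIE_INDEX] % 256] = v;
}

static uint32_t hw_read(int slot)
{
    if (slot == PCIE_DATA)
        return backing[mmio[PCIE_INDEX] % 256];
    return mmio[slot];
}

static void indirect_wreg(uint32_t reg_addr, uint32_t v)
{
    /* In the kernel this sequence runs under pcie_idx_lock, with a
     * readback after each write to keep the accesses ordered. */
    hw_write(PCIE_INDEX, reg_addr);
    hw_write(PCIE_DATA, v);
}

static uint32_t indirect_rreg(uint32_t reg_addr)
{
    hw_write(PCIE_INDEX, reg_addr);
    return hw_read(PCIE_DATA);
}

int main(void)
{
    indirect_wreg(0x42, 0xCAFE);
    printf("%#x\n", indirect_rreg(0x42));
    return 0;
}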
752 u32 amdgpu_device_get_rev_id(struct amdgpu_device *adev) in amdgpu_device_get_rev_id() argument
754 return adev->nbio.funcs->get_rev_id(adev); in amdgpu_device_get_rev_id()
767 static uint32_t amdgpu_invalid_rreg(struct amdgpu_device *adev, uint32_t reg) in amdgpu_invalid_rreg() argument
774 static uint32_t amdgpu_invalid_rreg_ext(struct amdgpu_device *adev, uint64_t reg) in amdgpu_invalid_rreg_ext() argument
791 static void amdgpu_invalid_wreg(struct amdgpu_device *adev, uint32_t reg, uint32_t v) in amdgpu_invalid_wreg() argument
798 static void amdgpu_invalid_wreg_ext(struct amdgpu_device *adev, uint64_t reg, uint32_t v) in amdgpu_invalid_wreg_ext() argument
815 static uint64_t amdgpu_invalid_rreg64(struct amdgpu_device *adev, uint32_t reg) in amdgpu_invalid_rreg64() argument
832 static void amdgpu_invalid_wreg64(struct amdgpu_device *adev, uint32_t reg, uint64_t v) in amdgpu_invalid_wreg64() argument
850 static uint32_t amdgpu_block_invalid_rreg(struct amdgpu_device *adev, in amdgpu_block_invalid_rreg() argument
870 static void amdgpu_block_invalid_wreg(struct amdgpu_device *adev, in amdgpu_block_invalid_wreg() argument
886 static int amdgpu_device_asic_init(struct amdgpu_device *adev) in amdgpu_device_asic_init() argument
890 amdgpu_asic_pre_asic_init(adev); in amdgpu_device_asic_init()
892 if (adev->ip_versions[GC_HWIP][0] == IP_VERSION(9, 4, 3) || in amdgpu_device_asic_init()
893 adev->ip_versions[GC_HWIP][0] >= IP_VERSION(11, 0, 0)) { in amdgpu_device_asic_init()
894 amdgpu_psp_wait_for_bootloader(adev); in amdgpu_device_asic_init()
895 ret = amdgpu_atomfirmware_asic_init(adev, true); in amdgpu_device_asic_init()
898 return amdgpu_atom_asic_init(adev->mode_info.atom_context); in amdgpu_device_asic_init()
912 static int amdgpu_device_mem_scratch_init(struct amdgpu_device *adev) in amdgpu_device_mem_scratch_init() argument
914 return amdgpu_bo_create_kernel(adev, AMDGPU_GPU_PAGE_SIZE, PAGE_SIZE, in amdgpu_device_mem_scratch_init()
917 &adev->mem_scratch.robj, in amdgpu_device_mem_scratch_init()
918 &adev->mem_scratch.gpu_addr, in amdgpu_device_mem_scratch_init()
919 (void **)&adev->mem_scratch.ptr); in amdgpu_device_mem_scratch_init()
929 static void amdgpu_device_mem_scratch_fini(struct amdgpu_device *adev) in amdgpu_device_mem_scratch_fini() argument
931 amdgpu_bo_free_kernel(&adev->mem_scratch.robj, NULL, NULL); in amdgpu_device_mem_scratch_fini()
944 void amdgpu_device_program_register_sequence(struct amdgpu_device *adev, in amdgpu_device_program_register_sequence() argument
964 if (adev->family >= AMDGPU_FAMILY_AI) in amdgpu_device_program_register_sequence()
981 void amdgpu_device_pci_config_reset(struct amdgpu_device *adev) in amdgpu_device_pci_config_reset() argument
983 pci_write_config_dword(adev->pdev, 0x7c, AMDGPU_ASIC_RESET_DATA); in amdgpu_device_pci_config_reset()
993 int amdgpu_device_pci_reset(struct amdgpu_device *adev) in amdgpu_device_pci_reset() argument
995 return pci_reset_function(adev->pdev); in amdgpu_device_pci_reset()
1012 static void amdgpu_device_wb_fini(struct amdgpu_device *adev) in amdgpu_device_wb_fini() argument
1014 if (adev->wb.wb_obj) { in amdgpu_device_wb_fini()
1015 amdgpu_bo_free_kernel(&adev->wb.wb_obj, in amdgpu_device_wb_fini()
1016 &adev->wb.gpu_addr, in amdgpu_device_wb_fini()
1017 (void **)&adev->wb.wb); in amdgpu_device_wb_fini()
1018 adev->wb.wb_obj = NULL; in amdgpu_device_wb_fini()
1031 static int amdgpu_device_wb_init(struct amdgpu_device *adev) in amdgpu_device_wb_init() argument
1035 if (adev->wb.wb_obj == NULL) { in amdgpu_device_wb_init()
1037 r = amdgpu_bo_create_kernel(adev, AMDGPU_MAX_WB * sizeof(uint32_t) * 8, in amdgpu_device_wb_init()
1039 &adev->wb.wb_obj, &adev->wb.gpu_addr, in amdgpu_device_wb_init()
1040 (void **)&adev->wb.wb); in amdgpu_device_wb_init()
1042 dev_warn(adev->dev, "(%d) create WB bo failed\n", r); in amdgpu_device_wb_init()
1046 adev->wb.num_wb = AMDGPU_MAX_WB; in amdgpu_device_wb_init()
1047 memset(&adev->wb.used, 0, sizeof(adev->wb.used)); in amdgpu_device_wb_init()
1050 memset((char *)adev->wb.wb, 0, AMDGPU_MAX_WB * sizeof(uint32_t) * 8); in amdgpu_device_wb_init()
1065 int amdgpu_device_wb_get(struct amdgpu_device *adev, u32 *wb) in amdgpu_device_wb_get() argument
1067 unsigned long offset = find_first_zero_bit(adev->wb.used, adev->wb.num_wb); in amdgpu_device_wb_get()
1069 if (offset < adev->wb.num_wb) { in amdgpu_device_wb_get()
1070 __set_bit(offset, adev->wb.used); in amdgpu_device_wb_get()
1086 void amdgpu_device_wb_free(struct amdgpu_device *adev, u32 wb) in amdgpu_device_wb_free() argument
1089 if (wb < adev->wb.num_wb) in amdgpu_device_wb_free()
1090 __clear_bit(wb, adev->wb.used); in amdgpu_device_wb_free()
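amdgpu_device_wb_get()/wb_free() manage writeback slots with a plain bitmap: find_first_zero_bit() locates a free slot, __set_bit() claims it, __clear_bit() releases it. A userspace equivalent, assuming a fixed 64-slot pool:

#include <stdint.h>
#include <stdio.h>

#define NUM_WB 64

static uint64_t wb_used; /* one bit per writeback slot */

static int wb_get(uint32_t *wb)
{
    for (uint32_t i = 0; i < NUM_WB; i++) {
        if (!(wb_used & (1ULL << i))) {   /* find_first_zero_bit() */
            wb_used |= 1ULL << i;         /* __set_bit() */
            *wb = i;
            return 0;
        }
    }
    return -1; /* kernel returns -EINVAL when the pool is exhausted */
}

static void wb_free(uint32_t wb)
{
    if (wb < NUM_WB)
        wb_used &= ~(1ULL << wb);         /* __clear_bit() */
}

int main(void)
{
    uint32_t slot;
    if (wb_get(&slot) == 0)
        printf("got slot %u\n", slot);
    wb_free(slot);
    return 0;
}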
1102 int amdgpu_device_resize_fb_bar(struct amdgpu_device *adev) in amdgpu_device_resize_fb_bar() argument
1104 int rbar_size = pci_rebar_bytes_to_size(adev->gmc.real_vram_size); in amdgpu_device_resize_fb_bar()
1115 if (amdgpu_sriov_vf(adev)) in amdgpu_device_resize_fb_bar()
1119 if (adev->gmc.real_vram_size && in amdgpu_device_resize_fb_bar()
1120 (pci_resource_len(adev->pdev, 0) >= adev->gmc.real_vram_size)) in amdgpu_device_resize_fb_bar()
1124 root = adev->pdev->bus; in amdgpu_device_resize_fb_bar()
1139 rbar_size = min(fls(pci_rebar_get_possible_sizes(adev->pdev, 0)) - 1, in amdgpu_device_resize_fb_bar()
1143 pci_read_config_word(adev->pdev, PCI_COMMAND, &cmd); in amdgpu_device_resize_fb_bar()
1144 pci_write_config_word(adev->pdev, PCI_COMMAND, in amdgpu_device_resize_fb_bar()
1148 amdgpu_doorbell_fini(adev); in amdgpu_device_resize_fb_bar()
1149 if (adev->asic_type >= CHIP_BONAIRE) in amdgpu_device_resize_fb_bar()
1150 pci_release_resource(adev->pdev, 2); in amdgpu_device_resize_fb_bar()
1152 pci_release_resource(adev->pdev, 0); in amdgpu_device_resize_fb_bar()
1154 r = pci_resize_resource(adev->pdev, 0, rbar_size); in amdgpu_device_resize_fb_bar()
1160 pci_assign_unassigned_bus_resources(adev->pdev->bus); in amdgpu_device_resize_fb_bar()
1165 r = amdgpu_doorbell_init(adev); in amdgpu_device_resize_fb_bar()
1166 if (r || (pci_resource_flags(adev->pdev, 0) & IORESOURCE_UNSET)) in amdgpu_device_resize_fb_bar()
1169 pci_write_config_word(adev->pdev, PCI_COMMAND, cmd); in amdgpu_device_resize_fb_bar()
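amdgpu_device_resize_fb_bar() converts the VRAM size to the PCIe resizable-BAR encoding with pci_rebar_bytes_to_size() and clamps it to the largest size the device advertises (fls(possible_sizes) - 1). A sketch of the assumed encoding, a power-of-two exponent where 0 means 1 MiB:

#include <stdint.h>
#include <stdio.h>

/* Encode a byte count as a resizable-BAR size value, rounding up
 * (assumed semantics, mirroring pci_rebar_bytes_to_size()). */
static int rebar_bytes_to_size(uint64_t bytes)
{
    int order = 0;
    uint64_t sz = 1ULL << 20; /* 1 MiB */
    while (sz < bytes) { sz <<= 1; order++; }
    return order;
}

/* Pick the largest supported size that still covers VRAM:
 * mirrors min(fls(possible_sizes) - 1, wanted). */
static int pick_rebar_size(uint32_t possible_sizes, int wanted)
{
    int highest = 31 - __builtin_clz(possible_sizes); /* fls() - 1 */
    return highest < wanted ? highest : wanted;
}

int main(void)
{
    int want = rebar_bytes_to_size(8ULL << 30);       /* 8 GiB of VRAM */
    printf("rebar size %d\n", pick_rebar_size(0x3FFF, want));
    return 0;
}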
1174 static bool amdgpu_device_read_bios(struct amdgpu_device *adev) in amdgpu_device_read_bios() argument
1176 if (hweight32(adev->aid_mask) && (adev->flags & AMD_IS_APU)) in amdgpu_device_read_bios()
1194 bool amdgpu_device_need_post(struct amdgpu_device *adev) in amdgpu_device_need_post() argument
1198 if (amdgpu_sriov_vf(adev)) in amdgpu_device_need_post()
1201 if (!amdgpu_device_read_bios(adev)) in amdgpu_device_need_post()
1204 if (amdgpu_passthrough(adev)) { in amdgpu_device_need_post()
1210 if (adev->asic_type == CHIP_FIJI) { in amdgpu_device_need_post()
1214 err = request_firmware(&adev->pm.fw, "amdgpu/fiji_smc.bin", adev->dev); in amdgpu_device_need_post()
1219 fw_ver = *((uint32_t *)adev->pm.fw->data + 69); in amdgpu_device_need_post()
1226 if (adev->gmc.xgmi.pending_reset) in amdgpu_device_need_post()
1229 if (adev->has_hw_reset) { in amdgpu_device_need_post()
1230 adev->has_hw_reset = false; in amdgpu_device_need_post()
1235 if (adev->asic_type >= CHIP_BONAIRE) in amdgpu_device_need_post()
1236 return amdgpu_atombios_scratch_need_asic_init(adev); in amdgpu_device_need_post()
1239 reg = amdgpu_asic_get_config_memsize(adev); in amdgpu_device_need_post()
1276 bool amdgpu_device_should_use_aspm(struct amdgpu_device *adev) in amdgpu_device_should_use_aspm() argument
1288 return pcie_aspm_enabled(adev->pdev); in amdgpu_device_should_use_aspm()
1315 struct amdgpu_device *adev = drm_to_adev(pci_get_drvdata(pdev)); in amdgpu_device_vga_set_decode() local
1317 amdgpu_asic_set_vga_state(adev, state); in amdgpu_device_vga_set_decode()
1335 static void amdgpu_device_check_block_size(struct amdgpu_device *adev) in amdgpu_device_check_block_size() argument
1345 dev_warn(adev->dev, "VM page table size (%d) too small\n", in amdgpu_device_check_block_size()
1359 static void amdgpu_device_check_vm_size(struct amdgpu_device *adev) in amdgpu_device_check_vm_size() argument
1366 dev_warn(adev->dev, "VM size (%d) too small, min is 1GB\n", in amdgpu_device_check_vm_size()
1372 static void amdgpu_device_check_smu_prv_buffer_size(struct amdgpu_device *adev) in amdgpu_device_check_smu_prv_buffer_size() argument
1402 adev->pm.smu_prv_buffer_size = amdgpu_smu_memory_pool_size << 28; in amdgpu_device_check_smu_prv_buffer_size()
1409 adev->pm.smu_prv_buffer_size = 0; in amdgpu_device_check_smu_prv_buffer_size()
1412 static int amdgpu_device_init_apu_flags(struct amdgpu_device *adev) in amdgpu_device_init_apu_flags() argument
1414 if (!(adev->flags & AMD_IS_APU) || in amdgpu_device_init_apu_flags()
1415 adev->asic_type < CHIP_RAVEN) in amdgpu_device_init_apu_flags()
1418 switch (adev->asic_type) { in amdgpu_device_init_apu_flags()
1420 if (adev->pdev->device == 0x15dd) in amdgpu_device_init_apu_flags()
1421 adev->apu_flags |= AMD_APU_IS_RAVEN; in amdgpu_device_init_apu_flags()
1422 if (adev->pdev->device == 0x15d8) in amdgpu_device_init_apu_flags()
1423 adev->apu_flags |= AMD_APU_IS_PICASSO; in amdgpu_device_init_apu_flags()
1426 if ((adev->pdev->device == 0x1636) || in amdgpu_device_init_apu_flags()
1427 (adev->pdev->device == 0x164c)) in amdgpu_device_init_apu_flags()
1428 adev->apu_flags |= AMD_APU_IS_RENOIR; in amdgpu_device_init_apu_flags()
1430 adev->apu_flags |= AMD_APU_IS_GREEN_SARDINE; in amdgpu_device_init_apu_flags()
1433 adev->apu_flags |= AMD_APU_IS_VANGOGH; in amdgpu_device_init_apu_flags()
1438 if ((adev->pdev->device == 0x13FE) || in amdgpu_device_init_apu_flags()
1439 (adev->pdev->device == 0x143F)) in amdgpu_device_init_apu_flags()
1440 adev->apu_flags |= AMD_APU_IS_CYAN_SKILLFISH2; in amdgpu_device_init_apu_flags()
1457 static int amdgpu_device_check_arguments(struct amdgpu_device *adev) in amdgpu_device_check_arguments() argument
1460 dev_warn(adev->dev, "sched jobs (%d) must be at least 4\n", in amdgpu_device_check_arguments()
1464 dev_warn(adev->dev, "sched jobs (%d) must be a power of 2\n", in amdgpu_device_check_arguments()
1471 dev_warn(adev->dev, "gart size (%d) too small\n", in amdgpu_device_check_arguments()
1478 dev_warn(adev->dev, "gtt size (%d) too small\n", in amdgpu_device_check_arguments()
1486 dev_warn(adev->dev, "valid range is between 4 and 9\n"); in amdgpu_device_check_arguments()
1491 dev_warn(adev->dev, "sched hw submission jobs (%d) must be at least 2\n", in amdgpu_device_check_arguments()
1495 dev_warn(adev->dev, "sched hw submission jobs (%d) must be a power of 2\n", in amdgpu_device_check_arguments()
1501 dev_warn(adev->dev, "invalid option for reset method, reverting to default\n"); in amdgpu_device_check_arguments()
1505 amdgpu_device_check_smu_prv_buffer_size(adev); in amdgpu_device_check_arguments()
1507 amdgpu_device_check_vm_size(adev); in amdgpu_device_check_arguments()
1509 amdgpu_device_check_block_size(adev); in amdgpu_device_check_arguments()
1511 adev->firmware.load_type = amdgpu_ucode_get_load_type(adev, amdgpu_fw_load_type); in amdgpu_device_check_arguments()
1601 struct amdgpu_device *adev = dev; in amdgpu_device_ip_set_clockgating_state() local
1604 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_set_clockgating_state()
1605 if (!adev->ip_blocks[i].status.valid) in amdgpu_device_ip_set_clockgating_state()
1607 if (adev->ip_blocks[i].version->type != block_type) in amdgpu_device_ip_set_clockgating_state()
1609 if (!adev->ip_blocks[i].version->funcs->set_clockgating_state) in amdgpu_device_ip_set_clockgating_state()
1611 r = adev->ip_blocks[i].version->funcs->set_clockgating_state( in amdgpu_device_ip_set_clockgating_state()
1612 (void *)adev, state); in amdgpu_device_ip_set_clockgating_state()
1615 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_ip_set_clockgating_state()
1635 struct amdgpu_device *adev = dev; in amdgpu_device_ip_set_powergating_state() local
1638 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_set_powergating_state()
1639 if (!adev->ip_blocks[i].status.valid) in amdgpu_device_ip_set_powergating_state()
1641 if (adev->ip_blocks[i].version->type != block_type) in amdgpu_device_ip_set_powergating_state()
1643 if (!adev->ip_blocks[i].version->funcs->set_powergating_state) in amdgpu_device_ip_set_powergating_state()
1645 r = adev->ip_blocks[i].version->funcs->set_powergating_state( in amdgpu_device_ip_set_powergating_state()
1646 (void *)adev, state); in amdgpu_device_ip_set_powergating_state()
1649 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_ip_set_powergating_state()
1665 void amdgpu_device_ip_get_clockgating_state(struct amdgpu_device *adev, in amdgpu_device_ip_get_clockgating_state() argument
1670 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_get_clockgating_state()
1671 if (!adev->ip_blocks[i].status.valid) in amdgpu_device_ip_get_clockgating_state()
1673 if (adev->ip_blocks[i].version->funcs->get_clockgating_state) in amdgpu_device_ip_get_clockgating_state()
1674 adev->ip_blocks[i].version->funcs->get_clockgating_state((void *)adev, flags); in amdgpu_device_ip_get_clockgating_state()
1687 int amdgpu_device_ip_wait_for_idle(struct amdgpu_device *adev, in amdgpu_device_ip_wait_for_idle() argument
1692 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_wait_for_idle()
1693 if (!adev->ip_blocks[i].status.valid) in amdgpu_device_ip_wait_for_idle()
1695 if (adev->ip_blocks[i].version->type == block_type) { in amdgpu_device_ip_wait_for_idle()
1696 r = adev->ip_blocks[i].version->funcs->wait_for_idle((void *)adev); in amdgpu_device_ip_wait_for_idle()
1715 bool amdgpu_device_ip_is_idle(struct amdgpu_device *adev, in amdgpu_device_ip_is_idle() argument
1720 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_is_idle()
1721 if (!adev->ip_blocks[i].status.valid) in amdgpu_device_ip_is_idle()
1723 if (adev->ip_blocks[i].version->type == block_type) in amdgpu_device_ip_is_idle()
1724 return adev->ip_blocks[i].version->funcs->is_idle((void *)adev); in amdgpu_device_ip_is_idle()
1740 amdgpu_device_ip_get_ip_block(struct amdgpu_device *adev, in amdgpu_device_ip_get_ip_block() argument
1745 for (i = 0; i < adev->num_ip_blocks; i++) in amdgpu_device_ip_get_ip_block()
1746 if (adev->ip_blocks[i].version->type == type) in amdgpu_device_ip_get_ip_block()
1747 return &adev->ip_blocks[i]; in amdgpu_device_ip_get_ip_block()
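amdgpu_device_ip_get_ip_block() is a linear scan of the ip_blocks array by type, returning NULL when nothing matches. A trivial model with illustrative block types:

#include <stddef.h>
#include <stdio.h>

enum block_type { BLOCK_COMMON, BLOCK_GMC, BLOCK_GFX };

struct ip_block { enum block_type type; const char *name; };

static struct ip_block blocks[] = {
    { BLOCK_COMMON, "common" }, { BLOCK_GMC, "gmc" }, { BLOCK_GFX, "gfx" },
};

static struct ip_block *get_ip_block(enum block_type type)
{
    for (size_t i = 0; i < sizeof(blocks) / sizeof(blocks[0]); i++)
        if (blocks[i].type == type)
            return &blocks[i];
    return NULL; /* no block of that type registered */
}

int main(void)
{
    struct ip_block *b = get_ip_block(BLOCK_GMC);
    printf("%s\n", b ? b->name : "none");
    return 0;
}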
1763 int amdgpu_device_ip_block_version_cmp(struct amdgpu_device *adev, in amdgpu_device_ip_block_version_cmp() argument
1767 struct amdgpu_ip_block *ip_block = amdgpu_device_ip_get_ip_block(adev, type); in amdgpu_device_ip_block_version_cmp()
1786 int amdgpu_device_ip_block_add(struct amdgpu_device *adev, in amdgpu_device_ip_block_add() argument
1794 if (adev->harvest_ip_mask & AMD_HARVEST_IP_VCN_MASK) in amdgpu_device_ip_block_add()
1798 if (adev->harvest_ip_mask & AMD_HARVEST_IP_JPEG_MASK) in amdgpu_device_ip_block_add()
1805 DRM_INFO("add ip block number %d <%s>\n", adev->num_ip_blocks, in amdgpu_device_ip_block_add()
1808 adev->ip_blocks[adev->num_ip_blocks++].version = ip_block_version; in amdgpu_device_ip_block_add()
1825 static void amdgpu_device_enable_virtual_display(struct amdgpu_device *adev) in amdgpu_device_enable_virtual_display() argument
1827 adev->enable_virtual_display = false; in amdgpu_device_enable_virtual_display()
1830 const char *pci_address_name = pci_name(adev->pdev); in amdgpu_device_enable_virtual_display()
1842 adev->enable_virtual_display = true; in amdgpu_device_enable_virtual_display()
1853 adev->mode_info.num_crtc = num_crtc; in amdgpu_device_enable_virtual_display()
1855 adev->mode_info.num_crtc = 1; in amdgpu_device_enable_virtual_display()
1863 adev->enable_virtual_display, adev->mode_info.num_crtc); in amdgpu_device_enable_virtual_display()
1869 void amdgpu_device_set_sriov_virtual_display(struct amdgpu_device *adev) in amdgpu_device_set_sriov_virtual_display() argument
1871 if (amdgpu_sriov_vf(adev) && !adev->enable_virtual_display) { in amdgpu_device_set_sriov_virtual_display()
1872 adev->mode_info.num_crtc = 1; in amdgpu_device_set_sriov_virtual_display()
1873 adev->enable_virtual_display = true; in amdgpu_device_set_sriov_virtual_display()
1875 adev->enable_virtual_display, adev->mode_info.num_crtc); in amdgpu_device_set_sriov_virtual_display()
1889 static int amdgpu_device_parse_gpu_info_fw(struct amdgpu_device *adev) in amdgpu_device_parse_gpu_info_fw() argument
1896 adev->firmware.gpu_info_fw = NULL; in amdgpu_device_parse_gpu_info_fw()
1898 if (adev->mman.discovery_bin) { in amdgpu_device_parse_gpu_info_fw()
1904 if (adev->asic_type != CHIP_NAVI12) in amdgpu_device_parse_gpu_info_fw()
1908 switch (adev->asic_type) { in amdgpu_device_parse_gpu_info_fw()
1918 if (adev->apu_flags & AMD_APU_IS_RAVEN2) in amdgpu_device_parse_gpu_info_fw()
1920 else if (adev->apu_flags & AMD_APU_IS_PICASSO) in amdgpu_device_parse_gpu_info_fw()
1934 err = amdgpu_ucode_request(adev, &adev->firmware.gpu_info_fw, fw_name); in amdgpu_device_parse_gpu_info_fw()
1936 dev_err(adev->dev, in amdgpu_device_parse_gpu_info_fw()
1942 hdr = (const struct gpu_info_firmware_header_v1_0 *)adev->firmware.gpu_info_fw->data; in amdgpu_device_parse_gpu_info_fw()
1949 (const struct gpu_info_firmware_v1_0 *)(adev->firmware.gpu_info_fw->data + in amdgpu_device_parse_gpu_info_fw()
1955 if (adev->asic_type == CHIP_NAVI12) in amdgpu_device_parse_gpu_info_fw()
1958 adev->gfx.config.max_shader_engines = le32_to_cpu(gpu_info_fw->gc_num_se); in amdgpu_device_parse_gpu_info_fw()
1959 adev->gfx.config.max_cu_per_sh = le32_to_cpu(gpu_info_fw->gc_num_cu_per_sh); in amdgpu_device_parse_gpu_info_fw()
1960 adev->gfx.config.max_sh_per_se = le32_to_cpu(gpu_info_fw->gc_num_sh_per_se); in amdgpu_device_parse_gpu_info_fw()
1961 adev->gfx.config.max_backends_per_se = le32_to_cpu(gpu_info_fw->gc_num_rb_per_se); in amdgpu_device_parse_gpu_info_fw()
1962 adev->gfx.config.max_texture_channel_caches = in amdgpu_device_parse_gpu_info_fw()
1964 adev->gfx.config.max_gprs = le32_to_cpu(gpu_info_fw->gc_num_gprs); in amdgpu_device_parse_gpu_info_fw()
1965 adev->gfx.config.max_gs_threads = le32_to_cpu(gpu_info_fw->gc_num_max_gs_thds); in amdgpu_device_parse_gpu_info_fw()
1966 adev->gfx.config.gs_vgt_table_depth = le32_to_cpu(gpu_info_fw->gc_gs_table_depth); in amdgpu_device_parse_gpu_info_fw()
1967 adev->gfx.config.gs_prim_buffer_depth = le32_to_cpu(gpu_info_fw->gc_gsprim_buff_depth); in amdgpu_device_parse_gpu_info_fw()
1968 adev->gfx.config.double_offchip_lds_buf = in amdgpu_device_parse_gpu_info_fw()
1970 adev->gfx.cu_info.wave_front_size = le32_to_cpu(gpu_info_fw->gc_wave_size); in amdgpu_device_parse_gpu_info_fw()
1971 adev->gfx.cu_info.max_waves_per_simd = in amdgpu_device_parse_gpu_info_fw()
1973 adev->gfx.cu_info.max_scratch_slots_per_cu = in amdgpu_device_parse_gpu_info_fw()
1975 adev->gfx.cu_info.lds_size = le32_to_cpu(gpu_info_fw->gc_lds_size); in amdgpu_device_parse_gpu_info_fw()
1978 (const struct gpu_info_firmware_v1_1 *)(adev->firmware.gpu_info_fw->data + in amdgpu_device_parse_gpu_info_fw()
1980 adev->gfx.config.num_sc_per_sh = in amdgpu_device_parse_gpu_info_fw()
1982 adev->gfx.config.num_packer_per_sc = in amdgpu_device_parse_gpu_info_fw()
1993 (const struct gpu_info_firmware_v1_2 *)(adev->firmware.gpu_info_fw->data + in amdgpu_device_parse_gpu_info_fw()
1995 adev->dm.soc_bounding_box = &gpu_info_fw->soc_bounding_box; in amdgpu_device_parse_gpu_info_fw()
2000 dev_err(adev->dev, in amdgpu_device_parse_gpu_info_fw()
2019 static int amdgpu_device_ip_early_init(struct amdgpu_device *adev) in amdgpu_device_ip_early_init() argument
2021 struct drm_device *dev = adev_to_drm(adev); in amdgpu_device_ip_early_init()
2026 amdgpu_device_enable_virtual_display(adev); in amdgpu_device_ip_early_init()
2028 if (amdgpu_sriov_vf(adev)) { in amdgpu_device_ip_early_init()
2029 r = amdgpu_virt_request_full_gpu(adev, true); in amdgpu_device_ip_early_init()
2034 switch (adev->asic_type) { in amdgpu_device_ip_early_init()
2041 adev->family = AMDGPU_FAMILY_SI; in amdgpu_device_ip_early_init()
2042 r = si_set_ip_blocks(adev); in amdgpu_device_ip_early_init()
2053 if (adev->flags & AMD_IS_APU) in amdgpu_device_ip_early_init()
2054 adev->family = AMDGPU_FAMILY_KV; in amdgpu_device_ip_early_init()
2056 adev->family = AMDGPU_FAMILY_CI; in amdgpu_device_ip_early_init()
2058 r = cik_set_ip_blocks(adev); in amdgpu_device_ip_early_init()
2072 if (adev->flags & AMD_IS_APU) in amdgpu_device_ip_early_init()
2073 adev->family = AMDGPU_FAMILY_CZ; in amdgpu_device_ip_early_init()
2075 adev->family = AMDGPU_FAMILY_VI; in amdgpu_device_ip_early_init()
2077 r = vi_set_ip_blocks(adev); in amdgpu_device_ip_early_init()
2082 r = amdgpu_discovery_set_ip_blocks(adev); in amdgpu_device_ip_early_init()
2091 ((adev->flags & AMD_IS_APU) == 0) && in amdgpu_device_ip_early_init()
2093 adev->flags |= AMD_IS_PX; in amdgpu_device_ip_early_init()
2095 if (!(adev->flags & AMD_IS_APU)) { in amdgpu_device_ip_early_init()
2096 parent = pcie_find_root_port(adev->pdev); in amdgpu_device_ip_early_init()
2097 adev->has_pr3 = parent ? pci_pr3_present(parent) : false; in amdgpu_device_ip_early_init()
2101 adev->pm.pp_feature = amdgpu_pp_feature_mask; in amdgpu_device_ip_early_init()
2102 if (amdgpu_sriov_vf(adev) || sched_policy == KFD_SCHED_POLICY_NO_HWS) in amdgpu_device_ip_early_init()
2103 adev->pm.pp_feature &= ~PP_GFXOFF_MASK; in amdgpu_device_ip_early_init()
2104 if (amdgpu_sriov_vf(adev) && adev->asic_type == CHIP_SIENNA_CICHLID) in amdgpu_device_ip_early_init()
2105 adev->pm.pp_feature &= ~PP_OVERDRIVE_MASK; in amdgpu_device_ip_early_init()
2108 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_early_init()
2111 i, adev->ip_blocks[i].version->funcs->name); in amdgpu_device_ip_early_init()
2112 adev->ip_blocks[i].status.valid = false; in amdgpu_device_ip_early_init()
2114 if (adev->ip_blocks[i].version->funcs->early_init) { in amdgpu_device_ip_early_init()
2115 r = adev->ip_blocks[i].version->funcs->early_init((void *)adev); in amdgpu_device_ip_early_init()
2117 adev->ip_blocks[i].status.valid = false; in amdgpu_device_ip_early_init()
2120 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_ip_early_init()
2123 adev->ip_blocks[i].status.valid = true; in amdgpu_device_ip_early_init()
2126 adev->ip_blocks[i].status.valid = true; in amdgpu_device_ip_early_init()
2130 if (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_COMMON) { in amdgpu_device_ip_early_init()
2131 r = amdgpu_device_parse_gpu_info_fw(adev); in amdgpu_device_ip_early_init()
2136 if (amdgpu_device_read_bios(adev)) { in amdgpu_device_ip_early_init()
2137 if (!amdgpu_get_bios(adev)) in amdgpu_device_ip_early_init()
2140 r = amdgpu_atombios_init(adev); in amdgpu_device_ip_early_init()
2142 dev_err(adev->dev, "amdgpu_atombios_init failed\n"); in amdgpu_device_ip_early_init()
2143 amdgpu_vf_error_put(adev, AMDGIM_ERROR_VF_ATOMBIOS_INIT_FAIL, 0, 0); in amdgpu_device_ip_early_init()
2149 if (amdgpu_sriov_vf(adev)) in amdgpu_device_ip_early_init()
2150 amdgpu_virt_init_data_exchange(adev); in amdgpu_device_ip_early_init()
2157 amdgpu_amdkfd_device_probe(adev); in amdgpu_device_ip_early_init()
2158 adev->cg_flags &= amdgpu_cg_mask; in amdgpu_device_ip_early_init()
2159 adev->pg_flags &= amdgpu_pg_mask; in amdgpu_device_ip_early_init()
2164 static int amdgpu_device_ip_hw_init_phase1(struct amdgpu_device *adev) in amdgpu_device_ip_hw_init_phase1() argument
2168 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_hw_init_phase1()
2169 if (!adev->ip_blocks[i].status.sw) in amdgpu_device_ip_hw_init_phase1()
2171 if (adev->ip_blocks[i].status.hw) in amdgpu_device_ip_hw_init_phase1()
2173 if (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_COMMON || in amdgpu_device_ip_hw_init_phase1()
2174 (amdgpu_sriov_vf(adev) && (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_PSP)) || in amdgpu_device_ip_hw_init_phase1()
2175 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_IH) { in amdgpu_device_ip_hw_init_phase1()
2176 r = adev->ip_blocks[i].version->funcs->hw_init(adev); in amdgpu_device_ip_hw_init_phase1()
2179 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_ip_hw_init_phase1()
2182 adev->ip_blocks[i].status.hw = true; in amdgpu_device_ip_hw_init_phase1()
2189 static int amdgpu_device_ip_hw_init_phase2(struct amdgpu_device *adev) in amdgpu_device_ip_hw_init_phase2() argument
2193 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_hw_init_phase2()
2194 if (!adev->ip_blocks[i].status.sw) in amdgpu_device_ip_hw_init_phase2()
2196 if (adev->ip_blocks[i].status.hw) in amdgpu_device_ip_hw_init_phase2()
2198 r = adev->ip_blocks[i].version->funcs->hw_init(adev); in amdgpu_device_ip_hw_init_phase2()
2201 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_ip_hw_init_phase2()
2204 adev->ip_blocks[i].status.hw = true; in amdgpu_device_ip_hw_init_phase2()
2210 static int amdgpu_device_fw_loading(struct amdgpu_device *adev) in amdgpu_device_fw_loading() argument
2216 if (adev->asic_type >= CHIP_VEGA10) { in amdgpu_device_fw_loading()
2217 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_fw_loading()
2218 if (adev->ip_blocks[i].version->type != AMD_IP_BLOCK_TYPE_PSP) in amdgpu_device_fw_loading()
2221 if (!adev->ip_blocks[i].status.sw) in amdgpu_device_fw_loading()
2225 if (adev->ip_blocks[i].status.hw == true) in amdgpu_device_fw_loading()
2228 if (amdgpu_in_reset(adev) || adev->in_suspend) { in amdgpu_device_fw_loading()
2229 r = adev->ip_blocks[i].version->funcs->resume(adev); in amdgpu_device_fw_loading()
2232 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_fw_loading()
2236 r = adev->ip_blocks[i].version->funcs->hw_init(adev); in amdgpu_device_fw_loading()
2239 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_fw_loading()
2244 adev->ip_blocks[i].status.hw = true; in amdgpu_device_fw_loading()
2249 if (!amdgpu_sriov_vf(adev) || adev->asic_type == CHIP_TONGA) in amdgpu_device_fw_loading()
2250 r = amdgpu_pm_load_smu_firmware(adev, &smu_version); in amdgpu_device_fw_loading()
2255 static int amdgpu_device_init_schedulers(struct amdgpu_device *adev) in amdgpu_device_init_schedulers() argument
2261 struct amdgpu_ring *ring = adev->rings[i]; in amdgpu_device_init_schedulers()
2269 timeout = adev->gfx_timeout; in amdgpu_device_init_schedulers()
2272 timeout = adev->compute_timeout; in amdgpu_device_init_schedulers()
2275 timeout = adev->sdma_timeout; in amdgpu_device_init_schedulers()
2278 timeout = adev->video_timeout; in amdgpu_device_init_schedulers()
2284 timeout, adev->reset_domain->wq, in amdgpu_device_init_schedulers()
2286 adev->dev); in amdgpu_device_init_schedulers()
2294 amdgpu_xcp_update_partition_sched_list(adev); in amdgpu_device_init_schedulers()
2311 static int amdgpu_device_ip_init(struct amdgpu_device *adev) in amdgpu_device_ip_init() argument
2315 r = amdgpu_ras_init(adev); in amdgpu_device_ip_init()
2319 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_init()
2320 if (!adev->ip_blocks[i].status.valid) in amdgpu_device_ip_init()
2322 r = adev->ip_blocks[i].version->funcs->sw_init((void *)adev); in amdgpu_device_ip_init()
2325 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_ip_init()
2328 adev->ip_blocks[i].status.sw = true; in amdgpu_device_ip_init()
2330 if (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_COMMON) { in amdgpu_device_ip_init()
2332 r = adev->ip_blocks[i].version->funcs->hw_init((void *)adev); in amdgpu_device_ip_init()
2337 adev->ip_blocks[i].status.hw = true; in amdgpu_device_ip_init()
2338 } else if (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_GMC) { in amdgpu_device_ip_init()
2341 if (amdgpu_sriov_vf(adev)) in amdgpu_device_ip_init()
2342 amdgpu_virt_exchange_data(adev); in amdgpu_device_ip_init()
2344 r = amdgpu_device_mem_scratch_init(adev); in amdgpu_device_ip_init()
2349 r = adev->ip_blocks[i].version->funcs->hw_init((void *)adev); in amdgpu_device_ip_init()
2354 r = amdgpu_device_wb_init(adev); in amdgpu_device_ip_init()
2359 adev->ip_blocks[i].status.hw = true; in amdgpu_device_ip_init()
2362 if (adev->gfx.mcbp) { in amdgpu_device_ip_init()
2363 r = amdgpu_allocate_static_csa(adev, &adev->virt.csa_obj, in amdgpu_device_ip_init()
2375 if (amdgpu_sriov_vf(adev)) in amdgpu_device_ip_init()
2376 amdgpu_virt_init_data_exchange(adev); in amdgpu_device_ip_init()
2378 r = amdgpu_ib_pool_init(adev); in amdgpu_device_ip_init()
2380 dev_err(adev->dev, "IB initialization failed (%d).\n", r); in amdgpu_device_ip_init()
2381 amdgpu_vf_error_put(adev, AMDGIM_ERROR_VF_IB_INIT_FAIL, 0, r); in amdgpu_device_ip_init()
2385 r = amdgpu_ucode_create_bo(adev); /* create ucode bo when sw_init complete*/ in amdgpu_device_ip_init()
2389 r = amdgpu_device_ip_hw_init_phase1(adev); in amdgpu_device_ip_init()
2393 r = amdgpu_device_fw_loading(adev); in amdgpu_device_ip_init()
2397 r = amdgpu_device_ip_hw_init_phase2(adev); in amdgpu_device_ip_init()
2416 r = amdgpu_ras_recovery_init(adev); in amdgpu_device_ip_init()
2423 if (adev->gmc.xgmi.num_physical_nodes > 1) { in amdgpu_device_ip_init()
2424 if (amdgpu_xgmi_add_device(adev) == 0) { in amdgpu_device_ip_init()
2425 if (!amdgpu_sriov_vf(adev)) { in amdgpu_device_ip_init()
2426 struct amdgpu_hive_info *hive = amdgpu_get_xgmi_hive(adev); in amdgpu_device_ip_init()
2441 amdgpu_reset_put_reset_domain(adev->reset_domain); in amdgpu_device_ip_init()
2442 adev->reset_domain = hive->reset_domain; in amdgpu_device_ip_init()
2448 r = amdgpu_device_init_schedulers(adev); in amdgpu_device_ip_init()
2453 if (!adev->gmc.xgmi.pending_reset) { in amdgpu_device_ip_init()
2454 kgd2kfd_init_zone_device(adev); in amdgpu_device_ip_init()
2455 amdgpu_amdkfd_device_init(adev); in amdgpu_device_ip_init()
2458 amdgpu_fru_get_product_info(adev); in amdgpu_device_ip_init()
2474 static void amdgpu_device_fill_reset_magic(struct amdgpu_device *adev) in amdgpu_device_fill_reset_magic() argument
2476 memcpy(adev->reset_magic, adev->gart.ptr, AMDGPU_RESET_MAGIC_NUM); in amdgpu_device_fill_reset_magic()
2489 static bool amdgpu_device_check_vram_lost(struct amdgpu_device *adev) in amdgpu_device_check_vram_lost() argument
2491 if (memcmp(adev->gart.ptr, adev->reset_magic, in amdgpu_device_check_vram_lost()
2495 if (!amdgpu_in_reset(adev)) in amdgpu_device_check_vram_lost()
2502 switch (amdgpu_asic_reset_method(adev)) { in amdgpu_device_check_vram_lost()
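amdgpu_device_fill_reset_magic()/check_vram_lost() detect VRAM loss by stashing the first AMDGPU_RESET_MAGIC_NUM bytes of the GART table and memcmp'ing them after reset. A userspace model with a simulated VRAM page:

#include <string.h>
#include <stdio.h>

#define MAGIC_NUM 64 /* mirrors AMDGPU_RESET_MAGIC_NUM */

static unsigned char vram_page[4096];
static unsigned char reset_magic[MAGIC_NUM];

static void fill_reset_magic(void)
{
    memcpy(reset_magic, vram_page, MAGIC_NUM);
}

static int vram_lost(void)
{
    return memcmp(vram_page, reset_magic, MAGIC_NUM) != 0;
}

int main(void)
{
    memset(vram_page, 0xA5, sizeof(vram_page));
    fill_reset_magic();
    memset(vram_page, 0, sizeof(vram_page)); /* simulate reset wiping VRAM */
    printf("vram lost: %d\n", vram_lost());
    return 0;
}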
2524 int amdgpu_device_set_cg_state(struct amdgpu_device *adev, in amdgpu_device_set_cg_state() argument
2532 for (j = 0; j < adev->num_ip_blocks; j++) { in amdgpu_device_set_cg_state()
2533 i = state == AMD_CG_STATE_GATE ? j : adev->num_ip_blocks - j - 1; in amdgpu_device_set_cg_state()
2534 if (!adev->ip_blocks[i].status.late_initialized) in amdgpu_device_set_cg_state()
2537 if (adev->in_s0ix && in amdgpu_device_set_cg_state()
2538 (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_GFX || in amdgpu_device_set_cg_state()
2539 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_SDMA)) in amdgpu_device_set_cg_state()
2542 if (adev->ip_blocks[i].version->type != AMD_IP_BLOCK_TYPE_UVD && in amdgpu_device_set_cg_state()
2543 adev->ip_blocks[i].version->type != AMD_IP_BLOCK_TYPE_VCE && in amdgpu_device_set_cg_state()
2544 adev->ip_blocks[i].version->type != AMD_IP_BLOCK_TYPE_VCN && in amdgpu_device_set_cg_state()
2545 adev->ip_blocks[i].version->type != AMD_IP_BLOCK_TYPE_JPEG && in amdgpu_device_set_cg_state()
2546 adev->ip_blocks[i].version->funcs->set_clockgating_state) { in amdgpu_device_set_cg_state()
2548 r = adev->ip_blocks[i].version->funcs->set_clockgating_state((void *)adev, in amdgpu_device_set_cg_state()
2552 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_set_cg_state()
2561 int amdgpu_device_set_pg_state(struct amdgpu_device *adev, in amdgpu_device_set_pg_state() argument
2569 for (j = 0; j < adev->num_ip_blocks; j++) { in amdgpu_device_set_pg_state()
2570 i = state == AMD_PG_STATE_GATE ? j : adev->num_ip_blocks - j - 1; in amdgpu_device_set_pg_state()
2571 if (!adev->ip_blocks[i].status.late_initialized) in amdgpu_device_set_pg_state()
2574 if (adev->in_s0ix && in amdgpu_device_set_pg_state()
2575 (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_GFX || in amdgpu_device_set_pg_state()
2576 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_SDMA)) in amdgpu_device_set_pg_state()
2579 if (adev->ip_blocks[i].version->type != AMD_IP_BLOCK_TYPE_UVD && in amdgpu_device_set_pg_state()
2580 adev->ip_blocks[i].version->type != AMD_IP_BLOCK_TYPE_VCE && in amdgpu_device_set_pg_state()
2581 adev->ip_blocks[i].version->type != AMD_IP_BLOCK_TYPE_VCN && in amdgpu_device_set_pg_state()
2582 adev->ip_blocks[i].version->type != AMD_IP_BLOCK_TYPE_JPEG && in amdgpu_device_set_pg_state()
2583 adev->ip_blocks[i].version->funcs->set_powergating_state) { in amdgpu_device_set_pg_state()
2585 r = adev->ip_blocks[i].version->funcs->set_powergating_state((void *)adev, in amdgpu_device_set_pg_state()
2589 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_set_pg_state()
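amdgpu_device_set_cg_state() and set_pg_state() walk the IP block array in direction-dependent order, front-to-back when gating and back-to-front when ungating, so dependencies unwind symmetrically (note the i = state == GATE ? j : num_ip_blocks - j - 1 index in the matches). A toy model of that ordering with made-up block names:

#include <stdio.h>

enum state { GATE, UNGATE };

int main(void)
{
    const char *blocks[] = { "COMMON", "GMC", "GFX", "SDMA" };
    int n = 4;

    for (int dir = GATE; dir <= UNGATE; dir++) {
        printf("%s order:", dir == GATE ? "gate" : "ungate");
        for (int j = 0; j < n; j++) {
            /* mirrors: i = state == GATE ? j : num_ip_blocks - j - 1 */
            int i = (dir == GATE) ? j : n - j - 1;
            printf(" %s", blocks[i]);
        }
        printf("\n");
    }
    return 0;
}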
2600 struct amdgpu_device *adev; in amdgpu_device_enable_mgpu_fan_boost() local
2615 adev = gpu_ins->adev; in amdgpu_device_enable_mgpu_fan_boost()
2616 if (!(adev->flags & AMD_IS_APU) && in amdgpu_device_enable_mgpu_fan_boost()
2618 ret = amdgpu_dpm_enable_mgpu_fan_boost(adev); in amdgpu_device_enable_mgpu_fan_boost()
2644 static int amdgpu_device_ip_late_init(struct amdgpu_device *adev) in amdgpu_device_ip_late_init() argument
2649 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_late_init()
2650 if (!adev->ip_blocks[i].status.hw) in amdgpu_device_ip_late_init()
2652 if (adev->ip_blocks[i].version->funcs->late_init) { in amdgpu_device_ip_late_init()
2653 r = adev->ip_blocks[i].version->funcs->late_init((void *)adev); in amdgpu_device_ip_late_init()
2656 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_ip_late_init()
2660 adev->ip_blocks[i].status.late_initialized = true; in amdgpu_device_ip_late_init()
2663 r = amdgpu_ras_late_init(adev); in amdgpu_device_ip_late_init()
2669 amdgpu_ras_set_error_query_ready(adev, true); in amdgpu_device_ip_late_init()
2671 amdgpu_device_set_cg_state(adev, AMD_CG_STATE_GATE); in amdgpu_device_ip_late_init()
2672 amdgpu_device_set_pg_state(adev, AMD_PG_STATE_GATE); in amdgpu_device_ip_late_init()
2674 amdgpu_device_fill_reset_magic(adev); in amdgpu_device_ip_late_init()
2681 if (amdgpu_passthrough(adev) && in amdgpu_device_ip_late_init()
2682 ((adev->asic_type == CHIP_ARCTURUS && adev->gmc.xgmi.num_physical_nodes > 1) || in amdgpu_device_ip_late_init()
2683 adev->asic_type == CHIP_ALDEBARAN)) in amdgpu_device_ip_late_init()
2684 amdgpu_dpm_handle_passthrough_sbr(adev, true); in amdgpu_device_ip_late_init()
2686 if (adev->gmc.xgmi.num_physical_nodes > 1) { in amdgpu_device_ip_late_init()
2702 if (mgpu_info.num_dgpu == adev->gmc.xgmi.num_physical_nodes) { in amdgpu_device_ip_late_init()
2705 if (gpu_instance->adev->flags & AMD_IS_APU) in amdgpu_device_ip_late_init()
2708 r = amdgpu_xgmi_set_pstate(gpu_instance->adev, in amdgpu_device_ip_late_init()
2730 static void amdgpu_device_smu_fini_early(struct amdgpu_device *adev) in amdgpu_device_smu_fini_early() argument
2734 if (adev->ip_versions[GC_HWIP][0] > IP_VERSION(9, 0, 0)) in amdgpu_device_smu_fini_early()
2737 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_smu_fini_early()
2738 if (!adev->ip_blocks[i].status.hw) in amdgpu_device_smu_fini_early()
2740 if (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_SMC) { in amdgpu_device_smu_fini_early()
2741 r = adev->ip_blocks[i].version->funcs->hw_fini((void *)adev); in amdgpu_device_smu_fini_early()
2745 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_smu_fini_early()
2747 adev->ip_blocks[i].status.hw = false; in amdgpu_device_smu_fini_early()
2753 static int amdgpu_device_ip_fini_early(struct amdgpu_device *adev) in amdgpu_device_ip_fini_early() argument
2757 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_fini_early()
2758 if (!adev->ip_blocks[i].version->funcs->early_fini) in amdgpu_device_ip_fini_early()
2761 r = adev->ip_blocks[i].version->funcs->early_fini((void *)adev); in amdgpu_device_ip_fini_early()
2764 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_ip_fini_early()
2768 amdgpu_device_set_pg_state(adev, AMD_PG_STATE_UNGATE); in amdgpu_device_ip_fini_early()
2769 amdgpu_device_set_cg_state(adev, AMD_CG_STATE_UNGATE); in amdgpu_device_ip_fini_early()
2771 amdgpu_amdkfd_suspend(adev, false); in amdgpu_device_ip_fini_early()
2774 amdgpu_device_smu_fini_early(adev); in amdgpu_device_ip_fini_early()
2776 for (i = adev->num_ip_blocks - 1; i >= 0; i--) { in amdgpu_device_ip_fini_early()
2777 if (!adev->ip_blocks[i].status.hw) in amdgpu_device_ip_fini_early()
2780 r = adev->ip_blocks[i].version->funcs->hw_fini((void *)adev); in amdgpu_device_ip_fini_early()
2784 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_ip_fini_early()
2787 adev->ip_blocks[i].status.hw = false; in amdgpu_device_ip_fini_early()
2790 if (amdgpu_sriov_vf(adev)) { in amdgpu_device_ip_fini_early()
2791 if (amdgpu_virt_release_full_gpu(adev, false)) in amdgpu_device_ip_fini_early()
2809 static int amdgpu_device_ip_fini(struct amdgpu_device *adev) in amdgpu_device_ip_fini() argument
2813 if (amdgpu_sriov_vf(adev) && adev->virt.ras_init_done) in amdgpu_device_ip_fini()
2814 amdgpu_virt_release_ras_err_handler_data(adev); in amdgpu_device_ip_fini()
2816 if (adev->gmc.xgmi.num_physical_nodes > 1) in amdgpu_device_ip_fini()
2817 amdgpu_xgmi_remove_device(adev); in amdgpu_device_ip_fini()
2819 amdgpu_amdkfd_device_fini_sw(adev); in amdgpu_device_ip_fini()
2821 for (i = adev->num_ip_blocks - 1; i >= 0; i--) { in amdgpu_device_ip_fini()
2822 if (!adev->ip_blocks[i].status.sw) in amdgpu_device_ip_fini()
2825 if (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_GMC) { in amdgpu_device_ip_fini()
2826 amdgpu_ucode_free_bo(adev); in amdgpu_device_ip_fini()
2827 amdgpu_free_static_csa(&adev->virt.csa_obj); in amdgpu_device_ip_fini()
2828 amdgpu_device_wb_fini(adev); in amdgpu_device_ip_fini()
2829 amdgpu_device_mem_scratch_fini(adev); in amdgpu_device_ip_fini()
2830 amdgpu_ib_pool_fini(adev); in amdgpu_device_ip_fini()
2833 r = adev->ip_blocks[i].version->funcs->sw_fini((void *)adev); in amdgpu_device_ip_fini()
2837 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_ip_fini()
2839 adev->ip_blocks[i].status.sw = false; in amdgpu_device_ip_fini()
2840 adev->ip_blocks[i].status.valid = false; in amdgpu_device_ip_fini()
2843 for (i = adev->num_ip_blocks - 1; i >= 0; i--) { in amdgpu_device_ip_fini()
2844 if (!adev->ip_blocks[i].status.late_initialized) in amdgpu_device_ip_fini()
2846 if (adev->ip_blocks[i].version->funcs->late_fini) in amdgpu_device_ip_fini()
2847 adev->ip_blocks[i].version->funcs->late_fini((void *)adev); in amdgpu_device_ip_fini()
2848 adev->ip_blocks[i].status.late_initialized = false; in amdgpu_device_ip_fini()
2851 amdgpu_ras_fini(adev); in amdgpu_device_ip_fini()
2863 struct amdgpu_device *adev = in amdgpu_device_delayed_init_work_handler() local
2867 r = amdgpu_ib_ring_tests(adev); in amdgpu_device_delayed_init_work_handler()
2874 struct amdgpu_device *adev = in amdgpu_device_delay_enable_gfx_off() local
2877 WARN_ON_ONCE(adev->gfx.gfx_off_state); in amdgpu_device_delay_enable_gfx_off()
2878 WARN_ON_ONCE(adev->gfx.gfx_off_req_count); in amdgpu_device_delay_enable_gfx_off()
2880 if (!amdgpu_dpm_set_powergating_by_smu(adev, AMD_IP_BLOCK_TYPE_GFX, true)) in amdgpu_device_delay_enable_gfx_off()
2881 adev->gfx.gfx_off_state = true; in amdgpu_device_delay_enable_gfx_off()
2895 static int amdgpu_device_ip_suspend_phase1(struct amdgpu_device *adev) in amdgpu_device_ip_suspend_phase1() argument
2899 amdgpu_device_set_pg_state(adev, AMD_PG_STATE_UNGATE); in amdgpu_device_ip_suspend_phase1()
2900 amdgpu_device_set_cg_state(adev, AMD_CG_STATE_UNGATE); in amdgpu_device_ip_suspend_phase1()
2907 if (amdgpu_dpm_set_df_cstate(adev, DF_CSTATE_DISALLOW)) in amdgpu_device_ip_suspend_phase1()
2908 dev_warn(adev->dev, "Failed to disallow df cstate"); in amdgpu_device_ip_suspend_phase1()
2910 for (i = adev->num_ip_blocks - 1; i >= 0; i--) { in amdgpu_device_ip_suspend_phase1()
2911 if (!adev->ip_blocks[i].status.valid) in amdgpu_device_ip_suspend_phase1()
2915 if (adev->ip_blocks[i].version->type != AMD_IP_BLOCK_TYPE_DCE) in amdgpu_device_ip_suspend_phase1()
2919 r = adev->ip_blocks[i].version->funcs->suspend(adev); in amdgpu_device_ip_suspend_phase1()
2923 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_ip_suspend_phase1()
2927 adev->ip_blocks[i].status.hw = false; in amdgpu_device_ip_suspend_phase1()
2944 static int amdgpu_device_ip_suspend_phase2(struct amdgpu_device *adev) in amdgpu_device_ip_suspend_phase2() argument
2948 if (adev->in_s0ix) in amdgpu_device_ip_suspend_phase2()
2949 amdgpu_dpm_gfx_state_change(adev, sGpuChangeState_D3Entry); in amdgpu_device_ip_suspend_phase2()
2951 for (i = adev->num_ip_blocks - 1; i >= 0; i--) { in amdgpu_device_ip_suspend_phase2()
2952 if (!adev->ip_blocks[i].status.valid) in amdgpu_device_ip_suspend_phase2()
2955 if (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_DCE) in amdgpu_device_ip_suspend_phase2()
2959 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_PSP) { in amdgpu_device_ip_suspend_phase2()
2960 adev->ip_blocks[i].status.hw = false; in amdgpu_device_ip_suspend_phase2()
2965 if (adev->gmc.xgmi.pending_reset && in amdgpu_device_ip_suspend_phase2()
2966 !(adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_GMC || in amdgpu_device_ip_suspend_phase2()
2967 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_SMC || in amdgpu_device_ip_suspend_phase2()
2968 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_COMMON || in amdgpu_device_ip_suspend_phase2()
2969 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_IH)) { in amdgpu_device_ip_suspend_phase2()
2970 adev->ip_blocks[i].status.hw = false; in amdgpu_device_ip_suspend_phase2()
2979 if (adev->in_s0ix && in amdgpu_device_ip_suspend_phase2()
2980 (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_PSP || in amdgpu_device_ip_suspend_phase2()
2981 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_GFX || in amdgpu_device_ip_suspend_phase2()
2982 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_MES)) in amdgpu_device_ip_suspend_phase2()
2986 if (adev->in_s0ix && in amdgpu_device_ip_suspend_phase2()
2987 (adev->ip_versions[SDMA0_HWIP][0] >= IP_VERSION(5, 0, 0)) && in amdgpu_device_ip_suspend_phase2()
2988 (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_SDMA)) in amdgpu_device_ip_suspend_phase2()
2998 if (amdgpu_in_reset(adev) && in amdgpu_device_ip_suspend_phase2()
2999 (adev->flags & AMD_IS_APU) && adev->gfx.imu.funcs && in amdgpu_device_ip_suspend_phase2()
3000 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_PSP) in amdgpu_device_ip_suspend_phase2()
3004 r = adev->ip_blocks[i].version->funcs->suspend(adev); in amdgpu_device_ip_suspend_phase2()
3008 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_ip_suspend_phase2()
3010 adev->ip_blocks[i].status.hw = false; in amdgpu_device_ip_suspend_phase2()
3012 if (!amdgpu_sriov_vf(adev)) { in amdgpu_device_ip_suspend_phase2()
3013 if (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_SMC) { in amdgpu_device_ip_suspend_phase2()
3014 r = amdgpu_dpm_set_mp1_state(adev, adev->mp1_state); in amdgpu_device_ip_suspend_phase2()
3017 adev->mp1_state, r); in amdgpu_device_ip_suspend_phase2()
3038 int amdgpu_device_ip_suspend(struct amdgpu_device *adev) in amdgpu_device_ip_suspend() argument
3042 if (amdgpu_sriov_vf(adev)) { in amdgpu_device_ip_suspend()
3043 amdgpu_virt_fini_data_exchange(adev); in amdgpu_device_ip_suspend()
3044 amdgpu_virt_request_full_gpu(adev, false); in amdgpu_device_ip_suspend()
3047 r = amdgpu_device_ip_suspend_phase1(adev); in amdgpu_device_ip_suspend()
3050 r = amdgpu_device_ip_suspend_phase2(adev); in amdgpu_device_ip_suspend()
3052 if (amdgpu_sriov_vf(adev)) in amdgpu_device_ip_suspend()
3053 amdgpu_virt_release_full_gpu(adev, false); in amdgpu_device_ip_suspend()
3058 static int amdgpu_device_ip_reinit_early_sriov(struct amdgpu_device *adev) in amdgpu_device_ip_reinit_early_sriov() argument
3069 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_reinit_early_sriov()
3073 block = &adev->ip_blocks[i]; in amdgpu_device_ip_reinit_early_sriov()
3082 r = block->version->funcs->hw_init(adev); in amdgpu_device_ip_reinit_early_sriov()
3093 static int amdgpu_device_ip_reinit_late_sriov(struct amdgpu_device *adev) in amdgpu_device_ip_reinit_late_sriov() argument
3113 for (j = 0; j < adev->num_ip_blocks; j++) { in amdgpu_device_ip_reinit_late_sriov()
3114 block = &adev->ip_blocks[j]; in amdgpu_device_ip_reinit_late_sriov()
3122 r = block->version->funcs->resume(adev); in amdgpu_device_ip_reinit_late_sriov()
3124 r = block->version->funcs->hw_init(adev); in amdgpu_device_ip_reinit_late_sriov()
3148 static int amdgpu_device_ip_resume_phase1(struct amdgpu_device *adev) in amdgpu_device_ip_resume_phase1() argument
3152 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_resume_phase1()
3153 if (!adev->ip_blocks[i].status.valid || adev->ip_blocks[i].status.hw) in amdgpu_device_ip_resume_phase1()
3155 if (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_COMMON || in amdgpu_device_ip_resume_phase1()
3156 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_GMC || in amdgpu_device_ip_resume_phase1()
3157 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_IH || in amdgpu_device_ip_resume_phase1()
3158 (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_PSP && amdgpu_sriov_vf(adev))) { in amdgpu_device_ip_resume_phase1()
3160 r = adev->ip_blocks[i].version->funcs->resume(adev); in amdgpu_device_ip_resume_phase1()
3163 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_ip_resume_phase1()
3166 adev->ip_blocks[i].status.hw = true; in amdgpu_device_ip_resume_phase1()
3186 static int amdgpu_device_ip_resume_phase2(struct amdgpu_device *adev) in amdgpu_device_ip_resume_phase2() argument
3190 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_resume_phase2()
3191 if (!adev->ip_blocks[i].status.valid || adev->ip_blocks[i].status.hw) in amdgpu_device_ip_resume_phase2()
3193 if (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_COMMON || in amdgpu_device_ip_resume_phase2()
3194 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_GMC || in amdgpu_device_ip_resume_phase2()
3195 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_IH || in amdgpu_device_ip_resume_phase2()
3196 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_PSP) in amdgpu_device_ip_resume_phase2()
3198 r = adev->ip_blocks[i].version->funcs->resume(adev); in amdgpu_device_ip_resume_phase2()
3201 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_ip_resume_phase2()
3204 adev->ip_blocks[i].status.hw = true; in amdgpu_device_ip_resume_phase2()
3222 static int amdgpu_device_ip_resume(struct amdgpu_device *adev) in amdgpu_device_ip_resume() argument
3226 r = amdgpu_device_ip_resume_phase1(adev); in amdgpu_device_ip_resume()
3230 r = amdgpu_device_fw_loading(adev); in amdgpu_device_ip_resume()
3234 r = amdgpu_device_ip_resume_phase2(adev); in amdgpu_device_ip_resume()
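Note: the two resume phases above walk adev->ip_blocks[] with the same skip test (!status.valid, or status.hw already set) but complementary type filters: phase 1 brings up COMMON, GMC and IH (plus PSP on SR-IOV), phase 2 resumes everything else, and both mark status.hw = true on success. amdgpu_device_ip_resume() sequences them around firmware loading. A minimal sketch of the top-level ordering, matching the fragments:

static int amdgpu_device_ip_resume(struct amdgpu_device *adev)
{
	int r;

	r = amdgpu_device_ip_resume_phase1(adev);	/* COMMON, GMC, IH (PSP on SR-IOV) */
	if (r)
		return r;

	r = amdgpu_device_fw_loading(adev);		/* microcode goes in between the phases */
	if (r)
		return r;

	return amdgpu_device_ip_resume_phase2(adev);	/* all remaining IP blocks */
}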
3246 static void amdgpu_device_detect_sriov_bios(struct amdgpu_device *adev) in amdgpu_device_detect_sriov_bios() argument
3248 if (amdgpu_sriov_vf(adev)) { in amdgpu_device_detect_sriov_bios()
3249 if (adev->is_atom_fw) { in amdgpu_device_detect_sriov_bios()
3250 if (amdgpu_atomfirmware_gpu_virtualization_supported(adev)) in amdgpu_device_detect_sriov_bios()
3251 adev->virt.caps |= AMDGPU_SRIOV_CAPS_SRIOV_VBIOS; in amdgpu_device_detect_sriov_bios()
3253 if (amdgpu_atombios_has_gpu_virtualization_table(adev)) in amdgpu_device_detect_sriov_bios()
3254 adev->virt.caps |= AMDGPU_SRIOV_CAPS_SRIOV_VBIOS; in amdgpu_device_detect_sriov_bios()
3257 if (!(adev->virt.caps & AMDGPU_SRIOV_CAPS_SRIOV_VBIOS)) in amdgpu_device_detect_sriov_bios()
3258 amdgpu_vf_error_put(adev, AMDGIM_ERROR_VF_NO_VBIOS, 0, 0); in amdgpu_device_detect_sriov_bios()
3326 bool amdgpu_device_has_dc_support(struct amdgpu_device *adev) in amdgpu_device_has_dc_support() argument
3328 if (adev->enable_virtual_display || in amdgpu_device_has_dc_support()
3329 (adev->harvest_ip_mask & AMD_HARVEST_IP_DMU_MASK)) in amdgpu_device_has_dc_support()
3332 return amdgpu_device_asic_has_dc_support(adev->asic_type); in amdgpu_device_has_dc_support()
3337 struct amdgpu_device *adev = in amdgpu_device_xgmi_reset_func() local
3339 struct amdgpu_hive_info *hive = amdgpu_get_xgmi_hive(adev); in amdgpu_device_xgmi_reset_func()
3351 if (amdgpu_asic_reset_method(adev) == AMD_RESET_METHOD_BACO) { in amdgpu_device_xgmi_reset_func()
3354 adev->asic_reset_res = amdgpu_device_baco_enter(adev_to_drm(adev)); in amdgpu_device_xgmi_reset_func()
3356 if (adev->asic_reset_res) in amdgpu_device_xgmi_reset_func()
3360 adev->asic_reset_res = amdgpu_device_baco_exit(adev_to_drm(adev)); in amdgpu_device_xgmi_reset_func()
3362 if (adev->asic_reset_res) in amdgpu_device_xgmi_reset_func()
3365 if (adev->mmhub.ras && adev->mmhub.ras->ras_block.hw_ops && in amdgpu_device_xgmi_reset_func()
3366 adev->mmhub.ras->ras_block.hw_ops->reset_ras_error_count) in amdgpu_device_xgmi_reset_func()
3367 adev->mmhub.ras->ras_block.hw_ops->reset_ras_error_count(adev); in amdgpu_device_xgmi_reset_func()
3371 adev->asic_reset_res = amdgpu_asic_reset(adev); in amdgpu_device_xgmi_reset_func()
3375 if (adev->asic_reset_res) in amdgpu_device_xgmi_reset_func()
3377 adev->asic_reset_res, adev_to_drm(adev)->unique); in amdgpu_device_xgmi_reset_func()
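Note: amdgpu_device_xgmi_reset_func() is the work item behind adev->xgmi_reset_work, used when every node of an XGMI hive must be reset in lockstep. The fragments show its two paths: a BACO enter/exit cycle (clearing the MMHUB RAS error counters on success) or a plain amdgpu_asic_reset(). A condensed sketch with the hive/task-barrier synchronization elided (those lines do not reference adev, so they are absent from this listing); the container_of() assignment is likewise filled in from the upstream driver:

static void amdgpu_device_xgmi_reset_func(struct work_struct *work)
{
	struct amdgpu_device *adev =
		container_of(work, struct amdgpu_device, xgmi_reset_work);

	if (amdgpu_asic_reset_method(adev) == AMD_RESET_METHOD_BACO) {
		adev->asic_reset_res = amdgpu_device_baco_enter(adev_to_drm(adev));
		if (!adev->asic_reset_res)
			adev->asic_reset_res = amdgpu_device_baco_exit(adev_to_drm(adev));

		/* a clean BACO cycle invalidates the MMHUB RAS error counters */
		if (!adev->asic_reset_res &&
		    adev->mmhub.ras && adev->mmhub.ras->ras_block.hw_ops &&
		    adev->mmhub.ras->ras_block.hw_ops->reset_ras_error_count)
			adev->mmhub.ras->ras_block.hw_ops->reset_ras_error_count(adev);
	} else {
		adev->asic_reset_res = amdgpu_asic_reset(adev);
	}

	/* on failure the driver warns with asic_reset_res and
	 * adev_to_drm(adev)->unique; the format string is elided in the listing */
}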
3381 static int amdgpu_device_get_job_timeout_settings(struct amdgpu_device *adev) in amdgpu_device_get_job_timeout_settings() argument
3395 adev->gfx_timeout = msecs_to_jiffies(10000); in amdgpu_device_get_job_timeout_settings()
3396 adev->sdma_timeout = adev->video_timeout = adev->gfx_timeout; in amdgpu_device_get_job_timeout_settings()
3397 if (amdgpu_sriov_vf(adev)) in amdgpu_device_get_job_timeout_settings()
3398 adev->compute_timeout = amdgpu_sriov_is_pp_one_vf(adev) ? in amdgpu_device_get_job_timeout_settings()
3401 adev->compute_timeout = msecs_to_jiffies(60000); in amdgpu_device_get_job_timeout_settings()
3415 dev_warn(adev->dev, "lockup timeout disabled"); in amdgpu_device_get_job_timeout_settings()
3423 adev->gfx_timeout = timeout; in amdgpu_device_get_job_timeout_settings()
3426 adev->compute_timeout = timeout; in amdgpu_device_get_job_timeout_settings()
3429 adev->sdma_timeout = timeout; in amdgpu_device_get_job_timeout_settings()
3432 adev->video_timeout = timeout; in amdgpu_device_get_job_timeout_settings()
3443 adev->sdma_timeout = adev->video_timeout = adev->gfx_timeout; in amdgpu_device_get_job_timeout_settings()
3444 if (amdgpu_sriov_vf(adev) || amdgpu_passthrough(adev)) in amdgpu_device_get_job_timeout_settings()
3445 adev->compute_timeout = adev->gfx_timeout; in amdgpu_device_get_job_timeout_settings()
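Note: amdgpu_device_get_job_timeout_settings() seeds the per-ring-type job timeouts before parsing the lockup_timeout= module parameter: gfx/sdma/video default to 10 seconds and compute to 60 seconds, parsed values then override gfx, compute, sdma and video in turn (a non-positive value disables the timeout with the "lockup timeout disabled" warning), and SR-IOV or passthrough finally forces compute to follow gfx. A sketch of the defaults block only; the one-VF vs. many-VF values in the SR-IOV conditional are filled in from the upstream driver, since the listing truncates that expression:

/* defaults, applied before lockup_timeout= is parsed */
adev->gfx_timeout = msecs_to_jiffies(10000);
adev->sdma_timeout = adev->video_timeout = adev->gfx_timeout;
if (amdgpu_sriov_vf(adev))
	adev->compute_timeout = amdgpu_sriov_is_pp_one_vf(adev) ?
				msecs_to_jiffies(60000) :
				msecs_to_jiffies(10000);
else
	adev->compute_timeout = msecs_to_jiffies(60000);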
3459 static void amdgpu_device_check_iommu_direct_map(struct amdgpu_device *adev) in amdgpu_device_check_iommu_direct_map() argument
3463 domain = iommu_get_domain_for_dev(adev->dev); in amdgpu_device_check_iommu_direct_map()
3465 adev->ram_is_direct_mapped = true; in amdgpu_device_check_iommu_direct_map()
3473 static void amdgpu_device_set_mcbp(struct amdgpu_device *adev) in amdgpu_device_set_mcbp() argument
3476 adev->gfx.mcbp = true; in amdgpu_device_set_mcbp()
3478 adev->gfx.mcbp = false; in amdgpu_device_set_mcbp()
3479 else if ((adev->ip_versions[GC_HWIP][0] >= IP_VERSION(9, 0, 0)) && in amdgpu_device_set_mcbp()
3480 (adev->ip_versions[GC_HWIP][0] < IP_VERSION(10, 0, 0)) && in amdgpu_device_set_mcbp()
3481 adev->gfx.num_gfx_rings) in amdgpu_device_set_mcbp()
3482 adev->gfx.mcbp = true; in amdgpu_device_set_mcbp()
3484 if (amdgpu_sriov_vf(adev)) in amdgpu_device_set_mcbp()
3485 adev->gfx.mcbp = true; in amdgpu_device_set_mcbp()
3487 if (adev->gfx.mcbp) in amdgpu_device_set_mcbp()
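Note: amdgpu_device_set_mcbp() decides whether mid-command-buffer preemption is used. The two truncated branches at the top test the amdgpu_mcbp module parameter (force on/off; that condition is an assumption filled in from the upstream driver, since the listing omits it), GFX9 parts with at least one gfx ring default to enabled, and SR-IOV always forces it on. Sketch:

static void amdgpu_device_set_mcbp(struct amdgpu_device *adev)
{
	if (amdgpu_mcbp == 1)			/* module parameter: force on */
		adev->gfx.mcbp = true;
	else if (amdgpu_mcbp == 0)		/* module parameter: force off */
		adev->gfx.mcbp = false;
	else if ((adev->ip_versions[GC_HWIP][0] >= IP_VERSION(9, 0, 0)) &&
		 (adev->ip_versions[GC_HWIP][0] < IP_VERSION(10, 0, 0)) &&
		 adev->gfx.num_gfx_rings)	/* GFX9 with gfx rings: on by default */
		adev->gfx.mcbp = true;

	if (amdgpu_sriov_vf(adev))		/* virtualization requires MCBP */
		adev->gfx.mcbp = true;

	/* the body of the final if (adev->gfx.mcbp) branch, elided in the
	 * listing, logs that MCBP is enabled */
}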
3501 int amdgpu_device_init(struct amdgpu_device *adev, in amdgpu_device_init() argument
3504 struct drm_device *ddev = adev_to_drm(adev); in amdgpu_device_init()
3505 struct pci_dev *pdev = adev->pdev; in amdgpu_device_init()
3511 adev->shutdown = false; in amdgpu_device_init()
3512 adev->flags = flags; in amdgpu_device_init()
3515 adev->asic_type = amdgpu_force_asic_type; in amdgpu_device_init()
3517 adev->asic_type = flags & AMD_ASIC_MASK; in amdgpu_device_init()
3519 adev->usec_timeout = AMDGPU_MAX_USEC_TIMEOUT; in amdgpu_device_init()
3521 adev->usec_timeout *= 10; in amdgpu_device_init()
3522 adev->gmc.gart_size = 512 * 1024 * 1024; in amdgpu_device_init()
3523 adev->accel_working = false; in amdgpu_device_init()
3524 adev->num_rings = 0; in amdgpu_device_init()
3525 RCU_INIT_POINTER(adev->gang_submit, dma_fence_get_stub()); in amdgpu_device_init()
3526 adev->mman.buffer_funcs = NULL; in amdgpu_device_init()
3527 adev->mman.buffer_funcs_ring = NULL; in amdgpu_device_init()
3528 adev->vm_manager.vm_pte_funcs = NULL; in amdgpu_device_init()
3529 adev->vm_manager.vm_pte_num_scheds = 0; in amdgpu_device_init()
3530 adev->gmc.gmc_funcs = NULL; in amdgpu_device_init()
3531 adev->harvest_ip_mask = 0x0; in amdgpu_device_init()
3532 adev->fence_context = dma_fence_context_alloc(AMDGPU_MAX_RINGS); in amdgpu_device_init()
3533 bitmap_zero(adev->gfx.pipe_reserve_bitmap, AMDGPU_MAX_COMPUTE_QUEUES); in amdgpu_device_init()
3535 adev->smc_rreg = &amdgpu_invalid_rreg; in amdgpu_device_init()
3536 adev->smc_wreg = &amdgpu_invalid_wreg; in amdgpu_device_init()
3537 adev->pcie_rreg = &amdgpu_invalid_rreg; in amdgpu_device_init()
3538 adev->pcie_wreg = &amdgpu_invalid_wreg; in amdgpu_device_init()
3539 adev->pcie_rreg_ext = &amdgpu_invalid_rreg_ext; in amdgpu_device_init()
3540 adev->pcie_wreg_ext = &amdgpu_invalid_wreg_ext; in amdgpu_device_init()
3541 adev->pciep_rreg = &amdgpu_invalid_rreg; in amdgpu_device_init()
3542 adev->pciep_wreg = &amdgpu_invalid_wreg; in amdgpu_device_init()
3543 adev->pcie_rreg64 = &amdgpu_invalid_rreg64; in amdgpu_device_init()
3544 adev->pcie_wreg64 = &amdgpu_invalid_wreg64; in amdgpu_device_init()
3545 adev->uvd_ctx_rreg = &amdgpu_invalid_rreg; in amdgpu_device_init()
3546 adev->uvd_ctx_wreg = &amdgpu_invalid_wreg; in amdgpu_device_init()
3547 adev->didt_rreg = &amdgpu_invalid_rreg; in amdgpu_device_init()
3548 adev->didt_wreg = &amdgpu_invalid_wreg; in amdgpu_device_init()
3549 adev->gc_cac_rreg = &amdgpu_invalid_rreg; in amdgpu_device_init()
3550 adev->gc_cac_wreg = &amdgpu_invalid_wreg; in amdgpu_device_init()
3551 adev->audio_endpt_rreg = &amdgpu_block_invalid_rreg; in amdgpu_device_init()
3552 adev->audio_endpt_wreg = &amdgpu_block_invalid_wreg; in amdgpu_device_init()
3555 amdgpu_asic_name[adev->asic_type], pdev->vendor, pdev->device, in amdgpu_device_init()
3561 mutex_init(&adev->firmware.mutex); in amdgpu_device_init()
3562 mutex_init(&adev->pm.mutex); in amdgpu_device_init()
3563 mutex_init(&adev->gfx.gpu_clock_mutex); in amdgpu_device_init()
3564 mutex_init(&adev->srbm_mutex); in amdgpu_device_init()
3565 mutex_init(&adev->gfx.pipe_reserve_mutex); in amdgpu_device_init()
3566 mutex_init(&adev->gfx.gfx_off_mutex); in amdgpu_device_init()
3567 mutex_init(&adev->gfx.partition_mutex); in amdgpu_device_init()
3568 mutex_init(&adev->grbm_idx_mutex); in amdgpu_device_init()
3569 mutex_init(&adev->mn_lock); in amdgpu_device_init()
3570 mutex_init(&adev->virt.vf_errors.lock); in amdgpu_device_init()
3571 hash_init(adev->mn_hash); in amdgpu_device_init()
3572 mutex_init(&adev->psp.mutex); in amdgpu_device_init()
3573 mutex_init(&adev->notifier_lock); in amdgpu_device_init()
3574 mutex_init(&adev->pm.stable_pstate_ctx_lock); in amdgpu_device_init()
3575 mutex_init(&adev->benchmark_mutex); in amdgpu_device_init()
3577 amdgpu_device_init_apu_flags(adev); in amdgpu_device_init()
3579 r = amdgpu_device_check_arguments(adev); in amdgpu_device_init()
3583 spin_lock_init(&adev->mmio_idx_lock); in amdgpu_device_init()
3584 spin_lock_init(&adev->smc_idx_lock); in amdgpu_device_init()
3585 spin_lock_init(&adev->pcie_idx_lock); in amdgpu_device_init()
3586 spin_lock_init(&adev->uvd_ctx_idx_lock); in amdgpu_device_init()
3587 spin_lock_init(&adev->didt_idx_lock); in amdgpu_device_init()
3588 spin_lock_init(&adev->gc_cac_idx_lock); in amdgpu_device_init()
3589 spin_lock_init(&adev->se_cac_idx_lock); in amdgpu_device_init()
3590 spin_lock_init(&adev->audio_endpt_idx_lock); in amdgpu_device_init()
3591 spin_lock_init(&adev->mm_stats.lock); in amdgpu_device_init()
3593 INIT_LIST_HEAD(&adev->shadow_list); in amdgpu_device_init()
3594 mutex_init(&adev->shadow_list_lock); in amdgpu_device_init()
3596 INIT_LIST_HEAD(&adev->reset_list); in amdgpu_device_init()
3598 INIT_LIST_HEAD(&adev->ras_list); in amdgpu_device_init()
3600 INIT_DELAYED_WORK(&adev->delayed_init_work, in amdgpu_device_init()
3602 INIT_DELAYED_WORK(&adev->gfx.gfx_off_delay_work, in amdgpu_device_init()
3605 INIT_WORK(&adev->xgmi_reset_work, amdgpu_device_xgmi_reset_func); in amdgpu_device_init()
3607 adev->gfx.gfx_off_req_count = 1; in amdgpu_device_init()
3608 adev->gfx.gfx_off_residency = 0; in amdgpu_device_init()
3609 adev->gfx.gfx_off_entrycount = 0; in amdgpu_device_init()
3610 adev->pm.ac_power = power_supply_is_system_supplied() > 0; in amdgpu_device_init()
3612 atomic_set(&adev->throttling_logging_enabled, 1); in amdgpu_device_init()
3620 ratelimit_state_init(&adev->throttling_logging_rs, (60 - 1) * HZ, 1); in amdgpu_device_init()
3621 ratelimit_set_flags(&adev->throttling_logging_rs, RATELIMIT_MSG_ON_RELEASE); in amdgpu_device_init()
3625 if (adev->asic_type >= CHIP_BONAIRE) { in amdgpu_device_init()
3626 adev->rmmio_base = pci_resource_start(adev->pdev, 5); in amdgpu_device_init()
3627 adev->rmmio_size = pci_resource_len(adev->pdev, 5); in amdgpu_device_init()
3629 adev->rmmio_base = pci_resource_start(adev->pdev, 2); in amdgpu_device_init()
3630 adev->rmmio_size = pci_resource_len(adev->pdev, 2); in amdgpu_device_init()
3634 atomic_set(&adev->pm.pwr_state[i], POWER_STATE_UNKNOWN); in amdgpu_device_init()
3636 adev->rmmio = ioremap(adev->rmmio_base, adev->rmmio_size); in amdgpu_device_init()
3637 if (!adev->rmmio) in amdgpu_device_init()
3640 DRM_INFO("register mmio base: 0x%08X\n", (uint32_t)adev->rmmio_base); in amdgpu_device_init()
3641 DRM_INFO("register mmio size: %u\n", (unsigned int)adev->rmmio_size); in amdgpu_device_init()
3648 adev->reset_domain = amdgpu_reset_create_reset_domain(SINGLE_DEVICE, "amdgpu-reset-dev"); in amdgpu_device_init()
3649 if (!adev->reset_domain) in amdgpu_device_init()
3653 amdgpu_detect_virtualization(adev); in amdgpu_device_init()
3655 amdgpu_device_get_pcie_info(adev); in amdgpu_device_init()
3657 r = amdgpu_device_get_job_timeout_settings(adev); in amdgpu_device_init()
3659 dev_err(adev->dev, "invalid lockup_timeout parameter syntax\n"); in amdgpu_device_init()
3664 r = amdgpu_device_ip_early_init(adev); in amdgpu_device_init()
3668 amdgpu_device_set_mcbp(adev); in amdgpu_device_init()
3671 r = drm_aperture_remove_conflicting_pci_framebuffers(adev->pdev, &amdgpu_kms_driver); in amdgpu_device_init()
3676 amdgpu_gmc_tmz_set(adev); in amdgpu_device_init()
3678 amdgpu_gmc_noretry_set(adev); in amdgpu_device_init()
3680 if (adev->gmc.xgmi.supported) { in amdgpu_device_init()
3681 r = adev->gfxhub.funcs->get_xgmi_info(adev); in amdgpu_device_init()
3687 if (amdgpu_sriov_vf(adev)) { in amdgpu_device_init()
3688 if (adev->virt.fw_reserve.p_pf2vf) in amdgpu_device_init()
3689 adev->have_atomics_support = ((struct amd_sriov_msg_pf2vf_info *) in amdgpu_device_init()
3690 adev->virt.fw_reserve.p_pf2vf)->pcie_atomic_ops_support_flags == in amdgpu_device_init()
3695 } else if ((adev->flags & AMD_IS_APU) && in amdgpu_device_init()
3696 (adev->ip_versions[GC_HWIP][0] > IP_VERSION(9, 0, 0))) { in amdgpu_device_init()
3697 adev->have_atomics_support = true; in amdgpu_device_init()
3699 adev->have_atomics_support = in amdgpu_device_init()
3700 !pci_enable_atomic_ops_to_root(adev->pdev, in amdgpu_device_init()
3705 if (!adev->have_atomics_support) in amdgpu_device_init()
3706 dev_info(adev->dev, "PCIE atomic ops are not supported\n"); in amdgpu_device_init()
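Note: the preceding amdgpu_device_init() fragments decide whether the device can rely on PCIe atomic operations: an SR-IOV VF trusts the flags the PF published in the pf2vf structure, APUs newer than GC 9.0.0 assume support from the integrated fabric, and bare-metal dGPUs ask the root complex via pci_enable_atomic_ops_to_root(). A condensed sketch; the COMP32|COMP64 capability mask is filled in from the upstream driver where the listing truncates the expressions:

if (amdgpu_sriov_vf(adev)) {
	/* trust the PF-reported atomics capability */
	if (adev->virt.fw_reserve.p_pf2vf)
		adev->have_atomics_support =
			((struct amd_sriov_msg_pf2vf_info *)
			 adev->virt.fw_reserve.p_pf2vf)->pcie_atomic_ops_support_flags ==
			(PCI_EXP_DEVCAP2_ATOMIC_COMP32 |
			 PCI_EXP_DEVCAP2_ATOMIC_COMP64);
} else if ((adev->flags & AMD_IS_APU) &&
	   (adev->ip_versions[GC_HWIP][0] > IP_VERSION(9, 0, 0))) {
	adev->have_atomics_support = true;	/* APU: fabric handles atomics */
} else {
	adev->have_atomics_support =
		!pci_enable_atomic_ops_to_root(adev->pdev,
					       PCI_EXP_DEVCAP2_ATOMIC_COMP32 |
					       PCI_EXP_DEVCAP2_ATOMIC_COMP64);
}
if (!adev->have_atomics_support)
	dev_info(adev->dev, "PCIE atomic ops are not supported\n");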
3709 amdgpu_doorbell_init(adev); in amdgpu_device_init()
3713 emu_soc_asic_init(adev); in amdgpu_device_init()
3717 amdgpu_reset_init(adev); in amdgpu_device_init()
3720 if (adev->bios) in amdgpu_device_init()
3721 amdgpu_device_detect_sriov_bios(adev); in amdgpu_device_init()
3726 if (!amdgpu_sriov_vf(adev) && amdgpu_asic_need_reset_on_init(adev)) { in amdgpu_device_init()
3727 if (adev->gmc.xgmi.num_physical_nodes) { in amdgpu_device_init()
3728 dev_info(adev->dev, "Pending hive reset.\n"); in amdgpu_device_init()
3729 adev->gmc.xgmi.pending_reset = true; in amdgpu_device_init()
3731 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_init()
3732 if (!adev->ip_blocks[i].status.valid) in amdgpu_device_init()
3734 if (!(adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_GMC || in amdgpu_device_init()
3735 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_COMMON || in amdgpu_device_init()
3736 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_IH || in amdgpu_device_init()
3737 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_SMC)) { in amdgpu_device_init()
3739 adev->ip_blocks[i].version->funcs->name); in amdgpu_device_init()
3740 adev->ip_blocks[i].status.hw = true; in amdgpu_device_init()
3749 r = amdgpu_asic_reset(adev); in amdgpu_device_init()
3752 dev_err(adev->dev, "asic reset on init failed\n"); in amdgpu_device_init()
3759 if (amdgpu_device_need_post(adev)) { in amdgpu_device_init()
3760 if (!adev->bios) { in amdgpu_device_init()
3761 dev_err(adev->dev, "no vBIOS found\n"); in amdgpu_device_init()
3766 r = amdgpu_device_asic_init(adev); in amdgpu_device_init()
3768 dev_err(adev->dev, "gpu post error!\n"); in amdgpu_device_init()
3773 if (adev->bios) { in amdgpu_device_init()
3774 if (adev->is_atom_fw) { in amdgpu_device_init()
3776 r = amdgpu_atomfirmware_get_clock_info(adev); in amdgpu_device_init()
3778 dev_err(adev->dev, "amdgpu_atomfirmware_get_clock_info failed\n"); in amdgpu_device_init()
3779 amdgpu_vf_error_put(adev, AMDGIM_ERROR_VF_ATOMBIOS_GET_CLOCK_FAIL, 0, 0); in amdgpu_device_init()
3784 r = amdgpu_atombios_get_clock_info(adev); in amdgpu_device_init()
3786 dev_err(adev->dev, "amdgpu_atombios_get_clock_info failed\n"); in amdgpu_device_init()
3787 amdgpu_vf_error_put(adev, AMDGIM_ERROR_VF_ATOMBIOS_GET_CLOCK_FAIL, 0, 0); in amdgpu_device_init()
3791 if (!amdgpu_device_has_dc_support(adev)) in amdgpu_device_init()
3792 amdgpu_atombios_i2c_init(adev); in amdgpu_device_init()
3798 r = amdgpu_fence_driver_sw_init(adev); in amdgpu_device_init()
3800 dev_err(adev->dev, "amdgpu_fence_driver_sw_init failed\n"); in amdgpu_device_init()
3801 amdgpu_vf_error_put(adev, AMDGIM_ERROR_VF_FENCE_INIT_FAIL, 0, 0); in amdgpu_device_init()
3806 drm_mode_config_init(adev_to_drm(adev)); in amdgpu_device_init()
3808 r = amdgpu_device_ip_init(adev); in amdgpu_device_init()
3810 dev_err(adev->dev, "amdgpu_device_ip_init failed\n"); in amdgpu_device_init()
3811 amdgpu_vf_error_put(adev, AMDGIM_ERROR_VF_AMDGPU_INIT_FAIL, 0, 0); in amdgpu_device_init()
3815 amdgpu_fence_driver_hw_init(adev); in amdgpu_device_init()
3817 dev_info(adev->dev, in amdgpu_device_init()
3819 adev->gfx.config.max_shader_engines, in amdgpu_device_init()
3820 adev->gfx.config.max_sh_per_se, in amdgpu_device_init()
3821 adev->gfx.config.max_cu_per_sh, in amdgpu_device_init()
3822 adev->gfx.cu_info.number); in amdgpu_device_init()
3824 adev->accel_working = true; in amdgpu_device_init()
3826 amdgpu_vm_check_compute_bug(adev); in amdgpu_device_init()
3834 adev->mm_stats.log2_max_MBps = ilog2(max(1u, max_MBps)); in amdgpu_device_init()
3836 r = amdgpu_atombios_sysfs_init(adev); in amdgpu_device_init()
3838 drm_err(&adev->ddev, in amdgpu_device_init()
3841 r = amdgpu_pm_sysfs_init(adev); in amdgpu_device_init()
3845 r = amdgpu_ucode_sysfs_init(adev); in amdgpu_device_init()
3847 adev->ucode_sysfs_en = false; in amdgpu_device_init()
3850 adev->ucode_sysfs_en = true; in amdgpu_device_init()
3857 amdgpu_register_gpu_instance(adev); in amdgpu_device_init()
3862 if (!adev->gmc.xgmi.pending_reset) { in amdgpu_device_init()
3863 r = amdgpu_device_ip_late_init(adev); in amdgpu_device_init()
3865 dev_err(adev->dev, "amdgpu_device_ip_late_init failed\n"); in amdgpu_device_init()
3866 amdgpu_vf_error_put(adev, AMDGIM_ERROR_VF_AMDGPU_LATE_INIT_FAIL, 0, r); in amdgpu_device_init()
3870 amdgpu_ras_resume(adev); in amdgpu_device_init()
3871 queue_delayed_work(system_wq, &adev->delayed_init_work, in amdgpu_device_init()
3875 if (amdgpu_sriov_vf(adev)) { in amdgpu_device_init()
3876 amdgpu_virt_release_full_gpu(adev, true); in amdgpu_device_init()
3877 flush_delayed_work(&adev->delayed_init_work); in amdgpu_device_init()
3880 r = sysfs_create_files(&adev->dev->kobj, amdgpu_dev_attributes); in amdgpu_device_init()
3882 dev_err(adev->dev, "Could not create amdgpu device attr\n"); in amdgpu_device_init()
3884 amdgpu_fru_sysfs_init(adev); in amdgpu_device_init()
3887 r = amdgpu_pmu_init(adev); in amdgpu_device_init()
3889 dev_err(adev->dev, "amdgpu_pmu_init failed\n"); in amdgpu_device_init()
3892 if (amdgpu_device_cache_pci_state(adev->pdev)) in amdgpu_device_init()
3899 if ((adev->pdev->class >> 8) == PCI_CLASS_DISPLAY_VGA) in amdgpu_device_init()
3900 vga_client_register(adev->pdev, amdgpu_device_vga_set_decode); in amdgpu_device_init()
3904 if (px || (!pci_is_thunderbolt_attached(adev->pdev) && in amdgpu_device_init()
3906 vga_switcheroo_register_client(adev->pdev, in amdgpu_device_init()
3910 vga_switcheroo_init_domain_pm_ops(adev->dev, &adev->vga_pm_domain); in amdgpu_device_init()
3912 if (adev->gmc.xgmi.pending_reset) in amdgpu_device_init()
3916 amdgpu_device_check_iommu_direct_map(adev); in amdgpu_device_init()
3921 if (amdgpu_sriov_vf(adev)) in amdgpu_device_init()
3922 amdgpu_virt_release_full_gpu(adev, true); in amdgpu_device_init()
3925 if (amdgpu_sriov_vf(adev) && in amdgpu_device_init()
3926 !amdgpu_sriov_runtime(adev) && in amdgpu_device_init()
3927 amdgpu_virt_mmio_blocked(adev) && in amdgpu_device_init()
3928 !amdgpu_virt_wait_reset(adev)) { in amdgpu_device_init()
3929 dev_err(adev->dev, "VF exclusive mode timeout\n"); in amdgpu_device_init()
3931 adev->virt.caps &= ~AMDGPU_SRIOV_CAPS_RUNTIME; in amdgpu_device_init()
3932 adev->virt.ops = NULL; in amdgpu_device_init()
3935 amdgpu_release_ras_context(adev); in amdgpu_device_init()
3938 amdgpu_vf_error_trans_all(adev); in amdgpu_device_init()
3943 static void amdgpu_device_unmap_mmio(struct amdgpu_device *adev) in amdgpu_device_unmap_mmio() argument
3947 unmap_mapping_range(adev->ddev.anon_inode->i_mapping, 0, 0, 1); in amdgpu_device_unmap_mmio()
3950 amdgpu_doorbell_fini(adev); in amdgpu_device_unmap_mmio()
3952 iounmap(adev->rmmio); in amdgpu_device_unmap_mmio()
3953 adev->rmmio = NULL; in amdgpu_device_unmap_mmio()
3954 if (adev->mman.aper_base_kaddr) in amdgpu_device_unmap_mmio()
3955 iounmap(adev->mman.aper_base_kaddr); in amdgpu_device_unmap_mmio()
3956 adev->mman.aper_base_kaddr = NULL; in amdgpu_device_unmap_mmio()
3959 if (!adev->gmc.xgmi.connected_to_cpu && !adev->gmc.is_app_apu) { in amdgpu_device_unmap_mmio()
3960 arch_phys_wc_del(adev->gmc.vram_mtrr); in amdgpu_device_unmap_mmio()
3961 arch_io_free_memtype_wc(adev->gmc.aper_base, adev->gmc.aper_size); in amdgpu_device_unmap_mmio()
3973 void amdgpu_device_fini_hw(struct amdgpu_device *adev) in amdgpu_device_fini_hw() argument
3975 dev_info(adev->dev, "amdgpu: finishing device.\n"); in amdgpu_device_fini_hw()
3976 flush_delayed_work(&adev->delayed_init_work); in amdgpu_device_fini_hw()
3977 adev->shutdown = true; in amdgpu_device_fini_hw()
3982 if (amdgpu_sriov_vf(adev)) { in amdgpu_device_fini_hw()
3983 amdgpu_virt_request_full_gpu(adev, false); in amdgpu_device_fini_hw()
3984 amdgpu_virt_fini_data_exchange(adev); in amdgpu_device_fini_hw()
3988 amdgpu_irq_disable_all(adev); in amdgpu_device_fini_hw()
3989 if (adev->mode_info.mode_config_initialized) { in amdgpu_device_fini_hw()
3990 if (!drm_drv_uses_atomic_modeset(adev_to_drm(adev))) in amdgpu_device_fini_hw()
3991 drm_helper_force_disable_all(adev_to_drm(adev)); in amdgpu_device_fini_hw()
3993 drm_atomic_helper_shutdown(adev_to_drm(adev)); in amdgpu_device_fini_hw()
3995 amdgpu_fence_driver_hw_fini(adev); in amdgpu_device_fini_hw()
3997 if (adev->mman.initialized) in amdgpu_device_fini_hw()
3998 drain_workqueue(adev->mman.bdev.wq); in amdgpu_device_fini_hw()
4000 if (adev->pm.sysfs_initialized) in amdgpu_device_fini_hw()
4001 amdgpu_pm_sysfs_fini(adev); in amdgpu_device_fini_hw()
4002 if (adev->ucode_sysfs_en) in amdgpu_device_fini_hw()
4003 amdgpu_ucode_sysfs_fini(adev); in amdgpu_device_fini_hw()
4004 sysfs_remove_files(&adev->dev->kobj, amdgpu_dev_attributes); in amdgpu_device_fini_hw()
4005 amdgpu_fru_sysfs_fini(adev); in amdgpu_device_fini_hw()
4008 amdgpu_ras_pre_fini(adev); in amdgpu_device_fini_hw()
4010 amdgpu_device_ip_fini_early(adev); in amdgpu_device_fini_hw()
4012 amdgpu_irq_fini_hw(adev); in amdgpu_device_fini_hw()
4014 if (adev->mman.initialized) in amdgpu_device_fini_hw()
4015 ttm_device_clear_dma_mappings(&adev->mman.bdev); in amdgpu_device_fini_hw()
4017 amdgpu_gart_dummy_page_fini(adev); in amdgpu_device_fini_hw()
4019 if (drm_dev_is_unplugged(adev_to_drm(adev))) in amdgpu_device_fini_hw()
4020 amdgpu_device_unmap_mmio(adev); in amdgpu_device_fini_hw()
4024 void amdgpu_device_fini_sw(struct amdgpu_device *adev) in amdgpu_device_fini_sw() argument
4029 amdgpu_fence_driver_sw_fini(adev); in amdgpu_device_fini_sw()
4030 amdgpu_device_ip_fini(adev); in amdgpu_device_fini_sw()
4031 amdgpu_ucode_release(&adev->firmware.gpu_info_fw); in amdgpu_device_fini_sw()
4032 adev->accel_working = false; in amdgpu_device_fini_sw()
4033 dma_fence_put(rcu_dereference_protected(adev->gang_submit, true)); in amdgpu_device_fini_sw()
4035 amdgpu_reset_fini(adev); in amdgpu_device_fini_sw()
4038 if (!amdgpu_device_has_dc_support(adev)) in amdgpu_device_fini_sw()
4039 amdgpu_i2c_fini(adev); in amdgpu_device_fini_sw()
4042 amdgpu_atombios_fini(adev); in amdgpu_device_fini_sw()
4044 kfree(adev->bios); in amdgpu_device_fini_sw()
4045 adev->bios = NULL; in amdgpu_device_fini_sw()
4047 px = amdgpu_device_supports_px(adev_to_drm(adev)); in amdgpu_device_fini_sw()
4049 if (px || (!pci_is_thunderbolt_attached(adev->pdev) && in amdgpu_device_fini_sw()
4051 vga_switcheroo_unregister_client(adev->pdev); in amdgpu_device_fini_sw()
4054 vga_switcheroo_fini_domain_pm_ops(adev->dev); in amdgpu_device_fini_sw()
4056 if ((adev->pdev->class >> 8) == PCI_CLASS_DISPLAY_VGA) in amdgpu_device_fini_sw()
4057 vga_client_unregister(adev->pdev); in amdgpu_device_fini_sw()
4059 if (drm_dev_enter(adev_to_drm(adev), &idx)) { in amdgpu_device_fini_sw()
4061 iounmap(adev->rmmio); in amdgpu_device_fini_sw()
4062 adev->rmmio = NULL; in amdgpu_device_fini_sw()
4063 amdgpu_doorbell_fini(adev); in amdgpu_device_fini_sw()
4068 amdgpu_pmu_fini(adev); in amdgpu_device_fini_sw()
4069 if (adev->mman.discovery_bin) in amdgpu_device_fini_sw()
4070 amdgpu_discovery_fini(adev); in amdgpu_device_fini_sw()
4072 amdgpu_reset_put_reset_domain(adev->reset_domain); in amdgpu_device_fini_sw()
4073 adev->reset_domain = NULL; in amdgpu_device_fini_sw()
4075 kfree(adev->pci_state); in amdgpu_device_fini_sw()
4088 static int amdgpu_device_evict_resources(struct amdgpu_device *adev) in amdgpu_device_evict_resources() argument
4093 if ((adev->in_s3 || adev->in_s0ix) && (adev->flags & AMD_IS_APU)) in amdgpu_device_evict_resources()
4096 ret = amdgpu_ttm_evict_resources(adev, TTM_PL_VRAM); in amdgpu_device_evict_resources()
4117 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_device_suspend() local
4123 adev->in_suspend = true; in amdgpu_device_suspend()
4126 r = amdgpu_device_evict_resources(adev); in amdgpu_device_suspend()
4130 if (amdgpu_sriov_vf(adev)) { in amdgpu_device_suspend()
4131 amdgpu_virt_fini_data_exchange(adev); in amdgpu_device_suspend()
4132 r = amdgpu_virt_request_full_gpu(adev, false); in amdgpu_device_suspend()
4141 drm_fb_helper_set_suspend_unlocked(adev_to_drm(adev)->fb_helper, true); in amdgpu_device_suspend()
4143 cancel_delayed_work_sync(&adev->delayed_init_work); in amdgpu_device_suspend()
4144 flush_delayed_work(&adev->gfx.gfx_off_delay_work); in amdgpu_device_suspend()
4146 amdgpu_ras_suspend(adev); in amdgpu_device_suspend()
4148 amdgpu_device_ip_suspend_phase1(adev); in amdgpu_device_suspend()
4150 if (!adev->in_s0ix) in amdgpu_device_suspend()
4151 amdgpu_amdkfd_suspend(adev, adev->in_runpm); in amdgpu_device_suspend()
4153 r = amdgpu_device_evict_resources(adev); in amdgpu_device_suspend()
4157 amdgpu_fence_driver_hw_fini(adev); in amdgpu_device_suspend()
4159 amdgpu_device_ip_suspend_phase2(adev); in amdgpu_device_suspend()
4161 if (amdgpu_sriov_vf(adev)) in amdgpu_device_suspend()
4162 amdgpu_virt_release_full_gpu(adev, false); in amdgpu_device_suspend()
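Note: amdgpu_device_suspend() orders teardown so buffer objects are evicted while the rings can still move them: evict VRAM first, suspend display (phase 1), park KFD unless entering S0ix, evict a second time for anything pinned in the interim, stop the fence drivers, and only then suspend the remaining IP blocks (phase 2), with the SR-IOV full-GPU request/release bracketing the sequence. Condensed from the fragments (fb_helper handling and error paths simplified):

adev->in_suspend = true;

r = amdgpu_device_evict_resources(adev);	/* first pass: rings still alive */
if (r)
	return r;

if (amdgpu_sriov_vf(adev)) {
	amdgpu_virt_fini_data_exchange(adev);
	r = amdgpu_virt_request_full_gpu(adev, false);
	if (r)
		return r;
}

cancel_delayed_work_sync(&adev->delayed_init_work);
flush_delayed_work(&adev->gfx.gfx_off_delay_work);

amdgpu_ras_suspend(adev);
amdgpu_device_ip_suspend_phase1(adev);		/* display goes down first */

if (!adev->in_s0ix)
	amdgpu_amdkfd_suspend(adev, adev->in_runpm);

r = amdgpu_device_evict_resources(adev);	/* second pass: late pins */
if (r)
	return r;

amdgpu_fence_driver_hw_fini(adev);
amdgpu_device_ip_suspend_phase2(adev);

if (amdgpu_sriov_vf(adev))
	amdgpu_virt_release_full_gpu(adev, false);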
4179 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_device_resume() local
4182 if (amdgpu_sriov_vf(adev)) { in amdgpu_device_resume()
4183 r = amdgpu_virt_request_full_gpu(adev, true); in amdgpu_device_resume()
4191 if (adev->in_s0ix) in amdgpu_device_resume()
4192 amdgpu_dpm_gfx_state_change(adev, sGpuChangeState_D0Entry); in amdgpu_device_resume()
4195 if (amdgpu_device_need_post(adev)) { in amdgpu_device_resume()
4196 r = amdgpu_device_asic_init(adev); in amdgpu_device_resume()
4198 dev_err(adev->dev, "amdgpu asic init failed\n"); in amdgpu_device_resume()
4201 r = amdgpu_device_ip_resume(adev); in amdgpu_device_resume()
4204 dev_err(adev->dev, "amdgpu_device_ip_resume failed (%d).\n", r); in amdgpu_device_resume()
4207 amdgpu_fence_driver_hw_init(adev); in amdgpu_device_resume()
4209 r = amdgpu_device_ip_late_init(adev); in amdgpu_device_resume()
4213 queue_delayed_work(system_wq, &adev->delayed_init_work, in amdgpu_device_resume()
4216 if (!adev->in_s0ix) { in amdgpu_device_resume()
4217 r = amdgpu_amdkfd_resume(adev, adev->in_runpm); in amdgpu_device_resume()
4223 if (amdgpu_sriov_vf(adev)) { in amdgpu_device_resume()
4224 amdgpu_virt_init_data_exchange(adev); in amdgpu_device_resume()
4225 amdgpu_virt_release_full_gpu(adev, true); in amdgpu_device_resume()
4232 flush_delayed_work(&adev->delayed_init_work); in amdgpu_device_resume()
4235 drm_fb_helper_set_suspend_unlocked(adev_to_drm(adev)->fb_helper, false); in amdgpu_device_resume()
4237 amdgpu_ras_resume(adev); in amdgpu_device_resume()
4239 if (adev->mode_info.num_crtc) { in amdgpu_device_resume()
4252 if (!adev->dc_enabled) in amdgpu_device_resume()
4260 adev->in_suspend = false; in amdgpu_device_resume()
4262 if (adev->enable_mes) in amdgpu_device_resume()
4263 amdgpu_mes_self_test(adev); in amdgpu_device_resume()
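Note: amdgpu_device_resume() mirrors that order on the way up: leave S0ix, re-post the ASIC through atombios if needed, run both IP resume phases via amdgpu_device_ip_resume(), restart the fence drivers, run late init, resume KFD (outside S0ix) and RAS, then re-enable the fb_helper and, without DC, the display hardware. Condensed ordering from the fragments (SR-IOV bracketing and error paths simplified; AMDGPU_RESUME_MS is the upstream delay constant, not shown in the listing):

if (adev->in_s0ix)
	amdgpu_dpm_gfx_state_change(adev, sGpuChangeState_D0Entry);

if (amdgpu_device_need_post(adev)) {
	r = amdgpu_device_asic_init(adev);	/* re-post via atombios */
	if (r)
		dev_err(adev->dev, "amdgpu asic init failed\n");
}

r = amdgpu_device_ip_resume(adev);		/* phase 1 -> firmware -> phase 2 */
if (r)
	return r;

amdgpu_fence_driver_hw_init(adev);

r = amdgpu_device_ip_late_init(adev);
if (r)
	return r;

queue_delayed_work(system_wq, &adev->delayed_init_work,
		   msecs_to_jiffies(AMDGPU_RESUME_MS));

if (!adev->in_s0ix)
	r = amdgpu_amdkfd_resume(adev, adev->in_runpm);

adev->in_suspend = false;			/* after RAS/display are back */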
4281 static bool amdgpu_device_ip_check_soft_reset(struct amdgpu_device *adev) in amdgpu_device_ip_check_soft_reset() argument
4286 if (amdgpu_sriov_vf(adev)) in amdgpu_device_ip_check_soft_reset()
4289 if (amdgpu_asic_need_full_reset(adev)) in amdgpu_device_ip_check_soft_reset()
4292 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_check_soft_reset()
4293 if (!adev->ip_blocks[i].status.valid) in amdgpu_device_ip_check_soft_reset()
4295 if (adev->ip_blocks[i].version->funcs->check_soft_reset) in amdgpu_device_ip_check_soft_reset()
4296 adev->ip_blocks[i].status.hang = in amdgpu_device_ip_check_soft_reset()
4297 adev->ip_blocks[i].version->funcs->check_soft_reset(adev); in amdgpu_device_ip_check_soft_reset()
4298 if (adev->ip_blocks[i].status.hang) { in amdgpu_device_ip_check_soft_reset()
4299 dev_info(adev->dev, "IP block:%s is hung!\n", adev->ip_blocks[i].version->funcs->name); in amdgpu_device_ip_check_soft_reset()
4317 static int amdgpu_device_ip_pre_soft_reset(struct amdgpu_device *adev) in amdgpu_device_ip_pre_soft_reset() argument
4321 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_pre_soft_reset()
4322 if (!adev->ip_blocks[i].status.valid) in amdgpu_device_ip_pre_soft_reset()
4324 if (adev->ip_blocks[i].status.hang && in amdgpu_device_ip_pre_soft_reset()
4325 adev->ip_blocks[i].version->funcs->pre_soft_reset) { in amdgpu_device_ip_pre_soft_reset()
4326 r = adev->ip_blocks[i].version->funcs->pre_soft_reset(adev); in amdgpu_device_ip_pre_soft_reset()
4344 static bool amdgpu_device_ip_need_full_reset(struct amdgpu_device *adev) in amdgpu_device_ip_need_full_reset() argument
4348 if (amdgpu_asic_need_full_reset(adev)) in amdgpu_device_ip_need_full_reset()
4351 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_need_full_reset()
4352 if (!adev->ip_blocks[i].status.valid) in amdgpu_device_ip_need_full_reset()
4354 if ((adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_GMC) || in amdgpu_device_ip_need_full_reset()
4355 (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_SMC) || in amdgpu_device_ip_need_full_reset()
4356 (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_ACP) || in amdgpu_device_ip_need_full_reset()
4357 (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_DCE) || in amdgpu_device_ip_need_full_reset()
4358 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_PSP) { in amdgpu_device_ip_need_full_reset()
4359 if (adev->ip_blocks[i].status.hang) { in amdgpu_device_ip_need_full_reset()
4360 dev_info(adev->dev, "Some blocks need full reset!\n"); in amdgpu_device_ip_need_full_reset()
4379 static int amdgpu_device_ip_soft_reset(struct amdgpu_device *adev) in amdgpu_device_ip_soft_reset() argument
4383 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_soft_reset()
4384 if (!adev->ip_blocks[i].status.valid) in amdgpu_device_ip_soft_reset()
4386 if (adev->ip_blocks[i].status.hang && in amdgpu_device_ip_soft_reset()
4387 adev->ip_blocks[i].version->funcs->soft_reset) { in amdgpu_device_ip_soft_reset()
4388 r = adev->ip_blocks[i].version->funcs->soft_reset(adev); in amdgpu_device_ip_soft_reset()
4408 static int amdgpu_device_ip_post_soft_reset(struct amdgpu_device *adev) in amdgpu_device_ip_post_soft_reset() argument
4412 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_post_soft_reset()
4413 if (!adev->ip_blocks[i].status.valid) in amdgpu_device_ip_post_soft_reset()
4415 if (adev->ip_blocks[i].status.hang && in amdgpu_device_ip_post_soft_reset()
4416 adev->ip_blocks[i].version->funcs->post_soft_reset) in amdgpu_device_ip_post_soft_reset()
4417 r = adev->ip_blocks[i].version->funcs->post_soft_reset(adev); in amdgpu_device_ip_post_soft_reset()
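Note: the four helpers above form the per-IP soft-reset pipeline. check_soft_reset() latches status.hang for every block that reports a hang, need_full_reset() escalates when GMC, SMC, ACP, DCE or PSP is among the hung blocks, and pre/soft/post_soft_reset() run only for blocks flagged hung. The caller-side pattern, condensed from the amdgpu_device_pre_asic_reset() fragments further down (the truncated condition also tests amdgpu_gpu_recovery in the upstream driver):

if (!need_full_reset)
	need_full_reset = amdgpu_device_ip_need_full_reset(adev);

if (!need_full_reset && !amdgpu_sriov_vf(adev) &&
    amdgpu_device_ip_check_soft_reset(adev)) {
	amdgpu_device_ip_pre_soft_reset(adev);
	r = amdgpu_device_ip_soft_reset(adev);
	amdgpu_device_ip_post_soft_reset(adev);
	/* soft reset either worked, or we escalate to a full reset */
	if (r || amdgpu_device_ip_check_soft_reset(adev)) {
		dev_info(adev->dev, "soft reset failed, will fall back to full reset!\n");
		need_full_reset = true;
	}
}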
4437 static int amdgpu_device_recover_vram(struct amdgpu_device *adev) in amdgpu_device_recover_vram() argument
4444 if (amdgpu_sriov_runtime(adev)) in amdgpu_device_recover_vram()
4449 dev_info(adev->dev, "recover vram bo from shadow start\n"); in amdgpu_device_recover_vram()
4450 mutex_lock(&adev->shadow_list_lock); in amdgpu_device_recover_vram()
4451 list_for_each_entry(vmbo, &adev->shadow_list, shadow_list) { in amdgpu_device_recover_vram()
4482 mutex_unlock(&adev->shadow_list_lock); in amdgpu_device_recover_vram()
4489 dev_err(adev->dev, "recover vram bo from shadow failed, r is %ld, tmo is %ld\n", r, tmo); in amdgpu_device_recover_vram()
4493 dev_info(adev->dev, "recover vram bo from shadow done\n"); in amdgpu_device_recover_vram()
4507 static int amdgpu_device_reset_sriov(struct amdgpu_device *adev, in amdgpu_device_reset_sriov() argument
4515 amdgpu_amdkfd_pre_reset(adev); in amdgpu_device_reset_sriov()
4518 r = amdgpu_virt_request_full_gpu(adev, true); in amdgpu_device_reset_sriov()
4520 r = amdgpu_virt_reset_gpu(adev); in amdgpu_device_reset_sriov()
4523 amdgpu_irq_gpu_reset_resume_helper(adev); in amdgpu_device_reset_sriov()
4526 amdgpu_virt_post_reset(adev); in amdgpu_device_reset_sriov()
4529 r = amdgpu_device_ip_reinit_early_sriov(adev); in amdgpu_device_reset_sriov()
4533 amdgpu_virt_init_data_exchange(adev); in amdgpu_device_reset_sriov()
4535 r = amdgpu_device_fw_loading(adev); in amdgpu_device_reset_sriov()
4540 r = amdgpu_device_ip_reinit_late_sriov(adev); in amdgpu_device_reset_sriov()
4544 hive = amdgpu_get_xgmi_hive(adev); in amdgpu_device_reset_sriov()
4546 if (hive && adev->gmc.xgmi.num_physical_nodes > 1) in amdgpu_device_reset_sriov()
4547 r = amdgpu_xgmi_update_topology(hive, adev); in amdgpu_device_reset_sriov()
4553 r = amdgpu_ib_ring_tests(adev); in amdgpu_device_reset_sriov()
4555 amdgpu_amdkfd_post_reset(adev); in amdgpu_device_reset_sriov()
4559 if (!r && adev->virt.gim_feature & AMDGIM_FEATURE_GIM_FLR_VRAMLOST) { in amdgpu_device_reset_sriov()
4560 amdgpu_inc_vram_lost(adev); in amdgpu_device_reset_sriov()
4561 r = amdgpu_device_recover_vram(adev); in amdgpu_device_reset_sriov()
4563 amdgpu_virt_release_full_gpu(adev, true); in amdgpu_device_reset_sriov()
4583 bool amdgpu_device_has_job_running(struct amdgpu_device *adev) in amdgpu_device_has_job_running() argument
4589 struct amdgpu_ring *ring = adev->rings[i]; in amdgpu_device_has_job_running()
4612 bool amdgpu_device_should_recover_gpu(struct amdgpu_device *adev) in amdgpu_device_should_recover_gpu() argument
4619 if (!amdgpu_ras_is_poison_mode_supported(adev)) in amdgpu_device_should_recover_gpu()
4622 if (amdgpu_sriov_vf(adev)) in amdgpu_device_should_recover_gpu()
4626 switch (adev->asic_type) { in amdgpu_device_should_recover_gpu()
4651 dev_info(adev->dev, "GPU recovery disabled.\n"); in amdgpu_device_should_recover_gpu()
4655 int amdgpu_device_mode1_reset(struct amdgpu_device *adev) in amdgpu_device_mode1_reset() argument
4660 amdgpu_atombios_scratch_regs_engine_hung(adev, true); in amdgpu_device_mode1_reset()
4662 dev_info(adev->dev, "GPU mode1 reset\n"); in amdgpu_device_mode1_reset()
4665 pci_clear_master(adev->pdev); in amdgpu_device_mode1_reset()
4667 amdgpu_device_cache_pci_state(adev->pdev); in amdgpu_device_mode1_reset()
4669 if (amdgpu_dpm_is_mode1_reset_supported(adev)) { in amdgpu_device_mode1_reset()
4670 dev_info(adev->dev, "GPU smu mode1 reset\n"); in amdgpu_device_mode1_reset()
4671 ret = amdgpu_dpm_mode1_reset(adev); in amdgpu_device_mode1_reset()
4673 dev_info(adev->dev, "GPU psp mode1 reset\n"); in amdgpu_device_mode1_reset()
4674 ret = psp_gpu_reset(adev); in amdgpu_device_mode1_reset()
4680 amdgpu_device_load_pci_state(adev->pdev); in amdgpu_device_mode1_reset()
4681 ret = amdgpu_psp_wait_for_bootloader(adev); in amdgpu_device_mode1_reset()
4686 for (i = 0; i < adev->usec_timeout; i++) { in amdgpu_device_mode1_reset()
4687 u32 memsize = adev->nbio.funcs->get_memsize(adev); in amdgpu_device_mode1_reset()
4694 if (i >= adev->usec_timeout) { in amdgpu_device_mode1_reset()
4699 amdgpu_atombios_scratch_regs_engine_hung(adev, false); in amdgpu_device_mode1_reset()
4704 dev_err(adev->dev, "GPU mode1 reset failed\n"); in amdgpu_device_mode1_reset()
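Note: amdgpu_device_mode1_reset() performs a whole-ASIC mode-1 reset: flag the engines hung in the atombios scratch registers, stop bus mastering, cache the PCI config space, reset through the SMU when it advertises mode-1 support (PSP otherwise), restore config space, wait for the PSP bootloader, and poll the NBIO memsize register until the memory controller answers sanely again. Condensed sketch; the 0xffffffff sentinel and udelay(1) in the poll loop are taken from the upstream driver, as the listing truncates that loop:

amdgpu_atombios_scratch_regs_engine_hung(adev, true);

pci_clear_master(adev->pdev);			/* disable bus mastering */
amdgpu_device_cache_pci_state(adev->pdev);

if (amdgpu_dpm_is_mode1_reset_supported(adev))
	ret = amdgpu_dpm_mode1_reset(adev);	/* SMU-assisted reset */
else
	ret = psp_gpu_reset(adev);		/* PSP fallback */

if (!ret) {
	amdgpu_device_load_pci_state(adev->pdev);
	ret = amdgpu_psp_wait_for_bootloader(adev);
}

if (!ret) {
	/* memsize reads 0xffffffff while the memory controller is in reset */
	for (i = 0; i < adev->usec_timeout; i++) {
		u32 memsize = adev->nbio.funcs->get_memsize(adev);

		if (memsize != 0xffffffff)
			break;
		udelay(1);
	}
	if (i >= adev->usec_timeout)
		ret = -ETIMEDOUT;
}

if (!ret)
	amdgpu_atombios_scratch_regs_engine_hung(adev, false);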
4708 int amdgpu_device_pre_asic_reset(struct amdgpu_device *adev, in amdgpu_device_pre_asic_reset() argument
4716 if (reset_context->reset_req_dev == adev) in amdgpu_device_pre_asic_reset()
4719 if (amdgpu_sriov_vf(adev)) { in amdgpu_device_pre_asic_reset()
4721 amdgpu_virt_fini_data_exchange(adev); in amdgpu_device_pre_asic_reset()
4724 amdgpu_fence_driver_isr_toggle(adev, true); in amdgpu_device_pre_asic_reset()
4728 struct amdgpu_ring *ring = adev->rings[i]; in amdgpu_device_pre_asic_reset()
4742 amdgpu_fence_driver_isr_toggle(adev, false); in amdgpu_device_pre_asic_reset()
4747 r = amdgpu_reset_prepare_hwcontext(adev, reset_context); in amdgpu_device_pre_asic_reset()
4755 if (!amdgpu_sriov_vf(adev)) { in amdgpu_device_pre_asic_reset()
4758 need_full_reset = amdgpu_device_ip_need_full_reset(adev); in amdgpu_device_pre_asic_reset()
4761 amdgpu_device_ip_check_soft_reset(adev)) { in amdgpu_device_pre_asic_reset()
4762 amdgpu_device_ip_pre_soft_reset(adev); in amdgpu_device_pre_asic_reset()
4763 r = amdgpu_device_ip_soft_reset(adev); in amdgpu_device_pre_asic_reset()
4764 amdgpu_device_ip_post_soft_reset(adev); in amdgpu_device_pre_asic_reset()
4765 if (r || amdgpu_device_ip_check_soft_reset(adev)) { in amdgpu_device_pre_asic_reset()
4766 dev_info(adev->dev, "soft reset failed, will fall back to full reset!\n"); in amdgpu_device_pre_asic_reset()
4772 r = amdgpu_device_ip_suspend(adev); in amdgpu_device_pre_asic_reset()
4783 static int amdgpu_reset_reg_dumps(struct amdgpu_device *adev) in amdgpu_reset_reg_dumps() argument
4787 lockdep_assert_held(&adev->reset_domain->sem); in amdgpu_reset_reg_dumps()
4789 for (i = 0; i < adev->num_regs; i++) { in amdgpu_reset_reg_dumps()
4790 adev->reset_dump_reg_value[i] = RREG32(adev->reset_dump_reg_list[i]); in amdgpu_reset_reg_dumps()
4791 trace_amdgpu_reset_reg_dumps(adev->reset_dump_reg_list[i], in amdgpu_reset_reg_dumps()
4792 adev->reset_dump_reg_value[i]); in amdgpu_reset_reg_dumps()
4803 struct amdgpu_device *adev = data; in amdgpu_devcoredump_read() local
4817 drm_printf(&p, "time: %lld.%09ld\n", adev->reset_time.tv_sec, adev->reset_time.tv_nsec); in amdgpu_devcoredump_read()
4818 if (adev->reset_task_info.pid) in amdgpu_devcoredump_read()
4820 adev->reset_task_info.process_name, in amdgpu_devcoredump_read()
4821 adev->reset_task_info.pid); in amdgpu_devcoredump_read()
4823 if (adev->reset_vram_lost) in amdgpu_devcoredump_read()
4825 if (adev->num_regs) { in amdgpu_devcoredump_read()
4828 for (i = 0; i < adev->num_regs; i++) in amdgpu_devcoredump_read()
4830 adev->reset_dump_reg_list[i], in amdgpu_devcoredump_read()
4831 adev->reset_dump_reg_value[i]); in amdgpu_devcoredump_read()
4841 static void amdgpu_reset_capture_coredumpm(struct amdgpu_device *adev) in amdgpu_reset_capture_coredumpm() argument
4843 struct drm_device *dev = adev_to_drm(adev); in amdgpu_reset_capture_coredumpm()
4845 ktime_get_ts64(&adev->reset_time); in amdgpu_reset_capture_coredumpm()
4846 dev_coredumpm(dev->dev, THIS_MODULE, adev, 0, GFP_NOWAIT, in amdgpu_reset_capture_coredumpm()
5044 static void amdgpu_device_set_mp1_state(struct amdgpu_device *adev) in amdgpu_device_set_mp1_state() argument
5047 switch (amdgpu_asic_reset_method(adev)) { in amdgpu_device_set_mp1_state()
5049 adev->mp1_state = PP_MP1_STATE_SHUTDOWN; in amdgpu_device_set_mp1_state()
5052 adev->mp1_state = PP_MP1_STATE_RESET; in amdgpu_device_set_mp1_state()
5055 adev->mp1_state = PP_MP1_STATE_NONE; in amdgpu_device_set_mp1_state()
5060 static void amdgpu_device_unset_mp1_state(struct amdgpu_device *adev) in amdgpu_device_unset_mp1_state() argument
5062 amdgpu_vf_error_trans_all(adev); in amdgpu_device_unset_mp1_state()
5063 adev->mp1_state = PP_MP1_STATE_NONE; in amdgpu_device_unset_mp1_state()
5066 static void amdgpu_device_resume_display_audio(struct amdgpu_device *adev) in amdgpu_device_resume_display_audio() argument
5070 p = pci_get_domain_bus_and_slot(pci_domain_nr(adev->pdev->bus), in amdgpu_device_resume_display_audio()
5071 adev->pdev->bus->number, 1); in amdgpu_device_resume_display_audio()
5080 static int amdgpu_device_suspend_display_audio(struct amdgpu_device *adev) in amdgpu_device_suspend_display_audio() argument
5090 reset_method = amdgpu_asic_reset_method(adev); in amdgpu_device_suspend_display_audio()
5095 p = pci_get_domain_bus_and_slot(pci_domain_nr(adev->pdev->bus), in amdgpu_device_suspend_display_audio()
5096 adev->pdev->bus->number, 1); in amdgpu_device_suspend_display_audio()
5115 dev_warn(adev->dev, "failed to suspend display audio\n"); in amdgpu_device_suspend_display_audio()
5128 static inline void amdgpu_device_stop_pending_resets(struct amdgpu_device *adev) in amdgpu_device_stop_pending_resets() argument
5130 struct amdgpu_ras *con = amdgpu_ras_get_context(adev); in amdgpu_device_stop_pending_resets()
5133 if (!amdgpu_sriov_vf(adev)) in amdgpu_device_stop_pending_resets()
5134 cancel_work(&adev->reset_work); in amdgpu_device_stop_pending_resets()
5137 if (adev->kfd.dev) in amdgpu_device_stop_pending_resets()
5138 cancel_work(&adev->kfd.reset_work); in amdgpu_device_stop_pending_resets()
5140 if (amdgpu_sriov_vf(adev)) in amdgpu_device_stop_pending_resets()
5141 cancel_work(&adev->virt.flr_work); in amdgpu_device_stop_pending_resets()
5143 if (con && adev->ras_enabled) in amdgpu_device_stop_pending_resets()
5160 int amdgpu_device_gpu_recover(struct amdgpu_device *adev, in amdgpu_device_gpu_recover() argument
5180 need_emergency_restart = amdgpu_ras_need_emergency_restart(adev); in amdgpu_device_gpu_recover()
5186 if (need_emergency_restart && amdgpu_ras_get_context(adev)->reboot) { in amdgpu_device_gpu_recover()
5193 dev_info(adev->dev, "GPU %s begin!\n", in amdgpu_device_gpu_recover()
5196 if (!amdgpu_sriov_vf(adev)) in amdgpu_device_gpu_recover()
5197 hive = amdgpu_get_xgmi_hive(adev); in amdgpu_device_gpu_recover()
5209 if (!amdgpu_sriov_vf(adev) && (adev->gmc.xgmi.num_physical_nodes > 1)) { in amdgpu_device_gpu_recover()
5212 if (gpu_reset_for_dev_remove && adev->shutdown) in amdgpu_device_gpu_recover()
5215 if (!list_is_first(&adev->reset_list, &device_list)) in amdgpu_device_gpu_recover()
5216 list_rotate_to_front(&adev->reset_list, &device_list); in amdgpu_device_gpu_recover()
5219 list_add_tail(&adev->reset_list, &device_list); in amdgpu_device_gpu_recover()
5291 dev_info(adev->dev, "Guilty job already signaled, skipping HW reset"); in amdgpu_device_gpu_recover()
5318 if (amdgpu_sriov_vf(adev)) { in amdgpu_device_gpu_recover()
5319 r = amdgpu_device_reset_sriov(adev, job ? false : true); in amdgpu_device_gpu_recover()
5321 adev->asic_reset_res = r; in amdgpu_device_gpu_recover()
5324 if (adev->ip_versions[GC_HWIP][0] == IP_VERSION(9, 4, 2) || in amdgpu_device_gpu_recover()
5325 adev->ip_versions[GC_HWIP][0] == IP_VERSION(11, 0, 3)) in amdgpu_device_gpu_recover()
5326 amdgpu_ras_resume(adev); in amdgpu_device_gpu_recover()
5350 if (adev->enable_mes && adev->ip_versions[GC_HWIP][0] != IP_VERSION(11, 0, 3)) in amdgpu_device_gpu_recover()
5381 if (!adev->kfd.init_complete) in amdgpu_device_gpu_recover()
5382 amdgpu_amdkfd_device_init(adev); in amdgpu_device_gpu_recover()
5403 dev_info(adev->dev, "GPU reset end with ret = %d\n", r); in amdgpu_device_gpu_recover()
5405 atomic_set(&adev->reset_domain->reset_res, r); in amdgpu_device_gpu_recover()
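Note: amdgpu_device_gpu_recover() is the top-level recovery path. The list-building fragments near its start decide the reset scope: on a bare-metal XGMI hive every node joins the device list and the device that triggered recovery is rotated to the front, otherwise the single device is reset alone. Condensed sketch of just that step; the iteration over hive->device_list is reconstructed from the upstream driver (its lines reference tmp_adev, so they do not appear in this adev listing):

if (!amdgpu_sriov_vf(adev) && adev->gmc.xgmi.num_physical_nodes > 1) {
	/* reset every node of the hive together */
	list_for_each_entry(tmp_adev, &hive->device_list, gmc.xgmi.head)
		list_add_tail(&tmp_adev->reset_list, &device_list);

	/* the device that saw the hang leads the reset ordering */
	if (!list_is_first(&adev->reset_list, &device_list))
		list_rotate_to_front(&adev->reset_list, &device_list);
} else {
	list_add_tail(&adev->reset_list, &device_list);
}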
5418 static void amdgpu_device_get_pcie_info(struct amdgpu_device *adev) in amdgpu_device_get_pcie_info() argument
5425 adev->pm.pcie_gen_mask = amdgpu_pcie_gen_cap; in amdgpu_device_get_pcie_info()
5428 adev->pm.pcie_mlw_mask = amdgpu_pcie_lane_cap; in amdgpu_device_get_pcie_info()
5431 if (pci_is_root_bus(adev->pdev->bus) && !amdgpu_passthrough(adev)) { in amdgpu_device_get_pcie_info()
5432 if (adev->pm.pcie_gen_mask == 0) in amdgpu_device_get_pcie_info()
5433 adev->pm.pcie_gen_mask = AMDGPU_DEFAULT_PCIE_GEN_MASK; in amdgpu_device_get_pcie_info()
5434 if (adev->pm.pcie_mlw_mask == 0) in amdgpu_device_get_pcie_info()
5435 adev->pm.pcie_mlw_mask = AMDGPU_DEFAULT_PCIE_MLW_MASK; in amdgpu_device_get_pcie_info()
5439 if (adev->pm.pcie_gen_mask && adev->pm.pcie_mlw_mask) in amdgpu_device_get_pcie_info()
5442 pcie_bandwidth_available(adev->pdev, NULL, in amdgpu_device_get_pcie_info()
5445 if (adev->pm.pcie_gen_mask == 0) { in amdgpu_device_get_pcie_info()
5447 pdev = adev->pdev; in amdgpu_device_get_pcie_info()
5450 adev->pm.pcie_gen_mask |= (CAIL_ASIC_PCIE_LINK_SPEED_SUPPORT_GEN1 | in amdgpu_device_get_pcie_info()
5455 adev->pm.pcie_gen_mask |= (CAIL_ASIC_PCIE_LINK_SPEED_SUPPORT_GEN1 | in amdgpu_device_get_pcie_info()
5461 adev->pm.pcie_gen_mask |= (CAIL_ASIC_PCIE_LINK_SPEED_SUPPORT_GEN1 | in amdgpu_device_get_pcie_info()
5466 adev->pm.pcie_gen_mask |= (CAIL_ASIC_PCIE_LINK_SPEED_SUPPORT_GEN1 | in amdgpu_device_get_pcie_info()
5470 adev->pm.pcie_gen_mask |= (CAIL_ASIC_PCIE_LINK_SPEED_SUPPORT_GEN1 | in amdgpu_device_get_pcie_info()
5473 adev->pm.pcie_gen_mask |= CAIL_ASIC_PCIE_LINK_SPEED_SUPPORT_GEN1; in amdgpu_device_get_pcie_info()
5477 adev->pm.pcie_gen_mask |= (CAIL_PCIE_LINK_SPEED_SUPPORT_GEN1 | in amdgpu_device_get_pcie_info()
5481 adev->pm.pcie_gen_mask |= (CAIL_PCIE_LINK_SPEED_SUPPORT_GEN1 | in amdgpu_device_get_pcie_info()
5487 adev->pm.pcie_gen_mask |= (CAIL_PCIE_LINK_SPEED_SUPPORT_GEN1 | in amdgpu_device_get_pcie_info()
5492 adev->pm.pcie_gen_mask |= (CAIL_PCIE_LINK_SPEED_SUPPORT_GEN1 | in amdgpu_device_get_pcie_info()
5496 adev->pm.pcie_gen_mask |= (CAIL_PCIE_LINK_SPEED_SUPPORT_GEN1 | in amdgpu_device_get_pcie_info()
5499 adev->pm.pcie_gen_mask |= CAIL_PCIE_LINK_SPEED_SUPPORT_GEN1; in amdgpu_device_get_pcie_info()
5503 if (adev->pm.pcie_mlw_mask == 0) { in amdgpu_device_get_pcie_info()
5505 adev->pm.pcie_mlw_mask |= AMDGPU_DEFAULT_PCIE_MLW_MASK; in amdgpu_device_get_pcie_info()
5509 adev->pm.pcie_mlw_mask = (CAIL_PCIE_LINK_WIDTH_SUPPORT_X32 | in amdgpu_device_get_pcie_info()
5518 adev->pm.pcie_mlw_mask = (CAIL_PCIE_LINK_WIDTH_SUPPORT_X16 | in amdgpu_device_get_pcie_info()
5526 adev->pm.pcie_mlw_mask = (CAIL_PCIE_LINK_WIDTH_SUPPORT_X12 | in amdgpu_device_get_pcie_info()
5533 adev->pm.pcie_mlw_mask = (CAIL_PCIE_LINK_WIDTH_SUPPORT_X8 | in amdgpu_device_get_pcie_info()
5539 adev->pm.pcie_mlw_mask = (CAIL_PCIE_LINK_WIDTH_SUPPORT_X4 | in amdgpu_device_get_pcie_info()
5544 adev->pm.pcie_mlw_mask = (CAIL_PCIE_LINK_WIDTH_SUPPORT_X2 | in amdgpu_device_get_pcie_info()
5548 adev->pm.pcie_mlw_mask = CAIL_PCIE_LINK_WIDTH_SUPPORT_X1; in amdgpu_device_get_pcie_info()
5567 bool amdgpu_device_is_peer_accessible(struct amdgpu_device *adev, in amdgpu_device_is_peer_accessible() argument
5574 adev->gmc.aper_base + adev->gmc.aper_size - 1; in amdgpu_device_is_peer_accessible()
5576 !adev->gmc.xgmi.connected_to_cpu && in amdgpu_device_is_peer_accessible()
5577 !(pci_p2pdma_distance(adev->pdev, peer_adev->dev, false) < 0); in amdgpu_device_is_peer_accessible()
5579 return pcie_p2p && p2p_access && (adev->gmc.visible_vram_size && in amdgpu_device_is_peer_accessible()
5580 adev->gmc.real_vram_size == adev->gmc.visible_vram_size && in amdgpu_device_is_peer_accessible()
5581 !(adev->gmc.aper_base & address_mask || in amdgpu_device_is_peer_accessible()
5590 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_device_baco_enter() local
5591 struct amdgpu_ras *ras = amdgpu_ras_get_context(adev); in amdgpu_device_baco_enter()
5596 if (ras && adev->ras_enabled && in amdgpu_device_baco_enter()
5597 adev->nbio.funcs->enable_doorbell_interrupt) in amdgpu_device_baco_enter()
5598 adev->nbio.funcs->enable_doorbell_interrupt(adev, false); in amdgpu_device_baco_enter()
5600 return amdgpu_dpm_baco_enter(adev); in amdgpu_device_baco_enter()
5605 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_device_baco_exit() local
5606 struct amdgpu_ras *ras = amdgpu_ras_get_context(adev); in amdgpu_device_baco_exit()
5612 ret = amdgpu_dpm_baco_exit(adev); in amdgpu_device_baco_exit()
5616 if (ras && adev->ras_enabled && in amdgpu_device_baco_exit()
5617 adev->nbio.funcs->enable_doorbell_interrupt) in amdgpu_device_baco_exit()
5618 adev->nbio.funcs->enable_doorbell_interrupt(adev, true); in amdgpu_device_baco_exit()
5620 if (amdgpu_passthrough(adev) && in amdgpu_device_baco_exit()
5621 adev->nbio.funcs->clear_doorbell_interrupt) in amdgpu_device_baco_exit()
5622 adev->nbio.funcs->clear_doorbell_interrupt(adev); in amdgpu_device_baco_exit()
5639 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_pci_error_detected() local
5644 if (adev->gmc.xgmi.num_physical_nodes > 1) { in amdgpu_pci_error_detected()
5649 adev->pci_channel_state = state; in amdgpu_pci_error_detected()
5660 amdgpu_device_lock_reset_domain(adev->reset_domain); in amdgpu_pci_error_detected()
5661 amdgpu_device_set_mp1_state(adev); in amdgpu_pci_error_detected()
5668 struct amdgpu_ring *ring = adev->rings[i]; in amdgpu_pci_error_detected()
5675 atomic_inc(&adev->gpu_reset_counter); in amdgpu_pci_error_detected()
5715 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_pci_slot_reset() local
5726 list_add_tail(&adev->reset_list, &device_list); in amdgpu_pci_slot_reset()
5735 for (i = 0; i < adev->usec_timeout; i++) { in amdgpu_pci_slot_reset()
5736 memsize = amdgpu_asic_get_config_memsize(adev); in amdgpu_pci_slot_reset()
5748 reset_context.reset_req_dev = adev; in amdgpu_pci_slot_reset()
5752 adev->no_hw_access = true; in amdgpu_pci_slot_reset()
5753 r = amdgpu_device_pre_asic_reset(adev, &reset_context); in amdgpu_pci_slot_reset()
5754 adev->no_hw_access = false; in amdgpu_pci_slot_reset()
5762 if (amdgpu_device_cache_pci_state(adev->pdev)) in amdgpu_pci_slot_reset()
5763 pci_restore_state(adev->pdev); in amdgpu_pci_slot_reset()
5768 amdgpu_device_unset_mp1_state(adev); in amdgpu_pci_slot_reset()
5769 amdgpu_device_unlock_reset_domain(adev->reset_domain); in amdgpu_pci_slot_reset()
5785 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_pci_resume() local
5792 if (adev->pci_channel_state != pci_channel_io_frozen) in amdgpu_pci_resume()
5796 struct amdgpu_ring *ring = adev->rings[i]; in amdgpu_pci_resume()
5804 amdgpu_device_unset_mp1_state(adev); in amdgpu_pci_resume()
5805 amdgpu_device_unlock_reset_domain(adev->reset_domain); in amdgpu_pci_resume()
5811 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_device_cache_pci_state() local
5816 kfree(adev->pci_state); in amdgpu_device_cache_pci_state()
5818 adev->pci_state = pci_store_saved_state(pdev); in amdgpu_device_cache_pci_state()
5820 if (!adev->pci_state) { in amdgpu_device_cache_pci_state()
5835 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_device_load_pci_state() local
5838 if (!adev->pci_state) in amdgpu_device_load_pci_state()
5841 r = pci_load_saved_state(pdev, adev->pci_state); in amdgpu_device_load_pci_state()
5853 void amdgpu_device_flush_hdp(struct amdgpu_device *adev, in amdgpu_device_flush_hdp() argument
5857 if ((adev->flags & AMD_IS_APU) && !amdgpu_passthrough(adev)) in amdgpu_device_flush_hdp()
5860 if (adev->gmc.xgmi.connected_to_cpu) in amdgpu_device_flush_hdp()
5866 amdgpu_asic_flush_hdp(adev, ring); in amdgpu_device_flush_hdp()
5869 void amdgpu_device_invalidate_hdp(struct amdgpu_device *adev, in amdgpu_device_invalidate_hdp() argument
5873 if ((adev->flags & AMD_IS_APU) && !amdgpu_passthrough(adev)) in amdgpu_device_invalidate_hdp()
5876 if (adev->gmc.xgmi.connected_to_cpu) in amdgpu_device_invalidate_hdp()
5879 amdgpu_asic_invalidate_hdp(adev, ring); in amdgpu_device_invalidate_hdp()
5882 int amdgpu_in_reset(struct amdgpu_device *adev) in amdgpu_in_reset() argument
5884 return atomic_read(&adev->reset_domain->in_gpu_reset); in amdgpu_in_reset()
5907 void amdgpu_device_halt(struct amdgpu_device *adev) in amdgpu_device_halt() argument
5909 struct pci_dev *pdev = adev->pdev; in amdgpu_device_halt()
5910 struct drm_device *ddev = adev_to_drm(adev); in amdgpu_device_halt()
5912 amdgpu_xcp_dev_unplug(adev); in amdgpu_device_halt()
5915 amdgpu_irq_disable_all(adev); in amdgpu_device_halt()
5917 amdgpu_fence_driver_hw_fini(adev); in amdgpu_device_halt()
5919 adev->no_hw_access = true; in amdgpu_device_halt()
5921 amdgpu_device_unmap_mmio(adev); in amdgpu_device_halt()
5927 u32 amdgpu_device_pcie_port_rreg(struct amdgpu_device *adev, in amdgpu_device_pcie_port_rreg() argument
5933 address = adev->nbio.funcs->get_pcie_port_index_offset(adev); in amdgpu_device_pcie_port_rreg()
5934 data = adev->nbio.funcs->get_pcie_port_data_offset(adev); in amdgpu_device_pcie_port_rreg()
5936 spin_lock_irqsave(&adev->pcie_idx_lock, flags); in amdgpu_device_pcie_port_rreg()
5940 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags); in amdgpu_device_pcie_port_rreg()
5944 void amdgpu_device_pcie_port_wreg(struct amdgpu_device *adev, in amdgpu_device_pcie_port_wreg() argument
5949 address = adev->nbio.funcs->get_pcie_port_index_offset(adev); in amdgpu_device_pcie_port_wreg()
5950 data = adev->nbio.funcs->get_pcie_port_data_offset(adev); in amdgpu_device_pcie_port_wreg()
5952 spin_lock_irqsave(&adev->pcie_idx_lock, flags); in amdgpu_device_pcie_port_wreg()
5957 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags); in amdgpu_device_pcie_port_wreg()
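Note: the PCIe-port register helpers use the classic index/data register pair: the NBIO callbacks return the port index and data offsets, the index register selects reg * 4 under adev->pcie_idx_lock, and the data register is then read or written. Sketch of the read side; the WREG32/RREG32 body lines are filled in from the upstream driver (they do not reference adev, so the listing omits them):

u32 amdgpu_device_pcie_port_rreg(struct amdgpu_device *adev, u32 reg)
{
	unsigned long flags;
	u32 r, address, data;

	address = adev->nbio.funcs->get_pcie_port_index_offset(adev);
	data = adev->nbio.funcs->get_pcie_port_data_offset(adev);

	spin_lock_irqsave(&adev->pcie_idx_lock, flags);
	WREG32(address, reg * 4);	/* select the port register */
	(void)RREG32(address);		/* read back to post the write */
	r = RREG32(data);
	spin_unlock_irqrestore(&adev->pcie_idx_lock, flags);
	return r;
}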
5969 struct dma_fence *amdgpu_device_switch_gang(struct amdgpu_device *adev, in amdgpu_device_switch_gang() argument
5977 old = dma_fence_get_rcu_safe(&adev->gang_submit); in amdgpu_device_switch_gang()
5986 } while (cmpxchg((struct dma_fence __force **)&adev->gang_submit, in amdgpu_device_switch_gang()
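Note: amdgpu_device_switch_gang() installs a new gang-submit fence with a lockless retry loop: snapshot the current fence with dma_fence_get_rcu_safe(), refuse the switch while the previous gang is still running, and publish the new fence with cmpxchg(), retrying if another thread won the race. A sketch of that loop, reconstructed around the two lines shown in the listing (the signaled check and reference handling are filled in from the upstream driver):

struct dma_fence *amdgpu_device_switch_gang(struct amdgpu_device *adev,
					    struct dma_fence *gang)
{
	struct dma_fence *old = NULL;

	do {
		dma_fence_put(old);
		rcu_read_lock();
		old = dma_fence_get_rcu_safe(&adev->gang_submit);
		rcu_read_unlock();

		if (old == gang)
			break;

		/* the previous gang must finish before we may switch */
		if (!dma_fence_is_signaled(old))
			return old;

	} while (cmpxchg((struct dma_fence __force **)&adev->gang_submit,
			 old, gang) != old);

	dma_fence_put(old);
	return NULL;
}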
5993 bool amdgpu_device_has_display_hardware(struct amdgpu_device *adev) in amdgpu_device_has_display_hardware() argument
5995 switch (adev->asic_type) { in amdgpu_device_has_display_hardware()
6027 if (!adev->ip_versions[DCE_HWIP][0] || in amdgpu_device_has_display_hardware()
6028 (adev->harvest_ip_mask & AMD_HARVEST_IP_DMU_MASK)) in amdgpu_device_has_display_hardware()
6034 uint32_t amdgpu_device_wait_on_rreg(struct amdgpu_device *adev, in amdgpu_device_wait_on_rreg() argument
6041 uint32_t loop = adev->usec_timeout; in amdgpu_device_wait_on_rreg()
6045 loop = adev->usec_timeout; in amdgpu_device_wait_on_rreg()