Lines Matching refs:adev (drivers/gpu/drm/amd/amdgpu/amdgpu_device.c)
144 struct amdgpu_device *adev = drm_to_adev(ddev); in amdgpu_device_get_pcie_replay_count() local
145 uint64_t cnt = amdgpu_asic_get_pcie_replay_count(adev); in amdgpu_device_get_pcie_replay_count()
153 static void amdgpu_device_get_pcie_info(struct amdgpu_device *adev);
169 struct amdgpu_device *adev = drm_to_adev(ddev); in amdgpu_device_get_product_name() local
171 return sysfs_emit(buf, "%s\n", adev->product_name); in amdgpu_device_get_product_name()
191 struct amdgpu_device *adev = drm_to_adev(ddev); in amdgpu_device_get_product_number() local
193 return sysfs_emit(buf, "%s\n", adev->product_number); in amdgpu_device_get_product_number()
213 struct amdgpu_device *adev = drm_to_adev(ddev); in amdgpu_device_get_serial_number() local
215 return sysfs_emit(buf, "%s\n", adev->serial); in amdgpu_device_get_serial_number()
231 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_device_supports_px() local
233 if ((adev->flags & AMD_IS_PX) && !amdgpu_is_atpx_hybrid()) in amdgpu_device_supports_px()
248 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_device_supports_boco() local
250 if (adev->has_pr3 || in amdgpu_device_supports_boco()
251 ((adev->flags & AMD_IS_PX) && amdgpu_is_atpx_hybrid())) in amdgpu_device_supports_boco()
266 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_device_supports_baco() local
268 return amdgpu_asic_supports_baco(adev); in amdgpu_device_supports_baco()
299 void amdgpu_device_mm_access(struct amdgpu_device *adev, loff_t pos, in amdgpu_device_mm_access() argument
308 if (!drm_dev_enter(&adev->ddev, &idx)) in amdgpu_device_mm_access()
313 spin_lock_irqsave(&adev->mmio_idx_lock, flags); in amdgpu_device_mm_access()
328 spin_unlock_irqrestore(&adev->mmio_idx_lock, flags); in amdgpu_device_mm_access()
343 size_t amdgpu_device_aper_access(struct amdgpu_device *adev, loff_t pos, in amdgpu_device_aper_access() argument
351 if (!adev->mman.aper_base_kaddr) in amdgpu_device_aper_access()
354 last = min(pos + size, adev->gmc.visible_vram_size); in amdgpu_device_aper_access()
356 addr = adev->mman.aper_base_kaddr + pos; in amdgpu_device_aper_access()
362 amdgpu_device_flush_hdp(adev, NULL); in amdgpu_device_aper_access()
364 amdgpu_device_invalidate_hdp(adev, NULL); in amdgpu_device_aper_access()
386 void amdgpu_device_vram_access(struct amdgpu_device *adev, loff_t pos, in amdgpu_device_vram_access() argument
392 count = amdgpu_device_aper_access(adev, pos, buf, size, write); in amdgpu_device_vram_access()
398 amdgpu_device_mm_access(adev, pos, buf, size, write); in amdgpu_device_vram_access()
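
The three helpers above form a tiered VRAM copy path: amdgpu_device_vram_access() first lets amdgpu_device_aper_access() satisfy as much of the request as fits in the CPU-visible aperture (clamped to gmc.visible_vram_size, with HDP flush/invalidate around the access), then falls back to the slow MM index/data window for the remainder. A minimal userspace C sketch of that split; both copy helpers are illustrative stand-ins, not the driver's routines:

    #include <stddef.h>
    #include <string.h>

    /* Fast path: bounded by the CPU-visible aperture; returns bytes handled. */
    static size_t aper_copy(char *vram, size_t visible, size_t pos,
                            void *buf, size_t size, int write)
    {
        size_t n = 0;

        if (pos < visible)
            n = visible - pos < size ? visible - pos : size;
        if (write)
            memcpy(vram + pos, buf, n);
        else
            memcpy(buf, vram + pos, n);
        return n;
    }

    /* Slow path: models the MM_INDEX/MM_DATA window, one dword at a time. */
    static void mm_copy(char *vram, size_t pos, void *buf, size_t size, int write)
    {
        if (write)
            memcpy(vram + pos, buf, size);
        else
            memcpy(buf, vram + pos, size);
    }

    void vram_access(char *vram, size_t visible, size_t pos,
                     void *buf, size_t size, int write)
    {
        size_t done = aper_copy(vram, visible, pos, buf, size, write);

        if (size - done)            /* remainder lies beyond the aperture */
            mm_copy(vram, pos + done, (char *)buf + done, size - done, write);
    }
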
407 bool amdgpu_device_skip_hw_access(struct amdgpu_device *adev) in amdgpu_device_skip_hw_access() argument
409 if (adev->no_hw_access) in amdgpu_device_skip_hw_access()
425 if (down_read_trylock(&adev->reset_sem)) in amdgpu_device_skip_hw_access()
426 up_read(&adev->reset_sem); in amdgpu_device_skip_hw_access()
428 lockdep_assert_held(&adev->reset_sem); in amdgpu_device_skip_hw_access()
443 uint32_t amdgpu_device_rreg(struct amdgpu_device *adev, in amdgpu_device_rreg() argument
448 if (amdgpu_device_skip_hw_access(adev)) in amdgpu_device_rreg()
451 if ((reg * 4) < adev->rmmio_size) { in amdgpu_device_rreg()
453 amdgpu_sriov_runtime(adev) && in amdgpu_device_rreg()
454 down_read_trylock(&adev->reset_sem)) { in amdgpu_device_rreg()
455 ret = amdgpu_kiq_rreg(adev, reg); in amdgpu_device_rreg()
456 up_read(&adev->reset_sem); in amdgpu_device_rreg()
458 ret = readl(((void __iomem *)adev->rmmio) + (reg * 4)); in amdgpu_device_rreg()
461 ret = adev->pcie_rreg(adev, reg * 4); in amdgpu_device_rreg()
464 trace_amdgpu_device_rreg(adev->pdev->device, reg, ret); in amdgpu_device_rreg()
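
amdgpu_device_rreg() dispatches on the register offset: anything inside the rmmio BAR is read with readl() (or routed through the KIQ ring when running as an SR-IOV VF at runtime and the reset semaphore can be taken), while offsets past rmmio_size go through the indirect pcie_rreg callback. A hedged sketch of that dispatch shape; struct dev_model and both callbacks are illustrative:

    #include <stdbool.h>
    #include <stdint.h>

    struct dev_model {
        volatile uint32_t *rmmio;   /* mapped register BAR */
        uint32_t rmmio_size;        /* BAR length in bytes */
        bool sriov_runtime;         /* VF with a live KIQ ring */
        uint32_t (*kiq_rreg)(struct dev_model *d, uint32_t reg);
        uint32_t (*pcie_rreg)(struct dev_model *d, uint32_t byte_off);
    };

    uint32_t device_rreg(struct dev_model *d, uint32_t reg)
    {
        if (reg * 4 < d->rmmio_size) {
            if (d->sriov_runtime)   /* the driver also trylocks reset_sem */
                return d->kiq_rreg(d, reg);
            return d->rmmio[reg];   /* plain MMIO readl() */
        }
        return d->pcie_rreg(d, reg * 4);    /* indirect index/data access */
    }
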
483 uint8_t amdgpu_mm_rreg8(struct amdgpu_device *adev, uint32_t offset) in amdgpu_mm_rreg8() argument
485 if (amdgpu_device_skip_hw_access(adev)) in amdgpu_mm_rreg8()
488 if (offset < adev->rmmio_size) in amdgpu_mm_rreg8()
489 return (readb(adev->rmmio + offset)); in amdgpu_mm_rreg8()
508 void amdgpu_mm_wreg8(struct amdgpu_device *adev, uint32_t offset, uint8_t value) in amdgpu_mm_wreg8() argument
510 if (amdgpu_device_skip_hw_access(adev)) in amdgpu_mm_wreg8()
513 if (offset < adev->rmmio_size) in amdgpu_mm_wreg8()
514 writeb(value, adev->rmmio + offset); in amdgpu_mm_wreg8()
529 void amdgpu_device_wreg(struct amdgpu_device *adev, in amdgpu_device_wreg() argument
533 if (amdgpu_device_skip_hw_access(adev)) in amdgpu_device_wreg()
536 if ((reg * 4) < adev->rmmio_size) { in amdgpu_device_wreg()
538 amdgpu_sriov_runtime(adev) && in amdgpu_device_wreg()
539 down_read_trylock(&adev->reset_sem)) { in amdgpu_device_wreg()
540 amdgpu_kiq_wreg(adev, reg, v); in amdgpu_device_wreg()
541 up_read(&adev->reset_sem); in amdgpu_device_wreg()
543 writel(v, ((void __iomem *)adev->rmmio) + (reg * 4)); in amdgpu_device_wreg()
546 adev->pcie_wreg(adev, reg * 4, v); in amdgpu_device_wreg()
549 trace_amdgpu_device_wreg(adev->pdev->device, reg, v); in amdgpu_device_wreg()
557 void amdgpu_mm_wreg_mmio_rlc(struct amdgpu_device *adev, in amdgpu_mm_wreg_mmio_rlc() argument
560 if (amdgpu_device_skip_hw_access(adev)) in amdgpu_mm_wreg_mmio_rlc()
563 if (amdgpu_sriov_fullaccess(adev) && in amdgpu_mm_wreg_mmio_rlc()
564 adev->gfx.rlc.funcs && in amdgpu_mm_wreg_mmio_rlc()
565 adev->gfx.rlc.funcs->is_rlcg_access_range) { in amdgpu_mm_wreg_mmio_rlc()
566 if (adev->gfx.rlc.funcs->is_rlcg_access_range(adev, reg)) in amdgpu_mm_wreg_mmio_rlc()
567 return adev->gfx.rlc.funcs->sriov_wreg(adev, reg, v, 0, 0); in amdgpu_mm_wreg_mmio_rlc()
569 writel(v, ((void __iomem *)adev->rmmio) + (reg * 4)); in amdgpu_mm_wreg_mmio_rlc()
582 u32 amdgpu_mm_rdoorbell(struct amdgpu_device *adev, u32 index) in amdgpu_mm_rdoorbell() argument
584 if (amdgpu_device_skip_hw_access(adev)) in amdgpu_mm_rdoorbell()
587 if (index < adev->doorbell.num_doorbells) { in amdgpu_mm_rdoorbell()
588 return readl(adev->doorbell.ptr + index); in amdgpu_mm_rdoorbell()
605 void amdgpu_mm_wdoorbell(struct amdgpu_device *adev, u32 index, u32 v) in amdgpu_mm_wdoorbell() argument
607 if (amdgpu_device_skip_hw_access(adev)) in amdgpu_mm_wdoorbell()
610 if (index < adev->doorbell.num_doorbells) { in amdgpu_mm_wdoorbell()
611 writel(v, adev->doorbell.ptr + index); in amdgpu_mm_wdoorbell()
626 u64 amdgpu_mm_rdoorbell64(struct amdgpu_device *adev, u32 index) in amdgpu_mm_rdoorbell64() argument
628 if (amdgpu_device_skip_hw_access(adev)) in amdgpu_mm_rdoorbell64()
631 if (index < adev->doorbell.num_doorbells) { in amdgpu_mm_rdoorbell64()
632 return atomic64_read((atomic64_t *)(adev->doorbell.ptr + index)); in amdgpu_mm_rdoorbell64()
649 void amdgpu_mm_wdoorbell64(struct amdgpu_device *adev, u32 index, u64 v) in amdgpu_mm_wdoorbell64() argument
651 if (amdgpu_device_skip_hw_access(adev)) in amdgpu_mm_wdoorbell64()
654 if (index < adev->doorbell.num_doorbells) { in amdgpu_mm_wdoorbell64()
655 atomic64_set((atomic64_t *)(adev->doorbell.ptr + index), v); in amdgpu_mm_wdoorbell64()
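
The 64-bit doorbell helpers deliberately go through atomic64_read()/atomic64_set() so the doorbell leaves the CPU as one 64-bit store; two independent 32-bit writes could ring the engine with a torn ring pointer. A small GCC/Clang C model of the same idea (doorbell_page stands in for the ioremapped adev->doorbell.ptr):

    #include <stdint.h>

    #define NUM_DOORBELLS 1024
    static uint32_t doorbell_page[NUM_DOORBELLS];   /* stands in for the BAR */

    /* index counts 32-bit slots, as in the driver; 64-bit doorbells are
     * expected at 8-byte-aligned slots. */
    void wdoorbell64(uint32_t index, uint64_t v)
    {
        if (index < NUM_DOORBELLS)
            __atomic_store_n((uint64_t *)&doorbell_page[index], v,
                             __ATOMIC_RELEASE);
    }

    uint64_t rdoorbell64(uint32_t index)
    {
        if (index < NUM_DOORBELLS)
            return __atomic_load_n((uint64_t *)&doorbell_page[index],
                                   __ATOMIC_ACQUIRE);
        return 0;
    }
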
671 u32 amdgpu_device_indirect_rreg(struct amdgpu_device *adev, in amdgpu_device_indirect_rreg() argument
680 spin_lock_irqsave(&adev->pcie_idx_lock, flags); in amdgpu_device_indirect_rreg()
681 pcie_index_offset = (void __iomem *)adev->rmmio + pcie_index * 4; in amdgpu_device_indirect_rreg()
682 pcie_data_offset = (void __iomem *)adev->rmmio + pcie_data * 4; in amdgpu_device_indirect_rreg()
687 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags); in amdgpu_device_indirect_rreg()
702 u64 amdgpu_device_indirect_rreg64(struct amdgpu_device *adev, in amdgpu_device_indirect_rreg64() argument
711 spin_lock_irqsave(&adev->pcie_idx_lock, flags); in amdgpu_device_indirect_rreg64()
712 pcie_index_offset = (void __iomem *)adev->rmmio + pcie_index * 4; in amdgpu_device_indirect_rreg64()
713 pcie_data_offset = (void __iomem *)adev->rmmio + pcie_data * 4; in amdgpu_device_indirect_rreg64()
723 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags); in amdgpu_device_indirect_rreg64()
738 void amdgpu_device_indirect_wreg(struct amdgpu_device *adev, in amdgpu_device_indirect_wreg() argument
746 spin_lock_irqsave(&adev->pcie_idx_lock, flags); in amdgpu_device_indirect_wreg()
747 pcie_index_offset = (void __iomem *)adev->rmmio + pcie_index * 4; in amdgpu_device_indirect_wreg()
748 pcie_data_offset = (void __iomem *)adev->rmmio + pcie_data * 4; in amdgpu_device_indirect_wreg()
754 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags); in amdgpu_device_indirect_wreg()
767 void amdgpu_device_indirect_wreg64(struct amdgpu_device *adev, in amdgpu_device_indirect_wreg64() argument
775 spin_lock_irqsave(&adev->pcie_idx_lock, flags); in amdgpu_device_indirect_wreg64()
776 pcie_index_offset = (void __iomem *)adev->rmmio + pcie_index * 4; in amdgpu_device_indirect_wreg64()
777 pcie_data_offset = (void __iomem *)adev->rmmio + pcie_data * 4; in amdgpu_device_indirect_wreg64()
789 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags); in amdgpu_device_indirect_wreg64()
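
All four indirect accessors share one shape: take pcie_idx_lock (irqsave in the driver), write the target offset into the PCIE index register, then access the data register through the same mapped BAR; the 64-bit variants repeat the index/data step for the high dword before dropping the lock. A compact model with a fake index/data pair, a pthread mutex standing in for the spinlock:

    #include <pthread.h>
    #include <stdint.h>

    /* Fake hardware: one index register selecting into a backing store. */
    static uint32_t backing[4096];
    static uint32_t index_reg;
    #define DATA_REG (backing[index_reg / 4])

    static pthread_mutex_t pcie_idx_lock = PTHREAD_MUTEX_INITIALIZER;

    uint64_t indirect_rreg64(uint32_t reg_addr)
    {
        uint64_t r;

        pthread_mutex_lock(&pcie_idx_lock);
        index_reg = reg_addr;               /* select low dword */
        r = DATA_REG;
        index_reg = reg_addr + 4;           /* select high dword */
        r |= (uint64_t)DATA_REG << 32;
        pthread_mutex_unlock(&pcie_idx_lock);
        return r;
    }

    void indirect_wreg64(uint32_t reg_addr, uint64_t v)
    {
        pthread_mutex_lock(&pcie_idx_lock);
        index_reg = reg_addr;
        DATA_REG = (uint32_t)v;
        index_reg = reg_addr + 4;
        DATA_REG = (uint32_t)(v >> 32);
        pthread_mutex_unlock(&pcie_idx_lock);
    }
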
802 static uint32_t amdgpu_invalid_rreg(struct amdgpu_device *adev, uint32_t reg) in amdgpu_invalid_rreg() argument
819 static void amdgpu_invalid_wreg(struct amdgpu_device *adev, uint32_t reg, uint32_t v) in amdgpu_invalid_wreg() argument
836 static uint64_t amdgpu_invalid_rreg64(struct amdgpu_device *adev, uint32_t reg) in amdgpu_invalid_rreg64() argument
853 static void amdgpu_invalid_wreg64(struct amdgpu_device *adev, uint32_t reg, uint64_t v) in amdgpu_invalid_wreg64() argument
871 static uint32_t amdgpu_block_invalid_rreg(struct amdgpu_device *adev, in amdgpu_block_invalid_rreg() argument
891 static void amdgpu_block_invalid_wreg(struct amdgpu_device *adev, in amdgpu_block_invalid_wreg() argument
907 static int amdgpu_device_asic_init(struct amdgpu_device *adev) in amdgpu_device_asic_init() argument
909 amdgpu_asic_pre_asic_init(adev); in amdgpu_device_asic_init()
911 return amdgpu_atom_asic_init(adev->mode_info.atom_context); in amdgpu_device_asic_init()
922 static int amdgpu_device_vram_scratch_init(struct amdgpu_device *adev) in amdgpu_device_vram_scratch_init() argument
924 return amdgpu_bo_create_kernel(adev, AMDGPU_GPU_PAGE_SIZE, in amdgpu_device_vram_scratch_init()
926 &adev->vram_scratch.robj, in amdgpu_device_vram_scratch_init()
927 &adev->vram_scratch.gpu_addr, in amdgpu_device_vram_scratch_init()
928 (void **)&adev->vram_scratch.ptr); in amdgpu_device_vram_scratch_init()
938 static void amdgpu_device_vram_scratch_fini(struct amdgpu_device *adev) in amdgpu_device_vram_scratch_fini() argument
940 amdgpu_bo_free_kernel(&adev->vram_scratch.robj, NULL, NULL); in amdgpu_device_vram_scratch_fini()
953 void amdgpu_device_program_register_sequence(struct amdgpu_device *adev, in amdgpu_device_program_register_sequence() argument
973 if (adev->family >= AMDGPU_FAMILY_AI) in amdgpu_device_program_register_sequence()
990 void amdgpu_device_pci_config_reset(struct amdgpu_device *adev) in amdgpu_device_pci_config_reset() argument
992 pci_write_config_dword(adev->pdev, 0x7c, AMDGPU_ASIC_RESET_DATA); in amdgpu_device_pci_config_reset()
1002 int amdgpu_device_pci_reset(struct amdgpu_device *adev) in amdgpu_device_pci_reset() argument
1004 return pci_reset_function(adev->pdev); in amdgpu_device_pci_reset()
1018 static int amdgpu_device_doorbell_init(struct amdgpu_device *adev) in amdgpu_device_doorbell_init() argument
1022 if (adev->asic_type < CHIP_BONAIRE) { in amdgpu_device_doorbell_init()
1023 adev->doorbell.base = 0; in amdgpu_device_doorbell_init()
1024 adev->doorbell.size = 0; in amdgpu_device_doorbell_init()
1025 adev->doorbell.num_doorbells = 0; in amdgpu_device_doorbell_init()
1026 adev->doorbell.ptr = NULL; in amdgpu_device_doorbell_init()
1030 if (pci_resource_flags(adev->pdev, 2) & IORESOURCE_UNSET) in amdgpu_device_doorbell_init()
1033 amdgpu_asic_init_doorbell_index(adev); in amdgpu_device_doorbell_init()
1036 adev->doorbell.base = pci_resource_start(adev->pdev, 2); in amdgpu_device_doorbell_init()
1037 adev->doorbell.size = pci_resource_len(adev->pdev, 2); in amdgpu_device_doorbell_init()
1039 adev->doorbell.num_doorbells = min_t(u32, adev->doorbell.size / sizeof(u32), in amdgpu_device_doorbell_init()
1040 adev->doorbell_index.max_assignment+1); in amdgpu_device_doorbell_init()
1041 if (adev->doorbell.num_doorbells == 0) in amdgpu_device_doorbell_init()
1050 if (adev->asic_type >= CHIP_VEGA10) in amdgpu_device_doorbell_init()
1051 adev->doorbell.num_doorbells += 0x400; in amdgpu_device_doorbell_init()
1053 adev->doorbell.ptr = ioremap(adev->doorbell.base, in amdgpu_device_doorbell_init()
1054 adev->doorbell.num_doorbells * in amdgpu_device_doorbell_init()
1056 if (adev->doorbell.ptr == NULL) in amdgpu_device_doorbell_init()
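
Doorbell setup reads PCI BAR 2 and sizes the usable slot count as min(BAR bytes / sizeof(u32), max_assignment + 1), extends the range by 0x400 entries on VEGA10 and newer, and only then ioremap()s it. The sizing arithmetic on its own, as an illustrative sketch:

    #include <stdint.h>

    /* Illustrative sizing only; bar_bytes and max_assignment mirror the
     * pci_resource_len() and doorbell_index inputs in the driver. */
    static uint32_t doorbell_slots(uint64_t bar_bytes, uint32_t max_assignment,
                                   int vega10_or_later)
    {
        uint64_t by_bar = bar_bytes / sizeof(uint32_t);
        uint32_t n = by_bar < (uint64_t)max_assignment + 1
                        ? (uint32_t)by_bar : max_assignment + 1;

        if (n == 0)
            return 0;               /* the driver fails with -EINVAL here */
        if (vega10_or_later)
            n += 0x400;             /* extra range beyond the assigned indices */
        return n;
    }
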
1069 static void amdgpu_device_doorbell_fini(struct amdgpu_device *adev) in amdgpu_device_doorbell_fini() argument
1071 iounmap(adev->doorbell.ptr); in amdgpu_device_doorbell_fini()
1072 adev->doorbell.ptr = NULL; in amdgpu_device_doorbell_fini()
1091 static void amdgpu_device_wb_fini(struct amdgpu_device *adev) in amdgpu_device_wb_fini() argument
1093 if (adev->wb.wb_obj) { in amdgpu_device_wb_fini()
1094 amdgpu_bo_free_kernel(&adev->wb.wb_obj, in amdgpu_device_wb_fini()
1095 &adev->wb.gpu_addr, in amdgpu_device_wb_fini()
1096 (void **)&adev->wb.wb); in amdgpu_device_wb_fini()
1097 adev->wb.wb_obj = NULL; in amdgpu_device_wb_fini()
1110 static int amdgpu_device_wb_init(struct amdgpu_device *adev) in amdgpu_device_wb_init() argument
1114 if (adev->wb.wb_obj == NULL) { in amdgpu_device_wb_init()
1116 r = amdgpu_bo_create_kernel(adev, AMDGPU_MAX_WB * sizeof(uint32_t) * 8, in amdgpu_device_wb_init()
1118 &adev->wb.wb_obj, &adev->wb.gpu_addr, in amdgpu_device_wb_init()
1119 (void **)&adev->wb.wb); in amdgpu_device_wb_init()
1121 dev_warn(adev->dev, "(%d) create WB bo failed\n", r); in amdgpu_device_wb_init()
1125 adev->wb.num_wb = AMDGPU_MAX_WB; in amdgpu_device_wb_init()
1126 memset(&adev->wb.used, 0, sizeof(adev->wb.used)); in amdgpu_device_wb_init()
1129 memset((char *)adev->wb.wb, 0, AMDGPU_MAX_WB * sizeof(uint32_t) * 8); in amdgpu_device_wb_init()
1144 int amdgpu_device_wb_get(struct amdgpu_device *adev, u32 *wb) in amdgpu_device_wb_get() argument
1146 unsigned long offset = find_first_zero_bit(adev->wb.used, adev->wb.num_wb); in amdgpu_device_wb_get()
1148 if (offset < adev->wb.num_wb) { in amdgpu_device_wb_get()
1149 __set_bit(offset, adev->wb.used); in amdgpu_device_wb_get()
1165 void amdgpu_device_wb_free(struct amdgpu_device *adev, u32 wb) in amdgpu_device_wb_free() argument
1168 if (wb < adev->wb.num_wb) in amdgpu_device_wb_free()
1169 __clear_bit(wb, adev->wb.used); in amdgpu_device_wb_free()
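
Writeback slots are handed out from a plain bitmap: find_first_zero_bit() picks the lowest free slot, __set_bit() claims it and __clear_bit() releases it. The same allocator in portable C; WB_COUNT is illustrative (the driver uses AMDGPU_MAX_WB):

    #include <stdint.h>

    #define WB_COUNT 256                    /* illustrative capacity */
    static uint64_t wb_used[WB_COUNT / 64];

    /* Lowest free slot, claimed; -1 when full (the driver returns -EINVAL). */
    int wb_get(void)
    {
        for (int i = 0; i < WB_COUNT; i++) {
            if (!(wb_used[i / 64] & (1ULL << (i % 64)))) {
                wb_used[i / 64] |= 1ULL << (i % 64);
                return i;
            }
        }
        return -1;
    }

    void wb_free(int wb)
    {
        if (wb >= 0 && wb < WB_COUNT)
            wb_used[wb / 64] &= ~(1ULL << (wb % 64));
    }
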
1181 int amdgpu_device_resize_fb_bar(struct amdgpu_device *adev) in amdgpu_device_resize_fb_bar() argument
1183 int rbar_size = pci_rebar_bytes_to_size(adev->gmc.real_vram_size); in amdgpu_device_resize_fb_bar()
1191 if (amdgpu_sriov_vf(adev)) in amdgpu_device_resize_fb_bar()
1195 if (adev->gmc.real_vram_size && in amdgpu_device_resize_fb_bar()
1196 (pci_resource_len(adev->pdev, 0) >= adev->gmc.real_vram_size)) in amdgpu_device_resize_fb_bar()
1200 root = adev->pdev->bus; in amdgpu_device_resize_fb_bar()
1215 rbar_size = min(fls(pci_rebar_get_possible_sizes(adev->pdev, 0)) - 1, in amdgpu_device_resize_fb_bar()
1219 pci_read_config_word(adev->pdev, PCI_COMMAND, &cmd); in amdgpu_device_resize_fb_bar()
1220 pci_write_config_word(adev->pdev, PCI_COMMAND, in amdgpu_device_resize_fb_bar()
1224 amdgpu_device_doorbell_fini(adev); in amdgpu_device_resize_fb_bar()
1225 if (adev->asic_type >= CHIP_BONAIRE) in amdgpu_device_resize_fb_bar()
1226 pci_release_resource(adev->pdev, 2); in amdgpu_device_resize_fb_bar()
1228 pci_release_resource(adev->pdev, 0); in amdgpu_device_resize_fb_bar()
1230 r = pci_resize_resource(adev->pdev, 0, rbar_size); in amdgpu_device_resize_fb_bar()
1236 pci_assign_unassigned_bus_resources(adev->pdev->bus); in amdgpu_device_resize_fb_bar()
1241 r = amdgpu_device_doorbell_init(adev); in amdgpu_device_resize_fb_bar()
1242 if (r || (pci_resource_flags(adev->pdev, 0) & IORESOURCE_UNSET)) in amdgpu_device_resize_fb_bar()
1245 pci_write_config_word(adev->pdev, PCI_COMMAND, cmd); in amdgpu_device_resize_fb_bar()
1262 bool amdgpu_device_need_post(struct amdgpu_device *adev) in amdgpu_device_need_post() argument
1266 if (amdgpu_sriov_vf(adev)) in amdgpu_device_need_post()
1269 if (amdgpu_passthrough(adev)) { in amdgpu_device_need_post()
1275 if (adev->asic_type == CHIP_FIJI) { in amdgpu_device_need_post()
1278 err = request_firmware(&adev->pm.fw, "amdgpu/fiji_smc.bin", adev->dev); in amdgpu_device_need_post()
1283 fw_ver = *((uint32_t *)adev->pm.fw->data + 69); in amdgpu_device_need_post()
1290 if (adev->gmc.xgmi.pending_reset) in amdgpu_device_need_post()
1293 if (adev->has_hw_reset) { in amdgpu_device_need_post()
1294 adev->has_hw_reset = false; in amdgpu_device_need_post()
1299 if (adev->asic_type >= CHIP_BONAIRE) in amdgpu_device_need_post()
1300 return amdgpu_atombios_scratch_need_asic_init(adev); in amdgpu_device_need_post()
1303 reg = amdgpu_asic_get_config_memsize(adev); in amdgpu_device_need_post()
1324 struct amdgpu_device *adev = drm_to_adev(pci_get_drvdata(pdev)); in amdgpu_device_vga_set_decode() local
1325 amdgpu_asic_set_vga_state(adev, state); in amdgpu_device_vga_set_decode()
1343 static void amdgpu_device_check_block_size(struct amdgpu_device *adev) in amdgpu_device_check_block_size() argument
1352 dev_warn(adev->dev, "VM page table size (%d) too small\n", in amdgpu_device_check_block_size()
1366 static void amdgpu_device_check_vm_size(struct amdgpu_device *adev) in amdgpu_device_check_vm_size() argument
1373 dev_warn(adev->dev, "VM size (%d) too small, min is 1GB\n", in amdgpu_device_check_vm_size()
1379 static void amdgpu_device_check_smu_prv_buffer_size(struct amdgpu_device *adev) in amdgpu_device_check_smu_prv_buffer_size() argument
1409 adev->pm.smu_prv_buffer_size = amdgpu_smu_memory_pool_size << 28; in amdgpu_device_check_smu_prv_buffer_size()
1416 adev->pm.smu_prv_buffer_size = 0; in amdgpu_device_check_smu_prv_buffer_size()
1419 static int amdgpu_device_init_apu_flags(struct amdgpu_device *adev) in amdgpu_device_init_apu_flags() argument
1421 if (!(adev->flags & AMD_IS_APU) || in amdgpu_device_init_apu_flags()
1422 adev->asic_type < CHIP_RAVEN) in amdgpu_device_init_apu_flags()
1425 switch (adev->asic_type) { in amdgpu_device_init_apu_flags()
1427 if (adev->pdev->device == 0x15dd) in amdgpu_device_init_apu_flags()
1428 adev->apu_flags |= AMD_APU_IS_RAVEN; in amdgpu_device_init_apu_flags()
1429 if (adev->pdev->device == 0x15d8) in amdgpu_device_init_apu_flags()
1430 adev->apu_flags |= AMD_APU_IS_PICASSO; in amdgpu_device_init_apu_flags()
1433 if ((adev->pdev->device == 0x1636) || in amdgpu_device_init_apu_flags()
1434 (adev->pdev->device == 0x164c)) in amdgpu_device_init_apu_flags()
1435 adev->apu_flags |= AMD_APU_IS_RENOIR; in amdgpu_device_init_apu_flags()
1437 adev->apu_flags |= AMD_APU_IS_GREEN_SARDINE; in amdgpu_device_init_apu_flags()
1440 adev->apu_flags |= AMD_APU_IS_VANGOGH; in amdgpu_device_init_apu_flags()
1445 if (adev->pdev->device == 0x13FE) in amdgpu_device_init_apu_flags()
1446 adev->apu_flags |= AMD_APU_IS_CYAN_SKILLFISH2; in amdgpu_device_init_apu_flags()
1463 static int amdgpu_device_check_arguments(struct amdgpu_device *adev) in amdgpu_device_check_arguments() argument
1466 dev_warn(adev->dev, "sched jobs (%d) must be at least 4\n", in amdgpu_device_check_arguments()
1470 dev_warn(adev->dev, "sched jobs (%d) must be a power of 2\n", in amdgpu_device_check_arguments()
1477 dev_warn(adev->dev, "gart size (%d) too small\n", in amdgpu_device_check_arguments()
1484 dev_warn(adev->dev, "gtt size (%d) too small\n", in amdgpu_device_check_arguments()
1492 dev_warn(adev->dev, "valid range is between 4 and 9\n"); in amdgpu_device_check_arguments()
1497 dev_warn(adev->dev, "sched hw submission jobs (%d) must be at least 2\n", in amdgpu_device_check_arguments()
1501 dev_warn(adev->dev, "sched hw submission jobs (%d) must be a power of 2\n", in amdgpu_device_check_arguments()
1506 amdgpu_device_check_smu_prv_buffer_size(adev); in amdgpu_device_check_arguments()
1508 amdgpu_device_check_vm_size(adev); in amdgpu_device_check_arguments()
1510 amdgpu_device_check_block_size(adev); in amdgpu_device_check_arguments()
1512 adev->firmware.load_type = amdgpu_ucode_get_load_type(adev, amdgpu_fw_load_type); in amdgpu_device_check_arguments()
1514 amdgpu_gmc_tmz_set(adev); in amdgpu_device_check_arguments()
1516 amdgpu_gmc_noretry_set(adev); in amdgpu_device_check_arguments()
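
amdgpu_device_check_arguments() is parameter sanitation: each module option is range-checked (sched jobs at least 4 and a power of two, minimum GART/GTT/VM sizes, fragment size between 4 and 9) and clamped to a safe default after a dev_warn(). A sketch in the spirit of the sched-jobs check; the rounding helper assumes values below 2^31:

    #include <stdbool.h>
    #include <stdint.h>

    static bool is_power_of_2(uint32_t n)
    {
        return n != 0 && (n & (n - 1)) == 0;
    }

    /* Illustrative clamp: enforce the minimum, then round non-powers-of-two
     * up to the next power of two. */
    static uint32_t sanitize_sched_jobs(uint32_t requested)
    {
        if (requested < 4)
            return 4;
        if (!is_power_of_2(requested))
            return 1u << (32 - __builtin_clz(requested));
        return requested;
    }
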
1606 struct amdgpu_device *adev = dev; in amdgpu_device_ip_set_clockgating_state() local
1609 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_set_clockgating_state()
1610 if (!adev->ip_blocks[i].status.valid) in amdgpu_device_ip_set_clockgating_state()
1612 if (adev->ip_blocks[i].version->type != block_type) in amdgpu_device_ip_set_clockgating_state()
1614 if (!adev->ip_blocks[i].version->funcs->set_clockgating_state) in amdgpu_device_ip_set_clockgating_state()
1616 r = adev->ip_blocks[i].version->funcs->set_clockgating_state( in amdgpu_device_ip_set_clockgating_state()
1617 (void *)adev, state); in amdgpu_device_ip_set_clockgating_state()
1620 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_ip_set_clockgating_state()
1640 struct amdgpu_device *adev = dev; in amdgpu_device_ip_set_powergating_state() local
1643 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_set_powergating_state()
1644 if (!adev->ip_blocks[i].status.valid) in amdgpu_device_ip_set_powergating_state()
1646 if (adev->ip_blocks[i].version->type != block_type) in amdgpu_device_ip_set_powergating_state()
1648 if (!adev->ip_blocks[i].version->funcs->set_powergating_state) in amdgpu_device_ip_set_powergating_state()
1650 r = adev->ip_blocks[i].version->funcs->set_powergating_state( in amdgpu_device_ip_set_powergating_state()
1651 (void *)adev, state); in amdgpu_device_ip_set_powergating_state()
1654 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_ip_set_powergating_state()
1670 void amdgpu_device_ip_get_clockgating_state(struct amdgpu_device *adev, in amdgpu_device_ip_get_clockgating_state() argument
1675 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_get_clockgating_state()
1676 if (!adev->ip_blocks[i].status.valid) in amdgpu_device_ip_get_clockgating_state()
1678 if (adev->ip_blocks[i].version->funcs->get_clockgating_state) in amdgpu_device_ip_get_clockgating_state()
1679 adev->ip_blocks[i].version->funcs->get_clockgating_state((void *)adev, flags); in amdgpu_device_ip_get_clockgating_state()
1692 int amdgpu_device_ip_wait_for_idle(struct amdgpu_device *adev, in amdgpu_device_ip_wait_for_idle() argument
1697 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_wait_for_idle()
1698 if (!adev->ip_blocks[i].status.valid) in amdgpu_device_ip_wait_for_idle()
1700 if (adev->ip_blocks[i].version->type == block_type) { in amdgpu_device_ip_wait_for_idle()
1701 r = adev->ip_blocks[i].version->funcs->wait_for_idle((void *)adev); in amdgpu_device_ip_wait_for_idle()
1720 bool amdgpu_device_ip_is_idle(struct amdgpu_device *adev, in amdgpu_device_ip_is_idle() argument
1725 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_is_idle()
1726 if (!adev->ip_blocks[i].status.valid) in amdgpu_device_ip_is_idle()
1728 if (adev->ip_blocks[i].version->type == block_type) in amdgpu_device_ip_is_idle()
1729 return adev->ip_blocks[i].version->funcs->is_idle((void *)adev); in amdgpu_device_ip_is_idle()
1745 amdgpu_device_ip_get_ip_block(struct amdgpu_device *adev, in amdgpu_device_ip_get_ip_block() argument
1750 for (i = 0; i < adev->num_ip_blocks; i++) in amdgpu_device_ip_get_ip_block()
1751 if (adev->ip_blocks[i].version->type == type) in amdgpu_device_ip_get_ip_block()
1752 return &adev->ip_blocks[i]; in amdgpu_device_ip_get_ip_block()
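
The IP-block queries above are all linear scans of adev->ip_blocks[]: skip blocks that never passed early init where relevant, match on version->type, then call through the funcs vtable or hand back the block. The lookup reduces to this sketch (enum and struct fields are simplified):

    #include <stddef.h>

    enum ip_type { IP_COMMON, IP_GMC, IP_IH, IP_PSP /* ... */ };

    struct ip_block {
        enum ip_type type;
        int valid;                  /* set by early init, checked by the walkers */
    };

    /* Mirrors amdgpu_device_ip_get_ip_block(): first block of a given type. */
    struct ip_block *get_ip_block(struct ip_block *blocks, int n, enum ip_type type)
    {
        for (int i = 0; i < n; i++)
            if (blocks[i].type == type)
                return &blocks[i];
        return NULL;
    }
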
1768 int amdgpu_device_ip_block_version_cmp(struct amdgpu_device *adev, in amdgpu_device_ip_block_version_cmp() argument
1772 struct amdgpu_ip_block *ip_block = amdgpu_device_ip_get_ip_block(adev, type); in amdgpu_device_ip_block_version_cmp()
1791 int amdgpu_device_ip_block_add(struct amdgpu_device *adev, in amdgpu_device_ip_block_add() argument
1799 if (adev->harvest_ip_mask & AMD_HARVEST_IP_VCN_MASK) in amdgpu_device_ip_block_add()
1803 if (adev->harvest_ip_mask & AMD_HARVEST_IP_JPEG_MASK) in amdgpu_device_ip_block_add()
1810 DRM_INFO("add ip block number %d <%s>\n", adev->num_ip_blocks, in amdgpu_device_ip_block_add()
1813 adev->ip_blocks[adev->num_ip_blocks++].version = ip_block_version; in amdgpu_device_ip_block_add()
1830 static void amdgpu_device_enable_virtual_display(struct amdgpu_device *adev) in amdgpu_device_enable_virtual_display() argument
1832 adev->enable_virtual_display = false; in amdgpu_device_enable_virtual_display()
1835 const char *pci_address_name = pci_name(adev->pdev); in amdgpu_device_enable_virtual_display()
1847 adev->enable_virtual_display = true; in amdgpu_device_enable_virtual_display()
1858 adev->mode_info.num_crtc = num_crtc; in amdgpu_device_enable_virtual_display()
1860 adev->mode_info.num_crtc = 1; in amdgpu_device_enable_virtual_display()
1868 adev->enable_virtual_display, adev->mode_info.num_crtc); in amdgpu_device_enable_virtual_display()
1884 static int amdgpu_device_parse_gpu_info_fw(struct amdgpu_device *adev) in amdgpu_device_parse_gpu_info_fw() argument
1891 adev->firmware.gpu_info_fw = NULL; in amdgpu_device_parse_gpu_info_fw()
1893 if (adev->mman.discovery_bin) { in amdgpu_device_parse_gpu_info_fw()
1894 amdgpu_discovery_get_gfx_info(adev); in amdgpu_device_parse_gpu_info_fw()
1901 if (adev->asic_type != CHIP_NAVI12) in amdgpu_device_parse_gpu_info_fw()
1905 switch (adev->asic_type) { in amdgpu_device_parse_gpu_info_fw()
1944 if (adev->apu_flags & AMD_APU_IS_RAVEN2) in amdgpu_device_parse_gpu_info_fw()
1946 else if (adev->apu_flags & AMD_APU_IS_PICASSO) in amdgpu_device_parse_gpu_info_fw()
1955 if (adev->apu_flags & AMD_APU_IS_RENOIR) in amdgpu_device_parse_gpu_info_fw()
1978 err = request_firmware(&adev->firmware.gpu_info_fw, fw_name, adev->dev); in amdgpu_device_parse_gpu_info_fw()
1980 dev_err(adev->dev, in amdgpu_device_parse_gpu_info_fw()
1985 err = amdgpu_ucode_validate(adev->firmware.gpu_info_fw); in amdgpu_device_parse_gpu_info_fw()
1987 dev_err(adev->dev, in amdgpu_device_parse_gpu_info_fw()
1993 hdr = (const struct gpu_info_firmware_header_v1_0 *)adev->firmware.gpu_info_fw->data; in amdgpu_device_parse_gpu_info_fw()
2000 (const struct gpu_info_firmware_v1_0 *)(adev->firmware.gpu_info_fw->data + in amdgpu_device_parse_gpu_info_fw()
2006 if (adev->asic_type == CHIP_NAVI12) in amdgpu_device_parse_gpu_info_fw()
2009 adev->gfx.config.max_shader_engines = le32_to_cpu(gpu_info_fw->gc_num_se); in amdgpu_device_parse_gpu_info_fw()
2010 adev->gfx.config.max_cu_per_sh = le32_to_cpu(gpu_info_fw->gc_num_cu_per_sh); in amdgpu_device_parse_gpu_info_fw()
2011 adev->gfx.config.max_sh_per_se = le32_to_cpu(gpu_info_fw->gc_num_sh_per_se); in amdgpu_device_parse_gpu_info_fw()
2012 adev->gfx.config.max_backends_per_se = le32_to_cpu(gpu_info_fw->gc_num_rb_per_se); in amdgpu_device_parse_gpu_info_fw()
2013 adev->gfx.config.max_texture_channel_caches = in amdgpu_device_parse_gpu_info_fw()
2015 adev->gfx.config.max_gprs = le32_to_cpu(gpu_info_fw->gc_num_gprs); in amdgpu_device_parse_gpu_info_fw()
2016 adev->gfx.config.max_gs_threads = le32_to_cpu(gpu_info_fw->gc_num_max_gs_thds); in amdgpu_device_parse_gpu_info_fw()
2017 adev->gfx.config.gs_vgt_table_depth = le32_to_cpu(gpu_info_fw->gc_gs_table_depth); in amdgpu_device_parse_gpu_info_fw()
2018 adev->gfx.config.gs_prim_buffer_depth = le32_to_cpu(gpu_info_fw->gc_gsprim_buff_depth); in amdgpu_device_parse_gpu_info_fw()
2019 adev->gfx.config.double_offchip_lds_buf = in amdgpu_device_parse_gpu_info_fw()
2021 adev->gfx.cu_info.wave_front_size = le32_to_cpu(gpu_info_fw->gc_wave_size); in amdgpu_device_parse_gpu_info_fw()
2022 adev->gfx.cu_info.max_waves_per_simd = in amdgpu_device_parse_gpu_info_fw()
2024 adev->gfx.cu_info.max_scratch_slots_per_cu = in amdgpu_device_parse_gpu_info_fw()
2026 adev->gfx.cu_info.lds_size = le32_to_cpu(gpu_info_fw->gc_lds_size); in amdgpu_device_parse_gpu_info_fw()
2029 (const struct gpu_info_firmware_v1_1 *)(adev->firmware.gpu_info_fw->data + in amdgpu_device_parse_gpu_info_fw()
2031 adev->gfx.config.num_sc_per_sh = in amdgpu_device_parse_gpu_info_fw()
2033 adev->gfx.config.num_packer_per_sc = in amdgpu_device_parse_gpu_info_fw()
2044 (const struct gpu_info_firmware_v1_2 *)(adev->firmware.gpu_info_fw->data + in amdgpu_device_parse_gpu_info_fw()
2046 adev->dm.soc_bounding_box = &gpu_info_fw->soc_bounding_box; in amdgpu_device_parse_gpu_info_fw()
2051 dev_err(adev->dev, in amdgpu_device_parse_gpu_info_fw()
2070 static int amdgpu_device_ip_early_init(struct amdgpu_device *adev) in amdgpu_device_ip_early_init() argument
2074 amdgpu_device_enable_virtual_display(adev); in amdgpu_device_ip_early_init()
2076 if (amdgpu_sriov_vf(adev)) { in amdgpu_device_ip_early_init()
2077 r = amdgpu_virt_request_full_gpu(adev, true); in amdgpu_device_ip_early_init()
2082 switch (adev->asic_type) { in amdgpu_device_ip_early_init()
2089 adev->family = AMDGPU_FAMILY_SI; in amdgpu_device_ip_early_init()
2090 r = si_set_ip_blocks(adev); in amdgpu_device_ip_early_init()
2101 if (adev->flags & AMD_IS_APU) in amdgpu_device_ip_early_init()
2102 adev->family = AMDGPU_FAMILY_KV; in amdgpu_device_ip_early_init()
2104 adev->family = AMDGPU_FAMILY_CI; in amdgpu_device_ip_early_init()
2106 r = cik_set_ip_blocks(adev); in amdgpu_device_ip_early_init()
2120 if (adev->flags & AMD_IS_APU) in amdgpu_device_ip_early_init()
2121 adev->family = AMDGPU_FAMILY_CZ; in amdgpu_device_ip_early_init()
2123 adev->family = AMDGPU_FAMILY_VI; in amdgpu_device_ip_early_init()
2125 r = vi_set_ip_blocks(adev); in amdgpu_device_ip_early_init()
2136 if (adev->flags & AMD_IS_APU) in amdgpu_device_ip_early_init()
2137 adev->family = AMDGPU_FAMILY_RV; in amdgpu_device_ip_early_init()
2139 adev->family = AMDGPU_FAMILY_AI; in amdgpu_device_ip_early_init()
2141 r = soc15_set_ip_blocks(adev); in amdgpu_device_ip_early_init()
2155 if (adev->asic_type == CHIP_VANGOGH) in amdgpu_device_ip_early_init()
2156 adev->family = AMDGPU_FAMILY_VGH; in amdgpu_device_ip_early_init()
2157 else if (adev->asic_type == CHIP_YELLOW_CARP) in amdgpu_device_ip_early_init()
2158 adev->family = AMDGPU_FAMILY_YC; in amdgpu_device_ip_early_init()
2160 adev->family = AMDGPU_FAMILY_NV; in amdgpu_device_ip_early_init()
2162 r = nv_set_ip_blocks(adev); in amdgpu_device_ip_early_init()
2171 amdgpu_amdkfd_device_probe(adev); in amdgpu_device_ip_early_init()
2173 adev->pm.pp_feature = amdgpu_pp_feature_mask; in amdgpu_device_ip_early_init()
2174 if (amdgpu_sriov_vf(adev) || sched_policy == KFD_SCHED_POLICY_NO_HWS) in amdgpu_device_ip_early_init()
2175 adev->pm.pp_feature &= ~PP_GFXOFF_MASK; in amdgpu_device_ip_early_init()
2176 if (amdgpu_sriov_vf(adev) && adev->asic_type == CHIP_SIENNA_CICHLID) in amdgpu_device_ip_early_init()
2177 adev->pm.pp_feature &= ~PP_OVERDRIVE_MASK; in amdgpu_device_ip_early_init()
2179 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_early_init()
2182 i, adev->ip_blocks[i].version->funcs->name); in amdgpu_device_ip_early_init()
2183 adev->ip_blocks[i].status.valid = false; in amdgpu_device_ip_early_init()
2185 if (adev->ip_blocks[i].version->funcs->early_init) { in amdgpu_device_ip_early_init()
2186 r = adev->ip_blocks[i].version->funcs->early_init((void *)adev); in amdgpu_device_ip_early_init()
2188 adev->ip_blocks[i].status.valid = false; in amdgpu_device_ip_early_init()
2191 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_ip_early_init()
2194 adev->ip_blocks[i].status.valid = true; in amdgpu_device_ip_early_init()
2197 adev->ip_blocks[i].status.valid = true; in amdgpu_device_ip_early_init()
2201 if (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_COMMON) { in amdgpu_device_ip_early_init()
2202 r = amdgpu_device_parse_gpu_info_fw(adev); in amdgpu_device_ip_early_init()
2207 if (!amdgpu_get_bios(adev)) in amdgpu_device_ip_early_init()
2210 r = amdgpu_atombios_init(adev); in amdgpu_device_ip_early_init()
2212 dev_err(adev->dev, "amdgpu_atombios_init failed\n"); in amdgpu_device_ip_early_init()
2213 amdgpu_vf_error_put(adev, AMDGIM_ERROR_VF_ATOMBIOS_INIT_FAIL, 0, 0); in amdgpu_device_ip_early_init()
2218 if (amdgpu_sriov_vf(adev)) in amdgpu_device_ip_early_init()
2219 amdgpu_virt_init_data_exchange(adev); in amdgpu_device_ip_early_init()
2224 adev->cg_flags &= amdgpu_cg_mask; in amdgpu_device_ip_early_init()
2225 adev->pg_flags &= amdgpu_pg_mask; in amdgpu_device_ip_early_init()
2230 static int amdgpu_device_ip_hw_init_phase1(struct amdgpu_device *adev) in amdgpu_device_ip_hw_init_phase1() argument
2234 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_hw_init_phase1()
2235 if (!adev->ip_blocks[i].status.sw) in amdgpu_device_ip_hw_init_phase1()
2237 if (adev->ip_blocks[i].status.hw) in amdgpu_device_ip_hw_init_phase1()
2239 if (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_COMMON || in amdgpu_device_ip_hw_init_phase1()
2240 (amdgpu_sriov_vf(adev) && (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_PSP)) || in amdgpu_device_ip_hw_init_phase1()
2241 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_IH) { in amdgpu_device_ip_hw_init_phase1()
2242 r = adev->ip_blocks[i].version->funcs->hw_init(adev); in amdgpu_device_ip_hw_init_phase1()
2245 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_ip_hw_init_phase1()
2248 adev->ip_blocks[i].status.hw = true; in amdgpu_device_ip_hw_init_phase1()
2255 static int amdgpu_device_ip_hw_init_phase2(struct amdgpu_device *adev) in amdgpu_device_ip_hw_init_phase2() argument
2259 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_hw_init_phase2()
2260 if (!adev->ip_blocks[i].status.sw) in amdgpu_device_ip_hw_init_phase2()
2262 if (adev->ip_blocks[i].status.hw) in amdgpu_device_ip_hw_init_phase2()
2264 r = adev->ip_blocks[i].version->funcs->hw_init(adev); in amdgpu_device_ip_hw_init_phase2()
2267 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_ip_hw_init_phase2()
2270 adev->ip_blocks[i].status.hw = true; in amdgpu_device_ip_hw_init_phase2()
2276 static int amdgpu_device_fw_loading(struct amdgpu_device *adev) in amdgpu_device_fw_loading() argument
2282 if (adev->asic_type >= CHIP_VEGA10) { in amdgpu_device_fw_loading()
2283 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_fw_loading()
2284 if (adev->ip_blocks[i].version->type != AMD_IP_BLOCK_TYPE_PSP) in amdgpu_device_fw_loading()
2287 if (!adev->ip_blocks[i].status.sw) in amdgpu_device_fw_loading()
2291 if (adev->ip_blocks[i].status.hw == true) in amdgpu_device_fw_loading()
2294 if (amdgpu_in_reset(adev) || adev->in_suspend) { in amdgpu_device_fw_loading()
2295 r = adev->ip_blocks[i].version->funcs->resume(adev); in amdgpu_device_fw_loading()
2298 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_fw_loading()
2302 r = adev->ip_blocks[i].version->funcs->hw_init(adev); in amdgpu_device_fw_loading()
2305 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_fw_loading()
2310 adev->ip_blocks[i].status.hw = true; in amdgpu_device_fw_loading()
2315 if (!amdgpu_sriov_vf(adev) || adev->asic_type == CHIP_TONGA) in amdgpu_device_fw_loading()
2316 r = amdgpu_pm_load_smu_firmware(adev, &smu_version); in amdgpu_device_fw_loading()
2332 static int amdgpu_device_ip_init(struct amdgpu_device *adev) in amdgpu_device_ip_init() argument
2336 r = amdgpu_ras_init(adev); in amdgpu_device_ip_init()
2340 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_init()
2341 if (!adev->ip_blocks[i].status.valid) in amdgpu_device_ip_init()
2343 r = adev->ip_blocks[i].version->funcs->sw_init((void *)adev); in amdgpu_device_ip_init()
2346 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_ip_init()
2349 adev->ip_blocks[i].status.sw = true; in amdgpu_device_ip_init()
2352 if (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_GMC) { in amdgpu_device_ip_init()
2353 r = amdgpu_device_vram_scratch_init(adev); in amdgpu_device_ip_init()
2358 r = adev->ip_blocks[i].version->funcs->hw_init((void *)adev); in amdgpu_device_ip_init()
2363 r = amdgpu_device_wb_init(adev); in amdgpu_device_ip_init()
2368 adev->ip_blocks[i].status.hw = true; in amdgpu_device_ip_init()
2371 if (amdgpu_mcbp || amdgpu_sriov_vf(adev)) { in amdgpu_device_ip_init()
2372 r = amdgpu_allocate_static_csa(adev, &adev->virt.csa_obj, in amdgpu_device_ip_init()
2383 if (amdgpu_sriov_vf(adev)) in amdgpu_device_ip_init()
2384 amdgpu_virt_init_data_exchange(adev); in amdgpu_device_ip_init()
2386 r = amdgpu_ib_pool_init(adev); in amdgpu_device_ip_init()
2388 dev_err(adev->dev, "IB initialization failed (%d).\n", r); in amdgpu_device_ip_init()
2389 amdgpu_vf_error_put(adev, AMDGIM_ERROR_VF_IB_INIT_FAIL, 0, r); in amdgpu_device_ip_init()
2393 r = amdgpu_ucode_create_bo(adev); /* create ucode bo when sw_init complete*/ in amdgpu_device_ip_init()
2397 r = amdgpu_device_ip_hw_init_phase1(adev); in amdgpu_device_ip_init()
2401 r = amdgpu_device_fw_loading(adev); in amdgpu_device_ip_init()
2405 r = amdgpu_device_ip_hw_init_phase2(adev); in amdgpu_device_ip_init()
2424 r = amdgpu_ras_recovery_init(adev); in amdgpu_device_ip_init()
2428 if (adev->gmc.xgmi.num_physical_nodes > 1) in amdgpu_device_ip_init()
2429 amdgpu_xgmi_add_device(adev); in amdgpu_device_ip_init()
2432 if (!adev->gmc.xgmi.pending_reset) in amdgpu_device_ip_init()
2433 amdgpu_amdkfd_device_init(adev); in amdgpu_device_ip_init()
2435 r = amdgpu_amdkfd_resume_iommu(adev); in amdgpu_device_ip_init()
2439 amdgpu_fru_get_product_info(adev); in amdgpu_device_ip_init()
2442 if (amdgpu_sriov_vf(adev)) in amdgpu_device_ip_init()
2443 amdgpu_virt_release_full_gpu(adev, true); in amdgpu_device_ip_init()
2457 static void amdgpu_device_fill_reset_magic(struct amdgpu_device *adev) in amdgpu_device_fill_reset_magic() argument
2459 memcpy(adev->reset_magic, adev->gart.ptr, AMDGPU_RESET_MAGIC_NUM); in amdgpu_device_fill_reset_magic()
2472 static bool amdgpu_device_check_vram_lost(struct amdgpu_device *adev) in amdgpu_device_check_vram_lost() argument
2474 if (memcmp(adev->gart.ptr, adev->reset_magic, in amdgpu_device_check_vram_lost()
2478 if (!amdgpu_in_reset(adev)) in amdgpu_device_check_vram_lost()
2485 switch (amdgpu_asic_reset_method(adev)) { in amdgpu_device_check_vram_lost()
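
VRAM-loss detection is a magic-number trick: after init, AMDGPU_RESET_MAGIC_NUM bytes of the GART table (which lives in VRAM) are copied into adev->reset_magic, and after a reset a memcmp() mismatch means VRAM did not survive; the switch on amdgpu_asic_reset_method() covers methods known to preserve or lose VRAM. The core of it, as a sketch:

    #include <string.h>

    #define RESET_MAGIC_NUM 64      /* illustrative size */

    struct dev_state {
        unsigned char reset_magic[RESET_MAGIC_NUM];
        unsigned char *gart_ptr;    /* CPU view of a VRAM-backed page */
    };

    void fill_reset_magic(struct dev_state *d)
    {
        memcpy(d->reset_magic, d->gart_ptr, RESET_MAGIC_NUM);
    }

    /* Nonzero when VRAM contents were lost across the reset. */
    int check_vram_lost(struct dev_state *d)
    {
        return memcmp(d->gart_ptr, d->reset_magic, RESET_MAGIC_NUM) != 0;
    }
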
2507 int amdgpu_device_set_cg_state(struct amdgpu_device *adev, in amdgpu_device_set_cg_state() argument
2515 for (j = 0; j < adev->num_ip_blocks; j++) { in amdgpu_device_set_cg_state()
2516 i = state == AMD_CG_STATE_GATE ? j : adev->num_ip_blocks - j - 1; in amdgpu_device_set_cg_state()
2517 if (!adev->ip_blocks[i].status.late_initialized) in amdgpu_device_set_cg_state()
2520 if (adev->in_s0ix && in amdgpu_device_set_cg_state()
2521 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_GFX) in amdgpu_device_set_cg_state()
2524 if (adev->ip_blocks[i].version->type != AMD_IP_BLOCK_TYPE_UVD && in amdgpu_device_set_cg_state()
2525 adev->ip_blocks[i].version->type != AMD_IP_BLOCK_TYPE_VCE && in amdgpu_device_set_cg_state()
2526 adev->ip_blocks[i].version->type != AMD_IP_BLOCK_TYPE_VCN && in amdgpu_device_set_cg_state()
2527 adev->ip_blocks[i].version->type != AMD_IP_BLOCK_TYPE_JPEG && in amdgpu_device_set_cg_state()
2528 adev->ip_blocks[i].version->funcs->set_clockgating_state) { in amdgpu_device_set_cg_state()
2530 r = adev->ip_blocks[i].version->funcs->set_clockgating_state((void *)adev, in amdgpu_device_set_cg_state()
2534 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_set_cg_state()
2543 int amdgpu_device_set_pg_state(struct amdgpu_device *adev, in amdgpu_device_set_pg_state() argument
2551 for (j = 0; j < adev->num_ip_blocks; j++) { in amdgpu_device_set_pg_state()
2552 i = state == AMD_PG_STATE_GATE ? j : adev->num_ip_blocks - j - 1; in amdgpu_device_set_pg_state()
2553 if (!adev->ip_blocks[i].status.late_initialized) in amdgpu_device_set_pg_state()
2556 if (adev->in_s0ix && in amdgpu_device_set_pg_state()
2557 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_GFX) in amdgpu_device_set_pg_state()
2560 if (adev->ip_blocks[i].version->type != AMD_IP_BLOCK_TYPE_UVD && in amdgpu_device_set_pg_state()
2561 adev->ip_blocks[i].version->type != AMD_IP_BLOCK_TYPE_VCE && in amdgpu_device_set_pg_state()
2562 adev->ip_blocks[i].version->type != AMD_IP_BLOCK_TYPE_VCN && in amdgpu_device_set_pg_state()
2563 adev->ip_blocks[i].version->type != AMD_IP_BLOCK_TYPE_JPEG && in amdgpu_device_set_pg_state()
2564 adev->ip_blocks[i].version->funcs->set_powergating_state) { in amdgpu_device_set_pg_state()
2566 r = adev->ip_blocks[i].version->funcs->set_powergating_state((void *)adev, in amdgpu_device_set_pg_state()
2570 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_set_pg_state()
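
Both gating walkers derive their visit order from the target state via the `i = state == GATE ? j : num_ip_blocks - j - 1` index flip, so ungating unwinds in the reverse order of gating; GFX is skipped in S0ix, and UVD/VCE/VCN/JPEG manage their own gating. The index flip in isolation:

    #include <stdio.h>

    /* Visit order used by both walkers: forward when gating,
     * reverse when ungating, so dependencies unwind symmetrically. */
    static void walk_blocks(int n, int gating)
    {
        for (int j = 0; j < n; j++) {
            int i = gating ? j : n - j - 1;
            printf("visit block %d\n", i);  /* stands in for the vtable call */
        }
    }
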
2581 struct amdgpu_device *adev; in amdgpu_device_enable_mgpu_fan_boost() local
2596 adev = gpu_ins->adev; in amdgpu_device_enable_mgpu_fan_boost()
2597 if (!(adev->flags & AMD_IS_APU) && in amdgpu_device_enable_mgpu_fan_boost()
2599 ret = amdgpu_dpm_enable_mgpu_fan_boost(adev); in amdgpu_device_enable_mgpu_fan_boost()
2625 static int amdgpu_device_ip_late_init(struct amdgpu_device *adev) in amdgpu_device_ip_late_init() argument
2630 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_late_init()
2631 if (!adev->ip_blocks[i].status.hw) in amdgpu_device_ip_late_init()
2633 if (adev->ip_blocks[i].version->funcs->late_init) { in amdgpu_device_ip_late_init()
2634 r = adev->ip_blocks[i].version->funcs->late_init((void *)adev); in amdgpu_device_ip_late_init()
2637 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_ip_late_init()
2641 adev->ip_blocks[i].status.late_initialized = true; in amdgpu_device_ip_late_init()
2644 amdgpu_ras_set_error_query_ready(adev, true); in amdgpu_device_ip_late_init()
2646 amdgpu_device_set_cg_state(adev, AMD_CG_STATE_GATE); in amdgpu_device_ip_late_init()
2647 amdgpu_device_set_pg_state(adev, AMD_PG_STATE_GATE); in amdgpu_device_ip_late_init()
2649 amdgpu_device_fill_reset_magic(adev); in amdgpu_device_ip_late_init()
2656 if (adev->asic_type == CHIP_ARCTURUS && in amdgpu_device_ip_late_init()
2657 amdgpu_passthrough(adev) && in amdgpu_device_ip_late_init()
2658 adev->gmc.xgmi.num_physical_nodes > 1) in amdgpu_device_ip_late_init()
2659 smu_set_light_sbr(&adev->smu, true); in amdgpu_device_ip_late_init()
2661 if (adev->gmc.xgmi.num_physical_nodes > 1) { in amdgpu_device_ip_late_init()
2677 if (mgpu_info.num_dgpu == adev->gmc.xgmi.num_physical_nodes) { in amdgpu_device_ip_late_init()
2680 if (gpu_instance->adev->flags & AMD_IS_APU) in amdgpu_device_ip_late_init()
2683 r = amdgpu_xgmi_set_pstate(gpu_instance->adev, in amdgpu_device_ip_late_init()
2698 static int amdgpu_device_ip_fini_early(struct amdgpu_device *adev) in amdgpu_device_ip_fini_early() argument
2702 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_fini_early()
2703 if (!adev->ip_blocks[i].version->funcs->early_fini) in amdgpu_device_ip_fini_early()
2706 r = adev->ip_blocks[i].version->funcs->early_fini((void *)adev); in amdgpu_device_ip_fini_early()
2709 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_ip_fini_early()
2713 amdgpu_amdkfd_suspend(adev, false); in amdgpu_device_ip_fini_early()
2715 amdgpu_device_set_pg_state(adev, AMD_PG_STATE_UNGATE); in amdgpu_device_ip_fini_early()
2716 amdgpu_device_set_cg_state(adev, AMD_CG_STATE_UNGATE); in amdgpu_device_ip_fini_early()
2719 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_fini_early()
2720 if (!adev->ip_blocks[i].status.hw) in amdgpu_device_ip_fini_early()
2722 if (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_SMC) { in amdgpu_device_ip_fini_early()
2723 r = adev->ip_blocks[i].version->funcs->hw_fini((void *)adev); in amdgpu_device_ip_fini_early()
2727 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_ip_fini_early()
2729 adev->ip_blocks[i].status.hw = false; in amdgpu_device_ip_fini_early()
2734 for (i = adev->num_ip_blocks - 1; i >= 0; i--) { in amdgpu_device_ip_fini_early()
2735 if (!adev->ip_blocks[i].status.hw) in amdgpu_device_ip_fini_early()
2738 r = adev->ip_blocks[i].version->funcs->hw_fini((void *)adev); in amdgpu_device_ip_fini_early()
2742 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_ip_fini_early()
2745 adev->ip_blocks[i].status.hw = false; in amdgpu_device_ip_fini_early()
2762 static int amdgpu_device_ip_fini(struct amdgpu_device *adev) in amdgpu_device_ip_fini() argument
2766 if (amdgpu_sriov_vf(adev) && adev->virt.ras_init_done) in amdgpu_device_ip_fini()
2767 amdgpu_virt_release_ras_err_handler_data(adev); in amdgpu_device_ip_fini()
2769 amdgpu_ras_pre_fini(adev); in amdgpu_device_ip_fini()
2771 if (adev->gmc.xgmi.num_physical_nodes > 1) in amdgpu_device_ip_fini()
2772 amdgpu_xgmi_remove_device(adev); in amdgpu_device_ip_fini()
2774 amdgpu_amdkfd_device_fini_sw(adev); in amdgpu_device_ip_fini()
2776 for (i = adev->num_ip_blocks - 1; i >= 0; i--) { in amdgpu_device_ip_fini()
2777 if (!adev->ip_blocks[i].status.sw) in amdgpu_device_ip_fini()
2780 if (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_GMC) { in amdgpu_device_ip_fini()
2781 amdgpu_ucode_free_bo(adev); in amdgpu_device_ip_fini()
2782 amdgpu_free_static_csa(&adev->virt.csa_obj); in amdgpu_device_ip_fini()
2783 amdgpu_device_wb_fini(adev); in amdgpu_device_ip_fini()
2784 amdgpu_device_vram_scratch_fini(adev); in amdgpu_device_ip_fini()
2785 amdgpu_ib_pool_fini(adev); in amdgpu_device_ip_fini()
2788 r = adev->ip_blocks[i].version->funcs->sw_fini((void *)adev); in amdgpu_device_ip_fini()
2792 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_ip_fini()
2794 adev->ip_blocks[i].status.sw = false; in amdgpu_device_ip_fini()
2795 adev->ip_blocks[i].status.valid = false; in amdgpu_device_ip_fini()
2798 for (i = adev->num_ip_blocks - 1; i >= 0; i--) { in amdgpu_device_ip_fini()
2799 if (!adev->ip_blocks[i].status.late_initialized) in amdgpu_device_ip_fini()
2801 if (adev->ip_blocks[i].version->funcs->late_fini) in amdgpu_device_ip_fini()
2802 adev->ip_blocks[i].version->funcs->late_fini((void *)adev); in amdgpu_device_ip_fini()
2803 adev->ip_blocks[i].status.late_initialized = false; in amdgpu_device_ip_fini()
2806 amdgpu_ras_fini(adev); in amdgpu_device_ip_fini()
2808 if (amdgpu_sriov_vf(adev)) in amdgpu_device_ip_fini()
2809 if (amdgpu_virt_release_full_gpu(adev, false)) in amdgpu_device_ip_fini()
2822 struct amdgpu_device *adev = in amdgpu_device_delayed_init_work_handler() local
2826 r = amdgpu_ib_ring_tests(adev); in amdgpu_device_delayed_init_work_handler()
2833 struct amdgpu_device *adev = in amdgpu_device_delay_enable_gfx_off() local
2836 WARN_ON_ONCE(adev->gfx.gfx_off_state); in amdgpu_device_delay_enable_gfx_off()
2837 WARN_ON_ONCE(adev->gfx.gfx_off_req_count); in amdgpu_device_delay_enable_gfx_off()
2839 if (!amdgpu_dpm_set_powergating_by_smu(adev, AMD_IP_BLOCK_TYPE_GFX, true)) in amdgpu_device_delay_enable_gfx_off()
2840 adev->gfx.gfx_off_state = true; in amdgpu_device_delay_enable_gfx_off()
2854 static int amdgpu_device_ip_suspend_phase1(struct amdgpu_device *adev) in amdgpu_device_ip_suspend_phase1() argument
2858 amdgpu_device_set_pg_state(adev, AMD_PG_STATE_UNGATE); in amdgpu_device_ip_suspend_phase1()
2859 amdgpu_device_set_cg_state(adev, AMD_CG_STATE_UNGATE); in amdgpu_device_ip_suspend_phase1()
2861 for (i = adev->num_ip_blocks - 1; i >= 0; i--) { in amdgpu_device_ip_suspend_phase1()
2862 if (!adev->ip_blocks[i].status.valid) in amdgpu_device_ip_suspend_phase1()
2866 if (adev->ip_blocks[i].version->type != AMD_IP_BLOCK_TYPE_DCE) in amdgpu_device_ip_suspend_phase1()
2870 r = adev->ip_blocks[i].version->funcs->suspend(adev); in amdgpu_device_ip_suspend_phase1()
2874 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_ip_suspend_phase1()
2878 adev->ip_blocks[i].status.hw = false; in amdgpu_device_ip_suspend_phase1()
2895 static int amdgpu_device_ip_suspend_phase2(struct amdgpu_device *adev) in amdgpu_device_ip_suspend_phase2() argument
2899 if (adev->in_s0ix) in amdgpu_device_ip_suspend_phase2()
2900 amdgpu_gfx_state_change_set(adev, sGpuChangeState_D3Entry); in amdgpu_device_ip_suspend_phase2()
2902 for (i = adev->num_ip_blocks - 1; i >= 0; i--) { in amdgpu_device_ip_suspend_phase2()
2903 if (!adev->ip_blocks[i].status.valid) in amdgpu_device_ip_suspend_phase2()
2906 if (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_DCE) in amdgpu_device_ip_suspend_phase2()
2910 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_PSP) { in amdgpu_device_ip_suspend_phase2()
2911 adev->ip_blocks[i].status.hw = false; in amdgpu_device_ip_suspend_phase2()
2916 if (adev->gmc.xgmi.pending_reset && in amdgpu_device_ip_suspend_phase2()
2917 !(adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_GMC || in amdgpu_device_ip_suspend_phase2()
2918 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_SMC || in amdgpu_device_ip_suspend_phase2()
2919 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_COMMON || in amdgpu_device_ip_suspend_phase2()
2920 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_IH)) { in amdgpu_device_ip_suspend_phase2()
2921 adev->ip_blocks[i].status.hw = false; in amdgpu_device_ip_suspend_phase2()
2930 if (adev->in_s0ix && in amdgpu_device_ip_suspend_phase2()
2931 (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_PSP || in amdgpu_device_ip_suspend_phase2()
2932 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_GFX)) in amdgpu_device_ip_suspend_phase2()
2936 r = adev->ip_blocks[i].version->funcs->suspend(adev); in amdgpu_device_ip_suspend_phase2()
2940 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_ip_suspend_phase2()
2942 adev->ip_blocks[i].status.hw = false; in amdgpu_device_ip_suspend_phase2()
2944 if (!amdgpu_sriov_vf(adev)) { in amdgpu_device_ip_suspend_phase2()
2945 if (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_SMC) { in amdgpu_device_ip_suspend_phase2()
2946 r = amdgpu_dpm_set_mp1_state(adev, adev->mp1_state); in amdgpu_device_ip_suspend_phase2()
2949 adev->mp1_state, r); in amdgpu_device_ip_suspend_phase2()
2970 int amdgpu_device_ip_suspend(struct amdgpu_device *adev) in amdgpu_device_ip_suspend() argument
2974 if (amdgpu_sriov_vf(adev)) { in amdgpu_device_ip_suspend()
2975 amdgpu_virt_fini_data_exchange(adev); in amdgpu_device_ip_suspend()
2976 amdgpu_virt_request_full_gpu(adev, false); in amdgpu_device_ip_suspend()
2979 r = amdgpu_device_ip_suspend_phase1(adev); in amdgpu_device_ip_suspend()
2982 r = amdgpu_device_ip_suspend_phase2(adev); in amdgpu_device_ip_suspend()
2984 if (amdgpu_sriov_vf(adev)) in amdgpu_device_ip_suspend()
2985 amdgpu_virt_release_full_gpu(adev, false); in amdgpu_device_ip_suspend()
2990 static int amdgpu_device_ip_reinit_early_sriov(struct amdgpu_device *adev) in amdgpu_device_ip_reinit_early_sriov() argument
3001 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_reinit_early_sriov()
3005 block = &adev->ip_blocks[i]; in amdgpu_device_ip_reinit_early_sriov()
3014 r = block->version->funcs->hw_init(adev); in amdgpu_device_ip_reinit_early_sriov()
3025 static int amdgpu_device_ip_reinit_late_sriov(struct amdgpu_device *adev) in amdgpu_device_ip_reinit_late_sriov() argument
3043 for (j = 0; j < adev->num_ip_blocks; j++) { in amdgpu_device_ip_reinit_late_sriov()
3044 block = &adev->ip_blocks[j]; in amdgpu_device_ip_reinit_late_sriov()
3052 r = block->version->funcs->resume(adev); in amdgpu_device_ip_reinit_late_sriov()
3054 r = block->version->funcs->hw_init(adev); in amdgpu_device_ip_reinit_late_sriov()
3078 static int amdgpu_device_ip_resume_phase1(struct amdgpu_device *adev) in amdgpu_device_ip_resume_phase1() argument
3082 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_resume_phase1()
3083 if (!adev->ip_blocks[i].status.valid || adev->ip_blocks[i].status.hw) in amdgpu_device_ip_resume_phase1()
3085 if (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_COMMON || in amdgpu_device_ip_resume_phase1()
3086 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_GMC || in amdgpu_device_ip_resume_phase1()
3087 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_IH) { in amdgpu_device_ip_resume_phase1()
3089 r = adev->ip_blocks[i].version->funcs->resume(adev); in amdgpu_device_ip_resume_phase1()
3092 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_ip_resume_phase1()
3095 adev->ip_blocks[i].status.hw = true; in amdgpu_device_ip_resume_phase1()
3115 static int amdgpu_device_ip_resume_phase2(struct amdgpu_device *adev) in amdgpu_device_ip_resume_phase2() argument
3119 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_resume_phase2()
3120 if (!adev->ip_blocks[i].status.valid || adev->ip_blocks[i].status.hw) in amdgpu_device_ip_resume_phase2()
3122 if (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_COMMON || in amdgpu_device_ip_resume_phase2()
3123 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_GMC || in amdgpu_device_ip_resume_phase2()
3124 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_IH || in amdgpu_device_ip_resume_phase2()
3125 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_PSP) in amdgpu_device_ip_resume_phase2()
3127 r = adev->ip_blocks[i].version->funcs->resume(adev); in amdgpu_device_ip_resume_phase2()
3130 adev->ip_blocks[i].version->funcs->name, r); in amdgpu_device_ip_resume_phase2()
3133 adev->ip_blocks[i].status.hw = true; in amdgpu_device_ip_resume_phase2()
3151 static int amdgpu_device_ip_resume(struct amdgpu_device *adev) in amdgpu_device_ip_resume() argument
3155 r = amdgpu_amdkfd_resume_iommu(adev); in amdgpu_device_ip_resume()
3159 r = amdgpu_device_ip_resume_phase1(adev); in amdgpu_device_ip_resume()
3163 r = amdgpu_device_fw_loading(adev); in amdgpu_device_ip_resume()
3167 r = amdgpu_device_ip_resume_phase2(adev); in amdgpu_device_ip_resume()
3179 static void amdgpu_device_detect_sriov_bios(struct amdgpu_device *adev) in amdgpu_device_detect_sriov_bios() argument
3181 if (amdgpu_sriov_vf(adev)) { in amdgpu_device_detect_sriov_bios()
3182 if (adev->is_atom_fw) { in amdgpu_device_detect_sriov_bios()
3183 if (amdgpu_atomfirmware_gpu_virtualization_supported(adev)) in amdgpu_device_detect_sriov_bios()
3184 adev->virt.caps |= AMDGPU_SRIOV_CAPS_SRIOV_VBIOS; in amdgpu_device_detect_sriov_bios()
3186 if (amdgpu_atombios_has_gpu_virtualization_table(adev)) in amdgpu_device_detect_sriov_bios()
3187 adev->virt.caps |= AMDGPU_SRIOV_CAPS_SRIOV_VBIOS; in amdgpu_device_detect_sriov_bios()
3190 if (!(adev->virt.caps & AMDGPU_SRIOV_CAPS_SRIOV_VBIOS)) in amdgpu_device_detect_sriov_bios()
3191 amdgpu_vf_error_put(adev, AMDGIM_ERROR_VF_NO_VBIOS, 0, 0); in amdgpu_device_detect_sriov_bios()
3267 bool amdgpu_device_has_dc_support(struct amdgpu_device *adev) in amdgpu_device_has_dc_support() argument
3269 if (amdgpu_sriov_vf(adev) || in amdgpu_device_has_dc_support()
3270 adev->enable_virtual_display || in amdgpu_device_has_dc_support()
3271 (adev->harvest_ip_mask & AMD_HARVEST_IP_DMU_MASK)) in amdgpu_device_has_dc_support()
3274 return amdgpu_device_asic_has_dc_support(adev->asic_type); in amdgpu_device_has_dc_support()
3279 struct amdgpu_device *adev = in amdgpu_device_xgmi_reset_func() local
3281 struct amdgpu_hive_info *hive = amdgpu_get_xgmi_hive(adev); in amdgpu_device_xgmi_reset_func()
3293 if (amdgpu_asic_reset_method(adev) == AMD_RESET_METHOD_BACO) { in amdgpu_device_xgmi_reset_func()
3296 adev->asic_reset_res = amdgpu_device_baco_enter(adev_to_drm(adev)); in amdgpu_device_xgmi_reset_func()
3298 if (adev->asic_reset_res) in amdgpu_device_xgmi_reset_func()
3302 adev->asic_reset_res = amdgpu_device_baco_exit(adev_to_drm(adev)); in amdgpu_device_xgmi_reset_func()
3304 if (adev->asic_reset_res) in amdgpu_device_xgmi_reset_func()
3307 if (adev->mmhub.ras_funcs && in amdgpu_device_xgmi_reset_func()
3308 adev->mmhub.ras_funcs->reset_ras_error_count) in amdgpu_device_xgmi_reset_func()
3309 adev->mmhub.ras_funcs->reset_ras_error_count(adev); in amdgpu_device_xgmi_reset_func()
3313 adev->asic_reset_res = amdgpu_asic_reset(adev); in amdgpu_device_xgmi_reset_func()
3317 if (adev->asic_reset_res) in amdgpu_device_xgmi_reset_func()
3319 adev->asic_reset_res, adev_to_drm(adev)->unique); in amdgpu_device_xgmi_reset_func()
3323 static int amdgpu_device_get_job_timeout_settings(struct amdgpu_device *adev) in amdgpu_device_get_job_timeout_settings() argument
3337 adev->gfx_timeout = msecs_to_jiffies(10000); in amdgpu_device_get_job_timeout_settings()
3338 adev->sdma_timeout = adev->video_timeout = adev->gfx_timeout; in amdgpu_device_get_job_timeout_settings()
3339 if (amdgpu_sriov_vf(adev)) in amdgpu_device_get_job_timeout_settings()
3340 adev->compute_timeout = amdgpu_sriov_is_pp_one_vf(adev) ? in amdgpu_device_get_job_timeout_settings()
3343 adev->compute_timeout = msecs_to_jiffies(60000); in amdgpu_device_get_job_timeout_settings()
3363 adev->gfx_timeout = timeout; in amdgpu_device_get_job_timeout_settings()
3366 adev->compute_timeout = timeout; in amdgpu_device_get_job_timeout_settings()
3369 adev->sdma_timeout = timeout; in amdgpu_device_get_job_timeout_settings()
3372 adev->video_timeout = timeout; in amdgpu_device_get_job_timeout_settings()
3383 adev->sdma_timeout = adev->video_timeout = adev->gfx_timeout; in amdgpu_device_get_job_timeout_settings()
3384 if (amdgpu_sriov_vf(adev) || amdgpu_passthrough(adev)) in amdgpu_device_get_job_timeout_settings()
3385 adev->compute_timeout = adev->gfx_timeout; in amdgpu_device_get_job_timeout_settings()
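
The defaults above are 10 seconds for gfx/SDMA/video and 60 seconds for compute (SR-IOV and passthrough variants adjust compute separately), and each parsed lockup_timeout entry can override an individual engine. A standalone sketch of the default-then-override split; values are kept in milliseconds here rather than the jiffies the kernel uses:

    #include <stdio.h>

    struct timeouts { long gfx, compute, sdma, video; };

    /* Mirrors the split in amdgpu_device_get_job_timeout_settings();
     * "override" stands in for a parsed lockup_timeout module parameter. */
    static void set_timeouts(struct timeouts *t, long override_ms)
    {
        t->gfx = t->sdma = t->video = 10000;   /* 10 s default */
        t->compute = 60000;                    /* 60 s default */

        if (override_ms > 0)
            t->gfx = t->compute = t->sdma = t->video = override_ms;
    }

    int main(void)
    {
        struct timeouts t;
        set_timeouts(&t, 0);
        printf("gfx=%ldms compute=%ldms\n", t.gfx, t.compute);
        return 0;
    }
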
3410 int amdgpu_device_init(struct amdgpu_device *adev, in amdgpu_device_init() argument
3413 struct drm_device *ddev = adev_to_drm(adev); in amdgpu_device_init()
3414 struct pci_dev *pdev = adev->pdev; in amdgpu_device_init()
3419 adev->shutdown = false; in amdgpu_device_init()
3420 adev->flags = flags; in amdgpu_device_init()
3423 adev->asic_type = amdgpu_force_asic_type; in amdgpu_device_init()
3425 adev->asic_type = flags & AMD_ASIC_MASK; in amdgpu_device_init()
3427 adev->usec_timeout = AMDGPU_MAX_USEC_TIMEOUT; in amdgpu_device_init()
3429 adev->usec_timeout *= 10; in amdgpu_device_init()
3430 adev->gmc.gart_size = 512 * 1024 * 1024; in amdgpu_device_init()
3431 adev->accel_working = false; in amdgpu_device_init()
3432 adev->num_rings = 0; in amdgpu_device_init()
3433 adev->mman.buffer_funcs = NULL; in amdgpu_device_init()
3434 adev->mman.buffer_funcs_ring = NULL; in amdgpu_device_init()
3435 adev->vm_manager.vm_pte_funcs = NULL; in amdgpu_device_init()
3436 adev->vm_manager.vm_pte_num_scheds = 0; in amdgpu_device_init()
3437 adev->gmc.gmc_funcs = NULL; in amdgpu_device_init()
3438 adev->harvest_ip_mask = 0x0; in amdgpu_device_init()
3439 adev->fence_context = dma_fence_context_alloc(AMDGPU_MAX_RINGS); in amdgpu_device_init()
3440 bitmap_zero(adev->gfx.pipe_reserve_bitmap, AMDGPU_MAX_COMPUTE_QUEUES); in amdgpu_device_init()
3442 adev->smc_rreg = &amdgpu_invalid_rreg; in amdgpu_device_init()
3443 adev->smc_wreg = &amdgpu_invalid_wreg; in amdgpu_device_init()
3444 adev->pcie_rreg = &amdgpu_invalid_rreg; in amdgpu_device_init()
3445 adev->pcie_wreg = &amdgpu_invalid_wreg; in amdgpu_device_init()
3446 adev->pciep_rreg = &amdgpu_invalid_rreg; in amdgpu_device_init()
3447 adev->pciep_wreg = &amdgpu_invalid_wreg; in amdgpu_device_init()
3448 adev->pcie_rreg64 = &amdgpu_invalid_rreg64; in amdgpu_device_init()
3449 adev->pcie_wreg64 = &amdgpu_invalid_wreg64; in amdgpu_device_init()
3450 adev->uvd_ctx_rreg = &amdgpu_invalid_rreg; in amdgpu_device_init()
3451 adev->uvd_ctx_wreg = &amdgpu_invalid_wreg; in amdgpu_device_init()
3452 adev->didt_rreg = &amdgpu_invalid_rreg; in amdgpu_device_init()
3453 adev->didt_wreg = &amdgpu_invalid_wreg; in amdgpu_device_init()
3454 adev->gc_cac_rreg = &amdgpu_invalid_rreg; in amdgpu_device_init()
3455 adev->gc_cac_wreg = &amdgpu_invalid_wreg; in amdgpu_device_init()
3456 adev->audio_endpt_rreg = &amdgpu_block_invalid_rreg; in amdgpu_device_init()
3457 adev->audio_endpt_wreg = &amdgpu_block_invalid_wreg; in amdgpu_device_init()
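
Every register accessor above is first pointed at an "invalid" stub, so a stray access before the ASIC code installs the real callbacks fails loudly instead of jumping through a NULL pointer. A tiny standalone sketch of that defensive-default pattern:

    #include <stdio.h>
    #include <stdint.h>

    struct dev { uint32_t (*rreg)(uint32_t reg); };

    /* Loud default, analogous to amdgpu_invalid_rreg(): a read before the
     * ASIC-specific callback is installed is reported, not crashed on. */
    static uint32_t invalid_rreg(uint32_t reg)
    {
        fprintf(stderr, "invalid register read 0x%x\n", (unsigned)reg);
        return 0;
    }

    int main(void)
    {
        struct dev d = { .rreg = invalid_rreg };  /* init-time default */
        d.rreg(0x1234);                           /* flags the early access */
        return 0;
    }
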
3460 amdgpu_asic_name[adev->asic_type], pdev->vendor, pdev->device, in amdgpu_device_init()
3465 mutex_init(&adev->firmware.mutex); in amdgpu_device_init()
3466 mutex_init(&adev->pm.mutex); in amdgpu_device_init()
3467 mutex_init(&adev->gfx.gpu_clock_mutex); in amdgpu_device_init()
3468 mutex_init(&adev->srbm_mutex); in amdgpu_device_init()
3469 mutex_init(&adev->gfx.pipe_reserve_mutex); in amdgpu_device_init()
3470 mutex_init(&adev->gfx.gfx_off_mutex); in amdgpu_device_init()
3471 mutex_init(&adev->grbm_idx_mutex); in amdgpu_device_init()
3472 mutex_init(&adev->mn_lock); in amdgpu_device_init()
3473 mutex_init(&adev->virt.vf_errors.lock); in amdgpu_device_init()
3474 hash_init(adev->mn_hash); in amdgpu_device_init()
3475 atomic_set(&adev->in_gpu_reset, 0); in amdgpu_device_init()
3476 init_rwsem(&adev->reset_sem); in amdgpu_device_init()
3477 mutex_init(&adev->psp.mutex); in amdgpu_device_init()
3478 mutex_init(&adev->notifier_lock); in amdgpu_device_init()
3480 r = amdgpu_device_init_apu_flags(adev); in amdgpu_device_init()
3484 r = amdgpu_device_check_arguments(adev); in amdgpu_device_init()
3488 spin_lock_init(&adev->mmio_idx_lock); in amdgpu_device_init()
3489 spin_lock_init(&adev->smc_idx_lock); in amdgpu_device_init()
3490 spin_lock_init(&adev->pcie_idx_lock); in amdgpu_device_init()
3491 spin_lock_init(&adev->uvd_ctx_idx_lock); in amdgpu_device_init()
3492 spin_lock_init(&adev->didt_idx_lock); in amdgpu_device_init()
3493 spin_lock_init(&adev->gc_cac_idx_lock); in amdgpu_device_init()
3494 spin_lock_init(&adev->se_cac_idx_lock); in amdgpu_device_init()
3495 spin_lock_init(&adev->audio_endpt_idx_lock); in amdgpu_device_init()
3496 spin_lock_init(&adev->mm_stats.lock); in amdgpu_device_init()
3498 INIT_LIST_HEAD(&adev->shadow_list); in amdgpu_device_init()
3499 mutex_init(&adev->shadow_list_lock); in amdgpu_device_init()
3501 INIT_LIST_HEAD(&adev->reset_list); in amdgpu_device_init()
3503 INIT_DELAYED_WORK(&adev->delayed_init_work, in amdgpu_device_init()
3505 INIT_DELAYED_WORK(&adev->gfx.gfx_off_delay_work, in amdgpu_device_init()
3508 INIT_WORK(&adev->xgmi_reset_work, amdgpu_device_xgmi_reset_func); in amdgpu_device_init()
3510 adev->gfx.gfx_off_req_count = 1; in amdgpu_device_init()
3511 adev->pm.ac_power = power_supply_is_system_supplied() > 0; in amdgpu_device_init()
3513 atomic_set(&adev->throttling_logging_enabled, 1); in amdgpu_device_init()
3521 ratelimit_state_init(&adev->throttling_logging_rs, (60 - 1) * HZ, 1); in amdgpu_device_init()
3522 ratelimit_set_flags(&adev->throttling_logging_rs, RATELIMIT_MSG_ON_RELEASE); in amdgpu_device_init()
3526 if (adev->asic_type >= CHIP_BONAIRE) { in amdgpu_device_init()
3527 adev->rmmio_base = pci_resource_start(adev->pdev, 5); in amdgpu_device_init()
3528 adev->rmmio_size = pci_resource_len(adev->pdev, 5); in amdgpu_device_init()
3530 adev->rmmio_base = pci_resource_start(adev->pdev, 2); in amdgpu_device_init()
3531 adev->rmmio_size = pci_resource_len(adev->pdev, 2); in amdgpu_device_init()
3534 adev->rmmio = ioremap(adev->rmmio_base, adev->rmmio_size); in amdgpu_device_init()
3535 if (adev->rmmio == NULL) { in amdgpu_device_init()
3538 DRM_INFO("register mmio base: 0x%08X\n", (uint32_t)adev->rmmio_base); in amdgpu_device_init()
3539 DRM_INFO("register mmio size: %u\n", (unsigned)adev->rmmio_size); in amdgpu_device_init()
3542 r = pci_enable_atomic_ops_to_root(adev->pdev, in amdgpu_device_init()
3546 adev->have_atomics_support = false; in amdgpu_device_init()
3549 adev->have_atomics_support = true; in amdgpu_device_init()
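
The register BAR is 5 on BONAIRE and later, 2 on older parts; it is ioremap'ed, and the root-port atomic capability is probed so have_atomics_support reflects whether PCIe CAS atomics (relied on by KFD) will work. A hedged kernel-style sketch of that sequence; pci_resource_start(), ioremap() and pci_enable_atomic_ops_to_root() are the real kernel APIs, while the surrounding struct is a simplified stand-in and this is not a complete driver:

    #include <linux/errno.h>
    #include <linux/io.h>
    #include <linux/pci.h>

    struct mini_dev {
            void __iomem *rmmio;
            bool have_atomics;
    };

    static int mini_map_mmio(struct mini_dev *d, struct pci_dev *pdev, int bar)
    {
            d->rmmio = ioremap(pci_resource_start(pdev, bar),
                               pci_resource_len(pdev, bar));
            if (!d->rmmio)
                    return -ENOMEM;

            /* 32/64-bit compare-and-swap at the root port gates atomics. */
            d->have_atomics = !pci_enable_atomic_ops_to_root(pdev,
                                    PCI_EXP_DEVCAP2_ATOMIC_COMP32 |
                                    PCI_EXP_DEVCAP2_ATOMIC_COMP64);
            return 0;
    }
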
3552 amdgpu_device_get_pcie_info(adev); in amdgpu_device_init()
3557 if (amdgpu_mes && adev->asic_type >= CHIP_NAVI10) in amdgpu_device_init()
3558 adev->enable_mes = true; in amdgpu_device_init()
3561 amdgpu_detect_virtualization(adev); in amdgpu_device_init()
3563 r = amdgpu_device_get_job_timeout_settings(adev); in amdgpu_device_init()
3565 dev_err(adev->dev, "invalid lockup_timeout parameter syntax\n"); in amdgpu_device_init()
3570 r = amdgpu_device_ip_early_init(adev); in amdgpu_device_init()
3575 amdgpu_device_doorbell_init(adev); in amdgpu_device_init()
3579 emu_soc_asic_init(adev); in amdgpu_device_init()
3583 amdgpu_reset_init(adev); in amdgpu_device_init()
3586 amdgpu_device_detect_sriov_bios(adev); in amdgpu_device_init()
3591 if (!amdgpu_sriov_vf(adev) && amdgpu_asic_need_reset_on_init(adev)) { in amdgpu_device_init()
3592 if (adev->gmc.xgmi.num_physical_nodes) { in amdgpu_device_init()
3593 dev_info(adev->dev, "Pending hive reset.\n"); in amdgpu_device_init()
3594 adev->gmc.xgmi.pending_reset = true; in amdgpu_device_init()
3596 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_init()
3597 if (!adev->ip_blocks[i].status.valid) in amdgpu_device_init()
3599 if (!(adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_GMC || in amdgpu_device_init()
3600 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_COMMON || in amdgpu_device_init()
3601 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_IH || in amdgpu_device_init()
3602 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_SMC)) { in amdgpu_device_init()
3604 adev->ip_blocks[i].version->funcs->name); in amdgpu_device_init()
3605 adev->ip_blocks[i].status.hw = true; in amdgpu_device_init()
3609 r = amdgpu_asic_reset(adev); in amdgpu_device_init()
3611 dev_err(adev->dev, "asic reset on init failed\n"); in amdgpu_device_init()
3617 pci_enable_pcie_error_reporting(adev->pdev); in amdgpu_device_init()
3620 if (amdgpu_device_need_post(adev)) { in amdgpu_device_init()
3621 if (!adev->bios) { in amdgpu_device_init()
3622 dev_err(adev->dev, "no vBIOS found\n"); in amdgpu_device_init()
3627 r = amdgpu_device_asic_init(adev); in amdgpu_device_init()
3629 dev_err(adev->dev, "gpu post error!\n"); in amdgpu_device_init()
3634 if (adev->is_atom_fw) { in amdgpu_device_init()
3636 r = amdgpu_atomfirmware_get_clock_info(adev); in amdgpu_device_init()
3638 dev_err(adev->dev, "amdgpu_atomfirmware_get_clock_info failed\n"); in amdgpu_device_init()
3639 amdgpu_vf_error_put(adev, AMDGIM_ERROR_VF_ATOMBIOS_GET_CLOCK_FAIL, 0, 0); in amdgpu_device_init()
3644 r = amdgpu_atombios_get_clock_info(adev); in amdgpu_device_init()
3646 dev_err(adev->dev, "amdgpu_atombios_get_clock_info failed\n"); in amdgpu_device_init()
3647 amdgpu_vf_error_put(adev, AMDGIM_ERROR_VF_ATOMBIOS_GET_CLOCK_FAIL, 0, 0); in amdgpu_device_init()
3651 if (!amdgpu_device_has_dc_support(adev)) in amdgpu_device_init()
3652 amdgpu_atombios_i2c_init(adev); in amdgpu_device_init()
3657 r = amdgpu_fence_driver_sw_init(adev); in amdgpu_device_init()
3659 dev_err(adev->dev, "amdgpu_fence_driver_sw_init failed\n"); in amdgpu_device_init()
3660 amdgpu_vf_error_put(adev, AMDGIM_ERROR_VF_FENCE_INIT_FAIL, 0, 0); in amdgpu_device_init()
3665 drm_mode_config_init(adev_to_drm(adev)); in amdgpu_device_init()
3667 r = amdgpu_device_ip_init(adev); in amdgpu_device_init()
3670 if (amdgpu_sriov_vf(adev) && in amdgpu_device_init()
3671 !amdgpu_sriov_runtime(adev) && in amdgpu_device_init()
3672 amdgpu_virt_mmio_blocked(adev) && in amdgpu_device_init()
3673 !amdgpu_virt_wait_reset(adev)) { in amdgpu_device_init()
3674 dev_err(adev->dev, "VF exclusive mode timeout\n"); in amdgpu_device_init()
3676 adev->virt.caps &= ~AMDGPU_SRIOV_CAPS_RUNTIME; in amdgpu_device_init()
3677 adev->virt.ops = NULL; in amdgpu_device_init()
3681 dev_err(adev->dev, "amdgpu_device_ip_init failed\n"); in amdgpu_device_init()
3682 amdgpu_vf_error_put(adev, AMDGIM_ERROR_VF_AMDGPU_INIT_FAIL, 0, 0); in amdgpu_device_init()
3686 amdgpu_fence_driver_hw_init(adev); in amdgpu_device_init()
3688 dev_info(adev->dev, in amdgpu_device_init()
3690 adev->gfx.config.max_shader_engines, in amdgpu_device_init()
3691 adev->gfx.config.max_sh_per_se, in amdgpu_device_init()
3692 adev->gfx.config.max_cu_per_sh, in amdgpu_device_init()
3693 adev->gfx.cu_info.number); in amdgpu_device_init()
3695 adev->accel_working = true; in amdgpu_device_init()
3697 amdgpu_vm_check_compute_bug(adev); in amdgpu_device_init()
3705 adev->mm_stats.log2_max_MBps = ilog2(max(1u, max_MBps)); in amdgpu_device_init()
3707 amdgpu_fbdev_init(adev); in amdgpu_device_init()
3709 r = amdgpu_pm_sysfs_init(adev); in amdgpu_device_init()
3711 adev->pm_sysfs_en = false; in amdgpu_device_init()
3714 adev->pm_sysfs_en = true; in amdgpu_device_init()
3716 r = amdgpu_ucode_sysfs_init(adev); in amdgpu_device_init()
3718 adev->ucode_sysfs_en = false; in amdgpu_device_init()
3721 adev->ucode_sysfs_en = true; in amdgpu_device_init()
3724 if (adev->accel_working) in amdgpu_device_init()
3725 amdgpu_test_moves(adev); in amdgpu_device_init()
3730 if (adev->accel_working) in amdgpu_device_init()
3731 amdgpu_benchmark(adev, amdgpu_benchmarking); in amdgpu_device_init()
3741 amdgpu_register_gpu_instance(adev); in amdgpu_device_init()
3746 if (!adev->gmc.xgmi.pending_reset) { in amdgpu_device_init()
3747 r = amdgpu_device_ip_late_init(adev); in amdgpu_device_init()
3749 dev_err(adev->dev, "amdgpu_device_ip_late_init failed\n"); in amdgpu_device_init()
3750 amdgpu_vf_error_put(adev, AMDGIM_ERROR_VF_AMDGPU_LATE_INIT_FAIL, 0, r); in amdgpu_device_init()
3754 amdgpu_ras_resume(adev); in amdgpu_device_init()
3755 queue_delayed_work(system_wq, &adev->delayed_init_work, in amdgpu_device_init()
3759 if (amdgpu_sriov_vf(adev)) in amdgpu_device_init()
3760 flush_delayed_work(&adev->delayed_init_work); in amdgpu_device_init()
3762 r = sysfs_create_files(&adev->dev->kobj, amdgpu_dev_attributes); in amdgpu_device_init()
3764 dev_err(adev->dev, "Could not create amdgpu device attr\n"); in amdgpu_device_init()
3767 r = amdgpu_pmu_init(adev); in amdgpu_device_init()
3769 dev_err(adev->dev, "amdgpu_pmu_init failed\n"); in amdgpu_device_init()
3772 if (amdgpu_device_cache_pci_state(adev->pdev)) in amdgpu_device_init()
3778 if ((adev->pdev->class >> 8) == PCI_CLASS_DISPLAY_VGA) in amdgpu_device_init()
3779 vga_client_register(adev->pdev, amdgpu_device_vga_set_decode); in amdgpu_device_init()
3783 vga_switcheroo_register_client(adev->pdev, in amdgpu_device_init()
3785 vga_switcheroo_init_domain_pm_ops(adev->dev, &adev->vga_pm_domain); in amdgpu_device_init()
3788 if (adev->gmc.xgmi.pending_reset) in amdgpu_device_init()
3795 amdgpu_release_ras_context(adev); in amdgpu_device_init()
3798 amdgpu_vf_error_trans_all(adev); in amdgpu_device_init()
3803 static void amdgpu_device_unmap_mmio(struct amdgpu_device *adev) in amdgpu_device_unmap_mmio() argument
3806 unmap_mapping_range(adev->ddev.anon_inode->i_mapping, 0, 0, 1); in amdgpu_device_unmap_mmio()
3809 amdgpu_device_doorbell_fini(adev); in amdgpu_device_unmap_mmio()
3811 iounmap(adev->rmmio); in amdgpu_device_unmap_mmio()
3812 adev->rmmio = NULL; in amdgpu_device_unmap_mmio()
3813 if (adev->mman.aper_base_kaddr) in amdgpu_device_unmap_mmio()
3814 iounmap(adev->mman.aper_base_kaddr); in amdgpu_device_unmap_mmio()
3815 adev->mman.aper_base_kaddr = NULL; in amdgpu_device_unmap_mmio()
3818 if (!adev->gmc.xgmi.connected_to_cpu) { in amdgpu_device_unmap_mmio()
3819 arch_phys_wc_del(adev->gmc.vram_mtrr); in amdgpu_device_unmap_mmio()
3820 arch_io_free_memtype_wc(adev->gmc.aper_base, adev->gmc.aper_size); in amdgpu_device_unmap_mmio()
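
Unmapping mirrors bring-up in reverse: doorbells go first, then the register BAR and the visible-VRAM aperture are iounmap'ed, and the write-combine MTRR/memtype is released only when VRAM is not CPU-coherent. A kernel-style sketch of just the unmap ordering (doorbell and userspace-mapping teardown omitted, fields simplified); iounmap() and arch_phys_wc_del() are the real APIs:

    #include <linux/io.h>

    struct mini_dev {
            void __iomem *rmmio;
            void __iomem *aper_kaddr;
            int vram_mtrr;
    };

    static void mini_unmap_mmio(struct mini_dev *d)
    {
            iounmap(d->rmmio);              /* register BAR first */
            d->rmmio = NULL;

            if (d->aper_kaddr)              /* then the VRAM aperture */
                    iounmap(d->aper_kaddr);
            d->aper_kaddr = NULL;

            arch_phys_wc_del(d->vram_mtrr); /* finally drop the WC mapping */
    }
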
3832 void amdgpu_device_fini_hw(struct amdgpu_device *adev) in amdgpu_device_fini_hw() argument
3834 dev_info(adev->dev, "amdgpu: finishing device.\n"); in amdgpu_device_fini_hw()
3835 flush_delayed_work(&adev->delayed_init_work); in amdgpu_device_fini_hw()
3836 if (adev->mman.initialized) { in amdgpu_device_fini_hw()
3837 flush_delayed_work(&adev->mman.bdev.wq); in amdgpu_device_fini_hw()
3838 ttm_bo_lock_delayed_workqueue(&adev->mman.bdev); in amdgpu_device_fini_hw()
3840 adev->shutdown = true; in amdgpu_device_fini_hw()
3845 if (amdgpu_sriov_vf(adev)) { in amdgpu_device_fini_hw()
3846 amdgpu_virt_request_full_gpu(adev, false); in amdgpu_device_fini_hw()
3847 amdgpu_virt_fini_data_exchange(adev); in amdgpu_device_fini_hw()
3851 amdgpu_irq_disable_all(adev); in amdgpu_device_fini_hw()
3852 if (adev->mode_info.mode_config_initialized) { in amdgpu_device_fini_hw()
3853 if (!amdgpu_device_has_dc_support(adev)) in amdgpu_device_fini_hw()
3854 drm_helper_force_disable_all(adev_to_drm(adev)); in amdgpu_device_fini_hw()
3856 drm_atomic_helper_shutdown(adev_to_drm(adev)); in amdgpu_device_fini_hw()
3858 amdgpu_fence_driver_hw_fini(adev); in amdgpu_device_fini_hw()
3860 if (adev->pm_sysfs_en) in amdgpu_device_fini_hw()
3861 amdgpu_pm_sysfs_fini(adev); in amdgpu_device_fini_hw()
3862 if (adev->ucode_sysfs_en) in amdgpu_device_fini_hw()
3863 amdgpu_ucode_sysfs_fini(adev); in amdgpu_device_fini_hw()
3864 sysfs_remove_files(&adev->dev->kobj, amdgpu_dev_attributes); in amdgpu_device_fini_hw()
3866 amdgpu_fbdev_fini(adev); in amdgpu_device_fini_hw()
3868 amdgpu_irq_fini_hw(adev); in amdgpu_device_fini_hw()
3870 amdgpu_device_ip_fini_early(adev); in amdgpu_device_fini_hw()
3872 amdgpu_gart_dummy_page_fini(adev); in amdgpu_device_fini_hw()
3874 amdgpu_device_unmap_mmio(adev); in amdgpu_device_fini_hw()
3877 void amdgpu_device_fini_sw(struct amdgpu_device *adev) in amdgpu_device_fini_sw() argument
3879 amdgpu_device_ip_fini(adev); in amdgpu_device_fini_sw()
3880 amdgpu_fence_driver_sw_fini(adev); in amdgpu_device_fini_sw()
3881 release_firmware(adev->firmware.gpu_info_fw); in amdgpu_device_fini_sw()
3882 adev->firmware.gpu_info_fw = NULL; in amdgpu_device_fini_sw()
3883 adev->accel_working = false; in amdgpu_device_fini_sw()
3885 amdgpu_reset_fini(adev); in amdgpu_device_fini_sw()
3888 if (!amdgpu_device_has_dc_support(adev)) in amdgpu_device_fini_sw()
3889 amdgpu_i2c_fini(adev); in amdgpu_device_fini_sw()
3892 amdgpu_atombios_fini(adev); in amdgpu_device_fini_sw()
3894 kfree(adev->bios); in amdgpu_device_fini_sw()
3895 adev->bios = NULL; in amdgpu_device_fini_sw()
3896 if (amdgpu_device_supports_px(adev_to_drm(adev))) { in amdgpu_device_fini_sw()
3897 vga_switcheroo_unregister_client(adev->pdev); in amdgpu_device_fini_sw()
3898 vga_switcheroo_fini_domain_pm_ops(adev->dev); in amdgpu_device_fini_sw()
3900 if ((adev->pdev->class >> 8) == PCI_CLASS_DISPLAY_VGA) in amdgpu_device_fini_sw()
3901 vga_client_unregister(adev->pdev); in amdgpu_device_fini_sw()
3904 amdgpu_pmu_fini(adev); in amdgpu_device_fini_sw()
3905 if (adev->mman.discovery_bin) in amdgpu_device_fini_sw()
3906 amdgpu_discovery_fini(adev); in amdgpu_device_fini_sw()
3908 kfree(adev->pci_state); in amdgpu_device_fini_sw()
3928 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_device_suspend() local
3933 adev->in_suspend = true; in amdgpu_device_suspend()
3941 amdgpu_fbdev_set_suspend(adev, 1); in amdgpu_device_suspend()
3943 cancel_delayed_work_sync(&adev->delayed_init_work); in amdgpu_device_suspend()
3945 amdgpu_ras_suspend(adev); in amdgpu_device_suspend()
3947 amdgpu_device_ip_suspend_phase1(adev); in amdgpu_device_suspend()
3949 if (!adev->in_s0ix) in amdgpu_device_suspend()
3950 amdgpu_amdkfd_suspend(adev, adev->in_runpm); in amdgpu_device_suspend()
3953 amdgpu_bo_evict_vram(adev); in amdgpu_device_suspend()
3955 amdgpu_fence_driver_hw_fini(adev); in amdgpu_device_suspend()
3957 amdgpu_device_ip_suspend_phase2(adev); in amdgpu_device_suspend()
3962 amdgpu_bo_evict_vram(adev); in amdgpu_device_suspend()
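
Suspend ordering matters here: phase 1 quiesces display, VRAM is evicted while the engines can still assist, the fence hardware is torn down, phase 2 suspends the remaining IPs, and then VRAM is evicted a second time; the second pass exists, as the driver's own comments describe, to catch buffers such as the GART table that cannot move until the blocks are quiesced. A print-only sketch of that sequence:

    #include <stdio.h>

    /* Stand-in stages that print instead of touching hardware. */
    static void suspend_phase1(void) { puts("phase1: display"); }
    static void evict_vram(void)     { puts("evict VRAM"); }
    static void fence_hw_fini(void)  { puts("fence hw fini"); }
    static void suspend_phase2(void) { puts("phase2: remaining IPs"); }

    int main(void)
    {
        suspend_phase1();
        evict_vram();       /* first pass while engines can still help */
        fence_hw_fini();
        suspend_phase2();
        evict_vram();       /* second pass catches late buffers, e.g. GART */
        return 0;
    }
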
3979 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_device_resume() local
3985 if (adev->in_s0ix) in amdgpu_device_resume()
3986 amdgpu_gfx_state_change_set(adev, sGpuChangeState_D0Entry); in amdgpu_device_resume()
3989 if (amdgpu_device_need_post(adev)) { in amdgpu_device_resume()
3990 r = amdgpu_device_asic_init(adev); in amdgpu_device_resume()
3992 dev_err(adev->dev, "amdgpu asic init failed\n"); in amdgpu_device_resume()
3995 r = amdgpu_device_ip_resume(adev); in amdgpu_device_resume()
3997 dev_err(adev->dev, "amdgpu_device_ip_resume failed (%d).\n", r); in amdgpu_device_resume()
4000 amdgpu_fence_driver_hw_init(adev); in amdgpu_device_resume()
4002 r = amdgpu_device_ip_late_init(adev); in amdgpu_device_resume()
4006 queue_delayed_work(system_wq, &adev->delayed_init_work, in amdgpu_device_resume()
4009 if (!adev->in_s0ix) { in amdgpu_device_resume()
4010 r = amdgpu_amdkfd_resume(adev, adev->in_runpm); in amdgpu_device_resume()
4016 flush_delayed_work(&adev->delayed_init_work); in amdgpu_device_resume()
4019 amdgpu_fbdev_set_suspend(adev, 0); in amdgpu_device_resume()
4023 amdgpu_ras_resume(adev); in amdgpu_device_resume()
4037 if (!amdgpu_device_has_dc_support(adev)) in amdgpu_device_resume()
4044 adev->in_suspend = false; in amdgpu_device_resume()
4062 static bool amdgpu_device_ip_check_soft_reset(struct amdgpu_device *adev) in amdgpu_device_ip_check_soft_reset() argument
4067 if (amdgpu_sriov_vf(adev)) in amdgpu_device_ip_check_soft_reset()
4070 if (amdgpu_asic_need_full_reset(adev)) in amdgpu_device_ip_check_soft_reset()
4073 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_check_soft_reset()
4074 if (!adev->ip_blocks[i].status.valid) in amdgpu_device_ip_check_soft_reset()
4076 if (adev->ip_blocks[i].version->funcs->check_soft_reset) in amdgpu_device_ip_check_soft_reset()
4077 adev->ip_blocks[i].status.hang = in amdgpu_device_ip_check_soft_reset()
4078 adev->ip_blocks[i].version->funcs->check_soft_reset(adev); in amdgpu_device_ip_check_soft_reset()
4079 if (adev->ip_blocks[i].status.hang) { in amdgpu_device_ip_check_soft_reset()
4080 dev_info(adev->dev, "IP block:%s is hung!\n", adev->ip_blocks[i].version->funcs->name); in amdgpu_device_ip_check_soft_reset()
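
The hang check iterates the IP-block table, invoking the optional per-block check_soft_reset callback and caching the result in status.hang; the pre/soft/post soft-reset helpers that follow reuse the same loop shape with different callbacks. A standalone sketch of that optional-callback dispatch:

    #include <stdio.h>
    #include <stdbool.h>

    struct ip_block {
        const char *name;
        bool hang;
        bool (*check_soft_reset)(void);  /* optional, may be NULL */
    };

    static bool check_all(struct ip_block *b, int n)
    {
        bool any = false;
        for (int i = 0; i < n; i++) {
            if (b[i].check_soft_reset)
                b[i].hang = b[i].check_soft_reset();
            if (b[i].hang) {
                fprintf(stderr, "IP block %s is hung\n", b[i].name);
                any = true;
            }
        }
        return any;
    }

    static bool gfx_hung(void) { return true; }

    int main(void)
    {
        struct ip_block blocks[] = { { "gfx", false, gfx_hung } };
        printf("need recovery: %d\n", check_all(blocks, 1));
        return 0;
    }
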
4098 static int amdgpu_device_ip_pre_soft_reset(struct amdgpu_device *adev) in amdgpu_device_ip_pre_soft_reset() argument
4102 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_pre_soft_reset()
4103 if (!adev->ip_blocks[i].status.valid) in amdgpu_device_ip_pre_soft_reset()
4105 if (adev->ip_blocks[i].status.hang && in amdgpu_device_ip_pre_soft_reset()
4106 adev->ip_blocks[i].version->funcs->pre_soft_reset) { in amdgpu_device_ip_pre_soft_reset()
4107 r = adev->ip_blocks[i].version->funcs->pre_soft_reset(adev); in amdgpu_device_ip_pre_soft_reset()
4125 static bool amdgpu_device_ip_need_full_reset(struct amdgpu_device *adev) in amdgpu_device_ip_need_full_reset() argument
4129 if (amdgpu_asic_need_full_reset(adev)) in amdgpu_device_ip_need_full_reset()
4132 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_need_full_reset()
4133 if (!adev->ip_blocks[i].status.valid) in amdgpu_device_ip_need_full_reset()
4135 if ((adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_GMC) || in amdgpu_device_ip_need_full_reset()
4136 (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_SMC) || in amdgpu_device_ip_need_full_reset()
4137 (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_ACP) || in amdgpu_device_ip_need_full_reset()
4138 (adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_DCE) || in amdgpu_device_ip_need_full_reset()
4139 adev->ip_blocks[i].version->type == AMD_IP_BLOCK_TYPE_PSP) { in amdgpu_device_ip_need_full_reset()
4140 if (adev->ip_blocks[i].status.hang) { in amdgpu_device_ip_need_full_reset()
4141 dev_info(adev->dev, "Some blocks need full reset!\n"); in amdgpu_device_ip_need_full_reset()
4160 static int amdgpu_device_ip_soft_reset(struct amdgpu_device *adev) in amdgpu_device_ip_soft_reset() argument
4164 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_soft_reset()
4165 if (!adev->ip_blocks[i].status.valid) in amdgpu_device_ip_soft_reset()
4167 if (adev->ip_blocks[i].status.hang && in amdgpu_device_ip_soft_reset()
4168 adev->ip_blocks[i].version->funcs->soft_reset) { in amdgpu_device_ip_soft_reset()
4169 r = adev->ip_blocks[i].version->funcs->soft_reset(adev); in amdgpu_device_ip_soft_reset()
4189 static int amdgpu_device_ip_post_soft_reset(struct amdgpu_device *adev) in amdgpu_device_ip_post_soft_reset() argument
4193 for (i = 0; i < adev->num_ip_blocks; i++) { in amdgpu_device_ip_post_soft_reset()
4194 if (!adev->ip_blocks[i].status.valid) in amdgpu_device_ip_post_soft_reset()
4196 if (adev->ip_blocks[i].status.hang && in amdgpu_device_ip_post_soft_reset()
4197 adev->ip_blocks[i].version->funcs->post_soft_reset) in amdgpu_device_ip_post_soft_reset()
4198 r = adev->ip_blocks[i].version->funcs->post_soft_reset(adev); in amdgpu_device_ip_post_soft_reset()
4218 static int amdgpu_device_recover_vram(struct amdgpu_device *adev) in amdgpu_device_recover_vram() argument
4225 if (amdgpu_sriov_runtime(adev)) in amdgpu_device_recover_vram()
4230 dev_info(adev->dev, "recover vram bo from shadow start\n"); in amdgpu_device_recover_vram()
4231 mutex_lock(&adev->shadow_list_lock); in amdgpu_device_recover_vram()
4232 list_for_each_entry(vmbo, &adev->shadow_list, shadow_list) { in amdgpu_device_recover_vram()
4259 mutex_unlock(&adev->shadow_list_lock); in amdgpu_device_recover_vram()
4266 dev_err(adev->dev, "recover vram bo from shadow failed, r is %ld, tmo is %ld\n", r, tmo); in amdgpu_device_recover_vram()
4270 dev_info(adev->dev, "recover vram bo from shadow done\n"); in amdgpu_device_recover_vram()
4284 static int amdgpu_device_reset_sriov(struct amdgpu_device *adev, in amdgpu_device_reset_sriov() argument
4290 r = amdgpu_virt_request_full_gpu(adev, true); in amdgpu_device_reset_sriov()
4292 r = amdgpu_virt_reset_gpu(adev); in amdgpu_device_reset_sriov()
4296 amdgpu_amdkfd_pre_reset(adev); in amdgpu_device_reset_sriov()
4299 r = amdgpu_device_ip_reinit_early_sriov(adev); in amdgpu_device_reset_sriov()
4303 amdgpu_virt_init_data_exchange(adev); in amdgpu_device_reset_sriov()
4305 amdgpu_gtt_mgr_recover(ttm_manager_type(&adev->mman.bdev, TTM_PL_TT)); in amdgpu_device_reset_sriov()
4307 r = amdgpu_device_fw_loading(adev); in amdgpu_device_reset_sriov()
4312 r = amdgpu_device_ip_reinit_late_sriov(adev); in amdgpu_device_reset_sriov()
4316 amdgpu_irq_gpu_reset_resume_helper(adev); in amdgpu_device_reset_sriov()
4317 r = amdgpu_ib_ring_tests(adev); in amdgpu_device_reset_sriov()
4318 amdgpu_amdkfd_post_reset(adev); in amdgpu_device_reset_sriov()
4321 if (!r && adev->virt.gim_feature & AMDGIM_FEATURE_GIM_FLR_VRAMLOST) { in amdgpu_device_reset_sriov()
4322 amdgpu_inc_vram_lost(adev); in amdgpu_device_reset_sriov()
4323 r = amdgpu_device_recover_vram(adev); in amdgpu_device_reset_sriov()
4325 amdgpu_virt_release_full_gpu(adev, true); in amdgpu_device_reset_sriov()
4337 bool amdgpu_device_has_job_running(struct amdgpu_device *adev) in amdgpu_device_has_job_running() argument
4343 struct amdgpu_ring *ring = adev->rings[i]; in amdgpu_device_has_job_running()
4366 bool amdgpu_device_should_recover_gpu(struct amdgpu_device *adev) in amdgpu_device_should_recover_gpu() argument
4368 if (!amdgpu_device_ip_check_soft_reset(adev)) { in amdgpu_device_should_recover_gpu()
4369 dev_info(adev->dev, "Timeout, but no hardware hang detected.\n"); in amdgpu_device_should_recover_gpu()
4376 if (amdgpu_sriov_vf(adev)) in amdgpu_device_should_recover_gpu()
4380 switch (adev->asic_type) { in amdgpu_device_should_recover_gpu()
4414 dev_info(adev->dev, "GPU recovery disabled.\n"); in amdgpu_device_should_recover_gpu()
4418 int amdgpu_device_mode1_reset(struct amdgpu_device *adev) in amdgpu_device_mode1_reset() argument
4423 amdgpu_atombios_scratch_regs_engine_hung(adev, true); in amdgpu_device_mode1_reset()
4425 dev_info(adev->dev, "GPU mode1 reset\n"); in amdgpu_device_mode1_reset()
4428 pci_clear_master(adev->pdev); in amdgpu_device_mode1_reset()
4430 amdgpu_device_cache_pci_state(adev->pdev); in amdgpu_device_mode1_reset()
4432 if (amdgpu_dpm_is_mode1_reset_supported(adev)) { in amdgpu_device_mode1_reset()
4433 dev_info(adev->dev, "GPU smu mode1 reset\n"); in amdgpu_device_mode1_reset()
4434 ret = amdgpu_dpm_mode1_reset(adev); in amdgpu_device_mode1_reset()
4436 dev_info(adev->dev, "GPU psp mode1 reset\n"); in amdgpu_device_mode1_reset()
4437 ret = psp_gpu_reset(adev); in amdgpu_device_mode1_reset()
4441 dev_err(adev->dev, "GPU mode1 reset failed\n"); in amdgpu_device_mode1_reset()
4443 amdgpu_device_load_pci_state(adev->pdev); in amdgpu_device_mode1_reset()
4446 for (i = 0; i < adev->usec_timeout; i++) { in amdgpu_device_mode1_reset()
4447 u32 memsize = adev->nbio.funcs->get_memsize(adev); in amdgpu_device_mode1_reset()
4454 amdgpu_atombios_scratch_regs_engine_hung(adev, false); in amdgpu_device_mode1_reset()
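
After the SMU or PSP performs the mode-1 reset, the cached PCI state is reloaded and the code polls the NBIO memsize register until it stops reading back as all-ones (0xffffffff means config space is still dark), bounded by usec_timeout. A standalone sketch of that bounded readback poll, with a hypothetical register read:

    #include <stdio.h>
    #include <stdint.h>

    /* Hypothetical register read; 0xffffffff means "not back yet". */
    static uint32_t read_memsize(int tick)
    {
        return tick < 3 ? 0xffffffffu : 256;
    }

    static int wait_for_device(int timeout_usec)
    {
        for (int i = 0; i < timeout_usec; i++) {
            uint32_t memsize = read_memsize(i);
            if (memsize != 0xffffffffu)
                return 0;       /* config space readable again */
            /* udelay(1) here in the kernel version */
        }
        return -1;              /* reset did not complete in time */
    }

    int main(void)
    {
        printf("wait: %d\n", wait_for_device(100));
        return 0;
    }
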
4458 int amdgpu_device_pre_asic_reset(struct amdgpu_device *adev, in amdgpu_device_pre_asic_reset() argument
4466 if (reset_context->reset_req_dev == adev) in amdgpu_device_pre_asic_reset()
4470 if (!adev->gmc.xgmi.pending_reset) in amdgpu_device_pre_asic_reset()
4471 amdgpu_debugfs_wait_dump(adev); in amdgpu_device_pre_asic_reset()
4473 if (amdgpu_sriov_vf(adev)) { in amdgpu_device_pre_asic_reset()
4475 amdgpu_virt_fini_data_exchange(adev); in amdgpu_device_pre_asic_reset()
4480 struct amdgpu_ring *ring = adev->rings[i]; in amdgpu_device_pre_asic_reset()
4503 r = amdgpu_reset_prepare_hwcontext(adev, reset_context); in amdgpu_device_pre_asic_reset()
4511 if (!amdgpu_sriov_vf(adev)) { in amdgpu_device_pre_asic_reset()
4514 need_full_reset = amdgpu_device_ip_need_full_reset(adev); in amdgpu_device_pre_asic_reset()
4517 amdgpu_device_ip_pre_soft_reset(adev); in amdgpu_device_pre_asic_reset()
4518 r = amdgpu_device_ip_soft_reset(adev); in amdgpu_device_pre_asic_reset()
4519 amdgpu_device_ip_post_soft_reset(adev); in amdgpu_device_pre_asic_reset()
4520 if (r || amdgpu_device_ip_check_soft_reset(adev)) { in amdgpu_device_pre_asic_reset()
4521 dev_info(adev->dev, "soft reset failed, will fall back to full reset!\n"); in amdgpu_device_pre_asic_reset()
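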
4527 r = amdgpu_device_ip_suspend(adev); in amdgpu_device_pre_asic_reset()
4709 static bool amdgpu_device_lock_adev(struct amdgpu_device *adev, in amdgpu_device_lock_adev() argument
4712 if (atomic_cmpxchg(&adev->in_gpu_reset, 0, 1) != 0) in amdgpu_device_lock_adev()
4716 down_write_nest_lock(&adev->reset_sem, &hive->hive_lock); in amdgpu_device_lock_adev()
4718 down_write(&adev->reset_sem); in amdgpu_device_lock_adev()
4721 switch (amdgpu_asic_reset_method(adev)) { in amdgpu_device_lock_adev()
4723 adev->mp1_state = PP_MP1_STATE_SHUTDOWN; in amdgpu_device_lock_adev()
4726 adev->mp1_state = PP_MP1_STATE_RESET; in amdgpu_device_lock_adev()
4729 adev->mp1_state = PP_MP1_STATE_NONE; in amdgpu_device_lock_adev()
4736 static void amdgpu_device_unlock_adev(struct amdgpu_device *adev) in amdgpu_device_unlock_adev() argument
4738 amdgpu_vf_error_trans_all(adev); in amdgpu_device_unlock_adev()
4739 adev->mp1_state = PP_MP1_STATE_NONE; in amdgpu_device_unlock_adev()
4740 atomic_set(&adev->in_gpu_reset, 0); in amdgpu_device_unlock_adev()
4741 up_write(&adev->reset_sem); in amdgpu_device_unlock_adev()
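
Recovery is single-entry per device: an atomic 0-to-1 compare-exchange on in_gpu_reset turns away concurrent recoveries, the write side of reset_sem then excludes register accessors (omitted below), and unlock reverses both. A standalone sketch of the compare-exchange gate using C11 atomics in place of the kernel's atomic_cmpxchg():

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdio.h>

    static atomic_int in_gpu_reset;     /* rwsem omitted in this sketch */

    /* Analogous to amdgpu_device_lock_adev(): only one caller may win. */
    static bool lock_adev(void)
    {
        int expected = 0;
        return atomic_compare_exchange_strong(&in_gpu_reset, &expected, 1);
    }

    static void unlock_adev(void)
    {
        atomic_store(&in_gpu_reset, 0);
    }

    int main(void)
    {
        printf("first:  %d\n", lock_adev());   /* 1: wins */
        printf("second: %d\n", lock_adev());   /* 0: reset in progress */
        unlock_adev();
        return 0;
    }
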
4750 static int amdgpu_device_lock_hive_adev(struct amdgpu_device *adev, struct amdgpu_hive_info *hive) in amdgpu_device_lock_hive_adev() argument
4754 if (adev->gmc.xgmi.num_physical_nodes > 1) { in amdgpu_device_lock_hive_adev()
4756 dev_err(adev->dev, "Hive is NULL while device has multiple xgmi nodes"); in amdgpu_device_lock_hive_adev()
4763 } else if (!amdgpu_device_lock_adev(adev, hive)) in amdgpu_device_lock_hive_adev()
4784 static void amdgpu_device_resume_display_audio(struct amdgpu_device *adev) in amdgpu_device_resume_display_audio() argument
4788 p = pci_get_domain_bus_and_slot(pci_domain_nr(adev->pdev->bus), in amdgpu_device_resume_display_audio()
4789 adev->pdev->bus->number, 1); in amdgpu_device_resume_display_audio()
4796 static int amdgpu_device_suspend_display_audio(struct amdgpu_device *adev) in amdgpu_device_suspend_display_audio() argument
4806 reset_method = amdgpu_asic_reset_method(adev); in amdgpu_device_suspend_display_audio()
4811 p = pci_get_domain_bus_and_slot(pci_domain_nr(adev->pdev->bus), in amdgpu_device_suspend_display_audio()
4812 adev->pdev->bus->number, 1); in amdgpu_device_suspend_display_audio()
4831 dev_warn(adev->dev, "failed to suspend display audio\n"); in amdgpu_device_suspend_display_audio()
4843 struct amdgpu_device *adev, struct list_head *device_list_handle, in amdgpu_device_recheck_guilty_jobs() argument
4849 struct amdgpu_ring *ring = adev->rings[i]; in amdgpu_device_recheck_guilty_jobs()
4874 if (amdgpu_sriov_vf(adev)) { in amdgpu_device_recheck_guilty_jobs()
4875 amdgpu_virt_fini_data_exchange(adev); in amdgpu_device_recheck_guilty_jobs()
4876 r = amdgpu_device_reset_sriov(adev, false); in amdgpu_device_recheck_guilty_jobs()
4878 adev->asic_reset_res = r; in amdgpu_device_recheck_guilty_jobs()
4892 atomic_inc(&adev->gpu_reset_counter); in amdgpu_device_recheck_guilty_jobs()
4921 int amdgpu_device_gpu_recover(struct amdgpu_device *adev, in amdgpu_device_gpu_recover() argument
4939 need_emergency_restart = amdgpu_ras_need_emergency_restart(adev); in amdgpu_device_gpu_recover()
4945 if (need_emergency_restart && amdgpu_ras_get_context(adev)->reboot) { in amdgpu_device_gpu_recover()
4952 dev_info(adev->dev, "GPU %s begin!\n", in amdgpu_device_gpu_recover()
4962 hive = amdgpu_get_xgmi_hive(adev); in amdgpu_device_gpu_recover()
4976 reset_context.reset_req_dev = adev; in amdgpu_device_gpu_recover()
4986 r = amdgpu_device_lock_hive_adev(adev, hive); in amdgpu_device_gpu_recover()
4988 dev_info(adev->dev, "Bailing on TDR for s_job:%llx, as another already in progress", in amdgpu_device_gpu_recover()
5003 if (adev->gmc.xgmi.num_physical_nodes > 1) { in amdgpu_device_gpu_recover()
5006 if (!list_is_first(&adev->reset_list, &device_list)) in amdgpu_device_gpu_recover()
5007 list_rotate_to_front(&adev->reset_list, &device_list); in amdgpu_device_gpu_recover()
5010 list_add_tail(&adev->reset_list, &device_list); in amdgpu_device_gpu_recover()
5075 dev_info(adev->dev, "Guilty job already signaled, skipping HW reset"); in amdgpu_device_gpu_recover()
5090 tmp_vram_lost_counter = atomic_read(&((adev)->vram_lost_counter)); in amdgpu_device_gpu_recover()
5093 if (amdgpu_sriov_vf(adev)) { in amdgpu_device_gpu_recover()
5094 r = amdgpu_device_reset_sriov(adev, job ? false : true); in amdgpu_device_gpu_recover()
5096 adev->asic_reset_res = r; in amdgpu_device_gpu_recover()
5116 !(tmp_vram_lost_counter < atomic_read(&adev->vram_lost_counter))) in amdgpu_device_gpu_recover()
5159 if (!adev->kfd.init_complete) in amdgpu_device_gpu_recover()
5160 amdgpu_amdkfd_device_init(adev); in amdgpu_device_gpu_recover()
5175 dev_info(adev->dev, "GPU reset end with ret = %d\n", r); in amdgpu_device_gpu_recover()
5188 static void amdgpu_device_get_pcie_info(struct amdgpu_device *adev) in amdgpu_device_get_pcie_info() argument
5195 adev->pm.pcie_gen_mask = amdgpu_pcie_gen_cap; in amdgpu_device_get_pcie_info()
5198 adev->pm.pcie_mlw_mask = amdgpu_pcie_lane_cap; in amdgpu_device_get_pcie_info()
5201 if (pci_is_root_bus(adev->pdev->bus)) { in amdgpu_device_get_pcie_info()
5202 if (adev->pm.pcie_gen_mask == 0) in amdgpu_device_get_pcie_info()
5203 adev->pm.pcie_gen_mask = AMDGPU_DEFAULT_PCIE_GEN_MASK; in amdgpu_device_get_pcie_info()
5204 if (adev->pm.pcie_mlw_mask == 0) in amdgpu_device_get_pcie_info()
5205 adev->pm.pcie_mlw_mask = AMDGPU_DEFAULT_PCIE_MLW_MASK; in amdgpu_device_get_pcie_info()
5209 if (adev->pm.pcie_gen_mask && adev->pm.pcie_mlw_mask) in amdgpu_device_get_pcie_info()
5212 pcie_bandwidth_available(adev->pdev, NULL, in amdgpu_device_get_pcie_info()
5215 if (adev->pm.pcie_gen_mask == 0) { in amdgpu_device_get_pcie_info()
5217 pdev = adev->pdev; in amdgpu_device_get_pcie_info()
5220 adev->pm.pcie_gen_mask |= (CAIL_ASIC_PCIE_LINK_SPEED_SUPPORT_GEN1 | in amdgpu_device_get_pcie_info()
5225 adev->pm.pcie_gen_mask |= (CAIL_ASIC_PCIE_LINK_SPEED_SUPPORT_GEN1 | in amdgpu_device_get_pcie_info()
5231 adev->pm.pcie_gen_mask |= (CAIL_ASIC_PCIE_LINK_SPEED_SUPPORT_GEN1 | in amdgpu_device_get_pcie_info()
5236 adev->pm.pcie_gen_mask |= (CAIL_ASIC_PCIE_LINK_SPEED_SUPPORT_GEN1 | in amdgpu_device_get_pcie_info()
5240 adev->pm.pcie_gen_mask |= (CAIL_ASIC_PCIE_LINK_SPEED_SUPPORT_GEN1 | in amdgpu_device_get_pcie_info()
5243 adev->pm.pcie_gen_mask |= CAIL_ASIC_PCIE_LINK_SPEED_SUPPORT_GEN1; in amdgpu_device_get_pcie_info()
5247 adev->pm.pcie_gen_mask |= (CAIL_PCIE_LINK_SPEED_SUPPORT_GEN1 | in amdgpu_device_get_pcie_info()
5251 adev->pm.pcie_gen_mask |= (CAIL_PCIE_LINK_SPEED_SUPPORT_GEN1 | in amdgpu_device_get_pcie_info()
5257 adev->pm.pcie_gen_mask |= (CAIL_PCIE_LINK_SPEED_SUPPORT_GEN1 | in amdgpu_device_get_pcie_info()
5262 adev->pm.pcie_gen_mask |= (CAIL_PCIE_LINK_SPEED_SUPPORT_GEN1 | in amdgpu_device_get_pcie_info()
5266 adev->pm.pcie_gen_mask |= (CAIL_PCIE_LINK_SPEED_SUPPORT_GEN1 | in amdgpu_device_get_pcie_info()
5269 adev->pm.pcie_gen_mask |= CAIL_PCIE_LINK_SPEED_SUPPORT_GEN1; in amdgpu_device_get_pcie_info()
5273 if (adev->pm.pcie_mlw_mask == 0) { in amdgpu_device_get_pcie_info()
5275 adev->pm.pcie_mlw_mask |= AMDGPU_DEFAULT_PCIE_MLW_MASK; in amdgpu_device_get_pcie_info()
5279 adev->pm.pcie_mlw_mask = (CAIL_PCIE_LINK_WIDTH_SUPPORT_X32 | in amdgpu_device_get_pcie_info()
5288 adev->pm.pcie_mlw_mask = (CAIL_PCIE_LINK_WIDTH_SUPPORT_X16 | in amdgpu_device_get_pcie_info()
5296 adev->pm.pcie_mlw_mask = (CAIL_PCIE_LINK_WIDTH_SUPPORT_X12 | in amdgpu_device_get_pcie_info()
5303 adev->pm.pcie_mlw_mask = (CAIL_PCIE_LINK_WIDTH_SUPPORT_X8 | in amdgpu_device_get_pcie_info()
5309 adev->pm.pcie_mlw_mask = (CAIL_PCIE_LINK_WIDTH_SUPPORT_X4 | in amdgpu_device_get_pcie_info()
5314 adev->pm.pcie_mlw_mask = (CAIL_PCIE_LINK_WIDTH_SUPPORT_X2 | in amdgpu_device_get_pcie_info()
5318 adev->pm.pcie_mlw_mask = CAIL_PCIE_LINK_WIDTH_SUPPORT_X1; in amdgpu_device_get_pcie_info()
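
When neither the module parameters nor the platform defaults decide the link caps, the probed top speed is expanded into a cumulative mask: a Gen3-capable link also advertises Gen1 and Gen2, which is why the CAIL masks above are ORed chains. A standalone sketch of that cumulative-mask construction with simplified flag names:

    #include <stdio.h>

    #define GEN1 (1u << 0)
    #define GEN2 (1u << 1)
    #define GEN3 (1u << 2)
    #define GEN4 (1u << 3)

    /* Builds a cumulative "supported generations" mask the way the
     * CAIL masks do: a Gen3 link implies Gen1 and Gen2 as well. */
    static unsigned gen_mask(int max_gen)
    {
        static const unsigned bits[] = { GEN1, GEN2, GEN3, GEN4 };
        unsigned mask = 0;
        for (int g = 0; g < max_gen && g < 4; g++)
            mask |= bits[g];
        return mask;
    }

    int main(void)
    {
        printf("gen3 mask: 0x%x\n", gen_mask(3));  /* 0x7 = GEN1|GEN2|GEN3 */
        return 0;
    }
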
5329 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_device_baco_enter() local
5330 struct amdgpu_ras *ras = amdgpu_ras_get_context(adev); in amdgpu_device_baco_enter()
5332 if (!amdgpu_device_supports_baco(adev_to_drm(adev))) in amdgpu_device_baco_enter()
5335 if (ras && adev->ras_enabled && in amdgpu_device_baco_enter()
5336 adev->nbio.funcs->enable_doorbell_interrupt) in amdgpu_device_baco_enter()
5337 adev->nbio.funcs->enable_doorbell_interrupt(adev, false); in amdgpu_device_baco_enter()
5339 return amdgpu_dpm_baco_enter(adev); in amdgpu_device_baco_enter()
5344 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_device_baco_exit() local
5345 struct amdgpu_ras *ras = amdgpu_ras_get_context(adev); in amdgpu_device_baco_exit()
5348 if (!amdgpu_device_supports_baco(adev_to_drm(adev))) in amdgpu_device_baco_exit()
5351 ret = amdgpu_dpm_baco_exit(adev); in amdgpu_device_baco_exit()
5355 if (ras && adev->ras_enabled && in amdgpu_device_baco_exit()
5356 adev->nbio.funcs->enable_doorbell_interrupt) in amdgpu_device_baco_exit()
5357 adev->nbio.funcs->enable_doorbell_interrupt(adev, true); in amdgpu_device_baco_exit()
5359 if (amdgpu_passthrough(adev) && in amdgpu_device_baco_exit()
5360 adev->nbio.funcs->clear_doorbell_interrupt) in amdgpu_device_baco_exit()
5361 adev->nbio.funcs->clear_doorbell_interrupt(adev); in amdgpu_device_baco_exit()
5366 static void amdgpu_cancel_all_tdr(struct amdgpu_device *adev) in amdgpu_cancel_all_tdr() argument
5371 struct amdgpu_ring *ring = adev->rings[i]; in amdgpu_cancel_all_tdr()
5392 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_pci_error_detected() local
5397 if (adev->gmc.xgmi.num_physical_nodes > 1) { in amdgpu_pci_error_detected()
5402 adev->pci_channel_state = state; in amdgpu_pci_error_detected()
5416 while (!amdgpu_device_lock_adev(adev, NULL)) in amdgpu_pci_error_detected()
5417 amdgpu_cancel_all_tdr(adev); in amdgpu_pci_error_detected()
5424 struct amdgpu_ring *ring = adev->rings[i]; in amdgpu_pci_error_detected()
5431 atomic_inc(&adev->gpu_reset_counter); in amdgpu_pci_error_detected()
5471 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_pci_slot_reset() local
5482 list_add_tail(&adev->reset_list, &device_list); in amdgpu_pci_slot_reset()
5491 for (i = 0; i < adev->usec_timeout; i++) { in amdgpu_pci_slot_reset()
5492 memsize = amdgpu_asic_get_config_memsize(adev); in amdgpu_pci_slot_reset()
5504 reset_context.reset_req_dev = adev; in amdgpu_pci_slot_reset()
5508 adev->no_hw_access = true; in amdgpu_pci_slot_reset()
5509 r = amdgpu_device_pre_asic_reset(adev, &reset_context); in amdgpu_pci_slot_reset()
5510 adev->no_hw_access = false; in amdgpu_pci_slot_reset()
5518 if (amdgpu_device_cache_pci_state(adev->pdev)) in amdgpu_pci_slot_reset()
5519 pci_restore_state(adev->pdev); in amdgpu_pci_slot_reset()
5524 amdgpu_device_unlock_adev(adev); in amdgpu_pci_slot_reset()
5540 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_pci_resume() local
5547 if (adev->pci_channel_state != pci_channel_io_frozen) in amdgpu_pci_resume()
5551 struct amdgpu_ring *ring = adev->rings[i]; in amdgpu_pci_resume()
5561 amdgpu_device_unlock_adev(adev); in amdgpu_pci_resume()
5567 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_device_cache_pci_state() local
5572 kfree(adev->pci_state); in amdgpu_device_cache_pci_state()
5574 adev->pci_state = pci_store_saved_state(pdev); in amdgpu_device_cache_pci_state()
5576 if (!adev->pci_state) { in amdgpu_device_cache_pci_state()
5591 struct amdgpu_device *adev = drm_to_adev(dev); in amdgpu_device_load_pci_state() local
5594 if (!adev->pci_state) in amdgpu_device_load_pci_state()
5597 r = pci_load_saved_state(pdev, adev->pci_state); in amdgpu_device_load_pci_state()
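
The cache/load pair keeps a private copy of config space from a known-good moment so a slot reset can reinject it: snapshot into the pci_dev, duplicate it with pci_store_saved_state(), and later load and restore it. A hedged kernel-style sketch; pci_save_state(), pci_store_saved_state(), pci_load_saved_state() and pci_restore_state() are the real APIs, while the wrapper struct is a simplified stand-in:

    #include <linux/pci.h>
    #include <linux/slab.h>

    struct mini_dev {
            struct pci_saved_state *pci_state;
    };

    static bool mini_cache_pci_state(struct mini_dev *d, struct pci_dev *pdev)
    {
            pci_save_state(pdev);           /* snapshot into the pci_dev */
            kfree(d->pci_state);            /* drop any stale copy */
            d->pci_state = pci_store_saved_state(pdev);
            return d->pci_state != NULL;
    }

    static int mini_load_pci_state(struct mini_dev *d, struct pci_dev *pdev)
    {
            int r;

            if (!d->pci_state)
                    return -EINVAL;
            r = pci_load_saved_state(pdev, d->pci_state);
            if (!r)
                    pci_restore_state(pdev); /* write it back to the device */
            return r;
    }
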
5609 void amdgpu_device_flush_hdp(struct amdgpu_device *adev, in amdgpu_device_flush_hdp() argument
5613 if (adev->flags & AMD_IS_APU) in amdgpu_device_flush_hdp()
5616 if (adev->gmc.xgmi.connected_to_cpu) in amdgpu_device_flush_hdp()
5622 amdgpu_asic_flush_hdp(adev, ring); in amdgpu_device_flush_hdp()
5625 void amdgpu_device_invalidate_hdp(struct amdgpu_device *adev, in amdgpu_device_invalidate_hdp() argument
5629 if (adev->flags & AMD_IS_APU) in amdgpu_device_invalidate_hdp()
5632 if (adev->gmc.xgmi.connected_to_cpu) in amdgpu_device_invalidate_hdp()
5635 amdgpu_asic_invalidate_hdp(adev, ring); in amdgpu_device_invalidate_hdp()