Lines matching refs: adev (all matches are in drivers/gpu/drm/amd/amdgpu/vi.c)
84 static u32 vi_pcie_rreg(struct amdgpu_device *adev, u32 reg) in vi_pcie_rreg() argument
89 spin_lock_irqsave(&adev->pcie_idx_lock, flags); in vi_pcie_rreg()
93 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags); in vi_pcie_rreg()
97 static void vi_pcie_wreg(struct amdgpu_device *adev, u32 reg, u32 v) in vi_pcie_wreg() argument
101 spin_lock_irqsave(&adev->pcie_idx_lock, flags); in vi_pcie_wreg()
106 spin_unlock_irqrestore(&adev->pcie_idx_lock, flags); in vi_pcie_wreg()
109 static u32 vi_smc_rreg(struct amdgpu_device *adev, u32 reg) in vi_smc_rreg() argument
114 spin_lock_irqsave(&adev->smc_idx_lock, flags); in vi_smc_rreg()
117 spin_unlock_irqrestore(&adev->smc_idx_lock, flags); in vi_smc_rreg()
121 static void vi_smc_wreg(struct amdgpu_device *adev, u32 reg, u32 v) in vi_smc_wreg() argument
125 spin_lock_irqsave(&adev->smc_idx_lock, flags); in vi_smc_wreg()
128 spin_unlock_irqrestore(&adev->smc_idx_lock, flags); in vi_smc_wreg()
135 static u32 cz_smc_rreg(struct amdgpu_device *adev, u32 reg) in cz_smc_rreg() argument
140 spin_lock_irqsave(&adev->smc_idx_lock, flags); in cz_smc_rreg()
143 spin_unlock_irqrestore(&adev->smc_idx_lock, flags); in cz_smc_rreg()
147 static void cz_smc_wreg(struct amdgpu_device *adev, u32 reg, u32 v) in cz_smc_wreg() argument
151 spin_lock_irqsave(&adev->smc_idx_lock, flags); in cz_smc_wreg()
154 spin_unlock_irqrestore(&adev->smc_idx_lock, flags); in cz_smc_wreg()
157 static u32 vi_uvd_ctx_rreg(struct amdgpu_device *adev, u32 reg) in vi_uvd_ctx_rreg() argument
162 spin_lock_irqsave(&adev->uvd_ctx_idx_lock, flags); in vi_uvd_ctx_rreg()
165 spin_unlock_irqrestore(&adev->uvd_ctx_idx_lock, flags); in vi_uvd_ctx_rreg()
169 static void vi_uvd_ctx_wreg(struct amdgpu_device *adev, u32 reg, u32 v) in vi_uvd_ctx_wreg() argument
173 spin_lock_irqsave(&adev->uvd_ctx_idx_lock, flags); in vi_uvd_ctx_wreg()
176 spin_unlock_irqrestore(&adev->uvd_ctx_idx_lock, flags); in vi_uvd_ctx_wreg()
179 static u32 vi_didt_rreg(struct amdgpu_device *adev, u32 reg) in vi_didt_rreg() argument
184 spin_lock_irqsave(&adev->didt_idx_lock, flags); in vi_didt_rreg()
187 spin_unlock_irqrestore(&adev->didt_idx_lock, flags); in vi_didt_rreg()
191 static void vi_didt_wreg(struct amdgpu_device *adev, u32 reg, u32 v) in vi_didt_wreg() argument
195 spin_lock_irqsave(&adev->didt_idx_lock, flags); in vi_didt_wreg()
198 spin_unlock_irqrestore(&adev->didt_idx_lock, flags); in vi_didt_wreg()
201 static u32 vi_gc_cac_rreg(struct amdgpu_device *adev, u32 reg) in vi_gc_cac_rreg() argument
206 spin_lock_irqsave(&adev->gc_cac_idx_lock, flags); in vi_gc_cac_rreg()
209 spin_unlock_irqrestore(&adev->gc_cac_idx_lock, flags); in vi_gc_cac_rreg()
213 static void vi_gc_cac_wreg(struct amdgpu_device *adev, u32 reg, u32 v) in vi_gc_cac_wreg() argument
217 spin_lock_irqsave(&adev->gc_cac_idx_lock, flags); in vi_gc_cac_wreg()
220 spin_unlock_irqrestore(&adev->gc_cac_idx_lock, flags); in vi_gc_cac_wreg()
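All seven accessor pairs above share one index/data pattern: take the block's spinlock, write the target offset into the block's INDEX register, then read or write through the DATA register. A minimal sketch of the PCIE pair, with the elided body reconstructed on the assumption that it is the usual mmPCIE_INDEX/mmPCIE_DATA sequence:

static u32 vi_pcie_rreg(struct amdgpu_device *adev, u32 reg)
{
	unsigned long flags;
	u32 r;

	/* serialize all users of the PCIE index/data pair */
	spin_lock_irqsave(&adev->pcie_idx_lock, flags);
	WREG32(mmPCIE_INDEX, reg);	/* select the register */
	(void)RREG32(mmPCIE_INDEX);	/* read back to post the write */
	r = RREG32(mmPCIE_DATA);	/* fetch its value */
	spin_unlock_irqrestore(&adev->pcie_idx_lock, flags);
	return r;
}

static void vi_pcie_wreg(struct amdgpu_device *adev, u32 reg, u32 v)
{
	unsigned long flags;

	spin_lock_irqsave(&adev->pcie_idx_lock, flags);
	WREG32(mmPCIE_INDEX, reg);
	(void)RREG32(mmPCIE_INDEX);
	WREG32(mmPCIE_DATA, v);
	(void)RREG32(mmPCIE_DATA);
	spin_unlock_irqrestore(&adev->pcie_idx_lock, flags);
}

The smc, uvd_ctx, didt and gc_cac variants differ only in the lock taken and the INDEX/DATA register pair used (the dGPU SMC accessors go through mmSMC_IND_INDEX_11/mmSMC_IND_DATA_11; cz_smc_rreg/cz_smc_wreg use a different SMC index port reserved for the APU path).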
271 static void vi_init_golden_registers(struct amdgpu_device *adev) in vi_init_golden_registers() argument
274 mutex_lock(&adev->grbm_idx_mutex); in vi_init_golden_registers()
276 if (amdgpu_sriov_vf(adev)) { in vi_init_golden_registers()
277 xgpu_vi_init_golden_registers(adev); in vi_init_golden_registers()
278 mutex_unlock(&adev->grbm_idx_mutex); in vi_init_golden_registers()
282 switch (adev->asic_type) { in vi_init_golden_registers()
284 amdgpu_device_program_register_sequence(adev, in vi_init_golden_registers()
289 amdgpu_device_program_register_sequence(adev, in vi_init_golden_registers()
294 amdgpu_device_program_register_sequence(adev, in vi_init_golden_registers()
299 amdgpu_device_program_register_sequence(adev, in vi_init_golden_registers()
304 amdgpu_device_program_register_sequence(adev, in vi_init_golden_registers()
315 mutex_unlock(&adev->grbm_idx_mutex); in vi_init_golden_registers()
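The golden-register tables passed to amdgpu_device_program_register_sequence() are flat arrays of { offset, and_mask, or_mask } u32 triples, applied one entry at a time as a read-modify-write. Roughly (a simplified sketch of the core helper, not vi.c code):

void amdgpu_device_program_register_sequence(struct amdgpu_device *adev,
					     const u32 *registers,
					     const u32 array_size)
{
	u32 tmp, reg, and_mask, or_mask;
	int i;

	if (array_size % 3)
		return;

	for (i = 0; i < array_size; i += 3) {
		reg = registers[i + 0];
		and_mask = registers[i + 1];
		or_mask = registers[i + 2];

		if (and_mask == 0xffffffff) {
			tmp = or_mask;	/* full-word override */
		} else {
			tmp = RREG32(reg);
			tmp &= ~and_mask;
			tmp |= or_mask & and_mask;
		}
		WREG32(reg, tmp);
	}
}

vi_init_golden_registers() holds grbm_idx_mutex across the whole sequence (lines 274/315 above), presumably because some of the programmed registers depend on the current GRBM_GFX_INDEX selection.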
326 static u32 vi_get_xclk(struct amdgpu_device *adev) in vi_get_xclk() argument
328 u32 reference_clock = adev->clock.spll.reference_freq; in vi_get_xclk()
331 if (adev->flags & AMD_IS_APU) in vi_get_xclk()
358 void vi_srbm_select(struct amdgpu_device *adev, in vi_srbm_select() argument
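vi_srbm_select() steers subsequent SRBM-banked register accesses at a given ME/pipe/queue/VMID by programming mmSRBM_GFX_CNTL. A sketch of the body, assuming the standard REG_SET_FIELD sequence from the VI register headers:

void vi_srbm_select(struct amdgpu_device *adev,
		    u32 me, u32 pipe, u32 queue, u32 vmid)
{
	u32 srbm_gfx_cntl = 0;

	srbm_gfx_cntl = REG_SET_FIELD(srbm_gfx_cntl, SRBM_GFX_CNTL, PIPEID, pipe);
	srbm_gfx_cntl = REG_SET_FIELD(srbm_gfx_cntl, SRBM_GFX_CNTL, MEID, me);
	srbm_gfx_cntl = REG_SET_FIELD(srbm_gfx_cntl, SRBM_GFX_CNTL, VMID, vmid);
	srbm_gfx_cntl = REG_SET_FIELD(srbm_gfx_cntl, SRBM_GFX_CNTL, QUEUEID, queue);
	WREG32(mmSRBM_GFX_CNTL, srbm_gfx_cntl);
}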
369 static void vi_vga_set_state(struct amdgpu_device *adev, bool state) in vi_vga_set_state() argument
374 static bool vi_read_disabled_bios(struct amdgpu_device *adev) in vi_read_disabled_bios() argument
384 if (adev->mode_info.num_crtc) { in vi_read_disabled_bios()
393 if (adev->mode_info.num_crtc) { in vi_read_disabled_bios()
406 r = amdgpu_read_bios(adev); in vi_read_disabled_bios()
410 if (adev->mode_info.num_crtc) { in vi_read_disabled_bios()
419 static bool vi_read_bios_from_rom(struct amdgpu_device *adev, in vi_read_bios_from_rom() argument
431 if (adev->flags & AMD_IS_APU) in vi_read_bios_from_rom()
437 spin_lock_irqsave(&adev->smc_idx_lock, flags); in vi_read_bios_from_rom()
445 spin_unlock_irqrestore(&adev->smc_idx_lock, flags); in vi_read_bios_from_rom()
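vi_read_bios_from_rom() pulls the VBIOS image out a dword at a time through the SMC index/data port, which is why it takes smc_idx_lock. A hedged sketch of the flow between the matched lines (register names as used by the other SMC accessors in vi.c; treat the details as reconstructed):

static bool vi_read_bios_from_rom(struct amdgpu_device *adev,
				  u8 *bios, u32 length_bytes)
{
	u32 *dw_ptr = (u32 *)bios;
	u32 i, length_dw = ALIGN(length_bytes, 4) / 4;
	unsigned long flags;

	if (bios == NULL || length_bytes == 0)
		return false;
	if (adev->flags & AMD_IS_APU)	/* APU VBIOS lives in the system BIOS */
		return false;

	spin_lock_irqsave(&adev->smc_idx_lock, flags);
	WREG32(mmSMC_IND_INDEX_11, ixROM_INDEX);	/* rewind the ROM read pointer */
	WREG32(mmSMC_IND_DATA_11, 0);
	WREG32(mmSMC_IND_INDEX_11, ixROM_DATA);		/* continuous-read port */
	for (i = 0; i < length_dw; i++)
		dw_ptr[i] = RREG32(mmSMC_IND_DATA_11);
	spin_unlock_irqrestore(&adev->smc_idx_lock, flags);

	return true;
}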
450 static void vi_detect_hw_virtualization(struct amdgpu_device *adev) in vi_detect_hw_virtualization() argument
454 if (adev->asic_type == CHIP_TONGA || in vi_detect_hw_virtualization()
455 adev->asic_type == CHIP_FIJI) { in vi_detect_hw_virtualization()
459 adev->virt.caps |= AMDGPU_SRIOV_CAPS_IS_VF; in vi_detect_hw_virtualization()
462 adev->virt.caps |= AMDGPU_SRIOV_CAPS_ENABLE_IOV; in vi_detect_hw_virtualization()
467 adev->virt.caps |= AMDGPU_PASSTHROUGH_MODE; in vi_detect_hw_virtualization()
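vi_detect_hw_virtualization() classifies the function by reading the BIF's IOV identification register on the SR-IOV-capable parts (Tonga/Fiji), then falls back to a passthrough heuristic when no IOV identification is present inside a VM. A reconstructed sketch (bit layout per the VI headers; hedged):

static void vi_detect_hw_virtualization(struct amdgpu_device *adev)
{
	uint32_t reg = 0;

	if (adev->asic_type == CHIP_TONGA ||
	    adev->asic_type == CHIP_FIJI) {
		reg = RREG32(mmBIF_IOV_FUNC_IDENTIFIER);
		/* bit 0: 0 = PF, 1 = VF */
		if (REG_GET_FIELD(reg, BIF_IOV_FUNC_IDENTIFIER, FUNC_IDENTIFICATION))
			adev->virt.caps |= AMDGPU_SRIOV_CAPS_IS_VF;
		/* bit 31: IOV enabled by the host */
		if (REG_GET_FIELD(reg, BIF_IOV_FUNC_IDENTIFIER, IOV_ENABLE))
			adev->virt.caps |= AMDGPU_SRIOV_CAPS_ENABLE_IOV;
	}

	/* no IOV identification at all, yet running inside a VM: passthrough */
	if (reg == 0 && is_virtual_machine())
		adev->virt.caps |= AMDGPU_PASSTHROUGH_MODE;
}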
550 static uint32_t vi_get_register_value(struct amdgpu_device *adev, in vi_get_register_value() argument
561 return adev->gfx.config.rb_config[se_idx][sh_idx].rb_backend_disable; in vi_get_register_value()
563 return adev->gfx.config.rb_config[se_idx][sh_idx].user_rb_backend_disable; in vi_get_register_value()
565 return adev->gfx.config.rb_config[se_idx][sh_idx].raster_config; in vi_get_register_value()
567 return adev->gfx.config.rb_config[se_idx][sh_idx].raster_config_1; in vi_get_register_value()
570 mutex_lock(&adev->grbm_idx_mutex); in vi_get_register_value()
572 amdgpu_gfx_select_se_sh(adev, se_num, sh_num, 0xffffffff); in vi_get_register_value()
577 amdgpu_gfx_select_se_sh(adev, 0xffffffff, 0xffffffff, 0xffffffff); in vi_get_register_value()
578 mutex_unlock(&adev->grbm_idx_mutex); in vi_get_register_value()
585 return adev->gfx.config.gb_addr_config; in vi_get_register_value()
587 return adev->gfx.config.mc_arb_ramcfg; in vi_get_register_value()
621 return adev->gfx.config.tile_mode_array[idx]; in vi_get_register_value()
639 return adev->gfx.config.macrotile_mode_array[idx]; in vi_get_register_value()
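For GRBM-indexed registers, vi_get_register_value() first answers the per-SE/SH RB config queries from cached state (the rb_config returns above) without touching hardware; everything else is read with an se/sh select bracketed by a restore to broadcast mode, all under grbm_idx_mutex. The indexed read path, sketched:

	u32 val;

	mutex_lock(&adev->grbm_idx_mutex);
	if (se_num != 0xffffffff || sh_num != 0xffffffff)
		amdgpu_gfx_select_se_sh(adev, se_num, sh_num, 0xffffffff);

	val = RREG32(reg_offset);

	if (se_num != 0xffffffff || sh_num != 0xffffffff)
		/* restore broadcast so later accesses see all SEs/SHs */
		amdgpu_gfx_select_se_sh(adev, 0xffffffff, 0xffffffff, 0xffffffff);
	mutex_unlock(&adev->grbm_idx_mutex);
	return val;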
646 static int vi_read_register(struct amdgpu_device *adev, u32 se_num, in vi_read_register() argument
658 *value = vi_get_register_value(adev, indexed, se_num, sh_num, in vi_read_register()
665 static int vi_gpu_pci_config_reset(struct amdgpu_device *adev) in vi_gpu_pci_config_reset() argument
669 dev_info(adev->dev, "GPU pci config reset\n"); in vi_gpu_pci_config_reset()
672 pci_clear_master(adev->pdev); in vi_gpu_pci_config_reset()
674 amdgpu_device_pci_config_reset(adev); in vi_gpu_pci_config_reset()
679 for (i = 0; i < adev->usec_timeout; i++) { in vi_gpu_pci_config_reset()
682 pci_set_master(adev->pdev); in vi_gpu_pci_config_reset()
683 adev->has_hw_reset = true; in vi_gpu_pci_config_reset()
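The PCI config reset sequence: drop bus mastering, trigger the reset through config space, then poll mmCONFIG_MEMSIZE until it stops reading back as all-ones, which signals the ASIC has come out of reset. A sketch, filling the elided lines with the usual udelay/poll loop:

static int vi_gpu_pci_config_reset(struct amdgpu_device *adev)
{
	u32 i;

	dev_info(adev->dev, "GPU pci config reset\n");

	pci_clear_master(adev->pdev);		/* disable bus mastering */
	amdgpu_device_pci_config_reset(adev);	/* reset via config space */

	udelay(100);

	/* wait for the ASIC to come out of reset */
	for (i = 0; i < adev->usec_timeout; i++) {
		if (RREG32(mmCONFIG_MEMSIZE) != 0xffffffff) {
			pci_set_master(adev->pdev);
			adev->has_hw_reset = true;
			return 0;
		}
		udelay(1);
	}
	return -EINVAL;
}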
700 static int vi_asic_reset(struct amdgpu_device *adev) in vi_asic_reset() argument
704 amdgpu_atombios_scratch_regs_engine_hung(adev, true); in vi_asic_reset()
706 r = vi_gpu_pci_config_reset(adev); in vi_asic_reset()
708 amdgpu_atombios_scratch_regs_engine_hung(adev, false); in vi_asic_reset()
713 static u32 vi_get_config_memsize(struct amdgpu_device *adev) in vi_get_config_memsize() argument
718 static int vi_set_uvd_clock(struct amdgpu_device *adev, u32 clock, in vi_set_uvd_clock() argument
725 r = amdgpu_atombios_get_clock_dividers(adev, in vi_set_uvd_clock()
733 if (adev->flags & AMD_IS_APU) in vi_set_uvd_clock()
743 if (adev->flags & AMD_IS_APU) { in vi_set_uvd_clock()
764 static int vi_set_uvd_clocks(struct amdgpu_device *adev, u32 vclk, u32 dclk) in vi_set_uvd_clocks() argument
768 if (adev->flags & AMD_IS_APU) { in vi_set_uvd_clocks()
769 r = vi_set_uvd_clock(adev, vclk, ixGNB_CLK2_DFS_CNTL, ixGNB_CLK2_STATUS); in vi_set_uvd_clocks()
773 r = vi_set_uvd_clock(adev, dclk, ixGNB_CLK1_DFS_CNTL, ixGNB_CLK1_STATUS); in vi_set_uvd_clocks()
777 r = vi_set_uvd_clock(adev, vclk, ixCG_VCLK_CNTL, ixCG_VCLK_STATUS); in vi_set_uvd_clocks()
781 r = vi_set_uvd_clock(adev, dclk, ixCG_DCLK_CNTL, ixCG_DCLK_STATUS); in vi_set_uvd_clocks()
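vi_set_uvd_clock() is the shared worker behind both calls above: it asks ATOM for the divider that produces the requested clock, programs the post-divider into cntl_reg, then polls status_reg until the clock reports stable. The APU and dGPU paths differ in which CNTL/STATUS pair they use (GNB_CLK* vs CG_VCLK/CG_DCLK, per the caller) and in the exact mask names. A hedged sketch of the shape, with the mask and bit names treated as illustrative:

static int vi_set_uvd_clock(struct amdgpu_device *adev, u32 clock,
			    u32 cntl_reg, u32 status_reg)
{
	struct atom_clock_dividers dividers;
	uint32_t tmp;
	int r, i;

	r = amdgpu_atombios_get_clock_dividers(adev,
			COMPUTE_GPUCLK_INPUT_FLAG_DEFAULT_GPUCLK,
			clock, false, &dividers);
	if (r)
		return r;

	/* swap in the new post-divider */
	tmp = RREG32_SMC(cntl_reg);
	tmp &= ~CG_DCLK_CNTL__DCLK_DIVIDER_MASK;
	tmp |= dividers.post_divider;
	WREG32_SMC(cntl_reg, tmp);

	/* wait for the clock to report stable */
	for (i = 0; i < 100; i++) {
		if (RREG32_SMC(status_reg) & CG_DCLK_STATUS__DCLK_STATUS_MASK)
			break;
		mdelay(10);
	}
	if (i == 100)
		return -ETIMEDOUT;

	return 0;
}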
789 static int vi_set_vce_clocks(struct amdgpu_device *adev, u32 evclk, u32 ecclk) in vi_set_vce_clocks() argument
799 if (adev->flags & AMD_IS_APU) { in vi_set_vce_clocks()
811 r = amdgpu_atombios_get_clock_dividers(adev, in vi_set_vce_clocks()
843 static void vi_pcie_gen3_enable(struct amdgpu_device *adev) in vi_pcie_gen3_enable() argument
845 if (pci_is_root_bus(adev->pdev->bus)) in vi_pcie_gen3_enable()
851 if (adev->flags & AMD_IS_APU) in vi_pcie_gen3_enable()
854 if (!(adev->pm.pcie_gen_mask & (CAIL_PCIE_LINK_SPEED_SUPPORT_GEN2 | in vi_pcie_gen3_enable()
861 static void vi_program_aspm(struct amdgpu_device *adev) in vi_program_aspm() argument
870 static void vi_enable_doorbell_aperture(struct amdgpu_device *adev, in vi_enable_doorbell_aperture() argument
876 if (adev->flags & AMD_IS_APU) in vi_enable_doorbell_aperture()
892 static uint32_t vi_get_rev_id(struct amdgpu_device *adev) in vi_get_rev_id() argument
894 if (adev->flags & AMD_IS_APU) in vi_get_rev_id()
902 static void vi_flush_hdp(struct amdgpu_device *adev, struct amdgpu_ring *ring) in vi_flush_hdp() argument
912 static void vi_invalidate_hdp(struct amdgpu_device *adev, in vi_invalidate_hdp() argument
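Both HDP hooks write a single register, either directly over MMIO or via the ring when a ring that can emit register writes is supplied, so that the flush lands in command-stream order. A sketch, with the register choice (mmHDP_MEM_COHERENCY_FLUSH_CNTL for flush, mmHDP_DEBUG0 for invalidate) reconstructed from how this GPU generation handles HDP elsewhere in amdgpu:

static void vi_flush_hdp(struct amdgpu_device *adev, struct amdgpu_ring *ring)
{
	if (!ring || !ring->funcs->emit_wreg) {
		WREG32(mmHDP_MEM_COHERENCY_FLUSH_CNTL, 1);
		RREG32(mmHDP_MEM_COHERENCY_FLUSH_CNTL);	/* post the write */
	} else {
		amdgpu_ring_emit_wreg(ring, mmHDP_MEM_COHERENCY_FLUSH_CNTL, 1);
	}
}

static void vi_invalidate_hdp(struct amdgpu_device *adev,
			      struct amdgpu_ring *ring)
{
	if (!ring || !ring->funcs->emit_wreg) {
		WREG32(mmHDP_DEBUG0, 1);
		RREG32(mmHDP_DEBUG0);
	} else {
		amdgpu_ring_emit_wreg(ring, mmHDP_DEBUG0, 1);
	}
}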
923 static bool vi_need_full_reset(struct amdgpu_device *adev) in vi_need_full_reset() argument
925 switch (adev->asic_type) { in vi_need_full_reset()
965 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in vi_common_early_init() local
967 if (adev->flags & AMD_IS_APU) { in vi_common_early_init()
968 adev->smc_rreg = &cz_smc_rreg; in vi_common_early_init()
969 adev->smc_wreg = &cz_smc_wreg; in vi_common_early_init()
971 adev->smc_rreg = &vi_smc_rreg; in vi_common_early_init()
972 adev->smc_wreg = &vi_smc_wreg; in vi_common_early_init()
974 adev->pcie_rreg = &vi_pcie_rreg; in vi_common_early_init()
975 adev->pcie_wreg = &vi_pcie_wreg; in vi_common_early_init()
976 adev->uvd_ctx_rreg = &vi_uvd_ctx_rreg; in vi_common_early_init()
977 adev->uvd_ctx_wreg = &vi_uvd_ctx_wreg; in vi_common_early_init()
978 adev->didt_rreg = &vi_didt_rreg; in vi_common_early_init()
979 adev->didt_wreg = &vi_didt_wreg; in vi_common_early_init()
980 adev->gc_cac_rreg = &vi_gc_cac_rreg; in vi_common_early_init()
981 adev->gc_cac_wreg = &vi_gc_cac_wreg; in vi_common_early_init()
983 adev->asic_funcs = &vi_asic_funcs; in vi_common_early_init()
985 adev->rev_id = vi_get_rev_id(adev); in vi_common_early_init()
986 adev->external_rev_id = 0xFF; in vi_common_early_init()
987 switch (adev->asic_type) { in vi_common_early_init()
989 adev->cg_flags = 0; in vi_common_early_init()
990 adev->pg_flags = 0; in vi_common_early_init()
991 adev->external_rev_id = 0x1; in vi_common_early_init()
994 adev->cg_flags = AMD_CG_SUPPORT_GFX_MGCG | in vi_common_early_init()
1011 adev->pg_flags = 0; in vi_common_early_init()
1012 adev->external_rev_id = adev->rev_id + 0x3c; in vi_common_early_init()
1015 adev->cg_flags = AMD_CG_SUPPORT_GFX_MGCG | in vi_common_early_init()
1028 adev->pg_flags = 0; in vi_common_early_init()
1029 adev->external_rev_id = adev->rev_id + 0x14; in vi_common_early_init()
1032 adev->cg_flags = AMD_CG_SUPPORT_GFX_MGCG | in vi_common_early_init()
1051 adev->pg_flags = 0; in vi_common_early_init()
1052 adev->external_rev_id = adev->rev_id + 0x5A; in vi_common_early_init()
1055 adev->cg_flags = AMD_CG_SUPPORT_GFX_MGCG | in vi_common_early_init()
1074 adev->pg_flags = 0; in vi_common_early_init()
1075 adev->external_rev_id = adev->rev_id + 0x50; in vi_common_early_init()
1078 adev->cg_flags = AMD_CG_SUPPORT_GFX_MGCG | in vi_common_early_init()
1097 adev->pg_flags = 0; in vi_common_early_init()
1098 adev->external_rev_id = adev->rev_id + 0x64; in vi_common_early_init()
1101 adev->cg_flags = 0; in vi_common_early_init()
1121 adev->pg_flags = 0; in vi_common_early_init()
1122 adev->external_rev_id = adev->rev_id + 0x6E; in vi_common_early_init()
1125 adev->cg_flags = AMD_CG_SUPPORT_UVD_MGCG | in vi_common_early_init()
1141 adev->pg_flags = 0; in vi_common_early_init()
1142 if (adev->rev_id != 0x00 || CZ_REV_BRISTOL(adev->pdev->revision)) { in vi_common_early_init()
1143 adev->pg_flags |= AMD_PG_SUPPORT_GFX_SMG | in vi_common_early_init()
1149 adev->external_rev_id = adev->rev_id + 0x1; in vi_common_early_init()
1152 adev->cg_flags = AMD_CG_SUPPORT_UVD_MGCG | in vi_common_early_init()
1166 adev->pg_flags = AMD_PG_SUPPORT_GFX_PG | in vi_common_early_init()
1172 adev->external_rev_id = adev->rev_id + 0x61; in vi_common_early_init()
1179 if (amdgpu_sriov_vf(adev)) { in vi_common_early_init()
1180 amdgpu_virt_init_setting(adev); in vi_common_early_init()
1181 xgpu_vi_mailbox_set_irq_funcs(adev); in vi_common_early_init()
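The accessor pointers wired up at the top of vi_common_early_init() are what the generic indirect-register macros dispatch through, so the rest of the driver never needs to know whether it is running on a Carrizo-class APU or a dGPU. From amdgpu.h, simplified:

#define RREG32_PCIE(reg)	adev->pcie_rreg(adev, (reg))
#define WREG32_PCIE(reg, v)	adev->pcie_wreg(adev, (reg), (v))
#define RREG32_SMC(reg)		adev->smc_rreg(adev, (reg))
#define WREG32_SMC(reg, v)	adev->smc_wreg(adev, (reg), (v))
#define RREG32_UVD_CTX(reg)	adev->uvd_ctx_rreg(adev, (reg))
#define WREG32_UVD_CTX(reg, v)	adev->uvd_ctx_wreg(adev, (reg), (v))
#define RREG32_DIDT(reg)	adev->didt_rreg(adev, (reg))
#define WREG32_DIDT(reg, v)	adev->didt_wreg(adev, (reg), (v))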
1189 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in vi_common_late_init() local
1191 if (amdgpu_sriov_vf(adev)) in vi_common_late_init()
1192 xgpu_vi_mailbox_get_irq(adev); in vi_common_late_init()
1199 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in vi_common_sw_init() local
1201 if (amdgpu_sriov_vf(adev)) in vi_common_sw_init()
1202 xgpu_vi_mailbox_add_irq_id(adev); in vi_common_sw_init()
1214 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in vi_common_hw_init() local
1217 vi_init_golden_registers(adev); in vi_common_hw_init()
1219 vi_pcie_gen3_enable(adev); in vi_common_hw_init()
1221 vi_program_aspm(adev); in vi_common_hw_init()
1223 vi_enable_doorbell_aperture(adev, true); in vi_common_hw_init()
1230 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in vi_common_hw_fini() local
1233 vi_enable_doorbell_aperture(adev, false); in vi_common_hw_fini()
1235 if (amdgpu_sriov_vf(adev)) in vi_common_hw_fini()
1236 xgpu_vi_mailbox_put_irq(adev); in vi_common_hw_fini()
1243 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in vi_common_suspend() local
1245 return vi_common_hw_fini(adev); in vi_common_suspend()
1250 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in vi_common_resume() local
1252 return vi_common_hw_init(adev); in vi_common_resume()
1270 static void vi_update_bif_medium_grain_light_sleep(struct amdgpu_device *adev, in vi_update_bif_medium_grain_light_sleep() argument
1277 if (enable && (adev->cg_flags & AMD_CG_SUPPORT_BIF_LS)) in vi_update_bif_medium_grain_light_sleep()
1290 static void vi_update_hdp_medium_grain_clock_gating(struct amdgpu_device *adev, in vi_update_hdp_medium_grain_clock_gating() argument
1297 if (enable && (adev->cg_flags & AMD_CG_SUPPORT_HDP_MGCG)) in vi_update_hdp_medium_grain_clock_gating()
1306 static void vi_update_hdp_light_sleep(struct amdgpu_device *adev, in vi_update_hdp_light_sleep() argument
1313 if (enable && (adev->cg_flags & AMD_CG_SUPPORT_HDP_LS)) in vi_update_hdp_light_sleep()
1322 static void vi_update_drm_light_sleep(struct amdgpu_device *adev, in vi_update_drm_light_sleep() argument
1329 if (enable && (adev->cg_flags & AMD_CG_SUPPORT_DRM_LS)) in vi_update_drm_light_sleep()
1339 static void vi_update_rom_medium_grain_clock_gating(struct amdgpu_device *adev, in vi_update_rom_medium_grain_clock_gating() argument
1346 if (enable && (adev->cg_flags & AMD_CG_SUPPORT_ROM_MGCG)) in vi_update_rom_medium_grain_clock_gating()
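Each vi_update_* helper above is the same gate/ungate read-modify-write: read the block's clockgating register, set or clear the enable masks depending on `enable` and the corresponding cg_flags capability bit, and write back only on change to avoid needless posted writes. The BIF light-sleep one, sketched with the PCIE_CNTL2 mask names treated as reconstructed:

static void vi_update_bif_medium_grain_light_sleep(struct amdgpu_device *adev,
						   bool enable)
{
	uint32_t temp, data;

	temp = data = RREG32_PCIE(ixPCIE_CNTL2);

	if (enable && (adev->cg_flags & AMD_CG_SUPPORT_BIF_LS))
		data |= PCIE_CNTL2__SLV_MEM_LS_EN_MASK |
			PCIE_CNTL2__MST_MEM_LS_EN_MASK |
			PCIE_CNTL2__REPLAY_MEM_LS_EN_MASK;
	else
		data &= ~(PCIE_CNTL2__SLV_MEM_LS_EN_MASK |
			  PCIE_CNTL2__MST_MEM_LS_EN_MASK |
			  PCIE_CNTL2__REPLAY_MEM_LS_EN_MASK);

	if (temp != data)
		WREG32_PCIE(ixPCIE_CNTL2, data);	/* write only on change */
}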
1362 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in vi_common_set_clockgating_state_by_smu() local
1364 if (adev->cg_flags & (AMD_CG_SUPPORT_MC_LS | AMD_CG_SUPPORT_MC_MGCG)) { in vi_common_set_clockgating_state_by_smu()
1365 if (adev->cg_flags & AMD_CG_SUPPORT_MC_LS) { in vi_common_set_clockgating_state_by_smu()
1369 if (adev->cg_flags & AMD_CG_SUPPORT_MC_MGCG) { in vi_common_set_clockgating_state_by_smu()
1379 if (adev->powerplay.pp_funcs->set_clockgating_by_smu) in vi_common_set_clockgating_state_by_smu()
1380 amdgpu_dpm_set_clockgating_by_smu(adev, msg_id); in vi_common_set_clockgating_state_by_smu()
1383 if (adev->cg_flags & (AMD_CG_SUPPORT_SDMA_LS | AMD_CG_SUPPORT_SDMA_MGCG)) { in vi_common_set_clockgating_state_by_smu()
1384 if (adev->cg_flags & AMD_CG_SUPPORT_SDMA_LS) { in vi_common_set_clockgating_state_by_smu()
1388 if (adev->cg_flags & AMD_CG_SUPPORT_SDMA_MGCG) { in vi_common_set_clockgating_state_by_smu()
1398 if (adev->powerplay.pp_funcs->set_clockgating_by_smu) in vi_common_set_clockgating_state_by_smu()
1399 amdgpu_dpm_set_clockgating_by_smu(adev, msg_id); in vi_common_set_clockgating_state_by_smu()
1402 if (adev->cg_flags & (AMD_CG_SUPPORT_HDP_LS | AMD_CG_SUPPORT_HDP_MGCG)) { in vi_common_set_clockgating_state_by_smu()
1403 if (adev->cg_flags & AMD_CG_SUPPORT_HDP_LS) { in vi_common_set_clockgating_state_by_smu()
1407 if (adev->cg_flags & AMD_CG_SUPPORT_HDP_MGCG) { in vi_common_set_clockgating_state_by_smu()
1417 if (adev->powerplay.pp_funcs->set_clockgating_by_smu) in vi_common_set_clockgating_state_by_smu()
1418 amdgpu_dpm_set_clockgating_by_smu(adev, msg_id); in vi_common_set_clockgating_state_by_smu()
1422 if (adev->cg_flags & AMD_CG_SUPPORT_BIF_LS) { in vi_common_set_clockgating_state_by_smu()
1432 if (adev->powerplay.pp_funcs->set_clockgating_by_smu) in vi_common_set_clockgating_state_by_smu()
1433 amdgpu_dpm_set_clockgating_by_smu(adev, msg_id); in vi_common_set_clockgating_state_by_smu()
1435 if (adev->cg_flags & AMD_CG_SUPPORT_BIF_MGCG) { in vi_common_set_clockgating_state_by_smu()
1445 if (adev->powerplay.pp_funcs->set_clockgating_by_smu) in vi_common_set_clockgating_state_by_smu()
1446 amdgpu_dpm_set_clockgating_by_smu(adev, msg_id); in vi_common_set_clockgating_state_by_smu()
1449 if (adev->cg_flags & AMD_CG_SUPPORT_DRM_LS) { in vi_common_set_clockgating_state_by_smu()
1460 if (adev->powerplay.pp_funcs->set_clockgating_by_smu) in vi_common_set_clockgating_state_by_smu()
1461 amdgpu_dpm_set_clockgating_by_smu(adev, msg_id); in vi_common_set_clockgating_state_by_smu()
1464 if (adev->cg_flags & AMD_CG_SUPPORT_ROM_MGCG) { in vi_common_set_clockgating_state_by_smu()
1475 if (adev->powerplay.pp_funcs->set_clockgating_by_smu) in vi_common_set_clockgating_state_by_smu()
1476 amdgpu_dpm_set_clockgating_by_smu(adev, msg_id); in vi_common_set_clockgating_state_by_smu()
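vi_common_set_clockgating_state_by_smu() repeats one template per block (MC, SDMA, HDP, BIF, DRM, ROM): fold the block's LS/CG capability bits into a support mask and a requested state, force the state to zero on ungate, pack both into a message ID, and hand it to the SMU. The MC block, sketched (PP_* names as used by the powerplay interface; hedged):

	uint32_t msg_id, pp_state = 0, pp_support_state = 0;

	if (adev->cg_flags & (AMD_CG_SUPPORT_MC_LS | AMD_CG_SUPPORT_MC_MGCG)) {
		if (adev->cg_flags & AMD_CG_SUPPORT_MC_LS) {
			pp_support_state = PP_STATE_SUPPORT_LS;
			pp_state = PP_STATE_LS;
		}
		if (adev->cg_flags & AMD_CG_SUPPORT_MC_MGCG) {
			pp_support_state |= PP_STATE_SUPPORT_CG;
			pp_state |= PP_STATE_CG;
		}
		if (state == AMD_CG_STATE_UNGATE)
			pp_state = 0;

		msg_id = PP_CG_MSG_ID(PP_GROUP_SYS, PP_BLOCK_SYS_MC,
				      pp_support_state, pp_state);
		if (adev->powerplay.pp_funcs->set_clockgating_by_smu)
			amdgpu_dpm_set_clockgating_by_smu(adev, msg_id);
	}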
1484 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in vi_common_set_clockgating_state() local
1486 if (amdgpu_sriov_vf(adev)) in vi_common_set_clockgating_state()
1489 switch (adev->asic_type) { in vi_common_set_clockgating_state()
1491 vi_update_bif_medium_grain_light_sleep(adev, in vi_common_set_clockgating_state()
1493 vi_update_hdp_medium_grain_clock_gating(adev, in vi_common_set_clockgating_state()
1495 vi_update_hdp_light_sleep(adev, in vi_common_set_clockgating_state()
1497 vi_update_rom_medium_grain_clock_gating(adev, in vi_common_set_clockgating_state()
1502 vi_update_bif_medium_grain_light_sleep(adev, in vi_common_set_clockgating_state()
1504 vi_update_hdp_medium_grain_clock_gating(adev, in vi_common_set_clockgating_state()
1506 vi_update_hdp_light_sleep(adev, in vi_common_set_clockgating_state()
1508 vi_update_drm_light_sleep(adev, in vi_common_set_clockgating_state()
1516 vi_common_set_clockgating_state_by_smu(adev, state); in vi_common_set_clockgating_state()
1531 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in vi_common_get_clockgating_state() local
1534 if (amdgpu_sriov_vf(adev)) in vi_common_get_clockgating_state()
1585 int vi_set_ip_blocks(struct amdgpu_device *adev) in vi_set_ip_blocks() argument
1588 vi_detect_hw_virtualization(adev); in vi_set_ip_blocks()
1590 if (amdgpu_sriov_vf(adev)) in vi_set_ip_blocks()
1591 adev->virt.ops = &xgpu_vi_virt_ops; in vi_set_ip_blocks()
1593 switch (adev->asic_type) { in vi_set_ip_blocks()
1596 amdgpu_device_ip_block_add(adev, &vi_common_ip_block); in vi_set_ip_blocks()
1597 amdgpu_device_ip_block_add(adev, &gmc_v7_4_ip_block); in vi_set_ip_blocks()
1598 amdgpu_device_ip_block_add(adev, &iceland_ih_ip_block); in vi_set_ip_blocks()
1599 amdgpu_device_ip_block_add(adev, &pp_smu_ip_block); in vi_set_ip_blocks()
1600 if (adev->enable_virtual_display) in vi_set_ip_blocks()
1601 amdgpu_device_ip_block_add(adev, &dce_virtual_ip_block); in vi_set_ip_blocks()
1602 amdgpu_device_ip_block_add(adev, &gfx_v8_0_ip_block); in vi_set_ip_blocks()
1603 amdgpu_device_ip_block_add(adev, &sdma_v2_4_ip_block); in vi_set_ip_blocks()
1606 amdgpu_device_ip_block_add(adev, &vi_common_ip_block); in vi_set_ip_blocks()
1607 amdgpu_device_ip_block_add(adev, &gmc_v8_5_ip_block); in vi_set_ip_blocks()
1608 amdgpu_device_ip_block_add(adev, &tonga_ih_ip_block); in vi_set_ip_blocks()
1609 amdgpu_device_ip_block_add(adev, &pp_smu_ip_block); in vi_set_ip_blocks()
1610 if (adev->enable_virtual_display || amdgpu_sriov_vf(adev)) in vi_set_ip_blocks()
1611 amdgpu_device_ip_block_add(adev, &dce_virtual_ip_block); in vi_set_ip_blocks()
1613 else if (amdgpu_device_has_dc_support(adev)) in vi_set_ip_blocks()
1614 amdgpu_device_ip_block_add(adev, &dm_ip_block); in vi_set_ip_blocks()
1617 amdgpu_device_ip_block_add(adev, &dce_v10_1_ip_block); in vi_set_ip_blocks()
1618 amdgpu_device_ip_block_add(adev, &gfx_v8_0_ip_block); in vi_set_ip_blocks()
1619 amdgpu_device_ip_block_add(adev, &sdma_v3_0_ip_block); in vi_set_ip_blocks()
1620 if (!amdgpu_sriov_vf(adev)) { in vi_set_ip_blocks()
1621 amdgpu_device_ip_block_add(adev, &uvd_v6_0_ip_block); in vi_set_ip_blocks()
1622 amdgpu_device_ip_block_add(adev, &vce_v3_0_ip_block); in vi_set_ip_blocks()
1626 amdgpu_device_ip_block_add(adev, &vi_common_ip_block); in vi_set_ip_blocks()
1627 amdgpu_device_ip_block_add(adev, &gmc_v8_0_ip_block); in vi_set_ip_blocks()
1628 amdgpu_device_ip_block_add(adev, &tonga_ih_ip_block); in vi_set_ip_blocks()
1629 amdgpu_device_ip_block_add(adev, &pp_smu_ip_block); in vi_set_ip_blocks()
1630 if (adev->enable_virtual_display || amdgpu_sriov_vf(adev)) in vi_set_ip_blocks()
1631 amdgpu_device_ip_block_add(adev, &dce_virtual_ip_block); in vi_set_ip_blocks()
1633 else if (amdgpu_device_has_dc_support(adev)) in vi_set_ip_blocks()
1634 amdgpu_device_ip_block_add(adev, &dm_ip_block); in vi_set_ip_blocks()
1637 amdgpu_device_ip_block_add(adev, &dce_v10_0_ip_block); in vi_set_ip_blocks()
1638 amdgpu_device_ip_block_add(adev, &gfx_v8_0_ip_block); in vi_set_ip_blocks()
1639 amdgpu_device_ip_block_add(adev, &sdma_v3_0_ip_block); in vi_set_ip_blocks()
1640 if (!amdgpu_sriov_vf(adev)) { in vi_set_ip_blocks()
1641 amdgpu_device_ip_block_add(adev, &uvd_v5_0_ip_block); in vi_set_ip_blocks()
1642 amdgpu_device_ip_block_add(adev, &vce_v3_0_ip_block); in vi_set_ip_blocks()
1649 amdgpu_device_ip_block_add(adev, &vi_common_ip_block); in vi_set_ip_blocks()
1650 amdgpu_device_ip_block_add(adev, &gmc_v8_1_ip_block); in vi_set_ip_blocks()
1651 amdgpu_device_ip_block_add(adev, &tonga_ih_ip_block); in vi_set_ip_blocks()
1652 amdgpu_device_ip_block_add(adev, &pp_smu_ip_block); in vi_set_ip_blocks()
1653 if (adev->enable_virtual_display) in vi_set_ip_blocks()
1654 amdgpu_device_ip_block_add(adev, &dce_virtual_ip_block); in vi_set_ip_blocks()
1656 else if (amdgpu_device_has_dc_support(adev)) in vi_set_ip_blocks()
1657 amdgpu_device_ip_block_add(adev, &dm_ip_block); in vi_set_ip_blocks()
1660 amdgpu_device_ip_block_add(adev, &dce_v11_2_ip_block); in vi_set_ip_blocks()
1661 amdgpu_device_ip_block_add(adev, &gfx_v8_0_ip_block); in vi_set_ip_blocks()
1662 amdgpu_device_ip_block_add(adev, &sdma_v3_1_ip_block); in vi_set_ip_blocks()
1663 amdgpu_device_ip_block_add(adev, &uvd_v6_3_ip_block); in vi_set_ip_blocks()
1664 amdgpu_device_ip_block_add(adev, &vce_v3_4_ip_block); in vi_set_ip_blocks()
1667 amdgpu_device_ip_block_add(adev, &vi_common_ip_block); in vi_set_ip_blocks()
1668 amdgpu_device_ip_block_add(adev, &gmc_v8_0_ip_block); in vi_set_ip_blocks()
1669 amdgpu_device_ip_block_add(adev, &cz_ih_ip_block); in vi_set_ip_blocks()
1670 amdgpu_device_ip_block_add(adev, &pp_smu_ip_block); in vi_set_ip_blocks()
1671 if (adev->enable_virtual_display) in vi_set_ip_blocks()
1672 amdgpu_device_ip_block_add(adev, &dce_virtual_ip_block); in vi_set_ip_blocks()
1674 else if (amdgpu_device_has_dc_support(adev)) in vi_set_ip_blocks()
1675 amdgpu_device_ip_block_add(adev, &dm_ip_block); in vi_set_ip_blocks()
1678 amdgpu_device_ip_block_add(adev, &dce_v11_0_ip_block); in vi_set_ip_blocks()
1679 amdgpu_device_ip_block_add(adev, &gfx_v8_0_ip_block); in vi_set_ip_blocks()
1680 amdgpu_device_ip_block_add(adev, &sdma_v3_0_ip_block); in vi_set_ip_blocks()
1681 amdgpu_device_ip_block_add(adev, &uvd_v6_0_ip_block); in vi_set_ip_blocks()
1682 amdgpu_device_ip_block_add(adev, &vce_v3_1_ip_block); in vi_set_ip_blocks()
1684 amdgpu_device_ip_block_add(adev, &acp_ip_block); in vi_set_ip_blocks()
1688 amdgpu_device_ip_block_add(adev, &vi_common_ip_block); in vi_set_ip_blocks()
1689 amdgpu_device_ip_block_add(adev, &gmc_v8_0_ip_block); in vi_set_ip_blocks()
1690 amdgpu_device_ip_block_add(adev, &cz_ih_ip_block); in vi_set_ip_blocks()
1691 amdgpu_device_ip_block_add(adev, &pp_smu_ip_block); in vi_set_ip_blocks()
1692 if (adev->enable_virtual_display) in vi_set_ip_blocks()
1693 amdgpu_device_ip_block_add(adev, &dce_virtual_ip_block); in vi_set_ip_blocks()
1695 else if (amdgpu_device_has_dc_support(adev)) in vi_set_ip_blocks()
1696 amdgpu_device_ip_block_add(adev, &dm_ip_block); in vi_set_ip_blocks()
1699 amdgpu_device_ip_block_add(adev, &dce_v11_0_ip_block); in vi_set_ip_blocks()
1700 amdgpu_device_ip_block_add(adev, &gfx_v8_1_ip_block); in vi_set_ip_blocks()
1701 amdgpu_device_ip_block_add(adev, &sdma_v3_0_ip_block); in vi_set_ip_blocks()
1702 amdgpu_device_ip_block_add(adev, &uvd_v6_2_ip_block); in vi_set_ip_blocks()
1703 amdgpu_device_ip_block_add(adev, &vce_v3_4_ip_block); in vi_set_ip_blocks()
1705 amdgpu_device_ip_block_add(adev, &acp_ip_block); in vi_set_ip_blocks()
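Across every branch of vi_set_ip_blocks() the add order is the dependency order amdgpu will initialize the blocks in: common glue first, then memory controller, interrupt handler, SMU, display, GFX, SDMA, and the multimedia engines last. The Carrizo chain from the lines above, annotated (the CONFIG_DRM_AMD_DC preprocessor guards around the DC branch are elided here):

	case CHIP_CARRIZO:
		amdgpu_device_ip_block_add(adev, &vi_common_ip_block);	/* ASIC glue */
		amdgpu_device_ip_block_add(adev, &gmc_v8_0_ip_block);	/* memory controller */
		amdgpu_device_ip_block_add(adev, &cz_ih_ip_block);	/* interrupt handler */
		amdgpu_device_ip_block_add(adev, &pp_smu_ip_block);	/* powerplay/SMU */
		if (adev->enable_virtual_display)
			amdgpu_device_ip_block_add(adev, &dce_virtual_ip_block);
		else if (amdgpu_device_has_dc_support(adev))
			amdgpu_device_ip_block_add(adev, &dm_ip_block);
		else
			amdgpu_device_ip_block_add(adev, &dce_v11_0_ip_block);
		amdgpu_device_ip_block_add(adev, &gfx_v8_0_ip_block);
		amdgpu_device_ip_block_add(adev, &sdma_v3_0_ip_block);
		amdgpu_device_ip_block_add(adev, &uvd_v6_0_ip_block);
		amdgpu_device_ip_block_add(adev, &vce_v3_1_ip_block);
		amdgpu_device_ip_block_add(adev, &acp_ip_block);	/* audio co-processor */
		break;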