Lines matching refs: adev

174 static int soc15_query_video_codecs(struct amdgpu_device *adev, bool encode,  in soc15_query_video_codecs()  argument
177 if (adev->ip_versions[VCE_HWIP][0]) { in soc15_query_video_codecs()
178 switch (adev->ip_versions[VCE_HWIP][0]) { in soc15_query_video_codecs()
190 switch (adev->ip_versions[UVD_HWIP][0]) { in soc15_query_video_codecs()
218 static u32 soc15_uvd_ctx_rreg(struct amdgpu_device *adev, u32 reg) in soc15_uvd_ctx_rreg() argument
226 spin_lock_irqsave(&adev->uvd_ctx_idx_lock, flags); in soc15_uvd_ctx_rreg()
229 spin_unlock_irqrestore(&adev->uvd_ctx_idx_lock, flags); in soc15_uvd_ctx_rreg()
233 static void soc15_uvd_ctx_wreg(struct amdgpu_device *adev, u32 reg, u32 v) in soc15_uvd_ctx_wreg() argument
240 spin_lock_irqsave(&adev->uvd_ctx_idx_lock, flags); in soc15_uvd_ctx_wreg()
243 spin_unlock_irqrestore(&adev->uvd_ctx_idx_lock, flags); in soc15_uvd_ctx_wreg()
246 static u32 soc15_didt_rreg(struct amdgpu_device *adev, u32 reg) in soc15_didt_rreg() argument
254 spin_lock_irqsave(&adev->didt_idx_lock, flags); in soc15_didt_rreg()
257 spin_unlock_irqrestore(&adev->didt_idx_lock, flags); in soc15_didt_rreg()
261 static void soc15_didt_wreg(struct amdgpu_device *adev, u32 reg, u32 v) in soc15_didt_wreg() argument
268 spin_lock_irqsave(&adev->didt_idx_lock, flags); in soc15_didt_wreg()
271 spin_unlock_irqrestore(&adev->didt_idx_lock, flags); in soc15_didt_wreg()
274 static u32 soc15_gc_cac_rreg(struct amdgpu_device *adev, u32 reg) in soc15_gc_cac_rreg() argument
279 spin_lock_irqsave(&adev->gc_cac_idx_lock, flags); in soc15_gc_cac_rreg()
282 spin_unlock_irqrestore(&adev->gc_cac_idx_lock, flags); in soc15_gc_cac_rreg()
286 static void soc15_gc_cac_wreg(struct amdgpu_device *adev, u32 reg, u32 v) in soc15_gc_cac_wreg() argument
290 spin_lock_irqsave(&adev->gc_cac_idx_lock, flags); in soc15_gc_cac_wreg()
293 spin_unlock_irqrestore(&adev->gc_cac_idx_lock, flags); in soc15_gc_cac_wreg()
296 static u32 soc15_se_cac_rreg(struct amdgpu_device *adev, u32 reg) in soc15_se_cac_rreg() argument
301 spin_lock_irqsave(&adev->se_cac_idx_lock, flags); in soc15_se_cac_rreg()
304 spin_unlock_irqrestore(&adev->se_cac_idx_lock, flags); in soc15_se_cac_rreg()
308 static void soc15_se_cac_wreg(struct amdgpu_device *adev, u32 reg, u32 v) in soc15_se_cac_wreg() argument
312 spin_lock_irqsave(&adev->se_cac_idx_lock, flags); in soc15_se_cac_wreg()
315 spin_unlock_irqrestore(&adev->se_cac_idx_lock, flags); in soc15_se_cac_wreg()
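Note: the uvd_ctx, didt, gc_cac and se_cac helpers matched above all follow the same shape: take the per-block index spinlock, program an index register with the target offset, then access the companion data register. A minimal sketch of the read side, modeled on soc15_uvd_ctx_rreg (the mmUVD_CTX_INDEX/mmUVD_CTX_DATA pair and the 0x1ff mask are assumptions based on the typical SOC15 implementation, not taken from the matches above):

static u32 soc15_uvd_ctx_rreg_sketch(struct amdgpu_device *adev, u32 reg)
{
	unsigned long flags, address, data;
	u32 r;

	/* index/data pair assumed; resolved through the per-IP offset table */
	address = SOC15_REG_OFFSET(UVD, 0, mmUVD_CTX_INDEX);
	data = SOC15_REG_OFFSET(UVD, 0, mmUVD_CTX_DATA);

	/* serialize index+data access against concurrent users */
	spin_lock_irqsave(&adev->uvd_ctx_idx_lock, flags);
	WREG32(address, reg & 0x1ff);
	r = RREG32(data);
	spin_unlock_irqrestore(&adev->uvd_ctx_idx_lock, flags);
	return r;
}

The write helpers (soc15_uvd_ctx_wreg and friends) differ only in writing the value to the data register under the same lock.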
318 static u32 soc15_get_config_memsize(struct amdgpu_device *adev) in soc15_get_config_memsize() argument
320 return adev->nbio.funcs->get_memsize(adev); in soc15_get_config_memsize()
323 static u32 soc15_get_xclk(struct amdgpu_device *adev) in soc15_get_xclk() argument
325 u32 reference_clock = adev->clock.spll.reference_freq; in soc15_get_xclk()
327 if (adev->ip_versions[MP1_HWIP][0] == IP_VERSION(12, 0, 0) || in soc15_get_xclk()
328 adev->ip_versions[MP1_HWIP][0] == IP_VERSION(12, 0, 1)) in soc15_get_xclk()
330 if (adev->ip_versions[MP1_HWIP][0] == IP_VERSION(10, 0, 0) || in soc15_get_xclk()
331 adev->ip_versions[MP1_HWIP][0] == IP_VERSION(10, 0, 1)) in soc15_get_xclk()
338 void soc15_grbm_select(struct amdgpu_device *adev, in soc15_grbm_select() argument
350 static bool soc15_read_disabled_bios(struct amdgpu_device *adev) in soc15_read_disabled_bios() argument
379 static uint32_t soc15_read_indexed_register(struct amdgpu_device *adev, u32 se_num, in soc15_read_indexed_register() argument
384 mutex_lock(&adev->grbm_idx_mutex); in soc15_read_indexed_register()
386 amdgpu_gfx_select_se_sh(adev, se_num, sh_num, 0xffffffff, 0); in soc15_read_indexed_register()
391 amdgpu_gfx_select_se_sh(adev, 0xffffffff, 0xffffffff, 0xffffffff, 0); in soc15_read_indexed_register()
392 mutex_unlock(&adev->grbm_idx_mutex); in soc15_read_indexed_register()
396 static uint32_t soc15_get_register_value(struct amdgpu_device *adev, in soc15_get_register_value() argument
401 return soc15_read_indexed_register(adev, se_num, sh_num, reg_offset); in soc15_get_register_value()
404 return adev->gfx.config.gb_addr_config; in soc15_get_register_value()
406 return adev->gfx.config.db_debug2; in soc15_get_register_value()
411 static int soc15_read_register(struct amdgpu_device *adev, u32 se_num, in soc15_read_register() argument
420 if (!adev->reg_offset[en->hwip][en->inst]) in soc15_read_register()
422 else if (reg_offset != (adev->reg_offset[en->hwip][en->inst][en->seg] in soc15_read_register()
426 *value = soc15_get_register_value(adev, in soc15_read_register()
446 void soc15_program_register_sequence(struct amdgpu_device *adev, in soc15_program_register_sequence() argument
456 reg = adev->reg_offset[entry->hwip][entry->instance][entry->segment] + entry->reg; in soc15_program_register_sequence()
481 static int soc15_asic_baco_reset(struct amdgpu_device *adev) in soc15_asic_baco_reset() argument
483 struct amdgpu_ras *ras = amdgpu_ras_get_context(adev); in soc15_asic_baco_reset()
487 if (ras && adev->ras_enabled) in soc15_asic_baco_reset()
488 adev->nbio.funcs->enable_doorbell_interrupt(adev, false); in soc15_asic_baco_reset()
490 ret = amdgpu_dpm_baco_reset(adev); in soc15_asic_baco_reset()
495 if (ras && adev->ras_enabled) in soc15_asic_baco_reset()
496 adev->nbio.funcs->enable_doorbell_interrupt(adev, true); in soc15_asic_baco_reset()
502 soc15_asic_reset_method(struct amdgpu_device *adev) in soc15_asic_reset_method() argument
506 struct amdgpu_ras *ras = amdgpu_ras_get_context(adev); in soc15_asic_reset_method()
508 if (adev->gmc.xgmi.supported && adev->gmc.xgmi.connected_to_cpu) in soc15_asic_reset_method()
522 dev_warn(adev->dev, "Specified reset method:%d isn't supported, using AUTO instead.\n", in soc15_asic_reset_method()
525 switch (adev->ip_versions[MP1_HWIP][0]) { in soc15_asic_reset_method()
533 if (adev->asic_type == CHIP_VEGA20) { in soc15_asic_reset_method()
534 if (adev->psp.sos.fw_version >= 0x80067) in soc15_asic_reset_method()
535 baco_reset = amdgpu_dpm_is_baco_supported(adev); in soc15_asic_reset_method()
540 if (ras && adev->ras_enabled && in soc15_asic_reset_method()
541 adev->pm.fw_version <= 0x283400) in soc15_asic_reset_method()
544 baco_reset = amdgpu_dpm_is_baco_supported(adev); in soc15_asic_reset_method()
562 else if (!(adev->flags & AMD_IS_APU)) in soc15_asic_reset_method()
576 static int soc15_asic_reset(struct amdgpu_device *adev) in soc15_asic_reset() argument
579 if ((adev->apu_flags & AMD_APU_IS_RAVEN) || in soc15_asic_reset()
580 (adev->apu_flags & AMD_APU_IS_RAVEN2)) in soc15_asic_reset()
583 switch (soc15_asic_reset_method(adev)) { in soc15_asic_reset()
585 dev_info(adev->dev, "PCI reset\n"); in soc15_asic_reset()
586 return amdgpu_device_pci_reset(adev); in soc15_asic_reset()
588 dev_info(adev->dev, "BACO reset\n"); in soc15_asic_reset()
589 return soc15_asic_baco_reset(adev); in soc15_asic_reset()
591 dev_info(adev->dev, "MODE2 reset\n"); in soc15_asic_reset()
592 return amdgpu_dpm_mode2_reset(adev); in soc15_asic_reset()
594 dev_info(adev->dev, "MODE1 reset\n"); in soc15_asic_reset()
595 return amdgpu_device_mode1_reset(adev); in soc15_asic_reset()
599 static bool soc15_supports_baco(struct amdgpu_device *adev) in soc15_supports_baco() argument
601 switch (adev->ip_versions[MP1_HWIP][0]) { in soc15_supports_baco()
604 if (adev->asic_type == CHIP_VEGA20) { in soc15_supports_baco()
605 if (adev->psp.sos.fw_version >= 0x80067) in soc15_supports_baco()
606 return amdgpu_dpm_is_baco_supported(adev); in soc15_supports_baco()
609 return amdgpu_dpm_is_baco_supported(adev); in soc15_supports_baco()
623 static int soc15_set_uvd_clocks(struct amdgpu_device *adev, u32 vclk, u32 dclk) in soc15_set_uvd_clocks() argument
636 static int soc15_set_vce_clocks(struct amdgpu_device *adev, u32 evclk, u32 ecclk) in soc15_set_vce_clocks() argument
643 static void soc15_program_aspm(struct amdgpu_device *adev) in soc15_program_aspm() argument
645 if (!amdgpu_device_should_use_aspm(adev)) in soc15_program_aspm()
648 if (!(adev->flags & AMD_IS_APU) && in soc15_program_aspm()
649 (adev->nbio.funcs->program_aspm)) in soc15_program_aspm()
650 adev->nbio.funcs->program_aspm(adev); in soc15_program_aspm()
662 static void soc15_reg_base_init(struct amdgpu_device *adev) in soc15_reg_base_init() argument
665 switch (adev->asic_type) { in soc15_reg_base_init()
670 vega10_reg_base_init(adev); in soc15_reg_base_init()
673 vega20_reg_base_init(adev); in soc15_reg_base_init()
676 arct_reg_base_init(adev); in soc15_reg_base_init()
679 aldebaran_reg_base_init(adev); in soc15_reg_base_init()
682 DRM_ERROR("Unsupported asic type: %d!\n", adev->asic_type); in soc15_reg_base_init()
687 void soc15_set_virt_ops(struct amdgpu_device *adev) in soc15_set_virt_ops() argument
689 adev->virt.ops = &xgpu_ai_virt_ops; in soc15_set_virt_ops()
694 soc15_reg_base_init(adev); in soc15_set_virt_ops()
697 static bool soc15_need_full_reset(struct amdgpu_device *adev) in soc15_need_full_reset() argument
703 static void soc15_get_pcie_usage(struct amdgpu_device *adev, uint64_t *count0, in soc15_get_pcie_usage() argument
713 if (adev->flags & AMD_IS_APU) in soc15_get_pcie_usage()
750 static void vega20_get_pcie_usage(struct amdgpu_device *adev, uint64_t *count0, in vega20_get_pcie_usage() argument
760 if (adev->flags & AMD_IS_APU) in vega20_get_pcie_usage()
799 static bool soc15_need_reset_on_init(struct amdgpu_device *adev) in soc15_need_reset_on_init() argument
804 if (adev->asic_type == CHIP_RENOIR) in soc15_need_reset_on_init()
810 if (!amdgpu_passthrough(adev)) in soc15_need_reset_on_init()
813 if (adev->flags & AMD_IS_APU) in soc15_need_reset_on_init()
826 static uint64_t soc15_get_pcie_replay_count(struct amdgpu_device *adev) in soc15_get_pcie_replay_count() argument
838 static void soc15_pre_asic_init(struct amdgpu_device *adev) in soc15_pre_asic_init() argument
840 gmc_v9_0_restore_registers(adev); in soc15_pre_asic_init()
910 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in soc15_common_early_init() local
912 if (!amdgpu_sriov_vf(adev)) { in soc15_common_early_init()
913 adev->rmmio_remap.reg_offset = MMIO_REG_HOLE_OFFSET; in soc15_common_early_init()
914 adev->rmmio_remap.bus_addr = adev->rmmio_base + MMIO_REG_HOLE_OFFSET; in soc15_common_early_init()
916 adev->smc_rreg = NULL; in soc15_common_early_init()
917 adev->smc_wreg = NULL; in soc15_common_early_init()
918 adev->pcie_rreg = &amdgpu_device_indirect_rreg; in soc15_common_early_init()
919 adev->pcie_wreg = &amdgpu_device_indirect_wreg; in soc15_common_early_init()
920 adev->pcie_rreg_ext = &amdgpu_device_indirect_rreg_ext; in soc15_common_early_init()
921 adev->pcie_wreg_ext = &amdgpu_device_indirect_wreg_ext; in soc15_common_early_init()
922 adev->pcie_rreg64 = &amdgpu_device_indirect_rreg64; in soc15_common_early_init()
923 adev->pcie_wreg64 = &amdgpu_device_indirect_wreg64; in soc15_common_early_init()
924 adev->uvd_ctx_rreg = &soc15_uvd_ctx_rreg; in soc15_common_early_init()
925 adev->uvd_ctx_wreg = &soc15_uvd_ctx_wreg; in soc15_common_early_init()
926 adev->didt_rreg = &soc15_didt_rreg; in soc15_common_early_init()
927 adev->didt_wreg = &soc15_didt_wreg; in soc15_common_early_init()
928 adev->gc_cac_rreg = &soc15_gc_cac_rreg; in soc15_common_early_init()
929 adev->gc_cac_wreg = &soc15_gc_cac_wreg; in soc15_common_early_init()
930 adev->se_cac_rreg = &soc15_se_cac_rreg; in soc15_common_early_init()
931 adev->se_cac_wreg = &soc15_se_cac_wreg; in soc15_common_early_init()
933 adev->rev_id = amdgpu_device_get_rev_id(adev); in soc15_common_early_init()
934 adev->external_rev_id = 0xFF; in soc15_common_early_init()
938 switch (adev->ip_versions[GC_HWIP][0]) { in soc15_common_early_init()
940 adev->asic_funcs = &soc15_asic_funcs; in soc15_common_early_init()
941 adev->cg_flags = AMD_CG_SUPPORT_GFX_MGCG | in soc15_common_early_init()
960 adev->pg_flags = 0; in soc15_common_early_init()
961 adev->external_rev_id = 0x1; in soc15_common_early_init()
964 adev->asic_funcs = &soc15_asic_funcs; in soc15_common_early_init()
965 adev->cg_flags = AMD_CG_SUPPORT_GFX_MGCG | in soc15_common_early_init()
983 adev->pg_flags = 0; in soc15_common_early_init()
984 adev->external_rev_id = adev->rev_id + 0x14; in soc15_common_early_init()
987 adev->asic_funcs = &vega20_asic_funcs; in soc15_common_early_init()
988 adev->cg_flags = AMD_CG_SUPPORT_GFX_MGCG | in soc15_common_early_init()
1006 adev->pg_flags = 0; in soc15_common_early_init()
1007 adev->external_rev_id = adev->rev_id + 0x28; in soc15_common_early_init()
1011 adev->asic_funcs = &soc15_asic_funcs; in soc15_common_early_init()
1013 if (adev->rev_id >= 0x8) in soc15_common_early_init()
1014 adev->apu_flags |= AMD_APU_IS_RAVEN2; in soc15_common_early_init()
1016 if (adev->apu_flags & AMD_APU_IS_RAVEN2) in soc15_common_early_init()
1017 adev->external_rev_id = adev->rev_id + 0x79; in soc15_common_early_init()
1018 else if (adev->apu_flags & AMD_APU_IS_PICASSO) in soc15_common_early_init()
1019 adev->external_rev_id = adev->rev_id + 0x41; in soc15_common_early_init()
1020 else if (adev->rev_id == 1) in soc15_common_early_init()
1021 adev->external_rev_id = adev->rev_id + 0x20; in soc15_common_early_init()
1023 adev->external_rev_id = adev->rev_id + 0x01; in soc15_common_early_init()
1025 if (adev->apu_flags & AMD_APU_IS_RAVEN2) { in soc15_common_early_init()
1026 adev->cg_flags = AMD_CG_SUPPORT_GFX_MGCG | in soc15_common_early_init()
1041 adev->pg_flags = AMD_PG_SUPPORT_SDMA | AMD_PG_SUPPORT_VCN; in soc15_common_early_init()
1042 } else if (adev->apu_flags & AMD_APU_IS_PICASSO) { in soc15_common_early_init()
1043 adev->cg_flags = AMD_CG_SUPPORT_GFX_MGCG | in soc15_common_early_init()
1061 adev->pg_flags = AMD_PG_SUPPORT_SDMA | in soc15_common_early_init()
1064 adev->cg_flags = AMD_CG_SUPPORT_GFX_MGCG | in soc15_common_early_init()
1083 adev->pg_flags = AMD_PG_SUPPORT_SDMA | AMD_PG_SUPPORT_VCN; in soc15_common_early_init()
1087 adev->asic_funcs = &vega20_asic_funcs; in soc15_common_early_init()
1088 adev->cg_flags = AMD_CG_SUPPORT_GFX_MGCG | in soc15_common_early_init()
1102 adev->pg_flags = AMD_PG_SUPPORT_VCN | AMD_PG_SUPPORT_VCN_DPG; in soc15_common_early_init()
1103 adev->external_rev_id = adev->rev_id + 0x32; in soc15_common_early_init()
1106 adev->asic_funcs = &soc15_asic_funcs; in soc15_common_early_init()
1108 if (adev->apu_flags & AMD_APU_IS_RENOIR) in soc15_common_early_init()
1109 adev->external_rev_id = adev->rev_id + 0x91; in soc15_common_early_init()
1111 adev->external_rev_id = adev->rev_id + 0xa1; in soc15_common_early_init()
1112 adev->cg_flags = AMD_CG_SUPPORT_GFX_MGCG | in soc15_common_early_init()
1131 adev->pg_flags = AMD_PG_SUPPORT_SDMA | in soc15_common_early_init()
1137 adev->asic_funcs = &vega20_asic_funcs; in soc15_common_early_init()
1138 adev->cg_flags = AMD_CG_SUPPORT_GFX_MGCG | in soc15_common_early_init()
1146 adev->pg_flags = AMD_PG_SUPPORT_VCN_DPG; in soc15_common_early_init()
1147 adev->external_rev_id = adev->rev_id + 0x3c; in soc15_common_early_init()
1150 adev->asic_funcs = &aqua_vanjaram_asic_funcs; in soc15_common_early_init()
1151 adev->cg_flags = in soc15_common_early_init()
1157 adev->pg_flags = in soc15_common_early_init()
1161 adev->external_rev_id = adev->rev_id + 0x46; in soc15_common_early_init()
1168 if (amdgpu_sriov_vf(adev)) { in soc15_common_early_init()
1169 amdgpu_virt_init_setting(adev); in soc15_common_early_init()
1170 xgpu_ai_mailbox_set_irq_funcs(adev); in soc15_common_early_init()
1178 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in soc15_common_late_init() local
1180 if (amdgpu_sriov_vf(adev)) in soc15_common_late_init()
1181 xgpu_ai_mailbox_get_irq(adev); in soc15_common_late_init()
1186 adev->nbio.funcs->enable_doorbell_selfring_aperture(adev, true); in soc15_common_late_init()
1193 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in soc15_common_sw_init() local
1195 if (amdgpu_sriov_vf(adev)) in soc15_common_sw_init()
1196 xgpu_ai_mailbox_add_irq_id(adev); in soc15_common_sw_init()
1198 if (adev->df.funcs && in soc15_common_sw_init()
1199 adev->df.funcs->sw_init) in soc15_common_sw_init()
1200 adev->df.funcs->sw_init(adev); in soc15_common_sw_init()
1207 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in soc15_common_sw_fini() local
1209 if (adev->df.funcs && in soc15_common_sw_fini()
1210 adev->df.funcs->sw_fini) in soc15_common_sw_fini()
1211 adev->df.funcs->sw_fini(adev); in soc15_common_sw_fini()
1215 static void soc15_sdma_doorbell_range_init(struct amdgpu_device *adev) in soc15_sdma_doorbell_range_init() argument
1220 if (!amdgpu_sriov_vf(adev)) { in soc15_sdma_doorbell_range_init()
1221 for (i = 0; i < adev->sdma.num_instances; i++) { in soc15_sdma_doorbell_range_init()
1222 adev->nbio.funcs->sdma_doorbell_range(adev, i, in soc15_sdma_doorbell_range_init()
1223 true, adev->doorbell_index.sdma_engine[i] << 1, in soc15_sdma_doorbell_range_init()
1224 adev->doorbell_index.sdma_doorbell_range); in soc15_sdma_doorbell_range_init()
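The soc15_sdma_doorbell_range_init matches reconstruct almost completely into the sketch below; only the loop-variable declaration and the closing braces are filled in, and skipping the loop under SR-IOV is assumed to be because the host programs the doorbell range for virtual functions:

static void soc15_sdma_doorbell_range_init_sketch(struct amdgpu_device *adev)
{
	uint32_t i;

	/* on a VF the doorbell range is assumed to be set up by the host */
	if (!amdgpu_sriov_vf(adev)) {
		for (i = 0; i < adev->sdma.num_instances; i++) {
			adev->nbio.funcs->sdma_doorbell_range(adev, i,
				true, adev->doorbell_index.sdma_engine[i] << 1,
				adev->doorbell_index.sdma_doorbell_range);
		}
	}
}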
1231 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in soc15_common_hw_init() local
1234 soc15_program_aspm(adev); in soc15_common_hw_init()
1236 adev->nbio.funcs->init_registers(adev); in soc15_common_hw_init()
1241 if (adev->nbio.funcs->remap_hdp_registers && !amdgpu_sriov_vf(adev)) in soc15_common_hw_init()
1242 adev->nbio.funcs->remap_hdp_registers(adev); in soc15_common_hw_init()
1245 adev->nbio.funcs->enable_doorbell_aperture(adev, true); in soc15_common_hw_init()
1253 soc15_sdma_doorbell_range_init(adev); in soc15_common_hw_init()
1260 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in soc15_common_hw_fini() local
1267 adev->nbio.funcs->enable_doorbell_aperture(adev, false); in soc15_common_hw_fini()
1268 adev->nbio.funcs->enable_doorbell_selfring_aperture(adev, false); in soc15_common_hw_fini()
1270 if (amdgpu_sriov_vf(adev)) in soc15_common_hw_fini()
1271 xgpu_ai_mailbox_put_irq(adev); in soc15_common_hw_fini()
1273 if (adev->nbio.ras_if && in soc15_common_hw_fini()
1274 amdgpu_ras_is_supported(adev, adev->nbio.ras_if->block)) { in soc15_common_hw_fini()
1275 if (adev->nbio.ras && in soc15_common_hw_fini()
1276 adev->nbio.ras->init_ras_controller_interrupt) in soc15_common_hw_fini()
1277 amdgpu_irq_put(adev, &adev->nbio.ras_controller_irq, 0); in soc15_common_hw_fini()
1278 if (adev->nbio.ras && in soc15_common_hw_fini()
1279 adev->nbio.ras->init_ras_err_event_athub_interrupt) in soc15_common_hw_fini()
1280 amdgpu_irq_put(adev, &adev->nbio.ras_err_event_athub_irq, 0); in soc15_common_hw_fini()
1288 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in soc15_common_suspend() local
1290 return soc15_common_hw_fini(adev); in soc15_common_suspend()
1295 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in soc15_common_resume() local
1297 return soc15_common_hw_init(adev); in soc15_common_resume()
1315 static void soc15_update_drm_clock_gating(struct amdgpu_device *adev, bool enable) in soc15_update_drm_clock_gating() argument
1321 if (enable && (adev->cg_flags & AMD_CG_SUPPORT_DRM_MGCG)) in soc15_update_drm_clock_gating()
1344 static void soc15_update_drm_light_sleep(struct amdgpu_device *adev, bool enable) in soc15_update_drm_light_sleep() argument
1350 if (enable && (adev->cg_flags & AMD_CG_SUPPORT_DRM_LS)) in soc15_update_drm_light_sleep()
1362 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in soc15_common_set_clockgating_state() local
1364 if (amdgpu_sriov_vf(adev)) in soc15_common_set_clockgating_state()
1367 switch (adev->ip_versions[NBIO_HWIP][0]) { in soc15_common_set_clockgating_state()
1371 adev->nbio.funcs->update_medium_grain_clock_gating(adev, in soc15_common_set_clockgating_state()
1373 adev->nbio.funcs->update_medium_grain_light_sleep(adev, in soc15_common_set_clockgating_state()
1375 adev->hdp.funcs->update_clock_gating(adev, in soc15_common_set_clockgating_state()
1377 soc15_update_drm_clock_gating(adev, in soc15_common_set_clockgating_state()
1379 soc15_update_drm_light_sleep(adev, in soc15_common_set_clockgating_state()
1381 adev->smuio.funcs->update_rom_clock_gating(adev, in soc15_common_set_clockgating_state()
1383 adev->df.funcs->update_medium_grain_clock_gating(adev, in soc15_common_set_clockgating_state()
1389 adev->nbio.funcs->update_medium_grain_clock_gating(adev, in soc15_common_set_clockgating_state()
1391 adev->nbio.funcs->update_medium_grain_light_sleep(adev, in soc15_common_set_clockgating_state()
1393 adev->hdp.funcs->update_clock_gating(adev, in soc15_common_set_clockgating_state()
1395 soc15_update_drm_clock_gating(adev, in soc15_common_set_clockgating_state()
1397 soc15_update_drm_light_sleep(adev, in soc15_common_set_clockgating_state()
1402 adev->hdp.funcs->update_clock_gating(adev, in soc15_common_set_clockgating_state()
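The soc15_common_set_clockgating_state matches show a fan-out keyed on adev->ip_versions[NBIO_HWIP][0]: one branch enables the full set of updates (NBIO MGCG and light sleep, HDP, DRM MGCG/LS, SMUIO ROM gating, DF MGCG), a second branch does the same minus the ROM and DF updates, and the default branch only touches HDP. A condensed sketch of the first branch, assuming the gate decision is derived from the requested state (the exact IP_VERSION case labels are omitted here):

	/* inside soc15_common_set_clockgating_state(), first branch (sketch) */
	bool gate = (state == AMD_CG_STATE_GATE);

	adev->nbio.funcs->update_medium_grain_clock_gating(adev, gate);
	adev->nbio.funcs->update_medium_grain_light_sleep(adev, gate);
	adev->hdp.funcs->update_clock_gating(adev, gate);
	soc15_update_drm_clock_gating(adev, gate);
	soc15_update_drm_light_sleep(adev, gate);
	adev->smuio.funcs->update_rom_clock_gating(adev, gate);
	adev->df.funcs->update_medium_grain_clock_gating(adev, gate);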
1413 struct amdgpu_device *adev = (struct amdgpu_device *)handle; in soc15_common_get_clockgating_state() local
1416 if (amdgpu_sriov_vf(adev)) in soc15_common_get_clockgating_state()
1419 adev->nbio.funcs->get_clockgating_state(adev, flags); in soc15_common_get_clockgating_state()
1421 adev->hdp.funcs->get_clock_gating_state(adev, flags); in soc15_common_get_clockgating_state()
1423 if (adev->ip_versions[MP0_HWIP][0] != IP_VERSION(13, 0, 2)) { in soc15_common_get_clockgating_state()
1437 adev->smuio.funcs->get_clock_gating_state(adev, flags); in soc15_common_get_clockgating_state()
1439 adev->df.funcs->get_clockgating_state(adev, flags); in soc15_common_get_clockgating_state()