Lines matching refs:hwmgr in smu7_hwmgr.c. Each entry shows the source line number, the matched line, and the enclosing function.
107 static int smu7_force_clock_level(struct pp_hwmgr *hwmgr,
136 static int smu7_get_mc_microcode_version(struct pp_hwmgr *hwmgr) in smu7_get_mc_microcode_version() argument
138 cgs_write_register(hwmgr->device, mmMC_SEQ_IO_DEBUG_INDEX, 0x9F); in smu7_get_mc_microcode_version()
140 hwmgr->microcode_version_info.MC = cgs_read_register(hwmgr->device, mmMC_SEQ_IO_DEBUG_DATA); in smu7_get_mc_microcode_version()
145 static uint16_t smu7_get_current_pcie_speed(struct pp_hwmgr *hwmgr) in smu7_get_current_pcie_speed() argument
150 speedCntl = cgs_read_ind_register(hwmgr->device, CGS_IND_REG__PCIE, in smu7_get_current_pcie_speed()
156 static int smu7_get_current_pcie_lane_number(struct pp_hwmgr *hwmgr) in smu7_get_current_pcie_lane_number() argument
161 link_width = PHM_READ_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__PCIE, in smu7_get_current_pcie_lane_number()
176 static int smu7_enable_smc_voltage_controller(struct pp_hwmgr *hwmgr) in smu7_enable_smc_voltage_controller() argument
178 if (hwmgr->chip_id == CHIP_VEGAM) { in smu7_enable_smc_voltage_controller()
179 PHM_WRITE_VFPF_INDIRECT_FIELD(hwmgr->device, in smu7_enable_smc_voltage_controller()
181 PHM_WRITE_VFPF_INDIRECT_FIELD(hwmgr->device, in smu7_enable_smc_voltage_controller()
185 if (hwmgr->feature_mask & PP_SMC_VOLTAGE_CONTROL_MASK) in smu7_enable_smc_voltage_controller()
186 smum_send_msg_to_smc(hwmgr, PPSMC_MSG_Voltage_Cntl_Enable); in smu7_enable_smc_voltage_controller()
196 static bool smu7_voltage_control(const struct pp_hwmgr *hwmgr) in smu7_voltage_control() argument
199 (const struct smu7_hwmgr *)(hwmgr->backend); in smu7_voltage_control()
210 static int smu7_enable_voltage_control(struct pp_hwmgr *hwmgr) in smu7_enable_voltage_control() argument
213 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__SMC, in smu7_enable_voltage_control()
248 static int smu7_construct_voltage_tables(struct pp_hwmgr *hwmgr) in smu7_construct_voltage_tables() argument
250 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_construct_voltage_tables()
252 (struct phm_ppt_v1_information *)hwmgr->pptable; in smu7_construct_voltage_tables()
257 result = atomctrl_get_voltage_table_v3(hwmgr, in smu7_construct_voltage_tables()
264 if (hwmgr->pp_table_version == PP_TABLE_V1) in smu7_construct_voltage_tables()
267 else if (hwmgr->pp_table_version == PP_TABLE_V0) in smu7_construct_voltage_tables()
269 hwmgr->dyn_state.mvdd_dependency_on_mclk); in smu7_construct_voltage_tables()
277 result = atomctrl_get_voltage_table_v3(hwmgr, in smu7_construct_voltage_tables()
284 if (hwmgr->pp_table_version == PP_TABLE_V1) in smu7_construct_voltage_tables()
287 else if (hwmgr->pp_table_version == PP_TABLE_V0) in smu7_construct_voltage_tables()
289 hwmgr->dyn_state.vddci_dependency_on_mclk); in smu7_construct_voltage_tables()
305 result = atomctrl_get_voltage_table_v3(hwmgr, in smu7_construct_voltage_tables()
312 if (hwmgr->pp_table_version == PP_TABLE_V0) in smu7_construct_voltage_tables()
314 hwmgr->dyn_state.vddc_dependency_on_mclk); in smu7_construct_voltage_tables()
315 else if (hwmgr->pp_table_version == PP_TABLE_V1) in smu7_construct_voltage_tables()
323 tmp = smum_get_mac_definition(hwmgr, SMU_MAX_LEVELS_VDDC); in smu7_construct_voltage_tables()
330 tmp = smum_get_mac_definition(hwmgr, SMU_MAX_LEVELS_VDDGFX); in smu7_construct_voltage_tables()
337 tmp = smum_get_mac_definition(hwmgr, SMU_MAX_LEVELS_VDDCI); in smu7_construct_voltage_tables()
344 tmp = smum_get_mac_definition(hwmgr, SMU_MAX_LEVELS_MVDD); in smu7_construct_voltage_tables()
361 struct pp_hwmgr *hwmgr) in smu7_program_static_screen_threshold_parameters() argument
363 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_program_static_screen_threshold_parameters()
366 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__SMC, in smu7_program_static_screen_threshold_parameters()
370 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__SMC, in smu7_program_static_screen_threshold_parameters()
383 static int smu7_enable_display_gap(struct pp_hwmgr *hwmgr) in smu7_enable_display_gap() argument
386 cgs_read_ind_register(hwmgr->device, CGS_IND_REG__SMC, in smu7_enable_display_gap()
395 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, in smu7_enable_display_gap()
407 static int smu7_program_voting_clients(struct pp_hwmgr *hwmgr) in smu7_program_voting_clients() argument
409 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_program_voting_clients()
413 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__SMC, in smu7_program_voting_clients()
415 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__SMC, in smu7_program_voting_clients()
419 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, in smu7_program_voting_clients()
425 static int smu7_clear_voting_clients(struct pp_hwmgr *hwmgr) in smu7_clear_voting_clients() argument
430 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__SMC, in smu7_clear_voting_clients()
432 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__SMC, in smu7_clear_voting_clients()
436 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, in smu7_clear_voting_clients()
445 static int smu7_copy_and_switch_arb_sets(struct pp_hwmgr *hwmgr, in smu7_copy_and_switch_arb_sets() argument
455 mc_arb_dram_timing = cgs_read_register(hwmgr->device, mmMC_ARB_DRAM_TIMING); in smu7_copy_and_switch_arb_sets()
456 mc_arb_dram_timing2 = cgs_read_register(hwmgr->device, mmMC_ARB_DRAM_TIMING2); in smu7_copy_and_switch_arb_sets()
457 burst_time = PHM_READ_FIELD(hwmgr->device, MC_ARB_BURST_TIME, STATE0); in smu7_copy_and_switch_arb_sets()
460 mc_arb_dram_timing = cgs_read_register(hwmgr->device, mmMC_ARB_DRAM_TIMING_1); in smu7_copy_and_switch_arb_sets()
461 mc_arb_dram_timing2 = cgs_read_register(hwmgr->device, mmMC_ARB_DRAM_TIMING2_1); in smu7_copy_and_switch_arb_sets()
462 burst_time = PHM_READ_FIELD(hwmgr->device, MC_ARB_BURST_TIME, STATE1); in smu7_copy_and_switch_arb_sets()
470 cgs_write_register(hwmgr->device, mmMC_ARB_DRAM_TIMING, mc_arb_dram_timing); in smu7_copy_and_switch_arb_sets()
471 cgs_write_register(hwmgr->device, mmMC_ARB_DRAM_TIMING2, mc_arb_dram_timing2); in smu7_copy_and_switch_arb_sets()
472 PHM_WRITE_FIELD(hwmgr->device, MC_ARB_BURST_TIME, STATE0, burst_time); in smu7_copy_and_switch_arb_sets()
475 cgs_write_register(hwmgr->device, mmMC_ARB_DRAM_TIMING_1, mc_arb_dram_timing); in smu7_copy_and_switch_arb_sets()
476 cgs_write_register(hwmgr->device, mmMC_ARB_DRAM_TIMING2_1, mc_arb_dram_timing2); in smu7_copy_and_switch_arb_sets()
477 PHM_WRITE_FIELD(hwmgr->device, MC_ARB_BURST_TIME, STATE1, burst_time); in smu7_copy_and_switch_arb_sets()
483 mc_cg_config = cgs_read_register(hwmgr->device, mmMC_CG_CONFIG); in smu7_copy_and_switch_arb_sets()
485 cgs_write_register(hwmgr->device, mmMC_CG_CONFIG, mc_cg_config); in smu7_copy_and_switch_arb_sets()
486 PHM_WRITE_FIELD(hwmgr->device, MC_ARB_CG, CG_ARB_REQ, arb_dest); in smu7_copy_and_switch_arb_sets()
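
The matches above in smu7_copy_and_switch_arb_sets() show the full copy-and-switch pattern for the memory-controller arbitration register sets: read the DRAM timing registers and burst time of the source set, write them into the destination set, then point MC_ARB_CG.CG_ARB_REQ at the destination. A condensed sketch of that flow, reconstructed from the matched lines only; the MC_CG_ARB_FREQ_F0/F1 set identifiers and the MC_CG_CONFIG mask value are assumptions not visible in the matches:

    /* Sketch only: copy arbitration set 'arb_src' into 'arb_dest' and switch to it. */
    static int copy_and_switch_arb_sets_sketch(struct pp_hwmgr *hwmgr,
            uint32_t arb_src, uint32_t arb_dest)
    {
        uint32_t mc_arb_dram_timing, mc_arb_dram_timing2, burst_time, mc_cg_config;

        if (arb_src == MC_CG_ARB_FREQ_F0) {   /* assumed set identifier */
            mc_arb_dram_timing  = cgs_read_register(hwmgr->device, mmMC_ARB_DRAM_TIMING);
            mc_arb_dram_timing2 = cgs_read_register(hwmgr->device, mmMC_ARB_DRAM_TIMING2);
            burst_time = PHM_READ_FIELD(hwmgr->device, MC_ARB_BURST_TIME, STATE0);
        } else {                              /* MC_CG_ARB_FREQ_F1 */
            mc_arb_dram_timing  = cgs_read_register(hwmgr->device, mmMC_ARB_DRAM_TIMING_1);
            mc_arb_dram_timing2 = cgs_read_register(hwmgr->device, mmMC_ARB_DRAM_TIMING2_1);
            burst_time = PHM_READ_FIELD(hwmgr->device, MC_ARB_BURST_TIME, STATE1);
        }

        if (arb_dest == MC_CG_ARB_FREQ_F0) {
            cgs_write_register(hwmgr->device, mmMC_ARB_DRAM_TIMING, mc_arb_dram_timing);
            cgs_write_register(hwmgr->device, mmMC_ARB_DRAM_TIMING2, mc_arb_dram_timing2);
            PHM_WRITE_FIELD(hwmgr->device, MC_ARB_BURST_TIME, STATE0, burst_time);
        } else {
            cgs_write_register(hwmgr->device, mmMC_ARB_DRAM_TIMING_1, mc_arb_dram_timing);
            cgs_write_register(hwmgr->device, mmMC_ARB_DRAM_TIMING2_1, mc_arb_dram_timing2);
            PHM_WRITE_FIELD(hwmgr->device, MC_ARB_BURST_TIME, STATE1, burst_time);
        }

        /* Switch the arbiter to the destination set. */
        mc_cg_config = cgs_read_register(hwmgr->device, mmMC_CG_CONFIG);
        mc_cg_config |= 0x0000000F;           /* assumed enable mask */
        cgs_write_register(hwmgr->device, mmMC_CG_CONFIG, mc_cg_config);
        PHM_WRITE_FIELD(hwmgr->device, MC_ARB_CG, CG_ARB_REQ, arb_dest);

        return 0;
    }
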
491 static int smu7_reset_to_default(struct pp_hwmgr *hwmgr) in smu7_reset_to_default() argument
493 return smum_send_msg_to_smc(hwmgr, PPSMC_MSG_ResetToDefaults); in smu7_reset_to_default()
503 static int smu7_initial_switch_from_arbf0_to_f1(struct pp_hwmgr *hwmgr) in smu7_initial_switch_from_arbf0_to_f1() argument
505 return smu7_copy_and_switch_arb_sets(hwmgr, in smu7_initial_switch_from_arbf0_to_f1()
509 static int smu7_force_switch_to_arbf0(struct pp_hwmgr *hwmgr) in smu7_force_switch_to_arbf0() argument
513 tmp = (cgs_read_ind_register(hwmgr->device, in smu7_force_switch_to_arbf0()
520 return smu7_copy_and_switch_arb_sets(hwmgr, in smu7_force_switch_to_arbf0()
524 static int smu7_setup_default_pcie_table(struct pp_hwmgr *hwmgr) in smu7_setup_default_pcie_table() argument
526 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_setup_default_pcie_table()
529 (struct phm_ppt_v1_information *)(hwmgr->pptable); in smu7_setup_default_pcie_table()
551 tmp = smum_get_mac_definition(hwmgr, SMU_MAX_LEVELS_LINK); in smu7_setup_default_pcie_table()
570 smum_update_smc_table(hwmgr, SMU_BIF_TABLE); in smu7_setup_default_pcie_table()
607 if (hwmgr->chip_family == AMDGPU_FAMILY_CI) { in smu7_setup_default_pcie_table()
624 static int smu7_reset_dpm_tables(struct pp_hwmgr *hwmgr) in smu7_reset_dpm_tables() argument
626 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_reset_dpm_tables()
632 smum_get_mac_definition(hwmgr, in smu7_reset_dpm_tables()
637 smum_get_mac_definition(hwmgr, in smu7_reset_dpm_tables()
642 smum_get_mac_definition(hwmgr, in smu7_reset_dpm_tables()
647 smum_get_mac_definition(hwmgr, in smu7_reset_dpm_tables()
652 smum_get_mac_definition(hwmgr, in smu7_reset_dpm_tables()
666 static int smu7_setup_dpm_tables_v0(struct pp_hwmgr *hwmgr) in smu7_setup_dpm_tables_v0() argument
668 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_setup_dpm_tables_v0()
670 hwmgr->dyn_state.vddc_dependency_on_sclk; in smu7_setup_dpm_tables_v0()
672 hwmgr->dyn_state.vddc_dependency_on_mclk; in smu7_setup_dpm_tables_v0()
674 hwmgr->dyn_state.cac_leakage_table; in smu7_setup_dpm_tables_v0()
724 allowed_vdd_mclk_table = hwmgr->dyn_state.vddci_dependency_on_mclk; in smu7_setup_dpm_tables_v0()
735 allowed_vdd_mclk_table = hwmgr->dyn_state.mvdd_dependency_on_mclk; in smu7_setup_dpm_tables_v0()
752 static int smu7_setup_dpm_tables_v1(struct pp_hwmgr *hwmgr) in smu7_setup_dpm_tables_v1() argument
754 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_setup_dpm_tables_v1()
756 (struct phm_ppt_v1_information *)(hwmgr->pptable); in smu7_setup_dpm_tables_v1()
796 if (hwmgr->platform_descriptor.overdriveLimit.engineClock == 0) in smu7_setup_dpm_tables_v1()
797 hwmgr->platform_descriptor.overdriveLimit.engineClock = dep_sclk_table->entries[i-1].clk; in smu7_setup_dpm_tables_v1()
812 if (hwmgr->platform_descriptor.overdriveLimit.memoryClock == 0) in smu7_setup_dpm_tables_v1()
813 hwmgr->platform_descriptor.overdriveLimit.memoryClock = dep_mclk_table->entries[i-1].clk; in smu7_setup_dpm_tables_v1()
817 static int smu7_odn_initial_default_setting(struct pp_hwmgr *hwmgr) in smu7_odn_initial_default_setting() argument
819 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_odn_initial_default_setting()
822 (struct phm_ppt_v1_information *)(hwmgr->pptable); in smu7_odn_initial_default_setting()
862 static void smu7_setup_voltage_range_from_vbios(struct pp_hwmgr *hwmgr) in smu7_setup_voltage_range_from_vbios() argument
864 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_setup_voltage_range_from_vbios()
867 (struct phm_ppt_v1_information *)(hwmgr->pptable); in smu7_setup_voltage_range_from_vbios()
876 atomctrl_get_voltage_range(hwmgr, &max_vddc, &min_vddc); in smu7_setup_voltage_range_from_vbios()
890 static void smu7_check_dpm_table_updated(struct pp_hwmgr *hwmgr) in smu7_check_dpm_table_updated() argument
892 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_check_dpm_table_updated()
895 (struct phm_ppt_v1_information *)(hwmgr->pptable); in smu7_check_dpm_table_updated()
944 static int smu7_setup_default_dpm_tables(struct pp_hwmgr *hwmgr) in smu7_setup_default_dpm_tables() argument
946 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_setup_default_dpm_tables()
948 smu7_reset_dpm_tables(hwmgr); in smu7_setup_default_dpm_tables()
950 if (hwmgr->pp_table_version == PP_TABLE_V1) in smu7_setup_default_dpm_tables()
951 smu7_setup_dpm_tables_v1(hwmgr); in smu7_setup_default_dpm_tables()
952 else if (hwmgr->pp_table_version == PP_TABLE_V0) in smu7_setup_default_dpm_tables()
953 smu7_setup_dpm_tables_v0(hwmgr); in smu7_setup_default_dpm_tables()
955 smu7_setup_default_pcie_table(hwmgr); in smu7_setup_default_dpm_tables()
962 if (hwmgr->od_enabled) { in smu7_setup_default_dpm_tables()
964 smu7_check_dpm_table_updated(hwmgr); in smu7_setup_default_dpm_tables()
966 smu7_setup_voltage_range_from_vbios(hwmgr); in smu7_setup_default_dpm_tables()
967 smu7_odn_initial_default_setting(hwmgr); in smu7_setup_default_dpm_tables()
973 static int smu7_enable_vrhot_gpio_interrupt(struct pp_hwmgr *hwmgr) in smu7_enable_vrhot_gpio_interrupt() argument
976 if (phm_cap_enabled(hwmgr->platform_descriptor.platformCaps, in smu7_enable_vrhot_gpio_interrupt()
978 return smum_send_msg_to_smc(hwmgr, in smu7_enable_vrhot_gpio_interrupt()
984 static int smu7_enable_sclk_control(struct pp_hwmgr *hwmgr) in smu7_enable_sclk_control() argument
986 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__SMC, SCLK_PWRMGT_CNTL, in smu7_enable_sclk_control()
991 static int smu7_enable_ulv(struct pp_hwmgr *hwmgr) in smu7_enable_ulv() argument
993 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_enable_ulv()
996 return smum_send_msg_to_smc(hwmgr, PPSMC_MSG_EnableULV); in smu7_enable_ulv()
1001 static int smu7_disable_ulv(struct pp_hwmgr *hwmgr) in smu7_disable_ulv() argument
1003 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_disable_ulv()
1006 return smum_send_msg_to_smc(hwmgr, PPSMC_MSG_DisableULV); in smu7_disable_ulv()
1011 static int smu7_enable_deep_sleep_master_switch(struct pp_hwmgr *hwmgr) in smu7_enable_deep_sleep_master_switch() argument
1013 if (phm_cap_enabled(hwmgr->platform_descriptor.platformCaps, in smu7_enable_deep_sleep_master_switch()
1015 if (smum_send_msg_to_smc(hwmgr, PPSMC_MSG_MASTER_DeepSleep_ON)) in smu7_enable_deep_sleep_master_switch()
1020 if (smum_send_msg_to_smc(hwmgr, in smu7_enable_deep_sleep_master_switch()
1031 static int smu7_disable_deep_sleep_master_switch(struct pp_hwmgr *hwmgr) in smu7_disable_deep_sleep_master_switch() argument
1033 if (phm_cap_enabled(hwmgr->platform_descriptor.platformCaps, in smu7_disable_deep_sleep_master_switch()
1035 if (smum_send_msg_to_smc(hwmgr, in smu7_disable_deep_sleep_master_switch()
1046 static int smu7_disable_sclk_vce_handshake(struct pp_hwmgr *hwmgr) in smu7_disable_sclk_vce_handshake() argument
1048 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_disable_sclk_vce_handshake()
1051 + smum_get_offsetof(hwmgr, in smu7_disable_sclk_vce_handshake()
1054 soft_register_value = cgs_read_ind_register(hwmgr->device, in smu7_disable_sclk_vce_handshake()
1057 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, in smu7_disable_sclk_vce_handshake()
1062 static int smu7_disable_handshake_uvd(struct pp_hwmgr *hwmgr) in smu7_disable_handshake_uvd() argument
1064 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_disable_handshake_uvd()
1067 + smum_get_offsetof(hwmgr, in smu7_disable_handshake_uvd()
1070 soft_register_value = cgs_read_ind_register(hwmgr->device, in smu7_disable_handshake_uvd()
1072 soft_register_value |= smum_get_mac_definition(hwmgr, in smu7_disable_handshake_uvd()
1074 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, in smu7_disable_handshake_uvd()
1079 static int smu7_enable_sclk_mclk_dpm(struct pp_hwmgr *hwmgr) in smu7_enable_sclk_mclk_dpm() argument
1081 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_enable_sclk_mclk_dpm()
1085 if (hwmgr->chip_id == CHIP_VEGAM) in smu7_enable_sclk_mclk_dpm()
1086 smu7_disable_sclk_vce_handshake(hwmgr); in smu7_enable_sclk_mclk_dpm()
1089 (0 == smum_send_msg_to_smc(hwmgr, PPSMC_MSG_DPM_Enable)), in smu7_enable_sclk_mclk_dpm()
1096 if (!(hwmgr->feature_mask & PP_UVD_HANDSHAKE_MASK)) in smu7_enable_sclk_mclk_dpm()
1097 smu7_disable_handshake_uvd(hwmgr); in smu7_enable_sclk_mclk_dpm()
1100 (0 == smum_send_msg_to_smc(hwmgr, in smu7_enable_sclk_mclk_dpm()
1105 if (hwmgr->chip_family != CHIP_VEGAM) in smu7_enable_sclk_mclk_dpm()
1106 PHM_WRITE_FIELD(hwmgr->device, MC_SEQ_CNTL_3, CAC_EN, 0x1); in smu7_enable_sclk_mclk_dpm()
1109 if (hwmgr->chip_family == AMDGPU_FAMILY_CI) { in smu7_enable_sclk_mclk_dpm()
1110 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, 0xc0400d30, 0x5); in smu7_enable_sclk_mclk_dpm()
1111 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, 0xc0400d3c, 0x5); in smu7_enable_sclk_mclk_dpm()
1112 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, 0xc0400d80, 0x100005); in smu7_enable_sclk_mclk_dpm()
1114 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, 0xc0400d30, 0x400005); in smu7_enable_sclk_mclk_dpm()
1115 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, 0xc0400d3c, 0x400005); in smu7_enable_sclk_mclk_dpm()
1116 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, 0xc0400d80, 0x500005); in smu7_enable_sclk_mclk_dpm()
1118 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, ixLCAC_MC0_CNTL, 0x5); in smu7_enable_sclk_mclk_dpm()
1119 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, ixLCAC_MC1_CNTL, 0x5); in smu7_enable_sclk_mclk_dpm()
1120 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, ixLCAC_CPL_CNTL, 0x100005); in smu7_enable_sclk_mclk_dpm()
1122 if (hwmgr->chip_id == CHIP_VEGAM) { in smu7_enable_sclk_mclk_dpm()
1123 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, ixLCAC_MC0_CNTL, 0x400009); in smu7_enable_sclk_mclk_dpm()
1124 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, ixLCAC_MC1_CNTL, 0x400009); in smu7_enable_sclk_mclk_dpm()
1126 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, ixLCAC_MC0_CNTL, 0x400005); in smu7_enable_sclk_mclk_dpm()
1127 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, ixLCAC_MC1_CNTL, 0x400005); in smu7_enable_sclk_mclk_dpm()
1129 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, ixLCAC_CPL_CNTL, 0x500005); in smu7_enable_sclk_mclk_dpm()
1136 static int smu7_start_dpm(struct pp_hwmgr *hwmgr) in smu7_start_dpm() argument
1138 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_start_dpm()
1142 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__SMC, GENERAL_PWRMGT, in smu7_start_dpm()
1147 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__SMC, SCLK_PWRMGT_CNTL, in smu7_start_dpm()
1152 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, in smu7_start_dpm()
1154 smum_get_offsetof(hwmgr, SMU_SoftRegisters, in smu7_start_dpm()
1156 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__PCIE, in smu7_start_dpm()
1159 if (hwmgr->chip_family == AMDGPU_FAMILY_CI) in smu7_start_dpm()
1160 cgs_write_register(hwmgr->device, 0x1488, in smu7_start_dpm()
1161 (cgs_read_register(hwmgr->device, 0x1488) & ~0x1)); in smu7_start_dpm()
1163 if (smu7_enable_sclk_mclk_dpm(hwmgr)) { in smu7_start_dpm()
1171 (0 == smum_send_msg_to_smc(hwmgr, in smu7_start_dpm()
1177 if (phm_cap_enabled(hwmgr->platform_descriptor.platformCaps, in smu7_start_dpm()
1179 PP_ASSERT_WITH_CODE((0 == smum_send_msg_to_smc(hwmgr, in smu7_start_dpm()
1188 static int smu7_disable_sclk_mclk_dpm(struct pp_hwmgr *hwmgr) in smu7_disable_sclk_mclk_dpm() argument
1190 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_disable_sclk_mclk_dpm()
1194 PP_ASSERT_WITH_CODE(true == smum_is_dpm_running(hwmgr), in smu7_disable_sclk_mclk_dpm()
1197 smum_send_msg_to_smc(hwmgr, PPSMC_MSG_DPM_Disable); in smu7_disable_sclk_mclk_dpm()
1202 PP_ASSERT_WITH_CODE(true == smum_is_dpm_running(hwmgr), in smu7_disable_sclk_mclk_dpm()
1205 smum_send_msg_to_smc(hwmgr, PPSMC_MSG_MCLKDPM_Disable); in smu7_disable_sclk_mclk_dpm()
1211 static int smu7_stop_dpm(struct pp_hwmgr *hwmgr) in smu7_stop_dpm() argument
1213 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_stop_dpm()
1216 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__SMC, GENERAL_PWRMGT, in smu7_stop_dpm()
1219 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__SMC, SCLK_PWRMGT_CNTL, in smu7_stop_dpm()
1225 (smum_send_msg_to_smc(hwmgr, in smu7_stop_dpm()
1231 smu7_disable_sclk_mclk_dpm(hwmgr); in smu7_stop_dpm()
1233 PP_ASSERT_WITH_CODE(true == smum_is_dpm_running(hwmgr), in smu7_stop_dpm()
1237 smum_send_msg_to_smc(hwmgr, PPSMC_MSG_Voltage_Cntl_Disable); in smu7_stop_dpm()
1242 static void smu7_set_dpm_event_sources(struct pp_hwmgr *hwmgr, uint32_t sources) in smu7_set_dpm_event_sources() argument
1271 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__SMC, CG_THERMAL_CTRL, in smu7_set_dpm_event_sources()
1273 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__SMC, GENERAL_PWRMGT, in smu7_set_dpm_event_sources()
1275 !phm_cap_enabled(hwmgr->platform_descriptor.platformCaps, in smu7_set_dpm_event_sources()
1278 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__SMC, GENERAL_PWRMGT, in smu7_set_dpm_event_sources()
1282 static int smu7_enable_auto_throttle_source(struct pp_hwmgr *hwmgr, in smu7_enable_auto_throttle_source() argument
1285 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_enable_auto_throttle_source()
1289 smu7_set_dpm_event_sources(hwmgr, data->active_auto_throttle_sources); in smu7_enable_auto_throttle_source()
1294 static int smu7_enable_thermal_auto_throttle(struct pp_hwmgr *hwmgr) in smu7_enable_thermal_auto_throttle() argument
1296 return smu7_enable_auto_throttle_source(hwmgr, PHM_AutoThrottleSource_Thermal); in smu7_enable_thermal_auto_throttle()
1299 static int smu7_disable_auto_throttle_source(struct pp_hwmgr *hwmgr, in smu7_disable_auto_throttle_source() argument
1302 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_disable_auto_throttle_source()
1306 smu7_set_dpm_event_sources(hwmgr, data->active_auto_throttle_sources); in smu7_disable_auto_throttle_source()
1311 static int smu7_disable_thermal_auto_throttle(struct pp_hwmgr *hwmgr) in smu7_disable_thermal_auto_throttle() argument
1313 return smu7_disable_auto_throttle_source(hwmgr, PHM_AutoThrottleSource_Thermal); in smu7_disable_thermal_auto_throttle()
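
The smu7_enable_auto_throttle_source()/smu7_disable_auto_throttle_source() matches above maintain data->active_auto_throttle_sources and push the result through smu7_set_dpm_event_sources(). A minimal sketch of the enable side, assuming the sources are tracked as a per-source bitmask; the bit arithmetic is inferred and not visible in the matches:

    /* Sketch only: record an auto-throttle source and reprogram the DPM event sources. */
    static int enable_auto_throttle_source_sketch(struct pp_hwmgr *hwmgr,
            PHM_AutoThrottleSource source)
    {
        struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend);

        if (!(data->active_auto_throttle_sources & (1 << source))) {
            data->active_auto_throttle_sources |= 1 << source;  /* assumed bitmask layout */
            smu7_set_dpm_event_sources(hwmgr, data->active_auto_throttle_sources);
        }
        return 0;
    }

The thermal wrappers at 1294-1296 and 1311-1313 then reduce to one-line calls with PHM_AutoThrottleSource_Thermal.
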
1316 static int smu7_pcie_performance_request(struct pp_hwmgr *hwmgr) in smu7_pcie_performance_request() argument
1318 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_pcie_performance_request()
1324 static int smu7_enable_dpm_tasks(struct pp_hwmgr *hwmgr) in smu7_enable_dpm_tasks() argument
1329 if (smu7_voltage_control(hwmgr)) { in smu7_enable_dpm_tasks()
1330 tmp_result = smu7_enable_voltage_control(hwmgr); in smu7_enable_dpm_tasks()
1335 tmp_result = smu7_construct_voltage_tables(hwmgr); in smu7_enable_dpm_tasks()
1340 smum_initialize_mc_reg_table(hwmgr); in smu7_enable_dpm_tasks()
1342 if (phm_cap_enabled(hwmgr->platform_descriptor.platformCaps, in smu7_enable_dpm_tasks()
1344 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__SMC, in smu7_enable_dpm_tasks()
1347 if (phm_cap_enabled(hwmgr->platform_descriptor.platformCaps, in smu7_enable_dpm_tasks()
1349 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__SMC, in smu7_enable_dpm_tasks()
1352 tmp_result = smu7_program_static_screen_threshold_parameters(hwmgr); in smu7_enable_dpm_tasks()
1357 tmp_result = smu7_enable_display_gap(hwmgr); in smu7_enable_dpm_tasks()
1361 tmp_result = smu7_program_voting_clients(hwmgr); in smu7_enable_dpm_tasks()
1365 tmp_result = smum_process_firmware_header(hwmgr); in smu7_enable_dpm_tasks()
1369 if (hwmgr->chip_id != CHIP_VEGAM) { in smu7_enable_dpm_tasks()
1370 tmp_result = smu7_initial_switch_from_arbf0_to_f1(hwmgr); in smu7_enable_dpm_tasks()
1376 result = smu7_setup_default_dpm_tables(hwmgr); in smu7_enable_dpm_tasks()
1380 tmp_result = smum_init_smc_table(hwmgr); in smu7_enable_dpm_tasks()
1384 tmp_result = smu7_enable_vrhot_gpio_interrupt(hwmgr); in smu7_enable_dpm_tasks()
1388 smum_send_msg_to_smc(hwmgr, (PPSMC_Msg)PPSMC_NoDisplay); in smu7_enable_dpm_tasks()
1390 tmp_result = smu7_enable_sclk_control(hwmgr); in smu7_enable_dpm_tasks()
1394 tmp_result = smu7_enable_smc_voltage_controller(hwmgr); in smu7_enable_dpm_tasks()
1398 tmp_result = smu7_enable_ulv(hwmgr); in smu7_enable_dpm_tasks()
1402 tmp_result = smu7_enable_deep_sleep_master_switch(hwmgr); in smu7_enable_dpm_tasks()
1406 tmp_result = smu7_enable_didt_config(hwmgr); in smu7_enable_dpm_tasks()
1410 tmp_result = smu7_start_dpm(hwmgr); in smu7_enable_dpm_tasks()
1414 tmp_result = smu7_enable_smc_cac(hwmgr); in smu7_enable_dpm_tasks()
1418 tmp_result = smu7_enable_power_containment(hwmgr); in smu7_enable_dpm_tasks()
1422 tmp_result = smu7_power_control_set_level(hwmgr); in smu7_enable_dpm_tasks()
1426 tmp_result = smu7_enable_thermal_auto_throttle(hwmgr); in smu7_enable_dpm_tasks()
1430 tmp_result = smu7_pcie_performance_request(hwmgr); in smu7_enable_dpm_tasks()
1437 static int smu7_avfs_control(struct pp_hwmgr *hwmgr, bool enable) in smu7_avfs_control() argument
1439 if (!hwmgr->avfs_supported) in smu7_avfs_control()
1443 if (!PHM_READ_VFPF_INDIRECT_FIELD(hwmgr->device, in smu7_avfs_control()
1446 hwmgr, PPSMC_MSG_EnableAvfs), in smu7_avfs_control()
1450 } else if (PHM_READ_VFPF_INDIRECT_FIELD(hwmgr->device, in smu7_avfs_control()
1453 hwmgr, PPSMC_MSG_DisableAvfs), in smu7_avfs_control()
1461 static int smu7_update_avfs(struct pp_hwmgr *hwmgr) in smu7_update_avfs() argument
1463 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_update_avfs()
1465 if (!hwmgr->avfs_supported) in smu7_update_avfs()
1469 smu7_avfs_control(hwmgr, false); in smu7_update_avfs()
1471 smu7_avfs_control(hwmgr, false); in smu7_update_avfs()
1472 smu7_avfs_control(hwmgr, true); in smu7_update_avfs()
1474 smu7_avfs_control(hwmgr, true); in smu7_update_avfs()
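
The smu7_avfs_control() matches above gate AVFS on hwmgr->avfs_supported and toggle it with PPSMC_MSG_EnableAvfs / PPSMC_MSG_DisableAvfs after checking the current SMC feature status. A hedged sketch of that control flow; the FEATURE_STATUS register and AVS_ON field names are assumptions filling in the truncated PHM_READ_VFPF_INDIRECT_FIELD matches:

    /* Sketch only: enable or disable AVFS through the SMC, skipping redundant requests. */
    static int avfs_control_sketch(struct pp_hwmgr *hwmgr, bool enable)
    {
        if (!hwmgr->avfs_supported)
            return 0;

        if (enable) {
            /* Only send EnableAvfs if the SMC does not already report AVS on. */
            if (!PHM_READ_VFPF_INDIRECT_FIELD(hwmgr->device,
                    CGS_IND_REG__SMC, FEATURE_STATUS, AVS_ON))   /* assumed field */
                return smum_send_msg_to_smc(hwmgr, PPSMC_MSG_EnableAvfs);
        } else if (PHM_READ_VFPF_INDIRECT_FIELD(hwmgr->device,
                CGS_IND_REG__SMC, FEATURE_STATUS, AVS_ON)) {     /* assumed field */
            return smum_send_msg_to_smc(hwmgr, PPSMC_MSG_DisableAvfs);
        }

        return 0;
    }

smu7_update_avfs() then calls this helper with false, false-then-true, or true depending on which overdrive tables changed, as the matches at 1469-1474 indicate.
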
1480 int smu7_disable_dpm_tasks(struct pp_hwmgr *hwmgr) in smu7_disable_dpm_tasks() argument
1484 if (phm_cap_enabled(hwmgr->platform_descriptor.platformCaps, in smu7_disable_dpm_tasks()
1486 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__SMC, in smu7_disable_dpm_tasks()
1489 tmp_result = smu7_disable_power_containment(hwmgr); in smu7_disable_dpm_tasks()
1493 tmp_result = smu7_disable_smc_cac(hwmgr); in smu7_disable_dpm_tasks()
1497 tmp_result = smu7_disable_didt_config(hwmgr); in smu7_disable_dpm_tasks()
1501 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__SMC, in smu7_disable_dpm_tasks()
1503 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__SMC, in smu7_disable_dpm_tasks()
1506 tmp_result = smu7_disable_thermal_auto_throttle(hwmgr); in smu7_disable_dpm_tasks()
1510 tmp_result = smu7_avfs_control(hwmgr, false); in smu7_disable_dpm_tasks()
1514 tmp_result = smu7_stop_dpm(hwmgr); in smu7_disable_dpm_tasks()
1518 tmp_result = smu7_disable_deep_sleep_master_switch(hwmgr); in smu7_disable_dpm_tasks()
1522 tmp_result = smu7_disable_ulv(hwmgr); in smu7_disable_dpm_tasks()
1526 tmp_result = smu7_clear_voting_clients(hwmgr); in smu7_disable_dpm_tasks()
1530 tmp_result = smu7_reset_to_default(hwmgr); in smu7_disable_dpm_tasks()
1534 tmp_result = smu7_force_switch_to_arbf0(hwmgr); in smu7_disable_dpm_tasks()
1541 int smu7_reset_asic_tasks(struct pp_hwmgr *hwmgr) in smu7_reset_asic_tasks() argument
1547 static void smu7_init_dpm_defaults(struct pp_hwmgr *hwmgr) in smu7_init_dpm_defaults() argument
1549 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_init_dpm_defaults()
1551 (struct phm_ppt_v1_information *)(hwmgr->pptable); in smu7_init_dpm_defaults()
1552 struct amdgpu_device *adev = hwmgr->adev; in smu7_init_dpm_defaults()
1568 data->mclk_dpm_key_disabled = hwmgr->feature_mask & PP_MCLK_DPM_MASK ? false : true; in smu7_init_dpm_defaults()
1569 data->sclk_dpm_key_disabled = hwmgr->feature_mask & PP_SCLK_DPM_MASK ? false : true; in smu7_init_dpm_defaults()
1570 data->pcie_dpm_key_disabled = hwmgr->feature_mask & PP_PCIE_DPM_MASK ? false : true; in smu7_init_dpm_defaults()
1578 data->ulv_supported = hwmgr->feature_mask & PP_ULV_MASK ? true : false; in smu7_init_dpm_defaults()
1587 hwmgr->workload_mask = 1 << hwmgr->workload_prority[PP_SMC_POWER_PROFILE_FULLSCREEN3D]; in smu7_init_dpm_defaults()
1588 hwmgr->power_profile_mode = PP_SMC_POWER_PROFILE_FULLSCREEN3D; in smu7_init_dpm_defaults()
1589 hwmgr->default_power_profile_mode = PP_SMC_POWER_PROFILE_FULLSCREEN3D; in smu7_init_dpm_defaults()
1591 if (hwmgr->chip_id == CHIP_POLARIS12 || hwmgr->is_kicker) { in smu7_init_dpm_defaults()
1594 atomctrl_get_svi2_info(hwmgr, VOLTAGE_TYPE_VDDC, &tmp1, &tmp2, in smu7_init_dpm_defaults()
1598 } else if (hwmgr->chip_family == AMDGPU_FAMILY_CI) { in smu7_init_dpm_defaults()
1604 if (hwmgr->chip_id == CHIP_HAWAII) { in smu7_init_dpm_defaults()
1615 if (atomctrl_is_voltage_controlled_by_gpio_v3(hwmgr, in smu7_init_dpm_defaults()
1618 else if (atomctrl_is_voltage_controlled_by_gpio_v3(hwmgr, in smu7_init_dpm_defaults()
1622 if (phm_cap_enabled(hwmgr->platform_descriptor.platformCaps, in smu7_init_dpm_defaults()
1624 if (atomctrl_is_voltage_controlled_by_gpio_v3(hwmgr, in smu7_init_dpm_defaults()
1630 if (phm_cap_enabled(hwmgr->platform_descriptor.platformCaps, in smu7_init_dpm_defaults()
1632 if (atomctrl_is_voltage_controlled_by_gpio_v3(hwmgr, in smu7_init_dpm_defaults()
1635 else if (atomctrl_is_voltage_controlled_by_gpio_v3(hwmgr, in smu7_init_dpm_defaults()
1641 phm_cap_unset(hwmgr->platform_descriptor.platformCaps, in smu7_init_dpm_defaults()
1644 if (phm_cap_enabled(hwmgr->platform_descriptor.platformCaps, in smu7_init_dpm_defaults()
1646 if (atomctrl_is_voltage_controlled_by_gpio_v3(hwmgr, in smu7_init_dpm_defaults()
1649 else if (atomctrl_is_voltage_controlled_by_gpio_v3(hwmgr, in smu7_init_dpm_defaults()
1655 phm_cap_unset(hwmgr->platform_descriptor.platformCaps, in smu7_init_dpm_defaults()
1659 phm_cap_unset(hwmgr->platform_descriptor.platformCaps, in smu7_init_dpm_defaults()
1662 if ((hwmgr->pp_table_version != PP_TABLE_V0) && (hwmgr->feature_mask & PP_CLOCK_STRETCH_MASK) in smu7_init_dpm_defaults()
1664 phm_cap_set(hwmgr->platform_descriptor.platformCaps, in smu7_init_dpm_defaults()
1678 phm_cap_set(hwmgr->platform_descriptor.platformCaps, in smu7_init_dpm_defaults()
1681 phm_cap_set(hwmgr->platform_descriptor.platformCaps, in smu7_init_dpm_defaults()
1691 static int smu7_get_evv_voltages(struct pp_hwmgr *hwmgr) in smu7_get_evv_voltages() argument
1693 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_get_evv_voltages()
1700 (struct phm_ppt_v1_information *)hwmgr->pptable; in smu7_get_evv_voltages()
1708 if ((hwmgr->pp_table_version == PP_TABLE_V1) in smu7_get_evv_voltages()
1709 && !phm_get_sclk_for_voltage_evv(hwmgr, in smu7_get_evv_voltages()
1711 if (phm_cap_enabled(hwmgr->platform_descriptor.platformCaps, in smu7_get_evv_voltages()
1724 (hwmgr, VOLTAGE_TYPE_VDDGFX, sclk, in smu7_get_evv_voltages()
1740 if ((hwmgr->pp_table_version == PP_TABLE_V0) in smu7_get_evv_voltages()
1741 || !phm_get_sclk_for_voltage_evv(hwmgr, in smu7_get_evv_voltages()
1743 if (phm_cap_enabled(hwmgr->platform_descriptor.platformCaps, in smu7_get_evv_voltages()
1758 if (phm_get_voltage_evv_on_sclk(hwmgr, in smu7_get_evv_voltages()
1788 static void smu7_patch_ppt_v1_with_vdd_leakage(struct pp_hwmgr *hwmgr, in smu7_patch_ppt_v1_with_vdd_leakage() argument
1815 static int smu7_patch_lookup_table_with_leakage(struct pp_hwmgr *hwmgr, in smu7_patch_lookup_table_with_leakage() argument
1822 smu7_patch_ppt_v1_with_vdd_leakage(hwmgr, in smu7_patch_lookup_table_with_leakage()
1829 struct pp_hwmgr *hwmgr, struct smu7_leakage_voltage *leakage_table, in smu7_patch_clock_voltage_limits_with_vddc_leakage() argument
1833 (struct phm_ppt_v1_information *)(hwmgr->pptable); in smu7_patch_clock_voltage_limits_with_vddc_leakage()
1834 smu7_patch_ppt_v1_with_vdd_leakage(hwmgr, (uint16_t *)vddc, leakage_table); in smu7_patch_clock_voltage_limits_with_vddc_leakage()
1835 hwmgr->dyn_state.max_clock_voltage_on_dc.vddc = in smu7_patch_clock_voltage_limits_with_vddc_leakage()
1841 struct pp_hwmgr *hwmgr) in smu7_patch_voltage_dependency_tables_with_lookup_table() argument
1845 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_patch_voltage_dependency_tables_with_lookup_table()
1847 (struct phm_ppt_v1_information *)(hwmgr->pptable); in smu7_patch_voltage_dependency_tables_with_lookup_table()
1886 static int phm_add_voltage(struct pp_hwmgr *hwmgr, in phm_add_voltage() argument
1897 i = smum_get_mac_definition(hwmgr, SMU_MAX_LEVELS_VDDGFX); in phm_add_voltage()
1923 static int smu7_calc_voltage_dependency_tables(struct pp_hwmgr *hwmgr) in smu7_calc_voltage_dependency_tables() argument
1927 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_calc_voltage_dependency_tables()
1928 struct phm_ppt_v1_information *pptable_info = (struct phm_ppt_v1_information *)(hwmgr->pptable); in smu7_calc_voltage_dependency_tables()
1946 phm_add_voltage(hwmgr, pptable_info->vddc_lookup_table, &v_record); in smu7_calc_voltage_dependency_tables()
1959 phm_add_voltage(hwmgr, pptable_info->vddgfx_lookup_table, &v_record); in smu7_calc_voltage_dependency_tables()
1965 static int smu7_calc_mm_voltage_dependency_table(struct pp_hwmgr *hwmgr) in smu7_calc_mm_voltage_dependency_table() argument
1969 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_calc_mm_voltage_dependency_table()
1970 struct phm_ppt_v1_information *pptable_info = (struct phm_ppt_v1_information *)(hwmgr->pptable); in smu7_calc_mm_voltage_dependency_table()
1985 phm_add_voltage(hwmgr, pptable_info->vddgfx_lookup_table, &v_record); in smu7_calc_mm_voltage_dependency_table()
1991 static int smu7_sort_lookup_table(struct pp_hwmgr *hwmgr, in smu7_sort_lookup_table() argument
2016 static int smu7_complete_dependency_tables(struct pp_hwmgr *hwmgr) in smu7_complete_dependency_tables() argument
2020 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_complete_dependency_tables()
2022 (struct phm_ppt_v1_information *)(hwmgr->pptable); in smu7_complete_dependency_tables()
2025 tmp_result = smu7_patch_lookup_table_with_leakage(hwmgr, in smu7_complete_dependency_tables()
2030 smu7_patch_ppt_v1_with_vdd_leakage(hwmgr, in smu7_complete_dependency_tables()
2034 tmp_result = smu7_patch_lookup_table_with_leakage(hwmgr, in smu7_complete_dependency_tables()
2039 tmp_result = smu7_patch_clock_voltage_limits_with_vddc_leakage(hwmgr, in smu7_complete_dependency_tables()
2045 tmp_result = smu7_patch_voltage_dependency_tables_with_lookup_table(hwmgr); in smu7_complete_dependency_tables()
2049 tmp_result = smu7_calc_voltage_dependency_tables(hwmgr); in smu7_complete_dependency_tables()
2053 tmp_result = smu7_calc_mm_voltage_dependency_table(hwmgr); in smu7_complete_dependency_tables()
2057 tmp_result = smu7_sort_lookup_table(hwmgr, table_info->vddgfx_lookup_table); in smu7_complete_dependency_tables()
2061 tmp_result = smu7_sort_lookup_table(hwmgr, table_info->vddc_lookup_table); in smu7_complete_dependency_tables()
2068 static int smu7_set_private_data_based_on_pptable_v1(struct pp_hwmgr *hwmgr) in smu7_set_private_data_based_on_pptable_v1() argument
2071 (struct phm_ppt_v1_information *)(hwmgr->pptable); in smu7_set_private_data_based_on_pptable_v1()
2101 hwmgr->dyn_state.max_clock_voltage_on_ac.sclk = table_info->max_clock_voltage_on_ac.sclk; in smu7_set_private_data_based_on_pptable_v1()
2102 hwmgr->dyn_state.max_clock_voltage_on_ac.mclk = table_info->max_clock_voltage_on_ac.mclk; in smu7_set_private_data_based_on_pptable_v1()
2103 hwmgr->dyn_state.max_clock_voltage_on_ac.vddc = table_info->max_clock_voltage_on_ac.vddc; in smu7_set_private_data_based_on_pptable_v1()
2104 hwmgr->dyn_state.max_clock_voltage_on_ac.vddci = table_info->max_clock_voltage_on_ac.vddci; in smu7_set_private_data_based_on_pptable_v1()
2109 static int smu7_patch_voltage_workaround(struct pp_hwmgr *hwmgr) in smu7_patch_voltage_workaround() argument
2112 (struct phm_ppt_v1_information *)(hwmgr->pptable); in smu7_patch_voltage_workaround()
2117 struct amdgpu_device *adev = hwmgr->adev; in smu7_patch_voltage_workaround()
2129 if (hwmgr->chip_id == CHIP_POLARIS10 && hw_revision == 0xC7 && in smu7_patch_voltage_workaround()
2146 static int smu7_thermal_parameter_init(struct pp_hwmgr *hwmgr) in smu7_thermal_parameter_init() argument
2151 (struct phm_ppt_v1_information *)(hwmgr->pptable); in smu7_thermal_parameter_init()
2154 if (atomctrl_get_pp_assign_pin(hwmgr, VDDC_PCC_GPIO_PINID, &gpio_pin_assignment)) { in smu7_thermal_parameter_init()
2155 temp_reg = cgs_read_ind_register(hwmgr->device, CGS_IND_REG__SMC, ixCNB_PWRMGT_CNTL); in smu7_thermal_parameter_init()
2175 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, ixCNB_PWRMGT_CNTL, temp_reg); in smu7_thermal_parameter_init()
2182 hwmgr->thermal_controller.advanceFanControlParameters.ucFanControlMode) { in smu7_thermal_parameter_init()
2183 hwmgr->thermal_controller.advanceFanControlParameters.usFanPWMMinLimit = in smu7_thermal_parameter_init()
2184 (uint16_t)hwmgr->thermal_controller.advanceFanControlParameters.ucMinimumPWMLimit; in smu7_thermal_parameter_init()
2186 hwmgr->thermal_controller.advanceFanControlParameters.usFanPWMMaxLimit = in smu7_thermal_parameter_init()
2187 (uint16_t)hwmgr->thermal_controller.advanceFanControlParameters.usDefaultMaxFanPWM; in smu7_thermal_parameter_init()
2189 hwmgr->thermal_controller.advanceFanControlParameters.usFanPWMStep = 1; in smu7_thermal_parameter_init()
2191 hwmgr->thermal_controller.advanceFanControlParameters.usFanRPMMaxLimit = 100; in smu7_thermal_parameter_init()
2193 hwmgr->thermal_controller.advanceFanControlParameters.usFanRPMMinLimit = in smu7_thermal_parameter_init()
2194 (uint16_t)hwmgr->thermal_controller.advanceFanControlParameters.ucMinimumPWMLimit; in smu7_thermal_parameter_init()
2196 hwmgr->thermal_controller.advanceFanControlParameters.usFanRPMStep = 1; in smu7_thermal_parameter_init()
2205 hwmgr->thermal_controller.advanceFanControlParameters.usMaxFanPWM = in smu7_thermal_parameter_init()
2206 hwmgr->thermal_controller.advanceFanControlParameters.usDefaultMaxFanPWM; in smu7_thermal_parameter_init()
2208 hwmgr->thermal_controller.advanceFanControlParameters.usMaxFanRPM = in smu7_thermal_parameter_init()
2209 hwmgr->thermal_controller.advanceFanControlParameters.usDefaultMaxFanRPM; in smu7_thermal_parameter_init()
2211 hwmgr->dyn_state.cac_dtp_table->usOperatingTempMinLimit = in smu7_thermal_parameter_init()
2214 hwmgr->dyn_state.cac_dtp_table->usOperatingTempMaxLimit = in smu7_thermal_parameter_init()
2217 hwmgr->dyn_state.cac_dtp_table->usDefaultTargetOperatingTemp = in smu7_thermal_parameter_init()
2220 hwmgr->dyn_state.cac_dtp_table->usOperatingTempStep = in smu7_thermal_parameter_init()
2223 hwmgr->dyn_state.cac_dtp_table->usTargetOperatingTemp = in smu7_thermal_parameter_init()
2225 if (hwmgr->feature_mask & PP_OD_FUZZY_FAN_CONTROL_MASK) in smu7_thermal_parameter_init()
2226 phm_cap_set(hwmgr->platform_descriptor.platformCaps, in smu7_thermal_parameter_init()
2240 static void smu7_patch_ppt_v0_with_vdd_leakage(struct pp_hwmgr *hwmgr, in smu7_patch_ppt_v0_with_vdd_leakage() argument
2260 static int smu7_patch_vddc(struct pp_hwmgr *hwmgr, in smu7_patch_vddc() argument
2264 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_patch_vddc()
2268 smu7_patch_ppt_v0_with_vdd_leakage(hwmgr, &tab->entries[i].v, in smu7_patch_vddc()
2274 static int smu7_patch_vddci(struct pp_hwmgr *hwmgr, in smu7_patch_vddci() argument
2278 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_patch_vddci()
2282 smu7_patch_ppt_v0_with_vdd_leakage(hwmgr, &tab->entries[i].v, in smu7_patch_vddci()
2288 static int smu7_patch_vce_vddc(struct pp_hwmgr *hwmgr, in smu7_patch_vce_vddc() argument
2292 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_patch_vce_vddc()
2296 smu7_patch_ppt_v0_with_vdd_leakage(hwmgr, &tab->entries[i].v, in smu7_patch_vce_vddc()
2303 static int smu7_patch_uvd_vddc(struct pp_hwmgr *hwmgr, in smu7_patch_uvd_vddc() argument
2307 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_patch_uvd_vddc()
2311 smu7_patch_ppt_v0_with_vdd_leakage(hwmgr, &tab->entries[i].v, in smu7_patch_uvd_vddc()
2317 static int smu7_patch_vddc_shed_limit(struct pp_hwmgr *hwmgr, in smu7_patch_vddc_shed_limit() argument
2321 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_patch_vddc_shed_limit()
2325 smu7_patch_ppt_v0_with_vdd_leakage(hwmgr, &tab->entries[i].Voltage, in smu7_patch_vddc_shed_limit()
2331 static int smu7_patch_samu_vddc(struct pp_hwmgr *hwmgr, in smu7_patch_samu_vddc() argument
2335 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_patch_samu_vddc()
2339 smu7_patch_ppt_v0_with_vdd_leakage(hwmgr, &tab->entries[i].v, in smu7_patch_samu_vddc()
2345 static int smu7_patch_acp_vddc(struct pp_hwmgr *hwmgr, in smu7_patch_acp_vddc() argument
2349 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_patch_acp_vddc()
2353 smu7_patch_ppt_v0_with_vdd_leakage(hwmgr, &tab->entries[i].v, in smu7_patch_acp_vddc()
2359 static int smu7_patch_limits_vddc(struct pp_hwmgr *hwmgr, in smu7_patch_limits_vddc() argument
2363 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_patch_limits_vddc()
2367 smu7_patch_ppt_v0_with_vdd_leakage(hwmgr, &vddc, in smu7_patch_limits_vddc()
2371 smu7_patch_ppt_v0_with_vdd_leakage(hwmgr, &vddci, in smu7_patch_limits_vddc()
2379 static int smu7_patch_cac_vddc(struct pp_hwmgr *hwmgr, struct phm_cac_leakage_table *tab) in smu7_patch_cac_vddc() argument
2383 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_patch_cac_vddc()
2388 smu7_patch_ppt_v0_with_vdd_leakage(hwmgr, &vddc, &data->vddc_leakage); in smu7_patch_cac_vddc()
2396 static int smu7_patch_dependency_tables_with_leakage(struct pp_hwmgr *hwmgr) in smu7_patch_dependency_tables_with_leakage() argument
2400 tmp = smu7_patch_vddc(hwmgr, hwmgr->dyn_state.vddc_dependency_on_sclk); in smu7_patch_dependency_tables_with_leakage()
2404 tmp = smu7_patch_vddc(hwmgr, hwmgr->dyn_state.vddc_dependency_on_mclk); in smu7_patch_dependency_tables_with_leakage()
2408 tmp = smu7_patch_vddc(hwmgr, hwmgr->dyn_state.vddc_dep_on_dal_pwrl); in smu7_patch_dependency_tables_with_leakage()
2412 tmp = smu7_patch_vddci(hwmgr, hwmgr->dyn_state.vddci_dependency_on_mclk); in smu7_patch_dependency_tables_with_leakage()
2416 tmp = smu7_patch_vce_vddc(hwmgr, hwmgr->dyn_state.vce_clock_voltage_dependency_table); in smu7_patch_dependency_tables_with_leakage()
2420 tmp = smu7_patch_uvd_vddc(hwmgr, hwmgr->dyn_state.uvd_clock_voltage_dependency_table); in smu7_patch_dependency_tables_with_leakage()
2424 tmp = smu7_patch_samu_vddc(hwmgr, hwmgr->dyn_state.samu_clock_voltage_dependency_table); in smu7_patch_dependency_tables_with_leakage()
2428 tmp = smu7_patch_acp_vddc(hwmgr, hwmgr->dyn_state.acp_clock_voltage_dependency_table); in smu7_patch_dependency_tables_with_leakage()
2432 tmp = smu7_patch_vddc_shed_limit(hwmgr, hwmgr->dyn_state.vddc_phase_shed_limits_table); in smu7_patch_dependency_tables_with_leakage()
2436 tmp = smu7_patch_limits_vddc(hwmgr, &hwmgr->dyn_state.max_clock_voltage_on_ac); in smu7_patch_dependency_tables_with_leakage()
2440 tmp = smu7_patch_limits_vddc(hwmgr, &hwmgr->dyn_state.max_clock_voltage_on_dc); in smu7_patch_dependency_tables_with_leakage()
2444 tmp = smu7_patch_cac_vddc(hwmgr, hwmgr->dyn_state.cac_leakage_table); in smu7_patch_dependency_tables_with_leakage()
2452 static int smu7_set_private_data_based_on_pptable_v0(struct pp_hwmgr *hwmgr) in smu7_set_private_data_based_on_pptable_v0() argument
2454 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_set_private_data_based_on_pptable_v0()
2456 …struct phm_clock_voltage_dependency_table *allowed_sclk_vddc_table = hwmgr->dyn_state.vddc_depende… in smu7_set_private_data_based_on_pptable_v0()
2457 …struct phm_clock_voltage_dependency_table *allowed_mclk_vddc_table = hwmgr->dyn_state.vddc_depende… in smu7_set_private_data_based_on_pptable_v0()
2458 …struct phm_clock_voltage_dependency_table *allowed_mclk_vddci_table = hwmgr->dyn_state.vddci_depen… in smu7_set_private_data_based_on_pptable_v0()
2477 hwmgr->dyn_state.max_clock_voltage_on_ac.sclk = in smu7_set_private_data_based_on_pptable_v0()
2479 hwmgr->dyn_state.max_clock_voltage_on_ac.mclk = in smu7_set_private_data_based_on_pptable_v0()
2481 hwmgr->dyn_state.max_clock_voltage_on_ac.vddc = in smu7_set_private_data_based_on_pptable_v0()
2489 …if (hwmgr->dyn_state.vddci_dependency_on_mclk != NULL && hwmgr->dyn_state.vddci_dependency_on_mclk… in smu7_set_private_data_based_on_pptable_v0()
2490 …hwmgr->dyn_state.max_clock_voltage_on_ac.vddci = hwmgr->dyn_state.vddci_dependency_on_mclk->entrie… in smu7_set_private_data_based_on_pptable_v0()
2495 static int smu7_hwmgr_backend_fini(struct pp_hwmgr *hwmgr) in smu7_hwmgr_backend_fini() argument
2497 kfree(hwmgr->dyn_state.vddc_dep_on_dal_pwrl); in smu7_hwmgr_backend_fini()
2498 hwmgr->dyn_state.vddc_dep_on_dal_pwrl = NULL; in smu7_hwmgr_backend_fini()
2499 kfree(hwmgr->backend); in smu7_hwmgr_backend_fini()
2500 hwmgr->backend = NULL; in smu7_hwmgr_backend_fini()
2505 static int smu7_get_elb_voltages(struct pp_hwmgr *hwmgr) in smu7_get_elb_voltages() argument
2508 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_get_elb_voltages()
2511 if (atomctrl_get_leakage_id_from_efuse(hwmgr, &efuse_voltage_id) == 0) { in smu7_get_elb_voltages()
2514 if (atomctrl_get_leakage_vddc_base_on_leakage(hwmgr, &vddc, &vddci, in smu7_get_elb_voltages()
2533 static int smu7_hwmgr_backend_init(struct pp_hwmgr *hwmgr) in smu7_hwmgr_backend_init() argument
2542 hwmgr->backend = data; in smu7_hwmgr_backend_init()
2543 smu7_patch_voltage_workaround(hwmgr); in smu7_hwmgr_backend_init()
2544 smu7_init_dpm_defaults(hwmgr); in smu7_hwmgr_backend_init()
2547 if (phm_cap_enabled(hwmgr->platform_descriptor.platformCaps, in smu7_hwmgr_backend_init()
2549 result = smu7_get_evv_voltages(hwmgr); in smu7_hwmgr_backend_init()
2555 smu7_get_elb_voltages(hwmgr); in smu7_hwmgr_backend_init()
2558 if (hwmgr->pp_table_version == PP_TABLE_V1) { in smu7_hwmgr_backend_init()
2559 smu7_complete_dependency_tables(hwmgr); in smu7_hwmgr_backend_init()
2560 smu7_set_private_data_based_on_pptable_v1(hwmgr); in smu7_hwmgr_backend_init()
2561 } else if (hwmgr->pp_table_version == PP_TABLE_V0) { in smu7_hwmgr_backend_init()
2562 smu7_patch_dependency_tables_with_leakage(hwmgr); in smu7_hwmgr_backend_init()
2563 smu7_set_private_data_based_on_pptable_v0(hwmgr); in smu7_hwmgr_backend_init()
2567 result = phm_initializa_dynamic_state_adjustment_rule_settings(hwmgr); in smu7_hwmgr_backend_init()
2570 struct amdgpu_device *adev = hwmgr->adev; in smu7_hwmgr_backend_init()
2574 hwmgr->platform_descriptor.hardwareActivityPerformanceLevels = in smu7_hwmgr_backend_init()
2576 hwmgr->platform_descriptor.hardwarePerformanceLevels = 2; in smu7_hwmgr_backend_init()
2577 hwmgr->platform_descriptor.minimumClocksReductionPercentage = 50; in smu7_hwmgr_backend_init()
2584 hwmgr->platform_descriptor.vbiosInterruptId = 0x20000400; /* IRQ_SOURCE1_SW_INT */ in smu7_hwmgr_backend_init()
2586 hwmgr->platform_descriptor.clockStep.engineClock = 500; in smu7_hwmgr_backend_init()
2587 hwmgr->platform_descriptor.clockStep.memoryClock = 500; in smu7_hwmgr_backend_init()
2588 smu7_thermal_parameter_init(hwmgr); in smu7_hwmgr_backend_init()
2591 smu7_hwmgr_backend_fini(hwmgr); in smu7_hwmgr_backend_init()
2597 static int smu7_force_dpm_highest(struct pp_hwmgr *hwmgr) in smu7_force_dpm_highest() argument
2599 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_force_dpm_highest()
2610 smum_send_msg_to_smc_with_parameter(hwmgr, in smu7_force_dpm_highest()
2623 smum_send_msg_to_smc_with_parameter(hwmgr, in smu7_force_dpm_highest()
2637 smum_send_msg_to_smc_with_parameter(hwmgr, in smu7_force_dpm_highest()
2646 static int smu7_upload_dpm_level_enable_mask(struct pp_hwmgr *hwmgr) in smu7_upload_dpm_level_enable_mask() argument
2648 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_upload_dpm_level_enable_mask()
2650 if (hwmgr->pp_table_version == PP_TABLE_V1) in smu7_upload_dpm_level_enable_mask()
2651 phm_apply_dal_min_voltage_request(hwmgr); in smu7_upload_dpm_level_enable_mask()
2656 smum_send_msg_to_smc_with_parameter(hwmgr, in smu7_upload_dpm_level_enable_mask()
2663 smum_send_msg_to_smc_with_parameter(hwmgr, in smu7_upload_dpm_level_enable_mask()
2671 static int smu7_unforce_dpm_levels(struct pp_hwmgr *hwmgr) in smu7_unforce_dpm_levels() argument
2673 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_unforce_dpm_levels()
2675 if (!smum_is_dpm_running(hwmgr)) in smu7_unforce_dpm_levels()
2679 smum_send_msg_to_smc(hwmgr, in smu7_unforce_dpm_levels()
2683 return smu7_upload_dpm_level_enable_mask(hwmgr); in smu7_unforce_dpm_levels()
2686 static int smu7_force_dpm_lowest(struct pp_hwmgr *hwmgr) in smu7_force_dpm_lowest() argument
2689 (struct smu7_hwmgr *)(hwmgr->backend); in smu7_force_dpm_lowest()
2694 level = phm_get_lowest_enabled_level(hwmgr, in smu7_force_dpm_lowest()
2696 smum_send_msg_to_smc_with_parameter(hwmgr, in smu7_force_dpm_lowest()
2704 level = phm_get_lowest_enabled_level(hwmgr, in smu7_force_dpm_lowest()
2706 smum_send_msg_to_smc_with_parameter(hwmgr, in smu7_force_dpm_lowest()
2714 level = phm_get_lowest_enabled_level(hwmgr, in smu7_force_dpm_lowest()
2716 smum_send_msg_to_smc_with_parameter(hwmgr, in smu7_force_dpm_lowest()
2725 static int smu7_get_profiling_clk(struct pp_hwmgr *hwmgr, enum amd_dpm_forced_level level, in smu7_get_profiling_clk() argument
2729 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_get_profiling_clk()
2752 if (hwmgr->pp_table_version == PP_TABLE_V0) { in smu7_get_profiling_clk()
2753 for (count = hwmgr->dyn_state.vddc_dependency_on_sclk->count-1; in smu7_get_profiling_clk()
2755 if (tmp_sclk >= hwmgr->dyn_state.vddc_dependency_on_sclk->entries[count].clk) { in smu7_get_profiling_clk()
2756 tmp_sclk = hwmgr->dyn_state.vddc_dependency_on_sclk->entries[count].clk; in smu7_get_profiling_clk()
2763 tmp_sclk = hwmgr->dyn_state.vddc_dependency_on_sclk->entries[0].clk; in smu7_get_profiling_clk()
2767 *sclk_mask = hwmgr->dyn_state.vddc_dependency_on_sclk->count-1; in smu7_get_profiling_clk()
2768 } else if (hwmgr->pp_table_version == PP_TABLE_V1) { in smu7_get_profiling_clk()
2770 (struct phm_ppt_v1_information *)(hwmgr->pptable); in smu7_get_profiling_clk()
2794 hwmgr->pstate_sclk = tmp_sclk; in smu7_get_profiling_clk()
2795 hwmgr->pstate_mclk = tmp_mclk; in smu7_get_profiling_clk()
2800 static int smu7_force_dpm_level(struct pp_hwmgr *hwmgr, in smu7_force_dpm_level() argument
2808 if (hwmgr->pstate_sclk == 0) in smu7_force_dpm_level()
2809 smu7_get_profiling_clk(hwmgr, level, &sclk_mask, &mclk_mask, &pcie_mask); in smu7_force_dpm_level()
2813 ret = smu7_force_dpm_highest(hwmgr); in smu7_force_dpm_level()
2816 ret = smu7_force_dpm_lowest(hwmgr); in smu7_force_dpm_level()
2819 ret = smu7_unforce_dpm_levels(hwmgr); in smu7_force_dpm_level()
2825 ret = smu7_get_profiling_clk(hwmgr, level, &sclk_mask, &mclk_mask, &pcie_mask); in smu7_force_dpm_level()
2828 smu7_force_clock_level(hwmgr, PP_SCLK, 1<<sclk_mask); in smu7_force_dpm_level()
2829 smu7_force_clock_level(hwmgr, PP_MCLK, 1<<mclk_mask); in smu7_force_dpm_level()
2830 smu7_force_clock_level(hwmgr, PP_PCIE, 1<<pcie_mask); in smu7_force_dpm_level()
2839 …if (level == AMD_DPM_FORCED_LEVEL_PROFILE_PEAK && hwmgr->dpm_level != AMD_DPM_FORCED_LEVEL_PROFILE… in smu7_force_dpm_level()
2840 smu7_fan_ctrl_set_fan_speed_percent(hwmgr, 100); in smu7_force_dpm_level()
2841 …else if (level != AMD_DPM_FORCED_LEVEL_PROFILE_PEAK && hwmgr->dpm_level == AMD_DPM_FORCED_LEVEL_PR… in smu7_force_dpm_level()
2842 smu7_fan_ctrl_reset_fan_speed_to_default(hwmgr); in smu7_force_dpm_level()
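
The smu7_force_dpm_level() matches above (2800-2842) show the dispatch on the requested forced level: highest, lowest, unforce for auto, and per-clock level masks for the profiling levels. A condensed sketch of that dispatch, assuming the standard amd_dpm_forced_level enumerators; the profiling case shown stands in for all profiling levels:

    /* Sketch only: how the requested DPM forced level is routed. */
    static int force_dpm_level_sketch(struct pp_hwmgr *hwmgr,
            enum amd_dpm_forced_level level)
    {
        uint32_t sclk_mask = 0, mclk_mask = 0, pcie_mask = 0;
        int ret = 0;

        switch (level) {
        case AMD_DPM_FORCED_LEVEL_HIGH:
            ret = smu7_force_dpm_highest(hwmgr);
            break;
        case AMD_DPM_FORCED_LEVEL_LOW:
            ret = smu7_force_dpm_lowest(hwmgr);
            break;
        case AMD_DPM_FORCED_LEVEL_AUTO:
            ret = smu7_unforce_dpm_levels(hwmgr);
            break;
        case AMD_DPM_FORCED_LEVEL_PROFILE_PEAK:  /* other profiling levels behave alike */
            ret = smu7_get_profiling_clk(hwmgr, level,
                    &sclk_mask, &mclk_mask, &pcie_mask);
            if (ret)
                break;
            smu7_force_clock_level(hwmgr, PP_SCLK, 1 << sclk_mask);
            smu7_force_clock_level(hwmgr, PP_MCLK, 1 << mclk_mask);
            smu7_force_clock_level(hwmgr, PP_PCIE, 1 << pcie_mask);
            break;
        default:
            break;
        }
        return ret;
    }
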
2847 static int smu7_get_power_state_size(struct pp_hwmgr *hwmgr) in smu7_get_power_state_size() argument
2852 static int smu7_vblank_too_short(struct pp_hwmgr *hwmgr, in smu7_vblank_too_short() argument
2855 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_vblank_too_short()
2858 switch (hwmgr->chip_id) { in smu7_vblank_too_short()
2878 static int smu7_apply_state_adjust_rules(struct pp_hwmgr *hwmgr, in smu7_apply_state_adjust_rules() argument
2882 struct amdgpu_device *adev = hwmgr->adev; in smu7_apply_state_adjust_rules()
2892 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_apply_state_adjust_rules()
2894 (struct phm_ppt_v1_information *)(hwmgr->pptable); in smu7_apply_state_adjust_rules()
2906 &(hwmgr->dyn_state.max_clock_voltage_on_ac) : in smu7_apply_state_adjust_rules()
2907 &(hwmgr->dyn_state.max_clock_voltage_on_dc); in smu7_apply_state_adjust_rules()
2919 minimum_clocks.engineClock = hwmgr->display_config->min_core_set_clock; in smu7_apply_state_adjust_rules()
2920 minimum_clocks.memoryClock = hwmgr->display_config->min_mem_set_clock; in smu7_apply_state_adjust_rules()
2922 if (phm_cap_enabled(hwmgr->platform_descriptor.platformCaps, in smu7_apply_state_adjust_rules()
2924 max_limits = &(hwmgr->dyn_state.max_clock_voltage_on_ac); in smu7_apply_state_adjust_rules()
2947 hwmgr->platform_descriptor.platformCaps, in smu7_apply_state_adjust_rules()
2951 if (hwmgr->display_config->num_display == 0) in smu7_apply_state_adjust_rules()
2954 disable_mclk_switching = ((1 < hwmgr->display_config->num_display) || in smu7_apply_state_adjust_rules()
2956 smu7_vblank_too_short(hwmgr, hwmgr->display_config->min_vblank_time)); in smu7_apply_state_adjust_rules()
2995 if (phm_cap_enabled(hwmgr->platform_descriptor.platformCaps, in smu7_apply_state_adjust_rules()
3008 static uint32_t smu7_dpm_get_mclk(struct pp_hwmgr *hwmgr, bool low) in smu7_dpm_get_mclk() argument
3013 if (hwmgr == NULL) in smu7_dpm_get_mclk()
3016 ps = hwmgr->request_ps; in smu7_dpm_get_mclk()
3030 static uint32_t smu7_dpm_get_sclk(struct pp_hwmgr *hwmgr, bool low) in smu7_dpm_get_sclk() argument
3035 if (hwmgr == NULL) in smu7_dpm_get_sclk()
3038 ps = hwmgr->request_ps; in smu7_dpm_get_sclk()
3052 static int smu7_dpm_patch_boot_state(struct pp_hwmgr *hwmgr, in smu7_dpm_patch_boot_state() argument
3055 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_dpm_patch_boot_state()
3065 fw_info = (ATOM_FIRMWARE_INFO_V2_2 *)smu_atom_get_data_table(hwmgr->adev, index, in smu7_dpm_patch_boot_state()
3083 smu7_get_current_pcie_speed(hwmgr); in smu7_dpm_patch_boot_state()
3086 (uint16_t)smu7_get_current_pcie_lane_number(hwmgr); in smu7_dpm_patch_boot_state()
3097 static int smu7_get_number_of_powerplay_table_entries(struct pp_hwmgr *hwmgr) in smu7_get_number_of_powerplay_table_entries() argument
3102 if (hwmgr->pp_table_version == PP_TABLE_V0) { in smu7_get_number_of_powerplay_table_entries()
3103 result = pp_tables_get_num_of_entries(hwmgr, &ret); in smu7_get_number_of_powerplay_table_entries()
3105 } else if (hwmgr->pp_table_version == PP_TABLE_V1) { in smu7_get_number_of_powerplay_table_entries()
3106 result = get_number_of_powerplay_table_entries_v1_0(hwmgr); in smu7_get_number_of_powerplay_table_entries()
3112 static int smu7_get_pp_table_entry_callback_func_v1(struct pp_hwmgr *hwmgr, in smu7_get_pp_table_entry_callback_func_v1() argument
3116 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_get_pp_table_entry_callback_func_v1()
3166 …(smu7_power_state->performance_level_count < smum_get_mac_definition(hwmgr, SMU_MAX_LEVELS_GRAPHIC… in smu7_get_pp_table_entry_callback_func_v1()
3172 hwmgr->platform_descriptor.hardwareActivityPerformanceLevels), in smu7_get_pp_table_entry_callback_func_v1()
3210 static int smu7_get_pp_table_entry_v1(struct pp_hwmgr *hwmgr, in smu7_get_pp_table_entry_v1() argument
3215 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_get_pp_table_entry_v1()
3217 (struct phm_ppt_v1_information *)(hwmgr->pptable); in smu7_get_pp_table_entry_v1()
3225 result = get_powerplay_table_entry_v1_0(hwmgr, entry_index, state, in smu7_get_pp_table_entry_v1()
3312 static int smu7_get_pp_table_entry_callback_func_v0(struct pp_hwmgr *hwmgr, in smu7_get_pp_table_entry_callback_func_v0() argument
3316 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_get_pp_table_entry_callback_func_v0()
3330 (ps->performance_level_count < smum_get_mac_definition(hwmgr, SMU_MAX_LEVELS_GRAPHICS)), in smu7_get_pp_table_entry_callback_func_v0()
3336 hwmgr->platform_descriptor.hardwareActivityPerformanceLevels), in smu7_get_pp_table_entry_callback_func_v0()
3355 static int smu7_get_pp_table_entry_v0(struct pp_hwmgr *hwmgr, in smu7_get_pp_table_entry_v0() argument
3360 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_get_pp_table_entry_v0()
3362 hwmgr->dyn_state.vddci_dependency_on_mclk; in smu7_get_pp_table_entry_v0()
3370 result = pp_tables_get_entry(hwmgr, entry_index, state, in smu7_get_pp_table_entry_v0()
3462 static int smu7_get_pp_table_entry(struct pp_hwmgr *hwmgr, in smu7_get_pp_table_entry() argument
3465 if (hwmgr->pp_table_version == PP_TABLE_V0) in smu7_get_pp_table_entry()
3466 return smu7_get_pp_table_entry_v0(hwmgr, entry_index, state); in smu7_get_pp_table_entry()
3467 else if (hwmgr->pp_table_version == PP_TABLE_V1) in smu7_get_pp_table_entry()
3468 return smu7_get_pp_table_entry_v1(hwmgr, entry_index, state); in smu7_get_pp_table_entry()
3473 static int smu7_get_gpu_power(struct pp_hwmgr *hwmgr, u32 *query) in smu7_get_gpu_power() argument
3481 smum_send_msg_to_smc_with_parameter(hwmgr, PPSMC_MSG_GetCurrPkgPwr, 0); in smu7_get_gpu_power()
3482 tmp = cgs_read_register(hwmgr->device, mmSMC_MSG_ARG_0); in smu7_get_gpu_power()
3488 smum_send_msg_to_smc(hwmgr, PPSMC_MSG_PmStatusLogStart); in smu7_get_gpu_power()
3489 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, in smu7_get_gpu_power()
3494 smum_send_msg_to_smc(hwmgr, PPSMC_MSG_PmStatusLogSample); in smu7_get_gpu_power()
3495 tmp = cgs_read_ind_register(hwmgr->device, in smu7_get_gpu_power()
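The fragments for smu7_get_gpu_power() show the mailbox pattern this file leans on: post a PPSMC message to the SMC, then read the reply back out of a message-argument register. A stripped-down sketch of the first variant visible above (source 3481-3482); the local declaration is added here for illustration, and error handling plus the unit conversion of the returned value are omitted.

        uint32_t tmp;

        /* Ask the SMC firmware for the current package power reading ... */
        smum_send_msg_to_smc_with_parameter(hwmgr, PPSMC_MSG_GetCurrPkgPwr, 0);
        /* ... then fetch the reply from the message-argument register. */
        tmp = cgs_read_register(hwmgr->device, mmSMC_MSG_ARG_0);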
3506 static int smu7_read_sensor(struct pp_hwmgr *hwmgr, int idx, in smu7_read_sensor() argument
3511 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_read_sensor()
3519 smum_send_msg_to_smc(hwmgr, PPSMC_MSG_API_GetSclkFrequency); in smu7_read_sensor()
3520 sclk = cgs_read_register(hwmgr->device, mmSMC_MSG_ARG_0); in smu7_read_sensor()
3525 smum_send_msg_to_smc(hwmgr, PPSMC_MSG_API_GetMclkFrequency); in smu7_read_sensor()
3526 mclk = cgs_read_register(hwmgr->device, mmSMC_MSG_ARG_0); in smu7_read_sensor()
3531 offset = data->soft_regs_start + smum_get_offsetof(hwmgr, in smu7_read_sensor()
3535 activity_percent = cgs_read_ind_register(hwmgr->device, CGS_IND_REG__SMC, offset); in smu7_read_sensor()
3542 *((uint32_t *)value) = smu7_thermal_get_temperature(hwmgr); in smu7_read_sensor()
3554 return smu7_get_gpu_power(hwmgr, (uint32_t *)value); in smu7_read_sensor()
3557 val_vid = PHM_READ_INDIRECT_FIELD(hwmgr->device, in smu7_read_sensor()
3560 val_vid = PHM_READ_INDIRECT_FIELD(hwmgr->device, in smu7_read_sensor()
3570 static int smu7_find_dpm_states_clocks_in_dpm_table(struct pp_hwmgr *hwmgr, const void *input) in smu7_find_dpm_states_clocks_in_dpm_table() argument
3576 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_find_dpm_states_clocks_in_dpm_table()
3612 if (data->display_timing.num_existing_displays != hwmgr->display_config->num_display) in smu7_find_dpm_states_clocks_in_dpm_table()
3618 static uint16_t smu7_get_maximum_link_speed(struct pp_hwmgr *hwmgr, in smu7_get_maximum_link_speed() argument
3623 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_get_maximum_link_speed()
3644 struct pp_hwmgr *hwmgr, const void *input) in smu7_request_link_speed_change_before_state_change() argument
3648 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_request_link_speed_change_before_state_change()
3654 uint16_t target_link_speed = smu7_get_maximum_link_speed(hwmgr, smu7_nps); in smu7_request_link_speed_change_before_state_change()
3658 current_link_speed = smu7_get_maximum_link_speed(hwmgr, polaris10_cps); in smu7_request_link_speed_change_before_state_change()
3669 if (0 == amdgpu_acpi_pcie_performance_request(hwmgr->adev, PCIE_PERF_REQ_GEN3, false)) in smu7_request_link_speed_change_before_state_change()
3675 if (0 == amdgpu_acpi_pcie_performance_request(hwmgr->adev, PCIE_PERF_REQ_GEN2, false)) in smu7_request_link_speed_change_before_state_change()
3679 data->force_pcie_gen = smu7_get_current_pcie_speed(hwmgr); in smu7_request_link_speed_change_before_state_change()
3690 static int smu7_freeze_sclk_mclk_dpm(struct pp_hwmgr *hwmgr) in smu7_freeze_sclk_mclk_dpm() argument
3692 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_freeze_sclk_mclk_dpm()
3700 PP_ASSERT_WITH_CODE(true == smum_is_dpm_running(hwmgr), in smu7_freeze_sclk_mclk_dpm()
3703 PP_ASSERT_WITH_CODE(0 == smum_send_msg_to_smc(hwmgr, in smu7_freeze_sclk_mclk_dpm()
3712 PP_ASSERT_WITH_CODE(true == smum_is_dpm_running(hwmgr), in smu7_freeze_sclk_mclk_dpm()
3715 PP_ASSERT_WITH_CODE(0 == smum_send_msg_to_smc(hwmgr, in smu7_freeze_sclk_mclk_dpm()
3725 struct pp_hwmgr *hwmgr, const void *input) in smu7_populate_and_upload_sclk_mclk_dpm_levels() argument
3728 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_populate_and_upload_sclk_mclk_dpm_levels()
3738 if (hwmgr->od_enabled && data->need_update_smu7_dpm_table & DPMTABLE_OD_UPDATE_SCLK) { in smu7_populate_and_upload_sclk_mclk_dpm_levels()
3745 if (hwmgr->od_enabled && data->need_update_smu7_dpm_table & DPMTABLE_OD_UPDATE_MCLK) { in smu7_populate_and_upload_sclk_mclk_dpm_levels()
3754 result = smum_populate_all_graphic_levels(hwmgr); in smu7_populate_and_upload_sclk_mclk_dpm_levels()
3763 result = smum_populate_all_memory_levels(hwmgr); in smu7_populate_and_upload_sclk_mclk_dpm_levels()
3772 static int smu7_trim_single_dpm_states(struct pp_hwmgr *hwmgr, in smu7_trim_single_dpm_states() argument
3780 if (!hwmgr->od_enabled && (dpm_table->dpm_levels[i].value < low_limit in smu7_trim_single_dpm_states()
3790 static int smu7_trim_dpm_states(struct pp_hwmgr *hwmgr, in smu7_trim_dpm_states() argument
3793 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_trim_dpm_states()
3802 smu7_trim_single_dpm_states(hwmgr, in smu7_trim_dpm_states()
3807 smu7_trim_single_dpm_states(hwmgr, in smu7_trim_dpm_states()
3816 struct pp_hwmgr *hwmgr, const void *input) in smu7_generate_dpm_level_enable_mask() argument
3821 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_generate_dpm_level_enable_mask()
3826 result = smu7_trim_dpm_states(hwmgr, smu7_ps); in smu7_generate_dpm_level_enable_mask()
3840 static int smu7_unfreeze_sclk_mclk_dpm(struct pp_hwmgr *hwmgr) in smu7_unfreeze_sclk_mclk_dpm() argument
3842 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_unfreeze_sclk_mclk_dpm()
3851 PP_ASSERT_WITH_CODE(true == smum_is_dpm_running(hwmgr), in smu7_unfreeze_sclk_mclk_dpm()
3854 PP_ASSERT_WITH_CODE(0 == smum_send_msg_to_smc(hwmgr, in smu7_unfreeze_sclk_mclk_dpm()
3863 PP_ASSERT_WITH_CODE(true == smum_is_dpm_running(hwmgr), in smu7_unfreeze_sclk_mclk_dpm()
3866 PP_ASSERT_WITH_CODE(0 == smum_send_msg_to_smc(hwmgr, in smu7_unfreeze_sclk_mclk_dpm()
3878 struct pp_hwmgr *hwmgr, const void *input) in smu7_notify_link_speed_change_after_state_change() argument
3882 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_notify_link_speed_change_after_state_change()
3885 uint16_t target_link_speed = smu7_get_maximum_link_speed(hwmgr, smu7_ps); in smu7_notify_link_speed_change_after_state_change()
3897 smu7_get_current_pcie_speed(hwmgr) > 0) in smu7_notify_link_speed_change_after_state_change()
3901 if (amdgpu_acpi_pcie_performance_request(hwmgr->adev, request, false)) { in smu7_notify_link_speed_change_after_state_change()
3913 static int smu7_notify_smc_display(struct pp_hwmgr *hwmgr) in smu7_notify_smc_display() argument
3915 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_notify_smc_display()
3917 if (hwmgr->feature_mask & PP_VBI_TIME_SUPPORT_MASK) { in smu7_notify_smc_display()
3918 if (hwmgr->chip_id == CHIP_VEGAM) in smu7_notify_smc_display()
3919 smum_send_msg_to_smc_with_parameter(hwmgr, in smu7_notify_smc_display()
3922 smum_send_msg_to_smc_with_parameter(hwmgr, in smu7_notify_smc_display()
3925 return (smum_send_msg_to_smc(hwmgr, (PPSMC_Msg)PPSMC_HasDisplay) == 0) ? 0 : -EINVAL; in smu7_notify_smc_display()
3928 static int smu7_set_power_state_tasks(struct pp_hwmgr *hwmgr, const void *input) in smu7_set_power_state_tasks() argument
3931 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_set_power_state_tasks()
3933 tmp_result = smu7_find_dpm_states_clocks_in_dpm_table(hwmgr, input); in smu7_set_power_state_tasks()
3938 if (phm_cap_enabled(hwmgr->platform_descriptor.platformCaps, in smu7_set_power_state_tasks()
3941 smu7_request_link_speed_change_before_state_change(hwmgr, input); in smu7_set_power_state_tasks()
3947 tmp_result = smu7_freeze_sclk_mclk_dpm(hwmgr); in smu7_set_power_state_tasks()
3951 tmp_result = smu7_populate_and_upload_sclk_mclk_dpm_levels(hwmgr, input); in smu7_set_power_state_tasks()
3956 tmp_result = smu7_update_avfs(hwmgr); in smu7_set_power_state_tasks()
3961 tmp_result = smu7_generate_dpm_level_enable_mask(hwmgr, input); in smu7_set_power_state_tasks()
3966 tmp_result = smum_update_sclk_threshold(hwmgr); in smu7_set_power_state_tasks()
3971 tmp_result = smu7_notify_smc_display(hwmgr); in smu7_set_power_state_tasks()
3976 tmp_result = smu7_unfreeze_sclk_mclk_dpm(hwmgr); in smu7_set_power_state_tasks()
3981 tmp_result = smu7_upload_dpm_level_enable_mask(hwmgr); in smu7_set_power_state_tasks()
3986 if (phm_cap_enabled(hwmgr->platform_descriptor.platformCaps, in smu7_set_power_state_tasks()
3989 smu7_notify_link_speed_change_after_state_change(hwmgr, input); in smu7_set_power_state_tasks()
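Read top to bottom, the fragments for smu7_set_power_state_tasks() (source 3928-3989) give the ordering of a power-state transition. The comment-only skeleton below restates that sequence; the step grouping is editorial, the two link-speed calls are gated on PHM_PlatformCaps_PCIEPerformanceRequest per lines 3938/3986, and the PP_ASSERT_WITH_CODE error handling between steps is elided.

        /* 1. Map the requested state onto the DPM tables. */
        smu7_find_dpm_states_clocks_in_dpm_table(hwmgr, input);
        /* 2. Optionally ask ACPI for a PCIe link-speed change (cap-gated). */
        smu7_request_link_speed_change_before_state_change(hwmgr, input);
        /* 3. Freeze SCLK/MCLK DPM while the level tables are rewritten. */
        smu7_freeze_sclk_mclk_dpm(hwmgr);
        /* 4. Repopulate and upload the graphics/memory levels, then AVFS. */
        smu7_populate_and_upload_sclk_mclk_dpm_levels(hwmgr, input);
        smu7_update_avfs(hwmgr);
        /* 5. Regenerate the level enable masks and the SCLK threshold. */
        smu7_generate_dpm_level_enable_mask(hwmgr, input);
        smum_update_sclk_threshold(hwmgr);
        /* 6. Notify the SMC about displays, unfreeze, and finish up. */
        smu7_notify_smc_display(hwmgr);
        smu7_unfreeze_sclk_mclk_dpm(hwmgr);
        smu7_upload_dpm_level_enable_mask(hwmgr);
        smu7_notify_link_speed_change_after_state_change(hwmgr, input);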
3998 static int smu7_set_max_fan_pwm_output(struct pp_hwmgr *hwmgr, uint16_t us_max_fan_pwm) in smu7_set_max_fan_pwm_output() argument
4000 hwmgr->thermal_controller. in smu7_set_max_fan_pwm_output()
4003 return smum_send_msg_to_smc_with_parameter(hwmgr, in smu7_set_max_fan_pwm_output()
4008 smu7_notify_smc_display_change(struct pp_hwmgr *hwmgr, bool has_display) in smu7_notify_smc_display_change() argument
4012 return (smum_send_msg_to_smc(hwmgr, msg) == 0) ? 0 : -1; in smu7_notify_smc_display_change()
4016 smu7_notify_smc_display_config_after_ps_adjustment(struct pp_hwmgr *hwmgr) in smu7_notify_smc_display_config_after_ps_adjustment() argument
4018 if (hwmgr->display_config->num_display > 1 && in smu7_notify_smc_display_config_after_ps_adjustment()
4019 !hwmgr->display_config->multi_monitor_in_sync) in smu7_notify_smc_display_config_after_ps_adjustment()
4020 smu7_notify_smc_display_change(hwmgr, false); in smu7_notify_smc_display_config_after_ps_adjustment()
4031 static int smu7_program_display_gap(struct pp_hwmgr *hwmgr) in smu7_program_display_gap() argument
4033 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_program_display_gap()
4034 …uint32_t display_gap = cgs_read_ind_register(hwmgr->device, CGS_IND_REG__SMC, ixCG_DISPLAY_GAP_CNT… in smu7_program_display_gap()
4040 …display_gap = PHM_SET_FIELD(display_gap, CG_DISPLAY_GAP_CNTL, DISP_GAP, (hwmgr->display_config->nu… in smu7_program_display_gap()
4041 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, ixCG_DISPLAY_GAP_CNTL, display_gap); in smu7_program_display_gap()
4043 ref_clock = amdgpu_asic_get_xclk((struct amdgpu_device *)hwmgr->adev); in smu7_program_display_gap()
4044 refresh_rate = hwmgr->display_config->vrefresh; in smu7_program_display_gap()
4051 pre_vbi_time_in_us = frame_time_in_us - 200 - hwmgr->display_config->min_vblank_time; in smu7_program_display_gap()
4057 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, ixCG_DISPLAY_GAP_CNTL2, display_gap2); in smu7_program_display_gap()
4059 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, in smu7_program_display_gap()
4060 data->soft_regs_start + smum_get_offsetof(hwmgr, in smu7_program_display_gap()
4064 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, in smu7_program_display_gap()
4065 data->soft_regs_start + smum_get_offsetof(hwmgr, in smu7_program_display_gap()
4073 static int smu7_display_configuration_changed_task(struct pp_hwmgr *hwmgr) in smu7_display_configuration_changed_task() argument
4075 return smu7_program_display_gap(hwmgr); in smu7_display_configuration_changed_task()
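The arithmetic behind smu7_program_display_gap() is only partly visible above (source 4043-4051). The sketch below spells out the intent; the intermediate declarations and the 60 Hz fallback for a zero vrefresh are assumptions added for illustration.

        uint32_t refresh_rate = hwmgr->display_config->vrefresh;
        uint32_t frame_time_in_us, pre_vbi_time_in_us;

        if (refresh_rate == 0)
                refresh_rate = 60;      /* assumed guard against divide-by-zero */

        /* Whole frame period in microseconds ... */
        frame_time_in_us = 1000000 / refresh_rate;
        /* ... minus a 200 us margin and the minimum vblank time gives the
         * pre-VBI window used to derive the gap value written to the SMC. */
        pre_vbi_time_in_us = frame_time_in_us - 200 -
                        hwmgr->display_config->min_vblank_time;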
4085 static int smu7_set_max_fan_rpm_output(struct pp_hwmgr *hwmgr, uint16_t us_max_fan_rpm) in smu7_set_max_fan_rpm_output() argument
4087 hwmgr->thermal_controller. in smu7_set_max_fan_rpm_output()
4090 return smum_send_msg_to_smc_with_parameter(hwmgr, in smu7_set_max_fan_rpm_output()
4098 static int smu7_register_irq_handlers(struct pp_hwmgr *hwmgr) in smu7_register_irq_handlers() argument
4108 amdgpu_irq_add_id((struct amdgpu_device *)(hwmgr->adev), in smu7_register_irq_handlers()
4112 amdgpu_irq_add_id((struct amdgpu_device *)(hwmgr->adev), in smu7_register_irq_handlers()
4118 amdgpu_irq_add_id((struct amdgpu_device *)(hwmgr->adev), in smu7_register_irq_handlers()
4127 smu7_check_smc_update_required_for_display_configuration(struct pp_hwmgr *hwmgr) in smu7_check_smc_update_required_for_display_configuration() argument
4129 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_check_smc_update_required_for_display_configuration()
4132 if (data->display_timing.num_existing_displays != hwmgr->display_config->num_display) in smu7_check_smc_update_required_for_display_configuration()
4135 if (phm_cap_enabled(hwmgr->platform_descriptor.platformCaps, PHM_PlatformCaps_SclkDeepSleep)) { in smu7_check_smc_update_required_for_display_configuration()
4136 if (data->display_timing.min_clock_in_sr != hwmgr->display_config->min_core_set_clock_in_sr && in smu7_check_smc_update_required_for_display_configuration()
4138 hwmgr->display_config->min_core_set_clock_in_sr >= SMU7_MINIMUM_ENGINE_CLOCK)) in smu7_check_smc_update_required_for_display_configuration()
4153 static int smu7_check_states_equal(struct pp_hwmgr *hwmgr, in smu7_check_states_equal() argument
4160 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_check_states_equal()
4193 static int smu7_check_mc_firmware(struct pp_hwmgr *hwmgr) in smu7_check_mc_firmware() argument
4195 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_check_mc_firmware()
4205 smu7_get_mc_microcode_version(hwmgr); in smu7_check_mc_firmware()
4206 vbios_version = hwmgr->microcode_version_info.MC & 0xf; in smu7_check_mc_firmware()
4210 cgs_write_register(hwmgr->device, mmMC_SEQ_IO_DEBUG_INDEX, in smu7_check_mc_firmware()
4212 tmp = cgs_read_register(hwmgr->device, mmMC_SEQ_IO_DEBUG_DATA); in smu7_check_mc_firmware()
4225 static int smu7_read_clock_registers(struct pp_hwmgr *hwmgr) in smu7_read_clock_registers() argument
4227 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_read_clock_registers()
4230 cgs_read_ind_register(hwmgr->device, CGS_IND_REG__SMC, ixCG_SPLL_FUNC_CNTL); in smu7_read_clock_registers()
4232 cgs_read_ind_register(hwmgr->device, CGS_IND_REG__SMC, ixCG_SPLL_FUNC_CNTL_2); in smu7_read_clock_registers()
4234 cgs_read_ind_register(hwmgr->device, CGS_IND_REG__SMC, ixCG_SPLL_FUNC_CNTL_3); in smu7_read_clock_registers()
4236 cgs_read_ind_register(hwmgr->device, CGS_IND_REG__SMC, ixCG_SPLL_FUNC_CNTL_4); in smu7_read_clock_registers()
4238 cgs_read_ind_register(hwmgr->device, CGS_IND_REG__SMC, ixCG_SPLL_SPREAD_SPECTRUM); in smu7_read_clock_registers()
4240 cgs_read_ind_register(hwmgr->device, CGS_IND_REG__SMC, ixCG_SPLL_SPREAD_SPECTRUM_2); in smu7_read_clock_registers()
4242 cgs_read_register(hwmgr->device, mmDLL_CNTL); in smu7_read_clock_registers()
4244 cgs_read_register(hwmgr->device, mmMCLK_PWRMGT_CNTL); in smu7_read_clock_registers()
4246 cgs_read_register(hwmgr->device, mmMPLL_AD_FUNC_CNTL); in smu7_read_clock_registers()
4248 cgs_read_register(hwmgr->device, mmMPLL_DQ_FUNC_CNTL); in smu7_read_clock_registers()
4250 cgs_read_register(hwmgr->device, mmMPLL_FUNC_CNTL); in smu7_read_clock_registers()
4252 cgs_read_register(hwmgr->device, mmMPLL_FUNC_CNTL_1); in smu7_read_clock_registers()
4254 cgs_read_register(hwmgr->device, mmMPLL_FUNC_CNTL_2); in smu7_read_clock_registers()
4256 cgs_read_register(hwmgr->device, mmMPLL_SS1); in smu7_read_clock_registers()
4258 cgs_read_register(hwmgr->device, mmMPLL_SS2); in smu7_read_clock_registers()
4269 static int smu7_get_memory_type(struct pp_hwmgr *hwmgr) in smu7_get_memory_type() argument
4271 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_get_memory_type()
4272 struct amdgpu_device *adev = hwmgr->adev; in smu7_get_memory_type()
4285 static int smu7_enable_acpi_power_management(struct pp_hwmgr *hwmgr) in smu7_enable_acpi_power_management() argument
4287 PHM_WRITE_INDIRECT_FIELD(hwmgr->device, CGS_IND_REG__SMC, in smu7_enable_acpi_power_management()
4299 static int smu7_init_power_gate_state(struct pp_hwmgr *hwmgr) in smu7_init_power_gate_state() argument
4301 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_init_power_gate_state()
4309 static int smu7_init_sclk_threshold(struct pp_hwmgr *hwmgr) in smu7_init_sclk_threshold() argument
4311 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_init_sclk_threshold()
4317 static int smu7_setup_asic_task(struct pp_hwmgr *hwmgr) in smu7_setup_asic_task() argument
4321 smu7_check_mc_firmware(hwmgr); in smu7_setup_asic_task()
4323 tmp_result = smu7_read_clock_registers(hwmgr); in smu7_setup_asic_task()
4327 tmp_result = smu7_get_memory_type(hwmgr); in smu7_setup_asic_task()
4331 tmp_result = smu7_enable_acpi_power_management(hwmgr); in smu7_setup_asic_task()
4335 tmp_result = smu7_init_power_gate_state(hwmgr); in smu7_setup_asic_task()
4339 tmp_result = smu7_get_mc_microcode_version(hwmgr); in smu7_setup_asic_task()
4343 tmp_result = smu7_init_sclk_threshold(hwmgr); in smu7_setup_asic_task()
4350 static int smu7_force_clock_level(struct pp_hwmgr *hwmgr, in smu7_force_clock_level() argument
4353 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_force_clock_level()
4361 smum_send_msg_to_smc_with_parameter(hwmgr, in smu7_force_clock_level()
4367 smum_send_msg_to_smc_with_parameter(hwmgr, in smu7_force_clock_level()
4377 smum_send_msg_to_smc(hwmgr, PPSMC_MSG_PCIeDPM_UnForceLevel); in smu7_force_clock_level()
4379 smum_send_msg_to_smc_with_parameter(hwmgr, in smu7_force_clock_level()
4392 static int smu7_print_clock_levels(struct pp_hwmgr *hwmgr, in smu7_print_clock_levels() argument
4395 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_print_clock_levels()
4407 smum_send_msg_to_smc(hwmgr, PPSMC_MSG_API_GetSclkFrequency); in smu7_print_clock_levels()
4408 clock = cgs_read_register(hwmgr->device, mmSMC_MSG_ARG_0); in smu7_print_clock_levels()
4423 smum_send_msg_to_smc(hwmgr, PPSMC_MSG_API_GetMclkFrequency); in smu7_print_clock_levels()
4424 clock = cgs_read_register(hwmgr->device, mmSMC_MSG_ARG_0); in smu7_print_clock_levels()
4439 pcie_speed = smu7_get_current_pcie_speed(hwmgr); in smu7_print_clock_levels()
4455 if (hwmgr->od_enabled) { in smu7_print_clock_levels()
4464 if (hwmgr->od_enabled) { in smu7_print_clock_levels()
4473 if (hwmgr->od_enabled) { in smu7_print_clock_levels()
4477 hwmgr->platform_descriptor.overdriveLimit.engineClock/100); in smu7_print_clock_levels()
4480 hwmgr->platform_descriptor.overdriveLimit.memoryClock/100); in smu7_print_clock_levels()
4492 static void smu7_set_fan_control_mode(struct pp_hwmgr *hwmgr, uint32_t mode) in smu7_set_fan_control_mode() argument
4496 smu7_fan_ctrl_set_fan_speed_percent(hwmgr, 100); in smu7_set_fan_control_mode()
4499 if (phm_cap_enabled(hwmgr->platform_descriptor.platformCaps, in smu7_set_fan_control_mode()
4501 smu7_fan_ctrl_stop_smc_fan_control(hwmgr); in smu7_set_fan_control_mode()
4504 if (!smu7_fan_ctrl_set_static_mode(hwmgr, mode)) in smu7_set_fan_control_mode()
4505 smu7_fan_ctrl_start_smc_fan_control(hwmgr); in smu7_set_fan_control_mode()
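The smu7_set_fan_control_mode() fragments suggest a three-way switch over the fan mode. The mapping of modes to the three calls, and the platform-cap name guarding the manual case, are not visible in the listing and are assumptions here; this is a plausible reconstruction consistent with the AMD_FAN_CTRL_* values shown at source 4514.

        switch (mode) {
        case AMD_FAN_CTRL_NONE:         /* no control: pin the fan at 100% */
                smu7_fan_ctrl_set_fan_speed_percent(hwmgr, 100);
                break;
        case AMD_FAN_CTRL_MANUAL:       /* hand fan control back to the host */
                if (phm_cap_enabled(hwmgr->platform_descriptor.platformCaps,
                                PHM_PlatformCaps_MicrocodeFanControl)) /* cap name assumed */
                        smu7_fan_ctrl_stop_smc_fan_control(hwmgr);
                break;
        case AMD_FAN_CTRL_AUTO:         /* let the SMC firmware drive the fan */
                if (!smu7_fan_ctrl_set_static_mode(hwmgr, mode))
                        smu7_fan_ctrl_start_smc_fan_control(hwmgr);
                break;
        default:
                break;
        }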
4512 static uint32_t smu7_get_fan_control_mode(struct pp_hwmgr *hwmgr) in smu7_get_fan_control_mode() argument
4514 return hwmgr->fan_ctrl_enabled ? AMD_FAN_CTRL_AUTO : AMD_FAN_CTRL_MANUAL; in smu7_get_fan_control_mode()
4517 static int smu7_get_sclk_od(struct pp_hwmgr *hwmgr) in smu7_get_sclk_od() argument
4519 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_get_sclk_od()
4533 static int smu7_set_sclk_od(struct pp_hwmgr *hwmgr, uint32_t value) in smu7_set_sclk_od() argument
4535 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_set_sclk_od()
4544 ps = hwmgr->request_ps; in smu7_set_sclk_od()
4559 static int smu7_get_mclk_od(struct pp_hwmgr *hwmgr) in smu7_get_mclk_od() argument
4561 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_get_mclk_od()
4575 static int smu7_set_mclk_od(struct pp_hwmgr *hwmgr, uint32_t value) in smu7_set_mclk_od() argument
4577 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_set_mclk_od()
4586 ps = hwmgr->request_ps; in smu7_set_mclk_od()
4602 static int smu7_get_sclks(struct pp_hwmgr *hwmgr, struct amd_pp_clocks *clocks) in smu7_get_sclks() argument
4605 (struct phm_ppt_v1_information *)hwmgr->pptable; in smu7_get_sclks()
4610 if (hwmgr->pp_table_version == PP_TABLE_V1) { in smu7_get_sclks()
4617 } else if (hwmgr->pp_table_version == PP_TABLE_V0) { in smu7_get_sclks()
4618 sclk_table = hwmgr->dyn_state.vddc_dependency_on_sclk; in smu7_get_sclks()
4627 static uint32_t smu7_get_mem_latency(struct pp_hwmgr *hwmgr, uint32_t clk) in smu7_get_mem_latency() argument
4629 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_get_mem_latency()
4639 static int smu7_get_mclks(struct pp_hwmgr *hwmgr, struct amd_pp_clocks *clocks) in smu7_get_mclks() argument
4642 (struct phm_ppt_v1_information *)hwmgr->pptable; in smu7_get_mclks()
4647 if (hwmgr->pp_table_version == PP_TABLE_V1) { in smu7_get_mclks()
4653 clocks->latency[i] = smu7_get_mem_latency(hwmgr, in smu7_get_mclks()
4657 } else if (hwmgr->pp_table_version == PP_TABLE_V0) { in smu7_get_mclks()
4658 mclk_table = hwmgr->dyn_state.vddc_dependency_on_mclk; in smu7_get_mclks()
4666 static int smu7_get_clock_by_type(struct pp_hwmgr *hwmgr, enum amd_pp_clock_type type, in smu7_get_clock_by_type() argument
4671 smu7_get_sclks(hwmgr, clocks); in smu7_get_clock_by_type()
4674 smu7_get_mclks(hwmgr, clocks); in smu7_get_clock_by_type()
4683 static int smu7_notify_cac_buffer_info(struct pp_hwmgr *hwmgr, in smu7_notify_cac_buffer_info() argument
4690 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_notify_cac_buffer_info()
4692 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, in smu7_notify_cac_buffer_info()
4694 smum_get_offsetof(hwmgr, in smu7_notify_cac_buffer_info()
4698 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, in smu7_notify_cac_buffer_info()
4700 smum_get_offsetof(hwmgr, in smu7_notify_cac_buffer_info()
4704 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, in smu7_notify_cac_buffer_info()
4706 smum_get_offsetof(hwmgr, in smu7_notify_cac_buffer_info()
4710 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, in smu7_notify_cac_buffer_info()
4712 smum_get_offsetof(hwmgr, in smu7_notify_cac_buffer_info()
4716 cgs_write_ind_register(hwmgr->device, CGS_IND_REG__SMC, in smu7_notify_cac_buffer_info()
4718 smum_get_offsetof(hwmgr, in smu7_notify_cac_buffer_info()
4724 static int smu7_get_max_high_clocks(struct pp_hwmgr *hwmgr, in smu7_get_max_high_clocks() argument
4727 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_get_max_high_clocks()
4743 static int smu7_get_thermal_temperature_range(struct pp_hwmgr *hwmgr, in smu7_get_thermal_temperature_range() argument
4746 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_get_thermal_temperature_range()
4748 (struct phm_ppt_v1_information *)hwmgr->pptable; in smu7_get_thermal_temperature_range()
4752 if (hwmgr->pp_table_version == PP_TABLE_V1) in smu7_get_thermal_temperature_range()
4755 else if (hwmgr->pp_table_version == PP_TABLE_V0) in smu7_get_thermal_temperature_range()
4762 static bool smu7_check_clk_voltage_valid(struct pp_hwmgr *hwmgr, in smu7_check_clk_voltage_valid() argument
4767 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_check_clk_voltage_valid()
4778 hwmgr->platform_descriptor.overdriveLimit.engineClock < clk) { in smu7_check_clk_voltage_valid()
4781 hwmgr->platform_descriptor.overdriveLimit.engineClock/100); in smu7_check_clk_voltage_valid()
4786 hwmgr->platform_descriptor.overdriveLimit.memoryClock < clk) { in smu7_check_clk_voltage_valid()
4789 hwmgr->platform_descriptor.overdriveLimit.memoryClock/100); in smu7_check_clk_voltage_valid()
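The /100 in the prints above (and earlier in smu7_print_clock_levels()) reflects the convention that these clock values are carried in 10 kHz units, so dividing by 100 yields MHz for display. The helper below is purely hypothetical, named here only to make that conversion explicit; it is not part of the driver.

        /* Hypothetical convenience wrapper, for illustration only. */
        static inline uint32_t smu7_clk_10khz_to_mhz(uint32_t clk_10khz)
        {
                return clk_10khz / 100;
        }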
4799 static int smu7_odn_edit_dpm_table(struct pp_hwmgr *hwmgr, in smu7_odn_edit_dpm_table() argument
4806 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_odn_edit_dpm_table()
4815 if (!hwmgr->od_enabled) { in smu7_odn_edit_dpm_table()
4834 smu7_odn_initial_default_setting(hwmgr); in smu7_odn_edit_dpm_table()
4837 smu7_check_dpm_table_updated(hwmgr); in smu7_odn_edit_dpm_table()
4852 if (smu7_check_clk_voltage_valid(hwmgr, type, input_clk, input_vol)) { in smu7_odn_edit_dpm_table()
4865 static int smu7_get_power_profile_mode(struct pp_hwmgr *hwmgr, char *buf) in smu7_get_power_profile_mode() argument
4867 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_get_power_profile_mode()
4897 if (i == hwmgr->power_profile_mode) { in smu7_get_power_profile_mode()
4930 static void smu7_patch_compute_profile_mode(struct pp_hwmgr *hwmgr, in smu7_patch_compute_profile_mode() argument
4933 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_patch_compute_profile_mode()
4943 smu7_force_clock_level(hwmgr, PP_SCLK, 3 << (level-1)); in smu7_patch_compute_profile_mode()
4945 } else if (hwmgr->power_profile_mode == PP_SMC_POWER_PROFILE_COMPUTE) { in smu7_patch_compute_profile_mode()
4946 smu7_force_clock_level(hwmgr, PP_SCLK, data->dpm_level_enable_mask.sclk_dpm_enable_mask); in smu7_patch_compute_profile_mode()
4950 static int smu7_set_power_profile_mode(struct pp_hwmgr *hwmgr, long *input, uint32_t size) in smu7_set_power_profile_mode() argument
4952 struct smu7_hwmgr *data = (struct smu7_hwmgr *)(hwmgr->backend); in smu7_set_power_profile_mode()
4973 if (!smum_update_dpm_settings(hwmgr, &tmp)) { in smu7_set_power_profile_mode()
4975 hwmgr->power_profile_mode = mode; in smu7_set_power_profile_mode()
4983 if (mode == hwmgr->power_profile_mode) in smu7_set_power_profile_mode()
4987 if (!smum_update_dpm_settings(hwmgr, &tmp)) { in smu7_set_power_profile_mode()
5000 smu7_patch_compute_profile_mode(hwmgr, mode); in smu7_set_power_profile_mode()
5001 hwmgr->power_profile_mode = mode; in smu7_set_power_profile_mode()
5086 int smu7_init_function_pointers(struct pp_hwmgr *hwmgr) in smu7_init_function_pointers() argument
5090 hwmgr->hwmgr_func = &smu7_hwmgr_funcs; in smu7_init_function_pointers()
5091 if (hwmgr->pp_table_version == PP_TABLE_V0) in smu7_init_function_pointers()
5092 hwmgr->pptable_func = &pptable_funcs; in smu7_init_function_pointers()
5093 else if (hwmgr->pp_table_version == PP_TABLE_V1) in smu7_init_function_pointers()
5094 hwmgr->pptable_func = &pptable_v1_0_funcs; in smu7_init_function_pointers()