/Linux-v6.1/drivers/gpu/drm/amd/pm/swsmu/smu13/ |
D | aldebaran_ppt.c |
     269  struct smu_dpm_context *smu_dpm = &smu->smu_dpm;  in aldebaran_allocate_dpm_context()  local
     271  smu_dpm->dpm_context = kzalloc(sizeof(struct smu_13_0_dpm_context),  in aldebaran_allocate_dpm_context()
     273  if (!smu_dpm->dpm_context)  in aldebaran_allocate_dpm_context()
     275  smu_dpm->dpm_context_size = sizeof(struct smu_13_0_dpm_context);  in aldebaran_allocate_dpm_context()
     309  struct smu_13_0_dpm_context *dpm_context = smu->smu_dpm.dpm_context;  in aldebaran_set_default_dpm_table()
     508  smu->smu_dpm.dpm_context;  in aldebaran_populate_umd_state_clk()
     747  struct smu_dpm_context *smu_dpm = &smu->smu_dpm;  in aldebaran_print_clk_levels()  local
     760  dpm_context = smu_dpm->dpm_context;  in aldebaran_print_clk_levels()
     939  smu->smu_dpm.dpm_context;  in aldebaran_upload_dpm_level()
     991  struct smu_13_0_dpm_context *dpm_context = smu->smu_dpm.dpm_context;  in aldebaran_force_clk_levels()
     [all …]
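Read together, the aldebaran_allocate_dpm_context() hits above (lines 269, 271, 273, 275) outline the per-ASIC context-allocation pattern used throughout this directory. A minimal sketch of that pattern follows; the kzalloc() flag and the error return are truncated out of the snippets, so GFP_KERNEL and -ENOMEM are assumptions, and the function name is illustrative.

#include <linux/slab.h>
#include "amdgpu_smu.h"

/* Sketch only: mirrors the allocate_dpm_context() hits listed above. */
static int example_allocate_dpm_context(struct smu_context *smu)
{
	struct smu_dpm_context *smu_dpm = &smu->smu_dpm;

	/* One zeroed per-ASIC context hangs off the generic smu_dpm_context. */
	smu_dpm->dpm_context = kzalloc(sizeof(struct smu_13_0_dpm_context),
				       GFP_KERNEL);	/* flag assumed, not shown */
	if (!smu_dpm->dpm_context)
		return -ENOMEM;				/* return value assumed */

	/* Record the size so generic code can handle the opaque pointer. */
	smu_dpm->dpm_context_size = sizeof(struct smu_13_0_dpm_context);

	return 0;
}

The smu11 entries further down (arcturus, navi10, vangogh, sienna_cichlid) follow the same shape with struct smu_11_0_dpm_context.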
|
D | smu_v13_0_7_ppt.c |
     512  struct smu_dpm_context *smu_dpm = &smu->smu_dpm;  in smu_v13_0_7_allocate_dpm_context()  local
     514  smu_dpm->dpm_context = kzalloc(sizeof(struct smu_13_0_dpm_context),  in smu_v13_0_7_allocate_dpm_context()
     516  if (!smu_dpm->dpm_context)  in smu_v13_0_7_allocate_dpm_context()
     519  smu_dpm->dpm_context_size = sizeof(struct smu_13_0_dpm_context);  in smu_v13_0_7_allocate_dpm_context()
     541  struct smu_13_0_dpm_context *dpm_context = smu->smu_dpm.dpm_context;  in smu_v13_0_7_set_default_dpm_table()
     955  struct smu_dpm_context *smu_dpm = &smu->smu_dpm;  in smu_v13_0_7_print_clk_levels()  local
     956  struct smu_13_0_dpm_context *dpm_context = smu_dpm->dpm_context;  in smu_v13_0_7_print_clk_levels()
    1088  struct smu_dpm_context *smu_dpm = &smu->smu_dpm;  in smu_v13_0_7_force_clk_levels()  local
    1089  struct smu_13_0_dpm_context *dpm_context = smu_dpm->dpm_context;  in smu_v13_0_7_force_clk_levels()
    1167  struct smu_13_0_dpm_context *dpm_context = smu->smu_dpm.dpm_context;  in smu_v13_0_7_update_pcie_parameters()
    [all …]
|
D | smu_v13_0_0_ppt.c |
     458  struct smu_dpm_context *smu_dpm = &smu->smu_dpm;  in smu_v13_0_0_allocate_dpm_context()  local
     460  smu_dpm->dpm_context = kzalloc(sizeof(struct smu_13_0_dpm_context),  in smu_v13_0_0_allocate_dpm_context()
     462  if (!smu_dpm->dpm_context)  in smu_v13_0_0_allocate_dpm_context()
     465  smu_dpm->dpm_context_size = sizeof(struct smu_13_0_dpm_context);  in smu_v13_0_0_allocate_dpm_context()
     487  struct smu_13_0_dpm_context *dpm_context = smu->smu_dpm.dpm_context;  in smu_v13_0_0_set_default_dpm_table()
     911  struct smu_dpm_context *smu_dpm = &smu->smu_dpm;  in smu_v13_0_0_print_clk_levels()  local
     912  struct smu_13_0_dpm_context *dpm_context = smu_dpm->dpm_context;  in smu_v13_0_0_print_clk_levels()
    1045  struct smu_dpm_context *smu_dpm = &smu->smu_dpm;  in smu_v13_0_0_force_clk_levels()  local
    1046  struct smu_13_0_dpm_context *dpm_context = smu_dpm->dpm_context;  in smu_v13_0_0_force_clk_levels()
    1124  struct smu_13_0_dpm_context *dpm_context = smu->smu_dpm.dpm_context;  in smu_v13_0_0_update_pcie_parameters()
    [all …]
|
D | smu_v13_0.c |
     536  struct smu_dpm_context *smu_dpm = &smu->smu_dpm;  in smu_v13_0_fini_smc_tables()  local
     561  kfree(smu_dpm->dpm_context);  in smu_v13_0_fini_smc_tables()
     562  kfree(smu_dpm->golden_dpm_context);  in smu_v13_0_fini_smc_tables()
     563  kfree(smu_dpm->dpm_current_power_state);  in smu_v13_0_fini_smc_tables()
     564  kfree(smu_dpm->dpm_request_power_state);  in smu_v13_0_fini_smc_tables()
     565  smu_dpm->dpm_context = NULL;  in smu_v13_0_fini_smc_tables()
     566  smu_dpm->golden_dpm_context = NULL;  in smu_v13_0_fini_smc_tables()
     567  smu_dpm->dpm_context_size = 0;  in smu_v13_0_fini_smc_tables()
     568  smu_dpm->dpm_current_power_state = NULL;  in smu_v13_0_fini_smc_tables()
     569  smu_dpm->dpm_request_power_state = NULL;  in smu_v13_0_fini_smc_tables()
     [all …]
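The smu_v13_0_fini_smc_tables() hits (lines 561-569) show the matching teardown: every DPM allocation is freed, then the pointers and size are cleared so a repeated fini or re-init cannot double-free or reuse stale state. A sketch of just that sequence, pulled out into an illustrative helper (in the kernel this code sits inline in the fini function):

/* Sketch: the free-then-clear sequence from the hits above. */
static void example_fini_dpm_context(struct smu_dpm_context *smu_dpm)
{
	kfree(smu_dpm->dpm_context);
	kfree(smu_dpm->golden_dpm_context);
	kfree(smu_dpm->dpm_current_power_state);
	kfree(smu_dpm->dpm_request_power_state);

	/* Clearing the pointers makes a later repeat of this path harmless. */
	smu_dpm->dpm_context = NULL;
	smu_dpm->golden_dpm_context = NULL;
	smu_dpm->dpm_context_size = 0;
	smu_dpm->dpm_current_power_state = NULL;
	smu_dpm->dpm_request_power_state = NULL;
}

The smu_v11_0_fini_smc_tables() hits under smu11/ below repeat the same sequence line for line.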
|
D | smu_v13_0_5_ppt.c |
     507  struct smu_dpm_context *smu_dpm = &(smu->smu_dpm);  in smu_v13_0_5_od_edit_dpm_table()  local
     511  if (smu_dpm->dpm_level != AMD_DPM_FORCED_LEVEL_MANUAL)  in smu_v13_0_5_od_edit_dpm_table()
|
D | yellow_carp_ppt.c |
     630  struct smu_dpm_context *smu_dpm = &(smu->smu_dpm);  in yellow_carp_od_edit_dpm_table()  local
     634  if (smu_dpm->dpm_level != AMD_DPM_FORCED_LEVEL_MANUAL)  in yellow_carp_od_edit_dpm_table()
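smu_v13_0_5_od_edit_dpm_table() and yellow_carp_od_edit_dpm_table() share the same guard: overdrive table edits are only honoured while the DPM level is manual. The snippets stop at the condition, so the reject path in this sketch (an -EINVAL return) is an assumption for illustration:

/* Sketch of the shared guard; the reject path is assumed, not shown above. */
static int example_od_edit_dpm_table(struct smu_context *smu)
{
	struct smu_dpm_context *smu_dpm = &(smu->smu_dpm);

	if (smu_dpm->dpm_level != AMD_DPM_FORCED_LEVEL_MANUAL)
		return -EINVAL;	/* assumed: edits refused outside manual mode */

	/* ... apply the requested overdrive edit ... */
	return 0;
}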
|
/Linux-v6.1/drivers/gpu/drm/amd/pm/swsmu/smu11/ |
D | smu_v11_0.c |
     457  struct smu_dpm_context *smu_dpm = &smu->smu_dpm;  in smu_v11_0_fini_smc_tables()  local
     486  kfree(smu_dpm->dpm_context);  in smu_v11_0_fini_smc_tables()
     487  kfree(smu_dpm->golden_dpm_context);  in smu_v11_0_fini_smc_tables()
     488  kfree(smu_dpm->dpm_current_power_state);  in smu_v11_0_fini_smc_tables()
     489  kfree(smu_dpm->dpm_request_power_state);  in smu_v11_0_fini_smc_tables()
     490  smu_dpm->dpm_context = NULL;  in smu_v11_0_fini_smc_tables()
     491  smu_dpm->golden_dpm_context = NULL;  in smu_v11_0_fini_smc_tables()
     492  smu_dpm->dpm_context_size = 0;  in smu_v11_0_fini_smc_tables()
     493  smu_dpm->dpm_current_power_state = NULL;  in smu_v11_0_fini_smc_tables()
     494  smu_dpm->dpm_request_power_state = NULL;  in smu_v11_0_fini_smc_tables()
     [all …]
|
D | arcturus_ppt.c |
     288  struct smu_dpm_context *smu_dpm = &smu->smu_dpm;  in arcturus_allocate_dpm_context()  local
     290  smu_dpm->dpm_context = kzalloc(sizeof(struct smu_11_0_dpm_context),  in arcturus_allocate_dpm_context()
     292  if (!smu_dpm->dpm_context)  in arcturus_allocate_dpm_context()
     294  smu_dpm->dpm_context_size = sizeof(struct smu_11_0_dpm_context);  in arcturus_allocate_dpm_context()
     329  struct smu_11_0_dpm_context *dpm_context = smu->smu_dpm.dpm_context;  in arcturus_set_default_dpm_table()
     532  smu->smu_dpm.dpm_context;  in arcturus_populate_umd_state_clk()
     767  struct smu_dpm_context *smu_dpm = &smu->smu_dpm;  in arcturus_print_clk_levels()  local
     778  dpm_context = smu_dpm->dpm_context;  in arcturus_print_clk_levels()
     953  smu->smu_dpm.dpm_context;  in arcturus_upload_dpm_level()
    1005  struct smu_11_0_dpm_context *dpm_context = smu->smu_dpm.dpm_context;  in arcturus_force_clk_levels()
|
D | navi10_ppt.c |
     935  struct smu_dpm_context *smu_dpm = &smu->smu_dpm;  in navi10_allocate_dpm_context()  local
     937  smu_dpm->dpm_context = kzalloc(sizeof(struct smu_11_0_dpm_context),  in navi10_allocate_dpm_context()
     939  if (!smu_dpm->dpm_context)  in navi10_allocate_dpm_context()
     942  smu_dpm->dpm_context_size = sizeof(struct smu_11_0_dpm_context);  in navi10_allocate_dpm_context()
     964  struct smu_11_0_dpm_context *dpm_context = smu->smu_dpm.dpm_context;  in navi10_set_default_dpm_table()
    1260  struct smu_dpm_context *smu_dpm = &smu->smu_dpm;  in navi10_emit_clk_levels()  local
    1261  struct smu_11_0_dpm_context *dpm_context = smu_dpm->dpm_context;  in navi10_emit_clk_levels()
    1467  struct smu_dpm_context *smu_dpm = &smu->smu_dpm;  in navi10_print_clk_levels()  local
    1468  struct smu_11_0_dpm_context *dpm_context = smu_dpm->dpm_context;  in navi10_print_clk_levels()
    1695  smu->smu_dpm.dpm_context;  in navi10_populate_umd_state_clk()
    [all …]
|
D | vangogh_ppt.c |
     447  struct smu_dpm_context *smu_dpm = &smu->smu_dpm;  in vangogh_allocate_dpm_context()  local
     449  smu_dpm->dpm_context = kzalloc(sizeof(struct smu_11_0_dpm_context),  in vangogh_allocate_dpm_context()
     451  if (!smu_dpm->dpm_context)  in vangogh_allocate_dpm_context()
     454  smu_dpm->dpm_context_size = sizeof(struct smu_11_0_dpm_context);  in vangogh_allocate_dpm_context()
     582  struct smu_dpm_context *smu_dpm_ctx = &(smu->smu_dpm);  in vangogh_print_legacy_clk_levels()
     683  struct smu_dpm_context *smu_dpm_ctx = &(smu->smu_dpm);  in vangogh_print_clk_levels()
    1933  struct smu_dpm_context *smu_dpm_ctx = &(smu->smu_dpm);  in vangogh_od_edit_dpm_table()
|
D | sienna_cichlid_ppt.c |
     891  struct smu_dpm_context *smu_dpm = &smu->smu_dpm;  in sienna_cichlid_allocate_dpm_context()  local
     893  smu_dpm->dpm_context = kzalloc(sizeof(struct smu_11_0_dpm_context),  in sienna_cichlid_allocate_dpm_context()
     895  if (!smu_dpm->dpm_context)  in sienna_cichlid_allocate_dpm_context()
     898  smu_dpm->dpm_context_size = sizeof(struct smu_11_0_dpm_context);  in sienna_cichlid_allocate_dpm_context()
     926  struct smu_11_0_dpm_context *dpm_context = smu->smu_dpm.dpm_context;  in sienna_cichlid_set_default_dpm_table()
    1255  struct smu_dpm_context *smu_dpm = &smu->smu_dpm;  in sienna_cichlid_print_clk_levels()  local
    1256  struct smu_11_0_dpm_context *dpm_context = smu_dpm->dpm_context;  in sienna_cichlid_print_clk_levels()
    1467  smu->smu_dpm.dpm_context;  in sienna_cichlid_populate_umd_state_clk()
    2074  struct smu_11_0_dpm_context *dpm_context = smu->smu_dpm.dpm_context;  in sienna_cichlid_update_pcie_parameters()
|
/Linux-v6.1/drivers/gpu/drm/amd/pm/swsmu/ |
D | amdgpu_smu.c |
     349  struct smu_dpm_context *smu_dpm_ctx = &(smu->smu_dpm);  in smu_restore_dpm_user_profile()
     740  smu->smu_dpm.dpm_level,  in smu_late_init()
    1066  smu->smu_dpm.dpm_level = AMD_DPM_FORCED_LEVEL_AUTO;  in smu_sw_init()
    1067  smu->smu_dpm.requested_dpm_level = AMD_DPM_FORCED_LEVEL_AUTO;  in smu_sw_init()
    1714  struct smu_dpm_context *smu_dpm_ctx = &(smu->smu_dpm);  in smu_enable_umd_pstate()
    1762  struct smu_dpm_context *smu_dpm_ctx = &(smu->smu_dpm);  in smu_adjust_power_state_dynamic()
    1842  struct smu_dpm_context *smu_dpm = &smu->smu_dpm;  in smu_handle_dpm_task()  local
    1844  return smu_handle_task(smu, smu_dpm->dpm_level, task_id);  in smu_handle_dpm_task()
    1853  struct smu_dpm_context *smu_dpm_ctx = &(smu->smu_dpm);  in smu_switch_power_profile()
    1885  struct smu_dpm_context *smu_dpm_ctx = &(smu->smu_dpm);  in smu_get_performance_level()
    [all …]
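Two details stand out in the amdgpu_smu.c hits: smu_sw_init() (lines 1066-1067) seeds both dpm_level and requested_dpm_level to AMD_DPM_FORCED_LEVEL_AUTO, and smu_handle_dpm_task() (lines 1842, 1844) is a thin wrapper that forwards the cached dpm_level to smu_handle_task(). The snippets do not show the surrounding signatures, so the parameter list in this sketch is assumed:

/* Sketch: forward the cached level, as the line 1844 hit suggests.
 * The wrapper's real signature is not visible in the snippets.
 */
static int example_handle_dpm_task(struct smu_context *smu,
				   enum amd_pp_task task_id)
{
	struct smu_dpm_context *smu_dpm = &smu->smu_dpm;

	return smu_handle_task(smu, smu_dpm->dpm_level, task_id);
}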
|
/Linux-v6.1/drivers/gpu/drm/amd/pm/swsmu/smu12/ |
D | renoir_ppt.c |
     388  struct smu_dpm_context *smu_dpm_ctx = &(smu->smu_dpm);  in renoir_od_edit_dpm_table()
     500  struct smu_dpm_context *smu_dpm_ctx = &(smu->smu_dpm);  in renoir_print_clk_levels()
     622  struct smu_dpm_context *smu_dpm_ctx = &(smu->smu_dpm);  in renoir_get_current_power_state()
|
/Linux-v6.1/drivers/gpu/drm/amd/pm/swsmu/inc/ |
D | amdgpu_smu.h |
     496  struct smu_dpm_context smu_dpm;  member
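The single header hit only shows that smu_dpm is embedded by value in its containing structure, but the fields touched across this listing imply most of its shape. The sketch below is reconstructed from those accesses; the field types, their order, and any members not referenced above are assumptions, not the authoritative amdgpu_smu.h definition:

/* Reconstructed from the accesses in this listing; types are assumed. */
struct smu_dpm_context_sketch {
	uint32_t dpm_context_size;		/* set by *_allocate_dpm_context() */
	void *dpm_context;			/* per-ASIC smu_11_0/13_0_dpm_context */
	void *golden_dpm_context;		/* freed in *_fini_smc_tables() */
	enum amd_dpm_forced_level dpm_level;	/* compared with AMD_DPM_FORCED_LEVEL_MANUAL */
	enum amd_dpm_forced_level requested_dpm_level;
	struct smu_power_state *dpm_current_power_state;
	struct smu_power_state *dpm_request_power_state;
};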
|