
Searched refs:smu (Results 1 – 25 of 37) sorted by relevance

/Linux-v5.4/drivers/gpu/drm/amd/powerplay/inc/
amdgpu_smu.h
391 int (*alloc_dpm_context)(struct smu_context *smu);
392 int (*store_powerplay_table)(struct smu_context *smu);
393 int (*check_powerplay_table)(struct smu_context *smu);
394 int (*append_powerplay_table)(struct smu_context *smu);
395 int (*get_smu_msg_index)(struct smu_context *smu, uint32_t index);
396 int (*get_smu_clk_index)(struct smu_context *smu, uint32_t index);
397 int (*get_smu_feature_index)(struct smu_context *smu, uint32_t index);
398 int (*get_smu_table_index)(struct smu_context *smu, uint32_t index);
399 int (*get_smu_power_index)(struct smu_context *smu, uint32_t index);
400 int (*get_workload_type)(struct smu_context *smu, enum PP_SMC_POWER_PROFILE profile);
[all …]
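
The hits above are callback slots in the per-ASIC function table declared by amdgpu_smu.h; the ASIC backends further down this listing (navi10_ppt.c, vega20_ppt.c, arcturus_ppt.c, renoir_ppt.c) fill such a table and install it on the smu_context through their *_set_ppt_funcs() hooks. A minimal stand-alone sketch of that pattern, using illustrative names (pptable_funcs_sketch, my_asic_*) rather than the kernel's real definitions:

#include <stdint.h>

struct smu_context;                     /* opaque here; the real layout lives in amdgpu_smu.h */

/* Illustrative subset of the callback table suggested by the hits above. */
struct pptable_funcs_sketch {
	int (*alloc_dpm_context)(struct smu_context *smu);
	int (*check_powerplay_table)(struct smu_context *smu);
	int (*get_smu_msg_index)(struct smu_context *smu, uint32_t index);
};

static int my_asic_alloc_dpm_context(struct smu_context *smu)
{
	(void)smu;                      /* a real backend would allocate its DPM state here */
	return 0;
}

/* One table per ASIC; members left unset stay NULL and mean "not implemented". */
static const struct pptable_funcs_sketch my_asic_ppt_funcs = {
	.alloc_dpm_context = my_asic_alloc_dpm_context,
};

/* What a *_set_ppt_funcs() style hook amounts to: point the context at the table. */
static void my_asic_set_ppt_funcs_sketch(const struct pptable_funcs_sketch **slot)
{
	*slot = &my_asic_ppt_funcs;
}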
smu_v12_0.h
40 void smu_v12_0_set_smu_funcs(struct smu_context *smu);
smu_v11_0.h
133 void smu_v11_0_set_smu_funcs(struct smu_context *smu);
/Linux-v5.4/drivers/gpu/drm/amd/powerplay/
amdgpu_smu.c
40 const char *smu_get_message_name(struct smu_context *smu, enum smu_message_type type) in smu_get_message_name() argument
53 const char *smu_get_feature_name(struct smu_context *smu, enum smu_feature_mask feature) in smu_get_feature_name() argument
60 size_t smu_sys_get_pp_feature_mask(struct smu_context *smu, char *buf) in smu_sys_get_pp_feature_mask() argument
70 ret = smu_feature_get_enabled_mask(smu, feature_mask, 2); in smu_sys_get_pp_feature_mask()
78 feature_index = smu_feature_get_index(smu, i); in smu_sys_get_pp_feature_mask()
88 smu_get_feature_name(smu, sort_feature[i]), in smu_sys_get_pp_feature_mask()
90 !!smu_feature_is_enabled(smu, sort_feature[i]) ? in smu_sys_get_pp_feature_mask()
98 int smu_sys_set_pp_feature_mask(struct smu_context *smu, uint64_t new_mask) in smu_sys_set_pp_feature_mask() argument
106 ret = smu_feature_get_enabled_mask(smu, feature_mask, 2); in smu_sys_set_pp_feature_mask()
116 ret = smu_feature_update_enable_state(smu, feature_2_enabled, true); in smu_sys_set_pp_feature_mask()
[all …]
smu_v11_0.c
56 static int smu_v11_0_send_msg_without_waiting(struct smu_context *smu, in smu_v11_0_send_msg_without_waiting() argument
59 struct amdgpu_device *adev = smu->adev; in smu_v11_0_send_msg_without_waiting()
64 static int smu_v11_0_read_arg(struct smu_context *smu, uint32_t *arg) in smu_v11_0_read_arg() argument
66 struct amdgpu_device *adev = smu->adev; in smu_v11_0_read_arg()
72 static int smu_v11_0_wait_for_response(struct smu_context *smu) in smu_v11_0_wait_for_response() argument
74 struct amdgpu_device *adev = smu->adev; in smu_v11_0_wait_for_response()
91 static int smu_v11_0_send_msg(struct smu_context *smu, uint16_t msg) in smu_v11_0_send_msg() argument
93 struct amdgpu_device *adev = smu->adev; in smu_v11_0_send_msg()
96 index = smu_msg_get_index(smu, msg); in smu_v11_0_send_msg()
100 smu_v11_0_wait_for_response(smu); in smu_v11_0_send_msg()
[all …]
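
These smu_v11_0.c hits (and the near-identical smu_v12_0.c ones below) show the driver's request/acknowledge mailbox: translate the generic message into an ASIC-specific index via smu_msg_get_index(), wait for any previous request to drain, then write the message and poll the response register for the acknowledgment. A simplified user-space sketch of that flow, with plain variables standing in for the MP1 mailbox registers and the clear-the-ack step assumed rather than taken from the hits:

#include <stdint.h>

static volatile uint32_t msg_reg;    /* stand-in for the message register  */
static volatile uint32_t resp_reg;   /* stand-in for the response register */

static int sketch_wait_for_response(void)
{
	/* the real code polls the register with a timeout and decodes the status */
	for (int i = 0; i < 1000; i++)
		if (resp_reg != 0)
			return resp_reg == 1 ? 0 : -1;
	return -1;                   /* timed out */
}

static int sketch_send_msg(uint16_t index)
{
	sketch_wait_for_response();  /* let any previous request drain first */
	resp_reg = 0;                /* clear the acknowledgment */
	msg_reg = index;             /* fire the request; firmware would set resp_reg when done */
	return sketch_wait_for_response();
}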
smu_v12_0.c
44 static int smu_v12_0_send_msg_without_waiting(struct smu_context *smu, in smu_v12_0_send_msg_without_waiting() argument
47 struct amdgpu_device *adev = smu->adev; in smu_v12_0_send_msg_without_waiting()
53 static int smu_v12_0_read_arg(struct smu_context *smu, uint32_t *arg) in smu_v12_0_read_arg() argument
55 struct amdgpu_device *adev = smu->adev; in smu_v12_0_read_arg()
61 static int smu_v12_0_wait_for_response(struct smu_context *smu) in smu_v12_0_wait_for_response() argument
63 struct amdgpu_device *adev = smu->adev; in smu_v12_0_wait_for_response()
80 static int smu_v12_0_send_msg(struct smu_context *smu, uint16_t msg) in smu_v12_0_send_msg() argument
82 struct amdgpu_device *adev = smu->adev; in smu_v12_0_send_msg()
85 index = smu_msg_get_index(smu, msg); in smu_v12_0_send_msg()
89 smu_v12_0_wait_for_response(smu); in smu_v12_0_send_msg()
[all …]
navi10_ppt.c
288 static int navi10_get_workload_type(struct smu_context *smu, enum PP_SMC_POWER_PROFILE profile) in navi10_get_workload_type() argument
303 static bool is_asic_secure(struct smu_context *smu) in is_asic_secure() argument
305 struct amdgpu_device *adev = smu->adev; in is_asic_secure()
319 navi10_get_allowed_feature_mask(struct smu_context *smu, in navi10_get_allowed_feature_mask() argument
322 struct amdgpu_device *adev = smu->adev; in navi10_get_allowed_feature_mask()
365 if (smu->adev->pg_flags & AMD_PG_SUPPORT_MMHUB) in navi10_get_allowed_feature_mask()
368 if (smu->adev->pg_flags & AMD_PG_SUPPORT_ATHUB) in navi10_get_allowed_feature_mask()
371 if (smu->adev->pg_flags & AMD_PG_SUPPORT_VCN) in navi10_get_allowed_feature_mask()
376 if (is_asic_secure(smu)) { in navi10_get_allowed_feature_mask()
392 static int navi10_check_powerplay_table(struct smu_context *smu) in navi10_check_powerplay_table() argument
[all …]
vega20_ppt.c
301 static int vega20_get_workload_type(struct smu_context *smu, enum PP_SMC_POWER_PROFILE profile) in vega20_get_workload_type() argument
316 static int vega20_tables_init(struct smu_context *smu, struct smu_table *tables) in vega20_tables_init() argument
318 struct smu_table_context *smu_table = &smu->smu_table; in vega20_tables_init()
342 static int vega20_allocate_dpm_context(struct smu_context *smu) in vega20_allocate_dpm_context() argument
344 struct smu_dpm_context *smu_dpm = &smu->smu_dpm; in vega20_allocate_dpm_context()
377 static int vega20_setup_od8_information(struct smu_context *smu) in vega20_setup_od8_information() argument
380 struct smu_table_context *table_context = &smu->smu_table; in vega20_setup_od8_information()
381 struct vega20_od8_settings *od8_settings = (struct vega20_od8_settings *)smu->od_settings; in vega20_setup_od8_information()
453 static int vega20_store_powerplay_table(struct smu_context *smu) in vega20_store_powerplay_table() argument
456 struct smu_table_context *table_context = &smu->smu_table; in vega20_store_powerplay_table()
[all …]
arcturus_ppt.c
268 static int arcturus_get_workload_type(struct smu_context *smu, enum PP_SMC_POWER_PROFILE profile) in arcturus_get_workload_type() argument
284 static int arcturus_tables_init(struct smu_context *smu, struct smu_table *tables) in arcturus_tables_init() argument
286 struct smu_table_context *smu_table = &smu->smu_table; in arcturus_tables_init()
305 static int arcturus_allocate_dpm_context(struct smu_context *smu) in arcturus_allocate_dpm_context() argument
307 struct smu_dpm_context *smu_dpm = &smu->smu_dpm; in arcturus_allocate_dpm_context()
341 arcturus_get_allowed_feature_mask(struct smu_context *smu, in arcturus_get_allowed_feature_mask() argument
354 arcturus_set_single_dpm_table(struct smu_context *smu, in arcturus_set_single_dpm_table() argument
361 ret = smu_send_smc_msg_with_param(smu, in arcturus_set_single_dpm_table()
369 smu_read_smc_arg(smu, &num_of_levels); in arcturus_set_single_dpm_table()
377 ret = smu_send_smc_msg_with_param(smu, in arcturus_set_single_dpm_table()
[all …]
renoir_ppt.c
141 static int renoir_tables_init(struct smu_context *smu, struct smu_table *tables) in renoir_tables_init() argument
143 struct smu_table_context *smu_table = &smu->smu_table; in renoir_tables_init()
163 static int renoir_get_dpm_uclk_limited(struct smu_context *smu, uint32_t *clock, bool max) in renoir_get_dpm_uclk_limited() argument
166 DpmClocks_t *table = smu->smu_table.clocks_table; in renoir_get_dpm_uclk_limited()
180 static int renoir_print_clk_levels(struct smu_context *smu, in renoir_print_clk_levels() argument
185 DpmClocks_t *clk_table = smu->smu_table.clocks_table; in renoir_print_clk_levels()
191 ret = smu_update_table(smu, SMU_TABLE_SMU_METRICS, 0, in renoir_print_clk_levels()
201 ret = smu_get_dpm_freq_range(smu, SMU_GFXCLK, &min, &max); in renoir_print_clk_levels()
258 void renoir_set_ppt_funcs(struct smu_context *smu) in renoir_set_ppt_funcs() argument
260 struct smu_table_context *smu_table = &smu->smu_table; in renoir_set_ppt_funcs()
[all …]
navi10_ppt.h
30 extern void navi10_set_ppt_funcs(struct smu_context *smu);
renoir_ppt.h
26 extern void renoir_set_ppt_funcs(struct smu_context *smu);
arcturus_ppt.h
70 extern void arcturus_set_ppt_funcs(struct smu_context *smu);
vega20_ppt.h
177 extern void vega20_set_ppt_funcs(struct smu_context *smu);
/Linux-v5.4/drivers/macintosh/
smu.c
99 static struct smu_device *smu; variable
115 if (list_empty(&smu->cmd_list)) in smu_start_cmd()
119 cmd = list_entry(smu->cmd_list.next, struct smu_cmd, link); in smu_start_cmd()
120 smu->cmd_cur = cmd; in smu_start_cmd()
128 smu->cmd_buf->cmd = cmd->cmd; in smu_start_cmd()
129 smu->cmd_buf->length = cmd->data_len; in smu_start_cmd()
130 memcpy(smu->cmd_buf->data, cmd->data_buf, cmd->data_len); in smu_start_cmd()
133 faddr = (unsigned long)smu->cmd_buf; in smu_start_cmd()
134 fend = faddr + smu->cmd_buf->length + 2; in smu_start_cmd()
147 if (smu->broken_nap) in smu_start_cmd()
[all …]
windfarm_smu_controls.c
258 struct device_node *smu, *fans, *fan; in smu_controls_init() local
263 smu = of_find_node_by_type(NULL, "smu"); in smu_controls_init()
264 if (smu == NULL) in smu_controls_init()
268 for (fans = NULL; (fans = of_get_next_child(smu, fans)) != NULL;) in smu_controls_init()
288 for (fans = NULL; (fans = of_get_next_child(smu, fans)) != NULL;) in smu_controls_init()
304 of_node_put(smu); in smu_controls_init()
windfarm_smu_sensors.c
410 struct device_node *smu, *sensors, *s; in smu_sensors_init() local
419 smu = of_find_node_by_type(NULL, "smu"); in smu_sensors_init()
420 if (smu == NULL) in smu_sensors_init()
425 (sensors = of_get_next_child(smu, sensors)) != NULL;) in smu_sensors_init()
429 of_node_put(smu); in smu_sensors_init()
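
Both windfarm files follow the same device-tree probe pattern: look up the "smu" node by device type, iterate its children, and drop the node reference when finished. A short kernel-only illustration of that pattern using the OF helpers visible in the hits (of_find_node_by_type, of_get_next_child, of_node_put); this is a sketch, not a complete driver:

#include <linux/errno.h>
#include <linux/of.h>

static int smu_children_walk_sketch(void)
{
	struct device_node *smu, *child;
	int count = 0;

	smu = of_find_node_by_type(NULL, "smu");
	if (!smu)
		return -ENODEV;

	/* of_get_next_child() releases the previous child's reference for us */
	for (child = NULL; (child = of_get_next_child(smu, child)) != NULL;)
		count++;

	of_node_put(smu);            /* balance of_find_node_by_type() */
	return count;
}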
/Linux-v5.4/drivers/gpu/drm/amd/display/amdgpu_dm/
amdgpu_dm_pp_smu.c
43 struct smu_context *smu = &adev->smu; in dm_pp_apply_display_requirements() local
110 smu_display_configuration_change(smu, in dm_pp_apply_display_requirements()
348 } else if (adev->smu.funcs && adev->smu.funcs->get_clock_by_type) { in dm_pp_get_clock_levels_by_type()
349 if (smu_get_clock_by_type(&adev->smu, in dm_pp_get_clock_levels_by_type()
368 } else if (adev->smu.funcs && adev->smu.funcs->get_max_high_clocks) { in dm_pp_get_clock_levels_by_type()
369 if (smu_get_max_high_clocks(&adev->smu, &validation_clks)) { in dm_pp_get_clock_levels_by_type()
433 } else if (adev->smu.ppt_funcs && adev->smu.ppt_funcs->get_clock_by_type_with_latency) { in dm_pp_get_clock_levels_by_type_with_latency()
434 if (smu_get_clock_by_type_with_latency(&adev->smu, in dm_pp_get_clock_levels_by_type_with_latency()
463 } else if (adev->smu.ppt_funcs && adev->smu.ppt_funcs->get_clock_by_type_with_voltage) { in dm_pp_get_clock_levels_by_type_with_voltage()
464 if (smu_get_clock_by_type_with_voltage(&adev->smu, in dm_pp_get_clock_levels_by_type_with_voltage()
[all …]
/Linux-v5.4/arch/arm/mach-shmobile/
smp-emev2.c
32 void __iomem *smu; in emev2_smp_prepare_cpus() local
35 smu = ioremap(EMEV2_SMU_BASE, PAGE_SIZE); in emev2_smp_prepare_cpus()
36 if (smu) { in emev2_smp_prepare_cpus()
37 iowrite32(__pa(shmobile_boot_vector), smu + SMU_GENERAL_REG0); in emev2_smp_prepare_cpus()
38 iounmap(smu); in emev2_smp_prepare_cpus()
/Linux-v5.4/arch/arm/boot/dts/
emev2.dtsi
61 compatible = "renesas,emev2-smu";
72 compatible = "renesas,emev2-smu-clkdiv";
78 compatible = "renesas,emev2-smu-gclk";
84 compatible = "renesas,emev2-smu-clkdiv";
90 compatible = "renesas,emev2-smu-gclk";
103 compatible = "renesas,emev2-smu-clkdiv";
109 compatible = "renesas,emev2-smu-clkdiv";
115 compatible = "renesas,emev2-smu-clkdiv";
121 compatible = "renesas,emev2-smu-clkdiv";
127 compatible = "renesas,emev2-smu-gclk";
[all …]
/Linux-v5.4/Documentation/devicetree/bindings/clock/
renesas,emev2-smu.txt
10 - compatible: Should be "renesas,emev2-smu"
21 - compatible: Should be "renesas,emev2-smu-clkdiv"
32 - compatible: Should be "renesas,emev2-smu-gclk"
40 compatible = "renesas,emev2-smu-clkdiv";
47 compatible = "renesas,emev2-smu-gclk";
68 smu@e0110000 {
69 compatible = "renesas,emev2-smu";
87 compatible = "renesas,emev2-smu-clkdiv";
93 compatible = "renesas,emev2-smu-gclk";
/Linux-v5.4/drivers/mtd/nand/raw/atmel/
pmecc.c
175 s16 *smu; member
374 user->smu = user->lmu + (req->ecc.strength + 1); in atmel_pmecc_create_user()
375 user->mu = (s32 *)PTR_ALIGN(user->smu + in atmel_pmecc_create_user()
502 s16 *smu = user->smu; in atmel_pmecc_get_sigma() local
516 memset(smu, 0, sizeof(s16) * num); in atmel_pmecc_get_sigma()
517 smu[0] = 1; in atmel_pmecc_get_sigma()
530 memset(&smu[num], 0, sizeof(s16) * num); in atmel_pmecc_get_sigma()
531 smu[num] = 1; in atmel_pmecc_get_sigma()
542 memset(&smu[(strength + 1) * num], 0, sizeof(s16) * num); in atmel_pmecc_get_sigma()
559 smu[(strength + 1) * num + j] = in atmel_pmecc_get_sigma()
[all …]
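
In pmecc.c, smu holds the successive sigma (error locator) polynomials built by atmel_pmecc_get_sigma(): the hits show a flat s16 buffer used as a 2-D table whose rows are one polynomial each and whose row stride is num coefficients (smu[0] = 1, smu[num] = 1, memset over &smu[row * num]). A small sketch of that flattened indexing, with num as an illustrative row-stride parameter:

#include <stdint.h>
#include <string.h>

/* smu behaves like smu[row][coef] stored contiguously: row stride = num coefficients. */
static inline int16_t *smu_row(int16_t *smu, int num, int row)
{
	return &smu[row * num];
}

static void smu_init_first_rows(int16_t *smu, int num)
{
	/* clear the first two rows, then seed them as the hits above do */
	memset(smu, 0, sizeof(int16_t) * (size_t)num * 2);
	smu_row(smu, num, 0)[0] = 1;   /* initial sigma polynomial = 1 */
	smu_row(smu, num, 1)[0] = 1;   /* next iteration starts from 1 as well */
}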
/Linux-v5.4/drivers/gpu/drm/amd/amdgpu/
amdgpu_pm.c
108 ret = smu_read_sensor(&adev->smu, sensor, data, size); in amdgpu_dpm_read_sensor()
163 if (adev->smu.ppt_funcs->get_current_power_state) in amdgpu_get_dpm_state()
294 level = smu_get_performance_level(&adev->smu); in amdgpu_get_dpm_forced_performance_level()
365 current_level = smu_get_performance_level(&adev->smu); in amdgpu_set_dpm_forced_performance_level()
383 ret = smu_force_performance_level(&adev->smu, level); in amdgpu_set_dpm_forced_performance_level()
415 ret = smu_get_power_num_states(&adev->smu, &data); in amdgpu_get_pp_num_states()
439 struct smu_context *smu = &adev->smu; in amdgpu_get_pp_cur_state() local
444 pm = smu_get_current_power_state(smu); in amdgpu_get_pp_cur_state()
445 ret = smu_get_power_num_states(smu, &data); in amdgpu_get_pp_cur_state()
539 size = smu_sys_get_pp_table(&adev->smu, (void **)&table); in amdgpu_get_pp_table()
[all …]
nv.c
297 struct smu_context *smu = &adev->smu; in nv_asic_reset_method() local
299 if (smu_baco_is_support(smu)) in nv_asic_reset_method()
317 struct smu_context *smu = &adev->smu; in nv_asic_reset() local
322 ret = smu_baco_reset(smu); in nv_asic_reset()
amdgpu_dpm.h
302 ((adev)->smu.ppt_funcs->get_current_power_state(&((adev)->smu)))
305 ((adev)->smu.ppt_funcs->set_power_state(&((adev)->smu)))
