Home
last modified time | relevance | path

Searched refs:smu_dpm (Results 1 – 11 of 11) sorted by relevance

/linux/drivers/gpu/drm/amd/pm/swsmu/smu13/
aldebaran_ppt.c:243 struct smu_dpm_context *smu_dpm = &smu->smu_dpm; in aldebaran_allocate_dpm_context() local
247 if (!smu_dpm->dpm_context) in aldebaran_allocate_dpm_context()
253 if (!smu_dpm->dpm_current_power_state) in aldebaran_allocate_dpm_context()
258 if (!smu_dpm->dpm_request_power_state) in aldebaran_allocate_dpm_context()
492 smu->smu_dpm.dpm_context; in aldebaran_populate_umd_state_clk()
730 struct smu_dpm_context *smu_dpm = &smu->smu_dpm; in aldebaran_print_clk_levels() local
743 dpm_context = smu_dpm->dpm_context; in aldebaran_print_clk_levels()
932 smu->smu_dpm.dpm_context; in aldebaran_upload_dpm_level()
1269 struct smu_dpm_context *smu_dpm = &(smu->smu_dpm); in aldebaran_set_performance_level() local
1305 struct smu_dpm_context *smu_dpm = &(smu->smu_dpm); in aldebaran_set_soft_freq_limited_range() local
[all …]
smu_v13_0.c:421 struct smu_dpm_context *smu_dpm = &smu->smu_dpm; in smu_v13_0_fini_smc_tables() local
442 kfree(smu_dpm->dpm_context); in smu_v13_0_fini_smc_tables()
443 kfree(smu_dpm->golden_dpm_context); in smu_v13_0_fini_smc_tables()
444 kfree(smu_dpm->dpm_current_power_state); in smu_v13_0_fini_smc_tables()
445 kfree(smu_dpm->dpm_request_power_state); in smu_v13_0_fini_smc_tables()
446 smu_dpm->dpm_context = NULL; in smu_v13_0_fini_smc_tables()
447 smu_dpm->golden_dpm_context = NULL; in smu_v13_0_fini_smc_tables()
448 smu_dpm->dpm_context_size = 0; in smu_v13_0_fini_smc_tables()
449 smu_dpm->dpm_current_power_state = NULL; in smu_v13_0_fini_smc_tables()
450 smu_dpm->dpm_request_power_state = NULL; in smu_v13_0_fini_smc_tables()
[all …]
yellow_carp_ppt.c:600 struct smu_dpm_context *smu_dpm = &(smu->smu_dpm); in yellow_carp_od_edit_dpm_table() local
604 if (smu_dpm->dpm_level != AMD_DPM_FORCED_LEVEL_MANUAL) in yellow_carp_od_edit_dpm_table()
/linux/drivers/gpu/drm/amd/pm/swsmu/smu11/
smu_v11_0.c:457 struct smu_dpm_context *smu_dpm = &smu->smu_dpm; in smu_v11_0_fini_smc_tables() local
482 kfree(smu_dpm->dpm_context); in smu_v11_0_fini_smc_tables()
483 kfree(smu_dpm->golden_dpm_context); in smu_v11_0_fini_smc_tables()
484 kfree(smu_dpm->dpm_current_power_state); in smu_v11_0_fini_smc_tables()
485 kfree(smu_dpm->dpm_request_power_state); in smu_v11_0_fini_smc_tables()
486 smu_dpm->dpm_context = NULL; in smu_v11_0_fini_smc_tables()
487 smu_dpm->golden_dpm_context = NULL; in smu_v11_0_fini_smc_tables()
488 smu_dpm->dpm_context_size = 0; in smu_v11_0_fini_smc_tables()
489 smu_dpm->dpm_current_power_state = NULL; in smu_v11_0_fini_smc_tables()
490 smu_dpm->dpm_request_power_state = NULL; in smu_v11_0_fini_smc_tables()
[all …]
arcturus_ppt.c:290 struct smu_dpm_context *smu_dpm = &smu->smu_dpm; in arcturus_allocate_dpm_context() local
292 smu_dpm->dpm_context = kzalloc(sizeof(struct smu_11_0_dpm_context), in arcturus_allocate_dpm_context()
294 if (!smu_dpm->dpm_context) in arcturus_allocate_dpm_context()
296 smu_dpm->dpm_context_size = sizeof(struct smu_11_0_dpm_context); in arcturus_allocate_dpm_context()
300 if (!smu_dpm->dpm_current_power_state) in arcturus_allocate_dpm_context()
305 if (!smu_dpm->dpm_request_power_state) in arcturus_allocate_dpm_context()
341 struct smu_11_0_dpm_context *dpm_context = smu->smu_dpm.dpm_context; in arcturus_set_default_dpm_table()
544 smu->smu_dpm.dpm_context; in arcturus_populate_umd_state_clk()
784 struct smu_dpm_context *smu_dpm = &smu->smu_dpm; in arcturus_print_clk_levels() local
795 dpm_context = smu_dpm->dpm_context; in arcturus_print_clk_levels()
[all …]
vangogh_ppt.c:442 struct smu_dpm_context *smu_dpm = &smu->smu_dpm; in vangogh_allocate_dpm_context() local
444 smu_dpm->dpm_context = kzalloc(sizeof(struct smu_11_0_dpm_context), in vangogh_allocate_dpm_context()
446 if (!smu_dpm->dpm_context) in vangogh_allocate_dpm_context()
449 smu_dpm->dpm_context_size = sizeof(struct smu_11_0_dpm_context); in vangogh_allocate_dpm_context()
581 struct smu_dpm_context *smu_dpm_ctx = &(smu->smu_dpm); in vangogh_print_legacy_clk_levels()
682 struct smu_dpm_context *smu_dpm_ctx = &(smu->smu_dpm); in vangogh_print_clk_levels()
1790 struct smu_dpm_context *smu_dpm_ctx = &(smu->smu_dpm); in vangogh_od_edit_dpm_table()
navi10_ppt.c:951 struct smu_dpm_context *smu_dpm = &smu->smu_dpm; in navi10_allocate_dpm_context() local
953 smu_dpm->dpm_context = kzalloc(sizeof(struct smu_11_0_dpm_context), in navi10_allocate_dpm_context()
955 if (!smu_dpm->dpm_context) in navi10_allocate_dpm_context()
958 smu_dpm->dpm_context_size = sizeof(struct smu_11_0_dpm_context); in navi10_allocate_dpm_context()
980 struct smu_11_0_dpm_context *dpm_context = smu->smu_dpm.dpm_context; in navi10_set_default_dpm_table()
1274 struct smu_dpm_context *smu_dpm = &smu->smu_dpm; in navi10_print_clk_levels() local
1275 struct smu_11_0_dpm_context *dpm_context = smu_dpm->dpm_context; in navi10_print_clk_levels()
1502 smu->smu_dpm.dpm_context; in navi10_populate_umd_state_clk()
2194 struct smu_11_0_dpm_context *dpm_context = smu->smu_dpm.dpm_context; in navi10_update_pcie_parameters()
sienna_cichlid_ppt.c:641 struct smu_dpm_context *smu_dpm = &smu->smu_dpm; in sienna_cichlid_allocate_dpm_context() local
643 smu_dpm->dpm_context = kzalloc(sizeof(struct smu_11_0_dpm_context), in sienna_cichlid_allocate_dpm_context()
645 if (!smu_dpm->dpm_context) in sienna_cichlid_allocate_dpm_context()
648 smu_dpm->dpm_context_size = sizeof(struct smu_11_0_dpm_context); in sienna_cichlid_allocate_dpm_context()
670 struct smu_11_0_dpm_context *dpm_context = smu->smu_dpm.dpm_context; in sienna_cichlid_set_default_dpm_table()
999 struct smu_dpm_context *smu_dpm = &smu->smu_dpm; in sienna_cichlid_print_clk_levels() local
1000 struct smu_11_0_dpm_context *dpm_context = smu_dpm->dpm_context; in sienna_cichlid_print_clk_levels()
1225 smu->smu_dpm.dpm_context; in sienna_cichlid_populate_umd_state_clk()
1825 struct smu_11_0_dpm_context *dpm_context = smu->smu_dpm.dpm_context; in sienna_cichlid_update_pcie_parameters()
/linux/drivers/gpu/drm/amd/pm/swsmu/
amdgpu_smu.c:366 struct smu_dpm_context *smu_dpm_ctx = &(smu->smu_dpm); in smu_restore_dpm_user_profile()
736 smu->smu_dpm.dpm_level, in smu_late_init()
1069 smu->smu_dpm.dpm_level = AMD_DPM_FORCED_LEVEL_AUTO; in smu_sw_init()
1070 smu->smu_dpm.requested_dpm_level = AMD_DPM_FORCED_LEVEL_AUTO; in smu_sw_init()
1668 struct smu_dpm_context *smu_dpm_ctx = &(smu->smu_dpm); in smu_enable_umd_pstate()
1730 struct smu_dpm_context *smu_dpm_ctx = &(smu->smu_dpm); in smu_adjust_power_state_dynamic()
1818 struct smu_dpm_context *smu_dpm = &smu->smu_dpm; in smu_handle_dpm_task() local
1829 struct smu_dpm_context *smu_dpm_ctx = &(smu->smu_dpm); in smu_switch_power_profile()
1865 struct smu_dpm_context *smu_dpm_ctx = &(smu->smu_dpm); in smu_get_performance_level()
1885 struct smu_dpm_context *smu_dpm_ctx = &(smu->smu_dpm); in smu_force_performance_level()
[all …]
/linux/drivers/gpu/drm/amd/pm/swsmu/smu12/
renoir_ppt.c:379 struct smu_dpm_context *smu_dpm_ctx = &(smu->smu_dpm); in renoir_od_edit_dpm_table()
491 struct smu_dpm_context *smu_dpm_ctx = &(smu->smu_dpm); in renoir_print_clk_levels()
613 struct smu_dpm_context *smu_dpm_ctx = &(smu->smu_dpm); in renoir_get_current_power_state()
/linux/drivers/gpu/drm/amd/pm/inc/
amdgpu_smu.h:495 struct smu_dpm_context smu_dpm; member

Completed in 47 milliseconds