/linux-6.6.21/drivers/gpu/drm/amd/pm/swsmu/ |
D | smu_internal.h |
      30  #define smu_ppt_funcs(intf, ret, smu, args...) \
      31      ((smu)->ppt_funcs ? ((smu)->ppt_funcs->intf ? (smu)->ppt_funcs->intf(smu, ##args) : ret) : -EINVAL)
      33  #define smu_init_microcode(smu) smu_ppt_funcs(init_microcode, 0, smu)
      34  #define smu_fini_microcode(smu) smu_ppt_funcs(fini_microcode, 0, smu)
      35  #define smu_init_smc_tables(smu) smu_ppt_funcs(init_smc_tables, 0, smu)
      36  #define smu_fini_smc_tables(smu) smu_ppt_funcs(fini_smc_tables, 0, smu)
      37  #define smu_init_power(smu) smu_ppt_funcs(init_power, 0, smu)
      38  #define smu_fini_power(smu) smu_ppt_funcs(fini_power, 0, smu)
      39  #define smu_setup_pptable(smu) smu_ppt_funcs(setup_pptable, 0, smu)
      40  #define smu_powergate_sdma(smu, gate) smu_ppt_funcs(powergate_sdma, 0, smu, gate)
    [all …]
|
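The smu_internal.h macro above is the driver's central dispatch trick: call the ASIC-specific ppt_funcs callback only if both the vtable and the entry are populated, fall back to a per-wrapper default when the entry is missing, and return -EINVAL when no vtable is installed at all. Below is a minimal standalone sketch of that optional-callback pattern; the demo_* names and the stub context are invented for illustration and are not part of the kernel source.

    #include <stdio.h>
    #include <errno.h>

    /* Hypothetical stand-ins for struct smu_context and its ppt_funcs vtable. */
    struct demo_funcs {
        int (*init_microcode)(void *ctx);
        int (*setup_pptable)(void *ctx);    /* intentionally left unimplemented */
    };

    struct demo_context {
        const struct demo_funcs *ppt_funcs;
    };

    /* Same shape as smu_ppt_funcs(): missing vtable -> -EINVAL, missing entry -> default. */
    #define demo_call(intf, ret, ctx, args...) \
        ((ctx)->ppt_funcs ? ((ctx)->ppt_funcs->intf ? \
                             (ctx)->ppt_funcs->intf(ctx, ##args) : (ret)) : -EINVAL)

    static int demo_init_microcode(void *ctx)
    {
        (void)ctx;
        printf("init_microcode called\n");
        return 0;
    }

    int main(void)
    {
        const struct demo_funcs funcs = { .init_microcode = demo_init_microcode };
        struct demo_context with_funcs = { .ppt_funcs = &funcs };
        struct demo_context no_funcs = { .ppt_funcs = NULL };

        printf("implemented:   %d\n", demo_call(init_microcode, 0, &with_funcs));
        printf("unimplemented: %d\n", demo_call(setup_pptable, 0, &with_funcs));
        printf("no vtable:     %d\n", demo_call(init_microcode, 0, &no_funcs));
        return 0;
    }

The GNU args.../##args variadic form is what lets one macro cover callbacks both with and without extra parameters, as the powergate_sdma wrapper shows.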
D | amdgpu_smu.c |
      60  static int smu_force_smuclk_levels(struct smu_context *smu,
      63  static int smu_handle_task(struct smu_context *smu,
      66  static int smu_reset(struct smu_context *smu);
      71  static int smu_set_gfx_cgpg(struct smu_context *smu, bool enabled);
      77  struct smu_context *smu = handle;    in smu_sys_get_pp_feature_mask()
      79  if (!smu->pm_enabled || !smu->adev->pm.dpm_enabled)    in smu_sys_get_pp_feature_mask()
      82  return smu_get_pp_feature_mask(smu, buf);    in smu_sys_get_pp_feature_mask()
      88  struct smu_context *smu = handle;    in smu_sys_set_pp_feature_mask()
      90  if (!smu->pm_enabled || !smu->adev->pm.dpm_enabled)    in smu_sys_set_pp_feature_mask()
      93  return smu_set_pp_feature_mask(smu, new_mask);    in smu_sys_set_pp_feature_mask()
    [all …]
|
D | smu_cmn.c |
      50  #define smu_cmn_call_asic_func(intf, smu, args...) \
      51      ((smu)->ppt_funcs ? ((smu)->ppt_funcs->intf ? \
      52      (smu)->ppt_funcs->intf(smu, ##args) : \
      56  static const char *smu_get_message_name(struct smu_context *smu,
      65  static void smu_cmn_read_arg(struct smu_context *smu,
      68  struct amdgpu_device *adev = smu->adev;    in smu_cmn_read_arg()
      70  *arg = RREG32(smu->param_reg);    in smu_cmn_read_arg()
     109  static u32 __smu_cmn_poll_stat(struct smu_context *smu)
     111  struct amdgpu_device *adev = smu->adev;    in __smu_cmn_poll_stat()
     116  reg = RREG32(smu->resp_reg);    in __smu_cmn_poll_stat()
    [all …]
|
D | smu_cmn.h |
      41  int smu_cmn_send_msg_without_waiting(struct smu_context *smu,
      44  int smu_cmn_send_smc_msg_with_param(struct smu_context *smu,
      49  int smu_cmn_send_smc_msg(struct smu_context *smu,
      53  int smu_cmn_send_debug_smc_msg(struct smu_context *smu,
      56  int smu_cmn_send_debug_smc_msg_with_param(struct smu_context *smu,
      59  int smu_cmn_wait_for_response(struct smu_context *smu);
      61  int smu_cmn_to_asic_specific_index(struct smu_context *smu,
      65  int smu_cmn_feature_is_supported(struct smu_context *smu,
      68  int smu_cmn_feature_is_enabled(struct smu_context *smu,
      71  bool smu_cmn_clk_dpm_is_enabled(struct smu_context *smu,
    [all …]
|
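The smu_cmn.c and smu_cmn.h entries above form the message mailbox between the driver and the SMU firmware: an optional parameter register is written, the message register is kicked, the response register is polled (__smu_cmn_poll_stat()), and the result is read back from the parameter register (smu_cmn_read_arg()). The sketch below models that request/response handshake in plain userspace C; the fake registers, the demo_* helpers and the pretend firmware step are illustrative assumptions, not the real RREG32-based implementation.

    #include <stdio.h>
    #include <stdint.h>
    #include <errno.h>

    /* Fake MMIO registers standing in for smu->msg_reg / param_reg / resp_reg. */
    static uint32_t msg_reg, param_reg, resp_reg;

    #define RESP_OK   0x1    /* firmware accepted the message */
    #define RESP_BUSY 0x0    /* no response yet               */

    /* Pretend firmware: echoes param + 1 back through the argument register. */
    static void fake_firmware_step(void)
    {
        if (msg_reg != 0) {
            param_reg = param_reg + 1;    /* "result" lands in the param/arg register */
            resp_reg = RESP_OK;
            msg_reg = 0;
        }
    }

    /* Same flow as smu_cmn_send_smc_msg_with_param() + smu_cmn_read_arg():
     * write param, write message, poll response, read argument back. */
    static int demo_send_msg_with_param(uint32_t msg, uint32_t param, uint32_t *read_arg)
    {
        int timeout = 100;

        resp_reg = RESP_BUSY;
        param_reg = param;
        msg_reg = msg;

        while (resp_reg == RESP_BUSY && timeout--)
            fake_firmware_step();    /* real code polls RREG32(smu->resp_reg) */

        if (resp_reg != RESP_OK)
            return -ETIMEDOUT;

        if (read_arg)
            *read_arg = param_reg;    /* smu_cmn_read_arg() reads smu->param_reg */
        return 0;
    }

    int main(void)
    {
        uint32_t arg = 0;
        int ret = demo_send_msg_with_param(0x2a, 100, &arg);

        printf("ret=%d arg=%u\n", ret, (unsigned)arg);
        return 0;
    }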
/linux-6.6.21/drivers/gpu/drm/amd/pm/swsmu/inc/ |
D | smu_v13_0.h |
     129  int smu_v13_0_init_microcode(struct smu_context *smu);
     131  void smu_v13_0_fini_microcode(struct smu_context *smu);
     133  int smu_v13_0_load_microcode(struct smu_context *smu);
     135  int smu_v13_0_init_smc_tables(struct smu_context *smu);
     137  int smu_v13_0_fini_smc_tables(struct smu_context *smu);
     139  int smu_v13_0_init_power(struct smu_context *smu);
     141  int smu_v13_0_fini_power(struct smu_context *smu);
     143  int smu_v13_0_check_fw_status(struct smu_context *smu);
     145  int smu_v13_0_setup_pptable(struct smu_context *smu);
     147  int smu_v13_0_get_vbios_bootup_values(struct smu_context *smu);
    [all …]
|
D | smu_v11_0.h |
     150  int smu_v11_0_init_microcode(struct smu_context *smu);
     152  void smu_v11_0_fini_microcode(struct smu_context *smu);
     154  int smu_v11_0_load_microcode(struct smu_context *smu);
     156  int smu_v11_0_init_smc_tables(struct smu_context *smu);
     158  int smu_v11_0_fini_smc_tables(struct smu_context *smu);
     160  int smu_v11_0_init_power(struct smu_context *smu);
     162  int smu_v11_0_fini_power(struct smu_context *smu);
     164  int smu_v11_0_check_fw_status(struct smu_context *smu);
     166  int smu_v11_0_setup_pptable(struct smu_context *smu);
     168  int smu_v11_0_get_vbios_bootup_values(struct smu_context *smu);
    [all …]
|
D | amdgpu_smu.h |
     579  int (*run_btc)(struct smu_context *smu);
     586  int (*get_allowed_feature_mask)(struct smu_context *smu, uint32_t *feature_mask, uint32_t num);
     593  enum amd_pm_state_type (*get_current_power_state)(struct smu_context *smu);
     599  int (*set_default_dpm_table)(struct smu_context *smu);
     601  int (*set_power_state)(struct smu_context *smu);
     607  int (*populate_umd_state_clk)(struct smu_context *smu);
     616  int (*print_clk_levels)(struct smu_context *smu, enum smu_clk_type clk_type, char *buf);
     629  …int (*emit_clk_levels)(struct smu_context *smu, enum smu_clk_type clk_type, char *buf, int *offset…
     637  int (*force_clk_levels)(struct smu_context *smu, enum smu_clk_type clk_type, uint32_t mask);
     645  int (*od_edit_dpm_table)(struct smu_context *smu,
    [all …]
|
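Most of the struct pptable_funcs callbacks listed above take only the smu context, but emit_clk_levels() also carries a char *buf plus an int *offset so successive emitters can append to the same sysfs buffer. A small standalone sketch of that append-at-offset convention follows; the clock names, levels and the demo_emit_clk_levels() helper are made up for illustration.

    #include <stdio.h>

    /* Append one clock's levels to buf at *offset, advancing the offset --
     * the same convention as the emit_clk_levels() callback. */
    static void demo_emit_clk_levels(const char *clk_name, const int *mhz, int count,
                                     char *buf, size_t size, int *offset)
    {
        int i;

        *offset += snprintf(buf + *offset, size - *offset, "%s:\n", clk_name);
        for (i = 0; i < count; i++)
            *offset += snprintf(buf + *offset, size - *offset,
                                "  %d: %dMhz\n", i, mhz[i]);
    }

    int main(void)
    {
        char buf[256];
        int offset = 0;
        const int sclk[] = { 300, 800, 1500 };
        const int mclk[] = { 400, 1000 };

        demo_emit_clk_levels("sclk", sclk, 3, buf, sizeof(buf), &offset);
        demo_emit_clk_levels("mclk", mclk, 2, buf, sizeof(buf), &offset);
        fputs(buf, stdout);
        return 0;
    }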
D | smu_v12_0.h |
      36  int smu_v12_0_check_fw_status(struct smu_context *smu);
      38  int smu_v12_0_check_fw_version(struct smu_context *smu);
      40  int smu_v12_0_powergate_sdma(struct smu_context *smu, bool gate);
      42  int smu_v12_0_powergate_vcn(struct smu_context *smu, bool gate);
      44  int smu_v12_0_powergate_jpeg(struct smu_context *smu, bool gate);
      46  int smu_v12_0_set_gfx_cgpg(struct smu_context *smu, bool enable);
      48  uint32_t smu_v12_0_get_gfxoff_status(struct smu_context *smu);
      50  int smu_v12_0_gfx_off_control(struct smu_context *smu, bool enable);
      52  int smu_v12_0_fini_smc_tables(struct smu_context *smu);
      54  int smu_v12_0_set_default_dpm_tables(struct smu_context *smu);
    [all …]
|
/linux-6.6.21/drivers/gpu/drm/amd/pm/swsmu/smu12/ |
D | smu_v12_0.c |
      58  int smu_v12_0_check_fw_status(struct smu_context *smu)
      60  struct amdgpu_device *adev = smu->adev;    in smu_v12_0_check_fw_status()
      73  int smu_v12_0_check_fw_version(struct smu_context *smu)
      75  struct amdgpu_device *adev = smu->adev;    in smu_v12_0_check_fw_version()
      80  ret = smu_cmn_get_smc_version(smu, &if_version, &smu_version);    in smu_v12_0_check_fw_version()
      88  if (smu->is_apu)    in smu_v12_0_check_fw_version()
      99  if (if_version != smu->smc_driver_if_version) {    in smu_v12_0_check_fw_version()
     100  dev_info(smu->adev->dev, "smu driver if version = 0x%08x, smu fw if version = 0x%08x, "    in smu_v12_0_check_fw_version()
     102  smu->smc_driver_if_version, if_version,    in smu_v12_0_check_fw_version()
     104  dev_info(smu->adev->dev, "SMU driver if version not matched\n");    in smu_v12_0_check_fw_version()
    [all …]
|
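smu_v12_0_check_fw_version() above queries the firmware for its interface version via smu_cmn_get_smc_version() and compares it against the version the driver was built for, logging both values on a mismatch. The sketch below condenses that comparison; the stubbed version numbers are invented, and treating a mismatch as fatal only for non-APUs is an assumption made for the demo rather than the exact kernel policy.

    #include <stdio.h>
    #include <stdint.h>
    #include <errno.h>

    /* Stand-in for smu_cmn_get_smc_version(): the real code asks the firmware. */
    static int demo_get_smc_version(uint32_t *if_version, uint32_t *smu_version)
    {
        *if_version = 0x00000012;    /* pretend value reported by firmware */
        *smu_version = 0x00401500;
        return 0;
    }

    /* Mirrors the shape of smu_v12_0_check_fw_version(): report a mismatch,
     * and (demo assumption) treat it as fatal only on non-APU parts. */
    static int demo_check_fw_version(uint32_t driver_if_version, int is_apu)
    {
        uint32_t if_version, smu_version;
        int ret;

        ret = demo_get_smc_version(&if_version, &smu_version);
        if (ret)
            return ret;

        if (if_version != driver_if_version) {
            printf("smu driver if version = 0x%08x, smu fw if version = 0x%08x, "
                   "smu fw version = 0x%08x\n",
                   (unsigned)driver_if_version, (unsigned)if_version, (unsigned)smu_version);
            printf("SMU driver if version not matched\n");
            if (!is_apu)
                return -EINVAL;
        }
        return 0;
    }

    int main(void)
    {
        printf("apu:  %d\n", demo_check_fw_version(0x00000013, 1));
        printf("dgpu: %d\n", demo_check_fw_version(0x00000013, 0));
        return 0;
    }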
D | renoir_ppt.c |
     156  static int renoir_init_smc_tables(struct smu_context *smu)
     158  struct smu_table_context *smu_table = &smu->smu_table;    in renoir_init_smc_tables()
     202  static int renoir_get_dpm_clk_limited(struct smu_context *smu, enum smu_clk_type clk_type,
     205  DpmClocks_t *clk_table = smu->smu_table.clocks_table;    in renoir_get_dpm_clk_limited()
     250  static int renoir_get_profiling_clk_mask(struct smu_context *smu,
     280  static int renoir_get_dpm_ultimate_freq(struct smu_context *smu,
     289  if (!smu_cmn_clk_dpm_is_enabled(smu, clk_type)) {    in renoir_get_dpm_ultimate_freq()
     293  clock_limit = smu->smu_table.boot_values.uclk;    in renoir_get_dpm_ultimate_freq()
     297  clock_limit = smu->smu_table.boot_values.gfxclk;    in renoir_get_dpm_ultimate_freq()
     300  clock_limit = smu->smu_table.boot_values.socclk;    in renoir_get_dpm_ultimate_freq()
    [all …]
|
/linux-6.6.21/drivers/gpu/drm/amd/pm/swsmu/smu13/ |
D | smu_v13_0.c |
      90  int smu_v13_0_init_microcode(struct smu_context *smu)
      92  struct amdgpu_device *adev = smu->adev;    in smu_v13_0_init_microcode()
     131  void smu_v13_0_fini_microcode(struct smu_context *smu)
     133  struct amdgpu_device *adev = smu->adev;    in smu_v13_0_fini_microcode()
     139  int smu_v13_0_load_microcode(struct smu_context *smu)
     142  struct amdgpu_device *adev = smu->adev;    in smu_v13_0_load_microcode()
     181  int smu_v13_0_init_pptable_microcode(struct smu_context *smu)
     183  struct amdgpu_device *adev = smu->adev;    in smu_v13_0_init_pptable_microcode()
     209  pptable_id = smu->smu_table.boot_values.pp_table_id;    in smu_v13_0_init_pptable_microcode()
     216  ret = smu_v13_0_get_pptable_from_firmware(smu, &table, &size, pptable_id);    in smu_v13_0_init_pptable_microcode()
    [all …]
|
D | smu_v13_0_5_ppt.c |
     124  static int smu_v13_0_5_init_smc_tables(struct smu_context *smu)
     126  struct smu_table_context *smu_table = &smu->smu_table;    in smu_v13_0_5_init_smc_tables()
     166  static int smu_v13_0_5_fini_smc_tables(struct smu_context *smu)
     168  struct smu_table_context *smu_table = &smu->smu_table;    in smu_v13_0_5_fini_smc_tables()
     185  static int smu_v13_0_5_system_features_control(struct smu_context *smu, bool en)
     187  struct amdgpu_device *adev = smu->adev;    in smu_v13_0_5_system_features_control()
     191  ret = smu_cmn_send_smc_msg(smu, SMU_MSG_PrepareMp1ForUnload, NULL);    in smu_v13_0_5_system_features_control()
     196  static int smu_v13_0_5_dpm_set_vcn_enable(struct smu_context *smu, bool enable)
     202  ret = smu_cmn_send_smc_msg_with_param(smu, SMU_MSG_PowerUpVcn,    in smu_v13_0_5_dpm_set_vcn_enable()
     205  ret = smu_cmn_send_smc_msg_with_param(smu, SMU_MSG_PowerDownVcn,    in smu_v13_0_5_dpm_set_vcn_enable()
    [all …]
|
D | yellow_carp_ppt.c |
     151  static int yellow_carp_init_smc_tables(struct smu_context *smu)
     153  struct smu_table_context *smu_table = &smu->smu_table;    in yellow_carp_init_smc_tables()
     193  static int yellow_carp_fini_smc_tables(struct smu_context *smu)
     195  struct smu_table_context *smu_table = &smu->smu_table;    in yellow_carp_fini_smc_tables()
     212  static int yellow_carp_system_features_control(struct smu_context *smu, bool en)
     214  struct amdgpu_device *adev = smu->adev;    in yellow_carp_system_features_control()
     218  ret = smu_cmn_send_smc_msg(smu, SMU_MSG_PrepareMp1ForUnload, NULL);    in yellow_carp_system_features_control()
     223  static int yellow_carp_dpm_set_vcn_enable(struct smu_context *smu, bool enable)
     229  ret = smu_cmn_send_smc_msg_with_param(smu, SMU_MSG_PowerUpVcn,    in yellow_carp_dpm_set_vcn_enable()
     232  ret = smu_cmn_send_smc_msg_with_param(smu, SMU_MSG_PowerDownVcn,    in yellow_carp_dpm_set_vcn_enable()
    [all …]
|
D | smu_v13_0_4_ppt.c |
     150  static int smu_v13_0_4_init_smc_tables(struct smu_context *smu)
     152  struct smu_table_context *smu_table = &smu->smu_table;    in smu_v13_0_4_init_smc_tables()
     192  static int smu_v13_0_4_fini_smc_tables(struct smu_context *smu)
     194  struct smu_table_context *smu_table = &smu->smu_table;    in smu_v13_0_4_fini_smc_tables()
     211  static bool smu_v13_0_4_is_dpm_running(struct smu_context *smu)
     216  ret = smu_cmn_get_enabled_mask(smu, &feature_enabled);    in smu_v13_0_4_is_dpm_running()
     224  static int smu_v13_0_4_system_features_control(struct smu_context *smu, bool en)
     226  struct amdgpu_device *adev = smu->adev;    in smu_v13_0_4_system_features_control()
     230  ret = smu_cmn_send_smc_msg(smu, SMU_MSG_PrepareMp1ForUnload, NULL);    in smu_v13_0_4_system_features_control()
     235  static ssize_t smu_v13_0_4_get_gpu_metrics(struct smu_context *smu,
    [all …]
|
D | aldebaran_ppt.c |
     227  static int aldebaran_tables_init(struct smu_context *smu)
     229  struct smu_table_context *smu_table = &smu->smu_table;    in aldebaran_tables_init()
     269  static int aldebaran_allocate_dpm_context(struct smu_context *smu)
     271  struct smu_dpm_context *smu_dpm = &smu->smu_dpm;    in aldebaran_allocate_dpm_context()
     282  static int aldebaran_init_smc_tables(struct smu_context *smu)
     286  ret = aldebaran_tables_init(smu);    in aldebaran_init_smc_tables()
     290  ret = aldebaran_allocate_dpm_context(smu);    in aldebaran_init_smc_tables()
     294  return smu_v13_0_init_smc_tables(smu);    in aldebaran_init_smc_tables()
     297  static int aldebaran_get_allowed_feature_mask(struct smu_context *smu,
     309  static int aldebaran_set_default_dpm_table(struct smu_context *smu)
    [all …]
|
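aldebaran_init_smc_tables() above layers three steps: ASIC-specific table setup, DPM context allocation, then the generation-common smu_v13_0_init_smc_tables(), bailing out on the first error. A bare-bones sketch of that early-return composition is shown below with stubbed demo_* functions standing in for the real steps; only the control flow is taken from the kernel code.

    #include <stdio.h>

    static int demo_tables_init(void)          { printf("tables_init\n");          return 0; }
    static int demo_allocate_dpm_context(void) { printf("allocate_dpm_context\n"); return 0; }
    static int demo_common_init_smc_tables(void) { printf("common init\n");        return 0; }

    /* Same structure as aldebaran_init_smc_tables(): stop at the first failure,
     * otherwise fall through to the generation-common initializer. */
    static int demo_init_smc_tables(void)
    {
        int ret;

        ret = demo_tables_init();
        if (ret)
            return ret;

        ret = demo_allocate_dpm_context();
        if (ret)
            return ret;

        return demo_common_init_smc_tables();
    }

    int main(void)
    {
        return demo_init_smc_tables();
    }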
D | smu_v13_0_0_ppt.c |
     288  smu_v13_0_0_get_allowed_feature_mask(struct smu_context *smu,
     291  struct amdgpu_device *adev = smu->adev;    in smu_v13_0_0_get_allowed_feature_mask()
     312  smu_cmn_get_smc_version(smu, NULL, &smu_version);    in smu_v13_0_0_get_allowed_feature_mask()
     337  static int smu_v13_0_0_check_powerplay_table(struct smu_context *smu)
     339  struct smu_table_context *table_context = &smu->smu_table;    in smu_v13_0_0_check_powerplay_table()
     342  struct smu_baco_context *smu_baco = &smu->smu_baco;    in smu_v13_0_0_check_powerplay_table()
     343  PPTable_t *pptable = smu->smu_table.driver_pptable;    in smu_v13_0_0_check_powerplay_table()
     345  PPTable_t *pptable = smu->smu_table.driver_pptable;    in smu_v13_0_0_check_powerplay_table()
     353  smu->dc_controlled_by_gpio = true;    in smu_v13_0_0_check_powerplay_table()
     370  smu->od_enabled = false;    in smu_v13_0_0_check_powerplay_table()
    [all …]
|
D | smu_v13_0_6_ppt.c |
     218  static int smu_v13_0_6_tables_init(struct smu_context *smu)
     220  struct smu_table_context *smu_table = &smu->smu_table;    in smu_v13_0_6_tables_init()
     222  struct amdgpu_device *adev = smu->adev;    in smu_v13_0_6_tables_init()
     260  static int smu_v13_0_6_allocate_dpm_context(struct smu_context *smu)
     262  struct smu_dpm_context *smu_dpm = &smu->smu_dpm;    in smu_v13_0_6_allocate_dpm_context()
     273  static int smu_v13_0_6_init_smc_tables(struct smu_context *smu)
     277  ret = smu_v13_0_6_tables_init(smu);    in smu_v13_0_6_init_smc_tables()
     281  ret = smu_v13_0_6_allocate_dpm_context(smu);    in smu_v13_0_6_init_smc_tables()
     286  static int smu_v13_0_6_get_allowed_feature_mask(struct smu_context *smu,
     299  static int smu_v13_0_6_get_metrics_table(struct smu_context *smu,
    [all …]
|
D | smu_v13_0_7_ppt.c |
     257  smu_v13_0_7_get_allowed_feature_mask(struct smu_context *smu,
     260  struct amdgpu_device *adev = smu->adev;    in smu_v13_0_7_get_allowed_feature_mask()
     326  static int smu_v13_0_7_check_powerplay_table(struct smu_context *smu)
     328  struct smu_table_context *table_context = &smu->smu_table;    in smu_v13_0_7_check_powerplay_table()
     331  struct smu_baco_context *smu_baco = &smu->smu_baco;    in smu_v13_0_7_check_powerplay_table()
     342  smu->dc_controlled_by_gpio = true;    in smu_v13_0_7_check_powerplay_table()
     355  smu->od_enabled = false;    in smu_v13_0_7_check_powerplay_table()
     361  smu->od_settings = &powerplay_table->overdrive_table;    in smu_v13_0_7_check_powerplay_table()
     363  smu->od_enabled = false;    in smu_v13_0_7_check_powerplay_table()
     372  static int smu_v13_0_7_store_powerplay_table(struct smu_context *smu)
    [all …]
|
/linux-6.6.21/drivers/gpu/drm/amd/pm/swsmu/smu11/ |
D | smu_v11_0.c |
      82  static void smu_v11_0_poll_baco_exit(struct smu_context *smu)
      84  struct amdgpu_device *adev = smu->adev;    in smu_v11_0_poll_baco_exit()
      93  int smu_v11_0_init_microcode(struct smu_context *smu)
      95  struct amdgpu_device *adev = smu->adev;    in smu_v11_0_init_microcode()
     135  void smu_v11_0_fini_microcode(struct smu_context *smu)
     137  struct amdgpu_device *adev = smu->adev;    in smu_v11_0_fini_microcode()
     143  int smu_v11_0_load_microcode(struct smu_context *smu)
     145  struct amdgpu_device *adev = smu->adev;    in smu_v11_0_load_microcode()
     183  int smu_v11_0_check_fw_status(struct smu_context *smu)
     185  struct amdgpu_device *adev = smu->adev;    in smu_v11_0_check_fw_status()
    [all …]
|
D | sienna_cichlid_ppt.c |
      77  if (smu->adev->ip_versions[MP1_HWIP][0] == IP_VERSION(11, 0, 13))\
      78      (*member) = (smu->smu_table.driver_pptable + offsetof(PPTable_beige_goby_t, field));\
      80      (*member) = (smu->smu_table.driver_pptable + offsetof(PPTable_t, field));\
      92  static int get_table_size(struct smu_context *smu)
      94  if (smu->adev->ip_versions[MP1_HWIP][0] == IP_VERSION(11, 0, 13))    in get_table_size()
     275  sienna_cichlid_get_allowed_feature_mask(struct smu_context *smu,
     278  struct amdgpu_device *adev = smu->adev;    in sienna_cichlid_get_allowed_feature_mask()
     339  if (smu->adev->pg_flags & AMD_PG_SUPPORT_ATHUB)    in sienna_cichlid_get_allowed_feature_mask()
     342  if (smu->adev->pg_flags & AMD_PG_SUPPORT_MMHUB)    in sienna_cichlid_get_allowed_feature_mask()
     345  if (smu->adev->pg_flags & AMD_PG_SUPPORT_VCN ||    in sienna_cichlid_get_allowed_feature_mask()
    [all …]
|
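The sienna_cichlid macro fragment at file lines 77–80 selects between two PPTable layouts at runtime (PPTable_beige_goby_t for IP_VERSION(11, 0, 13), PPTable_t otherwise) and hands back a pointer into the raw driver_pptable blob computed with offsetof(). The standalone sketch below illustrates that select-layout-then-offsetof idiom; both table structs, the field name and the GET_MEMBER() macro are invented stand-ins, not the kernel definitions.

    #include <stdio.h>
    #include <stddef.h>
    #include <stdint.h>

    /* Two hypothetical pptable layouts that place the same field differently. */
    struct table_a { uint32_t pad[4]; uint32_t socket_power_limit; };
    struct table_b { uint32_t pad[9]; uint32_t socket_power_limit; };

    /* Same idea as GET_PPTABLE_MEMBER(): choose the layout by chip variant,
     * then hand back a pointer computed from the raw table blob + offsetof(). */
    #define GET_MEMBER(is_variant_b, blob, field, member)                       \
        do {                                                                    \
            if (is_variant_b)                                                   \
                *(member) = (void *)((char *)(blob) +                           \
                                     offsetof(struct table_b, field));          \
            else                                                                \
                *(member) = (void *)((char *)(blob) +                           \
                                     offsetof(struct table_a, field));          \
        } while (0)

    int main(void)
    {
        struct table_b raw = { .socket_power_limit = 250 };
        uint32_t *limit;

        GET_MEMBER(1, &raw, socket_power_limit, &limit);
        printf("socket_power_limit = %u\n", (unsigned)*limit);
        return 0;
    }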
D | vangogh_ppt.c |
     224  static int vangogh_tables_init(struct smu_context *smu)
     226  struct smu_table_context *smu_table = &smu->smu_table;    in vangogh_tables_init()
     232  ret = smu_cmn_get_smc_version(smu, &if_version, &smu_version);    in vangogh_tables_init()
     287  static int vangogh_get_legacy_smu_metrics_data(struct smu_context *smu,
     291  struct smu_table_context *smu_table = &smu->smu_table;    in vangogh_get_legacy_smu_metrics_data()
     295  ret = smu_cmn_get_metrics_table(smu,    in vangogh_get_legacy_smu_metrics_data()
     346  smu->cpu_core_num * sizeof(uint16_t));    in vangogh_get_legacy_smu_metrics_data()
     356  static int vangogh_get_smu_metrics_data(struct smu_context *smu,
     360  struct smu_table_context *smu_table = &smu->smu_table;    in vangogh_get_smu_metrics_data()
     364  ret = smu_cmn_get_metrics_table(smu,    in vangogh_get_smu_metrics_data()
    [all …]
|
D | arcturus_ppt.c |
     250  static int arcturus_tables_init(struct smu_context *smu)
     252  struct smu_table_context *smu_table = &smu->smu_table;    in arcturus_tables_init()
     286  static int arcturus_allocate_dpm_context(struct smu_context *smu)
     288  struct smu_dpm_context *smu_dpm = &smu->smu_dpm;    in arcturus_allocate_dpm_context()
     299  static int arcturus_init_smc_tables(struct smu_context *smu)
     303  ret = arcturus_tables_init(smu);    in arcturus_init_smc_tables()
     307  ret = arcturus_allocate_dpm_context(smu);    in arcturus_init_smc_tables()
     311  return smu_v11_0_init_smc_tables(smu);    in arcturus_init_smc_tables()
     315  arcturus_get_allowed_feature_mask(struct smu_context *smu,
     327  static int arcturus_set_default_dpm_table(struct smu_context *smu)
    [all …]
|
D | navi10_ppt.c |
     262  static bool is_asic_secure(struct smu_context *smu)
     264  struct amdgpu_device *adev = smu->adev;    in is_asic_secure()
     278  navi10_get_allowed_feature_mask(struct smu_context *smu,
     281  struct amdgpu_device *adev = smu->adev;    in navi10_get_allowed_feature_mask()
     328  if (smu->adev->pg_flags & AMD_PG_SUPPORT_MMHUB)    in navi10_get_allowed_feature_mask()
     331  if (smu->adev->pg_flags & AMD_PG_SUPPORT_ATHUB)    in navi10_get_allowed_feature_mask()
     334  if (smu->adev->pg_flags & AMD_PG_SUPPORT_VCN)    in navi10_get_allowed_feature_mask()
     337  if (smu->adev->pg_flags & AMD_PG_SUPPORT_JPEG)    in navi10_get_allowed_feature_mask()
     340  if (smu->dc_controlled_by_gpio)    in navi10_get_allowed_feature_mask()
     347  if (!(is_asic_secure(smu) &&    in navi10_get_allowed_feature_mask()
    [all …]
|
D | cyan_skillfish_ppt.c |
      87  static int cyan_skillfish_tables_init(struct smu_context *smu)
      89  struct smu_table_context *smu_table = &smu->smu_table;    in cyan_skillfish_tables_init()
     117  static int cyan_skillfish_init_smc_tables(struct smu_context *smu)
     121  ret = cyan_skillfish_tables_init(smu);    in cyan_skillfish_init_smc_tables()
     125  return smu_v11_0_init_smc_tables(smu);    in cyan_skillfish_init_smc_tables()
     129  cyan_skillfish_get_smu_metrics_data(struct smu_context *smu,
     133  struct smu_table_context *smu_table = &smu->smu_table;    in cyan_skillfish_get_smu_metrics_data()
     137  ret = smu_cmn_get_metrics_table(smu, NULL, false);    in cyan_skillfish_get_smu_metrics_data()
     190  static int cyan_skillfish_read_sensor(struct smu_context *smu,
     202  ret = cyan_skillfish_get_smu_metrics_data(smu,    in cyan_skillfish_read_sensor()
    [all …]
|
/linux-6.6.21/drivers/macintosh/ |
D | smu.c |
     100  static struct smu_device *smu;
     116  if (list_empty(&smu->cmd_list))    in smu_start_cmd()
     120  cmd = list_entry(smu->cmd_list.next, struct smu_cmd, link);    in smu_start_cmd()
     121  smu->cmd_cur = cmd;    in smu_start_cmd()
     129  smu->cmd_buf->cmd = cmd->cmd;    in smu_start_cmd()
     130  smu->cmd_buf->length = cmd->data_len;    in smu_start_cmd()
     131  memcpy(smu->cmd_buf->data, cmd->data_buf, cmd->data_len);    in smu_start_cmd()
     134  faddr = (unsigned long)smu->cmd_buf;    in smu_start_cmd()
     135  fend = faddr + smu->cmd_buf->length + 2;    in smu_start_cmd()
     148  if (smu->broken_nap)    in smu_start_cmd()
    [all …]
|
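The Macintosh smu.c entry above belongs to a different SMU entirely, the PowerMac system-management unit: commands are queued on smu->cmd_list and smu_start_cmd() copies the head command's id, length and payload into a shared command buffer before flushing it for the hardware. Below is a stripped-down userspace model of that dequeue-and-marshal step, using a plain singly linked list and a static buffer in place of the kernel's list_head and DMA-visible cmd_buf; all demo_* names are illustrative.

    #include <stdio.h>
    #include <string.h>
    #include <stdint.h>

    /* Simplified stand-ins for struct smu_cmd and the shared command buffer. */
    struct demo_cmd {
        uint8_t cmd;
        int data_len;
        const void *data_buf;
        struct demo_cmd *next;    /* the kernel code uses a struct list_head instead */
    };

    struct demo_cmd_buf {
        uint8_t cmd;
        uint8_t length;
        uint8_t data[254];
    };

    static struct demo_cmd *cmd_list;    /* head of the pending-command queue */
    static struct demo_cmd_buf cmd_buf;

    /* Mirrors the marshalling in smu_start_cmd(): take the head of the queue and
     * copy command id, length and payload into the shared buffer. */
    static struct demo_cmd *demo_start_cmd(void)
    {
        struct demo_cmd *cmd = cmd_list;

        if (!cmd)    /* equivalent of list_empty(&smu->cmd_list) */
            return NULL;
        cmd_list = cmd->next;

        cmd_buf.cmd = cmd->cmd;
        cmd_buf.length = (uint8_t)cmd->data_len;
        memcpy(cmd_buf.data, cmd->data_buf, cmd->data_len);
        return cmd;
    }

    int main(void)
    {
        static const uint8_t payload[] = { 0x01, 0x02 };
        struct demo_cmd c = { .cmd = 0x8e, .data_len = 2, .data_buf = payload };

        cmd_list = &c;
        if (demo_start_cmd())
            printf("queued cmd 0x%02x, %d byte(s)\n",
                   (unsigned)cmd_buf.cmd, (int)cmd_buf.length);
        return 0;
    }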