/dflybsd-src/sys/dev/drm/radeon/ |
rv730_dpm.c
    42  RV770_SMC_SCLK_VALUE *sclk)  in rv730_populate_sclk_value() argument
   109  sclk->sclk_value = cpu_to_be32(engine_clock);  in rv730_populate_sclk_value()
   110  sclk->vCG_SPLL_FUNC_CNTL = cpu_to_be32(spll_func_cntl);  in rv730_populate_sclk_value()
   111  sclk->vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(spll_func_cntl_2);  in rv730_populate_sclk_value()
   112  sclk->vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(spll_func_cntl_3);  in rv730_populate_sclk_value()
   113  sclk->vCG_SPLL_SPREAD_SPECTRUM = cpu_to_be32(cg_spll_spread_spectrum);  in rv730_populate_sclk_value()
   114  sclk->vCG_SPLL_SPREAD_SPECTRUM_2 = cpu_to_be32(cg_spll_spread_spectrum_2);  in rv730_populate_sclk_value()
   305  table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL = cpu_to_be32(spll_func_cntl);  in rv730_populate_smc_acpi_state()
   306  table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(spll_func_cntl_2);  in rv730_populate_smc_acpi_state()
   307  table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(spll_func_cntl_3);  in rv730_populate_smc_acpi_state()
   [all …]
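The rv730/rv740/rv770 populate_sclk_value() hits above all follow the same pattern: the SPLL register images for a target engine clock are computed in CPU byte order and then stored into the SMC table with cpu_to_be32(), because the SMC firmware expects big-endian fields. A minimal stand-alone sketch of that pattern; the struct layout and field names here are invented for illustration and are not the driver's real RV770_SMC_SCLK_VALUE.

#include <stdint.h>
#include <arpa/inet.h>  /* htonl() as a portable stand-in for the kernel's cpu_to_be32() */

/* Hypothetical SMC-style table entry: every field is stored big-endian. */
struct smc_sclk_value {
	uint32_t sclk_value;            /* engine clock, e.g. in 10 kHz units */
	uint32_t spll_func_cntl;        /* SPLL control register image */
	uint32_t spll_spread_spectrum;  /* spread-spectrum register image */
};

/* Compute in CPU byte order, byte-swap on store into the firmware table. */
static void populate_sclk_value(uint32_t engine_clock,
				uint32_t spll_func_cntl,
				uint32_t spread_spectrum,
				struct smc_sclk_value *out)
{
	out->sclk_value = htonl(engine_clock);
	out->spll_func_cntl = htonl(spll_func_cntl);
	out->spll_spread_spectrum = htonl(spread_spectrum);
}

The ACPI-state hits at lines 305-307 reuse the same conversion when filling levels[0] of the SMC state table.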
|
btc_dpm.c
  1241  u32 *sclk, u32 *mclk)  in btc_skip_blacklist_clocks() argument
  1245  if ((sclk == NULL) || (mclk == NULL))  in btc_skip_blacklist_clocks()
  1251  if ((btc_blacklist_clocks[i].sclk == *sclk) &&  in btc_skip_blacklist_clocks()
  1258  *sclk = btc_get_valid_sclk(rdev, max_sclk, *sclk + 1);  in btc_skip_blacklist_clocks()
  1260  if (*sclk < max_sclk)  in btc_skip_blacklist_clocks()
  1261  btc_skip_blacklist_clocks(rdev, max_sclk, max_mclk, sclk, mclk);  in btc_skip_blacklist_clocks()
  1271  if ((pl->mclk == 0) || (pl->sclk == 0))  in btc_adjust_clock_combinations()
  1274  if (pl->mclk == pl->sclk)  in btc_adjust_clock_combinations()
  1277  if (pl->mclk > pl->sclk) {  in btc_adjust_clock_combinations()
  1278  if (((pl->mclk + (pl->sclk - 1)) / pl->sclk) > rdev->pm.dpm.dyn_state.mclk_sclk_ratio)  in btc_adjust_clock_combinations()
  [all …]
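btc_adjust_clock_combinations() keeps a power level's memory and engine clocks within a maximum mclk:sclk ratio; the hit at line 1278 is a ceiling division of mclk by sclk compared against that ratio. A simplified sketch of the check is below. The adjustment step (raising sclk to the smallest value that satisfies the ratio) is an assumption for illustration; the real driver picks the next valid sclk from its clock tables instead.

#include <stdint.h>

/* Ceiling division: smallest integer q with q * b >= a. */
static uint32_t ceil_div(uint32_t a, uint32_t b)
{
	return (a + (b - 1)) / b;
}

/*
 * If mclk is too far above sclk, raise sclk until the ratio constraint
 * is satisfied again (e.g. max_mclk_sclk_ratio = 4 for a 4:1 limit).
 */
static void adjust_clock_combination(uint32_t *sclk, uint32_t mclk,
				     uint32_t max_mclk_sclk_ratio)
{
	if (*sclk == 0 || mclk == 0)
		return;
	if (ceil_div(mclk, *sclk) > max_mclk_sclk_ratio)
		*sclk = ceil_div(mclk, max_mclk_sclk_ratio);
}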
|
rv770_dpm.c
   276  a_n = (int)state->medium.sclk * pi->lmp +  in rv770_populate_smc_t()
   277  (int)state->low.sclk * (R600_AH_DFLT - pi->rlp);  in rv770_populate_smc_t()
   278  a_d = (int)state->low.sclk * (100 - (int)pi->rlp) +  in rv770_populate_smc_t()
   279  (int)state->medium.sclk * pi->lmp;  in rv770_populate_smc_t()
   284  a_n = (int)state->high.sclk * pi->lhp + (int)state->medium.sclk *  in rv770_populate_smc_t()
   286  a_d = (int)state->medium.sclk * (100 - (int)pi->rmp) +  in rv770_populate_smc_t()
   287  (int)state->high.sclk * pi->lhp;  in rv770_populate_smc_t()
   490  RV770_SMC_SCLK_VALUE *sclk)  in rv770_populate_sclk_value() argument
   560  sclk->sclk_value = cpu_to_be32(engine_clock);  in rv770_populate_sclk_value()
   561  sclk->vCG_SPLL_FUNC_CNTL = cpu_to_be32(spll_func_cntl);  in rv770_populate_sclk_value()
   [all …]
|
trinity_dpm.c
   589  u32 index, u32 sclk)  in trinity_set_divider_value() argument
   597  sclk, false, &dividers);  in trinity_set_divider_value()
   607  sclk/2, false, &dividers);  in trinity_set_divider_value()
   727  trinity_set_divider_value(rdev, index, pl->sclk);  in trinity_program_power_level()
   974  if (new_ps->levels[new_ps->num_levels - 1].sclk >=  in trinity_set_uvd_clock_before_set_eng_clock()
   975  current_ps->levels[current_ps->num_levels - 1].sclk)  in trinity_set_uvd_clock_before_set_eng_clock()
   988  if (new_ps->levels[new_ps->num_levels - 1].sclk <  in trinity_set_uvd_clock_after_set_eng_clock()
   989  current_ps->levels[current_ps->num_levels - 1].sclk)  in trinity_set_uvd_clock_after_set_eng_clock()
  1339  static u8 trinity_calculate_vce_wm(struct radeon_device *rdev, u32 sclk)  in trinity_calculate_vce_wm() argument
  1341  if (sclk < 20000)  in trinity_calculate_vce_wm()
   [all …]
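The trinity_set_uvd_clock_before/after_set_eng_clock() hits (lines 974-989) encode an ordering rule for reclocking: compare the top sclk of the new power state against the current one, and change the UVD clock before the engine clock when sclk is going up, after it when sclk is going down. A hedged sketch of that decision follows; the ps structure, the stub functions and the printf output are stand-ins invented for illustration, not the driver's types.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Hypothetical power state: levels ordered low to high, highest level last. */
struct ps {
	uint32_t sclk[8];
	int num_levels;
};

static uint32_t top_sclk(const struct ps *p)
{
	return p->sclk[p->num_levels - 1];
}

/* Stubs for the two hardware programming steps. */
static void set_uvd_clock(const struct ps *p)    { printf("uvd clk for sclk %u\n", top_sclk(p)); }
static void set_engine_clock(const struct ps *p) { printf("sclk -> %u\n", top_sclk(p)); }

/* Mirror the before/after split shown in the hits above. */
static void transition(const struct ps *new_ps, const struct ps *cur_ps)
{
	bool going_up = top_sclk(new_ps) >= top_sclk(cur_ps);

	if (going_up)
		set_uvd_clock(new_ps);
	set_engine_clock(new_ps);
	if (!going_up)
		set_uvd_clock(new_ps);
}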
|
rv740_dpm.c
   122  RV770_SMC_SCLK_VALUE *sclk)  in rv740_populate_sclk_value() argument
   177  sclk->sclk_value = cpu_to_be32(engine_clock);  in rv740_populate_sclk_value()
   178  sclk->vCG_SPLL_FUNC_CNTL = cpu_to_be32(spll_func_cntl);  in rv740_populate_sclk_value()
   179  sclk->vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(spll_func_cntl_2);  in rv740_populate_sclk_value()
   180  sclk->vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(spll_func_cntl_3);  in rv740_populate_sclk_value()
   181  sclk->vCG_SPLL_SPREAD_SPECTRUM = cpu_to_be32(cg_spll_spread_spectrum);  in rv740_populate_sclk_value()
   182  sclk->vCG_SPLL_SPREAD_SPECTRUM_2 = cpu_to_be32(cg_spll_spread_spectrum_2);  in rv740_populate_sclk_value()
   383  table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL = cpu_to_be32(spll_func_cntl);  in rv740_populate_smc_acpi_state()
   384  table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 = cpu_to_be32(spll_func_cntl_2);  in rv740_populate_smc_acpi_state()
   385  table->ACPIState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 = cpu_to_be32(spll_func_cntl_3);  in rv740_populate_smc_acpi_state()
   [all …]
|
ni_dpm.c
   810  if (ps->performance_levels[i].sclk > max_limits->sclk)  in ni_apply_state_adjust_rules()
   811  ps->performance_levels[i].sclk = max_limits->sclk;  in ni_apply_state_adjust_rules()
   829  btc_skip_blacklist_clocks(rdev, max_limits->sclk, max_limits->mclk,  in ni_apply_state_adjust_rules()
   830  &ps->performance_levels[0].sclk,  in ni_apply_state_adjust_rules()
   834  if (ps->performance_levels[i].sclk < ps->performance_levels[i - 1].sclk)  in ni_apply_state_adjust_rules()
   835  ps->performance_levels[i].sclk = ps->performance_levels[i - 1].sclk;  in ni_apply_state_adjust_rules()
   864  btc_skip_blacklist_clocks(rdev, max_limits->sclk, max_limits->mclk,  in ni_apply_state_adjust_rules()
   865  &ps->performance_levels[i].sclk,  in ni_apply_state_adjust_rules()
   874  ps->performance_levels[i].sclk,  in ni_apply_state_adjust_rules()
  1621  (u8)rv770_calculate_memory_refresh_rate(rdev, pl->sclk);  in ni_populate_memory_timing_parameters()
   [all …]
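ni_apply_state_adjust_rules() first caps every performance level at the board's maximum sclk (lines 810-811) and then forces the levels to be non-decreasing so that a higher DPM index never carries a lower engine clock (lines 834-835). A compact sketch of those two passes; the fixed three-level array is an illustrative simplification of the driver's per-state level list.

#include <stdint.h>

#define NUM_LEVELS 3  /* illustrative; the real state carries a level count */

static void adjust_levels(uint32_t sclk[NUM_LEVELS], uint32_t max_sclk)
{
	int i;

	/* Pass 1: clamp each level to the platform maximum. */
	for (i = 0; i < NUM_LEVELS; i++)
		if (sclk[i] > max_sclk)
			sclk[i] = max_sclk;

	/* Pass 2: make the levels monotonically non-decreasing. */
	for (i = 1; i < NUM_LEVELS; i++)
		if (sclk[i] < sclk[i - 1])
			sclk[i] = sclk[i - 1];
}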
|
kv_dpm.c
   531  u32 index, u32 sclk)  in kv_set_divider_value() argument
   538  sclk, false, &dividers);  in kv_set_divider_value()
   543  pi->graphics_level[index].SclkFrequency = cpu_to_be32(sclk);  in kv_set_divider_value()
   720  if (table->entries[i].clk == pi->boot_pl.sclk)  in kv_program_bootup_state()
   734  if (table->entries[i].sclk_frequency == pi->boot_pl.sclk)  in kv_program_bootup_state()
  1713  if ((table->entries[i].clk >= new_ps->levels[0].sclk) ||  in kv_set_valid_clock_range()
  1721  if (table->entries[i].clk <= new_ps->levels[new_ps->num_levels - 1].sclk)  in kv_set_valid_clock_range()
  1727  if ((new_ps->levels[0].sclk - table->entries[pi->highest_valid].clk) >  in kv_set_valid_clock_range()
  1728  (table->entries[pi->lowest_valid].clk - new_ps->levels[new_ps->num_levels - 1].sclk))  in kv_set_valid_clock_range()
  1738  if (table->entries[i].sclk_frequency >= new_ps->levels[0].sclk ||  in kv_set_valid_clock_range()
   [all …]
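kv_program_bootup_state() (lines 720 and 734) walks the clock table looking for the entry whose frequency matches the sclk the firmware booted at, so the driver can start DPM from that index. A minimal sketch of that lookup; the clk_entry structure and the fall-back to index 0 when nothing matches are assumptions for illustration.

#include <stdint.h>

/* Hypothetical clock table entry; the driver's tables carry more fields. */
struct clk_entry {
	uint32_t clk;  /* sclk, e.g. in 10 kHz units */
};

/* Return the table index matching the boot sclk, or 0 as a fallback. */
static int find_boot_index(const struct clk_entry *table, int count,
			   uint32_t boot_sclk)
{
	int i;

	for (i = 0; i < count; i++)
		if (table[i].clk == boot_sclk)
			return i;
	return 0;
}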
|
sumo_dpm.c
   350  u32 highest_engine_clock = ps->levels[ps->num_levels - 1].sclk;  in sumo_program_bsp()
   353  highest_engine_clock = pi->boost_pl.sclk;  in sumo_program_bsp()
   414  m_a = asi * ps->levels[i].sclk / 100;  in sumo_program_at()
   424  m_a = asi * pi->boost_pl.sclk / 100;  in sumo_program_at()
   558  pl->sclk, false, &dividers);  in sumo_program_power_level()
   674  pi->boost_pl.sclk = pi->sys_info.boost_sclk;  in sumo_patch_boost_state()
   793  pi->acpi_pl.sclk,  in sumo_program_acpi_power_level()
   847  if (new_ps->levels[new_ps->num_levels - 1].sclk >=  in sumo_set_uvd_clock_before_set_eng_clock()
   848  current_ps->levels[current_ps->num_levels - 1].sclk)  in sumo_set_uvd_clock_before_set_eng_clock()
   865  if (new_ps->levels[new_ps->num_levels - 1].sclk <  in sumo_set_uvd_clock_after_set_eng_clock()
   [all …]
|
si_dpm.c
  1759  SISLANDS_SMC_SCLK_VALUE *sclk);
  2323  prev_sclk = state->performance_levels[i-1].sclk;  in si_populate_power_containment_values()
  2324  max_sclk = state->performance_levels[i].sclk;  in si_populate_power_containment_values()
  2343  if (min_sclk < state->performance_levels[0].sclk)  in si_populate_power_containment_values()
  2344  min_sclk = state->performance_levels[0].sclk;  in si_populate_power_containment_values()
  2418  if ((state->performance_levels[i].sclk >= rdev->pm.dpm.sq_ramping_threshold) &&  in si_populate_sq_ramping_values()
  2853  u32 sclk = 0;  in si_init_smc_spll_table() local
  2866  ret = si_calculate_sclk_params(rdev, sclk, &sclk_params);  in si_init_smc_spll_table()
  2899  sclk += 512;  in si_init_smc_spll_table()
  2973  u32 mclk, sclk;  in si_apply_state_adjust_rules() local
   [all …]
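si_init_smc_spll_table() (lines 2853-2899) builds the SMC's SPLL lookup table by starting at sclk = 0 and stepping the clock in fixed increments of 512, computing the PLL parameters for each step. A rough sketch of that loop shape; the table size, entry layout and the placeholder divider math are all assumptions, not the SISLANDS SMC format.

#include <stdint.h>

#define SPLL_TABLE_ENTRIES 8  /* illustrative size only */

struct spll_entry {
	uint32_t sclk;
	uint32_t fb_div;  /* would come from a real divider calculation */
};

/* Placeholder for the driver's si_calculate_sclk_params() helper. */
static int calc_sclk_params(uint32_t sclk, struct spll_entry *e)
{
	e->sclk = sclk;
	e->fb_div = sclk / 100;  /* placeholder math */
	return 0;
}

static int init_spll_table(struct spll_entry *table)
{
	uint32_t sclk = 0;
	int i, ret;

	for (i = 0; i < SPLL_TABLE_ENTRIES; i++) {
		ret = calc_sclk_params(sclk, &table[i]);
		if (ret)
			return ret;
		sclk += 512;  /* fixed step, as in the hit at line 2899 */
	}
	return 0;
}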
|
rv6xx_dpm.c
   440  state->low.sclk;  in rv6xx_calculate_engine_speed_stepping_parameters()
   442  state->medium.sclk;  in rv6xx_calculate_engine_speed_stepping_parameters()
   444  state->high.sclk;  in rv6xx_calculate_engine_speed_stepping_parameters()
  1028  rv6xx_calculate_t(state->low.sclk,  in rv6xx_calculate_ap()
  1029  state->medium.sclk,  in rv6xx_calculate_ap()
  1036  rv6xx_calculate_t(state->medium.sclk,  in rv6xx_calculate_ap()
  1037  state->high.sclk,  in rv6xx_calculate_ap()
  1427  old_state->low.sclk,  in rv6xx_generate_transition_stepping()
  1428  new_state->low.sclk,  in rv6xx_generate_transition_stepping()
  1440  new_state->low.sclk,  in rv6xx_generate_low_step()
   [all …]
|
rs690.c
   267  fixed20_12 sclk;  member
   279  fixed20_12 sclk, core_bandwidth, max_bandwidth;  in rs690_crtc_bandwidth_compute() local
   297  sclk.full = dfixed_const(selected_sclk);  in rs690_crtc_bandwidth_compute()
   298  sclk.full = dfixed_div(sclk, a);  in rs690_crtc_bandwidth_compute()
   302  core_bandwidth.full = dfixed_div(rdev->pm.sclk, a);  in rs690_crtc_bandwidth_compute()
   386  sclk.full = dfixed_mul(max_bandwidth, a);  in rs690_crtc_bandwidth_compute()
   388  sclk.full = dfixed_div(a, sclk);  in rs690_crtc_bandwidth_compute()
   395  chunk_time.full = dfixed_mul(sclk, a);  in rs690_crtc_bandwidth_compute()
   483  fill_rate.full = dfixed_div(wm0->sclk, a);  in rs690_compute_mode_priority()
   531  fill_rate.full = dfixed_div(wm0->sclk, a);  in rs690_compute_mode_priority()
   [all …]
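The rs690 (and rv515) bandwidth code does its arithmetic in 20.12 fixed point: fixed20_12 values are built with dfixed_const() and combined with dfixed_mul()/dfixed_div(). The helpers below are simplified re-implementations of that arithmetic for illustration; they are not the kernel's drm_fixed.h macros and skip overflow and divide-by-zero handling.

#include <stdint.h>

/* 20.12 fixed point: upper 20 bits integer part, lower 12 bits fraction. */
typedef struct { uint32_t full; } fx20_12;

static fx20_12 fx_const(uint32_t v)
{
	return (fx20_12){ v << 12 };
}

static fx20_12 fx_mul(fx20_12 a, fx20_12 b)
{
	return (fx20_12){ (uint32_t)(((uint64_t)a.full * b.full) >> 12) };
}

static fx20_12 fx_div(fx20_12 a, fx20_12 b)
{
	return (fx20_12){ (uint32_t)(((uint64_t)a.full << 12) / b.full) };
}

With these, the hits at lines 297-298 read as: convert the selected engine clock to fixed point, then divide by the scaling factor a to get the clock figure the bandwidth formulas work with.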
|
ci_dpm.c
   836  u32 sclk, mclk;  in ci_apply_state_adjust_rules() local
   867  if (ps->performance_levels[i].sclk > max_limits->sclk)  in ci_apply_state_adjust_rules()
   868  ps->performance_levels[i].sclk = max_limits->sclk;  in ci_apply_state_adjust_rules()
   876  sclk = ps->performance_levels[0].sclk;  in ci_apply_state_adjust_rules()
   879  sclk = ps->performance_levels[0].sclk;  in ci_apply_state_adjust_rules()
   883  if (sclk < rdev->pm.dpm.vce_states[rdev->pm.dpm.vce_level].sclk)  in ci_apply_state_adjust_rules()
   884  sclk = rdev->pm.dpm.vce_states[rdev->pm.dpm.vce_level].sclk;  in ci_apply_state_adjust_rules()
   889  ps->performance_levels[0].sclk = sclk;  in ci_apply_state_adjust_rules()
   892  if (ps->performance_levels[1].sclk < ps->performance_levels[0].sclk)  in ci_apply_state_adjust_rules()
   893  ps->performance_levels[1].sclk = ps->performance_levels[0].sclk;  in ci_apply_state_adjust_rules()
   [all …]
|
rs780_dpm.c
   752  u32 sclk;  in rs780_parse_pplib_clock_info() local
   754  sclk = le16_to_cpu(clock_info->rs780.usLowEngineClockLow);  in rs780_parse_pplib_clock_info()
   755  sclk |= clock_info->rs780.ucLowEngineClockHigh << 16;  in rs780_parse_pplib_clock_info()
   756  ps->sclk_low = sclk;  in rs780_parse_pplib_clock_info()
   757  sclk = le16_to_cpu(clock_info->rs780.usHighEngineClockLow);  in rs780_parse_pplib_clock_info()
   758  sclk |= clock_info->rs780.ucHighEngineClockHigh << 16;  in rs780_parse_pplib_clock_info()
   759  ps->sclk_high = sclk;  in rs780_parse_pplib_clock_info()
   990  u32 sclk = (rdev->clock.spll.reference_freq * current_fb_div) /  in rs780_dpm_debugfs_print_current_performance_level() local
   996  if (sclk < (ps->sclk_low + 500))  in rs780_dpm_debugfs_print_current_performance_level()
  1012  u32 sclk = (rdev->clock.spll.reference_freq * current_fb_div) /  in rs780_dpm_get_current_sclk() local
   [all …]
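The ATOM power tables split the engine clock across a 16-bit little-endian low word and an 8-bit high byte, and lines 754-759 stitch the two halves back together. A stand-alone version of that composition (the 10 kHz unit in the comment is the usual convention for these tables, stated here as an assumption):

#include <stdint.h>

static uint32_t atom_engine_clock(uint16_t low, uint8_t high)
{
	uint32_t clk = low;             /* low word, already converted from LE16 */
	clk |= (uint32_t)high << 16;    /* high byte supplies bits 16..23 */
	return clk;                     /* typically in 10 kHz units */
}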
|
radeon_atombios.c
  2135  rdev->pm.power_state[state_index].clock_info[0].sclk =  in radeon_atombios_parse_power_table_1_3()
  2139  (rdev->pm.power_state[state_index].clock_info[0].sclk == 0))  in radeon_atombios_parse_power_table_1_3()
  2170  rdev->pm.power_state[state_index].clock_info[0].sclk =  in radeon_atombios_parse_power_table_1_3()
  2174  (rdev->pm.power_state[state_index].clock_info[0].sclk == 0))  in radeon_atombios_parse_power_table_1_3()
  2206  rdev->pm.power_state[state_index].clock_info[0].sclk =  in radeon_atombios_parse_power_table_1_3()
  2210  (rdev->pm.power_state[state_index].clock_info[0].sclk == 0))  in radeon_atombios_parse_power_table_1_3()
  2434  rdev->pm.default_sclk = rdev->pm.power_state[state_index].clock_info[0].sclk;  in radeon_atombios_parse_pplib_non_clock_info()
  2449  rdev->pm.power_state[state_index].clock_info[j].sclk =  in radeon_atombios_parse_pplib_non_clock_info()
  2466  u32 sclk, mclk;  in radeon_atombios_parse_pplib_clock_info() local
  2471  sclk = le16_to_cpu(clock_info->sumo.usEngineClockLow);  in radeon_atombios_parse_pplib_clock_info()
   [all …]
|
cypress_dpm.c
   691  ret = rv740_populate_sclk_value(rdev, pl->sclk, &level->sclk);  in cypress_convert_power_level_to_smc()
   725  pl->sclk,  in cypress_convert_power_level_to_smc()
   732  pl->sclk,  in cypress_convert_power_level_to_smc()
   933  new_state->low.sclk,  in cypress_program_memory_timing_parameters()
   936  new_state->medium.sclk,  in cypress_program_memory_timing_parameters()
   939  new_state->high.sclk,  in cypress_program_memory_timing_parameters()
  1264  table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL =  in cypress_populate_smc_initial_state()
  1266  table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_2 =  in cypress_populate_smc_initial_state()
  1268  table->initialState.levels[0].sclk.vCG_SPLL_FUNC_CNTL_3 =  in cypress_populate_smc_initial_state()
  1270  table->initialState.levels[0].sclk.vCG_SPLL_SPREAD_SPECTRUM =  in cypress_populate_smc_initial_state()
   [all …]
|
rv770_dpm.h
   142  u32 sclk;  member
   181  RV770_SMC_SCLK_VALUE *sclk);
   202  RV770_SMC_SCLK_VALUE *sclk);
|
radeon_clocks.c
    39  uint32_t fb_div, ref_div, post_div, sclk;  in radeon_legacy_get_engine_clock() local
    52  sclk = fb_div / ref_div;  in radeon_legacy_get_engine_clock()
    56  sclk >>= 1;  in radeon_legacy_get_engine_clock()
    58  sclk >>= 2;  in radeon_legacy_get_engine_clock()
    60  sclk >>= 3;  in radeon_legacy_get_engine_clock()
    62  return sclk;  in radeon_legacy_get_engine_clock()
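radeon_legacy_get_engine_clock() recovers the engine clock from the SPLL registers: a feedback value divided by the reference divider (the scaling by the reference frequency happens in lines not shown in this excerpt), then shifted right according to the post divider. A hedged sketch of that overall shape; treating the post divider as a plain shift count is an assumption, the real register encodes it differently.

#include <stdint.h>

static uint32_t legacy_engine_clock(uint32_t ref_freq, uint32_t fb_div,
				    uint32_t ref_div, uint32_t post_div_shift)
{
	uint32_t sclk = (ref_freq * fb_div) / ref_div;  /* PLL output */

	return sclk >> post_div_shift;  /* power-of-two post divider */
}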
|
sumo_dpm.h
    32  u32 sclk;  member
   207  u32 sclk,
|
rv515.c
   946  fixed20_12 sclk;  member
   958  fixed20_12 sclk;  in rv515_crtc_bandwidth_compute() local
   976  sclk.full = dfixed_const(selected_sclk);  in rv515_crtc_bandwidth_compute()
   977  sclk.full = dfixed_div(sclk, a);  in rv515_crtc_bandwidth_compute()
  1044  chunk_time.full = dfixed_div(a, sclk);  in rv515_crtc_bandwidth_compute()
  1129  fill_rate.full = dfixed_div(wm0->sclk, a);  in rv515_compute_mode_priority()
  1177  fill_rate.full = dfixed_div(wm0->sclk, a);  in rv515_compute_mode_priority()
  1204  fill_rate.full = dfixed_div(wm1->sclk, a);  in rv515_compute_mode_priority()
|
radeon_pm.c
   173  u32 sclk, mclk;  in radeon_set_power_state() local
   181  sclk = rdev->pm.power_state[rdev->pm.requested_power_state_index].  in radeon_set_power_state()
   182  clock_info[rdev->pm.requested_clock_mode_index].sclk;  in radeon_set_power_state()
   183  if (sclk > rdev->pm.default_sclk)  in radeon_set_power_state()
   184  sclk = rdev->pm.default_sclk;  in radeon_set_power_state()
   205  if (sclk < rdev->pm.current_sclk)  in radeon_set_power_state()
   222  if (sclk != rdev->pm.current_sclk) {  in radeon_set_power_state()
   224  radeon_set_engine_clock(rdev, sclk);  in radeon_set_power_state()
   226  rdev->pm.current_sclk = sclk;  in radeon_set_power_state()
   227  DRM_DEBUG_DRIVER("Setting: e: %d\n", sclk);  in radeon_set_power_state()
   [all …]
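radeon_set_power_state() clamps the requested sclk to the default (maximum) clock and only touches the hardware when the value actually differs from the cached current clock. A small sketch of that clamp-and-compare flow; the stub in place of radeon_set_engine_clock() just prints what it would program.

#include <stdint.h>
#include <stdio.h>

/* Stub for the hardware call; the real driver reprograms the SPLL here. */
static void set_engine_clock(uint32_t sclk)
{
	printf("engine clock -> %u\n", sclk);
}

static void set_power_state(uint32_t requested_sclk, uint32_t default_sclk,
			    uint32_t *current_sclk)
{
	uint32_t sclk = requested_sclk;

	if (sclk > default_sclk)        /* never exceed the default clock */
		sclk = default_sclk;

	if (sclk != *current_sclk) {    /* skip redundant reprogramming */
		set_engine_clock(sclk);
		*current_sclk = sclk;
	}
}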
|
/dflybsd-src/sys/dev/drm/amd/amdgpu/ |
si_dpm.c
  1849  SISLANDS_SMC_SCLK_VALUE *sclk);
  2420  prev_sclk = state->performance_levels[i-1].sclk;  in si_populate_power_containment_values()
  2421  max_sclk = state->performance_levels[i].sclk;  in si_populate_power_containment_values()
  2439  if (min_sclk < state->performance_levels[0].sclk)  in si_populate_power_containment_values()
  2440  min_sclk = state->performance_levels[0].sclk;  in si_populate_power_containment_values()
  2514  if ((state->performance_levels[i].sclk >= adev->pm.dpm.sq_ramping_threshold) &&  in si_populate_sq_ramping_values()
  2952  u32 sclk = 0;  in si_init_smc_spll_table() local
  2965  ret = si_calculate_sclk_params(adev, sclk, &sclk_params);  in si_init_smc_spll_table()
  2997  sclk += 512;  in si_init_smc_spll_table()
  3181  if (new_state->performance_levels[new_state->performance_level_count - 1].sclk >=  in ni_set_uvd_clock_before_set_eng_clock()
   [all …]
|
/dflybsd-src/sys/dev/drm/amd/powerplay/hwmgr/ |
ppatomctrl.h
   291  …oltage_evv_on_sclk(struct pp_hwmgr *hwmgr, uint8_t voltage_type, uint32_t sclk, uint16_t virtual_v…
   314  uint32_t sclk, uint16_t virtual_voltage_Id, uint16_t *voltage, uint16_t dpm_level, bool debug);
   319  uint32_t sclk, uint16_t virtual_voltage_Id, uint32_t *voltage);
|
smu_helper.c
   414  uint16_t virtual_voltage_id, int32_t *sclk)  in phm_get_sclk_for_voltage_evv() argument
   435  *sclk = table_info->vdd_dep_on_sclk->entries[entry_id].clk;  in phm_get_sclk_for_voltage_evv()
   522  uint32_t sclk, uint16_t id, uint16_t *voltage)  in phm_get_voltage_evv_on_sclk() argument
   530  ret = atomctrl_get_voltage_evv_on_sclk(hwmgr, voltage_type, sclk, id, voltage);  in phm_get_voltage_evv_on_sclk()
   534  ret = atomctrl_get_voltage_evv_on_sclk_ai(hwmgr, voltage_type, sclk, id, &vol);  in phm_get_voltage_evv_on_sclk()
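phm_get_sclk_for_voltage_evv() (lines 414-435) resolves a virtual (EVV) voltage id to the sclk it is tied to by scanning the vdd-on-sclk dependency table. A hedged sketch of that lookup; the vdd_dep_entry layout and the -1 error return are illustrative assumptions, not the powerplay structures.

#include <stdint.h>

/* Hypothetical voltage-dependency entry: a clock paired with a voltage id. */
struct vdd_dep_entry {
	uint32_t clk;      /* sclk, e.g. in 10 kHz units */
	uint16_t vddc_id;  /* (virtual) voltage identifier */
};

static int get_sclk_for_voltage_id(const struct vdd_dep_entry *tab, int count,
				   uint16_t virtual_voltage_id, uint32_t *sclk)
{
	int i;

	for (i = 0; i < count; i++) {
		if (tab[i].vddc_id == virtual_voltage_id) {
			*sclk = tab[i].clk;
			return 0;
		}
	}
	return -1;  /* id not present in the table */
}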
|
smu7_hwmgr.c
  1704  uint32_t sclk = 0;  in smu7_get_evv_voltages() local
  1716  table_info->vddgfx_lookup_table, vv_id, &sclk)) {  in smu7_get_evv_voltages()
  1722  if (sclk_table->entries[j].clk == sclk &&  in smu7_get_evv_voltages()
  1724  sclk += 5000;  in smu7_get_evv_voltages()
  1730  (hwmgr, VOLTAGE_TYPE_VDDGFX, sclk,  in smu7_get_evv_voltages()
  1748  table_info->vddc_lookup_table, vv_id, &sclk)) {  in smu7_get_evv_voltages()
  1756  if (sclk_table->entries[j].clk == sclk &&  in smu7_get_evv_voltages()
  1758  sclk += 5000;  in smu7_get_evv_voltages()
  1766  sclk, vv_id, &vddc) == 0) {  in smu7_get_evv_voltages()
  2098  table_info->max_clock_voltage_on_ac.sclk =  in smu7_set_private_data_based_on_pptable_v1()
   [all …]
|
/dflybsd-src/sys/dev/drm/amd/display/dc/calcs/ |
dce_calcs.c
    99  struct bw_fixed sclk[8];  in calculate_bandwidth() local
   126  sclk[s_low] = vbios->low_sclk;  in calculate_bandwidth()
   127  sclk[s_mid1] = vbios->mid1_sclk;  in calculate_bandwidth()
   128  sclk[s_mid2] = vbios->mid2_sclk;  in calculate_bandwidth()
   129  sclk[s_mid3] = vbios->mid3_sclk;  in calculate_bandwidth()
   130  sclk[s_mid4] = vbios->mid4_sclk;  in calculate_bandwidth()
   131  sclk[s_mid5] = vbios->mid5_sclk;  in calculate_bandwidth()
   132  sclk[s_mid6] = vbios->mid6_sclk;  in calculate_bandwidth()
   133  sclk[s_high] = vbios->high_sclk;  in calculate_bandwidth()
  1185  …nnels)))), bw_div(data->total_display_reads_required_data, (bw_mul(bw_mul(sclk[j], vbios->data_ret…  in calculate_bandwidth()
   [all …]
|