1*b843c749SSergey Zigachev /*
2*b843c749SSergey Zigachev * Copyright 2012-16 Advanced Micro Devices, Inc.
3*b843c749SSergey Zigachev *
4*b843c749SSergey Zigachev * Permission is hereby granted, free of charge, to any person obtaining a
5*b843c749SSergey Zigachev * copy of this software and associated documentation files (the "Software"),
6*b843c749SSergey Zigachev * to deal in the Software without restriction, including without limitation
7*b843c749SSergey Zigachev * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8*b843c749SSergey Zigachev * and/or sell copies of the Software, and to permit persons to whom the
9*b843c749SSergey Zigachev * Software is furnished to do so, subject to the following conditions:
10*b843c749SSergey Zigachev *
11*b843c749SSergey Zigachev * The above copyright notice and this permission notice shall be included in
12*b843c749SSergey Zigachev * all copies or substantial portions of the Software.
13*b843c749SSergey Zigachev *
14*b843c749SSergey Zigachev * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15*b843c749SSergey Zigachev * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16*b843c749SSergey Zigachev * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17*b843c749SSergey Zigachev * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18*b843c749SSergey Zigachev * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19*b843c749SSergey Zigachev * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20*b843c749SSergey Zigachev * OTHER DEALINGS IN THE SOFTWARE.
21*b843c749SSergey Zigachev *
22*b843c749SSergey Zigachev * Authors: AMD
23*b843c749SSergey Zigachev *
24*b843c749SSergey Zigachev */
25*b843c749SSergey Zigachev
26*b843c749SSergey Zigachev #include "dce_clocks.h"
27*b843c749SSergey Zigachev #include "dm_services.h"
28*b843c749SSergey Zigachev #include "reg_helper.h"
29*b843c749SSergey Zigachev #include "fixed31_32.h"
30*b843c749SSergey Zigachev #include "bios_parser_interface.h"
31*b843c749SSergey Zigachev #include "dc.h"
32*b843c749SSergey Zigachev #include "dmcu.h"
33*b843c749SSergey Zigachev #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
34*b843c749SSergey Zigachev #include "dcn_calcs.h"
35*b843c749SSergey Zigachev #endif
36*b843c749SSergey Zigachev #include "core_types.h"
37*b843c749SSergey Zigachev #include "dc_types.h"
38*b843c749SSergey Zigachev #include "dal_asic_id.h"
39*b843c749SSergey Zigachev
/* Recover the dce_dccg wrapper from its embedded base dccg. */
#define TO_DCE_CLOCKS(clocks)\
	container_of(clocks, struct dce_dccg, base)

/* Register address lookup; requires a local 'clk_dce' in scope. */
#define REG(reg) \
	(clk_dce->regs->reg)

#undef FN
/* Shift/mask pair for a register field, consumed by the REG_* helpers. */
#define FN(reg_name, field_name) \
	clk_dce->clk_shift->field_name, clk_dce->clk_mask->field_name

/* Context and logger hooks expected by reg_helper / logging macros. */
#define CTX \
	clk_dce->base.ctx
#define DC_LOGGER \
	clk->ctx->logger
54*b843c749SSergey Zigachev
/* Max clock values for each state indexed by "enum clocks_state": */
/* DCE 8.0: maximum display/pixel clocks (kHz) per power state. */
static const struct state_dependent_clocks dce80_max_clks_by_state[] = {
/* ClocksStateInvalid - should not be used */
{ .display_clk_khz = 0, .pixel_clk_khz = 0 },
/* ClocksStateUltraLow - not expected to be used for DCE 8.0 */
{ .display_clk_khz = 0, .pixel_clk_khz = 0 },
/* ClocksStateLow */
{ .display_clk_khz = 352000, .pixel_clk_khz = 330000},
/* ClocksStateNominal */
{ .display_clk_khz = 600000, .pixel_clk_khz = 400000 },
/* ClocksStatePerformance */
{ .display_clk_khz = 600000, .pixel_clk_khz = 400000 } };
67*b843c749SSergey Zigachev
/* DCE 11.0: maximum display/pixel clocks (kHz) per power state. */
static const struct state_dependent_clocks dce110_max_clks_by_state[] = {
/*ClocksStateInvalid - should not be used*/
{ .display_clk_khz = 0, .pixel_clk_khz = 0 },
/*ClocksStateUltraLow - currently by HW design team not supposed to be used*/
{ .display_clk_khz = 352000, .pixel_clk_khz = 330000 },
/*ClocksStateLow*/
{ .display_clk_khz = 352000, .pixel_clk_khz = 330000 },
/*ClocksStateNominal*/
{ .display_clk_khz = 467000, .pixel_clk_khz = 400000 },
/*ClocksStatePerformance*/
{ .display_clk_khz = 643000, .pixel_clk_khz = 400000 } };
79*b843c749SSergey Zigachev
/* DCE 11.2: maximum display/pixel clocks (kHz) per power state. */
static const struct state_dependent_clocks dce112_max_clks_by_state[] = {
/*ClocksStateInvalid - should not be used*/
{ .display_clk_khz = 0, .pixel_clk_khz = 0 },
/*ClocksStateUltraLow - currently by HW design team not supposed to be used*/
{ .display_clk_khz = 389189, .pixel_clk_khz = 346672 },
/*ClocksStateLow*/
{ .display_clk_khz = 459000, .pixel_clk_khz = 400000 },
/*ClocksStateNominal*/
{ .display_clk_khz = 667000, .pixel_clk_khz = 600000 },
/*ClocksStatePerformance*/
{ .display_clk_khz = 1132000, .pixel_clk_khz = 600000 } };
91*b843c749SSergey Zigachev
/* DCE 12.0: maximum display/pixel clocks (kHz) per power state. */
static const struct state_dependent_clocks dce120_max_clks_by_state[] = {
/*ClocksStateInvalid - should not be used*/
{ .display_clk_khz = 0, .pixel_clk_khz = 0 },
/*ClocksStateUltraLow - currently by HW design team not supposed to be used*/
{ .display_clk_khz = 0, .pixel_clk_khz = 0 },
/*ClocksStateLow*/
{ .display_clk_khz = 460000, .pixel_clk_khz = 400000 },
/*ClocksStateNominal*/
{ .display_clk_khz = 670000, .pixel_clk_khz = 600000 },
/*ClocksStatePerformance*/
{ .display_clk_khz = 1133000, .pixel_clk_khz = 600000 } };
103*b843c749SSergey Zigachev
/* Starting DID for each range */
enum dentist_base_divider_id {
	DENTIST_BASE_DID_1 = 0x08,
	DENTIST_BASE_DID_2 = 0x40,
	DENTIST_BASE_DID_3 = 0x60,
	DENTIST_BASE_DID_4 = 0x7e,
	DENTIST_MAX_DID = 0x7f
};

/* Starting point and step size for each divider range.*/
enum dentist_divider_range {
	DENTIST_DIVIDER_RANGE_1_START = 8, /* 2.00 */
	DENTIST_DIVIDER_RANGE_1_STEP = 1, /* 0.25 */
	DENTIST_DIVIDER_RANGE_2_START = 64, /* 16.00 */
	DENTIST_DIVIDER_RANGE_2_STEP = 2, /* 0.50 */
	DENTIST_DIVIDER_RANGE_3_START = 128, /* 32.00 */
	DENTIST_DIVIDER_RANGE_3_STEP = 4, /* 1.00 */
	DENTIST_DIVIDER_RANGE_4_START = 248, /* 62.00 */
	DENTIST_DIVIDER_RANGE_4_STEP = 264, /* 66.00 */
	DENTIST_DIVIDER_RANGE_SCALE_FACTOR = 4
};

/*
 * Translate a DENTIST divider ID (DID) into its scaled divider value
 * (real divider * DENTIST_DIVIDER_RANGE_SCALE_FACTOR). DIDs outside
 * [DENTIST_BASE_DID_1, DENTIST_MAX_DID] are clamped into that window.
 */
static int dentist_get_divider_from_did(int did)
{
	int id = did;

	/* Clamp into the valid DID window. */
	if (id < DENTIST_BASE_DID_1)
		id = DENTIST_BASE_DID_1;
	else if (id > DENTIST_MAX_DID)
		id = DENTIST_MAX_DID;

	/* Each range is linear: start + step * offset-within-range. */
	if (id < DENTIST_BASE_DID_2)
		return DENTIST_DIVIDER_RANGE_1_START +
			DENTIST_DIVIDER_RANGE_1_STEP * (id - DENTIST_BASE_DID_1);
	if (id < DENTIST_BASE_DID_3)
		return DENTIST_DIVIDER_RANGE_2_START +
			DENTIST_DIVIDER_RANGE_2_STEP * (id - DENTIST_BASE_DID_2);
	if (id < DENTIST_BASE_DID_4)
		return DENTIST_DIVIDER_RANGE_3_START +
			DENTIST_DIVIDER_RANGE_3_STEP * (id - DENTIST_BASE_DID_3);
	return DENTIST_DIVIDER_RANGE_4_START +
		DENTIST_DIVIDER_RANGE_4_STEP * (id - DENTIST_BASE_DID_4);
}
147*b843c749SSergey Zigachev
148*b843c749SSergey Zigachev /* SW will adjust DP REF Clock average value for all purposes
149*b843c749SSergey Zigachev * (DP DTO / DP Audio DTO and DP GTC)
150*b843c749SSergey Zigachev if clock is spread for all cases:
151*b843c749SSergey Zigachev -if SS enabled on DP Ref clock and HW de-spreading enabled with SW
152*b843c749SSergey Zigachev calculations for DS_INCR/DS_MODULO (this is planned to be default case)
153*b843c749SSergey Zigachev -if SS enabled on DP Ref clock and HW de-spreading enabled with HW
154*b843c749SSergey Zigachev calculations (not planned to be used, but average clock should still
155*b843c749SSergey Zigachev be valid)
156*b843c749SSergey Zigachev -if SS enabled on DP Ref clock and HW de-spreading disabled
157*b843c749SSergey Zigachev (should not be case with CIK) then SW should program all rates
158*b843c749SSergey Zigachev generated according to average value (case as with previous ASICs)
159*b843c749SSergey Zigachev */
dccg_adjust_dp_ref_freq_for_ss(struct dce_dccg * clk_dce,int dp_ref_clk_khz)160*b843c749SSergey Zigachev static int dccg_adjust_dp_ref_freq_for_ss(struct dce_dccg *clk_dce, int dp_ref_clk_khz)
161*b843c749SSergey Zigachev {
162*b843c749SSergey Zigachev if (clk_dce->ss_on_dprefclk && clk_dce->dprefclk_ss_divider != 0) {
163*b843c749SSergey Zigachev struct fixed31_32 ss_percentage = dc_fixpt_div_int(
164*b843c749SSergey Zigachev dc_fixpt_from_fraction(clk_dce->dprefclk_ss_percentage,
165*b843c749SSergey Zigachev clk_dce->dprefclk_ss_divider), 200);
166*b843c749SSergey Zigachev struct fixed31_32 adj_dp_ref_clk_khz;
167*b843c749SSergey Zigachev
168*b843c749SSergey Zigachev ss_percentage = dc_fixpt_sub(dc_fixpt_one, ss_percentage);
169*b843c749SSergey Zigachev adj_dp_ref_clk_khz = dc_fixpt_mul_int(ss_percentage, dp_ref_clk_khz);
170*b843c749SSergey Zigachev dp_ref_clk_khz = dc_fixpt_floor(adj_dp_ref_clk_khz);
171*b843c749SSergey Zigachev }
172*b843c749SSergey Zigachev return dp_ref_clk_khz;
173*b843c749SSergey Zigachev }
174*b843c749SSergey Zigachev
/*
 * Compute the current DP reference clock, in kHz, from the DENTIST DFS
 * divider programmed in hardware, then apply the spread-spectrum
 * adjustment (if any).
 */
static int dce_get_dp_ref_freq_khz(struct dccg *clk)
{
	struct dce_dccg *clk_dce = TO_DCE_CLOCKS(clk);
	int dprefclk_wdivider;
	int dprefclk_src_sel;
	int dp_ref_clk_khz = 600000;
	int target_div;

	/* ASSERT DP Reference Clock source is from DFS*/
	REG_GET(DPREFCLK_CNTL, DPREFCLK_SRC_SEL, &dprefclk_src_sel);
	ASSERT(dprefclk_src_sel == 0);

	/* Read the mmDENTIST_DISPCLK_CNTL to get the currently
	 * programmed DID DENTIST_DPREFCLK_WDIVIDER*/
	REG_GET(DENTIST_DISPCLK_CNTL, DENTIST_DPREFCLK_WDIVIDER, &dprefclk_wdivider);

	/* Convert DENTIST_DPREFCLK_WDIVIDER to an actual divider
	 * (scaled by DENTIST_DIVIDER_RANGE_SCALE_FACTOR) */
	target_div = dentist_get_divider_from_did(dprefclk_wdivider);

	/* Calculate the current DFS clock, in kHz.*/
	dp_ref_clk_khz = (DENTIST_DIVIDER_RANGE_SCALE_FACTOR
		* clk_dce->dentist_vco_freq_khz) / target_div;

	return dccg_adjust_dp_ref_freq_for_ss(clk_dce, dp_ref_clk_khz);
}
200*b843c749SSergey Zigachev
/* DCE12: the DP reference clock is a fixed 600 MHz; only the
 * spread-spectrum adjustment needs to be applied. */
static int dce12_get_dp_ref_freq_khz(struct dccg *clk)
{
	return dccg_adjust_dp_ref_freq_for_ss(TO_DCE_CLOCKS(clk), 600000);
}
207*b843c749SSergey Zigachev
/*
 * Find the lowest power state whose per-state max clocks satisfy
 * @req_clocks. Returns DM_PP_CLOCKS_STATE_INVALID when even the highest
 * supported state cannot cover the requested display clock.
 */
static enum dm_pp_clocks_state dce_get_required_clocks_state(
	struct dccg *clk,
	struct dc_clocks *req_clocks)
{
	struct dce_dccg *clk_dce = TO_DCE_CLOCKS(clk);
	int i;
	enum dm_pp_clocks_state low_req_clk;

	/* Iterate from highest supported to lowest valid state, and update
	 * lowest RequiredState with the lowest state that satisfies
	 * all required clocks
	 */
	for (i = clk->max_clks_state; i >= DM_PP_CLOCKS_STATE_ULTRA_LOW; i--)
		if (req_clocks->dispclk_khz >
				clk_dce->max_clks_by_state[i].display_clk_khz
			|| req_clocks->phyclk_khz >
				clk_dce->max_clks_by_state[i].pixel_clk_khz)
			break;

	/* The loop stopped on the first state that is too small (or ran
	 * past ULTRA_LOW); the state above it is the lowest that fits. */
	low_req_clk = i + 1;
	if (low_req_clk > clk->max_clks_state) {
		/* set max clock state for high phyclock, invalid on exceeding display clock */
		if (clk_dce->max_clks_by_state[clk->max_clks_state].display_clk_khz
				< req_clocks->dispclk_khz)
			low_req_clk = DM_PP_CLOCKS_STATE_INVALID;
		else
			low_req_clk = clk->max_clks_state;
	}

	return low_req_clk;
}
239*b843c749SSergey Zigachev
dce_set_clock(struct dccg * clk,int requested_clk_khz)240*b843c749SSergey Zigachev static int dce_set_clock(
241*b843c749SSergey Zigachev struct dccg *clk,
242*b843c749SSergey Zigachev int requested_clk_khz)
243*b843c749SSergey Zigachev {
244*b843c749SSergey Zigachev struct dce_dccg *clk_dce = TO_DCE_CLOCKS(clk);
245*b843c749SSergey Zigachev struct bp_pixel_clock_parameters pxl_clk_params = { 0 };
246*b843c749SSergey Zigachev struct dc_bios *bp = clk->ctx->dc_bios;
247*b843c749SSergey Zigachev int actual_clock = requested_clk_khz;
248*b843c749SSergey Zigachev
249*b843c749SSergey Zigachev /* Make sure requested clock isn't lower than minimum threshold*/
250*b843c749SSergey Zigachev if (requested_clk_khz > 0)
251*b843c749SSergey Zigachev requested_clk_khz = max(requested_clk_khz,
252*b843c749SSergey Zigachev clk_dce->dentist_vco_freq_khz / 64);
253*b843c749SSergey Zigachev
254*b843c749SSergey Zigachev /* Prepare to program display clock*/
255*b843c749SSergey Zigachev pxl_clk_params.target_pixel_clock = requested_clk_khz;
256*b843c749SSergey Zigachev pxl_clk_params.pll_id = CLOCK_SOURCE_ID_DFS;
257*b843c749SSergey Zigachev
258*b843c749SSergey Zigachev bp->funcs->program_display_engine_pll(bp, &pxl_clk_params);
259*b843c749SSergey Zigachev
260*b843c749SSergey Zigachev if (clk_dce->dfs_bypass_enabled) {
261*b843c749SSergey Zigachev
262*b843c749SSergey Zigachev /* Cache the fixed display clock*/
263*b843c749SSergey Zigachev clk_dce->dfs_bypass_disp_clk =
264*b843c749SSergey Zigachev pxl_clk_params.dfs_bypass_display_clock;
265*b843c749SSergey Zigachev actual_clock = pxl_clk_params.dfs_bypass_display_clock;
266*b843c749SSergey Zigachev }
267*b843c749SSergey Zigachev
268*b843c749SSergey Zigachev /* from power down, we need mark the clock state as ClocksStateNominal
269*b843c749SSergey Zigachev * from HWReset, so when resume we will call pplib voltage regulator.*/
270*b843c749SSergey Zigachev if (requested_clk_khz == 0)
271*b843c749SSergey Zigachev clk->cur_min_clks_state = DM_PP_CLOCKS_STATE_NOMINAL;
272*b843c749SSergey Zigachev return actual_clock;
273*b843c749SSergey Zigachev }
274*b843c749SSergey Zigachev
/*
 * Set the display clock and keep the DMCU PSR wait loop in sync with it
 * (loop count = actual clock in MHz / 7).
 * NOTE(review): dmcu is dereferenced without a NULL check — assumes PSR
 * paths only run when a DMCU is present; confirm against callers.
 */
static int dce_psr_set_clock(
	struct dccg *clk,
	int requested_clk_khz)
{
	struct dce_dccg *clk_dce = TO_DCE_CLOCKS(clk);
	struct dc_context *ctx = clk_dce->base.ctx;
	struct dc *core_dc = ctx->dc;
	struct dmcu *dmcu = core_dc->res_pool->dmcu;
	int actual_clk_khz = requested_clk_khz;

	actual_clk_khz = dce_set_clock(clk, requested_clk_khz);

	dmcu->funcs->set_psr_wait_loop(dmcu, actual_clk_khz / 1000 / 7);
	return actual_clk_khz;
}
290*b843c749SSergey Zigachev
dce112_set_clock(struct dccg * clk,int requested_clk_khz)291*b843c749SSergey Zigachev static int dce112_set_clock(
292*b843c749SSergey Zigachev struct dccg *clk,
293*b843c749SSergey Zigachev int requested_clk_khz)
294*b843c749SSergey Zigachev {
295*b843c749SSergey Zigachev struct dce_dccg *clk_dce = TO_DCE_CLOCKS(clk);
296*b843c749SSergey Zigachev struct bp_set_dce_clock_parameters dce_clk_params;
297*b843c749SSergey Zigachev struct dc_bios *bp = clk->ctx->dc_bios;
298*b843c749SSergey Zigachev struct dc *core_dc = clk->ctx->dc;
299*b843c749SSergey Zigachev struct dmcu *dmcu = core_dc->res_pool->dmcu;
300*b843c749SSergey Zigachev int actual_clock = requested_clk_khz;
301*b843c749SSergey Zigachev /* Prepare to program display clock*/
302*b843c749SSergey Zigachev memset(&dce_clk_params, 0, sizeof(dce_clk_params));
303*b843c749SSergey Zigachev
304*b843c749SSergey Zigachev /* Make sure requested clock isn't lower than minimum threshold*/
305*b843c749SSergey Zigachev if (requested_clk_khz > 0)
306*b843c749SSergey Zigachev requested_clk_khz = max(requested_clk_khz,
307*b843c749SSergey Zigachev clk_dce->dentist_vco_freq_khz / 62);
308*b843c749SSergey Zigachev
309*b843c749SSergey Zigachev dce_clk_params.target_clock_frequency = requested_clk_khz;
310*b843c749SSergey Zigachev dce_clk_params.pll_id = CLOCK_SOURCE_ID_DFS;
311*b843c749SSergey Zigachev dce_clk_params.clock_type = DCECLOCK_TYPE_DISPLAY_CLOCK;
312*b843c749SSergey Zigachev
313*b843c749SSergey Zigachev bp->funcs->set_dce_clock(bp, &dce_clk_params);
314*b843c749SSergey Zigachev actual_clock = dce_clk_params.target_clock_frequency;
315*b843c749SSergey Zigachev
316*b843c749SSergey Zigachev /* from power down, we need mark the clock state as ClocksStateNominal
317*b843c749SSergey Zigachev * from HWReset, so when resume we will call pplib voltage regulator.*/
318*b843c749SSergey Zigachev if (requested_clk_khz == 0)
319*b843c749SSergey Zigachev clk->cur_min_clks_state = DM_PP_CLOCKS_STATE_NOMINAL;
320*b843c749SSergey Zigachev
321*b843c749SSergey Zigachev /*Program DP ref Clock*/
322*b843c749SSergey Zigachev /*VBIOS will determine DPREFCLK frequency, so we don't set it*/
323*b843c749SSergey Zigachev dce_clk_params.target_clock_frequency = 0;
324*b843c749SSergey Zigachev dce_clk_params.clock_type = DCECLOCK_TYPE_DPREFCLK;
325*b843c749SSergey Zigachev if (!ASICREV_IS_VEGA20_P(clk->ctx->asic_id.hw_internal_rev))
326*b843c749SSergey Zigachev dce_clk_params.flags.USE_GENLOCK_AS_SOURCE_FOR_DPREFCLK =
327*b843c749SSergey Zigachev (dce_clk_params.pll_id ==
328*b843c749SSergey Zigachev CLOCK_SOURCE_COMBO_DISPLAY_PLL0);
329*b843c749SSergey Zigachev else
330*b843c749SSergey Zigachev dce_clk_params.flags.USE_GENLOCK_AS_SOURCE_FOR_DPREFCLK = false;
331*b843c749SSergey Zigachev
332*b843c749SSergey Zigachev bp->funcs->set_dce_clock(bp, &dce_clk_params);
333*b843c749SSergey Zigachev
334*b843c749SSergey Zigachev if (!IS_FPGA_MAXIMUS_DC(core_dc->ctx->dce_environment)) {
335*b843c749SSergey Zigachev if (clk_dce->dfs_bypass_disp_clk != actual_clock)
336*b843c749SSergey Zigachev dmcu->funcs->set_psr_wait_loop(dmcu,
337*b843c749SSergey Zigachev actual_clock / 1000 / 7);
338*b843c749SSergey Zigachev }
339*b843c749SSergey Zigachev
340*b843c749SSergey Zigachev clk_dce->dfs_bypass_disp_clk = actual_clock;
341*b843c749SSergey Zigachev return actual_clock;
342*b843c749SSergey Zigachev }
343*b843c749SSergey Zigachev
/*
 * Read integrated (APU) clock info from VBIOS: the DENTIST VCO
 * frequency and the per-power-state maximum display clocks. Also
 * latches whether DFS bypass may be used.
 */
static void dce_clock_read_integrated_info(struct dce_dccg *clk_dce)
{
	struct dc_debug_options *debug = &clk_dce->base.ctx->dc->debug;
	struct dc_bios *bp = clk_dce->base.ctx->dc_bios;
	struct integrated_info info = { { { 0 } } };
	struct dc_firmware_info fw_info = { { 0 } };
	int i;

	if (bp->integrated_info)
		info = *bp->integrated_info;

	clk_dce->dentist_vco_freq_khz = info.dentist_vco_freq;
	if (clk_dce->dentist_vco_freq_khz == 0) {
		/* Fall back to firmware info, then to a 3.6 GHz default. */
		bp->funcs->get_firmware_info(bp, &fw_info);
		clk_dce->dentist_vco_freq_khz =
			fw_info.smu_gpu_pll_output_freq;
		if (clk_dce->dentist_vco_freq_khz == 0)
			clk_dce->dentist_vco_freq_khz = 3600000;
	}

	/*update the maximum display clock for each power state*/
	for (i = 0; i < NUMBER_OF_DISP_CLK_VOLTAGE; ++i) {
		enum dm_pp_clocks_state clk_state = DM_PP_CLOCKS_STATE_INVALID;

		/* Map the VBIOS voltage-table index to a pplib clock state. */
		switch (i) {
		case 0:
			clk_state = DM_PP_CLOCKS_STATE_ULTRA_LOW;
			break;

		case 1:
			clk_state = DM_PP_CLOCKS_STATE_LOW;
			break;

		case 2:
			clk_state = DM_PP_CLOCKS_STATE_NOMINAL;
			break;

		case 3:
			clk_state = DM_PP_CLOCKS_STATE_PERFORMANCE;
			break;

		default:
			clk_state = DM_PP_CLOCKS_STATE_INVALID;
			break;
		}

		/*Do not allow bad VBIOS/SBIOS to override with invalid values,
		 * check for > 100MHz*/
		if (info.disp_clk_voltage[i].max_supported_clk >= 100000)
			clk_dce->max_clks_by_state[clk_state].display_clk_khz =
				info.disp_clk_voltage[i].max_supported_clk;
	}

	if (!debug->disable_dfs_bypass && bp->integrated_info)
		if (bp->integrated_info->gpu_cap_info & DFS_BYPASS_ENABLE)
			clk_dce->dfs_bypass_enabled = true;
}
401*b843c749SSergey Zigachev
dce_clock_read_ss_info(struct dce_dccg * clk_dce)402*b843c749SSergey Zigachev static void dce_clock_read_ss_info(struct dce_dccg *clk_dce)
403*b843c749SSergey Zigachev {
404*b843c749SSergey Zigachev struct dc_bios *bp = clk_dce->base.ctx->dc_bios;
405*b843c749SSergey Zigachev int ss_info_num = bp->funcs->get_ss_entry_number(
406*b843c749SSergey Zigachev bp, AS_SIGNAL_TYPE_GPU_PLL);
407*b843c749SSergey Zigachev
408*b843c749SSergey Zigachev if (ss_info_num) {
409*b843c749SSergey Zigachev struct spread_spectrum_info info = { { 0 } };
410*b843c749SSergey Zigachev enum bp_result result = bp->funcs->get_spread_spectrum_info(
411*b843c749SSergey Zigachev bp, AS_SIGNAL_TYPE_GPU_PLL, 0, &info);
412*b843c749SSergey Zigachev
413*b843c749SSergey Zigachev /* Based on VBIOS, VBIOS will keep entry for GPU PLL SS
414*b843c749SSergey Zigachev * even if SS not enabled and in that case
415*b843c749SSergey Zigachev * SSInfo.spreadSpectrumPercentage !=0 would be sign
416*b843c749SSergey Zigachev * that SS is enabled
417*b843c749SSergey Zigachev */
418*b843c749SSergey Zigachev if (result == BP_RESULT_OK &&
419*b843c749SSergey Zigachev info.spread_spectrum_percentage != 0) {
420*b843c749SSergey Zigachev clk_dce->ss_on_dprefclk = true;
421*b843c749SSergey Zigachev clk_dce->dprefclk_ss_divider = info.spread_percentage_divider;
422*b843c749SSergey Zigachev
423*b843c749SSergey Zigachev if (info.type.CENTER_MODE == 0) {
424*b843c749SSergey Zigachev /* TODO: Currently for DP Reference clock we
425*b843c749SSergey Zigachev * need only SS percentage for
426*b843c749SSergey Zigachev * downspread */
427*b843c749SSergey Zigachev clk_dce->dprefclk_ss_percentage =
428*b843c749SSergey Zigachev info.spread_spectrum_percentage;
429*b843c749SSergey Zigachev }
430*b843c749SSergey Zigachev
431*b843c749SSergey Zigachev return;
432*b843c749SSergey Zigachev }
433*b843c749SSergey Zigachev
434*b843c749SSergey Zigachev result = bp->funcs->get_spread_spectrum_info(
435*b843c749SSergey Zigachev bp, AS_SIGNAL_TYPE_DISPLAY_PORT, 0, &info);
436*b843c749SSergey Zigachev
437*b843c749SSergey Zigachev /* Based on VBIOS, VBIOS will keep entry for DPREFCLK SS
438*b843c749SSergey Zigachev * even if SS not enabled and in that case
439*b843c749SSergey Zigachev * SSInfo.spreadSpectrumPercentage !=0 would be sign
440*b843c749SSergey Zigachev * that SS is enabled
441*b843c749SSergey Zigachev */
442*b843c749SSergey Zigachev if (result == BP_RESULT_OK &&
443*b843c749SSergey Zigachev info.spread_spectrum_percentage != 0) {
444*b843c749SSergey Zigachev clk_dce->ss_on_dprefclk = true;
445*b843c749SSergey Zigachev clk_dce->dprefclk_ss_divider = info.spread_percentage_divider;
446*b843c749SSergey Zigachev
447*b843c749SSergey Zigachev if (info.type.CENTER_MODE == 0) {
448*b843c749SSergey Zigachev /* Currently for DP Reference clock we
449*b843c749SSergey Zigachev * need only SS percentage for
450*b843c749SSergey Zigachev * downspread */
451*b843c749SSergey Zigachev clk_dce->dprefclk_ss_percentage =
452*b843c749SSergey Zigachev info.spread_spectrum_percentage;
453*b843c749SSergey Zigachev }
454*b843c749SSergey Zigachev }
455*b843c749SSergey Zigachev }
456*b843c749SSergey Zigachev }
457*b843c749SSergey Zigachev
/* Reprogram when the clock must rise, or when it may fall and lowering
 * has been declared safe by the caller. */
static inline bool should_set_clock(bool safe_to_lower, int calc_clk, int cur_clk)
{
	if (calc_clk > cur_clk)
		return true;
	return safe_to_lower && calc_clk < cur_clk;
}
462*b843c749SSergey Zigachev
/*
 * DCE12 clock update: apply new display and PHY clocks, issuing a pplib
 * voltage request for each clock that changes.
 * @safe_to_lower: when false, clocks are only allowed to increase.
 */
static void dce12_update_clocks(struct dccg *dccg,
		struct dc_clocks *new_clocks,
		bool safe_to_lower)
{
	struct dm_pp_clock_for_voltage_req clock_voltage_req = {0};

	/* TODO: Investigate why this is needed to fix display corruption. */
	/* 15% headroom is added on top of the requested display clock. */
	new_clocks->dispclk_khz = new_clocks->dispclk_khz * 115 / 100;

	if (should_set_clock(safe_to_lower, new_clocks->dispclk_khz, dccg->clks.dispclk_khz)) {
		clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_DISPLAY_CLK;
		clock_voltage_req.clocks_in_khz = new_clocks->dispclk_khz;
		/* set_dispclk returns the clock actually programmed. */
		new_clocks->dispclk_khz = dccg->funcs->set_dispclk(dccg, new_clocks->dispclk_khz);
		dccg->clks.dispclk_khz = new_clocks->dispclk_khz;

		dm_pp_apply_clock_for_voltage_request(dccg->ctx, &clock_voltage_req);
	}

	if (should_set_clock(safe_to_lower, new_clocks->phyclk_khz, dccg->clks.phyclk_khz)) {
		clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_DISPLAYPHYCLK;
		clock_voltage_req.clocks_in_khz = new_clocks->phyclk_khz;
		dccg->clks.phyclk_khz = new_clocks->phyclk_khz;

		dm_pp_apply_clock_for_voltage_request(dccg->ctx, &clock_voltage_req);
	}
}
489*b843c749SSergey Zigachev
490*b843c749SSergey Zigachev #ifdef CONFIG_DRM_AMD_DC_DCN1_0
dcn1_determine_dppclk_threshold(struct dccg * dccg,struct dc_clocks * new_clocks)491*b843c749SSergey Zigachev static int dcn1_determine_dppclk_threshold(struct dccg *dccg, struct dc_clocks *new_clocks)
492*b843c749SSergey Zigachev {
493*b843c749SSergey Zigachev bool request_dpp_div = new_clocks->dispclk_khz > new_clocks->dppclk_khz;
494*b843c749SSergey Zigachev bool dispclk_increase = new_clocks->dispclk_khz > dccg->clks.dispclk_khz;
495*b843c749SSergey Zigachev int disp_clk_threshold = new_clocks->max_supported_dppclk_khz;
496*b843c749SSergey Zigachev bool cur_dpp_div = dccg->clks.dispclk_khz > dccg->clks.dppclk_khz;
497*b843c749SSergey Zigachev
498*b843c749SSergey Zigachev /* increase clock, looking for div is 0 for current, request div is 1*/
499*b843c749SSergey Zigachev if (dispclk_increase) {
500*b843c749SSergey Zigachev /* already divided by 2, no need to reach target clk with 2 steps*/
501*b843c749SSergey Zigachev if (cur_dpp_div)
502*b843c749SSergey Zigachev return new_clocks->dispclk_khz;
503*b843c749SSergey Zigachev
504*b843c749SSergey Zigachev /* request disp clk is lower than maximum supported dpp clk,
505*b843c749SSergey Zigachev * no need to reach target clk with two steps.
506*b843c749SSergey Zigachev */
507*b843c749SSergey Zigachev if (new_clocks->dispclk_khz <= disp_clk_threshold)
508*b843c749SSergey Zigachev return new_clocks->dispclk_khz;
509*b843c749SSergey Zigachev
510*b843c749SSergey Zigachev /* target dpp clk not request divided by 2, still within threshold */
511*b843c749SSergey Zigachev if (!request_dpp_div)
512*b843c749SSergey Zigachev return new_clocks->dispclk_khz;
513*b843c749SSergey Zigachev
514*b843c749SSergey Zigachev } else {
515*b843c749SSergey Zigachev /* decrease clock, looking for current dppclk divided by 2,
516*b843c749SSergey Zigachev * request dppclk not divided by 2.
517*b843c749SSergey Zigachev */
518*b843c749SSergey Zigachev
519*b843c749SSergey Zigachev /* current dpp clk not divided by 2, no need to ramp*/
520*b843c749SSergey Zigachev if (!cur_dpp_div)
521*b843c749SSergey Zigachev return new_clocks->dispclk_khz;
522*b843c749SSergey Zigachev
523*b843c749SSergey Zigachev /* current disp clk is lower than current maximum dpp clk,
524*b843c749SSergey Zigachev * no need to ramp
525*b843c749SSergey Zigachev */
526*b843c749SSergey Zigachev if (dccg->clks.dispclk_khz <= disp_clk_threshold)
527*b843c749SSergey Zigachev return new_clocks->dispclk_khz;
528*b843c749SSergey Zigachev
529*b843c749SSergey Zigachev /* request dpp clk need to be divided by 2 */
530*b843c749SSergey Zigachev if (request_dpp_div)
531*b843c749SSergey Zigachev return new_clocks->dispclk_khz;
532*b843c749SSergey Zigachev }
533*b843c749SSergey Zigachev
534*b843c749SSergey Zigachev return disp_clk_threshold;
535*b843c749SSergey Zigachev }
536*b843c749SSergey Zigachev
dcn1_ramp_up_dispclk_with_dpp(struct dccg * dccg,struct dc_clocks * new_clocks)537*b843c749SSergey Zigachev static void dcn1_ramp_up_dispclk_with_dpp(struct dccg *dccg, struct dc_clocks *new_clocks)
538*b843c749SSergey Zigachev {
539*b843c749SSergey Zigachev struct dc *dc = dccg->ctx->dc;
540*b843c749SSergey Zigachev int dispclk_to_dpp_threshold = dcn1_determine_dppclk_threshold(dccg, new_clocks);
541*b843c749SSergey Zigachev bool request_dpp_div = new_clocks->dispclk_khz > new_clocks->dppclk_khz;
542*b843c749SSergey Zigachev int i;
543*b843c749SSergey Zigachev
544*b843c749SSergey Zigachev /* set disp clk to dpp clk threshold */
545*b843c749SSergey Zigachev dccg->funcs->set_dispclk(dccg, dispclk_to_dpp_threshold);
546*b843c749SSergey Zigachev
547*b843c749SSergey Zigachev /* update request dpp clk division option */
548*b843c749SSergey Zigachev for (i = 0; i < dc->res_pool->pipe_count; i++) {
549*b843c749SSergey Zigachev struct pipe_ctx *pipe_ctx = &dc->current_state->res_ctx.pipe_ctx[i];
550*b843c749SSergey Zigachev
551*b843c749SSergey Zigachev if (!pipe_ctx->plane_state)
552*b843c749SSergey Zigachev continue;
553*b843c749SSergey Zigachev
554*b843c749SSergey Zigachev pipe_ctx->plane_res.dpp->funcs->dpp_dppclk_control(
555*b843c749SSergey Zigachev pipe_ctx->plane_res.dpp,
556*b843c749SSergey Zigachev request_dpp_div,
557*b843c749SSergey Zigachev true);
558*b843c749SSergey Zigachev }
559*b843c749SSergey Zigachev
560*b843c749SSergey Zigachev /* If target clk not same as dppclk threshold, set to target clock */
561*b843c749SSergey Zigachev if (dispclk_to_dpp_threshold != new_clocks->dispclk_khz)
562*b843c749SSergey Zigachev dccg->funcs->set_dispclk(dccg, new_clocks->dispclk_khz);
563*b843c749SSergey Zigachev
564*b843c749SSergey Zigachev dccg->clks.dispclk_khz = new_clocks->dispclk_khz;
565*b843c749SSergey Zigachev dccg->clks.dppclk_khz = new_clocks->dppclk_khz;
566*b843c749SSergey Zigachev dccg->clks.max_supported_dppclk_khz = new_clocks->max_supported_dppclk_khz;
567*b843c749SSergey Zigachev }
568*b843c749SSergey Zigachev
/*
 * DCN1 (Raven) clock-update path: compare new_clocks against the cached
 * state, forward changed clocks to PPLIB/SMU, and ramp dispclk/dppclk.
 * Voltage is requested BEFORE raising clocks and AFTER lowering them,
 * so the rails are always sufficient for the programmed frequencies.
 * safe_to_lower gates downward changes (see should_set_clock()).
 */
static void dcn1_update_clocks(struct dccg *dccg,
			struct dc_clocks *new_clocks,
			bool safe_to_lower)
{
	struct dc *dc = dccg->ctx->dc;
	struct pp_smu_display_requirement_rv *smu_req_cur =
			&dc->res_pool->pp_smu_req;
	/* Work on a local copy; commit to *smu_req_cur only at the end. */
	struct pp_smu_display_requirement_rv smu_req = *smu_req_cur;
	struct pp_smu_funcs_rv *pp_smu = dc->res_pool->pp_smu;
	struct dm_pp_clock_for_voltage_req clock_voltage_req = {0};
	bool send_request_to_increase = false;
	bool send_request_to_lower = false;

	/* A non-zero PHY clock implies at least one active display. */
	if (new_clocks->phyclk_khz)
		smu_req.display_count = 1;
	else
		smu_req.display_count = 0;

	if (new_clocks->dispclk_khz > dccg->clks.dispclk_khz
			|| new_clocks->phyclk_khz > dccg->clks.phyclk_khz
			|| new_clocks->fclk_khz > dccg->clks.fclk_khz
			|| new_clocks->dcfclk_khz > dccg->clks.dcfclk_khz)
		send_request_to_increase = true;

	if (should_set_clock(safe_to_lower, new_clocks->phyclk_khz, dccg->clks.phyclk_khz)) {
		dccg->clks.phyclk_khz = new_clocks->phyclk_khz;

		send_request_to_lower = true;
	}

	if (should_set_clock(safe_to_lower, new_clocks->fclk_khz, dccg->clks.fclk_khz)) {
		dccg->clks.fclk_khz = new_clocks->fclk_khz;
		clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_FCLK;
		clock_voltage_req.clocks_in_khz = new_clocks->fclk_khz;
		smu_req.hard_min_fclk_khz = new_clocks->fclk_khz;

		dm_pp_apply_clock_for_voltage_request(dccg->ctx, &clock_voltage_req);
		send_request_to_lower = true;
	}

	if (should_set_clock(safe_to_lower, new_clocks->dcfclk_khz, dccg->clks.dcfclk_khz)) {
		dccg->clks.dcfclk_khz = new_clocks->dcfclk_khz;
		smu_req.hard_min_dcefclk_khz = new_clocks->dcfclk_khz;

		send_request_to_lower = true;
	}

	if (should_set_clock(safe_to_lower,
			new_clocks->dcfclk_deep_sleep_khz, dccg->clks.dcfclk_deep_sleep_khz)) {
		dccg->clks.dcfclk_deep_sleep_khz = new_clocks->dcfclk_deep_sleep_khz;
		/* BUGFIX: the SMU field is in MHz but dc tracks kHz; the old
		 * code stored the raw kHz value. Convert, rounding up so the
		 * requested floor is never below the dc value.
		 */
		smu_req.min_deep_sleep_dcefclk_mhz =
				(new_clocks->dcfclk_deep_sleep_khz + 999) / 1000;

		send_request_to_lower = true;
	}

	/* make sure dcf clk is before dpp clk to
	 * make sure we have enough voltage to run dpp clk
	 */
	if (send_request_to_increase) {
		/* use dcfclk to request voltage */
		clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_DCFCLK;
		clock_voltage_req.clocks_in_khz = dcn_find_dcfclk_suits_all(dc, new_clocks);
		dm_pp_apply_clock_for_voltage_request(dccg->ctx, &clock_voltage_req);
		if (pp_smu && pp_smu->set_display_requirement)
			pp_smu->set_display_requirement(&pp_smu->pp_smu, &smu_req);
	}

	/* dcn1 dppclk is tied to dispclk */
	/* program dispclk on = as a w/a for sleep resume clock ramping issues */
	if (should_set_clock(safe_to_lower, new_clocks->dispclk_khz, dccg->clks.dispclk_khz)
			|| new_clocks->dispclk_khz == dccg->clks.dispclk_khz) {
		dcn1_ramp_up_dispclk_with_dpp(dccg, new_clocks);
		dccg->clks.dispclk_khz = new_clocks->dispclk_khz;

		send_request_to_lower = true;
	}

	if (!send_request_to_increase && send_request_to_lower) {
		/* use dcfclk to request voltage */
		clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_DCFCLK;
		clock_voltage_req.clocks_in_khz = dcn_find_dcfclk_suits_all(dc, new_clocks);
		dm_pp_apply_clock_for_voltage_request(dccg->ctx, &clock_voltage_req);
		if (pp_smu && pp_smu->set_display_requirement)
			pp_smu->set_display_requirement(&pp_smu->pp_smu, &smu_req);
	}

	*smu_req_cur = smu_req;
}
658*b843c749SSergey Zigachev #endif
659*b843c749SSergey Zigachev
/*
 * DCE clock-update path: translate new_clocks into a PPLIB power-level
 * request, then reprogram dispclk if it changed. safe_to_lower gates
 * downward transitions of both the power level and dispclk.
 */
static void dce_update_clocks(struct dccg *dccg,
			struct dc_clocks *new_clocks,
			bool safe_to_lower)
{
	struct dm_pp_power_level_change_request level_change_req;
	bool level_changed;

	level_change_req.power_level = dce_get_required_clocks_state(dccg, new_clocks);
	/* Raise unconditionally; lower only when told it is safe to do so. */
	level_changed = level_change_req.power_level > dccg->cur_min_clks_state ||
			(safe_to_lower &&
			 level_change_req.power_level < dccg->cur_min_clks_state);

	/* Cache the new minimum state only if PPLIB accepted the request. */
	if (level_changed &&
	    dm_pp_apply_power_level_change_request(dccg->ctx, &level_change_req))
		dccg->cur_min_clks_state = level_change_req.power_level;

	if (should_set_clock(safe_to_lower, new_clocks->dispclk_khz, dccg->clks.dispclk_khz)) {
		/* set_dispclk returns the frequency actually programmed. */
		new_clocks->dispclk_khz = dccg->funcs->set_dispclk(dccg, new_clocks->dispclk_khz);
		dccg->clks.dispclk_khz = new_clocks->dispclk_khz;
	}
}
679*b843c749SSergey Zigachev
680*b843c749SSergey Zigachev #ifdef CONFIG_DRM_AMD_DC_DCN1_0
/* DCN 1.0 (Raven): DCE12 DP ref clock, DCE112 dispclk programming, DCN1 updates */
static const struct display_clock_funcs dcn1_funcs = {
	.get_dp_ref_clk_frequency = dce12_get_dp_ref_freq_khz,
	.set_dispclk = dce112_set_clock,
	.update_clocks = dcn1_update_clocks
};
686*b843c749SSergey Zigachev #endif
687*b843c749SSergey Zigachev
/* DCE 12.0: DCE12 DP ref clock, DCE112 dispclk programming, DCE12 updates */
static const struct display_clock_funcs dce120_funcs = {
	.get_dp_ref_clk_frequency = dce12_get_dp_ref_freq_khz,
	.set_dispclk = dce112_set_clock,
	.update_clocks = dce12_update_clocks
};
693*b843c749SSergey Zigachev
/* DCE 11.2: legacy DP ref clock, DCE112 dispclk programming, generic DCE updates */
static const struct display_clock_funcs dce112_funcs = {
	.get_dp_ref_clk_frequency = dce_get_dp_ref_freq_khz,
	.set_dispclk = dce112_set_clock,
	.update_clocks = dce_update_clocks
};
699*b843c749SSergey Zigachev
/* DCE 11.0: legacy DP ref clock, PSR-aware dispclk programming, generic DCE updates */
static const struct display_clock_funcs dce110_funcs = {
	.get_dp_ref_clk_frequency = dce_get_dp_ref_freq_khz,
	.set_dispclk = dce_psr_set_clock,
	.update_clocks = dce_update_clocks
};
705*b843c749SSergey Zigachev
/* Base DCE (8.x): legacy DP ref clock, legacy dispclk programming, generic updates */
static const struct display_clock_funcs dce_funcs = {
	.get_dp_ref_clk_frequency = dce_get_dp_ref_freq_khz,
	.set_dispclk = dce_set_clock,
	.update_clocks = dce_update_clocks
};
711*b843c749SSergey Zigachev
dce_dccg_construct(struct dce_dccg * clk_dce,struct dc_context * ctx,const struct dccg_registers * regs,const struct dccg_shift * clk_shift,const struct dccg_mask * clk_mask)712*b843c749SSergey Zigachev static void dce_dccg_construct(
713*b843c749SSergey Zigachev struct dce_dccg *clk_dce,
714*b843c749SSergey Zigachev struct dc_context *ctx,
715*b843c749SSergey Zigachev const struct dccg_registers *regs,
716*b843c749SSergey Zigachev const struct dccg_shift *clk_shift,
717*b843c749SSergey Zigachev const struct dccg_mask *clk_mask)
718*b843c749SSergey Zigachev {
719*b843c749SSergey Zigachev struct dccg *base = &clk_dce->base;
720*b843c749SSergey Zigachev
721*b843c749SSergey Zigachev base->ctx = ctx;
722*b843c749SSergey Zigachev base->funcs = &dce_funcs;
723*b843c749SSergey Zigachev
724*b843c749SSergey Zigachev clk_dce->regs = regs;
725*b843c749SSergey Zigachev clk_dce->clk_shift = clk_shift;
726*b843c749SSergey Zigachev clk_dce->clk_mask = clk_mask;
727*b843c749SSergey Zigachev
728*b843c749SSergey Zigachev clk_dce->dfs_bypass_disp_clk = 0;
729*b843c749SSergey Zigachev
730*b843c749SSergey Zigachev clk_dce->dprefclk_ss_percentage = 0;
731*b843c749SSergey Zigachev clk_dce->dprefclk_ss_divider = 1000;
732*b843c749SSergey Zigachev clk_dce->ss_on_dprefclk = false;
733*b843c749SSergey Zigachev
734*b843c749SSergey Zigachev base->max_clks_state = DM_PP_CLOCKS_STATE_NOMINAL;
735*b843c749SSergey Zigachev base->cur_min_clks_state = DM_PP_CLOCKS_STATE_INVALID;
736*b843c749SSergey Zigachev
737*b843c749SSergey Zigachev dce_clock_read_integrated_info(clk_dce);
738*b843c749SSergey Zigachev dce_clock_read_ss_info(clk_dce);
739*b843c749SSergey Zigachev }
740*b843c749SSergey Zigachev
dce_dccg_create(struct dc_context * ctx,const struct dccg_registers * regs,const struct dccg_shift * clk_shift,const struct dccg_mask * clk_mask)741*b843c749SSergey Zigachev struct dccg *dce_dccg_create(
742*b843c749SSergey Zigachev struct dc_context *ctx,
743*b843c749SSergey Zigachev const struct dccg_registers *regs,
744*b843c749SSergey Zigachev const struct dccg_shift *clk_shift,
745*b843c749SSergey Zigachev const struct dccg_mask *clk_mask)
746*b843c749SSergey Zigachev {
747*b843c749SSergey Zigachev struct dce_dccg *clk_dce = kzalloc(sizeof(*clk_dce), GFP_KERNEL);
748*b843c749SSergey Zigachev
749*b843c749SSergey Zigachev if (clk_dce == NULL) {
750*b843c749SSergey Zigachev BREAK_TO_DEBUGGER();
751*b843c749SSergey Zigachev return NULL;
752*b843c749SSergey Zigachev }
753*b843c749SSergey Zigachev
754*b843c749SSergey Zigachev memcpy(clk_dce->max_clks_by_state,
755*b843c749SSergey Zigachev dce80_max_clks_by_state,
756*b843c749SSergey Zigachev sizeof(dce80_max_clks_by_state));
757*b843c749SSergey Zigachev
758*b843c749SSergey Zigachev dce_dccg_construct(
759*b843c749SSergey Zigachev clk_dce, ctx, regs, clk_shift, clk_mask);
760*b843c749SSergey Zigachev
761*b843c749SSergey Zigachev return &clk_dce->base;
762*b843c749SSergey Zigachev }
763*b843c749SSergey Zigachev
dce110_dccg_create(struct dc_context * ctx,const struct dccg_registers * regs,const struct dccg_shift * clk_shift,const struct dccg_mask * clk_mask)764*b843c749SSergey Zigachev struct dccg *dce110_dccg_create(
765*b843c749SSergey Zigachev struct dc_context *ctx,
766*b843c749SSergey Zigachev const struct dccg_registers *regs,
767*b843c749SSergey Zigachev const struct dccg_shift *clk_shift,
768*b843c749SSergey Zigachev const struct dccg_mask *clk_mask)
769*b843c749SSergey Zigachev {
770*b843c749SSergey Zigachev struct dce_dccg *clk_dce = kzalloc(sizeof(*clk_dce), GFP_KERNEL);
771*b843c749SSergey Zigachev
772*b843c749SSergey Zigachev if (clk_dce == NULL) {
773*b843c749SSergey Zigachev BREAK_TO_DEBUGGER();
774*b843c749SSergey Zigachev return NULL;
775*b843c749SSergey Zigachev }
776*b843c749SSergey Zigachev
777*b843c749SSergey Zigachev memcpy(clk_dce->max_clks_by_state,
778*b843c749SSergey Zigachev dce110_max_clks_by_state,
779*b843c749SSergey Zigachev sizeof(dce110_max_clks_by_state));
780*b843c749SSergey Zigachev
781*b843c749SSergey Zigachev dce_dccg_construct(
782*b843c749SSergey Zigachev clk_dce, ctx, regs, clk_shift, clk_mask);
783*b843c749SSergey Zigachev
784*b843c749SSergey Zigachev clk_dce->base.funcs = &dce110_funcs;
785*b843c749SSergey Zigachev
786*b843c749SSergey Zigachev return &clk_dce->base;
787*b843c749SSergey Zigachev }
788*b843c749SSergey Zigachev
dce112_dccg_create(struct dc_context * ctx,const struct dccg_registers * regs,const struct dccg_shift * clk_shift,const struct dccg_mask * clk_mask)789*b843c749SSergey Zigachev struct dccg *dce112_dccg_create(
790*b843c749SSergey Zigachev struct dc_context *ctx,
791*b843c749SSergey Zigachev const struct dccg_registers *regs,
792*b843c749SSergey Zigachev const struct dccg_shift *clk_shift,
793*b843c749SSergey Zigachev const struct dccg_mask *clk_mask)
794*b843c749SSergey Zigachev {
795*b843c749SSergey Zigachev struct dce_dccg *clk_dce = kzalloc(sizeof(*clk_dce), GFP_KERNEL);
796*b843c749SSergey Zigachev
797*b843c749SSergey Zigachev if (clk_dce == NULL) {
798*b843c749SSergey Zigachev BREAK_TO_DEBUGGER();
799*b843c749SSergey Zigachev return NULL;
800*b843c749SSergey Zigachev }
801*b843c749SSergey Zigachev
802*b843c749SSergey Zigachev memcpy(clk_dce->max_clks_by_state,
803*b843c749SSergey Zigachev dce112_max_clks_by_state,
804*b843c749SSergey Zigachev sizeof(dce112_max_clks_by_state));
805*b843c749SSergey Zigachev
806*b843c749SSergey Zigachev dce_dccg_construct(
807*b843c749SSergey Zigachev clk_dce, ctx, regs, clk_shift, clk_mask);
808*b843c749SSergey Zigachev
809*b843c749SSergey Zigachev clk_dce->base.funcs = &dce112_funcs;
810*b843c749SSergey Zigachev
811*b843c749SSergey Zigachev return &clk_dce->base;
812*b843c749SSergey Zigachev }
813*b843c749SSergey Zigachev
dce120_dccg_create(struct dc_context * ctx)814*b843c749SSergey Zigachev struct dccg *dce120_dccg_create(struct dc_context *ctx)
815*b843c749SSergey Zigachev {
816*b843c749SSergey Zigachev struct dce_dccg *clk_dce = kzalloc(sizeof(*clk_dce), GFP_KERNEL);
817*b843c749SSergey Zigachev
818*b843c749SSergey Zigachev if (clk_dce == NULL) {
819*b843c749SSergey Zigachev BREAK_TO_DEBUGGER();
820*b843c749SSergey Zigachev return NULL;
821*b843c749SSergey Zigachev }
822*b843c749SSergey Zigachev
823*b843c749SSergey Zigachev memcpy(clk_dce->max_clks_by_state,
824*b843c749SSergey Zigachev dce120_max_clks_by_state,
825*b843c749SSergey Zigachev sizeof(dce120_max_clks_by_state));
826*b843c749SSergey Zigachev
827*b843c749SSergey Zigachev dce_dccg_construct(
828*b843c749SSergey Zigachev clk_dce, ctx, NULL, NULL, NULL);
829*b843c749SSergey Zigachev
830*b843c749SSergey Zigachev clk_dce->base.funcs = &dce120_funcs;
831*b843c749SSergey Zigachev
832*b843c749SSergey Zigachev return &clk_dce->base;
833*b843c749SSergey Zigachev }
834*b843c749SSergey Zigachev
835*b843c749SSergey Zigachev #ifdef CONFIG_DRM_AMD_DC_DCN1_0
dcn1_dccg_create(struct dc_context * ctx)836*b843c749SSergey Zigachev struct dccg *dcn1_dccg_create(struct dc_context *ctx)
837*b843c749SSergey Zigachev {
838*b843c749SSergey Zigachev struct dc_debug_options *debug = &ctx->dc->debug;
839*b843c749SSergey Zigachev struct dc_bios *bp = ctx->dc_bios;
840*b843c749SSergey Zigachev struct dc_firmware_info fw_info = { { 0 } };
841*b843c749SSergey Zigachev struct dce_dccg *clk_dce = kzalloc(sizeof(*clk_dce), GFP_KERNEL);
842*b843c749SSergey Zigachev
843*b843c749SSergey Zigachev if (clk_dce == NULL) {
844*b843c749SSergey Zigachev BREAK_TO_DEBUGGER();
845*b843c749SSergey Zigachev return NULL;
846*b843c749SSergey Zigachev }
847*b843c749SSergey Zigachev
848*b843c749SSergey Zigachev clk_dce->base.ctx = ctx;
849*b843c749SSergey Zigachev clk_dce->base.funcs = &dcn1_funcs;
850*b843c749SSergey Zigachev
851*b843c749SSergey Zigachev clk_dce->dfs_bypass_disp_clk = 0;
852*b843c749SSergey Zigachev
853*b843c749SSergey Zigachev clk_dce->dprefclk_ss_percentage = 0;
854*b843c749SSergey Zigachev clk_dce->dprefclk_ss_divider = 1000;
855*b843c749SSergey Zigachev clk_dce->ss_on_dprefclk = false;
856*b843c749SSergey Zigachev
857*b843c749SSergey Zigachev if (bp->integrated_info)
858*b843c749SSergey Zigachev clk_dce->dentist_vco_freq_khz = bp->integrated_info->dentist_vco_freq;
859*b843c749SSergey Zigachev if (clk_dce->dentist_vco_freq_khz == 0) {
860*b843c749SSergey Zigachev bp->funcs->get_firmware_info(bp, &fw_info);
861*b843c749SSergey Zigachev clk_dce->dentist_vco_freq_khz = fw_info.smu_gpu_pll_output_freq;
862*b843c749SSergey Zigachev if (clk_dce->dentist_vco_freq_khz == 0)
863*b843c749SSergey Zigachev clk_dce->dentist_vco_freq_khz = 3600000;
864*b843c749SSergey Zigachev }
865*b843c749SSergey Zigachev
866*b843c749SSergey Zigachev if (!debug->disable_dfs_bypass && bp->integrated_info)
867*b843c749SSergey Zigachev if (bp->integrated_info->gpu_cap_info & DFS_BYPASS_ENABLE)
868*b843c749SSergey Zigachev clk_dce->dfs_bypass_enabled = true;
869*b843c749SSergey Zigachev
870*b843c749SSergey Zigachev dce_clock_read_ss_info(clk_dce);
871*b843c749SSergey Zigachev
872*b843c749SSergey Zigachev return &clk_dce->base;
873*b843c749SSergey Zigachev }
874*b843c749SSergey Zigachev #endif
875*b843c749SSergey Zigachev
dce_dccg_destroy(struct dccg ** dccg)876*b843c749SSergey Zigachev void dce_dccg_destroy(struct dccg **dccg)
877*b843c749SSergey Zigachev {
878*b843c749SSergey Zigachev struct dce_dccg *clk_dce = TO_DCE_CLOCKS(*dccg);
879*b843c749SSergey Zigachev
880*b843c749SSergey Zigachev kfree(clk_dce);
881*b843c749SSergey Zigachev *dccg = NULL;
882*b843c749SSergey Zigachev }
883