1 /*	$NetBSD: intel_cdclk.c,v 1.2 2021/12/18 23:45:29 riastradh Exp $	*/
2 
3 /*
4  * Copyright © 2006-2017 Intel Corporation
5  *
6  * Permission is hereby granted, free of charge, to any person obtaining a
7  * copy of this software and associated documentation files (the "Software"),
8  * to deal in the Software without restriction, including without limitation
9  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
10  * and/or sell copies of the Software, and to permit persons to whom the
11  * Software is furnished to do so, subject to the following conditions:
12  *
13  * The above copyright notice and this permission notice (including the next
14  * paragraph) shall be included in all copies or substantial portions of the
15  * Software.
16  *
17  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
20  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
22  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
23  * DEALINGS IN THE SOFTWARE.
24  */
25 
26 #include <sys/cdefs.h>
27 __KERNEL_RCSID(0, "$NetBSD: intel_cdclk.c,v 1.2 2021/12/18 23:45:29 riastradh Exp $");
28 
29 #include "intel_atomic.h"
30 #include "intel_cdclk.h"
31 #include "intel_display_types.h"
32 #include "intel_sideband.h"
33 
34 /**
35  * DOC: CDCLK / RAWCLK
36  *
37  * The display engine uses several different clocks to do its work. There
38  * are two main clocks involved that aren't directly related to the actual
39  * pixel clock or any symbol/bit clock of the actual output port. These
40  * are the core display clock (CDCLK) and RAWCLK.
41  *
42  * CDCLK clocks most of the display pipe logic, and thus its frequency
43  * must be high enough to support the rate at which pixels are flowing
44  * through the pipes. Downscaling must also be accounted as that increases
45  * through the pipes. Downscaling must also be accounted for, as it
46  * increases the effective pixel rate.
47  * On several platforms the CDCLK frequency can be changed dynamically
48  * to minimize power consumption for a given display configuration.
49  * Typically changes to the CDCLK frequency require all the display pipes
50  * to be shut down while the frequency is being changed.
51  *
52  * On SKL+ the DMC will toggle the CDCLK off/on during DC5/6 entry/exit.
53  * DMC will not change the active CDCLK frequency however, so that part
54  * will still be performed by the driver directly.
55  *
56  * RAWCLK is a fixed frequency clock, often used by various auxiliary
57  * blocks such as AUX CH or backlight PWM. Hence the only thing we
58  * really need to know about RAWCLK is its frequency so that various
59  * dividers can be programmed correctly.
60  */
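/*
 * Rough illustration (the exact headroom rules are platform specific and
 * live in the min_cdclk computations elsewhere in the driver): a mode with
 * a 300 MHz pixel clock that is downscaled by 1.5x in one direction behaves
 * like a ~450 MHz pixel rate as far as the required CDCLK is concerned.
 */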
61 
62 static void fixed_133mhz_get_cdclk(struct drm_i915_private *dev_priv,
63 				   struct intel_cdclk_state *cdclk_state)
64 {
65 	cdclk_state->cdclk = 133333;
66 }
67 
68 static void fixed_200mhz_get_cdclk(struct drm_i915_private *dev_priv,
69 				   struct intel_cdclk_state *cdclk_state)
70 {
71 	cdclk_state->cdclk = 200000;
72 }
73 
74 static void fixed_266mhz_get_cdclk(struct drm_i915_private *dev_priv,
75 				   struct intel_cdclk_state *cdclk_state)
76 {
77 	cdclk_state->cdclk = 266667;
78 }
79 
80 static void fixed_333mhz_get_cdclk(struct drm_i915_private *dev_priv,
81 				   struct intel_cdclk_state *cdclk_state)
82 {
83 	cdclk_state->cdclk = 333333;
84 }
85 
86 static void fixed_400mhz_get_cdclk(struct drm_i915_private *dev_priv,
87 				   struct intel_cdclk_state *cdclk_state)
88 {
89 	cdclk_state->cdclk = 400000;
90 }
91 
92 static void fixed_450mhz_get_cdclk(struct drm_i915_private *dev_priv,
93 				   struct intel_cdclk_state *cdclk_state)
94 {
95 	cdclk_state->cdclk = 450000;
96 }
97 
98 static void i85x_get_cdclk(struct drm_i915_private *dev_priv,
99 			   struct intel_cdclk_state *cdclk_state)
100 {
101 	struct pci_dev *pdev = dev_priv->drm.pdev;
102 	u16 hpllcc = 0;
103 
104 	/*
105 	 * 852GM/852GMV only supports 133 MHz and the HPLLCC
106 	 * encoding is different :(
107 	 * FIXME is this the right way to detect 852GM/852GMV?
108 	 */
109 	if (pdev->revision == 0x1) {
110 		cdclk_state->cdclk = 133333;
111 		return;
112 	}
113 
114 	pci_bus_read_config_word(pdev->bus,
115 				 PCI_DEVFN(0, 3), HPLLCC, &hpllcc);
116 
117 	/* Assume that the hardware is in the high speed state.  This
118 	 * should be the default.
119 	 */
120 	switch (hpllcc & GC_CLOCK_CONTROL_MASK) {
121 	case GC_CLOCK_133_200:
122 	case GC_CLOCK_133_200_2:
123 	case GC_CLOCK_100_200:
124 		cdclk_state->cdclk = 200000;
125 		break;
126 	case GC_CLOCK_166_250:
127 		cdclk_state->cdclk = 250000;
128 		break;
129 	case GC_CLOCK_100_133:
130 		cdclk_state->cdclk = 133333;
131 		break;
132 	case GC_CLOCK_133_266:
133 	case GC_CLOCK_133_266_2:
134 	case GC_CLOCK_166_266:
135 		cdclk_state->cdclk = 266667;
136 		break;
137 	}
138 }
139 
140 static void i915gm_get_cdclk(struct drm_i915_private *dev_priv,
141 			     struct intel_cdclk_state *cdclk_state)
142 {
143 	struct pci_dev *pdev = dev_priv->drm.pdev;
144 	u16 gcfgc = 0;
145 
146 	pci_read_config_word(pdev, GCFGC, &gcfgc);
147 
148 	if (gcfgc & GC_LOW_FREQUENCY_ENABLE) {
149 		cdclk_state->cdclk = 133333;
150 		return;
151 	}
152 
153 	switch (gcfgc & GC_DISPLAY_CLOCK_MASK) {
154 	case GC_DISPLAY_CLOCK_333_320_MHZ:
155 		cdclk_state->cdclk = 333333;
156 		break;
157 	default:
158 	case GC_DISPLAY_CLOCK_190_200_MHZ:
159 		cdclk_state->cdclk = 190000;
160 		break;
161 	}
162 }
163 
164 static void i945gm_get_cdclk(struct drm_i915_private *dev_priv,
165 			     struct intel_cdclk_state *cdclk_state)
166 {
167 	struct pci_dev *pdev = dev_priv->drm.pdev;
168 	u16 gcfgc = 0;
169 
170 	pci_read_config_word(pdev, GCFGC, &gcfgc);
171 
172 	if (gcfgc & GC_LOW_FREQUENCY_ENABLE) {
173 		cdclk_state->cdclk = 133333;
174 		return;
175 	}
176 
177 	switch (gcfgc & GC_DISPLAY_CLOCK_MASK) {
178 	case GC_DISPLAY_CLOCK_333_320_MHZ:
179 		cdclk_state->cdclk = 320000;
180 		break;
181 	default:
182 	case GC_DISPLAY_CLOCK_190_200_MHZ:
183 		cdclk_state->cdclk = 200000;
184 		break;
185 	}
186 }
187 
188 static unsigned int intel_hpll_vco(struct drm_i915_private *dev_priv)
189 {
190 	static const unsigned int blb_vco[8] = {
191 		[0] = 3200000,
192 		[1] = 4000000,
193 		[2] = 5333333,
194 		[3] = 4800000,
195 		[4] = 6400000,
196 	};
197 	static const unsigned int pnv_vco[8] = {
198 		[0] = 3200000,
199 		[1] = 4000000,
200 		[2] = 5333333,
201 		[3] = 4800000,
202 		[4] = 2666667,
203 	};
204 	static const unsigned int cl_vco[8] = {
205 		[0] = 3200000,
206 		[1] = 4000000,
207 		[2] = 5333333,
208 		[3] = 6400000,
209 		[4] = 3333333,
210 		[5] = 3566667,
211 		[6] = 4266667,
212 	};
213 	static const unsigned int elk_vco[8] = {
214 		[0] = 3200000,
215 		[1] = 4000000,
216 		[2] = 5333333,
217 		[3] = 4800000,
218 	};
219 	static const unsigned int ctg_vco[8] = {
220 		[0] = 3200000,
221 		[1] = 4000000,
222 		[2] = 5333333,
223 		[3] = 6400000,
224 		[4] = 2666667,
225 		[5] = 4266667,
226 	};
227 	const unsigned int *vco_table;
228 	unsigned int vco;
229 	u8 tmp = 0;
230 
231 	/* FIXME other chipsets? */
232 	if (IS_GM45(dev_priv))
233 		vco_table = ctg_vco;
234 	else if (IS_G45(dev_priv))
235 		vco_table = elk_vco;
236 	else if (IS_I965GM(dev_priv))
237 		vco_table = cl_vco;
238 	else if (IS_PINEVIEW(dev_priv))
239 		vco_table = pnv_vco;
240 	else if (IS_G33(dev_priv))
241 		vco_table = blb_vco;
242 	else
243 		return 0;
244 
245 	tmp = I915_READ(IS_PINEVIEW(dev_priv) || IS_MOBILE(dev_priv) ?
246 			HPLLVCO_MOBILE : HPLLVCO);
247 
248 	vco = vco_table[tmp & 0x7];
249 	if (vco == 0)
250 		DRM_ERROR("Bad HPLL VCO (HPLLVCO=0x%02x)\n", tmp);
251 	else
252 		DRM_DEBUG_KMS("HPLL VCO %u kHz\n", vco);
253 
254 	return vco;
255 }
256 
257 static void g33_get_cdclk(struct drm_i915_private *dev_priv,
258 			  struct intel_cdclk_state *cdclk_state)
259 {
260 	struct pci_dev *pdev = dev_priv->drm.pdev;
261 	static const u8 div_3200[] = { 12, 10,  8,  7, 5, 16 };
262 	static const u8 div_4000[] = { 14, 12, 10,  8, 6, 20 };
263 	static const u8 div_4800[] = { 20, 14, 12, 10, 8, 24 };
264 	static const u8 div_5333[] = { 20, 16, 12, 12, 8, 28 };
265 	const u8 *div_table;
266 	unsigned int cdclk_sel;
267 	u16 tmp = 0;
268 
269 	cdclk_state->vco = intel_hpll_vco(dev_priv);
270 
271 	pci_read_config_word(pdev, GCFGC, &tmp);
272 
273 	cdclk_sel = (tmp >> 4) & 0x7;
274 
275 	if (cdclk_sel >= ARRAY_SIZE(div_3200))
276 		goto fail;
277 
278 	switch (cdclk_state->vco) {
279 	case 3200000:
280 		div_table = div_3200;
281 		break;
282 	case 4000000:
283 		div_table = div_4000;
284 		break;
285 	case 4800000:
286 		div_table = div_4800;
287 		break;
288 	case 5333333:
289 		div_table = div_5333;
290 		break;
291 	default:
292 		goto fail;
293 	}
294 
295 	cdclk_state->cdclk = DIV_ROUND_CLOSEST(cdclk_state->vco,
296 					       div_table[cdclk_sel]);
297 	return;
298 
299 fail:
300 	DRM_ERROR("Unable to determine CDCLK. HPLL VCO=%u kHz, CFGC=0x%08x\n",
301 		  cdclk_state->vco, tmp);
302 	cdclk_state->cdclk = 190476;
303 }
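/*
 * Worked example for g33_get_cdclk(): with a 3200000 kHz HPLL VCO and
 * GCFGC bits 6:4 selecting entry 0 of div_3200[] (divider 12), the
 * resulting CDCLK is DIV_ROUND_CLOSEST(3200000, 12) = 266667 kHz.
 */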
304 
305 static void pnv_get_cdclk(struct drm_i915_private *dev_priv,
306 			  struct intel_cdclk_state *cdclk_state)
307 {
308 	struct pci_dev *pdev = dev_priv->drm.pdev;
309 	u16 gcfgc = 0;
310 
311 	pci_read_config_word(pdev, GCFGC, &gcfgc);
312 
313 	switch (gcfgc & GC_DISPLAY_CLOCK_MASK) {
314 	case GC_DISPLAY_CLOCK_267_MHZ_PNV:
315 		cdclk_state->cdclk = 266667;
316 		break;
317 	case GC_DISPLAY_CLOCK_333_MHZ_PNV:
318 		cdclk_state->cdclk = 333333;
319 		break;
320 	case GC_DISPLAY_CLOCK_444_MHZ_PNV:
321 		cdclk_state->cdclk = 444444;
322 		break;
323 	case GC_DISPLAY_CLOCK_200_MHZ_PNV:
324 		cdclk_state->cdclk = 200000;
325 		break;
326 	default:
327 		DRM_ERROR("Unknown pnv display core clock 0x%04x\n", gcfgc);
328 		/* fall through */
329 	case GC_DISPLAY_CLOCK_133_MHZ_PNV:
330 		cdclk_state->cdclk = 133333;
331 		break;
332 	case GC_DISPLAY_CLOCK_167_MHZ_PNV:
333 		cdclk_state->cdclk = 166667;
334 		break;
335 	}
336 }
337 
338 static void i965gm_get_cdclk(struct drm_i915_private *dev_priv,
339 			     struct intel_cdclk_state *cdclk_state)
340 {
341 	struct pci_dev *pdev = dev_priv->drm.pdev;
342 	static const u8 div_3200[] = { 16, 10,  8 };
343 	static const u8 div_4000[] = { 20, 12, 10 };
344 	static const u8 div_5333[] = { 24, 16, 14 };
345 	const u8 *div_table;
346 	unsigned int cdclk_sel;
347 	u16 tmp = 0;
348 
349 	cdclk_state->vco = intel_hpll_vco(dev_priv);
350 
351 	pci_read_config_word(pdev, GCFGC, &tmp);
352 
353 	cdclk_sel = ((tmp >> 8) & 0x1f) - 1;
354 
355 	if (cdclk_sel >= ARRAY_SIZE(div_3200))
356 		goto fail;
357 
358 	switch (cdclk_state->vco) {
359 	case 3200000:
360 		div_table = div_3200;
361 		break;
362 	case 4000000:
363 		div_table = div_4000;
364 		break;
365 	case 5333333:
366 		div_table = div_5333;
367 		break;
368 	default:
369 		goto fail;
370 	}
371 
372 	cdclk_state->cdclk = DIV_ROUND_CLOSEST(cdclk_state->vco,
373 					       div_table[cdclk_sel]);
374 	return;
375 
376 fail:
377 	DRM_ERROR("Unable to determine CDCLK. HPLL VCO=%u kHz, CFGC=0x%04x\n",
378 		  cdclk_state->vco, tmp);
379 	cdclk_state->cdclk = 200000;
380 }
381 
382 static void gm45_get_cdclk(struct drm_i915_private *dev_priv,
383 			   struct intel_cdclk_state *cdclk_state)
384 {
385 	struct pci_dev *pdev = dev_priv->drm.pdev;
386 	unsigned int cdclk_sel;
387 	u16 tmp = 0;
388 
389 	cdclk_state->vco = intel_hpll_vco(dev_priv);
390 
391 	pci_read_config_word(pdev, GCFGC, &tmp);
392 
393 	cdclk_sel = (tmp >> 12) & 0x1;
394 
395 	switch (cdclk_state->vco) {
396 	case 2666667:
397 	case 4000000:
398 	case 5333333:
399 		cdclk_state->cdclk = cdclk_sel ? 333333 : 222222;
400 		break;
401 	case 3200000:
402 		cdclk_state->cdclk = cdclk_sel ? 320000 : 228571;
403 		break;
404 	default:
405 		DRM_ERROR("Unable to determine CDCLK. HPLL VCO=%u, CFGC=0x%04x\n",
406 			  cdclk_state->vco, tmp);
407 		cdclk_state->cdclk = 222222;
408 		break;
409 	}
410 }
411 
412 static void hsw_get_cdclk(struct drm_i915_private *dev_priv,
413 			  struct intel_cdclk_state *cdclk_state)
414 {
415 	u32 lcpll = I915_READ(LCPLL_CTL);
416 	u32 freq = lcpll & LCPLL_CLK_FREQ_MASK;
417 
418 	if (lcpll & LCPLL_CD_SOURCE_FCLK)
419 		cdclk_state->cdclk = 800000;
420 	else if (I915_READ(FUSE_STRAP) & HSW_CDCLK_LIMIT)
421 		cdclk_state->cdclk = 450000;
422 	else if (freq == LCPLL_CLK_FREQ_450)
423 		cdclk_state->cdclk = 450000;
424 	else if (IS_HSW_ULT(dev_priv))
425 		cdclk_state->cdclk = 337500;
426 	else
427 		cdclk_state->cdclk = 540000;
428 }
429 
430 static int vlv_calc_cdclk(struct drm_i915_private *dev_priv, int min_cdclk)
431 {
432 	int freq_320 = (dev_priv->hpll_freq <<  1) % 320000 != 0 ?
433 		333333 : 320000;
434 
435 	/*
436 	 * We seem to get an unstable or solid color picture at 200MHz.
437 	 * Not sure what's wrong. For now use 200MHz only when all pipes
438 	 * are off.
439 	 */
440 	if (IS_VALLEYVIEW(dev_priv) && min_cdclk > freq_320)
441 		return 400000;
442 	else if (min_cdclk > 266667)
443 		return freq_320;
444 	else if (min_cdclk > 0)
445 		return 266667;
446 	else
447 		return 200000;
448 }
449 
450 static u8 vlv_calc_voltage_level(struct drm_i915_private *dev_priv, int cdclk)
451 {
452 	if (IS_VALLEYVIEW(dev_priv)) {
453 		if (cdclk >= 320000) /* jump to highest voltage for 400MHz too */
454 			return 2;
455 		else if (cdclk >= 266667)
456 			return 1;
457 		else
458 			return 0;
459 	} else {
460 		/*
461 		 * Specs are full of misinformation, but testing on actual
462 		 * hardware has shown that we just need to write the desired
463 		 * CCK divider into the Punit register.
464 		 */
465 		return DIV_ROUND_CLOSEST(dev_priv->hpll_freq << 1, cdclk) - 1;
466 	}
467 }
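/*
 * Notes on the two helpers above: vlv_calc_cdclk() uses 320000 kHz only
 * when (2 * hpll_freq) is an exact multiple of 320000, otherwise 333333 kHz.
 * On CHV the value handed to the Punit is simply the CCK divider minus one,
 * e.g. a CDCLK of one quarter of (2 * hpll_freq) gives
 * DIV_ROUND_CLOSEST(2 * hpll_freq, cdclk) - 1 = 3.
 */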
468 
469 static void vlv_get_cdclk(struct drm_i915_private *dev_priv,
470 			  struct intel_cdclk_state *cdclk_state)
471 {
472 	u32 val;
473 
474 	vlv_iosf_sb_get(dev_priv,
475 			BIT(VLV_IOSF_SB_CCK) | BIT(VLV_IOSF_SB_PUNIT));
476 
477 	cdclk_state->vco = vlv_get_hpll_vco(dev_priv);
478 	cdclk_state->cdclk = vlv_get_cck_clock(dev_priv, "cdclk",
479 					       CCK_DISPLAY_CLOCK_CONTROL,
480 					       cdclk_state->vco);
481 
482 	val = vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM);
483 
484 	vlv_iosf_sb_put(dev_priv,
485 			BIT(VLV_IOSF_SB_CCK) | BIT(VLV_IOSF_SB_PUNIT));
486 
487 	if (IS_VALLEYVIEW(dev_priv))
488 		cdclk_state->voltage_level = (val & DSPFREQGUAR_MASK) >>
489 			DSPFREQGUAR_SHIFT;
490 	else
491 		cdclk_state->voltage_level = (val & DSPFREQGUAR_MASK_CHV) >>
492 			DSPFREQGUAR_SHIFT_CHV;
493 }
494 
495 static void vlv_program_pfi_credits(struct drm_i915_private *dev_priv)
496 {
497 	unsigned int credits, default_credits;
498 
499 	if (IS_CHERRYVIEW(dev_priv))
500 		default_credits = PFI_CREDIT(12);
501 	else
502 		default_credits = PFI_CREDIT(8);
503 
504 	if (dev_priv->cdclk.hw.cdclk >= dev_priv->czclk_freq) {
505 		/* CHV suggested value is 31 or 63 */
506 		if (IS_CHERRYVIEW(dev_priv))
507 			credits = PFI_CREDIT_63;
508 		else
509 			credits = PFI_CREDIT(15);
510 	} else {
511 		credits = default_credits;
512 	}
513 
514 	/*
515 	 * WA - write default credits before re-programming
516 	 * FIXME: should we also set the resend bit here?
517 	 */
518 	I915_WRITE(GCI_CONTROL, VGA_FAST_MODE_DISABLE |
519 		   default_credits);
520 
521 	I915_WRITE(GCI_CONTROL, VGA_FAST_MODE_DISABLE |
522 		   credits | PFI_CREDIT_RESEND);
523 
524 	/*
525 	 * FIXME is this guaranteed to clear
526 	 * immediately or should we poll for it?
527 	 */
528 	WARN_ON(I915_READ(GCI_CONTROL) & PFI_CREDIT_RESEND);
529 }
530 
531 static void vlv_set_cdclk(struct drm_i915_private *dev_priv,
532 			  const struct intel_cdclk_state *cdclk_state,
533 			  enum pipe pipe)
534 {
535 	int cdclk = cdclk_state->cdclk;
536 	u32 val, cmd = cdclk_state->voltage_level;
537 	intel_wakeref_t wakeref;
538 
539 	switch (cdclk) {
540 	case 400000:
541 	case 333333:
542 	case 320000:
543 	case 266667:
544 	case 200000:
545 		break;
546 	default:
547 		MISSING_CASE(cdclk);
548 		return;
549 	}
550 
551 	/* There are cases where we can end up here with power domains
552 	 * off and a CDCLK frequency other than the minimum, like when
553 	 * issuing a modeset without actually changing any display after
554 	 * a system suspend.  So grab the display core domain, which covers
555 	 * the HW blocks needed for the following programming.
556 	 */
557 	wakeref = intel_display_power_get(dev_priv, POWER_DOMAIN_DISPLAY_CORE);
558 
559 	vlv_iosf_sb_get(dev_priv,
560 			BIT(VLV_IOSF_SB_CCK) |
561 			BIT(VLV_IOSF_SB_BUNIT) |
562 			BIT(VLV_IOSF_SB_PUNIT));
563 
564 	val = vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM);
565 	val &= ~DSPFREQGUAR_MASK;
566 	val |= (cmd << DSPFREQGUAR_SHIFT);
567 	vlv_punit_write(dev_priv, PUNIT_REG_DSPSSPM, val);
568 	if (wait_for((vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM) &
569 		      DSPFREQSTAT_MASK) == (cmd << DSPFREQSTAT_SHIFT),
570 		     50)) {
571 		DRM_ERROR("timed out waiting for CDclk change\n");
572 	}
573 
574 	if (cdclk == 400000) {
575 		u32 divider;
576 
577 		divider = DIV_ROUND_CLOSEST(dev_priv->hpll_freq << 1,
578 					    cdclk) - 1;
579 
580 		/* adjust cdclk divider */
581 		val = vlv_cck_read(dev_priv, CCK_DISPLAY_CLOCK_CONTROL);
582 		val &= ~CCK_FREQUENCY_VALUES;
583 		val |= divider;
584 		vlv_cck_write(dev_priv, CCK_DISPLAY_CLOCK_CONTROL, val);
585 
586 		if (wait_for((vlv_cck_read(dev_priv, CCK_DISPLAY_CLOCK_CONTROL) &
587 			      CCK_FREQUENCY_STATUS) == (divider << CCK_FREQUENCY_STATUS_SHIFT),
588 			     50))
589 			DRM_ERROR("timed out waiting for CDclk change\n");
590 	}
591 
592 	/* adjust self-refresh exit latency value */
593 	val = vlv_bunit_read(dev_priv, BUNIT_REG_BISOC);
594 	val &= ~0x7f;
595 
596 	/*
597 	 * For high bandwidth configs, we set a higher latency in the bunit
598 	 * so that the core display fetch happens in time to avoid underruns.
599 	 */
600 	if (cdclk == 400000)
601 		val |= 4500 / 250; /* 4.5 usec */
602 	else
603 		val |= 3000 / 250; /* 3.0 usec */
604 	vlv_bunit_write(dev_priv, BUNIT_REG_BISOC, val);
605 
606 	vlv_iosf_sb_put(dev_priv,
607 			BIT(VLV_IOSF_SB_CCK) |
608 			BIT(VLV_IOSF_SB_BUNIT) |
609 			BIT(VLV_IOSF_SB_PUNIT));
610 
611 	intel_update_cdclk(dev_priv);
612 
613 	vlv_program_pfi_credits(dev_priv);
614 
615 	intel_display_power_put(dev_priv, POWER_DOMAIN_DISPLAY_CORE, wakeref);
616 }
617 
618 static void chv_set_cdclk(struct drm_i915_private *dev_priv,
619 			  const struct intel_cdclk_state *cdclk_state,
620 			  enum pipe pipe)
621 {
622 	int cdclk = cdclk_state->cdclk;
623 	u32 val, cmd = cdclk_state->voltage_level;
624 	intel_wakeref_t wakeref;
625 
626 	switch (cdclk) {
627 	case 333333:
628 	case 320000:
629 	case 266667:
630 	case 200000:
631 		break;
632 	default:
633 		MISSING_CASE(cdclk);
634 		return;
635 	}
636 
637 	/* There are cases where we can end up here with power domains
638 	 * off and a CDCLK frequency other than the minimum, like when
639 	 * issuing a modeset without actually changing any display after
640 	 * a system suspend.  So grab the display core domain, which covers
641 	 * the HW blocks needed for the following programming.
642 	 */
643 	wakeref = intel_display_power_get(dev_priv, POWER_DOMAIN_DISPLAY_CORE);
644 
645 	vlv_punit_get(dev_priv);
646 	val = vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM);
647 	val &= ~DSPFREQGUAR_MASK_CHV;
648 	val |= (cmd << DSPFREQGUAR_SHIFT_CHV);
649 	vlv_punit_write(dev_priv, PUNIT_REG_DSPSSPM, val);
650 	if (wait_for((vlv_punit_read(dev_priv, PUNIT_REG_DSPSSPM) &
651 		      DSPFREQSTAT_MASK_CHV) == (cmd << DSPFREQSTAT_SHIFT_CHV),
652 		     50)) {
653 		DRM_ERROR("timed out waiting for CDclk change\n");
654 	}
655 
656 	vlv_punit_put(dev_priv);
657 
658 	intel_update_cdclk(dev_priv);
659 
660 	vlv_program_pfi_credits(dev_priv);
661 
662 	intel_display_power_put(dev_priv, POWER_DOMAIN_DISPLAY_CORE, wakeref);
663 }
664 
665 static int bdw_calc_cdclk(int min_cdclk)
666 {
667 	if (min_cdclk > 540000)
668 		return 675000;
669 	else if (min_cdclk > 450000)
670 		return 540000;
671 	else if (min_cdclk > 337500)
672 		return 450000;
673 	else
674 		return 337500;
675 }
676 
677 static u8 bdw_calc_voltage_level(int cdclk)
678 {
679 	switch (cdclk) {
680 	default:
681 	case 337500:
682 		return 2;
683 	case 450000:
684 		return 0;
685 	case 540000:
686 		return 1;
687 	case 675000:
688 		return 3;
689 	}
690 }
691 
692 static void bdw_get_cdclk(struct drm_i915_private *dev_priv,
693 			  struct intel_cdclk_state *cdclk_state)
694 {
695 	u32 lcpll = I915_READ(LCPLL_CTL);
696 	u32 freq = lcpll & LCPLL_CLK_FREQ_MASK;
697 
698 	if (lcpll & LCPLL_CD_SOURCE_FCLK)
699 		cdclk_state->cdclk = 800000;
700 	else if (I915_READ(FUSE_STRAP) & HSW_CDCLK_LIMIT)
701 		cdclk_state->cdclk = 450000;
702 	else if (freq == LCPLL_CLK_FREQ_450)
703 		cdclk_state->cdclk = 450000;
704 	else if (freq == LCPLL_CLK_FREQ_54O_BDW)
705 		cdclk_state->cdclk = 540000;
706 	else if (freq == LCPLL_CLK_FREQ_337_5_BDW)
707 		cdclk_state->cdclk = 337500;
708 	else
709 		cdclk_state->cdclk = 675000;
710 
711 	/*
712 	 * Can't read this out :( Let's assume it's
713 	 * at least what the CDCLK frequency requires.
714 	 */
715 	cdclk_state->voltage_level =
716 		bdw_calc_voltage_level(cdclk_state->cdclk);
717 }
718 
719 static void bdw_set_cdclk(struct drm_i915_private *dev_priv,
720 			  const struct intel_cdclk_state *cdclk_state,
721 			  enum pipe pipe)
722 {
723 	int cdclk = cdclk_state->cdclk;
724 	u32 val;
725 	int ret;
726 
727 	if (WARN((I915_READ(LCPLL_CTL) &
728 		  (LCPLL_PLL_DISABLE | LCPLL_PLL_LOCK |
729 		   LCPLL_CD_CLOCK_DISABLE | LCPLL_ROOT_CD_CLOCK_DISABLE |
730 		   LCPLL_CD2X_CLOCK_DISABLE | LCPLL_POWER_DOWN_ALLOW |
731 		   LCPLL_CD_SOURCE_FCLK)) != LCPLL_PLL_LOCK,
732 		 "trying to change cdclk frequency with cdclk not enabled\n"))
733 		return;
734 
735 	ret = sandybridge_pcode_write(dev_priv,
736 				      BDW_PCODE_DISPLAY_FREQ_CHANGE_REQ, 0x0);
737 	if (ret) {
738 		DRM_ERROR("failed to inform pcode about cdclk change\n");
739 		return;
740 	}
741 
742 	val = I915_READ(LCPLL_CTL);
743 	val |= LCPLL_CD_SOURCE_FCLK;
744 	I915_WRITE(LCPLL_CTL, val);
745 
746 	/*
747 	 * According to the spec, it should be enough to poll for this 1 us.
748 	 * However, extensive testing shows that this can take longer.
749 	 */
750 	if (wait_for_us(I915_READ(LCPLL_CTL) &
751 			LCPLL_CD_SOURCE_FCLK_DONE, 100))
752 		DRM_ERROR("Switching to FCLK failed\n");
753 
754 	val = I915_READ(LCPLL_CTL);
755 	val &= ~LCPLL_CLK_FREQ_MASK;
756 
757 	switch (cdclk) {
758 	default:
759 		MISSING_CASE(cdclk);
760 		/* fall through */
761 	case 337500:
762 		val |= LCPLL_CLK_FREQ_337_5_BDW;
763 		break;
764 	case 450000:
765 		val |= LCPLL_CLK_FREQ_450;
766 		break;
767 	case 540000:
768 		val |= LCPLL_CLK_FREQ_54O_BDW;
769 		break;
770 	case 675000:
771 		val |= LCPLL_CLK_FREQ_675_BDW;
772 		break;
773 	}
774 
775 	I915_WRITE(LCPLL_CTL, val);
776 
777 	val = I915_READ(LCPLL_CTL);
778 	val &= ~LCPLL_CD_SOURCE_FCLK;
779 	I915_WRITE(LCPLL_CTL, val);
780 
781 	if (wait_for_us((I915_READ(LCPLL_CTL) &
782 			LCPLL_CD_SOURCE_FCLK_DONE) == 0, 1))
783 		DRM_ERROR("Switching back to LCPLL failed\n");
784 
785 	sandybridge_pcode_write(dev_priv, HSW_PCODE_DE_WRITE_FREQ_REQ,
786 				cdclk_state->voltage_level);
787 
788 	I915_WRITE(CDCLK_FREQ, DIV_ROUND_CLOSEST(cdclk, 1000) - 1);
789 
790 	intel_update_cdclk(dev_priv);
791 }
792 
793 static int skl_calc_cdclk(int min_cdclk, int vco)
794 {
795 	if (vco == 8640000) {
796 		if (min_cdclk > 540000)
797 			return 617143;
798 		else if (min_cdclk > 432000)
799 			return 540000;
800 		else if (min_cdclk > 308571)
801 			return 432000;
802 		else
803 			return 308571;
804 	} else {
805 		if (min_cdclk > 540000)
806 			return 675000;
807 		else if (min_cdclk > 450000)
808 			return 540000;
809 		else if (min_cdclk > 337500)
810 			return 450000;
811 		else
812 			return 337500;
813 	}
814 }
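/*
 * The frequencies above are simply DPLL0 VCO / divider: an 8640 MHz VCO
 * with dividers 14/16/20/28 gives 617143/540000/432000/308571 kHz, and an
 * 8100 MHz VCO with dividers 12/15/18/24 gives 675000/540000/450000/337500 kHz.
 */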
815 
816 static u8 skl_calc_voltage_level(int cdclk)
817 {
818 	if (cdclk > 540000)
819 		return 3;
820 	else if (cdclk > 450000)
821 		return 2;
822 	else if (cdclk > 337500)
823 		return 1;
824 	else
825 		return 0;
826 }
827 
828 static void skl_dpll0_update(struct drm_i915_private *dev_priv,
829 			     struct intel_cdclk_state *cdclk_state)
830 {
831 	u32 val;
832 
833 	cdclk_state->ref = 24000;
834 	cdclk_state->vco = 0;
835 
836 	val = I915_READ(LCPLL1_CTL);
837 	if ((val & LCPLL_PLL_ENABLE) == 0)
838 		return;
839 
840 	if (WARN_ON((val & LCPLL_PLL_LOCK) == 0))
841 		return;
842 
843 	val = I915_READ(DPLL_CTRL1);
844 
845 	if (WARN_ON((val & (DPLL_CTRL1_HDMI_MODE(SKL_DPLL0) |
846 			    DPLL_CTRL1_SSC(SKL_DPLL0) |
847 			    DPLL_CTRL1_OVERRIDE(SKL_DPLL0))) !=
848 		    DPLL_CTRL1_OVERRIDE(SKL_DPLL0)))
849 		return;
850 
851 	switch (val & DPLL_CTRL1_LINK_RATE_MASK(SKL_DPLL0)) {
852 	case DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_810, SKL_DPLL0):
853 	case DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_1350, SKL_DPLL0):
854 	case DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_1620, SKL_DPLL0):
855 	case DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_2700, SKL_DPLL0):
856 		cdclk_state->vco = 8100000;
857 		break;
858 	case DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_1080, SKL_DPLL0):
859 	case DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_2160, SKL_DPLL0):
860 		cdclk_state->vco = 8640000;
861 		break;
862 	default:
863 		MISSING_CASE(val & DPLL_CTRL1_LINK_RATE_MASK(SKL_DPLL0));
864 		break;
865 	}
866 }
867 
868 static void skl_get_cdclk(struct drm_i915_private *dev_priv,
869 			  struct intel_cdclk_state *cdclk_state)
870 {
871 	u32 cdctl;
872 
873 	skl_dpll0_update(dev_priv, cdclk_state);
874 
875 	cdclk_state->cdclk = cdclk_state->bypass = cdclk_state->ref;
876 
877 	if (cdclk_state->vco == 0)
878 		goto out;
879 
880 	cdctl = I915_READ(CDCLK_CTL);
881 
882 	if (cdclk_state->vco == 8640000) {
883 		switch (cdctl & CDCLK_FREQ_SEL_MASK) {
884 		case CDCLK_FREQ_450_432:
885 			cdclk_state->cdclk = 432000;
886 			break;
887 		case CDCLK_FREQ_337_308:
888 			cdclk_state->cdclk = 308571;
889 			break;
890 		case CDCLK_FREQ_540:
891 			cdclk_state->cdclk = 540000;
892 			break;
893 		case CDCLK_FREQ_675_617:
894 			cdclk_state->cdclk = 617143;
895 			break;
896 		default:
897 			MISSING_CASE(cdctl & CDCLK_FREQ_SEL_MASK);
898 			break;
899 		}
900 	} else {
901 		switch (cdctl & CDCLK_FREQ_SEL_MASK) {
902 		case CDCLK_FREQ_450_432:
903 			cdclk_state->cdclk = 450000;
904 			break;
905 		case CDCLK_FREQ_337_308:
906 			cdclk_state->cdclk = 337500;
907 			break;
908 		case CDCLK_FREQ_540:
909 			cdclk_state->cdclk = 540000;
910 			break;
911 		case CDCLK_FREQ_675_617:
912 			cdclk_state->cdclk = 675000;
913 			break;
914 		default:
915 			MISSING_CASE(cdctl & CDCLK_FREQ_SEL_MASK);
916 			break;
917 		}
918 	}
919 
920  out:
921 	/*
922 	 * Can't read this out :( Let's assume it's
923 	 * at least what the CDCLK frequency requires.
924 	 */
925 	cdclk_state->voltage_level =
926 		skl_calc_voltage_level(cdclk_state->cdclk);
927 }
928 
929 /* convert from kHz to .1 fixpoint MHz with -1MHz offset */
930 static int skl_cdclk_decimal(int cdclk)
931 {
932 	return DIV_ROUND_CLOSEST(cdclk - 1000, 500);
933 }
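/*
 * Example: skl_cdclk_decimal(337500) = (337500 - 1000) / 500 = 673, i.e.
 * 336.5 MHz expressed in 0.5 MHz units, which is the format the CDCLK_CTL
 * decimal field uses.
 */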
934 
935 static void skl_set_preferred_cdclk_vco(struct drm_i915_private *dev_priv,
936 					int vco)
937 {
938 	bool changed = dev_priv->skl_preferred_vco_freq != vco;
939 
940 	dev_priv->skl_preferred_vco_freq = vco;
941 
942 	if (changed)
943 		intel_update_max_cdclk(dev_priv);
944 }
945 
946 static void skl_dpll0_enable(struct drm_i915_private *dev_priv, int vco)
947 {
948 	u32 val;
949 
950 	WARN_ON(vco != 8100000 && vco != 8640000);
951 
952 	/*
953 	 * We always enable DPLL0 with the lowest link rate possible, but still
954 	 * taking into account the VCO required to operate the eDP panel at the
955 	 * desired frequency. The usual DP link rates operate with a VCO of
956 	 * 8100 while the eDP 1.4 alternate link rates need a VCO of 8640.
957 	 * The modeset code is responsible for the selection of the exact link
958 	 * rate later on, with the constraint of choosing a frequency that
959 	 * works with vco.
960 	 */
961 	val = I915_READ(DPLL_CTRL1);
962 
963 	val &= ~(DPLL_CTRL1_HDMI_MODE(SKL_DPLL0) | DPLL_CTRL1_SSC(SKL_DPLL0) |
964 		 DPLL_CTRL1_LINK_RATE_MASK(SKL_DPLL0));
965 	val |= DPLL_CTRL1_OVERRIDE(SKL_DPLL0);
966 	if (vco == 8640000)
967 		val |= DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_1080,
968 					    SKL_DPLL0);
969 	else
970 		val |= DPLL_CTRL1_LINK_RATE(DPLL_CTRL1_LINK_RATE_810,
971 					    SKL_DPLL0);
972 
973 	I915_WRITE(DPLL_CTRL1, val);
974 	POSTING_READ(DPLL_CTRL1);
975 
976 	I915_WRITE(LCPLL1_CTL, I915_READ(LCPLL1_CTL) | LCPLL_PLL_ENABLE);
977 
978 	if (intel_de_wait_for_set(dev_priv, LCPLL1_CTL, LCPLL_PLL_LOCK, 5))
979 		DRM_ERROR("DPLL0 not locked\n");
980 
981 	dev_priv->cdclk.hw.vco = vco;
982 
983 	/* We'll want to keep using the current vco from now on. */
984 	skl_set_preferred_cdclk_vco(dev_priv, vco);
985 }
986 
987 static void skl_dpll0_disable(struct drm_i915_private *dev_priv)
988 {
989 	I915_WRITE(LCPLL1_CTL, I915_READ(LCPLL1_CTL) & ~LCPLL_PLL_ENABLE);
990 	if (intel_de_wait_for_clear(dev_priv, LCPLL1_CTL, LCPLL_PLL_LOCK, 1))
991 		DRM_ERROR("Couldn't disable DPLL0\n");
992 
993 	dev_priv->cdclk.hw.vco = 0;
994 }
995 
996 static void skl_set_cdclk(struct drm_i915_private *dev_priv,
997 			  const struct intel_cdclk_state *cdclk_state,
998 			  enum pipe pipe)
999 {
1000 	int cdclk = cdclk_state->cdclk;
1001 	int vco = cdclk_state->vco;
1002 	u32 freq_select, cdclk_ctl;
1003 	int ret;
1004 
1005 	/*
1006 	 * Based on WA#1183, the 308 and 617 MHz CDCLK rates are unsupported
1007 	 * on SKL. In theory this should never happen since only
1008 	 * the eDP1.4 2.16 and 4.32Gbps rates require it, but eDP1.4 is not
1009 	 * supported on SKL either, see the above WA. WARN whenever trying to
1010 	 * use the corresponding VCO freq as that always leads to using the
1011 	 * minimum 308MHz CDCLK.
1012 	 */
1013 	WARN_ON_ONCE(IS_SKYLAKE(dev_priv) && vco == 8640000);
1014 
1015 	ret = skl_pcode_request(dev_priv, SKL_PCODE_CDCLK_CONTROL,
1016 				SKL_CDCLK_PREPARE_FOR_CHANGE,
1017 				SKL_CDCLK_READY_FOR_CHANGE,
1018 				SKL_CDCLK_READY_FOR_CHANGE, 3);
1019 	if (ret) {
1020 		DRM_ERROR("Failed to inform PCU about cdclk change (%d)\n",
1021 			  ret);
1022 		return;
1023 	}
1024 
1025 	/* Choose frequency for this cdclk */
1026 	switch (cdclk) {
1027 	default:
1028 		WARN_ON(cdclk != dev_priv->cdclk.hw.bypass);
1029 		WARN_ON(vco != 0);
1030 		/* fall through */
1031 	case 308571:
1032 	case 337500:
1033 		freq_select = CDCLK_FREQ_337_308;
1034 		break;
1035 	case 450000:
1036 	case 432000:
1037 		freq_select = CDCLK_FREQ_450_432;
1038 		break;
1039 	case 540000:
1040 		freq_select = CDCLK_FREQ_540;
1041 		break;
1042 	case 617143:
1043 	case 675000:
1044 		freq_select = CDCLK_FREQ_675_617;
1045 		break;
1046 	}
1047 
1048 	if (dev_priv->cdclk.hw.vco != 0 &&
1049 	    dev_priv->cdclk.hw.vco != vco)
1050 		skl_dpll0_disable(dev_priv);
1051 
1052 	cdclk_ctl = I915_READ(CDCLK_CTL);
1053 
1054 	if (dev_priv->cdclk.hw.vco != vco) {
1055 		/* Wa Display #1183: skl,kbl,cfl */
1056 		cdclk_ctl &= ~(CDCLK_FREQ_SEL_MASK | CDCLK_FREQ_DECIMAL_MASK);
1057 		cdclk_ctl |= freq_select | skl_cdclk_decimal(cdclk);
1058 		I915_WRITE(CDCLK_CTL, cdclk_ctl);
1059 	}
1060 
1061 	/* Wa Display #1183: skl,kbl,cfl */
1062 	cdclk_ctl |= CDCLK_DIVMUX_CD_OVERRIDE;
1063 	I915_WRITE(CDCLK_CTL, cdclk_ctl);
1064 	POSTING_READ(CDCLK_CTL);
1065 
1066 	if (dev_priv->cdclk.hw.vco != vco)
1067 		skl_dpll0_enable(dev_priv, vco);
1068 
1069 	/* Wa Display #1183: skl,kbl,cfl */
1070 	cdclk_ctl &= ~(CDCLK_FREQ_SEL_MASK | CDCLK_FREQ_DECIMAL_MASK);
1071 	I915_WRITE(CDCLK_CTL, cdclk_ctl);
1072 
1073 	cdclk_ctl |= freq_select | skl_cdclk_decimal(cdclk);
1074 	I915_WRITE(CDCLK_CTL, cdclk_ctl);
1075 
1076 	/* Wa Display #1183: skl,kbl,cfl */
1077 	cdclk_ctl &= ~CDCLK_DIVMUX_CD_OVERRIDE;
1078 	I915_WRITE(CDCLK_CTL, cdclk_ctl);
1079 	POSTING_READ(CDCLK_CTL);
1080 
1081 	/* inform PCU of the change */
1082 	sandybridge_pcode_write(dev_priv, SKL_PCODE_CDCLK_CONTROL,
1083 				cdclk_state->voltage_level);
1084 
1085 	intel_update_cdclk(dev_priv);
1086 }
1087 
1088 static void skl_sanitize_cdclk(struct drm_i915_private *dev_priv)
1089 {
1090 	u32 cdctl, expected;
1091 
1092 	/*
1093 	 * Check if the pre-os initialized the display: there is a SWF18
1094 	 * scratchpad register defined which is set by the pre-os and which
1095 	 * the OS driver can use to check the status.
1096 	 */
1097 	if ((I915_READ(SWF_ILK(0x18)) & 0x00FFFFFF) == 0)
1098 		goto sanitize;
1099 
1100 	intel_update_cdclk(dev_priv);
1101 	intel_dump_cdclk_state(&dev_priv->cdclk.hw, "Current CDCLK");
1102 
1103 	/* Is PLL enabled and locked ? */
1104 	if (dev_priv->cdclk.hw.vco == 0 ||
1105 	    dev_priv->cdclk.hw.cdclk == dev_priv->cdclk.hw.bypass)
1106 		goto sanitize;
1107 
1108 	/* DPLL okay; verify the cdclock
1109 	 *
1110 	 * In some instances the frequency selection is correct but the decimal
1111 	 * part is programmed wrong by the BIOS when the pre-os does not enable
1112 	 * the display. Verify that as well.
1113 	 */
1114 	cdctl = I915_READ(CDCLK_CTL);
1115 	expected = (cdctl & CDCLK_FREQ_SEL_MASK) |
1116 		skl_cdclk_decimal(dev_priv->cdclk.hw.cdclk);
1117 	if (cdctl == expected)
1118 		/* All well; nothing to sanitize */
1119 		return;
1120 
1121 sanitize:
1122 	DRM_DEBUG_KMS("Sanitizing cdclk programmed by pre-os\n");
1123 
1124 	/* force cdclk programming */
1125 	dev_priv->cdclk.hw.cdclk = 0;
1126 	/* force full PLL disable + enable */
1127 	dev_priv->cdclk.hw.vco = -1;
1128 }
1129 
1130 static void skl_init_cdclk(struct drm_i915_private *dev_priv)
1131 {
1132 	struct intel_cdclk_state cdclk_state;
1133 
1134 	skl_sanitize_cdclk(dev_priv);
1135 
1136 	if (dev_priv->cdclk.hw.cdclk != 0 &&
1137 	    dev_priv->cdclk.hw.vco != 0) {
1138 		/*
1139 		 * Use the current vco as our initial
1140 		 * guess as to what the preferred vco is.
1141 		 */
1142 		if (dev_priv->skl_preferred_vco_freq == 0)
1143 			skl_set_preferred_cdclk_vco(dev_priv,
1144 						    dev_priv->cdclk.hw.vco);
1145 		return;
1146 	}
1147 
1148 	cdclk_state = dev_priv->cdclk.hw;
1149 
1150 	cdclk_state.vco = dev_priv->skl_preferred_vco_freq;
1151 	if (cdclk_state.vco == 0)
1152 		cdclk_state.vco = 8100000;
1153 	cdclk_state.cdclk = skl_calc_cdclk(0, cdclk_state.vco);
1154 	cdclk_state.voltage_level = skl_calc_voltage_level(cdclk_state.cdclk);
1155 
1156 	skl_set_cdclk(dev_priv, &cdclk_state, INVALID_PIPE);
1157 }
1158 
1159 static void skl_uninit_cdclk(struct drm_i915_private *dev_priv)
1160 {
1161 	struct intel_cdclk_state cdclk_state = dev_priv->cdclk.hw;
1162 
1163 	cdclk_state.cdclk = cdclk_state.bypass;
1164 	cdclk_state.vco = 0;
1165 	cdclk_state.voltage_level = skl_calc_voltage_level(cdclk_state.cdclk);
1166 
1167 	skl_set_cdclk(dev_priv, &cdclk_state, INVALID_PIPE);
1168 }
1169 
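/*
 * For the tables below the PLL VCO is refclk * ratio and the resulting
 * CDCLK is vco / divider, e.g. 19200 kHz * 60 / 8 = 144000 kHz for the
 * first bxt entry (cf. bxt_calc_cdclk_pll_vco() and the CD2X divider
 * selection in bxt_set_cdclk()).
 */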
1170 static const struct intel_cdclk_vals bxt_cdclk_table[] = {
1171 	{ .refclk = 19200, .cdclk = 144000, .divider = 8, .ratio = 60 },
1172 	{ .refclk = 19200, .cdclk = 288000, .divider = 4, .ratio = 60 },
1173 	{ .refclk = 19200, .cdclk = 384000, .divider = 3, .ratio = 60 },
1174 	{ .refclk = 19200, .cdclk = 576000, .divider = 2, .ratio = 60 },
1175 	{ .refclk = 19200, .cdclk = 624000, .divider = 2, .ratio = 65 },
1176 	{}
1177 };
1178 
1179 static const struct intel_cdclk_vals glk_cdclk_table[] = {
1180 	{ .refclk = 19200, .cdclk =  79200, .divider = 8, .ratio = 33 },
1181 	{ .refclk = 19200, .cdclk = 158400, .divider = 4, .ratio = 33 },
1182 	{ .refclk = 19200, .cdclk = 316800, .divider = 2, .ratio = 33 },
1183 	{}
1184 };
1185 
1186 static const struct intel_cdclk_vals cnl_cdclk_table[] = {
1187 	{ .refclk = 19200, .cdclk = 168000, .divider = 4, .ratio = 35 },
1188 	{ .refclk = 19200, .cdclk = 336000, .divider = 2, .ratio = 35 },
1189 	{ .refclk = 19200, .cdclk = 528000, .divider = 2, .ratio = 55 },
1190 
1191 	{ .refclk = 24000, .cdclk = 168000, .divider = 4, .ratio = 28 },
1192 	{ .refclk = 24000, .cdclk = 336000, .divider = 2, .ratio = 28 },
1193 	{ .refclk = 24000, .cdclk = 528000, .divider = 2, .ratio = 44 },
1194 	{}
1195 };
1196 
1197 static const struct intel_cdclk_vals icl_cdclk_table[] = {
1198 	{ .refclk = 19200, .cdclk = 172800, .divider = 2, .ratio = 18 },
1199 	{ .refclk = 19200, .cdclk = 192000, .divider = 2, .ratio = 20 },
1200 	{ .refclk = 19200, .cdclk = 307200, .divider = 2, .ratio = 32 },
1201 	{ .refclk = 19200, .cdclk = 326400, .divider = 4, .ratio = 68 },
1202 	{ .refclk = 19200, .cdclk = 556800, .divider = 2, .ratio = 58 },
1203 	{ .refclk = 19200, .cdclk = 652800, .divider = 2, .ratio = 68 },
1204 
1205 	{ .refclk = 24000, .cdclk = 180000, .divider = 2, .ratio = 15 },
1206 	{ .refclk = 24000, .cdclk = 192000, .divider = 2, .ratio = 16 },
1207 	{ .refclk = 24000, .cdclk = 312000, .divider = 2, .ratio = 26 },
1208 	{ .refclk = 24000, .cdclk = 324000, .divider = 4, .ratio = 54 },
1209 	{ .refclk = 24000, .cdclk = 552000, .divider = 2, .ratio = 46 },
1210 	{ .refclk = 24000, .cdclk = 648000, .divider = 2, .ratio = 54 },
1211 
1212 	{ .refclk = 38400, .cdclk = 172800, .divider = 2, .ratio =  9 },
1213 	{ .refclk = 38400, .cdclk = 192000, .divider = 2, .ratio = 10 },
1214 	{ .refclk = 38400, .cdclk = 307200, .divider = 2, .ratio = 16 },
1215 	{ .refclk = 38400, .cdclk = 326400, .divider = 4, .ratio = 34 },
1216 	{ .refclk = 38400, .cdclk = 556800, .divider = 2, .ratio = 29 },
1217 	{ .refclk = 38400, .cdclk = 652800, .divider = 2, .ratio = 34 },
1218 	{}
1219 };
1220 
1221 static int bxt_calc_cdclk(struct drm_i915_private *dev_priv, int min_cdclk)
1222 {
1223 	const struct intel_cdclk_vals *table = dev_priv->cdclk.table;
1224 	int i;
1225 
1226 	for (i = 0; table[i].refclk; i++)
1227 		if (table[i].refclk == dev_priv->cdclk.hw.ref &&
1228 		    table[i].cdclk >= min_cdclk)
1229 			return table[i].cdclk;
1230 
1231 	WARN(1, "Cannot satisfy minimum cdclk %d with refclk %u\n",
1232 	     min_cdclk, dev_priv->cdclk.hw.ref);
1233 	return 0;
1234 }
1235 
1236 static int bxt_calc_cdclk_pll_vco(struct drm_i915_private *dev_priv, int cdclk)
1237 {
1238 	const struct intel_cdclk_vals *table = dev_priv->cdclk.table;
1239 	int i;
1240 
1241 	if (cdclk == dev_priv->cdclk.hw.bypass)
1242 		return 0;
1243 
1244 	for (i = 0; table[i].refclk; i++)
1245 		if (table[i].refclk == dev_priv->cdclk.hw.ref &&
1246 		    table[i].cdclk == cdclk)
1247 			return dev_priv->cdclk.hw.ref * table[i].ratio;
1248 
1249 	WARN(1, "cdclk %d not valid for refclk %u\n",
1250 	     cdclk, dev_priv->cdclk.hw.ref);
1251 	return 0;
1252 }
1253 
1254 static u8 bxt_calc_voltage_level(int cdclk)
1255 {
1256 	return DIV_ROUND_UP(cdclk, 25000);
1257 }
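/*
 * I.e. the voltage level is the CDCLK frequency in 25 MHz units, rounded
 * up, e.g. bxt_calc_voltage_level(624000) = 25.
 */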
1258 
1259 static u8 cnl_calc_voltage_level(int cdclk)
1260 {
1261 	if (cdclk > 336000)
1262 		return 2;
1263 	else if (cdclk > 168000)
1264 		return 1;
1265 	else
1266 		return 0;
1267 }
1268 
1269 static u8 icl_calc_voltage_level(int cdclk)
1270 {
1271 	if (cdclk > 556800)
1272 		return 2;
1273 	else if (cdclk > 312000)
1274 		return 1;
1275 	else
1276 		return 0;
1277 }
1278 
1279 static u8 ehl_calc_voltage_level(int cdclk)
1280 {
1281 	if (cdclk > 326400)
1282 		return 3;
1283 	else if (cdclk > 312000)
1284 		return 2;
1285 	else if (cdclk > 180000)
1286 		return 1;
1287 	else
1288 		return 0;
1289 }
1290 
1291 static void cnl_readout_refclk(struct drm_i915_private *dev_priv,
1292 			       struct intel_cdclk_state *cdclk_state)
1293 {
1294 	if (I915_READ(SKL_DSSM) & CNL_DSSM_CDCLK_PLL_REFCLK_24MHz)
1295 		cdclk_state->ref = 24000;
1296 	else
1297 		cdclk_state->ref = 19200;
1298 }
1299 
1300 static void icl_readout_refclk(struct drm_i915_private *dev_priv,
1301 			       struct intel_cdclk_state *cdclk_state)
1302 {
1303 	u32 dssm = I915_READ(SKL_DSSM) & ICL_DSSM_CDCLK_PLL_REFCLK_MASK;
1304 
1305 	switch (dssm) {
1306 	default:
1307 		MISSING_CASE(dssm);
1308 		/* fall through */
1309 	case ICL_DSSM_CDCLK_PLL_REFCLK_24MHz:
1310 		cdclk_state->ref = 24000;
1311 		break;
1312 	case ICL_DSSM_CDCLK_PLL_REFCLK_19_2MHz:
1313 		cdclk_state->ref = 19200;
1314 		break;
1315 	case ICL_DSSM_CDCLK_PLL_REFCLK_38_4MHz:
1316 		cdclk_state->ref = 38400;
1317 		break;
1318 	}
1319 }
1320 
1321 static void bxt_de_pll_readout(struct drm_i915_private *dev_priv,
1322 			       struct intel_cdclk_state *cdclk_state)
1323 {
1324 	u32 val, ratio;
1325 
1326 	if (INTEL_GEN(dev_priv) >= 11)
1327 		icl_readout_refclk(dev_priv, cdclk_state);
1328 	else if (IS_CANNONLAKE(dev_priv))
1329 		cnl_readout_refclk(dev_priv, cdclk_state);
1330 	else
1331 		cdclk_state->ref = 19200;
1332 
1333 	val = I915_READ(BXT_DE_PLL_ENABLE);
1334 	if ((val & BXT_DE_PLL_PLL_ENABLE) == 0 ||
1335 	    (val & BXT_DE_PLL_LOCK) == 0) {
1336 		/*
1337 		 * CDCLK PLL is disabled, the VCO/ratio doesn't matter, but
1338 		 * setting it to zero is a way to signal that.
1339 		 */
1340 		cdclk_state->vco = 0;
1341 		return;
1342 	}
1343 
1344 	/*
1345 	 * CNL+ have the ratio directly in the PLL enable register, gen9lp had
1346 	 * it in a separate PLL control register.
1347 	 */
1348 	if (INTEL_GEN(dev_priv) >= 10)
1349 		ratio = val & CNL_CDCLK_PLL_RATIO_MASK;
1350 	else
1351 		ratio = I915_READ(BXT_DE_PLL_CTL) & BXT_DE_PLL_RATIO_MASK;
1352 
1353 	cdclk_state->vco = ratio * cdclk_state->ref;
1354 }
1355 
1356 static void bxt_get_cdclk(struct drm_i915_private *dev_priv,
1357 			  struct intel_cdclk_state *cdclk_state)
1358 {
1359 	u32 divider;
1360 	int div;
1361 
1362 	bxt_de_pll_readout(dev_priv, cdclk_state);
1363 
1364 	if (INTEL_GEN(dev_priv) >= 12)
1365 		cdclk_state->bypass = cdclk_state->ref / 2;
1366 	else if (INTEL_GEN(dev_priv) >= 11)
1367 		cdclk_state->bypass = 50000;
1368 	else
1369 		cdclk_state->bypass = cdclk_state->ref;
1370 
1371 	if (cdclk_state->vco == 0) {
1372 		cdclk_state->cdclk = cdclk_state->bypass;
1373 		goto out;
1374 	}
1375 
1376 	divider = I915_READ(CDCLK_CTL) & BXT_CDCLK_CD2X_DIV_SEL_MASK;
1377 
1378 	switch (divider) {
1379 	case BXT_CDCLK_CD2X_DIV_SEL_1:
1380 		div = 2;
1381 		break;
1382 	case BXT_CDCLK_CD2X_DIV_SEL_1_5:
1383 		WARN(IS_GEMINILAKE(dev_priv) || INTEL_GEN(dev_priv) >= 10,
1384 		     "Unsupported divider\n");
1385 		div = 3;
1386 		break;
1387 	case BXT_CDCLK_CD2X_DIV_SEL_2:
1388 		div = 4;
1389 		break;
1390 	case BXT_CDCLK_CD2X_DIV_SEL_4:
1391 		WARN(INTEL_GEN(dev_priv) >= 10, "Unsupported divider\n");
1392 		div = 8;
1393 		break;
1394 	default:
1395 		MISSING_CASE(divider);
1396 		return;
1397 	}
1398 
1399 	cdclk_state->cdclk = DIV_ROUND_CLOSEST(cdclk_state->vco, div);
1400 
1401  out:
1402 	/*
1403 	 * Can't read this out :( Let's assume it's
1404 	 * at least what the CDCLK frequency requires.
1405 	 */
1406 	cdclk_state->voltage_level =
1407 		dev_priv->display.calc_voltage_level(cdclk_state->cdclk);
1408 }
1409 
1410 static void bxt_de_pll_disable(struct drm_i915_private *dev_priv)
1411 {
1412 	I915_WRITE(BXT_DE_PLL_ENABLE, 0);
1413 
1414 	/* Timeout 200us */
1415 	if (intel_de_wait_for_clear(dev_priv,
1416 				    BXT_DE_PLL_ENABLE, BXT_DE_PLL_LOCK, 1))
1417 		DRM_ERROR("timeout waiting for DE PLL unlock\n");
1418 
1419 	dev_priv->cdclk.hw.vco = 0;
1420 }
1421 
1422 static void bxt_de_pll_enable(struct drm_i915_private *dev_priv, int vco)
1423 {
1424 	int ratio = DIV_ROUND_CLOSEST(vco, dev_priv->cdclk.hw.ref);
1425 	u32 val;
1426 
1427 	val = I915_READ(BXT_DE_PLL_CTL);
1428 	val &= ~BXT_DE_PLL_RATIO_MASK;
1429 	val |= BXT_DE_PLL_RATIO(ratio);
1430 	I915_WRITE(BXT_DE_PLL_CTL, val);
1431 
1432 	I915_WRITE(BXT_DE_PLL_ENABLE, BXT_DE_PLL_PLL_ENABLE);
1433 
1434 	/* Timeout 200us */
1435 	if (intel_de_wait_for_set(dev_priv,
1436 				  BXT_DE_PLL_ENABLE, BXT_DE_PLL_LOCK, 1))
1437 		DRM_ERROR("timeout waiting for DE PLL lock\n");
1438 
1439 	dev_priv->cdclk.hw.vco = vco;
1440 }
1441 
1442 static void cnl_cdclk_pll_disable(struct drm_i915_private *dev_priv)
1443 {
1444 	u32 val;
1445 
1446 	val = I915_READ(BXT_DE_PLL_ENABLE);
1447 	val &= ~BXT_DE_PLL_PLL_ENABLE;
1448 	I915_WRITE(BXT_DE_PLL_ENABLE, val);
1449 
1450 	/* Timeout 200us */
1451 	if (wait_for((I915_READ(BXT_DE_PLL_ENABLE) & BXT_DE_PLL_LOCK) == 0, 1))
1452 		DRM_ERROR("timeout waiting for CDCLK PLL unlock\n");
1453 
1454 	dev_priv->cdclk.hw.vco = 0;
1455 }
1456 
1457 static void cnl_cdclk_pll_enable(struct drm_i915_private *dev_priv, int vco)
1458 {
1459 	int ratio = DIV_ROUND_CLOSEST(vco, dev_priv->cdclk.hw.ref);
1460 	u32 val;
1461 
1462 	val = CNL_CDCLK_PLL_RATIO(ratio);
1463 	I915_WRITE(BXT_DE_PLL_ENABLE, val);
1464 
1465 	val |= BXT_DE_PLL_PLL_ENABLE;
1466 	I915_WRITE(BXT_DE_PLL_ENABLE, val);
1467 
1468 	/* Timeout 200us */
1469 	if (wait_for((I915_READ(BXT_DE_PLL_ENABLE) & BXT_DE_PLL_LOCK) != 0, 1))
1470 		DRM_ERROR("timeout waiting for CDCLK PLL lock\n");
1471 
1472 	dev_priv->cdclk.hw.vco = vco;
1473 }
1474 
1475 static u32 bxt_cdclk_cd2x_pipe(struct drm_i915_private *dev_priv, enum pipe pipe)
1476 {
1477 	if (INTEL_GEN(dev_priv) >= 12) {
1478 		if (pipe == INVALID_PIPE)
1479 			return TGL_CDCLK_CD2X_PIPE_NONE;
1480 		else
1481 			return TGL_CDCLK_CD2X_PIPE(pipe);
1482 	} else if (INTEL_GEN(dev_priv) >= 11) {
1483 		if (pipe == INVALID_PIPE)
1484 			return ICL_CDCLK_CD2X_PIPE_NONE;
1485 		else
1486 			return ICL_CDCLK_CD2X_PIPE(pipe);
1487 	} else {
1488 		if (pipe == INVALID_PIPE)
1489 			return BXT_CDCLK_CD2X_PIPE_NONE;
1490 		else
1491 			return BXT_CDCLK_CD2X_PIPE(pipe);
1492 	}
1493 }
1494 
1495 static void bxt_set_cdclk(struct drm_i915_private *dev_priv,
1496 			  const struct intel_cdclk_state *cdclk_state,
1497 			  enum pipe pipe)
1498 {
1499 	int cdclk = cdclk_state->cdclk;
1500 	int vco = cdclk_state->vco;
1501 	u32 val, divider;
1502 	int ret;
1503 
1504 	/* Inform power controller of upcoming frequency change. */
1505 	if (INTEL_GEN(dev_priv) >= 10)
1506 		ret = skl_pcode_request(dev_priv, SKL_PCODE_CDCLK_CONTROL,
1507 					SKL_CDCLK_PREPARE_FOR_CHANGE,
1508 					SKL_CDCLK_READY_FOR_CHANGE,
1509 					SKL_CDCLK_READY_FOR_CHANGE, 3);
1510 	else
1511 		/*
1512 		 * BSpec requires us to wait up to 150usec, but that leads to
1513 		 * timeouts; the 2ms used here is based on experiment.
1514 		 */
1515 		ret = sandybridge_pcode_write_timeout(dev_priv,
1516 						      HSW_PCODE_DE_WRITE_FREQ_REQ,
1517 						      0x80000000, 150, 2);
1518 
1519 	if (ret) {
1520 		DRM_ERROR("Failed to inform PCU about cdclk change (err %d, freq %d)\n",
1521 			  ret, cdclk);
1522 		return;
1523 	}
1524 
1525 	/* cdclk = vco / 2 / div{1,1.5,2,4} */
1526 	switch (DIV_ROUND_CLOSEST(vco, cdclk)) {
1527 	default:
1528 		WARN_ON(cdclk != dev_priv->cdclk.hw.bypass);
1529 		WARN_ON(vco != 0);
1530 		/* fall through */
1531 	case 2:
1532 		divider = BXT_CDCLK_CD2X_DIV_SEL_1;
1533 		break;
1534 	case 3:
1535 		WARN(IS_GEMINILAKE(dev_priv) || INTEL_GEN(dev_priv) >= 10,
1536 		     "Unsupported divider\n");
1537 		divider = BXT_CDCLK_CD2X_DIV_SEL_1_5;
1538 		break;
1539 	case 4:
1540 		divider = BXT_CDCLK_CD2X_DIV_SEL_2;
1541 		break;
1542 	case 8:
1543 		WARN(INTEL_GEN(dev_priv) >= 10, "Unsupported divider\n");
1544 		divider = BXT_CDCLK_CD2X_DIV_SEL_4;
1545 		break;
1546 	}
1547 
1548 	if (INTEL_GEN(dev_priv) >= 10) {
1549 		if (dev_priv->cdclk.hw.vco != 0 &&
1550 		    dev_priv->cdclk.hw.vco != vco)
1551 			cnl_cdclk_pll_disable(dev_priv);
1552 
1553 		if (dev_priv->cdclk.hw.vco != vco)
1554 			cnl_cdclk_pll_enable(dev_priv, vco);
1555 
1556 	} else {
1557 		if (dev_priv->cdclk.hw.vco != 0 &&
1558 		    dev_priv->cdclk.hw.vco != vco)
1559 			bxt_de_pll_disable(dev_priv);
1560 
1561 		if (dev_priv->cdclk.hw.vco != vco)
1562 			bxt_de_pll_enable(dev_priv, vco);
1563 	}
1564 
1565 	val = divider | skl_cdclk_decimal(cdclk) |
1566 		bxt_cdclk_cd2x_pipe(dev_priv, pipe);
1567 
1568 	/*
1569 	 * Disable SSA Precharge when CD clock frequency < 500 MHz,
1570 	 * enable otherwise.
1571 	 */
1572 	if (IS_GEN9_LP(dev_priv) && cdclk >= 500000)
1573 		val |= BXT_CDCLK_SSA_PRECHARGE_ENABLE;
1574 	I915_WRITE(CDCLK_CTL, val);
1575 
1576 	if (pipe != INVALID_PIPE)
1577 		intel_wait_for_vblank(dev_priv, pipe);
1578 
1579 	if (INTEL_GEN(dev_priv) >= 10) {
1580 		ret = sandybridge_pcode_write(dev_priv, SKL_PCODE_CDCLK_CONTROL,
1581 					      cdclk_state->voltage_level);
1582 	} else {
1583 		/*
1584 		 * The timeout isn't specified; the 2ms used here is based on
1585 		 * experiment.
1586 		 * FIXME: Waiting for the request completion could be delayed
1587 		 * until the next PCODE request based on BSpec.
1588 		 */
1589 		ret = sandybridge_pcode_write_timeout(dev_priv,
1590 						      HSW_PCODE_DE_WRITE_FREQ_REQ,
1591 						      cdclk_state->voltage_level,
1592 						      150, 2);
1593 	}
1594 
1595 	if (ret) {
1596 		DRM_ERROR("PCode CDCLK freq set failed, (err %d, freq %d)\n",
1597 			  ret, cdclk);
1598 		return;
1599 	}
1600 
1601 	intel_update_cdclk(dev_priv);
1602 
1603 	if (INTEL_GEN(dev_priv) >= 10)
1604 		/*
1605 		 * Can't read out the voltage level :(
1606 		 * Let's just assume everything is as expected.
1607 		 */
1608 		dev_priv->cdclk.hw.voltage_level = cdclk_state->voltage_level;
1609 }
1610 
1611 static void bxt_sanitize_cdclk(struct drm_i915_private *dev_priv)
1612 {
1613 	u32 cdctl, expected;
1614 	int cdclk, vco;
1615 
1616 	intel_update_cdclk(dev_priv);
1617 	intel_dump_cdclk_state(&dev_priv->cdclk.hw, "Current CDCLK");
1618 
1619 	if (dev_priv->cdclk.hw.vco == 0 ||
1620 	    dev_priv->cdclk.hw.cdclk == dev_priv->cdclk.hw.bypass)
1621 		goto sanitize;
1622 
1623 	/* DPLL okay; verify the cdclock
1624 	 *
1625 	 * Some BIOS versions leave an incorrect decimal frequency value and
1626 	 * set reserved MBZ bits in CDCLK_CTL at least during exiting from S4,
1627 	 * so sanitize this register.
1628 	 */
1629 	cdctl = I915_READ(CDCLK_CTL);
1630 	/*
1631 	 * Let's ignore the pipe field, since BIOS could have configured the
1632 	 * dividers either synching to an active pipe, or asynchronously
1633 	 * (PIPE_NONE).
1634 	 */
1635 	cdctl &= ~bxt_cdclk_cd2x_pipe(dev_priv, INVALID_PIPE);
1636 
1637 	/* Make sure this is a legal cdclk value for the platform */
1638 	cdclk = bxt_calc_cdclk(dev_priv, dev_priv->cdclk.hw.cdclk);
1639 	if (cdclk != dev_priv->cdclk.hw.cdclk)
1640 		goto sanitize;
1641 
1642 	/* Make sure the VCO is correct for the cdclk */
1643 	vco = bxt_calc_cdclk_pll_vco(dev_priv, cdclk);
1644 	if (vco != dev_priv->cdclk.hw.vco)
1645 		goto sanitize;
1646 
1647 	expected = skl_cdclk_decimal(cdclk);
1648 
1649 	/* Figure out what CD2X divider we should be using for this cdclk */
1650 	switch (DIV_ROUND_CLOSEST(dev_priv->cdclk.hw.vco,
1651 				  dev_priv->cdclk.hw.cdclk)) {
1652 	case 2:
1653 		expected |= BXT_CDCLK_CD2X_DIV_SEL_1;
1654 		break;
1655 	case 3:
1656 		expected |= BXT_CDCLK_CD2X_DIV_SEL_1_5;
1657 		break;
1658 	case 4:
1659 		expected |= BXT_CDCLK_CD2X_DIV_SEL_2;
1660 		break;
1661 	case 8:
1662 		expected |= BXT_CDCLK_CD2X_DIV_SEL_4;
1663 		break;
1664 	default:
1665 		goto sanitize;
1666 	}
1667 
1668 	/*
1669 	 * Disable SSA Precharge when CD clock frequency < 500 MHz,
1670 	 * enable otherwise.
1671 	 */
1672 	if (IS_GEN9_LP(dev_priv) && dev_priv->cdclk.hw.cdclk >= 500000)
1673 		expected |= BXT_CDCLK_SSA_PRECHARGE_ENABLE;
1674 
1675 	if (cdctl == expected)
1676 		/* All well; nothing to sanitize */
1677 		return;
1678 
1679 sanitize:
1680 	DRM_DEBUG_KMS("Sanitizing cdclk programmed by pre-os\n");
1681 
1682 	/* force cdclk programming */
1683 	dev_priv->cdclk.hw.cdclk = 0;
1684 
1685 	/* force full PLL disable + enable */
1686 	dev_priv->cdclk.hw.vco = -1;
1687 }
1688 
1689 static void bxt_init_cdclk(struct drm_i915_private *dev_priv)
1690 {
1691 	struct intel_cdclk_state cdclk_state;
1692 
1693 	bxt_sanitize_cdclk(dev_priv);
1694 
1695 	if (dev_priv->cdclk.hw.cdclk != 0 &&
1696 	    dev_priv->cdclk.hw.vco != 0)
1697 		return;
1698 
1699 	cdclk_state = dev_priv->cdclk.hw;
1700 
1701 	/*
1702 	 * FIXME:
1703 	 * - The initial CDCLK needs to be read from VBT.
1704 	 *   Need to make this change after VBT has changes for BXT.
1705 	 */
1706 	cdclk_state.cdclk = bxt_calc_cdclk(dev_priv, 0);
1707 	cdclk_state.vco = bxt_calc_cdclk_pll_vco(dev_priv, cdclk_state.cdclk);
1708 	cdclk_state.voltage_level =
1709 		dev_priv->display.calc_voltage_level(cdclk_state.cdclk);
1710 
1711 	bxt_set_cdclk(dev_priv, &cdclk_state, INVALID_PIPE);
1712 }
1713 
1714 static void bxt_uninit_cdclk(struct drm_i915_private *dev_priv)
1715 {
1716 	struct intel_cdclk_state cdclk_state = dev_priv->cdclk.hw;
1717 
1718 	cdclk_state.cdclk = cdclk_state.bypass;
1719 	cdclk_state.vco = 0;
1720 	cdclk_state.voltage_level =
1721 		dev_priv->display.calc_voltage_level(cdclk_state.cdclk);
1722 
1723 	bxt_set_cdclk(dev_priv, &cdclk_state, INVALID_PIPE);
1724 }
1725 
1726 /**
1727  * intel_cdclk_init - Initialize CDCLK
1728  * @i915: i915 device
1729  *
1730  * Initialize CDCLK. This consists mainly of initializing dev_priv->cdclk.hw and
1731  * sanitizing the state of the hardware if needed. This is generally done only
1732  * during the display core initialization sequence, after which the DMC will
1733  * take care of turning CDCLK off/on as needed.
1734  */
1735 void intel_cdclk_init(struct drm_i915_private *i915)
1736 {
1737 	if (IS_GEN9_LP(i915) || INTEL_GEN(i915) >= 10)
1738 		bxt_init_cdclk(i915);
1739 	else if (IS_GEN9_BC(i915))
1740 		skl_init_cdclk(i915);
1741 }
1742 
1743 /**
1744  * intel_cdclk_uninit - Uninitialize CDCLK
1745  * @i915: i915 device
1746  *
1747  * Uninitialize CDCLK. This is done only during the display core
1748  * uninitialization sequence.
1749  */
1750 void intel_cdclk_uninit(struct drm_i915_private *i915)
1751 {
1752 	if (INTEL_GEN(i915) >= 10 || IS_GEN9_LP(i915))
1753 		bxt_uninit_cdclk(i915);
1754 	else if (IS_GEN9_BC(i915))
1755 		skl_uninit_cdclk(i915);
1756 }
1757 
1758 /**
1759  * intel_cdclk_needs_modeset - Determine if two CDCLK states require a modeset on all pipes
1760  * @a: first CDCLK state
1761  * @b: second CDCLK state
1762  *
1763  * Returns:
1764  * True if the CDCLK states require pipes to be off during reprogramming, false if not.
1765  */
1766 bool intel_cdclk_needs_modeset(const struct intel_cdclk_state *a,
1767 			       const struct intel_cdclk_state *b)
1768 {
1769 	return a->cdclk != b->cdclk ||
1770 		a->vco != b->vco ||
1771 		a->ref != b->ref;
1772 }
1773 
1774 /**
1775  * intel_cdclk_needs_cd2x_update - Determine if two CDCLK states require a cd2x divider update
1776  * @dev_priv: Not a CDCLK state, it's the drm_i915_private!
1777  * @a: first CDCLK state
1778  * @b: second CDCLK state
1779  *
1780  * Returns:
1781  * True if the CDCLK states require just a cd2x divider update, false if not.
1782  */
1783 static bool intel_cdclk_needs_cd2x_update(struct drm_i915_private *dev_priv,
1784 					  const struct intel_cdclk_state *a,
1785 					  const struct intel_cdclk_state *b)
1786 {
1787 	/* Older hw doesn't have the capability */
1788 	if (INTEL_GEN(dev_priv) < 10 && !IS_GEN9_LP(dev_priv))
1789 		return false;
1790 
1791 	return a->cdclk != b->cdclk &&
1792 		a->vco == b->vco &&
1793 		a->ref == b->ref;
1794 }
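/*
 * A cd2x-only update leaves the PLL (vco and ref) alone and changes just the
 * cdclk frequency, so only the CD2X divider needs reprogramming and, on the
 * platforms checked above, this can be synchronized against a single running
 * pipe (see intel_modeset_calc_cdclk()).
 */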
1795 
1796 /**
1797  * intel_cdclk_changed - Determine if two CDCLK states are different
1798  * @a: first CDCLK state
1799  * @b: second CDCLK state
1800  *
1801  * Returns:
1802  * True if the CDCLK states don't match, false if they do.
1803  */
1804 static bool intel_cdclk_changed(const struct intel_cdclk_state *a,
1805 				const struct intel_cdclk_state *b)
1806 {
1807 	return intel_cdclk_needs_modeset(a, b) ||
1808 		a->voltage_level != b->voltage_level;
1809 }
1810 
1811 /**
1812  * intel_cdclk_swap_state - make atomic CDCLK configuration effective
1813  * @state: atomic state
1814  *
1815  * This is the CDCLK version of drm_atomic_helper_swap_state() since the
1816  * helper does not handle driver-specific global state.
1817  *
1818  * Similarly to the atomic helpers this function does a complete swap,
1819  * i.e. it also puts the old state into @state. This is used by the commit
1820  * code to determine how CDCLK has changed (for instance did it increase or
1821  * decrease).
1822  */
1823 void intel_cdclk_swap_state(struct intel_atomic_state *state)
1824 {
1825 	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
1826 
1827 	swap(state->cdclk.logical, dev_priv->cdclk.logical);
1828 	swap(state->cdclk.actual, dev_priv->cdclk.actual);
1829 }
1830 
1831 void intel_dump_cdclk_state(const struct intel_cdclk_state *cdclk_state,
1832 			    const char *context)
1833 {
1834 	DRM_DEBUG_DRIVER("%s %d kHz, VCO %d kHz, ref %d kHz, bypass %d kHz, voltage level %d\n",
1835 			 context, cdclk_state->cdclk, cdclk_state->vco,
1836 			 cdclk_state->ref, cdclk_state->bypass,
1837 			 cdclk_state->voltage_level);
1838 }
1839 
1840 /**
1841  * intel_set_cdclk - Push the CDCLK state to the hardware
1842  * @dev_priv: i915 device
1843  * @cdclk_state: new CDCLK state
1844  * @pipe: pipe with which to synchronize the update
1845  *
1846  * Program the hardware based on the passed in CDCLK state,
1847  * if necessary.
1848  */
1849 static void intel_set_cdclk(struct drm_i915_private *dev_priv,
1850 			    const struct intel_cdclk_state *cdclk_state,
1851 			    enum pipe pipe)
1852 {
1853 	if (!intel_cdclk_changed(&dev_priv->cdclk.hw, cdclk_state))
1854 		return;
1855 
1856 	if (WARN_ON_ONCE(!dev_priv->display.set_cdclk))
1857 		return;
1858 
1859 	intel_dump_cdclk_state(cdclk_state, "Changing CDCLK to");
1860 
1861 	dev_priv->display.set_cdclk(dev_priv, cdclk_state, pipe);
1862 
1863 	if (WARN(intel_cdclk_changed(&dev_priv->cdclk.hw, cdclk_state),
1864 		 "cdclk state doesn't match!\n")) {
1865 		intel_dump_cdclk_state(&dev_priv->cdclk.hw, "[hw state]");
1866 		intel_dump_cdclk_state(cdclk_state, "[sw state]");
1867 	}
1868 }
1869 
1870 /**
1871  * intel_set_cdclk_pre_plane_update - Push the CDCLK state to the hardware
1872  * @dev_priv: i915 device
1873  * @old_state: old CDCLK state
1874  * @new_state: new CDCLK state
1875  * @pipe: pipe with which to synchronize the update
1876  *
1877  * Program the hardware before updating the HW plane state based on the passed
1878  * in CDCLK state, if necessary.
1879  */
1880 void
1881 intel_set_cdclk_pre_plane_update(struct drm_i915_private *dev_priv,
1882 				 const struct intel_cdclk_state *old_state,
1883 				 const struct intel_cdclk_state *new_state,
1884 				 enum pipe pipe)
1885 {
1886 	if (pipe == INVALID_PIPE || old_state->cdclk <= new_state->cdclk)
1887 		intel_set_cdclk(dev_priv, new_state, pipe);
1888 }
1889 
1890 /**
1891  * intel_set_cdclk_post_plane_update - Push the CDCLK state to the hardware
1892  * @dev_priv: i915 device
1893  * @old_state: old CDCLK state
1894  * @new_state: new CDCLK state
1895  * @pipe: pipe with which to synchronize the update
1896  *
1897  * Program the hardware after updating the HW plane state based on the passed
1898  * in CDCLK state, if necessary.
1899  */
1900 void
1901 intel_set_cdclk_post_plane_update(struct drm_i915_private *dev_priv,
1902 				  const struct intel_cdclk_state *old_state,
1903 				  const struct intel_cdclk_state *new_state,
1904 				  enum pipe pipe)
1905 {
1906 	if (pipe != INVALID_PIPE && old_state->cdclk > new_state->cdclk)
1907 		intel_set_cdclk(dev_priv, new_state, pipe);
1908 }
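/*
 * Together the two helpers above guarantee that cdclk is only ever raised
 * before the plane state is written and only ever lowered after it, so the
 * clock stays high enough for whatever planes are active at any point.
 * With pipe == INVALID_PIPE the whole update happens up front.
 */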
1909 
1910 static int intel_pixel_rate_to_cdclk(const struct intel_crtc_state *crtc_state)
1911 {
1912 	struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
1913 	int pixel_rate = crtc_state->pixel_rate;
1914 
1915 	if (INTEL_GEN(dev_priv) >= 10 || IS_GEMINILAKE(dev_priv))
1916 		return DIV_ROUND_UP(pixel_rate, 2);
1917 	else if (IS_GEN(dev_priv, 9) ||
1918 		 IS_BROADWELL(dev_priv) || IS_HASWELL(dev_priv))
1919 		return pixel_rate;
1920 	else if (IS_CHERRYVIEW(dev_priv))
1921 		return DIV_ROUND_UP(pixel_rate * 100, 95);
1922 	else if (crtc_state->double_wide)
1923 		return DIV_ROUND_UP(pixel_rate * 100, 90 * 2);
1924 	else
1925 		return DIV_ROUND_UP(pixel_rate * 100, 90);
1926 }
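/*
 * In other words: GLK and gen10+ can push two pixels per CDCLK cycle,
 * HSW/BDW/GEN9 need cdclk to match the pixel rate 1:1, CHV needs ~5%
 * headroom and older platforms ~10% (halved again for double wide pipes).
 * For example a 280000 kHz pixel rate requires at least 140000 kHz on
 * GLK/gen10+ but DIV_ROUND_UP(280000 * 100, 90) = 311112 kHz on an old
 * single wide pipe.
 */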
1927 
1928 static int intel_planes_min_cdclk(const struct intel_crtc_state *crtc_state)
1929 {
1930 	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
1931 	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
1932 	struct intel_plane *plane;
1933 	int min_cdclk = 0;
1934 
1935 	for_each_intel_plane_on_crtc(&dev_priv->drm, crtc, plane)
1936 		min_cdclk = max(crtc_state->min_cdclk[plane->id], min_cdclk);
1937 
1938 	return min_cdclk;
1939 }
1940 
1941 int intel_crtc_compute_min_cdclk(const struct intel_crtc_state *crtc_state)
1942 {
1943 	struct drm_i915_private *dev_priv =
1944 		to_i915(crtc_state->uapi.crtc->dev);
1945 	int min_cdclk;
1946 
1947 	if (!crtc_state->hw.enable)
1948 		return 0;
1949 
1950 	min_cdclk = intel_pixel_rate_to_cdclk(crtc_state);
1951 
1952 	/* pixel rate mustn't exceed 95% of cdclk with IPS on BDW */
1953 	if (IS_BROADWELL(dev_priv) && hsw_crtc_state_ips_capable(crtc_state))
1954 		min_cdclk = DIV_ROUND_UP(min_cdclk * 100, 95);
1955 
1956 	/* BSpec says "Do not use DisplayPort with CDCLK less than 432 MHz,
1957 	 * audio enabled, port width x4, and link rate HBR2 (5.4 GHz), or else
1958 	 * there may be audio corruption or screen corruption." This cdclk
1959 	 * restriction for GLK is 316.8 MHz.
1960 	 */
1961 	if (intel_crtc_has_dp_encoder(crtc_state) &&
1962 	    crtc_state->has_audio &&
1963 	    crtc_state->port_clock >= 540000 &&
1964 	    crtc_state->lane_count == 4) {
1965 		if (IS_CANNONLAKE(dev_priv) || IS_GEMINILAKE(dev_priv)) {
1966 			/* Display WA #1145: glk,cnl */
1967 			min_cdclk = max(316800, min_cdclk);
1968 		} else if (IS_GEN(dev_priv, 9) || IS_BROADWELL(dev_priv)) {
1969 			/* Display WA #1144: skl,bxt */
1970 			min_cdclk = max(432000, min_cdclk);
1971 		}
1972 	}
1973 
1974 	/*
1975 	 * According to BSpec, "The CD clock frequency must be at least twice
1976 	 * the frequency of the Azalia BCLK." and BCLK is 96 MHz by default.
1977 	 */
1978 	if (crtc_state->has_audio && INTEL_GEN(dev_priv) >= 9)
1979 		min_cdclk = max(2 * 96000, min_cdclk);
1980 
1981 	/*
1982 	 * "For DP audio configuration, cdclk frequency shall be set to
1983 	 *  meet the following requirements:
1984 	 *  DP Link Frequency(MHz) | Cdclk frequency(MHz)
1985 	 *  270                    | 320 or higher
1986 	 *  162                    | 200 or higher"
1987 	 */
1988 	if ((IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv)) &&
1989 	    intel_crtc_has_dp_encoder(crtc_state) && crtc_state->has_audio)
1990 		min_cdclk = max(crtc_state->port_clock, min_cdclk);
1991 
1992 	/*
1993 	 * On Valleyview some DSI panels lose (v|h)sync when the clock is lower
1994 	 * than 320000 kHz.
1995 	 */
1996 	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DSI) &&
1997 	    IS_VALLEYVIEW(dev_priv))
1998 		min_cdclk = max(320000, min_cdclk);
1999 
2000 	/*
2001 	 * On Geminilake, once the CDCLK gets as low as 79200 kHz the
2002 	 * picture becomes unstable, even though the values are
2003 	 * correct for both the DSI PLL and the DE PLL.
2004 	 */
2005 	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DSI) &&
2006 	    IS_GEMINILAKE(dev_priv))
2007 		min_cdclk = max(158400, min_cdclk);
2008 
2009 	/* Account for additional needs from the planes */
2010 	min_cdclk = max(intel_planes_min_cdclk(crtc_state), min_cdclk);
2011 
2012 	/*
2013 	 * HACK. Currently for TGL platforms we calculate
2014 	 * min_cdclk initially based on pixel_rate divided
2015 	 * by 2, also accounting for plane requirements.
2016 	 * However, in some cases the lowest possible CDCLK
2017 	 * doesn't work and causes underruns, so require the
2018 	 * full pixel rate instead. This is a workaround
2019 	 * rather than a final solution.
2020 	 */
2021 	if (IS_TIGERLAKE(dev_priv))
2022 		min_cdclk = max(min_cdclk, (int)crtc_state->pixel_rate);
2023 
2024 	if (min_cdclk > dev_priv->max_cdclk_freq) {
2025 		DRM_DEBUG_KMS("required cdclk (%d kHz) exceeds max (%d kHz)\n",
2026 			      min_cdclk, dev_priv->max_cdclk_freq);
2027 		return -EINVAL;
2028 	}
2029 
2030 	return min_cdclk;
2031 }
2032 
2033 static int intel_compute_min_cdclk(struct intel_atomic_state *state)
2034 {
2035 	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
2036 	struct intel_crtc *crtc;
2037 	struct intel_crtc_state *crtc_state;
2038 	int min_cdclk, i;
2039 	enum pipe pipe;
2040 
2041 	memcpy(state->min_cdclk, dev_priv->min_cdclk,
2042 	       sizeof(state->min_cdclk));
2043 
2044 	for_each_new_intel_crtc_in_state(state, crtc, crtc_state, i) {
2045 		int ret;
2046 
2047 		min_cdclk = intel_crtc_compute_min_cdclk(crtc_state);
2048 		if (min_cdclk < 0)
2049 			return min_cdclk;
2050 
2051 		if (state->min_cdclk[i] == min_cdclk)
2052 			continue;
2053 
2054 		state->min_cdclk[i] = min_cdclk;
2055 
2056 		ret = intel_atomic_lock_global_state(state);
2057 		if (ret)
2058 			return ret;
2059 	}
2060 
2061 	min_cdclk = state->cdclk.force_min_cdclk;
2062 	for_each_pipe(dev_priv, pipe)
2063 		min_cdclk = max(state->min_cdclk[pipe], min_cdclk);
2064 
2065 	return min_cdclk;
2066 }
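/*
 * state->min_cdclk[] holds one requirement per pipe. Only the crtcs in the
 * atomic state are recomputed; the others keep the values cached in
 * dev_priv->min_cdclk, and the global state lock is taken only when a
 * pipe's requirement actually changes. The result is the maximum over all
 * pipes and the externally requested force_min_cdclk.
 */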
2067 
2068 /*
2069  * Account for port clock min voltage level requirements.
2070  * This only really does something on CNL+ but can be
2071  * called on earlier platforms as well.
2072  *
2073  * Note that this function assumes that 0 is
2074  * the lowest voltage value, and higher values
2075  * correspond to increasingly higher voltages.
2076  *
2077  * Should that relationship no longer hold on
2078  * future platforms this code will need to be
2079  * adjusted.
2080  */
2081 static int bxt_compute_min_voltage_level(struct intel_atomic_state *state)
2082 {
2083 	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
2084 	struct intel_crtc *crtc;
2085 	struct intel_crtc_state *crtc_state;
2086 	u8 min_voltage_level;
2087 	int i;
2088 	enum pipe pipe;
2089 
2090 	memcpy(state->min_voltage_level, dev_priv->min_voltage_level,
2091 	       sizeof(state->min_voltage_level));
2092 
2093 	for_each_new_intel_crtc_in_state(state, crtc, crtc_state, i) {
2094 		int ret;
2095 
2096 		if (crtc_state->hw.enable)
2097 			min_voltage_level = crtc_state->min_voltage_level;
2098 		else
2099 			min_voltage_level = 0;
2100 
2101 		if (state->min_voltage_level[i] == min_voltage_level)
2102 			continue;
2103 
2104 		state->min_voltage_level[i] = min_voltage_level;
2105 
2106 		ret = intel_atomic_lock_global_state(state);
2107 		if (ret)
2108 			return ret;
2109 	}
2110 
2111 	min_voltage_level = 0;
2112 	for_each_pipe(dev_priv, pipe)
2113 		min_voltage_level = max(state->min_voltage_level[pipe],
2114 					min_voltage_level);
2115 
2116 	return min_voltage_level;
2117 }
2118 
2119 static int vlv_modeset_calc_cdclk(struct intel_atomic_state *state)
2120 {
2121 	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
2122 	int min_cdclk, cdclk;
2123 
2124 	min_cdclk = intel_compute_min_cdclk(state);
2125 	if (min_cdclk < 0)
2126 		return min_cdclk;
2127 
2128 	cdclk = vlv_calc_cdclk(dev_priv, min_cdclk);
2129 
2130 	state->cdclk.logical.cdclk = cdclk;
2131 	state->cdclk.logical.voltage_level =
2132 		vlv_calc_voltage_level(dev_priv, cdclk);
2133 
2134 	if (!state->active_pipes) {
2135 		cdclk = vlv_calc_cdclk(dev_priv, state->cdclk.force_min_cdclk);
2136 
2137 		state->cdclk.actual.cdclk = cdclk;
2138 		state->cdclk.actual.voltage_level =
2139 			vlv_calc_voltage_level(dev_priv, cdclk);
2140 	} else {
2141 		state->cdclk.actual = state->cdclk.logical;
2142 	}
2143 
2144 	return 0;
2145 }
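/*
 * Note the logical vs. actual split used by all the modeset_calc_cdclk()
 * variants: the logical state is what the current configuration requires,
 * while the actual state is what gets programmed, which may drop to the
 * force_min_cdclk based value whenever no pipes are active.
 */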
2146 
2147 static int bdw_modeset_calc_cdclk(struct intel_atomic_state *state)
2148 {
2149 	int min_cdclk, cdclk;
2150 
2151 	min_cdclk = intel_compute_min_cdclk(state);
2152 	if (min_cdclk < 0)
2153 		return min_cdclk;
2154 
2155 	/*
2156 	 * FIXME should also account for plane ratio
2157 	 * once 64bpp pixel formats are supported.
2158 	 */
2159 	cdclk = bdw_calc_cdclk(min_cdclk);
2160 
2161 	state->cdclk.logical.cdclk = cdclk;
2162 	state->cdclk.logical.voltage_level =
2163 		bdw_calc_voltage_level(cdclk);
2164 
2165 	if (!state->active_pipes) {
2166 		cdclk = bdw_calc_cdclk(state->cdclk.force_min_cdclk);
2167 
2168 		state->cdclk.actual.cdclk = cdclk;
2169 		state->cdclk.actual.voltage_level =
2170 			bdw_calc_voltage_level(cdclk);
2171 	} else {
2172 		state->cdclk.actual = state->cdclk.logical;
2173 	}
2174 
2175 	return 0;
2176 }
2177 
2178 static int skl_dpll0_vco(struct intel_atomic_state *state)
2179 {
2180 	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
2181 	struct intel_crtc *crtc;
2182 	struct intel_crtc_state *crtc_state;
2183 	int vco, i;
2184 
2185 	vco = state->cdclk.logical.vco;
2186 	if (!vco)
2187 		vco = dev_priv->skl_preferred_vco_freq;
2188 
2189 	for_each_new_intel_crtc_in_state(state, crtc, crtc_state, i) {
2190 		if (!crtc_state->hw.enable)
2191 			continue;
2192 
2193 		if (!intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
2194 			continue;
2195 
2196 		/*
2197 		 * DPLL0 VCO may need to be adjusted to get the correct
2198 		 * clock for eDP. This will affect cdclk as well.
2199 		 */
2200 		switch (crtc_state->port_clock / 2) {
2201 		case 108000:
2202 		case 216000:
2203 			vco = 8640000;
2204 			break;
2205 		default:
2206 			vco = 8100000;
2207 			break;
2208 		}
2209 	}
2210 
2211 	return vco;
2212 }
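/*
 * In other words, an eDP output with a port clock of 216000 or 432000 kHz
 * (port_clock / 2 of 108000 or 216000) forces DPLL0 to the 8640 MHz VCO,
 * any other eDP rate selects 8100 MHz, and without eDP in the state the
 * previously computed or preferred VCO is kept.
 */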
2213 
2214 static int skl_modeset_calc_cdclk(struct intel_atomic_state *state)
2215 {
2216 	int min_cdclk, cdclk, vco;
2217 
2218 	min_cdclk = intel_compute_min_cdclk(state);
2219 	if (min_cdclk < 0)
2220 		return min_cdclk;
2221 
2222 	vco = skl_dpll0_vco(state);
2223 
2224 	/*
2225 	 * FIXME should also account for plane ratio
2226 	 * once 64bpp pixel formats are supported.
2227 	 */
2228 	cdclk = skl_calc_cdclk(min_cdclk, vco);
2229 
2230 	state->cdclk.logical.vco = vco;
2231 	state->cdclk.logical.cdclk = cdclk;
2232 	state->cdclk.logical.voltage_level =
2233 		skl_calc_voltage_level(cdclk);
2234 
2235 	if (!state->active_pipes) {
2236 		cdclk = skl_calc_cdclk(state->cdclk.force_min_cdclk, vco);
2237 
2238 		state->cdclk.actual.vco = vco;
2239 		state->cdclk.actual.cdclk = cdclk;
2240 		state->cdclk.actual.voltage_level =
2241 			skl_calc_voltage_level(cdclk);
2242 	} else {
2243 		state->cdclk.actual = state->cdclk.logical;
2244 	}
2245 
2246 	return 0;
2247 }
2248 
2249 static int bxt_modeset_calc_cdclk(struct intel_atomic_state *state)
2250 {
2251 	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
2252 	int min_cdclk, min_voltage_level, cdclk, vco;
2253 
2254 	min_cdclk = intel_compute_min_cdclk(state);
2255 	if (min_cdclk < 0)
2256 		return min_cdclk;
2257 
2258 	min_voltage_level = bxt_compute_min_voltage_level(state);
2259 	if (min_voltage_level < 0)
2260 		return min_voltage_level;
2261 
2262 	cdclk = bxt_calc_cdclk(dev_priv, min_cdclk);
2263 	vco = bxt_calc_cdclk_pll_vco(dev_priv, cdclk);
2264 
2265 	state->cdclk.logical.vco = vco;
2266 	state->cdclk.logical.cdclk = cdclk;
2267 	state->cdclk.logical.voltage_level =
2268 		max_t(int, min_voltage_level,
2269 		      dev_priv->display.calc_voltage_level(cdclk));
2270 
2271 	if (!state->active_pipes) {
2272 		cdclk = bxt_calc_cdclk(dev_priv, state->cdclk.force_min_cdclk);
2273 		vco = bxt_calc_cdclk_pll_vco(dev_priv, cdclk);
2274 
2275 		state->cdclk.actual.vco = vco;
2276 		state->cdclk.actual.cdclk = cdclk;
2277 		state->cdclk.actual.voltage_level =
2278 			dev_priv->display.calc_voltage_level(cdclk);
2279 	} else {
2280 		state->cdclk.actual = state->cdclk.logical;
2281 	}
2282 
2283 	return 0;
2284 }
2285 
2286 static int intel_modeset_all_pipes(struct intel_atomic_state *state)
2287 {
2288 	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
2289 	struct intel_crtc *crtc;
2290 
2291 	/*
2292 	 * Add all pipes to the state, and force
2293 	 * a modeset on all the active ones.
2294 	 */
2295 	for_each_intel_crtc(&dev_priv->drm, crtc) {
2296 		struct intel_crtc_state *crtc_state;
2297 		int ret;
2298 
2299 		crtc_state = intel_atomic_get_crtc_state(&state->base, crtc);
2300 		if (IS_ERR(crtc_state))
2301 			return PTR_ERR(crtc_state);
2302 
2303 		if (!crtc_state->hw.active ||
2304 		    drm_atomic_crtc_needs_modeset(&crtc_state->uapi))
2305 			continue;
2306 
2307 		crtc_state->uapi.mode_changed = true;
2308 
2309 		ret = drm_atomic_add_affected_connectors(&state->base,
2310 							 &crtc->base);
2311 		if (ret)
2312 			return ret;
2313 
2314 		ret = drm_atomic_add_affected_planes(&state->base,
2315 						     &crtc->base);
2316 		if (ret)
2317 			return ret;
2318 
2319 		crtc_state->update_planes |= crtc_state->active_planes;
2320 	}
2321 
2322 	return 0;
2323 }
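/*
 * Forcing uapi.mode_changed on every active crtc, and pulling their
 * connectors and planes into the state, makes the commit code perform a
 * full modeset on all pipes so they are shut down around the cdclk change.
 */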
2324 
2325 static int fixed_modeset_calc_cdclk(struct intel_atomic_state *state)
2326 {
2327 	int min_cdclk;
2328 
2329 	/*
2330 	 * We can't change the cdclk frequency, but we still want to
2331 	 * check that the required minimum frequency doesn't exceed
2332 	 * the actual cdclk frequency.
2333 	 */
2334 	min_cdclk = intel_compute_min_cdclk(state);
2335 	if (min_cdclk < 0)
2336 		return min_cdclk;
2337 
2338 	return 0;
2339 }
2340 
2341 int intel_modeset_calc_cdclk(struct intel_atomic_state *state)
2342 {
2343 	struct drm_i915_private *dev_priv = to_i915(state->base.dev);
2344 	enum pipe pipe;
2345 	int ret;
2346 
2347 	ret = dev_priv->display.modeset_calc_cdclk(state);
2348 	if (ret)
2349 		return ret;
2350 
2351 	/*
2352 	 * Writes to dev_priv->cdclk.{actual,logical} must be protected
2353 	 * by holding all the crtc mutexes even if we don't end up
2354 	 * touching the hardware.
2355 	 */
2356 	if (intel_cdclk_changed(&dev_priv->cdclk.actual,
2357 				&state->cdclk.actual)) {
2358 		/*
2359 		 * Also serialize commits across all crtcs
2360 		 * if the actual hw needs to be poked.
2361 		 */
2362 		ret = intel_atomic_serialize_global_state(state);
2363 		if (ret)
2364 			return ret;
2365 	} else if (intel_cdclk_changed(&dev_priv->cdclk.logical,
2366 				       &state->cdclk.logical)) {
2367 		ret = intel_atomic_lock_global_state(state);
2368 		if (ret)
2369 			return ret;
2370 	} else {
2371 		return 0;
2372 	}
2373 
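	/*
	 * active_pipes being a power of two means exactly one pipe is
	 * active.  In that case a cd2x-only cdclk change can be
	 * synchronized against that pipe instead of forcing a full
	 * modeset, provided the pipe isn't undergoing a modeset anyway.
	 */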
2374 	if (is_power_of_2(state->active_pipes) &&
2375 	    intel_cdclk_needs_cd2x_update(dev_priv,
2376 					  &dev_priv->cdclk.actual,
2377 					  &state->cdclk.actual)) {
2378 		struct intel_crtc *crtc;
2379 		struct intel_crtc_state *crtc_state;
2380 
2381 		pipe = ilog2(state->active_pipes);
2382 		crtc = intel_get_crtc_for_pipe(dev_priv, pipe);
2383 
2384 		crtc_state = intel_atomic_get_crtc_state(&state->base, crtc);
2385 		if (IS_ERR(crtc_state))
2386 			return PTR_ERR(crtc_state);
2387 
2388 		if (drm_atomic_crtc_needs_modeset(&crtc_state->uapi))
2389 			pipe = INVALID_PIPE;
2390 	} else {
2391 		pipe = INVALID_PIPE;
2392 	}
2393 
2394 	if (pipe != INVALID_PIPE) {
2395 		state->cdclk.pipe = pipe;
2396 
2397 		DRM_DEBUG_KMS("Can change cdclk with pipe %c active\n",
2398 			      pipe_name(pipe));
2399 	} else if (intel_cdclk_needs_modeset(&dev_priv->cdclk.actual,
2400 					     &state->cdclk.actual)) {
2401 		/* All pipes must be switched off while we change the cdclk. */
2402 		ret = intel_modeset_all_pipes(state);
2403 		if (ret)
2404 			return ret;
2405 
2406 		state->cdclk.pipe = INVALID_PIPE;
2407 
2408 		DRM_DEBUG_KMS("Modeset required for cdclk change\n");
2409 	}
2410 
2411 	DRM_DEBUG_KMS("New cdclk calculated to be logical %u kHz, actual %u kHz\n",
2412 		      state->cdclk.logical.cdclk,
2413 		      state->cdclk.actual.cdclk);
2414 	DRM_DEBUG_KMS("New voltage level calculated to be logical %u, actual %u\n",
2415 		      state->cdclk.logical.voltage_level,
2416 		      state->cdclk.actual.voltage_level);
2417 
2418 	return 0;
2419 }
2420 
2421 static int intel_compute_max_dotclk(struct drm_i915_private *dev_priv)
2422 {
2423 	int max_cdclk_freq = dev_priv->max_cdclk_freq;
2424 
2425 	if (INTEL_GEN(dev_priv) >= 10 || IS_GEMINILAKE(dev_priv))
2426 		return 2 * max_cdclk_freq;
2427 	else if (IS_GEN(dev_priv, 9) ||
2428 		 IS_BROADWELL(dev_priv) || IS_HASWELL(dev_priv))
2429 		return max_cdclk_freq;
2430 	else if (IS_CHERRYVIEW(dev_priv))
2431 		return max_cdclk_freq*95/100;
2432 	else if (INTEL_GEN(dev_priv) < 4)
2433 		return 2*max_cdclk_freq*90/100;
2434 	else
2435 		return max_cdclk_freq*90/100;
2436 }
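/*
 * This is essentially intel_pixel_rate_to_cdclk() in reverse: two pixels
 * per CDCLK cycle on GLK/gen10+, 1:1 on HSW/BDW/GEN9, a 5% guardband on
 * CHV and a 10% guardband otherwise, with the pre-gen4 factor of two
 * matching the double wide pipe case above.
 */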
2437 
2438 /**
2439  * intel_update_max_cdclk - Determine the maximum supported CDCLK frequency
2440  * @dev_priv: i915 device
2441  *
2442  * Determine the maximum CDCLK frequency the platform supports, and also
2443  * derive the maximum dot clock frequency the maximum CDCLK frequency
2444  * allows.
2445  */
2446 void intel_update_max_cdclk(struct drm_i915_private *dev_priv)
2447 {
2448 	if (IS_ELKHARTLAKE(dev_priv)) {
2449 		if (dev_priv->cdclk.hw.ref == 24000)
2450 			dev_priv->max_cdclk_freq = 552000;
2451 		else
2452 			dev_priv->max_cdclk_freq = 556800;
2453 	} else if (INTEL_GEN(dev_priv) >= 11) {
2454 		if (dev_priv->cdclk.hw.ref == 24000)
2455 			dev_priv->max_cdclk_freq = 648000;
2456 		else
2457 			dev_priv->max_cdclk_freq = 652800;
2458 	} else if (IS_CANNONLAKE(dev_priv)) {
2459 		dev_priv->max_cdclk_freq = 528000;
2460 	} else if (IS_GEN9_BC(dev_priv)) {
2461 		u32 limit = I915_READ(SKL_DFSM) & SKL_DFSM_CDCLK_LIMIT_MASK;
2462 		int max_cdclk, vco;
2463 
2464 		vco = dev_priv->skl_preferred_vco_freq;
2465 		WARN_ON(vco != 8100000 && vco != 8640000);
2466 
2467 		/*
2468 		 * Use the lower (vco 8640) cdclk values as a
2469 		 * first guess. skl_calc_cdclk() will correct it
2470 		 * if the preferred vco is 8100 instead.
2471 		 */
2472 		if (limit == SKL_DFSM_CDCLK_LIMIT_675)
2473 			max_cdclk = 617143;
2474 		else if (limit == SKL_DFSM_CDCLK_LIMIT_540)
2475 			max_cdclk = 540000;
2476 		else if (limit == SKL_DFSM_CDCLK_LIMIT_450)
2477 			max_cdclk = 432000;
2478 		else
2479 			max_cdclk = 308571;
2480 
2481 		dev_priv->max_cdclk_freq = skl_calc_cdclk(max_cdclk, vco);
2482 	} else if (IS_GEMINILAKE(dev_priv)) {
2483 		dev_priv->max_cdclk_freq = 316800;
2484 	} else if (IS_BROXTON(dev_priv)) {
2485 		dev_priv->max_cdclk_freq = 624000;
2486 	} else if (IS_BROADWELL(dev_priv))  {
2487 		/*
2488 		 * FIXME with extra cooling we can allow
2489 		 * 540 MHz for ULX and 675 MHz for ULT.
2490 		 * How can we know if extra cooling is
2491 		 * available? PCI ID, VBT, something else?
2492 		 */
2493 		if (I915_READ(FUSE_STRAP) & HSW_CDCLK_LIMIT)
2494 			dev_priv->max_cdclk_freq = 450000;
2495 		else if (IS_BDW_ULX(dev_priv))
2496 			dev_priv->max_cdclk_freq = 450000;
2497 		else if (IS_BDW_ULT(dev_priv))
2498 			dev_priv->max_cdclk_freq = 540000;
2499 		else
2500 			dev_priv->max_cdclk_freq = 675000;
2501 	} else if (IS_CHERRYVIEW(dev_priv)) {
2502 		dev_priv->max_cdclk_freq = 320000;
2503 	} else if (IS_VALLEYVIEW(dev_priv)) {
2504 		dev_priv->max_cdclk_freq = 400000;
2505 	} else {
2506 		/* otherwise assume cdclk is fixed */
2507 		dev_priv->max_cdclk_freq = dev_priv->cdclk.hw.cdclk;
2508 	}
2509 
2510 	dev_priv->max_dotclk_freq = intel_compute_max_dotclk(dev_priv);
2511 
2512 	DRM_DEBUG_DRIVER("Max CD clock rate: %d kHz\n",
2513 			 dev_priv->max_cdclk_freq);
2514 
2515 	DRM_DEBUG_DRIVER("Max dotclock rate: %d kHz\n",
2516 			 dev_priv->max_dotclk_freq);
2517 }
2518 
2519 /**
2520  * intel_update_cdclk - Determine the current CDCLK frequency
2521  * @dev_priv: i915 device
2522  *
2523  * Determine the current CDCLK frequency.
2524  */
2525 void intel_update_cdclk(struct drm_i915_private *dev_priv)
2526 {
2527 	dev_priv->display.get_cdclk(dev_priv, &dev_priv->cdclk.hw);
2528 
2529 	/*
2530 	 * 9:0 CMBUS [sic] CDCLK frequency (cdfreq):
2531 	 * Programmng [sic] note: bit[9:2] should be programmed to the number
2532 	 * of cdclk that generates 4MHz reference clock freq which is used to
2533 	 * generate GMBus clock. This will vary with the cdclk freq.
2534 	 */
2535 	if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
2536 		I915_WRITE(GMBUSFREQ_VLV,
2537 			   DIV_ROUND_UP(dev_priv->cdclk.hw.cdclk, 1000));
2538 }
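/*
 * E.g. a 320000 kHz cdclk results in GMBUSFREQ_VLV being programmed to
 * DIV_ROUND_UP(320000, 1000) = 320.
 */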
2539 
2540 static int cnp_rawclk(struct drm_i915_private *dev_priv)
2541 {
2542 	u32 rawclk;
2543 	int divider, fraction;
2544 
2545 	if (I915_READ(SFUSE_STRAP) & SFUSE_STRAP_RAW_FREQUENCY) {
2546 		/* 24 MHz */
2547 		divider = 24000;
2548 		fraction = 0;
2549 	} else {
2550 		/* 19.2 MHz */
2551 		divider = 19000;
2552 		fraction = 200;
2553 	}
2554 
2555 	rawclk = CNP_RAWCLK_DIV(divider / 1000);
2556 	if (fraction) {
2557 		int numerator = 1;
2558 
2559 		rawclk |= CNP_RAWCLK_DEN(DIV_ROUND_CLOSEST(numerator * 1000,
2560 							   fraction) - 1);
2561 		if (INTEL_PCH_TYPE(dev_priv) >= PCH_ICP)
2562 			rawclk |= ICP_RAWCLK_NUM(numerator);
2563 	}
2564 
2565 	I915_WRITE(PCH_RAWCLK_FREQ, rawclk);
2566 	return divider + fraction;
2567 }
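/*
 * The return value is in kHz (divider + fraction, e.g. 19000 + 200 =
 * 19200 kHz for the 19.2 MHz case), while the register takes the integer
 * MHz divider plus a fractional numerator/denominator (the numerator
 * field is only programmed on ICP+): for 19.2 MHz that is 19 + 1/5 MHz,
 * with the denominator field programmed as 5 - 1 = 4.
 */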
2568 
2569 static int pch_rawclk(struct drm_i915_private *dev_priv)
2570 {
2571 	return (I915_READ(PCH_RAWCLK_FREQ) & RAWCLK_FREQ_MASK) * 1000;
2572 }
2573 
2574 static int vlv_hrawclk(struct drm_i915_private *dev_priv)
2575 {
2576 	/* RAWCLK_FREQ_VLV register updated from power well code */
2577 	return vlv_get_cck_clock_hpll(dev_priv, "hrawclk",
2578 				      CCK_DISPLAY_REF_CLOCK_CONTROL);
2579 }
2580 
2581 static int g4x_hrawclk(struct drm_i915_private *dev_priv)
2582 {
2583 	u32 clkcfg;
2584 
2585 	/* hrawclock is 1/4 the FSB frequency */
2586 	clkcfg = I915_READ(CLKCFG);
2587 	switch (clkcfg & CLKCFG_FSB_MASK) {
2588 	case CLKCFG_FSB_400:
2589 		return 100000;
2590 	case CLKCFG_FSB_533:
2591 		return 133333;
2592 	case CLKCFG_FSB_667:
2593 		return 166667;
2594 	case CLKCFG_FSB_800:
2595 		return 200000;
2596 	case CLKCFG_FSB_1067:
2597 	case CLKCFG_FSB_1067_ALT:
2598 		return 266667;
2599 	case CLKCFG_FSB_1333:
2600 	case CLKCFG_FSB_1333_ALT:
2601 		return 333333;
2602 	default:
2603 		return 133333;
2604 	}
2605 }
2606 
2607 /**
2608  * intel_update_rawclk - Determine the current RAWCLK frequency
2609  * @dev_priv: i915 device
2610  *
2611  * Determine the current RAWCLK frequency. RAWCLK is a fixed
2612  * frequency clock, so this needs to be done only once.
2613  */
2614 void intel_update_rawclk(struct drm_i915_private *dev_priv)
2615 {
2616 	if (INTEL_PCH_TYPE(dev_priv) >= PCH_CNP)
2617 		dev_priv->rawclk_freq = cnp_rawclk(dev_priv);
2618 	else if (HAS_PCH_SPLIT(dev_priv))
2619 		dev_priv->rawclk_freq = pch_rawclk(dev_priv);
2620 	else if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
2621 		dev_priv->rawclk_freq = vlv_hrawclk(dev_priv);
2622 	else if (IS_G4X(dev_priv) || IS_PINEVIEW(dev_priv))
2623 		dev_priv->rawclk_freq = g4x_hrawclk(dev_priv);
2624 	else
2625 		/* no rawclk on other platforms, or no need to know it */
2626 		return;
2627 
2628 	DRM_DEBUG_DRIVER("rawclk rate: %d kHz\n", dev_priv->rawclk_freq);
2629 }
2630 
2631 /**
2632  * intel_init_cdclk_hooks - Initialize CDCLK related modesetting hooks
2633  * @dev_priv: i915 device
2634  */
2635 void intel_init_cdclk_hooks(struct drm_i915_private *dev_priv)
2636 {
2637 	if (IS_ELKHARTLAKE(dev_priv)) {
2638 		dev_priv->display.set_cdclk = bxt_set_cdclk;
2639 		dev_priv->display.modeset_calc_cdclk = bxt_modeset_calc_cdclk;
2640 		dev_priv->display.calc_voltage_level = ehl_calc_voltage_level;
2641 		dev_priv->cdclk.table = icl_cdclk_table;
2642 	} else if (INTEL_GEN(dev_priv) >= 11) {
2643 		dev_priv->display.set_cdclk = bxt_set_cdclk;
2644 		dev_priv->display.modeset_calc_cdclk = bxt_modeset_calc_cdclk;
2645 		dev_priv->display.calc_voltage_level = icl_calc_voltage_level;
2646 		dev_priv->cdclk.table = icl_cdclk_table;
2647 	} else if (IS_CANNONLAKE(dev_priv)) {
2648 		dev_priv->display.set_cdclk = bxt_set_cdclk;
2649 		dev_priv->display.modeset_calc_cdclk = bxt_modeset_calc_cdclk;
2650 		dev_priv->display.calc_voltage_level = cnl_calc_voltage_level;
2651 		dev_priv->cdclk.table = cnl_cdclk_table;
2652 	} else if (IS_GEN9_LP(dev_priv)) {
2653 		dev_priv->display.set_cdclk = bxt_set_cdclk;
2654 		dev_priv->display.modeset_calc_cdclk = bxt_modeset_calc_cdclk;
2655 		dev_priv->display.calc_voltage_level = bxt_calc_voltage_level;
2656 		if (IS_GEMINILAKE(dev_priv))
2657 			dev_priv->cdclk.table = glk_cdclk_table;
2658 		else
2659 			dev_priv->cdclk.table = bxt_cdclk_table;
2660 	} else if (IS_GEN9_BC(dev_priv)) {
2661 		dev_priv->display.set_cdclk = skl_set_cdclk;
2662 		dev_priv->display.modeset_calc_cdclk = skl_modeset_calc_cdclk;
2663 	} else if (IS_BROADWELL(dev_priv)) {
2664 		dev_priv->display.set_cdclk = bdw_set_cdclk;
2665 		dev_priv->display.modeset_calc_cdclk = bdw_modeset_calc_cdclk;
2666 	} else if (IS_CHERRYVIEW(dev_priv)) {
2667 		dev_priv->display.set_cdclk = chv_set_cdclk;
2668 		dev_priv->display.modeset_calc_cdclk = vlv_modeset_calc_cdclk;
2669 	} else if (IS_VALLEYVIEW(dev_priv)) {
2670 		dev_priv->display.set_cdclk = vlv_set_cdclk;
2671 		dev_priv->display.modeset_calc_cdclk = vlv_modeset_calc_cdclk;
2672 	} else {
2673 		dev_priv->display.modeset_calc_cdclk = fixed_modeset_calc_cdclk;
2674 	}
2675 
2676 	if (INTEL_GEN(dev_priv) >= 10 || IS_GEN9_LP(dev_priv))
2677 		dev_priv->display.get_cdclk = bxt_get_cdclk;
2678 	else if (IS_GEN9_BC(dev_priv))
2679 		dev_priv->display.get_cdclk = skl_get_cdclk;
2680 	else if (IS_BROADWELL(dev_priv))
2681 		dev_priv->display.get_cdclk = bdw_get_cdclk;
2682 	else if (IS_HASWELL(dev_priv))
2683 		dev_priv->display.get_cdclk = hsw_get_cdclk;
2684 	else if (IS_VALLEYVIEW(dev_priv) || IS_CHERRYVIEW(dev_priv))
2685 		dev_priv->display.get_cdclk = vlv_get_cdclk;
2686 	else if (IS_GEN(dev_priv, 6) || IS_IVYBRIDGE(dev_priv))
2687 		dev_priv->display.get_cdclk = fixed_400mhz_get_cdclk;
2688 	else if (IS_GEN(dev_priv, 5))
2689 		dev_priv->display.get_cdclk = fixed_450mhz_get_cdclk;
2690 	else if (IS_GM45(dev_priv))
2691 		dev_priv->display.get_cdclk = gm45_get_cdclk;
2692 	else if (IS_G45(dev_priv))
2693 		dev_priv->display.get_cdclk = g33_get_cdclk;
2694 	else if (IS_I965GM(dev_priv))
2695 		dev_priv->display.get_cdclk = i965gm_get_cdclk;
2696 	else if (IS_I965G(dev_priv))
2697 		dev_priv->display.get_cdclk = fixed_400mhz_get_cdclk;
2698 	else if (IS_PINEVIEW(dev_priv))
2699 		dev_priv->display.get_cdclk = pnv_get_cdclk;
2700 	else if (IS_G33(dev_priv))
2701 		dev_priv->display.get_cdclk = g33_get_cdclk;
2702 	else if (IS_I945GM(dev_priv))
2703 		dev_priv->display.get_cdclk = i945gm_get_cdclk;
2704 	else if (IS_I945G(dev_priv))
2705 		dev_priv->display.get_cdclk = fixed_400mhz_get_cdclk;
2706 	else if (IS_I915GM(dev_priv))
2707 		dev_priv->display.get_cdclk = i915gm_get_cdclk;
2708 	else if (IS_I915G(dev_priv))
2709 		dev_priv->display.get_cdclk = fixed_333mhz_get_cdclk;
2710 	else if (IS_I865G(dev_priv))
2711 		dev_priv->display.get_cdclk = fixed_266mhz_get_cdclk;
2712 	else if (IS_I85X(dev_priv))
2713 		dev_priv->display.get_cdclk = i85x_get_cdclk;
2714 	else if (IS_I845G(dev_priv))
2715 		dev_priv->display.get_cdclk = fixed_200mhz_get_cdclk;
2716 	else { /* 830 */
2717 		WARN(!IS_I830(dev_priv),
2718 		     "Unknown platform. Assuming 133 MHz CDCLK\n");
2719 		dev_priv->display.get_cdclk = fixed_133mhz_get_cdclk;
2720 	}
2721 }
2722