1 /*	$NetBSD: icl_dsi.c,v 1.2 2021/12/18 23:45:29 riastradh Exp $	*/
2 
3 /*
4  * Copyright © 2018 Intel Corporation
5  *
6  * Permission is hereby granted, free of charge, to any person obtaining a
7  * copy of this software and associated documentation files (the "Software"),
8  * to deal in the Software without restriction, including without limitation
9  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
10  * and/or sell copies of the Software, and to permit persons to whom the
11  * Software is furnished to do so, subject to the following conditions:
12  *
13  * The above copyright notice and this permission notice (including the next
14  * paragraph) shall be included in all copies or substantial portions of the
15  * Software.
16  *
17  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
20  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
22  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
23  * DEALINGS IN THE SOFTWARE.
24  *
25  * Authors:
26  *   Madhav Chauhan <madhav.chauhan@intel.com>
27  *   Jani Nikula <jani.nikula@intel.com>
28  */
29 
30 #include <sys/cdefs.h>
31 __KERNEL_RCSID(0, "$NetBSD: icl_dsi.c,v 1.2 2021/12/18 23:45:29 riastradh Exp $");
32 
33 #include <drm/drm_atomic_helper.h>
34 #include <drm/drm_mipi_dsi.h>
35 
36 #include "intel_atomic.h"
37 #include "intel_combo_phy.h"
38 #include "intel_connector.h"
39 #include "intel_ddi.h"
40 #include "intel_dsi.h"
41 #include "intel_panel.h"
42 #include "intel_vdsc.h"
43 
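/*
 * Credit bookkeeping helpers: DSI_CMD_TXCTL reports how many header and
 * payload FIFO credits are currently free for the given DSI transcoder;
 * credits are released by the hardware as queued command data drains.
 */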
44 static inline int header_credits_available(struct drm_i915_private *dev_priv,
45 					   enum transcoder dsi_trans)
46 {
47 	return (I915_READ(DSI_CMD_TXCTL(dsi_trans)) & FREE_HEADER_CREDIT_MASK)
48 		>> FREE_HEADER_CREDIT_SHIFT;
49 }
50 
51 static inline int payload_credits_available(struct drm_i915_private *dev_priv,
52 					    enum transcoder dsi_trans)
53 {
54 	return (I915_READ(DSI_CMD_TXCTL(dsi_trans)) & FREE_PLOAD_CREDIT_MASK)
55 		>> FREE_PLOAD_CREDIT_SHIFT;
56 }
57 
58 static void wait_for_header_credits(struct drm_i915_private *dev_priv,
59 				    enum transcoder dsi_trans)
60 {
61 	if (wait_for_us(header_credits_available(dev_priv, dsi_trans) >=
62 			MAX_HEADER_CREDIT, 100))
63 		DRM_ERROR("DSI header credits not released\n");
64 }
65 
66 static void wait_for_payload_credits(struct drm_i915_private *dev_priv,
67 				     enum transcoder dsi_trans)
68 {
69 	if (wait_for_us(payload_credits_available(dev_priv, dsi_trans) >=
70 			MAX_PLOAD_CREDIT, 100))
71 		DRM_ERROR("DSI payload credits not released\n");
72 }
73 
74 static enum transcoder dsi_port_to_transcoder(enum port port)
75 {
76 	if (port == PORT_A)
77 		return TRANSCODER_DSI_0;
78 	else
79 		return TRANSCODER_DSI_1;
80 }
81 
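/*
 * Flush all commands queued towards the panel: wait for free credits on
 * each DSI transcoder, send a DCS NOP in LP mode on every port, then wait
 * for the header credits to come back and for LPTX_IN_PROGRESS to clear.
 */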
82 static void wait_for_cmds_dispatched_to_panel(struct intel_encoder *encoder)
83 {
84 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
85 	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
86 	struct mipi_dsi_device *dsi;
87 	enum port port;
88 	enum transcoder dsi_trans;
89 	int ret;
90 
91 	/* wait for header/payload credits to be released */
92 	for_each_dsi_port(port, intel_dsi->ports) {
93 		dsi_trans = dsi_port_to_transcoder(port);
94 		wait_for_header_credits(dev_priv, dsi_trans);
95 		wait_for_payload_credits(dev_priv, dsi_trans);
96 	}
97 
98 	/* send nop DCS command */
99 	for_each_dsi_port(port, intel_dsi->ports) {
100 		dsi = intel_dsi->dsi_hosts[port]->device;
101 		dsi->mode_flags |= MIPI_DSI_MODE_LPM;
102 		dsi->channel = 0;
103 		ret = mipi_dsi_dcs_nop(dsi);
104 		if (ret < 0)
105 			DRM_ERROR("error sending DCS NOP command\n");
106 	}
107 
108 	/* wait for header credits to be released */
109 	for_each_dsi_port(port, intel_dsi->ports) {
110 		dsi_trans = dsi_port_to_transcoder(port);
111 		wait_for_header_credits(dev_priv, dsi_trans);
112 	}
113 
114 	/* wait for LP TX in progress bit to be cleared */
115 	for_each_dsi_port(port, intel_dsi->ports) {
116 		dsi_trans = dsi_port_to_transcoder(port);
117 		if (wait_for_us(!(I915_READ(DSI_LP_MSG(dsi_trans)) &
118 				  LPTX_IN_PROGRESS), 20))
119 			DRM_ERROR("LPTX bit not cleared\n");
120 	}
121 }
122 
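/*
 * Copy a command payload into the DSI payload FIFO one dword at a time,
 * checking that a payload credit is available before each write.
 */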
123 static bool add_payld_to_queue(struct intel_dsi_host *host, const u8 *data,
124 			       u32 len)
125 {
126 	struct intel_dsi *intel_dsi = host->intel_dsi;
127 	struct drm_i915_private *dev_priv = to_i915(intel_dsi->base.base.dev);
128 	enum transcoder dsi_trans = dsi_port_to_transcoder(host->port);
129 	int free_credits;
130 	int i, j;
131 
132 	for (i = 0; i < len; i += 4) {
133 		u32 tmp = 0;
134 
135 		free_credits = payload_credits_available(dev_priv, dsi_trans);
136 		if (free_credits < 1) {
137 			DRM_ERROR("Payload credit not available\n");
138 			return false;
139 		}
140 
141 		for (j = 0; j < min_t(u32, len - i, 4); j++)
142 			tmp |= *data++ << 8 * j;
143 
144 		I915_WRITE(DSI_CMD_TXPYLD(dsi_trans), tmp);
145 	}
146 
147 	return true;
148 }
149 
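/*
 * Build the packet header (payload present, LP/HS transfer mode, virtual
 * channel, data type and word count) and write it to the header FIFO,
 * provided a header credit is available.
 */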
150 static int dsi_send_pkt_hdr(struct intel_dsi_host *host,
151 			    struct mipi_dsi_packet pkt, bool enable_lpdt)
152 {
153 	struct intel_dsi *intel_dsi = host->intel_dsi;
154 	struct drm_i915_private *dev_priv = to_i915(intel_dsi->base.base.dev);
155 	enum transcoder dsi_trans = dsi_port_to_transcoder(host->port);
156 	u32 tmp;
157 	int free_credits;
158 
159 	/* check if header credit available */
160 	free_credits = header_credits_available(dev_priv, dsi_trans);
161 	if (free_credits < 1) {
162 		DRM_ERROR("send pkt header failed, not enough hdr credits\n");
163 		return -1;
164 	}
165 
166 	tmp = I915_READ(DSI_CMD_TXHDR(dsi_trans));
167 
168 	if (pkt.payload)
169 		tmp |= PAYLOAD_PRESENT;
170 	else
171 		tmp &= ~PAYLOAD_PRESENT;
172 
173 	tmp &= ~VBLANK_FENCE;
174 
175 	if (enable_lpdt)
176 		tmp |= LP_DATA_TRANSFER;
177 
178 	tmp &= ~(PARAM_WC_MASK | VC_MASK | DT_MASK);
179 	tmp |= ((pkt.header[0] & VC_MASK) << VC_SHIFT);
180 	tmp |= ((pkt.header[0] & DT_MASK) << DT_SHIFT);
181 	tmp |= (pkt.header[1] << PARAM_WC_LOWER_SHIFT);
182 	tmp |= (pkt.header[2] << PARAM_WC_UPPER_SHIFT);
183 	I915_WRITE(DSI_CMD_TXHDR(dsi_trans), tmp);
184 
185 	return 0;
186 }
187 
188 static int dsi_send_pkt_payld(struct intel_dsi_host *host,
189 			      struct mipi_dsi_packet pkt)
190 {
191 	/* payload queue can accept *256 bytes*, check limit */
192 	if (pkt.payload_length > MAX_PLOAD_CREDIT * 4) {
193 		DRM_ERROR("payload size exceeds max queue limit\n");
194 		return -1;
195 	}
196 
197 	/* load data into command payload queue */
198 	if (!add_payld_to_queue(host, pkt.payload,
199 				pkt.payload_length)) {
200 		DRM_ERROR("adding payload to queue failed\n");
201 		return -1;
202 	}
203 
204 	return 0;
205 }
206 
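/*
 * Program the combo PHY voltage swing and de-emphasis values used for DSI:
 * scaling mode, termination select, swing select, RCOMP scalar and the
 * cursor/post-cursor coefficients, for both the data lanes and the AUX
 * channel of each DSI PHY.
 */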
207 static void dsi_program_swing_and_deemphasis(struct intel_encoder *encoder)
208 {
209 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
210 	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
211 	enum phy phy;
212 	u32 tmp;
213 	int lane;
214 
215 	for_each_dsi_phy(phy, intel_dsi->phys) {
216 		/*
217 		 * Program voltage swing and pre-emphasis level values as per
218 		 * table in BSPEC under DDI buffer programming
219 		 */
220 		tmp = I915_READ(ICL_PORT_TX_DW5_LN0(phy));
221 		tmp &= ~(SCALING_MODE_SEL_MASK | RTERM_SELECT_MASK);
222 		tmp |= SCALING_MODE_SEL(0x2);
223 		tmp |= TAP2_DISABLE | TAP3_DISABLE;
224 		tmp |= RTERM_SELECT(0x6);
225 		I915_WRITE(ICL_PORT_TX_DW5_GRP(phy), tmp);
226 
227 		tmp = I915_READ(ICL_PORT_TX_DW5_AUX(phy));
228 		tmp &= ~(SCALING_MODE_SEL_MASK | RTERM_SELECT_MASK);
229 		tmp |= SCALING_MODE_SEL(0x2);
230 		tmp |= TAP2_DISABLE | TAP3_DISABLE;
231 		tmp |= RTERM_SELECT(0x6);
232 		I915_WRITE(ICL_PORT_TX_DW5_AUX(phy), tmp);
233 
234 		tmp = I915_READ(ICL_PORT_TX_DW2_LN0(phy));
235 		tmp &= ~(SWING_SEL_LOWER_MASK | SWING_SEL_UPPER_MASK |
236 			 RCOMP_SCALAR_MASK);
237 		tmp |= SWING_SEL_UPPER(0x2);
238 		tmp |= SWING_SEL_LOWER(0x2);
239 		tmp |= RCOMP_SCALAR(0x98);
240 		I915_WRITE(ICL_PORT_TX_DW2_GRP(phy), tmp);
241 
242 		tmp = I915_READ(ICL_PORT_TX_DW2_AUX(phy));
243 		tmp &= ~(SWING_SEL_LOWER_MASK | SWING_SEL_UPPER_MASK |
244 			 RCOMP_SCALAR_MASK);
245 		tmp |= SWING_SEL_UPPER(0x2);
246 		tmp |= SWING_SEL_LOWER(0x2);
247 		tmp |= RCOMP_SCALAR(0x98);
248 		I915_WRITE(ICL_PORT_TX_DW2_AUX(phy), tmp);
249 
250 		tmp = I915_READ(ICL_PORT_TX_DW4_AUX(phy));
251 		tmp &= ~(POST_CURSOR_1_MASK | POST_CURSOR_2_MASK |
252 			 CURSOR_COEFF_MASK);
253 		tmp |= POST_CURSOR_1(0x0);
254 		tmp |= POST_CURSOR_2(0x0);
255 		tmp |= CURSOR_COEFF(0x3f);
256 		I915_WRITE(ICL_PORT_TX_DW4_AUX(phy), tmp);
257 
258 		for (lane = 0; lane <= 3; lane++) {
259 			/* Bspec: must not use GRP register for write */
260 			tmp = I915_READ(ICL_PORT_TX_DW4_LN(lane, phy));
261 			tmp &= ~(POST_CURSOR_1_MASK | POST_CURSOR_2_MASK |
262 				 CURSOR_COEFF_MASK);
263 			tmp |= POST_CURSOR_1(0x0);
264 			tmp |= POST_CURSOR_2(0x0);
265 			tmp |= CURSOR_COEFF(0x3f);
266 			I915_WRITE(ICL_PORT_TX_DW4_LN(lane, phy), tmp);
267 		}
268 	}
269 }
270 
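/*
 * Configure the splitter for dual-link operation: front/back mode splits
 * each line into left/right halves (plus the pixel overlap) with per-link
 * buffer target depths, while interleave mode sends alternate pixels to
 * each link.
 */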
271 static void configure_dual_link_mode(struct intel_encoder *encoder,
272 				     const struct intel_crtc_state *pipe_config)
273 {
274 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
275 	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
276 	u32 dss_ctl1;
277 
278 	dss_ctl1 = I915_READ(DSS_CTL1);
279 	dss_ctl1 |= SPLITTER_ENABLE;
280 	dss_ctl1 &= ~OVERLAP_PIXELS_MASK;
281 	dss_ctl1 |= OVERLAP_PIXELS(intel_dsi->pixel_overlap);
282 
283 	if (intel_dsi->dual_link == DSI_DUAL_LINK_FRONT_BACK) {
284 		const struct drm_display_mode *adjusted_mode =
285 					&pipe_config->hw.adjusted_mode;
286 		u32 dss_ctl2;
287 		u16 hactive = adjusted_mode->crtc_hdisplay;
288 		u16 dl_buffer_depth;
289 
290 		dss_ctl1 &= ~DUAL_LINK_MODE_INTERLEAVE;
291 		dl_buffer_depth = hactive / 2 + intel_dsi->pixel_overlap;
292 
293 		if (dl_buffer_depth > MAX_DL_BUFFER_TARGET_DEPTH)
294 			DRM_ERROR("DL buffer depth exceed max value\n");
295 
296 		dss_ctl1 &= ~LEFT_DL_BUF_TARGET_DEPTH_MASK;
297 		dss_ctl1 |= LEFT_DL_BUF_TARGET_DEPTH(dl_buffer_depth);
298 		dss_ctl2 = I915_READ(DSS_CTL2);
299 		dss_ctl2 &= ~RIGHT_DL_BUF_TARGET_DEPTH_MASK;
300 		dss_ctl2 |= RIGHT_DL_BUF_TARGET_DEPTH(dl_buffer_depth);
301 		I915_WRITE(DSS_CTL2, dss_ctl2);
302 	} else {
303 		/* Interleave */
304 		dss_ctl1 |= DUAL_LINK_MODE_INTERLEAVE;
305 	}
306 
307 	I915_WRITE(DSS_CTL1, dss_ctl1);
308 }
309 
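/*
 * afe_clk (kHz) = pixel clock * bpp / lane count, using the compressed
 * bpp when DSC is enabled; the rest of this file treats it as the
 * per-lane link rate.
 */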
310 /* aka DSI 8X clock */
311 static int afe_clk(struct intel_encoder *encoder,
312 		   const struct intel_crtc_state *crtc_state)
313 {
314 	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
315 	int bpp;
316 
317 	if (crtc_state->dsc.compression_enable)
318 		bpp = crtc_state->dsc.compressed_bpp;
319 	else
320 		bpp = mipi_dsi_pixel_format_to_bpp(intel_dsi->pixel_format);
321 
322 	return DIV_ROUND_CLOSEST(intel_dsi->pclk * bpp, intel_dsi->lane_count);
323 }
324 
325 static void gen11_dsi_program_esc_clk_div(struct intel_encoder *encoder,
326 					  const struct intel_crtc_state *crtc_state)
327 {
328 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
329 	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
330 	enum port port;
331 	int afe_clk_khz;
332 	u32 esc_clk_div_m;
333 
334 	afe_clk_khz = afe_clk(encoder, crtc_state);
335 	esc_clk_div_m = DIV_ROUND_UP(afe_clk_khz, DSI_MAX_ESC_CLK);
336 
337 	for_each_dsi_port(port, intel_dsi->ports) {
338 		I915_WRITE(ICL_DSI_ESC_CLK_DIV(port),
339 			   esc_clk_div_m & ICL_ESC_CLK_DIV_MASK);
340 		POSTING_READ(ICL_DSI_ESC_CLK_DIV(port));
341 	}
342 
343 	for_each_dsi_port(port, intel_dsi->ports) {
344 		I915_WRITE(ICL_DPHY_ESC_CLK_DIV(port),
345 			   esc_clk_div_m & ICL_ESC_CLK_DIV_MASK);
346 		POSTING_READ(ICL_DPHY_ESC_CLK_DIV(port));
347 	}
348 }
349 
350 static void get_dsi_io_power_domains(struct drm_i915_private *dev_priv,
351 				     struct intel_dsi *intel_dsi)
352 {
353 	enum port port;
354 
355 	for_each_dsi_port(port, intel_dsi->ports) {
356 		WARN_ON(intel_dsi->io_wakeref[port]);
357 		intel_dsi->io_wakeref[port] =
358 			intel_display_power_get(dev_priv,
359 						port == PORT_A ?
360 						POWER_DOMAIN_PORT_DDI_A_IO :
361 						POWER_DOMAIN_PORT_DDI_B_IO);
362 	}
363 }
364 
365 static void gen11_dsi_enable_io_power(struct intel_encoder *encoder)
366 {
367 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
368 	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
369 	enum port port;
370 	u32 tmp;
371 
372 	for_each_dsi_port(port, intel_dsi->ports) {
373 		tmp = I915_READ(ICL_DSI_IO_MODECTL(port));
374 		tmp |= COMBO_PHY_MODE_DSI;
375 		I915_WRITE(ICL_DSI_IO_MODECTL(port), tmp);
376 	}
377 
378 	get_dsi_io_power_domains(dev_priv, intel_dsi);
379 }
380 
381 static void gen11_dsi_power_up_lanes(struct intel_encoder *encoder)
382 {
383 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
384 	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
385 	enum phy phy;
386 
387 	for_each_dsi_phy(phy, intel_dsi->phys)
388 		intel_combo_phy_power_up_lanes(dev_priv, phy, true,
389 					       intel_dsi->lane_count, false);
390 }
391 
392 static void gen11_dsi_config_phy_lanes_sequence(struct intel_encoder *encoder)
393 {
394 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
395 	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
396 	enum phy phy;
397 	u32 tmp;
398 	int lane;
399 
400 	/* Step 4b(i) set loadgen select for transmit and aux lanes */
401 	for_each_dsi_phy(phy, intel_dsi->phys) {
402 		tmp = I915_READ(ICL_PORT_TX_DW4_AUX(phy));
403 		tmp &= ~LOADGEN_SELECT;
404 		I915_WRITE(ICL_PORT_TX_DW4_AUX(phy), tmp);
405 		for (lane = 0; lane <= 3; lane++) {
406 			tmp = I915_READ(ICL_PORT_TX_DW4_LN(lane, phy));
407 			tmp &= ~LOADGEN_SELECT;
408 			if (lane != 2)
409 				tmp |= LOADGEN_SELECT;
410 			I915_WRITE(ICL_PORT_TX_DW4_LN(lane, phy), tmp);
411 		}
412 	}
413 
414 	/* Step 4b(ii) set latency optimization for transmit and aux lanes */
415 	for_each_dsi_phy(phy, intel_dsi->phys) {
416 		tmp = I915_READ(ICL_PORT_TX_DW2_AUX(phy));
417 		tmp &= ~FRC_LATENCY_OPTIM_MASK;
418 		tmp |= FRC_LATENCY_OPTIM_VAL(0x5);
419 		I915_WRITE(ICL_PORT_TX_DW2_AUX(phy), tmp);
420 		tmp = I915_READ(ICL_PORT_TX_DW2_LN0(phy));
421 		tmp &= ~FRC_LATENCY_OPTIM_MASK;
422 		tmp |= FRC_LATENCY_OPTIM_VAL(0x5);
423 		I915_WRITE(ICL_PORT_TX_DW2_GRP(phy), tmp);
424 
425 		/* For EHL, TGL, set latency optimization for PCS_DW1 lanes */
426 		if (IS_ELKHARTLAKE(dev_priv) || (INTEL_GEN(dev_priv) >= 12)) {
427 			tmp = I915_READ(ICL_PORT_PCS_DW1_AUX(phy));
428 			tmp &= ~LATENCY_OPTIM_MASK;
429 			tmp |= LATENCY_OPTIM_VAL(0);
430 			I915_WRITE(ICL_PORT_PCS_DW1_AUX(phy), tmp);
431 
432 			tmp = I915_READ(ICL_PORT_PCS_DW1_LN0(phy));
433 			tmp &= ~LATENCY_OPTIM_MASK;
434 			tmp |= LATENCY_OPTIM_VAL(0x1);
435 			I915_WRITE(ICL_PORT_PCS_DW1_GRP(phy), tmp);
436 		}
437 	}
438 
439 }
440 
441 static void gen11_dsi_voltage_swing_program_seq(struct intel_encoder *encoder)
442 {
443 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
444 	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
445 	u32 tmp;
446 	enum phy phy;
447 
448 	/* clear common keeper enable bit */
449 	for_each_dsi_phy(phy, intel_dsi->phys) {
450 		tmp = I915_READ(ICL_PORT_PCS_DW1_LN0(phy));
451 		tmp &= ~COMMON_KEEPER_EN;
452 		I915_WRITE(ICL_PORT_PCS_DW1_GRP(phy), tmp);
453 		tmp = I915_READ(ICL_PORT_PCS_DW1_AUX(phy));
454 		tmp &= ~COMMON_KEEPER_EN;
455 		I915_WRITE(ICL_PORT_PCS_DW1_AUX(phy), tmp);
456 	}
457 
458 	/*
459 	 * Set SUS Clock Config bitfield to 11b
460 	 * Note: loadgen select program is done
461 	 * as part of lane phy sequence configuration
462 	 */
463 	for_each_dsi_phy(phy, intel_dsi->phys) {
464 		tmp = I915_READ(ICL_PORT_CL_DW5(phy));
465 		tmp |= SUS_CLOCK_CONFIG;
466 		I915_WRITE(ICL_PORT_CL_DW5(phy), tmp);
467 	}
468 
469 	/* Clear training enable to change swing values */
470 	for_each_dsi_phy(phy, intel_dsi->phys) {
471 		tmp = I915_READ(ICL_PORT_TX_DW5_LN0(phy));
472 		tmp &= ~TX_TRAINING_EN;
473 		I915_WRITE(ICL_PORT_TX_DW5_GRP(phy), tmp);
474 		tmp = I915_READ(ICL_PORT_TX_DW5_AUX(phy));
475 		tmp &= ~TX_TRAINING_EN;
476 		I915_WRITE(ICL_PORT_TX_DW5_AUX(phy), tmp);
477 	}
478 
479 	/* Program swing and de-emphasis */
480 	dsi_program_swing_and_deemphasis(encoder);
481 
482 	/* Set training enable to trigger update */
483 	for_each_dsi_phy(phy, intel_dsi->phys) {
484 		tmp = I915_READ(ICL_PORT_TX_DW5_LN0(phy));
485 		tmp |= TX_TRAINING_EN;
486 		I915_WRITE(ICL_PORT_TX_DW5_GRP(phy), tmp);
487 		tmp = I915_READ(ICL_PORT_TX_DW5_AUX(phy));
488 		tmp |= TX_TRAINING_EN;
489 		I915_WRITE(ICL_PORT_TX_DW5_AUX(phy), tmp);
490 	}
491 }
492 
493 static void gen11_dsi_enable_ddi_buffer(struct intel_encoder *encoder)
494 {
495 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
496 	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
497 	u32 tmp;
498 	enum port port;
499 
500 	for_each_dsi_port(port, intel_dsi->ports) {
501 		tmp = I915_READ(DDI_BUF_CTL(port));
502 		tmp |= DDI_BUF_CTL_ENABLE;
503 		I915_WRITE(DDI_BUF_CTL(port), tmp);
504 
505 		if (wait_for_us(!(I915_READ(DDI_BUF_CTL(port)) &
506 				  DDI_BUF_IS_IDLE),
507 				  500))
508 			DRM_ERROR("DDI port:%c buffer idle\n", port_name(port));
509 	}
510 }
511 
512 static void
513 gen11_dsi_setup_dphy_timings(struct intel_encoder *encoder,
514 			     const struct intel_crtc_state *crtc_state)
515 {
516 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
517 	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
518 	u32 tmp;
519 	enum port port;
520 	enum phy phy;
521 
522 	/* Program T-INIT master registers */
523 	for_each_dsi_port(port, intel_dsi->ports) {
524 		tmp = I915_READ(ICL_DSI_T_INIT_MASTER(port));
525 		tmp &= ~MASTER_INIT_TIMER_MASK;
526 		tmp |= intel_dsi->init_count;
527 		I915_WRITE(ICL_DSI_T_INIT_MASTER(port), tmp);
528 	}
529 
530 	/* Program DPHY clock lanes timings */
531 	for_each_dsi_port(port, intel_dsi->ports) {
532 		I915_WRITE(DPHY_CLK_TIMING_PARAM(port), intel_dsi->dphy_reg);
533 
534 		/* shadow register inside display core */
535 		I915_WRITE(DSI_CLK_TIMING_PARAM(port), intel_dsi->dphy_reg);
536 	}
537 
538 	/* Program DPHY data lanes timings */
539 	for_each_dsi_port(port, intel_dsi->ports) {
540 		I915_WRITE(DPHY_DATA_TIMING_PARAM(port),
541 			   intel_dsi->dphy_data_lane_reg);
542 
543 		/* shadow register inside display core */
544 		I915_WRITE(DSI_DATA_TIMING_PARAM(port),
545 			   intel_dsi->dphy_data_lane_reg);
546 	}
547 
548 	/*
549 	 * If the DSI link is operating at or below 800 MHz,
550 	 * TA_SURE should be overridden and programmed to
551 	 * a value of '0' inside TA_PARAM_REGISTERS; otherwise
552 	 * leave all fields at HW default values.
553 	 */
554 	if (IS_GEN(dev_priv, 11)) {
555 		if (afe_clk(encoder, crtc_state) <= 800000) {
556 			for_each_dsi_port(port, intel_dsi->ports) {
557 				tmp = I915_READ(DPHY_TA_TIMING_PARAM(port));
558 				tmp &= ~TA_SURE_MASK;
559 				tmp |= TA_SURE_OVERRIDE | TA_SURE(0);
560 				I915_WRITE(DPHY_TA_TIMING_PARAM(port), tmp);
561 
562 				/* shadow register inside display core */
563 				tmp = I915_READ(DSI_TA_TIMING_PARAM(port));
564 				tmp &= ~TA_SURE_MASK;
565 				tmp |= TA_SURE_OVERRIDE | TA_SURE(0);
566 				I915_WRITE(DSI_TA_TIMING_PARAM(port), tmp);
567 			}
568 		}
569 	}
570 
571 	if (IS_ELKHARTLAKE(dev_priv)) {
572 		for_each_dsi_phy(phy, intel_dsi->phys) {
573 			tmp = I915_READ(ICL_DPHY_CHKN(phy));
574 			tmp |= ICL_DPHY_CHKN_AFE_OVER_PPI_STRAP;
575 			I915_WRITE(ICL_DPHY_CHKN(phy), tmp);
576 		}
577 	}
578 }
579 
580 static void gen11_dsi_gate_clocks(struct intel_encoder *encoder)
581 {
582 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
583 	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
584 	u32 tmp;
585 	enum phy phy;
586 
587 	mutex_lock(&dev_priv->dpll_lock);
588 	tmp = I915_READ(ICL_DPCLKA_CFGCR0);
589 	for_each_dsi_phy(phy, intel_dsi->phys)
590 		tmp |= ICL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy);
591 
592 	I915_WRITE(ICL_DPCLKA_CFGCR0, tmp);
593 	mutex_unlock(&dev_priv->dpll_lock);
594 }
595 
596 static void gen11_dsi_ungate_clocks(struct intel_encoder *encoder)
597 {
598 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
599 	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
600 	u32 tmp;
601 	enum phy phy;
602 
603 	mutex_lock(&dev_priv->dpll_lock);
604 	tmp = I915_READ(ICL_DPCLKA_CFGCR0);
605 	for_each_dsi_phy(phy, intel_dsi->phys)
606 		tmp &= ~ICL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy);
607 
608 	I915_WRITE(ICL_DPCLKA_CFGCR0, tmp);
609 	mutex_unlock(&dev_priv->dpll_lock);
610 }
611 
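/*
 * Map each DSI combo PHY to the shared DPLL via DPCLKA_CFGCR0.  On gen12
 * the DDI clock for the PHY is left gated (DDI_CLK_OFF set); on gen11 it
 * is ungated here and gated again once the port has been configured.
 */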
612 static void gen11_dsi_map_pll(struct intel_encoder *encoder,
613 			      const struct intel_crtc_state *crtc_state)
614 {
615 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
616 	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
617 	struct intel_shared_dpll *pll = crtc_state->shared_dpll;
618 	enum phy phy;
619 	u32 val;
620 
621 	mutex_lock(&dev_priv->dpll_lock);
622 
623 	val = I915_READ(ICL_DPCLKA_CFGCR0);
624 	for_each_dsi_phy(phy, intel_dsi->phys) {
625 		val &= ~ICL_DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(phy);
626 		val |= ICL_DPCLKA_CFGCR0_DDI_CLK_SEL(pll->info->id, phy);
627 	}
628 	I915_WRITE(ICL_DPCLKA_CFGCR0, val);
629 
630 	for_each_dsi_phy(phy, intel_dsi->phys) {
631 		if (INTEL_GEN(dev_priv) >= 12)
632 			val |= ICL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy);
633 		else
634 			val &= ~ICL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy);
635 	}
636 	I915_WRITE(ICL_DPCLKA_CFGCR0, val);
637 
638 	POSTING_READ(ICL_DPCLKA_CFGCR0);
639 
640 	mutex_unlock(&dev_priv->dpll_lock);
641 }
642 
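/*
 * Program DSI_TRANS_FUNC_CONF for each DSI transcoder (EOTP, link
 * calibration, continuous clock behaviour, pixel format, operation mode),
 * set up port sync and stream splitting for dual link, route the
 * transcoder to the selected pipe in TRANS_DDI_FUNC_CTL and wait for the
 * link to become ready.
 */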
643 static void
644 gen11_dsi_configure_transcoder(struct intel_encoder *encoder,
645 			       const struct intel_crtc_state *pipe_config)
646 {
647 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
648 	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
649 	struct intel_crtc *intel_crtc = to_intel_crtc(pipe_config->uapi.crtc);
650 	enum pipe pipe = intel_crtc->pipe;
651 	u32 tmp;
652 	enum port port;
653 	enum transcoder dsi_trans;
654 
655 	for_each_dsi_port(port, intel_dsi->ports) {
656 		dsi_trans = dsi_port_to_transcoder(port);
657 		tmp = I915_READ(DSI_TRANS_FUNC_CONF(dsi_trans));
658 
659 		if (intel_dsi->eotp_pkt)
660 			tmp &= ~EOTP_DISABLED;
661 		else
662 			tmp |= EOTP_DISABLED;
663 
664 		/* enable link calibration if freq > 1.5Gbps */
665 		if (afe_clk(encoder, pipe_config) >= 1500 * 1000) {
666 			tmp &= ~LINK_CALIBRATION_MASK;
667 			tmp |= CALIBRATION_ENABLED_INITIAL_ONLY;
668 		}
669 
670 		/* configure continuous clock */
671 		tmp &= ~CONTINUOUS_CLK_MASK;
672 		if (intel_dsi->clock_stop)
673 			tmp |= CLK_ENTER_LP_AFTER_DATA;
674 		else
675 			tmp |= CLK_HS_CONTINUOUS;
676 
677 		/* configure buffer threshold limit to minimum */
678 		tmp &= ~PIX_BUF_THRESHOLD_MASK;
679 		tmp |= PIX_BUF_THRESHOLD_1_4;
680 
681 		/* set virtual channel to '0' */
682 		tmp &= ~PIX_VIRT_CHAN_MASK;
683 		tmp |= PIX_VIRT_CHAN(0);
684 
685 		/* program BGR transmission */
686 		if (intel_dsi->bgr_enabled)
687 			tmp |= BGR_TRANSMISSION;
688 
689 		/* select pixel format */
690 		tmp &= ~PIX_FMT_MASK;
691 		if (pipe_config->dsc.compression_enable) {
692 			tmp |= PIX_FMT_COMPRESSED;
693 		} else {
694 			switch (intel_dsi->pixel_format) {
695 			default:
696 				MISSING_CASE(intel_dsi->pixel_format);
697 				/* fallthrough */
698 			case MIPI_DSI_FMT_RGB565:
699 				tmp |= PIX_FMT_RGB565;
700 				break;
701 			case MIPI_DSI_FMT_RGB666_PACKED:
702 				tmp |= PIX_FMT_RGB666_PACKED;
703 				break;
704 			case MIPI_DSI_FMT_RGB666:
705 				tmp |= PIX_FMT_RGB666_LOOSE;
706 				break;
707 			case MIPI_DSI_FMT_RGB888:
708 				tmp |= PIX_FMT_RGB888;
709 				break;
710 			}
711 		}
712 
713 		if (INTEL_GEN(dev_priv) >= 12) {
714 			if (is_vid_mode(intel_dsi))
715 				tmp |= BLANKING_PACKET_ENABLE;
716 		}
717 
718 		/* program DSI operation mode */
719 		if (is_vid_mode(intel_dsi)) {
720 			tmp &= ~OP_MODE_MASK;
721 			switch (intel_dsi->video_mode_format) {
722 			default:
723 				MISSING_CASE(intel_dsi->video_mode_format);
724 				/* fallthrough */
725 			case VIDEO_MODE_NON_BURST_WITH_SYNC_EVENTS:
726 				tmp |= VIDEO_MODE_SYNC_EVENT;
727 				break;
728 			case VIDEO_MODE_NON_BURST_WITH_SYNC_PULSE:
729 				tmp |= VIDEO_MODE_SYNC_PULSE;
730 				break;
731 			}
732 		}
733 
734 		I915_WRITE(DSI_TRANS_FUNC_CONF(dsi_trans), tmp);
735 	}
736 
737 	/* enable port sync mode if dual link */
738 	if (intel_dsi->dual_link) {
739 		for_each_dsi_port(port, intel_dsi->ports) {
740 			dsi_trans = dsi_port_to_transcoder(port);
741 			tmp = I915_READ(TRANS_DDI_FUNC_CTL2(dsi_trans));
742 			tmp |= PORT_SYNC_MODE_ENABLE;
743 			I915_WRITE(TRANS_DDI_FUNC_CTL2(dsi_trans), tmp);
744 		}
745 
746 		/* configure stream splitting */
747 		configure_dual_link_mode(encoder, pipe_config);
748 	}
749 
750 	for_each_dsi_port(port, intel_dsi->ports) {
751 		dsi_trans = dsi_port_to_transcoder(port);
752 
753 		/* select data lane width */
754 		tmp = I915_READ(TRANS_DDI_FUNC_CTL(dsi_trans));
755 		tmp &= ~DDI_PORT_WIDTH_MASK;
756 		tmp |= DDI_PORT_WIDTH(intel_dsi->lane_count);
757 
758 		/* select input pipe */
759 		tmp &= ~TRANS_DDI_EDP_INPUT_MASK;
760 		switch (pipe) {
761 		default:
762 			MISSING_CASE(pipe);
763 			/* fallthrough */
764 		case PIPE_A:
765 			tmp |= TRANS_DDI_EDP_INPUT_A_ON;
766 			break;
767 		case PIPE_B:
768 			tmp |= TRANS_DDI_EDP_INPUT_B_ONOFF;
769 			break;
770 		case PIPE_C:
771 			tmp |= TRANS_DDI_EDP_INPUT_C_ONOFF;
772 			break;
773 		case PIPE_D:
774 			tmp |= TRANS_DDI_EDP_INPUT_D_ONOFF;
775 			break;
776 		}
777 
778 		/* enable DDI buffer */
779 		tmp |= TRANS_DDI_FUNC_ENABLE;
780 		I915_WRITE(TRANS_DDI_FUNC_CTL(dsi_trans), tmp);
781 	}
782 
783 	/* wait for link ready */
784 	for_each_dsi_port(port, intel_dsi->ports) {
785 		dsi_trans = dsi_port_to_transcoder(port);
786 		if (wait_for_us((I915_READ(DSI_TRANS_FUNC_CONF(dsi_trans)) &
787 				LINK_READY), 2500))
788 			DRM_ERROR("DSI link not ready\n");
789 	}
790 }
791 
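/*
 * Program the DSI transcoder timings (HTOTAL/HSYNC/VTOTAL/VSYNC/
 * VSYNCSHIFT/VBLANK) from the adjusted mode, scaling the horizontal
 * timings for DSC and halving them for dual-link configurations.
 */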
792 static void
793 gen11_dsi_set_transcoder_timings(struct intel_encoder *encoder,
794 				 const struct intel_crtc_state *crtc_state)
795 {
796 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
797 	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
798 	const struct drm_display_mode *adjusted_mode =
799 		&crtc_state->hw.adjusted_mode;
800 	enum port port;
801 	enum transcoder dsi_trans;
802 	/* horizontal timings */
803 	u16 htotal, hactive, hsync_start, hsync_end, hsync_size;
804 	u16 hback_porch;
805 	/* vertical timings */
806 	u16 vtotal, vactive, vsync_start, vsync_end, vsync_shift;
807 	int mul = 1, div = 1;
808 
809 	/*
810 	 * Adjust horizontal timings (htotal, hsync_start, hsync_end) to account
811 	 * for slower link speed if DSC is enabled.
812 	 *
813 	 * The compression frequency ratio is the ratio between compressed and
814 	 * non-compressed link speeds, and simplifies down to the ratio between
815 	 * compressed and non-compressed bpp.
816 	 */
817 	if (crtc_state->dsc.compression_enable) {
818 		mul = crtc_state->dsc.compressed_bpp;
819 		div = mipi_dsi_pixel_format_to_bpp(intel_dsi->pixel_format);
820 	}
821 
822 	hactive = adjusted_mode->crtc_hdisplay;
823 	htotal = DIV_ROUND_UP(adjusted_mode->crtc_htotal * mul, div);
824 	hsync_start = DIV_ROUND_UP(adjusted_mode->crtc_hsync_start * mul, div);
825 	hsync_end = DIV_ROUND_UP(adjusted_mode->crtc_hsync_end * mul, div);
826 	hsync_size  = hsync_end - hsync_start;
827 	hback_porch = (adjusted_mode->crtc_htotal -
828 		       adjusted_mode->crtc_hsync_end);
829 	vactive = adjusted_mode->crtc_vdisplay;
830 	vtotal = adjusted_mode->crtc_vtotal;
831 	vsync_start = adjusted_mode->crtc_vsync_start;
832 	vsync_end = adjusted_mode->crtc_vsync_end;
833 	vsync_shift = hsync_start - htotal / 2;
834 
835 	if (intel_dsi->dual_link) {
836 		hactive /= 2;
837 		if (intel_dsi->dual_link == DSI_DUAL_LINK_FRONT_BACK)
838 			hactive += intel_dsi->pixel_overlap;
839 		htotal /= 2;
840 	}
841 
842 	/* minimum hactive as per bspec: 256 pixels */
843 	if (adjusted_mode->crtc_hdisplay < 256)
844 		DRM_ERROR("hactive is less then 256 pixels\n");
845 
846 	/* if RGB666 format, then hactive must be multiple of 4 pixels */
847 	if (intel_dsi->pixel_format == MIPI_DSI_FMT_RGB666 && hactive % 4 != 0)
848 		DRM_ERROR("hactive pixels are not multiple of 4\n");
849 
850 	/* program TRANS_HTOTAL register */
851 	for_each_dsi_port(port, intel_dsi->ports) {
852 		dsi_trans = dsi_port_to_transcoder(port);
853 		I915_WRITE(HTOTAL(dsi_trans),
854 			   (hactive - 1) | ((htotal - 1) << 16));
855 	}
856 
857 	/* TRANS_HSYNC register to be programmed only for video mode */
858 	if (intel_dsi->operation_mode == INTEL_DSI_VIDEO_MODE) {
859 		if (intel_dsi->video_mode_format ==
860 		    VIDEO_MODE_NON_BURST_WITH_SYNC_PULSE) {
861 			/* BSPEC: hsync size should be at least 16 pixels */
862 			if (hsync_size < 16)
863 				DRM_ERROR("hsync size < 16 pixels\n");
864 		}
865 
866 		if (hback_porch < 16)
867 			DRM_ERROR("hback porch < 16 pixels\n");
868 
869 		if (intel_dsi->dual_link) {
870 			hsync_start /= 2;
871 			hsync_end /= 2;
872 		}
873 
874 		for_each_dsi_port(port, intel_dsi->ports) {
875 			dsi_trans = dsi_port_to_transcoder(port);
876 			I915_WRITE(HSYNC(dsi_trans),
877 				   (hsync_start - 1) | ((hsync_end - 1) << 16));
878 		}
879 	}
880 
881 	/* program TRANS_VTOTAL register */
882 	for_each_dsi_port(port, intel_dsi->ports) {
883 		dsi_trans = dsi_port_to_transcoder(port);
884 		/*
885 		 * FIXME: Programming this by assuming progressive mode, since
886 		 * non-interlaced info from VBT is not saved inside
887 		 * struct drm_display_mode.
888 		 * For interlace mode: program required pixel minus 2
889 		 */
890 		I915_WRITE(VTOTAL(dsi_trans),
891 			   (vactive - 1) | ((vtotal - 1) << 16));
892 	}
893 
894 	if (vsync_end < vsync_start || vsync_end > vtotal)
895 		DRM_ERROR("Invalid vsync_end value\n");
896 
897 	if (vsync_start < vactive)
898 		DRM_ERROR("vsync_start less than vactive\n");
899 
900 	/* program TRANS_VSYNC register */
901 	for_each_dsi_port(port, intel_dsi->ports) {
902 		dsi_trans = dsi_port_to_transcoder(port);
903 		I915_WRITE(VSYNC(dsi_trans),
904 			   (vsync_start - 1) | ((vsync_end - 1) << 16));
905 	}
906 
907 	/*
908 	 * FIXME: It has to be programmed only for interlaced
909 	 * modes. Put the check condition here once interlaced
910 	 * info available as described above.
911 	 * program TRANS_VSYNCSHIFT register
912 	 */
913 	for_each_dsi_port(port, intel_dsi->ports) {
914 		dsi_trans = dsi_port_to_transcoder(port);
915 		I915_WRITE(VSYNCSHIFT(dsi_trans), vsync_shift);
916 	}
917 
918 	/* program TRANS_VBLANK register, should be same as vtotal programmed */
919 	if (INTEL_GEN(dev_priv) >= 12) {
920 		for_each_dsi_port(port, intel_dsi->ports) {
921 			dsi_trans = dsi_port_to_transcoder(port);
922 			I915_WRITE(VBLANK(dsi_trans),
923 				   (vactive - 1) | ((vtotal - 1) << 16));
924 		}
925 	}
926 }
927 
928 static void gen11_dsi_enable_transcoder(struct intel_encoder *encoder)
929 {
930 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
931 	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
932 	enum port port;
933 	enum transcoder dsi_trans;
934 	u32 tmp;
935 
936 	for_each_dsi_port(port, intel_dsi->ports) {
937 		dsi_trans = dsi_port_to_transcoder(port);
938 		tmp = I915_READ(PIPECONF(dsi_trans));
939 		tmp |= PIPECONF_ENABLE;
940 		I915_WRITE(PIPECONF(dsi_trans), tmp);
941 
942 		/* wait for transcoder to be enabled */
943 		if (intel_de_wait_for_set(dev_priv, PIPECONF(dsi_trans),
944 					  I965_PIPECONF_ACTIVE, 10))
945 			DRM_ERROR("DSI transcoder not enabled\n");
946 	}
947 }
948 
949 static void gen11_dsi_setup_timeouts(struct intel_encoder *encoder,
950 				     const struct intel_crtc_state *crtc_state)
951 {
952 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
953 	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
954 	enum port port;
955 	enum transcoder dsi_trans;
956 	u32 tmp, hs_tx_timeout, lp_rx_timeout, ta_timeout, divisor, mul;
957 
958 	/*
959 	 * escape clock count calculation:
960 	 * BYTE_CLK_COUNT = TIME_NS/(8 * UI)
961 	 * UI (nsec) = (10^6)/Bitrate
962 	 * TIME_NS = (BYTE_CLK_COUNT * 8 * 10^6)/ Bitrate
963 	 * ESCAPE_CLK_COUNT  = TIME_NS/ESC_CLK_NS
964 	 */
965 	divisor = intel_dsi_tlpx_ns(intel_dsi) * afe_clk(encoder, crtc_state) * 1000;
966 	mul = 8 * 1000000;
967 	hs_tx_timeout = DIV_ROUND_UP(intel_dsi->hs_tx_timeout * mul,
968 				     divisor);
969 	lp_rx_timeout = DIV_ROUND_UP(intel_dsi->lp_rx_timeout * mul, divisor);
970 	ta_timeout = DIV_ROUND_UP(intel_dsi->turn_arnd_val * mul, divisor);
971 
972 	for_each_dsi_port(port, intel_dsi->ports) {
973 		dsi_trans = dsi_port_to_transcoder(port);
974 
975 		/* program hst_tx_timeout */
976 		tmp = I915_READ(DSI_HSTX_TO(dsi_trans));
977 		tmp &= ~HSTX_TIMEOUT_VALUE_MASK;
978 		tmp |= HSTX_TIMEOUT_VALUE(hs_tx_timeout);
979 		I915_WRITE(DSI_HSTX_TO(dsi_trans), tmp);
980 
981 		/* FIXME: DSI_CALIB_TO */
982 
983 		/* program lp_rx_host timeout */
984 		tmp = I915_READ(DSI_LPRX_HOST_TO(dsi_trans));
985 		tmp &= ~LPRX_TIMEOUT_VALUE_MASK;
986 		tmp |= LPRX_TIMEOUT_VALUE(lp_rx_timeout);
987 		I915_WRITE(DSI_LPRX_HOST_TO(dsi_trans), tmp);
988 
989 		/* FIXME: DSI_PWAIT_TO */
990 
991 		/* program turn around timeout */
992 		tmp = I915_READ(DSI_TA_TO(dsi_trans));
993 		tmp &= ~TA_TIMEOUT_VALUE_MASK;
994 		tmp |= TA_TIMEOUT_VALUE(ta_timeout);
995 		I915_WRITE(DSI_TA_TO(dsi_trans), tmp);
996 	}
997 }
998 
999 static void
1000 gen11_dsi_enable_port_and_phy(struct intel_encoder *encoder,
1001 			      const struct intel_crtc_state *crtc_state)
1002 {
1003 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1004 
1005 	/* step 4a: power up all lanes of the DDI used by DSI */
1006 	gen11_dsi_power_up_lanes(encoder);
1007 
1008 	/* step 4b: configure lane sequencing of the Combo-PHY transmitters */
1009 	gen11_dsi_config_phy_lanes_sequence(encoder);
1010 
1011 	/* step 4c: configure voltage swing and skew */
1012 	gen11_dsi_voltage_swing_program_seq(encoder);
1013 
1014 	/* enable DDI buffer */
1015 	gen11_dsi_enable_ddi_buffer(encoder);
1016 
1017 	/* setup D-PHY timings */
1018 	gen11_dsi_setup_dphy_timings(encoder, crtc_state);
1019 
1020 	/* step 4h: setup DSI protocol timeouts */
1021 	gen11_dsi_setup_timeouts(encoder, crtc_state);
1022 
1023 	/* Step (4h, 4i, 4j, 4k): Configure transcoder */
1024 	gen11_dsi_configure_transcoder(encoder, crtc_state);
1025 
1026 	/* Step 4l: Gate DDI clocks */
1027 	if (IS_GEN(dev_priv, 11))
1028 		gen11_dsi_gate_clocks(encoder);
1029 }
1030 
1031 static void gen11_dsi_powerup_panel(struct intel_encoder *encoder)
1032 {
1033 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1034 	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
1035 	struct mipi_dsi_device *dsi;
1036 	enum port port;
1037 	enum transcoder dsi_trans;
1038 	u32 tmp;
1039 	int ret;
1040 
1041 	/* set maximum return packet size */
1042 	for_each_dsi_port(port, intel_dsi->ports) {
1043 		dsi_trans = dsi_port_to_transcoder(port);
1044 
1045 		/*
1046 		 * FIXME: This uses the number of DW's currently in the payload
1047 		 * receive queue. This is probably not what we want here.
1048 		 */
1049 		tmp = I915_READ(DSI_CMD_RXCTL(dsi_trans));
1050 		tmp &= NUMBER_RX_PLOAD_DW_MASK;
1051 		/* multiply "Number Rx Payload DW" by 4 to get max value */
1052 		tmp = tmp * 4;
1053 		dsi = intel_dsi->dsi_hosts[port]->device;
1054 		ret = mipi_dsi_set_maximum_return_packet_size(dsi, tmp);
1055 		if (ret < 0)
1056 			DRM_ERROR("error setting max return pkt size%d\n", tmp);
1057 	}
1058 
1059 	/* panel power on related mipi dsi vbt sequences */
1060 	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_POWER_ON);
1061 	intel_dsi_msleep(intel_dsi, intel_dsi->panel_on_delay);
1062 	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_DEASSERT_RESET);
1063 	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_INIT_OTP);
1064 	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_DISPLAY_ON);
1065 
1066 	/* ensure all panel commands dispatched before enabling transcoder */
1067 	wait_for_cmds_dispatched_to_panel(encoder);
1068 }
1069 
1070 static void gen11_dsi_pre_pll_enable(struct intel_encoder *encoder,
1071 				     const struct intel_crtc_state *crtc_state,
1072 				     const struct drm_connector_state *conn_state)
1073 {
1074 	/* step2: enable IO power */
1075 	gen11_dsi_enable_io_power(encoder);
1076 
1077 	/* step3: enable DSI PLL */
1078 	gen11_dsi_program_esc_clk_div(encoder, crtc_state);
1079 }
1080 
1081 static void gen11_dsi_pre_enable(struct intel_encoder *encoder,
1082 				 const struct intel_crtc_state *pipe_config,
1083 				 const struct drm_connector_state *conn_state)
1084 {
1085 	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
1086 
1087 	/* step3b */
1088 	gen11_dsi_map_pll(encoder, pipe_config);
1089 
1090 	/* step4: enable DSI port and DPHY */
1091 	gen11_dsi_enable_port_and_phy(encoder, pipe_config);
1092 
1093 	/* step5: program and powerup panel */
1094 	gen11_dsi_powerup_panel(encoder);
1095 
1096 	intel_dsc_enable(encoder, pipe_config);
1097 
1098 	/* step6c: configure transcoder timings */
1099 	gen11_dsi_set_transcoder_timings(encoder, pipe_config);
1100 
1101 	/* step6d: enable dsi transcoder */
1102 	gen11_dsi_enable_transcoder(encoder);
1103 
1104 	/* step7: enable backlight */
1105 	intel_panel_enable_backlight(pipe_config, conn_state);
1106 	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_BACKLIGHT_ON);
1107 }
1108 
1109 static void gen11_dsi_disable_transcoder(struct intel_encoder *encoder)
1110 {
1111 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1112 	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
1113 	enum port port;
1114 	enum transcoder dsi_trans;
1115 	u32 tmp;
1116 
1117 	for_each_dsi_port(port, intel_dsi->ports) {
1118 		dsi_trans = dsi_port_to_transcoder(port);
1119 
1120 		/* disable transcoder */
1121 		tmp = I915_READ(PIPECONF(dsi_trans));
1122 		tmp &= ~PIPECONF_ENABLE;
1123 		I915_WRITE(PIPECONF(dsi_trans), tmp);
1124 
1125 		/* wait for transcoder to be disabled */
1126 		if (intel_de_wait_for_clear(dev_priv, PIPECONF(dsi_trans),
1127 					    I965_PIPECONF_ACTIVE, 50))
1128 			DRM_ERROR("DSI trancoder not disabled\n");
1129 	}
1130 }
1131 
1132 static void gen11_dsi_powerdown_panel(struct intel_encoder *encoder)
1133 {
1134 	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
1135 
1136 	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_DISPLAY_OFF);
1137 	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_ASSERT_RESET);
1138 	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_POWER_OFF);
1139 
1140 	/* ensure cmds dispatched to panel */
1141 	wait_for_cmds_dispatched_to_panel(encoder);
1142 }
1143 
1144 static void gen11_dsi_deconfigure_trancoder(struct intel_encoder *encoder)
1145 {
1146 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1147 	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
1148 	enum port port;
1149 	enum transcoder dsi_trans;
1150 	u32 tmp;
1151 
1152 	/* put dsi link in ULPS */
1153 	for_each_dsi_port(port, intel_dsi->ports) {
1154 		dsi_trans = dsi_port_to_transcoder(port);
1155 		tmp = I915_READ(DSI_LP_MSG(dsi_trans));
1156 		tmp |= LINK_ENTER_ULPS;
1157 		tmp &= ~LINK_ULPS_TYPE_LP11;
1158 		I915_WRITE(DSI_LP_MSG(dsi_trans), tmp);
1159 
1160 		if (wait_for_us((I915_READ(DSI_LP_MSG(dsi_trans)) &
1161 				LINK_IN_ULPS),
1162 				10))
1163 			DRM_ERROR("DSI link not in ULPS\n");
1164 	}
1165 
1166 	/* disable ddi function */
1167 	for_each_dsi_port(port, intel_dsi->ports) {
1168 		dsi_trans = dsi_port_to_transcoder(port);
1169 		tmp = I915_READ(TRANS_DDI_FUNC_CTL(dsi_trans));
1170 		tmp &= ~TRANS_DDI_FUNC_ENABLE;
1171 		I915_WRITE(TRANS_DDI_FUNC_CTL(dsi_trans), tmp);
1172 	}
1173 
1174 	/* disable port sync mode if dual link */
1175 	if (intel_dsi->dual_link) {
1176 		for_each_dsi_port(port, intel_dsi->ports) {
1177 			dsi_trans = dsi_port_to_transcoder(port);
1178 			tmp = I915_READ(TRANS_DDI_FUNC_CTL2(dsi_trans));
1179 			tmp &= ~PORT_SYNC_MODE_ENABLE;
1180 			I915_WRITE(TRANS_DDI_FUNC_CTL2(dsi_trans), tmp);
1181 		}
1182 	}
1183 }
1184 
1185 static void gen11_dsi_disable_port(struct intel_encoder *encoder)
1186 {
1187 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1188 	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
1189 	u32 tmp;
1190 	enum port port;
1191 
1192 	gen11_dsi_ungate_clocks(encoder);
1193 	for_each_dsi_port(port, intel_dsi->ports) {
1194 		tmp = I915_READ(DDI_BUF_CTL(port));
1195 		tmp &= ~DDI_BUF_CTL_ENABLE;
1196 		I915_WRITE(DDI_BUF_CTL(port), tmp);
1197 
1198 		if (wait_for_us((I915_READ(DDI_BUF_CTL(port)) &
1199 				 DDI_BUF_IS_IDLE),
1200 				 8))
1201 			DRM_ERROR("DDI port:%c buffer not idle\n",
1202 				  port_name(port));
1203 	}
1204 	gen11_dsi_gate_clocks(encoder);
1205 }
1206 
1207 static void gen11_dsi_disable_io_power(struct intel_encoder *encoder)
1208 {
1209 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1210 	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
1211 	enum port port;
1212 	u32 tmp;
1213 
1214 	for_each_dsi_port(port, intel_dsi->ports) {
1215 		intel_wakeref_t wakeref;
1216 
1217 		wakeref = fetch_and_zero(&intel_dsi->io_wakeref[port]);
1218 		intel_display_power_put(dev_priv,
1219 					port == PORT_A ?
1220 					POWER_DOMAIN_PORT_DDI_A_IO :
1221 					POWER_DOMAIN_PORT_DDI_B_IO,
1222 					wakeref);
1223 	}
1224 
1225 	/* set mode to DDI */
1226 	for_each_dsi_port(port, intel_dsi->ports) {
1227 		tmp = I915_READ(ICL_DSI_IO_MODECTL(port));
1228 		tmp &= ~COMBO_PHY_MODE_DSI;
1229 		I915_WRITE(ICL_DSI_IO_MODECTL(port), tmp);
1230 	}
1231 }
1232 
1233 static void gen11_dsi_disable(struct intel_encoder *encoder,
1234 			      const struct intel_crtc_state *old_crtc_state,
1235 			      const struct drm_connector_state *old_conn_state)
1236 {
1237 	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
1238 
1239 	/* step1: turn off backlight */
1240 	intel_dsi_vbt_exec_sequence(intel_dsi, MIPI_SEQ_BACKLIGHT_OFF);
1241 	intel_panel_disable_backlight(old_conn_state);
1242 
1243 	/* step2d,e: disable transcoder and wait */
1244 	gen11_dsi_disable_transcoder(encoder);
1245 
1246 	/* step2f,g: powerdown panel */
1247 	gen11_dsi_powerdown_panel(encoder);
1248 
1249 	/* step2h,i,j: deconfig trancoder */
1250 	gen11_dsi_deconfigure_trancoder(encoder);
1251 
1252 	/* step3: disable port */
1253 	gen11_dsi_disable_port(encoder);
1254 
1255 	/* step4: disable IO power */
1256 	gen11_dsi_disable_io_power(encoder);
1257 }
1258 
1259 static void gen11_dsi_post_disable(struct intel_encoder *encoder,
1260 				   const struct intel_crtc_state *old_crtc_state,
1261 				   const struct drm_connector_state *old_conn_state)
1262 {
1263 	intel_crtc_vblank_off(old_crtc_state);
1264 
1265 	intel_dsc_disable(old_crtc_state);
1266 
1267 	skl_scaler_disable(old_crtc_state);
1268 }
1269 
1270 static enum drm_mode_status gen11_dsi_mode_valid(struct drm_connector *connector,
1271 						 struct drm_display_mode *mode)
1272 {
1273 	/* FIXME: DSC? */
1274 	return intel_dsi_mode_valid(connector, mode);
1275 }
1276 
1277 static void gen11_dsi_get_timings(struct intel_encoder *encoder,
1278 				  struct intel_crtc_state *pipe_config)
1279 {
1280 	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
1281 	struct drm_display_mode *adjusted_mode =
1282 					&pipe_config->hw.adjusted_mode;
1283 
1284 	if (pipe_config->dsc.compressed_bpp) {
1285 		int div = pipe_config->dsc.compressed_bpp;
1286 		int mul = mipi_dsi_pixel_format_to_bpp(intel_dsi->pixel_format);
1287 
1288 		adjusted_mode->crtc_htotal =
1289 			DIV_ROUND_UP(adjusted_mode->crtc_htotal * mul, div);
1290 		adjusted_mode->crtc_hsync_start =
1291 			DIV_ROUND_UP(adjusted_mode->crtc_hsync_start * mul, div);
1292 		adjusted_mode->crtc_hsync_end =
1293 			DIV_ROUND_UP(adjusted_mode->crtc_hsync_end * mul, div);
1294 	}
1295 
1296 	if (intel_dsi->dual_link) {
1297 		adjusted_mode->crtc_hdisplay *= 2;
1298 		if (intel_dsi->dual_link == DSI_DUAL_LINK_FRONT_BACK)
1299 			adjusted_mode->crtc_hdisplay -=
1300 						intel_dsi->pixel_overlap;
1301 		adjusted_mode->crtc_htotal *= 2;
1302 	}
1303 	adjusted_mode->crtc_hblank_start = adjusted_mode->crtc_hdisplay;
1304 	adjusted_mode->crtc_hblank_end = adjusted_mode->crtc_htotal;
1305 
1306 	if (intel_dsi->operation_mode == INTEL_DSI_VIDEO_MODE) {
1307 		if (intel_dsi->dual_link) {
1308 			adjusted_mode->crtc_hsync_start *= 2;
1309 			adjusted_mode->crtc_hsync_end *= 2;
1310 		}
1311 	}
1312 	adjusted_mode->crtc_vblank_start = adjusted_mode->crtc_vdisplay;
1313 	adjusted_mode->crtc_vblank_end = adjusted_mode->crtc_vtotal;
1314 }
1315 
1316 static void gen11_dsi_get_config(struct intel_encoder *encoder,
1317 				 struct intel_crtc_state *pipe_config)
1318 {
1319 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1320 	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
1321 	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
1322 
1323 	intel_dsc_get_config(encoder, pipe_config);
1324 
1325 	/* FIXME: adapt icl_ddi_clock_get() for DSI and use that? */
1326 	pipe_config->port_clock =
1327 		cnl_calc_wrpll_link(dev_priv, &pipe_config->dpll_hw_state);
1328 
1329 	pipe_config->hw.adjusted_mode.crtc_clock = intel_dsi->pclk;
1330 	if (intel_dsi->dual_link)
1331 		pipe_config->hw.adjusted_mode.crtc_clock *= 2;
1332 
1333 	gen11_dsi_get_timings(encoder, pipe_config);
1334 	pipe_config->output_types |= BIT(INTEL_OUTPUT_DSI);
1335 	pipe_config->pipe_bpp = bdw_get_pipemisc_bpp(crtc);
1336 }
1337 
1338 static int gen11_dsi_dsc_compute_config(struct intel_encoder *encoder,
1339 					struct intel_crtc_state *crtc_state)
1340 {
1341 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1342 	struct drm_dsc_config *vdsc_cfg = &crtc_state->dsc.config;
1343 	int dsc_max_bpc = INTEL_GEN(dev_priv) >= 12 ? 12 : 10;
1344 	bool use_dsc;
1345 	int ret;
1346 
1347 	use_dsc = intel_bios_get_dsc_params(encoder, crtc_state, dsc_max_bpc);
1348 	if (!use_dsc)
1349 		return 0;
1350 
1351 	if (crtc_state->pipe_bpp < 8 * 3)
1352 		return -EINVAL;
1353 
1354 	/* FIXME: split only when necessary */
1355 	if (crtc_state->dsc.slice_count > 1)
1356 		crtc_state->dsc.dsc_split = true;
1357 
1358 	vdsc_cfg->convert_rgb = true;
1359 
1360 	ret = intel_dsc_compute_params(encoder, crtc_state);
1361 	if (ret)
1362 		return ret;
1363 
1364 	/* DSI specific sanity checks on the common code */
1365 	WARN_ON(vdsc_cfg->vbr_enable);
1366 	WARN_ON(vdsc_cfg->simple_422);
1367 	WARN_ON(vdsc_cfg->pic_width % vdsc_cfg->slice_width);
1368 	WARN_ON(vdsc_cfg->slice_height < 8);
1369 	WARN_ON(vdsc_cfg->pic_height % vdsc_cfg->slice_height);
1370 
1371 	ret = drm_dsc_compute_rc_parameters(vdsc_cfg);
1372 	if (ret)
1373 		return ret;
1374 
1375 	crtc_state->dsc.compression_enable = true;
1376 
1377 	return 0;
1378 }
1379 
1380 static int gen11_dsi_compute_config(struct intel_encoder *encoder,
1381 				    struct intel_crtc_state *pipe_config,
1382 				    struct drm_connector_state *conn_state)
1383 {
1384 	struct intel_dsi *intel_dsi = container_of(encoder, struct intel_dsi,
1385 						   base);
1386 	struct intel_connector *intel_connector = intel_dsi->attached_connector;
1387 	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
1388 	const struct drm_display_mode *fixed_mode =
1389 					intel_connector->panel.fixed_mode;
1390 	struct drm_display_mode *adjusted_mode =
1391 					&pipe_config->hw.adjusted_mode;
1392 
1393 	pipe_config->output_format = INTEL_OUTPUT_FORMAT_RGB;
1394 	intel_fixed_panel_mode(fixed_mode, adjusted_mode);
1395 	intel_pch_panel_fitting(crtc, pipe_config, conn_state->scaling_mode);
1396 
1397 	adjusted_mode->flags = 0;
1398 
1399 	/* Dual link goes to transcoder DSI '0' */
1400 	if (intel_dsi->ports == BIT(PORT_B))
1401 		pipe_config->cpu_transcoder = TRANSCODER_DSI_1;
1402 	else
1403 		pipe_config->cpu_transcoder = TRANSCODER_DSI_0;
1404 
1405 	if (intel_dsi->pixel_format == MIPI_DSI_FMT_RGB888)
1406 		pipe_config->pipe_bpp = 24;
1407 	else
1408 		pipe_config->pipe_bpp = 18;
1409 
1410 	pipe_config->clock_set = true;
1411 
1412 	if (gen11_dsi_dsc_compute_config(encoder, pipe_config))
1413 		DRM_DEBUG_KMS("Attempting to use DSC failed\n");
1414 
1415 	pipe_config->port_clock = afe_clk(encoder, pipe_config) / 5;
1416 
1417 	return 0;
1418 }
1419 
1420 static void gen11_dsi_get_power_domains(struct intel_encoder *encoder,
1421 					struct intel_crtc_state *crtc_state)
1422 {
1423 	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
1424 
1425 	get_dsi_io_power_domains(i915,
1426 				 enc_to_intel_dsi(encoder));
1427 
1428 	if (crtc_state->dsc.compression_enable)
1429 		intel_display_power_get(i915,
1430 					intel_dsc_power_domain(crtc_state));
1431 }
1432 
1433 static bool gen11_dsi_get_hw_state(struct intel_encoder *encoder,
1434 				   enum pipe *pipe)
1435 {
1436 	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1437 	struct intel_dsi *intel_dsi = enc_to_intel_dsi(encoder);
1438 	enum transcoder dsi_trans;
1439 	intel_wakeref_t wakeref;
1440 	enum port port;
1441 	bool ret = false;
1442 	u32 tmp;
1443 
1444 	wakeref = intel_display_power_get_if_enabled(dev_priv,
1445 						     encoder->power_domain);
1446 	if (!wakeref)
1447 		return false;
1448 
1449 	for_each_dsi_port(port, intel_dsi->ports) {
1450 		dsi_trans = dsi_port_to_transcoder(port);
1451 		tmp = I915_READ(TRANS_DDI_FUNC_CTL(dsi_trans));
1452 		switch (tmp & TRANS_DDI_EDP_INPUT_MASK) {
1453 		case TRANS_DDI_EDP_INPUT_A_ON:
1454 			*pipe = PIPE_A;
1455 			break;
1456 		case TRANS_DDI_EDP_INPUT_B_ONOFF:
1457 			*pipe = PIPE_B;
1458 			break;
1459 		case TRANS_DDI_EDP_INPUT_C_ONOFF:
1460 			*pipe = PIPE_C;
1461 			break;
1462 		case TRANS_DDI_EDP_INPUT_D_ONOFF:
1463 			*pipe = PIPE_D;
1464 			break;
1465 		default:
1466 			DRM_ERROR("Invalid PIPE input\n");
1467 			goto out;
1468 		}
1469 
1470 		tmp = I915_READ(PIPECONF(dsi_trans));
1471 		ret = tmp & PIPECONF_ENABLE;
1472 	}
1473 out:
1474 	intel_display_power_put(dev_priv, encoder->power_domain, wakeref);
1475 	return ret;
1476 }
1477 
1478 static void gen11_dsi_encoder_destroy(struct drm_encoder *encoder)
1479 {
1480 	intel_encoder_destroy(encoder);
1481 }
1482 
1483 static const struct drm_encoder_funcs gen11_dsi_encoder_funcs = {
1484 	.destroy = gen11_dsi_encoder_destroy,
1485 };
1486 
1487 static const struct drm_connector_funcs gen11_dsi_connector_funcs = {
1488 	.late_register = intel_connector_register,
1489 	.early_unregister = intel_connector_unregister,
1490 	.destroy = intel_connector_destroy,
1491 	.fill_modes = drm_helper_probe_single_connector_modes,
1492 	.atomic_get_property = intel_digital_connector_atomic_get_property,
1493 	.atomic_set_property = intel_digital_connector_atomic_set_property,
1494 	.atomic_destroy_state = drm_atomic_helper_connector_destroy_state,
1495 	.atomic_duplicate_state = intel_digital_connector_duplicate_state,
1496 };
1497 
1498 static const struct drm_connector_helper_funcs gen11_dsi_connector_helper_funcs = {
1499 	.get_modes = intel_dsi_get_modes,
1500 	.mode_valid = gen11_dsi_mode_valid,
1501 	.atomic_check = intel_digital_connector_atomic_check,
1502 };
1503 
1504 static int gen11_dsi_host_attach(struct mipi_dsi_host *host,
1505 				 struct mipi_dsi_device *dsi)
1506 {
1507 	return 0;
1508 }
1509 
1510 static int gen11_dsi_host_detach(struct mipi_dsi_host *host,
1511 				 struct mipi_dsi_device *dsi)
1512 {
1513 	return 0;
1514 }
1515 
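/*
 * mipi_dsi_host transfer hook: convert the generic message into a MIPI
 * DSI packet and push its header (and payload, for long packets) into the
 * command FIFOs of the transcoder that owns this host's port.
 */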
1516 static ssize_t gen11_dsi_host_transfer(struct mipi_dsi_host *host,
1517 				       const struct mipi_dsi_msg *msg)
1518 {
1519 	struct intel_dsi_host *intel_dsi_host = to_intel_dsi_host(host);
1520 	struct mipi_dsi_packet dsi_pkt;
1521 	ssize_t ret;
1522 	bool enable_lpdt = false;
1523 
1524 	ret = mipi_dsi_create_packet(&dsi_pkt, msg);
1525 	if (ret < 0)
1526 		return ret;
1527 
1528 	if (msg->flags & MIPI_DSI_MSG_USE_LPM)
1529 		enable_lpdt = true;
1530 
1531 	/* send packet header */
1532 	ret  = dsi_send_pkt_hdr(intel_dsi_host, dsi_pkt, enable_lpdt);
1533 	if (ret < 0)
1534 		return ret;
1535 
1536 	/* only long packet contains payload */
1537 	if (mipi_dsi_packet_format_is_long(msg->type)) {
1538 		ret = dsi_send_pkt_payld(intel_dsi_host, dsi_pkt);
1539 		if (ret < 0)
1540 			return ret;
1541 	}
1542 
1543 	//TODO: add payload receive code if needed
1544 
1545 	ret = sizeof(dsi_pkt.header) + dsi_pkt.payload_length;
1546 
1547 	return ret;
1548 }
1549 
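/*
 * Host ops backing the per-port intel_dsi_host created in icl_dsi_init().
 * attach/detach are no-ops; panel command traffic generated while replaying
 * the VBT sequences (e.g. through the mipi_dsi_generic_write() and
 * mipi_dsi_dcs_write() helpers) ends up in gen11_dsi_host_transfer() above.
 */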
1550 static const struct mipi_dsi_host_ops gen11_dsi_host_ops = {
1551 	.attach = gen11_dsi_host_attach,
1552 	.detach = gen11_dsi_host_detach,
1553 	.transfer = gen11_dsi_host_transfer,
1554 };
1555 
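/*
 * Upper bounds of the D-PHY timing override fields; the counts computed in
 * icl_dphy_param_init() below are clamped to these values.
 */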
1556 #define ICL_PREPARE_CNT_MAX	0x7
1557 #define ICL_CLK_ZERO_CNT_MAX	0xf
1558 #define ICL_TRAIL_CNT_MAX	0x7
1559 #define ICL_TCLK_PRE_CNT_MAX	0x3
1560 #define ICL_TCLK_POST_CNT_MAX	0x7
1561 #define ICL_HS_ZERO_CNT_MAX	0xf
1562 #define ICL_EXIT_ZERO_CNT_MAX	0x7
1563 
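/*
 * Derive the D-PHY timing counts (prepare, zero, trail, clk pre/post, exit)
 * in escape-clock (tlpx) units from the VBT MIPI timing parameters, clamp
 * each to its field maximum, and cache the packed clock- and data-lane
 * register values in intel_dsi for later programming.
 */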
1564 static void icl_dphy_param_init(struct intel_dsi *intel_dsi)
1565 {
1566 	struct drm_device *dev = intel_dsi->base.base.dev;
1567 	struct drm_i915_private *dev_priv = to_i915(dev);
1568 	struct mipi_config *mipi_config = dev_priv->vbt.dsi.config;
1569 	u32 tlpx_ns;
1570 	u32 prepare_cnt, exit_zero_cnt, clk_zero_cnt, trail_cnt;
1571 	u32 ths_prepare_ns, tclk_trail_ns;
1572 	u32 hs_zero_cnt;
1573 	u32 tclk_pre_cnt, tclk_post_cnt;
1574 
1575 	tlpx_ns = intel_dsi_tlpx_ns(intel_dsi);
1576 
1577 	tclk_trail_ns = max(mipi_config->tclk_trail, mipi_config->ths_trail);
1578 	ths_prepare_ns = max(mipi_config->ths_prepare,
1579 			     mipi_config->tclk_prepare);
1580 
1581 	/*
1582 	 * prepare count in escape clocks
1583 	 * This field is a 1.2 binary fixed-point value: the most
1584 	 * significant bit is the integer part and the two least
1585 	 * significant bits are the fraction, so the field can represent
1586 	 * a range of 0.25 to 1.75 escape clocks in steps of 0.25.
1587 	 */
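	/*
	 * Illustrative example (arbitrary values, not from any VBT): with
	 * ths_prepare_ns = 60 and tlpx_ns = 50, DIV_ROUND_UP(60 * 4, 50) = 5,
	 * i.e. binary 101 = 1.25 escape clocks.
	 */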
1588 	prepare_cnt = DIV_ROUND_UP(ths_prepare_ns * 4, tlpx_ns);
1589 	if (prepare_cnt > ICL_PREPARE_CNT_MAX) {
1590 		DRM_DEBUG_KMS("prepare_cnt out of range (%d)\n", prepare_cnt);
1591 		prepare_cnt = ICL_PREPARE_CNT_MAX;
1592 	}
1593 
1594 	/* clk zero count in escape clocks */
1595 	clk_zero_cnt = DIV_ROUND_UP(mipi_config->tclk_prepare_clkzero -
1596 				    ths_prepare_ns, tlpx_ns);
1597 	if (clk_zero_cnt > ICL_CLK_ZERO_CNT_MAX) {
1598 		DRM_DEBUG_KMS("clk_zero_cnt out of range (%d)\n", clk_zero_cnt);
1599 		clk_zero_cnt = ICL_CLK_ZERO_CNT_MAX;
1600 	}
1601 
1602 	/* trail cnt in escape clocks */
1603 	trail_cnt = DIV_ROUND_UP(tclk_trail_ns, tlpx_ns);
1604 	if (trail_cnt > ICL_TRAIL_CNT_MAX) {
1605 		DRM_DEBUG_KMS("trail_cnt out of range (%d)\n", trail_cnt);
1606 		trail_cnt = ICL_TRAIL_CNT_MAX;
1607 	}
1608 
1609 	/* tclk pre count in escape clocks */
1610 	tclk_pre_cnt = DIV_ROUND_UP(mipi_config->tclk_pre, tlpx_ns);
1611 	if (tclk_pre_cnt > ICL_TCLK_PRE_CNT_MAX) {
1612 		DRM_DEBUG_KMS("tclk_pre_cnt out of range (%d)\n", tclk_pre_cnt);
1613 		tclk_pre_cnt = ICL_TCLK_PRE_CNT_MAX;
1614 	}
1615 
1616 	/* tclk post count in escape clocks */
1617 	tclk_post_cnt = DIV_ROUND_UP(mipi_config->tclk_post, tlpx_ns);
1618 	if (tclk_post_cnt > ICL_TCLK_POST_CNT_MAX) {
1619 		DRM_DEBUG_KMS("tclk_post_cnt out of range (%d)\n", tclk_post_cnt);
1620 		tclk_post_cnt = ICL_TCLK_POST_CNT_MAX;
1621 	}
1622 
1623 	/* hs zero cnt in escape clocks */
1624 	hs_zero_cnt = DIV_ROUND_UP(mipi_config->ths_prepare_hszero -
1625 				   ths_prepare_ns, tlpx_ns);
1626 	if (hs_zero_cnt > ICL_HS_ZERO_CNT_MAX) {
1627 		DRM_DEBUG_KMS("hs_zero_cnt out of range (%d)\n", hs_zero_cnt);
1628 		hs_zero_cnt = ICL_HS_ZERO_CNT_MAX;
1629 	}
1630 
1631 	/* hs exit zero cnt in escape clocks */
1632 	exit_zero_cnt = DIV_ROUND_UP(mipi_config->ths_exit, tlpx_ns);
1633 	if (exit_zero_cnt > ICL_EXIT_ZERO_CNT_MAX) {
1634 		DRM_DEBUG_KMS("exit_zero_cnt out of range (%d)\n", exit_zero_cnt);
1635 		exit_zero_cnt = ICL_EXIT_ZERO_CNT_MAX;
1636 	}
1637 
1638 	/* clock lane dphy timings */
1639 	intel_dsi->dphy_reg = (CLK_PREPARE_OVERRIDE |
1640 			       CLK_PREPARE(prepare_cnt) |
1641 			       CLK_ZERO_OVERRIDE |
1642 			       CLK_ZERO(clk_zero_cnt) |
1643 			       CLK_PRE_OVERRIDE |
1644 			       CLK_PRE(tclk_pre_cnt) |
1645 			       CLK_POST_OVERRIDE |
1646 			       CLK_POST(tclk_post_cnt) |
1647 			       CLK_TRAIL_OVERRIDE |
1648 			       CLK_TRAIL(trail_cnt));
1649 
1650 	/* data lanes dphy timings */
1651 	intel_dsi->dphy_data_lane_reg = (HS_PREPARE_OVERRIDE |
1652 					 HS_PREPARE(prepare_cnt) |
1653 					 HS_ZERO_OVERRIDE |
1654 					 HS_ZERO(hs_zero_cnt) |
1655 					 HS_TRAIL_OVERRIDE |
1656 					 HS_TRAIL(trail_cnt) |
1657 					 HS_EXIT_OVERRIDE |
1658 					 HS_EXIT(exit_zero_cnt));
1659 
1660 	intel_dsi_log_params(intel_dsi);
1661 }
1662 
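/*
 * Attach the scaling-mode property (aspect, fullscreen or center, defaulting
 * to aspect scaling) and the panel-orientation property sized from the
 * panel's fixed mode.
 */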
1663 static void icl_dsi_add_properties(struct intel_connector *connector)
1664 {
1665 	u32 allowed_scalers;
1666 
1667 	allowed_scalers = BIT(DRM_MODE_SCALE_ASPECT) |
1668 			   BIT(DRM_MODE_SCALE_FULLSCREEN) |
1669 			   BIT(DRM_MODE_SCALE_CENTER);
1670 
1671 	drm_connector_attach_scaling_mode_property(&connector->base,
1672 						   allowed_scalers);
1673 
1674 	connector->base.state->scaling_mode = DRM_MODE_SCALE_ASPECT;
1675 
1676 	connector->base.display_info.panel_orientation =
1677 			intel_dsi_get_panel_orientation(connector);
1678 	drm_connector_init_panel_orientation_property(&connector->base,
1679 				connector->panel.fixed_mode->hdisplay,
1680 				connector->panel.fixed_mode->vdisplay);
1681 }
1682 
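/*
 * Probe and register a gen11 DSI output: bail out unless the VBT reports a
 * DSI port, allocate the encoder and connector, hook up the encoder vfuncs
 * and connector helpers, pull the fixed mode from the VBT, set up a DSI host
 * per enabled port, parse the VBT panel sequences, compute the D-PHY
 * parameters and finally attach the connector properties.  Any failure after
 * the encoder is registered unwinds through the err label.
 */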
1683 void icl_dsi_init(struct drm_i915_private *dev_priv)
1684 {
1685 	struct drm_device *dev = &dev_priv->drm;
1686 	struct intel_dsi *intel_dsi;
1687 	struct intel_encoder *encoder;
1688 	struct intel_connector *intel_connector;
1689 	struct drm_connector *connector;
1690 	struct drm_display_mode *fixed_mode;
1691 	enum port port;
1692 
1693 	if (!intel_bios_is_dsi_present(dev_priv, &port))
1694 		return;
1695 
1696 	intel_dsi = kzalloc(sizeof(*intel_dsi), GFP_KERNEL);
1697 	if (!intel_dsi)
1698 		return;
1699 
1700 	intel_connector = intel_connector_alloc();
1701 	if (!intel_connector) {
1702 		kfree(intel_dsi);
1703 		return;
1704 	}
1705 
1706 	encoder = &intel_dsi->base;
1707 	intel_dsi->attached_connector = intel_connector;
1708 	connector = &intel_connector->base;
1709 
1710 	/* register DSI encoder with DRM subsystem */
1711 	drm_encoder_init(dev, &encoder->base, &gen11_dsi_encoder_funcs,
1712 			 DRM_MODE_ENCODER_DSI, "DSI %c", port_name(port));
1713 
1714 	encoder->pre_pll_enable = gen11_dsi_pre_pll_enable;
1715 	encoder->pre_enable = gen11_dsi_pre_enable;
1716 	encoder->disable = gen11_dsi_disable;
1717 	encoder->post_disable = gen11_dsi_post_disable;
1718 	encoder->port = port;
1719 	encoder->get_config = gen11_dsi_get_config;
1720 	encoder->update_pipe = intel_panel_update_backlight;
1721 	encoder->compute_config = gen11_dsi_compute_config;
1722 	encoder->get_hw_state = gen11_dsi_get_hw_state;
1723 	encoder->type = INTEL_OUTPUT_DSI;
1724 	encoder->cloneable = 0;
1725 	encoder->pipe_mask = ~0;
1726 	encoder->power_domain = POWER_DOMAIN_PORT_DSI;
1727 	encoder->get_power_domains = gen11_dsi_get_power_domains;
1728 
1729 	/* register DSI connector with DRM subsystem */
1730 	drm_connector_init(dev, connector, &gen11_dsi_connector_funcs,
1731 			   DRM_MODE_CONNECTOR_DSI);
1732 	drm_connector_helper_add(connector, &gen11_dsi_connector_helper_funcs);
1733 	connector->display_info.subpixel_order = SubPixelHorizontalRGB;
1734 	connector->interlace_allowed = false;
1735 	connector->doublescan_allowed = false;
1736 	intel_connector->get_hw_state = intel_connector_get_hw_state;
1737 
1738 	/* attach connector to encoder */
1739 	intel_connector_attach_encoder(intel_connector, encoder);
1740 
1741 	mutex_lock(&dev->mode_config.mutex);
1742 	fixed_mode = intel_panel_vbt_fixed_mode(intel_connector);
1743 	mutex_unlock(&dev->mode_config.mutex);
1744 
1745 	if (!fixed_mode) {
1746 		DRM_ERROR("DSI fixed mode info missing\n");
1747 		goto err;
1748 	}
1749 
1750 	intel_panel_init(&intel_connector->panel, fixed_mode, NULL);
1751 	intel_panel_setup_backlight(connector, INVALID_PIPE);
1752 
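	/*
	 * Dual-link panels drive both DSI ports (A and B) from this one
	 * encoder; single-link panels use only the VBT-selected port.
	 */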
1753 	if (dev_priv->vbt.dsi.config->dual_link)
1754 		intel_dsi->ports = BIT(PORT_A) | BIT(PORT_B);
1755 	else
1756 		intel_dsi->ports = BIT(port);
1757 
1758 	intel_dsi->dcs_backlight_ports = dev_priv->vbt.dsi.bl_ports;
1759 	intel_dsi->dcs_cabc_ports = dev_priv->vbt.dsi.cabc_ports;
1760 
1761 	for_each_dsi_port(port, intel_dsi->ports) {
1762 		struct intel_dsi_host *host;
1763 
1764 		host = intel_dsi_host_init(intel_dsi, &gen11_dsi_host_ops, port);
1765 		if (!host)
1766 			goto err;
1767 
1768 		intel_dsi->dsi_hosts[port] = host;
1769 	}
1770 
1771 	if (!intel_dsi_vbt_init(intel_dsi, MIPI_DSI_GENERIC_PANEL_ID)) {
1772 		DRM_DEBUG_KMS("no device found\n");
1773 		goto err;
1774 	}
1775 
1776 	icl_dphy_param_init(intel_dsi);
1777 
1778 	icl_dsi_add_properties(intel_connector);
1779 	return;
1780 
1781 err:
1782 	drm_encoder_cleanup(&encoder->base);
1783 	kfree(intel_dsi);
1784 	kfree(intel_connector);
1785 }
1786