xref: /dflybsd-src/sys/dev/drm/i915/intel_dp.c (revision 450f08dbfd98cded95c51be4079ef10f5adb3241)
1 /*
2  * Copyright © 2008 Intel Corporation
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice (including the next
12  * paragraph) shall be included in all copies or substantial portions of the
13  * Software.
14  *
15  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
18  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21  * IN THE SOFTWARE.
22  *
23  * Authors:
24  *    Keith Packard <keithp@keithp.com>
25  *
26  * $FreeBSD: src/sys/dev/drm2/i915/intel_dp.c,v 1.1 2012/05/22 11:07:44 kib Exp $
27  */
28 
29 #include <dev/drm/drmP.h>
30 #include <dev/drm/drm.h>
31 #include <dev/drm/drm_crtc.h>
32 #include <dev/drm/drm_crtc_helper.h>
33 #include "i915_drm.h"
34 #include "i915_drv.h"
35 #include "intel_drv.h"
36 #include <dev/drm/drm_dp_helper.h>
37 
/* Number of DPCD receiver-capability bytes cached in intel_dp->dpcd */
#define DP_RECEIVER_CAP_SIZE	0xf
/* Bytes of DPCD link status read back during link training */
#define DP_LINK_STATUS_SIZE	6
/* 10 * 1000; link-check interval — units depend on caller (not used in this chunk) */
#define DP_LINK_CHECK_TIMEOUT	(10 * 1000)

/* Size of the link_configuration scratch block written at mode-set time */
#define DP_LINK_CONFIGURATION_SIZE	9
43 
/* Per-port DisplayPort/eDP state, embedded around the generic encoder. */
struct intel_dp {
	struct intel_encoder base;
	uint32_t output_reg;	/* MMIO offset of this port's DP register */
	uint32_t DP;		/* cached value to program into output_reg */
	uint8_t  link_configuration[DP_LINK_CONFIGURATION_SIZE];
	bool has_audio;
	enum hdmi_force_audio force_audio;
	uint32_t color_range;
	int dpms_mode;
	uint8_t link_bw;	/* negotiated DP_LINK_BW_* code */
	uint8_t lane_count;	/* negotiated lane count (1, 2 or 4) */
	uint8_t dpcd[DP_RECEIVER_CAP_SIZE];	/* cached sink capabilities */
	device_t dp_iic_bus;	/* I2C-over-AUX bus device */
	device_t adapter;
	bool is_pch_edp;	/* eDP port lives on the PCH rather than the CPU */
	uint8_t	train_set[4];
	/* eDP panel power-sequencing delays; multiplied by 1000 (-> us) at use sites */
	int panel_power_up_delay;
	int panel_power_down_delay;
	int panel_power_cycle_delay;
	int backlight_on_delay;
	int backlight_off_delay;
	struct drm_display_mode *panel_fixed_mode;  /* for eDP */
	struct timeout_task panel_vdd_task;	/* deferred VDD force-off work */
	bool want_panel_vdd;	/* true while a caller needs VDD forced on */
};
69 
70 /**
71  * is_edp - is the given port attached to an eDP panel (either CPU or PCH)
72  * @intel_dp: DP struct
73  *
74  * If a CPU or PCH DP output is attached to an eDP panel, this function
75  * will return true, and false otherwise.
76  */
static bool is_edp(struct intel_dp *intel_dp)
{
	/* eDP outputs are registered with encoder type INTEL_OUTPUT_EDP */
	return intel_dp->base.type == INTEL_OUTPUT_EDP;
}
81 
82 /**
83  * is_pch_edp - is the port on the PCH and attached to an eDP panel?
84  * @intel_dp: DP struct
85  *
86  * Returns true if the given DP struct corresponds to a PCH DP port attached
87  * to an eDP panel, false otherwise.  Helpful for determining whether we
88  * may need FDI resources for a given DP output or not.
89  */
static bool is_pch_edp(struct intel_dp *intel_dp)
{
	/* Flag is set at init time for eDP ports located on the PCH */
	return intel_dp->is_pch_edp;
}
94 
95 /**
96  * is_cpu_edp - is the port on the CPU and attached to an eDP panel?
97  * @intel_dp: DP struct
98  *
99  * Returns true if the given DP struct corresponds to a CPU eDP port.
100  */
101 static bool is_cpu_edp(struct intel_dp *intel_dp)
102 {
103 	return is_edp(intel_dp) && !is_pch_edp(intel_dp);
104 }
105 
/* Recover the intel_dp wrapper from the embedded drm_encoder. */
static struct intel_dp *enc_to_intel_dp(struct drm_encoder *encoder)
{
	return container_of(encoder, struct intel_dp, base.base);
}
110 
/* Recover the intel_dp wrapper from the connector's attached encoder. */
static struct intel_dp *intel_attached_dp(struct drm_connector *connector)
{
	return container_of(intel_attached_encoder(connector),
			    struct intel_dp, base);
}
116 
117 /**
118  * intel_encoder_is_pch_edp - is the given encoder a PCH attached eDP?
119  * @encoder: DRM encoder
120  *
121  * Return true if @encoder corresponds to a PCH attached eDP panel.  Needed
122  * by intel_display.c.
123  */
124 bool intel_encoder_is_pch_edp(struct drm_encoder *encoder)
125 {
126 	struct intel_dp *intel_dp;
127 
128 	if (!encoder)
129 		return false;
130 
131 	intel_dp = enc_to_intel_dp(encoder);
132 
133 	return is_pch_edp(intel_dp);
134 }
135 
136 static void intel_dp_start_link_train(struct intel_dp *intel_dp);
137 static void intel_dp_complete_link_train(struct intel_dp *intel_dp);
138 static void intel_dp_link_down(struct intel_dp *intel_dp);
139 
140 void
141 intel_edp_link_config(struct intel_encoder *intel_encoder,
142 		       int *lane_num, int *link_bw)
143 {
144 	struct intel_dp *intel_dp = container_of(intel_encoder, struct intel_dp, base);
145 
146 	*lane_num = intel_dp->lane_count;
147 	if (intel_dp->link_bw == DP_LINK_BW_1_62)
148 		*link_bw = 162000;
149 	else if (intel_dp->link_bw == DP_LINK_BW_2_7)
150 		*link_bw = 270000;
151 }
152 
153 static int
154 intel_dp_max_lane_count(struct intel_dp *intel_dp)
155 {
156 	int max_lane_count = intel_dp->dpcd[DP_MAX_LANE_COUNT] & 0x1f;
157 	switch (max_lane_count) {
158 	case 1: case 2: case 4:
159 		break;
160 	default:
161 		max_lane_count = 4;
162 	}
163 	return max_lane_count;
164 }
165 
166 static int
167 intel_dp_max_link_bw(struct intel_dp *intel_dp)
168 {
169 	int max_link_bw = intel_dp->dpcd[DP_MAX_LINK_RATE];
170 
171 	switch (max_link_bw) {
172 	case DP_LINK_BW_1_62:
173 	case DP_LINK_BW_2_7:
174 		break;
175 	default:
176 		max_link_bw = DP_LINK_BW_1_62;
177 		break;
178 	}
179 	return max_link_bw;
180 }
181 
182 static int
183 intel_dp_link_clock(uint8_t link_bw)
184 {
185 	if (link_bw == DP_LINK_BW_2_7)
186 		return 270000;
187 	else
188 		return 162000;
189 }
190 
191 /*
192  * The units on the numbers in the next two are... bizarre.  Examples will
193  * make it clearer; this one parallels an example in the eDP spec.
194  *
195  * intel_dp_max_data_rate for one lane of 2.7GHz evaluates as:
196  *
197  *     270000 * 1 * 8 / 10 == 216000
198  *
199  * The actual data capacity of that configuration is 2.16Gbit/s, so the
200  * units are decakilobits.  ->clock in a drm_display_mode is in kilohertz -
201  * or equivalently, kilopixels per second - so for 1680x1050R it'd be
202  * 119000.  At 18bpp that's 2142000 kilobits per second.
203  *
204  * Thus the strange-looking division by 10 in intel_dp_link_required, to
205  * get the result in decakilobits instead of kilobits.
206  */
207 
/*
 * Bandwidth needed by a mode, in decakilobits/s (see comment above).
 * pixel_clock is in kHz, so pixel_clock * bpp is kilobits/s; dividing
 * by 10 (rounding up) yields decakilobits/s.
 */
static int
intel_dp_link_required(int pixel_clock, int bpp)
{
	int kilobits = pixel_clock * bpp;

	return (kilobits + 9) / 10;
}
213 
/*
 * Usable data rate of a link, in decakilobits/s: 8b/10b encoding means
 * only 80% of the raw symbol rate carries pixel data.
 */
static int
intel_dp_max_data_rate(int max_link_clock, int max_lanes)
{
	int raw = max_link_clock * max_lanes;

	return raw * 8 / 10;
}
219 
220 static bool
221 intel_dp_adjust_dithering(struct intel_dp *intel_dp,
222 			  const struct drm_display_mode *mode,
223 			  struct drm_display_mode *adjusted_mode)
224 {
225 	int max_link_clock = intel_dp_link_clock(intel_dp_max_link_bw(intel_dp));
226 	int max_lanes = intel_dp_max_lane_count(intel_dp);
227 	int max_rate, mode_rate;
228 
229 	mode_rate = intel_dp_link_required(mode->clock, 24);
230 	max_rate = intel_dp_max_data_rate(max_link_clock, max_lanes);
231 
232 	if (mode_rate > max_rate) {
233 		mode_rate = intel_dp_link_required(mode->clock, 18);
234 		if (mode_rate > max_rate)
235 			return false;
236 
237 		if (adjusted_mode)
238 			adjusted_mode->private_flags
239 				|= INTEL_MODE_DP_FORCE_6BPC;
240 
241 		return true;
242 	}
243 
244 	return true;
245 }
246 
/*
 * drm mode_valid hook: reject modes larger than an eDP panel's fixed
 * mode, too fast for the link even at 18bpp, or below 10 MHz.
 */
static int
intel_dp_mode_valid(struct drm_connector *connector,
		    struct drm_display_mode *mode)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);

	/* eDP panels can't scan out anything larger than their fixed mode */
	if (is_edp(intel_dp) && intel_dp->panel_fixed_mode) {
		if (mode->hdisplay > intel_dp->panel_fixed_mode->hdisplay)
			return MODE_PANEL;

		if (mode->vdisplay > intel_dp->panel_fixed_mode->vdisplay)
			return MODE_PANEL;
	}

	/* NULL adjusted_mode: probe only, don't set the 6bpc flag */
	if (!intel_dp_adjust_dithering(intel_dp, mode, NULL))
		return MODE_CLOCK_HIGH;

	if (mode->clock < 10000)
		return MODE_CLOCK_LOW;

	return MODE_OK;
}
269 
/*
 * Pack up to four bytes into a big-endian 32-bit word for the AUX data
 * registers; byte 0 lands in the most significant position.
 */
static uint32_t
pack_aux(uint8_t *src, int src_bytes)
{
	uint32_t word = 0;
	int i;

	if (src_bytes > 4)
		src_bytes = 4;
	for (i = 0; i < src_bytes; i++)
		word |= (uint32_t)src[i] << (24 - 8 * i);
	return word;
}
282 
/*
 * Unpack a big-endian 32-bit AUX data word into up to four bytes; the
 * most significant byte goes to dst[0].
 */
static void
unpack_aux(uint32_t src, uint8_t *dst, int dst_bytes)
{
	int i;

	if (dst_bytes > 4)
		dst_bytes = 4;
	for (i = 0; i < dst_bytes; i++)
		dst[i] = (uint8_t)(src >> (24 - 8 * i));
}
292 
293 /* hrawclock is 1/4 the FSB frequency */
294 static int
295 intel_hrawclk(struct drm_device *dev)
296 {
297 	struct drm_i915_private *dev_priv = dev->dev_private;
298 	uint32_t clkcfg;
299 
300 	clkcfg = I915_READ(CLKCFG);
301 	switch (clkcfg & CLKCFG_FSB_MASK) {
302 	case CLKCFG_FSB_400:
303 		return 100;
304 	case CLKCFG_FSB_533:
305 		return 133;
306 	case CLKCFG_FSB_667:
307 		return 166;
308 	case CLKCFG_FSB_800:
309 		return 200;
310 	case CLKCFG_FSB_1067:
311 		return 266;
312 	case CLKCFG_FSB_1333:
313 		return 333;
314 	/* these two are just a guess; one of them might be right */
315 	case CLKCFG_FSB_1600:
316 	case CLKCFG_FSB_1600_ALT:
317 		return 400;
318 	default:
319 		return 133;
320 	}
321 }
322 
/* Read back whether the PCH panel-power sequencer reports the panel on. */
static bool ironlake_edp_have_panel_power(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;

	return (I915_READ(PCH_PP_STATUS) & PP_ON) != 0;
}
330 
/* Read back whether the panel's VDD rail is currently forced on. */
static bool ironlake_edp_have_panel_vdd(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;

	return (I915_READ(PCH_PP_CONTROL) & EDP_FORCE_VDD) != 0;
}
338 
339 static void
340 intel_dp_check_edp(struct intel_dp *intel_dp)
341 {
342 	struct drm_device *dev = intel_dp->base.base.dev;
343 	struct drm_i915_private *dev_priv = dev->dev_private;
344 
345 	if (!is_edp(intel_dp))
346 		return;
347 	if (!ironlake_edp_have_panel_power(intel_dp) && !ironlake_edp_have_panel_vdd(intel_dp)) {
348 		kprintf("eDP powered off while attempting aux channel communication.\n");
349 		DRM_DEBUG_KMS("Status 0x%08x Control 0x%08x\n",
350 			      I915_READ(PCH_PP_STATUS),
351 			      I915_READ(PCH_PP_CONTROL));
352 	}
353 }
354 
/*
 * Perform one raw AUX channel transaction.
 *
 * 'send'/'send_bytes' is the complete request (header plus payload);
 * the reply payload is copied into 'recv' (at most recv_size bytes).
 * Returns the number of bytes received on success, or -EBUSY (channel
 * stuck busy / never done), -EIO (receive error) or -ETIMEDOUT (sink
 * did not answer, typically disconnected).
 */
static int
intel_dp_aux_ch(struct intel_dp *intel_dp,
		uint8_t *send, int send_bytes,
		uint8_t *recv, int recv_size)
{
	uint32_t output_reg = intel_dp->output_reg;
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	uint32_t ch_ctl = output_reg + 0x10;	/* AUX control follows the port reg */
	uint32_t ch_data = ch_ctl + 4;		/* then the data registers */
	int i;
	int recv_bytes;
	uint32_t status;
	uint32_t aux_clock_divider;
	int try, precharge = 5;

	intel_dp_check_edp(intel_dp);
	/* The clock divider is based off the hrawclk,
	 * and would like to run at 2MHz. So, take the
	 * hrawclk value and divide by 2 and use that
	 *
	 * Note that PCH attached eDP panels should use a 125MHz input
	 * clock divider.
	 */
	if (is_cpu_edp(intel_dp)) {
		if (IS_GEN6(dev) || IS_GEN7(dev))
			aux_clock_divider = 200; /* SNB & IVB eDP input clock at 400Mhz */
		else
			aux_clock_divider = 225; /* eDP input clock at 450Mhz */
	} else if (HAS_PCH_SPLIT(dev))
		aux_clock_divider = 63; /* IRL input clock fixed at 125Mhz */
	else
		aux_clock_divider = intel_hrawclk(dev) / 2;

	/* Try to wait for any previous AUX channel activity */
	for (try = 0; try < 3; try++) {
		status = I915_READ(ch_ctl);
		if ((status & DP_AUX_CH_CTL_SEND_BUSY) == 0)
			break;
		DELAY(1000);
	}

	if (try == 3) {
		kprintf("dp_aux_ch not started status 0x%08x\n",
		     I915_READ(ch_ctl));
		return -EBUSY;
	}

	/* Must try at least 3 times according to DP spec */
	for (try = 0; try < 5; try++) {
		/* Load the send data into the aux channel data registers */
		for (i = 0; i < send_bytes; i += 4)
			I915_WRITE(ch_data + i,
				   pack_aux(send + i, send_bytes - i));

		/* Send the command and wait for it to complete */
		I915_WRITE(ch_ctl,
			   DP_AUX_CH_CTL_SEND_BUSY |
			   DP_AUX_CH_CTL_TIME_OUT_400us |
			   (send_bytes << DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT) |
			   (precharge << DP_AUX_CH_CTL_PRECHARGE_2US_SHIFT) |
			   (aux_clock_divider << DP_AUX_CH_CTL_BIT_CLOCK_2X_SHIFT) |
			   DP_AUX_CH_CTL_DONE |
			   DP_AUX_CH_CTL_TIME_OUT_ERROR |
			   DP_AUX_CH_CTL_RECEIVE_ERROR);
		/* NOTE(review): no upper bound on this busy-wait; relies on
		 * the hardware always clearing SEND_BUSY eventually. */
		for (;;) {
			status = I915_READ(ch_ctl);
			if ((status & DP_AUX_CH_CTL_SEND_BUSY) == 0)
				break;
			DELAY(100);
		}

		/* Clear done status and any errors */
		I915_WRITE(ch_ctl,
			   status |
			   DP_AUX_CH_CTL_DONE |
			   DP_AUX_CH_CTL_TIME_OUT_ERROR |
			   DP_AUX_CH_CTL_RECEIVE_ERROR);

		if (status & (DP_AUX_CH_CTL_TIME_OUT_ERROR |
			      DP_AUX_CH_CTL_RECEIVE_ERROR))
			continue;
		if (status & DP_AUX_CH_CTL_DONE)
			break;
	}

	if ((status & DP_AUX_CH_CTL_DONE) == 0) {
		DRM_ERROR("dp_aux_ch not done status 0x%08x\n", status);
		return -EBUSY;
	}

	/* Check for timeout or receive error.
	 * Timeouts occur when the sink is not connected
	 */
	if (status & DP_AUX_CH_CTL_RECEIVE_ERROR) {
		DRM_ERROR("dp_aux_ch receive error status 0x%08x\n", status);
		return -EIO;
	}

	/* Timeouts occur when the device isn't connected, so they're
	 * "normal" -- don't fill the kernel log with these */
	if (status & DP_AUX_CH_CTL_TIME_OUT_ERROR) {
		DRM_DEBUG_KMS("dp_aux_ch timeout status 0x%08x\n", status);
		return -ETIMEDOUT;
	}

	/* Unload any bytes sent back from the other side */
	recv_bytes = ((status & DP_AUX_CH_CTL_MESSAGE_SIZE_MASK) >>
		      DP_AUX_CH_CTL_MESSAGE_SIZE_SHIFT);
	if (recv_bytes > recv_size)
		recv_bytes = recv_size;

	for (i = 0; i < recv_bytes; i += 4)
		unpack_aux(I915_READ(ch_data + i),
			   recv + i, recv_bytes - i);

	return recv_bytes;
}
473 
/*
 * Write data to the aux channel in native mode.
 * Retries for as long as the sink replies DEFER; returns send_bytes on
 * success, -1 for oversized requests, -EIO on NACK, or a negative errno
 * propagated from intel_dp_aux_ch().
 */
static int
intel_dp_aux_native_write(struct intel_dp *intel_dp,
			  uint16_t address, uint8_t *send, int send_bytes)
{
	int ret;
	uint8_t	msg[20];	/* 4-byte header + at most 16 payload bytes */
	int msg_bytes;
	uint8_t	ack;

	intel_dp_check_edp(intel_dp);
	if (send_bytes > 16)
		return -1;
	/* Native AUX write header: command, 16-bit address, length - 1 */
	msg[0] = AUX_NATIVE_WRITE << 4;
	msg[1] = address >> 8;
	msg[2] = address & 0xff;
	msg[3] = send_bytes - 1;
	memcpy(&msg[4], send, send_bytes);
	msg_bytes = send_bytes + 4;
	for (;;) {
		ret = intel_dp_aux_ch(intel_dp, msg, msg_bytes, &ack, 1);
		if (ret < 0)
			return ret;
		if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK)
			break;
		else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
			DELAY(100);	/* sink busy: back off briefly and retry */
		else
			return -EIO;
	}
	return send_bytes;
}
506 
/* Write a single byte to the aux channel in native mode */
static int
intel_dp_aux_native_write_1(struct intel_dp *intel_dp,
			    uint16_t address, uint8_t byte)
{
	/* Convenience wrapper; returns 1 on success (bytes written). */
	return intel_dp_aux_native_write(intel_dp, address, &byte, 1);
}
514 
/*
 * Read bytes from a native aux channel.
 * Retries while the sink defers; returns the number of payload bytes
 * copied into 'recv', -EPROTO on an empty reply, -EIO on NACK, or a
 * negative errno propagated from intel_dp_aux_ch().
 */
static int
intel_dp_aux_native_read(struct intel_dp *intel_dp,
			 uint16_t address, uint8_t *recv, int recv_bytes)
{
	uint8_t msg[4];
	int msg_bytes;
	uint8_t reply[20];	/* 1 reply byte + at most 19 payload bytes */
	int reply_bytes;
	uint8_t ack;
	int ret;

	intel_dp_check_edp(intel_dp);
	/* Native AUX read header: command, 16-bit address, length - 1 */
	msg[0] = AUX_NATIVE_READ << 4;
	msg[1] = address >> 8;
	msg[2] = address & 0xff;
	msg[3] = recv_bytes - 1;

	msg_bytes = 4;
	reply_bytes = recv_bytes + 1;	/* reply byte precedes the payload */

	for (;;) {
		ret = intel_dp_aux_ch(intel_dp, msg, msg_bytes,
				      reply, reply_bytes);
		if (ret == 0)
			return -EPROTO;
		if (ret < 0)
			return ret;
		ack = reply[0];
		if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK) {
			/* Strip the reply byte, hand back the payload. */
			memcpy(recv, reply + 1, ret - 1);
			return ret - 1;
		}
		else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
			DELAY(100);	/* sink busy: back off briefly and retry */
		else
			return -EIO;
	}
}
554 
/*
 * I2C-over-AUX transfer hook used by the iic_dp_aux bus.
 *
 * Translates a single I2C byte transfer (mode is MODE_I2C_READ,
 * MODE_I2C_WRITE, or an address/stop phase) into AUX transactions,
 * retrying up to 5 times on DEFER replies.  Returns 0 on success or a
 * positive errno (EREMOTEIO) on failure, per iicbus convention.
 */
static int
intel_dp_i2c_aux_ch(device_t idev, int mode, uint8_t write_byte,
    uint8_t *read_byte)
{
	struct iic_dp_aux_data *data;
	struct intel_dp *intel_dp;
	uint16_t address;
	uint8_t msg[5];
	uint8_t reply[2];
	unsigned retry;
	int msg_bytes;
	int reply_bytes;
	int ret;

	data = device_get_softc(idev);
	intel_dp = data->priv;
	address = data->address;

	intel_dp_check_edp(intel_dp);
	/* Set up the command byte */
	if (mode & MODE_I2C_READ)
		msg[0] = AUX_I2C_READ << 4;
	else
		msg[0] = AUX_I2C_WRITE << 4;

	/* MOT (middle-of-transaction) is set until the STOP phase */
	if (!(mode & MODE_I2C_STOP))
		msg[0] |= AUX_I2C_MOT << 4;

	msg[1] = address >> 8;
	msg[2] = address;

	switch (mode) {
	case MODE_I2C_WRITE:
		msg[3] = 0;		/* length - 1: one data byte */
		msg[4] = write_byte;
		msg_bytes = 5;
		reply_bytes = 1;
		break;
	case MODE_I2C_READ:
		msg[3] = 0;		/* length - 1: one data byte */
		msg_bytes = 4;
		reply_bytes = 2;	/* reply byte + data byte */
		break;
	default:
		/* Address-only (start/stop) transaction: no length byte */
		msg_bytes = 3;
		reply_bytes = 1;
		break;
	}

	for (retry = 0; retry < 5; retry++) {
		ret = intel_dp_aux_ch(intel_dp,
				      msg, msg_bytes,
				      reply, reply_bytes);
		if (ret < 0) {
			DRM_DEBUG_KMS("aux_ch failed %d\n", ret);
			return (-ret);
		}

		/* First check the native (link-level) reply field */
		switch (reply[0] & AUX_NATIVE_REPLY_MASK) {
		case AUX_NATIVE_REPLY_ACK:
			/* I2C-over-AUX Reply field is only valid
			 * when paired with AUX ACK.
			 */
			break;
		case AUX_NATIVE_REPLY_NACK:
			DRM_DEBUG_KMS("aux_ch native nack\n");
			return (EREMOTEIO);
		case AUX_NATIVE_REPLY_DEFER:
			DELAY(100);
			continue;
		default:
			DRM_ERROR("aux_ch invalid native reply 0x%02x\n",
				  reply[0]);
			return (EREMOTEIO);
		}

		/* Then the I2C-level reply field */
		switch (reply[0] & AUX_I2C_REPLY_MASK) {
		case AUX_I2C_REPLY_ACK:
			if (mode == MODE_I2C_READ) {
				*read_byte = reply[1];
			}
			return (0/*reply_bytes - 1*/);
		case AUX_I2C_REPLY_NACK:
			DRM_DEBUG_KMS("aux_i2c nack\n");
			return (EREMOTEIO);
		case AUX_I2C_REPLY_DEFER:
			DRM_DEBUG_KMS("aux_i2c defer\n");
			DELAY(100);
			break;
		default:
			DRM_ERROR("aux_i2c invalid reply 0x%02x\n", reply[0]);
			return (EREMOTEIO);
		}
	}

	DRM_ERROR("too many retries, giving up\n");
	return (EREMOTEIO);
}
653 
654 static void ironlake_edp_panel_vdd_on(struct intel_dp *intel_dp);
655 static void ironlake_edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync);
656 
/*
 * Register the I2C-over-AUX bus for this port.  The panel VDD rail is
 * forced on around bus creation so the initial probe traffic (e.g. the
 * EDID fetch) can reach an otherwise powered-down eDP panel.
 */
static int
intel_dp_i2c_init(struct intel_dp *intel_dp,
		  struct intel_connector *intel_connector, const char *name)
{
	int ret;

	DRM_DEBUG_KMS("i2c_init %s\n", name);

	ironlake_edp_panel_vdd_on(intel_dp);
	ret = iic_dp_aux_add_bus(intel_connector->base.dev->device, name,
	    intel_dp_i2c_aux_ch, intel_dp, &intel_dp->dp_iic_bus,
	    &intel_dp->adapter);
	ironlake_edp_panel_vdd_off(intel_dp, false);
	return (ret);
}
672 
673 static bool
674 intel_dp_mode_fixup(struct drm_encoder *encoder, const struct drm_display_mode *mode,
675 		    struct drm_display_mode *adjusted_mode)
676 {
677 	struct drm_device *dev = encoder->dev;
678 	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
679 	int lane_count, clock;
680 	int max_lane_count = intel_dp_max_lane_count(intel_dp);
681 	int max_clock = intel_dp_max_link_bw(intel_dp) == DP_LINK_BW_2_7 ? 1 : 0;
682 	int bpp;
683 	static int bws[2] = { DP_LINK_BW_1_62, DP_LINK_BW_2_7 };
684 
685 	if (is_edp(intel_dp) && intel_dp->panel_fixed_mode) {
686 		intel_fixed_panel_mode(intel_dp->panel_fixed_mode, adjusted_mode);
687 		intel_pch_panel_fitting(dev, DRM_MODE_SCALE_FULLSCREEN,
688 					mode, adjusted_mode);
689 	}
690 
691 	if (!intel_dp_adjust_dithering(intel_dp, adjusted_mode, adjusted_mode))
692 		return false;
693 
694 	bpp = adjusted_mode->private_flags & INTEL_MODE_DP_FORCE_6BPC ? 18 : 24;
695 
696 	for (lane_count = 1; lane_count <= max_lane_count; lane_count <<= 1) {
697 		for (clock = 0; clock <= max_clock; clock++) {
698 			int link_avail = intel_dp_max_data_rate(intel_dp_link_clock(bws[clock]), lane_count);
699 
700 			if (intel_dp_link_required(adjusted_mode->clock, bpp)
701 					<= link_avail) {
702 				intel_dp->link_bw = bws[clock];
703 				intel_dp->lane_count = lane_count;
704 				adjusted_mode->clock = intel_dp_link_clock(intel_dp->link_bw);
705 				DRM_DEBUG_KMS("Display port link bw %02x lane "
706 						"count %d clock %d\n",
707 				       intel_dp->link_bw, intel_dp->lane_count,
708 				       adjusted_mode->clock);
709 				return true;
710 			}
711 		}
712 	}
713 
714 	return false;
715 }
716 
/* M/N ratio pairs programmed into the pipe/transcoder data registers. */
struct intel_dp_m_n {
	uint32_t	tu;	/* transfer unit size */
	uint32_t	gmch_m;	/* data M (pixel bytes) */
	uint32_t	gmch_n;	/* data N (link bytes) */
	uint32_t	link_m;	/* link M (pixel clock) */
	uint32_t	link_n;	/* link N (link clock) */
};
724 
/*
 * Shrink num/den until both fit in 24 bits (the width of the hardware
 * M/N fields), halving each so the ratio is approximately preserved.
 */
static void
intel_reduce_ratio(uint32_t *num, uint32_t *den)
{
	uint32_t n = *num, d = *den;

	while (n > 0xffffff || d > 0xffffff) {
		n >>= 1;
		d >>= 1;
	}
	*num = n;
	*den = d;
}
733 
/*
 * Compute the data (GMCH) and link M/N ratios for the given bpp, lane
 * count, pixel clock and link clock; TU size is fixed at 64.
 */
static void
intel_dp_compute_m_n(int bpp,
		     int nlanes,
		     int pixel_clock,
		     int link_clock,
		     struct intel_dp_m_n *m_n)
{
	m_n->tu = 64;
	/* data ratio: pixel payload bytes vs. total link bytes */
	m_n->gmch_m = (pixel_clock * bpp) >> 3;	/* bits -> bytes */
	m_n->gmch_n = link_clock * nlanes;
	intel_reduce_ratio(&m_n->gmch_m, &m_n->gmch_n);
	/* link ratio: pixel clock vs. link clock */
	m_n->link_m = pixel_clock;
	m_n->link_n = link_clock;
	intel_reduce_ratio(&m_n->link_m, &m_n->link_n);
}
749 
/*
 * Program the pipe/transcoder M/N dividers for a DP output on 'crtc'.
 * The lane count is looked up from whichever DP/eDP encoder is bound
 * to the crtc (defaulting to 4 if none is found).
 */
void
intel_dp_set_m_n(struct drm_crtc *crtc, struct drm_display_mode *mode,
		 struct drm_display_mode *adjusted_mode)
{
	struct drm_device *dev = crtc->dev;
	struct drm_mode_config *mode_config = &dev->mode_config;
	struct drm_encoder *encoder;
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
	int lane_count = 4;
	struct intel_dp_m_n m_n;
	int pipe = intel_crtc->pipe;

	/*
	 * Find the lane count in the intel_encoder private
	 */
	list_for_each_entry(encoder, &mode_config->encoder_list, head) {
		struct intel_dp *intel_dp;

		if (encoder->crtc != crtc)
			continue;

		intel_dp = enc_to_intel_dp(encoder);
		if (intel_dp->base.type == INTEL_OUTPUT_DISPLAYPORT ||
		    intel_dp->base.type == INTEL_OUTPUT_EDP)
		{
			lane_count = intel_dp->lane_count;
			break;
		}
	}

	/*
	 * Compute the GMCH and Link ratios. The '3' here is
	 * the number of bytes_per_pixel post-LUT, which we always
	 * set up for 8-bits of R/G/B, or 3 bytes total.
	 */
	intel_dp_compute_m_n(intel_crtc->bpp, lane_count,
			     mode->clock, adjusted_mode->clock, &m_n);

	/* PCH platforms use the transcoder registers, others the pipe's */
	if (HAS_PCH_SPLIT(dev)) {
		I915_WRITE(TRANSDATA_M1(pipe),
			   ((m_n.tu - 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT) |
			   m_n.gmch_m);
		I915_WRITE(TRANSDATA_N1(pipe), m_n.gmch_n);
		I915_WRITE(TRANSDPLINK_M1(pipe), m_n.link_m);
		I915_WRITE(TRANSDPLINK_N1(pipe), m_n.link_n);
	} else {
		I915_WRITE(PIPE_GMCH_DATA_M(pipe),
			   ((m_n.tu - 1) << PIPE_GMCH_DATA_M_TU_SIZE_SHIFT) |
			   m_n.gmch_m);
		I915_WRITE(PIPE_GMCH_DATA_N(pipe), m_n.gmch_n);
		I915_WRITE(PIPE_DP_LINK_M(pipe), m_n.link_m);
		I915_WRITE(PIPE_DP_LINK_N(pipe), m_n.link_n);
	}
}
805 
806 static void ironlake_edp_pll_on(struct drm_encoder *encoder);
807 static void ironlake_edp_pll_off(struct drm_encoder *encoder);
808 
809 static void
810 intel_dp_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
811 		  struct drm_display_mode *adjusted_mode)
812 {
813 	struct drm_device *dev = encoder->dev;
814 	struct drm_i915_private *dev_priv = dev->dev_private;
815 	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
816 	struct drm_crtc *crtc = intel_dp->base.base.crtc;
817 	struct intel_crtc *intel_crtc = to_intel_crtc(crtc);
818 
819 	/* Turn on the eDP PLL if needed */
820 	if (is_edp(intel_dp)) {
821 		if (!is_pch_edp(intel_dp))
822 			ironlake_edp_pll_on(encoder);
823 		else
824 			ironlake_edp_pll_off(encoder);
825 	}
826 
827 	/*
828 	 * There are four kinds of DP registers:
829 	 *
830 	 * 	IBX PCH
831 	 * 	SNB CPU
832 	 *	IVB CPU
833 	 * 	CPT PCH
834 	 *
835 	 * IBX PCH and CPU are the same for almost everything,
836 	 * except that the CPU DP PLL is configured in this
837 	 * register
838 	 *
839 	 * CPT PCH is quite different, having many bits moved
840 	 * to the TRANS_DP_CTL register instead. That
841 	 * configuration happens (oddly) in ironlake_pch_enable
842 	 */
843 
844 	/* Preserve the BIOS-computed detected bit. This is
845 	 * supposed to be read-only.
846 	 */
847 	intel_dp->DP = I915_READ(intel_dp->output_reg) & DP_DETECTED;
848 	intel_dp->DP |=  DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;
849 
850 	/* Handle DP bits in common between all three register formats */
851 
852 	intel_dp->DP |= DP_VOLTAGE_0_4 | DP_PRE_EMPHASIS_0;
853 
854 	switch (intel_dp->lane_count) {
855 	case 1:
856 		intel_dp->DP |= DP_PORT_WIDTH_1;
857 		break;
858 	case 2:
859 		intel_dp->DP |= DP_PORT_WIDTH_2;
860 		break;
861 	case 4:
862 		intel_dp->DP |= DP_PORT_WIDTH_4;
863 		break;
864 	}
865 	if (intel_dp->has_audio) {
866 		DRM_DEBUG_KMS("Enabling DP audio on pipe %c\n",
867 				 pipe_name(intel_crtc->pipe));
868 		intel_dp->DP |= DP_AUDIO_OUTPUT_ENABLE;
869 		intel_write_eld(encoder, adjusted_mode);
870 	}
871 	memset(intel_dp->link_configuration, 0, DP_LINK_CONFIGURATION_SIZE);
872 	intel_dp->link_configuration[0] = intel_dp->link_bw;
873 	intel_dp->link_configuration[1] = intel_dp->lane_count;
874 	/*
875 	 * Check for DPCD version > 1.1 and enhanced framing support
876 	 */
877 	if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 &&
878 	    (intel_dp->dpcd[DP_MAX_LANE_COUNT] & DP_ENHANCED_FRAME_CAP)) {
879 		intel_dp->link_configuration[1] |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
880 	}
881 
882 	/* Split out the IBX/CPU vs CPT settings */
883 
884 	if (is_cpu_edp(intel_dp) && IS_GEN7(dev)) {
885 		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
886 			intel_dp->DP |= DP_SYNC_HS_HIGH;
887 		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
888 			intel_dp->DP |= DP_SYNC_VS_HIGH;
889 		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;
890 
891 		if (intel_dp->link_configuration[1] & DP_LANE_COUNT_ENHANCED_FRAME_EN)
892 			intel_dp->DP |= DP_ENHANCED_FRAMING;
893 
894 		intel_dp->DP |= intel_crtc->pipe << 29;
895 
896 		/* don't miss out required setting for eDP */
897 		intel_dp->DP |= DP_PLL_ENABLE;
898 		if (adjusted_mode->clock < 200000)
899 			intel_dp->DP |= DP_PLL_FREQ_160MHZ;
900 		else
901 			intel_dp->DP |= DP_PLL_FREQ_270MHZ;
902 	} else if (!HAS_PCH_CPT(dev) || is_cpu_edp(intel_dp)) {
903 		intel_dp->DP |= intel_dp->color_range;
904 
905 		if (adjusted_mode->flags & DRM_MODE_FLAG_PHSYNC)
906 			intel_dp->DP |= DP_SYNC_HS_HIGH;
907 		if (adjusted_mode->flags & DRM_MODE_FLAG_PVSYNC)
908 			intel_dp->DP |= DP_SYNC_VS_HIGH;
909 		intel_dp->DP |= DP_LINK_TRAIN_OFF;
910 
911 		if (intel_dp->link_configuration[1] & DP_LANE_COUNT_ENHANCED_FRAME_EN)
912 			intel_dp->DP |= DP_ENHANCED_FRAMING;
913 
914 		if (intel_crtc->pipe == 1)
915 			intel_dp->DP |= DP_PIPEB_SELECT;
916 
917 		if (is_cpu_edp(intel_dp)) {
918 			/* don't miss out required setting for eDP */
919 			intel_dp->DP |= DP_PLL_ENABLE;
920 			if (adjusted_mode->clock < 200000)
921 				intel_dp->DP |= DP_PLL_FREQ_160MHZ;
922 			else
923 				intel_dp->DP |= DP_PLL_FREQ_270MHZ;
924 		}
925 	} else {
926 		intel_dp->DP |= DP_LINK_TRAIN_OFF_CPT;
927 	}
928 }
929 
/*
 * PCH_PP_STATUS mask/value pairs describing the panel-power sequencer
 * states we wait for: fully on and idle, fully off and idle, and
 * power-cycle delay complete.
 */
#define IDLE_ON_MASK		(PP_ON | 0 	  | PP_SEQUENCE_MASK | 0                     | PP_SEQUENCE_STATE_MASK)
#define IDLE_ON_VALUE   	(PP_ON | 0 	  | PP_SEQUENCE_NONE | 0                     | PP_SEQUENCE_STATE_ON_IDLE)

#define IDLE_OFF_MASK		(PP_ON | 0        | PP_SEQUENCE_MASK | 0                     | PP_SEQUENCE_STATE_MASK)
#define IDLE_OFF_VALUE		(0     | 0        | PP_SEQUENCE_NONE | 0                     | PP_SEQUENCE_STATE_OFF_IDLE)

#define IDLE_CYCLE_MASK		(PP_ON | 0        | PP_SEQUENCE_MASK | PP_CYCLE_DELAY_ACTIVE | PP_SEQUENCE_STATE_MASK)
#define IDLE_CYCLE_VALUE	(0     | 0        | PP_SEQUENCE_NONE | 0                     | PP_SEQUENCE_STATE_OFF_IDLE)

/*
 * Poll PCH_PP_STATUS until (status & mask) == value, giving up after
 * 5000ms; a timeout is logged but otherwise not treated as fatal.
 */
static void ironlake_wait_panel_status(struct intel_dp *intel_dp,
				       u32 mask,
				       u32 value)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;

	DRM_DEBUG_KMS("mask %08x value %08x status %08x control %08x\n",
		      mask, value,
		      I915_READ(PCH_PP_STATUS),
		      I915_READ(PCH_PP_CONTROL));

	if (_intel_wait_for(dev,
	    (I915_READ(PCH_PP_STATUS) & mask) == value, 5000, 10, "915iwp")) {
		DRM_ERROR("Panel status timeout: status %08x control %08x\n",
			  I915_READ(PCH_PP_STATUS),
			  I915_READ(PCH_PP_CONTROL));
	}
}
958 
/* Block until the panel-power sequencer reports fully on and idle. */
static void ironlake_wait_panel_on(struct intel_dp *intel_dp)
{
	DRM_DEBUG_KMS("Wait for panel power on\n");
	ironlake_wait_panel_status(intel_dp, IDLE_ON_MASK, IDLE_ON_VALUE);
}
964 
/* Block until the panel-power sequencer reports fully off and idle. */
static void ironlake_wait_panel_off(struct intel_dp *intel_dp)
{
	DRM_DEBUG_KMS("Wait for panel power off time\n");
	ironlake_wait_panel_status(intel_dp, IDLE_OFF_MASK, IDLE_OFF_VALUE);
}
970 
/* Block until the mandatory panel power-cycle delay has elapsed. */
static void ironlake_wait_panel_power_cycle(struct intel_dp *intel_dp)
{
	DRM_DEBUG_KMS("Wait for panel power cycle\n");
	ironlake_wait_panel_status(intel_dp, IDLE_CYCLE_MASK, IDLE_CYCLE_VALUE);
}
976 
977 
978 /* Read the current pp_control value, unlocking the register if it
979  * is locked
980  */
981 
982 static  u32 ironlake_get_pp_control(struct drm_i915_private *dev_priv)
983 {
984 	u32	control = I915_READ(PCH_PP_CONTROL);
985 
986 	control &= ~PANEL_UNLOCK_MASK;
987 	control |= PANEL_UNLOCK_REGS;
988 	return control;
989 }
990 
/*
 * Force the eDP panel's VDD rail on so AUX transactions can reach the
 * panel before it is fully powered up.  Sets want_panel_vdd; pair each
 * call with ironlake_edp_panel_vdd_off().  No-op on non-eDP ports.
 */
static void ironlake_edp_panel_vdd_on(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp;

	if (!is_edp(intel_dp))
		return;
	DRM_DEBUG_KMS("Turn eDP VDD on\n");

	/* Unbalanced on/off calls indicate a caller bug */
	if (intel_dp->want_panel_vdd)
		kprintf("eDP VDD already requested on\n");

	intel_dp->want_panel_vdd = true;

	if (ironlake_edp_have_panel_vdd(intel_dp)) {
		DRM_DEBUG_KMS("eDP VDD already on\n");
		return;
	}

	/* Honor the mandatory power-cycle delay before re-powering */
	if (!ironlake_edp_have_panel_power(intel_dp))
		ironlake_wait_panel_power_cycle(intel_dp);

	pp = ironlake_get_pp_control(dev_priv);
	pp |= EDP_FORCE_VDD;
	I915_WRITE(PCH_PP_CONTROL, pp);
	POSTING_READ(PCH_PP_CONTROL);
	DRM_DEBUG_KMS("PCH_PP_STATUS: 0x%08x PCH_PP_CONTROL: 0x%08x\n",
		      I915_READ(PCH_PP_STATUS), I915_READ(PCH_PP_CONTROL));

	/*
	 * If the panel wasn't on, delay before accessing aux channel
	 */
	if (!ironlake_edp_have_panel_power(intel_dp)) {
		DRM_DEBUG_KMS("eDP was not running\n");
		DELAY(intel_dp->panel_power_up_delay * 1000);
	}
}
1029 
/*
 * Actually drop the forced VDD rail, but only if no one wants it anymore
 * and it is still on.  After clearing the bit, wait out the panel
 * power-down delay before allowing further sequencer activity.
 */
static void ironlake_panel_vdd_off_sync(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp;

	if (!intel_dp->want_panel_vdd && ironlake_edp_have_panel_vdd(intel_dp)) {
		pp = ironlake_get_pp_control(dev_priv);
		pp &= ~EDP_FORCE_VDD;
		I915_WRITE(PCH_PP_CONTROL, pp);
		POSTING_READ(PCH_PP_CONTROL);

		/* Make sure sequencer is idle before allowing subsequent activity */
		DRM_DEBUG_KMS("PCH_PP_STATUS: 0x%08x PCH_PP_CONTROL: 0x%08x\n",
			      I915_READ(PCH_PP_STATUS), I915_READ(PCH_PP_CONTROL));

		DELAY(intel_dp->panel_power_down_delay * 1000);
	}
}
1049 
/*
 * Deferred taskqueue callback: drops the forced VDD once the hold-off
 * queued by ironlake_edp_panel_vdd_off(sync=false) expires.  Takes the
 * mode_config lock to serialize against concurrent modesetting.
 */
static void ironlake_panel_vdd_work(void *arg, int pending __unused)
{
	struct intel_dp *intel_dp = arg;
	struct drm_device *dev = intel_dp->base.base.dev;

	lockmgr(&dev->mode_config.lock, LK_EXCLUSIVE);
	ironlake_panel_vdd_off_sync(intel_dp);
	lockmgr(&dev->mode_config.lock, LK_RELEASE);
}
1059 
/*
 * Release a VDD reference taken by ironlake_edp_panel_vdd_on().  When
 * @sync, VDD is dropped immediately; otherwise the drop is deferred via
 * panel_vdd_task so a burst of AUX operations keeps VDD up.
 */
static void ironlake_edp_panel_vdd_off(struct intel_dp *intel_dp, bool sync)
{
	if (!is_edp(intel_dp))
		return;

	DRM_DEBUG_KMS("Turn eDP VDD off %d\n", intel_dp->want_panel_vdd);
	/* Unbalanced release: off without a matching on */
	if (!intel_dp->want_panel_vdd)
		kprintf("eDP VDD not forced on\n");

	intel_dp->want_panel_vdd = false;

	if (sync) {
		ironlake_panel_vdd_off_sync(intel_dp);
	} else {
		/*
		 * Queue the timer to fire a long
		 * time from now (relative to the power down delay)
		 * to keep the panel power up across a sequence of operations
		 */
		struct drm_i915_private *dev_priv = intel_dp->base.base.dev->dev_private;
		taskqueue_enqueue_timeout(dev_priv->tq,
		    &intel_dp->panel_vdd_task,
		    msecs_to_jiffies(intel_dp->panel_power_cycle_delay * 5));
	}
}
1085 
/*
 * Power the eDP panel on via the PCH power sequencer and wait for it to
 * come up.  Gen5 (Ironlake) needs the panel-reset bit cleared around
 * the power-up and restored afterwards (hardware workaround).
 */
static void ironlake_edp_panel_on(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp;

	if (!is_edp(intel_dp))
		return;

	DRM_DEBUG_KMS("Turn eDP power on\n");

	if (ironlake_edp_have_panel_power(intel_dp)) {
		DRM_DEBUG_KMS("eDP power already on\n");
		return;
	}

	/* Honor the panel's minimum off time before powering back on */
	ironlake_wait_panel_power_cycle(intel_dp);

	pp = ironlake_get_pp_control(dev_priv);
	if (IS_GEN5(dev)) {
		/* ILK workaround: disable reset around power sequence */
		pp &= ~PANEL_POWER_RESET;
		I915_WRITE(PCH_PP_CONTROL, pp);
		POSTING_READ(PCH_PP_CONTROL);
	}

	pp |= POWER_TARGET_ON;
	if (!IS_GEN5(dev))
		pp |= PANEL_POWER_RESET;

	I915_WRITE(PCH_PP_CONTROL, pp);
	POSTING_READ(PCH_PP_CONTROL);

	ironlake_wait_panel_on(intel_dp);

	if (IS_GEN5(dev)) {
		pp |= PANEL_POWER_RESET; /* restore panel reset bit */
		I915_WRITE(PCH_PP_CONTROL, pp);
		POSTING_READ(PCH_PP_CONTROL);
	}
}
1127 
/*
 * Power the eDP panel off via the power sequencer.  VDD must not be
 * forced on at this point (warned if it is); the write also clears
 * force-VDD, panel reset and backlight enable, then waits for the
 * sequencer to report the panel off.
 */
static void ironlake_edp_panel_off(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp;

	if (!is_edp(intel_dp))
		return;

	DRM_DEBUG_KMS("Turn eDP power off\n");

	if (intel_dp->want_panel_vdd)
		kprintf("Cannot turn power off while VDD is on\n");

	pp = ironlake_get_pp_control(dev_priv);
	pp &= ~(POWER_TARGET_ON | EDP_FORCE_VDD | PANEL_POWER_RESET | EDP_BLC_ENABLE);
	I915_WRITE(PCH_PP_CONTROL, pp);
	POSTING_READ(PCH_PP_CONTROL);

	ironlake_wait_panel_off(intel_dp);
}
1149 
/*
 * Enable the eDP backlight, after the configured backlight-on delay so
 * the image is stable before it becomes visible.
 */
static void ironlake_edp_backlight_on(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp;

	if (!is_edp(intel_dp))
		return;

	DRM_DEBUG_KMS("\n");
	/*
	 * If we enable the backlight right away following a panel power
	 * on, we may see slight flicker as the panel syncs with the eDP
	 * link.  So delay a bit to make sure the image is solid before
	 * allowing it to appear.
	 */
	DELAY(intel_dp->backlight_on_delay * 1000);
	pp = ironlake_get_pp_control(dev_priv);
	pp |= EDP_BLC_ENABLE;
	I915_WRITE(PCH_PP_CONTROL, pp);
	POSTING_READ(PCH_PP_CONTROL);
}
1172 
/*
 * Disable the eDP backlight, then wait the configured backlight-off
 * delay before any subsequent panel power sequencing.
 */
static void ironlake_edp_backlight_off(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	u32 pp;

	if (!is_edp(intel_dp))
		return;

	DRM_DEBUG_KMS("\n");
	pp = ironlake_get_pp_control(dev_priv);
	pp &= ~EDP_BLC_ENABLE;
	I915_WRITE(PCH_PP_CONTROL, pp);
	POSTING_READ(PCH_PP_CONTROL);
	DELAY(intel_dp->backlight_off_delay * 1000);
}
1189 
1190 static void ironlake_edp_pll_on(struct drm_encoder *encoder)
1191 {
1192 	struct drm_device *dev = encoder->dev;
1193 	struct drm_i915_private *dev_priv = dev->dev_private;
1194 	u32 dpa_ctl;
1195 
1196 	DRM_DEBUG_KMS("\n");
1197 	dpa_ctl = I915_READ(DP_A);
1198 	dpa_ctl |= DP_PLL_ENABLE;
1199 	I915_WRITE(DP_A, dpa_ctl);
1200 	POSTING_READ(DP_A);
1201 	DELAY(200);
1202 }
1203 
1204 static void ironlake_edp_pll_off(struct drm_encoder *encoder)
1205 {
1206 	struct drm_device *dev = encoder->dev;
1207 	struct drm_i915_private *dev_priv = dev->dev_private;
1208 	u32 dpa_ctl;
1209 
1210 	dpa_ctl = I915_READ(DP_A);
1211 	dpa_ctl &= ~DP_PLL_ENABLE;
1212 	I915_WRITE(DP_A, dpa_ctl);
1213 	POSTING_READ(DP_A);
1214 	DELAY(200);
1215 }
1216 
1217 /* If the sink supports it, try to set the power state appropriately */
1218 static void intel_dp_sink_dpms(struct intel_dp *intel_dp, int mode)
1219 {
1220 	int ret, i;
1221 
1222 	/* Should have a valid DPCD by this point */
1223 	if (intel_dp->dpcd[DP_DPCD_REV] < 0x11)
1224 		return;
1225 
1226 	if (mode != DRM_MODE_DPMS_ON) {
1227 		ret = intel_dp_aux_native_write_1(intel_dp, DP_SET_POWER,
1228 						  DP_SET_POWER_D3);
1229 		if (ret != 1)
1230 			DRM_DEBUG("failed to write sink power state\n");
1231 	} else {
1232 		/*
1233 		 * When turning on, we need to retry for 1ms to give the sink
1234 		 * time to wake up.
1235 		 */
1236 		for (i = 0; i < 3; i++) {
1237 			ret = intel_dp_aux_native_write_1(intel_dp,
1238 							  DP_SET_POWER,
1239 							  DP_SET_POWER_D0);
1240 			if (ret == 1)
1241 				break;
1242 			DELAY(1000);
1243 		}
1244 	}
1245 }
1246 
/*
 * Encoder .prepare hook: shut the output down ahead of a mode change.
 * Backlight and panel power go off first, then the sink is woken (VDD
 * forced, DPMS on) so the link can be taken down cleanly over AUX.
 */
static void intel_dp_prepare(struct drm_encoder *encoder)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

	ironlake_edp_backlight_off(intel_dp);
	ironlake_edp_panel_off(intel_dp);

	/* Wake up the sink first */
	ironlake_edp_panel_vdd_on(intel_dp);
	intel_dp_sink_dpms(intel_dp, DRM_MODE_DPMS_ON);
	intel_dp_link_down(intel_dp);
	ironlake_edp_panel_vdd_off(intel_dp, false);

	/* Make sure the panel is off before trying to
	 * change the mode
	 */
}
1264 
/*
 * Encoder .commit hook: bring the output up after a mode set - wake the
 * sink, run link training, power the panel, then enable the backlight.
 * On CPT PCHs an extra modeset verification pass is run for the pipe.
 */
static void intel_dp_commit(struct drm_encoder *encoder)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct drm_device *dev = encoder->dev;
	struct intel_crtc *intel_crtc = to_intel_crtc(intel_dp->base.base.crtc);

	ironlake_edp_panel_vdd_on(intel_dp);
	intel_dp_sink_dpms(intel_dp, DRM_MODE_DPMS_ON);
	intel_dp_start_link_train(intel_dp);
	ironlake_edp_panel_on(intel_dp);
	ironlake_edp_panel_vdd_off(intel_dp, true);
	intel_dp_complete_link_train(intel_dp);
	ironlake_edp_backlight_on(intel_dp);

	intel_dp->dpms_mode = DRM_MODE_DPMS_ON;

	if (HAS_PCH_CPT(dev))
		intel_cpt_verify_modeset(dev, intel_crtc->pipe);
}
1284 
/*
 * Encoder DPMS hook.  For any mode but ON: backlight and panel power go
 * down first, then the sink is woken just long enough to take the link
 * down cleanly.  For ON: the eDP PLL is enabled for CPU eDP ports, the
 * sink is woken, and the link is (re)trained only when the port was not
 * already enabled.
 */
static void
intel_dp_dpms(struct drm_encoder *encoder, int mode)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct drm_device *dev = encoder->dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	uint32_t dp_reg = I915_READ(intel_dp->output_reg);

	if (mode != DRM_MODE_DPMS_ON) {
		ironlake_edp_backlight_off(intel_dp);
		ironlake_edp_panel_off(intel_dp);

		/* Hold VDD so AUX still works while the panel is off */
		ironlake_edp_panel_vdd_on(intel_dp);
		intel_dp_sink_dpms(intel_dp, mode);
		intel_dp_link_down(intel_dp);
		ironlake_edp_panel_vdd_off(intel_dp, false);

		if (is_cpu_edp(intel_dp))
			ironlake_edp_pll_off(encoder);
	} else {
		if (is_cpu_edp(intel_dp))
			ironlake_edp_pll_on(encoder);

		ironlake_edp_panel_vdd_on(intel_dp);
		intel_dp_sink_dpms(intel_dp, mode);
		if (!(dp_reg & DP_PORT_EN)) {
			/* Port was disabled: needs full link training */
			intel_dp_start_link_train(intel_dp);
			ironlake_edp_panel_on(intel_dp);
			ironlake_edp_panel_vdd_off(intel_dp, true);
			intel_dp_complete_link_train(intel_dp);
		} else
			ironlake_edp_panel_vdd_off(intel_dp, false);
		ironlake_edp_backlight_on(intel_dp);
	}
	intel_dp->dpms_mode = mode;
}
1321 /*
1322  * Native read with retry for link status and receiver capability reads for
1323  * cases where the sink may still be asleep.
1324  */
1325 static bool
1326 intel_dp_aux_native_read_retry(struct intel_dp *intel_dp, uint16_t address,
1327 			       uint8_t *recv, int recv_bytes)
1328 {
1329 	int ret, i;
1330 
1331 	/*
1332 	 * Sinks are *supposed* to come up within 1ms from an off state,
1333 	 * but we're also supposed to retry 3 times per the spec.
1334 	 */
1335 	for (i = 0; i < 3; i++) {
1336 		ret = intel_dp_aux_native_read(intel_dp, address, recv,
1337 					       recv_bytes);
1338 		if (ret == recv_bytes)
1339 			return true;
1340 		DELAY(1000);
1341 	}
1342 
1343 	return false;
1344 }
1345 
1346 /*
1347  * Fetch AUX CH registers 0x202 - 0x207 which contain
1348  * link status information
1349  */
1350 static bool
1351 intel_dp_get_link_status(struct intel_dp *intel_dp, uint8_t link_status[DP_LINK_STATUS_SIZE])
1352 {
1353 	return intel_dp_aux_native_read_retry(intel_dp,
1354 					      DP_LANE0_1_STATUS,
1355 					      link_status,
1356 					      DP_LINK_STATUS_SIZE);
1357 }
1358 
1359 static uint8_t
1360 intel_dp_link_status(uint8_t link_status[DP_LINK_STATUS_SIZE],
1361 		     int r)
1362 {
1363 	return link_status[r - DP_LANE0_1_STATUS];
1364 }
1365 
1366 static uint8_t
1367 intel_get_adjust_request_voltage(uint8_t adjust_request[2],
1368 				 int lane)
1369 {
1370 	int	    s = ((lane & 1) ?
1371 			 DP_ADJUST_VOLTAGE_SWING_LANE1_SHIFT :
1372 			 DP_ADJUST_VOLTAGE_SWING_LANE0_SHIFT);
1373 	uint8_t l = adjust_request[lane>>1];
1374 
1375 	return ((l >> s) & 3) << DP_TRAIN_VOLTAGE_SWING_SHIFT;
1376 }
1377 
1378 static uint8_t
1379 intel_get_adjust_request_pre_emphasis(uint8_t adjust_request[2],
1380 				      int lane)
1381 {
1382 	int	    s = ((lane & 1) ?
1383 			 DP_ADJUST_PRE_EMPHASIS_LANE1_SHIFT :
1384 			 DP_ADJUST_PRE_EMPHASIS_LANE0_SHIFT);
1385 	uint8_t l = adjust_request[lane>>1];
1386 
1387 	return ((l >> s) & 3) << DP_TRAIN_PRE_EMPHASIS_SHIFT;
1388 }
1389 
1390 
/* Debug-only name tables for training levels/patterns; compiled out. */
#if 0
static char	*voltage_names[] = {
	"0.4V", "0.6V", "0.8V", "1.2V"
};
static char	*pre_emph_names[] = {
	"0dB", "3.5dB", "6dB", "9.5dB"
};
static char	*link_train_names[] = {
	"pattern 1", "pattern 2", "idle", "off"
};
#endif
1402 
1403 /*
1404  * These are source-specific values; current Intel hardware supports
1405  * a maximum voltage of 800mV and a maximum pre-emphasis of 6dB
1406  */
1407 
1408 static uint8_t
1409 intel_dp_voltage_max(struct intel_dp *intel_dp)
1410 {
1411 	struct drm_device *dev = intel_dp->base.base.dev;
1412 
1413 	if (IS_GEN7(dev) && is_cpu_edp(intel_dp))
1414 		return DP_TRAIN_VOLTAGE_SWING_800;
1415 	else if (HAS_PCH_CPT(dev) && !is_cpu_edp(intel_dp))
1416 		return DP_TRAIN_VOLTAGE_SWING_1200;
1417 	else
1418 		return DP_TRAIN_VOLTAGE_SWING_800;
1419 }
1420 
/*
 * Maximum pre-emphasis the source can drive at a given voltage swing;
 * gen7 CPU eDP supports a more restricted combination table than the
 * other ports.
 */
static uint8_t
intel_dp_pre_emphasis_max(struct intel_dp *intel_dp, uint8_t voltage_swing)
{
	struct drm_device *dev = intel_dp->base.base.dev;

	if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) {
		switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_400:
			return DP_TRAIN_PRE_EMPHASIS_6;
		case DP_TRAIN_VOLTAGE_SWING_600:
		case DP_TRAIN_VOLTAGE_SWING_800:
			return DP_TRAIN_PRE_EMPHASIS_3_5;
		default:
			return DP_TRAIN_PRE_EMPHASIS_0;
		}
	} else {
		switch (voltage_swing & DP_TRAIN_VOLTAGE_SWING_MASK) {
		case DP_TRAIN_VOLTAGE_SWING_400:
			return DP_TRAIN_PRE_EMPHASIS_6;
		case DP_TRAIN_VOLTAGE_SWING_600:
			return DP_TRAIN_PRE_EMPHASIS_6;
		case DP_TRAIN_VOLTAGE_SWING_800:
			return DP_TRAIN_PRE_EMPHASIS_3_5;
		case DP_TRAIN_VOLTAGE_SWING_1200:
		default:
			return DP_TRAIN_PRE_EMPHASIS_0;
		}
	}
}
1450 
/*
 * Build the next train_set from the sink's adjust request: take the
 * highest voltage swing and pre-emphasis requested across all active
 * lanes, clamp both to the source's limits (setting the MAX_*_REACHED
 * flags when clamped), and apply the result to all four lane entries.
 */
static void
intel_get_adjust_train(struct intel_dp *intel_dp, uint8_t link_status[DP_LINK_STATUS_SIZE])
{
	uint8_t v = 0;
	uint8_t p = 0;
	int lane;
	/* ADJUST_REQUEST bytes live at a fixed offset in the status block */
	uint8_t	*adjust_request = link_status + (DP_ADJUST_REQUEST_LANE0_1 - DP_LANE0_1_STATUS);
	uint8_t voltage_max;
	uint8_t preemph_max;

	for (lane = 0; lane < intel_dp->lane_count; lane++) {
		uint8_t this_v = intel_get_adjust_request_voltage(adjust_request, lane);
		uint8_t this_p = intel_get_adjust_request_pre_emphasis(adjust_request, lane);

		if (this_v > v)
			v = this_v;
		if (this_p > p)
			p = this_p;
	}

	voltage_max = intel_dp_voltage_max(intel_dp);
	if (v >= voltage_max)
		v = voltage_max | DP_TRAIN_MAX_SWING_REACHED;

	preemph_max = intel_dp_pre_emphasis_max(intel_dp, v);
	if (p >= preemph_max)
		p = preemph_max | DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;

	for (lane = 0; lane < 4; lane++)
		intel_dp->train_set[lane] = v | p;
}
1482 
/*
 * Translate a DPCD train_set byte into the DP port register's voltage
 * and pre-emphasis bits (pre-SNB encoding).  Unknown values fall back
 * to the lowest level via the default cases.
 */
static uint32_t
intel_dp_signal_levels(uint8_t train_set)
{
	uint32_t	signal_levels = 0;

	switch (train_set & DP_TRAIN_VOLTAGE_SWING_MASK) {
	case DP_TRAIN_VOLTAGE_SWING_400:
	default:
		signal_levels |= DP_VOLTAGE_0_4;
		break;
	case DP_TRAIN_VOLTAGE_SWING_600:
		signal_levels |= DP_VOLTAGE_0_6;
		break;
	case DP_TRAIN_VOLTAGE_SWING_800:
		signal_levels |= DP_VOLTAGE_0_8;
		break;
	case DP_TRAIN_VOLTAGE_SWING_1200:
		signal_levels |= DP_VOLTAGE_1_2;
		break;
	}
	switch (train_set & DP_TRAIN_PRE_EMPHASIS_MASK) {
	case DP_TRAIN_PRE_EMPHASIS_0:
	default:
		signal_levels |= DP_PRE_EMPHASIS_0;
		break;
	case DP_TRAIN_PRE_EMPHASIS_3_5:
		signal_levels |= DP_PRE_EMPHASIS_3_5;
		break;
	case DP_TRAIN_PRE_EMPHASIS_6:
		signal_levels |= DP_PRE_EMPHASIS_6;
		break;
	case DP_TRAIN_PRE_EMPHASIS_9_5:
		signal_levels |= DP_PRE_EMPHASIS_9_5;
		break;
	}
	return signal_levels;
}
1520 
/* Gen6's DP voltage swing and pre-emphasis control */
static uint32_t
intel_gen6_edp_signal_levels(uint8_t train_set)
{
	/* The combined swing/pre-emphasis value selects a single SNB
	 * register encoding; unsupported combinations log a message and
	 * fall back to the lowest level. */
	int signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
					 DP_TRAIN_PRE_EMPHASIS_MASK);
	switch (signal_levels) {
	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_0:
	case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_0:
		return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_3_5:
		return EDP_LINK_TRAIN_400MV_3_5DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_6:
	case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_6:
		return EDP_LINK_TRAIN_400_600MV_6DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_3_5:
	case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_3_5:
		return EDP_LINK_TRAIN_600_800MV_3_5DB_SNB_B;
	case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_0:
	case DP_TRAIN_VOLTAGE_SWING_1200 | DP_TRAIN_PRE_EMPHASIS_0:
		return EDP_LINK_TRAIN_800_1200MV_0DB_SNB_B;
	default:
		DRM_DEBUG_KMS("Unsupported voltage swing/pre-emphasis level:"
			      "0x%x\n", signal_levels);
		return EDP_LINK_TRAIN_400_600MV_0DB_SNB_B;
	}
}
1548 
/* Gen7's DP voltage swing and pre-emphasis control */
static uint32_t
intel_gen7_edp_signal_levels(uint8_t train_set)
{
	/* The combined swing/pre-emphasis value selects a single IVB
	 * register encoding; unsupported combinations log a message and
	 * fall back to 500mV/0dB. */
	int signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
					 DP_TRAIN_PRE_EMPHASIS_MASK);
	switch (signal_levels) {
	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_0:
		return EDP_LINK_TRAIN_400MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_3_5:
		return EDP_LINK_TRAIN_400MV_3_5DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_400 | DP_TRAIN_PRE_EMPHASIS_6:
		return EDP_LINK_TRAIN_400MV_6DB_IVB;

	case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_0:
		return EDP_LINK_TRAIN_600MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_600 | DP_TRAIN_PRE_EMPHASIS_3_5:
		return EDP_LINK_TRAIN_600MV_3_5DB_IVB;

	case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_0:
		return EDP_LINK_TRAIN_800MV_0DB_IVB;
	case DP_TRAIN_VOLTAGE_SWING_800 | DP_TRAIN_PRE_EMPHASIS_3_5:
		return EDP_LINK_TRAIN_800MV_3_5DB_IVB;

	default:
		DRM_DEBUG_KMS("Unsupported voltage swing/pre-emphasis level:"
			      "0x%x\n", signal_levels);
		return EDP_LINK_TRAIN_500MV_0DB_IVB;
	}
}
1579 
1580 static uint8_t
1581 intel_get_lane_status(uint8_t link_status[DP_LINK_STATUS_SIZE],
1582 		      int lane)
1583 {
1584 	int s = (lane & 1) * 4;
1585 	uint8_t l = link_status[lane>>1];
1586 
1587 	return (l >> s) & 0xf;
1588 }
1589 
1590 /* Check for clock recovery is done on all channels */
1591 static bool
1592 intel_clock_recovery_ok(uint8_t link_status[DP_LINK_STATUS_SIZE], int lane_count)
1593 {
1594 	int lane;
1595 	uint8_t lane_status;
1596 
1597 	for (lane = 0; lane < lane_count; lane++) {
1598 		lane_status = intel_get_lane_status(link_status, lane);
1599 		if ((lane_status & DP_LANE_CR_DONE) == 0)
1600 			return false;
1601 	}
1602 	return true;
1603 }
1604 
/* Check to see if channel eq is done on all channels */
#define CHANNEL_EQ_BITS (DP_LANE_CR_DONE|\
			 DP_LANE_CHANNEL_EQ_DONE|\
			 DP_LANE_SYMBOL_LOCKED)
/*
 * True when inter-lane alignment is done and every active lane reports
 * all of CHANNEL_EQ_BITS (CR done, channel EQ done, symbol lock).
 */
static bool
intel_channel_eq_ok(struct intel_dp *intel_dp, uint8_t link_status[DP_LINK_STATUS_SIZE])
{
	uint8_t lane_align;
	uint8_t lane_status;
	int lane;

	lane_align = intel_dp_link_status(link_status,
					  DP_LANE_ALIGN_STATUS_UPDATED);
	if ((lane_align & DP_INTERLANE_ALIGN_DONE) == 0)
		return false;
	for (lane = 0; lane < intel_dp->lane_count; lane++) {
		lane_status = intel_get_lane_status(link_status, lane);
		if ((lane_status & CHANNEL_EQ_BITS) != CHANNEL_EQ_BITS)
			return false;
	}
	return true;
}
1627 
/*
 * Program dp_reg_value into the port register, write dp_train_pat to
 * the sink's TRAINING_PATTERN_SET register, and upload the per-lane
 * drive settings.  Returns false if the lane-set AUX write transferred
 * fewer bytes than lane_count.
 */
static bool
intel_dp_set_link_train(struct intel_dp *intel_dp,
			uint32_t dp_reg_value,
			uint8_t dp_train_pat)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	int ret;

	I915_WRITE(intel_dp->output_reg, dp_reg_value);
	POSTING_READ(intel_dp->output_reg);

	intel_dp_aux_native_write_1(intel_dp,
				    DP_TRAINING_PATTERN_SET,
				    dp_train_pat);

	ret = intel_dp_aux_native_write(intel_dp,
					DP_TRAINING_LANE0_SET,
					intel_dp->train_set,
					intel_dp->lane_count);
	if (ret != intel_dp->lane_count)
		return false;

	return true;
}
1653 
/* Enable corresponding port and start training pattern 1 */
/*
 * Clock-recovery phase of link training: repeatedly program the sink's
 * requested voltage/pre-emphasis and check per-lane CR status.  Gives
 * up after 5 attempts at the same voltage, or 5 full restarts once all
 * lanes report maximum swing.  The final port value is saved back into
 * intel_dp->DP for intel_dp_complete_link_train().
 */
static void
intel_dp_start_link_train(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct intel_crtc *intel_crtc = to_intel_crtc(intel_dp->base.base.crtc);
	int i;
	uint8_t voltage;
	bool clock_recovery = false;
	int voltage_tries, loop_tries;
	u32 reg;
	uint32_t DP = intel_dp->DP;

	/* Enable output, wait for it to become active */
	I915_WRITE(intel_dp->output_reg, intel_dp->DP);
	POSTING_READ(intel_dp->output_reg);
	intel_wait_for_vblank(dev, intel_crtc->pipe);

	/* Write the link configuration data */
	intel_dp_aux_native_write(intel_dp, DP_LINK_BW_SET,
				  intel_dp->link_configuration,
				  DP_LINK_CONFIGURATION_SIZE);

	DP |= DP_PORT_EN;

	/* CPT (and gen7) use a different link-train field layout */
	if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
		DP &= ~DP_LINK_TRAIN_MASK_CPT;
	else
		DP &= ~DP_LINK_TRAIN_MASK;
	memset(intel_dp->train_set, 0, 4);
	voltage = 0xff;	/* sentinel: first same-voltage check never matches */
	voltage_tries = 0;
	loop_tries = 0;
	clock_recovery = false;
	for (;;) {
		/* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */
		uint8_t	    link_status[DP_LINK_STATUS_SIZE];
		uint32_t    signal_levels;


		if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) {
			signal_levels = intel_gen7_edp_signal_levels(intel_dp->train_set[0]);
			DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB) | signal_levels;
		} else if (IS_GEN6(dev) && is_cpu_edp(intel_dp)) {
			signal_levels = intel_gen6_edp_signal_levels(intel_dp->train_set[0]);
			DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB) | signal_levels;
		} else {
			signal_levels = intel_dp_signal_levels(intel_dp->train_set[0]);
			DRM_DEBUG_KMS("training pattern 1 signal levels %08x\n", signal_levels);
			DP = (DP & ~(DP_VOLTAGE_MASK|DP_PRE_EMPHASIS_MASK)) | signal_levels;
		}

		if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
			reg = DP | DP_LINK_TRAIN_PAT_1_CPT;
		else
			reg = DP | DP_LINK_TRAIN_PAT_1;

		if (!intel_dp_set_link_train(intel_dp, reg,
					     DP_TRAINING_PATTERN_1))
			break;
		/* Set training pattern 1 */

		DELAY(100);
		if (!intel_dp_get_link_status(intel_dp, link_status)) {
			DRM_ERROR("failed to get link status\n");
			break;
		}

		if (intel_clock_recovery_ok(link_status, intel_dp->lane_count)) {
			DRM_DEBUG_KMS("clock recovery OK\n");
			clock_recovery = true;
			break;
		}

		/* Check to see if we've tried the max voltage */
		for (i = 0; i < intel_dp->lane_count; i++)
			if ((intel_dp->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
				break;
		if (i == intel_dp->lane_count) {
			/* All lanes maxed out: restart from zero, up to 5 times */
			++loop_tries;
			if (loop_tries == 5) {
				DRM_DEBUG_KMS("too many full retries, give up\n");
				break;
			}
			memset(intel_dp->train_set, 0, 4);
			voltage_tries = 0;
			continue;
		}

		/* Check to see if we've tried the same voltage 5 times */
		if ((intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
			++voltage_tries;
			if (voltage_tries == 5) {
				DRM_DEBUG_KMS("too many voltage retries, give up\n");
				break;
			}
		} else
			voltage_tries = 0;
		voltage = intel_dp->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;

		/* Compute new intel_dp->train_set as requested by target */
		intel_get_adjust_train(intel_dp, link_status);
	}

	intel_dp->DP = DP;
}
1761 
/*
 * Channel-equalization phase of link training (training pattern 2).
 * Restarts clock recovery when CR is lost, retries EQ up to 5 times,
 * and aborts after too many CR restarts.  Always finishes by turning
 * the training pattern off in both the port register and the sink.
 */
static void
intel_dp_complete_link_train(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	bool channel_eq = false;
	int tries, cr_tries;
	u32 reg;
	uint32_t DP = intel_dp->DP;

	/* channel equalization */
	tries = 0;
	cr_tries = 0;
	channel_eq = false;
	for (;;) {
		/* Use intel_dp->train_set[0] to set the voltage and pre emphasis values */
		uint32_t    signal_levels;
		uint8_t	    link_status[DP_LINK_STATUS_SIZE];

		if (cr_tries > 5) {
			DRM_ERROR("failed to train DP, aborting\n");
			intel_dp_link_down(intel_dp);
			break;
		}

		if (IS_GEN7(dev) && is_cpu_edp(intel_dp)) {
			signal_levels = intel_gen7_edp_signal_levels(intel_dp->train_set[0]);
			DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_IVB) | signal_levels;
		} else if (IS_GEN6(dev) && is_cpu_edp(intel_dp)) {
			signal_levels = intel_gen6_edp_signal_levels(intel_dp->train_set[0]);
			DP = (DP & ~EDP_LINK_TRAIN_VOL_EMP_MASK_SNB) | signal_levels;
		} else {
			signal_levels = intel_dp_signal_levels(intel_dp->train_set[0]);
			DP = (DP & ~(DP_VOLTAGE_MASK|DP_PRE_EMPHASIS_MASK)) | signal_levels;
		}

		if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
			reg = DP | DP_LINK_TRAIN_PAT_2_CPT;
		else
			reg = DP | DP_LINK_TRAIN_PAT_2;

		/* channel eq pattern */
		if (!intel_dp_set_link_train(intel_dp, reg,
					     DP_TRAINING_PATTERN_2))
			break;

		DELAY(400);
		if (!intel_dp_get_link_status(intel_dp, link_status))
			break;

		/* Make sure clock is still ok */
		if (!intel_clock_recovery_ok(link_status, intel_dp->lane_count)) {
			intel_dp_start_link_train(intel_dp);
			cr_tries++;
			continue;
		}

		if (intel_channel_eq_ok(intel_dp, link_status)) {
			channel_eq = true;
			break;
		}

		/* Try 5 times, then try clock recovery if that fails */
		if (tries > 5) {
			intel_dp_link_down(intel_dp);
			intel_dp_start_link_train(intel_dp);
			tries = 0;
			cr_tries++;
			continue;
		}

		/* Compute new intel_dp->train_set as requested by target */
		intel_get_adjust_train(intel_dp, link_status);
		++tries;
	}

	if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
		reg = DP | DP_LINK_TRAIN_OFF_CPT;
	else
		reg = DP | DP_LINK_TRAIN_OFF;

	/* Turn training off in the port and tell the sink we are done */
	I915_WRITE(intel_dp->output_reg, reg);
	POSTING_READ(intel_dp->output_reg);
	intel_dp_aux_native_write_1(intel_dp,
				    DP_TRAINING_PATTERN_SET, DP_TRAINING_PATTERN_DISABLE);
}
1848 
/*
 * Take the link down and disable the port, stepping through the idle
 * pattern first and applying the eDP PLL and transcoder-B-select
 * hardware workarounds (see inline comments).
 */
static void
intel_dp_link_down(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	uint32_t DP = intel_dp->DP;

	/* Port already disabled: nothing to do */
	if ((I915_READ(intel_dp->output_reg) & DP_PORT_EN) == 0)
		return;

	DRM_DEBUG_KMS("\n");

	if (is_edp(intel_dp)) {
		DP &= ~DP_PLL_ENABLE;
		I915_WRITE(intel_dp->output_reg, DP);
		POSTING_READ(intel_dp->output_reg);
		DELAY(100);
	}

	/* Switch the link to the idle pattern before turning it off */
	if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp))) {
		DP &= ~DP_LINK_TRAIN_MASK_CPT;
		I915_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE_CPT);
	} else {
		DP &= ~DP_LINK_TRAIN_MASK;
		I915_WRITE(intel_dp->output_reg, DP | DP_LINK_TRAIN_PAT_IDLE);
	}
	POSTING_READ(intel_dp->output_reg);

	DELAY(17*1000);

	if (is_edp(intel_dp)) {
		if (HAS_PCH_CPT(dev) && (IS_GEN7(dev) || !is_cpu_edp(intel_dp)))
			DP |= DP_LINK_TRAIN_OFF_CPT;
		else
			DP |= DP_LINK_TRAIN_OFF;
	}


	if (!HAS_PCH_CPT(dev) &&
	    I915_READ(intel_dp->output_reg) & DP_PIPEB_SELECT) {
		struct drm_crtc *crtc = intel_dp->base.base.crtc;

		/* Hardware workaround: leaving our transcoder select
		 * set to transcoder B while it's off will prevent the
		 * corresponding HDMI output on transcoder A.
		 *
		 * Combine this with another hardware workaround:
		 * transcoder select bit can only be cleared while the
		 * port is enabled.
		 */
		DP &= ~DP_PIPEB_SELECT;
		I915_WRITE(intel_dp->output_reg, DP);

		/* Changes to enable or select take place the vblank
		 * after being written.
		 */
		if (crtc == NULL) {
			/* We can arrive here never having been attached
			 * to a CRTC, for instance, due to inheriting
			 * random state from the BIOS.
			 *
			 * If the pipe is not running, play safe and
			 * wait for the clocks to stabilise before
			 * continuing.
			 */
			POSTING_READ(intel_dp->output_reg);
			DELAY(50 * 1000);
		} else
			intel_wait_for_vblank(dev, to_intel_crtc(crtc)->pipe);
	}

	DP &= ~DP_AUDIO_OUTPUT_ENABLE;
	I915_WRITE(intel_dp->output_reg, DP & ~DP_PORT_EN);
	POSTING_READ(intel_dp->output_reg);
	DELAY(intel_dp->panel_power_down_delay * 1000);
}
1925 
1926 static bool
1927 intel_dp_get_dpcd(struct intel_dp *intel_dp)
1928 {
1929 	if (intel_dp_aux_native_read_retry(intel_dp, 0x000, intel_dp->dpcd,
1930 					   sizeof(intel_dp->dpcd)) &&
1931 	    (intel_dp->dpcd[DP_DPCD_REV] != 0)) {
1932 		return true;
1933 	}
1934 
1935 	return false;
1936 }
1937 
1938 static bool
1939 intel_dp_get_sink_irq(struct intel_dp *intel_dp, u8 *sink_irq_vector)
1940 {
1941 	int ret;
1942 
1943 	ret = intel_dp_aux_native_read_retry(intel_dp,
1944 					     DP_DEVICE_SERVICE_IRQ_VECTOR,
1945 					     sink_irq_vector, 1);
1946 	if (!ret)
1947 		return false;
1948 
1949 	return true;
1950 }
1951 
1952 static void
1953 intel_dp_handle_test_request(struct intel_dp *intel_dp)
1954 {
1955 	/* NAK by default */
1956 	intel_dp_aux_native_write_1(intel_dp, DP_TEST_RESPONSE, DP_TEST_ACK);
1957 }
1958 
1959 /*
1960  * According to DP spec
1961  * 5.1.2:
1962  *  1. Read DPCD
1963  *  2. Configure link according to Receiver Capabilities
1964  *  3. Use Link Training from 2.5.3.3 and 3.5.1.3
1965  *  4. Check link status on receipt of hot-plug interrupt
1966  */
1967 
/*
 * Hot-plug / short-pulse handler: verify the sink still responds and
 * the link is still trained, service sink IRQs, and retrain if channel
 * EQ was lost.  Only acts when the output is on and bound to a CRTC.
 */
static void
intel_dp_check_link_status(struct intel_dp *intel_dp)
{
	u8 sink_irq_vector;
	u8 link_status[DP_LINK_STATUS_SIZE];

	if (intel_dp->dpms_mode != DRM_MODE_DPMS_ON)
		return;

	if (!intel_dp->base.base.crtc)
		return;

	/* Try to read receiver status if the link appears to be up */
	if (!intel_dp_get_link_status(intel_dp, link_status)) {
		intel_dp_link_down(intel_dp);
		return;
	}

	/* Now read the DPCD to see if it's actually running */
	if (!intel_dp_get_dpcd(intel_dp)) {
		intel_dp_link_down(intel_dp);
		return;
	}

	/* Try to read the source of the interrupt */
	if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11 &&
	    intel_dp_get_sink_irq(intel_dp, &sink_irq_vector)) {
		/* Clear interrupt source */
		intel_dp_aux_native_write_1(intel_dp,
					    DP_DEVICE_SERVICE_IRQ_VECTOR,
					    sink_irq_vector);

		if (sink_irq_vector & DP_AUTOMATED_TEST_REQUEST)
			intel_dp_handle_test_request(intel_dp);
		if (sink_irq_vector & (DP_CP_IRQ | DP_SINK_SPECIFIC_IRQ))
			DRM_DEBUG_KMS("CP or sink specific irq unhandled\n");
	}

	if (!intel_channel_eq_ok(intel_dp, link_status)) {
		DRM_DEBUG_KMS("%s: channel EQ not ok, retraining\n",
			      drm_get_encoder_name(&intel_dp->base.base));
 		intel_dp_start_link_train(intel_dp);
		intel_dp_complete_link_train(intel_dp);
	}
}
2013 
2014 static enum drm_connector_status
2015 intel_dp_detect_dpcd(struct intel_dp *intel_dp)
2016 {
2017 	if (intel_dp_get_dpcd(intel_dp))
2018 		return connector_status_connected;
2019 	return connector_status_disconnected;
2020 }
2021 
2022 static enum drm_connector_status
2023 ironlake_dp_detect(struct intel_dp *intel_dp)
2024 {
2025 	enum drm_connector_status status;
2026 
2027 	/* Can't disconnect eDP, but you can close the lid... */
2028 	if (is_edp(intel_dp)) {
2029 		status = intel_panel_detect(intel_dp->base.base.dev);
2030 		if (status == connector_status_unknown)
2031 			status = connector_status_connected;
2032 		return status;
2033 	}
2034 
2035 	return intel_dp_detect_dpcd(intel_dp);
2036 }
2037 
/*
 * G4x connection detect: consult the port's live hot-plug status bit in
 * PORT_HOTPLUG_STAT before bothering with an AUX DPCD probe.
 */
static enum drm_connector_status
g4x_dp_detect(struct intel_dp *intel_dp)
{
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	uint32_t temp, bit;

	/* Map the DP port register to its hot-plug status bit. */
	switch (intel_dp->output_reg) {
	case DP_B:
		bit = DPB_HOTPLUG_INT_STATUS;
		break;
	case DP_C:
		bit = DPC_HOTPLUG_INT_STATUS;
		break;
	case DP_D:
		bit = DPD_HOTPLUG_INT_STATUS;
		break;
	default:
		/* Unknown port register; can't say anything about it. */
		return connector_status_unknown;
	}

	temp = I915_READ(PORT_HOTPLUG_STAT);

	/* No live status bit: nothing plugged in, skip the AUX probe. */
	if ((temp & bit) == 0)
		return connector_status_disconnected;

	return intel_dp_detect_dpcd(intel_dp);
}
2066 
/*
 * Fetch the sink's EDID, bracketing the DDC transaction with panel VDD
 * power so the AUX/DDC channel is usable on eDP panels.  Caller owns
 * (and must free) the returned EDID; may be NULL.
 */
static struct edid *
intel_dp_get_edid(struct drm_connector *connector, device_t adapter)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	struct edid	*edid;

	ironlake_edp_panel_vdd_on(intel_dp);
	edid = drm_get_edid(connector, adapter);
	/* false: let VDD drop via the delayed-off path, not synchronously */
	ironlake_edp_panel_vdd_off(intel_dp, false);
	return edid;
}
2078 
/*
 * Probe EDID modes via DDC with panel VDD held on, mirroring
 * intel_dp_get_edid().  Returns the number of modes added.
 */
static int
intel_dp_get_edid_modes(struct drm_connector *connector, device_t adapter)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	int	ret;

	ironlake_edp_panel_vdd_on(intel_dp);
	ret = intel_ddc_get_modes(connector, adapter);
	ironlake_edp_panel_vdd_off(intel_dp, false);
	return ret;
}
2090 
2091 
/**
 * Detect whether a sink is attached to this DP/eDP connector.
 *
 * Delegates to the platform-specific helper (PCH vs. G4x hot-plug
 * logic) and, when connected, refreshes the cached has_audio state
 * from the forced property or from the monitor's EDID.
 *
 * \return connector_status_connected if a sink responds.
 * \return connector_status_disconnected (or unknown) otherwise.
 */
static enum drm_connector_status
intel_dp_detect(struct drm_connector *connector, bool force)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	struct drm_device *dev = intel_dp->base.base.dev;
	enum drm_connector_status status;
	struct edid *edid = NULL;

	intel_dp->has_audio = false;

	if (HAS_PCH_SPLIT(dev))
		status = ironlake_dp_detect(intel_dp);
	else
		status = g4x_dp_detect(intel_dp);
	if (status != connector_status_connected)
		return status;

	if (intel_dp->force_audio != HDMI_AUDIO_AUTO) {
		/* User override via the force_audio property. */
		intel_dp->has_audio = (intel_dp->force_audio == HDMI_AUDIO_ON);
	} else {
		edid = intel_dp_get_edid(connector, intel_dp->adapter);
		if (edid) {
			intel_dp->has_audio = drm_detect_monitor_audio(edid);
			connector->display_info.raw_edid = NULL;
			drm_free(edid, DRM_MEM_KMS);
		}
	}

	return connector_status_connected;
}
2128 
/*
 * Enumerate modes for the connector.  EDID modes take priority; for eDP
 * the preferred EDID mode (or, failing that, the VBT panel mode) is
 * cached as panel_fixed_mode for later fixed-mode duplication.
 * Returns the number of modes added to the probed list.
 */
static int intel_dp_get_modes(struct drm_connector *connector)
{
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	struct drm_device *dev = intel_dp->base.base.dev;
	struct drm_i915_private *dev_priv = dev->dev_private;
	int ret;

	/* We should parse the EDID data and find out if it has an audio sink
	 */

	ret = intel_dp_get_edid_modes(connector, intel_dp->adapter);
	if (ret) {
		/* Remember the panel's preferred EDID mode for eDP. */
		if (is_edp(intel_dp) && !intel_dp->panel_fixed_mode) {
			struct drm_display_mode *newmode;
			list_for_each_entry(newmode, &connector->probed_modes,
					    head) {
				if ((newmode->type & DRM_MODE_TYPE_PREFERRED)) {
					intel_dp->panel_fixed_mode =
						drm_mode_duplicate(dev, newmode);
					break;
				}
			}
		}
		return ret;
	}

	/* if eDP has no EDID, try to use fixed panel mode from VBT */
	if (is_edp(intel_dp)) {
		/* initialize panel mode from VBT if available for eDP */
		if (intel_dp->panel_fixed_mode == NULL && dev_priv->lfp_lvds_vbt_mode != NULL) {
			intel_dp->panel_fixed_mode =
				drm_mode_duplicate(dev, dev_priv->lfp_lvds_vbt_mode);
			if (intel_dp->panel_fixed_mode) {
				intel_dp->panel_fixed_mode->type |=
					DRM_MODE_TYPE_PREFERRED;
			}
		}
		/* Report the cached fixed mode as the single available mode. */
		if (intel_dp->panel_fixed_mode) {
			struct drm_display_mode *mode;
			mode = drm_mode_duplicate(dev, intel_dp->panel_fixed_mode);
			drm_mode_probed_add(connector, mode);
			return 1;
		}
	}
	return 0;
}
2175 
2176 static bool
2177 intel_dp_detect_audio(struct drm_connector *connector)
2178 {
2179 	struct intel_dp *intel_dp = intel_attached_dp(connector);
2180 	struct edid *edid;
2181 	bool has_audio = false;
2182 
2183 	edid = intel_dp_get_edid(connector, intel_dp->adapter);
2184 	if (edid) {
2185 		has_audio = drm_detect_monitor_audio(edid);
2186 
2187 		connector->display_info.raw_edid = NULL;
2188 		drm_free(edid, DRM_MEM_KMS);
2189 	}
2190 
2191 	return has_audio;
2192 }
2193 
/*
 * Connector property setter.  Handles the force_audio and broadcast_rgb
 * properties; any effective change triggers a full modeset on the
 * attached crtc so the new setting takes effect.  Returns 0 on success
 * or -EINVAL for unknown properties.
 */
static int
intel_dp_set_property(struct drm_connector *connector,
		      struct drm_property *property,
		      uint64_t val)
{
	struct drm_i915_private *dev_priv = connector->dev->dev_private;
	struct intel_dp *intel_dp = intel_attached_dp(connector);
	int ret;

	/* Record the new value in the connector's property store first. */
	ret = drm_connector_property_set_value(connector, property, val);
	if (ret)
		return ret;

	if (property == dev_priv->force_audio_property) {
		int i = val;
		bool has_audio;

		if (i == intel_dp->force_audio)
			return 0;

		intel_dp->force_audio = i;

		/* AUTO means re-probe the EDID for audio capability. */
		if (i == HDMI_AUDIO_AUTO)
			has_audio = intel_dp_detect_audio(connector);
		else
			has_audio = (i == HDMI_AUDIO_ON);

		/* No effective change: skip the modeset. */
		if (has_audio == intel_dp->has_audio)
			return 0;

		intel_dp->has_audio = has_audio;
		goto done;
	}

	if (property == dev_priv->broadcast_rgb_property) {
		if (val == !!intel_dp->color_range)
			return 0;

		intel_dp->color_range = val ? DP_COLOR_RANGE_16_235 : 0;
		goto done;
	}

	return -EINVAL;

done:
	/* Re-run the modeset so the changed setting is programmed. */
	if (intel_dp->base.base.crtc) {
		struct drm_crtc *crtc = intel_dp->base.base.crtc;
		drm_crtc_helper_set_mode(crtc, &crtc->mode,
					 crtc->x, crtc->y,
					 crtc->fb);
	}

	return 0;
}
2248 
/*
 * Connector destructor: tear down eDP backlight state (if the VBT says
 * eDP lives on DP-D), then release the drm connector and its memory.
 */
static void
intel_dp_destroy(struct drm_connector *connector)
{
	struct drm_device *dev = connector->dev;

	if (intel_dpd_is_edp(dev))
		intel_panel_destroy_backlight(dev);

#if 0
	drm_sysfs_connector_remove(connector);
#endif
	drm_connector_cleanup(connector);
	drm_free(connector, DRM_MEM_KMS);
}
2263 
/*
 * Encoder destructor: delete the DP i2c/aux bus children, clean up the
 * drm encoder, and for eDP cancel+drain the delayed panel-VDD-off task
 * before forcing VDD off synchronously.  Finally frees intel_dp itself.
 */
static void intel_dp_encoder_destroy(struct drm_encoder *encoder)
{
	struct drm_device *dev;
	struct intel_dp *intel_dp;

	intel_dp = enc_to_intel_dp(encoder);
	dev = encoder->dev;

	/* Adapter must go before its parent iic bus device. */
	if (intel_dp->dp_iic_bus != NULL) {
		if (intel_dp->adapter != NULL) {
			device_delete_child(intel_dp->dp_iic_bus,
			    intel_dp->adapter);
		}
		device_delete_child(dev->device, intel_dp->dp_iic_bus);
	}
	drm_encoder_cleanup(encoder);
	if (is_edp(intel_dp)) {
		struct drm_i915_private *dev_priv = intel_dp->base.base.dev->dev_private;

		/* Make sure no vdd-off work runs after we free intel_dp. */
		taskqueue_cancel_timeout(dev_priv->tq,
		    &intel_dp->panel_vdd_task, NULL);
		taskqueue_drain_timeout(dev_priv->tq,
		    &intel_dp->panel_vdd_task);
		ironlake_panel_vdd_off_sync(intel_dp);
	}
	drm_free(intel_dp, DRM_MEM_KMS);
}
2291 
/* Encoder helper vtable: modeset hooks for the DP encoder. */
static const struct drm_encoder_helper_funcs intel_dp_helper_funcs = {
	.dpms = intel_dp_dpms,
	.mode_fixup = intel_dp_mode_fixup,
	.prepare = intel_dp_prepare,
	.mode_set = intel_dp_mode_set,
	.commit = intel_dp_commit,
};

/* Connector vtable: detect/property/destroy entry points. */
static const struct drm_connector_funcs intel_dp_connector_funcs = {
	.dpms = drm_helper_connector_dpms,
	.detect = intel_dp_detect,
	.fill_modes = drm_helper_probe_single_connector_modes,
	.set_property = intel_dp_set_property,
	.destroy = intel_dp_destroy,
};

/* Connector helper vtable: mode enumeration and validation. */
static const struct drm_connector_helper_funcs intel_dp_connector_helper_funcs = {
	.get_modes = intel_dp_get_modes,
	.mode_valid = intel_dp_mode_valid,
	.best_encoder = intel_best_encoder,
};

/* Encoder vtable: only destruction is non-default. */
static const struct drm_encoder_funcs intel_dp_enc_funcs = {
	.destroy = intel_dp_encoder_destroy,
};
2317 
2318 static void
2319 intel_dp_hot_plug(struct intel_encoder *intel_encoder)
2320 {
2321 	struct intel_dp *intel_dp = container_of(intel_encoder, struct intel_dp, base);
2322 
2323 	intel_dp_check_link_status(intel_dp);
2324 }
2325 
/* Return which DP Port should be selected for Transcoder DP control */
/*
 * Walks every encoder attached to @crtc and returns the output register
 * of the first DP/eDP encoder found, or -1 if none drives this crtc.
 */
int
intel_trans_dp_port_sel(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct drm_mode_config *mode_config = &dev->mode_config;
	struct drm_encoder *encoder;

	list_for_each_entry(encoder, &mode_config->encoder_list, head) {
		struct intel_dp *intel_dp;

		if (encoder->crtc != crtc)
			continue;

		intel_dp = enc_to_intel_dp(encoder);
		if (intel_dp->base.type == INTEL_OUTPUT_DISPLAYPORT ||
		    intel_dp->base.type == INTEL_OUTPUT_EDP)
			return intel_dp->output_reg;
	}

	return -1;
}
2348 
2349 /* check the VBT to see whether the eDP is on DP-D port */
2350 bool intel_dpd_is_edp(struct drm_device *dev)
2351 {
2352 	struct drm_i915_private *dev_priv = dev->dev_private;
2353 	struct child_device_config *p_child;
2354 	int i;
2355 
2356 	if (!dev_priv->child_dev_num)
2357 		return false;
2358 
2359 	for (i = 0; i < dev_priv->child_dev_num; i++) {
2360 		p_child = dev_priv->child_dev + i;
2361 
2362 		if (p_child->dvo_port == PORT_IDPD &&
2363 		    p_child->device_type == DEVICE_TYPE_eDP)
2364 			return true;
2365 	}
2366 	return false;
2367 }
2368 
/* Attach the user-tunable force_audio and broadcast_rgb properties. */
static void
intel_dp_add_properties(struct intel_dp *intel_dp, struct drm_connector *connector)
{
	intel_attach_force_audio_property(connector);
	intel_attach_broadcast_rgb_property(connector);
}
2375 
/*
 * Create and register a DP (or eDP) encoder+connector pair for the port
 * at @output_reg.  Sets up the drm objects, the DDC/AUX bus, hot-plug
 * support, and — for eDP — panel power-sequencing delays and a cached
 * DPCD.  On an eDP DPCD read failure the encoder and connector are torn
 * down again ("ghost" panel) and the function returns early.
 */
void
intel_dp_init(struct drm_device *dev, int output_reg)
{
	struct drm_i915_private *dev_priv = dev->dev_private;
	struct drm_connector *connector;
	struct intel_dp *intel_dp;
	struct intel_encoder *intel_encoder;
	struct intel_connector *intel_connector;
	const char *name = NULL;
	int type;

	intel_dp = kmalloc(sizeof(struct intel_dp), DRM_MEM_KMS,
	    M_WAITOK | M_ZERO);

	intel_dp->output_reg = output_reg;
	/* -1: dpms state unknown until the first dpms call. */
	intel_dp->dpms_mode = -1;

	intel_connector = kmalloc(sizeof(struct intel_connector), DRM_MEM_KMS,
	    M_WAITOK | M_ZERO);
	intel_encoder = &intel_dp->base;

	/* PCH DP-D may actually be the eDP panel, per the VBT. */
	if (HAS_PCH_SPLIT(dev) && output_reg == PCH_DP_D)
		if (intel_dpd_is_edp(dev))
			intel_dp->is_pch_edp = true;

	/* DP_A (CPU eDP) and VBT-flagged PCH DP-D register as eDP. */
	if (output_reg == DP_A || is_pch_edp(intel_dp)) {
		type = DRM_MODE_CONNECTOR_eDP;
		intel_encoder->type = INTEL_OUTPUT_EDP;
	} else {
		type = DRM_MODE_CONNECTOR_DisplayPort;
		intel_encoder->type = INTEL_OUTPUT_DISPLAYPORT;
	}

	connector = &intel_connector->base;
	drm_connector_init(dev, connector, &intel_dp_connector_funcs, type);
	drm_connector_helper_add(connector, &intel_dp_connector_helper_funcs);

	connector->polled = DRM_CONNECTOR_POLL_HPD;

	/* Per-port clone mask; eDP overrides it below. */
	if (output_reg == DP_B || output_reg == PCH_DP_B)
		intel_encoder->clone_mask = (1 << INTEL_DP_B_CLONE_BIT);
	else if (output_reg == DP_C || output_reg == PCH_DP_C)
		intel_encoder->clone_mask = (1 << INTEL_DP_C_CLONE_BIT);
	else if (output_reg == DP_D || output_reg == PCH_DP_D)
		intel_encoder->clone_mask = (1 << INTEL_DP_D_CLONE_BIT);

	if (is_edp(intel_dp)) {
		intel_encoder->clone_mask = (1 << INTEL_EDP_CLONE_BIT);
		/* Delayed task that drops panel VDD after AUX traffic. */
		TIMEOUT_TASK_INIT(dev_priv->tq, &intel_dp->panel_vdd_task, 0,
		    ironlake_panel_vdd_work, intel_dp);
	}

	intel_encoder->crtc_mask = (1 << 0) | (1 << 1) | (1 << 2);
	connector->interlace_allowed = true;
	connector->doublescan_allowed = 0;

	drm_encoder_init(dev, &intel_encoder->base, &intel_dp_enc_funcs,
			 DRM_MODE_ENCODER_TMDS);
	drm_encoder_helper_add(&intel_encoder->base, &intel_dp_helper_funcs);

	intel_connector_attach_encoder(intel_connector, intel_encoder);
#if 0
	drm_sysfs_connector_add(connector);
#endif

	/* Set up the DDC bus. */
	/* NOTE(review): an unrecognized output_reg leaves name == NULL,
	 * which is then passed to intel_dp_i2c_init() below — verify the
	 * callers only pass the registers enumerated here. */
	switch (output_reg) {
		case DP_A:
			name = "DPDDC-A";
			break;
		case DP_B:
		case PCH_DP_B:
			dev_priv->hotplug_supported_mask |=
				HDMIB_HOTPLUG_INT_STATUS;
			name = "DPDDC-B";
			break;
		case DP_C:
		case PCH_DP_C:
			dev_priv->hotplug_supported_mask |=
				HDMIC_HOTPLUG_INT_STATUS;
			name = "DPDDC-C";
			break;
		case DP_D:
		case PCH_DP_D:
			dev_priv->hotplug_supported_mask |=
				HDMID_HOTPLUG_INT_STATUS;
			name = "DPDDC-D";
			break;
	}

	/* Cache some DPCD data in the eDP case */
	if (is_edp(intel_dp)) {
		bool ret;
		struct edp_power_seq	cur, vbt;
		u32 pp_on, pp_off, pp_div;

		pp_on = I915_READ(PCH_PP_ON_DELAYS);
		pp_off = I915_READ(PCH_PP_OFF_DELAYS);
		pp_div = I915_READ(PCH_PP_DIVISOR);

		/* Pull timing values out of registers */
		cur.t1_t3 = (pp_on & PANEL_POWER_UP_DELAY_MASK) >>
			PANEL_POWER_UP_DELAY_SHIFT;

		cur.t8 = (pp_on & PANEL_LIGHT_ON_DELAY_MASK) >>
			PANEL_LIGHT_ON_DELAY_SHIFT;

		cur.t9 = (pp_off & PANEL_LIGHT_OFF_DELAY_MASK) >>
			PANEL_LIGHT_OFF_DELAY_SHIFT;

		cur.t10 = (pp_off & PANEL_POWER_DOWN_DELAY_MASK) >>
			PANEL_POWER_DOWN_DELAY_SHIFT;

		cur.t11_t12 = ((pp_div & PANEL_POWER_CYCLE_DELAY_MASK) >>
			       PANEL_POWER_CYCLE_DELAY_SHIFT) * 1000;

		DRM_DEBUG_KMS("cur t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n",
			      cur.t1_t3, cur.t8, cur.t9, cur.t10, cur.t11_t12);

		vbt = dev_priv->edp.pps;

		DRM_DEBUG_KMS("vbt t1_t3 %d t8 %d t9 %d t10 %d t11_t12 %d\n",
			      vbt.t1_t3, vbt.t8, vbt.t9, vbt.t10, vbt.t11_t12);

/* Take the stricter (longer) of register vs. VBT delay, rounded up to ms. */
#define get_delay(field)	((max(cur.field, vbt.field) + 9) / 10)

		intel_dp->panel_power_up_delay = get_delay(t1_t3);
		intel_dp->backlight_on_delay = get_delay(t8);
		intel_dp->backlight_off_delay = get_delay(t9);
		intel_dp->panel_power_down_delay = get_delay(t10);
		intel_dp->panel_power_cycle_delay = get_delay(t11_t12);

		DRM_DEBUG_KMS("panel power up delay %d, power down delay %d, power cycle delay %d\n",
			      intel_dp->panel_power_up_delay, intel_dp->panel_power_down_delay,
			      intel_dp->panel_power_cycle_delay);

		DRM_DEBUG_KMS("backlight on delay %d, off delay %d\n",
			      intel_dp->backlight_on_delay, intel_dp->backlight_off_delay);

		/* Power VDD just long enough to read the DPCD once. */
		ironlake_edp_panel_vdd_on(intel_dp);
		ret = intel_dp_get_dpcd(intel_dp);
		ironlake_edp_panel_vdd_off(intel_dp, false);

		if (ret) {
			if (intel_dp->dpcd[DP_DPCD_REV] >= 0x11)
				dev_priv->no_aux_handshake =
					intel_dp->dpcd[DP_MAX_DOWNSPREAD] &
					DP_NO_AUX_HANDSHAKE_LINK_TRAINING;
		} else {
			/* if this fails, presume the device is a ghost */
			DRM_INFO("failed to retrieve link info, disabling eDP\n");
			intel_dp_encoder_destroy(&intel_dp->base.base);
			intel_dp_destroy(&intel_connector->base);
			return;
		}
	}

	intel_dp_i2c_init(intel_dp, intel_connector, name);

	intel_encoder->hot_plug = intel_dp_hot_plug;

	if (is_edp(intel_dp)) {
		dev_priv->int_edp_connector = connector;
		intel_panel_setup_backlight(dev);
	}

	intel_dp_add_properties(intel_dp, connector);

	/* For G4X desktop chip, PEG_BAND_GAP_DATA 3:0 must first be written
	 * 0xd.  Failure to do so will result in spurious interrupts being
	 * generated on the port when a cable is not attached.
	 */
	if (IS_G4X(dev) && !IS_GM45(dev)) {
		u32 temp = I915_READ(PEG_BAND_GAP_DATA);
		I915_WRITE(PEG_BAND_GAP_DATA, (temp & ~0xf) | 0xd);
	}
}
2553