1 /*	$NetBSD: amdgpu_atombios_encoders.c,v 1.4 2021/12/18 23:44:58 riastradh Exp $	*/
2 
3 /*
4  * Copyright 2007-11 Advanced Micro Devices, Inc.
5  * Copyright 2008 Red Hat Inc.
6  *
7  * Permission is hereby granted, free of charge, to any person obtaining a
8  * copy of this software and associated documentation files (the "Software"),
9  * to deal in the Software without restriction, including without limitation
10  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
11  * and/or sell copies of the Software, and to permit persons to whom the
12  * Software is furnished to do so, subject to the following conditions:
13  *
14  * The above copyright notice and this permission notice shall be included in
15  * all copies or substantial portions of the Software.
16  *
17  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
20  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
21  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
22  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
23  * OTHER DEALINGS IN THE SOFTWARE.
24  *
25  * Authors: Dave Airlie
26  *          Alex Deucher
27  */
28 
29 #include <sys/cdefs.h>
30 __KERNEL_RCSID(0, "$NetBSD: amdgpu_atombios_encoders.c,v 1.4 2021/12/18 23:44:58 riastradh Exp $");
31 
32 #include <linux/pci.h>
33 
34 #include <drm/drm_crtc_helper.h>
35 #include <drm/amdgpu_drm.h>
36 #include "amdgpu.h"
37 #include "amdgpu_connectors.h"
38 #include "amdgpu_display.h"
39 #include "atom.h"
40 #include "atombios_encoders.h"
41 #include "atombios_dp.h"
42 #include <linux/backlight.h>
43 #include "bif/bif_4_1_d.h"
44 
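/*
 * Read back the current panel backlight level that the ATOM firmware
 * mirrors in the BIOS_SCRATCH_2 register.
 */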
45 u8
46 amdgpu_atombios_encoder_get_backlight_level_from_reg(struct amdgpu_device *adev)
47 {
48 	u8 backlight_level;
49 	u32 bios_2_scratch;
50 
51 	bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
52 
53 	backlight_level = ((bios_2_scratch & ATOM_S2_CURRENT_BL_LEVEL_MASK) >>
54 			   ATOM_S2_CURRENT_BL_LEVEL_SHIFT);
55 
56 	return backlight_level;
57 }
58 
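/*
 * Write a new backlight level into BIOS_SCRATCH_2 without disturbing
 * the other scratch bits.
 */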
59 void
60 amdgpu_atombios_encoder_set_backlight_level_to_reg(struct amdgpu_device *adev,
61 					    u8 backlight_level)
62 {
63 	u32 bios_2_scratch;
64 
65 	bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
66 
67 	bios_2_scratch &= ~ATOM_S2_CURRENT_BL_LEVEL_MASK;
68 	bios_2_scratch |= ((backlight_level << ATOM_S2_CURRENT_BL_LEVEL_SHIFT) &
69 			   ATOM_S2_CURRENT_BL_LEVEL_MASK);
70 
71 	WREG32(mmBIOS_SCRATCH_2, bios_2_scratch);
72 }
73 
74 u8
75 amdgpu_atombios_encoder_get_backlight_level(struct amdgpu_encoder *amdgpu_encoder)
76 {
77 	struct drm_device *dev = amdgpu_encoder->base.dev;
78 	struct amdgpu_device *adev = dev->dev_private;
79 
80 	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
81 		return 0;
82 
83 	return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
84 }
85 
86 void
87 amdgpu_atombios_encoder_set_backlight_level(struct amdgpu_encoder *amdgpu_encoder,
88 				     u8 level)
89 {
90 	struct drm_encoder *encoder = &amdgpu_encoder->base;
91 	struct drm_device *dev = amdgpu_encoder->base.dev;
92 	struct amdgpu_device *adev = dev->dev_private;
93 	struct amdgpu_encoder_atom_dig *dig;
94 
95 	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
96 		return;
97 
98 	if ((amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) &&
99 	    amdgpu_encoder->enc_priv) {
100 		dig = amdgpu_encoder->enc_priv;
101 		dig->backlight_level = level;
102 		amdgpu_atombios_encoder_set_backlight_level_to_reg(adev, dig->backlight_level);
103 
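		/*
		 * For the DIG encoders a level of 0 turns the panel
		 * backlight off entirely; any other value programs the
		 * brightness and makes sure the backlight is on.
		 */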
104 		switch (amdgpu_encoder->encoder_id) {
105 		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
106 		case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
107 		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
108 		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
109 		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
110 			if (dig->backlight_level == 0)
111 				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
112 								       ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
113 			else {
114 				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
115 								       ATOM_TRANSMITTER_ACTION_BL_BRIGHTNESS_CONTROL, 0, 0);
116 				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
117 								       ATOM_TRANSMITTER_ACTION_LCD_BLON, 0, 0);
118 			}
119 			break;
120 		default:
121 			break;
122 		}
123 	}
124 }
125 
126 #if IS_ENABLED(CONFIG_BACKLIGHT_CLASS_DEVICE) || IS_ENABLED(CONFIG_BACKLIGHT_CLASS_DEVICE_MODULE)
127 
128 static u8 amdgpu_atombios_encoder_backlight_level(struct backlight_device *bd)
129 {
130 	u8 level;
131 
132 	/* Convert brightness to hardware level */
133 	if (bd->props.brightness < 0)
134 		level = 0;
135 	else if (bd->props.brightness > AMDGPU_MAX_BL_LEVEL)
136 		level = AMDGPU_MAX_BL_LEVEL;
137 	else
138 		level = bd->props.brightness;
139 
140 	return level;
141 }
142 
143 static int amdgpu_atombios_encoder_update_backlight_status(struct backlight_device *bd)
144 {
145 	struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
146 	struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
147 
148 	amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder,
149 					     amdgpu_atombios_encoder_backlight_level(bd));
150 
151 	return 0;
152 }
153 
154 static int
155 amdgpu_atombios_encoder_get_backlight_brightness(struct backlight_device *bd)
156 {
157 	struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
158 	struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
159 	struct drm_device *dev = amdgpu_encoder->base.dev;
160 	struct amdgpu_device *adev = dev->dev_private;
161 
162 	return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
163 }
164 
165 static const struct backlight_ops amdgpu_atombios_encoder_backlight_ops = {
166 	.get_brightness = amdgpu_atombios_encoder_get_backlight_brightness,
167 	.update_status	= amdgpu_atombios_encoder_update_backlight_status,
168 };
169 
170 void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *amdgpu_encoder,
171 				     struct drm_connector *drm_connector)
172 {
173 	struct drm_device *dev = amdgpu_encoder->base.dev;
174 	struct amdgpu_device *adev = dev->dev_private;
175 	struct backlight_device *bd;
176 	struct backlight_properties props;
177 	struct amdgpu_backlight_privdata *pdata;
178 	struct amdgpu_encoder_atom_dig *dig;
179 	u8 backlight_level;
180 	char bl_name[16];
181 
182 	/* Mac laptops with multiple GPUs use the gmux driver for backlight
183 	 * so don't register a backlight device
184 	 */
185 	if ((adev->pdev->subsystem_vendor == PCI_VENDOR_ID_APPLE) &&
186 	    (adev->pdev->device == 0x6741))
187 		return;
188 
189 	if (!amdgpu_encoder->enc_priv)
190 		return;
191 
192 	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
193 		return;
194 
195 	pdata = kmalloc(sizeof(struct amdgpu_backlight_privdata), GFP_KERNEL);
196 	if (!pdata) {
197 		DRM_ERROR("Memory allocation failed\n");
198 		goto error;
199 	}
200 
201 	memset(&props, 0, sizeof(props));
202 	props.max_brightness = AMDGPU_MAX_BL_LEVEL;
203 	props.type = BACKLIGHT_RAW;
204 	snprintf(bl_name, sizeof(bl_name),
205 		 "amdgpu_bl%d", dev->primary->index);
206 	bd = backlight_device_register(bl_name, drm_connector->kdev,
207 				       pdata, &amdgpu_atombios_encoder_backlight_ops, &props);
208 	if (IS_ERR(bd)) {
209 		DRM_ERROR("Backlight registration failed\n");
210 		goto error;
211 	}
212 
213 	pdata->encoder = amdgpu_encoder;
214 
215 	backlight_level = amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
216 
217 	dig = amdgpu_encoder->enc_priv;
218 	dig->bl_dev = bd;
219 
220 	bd->props.brightness = amdgpu_atombios_encoder_get_backlight_brightness(bd);
221 	bd->props.power = FB_BLANK_UNBLANK;
222 	backlight_update_status(bd);
223 
224 	DRM_INFO("amdgpu atom DIG backlight initialized\n");
225 
226 	return;
227 
228 error:
229 	kfree(pdata);
230 	return;
231 }
232 
233 void
234 amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *amdgpu_encoder)
235 {
236 	struct drm_device *dev = amdgpu_encoder->base.dev;
237 	struct amdgpu_device *adev = dev->dev_private;
238 	struct backlight_device *bd = NULL;
239 	struct amdgpu_encoder_atom_dig *dig;
240 
241 	if (!amdgpu_encoder->enc_priv)
242 		return;
243 
244 	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
245 		return;
246 
247 	dig = amdgpu_encoder->enc_priv;
248 	bd = dig->bl_dev;
249 	dig->bl_dev = NULL;
250 
251 	if (bd) {
252 		struct amdgpu_legacy_backlight_privdata *pdata;
253 
254 		pdata = bl_get_data(bd);
255 		backlight_device_unregister(bd);
256 		kfree(pdata);
257 
258 		DRM_INFO("amdgpu atom LVDS backlight unloaded\n");
259 	}
260 }
261 
262 #else /* !CONFIG_BACKLIGHT_CLASS_DEVICE */
263 
264 void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *encoder,
265     struct drm_connector *drm_connector)
266 {
267 }
268 
269 void amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *encoder)
270 {
271 }
272 
273 #endif
274 
275 bool amdgpu_atombios_encoder_is_digital(struct drm_encoder *encoder)
276 {
277 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
278 	switch (amdgpu_encoder->encoder_id) {
279 	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
280 	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
281 	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
282 	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
283 	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
284 		return true;
285 	default:
286 		return false;
287 	}
288 }
289 
290 bool amdgpu_atombios_encoder_mode_fixup(struct drm_encoder *encoder,
291 				 const struct drm_display_mode *mode,
292 				 struct drm_display_mode *adjusted_mode)
293 {
294 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
295 
296 	/* set the active encoder to connector routing */
297 	amdgpu_encoder_set_active_device(encoder);
298 	drm_mode_set_crtcinfo(adjusted_mode, 0);
299 
300 	/* hw bug */
301 	if ((mode->flags & DRM_MODE_FLAG_INTERLACE)
302 	    && (mode->crtc_vsync_start < (mode->crtc_vdisplay + 2)))
303 		adjusted_mode->crtc_vsync_start = adjusted_mode->crtc_vdisplay + 2;
304 
305 	/* vertical FP must be at least 1 */
306 	if (mode->crtc_vsync_start == mode->crtc_vdisplay)
307 		adjusted_mode->crtc_vsync_start++;
308 
309 	/* get the native mode for scaling */
310 	if (amdgpu_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT))
311 		amdgpu_panel_mode_fixup(encoder, adjusted_mode);
312 	else if (amdgpu_encoder->rmx_type != RMX_OFF)
313 		amdgpu_panel_mode_fixup(encoder, adjusted_mode);
314 
315 	if ((amdgpu_encoder->active_device & (ATOM_DEVICE_DFP_SUPPORT | ATOM_DEVICE_LCD_SUPPORT)) ||
316 	    (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) {
317 		struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
318 		amdgpu_atombios_dp_set_link_config(connector, adjusted_mode);
319 	}
320 
321 	return true;
322 }
323 
324 static void
325 amdgpu_atombios_encoder_setup_dac(struct drm_encoder *encoder, int action)
326 {
327 	struct drm_device *dev = encoder->dev;
328 	struct amdgpu_device *adev = dev->dev_private;
329 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
330 	DAC_ENCODER_CONTROL_PS_ALLOCATION args;
331 	int index = 0;
332 
333 	memset(&args, 0, sizeof(args));
334 
335 	switch (amdgpu_encoder->encoder_id) {
336 	case ENCODER_OBJECT_ID_INTERNAL_DAC1:
337 	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
338 		index = GetIndexIntoMasterTable(COMMAND, DAC1EncoderControl);
339 		break;
340 	case ENCODER_OBJECT_ID_INTERNAL_DAC2:
341 	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
342 		index = GetIndexIntoMasterTable(COMMAND, DAC2EncoderControl);
343 		break;
344 	}
345 
346 	args.ucAction = action;
347 	args.ucDacStandard = ATOM_DAC1_PS2;
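	/* ATOM command tables take clocks in units of 10 kHz */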
348 	args.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
349 
350 	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
351 
352 }
353 
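/*
 * Map the bpc configured on the CRTC to the ATOM PANEL_*BIT_PER_COLOR
 * encoding; 8 bpc is used when no CRTC is attached yet.
 */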
354 static u8 amdgpu_atombios_encoder_get_bpc(struct drm_encoder *encoder)
355 {
356 	int bpc = 8;
357 
358 	if (encoder->crtc) {
359 		struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
360 		bpc = amdgpu_crtc->bpc;
361 	}
362 
363 	switch (bpc) {
364 	case 0:
365 		return PANEL_BPC_UNDEFINE;
366 	case 6:
367 		return PANEL_6BIT_PER_COLOR;
368 	case 8:
369 	default:
370 		return PANEL_8BIT_PER_COLOR;
371 	case 10:
372 		return PANEL_10BIT_PER_COLOR;
373 	case 12:
374 		return PANEL_12BIT_PER_COLOR;
375 	case 16:
376 		return PANEL_16BIT_PER_COLOR;
377 	}
378 }
379 
380 union dvo_encoder_control {
381 	ENABLE_EXTERNAL_TMDS_ENCODER_PS_ALLOCATION ext_tmds;
382 	DVO_ENCODER_CONTROL_PS_ALLOCATION dvo;
383 	DVO_ENCODER_CONTROL_PS_ALLOCATION_V3 dvo_v3;
384 	DVO_ENCODER_CONTROL_PS_ALLOCATION_V1_4 dvo_v4;
385 };
386 
387 static void
388 amdgpu_atombios_encoder_setup_dvo(struct drm_encoder *encoder, int action)
389 {
390 	struct drm_device *dev = encoder->dev;
391 	struct amdgpu_device *adev = dev->dev_private;
392 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
393 	union dvo_encoder_control args;
394 	int index = GetIndexIntoMasterTable(COMMAND, DVOEncoderControl);
395 	uint8_t frev, crev;
396 
397 	memset(&args, 0, sizeof(args));
398 
399 	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
400 		return;
401 
402 	switch (frev) {
403 	case 1:
404 		switch (crev) {
405 		case 1:
406 			/* R4xx, R5xx */
407 			args.ext_tmds.sXTmdsEncoder.ucEnable = action;
408 
409 			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
410 				args.ext_tmds.sXTmdsEncoder.ucMisc |= PANEL_ENCODER_MISC_DUAL;
411 
412 			args.ext_tmds.sXTmdsEncoder.ucMisc |= ATOM_PANEL_MISC_888RGB;
413 			break;
414 		case 2:
415 			/* RS600/690/740 */
416 			args.dvo.sDVOEncoder.ucAction = action;
417 			args.dvo.sDVOEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
418 			/* DFP1, CRT1, TV1 depending on the type of port */
419 			args.dvo.sDVOEncoder.ucDeviceType = ATOM_DEVICE_DFP1_INDEX;
420 
421 			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
422 				args.dvo.sDVOEncoder.usDevAttr.sDigAttrib.ucAttribute |= PANEL_ENCODER_MISC_DUAL;
423 			break;
424 		case 3:
425 			/* R6xx */
426 			args.dvo_v3.ucAction = action;
427 			args.dvo_v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
428 			args.dvo_v3.ucDVOConfig = 0; /* XXX */
429 			break;
430 		case 4:
431 			/* DCE8 */
432 			args.dvo_v4.ucAction = action;
433 			args.dvo_v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
434 			args.dvo_v4.ucDVOConfig = 0; /* XXX */
435 			args.dvo_v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
436 			break;
437 		default:
438 			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
439 			break;
440 		}
441 		break;
442 	default:
443 		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
444 		break;
445 	}
446 
447 	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
448 }
449 
450 int amdgpu_atombios_encoder_get_encoder_mode(struct drm_encoder *encoder)
451 {
452 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
453 	struct drm_connector *connector;
454 	struct amdgpu_connector *amdgpu_connector;
455 	struct amdgpu_connector_atom_dig *dig_connector;
456 
457 	/* dp bridges are always DP */
458 	if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)
459 		return ATOM_ENCODER_MODE_DP;
460 
461 	/* DVO is always DVO */
462 	if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DVO1) ||
463 	    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1))
464 		return ATOM_ENCODER_MODE_DVO;
465 
466 	connector = amdgpu_get_connector_for_encoder(encoder);
467 	/* if we don't have an active device yet, just use one of
468 	 * the connectors tied to the encoder.
469 	 */
470 	if (!connector)
471 		connector = amdgpu_get_connector_for_encoder_init(encoder);
472 	amdgpu_connector = to_amdgpu_connector(connector);
473 
474 	switch (connector->connector_type) {
475 	case DRM_MODE_CONNECTOR_DVII:
476 	case DRM_MODE_CONNECTOR_HDMIB: /* HDMI-B is basically DL-DVI; analog works fine */
477 		if (amdgpu_audio != 0) {
478 			if (amdgpu_connector->use_digital &&
479 			    (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE))
480 				return ATOM_ENCODER_MODE_HDMI;
481 			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
482 				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
483 				return ATOM_ENCODER_MODE_HDMI;
484 			else if (amdgpu_connector->use_digital)
485 				return ATOM_ENCODER_MODE_DVI;
486 			else
487 				return ATOM_ENCODER_MODE_CRT;
488 		} else if (amdgpu_connector->use_digital) {
489 			return ATOM_ENCODER_MODE_DVI;
490 		} else {
491 			return ATOM_ENCODER_MODE_CRT;
492 		}
493 		break;
494 	case DRM_MODE_CONNECTOR_DVID:
495 	case DRM_MODE_CONNECTOR_HDMIA:
496 	default:
497 		if (amdgpu_audio != 0) {
498 			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
499 				return ATOM_ENCODER_MODE_HDMI;
500 			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
501 				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
502 				return ATOM_ENCODER_MODE_HDMI;
503 			else
504 				return ATOM_ENCODER_MODE_DVI;
505 		} else {
506 			return ATOM_ENCODER_MODE_DVI;
507 		}
508 		break;
509 	case DRM_MODE_CONNECTOR_LVDS:
510 		return ATOM_ENCODER_MODE_LVDS;
511 		break;
512 	case DRM_MODE_CONNECTOR_DisplayPort:
513 		dig_connector = amdgpu_connector->con_priv;
514 		if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
515 		    (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
516 			return ATOM_ENCODER_MODE_DP;
517 		} else if (amdgpu_audio != 0) {
518 			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
519 				return ATOM_ENCODER_MODE_HDMI;
520 			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
521 				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
522 				return ATOM_ENCODER_MODE_HDMI;
523 			else
524 				return ATOM_ENCODER_MODE_DVI;
525 		} else {
526 			return ATOM_ENCODER_MODE_DVI;
527 		}
528 		break;
529 	case DRM_MODE_CONNECTOR_eDP:
530 		return ATOM_ENCODER_MODE_DP;
531 	case DRM_MODE_CONNECTOR_DVIA:
532 	case DRM_MODE_CONNECTOR_VGA:
533 		return ATOM_ENCODER_MODE_CRT;
534 		break;
535 	case DRM_MODE_CONNECTOR_Composite:
536 	case DRM_MODE_CONNECTOR_SVIDEO:
537 	case DRM_MODE_CONNECTOR_9PinDIN:
538 		/* fix me */
539 		return ATOM_ENCODER_MODE_TV;
540 		/*return ATOM_ENCODER_MODE_CV;*/
541 		break;
542 	}
543 }
544 
545 /*
546  * DIG Encoder/Transmitter Setup
547  *
548  * DCE 6.0
549  * - 3 DIG transmitter blocks UNIPHY0/1/2 (links A and B).
550  * Supports up to 6 digital outputs
551  * - 6 DIG encoder blocks.
552  * - DIG to PHY mapping is hardcoded
553  * DIG1 drives UNIPHY0 link A, A+B
554  * DIG2 drives UNIPHY0 link B
555  * DIG3 drives UNIPHY1 link A, A+B
556  * DIG4 drives UNIPHY1 link B
557  * DIG5 drives UNIPHY2 link A, A+B
558  * DIG6 drives UNIPHY2 link B
559  *
560  * Routing
561  * crtc -> dig encoder -> UNIPHY/LVTMA (1 or 2 links)
562  * Examples:
563  * crtc0 -> dig2 -> LVTMA   links A+B -> TMDS/HDMI
564  * crtc1 -> dig1 -> UNIPHY0 link  B   -> DP
565  * crtc0 -> dig1 -> UNIPHY2 link  A   -> LVDS
566  * crtc1 -> dig2 -> UNIPHY1 link  B+A -> TMDS/HDMI
567  */
568 
569 union dig_encoder_control {
570 	DIG_ENCODER_CONTROL_PS_ALLOCATION v1;
571 	DIG_ENCODER_CONTROL_PARAMETERS_V2 v2;
572 	DIG_ENCODER_CONTROL_PARAMETERS_V3 v3;
573 	DIG_ENCODER_CONTROL_PARAMETERS_V4 v4;
574 	DIG_ENCODER_CONTROL_PARAMETERS_V5 v5;
575 };
576 
577 void
578 amdgpu_atombios_encoder_setup_dig_encoder(struct drm_encoder *encoder,
579 				   int action, int panel_mode)
580 {
581 	struct drm_device *dev = encoder->dev;
582 	struct amdgpu_device *adev = dev->dev_private;
583 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
584 	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
585 	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
586 	union dig_encoder_control args;
587 	int index = GetIndexIntoMasterTable(COMMAND, DIGxEncoderControl);
588 	uint8_t frev, crev;
589 	int dp_clock = 0;
590 	int dp_lane_count = 0;
591 	int hpd_id = AMDGPU_HPD_NONE;
592 
593 	if (connector) {
594 		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
595 		struct amdgpu_connector_atom_dig *dig_connector =
596 			amdgpu_connector->con_priv;
597 
598 		dp_clock = dig_connector->dp_clock;
599 		dp_lane_count = dig_connector->dp_lane_count;
600 		hpd_id = amdgpu_connector->hpd.hpd;
601 	}
602 
603 	/* no dig encoder assigned */
604 	if (dig->dig_encoder == -1)
605 		return;
606 
607 	memset(&args, 0, sizeof(args));
608 
609 	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
610 		return;
611 
612 	switch (frev) {
613 	case 1:
614 		switch (crev) {
615 		case 1:
616 			args.v1.ucAction = action;
617 			args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
618 			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
619 				args.v3.ucPanelMode = panel_mode;
620 			else
621 				args.v1.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
622 
623 			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode))
624 				args.v1.ucLaneNum = dp_lane_count;
625 			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
626 				args.v1.ucLaneNum = 8;
627 			else
628 				args.v1.ucLaneNum = 4;
629 
630 			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode) && (dp_clock == 270000))
631 				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
632 			switch (amdgpu_encoder->encoder_id) {
633 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
634 				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER1;
635 				break;
636 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
637 			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
638 				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER2;
639 				break;
640 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
641 				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER3;
642 				break;
643 			}
644 			if (dig->linkb)
645 				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKB;
646 			else
647 				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKA;
648 			break;
649 		case 2:
650 		case 3:
651 			args.v3.ucAction = action;
652 			args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
653 			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
654 				args.v3.ucPanelMode = panel_mode;
655 			else
656 				args.v3.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
657 
658 			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode))
659 				args.v3.ucLaneNum = dp_lane_count;
660 			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
661 				args.v3.ucLaneNum = 8;
662 			else
663 				args.v3.ucLaneNum = 4;
664 
665 			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode) && (dp_clock == 270000))
666 				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
667 			args.v3.acConfig.ucDigSel = dig->dig_encoder;
668 			args.v3.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
669 			break;
670 		case 4:
671 			args.v4.ucAction = action;
672 			args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
673 			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
674 				args.v4.ucPanelMode = panel_mode;
675 			else
676 				args.v4.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
677 
678 			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode))
679 				args.v4.ucLaneNum = dp_lane_count;
680 			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
681 				args.v4.ucLaneNum = 8;
682 			else
683 				args.v4.ucLaneNum = 4;
684 
685 			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode)) {
686 				if (dp_clock == 540000)
687 					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_5_40GHZ;
688 				else if (dp_clock == 324000)
689 					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_3_24GHZ;
690 				else if (dp_clock == 270000)
691 					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_2_70GHZ;
692 				else
693 					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_1_62GHZ;
694 			}
695 			args.v4.acConfig.ucDigSel = dig->dig_encoder;
696 			args.v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
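			/* ucHPD_ID is 1-based; 0 means no HPD pin assigned */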
697 			if (hpd_id == AMDGPU_HPD_NONE)
698 				args.v4.ucHPD_ID = 0;
699 			else
700 				args.v4.ucHPD_ID = hpd_id + 1;
701 			break;
702 		case 5:
703 			switch (action) {
704 			case ATOM_ENCODER_CMD_SETUP_PANEL_MODE:
705 				args.v5.asDPPanelModeParam.ucAction = action;
706 				args.v5.asDPPanelModeParam.ucPanelMode = panel_mode;
707 				args.v5.asDPPanelModeParam.ucDigId = dig->dig_encoder;
708 				break;
709 			case ATOM_ENCODER_CMD_STREAM_SETUP:
710 				args.v5.asStreamParam.ucAction = action;
711 				args.v5.asStreamParam.ucDigId = dig->dig_encoder;
712 				args.v5.asStreamParam.ucDigMode =
713 					amdgpu_atombios_encoder_get_encoder_mode(encoder);
714 				if (ENCODER_MODE_IS_DP(args.v5.asStreamParam.ucDigMode))
715 					args.v5.asStreamParam.ucLaneNum = dp_lane_count;
716 				else if (amdgpu_dig_monitor_is_duallink(encoder,
717 									amdgpu_encoder->pixel_clock))
718 					args.v5.asStreamParam.ucLaneNum = 8;
719 				else
720 					args.v5.asStreamParam.ucLaneNum = 4;
721 				args.v5.asStreamParam.ulPixelClock =
722 					cpu_to_le32(amdgpu_encoder->pixel_clock / 10);
723 				args.v5.asStreamParam.ucBitPerColor =
724 					amdgpu_atombios_encoder_get_bpc(encoder);
725 				args.v5.asStreamParam.ucLinkRateIn270Mhz = dp_clock / 27000;
726 				break;
727 			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_START:
728 			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1:
729 			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2:
730 			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3:
731 			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN4:
732 			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE:
733 			case ATOM_ENCODER_CMD_DP_VIDEO_OFF:
734 			case ATOM_ENCODER_CMD_DP_VIDEO_ON:
735 				args.v5.asCmdParam.ucAction = action;
736 				args.v5.asCmdParam.ucDigId = dig->dig_encoder;
737 				break;
738 			default:
739 				DRM_ERROR("Unsupported action 0x%x\n", action);
740 				break;
741 			}
742 			break;
743 		default:
744 			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
745 			break;
746 		}
747 		break;
748 	default:
749 		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
750 		break;
751 	}
752 
753 	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
754 
755 }
756 
757 union dig_transmitter_control {
758 	DIG_TRANSMITTER_CONTROL_PS_ALLOCATION v1;
759 	DIG_TRANSMITTER_CONTROL_PARAMETERS_V2 v2;
760 	DIG_TRANSMITTER_CONTROL_PARAMETERS_V3 v3;
761 	DIG_TRANSMITTER_CONTROL_PARAMETERS_V4 v4;
762 	DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_5 v5;
763 	DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_6 v6;
764 };
765 
766 void
767 amdgpu_atombios_encoder_setup_dig_transmitter(struct drm_encoder *encoder, int action,
768 					      uint8_t lane_num, uint8_t lane_set)
769 {
770 	struct drm_device *dev = encoder->dev;
771 	struct amdgpu_device *adev = dev->dev_private;
772 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
773 	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
774 	struct drm_connector *connector;
775 	union dig_transmitter_control args;
776 	int index = 0;
777 	uint8_t frev, crev;
778 	bool is_dp = false;
779 	int pll_id = 0;
780 	int dp_clock = 0;
781 	int dp_lane_count = 0;
782 	int connector_object_id = 0;
783 	int igp_lane_info = 0;
784 	int dig_encoder = dig->dig_encoder;
785 	int hpd_id = AMDGPU_HPD_NONE;
786 
787 	if (action == ATOM_TRANSMITTER_ACTION_INIT) {
788 		connector = amdgpu_get_connector_for_encoder_init(encoder);
789 		/* just needed to avoid bailing in the encoder check.  the encoder
790 		 * isn't used for init
791 		 */
792 		dig_encoder = 0;
793 	} else
794 		connector = amdgpu_get_connector_for_encoder(encoder);
795 
796 	if (connector) {
797 		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
798 		struct amdgpu_connector_atom_dig *dig_connector =
799 			amdgpu_connector->con_priv;
800 
801 		hpd_id = amdgpu_connector->hpd.hpd;
802 		dp_clock = dig_connector->dp_clock;
803 		dp_lane_count = dig_connector->dp_lane_count;
804 		connector_object_id =
805 			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
806 	}
807 
808 	if (encoder->crtc) {
809 		struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
810 		pll_id = amdgpu_crtc->pll_id;
811 	}
812 
813 	/* no dig encoder assigned */
814 	if (dig_encoder == -1)
815 		return;
816 
817 	if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)))
818 		is_dp = true;
819 
820 	memset(&args, 0, sizeof(args));
821 
822 	switch (amdgpu_encoder->encoder_id) {
823 	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
824 		index = GetIndexIntoMasterTable(COMMAND, DVOOutputControl);
825 		break;
826 	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
827 	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
828 	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
829 	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
830 		index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
831 		break;
832 	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
833 		index = GetIndexIntoMasterTable(COMMAND, LVTMATransmitterControl);
834 		break;
835 	}
836 
837 	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
838 		return;
839 
840 	switch (frev) {
841 	case 1:
842 		switch (crev) {
843 		case 1:
844 			args.v1.ucAction = action;
845 			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
846 				args.v1.usInitInfo = cpu_to_le16(connector_object_id);
847 			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
848 				args.v1.asMode.ucLaneSel = lane_num;
849 				args.v1.asMode.ucLaneSet = lane_set;
850 			} else {
851 				if (is_dp)
852 					args.v1.usPixelClock = cpu_to_le16(dp_clock / 10);
853 				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
854 					args.v1.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
855 				else
856 					args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
857 			}
858 
859 			args.v1.ucConfig = ATOM_TRANSMITTER_CONFIG_CLKSRC_PPLL;
860 
861 			if (dig_encoder)
862 				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG2_ENCODER;
863 			else
864 				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG1_ENCODER;
865 
866 			if ((adev->flags & AMD_IS_APU) &&
867 			    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_UNIPHY)) {
868 				if (is_dp ||
869 				    !amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) {
870 					if (igp_lane_info & 0x1)
871 						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_3;
872 					else if (igp_lane_info & 0x2)
873 						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_4_7;
874 					else if (igp_lane_info & 0x4)
875 						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_11;
876 					else if (igp_lane_info & 0x8)
877 						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_12_15;
878 				} else {
879 					if (igp_lane_info & 0x3)
880 						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_7;
881 					else if (igp_lane_info & 0xc)
882 						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_15;
883 				}
884 			}
885 
886 			if (dig->linkb)
887 				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKB;
888 			else
889 				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKA;
890 
891 			if (is_dp)
892 				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
893 			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
894 				if (dig->coherent_mode)
895 					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
896 				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
897 					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_8LANE_LINK;
898 			}
899 			break;
900 		case 2:
901 			args.v2.ucAction = action;
902 			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
903 				args.v2.usInitInfo = cpu_to_le16(connector_object_id);
904 			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
905 				args.v2.asMode.ucLaneSel = lane_num;
906 				args.v2.asMode.ucLaneSet = lane_set;
907 			} else {
908 				if (is_dp)
909 					args.v2.usPixelClock = cpu_to_le16(dp_clock / 10);
910 				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
911 					args.v2.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
912 				else
913 					args.v2.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
914 			}
915 
916 			args.v2.acConfig.ucEncoderSel = dig_encoder;
917 			if (dig->linkb)
918 				args.v2.acConfig.ucLinkSel = 1;
919 
920 			switch (amdgpu_encoder->encoder_id) {
921 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
922 				args.v2.acConfig.ucTransmitterSel = 0;
923 				break;
924 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
925 				args.v2.acConfig.ucTransmitterSel = 1;
926 				break;
927 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
928 				args.v2.acConfig.ucTransmitterSel = 2;
929 				break;
930 			}
931 
932 			if (is_dp) {
933 				args.v2.acConfig.fCoherentMode = 1;
934 				args.v2.acConfig.fDPConnector = 1;
935 			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
936 				if (dig->coherent_mode)
937 					args.v2.acConfig.fCoherentMode = 1;
938 				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
939 					args.v2.acConfig.fDualLinkConnector = 1;
940 			}
941 			break;
942 		case 3:
943 			args.v3.ucAction = action;
944 			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
945 				args.v3.usInitInfo = cpu_to_le16(connector_object_id);
946 			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
947 				args.v3.asMode.ucLaneSel = lane_num;
948 				args.v3.asMode.ucLaneSet = lane_set;
949 			} else {
950 				if (is_dp)
951 					args.v3.usPixelClock = cpu_to_le16(dp_clock / 10);
952 				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
953 					args.v3.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
954 				else
955 					args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
956 			}
957 
958 			if (is_dp)
959 				args.v3.ucLaneNum = dp_lane_count;
960 			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
961 				args.v3.ucLaneNum = 8;
962 			else
963 				args.v3.ucLaneNum = 4;
964 
965 			if (dig->linkb)
966 				args.v3.acConfig.ucLinkSel = 1;
967 			if (dig_encoder & 1)
968 				args.v3.acConfig.ucEncoderSel = 1;
969 
970 			/* Select the PLL for the PHY
971 			 * DP PHY should be clocked from external src if there is
972 			 * one.
973 			 */
974 			/* On DCE4, if there is an external clock, it generates the DP ref clock */
975 			if (is_dp && adev->clock.dp_extclk)
976 				args.v3.acConfig.ucRefClkSource = 2; /* external src */
977 			else
978 				args.v3.acConfig.ucRefClkSource = pll_id;
979 
980 			switch (amdgpu_encoder->encoder_id) {
981 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
982 				args.v3.acConfig.ucTransmitterSel = 0;
983 				break;
984 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
985 				args.v3.acConfig.ucTransmitterSel = 1;
986 				break;
987 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
988 				args.v3.acConfig.ucTransmitterSel = 2;
989 				break;
990 			}
991 
992 			if (is_dp)
993 				args.v3.acConfig.fCoherentMode = 1; /* DP requires coherent */
994 			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
995 				if (dig->coherent_mode)
996 					args.v3.acConfig.fCoherentMode = 1;
997 				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
998 					args.v3.acConfig.fDualLinkConnector = 1;
999 			}
1000 			break;
1001 		case 4:
1002 			args.v4.ucAction = action;
1003 			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
1004 				args.v4.usInitInfo = cpu_to_le16(connector_object_id);
1005 			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
1006 				args.v4.asMode.ucLaneSel = lane_num;
1007 				args.v4.asMode.ucLaneSet = lane_set;
1008 			} else {
1009 				if (is_dp)
1010 					args.v4.usPixelClock = cpu_to_le16(dp_clock / 10);
1011 				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1012 					args.v4.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
1013 				else
1014 					args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1015 			}
1016 
1017 			if (is_dp)
1018 				args.v4.ucLaneNum = dp_lane_count;
1019 			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1020 				args.v4.ucLaneNum = 8;
1021 			else
1022 				args.v4.ucLaneNum = 4;
1023 
1024 			if (dig->linkb)
1025 				args.v4.acConfig.ucLinkSel = 1;
1026 			if (dig_encoder & 1)
1027 				args.v4.acConfig.ucEncoderSel = 1;
1028 
1029 			/* Select the PLL for the PHY
1030 			 * DP PHY should be clocked from external src if there is
1031 			 * one.
1032 			 */
1033 			/* On DCE5 DCPLL usually generates the DP ref clock */
1034 			if (is_dp) {
1035 				if (adev->clock.dp_extclk)
1036 					args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_EXTCLK;
1037 				else
1038 					args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_DCPLL;
1039 			} else
1040 				args.v4.acConfig.ucRefClkSource = pll_id;
1041 
1042 			switch (amdgpu_encoder->encoder_id) {
1043 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1044 				args.v4.acConfig.ucTransmitterSel = 0;
1045 				break;
1046 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1047 				args.v4.acConfig.ucTransmitterSel = 1;
1048 				break;
1049 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1050 				args.v4.acConfig.ucTransmitterSel = 2;
1051 				break;
1052 			}
1053 
1054 			if (is_dp)
1055 				args.v4.acConfig.fCoherentMode = 1; /* DP requires coherent */
1056 			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
1057 				if (dig->coherent_mode)
1058 					args.v4.acConfig.fCoherentMode = 1;
1059 				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1060 					args.v4.acConfig.fDualLinkConnector = 1;
1061 			}
1062 			break;
1063 		case 5:
1064 			args.v5.ucAction = action;
1065 			if (is_dp)
1066 				args.v5.usSymClock = cpu_to_le16(dp_clock / 10);
1067 			else
1068 				args.v5.usSymClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1069 
1070 			switch (amdgpu_encoder->encoder_id) {
1071 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1072 				if (dig->linkb)
1073 					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYB;
1074 				else
1075 					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYA;
1076 				break;
1077 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1078 				if (dig->linkb)
1079 					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYD;
1080 				else
1081 					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYC;
1082 				break;
1083 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1084 				if (dig->linkb)
1085 					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYF;
1086 				else
1087 					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYE;
1088 				break;
1089 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1090 				args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYG;
1091 				break;
1092 			}
1093 			if (is_dp)
1094 				args.v5.ucLaneNum = dp_lane_count;
1095 			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1096 				args.v5.ucLaneNum = 8;
1097 			else
1098 				args.v5.ucLaneNum = 4;
1099 			args.v5.ucConnObjId = connector_object_id;
1100 			args.v5.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1101 
1102 			if (is_dp && adev->clock.dp_extclk)
1103 				args.v5.asConfig.ucPhyClkSrcId = ENCODER_REFCLK_SRC_EXTCLK;
1104 			else
1105 				args.v5.asConfig.ucPhyClkSrcId = pll_id;
1106 
1107 			if (is_dp)
1108 				args.v5.asConfig.ucCoherentMode = 1; /* DP requires coherent */
1109 			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
1110 				if (dig->coherent_mode)
1111 					args.v5.asConfig.ucCoherentMode = 1;
1112 			}
1113 			if (hpd_id == AMDGPU_HPD_NONE)
1114 				args.v5.asConfig.ucHPDSel = 0;
1115 			else
1116 				args.v5.asConfig.ucHPDSel = hpd_id + 1;
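			/* one bit per DIG encoder feeding this PHY */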
1117 			args.v5.ucDigEncoderSel = 1 << dig_encoder;
1118 			args.v5.ucDPLaneSet = lane_set;
1119 			break;
1120 		case 6:
1121 			args.v6.ucAction = action;
1122 			if (is_dp)
1123 				args.v6.ulSymClock = cpu_to_le32(dp_clock / 10);
1124 			else
1125 				args.v6.ulSymClock = cpu_to_le32(amdgpu_encoder->pixel_clock / 10);
1126 
1127 			switch (amdgpu_encoder->encoder_id) {
1128 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1129 				if (dig->linkb)
1130 					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYB;
1131 				else
1132 					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYA;
1133 				break;
1134 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1135 				if (dig->linkb)
1136 					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYD;
1137 				else
1138 					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYC;
1139 				break;
1140 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1141 				if (dig->linkb)
1142 					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYF;
1143 				else
1144 					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYE;
1145 				break;
1146 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1147 				args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYG;
1148 				break;
1149 			}
1150 			if (is_dp)
1151 				args.v6.ucLaneNum = dp_lane_count;
1152 			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1153 				args.v6.ucLaneNum = 8;
1154 			else
1155 				args.v6.ucLaneNum = 4;
1156 			args.v6.ucConnObjId = connector_object_id;
1157 			if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH)
1158 				args.v6.ucDPLaneSet = lane_set;
1159 			else
1160 				args.v6.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1161 
1162 			if (hpd_id == AMDGPU_HPD_NONE)
1163 				args.v6.ucHPDSel = 0;
1164 			else
1165 				args.v6.ucHPDSel = hpd_id + 1;
1166 			args.v6.ucDigEncoderSel = 1 << dig_encoder;
1167 			break;
1168 		default:
1169 			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
1170 			break;
1171 		}
1172 		break;
1173 	default:
1174 		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
1175 		break;
1176 	}
1177 
1178 	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1179 }
1180 
1181 bool
1182 amdgpu_atombios_encoder_set_edp_panel_power(struct drm_connector *connector,
1183 				     int action)
1184 {
1185 	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1186 	struct drm_device *dev = amdgpu_connector->base.dev;
1187 	struct amdgpu_device *adev = dev->dev_private;
1188 	union dig_transmitter_control args;
1189 	int index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
1190 	uint8_t frev, crev;
1191 
1192 	if (connector->connector_type != DRM_MODE_CONNECTOR_eDP)
1193 		goto done;
1194 
1195 	if ((action != ATOM_TRANSMITTER_ACTION_POWER_ON) &&
1196 	    (action != ATOM_TRANSMITTER_ACTION_POWER_OFF))
1197 		goto done;
1198 
1199 	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1200 		goto done;
1201 
1202 	memset(&args, 0, sizeof(args));
1203 
1204 	args.v1.ucAction = action;
1205 
1206 	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1207 
1208 	/* wait for the panel to power up */
1209 	if (action == ATOM_TRANSMITTER_ACTION_POWER_ON) {
1210 		int i;
1211 
1212 		for (i = 0; i < 300; i++) {
1213 			if (amdgpu_display_hpd_sense(adev, amdgpu_connector->hpd.hpd))
1214 				return true;
1215 			mdelay(1);
1216 		}
1217 		return false;
1218 	}
1219 done:
1220 	return true;
1221 }
1222 
1223 union external_encoder_control {
1224 	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION v1;
1225 	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION_V3 v3;
1226 };
1227 
1228 static void
1229 amdgpu_atombios_encoder_setup_external_encoder(struct drm_encoder *encoder,
1230 					struct drm_encoder *ext_encoder,
1231 					int action)
1232 {
1233 	struct drm_device *dev = encoder->dev;
1234 	struct amdgpu_device *adev = dev->dev_private;
1235 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1236 	struct amdgpu_encoder *ext_amdgpu_encoder = to_amdgpu_encoder(ext_encoder);
1237 	union external_encoder_control args;
1238 	struct drm_connector *connector;
1239 	int index = GetIndexIntoMasterTable(COMMAND, ExternalEncoderControl);
1240 	u8 frev, crev;
1241 	int dp_clock = 0;
1242 	int dp_lane_count = 0;
1243 	int connector_object_id = 0;
1244 	u32 ext_enum = (ext_amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
1245 
1246 	if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
1247 		connector = amdgpu_get_connector_for_encoder_init(encoder);
1248 	else
1249 		connector = amdgpu_get_connector_for_encoder(encoder);
1250 
1251 	if (connector) {
1252 		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1253 		struct amdgpu_connector_atom_dig *dig_connector =
1254 			amdgpu_connector->con_priv;
1255 
1256 		dp_clock = dig_connector->dp_clock;
1257 		dp_lane_count = dig_connector->dp_lane_count;
1258 		connector_object_id =
1259 			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
1260 	}
1261 
1262 	memset(&args, 0, sizeof(args));
1263 
1264 	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1265 		return;
1266 
1267 	switch (frev) {
1268 	case 1:
1269 		/* no params on frev 1 */
1270 		break;
1271 	case 2:
1272 		switch (crev) {
1273 		case 1:
1274 		case 2:
1275 			args.v1.sDigEncoder.ucAction = action;
1276 			args.v1.sDigEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1277 			args.v1.sDigEncoder.ucEncoderMode =
1278 				amdgpu_atombios_encoder_get_encoder_mode(encoder);
1279 
1280 			if (ENCODER_MODE_IS_DP(args.v1.sDigEncoder.ucEncoderMode)) {
1281 				if (dp_clock == 270000)
1282 					args.v1.sDigEncoder.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
1283 				args.v1.sDigEncoder.ucLaneNum = dp_lane_count;
1284 			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1285 				args.v1.sDigEncoder.ucLaneNum = 8;
1286 			else
1287 				args.v1.sDigEncoder.ucLaneNum = 4;
1288 			break;
1289 		case 3:
1290 			args.v3.sExtEncoder.ucAction = action;
1291 			if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
1292 				args.v3.sExtEncoder.usConnectorId = cpu_to_le16(connector_object_id);
1293 			else
1294 				args.v3.sExtEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1295 			args.v3.sExtEncoder.ucEncoderMode =
1296 				amdgpu_atombios_encoder_get_encoder_mode(encoder);
1297 
1298 			if (ENCODER_MODE_IS_DP(args.v3.sExtEncoder.ucEncoderMode)) {
1299 				if (dp_clock == 270000)
1300 					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
1301 				else if (dp_clock == 540000)
1302 					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_5_40GHZ;
1303 				args.v3.sExtEncoder.ucLaneNum = dp_lane_count;
1304 			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1305 				args.v3.sExtEncoder.ucLaneNum = 8;
1306 			else
1307 				args.v3.sExtEncoder.ucLaneNum = 4;
1308 			switch (ext_enum) {
1309 			case GRAPH_OBJECT_ENUM_ID1:
1310 				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER1;
1311 				break;
1312 			case GRAPH_OBJECT_ENUM_ID2:
1313 				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER2;
1314 				break;
1315 			case GRAPH_OBJECT_ENUM_ID3:
1316 				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER3;
1317 				break;
1318 			}
1319 			args.v3.sExtEncoder.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
1320 			break;
1321 		default:
1322 			DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
1323 			return;
1324 		}
1325 		break;
1326 	default:
1327 		DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
1328 		return;
1329 	}
1330 	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1331 }
1332 
1333 static void
1334 amdgpu_atombios_encoder_setup_dig(struct drm_encoder *encoder, int action)
1335 {
1336 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1337 	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1338 	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
1339 	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1340 	struct amdgpu_connector *amdgpu_connector = NULL;
1341 	struct amdgpu_connector_atom_dig *amdgpu_dig_connector = NULL;
1342 
1343 	if (connector) {
1344 		amdgpu_connector = to_amdgpu_connector(connector);
1345 		amdgpu_dig_connector = amdgpu_connector->con_priv;
1346 	}
1347 
1348 	if (action == ATOM_ENABLE) {
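		/*
		 * Enable sequence: pick the DP panel mode, set up the DIG
		 * encoder, power up an eDP panel if present, enable the
		 * transmitter, run DP link training and finally restore
		 * the LCD backlight level.
		 */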
1349 		if (!connector)
1350 			dig->panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
1351 		else
1352 			dig->panel_mode = amdgpu_atombios_dp_get_panel_mode(encoder, connector);
1353 
1354 		/* setup and enable the encoder */
1355 		amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_SETUP, 0);
1356 		amdgpu_atombios_encoder_setup_dig_encoder(encoder,
1357 						   ATOM_ENCODER_CMD_SETUP_PANEL_MODE,
1358 						   dig->panel_mode);
1359 		if (ext_encoder)
1360 			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1361 								EXTERNAL_ENCODER_ACTION_V3_ENCODER_SETUP);
1362 		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1363 		    connector) {
1364 			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
1365 				amdgpu_atombios_encoder_set_edp_panel_power(connector,
1366 								     ATOM_TRANSMITTER_ACTION_POWER_ON);
1367 				amdgpu_dig_connector->edp_on = true;
1368 			}
1369 		}
1370 		/* enable the transmitter */
1371 		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1372 						       ATOM_TRANSMITTER_ACTION_ENABLE,
1373 						       0, 0);
1374 		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1375 		    connector) {
1376 			/* DP_SET_POWER_D0 is set in amdgpu_atombios_dp_link_train */
1377 			amdgpu_atombios_dp_link_train(encoder, connector);
1378 			amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_DP_VIDEO_ON, 0);
1379 		}
1380 		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
1381 			amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder, dig->backlight_level);
1382 		if (ext_encoder)
1383 			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_ENABLE);
1384 	} else {
1385 		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1386 		    connector)
1387 			amdgpu_atombios_encoder_setup_dig_encoder(encoder,
1388 							   ATOM_ENCODER_CMD_DP_VIDEO_OFF, 0);
1389 		if (ext_encoder)
1390 			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_DISABLE);
1391 		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
1392 			amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1393 							       ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
1394 
1395 		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1396 		    connector)
1397 			amdgpu_atombios_dp_set_rx_power_state(connector, DP_SET_POWER_D3);
1398 		/* disable the transmitter */
1399 		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1400 						       ATOM_TRANSMITTER_ACTION_DISABLE, 0, 0);
1401 		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1402 		    connector) {
1403 			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
1404 				amdgpu_atombios_encoder_set_edp_panel_power(connector,
1405 								     ATOM_TRANSMITTER_ACTION_POWER_OFF);
1406 				amdgpu_dig_connector->edp_on = false;
1407 			}
1408 		}
1409 	}
1410 }
1411 
1412 void
1413 amdgpu_atombios_encoder_dpms(struct drm_encoder *encoder, int mode)
1414 {
1415 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1416 
1417 	DRM_DEBUG_KMS("encoder dpms %d to mode %d, devices %08x, active_devices %08x\n",
1418 		  amdgpu_encoder->encoder_id, mode, amdgpu_encoder->devices,
1419 		  amdgpu_encoder->active_device);
1420 	switch (amdgpu_encoder->encoder_id) {
1421 	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1422 	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1423 	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1424 	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1425 		switch (mode) {
1426 		case DRM_MODE_DPMS_ON:
1427 			amdgpu_atombios_encoder_setup_dig(encoder, ATOM_ENABLE);
1428 			break;
1429 		case DRM_MODE_DPMS_STANDBY:
1430 		case DRM_MODE_DPMS_SUSPEND:
1431 		case DRM_MODE_DPMS_OFF:
1432 			amdgpu_atombios_encoder_setup_dig(encoder, ATOM_DISABLE);
1433 			break;
1434 		}
1435 		break;
1436 	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1437 		switch (mode) {
1438 		case DRM_MODE_DPMS_ON:
1439 			amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_ENABLE);
1440 			break;
1441 		case DRM_MODE_DPMS_STANDBY:
1442 		case DRM_MODE_DPMS_SUSPEND:
1443 		case DRM_MODE_DPMS_OFF:
1444 			amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_DISABLE);
1445 			break;
1446 		}
1447 		break;
1448 	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1449 		switch (mode) {
1450 		case DRM_MODE_DPMS_ON:
1451 			amdgpu_atombios_encoder_setup_dac(encoder, ATOM_ENABLE);
1452 			break;
1453 		case DRM_MODE_DPMS_STANDBY:
1454 		case DRM_MODE_DPMS_SUSPEND:
1455 		case DRM_MODE_DPMS_OFF:
1456 			amdgpu_atombios_encoder_setup_dac(encoder, ATOM_DISABLE);
1457 			break;
1458 		}
1459 		break;
1460 	default:
1461 		return;
1462 	}
1463 }
1464 
1465 union crtc_source_param {
1466 	SELECT_CRTC_SOURCE_PS_ALLOCATION v1;
1467 	SELECT_CRTC_SOURCE_PARAMETERS_V2 v2;
1468 	SELECT_CRTC_SOURCE_PARAMETERS_V3 v3;
1469 };
1470 
1471 void
1472 amdgpu_atombios_encoder_set_crtc_source(struct drm_encoder *encoder)
1473 {
1474 	struct drm_device *dev = encoder->dev;
1475 	struct amdgpu_device *adev = dev->dev_private;
1476 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1477 	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
1478 	union crtc_source_param args;
1479 	int index = GetIndexIntoMasterTable(COMMAND, SelectCRTC_Source);
1480 	uint8_t frev, crev;
1481 	struct amdgpu_encoder_atom_dig *dig;
1482 
1483 	memset(&args, 0, sizeof(args));
1484 
1485 	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1486 		return;
1487 
1488 	switch (frev) {
1489 	case 1:
1490 		switch (crev) {
1491 		case 1:
1492 		default:
1493 			args.v1.ucCRTC = amdgpu_crtc->crtc_id;
1494 			switch (amdgpu_encoder->encoder_id) {
1495 			case ENCODER_OBJECT_ID_INTERNAL_TMDS1:
1496 			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1:
1497 				args.v1.ucDevice = ATOM_DEVICE_DFP1_INDEX;
1498 				break;
1499 			case ENCODER_OBJECT_ID_INTERNAL_LVDS:
1500 			case ENCODER_OBJECT_ID_INTERNAL_LVTM1:
1501 				if (amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT)
1502 					args.v1.ucDevice = ATOM_DEVICE_LCD1_INDEX;
1503 				else
1504 					args.v1.ucDevice = ATOM_DEVICE_DFP3_INDEX;
1505 				break;
1506 			case ENCODER_OBJECT_ID_INTERNAL_DVO1:
1507 			case ENCODER_OBJECT_ID_INTERNAL_DDI:
1508 			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1509 				args.v1.ucDevice = ATOM_DEVICE_DFP2_INDEX;
1510 				break;
1511 			case ENCODER_OBJECT_ID_INTERNAL_DAC1:
1512 			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1513 				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1514 					args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
1515 				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1516 					args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
1517 				else
1518 					args.v1.ucDevice = ATOM_DEVICE_CRT1_INDEX;
1519 				break;
1520 			case ENCODER_OBJECT_ID_INTERNAL_DAC2:
1521 			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1522 				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1523 					args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
1524 				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1525 					args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
1526 				else
1527 					args.v1.ucDevice = ATOM_DEVICE_CRT2_INDEX;
1528 				break;
1529 			}
1530 			break;
1531 		case 2:
1532 			args.v2.ucCRTC = amdgpu_crtc->crtc_id;
1533 			if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
1534 				struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1535 
1536 				if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
1537 					args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1538 				else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
1539 					args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
1540 				else
1541 					args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1542 			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
1543 				args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1544 			} else {
1545 				args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1546 			}
1547 			switch (amdgpu_encoder->encoder_id) {
1548 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1549 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1550 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1551 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1552 			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
1553 				dig = amdgpu_encoder->enc_priv;
1554 				switch (dig->dig_encoder) {
1555 				case 0:
1556 					args.v2.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
1557 					break;
1558 				case 1:
1559 					args.v2.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
1560 					break;
1561 				case 2:
1562 					args.v2.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
1563 					break;
1564 				case 3:
1565 					args.v2.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
1566 					break;
1567 				case 4:
1568 					args.v2.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
1569 					break;
1570 				case 5:
1571 					args.v2.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
1572 					break;
1573 				case 6:
1574 					args.v2.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
1575 					break;
1576 				}
1577 				break;
1578 			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1579 				args.v2.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
1580 				break;
1581 			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1582 				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1583 					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1584 				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1585 					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1586 				else
1587 					args.v2.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
1588 				break;
1589 			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1590 				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1591 					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1592 				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1593 					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1594 				else
1595 					args.v2.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
1596 				break;
1597 			}
1598 			break;
1599 		case 3:
1600 			args.v3.ucCRTC = amdgpu_crtc->crtc_id;
1601 			if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
1602 				struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1603 
1604 				if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
1605 					args.v3.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1606 				else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
1607 					args.v3.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
1608 				else
1609 					args.v3.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1610 			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
1611 				args.v3.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1612 			} else {
1613 				args.v3.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1614 			}
1615 			args.v3.ucDstBpc = amdgpu_atombios_encoder_get_bpc(encoder);
1616 			switch (amdgpu_encoder->encoder_id) {
1617 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1618 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1619 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1620 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1621 			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
1622 				dig = amdgpu_encoder->enc_priv;
1623 				switch (dig->dig_encoder) {
1624 				case 0:
1625 					args.v3.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
1626 					break;
1627 				case 1:
1628 					args.v3.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
1629 					break;
1630 				case 2:
1631 					args.v3.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
1632 					break;
1633 				case 3:
1634 					args.v3.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
1635 					break;
1636 				case 4:
1637 					args.v3.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
1638 					break;
1639 				case 5:
1640 					args.v3.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
1641 					break;
1642 				case 6:
1643 					args.v3.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
1644 					break;
1645 				}
1646 				break;
1647 			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1648 				args.v3.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
1649 				break;
1650 			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1651 				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1652 					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1653 				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1654 					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1655 				else
1656 					args.v3.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
1657 				break;
1658 			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1659 				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1660 					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1661 				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1662 					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1663 				else
1664 					args.v3.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
1665 				break;
1666 			}
1667 			break;
1668 		}
1669 		break;
1670 	default:
1671 		DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
1672 		return;
1673 	}
1674 
1675 	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1676 }
1677 
1678 /* This only needs to be called once at startup */
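/*
 * Sends ATOM_TRANSMITTER_ACTION_INIT to every internal UNIPHY block and
 * EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT to any attached external
 * encoder.
 */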
1679 void
1680 amdgpu_atombios_encoder_init_dig(struct amdgpu_device *adev)
1681 {
1682 	struct drm_device *dev = adev->ddev;
1683 	struct drm_encoder *encoder;
1684 
1685 	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
1686 		struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1687 		struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1688 
1689 		switch (amdgpu_encoder->encoder_id) {
1690 		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1691 		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1692 		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1693 		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1694 			amdgpu_atombios_encoder_setup_dig_transmitter(encoder, ATOM_TRANSMITTER_ACTION_INIT,
1695 							       0, 0);
1696 			break;
1697 		}
1698 
1699 		if (ext_encoder)
1700 			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1701 								EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT);
1702 	}
1703 }
1704 
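/*
 * Fire the DAC_LoadDetection command table for the CRT/TV/CV device on
 * this connector.  The result is latched into the BIOS scratch registers
 * and evaluated by the detect routines below.
 */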
1705 static bool
1706 amdgpu_atombios_encoder_dac_load_detect(struct drm_encoder *encoder,
1707 				 struct drm_connector *connector)
1708 {
1709 	struct drm_device *dev = encoder->dev;
1710 	struct amdgpu_device *adev = dev->dev_private;
1711 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1712 	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1713 
1714 	if (amdgpu_encoder->devices & (ATOM_DEVICE_TV_SUPPORT |
1715 				       ATOM_DEVICE_CV_SUPPORT |
1716 				       ATOM_DEVICE_CRT_SUPPORT)) {
1717 		DAC_LOAD_DETECTION_PS_ALLOCATION args;
1718 		int index = GetIndexIntoMasterTable(COMMAND, DAC_LoadDetection);
1719 		uint8_t frev, crev;
1720 
1721 		memset(&args, 0, sizeof(args));
1722 
1723 		if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1724 			return false;
1725 
1726 		args.sDacload.ucMisc = 0;
1727 
1728 		if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DAC1) ||
1729 		    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1))
1730 			args.sDacload.ucDacType = ATOM_DAC_A;
1731 		else
1732 			args.sDacload.ucDacType = ATOM_DAC_B;
1733 
1734 		if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)
1735 			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT1_SUPPORT);
1736 		else if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)
1737 			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT2_SUPPORT);
1738 		else if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1739 			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CV_SUPPORT);
1740 			if (crev >= 3)
1741 				args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
1742 		} else if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1743 			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_TV1_SUPPORT);
1744 			if (crev >= 3)
1745 				args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
1746 		}
1747 
1748 		amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1749 
1750 		return true;
1751 	} else
1752 		return false;
1753 }
1754 
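/*
 * Analog detect: run DAC load detection and then decode the per-device
 * bits in BIOS_SCRATCH_0 to see whether a CRT, CV or TV load is present.
 */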
1755 enum drm_connector_status
1756 amdgpu_atombios_encoder_dac_detect(struct drm_encoder *encoder,
1757 			    struct drm_connector *connector)
1758 {
1759 	struct drm_device *dev = encoder->dev;
1760 	struct amdgpu_device *adev = dev->dev_private;
1761 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1762 	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1763 	uint32_t bios_0_scratch;
1764 
1765 	if (!amdgpu_atombios_encoder_dac_load_detect(encoder, connector)) {
1766 		DRM_DEBUG_KMS("detect returned false\n");
1767 		return connector_status_unknown;
1768 	}
1769 
1770 	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
1771 
1772 	DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
1773 	if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
1774 		if (bios_0_scratch & ATOM_S0_CRT1_MASK)
1775 			return connector_status_connected;
1776 	}
1777 	if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
1778 		if (bios_0_scratch & ATOM_S0_CRT2_MASK)
1779 			return connector_status_connected;
1780 	}
1781 	if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1782 		if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
1783 			return connector_status_connected;
1784 	}
1785 	if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1786 		if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
1787 			return connector_status_connected; /* CTV */
1788 		else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
1789 			return connector_status_connected; /* STV */
1790 	}
1791 	return connector_status_disconnected;
1792 }
1793 
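/*
 * Load detect through an external DP bridge: only meaningful for CRT
 * capable connectors; the bridge performs the load detection and the
 * result is read back from BIOS_SCRATCH_0 as in the internal DAC case.
 */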
1794 enum drm_connector_status
1795 amdgpu_atombios_encoder_dig_detect(struct drm_encoder *encoder,
1796 			    struct drm_connector *connector)
1797 {
1798 	struct drm_device *dev = encoder->dev;
1799 	struct amdgpu_device *adev = dev->dev_private;
1800 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1801 	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1802 	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1803 	u32 bios_0_scratch;
1804 
1805 	if (!ext_encoder)
1806 		return connector_status_unknown;
1807 
1808 	if ((amdgpu_connector->devices & ATOM_DEVICE_CRT_SUPPORT) == 0)
1809 		return connector_status_unknown;
1810 
1811 	/* load detect on the dp bridge */
1812 	amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1813 						EXTERNAL_ENCODER_ACTION_V3_DACLOAD_DETECTION);
1814 
1815 	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
1816 
1817 	DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
1818 	if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
1819 		if (bios_0_scratch & ATOM_S0_CRT1_MASK)
1820 			return connector_status_connected;
1821 	}
1822 	if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
1823 		if (bios_0_scratch & ATOM_S0_CRT2_MASK)
1824 			return connector_status_connected;
1825 	}
1826 	if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1827 		if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
1828 			return connector_status_connected;
1829 	}
1830 	if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1831 		if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
1832 			return connector_status_connected; /* CTV */
1833 		else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
1834 			return connector_status_connected; /* STV */
1835 	}
1836 	return connector_status_disconnected;
1837 }
1838 
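/* Configure DDC handling on the external DP bridge, if one is present. */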
1839 void
1840 amdgpu_atombios_encoder_setup_ext_encoder_ddc(struct drm_encoder *encoder)
1841 {
1842 	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1843 
1844 	if (ext_encoder)
1845 		/* ddc_setup on the dp bridge */
1846 		amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1847 							EXTERNAL_ENCODER_ACTION_V3_DDC_SETUP);
1848 
1849 }
1850 
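/*
 * Mirror the connector's connection state into the BIOS scratch
 * registers: SCRATCH_0 carries the detected-device bits, SCRATCH_3 the
 * active-device bits and SCRATCH_6 the corresponding ACC_REQ bits.
 */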
1851 void
1852 amdgpu_atombios_encoder_set_bios_scratch_regs(struct drm_connector *connector,
1853 				       struct drm_encoder *encoder,
1854 				       bool connected)
1855 {
1856 	struct drm_device *dev = connector->dev;
1857 	struct amdgpu_device *adev = dev->dev_private;
1858 	struct amdgpu_connector *amdgpu_connector =
1859 	    to_amdgpu_connector(connector);
1860 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1861 	uint32_t bios_0_scratch, bios_3_scratch, bios_6_scratch;
1862 
1863 	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
1864 	bios_3_scratch = RREG32(mmBIOS_SCRATCH_3);
1865 	bios_6_scratch = RREG32(mmBIOS_SCRATCH_6);
1866 
1867 	if ((amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT) &&
1868 	    (amdgpu_connector->devices & ATOM_DEVICE_LCD1_SUPPORT)) {
1869 		if (connected) {
1870 			DRM_DEBUG_KMS("LCD1 connected\n");
1871 			bios_0_scratch |= ATOM_S0_LCD1;
1872 			bios_3_scratch |= ATOM_S3_LCD1_ACTIVE;
1873 			bios_6_scratch |= ATOM_S6_ACC_REQ_LCD1;
1874 		} else {
1875 			DRM_DEBUG_KMS("LCD1 disconnected\n");
1876 			bios_0_scratch &= ~ATOM_S0_LCD1;
1877 			bios_3_scratch &= ~ATOM_S3_LCD1_ACTIVE;
1878 			bios_6_scratch &= ~ATOM_S6_ACC_REQ_LCD1;
1879 		}
1880 	}
1881 	if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT1_SUPPORT) &&
1882 	    (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)) {
1883 		if (connected) {
1884 			DRM_DEBUG_KMS("CRT1 connected\n");
1885 			bios_0_scratch |= ATOM_S0_CRT1_COLOR;
1886 			bios_3_scratch |= ATOM_S3_CRT1_ACTIVE;
1887 			bios_6_scratch |= ATOM_S6_ACC_REQ_CRT1;
1888 		} else {
1889 			DRM_DEBUG_KMS("CRT1 disconnected\n");
1890 			bios_0_scratch &= ~ATOM_S0_CRT1_MASK;
1891 			bios_3_scratch &= ~ATOM_S3_CRT1_ACTIVE;
1892 			bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT1;
1893 		}
1894 	}
1895 	if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT2_SUPPORT) &&
1896 	    (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)) {
1897 		if (connected) {
1898 			DRM_DEBUG_KMS("CRT2 connected\n");
1899 			bios_0_scratch |= ATOM_S0_CRT2_COLOR;
1900 			bios_3_scratch |= ATOM_S3_CRT2_ACTIVE;
1901 			bios_6_scratch |= ATOM_S6_ACC_REQ_CRT2;
1902 		} else {
1903 			DRM_DEBUG_KMS("CRT2 disconnected\n");
1904 			bios_0_scratch &= ~ATOM_S0_CRT2_MASK;
1905 			bios_3_scratch &= ~ATOM_S3_CRT2_ACTIVE;
1906 			bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT2;
1907 		}
1908 	}
1909 	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP1_SUPPORT) &&
1910 	    (amdgpu_connector->devices & ATOM_DEVICE_DFP1_SUPPORT)) {
1911 		if (connected) {
1912 			DRM_DEBUG_KMS("DFP1 connected\n");
1913 			bios_0_scratch |= ATOM_S0_DFP1;
1914 			bios_3_scratch |= ATOM_S3_DFP1_ACTIVE;
1915 			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP1;
1916 		} else {
1917 			DRM_DEBUG_KMS("DFP1 disconnected\n");
1918 			bios_0_scratch &= ~ATOM_S0_DFP1;
1919 			bios_3_scratch &= ~ATOM_S3_DFP1_ACTIVE;
1920 			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP1;
1921 		}
1922 	}
1923 	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP2_SUPPORT) &&
1924 	    (amdgpu_connector->devices & ATOM_DEVICE_DFP2_SUPPORT)) {
1925 		if (connected) {
1926 			DRM_DEBUG_KMS("DFP2 connected\n");
1927 			bios_0_scratch |= ATOM_S0_DFP2;
1928 			bios_3_scratch |= ATOM_S3_DFP2_ACTIVE;
1929 			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP2;
1930 		} else {
1931 			DRM_DEBUG_KMS("DFP2 disconnected\n");
1932 			bios_0_scratch &= ~ATOM_S0_DFP2;
1933 			bios_3_scratch &= ~ATOM_S3_DFP2_ACTIVE;
1934 			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP2;
1935 		}
1936 	}
1937 	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP3_SUPPORT) &&
1938 	    (amdgpu_connector->devices & ATOM_DEVICE_DFP3_SUPPORT)) {
1939 		if (connected) {
1940 			DRM_DEBUG_KMS("DFP3 connected\n");
1941 			bios_0_scratch |= ATOM_S0_DFP3;
1942 			bios_3_scratch |= ATOM_S3_DFP3_ACTIVE;
1943 			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP3;
1944 		} else {
1945 			DRM_DEBUG_KMS("DFP3 disconnected\n");
1946 			bios_0_scratch &= ~ATOM_S0_DFP3;
1947 			bios_3_scratch &= ~ATOM_S3_DFP3_ACTIVE;
1948 			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP3;
1949 		}
1950 	}
1951 	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP4_SUPPORT) &&
1952 	    (amdgpu_connector->devices & ATOM_DEVICE_DFP4_SUPPORT)) {
1953 		if (connected) {
1954 			DRM_DEBUG_KMS("DFP4 connected\n");
1955 			bios_0_scratch |= ATOM_S0_DFP4;
1956 			bios_3_scratch |= ATOM_S3_DFP4_ACTIVE;
1957 			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP4;
1958 		} else {
1959 			DRM_DEBUG_KMS("DFP4 disconnected\n");
1960 			bios_0_scratch &= ~ATOM_S0_DFP4;
1961 			bios_3_scratch &= ~ATOM_S3_DFP4_ACTIVE;
1962 			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP4;
1963 		}
1964 	}
1965 	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP5_SUPPORT) &&
1966 	    (amdgpu_connector->devices & ATOM_DEVICE_DFP5_SUPPORT)) {
1967 		if (connected) {
1968 			DRM_DEBUG_KMS("DFP5 connected\n");
1969 			bios_0_scratch |= ATOM_S0_DFP5;
1970 			bios_3_scratch |= ATOM_S3_DFP5_ACTIVE;
1971 			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP5;
1972 		} else {
1973 			DRM_DEBUG_KMS("DFP5 disconnected\n");
1974 			bios_0_scratch &= ~ATOM_S0_DFP5;
1975 			bios_3_scratch &= ~ATOM_S3_DFP5_ACTIVE;
1976 			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP5;
1977 		}
1978 	}
1979 	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP6_SUPPORT) &&
1980 	    (amdgpu_connector->devices & ATOM_DEVICE_DFP6_SUPPORT)) {
1981 		if (connected) {
1982 			DRM_DEBUG_KMS("DFP6 connected\n");
1983 			bios_0_scratch |= ATOM_S0_DFP6;
1984 			bios_3_scratch |= ATOM_S3_DFP6_ACTIVE;
1985 			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP6;
1986 		} else {
1987 			DRM_DEBUG_KMS("DFP6 disconnected\n");
1988 			bios_0_scratch &= ~ATOM_S0_DFP6;
1989 			bios_3_scratch &= ~ATOM_S3_DFP6_ACTIVE;
1990 			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP6;
1991 		}
1992 	}
1993 
1994 	WREG32(mmBIOS_SCRATCH_0, bios_0_scratch);
1995 	WREG32(mmBIOS_SCRATCH_3, bios_3_scratch);
1996 	WREG32(mmBIOS_SCRATCH_6, bios_6_scratch);
1997 }
1998 
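/*
 * VBIOS LVDS_Info data table; the revision-specific layouts are
 * overlaid in one union.
 */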
1999 union lvds_info {
2000 	struct _ATOM_LVDS_INFO info;
2001 	struct _ATOM_LVDS_INFO_V12 info_12;
2002 };
2003 
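/*
 * Build the native panel mode and power sequencing data for an LVDS/eDP
 * panel from the LVDS_Info table, including any fake EDID or panel
 * resolution patch records appended to it.
 */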
2004 struct amdgpu_encoder_atom_dig *
2005 amdgpu_atombios_encoder_get_lcd_info(struct amdgpu_encoder *encoder)
2006 {
2007 	struct drm_device *dev = encoder->base.dev;
2008 	struct amdgpu_device *adev = dev->dev_private;
2009 	struct amdgpu_mode_info *mode_info = &adev->mode_info;
2010 	int index = GetIndexIntoMasterTable(DATA, LVDS_Info);
2011 	uint16_t data_offset, misc;
2012 	union lvds_info *lvds_info;
2013 	uint8_t frev, crev;
2014 	struct amdgpu_encoder_atom_dig *lvds = NULL;
2015 	int encoder_enum = (encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
2016 
2017 	if (amdgpu_atom_parse_data_header(mode_info->atom_context, index, NULL,
2018 				   &frev, &crev, &data_offset)) {
2019 		lvds_info =
2020 			(union lvds_info *)(mode_info->atom_context->bios + data_offset);
2021 		lvds =
2022 		    kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);
2023 
2024 		if (!lvds)
2025 			return NULL;
2026 
2027 		lvds->native_mode.clock =
2028 		    le16_to_cpu(lvds_info->info.sLCDTiming.usPixClk) * 10;
2029 		lvds->native_mode.hdisplay =
2030 		    le16_to_cpu(lvds_info->info.sLCDTiming.usHActive);
2031 		lvds->native_mode.vdisplay =
2032 		    le16_to_cpu(lvds_info->info.sLCDTiming.usVActive);
2033 		lvds->native_mode.htotal = lvds->native_mode.hdisplay +
2034 			le16_to_cpu(lvds_info->info.sLCDTiming.usHBlanking_Time);
2035 		lvds->native_mode.hsync_start = lvds->native_mode.hdisplay +
2036 			le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncOffset);
2037 		lvds->native_mode.hsync_end = lvds->native_mode.hsync_start +
2038 			le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncWidth);
2039 		lvds->native_mode.vtotal = lvds->native_mode.vdisplay +
2040 			le16_to_cpu(lvds_info->info.sLCDTiming.usVBlanking_Time);
2041 		lvds->native_mode.vsync_start = lvds->native_mode.vdisplay +
2042 			le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncOffset);
2043 		lvds->native_mode.vsync_end = lvds->native_mode.vsync_start +
2044 			le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncWidth);
2045 		lvds->panel_pwr_delay =
2046 		    le16_to_cpu(lvds_info->info.usOffDelayInMs);
2047 		lvds->lcd_misc = lvds_info->info.ucLVDS_Misc;
2048 
2049 		misc = le16_to_cpu(lvds_info->info.sLCDTiming.susModeMiscInfo.usAccess);
2050 		if (misc & ATOM_VSYNC_POLARITY)
2051 			lvds->native_mode.flags |= DRM_MODE_FLAG_NVSYNC;
2052 		if (misc & ATOM_HSYNC_POLARITY)
2053 			lvds->native_mode.flags |= DRM_MODE_FLAG_NHSYNC;
2054 		if (misc & ATOM_COMPOSITESYNC)
2055 			lvds->native_mode.flags |= DRM_MODE_FLAG_CSYNC;
2056 		if (misc & ATOM_INTERLACE)
2057 			lvds->native_mode.flags |= DRM_MODE_FLAG_INTERLACE;
2058 		if (misc & ATOM_DOUBLE_CLOCK_MODE)
2059 			lvds->native_mode.flags |= DRM_MODE_FLAG_DBLSCAN;
2060 
2061 		lvds->native_mode.width_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageHSize);
2062 		lvds->native_mode.height_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageVSize);
2063 
2064 		/* set crtc values */
2065 		drm_mode_set_crtcinfo(&lvds->native_mode, CRTC_INTERLACE_HALVE_V);
2066 
2067 		lvds->lcd_ss_id = lvds_info->info.ucSS_Id;
2068 
2069 		encoder->native_mode = lvds->native_mode;
2070 
2071 		if (encoder_enum == 2)
2072 			lvds->linkb = true;
2073 		else
2074 			lvds->linkb = false;
2075 
2076 		/* parse the lcd record table */
2077 		if (le16_to_cpu(lvds_info->info.usModePatchTableOffset)) {
2078 			ATOM_FAKE_EDID_PATCH_RECORD *fake_edid_record;
2079 			ATOM_PANEL_RESOLUTION_PATCH_RECORD *panel_res_record;
2080 			bool bad_record = false;
2081 			u8 *record;
2082 
2083 			if ((frev == 1) && (crev < 2))
2084 				/* absolute */
2085 				record = (u8 *)(mode_info->atom_context->bios +
2086 						le16_to_cpu(lvds_info->info.usModePatchTableOffset));
2087 			else
2088 				/* relative */
2089 				record = (u8 *)(mode_info->atom_context->bios +
2090 						data_offset +
2091 						le16_to_cpu(lvds_info->info.usModePatchTableOffset));
2092 			while (*record != ATOM_RECORD_END_TYPE) {
2093 				switch (*record) {
2094 				case LCD_MODE_PATCH_RECORD_MODE_TYPE:
2095 					record += sizeof(ATOM_PATCH_RECORD_MODE);
2096 					break;
2097 				case LCD_RTS_RECORD_TYPE:
2098 					record += sizeof(ATOM_LCD_RTS_RECORD);
2099 					break;
2100 				case LCD_CAP_RECORD_TYPE:
2101 					record += sizeof(ATOM_LCD_MODE_CONTROL_CAP);
2102 					break;
2103 				case LCD_FAKE_EDID_PATCH_RECORD_TYPE:
2104 					fake_edid_record = (ATOM_FAKE_EDID_PATCH_RECORD *)record;
2105 					if (fake_edid_record->ucFakeEDIDLength) {
2106 						struct edid *edid;
2107 						int edid_size =
2108 							max((int)EDID_LENGTH, (int)fake_edid_record->ucFakeEDIDLength);
2109 						edid = kmalloc(edid_size, GFP_KERNEL);
2110 						if (edid) {
2111 							memcpy((u8 *)edid, (u8 *)&fake_edid_record->ucFakeEDIDString[0],
2112 							       fake_edid_record->ucFakeEDIDLength);
2113 
2114 							if (drm_edid_is_valid(edid)) {
2115 								adev->mode_info.bios_hardcoded_edid = edid;
2116 								adev->mode_info.bios_hardcoded_edid_size = edid_size;
2117 							} else
2118 								kfree(edid);
2119 						}
2120 					}
2121 					record += fake_edid_record->ucFakeEDIDLength ?
2122 						fake_edid_record->ucFakeEDIDLength + 2 :
2123 						sizeof(ATOM_FAKE_EDID_PATCH_RECORD);
2124 					break;
2125 				case LCD_PANEL_RESOLUTION_RECORD_TYPE:
2126 					panel_res_record = (ATOM_PANEL_RESOLUTION_PATCH_RECORD *)record;
2127 					lvds->native_mode.width_mm = panel_res_record->usHSize;
2128 					lvds->native_mode.height_mm = panel_res_record->usVSize;
2129 					record += sizeof(ATOM_PANEL_RESOLUTION_PATCH_RECORD);
2130 					break;
2131 				default:
2132 					DRM_ERROR("Bad LCD record %d\n", *record);
2133 					bad_record = true;
2134 					break;
2135 				}
2136 				if (bad_record)
2137 					break;
2138 			}
2139 		}
2140 	}
2141 	return lvds;
2142 }
2143 
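/*
 * Allocate the per-encoder DIG state: coherent mode on by default, no
 * DIG block assigned yet, link B selected when the encoder enumeration
 * ID is 2.
 */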
2144 struct amdgpu_encoder_atom_dig *
2145 amdgpu_atombios_encoder_get_dig_info(struct amdgpu_encoder *amdgpu_encoder)
2146 {
2147 	int encoder_enum = (amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
2148 	struct amdgpu_encoder_atom_dig *dig = kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);
2149 
2150 	if (!dig)
2151 		return NULL;
2152 
2153 	/* coherent mode by default */
2154 	dig->coherent_mode = true;
2155 	dig->dig_encoder = -1;
2156 
2157 	if (encoder_enum == 2)
2158 		dig->linkb = true;
2159 	else
2160 		dig->linkb = false;
2161 
2162 	return dig;
2163 }
2164 
2165