/*
 * Copyright 2007-11 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 */
#include <drm/drmP.h>
#include <drm/drm_crtc_helper.h>
#include <drm/amdgpu_drm.h>
#include "amdgpu.h"
#include "amdgpu_connectors.h"
#include "atom.h"
#include "atombios_encoders.h"
#include "atombios_dp.h"
#include <linux/backlight.h>
#include "bif/bif_4_1_d.h"

u8
amdgpu_atombios_encoder_get_backlight_level_from_reg(struct amdgpu_device *adev)
{
	u8 backlight_level;
	u32 bios_2_scratch;

	bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);

	backlight_level = ((bios_2_scratch & ATOM_S2_CURRENT_BL_LEVEL_MASK) >>
			   ATOM_S2_CURRENT_BL_LEVEL_SHIFT);

	return backlight_level;
}

void
amdgpu_atombios_encoder_set_backlight_level_to_reg(struct amdgpu_device *adev,
						   u8 backlight_level)
{
	u32 bios_2_scratch;

	bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);

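	/* preserve the other scratch bits; only the backlight level field is replaced */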
	bios_2_scratch &= ~ATOM_S2_CURRENT_BL_LEVEL_MASK;
	bios_2_scratch |= ((backlight_level << ATOM_S2_CURRENT_BL_LEVEL_SHIFT) &
			   ATOM_S2_CURRENT_BL_LEVEL_MASK);

	WREG32(mmBIOS_SCRATCH_2, bios_2_scratch);
}

u8
amdgpu_atombios_encoder_get_backlight_level(struct amdgpu_encoder *amdgpu_encoder)
{
	struct drm_device *dev = amdgpu_encoder->base.dev;
	struct amdgpu_device *adev = dev->dev_private;

	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
		return 0;

	return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
}

void
amdgpu_atombios_encoder_set_backlight_level(struct amdgpu_encoder *amdgpu_encoder,
					    u8 level)
{
	struct drm_encoder *encoder = &amdgpu_encoder->base;
	struct drm_device *dev = amdgpu_encoder->base.dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder_atom_dig *dig;

	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
		return;

	if ((amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) &&
	    amdgpu_encoder->enc_priv) {
		dig = amdgpu_encoder->enc_priv;
		dig->backlight_level = level;
		amdgpu_atombios_encoder_set_backlight_level_to_reg(adev, dig->backlight_level);

		switch (amdgpu_encoder->encoder_id) {
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
		case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
			if (dig->backlight_level == 0)
				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
					ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
			else {
				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
					ATOM_TRANSMITTER_ACTION_BL_BRIGHTNESS_CONTROL, 0, 0);
				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
					ATOM_TRANSMITTER_ACTION_LCD_BLON, 0, 0);
			}
			break;
		default:
			break;
		}
	}
}

#if defined(CONFIG_BACKLIGHT_CLASS_DEVICE) || defined(CONFIG_BACKLIGHT_CLASS_DEVICE_MODULE)

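/*
 * Backlight class glue: clamp the requested brightness to the hardware
 * range, program it through the ATOM transmitter tables, and read the
 * current level back from the BIOS scratch register.
 */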
static u8 amdgpu_atombios_encoder_backlight_level(struct backlight_device *bd)
{
	u8 level;

	/* Convert brightness to hardware level */
	if (bd->props.brightness < 0)
		level = 0;
	else if (bd->props.brightness > AMDGPU_MAX_BL_LEVEL)
		level = AMDGPU_MAX_BL_LEVEL;
	else
		level = bd->props.brightness;

	return level;
}

static int amdgpu_atombios_encoder_update_backlight_status(struct backlight_device *bd)
{
	struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
	struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;

	amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder,
						    amdgpu_atombios_encoder_backlight_level(bd));

	return 0;
}

static int
amdgpu_atombios_encoder_get_backlight_brightness(struct backlight_device *bd)
{
	struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
	struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
	struct drm_device *dev = amdgpu_encoder->base.dev;
	struct amdgpu_device *adev = dev->dev_private;

	return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
}

static const struct backlight_ops amdgpu_atombios_encoder_backlight_ops = {
	.get_brightness = amdgpu_atombios_encoder_get_backlight_brightness,
	.update_status	= amdgpu_atombios_encoder_update_backlight_status,
};

void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *amdgpu_encoder,
					    struct drm_connector *drm_connector)
{
	struct drm_device *dev = amdgpu_encoder->base.dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct backlight_device *bd;
	struct backlight_properties props;
	struct amdgpu_backlight_privdata *pdata;
	struct amdgpu_encoder_atom_dig *dig;
	u8 backlight_level;
	char bl_name[16];

	/* Mac laptops with multiple GPUs use the gmux driver for backlight
	 * so don't register a backlight device
	 */
	if ((adev->pdev->subsystem_vendor == PCI_VENDOR_ID_APPLE) &&
	    (adev->pdev->device == 0x6741))
		return;

	if (!amdgpu_encoder->enc_priv)
		return;

	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
		return;

	pdata = kmalloc(sizeof(struct amdgpu_backlight_privdata), GFP_KERNEL);
	if (!pdata) {
		DRM_ERROR("Memory allocation failed\n");
		goto error;
	}

	memset(&props, 0, sizeof(props));
	props.max_brightness = AMDGPU_MAX_BL_LEVEL;
	props.type = BACKLIGHT_RAW;
	snprintf(bl_name, sizeof(bl_name),
		 "amdgpu_bl%d", dev->primary->index);
	bd = backlight_device_register(bl_name, drm_connector->kdev,
				       pdata, &amdgpu_atombios_encoder_backlight_ops, &props);
	if (IS_ERR(bd)) {
		DRM_ERROR("Backlight registration failed\n");
		goto error;
	}

	pdata->encoder = amdgpu_encoder;

	backlight_level = amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);

	dig = amdgpu_encoder->enc_priv;
	dig->bl_dev = bd;

	bd->props.brightness = amdgpu_atombios_encoder_get_backlight_brightness(bd);
	bd->props.power = FB_BLANK_UNBLANK;
	backlight_update_status(bd);

	DRM_INFO("amdgpu atom DIG backlight initialized\n");

	return;

error:
	kfree(pdata);
	return;
}

void
amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *amdgpu_encoder)
{
	struct drm_device *dev = amdgpu_encoder->base.dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct backlight_device *bd = NULL;
	struct amdgpu_encoder_atom_dig *dig;

	if (!amdgpu_encoder->enc_priv)
		return;

	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
		return;

	dig = amdgpu_encoder->enc_priv;
	bd = dig->bl_dev;
	dig->bl_dev = NULL;

	if (bd) {
		struct amdgpu_legacy_backlight_privdata *pdata;

		pdata = bl_get_data(bd);
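		/* unregister the class device first, then free the private data it referenced */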
		backlight_device_unregister(bd);
		kfree(pdata);

		DRM_INFO("amdgpu atom LVDS backlight unloaded\n");
	}
}

#else /* !CONFIG_BACKLIGHT_CLASS_DEVICE */

void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *encoder)
{
}

void amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *encoder)
{
}

#endif

bool amdgpu_atombios_encoder_is_digital(struct drm_encoder *encoder)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	switch (amdgpu_encoder->encoder_id) {
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
		return true;
	default:
		return false;
	}
}

bool amdgpu_atombios_encoder_mode_fixup(struct drm_encoder *encoder,
					const struct drm_display_mode *mode,
					struct drm_display_mode *adjusted_mode)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);

	/* set the active encoder to connector routing */
	amdgpu_encoder_set_active_device(encoder);
	drm_mode_set_crtcinfo(adjusted_mode, 0);

	/* hw bug */
	if ((mode->flags & DRM_MODE_FLAG_INTERLACE)
	    && (mode->crtc_vsync_start < (mode->crtc_vdisplay + 2)))
		adjusted_mode->crtc_vsync_start = adjusted_mode->crtc_vdisplay + 2;

	/* vertical FP must be at least 1 */
	if (mode->crtc_vsync_start == mode->crtc_vdisplay)
		adjusted_mode->crtc_vsync_start++;

	/* get the native mode for scaling */
	if (amdgpu_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT))
		amdgpu_panel_mode_fixup(encoder, adjusted_mode);
	else if (amdgpu_encoder->rmx_type != RMX_OFF)
		amdgpu_panel_mode_fixup(encoder, adjusted_mode);

	if ((amdgpu_encoder->active_device & (ATOM_DEVICE_DFP_SUPPORT | ATOM_DEVICE_LCD_SUPPORT)) ||
	    (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) {
		struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
		amdgpu_atombios_dp_set_link_config(connector, adjusted_mode);
	}

	return true;
}

static void
amdgpu_atombios_encoder_setup_dac(struct drm_encoder *encoder, int action)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	DAC_ENCODER_CONTROL_PS_ALLOCATION args;
	int index = 0;

	memset(&args, 0, sizeof(args));

	switch (amdgpu_encoder->encoder_id) {
	case ENCODER_OBJECT_ID_INTERNAL_DAC1:
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
		index = GetIndexIntoMasterTable(COMMAND, DAC1EncoderControl);
		break;
	case ENCODER_OBJECT_ID_INTERNAL_DAC2:
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
		index = GetIndexIntoMasterTable(COMMAND, DAC2EncoderControl);
		break;
	}

	args.ucAction = action;
	args.ucDacStandard = ATOM_DAC1_PS2;
	args.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);

}

static u8 amdgpu_atombios_encoder_get_bpc(struct drm_encoder *encoder)
{
	int bpc = 8;

	if (encoder->crtc) {
		struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
		bpc = amdgpu_crtc->bpc;
	}

	switch (bpc) {
	case 0:
		return PANEL_BPC_UNDEFINE;
	case 6:
		return PANEL_6BIT_PER_COLOR;
	case 8:
	default:
		return PANEL_8BIT_PER_COLOR;
	case 10:
		return PANEL_10BIT_PER_COLOR;
	case 12:
		return PANEL_12BIT_PER_COLOR;
	case 16:
		return PANEL_16BIT_PER_COLOR;
	}
}

union dvo_encoder_control {
	ENABLE_EXTERNAL_TMDS_ENCODER_PS_ALLOCATION ext_tmds;
	DVO_ENCODER_CONTROL_PS_ALLOCATION dvo;
	DVO_ENCODER_CONTROL_PS_ALLOCATION_V3 dvo_v3;
	DVO_ENCODER_CONTROL_PS_ALLOCATION_V1_4 dvo_v4;
};

static void
amdgpu_atombios_encoder_setup_dvo(struct drm_encoder *encoder, int action)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	union dvo_encoder_control args;
	int index = GetIndexIntoMasterTable(COMMAND, DVOEncoderControl);
	uint8_t frev, crev;

	memset(&args, 0, sizeof(args));

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		switch (crev) {
		case 1:
			/* R4xx, R5xx */
			args.ext_tmds.sXTmdsEncoder.ucEnable = action;

			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.ext_tmds.sXTmdsEncoder.ucMisc |= PANEL_ENCODER_MISC_DUAL;

			args.ext_tmds.sXTmdsEncoder.ucMisc |= ATOM_PANEL_MISC_888RGB;
			break;
		case 2:
			/* RS600/690/740 */
			args.dvo.sDVOEncoder.ucAction = action;
			args.dvo.sDVOEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			/* DFP1, CRT1, TV1 depending on the type of port */
			args.dvo.sDVOEncoder.ucDeviceType = ATOM_DEVICE_DFP1_INDEX;

			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.dvo.sDVOEncoder.usDevAttr.sDigAttrib.ucAttribute |= PANEL_ENCODER_MISC_DUAL;
			break;
		case 3:
			/* R6xx */
			args.dvo_v3.ucAction = action;
			args.dvo_v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.dvo_v3.ucDVOConfig = 0; /* XXX */
			break;
		case 4:
			/* DCE8 */
			args.dvo_v4.ucAction = action;
			args.dvo_v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			args.dvo_v4.ucDVOConfig = 0; /* XXX */
			args.dvo_v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			break;
		default:
			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
			break;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
		break;
	}

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
}

int amdgpu_atombios_encoder_get_encoder_mode(struct drm_encoder *encoder)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct drm_connector *connector;
	struct amdgpu_connector *amdgpu_connector;
	struct amdgpu_connector_atom_dig *dig_connector;

	/* dp bridges are always DP */
	if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)
		return ATOM_ENCODER_MODE_DP;

	/* DVO is always DVO */
	if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DVO1) ||
	    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1))
		return ATOM_ENCODER_MODE_DVO;

	connector = amdgpu_get_connector_for_encoder(encoder);
	/* if we don't have an active device yet, just use one of
	 * the connectors tied to the encoder.
	 */
	if (!connector)
		connector = amdgpu_get_connector_for_encoder_init(encoder);
	amdgpu_connector = to_amdgpu_connector(connector);

	switch (connector->connector_type) {
	case DRM_MODE_CONNECTOR_DVII:
	case DRM_MODE_CONNECTOR_HDMIB: /* HDMI-B is basically DL-DVI; analog works fine */
		if (amdgpu_audio != 0) {
			if (amdgpu_connector->use_digital &&
			    (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE))
				return ATOM_ENCODER_MODE_HDMI;
			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
				return ATOM_ENCODER_MODE_HDMI;
			else if (amdgpu_connector->use_digital)
				return ATOM_ENCODER_MODE_DVI;
			else
				return ATOM_ENCODER_MODE_CRT;
		} else if (amdgpu_connector->use_digital) {
			return ATOM_ENCODER_MODE_DVI;
		} else {
			return ATOM_ENCODER_MODE_CRT;
		}
		break;
	case DRM_MODE_CONNECTOR_DVID:
	case DRM_MODE_CONNECTOR_HDMIA:
	default:
		if (amdgpu_audio != 0) {
			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
				return ATOM_ENCODER_MODE_HDMI;
			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
				return ATOM_ENCODER_MODE_HDMI;
			else
				return ATOM_ENCODER_MODE_DVI;
		} else {
			return ATOM_ENCODER_MODE_DVI;
		}
		break;
	case DRM_MODE_CONNECTOR_LVDS:
		return ATOM_ENCODER_MODE_LVDS;
		break;
	case DRM_MODE_CONNECTOR_DisplayPort:
		dig_connector = amdgpu_connector->con_priv;
		if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
		    (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
			return ATOM_ENCODER_MODE_DP;
		} else if (amdgpu_audio != 0) {
			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
				return ATOM_ENCODER_MODE_HDMI;
			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
				return ATOM_ENCODER_MODE_HDMI;
			else
				return ATOM_ENCODER_MODE_DVI;
		} else {
			return ATOM_ENCODER_MODE_DVI;
		}
		break;
	case DRM_MODE_CONNECTOR_eDP:
		return ATOM_ENCODER_MODE_DP;
	case DRM_MODE_CONNECTOR_DVIA:
	case DRM_MODE_CONNECTOR_VGA:
		return ATOM_ENCODER_MODE_CRT;
		break;
	case DRM_MODE_CONNECTOR_Composite:
	case DRM_MODE_CONNECTOR_SVIDEO:
	case DRM_MODE_CONNECTOR_9PinDIN:
		/* fix me */
		return ATOM_ENCODER_MODE_TV;
		/*return ATOM_ENCODER_MODE_CV;*/
		break;
	}
}

/*
 * DIG Encoder/Transmitter Setup
 *
 * DCE 6.0
 * - 3 DIG transmitter blocks UNIPHY0/1/2 (links A and B).
 * Supports up to 6 digital outputs
 * - 6 DIG encoder blocks.
 * - DIG to PHY mapping is hardcoded
 * DIG1 drives UNIPHY0 link A, A+B
 * DIG2 drives UNIPHY0 link B
 * DIG3 drives UNIPHY1 link A, A+B
 * DIG4 drives UNIPHY1 link B
 * DIG5 drives UNIPHY2 link A, A+B
 * DIG6 drives UNIPHY2 link B
 *
 * Routing
 * crtc -> dig encoder -> UNIPHY/LVTMA (1 or 2 links)
 * Examples:
 * crtc0 -> dig2 -> LVTMA links A+B -> TMDS/HDMI
 * crtc1 -> dig1 -> UNIPHY0 link B -> DP
 * crtc0 -> dig1 -> UNIPHY2 link A -> LVDS
 * crtc1 -> dig2 -> UNIPHY1 link B+A -> TMDS/HDMI
 */

union dig_encoder_control {
	DIG_ENCODER_CONTROL_PS_ALLOCATION v1;
	DIG_ENCODER_CONTROL_PARAMETERS_V2 v2;
	DIG_ENCODER_CONTROL_PARAMETERS_V3 v3;
	DIG_ENCODER_CONTROL_PARAMETERS_V4 v4;
	DIG_ENCODER_CONTROL_PARAMETERS_V5 v5;
};

void
amdgpu_atombios_encoder_setup_dig_encoder(struct drm_encoder *encoder,
					  int action, int panel_mode)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
	union dig_encoder_control args;
	int index = GetIndexIntoMasterTable(COMMAND, DIGxEncoderControl);
	uint8_t frev, crev;
	int dp_clock = 0;
	int dp_lane_count = 0;
	int hpd_id = AMDGPU_HPD_NONE;

	if (connector) {
		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
		struct amdgpu_connector_atom_dig *dig_connector =
			amdgpu_connector->con_priv;

		dp_clock = dig_connector->dp_clock;
		dp_lane_count = dig_connector->dp_lane_count;
		hpd_id = amdgpu_connector->hpd.hpd;
	}

	/* no dig encoder assigned */
	if (dig->dig_encoder == -1)
		return;

	memset(&args, 0, sizeof(args));

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
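	/* the DIGxEncoderControl argument layout differs per table revision */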
	case 1:
		switch (crev) {
		case 1:
			args.v1.ucAction = action;
			args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
				args.v3.ucPanelMode = panel_mode;
			else
				args.v1.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode))
				args.v1.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v1.ucLaneNum = 8;
			else
				args.v1.ucLaneNum = 4;

			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode) && (dp_clock == 270000))
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER2;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER3;
				break;
			}
			if (dig->linkb)
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKB;
			else
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKA;
			break;
		case 2:
		case 3:
			args.v3.ucAction = action;
			args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
				args.v3.ucPanelMode = panel_mode;
			else
				args.v3.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode))
				args.v3.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v3.ucLaneNum = 8;
			else
				args.v3.ucLaneNum = 4;

			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode) && (dp_clock == 270000))
				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
			args.v3.acConfig.ucDigSel = dig->dig_encoder;
			args.v3.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			break;
		case 4:
			args.v4.ucAction = action;
			args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
				args.v4.ucPanelMode = panel_mode;
			else
				args.v4.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);

			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode))
				args.v4.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v4.ucLaneNum = 8;
			else
				args.v4.ucLaneNum = 4;

			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode)) {
				if (dp_clock == 540000)
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_5_40GHZ;
				else if (dp_clock == 324000)
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_3_24GHZ;
				else if (dp_clock == 270000)
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_2_70GHZ;
				else
					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_1_62GHZ;
			}
			args.v4.acConfig.ucDigSel = dig->dig_encoder;
			args.v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
			if (hpd_id == AMDGPU_HPD_NONE)
				args.v4.ucHPD_ID = 0;
			else
				args.v4.ucHPD_ID = hpd_id + 1;
			break;
		case 5:
			switch (action) {
			case ATOM_ENCODER_CMD_SETUP_PANEL_MODE:
				args.v5.asDPPanelModeParam.ucAction = action;
				args.v5.asDPPanelModeParam.ucPanelMode = panel_mode;
				args.v5.asDPPanelModeParam.ucDigId = dig->dig_encoder;
				break;
			case ATOM_ENCODER_CMD_STREAM_SETUP:
				args.v5.asStreamParam.ucAction = action;
				args.v5.asStreamParam.ucDigId = dig->dig_encoder;
				args.v5.asStreamParam.ucDigMode =
					amdgpu_atombios_encoder_get_encoder_mode(encoder);
				if (ENCODER_MODE_IS_DP(args.v5.asStreamParam.ucDigMode))
					args.v5.asStreamParam.ucLaneNum = dp_lane_count;
				else if (amdgpu_dig_monitor_is_duallink(encoder,
									amdgpu_encoder->pixel_clock))
					args.v5.asStreamParam.ucLaneNum = 8;
				else
					args.v5.asStreamParam.ucLaneNum = 4;
				args.v5.asStreamParam.ulPixelClock =
					cpu_to_le32(amdgpu_encoder->pixel_clock / 10);
				args.v5.asStreamParam.ucBitPerColor =
					amdgpu_atombios_encoder_get_bpc(encoder);
				args.v5.asStreamParam.ucLinkRateIn270Mhz = dp_clock / 27000;
				break;
			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_START:
			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1:
			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2:
			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3:
			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN4:
			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE:
			case ATOM_ENCODER_CMD_DP_VIDEO_OFF:
			case ATOM_ENCODER_CMD_DP_VIDEO_ON:
				args.v5.asCmdParam.ucAction = action;
				args.v5.asCmdParam.ucDigId = dig->dig_encoder;
				break;
			default:
				DRM_ERROR("Unsupported action 0x%x\n", action);
				break;
			}
			break;
		default:
			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
			break;
		}
		break;
	default:
		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
		break;
	}

	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);

}

union dig_transmitter_control {
	DIG_TRANSMITTER_CONTROL_PS_ALLOCATION v1;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V2 v2;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V3 v3;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V4 v4;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_5 v5;
	DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_6 v6;
};

void
amdgpu_atombios_encoder_setup_dig_transmitter(struct drm_encoder *encoder, int action,
					      uint8_t lane_num, uint8_t lane_set)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
	struct drm_connector *connector;
	union dig_transmitter_control args;
	int index = 0;
	uint8_t frev, crev;
	bool is_dp = false;
	int pll_id = 0;
	int dp_clock = 0;
	int dp_lane_count = 0;
	int connector_object_id = 0;
	int igp_lane_info = 0;
	int dig_encoder = dig->dig_encoder;
	int hpd_id = AMDGPU_HPD_NONE;

	if (action == ATOM_TRANSMITTER_ACTION_INIT) {
		connector = amdgpu_get_connector_for_encoder_init(encoder);
		/* just needed to avoid bailing in the encoder check.  the encoder
		 * isn't used for init
		 */
		dig_encoder = 0;
	} else
		connector = amdgpu_get_connector_for_encoder(encoder);

	if (connector) {
		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
		struct amdgpu_connector_atom_dig *dig_connector =
			amdgpu_connector->con_priv;

		hpd_id = amdgpu_connector->hpd.hpd;
		dp_clock = dig_connector->dp_clock;
		dp_lane_count = dig_connector->dp_lane_count;
		connector_object_id =
			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
	}

	if (encoder->crtc) {
		struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
		pll_id = amdgpu_crtc->pll_id;
	}

	/* no dig encoder assigned */
	if (dig_encoder == -1)
		return;

	if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)))
		is_dp = true;

	memset(&args, 0, sizeof(args));

	switch (amdgpu_encoder->encoder_id) {
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
		index = GetIndexIntoMasterTable(COMMAND, DVOOutputControl);
		break;
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
		index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
		break;
	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
		index = GetIndexIntoMasterTable(COMMAND, LVTMATransmitterControl);
		break;
	}

	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
		return;

	switch (frev) {
	case 1:
		switch (crev) {
		case 1:
			args.v1.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v1.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
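				/* for SETUP_VSEMPH, lane_num selects the lane(s) and lane_set
				 * carries the DP voltage swing / pre-emphasis level
				 */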
				args.v1.asMode.ucLaneSel = lane_num;
				args.v1.asMode.ucLaneSet = lane_set;
			} else {
				if (is_dp)
					args.v1.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v1.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}

			args.v1.ucConfig = ATOM_TRANSMITTER_CONFIG_CLKSRC_PPLL;

			if (dig_encoder)
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG2_ENCODER;
			else
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG1_ENCODER;

			if ((adev->flags & AMD_IS_APU) &&
			    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_UNIPHY)) {
				if (is_dp ||
				    !amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) {
					if (igp_lane_info & 0x1)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_3;
					else if (igp_lane_info & 0x2)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_4_7;
					else if (igp_lane_info & 0x4)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_11;
					else if (igp_lane_info & 0x8)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_12_15;
				} else {
					if (igp_lane_info & 0x3)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_7;
					else if (igp_lane_info & 0xc)
						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_15;
				}
			}

			if (dig->linkb)
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKB;
			else
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKA;

			if (is_dp)
				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_8LANE_LINK;
			}
			break;
		case 2:
			args.v2.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v2.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v2.asMode.ucLaneSel = lane_num;
				args.v2.asMode.ucLaneSet = lane_set;
			} else {
				if (is_dp)
					args.v2.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v2.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v2.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}

			args.v2.acConfig.ucEncoderSel = dig_encoder;
			if (dig->linkb)
				args.v2.acConfig.ucLinkSel = 1;

			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v2.acConfig.ucTransmitterSel = 0;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				args.v2.acConfig.ucTransmitterSel = 1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v2.acConfig.ucTransmitterSel = 2;
				break;
			}

			if (is_dp) {
				args.v2.acConfig.fCoherentMode = 1;
				args.v2.acConfig.fDPConnector = 1;
			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v2.acConfig.fCoherentMode = 1;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v2.acConfig.fDualLinkConnector = 1;
			}
			break;
		case 3:
			args.v3.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v3.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v3.asMode.ucLaneSel = lane_num;
				args.v3.asMode.ucLaneSet = lane_set;
			} else {
				if (is_dp)
					args.v3.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v3.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}

			if (is_dp)
				args.v3.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v3.ucLaneNum = 8;
			else
				args.v3.ucLaneNum = 4;

			if (dig->linkb)
				args.v3.acConfig.ucLinkSel = 1;
			if (dig_encoder & 1)
				args.v3.acConfig.ucEncoderSel = 1;

			/* Select the PLL for the PHY
			 * DP PHY should be clocked from external src if there is
			 * one.
			 */
			/* On DCE4, if there is an external clock, it generates the DP ref clock */
			if (is_dp && adev->clock.dp_extclk)
				args.v3.acConfig.ucRefClkSource = 2; /* external src */
			else
				args.v3.acConfig.ucRefClkSource = pll_id;

			switch (amdgpu_encoder->encoder_id) {
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
				args.v3.acConfig.ucTransmitterSel = 0;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
				args.v3.acConfig.ucTransmitterSel = 1;
				break;
			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
				args.v3.acConfig.ucTransmitterSel = 2;
				break;
			}

			if (is_dp)
				args.v3.acConfig.fCoherentMode = 1; /* DP requires coherent */
			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
				if (dig->coherent_mode)
					args.v3.acConfig.fCoherentMode = 1;
				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v3.acConfig.fDualLinkConnector = 1;
			}
			break;
		case 4:
			args.v4.ucAction = action;
			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
				args.v4.usInitInfo = cpu_to_le16(connector_object_id);
			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
				args.v4.asMode.ucLaneSel = lane_num;
				args.v4.asMode.ucLaneSet = lane_set;
			} else {
				if (is_dp)
					args.v4.usPixelClock = cpu_to_le16(dp_clock / 10);
				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
					args.v4.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
				else
					args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
			}

			if (is_dp)
				args.v4.ucLaneNum = dp_lane_count;
			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
				args.v4.ucLaneNum = 8;
Zigachev else 1013*b843c749SSergey Zigachev args.v4.ucLaneNum = 4; 1014*b843c749SSergey Zigachev 1015*b843c749SSergey Zigachev if (dig->linkb) 1016*b843c749SSergey Zigachev args.v4.acConfig.ucLinkSel = 1; 1017*b843c749SSergey Zigachev if (dig_encoder & 1) 1018*b843c749SSergey Zigachev args.v4.acConfig.ucEncoderSel = 1; 1019*b843c749SSergey Zigachev 1020*b843c749SSergey Zigachev /* Select the PLL for the PHY 1021*b843c749SSergey Zigachev * DP PHY should be clocked from external src if there is 1022*b843c749SSergey Zigachev * one. 1023*b843c749SSergey Zigachev */ 1024*b843c749SSergey Zigachev /* On DCE5 DCPLL usually generates the DP ref clock */ 1025*b843c749SSergey Zigachev if (is_dp) { 1026*b843c749SSergey Zigachev if (adev->clock.dp_extclk) 1027*b843c749SSergey Zigachev args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_EXTCLK; 1028*b843c749SSergey Zigachev else 1029*b843c749SSergey Zigachev args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_DCPLL; 1030*b843c749SSergey Zigachev } else 1031*b843c749SSergey Zigachev args.v4.acConfig.ucRefClkSource = pll_id; 1032*b843c749SSergey Zigachev 1033*b843c749SSergey Zigachev switch (amdgpu_encoder->encoder_id) { 1034*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 1035*b843c749SSergey Zigachev args.v4.acConfig.ucTransmitterSel = 0; 1036*b843c749SSergey Zigachev break; 1037*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 1038*b843c749SSergey Zigachev args.v4.acConfig.ucTransmitterSel = 1; 1039*b843c749SSergey Zigachev break; 1040*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 1041*b843c749SSergey Zigachev args.v4.acConfig.ucTransmitterSel = 2; 1042*b843c749SSergey Zigachev break; 1043*b843c749SSergey Zigachev } 1044*b843c749SSergey Zigachev 1045*b843c749SSergey Zigachev if (is_dp) 1046*b843c749SSergey Zigachev args.v4.acConfig.fCoherentMode = 1; /* DP requires coherent */ 1047*b843c749SSergey Zigachev else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) { 1048*b843c749SSergey Zigachev if (dig->coherent_mode) 1049*b843c749SSergey Zigachev args.v4.acConfig.fCoherentMode = 1; 1050*b843c749SSergey Zigachev if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 1051*b843c749SSergey Zigachev args.v4.acConfig.fDualLinkConnector = 1; 1052*b843c749SSergey Zigachev } 1053*b843c749SSergey Zigachev break; 1054*b843c749SSergey Zigachev case 5: 1055*b843c749SSergey Zigachev args.v5.ucAction = action; 1056*b843c749SSergey Zigachev if (is_dp) 1057*b843c749SSergey Zigachev args.v5.usSymClock = cpu_to_le16(dp_clock / 10); 1058*b843c749SSergey Zigachev else 1059*b843c749SSergey Zigachev args.v5.usSymClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10); 1060*b843c749SSergey Zigachev 1061*b843c749SSergey Zigachev switch (amdgpu_encoder->encoder_id) { 1062*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 1063*b843c749SSergey Zigachev if (dig->linkb) 1064*b843c749SSergey Zigachev args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYB; 1065*b843c749SSergey Zigachev else 1066*b843c749SSergey Zigachev args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYA; 1067*b843c749SSergey Zigachev break; 1068*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 1069*b843c749SSergey Zigachev if (dig->linkb) 1070*b843c749SSergey Zigachev args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYD; 1071*b843c749SSergey Zigachev else 1072*b843c749SSergey Zigachev args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYC; 1073*b843c749SSergey Zigachev break; 1074*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 1075*b843c749SSergey 
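/* Editor's note (not original source): from v5 onward the transmitter table
 * takes an explicit PHY id rather than a transmitter/link pair. Each UNIPHY
 * encoder block drives two PHYs, selected here by dig->linkb:
 *   UNIPHY -> A/B, UNIPHY1 -> C/D, UNIPHY2 -> E/F, UNIPHY3 -> G. */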
Zigachev if (dig->linkb) 1076*b843c749SSergey Zigachev args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYF; 1077*b843c749SSergey Zigachev else 1078*b843c749SSergey Zigachev args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYE; 1079*b843c749SSergey Zigachev break; 1080*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3: 1081*b843c749SSergey Zigachev args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYG; 1082*b843c749SSergey Zigachev break; 1083*b843c749SSergey Zigachev } 1084*b843c749SSergey Zigachev if (is_dp) 1085*b843c749SSergey Zigachev args.v5.ucLaneNum = dp_lane_count; 1086*b843c749SSergey Zigachev else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 1087*b843c749SSergey Zigachev args.v5.ucLaneNum = 8; 1088*b843c749SSergey Zigachev else 1089*b843c749SSergey Zigachev args.v5.ucLaneNum = 4; 1090*b843c749SSergey Zigachev args.v5.ucConnObjId = connector_object_id; 1091*b843c749SSergey Zigachev args.v5.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); 1092*b843c749SSergey Zigachev 1093*b843c749SSergey Zigachev if (is_dp && adev->clock.dp_extclk) 1094*b843c749SSergey Zigachev args.v5.asConfig.ucPhyClkSrcId = ENCODER_REFCLK_SRC_EXTCLK; 1095*b843c749SSergey Zigachev else 1096*b843c749SSergey Zigachev args.v5.asConfig.ucPhyClkSrcId = pll_id; 1097*b843c749SSergey Zigachev 1098*b843c749SSergey Zigachev if (is_dp) 1099*b843c749SSergey Zigachev args.v5.asConfig.ucCoherentMode = 1; /* DP requires coherent */ 1100*b843c749SSergey Zigachev else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) { 1101*b843c749SSergey Zigachev if (dig->coherent_mode) 1102*b843c749SSergey Zigachev args.v5.asConfig.ucCoherentMode = 1; 1103*b843c749SSergey Zigachev } 1104*b843c749SSergey Zigachev if (hpd_id == AMDGPU_HPD_NONE) 1105*b843c749SSergey Zigachev args.v5.asConfig.ucHPDSel = 0; 1106*b843c749SSergey Zigachev else 1107*b843c749SSergey Zigachev args.v5.asConfig.ucHPDSel = hpd_id + 1; 1108*b843c749SSergey Zigachev args.v5.ucDigEncoderSel = 1 << dig_encoder; 1109*b843c749SSergey Zigachev args.v5.ucDPLaneSet = lane_set; 1110*b843c749SSergey Zigachev break; 1111*b843c749SSergey Zigachev case 6: 1112*b843c749SSergey Zigachev args.v6.ucAction = action; 1113*b843c749SSergey Zigachev if (is_dp) 1114*b843c749SSergey Zigachev args.v6.ulSymClock = cpu_to_le32(dp_clock / 10); 1115*b843c749SSergey Zigachev else 1116*b843c749SSergey Zigachev args.v6.ulSymClock = cpu_to_le32(amdgpu_encoder->pixel_clock / 10); 1117*b843c749SSergey Zigachev 1118*b843c749SSergey Zigachev switch (amdgpu_encoder->encoder_id) { 1119*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 1120*b843c749SSergey Zigachev if (dig->linkb) 1121*b843c749SSergey Zigachev args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYB; 1122*b843c749SSergey Zigachev else 1123*b843c749SSergey Zigachev args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYA; 1124*b843c749SSergey Zigachev break; 1125*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 1126*b843c749SSergey Zigachev if (dig->linkb) 1127*b843c749SSergey Zigachev args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYD; 1128*b843c749SSergey Zigachev else 1129*b843c749SSergey Zigachev args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYC; 1130*b843c749SSergey Zigachev break; 1131*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 1132*b843c749SSergey Zigachev if (dig->linkb) 1133*b843c749SSergey Zigachev args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYF; 1134*b843c749SSergey Zigachev else 1135*b843c749SSergey Zigachev args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYE; 1136*b843c749SSergey Zigachev break; 1137*b843c749SSergey Zigachev case 
ENCODER_OBJECT_ID_INTERNAL_UNIPHY3: 1138*b843c749SSergey Zigachev args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYG; 1139*b843c749SSergey Zigachev break; 1140*b843c749SSergey Zigachev } 1141*b843c749SSergey Zigachev if (is_dp) 1142*b843c749SSergey Zigachev args.v6.ucLaneNum = dp_lane_count; 1143*b843c749SSergey Zigachev else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 1144*b843c749SSergey Zigachev args.v6.ucLaneNum = 8; 1145*b843c749SSergey Zigachev else 1146*b843c749SSergey Zigachev args.v6.ucLaneNum = 4; 1147*b843c749SSergey Zigachev args.v6.ucConnObjId = connector_object_id; 1148*b843c749SSergey Zigachev if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) 1149*b843c749SSergey Zigachev args.v6.ucDPLaneSet = lane_set; 1150*b843c749SSergey Zigachev else 1151*b843c749SSergey Zigachev args.v6.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); 1152*b843c749SSergey Zigachev 1153*b843c749SSergey Zigachev if (hpd_id == AMDGPU_HPD_NONE) 1154*b843c749SSergey Zigachev args.v6.ucHPDSel = 0; 1155*b843c749SSergey Zigachev else 1156*b843c749SSergey Zigachev args.v6.ucHPDSel = hpd_id + 1; 1157*b843c749SSergey Zigachev args.v6.ucDigEncoderSel = 1 << dig_encoder; 1158*b843c749SSergey Zigachev break; 1159*b843c749SSergey Zigachev default: 1160*b843c749SSergey Zigachev DRM_ERROR("Unknown table version %d, %d\n", frev, crev); 1161*b843c749SSergey Zigachev break; 1162*b843c749SSergey Zigachev } 1163*b843c749SSergey Zigachev break; 1164*b843c749SSergey Zigachev default: 1165*b843c749SSergey Zigachev DRM_ERROR("Unknown table version %d, %d\n", frev, crev); 1166*b843c749SSergey Zigachev break; 1167*b843c749SSergey Zigachev } 1168*b843c749SSergey Zigachev 1169*b843c749SSergey Zigachev amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args); 1170*b843c749SSergey Zigachev } 1171*b843c749SSergey Zigachev 1172*b843c749SSergey Zigachev bool 1173*b843c749SSergey Zigachev amdgpu_atombios_encoder_set_edp_panel_power(struct drm_connector *connector, 1174*b843c749SSergey Zigachev int action) 1175*b843c749SSergey Zigachev { 1176*b843c749SSergey Zigachev struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector); 1177*b843c749SSergey Zigachev struct drm_device *dev = amdgpu_connector->base.dev; 1178*b843c749SSergey Zigachev struct amdgpu_device *adev = dev->dev_private; 1179*b843c749SSergey Zigachev union dig_transmitter_control args; 1180*b843c749SSergey Zigachev int index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl); 1181*b843c749SSergey Zigachev uint8_t frev, crev; 1182*b843c749SSergey Zigachev 1183*b843c749SSergey Zigachev if (connector->connector_type != DRM_MODE_CONNECTOR_eDP) 1184*b843c749SSergey Zigachev goto done; 1185*b843c749SSergey Zigachev 1186*b843c749SSergey Zigachev if ((action != ATOM_TRANSMITTER_ACTION_POWER_ON) && 1187*b843c749SSergey Zigachev (action != ATOM_TRANSMITTER_ACTION_POWER_OFF)) 1188*b843c749SSergey Zigachev goto done; 1189*b843c749SSergey Zigachev 1190*b843c749SSergey Zigachev if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev)) 1191*b843c749SSergey Zigachev goto done; 1192*b843c749SSergey Zigachev 1193*b843c749SSergey Zigachev memset(&args, 0, sizeof(args)); 1194*b843c749SSergey Zigachev 1195*b843c749SSergey Zigachev args.v1.ucAction = action; 1196*b843c749SSergey Zigachev 1197*b843c749SSergey Zigachev amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args); 1198*b843c749SSergey Zigachev 1199*b843c749SSergey Zigachev /* wait for the panel to power 
up */ 1200*b843c749SSergey Zigachev if (action == ATOM_TRANSMITTER_ACTION_POWER_ON) { 1201*b843c749SSergey Zigachev int i; 1202*b843c749SSergey Zigachev 1203*b843c749SSergey Zigachev for (i = 0; i < 300; i++) { 1204*b843c749SSergey Zigachev if (amdgpu_display_hpd_sense(adev, amdgpu_connector->hpd.hpd)) 1205*b843c749SSergey Zigachev return true; 1206*b843c749SSergey Zigachev mdelay(1); 1207*b843c749SSergey Zigachev } 1208*b843c749SSergey Zigachev return false; 1209*b843c749SSergey Zigachev } 1210*b843c749SSergey Zigachev done: 1211*b843c749SSergey Zigachev return true; 1212*b843c749SSergey Zigachev } 1213*b843c749SSergey Zigachev 1214*b843c749SSergey Zigachev union external_encoder_control { 1215*b843c749SSergey Zigachev EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION v1; 1216*b843c749SSergey Zigachev EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION_V3 v3; 1217*b843c749SSergey Zigachev }; 1218*b843c749SSergey Zigachev 1219*b843c749SSergey Zigachev static void 1220*b843c749SSergey Zigachev amdgpu_atombios_encoder_setup_external_encoder(struct drm_encoder *encoder, 1221*b843c749SSergey Zigachev struct drm_encoder *ext_encoder, 1222*b843c749SSergey Zigachev int action) 1223*b843c749SSergey Zigachev { 1224*b843c749SSergey Zigachev struct drm_device *dev = encoder->dev; 1225*b843c749SSergey Zigachev struct amdgpu_device *adev = dev->dev_private; 1226*b843c749SSergey Zigachev struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1227*b843c749SSergey Zigachev struct amdgpu_encoder *ext_amdgpu_encoder = to_amdgpu_encoder(ext_encoder); 1228*b843c749SSergey Zigachev union external_encoder_control args; 1229*b843c749SSergey Zigachev struct drm_connector *connector; 1230*b843c749SSergey Zigachev int index = GetIndexIntoMasterTable(COMMAND, ExternalEncoderControl); 1231*b843c749SSergey Zigachev u8 frev, crev; 1232*b843c749SSergey Zigachev int dp_clock = 0; 1233*b843c749SSergey Zigachev int dp_lane_count = 0; 1234*b843c749SSergey Zigachev int connector_object_id = 0; 1235*b843c749SSergey Zigachev u32 ext_enum = (ext_amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT; 1236*b843c749SSergey Zigachev 1237*b843c749SSergey Zigachev if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT) 1238*b843c749SSergey Zigachev connector = amdgpu_get_connector_for_encoder_init(encoder); 1239*b843c749SSergey Zigachev else 1240*b843c749SSergey Zigachev connector = amdgpu_get_connector_for_encoder(encoder); 1241*b843c749SSergey Zigachev 1242*b843c749SSergey Zigachev if (connector) { 1243*b843c749SSergey Zigachev struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector); 1244*b843c749SSergey Zigachev struct amdgpu_connector_atom_dig *dig_connector = 1245*b843c749SSergey Zigachev amdgpu_connector->con_priv; 1246*b843c749SSergey Zigachev 1247*b843c749SSergey Zigachev dp_clock = dig_connector->dp_clock; 1248*b843c749SSergey Zigachev dp_lane_count = dig_connector->dp_lane_count; 1249*b843c749SSergey Zigachev connector_object_id = 1250*b843c749SSergey Zigachev (amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT; 1251*b843c749SSergey Zigachev } 1252*b843c749SSergey Zigachev 1253*b843c749SSergey Zigachev memset(&args, 0, sizeof(args)); 1254*b843c749SSergey Zigachev 1255*b843c749SSergey Zigachev if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev)) 1256*b843c749SSergey Zigachev return; 1257*b843c749SSergey Zigachev 1258*b843c749SSergey Zigachev switch (frev) { 1259*b843c749SSergey Zigachev case 1: 1260*b843c749SSergey Zigachev /* no params on frev 1 */ 
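/* Editor's note (not original source): ExternalEncoderControl programs an
 * off-chip encoder hanging off the DIG block (typically a DP bridge). The
 * later revisions below derive the DP link-rate config bit from dp_clock
 * (270000 -> 2.70 GHz, 540000 -> 5.40 GHz), pick the lane count the same way
 * as the transmitter table, and select the physical encoder instance from
 * the object enum id (ENCODER1/2/3). */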
1261*b843c749SSergey Zigachev break; 1262*b843c749SSergey Zigachev case 2: 1263*b843c749SSergey Zigachev switch (crev) { 1264*b843c749SSergey Zigachev case 1: 1265*b843c749SSergey Zigachev case 2: 1266*b843c749SSergey Zigachev args.v1.sDigEncoder.ucAction = action; 1267*b843c749SSergey Zigachev args.v1.sDigEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10); 1268*b843c749SSergey Zigachev args.v1.sDigEncoder.ucEncoderMode = 1269*b843c749SSergey Zigachev amdgpu_atombios_encoder_get_encoder_mode(encoder); 1270*b843c749SSergey Zigachev 1271*b843c749SSergey Zigachev if (ENCODER_MODE_IS_DP(args.v1.sDigEncoder.ucEncoderMode)) { 1272*b843c749SSergey Zigachev if (dp_clock == 270000) 1273*b843c749SSergey Zigachev args.v1.sDigEncoder.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ; 1274*b843c749SSergey Zigachev args.v1.sDigEncoder.ucLaneNum = dp_lane_count; 1275*b843c749SSergey Zigachev } else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 1276*b843c749SSergey Zigachev args.v1.sDigEncoder.ucLaneNum = 8; 1277*b843c749SSergey Zigachev else 1278*b843c749SSergey Zigachev args.v1.sDigEncoder.ucLaneNum = 4; 1279*b843c749SSergey Zigachev break; 1280*b843c749SSergey Zigachev case 3: 1281*b843c749SSergey Zigachev args.v3.sExtEncoder.ucAction = action; 1282*b843c749SSergey Zigachev if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT) 1283*b843c749SSergey Zigachev args.v3.sExtEncoder.usConnectorId = cpu_to_le16(connector_object_id); 1284*b843c749SSergey Zigachev else 1285*b843c749SSergey Zigachev args.v3.sExtEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10); 1286*b843c749SSergey Zigachev args.v3.sExtEncoder.ucEncoderMode = 1287*b843c749SSergey Zigachev amdgpu_atombios_encoder_get_encoder_mode(encoder); 1288*b843c749SSergey Zigachev 1289*b843c749SSergey Zigachev if (ENCODER_MODE_IS_DP(args.v3.sExtEncoder.ucEncoderMode)) { 1290*b843c749SSergey Zigachev if (dp_clock == 270000) 1291*b843c749SSergey Zigachev args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ; 1292*b843c749SSergey Zigachev else if (dp_clock == 540000) 1293*b843c749SSergey Zigachev args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_5_40GHZ; 1294*b843c749SSergey Zigachev args.v3.sExtEncoder.ucLaneNum = dp_lane_count; 1295*b843c749SSergey Zigachev } else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) 1296*b843c749SSergey Zigachev args.v3.sExtEncoder.ucLaneNum = 8; 1297*b843c749SSergey Zigachev else 1298*b843c749SSergey Zigachev args.v3.sExtEncoder.ucLaneNum = 4; 1299*b843c749SSergey Zigachev switch (ext_enum) { 1300*b843c749SSergey Zigachev case GRAPH_OBJECT_ENUM_ID1: 1301*b843c749SSergey Zigachev args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER1; 1302*b843c749SSergey Zigachev break; 1303*b843c749SSergey Zigachev case GRAPH_OBJECT_ENUM_ID2: 1304*b843c749SSergey Zigachev args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER2; 1305*b843c749SSergey Zigachev break; 1306*b843c749SSergey Zigachev case GRAPH_OBJECT_ENUM_ID3: 1307*b843c749SSergey Zigachev args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER3; 1308*b843c749SSergey Zigachev break; 1309*b843c749SSergey Zigachev } 1310*b843c749SSergey Zigachev args.v3.sExtEncoder.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder); 1311*b843c749SSergey Zigachev break; 1312*b843c749SSergey Zigachev default: 1313*b843c749SSergey Zigachev DRM_ERROR("Unknown table version: %d, %d\n", frev, crev); 1314*b843c749SSergey Zigachev 
return; 1315*b843c749SSergey Zigachev } 1316*b843c749SSergey Zigachev break; 1317*b843c749SSergey Zigachev default: 1318*b843c749SSergey Zigachev DRM_ERROR("Unknown table version: %d, %d\n", frev, crev); 1319*b843c749SSergey Zigachev return; 1320*b843c749SSergey Zigachev } 1321*b843c749SSergey Zigachev amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args); 1322*b843c749SSergey Zigachev } 1323*b843c749SSergey Zigachev 1324*b843c749SSergey Zigachev static void 1325*b843c749SSergey Zigachev amdgpu_atombios_encoder_setup_dig(struct drm_encoder *encoder, int action) 1326*b843c749SSergey Zigachev { 1327*b843c749SSergey Zigachev struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1328*b843c749SSergey Zigachev struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder); 1329*b843c749SSergey Zigachev struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv; 1330*b843c749SSergey Zigachev struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder); 1331*b843c749SSergey Zigachev struct amdgpu_connector *amdgpu_connector = NULL; 1332*b843c749SSergey Zigachev struct amdgpu_connector_atom_dig *amdgpu_dig_connector = NULL; 1333*b843c749SSergey Zigachev 1334*b843c749SSergey Zigachev if (connector) { 1335*b843c749SSergey Zigachev amdgpu_connector = to_amdgpu_connector(connector); 1336*b843c749SSergey Zigachev amdgpu_dig_connector = amdgpu_connector->con_priv; 1337*b843c749SSergey Zigachev } 1338*b843c749SSergey Zigachev 1339*b843c749SSergey Zigachev if (action == ATOM_ENABLE) { 1340*b843c749SSergey Zigachev if (!connector) 1341*b843c749SSergey Zigachev dig->panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE; 1342*b843c749SSergey Zigachev else 1343*b843c749SSergey Zigachev dig->panel_mode = amdgpu_atombios_dp_get_panel_mode(encoder, connector); 1344*b843c749SSergey Zigachev 1345*b843c749SSergey Zigachev /* setup and enable the encoder */ 1346*b843c749SSergey Zigachev amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_SETUP, 0); 1347*b843c749SSergey Zigachev amdgpu_atombios_encoder_setup_dig_encoder(encoder, 1348*b843c749SSergey Zigachev ATOM_ENCODER_CMD_SETUP_PANEL_MODE, 1349*b843c749SSergey Zigachev dig->panel_mode); 1350*b843c749SSergey Zigachev if (ext_encoder) 1351*b843c749SSergey Zigachev amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, 1352*b843c749SSergey Zigachev EXTERNAL_ENCODER_ACTION_V3_ENCODER_SETUP); 1353*b843c749SSergey Zigachev if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) && 1354*b843c749SSergey Zigachev connector) { 1355*b843c749SSergey Zigachev if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) { 1356*b843c749SSergey Zigachev amdgpu_atombios_encoder_set_edp_panel_power(connector, 1357*b843c749SSergey Zigachev ATOM_TRANSMITTER_ACTION_POWER_ON); 1358*b843c749SSergey Zigachev amdgpu_dig_connector->edp_on = true; 1359*b843c749SSergey Zigachev } 1360*b843c749SSergey Zigachev } 1361*b843c749SSergey Zigachev /* enable the transmitter */ 1362*b843c749SSergey Zigachev amdgpu_atombios_encoder_setup_dig_transmitter(encoder, 1363*b843c749SSergey Zigachev ATOM_TRANSMITTER_ACTION_ENABLE, 1364*b843c749SSergey Zigachev 0, 0); 1365*b843c749SSergey Zigachev if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) && 1366*b843c749SSergey Zigachev connector) { 1367*b843c749SSergey Zigachev /* DP_SET_POWER_D0 is set in amdgpu_atombios_dp_link_train */ 1368*b843c749SSergey Zigachev amdgpu_atombios_dp_link_train(encoder, connector); 1369*b843c749SSergey 
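/* Editor's note (not original source): the DP bring-up order used in this
 * enable path is: eDP panel power on -> transmitter ENABLE -> link training
 * (amdgpu_atombios_dp_link_train raises DP_SET_POWER_D0) -> DP_VIDEO_ON.
 * The disable path below mirrors it in reverse, ending with the transmitter
 * DISABLE and, for eDP, the panel power off. */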
Zigachev amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_DP_VIDEO_ON, 0); 1370*b843c749SSergey Zigachev } 1371*b843c749SSergey Zigachev if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) 1372*b843c749SSergey Zigachev amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder, dig->backlight_level); 1373*b843c749SSergey Zigachev if (ext_encoder) 1374*b843c749SSergey Zigachev amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_ENABLE); 1375*b843c749SSergey Zigachev } else { 1376*b843c749SSergey Zigachev if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) && 1377*b843c749SSergey Zigachev connector) 1378*b843c749SSergey Zigachev amdgpu_atombios_encoder_setup_dig_encoder(encoder, 1379*b843c749SSergey Zigachev ATOM_ENCODER_CMD_DP_VIDEO_OFF, 0); 1380*b843c749SSergey Zigachev if (ext_encoder) 1381*b843c749SSergey Zigachev amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_DISABLE); 1382*b843c749SSergey Zigachev if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) 1383*b843c749SSergey Zigachev amdgpu_atombios_encoder_setup_dig_transmitter(encoder, 1384*b843c749SSergey Zigachev ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0); 1385*b843c749SSergey Zigachev 1386*b843c749SSergey Zigachev if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) && 1387*b843c749SSergey Zigachev connector) 1388*b843c749SSergey Zigachev amdgpu_atombios_dp_set_rx_power_state(connector, DP_SET_POWER_D3); 1389*b843c749SSergey Zigachev /* disable the transmitter */ 1390*b843c749SSergey Zigachev amdgpu_atombios_encoder_setup_dig_transmitter(encoder, 1391*b843c749SSergey Zigachev ATOM_TRANSMITTER_ACTION_DISABLE, 0, 0); 1392*b843c749SSergey Zigachev if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) && 1393*b843c749SSergey Zigachev connector) { 1394*b843c749SSergey Zigachev if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) { 1395*b843c749SSergey Zigachev amdgpu_atombios_encoder_set_edp_panel_power(connector, 1396*b843c749SSergey Zigachev ATOM_TRANSMITTER_ACTION_POWER_OFF); 1397*b843c749SSergey Zigachev amdgpu_dig_connector->edp_on = false; 1398*b843c749SSergey Zigachev } 1399*b843c749SSergey Zigachev } 1400*b843c749SSergey Zigachev } 1401*b843c749SSergey Zigachev } 1402*b843c749SSergey Zigachev 1403*b843c749SSergey Zigachev void 1404*b843c749SSergey Zigachev amdgpu_atombios_encoder_dpms(struct drm_encoder *encoder, int mode) 1405*b843c749SSergey Zigachev { 1406*b843c749SSergey Zigachev struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1407*b843c749SSergey Zigachev 1408*b843c749SSergey Zigachev DRM_DEBUG_KMS("encoder dpms %d to mode %d, devices %08x, active_devices %08x\n", 1409*b843c749SSergey Zigachev amdgpu_encoder->encoder_id, mode, amdgpu_encoder->devices, 1410*b843c749SSergey Zigachev amdgpu_encoder->active_device); 1411*b843c749SSergey Zigachev switch (amdgpu_encoder->encoder_id) { 1412*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 1413*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 1414*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 1415*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3: 1416*b843c749SSergey Zigachev switch (mode) { 1417*b843c749SSergey Zigachev case DRM_MODE_DPMS_ON: 1418*b843c749SSergey Zigachev amdgpu_atombios_encoder_setup_dig(encoder, ATOM_ENABLE); 1419*b843c749SSergey Zigachev break; 1420*b843c749SSergey Zigachev case DRM_MODE_DPMS_STANDBY: 1421*b843c749SSergey Zigachev case 
DRM_MODE_DPMS_SUSPEND: 1422*b843c749SSergey Zigachev case DRM_MODE_DPMS_OFF: 1423*b843c749SSergey Zigachev amdgpu_atombios_encoder_setup_dig(encoder, ATOM_DISABLE); 1424*b843c749SSergey Zigachev break; 1425*b843c749SSergey Zigachev } 1426*b843c749SSergey Zigachev break; 1427*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1: 1428*b843c749SSergey Zigachev switch (mode) { 1429*b843c749SSergey Zigachev case DRM_MODE_DPMS_ON: 1430*b843c749SSergey Zigachev amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_ENABLE); 1431*b843c749SSergey Zigachev break; 1432*b843c749SSergey Zigachev case DRM_MODE_DPMS_STANDBY: 1433*b843c749SSergey Zigachev case DRM_MODE_DPMS_SUSPEND: 1434*b843c749SSergey Zigachev case DRM_MODE_DPMS_OFF: 1435*b843c749SSergey Zigachev amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_DISABLE); 1436*b843c749SSergey Zigachev break; 1437*b843c749SSergey Zigachev } 1438*b843c749SSergey Zigachev break; 1439*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1: 1440*b843c749SSergey Zigachev switch (mode) { 1441*b843c749SSergey Zigachev case DRM_MODE_DPMS_ON: 1442*b843c749SSergey Zigachev amdgpu_atombios_encoder_setup_dac(encoder, ATOM_ENABLE); 1443*b843c749SSergey Zigachev break; 1444*b843c749SSergey Zigachev case DRM_MODE_DPMS_STANDBY: 1445*b843c749SSergey Zigachev case DRM_MODE_DPMS_SUSPEND: 1446*b843c749SSergey Zigachev case DRM_MODE_DPMS_OFF: 1447*b843c749SSergey Zigachev amdgpu_atombios_encoder_setup_dac(encoder, ATOM_DISABLE); 1448*b843c749SSergey Zigachev break; 1449*b843c749SSergey Zigachev } 1450*b843c749SSergey Zigachev break; 1451*b843c749SSergey Zigachev default: 1452*b843c749SSergey Zigachev return; 1453*b843c749SSergey Zigachev } 1454*b843c749SSergey Zigachev } 1455*b843c749SSergey Zigachev 1456*b843c749SSergey Zigachev union crtc_source_param { 1457*b843c749SSergey Zigachev SELECT_CRTC_SOURCE_PS_ALLOCATION v1; 1458*b843c749SSergey Zigachev SELECT_CRTC_SOURCE_PARAMETERS_V2 v2; 1459*b843c749SSergey Zigachev SELECT_CRTC_SOURCE_PARAMETERS_V3 v3; 1460*b843c749SSergey Zigachev }; 1461*b843c749SSergey Zigachev 1462*b843c749SSergey Zigachev void 1463*b843c749SSergey Zigachev amdgpu_atombios_encoder_set_crtc_source(struct drm_encoder *encoder) 1464*b843c749SSergey Zigachev { 1465*b843c749SSergey Zigachev struct drm_device *dev = encoder->dev; 1466*b843c749SSergey Zigachev struct amdgpu_device *adev = dev->dev_private; 1467*b843c749SSergey Zigachev struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1468*b843c749SSergey Zigachev struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc); 1469*b843c749SSergey Zigachev union crtc_source_param args; 1470*b843c749SSergey Zigachev int index = GetIndexIntoMasterTable(COMMAND, SelectCRTC_Source); 1471*b843c749SSergey Zigachev uint8_t frev, crev; 1472*b843c749SSergey Zigachev struct amdgpu_encoder_atom_dig *dig; 1473*b843c749SSergey Zigachev 1474*b843c749SSergey Zigachev memset(&args, 0, sizeof(args)); 1475*b843c749SSergey Zigachev 1476*b843c749SSergey Zigachev if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev)) 1477*b843c749SSergey Zigachev return; 1478*b843c749SSergey Zigachev 1479*b843c749SSergey Zigachev switch (frev) { 1480*b843c749SSergey Zigachev case 1: 1481*b843c749SSergey Zigachev switch (crev) { 1482*b843c749SSergey Zigachev case 1: 1483*b843c749SSergey Zigachev default: 1484*b843c749SSergey Zigachev args.v1.ucCRTC = amdgpu_crtc->crtc_id; 1485*b843c749SSergey Zigachev switch (amdgpu_encoder->encoder_id) { 1486*b843c749SSergey Zigachev case 
ENCODER_OBJECT_ID_INTERNAL_TMDS1: 1487*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1: 1488*b843c749SSergey Zigachev args.v1.ucDevice = ATOM_DEVICE_DFP1_INDEX; 1489*b843c749SSergey Zigachev break; 1490*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_LVDS: 1491*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_LVTM1: 1492*b843c749SSergey Zigachev if (amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT) 1493*b843c749SSergey Zigachev args.v1.ucDevice = ATOM_DEVICE_LCD1_INDEX; 1494*b843c749SSergey Zigachev else 1495*b843c749SSergey Zigachev args.v1.ucDevice = ATOM_DEVICE_DFP3_INDEX; 1496*b843c749SSergey Zigachev break; 1497*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_DVO1: 1498*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_DDI: 1499*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1: 1500*b843c749SSergey Zigachev args.v1.ucDevice = ATOM_DEVICE_DFP2_INDEX; 1501*b843c749SSergey Zigachev break; 1502*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_DAC1: 1503*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1: 1504*b843c749SSergey Zigachev if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) 1505*b843c749SSergey Zigachev args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX; 1506*b843c749SSergey Zigachev else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 1507*b843c749SSergey Zigachev args.v1.ucDevice = ATOM_DEVICE_CV_INDEX; 1508*b843c749SSergey Zigachev else 1509*b843c749SSergey Zigachev args.v1.ucDevice = ATOM_DEVICE_CRT1_INDEX; 1510*b843c749SSergey Zigachev break; 1511*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_DAC2: 1512*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2: 1513*b843c749SSergey Zigachev if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) 1514*b843c749SSergey Zigachev args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX; 1515*b843c749SSergey Zigachev else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 1516*b843c749SSergey Zigachev args.v1.ucDevice = ATOM_DEVICE_CV_INDEX; 1517*b843c749SSergey Zigachev else 1518*b843c749SSergey Zigachev args.v1.ucDevice = ATOM_DEVICE_CRT2_INDEX; 1519*b843c749SSergey Zigachev break; 1520*b843c749SSergey Zigachev } 1521*b843c749SSergey Zigachev break; 1522*b843c749SSergey Zigachev case 2: 1523*b843c749SSergey Zigachev args.v2.ucCRTC = amdgpu_crtc->crtc_id; 1524*b843c749SSergey Zigachev if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) { 1525*b843c749SSergey Zigachev struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder); 1526*b843c749SSergey Zigachev 1527*b843c749SSergey Zigachev if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS) 1528*b843c749SSergey Zigachev args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS; 1529*b843c749SSergey Zigachev else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA) 1530*b843c749SSergey Zigachev args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT; 1531*b843c749SSergey Zigachev else 1532*b843c749SSergey Zigachev args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); 1533*b843c749SSergey Zigachev } else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) { 1534*b843c749SSergey Zigachev args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS; 1535*b843c749SSergey Zigachev } else { 1536*b843c749SSergey Zigachev args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); 1537*b843c749SSergey Zigachev } 1538*b843c749SSergey Zigachev switch (amdgpu_encoder->encoder_id) { 1539*b843c749SSergey 
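/* Editor's note (not original source): SelectCRTC_Source v2/v3 want the
 * logical DIG instance, so dig->dig_encoder (0..6) is translated to
 * ASIC_INT_DIG1..DIG7_ENCODER_ID below; DVO and the two DACs have fixed ids,
 * and a DAC is reported as the TV encoder when a TV/CV device is active. */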
Zigachev case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 1540*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 1541*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 1542*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3: 1543*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA: 1544*b843c749SSergey Zigachev dig = amdgpu_encoder->enc_priv; 1545*b843c749SSergey Zigachev switch (dig->dig_encoder) { 1546*b843c749SSergey Zigachev case 0: 1547*b843c749SSergey Zigachev args.v2.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID; 1548*b843c749SSergey Zigachev break; 1549*b843c749SSergey Zigachev case 1: 1550*b843c749SSergey Zigachev args.v2.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID; 1551*b843c749SSergey Zigachev break; 1552*b843c749SSergey Zigachev case 2: 1553*b843c749SSergey Zigachev args.v2.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID; 1554*b843c749SSergey Zigachev break; 1555*b843c749SSergey Zigachev case 3: 1556*b843c749SSergey Zigachev args.v2.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID; 1557*b843c749SSergey Zigachev break; 1558*b843c749SSergey Zigachev case 4: 1559*b843c749SSergey Zigachev args.v2.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID; 1560*b843c749SSergey Zigachev break; 1561*b843c749SSergey Zigachev case 5: 1562*b843c749SSergey Zigachev args.v2.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID; 1563*b843c749SSergey Zigachev break; 1564*b843c749SSergey Zigachev case 6: 1565*b843c749SSergey Zigachev args.v2.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID; 1566*b843c749SSergey Zigachev break; 1567*b843c749SSergey Zigachev } 1568*b843c749SSergey Zigachev break; 1569*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1: 1570*b843c749SSergey Zigachev args.v2.ucEncoderID = ASIC_INT_DVO_ENCODER_ID; 1571*b843c749SSergey Zigachev break; 1572*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1: 1573*b843c749SSergey Zigachev if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) 1574*b843c749SSergey Zigachev args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1575*b843c749SSergey Zigachev else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 1576*b843c749SSergey Zigachev args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1577*b843c749SSergey Zigachev else 1578*b843c749SSergey Zigachev args.v2.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID; 1579*b843c749SSergey Zigachev break; 1580*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2: 1581*b843c749SSergey Zigachev if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) 1582*b843c749SSergey Zigachev args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1583*b843c749SSergey Zigachev else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 1584*b843c749SSergey Zigachev args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1585*b843c749SSergey Zigachev else 1586*b843c749SSergey Zigachev args.v2.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID; 1587*b843c749SSergey Zigachev break; 1588*b843c749SSergey Zigachev } 1589*b843c749SSergey Zigachev break; 1590*b843c749SSergey Zigachev case 3: 1591*b843c749SSergey Zigachev args.v3.ucCRTC = amdgpu_crtc->crtc_id; 1592*b843c749SSergey Zigachev if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) { 1593*b843c749SSergey Zigachev struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder); 1594*b843c749SSergey Zigachev 1595*b843c749SSergey Zigachev if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS) 1596*b843c749SSergey Zigachev args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS; 1597*b843c749SSergey Zigachev else if 
(connector->connector_type == DRM_MODE_CONNECTOR_VGA) 1598*b843c749SSergey Zigachev args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT; 1599*b843c749SSergey Zigachev else 1600*b843c749SSergey Zigachev args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); 1601*b843c749SSergey Zigachev } else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) { 1602*b843c749SSergey Zigachev args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS; 1603*b843c749SSergey Zigachev } else { 1604*b843c749SSergey Zigachev args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder); 1605*b843c749SSergey Zigachev } 1606*b843c749SSergey Zigachev args.v3.ucDstBpc = amdgpu_atombios_encoder_get_bpc(encoder); 1607*b843c749SSergey Zigachev switch (amdgpu_encoder->encoder_id) { 1608*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 1609*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 1610*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 1611*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3: 1612*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA: 1613*b843c749SSergey Zigachev dig = amdgpu_encoder->enc_priv; 1614*b843c749SSergey Zigachev switch (dig->dig_encoder) { 1615*b843c749SSergey Zigachev case 0: 1616*b843c749SSergey Zigachev args.v3.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID; 1617*b843c749SSergey Zigachev break; 1618*b843c749SSergey Zigachev case 1: 1619*b843c749SSergey Zigachev args.v3.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID; 1620*b843c749SSergey Zigachev break; 1621*b843c749SSergey Zigachev case 2: 1622*b843c749SSergey Zigachev args.v3.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID; 1623*b843c749SSergey Zigachev break; 1624*b843c749SSergey Zigachev case 3: 1625*b843c749SSergey Zigachev args.v3.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID; 1626*b843c749SSergey Zigachev break; 1627*b843c749SSergey Zigachev case 4: 1628*b843c749SSergey Zigachev args.v3.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID; 1629*b843c749SSergey Zigachev break; 1630*b843c749SSergey Zigachev case 5: 1631*b843c749SSergey Zigachev args.v3.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID; 1632*b843c749SSergey Zigachev break; 1633*b843c749SSergey Zigachev case 6: 1634*b843c749SSergey Zigachev args.v3.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID; 1635*b843c749SSergey Zigachev break; 1636*b843c749SSergey Zigachev } 1637*b843c749SSergey Zigachev break; 1638*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1: 1639*b843c749SSergey Zigachev args.v3.ucEncoderID = ASIC_INT_DVO_ENCODER_ID; 1640*b843c749SSergey Zigachev break; 1641*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1: 1642*b843c749SSergey Zigachev if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) 1643*b843c749SSergey Zigachev args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1644*b843c749SSergey Zigachev else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 1645*b843c749SSergey Zigachev args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1646*b843c749SSergey Zigachev else 1647*b843c749SSergey Zigachev args.v3.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID; 1648*b843c749SSergey Zigachev break; 1649*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2: 1650*b843c749SSergey Zigachev if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT)) 1651*b843c749SSergey Zigachev args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID; 1652*b843c749SSergey Zigachev else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT)) 1653*b843c749SSergey Zigachev args.v3.ucEncoderID = 
ASIC_INT_TV_ENCODER_ID; 1654*b843c749SSergey Zigachev else 1655*b843c749SSergey Zigachev args.v3.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID; 1656*b843c749SSergey Zigachev break; 1657*b843c749SSergey Zigachev } 1658*b843c749SSergey Zigachev break; 1659*b843c749SSergey Zigachev } 1660*b843c749SSergey Zigachev break; 1661*b843c749SSergey Zigachev default: 1662*b843c749SSergey Zigachev DRM_ERROR("Unknown table version: %d, %d\n", frev, crev); 1663*b843c749SSergey Zigachev return; 1664*b843c749SSergey Zigachev } 1665*b843c749SSergey Zigachev 1666*b843c749SSergey Zigachev amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args); 1667*b843c749SSergey Zigachev } 1668*b843c749SSergey Zigachev 1669*b843c749SSergey Zigachev /* This only needs to be called once at startup */ 1670*b843c749SSergey Zigachev void 1671*b843c749SSergey Zigachev amdgpu_atombios_encoder_init_dig(struct amdgpu_device *adev) 1672*b843c749SSergey Zigachev { 1673*b843c749SSergey Zigachev struct drm_device *dev = adev->ddev; 1674*b843c749SSergey Zigachev struct drm_encoder *encoder; 1675*b843c749SSergey Zigachev 1676*b843c749SSergey Zigachev list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) { 1677*b843c749SSergey Zigachev struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1678*b843c749SSergey Zigachev struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder); 1679*b843c749SSergey Zigachev 1680*b843c749SSergey Zigachev switch (amdgpu_encoder->encoder_id) { 1681*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_UNIPHY: 1682*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1: 1683*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2: 1684*b843c749SSergey Zigachev case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3: 1685*b843c749SSergey Zigachev amdgpu_atombios_encoder_setup_dig_transmitter(encoder, ATOM_TRANSMITTER_ACTION_INIT, 1686*b843c749SSergey Zigachev 0, 0); 1687*b843c749SSergey Zigachev break; 1688*b843c749SSergey Zigachev } 1689*b843c749SSergey Zigachev 1690*b843c749SSergey Zigachev if (ext_encoder) 1691*b843c749SSergey Zigachev amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, 1692*b843c749SSergey Zigachev EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT); 1693*b843c749SSergey Zigachev } 1694*b843c749SSergey Zigachev } 1695*b843c749SSergey Zigachev 1696*b843c749SSergey Zigachev static bool 1697*b843c749SSergey Zigachev amdgpu_atombios_encoder_dac_load_detect(struct drm_encoder *encoder, 1698*b843c749SSergey Zigachev struct drm_connector *connector) 1699*b843c749SSergey Zigachev { 1700*b843c749SSergey Zigachev struct drm_device *dev = encoder->dev; 1701*b843c749SSergey Zigachev struct amdgpu_device *adev = dev->dev_private; 1702*b843c749SSergey Zigachev struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1703*b843c749SSergey Zigachev struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector); 1704*b843c749SSergey Zigachev 1705*b843c749SSergey Zigachev if (amdgpu_encoder->devices & (ATOM_DEVICE_TV_SUPPORT | 1706*b843c749SSergey Zigachev ATOM_DEVICE_CV_SUPPORT | 1707*b843c749SSergey Zigachev ATOM_DEVICE_CRT_SUPPORT)) { 1708*b843c749SSergey Zigachev DAC_LOAD_DETECTION_PS_ALLOCATION args; 1709*b843c749SSergey Zigachev int index = GetIndexIntoMasterTable(COMMAND, DAC_LoadDetection); 1710*b843c749SSergey Zigachev uint8_t frev, crev; 1711*b843c749SSergey Zigachev 1712*b843c749SSergey Zigachev memset(&args, 0, sizeof(args)); 1713*b843c749SSergey Zigachev 1714*b843c749SSergey Zigachev if 
(!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev)) 1715*b843c749SSergey Zigachev return false; 1716*b843c749SSergey Zigachev 1717*b843c749SSergey Zigachev args.sDacload.ucMisc = 0; 1718*b843c749SSergey Zigachev 1719*b843c749SSergey Zigachev if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DAC1) || 1720*b843c749SSergey Zigachev (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1)) 1721*b843c749SSergey Zigachev args.sDacload.ucDacType = ATOM_DAC_A; 1722*b843c749SSergey Zigachev else 1723*b843c749SSergey Zigachev args.sDacload.ucDacType = ATOM_DAC_B; 1724*b843c749SSergey Zigachev 1725*b843c749SSergey Zigachev if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) 1726*b843c749SSergey Zigachev args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT1_SUPPORT); 1727*b843c749SSergey Zigachev else if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) 1728*b843c749SSergey Zigachev args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT2_SUPPORT); 1729*b843c749SSergey Zigachev else if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) { 1730*b843c749SSergey Zigachev args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CV_SUPPORT); 1731*b843c749SSergey Zigachev if (crev >= 3) 1732*b843c749SSergey Zigachev args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb; 1733*b843c749SSergey Zigachev } else if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) { 1734*b843c749SSergey Zigachev args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_TV1_SUPPORT); 1735*b843c749SSergey Zigachev if (crev >= 3) 1736*b843c749SSergey Zigachev args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb; 1737*b843c749SSergey Zigachev } 1738*b843c749SSergey Zigachev 1739*b843c749SSergey Zigachev amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args); 1740*b843c749SSergey Zigachev 1741*b843c749SSergey Zigachev return true; 1742*b843c749SSergey Zigachev } else 1743*b843c749SSergey Zigachev return false; 1744*b843c749SSergey Zigachev } 1745*b843c749SSergey Zigachev 1746*b843c749SSergey Zigachev enum drm_connector_status 1747*b843c749SSergey Zigachev amdgpu_atombios_encoder_dac_detect(struct drm_encoder *encoder, 1748*b843c749SSergey Zigachev struct drm_connector *connector) 1749*b843c749SSergey Zigachev { 1750*b843c749SSergey Zigachev struct drm_device *dev = encoder->dev; 1751*b843c749SSergey Zigachev struct amdgpu_device *adev = dev->dev_private; 1752*b843c749SSergey Zigachev struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1753*b843c749SSergey Zigachev struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector); 1754*b843c749SSergey Zigachev uint32_t bios_0_scratch; 1755*b843c749SSergey Zigachev 1756*b843c749SSergey Zigachev if (!amdgpu_atombios_encoder_dac_load_detect(encoder, connector)) { 1757*b843c749SSergey Zigachev DRM_DEBUG_KMS("detect returned false \n"); 1758*b843c749SSergey Zigachev return connector_status_unknown; 1759*b843c749SSergey Zigachev } 1760*b843c749SSergey Zigachev 1761*b843c749SSergey Zigachev bios_0_scratch = RREG32(mmBIOS_SCRATCH_0); 1762*b843c749SSergey Zigachev 1763*b843c749SSergey Zigachev DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices); 1764*b843c749SSergey Zigachev if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) { 1765*b843c749SSergey Zigachev if (bios_0_scratch & ATOM_S0_CRT1_MASK) 1766*b843c749SSergey Zigachev return connector_status_connected; 1767*b843c749SSergey Zigachev } 1768*b843c749SSergey Zigachev if (amdgpu_connector->devices & 
ATOM_DEVICE_CRT2_SUPPORT) { 1769*b843c749SSergey Zigachev if (bios_0_scratch & ATOM_S0_CRT2_MASK) 1770*b843c749SSergey Zigachev return connector_status_connected; 1771*b843c749SSergey Zigachev } 1772*b843c749SSergey Zigachev if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) { 1773*b843c749SSergey Zigachev if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A)) 1774*b843c749SSergey Zigachev return connector_status_connected; 1775*b843c749SSergey Zigachev } 1776*b843c749SSergey Zigachev if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) { 1777*b843c749SSergey Zigachev if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A)) 1778*b843c749SSergey Zigachev return connector_status_connected; /* CTV */ 1779*b843c749SSergey Zigachev else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A)) 1780*b843c749SSergey Zigachev return connector_status_connected; /* STV */ 1781*b843c749SSergey Zigachev } 1782*b843c749SSergey Zigachev return connector_status_disconnected; 1783*b843c749SSergey Zigachev } 1784*b843c749SSergey Zigachev 1785*b843c749SSergey Zigachev enum drm_connector_status 1786*b843c749SSergey Zigachev amdgpu_atombios_encoder_dig_detect(struct drm_encoder *encoder, 1787*b843c749SSergey Zigachev struct drm_connector *connector) 1788*b843c749SSergey Zigachev { 1789*b843c749SSergey Zigachev struct drm_device *dev = encoder->dev; 1790*b843c749SSergey Zigachev struct amdgpu_device *adev = dev->dev_private; 1791*b843c749SSergey Zigachev struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1792*b843c749SSergey Zigachev struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector); 1793*b843c749SSergey Zigachev struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder); 1794*b843c749SSergey Zigachev u32 bios_0_scratch; 1795*b843c749SSergey Zigachev 1796*b843c749SSergey Zigachev if (!ext_encoder) 1797*b843c749SSergey Zigachev return connector_status_unknown; 1798*b843c749SSergey Zigachev 1799*b843c749SSergey Zigachev if ((amdgpu_connector->devices & ATOM_DEVICE_CRT_SUPPORT) == 0) 1800*b843c749SSergey Zigachev return connector_status_unknown; 1801*b843c749SSergey Zigachev 1802*b843c749SSergey Zigachev /* load detect on the dp bridge */ 1803*b843c749SSergey Zigachev amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, 1804*b843c749SSergey Zigachev EXTERNAL_ENCODER_ACTION_V3_DACLOAD_DETECTION); 1805*b843c749SSergey Zigachev 1806*b843c749SSergey Zigachev bios_0_scratch = RREG32(mmBIOS_SCRATCH_0); 1807*b843c749SSergey Zigachev 1808*b843c749SSergey Zigachev DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices); 1809*b843c749SSergey Zigachev if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) { 1810*b843c749SSergey Zigachev if (bios_0_scratch & ATOM_S0_CRT1_MASK) 1811*b843c749SSergey Zigachev return connector_status_connected; 1812*b843c749SSergey Zigachev } 1813*b843c749SSergey Zigachev if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) { 1814*b843c749SSergey Zigachev if (bios_0_scratch & ATOM_S0_CRT2_MASK) 1815*b843c749SSergey Zigachev return connector_status_connected; 1816*b843c749SSergey Zigachev } 1817*b843c749SSergey Zigachev if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) { 1818*b843c749SSergey Zigachev if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A)) 1819*b843c749SSergey Zigachev return connector_status_connected; 1820*b843c749SSergey Zigachev } 1821*b843c749SSergey Zigachev if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) { 
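/* Editor's note (not original source): both the DAC and the DP-bridge load
 * detection report their result through BIOS_SCRATCH_0 — the command table
 * sets the per-device ATOM_S0_* bits and the driver only reads them back. */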
1822*b843c749SSergey Zigachev if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A)) 1823*b843c749SSergey Zigachev return connector_status_connected; /* CTV */ 1824*b843c749SSergey Zigachev else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A)) 1825*b843c749SSergey Zigachev return connector_status_connected; /* STV */ 1826*b843c749SSergey Zigachev } 1827*b843c749SSergey Zigachev return connector_status_disconnected; 1828*b843c749SSergey Zigachev } 1829*b843c749SSergey Zigachev 1830*b843c749SSergey Zigachev void 1831*b843c749SSergey Zigachev amdgpu_atombios_encoder_setup_ext_encoder_ddc(struct drm_encoder *encoder) 1832*b843c749SSergey Zigachev { 1833*b843c749SSergey Zigachev struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder); 1834*b843c749SSergey Zigachev 1835*b843c749SSergey Zigachev if (ext_encoder) 1836*b843c749SSergey Zigachev /* ddc_setup on the dp bridge */ 1837*b843c749SSergey Zigachev amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, 1838*b843c749SSergey Zigachev EXTERNAL_ENCODER_ACTION_V3_DDC_SETUP); 1839*b843c749SSergey Zigachev 1840*b843c749SSergey Zigachev } 1841*b843c749SSergey Zigachev 1842*b843c749SSergey Zigachev void 1843*b843c749SSergey Zigachev amdgpu_atombios_encoder_set_bios_scratch_regs(struct drm_connector *connector, 1844*b843c749SSergey Zigachev struct drm_encoder *encoder, 1845*b843c749SSergey Zigachev bool connected) 1846*b843c749SSergey Zigachev { 1847*b843c749SSergey Zigachev struct drm_device *dev = connector->dev; 1848*b843c749SSergey Zigachev struct amdgpu_device *adev = dev->dev_private; 1849*b843c749SSergey Zigachev struct amdgpu_connector *amdgpu_connector = 1850*b843c749SSergey Zigachev to_amdgpu_connector(connector); 1851*b843c749SSergey Zigachev struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder); 1852*b843c749SSergey Zigachev uint32_t bios_0_scratch, bios_3_scratch, bios_6_scratch; 1853*b843c749SSergey Zigachev 1854*b843c749SSergey Zigachev bios_0_scratch = RREG32(mmBIOS_SCRATCH_0); 1855*b843c749SSergey Zigachev bios_3_scratch = RREG32(mmBIOS_SCRATCH_3); 1856*b843c749SSergey Zigachev bios_6_scratch = RREG32(mmBIOS_SCRATCH_6); 1857*b843c749SSergey Zigachev 1858*b843c749SSergey Zigachev if ((amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT) && 1859*b843c749SSergey Zigachev (amdgpu_connector->devices & ATOM_DEVICE_LCD1_SUPPORT)) { 1860*b843c749SSergey Zigachev if (connected) { 1861*b843c749SSergey Zigachev DRM_DEBUG_KMS("LCD1 connected\n"); 1862*b843c749SSergey Zigachev bios_0_scratch |= ATOM_S0_LCD1; 1863*b843c749SSergey Zigachev bios_3_scratch |= ATOM_S3_LCD1_ACTIVE; 1864*b843c749SSergey Zigachev bios_6_scratch |= ATOM_S6_ACC_REQ_LCD1; 1865*b843c749SSergey Zigachev } else { 1866*b843c749SSergey Zigachev DRM_DEBUG_KMS("LCD1 disconnected\n"); 1867*b843c749SSergey Zigachev bios_0_scratch &= ~ATOM_S0_LCD1; 1868*b843c749SSergey Zigachev bios_3_scratch &= ~ATOM_S3_LCD1_ACTIVE; 1869*b843c749SSergey Zigachev bios_6_scratch &= ~ATOM_S6_ACC_REQ_LCD1; 1870*b843c749SSergey Zigachev } 1871*b843c749SSergey Zigachev } 1872*b843c749SSergey Zigachev if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT1_SUPPORT) && 1873*b843c749SSergey Zigachev (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)) { 1874*b843c749SSergey Zigachev if (connected) { 1875*b843c749SSergey Zigachev DRM_DEBUG_KMS("CRT1 connected\n"); 1876*b843c749SSergey Zigachev bios_0_scratch |= ATOM_S0_CRT1_COLOR; 1877*b843c749SSergey Zigachev bios_3_scratch |= ATOM_S3_CRT1_ACTIVE; 1878*b843c749SSergey Zigachev 
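/* Editor's note (not original source): connection state is mirrored into
 * three BIOS scratch registers for the VBIOS — S0 carries "connected", S3
 * carries "active", and S6 carries the access-request bits. The same pattern
 * repeats for every device below, and the three registers are written back
 * once at the end of the function. */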
bios_6_scratch |= ATOM_S6_ACC_REQ_CRT1; 1879*b843c749SSergey Zigachev } else { 1880*b843c749SSergey Zigachev DRM_DEBUG_KMS("CRT1 disconnected\n"); 1881*b843c749SSergey Zigachev bios_0_scratch &= ~ATOM_S0_CRT1_MASK; 1882*b843c749SSergey Zigachev bios_3_scratch &= ~ATOM_S3_CRT1_ACTIVE; 1883*b843c749SSergey Zigachev bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT1; 1884*b843c749SSergey Zigachev } 1885*b843c749SSergey Zigachev } 1886*b843c749SSergey Zigachev if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT2_SUPPORT) && 1887*b843c749SSergey Zigachev (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)) { 1888*b843c749SSergey Zigachev if (connected) { 1889*b843c749SSergey Zigachev DRM_DEBUG_KMS("CRT2 connected\n"); 1890*b843c749SSergey Zigachev bios_0_scratch |= ATOM_S0_CRT2_COLOR; 1891*b843c749SSergey Zigachev bios_3_scratch |= ATOM_S3_CRT2_ACTIVE; 1892*b843c749SSergey Zigachev bios_6_scratch |= ATOM_S6_ACC_REQ_CRT2; 1893*b843c749SSergey Zigachev } else { 1894*b843c749SSergey Zigachev DRM_DEBUG_KMS("CRT2 disconnected\n"); 1895*b843c749SSergey Zigachev bios_0_scratch &= ~ATOM_S0_CRT2_MASK; 1896*b843c749SSergey Zigachev bios_3_scratch &= ~ATOM_S3_CRT2_ACTIVE; 1897*b843c749SSergey Zigachev bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT2; 1898*b843c749SSergey Zigachev } 1899*b843c749SSergey Zigachev } 1900*b843c749SSergey Zigachev if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP1_SUPPORT) && 1901*b843c749SSergey Zigachev (amdgpu_connector->devices & ATOM_DEVICE_DFP1_SUPPORT)) { 1902*b843c749SSergey Zigachev if (connected) { 1903*b843c749SSergey Zigachev DRM_DEBUG_KMS("DFP1 connected\n"); 1904*b843c749SSergey Zigachev bios_0_scratch |= ATOM_S0_DFP1; 1905*b843c749SSergey Zigachev bios_3_scratch |= ATOM_S3_DFP1_ACTIVE; 1906*b843c749SSergey Zigachev bios_6_scratch |= ATOM_S6_ACC_REQ_DFP1; 1907*b843c749SSergey Zigachev } else { 1908*b843c749SSergey Zigachev DRM_DEBUG_KMS("DFP1 disconnected\n"); 1909*b843c749SSergey Zigachev bios_0_scratch &= ~ATOM_S0_DFP1; 1910*b843c749SSergey Zigachev bios_3_scratch &= ~ATOM_S3_DFP1_ACTIVE; 1911*b843c749SSergey Zigachev bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP1; 1912*b843c749SSergey Zigachev } 1913*b843c749SSergey Zigachev } 1914*b843c749SSergey Zigachev if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP2_SUPPORT) && 1915*b843c749SSergey Zigachev (amdgpu_connector->devices & ATOM_DEVICE_DFP2_SUPPORT)) { 1916*b843c749SSergey Zigachev if (connected) { 1917*b843c749SSergey Zigachev DRM_DEBUG_KMS("DFP2 connected\n"); 1918*b843c749SSergey Zigachev bios_0_scratch |= ATOM_S0_DFP2; 1919*b843c749SSergey Zigachev bios_3_scratch |= ATOM_S3_DFP2_ACTIVE; 1920*b843c749SSergey Zigachev bios_6_scratch |= ATOM_S6_ACC_REQ_DFP2; 1921*b843c749SSergey Zigachev } else { 1922*b843c749SSergey Zigachev DRM_DEBUG_KMS("DFP2 disconnected\n"); 1923*b843c749SSergey Zigachev bios_0_scratch &= ~ATOM_S0_DFP2; 1924*b843c749SSergey Zigachev bios_3_scratch &= ~ATOM_S3_DFP2_ACTIVE; 1925*b843c749SSergey Zigachev bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP2; 1926*b843c749SSergey Zigachev } 1927*b843c749SSergey Zigachev } 1928*b843c749SSergey Zigachev if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP3_SUPPORT) && 1929*b843c749SSergey Zigachev (amdgpu_connector->devices & ATOM_DEVICE_DFP3_SUPPORT)) { 1930*b843c749SSergey Zigachev if (connected) { 1931*b843c749SSergey Zigachev DRM_DEBUG_KMS("DFP3 connected\n"); 1932*b843c749SSergey Zigachev bios_0_scratch |= ATOM_S0_DFP3; 1933*b843c749SSergey Zigachev bios_3_scratch |= ATOM_S3_DFP3_ACTIVE; 1934*b843c749SSergey Zigachev bios_6_scratch |= ATOM_S6_ACC_REQ_DFP3; 1935*b843c749SSergey Zigachev } 
else { 1936*b843c749SSergey Zigachev DRM_DEBUG_KMS("DFP3 disconnected\n"); 1937*b843c749SSergey Zigachev bios_0_scratch &= ~ATOM_S0_DFP3; 1938*b843c749SSergey Zigachev bios_3_scratch &= ~ATOM_S3_DFP3_ACTIVE; 1939*b843c749SSergey Zigachev bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP3; 1940*b843c749SSergey Zigachev } 1941*b843c749SSergey Zigachev } 1942*b843c749SSergey Zigachev if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP4_SUPPORT) && 1943*b843c749SSergey Zigachev (amdgpu_connector->devices & ATOM_DEVICE_DFP4_SUPPORT)) { 1944*b843c749SSergey Zigachev if (connected) { 1945*b843c749SSergey Zigachev DRM_DEBUG_KMS("DFP4 connected\n"); 1946*b843c749SSergey Zigachev bios_0_scratch |= ATOM_S0_DFP4; 1947*b843c749SSergey Zigachev bios_3_scratch |= ATOM_S3_DFP4_ACTIVE; 1948*b843c749SSergey Zigachev bios_6_scratch |= ATOM_S6_ACC_REQ_DFP4; 1949*b843c749SSergey Zigachev } else { 1950*b843c749SSergey Zigachev DRM_DEBUG_KMS("DFP4 disconnected\n"); 1951*b843c749SSergey Zigachev bios_0_scratch &= ~ATOM_S0_DFP4; 1952*b843c749SSergey Zigachev bios_3_scratch &= ~ATOM_S3_DFP4_ACTIVE; 1953*b843c749SSergey Zigachev bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP4; 1954*b843c749SSergey Zigachev } 1955*b843c749SSergey Zigachev } 1956*b843c749SSergey Zigachev if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP5_SUPPORT) && 1957*b843c749SSergey Zigachev (amdgpu_connector->devices & ATOM_DEVICE_DFP5_SUPPORT)) { 1958*b843c749SSergey Zigachev if (connected) { 1959*b843c749SSergey Zigachev DRM_DEBUG_KMS("DFP5 connected\n"); 1960*b843c749SSergey Zigachev bios_0_scratch |= ATOM_S0_DFP5; 1961*b843c749SSergey Zigachev bios_3_scratch |= ATOM_S3_DFP5_ACTIVE; 1962*b843c749SSergey Zigachev bios_6_scratch |= ATOM_S6_ACC_REQ_DFP5; 1963*b843c749SSergey Zigachev } else { 1964*b843c749SSergey Zigachev DRM_DEBUG_KMS("DFP5 disconnected\n"); 1965*b843c749SSergey Zigachev bios_0_scratch &= ~ATOM_S0_DFP5; 1966*b843c749SSergey Zigachev bios_3_scratch &= ~ATOM_S3_DFP5_ACTIVE; 1967*b843c749SSergey Zigachev bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP5; 1968*b843c749SSergey Zigachev } 1969*b843c749SSergey Zigachev } 1970*b843c749SSergey Zigachev if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP6_SUPPORT) && 1971*b843c749SSergey Zigachev (amdgpu_connector->devices & ATOM_DEVICE_DFP6_SUPPORT)) { 1972*b843c749SSergey Zigachev if (connected) { 1973*b843c749SSergey Zigachev DRM_DEBUG_KMS("DFP6 connected\n"); 1974*b843c749SSergey Zigachev bios_0_scratch |= ATOM_S0_DFP6; 1975*b843c749SSergey Zigachev bios_3_scratch |= ATOM_S3_DFP6_ACTIVE; 1976*b843c749SSergey Zigachev bios_6_scratch |= ATOM_S6_ACC_REQ_DFP6; 1977*b843c749SSergey Zigachev } else { 1978*b843c749SSergey Zigachev DRM_DEBUG_KMS("DFP6 disconnected\n"); 1979*b843c749SSergey Zigachev bios_0_scratch &= ~ATOM_S0_DFP6; 1980*b843c749SSergey Zigachev bios_3_scratch &= ~ATOM_S3_DFP6_ACTIVE; 1981*b843c749SSergey Zigachev bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP6; 1982*b843c749SSergey Zigachev } 1983*b843c749SSergey Zigachev } 1984*b843c749SSergey Zigachev 1985*b843c749SSergey Zigachev WREG32(mmBIOS_SCRATCH_0, bios_0_scratch); 1986*b843c749SSergey Zigachev WREG32(mmBIOS_SCRATCH_3, bios_3_scratch); 1987*b843c749SSergey Zigachev WREG32(mmBIOS_SCRATCH_6, bios_6_scratch); 1988*b843c749SSergey Zigachev } 1989*b843c749SSergey Zigachev 1990*b843c749SSergey Zigachev union lvds_info { 1991*b843c749SSergey Zigachev struct _ATOM_LVDS_INFO info; 1992*b843c749SSergey Zigachev struct _ATOM_LVDS_INFO_V12 info_12; 1993*b843c749SSergey Zigachev }; 1994*b843c749SSergey Zigachev 1995*b843c749SSergey Zigachev struct amdgpu_encoder_atom_dig * 
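/* Editor's note (not original source): get_lcd_info below rebuilds the
 * panel's native mode from the ATOM LVDS_Info data table — usPixClk is in
 * 10 kHz units (hence the * 10 to get kHz), the h/v totals come from the
 * blanking times, and the sync offsets/widths give the sync start/end. */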
struct amdgpu_encoder_atom_dig *
amdgpu_atombios_encoder_get_lcd_info(struct amdgpu_encoder *encoder)
{
	struct drm_device *dev = encoder->base.dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_mode_info *mode_info = &adev->mode_info;
	int index = GetIndexIntoMasterTable(DATA, LVDS_Info);
	uint16_t data_offset, misc;
	union lvds_info *lvds_info;
	uint8_t frev, crev;
	struct amdgpu_encoder_atom_dig *lvds = NULL;
	int encoder_enum = (encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;

	if (amdgpu_atom_parse_data_header(mode_info->atom_context, index, NULL,
					  &frev, &crev, &data_offset)) {
		lvds_info =
			(union lvds_info *)(mode_info->atom_context->bios + data_offset);
		lvds =
			kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);

		if (!lvds)
			return NULL;

		lvds->native_mode.clock =
			le16_to_cpu(lvds_info->info.sLCDTiming.usPixClk) * 10;
		lvds->native_mode.hdisplay =
			le16_to_cpu(lvds_info->info.sLCDTiming.usHActive);
		lvds->native_mode.vdisplay =
			le16_to_cpu(lvds_info->info.sLCDTiming.usVActive);
		lvds->native_mode.htotal = lvds->native_mode.hdisplay +
			le16_to_cpu(lvds_info->info.sLCDTiming.usHBlanking_Time);
		lvds->native_mode.hsync_start = lvds->native_mode.hdisplay +
			le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncOffset);
		lvds->native_mode.hsync_end = lvds->native_mode.hsync_start +
			le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncWidth);
		lvds->native_mode.vtotal = lvds->native_mode.vdisplay +
			le16_to_cpu(lvds_info->info.sLCDTiming.usVBlanking_Time);
		lvds->native_mode.vsync_start = lvds->native_mode.vdisplay +
			le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncOffset);
		lvds->native_mode.vsync_end = lvds->native_mode.vsync_start +
			le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncWidth);
		lvds->panel_pwr_delay =
			le16_to_cpu(lvds_info->info.usOffDelayInMs);
		lvds->lcd_misc = lvds_info->info.ucLVDS_Misc;

		misc = le16_to_cpu(lvds_info->info.sLCDTiming.susModeMiscInfo.usAccess);
		if (misc & ATOM_VSYNC_POLARITY)
			lvds->native_mode.flags |= DRM_MODE_FLAG_NVSYNC;
		if (misc & ATOM_HSYNC_POLARITY)
			lvds->native_mode.flags |= DRM_MODE_FLAG_NHSYNC;
		if (misc & ATOM_COMPOSITESYNC)
			lvds->native_mode.flags |= DRM_MODE_FLAG_CSYNC;
		if (misc & ATOM_INTERLACE)
			lvds->native_mode.flags |= DRM_MODE_FLAG_INTERLACE;
		if (misc & ATOM_DOUBLE_CLOCK_MODE)
			lvds->native_mode.flags |= DRM_MODE_FLAG_DBLSCAN;

		lvds->native_mode.width_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageHSize);
		lvds->native_mode.height_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageVSize);

		/* set crtc values */
		drm_mode_set_crtcinfo(&lvds->native_mode, CRTC_INTERLACE_HALVE_V);

		lvds->lcd_ss_id = lvds_info->info.ucSS_Id;

		encoder->native_mode = lvds->native_mode;

		if (encoder_enum == 2)
			lvds->linkb = true;
		else
			lvds->linkb = false;

		/* parse the lcd record table */
		if (le16_to_cpu(lvds_info->info.usModePatchTableOffset)) {
			ATOM_FAKE_EDID_PATCH_RECORD *fake_edid_record;
			ATOM_PANEL_RESOLUTION_PATCH_RECORD *panel_res_record;
			bool bad_record = false;
			u8 *record;

			if ((frev == 1) && (crev < 2))
				/* absolute */
				record = (u8 *)(mode_info->atom_context->bios +
						le16_to_cpu(lvds_info->info.usModePatchTableOffset));
			else
				/* relative */
				record = (u8 *)(mode_info->atom_context->bios +
						data_offset +
						le16_to_cpu(lvds_info->info.usModePatchTableOffset));
			while (*record != ATOM_RECORD_END_TYPE) {
				switch (*record) {
				case LCD_MODE_PATCH_RECORD_MODE_TYPE:
					record += sizeof(ATOM_PATCH_RECORD_MODE);
					break;
				case LCD_RTS_RECORD_TYPE:
					record += sizeof(ATOM_LCD_RTS_RECORD);
					break;
				case LCD_CAP_RECORD_TYPE:
					record += sizeof(ATOM_LCD_MODE_CONTROL_CAP);
					break;
				case LCD_FAKE_EDID_PATCH_RECORD_TYPE:
					fake_edid_record = (ATOM_FAKE_EDID_PATCH_RECORD *)record;
					if (fake_edid_record->ucFakeEDIDLength) {
						struct edid *edid;
						int edid_size =
							max((int)EDID_LENGTH, (int)fake_edid_record->ucFakeEDIDLength);
						edid = kmalloc(edid_size, GFP_KERNEL);
						if (edid) {
							memcpy((u8 *)edid, (u8 *)&fake_edid_record->ucFakeEDIDString[0],
							       fake_edid_record->ucFakeEDIDLength);

							if (drm_edid_is_valid(edid)) {
								adev->mode_info.bios_hardcoded_edid = edid;
								adev->mode_info.bios_hardcoded_edid_size = edid_size;
							} else
								kfree(edid);
						}
					}
					record += fake_edid_record->ucFakeEDIDLength ?
						  fake_edid_record->ucFakeEDIDLength + 2 :
						  sizeof(ATOM_FAKE_EDID_PATCH_RECORD);
					break;
				case LCD_PANEL_RESOLUTION_RECORD_TYPE:
					panel_res_record = (ATOM_PANEL_RESOLUTION_PATCH_RECORD *)record;
					lvds->native_mode.width_mm = panel_res_record->usHSize;
					lvds->native_mode.height_mm = panel_res_record->usVSize;
					record += sizeof(ATOM_PANEL_RESOLUTION_PATCH_RECORD);
					break;
				default:
					DRM_ERROR("Bad LCD record %d\n", *record);
					bad_record = true;
					break;
				}
				if (bad_record)
					break;
			}
		}
	}
	return lvds;
}

/*
 * Allocate and initialize the DIG-specific encoder state.  Link B is
 * selected when the encoder enum is 2, mirroring the linkb handling in
 * amdgpu_atombios_encoder_get_lcd_info() above.
 */
struct amdgpu_encoder_atom_dig *
amdgpu_atombios_encoder_get_dig_info(struct amdgpu_encoder *amdgpu_encoder)
{
	int encoder_enum = (amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
	struct amdgpu_encoder_atom_dig *dig = kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);

	if (!dig)
		return NULL;

	/* coherent mode by default */
	dig->coherent_mode = true;
	dig->dig_encoder = -1;

	if (encoder_enum == 2)
		dig->linkb = true;
	else
		dig->linkb = false;

	return dig;
}
