1 /*
2 * Copyright 2007-11 Advanced Micro Devices, Inc.
3 * Copyright 2008 Red Hat Inc.
4 *
5 * Permission is hereby granted, free of charge, to any person obtaining a
6 * copy of this software and associated documentation files (the "Software"),
7 * to deal in the Software without restriction, including without limitation
8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9 * and/or sell copies of the Software, and to permit persons to whom the
10 * Software is furnished to do so, subject to the following conditions:
11 *
12 * The above copyright notice and this permission notice shall be included in
13 * all copies or substantial portions of the Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
19 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
20 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
21 * OTHER DEALINGS IN THE SOFTWARE.
22 *
23 * Authors: Dave Airlie
24 * Alex Deucher
25 */
26 #include <drm/drmP.h>
27 #include <drm/drm_crtc_helper.h>
28 #include <drm/amdgpu_drm.h>
29 #include "amdgpu.h"
30 #include "amdgpu_connectors.h"
31 #include "atom.h"
32 #include "atombios_encoders.h"
33 #include "atombios_dp.h"
34 #include <linux/backlight.h>
35 #include "bif/bif_4_1_d.h"
36
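/*
 * The current backlight level is mirrored in the BIOS scratch 2 register;
 * these helpers read and update the ATOM_S2_CURRENT_BL_LEVEL field there.
 */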
37 static u8
38 amdgpu_atombios_encoder_get_backlight_level_from_reg(struct amdgpu_device *adev)
39 {
40 u8 backlight_level;
41 u32 bios_2_scratch;
42
43 bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
44
45 backlight_level = ((bios_2_scratch & ATOM_S2_CURRENT_BL_LEVEL_MASK) >>
46 ATOM_S2_CURRENT_BL_LEVEL_SHIFT);
47
48 return backlight_level;
49 }
50
51 static void
52 amdgpu_atombios_encoder_set_backlight_level_to_reg(struct amdgpu_device *adev,
53 u8 backlight_level)
54 {
55 u32 bios_2_scratch;
56
57 bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
58
59 bios_2_scratch &= ~ATOM_S2_CURRENT_BL_LEVEL_MASK;
60 bios_2_scratch |= ((backlight_level << ATOM_S2_CURRENT_BL_LEVEL_SHIFT) &
61 ATOM_S2_CURRENT_BL_LEVEL_MASK);
62
63 WREG32(mmBIOS_SCRATCH_2, bios_2_scratch);
64 }
65
66 u8
67 amdgpu_atombios_encoder_get_backlight_level(struct amdgpu_encoder *amdgpu_encoder)
68 {
69 struct drm_device *dev = amdgpu_encoder->base.dev;
70 struct amdgpu_device *adev = dev->dev_private;
71
72 if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
73 return 0;
74
75 return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
76 }
77
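/*
 * Cache the requested level in the DIG encoder private data, mirror it to
 * the BIOS scratch register, and for UNIPHY/LVTMA encoders toggle the LCD
 * backlight through the DIG transmitter (a level of 0 turns it off).
 */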
78 void
79 amdgpu_atombios_encoder_set_backlight_level(struct amdgpu_encoder *amdgpu_encoder,
80 u8 level)
81 {
82 struct drm_encoder *encoder = &amdgpu_encoder->base;
83 struct drm_device *dev = amdgpu_encoder->base.dev;
84 struct amdgpu_device *adev = dev->dev_private;
85 struct amdgpu_encoder_atom_dig *dig;
86
87 if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
88 return;
89
90 if ((amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) &&
91 amdgpu_encoder->enc_priv) {
92 dig = amdgpu_encoder->enc_priv;
93 dig->backlight_level = level;
94 amdgpu_atombios_encoder_set_backlight_level_to_reg(adev, dig->backlight_level);
95
96 switch (amdgpu_encoder->encoder_id) {
97 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
98 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
99 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
100 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
101 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
102 if (dig->backlight_level == 0)
103 amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
104 ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
105 else {
106 amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
107 ATOM_TRANSMITTER_ACTION_BL_BRIGHTNESS_CONTROL, 0, 0);
108 amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
109 ATOM_TRANSMITTER_ACTION_LCD_BLON, 0, 0);
110 }
111 break;
112 default:
113 break;
114 }
115 }
116 }
117
118 #if defined(CONFIG_BACKLIGHT_CLASS_DEVICE) || defined(CONFIG_BACKLIGHT_CLASS_DEVICE_MODULE)
119
120 static u8 amdgpu_atombios_encoder_backlight_level(struct backlight_device *bd)
121 {
122 u8 level;
123
124 /* Convert brightness to hardware level */
125 if (bd->props.brightness < 0)
126 level = 0;
127 else if (bd->props.brightness > AMDGPU_MAX_BL_LEVEL)
128 level = AMDGPU_MAX_BL_LEVEL;
129 else
130 level = bd->props.brightness;
131
132 return level;
133 }
134
135 static int amdgpu_atombios_encoder_update_backlight_status(struct backlight_device *bd)
136 {
137 struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
138 struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
139
140 amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder,
141 amdgpu_atombios_encoder_backlight_level(bd));
142
143 return 0;
144 }
145
146 static int
147 amdgpu_atombios_encoder_get_backlight_brightness(struct backlight_device *bd)
148 {
149 struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
150 struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
151 struct drm_device *dev = amdgpu_encoder->base.dev;
152 struct amdgpu_device *adev = dev->dev_private;
153
154 return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
155 }
156
157 static const struct backlight_ops amdgpu_atombios_encoder_backlight_ops = {
158 .get_brightness = amdgpu_atombios_encoder_get_backlight_brightness,
159 .update_status = amdgpu_atombios_encoder_update_backlight_status,
160 };
161
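/*
 * Register a backlight class device for LCD panels whose backlight is
 * controlled by the GPU. Skipped on gmux-equipped Apple laptops and when
 * the firmware does not advertise GPU backlight control.
 */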
162 void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *amdgpu_encoder,
163 struct drm_connector *drm_connector)
164 {
165 struct drm_device *dev = amdgpu_encoder->base.dev;
166 struct amdgpu_device *adev = dev->dev_private;
167 struct backlight_device *bd;
168 struct backlight_properties props;
169 struct amdgpu_backlight_privdata *pdata;
170 struct amdgpu_encoder_atom_dig *dig;
171 u8 backlight_level;
172 char bl_name[16];
173
174 /* Mac laptops with multiple GPUs use the gmux driver for backlight
175 * so don't register a backlight device
176 */
177 if ((adev->pdev->subsystem_vendor == PCI_VENDOR_ID_APPLE) &&
178 (adev->pdev->device == 0x6741))
179 return;
180
181 if (!amdgpu_encoder->enc_priv)
182 return;
183
184 if (!adev->is_atom_bios)
185 return;
186
187 if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
188 return;
189
190 pdata = kmalloc(sizeof(struct amdgpu_backlight_privdata), GFP_KERNEL);
191 if (!pdata) {
192 DRM_ERROR("Memory allocation failed\n");
193 goto error;
194 }
195
196 memset(&props, 0, sizeof(props));
197 props.max_brightness = AMDGPU_MAX_BL_LEVEL;
198 props.type = BACKLIGHT_RAW;
199 snprintf(bl_name, sizeof(bl_name),
200 "amdgpu_bl%d", dev->primary->index);
201 bd = backlight_device_register(bl_name, drm_connector->kdev,
202 pdata, &amdgpu_atombios_encoder_backlight_ops, &props);
203 if (IS_ERR(bd)) {
204 DRM_ERROR("Backlight registration failed\n");
205 goto error;
206 }
207
208 pdata->encoder = amdgpu_encoder;
209
210 backlight_level = amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
211
212 dig = amdgpu_encoder->enc_priv;
213 dig->bl_dev = bd;
214
215 bd->props.brightness = amdgpu_atombios_encoder_get_backlight_brightness(bd);
216 bd->props.power = FB_BLANK_UNBLANK;
217 backlight_update_status(bd);
218
219 DRM_INFO("amdgpu atom DIG backlight initialized\n");
220
221 return;
222
223 error:
224 kfree(pdata);
225 return;
226 }
227
228 void
229 amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *amdgpu_encoder)
230 {
231 struct drm_device *dev = amdgpu_encoder->base.dev;
232 struct amdgpu_device *adev = dev->dev_private;
233 struct backlight_device *bd = NULL;
234 struct amdgpu_encoder_atom_dig *dig;
235
236 if (!amdgpu_encoder->enc_priv)
237 return;
238
239 if (!adev->is_atom_bios)
240 return;
241
242 if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
243 return;
244
245 dig = amdgpu_encoder->enc_priv;
246 bd = dig->bl_dev;
247 dig->bl_dev = NULL;
248
249 if (bd) {
250 struct amdgpu_legacy_backlight_privdata *pdata;
251
252 pdata = bl_get_data(bd);
253 backlight_device_unregister(bd);
254 kfree(pdata);
255
256 DRM_INFO("amdgpu atom LVDS backlight unloaded\n");
257 }
258 }
259
260 #else /* !CONFIG_BACKLIGHT_CLASS_DEVICE */
261
262 void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *encoder)
263 {
264 }
265
266 void amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *encoder)
267 {
268 }
269
270 #endif
271
272 bool amdgpu_atombios_encoder_is_digital(struct drm_encoder *encoder)
273 {
274 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
275 switch (amdgpu_encoder->encoder_id) {
276 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
277 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
278 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
279 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
280 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
281 return true;
282 default:
283 return false;
284 }
285 }
286
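/*
 * Adjust the requested mode for hardware quirks (interlaced vsync start,
 * minimum one-line vertical front porch), apply the panel's native mode
 * for LCD/RMX scaling, and set up the DP link configuration for DFP/LCD
 * sinks and DP bridges.
 */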
287 bool amdgpu_atombios_encoder_mode_fixup(struct drm_encoder *encoder,
288 const struct drm_display_mode *mode,
289 struct drm_display_mode *adjusted_mode)
290 {
291 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
292
293 /* set the active encoder to connector routing */
294 amdgpu_encoder_set_active_device(encoder);
295 drm_mode_set_crtcinfo(adjusted_mode, 0);
296
297 /* hw bug */
298 if ((mode->flags & DRM_MODE_FLAG_INTERLACE)
299 && (mode->crtc_vsync_start < (mode->crtc_vdisplay + 2)))
300 adjusted_mode->crtc_vsync_start = adjusted_mode->crtc_vdisplay + 2;
301
302 /* vertical FP must be at least 1 */
303 if (mode->crtc_vsync_start == mode->crtc_vdisplay)
304 adjusted_mode->crtc_vsync_start++;
305
306 /* get the native mode for scaling */
307 if (amdgpu_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT))
308 amdgpu_panel_mode_fixup(encoder, adjusted_mode);
309 else if (amdgpu_encoder->rmx_type != RMX_OFF)
310 amdgpu_panel_mode_fixup(encoder, adjusted_mode);
311
312 if ((amdgpu_encoder->active_device & (ATOM_DEVICE_DFP_SUPPORT | ATOM_DEVICE_LCD_SUPPORT)) ||
313 (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) {
314 struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
315 amdgpu_atombios_dp_set_link_config(connector, adjusted_mode);
316 }
317
318 return true;
319 }
320
321 static void
322 amdgpu_atombios_encoder_setup_dac(struct drm_encoder *encoder, int action)
323 {
324 struct drm_device *dev = encoder->dev;
325 struct amdgpu_device *adev = dev->dev_private;
326 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
327 DAC_ENCODER_CONTROL_PS_ALLOCATION args;
328 int index = 0;
329
330 memset(&args, 0, sizeof(args));
331
332 switch (amdgpu_encoder->encoder_id) {
333 case ENCODER_OBJECT_ID_INTERNAL_DAC1:
334 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
335 index = GetIndexIntoMasterTable(COMMAND, DAC1EncoderControl);
336 break;
337 case ENCODER_OBJECT_ID_INTERNAL_DAC2:
338 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
339 index = GetIndexIntoMasterTable(COMMAND, DAC2EncoderControl);
340 break;
341 }
342
343 args.ucAction = action;
344 args.ucDacStandard = ATOM_DAC1_PS2;
345 args.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
346
347 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
348
349 }
350
351 static u8 amdgpu_atombios_encoder_get_bpc(struct drm_encoder *encoder)
352 {
353 int bpc = 8;
354
355 if (encoder->crtc) {
356 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
357 bpc = amdgpu_crtc->bpc;
358 }
359
360 switch (bpc) {
361 case 0:
362 return PANEL_BPC_UNDEFINE;
363 case 6:
364 return PANEL_6BIT_PER_COLOR;
365 case 8:
366 default:
367 return PANEL_8BIT_PER_COLOR;
368 case 10:
369 return PANEL_10BIT_PER_COLOR;
370 case 12:
371 return PANEL_12BIT_PER_COLOR;
372 case 16:
373 return PANEL_16BIT_PER_COLOR;
374 }
375 }
376
377 union dvo_encoder_control {
378 ENABLE_EXTERNAL_TMDS_ENCODER_PS_ALLOCATION ext_tmds;
379 DVO_ENCODER_CONTROL_PS_ALLOCATION dvo;
380 DVO_ENCODER_CONTROL_PS_ALLOCATION_V3 dvo_v3;
381 DVO_ENCODER_CONTROL_PS_ALLOCATION_V1_4 dvo_v4;
382 };
383
384 static void
385 amdgpu_atombios_encoder_setup_dvo(struct drm_encoder *encoder, int action)
386 {
387 struct drm_device *dev = encoder->dev;
388 struct amdgpu_device *adev = dev->dev_private;
389 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
390 union dvo_encoder_control args;
391 int index = GetIndexIntoMasterTable(COMMAND, DVOEncoderControl);
392 uint8_t frev, crev;
393
394 memset(&args, 0, sizeof(args));
395
396 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
397 return;
398
399 switch (frev) {
400 case 1:
401 switch (crev) {
402 case 1:
403 /* R4xx, R5xx */
404 args.ext_tmds.sXTmdsEncoder.ucEnable = action;
405
406 if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
407 args.ext_tmds.sXTmdsEncoder.ucMisc |= PANEL_ENCODER_MISC_DUAL;
408
409 args.ext_tmds.sXTmdsEncoder.ucMisc |= ATOM_PANEL_MISC_888RGB;
410 break;
411 case 2:
412 /* RS600/690/740 */
413 args.dvo.sDVOEncoder.ucAction = action;
414 args.dvo.sDVOEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
415 /* DFP1, CRT1, TV1 depending on the type of port */
416 args.dvo.sDVOEncoder.ucDeviceType = ATOM_DEVICE_DFP1_INDEX;
417
418 if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
419 args.dvo.sDVOEncoder.usDevAttr.sDigAttrib.ucAttribute |= PANEL_ENCODER_MISC_DUAL;
420 break;
421 case 3:
422 /* R6xx */
423 args.dvo_v3.ucAction = action;
424 args.dvo_v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
425 args.dvo_v3.ucDVOConfig = 0; /* XXX */
426 break;
427 case 4:
428 /* DCE8 */
429 args.dvo_v4.ucAction = action;
430 args.dvo_v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
431 args.dvo_v4.ucDVOConfig = 0; /* XXX */
432 args.dvo_v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
433 break;
434 default:
435 DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
436 break;
437 }
438 break;
439 default:
440 DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
441 break;
442 }
443
444 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
445 }
446
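/*
 * Derive the ATOM encoder mode (DP, HDMI, DVI, LVDS, CRT or TV) from the
 * connector type, the audio setting and what the sink's EDID reports.
 */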
447 int amdgpu_atombios_encoder_get_encoder_mode(struct drm_encoder *encoder)
448 {
449 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
450 struct drm_connector *connector;
451 struct amdgpu_connector *amdgpu_connector;
452 struct amdgpu_connector_atom_dig *dig_connector;
453
454 /* dp bridges are always DP */
455 if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)
456 return ATOM_ENCODER_MODE_DP;
457
458 /* DVO is always DVO */
459 if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DVO1) ||
460 (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1))
461 return ATOM_ENCODER_MODE_DVO;
462
463 connector = amdgpu_get_connector_for_encoder(encoder);
464 /* if we don't have an active device yet, just use one of
465 * the connectors tied to the encoder.
466 */
467 if (!connector)
468 connector = amdgpu_get_connector_for_encoder_init(encoder);
469 amdgpu_connector = to_amdgpu_connector(connector);
470
471 switch (connector->connector_type) {
472 case DRM_MODE_CONNECTOR_DVII:
473 case DRM_MODE_CONNECTOR_HDMIB: /* HDMI-B is basically DL-DVI; analog works fine */
474 if (amdgpu_audio != 0) {
475 if (amdgpu_connector->use_digital &&
476 (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE))
477 return ATOM_ENCODER_MODE_HDMI;
478 else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
479 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
480 return ATOM_ENCODER_MODE_HDMI;
481 else if (amdgpu_connector->use_digital)
482 return ATOM_ENCODER_MODE_DVI;
483 else
484 return ATOM_ENCODER_MODE_CRT;
485 } else if (amdgpu_connector->use_digital) {
486 return ATOM_ENCODER_MODE_DVI;
487 } else {
488 return ATOM_ENCODER_MODE_CRT;
489 }
490 break;
491 case DRM_MODE_CONNECTOR_DVID:
492 case DRM_MODE_CONNECTOR_HDMIA:
493 default:
494 if (amdgpu_audio != 0) {
495 if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
496 return ATOM_ENCODER_MODE_HDMI;
497 else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
498 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
499 return ATOM_ENCODER_MODE_HDMI;
500 else
501 return ATOM_ENCODER_MODE_DVI;
502 } else {
503 return ATOM_ENCODER_MODE_DVI;
504 }
505 break;
506 case DRM_MODE_CONNECTOR_LVDS:
507 return ATOM_ENCODER_MODE_LVDS;
508 break;
509 case DRM_MODE_CONNECTOR_DisplayPort:
510 dig_connector = amdgpu_connector->con_priv;
511 if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
512 (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
513 return ATOM_ENCODER_MODE_DP;
514 } else if (amdgpu_audio != 0) {
515 if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
516 return ATOM_ENCODER_MODE_HDMI;
517 else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
518 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
519 return ATOM_ENCODER_MODE_HDMI;
520 else
521 return ATOM_ENCODER_MODE_DVI;
522 } else {
523 return ATOM_ENCODER_MODE_DVI;
524 }
525 break;
526 case DRM_MODE_CONNECTOR_eDP:
527 return ATOM_ENCODER_MODE_DP;
528 case DRM_MODE_CONNECTOR_DVIA:
529 case DRM_MODE_CONNECTOR_VGA:
530 return ATOM_ENCODER_MODE_CRT;
531 break;
532 case DRM_MODE_CONNECTOR_Composite:
533 case DRM_MODE_CONNECTOR_SVIDEO:
534 case DRM_MODE_CONNECTOR_9PinDIN:
535 /* fix me */
536 return ATOM_ENCODER_MODE_TV;
537 /*return ATOM_ENCODER_MODE_CV;*/
538 break;
539 }
540 }
541
542 /*
543 * DIG Encoder/Transmitter Setup
544 *
545 * DCE 6.0
546 * - 3 DIG transmitter blocks UNIPHY0/1/2 (links A and B).
547 * Supports up to 6 digital outputs
548 * - 6 DIG encoder blocks.
549 * - DIG to PHY mapping is hardcoded
550 * DIG1 drives UNIPHY0 link A, A+B
551 * DIG2 drives UNIPHY0 link B
552 * DIG3 drives UNIPHY1 link A, A+B
553 * DIG4 drives UNIPHY1 link B
554 * DIG5 drives UNIPHY2 link A, A+B
555 * DIG6 drives UNIPHY2 link B
556 *
557 * Routing
558 * crtc -> dig encoder -> UNIPHY/LVTMA (1 or 2 links)
559 * Examples:
560 * crtc0 -> dig2 -> LVTMA links A+B -> TMDS/HDMI
561 * crtc1 -> dig1 -> UNIPHY0 link B -> DP
562 * crtc0 -> dig1 -> UNIPHY2 link A -> LVDS
563 * crtc1 -> dig2 -> UNIPHY1 link B+A -> TMDS/HDMI
564 */
565
566 union dig_encoder_control {
567 DIG_ENCODER_CONTROL_PS_ALLOCATION v1;
568 DIG_ENCODER_CONTROL_PARAMETERS_V2 v2;
569 DIG_ENCODER_CONTROL_PARAMETERS_V3 v3;
570 DIG_ENCODER_CONTROL_PARAMETERS_V4 v4;
571 };
572
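/*
 * Program the DIGxEncoderControl table: action, pixel clock, encoder or
 * panel mode, lane count and DP link rate, with the exact parameter layout
 * chosen by the parsed table revision.
 */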
573 void
574 amdgpu_atombios_encoder_setup_dig_encoder(struct drm_encoder *encoder,
575 int action, int panel_mode)
576 {
577 struct drm_device *dev = encoder->dev;
578 struct amdgpu_device *adev = dev->dev_private;
579 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
580 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
581 struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
582 union dig_encoder_control args;
583 int index = GetIndexIntoMasterTable(COMMAND, DIGxEncoderControl);
584 uint8_t frev, crev;
585 int dp_clock = 0;
586 int dp_lane_count = 0;
587 int hpd_id = AMDGPU_HPD_NONE;
588
589 if (connector) {
590 struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
591 struct amdgpu_connector_atom_dig *dig_connector =
592 amdgpu_connector->con_priv;
593
594 dp_clock = dig_connector->dp_clock;
595 dp_lane_count = dig_connector->dp_lane_count;
596 hpd_id = amdgpu_connector->hpd.hpd;
597 }
598
599 /* no dig encoder assigned */
600 if (dig->dig_encoder == -1)
601 return;
602
603 memset(&args, 0, sizeof(args));
604
605 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
606 return;
607
608 switch (frev) {
609 case 1:
610 switch (crev) {
611 case 1:
612 args.v1.ucAction = action;
613 args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
614 if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
615 args.v3.ucPanelMode = panel_mode;
616 else
617 args.v1.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
618
619 if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode))
620 args.v1.ucLaneNum = dp_lane_count;
621 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
622 args.v1.ucLaneNum = 8;
623 else
624 args.v1.ucLaneNum = 4;
625
626 if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode) && (dp_clock == 270000))
627 args.v1.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
628 switch (amdgpu_encoder->encoder_id) {
629 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
630 args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER1;
631 break;
632 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
633 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
634 args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER2;
635 break;
636 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
637 args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER3;
638 break;
639 }
640 if (dig->linkb)
641 args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKB;
642 else
643 args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKA;
644 break;
645 case 2:
646 case 3:
647 args.v3.ucAction = action;
648 args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
649 if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
650 args.v3.ucPanelMode = panel_mode;
651 else
652 args.v3.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
653
654 if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode))
655 args.v3.ucLaneNum = dp_lane_count;
656 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
657 args.v3.ucLaneNum = 8;
658 else
659 args.v3.ucLaneNum = 4;
660
661 if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode) && (dp_clock == 270000))
662 args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
663 args.v3.acConfig.ucDigSel = dig->dig_encoder;
664 args.v3.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
665 break;
666 case 4:
667 args.v4.ucAction = action;
668 args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
669 if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
670 args.v4.ucPanelMode = panel_mode;
671 else
672 args.v4.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
673
674 if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode))
675 args.v4.ucLaneNum = dp_lane_count;
676 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
677 args.v4.ucLaneNum = 8;
678 else
679 args.v4.ucLaneNum = 4;
680
681 if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode)) {
682 if (dp_clock == 540000)
683 args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_5_40GHZ;
684 else if (dp_clock == 324000)
685 args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_3_24GHZ;
686 else if (dp_clock == 270000)
687 args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_2_70GHZ;
688 else
689 args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_1_62GHZ;
690 }
691 args.v4.acConfig.ucDigSel = dig->dig_encoder;
692 args.v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
693 if (hpd_id == AMDGPU_HPD_NONE)
694 args.v4.ucHPD_ID = 0;
695 else
696 args.v4.ucHPD_ID = hpd_id + 1;
697 break;
698 default:
699 DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
700 break;
701 }
702 break;
703 default:
704 DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
705 break;
706 }
707
708 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
709
710 }
711
712 union dig_transmitter_control {
713 DIG_TRANSMITTER_CONTROL_PS_ALLOCATION v1;
714 DIG_TRANSMITTER_CONTROL_PARAMETERS_V2 v2;
715 DIG_TRANSMITTER_CONTROL_PARAMETERS_V3 v3;
716 DIG_TRANSMITTER_CONTROL_PARAMETERS_V4 v4;
717 DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_5 v5;
718 };
719
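/*
 * Program the transmitter control table (UNIPHY/LVTMA/DVO): pixel or
 * symbol clock, lane count, link/encoder selection, PHY reference clock
 * source and coherent mode, again keyed off the table revision.
 */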
720 void
721 amdgpu_atombios_encoder_setup_dig_transmitter(struct drm_encoder *encoder, int action,
722 uint8_t lane_num, uint8_t lane_set)
723 {
724 struct drm_device *dev = encoder->dev;
725 struct amdgpu_device *adev = dev->dev_private;
726 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
727 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
728 struct drm_connector *connector;
729 union dig_transmitter_control args;
730 int index = 0;
731 uint8_t frev, crev;
732 bool is_dp = false;
733 int pll_id = 0;
734 int dp_clock = 0;
735 int dp_lane_count = 0;
736 int connector_object_id = 0;
737 int igp_lane_info = 0;
738 int dig_encoder = dig->dig_encoder;
739 int hpd_id = AMDGPU_HPD_NONE;
740
741 if (action == ATOM_TRANSMITTER_ACTION_INIT) {
742 connector = amdgpu_get_connector_for_encoder_init(encoder);
743 /* just needed to avoid bailing in the encoder check. the encoder
744 * isn't used for init
745 */
746 dig_encoder = 0;
747 } else
748 connector = amdgpu_get_connector_for_encoder(encoder);
749
750 if (connector) {
751 struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
752 struct amdgpu_connector_atom_dig *dig_connector =
753 amdgpu_connector->con_priv;
754
755 hpd_id = amdgpu_connector->hpd.hpd;
756 dp_clock = dig_connector->dp_clock;
757 dp_lane_count = dig_connector->dp_lane_count;
758 connector_object_id =
759 (amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
760 }
761
762 if (encoder->crtc) {
763 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
764 pll_id = amdgpu_crtc->pll_id;
765 }
766
767 /* no dig encoder assigned */
768 if (dig_encoder == -1)
769 return;
770
771 if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)))
772 is_dp = true;
773
774 memset(&args, 0, sizeof(args));
775
776 switch (amdgpu_encoder->encoder_id) {
777 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
778 index = GetIndexIntoMasterTable(COMMAND, DVOOutputControl);
779 break;
780 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
781 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
782 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
783 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
784 index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
785 break;
786 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
787 index = GetIndexIntoMasterTable(COMMAND, LVTMATransmitterControl);
788 break;
789 }
790
791 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
792 return;
793
794 switch (frev) {
795 case 1:
796 switch (crev) {
797 case 1:
798 args.v1.ucAction = action;
799 if (action == ATOM_TRANSMITTER_ACTION_INIT) {
800 args.v1.usInitInfo = cpu_to_le16(connector_object_id);
801 } else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
802 args.v1.asMode.ucLaneSel = lane_num;
803 args.v1.asMode.ucLaneSet = lane_set;
804 } else {
805 if (is_dp)
806 args.v1.usPixelClock = cpu_to_le16(dp_clock / 10);
807 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
808 args.v1.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
809 else
810 args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
811 }
812
813 args.v1.ucConfig = ATOM_TRANSMITTER_CONFIG_CLKSRC_PPLL;
814
815 if (dig_encoder)
816 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG2_ENCODER;
817 else
818 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG1_ENCODER;
819
820 if ((adev->flags & AMD_IS_APU) &&
821 (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_UNIPHY)) {
822 if (is_dp ||
823 !amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) {
824 if (igp_lane_info & 0x1)
825 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_3;
826 else if (igp_lane_info & 0x2)
827 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_4_7;
828 else if (igp_lane_info & 0x4)
829 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_11;
830 else if (igp_lane_info & 0x8)
831 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_12_15;
832 } else {
833 if (igp_lane_info & 0x3)
834 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_7;
835 else if (igp_lane_info & 0xc)
836 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_15;
837 }
838 }
839
840 if (dig->linkb)
841 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKB;
842 else
843 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKA;
844
845 if (is_dp)
846 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
847 else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
848 if (dig->coherent_mode)
849 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
850 if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
851 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_8LANE_LINK;
852 }
853 break;
854 case 2:
855 args.v2.ucAction = action;
856 if (action == ATOM_TRANSMITTER_ACTION_INIT) {
857 args.v2.usInitInfo = cpu_to_le16(connector_object_id);
858 } else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
859 args.v2.asMode.ucLaneSel = lane_num;
860 args.v2.asMode.ucLaneSet = lane_set;
861 } else {
862 if (is_dp)
863 args.v2.usPixelClock = cpu_to_le16(dp_clock / 10);
864 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
865 args.v2.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
866 else
867 args.v2.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
868 }
869
870 args.v2.acConfig.ucEncoderSel = dig_encoder;
871 if (dig->linkb)
872 args.v2.acConfig.ucLinkSel = 1;
873
874 switch (amdgpu_encoder->encoder_id) {
875 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
876 args.v2.acConfig.ucTransmitterSel = 0;
877 break;
878 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
879 args.v2.acConfig.ucTransmitterSel = 1;
880 break;
881 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
882 args.v2.acConfig.ucTransmitterSel = 2;
883 break;
884 }
885
886 if (is_dp) {
887 args.v2.acConfig.fCoherentMode = 1;
888 args.v2.acConfig.fDPConnector = 1;
889 } else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
890 if (dig->coherent_mode)
891 args.v2.acConfig.fCoherentMode = 1;
892 if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
893 args.v2.acConfig.fDualLinkConnector = 1;
894 }
895 break;
896 case 3:
897 args.v3.ucAction = action;
898 if (action == ATOM_TRANSMITTER_ACTION_INIT) {
899 args.v3.usInitInfo = cpu_to_le16(connector_object_id);
900 } else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
901 args.v3.asMode.ucLaneSel = lane_num;
902 args.v3.asMode.ucLaneSet = lane_set;
903 } else {
904 if (is_dp)
905 args.v3.usPixelClock = cpu_to_le16(dp_clock / 10);
906 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
907 args.v3.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
908 else
909 args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
910 }
911
912 if (is_dp)
913 args.v3.ucLaneNum = dp_lane_count;
914 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
915 args.v3.ucLaneNum = 8;
916 else
917 args.v3.ucLaneNum = 4;
918
919 if (dig->linkb)
920 args.v3.acConfig.ucLinkSel = 1;
921 if (dig_encoder & 1)
922 args.v3.acConfig.ucEncoderSel = 1;
923
924 /* Select the PLL for the PHY
925 * DP PHY should be clocked from external src if there is
926 * one.
927 */
928 /* On DCE4, if there is an external clock, it generates the DP ref clock */
929 if (is_dp && adev->clock.dp_extclk)
930 args.v3.acConfig.ucRefClkSource = 2; /* external src */
931 else
932 args.v3.acConfig.ucRefClkSource = pll_id;
933
934 switch (amdgpu_encoder->encoder_id) {
935 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
936 args.v3.acConfig.ucTransmitterSel = 0;
937 break;
938 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
939 args.v3.acConfig.ucTransmitterSel = 1;
940 break;
941 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
942 args.v3.acConfig.ucTransmitterSel = 2;
943 break;
944 }
945
946 if (is_dp)
947 args.v3.acConfig.fCoherentMode = 1; /* DP requires coherent */
948 else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
949 if (dig->coherent_mode)
950 args.v3.acConfig.fCoherentMode = 1;
951 if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
952 args.v3.acConfig.fDualLinkConnector = 1;
953 }
954 break;
955 case 4:
956 args.v4.ucAction = action;
957 if (action == ATOM_TRANSMITTER_ACTION_INIT) {
958 args.v4.usInitInfo = cpu_to_le16(connector_object_id);
959 } else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
960 args.v4.asMode.ucLaneSel = lane_num;
961 args.v4.asMode.ucLaneSet = lane_set;
962 } else {
963 if (is_dp)
964 args.v4.usPixelClock = cpu_to_le16(dp_clock / 10);
965 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
966 args.v4.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
967 else
968 args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
969 }
970
971 if (is_dp)
972 args.v4.ucLaneNum = dp_lane_count;
973 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
974 args.v4.ucLaneNum = 8;
975 else
976 args.v4.ucLaneNum = 4;
977
978 if (dig->linkb)
979 args.v4.acConfig.ucLinkSel = 1;
980 if (dig_encoder & 1)
981 args.v4.acConfig.ucEncoderSel = 1;
982
983 /* Select the PLL for the PHY
984 * DP PHY should be clocked from external src if there is
985 * one.
986 */
987 /* On DCE5 DCPLL usually generates the DP ref clock */
988 if (is_dp) {
989 if (adev->clock.dp_extclk)
990 args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_EXTCLK;
991 else
992 args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_DCPLL;
993 } else
994 args.v4.acConfig.ucRefClkSource = pll_id;
995
996 switch (amdgpu_encoder->encoder_id) {
997 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
998 args.v4.acConfig.ucTransmitterSel = 0;
999 break;
1000 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1001 args.v4.acConfig.ucTransmitterSel = 1;
1002 break;
1003 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1004 args.v4.acConfig.ucTransmitterSel = 2;
1005 break;
1006 }
1007
1008 if (is_dp)
1009 args.v4.acConfig.fCoherentMode = 1; /* DP requires coherent */
1010 else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
1011 if (dig->coherent_mode)
1012 args.v4.acConfig.fCoherentMode = 1;
1013 if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1014 args.v4.acConfig.fDualLinkConnector = 1;
1015 }
1016 break;
1017 case 5:
1018 args.v5.ucAction = action;
1019 if (is_dp)
1020 args.v5.usSymClock = cpu_to_le16(dp_clock / 10);
1021 else
1022 args.v5.usSymClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1023
1024 switch (amdgpu_encoder->encoder_id) {
1025 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1026 if (dig->linkb)
1027 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYB;
1028 else
1029 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYA;
1030 break;
1031 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1032 if (dig->linkb)
1033 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYD;
1034 else
1035 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYC;
1036 break;
1037 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1038 if (dig->linkb)
1039 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYF;
1040 else
1041 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYE;
1042 break;
1043 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1044 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYG;
1045 break;
1046 }
1047 if (is_dp)
1048 args.v5.ucLaneNum = dp_lane_count;
1049 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1050 args.v5.ucLaneNum = 8;
1051 else
1052 args.v5.ucLaneNum = 4;
1053 args.v5.ucConnObjId = connector_object_id;
1054 args.v5.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1055
1056 if (is_dp && adev->clock.dp_extclk)
1057 args.v5.asConfig.ucPhyClkSrcId = ENCODER_REFCLK_SRC_EXTCLK;
1058 else
1059 args.v5.asConfig.ucPhyClkSrcId = pll_id;
1060
1061 if (is_dp)
1062 args.v5.asConfig.ucCoherentMode = 1; /* DP requires coherent */
1063 else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
1064 if (dig->coherent_mode)
1065 args.v5.asConfig.ucCoherentMode = 1;
1066 }
1067 if (hpd_id == AMDGPU_HPD_NONE)
1068 args.v5.asConfig.ucHPDSel = 0;
1069 else
1070 args.v5.asConfig.ucHPDSel = hpd_id + 1;
1071 args.v5.ucDigEncoderSel = 1 << dig_encoder;
1072 args.v5.ucDPLaneSet = lane_set;
1073 break;
1074 default:
1075 DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
1076 break;
1077 }
1078 break;
1079 default:
1080 DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
1081 break;
1082 }
1083
1084 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1085 }
1086
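/*
 * Power the eDP panel on or off through the UNIPHY transmitter table.
 * On power-on, poll HPD for up to 300 ms and report whether the panel
 * actually came up.
 */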
1087 bool
1088 amdgpu_atombios_encoder_set_edp_panel_power(struct drm_connector *connector,
1089 int action)
1090 {
1091 struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1092 struct drm_device *dev = amdgpu_connector->base.dev;
1093 struct amdgpu_device *adev = dev->dev_private;
1094 union dig_transmitter_control args;
1095 int index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
1096 uint8_t frev, crev;
1097
1098 if (connector->connector_type != DRM_MODE_CONNECTOR_eDP)
1099 goto done;
1100
1101 if ((action != ATOM_TRANSMITTER_ACTION_POWER_ON) &&
1102 (action != ATOM_TRANSMITTER_ACTION_POWER_OFF))
1103 goto done;
1104
1105 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1106 goto done;
1107
1108 memset(&args, 0, sizeof(args));
1109
1110 args.v1.ucAction = action;
1111
1112 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1113
1114 /* wait for the panel to power up */
1115 if (action == ATOM_TRANSMITTER_ACTION_POWER_ON) {
1116 int i;
1117
1118 for (i = 0; i < 300; i++) {
1119 if (amdgpu_display_hpd_sense(adev, amdgpu_connector->hpd.hpd))
1120 return true;
1121 mdelay(1);
1122 }
1123 return false;
1124 }
1125 done:
1126 return true;
1127 }
1128
1129 union external_encoder_control {
1130 EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION v1;
1131 EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION_V3 v3;
1132 };
1133
1134 static void
1135 amdgpu_atombios_encoder_setup_external_encoder(struct drm_encoder *encoder,
1136 struct drm_encoder *ext_encoder,
1137 int action)
1138 {
1139 struct drm_device *dev = encoder->dev;
1140 struct amdgpu_device *adev = dev->dev_private;
1141 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1142 struct amdgpu_encoder *ext_amdgpu_encoder = to_amdgpu_encoder(ext_encoder);
1143 union external_encoder_control args;
1144 struct drm_connector *connector;
1145 int index = GetIndexIntoMasterTable(COMMAND, ExternalEncoderControl);
1146 u8 frev, crev;
1147 int dp_clock = 0;
1148 int dp_lane_count = 0;
1149 int connector_object_id = 0;
1150 u32 ext_enum = (ext_amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
1151
1152 if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
1153 connector = amdgpu_get_connector_for_encoder_init(encoder);
1154 else
1155 connector = amdgpu_get_connector_for_encoder(encoder);
1156
1157 if (connector) {
1158 struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1159 struct amdgpu_connector_atom_dig *dig_connector =
1160 amdgpu_connector->con_priv;
1161
1162 dp_clock = dig_connector->dp_clock;
1163 dp_lane_count = dig_connector->dp_lane_count;
1164 connector_object_id =
1165 (amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
1166 }
1167
1168 memset(&args, 0, sizeof(args));
1169
1170 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1171 return;
1172
1173 switch (frev) {
1174 case 1:
1175 /* no params on frev 1 */
1176 break;
1177 case 2:
1178 switch (crev) {
1179 case 1:
1180 case 2:
1181 args.v1.sDigEncoder.ucAction = action;
1182 args.v1.sDigEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1183 args.v1.sDigEncoder.ucEncoderMode =
1184 amdgpu_atombios_encoder_get_encoder_mode(encoder);
1185
1186 if (ENCODER_MODE_IS_DP(args.v1.sDigEncoder.ucEncoderMode)) {
1187 if (dp_clock == 270000)
1188 args.v1.sDigEncoder.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
1189 args.v1.sDigEncoder.ucLaneNum = dp_lane_count;
1190 } else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1191 args.v1.sDigEncoder.ucLaneNum = 8;
1192 else
1193 args.v1.sDigEncoder.ucLaneNum = 4;
1194 break;
1195 case 3:
1196 args.v3.sExtEncoder.ucAction = action;
1197 if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
1198 args.v3.sExtEncoder.usConnectorId = cpu_to_le16(connector_object_id);
1199 else
1200 args.v3.sExtEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1201 args.v3.sExtEncoder.ucEncoderMode =
1202 amdgpu_atombios_encoder_get_encoder_mode(encoder);
1203
1204 if (ENCODER_MODE_IS_DP(args.v3.sExtEncoder.ucEncoderMode)) {
1205 if (dp_clock == 270000)
1206 args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
1207 else if (dp_clock == 540000)
1208 args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_5_40GHZ;
1209 args.v3.sExtEncoder.ucLaneNum = dp_lane_count;
1210 } else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1211 args.v3.sExtEncoder.ucLaneNum = 8;
1212 else
1213 args.v3.sExtEncoder.ucLaneNum = 4;
1214 switch (ext_enum) {
1215 case GRAPH_OBJECT_ENUM_ID1:
1216 args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER1;
1217 break;
1218 case GRAPH_OBJECT_ENUM_ID2:
1219 args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER2;
1220 break;
1221 case GRAPH_OBJECT_ENUM_ID3:
1222 args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER3;
1223 break;
1224 }
1225 args.v3.sExtEncoder.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
1226 break;
1227 default:
1228 DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
1229 return;
1230 }
1231 break;
1232 default:
1233 DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
1234 return;
1235 }
1236 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1237 }
1238
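/*
 * Enable or disable the whole DIG path: panel mode and encoder setup,
 * eDP panel power, transmitter enable/disable, DP link training and
 * video on/off, plus LCD backlight handling.
 */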
1239 static void
1240 amdgpu_atombios_encoder_setup_dig(struct drm_encoder *encoder, int action)
1241 {
1242 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1243 struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1244 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
1245 struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1246 struct amdgpu_connector *amdgpu_connector = NULL;
1247 struct amdgpu_connector_atom_dig *amdgpu_dig_connector = NULL;
1248
1249 if (connector) {
1250 amdgpu_connector = to_amdgpu_connector(connector);
1251 amdgpu_dig_connector = amdgpu_connector->con_priv;
1252 }
1253
1254 if (action == ATOM_ENABLE) {
1255 if (!connector)
1256 dig->panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
1257 else
1258 dig->panel_mode = amdgpu_atombios_dp_get_panel_mode(encoder, connector);
1259
1260 /* setup and enable the encoder */
1261 amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_SETUP, 0);
1262 amdgpu_atombios_encoder_setup_dig_encoder(encoder,
1263 ATOM_ENCODER_CMD_SETUP_PANEL_MODE,
1264 dig->panel_mode);
1265 if (ext_encoder)
1266 amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1267 EXTERNAL_ENCODER_ACTION_V3_ENCODER_SETUP);
1268 if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1269 connector) {
1270 if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
1271 amdgpu_atombios_encoder_set_edp_panel_power(connector,
1272 ATOM_TRANSMITTER_ACTION_POWER_ON);
1273 amdgpu_dig_connector->edp_on = true;
1274 }
1275 }
1276 /* enable the transmitter */
1277 amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1278 ATOM_TRANSMITTER_ACTION_ENABLE,
1279 0, 0);
1280 if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1281 connector) {
1282 /* DP_SET_POWER_D0 is set in amdgpu_atombios_dp_link_train */
1283 amdgpu_atombios_dp_link_train(encoder, connector);
1284 amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_DP_VIDEO_ON, 0);
1285 }
1286 if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
1287 amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder, dig->backlight_level);
1288 if (ext_encoder)
1289 amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_ENABLE);
1290 } else {
1291 if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1292 connector)
1293 amdgpu_atombios_encoder_setup_dig_encoder(encoder,
1294 ATOM_ENCODER_CMD_DP_VIDEO_OFF, 0);
1295 if (ext_encoder)
1296 amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_DISABLE);
1297 if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
1298 amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1299 ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
1300
1301 if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1302 connector)
1303 amdgpu_atombios_dp_set_rx_power_state(connector, DP_SET_POWER_D3);
1304 /* disable the transmitter */
1305 amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1306 ATOM_TRANSMITTER_ACTION_DISABLE, 0, 0);
1307 if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1308 connector) {
1309 if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
1310 amdgpu_atombios_encoder_set_edp_panel_power(connector,
1311 ATOM_TRANSMITTER_ACTION_POWER_OFF);
1312 amdgpu_dig_connector->edp_on = false;
1313 }
1314 }
1315 }
1316 }
1317
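/* Map DPMS states onto the ATOM enable/disable sequences per encoder type. */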
1318 void
1319 amdgpu_atombios_encoder_dpms(struct drm_encoder *encoder, int mode)
1320 {
1321 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1322
1323 DRM_DEBUG_KMS("encoder dpms %d to mode %d, devices %08x, active_devices %08x\n",
1324 amdgpu_encoder->encoder_id, mode, amdgpu_encoder->devices,
1325 amdgpu_encoder->active_device);
1326 switch (amdgpu_encoder->encoder_id) {
1327 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1328 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1329 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1330 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1331 switch (mode) {
1332 case DRM_MODE_DPMS_ON:
1333 amdgpu_atombios_encoder_setup_dig(encoder, ATOM_ENABLE);
1334 break;
1335 case DRM_MODE_DPMS_STANDBY:
1336 case DRM_MODE_DPMS_SUSPEND:
1337 case DRM_MODE_DPMS_OFF:
1338 amdgpu_atombios_encoder_setup_dig(encoder, ATOM_DISABLE);
1339 break;
1340 }
1341 break;
1342 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1343 switch (mode) {
1344 case DRM_MODE_DPMS_ON:
1345 amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_ENABLE);
1346 break;
1347 case DRM_MODE_DPMS_STANDBY:
1348 case DRM_MODE_DPMS_SUSPEND:
1349 case DRM_MODE_DPMS_OFF:
1350 amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_DISABLE);
1351 break;
1352 }
1353 break;
1354 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1355 switch (mode) {
1356 case DRM_MODE_DPMS_ON:
1357 amdgpu_atombios_encoder_setup_dac(encoder, ATOM_ENABLE);
1358 break;
1359 case DRM_MODE_DPMS_STANDBY:
1360 case DRM_MODE_DPMS_SUSPEND:
1361 case DRM_MODE_DPMS_OFF:
1362 amdgpu_atombios_encoder_setup_dac(encoder, ATOM_DISABLE);
1363 break;
1364 }
1365 break;
1366 default:
1367 return;
1368 }
1369 }
1370
1371 union crtc_source_param {
1372 SELECT_CRTC_SOURCE_PS_ALLOCATION v1;
1373 SELECT_CRTC_SOURCE_PARAMETERS_V2 v2;
1374 SELECT_CRTC_SOURCE_PARAMETERS_V3 v3;
1375 };
1376
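/*
 * Tell the VBIOS which encoder a CRTC is driving via the SelectCRTC_Source
 * table; the encoder ID and encode mode depend on the table revision.
 */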
1377 void
1378 amdgpu_atombios_encoder_set_crtc_source(struct drm_encoder *encoder)
1379 {
1380 struct drm_device *dev = encoder->dev;
1381 struct amdgpu_device *adev = dev->dev_private;
1382 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1383 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
1384 union crtc_source_param args;
1385 int index = GetIndexIntoMasterTable(COMMAND, SelectCRTC_Source);
1386 uint8_t frev, crev;
1387 struct amdgpu_encoder_atom_dig *dig;
1388
1389 memset(&args, 0, sizeof(args));
1390
1391 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1392 return;
1393
1394 switch (frev) {
1395 case 1:
1396 switch (crev) {
1397 case 1:
1398 default:
1399 args.v1.ucCRTC = amdgpu_crtc->crtc_id;
1400 switch (amdgpu_encoder->encoder_id) {
1401 case ENCODER_OBJECT_ID_INTERNAL_TMDS1:
1402 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1:
1403 args.v1.ucDevice = ATOM_DEVICE_DFP1_INDEX;
1404 break;
1405 case ENCODER_OBJECT_ID_INTERNAL_LVDS:
1406 case ENCODER_OBJECT_ID_INTERNAL_LVTM1:
1407 if (amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT)
1408 args.v1.ucDevice = ATOM_DEVICE_LCD1_INDEX;
1409 else
1410 args.v1.ucDevice = ATOM_DEVICE_DFP3_INDEX;
1411 break;
1412 case ENCODER_OBJECT_ID_INTERNAL_DVO1:
1413 case ENCODER_OBJECT_ID_INTERNAL_DDI:
1414 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1415 args.v1.ucDevice = ATOM_DEVICE_DFP2_INDEX;
1416 break;
1417 case ENCODER_OBJECT_ID_INTERNAL_DAC1:
1418 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1419 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1420 args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
1421 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1422 args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
1423 else
1424 args.v1.ucDevice = ATOM_DEVICE_CRT1_INDEX;
1425 break;
1426 case ENCODER_OBJECT_ID_INTERNAL_DAC2:
1427 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1428 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1429 args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
1430 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1431 args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
1432 else
1433 args.v1.ucDevice = ATOM_DEVICE_CRT2_INDEX;
1434 break;
1435 }
1436 break;
1437 case 2:
1438 args.v2.ucCRTC = amdgpu_crtc->crtc_id;
1439 if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
1440 struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1441
1442 if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
1443 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1444 else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
1445 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
1446 else
1447 args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1448 } else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
1449 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1450 } else {
1451 args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1452 }
1453 switch (amdgpu_encoder->encoder_id) {
1454 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1455 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1456 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1457 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1458 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
1459 dig = amdgpu_encoder->enc_priv;
1460 switch (dig->dig_encoder) {
1461 case 0:
1462 args.v2.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
1463 break;
1464 case 1:
1465 args.v2.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
1466 break;
1467 case 2:
1468 args.v2.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
1469 break;
1470 case 3:
1471 args.v2.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
1472 break;
1473 case 4:
1474 args.v2.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
1475 break;
1476 case 5:
1477 args.v2.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
1478 break;
1479 case 6:
1480 args.v2.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
1481 break;
1482 }
1483 break;
1484 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1485 args.v2.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
1486 break;
1487 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1488 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1489 args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1490 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1491 args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1492 else
1493 args.v2.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
1494 break;
1495 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1496 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1497 args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1498 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1499 args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1500 else
1501 args.v2.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
1502 break;
1503 }
1504 break;
1505 case 3:
1506 args.v3.ucCRTC = amdgpu_crtc->crtc_id;
1507 if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
1508 struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1509
1510 if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
1511 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1512 else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
1513 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
1514 else
1515 args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1516 } else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
1517 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1518 } else {
1519 args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1520 }
1521 args.v3.ucDstBpc = amdgpu_atombios_encoder_get_bpc(encoder);
1522 switch (amdgpu_encoder->encoder_id) {
1523 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1524 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1525 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1526 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1527 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
1528 dig = amdgpu_encoder->enc_priv;
1529 switch (dig->dig_encoder) {
1530 case 0:
1531 args.v3.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
1532 break;
1533 case 1:
1534 args.v3.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
1535 break;
1536 case 2:
1537 args.v3.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
1538 break;
1539 case 3:
1540 args.v3.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
1541 break;
1542 case 4:
1543 args.v3.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
1544 break;
1545 case 5:
1546 args.v3.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
1547 break;
1548 case 6:
1549 args.v3.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
1550 break;
1551 }
1552 break;
1553 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1554 args.v3.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
1555 break;
1556 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1557 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1558 args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1559 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1560 args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1561 else
1562 args.v3.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
1563 break;
1564 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1565 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1566 args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1567 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1568 args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1569 else
1570 args.v3.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
1571 break;
1572 }
1573 break;
1574 }
1575 break;
1576 default:
1577 DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
1578 return;
1579 }
1580
1581 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1582 }
1583
1584 /* This only needs to be called once at startup */
1585 void
1586 amdgpu_atombios_encoder_init_dig(struct amdgpu_device *adev)
1587 {
1588 struct drm_device *dev = adev->ddev;
1589 struct drm_encoder *encoder;
1590
1591 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
1592 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1593 struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1594
1595 switch (amdgpu_encoder->encoder_id) {
1596 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1597 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1598 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1599 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1600 amdgpu_atombios_encoder_setup_dig_transmitter(encoder, ATOM_TRANSMITTER_ACTION_INIT,
1601 0, 0);
1602 break;
1603 }
1604
1605 if (ext_encoder)
1606 amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1607 EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT);
1608 }
1609 }
1610
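/*
 * Run the DAC_LoadDetection command table for the DAC feeding this
 * connector.  The device ID is taken from the connector (CRT1/CRT2, CV or
 * TV1); on table revisions >= 3, component/TV outputs request YPrPb load
 * sensing.  Returns true if the table was executed, false if the encoder
 * has no CRT/TV/CV device or the table header cannot be parsed.
 */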
static bool
amdgpu_atombios_encoder_dac_load_detect(struct drm_encoder *encoder,
					struct drm_connector *connector)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);

	if (amdgpu_encoder->devices & (ATOM_DEVICE_TV_SUPPORT |
				       ATOM_DEVICE_CV_SUPPORT |
				       ATOM_DEVICE_CRT_SUPPORT)) {
		DAC_LOAD_DETECTION_PS_ALLOCATION args;
		int index = GetIndexIntoMasterTable(COMMAND, DAC_LoadDetection);
		uint8_t frev, crev;

		memset(&args, 0, sizeof(args));

		if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
			return false;

		args.sDacload.ucMisc = 0;

		if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DAC1) ||
		    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1))
			args.sDacload.ucDacType = ATOM_DAC_A;
		else
			args.sDacload.ucDacType = ATOM_DAC_B;

		if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)
			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT1_SUPPORT);
		else if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)
			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT2_SUPPORT);
		else if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CV_SUPPORT);
			if (crev >= 3)
				args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
		} else if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_TV1_SUPPORT);
			if (crev >= 3)
				args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
		}

		amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);

		return true;
	} else
		return false;
}

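/*
 * Detect a CRT/TV/CV connection on a DAC encoder: trigger DAC load
 * detection, then read the result back from the BIOS_SCRATCH_0 bits that
 * the command table sets for each device.
 */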
enum drm_connector_status
amdgpu_atombios_encoder_dac_detect(struct drm_encoder *encoder,
				   struct drm_connector *connector)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
	uint32_t bios_0_scratch;

	if (!amdgpu_atombios_encoder_dac_load_detect(encoder, connector)) {
		DRM_DEBUG_KMS("detect returned false \n");
		return connector_status_unknown;
	}

	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);

	DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
	if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
		if (bios_0_scratch & ATOM_S0_CRT1_MASK)
			return connector_status_connected;
	}
	if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
		if (bios_0_scratch & ATOM_S0_CRT2_MASK)
			return connector_status_connected;
	}
	if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
		if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
			return connector_status_connected;
	}
	if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
		if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
			return connector_status_connected; /* CTV */
		else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
			return connector_status_connected; /* STV */
	}
	return connector_status_disconnected;
}

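/*
 * Detect an analog (CRT) connection behind an external DP bridge: ask the
 * bridge to perform DAC load detection and then check the BIOS_SCRATCH_0
 * bits, as in the internal DAC case.  Returns unknown if there is no
 * external encoder or the connector has no CRT device.
 */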
enum drm_connector_status
amdgpu_atombios_encoder_dig_detect(struct drm_encoder *encoder,
				   struct drm_connector *connector)
{
	struct drm_device *dev = encoder->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
	u32 bios_0_scratch;

	if (!ext_encoder)
		return connector_status_unknown;

	if ((amdgpu_connector->devices & ATOM_DEVICE_CRT_SUPPORT) == 0)
		return connector_status_unknown;

	/* load detect on the dp bridge */
	amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
						       EXTERNAL_ENCODER_ACTION_V3_DACLOAD_DETECTION);

	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);

	DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
	if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
		if (bios_0_scratch & ATOM_S0_CRT1_MASK)
			return connector_status_connected;
	}
	if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
		if (bios_0_scratch & ATOM_S0_CRT2_MASK)
			return connector_status_connected;
	}
	if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
		if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
			return connector_status_connected;
	}
	if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
		if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
			return connector_status_connected; /* CTV */
		else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
			return connector_status_connected; /* STV */
	}
	return connector_status_disconnected;
}

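/* Perform DDC setup on the external DP bridge encoder, if one is present. */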
void
amdgpu_atombios_encoder_setup_ext_encoder_ddc(struct drm_encoder *encoder)
{
	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);

	if (ext_encoder)
		/* ddc_setup on the dp bridge */
		amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
							       EXTERNAL_ENCODER_ACTION_V3_DDC_SETUP);

}

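/*
 * Update the BIOS scratch registers that track display state for this
 * connector/encoder pair: connected devices (BIOS_SCRATCH_0), active
 * devices (BIOS_SCRATCH_3) and ACC request bits (BIOS_SCRATCH_6).
 */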
void
amdgpu_atombios_encoder_set_bios_scratch_regs(struct drm_connector *connector,
					      struct drm_encoder *encoder,
					      bool connected)
{
	struct drm_device *dev = connector->dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_connector *amdgpu_connector =
		to_amdgpu_connector(connector);
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	uint32_t bios_0_scratch, bios_3_scratch, bios_6_scratch;

	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
	bios_3_scratch = RREG32(mmBIOS_SCRATCH_3);
	bios_6_scratch = RREG32(mmBIOS_SCRATCH_6);

	if ((amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_LCD1_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("LCD1 connected\n");
			bios_0_scratch |= ATOM_S0_LCD1;
			bios_3_scratch |= ATOM_S3_LCD1_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_LCD1;
		} else {
			DRM_DEBUG_KMS("LCD1 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_LCD1;
			bios_3_scratch &= ~ATOM_S3_LCD1_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_LCD1;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT1_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("CRT1 connected\n");
			bios_0_scratch |= ATOM_S0_CRT1_COLOR;
			bios_3_scratch |= ATOM_S3_CRT1_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_CRT1;
		} else {
			DRM_DEBUG_KMS("CRT1 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_CRT1_MASK;
			bios_3_scratch &= ~ATOM_S3_CRT1_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT1;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT2_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("CRT2 connected\n");
			bios_0_scratch |= ATOM_S0_CRT2_COLOR;
			bios_3_scratch |= ATOM_S3_CRT2_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_CRT2;
		} else {
			DRM_DEBUG_KMS("CRT2 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_CRT2_MASK;
			bios_3_scratch &= ~ATOM_S3_CRT2_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT2;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP1_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP1_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP1 connected\n");
			bios_0_scratch |= ATOM_S0_DFP1;
			bios_3_scratch |= ATOM_S3_DFP1_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP1;
		} else {
			DRM_DEBUG_KMS("DFP1 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP1;
			bios_3_scratch &= ~ATOM_S3_DFP1_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP1;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP2_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP2_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP2 connected\n");
			bios_0_scratch |= ATOM_S0_DFP2;
			bios_3_scratch |= ATOM_S3_DFP2_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP2;
		} else {
			DRM_DEBUG_KMS("DFP2 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP2;
			bios_3_scratch &= ~ATOM_S3_DFP2_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP2;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP3_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP3_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP3 connected\n");
			bios_0_scratch |= ATOM_S0_DFP3;
			bios_3_scratch |= ATOM_S3_DFP3_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP3;
		} else {
			DRM_DEBUG_KMS("DFP3 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP3;
			bios_3_scratch &= ~ATOM_S3_DFP3_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP3;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP4_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP4_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP4 connected\n");
			bios_0_scratch |= ATOM_S0_DFP4;
			bios_3_scratch |= ATOM_S3_DFP4_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP4;
		} else {
			DRM_DEBUG_KMS("DFP4 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP4;
			bios_3_scratch &= ~ATOM_S3_DFP4_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP4;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP5_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP5_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP5 connected\n");
			bios_0_scratch |= ATOM_S0_DFP5;
			bios_3_scratch |= ATOM_S3_DFP5_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP5;
		} else {
			DRM_DEBUG_KMS("DFP5 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP5;
			bios_3_scratch &= ~ATOM_S3_DFP5_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP5;
		}
	}
	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP6_SUPPORT) &&
	    (amdgpu_connector->devices & ATOM_DEVICE_DFP6_SUPPORT)) {
		if (connected) {
			DRM_DEBUG_KMS("DFP6 connected\n");
			bios_0_scratch |= ATOM_S0_DFP6;
			bios_3_scratch |= ATOM_S3_DFP6_ACTIVE;
			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP6;
		} else {
			DRM_DEBUG_KMS("DFP6 disconnected\n");
			bios_0_scratch &= ~ATOM_S0_DFP6;
			bios_3_scratch &= ~ATOM_S3_DFP6_ACTIVE;
			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP6;
		}
	}

	WREG32(mmBIOS_SCRATCH_0, bios_0_scratch);
	WREG32(mmBIOS_SCRATCH_3, bios_3_scratch);
	WREG32(mmBIOS_SCRATCH_6, bios_6_scratch);
}

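/* Overlay of the LVDS_Info data table layouts returned by the VBIOS. */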
union lvds_info {
	struct _ATOM_LVDS_INFO info;
	struct _ATOM_LVDS_INFO_V12 info_12;
};

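/*
 * Build the LVDS panel description from the VBIOS LVDS_Info data table:
 * native mode timings, panel power delay, misc flags and spread spectrum
 * id, plus any patch records (fake EDID, panel resolution) appended to the
 * table.  Returns a freshly allocated amdgpu_encoder_atom_dig, or NULL if
 * the table cannot be parsed or the allocation fails.
 */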
struct amdgpu_encoder_atom_dig *
amdgpu_atombios_encoder_get_lcd_info(struct amdgpu_encoder *encoder)
{
	struct drm_device *dev = encoder->base.dev;
	struct amdgpu_device *adev = dev->dev_private;
	struct amdgpu_mode_info *mode_info = &adev->mode_info;
	int index = GetIndexIntoMasterTable(DATA, LVDS_Info);
	uint16_t data_offset, misc;
	union lvds_info *lvds_info;
	uint8_t frev, crev;
	struct amdgpu_encoder_atom_dig *lvds = NULL;
	int encoder_enum = (encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;

	if (amdgpu_atom_parse_data_header(mode_info->atom_context, index, NULL,
					  &frev, &crev, &data_offset)) {
		lvds_info =
			(union lvds_info *)(mode_info->atom_context->bios + data_offset);
		lvds =
			kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);

		if (!lvds)
			return NULL;

		lvds->native_mode.clock =
			le16_to_cpu(lvds_info->info.sLCDTiming.usPixClk) * 10;
		lvds->native_mode.hdisplay =
			le16_to_cpu(lvds_info->info.sLCDTiming.usHActive);
		lvds->native_mode.vdisplay =
			le16_to_cpu(lvds_info->info.sLCDTiming.usVActive);
		lvds->native_mode.htotal = lvds->native_mode.hdisplay +
			le16_to_cpu(lvds_info->info.sLCDTiming.usHBlanking_Time);
		lvds->native_mode.hsync_start = lvds->native_mode.hdisplay +
			le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncOffset);
		lvds->native_mode.hsync_end = lvds->native_mode.hsync_start +
			le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncWidth);
		lvds->native_mode.vtotal = lvds->native_mode.vdisplay +
			le16_to_cpu(lvds_info->info.sLCDTiming.usVBlanking_Time);
		lvds->native_mode.vsync_start = lvds->native_mode.vdisplay +
			le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncOffset);
		lvds->native_mode.vsync_end = lvds->native_mode.vsync_start +
			le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncWidth);
		lvds->panel_pwr_delay =
			le16_to_cpu(lvds_info->info.usOffDelayInMs);
		lvds->lcd_misc = lvds_info->info.ucLVDS_Misc;

		misc = le16_to_cpu(lvds_info->info.sLCDTiming.susModeMiscInfo.usAccess);
		if (misc & ATOM_VSYNC_POLARITY)
			lvds->native_mode.flags |= DRM_MODE_FLAG_NVSYNC;
		if (misc & ATOM_HSYNC_POLARITY)
			lvds->native_mode.flags |= DRM_MODE_FLAG_NHSYNC;
		if (misc & ATOM_COMPOSITESYNC)
			lvds->native_mode.flags |= DRM_MODE_FLAG_CSYNC;
		if (misc & ATOM_INTERLACE)
			lvds->native_mode.flags |= DRM_MODE_FLAG_INTERLACE;
		if (misc & ATOM_DOUBLE_CLOCK_MODE)
			lvds->native_mode.flags |= DRM_MODE_FLAG_DBLSCAN;

		lvds->native_mode.width_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageHSize);
		lvds->native_mode.height_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageVSize);

		/* set crtc values */
		drm_mode_set_crtcinfo(&lvds->native_mode, CRTC_INTERLACE_HALVE_V);

		lvds->lcd_ss_id = lvds_info->info.ucSS_Id;

		encoder->native_mode = lvds->native_mode;

		if (encoder_enum == 2)
			lvds->linkb = true;
		else
			lvds->linkb = false;

		/* parse the lcd record table */
		if (le16_to_cpu(lvds_info->info.usModePatchTableOffset)) {
			ATOM_FAKE_EDID_PATCH_RECORD *fake_edid_record;
			ATOM_PANEL_RESOLUTION_PATCH_RECORD *panel_res_record;
			bool bad_record = false;
			u8 *record;

			if ((frev == 1) && (crev < 2))
				/* absolute */
				record = (u8 *)(mode_info->atom_context->bios +
						le16_to_cpu(lvds_info->info.usModePatchTableOffset));
			else
				/* relative */
				record = (u8 *)(mode_info->atom_context->bios +
						data_offset +
						le16_to_cpu(lvds_info->info.usModePatchTableOffset));
			while (*record != ATOM_RECORD_END_TYPE) {
				switch (*record) {
				case LCD_MODE_PATCH_RECORD_MODE_TYPE:
					record += sizeof(ATOM_PATCH_RECORD_MODE);
					break;
				case LCD_RTS_RECORD_TYPE:
					record += sizeof(ATOM_LCD_RTS_RECORD);
					break;
				case LCD_CAP_RECORD_TYPE:
					record += sizeof(ATOM_LCD_MODE_CONTROL_CAP);
					break;
				case LCD_FAKE_EDID_PATCH_RECORD_TYPE:
					fake_edid_record = (ATOM_FAKE_EDID_PATCH_RECORD *)record;
					if (fake_edid_record->ucFakeEDIDLength) {
						struct edid *edid;
						int edid_size =
							max((int)EDID_LENGTH, (int)fake_edid_record->ucFakeEDIDLength);
						edid = kmalloc(edid_size, GFP_KERNEL);
						if (edid) {
							memcpy((u8 *)edid, (u8 *)&fake_edid_record->ucFakeEDIDString[0],
							       fake_edid_record->ucFakeEDIDLength);

							if (drm_edid_is_valid(edid)) {
								adev->mode_info.bios_hardcoded_edid = edid;
								adev->mode_info.bios_hardcoded_edid_size = edid_size;
							} else
								kfree(edid);
						}
					}
					record += fake_edid_record->ucFakeEDIDLength ?
						fake_edid_record->ucFakeEDIDLength + 2 :
						sizeof(ATOM_FAKE_EDID_PATCH_RECORD);
					break;
				case LCD_PANEL_RESOLUTION_RECORD_TYPE:
					panel_res_record = (ATOM_PANEL_RESOLUTION_PATCH_RECORD *)record;
					lvds->native_mode.width_mm = panel_res_record->usHSize;
					lvds->native_mode.height_mm = panel_res_record->usVSize;
					record += sizeof(ATOM_PANEL_RESOLUTION_PATCH_RECORD);
					break;
				default:
					DRM_ERROR("Bad LCD record %d\n", *record);
					bad_record = true;
					break;
				}
				if (bad_record)
					break;
			}
		}
	}
	return lvds;
}

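/*
 * Allocate and initialize the private DIG state for an encoder: coherent
 * mode on by default, no DIG block assigned yet, and link B selected when
 * the encoder enumeration id is 2.
 */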
struct amdgpu_encoder_atom_dig *
amdgpu_atombios_encoder_get_dig_info(struct amdgpu_encoder *amdgpu_encoder)
{
	int encoder_enum = (amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
	struct amdgpu_encoder_atom_dig *dig = kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);

	if (!dig)
		return NULL;

	/* coherent mode by default */
	dig->coherent_mode = true;
	dig->dig_encoder = -1;

	if (encoder_enum == 2)
		dig->linkb = true;
	else
		dig->linkb = false;

	return dig;
}
