1 /*
2  * Copyright 2007-11 Advanced Micro Devices, Inc.
3  * Copyright 2008 Red Hat Inc.
4  *
5  * Permission is hereby granted, free of charge, to any person obtaining a
6  * copy of this software and associated documentation files (the "Software"),
7  * to deal in the Software without restriction, including without limitation
8  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9  * and/or sell copies of the Software, and to permit persons to whom the
10  * Software is furnished to do so, subject to the following conditions:
11  *
12  * The above copyright notice and this permission notice shall be included in
13  * all copies or substantial portions of the Software.
14  *
15  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
18  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
19  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
20  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
21  * OTHER DEALINGS IN THE SOFTWARE.
22  *
23  * Authors: Dave Airlie
24  *          Alex Deucher
25  */
26 
27 #include <linux/pci.h>
28 
29 #include <acpi/video.h>
30 
31 #include <drm/drm_crtc_helper.h>
32 #include <drm/amdgpu_drm.h>
33 #include "amdgpu.h"
34 #include "amdgpu_connectors.h"
35 #include "amdgpu_display.h"
36 #include "atom.h"
37 #include "atombios_encoders.h"
38 #include "atombios_dp.h"
39 #include <linux/backlight.h>
40 #include "bif/bif_4_1_d.h"
41 
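/*
 * Read the current panel backlight level from the ATOM_S2 backlight
 * field of the BIOS_SCRATCH_2 register.
 */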
42 u8
43 amdgpu_atombios_encoder_get_backlight_level_from_reg(struct amdgpu_device *adev)
44 {
45 	u8 backlight_level;
46 	u32 bios_2_scratch;
47 
48 	bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
49 
50 	backlight_level = ((bios_2_scratch & ATOM_S2_CURRENT_BL_LEVEL_MASK) >>
51 			   ATOM_S2_CURRENT_BL_LEVEL_SHIFT);
52 
53 	return backlight_level;
54 }
55 
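/*
 * Write the backlight level into the ATOM_S2 backlight field of
 * BIOS_SCRATCH_2, preserving the other bits of the register.
 */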
56 void
57 amdgpu_atombios_encoder_set_backlight_level_to_reg(struct amdgpu_device *adev,
58 					    u8 backlight_level)
59 {
60 	u32 bios_2_scratch;
61 
62 	bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
63 
64 	bios_2_scratch &= ~ATOM_S2_CURRENT_BL_LEVEL_MASK;
65 	bios_2_scratch |= ((backlight_level << ATOM_S2_CURRENT_BL_LEVEL_SHIFT) &
66 			   ATOM_S2_CURRENT_BL_LEVEL_MASK);
67 
68 	WREG32(mmBIOS_SCRATCH_2, bios_2_scratch);
69 }
70 
71 u8
72 amdgpu_atombios_encoder_get_backlight_level(struct amdgpu_encoder *amdgpu_encoder)
73 {
74 	struct drm_device *dev = amdgpu_encoder->base.dev;
75 	struct amdgpu_device *adev = drm_to_adev(dev);
76 
77 	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
78 		return 0;
79 
80 	return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
81 }
82 
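/*
 * Program a new backlight level for an LCD: cache it in the encoder's
 * DIG private data, mirror it to the scratch register, and use the DIG
 * transmitter table to either switch the backlight off (level 0) or
 * update the brightness and switch it back on.
 */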
83 void
84 amdgpu_atombios_encoder_set_backlight_level(struct amdgpu_encoder *amdgpu_encoder,
85 				     u8 level)
86 {
87 	struct drm_encoder *encoder = &amdgpu_encoder->base;
88 	struct drm_device *dev = amdgpu_encoder->base.dev;
89 	struct amdgpu_device *adev = drm_to_adev(dev);
90 	struct amdgpu_encoder_atom_dig *dig;
91 
92 	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
93 		return;
94 
95 	if ((amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) &&
96 	    amdgpu_encoder->enc_priv) {
97 		dig = amdgpu_encoder->enc_priv;
98 		dig->backlight_level = level;
99 		amdgpu_atombios_encoder_set_backlight_level_to_reg(adev, dig->backlight_level);
100 
101 		switch (amdgpu_encoder->encoder_id) {
102 		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
103 		case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
104 		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
105 		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
106 		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
107 			if (dig->backlight_level == 0)
108 				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
109 								       ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
110 			else {
111 				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
112 								       ATOM_TRANSMITTER_ACTION_BL_BRIGHTNESS_CONTROL, 0, 0);
113 				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
114 								       ATOM_TRANSMITTER_ACTION_LCD_BLON, 0, 0);
115 			}
116 			break;
117 		default:
118 			break;
119 		}
120 	}
121 }
122 
123 static u8 amdgpu_atombios_encoder_backlight_level(struct backlight_device *bd)
124 {
125 	u8 level;
126 
127 	/* Convert brightness to hardware level */
128 	if (bd->props.brightness < 0)
129 		level = 0;
130 	else if (bd->props.brightness > AMDGPU_MAX_BL_LEVEL)
131 		level = AMDGPU_MAX_BL_LEVEL;
132 	else
133 		level = bd->props.brightness;
134 
135 	return level;
136 }
137 
138 static int amdgpu_atombios_encoder_update_backlight_status(struct backlight_device *bd)
139 {
140 	struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
141 	struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
142 
143 	amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder,
144 					     amdgpu_atombios_encoder_backlight_level(bd));
145 
146 	return 0;
147 }
148 
149 static int
150 amdgpu_atombios_encoder_get_backlight_brightness(struct backlight_device *bd)
151 {
152 	struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
153 	struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
154 	struct drm_device *dev = amdgpu_encoder->base.dev;
155 	struct amdgpu_device *adev = drm_to_adev(dev);
156 
157 	return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
158 }
159 
160 static const struct backlight_ops amdgpu_atombios_encoder_backlight_ops = {
161 	.get_brightness = amdgpu_atombios_encoder_get_backlight_brightness,
162 	.update_status	= amdgpu_atombios_encoder_update_backlight_status,
163 };
164 
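/*
 * Register a backlight device for an ATOM DIG encoder.  Skipped on
 * Apple laptops that use the gmux driver; falls back to registering an
 * ACPI video backlight when the GPU is not meant to control the panel
 * backlight natively.
 */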
165 void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *amdgpu_encoder,
166 				     struct drm_connector *drm_connector)
167 {
168 	struct drm_device *dev = amdgpu_encoder->base.dev;
169 	struct amdgpu_device *adev = drm_to_adev(dev);
170 	struct backlight_device *bd;
171 	struct backlight_properties props;
172 	struct amdgpu_backlight_privdata *pdata;
173 	struct amdgpu_encoder_atom_dig *dig;
174 	char bl_name[16];
175 
176 	/* Mac laptops with multiple GPUs use the gmux driver for backlight
177 	 * so don't register a backlight device
178 	 */
179 	if ((adev->pdev->subsystem_vendor == PCI_VENDOR_ID_APPLE) &&
180 	    (adev->pdev->device == 0x6741))
181 		return;
182 
183 	if (!amdgpu_encoder->enc_priv)
184 		return;
185 
186 	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
187 		goto register_acpi_backlight;
188 
189 	if (!acpi_video_backlight_use_native()) {
190 		drm_info(dev, "Skipping amdgpu atom DIG backlight registration\n");
191 		goto register_acpi_backlight;
192 	}
193 
194 	pdata = kmalloc(sizeof(struct amdgpu_backlight_privdata), GFP_KERNEL);
195 	if (!pdata) {
196 		DRM_ERROR("Memory allocation failed\n");
197 		goto error;
198 	}
199 
200 	memset(&props, 0, sizeof(props));
201 	props.max_brightness = AMDGPU_MAX_BL_LEVEL;
202 	props.type = BACKLIGHT_RAW;
203 	snprintf(bl_name, sizeof(bl_name),
204 		 "amdgpu_bl%d", dev->primary->index);
205 	bd = backlight_device_register(bl_name, drm_connector->kdev,
206 				       pdata, &amdgpu_atombios_encoder_backlight_ops, &props);
207 	if (IS_ERR(bd)) {
208 		DRM_ERROR("Backlight registration failed\n");
209 		goto error;
210 	}
211 
212 	pdata->encoder = amdgpu_encoder;
213 
214 	dig = amdgpu_encoder->enc_priv;
215 	dig->bl_dev = bd;
216 
217 	bd->props.brightness = amdgpu_atombios_encoder_get_backlight_brightness(bd);
218 	bd->props.power = FB_BLANK_UNBLANK;
219 	backlight_update_status(bd);
220 
221 	DRM_INFO("amdgpu atom DIG backlight initialized\n");
222 
223 	return;
224 
225 error:
226 	kfree(pdata);
227 	return;
228 
229 register_acpi_backlight:
230 	/* Try registering an ACPI video backlight device instead. */
231 	acpi_video_register_backlight();
232 	return;
233 }
234 
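/*
 * Unregister the backlight device created by
 * amdgpu_atombios_encoder_init_backlight() and free its private data.
 */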
235 void
236 amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *amdgpu_encoder)
237 {
238 	struct drm_device *dev = amdgpu_encoder->base.dev;
239 	struct amdgpu_device *adev = drm_to_adev(dev);
240 	struct backlight_device *bd = NULL;
241 	struct amdgpu_encoder_atom_dig *dig;
242 
243 	if (!amdgpu_encoder->enc_priv)
244 		return;
245 
246 	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
247 		return;
248 
249 	dig = amdgpu_encoder->enc_priv;
250 	bd = dig->bl_dev;
251 	dig->bl_dev = NULL;
252 
253 	if (bd) {
254 		struct amdgpu_legacy_backlight_privdata *pdata;
255 
256 		pdata = bl_get_data(bd);
257 		backlight_device_unregister(bd);
258 		kfree(pdata);
259 
260 		DRM_INFO("amdgpu atom LVDS backlight unloaded\n");
261 	}
262 }
263 
264 bool amdgpu_atombios_encoder_is_digital(struct drm_encoder *encoder)
265 {
266 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
267 	switch (amdgpu_encoder->encoder_id) {
268 	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
269 	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
270 	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
271 	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
272 	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
273 		return true;
274 	default:
275 		return false;
276 	}
277 }
278 
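/*
 * drm mode_fixup callback: route the encoder to its connector, apply
 * the interlace and vertical front-porch workarounds, pick up the
 * panel's native mode for scaling and set the DP link configuration
 * where needed.
 */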
279 bool amdgpu_atombios_encoder_mode_fixup(struct drm_encoder *encoder,
280 				 const struct drm_display_mode *mode,
281 				 struct drm_display_mode *adjusted_mode)
282 {
283 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
284 
285 	/* set the active encoder to connector routing */
286 	amdgpu_encoder_set_active_device(encoder);
287 	drm_mode_set_crtcinfo(adjusted_mode, 0);
288 
289 	/* hw bug */
290 	if ((mode->flags & DRM_MODE_FLAG_INTERLACE)
291 	    && (mode->crtc_vsync_start < (mode->crtc_vdisplay + 2)))
292 		adjusted_mode->crtc_vsync_start = adjusted_mode->crtc_vdisplay + 2;
293 
294 	/* vertical FP must be at least 1 */
295 	if (mode->crtc_vsync_start == mode->crtc_vdisplay)
296 		adjusted_mode->crtc_vsync_start++;
297 
298 	/* get the native mode for scaling */
299 	if (amdgpu_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT))
300 		amdgpu_panel_mode_fixup(encoder, adjusted_mode);
301 	else if (amdgpu_encoder->rmx_type != RMX_OFF)
302 		amdgpu_panel_mode_fixup(encoder, adjusted_mode);
303 
304 	if ((amdgpu_encoder->active_device & (ATOM_DEVICE_DFP_SUPPORT | ATOM_DEVICE_LCD_SUPPORT)) ||
305 	    (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) {
306 		struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
307 		amdgpu_atombios_dp_set_link_config(connector, adjusted_mode);
308 	}
309 
310 	return true;
311 }
312 
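/*
 * Enable or disable an analog DAC encoder through the DAC1/DAC2
 * EncoderControl command table for the current pixel clock.
 */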
313 static void
314 amdgpu_atombios_encoder_setup_dac(struct drm_encoder *encoder, int action)
315 {
316 	struct drm_device *dev = encoder->dev;
317 	struct amdgpu_device *adev = drm_to_adev(dev);
318 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
319 	DAC_ENCODER_CONTROL_PS_ALLOCATION args;
320 	int index = 0;
321 
322 	memset(&args, 0, sizeof(args));
323 
324 	switch (amdgpu_encoder->encoder_id) {
325 	case ENCODER_OBJECT_ID_INTERNAL_DAC1:
326 	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
327 		index = GetIndexIntoMasterTable(COMMAND, DAC1EncoderControl);
328 		break;
329 	case ENCODER_OBJECT_ID_INTERNAL_DAC2:
330 	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
331 		index = GetIndexIntoMasterTable(COMMAND, DAC2EncoderControl);
332 		break;
333 	}
334 
335 	args.ucAction = action;
336 	args.ucDacStandard = ATOM_DAC1_PS2;
337 	args.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
338 
339 	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
340 
341 }
342 
343 static u8 amdgpu_atombios_encoder_get_bpc(struct drm_encoder *encoder)
344 {
345 	int bpc = 8;
346 
347 	if (encoder->crtc) {
348 		struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
349 		bpc = amdgpu_crtc->bpc;
350 	}
351 
352 	switch (bpc) {
353 	case 0:
354 		return PANEL_BPC_UNDEFINE;
355 	case 6:
356 		return PANEL_6BIT_PER_COLOR;
357 	case 8:
358 	default:
359 		return PANEL_8BIT_PER_COLOR;
360 	case 10:
361 		return PANEL_10BIT_PER_COLOR;
362 	case 12:
363 		return PANEL_12BIT_PER_COLOR;
364 	case 16:
365 		return PANEL_16BIT_PER_COLOR;
366 	}
367 }
368 
369 union dvo_encoder_control {
370 	ENABLE_EXTERNAL_TMDS_ENCODER_PS_ALLOCATION ext_tmds;
371 	DVO_ENCODER_CONTROL_PS_ALLOCATION dvo;
372 	DVO_ENCODER_CONTROL_PS_ALLOCATION_V3 dvo_v3;
373 	DVO_ENCODER_CONTROL_PS_ALLOCATION_V1_4 dvo_v4;
374 };
375 
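/*
 * Enable or disable an external DVO/TMDS encoder through the
 * DVOEncoderControl command table; which member of the argument union
 * is used depends on the table revision reported by the VBIOS.
 */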
376 static void
377 amdgpu_atombios_encoder_setup_dvo(struct drm_encoder *encoder, int action)
378 {
379 	struct drm_device *dev = encoder->dev;
380 	struct amdgpu_device *adev = drm_to_adev(dev);
381 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
382 	union dvo_encoder_control args;
383 	int index = GetIndexIntoMasterTable(COMMAND, DVOEncoderControl);
384 	uint8_t frev, crev;
385 
386 	memset(&args, 0, sizeof(args));
387 
388 	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
389 		return;
390 
391 	switch (frev) {
392 	case 1:
393 		switch (crev) {
394 		case 1:
395 			/* R4xx, R5xx */
396 			args.ext_tmds.sXTmdsEncoder.ucEnable = action;
397 
398 			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
399 				args.ext_tmds.sXTmdsEncoder.ucMisc |= PANEL_ENCODER_MISC_DUAL;
400 
401 			args.ext_tmds.sXTmdsEncoder.ucMisc |= ATOM_PANEL_MISC_888RGB;
402 			break;
403 		case 2:
404 			/* RS600/690/740 */
405 			args.dvo.sDVOEncoder.ucAction = action;
406 			args.dvo.sDVOEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
407 			/* DFP1, CRT1, TV1 depending on the type of port */
408 			args.dvo.sDVOEncoder.ucDeviceType = ATOM_DEVICE_DFP1_INDEX;
409 
410 			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
411 				args.dvo.sDVOEncoder.usDevAttr.sDigAttrib.ucAttribute |= PANEL_ENCODER_MISC_DUAL;
412 			break;
413 		case 3:
414 			/* R6xx */
415 			args.dvo_v3.ucAction = action;
416 			args.dvo_v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
417 			args.dvo_v3.ucDVOConfig = 0; /* XXX */
418 			break;
419 		case 4:
420 			/* DCE8 */
421 			args.dvo_v4.ucAction = action;
422 			args.dvo_v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
423 			args.dvo_v4.ucDVOConfig = 0; /* XXX */
424 			args.dvo_v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
425 			break;
426 		default:
427 			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
428 			break;
429 		}
430 		break;
431 	default:
432 		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
433 		break;
434 	}
435 
436 	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
437 }
438 
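/*
 * Derive the ATOM encoder mode (DP, HDMI, DVI, LVDS, CRT, TV, ...)
 * from the connector type, the sink's capabilities and the audio
 * settings.
 */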
439 int amdgpu_atombios_encoder_get_encoder_mode(struct drm_encoder *encoder)
440 {
441 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
442 	struct drm_connector *connector;
443 	struct amdgpu_connector *amdgpu_connector;
444 	struct amdgpu_connector_atom_dig *dig_connector;
445 
446 	/* dp bridges are always DP */
447 	if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)
448 		return ATOM_ENCODER_MODE_DP;
449 
450 	/* DVO is always DVO */
451 	if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DVO1) ||
452 	    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1))
453 		return ATOM_ENCODER_MODE_DVO;
454 
455 	connector = amdgpu_get_connector_for_encoder(encoder);
456 	/* if we don't have an active device yet, just use one of
457 	 * the connectors tied to the encoder.
458 	 */
459 	if (!connector)
460 		connector = amdgpu_get_connector_for_encoder_init(encoder);
461 	amdgpu_connector = to_amdgpu_connector(connector);
462 
463 	switch (connector->connector_type) {
464 	case DRM_MODE_CONNECTOR_DVII:
465 	case DRM_MODE_CONNECTOR_HDMIB: /* HDMI-B is basically DL-DVI; analog works fine */
466 		if (amdgpu_audio != 0) {
467 			if (amdgpu_connector->use_digital &&
468 			    (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE))
469 				return ATOM_ENCODER_MODE_HDMI;
470 			else if (connector->display_info.is_hdmi &&
471 				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
472 				return ATOM_ENCODER_MODE_HDMI;
473 			else if (amdgpu_connector->use_digital)
474 				return ATOM_ENCODER_MODE_DVI;
475 			else
476 				return ATOM_ENCODER_MODE_CRT;
477 		} else if (amdgpu_connector->use_digital) {
478 			return ATOM_ENCODER_MODE_DVI;
479 		} else {
480 			return ATOM_ENCODER_MODE_CRT;
481 		}
482 		break;
483 	case DRM_MODE_CONNECTOR_DVID:
484 	case DRM_MODE_CONNECTOR_HDMIA:
485 	default:
486 		if (amdgpu_audio != 0) {
487 			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
488 				return ATOM_ENCODER_MODE_HDMI;
489 			else if (connector->display_info.is_hdmi &&
490 				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
491 				return ATOM_ENCODER_MODE_HDMI;
492 			else
493 				return ATOM_ENCODER_MODE_DVI;
494 		} else {
495 			return ATOM_ENCODER_MODE_DVI;
496 		}
497 	case DRM_MODE_CONNECTOR_LVDS:
498 		return ATOM_ENCODER_MODE_LVDS;
499 	case DRM_MODE_CONNECTOR_DisplayPort:
500 		dig_connector = amdgpu_connector->con_priv;
501 		if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
502 		    (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
503 			return ATOM_ENCODER_MODE_DP;
504 		} else if (amdgpu_audio != 0) {
505 			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
506 				return ATOM_ENCODER_MODE_HDMI;
507 			else if (connector->display_info.is_hdmi &&
508 				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
509 				return ATOM_ENCODER_MODE_HDMI;
510 			else
511 				return ATOM_ENCODER_MODE_DVI;
512 		} else {
513 			return ATOM_ENCODER_MODE_DVI;
514 		}
515 	case DRM_MODE_CONNECTOR_eDP:
516 		return ATOM_ENCODER_MODE_DP;
517 	case DRM_MODE_CONNECTOR_DVIA:
518 	case DRM_MODE_CONNECTOR_VGA:
519 		return ATOM_ENCODER_MODE_CRT;
520 	case DRM_MODE_CONNECTOR_Composite:
521 	case DRM_MODE_CONNECTOR_SVIDEO:
522 	case DRM_MODE_CONNECTOR_9PinDIN:
523 		/* fix me */
524 		return ATOM_ENCODER_MODE_TV;
525 	}
526 }
527 
528 /*
529  * DIG Encoder/Transmitter Setup
530  *
531  * DCE 6.0
532  * - 3 DIG transmitter blocks UNIPHY0/1/2 (links A and B).
533  * Supports up to 6 digital outputs
534  * - 6 DIG encoder blocks.
535  * - DIG to PHY mapping is hardcoded
536  * DIG1 drives UNIPHY0 link A, A+B
537  * DIG2 drives UNIPHY0 link B
538  * DIG3 drives UNIPHY1 link A, A+B
539  * DIG4 drives UNIPHY1 link B
540  * DIG5 drives UNIPHY2 link A, A+B
541  * DIG6 drives UNIPHY2 link B
542  *
543  * Routing
544  * crtc -> dig encoder -> UNIPHY/LVTMA (1 or 2 links)
545  * Examples:
546  * crtc0 -> dig2 -> LVTMA   links A+B -> TMDS/HDMI
547  * crtc1 -> dig1 -> UNIPHY0 link  B   -> DP
548  * crtc0 -> dig1 -> UNIPHY2 link  A   -> LVDS
549  * crtc1 -> dig2 -> UNIPHY1 link  B+A -> TMDS/HDMI
550  */
551 
552 union dig_encoder_control {
553 	DIG_ENCODER_CONTROL_PS_ALLOCATION v1;
554 	DIG_ENCODER_CONTROL_PARAMETERS_V2 v2;
555 	DIG_ENCODER_CONTROL_PARAMETERS_V3 v3;
556 	DIG_ENCODER_CONTROL_PARAMETERS_V4 v4;
557 	DIG_ENCODER_CONTROL_PARAMETERS_V5 v5;
558 };
559 
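/*
 * Program the assigned DIG encoder through the DIGxEncoderControl
 * command table (pixel clock, lane count, panel mode, link rate); the
 * argument layout depends on the table revision reported by the VBIOS.
 */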
560 void
561 amdgpu_atombios_encoder_setup_dig_encoder(struct drm_encoder *encoder,
562 				   int action, int panel_mode)
563 {
564 	struct drm_device *dev = encoder->dev;
565 	struct amdgpu_device *adev = drm_to_adev(dev);
566 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
567 	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
568 	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
569 	union dig_encoder_control args;
570 	int index = GetIndexIntoMasterTable(COMMAND, DIGxEncoderControl);
571 	uint8_t frev, crev;
572 	int dp_clock = 0;
573 	int dp_lane_count = 0;
574 	int hpd_id = AMDGPU_HPD_NONE;
575 
576 	if (connector) {
577 		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
578 		struct amdgpu_connector_atom_dig *dig_connector =
579 			amdgpu_connector->con_priv;
580 
581 		dp_clock = dig_connector->dp_clock;
582 		dp_lane_count = dig_connector->dp_lane_count;
583 		hpd_id = amdgpu_connector->hpd.hpd;
584 	}
585 
586 	/* no dig encoder assigned */
587 	if (dig->dig_encoder == -1)
588 		return;
589 
590 	memset(&args, 0, sizeof(args));
591 
592 	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
593 		return;
594 
595 	switch (frev) {
596 	case 1:
597 		switch (crev) {
598 		case 1:
599 			args.v1.ucAction = action;
600 			args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
601 			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
602 				args.v3.ucPanelMode = panel_mode;
603 			else
604 				args.v1.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
605 
606 			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode))
607 				args.v1.ucLaneNum = dp_lane_count;
608 			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
609 				args.v1.ucLaneNum = 8;
610 			else
611 				args.v1.ucLaneNum = 4;
612 
613 			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode) && (dp_clock == 270000))
614 				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
615 			switch (amdgpu_encoder->encoder_id) {
616 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
617 				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER1;
618 				break;
619 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
620 			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
621 				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER2;
622 				break;
623 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
624 				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER3;
625 				break;
626 			}
627 			if (dig->linkb)
628 				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKB;
629 			else
630 				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKA;
631 			break;
632 		case 2:
633 		case 3:
634 			args.v3.ucAction = action;
635 			args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
636 			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
637 				args.v3.ucPanelMode = panel_mode;
638 			else
639 				args.v3.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
640 
641 			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode))
642 				args.v3.ucLaneNum = dp_lane_count;
643 			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
644 				args.v3.ucLaneNum = 8;
645 			else
646 				args.v3.ucLaneNum = 4;
647 
648 			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode) && (dp_clock == 270000))
649 				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
650 			args.v3.acConfig.ucDigSel = dig->dig_encoder;
651 			args.v3.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
652 			break;
653 		case 4:
654 			args.v4.ucAction = action;
655 			args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
656 			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
657 				args.v4.ucPanelMode = panel_mode;
658 			else
659 				args.v4.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
660 
661 			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode))
662 				args.v4.ucLaneNum = dp_lane_count;
663 			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
664 				args.v4.ucLaneNum = 8;
665 			else
666 				args.v4.ucLaneNum = 4;
667 
668 			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode)) {
669 				if (dp_clock == 540000)
670 					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_5_40GHZ;
671 				else if (dp_clock == 324000)
672 					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_3_24GHZ;
673 				else if (dp_clock == 270000)
674 					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_2_70GHZ;
675 				else
676 					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_1_62GHZ;
677 			}
678 			args.v4.acConfig.ucDigSel = dig->dig_encoder;
679 			args.v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
680 			if (hpd_id == AMDGPU_HPD_NONE)
681 				args.v4.ucHPD_ID = 0;
682 			else
683 				args.v4.ucHPD_ID = hpd_id + 1;
684 			break;
685 		case 5:
686 			switch (action) {
687 			case ATOM_ENCODER_CMD_SETUP_PANEL_MODE:
688 				args.v5.asDPPanelModeParam.ucAction = action;
689 				args.v5.asDPPanelModeParam.ucPanelMode = panel_mode;
690 				args.v5.asDPPanelModeParam.ucDigId = dig->dig_encoder;
691 				break;
692 			case ATOM_ENCODER_CMD_STREAM_SETUP:
693 				args.v5.asStreamParam.ucAction = action;
694 				args.v5.asStreamParam.ucDigId = dig->dig_encoder;
695 				args.v5.asStreamParam.ucDigMode =
696 					amdgpu_atombios_encoder_get_encoder_mode(encoder);
697 				if (ENCODER_MODE_IS_DP(args.v5.asStreamParam.ucDigMode))
698 					args.v5.asStreamParam.ucLaneNum = dp_lane_count;
699 				else if (amdgpu_dig_monitor_is_duallink(encoder,
700 									amdgpu_encoder->pixel_clock))
701 					args.v5.asStreamParam.ucLaneNum = 8;
702 				else
703 					args.v5.asStreamParam.ucLaneNum = 4;
704 				args.v5.asStreamParam.ulPixelClock =
705 					cpu_to_le32(amdgpu_encoder->pixel_clock / 10);
706 				args.v5.asStreamParam.ucBitPerColor =
707 					amdgpu_atombios_encoder_get_bpc(encoder);
708 				args.v5.asStreamParam.ucLinkRateIn270Mhz = dp_clock / 27000;
709 				break;
710 			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_START:
711 			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1:
712 			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2:
713 			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3:
714 			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN4:
715 			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE:
716 			case ATOM_ENCODER_CMD_DP_VIDEO_OFF:
717 			case ATOM_ENCODER_CMD_DP_VIDEO_ON:
718 				args.v5.asCmdParam.ucAction = action;
719 				args.v5.asCmdParam.ucDigId = dig->dig_encoder;
720 				break;
721 			default:
722 				DRM_ERROR("Unsupported action 0x%x\n", action);
723 				break;
724 			}
725 			break;
726 		default:
727 			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
728 			break;
729 		}
730 		break;
731 	default:
732 		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
733 		break;
734 	}
735 
736 	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
737 
738 }
739 
740 union dig_transmitter_control {
741 	DIG_TRANSMITTER_CONTROL_PS_ALLOCATION v1;
742 	DIG_TRANSMITTER_CONTROL_PARAMETERS_V2 v2;
743 	DIG_TRANSMITTER_CONTROL_PARAMETERS_V3 v3;
744 	DIG_TRANSMITTER_CONTROL_PARAMETERS_V4 v4;
745 	DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_5 v5;
746 	DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_6 v6;
747 };
748 
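/*
 * Program the UNIPHY/LVTMA/DVO transmitter through the corresponding
 * TransmitterControl command table, selecting link, lane count, PLL/
 * reference clock source and coherent mode based on the encoder and
 * connector state.
 */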
749 void
750 amdgpu_atombios_encoder_setup_dig_transmitter(struct drm_encoder *encoder, int action,
751 					      uint8_t lane_num, uint8_t lane_set)
752 {
753 	struct drm_device *dev = encoder->dev;
754 	struct amdgpu_device *adev = drm_to_adev(dev);
755 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
756 	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
757 	struct drm_connector *connector;
758 	union dig_transmitter_control args;
759 	int index = 0;
760 	uint8_t frev, crev;
761 	bool is_dp = false;
762 	int pll_id = 0;
763 	int dp_clock = 0;
764 	int dp_lane_count = 0;
765 	int connector_object_id = 0;
766 	int dig_encoder = dig->dig_encoder;
767 	int hpd_id = AMDGPU_HPD_NONE;
768 
769 	if (action == ATOM_TRANSMITTER_ACTION_INIT) {
770 		connector = amdgpu_get_connector_for_encoder_init(encoder);
771 		/* just needed to avoid bailing in the encoder check.  the encoder
772 		 * isn't used for init
773 		 */
774 		dig_encoder = 0;
775 	} else
776 		connector = amdgpu_get_connector_for_encoder(encoder);
777 
778 	if (connector) {
779 		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
780 		struct amdgpu_connector_atom_dig *dig_connector =
781 			amdgpu_connector->con_priv;
782 
783 		hpd_id = amdgpu_connector->hpd.hpd;
784 		dp_clock = dig_connector->dp_clock;
785 		dp_lane_count = dig_connector->dp_lane_count;
786 		connector_object_id =
787 			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
788 	}
789 
790 	if (encoder->crtc) {
791 		struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
792 		pll_id = amdgpu_crtc->pll_id;
793 	}
794 
795 	/* no dig encoder assigned */
796 	if (dig_encoder == -1)
797 		return;
798 
799 	if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)))
800 		is_dp = true;
801 
802 	memset(&args, 0, sizeof(args));
803 
804 	switch (amdgpu_encoder->encoder_id) {
805 	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
806 		index = GetIndexIntoMasterTable(COMMAND, DVOOutputControl);
807 		break;
808 	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
809 	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
810 	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
811 	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
812 		index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
813 		break;
814 	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
815 		index = GetIndexIntoMasterTable(COMMAND, LVTMATransmitterControl);
816 		break;
817 	}
818 
819 	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
820 		return;
821 
822 	switch (frev) {
823 	case 1:
824 		switch (crev) {
825 		case 1:
826 			args.v1.ucAction = action;
827 			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
828 				args.v1.usInitInfo = cpu_to_le16(connector_object_id);
829 			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
830 				args.v1.asMode.ucLaneSel = lane_num;
831 				args.v1.asMode.ucLaneSet = lane_set;
832 			} else {
833 				if (is_dp)
834 					args.v1.usPixelClock = cpu_to_le16(dp_clock / 10);
835 				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
836 					args.v1.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
837 				else
838 					args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
839 			}
840 
841 			args.v1.ucConfig = ATOM_TRANSMITTER_CONFIG_CLKSRC_PPLL;
842 
843 			if (dig_encoder)
844 				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG2_ENCODER;
845 			else
846 				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG1_ENCODER;
847 
848 			if (dig->linkb)
849 				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKB;
850 			else
851 				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKA;
852 
853 			if (is_dp)
854 				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
855 			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
856 				if (dig->coherent_mode)
857 					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
858 				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
859 					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_8LANE_LINK;
860 			}
861 			break;
862 		case 2:
863 			args.v2.ucAction = action;
864 			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
865 				args.v2.usInitInfo = cpu_to_le16(connector_object_id);
866 			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
867 				args.v2.asMode.ucLaneSel = lane_num;
868 				args.v2.asMode.ucLaneSet = lane_set;
869 			} else {
870 				if (is_dp)
871 					args.v2.usPixelClock = cpu_to_le16(dp_clock / 10);
872 				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
873 					args.v2.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
874 				else
875 					args.v2.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
876 			}
877 
878 			args.v2.acConfig.ucEncoderSel = dig_encoder;
879 			if (dig->linkb)
880 				args.v2.acConfig.ucLinkSel = 1;
881 
882 			switch (amdgpu_encoder->encoder_id) {
883 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
884 				args.v2.acConfig.ucTransmitterSel = 0;
885 				break;
886 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
887 				args.v2.acConfig.ucTransmitterSel = 1;
888 				break;
889 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
890 				args.v2.acConfig.ucTransmitterSel = 2;
891 				break;
892 			}
893 
894 			if (is_dp) {
895 				args.v2.acConfig.fCoherentMode = 1;
896 				args.v2.acConfig.fDPConnector = 1;
897 			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
898 				if (dig->coherent_mode)
899 					args.v2.acConfig.fCoherentMode = 1;
900 				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
901 					args.v2.acConfig.fDualLinkConnector = 1;
902 			}
903 			break;
904 		case 3:
905 			args.v3.ucAction = action;
906 			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
907 				args.v3.usInitInfo = cpu_to_le16(connector_object_id);
908 			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
909 				args.v3.asMode.ucLaneSel = lane_num;
910 				args.v3.asMode.ucLaneSet = lane_set;
911 			} else {
912 				if (is_dp)
913 					args.v3.usPixelClock = cpu_to_le16(dp_clock / 10);
914 				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
915 					args.v3.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
916 				else
917 					args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
918 			}
919 
920 			if (is_dp)
921 				args.v3.ucLaneNum = dp_lane_count;
922 			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
923 				args.v3.ucLaneNum = 8;
924 			else
925 				args.v3.ucLaneNum = 4;
926 
927 			if (dig->linkb)
928 				args.v3.acConfig.ucLinkSel = 1;
929 			if (dig_encoder & 1)
930 				args.v3.acConfig.ucEncoderSel = 1;
931 
932 			/* Select the PLL for the PHY
933 			 * DP PHY should be clocked from external src if there is
934 			 * one.
935 			 */
936 			/* On DCE4, if there is an external clock, it generates the DP ref clock */
937 			if (is_dp && adev->clock.dp_extclk)
938 				args.v3.acConfig.ucRefClkSource = 2; /* external src */
939 			else
940 				args.v3.acConfig.ucRefClkSource = pll_id;
941 
942 			switch (amdgpu_encoder->encoder_id) {
943 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
944 				args.v3.acConfig.ucTransmitterSel = 0;
945 				break;
946 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
947 				args.v3.acConfig.ucTransmitterSel = 1;
948 				break;
949 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
950 				args.v3.acConfig.ucTransmitterSel = 2;
951 				break;
952 			}
953 
954 			if (is_dp)
955 				args.v3.acConfig.fCoherentMode = 1; /* DP requires coherent */
956 			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
957 				if (dig->coherent_mode)
958 					args.v3.acConfig.fCoherentMode = 1;
959 				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
960 					args.v3.acConfig.fDualLinkConnector = 1;
961 			}
962 			break;
963 		case 4:
964 			args.v4.ucAction = action;
965 			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
966 				args.v4.usInitInfo = cpu_to_le16(connector_object_id);
967 			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
968 				args.v4.asMode.ucLaneSel = lane_num;
969 				args.v4.asMode.ucLaneSet = lane_set;
970 			} else {
971 				if (is_dp)
972 					args.v4.usPixelClock = cpu_to_le16(dp_clock / 10);
973 				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
974 					args.v4.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
975 				else
976 					args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
977 			}
978 
979 			if (is_dp)
980 				args.v4.ucLaneNum = dp_lane_count;
981 			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
982 				args.v4.ucLaneNum = 8;
983 			else
984 				args.v4.ucLaneNum = 4;
985 
986 			if (dig->linkb)
987 				args.v4.acConfig.ucLinkSel = 1;
988 			if (dig_encoder & 1)
989 				args.v4.acConfig.ucEncoderSel = 1;
990 
991 			/* Select the PLL for the PHY
992 			 * DP PHY should be clocked from external src if there is
993 			 * one.
994 			 */
995 			/* On DCE5 DCPLL usually generates the DP ref clock */
996 			if (is_dp) {
997 				if (adev->clock.dp_extclk)
998 					args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_EXTCLK;
999 				else
1000 					args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_DCPLL;
1001 			} else
1002 				args.v4.acConfig.ucRefClkSource = pll_id;
1003 
1004 			switch (amdgpu_encoder->encoder_id) {
1005 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1006 				args.v4.acConfig.ucTransmitterSel = 0;
1007 				break;
1008 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1009 				args.v4.acConfig.ucTransmitterSel = 1;
1010 				break;
1011 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1012 				args.v4.acConfig.ucTransmitterSel = 2;
1013 				break;
1014 			}
1015 
1016 			if (is_dp)
1017 				args.v4.acConfig.fCoherentMode = 1; /* DP requires coherent */
1018 			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
1019 				if (dig->coherent_mode)
1020 					args.v4.acConfig.fCoherentMode = 1;
1021 				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1022 					args.v4.acConfig.fDualLinkConnector = 1;
1023 			}
1024 			break;
1025 		case 5:
1026 			args.v5.ucAction = action;
1027 			if (is_dp)
1028 				args.v5.usSymClock = cpu_to_le16(dp_clock / 10);
1029 			else
1030 				args.v5.usSymClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1031 
1032 			switch (amdgpu_encoder->encoder_id) {
1033 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1034 				if (dig->linkb)
1035 					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYB;
1036 				else
1037 					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYA;
1038 				break;
1039 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1040 				if (dig->linkb)
1041 					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYD;
1042 				else
1043 					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYC;
1044 				break;
1045 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1046 				if (dig->linkb)
1047 					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYF;
1048 				else
1049 					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYE;
1050 				break;
1051 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1052 				args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYG;
1053 				break;
1054 			}
1055 			if (is_dp)
1056 				args.v5.ucLaneNum = dp_lane_count;
1057 			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1058 				args.v5.ucLaneNum = 8;
1059 			else
1060 				args.v5.ucLaneNum = 4;
1061 			args.v5.ucConnObjId = connector_object_id;
1062 			args.v5.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1063 
1064 			if (is_dp && adev->clock.dp_extclk)
1065 				args.v5.asConfig.ucPhyClkSrcId = ENCODER_REFCLK_SRC_EXTCLK;
1066 			else
1067 				args.v5.asConfig.ucPhyClkSrcId = pll_id;
1068 
1069 			if (is_dp)
1070 				args.v5.asConfig.ucCoherentMode = 1; /* DP requires coherent */
1071 			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
1072 				if (dig->coherent_mode)
1073 					args.v5.asConfig.ucCoherentMode = 1;
1074 			}
1075 			if (hpd_id == AMDGPU_HPD_NONE)
1076 				args.v5.asConfig.ucHPDSel = 0;
1077 			else
1078 				args.v5.asConfig.ucHPDSel = hpd_id + 1;
1079 			args.v5.ucDigEncoderSel = 1 << dig_encoder;
1080 			args.v5.ucDPLaneSet = lane_set;
1081 			break;
1082 		case 6:
1083 			args.v6.ucAction = action;
1084 			if (is_dp)
1085 				args.v6.ulSymClock = cpu_to_le32(dp_clock / 10);
1086 			else
1087 				args.v6.ulSymClock = cpu_to_le32(amdgpu_encoder->pixel_clock / 10);
1088 
1089 			switch (amdgpu_encoder->encoder_id) {
1090 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1091 				if (dig->linkb)
1092 					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYB;
1093 				else
1094 					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYA;
1095 				break;
1096 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1097 				if (dig->linkb)
1098 					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYD;
1099 				else
1100 					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYC;
1101 				break;
1102 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1103 				if (dig->linkb)
1104 					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYF;
1105 				else
1106 					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYE;
1107 				break;
1108 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1109 				args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYG;
1110 				break;
1111 			}
1112 			if (is_dp)
1113 				args.v6.ucLaneNum = dp_lane_count;
1114 			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1115 				args.v6.ucLaneNum = 8;
1116 			else
1117 				args.v6.ucLaneNum = 4;
1118 			args.v6.ucConnObjId = connector_object_id;
1119 			if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH)
1120 				args.v6.ucDPLaneSet = lane_set;
1121 			else
1122 				args.v6.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1123 
1124 			if (hpd_id == AMDGPU_HPD_NONE)
1125 				args.v6.ucHPDSel = 0;
1126 			else
1127 				args.v6.ucHPDSel = hpd_id + 1;
1128 			args.v6.ucDigEncoderSel = 1 << dig_encoder;
1129 			break;
1130 		default:
1131 			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
1132 			break;
1133 		}
1134 		break;
1135 	default:
1136 		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
1137 		break;
1138 	}
1139 
1140 	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1141 }
1142 
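/*
 * Power the eDP panel on or off via the UNIPHYTransmitterControl
 * table.  On power-on, poll HPD for up to ~300 ms to give the panel
 * time to come up; returns false if HPD is never sensed.
 */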
1143 bool
1144 amdgpu_atombios_encoder_set_edp_panel_power(struct drm_connector *connector,
1145 				     int action)
1146 {
1147 	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1148 	struct drm_device *dev = amdgpu_connector->base.dev;
1149 	struct amdgpu_device *adev = drm_to_adev(dev);
1150 	union dig_transmitter_control args;
1151 	int index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
1152 	uint8_t frev, crev;
1153 
1154 	if (connector->connector_type != DRM_MODE_CONNECTOR_eDP)
1155 		goto done;
1156 
1157 	if ((action != ATOM_TRANSMITTER_ACTION_POWER_ON) &&
1158 	    (action != ATOM_TRANSMITTER_ACTION_POWER_OFF))
1159 		goto done;
1160 
1161 	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1162 		goto done;
1163 
1164 	memset(&args, 0, sizeof(args));
1165 
1166 	args.v1.ucAction = action;
1167 
1168 	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1169 
1170 	/* wait for the panel to power up */
1171 	if (action == ATOM_TRANSMITTER_ACTION_POWER_ON) {
1172 		int i;
1173 
1174 		for (i = 0; i < 300; i++) {
1175 			if (amdgpu_display_hpd_sense(adev, amdgpu_connector->hpd.hpd))
1176 				return true;
1177 			mdelay(1);
1178 		}
1179 		return false;
1180 	}
1181 done:
1182 	return true;
1183 }
1184 
1185 union external_encoder_control {
1186 	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION v1;
1187 	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION_V3 v3;
1188 };
1189 
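/*
 * Configure an external encoder (DP bridge) through the
 * ExternalEncoderControl command table.
 */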
1190 static void
1191 amdgpu_atombios_encoder_setup_external_encoder(struct drm_encoder *encoder,
1192 					struct drm_encoder *ext_encoder,
1193 					int action)
1194 {
1195 	struct drm_device *dev = encoder->dev;
1196 	struct amdgpu_device *adev = drm_to_adev(dev);
1197 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1198 	struct amdgpu_encoder *ext_amdgpu_encoder = to_amdgpu_encoder(ext_encoder);
1199 	union external_encoder_control args;
1200 	struct drm_connector *connector;
1201 	int index = GetIndexIntoMasterTable(COMMAND, ExternalEncoderControl);
1202 	u8 frev, crev;
1203 	int dp_clock = 0;
1204 	int dp_lane_count = 0;
1205 	int connector_object_id = 0;
1206 	u32 ext_enum = (ext_amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
1207 
1208 	if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
1209 		connector = amdgpu_get_connector_for_encoder_init(encoder);
1210 	else
1211 		connector = amdgpu_get_connector_for_encoder(encoder);
1212 
1213 	if (connector) {
1214 		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1215 		struct amdgpu_connector_atom_dig *dig_connector =
1216 			amdgpu_connector->con_priv;
1217 
1218 		dp_clock = dig_connector->dp_clock;
1219 		dp_lane_count = dig_connector->dp_lane_count;
1220 		connector_object_id =
1221 			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
1222 	}
1223 
1224 	memset(&args, 0, sizeof(args));
1225 
1226 	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1227 		return;
1228 
1229 	switch (frev) {
1230 	case 1:
1231 		/* no params on frev 1 */
1232 		break;
1233 	case 2:
1234 		switch (crev) {
1235 		case 1:
1236 		case 2:
1237 			args.v1.sDigEncoder.ucAction = action;
1238 			args.v1.sDigEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1239 			args.v1.sDigEncoder.ucEncoderMode =
1240 				amdgpu_atombios_encoder_get_encoder_mode(encoder);
1241 
1242 			if (ENCODER_MODE_IS_DP(args.v1.sDigEncoder.ucEncoderMode)) {
1243 				if (dp_clock == 270000)
1244 					args.v1.sDigEncoder.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
1245 				args.v1.sDigEncoder.ucLaneNum = dp_lane_count;
1246 			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1247 				args.v1.sDigEncoder.ucLaneNum = 8;
1248 			else
1249 				args.v1.sDigEncoder.ucLaneNum = 4;
1250 			break;
1251 		case 3:
1252 			args.v3.sExtEncoder.ucAction = action;
1253 			if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
1254 				args.v3.sExtEncoder.usConnectorId = cpu_to_le16(connector_object_id);
1255 			else
1256 				args.v3.sExtEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1257 			args.v3.sExtEncoder.ucEncoderMode =
1258 				amdgpu_atombios_encoder_get_encoder_mode(encoder);
1259 
1260 			if (ENCODER_MODE_IS_DP(args.v3.sExtEncoder.ucEncoderMode)) {
1261 				if (dp_clock == 270000)
1262 					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
1263 				else if (dp_clock == 540000)
1264 					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_5_40GHZ;
1265 				args.v3.sExtEncoder.ucLaneNum = dp_lane_count;
1266 			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1267 				args.v3.sExtEncoder.ucLaneNum = 8;
1268 			else
1269 				args.v3.sExtEncoder.ucLaneNum = 4;
1270 			switch (ext_enum) {
1271 			case GRAPH_OBJECT_ENUM_ID1:
1272 				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER1;
1273 				break;
1274 			case GRAPH_OBJECT_ENUM_ID2:
1275 				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER2;
1276 				break;
1277 			case GRAPH_OBJECT_ENUM_ID3:
1278 				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER3;
1279 				break;
1280 			}
1281 			args.v3.sExtEncoder.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
1282 			break;
1283 		default:
1284 			DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
1285 			return;
1286 		}
1287 		break;
1288 	default:
1289 		DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
1290 		return;
1291 	}
1292 	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1293 }
1294 
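/*
 * Enable or disable the whole DIG path for an encoder: panel mode,
 * DIG encoder, transmitter, eDP panel power, DP link training and
 * LCD backlight.
 */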
1295 static void
1296 amdgpu_atombios_encoder_setup_dig(struct drm_encoder *encoder, int action)
1297 {
1298 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1299 	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1300 	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
1301 	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1302 	struct amdgpu_connector *amdgpu_connector = NULL;
1303 	struct amdgpu_connector_atom_dig *amdgpu_dig_connector = NULL;
1304 
1305 	if (connector) {
1306 		amdgpu_connector = to_amdgpu_connector(connector);
1307 		amdgpu_dig_connector = amdgpu_connector->con_priv;
1308 	}
1309 
1310 	if (action == ATOM_ENABLE) {
1311 		if (!connector)
1312 			dig->panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
1313 		else
1314 			dig->panel_mode = amdgpu_atombios_dp_get_panel_mode(encoder, connector);
1315 
1316 		/* setup and enable the encoder */
1317 		amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_SETUP, 0);
1318 		amdgpu_atombios_encoder_setup_dig_encoder(encoder,
1319 						   ATOM_ENCODER_CMD_SETUP_PANEL_MODE,
1320 						   dig->panel_mode);
1321 		if (ext_encoder)
1322 			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1323 								EXTERNAL_ENCODER_ACTION_V3_ENCODER_SETUP);
1324 		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1325 		    connector) {
1326 			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
1327 				amdgpu_atombios_encoder_set_edp_panel_power(connector,
1328 								     ATOM_TRANSMITTER_ACTION_POWER_ON);
1329 				amdgpu_dig_connector->edp_on = true;
1330 			}
1331 		}
1332 		/* enable the transmitter */
1333 		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1334 						       ATOM_TRANSMITTER_ACTION_ENABLE,
1335 						       0, 0);
1336 		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1337 		    connector) {
1338 			/* DP_SET_POWER_D0 is set in amdgpu_atombios_dp_link_train */
1339 			amdgpu_atombios_dp_link_train(encoder, connector);
1340 			amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_DP_VIDEO_ON, 0);
1341 		}
1342 		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
1343 			amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder, dig->backlight_level);
1344 		if (ext_encoder)
1345 			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_ENABLE);
1346 	} else {
1347 		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1348 		    connector)
1349 			amdgpu_atombios_encoder_setup_dig_encoder(encoder,
1350 							   ATOM_ENCODER_CMD_DP_VIDEO_OFF, 0);
1351 		if (ext_encoder)
1352 			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_DISABLE);
1353 		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
1354 			amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1355 							       ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
1356 
1357 		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1358 		    connector)
1359 			amdgpu_atombios_dp_set_rx_power_state(connector, DP_SET_POWER_D3);
1360 		/* disable the transmitter */
1361 		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1362 						       ATOM_TRANSMITTER_ACTION_DISABLE, 0, 0);
1363 		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1364 		    connector) {
1365 			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
1366 				amdgpu_atombios_encoder_set_edp_panel_power(connector,
1367 								     ATOM_TRANSMITTER_ACTION_POWER_OFF);
1368 				amdgpu_dig_connector->edp_on = false;
1369 			}
1370 		}
1371 	}
1372 }
1373 
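/*
 * DPMS entry point: map the DRM power state onto ATOM_ENABLE/
 * ATOM_DISABLE for the DIG, DVO and DAC setup helpers above.
 */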
1374 void
1375 amdgpu_atombios_encoder_dpms(struct drm_encoder *encoder, int mode)
1376 {
1377 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1378 
1379 	DRM_DEBUG_KMS("encoder dpms %d to mode %d, devices %08x, active_devices %08x\n",
1380 		  amdgpu_encoder->encoder_id, mode, amdgpu_encoder->devices,
1381 		  amdgpu_encoder->active_device);
1382 	switch (amdgpu_encoder->encoder_id) {
1383 	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1384 	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1385 	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1386 	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1387 		switch (mode) {
1388 		case DRM_MODE_DPMS_ON:
1389 			amdgpu_atombios_encoder_setup_dig(encoder, ATOM_ENABLE);
1390 			break;
1391 		case DRM_MODE_DPMS_STANDBY:
1392 		case DRM_MODE_DPMS_SUSPEND:
1393 		case DRM_MODE_DPMS_OFF:
1394 			amdgpu_atombios_encoder_setup_dig(encoder, ATOM_DISABLE);
1395 			break;
1396 		}
1397 		break;
1398 	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1399 		switch (mode) {
1400 		case DRM_MODE_DPMS_ON:
1401 			amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_ENABLE);
1402 			break;
1403 		case DRM_MODE_DPMS_STANDBY:
1404 		case DRM_MODE_DPMS_SUSPEND:
1405 		case DRM_MODE_DPMS_OFF:
1406 			amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_DISABLE);
1407 			break;
1408 		}
1409 		break;
1410 	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1411 		switch (mode) {
1412 		case DRM_MODE_DPMS_ON:
1413 			amdgpu_atombios_encoder_setup_dac(encoder, ATOM_ENABLE);
1414 			break;
1415 		case DRM_MODE_DPMS_STANDBY:
1416 		case DRM_MODE_DPMS_SUSPEND:
1417 		case DRM_MODE_DPMS_OFF:
1418 			amdgpu_atombios_encoder_setup_dac(encoder, ATOM_DISABLE);
1419 			break;
1420 		}
1421 		break;
1422 	default:
1423 		return;
1424 	}
1425 }
1426 
1427 union crtc_source_param {
1428 	SELECT_CRTC_SOURCE_PS_ALLOCATION v1;
1429 	SELECT_CRTC_SOURCE_PARAMETERS_V2 v2;
1430 	SELECT_CRTC_SOURCE_PARAMETERS_V3 v3;
1431 };
1432 
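/*
 * Tell the VBIOS which CRTC drives this encoder, and in which encode
 * mode, via the SelectCRTC_Source command table.
 */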
1433 void
1434 amdgpu_atombios_encoder_set_crtc_source(struct drm_encoder *encoder)
1435 {
1436 	struct drm_device *dev = encoder->dev;
1437 	struct amdgpu_device *adev = drm_to_adev(dev);
1438 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1439 	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
1440 	union crtc_source_param args;
1441 	int index = GetIndexIntoMasterTable(COMMAND, SelectCRTC_Source);
1442 	uint8_t frev, crev;
1443 	struct amdgpu_encoder_atom_dig *dig;
1444 
1445 	memset(&args, 0, sizeof(args));
1446 
1447 	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1448 		return;
1449 
1450 	switch (frev) {
1451 	case 1:
1452 		switch (crev) {
1453 		case 1:
1454 		default:
1455 			args.v1.ucCRTC = amdgpu_crtc->crtc_id;
1456 			switch (amdgpu_encoder->encoder_id) {
1457 			case ENCODER_OBJECT_ID_INTERNAL_TMDS1:
1458 			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1:
1459 				args.v1.ucDevice = ATOM_DEVICE_DFP1_INDEX;
1460 				break;
1461 			case ENCODER_OBJECT_ID_INTERNAL_LVDS:
1462 			case ENCODER_OBJECT_ID_INTERNAL_LVTM1:
1463 				if (amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT)
1464 					args.v1.ucDevice = ATOM_DEVICE_LCD1_INDEX;
1465 				else
1466 					args.v1.ucDevice = ATOM_DEVICE_DFP3_INDEX;
1467 				break;
1468 			case ENCODER_OBJECT_ID_INTERNAL_DVO1:
1469 			case ENCODER_OBJECT_ID_INTERNAL_DDI:
1470 			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1471 				args.v1.ucDevice = ATOM_DEVICE_DFP2_INDEX;
1472 				break;
1473 			case ENCODER_OBJECT_ID_INTERNAL_DAC1:
1474 			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1475 				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1476 					args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
1477 				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1478 					args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
1479 				else
1480 					args.v1.ucDevice = ATOM_DEVICE_CRT1_INDEX;
1481 				break;
1482 			case ENCODER_OBJECT_ID_INTERNAL_DAC2:
1483 			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1484 				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1485 					args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
1486 				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1487 					args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
1488 				else
1489 					args.v1.ucDevice = ATOM_DEVICE_CRT2_INDEX;
1490 				break;
1491 			}
1492 			break;
1493 		case 2:
1494 			args.v2.ucCRTC = amdgpu_crtc->crtc_id;
1495 			if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
1496 				struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1497 
1498 				if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
1499 					args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1500 				else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
1501 					args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
1502 				else
1503 					args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1504 			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
1505 				args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1506 			} else {
1507 				args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1508 			}
1509 			switch (amdgpu_encoder->encoder_id) {
1510 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1511 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1512 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1513 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1514 			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
1515 				dig = amdgpu_encoder->enc_priv;
1516 				switch (dig->dig_encoder) {
1517 				case 0:
1518 					args.v2.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
1519 					break;
1520 				case 1:
1521 					args.v2.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
1522 					break;
1523 				case 2:
1524 					args.v2.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
1525 					break;
1526 				case 3:
1527 					args.v2.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
1528 					break;
1529 				case 4:
1530 					args.v2.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
1531 					break;
1532 				case 5:
1533 					args.v2.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
1534 					break;
1535 				case 6:
1536 					args.v2.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
1537 					break;
1538 				}
1539 				break;
1540 			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1541 				args.v2.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
1542 				break;
1543 			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1544 				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1545 					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1546 				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1547 					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1548 				else
1549 					args.v2.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
1550 				break;
1551 			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1552 				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1553 					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1554 				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1555 					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1556 				else
1557 					args.v2.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
1558 				break;
1559 			}
1560 			break;
1561 		case 3:
1562 			args.v3.ucCRTC = amdgpu_crtc->crtc_id;
1563 			if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
1564 				struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1565 
1566 				if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
1567 					args.v3.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1568 				else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
1569 					args.v3.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
1570 				else
1571 					args.v3.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1572 			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
1573 				args.v3.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1574 			} else {
1575 				args.v3.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1576 			}
1577 			args.v3.ucDstBpc = amdgpu_atombios_encoder_get_bpc(encoder);
1578 			switch (amdgpu_encoder->encoder_id) {
1579 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1580 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1581 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1582 			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1583 			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
1584 				dig = amdgpu_encoder->enc_priv;
1585 				switch (dig->dig_encoder) {
1586 				case 0:
1587 					args.v3.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
1588 					break;
1589 				case 1:
1590 					args.v3.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
1591 					break;
1592 				case 2:
1593 					args.v3.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
1594 					break;
1595 				case 3:
1596 					args.v3.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
1597 					break;
1598 				case 4:
1599 					args.v3.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
1600 					break;
1601 				case 5:
1602 					args.v3.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
1603 					break;
1604 				case 6:
1605 					args.v3.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
1606 					break;
1607 				}
1608 				break;
1609 			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1610 				args.v3.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
1611 				break;
1612 			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1613 				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1614 					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1615 				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1616 					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1617 				else
1618 					args.v3.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
1619 				break;
1620 			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1621 				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1622 					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1623 				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1624 					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1625 				else
1626 					args.v3.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
1627 				break;
1628 			}
1629 			break;
1630 		}
1631 		break;
1632 	default:
1633 		DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
1634 		return;
1635 	}
1636 
1637 	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1638 }
1639 
1640 /* This only needs to be called once at startup */
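/* Issues ATOM_TRANSMITTER_ACTION_INIT for each internal UNIPHY encoder and
 * EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT for any attached external encoder
 * (DP bridge).
 */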
1641 void
1642 amdgpu_atombios_encoder_init_dig(struct amdgpu_device *adev)
1643 {
1644 	struct drm_device *dev = adev_to_drm(adev);
1645 	struct drm_encoder *encoder;
1646 
1647 	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
1648 		struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1649 		struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1650 
1651 		switch (amdgpu_encoder->encoder_id) {
1652 		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1653 		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1654 		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1655 		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1656 			amdgpu_atombios_encoder_setup_dig_transmitter(encoder, ATOM_TRANSMITTER_ACTION_INIT,
1657 							       0, 0);
1658 			break;
1659 		}
1660 
1661 		if (ext_encoder)
1662 			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1663 								EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT);
1664 	}
1665 }
1666 
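/*
 * Trigger analog load detection via the DAC_LoadDetection command table.
 * The DAC and the device ID to probe are picked from the encoder/connector;
 * returns false if the encoder drives no TV/CV/CRT device or the table
 * header cannot be parsed.
 */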
1667 static bool
1668 amdgpu_atombios_encoder_dac_load_detect(struct drm_encoder *encoder,
1669 				 struct drm_connector *connector)
1670 {
1671 	struct drm_device *dev = encoder->dev;
1672 	struct amdgpu_device *adev = drm_to_adev(dev);
1673 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1674 	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1675 
1676 	if (amdgpu_encoder->devices & (ATOM_DEVICE_TV_SUPPORT |
1677 				       ATOM_DEVICE_CV_SUPPORT |
1678 				       ATOM_DEVICE_CRT_SUPPORT)) {
1679 		DAC_LOAD_DETECTION_PS_ALLOCATION args;
1680 		int index = GetIndexIntoMasterTable(COMMAND, DAC_LoadDetection);
1681 		uint8_t frev, crev;
1682 
1683 		memset(&args, 0, sizeof(args));
1684 
1685 		if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1686 			return false;
1687 
1688 		args.sDacload.ucMisc = 0;
1689 
1690 		if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DAC1) ||
1691 		    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1))
1692 			args.sDacload.ucDacType = ATOM_DAC_A;
1693 		else
1694 			args.sDacload.ucDacType = ATOM_DAC_B;
1695 
1696 		if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)
1697 			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT1_SUPPORT);
1698 		else if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)
1699 			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT2_SUPPORT);
1700 		else if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1701 			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CV_SUPPORT);
1702 			if (crev >= 3)
1703 				args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
1704 		} else if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1705 			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_TV1_SUPPORT);
1706 			if (crev >= 3)
1707 				args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
1708 		}
1709 
1710 		amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1711 
1712 		return true;
1713 	} else
1714 		return false;
1715 }
1716 
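/*
 * Load detection latches its result into BIOS scratch register 0, so the
 * connection status is derived from the ATOM_S0_* bits after running the
 * DAC_LoadDetection table.
 */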
1717 enum drm_connector_status
1718 amdgpu_atombios_encoder_dac_detect(struct drm_encoder *encoder,
1719 			    struct drm_connector *connector)
1720 {
1721 	struct drm_device *dev = encoder->dev;
1722 	struct amdgpu_device *adev = drm_to_adev(dev);
1723 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1724 	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1725 	uint32_t bios_0_scratch;
1726 
1727 	if (!amdgpu_atombios_encoder_dac_load_detect(encoder, connector)) {
1728 		DRM_DEBUG_KMS("detect returned false\n");
1729 		return connector_status_unknown;
1730 	}
1731 
1732 	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
1733 
1734 	DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
1735 	if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
1736 		if (bios_0_scratch & ATOM_S0_CRT1_MASK)
1737 			return connector_status_connected;
1738 	}
1739 	if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
1740 		if (bios_0_scratch & ATOM_S0_CRT2_MASK)
1741 			return connector_status_connected;
1742 	}
1743 	if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1744 		if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
1745 			return connector_status_connected;
1746 	}
1747 	if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1748 		if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
1749 			return connector_status_connected; /* CTV */
1750 		else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
1751 			return connector_status_connected; /* STV */
1752 	}
1753 	return connector_status_disconnected;
1754 }
1755 
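/*
 * Analog detection through an external DP bridge: ask the bridge to do a
 * DAC load detect and then evaluate the ATOM_S0_* bits in BIOS scratch
 * register 0, same as the internal DAC path.
 */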
1756 enum drm_connector_status
1757 amdgpu_atombios_encoder_dig_detect(struct drm_encoder *encoder,
1758 			    struct drm_connector *connector)
1759 {
1760 	struct drm_device *dev = encoder->dev;
1761 	struct amdgpu_device *adev = drm_to_adev(dev);
1762 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1763 	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1764 	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1765 	u32 bios_0_scratch;
1766 
1767 	if (!ext_encoder)
1768 		return connector_status_unknown;
1769 
1770 	if ((amdgpu_connector->devices & ATOM_DEVICE_CRT_SUPPORT) == 0)
1771 		return connector_status_unknown;
1772 
1773 	/* load detect on the dp bridge */
1774 	amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1775 						EXTERNAL_ENCODER_ACTION_V3_DACLOAD_DETECTION);
1776 
1777 	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
1778 
1779 	DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
1780 	if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
1781 		if (bios_0_scratch & ATOM_S0_CRT1_MASK)
1782 			return connector_status_connected;
1783 	}
1784 	if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
1785 		if (bios_0_scratch & ATOM_S0_CRT2_MASK)
1786 			return connector_status_connected;
1787 	}
1788 	if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1789 		if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
1790 			return connector_status_connected;
1791 	}
1792 	if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1793 		if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
1794 			return connector_status_connected; /* CTV */
1795 		else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
1796 			return connector_status_connected; /* STV */
1797 	}
1798 	return connector_status_disconnected;
1799 }
1800 
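/* Configure the external encoder (DP bridge), if present, for DDC so EDID
 * can be read through it.
 */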
1801 void
1802 amdgpu_atombios_encoder_setup_ext_encoder_ddc(struct drm_encoder *encoder)
1803 {
1804 	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1805 
1806 	if (ext_encoder)
1807 		/* ddc_setup on the dp bridge */
1808 		amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1809 							EXTERNAL_ENCODER_ACTION_V3_DDC_SETUP);
1810 
1811 }
1812 
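/*
 * Mirror the detected connection state of each device on this
 * encoder/connector pair into BIOS scratch registers 0, 3 and 6
 * (connected, active and access-request bits) so firmware sees the same
 * state the driver detected.
 */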
1813 void
1814 amdgpu_atombios_encoder_set_bios_scratch_regs(struct drm_connector *connector,
1815 				       struct drm_encoder *encoder,
1816 				       bool connected)
1817 {
1818 	struct drm_device *dev = connector->dev;
1819 	struct amdgpu_device *adev = drm_to_adev(dev);
1820 	struct amdgpu_connector *amdgpu_connector =
1821 	    to_amdgpu_connector(connector);
1822 	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1823 	uint32_t bios_0_scratch, bios_3_scratch, bios_6_scratch;
1824 
1825 	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
1826 	bios_3_scratch = RREG32(mmBIOS_SCRATCH_3);
1827 	bios_6_scratch = RREG32(mmBIOS_SCRATCH_6);
1828 
1829 	if ((amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT) &&
1830 	    (amdgpu_connector->devices & ATOM_DEVICE_LCD1_SUPPORT)) {
1831 		if (connected) {
1832 			DRM_DEBUG_KMS("LCD1 connected\n");
1833 			bios_0_scratch |= ATOM_S0_LCD1;
1834 			bios_3_scratch |= ATOM_S3_LCD1_ACTIVE;
1835 			bios_6_scratch |= ATOM_S6_ACC_REQ_LCD1;
1836 		} else {
1837 			DRM_DEBUG_KMS("LCD1 disconnected\n");
1838 			bios_0_scratch &= ~ATOM_S0_LCD1;
1839 			bios_3_scratch &= ~ATOM_S3_LCD1_ACTIVE;
1840 			bios_6_scratch &= ~ATOM_S6_ACC_REQ_LCD1;
1841 		}
1842 	}
1843 	if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT1_SUPPORT) &&
1844 	    (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)) {
1845 		if (connected) {
1846 			DRM_DEBUG_KMS("CRT1 connected\n");
1847 			bios_0_scratch |= ATOM_S0_CRT1_COLOR;
1848 			bios_3_scratch |= ATOM_S3_CRT1_ACTIVE;
1849 			bios_6_scratch |= ATOM_S6_ACC_REQ_CRT1;
1850 		} else {
1851 			DRM_DEBUG_KMS("CRT1 disconnected\n");
1852 			bios_0_scratch &= ~ATOM_S0_CRT1_MASK;
1853 			bios_3_scratch &= ~ATOM_S3_CRT1_ACTIVE;
1854 			bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT1;
1855 		}
1856 	}
1857 	if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT2_SUPPORT) &&
1858 	    (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)) {
1859 		if (connected) {
1860 			DRM_DEBUG_KMS("CRT2 connected\n");
1861 			bios_0_scratch |= ATOM_S0_CRT2_COLOR;
1862 			bios_3_scratch |= ATOM_S3_CRT2_ACTIVE;
1863 			bios_6_scratch |= ATOM_S6_ACC_REQ_CRT2;
1864 		} else {
1865 			DRM_DEBUG_KMS("CRT2 disconnected\n");
1866 			bios_0_scratch &= ~ATOM_S0_CRT2_MASK;
1867 			bios_3_scratch &= ~ATOM_S3_CRT2_ACTIVE;
1868 			bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT2;
1869 		}
1870 	}
1871 	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP1_SUPPORT) &&
1872 	    (amdgpu_connector->devices & ATOM_DEVICE_DFP1_SUPPORT)) {
1873 		if (connected) {
1874 			DRM_DEBUG_KMS("DFP1 connected\n");
1875 			bios_0_scratch |= ATOM_S0_DFP1;
1876 			bios_3_scratch |= ATOM_S3_DFP1_ACTIVE;
1877 			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP1;
1878 		} else {
1879 			DRM_DEBUG_KMS("DFP1 disconnected\n");
1880 			bios_0_scratch &= ~ATOM_S0_DFP1;
1881 			bios_3_scratch &= ~ATOM_S3_DFP1_ACTIVE;
1882 			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP1;
1883 		}
1884 	}
1885 	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP2_SUPPORT) &&
1886 	    (amdgpu_connector->devices & ATOM_DEVICE_DFP2_SUPPORT)) {
1887 		if (connected) {
1888 			DRM_DEBUG_KMS("DFP2 connected\n");
1889 			bios_0_scratch |= ATOM_S0_DFP2;
1890 			bios_3_scratch |= ATOM_S3_DFP2_ACTIVE;
1891 			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP2;
1892 		} else {
1893 			DRM_DEBUG_KMS("DFP2 disconnected\n");
1894 			bios_0_scratch &= ~ATOM_S0_DFP2;
1895 			bios_3_scratch &= ~ATOM_S3_DFP2_ACTIVE;
1896 			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP2;
1897 		}
1898 	}
1899 	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP3_SUPPORT) &&
1900 	    (amdgpu_connector->devices & ATOM_DEVICE_DFP3_SUPPORT)) {
1901 		if (connected) {
1902 			DRM_DEBUG_KMS("DFP3 connected\n");
1903 			bios_0_scratch |= ATOM_S0_DFP3;
1904 			bios_3_scratch |= ATOM_S3_DFP3_ACTIVE;
1905 			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP3;
1906 		} else {
1907 			DRM_DEBUG_KMS("DFP3 disconnected\n");
1908 			bios_0_scratch &= ~ATOM_S0_DFP3;
1909 			bios_3_scratch &= ~ATOM_S3_DFP3_ACTIVE;
1910 			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP3;
1911 		}
1912 	}
1913 	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP4_SUPPORT) &&
1914 	    (amdgpu_connector->devices & ATOM_DEVICE_DFP4_SUPPORT)) {
1915 		if (connected) {
1916 			DRM_DEBUG_KMS("DFP4 connected\n");
1917 			bios_0_scratch |= ATOM_S0_DFP4;
1918 			bios_3_scratch |= ATOM_S3_DFP4_ACTIVE;
1919 			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP4;
1920 		} else {
1921 			DRM_DEBUG_KMS("DFP4 disconnected\n");
1922 			bios_0_scratch &= ~ATOM_S0_DFP4;
1923 			bios_3_scratch &= ~ATOM_S3_DFP4_ACTIVE;
1924 			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP4;
1925 		}
1926 	}
1927 	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP5_SUPPORT) &&
1928 	    (amdgpu_connector->devices & ATOM_DEVICE_DFP5_SUPPORT)) {
1929 		if (connected) {
1930 			DRM_DEBUG_KMS("DFP5 connected\n");
1931 			bios_0_scratch |= ATOM_S0_DFP5;
1932 			bios_3_scratch |= ATOM_S3_DFP5_ACTIVE;
1933 			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP5;
1934 		} else {
1935 			DRM_DEBUG_KMS("DFP5 disconnected\n");
1936 			bios_0_scratch &= ~ATOM_S0_DFP5;
1937 			bios_3_scratch &= ~ATOM_S3_DFP5_ACTIVE;
1938 			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP5;
1939 		}
1940 	}
1941 	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP6_SUPPORT) &&
1942 	    (amdgpu_connector->devices & ATOM_DEVICE_DFP6_SUPPORT)) {
1943 		if (connected) {
1944 			DRM_DEBUG_KMS("DFP6 connected\n");
1945 			bios_0_scratch |= ATOM_S0_DFP6;
1946 			bios_3_scratch |= ATOM_S3_DFP6_ACTIVE;
1947 			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP6;
1948 		} else {
1949 			DRM_DEBUG_KMS("DFP6 disconnected\n");
1950 			bios_0_scratch &= ~ATOM_S0_DFP6;
1951 			bios_3_scratch &= ~ATOM_S3_DFP6_ACTIVE;
1952 			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP6;
1953 		}
1954 	}
1955 
1956 	WREG32(mmBIOS_SCRATCH_0, bios_0_scratch);
1957 	WREG32(mmBIOS_SCRATCH_3, bios_3_scratch);
1958 	WREG32(mmBIOS_SCRATCH_6, bios_6_scratch);
1959 }
1960 
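/* Data layouts for the LVDS_Info table, one per table revision. */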
1961 union lvds_info {
1962 	struct _ATOM_LVDS_INFO info;
1963 	struct _ATOM_LVDS_INFO_V12 info_12;
1964 };
1965 
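/*
 * Build the panel's native mode, power sequencing delay and misc flags from
 * the LVDS_Info data table, then walk the optional patch record list for a
 * fake EDID and/or a physical panel size override.
 */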
1966 struct amdgpu_encoder_atom_dig *
1967 amdgpu_atombios_encoder_get_lcd_info(struct amdgpu_encoder *encoder)
1968 {
1969 	struct drm_device *dev = encoder->base.dev;
1970 	struct amdgpu_device *adev = drm_to_adev(dev);
1971 	struct amdgpu_mode_info *mode_info = &adev->mode_info;
1972 	int index = GetIndexIntoMasterTable(DATA, LVDS_Info);
1973 	uint16_t data_offset, misc;
1974 	union lvds_info *lvds_info;
1975 	uint8_t frev, crev;
1976 	struct amdgpu_encoder_atom_dig *lvds = NULL;
1977 	int encoder_enum = (encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
1978 
1979 	if (amdgpu_atom_parse_data_header(mode_info->atom_context, index, NULL,
1980 				   &frev, &crev, &data_offset)) {
1981 		lvds_info =
1982 			(union lvds_info *)(mode_info->atom_context->bios + data_offset);
1983 		lvds =
1984 		    kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);
1985 
1986 		if (!lvds)
1987 			return NULL;
1988 
1989 		lvds->native_mode.clock =
1990 		    le16_to_cpu(lvds_info->info.sLCDTiming.usPixClk) * 10;
1991 		lvds->native_mode.hdisplay =
1992 		    le16_to_cpu(lvds_info->info.sLCDTiming.usHActive);
1993 		lvds->native_mode.vdisplay =
1994 		    le16_to_cpu(lvds_info->info.sLCDTiming.usVActive);
1995 		lvds->native_mode.htotal = lvds->native_mode.hdisplay +
1996 			le16_to_cpu(lvds_info->info.sLCDTiming.usHBlanking_Time);
1997 		lvds->native_mode.hsync_start = lvds->native_mode.hdisplay +
1998 			le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncOffset);
1999 		lvds->native_mode.hsync_end = lvds->native_mode.hsync_start +
2000 			le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncWidth);
2001 		lvds->native_mode.vtotal = lvds->native_mode.vdisplay +
2002 			le16_to_cpu(lvds_info->info.sLCDTiming.usVBlanking_Time);
2003 		lvds->native_mode.vsync_start = lvds->native_mode.vdisplay +
2004 			le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncOffset);
2005 		lvds->native_mode.vsync_end = lvds->native_mode.vsync_start +
2006 			le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncWidth);
2007 		lvds->panel_pwr_delay =
2008 		    le16_to_cpu(lvds_info->info.usOffDelayInMs);
2009 		lvds->lcd_misc = lvds_info->info.ucLVDS_Misc;
2010 
2011 		misc = le16_to_cpu(lvds_info->info.sLCDTiming.susModeMiscInfo.usAccess);
2012 		if (misc & ATOM_VSYNC_POLARITY)
2013 			lvds->native_mode.flags |= DRM_MODE_FLAG_NVSYNC;
2014 		if (misc & ATOM_HSYNC_POLARITY)
2015 			lvds->native_mode.flags |= DRM_MODE_FLAG_NHSYNC;
2016 		if (misc & ATOM_COMPOSITESYNC)
2017 			lvds->native_mode.flags |= DRM_MODE_FLAG_CSYNC;
2018 		if (misc & ATOM_INTERLACE)
2019 			lvds->native_mode.flags |= DRM_MODE_FLAG_INTERLACE;
2020 		if (misc & ATOM_DOUBLE_CLOCK_MODE)
2021 			lvds->native_mode.flags |= DRM_MODE_FLAG_DBLSCAN;
2022 
2023 		lvds->native_mode.width_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageHSize);
2024 		lvds->native_mode.height_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageVSize);
2025 
2026 		/* set crtc values */
2027 		drm_mode_set_crtcinfo(&lvds->native_mode, CRTC_INTERLACE_HALVE_V);
2028 
2029 		lvds->lcd_ss_id = lvds_info->info.ucSS_Id;
2030 
2031 		encoder->native_mode = lvds->native_mode;
2032 
2033 		if (encoder_enum == 2)
2034 			lvds->linkb = true;
2035 		else
2036 			lvds->linkb = false;
2037 
2038 		/* parse the lcd record table */
2039 		if (le16_to_cpu(lvds_info->info.usModePatchTableOffset)) {
2040 			ATOM_FAKE_EDID_PATCH_RECORD *fake_edid_record;
2041 			ATOM_PANEL_RESOLUTION_PATCH_RECORD *panel_res_record;
2042 			bool bad_record = false;
2043 			u8 *record;
2044 
2045 			if ((frev == 1) && (crev < 2))
2046 				/* absolute */
2047 				record = (u8 *)(mode_info->atom_context->bios +
2048 						le16_to_cpu(lvds_info->info.usModePatchTableOffset));
2049 			else
2050 				/* relative */
2051 				record = (u8 *)(mode_info->atom_context->bios +
2052 						data_offset +
2053 						le16_to_cpu(lvds_info->info.usModePatchTableOffset));
2054 			while (*record != ATOM_RECORD_END_TYPE) {
2055 				switch (*record) {
2056 				case LCD_MODE_PATCH_RECORD_MODE_TYPE:
2057 					record += sizeof(ATOM_PATCH_RECORD_MODE);
2058 					break;
2059 				case LCD_RTS_RECORD_TYPE:
2060 					record += sizeof(ATOM_LCD_RTS_RECORD);
2061 					break;
2062 				case LCD_CAP_RECORD_TYPE:
2063 					record += sizeof(ATOM_LCD_MODE_CONTROL_CAP);
2064 					break;
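				/* Cache a VBIOS-provided (fake) EDID for the panel if it validates. */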
2065 				case LCD_FAKE_EDID_PATCH_RECORD_TYPE:
2066 					fake_edid_record = (ATOM_FAKE_EDID_PATCH_RECORD *)record;
2067 					if (fake_edid_record->ucFakeEDIDLength) {
2068 						struct edid *edid;
2069 						int edid_size =
2070 							max((int)EDID_LENGTH, (int)fake_edid_record->ucFakeEDIDLength);
2071 						edid = kmalloc(edid_size, GFP_KERNEL);
2072 						if (edid) {
2073 							memcpy((u8 *)edid, (u8 *)&fake_edid_record->ucFakeEDIDString[0],
2074 							       fake_edid_record->ucFakeEDIDLength);
2075 
2076 							if (drm_edid_is_valid(edid)) {
2077 								adev->mode_info.bios_hardcoded_edid = edid;
2078 								adev->mode_info.bios_hardcoded_edid_size = edid_size;
2079 							} else
2080 								kfree(edid);
2081 						}
2082 					}
2083 					record += fake_edid_record->ucFakeEDIDLength ?
2084 						fake_edid_record->ucFakeEDIDLength + 2 :
2085 						sizeof(ATOM_FAKE_EDID_PATCH_RECORD);
2086 					break;
2087 				case LCD_PANEL_RESOLUTION_RECORD_TYPE:
2088 					panel_res_record = (ATOM_PANEL_RESOLUTION_PATCH_RECORD *)record;
2089 					lvds->native_mode.width_mm = panel_res_record->usHSize;
2090 					lvds->native_mode.height_mm = panel_res_record->usVSize;
2091 					record += sizeof(ATOM_PANEL_RESOLUTION_PATCH_RECORD);
2092 					break;
2093 				default:
2094 					DRM_ERROR("Bad LCD record %d\n", *record);
2095 					bad_record = true;
2096 					break;
2097 				}
2098 				if (bad_record)
2099 					break;
2100 			}
2101 		}
2102 	}
2103 	return lvds;
2104 }
2105 
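/*
 * Allocate per-encoder DIG state: coherent mode on by default, no DIG block
 * assigned yet, link B selected from the encoder enumeration.
 */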
2106 struct amdgpu_encoder_atom_dig *
2107 amdgpu_atombios_encoder_get_dig_info(struct amdgpu_encoder *amdgpu_encoder)
2108 {
2109 	int encoder_enum = (amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
2110 	struct amdgpu_encoder_atom_dig *dig = kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);
2111 
2112 	if (!dig)
2113 		return NULL;
2114 
2115 	/* coherent mode by default */
2116 	dig->coherent_mode = true;
2117 	dig->dig_encoder = -1;
2118 
2119 	if (encoder_enum == 2)
2120 		dig->linkb = true;
2121 	else
2122 		dig->linkb = false;
2123 
2124 	return dig;
2125 }
2126 
2127