drivers/gpu/drm/amd/amdgpu/atombios_encoders.c
1 /*
2 * Copyright 2007-11 Advanced Micro Devices, Inc.
3 * Copyright 2008 Red Hat Inc.
4 *
5 * Permission is hereby granted, free of charge, to any person obtaining a
6 * copy of this software and associated documentation files (the "Software"),
7 * to deal in the Software without restriction, including without limitation
8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9 * and/or sell copies of the Software, and to permit persons to whom the
10 * Software is furnished to do so, subject to the following conditions:
11 *
12 * The above copyright notice and this permission notice shall be included in
13 * all copies or substantial portions of the Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
19 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
20 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
21 * OTHER DEALINGS IN THE SOFTWARE.
22 *
23 * Authors: Dave Airlie
24 * Alex Deucher
25 */
26 #include <drm/drmP.h>
27 #include <drm/drm_crtc_helper.h>
28 #include <drm/amdgpu_drm.h>
29 #include "amdgpu.h"
30 #include "amdgpu_connectors.h"
31 #include "atom.h"
32 #include "atombios_encoders.h"
33 #include "atombios_dp.h"
34 #include <linux/backlight.h>
35 #include "bif/bif_4_1_d.h"
36
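/* The VBIOS keeps the current panel backlight level in the
 * BIOS_SCRATCH_2 register; these two helpers read and update the
 * ATOM_S2_CURRENT_BL_LEVEL field in that register.
 */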
37 static u8
38 amdgpu_atombios_encoder_get_backlight_level_from_reg(struct amdgpu_device *adev)
39 {
40 u8 backlight_level;
41 u32 bios_2_scratch;
42
43 bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
44
45 backlight_level = ((bios_2_scratch & ATOM_S2_CURRENT_BL_LEVEL_MASK) >>
46 ATOM_S2_CURRENT_BL_LEVEL_SHIFT);
47
48 return backlight_level;
49 }
50
51 static void
52 amdgpu_atombios_encoder_set_backlight_level_to_reg(struct amdgpu_device *adev,
53 u8 backlight_level)
54 {
55 u32 bios_2_scratch;
56
57 bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
58
59 bios_2_scratch &= ~ATOM_S2_CURRENT_BL_LEVEL_MASK;
60 bios_2_scratch |= ((backlight_level << ATOM_S2_CURRENT_BL_LEVEL_SHIFT) &
61 ATOM_S2_CURRENT_BL_LEVEL_MASK);
62
63 WREG32(mmBIOS_SCRATCH_2, bios_2_scratch);
64 }
65
66 u8
67 amdgpu_atombios_encoder_get_backlight_level(struct amdgpu_encoder *amdgpu_encoder)
68 {
69 struct drm_device *dev = amdgpu_encoder->base.dev;
70 struct amdgpu_device *adev = dev->dev_private;
71
72 if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
73 return 0;
74
75 return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
76 }
77
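/* Set a new backlight level for an LCD output. The level is mirrored
 * into the scratch register and, for the DIG encoders, the transmitter
 * table is used to apply the brightness and switch the backlight
 * on or off.
 */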
78 void
79 amdgpu_atombios_encoder_set_backlight_level(struct amdgpu_encoder *amdgpu_encoder,
80 u8 level)
81 {
82 struct drm_encoder *encoder = &amdgpu_encoder->base;
83 struct drm_device *dev = amdgpu_encoder->base.dev;
84 struct amdgpu_device *adev = dev->dev_private;
85 struct amdgpu_encoder_atom_dig *dig;
86
87 if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
88 return;
89
90 if ((amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) &&
91 amdgpu_encoder->enc_priv) {
92 dig = amdgpu_encoder->enc_priv;
93 dig->backlight_level = level;
94 amdgpu_atombios_encoder_set_backlight_level_to_reg(adev, dig->backlight_level);
95
96 switch (amdgpu_encoder->encoder_id) {
97 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
98 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
99 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
100 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
101 if (dig->backlight_level == 0)
102 amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
103 ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
104 else {
105 amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
106 ATOM_TRANSMITTER_ACTION_BL_BRIGHTNESS_CONTROL, 0, 0);
107 amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
108 ATOM_TRANSMITTER_ACTION_LCD_BLON, 0, 0);
109 }
110 break;
111 default:
112 break;
113 }
114 }
115 }
116
117 #if defined(CONFIG_BACKLIGHT_CLASS_DEVICE) || defined(CONFIG_BACKLIGHT_CLASS_DEVICE_MODULE)
118
119 static u8 amdgpu_atombios_encoder_backlight_level(struct backlight_device *bd)
120 {
121 u8 level;
122
123 /* Convert brightness to hardware level */
124 if (bd->props.brightness < 0)
125 level = 0;
126 else if (bd->props.brightness > AMDGPU_MAX_BL_LEVEL)
127 level = AMDGPU_MAX_BL_LEVEL;
128 else
129 level = bd->props.brightness;
130
131 return level;
132 }
133
134 static int amdgpu_atombios_encoder_update_backlight_status(struct backlight_device *bd)
135 {
136 struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
137 struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
138
139 amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder,
140 amdgpu_atombios_encoder_backlight_level(bd));
141
142 return 0;
143 }
144
145 static int
146 amdgpu_atombios_encoder_get_backlight_brightness(struct backlight_device *bd)
147 {
148 struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
149 struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
150 struct drm_device *dev = amdgpu_encoder->base.dev;
151 struct amdgpu_device *adev = dev->dev_private;
152
153 return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
154 }
155
156 static const struct backlight_ops amdgpu_atombios_encoder_backlight_ops = {
157 .get_brightness = amdgpu_atombios_encoder_get_backlight_brightness,
158 .update_status = amdgpu_atombios_encoder_update_backlight_status,
159 };
160
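/* Register a backlight class device for a panel driven by this
 * encoder. Skipped on dual-GPU Apple laptops (gmux owns the
 * backlight), on non-ATOM BIOSes, and when the BIOS does not report
 * GPU-controlled backlight.
 */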
161 void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *amdgpu_encoder,
162 struct drm_connector *drm_connector)
163 {
164 struct drm_device *dev = amdgpu_encoder->base.dev;
165 struct amdgpu_device *adev = dev->dev_private;
166 struct backlight_device *bd;
167 struct backlight_properties props;
168 struct amdgpu_backlight_privdata *pdata;
169 struct amdgpu_encoder_atom_dig *dig;
170 u8 backlight_level;
171 char bl_name[16];
172
173 /* Mac laptops with multiple GPUs use the gmux driver for backlight
174 * so don't register a backlight device
175 */
176 if ((adev->pdev->subsystem_vendor == PCI_VENDOR_ID_APPLE) &&
177 (adev->pdev->device == 0x6741))
178 return;
179
180 if (!amdgpu_encoder->enc_priv)
181 return;
182
183 if (!adev->is_atom_bios)
184 return;
185
186 if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
187 return;
188
189 pdata = kmalloc(sizeof(struct amdgpu_backlight_privdata), GFP_KERNEL);
190 if (!pdata) {
191 DRM_ERROR("Memory allocation failed\n");
192 goto error;
193 }
194
195 memset(&props, 0, sizeof(props));
196 props.max_brightness = AMDGPU_MAX_BL_LEVEL;
197 props.type = BACKLIGHT_RAW;
198 snprintf(bl_name, sizeof(bl_name),
199 "amdgpu_bl%d", dev->primary->index);
200 bd = backlight_device_register(bl_name, drm_connector->kdev,
201 pdata, &amdgpu_atombios_encoder_backlight_ops, &props);
202 if (IS_ERR(bd)) {
203 DRM_ERROR("Backlight registration failed\n");
204 goto error;
205 }
206
207 pdata->encoder = amdgpu_encoder;
208
209 backlight_level = amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
210
211 dig = amdgpu_encoder->enc_priv;
212 dig->bl_dev = bd;
213
214 bd->props.brightness = amdgpu_atombios_encoder_get_backlight_brightness(bd);
215 bd->props.power = FB_BLANK_UNBLANK;
216 backlight_update_status(bd);
217
218 DRM_INFO("amdgpu atom DIG backlight initialized\n");
219
220 return;
221
222 error:
223 kfree(pdata);
224 return;
225 }
226
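/* Unregister and free the backlight device created by
 * amdgpu_atombios_encoder_init_backlight(), if one was registered.
 */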
227 void
228 amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *amdgpu_encoder)
229 {
230 struct drm_device *dev = amdgpu_encoder->base.dev;
231 struct amdgpu_device *adev = dev->dev_private;
232 struct backlight_device *bd = NULL;
233 struct amdgpu_encoder_atom_dig *dig;
234
235 if (!amdgpu_encoder->enc_priv)
236 return;
237
238 if (!adev->is_atom_bios)
239 return;
240
241 if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
242 return;
243
244 dig = amdgpu_encoder->enc_priv;
245 bd = dig->bl_dev;
246 dig->bl_dev = NULL;
247
248 if (bd) {
249 struct amdgpu_backlight_privdata *pdata;
250
251 pdata = bl_get_data(bd);
252 backlight_device_unregister(bd);
253 kfree(pdata);
254
255 DRM_INFO("amdgpu atom LVDS backlight unloaded\n");
256 }
257 }
258
259 #else /* !CONFIG_BACKLIGHT_CLASS_DEVICE */
260
261 void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *encoder, struct drm_connector *drm_connector)
262 {
263 }
264
265 void amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *encoder)
266 {
267 }
268
269 #endif
270
271 bool amdgpu_atombios_encoder_is_digital(struct drm_encoder *encoder)
272 {
273 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
274 switch (amdgpu_encoder->encoder_id) {
275 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
276 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
277 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
278 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
279 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
280 return true;
281 default:
282 return false;
283 }
284 }
285
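/* Fix up the requested mode before it is programmed: apply the
 * interlaced vsync workaround, keep at least one line of vertical
 * front porch, handle panel scaling and, for DP/LCD paths, pick the
 * DP link configuration.
 */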
286 bool amdgpu_atombios_encoder_mode_fixup(struct drm_encoder *encoder,
287 const struct drm_display_mode *mode,
288 struct drm_display_mode *adjusted_mode)
289 {
290 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
291
292 /* set the active encoder to connector routing */
293 amdgpu_encoder_set_active_device(encoder);
294 drm_mode_set_crtcinfo(adjusted_mode, 0);
295
296 /* hw bug */
297 if ((mode->flags & DRM_MODE_FLAG_INTERLACE)
298 && (mode->crtc_vsync_start < (mode->crtc_vdisplay + 2)))
299 adjusted_mode->crtc_vsync_start = adjusted_mode->crtc_vdisplay + 2;
300
301 /* vertical FP must be at least 1 */
302 if (mode->crtc_vsync_start == mode->crtc_vdisplay)
303 adjusted_mode->crtc_vsync_start++;
304
305 /* get the native mode for scaling */
306 if (amdgpu_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT))
307 amdgpu_panel_mode_fixup(encoder, adjusted_mode);
308 else if (amdgpu_encoder->rmx_type != RMX_OFF)
309 amdgpu_panel_mode_fixup(encoder, adjusted_mode);
310
311 if ((amdgpu_encoder->active_device & (ATOM_DEVICE_DFP_SUPPORT | ATOM_DEVICE_LCD_SUPPORT)) ||
312 (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) {
313 struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
314 amdgpu_atombios_dp_set_link_config(connector, adjusted_mode);
315 }
316
317 return true;
318 }
319
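/* Enable or disable an analog DAC through the DAC1/DAC2EncoderControl
 * command tables.
 */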
320 static void
321 amdgpu_atombios_encoder_setup_dac(struct drm_encoder *encoder, int action)
322 {
323 struct drm_device *dev = encoder->dev;
324 struct amdgpu_device *adev = dev->dev_private;
325 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
326 DAC_ENCODER_CONTROL_PS_ALLOCATION args;
327 int index = 0;
328
329 memset(&args, 0, sizeof(args));
330
331 switch (amdgpu_encoder->encoder_id) {
332 case ENCODER_OBJECT_ID_INTERNAL_DAC1:
333 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
334 index = GetIndexIntoMasterTable(COMMAND, DAC1EncoderControl);
335 break;
336 case ENCODER_OBJECT_ID_INTERNAL_DAC2:
337 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
338 index = GetIndexIntoMasterTable(COMMAND, DAC2EncoderControl);
339 break;
340 }
341
342 args.ucAction = action;
343 args.ucDacStandard = ATOM_DAC1_PS2;
344 args.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
345
346 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
347
348 }
349
350 static u8 amdgpu_atombios_encoder_get_bpc(struct drm_encoder *encoder)
351 {
352 int bpc = 8;
353
354 if (encoder->crtc) {
355 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
356 bpc = amdgpu_crtc->bpc;
357 }
358
359 switch (bpc) {
360 case 0:
361 return PANEL_BPC_UNDEFINE;
362 case 6:
363 return PANEL_6BIT_PER_COLOR;
364 case 8:
365 default:
366 return PANEL_8BIT_PER_COLOR;
367 case 10:
368 return PANEL_10BIT_PER_COLOR;
369 case 12:
370 return PANEL_12BIT_PER_COLOR;
371 case 16:
372 return PANEL_16BIT_PER_COLOR;
373 }
374 }
375
376 union dvo_encoder_control {
377 ENABLE_EXTERNAL_TMDS_ENCODER_PS_ALLOCATION ext_tmds;
378 DVO_ENCODER_CONTROL_PS_ALLOCATION dvo;
379 DVO_ENCODER_CONTROL_PS_ALLOCATION_V3 dvo_v3;
380 DVO_ENCODER_CONTROL_PS_ALLOCATION_V1_4 dvo_v4;
381 };
382
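/* Enable or disable the DVO encoder. The argument layout depends on
 * the table revision reported by the command header, so each frev/crev
 * combination fills a different member of the union above.
 */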
383 static void
384 amdgpu_atombios_encoder_setup_dvo(struct drm_encoder *encoder, int action)
385 {
386 struct drm_device *dev = encoder->dev;
387 struct amdgpu_device *adev = dev->dev_private;
388 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
389 union dvo_encoder_control args;
390 int index = GetIndexIntoMasterTable(COMMAND, DVOEncoderControl);
391 uint8_t frev, crev;
392
393 memset(&args, 0, sizeof(args));
394
395 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
396 return;
397
398 switch (frev) {
399 case 1:
400 switch (crev) {
401 case 1:
402 /* R4xx, R5xx */
403 args.ext_tmds.sXTmdsEncoder.ucEnable = action;
404
405 if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
406 args.ext_tmds.sXTmdsEncoder.ucMisc |= PANEL_ENCODER_MISC_DUAL;
407
408 args.ext_tmds.sXTmdsEncoder.ucMisc |= ATOM_PANEL_MISC_888RGB;
409 break;
410 case 2:
411 /* RS600/690/740 */
412 args.dvo.sDVOEncoder.ucAction = action;
413 args.dvo.sDVOEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
414 /* DFP1, CRT1, TV1 depending on the type of port */
415 args.dvo.sDVOEncoder.ucDeviceType = ATOM_DEVICE_DFP1_INDEX;
416
417 if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
418 args.dvo.sDVOEncoder.usDevAttr.sDigAttrib.ucAttribute |= PANEL_ENCODER_MISC_DUAL;
419 break;
420 case 3:
421 /* R6xx */
422 args.dvo_v3.ucAction = action;
423 args.dvo_v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
424 args.dvo_v3.ucDVOConfig = 0; /* XXX */
425 break;
426 case 4:
427 /* DCE8 */
428 args.dvo_v4.ucAction = action;
429 args.dvo_v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
430 args.dvo_v4.ucDVOConfig = 0; /* XXX */
431 args.dvo_v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
432 break;
433 default:
434 DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
435 break;
436 }
437 break;
438 default:
439 DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
440 break;
441 }
442
443 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
444 }
445
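/* Work out which ATOM encoder mode (DP, HDMI, DVI, LVDS, CRT or TV)
 * should be used for the connector currently routed to this encoder,
 * taking the amdgpu_audio setting and the detected sink type into
 * account.
 */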
446 int amdgpu_atombios_encoder_get_encoder_mode(struct drm_encoder *encoder)
447 {
448 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
449 struct drm_connector *connector;
450 struct amdgpu_connector *amdgpu_connector;
451 struct amdgpu_connector_atom_dig *dig_connector;
452
453 /* dp bridges are always DP */
454 if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)
455 return ATOM_ENCODER_MODE_DP;
456
457 /* DVO is always DVO */
458 if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DVO1) ||
459 (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1))
460 return ATOM_ENCODER_MODE_DVO;
461
462 connector = amdgpu_get_connector_for_encoder(encoder);
463 /* if we don't have an active device yet, just use one of
464 * the connectors tied to the encoder.
465 */
466 if (!connector)
467 connector = amdgpu_get_connector_for_encoder_init(encoder);
468 amdgpu_connector = to_amdgpu_connector(connector);
469
470 switch (connector->connector_type) {
471 case DRM_MODE_CONNECTOR_DVII:
472 case DRM_MODE_CONNECTOR_HDMIB: /* HDMI-B is basically DL-DVI; analog works fine */
473 if (amdgpu_audio != 0) {
474 if (amdgpu_connector->use_digital &&
475 (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE))
476 return ATOM_ENCODER_MODE_HDMI;
477 else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
478 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
479 return ATOM_ENCODER_MODE_HDMI;
480 else if (amdgpu_connector->use_digital)
481 return ATOM_ENCODER_MODE_DVI;
482 else
483 return ATOM_ENCODER_MODE_CRT;
484 } else if (amdgpu_connector->use_digital) {
485 return ATOM_ENCODER_MODE_DVI;
486 } else {
487 return ATOM_ENCODER_MODE_CRT;
488 }
489 break;
490 case DRM_MODE_CONNECTOR_DVID:
491 case DRM_MODE_CONNECTOR_HDMIA:
492 default:
493 if (amdgpu_audio != 0) {
494 if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
495 return ATOM_ENCODER_MODE_HDMI;
496 else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
497 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
498 return ATOM_ENCODER_MODE_HDMI;
499 else
500 return ATOM_ENCODER_MODE_DVI;
501 } else {
502 return ATOM_ENCODER_MODE_DVI;
503 }
504 break;
505 case DRM_MODE_CONNECTOR_LVDS:
506 return ATOM_ENCODER_MODE_LVDS;
507 break;
508 case DRM_MODE_CONNECTOR_DisplayPort:
509 dig_connector = amdgpu_connector->con_priv;
510 if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
511 (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
512 return ATOM_ENCODER_MODE_DP;
513 } else if (amdgpu_audio != 0) {
514 if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
515 return ATOM_ENCODER_MODE_HDMI;
516 else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
517 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
518 return ATOM_ENCODER_MODE_HDMI;
519 else
520 return ATOM_ENCODER_MODE_DVI;
521 } else {
522 return ATOM_ENCODER_MODE_DVI;
523 }
524 break;
525 case DRM_MODE_CONNECTOR_eDP:
526 return ATOM_ENCODER_MODE_DP;
527 case DRM_MODE_CONNECTOR_DVIA:
528 case DRM_MODE_CONNECTOR_VGA:
529 return ATOM_ENCODER_MODE_CRT;
530 break;
531 case DRM_MODE_CONNECTOR_Composite:
532 case DRM_MODE_CONNECTOR_SVIDEO:
533 case DRM_MODE_CONNECTOR_9PinDIN:
534 /* fix me */
535 return ATOM_ENCODER_MODE_TV;
536 /*return ATOM_ENCODER_MODE_CV;*/
537 break;
538 }
539 }
540
541 /*
542 * DIG Encoder/Transmitter Setup
543 *
544 * DCE 6.0
545 * - 3 DIG transmitter blocks UNIPHY0/1/2 (links A and B).
546 * Supports up to 6 digital outputs
547 * - 6 DIG encoder blocks.
548 * - DIG to PHY mapping is hardcoded
549 * DIG1 drives UNIPHY0 link A, A+B
550 * DIG2 drives UNIPHY0 link B
551 * DIG3 drives UNIPHY1 link A, A+B
552 * DIG4 drives UNIPHY1 link B
553 * DIG5 drives UNIPHY2 link A, A+B
554 * DIG6 drives UNIPHY2 link B
555 *
556 * Routing
557 * crtc -> dig encoder -> UNIPHY/LVTMA (1 or 2 links)
558 * Examples:
559 * crtc0 -> dig2 -> LVTMA links A+B -> TMDS/HDMI
560 * crtc1 -> dig1 -> UNIPHY0 link B -> DP
561 * crtc0 -> dig1 -> UNIPHY2 link A -> LVDS
562 * crtc1 -> dig2 -> UNIPHY1 link B+A -> TMDS/HDMI
563 */
564
565 union dig_encoder_control {
566 DIG_ENCODER_CONTROL_PS_ALLOCATION v1;
567 DIG_ENCODER_CONTROL_PARAMETERS_V2 v2;
568 DIG_ENCODER_CONTROL_PARAMETERS_V3 v3;
569 DIG_ENCODER_CONTROL_PARAMETERS_V4 v4;
570 DIG_ENCODER_CONTROL_PARAMETERS_V5 v5;
571 };
572
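/* Program a DIG encoder block via DIGxEncoderControl. Lane count and
 * DP link rate come from the attached connector; the exact parameter
 * layout depends on the table revision.
 */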
573 void
574 amdgpu_atombios_encoder_setup_dig_encoder(struct drm_encoder *encoder,
575 int action, int panel_mode)
576 {
577 struct drm_device *dev = encoder->dev;
578 struct amdgpu_device *adev = dev->dev_private;
579 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
580 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
581 struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
582 union dig_encoder_control args;
583 int index = GetIndexIntoMasterTable(COMMAND, DIGxEncoderControl);
584 uint8_t frev, crev;
585 int dp_clock = 0;
586 int dp_lane_count = 0;
587 int hpd_id = AMDGPU_HPD_NONE;
588
589 if (connector) {
590 struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
591 struct amdgpu_connector_atom_dig *dig_connector =
592 amdgpu_connector->con_priv;
593
594 dp_clock = dig_connector->dp_clock;
595 dp_lane_count = dig_connector->dp_lane_count;
596 hpd_id = amdgpu_connector->hpd.hpd;
597 }
598
599 /* no dig encoder assigned */
600 if (dig->dig_encoder == -1)
601 return;
602
603 memset(&args, 0, sizeof(args));
604
605 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
606 return;
607
608 switch (frev) {
609 case 1:
610 switch (crev) {
611 case 1:
612 args.v1.ucAction = action;
613 args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
614 if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
615 args.v3.ucPanelMode = panel_mode;
616 else
617 args.v1.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
618
619 if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode))
620 args.v1.ucLaneNum = dp_lane_count;
621 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
622 args.v1.ucLaneNum = 8;
623 else
624 args.v1.ucLaneNum = 4;
625
626 if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode) && (dp_clock == 270000))
627 args.v1.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
628 switch (amdgpu_encoder->encoder_id) {
629 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
630 args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER1;
631 break;
632 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
633 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
634 args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER2;
635 break;
636 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
637 args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER3;
638 break;
639 }
640 if (dig->linkb)
641 args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKB;
642 else
643 args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKA;
644 break;
645 case 2:
646 case 3:
647 args.v3.ucAction = action;
648 args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
649 if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
650 args.v3.ucPanelMode = panel_mode;
651 else
652 args.v3.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
653
654 if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode))
655 args.v3.ucLaneNum = dp_lane_count;
656 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
657 args.v3.ucLaneNum = 8;
658 else
659 args.v3.ucLaneNum = 4;
660
661 if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode) && (dp_clock == 270000))
662 args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
663 args.v3.acConfig.ucDigSel = dig->dig_encoder;
664 args.v3.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
665 break;
666 case 4:
667 args.v4.ucAction = action;
668 args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
669 if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
670 args.v4.ucPanelMode = panel_mode;
671 else
672 args.v4.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
673
674 if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode))
675 args.v4.ucLaneNum = dp_lane_count;
676 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
677 args.v4.ucLaneNum = 8;
678 else
679 args.v4.ucLaneNum = 4;
680
681 if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode)) {
682 if (dp_clock == 540000)
683 args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_5_40GHZ;
684 else if (dp_clock == 324000)
685 args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_3_24GHZ;
686 else if (dp_clock == 270000)
687 args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_2_70GHZ;
688 else
689 args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_1_62GHZ;
690 }
691 args.v4.acConfig.ucDigSel = dig->dig_encoder;
692 args.v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
693 if (hpd_id == AMDGPU_HPD_NONE)
694 args.v4.ucHPD_ID = 0;
695 else
696 args.v4.ucHPD_ID = hpd_id + 1;
697 break;
698 case 5:
699 switch (action) {
700 case ATOM_ENCODER_CMD_SETUP_PANEL_MODE:
701 args.v5.asDPPanelModeParam.ucAction = action;
702 args.v5.asDPPanelModeParam.ucPanelMode = panel_mode;
703 args.v5.asDPPanelModeParam.ucDigId = dig->dig_encoder;
704 break;
705 case ATOM_ENCODER_CMD_STREAM_SETUP:
706 args.v5.asStreamParam.ucAction = action;
707 args.v5.asStreamParam.ucDigId = dig->dig_encoder;
708 args.v5.asStreamParam.ucDigMode =
709 amdgpu_atombios_encoder_get_encoder_mode(encoder);
710 if (ENCODER_MODE_IS_DP(args.v5.asStreamParam.ucDigMode))
711 args.v5.asStreamParam.ucLaneNum = dp_lane_count;
712 else if (amdgpu_dig_monitor_is_duallink(encoder,
713 amdgpu_encoder->pixel_clock))
714 args.v5.asStreamParam.ucLaneNum = 8;
715 else
716 args.v5.asStreamParam.ucLaneNum = 4;
717 args.v5.asStreamParam.ulPixelClock =
718 cpu_to_le32(amdgpu_encoder->pixel_clock / 10);
719 args.v5.asStreamParam.ucBitPerColor =
720 amdgpu_atombios_encoder_get_bpc(encoder);
721 args.v5.asStreamParam.ucLinkRateIn270Mhz = dp_clock / 27000;
722 break;
723 case ATOM_ENCODER_CMD_DP_LINK_TRAINING_START:
724 case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1:
725 case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2:
726 case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3:
727 case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN4:
728 case ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE:
729 case ATOM_ENCODER_CMD_DP_VIDEO_OFF:
730 case ATOM_ENCODER_CMD_DP_VIDEO_ON:
731 args.v5.asCmdParam.ucAction = action;
732 args.v5.asCmdParam.ucDigId = dig->dig_encoder;
733 break;
734 default:
735 DRM_ERROR("Unsupported action 0x%x\n", action);
736 break;
737 }
738 break;
739 default:
740 DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
741 break;
742 }
743 break;
744 default:
745 DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
746 break;
747 }
748
749 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
750
751 }
752
753 union dig_transmitter_control {
754 DIG_TRANSMITTER_CONTROL_PS_ALLOCATION v1;
755 DIG_TRANSMITTER_CONTROL_PARAMETERS_V2 v2;
756 DIG_TRANSMITTER_CONTROL_PARAMETERS_V3 v3;
757 DIG_TRANSMITTER_CONTROL_PARAMETERS_V4 v4;
758 DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_5 v5;
759 DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_6 v6;
760 };
761
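/* Program a UNIPHY/LVTMA transmitter (PHY) via the transmitter control
 * tables. Init, enable/disable, LCD backlight and DP voltage
 * swing/pre-emphasis requests all go through this function.
 */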
762 void
763 amdgpu_atombios_encoder_setup_dig_transmitter(struct drm_encoder *encoder, int action,
764 uint8_t lane_num, uint8_t lane_set)
765 {
766 struct drm_device *dev = encoder->dev;
767 struct amdgpu_device *adev = dev->dev_private;
768 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
769 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
770 struct drm_connector *connector;
771 union dig_transmitter_control args;
772 int index = 0;
773 uint8_t frev, crev;
774 bool is_dp = false;
775 int pll_id = 0;
776 int dp_clock = 0;
777 int dp_lane_count = 0;
778 int connector_object_id = 0;
779 int igp_lane_info = 0;
780 int dig_encoder = dig->dig_encoder;
781 int hpd_id = AMDGPU_HPD_NONE;
782
783 if (action == ATOM_TRANSMITTER_ACTION_INIT) {
784 connector = amdgpu_get_connector_for_encoder_init(encoder);
785 /* just needed to avoid bailing in the encoder check. the encoder
786 * isn't used for init
787 */
788 dig_encoder = 0;
789 } else
790 connector = amdgpu_get_connector_for_encoder(encoder);
791
792 if (connector) {
793 struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
794 struct amdgpu_connector_atom_dig *dig_connector =
795 amdgpu_connector->con_priv;
796
797 hpd_id = amdgpu_connector->hpd.hpd;
798 dp_clock = dig_connector->dp_clock;
799 dp_lane_count = dig_connector->dp_lane_count;
800 connector_object_id =
801 (amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
802 }
803
804 if (encoder->crtc) {
805 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
806 pll_id = amdgpu_crtc->pll_id;
807 }
808
809 /* no dig encoder assigned */
810 if (dig_encoder == -1)
811 return;
812
813 if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)))
814 is_dp = true;
815
816 memset(&args, 0, sizeof(args));
817
818 switch (amdgpu_encoder->encoder_id) {
819 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
820 index = GetIndexIntoMasterTable(COMMAND, DVOOutputControl);
821 break;
822 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
823 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
824 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
825 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
826 index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
827 break;
828 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
829 index = GetIndexIntoMasterTable(COMMAND, LVTMATransmitterControl);
830 break;
831 }
832
833 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
834 return;
835
836 switch (frev) {
837 case 1:
838 switch (crev) {
839 case 1:
840 args.v1.ucAction = action;
841 if (action == ATOM_TRANSMITTER_ACTION_INIT) {
842 args.v1.usInitInfo = cpu_to_le16(connector_object_id);
843 } else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
844 args.v1.asMode.ucLaneSel = lane_num;
845 args.v1.asMode.ucLaneSet = lane_set;
846 } else {
847 if (is_dp)
848 args.v1.usPixelClock = cpu_to_le16(dp_clock / 10);
849 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
850 args.v1.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
851 else
852 args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
853 }
854
855 args.v1.ucConfig = ATOM_TRANSMITTER_CONFIG_CLKSRC_PPLL;
856
857 if (dig_encoder)
858 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG2_ENCODER;
859 else
860 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG1_ENCODER;
861
862 if ((adev->flags & AMD_IS_APU) &&
863 (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_UNIPHY)) {
864 if (is_dp ||
865 !amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) {
866 if (igp_lane_info & 0x1)
867 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_3;
868 else if (igp_lane_info & 0x2)
869 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_4_7;
870 else if (igp_lane_info & 0x4)
871 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_11;
872 else if (igp_lane_info & 0x8)
873 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_12_15;
874 } else {
875 if (igp_lane_info & 0x3)
876 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_7;
877 else if (igp_lane_info & 0xc)
878 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_15;
879 }
880 }
881
882 if (dig->linkb)
883 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKB;
884 else
885 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKA;
886
887 if (is_dp)
888 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
889 else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
890 if (dig->coherent_mode)
891 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
892 if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
893 args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_8LANE_LINK;
894 }
895 break;
896 case 2:
897 args.v2.ucAction = action;
898 if (action == ATOM_TRANSMITTER_ACTION_INIT) {
899 args.v2.usInitInfo = cpu_to_le16(connector_object_id);
900 } else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
901 args.v2.asMode.ucLaneSel = lane_num;
902 args.v2.asMode.ucLaneSet = lane_set;
903 } else {
904 if (is_dp)
905 args.v2.usPixelClock = cpu_to_le16(dp_clock / 10);
906 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
907 args.v2.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
908 else
909 args.v2.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
910 }
911
912 args.v2.acConfig.ucEncoderSel = dig_encoder;
913 if (dig->linkb)
914 args.v2.acConfig.ucLinkSel = 1;
915
916 switch (amdgpu_encoder->encoder_id) {
917 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
918 args.v2.acConfig.ucTransmitterSel = 0;
919 break;
920 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
921 args.v2.acConfig.ucTransmitterSel = 1;
922 break;
923 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
924 args.v2.acConfig.ucTransmitterSel = 2;
925 break;
926 }
927
928 if (is_dp) {
929 args.v2.acConfig.fCoherentMode = 1;
930 args.v2.acConfig.fDPConnector = 1;
931 } else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
932 if (dig->coherent_mode)
933 args.v2.acConfig.fCoherentMode = 1;
934 if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
935 args.v2.acConfig.fDualLinkConnector = 1;
936 }
937 break;
938 case 3:
939 args.v3.ucAction = action;
940 if (action == ATOM_TRANSMITTER_ACTION_INIT) {
941 args.v3.usInitInfo = cpu_to_le16(connector_object_id);
942 } else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
943 args.v3.asMode.ucLaneSel = lane_num;
944 args.v3.asMode.ucLaneSet = lane_set;
945 } else {
946 if (is_dp)
947 args.v3.usPixelClock = cpu_to_le16(dp_clock / 10);
948 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
949 args.v3.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
950 else
951 args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
952 }
953
954 if (is_dp)
955 args.v3.ucLaneNum = dp_lane_count;
956 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
957 args.v3.ucLaneNum = 8;
958 else
959 args.v3.ucLaneNum = 4;
960
961 if (dig->linkb)
962 args.v3.acConfig.ucLinkSel = 1;
963 if (dig_encoder & 1)
964 args.v3.acConfig.ucEncoderSel = 1;
965
966 /* Select the PLL for the PHY
967 * DP PHY should be clocked from external src if there is
968 * one.
969 */
970 /* On DCE4, if there is an external clock, it generates the DP ref clock */
971 if (is_dp && adev->clock.dp_extclk)
972 args.v3.acConfig.ucRefClkSource = 2; /* external src */
973 else
974 args.v3.acConfig.ucRefClkSource = pll_id;
975
976 switch (amdgpu_encoder->encoder_id) {
977 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
978 args.v3.acConfig.ucTransmitterSel = 0;
979 break;
980 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
981 args.v3.acConfig.ucTransmitterSel = 1;
982 break;
983 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
984 args.v3.acConfig.ucTransmitterSel = 2;
985 break;
986 }
987
988 if (is_dp)
989 args.v3.acConfig.fCoherentMode = 1; /* DP requires coherent */
990 else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
991 if (dig->coherent_mode)
992 args.v3.acConfig.fCoherentMode = 1;
993 if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
994 args.v3.acConfig.fDualLinkConnector = 1;
995 }
996 break;
997 case 4:
998 args.v4.ucAction = action;
999 if (action == ATOM_TRANSMITTER_ACTION_INIT) {
1000 args.v4.usInitInfo = cpu_to_le16(connector_object_id);
1001 } else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
1002 args.v4.asMode.ucLaneSel = lane_num;
1003 args.v4.asMode.ucLaneSet = lane_set;
1004 } else {
1005 if (is_dp)
1006 args.v4.usPixelClock = cpu_to_le16(dp_clock / 10);
1007 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1008 args.v4.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
1009 else
1010 args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1011 }
1012
1013 if (is_dp)
1014 args.v4.ucLaneNum = dp_lane_count;
1015 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1016 args.v4.ucLaneNum = 8;
1017 else
1018 args.v4.ucLaneNum = 4;
1019
1020 if (dig->linkb)
1021 args.v4.acConfig.ucLinkSel = 1;
1022 if (dig_encoder & 1)
1023 args.v4.acConfig.ucEncoderSel = 1;
1024
1025 /* Select the PLL for the PHY
1026 * DP PHY should be clocked from external src if there is
1027 * one.
1028 */
1029 /* On DCE5 DCPLL usually generates the DP ref clock */
1030 if (is_dp) {
1031 if (adev->clock.dp_extclk)
1032 args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_EXTCLK;
1033 else
1034 args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_DCPLL;
1035 } else
1036 args.v4.acConfig.ucRefClkSource = pll_id;
1037
1038 switch (amdgpu_encoder->encoder_id) {
1039 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1040 args.v4.acConfig.ucTransmitterSel = 0;
1041 break;
1042 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1043 args.v4.acConfig.ucTransmitterSel = 1;
1044 break;
1045 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1046 args.v4.acConfig.ucTransmitterSel = 2;
1047 break;
1048 }
1049
1050 if (is_dp)
1051 args.v4.acConfig.fCoherentMode = 1; /* DP requires coherent */
1052 else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
1053 if (dig->coherent_mode)
1054 args.v4.acConfig.fCoherentMode = 1;
1055 if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1056 args.v4.acConfig.fDualLinkConnector = 1;
1057 }
1058 break;
1059 case 5:
1060 args.v5.ucAction = action;
1061 if (is_dp)
1062 args.v5.usSymClock = cpu_to_le16(dp_clock / 10);
1063 else
1064 args.v5.usSymClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1065
1066 switch (amdgpu_encoder->encoder_id) {
1067 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1068 if (dig->linkb)
1069 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYB;
1070 else
1071 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYA;
1072 break;
1073 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1074 if (dig->linkb)
1075 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYD;
1076 else
1077 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYC;
1078 break;
1079 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1080 if (dig->linkb)
1081 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYF;
1082 else
1083 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYE;
1084 break;
1085 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1086 args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYG;
1087 break;
1088 }
1089 if (is_dp)
1090 args.v5.ucLaneNum = dp_lane_count;
1091 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1092 args.v5.ucLaneNum = 8;
1093 else
1094 args.v5.ucLaneNum = 4;
1095 args.v5.ucConnObjId = connector_object_id;
1096 args.v5.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1097
1098 if (is_dp && adev->clock.dp_extclk)
1099 args.v5.asConfig.ucPhyClkSrcId = ENCODER_REFCLK_SRC_EXTCLK;
1100 else
1101 args.v5.asConfig.ucPhyClkSrcId = pll_id;
1102
1103 if (is_dp)
1104 args.v5.asConfig.ucCoherentMode = 1; /* DP requires coherent */
1105 else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
1106 if (dig->coherent_mode)
1107 args.v5.asConfig.ucCoherentMode = 1;
1108 }
1109 if (hpd_id == AMDGPU_HPD_NONE)
1110 args.v5.asConfig.ucHPDSel = 0;
1111 else
1112 args.v5.asConfig.ucHPDSel = hpd_id + 1;
1113 args.v5.ucDigEncoderSel = 1 << dig_encoder;
1114 args.v5.ucDPLaneSet = lane_set;
1115 break;
1116 case 6:
1117 args.v6.ucAction = action;
1118 if (is_dp)
1119 args.v6.ulSymClock = cpu_to_le32(dp_clock / 10);
1120 else
1121 args.v6.ulSymClock = cpu_to_le32(amdgpu_encoder->pixel_clock / 10);
1122
1123 switch (amdgpu_encoder->encoder_id) {
1124 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1125 if (dig->linkb)
1126 args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYB;
1127 else
1128 args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYA;
1129 break;
1130 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1131 if (dig->linkb)
1132 args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYD;
1133 else
1134 args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYC;
1135 break;
1136 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1137 if (dig->linkb)
1138 args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYF;
1139 else
1140 args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYE;
1141 break;
1142 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1143 args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYG;
1144 break;
1145 }
1146 if (is_dp)
1147 args.v6.ucLaneNum = dp_lane_count;
1148 else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1149 args.v6.ucLaneNum = 8;
1150 else
1151 args.v6.ucLaneNum = 4;
1152 args.v6.ucConnObjId = connector_object_id;
1153 if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH)
1154 args.v6.ucDPLaneSet = lane_set;
1155 else
1156 args.v6.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1157
1158 if (hpd_id == AMDGPU_HPD_NONE)
1159 args.v6.ucHPDSel = 0;
1160 else
1161 args.v6.ucHPDSel = hpd_id + 1;
1162 args.v6.ucDigEncoderSel = 1 << dig_encoder;
1163 break;
1164 default:
1165 DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
1166 break;
1167 }
1168 break;
1169 default:
1170 DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
1171 break;
1172 }
1173
1174 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1175 }
1176
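/* Power the eDP panel on or off via the UNIPHY transmitter table.
 * On power-on, HPD is polled for up to ~300 ms so the caller does not
 * start link training before the panel has come up.
 */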
1177 bool
1178 amdgpu_atombios_encoder_set_edp_panel_power(struct drm_connector *connector,
1179 int action)
1180 {
1181 struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1182 struct drm_device *dev = amdgpu_connector->base.dev;
1183 struct amdgpu_device *adev = dev->dev_private;
1184 union dig_transmitter_control args;
1185 int index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
1186 uint8_t frev, crev;
1187
1188 if (connector->connector_type != DRM_MODE_CONNECTOR_eDP)
1189 goto done;
1190
1191 if ((action != ATOM_TRANSMITTER_ACTION_POWER_ON) &&
1192 (action != ATOM_TRANSMITTER_ACTION_POWER_OFF))
1193 goto done;
1194
1195 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1196 goto done;
1197
1198 memset(&args, 0, sizeof(args));
1199
1200 args.v1.ucAction = action;
1201
1202 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1203
1204 /* wait for the panel to power up */
1205 if (action == ATOM_TRANSMITTER_ACTION_POWER_ON) {
1206 int i;
1207
1208 for (i = 0; i < 300; i++) {
1209 if (amdgpu_display_hpd_sense(adev, amdgpu_connector->hpd.hpd))
1210 return true;
1211 mdelay(1);
1212 }
1213 return false;
1214 }
1215 done:
1216 return true;
1217 }
1218
1219 union external_encoder_control {
1220 EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION v1;
1221 EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION_V3 v3;
1222 };
1223
1224 static void
1225 amdgpu_atombios_encoder_setup_external_encoder(struct drm_encoder *encoder,
1226 struct drm_encoder *ext_encoder,
1227 int action)
1228 {
1229 struct drm_device *dev = encoder->dev;
1230 struct amdgpu_device *adev = dev->dev_private;
1231 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1232 struct amdgpu_encoder *ext_amdgpu_encoder = to_amdgpu_encoder(ext_encoder);
1233 union external_encoder_control args;
1234 struct drm_connector *connector;
1235 int index = GetIndexIntoMasterTable(COMMAND, ExternalEncoderControl);
1236 u8 frev, crev;
1237 int dp_clock = 0;
1238 int dp_lane_count = 0;
1239 int connector_object_id = 0;
1240 u32 ext_enum = (ext_amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
1241
1242 if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
1243 connector = amdgpu_get_connector_for_encoder_init(encoder);
1244 else
1245 connector = amdgpu_get_connector_for_encoder(encoder);
1246
1247 if (connector) {
1248 struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1249 struct amdgpu_connector_atom_dig *dig_connector =
1250 amdgpu_connector->con_priv;
1251
1252 dp_clock = dig_connector->dp_clock;
1253 dp_lane_count = dig_connector->dp_lane_count;
1254 connector_object_id =
1255 (amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
1256 }
1257
1258 memset(&args, 0, sizeof(args));
1259
1260 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1261 return;
1262
1263 switch (frev) {
1264 case 1:
1265 /* no params on frev 1 */
1266 break;
1267 case 2:
1268 switch (crev) {
1269 case 1:
1270 case 2:
1271 args.v1.sDigEncoder.ucAction = action;
1272 args.v1.sDigEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1273 args.v1.sDigEncoder.ucEncoderMode =
1274 amdgpu_atombios_encoder_get_encoder_mode(encoder);
1275
1276 if (ENCODER_MODE_IS_DP(args.v1.sDigEncoder.ucEncoderMode)) {
1277 if (dp_clock == 270000)
1278 args.v1.sDigEncoder.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
1279 args.v1.sDigEncoder.ucLaneNum = dp_lane_count;
1280 } else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1281 args.v1.sDigEncoder.ucLaneNum = 8;
1282 else
1283 args.v1.sDigEncoder.ucLaneNum = 4;
1284 break;
1285 case 3:
1286 args.v3.sExtEncoder.ucAction = action;
1287 if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
1288 args.v3.sExtEncoder.usConnectorId = cpu_to_le16(connector_object_id);
1289 else
1290 args.v3.sExtEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1291 args.v3.sExtEncoder.ucEncoderMode =
1292 amdgpu_atombios_encoder_get_encoder_mode(encoder);
1293
1294 if (ENCODER_MODE_IS_DP(args.v3.sExtEncoder.ucEncoderMode)) {
1295 if (dp_clock == 270000)
1296 args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
1297 else if (dp_clock == 540000)
1298 args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_5_40GHZ;
1299 args.v3.sExtEncoder.ucLaneNum = dp_lane_count;
1300 } else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1301 args.v3.sExtEncoder.ucLaneNum = 8;
1302 else
1303 args.v3.sExtEncoder.ucLaneNum = 4;
1304 switch (ext_enum) {
1305 case GRAPH_OBJECT_ENUM_ID1:
1306 args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER1;
1307 break;
1308 case GRAPH_OBJECT_ENUM_ID2:
1309 args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER2;
1310 break;
1311 case GRAPH_OBJECT_ENUM_ID3:
1312 args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER3;
1313 break;
1314 }
1315 args.v3.sExtEncoder.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
1316 break;
1317 default:
1318 DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
1319 return;
1320 }
1321 break;
1322 default:
1323 DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
1324 return;
1325 }
1326 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1327 }
1328
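/* Bring a complete DIG path up or down in the required order. For
 * enable: encoder setup and panel mode, eDP panel power, transmitter
 * enable, DP link training, video on and backlight; disable runs the
 * sequence roughly in reverse.
 */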
1329 static void
1330 amdgpu_atombios_encoder_setup_dig(struct drm_encoder *encoder, int action)
1331 {
1332 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1333 struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1334 struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
1335 struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1336 struct amdgpu_connector *amdgpu_connector = NULL;
1337 struct amdgpu_connector_atom_dig *amdgpu_dig_connector = NULL;
1338
1339 if (connector) {
1340 amdgpu_connector = to_amdgpu_connector(connector);
1341 amdgpu_dig_connector = amdgpu_connector->con_priv;
1342 }
1343
1344 if (action == ATOM_ENABLE) {
1345 if (!connector)
1346 dig->panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
1347 else
1348 dig->panel_mode = amdgpu_atombios_dp_get_panel_mode(encoder, connector);
1349
1350 /* setup and enable the encoder */
1351 amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_SETUP, 0);
1352 amdgpu_atombios_encoder_setup_dig_encoder(encoder,
1353 ATOM_ENCODER_CMD_SETUP_PANEL_MODE,
1354 dig->panel_mode);
1355 if (ext_encoder)
1356 amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1357 EXTERNAL_ENCODER_ACTION_V3_ENCODER_SETUP);
1358 if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1359 connector) {
1360 if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
1361 amdgpu_atombios_encoder_set_edp_panel_power(connector,
1362 ATOM_TRANSMITTER_ACTION_POWER_ON);
1363 amdgpu_dig_connector->edp_on = true;
1364 }
1365 }
1366 /* enable the transmitter */
1367 amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1368 ATOM_TRANSMITTER_ACTION_ENABLE,
1369 0, 0);
1370 if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1371 connector) {
1372 /* DP_SET_POWER_D0 is set in amdgpu_atombios_dp_link_train */
1373 amdgpu_atombios_dp_link_train(encoder, connector);
1374 amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_DP_VIDEO_ON, 0);
1375 }
1376 if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
1377 amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder, dig->backlight_level);
1378 if (ext_encoder)
1379 amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_ENABLE);
1380 } else {
1381 if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1382 connector)
1383 amdgpu_atombios_encoder_setup_dig_encoder(encoder,
1384 ATOM_ENCODER_CMD_DP_VIDEO_OFF, 0);
1385 if (ext_encoder)
1386 amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_DISABLE);
1387 if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
1388 amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1389 ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
1390
1391 if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1392 connector)
1393 amdgpu_atombios_dp_set_rx_power_state(connector, DP_SET_POWER_D3);
1394 /* disable the transmitter */
1395 amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1396 ATOM_TRANSMITTER_ACTION_DISABLE, 0, 0);
1397 if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1398 connector) {
1399 if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
1400 amdgpu_atombios_encoder_set_edp_panel_power(connector,
1401 ATOM_TRANSMITTER_ACTION_POWER_OFF);
1402 amdgpu_dig_connector->edp_on = false;
1403 }
1404 }
1405 }
1406 }
1407
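/* DPMS entry point: map the DRM power state onto enable/disable calls
 * for the DIG, DVO or DAC encoder that backs this drm_encoder.
 */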
1408 void
1409 amdgpu_atombios_encoder_dpms(struct drm_encoder *encoder, int mode)
1410 {
1411 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1412
1413 DRM_DEBUG_KMS("encoder dpms %d to mode %d, devices %08x, active_devices %08x\n",
1414 amdgpu_encoder->encoder_id, mode, amdgpu_encoder->devices,
1415 amdgpu_encoder->active_device);
1416 switch (amdgpu_encoder->encoder_id) {
1417 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1418 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1419 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1420 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1421 switch (mode) {
1422 case DRM_MODE_DPMS_ON:
1423 amdgpu_atombios_encoder_setup_dig(encoder, ATOM_ENABLE);
1424 break;
1425 case DRM_MODE_DPMS_STANDBY:
1426 case DRM_MODE_DPMS_SUSPEND:
1427 case DRM_MODE_DPMS_OFF:
1428 amdgpu_atombios_encoder_setup_dig(encoder, ATOM_DISABLE);
1429 break;
1430 }
1431 break;
1432 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1433 switch (mode) {
1434 case DRM_MODE_DPMS_ON:
1435 amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_ENABLE);
1436 break;
1437 case DRM_MODE_DPMS_STANDBY:
1438 case DRM_MODE_DPMS_SUSPEND:
1439 case DRM_MODE_DPMS_OFF:
1440 amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_DISABLE);
1441 break;
1442 }
1443 break;
1444 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1445 switch (mode) {
1446 case DRM_MODE_DPMS_ON:
1447 amdgpu_atombios_encoder_setup_dac(encoder, ATOM_ENABLE);
1448 break;
1449 case DRM_MODE_DPMS_STANDBY:
1450 case DRM_MODE_DPMS_SUSPEND:
1451 case DRM_MODE_DPMS_OFF:
1452 amdgpu_atombios_encoder_setup_dac(encoder, ATOM_DISABLE);
1453 break;
1454 }
1455 break;
1456 default:
1457 return;
1458 }
1459 }
1460
1461 union crtc_source_param {
1462 SELECT_CRTC_SOURCE_PS_ALLOCATION v1;
1463 SELECT_CRTC_SOURCE_PARAMETERS_V2 v2;
1464 SELECT_CRTC_SOURCE_PARAMETERS_V3 v3;
1465 };
1466
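/* Tell the BIOS which CRTC drives this encoder via the
 * SelectCRTC_Source table, along with the encoder mode and, for v3,
 * the destination bpc.
 */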
1467 void
1468 amdgpu_atombios_encoder_set_crtc_source(struct drm_encoder *encoder)
1469 {
1470 struct drm_device *dev = encoder->dev;
1471 struct amdgpu_device *adev = dev->dev_private;
1472 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1473 struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
1474 union crtc_source_param args;
1475 int index = GetIndexIntoMasterTable(COMMAND, SelectCRTC_Source);
1476 uint8_t frev, crev;
1477 struct amdgpu_encoder_atom_dig *dig;
1478
1479 memset(&args, 0, sizeof(args));
1480
1481 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1482 return;
1483
1484 switch (frev) {
1485 case 1:
1486 switch (crev) {
1487 case 1:
1488 default:
1489 args.v1.ucCRTC = amdgpu_crtc->crtc_id;
1490 switch (amdgpu_encoder->encoder_id) {
1491 case ENCODER_OBJECT_ID_INTERNAL_TMDS1:
1492 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1:
1493 args.v1.ucDevice = ATOM_DEVICE_DFP1_INDEX;
1494 break;
1495 case ENCODER_OBJECT_ID_INTERNAL_LVDS:
1496 case ENCODER_OBJECT_ID_INTERNAL_LVTM1:
1497 if (amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT)
1498 args.v1.ucDevice = ATOM_DEVICE_LCD1_INDEX;
1499 else
1500 args.v1.ucDevice = ATOM_DEVICE_DFP3_INDEX;
1501 break;
1502 case ENCODER_OBJECT_ID_INTERNAL_DVO1:
1503 case ENCODER_OBJECT_ID_INTERNAL_DDI:
1504 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1505 args.v1.ucDevice = ATOM_DEVICE_DFP2_INDEX;
1506 break;
1507 case ENCODER_OBJECT_ID_INTERNAL_DAC1:
1508 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1509 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1510 args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
1511 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1512 args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
1513 else
1514 args.v1.ucDevice = ATOM_DEVICE_CRT1_INDEX;
1515 break;
1516 case ENCODER_OBJECT_ID_INTERNAL_DAC2:
1517 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1518 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1519 args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
1520 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1521 args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
1522 else
1523 args.v1.ucDevice = ATOM_DEVICE_CRT2_INDEX;
1524 break;
1525 }
1526 break;
1527 case 2:
1528 args.v2.ucCRTC = amdgpu_crtc->crtc_id;
1529 if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
1530 struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1531
1532 if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
1533 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1534 else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
1535 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
1536 else
1537 args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1538 } else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
1539 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1540 } else {
1541 args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1542 }
1543 switch (amdgpu_encoder->encoder_id) {
1544 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1545 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1546 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1547 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1548 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
1549 dig = amdgpu_encoder->enc_priv;
1550 switch (dig->dig_encoder) {
1551 case 0:
1552 args.v2.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
1553 break;
1554 case 1:
1555 args.v2.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
1556 break;
1557 case 2:
1558 args.v2.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
1559 break;
1560 case 3:
1561 args.v2.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
1562 break;
1563 case 4:
1564 args.v2.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
1565 break;
1566 case 5:
1567 args.v2.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
1568 break;
1569 case 6:
1570 args.v2.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
1571 break;
1572 }
1573 break;
1574 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1575 args.v2.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
1576 break;
1577 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1578 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1579 args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1580 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1581 args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1582 else
1583 args.v2.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
1584 break;
1585 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1586 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1587 args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1588 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1589 args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1590 else
1591 args.v2.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
1592 break;
1593 }
1594 break;
1595 case 3:
1596 args.v3.ucCRTC = amdgpu_crtc->crtc_id;
1597 if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
1598 struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1599
1600 if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
1601 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1602 else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
1603 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
1604 else
1605 args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1606 } else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
1607 args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1608 } else {
1609 args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1610 }
1611 args.v3.ucDstBpc = amdgpu_atombios_encoder_get_bpc(encoder);
1612 switch (amdgpu_encoder->encoder_id) {
1613 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1614 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1615 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1616 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1617 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
1618 dig = amdgpu_encoder->enc_priv;
1619 switch (dig->dig_encoder) {
1620 case 0:
1621 args.v3.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
1622 break;
1623 case 1:
1624 args.v3.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
1625 break;
1626 case 2:
1627 args.v3.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
1628 break;
1629 case 3:
1630 args.v3.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
1631 break;
1632 case 4:
1633 args.v3.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
1634 break;
1635 case 5:
1636 args.v3.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
1637 break;
1638 case 6:
1639 args.v3.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
1640 break;
1641 }
1642 break;
1643 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1644 args.v3.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
1645 break;
1646 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1647 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1648 args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1649 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1650 args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1651 else
1652 args.v3.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
1653 break;
1654 case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1655 if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1656 args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1657 else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1658 args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1659 else
1660 args.v3.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
1661 break;
1662 }
1663 break;
1664 }
1665 break;
1666 default:
1667 DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
1668 return;
1669 }
1670
1671 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1672 }
1673
1674 /* This only needs to be called once at startup */
1675 void
1676 amdgpu_atombios_encoder_init_dig(struct amdgpu_device *adev)
1677 {
1678 struct drm_device *dev = adev->ddev;
1679 struct drm_encoder *encoder;
1680
1681 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
1682 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1683 struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1684
1685 switch (amdgpu_encoder->encoder_id) {
1686 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1687 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1688 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1689 case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1690 amdgpu_atombios_encoder_setup_dig_transmitter(encoder, ATOM_TRANSMITTER_ACTION_INIT,
1691 0, 0);
1692 break;
1693 }
1694
1695 if (ext_encoder)
1696 amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1697 EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT);
1698 }
1699 }
1700
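/* Ask the VBIOS to run DAC load detection for the CRT/TV/CV devices on this
 * encoder; returns false if the encoder drives no analog devices or the
 * DAC_LoadDetection command table cannot be parsed. */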
1701 static bool
1702 amdgpu_atombios_encoder_dac_load_detect(struct drm_encoder *encoder,
1703 struct drm_connector *connector)
1704 {
1705 struct drm_device *dev = encoder->dev;
1706 struct amdgpu_device *adev = dev->dev_private;
1707 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1708 struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1709
1710 if (amdgpu_encoder->devices & (ATOM_DEVICE_TV_SUPPORT |
1711 ATOM_DEVICE_CV_SUPPORT |
1712 ATOM_DEVICE_CRT_SUPPORT)) {
1713 DAC_LOAD_DETECTION_PS_ALLOCATION args;
1714 int index = GetIndexIntoMasterTable(COMMAND, DAC_LoadDetection);
1715 uint8_t frev, crev;
1716
1717 memset(&args, 0, sizeof(args));
1718
1719 if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1720 return false;
1721
1722 args.sDacload.ucMisc = 0;
1723
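/* DAC1-class encoders use DAC A; everything else handled here uses DAC B */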
1724 if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DAC1) ||
1725 (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1))
1726 args.sDacload.ucDacType = ATOM_DAC_A;
1727 else
1728 args.sDacload.ucDacType = ATOM_DAC_B;
1729
1730 if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)
1731 args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT1_SUPPORT);
1732 else if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)
1733 args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT2_SUPPORT);
1734 else if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1735 args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CV_SUPPORT);
1736 if (crev >= 3)
1737 args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
1738 } else if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1739 args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_TV1_SUPPORT);
1740 if (crev >= 3)
1741 args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
1742 }
1743
1744 amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1745
1746 return true;
1747 } else
1748 return false;
1749 }
1750
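/* Report analog connector status: run DAC load detection, then read the
 * result the VBIOS leaves in BIOS scratch register 0. */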
1751 enum drm_connector_status
1752 amdgpu_atombios_encoder_dac_detect(struct drm_encoder *encoder,
1753 struct drm_connector *connector)
1754 {
1755 struct drm_device *dev = encoder->dev;
1756 struct amdgpu_device *adev = dev->dev_private;
1757 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1758 struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1759 uint32_t bios_0_scratch;
1760
1761 if (!amdgpu_atombios_encoder_dac_load_detect(encoder, connector)) {
1762 DRM_DEBUG_KMS("detect returned false\n");
1763 return connector_status_unknown;
1764 }
1765
1766 bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
1767
1768 DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
1769 if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
1770 if (bios_0_scratch & ATOM_S0_CRT1_MASK)
1771 return connector_status_connected;
1772 }
1773 if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
1774 if (bios_0_scratch & ATOM_S0_CRT2_MASK)
1775 return connector_status_connected;
1776 }
1777 if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1778 if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
1779 return connector_status_connected;
1780 }
1781 if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1782 if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
1783 return connector_status_connected; /* CTV */
1784 else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
1785 return connector_status_connected; /* STV */
1786 }
1787 return connector_status_disconnected;
1788 }
1789
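/* Analog load detection routed through an external DP bridge encoder;
 * only meaningful for connectors with CRT support. */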
1790 enum drm_connector_status
1791 amdgpu_atombios_encoder_dig_detect(struct drm_encoder *encoder,
1792 struct drm_connector *connector)
1793 {
1794 struct drm_device *dev = encoder->dev;
1795 struct amdgpu_device *adev = dev->dev_private;
1796 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1797 struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1798 struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1799 u32 bios_0_scratch;
1800
1801 if (!ext_encoder)
1802 return connector_status_unknown;
1803
1804 if ((amdgpu_connector->devices & ATOM_DEVICE_CRT_SUPPORT) == 0)
1805 return connector_status_unknown;
1806
1807 /* load detect on the dp bridge */
1808 amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1809 EXTERNAL_ENCODER_ACTION_V3_DACLOAD_DETECTION);
1810
1811 bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
1812
1813 DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
1814 if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
1815 if (bios_0_scratch & ATOM_S0_CRT1_MASK)
1816 return connector_status_connected;
1817 }
1818 if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
1819 if (bios_0_scratch & ATOM_S0_CRT2_MASK)
1820 return connector_status_connected;
1821 }
1822 if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1823 if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
1824 return connector_status_connected;
1825 }
1826 if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1827 if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
1828 return connector_status_connected; /* CTV */
1829 else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
1830 return connector_status_connected; /* STV */
1831 }
1832 return connector_status_disconnected;
1833 }
1834
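/* Route DDC through the external DP bridge, if one is present. */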
1835 void
1836 amdgpu_atombios_encoder_setup_ext_encoder_ddc(struct drm_encoder *encoder)
1837 {
1838 struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1839
1840 if (ext_encoder)
1841 /* ddc_setup on the dp bridge */
1842 amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1843 EXTERNAL_ENCODER_ACTION_V3_DDC_SETUP);
1844
1845 }
1846
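/* Mirror the connected state of each device into BIOS scratch registers
 * 0, 3 and 6 so the VBIOS sees the current display configuration. */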
1847 void
1848 amdgpu_atombios_encoder_set_bios_scratch_regs(struct drm_connector *connector,
1849 struct drm_encoder *encoder,
1850 bool connected)
1851 {
1852 struct drm_device *dev = connector->dev;
1853 struct amdgpu_device *adev = dev->dev_private;
1854 struct amdgpu_connector *amdgpu_connector =
1855 to_amdgpu_connector(connector);
1856 struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1857 uint32_t bios_0_scratch, bios_3_scratch, bios_6_scratch;
1858
1859 bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
1860 bios_3_scratch = RREG32(mmBIOS_SCRATCH_3);
1861 bios_6_scratch = RREG32(mmBIOS_SCRATCH_6);
1862
1863 if ((amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT) &&
1864 (amdgpu_connector->devices & ATOM_DEVICE_LCD1_SUPPORT)) {
1865 if (connected) {
1866 DRM_DEBUG_KMS("LCD1 connected\n");
1867 bios_0_scratch |= ATOM_S0_LCD1;
1868 bios_3_scratch |= ATOM_S3_LCD1_ACTIVE;
1869 bios_6_scratch |= ATOM_S6_ACC_REQ_LCD1;
1870 } else {
1871 DRM_DEBUG_KMS("LCD1 disconnected\n");
1872 bios_0_scratch &= ~ATOM_S0_LCD1;
1873 bios_3_scratch &= ~ATOM_S3_LCD1_ACTIVE;
1874 bios_6_scratch &= ~ATOM_S6_ACC_REQ_LCD1;
1875 }
1876 }
1877 if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT1_SUPPORT) &&
1878 (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)) {
1879 if (connected) {
1880 DRM_DEBUG_KMS("CRT1 connected\n");
1881 bios_0_scratch |= ATOM_S0_CRT1_COLOR;
1882 bios_3_scratch |= ATOM_S3_CRT1_ACTIVE;
1883 bios_6_scratch |= ATOM_S6_ACC_REQ_CRT1;
1884 } else {
1885 DRM_DEBUG_KMS("CRT1 disconnected\n");
1886 bios_0_scratch &= ~ATOM_S0_CRT1_MASK;
1887 bios_3_scratch &= ~ATOM_S3_CRT1_ACTIVE;
1888 bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT1;
1889 }
1890 }
1891 if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT2_SUPPORT) &&
1892 (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)) {
1893 if (connected) {
1894 DRM_DEBUG_KMS("CRT2 connected\n");
1895 bios_0_scratch |= ATOM_S0_CRT2_COLOR;
1896 bios_3_scratch |= ATOM_S3_CRT2_ACTIVE;
1897 bios_6_scratch |= ATOM_S6_ACC_REQ_CRT2;
1898 } else {
1899 DRM_DEBUG_KMS("CRT2 disconnected\n");
1900 bios_0_scratch &= ~ATOM_S0_CRT2_MASK;
1901 bios_3_scratch &= ~ATOM_S3_CRT2_ACTIVE;
1902 bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT2;
1903 }
1904 }
1905 if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP1_SUPPORT) &&
1906 (amdgpu_connector->devices & ATOM_DEVICE_DFP1_SUPPORT)) {
1907 if (connected) {
1908 DRM_DEBUG_KMS("DFP1 connected\n");
1909 bios_0_scratch |= ATOM_S0_DFP1;
1910 bios_3_scratch |= ATOM_S3_DFP1_ACTIVE;
1911 bios_6_scratch |= ATOM_S6_ACC_REQ_DFP1;
1912 } else {
1913 DRM_DEBUG_KMS("DFP1 disconnected\n");
1914 bios_0_scratch &= ~ATOM_S0_DFP1;
1915 bios_3_scratch &= ~ATOM_S3_DFP1_ACTIVE;
1916 bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP1;
1917 }
1918 }
1919 if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP2_SUPPORT) &&
1920 (amdgpu_connector->devices & ATOM_DEVICE_DFP2_SUPPORT)) {
1921 if (connected) {
1922 DRM_DEBUG_KMS("DFP2 connected\n");
1923 bios_0_scratch |= ATOM_S0_DFP2;
1924 bios_3_scratch |= ATOM_S3_DFP2_ACTIVE;
1925 bios_6_scratch |= ATOM_S6_ACC_REQ_DFP2;
1926 } else {
1927 DRM_DEBUG_KMS("DFP2 disconnected\n");
1928 bios_0_scratch &= ~ATOM_S0_DFP2;
1929 bios_3_scratch &= ~ATOM_S3_DFP2_ACTIVE;
1930 bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP2;
1931 }
1932 }
1933 if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP3_SUPPORT) &&
1934 (amdgpu_connector->devices & ATOM_DEVICE_DFP3_SUPPORT)) {
1935 if (connected) {
1936 DRM_DEBUG_KMS("DFP3 connected\n");
1937 bios_0_scratch |= ATOM_S0_DFP3;
1938 bios_3_scratch |= ATOM_S3_DFP3_ACTIVE;
1939 bios_6_scratch |= ATOM_S6_ACC_REQ_DFP3;
1940 } else {
1941 DRM_DEBUG_KMS("DFP3 disconnected\n");
1942 bios_0_scratch &= ~ATOM_S0_DFP3;
1943 bios_3_scratch &= ~ATOM_S3_DFP3_ACTIVE;
1944 bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP3;
1945 }
1946 }
1947 if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP4_SUPPORT) &&
1948 (amdgpu_connector->devices & ATOM_DEVICE_DFP4_SUPPORT)) {
1949 if (connected) {
1950 DRM_DEBUG_KMS("DFP4 connected\n");
1951 bios_0_scratch |= ATOM_S0_DFP4;
1952 bios_3_scratch |= ATOM_S3_DFP4_ACTIVE;
1953 bios_6_scratch |= ATOM_S6_ACC_REQ_DFP4;
1954 } else {
1955 DRM_DEBUG_KMS("DFP4 disconnected\n");
1956 bios_0_scratch &= ~ATOM_S0_DFP4;
1957 bios_3_scratch &= ~ATOM_S3_DFP4_ACTIVE;
1958 bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP4;
1959 }
1960 }
1961 if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP5_SUPPORT) &&
1962 (amdgpu_connector->devices & ATOM_DEVICE_DFP5_SUPPORT)) {
1963 if (connected) {
1964 DRM_DEBUG_KMS("DFP5 connected\n");
1965 bios_0_scratch |= ATOM_S0_DFP5;
1966 bios_3_scratch |= ATOM_S3_DFP5_ACTIVE;
1967 bios_6_scratch |= ATOM_S6_ACC_REQ_DFP5;
1968 } else {
1969 DRM_DEBUG_KMS("DFP5 disconnected\n");
1970 bios_0_scratch &= ~ATOM_S0_DFP5;
1971 bios_3_scratch &= ~ATOM_S3_DFP5_ACTIVE;
1972 bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP5;
1973 }
1974 }
1975 if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP6_SUPPORT) &&
1976 (amdgpu_connector->devices & ATOM_DEVICE_DFP6_SUPPORT)) {
1977 if (connected) {
1978 DRM_DEBUG_KMS("DFP6 connected\n");
1979 bios_0_scratch |= ATOM_S0_DFP6;
1980 bios_3_scratch |= ATOM_S3_DFP6_ACTIVE;
1981 bios_6_scratch |= ATOM_S6_ACC_REQ_DFP6;
1982 } else {
1983 DRM_DEBUG_KMS("DFP6 disconnected\n");
1984 bios_0_scratch &= ~ATOM_S0_DFP6;
1985 bios_3_scratch &= ~ATOM_S3_DFP6_ACTIVE;
1986 bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP6;
1987 }
1988 }
1989
1990 WREG32(mmBIOS_SCRATCH_0, bios_0_scratch);
1991 WREG32(mmBIOS_SCRATCH_3, bios_3_scratch);
1992 WREG32(mmBIOS_SCRATCH_6, bios_6_scratch);
1993 }
1994
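/* overlay for the two LVDS_Info data table revisions */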
1995 union lvds_info {
1996 struct _ATOM_LVDS_INFO info;
1997 struct _ATOM_LVDS_INFO_V12 info_12;
1998 };
1999
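/* Build the panel info for an LVDS/eDP encoder from the LVDS_Info data
 * table: native mode timings, power sequencing delay, misc flags and any
 * mode patch records (fake EDID, panel resolution overrides). */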
2000 struct amdgpu_encoder_atom_dig *
2001 amdgpu_atombios_encoder_get_lcd_info(struct amdgpu_encoder *encoder)
2002 {
2003 struct drm_device *dev = encoder->base.dev;
2004 struct amdgpu_device *adev = dev->dev_private;
2005 struct amdgpu_mode_info *mode_info = &adev->mode_info;
2006 int index = GetIndexIntoMasterTable(DATA, LVDS_Info);
2007 uint16_t data_offset, misc;
2008 union lvds_info *lvds_info;
2009 uint8_t frev, crev;
2010 struct amdgpu_encoder_atom_dig *lvds = NULL;
2011 int encoder_enum = (encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
2012
2013 if (amdgpu_atom_parse_data_header(mode_info->atom_context, index, NULL,
2014 &frev, &crev, &data_offset)) {
2015 lvds_info =
2016 (union lvds_info *)(mode_info->atom_context->bios + data_offset);
2017 lvds =
2018 kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);
2019
2020 if (!lvds)
2021 return NULL;
2022
2023 lvds->native_mode.clock =
2024 le16_to_cpu(lvds_info->info.sLCDTiming.usPixClk) * 10;
2025 lvds->native_mode.hdisplay =
2026 le16_to_cpu(lvds_info->info.sLCDTiming.usHActive);
2027 lvds->native_mode.vdisplay =
2028 le16_to_cpu(lvds_info->info.sLCDTiming.usVActive);
2029 lvds->native_mode.htotal = lvds->native_mode.hdisplay +
2030 le16_to_cpu(lvds_info->info.sLCDTiming.usHBlanking_Time);
2031 lvds->native_mode.hsync_start = lvds->native_mode.hdisplay +
2032 le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncOffset);
2033 lvds->native_mode.hsync_end = lvds->native_mode.hsync_start +
2034 le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncWidth);
2035 lvds->native_mode.vtotal = lvds->native_mode.vdisplay +
2036 le16_to_cpu(lvds_info->info.sLCDTiming.usVBlanking_Time);
2037 lvds->native_mode.vsync_start = lvds->native_mode.vdisplay +
2038 le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncOffset);
2039 lvds->native_mode.vsync_end = lvds->native_mode.vsync_start +
2040 le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncWidth);
2041 lvds->panel_pwr_delay =
2042 le16_to_cpu(lvds_info->info.usOffDelayInMs);
2043 lvds->lcd_misc = lvds_info->info.ucLVDS_Misc;
2044
2045 misc = le16_to_cpu(lvds_info->info.sLCDTiming.susModeMiscInfo.usAccess);
2046 if (misc & ATOM_VSYNC_POLARITY)
2047 lvds->native_mode.flags |= DRM_MODE_FLAG_NVSYNC;
2048 if (misc & ATOM_HSYNC_POLARITY)
2049 lvds->native_mode.flags |= DRM_MODE_FLAG_NHSYNC;
2050 if (misc & ATOM_COMPOSITESYNC)
2051 lvds->native_mode.flags |= DRM_MODE_FLAG_CSYNC;
2052 if (misc & ATOM_INTERLACE)
2053 lvds->native_mode.flags |= DRM_MODE_FLAG_INTERLACE;
2054 if (misc & ATOM_DOUBLE_CLOCK_MODE)
2055 lvds->native_mode.flags |= DRM_MODE_FLAG_DBLSCAN;
2056
2057 lvds->native_mode.width_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageHSize);
2058 lvds->native_mode.height_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageVSize);
2059
2060 /* set crtc values */
2061 drm_mode_set_crtcinfo(&lvds->native_mode, CRTC_INTERLACE_HALVE_V);
2062
2063 lvds->lcd_ss_id = lvds_info->info.ucSS_Id;
2064
2065 encoder->native_mode = lvds->native_mode;
2066
2067 if (encoder_enum == 2)
2068 lvds->linkb = true;
2069 else
2070 lvds->linkb = false;
2071
2072 /* parse the lcd record table */
2073 if (le16_to_cpu(lvds_info->info.usModePatchTableOffset)) {
2074 ATOM_FAKE_EDID_PATCH_RECORD *fake_edid_record;
2075 ATOM_PANEL_RESOLUTION_PATCH_RECORD *panel_res_record;
2076 bool bad_record = false;
2077 u8 *record;
2078
2079 if ((frev == 1) && (crev < 2))
2080 /* absolute */
2081 record = (u8 *)(mode_info->atom_context->bios +
2082 le16_to_cpu(lvds_info->info.usModePatchTableOffset));
2083 else
2084 /* relative */
2085 record = (u8 *)(mode_info->atom_context->bios +
2086 data_offset +
2087 le16_to_cpu(lvds_info->info.usModePatchTableOffset));
2088 while (*record != ATOM_RECORD_END_TYPE) {
2089 switch (*record) {
2090 case LCD_MODE_PATCH_RECORD_MODE_TYPE:
2091 record += sizeof(ATOM_PATCH_RECORD_MODE);
2092 break;
2093 case LCD_RTS_RECORD_TYPE:
2094 record += sizeof(ATOM_LCD_RTS_RECORD);
2095 break;
2096 case LCD_CAP_RECORD_TYPE:
2097 record += sizeof(ATOM_LCD_MODE_CONTROL_CAP);
2098 break;
2099 case LCD_FAKE_EDID_PATCH_RECORD_TYPE:
2100 fake_edid_record = (ATOM_FAKE_EDID_PATCH_RECORD *)record;
2101 if (fake_edid_record->ucFakeEDIDLength) {
2102 struct edid *edid;
2103 int edid_size =
2104 max((int)EDID_LENGTH, (int)fake_edid_record->ucFakeEDIDLength);
2105 edid = kzalloc(edid_size, GFP_KERNEL); /* zeroed: validation may read past ucFakeEDIDLength */
2106 if (edid) {
2107 memcpy((u8 *)edid, (u8 *)&fake_edid_record->ucFakeEDIDString[0],
2108 fake_edid_record->ucFakeEDIDLength);
2109
2110 if (drm_edid_is_valid(edid)) {
2111 adev->mode_info.bios_hardcoded_edid = edid;
2112 adev->mode_info.bios_hardcoded_edid_size = edid_size;
2113 } else
2114 kfree(edid);
2115 }
2116 }
2117 record += fake_edid_record->ucFakeEDIDLength ?
2118 fake_edid_record->ucFakeEDIDLength + 2 :
2119 sizeof(ATOM_FAKE_EDID_PATCH_RECORD);
2120 break;
2121 case LCD_PANEL_RESOLUTION_RECORD_TYPE:
2122 panel_res_record = (ATOM_PANEL_RESOLUTION_PATCH_RECORD *)record;
2123 lvds->native_mode.width_mm = le16_to_cpu(panel_res_record->usHSize);
2124 lvds->native_mode.height_mm = le16_to_cpu(panel_res_record->usVSize);
2125 record += sizeof(ATOM_PANEL_RESOLUTION_PATCH_RECORD);
2126 break;
2127 default:
2128 DRM_ERROR("Bad LCD record %d\n", *record);
2129 bad_record = true;
2130 break;
2131 }
2132 if (bad_record)
2133 break;
2134 }
2135 }
2136 }
2137 return lvds;
2138 }
2139
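/* Allocate the private data for a DIG encoder with sane defaults;
 * link B is selected from the encoder enum. */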
2140 struct amdgpu_encoder_atom_dig *
2141 amdgpu_atombios_encoder_get_dig_info(struct amdgpu_encoder *amdgpu_encoder)
2142 {
2143 int encoder_enum = (amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
2144 struct amdgpu_encoder_atom_dig *dig = kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);
2145
2146 if (!dig)
2147 return NULL;
2148
2149 /* coherent mode by default */
2150 dig->coherent_mode = true;
2151 dig->dig_encoder = -1;
2152
2153 if (encoder_enum == 2)
2154 dig->linkb = true;
2155 else
2156 dig->linkb = false;
2157
2158 return dig;
2159 }
2160