drivers/gpu/drm/radeon/atombios_dp.c
/*
 * Copyright 2007-8 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 */
#include "drmP.h"
#include "radeon_drm.h"
#include "radeon.h"

#include "atom.h"
#include "atom-bits.h"
#include "drm_dp_helper.h"

/* move these to drm_dp_helper.c/h */
#define DP_LINK_CONFIGURATION_SIZE 9
#define DP_LINK_STATUS_SIZE 6
#define DP_DPCD_SIZE 8

static char *voltage_names[] = {
        "0.4V", "0.6V", "0.8V", "1.2V"
};
static char *pre_emph_names[] = {
        "0dB", "3.5dB", "6dB", "9.5dB"
};

/***** radeon AUX functions *****/
union aux_channel_transaction {
        PROCESS_AUX_CHANNEL_TRANSACTION_PS_ALLOCATION v1;
        PROCESS_AUX_CHANNEL_TRANSACTION_PARAMETERS_V2 v2;
};

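/* Execute a raw DP AUX transaction through the AtomBIOS
 * ProcessAuxChannelTransaction command table: the request bytes are copied
 * into the ATOM scratch buffer, the table is run, and ucReplyStatus reports
 * the outcome (1 = timeout, 2 = flags not zero, 3 = error).  On success the
 * reply payload is copied back out of the scratch buffer at offset 16.
 */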
static int radeon_process_aux_ch(struct radeon_i2c_chan *chan,
                                 u8 *send, int send_bytes,
                                 u8 *recv, int recv_size,
                                 u8 delay, u8 *ack)
{
        struct drm_device *dev = chan->dev;
        struct radeon_device *rdev = dev->dev_private;
        union aux_channel_transaction args;
        int index = GetIndexIntoMasterTable(COMMAND, ProcessAuxChannelTransaction);
        unsigned char *base;
        int recv_bytes;

        memset(&args, 0, sizeof(args));

        base = (unsigned char *)rdev->mode_info.atom_context->scratch;

        memcpy(base, send, send_bytes);

        args.v1.lpAuxRequest = 0;
        args.v1.lpDataOut = 16;
        args.v1.ucDataOutLen = 0;
        args.v1.ucChannelID = chan->rec.i2c_id;
        args.v1.ucDelay = delay / 10;
        if (ASIC_IS_DCE4(rdev))
                args.v2.ucHPD_ID = chan->rec.hpd;

        atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);

        *ack = args.v1.ucReplyStatus;

        /* timeout */
        if (args.v1.ucReplyStatus == 1) {
                DRM_DEBUG_KMS("dp_aux_ch timeout\n");
                return -ETIMEDOUT;
        }

        /* flags not zero */
        if (args.v1.ucReplyStatus == 2) {
                DRM_DEBUG_KMS("dp_aux_ch flags not zero\n");
                return -EBUSY;
        }

        /* error */
        if (args.v1.ucReplyStatus == 3) {
                DRM_DEBUG_KMS("dp_aux_ch error\n");
                return -EIO;
        }

        recv_bytes = args.v1.ucDataOutLen;
        if (recv_bytes > recv_size)
                recv_bytes = recv_size;

        if (recv && recv_size)
                memcpy(recv, base + 16, recv_bytes);

        return recv_bytes;
}

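/* The native read/write helpers below build a 4-byte request header for the
 * AUX channel: bytes 0-1 carry the DPCD address (low byte first), byte 2 the
 * AUX command in its high nibble, and byte 3 packs the total message length
 * in its high nibble with the data length minus one in its low nibble.  Any
 * write payload follows the header.
 */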
static int radeon_dp_aux_native_write(struct radeon_connector *radeon_connector,
                                      u16 address, u8 *send, u8 send_bytes, u8 delay)
{
        struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
        int ret;
        u8 msg[20];
        int msg_bytes = send_bytes + 4;
        u8 ack;
        unsigned retry;

        if (send_bytes > 16)
                return -1;

        msg[0] = address;
        msg[1] = address >> 8;
        msg[2] = AUX_NATIVE_WRITE << 4;
        msg[3] = (msg_bytes << 4) | (send_bytes - 1);
        memcpy(&msg[4], send, send_bytes);

        for (retry = 0; retry < 4; retry++) {
                ret = radeon_process_aux_ch(dig_connector->dp_i2c_bus,
                                            msg, msg_bytes, NULL, 0, delay, &ack);
                if (ret < 0)
                        return ret;
                if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK)
                        return send_bytes;
                else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
                        udelay(400);
                else
                        return -EIO;
        }

        return -EIO;
}

static int radeon_dp_aux_native_read(struct radeon_connector *radeon_connector,
                                     u16 address, u8 *recv, int recv_bytes, u8 delay)
{
        struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
        u8 msg[4];
        int msg_bytes = 4;
        u8 ack;
        int ret;
        unsigned retry;

        msg[0] = address;
        msg[1] = address >> 8;
        msg[2] = AUX_NATIVE_READ << 4;
        msg[3] = (msg_bytes << 4) | (recv_bytes - 1);

        for (retry = 0; retry < 4; retry++) {
                ret = radeon_process_aux_ch(dig_connector->dp_i2c_bus,
                                            msg, msg_bytes, recv, recv_bytes, delay, &ack);
                if (ret < 0)
                        return ret;
                if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_ACK)
                        return ret;
                else if ((ack & AUX_NATIVE_REPLY_MASK) == AUX_NATIVE_REPLY_DEFER)
                        udelay(400);
                else if (ret == 0)
                        return -EPROTO;
                else
                        return -EIO;
        }

        return -EIO;
}

static void radeon_write_dpcd_reg(struct radeon_connector *radeon_connector,
                                  u16 reg, u8 val)
{
        radeon_dp_aux_native_write(radeon_connector, reg, &val, 1, 0);
}

static u8 radeon_read_dpcd_reg(struct radeon_connector *radeon_connector,
                               u16 reg)
{
        u8 val = 0;

        radeon_dp_aux_native_read(radeon_connector, reg, &val, 1, 0);

        return val;
}

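/* I2C-over-AUX transfer callback used by the DP i2c algo: each i2c byte is
 * wrapped in an AUX_I2C command with the MOT (middle-of-transaction) bit set
 * until the i2c STOP, and both the native and the i2c reply fields are
 * checked, retrying on DEFER.
 */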
int radeon_dp_i2c_aux_ch(struct i2c_adapter *adapter, int mode,
                         u8 write_byte, u8 *read_byte)
{
        struct i2c_algo_dp_aux_data *algo_data = adapter->algo_data;
        struct radeon_i2c_chan *auxch = (struct radeon_i2c_chan *)adapter;
        u16 address = algo_data->address;
        u8 msg[5];
        u8 reply[2];
        unsigned retry;
        int msg_bytes;
        int reply_bytes = 1;
        int ret;
        u8 ack;

        /* Set up the command byte */
        if (mode & MODE_I2C_READ)
                msg[2] = AUX_I2C_READ << 4;
        else
                msg[2] = AUX_I2C_WRITE << 4;

        if (!(mode & MODE_I2C_STOP))
                msg[2] |= AUX_I2C_MOT << 4;

        msg[0] = address;
        msg[1] = address >> 8;

        switch (mode) {
        case MODE_I2C_WRITE:
                msg_bytes = 5;
                msg[3] = msg_bytes << 4;
                msg[4] = write_byte;
                break;
        case MODE_I2C_READ:
                msg_bytes = 4;
                msg[3] = msg_bytes << 4;
                break;
        default:
                msg_bytes = 4;
                msg[3] = 3 << 4;
                break;
        }

        for (retry = 0; retry < 4; retry++) {
                ret = radeon_process_aux_ch(auxch,
                                            msg, msg_bytes, reply, reply_bytes, 0, &ack);
                if (ret < 0) {
                        DRM_DEBUG_KMS("aux_ch failed %d\n", ret);
                        return ret;
                }

                switch (ack & AUX_NATIVE_REPLY_MASK) {
                case AUX_NATIVE_REPLY_ACK:
                        /* I2C-over-AUX Reply field is only valid
                         * when paired with AUX ACK.
                         */
                        break;
                case AUX_NATIVE_REPLY_NACK:
                        DRM_DEBUG_KMS("aux_ch native nack\n");
                        return -EREMOTEIO;
                case AUX_NATIVE_REPLY_DEFER:
                        DRM_DEBUG_KMS("aux_ch native defer\n");
                        udelay(400);
                        continue;
                default:
                        DRM_ERROR("aux_ch invalid native reply 0x%02x\n", ack);
                        return -EREMOTEIO;
                }

                switch (ack & AUX_I2C_REPLY_MASK) {
                case AUX_I2C_REPLY_ACK:
                        if (mode == MODE_I2C_READ)
                                *read_byte = reply[0];
                        return ret;
                case AUX_I2C_REPLY_NACK:
                        DRM_DEBUG_KMS("aux_i2c nack\n");
                        return -EREMOTEIO;
                case AUX_I2C_REPLY_DEFER:
                        DRM_DEBUG_KMS("aux_i2c defer\n");
                        udelay(400);
                        break;
                default:
                        DRM_ERROR("aux_i2c invalid reply 0x%02x\n", ack);
                        return -EREMOTEIO;
                }
        }

        DRM_ERROR("aux i2c too many retries, giving up\n");
        return -EREMOTEIO;
}

/***** general DP utility functions *****/

static u8 dp_link_status(u8 link_status[DP_LINK_STATUS_SIZE], int r)
{
        return link_status[r - DP_LANE0_1_STATUS];
}

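/* Each DPCD lane-status byte packs the status of two lanes, four bits per
 * lane, so lane N lives in byte DP_LANE0_1_STATUS + (N >> 1), in the high
 * nibble for odd lanes.
 */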
static u8 dp_get_lane_status(u8 link_status[DP_LINK_STATUS_SIZE],
                             int lane)
{
        int i = DP_LANE0_1_STATUS + (lane >> 1);
        int s = (lane & 1) * 4;
        u8 l = dp_link_status(link_status, i);
        return (l >> s) & 0xf;
}

static bool dp_clock_recovery_ok(u8 link_status[DP_LINK_STATUS_SIZE],
                                 int lane_count)
{
        int lane;
        u8 lane_status;

        for (lane = 0; lane < lane_count; lane++) {
                lane_status = dp_get_lane_status(link_status, lane);
                if ((lane_status & DP_LANE_CR_DONE) == 0)
                        return false;
        }
        return true;
}

static bool dp_channel_eq_ok(u8 link_status[DP_LINK_STATUS_SIZE],
                             int lane_count)
{
        u8 lane_align;
        u8 lane_status;
        int lane;

        lane_align = dp_link_status(link_status,
                                    DP_LANE_ALIGN_STATUS_UPDATED);
        if ((lane_align & DP_INTERLANE_ALIGN_DONE) == 0)
                return false;
        for (lane = 0; lane < lane_count; lane++) {
                lane_status = dp_get_lane_status(link_status, lane);
                if ((lane_status & DP_CHANNEL_EQ_BITS) != DP_CHANNEL_EQ_BITS)
                        return false;
        }
        return true;
}

static u8 dp_get_adjust_request_voltage(u8 link_status[DP_LINK_STATUS_SIZE],
                                        int lane)
{
        int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
        int s = ((lane & 1) ?
                 DP_ADJUST_VOLTAGE_SWING_LANE1_SHIFT :
                 DP_ADJUST_VOLTAGE_SWING_LANE0_SHIFT);
        u8 l = dp_link_status(link_status, i);

        return ((l >> s) & 0x3) << DP_TRAIN_VOLTAGE_SWING_SHIFT;
}

static u8 dp_get_adjust_request_pre_emphasis(u8 link_status[DP_LINK_STATUS_SIZE],
                                             int lane)
{
        int i = DP_ADJUST_REQUEST_LANE0_1 + (lane >> 1);
        int s = ((lane & 1) ?
                 DP_ADJUST_PRE_EMPHASIS_LANE1_SHIFT :
                 DP_ADJUST_PRE_EMPHASIS_LANE0_SHIFT);
        u8 l = dp_link_status(link_status, i);

        return ((l >> s) & 0x3) << DP_TRAIN_PRE_EMPHASIS_SHIFT;
}

#define DP_VOLTAGE_MAX      DP_TRAIN_VOLTAGE_SWING_1200
#define DP_PRE_EMPHASIS_MAX DP_TRAIN_PRE_EMPHASIS_9_5

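/* Fold the per-lane adjustment requests from the sink into a single setting:
 * take the highest requested voltage swing and pre-emphasis across all active
 * lanes, flag MAX_SWING/MAX_PRE_EMPHASIS when the top level is reached, and
 * program that same value on every lane.
 */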
static void dp_get_adjust_train(u8 link_status[DP_LINK_STATUS_SIZE],
                                int lane_count,
                                u8 train_set[4])
{
        u8 v = 0;
        u8 p = 0;
        int lane;

        for (lane = 0; lane < lane_count; lane++) {
                u8 this_v = dp_get_adjust_request_voltage(link_status, lane);
                u8 this_p = dp_get_adjust_request_pre_emphasis(link_status, lane);

                DRM_DEBUG_KMS("requested signal parameters: lane %d voltage %s pre_emph %s\n",
                              lane,
                              voltage_names[this_v >> DP_TRAIN_VOLTAGE_SWING_SHIFT],
                              pre_emph_names[this_p >> DP_TRAIN_PRE_EMPHASIS_SHIFT]);

                if (this_v > v)
                        v = this_v;
                if (this_p > p)
                        p = this_p;
        }

        if (v >= DP_VOLTAGE_MAX)
                v |= DP_TRAIN_MAX_SWING_REACHED;

        if (p >= DP_PRE_EMPHASIS_MAX)
                p |= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED;

        DRM_DEBUG_KMS("using signal parameters: voltage %s pre_emph %s\n",
                      voltage_names[(v & DP_TRAIN_VOLTAGE_SWING_MASK) >> DP_TRAIN_VOLTAGE_SWING_SHIFT],
                      pre_emph_names[(p & DP_TRAIN_PRE_EMPHASIS_MASK) >> DP_TRAIN_PRE_EMPHASIS_SHIFT]);

        for (lane = 0; lane < 4; lane++)
                train_set[lane] = v | p;
}

/* convert bits per color to bits per pixel */
/* get bpc from the EDID */
static int convert_bpc_to_bpp(int bpc)
{
        if (bpc == 0)
                return 24;
        else
                return bpc * 3;
}

/* get the max pix clock supported by the link rate and lane num */
static int dp_get_max_dp_pix_clock(int link_rate,
                                   int lane_num,
                                   int bpp)
{
        return (link_rate * lane_num * 8) / bpp;
}
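
/* For example, a two-lane link at 270000 (2.7 Gbps per lane) carrying 24 bpp
 * tops out at (270000 * 2 * 8) / 24 = 180000, i.e. a 180 MHz pixel clock.
 */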

static int dp_get_max_link_rate(u8 dpcd[DP_DPCD_SIZE])
{
        switch (dpcd[DP_MAX_LINK_RATE]) {
        case DP_LINK_BW_1_62:
        default:
                return 162000;
        case DP_LINK_BW_2_7:
                return 270000;
        case DP_LINK_BW_5_4:
                return 540000;
        }
}

static u8 dp_get_max_lane_number(u8 dpcd[DP_DPCD_SIZE])
{
        return dpcd[DP_MAX_LANE_COUNT] & DP_MAX_LANE_COUNT_MASK;
}

static u8 dp_get_dp_link_rate_coded(int link_rate)
{
        switch (link_rate) {
        case 162000:
        default:
                return DP_LINK_BW_1_62;
        case 270000:
                return DP_LINK_BW_2_7;
        case 540000:
                return DP_LINK_BW_5_4;
        }
}

/***** radeon specific DP functions *****/

/* First get the minimum lane count needed for the pixel clock (a low link
 * rate is preferred when the link clock is chosen), then check the maximum
 * lane count supported by the DP sink; if the sink supports fewer lanes,
 * that maximum is used instead.
 */
static int radeon_dp_get_dp_lane_number(struct drm_connector *connector,
                                        u8 dpcd[DP_DPCD_SIZE],
                                        int pix_clock)
{
        int bpp = convert_bpc_to_bpp(connector->display_info.bpc);
        int max_link_rate = dp_get_max_link_rate(dpcd);
        int max_lane_num = dp_get_max_lane_number(dpcd);
        int lane_num;
        int max_dp_pix_clock;

        for (lane_num = 1; lane_num < max_lane_num; lane_num <<= 1) {
                max_dp_pix_clock = dp_get_max_dp_pix_clock(max_link_rate, lane_num, bpp);
                if (pix_clock <= max_dp_pix_clock)
                        break;
        }

        return lane_num;
}
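
/* E.g. 1920x1080@60 (a 148500 kHz pixel clock) at 24 bpp on a sink whose max
 * link rate is 270000: one lane only reaches 90000, two lanes reach 180000,
 * so the loop settles on two lanes.
 */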

static int radeon_dp_get_dp_link_clock(struct drm_connector *connector,
                                       u8 dpcd[DP_DPCD_SIZE],
                                       int pix_clock)
{
        int bpp = convert_bpc_to_bpp(connector->display_info.bpc);
        int lane_num, max_pix_clock;

        if (radeon_connector_encoder_is_dp_bridge(connector))
                return 270000;

        lane_num = radeon_dp_get_dp_lane_number(connector, dpcd, pix_clock);
        max_pix_clock = dp_get_max_dp_pix_clock(162000, lane_num, bpp);
        if (pix_clock <= max_pix_clock)
                return 162000;
        max_pix_clock = dp_get_max_dp_pix_clock(270000, lane_num, bpp);
        if (pix_clock <= max_pix_clock)
                return 270000;
        if (radeon_connector_is_dp12_capable(connector)) {
                max_pix_clock = dp_get_max_dp_pix_clock(540000, lane_num, bpp);
                if (pix_clock <= max_pix_clock)
                        return 540000;
        }

        return dp_get_max_link_rate(dpcd);
}

static u8 radeon_dp_encoder_service(struct radeon_device *rdev,
                                    int action, int dp_clock,
                                    u8 ucconfig, u8 lane_num)
{
        DP_ENCODER_SERVICE_PARAMETERS args;
        int index = GetIndexIntoMasterTable(COMMAND, DPEncoderService);

        memset(&args, 0, sizeof(args));
        args.ucLinkClock = dp_clock / 10;
        args.ucConfig = ucconfig;
        args.ucAction = action;
        args.ucLaneNum = lane_num;
        args.ucStatus = 0;

        atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
        return args.ucStatus;
}

u8 radeon_dp_getsinktype(struct radeon_connector *radeon_connector)
{
        struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
        struct drm_device *dev = radeon_connector->base.dev;
        struct radeon_device *rdev = dev->dev_private;

        return radeon_dp_encoder_service(rdev, ATOM_DP_ACTION_GET_SINK_TYPE, 0,
                                         dig_connector->dp_i2c_bus->rec.i2c_id, 0);
}

bool radeon_dp_getdpcd(struct radeon_connector *radeon_connector)
{
        struct radeon_connector_atom_dig *dig_connector = radeon_connector->con_priv;
        u8 msg[25];
        int ret, i;

        ret = radeon_dp_aux_native_read(radeon_connector, DP_DPCD_REV, msg, 8, 0);
        if (ret > 0) {
                memcpy(dig_connector->dpcd, msg, 8);
                DRM_DEBUG_KMS("DPCD: ");
                for (i = 0; i < 8; i++)
                        DRM_DEBUG_KMS("%02x ", msg[i]);
                DRM_DEBUG_KMS("\n");
                return true;
        }
        dig_connector->dpcd[0] = 0;
        return false;
}

static void radeon_dp_set_panel_mode(struct drm_encoder *encoder,
                                     struct drm_connector *connector)
{
        struct drm_device *dev = encoder->dev;
        struct radeon_device *rdev = dev->dev_private;
        int panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;

        if (!ASIC_IS_DCE4(rdev))
                return;

        if (radeon_connector_encoder_is_dp_bridge(connector))
                panel_mode = DP_PANEL_MODE_INTERNAL_DP1_MODE;

        atombios_dig_encoder_setup(encoder,
                                   ATOM_ENCODER_CMD_SETUP_PANEL_MODE,
                                   panel_mode);
}

void radeon_dp_set_link_config(struct drm_connector *connector,
                               struct drm_display_mode *mode)
{
        struct radeon_connector *radeon_connector = to_radeon_connector(connector);
        struct radeon_connector_atom_dig *dig_connector;

        if (!radeon_connector->con_priv)
                return;
        dig_connector = radeon_connector->con_priv;

        if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
            (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
                dig_connector->dp_clock =
                        radeon_dp_get_dp_link_clock(connector, dig_connector->dpcd, mode->clock);
                dig_connector->dp_lane_count =
                        radeon_dp_get_dp_lane_number(connector, dig_connector->dpcd, mode->clock);
        }
}

int radeon_dp_mode_valid_helper(struct drm_connector *connector,
                                struct drm_display_mode *mode)
{
        struct radeon_connector *radeon_connector = to_radeon_connector(connector);
        struct radeon_connector_atom_dig *dig_connector;
        int dp_clock;

        if (!radeon_connector->con_priv)
                return MODE_CLOCK_HIGH;
        dig_connector = radeon_connector->con_priv;

        dp_clock =
                radeon_dp_get_dp_link_clock(connector, dig_connector->dpcd, mode->clock);

        if ((dp_clock == 540000) &&
            (!radeon_connector_is_dp12_capable(connector)))
                return MODE_CLOCK_HIGH;

        return MODE_OK;
}

static bool radeon_dp_get_link_status(struct radeon_connector *radeon_connector,
                                      u8 link_status[DP_LINK_STATUS_SIZE])
{
        int ret;
        ret = radeon_dp_aux_native_read(radeon_connector, DP_LANE0_1_STATUS,
                                        link_status, DP_LINK_STATUS_SIZE, 100);
        if (ret <= 0) {
                DRM_ERROR("displayport link status failed\n");
                return false;
        }

        DRM_DEBUG_KMS("link status %02x %02x %02x %02x %02x %02x\n",
                      link_status[0], link_status[1], link_status[2],
                      link_status[3], link_status[4], link_status[5]);
        return true;
}

bool radeon_dp_needs_link_train(struct radeon_connector *radeon_connector)
{
        u8 link_status[DP_LINK_STATUS_SIZE];
        struct radeon_connector_atom_dig *dig = radeon_connector->con_priv;

        if (!radeon_dp_get_link_status(radeon_connector, link_status))
                return false;
        if (dp_channel_eq_ok(link_status, dig->dp_lane_count))
                return false;
        return true;
}

struct radeon_dp_link_train_info {
        struct radeon_device *rdev;
        struct drm_encoder *encoder;
        struct drm_connector *connector;
        struct radeon_connector *radeon_connector;
        int enc_id;
        int dp_clock;
        int dp_lane_count;
        int rd_interval;
        bool tp3_supported;
        u8 dpcd[8];
        u8 train_set[4];
        u8 link_status[DP_LINK_STATUS_SIZE];
        u8 tries;
        bool use_dpencoder;
};

static void radeon_dp_update_vs_emph(struct radeon_dp_link_train_info *dp_info)
{
        /* set the initial vs/emph on the source */
        atombios_dig_transmitter_setup(dp_info->encoder,
                                       ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH,
                                       0, dp_info->train_set[0]); /* sets all lanes at once */

        /* set the vs/emph on the sink */
        radeon_dp_aux_native_write(dp_info->radeon_connector, DP_TRAINING_LANE0_SET,
                                   dp_info->train_set, dp_info->dp_lane_count, 0);
}

static void radeon_dp_set_tp(struct radeon_dp_link_train_info *dp_info, int tp)
{
        int rtp = 0;

        /* set training pattern on the source */
        if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder) {
                switch (tp) {
                case DP_TRAINING_PATTERN_1:
                        rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1;
                        break;
                case DP_TRAINING_PATTERN_2:
                        rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2;
                        break;
                case DP_TRAINING_PATTERN_3:
                        rtp = ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3;
                        break;
                }
                atombios_dig_encoder_setup(dp_info->encoder, rtp, 0);
        } else {
                switch (tp) {
                case DP_TRAINING_PATTERN_1:
                        rtp = 0;
                        break;
                case DP_TRAINING_PATTERN_2:
                        rtp = 1;
                        break;
                }
                radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_PATTERN_SEL,
                                          dp_info->dp_clock, dp_info->enc_id, rtp);
        }

        /* enable training pattern on the sink */
        radeon_write_dpcd_reg(dp_info->radeon_connector, DP_TRAINING_PATTERN_SET, tp);
}

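/* Prepare both ends of the link for training: wake the sink (DPCD 1.1+),
 * set downspread to match the sink's capability, select the panel mode,
 * program the lane count (with enhanced framing on DPCD 1.1+) and link rate,
 * kick off training on the source, and make sure the sink starts with the
 * training pattern disabled.
 */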
static int radeon_dp_link_train_init(struct radeon_dp_link_train_info *dp_info)
{
        u8 tmp;

        /* power up the sink */
        if (dp_info->dpcd[0] >= 0x11)
                radeon_write_dpcd_reg(dp_info->radeon_connector,
                                      DP_SET_POWER, DP_SET_POWER_D0);

        /* possibly enable downspread on the sink */
        if (dp_info->dpcd[3] & 0x1)
                radeon_write_dpcd_reg(dp_info->radeon_connector,
                                      DP_DOWNSPREAD_CTRL, DP_SPREAD_AMP_0_5);
        else
                radeon_write_dpcd_reg(dp_info->radeon_connector,
                                      DP_DOWNSPREAD_CTRL, 0);

        radeon_dp_set_panel_mode(dp_info->encoder, dp_info->connector);

        /* set the lane count on the sink */
        tmp = dp_info->dp_lane_count;
        if (dp_info->dpcd[0] >= 0x11)
                tmp |= DP_LANE_COUNT_ENHANCED_FRAME_EN;
        radeon_write_dpcd_reg(dp_info->radeon_connector, DP_LANE_COUNT_SET, tmp);

        /* set the link rate on the sink */
        tmp = dp_get_dp_link_rate_coded(dp_info->dp_clock);
        radeon_write_dpcd_reg(dp_info->radeon_connector, DP_LINK_BW_SET, tmp);

        /* start training on the source */
        if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder)
                atombios_dig_encoder_setup(dp_info->encoder,
                                           ATOM_ENCODER_CMD_DP_LINK_TRAINING_START, 0);
        else
                radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_START,
                                          dp_info->dp_clock, dp_info->enc_id, 0);

        /* disable the training pattern on the sink */
        radeon_write_dpcd_reg(dp_info->radeon_connector,
                              DP_TRAINING_PATTERN_SET,
                              DP_TRAINING_PATTERN_DISABLE);

        return 0;
}

static int radeon_dp_link_train_finish(struct radeon_dp_link_train_info *dp_info)
{
        udelay(400);

        /* disable the training pattern on the sink */
        radeon_write_dpcd_reg(dp_info->radeon_connector,
                              DP_TRAINING_PATTERN_SET,
                              DP_TRAINING_PATTERN_DISABLE);

        /* disable the training pattern on the source */
        if (ASIC_IS_DCE4(dp_info->rdev) || !dp_info->use_dpencoder)
                atombios_dig_encoder_setup(dp_info->encoder,
                                           ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE, 0);
        else
                radeon_dp_encoder_service(dp_info->rdev, ATOM_DP_ACTION_TRAINING_COMPLETE,
                                          dp_info->dp_clock, dp_info->enc_id, 0);

        return 0;
}

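/* Clock recovery phase: transmit training pattern 1 and keep applying the
 * voltage swing / pre-emphasis the sink requests until every lane reports
 * CR_DONE.  Give up if all lanes hit max swing or if the same voltage has
 * been tried five times in a row.
 */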
static int radeon_dp_link_train_cr(struct radeon_dp_link_train_info *dp_info)
{
        bool clock_recovery;
        u8 voltage;
        int i;

        radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_1);
        memset(dp_info->train_set, 0, 4);
        radeon_dp_update_vs_emph(dp_info);

        udelay(400);

        /* clock recovery loop */
        clock_recovery = false;
        dp_info->tries = 0;
        voltage = 0xff;
        while (1) {
                if (dp_info->rd_interval == 0)
                        udelay(100);
                else
                        mdelay(dp_info->rd_interval * 4);

                if (!radeon_dp_get_link_status(dp_info->radeon_connector, dp_info->link_status))
                        break;

                if (dp_clock_recovery_ok(dp_info->link_status, dp_info->dp_lane_count)) {
                        clock_recovery = true;
                        break;
                }

                for (i = 0; i < dp_info->dp_lane_count; i++) {
                        if ((dp_info->train_set[i] & DP_TRAIN_MAX_SWING_REACHED) == 0)
                                break;
                }
                if (i == dp_info->dp_lane_count) {
                        DRM_ERROR("clock recovery reached max voltage\n");
                        break;
                }

                if ((dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK) == voltage) {
                        ++dp_info->tries;
                        if (dp_info->tries == 5) {
                                DRM_ERROR("clock recovery tried 5 times\n");
                                break;
                        }
                } else
                        dp_info->tries = 0;

                voltage = dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK;

                /* Compute new train_set as requested by sink */
                dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count, dp_info->train_set);

                radeon_dp_update_vs_emph(dp_info);
        }
        if (!clock_recovery) {
                DRM_ERROR("clock recovery failed\n");
                return -1;
        } else {
                DRM_DEBUG_KMS("clock recovery at voltage %d pre-emphasis %d\n",
                              dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
                              (dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK) >>
                              DP_TRAIN_PRE_EMPHASIS_SHIFT);
                return 0;
        }
}

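/* Channel equalization phase: switch to training pattern 3 when the sink
 * supports it (pattern 2 otherwise) and keep adjusting the drive settings
 * until dp_channel_eq_ok() reports all lanes equalized and inter-lane
 * alignment done, bailing out after five attempts.
 */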
static int radeon_dp_link_train_ce(struct radeon_dp_link_train_info *dp_info)
{
        bool channel_eq;

        if (dp_info->tp3_supported)
                radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_3);
        else
                radeon_dp_set_tp(dp_info, DP_TRAINING_PATTERN_2);

        /* channel equalization loop */
        dp_info->tries = 0;
        channel_eq = false;
        while (1) {
                if (dp_info->rd_interval == 0)
                        udelay(400);
                else
                        mdelay(dp_info->rd_interval * 4);

                if (!radeon_dp_get_link_status(dp_info->radeon_connector, dp_info->link_status))
                        break;

                if (dp_channel_eq_ok(dp_info->link_status, dp_info->dp_lane_count)) {
                        channel_eq = true;
                        break;
                }

                /* Try 5 times */
                if (dp_info->tries > 5) {
                        DRM_ERROR("channel eq failed: 5 tries\n");
                        break;
                }

                /* Compute new train_set as requested by sink */
                dp_get_adjust_train(dp_info->link_status, dp_info->dp_lane_count, dp_info->train_set);

                radeon_dp_update_vs_emph(dp_info);
                dp_info->tries++;
        }

        if (!channel_eq) {
                DRM_ERROR("channel eq failed\n");
                return -1;
        } else {
                DRM_DEBUG_KMS("channel eq at voltage %d pre-emphasis %d\n",
                              dp_info->train_set[0] & DP_TRAIN_VOLTAGE_SWING_MASK,
                              (dp_info->train_set[0] & DP_TRAIN_PRE_EMPHASIS_MASK)
                              >> DP_TRAIN_PRE_EMPHASIS_SHIFT);
                return 0;
        }
}

void radeon_dp_link_train(struct drm_encoder *encoder,
                          struct drm_connector *connector)
{
        struct drm_device *dev = encoder->dev;
        struct radeon_device *rdev = dev->dev_private;
        struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
        struct radeon_encoder_atom_dig *dig;
        struct radeon_connector *radeon_connector;
        struct radeon_connector_atom_dig *dig_connector;
        struct radeon_dp_link_train_info dp_info;
        int index;
        u8 tmp, frev, crev;

        if (!radeon_encoder->enc_priv)
                return;
        dig = radeon_encoder->enc_priv;

        radeon_connector = to_radeon_connector(connector);
        if (!radeon_connector->con_priv)
                return;
        dig_connector = radeon_connector->con_priv;

        if ((dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_DISPLAYPORT) &&
            (dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_eDP))
                return;

        /* DPEncoderService newer than 1.1 can't properly program the
         * training pattern.  For such versions, use DIGxEncoderControl
         * (x == 1 | 2) instead.
         */
        dp_info.use_dpencoder = true;
        index = GetIndexIntoMasterTable(COMMAND, DPEncoderService);
        if (atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev)) {
                if (crev > 1)
                        dp_info.use_dpencoder = false;
        }

        dp_info.enc_id = 0;
        if (dig->dig_encoder)
                dp_info.enc_id |= ATOM_DP_CONFIG_DIG2_ENCODER;
        else
                dp_info.enc_id |= ATOM_DP_CONFIG_DIG1_ENCODER;
        if (dig->linkb)
                dp_info.enc_id |= ATOM_DP_CONFIG_LINK_B;
        else
                dp_info.enc_id |= ATOM_DP_CONFIG_LINK_A;

        dp_info.rd_interval = radeon_read_dpcd_reg(radeon_connector, DP_TRAINING_AUX_RD_INTERVAL);
        tmp = radeon_read_dpcd_reg(radeon_connector, DP_MAX_LANE_COUNT);
        if (ASIC_IS_DCE5(rdev) && (tmp & DP_TPS3_SUPPORTED))
                dp_info.tp3_supported = true;
        else
                dp_info.tp3_supported = false;

        memcpy(dp_info.dpcd, dig_connector->dpcd, 8);
        dp_info.rdev = rdev;
        dp_info.encoder = encoder;
        dp_info.connector = connector;
        dp_info.radeon_connector = radeon_connector;
        dp_info.dp_lane_count = dig_connector->dp_lane_count;
        dp_info.dp_clock = dig_connector->dp_clock;

        if (radeon_dp_link_train_init(&dp_info))
                goto done;
        if (radeon_dp_link_train_cr(&dp_info))
                goto done;
        if (radeon_dp_link_train_ce(&dp_info))
                goto done;
done:
        if (radeon_dp_link_train_finish(&dp_info))
                return;
}