2 * Copyright 2007-8 Advanced Micro Devices, Inc.
3 * Copyright 2008 Red Hat Inc.
5 * Permission is hereby granted, free of charge, to any person obtaining a
6 * copy of this software and associated documentation files (the "Software"),
7 * to deal in the Software without restriction, including without limitation
8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9 * and/or sell copies of the Software, and to permit persons to whom the
10 * Software is furnished to do so, subject to the following conditions:
12 * The above copyright notice and this permission notice shall be included in
13 * all copies or substantial portions of the Software.
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
19 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
20 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
21 * OTHER DEALINGS IN THE SOFTWARE.
23 * Authors: Dave Airlie
27 #include "radeon_drm.h"
31 #include "atom-bits.h"
32 #include "drm_dp_helper.h"
/* move these to drm_dp_helper.c/h */
#define DP_LINK_CONFIGURATION_SIZE 9	/* bytes in a cached DP link configuration */
#define DP_LINK_STATUS_SIZE	   6	/* bytes of DPCD link status read from DP_LANE0_1_STATUS */
#define DP_DPCD_SIZE	           8	/* bytes of DPCD receiver caps cached from DP_DPCD_REV */
/*
 * Human-readable names of the DP training voltage-swing and pre-emphasis
 * levels, indexed by (level >> DP_TRAIN_*_SHIFT); used only for the debug
 * output in dp_get_adjust_train().
 */
static const char *voltage_names[] = {
	"0.4V", "0.6V", "0.8V", "1.2V"
};
static const char *pre_emph_names[] = {
	"0dB", "3.5dB", "6dB", "9.5dB"
};
46 /***** radeon AUX functions *****/
47 union aux_channel_transaction
{
48 PROCESS_AUX_CHANNEL_TRANSACTION_PS_ALLOCATION v1
;
49 PROCESS_AUX_CHANNEL_TRANSACTION_PARAMETERS_V2 v2
;
52 static int radeon_process_aux_ch(struct radeon_i2c_chan
*chan
,
53 u8
*send
, int send_bytes
,
54 u8
*recv
, int recv_size
,
57 struct drm_device
*dev
= chan
->dev
;
58 struct radeon_device
*rdev
= dev
->dev_private
;
59 union aux_channel_transaction args
;
60 int index
= GetIndexIntoMasterTable(COMMAND
, ProcessAuxChannelTransaction
);
64 memset(&args
, 0, sizeof(args
));
66 base
= (unsigned char *)rdev
->mode_info
.atom_context
->scratch
;
68 memcpy(base
, send
, send_bytes
);
70 args
.v1
.lpAuxRequest
= 0;
71 args
.v1
.lpDataOut
= 16;
72 args
.v1
.ucDataOutLen
= 0;
73 args
.v1
.ucChannelID
= chan
->rec
.i2c_id
;
74 args
.v1
.ucDelay
= delay
/ 10;
75 if (ASIC_IS_DCE4(rdev
))
76 args
.v2
.ucHPD_ID
= chan
->rec
.hpd
;
78 atom_execute_table(rdev
->mode_info
.atom_context
, index
, (uint32_t *)&args
);
80 *ack
= args
.v1
.ucReplyStatus
;
83 if (args
.v1
.ucReplyStatus
== 1) {
84 DRM_DEBUG_KMS("dp_aux_ch timeout\n");
89 if (args
.v1
.ucReplyStatus
== 2) {
90 DRM_DEBUG_KMS("dp_aux_ch flags not zero\n");
95 if (args
.v1
.ucReplyStatus
== 3) {
96 DRM_DEBUG_KMS("dp_aux_ch error\n");
100 recv_bytes
= args
.v1
.ucDataOutLen
;
101 if (recv_bytes
> recv_size
)
102 recv_bytes
= recv_size
;
104 if (recv
&& recv_size
)
105 memcpy(recv
, base
+ 16, recv_bytes
);
110 static int radeon_dp_aux_native_write(struct radeon_connector
*radeon_connector
,
111 u16 address
, u8
*send
, u8 send_bytes
, u8 delay
)
113 struct radeon_connector_atom_dig
*dig_connector
= radeon_connector
->con_priv
;
116 int msg_bytes
= send_bytes
+ 4;
124 msg
[1] = address
>> 8;
125 msg
[2] = AUX_NATIVE_WRITE
<< 4;
126 msg
[3] = (msg_bytes
<< 4) | (send_bytes
- 1);
127 memcpy(&msg
[4], send
, send_bytes
);
129 for (retry
= 0; retry
< 4; retry
++) {
130 ret
= radeon_process_aux_ch(dig_connector
->dp_i2c_bus
,
131 msg
, msg_bytes
, NULL
, 0, delay
, &ack
);
136 if ((ack
& AUX_NATIVE_REPLY_MASK
) == AUX_NATIVE_REPLY_ACK
)
138 else if ((ack
& AUX_NATIVE_REPLY_MASK
) == AUX_NATIVE_REPLY_DEFER
)
147 static int radeon_dp_aux_native_read(struct radeon_connector
*radeon_connector
,
148 u16 address
, u8
*recv
, int recv_bytes
, u8 delay
)
150 struct radeon_connector_atom_dig
*dig_connector
= radeon_connector
->con_priv
;
158 msg
[1] = address
>> 8;
159 msg
[2] = AUX_NATIVE_READ
<< 4;
160 msg
[3] = (msg_bytes
<< 4) | (recv_bytes
- 1);
162 for (retry
= 0; retry
< 4; retry
++) {
163 ret
= radeon_process_aux_ch(dig_connector
->dp_i2c_bus
,
164 msg
, msg_bytes
, recv
, recv_bytes
, delay
, &ack
);
169 if ((ack
& AUX_NATIVE_REPLY_MASK
) == AUX_NATIVE_REPLY_ACK
)
171 else if ((ack
& AUX_NATIVE_REPLY_MASK
) == AUX_NATIVE_REPLY_DEFER
)
182 static void radeon_write_dpcd_reg(struct radeon_connector
*radeon_connector
,
185 radeon_dp_aux_native_write(radeon_connector
, reg
, &val
, 1, 0);
188 static u8
radeon_read_dpcd_reg(struct radeon_connector
*radeon_connector
,
193 radeon_dp_aux_native_read(radeon_connector
, reg
, &val
, 1, 0);
198 int radeon_dp_i2c_aux_ch(struct i2c_adapter
*adapter
, int mode
,
199 u8 write_byte
, u8
*read_byte
)
201 struct i2c_algo_dp_aux_data
*algo_data
= adapter
->algo_data
;
202 struct radeon_i2c_chan
*auxch
= (struct radeon_i2c_chan
*)adapter
;
203 u16 address
= algo_data
->address
;
212 /* Set up the command byte */
213 if (mode
& MODE_I2C_READ
)
214 msg
[2] = AUX_I2C_READ
<< 4;
216 msg
[2] = AUX_I2C_WRITE
<< 4;
218 if (!(mode
& MODE_I2C_STOP
))
219 msg
[2] |= AUX_I2C_MOT
<< 4;
222 msg
[1] = address
>> 8;
227 msg
[3] = msg_bytes
<< 4;
232 msg
[3] = msg_bytes
<< 4;
240 for (retry
= 0; retry
< 4; retry
++) {
241 ret
= radeon_process_aux_ch(auxch
,
242 msg
, msg_bytes
, reply
, reply_bytes
, 0, &ack
);
246 DRM_DEBUG_KMS("aux_ch failed %d\n", ret
);
250 switch (ack
& AUX_NATIVE_REPLY_MASK
) {
251 case AUX_NATIVE_REPLY_ACK
:
252 /* I2C-over-AUX Reply field is only valid
253 * when paired with AUX ACK.
256 case AUX_NATIVE_REPLY_NACK
:
257 DRM_DEBUG_KMS("aux_ch native nack\n");
259 case AUX_NATIVE_REPLY_DEFER
:
260 DRM_DEBUG_KMS("aux_ch native defer\n");
264 DRM_ERROR("aux_ch invalid native reply 0x%02x\n", ack
);
268 switch (ack
& AUX_I2C_REPLY_MASK
) {
269 case AUX_I2C_REPLY_ACK
:
270 if (mode
== MODE_I2C_READ
)
271 *read_byte
= reply
[0];
273 case AUX_I2C_REPLY_NACK
:
274 DRM_DEBUG_KMS("aux_i2c nack\n");
276 case AUX_I2C_REPLY_DEFER
:
277 DRM_DEBUG_KMS("aux_i2c defer\n");
281 DRM_ERROR("aux_i2c invalid reply 0x%02x\n", ack
);
286 DRM_DEBUG_KMS("aux i2c too many retries, giving up\n");
290 /***** general DP utility functions *****/
292 static u8
dp_link_status(u8 link_status
[DP_LINK_STATUS_SIZE
], int r
)
294 return link_status
[r
- DP_LANE0_1_STATUS
];
297 static u8
dp_get_lane_status(u8 link_status
[DP_LINK_STATUS_SIZE
],
300 int i
= DP_LANE0_1_STATUS
+ (lane
>> 1);
301 int s
= (lane
& 1) * 4;
302 u8 l
= dp_link_status(link_status
, i
);
303 return (l
>> s
) & 0xf;
306 static bool dp_clock_recovery_ok(u8 link_status
[DP_LINK_STATUS_SIZE
],
312 for (lane
= 0; lane
< lane_count
; lane
++) {
313 lane_status
= dp_get_lane_status(link_status
, lane
);
314 if ((lane_status
& DP_LANE_CR_DONE
) == 0)
320 static bool dp_channel_eq_ok(u8 link_status
[DP_LINK_STATUS_SIZE
],
327 lane_align
= dp_link_status(link_status
,
328 DP_LANE_ALIGN_STATUS_UPDATED
);
329 if ((lane_align
& DP_INTERLANE_ALIGN_DONE
) == 0)
331 for (lane
= 0; lane
< lane_count
; lane
++) {
332 lane_status
= dp_get_lane_status(link_status
, lane
);
333 if ((lane_status
& DP_CHANNEL_EQ_BITS
) != DP_CHANNEL_EQ_BITS
)
339 static u8
dp_get_adjust_request_voltage(u8 link_status
[DP_LINK_STATUS_SIZE
],
343 int i
= DP_ADJUST_REQUEST_LANE0_1
+ (lane
>> 1);
344 int s
= ((lane
& 1) ?
345 DP_ADJUST_VOLTAGE_SWING_LANE1_SHIFT
:
346 DP_ADJUST_VOLTAGE_SWING_LANE0_SHIFT
);
347 u8 l
= dp_link_status(link_status
, i
);
349 return ((l
>> s
) & 0x3) << DP_TRAIN_VOLTAGE_SWING_SHIFT
;
352 static u8
dp_get_adjust_request_pre_emphasis(u8 link_status
[DP_LINK_STATUS_SIZE
],
355 int i
= DP_ADJUST_REQUEST_LANE0_1
+ (lane
>> 1);
356 int s
= ((lane
& 1) ?
357 DP_ADJUST_PRE_EMPHASIS_LANE1_SHIFT
:
358 DP_ADJUST_PRE_EMPHASIS_LANE0_SHIFT
);
359 u8 l
= dp_link_status(link_status
, i
);
361 return ((l
>> s
) & 0x3) << DP_TRAIN_PRE_EMPHASIS_SHIFT
;
#define DP_VOLTAGE_MAX         DP_TRAIN_VOLTAGE_SWING_1200	/* highest requestable swing level */
#define DP_PRE_EMPHASIS_MAX    DP_TRAIN_PRE_EMPHASIS_9_5	/* highest requestable pre-emphasis */
367 static void dp_get_adjust_train(u8 link_status
[DP_LINK_STATUS_SIZE
],
375 for (lane
= 0; lane
< lane_count
; lane
++) {
376 u8 this_v
= dp_get_adjust_request_voltage(link_status
, lane
);
377 u8 this_p
= dp_get_adjust_request_pre_emphasis(link_status
, lane
);
379 DRM_DEBUG_KMS("requested signal parameters: lane %d voltage %s pre_emph %s\n",
381 voltage_names
[this_v
>> DP_TRAIN_VOLTAGE_SWING_SHIFT
],
382 pre_emph_names
[this_p
>> DP_TRAIN_PRE_EMPHASIS_SHIFT
]);
390 if (v
>= DP_VOLTAGE_MAX
)
391 v
|= DP_TRAIN_MAX_SWING_REACHED
;
393 if (p
>= DP_PRE_EMPHASIS_MAX
)
394 p
|= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED
;
396 DRM_DEBUG_KMS("using signal parameters: voltage %s pre_emph %s\n",
397 voltage_names
[(v
& DP_TRAIN_VOLTAGE_SWING_MASK
) >> DP_TRAIN_VOLTAGE_SWING_SHIFT
],
398 pre_emph_names
[(p
& DP_TRAIN_PRE_EMPHASIS_MASK
) >> DP_TRAIN_PRE_EMPHASIS_SHIFT
]);
400 for (lane
= 0; lane
< 4; lane
++)
401 train_set
[lane
] = v
| p
;
/* convert bits per color to bits per pixel */
/* get bpc from the EDID */
static int convert_bpc_to_bpp(int bpc)
{
	/* bpc == 0 means "unknown"; assume standard 8 bpc (24 bpp) */
	if (bpc == 0)
		return 24;
	else
		return bpc * 3;
}
/* get the max pix clock supported by the link rate and lane num */
static int dp_get_max_dp_pix_clock(int link_rate,
				   int lane_num,
				   int bpp)
{
	/* 8b/10b coding: each lane carries 8 data bits per link-clock symbol */
	return (link_rate * lane_num * 8) / bpp;
}
422 static int dp_get_max_link_rate(u8 dpcd
[DP_DPCD_SIZE
])
424 switch (dpcd
[DP_MAX_LINK_RATE
]) {
425 case DP_LINK_BW_1_62
:
435 static u8
dp_get_max_lane_number(u8 dpcd
[DP_DPCD_SIZE
])
437 return dpcd
[DP_MAX_LANE_COUNT
] & DP_MAX_LANE_COUNT_MASK
;
440 static u8
dp_get_dp_link_rate_coded(int link_rate
)
445 return DP_LINK_BW_1_62
;
447 return DP_LINK_BW_2_7
;
449 return DP_LINK_BW_5_4
;
453 /***** radeon specific DP functions *****/
455 /* First get the min lane# when low rate is used according to pixel clock
456 * (prefer low rate), second check max lane# supported by DP panel,
457 * if the max lane# < low rate lane# then use max lane# instead.
459 static int radeon_dp_get_dp_lane_number(struct drm_connector
*connector
,
460 u8 dpcd
[DP_DPCD_SIZE
],
463 int bpp
= convert_bpc_to_bpp(connector
->display_info
.bpc
);
464 int max_link_rate
= dp_get_max_link_rate(dpcd
);
465 int max_lane_num
= dp_get_max_lane_number(dpcd
);
467 int max_dp_pix_clock
;
469 for (lane_num
= 1; lane_num
< max_lane_num
; lane_num
<<= 1) {
470 max_dp_pix_clock
= dp_get_max_dp_pix_clock(max_link_rate
, lane_num
, bpp
);
471 if (pix_clock
<= max_dp_pix_clock
)
478 static int radeon_dp_get_dp_link_clock(struct drm_connector
*connector
,
479 u8 dpcd
[DP_DPCD_SIZE
],
482 int bpp
= convert_bpc_to_bpp(connector
->display_info
.bpc
);
483 int lane_num
, max_pix_clock
;
485 if (radeon_connector_encoder_get_dp_bridge_encoder_id(connector
) ==
486 ENCODER_OBJECT_ID_NUTMEG
)
489 lane_num
= radeon_dp_get_dp_lane_number(connector
, dpcd
, pix_clock
);
490 max_pix_clock
= dp_get_max_dp_pix_clock(162000, lane_num
, bpp
);
491 if (pix_clock
<= max_pix_clock
)
493 max_pix_clock
= dp_get_max_dp_pix_clock(270000, lane_num
, bpp
);
494 if (pix_clock
<= max_pix_clock
)
496 if (radeon_connector_is_dp12_capable(connector
)) {
497 max_pix_clock
= dp_get_max_dp_pix_clock(540000, lane_num
, bpp
);
498 if (pix_clock
<= max_pix_clock
)
502 return dp_get_max_link_rate(dpcd
);
505 static u8
radeon_dp_encoder_service(struct radeon_device
*rdev
,
506 int action
, int dp_clock
,
507 u8 ucconfig
, u8 lane_num
)
509 DP_ENCODER_SERVICE_PARAMETERS args
;
510 int index
= GetIndexIntoMasterTable(COMMAND
, DPEncoderService
);
512 memset(&args
, 0, sizeof(args
));
513 args
.ucLinkClock
= dp_clock
/ 10;
514 args
.ucConfig
= ucconfig
;
515 args
.ucAction
= action
;
516 args
.ucLaneNum
= lane_num
;
519 atom_execute_table(rdev
->mode_info
.atom_context
, index
, (uint32_t *)&args
);
520 return args
.ucStatus
;
523 u8
radeon_dp_getsinktype(struct radeon_connector
*radeon_connector
)
525 struct radeon_connector_atom_dig
*dig_connector
= radeon_connector
->con_priv
;
526 struct drm_device
*dev
= radeon_connector
->base
.dev
;
527 struct radeon_device
*rdev
= dev
->dev_private
;
529 return radeon_dp_encoder_service(rdev
, ATOM_DP_ACTION_GET_SINK_TYPE
, 0,
530 dig_connector
->dp_i2c_bus
->rec
.i2c_id
, 0);
533 bool radeon_dp_getdpcd(struct radeon_connector
*radeon_connector
)
535 struct radeon_connector_atom_dig
*dig_connector
= radeon_connector
->con_priv
;
539 ret
= radeon_dp_aux_native_read(radeon_connector
, DP_DPCD_REV
, msg
, 8, 0);
541 memcpy(dig_connector
->dpcd
, msg
, 8);
542 DRM_DEBUG_KMS("DPCD: ");
543 for (i
= 0; i
< 8; i
++)
544 DRM_DEBUG_KMS("%02x ", msg
[i
]);
548 dig_connector
->dpcd
[0] = 0;
/*
 * radeon_dp_get_panel_mode() - select the DP panel mode (external DP,
 * internal DP1 or internal DP2) for @encoder/@connector, based on the
 * DP bridge type (NUTMEG/TRAVIS), a 6-byte dongle id read from DPCD
 * 0x503+, and the eDP configuration cap.
 *
 * NOTE(review): this source fragment is truncated; the function's
 * braces, several early returns, the id[]/i declarations, and the
 * conditionals comparing the dongle id bytes and testing the eDP cap
 * are missing from this view. Visible tokens preserved unchanged.
 */
int radeon_dp_get_panel_mode(struct drm_encoder *encoder,
			     struct drm_connector *connector)
	struct drm_device *dev = encoder->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
	int panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
	/* pre-DCE4 parts presumably keep the default mode — body truncated here */
	if (!ASIC_IS_DCE4(rdev))
	if (radeon_connector_encoder_get_dp_bridge_encoder_id(connector) ==
	    ENCODER_OBJECT_ID_NUTMEG)
		panel_mode = DP_PANEL_MODE_INTERNAL_DP1_MODE;
	else if (radeon_connector_encoder_get_dp_bridge_encoder_id(connector) ==
		 ENCODER_OBJECT_ID_TRAVIS) {
		/* read the 6-byte dongle id from DPCD 0x503..0x508 */
		for (i = 0; i < 6; i++)
			id[i] = radeon_read_dpcd_reg(radeon_connector, 0x503 + i);
		/* NOTE(review): id-comparison condition missing from this view */
			panel_mode = DP_PANEL_MODE_INTERNAL_DP1_MODE;
			panel_mode = DP_PANEL_MODE_INTERNAL_DP2_MODE;
	} else if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
		u8 tmp = radeon_read_dpcd_reg(radeon_connector, DP_EDP_CONFIGURATION_CAP);
		/* NOTE(review): cap-test condition missing from this view */
		panel_mode = DP_PANEL_MODE_INTERNAL_DP2_MODE;
590 void radeon_dp_set_link_config(struct drm_connector
*connector
,
591 struct drm_display_mode
*mode
)
593 struct radeon_connector
*radeon_connector
= to_radeon_connector(connector
);
594 struct radeon_connector_atom_dig
*dig_connector
;
596 if (!radeon_connector
->con_priv
)
598 dig_connector
= radeon_connector
->con_priv
;
600 if ((dig_connector
->dp_sink_type
== CONNECTOR_OBJECT_ID_DISPLAYPORT
) ||
601 (dig_connector
->dp_sink_type
== CONNECTOR_OBJECT_ID_eDP
)) {
602 dig_connector
->dp_clock
=
603 radeon_dp_get_dp_link_clock(connector
, dig_connector
->dpcd
, mode
->clock
);
604 dig_connector
->dp_lane_count
=
605 radeon_dp_get_dp_lane_number(connector
, dig_connector
->dpcd
, mode
->clock
);
609 int radeon_dp_mode_valid_helper(struct drm_connector
*connector
,
610 struct drm_display_mode
*mode
)
612 struct radeon_connector
*radeon_connector
= to_radeon_connector(connector
);
613 struct radeon_connector_atom_dig
*dig_connector
;
616 if (!radeon_connector
->con_priv
)
617 return MODE_CLOCK_HIGH
;
618 dig_connector
= radeon_connector
->con_priv
;
621 radeon_dp_get_dp_link_clock(connector
, dig_connector
->dpcd
, mode
->clock
);
623 if ((dp_clock
== 540000) &&
624 (!radeon_connector_is_dp12_capable(connector
)))
625 return MODE_CLOCK_HIGH
;
630 static bool radeon_dp_get_link_status(struct radeon_connector
*radeon_connector
,
631 u8 link_status
[DP_LINK_STATUS_SIZE
])
634 ret
= radeon_dp_aux_native_read(radeon_connector
, DP_LANE0_1_STATUS
,
635 link_status
, DP_LINK_STATUS_SIZE
, 100);
637 DRM_ERROR("displayport link status failed\n");
641 DRM_DEBUG_KMS("link status %02x %02x %02x %02x %02x %02x\n",
642 link_status
[0], link_status
[1], link_status
[2],
643 link_status
[3], link_status
[4], link_status
[5]);
647 bool radeon_dp_needs_link_train(struct radeon_connector
*radeon_connector
)
649 u8 link_status
[DP_LINK_STATUS_SIZE
];
650 struct radeon_connector_atom_dig
*dig
= radeon_connector
->con_priv
;
652 if (!radeon_dp_get_link_status(radeon_connector
, link_status
))
654 if (dp_channel_eq_ok(link_status
, dig
->dp_lane_count
))
659 struct radeon_dp_link_train_info
{
660 struct radeon_device
*rdev
;
661 struct drm_encoder
*encoder
;
662 struct drm_connector
*connector
;
663 struct radeon_connector
*radeon_connector
;
671 u8 link_status
[DP_LINK_STATUS_SIZE
];
676 static void radeon_dp_update_vs_emph(struct radeon_dp_link_train_info
*dp_info
)
678 /* set the initial vs/emph on the source */
679 atombios_dig_transmitter_setup(dp_info
->encoder
,
680 ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH
,
681 0, dp_info
->train_set
[0]); /* sets all lanes at once */
683 /* set the vs/emph on the sink */
684 radeon_dp_aux_native_write(dp_info
->radeon_connector
, DP_TRAINING_LANE0_SET
,
685 dp_info
->train_set
, dp_info
->dp_lane_count
, 0);
688 static void radeon_dp_set_tp(struct radeon_dp_link_train_info
*dp_info
, int tp
)
692 /* set training pattern on the source */
693 if (ASIC_IS_DCE4(dp_info
->rdev
) || !dp_info
->use_dpencoder
) {
695 case DP_TRAINING_PATTERN_1
:
696 rtp
= ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1
;
698 case DP_TRAINING_PATTERN_2
:
699 rtp
= ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2
;
701 case DP_TRAINING_PATTERN_3
:
702 rtp
= ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3
;
705 atombios_dig_encoder_setup(dp_info
->encoder
, rtp
, 0);
708 case DP_TRAINING_PATTERN_1
:
711 case DP_TRAINING_PATTERN_2
:
715 radeon_dp_encoder_service(dp_info
->rdev
, ATOM_DP_ACTION_TRAINING_PATTERN_SEL
,
716 dp_info
->dp_clock
, dp_info
->enc_id
, rtp
);
719 /* enable training pattern on the sink */
720 radeon_write_dpcd_reg(dp_info
->radeon_connector
, DP_TRAINING_PATTERN_SET
, tp
);
723 static int radeon_dp_link_train_init(struct radeon_dp_link_train_info
*dp_info
)
725 struct radeon_encoder
*radeon_encoder
= to_radeon_encoder(dp_info
->encoder
);
726 struct radeon_encoder_atom_dig
*dig
= radeon_encoder
->enc_priv
;
729 /* power up the sink */
730 if (dp_info
->dpcd
[0] >= 0x11)
731 radeon_write_dpcd_reg(dp_info
->radeon_connector
,
732 DP_SET_POWER
, DP_SET_POWER_D0
);
734 /* possibly enable downspread on the sink */
735 if (dp_info
->dpcd
[3] & 0x1)
736 radeon_write_dpcd_reg(dp_info
->radeon_connector
,
737 DP_DOWNSPREAD_CTRL
, DP_SPREAD_AMP_0_5
);
739 radeon_write_dpcd_reg(dp_info
->radeon_connector
,
740 DP_DOWNSPREAD_CTRL
, 0);
742 if ((dp_info
->connector
->connector_type
== DRM_MODE_CONNECTOR_eDP
) &&
743 (dig
->panel_mode
== DP_PANEL_MODE_INTERNAL_DP2_MODE
)) {
744 radeon_write_dpcd_reg(dp_info
->radeon_connector
, DP_EDP_CONFIGURATION_SET
, 1);
747 /* set the lane count on the sink */
748 tmp
= dp_info
->dp_lane_count
;
749 if (dp_info
->dpcd
[0] >= 0x11)
750 tmp
|= DP_LANE_COUNT_ENHANCED_FRAME_EN
;
751 radeon_write_dpcd_reg(dp_info
->radeon_connector
, DP_LANE_COUNT_SET
, tmp
);
753 /* set the link rate on the sink */
754 tmp
= dp_get_dp_link_rate_coded(dp_info
->dp_clock
);
755 radeon_write_dpcd_reg(dp_info
->radeon_connector
, DP_LINK_BW_SET
, tmp
);
757 /* start training on the source */
758 if (ASIC_IS_DCE4(dp_info
->rdev
) || !dp_info
->use_dpencoder
)
759 atombios_dig_encoder_setup(dp_info
->encoder
,
760 ATOM_ENCODER_CMD_DP_LINK_TRAINING_START
, 0);
762 radeon_dp_encoder_service(dp_info
->rdev
, ATOM_DP_ACTION_TRAINING_START
,
763 dp_info
->dp_clock
, dp_info
->enc_id
, 0);
765 /* disable the training pattern on the sink */
766 radeon_write_dpcd_reg(dp_info
->radeon_connector
,
767 DP_TRAINING_PATTERN_SET
,
768 DP_TRAINING_PATTERN_DISABLE
);
773 static int radeon_dp_link_train_finish(struct radeon_dp_link_train_info
*dp_info
)
777 /* disable the training pattern on the sink */
778 radeon_write_dpcd_reg(dp_info
->radeon_connector
,
779 DP_TRAINING_PATTERN_SET
,
780 DP_TRAINING_PATTERN_DISABLE
);
782 /* disable the training pattern on the source */
783 if (ASIC_IS_DCE4(dp_info
->rdev
) || !dp_info
->use_dpencoder
)
784 atombios_dig_encoder_setup(dp_info
->encoder
,
785 ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE
, 0);
787 radeon_dp_encoder_service(dp_info
->rdev
, ATOM_DP_ACTION_TRAINING_COMPLETE
,
788 dp_info
->dp_clock
, dp_info
->enc_id
, 0);
793 static int radeon_dp_link_train_cr(struct radeon_dp_link_train_info
*dp_info
)
799 radeon_dp_set_tp(dp_info
, DP_TRAINING_PATTERN_1
);
800 memset(dp_info
->train_set
, 0, 4);
801 radeon_dp_update_vs_emph(dp_info
);
805 /* clock recovery loop */
806 clock_recovery
= false;
810 if (dp_info
->rd_interval
== 0)
813 mdelay(dp_info
->rd_interval
* 4);
815 if (!radeon_dp_get_link_status(dp_info
->radeon_connector
, dp_info
->link_status
))
818 if (dp_clock_recovery_ok(dp_info
->link_status
, dp_info
->dp_lane_count
)) {
819 clock_recovery
= true;
823 for (i
= 0; i
< dp_info
->dp_lane_count
; i
++) {
824 if ((dp_info
->train_set
[i
] & DP_TRAIN_MAX_SWING_REACHED
) == 0)
827 if (i
== dp_info
->dp_lane_count
) {
828 DRM_ERROR("clock recovery reached max voltage\n");
832 if ((dp_info
->train_set
[0] & DP_TRAIN_VOLTAGE_SWING_MASK
) == voltage
) {
834 if (dp_info
->tries
== 5) {
835 DRM_ERROR("clock recovery tried 5 times\n");
841 voltage
= dp_info
->train_set
[0] & DP_TRAIN_VOLTAGE_SWING_MASK
;
843 /* Compute new train_set as requested by sink */
844 dp_get_adjust_train(dp_info
->link_status
, dp_info
->dp_lane_count
, dp_info
->train_set
);
846 radeon_dp_update_vs_emph(dp_info
);
848 if (!clock_recovery
) {
849 DRM_ERROR("clock recovery failed\n");
852 DRM_DEBUG_KMS("clock recovery at voltage %d pre-emphasis %d\n",
853 dp_info
->train_set
[0] & DP_TRAIN_VOLTAGE_SWING_MASK
,
854 (dp_info
->train_set
[0] & DP_TRAIN_PRE_EMPHASIS_MASK
) >>
855 DP_TRAIN_PRE_EMPHASIS_SHIFT
);
860 static int radeon_dp_link_train_ce(struct radeon_dp_link_train_info
*dp_info
)
864 if (dp_info
->tp3_supported
)
865 radeon_dp_set_tp(dp_info
, DP_TRAINING_PATTERN_3
);
867 radeon_dp_set_tp(dp_info
, DP_TRAINING_PATTERN_2
);
869 /* channel equalization loop */
873 if (dp_info
->rd_interval
== 0)
876 mdelay(dp_info
->rd_interval
* 4);
878 if (!radeon_dp_get_link_status(dp_info
->radeon_connector
, dp_info
->link_status
))
881 if (dp_channel_eq_ok(dp_info
->link_status
, dp_info
->dp_lane_count
)) {
887 if (dp_info
->tries
> 5) {
888 DRM_ERROR("channel eq failed: 5 tries\n");
892 /* Compute new train_set as requested by sink */
893 dp_get_adjust_train(dp_info
->link_status
, dp_info
->dp_lane_count
, dp_info
->train_set
);
895 radeon_dp_update_vs_emph(dp_info
);
900 DRM_ERROR("channel eq failed\n");
903 DRM_DEBUG_KMS("channel eq at voltage %d pre-emphasis %d\n",
904 dp_info
->train_set
[0] & DP_TRAIN_VOLTAGE_SWING_MASK
,
905 (dp_info
->train_set
[0] & DP_TRAIN_PRE_EMPHASIS_MASK
)
906 >> DP_TRAIN_PRE_EMPHASIS_SHIFT
);
/*
 * radeon_dp_link_train() - run the full DP link-training sequence for
 * @encoder on @connector: validate the sink type, choose between the
 * DIG encoder tables and the legacy DPEncoderService path, fill in a
 * radeon_dp_link_train_info, then run init -> clock recovery ->
 * channel equalization -> finish.
 *
 * NOTE(review): this source fragment is truncated and the definition
 * continues past the end of the visible text; braces, several local
 * declarations (index, tmp, frev/crev), early "return"s and "else"
 * keywords are missing from this view. Visible tokens preserved
 * unchanged; only comments were added.
 */
void radeon_dp_link_train(struct drm_encoder *encoder,
			  struct drm_connector *connector)
	struct drm_device *dev = encoder->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
	struct radeon_encoder_atom_dig *dig;
	struct radeon_connector *radeon_connector;
	struct radeon_connector_atom_dig *dig_connector;
	struct radeon_dp_link_train_info dp_info;
	/* bail out when the encoder/connector private data is missing */
	if (!radeon_encoder->enc_priv)
	dig = radeon_encoder->enc_priv;
	radeon_connector = to_radeon_connector(connector);
	if (!radeon_connector->con_priv)
	dig_connector = radeon_connector->con_priv;
	/* only train real DP/eDP sinks */
	if ((dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_DISPLAYPORT) &&
	    (dig_connector->dp_sink_type != CONNECTOR_OBJECT_ID_eDP))
	/* DPEncoderService newer than 1.1 can't program properly the
	 * training pattern. When facing such version use the
	 * DIGXEncoderControl (X== 1 | 2)
	 */
	dp_info.use_dpencoder = true;
	index = GetIndexIntoMasterTable(COMMAND, DPEncoderService);
	if (atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev, &crev)) {
		/* NOTE(review): version check condition missing from this view */
		dp_info.use_dpencoder = false;
	/* encode which DIG encoder and link this training targets */
	if (dig->dig_encoder)
		dp_info.enc_id |= ATOM_DP_CONFIG_DIG2_ENCODER;
		dp_info.enc_id |= ATOM_DP_CONFIG_DIG1_ENCODER;
		dp_info.enc_id |= ATOM_DP_CONFIG_LINK_B;
		dp_info.enc_id |= ATOM_DP_CONFIG_LINK_A;
	dp_info.rd_interval = radeon_read_dpcd_reg(radeon_connector, DP_TRAINING_AUX_RD_INTERVAL);
	/* training pattern 3 needs DCE5 plus the sink's TPS3 cap */
	tmp = radeon_read_dpcd_reg(radeon_connector, DP_MAX_LANE_COUNT);
	if (ASIC_IS_DCE5(rdev) && (tmp & DP_TPS3_SUPPORTED))
		dp_info.tp3_supported = true;
	dp_info.tp3_supported = false;
	memcpy(dp_info.dpcd, dig_connector->dpcd, 8);
	dp_info.encoder = encoder;
	dp_info.connector = connector;
	dp_info.radeon_connector = radeon_connector;
	dp_info.dp_lane_count = dig_connector->dp_lane_count;
	dp_info.dp_clock = dig_connector->dp_clock;
	/* run the four training stages; each nonzero return aborts */
	if (radeon_dp_link_train_init(&dp_info))
	if (radeon_dp_link_train_cr(&dp_info))
	if (radeon_dp_link_train_ce(&dp_info))
	if (radeon_dp_link_train_finish(&dp_info))