/*
 * Copyright 2007-8 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 */
28 #include <drm/radeon_drm.h>
32 #include "atom-bits.h"
33 #include <drm/drm_dp_helper.h>
35 /* move these to drm_dp_helper.c/h */
36 #define DP_LINK_CONFIGURATION_SIZE 9
37 #define DP_DPCD_SIZE 8
/* Human-readable names for the DP training voltage-swing and pre-emphasis
 * levels, indexed by the level value; used only for debug logging below.
 * Made const: these are read-only string-literal tables.
 */
static const char *voltage_names[] = {
	"0.4V", "0.6V", "0.8V", "1.2V"
};
static const char *pre_emph_names[] = {
	"0dB", "3.5dB", "6dB", "9.5dB"
};
46 /***** radeon AUX functions *****/
47 union aux_channel_transaction
{
48 PROCESS_AUX_CHANNEL_TRANSACTION_PS_ALLOCATION v1
;
49 PROCESS_AUX_CHANNEL_TRANSACTION_PARAMETERS_V2 v2
;
52 static int radeon_process_aux_ch(struct radeon_i2c_chan
*chan
,
53 u8
*send
, int send_bytes
,
54 u8
*recv
, int recv_size
,
57 struct drm_device
*dev
= chan
->dev
;
58 struct radeon_device
*rdev
= dev
->dev_private
;
59 union aux_channel_transaction args
;
60 int index
= GetIndexIntoMasterTable(COMMAND
, ProcessAuxChannelTransaction
);
64 memset(&args
, 0, sizeof(args
));
66 base
= (unsigned char *)(rdev
->mode_info
.atom_context
->scratch
+ 1);
68 memcpy(base
, send
, send_bytes
);
70 args
.v1
.lpAuxRequest
= 0 + 4;
71 args
.v1
.lpDataOut
= 16 + 4;
72 args
.v1
.ucDataOutLen
= 0;
73 args
.v1
.ucChannelID
= chan
->rec
.i2c_id
;
74 args
.v1
.ucDelay
= delay
/ 10;
75 if (ASIC_IS_DCE4(rdev
))
76 args
.v2
.ucHPD_ID
= chan
->rec
.hpd
;
78 atom_execute_table(rdev
->mode_info
.atom_context
, index
, (uint32_t *)&args
);
80 *ack
= args
.v1
.ucReplyStatus
;
83 if (args
.v1
.ucReplyStatus
== 1) {
84 DRM_DEBUG_KMS("dp_aux_ch timeout\n");
89 if (args
.v1
.ucReplyStatus
== 2) {
90 DRM_DEBUG_KMS("dp_aux_ch flags not zero\n");
95 if (args
.v1
.ucReplyStatus
== 3) {
96 DRM_DEBUG_KMS("dp_aux_ch error\n");
100 recv_bytes
= args
.v1
.ucDataOutLen
;
101 if (recv_bytes
> recv_size
)
102 recv_bytes
= recv_size
;
104 if (recv
&& recv_size
)
105 memcpy(recv
, base
+ 16, recv_bytes
);
110 static int radeon_dp_aux_native_write(struct radeon_connector
*radeon_connector
,
111 u16 address
, u8
*send
, u8 send_bytes
, u8 delay
)
113 struct radeon_connector_atom_dig
*dig_connector
= radeon_connector
->con_priv
;
116 int msg_bytes
= send_bytes
+ 4;
124 msg
[1] = address
>> 8;
125 msg
[2] = AUX_NATIVE_WRITE
<< 4;
126 msg
[3] = (msg_bytes
<< 4) | (send_bytes
- 1);
127 memcpy(&msg
[4], send
, send_bytes
);
129 for (retry
= 0; retry
< 4; retry
++) {
130 ret
= radeon_process_aux_ch(dig_connector
->dp_i2c_bus
,
131 msg
, msg_bytes
, NULL
, 0, delay
, &ack
);
136 if ((ack
& AUX_NATIVE_REPLY_MASK
) == AUX_NATIVE_REPLY_ACK
)
138 else if ((ack
& AUX_NATIVE_REPLY_MASK
) == AUX_NATIVE_REPLY_DEFER
)
147 static int radeon_dp_aux_native_read(struct radeon_connector
*radeon_connector
,
148 u16 address
, u8
*recv
, int recv_bytes
, u8 delay
)
150 struct radeon_connector_atom_dig
*dig_connector
= radeon_connector
->con_priv
;
158 msg
[1] = address
>> 8;
159 msg
[2] = AUX_NATIVE_READ
<< 4;
160 msg
[3] = (msg_bytes
<< 4) | (recv_bytes
- 1);
162 for (retry
= 0; retry
< 4; retry
++) {
163 ret
= radeon_process_aux_ch(dig_connector
->dp_i2c_bus
,
164 msg
, msg_bytes
, recv
, recv_bytes
, delay
, &ack
);
169 if ((ack
& AUX_NATIVE_REPLY_MASK
) == AUX_NATIVE_REPLY_ACK
)
171 else if ((ack
& AUX_NATIVE_REPLY_MASK
) == AUX_NATIVE_REPLY_DEFER
)
182 static void radeon_write_dpcd_reg(struct radeon_connector
*radeon_connector
,
185 radeon_dp_aux_native_write(radeon_connector
, reg
, &val
, 1, 0);
188 static u8
radeon_read_dpcd_reg(struct radeon_connector
*radeon_connector
,
193 radeon_dp_aux_native_read(radeon_connector
, reg
, &val
, 1, 0);
198 int radeon_dp_i2c_aux_ch(struct i2c_adapter
*adapter
, int mode
,
199 u8 write_byte
, u8
*read_byte
)
201 struct i2c_algo_dp_aux_data
*algo_data
= adapter
->algo_data
;
202 struct radeon_i2c_chan
*auxch
= (struct radeon_i2c_chan
*)adapter
;
203 u16 address
= algo_data
->address
;
212 /* Set up the command byte */
213 if (mode
& MODE_I2C_READ
)
214 msg
[2] = AUX_I2C_READ
<< 4;
216 msg
[2] = AUX_I2C_WRITE
<< 4;
218 if (!(mode
& MODE_I2C_STOP
))
219 msg
[2] |= AUX_I2C_MOT
<< 4;
222 msg
[1] = address
>> 8;
227 msg
[3] = msg_bytes
<< 4;
232 msg
[3] = msg_bytes
<< 4;
240 for (retry
= 0; retry
< 4; retry
++) {
241 ret
= radeon_process_aux_ch(auxch
,
242 msg
, msg_bytes
, reply
, reply_bytes
, 0, &ack
);
246 DRM_DEBUG_KMS("aux_ch failed %d\n", ret
);
250 switch (ack
& AUX_NATIVE_REPLY_MASK
) {
251 case AUX_NATIVE_REPLY_ACK
:
252 /* I2C-over-AUX Reply field is only valid
253 * when paired with AUX ACK.
256 case AUX_NATIVE_REPLY_NACK
:
257 DRM_DEBUG_KMS("aux_ch native nack\n");
259 case AUX_NATIVE_REPLY_DEFER
:
260 DRM_DEBUG_KMS("aux_ch native defer\n");
264 DRM_ERROR("aux_ch invalid native reply 0x%02x\n", ack
);
268 switch (ack
& AUX_I2C_REPLY_MASK
) {
269 case AUX_I2C_REPLY_ACK
:
270 if (mode
== MODE_I2C_READ
)
271 *read_byte
= reply
[0];
273 case AUX_I2C_REPLY_NACK
:
274 DRM_DEBUG_KMS("aux_i2c nack\n");
276 case AUX_I2C_REPLY_DEFER
:
277 DRM_DEBUG_KMS("aux_i2c defer\n");
281 DRM_ERROR("aux_i2c invalid reply 0x%02x\n", ack
);
286 DRM_DEBUG_KMS("aux i2c too many retries, giving up\n");
290 /***** general DP utility functions *****/
292 #define DP_VOLTAGE_MAX DP_TRAIN_VOLTAGE_SWING_1200
293 #define DP_PRE_EMPHASIS_MAX DP_TRAIN_PRE_EMPHASIS_9_5
295 static void dp_get_adjust_train(u8 link_status
[DP_LINK_STATUS_SIZE
],
303 for (lane
= 0; lane
< lane_count
; lane
++) {
304 u8 this_v
= drm_dp_get_adjust_request_voltage(link_status
, lane
);
305 u8 this_p
= drm_dp_get_adjust_request_pre_emphasis(link_status
, lane
);
307 DRM_DEBUG_KMS("requested signal parameters: lane %d voltage %s pre_emph %s\n",
309 voltage_names
[this_v
>> DP_TRAIN_VOLTAGE_SWING_SHIFT
],
310 pre_emph_names
[this_p
>> DP_TRAIN_PRE_EMPHASIS_SHIFT
]);
318 if (v
>= DP_VOLTAGE_MAX
)
319 v
|= DP_TRAIN_MAX_SWING_REACHED
;
321 if (p
>= DP_PRE_EMPHASIS_MAX
)
322 p
|= DP_TRAIN_MAX_PRE_EMPHASIS_REACHED
;
324 DRM_DEBUG_KMS("using signal parameters: voltage %s pre_emph %s\n",
325 voltage_names
[(v
& DP_TRAIN_VOLTAGE_SWING_MASK
) >> DP_TRAIN_VOLTAGE_SWING_SHIFT
],
326 pre_emph_names
[(p
& DP_TRAIN_PRE_EMPHASIS_MASK
) >> DP_TRAIN_PRE_EMPHASIS_SHIFT
]);
328 for (lane
= 0; lane
< 4; lane
++)
329 train_set
[lane
] = v
| p
;
/* convert bits per color to bits per pixel */
/* get bpc from the EDID */
static int convert_bpc_to_bpp(int bpc)
{
	/* bpc == 0 means "unknown"; assume standard 24 bpp (8 bpc, RGB) */
	if (bpc == 0)
		return 24;
	else
		return bpc * 3;
}
/* get the max pix clock supported by the link rate and lane num */
/* link_rate is in kHz, 8b/10b coding gives 8 data bits per 10-bit symbol,
 * so usable bandwidth is link_rate * lanes * 8 bits; divide by bits/pixel.
 */
static int dp_get_max_dp_pix_clock(int link_rate,
				   int lane_num,
				   int bpp)
{
	return (link_rate * lane_num * 8) / bpp;
}
350 static int dp_get_max_link_rate(u8 dpcd
[DP_DPCD_SIZE
])
352 switch (dpcd
[DP_MAX_LINK_RATE
]) {
353 case DP_LINK_BW_1_62
:
363 static u8
dp_get_max_lane_number(u8 dpcd
[DP_DPCD_SIZE
])
365 return dpcd
[DP_MAX_LANE_COUNT
] & DP_MAX_LANE_COUNT_MASK
;
368 static u8
dp_get_dp_link_rate_coded(int link_rate
)
373 return DP_LINK_BW_1_62
;
375 return DP_LINK_BW_2_7
;
377 return DP_LINK_BW_5_4
;
381 /***** radeon specific DP functions *****/
383 /* First get the min lane# when low rate is used according to pixel clock
384 * (prefer low rate), second check max lane# supported by DP panel,
385 * if the max lane# < low rate lane# then use max lane# instead.
387 static int radeon_dp_get_dp_lane_number(struct drm_connector
*connector
,
388 u8 dpcd
[DP_DPCD_SIZE
],
391 int bpp
= convert_bpc_to_bpp(radeon_get_monitor_bpc(connector
));
392 int max_link_rate
= dp_get_max_link_rate(dpcd
);
393 int max_lane_num
= dp_get_max_lane_number(dpcd
);
395 int max_dp_pix_clock
;
397 for (lane_num
= 1; lane_num
< max_lane_num
; lane_num
<<= 1) {
398 max_dp_pix_clock
= dp_get_max_dp_pix_clock(max_link_rate
, lane_num
, bpp
);
399 if (pix_clock
<= max_dp_pix_clock
)
406 static int radeon_dp_get_dp_link_clock(struct drm_connector
*connector
,
407 u8 dpcd
[DP_DPCD_SIZE
],
410 int bpp
= convert_bpc_to_bpp(radeon_get_monitor_bpc(connector
));
411 int lane_num
, max_pix_clock
;
413 if (radeon_connector_encoder_get_dp_bridge_encoder_id(connector
) ==
414 ENCODER_OBJECT_ID_NUTMEG
)
417 lane_num
= radeon_dp_get_dp_lane_number(connector
, dpcd
, pix_clock
);
418 max_pix_clock
= dp_get_max_dp_pix_clock(162000, lane_num
, bpp
);
419 if (pix_clock
<= max_pix_clock
)
421 max_pix_clock
= dp_get_max_dp_pix_clock(270000, lane_num
, bpp
);
422 if (pix_clock
<= max_pix_clock
)
424 if (radeon_connector_is_dp12_capable(connector
)) {
425 max_pix_clock
= dp_get_max_dp_pix_clock(540000, lane_num
, bpp
);
426 if (pix_clock
<= max_pix_clock
)
430 return dp_get_max_link_rate(dpcd
);
433 static u8
radeon_dp_encoder_service(struct radeon_device
*rdev
,
434 int action
, int dp_clock
,
435 u8 ucconfig
, u8 lane_num
)
437 DP_ENCODER_SERVICE_PARAMETERS args
;
438 int index
= GetIndexIntoMasterTable(COMMAND
, DPEncoderService
);
440 memset(&args
, 0, sizeof(args
));
441 args
.ucLinkClock
= dp_clock
/ 10;
442 args
.ucConfig
= ucconfig
;
443 args
.ucAction
= action
;
444 args
.ucLaneNum
= lane_num
;
447 atom_execute_table(rdev
->mode_info
.atom_context
, index
, (uint32_t *)&args
);
448 return args
.ucStatus
;
451 u8
radeon_dp_getsinktype(struct radeon_connector
*radeon_connector
)
453 struct radeon_connector_atom_dig
*dig_connector
= radeon_connector
->con_priv
;
454 struct drm_device
*dev
= radeon_connector
->base
.dev
;
455 struct radeon_device
*rdev
= dev
->dev_private
;
457 return radeon_dp_encoder_service(rdev
, ATOM_DP_ACTION_GET_SINK_TYPE
, 0,
458 dig_connector
->dp_i2c_bus
->rec
.i2c_id
, 0);
461 static void radeon_dp_probe_oui(struct radeon_connector
*radeon_connector
)
463 struct radeon_connector_atom_dig
*dig_connector
= radeon_connector
->con_priv
;
466 if (!(dig_connector
->dpcd
[DP_DOWN_STREAM_PORT_COUNT
] & DP_OUI_SUPPORT
))
469 if (radeon_dp_aux_native_read(radeon_connector
, DP_SINK_OUI
, buf
, 3, 0))
470 DRM_DEBUG_KMS("Sink OUI: %02hx%02hx%02hx\n",
471 buf
[0], buf
[1], buf
[2]);
473 if (radeon_dp_aux_native_read(radeon_connector
, DP_BRANCH_OUI
, buf
, 3, 0))
474 DRM_DEBUG_KMS("Branch OUI: %02hx%02hx%02hx\n",
475 buf
[0], buf
[1], buf
[2]);
478 bool radeon_dp_getdpcd(struct radeon_connector
*radeon_connector
)
480 struct radeon_connector_atom_dig
*dig_connector
= radeon_connector
->con_priv
;
484 ret
= radeon_dp_aux_native_read(radeon_connector
, DP_DPCD_REV
, msg
, 8, 0);
486 memcpy(dig_connector
->dpcd
, msg
, 8);
487 DRM_DEBUG_KMS("DPCD: ");
488 for (i
= 0; i
< 8; i
++)
489 DRM_DEBUG_KMS("%02x ", msg
[i
]);
492 radeon_dp_probe_oui(radeon_connector
);
496 dig_connector
->dpcd
[0] = 0;
500 int radeon_dp_get_panel_mode(struct drm_encoder
*encoder
,
501 struct drm_connector
*connector
)
503 struct drm_device
*dev
= encoder
->dev
;
504 struct radeon_device
*rdev
= dev
->dev_private
;
505 struct radeon_connector
*radeon_connector
= to_radeon_connector(connector
);
506 int panel_mode
= DP_PANEL_MODE_EXTERNAL_DP_MODE
;
507 u16 dp_bridge
= radeon_connector_encoder_get_dp_bridge_encoder_id(connector
);
510 if (!ASIC_IS_DCE4(rdev
))
513 if (dp_bridge
!= ENCODER_OBJECT_ID_NONE
) {
514 /* DP bridge chips */
515 tmp
= radeon_read_dpcd_reg(radeon_connector
, DP_EDP_CONFIGURATION_CAP
);
517 panel_mode
= DP_PANEL_MODE_INTERNAL_DP2_MODE
;
518 else if ((dp_bridge
== ENCODER_OBJECT_ID_NUTMEG
) ||
519 (dp_bridge
== ENCODER_OBJECT_ID_TRAVIS
))
520 panel_mode
= DP_PANEL_MODE_INTERNAL_DP1_MODE
;
522 panel_mode
= DP_PANEL_MODE_EXTERNAL_DP_MODE
;
523 } else if (connector
->connector_type
== DRM_MODE_CONNECTOR_eDP
) {
525 tmp
= radeon_read_dpcd_reg(radeon_connector
, DP_EDP_CONFIGURATION_CAP
);
527 panel_mode
= DP_PANEL_MODE_INTERNAL_DP2_MODE
;
533 void radeon_dp_set_link_config(struct drm_connector
*connector
,
534 const struct drm_display_mode
*mode
)
536 struct radeon_connector
*radeon_connector
= to_radeon_connector(connector
);
537 struct radeon_connector_atom_dig
*dig_connector
;
539 if (!radeon_connector
->con_priv
)
541 dig_connector
= radeon_connector
->con_priv
;
543 if ((dig_connector
->dp_sink_type
== CONNECTOR_OBJECT_ID_DISPLAYPORT
) ||
544 (dig_connector
->dp_sink_type
== CONNECTOR_OBJECT_ID_eDP
)) {
545 dig_connector
->dp_clock
=
546 radeon_dp_get_dp_link_clock(connector
, dig_connector
->dpcd
, mode
->clock
);
547 dig_connector
->dp_lane_count
=
548 radeon_dp_get_dp_lane_number(connector
, dig_connector
->dpcd
, mode
->clock
);
552 int radeon_dp_mode_valid_helper(struct drm_connector
*connector
,
553 struct drm_display_mode
*mode
)
555 struct radeon_connector
*radeon_connector
= to_radeon_connector(connector
);
556 struct radeon_connector_atom_dig
*dig_connector
;
559 if (!radeon_connector
->con_priv
)
560 return MODE_CLOCK_HIGH
;
561 dig_connector
= radeon_connector
->con_priv
;
564 radeon_dp_get_dp_link_clock(connector
, dig_connector
->dpcd
, mode
->clock
);
566 if ((dp_clock
== 540000) &&
567 (!radeon_connector_is_dp12_capable(connector
)))
568 return MODE_CLOCK_HIGH
;
573 static bool radeon_dp_get_link_status(struct radeon_connector
*radeon_connector
,
574 u8 link_status
[DP_LINK_STATUS_SIZE
])
577 ret
= radeon_dp_aux_native_read(radeon_connector
, DP_LANE0_1_STATUS
,
578 link_status
, DP_LINK_STATUS_SIZE
, 100);
583 DRM_DEBUG_KMS("link status %*ph\n", 6, link_status
);
587 bool radeon_dp_needs_link_train(struct radeon_connector
*radeon_connector
)
589 u8 link_status
[DP_LINK_STATUS_SIZE
];
590 struct radeon_connector_atom_dig
*dig
= radeon_connector
->con_priv
;
592 if (!radeon_dp_get_link_status(radeon_connector
, link_status
))
594 if (drm_dp_channel_eq_ok(link_status
, dig
->dp_lane_count
))
599 struct radeon_dp_link_train_info
{
600 struct radeon_device
*rdev
;
601 struct drm_encoder
*encoder
;
602 struct drm_connector
*connector
;
603 struct radeon_connector
*radeon_connector
;
611 u8 link_status
[DP_LINK_STATUS_SIZE
];
616 static void radeon_dp_update_vs_emph(struct radeon_dp_link_train_info
*dp_info
)
618 /* set the initial vs/emph on the source */
619 atombios_dig_transmitter_setup(dp_info
->encoder
,
620 ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH
,
621 0, dp_info
->train_set
[0]); /* sets all lanes at once */
623 /* set the vs/emph on the sink */
624 radeon_dp_aux_native_write(dp_info
->radeon_connector
, DP_TRAINING_LANE0_SET
,
625 dp_info
->train_set
, dp_info
->dp_lane_count
, 0);
628 static void radeon_dp_set_tp(struct radeon_dp_link_train_info
*dp_info
, int tp
)
632 /* set training pattern on the source */
633 if (ASIC_IS_DCE4(dp_info
->rdev
) || !dp_info
->use_dpencoder
) {
635 case DP_TRAINING_PATTERN_1
:
636 rtp
= ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1
;
638 case DP_TRAINING_PATTERN_2
:
639 rtp
= ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2
;
641 case DP_TRAINING_PATTERN_3
:
642 rtp
= ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3
;
645 atombios_dig_encoder_setup(dp_info
->encoder
, rtp
, 0);
648 case DP_TRAINING_PATTERN_1
:
651 case DP_TRAINING_PATTERN_2
:
655 radeon_dp_encoder_service(dp_info
->rdev
, ATOM_DP_ACTION_TRAINING_PATTERN_SEL
,
656 dp_info
->dp_clock
, dp_info
->enc_id
, rtp
);
659 /* enable training pattern on the sink */
660 radeon_write_dpcd_reg(dp_info
->radeon_connector
, DP_TRAINING_PATTERN_SET
, tp
);
663 static int radeon_dp_link_train_init(struct radeon_dp_link_train_info
*dp_info
)
665 struct radeon_encoder
*radeon_encoder
= to_radeon_encoder(dp_info
->encoder
);
666 struct radeon_encoder_atom_dig
*dig
= radeon_encoder
->enc_priv
;
669 /* power up the sink */
670 if (dp_info
->dpcd
[0] >= 0x11)
671 radeon_write_dpcd_reg(dp_info
->radeon_connector
,
672 DP_SET_POWER
, DP_SET_POWER_D0
);
674 /* possibly enable downspread on the sink */
675 if (dp_info
->dpcd
[3] & 0x1)
676 radeon_write_dpcd_reg(dp_info
->radeon_connector
,
677 DP_DOWNSPREAD_CTRL
, DP_SPREAD_AMP_0_5
);
679 radeon_write_dpcd_reg(dp_info
->radeon_connector
,
680 DP_DOWNSPREAD_CTRL
, 0);
682 if ((dp_info
->connector
->connector_type
== DRM_MODE_CONNECTOR_eDP
) &&
683 (dig
->panel_mode
== DP_PANEL_MODE_INTERNAL_DP2_MODE
)) {
684 radeon_write_dpcd_reg(dp_info
->radeon_connector
, DP_EDP_CONFIGURATION_SET
, 1);
687 /* set the lane count on the sink */
688 tmp
= dp_info
->dp_lane_count
;
689 if (dp_info
->dpcd
[DP_DPCD_REV
] >= 0x11 &&
690 dp_info
->dpcd
[DP_MAX_LANE_COUNT
] & DP_ENHANCED_FRAME_CAP
)
691 tmp
|= DP_LANE_COUNT_ENHANCED_FRAME_EN
;
692 radeon_write_dpcd_reg(dp_info
->radeon_connector
, DP_LANE_COUNT_SET
, tmp
);
694 /* set the link rate on the sink */
695 tmp
= dp_get_dp_link_rate_coded(dp_info
->dp_clock
);
696 radeon_write_dpcd_reg(dp_info
->radeon_connector
, DP_LINK_BW_SET
, tmp
);
698 /* start training on the source */
699 if (ASIC_IS_DCE4(dp_info
->rdev
) || !dp_info
->use_dpencoder
)
700 atombios_dig_encoder_setup(dp_info
->encoder
,
701 ATOM_ENCODER_CMD_DP_LINK_TRAINING_START
, 0);
703 radeon_dp_encoder_service(dp_info
->rdev
, ATOM_DP_ACTION_TRAINING_START
,
704 dp_info
->dp_clock
, dp_info
->enc_id
, 0);
706 /* disable the training pattern on the sink */
707 radeon_write_dpcd_reg(dp_info
->radeon_connector
,
708 DP_TRAINING_PATTERN_SET
,
709 DP_TRAINING_PATTERN_DISABLE
);
714 static int radeon_dp_link_train_finish(struct radeon_dp_link_train_info
*dp_info
)
718 /* disable the training pattern on the sink */
719 radeon_write_dpcd_reg(dp_info
->radeon_connector
,
720 DP_TRAINING_PATTERN_SET
,
721 DP_TRAINING_PATTERN_DISABLE
);
723 /* disable the training pattern on the source */
724 if (ASIC_IS_DCE4(dp_info
->rdev
) || !dp_info
->use_dpencoder
)
725 atombios_dig_encoder_setup(dp_info
->encoder
,
726 ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE
, 0);
728 radeon_dp_encoder_service(dp_info
->rdev
, ATOM_DP_ACTION_TRAINING_COMPLETE
,
729 dp_info
->dp_clock
, dp_info
->enc_id
, 0);
734 static int radeon_dp_link_train_cr(struct radeon_dp_link_train_info
*dp_info
)
740 radeon_dp_set_tp(dp_info
, DP_TRAINING_PATTERN_1
);
741 memset(dp_info
->train_set
, 0, 4);
742 radeon_dp_update_vs_emph(dp_info
);
746 /* clock recovery loop */
747 clock_recovery
= false;
751 if (dp_info
->rd_interval
== 0)
754 mdelay(dp_info
->rd_interval
* 4);
756 if (!radeon_dp_get_link_status(dp_info
->radeon_connector
, dp_info
->link_status
)) {
757 DRM_ERROR("displayport link status failed\n");
761 if (drm_dp_clock_recovery_ok(dp_info
->link_status
, dp_info
->dp_lane_count
)) {
762 clock_recovery
= true;
766 for (i
= 0; i
< dp_info
->dp_lane_count
; i
++) {
767 if ((dp_info
->train_set
[i
] & DP_TRAIN_MAX_SWING_REACHED
) == 0)
770 if (i
== dp_info
->dp_lane_count
) {
771 DRM_ERROR("clock recovery reached max voltage\n");
775 if ((dp_info
->train_set
[0] & DP_TRAIN_VOLTAGE_SWING_MASK
) == voltage
) {
777 if (dp_info
->tries
== 5) {
778 DRM_ERROR("clock recovery tried 5 times\n");
784 voltage
= dp_info
->train_set
[0] & DP_TRAIN_VOLTAGE_SWING_MASK
;
786 /* Compute new train_set as requested by sink */
787 dp_get_adjust_train(dp_info
->link_status
, dp_info
->dp_lane_count
, dp_info
->train_set
);
789 radeon_dp_update_vs_emph(dp_info
);
791 if (!clock_recovery
) {
792 DRM_ERROR("clock recovery failed\n");
795 DRM_DEBUG_KMS("clock recovery at voltage %d pre-emphasis %d\n",
796 dp_info
->train_set
[0] & DP_TRAIN_VOLTAGE_SWING_MASK
,
797 (dp_info
->train_set
[0] & DP_TRAIN_PRE_EMPHASIS_MASK
) >>
798 DP_TRAIN_PRE_EMPHASIS_SHIFT
);
803 static int radeon_dp_link_train_ce(struct radeon_dp_link_train_info
*dp_info
)
807 if (dp_info
->tp3_supported
)
808 radeon_dp_set_tp(dp_info
, DP_TRAINING_PATTERN_3
);
810 radeon_dp_set_tp(dp_info
, DP_TRAINING_PATTERN_2
);
812 /* channel equalization loop */
816 if (dp_info
->rd_interval
== 0)
819 mdelay(dp_info
->rd_interval
* 4);
821 if (!radeon_dp_get_link_status(dp_info
->radeon_connector
, dp_info
->link_status
)) {
822 DRM_ERROR("displayport link status failed\n");
826 if (drm_dp_channel_eq_ok(dp_info
->link_status
, dp_info
->dp_lane_count
)) {
832 if (dp_info
->tries
> 5) {
833 DRM_ERROR("channel eq failed: 5 tries\n");
837 /* Compute new train_set as requested by sink */
838 dp_get_adjust_train(dp_info
->link_status
, dp_info
->dp_lane_count
, dp_info
->train_set
);
840 radeon_dp_update_vs_emph(dp_info
);
845 DRM_ERROR("channel eq failed\n");
848 DRM_DEBUG_KMS("channel eq at voltage %d pre-emphasis %d\n",
849 dp_info
->train_set
[0] & DP_TRAIN_VOLTAGE_SWING_MASK
,
850 (dp_info
->train_set
[0] & DP_TRAIN_PRE_EMPHASIS_MASK
)
851 >> DP_TRAIN_PRE_EMPHASIS_SHIFT
);
856 void radeon_dp_link_train(struct drm_encoder
*encoder
,
857 struct drm_connector
*connector
)
859 struct drm_device
*dev
= encoder
->dev
;
860 struct radeon_device
*rdev
= dev
->dev_private
;
861 struct radeon_encoder
*radeon_encoder
= to_radeon_encoder(encoder
);
862 struct radeon_encoder_atom_dig
*dig
;
863 struct radeon_connector
*radeon_connector
;
864 struct radeon_connector_atom_dig
*dig_connector
;
865 struct radeon_dp_link_train_info dp_info
;
869 if (!radeon_encoder
->enc_priv
)
871 dig
= radeon_encoder
->enc_priv
;
873 radeon_connector
= to_radeon_connector(connector
);
874 if (!radeon_connector
->con_priv
)
876 dig_connector
= radeon_connector
->con_priv
;
878 if ((dig_connector
->dp_sink_type
!= CONNECTOR_OBJECT_ID_DISPLAYPORT
) &&
879 (dig_connector
->dp_sink_type
!= CONNECTOR_OBJECT_ID_eDP
))
882 /* DPEncoderService newer than 1.1 can't program properly the
883 * training pattern. When facing such version use the
884 * DIGXEncoderControl (X== 1 | 2)
886 dp_info
.use_dpencoder
= true;
887 index
= GetIndexIntoMasterTable(COMMAND
, DPEncoderService
);
888 if (atom_parse_cmd_header(rdev
->mode_info
.atom_context
, index
, &frev
, &crev
)) {
890 dp_info
.use_dpencoder
= false;
895 if (dig
->dig_encoder
)
896 dp_info
.enc_id
|= ATOM_DP_CONFIG_DIG2_ENCODER
;
898 dp_info
.enc_id
|= ATOM_DP_CONFIG_DIG1_ENCODER
;
900 dp_info
.enc_id
|= ATOM_DP_CONFIG_LINK_B
;
902 dp_info
.enc_id
|= ATOM_DP_CONFIG_LINK_A
;
904 dp_info
.rd_interval
= radeon_read_dpcd_reg(radeon_connector
, DP_TRAINING_AUX_RD_INTERVAL
);
905 tmp
= radeon_read_dpcd_reg(radeon_connector
, DP_MAX_LANE_COUNT
);
906 if (ASIC_IS_DCE5(rdev
) && (tmp
& DP_TPS3_SUPPORTED
))
907 dp_info
.tp3_supported
= true;
909 dp_info
.tp3_supported
= false;
911 memcpy(dp_info
.dpcd
, dig_connector
->dpcd
, 8);
913 dp_info
.encoder
= encoder
;
914 dp_info
.connector
= connector
;
915 dp_info
.radeon_connector
= radeon_connector
;
916 dp_info
.dp_lane_count
= dig_connector
->dp_lane_count
;
917 dp_info
.dp_clock
= dig_connector
->dp_clock
;
919 if (radeon_dp_link_train_init(&dp_info
))
921 if (radeon_dp_link_train_cr(&dp_info
))
923 if (radeon_dp_link_train_ce(&dp_info
))
926 if (radeon_dp_link_train_finish(&dp_info
))