/*
 * Copyright 2007-8 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 */
27 #include <drm/drm_crtc_helper.h>
28 #include <drm/radeon_drm.h>
29 #include <drm/drm_fixed.h>
32 #include "atom-bits.h"
34 static void atombios_overscan_setup(struct drm_crtc
*crtc
,
35 struct drm_display_mode
*mode
,
36 struct drm_display_mode
*adjusted_mode
)
38 struct drm_device
*dev
= crtc
->dev
;
39 struct radeon_device
*rdev
= dev
->dev_private
;
40 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
41 SET_CRTC_OVERSCAN_PS_ALLOCATION args
;
42 int index
= GetIndexIntoMasterTable(COMMAND
, SetCRTC_OverScan
);
45 memset(&args
, 0, sizeof(args
));
47 args
.ucCRTC
= radeon_crtc
->crtc_id
;
49 switch (radeon_crtc
->rmx_type
) {
51 args
.usOverscanTop
= (adjusted_mode
->crtc_vdisplay
- mode
->crtc_vdisplay
) / 2;
52 args
.usOverscanBottom
= (adjusted_mode
->crtc_vdisplay
- mode
->crtc_vdisplay
) / 2;
53 args
.usOverscanLeft
= (adjusted_mode
->crtc_hdisplay
- mode
->crtc_hdisplay
) / 2;
54 args
.usOverscanRight
= (adjusted_mode
->crtc_hdisplay
- mode
->crtc_hdisplay
) / 2;
57 a1
= mode
->crtc_vdisplay
* adjusted_mode
->crtc_hdisplay
;
58 a2
= adjusted_mode
->crtc_vdisplay
* mode
->crtc_hdisplay
;
61 args
.usOverscanLeft
= (adjusted_mode
->crtc_hdisplay
- (a2
/ mode
->crtc_vdisplay
)) / 2;
62 args
.usOverscanRight
= (adjusted_mode
->crtc_hdisplay
- (a2
/ mode
->crtc_vdisplay
)) / 2;
64 args
.usOverscanLeft
= (adjusted_mode
->crtc_vdisplay
- (a1
/ mode
->crtc_hdisplay
)) / 2;
65 args
.usOverscanRight
= (adjusted_mode
->crtc_vdisplay
- (a1
/ mode
->crtc_hdisplay
)) / 2;
70 args
.usOverscanRight
= radeon_crtc
->h_border
;
71 args
.usOverscanLeft
= radeon_crtc
->h_border
;
72 args
.usOverscanBottom
= radeon_crtc
->v_border
;
73 args
.usOverscanTop
= radeon_crtc
->v_border
;
76 atom_execute_table(rdev
->mode_info
.atom_context
, index
, (uint32_t *)&args
);
79 static void atombios_scaler_setup(struct drm_crtc
*crtc
)
81 struct drm_device
*dev
= crtc
->dev
;
82 struct radeon_device
*rdev
= dev
->dev_private
;
83 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
84 ENABLE_SCALER_PS_ALLOCATION args
;
85 int index
= GetIndexIntoMasterTable(COMMAND
, EnableScaler
);
87 /* fixme - fill in enc_priv for atom dac */
88 enum radeon_tv_std tv_std
= TV_STD_NTSC
;
89 bool is_tv
= false, is_cv
= false;
90 struct drm_encoder
*encoder
;
92 if (!ASIC_IS_AVIVO(rdev
) && radeon_crtc
->crtc_id
)
95 list_for_each_entry(encoder
, &dev
->mode_config
.encoder_list
, head
) {
97 if (encoder
->crtc
== crtc
) {
98 struct radeon_encoder
*radeon_encoder
= to_radeon_encoder(encoder
);
99 if (radeon_encoder
->active_device
& ATOM_DEVICE_TV_SUPPORT
) {
100 struct radeon_encoder_atom_dac
*tv_dac
= radeon_encoder
->enc_priv
;
101 tv_std
= tv_dac
->tv_std
;
107 memset(&args
, 0, sizeof(args
));
109 args
.ucScaler
= radeon_crtc
->crtc_id
;
115 args
.ucTVStandard
= ATOM_TV_NTSC
;
118 args
.ucTVStandard
= ATOM_TV_PAL
;
121 args
.ucTVStandard
= ATOM_TV_PALM
;
124 args
.ucTVStandard
= ATOM_TV_PAL60
;
127 args
.ucTVStandard
= ATOM_TV_NTSCJ
;
129 case TV_STD_SCART_PAL
:
130 args
.ucTVStandard
= ATOM_TV_PAL
; /* ??? */
133 args
.ucTVStandard
= ATOM_TV_SECAM
;
136 args
.ucTVStandard
= ATOM_TV_PALCN
;
139 args
.ucEnable
= SCALER_ENABLE_MULTITAP_MODE
;
141 args
.ucTVStandard
= ATOM_TV_CV
;
142 args
.ucEnable
= SCALER_ENABLE_MULTITAP_MODE
;
144 switch (radeon_crtc
->rmx_type
) {
146 args
.ucEnable
= ATOM_SCALER_EXPANSION
;
149 args
.ucEnable
= ATOM_SCALER_CENTER
;
152 args
.ucEnable
= ATOM_SCALER_EXPANSION
;
155 if (ASIC_IS_AVIVO(rdev
))
156 args
.ucEnable
= ATOM_SCALER_DISABLE
;
158 args
.ucEnable
= ATOM_SCALER_CENTER
;
162 atom_execute_table(rdev
->mode_info
.atom_context
, index
, (uint32_t *)&args
);
164 && rdev
->family
>= CHIP_RV515
&& rdev
->family
<= CHIP_R580
) {
165 atom_rv515_force_tv_scaler(rdev
, radeon_crtc
);
169 static void atombios_lock_crtc(struct drm_crtc
*crtc
, int lock
)
171 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
172 struct drm_device
*dev
= crtc
->dev
;
173 struct radeon_device
*rdev
= dev
->dev_private
;
175 GetIndexIntoMasterTable(COMMAND
, UpdateCRTC_DoubleBufferRegisters
);
176 ENABLE_CRTC_PS_ALLOCATION args
;
178 memset(&args
, 0, sizeof(args
));
180 args
.ucCRTC
= radeon_crtc
->crtc_id
;
181 args
.ucEnable
= lock
;
183 atom_execute_table(rdev
->mode_info
.atom_context
, index
, (uint32_t *)&args
);
186 static void atombios_enable_crtc(struct drm_crtc
*crtc
, int state
)
188 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
189 struct drm_device
*dev
= crtc
->dev
;
190 struct radeon_device
*rdev
= dev
->dev_private
;
191 int index
= GetIndexIntoMasterTable(COMMAND
, EnableCRTC
);
192 ENABLE_CRTC_PS_ALLOCATION args
;
194 memset(&args
, 0, sizeof(args
));
196 args
.ucCRTC
= radeon_crtc
->crtc_id
;
197 args
.ucEnable
= state
;
199 atom_execute_table(rdev
->mode_info
.atom_context
, index
, (uint32_t *)&args
);
202 static void atombios_enable_crtc_memreq(struct drm_crtc
*crtc
, int state
)
204 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
205 struct drm_device
*dev
= crtc
->dev
;
206 struct radeon_device
*rdev
= dev
->dev_private
;
207 int index
= GetIndexIntoMasterTable(COMMAND
, EnableCRTCMemReq
);
208 ENABLE_CRTC_PS_ALLOCATION args
;
210 memset(&args
, 0, sizeof(args
));
212 args
.ucCRTC
= radeon_crtc
->crtc_id
;
213 args
.ucEnable
= state
;
215 atom_execute_table(rdev
->mode_info
.atom_context
, index
, (uint32_t *)&args
);
218 static void atombios_blank_crtc(struct drm_crtc
*crtc
, int state
)
220 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
221 struct drm_device
*dev
= crtc
->dev
;
222 struct radeon_device
*rdev
= dev
->dev_private
;
223 int index
= GetIndexIntoMasterTable(COMMAND
, BlankCRTC
);
224 BLANK_CRTC_PS_ALLOCATION args
;
226 memset(&args
, 0, sizeof(args
));
228 args
.ucCRTC
= radeon_crtc
->crtc_id
;
229 args
.ucBlanking
= state
;
231 atom_execute_table(rdev
->mode_info
.atom_context
, index
, (uint32_t *)&args
);
234 void atombios_crtc_dpms(struct drm_crtc
*crtc
, int mode
)
236 struct drm_device
*dev
= crtc
->dev
;
237 struct radeon_device
*rdev
= dev
->dev_private
;
238 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
241 case DRM_MODE_DPMS_ON
:
242 radeon_crtc
->enabled
= true;
243 /* adjust pm to dpms changes BEFORE enabling crtcs */
244 radeon_pm_compute_clocks(rdev
);
245 atombios_enable_crtc(crtc
, ATOM_ENABLE
);
246 if (ASIC_IS_DCE3(rdev
))
247 atombios_enable_crtc_memreq(crtc
, ATOM_ENABLE
);
248 atombios_blank_crtc(crtc
, ATOM_DISABLE
);
249 drm_vblank_post_modeset(dev
, radeon_crtc
->crtc_id
);
250 radeon_crtc_load_lut(crtc
);
252 case DRM_MODE_DPMS_STANDBY
:
253 case DRM_MODE_DPMS_SUSPEND
:
254 case DRM_MODE_DPMS_OFF
:
255 drm_vblank_pre_modeset(dev
, radeon_crtc
->crtc_id
);
256 atombios_blank_crtc(crtc
, ATOM_ENABLE
);
257 if (ASIC_IS_DCE3(rdev
))
258 atombios_enable_crtc_memreq(crtc
, ATOM_DISABLE
);
259 atombios_enable_crtc(crtc
, ATOM_DISABLE
);
260 radeon_crtc
->enabled
= false;
261 /* adjust pm to dpms changes AFTER disabling crtcs */
262 radeon_pm_compute_clocks(rdev
);
268 atombios_set_crtc_dtd_timing(struct drm_crtc
*crtc
,
269 struct drm_display_mode
*mode
)
271 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
272 struct drm_device
*dev
= crtc
->dev
;
273 struct radeon_device
*rdev
= dev
->dev_private
;
274 SET_CRTC_USING_DTD_TIMING_PARAMETERS args
;
275 int index
= GetIndexIntoMasterTable(COMMAND
, SetCRTC_UsingDTDTiming
);
278 memset(&args
, 0, sizeof(args
));
279 args
.usH_Size
= cpu_to_le16(mode
->crtc_hdisplay
- (radeon_crtc
->h_border
* 2));
280 args
.usH_Blanking_Time
=
281 cpu_to_le16(mode
->crtc_hblank_end
- mode
->crtc_hdisplay
+ (radeon_crtc
->h_border
* 2));
282 args
.usV_Size
= cpu_to_le16(mode
->crtc_vdisplay
- (radeon_crtc
->v_border
* 2));
283 args
.usV_Blanking_Time
=
284 cpu_to_le16(mode
->crtc_vblank_end
- mode
->crtc_vdisplay
+ (radeon_crtc
->v_border
* 2));
285 args
.usH_SyncOffset
=
286 cpu_to_le16(mode
->crtc_hsync_start
- mode
->crtc_hdisplay
+ radeon_crtc
->h_border
);
288 cpu_to_le16(mode
->crtc_hsync_end
- mode
->crtc_hsync_start
);
289 args
.usV_SyncOffset
=
290 cpu_to_le16(mode
->crtc_vsync_start
- mode
->crtc_vdisplay
+ radeon_crtc
->v_border
);
292 cpu_to_le16(mode
->crtc_vsync_end
- mode
->crtc_vsync_start
);
293 args
.ucH_Border
= radeon_crtc
->h_border
;
294 args
.ucV_Border
= radeon_crtc
->v_border
;
296 if (mode
->flags
& DRM_MODE_FLAG_NVSYNC
)
297 misc
|= ATOM_VSYNC_POLARITY
;
298 if (mode
->flags
& DRM_MODE_FLAG_NHSYNC
)
299 misc
|= ATOM_HSYNC_POLARITY
;
300 if (mode
->flags
& DRM_MODE_FLAG_CSYNC
)
301 misc
|= ATOM_COMPOSITESYNC
;
302 if (mode
->flags
& DRM_MODE_FLAG_INTERLACE
)
303 misc
|= ATOM_INTERLACE
;
304 if (mode
->flags
& DRM_MODE_FLAG_DBLSCAN
)
305 misc
|= ATOM_DOUBLE_CLOCK_MODE
;
307 args
.susModeMiscInfo
.usAccess
= cpu_to_le16(misc
);
308 args
.ucCRTC
= radeon_crtc
->crtc_id
;
310 atom_execute_table(rdev
->mode_info
.atom_context
, index
, (uint32_t *)&args
);
313 static void atombios_crtc_set_timing(struct drm_crtc
*crtc
,
314 struct drm_display_mode
*mode
)
316 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
317 struct drm_device
*dev
= crtc
->dev
;
318 struct radeon_device
*rdev
= dev
->dev_private
;
319 SET_CRTC_TIMING_PARAMETERS_PS_ALLOCATION args
;
320 int index
= GetIndexIntoMasterTable(COMMAND
, SetCRTC_Timing
);
323 memset(&args
, 0, sizeof(args
));
324 args
.usH_Total
= cpu_to_le16(mode
->crtc_htotal
);
325 args
.usH_Disp
= cpu_to_le16(mode
->crtc_hdisplay
);
326 args
.usH_SyncStart
= cpu_to_le16(mode
->crtc_hsync_start
);
328 cpu_to_le16(mode
->crtc_hsync_end
- mode
->crtc_hsync_start
);
329 args
.usV_Total
= cpu_to_le16(mode
->crtc_vtotal
);
330 args
.usV_Disp
= cpu_to_le16(mode
->crtc_vdisplay
);
331 args
.usV_SyncStart
= cpu_to_le16(mode
->crtc_vsync_start
);
333 cpu_to_le16(mode
->crtc_vsync_end
- mode
->crtc_vsync_start
);
335 args
.ucOverscanRight
= radeon_crtc
->h_border
;
336 args
.ucOverscanLeft
= radeon_crtc
->h_border
;
337 args
.ucOverscanBottom
= radeon_crtc
->v_border
;
338 args
.ucOverscanTop
= radeon_crtc
->v_border
;
340 if (mode
->flags
& DRM_MODE_FLAG_NVSYNC
)
341 misc
|= ATOM_VSYNC_POLARITY
;
342 if (mode
->flags
& DRM_MODE_FLAG_NHSYNC
)
343 misc
|= ATOM_HSYNC_POLARITY
;
344 if (mode
->flags
& DRM_MODE_FLAG_CSYNC
)
345 misc
|= ATOM_COMPOSITESYNC
;
346 if (mode
->flags
& DRM_MODE_FLAG_INTERLACE
)
347 misc
|= ATOM_INTERLACE
;
348 if (mode
->flags
& DRM_MODE_FLAG_DBLSCAN
)
349 misc
|= ATOM_DOUBLE_CLOCK_MODE
;
351 args
.susModeMiscInfo
.usAccess
= cpu_to_le16(misc
);
352 args
.ucCRTC
= radeon_crtc
->crtc_id
;
354 atom_execute_table(rdev
->mode_info
.atom_context
, index
, (uint32_t *)&args
);
357 static void atombios_disable_ss(struct drm_crtc
*crtc
)
359 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
360 struct drm_device
*dev
= crtc
->dev
;
361 struct radeon_device
*rdev
= dev
->dev_private
;
364 if (ASIC_IS_DCE4(rdev
)) {
365 switch (radeon_crtc
->pll_id
) {
367 ss_cntl
= RREG32(EVERGREEN_P1PLL_SS_CNTL
);
368 ss_cntl
&= ~EVERGREEN_PxPLL_SS_EN
;
369 WREG32(EVERGREEN_P1PLL_SS_CNTL
, ss_cntl
);
372 ss_cntl
= RREG32(EVERGREEN_P2PLL_SS_CNTL
);
373 ss_cntl
&= ~EVERGREEN_PxPLL_SS_EN
;
374 WREG32(EVERGREEN_P2PLL_SS_CNTL
, ss_cntl
);
377 case ATOM_PPLL_INVALID
:
380 } else if (ASIC_IS_AVIVO(rdev
)) {
381 switch (radeon_crtc
->pll_id
) {
383 ss_cntl
= RREG32(AVIVO_P1PLL_INT_SS_CNTL
);
385 WREG32(AVIVO_P1PLL_INT_SS_CNTL
, ss_cntl
);
388 ss_cntl
= RREG32(AVIVO_P2PLL_INT_SS_CNTL
);
390 WREG32(AVIVO_P2PLL_INT_SS_CNTL
, ss_cntl
);
393 case ATOM_PPLL_INVALID
:
400 union atom_enable_ss
{
401 ENABLE_LVDS_SS_PARAMETERS legacy
;
402 ENABLE_SPREAD_SPECTRUM_ON_PPLL_PS_ALLOCATION v1
;
405 static void atombios_enable_ss(struct drm_crtc
*crtc
)
407 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
408 struct drm_device
*dev
= crtc
->dev
;
409 struct radeon_device
*rdev
= dev
->dev_private
;
410 struct drm_encoder
*encoder
= NULL
;
411 struct radeon_encoder
*radeon_encoder
= NULL
;
412 struct radeon_encoder_atom_dig
*dig
= NULL
;
413 int index
= GetIndexIntoMasterTable(COMMAND
, EnableSpreadSpectrumOnPPLL
);
414 union atom_enable_ss args
;
415 uint16_t percentage
= 0;
416 uint8_t type
= 0, step
= 0, delay
= 0, range
= 0;
418 /* XXX add ss support for DCE4 */
419 if (ASIC_IS_DCE4(rdev
))
422 list_for_each_entry(encoder
, &dev
->mode_config
.encoder_list
, head
) {
423 if (encoder
->crtc
== crtc
) {
424 radeon_encoder
= to_radeon_encoder(encoder
);
425 /* only enable spread spectrum on LVDS */
426 if (radeon_encoder
->devices
& (ATOM_DEVICE_LCD_SUPPORT
)) {
427 dig
= radeon_encoder
->enc_priv
;
428 if (dig
&& dig
->ss
) {
429 percentage
= dig
->ss
->percentage
;
430 type
= dig
->ss
->type
;
431 step
= dig
->ss
->step
;
432 delay
= dig
->ss
->delay
;
433 range
= dig
->ss
->range
;
445 memset(&args
, 0, sizeof(args
));
446 if (ASIC_IS_AVIVO(rdev
)) {
447 args
.v1
.usSpreadSpectrumPercentage
= cpu_to_le16(percentage
);
448 args
.v1
.ucSpreadSpectrumType
= type
;
449 args
.v1
.ucSpreadSpectrumStep
= step
;
450 args
.v1
.ucSpreadSpectrumDelay
= delay
;
451 args
.v1
.ucSpreadSpectrumRange
= range
;
452 args
.v1
.ucPpll
= radeon_crtc
->crtc_id
? ATOM_PPLL2
: ATOM_PPLL1
;
453 args
.v1
.ucEnable
= ATOM_ENABLE
;
455 args
.legacy
.usSpreadSpectrumPercentage
= cpu_to_le16(percentage
);
456 args
.legacy
.ucSpreadSpectrumType
= type
;
457 args
.legacy
.ucSpreadSpectrumStepSize_Delay
= (step
& 3) << 2;
458 args
.legacy
.ucSpreadSpectrumStepSize_Delay
|= (delay
& 7) << 4;
459 args
.legacy
.ucEnable
= ATOM_ENABLE
;
461 atom_execute_table(rdev
->mode_info
.atom_context
, index
, (uint32_t *)&args
);
464 union adjust_pixel_clock
{
465 ADJUST_DISPLAY_PLL_PS_ALLOCATION v1
;
466 ADJUST_DISPLAY_PLL_PS_ALLOCATION_V3 v3
;
469 static u32
atombios_adjust_pll(struct drm_crtc
*crtc
,
470 struct drm_display_mode
*mode
,
471 struct radeon_pll
*pll
)
473 struct drm_device
*dev
= crtc
->dev
;
474 struct radeon_device
*rdev
= dev
->dev_private
;
475 struct drm_encoder
*encoder
= NULL
;
476 struct radeon_encoder
*radeon_encoder
= NULL
;
477 u32 adjusted_clock
= mode
->clock
;
478 int encoder_mode
= 0;
479 u32 dp_clock
= mode
->clock
;
482 /* reset the pll flags */
485 /* select the PLL algo */
486 if (ASIC_IS_AVIVO(rdev
)) {
487 if (radeon_new_pll
== 0)
488 pll
->algo
= PLL_ALGO_LEGACY
;
490 pll
->algo
= PLL_ALGO_NEW
;
492 if (radeon_new_pll
== 1)
493 pll
->algo
= PLL_ALGO_NEW
;
495 pll
->algo
= PLL_ALGO_LEGACY
;
498 if (ASIC_IS_AVIVO(rdev
)) {
499 if ((rdev
->family
== CHIP_RS600
) ||
500 (rdev
->family
== CHIP_RS690
) ||
501 (rdev
->family
== CHIP_RS740
))
502 pll
->flags
|= (/*RADEON_PLL_USE_FRAC_FB_DIV |*/
503 RADEON_PLL_PREFER_CLOSEST_LOWER
);
505 if (ASIC_IS_DCE32(rdev
) && mode
->clock
> 200000) /* range limits??? */
506 pll
->flags
|= RADEON_PLL_PREFER_HIGH_FB_DIV
;
508 pll
->flags
|= RADEON_PLL_PREFER_LOW_REF_DIV
;
510 pll
->flags
|= RADEON_PLL_LEGACY
;
512 if (mode
->clock
> 200000) /* range limits??? */
513 pll
->flags
|= RADEON_PLL_PREFER_HIGH_FB_DIV
;
515 pll
->flags
|= RADEON_PLL_PREFER_LOW_REF_DIV
;
519 list_for_each_entry(encoder
, &dev
->mode_config
.encoder_list
, head
) {
520 if (encoder
->crtc
== crtc
) {
521 radeon_encoder
= to_radeon_encoder(encoder
);
522 encoder_mode
= atombios_get_encoder_mode(encoder
);
523 if (radeon_encoder
->devices
& (ATOM_DEVICE_LCD_SUPPORT
| ATOM_DEVICE_DFP_SUPPORT
)) {
524 struct drm_connector
*connector
= radeon_get_connector_for_encoder(encoder
);
526 struct radeon_connector
*radeon_connector
= to_radeon_connector(connector
);
527 struct radeon_connector_atom_dig
*dig_connector
=
528 radeon_connector
->con_priv
;
530 dp_clock
= dig_connector
->dp_clock
;
534 if (ASIC_IS_AVIVO(rdev
)) {
535 /* DVO wants 2x pixel clock if the DVO chip is in 12 bit mode */
536 if (radeon_encoder
->encoder_id
== ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1
)
537 adjusted_clock
= mode
->clock
* 2;
538 if (radeon_encoder
->active_device
& (ATOM_DEVICE_TV_SUPPORT
)) {
539 pll
->algo
= PLL_ALGO_LEGACY
;
540 pll
->flags
|= RADEON_PLL_PREFER_CLOSEST_LOWER
;
542 /* There is some evidence (often anecdotal) that RV515 LVDS
543 * (on some boards at least) prefers the legacy algo. I'm not
544 * sure whether this should handled generically or on a
545 * case-by-case quirk basis. Both algos should work fine in the
548 if ((radeon_encoder
->active_device
& (ATOM_DEVICE_LCD_SUPPORT
)) &&
549 (rdev
->family
== CHIP_RV515
)) {
550 /* allow the user to overrride just in case */
551 if (radeon_new_pll
== 1)
552 pll
->algo
= PLL_ALGO_NEW
;
554 pll
->algo
= PLL_ALGO_LEGACY
;
557 if (encoder
->encoder_type
!= DRM_MODE_ENCODER_DAC
)
558 pll
->flags
|= RADEON_PLL_NO_ODD_POST_DIV
;
559 if (encoder
->encoder_type
== DRM_MODE_ENCODER_LVDS
)
560 pll
->flags
|= RADEON_PLL_USE_REF_DIV
;
566 /* DCE3+ has an AdjustDisplayPll that will adjust the pixel clock
567 * accordingly based on the encoder/transmitter to work around
568 * special hw requirements.
570 if (ASIC_IS_DCE3(rdev
)) {
571 union adjust_pixel_clock args
;
575 index
= GetIndexIntoMasterTable(COMMAND
, AdjustDisplayPll
);
576 if (!atom_parse_cmd_header(rdev
->mode_info
.atom_context
, index
, &frev
,
578 return adjusted_clock
;
580 memset(&args
, 0, sizeof(args
));
587 args
.v1
.usPixelClock
= cpu_to_le16(mode
->clock
/ 10);
588 args
.v1
.ucTransmitterID
= radeon_encoder
->encoder_id
;
589 args
.v1
.ucEncodeMode
= encoder_mode
;
590 if (encoder_mode
== ATOM_ENCODER_MODE_DP
) {
591 /* may want to enable SS on DP eventually */
592 /* args.v1.ucConfig |=
593 ADJUST_DISPLAY_CONFIG_SS_ENABLE;*/
594 } else if (encoder_mode
== ATOM_ENCODER_MODE_LVDS
) {
596 ADJUST_DISPLAY_CONFIG_SS_ENABLE
;
599 atom_execute_table(rdev
->mode_info
.atom_context
,
600 index
, (uint32_t *)&args
);
601 adjusted_clock
= le16_to_cpu(args
.v1
.usPixelClock
) * 10;
604 args
.v3
.sInput
.usPixelClock
= cpu_to_le16(mode
->clock
/ 10);
605 args
.v3
.sInput
.ucTransmitterID
= radeon_encoder
->encoder_id
;
606 args
.v3
.sInput
.ucEncodeMode
= encoder_mode
;
607 args
.v3
.sInput
.ucDispPllConfig
= 0;
608 if (radeon_encoder
->devices
& (ATOM_DEVICE_DFP_SUPPORT
)) {
609 struct radeon_encoder_atom_dig
*dig
= radeon_encoder
->enc_priv
;
611 if (encoder_mode
== ATOM_ENCODER_MODE_DP
) {
612 /* may want to enable SS on DP/eDP eventually */
613 /*args.v3.sInput.ucDispPllConfig |=
614 DISPPLL_CONFIG_SS_ENABLE;*/
615 args
.v3
.sInput
.ucDispPllConfig
|=
616 DISPPLL_CONFIG_COHERENT_MODE
;
618 args
.v3
.sInput
.usPixelClock
= cpu_to_le16(dp_clock
/ 10);
620 if (encoder_mode
== ATOM_ENCODER_MODE_HDMI
) {
621 /* deep color support */
622 args
.v3
.sInput
.usPixelClock
=
623 cpu_to_le16((mode
->clock
* bpc
/ 8) / 10);
625 if (dig
->coherent_mode
)
626 args
.v3
.sInput
.ucDispPllConfig
|=
627 DISPPLL_CONFIG_COHERENT_MODE
;
628 if (mode
->clock
> 165000)
629 args
.v3
.sInput
.ucDispPllConfig
|=
630 DISPPLL_CONFIG_DUAL_LINK
;
632 } else if (radeon_encoder
->devices
& (ATOM_DEVICE_LCD_SUPPORT
)) {
633 if (encoder_mode
== ATOM_ENCODER_MODE_DP
) {
634 /* may want to enable SS on DP/eDP eventually */
635 /*args.v3.sInput.ucDispPllConfig |=
636 DISPPLL_CONFIG_SS_ENABLE;*/
637 args
.v3
.sInput
.ucDispPllConfig
|=
638 DISPPLL_CONFIG_COHERENT_MODE
;
640 args
.v3
.sInput
.usPixelClock
= cpu_to_le16(dp_clock
/ 10);
641 } else if (encoder_mode
== ATOM_ENCODER_MODE_LVDS
) {
642 /* want to enable SS on LVDS eventually */
643 /*args.v3.sInput.ucDispPllConfig |=
644 DISPPLL_CONFIG_SS_ENABLE;*/
646 if (mode
->clock
> 165000)
647 args
.v3
.sInput
.ucDispPllConfig
|=
648 DISPPLL_CONFIG_DUAL_LINK
;
651 atom_execute_table(rdev
->mode_info
.atom_context
,
652 index
, (uint32_t *)&args
);
653 adjusted_clock
= le32_to_cpu(args
.v3
.sOutput
.ulDispPllFreq
) * 10;
654 if (args
.v3
.sOutput
.ucRefDiv
) {
655 pll
->flags
|= RADEON_PLL_USE_REF_DIV
;
656 pll
->reference_div
= args
.v3
.sOutput
.ucRefDiv
;
658 if (args
.v3
.sOutput
.ucPostDiv
) {
659 pll
->flags
|= RADEON_PLL_USE_POST_DIV
;
660 pll
->post_div
= args
.v3
.sOutput
.ucPostDiv
;
664 DRM_ERROR("Unknown table version %d %d\n", frev
, crev
);
665 return adjusted_clock
;
669 DRM_ERROR("Unknown table version %d %d\n", frev
, crev
);
670 return adjusted_clock
;
673 return adjusted_clock
;
676 union set_pixel_clock
{
677 SET_PIXEL_CLOCK_PS_ALLOCATION base
;
678 PIXEL_CLOCK_PARAMETERS v1
;
679 PIXEL_CLOCK_PARAMETERS_V2 v2
;
680 PIXEL_CLOCK_PARAMETERS_V3 v3
;
681 PIXEL_CLOCK_PARAMETERS_V5 v5
;
684 static void atombios_crtc_set_dcpll(struct drm_crtc
*crtc
)
686 struct drm_device
*dev
= crtc
->dev
;
687 struct radeon_device
*rdev
= dev
->dev_private
;
690 union set_pixel_clock args
;
692 memset(&args
, 0, sizeof(args
));
694 index
= GetIndexIntoMasterTable(COMMAND
, SetPixelClock
);
695 if (!atom_parse_cmd_header(rdev
->mode_info
.atom_context
, index
, &frev
,
703 /* if the default dcpll clock is specified,
704 * SetPixelClock provides the dividers
706 args
.v5
.ucCRTC
= ATOM_CRTC_INVALID
;
707 args
.v5
.usPixelClock
= rdev
->clock
.default_dispclk
;
708 args
.v5
.ucPpll
= ATOM_DCPLL
;
711 DRM_ERROR("Unknown table version %d %d\n", frev
, crev
);
716 DRM_ERROR("Unknown table version %d %d\n", frev
, crev
);
719 atom_execute_table(rdev
->mode_info
.atom_context
, index
, (uint32_t *)&args
);
722 static void atombios_crtc_program_pll(struct drm_crtc
*crtc
,
733 struct drm_device
*dev
= crtc
->dev
;
734 struct radeon_device
*rdev
= dev
->dev_private
;
736 int index
= GetIndexIntoMasterTable(COMMAND
, SetPixelClock
);
737 union set_pixel_clock args
;
739 memset(&args
, 0, sizeof(args
));
741 if (!atom_parse_cmd_header(rdev
->mode_info
.atom_context
, index
, &frev
,
749 if (clock
== ATOM_DISABLE
)
751 args
.v1
.usPixelClock
= cpu_to_le16(clock
/ 10);
752 args
.v1
.usRefDiv
= cpu_to_le16(ref_div
);
753 args
.v1
.usFbDiv
= cpu_to_le16(fb_div
);
754 args
.v1
.ucFracFbDiv
= frac_fb_div
;
755 args
.v1
.ucPostDiv
= post_div
;
756 args
.v1
.ucPpll
= pll_id
;
757 args
.v1
.ucCRTC
= crtc_id
;
758 args
.v1
.ucRefDivSrc
= 1;
761 args
.v2
.usPixelClock
= cpu_to_le16(clock
/ 10);
762 args
.v2
.usRefDiv
= cpu_to_le16(ref_div
);
763 args
.v2
.usFbDiv
= cpu_to_le16(fb_div
);
764 args
.v2
.ucFracFbDiv
= frac_fb_div
;
765 args
.v2
.ucPostDiv
= post_div
;
766 args
.v2
.ucPpll
= pll_id
;
767 args
.v2
.ucCRTC
= crtc_id
;
768 args
.v2
.ucRefDivSrc
= 1;
771 args
.v3
.usPixelClock
= cpu_to_le16(clock
/ 10);
772 args
.v3
.usRefDiv
= cpu_to_le16(ref_div
);
773 args
.v3
.usFbDiv
= cpu_to_le16(fb_div
);
774 args
.v3
.ucFracFbDiv
= frac_fb_div
;
775 args
.v3
.ucPostDiv
= post_div
;
776 args
.v3
.ucPpll
= pll_id
;
777 args
.v3
.ucMiscInfo
= (pll_id
<< 2);
778 args
.v3
.ucTransmitterId
= encoder_id
;
779 args
.v3
.ucEncoderMode
= encoder_mode
;
782 args
.v5
.ucCRTC
= crtc_id
;
783 args
.v5
.usPixelClock
= cpu_to_le16(clock
/ 10);
784 args
.v5
.ucRefDiv
= ref_div
;
785 args
.v5
.usFbDiv
= cpu_to_le16(fb_div
);
786 args
.v5
.ulFbDivDecFrac
= cpu_to_le32(frac_fb_div
* 100000);
787 args
.v5
.ucPostDiv
= post_div
;
788 args
.v5
.ucMiscInfo
= 0; /* HDMI depth, etc. */
789 args
.v5
.ucTransmitterID
= encoder_id
;
790 args
.v5
.ucEncoderMode
= encoder_mode
;
791 args
.v5
.ucPpll
= pll_id
;
794 DRM_ERROR("Unknown table version %d %d\n", frev
, crev
);
799 DRM_ERROR("Unknown table version %d %d\n", frev
, crev
);
803 atom_execute_table(rdev
->mode_info
.atom_context
, index
, (uint32_t *)&args
);
806 static void atombios_crtc_set_pll(struct drm_crtc
*crtc
, struct drm_display_mode
*mode
)
808 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
809 struct drm_device
*dev
= crtc
->dev
;
810 struct radeon_device
*rdev
= dev
->dev_private
;
811 struct drm_encoder
*encoder
= NULL
;
812 struct radeon_encoder
*radeon_encoder
= NULL
;
813 u32 pll_clock
= mode
->clock
;
814 u32 ref_div
= 0, fb_div
= 0, frac_fb_div
= 0, post_div
= 0;
815 struct radeon_pll
*pll
;
817 int encoder_mode
= 0;
819 list_for_each_entry(encoder
, &dev
->mode_config
.encoder_list
, head
) {
820 if (encoder
->crtc
== crtc
) {
821 radeon_encoder
= to_radeon_encoder(encoder
);
822 encoder_mode
= atombios_get_encoder_mode(encoder
);
830 switch (radeon_crtc
->pll_id
) {
832 pll
= &rdev
->clock
.p1pll
;
835 pll
= &rdev
->clock
.p2pll
;
838 case ATOM_PPLL_INVALID
:
840 pll
= &rdev
->clock
.dcpll
;
844 /* adjust pixel clock as needed */
845 adjusted_clock
= atombios_adjust_pll(crtc
, mode
, pll
);
847 radeon_compute_pll(pll
, adjusted_clock
, &pll_clock
, &fb_div
, &frac_fb_div
,
848 &ref_div
, &post_div
);
850 atombios_crtc_program_pll(crtc
, radeon_crtc
->crtc_id
, radeon_crtc
->pll_id
,
851 encoder_mode
, radeon_encoder
->encoder_id
, mode
->clock
,
852 ref_div
, fb_div
, frac_fb_div
, post_div
);
856 static int evergreen_crtc_set_base(struct drm_crtc
*crtc
, int x
, int y
,
857 struct drm_framebuffer
*old_fb
)
859 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
860 struct drm_device
*dev
= crtc
->dev
;
861 struct radeon_device
*rdev
= dev
->dev_private
;
862 struct radeon_framebuffer
*radeon_fb
;
863 struct drm_gem_object
*obj
;
864 struct radeon_bo
*rbo
;
865 uint64_t fb_location
;
866 uint32_t fb_format
, fb_pitch_pixels
, tiling_flags
;
871 DRM_DEBUG_KMS("No FB bound\n");
875 radeon_fb
= to_radeon_framebuffer(crtc
->fb
);
877 /* Pin framebuffer & get tilling informations */
878 obj
= radeon_fb
->obj
;
879 rbo
= obj
->driver_private
;
880 r
= radeon_bo_reserve(rbo
, false);
881 if (unlikely(r
!= 0))
883 r
= radeon_bo_pin(rbo
, RADEON_GEM_DOMAIN_VRAM
, &fb_location
);
884 if (unlikely(r
!= 0)) {
885 radeon_bo_unreserve(rbo
);
888 radeon_bo_get_tiling_flags(rbo
, &tiling_flags
, NULL
);
889 radeon_bo_unreserve(rbo
);
891 switch (crtc
->fb
->bits_per_pixel
) {
893 fb_format
= (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_8BPP
) |
894 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_INDEXED
));
897 fb_format
= (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP
) |
898 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB1555
));
901 fb_format
= (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP
) |
902 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB565
));
906 fb_format
= (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_32BPP
) |
907 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB8888
));
910 DRM_ERROR("Unsupported screen depth %d\n",
911 crtc
->fb
->bits_per_pixel
);
915 if (tiling_flags
& RADEON_TILING_MACRO
)
916 fb_format
|= EVERGREEN_GRPH_ARRAY_MODE(EVERGREEN_GRPH_ARRAY_2D_TILED_THIN1
);
917 else if (tiling_flags
& RADEON_TILING_MICRO
)
918 fb_format
|= EVERGREEN_GRPH_ARRAY_MODE(EVERGREEN_GRPH_ARRAY_1D_TILED_THIN1
);
920 switch (radeon_crtc
->crtc_id
) {
922 WREG32(AVIVO_D1VGA_CONTROL
, 0);
925 WREG32(AVIVO_D2VGA_CONTROL
, 0);
928 WREG32(EVERGREEN_D3VGA_CONTROL
, 0);
931 WREG32(EVERGREEN_D4VGA_CONTROL
, 0);
934 WREG32(EVERGREEN_D5VGA_CONTROL
, 0);
937 WREG32(EVERGREEN_D6VGA_CONTROL
, 0);
943 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH
+ radeon_crtc
->crtc_offset
,
944 upper_32_bits(fb_location
));
945 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH
+ radeon_crtc
->crtc_offset
,
946 upper_32_bits(fb_location
));
947 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS
+ radeon_crtc
->crtc_offset
,
948 (u32
)fb_location
& EVERGREEN_GRPH_SURFACE_ADDRESS_MASK
);
949 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS
+ radeon_crtc
->crtc_offset
,
950 (u32
) fb_location
& EVERGREEN_GRPH_SURFACE_ADDRESS_MASK
);
951 WREG32(EVERGREEN_GRPH_CONTROL
+ radeon_crtc
->crtc_offset
, fb_format
);
953 WREG32(EVERGREEN_GRPH_SURFACE_OFFSET_X
+ radeon_crtc
->crtc_offset
, 0);
954 WREG32(EVERGREEN_GRPH_SURFACE_OFFSET_Y
+ radeon_crtc
->crtc_offset
, 0);
955 WREG32(EVERGREEN_GRPH_X_START
+ radeon_crtc
->crtc_offset
, 0);
956 WREG32(EVERGREEN_GRPH_Y_START
+ radeon_crtc
->crtc_offset
, 0);
957 WREG32(EVERGREEN_GRPH_X_END
+ radeon_crtc
->crtc_offset
, crtc
->fb
->width
);
958 WREG32(EVERGREEN_GRPH_Y_END
+ radeon_crtc
->crtc_offset
, crtc
->fb
->height
);
960 fb_pitch_pixels
= crtc
->fb
->pitch
/ (crtc
->fb
->bits_per_pixel
/ 8);
961 WREG32(EVERGREEN_GRPH_PITCH
+ radeon_crtc
->crtc_offset
, fb_pitch_pixels
);
962 WREG32(EVERGREEN_GRPH_ENABLE
+ radeon_crtc
->crtc_offset
, 1);
964 WREG32(EVERGREEN_DESKTOP_HEIGHT
+ radeon_crtc
->crtc_offset
,
965 crtc
->mode
.vdisplay
);
968 WREG32(EVERGREEN_VIEWPORT_START
+ radeon_crtc
->crtc_offset
,
970 WREG32(EVERGREEN_VIEWPORT_SIZE
+ radeon_crtc
->crtc_offset
,
971 (crtc
->mode
.hdisplay
<< 16) | crtc
->mode
.vdisplay
);
973 if (crtc
->mode
.flags
& DRM_MODE_FLAG_INTERLACE
)
974 WREG32(EVERGREEN_DATA_FORMAT
+ radeon_crtc
->crtc_offset
,
975 EVERGREEN_INTERLEAVE_EN
);
977 WREG32(EVERGREEN_DATA_FORMAT
+ radeon_crtc
->crtc_offset
, 0);
979 if (old_fb
&& old_fb
!= crtc
->fb
) {
980 radeon_fb
= to_radeon_framebuffer(old_fb
);
981 rbo
= radeon_fb
->obj
->driver_private
;
982 r
= radeon_bo_reserve(rbo
, false);
983 if (unlikely(r
!= 0))
985 radeon_bo_unpin(rbo
);
986 radeon_bo_unreserve(rbo
);
989 /* Bytes per pixel may have changed */
990 radeon_bandwidth_update(rdev
);
995 static int avivo_crtc_set_base(struct drm_crtc
*crtc
, int x
, int y
,
996 struct drm_framebuffer
*old_fb
)
998 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
999 struct drm_device
*dev
= crtc
->dev
;
1000 struct radeon_device
*rdev
= dev
->dev_private
;
1001 struct radeon_framebuffer
*radeon_fb
;
1002 struct drm_gem_object
*obj
;
1003 struct radeon_bo
*rbo
;
1004 uint64_t fb_location
;
1005 uint32_t fb_format
, fb_pitch_pixels
, tiling_flags
;
1010 DRM_DEBUG_KMS("No FB bound\n");
1014 radeon_fb
= to_radeon_framebuffer(crtc
->fb
);
1016 /* Pin framebuffer & get tilling informations */
1017 obj
= radeon_fb
->obj
;
1018 rbo
= obj
->driver_private
;
1019 r
= radeon_bo_reserve(rbo
, false);
1020 if (unlikely(r
!= 0))
1022 r
= radeon_bo_pin(rbo
, RADEON_GEM_DOMAIN_VRAM
, &fb_location
);
1023 if (unlikely(r
!= 0)) {
1024 radeon_bo_unreserve(rbo
);
1027 radeon_bo_get_tiling_flags(rbo
, &tiling_flags
, NULL
);
1028 radeon_bo_unreserve(rbo
);
1030 switch (crtc
->fb
->bits_per_pixel
) {
1033 AVIVO_D1GRPH_CONTROL_DEPTH_8BPP
|
1034 AVIVO_D1GRPH_CONTROL_8BPP_INDEXED
;
1038 AVIVO_D1GRPH_CONTROL_DEPTH_16BPP
|
1039 AVIVO_D1GRPH_CONTROL_16BPP_ARGB1555
;
1043 AVIVO_D1GRPH_CONTROL_DEPTH_16BPP
|
1044 AVIVO_D1GRPH_CONTROL_16BPP_RGB565
;
1049 AVIVO_D1GRPH_CONTROL_DEPTH_32BPP
|
1050 AVIVO_D1GRPH_CONTROL_32BPP_ARGB8888
;
1053 DRM_ERROR("Unsupported screen depth %d\n",
1054 crtc
->fb
->bits_per_pixel
);
1058 if (rdev
->family
>= CHIP_R600
) {
1059 if (tiling_flags
& RADEON_TILING_MACRO
)
1060 fb_format
|= R600_D1GRPH_ARRAY_MODE_2D_TILED_THIN1
;
1061 else if (tiling_flags
& RADEON_TILING_MICRO
)
1062 fb_format
|= R600_D1GRPH_ARRAY_MODE_1D_TILED_THIN1
;
1064 if (tiling_flags
& RADEON_TILING_MACRO
)
1065 fb_format
|= AVIVO_D1GRPH_MACRO_ADDRESS_MODE
;
1067 if (tiling_flags
& RADEON_TILING_MICRO
)
1068 fb_format
|= AVIVO_D1GRPH_TILED
;
1071 if (radeon_crtc
->crtc_id
== 0)
1072 WREG32(AVIVO_D1VGA_CONTROL
, 0);
1074 WREG32(AVIVO_D2VGA_CONTROL
, 0);
1076 if (rdev
->family
>= CHIP_RV770
) {
1077 if (radeon_crtc
->crtc_id
) {
1078 WREG32(R700_D2GRPH_PRIMARY_SURFACE_ADDRESS_HIGH
, upper_32_bits(fb_location
));
1079 WREG32(R700_D2GRPH_SECONDARY_SURFACE_ADDRESS_HIGH
, upper_32_bits(fb_location
));
1081 WREG32(R700_D1GRPH_PRIMARY_SURFACE_ADDRESS_HIGH
, upper_32_bits(fb_location
));
1082 WREG32(R700_D1GRPH_SECONDARY_SURFACE_ADDRESS_HIGH
, upper_32_bits(fb_location
));
1085 WREG32(AVIVO_D1GRPH_PRIMARY_SURFACE_ADDRESS
+ radeon_crtc
->crtc_offset
,
1087 WREG32(AVIVO_D1GRPH_SECONDARY_SURFACE_ADDRESS
+
1088 radeon_crtc
->crtc_offset
, (u32
) fb_location
);
1089 WREG32(AVIVO_D1GRPH_CONTROL
+ radeon_crtc
->crtc_offset
, fb_format
);
1091 WREG32(AVIVO_D1GRPH_SURFACE_OFFSET_X
+ radeon_crtc
->crtc_offset
, 0);
1092 WREG32(AVIVO_D1GRPH_SURFACE_OFFSET_Y
+ radeon_crtc
->crtc_offset
, 0);
1093 WREG32(AVIVO_D1GRPH_X_START
+ radeon_crtc
->crtc_offset
, 0);
1094 WREG32(AVIVO_D1GRPH_Y_START
+ radeon_crtc
->crtc_offset
, 0);
1095 WREG32(AVIVO_D1GRPH_X_END
+ radeon_crtc
->crtc_offset
, crtc
->fb
->width
);
1096 WREG32(AVIVO_D1GRPH_Y_END
+ radeon_crtc
->crtc_offset
, crtc
->fb
->height
);
1098 fb_pitch_pixels
= crtc
->fb
->pitch
/ (crtc
->fb
->bits_per_pixel
/ 8);
1099 WREG32(AVIVO_D1GRPH_PITCH
+ radeon_crtc
->crtc_offset
, fb_pitch_pixels
);
1100 WREG32(AVIVO_D1GRPH_ENABLE
+ radeon_crtc
->crtc_offset
, 1);
1102 WREG32(AVIVO_D1MODE_DESKTOP_HEIGHT
+ radeon_crtc
->crtc_offset
,
1103 crtc
->mode
.vdisplay
);
1106 WREG32(AVIVO_D1MODE_VIEWPORT_START
+ radeon_crtc
->crtc_offset
,
1108 WREG32(AVIVO_D1MODE_VIEWPORT_SIZE
+ radeon_crtc
->crtc_offset
,
1109 (crtc
->mode
.hdisplay
<< 16) | crtc
->mode
.vdisplay
);
1111 if (crtc
->mode
.flags
& DRM_MODE_FLAG_INTERLACE
)
1112 WREG32(AVIVO_D1MODE_DATA_FORMAT
+ radeon_crtc
->crtc_offset
,
1113 AVIVO_D1MODE_INTERLEAVE_EN
);
1115 WREG32(AVIVO_D1MODE_DATA_FORMAT
+ radeon_crtc
->crtc_offset
, 0);
1117 if (old_fb
&& old_fb
!= crtc
->fb
) {
1118 radeon_fb
= to_radeon_framebuffer(old_fb
);
1119 rbo
= radeon_fb
->obj
->driver_private
;
1120 r
= radeon_bo_reserve(rbo
, false);
1121 if (unlikely(r
!= 0))
1123 radeon_bo_unpin(rbo
);
1124 radeon_bo_unreserve(rbo
);
1127 /* Bytes per pixel may have changed */
1128 radeon_bandwidth_update(rdev
);
1133 int atombios_crtc_set_base(struct drm_crtc
*crtc
, int x
, int y
,
1134 struct drm_framebuffer
*old_fb
)
1136 struct drm_device
*dev
= crtc
->dev
;
1137 struct radeon_device
*rdev
= dev
->dev_private
;
1139 if (ASIC_IS_DCE4(rdev
))
1140 return evergreen_crtc_set_base(crtc
, x
, y
, old_fb
);
1141 else if (ASIC_IS_AVIVO(rdev
))
1142 return avivo_crtc_set_base(crtc
, x
, y
, old_fb
);
1144 return radeon_crtc_set_base(crtc
, x
, y
, old_fb
);
/* properly set additional regs when using atombios */
static void radeon_legacy_atom_fixup(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	u32 disp_merge_cntl;

	switch (radeon_crtc->crtc_id) {
	case 0:
		/* read-modify-write: clear the RGB offset enable for crtc 0 */
		disp_merge_cntl = RREG32(RADEON_DISP_MERGE_CNTL);
		disp_merge_cntl &= ~RADEON_DISP_RGB_OFFSET_EN;
		WREG32(RADEON_DISP_MERGE_CNTL, disp_merge_cntl);
		break;
	case 1:
		/* same fixup for crtc 1 via the DISP2 merge register */
		disp_merge_cntl = RREG32(RADEON_DISP2_MERGE_CNTL);
		disp_merge_cntl &= ~RADEON_DISP2_RGB_OFFSET_EN;
		WREG32(RADEON_DISP2_MERGE_CNTL, disp_merge_cntl);
		/* mirror crtc2 sync start/width into the FP2 registers */
		WREG32(RADEON_FP_H2_SYNC_STRT_WID, RREG32(RADEON_CRTC2_H_SYNC_STRT_WID));
		WREG32(RADEON_FP_V2_SYNC_STRT_WID, RREG32(RADEON_CRTC2_V_SYNC_STRT_WID));
		break;
	}
}
/*
 * Pick a PPLL for this crtc.
 *
 * On DCE4 a crtc driving DP can use the external clock instead of a
 * PPLL; otherwise one of the two shared PPLLs is chosen, avoiding any
 * PPLL already claimed by another crtc.  Pre-DCE4 ASICs have a fixed
 * pll per crtc, so the crtc id is returned directly.
 */
static int radeon_atom_pick_pll(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct drm_encoder *test_encoder;
	struct drm_crtc *test_crtc;
	uint32_t pll_in_use = 0;	/* bitmask of PPLL ids already claimed */

	if (ASIC_IS_DCE4(rdev)) {
		/* if crtc is driving DP and we have an ext clock, use that */
		list_for_each_entry(test_encoder, &dev->mode_config.encoder_list, head) {
			if (test_encoder->crtc && (test_encoder->crtc == crtc)) {
				if (atombios_get_encoder_mode(test_encoder) == ATOM_ENCODER_MODE_DP) {
					if (rdev->clock.dp_extclk)
						return ATOM_PPLL_INVALID;
				}
			}
		}

		/* otherwise, pick one of the plls */
		list_for_each_entry(test_crtc, &dev->mode_config.crtc_list, head) {
			struct radeon_crtc *radeon_test_crtc;

			/* only other crtcs can conflict with us */
			if (crtc == test_crtc)
				continue;

			radeon_test_crtc = to_radeon_crtc(test_crtc);
			if ((radeon_test_crtc->pll_id >= ATOM_PPLL1) &&
			    (radeon_test_crtc->pll_id <= ATOM_PPLL2))
				pll_in_use |= (1 << radeon_test_crtc->pll_id);
		}
		/* take PPLL1 if its bit is clear, otherwise fall back to PPLL2 */
		if (!(pll_in_use & 1))
			return ATOM_PPLL1;
		return ATOM_PPLL2;
	} else
		/* pre-DCE4: fixed pll per crtc */
		return radeon_crtc->crtc_id;
}
/*
 * Full atombios mode set for one crtc: program the pll (with spread
 * spectrum toggled off around it), set the crtc timing for the
 * detected ASIC family, then program the framebuffer base, overscan
 * and scaler.  Returns 0.
 */
int atombios_crtc_mode_set(struct drm_crtc *crtc,
			   struct drm_display_mode *mode,
			   struct drm_display_mode *adjusted_mode,
			   int x, int y, struct drm_framebuffer *old_fb)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct drm_encoder *encoder;
	bool is_tvcv = false;	/* crtc feeds a TV-out or component-video encoder */

	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		/* find tv std */
		if (encoder->crtc == crtc) {
			struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
			if (radeon_encoder->active_device &
			    (ATOM_DEVICE_TV_SUPPORT | ATOM_DEVICE_CV_SUPPORT))
				is_tvcv = true;
		}
	}

	/* program the pll with spread spectrum disabled, re-enable after */
	atombios_disable_ss(crtc);
	/* always set DCPLL */
	if (ASIC_IS_DCE4(rdev))
		atombios_crtc_set_dcpll(crtc);
	atombios_crtc_set_pll(crtc, adjusted_mode);
	atombios_enable_ss(crtc);

	if (ASIC_IS_DCE4(rdev))
		atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
	else if (ASIC_IS_AVIVO(rdev)) {
		/* TV/CV paths use the non-DTD timing table on AVIVO */
		if (is_tvcv)
			atombios_crtc_set_timing(crtc, adjusted_mode);
		else
			atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
	} else {
		atombios_crtc_set_timing(crtc, adjusted_mode);
		if (radeon_crtc->crtc_id == 0)
			atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
		/* legacy ASICs need extra register fixups after atombios */
		radeon_legacy_atom_fixup(crtc);
	}
	atombios_crtc_set_base(crtc, x, y, old_fb);
	atombios_overscan_setup(crtc, mode, adjusted_mode);
	atombios_scaler_setup(crtc);
	return 0;
}
1258 static bool atombios_crtc_mode_fixup(struct drm_crtc
*crtc
,
1259 struct drm_display_mode
*mode
,
1260 struct drm_display_mode
*adjusted_mode
)
1262 struct drm_device
*dev
= crtc
->dev
;
1263 struct radeon_device
*rdev
= dev
->dev_private
;
1265 /* adjust pm to upcoming mode change */
1266 radeon_pm_compute_clocks(rdev
);
1268 if (!radeon_crtc_scaling_mode_fixup(crtc
, mode
, adjusted_mode
))
/*
 * Helper-framework prepare hook: runs before mode_set.  Assigns a pll
 * to this crtc, then locks and blanks the crtc so the mode set does
 * not race with scanout.
 */
static void atombios_crtc_prepare(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);

	/* pick pll */
	radeon_crtc->pll_id = radeon_atom_pick_pll(crtc);

	/* lock first, then blank — commit() reverses this order */
	atombios_lock_crtc(crtc, ATOM_ENABLE);
	atombios_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
}
/*
 * Helper-framework commit hook: runs after mode_set.  Unblanks the
 * crtc and releases the lock taken by the prepare hook.
 */
static void atombios_crtc_commit(struct drm_crtc *crtc)
{
	atombios_crtc_dpms(crtc, DRM_MODE_DPMS_ON);
	atombios_lock_crtc(crtc, ATOM_DISABLE);
}
/*
 * Helper-framework disable hook: blank the crtc, shut down the PPLL
 * it was using (if any), and mark the crtc as owning no pll so a
 * later radeon_atom_pick_pll() can reuse it.
 */
static void atombios_crtc_disable(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);

	atombios_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);

	switch (radeon_crtc->pll_id) {
	case ATOM_PPLL1:
	case ATOM_PPLL2:
		/* disable the ppll */
		atombios_crtc_program_pll(crtc, radeon_crtc->crtc_id, radeon_crtc->pll_id,
					  0, 0, ATOM_DISABLE, 0, 0, 0, 0);
		break;
	default:
		break;
	}
	/* no pll assigned any more */
	radeon_crtc->pll_id = -1;
}
/* crtc helper vtable wiring the atombios implementations into the
 * DRM mode-setting helper framework */
static const struct drm_crtc_helper_funcs atombios_helper_funcs = {
	.dpms = atombios_crtc_dpms,
	.mode_fixup = atombios_crtc_mode_fixup,
	.mode_set = atombios_crtc_mode_set,
	.mode_set_base = atombios_crtc_set_base,
	.prepare = atombios_crtc_prepare,
	.commit = atombios_crtc_commit,
	.load_lut = radeon_crtc_load_lut,
	.disable = atombios_crtc_disable,
};
/*
 * Initialize one radeon crtc for atombios operation: compute the
 * per-crtc register offset for the ASIC family, clear the pll
 * assignment, and register the atombios helper callbacks.
 */
void radeon_atombios_init_crtc(struct drm_device *dev,
			       struct radeon_crtc *radeon_crtc)
{
	struct radeon_device *rdev = dev->dev_private;

	if (ASIC_IS_DCE4(rdev)) {
		/* DCE4: up to six crtcs, each with its own register block */
		switch (radeon_crtc->crtc_id) {
		case 0:
		default:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC0_REGISTER_OFFSET;
			break;
		case 1:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC1_REGISTER_OFFSET;
			break;
		case 2:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC2_REGISTER_OFFSET;
			break;
		case 3:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC3_REGISTER_OFFSET;
			break;
		case 4:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC4_REGISTER_OFFSET;
			break;
		case 5:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC5_REGISTER_OFFSET;
			break;
		}
	} else {
		/* pre-DCE4: crtc1 registers sit at a fixed offset from crtc0 */
		if (radeon_crtc->crtc_id == 1)
			radeon_crtc->crtc_offset =
				AVIVO_D2CRTC_H_TOTAL - AVIVO_D1CRTC_H_TOTAL;
		else
			radeon_crtc->crtc_offset = 0;
	}
	/* no pll assigned until the prepare hook picks one */
	radeon_crtc->pll_id = -1;
	drm_crtc_helper_add(&radeon_crtc->base, &atombios_helper_funcs);
}