/*
 * Copyright 2007-8 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 */
27 #include <drm/drm_crtc_helper.h>
28 #include <drm/radeon_drm.h>
29 #include <drm/drm_fixed.h>
32 #include "atom-bits.h"
34 static void atombios_overscan_setup(struct drm_crtc
*crtc
,
35 struct drm_display_mode
*mode
,
36 struct drm_display_mode
*adjusted_mode
)
38 struct drm_device
*dev
= crtc
->dev
;
39 struct radeon_device
*rdev
= dev
->dev_private
;
40 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
41 SET_CRTC_OVERSCAN_PS_ALLOCATION args
;
42 int index
= GetIndexIntoMasterTable(COMMAND
, SetCRTC_OverScan
);
45 memset(&args
, 0, sizeof(args
));
47 args
.ucCRTC
= radeon_crtc
->crtc_id
;
49 switch (radeon_crtc
->rmx_type
) {
51 args
.usOverscanTop
= (adjusted_mode
->crtc_vdisplay
- mode
->crtc_vdisplay
) / 2;
52 args
.usOverscanBottom
= (adjusted_mode
->crtc_vdisplay
- mode
->crtc_vdisplay
) / 2;
53 args
.usOverscanLeft
= (adjusted_mode
->crtc_hdisplay
- mode
->crtc_hdisplay
) / 2;
54 args
.usOverscanRight
= (adjusted_mode
->crtc_hdisplay
- mode
->crtc_hdisplay
) / 2;
57 a1
= mode
->crtc_vdisplay
* adjusted_mode
->crtc_hdisplay
;
58 a2
= adjusted_mode
->crtc_vdisplay
* mode
->crtc_hdisplay
;
61 args
.usOverscanLeft
= (adjusted_mode
->crtc_hdisplay
- (a2
/ mode
->crtc_vdisplay
)) / 2;
62 args
.usOverscanRight
= (adjusted_mode
->crtc_hdisplay
- (a2
/ mode
->crtc_vdisplay
)) / 2;
64 args
.usOverscanLeft
= (adjusted_mode
->crtc_vdisplay
- (a1
/ mode
->crtc_hdisplay
)) / 2;
65 args
.usOverscanRight
= (adjusted_mode
->crtc_vdisplay
- (a1
/ mode
->crtc_hdisplay
)) / 2;
70 args
.usOverscanRight
= radeon_crtc
->h_border
;
71 args
.usOverscanLeft
= radeon_crtc
->h_border
;
72 args
.usOverscanBottom
= radeon_crtc
->v_border
;
73 args
.usOverscanTop
= radeon_crtc
->v_border
;
76 atom_execute_table(rdev
->mode_info
.atom_context
, index
, (uint32_t *)&args
);
79 static void atombios_scaler_setup(struct drm_crtc
*crtc
)
81 struct drm_device
*dev
= crtc
->dev
;
82 struct radeon_device
*rdev
= dev
->dev_private
;
83 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
84 ENABLE_SCALER_PS_ALLOCATION args
;
85 int index
= GetIndexIntoMasterTable(COMMAND
, EnableScaler
);
87 /* fixme - fill in enc_priv for atom dac */
88 enum radeon_tv_std tv_std
= TV_STD_NTSC
;
89 bool is_tv
= false, is_cv
= false;
90 struct drm_encoder
*encoder
;
92 if (!ASIC_IS_AVIVO(rdev
) && radeon_crtc
->crtc_id
)
95 list_for_each_entry(encoder
, &dev
->mode_config
.encoder_list
, head
) {
97 if (encoder
->crtc
== crtc
) {
98 struct radeon_encoder
*radeon_encoder
= to_radeon_encoder(encoder
);
99 if (radeon_encoder
->active_device
& ATOM_DEVICE_TV_SUPPORT
) {
100 struct radeon_encoder_atom_dac
*tv_dac
= radeon_encoder
->enc_priv
;
101 tv_std
= tv_dac
->tv_std
;
107 memset(&args
, 0, sizeof(args
));
109 args
.ucScaler
= radeon_crtc
->crtc_id
;
115 args
.ucTVStandard
= ATOM_TV_NTSC
;
118 args
.ucTVStandard
= ATOM_TV_PAL
;
121 args
.ucTVStandard
= ATOM_TV_PALM
;
124 args
.ucTVStandard
= ATOM_TV_PAL60
;
127 args
.ucTVStandard
= ATOM_TV_NTSCJ
;
129 case TV_STD_SCART_PAL
:
130 args
.ucTVStandard
= ATOM_TV_PAL
; /* ??? */
133 args
.ucTVStandard
= ATOM_TV_SECAM
;
136 args
.ucTVStandard
= ATOM_TV_PALCN
;
139 args
.ucEnable
= SCALER_ENABLE_MULTITAP_MODE
;
141 args
.ucTVStandard
= ATOM_TV_CV
;
142 args
.ucEnable
= SCALER_ENABLE_MULTITAP_MODE
;
144 switch (radeon_crtc
->rmx_type
) {
146 args
.ucEnable
= ATOM_SCALER_EXPANSION
;
149 args
.ucEnable
= ATOM_SCALER_CENTER
;
152 args
.ucEnable
= ATOM_SCALER_EXPANSION
;
155 if (ASIC_IS_AVIVO(rdev
))
156 args
.ucEnable
= ATOM_SCALER_DISABLE
;
158 args
.ucEnable
= ATOM_SCALER_CENTER
;
162 atom_execute_table(rdev
->mode_info
.atom_context
, index
, (uint32_t *)&args
);
164 && rdev
->family
>= CHIP_RV515
&& rdev
->family
<= CHIP_R580
) {
165 atom_rv515_force_tv_scaler(rdev
, radeon_crtc
);
169 static void atombios_lock_crtc(struct drm_crtc
*crtc
, int lock
)
171 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
172 struct drm_device
*dev
= crtc
->dev
;
173 struct radeon_device
*rdev
= dev
->dev_private
;
175 GetIndexIntoMasterTable(COMMAND
, UpdateCRTC_DoubleBufferRegisters
);
176 ENABLE_CRTC_PS_ALLOCATION args
;
178 memset(&args
, 0, sizeof(args
));
180 args
.ucCRTC
= radeon_crtc
->crtc_id
;
181 args
.ucEnable
= lock
;
183 atom_execute_table(rdev
->mode_info
.atom_context
, index
, (uint32_t *)&args
);
186 static void atombios_enable_crtc(struct drm_crtc
*crtc
, int state
)
188 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
189 struct drm_device
*dev
= crtc
->dev
;
190 struct radeon_device
*rdev
= dev
->dev_private
;
191 int index
= GetIndexIntoMasterTable(COMMAND
, EnableCRTC
);
192 ENABLE_CRTC_PS_ALLOCATION args
;
194 memset(&args
, 0, sizeof(args
));
196 args
.ucCRTC
= radeon_crtc
->crtc_id
;
197 args
.ucEnable
= state
;
199 atom_execute_table(rdev
->mode_info
.atom_context
, index
, (uint32_t *)&args
);
202 static void atombios_enable_crtc_memreq(struct drm_crtc
*crtc
, int state
)
204 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
205 struct drm_device
*dev
= crtc
->dev
;
206 struct radeon_device
*rdev
= dev
->dev_private
;
207 int index
= GetIndexIntoMasterTable(COMMAND
, EnableCRTCMemReq
);
208 ENABLE_CRTC_PS_ALLOCATION args
;
210 memset(&args
, 0, sizeof(args
));
212 args
.ucCRTC
= radeon_crtc
->crtc_id
;
213 args
.ucEnable
= state
;
215 atom_execute_table(rdev
->mode_info
.atom_context
, index
, (uint32_t *)&args
);
218 static void atombios_blank_crtc(struct drm_crtc
*crtc
, int state
)
220 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
221 struct drm_device
*dev
= crtc
->dev
;
222 struct radeon_device
*rdev
= dev
->dev_private
;
223 int index
= GetIndexIntoMasterTable(COMMAND
, BlankCRTC
);
224 BLANK_CRTC_PS_ALLOCATION args
;
226 memset(&args
, 0, sizeof(args
));
228 args
.ucCRTC
= radeon_crtc
->crtc_id
;
229 args
.ucBlanking
= state
;
231 atom_execute_table(rdev
->mode_info
.atom_context
, index
, (uint32_t *)&args
);
234 void atombios_crtc_dpms(struct drm_crtc
*crtc
, int mode
)
236 struct drm_device
*dev
= crtc
->dev
;
237 struct radeon_device
*rdev
= dev
->dev_private
;
238 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
241 case DRM_MODE_DPMS_ON
:
242 radeon_crtc
->enabled
= true;
243 /* adjust pm to dpms changes BEFORE enabling crtcs */
244 radeon_pm_compute_clocks(rdev
);
245 atombios_enable_crtc(crtc
, ATOM_ENABLE
);
246 if (ASIC_IS_DCE3(rdev
))
247 atombios_enable_crtc_memreq(crtc
, ATOM_ENABLE
);
248 atombios_blank_crtc(crtc
, ATOM_DISABLE
);
249 drm_vblank_post_modeset(dev
, radeon_crtc
->crtc_id
);
250 radeon_crtc_load_lut(crtc
);
252 case DRM_MODE_DPMS_STANDBY
:
253 case DRM_MODE_DPMS_SUSPEND
:
254 case DRM_MODE_DPMS_OFF
:
255 drm_vblank_pre_modeset(dev
, radeon_crtc
->crtc_id
);
256 atombios_blank_crtc(crtc
, ATOM_ENABLE
);
257 if (ASIC_IS_DCE3(rdev
))
258 atombios_enable_crtc_memreq(crtc
, ATOM_DISABLE
);
259 atombios_enable_crtc(crtc
, ATOM_DISABLE
);
260 radeon_crtc
->enabled
= false;
261 /* adjust pm to dpms changes AFTER disabling crtcs */
262 radeon_pm_compute_clocks(rdev
);
268 atombios_set_crtc_dtd_timing(struct drm_crtc
*crtc
,
269 struct drm_display_mode
*mode
)
271 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
272 struct drm_device
*dev
= crtc
->dev
;
273 struct radeon_device
*rdev
= dev
->dev_private
;
274 SET_CRTC_USING_DTD_TIMING_PARAMETERS args
;
275 int index
= GetIndexIntoMasterTable(COMMAND
, SetCRTC_UsingDTDTiming
);
278 memset(&args
, 0, sizeof(args
));
279 args
.usH_Size
= cpu_to_le16(mode
->crtc_hdisplay
- (radeon_crtc
->h_border
* 2));
280 args
.usH_Blanking_Time
=
281 cpu_to_le16(mode
->crtc_hblank_end
- mode
->crtc_hdisplay
+ (radeon_crtc
->h_border
* 2));
282 args
.usV_Size
= cpu_to_le16(mode
->crtc_vdisplay
- (radeon_crtc
->v_border
* 2));
283 args
.usV_Blanking_Time
=
284 cpu_to_le16(mode
->crtc_vblank_end
- mode
->crtc_vdisplay
+ (radeon_crtc
->v_border
* 2));
285 args
.usH_SyncOffset
=
286 cpu_to_le16(mode
->crtc_hsync_start
- mode
->crtc_hdisplay
+ radeon_crtc
->h_border
);
288 cpu_to_le16(mode
->crtc_hsync_end
- mode
->crtc_hsync_start
);
289 args
.usV_SyncOffset
=
290 cpu_to_le16(mode
->crtc_vsync_start
- mode
->crtc_vdisplay
+ radeon_crtc
->v_border
);
292 cpu_to_le16(mode
->crtc_vsync_end
- mode
->crtc_vsync_start
);
293 args
.ucH_Border
= radeon_crtc
->h_border
;
294 args
.ucV_Border
= radeon_crtc
->v_border
;
296 if (mode
->flags
& DRM_MODE_FLAG_NVSYNC
)
297 misc
|= ATOM_VSYNC_POLARITY
;
298 if (mode
->flags
& DRM_MODE_FLAG_NHSYNC
)
299 misc
|= ATOM_HSYNC_POLARITY
;
300 if (mode
->flags
& DRM_MODE_FLAG_CSYNC
)
301 misc
|= ATOM_COMPOSITESYNC
;
302 if (mode
->flags
& DRM_MODE_FLAG_INTERLACE
)
303 misc
|= ATOM_INTERLACE
;
304 if (mode
->flags
& DRM_MODE_FLAG_DBLSCAN
)
305 misc
|= ATOM_DOUBLE_CLOCK_MODE
;
307 args
.susModeMiscInfo
.usAccess
= cpu_to_le16(misc
);
308 args
.ucCRTC
= radeon_crtc
->crtc_id
;
310 atom_execute_table(rdev
->mode_info
.atom_context
, index
, (uint32_t *)&args
);
313 static void atombios_crtc_set_timing(struct drm_crtc
*crtc
,
314 struct drm_display_mode
*mode
)
316 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
317 struct drm_device
*dev
= crtc
->dev
;
318 struct radeon_device
*rdev
= dev
->dev_private
;
319 SET_CRTC_TIMING_PARAMETERS_PS_ALLOCATION args
;
320 int index
= GetIndexIntoMasterTable(COMMAND
, SetCRTC_Timing
);
323 memset(&args
, 0, sizeof(args
));
324 args
.usH_Total
= cpu_to_le16(mode
->crtc_htotal
);
325 args
.usH_Disp
= cpu_to_le16(mode
->crtc_hdisplay
);
326 args
.usH_SyncStart
= cpu_to_le16(mode
->crtc_hsync_start
);
328 cpu_to_le16(mode
->crtc_hsync_end
- mode
->crtc_hsync_start
);
329 args
.usV_Total
= cpu_to_le16(mode
->crtc_vtotal
);
330 args
.usV_Disp
= cpu_to_le16(mode
->crtc_vdisplay
);
331 args
.usV_SyncStart
= cpu_to_le16(mode
->crtc_vsync_start
);
333 cpu_to_le16(mode
->crtc_vsync_end
- mode
->crtc_vsync_start
);
335 args
.ucOverscanRight
= radeon_crtc
->h_border
;
336 args
.ucOverscanLeft
= radeon_crtc
->h_border
;
337 args
.ucOverscanBottom
= radeon_crtc
->v_border
;
338 args
.ucOverscanTop
= radeon_crtc
->v_border
;
340 if (mode
->flags
& DRM_MODE_FLAG_NVSYNC
)
341 misc
|= ATOM_VSYNC_POLARITY
;
342 if (mode
->flags
& DRM_MODE_FLAG_NHSYNC
)
343 misc
|= ATOM_HSYNC_POLARITY
;
344 if (mode
->flags
& DRM_MODE_FLAG_CSYNC
)
345 misc
|= ATOM_COMPOSITESYNC
;
346 if (mode
->flags
& DRM_MODE_FLAG_INTERLACE
)
347 misc
|= ATOM_INTERLACE
;
348 if (mode
->flags
& DRM_MODE_FLAG_DBLSCAN
)
349 misc
|= ATOM_DOUBLE_CLOCK_MODE
;
351 args
.susModeMiscInfo
.usAccess
= cpu_to_le16(misc
);
352 args
.ucCRTC
= radeon_crtc
->crtc_id
;
354 atom_execute_table(rdev
->mode_info
.atom_context
, index
, (uint32_t *)&args
);
357 static void atombios_disable_ss(struct drm_crtc
*crtc
)
359 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
360 struct drm_device
*dev
= crtc
->dev
;
361 struct radeon_device
*rdev
= dev
->dev_private
;
364 if (ASIC_IS_DCE4(rdev
)) {
365 switch (radeon_crtc
->pll_id
) {
367 ss_cntl
= RREG32(EVERGREEN_P1PLL_SS_CNTL
);
368 ss_cntl
&= ~EVERGREEN_PxPLL_SS_EN
;
369 WREG32(EVERGREEN_P1PLL_SS_CNTL
, ss_cntl
);
372 ss_cntl
= RREG32(EVERGREEN_P2PLL_SS_CNTL
);
373 ss_cntl
&= ~EVERGREEN_PxPLL_SS_EN
;
374 WREG32(EVERGREEN_P2PLL_SS_CNTL
, ss_cntl
);
377 case ATOM_PPLL_INVALID
:
380 } else if (ASIC_IS_AVIVO(rdev
)) {
381 switch (radeon_crtc
->pll_id
) {
383 ss_cntl
= RREG32(AVIVO_P1PLL_INT_SS_CNTL
);
385 WREG32(AVIVO_P1PLL_INT_SS_CNTL
, ss_cntl
);
388 ss_cntl
= RREG32(AVIVO_P2PLL_INT_SS_CNTL
);
390 WREG32(AVIVO_P2PLL_INT_SS_CNTL
, ss_cntl
);
393 case ATOM_PPLL_INVALID
:
400 union atom_enable_ss
{
401 ENABLE_LVDS_SS_PARAMETERS legacy
;
402 ENABLE_SPREAD_SPECTRUM_ON_PPLL_PS_ALLOCATION v1
;
405 static void atombios_enable_ss(struct drm_crtc
*crtc
)
407 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
408 struct drm_device
*dev
= crtc
->dev
;
409 struct radeon_device
*rdev
= dev
->dev_private
;
410 struct drm_encoder
*encoder
= NULL
;
411 struct radeon_encoder
*radeon_encoder
= NULL
;
412 struct radeon_encoder_atom_dig
*dig
= NULL
;
413 int index
= GetIndexIntoMasterTable(COMMAND
, EnableSpreadSpectrumOnPPLL
);
414 union atom_enable_ss args
;
415 uint16_t percentage
= 0;
416 uint8_t type
= 0, step
= 0, delay
= 0, range
= 0;
418 /* XXX add ss support for DCE4 */
419 if (ASIC_IS_DCE4(rdev
))
422 list_for_each_entry(encoder
, &dev
->mode_config
.encoder_list
, head
) {
423 if (encoder
->crtc
== crtc
) {
424 radeon_encoder
= to_radeon_encoder(encoder
);
425 /* only enable spread spectrum on LVDS */
426 if (radeon_encoder
->devices
& (ATOM_DEVICE_LCD_SUPPORT
)) {
427 dig
= radeon_encoder
->enc_priv
;
428 if (dig
&& dig
->ss
) {
429 percentage
= dig
->ss
->percentage
;
430 type
= dig
->ss
->type
;
431 step
= dig
->ss
->step
;
432 delay
= dig
->ss
->delay
;
433 range
= dig
->ss
->range
;
445 memset(&args
, 0, sizeof(args
));
446 if (ASIC_IS_AVIVO(rdev
)) {
447 args
.v1
.usSpreadSpectrumPercentage
= cpu_to_le16(percentage
);
448 args
.v1
.ucSpreadSpectrumType
= type
;
449 args
.v1
.ucSpreadSpectrumStep
= step
;
450 args
.v1
.ucSpreadSpectrumDelay
= delay
;
451 args
.v1
.ucSpreadSpectrumRange
= range
;
452 args
.v1
.ucPpll
= radeon_crtc
->crtc_id
? ATOM_PPLL2
: ATOM_PPLL1
;
453 args
.v1
.ucEnable
= ATOM_ENABLE
;
455 args
.legacy
.usSpreadSpectrumPercentage
= cpu_to_le16(percentage
);
456 args
.legacy
.ucSpreadSpectrumType
= type
;
457 args
.legacy
.ucSpreadSpectrumStepSize_Delay
= (step
& 3) << 2;
458 args
.legacy
.ucSpreadSpectrumStepSize_Delay
|= (delay
& 7) << 4;
459 args
.legacy
.ucEnable
= ATOM_ENABLE
;
461 atom_execute_table(rdev
->mode_info
.atom_context
, index
, (uint32_t *)&args
);
464 union adjust_pixel_clock
{
465 ADJUST_DISPLAY_PLL_PS_ALLOCATION v1
;
466 ADJUST_DISPLAY_PLL_PS_ALLOCATION_V3 v3
;
469 static u32
atombios_adjust_pll(struct drm_crtc
*crtc
,
470 struct drm_display_mode
*mode
,
471 struct radeon_pll
*pll
)
473 struct drm_device
*dev
= crtc
->dev
;
474 struct radeon_device
*rdev
= dev
->dev_private
;
475 struct drm_encoder
*encoder
= NULL
;
476 struct radeon_encoder
*radeon_encoder
= NULL
;
477 u32 adjusted_clock
= mode
->clock
;
478 int encoder_mode
= 0;
479 u32 dp_clock
= mode
->clock
;
482 /* reset the pll flags */
485 /* select the PLL algo */
486 if (ASIC_IS_AVIVO(rdev
)) {
487 if (radeon_new_pll
== 0)
488 pll
->algo
= PLL_ALGO_LEGACY
;
490 pll
->algo
= PLL_ALGO_NEW
;
492 if (radeon_new_pll
== 1)
493 pll
->algo
= PLL_ALGO_NEW
;
495 pll
->algo
= PLL_ALGO_LEGACY
;
498 if (ASIC_IS_AVIVO(rdev
)) {
499 if ((rdev
->family
== CHIP_RS600
) ||
500 (rdev
->family
== CHIP_RS690
) ||
501 (rdev
->family
== CHIP_RS740
))
502 pll
->flags
|= (/*RADEON_PLL_USE_FRAC_FB_DIV |*/
503 RADEON_PLL_PREFER_CLOSEST_LOWER
);
505 if (ASIC_IS_DCE32(rdev
) && mode
->clock
> 200000) /* range limits??? */
506 pll
->flags
|= RADEON_PLL_PREFER_HIGH_FB_DIV
;
508 pll
->flags
|= RADEON_PLL_PREFER_LOW_REF_DIV
;
510 pll
->flags
|= RADEON_PLL_LEGACY
;
512 if (mode
->clock
> 200000) /* range limits??? */
513 pll
->flags
|= RADEON_PLL_PREFER_HIGH_FB_DIV
;
515 pll
->flags
|= RADEON_PLL_PREFER_LOW_REF_DIV
;
519 list_for_each_entry(encoder
, &dev
->mode_config
.encoder_list
, head
) {
520 if (encoder
->crtc
== crtc
) {
521 radeon_encoder
= to_radeon_encoder(encoder
);
522 encoder_mode
= atombios_get_encoder_mode(encoder
);
523 if (radeon_encoder
->devices
& (ATOM_DEVICE_LCD_SUPPORT
| ATOM_DEVICE_DFP_SUPPORT
)) {
524 struct drm_connector
*connector
= radeon_get_connector_for_encoder(encoder
);
526 struct radeon_connector
*radeon_connector
= to_radeon_connector(connector
);
527 struct radeon_connector_atom_dig
*dig_connector
=
528 radeon_connector
->con_priv
;
530 dp_clock
= dig_connector
->dp_clock
;
534 if (ASIC_IS_AVIVO(rdev
)) {
535 /* DVO wants 2x pixel clock if the DVO chip is in 12 bit mode */
536 if (radeon_encoder
->encoder_id
== ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1
)
537 adjusted_clock
= mode
->clock
* 2;
538 if (radeon_encoder
->active_device
& (ATOM_DEVICE_TV_SUPPORT
)) {
539 pll
->algo
= PLL_ALGO_LEGACY
;
540 pll
->flags
|= RADEON_PLL_PREFER_CLOSEST_LOWER
;
542 /* There is some evidence (often anecdotal) that RV515/RV620 LVDS
543 * (on some boards at least) prefers the legacy algo. I'm not
544 * sure whether this should handled generically or on a
545 * case-by-case quirk basis. Both algos should work fine in the
548 if ((radeon_encoder
->active_device
& (ATOM_DEVICE_LCD_SUPPORT
)) &&
549 ((rdev
->family
== CHIP_RV515
) ||
550 (rdev
->family
== CHIP_RV620
))) {
551 /* allow the user to overrride just in case */
552 if (radeon_new_pll
== 1)
553 pll
->algo
= PLL_ALGO_NEW
;
555 pll
->algo
= PLL_ALGO_LEGACY
;
558 if (encoder
->encoder_type
!= DRM_MODE_ENCODER_DAC
)
559 pll
->flags
|= RADEON_PLL_NO_ODD_POST_DIV
;
560 if (encoder
->encoder_type
== DRM_MODE_ENCODER_LVDS
)
561 pll
->flags
|= RADEON_PLL_USE_REF_DIV
;
567 /* DCE3+ has an AdjustDisplayPll that will adjust the pixel clock
568 * accordingly based on the encoder/transmitter to work around
569 * special hw requirements.
571 if (ASIC_IS_DCE3(rdev
)) {
572 union adjust_pixel_clock args
;
576 index
= GetIndexIntoMasterTable(COMMAND
, AdjustDisplayPll
);
577 if (!atom_parse_cmd_header(rdev
->mode_info
.atom_context
, index
, &frev
,
579 return adjusted_clock
;
581 memset(&args
, 0, sizeof(args
));
588 args
.v1
.usPixelClock
= cpu_to_le16(mode
->clock
/ 10);
589 args
.v1
.ucTransmitterID
= radeon_encoder
->encoder_id
;
590 args
.v1
.ucEncodeMode
= encoder_mode
;
591 if (encoder_mode
== ATOM_ENCODER_MODE_DP
) {
592 /* may want to enable SS on DP eventually */
593 /* args.v1.ucConfig |=
594 ADJUST_DISPLAY_CONFIG_SS_ENABLE;*/
595 } else if (encoder_mode
== ATOM_ENCODER_MODE_LVDS
) {
597 ADJUST_DISPLAY_CONFIG_SS_ENABLE
;
600 atom_execute_table(rdev
->mode_info
.atom_context
,
601 index
, (uint32_t *)&args
);
602 adjusted_clock
= le16_to_cpu(args
.v1
.usPixelClock
) * 10;
605 args
.v3
.sInput
.usPixelClock
= cpu_to_le16(mode
->clock
/ 10);
606 args
.v3
.sInput
.ucTransmitterID
= radeon_encoder
->encoder_id
;
607 args
.v3
.sInput
.ucEncodeMode
= encoder_mode
;
608 args
.v3
.sInput
.ucDispPllConfig
= 0;
609 if (radeon_encoder
->devices
& (ATOM_DEVICE_DFP_SUPPORT
)) {
610 struct radeon_encoder_atom_dig
*dig
= radeon_encoder
->enc_priv
;
612 if (encoder_mode
== ATOM_ENCODER_MODE_DP
) {
613 /* may want to enable SS on DP/eDP eventually */
614 /*args.v3.sInput.ucDispPllConfig |=
615 DISPPLL_CONFIG_SS_ENABLE;*/
616 args
.v3
.sInput
.ucDispPllConfig
|=
617 DISPPLL_CONFIG_COHERENT_MODE
;
619 args
.v3
.sInput
.usPixelClock
= cpu_to_le16(dp_clock
/ 10);
621 if (encoder_mode
== ATOM_ENCODER_MODE_HDMI
) {
622 /* deep color support */
623 args
.v3
.sInput
.usPixelClock
=
624 cpu_to_le16((mode
->clock
* bpc
/ 8) / 10);
626 if (dig
->coherent_mode
)
627 args
.v3
.sInput
.ucDispPllConfig
|=
628 DISPPLL_CONFIG_COHERENT_MODE
;
629 if (mode
->clock
> 165000)
630 args
.v3
.sInput
.ucDispPllConfig
|=
631 DISPPLL_CONFIG_DUAL_LINK
;
633 } else if (radeon_encoder
->devices
& (ATOM_DEVICE_LCD_SUPPORT
)) {
634 if (encoder_mode
== ATOM_ENCODER_MODE_DP
) {
635 /* may want to enable SS on DP/eDP eventually */
636 /*args.v3.sInput.ucDispPllConfig |=
637 DISPPLL_CONFIG_SS_ENABLE;*/
638 args
.v3
.sInput
.ucDispPllConfig
|=
639 DISPPLL_CONFIG_COHERENT_MODE
;
641 args
.v3
.sInput
.usPixelClock
= cpu_to_le16(dp_clock
/ 10);
642 } else if (encoder_mode
== ATOM_ENCODER_MODE_LVDS
) {
643 /* want to enable SS on LVDS eventually */
644 /*args.v3.sInput.ucDispPllConfig |=
645 DISPPLL_CONFIG_SS_ENABLE;*/
647 if (mode
->clock
> 165000)
648 args
.v3
.sInput
.ucDispPllConfig
|=
649 DISPPLL_CONFIG_DUAL_LINK
;
652 atom_execute_table(rdev
->mode_info
.atom_context
,
653 index
, (uint32_t *)&args
);
654 adjusted_clock
= le32_to_cpu(args
.v3
.sOutput
.ulDispPllFreq
) * 10;
655 if (args
.v3
.sOutput
.ucRefDiv
) {
656 pll
->flags
|= RADEON_PLL_USE_REF_DIV
;
657 pll
->reference_div
= args
.v3
.sOutput
.ucRefDiv
;
659 if (args
.v3
.sOutput
.ucPostDiv
) {
660 pll
->flags
|= RADEON_PLL_USE_POST_DIV
;
661 pll
->post_div
= args
.v3
.sOutput
.ucPostDiv
;
665 DRM_ERROR("Unknown table version %d %d\n", frev
, crev
);
666 return adjusted_clock
;
670 DRM_ERROR("Unknown table version %d %d\n", frev
, crev
);
671 return adjusted_clock
;
674 return adjusted_clock
;
677 union set_pixel_clock
{
678 SET_PIXEL_CLOCK_PS_ALLOCATION base
;
679 PIXEL_CLOCK_PARAMETERS v1
;
680 PIXEL_CLOCK_PARAMETERS_V2 v2
;
681 PIXEL_CLOCK_PARAMETERS_V3 v3
;
682 PIXEL_CLOCK_PARAMETERS_V5 v5
;
685 static void atombios_crtc_set_dcpll(struct drm_crtc
*crtc
)
687 struct drm_device
*dev
= crtc
->dev
;
688 struct radeon_device
*rdev
= dev
->dev_private
;
691 union set_pixel_clock args
;
693 memset(&args
, 0, sizeof(args
));
695 index
= GetIndexIntoMasterTable(COMMAND
, SetPixelClock
);
696 if (!atom_parse_cmd_header(rdev
->mode_info
.atom_context
, index
, &frev
,
704 /* if the default dcpll clock is specified,
705 * SetPixelClock provides the dividers
707 args
.v5
.ucCRTC
= ATOM_CRTC_INVALID
;
708 args
.v5
.usPixelClock
= rdev
->clock
.default_dispclk
;
709 args
.v5
.ucPpll
= ATOM_DCPLL
;
712 DRM_ERROR("Unknown table version %d %d\n", frev
, crev
);
717 DRM_ERROR("Unknown table version %d %d\n", frev
, crev
);
720 atom_execute_table(rdev
->mode_info
.atom_context
, index
, (uint32_t *)&args
);
723 static void atombios_crtc_program_pll(struct drm_crtc
*crtc
,
734 struct drm_device
*dev
= crtc
->dev
;
735 struct radeon_device
*rdev
= dev
->dev_private
;
737 int index
= GetIndexIntoMasterTable(COMMAND
, SetPixelClock
);
738 union set_pixel_clock args
;
740 memset(&args
, 0, sizeof(args
));
742 if (!atom_parse_cmd_header(rdev
->mode_info
.atom_context
, index
, &frev
,
750 if (clock
== ATOM_DISABLE
)
752 args
.v1
.usPixelClock
= cpu_to_le16(clock
/ 10);
753 args
.v1
.usRefDiv
= cpu_to_le16(ref_div
);
754 args
.v1
.usFbDiv
= cpu_to_le16(fb_div
);
755 args
.v1
.ucFracFbDiv
= frac_fb_div
;
756 args
.v1
.ucPostDiv
= post_div
;
757 args
.v1
.ucPpll
= pll_id
;
758 args
.v1
.ucCRTC
= crtc_id
;
759 args
.v1
.ucRefDivSrc
= 1;
762 args
.v2
.usPixelClock
= cpu_to_le16(clock
/ 10);
763 args
.v2
.usRefDiv
= cpu_to_le16(ref_div
);
764 args
.v2
.usFbDiv
= cpu_to_le16(fb_div
);
765 args
.v2
.ucFracFbDiv
= frac_fb_div
;
766 args
.v2
.ucPostDiv
= post_div
;
767 args
.v2
.ucPpll
= pll_id
;
768 args
.v2
.ucCRTC
= crtc_id
;
769 args
.v2
.ucRefDivSrc
= 1;
772 args
.v3
.usPixelClock
= cpu_to_le16(clock
/ 10);
773 args
.v3
.usRefDiv
= cpu_to_le16(ref_div
);
774 args
.v3
.usFbDiv
= cpu_to_le16(fb_div
);
775 args
.v3
.ucFracFbDiv
= frac_fb_div
;
776 args
.v3
.ucPostDiv
= post_div
;
777 args
.v3
.ucPpll
= pll_id
;
778 args
.v3
.ucMiscInfo
= (pll_id
<< 2);
779 args
.v3
.ucTransmitterId
= encoder_id
;
780 args
.v3
.ucEncoderMode
= encoder_mode
;
783 args
.v5
.ucCRTC
= crtc_id
;
784 args
.v5
.usPixelClock
= cpu_to_le16(clock
/ 10);
785 args
.v5
.ucRefDiv
= ref_div
;
786 args
.v5
.usFbDiv
= cpu_to_le16(fb_div
);
787 args
.v5
.ulFbDivDecFrac
= cpu_to_le32(frac_fb_div
* 100000);
788 args
.v5
.ucPostDiv
= post_div
;
789 args
.v5
.ucMiscInfo
= 0; /* HDMI depth, etc. */
790 args
.v5
.ucTransmitterID
= encoder_id
;
791 args
.v5
.ucEncoderMode
= encoder_mode
;
792 args
.v5
.ucPpll
= pll_id
;
795 DRM_ERROR("Unknown table version %d %d\n", frev
, crev
);
800 DRM_ERROR("Unknown table version %d %d\n", frev
, crev
);
804 atom_execute_table(rdev
->mode_info
.atom_context
, index
, (uint32_t *)&args
);
807 static void atombios_crtc_set_pll(struct drm_crtc
*crtc
, struct drm_display_mode
*mode
)
809 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
810 struct drm_device
*dev
= crtc
->dev
;
811 struct radeon_device
*rdev
= dev
->dev_private
;
812 struct drm_encoder
*encoder
= NULL
;
813 struct radeon_encoder
*radeon_encoder
= NULL
;
814 u32 pll_clock
= mode
->clock
;
815 u32 ref_div
= 0, fb_div
= 0, frac_fb_div
= 0, post_div
= 0;
816 struct radeon_pll
*pll
;
818 int encoder_mode
= 0;
820 list_for_each_entry(encoder
, &dev
->mode_config
.encoder_list
, head
) {
821 if (encoder
->crtc
== crtc
) {
822 radeon_encoder
= to_radeon_encoder(encoder
);
823 encoder_mode
= atombios_get_encoder_mode(encoder
);
831 switch (radeon_crtc
->pll_id
) {
833 pll
= &rdev
->clock
.p1pll
;
836 pll
= &rdev
->clock
.p2pll
;
839 case ATOM_PPLL_INVALID
:
841 pll
= &rdev
->clock
.dcpll
;
845 /* adjust pixel clock as needed */
846 adjusted_clock
= atombios_adjust_pll(crtc
, mode
, pll
);
848 radeon_compute_pll(pll
, adjusted_clock
, &pll_clock
, &fb_div
, &frac_fb_div
,
849 &ref_div
, &post_div
);
851 atombios_crtc_program_pll(crtc
, radeon_crtc
->crtc_id
, radeon_crtc
->pll_id
,
852 encoder_mode
, radeon_encoder
->encoder_id
, mode
->clock
,
853 ref_div
, fb_div
, frac_fb_div
, post_div
);
857 static int evergreen_crtc_set_base(struct drm_crtc
*crtc
, int x
, int y
,
858 struct drm_framebuffer
*old_fb
)
860 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
861 struct drm_device
*dev
= crtc
->dev
;
862 struct radeon_device
*rdev
= dev
->dev_private
;
863 struct radeon_framebuffer
*radeon_fb
;
864 struct drm_gem_object
*obj
;
865 struct radeon_bo
*rbo
;
866 uint64_t fb_location
;
867 uint32_t fb_format
, fb_pitch_pixels
, tiling_flags
;
872 DRM_DEBUG_KMS("No FB bound\n");
876 radeon_fb
= to_radeon_framebuffer(crtc
->fb
);
878 /* Pin framebuffer & get tilling informations */
879 obj
= radeon_fb
->obj
;
880 rbo
= obj
->driver_private
;
881 r
= radeon_bo_reserve(rbo
, false);
882 if (unlikely(r
!= 0))
884 r
= radeon_bo_pin(rbo
, RADEON_GEM_DOMAIN_VRAM
, &fb_location
);
885 if (unlikely(r
!= 0)) {
886 radeon_bo_unreserve(rbo
);
889 radeon_bo_get_tiling_flags(rbo
, &tiling_flags
, NULL
);
890 radeon_bo_unreserve(rbo
);
892 switch (crtc
->fb
->bits_per_pixel
) {
894 fb_format
= (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_8BPP
) |
895 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_INDEXED
));
898 fb_format
= (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP
) |
899 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB1555
));
902 fb_format
= (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP
) |
903 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB565
));
907 fb_format
= (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_32BPP
) |
908 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB8888
));
911 DRM_ERROR("Unsupported screen depth %d\n",
912 crtc
->fb
->bits_per_pixel
);
916 if (tiling_flags
& RADEON_TILING_MACRO
)
917 fb_format
|= EVERGREEN_GRPH_ARRAY_MODE(EVERGREEN_GRPH_ARRAY_2D_TILED_THIN1
);
918 else if (tiling_flags
& RADEON_TILING_MICRO
)
919 fb_format
|= EVERGREEN_GRPH_ARRAY_MODE(EVERGREEN_GRPH_ARRAY_1D_TILED_THIN1
);
921 switch (radeon_crtc
->crtc_id
) {
923 WREG32(AVIVO_D1VGA_CONTROL
, 0);
926 WREG32(AVIVO_D2VGA_CONTROL
, 0);
929 WREG32(EVERGREEN_D3VGA_CONTROL
, 0);
932 WREG32(EVERGREEN_D4VGA_CONTROL
, 0);
935 WREG32(EVERGREEN_D5VGA_CONTROL
, 0);
938 WREG32(EVERGREEN_D6VGA_CONTROL
, 0);
944 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH
+ radeon_crtc
->crtc_offset
,
945 upper_32_bits(fb_location
));
946 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH
+ radeon_crtc
->crtc_offset
,
947 upper_32_bits(fb_location
));
948 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS
+ radeon_crtc
->crtc_offset
,
949 (u32
)fb_location
& EVERGREEN_GRPH_SURFACE_ADDRESS_MASK
);
950 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS
+ radeon_crtc
->crtc_offset
,
951 (u32
) fb_location
& EVERGREEN_GRPH_SURFACE_ADDRESS_MASK
);
952 WREG32(EVERGREEN_GRPH_CONTROL
+ radeon_crtc
->crtc_offset
, fb_format
);
954 WREG32(EVERGREEN_GRPH_SURFACE_OFFSET_X
+ radeon_crtc
->crtc_offset
, 0);
955 WREG32(EVERGREEN_GRPH_SURFACE_OFFSET_Y
+ radeon_crtc
->crtc_offset
, 0);
956 WREG32(EVERGREEN_GRPH_X_START
+ radeon_crtc
->crtc_offset
, 0);
957 WREG32(EVERGREEN_GRPH_Y_START
+ radeon_crtc
->crtc_offset
, 0);
958 WREG32(EVERGREEN_GRPH_X_END
+ radeon_crtc
->crtc_offset
, crtc
->fb
->width
);
959 WREG32(EVERGREEN_GRPH_Y_END
+ radeon_crtc
->crtc_offset
, crtc
->fb
->height
);
961 fb_pitch_pixels
= crtc
->fb
->pitch
/ (crtc
->fb
->bits_per_pixel
/ 8);
962 WREG32(EVERGREEN_GRPH_PITCH
+ radeon_crtc
->crtc_offset
, fb_pitch_pixels
);
963 WREG32(EVERGREEN_GRPH_ENABLE
+ radeon_crtc
->crtc_offset
, 1);
965 WREG32(EVERGREEN_DESKTOP_HEIGHT
+ radeon_crtc
->crtc_offset
,
966 crtc
->mode
.vdisplay
);
969 WREG32(EVERGREEN_VIEWPORT_START
+ radeon_crtc
->crtc_offset
,
971 WREG32(EVERGREEN_VIEWPORT_SIZE
+ radeon_crtc
->crtc_offset
,
972 (crtc
->mode
.hdisplay
<< 16) | crtc
->mode
.vdisplay
);
974 if (crtc
->mode
.flags
& DRM_MODE_FLAG_INTERLACE
)
975 WREG32(EVERGREEN_DATA_FORMAT
+ radeon_crtc
->crtc_offset
,
976 EVERGREEN_INTERLEAVE_EN
);
978 WREG32(EVERGREEN_DATA_FORMAT
+ radeon_crtc
->crtc_offset
, 0);
980 if (old_fb
&& old_fb
!= crtc
->fb
) {
981 radeon_fb
= to_radeon_framebuffer(old_fb
);
982 rbo
= radeon_fb
->obj
->driver_private
;
983 r
= radeon_bo_reserve(rbo
, false);
984 if (unlikely(r
!= 0))
986 radeon_bo_unpin(rbo
);
987 radeon_bo_unreserve(rbo
);
990 /* Bytes per pixel may have changed */
991 radeon_bandwidth_update(rdev
);
996 static int avivo_crtc_set_base(struct drm_crtc
*crtc
, int x
, int y
,
997 struct drm_framebuffer
*old_fb
)
999 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
1000 struct drm_device
*dev
= crtc
->dev
;
1001 struct radeon_device
*rdev
= dev
->dev_private
;
1002 struct radeon_framebuffer
*radeon_fb
;
1003 struct drm_gem_object
*obj
;
1004 struct radeon_bo
*rbo
;
1005 uint64_t fb_location
;
1006 uint32_t fb_format
, fb_pitch_pixels
, tiling_flags
;
1011 DRM_DEBUG_KMS("No FB bound\n");
1015 radeon_fb
= to_radeon_framebuffer(crtc
->fb
);
1017 /* Pin framebuffer & get tilling informations */
1018 obj
= radeon_fb
->obj
;
1019 rbo
= obj
->driver_private
;
1020 r
= radeon_bo_reserve(rbo
, false);
1021 if (unlikely(r
!= 0))
1023 r
= radeon_bo_pin(rbo
, RADEON_GEM_DOMAIN_VRAM
, &fb_location
);
1024 if (unlikely(r
!= 0)) {
1025 radeon_bo_unreserve(rbo
);
1028 radeon_bo_get_tiling_flags(rbo
, &tiling_flags
, NULL
);
1029 radeon_bo_unreserve(rbo
);
1031 switch (crtc
->fb
->bits_per_pixel
) {
1034 AVIVO_D1GRPH_CONTROL_DEPTH_8BPP
|
1035 AVIVO_D1GRPH_CONTROL_8BPP_INDEXED
;
1039 AVIVO_D1GRPH_CONTROL_DEPTH_16BPP
|
1040 AVIVO_D1GRPH_CONTROL_16BPP_ARGB1555
;
1044 AVIVO_D1GRPH_CONTROL_DEPTH_16BPP
|
1045 AVIVO_D1GRPH_CONTROL_16BPP_RGB565
;
1050 AVIVO_D1GRPH_CONTROL_DEPTH_32BPP
|
1051 AVIVO_D1GRPH_CONTROL_32BPP_ARGB8888
;
1054 DRM_ERROR("Unsupported screen depth %d\n",
1055 crtc
->fb
->bits_per_pixel
);
1059 if (rdev
->family
>= CHIP_R600
) {
1060 if (tiling_flags
& RADEON_TILING_MACRO
)
1061 fb_format
|= R600_D1GRPH_ARRAY_MODE_2D_TILED_THIN1
;
1062 else if (tiling_flags
& RADEON_TILING_MICRO
)
1063 fb_format
|= R600_D1GRPH_ARRAY_MODE_1D_TILED_THIN1
;
1065 if (tiling_flags
& RADEON_TILING_MACRO
)
1066 fb_format
|= AVIVO_D1GRPH_MACRO_ADDRESS_MODE
;
1068 if (tiling_flags
& RADEON_TILING_MICRO
)
1069 fb_format
|= AVIVO_D1GRPH_TILED
;
1072 if (radeon_crtc
->crtc_id
== 0)
1073 WREG32(AVIVO_D1VGA_CONTROL
, 0);
1075 WREG32(AVIVO_D2VGA_CONTROL
, 0);
1077 if (rdev
->family
>= CHIP_RV770
) {
1078 if (radeon_crtc
->crtc_id
) {
1079 WREG32(R700_D2GRPH_PRIMARY_SURFACE_ADDRESS_HIGH
, upper_32_bits(fb_location
));
1080 WREG32(R700_D2GRPH_SECONDARY_SURFACE_ADDRESS_HIGH
, upper_32_bits(fb_location
));
1082 WREG32(R700_D1GRPH_PRIMARY_SURFACE_ADDRESS_HIGH
, upper_32_bits(fb_location
));
1083 WREG32(R700_D1GRPH_SECONDARY_SURFACE_ADDRESS_HIGH
, upper_32_bits(fb_location
));
1086 WREG32(AVIVO_D1GRPH_PRIMARY_SURFACE_ADDRESS
+ radeon_crtc
->crtc_offset
,
1088 WREG32(AVIVO_D1GRPH_SECONDARY_SURFACE_ADDRESS
+
1089 radeon_crtc
->crtc_offset
, (u32
) fb_location
);
1090 WREG32(AVIVO_D1GRPH_CONTROL
+ radeon_crtc
->crtc_offset
, fb_format
);
1092 WREG32(AVIVO_D1GRPH_SURFACE_OFFSET_X
+ radeon_crtc
->crtc_offset
, 0);
1093 WREG32(AVIVO_D1GRPH_SURFACE_OFFSET_Y
+ radeon_crtc
->crtc_offset
, 0);
1094 WREG32(AVIVO_D1GRPH_X_START
+ radeon_crtc
->crtc_offset
, 0);
1095 WREG32(AVIVO_D1GRPH_Y_START
+ radeon_crtc
->crtc_offset
, 0);
1096 WREG32(AVIVO_D1GRPH_X_END
+ radeon_crtc
->crtc_offset
, crtc
->fb
->width
);
1097 WREG32(AVIVO_D1GRPH_Y_END
+ radeon_crtc
->crtc_offset
, crtc
->fb
->height
);
1099 fb_pitch_pixels
= crtc
->fb
->pitch
/ (crtc
->fb
->bits_per_pixel
/ 8);
1100 WREG32(AVIVO_D1GRPH_PITCH
+ radeon_crtc
->crtc_offset
, fb_pitch_pixels
);
1101 WREG32(AVIVO_D1GRPH_ENABLE
+ radeon_crtc
->crtc_offset
, 1);
1103 WREG32(AVIVO_D1MODE_DESKTOP_HEIGHT
+ radeon_crtc
->crtc_offset
,
1104 crtc
->mode
.vdisplay
);
1107 WREG32(AVIVO_D1MODE_VIEWPORT_START
+ radeon_crtc
->crtc_offset
,
1109 WREG32(AVIVO_D1MODE_VIEWPORT_SIZE
+ radeon_crtc
->crtc_offset
,
1110 (crtc
->mode
.hdisplay
<< 16) | crtc
->mode
.vdisplay
);
1112 if (crtc
->mode
.flags
& DRM_MODE_FLAG_INTERLACE
)
1113 WREG32(AVIVO_D1MODE_DATA_FORMAT
+ radeon_crtc
->crtc_offset
,
1114 AVIVO_D1MODE_INTERLEAVE_EN
);
1116 WREG32(AVIVO_D1MODE_DATA_FORMAT
+ radeon_crtc
->crtc_offset
, 0);
1118 if (old_fb
&& old_fb
!= crtc
->fb
) {
1119 radeon_fb
= to_radeon_framebuffer(old_fb
);
1120 rbo
= radeon_fb
->obj
->driver_private
;
1121 r
= radeon_bo_reserve(rbo
, false);
1122 if (unlikely(r
!= 0))
1124 radeon_bo_unpin(rbo
);
1125 radeon_bo_unreserve(rbo
);
1128 /* Bytes per pixel may have changed */
1129 radeon_bandwidth_update(rdev
);
/*
 * atombios_crtc_set_base - set the scanout (framebuffer base) for a CRTC
 * @crtc: DRM CRTC to program
 * @x, @y: upper-left corner of the scanout area within the framebuffer
 * @old_fb: previously bound framebuffer (may be unpinned by the callee)
 *
 * Dispatches to the ASIC-family-specific set_base implementation:
 * DCE4 (evergreen) parts, AVIVO (r5xx-r7xx) parts, and the legacy
 * path for everything older.  Returns whatever the family-specific
 * helper returns (0 on success per DRM convention).
 */
int atombios_crtc_set_base(struct drm_crtc *crtc, int x, int y,
			   struct drm_framebuffer *old_fb)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;

	if (ASIC_IS_DCE4(rdev))
		return evergreen_crtc_set_base(crtc, x, y, old_fb);
	else if (ASIC_IS_AVIVO(rdev))
		return avivo_crtc_set_base(crtc, x, y, old_fb);
	else
		return radeon_crtc_set_base(crtc, x, y, old_fb);
}
/* properly set additional regs when using atombios */
static void radeon_legacy_atom_fixup(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	u32 disp_merge_cntl;

	/* On pre-AVIVO ASICs the AtomBIOS tables do not touch these
	 * legacy registers, so clear the RGB offset enable bit for the
	 * CRTC being set up.  Read-modify-write order matters here.
	 */
	switch (radeon_crtc->crtc_id) {
	case 0:
		disp_merge_cntl = RREG32(RADEON_DISP_MERGE_CNTL);
		disp_merge_cntl &= ~RADEON_DISP_RGB_OFFSET_EN;
		WREG32(RADEON_DISP_MERGE_CNTL, disp_merge_cntl);
		break;
	case 1:
		disp_merge_cntl = RREG32(RADEON_DISP2_MERGE_CNTL);
		disp_merge_cntl &= ~RADEON_DISP2_RGB_OFFSET_EN;
		WREG32(RADEON_DISP2_MERGE_CNTL, disp_merge_cntl);
		/* mirror CRTC2 sync start/width into the FP2 regs so the
		 * flat-panel path sees the same timing
		 */
		WREG32(RADEON_FP_H2_SYNC_STRT_WID, RREG32(RADEON_CRTC2_H_SYNC_STRT_WID));
		WREG32(RADEON_FP_V2_SYNC_STRT_WID, RREG32(RADEON_CRTC2_V_SYNC_STRT_WID));
		break;
	}
}
/*
 * radeon_atom_pick_pll - choose a PLL for the given CRTC
 * @crtc: CRTC that needs a pixel clock source
 *
 * On DCE4: if this CRTC drives a DP encoder and an external DP clock
 * is available, no PPLL is needed (ATOM_PPLL_INVALID); otherwise pick
 * a PPLL not already claimed by another CRTC.  On pre-DCE4 ASICs the
 * PLL is fixed per CRTC, so the crtc_id itself is returned.
 *
 * NOTE(review): the extractor dropped the bare-return lines inside the
 * DCE4 branch; the ATOM_PPLL1/ATOM_PPLL2 fallthrough below is the
 * conventional completion of the visible pll_in_use logic — confirm
 * against the upstream file.
 */
static int radeon_atom_pick_pll(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct drm_encoder *test_encoder;
	struct drm_crtc *test_crtc;
	uint32_t pll_in_use = 0;	/* bitmask of PPLL ids already taken */

	if (ASIC_IS_DCE4(rdev)) {
		/* if crtc is driving DP and we have an ext clock, use that */
		list_for_each_entry(test_encoder, &dev->mode_config.encoder_list, head) {
			if (test_encoder->crtc && (test_encoder->crtc == crtc)) {
				if (atombios_get_encoder_mode(test_encoder) == ATOM_ENCODER_MODE_DP) {
					if (rdev->clock.dp_extclk)
						return ATOM_PPLL_INVALID;
				}
			}
		}

		/* otherwise, pick one of the plls */
		list_for_each_entry(test_crtc, &dev->mode_config.crtc_list, head) {
			struct radeon_crtc *radeon_test_crtc;

			if (crtc == test_crtc)
				continue;

			radeon_test_crtc = to_radeon_crtc(test_crtc);
			if ((radeon_test_crtc->pll_id >= ATOM_PPLL1) &&
			    (radeon_test_crtc->pll_id <= ATOM_PPLL2))
				pll_in_use |= (1 << radeon_test_crtc->pll_id);
		}
		/* bit 1 corresponds to ATOM_PPLL1; prefer it when free */
		if (!(pll_in_use & 1))
			return ATOM_PPLL1;
		return ATOM_PPLL2;
	} else
		/* pre-DCE4: PLL is hardwired to the CRTC id */
		return radeon_crtc->crtc_id;
}
/*
 * atombios_crtc_mode_set - program a full mode on the CRTC via AtomBIOS
 * @crtc: CRTC to program
 * @mode: requested user mode
 * @adjusted_mode: mode after fixup (what the hardware is actually fed)
 * @x, @y: scanout origin within the framebuffer
 * @old_fb: previously bound framebuffer
 *
 * Sequence is order-critical: disable spread spectrum, set the PLL(s),
 * re-enable SS, then program timing, scanout base, overscan and scaler.
 * Returns 0 (DRM mode_set convention).
 */
int atombios_crtc_mode_set(struct drm_crtc *crtc,
			   struct drm_display_mode *mode,
			   struct drm_display_mode *adjusted_mode,
			   int x, int y, struct drm_framebuffer *old_fb)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct drm_encoder *encoder;
	bool is_tvcv = false;	/* CRTC feeds a TV or component-video encoder */

	/* scan encoders on this crtc for TV/CV outputs; they need the
	 * non-DTD timing path on AVIVO parts
	 */
	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		if (encoder->crtc == crtc) {
			struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
			if (radeon_encoder->active_device &
			    (ATOM_DEVICE_TV_SUPPORT | ATOM_DEVICE_CV_SUPPORT))
				is_tvcv = true;
		}
	}

	atombios_disable_ss(crtc);
	/* always set DCPLL */
	if (ASIC_IS_DCE4(rdev))
		atombios_crtc_set_dcpll(crtc);
	atombios_crtc_set_pll(crtc, adjusted_mode);
	atombios_enable_ss(crtc);

	if (ASIC_IS_DCE4(rdev))
		atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
	else if (ASIC_IS_AVIVO(rdev)) {
		if (is_tvcv)
			atombios_crtc_set_timing(crtc, adjusted_mode);
		else
			atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
	} else {
		atombios_crtc_set_timing(crtc, adjusted_mode);
		/* DTD timing table only exists for the primary CRTC here */
		if (radeon_crtc->crtc_id == 0)
			atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
		radeon_legacy_atom_fixup(crtc);
	}
	atombios_crtc_set_base(crtc, x, y, old_fb);
	atombios_overscan_setup(crtc, mode, adjusted_mode);
	atombios_scaler_setup(crtc);
	return 0;
}
1259 static bool atombios_crtc_mode_fixup(struct drm_crtc
*crtc
,
1260 struct drm_display_mode
*mode
,
1261 struct drm_display_mode
*adjusted_mode
)
1263 struct drm_device
*dev
= crtc
->dev
;
1264 struct radeon_device
*rdev
= dev
->dev_private
;
1266 /* adjust pm to upcoming mode change */
1267 radeon_pm_compute_clocks(rdev
);
1269 if (!radeon_crtc_scaling_mode_fixup(crtc
, mode
, adjusted_mode
))
/*
 * atombios_crtc_prepare - DRM helper .prepare hook
 *
 * Picks the PLL this CRTC will use, then locks the CRTC and powers it
 * off so the subsequent mode_set can program it safely.  The
 * lock-before-dpms-off order is intentional.
 */
static void atombios_crtc_prepare(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);

	/* pick pll */
	radeon_crtc->pll_id = radeon_atom_pick_pll(crtc);

	atombios_lock_crtc(crtc, ATOM_ENABLE);
	atombios_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
}
/*
 * atombios_crtc_commit - DRM helper .commit hook
 *
 * Mirror of atombios_crtc_prepare(): power the CRTC back on, then
 * release the lock taken in prepare (ATOM_DISABLE == unlock here).
 */
static void atombios_crtc_commit(struct drm_crtc *crtc)
{
	atombios_crtc_dpms(crtc, DRM_MODE_DPMS_ON);
	atombios_lock_crtc(crtc, ATOM_DISABLE);
}
/*
 * atombios_crtc_disable - DRM helper .disable hook
 *
 * Powers the CRTC off and, if it owned a programmable PPLL, disables
 * that PLL and marks the CRTC as having no PLL (-1) so
 * radeon_atom_pick_pll() can reassign it later.
 */
static void atombios_crtc_disable(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);

	atombios_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);

	switch (radeon_crtc->pll_id) {
	case ATOM_PPLL1:
	case ATOM_PPLL2:
		/* disable the ppll */
		atombios_crtc_program_pll(crtc, radeon_crtc->crtc_id, radeon_crtc->pll_id,
					  0, 0, ATOM_DISABLE, 0, 0, 0, 0);
		break;
	default:
		/* no PPLL owned (e.g. external DP clock) — nothing to tear down */
		break;
	}
	radeon_crtc->pll_id = -1;
}
/* CRTC helper vtable wiring the AtomBIOS implementations into the DRM
 * KMS helper framework; registered in radeon_atombios_init_crtc().
 */
static const struct drm_crtc_helper_funcs atombios_helper_funcs = {
	.dpms = atombios_crtc_dpms,
	.mode_fixup = atombios_crtc_mode_fixup,
	.mode_set = atombios_crtc_mode_set,
	.mode_set_base = atombios_crtc_set_base,
	.prepare = atombios_crtc_prepare,
	.commit = atombios_crtc_commit,
	.load_lut = radeon_crtc_load_lut,	/* LUT load shared with legacy path */
	.disable = atombios_crtc_disable,
};
/*
 * radeon_atombios_init_crtc - per-CRTC AtomBIOS initialization
 * @dev: DRM device
 * @radeon_crtc: CRTC private to initialize
 *
 * Sets the register-block offset used to address this CRTC's register
 * bank (DCE4 has six fixed per-CRTC banks; older AVIVO parts derive
 * CRTC2's offset from the D2-D1 register distance), clears the PLL
 * assignment, and registers the AtomBIOS helper vtable.
 *
 * NOTE(review): the case labels in the DCE4 switch were lost in
 * extraction; the 0..5 mapping below follows the CRTC0..CRTC5 offset
 * sequence visible in the source — confirm against upstream.
 */
void radeon_atombios_init_crtc(struct drm_device *dev,
			       struct radeon_crtc *radeon_crtc)
{
	struct radeon_device *rdev = dev->dev_private;

	if (ASIC_IS_DCE4(rdev)) {
		switch (radeon_crtc->crtc_id) {
		case 0:
		default:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC0_REGISTER_OFFSET;
			break;
		case 1:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC1_REGISTER_OFFSET;
			break;
		case 2:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC2_REGISTER_OFFSET;
			break;
		case 3:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC3_REGISTER_OFFSET;
			break;
		case 4:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC4_REGISTER_OFFSET;
			break;
		case 5:
			radeon_crtc->crtc_offset = EVERGREEN_CRTC5_REGISTER_OFFSET;
			break;
		}
	} else {
		if (radeon_crtc->crtc_id == 1)
			radeon_crtc->crtc_offset =
				AVIVO_D2CRTC_H_TOTAL - AVIVO_D1CRTC_H_TOTAL;
		else
			radeon_crtc->crtc_offset = 0;
	}
	radeon_crtc->pll_id = -1;	/* no PLL assigned until prepare() */
	drm_crtc_helper_add(&radeon_crtc->base, &atombios_helper_funcs);
}