drivers/gpu/drm/radeon/atombios_crtc.c
1 /*
2 * Copyright 2007-8 Advanced Micro Devices, Inc.
3 * Copyright 2008 Red Hat Inc.
4 *
5 * Permission is hereby granted, free of charge, to any person obtaining a
6 * copy of this software and associated documentation files (the "Software"),
7 * to deal in the Software without restriction, including without limitation
8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9 * and/or sell copies of the Software, and to permit persons to whom the
10 * Software is furnished to do so, subject to the following conditions:
11 *
12 * The above copyright notice and this permission notice shall be included in
13 * all copies or substantial portions of the Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
19 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
20 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
21 * OTHER DEALINGS IN THE SOFTWARE.
22 *
23 * Authors: Dave Airlie
24 * Alex Deucher
25 */
26 #include <drm/drmP.h>
27 #include <drm/drm_crtc_helper.h>
28 #include <drm/radeon_drm.h>
29 #include <drm/drm_fixed.h>
30 #include "radeon.h"
31 #include "atom.h"
32 #include "atom-bits.h"
33
34 static void atombios_overscan_setup(struct drm_crtc *crtc,
35 struct drm_display_mode *mode,
36 struct drm_display_mode *adjusted_mode)
37 {
38 struct drm_device *dev = crtc->dev;
39 struct radeon_device *rdev = dev->dev_private;
40 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
41 SET_CRTC_OVERSCAN_PS_ALLOCATION args;
42 int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_OverScan);
43 int a1, a2;
44
45 memset(&args, 0, sizeof(args));
46
47 args.ucCRTC = radeon_crtc->crtc_id;
48
49 switch (radeon_crtc->rmx_type) {
50 case RMX_CENTER:
51 args.usOverscanTop = cpu_to_le16((adjusted_mode->crtc_vdisplay - mode->crtc_vdisplay) / 2);
52 args.usOverscanBottom = cpu_to_le16((adjusted_mode->crtc_vdisplay - mode->crtc_vdisplay) / 2);
53 args.usOverscanLeft = cpu_to_le16((adjusted_mode->crtc_hdisplay - mode->crtc_hdisplay) / 2);
54 args.usOverscanRight = cpu_to_le16((adjusted_mode->crtc_hdisplay - mode->crtc_hdisplay) / 2);
55 break;
56 case RMX_ASPECT:
57 a1 = mode->crtc_vdisplay * adjusted_mode->crtc_hdisplay;
58 a2 = adjusted_mode->crtc_vdisplay * mode->crtc_hdisplay;
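/* a1 and a2 are the two aspect ratios cross-multiplied so they can be
 * compared without fractions: a1 > a2 means the adjusted (native) mode
 * is wider than the requested mode, so pillarbox it with left/right
 * overscan; a2 > a1 means it is taller, so letterbox it with
 * top/bottom overscan.
 */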
59
60 if (a1 > a2) {
61 args.usOverscanLeft = cpu_to_le16((adjusted_mode->crtc_hdisplay - (a2 / mode->crtc_vdisplay)) / 2);
62 args.usOverscanRight = cpu_to_le16((adjusted_mode->crtc_hdisplay - (a2 / mode->crtc_vdisplay)) / 2);
63 } else if (a2 > a1) {
64 args.usOverscanTop = cpu_to_le16((adjusted_mode->crtc_vdisplay - (a1 / mode->crtc_hdisplay)) / 2);
65 args.usOverscanBottom = cpu_to_le16((adjusted_mode->crtc_vdisplay - (a1 / mode->crtc_hdisplay)) / 2);
66 }
67 break;
68 case RMX_FULL:
69 default:
70 args.usOverscanRight = cpu_to_le16(radeon_crtc->h_border);
71 args.usOverscanLeft = cpu_to_le16(radeon_crtc->h_border);
72 args.usOverscanBottom = cpu_to_le16(radeon_crtc->v_border);
73 args.usOverscanTop = cpu_to_le16(radeon_crtc->v_border);
74 break;
75 }
76 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
77 }
78
79 static void atombios_scaler_setup(struct drm_crtc *crtc)
80 {
81 struct drm_device *dev = crtc->dev;
82 struct radeon_device *rdev = dev->dev_private;
83 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
84 ENABLE_SCALER_PS_ALLOCATION args;
85 int index = GetIndexIntoMasterTable(COMMAND, EnableScaler);
86 struct radeon_encoder *radeon_encoder =
87 to_radeon_encoder(radeon_crtc->encoder);
88 /* fixme - fill in enc_priv for atom dac */
89 enum radeon_tv_std tv_std = TV_STD_NTSC;
90 bool is_tv = false, is_cv = false;
91
92 if (!ASIC_IS_AVIVO(rdev) && radeon_crtc->crtc_id)
93 return;
94
95 if (radeon_encoder->active_device & ATOM_DEVICE_TV_SUPPORT) {
96 struct radeon_encoder_atom_dac *tv_dac = radeon_encoder->enc_priv;
97 tv_std = tv_dac->tv_std;
98 is_tv = true;
99 }
100
101 memset(&args, 0, sizeof(args));
102
103 args.ucScaler = radeon_crtc->crtc_id;
104
105 if (is_tv) {
106 switch (tv_std) {
107 case TV_STD_NTSC:
108 default:
109 args.ucTVStandard = ATOM_TV_NTSC;
110 break;
111 case TV_STD_PAL:
112 args.ucTVStandard = ATOM_TV_PAL;
113 break;
114 case TV_STD_PAL_M:
115 args.ucTVStandard = ATOM_TV_PALM;
116 break;
117 case TV_STD_PAL_60:
118 args.ucTVStandard = ATOM_TV_PAL60;
119 break;
120 case TV_STD_NTSC_J:
121 args.ucTVStandard = ATOM_TV_NTSCJ;
122 break;
123 case TV_STD_SCART_PAL:
124 args.ucTVStandard = ATOM_TV_PAL; /* ??? */
125 break;
126 case TV_STD_SECAM:
127 args.ucTVStandard = ATOM_TV_SECAM;
128 break;
129 case TV_STD_PAL_CN:
130 args.ucTVStandard = ATOM_TV_PALCN;
131 break;
132 }
133 args.ucEnable = SCALER_ENABLE_MULTITAP_MODE;
134 } else if (is_cv) {
135 args.ucTVStandard = ATOM_TV_CV;
136 args.ucEnable = SCALER_ENABLE_MULTITAP_MODE;
137 } else {
138 switch (radeon_crtc->rmx_type) {
139 case RMX_FULL:
140 args.ucEnable = ATOM_SCALER_EXPANSION;
141 break;
142 case RMX_CENTER:
143 args.ucEnable = ATOM_SCALER_CENTER;
144 break;
145 case RMX_ASPECT:
146 args.ucEnable = ATOM_SCALER_EXPANSION;
147 break;
148 default:
149 if (ASIC_IS_AVIVO(rdev))
150 args.ucEnable = ATOM_SCALER_DISABLE;
151 else
152 args.ucEnable = ATOM_SCALER_CENTER;
153 break;
154 }
155 }
156 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
157 if ((is_tv || is_cv)
158 && rdev->family >= CHIP_RV515 && rdev->family <= CHIP_R580) {
159 atom_rv515_force_tv_scaler(rdev, radeon_crtc);
160 }
161 }
162
163 static void atombios_lock_crtc(struct drm_crtc *crtc, int lock)
164 {
165 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
166 struct drm_device *dev = crtc->dev;
167 struct radeon_device *rdev = dev->dev_private;
168 int index =
169 GetIndexIntoMasterTable(COMMAND, UpdateCRTC_DoubleBufferRegisters);
170 ENABLE_CRTC_PS_ALLOCATION args;
171
172 memset(&args, 0, sizeof(args));
173
174 args.ucCRTC = radeon_crtc->crtc_id;
175 args.ucEnable = lock;
176
177 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
178 }
179
180 static void atombios_enable_crtc(struct drm_crtc *crtc, int state)
181 {
182 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
183 struct drm_device *dev = crtc->dev;
184 struct radeon_device *rdev = dev->dev_private;
185 int index = GetIndexIntoMasterTable(COMMAND, EnableCRTC);
186 ENABLE_CRTC_PS_ALLOCATION args;
187
188 memset(&args, 0, sizeof(args));
189
190 args.ucCRTC = radeon_crtc->crtc_id;
191 args.ucEnable = state;
192
193 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
194 }
195
196 static void atombios_enable_crtc_memreq(struct drm_crtc *crtc, int state)
197 {
198 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
199 struct drm_device *dev = crtc->dev;
200 struct radeon_device *rdev = dev->dev_private;
201 int index = GetIndexIntoMasterTable(COMMAND, EnableCRTCMemReq);
202 ENABLE_CRTC_PS_ALLOCATION args;
203
204 memset(&args, 0, sizeof(args));
205
206 args.ucCRTC = radeon_crtc->crtc_id;
207 args.ucEnable = state;
208
209 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
210 }
211
212 static void atombios_blank_crtc(struct drm_crtc *crtc, int state)
213 {
214 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
215 struct drm_device *dev = crtc->dev;
216 struct radeon_device *rdev = dev->dev_private;
217 int index = GetIndexIntoMasterTable(COMMAND, BlankCRTC);
218 BLANK_CRTC_PS_ALLOCATION args;
219
220 memset(&args, 0, sizeof(args));
221
222 args.ucCRTC = radeon_crtc->crtc_id;
223 args.ucBlanking = state;
224
225 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
226 }
227
228 static void atombios_powergate_crtc(struct drm_crtc *crtc, int state)
229 {
230 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
231 struct drm_device *dev = crtc->dev;
232 struct radeon_device *rdev = dev->dev_private;
233 int index = GetIndexIntoMasterTable(COMMAND, EnableDispPowerGating);
234 ENABLE_DISP_POWER_GATING_PARAMETERS_V2_1 args;
235
236 memset(&args, 0, sizeof(args));
237
238 args.ucDispPipeId = radeon_crtc->crtc_id;
239 args.ucEnable = state;
240
241 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
242 }
243
244 void atombios_crtc_dpms(struct drm_crtc *crtc, int mode)
245 {
246 struct drm_device *dev = crtc->dev;
247 struct radeon_device *rdev = dev->dev_private;
248 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
249
250 switch (mode) {
251 case DRM_MODE_DPMS_ON:
252 radeon_crtc->enabled = true;
253 /* adjust pm to dpms changes BEFORE enabling crtcs */
254 radeon_pm_compute_clocks(rdev);
255 if (ASIC_IS_DCE6(rdev) && !radeon_crtc->in_mode_set)
256 atombios_powergate_crtc(crtc, ATOM_DISABLE);
257 atombios_enable_crtc(crtc, ATOM_ENABLE);
258 if (ASIC_IS_DCE3(rdev) && !ASIC_IS_DCE6(rdev))
259 atombios_enable_crtc_memreq(crtc, ATOM_ENABLE);
260 atombios_blank_crtc(crtc, ATOM_DISABLE);
261 drm_vblank_post_modeset(dev, radeon_crtc->crtc_id);
262 radeon_crtc_load_lut(crtc);
263 break;
264 case DRM_MODE_DPMS_STANDBY:
265 case DRM_MODE_DPMS_SUSPEND:
266 case DRM_MODE_DPMS_OFF:
267 drm_vblank_pre_modeset(dev, radeon_crtc->crtc_id);
268 if (radeon_crtc->enabled)
269 atombios_blank_crtc(crtc, ATOM_ENABLE);
270 if (ASIC_IS_DCE3(rdev) && !ASIC_IS_DCE6(rdev))
271 atombios_enable_crtc_memreq(crtc, ATOM_DISABLE);
272 atombios_enable_crtc(crtc, ATOM_DISABLE);
273 radeon_crtc->enabled = false;
274 if (ASIC_IS_DCE6(rdev) && !radeon_crtc->in_mode_set)
275 atombios_powergate_crtc(crtc, ATOM_ENABLE);
276 /* adjust pm to dpms changes AFTER disabling crtcs */
277 radeon_pm_compute_clocks(rdev);
278 break;
279 }
280 }
281
282 static void
283 atombios_set_crtc_dtd_timing(struct drm_crtc *crtc,
284 struct drm_display_mode *mode)
285 {
286 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
287 struct drm_device *dev = crtc->dev;
288 struct radeon_device *rdev = dev->dev_private;
289 SET_CRTC_USING_DTD_TIMING_PARAMETERS args;
290 int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_UsingDTDTiming);
291 u16 misc = 0;
292
293 memset(&args, 0, sizeof(args));
294 args.usH_Size = cpu_to_le16(mode->crtc_hdisplay - (radeon_crtc->h_border * 2));
295 args.usH_Blanking_Time =
296 cpu_to_le16(mode->crtc_hblank_end - mode->crtc_hdisplay + (radeon_crtc->h_border * 2));
297 args.usV_Size = cpu_to_le16(mode->crtc_vdisplay - (radeon_crtc->v_border * 2));
298 args.usV_Blanking_Time =
299 cpu_to_le16(mode->crtc_vblank_end - mode->crtc_vdisplay + (radeon_crtc->v_border * 2));
300 args.usH_SyncOffset =
301 cpu_to_le16(mode->crtc_hsync_start - mode->crtc_hdisplay + radeon_crtc->h_border);
302 args.usH_SyncWidth =
303 cpu_to_le16(mode->crtc_hsync_end - mode->crtc_hsync_start);
304 args.usV_SyncOffset =
305 cpu_to_le16(mode->crtc_vsync_start - mode->crtc_vdisplay + radeon_crtc->v_border);
306 args.usV_SyncWidth =
307 cpu_to_le16(mode->crtc_vsync_end - mode->crtc_vsync_start);
308 args.ucH_Border = radeon_crtc->h_border;
309 args.ucV_Border = radeon_crtc->v_border;
310
311 if (mode->flags & DRM_MODE_FLAG_NVSYNC)
312 misc |= ATOM_VSYNC_POLARITY;
313 if (mode->flags & DRM_MODE_FLAG_NHSYNC)
314 misc |= ATOM_HSYNC_POLARITY;
315 if (mode->flags & DRM_MODE_FLAG_CSYNC)
316 misc |= ATOM_COMPOSITESYNC;
317 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
318 misc |= ATOM_INTERLACE;
319 if (mode->flags & DRM_MODE_FLAG_DBLSCAN)
320 misc |= ATOM_DOUBLE_CLOCK_MODE;
321
322 args.susModeMiscInfo.usAccess = cpu_to_le16(misc);
323 args.ucCRTC = radeon_crtc->crtc_id;
324
325 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
326 }
327
328 static void atombios_crtc_set_timing(struct drm_crtc *crtc,
329 struct drm_display_mode *mode)
330 {
331 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
332 struct drm_device *dev = crtc->dev;
333 struct radeon_device *rdev = dev->dev_private;
334 SET_CRTC_TIMING_PARAMETERS_PS_ALLOCATION args;
335 int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_Timing);
336 u16 misc = 0;
337
338 memset(&args, 0, sizeof(args));
339 args.usH_Total = cpu_to_le16(mode->crtc_htotal);
340 args.usH_Disp = cpu_to_le16(mode->crtc_hdisplay);
341 args.usH_SyncStart = cpu_to_le16(mode->crtc_hsync_start);
342 args.usH_SyncWidth =
343 cpu_to_le16(mode->crtc_hsync_end - mode->crtc_hsync_start);
344 args.usV_Total = cpu_to_le16(mode->crtc_vtotal);
345 args.usV_Disp = cpu_to_le16(mode->crtc_vdisplay);
346 args.usV_SyncStart = cpu_to_le16(mode->crtc_vsync_start);
347 args.usV_SyncWidth =
348 cpu_to_le16(mode->crtc_vsync_end - mode->crtc_vsync_start);
349
350 args.ucOverscanRight = radeon_crtc->h_border;
351 args.ucOverscanLeft = radeon_crtc->h_border;
352 args.ucOverscanBottom = radeon_crtc->v_border;
353 args.ucOverscanTop = radeon_crtc->v_border;
354
355 if (mode->flags & DRM_MODE_FLAG_NVSYNC)
356 misc |= ATOM_VSYNC_POLARITY;
357 if (mode->flags & DRM_MODE_FLAG_NHSYNC)
358 misc |= ATOM_HSYNC_POLARITY;
359 if (mode->flags & DRM_MODE_FLAG_CSYNC)
360 misc |= ATOM_COMPOSITESYNC;
361 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
362 misc |= ATOM_INTERLACE;
363 if (mode->flags & DRM_MODE_FLAG_DBLSCAN)
364 misc |= ATOM_DOUBLE_CLOCK_MODE;
365
366 args.susModeMiscInfo.usAccess = cpu_to_le16(misc);
367 args.ucCRTC = radeon_crtc->crtc_id;
368
369 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
370 }
371
372 static void atombios_disable_ss(struct radeon_device *rdev, int pll_id)
373 {
374 u32 ss_cntl;
375
376 if (ASIC_IS_DCE4(rdev)) {
377 switch (pll_id) {
378 case ATOM_PPLL1:
379 ss_cntl = RREG32(EVERGREEN_P1PLL_SS_CNTL);
380 ss_cntl &= ~EVERGREEN_PxPLL_SS_EN;
381 WREG32(EVERGREEN_P1PLL_SS_CNTL, ss_cntl);
382 break;
383 case ATOM_PPLL2:
384 ss_cntl = RREG32(EVERGREEN_P2PLL_SS_CNTL);
385 ss_cntl &= ~EVERGREEN_PxPLL_SS_EN;
386 WREG32(EVERGREEN_P2PLL_SS_CNTL, ss_cntl);
387 break;
388 case ATOM_DCPLL:
389 case ATOM_PPLL_INVALID:
390 return;
391 }
392 } else if (ASIC_IS_AVIVO(rdev)) {
393 switch (pll_id) {
394 case ATOM_PPLL1:
395 ss_cntl = RREG32(AVIVO_P1PLL_INT_SS_CNTL);
396 ss_cntl &= ~1;
397 WREG32(AVIVO_P1PLL_INT_SS_CNTL, ss_cntl);
398 break;
399 case ATOM_PPLL2:
400 ss_cntl = RREG32(AVIVO_P2PLL_INT_SS_CNTL);
401 ss_cntl &= ~1;
402 WREG32(AVIVO_P2PLL_INT_SS_CNTL, ss_cntl);
403 break;
404 case ATOM_DCPLL:
405 case ATOM_PPLL_INVALID:
406 return;
407 }
408 }
409 }
410
411
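/* The EnableSpreadSpectrumOnPPLL table has taken several parameter
 * layouts over the years; atombios_crtc_program_ss() below fills in
 * whichever member matches the display engine generation (the legacy
 * LVDS layouts on pre-DCE3 parts, then v1 on DCE3, v2 on DCE4 and v3
 * on DCE5).
 */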
412 union atom_enable_ss {
413 ENABLE_LVDS_SS_PARAMETERS lvds_ss;
414 ENABLE_LVDS_SS_PARAMETERS_V2 lvds_ss_2;
415 ENABLE_SPREAD_SPECTRUM_ON_PPLL_PS_ALLOCATION v1;
416 ENABLE_SPREAD_SPECTRUM_ON_PPLL_V2 v2;
417 ENABLE_SPREAD_SPECTRUM_ON_PPLL_V3 v3;
418 };
419
420 static void atombios_crtc_program_ss(struct radeon_device *rdev,
421 int enable,
422 int pll_id,
423 int crtc_id,
424 struct radeon_atom_ss *ss)
425 {
426 unsigned i;
427 int index = GetIndexIntoMasterTable(COMMAND, EnableSpreadSpectrumOnPPLL);
428 union atom_enable_ss args;
429
430 if (!enable) {
431 for (i = 0; i < rdev->num_crtc; i++) {
432 if (rdev->mode_info.crtcs[i] &&
433 rdev->mode_info.crtcs[i]->enabled &&
434 i != crtc_id &&
435 pll_id == rdev->mode_info.crtcs[i]->pll_id) {
436 /* another crtc is using this pll; don't turn
437 * off spread spectrum, as it might turn off
438 * the display on the active crtc
439 */
440 return;
441 }
442 }
443 }
444
445 memset(&args, 0, sizeof(args));
446
447 if (ASIC_IS_DCE5(rdev)) {
448 args.v3.usSpreadSpectrumAmountFrac = cpu_to_le16(0);
449 args.v3.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
450 switch (pll_id) {
451 case ATOM_PPLL1:
452 args.v3.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V3_P1PLL;
453 break;
454 case ATOM_PPLL2:
455 args.v3.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V3_P2PLL;
456 break;
457 case ATOM_DCPLL:
458 args.v3.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V3_DCPLL;
459 break;
460 case ATOM_PPLL_INVALID:
461 return;
462 }
463 args.v3.usSpreadSpectrumAmount = cpu_to_le16(ss->amount);
464 args.v3.usSpreadSpectrumStep = cpu_to_le16(ss->step);
465 args.v3.ucEnable = enable;
466 if ((ss->percentage == 0) || (ss->type & ATOM_EXTERNAL_SS_MASK) || ASIC_IS_DCE61(rdev))
467 args.v3.ucEnable = ATOM_DISABLE;
468 } else if (ASIC_IS_DCE4(rdev)) {
469 args.v2.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage);
470 args.v2.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
471 switch (pll_id) {
472 case ATOM_PPLL1:
473 args.v2.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V2_P1PLL;
474 break;
475 case ATOM_PPLL2:
476 args.v2.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V2_P2PLL;
477 break;
478 case ATOM_DCPLL:
479 args.v2.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V2_DCPLL;
480 break;
481 case ATOM_PPLL_INVALID:
482 return;
483 }
484 args.v2.usSpreadSpectrumAmount = cpu_to_le16(ss->amount);
485 args.v2.usSpreadSpectrumStep = cpu_to_le16(ss->step);
486 args.v2.ucEnable = enable;
487 if ((ss->percentage == 0) || (ss->type & ATOM_EXTERNAL_SS_MASK) || ASIC_IS_DCE41(rdev))
488 args.v2.ucEnable = ATOM_DISABLE;
489 } else if (ASIC_IS_DCE3(rdev)) {
490 args.v1.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage);
491 args.v1.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
492 args.v1.ucSpreadSpectrumStep = ss->step;
493 args.v1.ucSpreadSpectrumDelay = ss->delay;
494 args.v1.ucSpreadSpectrumRange = ss->range;
495 args.v1.ucPpll = pll_id;
496 args.v1.ucEnable = enable;
497 } else if (ASIC_IS_AVIVO(rdev)) {
498 if ((enable == ATOM_DISABLE) || (ss->percentage == 0) ||
499 (ss->type & ATOM_EXTERNAL_SS_MASK)) {
500 atombios_disable_ss(rdev, pll_id);
501 return;
502 }
503 args.lvds_ss_2.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage);
504 args.lvds_ss_2.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
505 args.lvds_ss_2.ucSpreadSpectrumStep = ss->step;
506 args.lvds_ss_2.ucSpreadSpectrumDelay = ss->delay;
507 args.lvds_ss_2.ucSpreadSpectrumRange = ss->range;
508 args.lvds_ss_2.ucEnable = enable;
509 } else {
510 if ((enable == ATOM_DISABLE) || (ss->percentage == 0) ||
511 (ss->type & ATOM_EXTERNAL_SS_MASK)) {
512 atombios_disable_ss(rdev, pll_id);
513 return;
514 }
515 args.lvds_ss.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage);
516 args.lvds_ss.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
517 args.lvds_ss.ucSpreadSpectrumStepSize_Delay = (ss->step & 3) << 2;
518 args.lvds_ss.ucSpreadSpectrumStepSize_Delay |= (ss->delay & 7) << 4;
519 args.lvds_ss.ucEnable = enable;
520 }
521 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
522 }
523
524 union adjust_pixel_clock {
525 ADJUST_DISPLAY_PLL_PS_ALLOCATION v1;
526 ADJUST_DISPLAY_PLL_PS_ALLOCATION_V3 v3;
527 };
528
529 static u32 atombios_adjust_pll(struct drm_crtc *crtc,
530 struct drm_display_mode *mode)
531 {
532 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
533 struct drm_device *dev = crtc->dev;
534 struct radeon_device *rdev = dev->dev_private;
535 struct drm_encoder *encoder = radeon_crtc->encoder;
536 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
537 struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
538 u32 adjusted_clock = mode->clock;
539 int encoder_mode = atombios_get_encoder_mode(encoder);
540 u32 dp_clock = mode->clock;
541 int bpc = radeon_get_monitor_bpc(connector);
542 bool is_duallink = radeon_dig_monitor_is_duallink(encoder, mode->clock);
543
544 /* reset the pll flags */
545 radeon_crtc->pll_flags = 0;
546
547 if (ASIC_IS_AVIVO(rdev)) {
548 if ((rdev->family == CHIP_RS600) ||
549 (rdev->family == CHIP_RS690) ||
550 (rdev->family == CHIP_RS740))
551 radeon_crtc->pll_flags |= (/*RADEON_PLL_USE_FRAC_FB_DIV |*/
552 RADEON_PLL_PREFER_CLOSEST_LOWER);
553
554 if (ASIC_IS_DCE32(rdev) && mode->clock > 200000) /* range limits??? */
555 radeon_crtc->pll_flags |= RADEON_PLL_PREFER_HIGH_FB_DIV;
556 else
557 radeon_crtc->pll_flags |= RADEON_PLL_PREFER_LOW_REF_DIV;
558
559 if (rdev->family < CHIP_RV770)
560 radeon_crtc->pll_flags |= RADEON_PLL_PREFER_MINM_OVER_MAXP;
561 /* use frac fb div on APUs */
562 if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE61(rdev))
563 radeon_crtc->pll_flags |= RADEON_PLL_USE_FRAC_FB_DIV;
564 } else {
565 radeon_crtc->pll_flags |= RADEON_PLL_LEGACY;
566
567 if (mode->clock > 200000) /* range limits??? */
568 radeon_crtc->pll_flags |= RADEON_PLL_PREFER_HIGH_FB_DIV;
569 else
570 radeon_crtc->pll_flags |= RADEON_PLL_PREFER_LOW_REF_DIV;
571 }
572
573 if ((radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT | ATOM_DEVICE_DFP_SUPPORT)) ||
574 (radeon_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) {
575 if (connector) {
576 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
577 struct radeon_connector_atom_dig *dig_connector =
578 radeon_connector->con_priv;
579
580 dp_clock = dig_connector->dp_clock;
581 }
582 }
583
584 /* use recommended ref_div for ss */
585 if (radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
586 if (radeon_crtc->ss_enabled) {
587 if (radeon_crtc->ss.refdiv) {
588 radeon_crtc->pll_flags |= RADEON_PLL_USE_REF_DIV;
589 radeon_crtc->pll_reference_div = radeon_crtc->ss.refdiv;
590 if (ASIC_IS_AVIVO(rdev))
591 radeon_crtc->pll_flags |= RADEON_PLL_USE_FRAC_FB_DIV;
592 }
593 }
594 }
595
596 if (ASIC_IS_AVIVO(rdev)) {
597 /* DVO wants 2x pixel clock if the DVO chip is in 12 bit mode */
598 if (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1)
599 adjusted_clock = mode->clock * 2;
600 if (radeon_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
601 radeon_crtc->pll_flags |= RADEON_PLL_PREFER_CLOSEST_LOWER;
602 if (radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
603 radeon_crtc->pll_flags |= RADEON_PLL_IS_LCD;
604 } else {
605 if (encoder->encoder_type != DRM_MODE_ENCODER_DAC)
606 radeon_crtc->pll_flags |= RADEON_PLL_NO_ODD_POST_DIV;
607 if (encoder->encoder_type == DRM_MODE_ENCODER_LVDS)
608 radeon_crtc->pll_flags |= RADEON_PLL_USE_REF_DIV;
609 }
610
611 /* DCE3+ has an AdjustDisplayPll that will adjust the pixel clock
612 * accordingly based on the encoder/transmitter to work around
613 * special hw requirements.
614 */
615 if (ASIC_IS_DCE3(rdev)) {
616 union adjust_pixel_clock args;
617 u8 frev, crev;
618 int index;
619
620 index = GetIndexIntoMasterTable(COMMAND, AdjustDisplayPll);
621 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev,
622 &crev))
623 return adjusted_clock;
624
625 memset(&args, 0, sizeof(args));
626
627 switch (frev) {
628 case 1:
629 switch (crev) {
630 case 1:
631 case 2:
632 args.v1.usPixelClock = cpu_to_le16(mode->clock / 10);
633 args.v1.ucTransmitterID = radeon_encoder->encoder_id;
634 args.v1.ucEncodeMode = encoder_mode;
635 if (radeon_crtc->ss_enabled && radeon_crtc->ss.percentage)
636 args.v1.ucConfig |=
637 ADJUST_DISPLAY_CONFIG_SS_ENABLE;
638
639 atom_execute_table(rdev->mode_info.atom_context,
640 index, (uint32_t *)&args);
641 adjusted_clock = le16_to_cpu(args.v1.usPixelClock) * 10;
642 break;
643 case 3:
644 args.v3.sInput.usPixelClock = cpu_to_le16(mode->clock / 10);
645 args.v3.sInput.ucTransmitterID = radeon_encoder->encoder_id;
646 args.v3.sInput.ucEncodeMode = encoder_mode;
647 args.v3.sInput.ucDispPllConfig = 0;
648 if (radeon_crtc->ss_enabled && radeon_crtc->ss.percentage)
649 args.v3.sInput.ucDispPllConfig |=
650 DISPPLL_CONFIG_SS_ENABLE;
651 if (ENCODER_MODE_IS_DP(encoder_mode)) {
652 args.v3.sInput.ucDispPllConfig |=
653 DISPPLL_CONFIG_COHERENT_MODE;
654 /* 16200 or 27000 */
655 args.v3.sInput.usPixelClock = cpu_to_le16(dp_clock / 10);
656 } else if (radeon_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
657 struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
658 if (encoder_mode == ATOM_ENCODER_MODE_HDMI)
659 /* deep color support */
660 args.v3.sInput.usPixelClock =
661 cpu_to_le16((mode->clock * bpc / 8) / 10);
662 if (dig->coherent_mode)
663 args.v3.sInput.ucDispPllConfig |=
664 DISPPLL_CONFIG_COHERENT_MODE;
665 if (is_duallink)
666 args.v3.sInput.ucDispPllConfig |=
667 DISPPLL_CONFIG_DUAL_LINK;
668 }
669 if (radeon_encoder_get_dp_bridge_encoder_id(encoder) !=
670 ENCODER_OBJECT_ID_NONE)
671 args.v3.sInput.ucExtTransmitterID =
672 radeon_encoder_get_dp_bridge_encoder_id(encoder);
673 else
674 args.v3.sInput.ucExtTransmitterID = 0;
675
676 atom_execute_table(rdev->mode_info.atom_context,
677 index, (uint32_t *)&args);
678 adjusted_clock = le32_to_cpu(args.v3.sOutput.ulDispPllFreq) * 10;
679 if (args.v3.sOutput.ucRefDiv) {
680 radeon_crtc->pll_flags |= RADEON_PLL_USE_FRAC_FB_DIV;
681 radeon_crtc->pll_flags |= RADEON_PLL_USE_REF_DIV;
682 radeon_crtc->pll_reference_div = args.v3.sOutput.ucRefDiv;
683 }
684 if (args.v3.sOutput.ucPostDiv) {
685 radeon_crtc->pll_flags |= RADEON_PLL_USE_FRAC_FB_DIV;
686 radeon_crtc->pll_flags |= RADEON_PLL_USE_POST_DIV;
687 radeon_crtc->pll_post_div = args.v3.sOutput.ucPostDiv;
688 }
689 break;
690 default:
691 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
692 return adjusted_clock;
693 }
694 break;
695 default:
696 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
697 return adjusted_clock;
698 }
699 }
700 return adjusted_clock;
701 }
702
703 union set_pixel_clock {
704 SET_PIXEL_CLOCK_PS_ALLOCATION base;
705 PIXEL_CLOCK_PARAMETERS v1;
706 PIXEL_CLOCK_PARAMETERS_V2 v2;
707 PIXEL_CLOCK_PARAMETERS_V3 v3;
708 PIXEL_CLOCK_PARAMETERS_V5 v5;
709 PIXEL_CLOCK_PARAMETERS_V6 v6;
710 };
711
712 /* on DCE5, make sure the voltage is high enough to support the
713 * required disp clk.
714 */
715 static void atombios_crtc_set_disp_eng_pll(struct radeon_device *rdev,
716 u32 dispclk)
717 {
718 u8 frev, crev;
719 int index;
720 union set_pixel_clock args;
721
722 memset(&args, 0, sizeof(args));
723
724 index = GetIndexIntoMasterTable(COMMAND, SetPixelClock);
725 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev,
726 &crev))
727 return;
728
729 switch (frev) {
730 case 1:
731 switch (crev) {
732 case 5:
733 /* if the default dcpll clock is specified,
734 * SetPixelClock provides the dividers
735 */
736 args.v5.ucCRTC = ATOM_CRTC_INVALID;
737 args.v5.usPixelClock = cpu_to_le16(dispclk);
738 args.v5.ucPpll = ATOM_DCPLL;
739 break;
740 case 6:
741 /* if the default dcpll clock is specified,
742 * SetPixelClock provides the dividers
743 */
744 args.v6.ulDispEngClkFreq = cpu_to_le32(dispclk);
745 if (ASIC_IS_DCE61(rdev))
746 args.v6.ucPpll = ATOM_EXT_PLL1;
747 else if (ASIC_IS_DCE6(rdev))
748 args.v6.ucPpll = ATOM_PPLL0;
749 else
750 args.v6.ucPpll = ATOM_DCPLL;
751 break;
752 default:
753 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
754 return;
755 }
756 break;
757 default:
758 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
759 return;
760 }
761 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
762 }
763
764 static void atombios_crtc_program_pll(struct drm_crtc *crtc,
765 u32 crtc_id,
766 int pll_id,
767 u32 encoder_mode,
768 u32 encoder_id,
769 u32 clock,
770 u32 ref_div,
771 u32 fb_div,
772 u32 frac_fb_div,
773 u32 post_div,
774 int bpc,
775 bool ss_enabled,
776 struct radeon_atom_ss *ss)
777 {
778 struct drm_device *dev = crtc->dev;
779 struct radeon_device *rdev = dev->dev_private;
780 u8 frev, crev;
781 int index = GetIndexIntoMasterTable(COMMAND, SetPixelClock);
782 union set_pixel_clock args;
783
784 memset(&args, 0, sizeof(args));
785
786 if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev,
787 &crev))
788 return;
789
790 switch (frev) {
791 case 1:
792 switch (crev) {
793 case 1:
794 if (clock == ATOM_DISABLE)
795 return;
796 args.v1.usPixelClock = cpu_to_le16(clock / 10);
797 args.v1.usRefDiv = cpu_to_le16(ref_div);
798 args.v1.usFbDiv = cpu_to_le16(fb_div);
799 args.v1.ucFracFbDiv = frac_fb_div;
800 args.v1.ucPostDiv = post_div;
801 args.v1.ucPpll = pll_id;
802 args.v1.ucCRTC = crtc_id;
803 args.v1.ucRefDivSrc = 1;
804 break;
805 case 2:
806 args.v2.usPixelClock = cpu_to_le16(clock / 10);
807 args.v2.usRefDiv = cpu_to_le16(ref_div);
808 args.v2.usFbDiv = cpu_to_le16(fb_div);
809 args.v2.ucFracFbDiv = frac_fb_div;
810 args.v2.ucPostDiv = post_div;
811 args.v2.ucPpll = pll_id;
812 args.v2.ucCRTC = crtc_id;
813 args.v2.ucRefDivSrc = 1;
814 break;
815 case 3:
816 args.v3.usPixelClock = cpu_to_le16(clock / 10);
817 args.v3.usRefDiv = cpu_to_le16(ref_div);
818 args.v3.usFbDiv = cpu_to_le16(fb_div);
819 args.v3.ucFracFbDiv = frac_fb_div;
820 args.v3.ucPostDiv = post_div;
821 args.v3.ucPpll = pll_id;
822 if (crtc_id == ATOM_CRTC2)
823 args.v3.ucMiscInfo = PIXEL_CLOCK_MISC_CRTC_SEL_CRTC2;
824 else
825 args.v3.ucMiscInfo = PIXEL_CLOCK_MISC_CRTC_SEL_CRTC1;
826 if (ss_enabled && (ss->type & ATOM_EXTERNAL_SS_MASK))
827 args.v3.ucMiscInfo |= PIXEL_CLOCK_MISC_REF_DIV_SRC;
828 args.v3.ucTransmitterId = encoder_id;
829 args.v3.ucEncoderMode = encoder_mode;
830 break;
831 case 5:
832 args.v5.ucCRTC = crtc_id;
833 args.v5.usPixelClock = cpu_to_le16(clock / 10);
834 args.v5.ucRefDiv = ref_div;
835 args.v5.usFbDiv = cpu_to_le16(fb_div);
836 args.v5.ulFbDivDecFrac = cpu_to_le32(frac_fb_div * 100000);
837 args.v5.ucPostDiv = post_div;
838 args.v5.ucMiscInfo = 0; /* HDMI depth, etc. */
839 if (ss_enabled && (ss->type & ATOM_EXTERNAL_SS_MASK))
840 args.v5.ucMiscInfo |= PIXEL_CLOCK_V5_MISC_REF_DIV_SRC;
841 switch (bpc) {
842 case 8:
843 default:
844 args.v5.ucMiscInfo |= PIXEL_CLOCK_V5_MISC_HDMI_24BPP;
845 break;
846 case 10:
847 args.v5.ucMiscInfo |= PIXEL_CLOCK_V5_MISC_HDMI_30BPP;
848 break;
849 }
850 args.v5.ucTransmitterID = encoder_id;
851 args.v5.ucEncoderMode = encoder_mode;
852 args.v5.ucPpll = pll_id;
853 break;
854 case 6:
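/* v6 packs the crtc id into the top byte of the clock parameter */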
855 args.v6.ulDispEngClkFreq = cpu_to_le32(crtc_id << 24 | clock / 10);
856 args.v6.ucRefDiv = ref_div;
857 args.v6.usFbDiv = cpu_to_le16(fb_div);
858 args.v6.ulFbDivDecFrac = cpu_to_le32(frac_fb_div * 100000);
859 args.v6.ucPostDiv = post_div;
860 args.v6.ucMiscInfo = 0; /* HDMI depth, etc. */
861 if (ss_enabled && (ss->type & ATOM_EXTERNAL_SS_MASK))
862 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_REF_DIV_SRC;
863 switch (bpc) {
864 case 8:
865 default:
866 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_24BPP;
867 break;
868 case 10:
869 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_30BPP;
870 break;
871 case 12:
872 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_36BPP;
873 break;
874 case 16:
875 args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_48BPP;
876 break;
877 }
878 args.v6.ucTransmitterID = encoder_id;
879 args.v6.ucEncoderMode = encoder_mode;
880 args.v6.ucPpll = pll_id;
881 break;
882 default:
883 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
884 return;
885 }
886 break;
887 default:
888 DRM_ERROR("Unknown table version %d %d\n", frev, crev);
889 return;
890 }
891
892 atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
893 }
894
895 static bool atombios_crtc_prepare_pll(struct drm_crtc *crtc, struct drm_display_mode *mode)
896 {
897 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
898 struct drm_device *dev = crtc->dev;
899 struct radeon_device *rdev = dev->dev_private;
900 struct radeon_encoder *radeon_encoder =
901 to_radeon_encoder(radeon_crtc->encoder);
902 int encoder_mode = atombios_get_encoder_mode(radeon_crtc->encoder);
903
904 radeon_crtc->bpc = 8;
905 radeon_crtc->ss_enabled = false;
906
907 if ((radeon_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT | ATOM_DEVICE_DFP_SUPPORT)) ||
908 (radeon_encoder_get_dp_bridge_encoder_id(radeon_crtc->encoder) != ENCODER_OBJECT_ID_NONE)) {
909 struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
910 struct drm_connector *connector =
911 radeon_get_connector_for_encoder(radeon_crtc->encoder);
912 struct radeon_connector *radeon_connector =
913 to_radeon_connector(connector);
914 struct radeon_connector_atom_dig *dig_connector =
915 radeon_connector->con_priv;
916 int dp_clock;
917 radeon_crtc->bpc = radeon_get_monitor_bpc(connector);
918
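/* Look up spread spectrum parameters that match the link type: DCE4+
 * parts take them from the ASIC internal SS table keyed by link type
 * and clock, older parts use the per-PPLL SS table entries.
 */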
919 switch (encoder_mode) {
920 case ATOM_ENCODER_MODE_DP_MST:
921 case ATOM_ENCODER_MODE_DP:
922 /* DP/eDP */
923 dp_clock = dig_connector->dp_clock / 10;
924 if (ASIC_IS_DCE4(rdev))
925 radeon_crtc->ss_enabled =
926 radeon_atombios_get_asic_ss_info(rdev, &radeon_crtc->ss,
927 ASIC_INTERNAL_SS_ON_DP,
928 dp_clock);
929 else {
930 if (dp_clock == 16200) {
931 radeon_crtc->ss_enabled =
932 radeon_atombios_get_ppll_ss_info(rdev,
933 &radeon_crtc->ss,
934 ATOM_DP_SS_ID2);
935 if (!radeon_crtc->ss_enabled)
936 radeon_crtc->ss_enabled =
937 radeon_atombios_get_ppll_ss_info(rdev,
938 &radeon_crtc->ss,
939 ATOM_DP_SS_ID1);
940 } else
941 radeon_crtc->ss_enabled =
942 radeon_atombios_get_ppll_ss_info(rdev,
943 &radeon_crtc->ss,
944 ATOM_DP_SS_ID1);
945 }
946 break;
947 case ATOM_ENCODER_MODE_LVDS:
948 if (ASIC_IS_DCE4(rdev))
949 radeon_crtc->ss_enabled =
950 radeon_atombios_get_asic_ss_info(rdev,
951 &radeon_crtc->ss,
952 dig->lcd_ss_id,
953 mode->clock / 10);
954 else
955 radeon_crtc->ss_enabled =
956 radeon_atombios_get_ppll_ss_info(rdev,
957 &radeon_crtc->ss,
958 dig->lcd_ss_id);
959 break;
960 case ATOM_ENCODER_MODE_DVI:
961 if (ASIC_IS_DCE4(rdev))
962 radeon_crtc->ss_enabled =
963 radeon_atombios_get_asic_ss_info(rdev,
964 &radeon_crtc->ss,
965 ASIC_INTERNAL_SS_ON_TMDS,
966 mode->clock / 10);
967 break;
968 case ATOM_ENCODER_MODE_HDMI:
969 if (ASIC_IS_DCE4(rdev))
970 radeon_crtc->ss_enabled =
971 radeon_atombios_get_asic_ss_info(rdev,
972 &radeon_crtc->ss,
973 ASIC_INTERNAL_SS_ON_HDMI,
974 mode->clock / 10);
975 break;
976 default:
977 break;
978 }
979 }
980
981 /* adjust pixel clock as needed */
982 radeon_crtc->adjusted_clock = atombios_adjust_pll(crtc, mode);
983
984 return true;
985 }
986
987 static void atombios_crtc_set_pll(struct drm_crtc *crtc, struct drm_display_mode *mode)
988 {
989 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
990 struct drm_device *dev = crtc->dev;
991 struct radeon_device *rdev = dev->dev_private;
992 struct radeon_encoder *radeon_encoder =
993 to_radeon_encoder(radeon_crtc->encoder);
994 u32 pll_clock = mode->clock;
995 u32 ref_div = 0, fb_div = 0, frac_fb_div = 0, post_div = 0;
996 struct radeon_pll *pll;
997 int encoder_mode = atombios_get_encoder_mode(radeon_crtc->encoder);
998
999 switch (radeon_crtc->pll_id) {
1000 case ATOM_PPLL1:
1001 pll = &rdev->clock.p1pll;
1002 break;
1003 case ATOM_PPLL2:
1004 pll = &rdev->clock.p2pll;
1005 break;
1006 case ATOM_DCPLL:
1007 case ATOM_PPLL_INVALID:
1008 default:
1009 pll = &rdev->clock.dcpll;
1010 break;
1011 }
1012
1013 /* update pll params */
1014 pll->flags = radeon_crtc->pll_flags;
1015 pll->reference_div = radeon_crtc->pll_reference_div;
1016 pll->post_div = radeon_crtc->pll_post_div;
1017
1018 if (radeon_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1019 /* TV seems to prefer the legacy algo on some boards */
1020 radeon_compute_pll_legacy(pll, radeon_crtc->adjusted_clock, &pll_clock,
1021 &fb_div, &frac_fb_div, &ref_div, &post_div);
1022 else if (ASIC_IS_AVIVO(rdev))
1023 radeon_compute_pll_avivo(pll, radeon_crtc->adjusted_clock, &pll_clock,
1024 &fb_div, &frac_fb_div, &ref_div, &post_div);
1025 else
1026 radeon_compute_pll_legacy(pll, radeon_crtc->adjusted_clock, &pll_clock,
1027 &fb_div, &frac_fb_div, &ref_div, &post_div);
1028
1029 atombios_crtc_program_ss(rdev, ATOM_DISABLE, radeon_crtc->pll_id,
1030 radeon_crtc->crtc_id, &radeon_crtc->ss);
1031
1032 atombios_crtc_program_pll(crtc, radeon_crtc->crtc_id, radeon_crtc->pll_id,
1033 encoder_mode, radeon_encoder->encoder_id, mode->clock,
1034 ref_div, fb_div, frac_fb_div, post_div,
1035 radeon_crtc->bpc, radeon_crtc->ss_enabled, &radeon_crtc->ss);
1036
1037 if (radeon_crtc->ss_enabled) {
1038 /* calculate ss amount and step size */
1039 if (ASIC_IS_DCE4(rdev)) {
1040 u32 step_size;
1041 u32 amount = (((fb_div * 10) + frac_fb_div) * radeon_crtc->ss.percentage) / 10000;
1042 radeon_crtc->ss.amount = (amount / 10) & ATOM_PPLL_SS_AMOUNT_V2_FBDIV_MASK;
1043 radeon_crtc->ss.amount |= ((amount - (amount / 10)) << ATOM_PPLL_SS_AMOUNT_V2_NFRAC_SHIFT) &
1044 ATOM_PPLL_SS_AMOUNT_V2_NFRAC_MASK;
1045 if (radeon_crtc->ss.type & ATOM_PPLL_SS_TYPE_V2_CENTRE_SPREAD)
1046 step_size = (4 * amount * ref_div * (radeon_crtc->ss.rate * 2048)) /
1047 (125 * 25 * pll->reference_freq / 100);
1048 else
1049 step_size = (2 * amount * ref_div * (radeon_crtc->ss.rate * 2048)) /
1050 (125 * 25 * pll->reference_freq / 100);
1051 radeon_crtc->ss.step = step_size;
1052 }
1053
1054 atombios_crtc_program_ss(rdev, ATOM_ENABLE, radeon_crtc->pll_id,
1055 radeon_crtc->crtc_id, &radeon_crtc->ss);
1056 }
1057 }
1058
1059 static int dce4_crtc_do_set_base(struct drm_crtc *crtc,
1060 struct drm_framebuffer *fb,
1061 int x, int y, int atomic)
1062 {
1063 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1064 struct drm_device *dev = crtc->dev;
1065 struct radeon_device *rdev = dev->dev_private;
1066 struct radeon_framebuffer *radeon_fb;
1067 struct drm_framebuffer *target_fb;
1068 struct drm_gem_object *obj;
1069 struct radeon_bo *rbo;
1070 uint64_t fb_location;
1071 uint32_t fb_format, fb_pitch_pixels, tiling_flags;
1072 unsigned bankw, bankh, mtaspect, tile_split;
1073 u32 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_NONE);
1074 u32 tmp, viewport_w, viewport_h;
1075 int r;
1076
1077 /* no fb bound */
1078 if (!atomic && !crtc->fb) {
1079 DRM_DEBUG_KMS("No FB bound\n");
1080 return 0;
1081 }
1082
1083 if (atomic) {
1084 radeon_fb = to_radeon_framebuffer(fb);
1085 target_fb = fb;
1086 }
1087 else {
1088 radeon_fb = to_radeon_framebuffer(crtc->fb);
1089 target_fb = crtc->fb;
1090 }
1091
1092 /* If atomic, assume fb object is pinned & idle & fenced and
1093 * just update base pointers
1094 */
1095 obj = radeon_fb->obj;
1096 rbo = gem_to_radeon_bo(obj);
1097 r = radeon_bo_reserve(rbo, false);
1098 if (unlikely(r != 0))
1099 return r;
1100
1101 if (atomic)
1102 fb_location = radeon_bo_gpu_offset(rbo);
1103 else {
1104 r = radeon_bo_pin(rbo, RADEON_GEM_DOMAIN_VRAM, &fb_location);
1105 if (unlikely(r != 0)) {
1106 radeon_bo_unreserve(rbo);
1107 return -EINVAL;
1108 }
1109 }
1110
1111 radeon_bo_get_tiling_flags(rbo, &tiling_flags, NULL);
1112 radeon_bo_unreserve(rbo);
1113
1114 switch (target_fb->bits_per_pixel) {
1115 case 8:
1116 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_8BPP) |
1117 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_INDEXED));
1118 break;
1119 case 15:
1120 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) |
1121 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB1555));
1122 break;
1123 case 16:
1124 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) |
1125 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB565));
1126 #ifdef __BIG_ENDIAN
1127 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN16);
1128 #endif
1129 break;
1130 case 24:
1131 case 32:
1132 fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_32BPP) |
1133 EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB8888));
1134 #ifdef __BIG_ENDIAN
1135 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN32);
1136 #endif
1137 break;
1138 default:
1139 DRM_ERROR("Unsupported screen depth %d\n",
1140 target_fb->bits_per_pixel);
1141 return -EINVAL;
1142 }
1143
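/* For 2D (macro) tiled scanout buffers the bank count comes from the
 * family-specific tile_config value, while bank width/height, macro
 * tile aspect and tile split are decoded from the buffer's tiling
 * flags by evergreen_tiling_fields().
 */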
1144 if (tiling_flags & RADEON_TILING_MACRO) {
1145 if (rdev->family >= CHIP_TAHITI)
1146 tmp = rdev->config.si.tile_config;
1147 else if (rdev->family >= CHIP_CAYMAN)
1148 tmp = rdev->config.cayman.tile_config;
1149 else
1150 tmp = rdev->config.evergreen.tile_config;
1151
1152 switch ((tmp & 0xf0) >> 4) {
1153 case 0: /* 4 banks */
1154 fb_format |= EVERGREEN_GRPH_NUM_BANKS(EVERGREEN_ADDR_SURF_4_BANK);
1155 break;
1156 case 1: /* 8 banks */
1157 default:
1158 fb_format |= EVERGREEN_GRPH_NUM_BANKS(EVERGREEN_ADDR_SURF_8_BANK);
1159 break;
1160 case 2: /* 16 banks */
1161 fb_format |= EVERGREEN_GRPH_NUM_BANKS(EVERGREEN_ADDR_SURF_16_BANK);
1162 break;
1163 }
1164
1165 fb_format |= EVERGREEN_GRPH_ARRAY_MODE(EVERGREEN_GRPH_ARRAY_2D_TILED_THIN1);
1166
1167 evergreen_tiling_fields(tiling_flags, &bankw, &bankh, &mtaspect, &tile_split);
1168 fb_format |= EVERGREEN_GRPH_TILE_SPLIT(tile_split);
1169 fb_format |= EVERGREEN_GRPH_BANK_WIDTH(bankw);
1170 fb_format |= EVERGREEN_GRPH_BANK_HEIGHT(bankh);
1171 fb_format |= EVERGREEN_GRPH_MACRO_TILE_ASPECT(mtaspect);
1172 } else if (tiling_flags & RADEON_TILING_MICRO)
1173 fb_format |= EVERGREEN_GRPH_ARRAY_MODE(EVERGREEN_GRPH_ARRAY_1D_TILED_THIN1);
1174
1175 if ((rdev->family == CHIP_TAHITI) ||
1176 (rdev->family == CHIP_PITCAIRN))
1177 fb_format |= SI_GRPH_PIPE_CONFIG(SI_ADDR_SURF_P8_32x32_8x16);
1178 else if (rdev->family == CHIP_VERDE)
1179 fb_format |= SI_GRPH_PIPE_CONFIG(SI_ADDR_SURF_P4_8x16);
1180
1181 switch (radeon_crtc->crtc_id) {
1182 case 0:
1183 WREG32(AVIVO_D1VGA_CONTROL, 0);
1184 break;
1185 case 1:
1186 WREG32(AVIVO_D2VGA_CONTROL, 0);
1187 break;
1188 case 2:
1189 WREG32(EVERGREEN_D3VGA_CONTROL, 0);
1190 break;
1191 case 3:
1192 WREG32(EVERGREEN_D4VGA_CONTROL, 0);
1193 break;
1194 case 4:
1195 WREG32(EVERGREEN_D5VGA_CONTROL, 0);
1196 break;
1197 case 5:
1198 WREG32(EVERGREEN_D6VGA_CONTROL, 0);
1199 break;
1200 default:
1201 break;
1202 }
1203
1204 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1205 upper_32_bits(fb_location));
1206 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1207 upper_32_bits(fb_location));
1208 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1209 (u32)fb_location & EVERGREEN_GRPH_SURFACE_ADDRESS_MASK);
1210 WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1211 (u32) fb_location & EVERGREEN_GRPH_SURFACE_ADDRESS_MASK);
1212 WREG32(EVERGREEN_GRPH_CONTROL + radeon_crtc->crtc_offset, fb_format);
1213 WREG32(EVERGREEN_GRPH_SWAP_CONTROL + radeon_crtc->crtc_offset, fb_swap);
1214
1215 WREG32(EVERGREEN_GRPH_SURFACE_OFFSET_X + radeon_crtc->crtc_offset, 0);
1216 WREG32(EVERGREEN_GRPH_SURFACE_OFFSET_Y + radeon_crtc->crtc_offset, 0);
1217 WREG32(EVERGREEN_GRPH_X_START + radeon_crtc->crtc_offset, 0);
1218 WREG32(EVERGREEN_GRPH_Y_START + radeon_crtc->crtc_offset, 0);
1219 WREG32(EVERGREEN_GRPH_X_END + radeon_crtc->crtc_offset, target_fb->width);
1220 WREG32(EVERGREEN_GRPH_Y_END + radeon_crtc->crtc_offset, target_fb->height);
1221
1222 fb_pitch_pixels = target_fb->pitches[0] / (target_fb->bits_per_pixel / 8);
1223 WREG32(EVERGREEN_GRPH_PITCH + radeon_crtc->crtc_offset, fb_pitch_pixels);
1224 WREG32(EVERGREEN_GRPH_ENABLE + radeon_crtc->crtc_offset, 1);
1225
1226 WREG32(EVERGREEN_DESKTOP_HEIGHT + radeon_crtc->crtc_offset,
1227 target_fb->height);
1228 x &= ~3;
1229 y &= ~1;
1230 WREG32(EVERGREEN_VIEWPORT_START + radeon_crtc->crtc_offset,
1231 (x << 16) | y);
1232 viewport_w = crtc->mode.hdisplay;
1233 viewport_h = (crtc->mode.vdisplay + 1) & ~1;
1234 WREG32(EVERGREEN_VIEWPORT_SIZE + radeon_crtc->crtc_offset,
1235 (viewport_w << 16) | viewport_h);
1236
1237 /* pageflip setup */
1238 /* make sure flip is at vb rather than hb */
1239 tmp = RREG32(EVERGREEN_GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset);
1240 tmp &= ~EVERGREEN_GRPH_SURFACE_UPDATE_H_RETRACE_EN;
1241 WREG32(EVERGREEN_GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset, tmp);
1242
1243 /* set pageflip to happen anywhere in vblank interval */
1244 WREG32(EVERGREEN_MASTER_UPDATE_MODE + radeon_crtc->crtc_offset, 0);
1245
1246 if (!atomic && fb && fb != crtc->fb) {
1247 radeon_fb = to_radeon_framebuffer(fb);
1248 rbo = gem_to_radeon_bo(radeon_fb->obj);
1249 r = radeon_bo_reserve(rbo, false);
1250 if (unlikely(r != 0))
1251 return r;
1252 radeon_bo_unpin(rbo);
1253 radeon_bo_unreserve(rbo);
1254 }
1255
1256 /* Bytes per pixel may have changed */
1257 radeon_bandwidth_update(rdev);
1258
1259 return 0;
1260 }
1261
1262 static int avivo_crtc_do_set_base(struct drm_crtc *crtc,
1263 struct drm_framebuffer *fb,
1264 int x, int y, int atomic)
1265 {
1266 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1267 struct drm_device *dev = crtc->dev;
1268 struct radeon_device *rdev = dev->dev_private;
1269 struct radeon_framebuffer *radeon_fb;
1270 struct drm_gem_object *obj;
1271 struct radeon_bo *rbo;
1272 struct drm_framebuffer *target_fb;
1273 uint64_t fb_location;
1274 uint32_t fb_format, fb_pitch_pixels, tiling_flags;
1275 u32 fb_swap = R600_D1GRPH_SWAP_ENDIAN_NONE;
1276 u32 tmp, viewport_w, viewport_h;
1277 int r;
1278
1279 /* no fb bound */
1280 if (!atomic && !crtc->fb) {
1281 DRM_DEBUG_KMS("No FB bound\n");
1282 return 0;
1283 }
1284
1285 if (atomic) {
1286 radeon_fb = to_radeon_framebuffer(fb);
1287 target_fb = fb;
1288 }
1289 else {
1290 radeon_fb = to_radeon_framebuffer(crtc->fb);
1291 target_fb = crtc->fb;
1292 }
1293
1294 obj = radeon_fb->obj;
1295 rbo = gem_to_radeon_bo(obj);
1296 r = radeon_bo_reserve(rbo, false);
1297 if (unlikely(r != 0))
1298 return r;
1299
1300 /* If atomic, assume fb object is pinned & idle & fenced and
1301 * just update base pointers
1302 */
1303 if (atomic)
1304 fb_location = radeon_bo_gpu_offset(rbo);
1305 else {
1306 r = radeon_bo_pin(rbo, RADEON_GEM_DOMAIN_VRAM, &fb_location);
1307 if (unlikely(r != 0)) {
1308 radeon_bo_unreserve(rbo);
1309 return -EINVAL;
1310 }
1311 }
1312 radeon_bo_get_tiling_flags(rbo, &tiling_flags, NULL);
1313 radeon_bo_unreserve(rbo);
1314
1315 switch (target_fb->bits_per_pixel) {
1316 case 8:
1317 fb_format =
1318 AVIVO_D1GRPH_CONTROL_DEPTH_8BPP |
1319 AVIVO_D1GRPH_CONTROL_8BPP_INDEXED;
1320 break;
1321 case 15:
1322 fb_format =
1323 AVIVO_D1GRPH_CONTROL_DEPTH_16BPP |
1324 AVIVO_D1GRPH_CONTROL_16BPP_ARGB1555;
1325 break;
1326 case 16:
1327 fb_format =
1328 AVIVO_D1GRPH_CONTROL_DEPTH_16BPP |
1329 AVIVO_D1GRPH_CONTROL_16BPP_RGB565;
1330 #ifdef __BIG_ENDIAN
1331 fb_swap = R600_D1GRPH_SWAP_ENDIAN_16BIT;
1332 #endif
1333 break;
1334 case 24:
1335 case 32:
1336 fb_format =
1337 AVIVO_D1GRPH_CONTROL_DEPTH_32BPP |
1338 AVIVO_D1GRPH_CONTROL_32BPP_ARGB8888;
1339 #ifdef __BIG_ENDIAN
1340 fb_swap = R600_D1GRPH_SWAP_ENDIAN_32BIT;
1341 #endif
1342 break;
1343 default:
1344 DRM_ERROR("Unsupported screen depth %d\n",
1345 target_fb->bits_per_pixel);
1346 return -EINVAL;
1347 }
1348
1349 if (rdev->family >= CHIP_R600) {
1350 if (tiling_flags & RADEON_TILING_MACRO)
1351 fb_format |= R600_D1GRPH_ARRAY_MODE_2D_TILED_THIN1;
1352 else if (tiling_flags & RADEON_TILING_MICRO)
1353 fb_format |= R600_D1GRPH_ARRAY_MODE_1D_TILED_THIN1;
1354 } else {
1355 if (tiling_flags & RADEON_TILING_MACRO)
1356 fb_format |= AVIVO_D1GRPH_MACRO_ADDRESS_MODE;
1357
1358 if (tiling_flags & RADEON_TILING_MICRO)
1359 fb_format |= AVIVO_D1GRPH_TILED;
1360 }
1361
1362 if (radeon_crtc->crtc_id == 0)
1363 WREG32(AVIVO_D1VGA_CONTROL, 0);
1364 else
1365 WREG32(AVIVO_D2VGA_CONTROL, 0);
1366
1367 if (rdev->family >= CHIP_RV770) {
1368 if (radeon_crtc->crtc_id) {
1369 WREG32(R700_D2GRPH_PRIMARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location));
1370 WREG32(R700_D2GRPH_SECONDARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location));
1371 } else {
1372 WREG32(R700_D1GRPH_PRIMARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location));
1373 WREG32(R700_D1GRPH_SECONDARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location));
1374 }
1375 }
1376 WREG32(AVIVO_D1GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1377 (u32) fb_location);
1378 WREG32(AVIVO_D1GRPH_SECONDARY_SURFACE_ADDRESS +
1379 radeon_crtc->crtc_offset, (u32) fb_location);
1380 WREG32(AVIVO_D1GRPH_CONTROL + radeon_crtc->crtc_offset, fb_format);
1381 if (rdev->family >= CHIP_R600)
1382 WREG32(R600_D1GRPH_SWAP_CONTROL + radeon_crtc->crtc_offset, fb_swap);
1383
1384 WREG32(AVIVO_D1GRPH_SURFACE_OFFSET_X + radeon_crtc->crtc_offset, 0);
1385 WREG32(AVIVO_D1GRPH_SURFACE_OFFSET_Y + radeon_crtc->crtc_offset, 0);
1386 WREG32(AVIVO_D1GRPH_X_START + radeon_crtc->crtc_offset, 0);
1387 WREG32(AVIVO_D1GRPH_Y_START + radeon_crtc->crtc_offset, 0);
1388 WREG32(AVIVO_D1GRPH_X_END + radeon_crtc->crtc_offset, target_fb->width);
1389 WREG32(AVIVO_D1GRPH_Y_END + radeon_crtc->crtc_offset, target_fb->height);
1390
1391 fb_pitch_pixels = target_fb->pitches[0] / (target_fb->bits_per_pixel / 8);
1392 WREG32(AVIVO_D1GRPH_PITCH + radeon_crtc->crtc_offset, fb_pitch_pixels);
1393 WREG32(AVIVO_D1GRPH_ENABLE + radeon_crtc->crtc_offset, 1);
1394
1395 WREG32(AVIVO_D1MODE_DESKTOP_HEIGHT + radeon_crtc->crtc_offset,
1396 target_fb->height);
1397 x &= ~3;
1398 y &= ~1;
1399 WREG32(AVIVO_D1MODE_VIEWPORT_START + radeon_crtc->crtc_offset,
1400 (x << 16) | y);
1401 viewport_w = crtc->mode.hdisplay;
1402 viewport_h = (crtc->mode.vdisplay + 1) & ~1;
1403 WREG32(AVIVO_D1MODE_VIEWPORT_SIZE + radeon_crtc->crtc_offset,
1404 (viewport_w << 16) | viewport_h);
1405
1406 /* pageflip setup */
1407 /* make sure flip is at vb rather than hb */
1408 tmp = RREG32(AVIVO_D1GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset);
1409 tmp &= ~AVIVO_D1GRPH_SURFACE_UPDATE_H_RETRACE_EN;
1410 WREG32(AVIVO_D1GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset, tmp);
1411
1412 /* set pageflip to happen anywhere in vblank interval */
1413 WREG32(AVIVO_D1MODE_MASTER_UPDATE_MODE + radeon_crtc->crtc_offset, 0);
1414
1415 if (!atomic && fb && fb != crtc->fb) {
1416 radeon_fb = to_radeon_framebuffer(fb);
1417 rbo = gem_to_radeon_bo(radeon_fb->obj);
1418 r = radeon_bo_reserve(rbo, false);
1419 if (unlikely(r != 0))
1420 return r;
1421 radeon_bo_unpin(rbo);
1422 radeon_bo_unreserve(rbo);
1423 }
1424
1425 /* Bytes per pixel may have changed */
1426 radeon_bandwidth_update(rdev);
1427
1428 return 0;
1429 }
1430
1431 int atombios_crtc_set_base(struct drm_crtc *crtc, int x, int y,
1432 struct drm_framebuffer *old_fb)
1433 {
1434 struct drm_device *dev = crtc->dev;
1435 struct radeon_device *rdev = dev->dev_private;
1436
1437 if (ASIC_IS_DCE4(rdev))
1438 return dce4_crtc_do_set_base(crtc, old_fb, x, y, 0);
1439 else if (ASIC_IS_AVIVO(rdev))
1440 return avivo_crtc_do_set_base(crtc, old_fb, x, y, 0);
1441 else
1442 return radeon_crtc_do_set_base(crtc, old_fb, x, y, 0);
1443 }
1444
1445 int atombios_crtc_set_base_atomic(struct drm_crtc *crtc,
1446 struct drm_framebuffer *fb,
1447 int x, int y, enum mode_set_atomic state)
1448 {
1449 struct drm_device *dev = crtc->dev;
1450 struct radeon_device *rdev = dev->dev_private;
1451
1452 if (ASIC_IS_DCE4(rdev))
1453 return dce4_crtc_do_set_base(crtc, fb, x, y, 1);
1454 else if (ASIC_IS_AVIVO(rdev))
1455 return avivo_crtc_do_set_base(crtc, fb, x, y, 1);
1456 else
1457 return radeon_crtc_do_set_base(crtc, fb, x, y, 1);
1458 }
1459
1460 /* properly set additional regs when using atombios */
1461 static void radeon_legacy_atom_fixup(struct drm_crtc *crtc)
1462 {
1463 struct drm_device *dev = crtc->dev;
1464 struct radeon_device *rdev = dev->dev_private;
1465 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1466 u32 disp_merge_cntl;
1467
1468 switch (radeon_crtc->crtc_id) {
1469 case 0:
1470 disp_merge_cntl = RREG32(RADEON_DISP_MERGE_CNTL);
1471 disp_merge_cntl &= ~RADEON_DISP_RGB_OFFSET_EN;
1472 WREG32(RADEON_DISP_MERGE_CNTL, disp_merge_cntl);
1473 break;
1474 case 1:
1475 disp_merge_cntl = RREG32(RADEON_DISP2_MERGE_CNTL);
1476 disp_merge_cntl &= ~RADEON_DISP2_RGB_OFFSET_EN;
1477 WREG32(RADEON_DISP2_MERGE_CNTL, disp_merge_cntl);
1478 WREG32(RADEON_FP_H2_SYNC_STRT_WID, RREG32(RADEON_CRTC2_H_SYNC_STRT_WID));
1479 WREG32(RADEON_FP_V2_SYNC_STRT_WID, RREG32(RADEON_CRTC2_V_SYNC_STRT_WID));
1480 break;
1481 }
1482 }
1483
1484 /**
1485 * radeon_get_pll_use_mask - look up a mask of which pplls are in use
1486 *
1487 * @crtc: drm crtc
1488 *
1489 * Returns the mask of which PPLLs (Pixel PLLs) are in use.
1490 */
1491 static u32 radeon_get_pll_use_mask(struct drm_crtc *crtc)
1492 {
1493 struct drm_device *dev = crtc->dev;
1494 struct drm_crtc *test_crtc;
1495 struct radeon_crtc *test_radeon_crtc;
1496 u32 pll_in_use = 0;
1497
1498 list_for_each_entry(test_crtc, &dev->mode_config.crtc_list, head) {
1499 if (crtc == test_crtc)
1500 continue;
1501
1502 test_radeon_crtc = to_radeon_crtc(test_crtc);
1503 if (test_radeon_crtc->pll_id != ATOM_PPLL_INVALID)
1504 pll_in_use |= (1 << test_radeon_crtc->pll_id);
1505 }
1506 return pll_in_use;
1507 }
1508
1509 /**
1510 * radeon_get_shared_dp_ppll - return the PPLL used by another crtc for DP
1511 *
1512 * @crtc: drm crtc
1513 *
1514 * Returns the PPLL (Pixel PLL) used by another crtc/encoder which is
1515 * also in DP mode. For DP, a single PPLL can be used for all DP
1516 * crtcs/encoders.
1517 */
1518 static int radeon_get_shared_dp_ppll(struct drm_crtc *crtc)
1519 {
1520 struct drm_device *dev = crtc->dev;
1521 struct drm_crtc *test_crtc;
1522 struct radeon_crtc *test_radeon_crtc;
1523
1524 list_for_each_entry(test_crtc, &dev->mode_config.crtc_list, head) {
1525 if (crtc == test_crtc)
1526 continue;
1527 test_radeon_crtc = to_radeon_crtc(test_crtc);
1528 if (test_radeon_crtc->encoder &&
1529 ENCODER_MODE_IS_DP(atombios_get_encoder_mode(test_radeon_crtc->encoder))) {
1530 /* for DP use the same PLL for all */
1531 if (test_radeon_crtc->pll_id != ATOM_PPLL_INVALID)
1532 return test_radeon_crtc->pll_id;
1533 }
1534 }
1535 return ATOM_PPLL_INVALID;
1536 }
1537
1538 /**
1539 * radeon_get_shared_nondp_ppll - return the PPLL used by another non-DP crtc
1540 *
1541 * @crtc: drm crtc
1542 * @encoder: drm encoder
1543 *
1544 * Returns the PPLL (Pixel PLL) used by another non-DP crtc/encoder which can
1545 * be shared (i.e., same clock).
1546 */
1547 static int radeon_get_shared_nondp_ppll(struct drm_crtc *crtc)
1548 {
1549 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1550 struct drm_device *dev = crtc->dev;
1551 struct drm_crtc *test_crtc;
1552 struct radeon_crtc *test_radeon_crtc;
1553 u32 adjusted_clock, test_adjusted_clock;
1554
1555 adjusted_clock = radeon_crtc->adjusted_clock;
1556
1557 if (adjusted_clock == 0)
1558 return ATOM_PPLL_INVALID;
1559
1560 list_for_each_entry(test_crtc, &dev->mode_config.crtc_list, head) {
1561 if (crtc == test_crtc)
1562 continue;
1563 test_radeon_crtc = to_radeon_crtc(test_crtc);
1564 if (test_radeon_crtc->encoder &&
1565 !ENCODER_MODE_IS_DP(atombios_get_encoder_mode(test_radeon_crtc->encoder))) {
1566 /* check if we are already driving this connector with another crtc */
1567 if (test_radeon_crtc->connector == radeon_crtc->connector) {
1568 /* if we are, return that pll */
1569 if (test_radeon_crtc->pll_id != ATOM_PPLL_INVALID)
1570 return test_radeon_crtc->pll_id;
1571 }
1572 /* for non-DP check the clock */
1573 test_adjusted_clock = test_radeon_crtc->adjusted_clock;
1574 if ((crtc->mode.clock == test_crtc->mode.clock) &&
1575 (adjusted_clock == test_adjusted_clock) &&
1576 (radeon_crtc->ss_enabled == test_radeon_crtc->ss_enabled) &&
1577 (test_radeon_crtc->pll_id != ATOM_PPLL_INVALID))
1578 return test_radeon_crtc->pll_id;
1579 }
1580 }
1581 return ATOM_PPLL_INVALID;
1582 }
1583
1584 /**
1585 * radeon_atom_pick_pll - Allocate a PPLL for use by the crtc.
1586 *
1587 * @crtc: drm crtc
1588 *
1589 * Returns the PPLL (Pixel PLL) to be used by the crtc. For DP monitors
1590 * a single PPLL can be used for all DP crtcs/encoders. For non-DP
1591 * monitors a dedicated PPLL must be used. If a particular board has
1592 * an external DP PLL, return ATOM_PPLL_INVALID to skip PLL programming
1593 * as there is no need to program the PLL itself. If we are not able to
1594 * allocate a PLL, return ATOM_PPLL_INVALID to skip PLL programming to
1595 * avoid messing up an existing monitor.
1596 *
1597 * Asic specific PLL information
1598 *
1599 * DCE 6.1
1600 * - PPLL2 is only available to UNIPHYA (both DP and non-DP)
1601 * - PPLL0, PPLL1 are available for UNIPHYB/C/D/E/F (both DP and non-DP)
1602 *
1603 * DCE 6.0
1604 * - PPLL0 is available to all UNIPHY (DP only)
1605 * - PPLL1, PPLL2 are available for all UNIPHY (both DP and non-DP) and DAC
1606 *
1607 * DCE 5.0
1608 * - DCPLL is available to all UNIPHY (DP only)
1609 * - PPLL1, PPLL2 are available for all UNIPHY (both DP and non-DP) and DAC
1610 *
1611 * DCE 3.0/4.0/4.1
1612 * - PPLL1, PPLL2 are available for all UNIPHY (both DP and non-DP) and DAC
1613 *
1614 */
1615 static int radeon_atom_pick_pll(struct drm_crtc *crtc)
1616 {
1617 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1618 struct drm_device *dev = crtc->dev;
1619 struct radeon_device *rdev = dev->dev_private;
1620 struct radeon_encoder *radeon_encoder =
1621 to_radeon_encoder(radeon_crtc->encoder);
1622 u32 pll_in_use;
1623 int pll;
1624
1625 if (ASIC_IS_DCE61(rdev)) {
1626 struct radeon_encoder_atom_dig *dig =
1627 radeon_encoder->enc_priv;
1628
1629 if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_UNIPHY) &&
1630 (dig->linkb == false))
1631 /* UNIPHY A uses PPLL2 */
1632 return ATOM_PPLL2;
1633 else if (ENCODER_MODE_IS_DP(atombios_get_encoder_mode(radeon_crtc->encoder))) {
1634 /* UNIPHY B/C/D/E/F */
1635 if (rdev->clock.dp_extclk)
1636 /* skip PPLL programming if using ext clock */
1637 return ATOM_PPLL_INVALID;
1638 else {
1639 /* use the same PPLL for all DP monitors */
1640 pll = radeon_get_shared_dp_ppll(crtc);
1641 if (pll != ATOM_PPLL_INVALID)
1642 return pll;
1643 }
1644 } else {
1645 /* use the same PPLL for all monitors with the same clock */
1646 pll = radeon_get_shared_nondp_ppll(crtc);
1647 if (pll != ATOM_PPLL_INVALID)
1648 return pll;
1649 }
1650 /* UNIPHY B/C/D/E/F */
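/* no shareable pll was found above, so allocate a free one from the
 * in-use bitmask; e.g. if only PPLL0 is taken, bit ATOM_PPLL0 is set
 * in pll_in_use and PPLL1 is returned below
 */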
1651 pll_in_use = radeon_get_pll_use_mask(crtc);
1652 if (!(pll_in_use & (1 << ATOM_PPLL0)))
1653 return ATOM_PPLL0;
1654 if (!(pll_in_use & (1 << ATOM_PPLL1)))
1655 return ATOM_PPLL1;
1656 DRM_ERROR("unable to allocate a PPLL\n");
1657 return ATOM_PPLL_INVALID;
1658 } else if (ASIC_IS_DCE4(rdev)) {
1659 /* in DP mode, the DP ref clock can come from PPLL, DCPLL, or ext clock,
1660 * depending on the asic:
1661 * DCE4: PPLL or ext clock
1662 * DCE5: PPLL, DCPLL, or ext clock
1663 * DCE6: PPLL, PPLL0, or ext clock
1664 *
1665 * Setting ATOM_PPLL_INVALID will cause SetPixelClock to skip
1666 * PPLL/DCPLL programming and only program the DP DTO for the
1667 * crtc virtual pixel clock.
1668 */
1669 if (ENCODER_MODE_IS_DP(atombios_get_encoder_mode(radeon_crtc->encoder))) {
1670 if (rdev->clock.dp_extclk)
1671 /* skip PPLL programming if using ext clock */
1672 return ATOM_PPLL_INVALID;
1673 else if (ASIC_IS_DCE6(rdev))
1674 /* use PPLL0 for all DP */
1675 return ATOM_PPLL0;
1676 else if (ASIC_IS_DCE5(rdev))
1677 /* use DCPLL for all DP */
1678 return ATOM_DCPLL;
1679 else {
1680 /* use the same PPLL for all DP monitors */
1681 pll = radeon_get_shared_dp_ppll(crtc);
1682 if (pll != ATOM_PPLL_INVALID)
1683 return pll;
1684 }
1685 } else {
1686 /* use the same PPLL for all monitors with the same clock */
1687 pll = radeon_get_shared_nondp_ppll(crtc);
1688 if (pll != ATOM_PPLL_INVALID)
1689 return pll;
1690 }
1691 /* all other cases */
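/* nothing shareable; fall back to whichever of PPLL2/PPLL1 is not in
 * the in-use bitmask
 */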
1692 pll_in_use = radeon_get_pll_use_mask(crtc);
1693 if (!(pll_in_use & (1 << ATOM_PPLL2)))
1694 return ATOM_PPLL2;
1695 if (!(pll_in_use & (1 << ATOM_PPLL1)))
1696 return ATOM_PPLL1;
1697 DRM_ERROR("unable to allocate a PPLL\n");
1698 return ATOM_PPLL_INVALID;
1699 } else {
1700 if (ASIC_IS_AVIVO(rdev)) {
1701 /* in DP mode, the DP ref clock can come from either PPLL
1702 * depending on the asic:
1703 * DCE3: PPLL1 or PPLL2
1704 */
1705 if (ENCODER_MODE_IS_DP(atombios_get_encoder_mode(radeon_crtc->encoder))) {
1706 /* use the same PPLL for all DP monitors */
1707 pll = radeon_get_shared_dp_ppll(crtc);
1708 if (pll != ATOM_PPLL_INVALID)
1709 return pll;
1710 } else {
1711 /* use the same PPLL for all monitors with the same clock */
1712 pll = radeon_get_shared_nondp_ppll(crtc);
1713 if (pll != ATOM_PPLL_INVALID)
1714 return pll;
1715 }
1716 /* all other cases */
1717 pll_in_use = radeon_get_pll_use_mask(crtc);
1718 if (!(pll_in_use & (1 << ATOM_PPLL2)))
1719 return ATOM_PPLL2;
1720 if (!(pll_in_use & (1 << ATOM_PPLL1)))
1721 return ATOM_PPLL1;
1722 DRM_ERROR("unable to allocate a PPLL\n");
1723 return ATOM_PPLL_INVALID;
1724 } else {
1725 /* on pre-R5xx asics, the crtc to pll mapping is hardcoded */
1726 return radeon_crtc->crtc_id;
1727 }
1728 }
1729 }
1730
1731 void radeon_atom_disp_eng_pll_init(struct radeon_device *rdev)
1732 {
1733 /* always program the display engine pll (dispclk) */
1734 if (ASIC_IS_DCE6(rdev))
1735 atombios_crtc_set_disp_eng_pll(rdev, rdev->clock.default_dispclk);
1736 else if (ASIC_IS_DCE4(rdev)) {
1737 struct radeon_atom_ss ss;
1738 bool ss_enabled = radeon_atombios_get_asic_ss_info(rdev, &ss,
1739 ASIC_INTERNAL_SS_ON_DCPLL,
1740 rdev->clock.default_dispclk);
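/* temporarily disable spread spectrum on the DCPLL while the display
 * engine clock is reprogrammed, then restore it
 */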
1741 if (ss_enabled)
1742 atombios_crtc_program_ss(rdev, ATOM_DISABLE, ATOM_DCPLL, -1, &ss);
1743 /* XXX: DCE5, make sure voltage and dispclk are high enough */
1744 atombios_crtc_set_disp_eng_pll(rdev, rdev->clock.default_dispclk);
1745 if (ss_enabled)
1746 atombios_crtc_program_ss(rdev, ATOM_ENABLE, ATOM_DCPLL, -1, &ss);
1747 }
1748
1749 }
1750
1751 int atombios_crtc_mode_set(struct drm_crtc *crtc,
1752 struct drm_display_mode *mode,
1753 struct drm_display_mode *adjusted_mode,
1754 int x, int y, struct drm_framebuffer *old_fb)
1755 {
1756 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1757 struct drm_device *dev = crtc->dev;
1758 struct radeon_device *rdev = dev->dev_private;
1759 struct radeon_encoder *radeon_encoder =
1760 to_radeon_encoder(radeon_crtc->encoder);
1761 bool is_tvcv = false;
1762
1763 if (radeon_encoder->active_device &
1764 (ATOM_DEVICE_TV_SUPPORT | ATOM_DEVICE_CV_SUPPORT))
1765 is_tvcv = true;
1766
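/* program the pixel pll first, then the crtc timing, scanout base,
 * overscan and scaler
 */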
1767 atombios_crtc_set_pll(crtc, adjusted_mode);
1768
1769 if (ASIC_IS_DCE4(rdev))
1770 atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
1771 else if (ASIC_IS_AVIVO(rdev)) {
1772 if (is_tvcv)
1773 atombios_crtc_set_timing(crtc, adjusted_mode);
1774 else
1775 atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
1776 } else {
1777 atombios_crtc_set_timing(crtc, adjusted_mode);
1778 if (radeon_crtc->crtc_id == 0)
1779 atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
1780 radeon_legacy_atom_fixup(crtc);
1781 }
1782 atombios_crtc_set_base(crtc, x, y, old_fb);
1783 atombios_overscan_setup(crtc, mode, adjusted_mode);
1784 atombios_scaler_setup(crtc);
1785 return 0;
1786 }
1787
1788 static bool atombios_crtc_mode_fixup(struct drm_crtc *crtc,
1789 const struct drm_display_mode *mode,
1790 struct drm_display_mode *adjusted_mode)
1791 {
1792 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1793 struct drm_device *dev = crtc->dev;
1794 struct drm_encoder *encoder;
1795
1796 /* assign the encoder to the radeon crtc to avoid repeated lookups later */
1797 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
1798 if (encoder->crtc == crtc) {
1799 radeon_crtc->encoder = encoder;
1800 radeon_crtc->connector = radeon_get_connector_for_encoder(encoder);
1801 break;
1802 }
1803 }
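/* no encoder (and hence no connector) is attached to this crtc */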
1804 if ((radeon_crtc->encoder == NULL) || (radeon_crtc->connector == NULL)) {
1805 radeon_crtc->encoder = NULL;
1806 radeon_crtc->connector = NULL;
1807 return false;
1808 }
1809 if (!radeon_crtc_scaling_mode_fixup(crtc, mode, adjusted_mode))
1810 return false;
1811 if (!atombios_crtc_prepare_pll(crtc, adjusted_mode))
1812 return false;
1813 /* pick pll */
1814 radeon_crtc->pll_id = radeon_atom_pick_pll(crtc);
1815 /* if we can't get a PPLL for a non-DP encoder, fail */
1816 if ((radeon_crtc->pll_id == ATOM_PPLL_INVALID) &&
1817 !ENCODER_MODE_IS_DP(atombios_get_encoder_mode(radeon_crtc->encoder)))
1818 return false;
1819
1820 return true;
1821 }
1822
1823 static void atombios_crtc_prepare(struct drm_crtc *crtc)
1824 {
1825 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1826 struct drm_device *dev = crtc->dev;
1827 struct radeon_device *rdev = dev->dev_private;
1828
1829 radeon_crtc->in_mode_set = true;
1830
1831 /* disable crtc pair power gating before programming */
1832 if (ASIC_IS_DCE6(rdev))
1833 atombios_powergate_crtc(crtc, ATOM_DISABLE);
1834
1835 atombios_lock_crtc(crtc, ATOM_ENABLE);
1836 atombios_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
1837 }
1838
1839 static void atombios_crtc_commit(struct drm_crtc *crtc)
1840 {
1841 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1842
1843 atombios_crtc_dpms(crtc, DRM_MODE_DPMS_ON);
1844 atombios_lock_crtc(crtc, ATOM_DISABLE);
1845 radeon_crtc->in_mode_set = false;
1846 }
1847
1848 static void atombios_crtc_disable(struct drm_crtc *crtc)
1849 {
1850 struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1851 struct drm_device *dev = crtc->dev;
1852 struct radeon_device *rdev = dev->dev_private;
1853 struct radeon_atom_ss ss;
1854 int i;
1855
1856 atombios_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
1857
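/* only tear down the pll if no other enabled crtc is still using it */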
1858 for (i = 0; i < rdev->num_crtc; i++) {
1859 if (rdev->mode_info.crtcs[i] &&
1860 rdev->mode_info.crtcs[i]->enabled &&
1861 i != radeon_crtc->crtc_id &&
1862 radeon_crtc->pll_id == rdev->mode_info.crtcs[i]->pll_id) {
1863 /* another crtc is still using this pll; don't
1864 * turn it off
1865 */
1866 goto done;
1867 }
1868 }
1869
1870 switch (radeon_crtc->pll_id) {
1871 case ATOM_PPLL1:
1872 case ATOM_PPLL2:
1873 /* disable the ppll */
1874 atombios_crtc_program_pll(crtc, radeon_crtc->crtc_id, radeon_crtc->pll_id,
1875 0, 0, ATOM_DISABLE, 0, 0, 0, 0, 0, false, &ss);
1876 break;
1877 case ATOM_PPLL0:
1878 /* disable the ppll */
1879 if (ASIC_IS_DCE61(rdev))
1880 atombios_crtc_program_pll(crtc, radeon_crtc->crtc_id, radeon_crtc->pll_id,
1881 0, 0, ATOM_DISABLE, 0, 0, 0, 0, 0, false, &ss);
1882 break;
1883 default:
1884 break;
1885 }
1886 done:
1887 radeon_crtc->pll_id = ATOM_PPLL_INVALID;
1888 radeon_crtc->adjusted_clock = 0;
1889 radeon_crtc->encoder = NULL;
1890 radeon_crtc->connector = NULL;
1891 }
1892
1893 static const struct drm_crtc_helper_funcs atombios_helper_funcs = {
1894 .dpms = atombios_crtc_dpms,
1895 .mode_fixup = atombios_crtc_mode_fixup,
1896 .mode_set = atombios_crtc_mode_set,
1897 .mode_set_base = atombios_crtc_set_base,
1898 .mode_set_base_atomic = atombios_crtc_set_base_atomic,
1899 .prepare = atombios_crtc_prepare,
1900 .commit = atombios_crtc_commit,
1901 .load_lut = radeon_crtc_load_lut,
1902 .disable = atombios_crtc_disable,
1903 };
1904
1905 void radeon_atombios_init_crtc(struct drm_device *dev,
1906 struct radeon_crtc *radeon_crtc)
1907 {
1908 struct radeon_device *rdev = dev->dev_private;
1909
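/* DCE4+ exposes a register bank per crtc; older asics have two crtcs
 * and the second one is addressed via the D1/D2 register spacing
 */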
1910 if (ASIC_IS_DCE4(rdev)) {
1911 switch (radeon_crtc->crtc_id) {
1912 case 0:
1913 default:
1914 radeon_crtc->crtc_offset = EVERGREEN_CRTC0_REGISTER_OFFSET;
1915 break;
1916 case 1:
1917 radeon_crtc->crtc_offset = EVERGREEN_CRTC1_REGISTER_OFFSET;
1918 break;
1919 case 2:
1920 radeon_crtc->crtc_offset = EVERGREEN_CRTC2_REGISTER_OFFSET;
1921 break;
1922 case 3:
1923 radeon_crtc->crtc_offset = EVERGREEN_CRTC3_REGISTER_OFFSET;
1924 break;
1925 case 4:
1926 radeon_crtc->crtc_offset = EVERGREEN_CRTC4_REGISTER_OFFSET;
1927 break;
1928 case 5:
1929 radeon_crtc->crtc_offset = EVERGREEN_CRTC5_REGISTER_OFFSET;
1930 break;
1931 }
1932 } else {
1933 if (radeon_crtc->crtc_id == 1)
1934 radeon_crtc->crtc_offset =
1935 AVIVO_D2CRTC_H_TOTAL - AVIVO_D1CRTC_H_TOTAL;
1936 else
1937 radeon_crtc->crtc_offset = 0;
1938 }
1939 radeon_crtc->pll_id = ATOM_PPLL_INVALID;
1940 radeon_crtc->adjusted_clock = 0;
1941 radeon_crtc->encoder = NULL;
1942 radeon_crtc->connector = NULL;
1943 drm_crtc_helper_add(&radeon_crtc->base, &atombios_helper_funcs);
1944 }