drm/radeon/kms: add support for square microtiles on r3xx-r5xx
drivers/gpu/drm/radeon/radeon_display.c
/*
 * Copyright 2007-8 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 */
#include "drmP.h"
#include "radeon_drm.h"
#include "radeon.h"

#include "atom.h"
#include <asm/div64.h>

#include "drm_crtc_helper.h"
#include "drm_edid.h"

static int radeon_ddc_dump(struct drm_connector *connector);

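/* Program the hardware gamma LUT of an AVIVO-class CRTC from the 256-entry,
 * 10-bit-per-channel ramp cached in radeon_crtc->lut_r/g/b. */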
static void avivo_crtc_load_lut(struct drm_crtc *crtc)
{
        struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
        struct drm_device *dev = crtc->dev;
        struct radeon_device *rdev = dev->dev_private;
        int i;

        DRM_DEBUG("%d\n", radeon_crtc->crtc_id);
        WREG32(AVIVO_DC_LUTA_CONTROL + radeon_crtc->crtc_offset, 0);

        WREG32(AVIVO_DC_LUTA_BLACK_OFFSET_BLUE + radeon_crtc->crtc_offset, 0);
        WREG32(AVIVO_DC_LUTA_BLACK_OFFSET_GREEN + radeon_crtc->crtc_offset, 0);
        WREG32(AVIVO_DC_LUTA_BLACK_OFFSET_RED + radeon_crtc->crtc_offset, 0);

        WREG32(AVIVO_DC_LUTA_WHITE_OFFSET_BLUE + radeon_crtc->crtc_offset, 0xffff);
        WREG32(AVIVO_DC_LUTA_WHITE_OFFSET_GREEN + radeon_crtc->crtc_offset, 0xffff);
        WREG32(AVIVO_DC_LUTA_WHITE_OFFSET_RED + radeon_crtc->crtc_offset, 0xffff);

        WREG32(AVIVO_DC_LUT_RW_SELECT, radeon_crtc->crtc_id);
        WREG32(AVIVO_DC_LUT_RW_MODE, 0);
        WREG32(AVIVO_DC_LUT_WRITE_EN_MASK, 0x0000003f);

        WREG8(AVIVO_DC_LUT_RW_INDEX, 0);
        for (i = 0; i < 256; i++) {
                WREG32(AVIVO_DC_LUT_30_COLOR,
                       (radeon_crtc->lut_r[i] << 20) |
                       (radeon_crtc->lut_g[i] << 10) |
                       (radeon_crtc->lut_b[i] << 0));
        }

        WREG32(AVIVO_D1GRPH_LUT_SEL + radeon_crtc->crtc_offset, radeon_crtc->crtc_id);
}

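/* Same as the AVIVO path above, using the Evergreen (DCE4) LUT registers. */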
static void evergreen_crtc_load_lut(struct drm_crtc *crtc)
{
        struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
        struct drm_device *dev = crtc->dev;
        struct radeon_device *rdev = dev->dev_private;
        int i;

        DRM_DEBUG("%d\n", radeon_crtc->crtc_id);
        WREG32(EVERGREEN_DC_LUT_CONTROL + radeon_crtc->crtc_offset, 0);

        WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_BLUE + radeon_crtc->crtc_offset, 0);
        WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_GREEN + radeon_crtc->crtc_offset, 0);
        WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_RED + radeon_crtc->crtc_offset, 0);

        WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_BLUE + radeon_crtc->crtc_offset, 0xffff);
        WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_GREEN + radeon_crtc->crtc_offset, 0xffff);
        WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_RED + radeon_crtc->crtc_offset, 0xffff);

        WREG32(EVERGREEN_DC_LUT_RW_MODE, radeon_crtc->crtc_id);
        WREG32(EVERGREEN_DC_LUT_WRITE_EN_MASK, 0x00000007);

        WREG32(EVERGREEN_DC_LUT_RW_INDEX, 0);
        for (i = 0; i < 256; i++) {
                WREG32(EVERGREEN_DC_LUT_30_COLOR,
                       (radeon_crtc->lut_r[i] << 20) |
                       (radeon_crtc->lut_g[i] << 10) |
                       (radeon_crtc->lut_b[i] << 0));
        }
}

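/* Pre-AVIVO chips: load the palette through the DAC registers, selecting
 * which CRTC's palette is accessed via RADEON_DAC2_PALETTE_ACC_CTL. */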
static void legacy_crtc_load_lut(struct drm_crtc *crtc)
{
        struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
        struct drm_device *dev = crtc->dev;
        struct radeon_device *rdev = dev->dev_private;
        int i;
        uint32_t dac2_cntl;

        dac2_cntl = RREG32(RADEON_DAC_CNTL2);
        if (radeon_crtc->crtc_id == 0)
                dac2_cntl &= (uint32_t)~RADEON_DAC2_PALETTE_ACC_CTL;
        else
                dac2_cntl |= RADEON_DAC2_PALETTE_ACC_CTL;
        WREG32(RADEON_DAC_CNTL2, dac2_cntl);

        WREG8(RADEON_PALETTE_INDEX, 0);
        for (i = 0; i < 256; i++) {
                WREG32(RADEON_PALETTE_30_DATA,
                       (radeon_crtc->lut_r[i] << 20) |
                       (radeon_crtc->lut_g[i] << 10) |
                       (radeon_crtc->lut_b[i] << 0));
        }
}

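/* Push the cached gamma ramp to the hardware using whichever register
 * interface matches this ASIC's display block. */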
void radeon_crtc_load_lut(struct drm_crtc *crtc)
{
        struct drm_device *dev = crtc->dev;
        struct radeon_device *rdev = dev->dev_private;

        if (!crtc->enabled)
                return;

        if (ASIC_IS_DCE4(rdev))
                evergreen_crtc_load_lut(crtc);
        else if (ASIC_IS_AVIVO(rdev))
                avivo_crtc_load_lut(crtc);
        else
                legacy_crtc_load_lut(crtc);
}

/** Sets the color ramps on behalf of fbcon */
void radeon_crtc_fb_gamma_set(struct drm_crtc *crtc, u16 red, u16 green,
                              u16 blue, int regno)
{
        struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);

        radeon_crtc->lut_r[regno] = red >> 6;
        radeon_crtc->lut_g[regno] = green >> 6;
        radeon_crtc->lut_b[regno] = blue >> 6;
}

/** Gets the color ramps on behalf of fbcon */
void radeon_crtc_fb_gamma_get(struct drm_crtc *crtc, u16 *red, u16 *green,
                              u16 *blue, int regno)
{
        struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);

        *red = radeon_crtc->lut_r[regno] << 6;
        *green = radeon_crtc->lut_g[regno] << 6;
        *blue = radeon_crtc->lut_b[regno] << 6;
}

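/* DRM .gamma_set hook: convert the 16-bit userspace ramps to the 10-bit
 * LUT cache and reload the hardware LUT. */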
static void radeon_crtc_gamma_set(struct drm_crtc *crtc, u16 *red, u16 *green,
                                  u16 *blue, uint32_t size)
{
        struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
        int i;

        if (size != 256) {
                return;
        }

        /* userspace palettes are always correct as is */
        for (i = 0; i < 256; i++) {
                radeon_crtc->lut_r[i] = red[i] >> 6;
                radeon_crtc->lut_g[i] = green[i] >> 6;
                radeon_crtc->lut_b[i] = blue[i] >> 6;
        }
        radeon_crtc_load_lut(crtc);
}

static void radeon_crtc_destroy(struct drm_crtc *crtc)
{
        struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);

        drm_crtc_cleanup(crtc);
        kfree(radeon_crtc);
}

static const struct drm_crtc_funcs radeon_crtc_funcs = {
        .cursor_set = radeon_crtc_cursor_set,
        .cursor_move = radeon_crtc_cursor_move,
        .gamma_set = radeon_crtc_gamma_set,
        .set_config = drm_crtc_helper_set_config,
        .destroy = radeon_crtc_destroy,
};

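/* Allocate and register a single CRTC, seed its LUT with a linear ramp and
 * let the AtomBIOS or legacy code finish the per-CRTC setup. */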
static void radeon_crtc_init(struct drm_device *dev, int index)
{
        struct radeon_device *rdev = dev->dev_private;
        struct radeon_crtc *radeon_crtc;
        int i;

        radeon_crtc = kzalloc(sizeof(struct radeon_crtc) + (RADEONFB_CONN_LIMIT * sizeof(struct drm_connector *)), GFP_KERNEL);
        if (radeon_crtc == NULL)
                return;

        drm_crtc_init(dev, &radeon_crtc->base, &radeon_crtc_funcs);

        drm_mode_crtc_set_gamma_size(&radeon_crtc->base, 256);
        radeon_crtc->crtc_id = index;
        rdev->mode_info.crtcs[index] = radeon_crtc;

#if 0
        radeon_crtc->mode_set.crtc = &radeon_crtc->base;
        radeon_crtc->mode_set.connectors = (struct drm_connector **)(radeon_crtc + 1);
        radeon_crtc->mode_set.num_connectors = 0;
#endif

        for (i = 0; i < 256; i++) {
                radeon_crtc->lut_r[i] = i << 2;
                radeon_crtc->lut_g[i] = i << 2;
                radeon_crtc->lut_b[i] = i << 2;
        }

        if (rdev->is_atom_bios && (ASIC_IS_AVIVO(rdev) || radeon_r4xx_atom))
                radeon_atombios_init_crtc(dev, radeon_crtc);
        else
                radeon_legacy_init_crtc(dev, radeon_crtc);
}

static const char *encoder_names[34] = {
        "NONE",
        "INTERNAL_LVDS",
        "INTERNAL_TMDS1",
        "INTERNAL_TMDS2",
        "INTERNAL_DAC1",
        "INTERNAL_DAC2",
        "INTERNAL_SDVOA",
        "INTERNAL_SDVOB",
        "SI170B",
        "CH7303",
        "CH7301",
        "INTERNAL_DVO1",
        "EXTERNAL_SDVOA",
        "EXTERNAL_SDVOB",
        "TITFP513",
        "INTERNAL_LVTM1",
        "VT1623",
        "HDMI_SI1930",
        "HDMI_INTERNAL",
        "INTERNAL_KLDSCP_TMDS1",
        "INTERNAL_KLDSCP_DVO1",
        "INTERNAL_KLDSCP_DAC1",
        "INTERNAL_KLDSCP_DAC2",
        "SI178",
        "MVPU_FPGA",
        "INTERNAL_DDI",
        "VT1625",
        "HDMI_SI1932",
        "DP_AN9801",
        "DP_DP501",
        "INTERNAL_UNIPHY",
        "INTERNAL_KLDSCP_LVTMA",
        "INTERNAL_UNIPHY1",
        "INTERNAL_UNIPHY2",
};

static const char *connector_names[15] = {
        "Unknown",
        "VGA",
        "DVI-I",
        "DVI-D",
        "DVI-A",
        "Composite",
        "S-video",
        "LVDS",
        "Component",
        "DIN",
        "DisplayPort",
        "HDMI-A",
        "HDMI-B",
        "TV",
        "eDP",
};

static const char *hpd_names[7] = {
        "NONE",
        "HPD1",
        "HPD2",
        "HPD3",
        "HPD4",
        "HPD5",
        "HPD6",
};

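/* Log the connector/encoder topology, including HPD pins and the DDC line
 * register assignments for each connector. */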
static void radeon_print_display_setup(struct drm_device *dev)
{
        struct drm_connector *connector;
        struct radeon_connector *radeon_connector;
        struct drm_encoder *encoder;
        struct radeon_encoder *radeon_encoder;
        uint32_t devices;
        int i = 0;

        DRM_INFO("Radeon Display Connectors\n");
        list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
                radeon_connector = to_radeon_connector(connector);
                DRM_INFO("Connector %d:\n", i);
                DRM_INFO(" %s\n", connector_names[connector->connector_type]);
                if (radeon_connector->hpd.hpd != RADEON_HPD_NONE)
                        DRM_INFO(" %s\n", hpd_names[radeon_connector->hpd.hpd]);
                if (radeon_connector->ddc_bus)
                        DRM_INFO(" DDC: 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n",
                                 radeon_connector->ddc_bus->rec.mask_clk_reg,
                                 radeon_connector->ddc_bus->rec.mask_data_reg,
                                 radeon_connector->ddc_bus->rec.a_clk_reg,
                                 radeon_connector->ddc_bus->rec.a_data_reg,
                                 radeon_connector->ddc_bus->rec.en_clk_reg,
                                 radeon_connector->ddc_bus->rec.en_data_reg,
                                 radeon_connector->ddc_bus->rec.y_clk_reg,
                                 radeon_connector->ddc_bus->rec.y_data_reg);
                DRM_INFO(" Encoders:\n");
                list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
                        radeon_encoder = to_radeon_encoder(encoder);
                        devices = radeon_encoder->devices & radeon_connector->devices;
                        if (devices) {
                                if (devices & ATOM_DEVICE_CRT1_SUPPORT)
                                        DRM_INFO(" CRT1: %s\n", encoder_names[radeon_encoder->encoder_id]);
                                if (devices & ATOM_DEVICE_CRT2_SUPPORT)
                                        DRM_INFO(" CRT2: %s\n", encoder_names[radeon_encoder->encoder_id]);
                                if (devices & ATOM_DEVICE_LCD1_SUPPORT)
                                        DRM_INFO(" LCD1: %s\n", encoder_names[radeon_encoder->encoder_id]);
                                if (devices & ATOM_DEVICE_DFP1_SUPPORT)
                                        DRM_INFO(" DFP1: %s\n", encoder_names[radeon_encoder->encoder_id]);
                                if (devices & ATOM_DEVICE_DFP2_SUPPORT)
                                        DRM_INFO(" DFP2: %s\n", encoder_names[radeon_encoder->encoder_id]);
                                if (devices & ATOM_DEVICE_DFP3_SUPPORT)
                                        DRM_INFO(" DFP3: %s\n", encoder_names[radeon_encoder->encoder_id]);
                                if (devices & ATOM_DEVICE_DFP4_SUPPORT)
                                        DRM_INFO(" DFP4: %s\n", encoder_names[radeon_encoder->encoder_id]);
                                if (devices & ATOM_DEVICE_DFP5_SUPPORT)
                                        DRM_INFO(" DFP5: %s\n", encoder_names[radeon_encoder->encoder_id]);
                                if (devices & ATOM_DEVICE_TV1_SUPPORT)
                                        DRM_INFO(" TV1: %s\n", encoder_names[radeon_encoder->encoder_id]);
                                if (devices & ATOM_DEVICE_CV_SUPPORT)
                                        DRM_INFO(" CV: %s\n", encoder_names[radeon_encoder->encoder_id]);
                        }
                }
                i++;
        }
}

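/* Build the encoder and connector lists from the video BIOS (ATOM object or
 * supported-devices tables, or COMBIOS), falling back to hardcoded tables. */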
static bool radeon_setup_enc_conn(struct drm_device *dev)
{
        struct radeon_device *rdev = dev->dev_private;
        struct drm_connector *drm_connector;
        bool ret = false;

        if (rdev->bios) {
                if (rdev->is_atom_bios) {
                        if (rdev->family >= CHIP_R600)
                                ret = radeon_get_atom_connector_info_from_object_table(dev);
                        else
                                ret = radeon_get_atom_connector_info_from_supported_devices_table(dev);
                } else {
                        ret = radeon_get_legacy_connector_info_from_bios(dev);
                        if (ret == false)
                                ret = radeon_get_legacy_connector_info_from_table(dev);
                }
        } else {
                if (!ASIC_IS_AVIVO(rdev))
                        ret = radeon_get_legacy_connector_info_from_table(dev);
        }
        if (ret) {
                radeon_setup_encoder_clones(dev);
                radeon_print_display_setup(dev);
                list_for_each_entry(drm_connector, &dev->mode_config.connector_list, head)
                        radeon_ddc_dump(drm_connector);
        }

        return ret;
}

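/* Fetch an EDID over DDC (or over the DP i2c bus for DP and eDP sinks), fall
 * back to a BIOS-hardcoded EDID if needed, and add the resulting modes. */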
int radeon_ddc_get_modes(struct radeon_connector *radeon_connector)
{
        struct drm_device *dev = radeon_connector->base.dev;
        struct radeon_device *rdev = dev->dev_private;
        int ret = 0;

        if ((radeon_connector->base.connector_type == DRM_MODE_CONNECTOR_DisplayPort) ||
            (radeon_connector->base.connector_type == DRM_MODE_CONNECTOR_eDP)) {
                struct radeon_connector_atom_dig *dig = radeon_connector->con_priv;
                if ((dig->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT ||
                     dig->dp_sink_type == CONNECTOR_OBJECT_ID_eDP) && dig->dp_i2c_bus)
                        radeon_connector->edid = drm_get_edid(&radeon_connector->base, &dig->dp_i2c_bus->adapter);
        }
        if (!radeon_connector->ddc_bus)
                return -1;
        if (!radeon_connector->edid) {
                radeon_connector->edid = drm_get_edid(&radeon_connector->base, &radeon_connector->ddc_bus->adapter);
        }
        /* some servers provide a hardcoded edid in rom for KVMs */
        if (!radeon_connector->edid)
                radeon_connector->edid = radeon_combios_get_hardcoded_edid(rdev);
        if (radeon_connector->edid) {
                drm_mode_connector_update_edid_property(&radeon_connector->base, radeon_connector->edid);
                ret = drm_add_edid_modes(&radeon_connector->base, radeon_connector->edid);
                return ret;
        }
        drm_mode_connector_update_edid_property(&radeon_connector->base, NULL);
        return 0;
}

static int radeon_ddc_dump(struct drm_connector *connector)
{
        struct edid *edid;
        struct radeon_connector *radeon_connector = to_radeon_connector(connector);
        int ret = 0;

        if (!radeon_connector->ddc_bus)
                return -1;
        edid = drm_get_edid(connector, &radeon_connector->ddc_bus->adapter);
        if (edid) {
                kfree(edid);
        }
        return ret;
}

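/* 64-bit by 32-bit division, rounded to the nearest integer; helper for the
 * PLL computations below. */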
static inline uint32_t radeon_div(uint64_t n, uint32_t d)
{
        uint64_t mod;

        n += d / 2;

        mod = do_div(n, d);
        return n;
}

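/* Select reference, feedback, fractional feedback and post dividers for the
 * requested clock by searching the divider space and keeping the combination
 * with the smallest frequency error, subject to the RADEON_PLL_* flags. */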
static void radeon_compute_pll_legacy(struct radeon_pll *pll,
                                      uint64_t freq,
                                      uint32_t *dot_clock_p,
                                      uint32_t *fb_div_p,
                                      uint32_t *frac_fb_div_p,
                                      uint32_t *ref_div_p,
                                      uint32_t *post_div_p)
{
        uint32_t min_ref_div = pll->min_ref_div;
        uint32_t max_ref_div = pll->max_ref_div;
        uint32_t min_post_div = pll->min_post_div;
        uint32_t max_post_div = pll->max_post_div;
        uint32_t min_fractional_feed_div = 0;
        uint32_t max_fractional_feed_div = 0;
        uint32_t best_vco = pll->best_vco;
        uint32_t best_post_div = 1;
        uint32_t best_ref_div = 1;
        uint32_t best_feedback_div = 1;
        uint32_t best_frac_feedback_div = 0;
        uint32_t best_freq = -1;
        uint32_t best_error = 0xffffffff;
        uint32_t best_vco_diff = 1;
        uint32_t post_div;

        DRM_DEBUG("PLL freq %llu %u %u\n", freq, pll->min_ref_div, pll->max_ref_div);
        freq = freq * 1000;

        if (pll->flags & RADEON_PLL_USE_REF_DIV)
                min_ref_div = max_ref_div = pll->reference_div;
        else {
                while (min_ref_div < max_ref_div-1) {
                        uint32_t mid = (min_ref_div + max_ref_div) / 2;
                        uint32_t pll_in = pll->reference_freq / mid;
                        if (pll_in < pll->pll_in_min)
                                max_ref_div = mid;
                        else if (pll_in > pll->pll_in_max)
                                min_ref_div = mid;
                        else
                                break;
                }
        }

        if (pll->flags & RADEON_PLL_USE_POST_DIV)
                min_post_div = max_post_div = pll->post_div;

        if (pll->flags & RADEON_PLL_USE_FRAC_FB_DIV) {
                min_fractional_feed_div = pll->min_frac_feedback_div;
                max_fractional_feed_div = pll->max_frac_feedback_div;
        }

        for (post_div = min_post_div; post_div <= max_post_div; ++post_div) {
                uint32_t ref_div;

                if ((pll->flags & RADEON_PLL_NO_ODD_POST_DIV) && (post_div & 1))
                        continue;

                /* legacy radeons only have a few post_divs */
                if (pll->flags & RADEON_PLL_LEGACY) {
                        if ((post_div == 5) ||
                            (post_div == 7) ||
                            (post_div == 9) ||
                            (post_div == 10) ||
                            (post_div == 11) ||
                            (post_div == 13) ||
                            (post_div == 14) ||
                            (post_div == 15))
                                continue;
                }

                for (ref_div = min_ref_div; ref_div <= max_ref_div; ++ref_div) {
                        uint32_t feedback_div, current_freq = 0, error, vco_diff;
                        uint32_t pll_in = pll->reference_freq / ref_div;
                        uint32_t min_feed_div = pll->min_feedback_div;
                        uint32_t max_feed_div = pll->max_feedback_div + 1;

                        if (pll_in < pll->pll_in_min || pll_in > pll->pll_in_max)
                                continue;

                        while (min_feed_div < max_feed_div) {
                                uint32_t vco;
                                uint32_t min_frac_feed_div = min_fractional_feed_div;
                                uint32_t max_frac_feed_div = max_fractional_feed_div + 1;
                                uint32_t frac_feedback_div;
                                uint64_t tmp;

                                feedback_div = (min_feed_div + max_feed_div) / 2;

                                tmp = (uint64_t)pll->reference_freq * feedback_div;
                                vco = radeon_div(tmp, ref_div);

                                if (vco < pll->pll_out_min) {
                                        min_feed_div = feedback_div + 1;
                                        continue;
                                } else if (vco > pll->pll_out_max) {
                                        max_feed_div = feedback_div;
                                        continue;
                                }

                                while (min_frac_feed_div < max_frac_feed_div) {
                                        frac_feedback_div = (min_frac_feed_div + max_frac_feed_div) / 2;
                                        tmp = (uint64_t)pll->reference_freq * 10000 * feedback_div;
                                        tmp += (uint64_t)pll->reference_freq * 1000 * frac_feedback_div;
                                        current_freq = radeon_div(tmp, ref_div * post_div);

                                        if (pll->flags & RADEON_PLL_PREFER_CLOSEST_LOWER) {
                                                error = freq - current_freq;
                                                error = error < 0 ? 0xffffffff : error;
                                        } else
                                                error = abs(current_freq - freq);
                                        vco_diff = abs(vco - best_vco);

                                        if ((best_vco == 0 && error < best_error) ||
                                            (best_vco != 0 &&
                                             (error < best_error - 100 ||
                                              (abs(error - best_error) < 100 && vco_diff < best_vco_diff)))) {
                                                best_post_div = post_div;
                                                best_ref_div = ref_div;
                                                best_feedback_div = feedback_div;
                                                best_frac_feedback_div = frac_feedback_div;
                                                best_freq = current_freq;
                                                best_error = error;
                                                best_vco_diff = vco_diff;
                                        } else if (current_freq == freq) {
                                                if (best_freq == -1) {
                                                        best_post_div = post_div;
                                                        best_ref_div = ref_div;
                                                        best_feedback_div = feedback_div;
                                                        best_frac_feedback_div = frac_feedback_div;
                                                        best_freq = current_freq;
                                                        best_error = error;
                                                        best_vco_diff = vco_diff;
                                                } else if (((pll->flags & RADEON_PLL_PREFER_LOW_REF_DIV) && (ref_div < best_ref_div)) ||
                                                           ((pll->flags & RADEON_PLL_PREFER_HIGH_REF_DIV) && (ref_div > best_ref_div)) ||
                                                           ((pll->flags & RADEON_PLL_PREFER_LOW_FB_DIV) && (feedback_div < best_feedback_div)) ||
                                                           ((pll->flags & RADEON_PLL_PREFER_HIGH_FB_DIV) && (feedback_div > best_feedback_div)) ||
                                                           ((pll->flags & RADEON_PLL_PREFER_LOW_POST_DIV) && (post_div < best_post_div)) ||
                                                           ((pll->flags & RADEON_PLL_PREFER_HIGH_POST_DIV) && (post_div > best_post_div))) {
                                                        best_post_div = post_div;
                                                        best_ref_div = ref_div;
                                                        best_feedback_div = feedback_div;
                                                        best_frac_feedback_div = frac_feedback_div;
                                                        best_freq = current_freq;
                                                        best_error = error;
                                                        best_vco_diff = vco_diff;
                                                }
                                        }
                                        if (current_freq < freq)
                                                min_frac_feed_div = frac_feedback_div + 1;
                                        else
                                                max_frac_feed_div = frac_feedback_div;
                                }
                                if (current_freq < freq)
                                        min_feed_div = feedback_div + 1;
                                else
                                        max_feed_div = feedback_div;
                        }
                }
        }

        *dot_clock_p = best_freq / 10000;
        *fb_div_p = best_feedback_div;
        *frac_fb_div_p = best_frac_feedback_div;
        *ref_div_p = best_ref_div;
        *post_div_p = best_post_div;
}

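/* Fixed-point (fixed20_12) divider search used on AVIVO parts: step the
 * reference and post dividers until the computed pixel clock is within
 * 0.25% of the target or no valid dividers remain. */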
static void radeon_compute_pll_avivo(struct radeon_pll *pll,
                                     uint64_t freq,
                                     uint32_t *dot_clock_p,
                                     uint32_t *fb_div_p,
                                     uint32_t *frac_fb_div_p,
                                     uint32_t *ref_div_p,
                                     uint32_t *post_div_p)
{
        fixed20_12 m, n, frac_n, p, f_vco, f_pclk, best_freq;
        fixed20_12 pll_out_max, pll_out_min;
        fixed20_12 pll_in_max, pll_in_min;
        fixed20_12 reference_freq;
        fixed20_12 error, ffreq, a, b;

        pll_out_max.full = rfixed_const(pll->pll_out_max);
        pll_out_min.full = rfixed_const(pll->pll_out_min);
        pll_in_max.full = rfixed_const(pll->pll_in_max);
        pll_in_min.full = rfixed_const(pll->pll_in_min);
        reference_freq.full = rfixed_const(pll->reference_freq);
        do_div(freq, 10);
        ffreq.full = rfixed_const(freq);
        error.full = rfixed_const(100 * 100);

        /* max p */
        p.full = rfixed_div(pll_out_max, ffreq);
        p.full = rfixed_floor(p);

        /* min m */
        m.full = rfixed_div(reference_freq, pll_in_max);
        m.full = rfixed_ceil(m);

        while (1) {
                n.full = rfixed_div(ffreq, reference_freq);
                n.full = rfixed_mul(n, m);
                n.full = rfixed_mul(n, p);

                f_vco.full = rfixed_div(n, m);
                f_vco.full = rfixed_mul(f_vco, reference_freq);

                f_pclk.full = rfixed_div(f_vco, p);

                if (f_pclk.full > ffreq.full)
                        error.full = f_pclk.full - ffreq.full;
                else
                        error.full = ffreq.full - f_pclk.full;
                error.full = rfixed_div(error, f_pclk);
                a.full = rfixed_const(100 * 100);
                error.full = rfixed_mul(error, a);

                a.full = rfixed_mul(m, p);
                a.full = rfixed_div(n, a);
                best_freq.full = rfixed_mul(reference_freq, a);

                if (rfixed_trunc(error) < 25)
                        break;

                a.full = rfixed_const(1);
                m.full = m.full + a.full;
                a.full = rfixed_div(reference_freq, m);
                if (a.full >= pll_in_min.full)
                        continue;

                m.full = rfixed_div(reference_freq, pll_in_max);
                m.full = rfixed_ceil(m);
                a.full = rfixed_const(1);
                p.full = p.full - a.full;
                a.full = rfixed_mul(p, ffreq);
                if (a.full >= pll_out_min.full)
                        continue;
                else {
                        DRM_ERROR("Unable to find pll dividers\n");
                        break;
                }
        }

        a.full = rfixed_const(10);
        b.full = rfixed_mul(n, a);

        frac_n.full = rfixed_floor(n);
        frac_n.full = rfixed_mul(frac_n, a);
        frac_n.full = b.full - frac_n.full;

        *dot_clock_p = rfixed_trunc(best_freq);
        *fb_div_p = rfixed_trunc(n);
        *frac_fb_div_p = rfixed_trunc(frac_n);
        *ref_div_p = rfixed_trunc(m);
        *post_div_p = rfixed_trunc(p);

        DRM_DEBUG("%u %d.%d, %d, %d\n", *dot_clock_p * 10, *fb_div_p, *frac_fb_div_p, *ref_div_p, *post_div_p);
}

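/* Dispatch to the AVIVO or legacy divider computation based on pll->algo. */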
void radeon_compute_pll(struct radeon_pll *pll,
                        uint64_t freq,
                        uint32_t *dot_clock_p,
                        uint32_t *fb_div_p,
                        uint32_t *frac_fb_div_p,
                        uint32_t *ref_div_p,
                        uint32_t *post_div_p)
{
        switch (pll->algo) {
        case PLL_ALGO_AVIVO:
                radeon_compute_pll_avivo(pll, freq, dot_clock_p, fb_div_p,
                                         frac_fb_div_p, ref_div_p, post_div_p);
                break;
        case PLL_ALGO_LEGACY:
        default:
                radeon_compute_pll_legacy(pll, freq, dot_clock_p, fb_div_p,
                                          frac_fb_div_p, ref_div_p, post_div_p);
                break;
        }
}

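/* DRM framebuffer hooks: a radeon_framebuffer is a drm_framebuffer wrapping
 * a single GEM object. */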
static void radeon_user_framebuffer_destroy(struct drm_framebuffer *fb)
{
        struct radeon_framebuffer *radeon_fb = to_radeon_framebuffer(fb);
        struct drm_device *dev = fb->dev;

        if (fb->fbdev)
                radeonfb_remove(dev, fb);

        if (radeon_fb->obj) {
                mutex_lock(&dev->struct_mutex);
                drm_gem_object_unreference(radeon_fb->obj);
                mutex_unlock(&dev->struct_mutex);
        }
        drm_framebuffer_cleanup(fb);
        kfree(radeon_fb);
}

static int radeon_user_framebuffer_create_handle(struct drm_framebuffer *fb,
                                                 struct drm_file *file_priv,
                                                 unsigned int *handle)
{
        struct radeon_framebuffer *radeon_fb = to_radeon_framebuffer(fb);

        return drm_gem_handle_create(file_priv, radeon_fb->obj, handle);
}

static const struct drm_framebuffer_funcs radeon_fb_funcs = {
        .destroy = radeon_user_framebuffer_destroy,
        .create_handle = radeon_user_framebuffer_create_handle,
};

struct drm_framebuffer *
radeon_framebuffer_create(struct drm_device *dev,
                          struct drm_mode_fb_cmd *mode_cmd,
                          struct drm_gem_object *obj)
{
        struct radeon_framebuffer *radeon_fb;

        radeon_fb = kzalloc(sizeof(*radeon_fb), GFP_KERNEL);
        if (radeon_fb == NULL) {
                return NULL;
        }
        drm_framebuffer_init(dev, &radeon_fb->base, &radeon_fb_funcs);
        drm_helper_mode_fill_fb_struct(&radeon_fb->base, mode_cmd);
        radeon_fb->obj = obj;
        return &radeon_fb->base;
}

static struct drm_framebuffer *
radeon_user_framebuffer_create(struct drm_device *dev,
                               struct drm_file *file_priv,
                               struct drm_mode_fb_cmd *mode_cmd)
{
        struct drm_gem_object *obj;

        obj = drm_gem_object_lookup(dev, file_priv, mode_cmd->handle);
        if (obj == NULL) {
                dev_err(&dev->pdev->dev, "No GEM object associated to handle 0x%08X, "
                        "can't create framebuffer\n", mode_cmd->handle);
                return NULL;
        }
        return radeon_framebuffer_create(dev, mode_cmd, obj);
}

static const struct drm_mode_config_funcs radeon_mode_funcs = {
        .fb_create = radeon_user_framebuffer_create,
        .fb_changed = radeonfb_probe,
};

struct drm_prop_enum_list {
        int type;
        char *name;
};

static struct drm_prop_enum_list radeon_tmds_pll_enum_list[] =
{ { 0, "driver" },
  { 1, "bios" },
};

static struct drm_prop_enum_list radeon_tv_std_enum_list[] =
{ { TV_STD_NTSC, "ntsc" },
  { TV_STD_PAL, "pal" },
  { TV_STD_PAL_M, "pal-m" },
  { TV_STD_PAL_60, "pal-60" },
  { TV_STD_NTSC_J, "ntsc-j" },
  { TV_STD_SCART_PAL, "scart-pal" },
  { TV_STD_PAL_CN, "pal-cn" },
  { TV_STD_SECAM, "secam" },
};

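/* Create the driver-private connector properties: coherent mode, TMDS PLL
 * source, load detection, scaling mode and TV standard. */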
static int radeon_modeset_create_props(struct radeon_device *rdev)
{
        int i, sz;

        if (rdev->is_atom_bios) {
                rdev->mode_info.coherent_mode_property =
                        drm_property_create(rdev->ddev,
                                            DRM_MODE_PROP_RANGE,
                                            "coherent", 2);
                if (!rdev->mode_info.coherent_mode_property)
                        return -ENOMEM;

                rdev->mode_info.coherent_mode_property->values[0] = 0;
                rdev->mode_info.coherent_mode_property->values[1] = 1;
        }

        if (!ASIC_IS_AVIVO(rdev)) {
                sz = ARRAY_SIZE(radeon_tmds_pll_enum_list);
                rdev->mode_info.tmds_pll_property =
                        drm_property_create(rdev->ddev,
                                            DRM_MODE_PROP_ENUM,
                                            "tmds_pll", sz);
                for (i = 0; i < sz; i++) {
                        drm_property_add_enum(rdev->mode_info.tmds_pll_property,
                                              i,
                                              radeon_tmds_pll_enum_list[i].type,
                                              radeon_tmds_pll_enum_list[i].name);
                }
        }

        rdev->mode_info.load_detect_property =
                drm_property_create(rdev->ddev,
                                    DRM_MODE_PROP_RANGE,
                                    "load detection", 2);
        if (!rdev->mode_info.load_detect_property)
                return -ENOMEM;
        rdev->mode_info.load_detect_property->values[0] = 0;
        rdev->mode_info.load_detect_property->values[1] = 1;

        drm_mode_create_scaling_mode_property(rdev->ddev);

        sz = ARRAY_SIZE(radeon_tv_std_enum_list);
        rdev->mode_info.tv_std_property =
                drm_property_create(rdev->ddev,
                                    DRM_MODE_PROP_ENUM,
                                    "tv standard", sz);
        for (i = 0; i < sz; i++) {
                drm_property_add_enum(rdev->mode_info.tv_std_property,
                                      i,
                                      radeon_tv_std_enum_list[i].type,
                                      radeon_tv_std_enum_list[i].name);
        }

        return 0;
}

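/* Top-level KMS init: mode config limits, properties, CRTCs, BIOS
 * encoder/connector setup, hotplug detect and the initial configuration. */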
int radeon_modeset_init(struct radeon_device *rdev)
{
        int i;
        int ret;

        drm_mode_config_init(rdev->ddev);
        rdev->mode_info.mode_config_initialized = true;

        rdev->ddev->mode_config.funcs = (void *)&radeon_mode_funcs;

        if (ASIC_IS_AVIVO(rdev)) {
                rdev->ddev->mode_config.max_width = 8192;
                rdev->ddev->mode_config.max_height = 8192;
        } else {
                rdev->ddev->mode_config.max_width = 4096;
                rdev->ddev->mode_config.max_height = 4096;
        }

        rdev->ddev->mode_config.fb_base = rdev->mc.aper_base;

        ret = radeon_modeset_create_props(rdev);
        if (ret) {
                return ret;
        }

        /* check combios for a valid hardcoded EDID - Sun servers */
        if (!rdev->is_atom_bios) {
                /* check for hardcoded EDID in BIOS */
                radeon_combios_check_hardcoded_edid(rdev);
        }

        if (rdev->flags & RADEON_SINGLE_CRTC)
                rdev->num_crtc = 1;
        else {
                if (ASIC_IS_DCE4(rdev))
                        rdev->num_crtc = 6;
                else
                        rdev->num_crtc = 2;
        }

        /* allocate crtcs */
        for (i = 0; i < rdev->num_crtc; i++) {
                radeon_crtc_init(rdev->ddev, i);
        }

        /* okay we should have all the bios connectors */
        ret = radeon_setup_enc_conn(rdev->ddev);
        if (!ret) {
                return ret;
        }
        /* initialize hpd */
        radeon_hpd_init(rdev);
        drm_helper_initial_config(rdev->ddev);
        return 0;
}

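/* Undo radeon_modeset_init() and free the cached hardcoded EDID, if any. */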
void radeon_modeset_fini(struct radeon_device *rdev)
{
        kfree(rdev->mode_info.bios_hardcoded_edid);

        if (rdev->mode_info.mode_config_initialized) {
                radeon_hpd_fini(rdev);
                drm_mode_config_cleanup(rdev->ddev);
                rdev->mode_info.mode_config_initialized = false;
        }
}

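/* CRTC mode fixup: decide whether the RMX scaler is needed and compute the
 * scaling ratios from the encoder's native mode; all encoders driving the
 * CRTC must agree on the scaling mode. */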
bool radeon_crtc_scaling_mode_fixup(struct drm_crtc *crtc,
                                    struct drm_display_mode *mode,
                                    struct drm_display_mode *adjusted_mode)
{
        struct drm_device *dev = crtc->dev;
        struct drm_encoder *encoder;
        struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
        struct radeon_encoder *radeon_encoder;
        bool first = true;

        list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
                radeon_encoder = to_radeon_encoder(encoder);
                if (encoder->crtc != crtc)
                        continue;
                if (first) {
                        /* set scaling */
                        if (radeon_encoder->rmx_type == RMX_OFF)
                                radeon_crtc->rmx_type = RMX_OFF;
                        else if (mode->hdisplay < radeon_encoder->native_mode.hdisplay ||
                                 mode->vdisplay < radeon_encoder->native_mode.vdisplay)
                                radeon_crtc->rmx_type = radeon_encoder->rmx_type;
                        else
                                radeon_crtc->rmx_type = RMX_OFF;
                        /* copy native mode */
                        memcpy(&radeon_crtc->native_mode,
                               &radeon_encoder->native_mode,
                               sizeof(struct drm_display_mode));
                        first = false;
                } else {
                        if (radeon_crtc->rmx_type != radeon_encoder->rmx_type) {
                                /* WARNING: Right now this can't happen, but
                                 * in the future we need to check that scaling
                                 * is consistent across different encoders
                                 * (i.e. all encoders can work with the same
                                 * scaling).
                                 */
                                DRM_ERROR("Scaling not consistent across encoders.\n");
                                return false;
                        }
                }
        }
        if (radeon_crtc->rmx_type != RMX_OFF) {
                fixed20_12 a, b;
                a.full = rfixed_const(crtc->mode.vdisplay);
                b.full = rfixed_const(radeon_crtc->native_mode.hdisplay);
                radeon_crtc->vsc.full = rfixed_div(a, b);
                a.full = rfixed_const(crtc->mode.hdisplay);
                b.full = rfixed_const(radeon_crtc->native_mode.vdisplay);
                radeon_crtc->hsc.full = rfixed_div(a, b);
        } else {
                radeon_crtc->vsc.full = rfixed_const(1);
                radeon_crtc->hsc.full = rfixed_const(1);
        }
        return true;
}