drm/nvd0/disp: move link training helpers into core as display methods
[deliverable/linux.git] / drivers / gpu / drm / nouveau / nvd0_display.c
1 /*
2 * Copyright 2011 Red Hat Inc.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
13 *
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
21 *
22 * Authors: Ben Skeggs
23 */
24
25 #include <linux/dma-mapping.h>
26
27 #include <drm/drmP.h>
28 #include <drm/drm_crtc_helper.h>
29
30 #include "nouveau_drm.h"
31 #include "nouveau_dma.h"
32 #include "nouveau_gem.h"
33 #include "nouveau_connector.h"
34 #include "nouveau_encoder.h"
35 #include "nouveau_crtc.h"
36 #include "nouveau_fence.h"
37 #include "nv50_display.h"
38
39 #include <core/client.h>
40 #include <core/gpuobj.h>
41 #include <core/class.h>
42
43 #include <subdev/timer.h>
44 #include <subdev/bar.h>
45 #include <subdev/fb.h>
46
/* number of DMA object handles instantiated per DMA channel
 * (NOTE(review): count inferred from the name; confirm against users) */
#define EVO_DMA_NR 9

/* EVO channel numbering: one master (core) channel, then per-head
 * flip/base, overlay, overlay-immediate and cursor channels */
#define EVO_MASTER  (0x00)
#define EVO_FLIP(c) (0x01 + (c))
#define EVO_OVLY(c) (0x05 + (c))
#define EVO_OIMM(c) (0x09 + (c))
#define EVO_CURS(c) (0x0d + (c))

/* offsets in shared sync bo of various structures */
#define EVO_SYNC(c, o) ((c) * 0x0100 + (o))
#define EVO_MAST_NTFY EVO_SYNC( 0, 0x00)
#define EVO_FLIP_SEM0(c) EVO_SYNC((c), 0x00)
#define EVO_FLIP_SEM1(c) EVO_SYNC((c), 0x10)

/* object handles used when creating the core channel, the per-head
 * channels and their push buffer DMA objects; the channel type and
 * head index are encoded into the low bits */
#define EVO_CORE_HANDLE      (0xd1500000)
#define EVO_CHAN_HANDLE(t,i) (0xd15c0000 | (((t) & 0x00ff) << 8) | (i))
#define EVO_CHAN_OCLASS(t,c) ((nv_hclass(c) & 0xff00) | ((t) & 0x00ff))
#define EVO_PUSH_HANDLE(t,i) (0xd15b0000 | (i) |                               \
			      (((NV50_DISP_##t##_CLASS) & 0x00ff) << 8))
66
67 /******************************************************************************
68 * EVO channel
69 *****************************************************************************/
70
/* State common to every EVO channel type: the object representing the
 * channel, and the handle it was created under.  A non-zero handle
 * doubles as the "creation succeeded" flag checked at teardown. */
struct nvd0_chan {
	struct nouveau_object *user;
	u32 handle;
};
75
76 static int
77 nvd0_chan_create(struct nouveau_object *core, u32 bclass, u8 head,
78 void *data, u32 size, struct nvd0_chan *chan)
79 {
80 struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
81 const u32 oclass = EVO_CHAN_OCLASS(bclass, core);
82 const u32 handle = EVO_CHAN_HANDLE(bclass, head);
83 int ret;
84
85 ret = nouveau_object_new(client, EVO_CORE_HANDLE, handle,
86 oclass, data, size, &chan->user);
87 if (ret)
88 return ret;
89
90 chan->handle = handle;
91 return 0;
92 }
93
94 static void
95 nvd0_chan_destroy(struct nouveau_object *core, struct nvd0_chan *chan)
96 {
97 struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
98 if (chan->handle)
99 nouveau_object_del(client, EVO_CORE_HANDLE, chan->handle);
100 }
101
102 /******************************************************************************
103 * PIO EVO channel
104 *****************************************************************************/
105
/* PIO-mode EVO channel (cursor, overlay-immediate): methods are poked
 * directly into the channel's user area rather than via a push buffer,
 * so no extra state beyond the base channel is needed. */
struct nvd0_pioc {
	struct nvd0_chan base;
};

/* Destroy a PIO channel; thin wrapper over nvd0_chan_destroy(). */
static void
nvd0_pioc_destroy(struct nouveau_object *core, struct nvd0_pioc *pioc)
{
	nvd0_chan_destroy(core, &pioc->base);
}

/* Create a PIO channel; thin wrapper over nvd0_chan_create(). */
static int
nvd0_pioc_create(struct nouveau_object *core, u32 bclass, u8 head,
		 void *data, u32 size, struct nvd0_pioc *pioc)
{
	return nvd0_chan_create(core, bclass, head, data, size, &pioc->base);
}
122
123 /******************************************************************************
124 * DMA EVO channel
125 *****************************************************************************/
126
/* DMA-mode EVO channel: commands are written into a PAGE_SIZE push
 * buffer in coherent system memory and submitted by bumping the
 * channel's PUT pointer (see evo_wait()/evo_kick()). */
struct nvd0_dmac {
	struct nvd0_chan base;
	dma_addr_t handle;	/* bus address of the push buffer */
	u32 *ptr;		/* CPU mapping of the push buffer */
};
132
133 static void
134 nvd0_dmac_destroy(struct nouveau_object *core, struct nvd0_dmac *dmac)
135 {
136 if (dmac->ptr) {
137 struct pci_dev *pdev = nv_device(core)->pdev;
138 pci_free_consistent(pdev, PAGE_SIZE, dmac->ptr, dmac->handle);
139 }
140
141 nvd0_chan_destroy(core, &dmac->base);
142 }
143
144 static int
145 nvd0_dmac_create(struct nouveau_object *core, u32 bclass, u8 head,
146 void *data, u32 size, u64 syncbuf,
147 struct nvd0_dmac *dmac)
148 {
149 struct nouveau_fb *pfb = nouveau_fb(core);
150 struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
151 struct nouveau_object *object;
152 u32 pushbuf = *(u32 *)data;
153 dma_addr_t handle;
154 void *ptr;
155 int ret;
156
157 ptr = pci_alloc_consistent(nv_device(core)->pdev, PAGE_SIZE, &handle);
158 if (!ptr)
159 return -ENOMEM;
160
161 ret = nouveau_object_new(client, NVDRM_DEVICE, pushbuf,
162 NV_DMA_FROM_MEMORY_CLASS,
163 &(struct nv_dma_class) {
164 .flags = NV_DMA_TARGET_PCI_US |
165 NV_DMA_ACCESS_RD,
166 .start = handle + 0x0000,
167 .limit = handle + 0x0fff,
168 }, sizeof(struct nv_dma_class), &object);
169 if (ret)
170 return ret;
171
172 ret = nvd0_chan_create(core, bclass, head, data, size, &dmac->base);
173 if (ret)
174 return ret;
175
176 dmac->handle = handle;
177 dmac->ptr = ptr;
178
179 ret = nouveau_object_new(client, dmac->base.handle, NvEvoSync,
180 NV_DMA_IN_MEMORY_CLASS,
181 &(struct nv_dma_class) {
182 .flags = NV_DMA_TARGET_VRAM |
183 NV_DMA_ACCESS_RDWR,
184 .start = syncbuf + 0x0000,
185 .limit = syncbuf + 0x0fff,
186 }, sizeof(struct nv_dma_class), &object);
187 if (ret)
188 goto out;
189
190 ret = nouveau_object_new(client, dmac->base.handle, NvEvoVRAM,
191 NV_DMA_IN_MEMORY_CLASS,
192 &(struct nv_dma_class) {
193 .flags = NV_DMA_TARGET_VRAM |
194 NV_DMA_ACCESS_RDWR,
195 .start = 0,
196 .limit = pfb->ram.size - 1,
197 }, sizeof(struct nv_dma_class), &object);
198 if (ret)
199 goto out;
200
201 ret = nouveau_object_new(client, dmac->base.handle, NvEvoVRAM_LP,
202 NV_DMA_IN_MEMORY_CLASS,
203 &(struct nv_dma_class) {
204 .flags = NV_DMA_TARGET_VRAM |
205 NV_DMA_ACCESS_RDWR,
206 .start = 0,
207 .limit = pfb->ram.size - 1,
208 .conf0 = NVD0_DMA_CONF0_ENABLE |
209 NVD0_DMA_CONF0_PAGE_LP,
210 }, sizeof(struct nv_dma_class), &object);
211 if (ret)
212 goto out;
213
214 ret = nouveau_object_new(client, dmac->base.handle, NvEvoFB32,
215 NV_DMA_IN_MEMORY_CLASS,
216 &(struct nv_dma_class) {
217 .flags = NV_DMA_TARGET_VRAM |
218 NV_DMA_ACCESS_RDWR,
219 .start = 0,
220 .limit = pfb->ram.size - 1,
221 .conf0 = 0x00fe |
222 NVD0_DMA_CONF0_ENABLE |
223 NVD0_DMA_CONF0_PAGE_LP,
224 }, sizeof(struct nv_dma_class), &object);
225 out:
226 if (ret)
227 nvd0_dmac_destroy(core, dmac);
228 return ret;
229 }
230
/* core (master) channel */
struct nvd0_mast {
	struct nvd0_dmac base;
};

/* per-head cursor channel (PIO) */
struct nvd0_curs {
	struct nvd0_pioc base;
};

/* per-head base/flip channel, plus the software side of the page-flip
 * semaphore: offset into the shared sync bo and the next token value */
struct nvd0_sync {
	struct nvd0_dmac base;
	struct {
		u32 offset;
		u16 value;
	} sem;
};

/* per-head overlay channel (DMA) */
struct nvd0_ovly {
	struct nvd0_dmac base;
};

/* per-head overlay-immediate channel (PIO) */
struct nvd0_oimm {
	struct nvd0_pioc base;
};

/* One display head: the nouveau CRTC plus its four EVO channels. */
struct nvd0_head {
	struct nouveau_crtc base;
	struct nvd0_curs curs;
	struct nvd0_sync sync;
	struct nvd0_ovly ovly;
	struct nvd0_oimm oimm;
};

/* Downcast helpers from a drm_crtc to the head and its channels. */
#define nvd0_head(c) ((struct nvd0_head *)nouveau_crtc(c))
#define nvd0_curs(c) (&nvd0_head(c)->curs)
#define nvd0_sync(c) (&nvd0_head(c)->sync)
#define nvd0_ovly(c) (&nvd0_head(c)->ovly)
#define nvd0_oimm(c) (&nvd0_head(c)->oimm)
#define nvd0_chan(c) (&(c)->base.base)

/* Top-level display state: the core display object, the master channel,
 * a modeset bookkeeping word, and the shared sync buffer object. */
struct nvd0_disp {
	struct nouveau_object *core;
	struct nvd0_mast mast;

	u32 modeset;

	struct nouveau_bo *sync;
};

/* Fetch the nvd0 display state hung off the generic nouveau display. */
static struct nvd0_disp *
nvd0_disp(struct drm_device *dev)
{
	return nouveau_display(dev)->priv;
}

#define nvd0_mast(d) (&nvd0_disp(d)->mast)
286
287 static struct drm_crtc *
288 nvd0_display_crtc_get(struct drm_encoder *encoder)
289 {
290 return nouveau_encoder(encoder)->crtc;
291 }
292
293 /******************************************************************************
294 * EVO channel helpers
295 *****************************************************************************/
/* Reserve space for @nr dwords in a DMA channel's push buffer and
 * return a pointer at which to write them.  The current PUT offset is
 * read back from the channel user area (offset 0x0000, in bytes); if
 * the request would run past the end of the page, a jump-to-start
 * command (0x20000000) is written, PUT is reset to 0, and we wait for
 * the hardware GET pointer (user offset 0x0004) to reach 0 before
 * reusing the buffer from the top.  Returns NULL if the channel stalls.
 */
static u32 *
evo_wait(void *evoc, int nr)
{
	struct nvd0_dmac *dmac = evoc;
	u32 put = nv_ro32(dmac->base.user, 0x0000) / 4;

	if (put + nr >= (PAGE_SIZE / 4)) {
		/* jump back to the start of the push buffer */
		dmac->ptr[put] = 0x20000000;

		nv_wo32(dmac->base.user, 0x0000, 0x00000000);
		if (!nv_wait(dmac->base.user, 0x0004, ~0, 0x00000000)) {
			NV_ERROR(dmac->base.user, "channel stalled\n");
			return NULL;
		}

		put = 0;
	}

	return dmac->ptr + put;
}
316
/* Submit commands written after evo_wait() by advancing the channel's
 * PUT pointer (byte offset, hence the << 2 from the dword count). */
static void
evo_kick(u32 *push, void *evoc)
{
	struct nvd0_dmac *dmac = evoc;
	nv_wo32(dmac->base.user, 0x0000, (push - dmac->ptr) << 2);
}

/* Emit an EVO method header ((size << 18) | method) / a data dword,
 * advancing the push pointer. */
#define evo_mthd(p,m,s) *((p)++) = (((s) << 18) | (m))
#define evo_data(p,d) *((p)++) = (d)
326
/* nv_wait_cb() callback: completion is signalled by the core channel
 * writing a non-zero notifier value into the shared sync bo. */
static bool
evo_sync_wait(void *data)
{
	return nouveau_bo_rd32(data, EVO_MAST_NTFY) != 0x00000000;
}
332
/* Ask the core channel to write its notifier and spin (via nv_wait_cb)
 * until it does, i.e. until all previously submitted core-channel
 * methods have been processed.  Returns 0 on completion, -EBUSY if the
 * push buffer could not be reserved or the wait timed out. */
static int
evo_sync(struct drm_device *dev)
{
	struct nouveau_device *device = nouveau_dev(dev);
	struct nvd0_disp *disp = nvd0_disp(dev);
	struct nvd0_mast *mast = nvd0_mast(dev);
	u32 *push = evo_wait(mast, 8);
	if (push) {
		/* clear the notifier, then request it be rewritten */
		nouveau_bo_wr32(disp->sync, EVO_MAST_NTFY, 0x00000000);
		evo_mthd(push, 0x0084, 1);
		evo_data(push, 0x80000000 | EVO_MAST_NTFY);
		evo_mthd(push, 0x0080, 2);
		evo_data(push, 0x00000000);
		evo_data(push, 0x00000000);
		evo_kick(push, mast);
		if (nv_wait_cb(device, evo_sync_wait, disp->sync))
			return 0;
	}

	return -EBUSY;
}
354
355 /******************************************************************************
356 * Page flipping channel
357 *****************************************************************************/
358 struct nouveau_bo *
359 nvd0_display_crtc_sema(struct drm_device *dev, int crtc)
360 {
361 return nvd0_disp(dev)->sync;
362 }
363
364 void
365 nvd0_display_flip_stop(struct drm_crtc *crtc)
366 {
367 struct nvd0_sync *sync = nvd0_sync(crtc);
368 u32 *push;
369
370 push = evo_wait(sync, 8);
371 if (push) {
372 evo_mthd(push, 0x0084, 1);
373 evo_data(push, 0x00000000);
374 evo_mthd(push, 0x0094, 1);
375 evo_data(push, 0x00000000);
376 evo_mthd(push, 0x00c0, 1);
377 evo_data(push, 0x00000000);
378 evo_mthd(push, 0x0080, 1);
379 evo_data(push, 0x00000000);
380 evo_kick(push, sync);
381 }
382 }
383
/* Queue a page flip to @fb on @crtc.
 *
 * If a rendering channel is given, a semaphore handshake is emitted on
 * it so the flip waits for rendering: the channel releases the "ready"
 * token (0xf00d0000 | value) at the current semaphore offset, then
 * acquires the "flipped" token (0x74b1e000) at the other slot of the
 * 0x10-byte pair.  Without a channel, the ready token is written
 * directly via the CPU and the core channel is synced.  The display
 * side (methods 0x0088..) acquires the ready token and releases the
 * flipped token when the flip completes.
 *
 * Returns 0 on success, -EBUSY if push buffer space is unavailable, or
 * the RING_SPACE() error.
 */
int
nvd0_display_flip_next(struct drm_crtc *crtc, struct drm_framebuffer *fb,
		       struct nouveau_channel *chan, u32 swap_interval)
{
	struct nouveau_framebuffer *nv_fb = nouveau_framebuffer(fb);
	struct nvd0_disp *disp = nvd0_disp(crtc->dev);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nvd0_sync *sync = nvd0_sync(crtc);
	u64 offset;
	u32 *push;
	int ret;

	/* interval lives in bits 4+ of method 0x0084; 0 means flip
	 * immediately (flag 0x100) */
	swap_interval <<= 4;
	if (swap_interval == 0)
		swap_interval |= 0x100;

	push = evo_wait(sync, 128);
	if (unlikely(push == NULL))
		return -EBUSY;

	/* synchronise with the rendering channel, if necessary */
	if (likely(chan)) {
		ret = RING_SPACE(chan, 10);
		if (ret)
			return ret;


		offset = nvc0_fence_crtc(chan, nv_crtc->index);
		offset += sync->sem.offset;

		/* release the ready token for this flip... */
		BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
		OUT_RING  (chan, upper_32_bits(offset));
		OUT_RING  (chan, lower_32_bits(offset));
		OUT_RING  (chan, 0xf00d0000 | sync->sem.value);
		OUT_RING  (chan, 0x1002);
		/* ...then wait for the previous flip's completion token */
		BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
		OUT_RING  (chan, upper_32_bits(offset));
		OUT_RING  (chan, lower_32_bits(offset ^ 0x10));
		OUT_RING  (chan, 0x74b1e000);
		OUT_RING  (chan, 0x1001);
		FIRE_RING (chan);
	} else {
		/* no renderer: release the token from the CPU and flush
		 * the core channel */
		nouveau_bo_wr32(disp->sync, sync->sem.offset / 4,
				0xf00d0000 | sync->sem.value);
		evo_sync(crtc->dev);
	}

	/* queue the flip */
	evo_mthd(push, 0x0100, 1);
	evo_data(push, 0xfffe0000);
	evo_mthd(push, 0x0084, 1);
	evo_data(push, swap_interval);
	if (!(swap_interval & 0x00000100)) {
		evo_mthd(push, 0x00e0, 1);
		evo_data(push, 0x40000000);
	}
	/* semaphore offset / acquire value / release value / ctxdma */
	evo_mthd(push, 0x0088, 4);
	evo_data(push, sync->sem.offset);
	evo_data(push, 0xf00d0000 | sync->sem.value);
	evo_data(push, 0x74b1e000);
	evo_data(push, NvEvoSync);
	evo_mthd(push, 0x00a0, 2);
	evo_data(push, 0x00000000);
	evo_data(push, 0x00000000);
	evo_mthd(push, 0x00c0, 1);
	evo_data(push, nv_fb->r_dma);
	evo_mthd(push, 0x0110, 2);
	evo_data(push, 0x00000000);
	evo_data(push, 0x00000000);
	/* surface address / size / pitch / format */
	evo_mthd(push, 0x0400, 5);
	evo_data(push, nv_fb->nvbo->bo.offset >> 8);
	evo_data(push, 0);
	evo_data(push, (fb->height << 16) | fb->width);
	evo_data(push, nv_fb->r_pitch);
	evo_data(push, nv_fb->r_format);
	evo_mthd(push, 0x0080, 1);
	evo_data(push, 0x00000000);
	evo_kick(push, sync);

	/* flip between the two semaphore slots for the next request */
	sync->sem.offset ^= 0x10;
	sync->sem.value++;
	return 0;
}
467
468 /******************************************************************************
469 * CRTC
470 *****************************************************************************/
/* Program the head's dithering mode from the connector's properties,
 * resolving AUTO mode/depth against the sink's reported bpc.  If
 * @update, a core-channel update (0x0080) is kicked immediately. */
static int
nvd0_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool update)
{
	struct nouveau_drm *drm = nouveau_drm(nv_crtc->base.dev);
	struct drm_device *dev = nv_crtc->base.dev;
	struct nouveau_connector *nv_connector;
	struct drm_connector *connector;
	u32 *push, mode = 0x00;
	u32 mthd;

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	connector = &nv_connector->base;
	if (nv_connector->dithering_mode == DITHERING_MODE_AUTO) {
		/* only dither when the fb has more depth than the sink */
		if (nv_crtc->base.fb->depth > connector->display_info.bpc * 3)
			mode = DITHERING_MODE_DYNAMIC2X2;
	} else {
		mode = nv_connector->dithering_mode;
	}

	if (nv_connector->dithering_depth == DITHERING_DEPTH_AUTO) {
		if (connector->display_info.bpc >= 8)
			mode |= DITHERING_DEPTH_8BPC;
	} else {
		mode |= nv_connector->dithering_depth;
	}

	/* the dither method moved between nvd0 and nve0 */
	if (nv_device(drm->device)->card_type < NV_E0)
		mthd = 0x0490 + (nv_crtc->index * 0x0300);
	else
		mthd = 0x04a0 + (nv_crtc->index * 0x0300);

	push = evo_wait(nvd0_mast(dev), 4);
	if (push) {
		evo_mthd(push, mthd, 1);
		evo_data(push, mode);
		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, nvd0_mast(dev));
	}

	return 0;
}
515
/* Program the head's scaler: compute the output viewport from the
 * connector's scaling mode (NONE/FULL/CENTER/ASPECT), optionally
 * shrunk by underscan borders, and write it along with the input
 * (user mode) size.  If @update, restart the flip machinery so the
 * new scaling takes effect. */
static int
nvd0_crtc_set_scale(struct nouveau_crtc *nv_crtc, bool update)
{
	struct drm_display_mode *omode, *umode = &nv_crtc->base.mode;
	struct drm_device *dev = nv_crtc->base.dev;
	struct drm_crtc *crtc = &nv_crtc->base;
	struct nouveau_connector *nv_connector;
	int mode = DRM_MODE_SCALE_NONE;
	u32 oX, oY, *push;

	/* start off at the resolution we programmed the crtc for, this
	 * effectively handles NONE/FULL scaling
	 */
	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	if (nv_connector && nv_connector->native_mode)
		mode = nv_connector->scaling_mode;

	if (mode != DRM_MODE_SCALE_NONE)
		omode = nv_connector->native_mode;
	else
		omode = umode;

	oX = omode->hdisplay;
	oY = omode->vdisplay;
	if (omode->flags & DRM_MODE_FLAG_DBLSCAN)
		oY *= 2;

	/* add overscan compensation if necessary, will keep the aspect
	 * ratio the same as the backend mode unless overridden by the
	 * user setting both hborder and vborder properties.
	 */
	if (nv_connector && ( nv_connector->underscan == UNDERSCAN_ON ||
			     (nv_connector->underscan == UNDERSCAN_AUTO &&
			      nv_connector->edid &&
			      drm_detect_hdmi_monitor(nv_connector->edid)))) {
		u32 bX = nv_connector->underscan_hborder;
		u32 bY = nv_connector->underscan_vborder;
		u32 aspect = (oY << 19) / oX;	/* 19.13 fixed point ratio */

		if (bX) {
			oX -= (bX * 2);
			if (bY) oY -= (bY * 2);
			else    oY  = ((oX * aspect) + (aspect / 2)) >> 19;
		} else {
			/* no explicit borders: shrink ~6% + 32 pixels */
			oX -= (oX >> 4) + 32;
			if (bY) oY -= (bY * 2);
			else    oY  = ((oX * aspect) + (aspect / 2)) >> 19;
		}
	}

	/* handle CENTER/ASPECT scaling, taking into account the areas
	 * removed already for overscan compensation
	 */
	switch (mode) {
	case DRM_MODE_SCALE_CENTER:
		oX = min((u32)umode->hdisplay, oX);
		oY = min((u32)umode->vdisplay, oY);
		/* fall-through */
	case DRM_MODE_SCALE_ASPECT:
		if (oY < oX) {
			u32 aspect = (umode->hdisplay << 19) / umode->vdisplay;
			oX = ((oY * aspect) + (aspect / 2)) >> 19;
		} else {
			u32 aspect = (umode->vdisplay << 19) / umode->hdisplay;
			oY = ((oX * aspect) + (aspect / 2)) >> 19;
		}
		break;
	default:
		break;
	}

	push = evo_wait(nvd0_mast(dev), 8);
	if (push) {
		/* output viewport (written to three registers) */
		evo_mthd(push, 0x04c0 + (nv_crtc->index * 0x300), 3);
		evo_data(push, (oY << 16) | oX);
		evo_data(push, (oY << 16) | oX);
		evo_data(push, (oY << 16) | oX);
		evo_mthd(push, 0x0494 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		/* scaler input size = user mode size */
		evo_mthd(push, 0x04b8 + (nv_crtc->index * 0x300), 1);
		evo_data(push, (umode->vdisplay << 16) | umode->hdisplay);
		evo_kick(push, nvd0_mast(dev));
		if (update) {
			nvd0_display_flip_stop(crtc);
			nvd0_display_flip_next(crtc, crtc->fb, NULL, 1);
		}
	}

	return 0;
}
606
/* Point the head's core channel at framebuffer @fb with panning
 * offset (@x, @y).  Records the fb's tiling/dma flags on the crtc so
 * nvd0_crtc_commit() can re-program them.  If @update, kick a core
 * update immediately. */
static int
nvd0_crtc_set_image(struct nouveau_crtc *nv_crtc, struct drm_framebuffer *fb,
		    int x, int y, bool update)
{
	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(fb);
	u32 *push;

	push = evo_wait(nvd0_mast(fb->dev), 16);
	if (push) {
		/* surface address (256-byte units) */
		evo_mthd(push, 0x0460 + (nv_crtc->index * 0x300), 1);
		evo_data(push, nvfb->nvbo->bo.offset >> 8);
		/* size / pitch / format / ctxdma */
		evo_mthd(push, 0x0468 + (nv_crtc->index * 0x300), 4);
		evo_data(push, (fb->height << 16) | fb->width);
		evo_data(push, nvfb->r_pitch);
		evo_data(push, nvfb->r_format);
		evo_data(push, nvfb->r_dma);
		/* panning offset */
		evo_mthd(push, 0x04b0 + (nv_crtc->index * 0x300), 1);
		evo_data(push, (y << 16) | x);
		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, nvd0_mast(fb->dev));
	}

	nv_crtc->fb.tile_flags = nvfb->r_dma;
	return 0;
}
635
/* Show or hide the hardware cursor on @nv_crtc's head via the core
 * channel.  When showing, point the cursor at the pre-allocated cursor
 * bo; when hiding, clear the cursor ctxdma.  If @update, kick a core
 * update immediately. */
static void
nvd0_crtc_cursor_show(struct nouveau_crtc *nv_crtc, bool show, bool update)
{
	struct drm_device *dev = nv_crtc->base.dev;
	u32 *push = evo_wait(nvd0_mast(dev), 16);
	if (push) {
		if (show) {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 2);
			evo_data(push, 0x85000000);
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, NvEvoVRAM);
		} else {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x05000000);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
		}

		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}

		evo_kick(push, nvd0_mast(dev));
	}
}

/* drm dpms hook: intentionally empty — head power state is handled
 * through prepare/commit and the encoder dpms hooks instead. */
static void
nvd0_crtc_dpms(struct drm_crtc *crtc, int mode)
{
}
668
/* drm prepare hook: quiesce the head before a modeset — stop page
 * flips, detach the framebuffer/LUT ctxdmas, and hide the cursor. */
static void
nvd0_crtc_prepare(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 *push;

	nvd0_display_flip_stop(crtc);

	push = evo_wait(nvd0_mast(crtc->dev), 2);
	if (push) {
		evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x03000000);
		evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_kick(push, nvd0_mast(crtc->dev));
	}

	nvd0_crtc_cursor_show(nv_crtc, false, false);
}

/* drm commit hook: re-enable the head after a modeset — restore the
 * framebuffer ctxdma, LUT and cursor state, and restart flips. */
static void
nvd0_crtc_commit(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 *push;

	push = evo_wait(nvd0_mast(crtc->dev), 32);
	if (push) {
		evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
		evo_data(push, nv_crtc->fb.tile_flags);
		/* LUT enable + address (256-byte units) */
		evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 4);
		evo_data(push, 0x83000000);
		evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
		evo_data(push, 0x00000000);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
		evo_data(push, NvEvoVRAM);
		evo_mthd(push, 0x0430 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0xffffff00);
		evo_kick(push, nvd0_mast(crtc->dev));
	}

	nvd0_crtc_cursor_show(nv_crtc, nv_crtc->cursor.visible, true);
	nvd0_display_flip_next(crtc, crtc->fb, NULL, 1);
}
716
/* drm mode_fixup hook: accept the requested mode unchanged; any
 * scaling adjustments happen in the encoder mode_fixup hooks. */
static bool
nvd0_crtc_mode_fixup(struct drm_crtc *crtc, const struct drm_display_mode *mode,
		     struct drm_display_mode *adjusted_mode)
{
	return true;
}
723
724 static int
725 nvd0_crtc_swap_fbs(struct drm_crtc *crtc, struct drm_framebuffer *old_fb)
726 {
727 struct nouveau_framebuffer *nvfb = nouveau_framebuffer(crtc->fb);
728 int ret;
729
730 ret = nouveau_bo_pin(nvfb->nvbo, TTM_PL_FLAG_VRAM);
731 if (ret)
732 return ret;
733
734 if (old_fb) {
735 nvfb = nouveau_framebuffer(old_fb);
736 nouveau_bo_unpin(nvfb->nvbo);
737 }
738
739 return 0;
740 }
741
/* drm mode_set hook: derive the raster timing parameters (blanking and
 * sync windows, doubled for doublescan, halved per field for interlace)
 * from @mode, pin the framebuffer, and program the head's timings,
 * pixel clock, dither, scaler and scanout surface via the core channel.
 */
static int
nvd0_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode,
		   struct drm_display_mode *mode, int x, int y,
		   struct drm_framebuffer *old_fb)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nouveau_connector *nv_connector;
	u32 ilace = (mode->flags & DRM_MODE_FLAG_INTERLACE) ? 2 : 1;
	u32 vscan = (mode->flags & DRM_MODE_FLAG_DBLSCAN) ? 2 : 1;
	u32 hactive, hsynce, hbackp, hfrontp, hblanke, hblanks;
	u32 vactive, vsynce, vbackp, vfrontp, vblanke, vblanks;
	u32 vblan2e = 0, vblan2s = 1;
	u32 *push;
	int ret;

	/* horizontal timings, measured back from htotal */
	hactive = mode->htotal;
	hsynce  = mode->hsync_end - mode->hsync_start - 1;
	hbackp  = mode->htotal - mode->hsync_end;
	hblanke = hsynce + hbackp;
	hfrontp = mode->hsync_start - mode->hdisplay;
	hblanks = mode->htotal - hfrontp - 1;

	/* vertical timings, scaled for doublescan/interlace */
	vactive = mode->vtotal * vscan / ilace;
	vsynce  = ((mode->vsync_end - mode->vsync_start) * vscan / ilace) - 1;
	vbackp  = (mode->vtotal - mode->vsync_end) * vscan / ilace;
	vblanke = vsynce + vbackp;
	vfrontp = (mode->vsync_start - mode->vdisplay) * vscan / ilace;
	vblanks = vactive - vfrontp - 1;
	if (mode->flags & DRM_MODE_FLAG_INTERLACE) {
		/* second-field blanking window */
		vblan2e = vactive + vsynce + vbackp;
		vblan2s = vblan2e + (mode->vdisplay * vscan / ilace);
		vactive = (vactive * 2) + 1;
	}

	ret = nvd0_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;

	push = evo_wait(nvd0_mast(crtc->dev), 64);
	if (push) {
		evo_mthd(push, 0x0410 + (nv_crtc->index * 0x300), 6);
		evo_data(push, 0x00000000);
		evo_data(push, (vactive << 16) | hactive);
		evo_data(push, ( vsynce << 16) | hsynce);
		evo_data(push, (vblanke << 16) | hblanke);
		evo_data(push, (vblanks << 16) | hblanks);
		evo_data(push, (vblan2e << 16) | vblan2s);
		evo_mthd(push, 0x042c + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000); /* ??? */
		/* pixel clock in Hz, written twice around an unknown word */
		evo_mthd(push, 0x0450 + (nv_crtc->index * 0x300), 3);
		evo_data(push, mode->clock * 1000);
		evo_data(push, 0x00200000); /* ??? */
		evo_data(push, mode->clock * 1000);
		evo_mthd(push, 0x04d0 + (nv_crtc->index * 0x300), 2);
		evo_data(push, 0x00000311);
		evo_data(push, 0x00000100);
		evo_kick(push, nvd0_mast(crtc->dev));
	}

	/* NOTE(review): nv_connector is fetched but not used below */
	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	nvd0_crtc_set_dither(nv_crtc, false);
	nvd0_crtc_set_scale(nv_crtc, false);
	nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, false);
	return 0;
}
807
808 static int
809 nvd0_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
810 struct drm_framebuffer *old_fb)
811 {
812 struct nouveau_drm *drm = nouveau_drm(crtc->dev);
813 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
814 int ret;
815
816 if (!crtc->fb) {
817 NV_DEBUG(drm, "No FB bound\n");
818 return 0;
819 }
820
821 ret = nvd0_crtc_swap_fbs(crtc, old_fb);
822 if (ret)
823 return ret;
824
825 nvd0_display_flip_stop(crtc);
826 nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, true);
827 nvd0_display_flip_next(crtc, crtc->fb, NULL, 1);
828 return 0;
829 }
830
/* Atomic (kdb/panic-path) base update: point the head at @fb without
 * swapping pins or queueing flips. */
static int
nvd0_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
			       struct drm_framebuffer *fb, int x, int y,
			       enum mode_set_atomic state)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	nvd0_display_flip_stop(crtc);
	nvd0_crtc_set_image(nv_crtc, fb, x, y, true);
	return 0;
}

/* Upload the 256-entry software LUT into the head's LUT buffer object.
 * Each entry occupies 0x20 bytes with 16-bit R/G/B words at offsets
 * 0/2/4 (NOTE(review): the 0x6000 bias and >>2 scaling are taken on
 * faith from the existing code — confirm against hardware docs). */
static void
nvd0_crtc_lut_load(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
	int i;

	for (i = 0; i < 256; i++) {
		writew(0x6000 + (nv_crtc->lut.r[i] >> 2), lut + (i * 0x20) + 0);
		writew(0x6000 + (nv_crtc->lut.g[i] >> 2), lut + (i * 0x20) + 2);
		writew(0x6000 + (nv_crtc->lut.b[i] >> 2), lut + (i * 0x20) + 4);
	}
}
855
/* drm cursor_set hook: copy a userspace-provided 64x64 cursor image
 * into the head's cursor bo (handle != 0), or hide the cursor
 * (handle == 0), updating hardware visibility if it changed. */
static int
nvd0_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
		     uint32_t handle, uint32_t width, uint32_t height)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct drm_gem_object *gem;
	struct nouveau_bo *nvbo;
	bool visible = (handle != 0);
	int i, ret = 0;

	if (visible) {
		/* hardware cursor is fixed at 64x64 */
		if (width != 64 || height != 64)
			return -EINVAL;

		gem = drm_gem_object_lookup(dev, file_priv, handle);
		if (unlikely(!gem))
			return -ENOENT;
		nvbo = nouveau_gem_object(gem);

		/* copy the image dword-by-dword into the cursor bo */
		ret = nouveau_bo_map(nvbo);
		if (ret == 0) {
			for (i = 0; i < 64 * 64; i++) {
				u32 v = nouveau_bo_rd32(nvbo, i);
				nouveau_bo_wr32(nv_crtc->cursor.nvbo, i, v);
			}
			nouveau_bo_unmap(nvbo);
		}

		drm_gem_object_unreference_unlocked(gem);
	}

	if (visible != nv_crtc->cursor.visible) {
		nvd0_crtc_cursor_show(nv_crtc, visible, true);
		nv_crtc->cursor.visible = visible;
	}

	return ret;
}

/* drm cursor_move hook: write the position straight into the head's
 * cursor PIO channel (0x0084 = position, 0x0080 = update). */
static int
nvd0_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
{
	struct nvd0_curs *curs = nvd0_curs(crtc);
	struct nvd0_chan *chan = nvd0_chan(curs);
	nv_wo32(chan->user, 0x0084, (y << 16) | (x & 0xffff));
	nv_wo32(chan->user, 0x0080, 0x00000000);
	return 0;
}
905
906 static void
907 nvd0_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
908 uint32_t start, uint32_t size)
909 {
910 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
911 u32 end = max(start + size, (u32)256);
912 u32 i;
913
914 for (i = start; i < end; i++) {
915 nv_crtc->lut.r[i] = r[i];
916 nv_crtc->lut.g[i] = g[i];
917 nv_crtc->lut.b[i] = b[i];
918 }
919
920 nvd0_crtc_lut_load(crtc);
921 }
922
/* Free everything nvd0_crtc_create() set up: the four per-head EVO
 * channels, the cursor and LUT buffer objects, and the head itself. */
static void
nvd0_crtc_destroy(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nvd0_disp *disp = nvd0_disp(crtc->dev);
	struct nvd0_head *head = nvd0_head(crtc);
	nvd0_dmac_destroy(disp->core, &head->ovly.base);
	nvd0_pioc_destroy(disp->core, &head->oimm.base);
	nvd0_dmac_destroy(disp->core, &head->sync.base);
	nvd0_pioc_destroy(disp->core, &head->curs.base);
	nouveau_bo_unmap(nv_crtc->cursor.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	nouveau_bo_unmap(nv_crtc->lut.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	drm_crtc_cleanup(crtc);
	kfree(crtc);
}

/* drm CRTC helper vtable */
static const struct drm_crtc_helper_funcs nvd0_crtc_hfunc = {
	.dpms = nvd0_crtc_dpms,
	.prepare = nvd0_crtc_prepare,
	.commit = nvd0_crtc_commit,
	.mode_fixup = nvd0_crtc_mode_fixup,
	.mode_set = nvd0_crtc_mode_set,
	.mode_set_base = nvd0_crtc_mode_set_base,
	.mode_set_base_atomic = nvd0_crtc_mode_set_base_atomic,
	.load_lut = nvd0_crtc_lut_load,
};

/* drm CRTC core vtable */
static const struct drm_crtc_funcs nvd0_crtc_func = {
	.cursor_set = nvd0_crtc_cursor_set,
	.cursor_move = nvd0_crtc_cursor_move,
	.gamma_set = nvd0_crtc_gamma_set,
	.set_config = drm_crtc_helper_set_config,
	.destroy = nvd0_crtc_destroy,
	.page_flip = nouveau_crtc_page_flip,
};
960
/* nouveau_crtc cursor hooks: intentionally empty on nvd0 — cursor
 * position/image updates go through the per-head cursor channel (see
 * nvd0_crtc_cursor_move()/nvd0_crtc_cursor_set()) instead. */
static void
nvd0_cursor_set_pos(struct nouveau_crtc *nv_crtc, int x, int y)
{
}

static void
nvd0_cursor_set_offset(struct nouveau_crtc *nv_crtc, uint32_t offset)
{
}
970
/* Allocate and initialise one display head: the nouveau CRTC, a linear
 * default LUT, the cursor bo, and the head's four EVO channels
 * (cursor, base/sync, overlay-immediate, overlay).  On any failure the
 * shared error path destroys whatever was created (the head struct is
 * zeroed, so destroying not-yet-created channels is safe). */
static int
nvd0_crtc_create(struct drm_device *dev, struct nouveau_object *core, int index)
{
	struct nvd0_disp *disp = nvd0_disp(dev);
	struct nvd0_head *head;
	struct drm_crtc *crtc;
	int ret, i;

	head = kzalloc(sizeof(*head), GFP_KERNEL);
	if (!head)
		return -ENOMEM;

	head->base.index = index;
	head->base.set_dither = nvd0_crtc_set_dither;
	head->base.set_scale = nvd0_crtc_set_scale;
	head->base.cursor.set_offset = nvd0_cursor_set_offset;
	head->base.cursor.set_pos = nvd0_cursor_set_pos;
	/* identity gamma ramp as the default */
	for (i = 0; i < 256; i++) {
		head->base.lut.r[i] = i << 8;
		head->base.lut.g[i] = i << 8;
		head->base.lut.b[i] = i << 8;
	}

	crtc = &head->base.base;
	drm_crtc_init(dev, crtc, &nvd0_crtc_func);
	drm_crtc_helper_add(crtc, &nvd0_crtc_hfunc);
	drm_mode_crtc_set_gamma_size(crtc, 256);

	/* hardware LUT buffer, pinned and mapped for lut_load() */
	ret = nouveau_bo_new(dev, 8192, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, &head->base.lut.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(head->base.lut.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(head->base.lut.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &head->base.lut.nvbo);
	}

	if (ret)
		goto out;

	nvd0_crtc_lut_load(crtc);

	/* allocate cursor resources */
	ret = nvd0_pioc_create(disp->core, NV50_DISP_CURS_CLASS, index,
			      &(struct nv50_display_curs_class) {
					.head = index,
			      }, sizeof(struct nv50_display_curs_class),
			      &head->curs.base);
	if (ret)
		goto out;

	/* 64x64 ARGB cursor image bo, pinned and mapped for cursor_set() */
	ret = nouveau_bo_new(dev, 64 * 64 * 4, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, &head->base.cursor.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(head->base.cursor.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(head->base.cursor.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &head->base.cursor.nvbo);
	}

	if (ret)
		goto out;

	/* allocate page flip / sync resources */
	ret = nvd0_dmac_create(disp->core, NV50_DISP_SYNC_CLASS, index,
			      &(struct nv50_display_sync_class) {
					.pushbuf = EVO_PUSH_HANDLE(SYNC, index),
					.head = index,
			      }, sizeof(struct nv50_display_sync_class),
			      disp->sync->bo.offset, &head->sync.base);
	if (ret)
		goto out;

	/* this head's semaphore slot within the shared sync bo */
	head->sync.sem.offset = EVO_SYNC(1 + index, 0x00);

	/* allocate overlay resources */
	ret = nvd0_pioc_create(disp->core, NV50_DISP_OIMM_CLASS, index,
			      &(struct nv50_display_oimm_class) {
					.head = index,
			      }, sizeof(struct nv50_display_oimm_class),
			      &head->oimm.base);
	if (ret)
		goto out;

	ret = nvd0_dmac_create(disp->core, NV50_DISP_OVLY_CLASS, index,
			      &(struct nv50_display_ovly_class) {
					.pushbuf = EVO_PUSH_HANDLE(OVLY, index),
					.head = index,
			      }, sizeof(struct nv50_display_ovly_class),
			      disp->sync->bo.offset, &head->ovly.base);
	if (ret)
		goto out;

out:
	if (ret)
		nvd0_crtc_destroy(crtc);
	return ret;
}
1071
1072 /******************************************************************************
1073 * DAC
1074 *****************************************************************************/
1075 static void
1076 nvd0_dac_dpms(struct drm_encoder *encoder, int mode)
1077 {
1078 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1079 struct drm_device *dev = encoder->dev;
1080 struct nouveau_device *device = nouveau_dev(dev);
1081 int or = nv_encoder->or;
1082 u32 dpms_ctrl;
1083
1084 dpms_ctrl = 0x80000000;
1085 if (mode == DRM_MODE_DPMS_STANDBY || mode == DRM_MODE_DPMS_OFF)
1086 dpms_ctrl |= 0x00000001;
1087 if (mode == DRM_MODE_DPMS_SUSPEND || mode == DRM_MODE_DPMS_OFF)
1088 dpms_ctrl |= 0x00000004;
1089
1090 nv_wait(device, 0x61a004 + (or * 0x0800), 0x80000000, 0x00000000);
1091 nv_mask(device, 0x61a004 + (or * 0x0800), 0xc000007f, dpms_ctrl);
1092 nv_wait(device, 0x61a004 + (or * 0x0800), 0x80000000, 0x00000000);
1093 }
1094
1095 static bool
1096 nvd0_dac_mode_fixup(struct drm_encoder *encoder,
1097 const struct drm_display_mode *mode,
1098 struct drm_display_mode *adjusted_mode)
1099 {
1100 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1101 struct nouveau_connector *nv_connector;
1102
1103 nv_connector = nouveau_encoder_connector_get(nv_encoder);
1104 if (nv_connector && nv_connector->native_mode) {
1105 if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
1106 int id = adjusted_mode->base.id;
1107 *adjusted_mode = *nv_connector->native_mode;
1108 adjusted_mode->base.id = id;
1109 }
1110 }
1111
1112 return true;
1113 }
1114
/* no-op: all DAC programming happens in nvd0_dac_mode_set(), but the
 * drm encoder helpers require a .commit hook to exist */
static void
nvd0_dac_commit(struct drm_encoder *encoder)
{
}
1119
/* Program the DAC for a mode: build the head's sync-polarity and "magic"
 * words, power the DAC up, then attach the OR to its head through the
 * core evo channel. */
static void
nvd0_dac_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
		  struct drm_display_mode *adjusted_mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	u32 syncs, magic, *push;

	/* bit 3 = negative hsync, bit 4 = negative vsync */
	syncs = 0x00000001;
	if (mode->flags & DRM_MODE_FLAG_NHSYNC)
		syncs |= 0x00000008;
	if (mode->flags & DRM_MODE_FLAG_NVSYNC)
		syncs |= 0x00000010;

	/* undocumented per-head value; bit 0 flags interlaced modes */
	magic = 0x31ec6000 | (nv_crtc->index << 25);
	if (mode->flags & DRM_MODE_FLAG_INTERLACE)
		magic |= 0x00000001;

	nvd0_dac_dpms(encoder, DRM_MODE_DPMS_ON);

	push = evo_wait(nvd0_mast(encoder->dev), 8);
	if (push) {
		/* per-head method (0x300 stride per head) */
		evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
		evo_data(push, syncs);
		evo_data(push, magic);
		/* per-OR DAC control (0x20 stride per OR): owner head mask */
		evo_mthd(push, 0x0180 + (nv_encoder->or * 0x020), 2);
		evo_data(push, 1 << nv_crtc->index);
		evo_data(push, 0x00ff);
		evo_kick(push, nvd0_mast(encoder->dev));
	}

	nv_encoder->crtc = encoder->crtc;
}
1153
/* Detach the DAC from its head: blank the owning crtc, clear the OR's
 * control method, then fire an update.  Serves as both the .prepare and
 * .disable helper hook. */
static void
nvd0_dac_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	u32 *push;

	if (nv_encoder->crtc) {
		nvd0_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(nvd0_mast(dev), 4);
		if (push) {
			evo_mthd(push, 0x0180 + (nv_encoder->or * 0x20), 1);
			evo_data(push, 0x00000000);
			/* 0x0080: presumably the core-channel UPDATE method */
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
			evo_kick(push, nvd0_mast(dev));
		}

		nv_encoder->crtc = NULL;
	}
}
1176
/* DAC load detection: drive the test level, let it settle, trigger a
 * sample and check the sense bits to decide whether a monitor is
 * attached. */
static enum drm_connector_status
nvd0_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
{
	enum drm_connector_status status = connector_status_disconnected;
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	struct nouveau_device *device = nouveau_dev(dev);
	int or = nv_encoder->or;
	u32 load;

	nv_wr32(device, 0x61a00c + (or * 0x800), 0x00100000);
	udelay(9500);	/* settle time before triggering the sample */
	nv_wr32(device, 0x61a00c + (or * 0x800), 0x80000000);

	/* all three sense bits set (presumably R/G/B lines) -> connected */
	load = nv_rd32(device, 0x61a00c + (or * 0x800));
	if ((load & 0x38000000) == 0x38000000)
		status = connector_status_connected;

	/* disable load detection again */
	nv_wr32(device, 0x61a00c + (or * 0x800), 0x00000000);
	return status;
}
1198
/* Final teardown of a DAC encoder allocated by nvd0_dac_create();
 * kfree(encoder) releases the embedding nouveau_encoder. */
static void
nvd0_dac_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}
1205
/* DAC modeset helper hooks.  .prepare and .disable both map to
 * nvd0_dac_disconnect, which detaches the OR from its head. */
static const struct drm_encoder_helper_funcs nvd0_dac_hfunc = {
	.dpms = nvd0_dac_dpms,
	.mode_fixup = nvd0_dac_mode_fixup,
	.prepare = nvd0_dac_disconnect,
	.commit = nvd0_dac_commit,
	.mode_set = nvd0_dac_mode_set,
	.disable = nvd0_dac_disconnect,
	.get_crtc = nvd0_display_crtc_get,
	.detect = nvd0_dac_detect
};
1216
/* base DAC encoder ops; only destruction is required */
static const struct drm_encoder_funcs nvd0_dac_func = {
	.destroy = nvd0_dac_destroy,
};
1220
1221 static int
1222 nvd0_dac_create(struct drm_connector *connector, struct dcb_output *dcbe)
1223 {
1224 struct drm_device *dev = connector->dev;
1225 struct nouveau_encoder *nv_encoder;
1226 struct drm_encoder *encoder;
1227
1228 nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
1229 if (!nv_encoder)
1230 return -ENOMEM;
1231 nv_encoder->dcb = dcbe;
1232 nv_encoder->or = ffs(dcbe->or) - 1;
1233
1234 encoder = to_drm_encoder(nv_encoder);
1235 encoder->possible_crtcs = dcbe->heads;
1236 encoder->possible_clones = 0;
1237 drm_encoder_init(dev, encoder, &nvd0_dac_func, DRM_MODE_ENCODER_DAC);
1238 drm_encoder_helper_add(encoder, &nvd0_dac_hfunc);
1239
1240 drm_mode_connector_attach_encoder(connector, encoder);
1241 return 0;
1242 }
1243
1244 /******************************************************************************
1245 * Audio
1246 *****************************************************************************/
/* Enable display audio for this encoder's OR and upload the connector's
 * ELD so the audio side can discover the sink's capabilities.  Does
 * nothing if the EDID reports no audio support. */
static void
nvd0_audio_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_connector *nv_connector;
	struct drm_device *dev = encoder->dev;
	struct nouveau_device *device = nouveau_dev(dev);
	int i, or = nv_encoder->or * 0x30;	/* 0x30 register stride per OR */

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_monitor_audio(nv_connector->edid))
		return;

	nv_mask(device, 0x10ec10 + or, 0x80000003, 0x80000001);

	drm_edid_to_eld(&nv_connector->base, nv_connector->edid);
	if (nv_connector->base.eld[0]) {
		u8 *eld = nv_connector->base.eld;

		/* write ELD bytes (eld[2] holds the length scaled by 4),
		 * then zero-pad the rest of the 0x60-byte window */
		for (i = 0; i < eld[2] * 4; i++)
			nv_wr32(device, 0x10ec00 + or, (i << 8) | eld[i]);
		for (i = eld[2] * 4; i < 0x60; i++)
			nv_wr32(device, 0x10ec00 + or, (i << 8) | 0x00);

		/* flag ELD as valid */
		nv_mask(device, 0x10ec10 + or, 0x80000002, 0x80000002);
	}
}
1274
/* Disable display audio on this encoder's OR, reverting
 * nvd0_audio_mode_set(). */
static void
nvd0_audio_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	struct nouveau_device *device = nouveau_dev(dev);
	int or = nv_encoder->or * 0x30;

	nv_mask(device, 0x10ec10 + or, 0x80000003, 0x80000000);
}
1285
1286 /******************************************************************************
1287 * HDMI
1288 *****************************************************************************/
/* Enable HDMI on the head driving this encoder: program the AVI
 * infoframe, a second (unidentified) infoframe, HDMI_CTRL, and then
 * audio.  Bails out early for plain DVI sinks. */
static void
nvd0_hdmi_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct drm_device *dev = encoder->dev;
	struct nouveau_device *device = nouveau_dev(dev);
	int head = nv_crtc->index * 0x800;	/* per-head register stride */
	u32 rekey = 56; /* binary driver, and tegra constant */
	u32 max_ac_packet;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	if (!drm_detect_hdmi_monitor(nv_connector->edid))
		return;

	/* derive max audio packet count from hblank width, minus rekey
	 * and a constant; formula mirrors the binary driver / tegra */
	max_ac_packet  = mode->htotal - mode->hdisplay;
	max_ac_packet -= rekey;
	max_ac_packet -= 18; /* constant from tegra */
	max_ac_packet /= 32;

	/* AVI InfoFrame: disable, load payload, re-enable */
	nv_mask(device, 0x616714 + head, 0x00000001, 0x00000000);
	nv_wr32(device, 0x61671c + head, 0x000d0282);
	nv_wr32(device, 0x616720 + head, 0x0000006f);
	nv_wr32(device, 0x616724 + head, 0x00000000);
	nv_wr32(device, 0x616728 + head, 0x00000000);
	nv_wr32(device, 0x61672c + head, 0x00000000);
	nv_mask(device, 0x616714 + head, 0x00000001, 0x00000001);

	/* ??? InfoFrame? */
	nv_mask(device, 0x6167a4 + head, 0x00000001, 0x00000000);
	nv_wr32(device, 0x6167ac + head, 0x00000010);
	nv_mask(device, 0x6167a4 + head, 0x00000001, 0x00000001);

	/* HDMI_CTRL: enable bit, rekey value, max_ac_packet in 31:16 */
	nv_mask(device, 0x616798 + head, 0x401f007f, 0x40000000 | rekey |
						     max_ac_packet << 16);

	/* NFI, audio doesn't work without it though.. */
	nv_mask(device, 0x616548 + head, 0x00000070, 0x00000000);

	nvd0_audio_mode_set(encoder, mode);
}
1333
/* Undo nvd0_hdmi_mode_set(): disable audio, HDMI_CTRL, and both
 * infoframes.  Relies on nv_encoder->crtc still being valid, so the
 * caller must invoke this before clearing it. */
static void
nvd0_hdmi_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc);
	struct drm_device *dev = encoder->dev;
	struct nouveau_device *device = nouveau_dev(dev);
	int head = nv_crtc->index * 0x800;

	nvd0_audio_disconnect(encoder);

	nv_mask(device, 0x616798 + head, 0x40000000, 0x00000000);
	nv_mask(device, 0x6167a4 + head, 0x00000001, 0x00000000);
	nv_mask(device, 0x616714 + head, 0x00000001, 0x00000000);
}
1349
1350 /******************************************************************************
1351 * SOR
1352 *****************************************************************************/
1353 static void
1354 nvd0_sor_dp_train_set(struct drm_device *dev, struct dcb_output *dcb, u8 pattern)
1355 {
1356 struct nvd0_disp *disp = nvd0_disp(dev);
1357 const u32 or = ffs(dcb->or) - 1, link = !(dcb->sorconf.link & 1);
1358 const u32 moff = (link << 2) | or;
1359 nv_call(disp->core, NV94_DISP_SOR_DP_TRAIN + moff, pattern);
1360 }
1361
1362 static void
1363 nvd0_sor_dp_train_adj(struct drm_device *dev, struct dcb_output *dcb,
1364 u8 lane, u8 swing, u8 preem)
1365 {
1366 struct nvd0_disp *disp = nvd0_disp(dev);
1367 const u32 or = ffs(dcb->or) - 1, link = !(dcb->sorconf.link & 1);
1368 const u32 moff = (link << 2) | or;
1369 const u32 data = (swing << 8) | preem;
1370 nv_call(disp->core, NV94_DISP_SOR_DP_DRVCTL(lane) + moff, data);
1371 }
1372
1373 static void
1374 nvd0_sor_dp_link_set(struct drm_device *dev, struct dcb_output *dcb, int crtc,
1375 int link_nr, u32 link_bw, bool enhframe)
1376 {
1377 struct nvd0_disp *disp = nvd0_disp(dev);
1378 const u32 or = ffs(dcb->or) - 1, link = !(dcb->sorconf.link & 1);
1379 const u32 moff = (crtc << 3) | (link << 2) | or;
1380 u32 data = ((link_bw / 27000) << 8) | link_nr;
1381 if (enhframe)
1382 data |= NV94_DISP_SOR_DP_LNKCTL_FRAME_ENH;
1383 nv_call(disp->core, NV94_DISP_SOR_DP_LNKCTL + moff, data);
1384 }
1385
1386 static void
1387 nvd0_sor_dp_link_get(struct drm_device *dev, struct dcb_output *dcb,
1388 u32 *link_nr, u32 *link_bw)
1389 {
1390 struct nouveau_device *device = nouveau_dev(dev);
1391 const u32 or = ffs(dcb->or) - 1, link = !(dcb->sorconf.link & 1);
1392 const u32 loff = (or * 0x800) + (link * 0x80);
1393 const u32 soff = (or * 0x800);
1394 u32 dpctrl = nv_rd32(device, 0x61c10c + loff) & 0x000f0000;
1395 u32 clksor = nv_rd32(device, 0x612300 + soff);
1396
1397 if (dpctrl > 0x00030000) *link_nr = 4;
1398 else if (dpctrl > 0x00010000) *link_nr = 2;
1399 else *link_nr = 1;
1400
1401 *link_bw = (clksor & 0x007c0000) >> 18;
1402 *link_bw *= 27000;
1403 }
1404
/* Compute and program the DP transfer-unit fill value for a head, from
 * the ratio of stream datarate to total link bandwidth.  All math is
 * fixed-point, scaled by 'symbol'. */
static void
nvd0_sor_dp_calc_tu(struct drm_device *dev, struct dcb_output *dcb,
		    u32 crtc, u32 datarate)
{
	struct nouveau_device *device = nouveau_dev(dev);
	const u32 symbol = 100000;	/* fixed-point scale factor */
	const u32 TU = 64;		/* transfer unit size */
	u32 link_nr, link_bw;
	u64 ratio, value;

	nvd0_sor_dp_link_get(dev, dcb, &link_nr, &link_bw);

	/* ratio = datarate / (link_nr * link_bw), scaled by 'symbol' */
	ratio  = datarate;
	ratio *= symbol;
	do_div(ratio, link_nr * link_bw);

	/* value = (1 - ratio) * TU * ratio, rescaled back down */
	value  = (symbol - ratio) * TU;
	value *= ratio;
	do_div(value, symbol);
	do_div(value, symbol);

	value += 5;		/* NOTE(review): rounding/headroom? confirm */
	value |= 0x08000000;	/* NOTE(review): undocumented flag bit */

	nv_wr32(device, 0x616610 + (crtc * 0x800), value);
}
1431
/* Set the SOR power state.  If another TMDS encoder shares this OR and
 * was left on, the hardware is not touched.  DP outputs additionally
 * run the common DP dpms path, which (re)trains or powers down the link
 * using the core-channel method wrappers above. */
static void
nvd0_sor_dpms(struct drm_encoder *encoder, int mode)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	struct nouveau_device *device = nouveau_dev(dev);
	struct drm_encoder *partner;
	int or = nv_encoder->or;
	u32 dpms_ctrl;

	nv_encoder->last_dpms = mode;

	/* don't power down an OR whose partner encoder is still active */
	list_for_each_entry(partner, &dev->mode_config.encoder_list, head) {
		struct nouveau_encoder *nv_partner = nouveau_encoder(partner);

		if (partner->encoder_type != DRM_MODE_ENCODER_TMDS)
			continue;

		if (nv_partner != nv_encoder &&
		    nv_partner->dcb->or == nv_encoder->dcb->or) {
			if (nv_partner->last_dpms == DRM_MODE_DPMS_ON)
				return;
			break;
		}
	}

	/* bit 0 = powered, bit 31 = pending/update trigger */
	dpms_ctrl  = (mode == DRM_MODE_DPMS_ON);
	dpms_ctrl |= 0x80000000;

	/* wait idle (bit 31 clear) around the write, then wait for the
	 * 0x61c030 status bit to clear (exact meaning unknown) */
	nv_wait(device, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_mask(device, 0x61c004 + (or * 0x0800), 0x80000001, dpms_ctrl);
	nv_wait(device, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
	nv_wait(device, 0x61c030 + (or * 0x0800), 0x10000000, 0x00000000);

	if (nv_encoder->dcb->type == DCB_OUTPUT_DP) {
		struct dp_train_func func = {
			.link_set = nvd0_sor_dp_link_set,
			.train_set = nvd0_sor_dp_train_set,
			.train_adj = nvd0_sor_dp_train_adj
		};

		nouveau_dp_dpms(encoder, mode, nv_encoder->dp.datarate, &func);
	}
}
1476
1477 static bool
1478 nvd0_sor_mode_fixup(struct drm_encoder *encoder,
1479 const struct drm_display_mode *mode,
1480 struct drm_display_mode *adjusted_mode)
1481 {
1482 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1483 struct nouveau_connector *nv_connector;
1484
1485 nv_connector = nouveau_encoder_connector_get(nv_encoder);
1486 if (nv_connector && nv_connector->native_mode) {
1487 if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
1488 int id = adjusted_mode->base.id;
1489 *adjusted_mode = *nv_connector->native_mode;
1490 adjusted_mode->base.id = id;
1491 }
1492 }
1493
1494 return true;
1495 }
1496
/* Detach the SOR from its head: blank the crtc, clear the OR's control
 * method, fire an update, then tear down any HDMI state.  HDMI teardown
 * must happen before nv_encoder->crtc is cleared, since it reads it. */
static void
nvd0_sor_disconnect(struct drm_encoder *encoder)
{
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct drm_device *dev = encoder->dev;
	u32 *push;

	if (nv_encoder->crtc) {
		nvd0_crtc_prepare(nv_encoder->crtc);

		push = evo_wait(nvd0_mast(dev), 4);
		if (push) {
			evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
			evo_data(push, 0x00000000);
			/* 0x0080: presumably the core-channel UPDATE method */
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
			evo_kick(push, nvd0_mast(dev));
		}

		nvd0_hdmi_disconnect(encoder);

		nv_encoder->crtc = NULL;
		nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
	}
}
1522
/* .prepare hook: disconnect the SOR; for DP outputs also sync the evo
 * channel (presumably to flush the disconnect before link training —
 * confirm). */
static void
nvd0_sor_prepare(struct drm_encoder *encoder)
{
	nvd0_sor_disconnect(encoder);
	if (nouveau_encoder(encoder)->dcb->type == DCB_OUTPUT_DP)
		evo_sync(encoder->dev);
}
1530
/* no-op: all SOR programming happens in nvd0_sor_mode_set(), but the
 * drm encoder helpers require a .commit hook to exist */
static void
nvd0_sor_commit(struct drm_encoder *encoder)
{
}
1535
/* Program the SOR for a mode.  Builds per-head sync words, selects link
 * configuration per output type (TMDS/LVDS/DP) using DCB and VBIOS
 * data, powers the output up, and attaches the OR to its head via the
 * core evo channel.
 *
 * NOTE(review): nv_connector is dereferenced without a NULL check in
 * the LVDS/DP branches (unlike mode_fixup) — presumably a connector is
 * guaranteed during modeset; confirm. */
static void
nvd0_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *umode,
		  struct drm_display_mode *mode)
{
	struct drm_device *dev = encoder->dev;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct nvbios *bios = &drm->vbios;
	u32 mode_ctrl = (1 << nv_crtc->index);
	u32 syncs, magic, *push;
	u32 or_config;

	/* bit 3 = negative hsync, bit 4 = negative vsync */
	syncs = 0x00000001;
	if (mode->flags & DRM_MODE_FLAG_NHSYNC)
		syncs |= 0x00000008;
	if (mode->flags & DRM_MODE_FLAG_NVSYNC)
		syncs |= 0x00000010;

	/* undocumented per-head value; bit 0 flags interlaced modes */
	magic = 0x31ec6000 | (nv_crtc->index << 25);
	if (mode->flags & DRM_MODE_FLAG_INTERLACE)
		magic |= 0x00000001;

	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	switch (nv_encoder->dcb->type) {
	case DCB_OUTPUT_TMDS:
		/* protocol field: single link A/B, or dual link above
		 * the 165MHz single-link TMDS limit */
		if (nv_encoder->dcb->sorconf.link & 1) {
			if (mode->clock < 165000)
				mode_ctrl |= 0x00000100;
			else
				mode_ctrl |= 0x00000500;
		} else {
			mode_ctrl |= 0x00000200;
		}

		or_config = (mode_ctrl & 0x00000f00) >> 8;
		if (mode->clock >= 165000)
			or_config |= 0x0100;

		nvd0_hdmi_mode_set(encoder, mode);
		break;
	case DCB_OUTPUT_LVDS:
		or_config = (mode_ctrl & 0x00000f00) >> 8;
		if (bios->fp_no_ddc) {
			/* no DDC: take dual-link/24bit straight from VBIOS */
			if (bios->fp.dual_link)
				or_config |= 0x0100;
			if (bios->fp.if_is_24bit)
				or_config |= 0x0200;
		} else {
			/* SPWG panels encode dual-link in EDID byte 121;
			 * otherwise use the VBIOS transition clock */
			if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
				if (((u8 *)nv_connector->edid)[121] == 2)
					or_config |= 0x0100;
			} else
			if (mode->clock >= bios->fp.duallink_transition_clk) {
				or_config |= 0x0100;
			}

			/* pick the 24bit strap matching the link width */
			if (or_config & 0x0100) {
				if (bios->fp.strapless_is_24bit & 2)
					or_config |= 0x0200;
			} else {
				if (bios->fp.strapless_is_24bit & 1)
					or_config |= 0x0200;
			}

			if (nv_connector->base.display_info.bpc == 8)
				or_config |= 0x0200;

		}
		break;
	case DCB_OUTPUT_DP:
		/* datarate = clock * bytes-per-pixel (18bpp for 6bpc
		 * sinks, 24bpp otherwise); also selects the sync depth */
		if (nv_connector->base.display_info.bpc == 6) {
			nv_encoder->dp.datarate = mode->clock * 18 / 8;
			syncs |= 0x00000002 << 6;
		} else {
			nv_encoder->dp.datarate = mode->clock * 24 / 8;
			syncs |= 0x00000005 << 6;
		}

		/* protocol field: DP over link A or B */
		if (nv_encoder->dcb->sorconf.link & 1)
			mode_ctrl |= 0x00000800;
		else
			mode_ctrl |= 0x00000900;

		or_config = (mode_ctrl & 0x00000f00) >> 8;
		break;
	default:
		BUG_ON(1);
		break;
	}

	nvd0_sor_dpms(encoder, DRM_MODE_DPMS_ON);

	/* DP needs its transfer unit reprogrammed for the new datarate */
	if (nv_encoder->dcb->type == DCB_OUTPUT_DP) {
		nvd0_sor_dp_calc_tu(dev, nv_encoder->dcb, nv_crtc->index,
				    nv_encoder->dp.datarate);
	}

	push = evo_wait(nvd0_mast(dev), 8);
	if (push) {
		/* per-head sync setup, then per-OR SOR control */
		evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
		evo_data(push, syncs);
		evo_data(push, magic);
		evo_mthd(push, 0x0200 + (nv_encoder->or * 0x020), 2);
		evo_data(push, mode_ctrl);
		evo_data(push, or_config);
		evo_kick(push, nvd0_mast(dev));
	}

	nv_encoder->crtc = encoder->crtc;
}
1648
/* Final teardown of a SOR encoder allocated by nvd0_sor_create();
 * kfree(encoder) releases the embedding nouveau_encoder. */
static void
nvd0_sor_destroy(struct drm_encoder *encoder)
{
	drm_encoder_cleanup(encoder);
	kfree(encoder);
}
1655
/* SOR modeset helper hooks; no .detect — SOR outputs are probed via
 * DDC/AUX elsewhere rather than load detection */
static const struct drm_encoder_helper_funcs nvd0_sor_hfunc = {
	.dpms = nvd0_sor_dpms,
	.mode_fixup = nvd0_sor_mode_fixup,
	.prepare = nvd0_sor_prepare,
	.commit = nvd0_sor_commit,
	.mode_set = nvd0_sor_mode_set,
	.disable = nvd0_sor_disconnect,
	.get_crtc = nvd0_display_crtc_get,
};
1665
/* base SOR encoder ops; only destruction is required */
static const struct drm_encoder_funcs nvd0_sor_func = {
	.destroy = nvd0_sor_destroy,
};
1669
1670 static int
1671 nvd0_sor_create(struct drm_connector *connector, struct dcb_output *dcbe)
1672 {
1673 struct drm_device *dev = connector->dev;
1674 struct nouveau_encoder *nv_encoder;
1675 struct drm_encoder *encoder;
1676
1677 nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
1678 if (!nv_encoder)
1679 return -ENOMEM;
1680 nv_encoder->dcb = dcbe;
1681 nv_encoder->or = ffs(dcbe->or) - 1;
1682 nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
1683
1684 encoder = to_drm_encoder(nv_encoder);
1685 encoder->possible_crtcs = dcbe->heads;
1686 encoder->possible_clones = 0;
1687 drm_encoder_init(dev, encoder, &nvd0_sor_func, DRM_MODE_ENCODER_TMDS);
1688 drm_encoder_helper_add(encoder, &nvd0_sor_hfunc);
1689
1690 drm_mode_connector_attach_encoder(connector, encoder);
1691 return 0;
1692 }
1693
1694 /******************************************************************************
1695 * Init
1696 *****************************************************************************/
/* no teardown required on fini; resources persist until
 * nvd0_display_destroy() */
void
nvd0_display_fini(struct drm_device *dev)
{
}
1701
/* Bring up the display: point the core channel's notifier at NvEvoSync
 * and prime methods 0x0084/0x008c (exact semantics undocumented; the
 * sequence matches the original bring-up code).  Returns -EBUSY if no
 * push buffer space is available. */
int
nvd0_display_init(struct drm_device *dev)
{
	u32 *push = evo_wait(nvd0_mast(dev), 32);
	if (push) {
		evo_mthd(push, 0x0088, 1);
		evo_data(push, NvEvoSync);
		evo_mthd(push, 0x0084, 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x0084, 1);
		evo_data(push, 0x80000000);
		evo_mthd(push, 0x008c, 1);
		evo_data(push, 0x00000000);
		evo_kick(push, nvd0_mast(dev));
		return 0;
	}

	return -EBUSY;
}
1721
/* Tear down state created by nvd0_display_create().  Also used as the
 * error-path cleanup in create itself, so it runs against partially
 * constructed state. */
void
nvd0_display_destroy(struct drm_device *dev)
{
	struct nvd0_disp *disp = nvd0_disp(dev);

	nvd0_dmac_destroy(disp->core, &disp->mast.base);

	nouveau_bo_unmap(disp->sync);
	nouveau_bo_ref(NULL, &disp->sync);

	nouveau_display(dev)->priv = NULL;
	kfree(disp);
}
1735
/* Create the nvd0 display subsystem: allocate shared sync memory, bind
 * a supported evo display class, create the master channel, one crtc
 * per hardware head, and encoders/connectors from the VBIOS DCB table.
 * On any failure nvd0_display_destroy() unwinds whatever was built. */
int
nvd0_display_create(struct drm_device *dev)
{
	/* preferred class first; fall back down the list */
	static const u16 oclass[] = {
		NVE0_DISP_CLASS,
		NVD0_DISP_CLASS,
	};
	struct nouveau_device *device = nouveau_dev(dev);
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct dcb_table *dcb = &drm->vbios.dcb;
	struct drm_connector *connector, *tmp;
	struct nvd0_disp *disp;
	struct dcb_output *dcbe;
	int crtcs, ret, i;

	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;

	nouveau_display(dev)->priv = disp;
	nouveau_display(dev)->dtor = nvd0_display_destroy;
	nouveau_display(dev)->init = nvd0_display_init;
	nouveau_display(dev)->fini = nvd0_display_fini;

	/* small shared memory area we use for notifiers and semaphores */
	ret = nouveau_bo_new(dev, 4096, 0x1000, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, &disp->sync);
	if (!ret) {
		ret = nouveau_bo_pin(disp->sync, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(disp->sync);
		if (ret)
			nouveau_bo_ref(NULL, &disp->sync);
	}

	if (ret)
		goto out;

	/* attempt to allocate a supported evo display class */
	ret = -ENODEV;
	for (i = 0; ret && i < ARRAY_SIZE(oclass); i++) {
		ret = nouveau_object_new(nv_object(drm), NVDRM_DEVICE,
					 0xd1500000, oclass[i], NULL, 0,
					 &disp->core);
	}

	if (ret)
		goto out;

	/* allocate master evo channel */
	ret = nvd0_dmac_create(disp->core, NV50_DISP_MAST_CLASS, 0,
			      &(struct nv50_display_mast_class) {
					.pushbuf = EVO_PUSH_HANDLE(MAST, 0),
			      }, sizeof(struct nv50_display_mast_class),
			      disp->sync->bo.offset, &disp->mast.base);
	if (ret)
		goto out;

	/* create crtc objects to represent the hw heads */
	crtcs = nv_rd32(device, 0x022448);	/* head count register */
	for (i = 0; i < crtcs; i++) {
		ret = nvd0_crtc_create(dev, disp->core, i);
		if (ret)
			goto out;
	}

	/* create encoder/connector objects based on VBIOS DCB table */
	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
		connector = nouveau_connector_create(dev, dcbe->connector);
		if (IS_ERR(connector))
			continue;

		if (dcbe->location != DCB_LOC_ON_CHIP) {
			NV_WARN(drm, "skipping off-chip encoder %d/%d\n",
				dcbe->type, ffs(dcbe->or) - 1);
			continue;
		}

		/* encoder-create failures are deliberately non-fatal;
		 * the connector cull below handles orphans */
		switch (dcbe->type) {
		case DCB_OUTPUT_TMDS:
		case DCB_OUTPUT_LVDS:
		case DCB_OUTPUT_DP:
			nvd0_sor_create(connector, dcbe);
			break;
		case DCB_OUTPUT_ANALOG:
			nvd0_dac_create(connector, dcbe);
			break;
		default:
			NV_WARN(drm, "skipping unsupported encoder %d/%d\n",
				dcbe->type, ffs(dcbe->or) - 1);
			continue;
		}
	}

	/* cull any connectors we created that don't have an encoder */
	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
		if (connector->encoder_ids[0])
			continue;

		NV_WARN(drm, "%s has no encoders, removing\n",
			drm_get_connector_name(connector));
		connector->funcs->destroy(connector);
	}

out:
	if (ret)
		nvd0_display_destroy(dev);
	return ret;
}
This page took 0.097294 seconds and 5 git commands to generate.