drm/nvd0/disp: call into core to handle dac power state changes
[deliverable/linux.git] / drivers / gpu / drm / nouveau / nvd0_display.c
1 /*
2 * Copyright 2011 Red Hat Inc.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
13 *
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
21 *
22 * Authors: Ben Skeggs
23 */
24
25 #include <linux/dma-mapping.h>
26
27 #include <drm/drmP.h>
28 #include <drm/drm_crtc_helper.h>
29
30 #include "nouveau_drm.h"
31 #include "nouveau_dma.h"
32 #include "nouveau_gem.h"
33 #include "nouveau_connector.h"
34 #include "nouveau_encoder.h"
35 #include "nouveau_crtc.h"
36 #include "nouveau_fence.h"
37 #include "nv50_display.h"
38
39 #include <core/client.h>
40 #include <core/gpuobj.h>
41 #include <core/class.h>
42
43 #include <subdev/timer.h>
44 #include <subdev/bar.h>
45 #include <subdev/fb.h>
46
#define EVO_DMA_NR 9

/* channel indices within the EVO channel space */
#define EVO_MASTER  (0x00)		/* core/master channel */
#define EVO_FLIP(c) (0x01 + (c))	/* per-head base (flip) channel */
#define EVO_OVLY(c) (0x05 + (c))	/* per-head overlay channel */
#define EVO_OIMM(c) (0x09 + (c))	/* per-head overlay immediate channel */
#define EVO_CURS(c) (0x0d + (c))	/* per-head cursor channel */

/* offsets in shared sync bo of various structures */
#define EVO_SYNC(c, o) ((c) * 0x0100 + (o))
#define EVO_MAST_NTFY     EVO_SYNC( 0, 0x00)	/* master notifier dword */
#define EVO_FLIP_SEM0(c)  EVO_SYNC((c), 0x00)	/* flip semaphore, slot 0 */
#define EVO_FLIP_SEM1(c)  EVO_SYNC((c), 0x10)	/* flip semaphore, slot 1 */

/* object handles used when creating EVO objects on the core's client */
#define EVO_CORE_HANDLE      (0xd1500000)
#define EVO_CHAN_HANDLE(t,i) (0xd15c0000 | (((t) & 0x00ff) << 8) | (i))
#define EVO_CHAN_OCLASS(t,c) ((nv_hclass(c) & 0xff00) | ((t) & 0x00ff))
#define EVO_PUSH_HANDLE(t,i) (0xd15b0000 | (i) |                          \
			      (((NV50_DISP_##t##_CLASS) & 0x00ff) << 8))
67 /******************************************************************************
68 * EVO channel
69 *****************************************************************************/
70
/* Handle to a single EVO display channel created on the core's client. */
struct nvd0_chan {
	struct nouveau_object *user;	/* channel object; user-area accessor */
	u32 handle;			/* object handle; zero if never created */
};
75
76 static int
77 nvd0_chan_create(struct nouveau_object *core, u32 bclass, u8 head,
78 void *data, u32 size, struct nvd0_chan *chan)
79 {
80 struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
81 const u32 oclass = EVO_CHAN_OCLASS(bclass, core);
82 const u32 handle = EVO_CHAN_HANDLE(bclass, head);
83 int ret;
84
85 ret = nouveau_object_new(client, EVO_CORE_HANDLE, handle,
86 oclass, data, size, &chan->user);
87 if (ret)
88 return ret;
89
90 chan->handle = handle;
91 return 0;
92 }
93
94 static void
95 nvd0_chan_destroy(struct nouveau_object *core, struct nvd0_chan *chan)
96 {
97 struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
98 if (chan->handle)
99 nouveau_object_del(client, EVO_CORE_HANDLE, chan->handle);
100 }
101
102 /******************************************************************************
103 * PIO EVO channel
104 *****************************************************************************/
105
/* A PIO-mode EVO channel (cursor, overlay immediate): commands are poked
 * directly into the channel user area rather than via a push buffer.
 */
struct nvd0_pioc {
	struct nvd0_chan base;
};

/* Destroy a PIO channel; safe if creation previously failed. */
static void
nvd0_pioc_destroy(struct nouveau_object *core, struct nvd0_pioc *pioc)
{
	nvd0_chan_destroy(core, &pioc->base);
}

/* Create a PIO channel of class @bclass for @head with the
 * class-specific creation arguments in @data/@size.
 */
static int
nvd0_pioc_create(struct nouveau_object *core, u32 bclass, u8 head,
		 void *data, u32 size, struct nvd0_pioc *pioc)
{
	return nvd0_chan_create(core, bclass, head, data, size, &pioc->base);
}
122
123 /******************************************************************************
124 * DMA EVO channel
125 *****************************************************************************/
126
/* A DMA-mode EVO channel: commands are written into a page-sized push
 * buffer in coherent system memory and fetched by the display engine.
 */
struct nvd0_dmac {
	struct nvd0_chan base;
	dma_addr_t handle;	/* bus address of the push buffer */
	u32 *ptr;		/* CPU mapping of the push buffer; NULL if unallocated */
};

/* Release a DMA channel and its push buffer (if one was allocated). */
static void
nvd0_dmac_destroy(struct nouveau_object *core, struct nvd0_dmac *dmac)
{
	if (dmac->ptr) {
		struct pci_dev *pdev = nv_device(core)->pdev;
		pci_free_consistent(pdev, PAGE_SIZE, dmac->ptr, dmac->handle);
	}

	nvd0_chan_destroy(core, &dmac->base);
}
143
144 static int
145 nvd0_dmac_create(struct nouveau_object *core, u32 bclass, u8 head,
146 void *data, u32 size, u64 syncbuf,
147 struct nvd0_dmac *dmac)
148 {
149 struct nouveau_fb *pfb = nouveau_fb(core);
150 struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
151 struct nouveau_object *object;
152 u32 pushbuf = *(u32 *)data;
153 dma_addr_t handle;
154 void *ptr;
155 int ret;
156
157 ptr = pci_alloc_consistent(nv_device(core)->pdev, PAGE_SIZE, &handle);
158 if (!ptr)
159 return -ENOMEM;
160
161 ret = nouveau_object_new(client, NVDRM_DEVICE, pushbuf,
162 NV_DMA_FROM_MEMORY_CLASS,
163 &(struct nv_dma_class) {
164 .flags = NV_DMA_TARGET_PCI_US |
165 NV_DMA_ACCESS_RD,
166 .start = handle + 0x0000,
167 .limit = handle + 0x0fff,
168 }, sizeof(struct nv_dma_class), &object);
169 if (ret)
170 return ret;
171
172 ret = nvd0_chan_create(core, bclass, head, data, size, &dmac->base);
173 if (ret)
174 return ret;
175
176 dmac->handle = handle;
177 dmac->ptr = ptr;
178
179 ret = nouveau_object_new(client, dmac->base.handle, NvEvoSync,
180 NV_DMA_IN_MEMORY_CLASS,
181 &(struct nv_dma_class) {
182 .flags = NV_DMA_TARGET_VRAM |
183 NV_DMA_ACCESS_RDWR,
184 .start = syncbuf + 0x0000,
185 .limit = syncbuf + 0x0fff,
186 }, sizeof(struct nv_dma_class), &object);
187 if (ret)
188 goto out;
189
190 ret = nouveau_object_new(client, dmac->base.handle, NvEvoVRAM,
191 NV_DMA_IN_MEMORY_CLASS,
192 &(struct nv_dma_class) {
193 .flags = NV_DMA_TARGET_VRAM |
194 NV_DMA_ACCESS_RDWR,
195 .start = 0,
196 .limit = pfb->ram.size - 1,
197 }, sizeof(struct nv_dma_class), &object);
198 if (ret)
199 goto out;
200
201 ret = nouveau_object_new(client, dmac->base.handle, NvEvoVRAM_LP,
202 NV_DMA_IN_MEMORY_CLASS,
203 &(struct nv_dma_class) {
204 .flags = NV_DMA_TARGET_VRAM |
205 NV_DMA_ACCESS_RDWR,
206 .start = 0,
207 .limit = pfb->ram.size - 1,
208 .conf0 = NVD0_DMA_CONF0_ENABLE |
209 NVD0_DMA_CONF0_PAGE_LP,
210 }, sizeof(struct nv_dma_class), &object);
211 if (ret)
212 goto out;
213
214 ret = nouveau_object_new(client, dmac->base.handle, NvEvoFB32,
215 NV_DMA_IN_MEMORY_CLASS,
216 &(struct nv_dma_class) {
217 .flags = NV_DMA_TARGET_VRAM |
218 NV_DMA_ACCESS_RDWR,
219 .start = 0,
220 .limit = pfb->ram.size - 1,
221 .conf0 = 0x00fe |
222 NVD0_DMA_CONF0_ENABLE |
223 NVD0_DMA_CONF0_PAGE_LP,
224 }, sizeof(struct nv_dma_class), &object);
225 out:
226 if (ret)
227 nvd0_dmac_destroy(core, dmac);
228 return ret;
229 }
230
/* One wrapper type per EVO channel role. */
struct nvd0_mast {
	struct nvd0_dmac base;		/* core/master channel */
};

struct nvd0_curs {
	struct nvd0_pioc base;		/* cursor channel */
};

struct nvd0_sync {
	struct nvd0_dmac base;		/* base (page flip) channel */
	struct {
		u32 offset;		/* current semaphore slot offset in sync bo */
		u16 value;		/* next semaphore release value */
	} sem;
};

struct nvd0_ovly {
	struct nvd0_dmac base;		/* overlay channel */
};

struct nvd0_oimm {
	struct nvd0_pioc base;		/* overlay immediate channel */
};

/* Per-head state: the nouveau CRTC plus its four per-head EVO channels. */
struct nvd0_head {
	struct nouveau_crtc base;
	struct nvd0_curs curs;
	struct nvd0_sync sync;
	struct nvd0_ovly ovly;
	struct nvd0_oimm oimm;
};

/* Accessors from a drm_crtc to the embedded per-head objects. */
#define nvd0_head(c) ((struct nvd0_head *)nouveau_crtc(c))
#define nvd0_curs(c) (&nvd0_head(c)->curs)
#define nvd0_sync(c) (&nvd0_head(c)->sync)
#define nvd0_ovly(c) (&nvd0_head(c)->ovly)
#define nvd0_oimm(c) (&nvd0_head(c)->oimm)
#define nvd0_chan(c) (&(c)->base.base)

/* Device-wide display state. */
struct nvd0_disp {
	struct nouveau_object *core;	/* display core object */
	struct nvd0_mast mast;		/* master EVO channel */

	u32 modeset;			/* presumably a mask of heads pending modeset — TODO confirm */

	struct nouveau_bo *sync;	/* shared sync/semaphore buffer */
};
278
/* Fetch the nvd0_disp instance stashed in the generic display's priv. */
static struct nvd0_disp *
nvd0_disp(struct drm_device *dev)
{
	return nouveau_display(dev)->priv;
}

#define nvd0_mast(d) (&nvd0_disp(d)->mast)
286
/* Encoder helper: report which CRTC this encoder is currently driving. */
static struct drm_crtc *
nvd0_display_crtc_get(struct drm_encoder *encoder)
{
	return nouveau_encoder(encoder)->crtc;
}
292
293 /******************************************************************************
294 * EVO channel helpers
295 *****************************************************************************/
/* Reserve space for @nr dwords in a DMA channel's push buffer, returning
 * a pointer to write commands at, or NULL if the channel stalled.  When
 * the request would run past the end of the page, emit a jump-to-start
 * command (0x20000000), reset PUT (user-area offset 0x0000) and wait for
 * offset 0x0004 (presumably GET — TODO confirm) to return to zero before
 * wrapping.
 */
static u32 *
evo_wait(void *evoc, int nr)
{
	struct nvd0_dmac *dmac = evoc;
	u32 put = nv_ro32(dmac->base.user, 0x0000) / 4;	/* PUT, in dwords */

	if (put + nr >= (PAGE_SIZE / 4)) {
		dmac->ptr[put] = 0x20000000;	/* jump to start of push buffer */

		nv_wo32(dmac->base.user, 0x0000, 0x00000000);
		if (!nv_wait(dmac->base.user, 0x0004, ~0, 0x00000000)) {
			NV_ERROR(dmac->base.user, "channel stalled\n");
			return NULL;
		}

		put = 0;
	}

	return dmac->ptr + put;
}
316
/* Submit the commands written after evo_wait() by advancing PUT
 * (user-area offset 0x0000) to the current write position, in bytes.
 */
static void
evo_kick(u32 *push, void *evoc)
{
	struct nvd0_dmac *dmac = evoc;
	nv_wo32(dmac->base.user, 0x0000, (push - dmac->ptr) << 2);
}

/* Emit a method header ((count << 18) | method) / a data dword.
 * Both macros advance the push pointer @p. */
#define evo_mthd(p,m,s) *((p)++) = (((s) << 18) | (m))
#define evo_data(p,d) *((p)++) = (d)
326
/* Completion test for evo_sync(): true once the master channel has
 * written back the notifier dword in the shared sync buffer.
 */
static bool
evo_sync_wait(void *data)
{
	return nouveau_bo_rd32(data, EVO_MAST_NTFY) != 0x00000000;
}
332
/* Synchronise with the master EVO channel: clear the notifier dword,
 * request (method 0x0084) that the core write it back, kick, then
 * busy-wait for the write.  Returns 0 on success, -EBUSY if the channel
 * stalled or the wait timed out.
 */
static int
evo_sync(struct drm_device *dev)
{
	struct nouveau_device *device = nouveau_dev(dev);
	struct nvd0_disp *disp = nvd0_disp(dev);
	struct nvd0_mast *mast = nvd0_mast(dev);
	u32 *push = evo_wait(mast, 8);
	if (push) {
		nouveau_bo_wr32(disp->sync, EVO_MAST_NTFY, 0x00000000);
		evo_mthd(push, 0x0084, 1);
		evo_data(push, 0x80000000 | EVO_MAST_NTFY); /* notify at this offset */
		evo_mthd(push, 0x0080, 2);	/* update — TODO confirm method semantics */
		evo_data(push, 0x00000000);
		evo_data(push, 0x00000000);
		evo_kick(push, mast);
		if (nv_wait_cb(device, evo_sync_wait, disp->sync))
			return 0;
	}

	return -EBUSY;
}
354
355 /******************************************************************************
356 * Page flipping channel
357 *****************************************************************************/
/* Return the buffer object holding the flip semaphores.  All heads share
 * the one sync bo, so the @crtc index is currently unused here.
 */
struct nouveau_bo *
nvd0_display_crtc_sema(struct drm_device *dev, int crtc)
{
	return nvd0_disp(dev)->sync;
}
363
/* Cancel pending page flipping on the head's sync (base) channel by
 * zeroing the semaphore acquire/release methods and kicking an update.
 */
void
nvd0_display_flip_stop(struct drm_crtc *crtc)
{
	struct nvd0_sync *sync = nvd0_sync(crtc);
	u32 *push;

	push = evo_wait(sync, 8);
	if (push) {
		evo_mthd(push, 0x0084, 1);	/* clear notify */
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x0094, 1);	/* clear semaphore release */
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x00c0, 1);	/* clear semaphore acquire */
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x0080, 1);	/* update */
		evo_data(push, 0x00000000);
		evo_kick(push, sync);
	}
}
383
/* Queue a page flip to @fb on this head's sync (base) channel.
 *
 * If @chan is given, the rendering channel is made to release a
 * semaphore (value 0xf00d....) that the flip acquires, so scanout does
 * not switch before rendering completes; otherwise the semaphore is
 * released immediately from the CPU and the EVO channel is synced.
 * The two semaphore slots per head (offset ^ 0x10, cf. EVO_FLIP_SEM0/1)
 * are alternated on every flip.
 *
 * Returns 0 on success, -EBUSY if the EVO channel stalled, or a ring
 * error from the render channel.
 */
int
nvd0_display_flip_next(struct drm_crtc *crtc, struct drm_framebuffer *fb,
		       struct nouveau_channel *chan, u32 swap_interval)
{
	struct nouveau_framebuffer *nv_fb = nouveau_framebuffer(fb);
	struct nvd0_disp *disp = nvd0_disp(crtc->dev);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nvd0_sync *sync = nvd0_sync(crtc);
	u64 offset;
	u32 *push;
	int ret;

	swap_interval <<= 4;
	if (swap_interval == 0)
		swap_interval |= 0x100;	/* presumably "don't wait for vblank" — TODO confirm */

	push = evo_wait(sync, 128);
	if (unlikely(push == NULL))
		return -EBUSY;

	/* synchronise with the rendering channel, if necessary */
	if (likely(chan)) {
		ret = RING_SPACE(chan, 10);
		if (ret)
			return ret;


		offset = nvc0_fence_crtc(chan, nv_crtc->index);
		offset += sync->sem.offset;

		/* release current slot, then acquire-release the other slot */
		BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
		OUT_RING  (chan, upper_32_bits(offset));
		OUT_RING  (chan, lower_32_bits(offset));
		OUT_RING  (chan, 0xf00d0000 | sync->sem.value);
		OUT_RING  (chan, 0x1002);	/* release — TODO confirm opcode */
		BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
		OUT_RING  (chan, upper_32_bits(offset));
		OUT_RING  (chan, lower_32_bits(offset ^ 0x10));
		OUT_RING  (chan, 0x74b1e000);
		OUT_RING  (chan, 0x1001);	/* acquire — TODO confirm opcode */
		FIRE_RING (chan);
	} else {
		/* no renderer involved: release the semaphore from the CPU */
		nouveau_bo_wr32(disp->sync, sync->sem.offset / 4,
				0xf00d0000 | sync->sem.value);
		evo_sync(crtc->dev);
	}

	/* queue the flip */
	evo_mthd(push, 0x0100, 1);
	evo_data(push, 0xfffe0000);
	evo_mthd(push, 0x0084, 1);
	evo_data(push, swap_interval);
	if (!(swap_interval & 0x00000100)) {
		evo_mthd(push, 0x00e0, 1);
		evo_data(push, 0x40000000);
	}
	evo_mthd(push, 0x0088, 4);		/* semaphore acquire setup */
	evo_data(push, sync->sem.offset);
	evo_data(push, 0xf00d0000 | sync->sem.value);
	evo_data(push, 0x74b1e000);
	evo_data(push, NvEvoSync);
	evo_mthd(push, 0x00a0, 2);
	evo_data(push, 0x00000000);
	evo_data(push, 0x00000000);
	evo_mthd(push, 0x00c0, 1);
	evo_data(push, nv_fb->r_dma);
	evo_mthd(push, 0x0110, 2);
	evo_data(push, 0x00000000);
	evo_data(push, 0x00000000);
	evo_mthd(push, 0x0400, 5);		/* surface address/size/layout */
	evo_data(push, nv_fb->nvbo->bo.offset >> 8);
	evo_data(push, 0);
	evo_data(push, (fb->height << 16) | fb->width);
	evo_data(push, nv_fb->r_pitch);
	evo_data(push, nv_fb->r_format);
	evo_mthd(push, 0x0080, 1);		/* update */
	evo_data(push, 0x00000000);
	evo_kick(push, sync);

	/* flip to the other semaphore slot for the next frame */
	sync->sem.offset ^= 0x10;
	sync->sem.value++;
	return 0;
}
467
468 /******************************************************************************
469 * CRTC
470 *****************************************************************************/
/* Program the head's dithering mode/depth from the connector properties,
 * resolving AUTO settings against the sink's reported bpc.  If @update,
 * also kick a core-channel update so the change takes effect now.
 */
static int
nvd0_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool update)
{
	struct nouveau_drm *drm = nouveau_drm(nv_crtc->base.dev);
	struct drm_device *dev = nv_crtc->base.dev;
	struct nouveau_connector *nv_connector;
	struct drm_connector *connector;
	u32 *push, mode = 0x00;
	u32 mthd;

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	connector = &nv_connector->base;
	if (nv_connector->dithering_mode == DITHERING_MODE_AUTO) {
		/* dither only when the fb has more depth than the sink */
		if (nv_crtc->base.fb->depth > connector->display_info.bpc * 3)
			mode = DITHERING_MODE_DYNAMIC2X2;
	} else {
		mode = nv_connector->dithering_mode;
	}

	if (nv_connector->dithering_depth == DITHERING_DEPTH_AUTO) {
		if (connector->display_info.bpc >= 8)
			mode |= DITHERING_DEPTH_8BPC;
	} else {
		mode |= nv_connector->dithering_depth;
	}

	/* the dither method moved between chipset generations */
	if (nv_device(drm->device)->card_type < NV_E0)
		mthd = 0x0490 + (nv_crtc->index * 0x0300);
	else
		mthd = 0x04a0 + (nv_crtc->index * 0x0300);

	push = evo_wait(nvd0_mast(dev), 4);
	if (push) {
		evo_mthd(push, mthd, 1);
		evo_data(push, mode);
		if (update) {
			evo_mthd(push, 0x0080, 1);	/* update */
			evo_data(push, 0x00000000);
		}
		evo_kick(push, nvd0_mast(dev));
	}

	return 0;
}
515
/* Program the head's scaler output size from the connector's scaling
 * mode, applying underscan borders and CENTER/ASPECT adjustments.  If
 * @update, also restart page flipping so the new viewport takes effect.
 * Always returns 0.
 */
static int
nvd0_crtc_set_scale(struct nouveau_crtc *nv_crtc, bool update)
{
	struct drm_display_mode *omode, *umode = &nv_crtc->base.mode;
	struct drm_device *dev = nv_crtc->base.dev;
	struct drm_crtc *crtc = &nv_crtc->base;
	struct nouveau_connector *nv_connector;
	int mode = DRM_MODE_SCALE_NONE;
	u32 oX, oY, *push;

	/* start off at the resolution we programmed the crtc for, this
	 * effectively handles NONE/FULL scaling
	 */
	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	if (nv_connector && nv_connector->native_mode)
		mode = nv_connector->scaling_mode;

	if (mode != DRM_MODE_SCALE_NONE)
		omode = nv_connector->native_mode;
	else
		omode = umode;

	oX = omode->hdisplay;
	oY = omode->vdisplay;
	if (omode->flags & DRM_MODE_FLAG_DBLSCAN)
		oY *= 2;

	/* add overscan compensation if necessary, will keep the aspect
	 * ratio the same as the backend mode unless overridden by the
	 * user setting both hborder and vborder properties.
	 */
	if (nv_connector && ( nv_connector->underscan == UNDERSCAN_ON ||
			     (nv_connector->underscan == UNDERSCAN_AUTO &&
			      nv_connector->edid &&
			      drm_detect_hdmi_monitor(nv_connector->edid)))) {
		u32 bX = nv_connector->underscan_hborder;
		u32 bY = nv_connector->underscan_vborder;
		u32 aspect = (oY << 19) / oX;	/* 19.13-style fixed point ratio */

		if (bX) {
			oX -= (bX * 2);
			if (bY) oY -= (bY * 2);
			else oY = ((oX * aspect) + (aspect / 2)) >> 19;
		} else {
			/* no explicit border: shrink ~6% plus 32 pixels */
			oX -= (oX >> 4) + 32;
			if (bY) oY -= (bY * 2);
			else oY = ((oX * aspect) + (aspect / 2)) >> 19;
		}
	}

	/* handle CENTER/ASPECT scaling, taking into account the areas
	 * removed already for overscan compensation
	 */
	switch (mode) {
	case DRM_MODE_SCALE_CENTER:
		oX = min((u32)umode->hdisplay, oX);
		oY = min((u32)umode->vdisplay, oY);
		/* fall-through */
	case DRM_MODE_SCALE_ASPECT:
		if (oY < oX) {
			u32 aspect = (umode->hdisplay << 19) / umode->vdisplay;
			oX = ((oY * aspect) + (aspect / 2)) >> 19;
		} else {
			u32 aspect = (umode->vdisplay << 19) / umode->hdisplay;
			oY = ((oX * aspect) + (aspect / 2)) >> 19;
		}
		break;
	default:
		break;
	}

	push = evo_wait(nvd0_mast(dev), 8);
	if (push) {
		/* scaler output sizes (three mirrors of the same value) */
		evo_mthd(push, 0x04c0 + (nv_crtc->index * 0x300), 3);
		evo_data(push, (oY << 16) | oX);
		evo_data(push, (oY << 16) | oX);
		evo_data(push, (oY << 16) | oX);
		evo_mthd(push, 0x0494 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		/* scaler input size = user mode */
		evo_mthd(push, 0x04b8 + (nv_crtc->index * 0x300), 1);
		evo_data(push, (umode->vdisplay << 16) | umode->hdisplay);
		evo_kick(push, nvd0_mast(dev));
		if (update) {
			nvd0_display_flip_stop(crtc);
			nvd0_display_flip_next(crtc, crtc->fb, NULL, 1);
		}
	}

	return 0;
}
606
/* Program the head's scanout surface (address, size, pitch, format, DMA
 * object) and panning offset on the core channel.  If @update, kick an
 * update so it takes effect immediately.  Always returns 0.
 */
static int
nvd0_crtc_set_image(struct nouveau_crtc *nv_crtc, struct drm_framebuffer *fb,
		    int x, int y, bool update)
{
	struct nouveau_framebuffer *nvfb = nouveau_framebuffer(fb);
	u32 *push;

	push = evo_wait(nvd0_mast(fb->dev), 16);
	if (push) {
		evo_mthd(push, 0x0460 + (nv_crtc->index * 0x300), 1);
		evo_data(push, nvfb->nvbo->bo.offset >> 8);	/* surface address, 256B units */
		evo_mthd(push, 0x0468 + (nv_crtc->index * 0x300), 4);
		evo_data(push, (fb->height << 16) | fb->width);
		evo_data(push, nvfb->r_pitch);
		evo_data(push, nvfb->r_format);
		evo_data(push, nvfb->r_dma);
		evo_mthd(push, 0x04b0 + (nv_crtc->index * 0x300), 1);
		evo_data(push, (y << 16) | x);			/* pan offset */
		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}
		evo_kick(push, nvd0_mast(fb->dev));
	}

	/* remember the surface's DMA object for nvd0_crtc_commit() */
	nv_crtc->fb.tile_flags = nvfb->r_dma;
	return 0;
}
635
/* Enable or disable the hardware cursor on this head via the core
 * channel.  If @update, also kick an update so it applies immediately.
 */
static void
nvd0_crtc_cursor_show(struct nouveau_crtc *nv_crtc, bool show, bool update)
{
	struct drm_device *dev = nv_crtc->base.dev;
	u32 *push = evo_wait(nvd0_mast(dev), 16);
	if (push) {
		if (show) {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 2);
			evo_data(push, 0x85000000);	/* enable; format bits — TODO confirm */
			evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, NvEvoVRAM);	/* cursor image DMA object */
		} else {
			evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x05000000);	/* disable */
			evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
			evo_data(push, 0x00000000);
		}

		if (update) {
			evo_mthd(push, 0x0080, 1);
			evo_data(push, 0x00000000);
		}

		evo_kick(push, nvd0_mast(dev));
	}
}
663
/* Nothing to do per-CRTC; power management appears to be handled by the
 * per-encoder dpms hooks (e.g. nvd0_dac_dpms) on this hardware.
 */
static void
nvd0_crtc_dpms(struct drm_crtc *crtc, int mode)
{
}
668
/* Helper .prepare: quiesce the head before a modeset — stop flipping,
 * detach the scanout surface and LUT on the core channel, hide cursor.
 */
static void
nvd0_crtc_prepare(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 *push;

	nvd0_display_flip_stop(crtc);

	push = evo_wait(nvd0_mast(crtc->dev), 2);
	if (push) {
		evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x03000000);
		evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000);
		evo_kick(push, nvd0_mast(crtc->dev));
	}

	nvd0_crtc_cursor_show(nv_crtc, false, false);
}
690
/* Helper .commit: re-enable the head after a modeset — reattach scanout
 * DMA object and LUT, restore cursor visibility, restart flipping.
 */
static void
nvd0_crtc_commit(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 *push;

	push = evo_wait(nvd0_mast(crtc->dev), 32);
	if (push) {
		evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
		evo_data(push, nv_crtc->fb.tile_flags);	/* set by nvd0_crtc_set_image() */
		evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 4);
		evo_data(push, 0x83000000);		/* enable + LUT mode bits — TODO confirm */
		evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
		evo_data(push, 0x00000000);
		evo_data(push, 0x00000000);
		evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
		evo_data(push, NvEvoVRAM);
		evo_mthd(push, 0x0430 + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0xffffff00);
		evo_kick(push, nvd0_mast(crtc->dev));
	}

	nvd0_crtc_cursor_show(nv_crtc, nv_crtc->cursor.visible, true);
	nvd0_display_flip_next(crtc, crtc->fb, NULL, 1);
}
716
/* No CRTC-level mode constraints on this hardware; accept as-is. */
static bool
nvd0_crtc_mode_fixup(struct drm_crtc *crtc, const struct drm_display_mode *mode,
		     struct drm_display_mode *adjusted_mode)
{
	return true;
}
723
724 static int
725 nvd0_crtc_swap_fbs(struct drm_crtc *crtc, struct drm_framebuffer *old_fb)
726 {
727 struct nouveau_framebuffer *nvfb = nouveau_framebuffer(crtc->fb);
728 int ret;
729
730 ret = nouveau_bo_pin(nvfb->nvbo, TTM_PL_FLAG_VRAM);
731 if (ret)
732 return ret;
733
734 if (old_fb) {
735 nvfb = nouveau_framebuffer(old_fb);
736 nouveau_bo_unpin(nvfb->nvbo);
737 }
738
739 return 0;
740 }
741
/* Helper .mode_set: compute raster timing values from @mode (accounting
 * for interlace and doublescan), program them on the core channel, then
 * apply dithering, scaling and the scanout image.  Returns 0 or an
 * error from pinning the new framebuffer.
 */
static int
nvd0_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode,
		   struct drm_display_mode *mode, int x, int y,
		   struct drm_framebuffer *old_fb)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nouveau_connector *nv_connector;
	u32 ilace = (mode->flags & DRM_MODE_FLAG_INTERLACE) ? 2 : 1;
	u32 vscan = (mode->flags & DRM_MODE_FLAG_DBLSCAN) ? 2 : 1;
	u32 hactive, hsynce, hbackp, hfrontp, hblanke, hblanks;
	u32 vactive, vsynce, vbackp, vfrontp, vblanke, vblanks;
	u32 vblan2e = 0, vblan2s = 1;	/* second field blanking (interlace) */
	u32 *push;
	int ret;

	/* horizontal timings, in pixels from start of sync */
	hactive = mode->htotal;
	hsynce  = mode->hsync_end - mode->hsync_start - 1;
	hbackp  = mode->htotal - mode->hsync_end;
	hblanke = hsynce + hbackp;
	hfrontp = mode->hsync_start - mode->hdisplay;
	hblanks = mode->htotal - hfrontp - 1;

	/* vertical timings, scaled for doublescan/interlace */
	vactive = mode->vtotal * vscan / ilace;
	vsynce  = ((mode->vsync_end - mode->vsync_start) * vscan / ilace) - 1;
	vbackp  = (mode->vtotal - mode->vsync_end) * vscan / ilace;
	vblanke = vsynce + vbackp;
	vfrontp = (mode->vsync_start - mode->vdisplay) * vscan / ilace;
	vblanks = vactive - vfrontp - 1;
	if (mode->flags & DRM_MODE_FLAG_INTERLACE) {
		vblan2e = vactive + vsynce + vbackp;
		vblan2s = vblan2e + (mode->vdisplay * vscan / ilace);
		vactive = (vactive * 2) + 1;
	}

	ret = nvd0_crtc_swap_fbs(crtc, old_fb);
	if (ret)
		return ret;

	push = evo_wait(nvd0_mast(crtc->dev), 64);
	if (push) {
		evo_mthd(push, 0x0410 + (nv_crtc->index * 0x300), 6);
		evo_data(push, 0x00000000);
		evo_data(push, (vactive << 16) | hactive);
		evo_data(push, ( vsynce << 16) | hsynce);
		evo_data(push, (vblanke << 16) | hblanke);
		evo_data(push, (vblanks << 16) | hblanks);
		evo_data(push, (vblan2e << 16) | vblan2s);
		evo_mthd(push, 0x042c + (nv_crtc->index * 0x300), 1);
		evo_data(push, 0x00000000); /* ??? */
		evo_mthd(push, 0x0450 + (nv_crtc->index * 0x300), 3);
		evo_data(push, mode->clock * 1000);	/* pixel clock, Hz */
		evo_data(push, 0x00200000); /* ??? */
		evo_data(push, mode->clock * 1000);
		evo_mthd(push, 0x04d0 + (nv_crtc->index * 0x300), 2);
		evo_data(push, 0x00000311);
		evo_data(push, 0x00000100);
		evo_kick(push, nvd0_mast(crtc->dev));
	}

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	nvd0_crtc_set_dither(nv_crtc, false);
	nvd0_crtc_set_scale(nv_crtc, false);
	nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, false);
	return 0;
}
807
808 static int
809 nvd0_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
810 struct drm_framebuffer *old_fb)
811 {
812 struct nouveau_drm *drm = nouveau_drm(crtc->dev);
813 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
814 int ret;
815
816 if (!crtc->fb) {
817 NV_DEBUG(drm, "No FB bound\n");
818 return 0;
819 }
820
821 ret = nvd0_crtc_swap_fbs(crtc, old_fb);
822 if (ret)
823 return ret;
824
825 nvd0_display_flip_stop(crtc);
826 nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, true);
827 nvd0_display_flip_next(crtc, crtc->fb, NULL, 1);
828 return 0;
829 }
830
/* Helper .mode_set_base_atomic: switch scanout without allocation, used
 * for atomic contexts (e.g. kernel debugger / panic output).
 */
static int
nvd0_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
			       struct drm_framebuffer *fb, int x, int y,
			       enum mode_set_atomic state)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	nvd0_display_flip_stop(crtc);
	nvd0_crtc_set_image(nv_crtc, fb, x, y, true);
	return 0;
}
841
842 static void
843 nvd0_crtc_lut_load(struct drm_crtc *crtc)
844 {
845 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
846 void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
847 int i;
848
849 for (i = 0; i < 256; i++) {
850 writew(0x6000 + (nv_crtc->lut.r[i] >> 2), lut + (i * 0x20) + 0);
851 writew(0x6000 + (nv_crtc->lut.g[i] >> 2), lut + (i * 0x20) + 2);
852 writew(0x6000 + (nv_crtc->lut.b[i] >> 2), lut + (i * 0x20) + 4);
853 }
854 }
855
/* drm .cursor_set: copy a 64x64 ARGB cursor image from the user-supplied
 * GEM object into the head's pre-allocated cursor bo, and show/hide the
 * cursor when visibility changes.  @handle == 0 hides the cursor.
 * Returns 0, -EINVAL for unsupported sizes, -ENOENT for a bad handle,
 * or a mapping error.
 */
static int
nvd0_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
		     uint32_t handle, uint32_t width, uint32_t height)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct drm_gem_object *gem;
	struct nouveau_bo *nvbo;
	bool visible = (handle != 0);
	int i, ret = 0;

	if (visible) {
		/* hardware cursor is fixed at 64x64 here */
		if (width != 64 || height != 64)
			return -EINVAL;

		gem = drm_gem_object_lookup(dev, file_priv, handle);
		if (unlikely(!gem))
			return -ENOENT;
		nvbo = nouveau_gem_object(gem);

		ret = nouveau_bo_map(nvbo);
		if (ret == 0) {
			/* copy the image dword-by-dword into the cursor bo */
			for (i = 0; i < 64 * 64; i++) {
				u32 v = nouveau_bo_rd32(nvbo, i);
				nouveau_bo_wr32(nv_crtc->cursor.nvbo, i, v);
			}
			nouveau_bo_unmap(nvbo);
		}

		drm_gem_object_unreference_unlocked(gem);
	}

	/* only touch the hardware when visibility actually changes */
	if (visible != nv_crtc->cursor.visible) {
		nvd0_crtc_cursor_show(nv_crtc, visible, true);
		nv_crtc->cursor.visible = visible;
	}

	return ret;
}
895
/* drm .cursor_move: write the new position (0x0084) and an update
 * request (0x0080) straight into the cursor PIO channel's user area.
 */
static int
nvd0_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
{
	struct nvd0_curs *curs = nvd0_curs(crtc);
	struct nvd0_chan *chan = nvd0_chan(curs);
	nv_wo32(chan->user, 0x0084, (y << 16) | (x & 0xffff));
	nv_wo32(chan->user, 0x0080, 0x00000000);
	return 0;
}
905
906 static void
907 nvd0_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
908 uint32_t start, uint32_t size)
909 {
910 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
911 u32 end = max(start + size, (u32)256);
912 u32 i;
913
914 for (i = start; i < end; i++) {
915 nv_crtc->lut.r[i] = r[i];
916 nv_crtc->lut.g[i] = g[i];
917 nv_crtc->lut.b[i] = b[i];
918 }
919
920 nvd0_crtc_lut_load(crtc);
921 }
922
/* drm .destroy: tear down the head's four EVO channels, release the
 * cursor and LUT buffer objects, and free the nvd0_head allocation.
 * Also used to unwind a partially-completed nvd0_crtc_create().
 */
static void
nvd0_crtc_destroy(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nvd0_disp *disp = nvd0_disp(crtc->dev);
	struct nvd0_head *head = nvd0_head(crtc);
	nvd0_dmac_destroy(disp->core, &head->ovly.base);
	nvd0_pioc_destroy(disp->core, &head->oimm.base);
	nvd0_dmac_destroy(disp->core, &head->sync.base);
	nvd0_pioc_destroy(disp->core, &head->curs.base);
	nouveau_bo_unmap(nv_crtc->cursor.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	nouveau_bo_unmap(nv_crtc->lut.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	drm_crtc_cleanup(crtc);
	kfree(crtc);	/* frees the containing nvd0_head */
}
940
/* CRTC helper callbacks (modeset sequencing). */
static const struct drm_crtc_helper_funcs nvd0_crtc_hfunc = {
	.dpms = nvd0_crtc_dpms,
	.prepare = nvd0_crtc_prepare,
	.commit = nvd0_crtc_commit,
	.mode_fixup = nvd0_crtc_mode_fixup,
	.mode_set = nvd0_crtc_mode_set,
	.mode_set_base = nvd0_crtc_mode_set_base,
	.mode_set_base_atomic = nvd0_crtc_mode_set_base_atomic,
	.load_lut = nvd0_crtc_lut_load,
};

/* CRTC entry points (cursor, gamma, flips, teardown). */
static const struct drm_crtc_funcs nvd0_crtc_func = {
	.cursor_set = nvd0_crtc_cursor_set,
	.cursor_move = nvd0_crtc_cursor_move,
	.gamma_set = nvd0_crtc_gamma_set,
	.set_config = drm_crtc_helper_set_config,
	.destroy = nvd0_crtc_destroy,
	.page_flip = nouveau_crtc_page_flip,
};
960
/* nouveau_crtc cursor hooks: cursor position/offset are programmed
 * through the cursor PIO channel instead (see nvd0_crtc_cursor_move),
 * so these are intentionally empty.
 */
static void
nvd0_cursor_set_pos(struct nouveau_crtc *nv_crtc, int x, int y)
{
}

static void
nvd0_cursor_set_offset(struct nouveau_crtc *nv_crtc, uint32_t offset)
{
}
970
/* Create one head: allocate the nvd0_head (CRTC + per-head channels),
 * register it with DRM, allocate/pin/map the LUT and cursor buffer
 * objects, and create the cursor, sync, overlay-immediate and overlay
 * channels.  On any failure the partially-built head is torn down via
 * nvd0_crtc_destroy() (safe because the head is zero-allocated).
 */
static int
nvd0_crtc_create(struct drm_device *dev, struct nouveau_object *core, int index)
{
	struct nvd0_disp *disp = nvd0_disp(dev);
	struct nvd0_head *head;
	struct drm_crtc *crtc;
	int ret, i;

	head = kzalloc(sizeof(*head), GFP_KERNEL);
	if (!head)
		return -ENOMEM;

	head->base.index = index;
	head->base.set_dither = nvd0_crtc_set_dither;
	head->base.set_scale = nvd0_crtc_set_scale;
	head->base.cursor.set_offset = nvd0_cursor_set_offset;
	head->base.cursor.set_pos = nvd0_cursor_set_pos;
	/* identity gamma ramp as the initial LUT contents */
	for (i = 0; i < 256; i++) {
		head->base.lut.r[i] = i << 8;
		head->base.lut.g[i] = i << 8;
		head->base.lut.b[i] = i << 8;
	}

	crtc = &head->base.base;
	drm_crtc_init(dev, crtc, &nvd0_crtc_func);
	drm_crtc_helper_add(crtc, &nvd0_crtc_hfunc);
	drm_mode_crtc_set_gamma_size(crtc, 256);

	/* LUT buffer: 256 entries * 0x20 bytes = 8192 */
	ret = nouveau_bo_new(dev, 8192, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, &head->base.lut.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(head->base.lut.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(head->base.lut.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &head->base.lut.nvbo);
	}

	if (ret)
		goto out;

	nvd0_crtc_lut_load(crtc);

	/* allocate cursor resources */
	ret = nvd0_pioc_create(disp->core, NV50_DISP_CURS_CLASS, index,
			      &(struct nv50_display_curs_class) {
					.head = index,
			      }, sizeof(struct nv50_display_curs_class),
			      &head->curs.base);
	if (ret)
		goto out;

	/* 64x64 ARGB cursor image buffer */
	ret = nouveau_bo_new(dev, 64 * 64 * 4, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, &head->base.cursor.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(head->base.cursor.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(head->base.cursor.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &head->base.cursor.nvbo);
	}

	if (ret)
		goto out;

	/* allocate page flip / sync resources */
	ret = nvd0_dmac_create(disp->core, NV50_DISP_SYNC_CLASS, index,
			      &(struct nv50_display_sync_class) {
					.pushbuf = EVO_PUSH_HANDLE(SYNC, index),
					.head = index,
			      }, sizeof(struct nv50_display_sync_class),
			      disp->sync->bo.offset, &head->sync.base);
	if (ret)
		goto out;

	/* per-head semaphore area in the shared sync bo (slot 1+index) */
	head->sync.sem.offset = EVO_SYNC(1 + index, 0x00);

	/* allocate overlay resources */
	ret = nvd0_pioc_create(disp->core, NV50_DISP_OIMM_CLASS, index,
			      &(struct nv50_display_oimm_class) {
					.head = index,
			      }, sizeof(struct nv50_display_oimm_class),
			      &head->oimm.base);
	if (ret)
		goto out;

	ret = nvd0_dmac_create(disp->core, NV50_DISP_OVLY_CLASS, index,
			      &(struct nv50_display_ovly_class) {
					.pushbuf = EVO_PUSH_HANDLE(OVLY, index),
					.head = index,
			      }, sizeof(struct nv50_display_ovly_class),
			      disp->sync->bo.offset, &head->ovly.base);
	if (ret)
		goto out;

out:
	if (ret)
		nvd0_crtc_destroy(crtc);
	return ret;
}
1071
1072 /******************************************************************************
1073 * DAC
1074 *****************************************************************************/
1075 static void
1076 nvd0_dac_dpms(struct drm_encoder *encoder, int mode)
1077 {
1078 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1079 struct nvd0_disp *disp = nvd0_disp(encoder->dev);
1080 int or = nv_encoder->or;
1081 u32 dpms_ctrl;
1082
1083 dpms_ctrl = 0x00000000;
1084 if (mode == DRM_MODE_DPMS_STANDBY || mode == DRM_MODE_DPMS_OFF)
1085 dpms_ctrl |= 0x00000001;
1086 if (mode == DRM_MODE_DPMS_SUSPEND || mode == DRM_MODE_DPMS_OFF)
1087 dpms_ctrl |= 0x00000004;
1088
1089 nv_call(disp->core, NV50_DISP_DAC_PWR + or, dpms_ctrl);
1090 }
1091
1092 static bool
1093 nvd0_dac_mode_fixup(struct drm_encoder *encoder,
1094 const struct drm_display_mode *mode,
1095 struct drm_display_mode *adjusted_mode)
1096 {
1097 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1098 struct nouveau_connector *nv_connector;
1099
1100 nv_connector = nouveau_encoder_connector_get(nv_encoder);
1101 if (nv_connector && nv_connector->native_mode) {
1102 if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
1103 int id = adjusted_mode->base.id;
1104 *adjusted_mode = *nv_connector->native_mode;
1105 adjusted_mode->base.id = id;
1106 }
1107 }
1108
1109 return true;
1110 }
1111
/* Intentionally empty: all DAC programming happens in
 * nvd0_dac_mode_set(), so there is no work left at commit time.
 */
static void
nvd0_dac_commit(struct drm_encoder *encoder)
{
}
1116
1117 static void
1118 nvd0_dac_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
1119 struct drm_display_mode *adjusted_mode)
1120 {
1121 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1122 struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
1123 u32 syncs, magic, *push;
1124
1125 syncs = 0x00000001;
1126 if (mode->flags & DRM_MODE_FLAG_NHSYNC)
1127 syncs |= 0x00000008;
1128 if (mode->flags & DRM_MODE_FLAG_NVSYNC)
1129 syncs |= 0x00000010;
1130
1131 magic = 0x31ec6000 | (nv_crtc->index << 25);
1132 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
1133 magic |= 0x00000001;
1134
1135 nvd0_dac_dpms(encoder, DRM_MODE_DPMS_ON);
1136
1137 push = evo_wait(nvd0_mast(encoder->dev), 8);
1138 if (push) {
1139 evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
1140 evo_data(push, syncs);
1141 evo_data(push, magic);
1142 evo_mthd(push, 0x0180 + (nv_encoder->or * 0x020), 2);
1143 evo_data(push, 1 << nv_crtc->index);
1144 evo_data(push, 0x00ff);
1145 evo_kick(push, nvd0_mast(encoder->dev));
1146 }
1147
1148 nv_encoder->crtc = encoder->crtc;
1149 }
1150
1151 static void
1152 nvd0_dac_disconnect(struct drm_encoder *encoder)
1153 {
1154 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1155 struct drm_device *dev = encoder->dev;
1156 u32 *push;
1157
1158 if (nv_encoder->crtc) {
1159 nvd0_crtc_prepare(nv_encoder->crtc);
1160
1161 push = evo_wait(nvd0_mast(dev), 4);
1162 if (push) {
1163 evo_mthd(push, 0x0180 + (nv_encoder->or * 0x20), 1);
1164 evo_data(push, 0x00000000);
1165 evo_mthd(push, 0x0080, 1);
1166 evo_data(push, 0x00000000);
1167 evo_kick(push, nvd0_mast(dev));
1168 }
1169
1170 nv_encoder->crtc = NULL;
1171 }
1172 }
1173
1174 static enum drm_connector_status
1175 nvd0_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
1176 {
1177 struct nvd0_disp *disp = nvd0_disp(encoder->dev);
1178 int ret, or = nouveau_encoder(encoder)->or;
1179 u32 load;
1180
1181 ret = nv_exec(disp->core, NV50_DISP_DAC_LOAD + or, &load, sizeof(load));
1182 if (ret || load != 7)
1183 return connector_status_disconnected;
1184
1185 return connector_status_connected;
1186 }
1187
/* Release the DRM core's encoder state, then free our allocation. */
static void
nvd0_dac_destroy(struct drm_encoder *drm_enc)
{
	drm_encoder_cleanup(drm_enc);
	kfree(drm_enc);
}
1194
/* DRM encoder helper vtable for analog (DAC) outputs.  Note that
 * .prepare and .disable both point at nvd0_dac_disconnect(): the output
 * is torn down the same way before a modeset and on disable.
 */
static const struct drm_encoder_helper_funcs nvd0_dac_hfunc = {
	.dpms = nvd0_dac_dpms,
	.mode_fixup = nvd0_dac_mode_fixup,
	.prepare = nvd0_dac_disconnect,
	.commit = nvd0_dac_commit,
	.mode_set = nvd0_dac_mode_set,
	.disable = nvd0_dac_disconnect,
	.get_crtc = nvd0_display_crtc_get,
	.detect = nvd0_dac_detect
};
1205
/* Base DRM encoder vtable for DAC outputs; only destruction is needed. */
static const struct drm_encoder_funcs nvd0_dac_func = {
	.destroy = nvd0_dac_destroy,
};
1209
1210 static int
1211 nvd0_dac_create(struct drm_connector *connector, struct dcb_output *dcbe)
1212 {
1213 struct drm_device *dev = connector->dev;
1214 struct nouveau_encoder *nv_encoder;
1215 struct drm_encoder *encoder;
1216
1217 nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
1218 if (!nv_encoder)
1219 return -ENOMEM;
1220 nv_encoder->dcb = dcbe;
1221 nv_encoder->or = ffs(dcbe->or) - 1;
1222
1223 encoder = to_drm_encoder(nv_encoder);
1224 encoder->possible_crtcs = dcbe->heads;
1225 encoder->possible_clones = 0;
1226 drm_encoder_init(dev, encoder, &nvd0_dac_func, DRM_MODE_ENCODER_DAC);
1227 drm_encoder_helper_add(encoder, &nvd0_dac_hfunc);
1228
1229 drm_mode_connector_attach_encoder(connector, encoder);
1230 return 0;
1231 }
1232
1233 /******************************************************************************
1234 * Audio
1235 *****************************************************************************/
1236 static void
1237 nvd0_audio_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
1238 {
1239 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1240 struct nouveau_connector *nv_connector;
1241 struct drm_device *dev = encoder->dev;
1242 struct nouveau_device *device = nouveau_dev(dev);
1243 int i, or = nv_encoder->or * 0x30;
1244
1245 nv_connector = nouveau_encoder_connector_get(nv_encoder);
1246 if (!drm_detect_monitor_audio(nv_connector->edid))
1247 return;
1248
1249 nv_mask(device, 0x10ec10 + or, 0x80000003, 0x80000001);
1250
1251 drm_edid_to_eld(&nv_connector->base, nv_connector->edid);
1252 if (nv_connector->base.eld[0]) {
1253 u8 *eld = nv_connector->base.eld;
1254
1255 for (i = 0; i < eld[2] * 4; i++)
1256 nv_wr32(device, 0x10ec00 + or, (i << 8) | eld[i]);
1257 for (i = eld[2] * 4; i < 0x60; i++)
1258 nv_wr32(device, 0x10ec00 + or, (i << 8) | 0x00);
1259
1260 nv_mask(device, 0x10ec10 + or, 0x80000002, 0x80000002);
1261 }
1262 }
1263
1264 static void
1265 nvd0_audio_disconnect(struct drm_encoder *encoder)
1266 {
1267 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1268 struct drm_device *dev = encoder->dev;
1269 struct nouveau_device *device = nouveau_dev(dev);
1270 int or = nv_encoder->or * 0x30;
1271
1272 nv_mask(device, 0x10ec10 + or, 0x80000003, 0x80000000);
1273 }
1274
1275 /******************************************************************************
1276 * HDMI
1277 *****************************************************************************/
1278 static void
1279 nvd0_hdmi_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
1280 {
1281 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1282 struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
1283 struct nouveau_connector *nv_connector;
1284 struct drm_device *dev = encoder->dev;
1285 struct nouveau_device *device = nouveau_dev(dev);
1286 int head = nv_crtc->index * 0x800;
1287 u32 rekey = 56; /* binary driver, and tegra constant */
1288 u32 max_ac_packet;
1289
1290 nv_connector = nouveau_encoder_connector_get(nv_encoder);
1291 if (!drm_detect_hdmi_monitor(nv_connector->edid))
1292 return;
1293
1294 max_ac_packet = mode->htotal - mode->hdisplay;
1295 max_ac_packet -= rekey;
1296 max_ac_packet -= 18; /* constant from tegra */
1297 max_ac_packet /= 32;
1298
1299 /* AVI InfoFrame */
1300 nv_mask(device, 0x616714 + head, 0x00000001, 0x00000000);
1301 nv_wr32(device, 0x61671c + head, 0x000d0282);
1302 nv_wr32(device, 0x616720 + head, 0x0000006f);
1303 nv_wr32(device, 0x616724 + head, 0x00000000);
1304 nv_wr32(device, 0x616728 + head, 0x00000000);
1305 nv_wr32(device, 0x61672c + head, 0x00000000);
1306 nv_mask(device, 0x616714 + head, 0x00000001, 0x00000001);
1307
1308 /* ??? InfoFrame? */
1309 nv_mask(device, 0x6167a4 + head, 0x00000001, 0x00000000);
1310 nv_wr32(device, 0x6167ac + head, 0x00000010);
1311 nv_mask(device, 0x6167a4 + head, 0x00000001, 0x00000001);
1312
1313 /* HDMI_CTRL */
1314 nv_mask(device, 0x616798 + head, 0x401f007f, 0x40000000 | rekey |
1315 max_ac_packet << 16);
1316
1317 /* NFI, audio doesn't work without it though.. */
1318 nv_mask(device, 0x616548 + head, 0x00000070, 0x00000000);
1319
1320 nvd0_audio_mode_set(encoder, mode);
1321 }
1322
1323 static void
1324 nvd0_hdmi_disconnect(struct drm_encoder *encoder)
1325 {
1326 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1327 struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc);
1328 struct drm_device *dev = encoder->dev;
1329 struct nouveau_device *device = nouveau_dev(dev);
1330 int head = nv_crtc->index * 0x800;
1331
1332 nvd0_audio_disconnect(encoder);
1333
1334 nv_mask(device, 0x616798 + head, 0x40000000, 0x00000000);
1335 nv_mask(device, 0x6167a4 + head, 0x00000001, 0x00000000);
1336 nv_mask(device, 0x616714 + head, 0x00000001, 0x00000000);
1337 }
1338
1339 /******************************************************************************
1340 * SOR
1341 *****************************************************************************/
1342 static void
1343 nvd0_sor_dp_train_set(struct drm_device *dev, struct dcb_output *dcb, u8 pattern)
1344 {
1345 struct nvd0_disp *disp = nvd0_disp(dev);
1346 const u32 or = ffs(dcb->or) - 1, link = !(dcb->sorconf.link & 1);
1347 const u32 moff = (link << 2) | or;
1348 nv_call(disp->core, NV94_DISP_SOR_DP_TRAIN + moff, pattern);
1349 }
1350
1351 static void
1352 nvd0_sor_dp_train_adj(struct drm_device *dev, struct dcb_output *dcb,
1353 u8 lane, u8 swing, u8 preem)
1354 {
1355 struct nvd0_disp *disp = nvd0_disp(dev);
1356 const u32 or = ffs(dcb->or) - 1, link = !(dcb->sorconf.link & 1);
1357 const u32 moff = (link << 2) | or;
1358 const u32 data = (swing << 8) | preem;
1359 nv_call(disp->core, NV94_DISP_SOR_DP_DRVCTL(lane) + moff, data);
1360 }
1361
1362 static void
1363 nvd0_sor_dp_link_set(struct drm_device *dev, struct dcb_output *dcb, int crtc,
1364 int link_nr, u32 link_bw, bool enhframe)
1365 {
1366 struct nvd0_disp *disp = nvd0_disp(dev);
1367 const u32 or = ffs(dcb->or) - 1, link = !(dcb->sorconf.link & 1);
1368 const u32 moff = (crtc << 3) | (link << 2) | or;
1369 u32 data = ((link_bw / 27000) << 8) | link_nr;
1370 if (enhframe)
1371 data |= NV94_DISP_SOR_DP_LNKCTL_FRAME_ENH;
1372 nv_call(disp->core, NV94_DISP_SOR_DP_LNKCTL + moff, data);
1373 }
1374
1375 static void
1376 nvd0_sor_dp_link_get(struct drm_device *dev, struct dcb_output *dcb,
1377 u32 *link_nr, u32 *link_bw)
1378 {
1379 struct nouveau_device *device = nouveau_dev(dev);
1380 const u32 or = ffs(dcb->or) - 1, link = !(dcb->sorconf.link & 1);
1381 const u32 loff = (or * 0x800) + (link * 0x80);
1382 const u32 soff = (or * 0x800);
1383 u32 dpctrl = nv_rd32(device, 0x61c10c + loff) & 0x000f0000;
1384 u32 clksor = nv_rd32(device, 0x612300 + soff);
1385
1386 if (dpctrl > 0x00030000) *link_nr = 4;
1387 else if (dpctrl > 0x00010000) *link_nr = 2;
1388 else *link_nr = 1;
1389
1390 *link_bw = (clksor & 0x007c0000) >> 18;
1391 *link_bw *= 27000;
1392 }
1393
/* Compute and program the DP transfer-unit (TU) fill value for a head.
 *
 * The value written appears to be derived from the ratio of the pixel
 * datarate to the total link bandwidth (link_nr * link_bw), scaled by
 * 'symbol' to preserve precision in integer arithmetic.  Note that
 * do_div() divides its first argument in place, so the statement order
 * below is load-bearing.
 */
static void
nvd0_sor_dp_calc_tu(struct drm_device *dev, struct dcb_output *dcb,
		    u32 crtc, u32 datarate)
{
	struct nouveau_device *device = nouveau_dev(dev);
	const u32 symbol = 100000;
	const u32 TU = 64;
	u32 link_nr, link_bw;
	u64 ratio, value;

	nvd0_sor_dp_link_get(dev, dcb, &link_nr, &link_bw);

	/* ratio = datarate * symbol / (link_nr * link_bw) */
	ratio = datarate;
	ratio *= symbol;
	do_div(ratio, link_nr * link_bw);

	/* value = (symbol - ratio) * TU * ratio / symbol^2 */
	value = (symbol - ratio) * TU;
	value *= ratio;
	do_div(value, symbol);
	do_div(value, symbol);

	/* +5 fudge and an enable/flag bit — NOTE(review): meaning of
	 * 0x08000000 presumed from the binary driver; unverified */
	value += 5;
	value |= 0x08000000;

	nv_wr32(device, 0x616610 + (crtc * 0x800), value);
}
1420
1421 static void
1422 nvd0_sor_dpms(struct drm_encoder *encoder, int mode)
1423 {
1424 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1425 struct drm_device *dev = encoder->dev;
1426 struct nvd0_disp *disp = nvd0_disp(dev);
1427 struct drm_encoder *partner;
1428 int or = nv_encoder->or;
1429
1430 nv_encoder->last_dpms = mode;
1431
1432 list_for_each_entry(partner, &dev->mode_config.encoder_list, head) {
1433 struct nouveau_encoder *nv_partner = nouveau_encoder(partner);
1434
1435 if (partner->encoder_type != DRM_MODE_ENCODER_TMDS)
1436 continue;
1437
1438 if (nv_partner != nv_encoder &&
1439 nv_partner->dcb->or == nv_encoder->dcb->or) {
1440 if (nv_partner->last_dpms == DRM_MODE_DPMS_ON)
1441 return;
1442 break;
1443 }
1444 }
1445
1446 nv_call(disp->core, NV50_DISP_SOR_PWR + or, (mode == DRM_MODE_DPMS_ON));
1447
1448 if (nv_encoder->dcb->type == DCB_OUTPUT_DP) {
1449 struct dp_train_func func = {
1450 .link_set = nvd0_sor_dp_link_set,
1451 .train_set = nvd0_sor_dp_train_set,
1452 .train_adj = nvd0_sor_dp_train_adj
1453 };
1454
1455 nouveau_dp_dpms(encoder, mode, nv_encoder->dp.datarate, &func);
1456 }
1457 }
1458
1459 static bool
1460 nvd0_sor_mode_fixup(struct drm_encoder *encoder,
1461 const struct drm_display_mode *mode,
1462 struct drm_display_mode *adjusted_mode)
1463 {
1464 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1465 struct nouveau_connector *nv_connector;
1466
1467 nv_connector = nouveau_encoder_connector_get(nv_encoder);
1468 if (nv_connector && nv_connector->native_mode) {
1469 if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
1470 int id = adjusted_mode->base.id;
1471 *adjusted_mode = *nv_connector->native_mode;
1472 adjusted_mode->base.id = id;
1473 }
1474 }
1475
1476 return true;
1477 }
1478
1479 static void
1480 nvd0_sor_disconnect(struct drm_encoder *encoder)
1481 {
1482 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1483 struct drm_device *dev = encoder->dev;
1484 u32 *push;
1485
1486 if (nv_encoder->crtc) {
1487 nvd0_crtc_prepare(nv_encoder->crtc);
1488
1489 push = evo_wait(nvd0_mast(dev), 4);
1490 if (push) {
1491 evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
1492 evo_data(push, 0x00000000);
1493 evo_mthd(push, 0x0080, 1);
1494 evo_data(push, 0x00000000);
1495 evo_kick(push, nvd0_mast(dev));
1496 }
1497
1498 nvd0_hdmi_disconnect(encoder);
1499
1500 nv_encoder->crtc = NULL;
1501 nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
1502 }
1503 }
1504
1505 static void
1506 nvd0_sor_prepare(struct drm_encoder *encoder)
1507 {
1508 nvd0_sor_disconnect(encoder);
1509 if (nouveau_encoder(encoder)->dcb->type == DCB_OUTPUT_DP)
1510 evo_sync(encoder->dev);
1511 }
1512
/* Intentionally empty: all SOR programming happens in
 * nvd0_sor_mode_set(), so there is no work left at commit time.
 */
static void
nvd0_sor_commit(struct drm_encoder *encoder)
{
}
1517
/* Program the SOR for the given (already fixed-up) mode.
 *
 * Derives sync polarity flags, the per-output mode_ctrl word (head
 * routing + protocol/link selection) and the or_config word (link
 * configuration, depth) based on the DCB output type, powers the
 * output up, and pushes the state through the master evo channel.
 *
 * 'umode' is the user-requested mode; 'mode' is the adjusted mode
 * actually programmed.
 */
static void
nvd0_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *umode,
		  struct drm_display_mode *mode)
{
	struct drm_device *dev = encoder->dev;
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
	struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
	struct nouveau_connector *nv_connector;
	struct nvbios *bios = &drm->vbios;
	u32 mode_ctrl = (1 << nv_crtc->index);
	u32 syncs, magic, *push;
	u32 or_config;

	/* sync polarity flags for the head */
	syncs = 0x00000001;
	if (mode->flags & DRM_MODE_FLAG_NHSYNC)
		syncs |= 0x00000008;
	if (mode->flags & DRM_MODE_FLAG_NVSYNC)
		syncs |= 0x00000010;

	magic = 0x31ec6000 | (nv_crtc->index << 25);
	if (mode->flags & DRM_MODE_FLAG_INTERLACE)
		magic |= 0x00000001;

	/* NOTE(review): nv_connector may be NULL here and is dereferenced
	 * in the LVDS and DP cases below without a check — worth auditing */
	nv_connector = nouveau_encoder_connector_get(nv_encoder);
	switch (nv_encoder->dcb->type) {
	case DCB_OUTPUT_TMDS:
		/* pick single-link A, dual-link, or link B depending on
		 * which sublink the DCB assigns and the pixel clock */
		if (nv_encoder->dcb->sorconf.link & 1) {
			if (mode->clock < 165000)
				mode_ctrl |= 0x00000100;
			else
				mode_ctrl |= 0x00000500;
		} else {
			mode_ctrl |= 0x00000200;
		}

		or_config = (mode_ctrl & 0x00000f00) >> 8;
		if (mode->clock >= 165000)
			or_config |= 0x0100;

		nvd0_hdmi_mode_set(encoder, mode);
		break;
	case DCB_OUTPUT_LVDS:
		or_config = (mode_ctrl & 0x00000f00) >> 8;
		if (bios->fp_no_ddc) {
			/* no DDC: trust the VBIOS panel strap flags */
			if (bios->fp.dual_link)
				or_config |= 0x0100;
			if (bios->fp.if_is_24bit)
				or_config |= 0x0200;
		} else {
			/* SPWG panels encode dual-link in EDID byte 121;
			 * otherwise use the VBIOS transition clock */
			if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
				if (((u8 *)nv_connector->edid)[121] == 2)
					or_config |= 0x0100;
			} else
			if (mode->clock >= bios->fp.duallink_transition_clk) {
				or_config |= 0x0100;
			}

			/* 24-bit panel strap differs for dual vs single link */
			if (or_config & 0x0100) {
				if (bios->fp.strapless_is_24bit & 2)
					or_config |= 0x0200;
			} else {
				if (bios->fp.strapless_is_24bit & 1)
					or_config |= 0x0200;
			}

			if (nv_connector->base.display_info.bpc == 8)
				or_config |= 0x0200;

		}
		break;
	case DCB_OUTPUT_DP:
		/* datarate = clock * bytes-per-pixel (18bpp or 24bpp) / 8 */
		if (nv_connector->base.display_info.bpc == 6) {
			nv_encoder->dp.datarate = mode->clock * 18 / 8;
			syncs |= 0x00000002 << 6;
		} else {
			nv_encoder->dp.datarate = mode->clock * 24 / 8;
			syncs |= 0x00000005 << 6;
		}

		if (nv_encoder->dcb->sorconf.link & 1)
			mode_ctrl |= 0x00000800;
		else
			mode_ctrl |= 0x00000900;

		or_config = (mode_ctrl & 0x00000f00) >> 8;
		break;
	default:
		BUG_ON(1);
		break;
	}

	nvd0_sor_dpms(encoder, DRM_MODE_DPMS_ON);

	if (nv_encoder->dcb->type == DCB_OUTPUT_DP) {
		/* link is up after dpms; program the transfer unit now */
		nvd0_sor_dp_calc_tu(dev, nv_encoder->dcb, nv_crtc->index,
				    nv_encoder->dp.datarate);
	}

	push = evo_wait(nvd0_mast(dev), 8);
	if (push) {
		evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
		evo_data(push, syncs);
		evo_data(push, magic);
		evo_mthd(push, 0x0200 + (nv_encoder->or * 0x020), 2);
		evo_data(push, mode_ctrl);
		evo_data(push, or_config);
		evo_kick(push, nvd0_mast(dev));
	}

	nv_encoder->crtc = encoder->crtc;
}
1630
/* Release the DRM core's encoder state, then free our allocation. */
static void
nvd0_sor_destroy(struct drm_encoder *drm_enc)
{
	drm_encoder_cleanup(drm_enc);
	kfree(drm_enc);
}
1637
/* DRM encoder helper vtable for SOR (TMDS/LVDS/DP) outputs.  No
 * .detect hook is provided here, unlike the DAC vtable.
 */
static const struct drm_encoder_helper_funcs nvd0_sor_hfunc = {
	.dpms = nvd0_sor_dpms,
	.mode_fixup = nvd0_sor_mode_fixup,
	.prepare = nvd0_sor_prepare,
	.commit = nvd0_sor_commit,
	.mode_set = nvd0_sor_mode_set,
	.disable = nvd0_sor_disconnect,
	.get_crtc = nvd0_display_crtc_get,
};
1647
/* Base DRM encoder vtable for SOR outputs; only destruction is needed. */
static const struct drm_encoder_funcs nvd0_sor_func = {
	.destroy = nvd0_sor_destroy,
};
1651
1652 static int
1653 nvd0_sor_create(struct drm_connector *connector, struct dcb_output *dcbe)
1654 {
1655 struct drm_device *dev = connector->dev;
1656 struct nouveau_encoder *nv_encoder;
1657 struct drm_encoder *encoder;
1658
1659 nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
1660 if (!nv_encoder)
1661 return -ENOMEM;
1662 nv_encoder->dcb = dcbe;
1663 nv_encoder->or = ffs(dcbe->or) - 1;
1664 nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
1665
1666 encoder = to_drm_encoder(nv_encoder);
1667 encoder->possible_crtcs = dcbe->heads;
1668 encoder->possible_clones = 0;
1669 drm_encoder_init(dev, encoder, &nvd0_sor_func, DRM_MODE_ENCODER_TMDS);
1670 drm_encoder_helper_add(encoder, &nvd0_sor_hfunc);
1671
1672 drm_mode_connector_attach_encoder(connector, encoder);
1673 return 0;
1674 }
1675
1676 /******************************************************************************
1677 * Init
1678 *****************************************************************************/
/* Intentionally empty: nothing needs quiescing at fini time; all
 * resources are released in nvd0_display_destroy().
 */
void
nvd0_display_fini(struct drm_device *dev)
{
}
1683
1684 int
1685 nvd0_display_init(struct drm_device *dev)
1686 {
1687 u32 *push = evo_wait(nvd0_mast(dev), 32);
1688 if (push) {
1689 evo_mthd(push, 0x0088, 1);
1690 evo_data(push, NvEvoSync);
1691 evo_mthd(push, 0x0084, 1);
1692 evo_data(push, 0x00000000);
1693 evo_mthd(push, 0x0084, 1);
1694 evo_data(push, 0x80000000);
1695 evo_mthd(push, 0x008c, 1);
1696 evo_data(push, 0x00000000);
1697 evo_kick(push, nvd0_mast(dev));
1698 return 0;
1699 }
1700
1701 return -EBUSY;
1702 }
1703
/* Tear down the display state created by nvd0_display_create(): master
 * evo channel first, then the shared sync buffer, then the container.
 */
void
nvd0_display_destroy(struct drm_device *dev)
{
	struct nvd0_disp *disp = nvd0_disp(dev);

	nvd0_dmac_destroy(disp->core, &disp->mast.base);

	nouveau_bo_unmap(disp->sync);
	nouveau_bo_ref(NULL, &disp->sync);

	/* clear the back-pointer before freeing so nvd0_disp(dev) users
	 * can't see a dangling pointer */
	nouveau_display(dev)->priv = NULL;
	kfree(disp);
}
1717
/* Create the nvd0 display state: shared sync buffer, core display
 * object, master evo channel, one CRTC per hardware head, and
 * encoder/connector objects from the VBIOS DCB table.
 *
 * Uses goto-based cleanup: any failure after allocation falls through
 * to nvd0_display_destroy() via the 'out' label.
 *
 * Returns 0 on success or a negative errno.
 */
int
nvd0_display_create(struct drm_device *dev)
{
	/* preferred display classes, newest first */
	static const u16 oclass[] = {
		NVE0_DISP_CLASS,
		NVD0_DISP_CLASS,
	};
	struct nouveau_device *device = nouveau_dev(dev);
	struct nouveau_drm *drm = nouveau_drm(dev);
	struct dcb_table *dcb = &drm->vbios.dcb;
	struct drm_connector *connector, *tmp;
	struct nvd0_disp *disp;
	struct dcb_output *dcbe;
	int crtcs, ret, i;

	disp = kzalloc(sizeof(*disp), GFP_KERNEL);
	if (!disp)
		return -ENOMEM;

	nouveau_display(dev)->priv = disp;
	nouveau_display(dev)->dtor = nvd0_display_destroy;
	nouveau_display(dev)->init = nvd0_display_init;
	nouveau_display(dev)->fini = nvd0_display_fini;

	/* small shared memory area we use for notifiers and semaphores */
	ret = nouveau_bo_new(dev, 4096, 0x1000, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, &disp->sync);
	if (!ret) {
		ret = nouveau_bo_pin(disp->sync, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(disp->sync);
		if (ret)
			nouveau_bo_ref(NULL, &disp->sync);
	}

	if (ret)
		goto out;

	/* attempt to allocate a supported evo display class */
	ret = -ENODEV;
	for (i = 0; ret && i < ARRAY_SIZE(oclass); i++) {
		ret = nouveau_object_new(nv_object(drm), NVDRM_DEVICE,
					 0xd1500000, oclass[i], NULL, 0,
					 &disp->core);
	}

	if (ret)
		goto out;

	/* allocate master evo channel */
	ret = nvd0_dmac_create(disp->core, NV50_DISP_MAST_CLASS, 0,
			      &(struct nv50_display_mast_class) {
					.pushbuf = EVO_PUSH_HANDLE(MAST, 0),
			      }, sizeof(struct nv50_display_mast_class),
			      disp->sync->bo.offset, &disp->mast.base);
	if (ret)
		goto out;

	/* create crtc objects to represent the hw heads */
	/* NOTE(review): 0x022448 presumably reports the head count on
	 * this chipset — confirm against nouveau register docs */
	crtcs = nv_rd32(device, 0x022448);
	for (i = 0; i < crtcs; i++) {
		ret = nvd0_crtc_create(dev, disp->core, i);
		if (ret)
			goto out;
	}

	/* create encoder/connector objects based on VBIOS DCB table */
	for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
		connector = nouveau_connector_create(dev, dcbe->connector);
		if (IS_ERR(connector))
			continue;

		if (dcbe->location != DCB_LOC_ON_CHIP) {
			NV_WARN(drm, "skipping off-chip encoder %d/%d\n",
				dcbe->type, ffs(dcbe->or) - 1);
			continue;
		}

		switch (dcbe->type) {
		case DCB_OUTPUT_TMDS:
		case DCB_OUTPUT_LVDS:
		case DCB_OUTPUT_DP:
			nvd0_sor_create(connector, dcbe);
			break;
		case DCB_OUTPUT_ANALOG:
			nvd0_dac_create(connector, dcbe);
			break;
		default:
			NV_WARN(drm, "skipping unsupported encoder %d/%d\n",
				dcbe->type, ffs(dcbe->or) - 1);
			continue;
		}
	}

	/* cull any connectors we created that don't have an encoder */
	list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
		if (connector->encoder_ids[0])
			continue;

		NV_WARN(drm, "%s has no encoders, removing\n",
			drm_get_connector_name(connector));
		connector->funcs->destroy(connector);
	}

out:
	if (ret)
		nvd0_display_destroy(dev);
	return ret;
}
This page took 0.075829 seconds and 5 git commands to generate.