drm/nouveau: port all engines to new engine module format
deliverable/linux.git: drivers/gpu/drm/nouveau/nouveau_display.c
/*
 * Copyright (C) 2008 Maarten Maathuis.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial
 * portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE COPYRIGHT OWNER(S) AND/OR ITS SUPPLIERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 */

#include "drmP.h"
#include "drm_crtc_helper.h"
#include "nouveau_drv.h"
#include "nouveau_fb.h"
#include "nouveau_fbcon.h"
#include "nouveau_hw.h"
#include "nouveau_crtc.h"
#include "nouveau_dma.h"
#include "nouveau_connector.h"
#include "nv50_display.h"

#include "nouveau_fence.h"

#include <subdev/bios/gpio.h>

static void
nouveau_user_framebuffer_destroy(struct drm_framebuffer *drm_fb)
{
        struct nouveau_framebuffer *fb = nouveau_framebuffer(drm_fb);

        if (fb->nvbo)
                drm_gem_object_unreference_unlocked(fb->nvbo->gem);

        drm_framebuffer_cleanup(drm_fb);
        kfree(fb);
}

static int
nouveau_user_framebuffer_create_handle(struct drm_framebuffer *drm_fb,
                                       struct drm_file *file_priv,
                                       unsigned int *handle)
{
        struct nouveau_framebuffer *fb = nouveau_framebuffer(drm_fb);

        return drm_gem_handle_create(file_priv, fb->nvbo->gem, handle);
}

static const struct drm_framebuffer_funcs nouveau_framebuffer_funcs = {
        .destroy = nouveau_user_framebuffer_destroy,
        .create_handle = nouveau_user_framebuffer_create_handle,
};

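/*
 * Wrap a nouveau_bo in a DRM framebuffer.  On NV50 and later boards this
 * also precomputes the parameters the EVO display channel needs to scan
 * out the buffer: which DMA object to use (based on the tiling layout),
 * the EVO surface format for the requested depth, and the pitch word
 * (linear pitch for untiled buffers, tile mode otherwise).
 */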
int
nouveau_framebuffer_init(struct drm_device *dev,
                         struct nouveau_framebuffer *nv_fb,
                         struct drm_mode_fb_cmd2 *mode_cmd,
                         struct nouveau_bo *nvbo)
{
        struct drm_nouveau_private *dev_priv = dev->dev_private;
        struct drm_framebuffer *fb = &nv_fb->base;
        int ret;

        ret = drm_framebuffer_init(dev, fb, &nouveau_framebuffer_funcs);
        if (ret)
                return ret;

        drm_helper_mode_fill_fb_struct(fb, mode_cmd);
        nv_fb->nvbo = nvbo;

        if (dev_priv->card_type >= NV_50) {
                u32 tile_flags = nouveau_bo_tile_layout(nvbo);
                if (tile_flags == 0x7a00 ||
                    tile_flags == 0xfe00)
                        nv_fb->r_dma = NvEvoFB32;
                else
                if (tile_flags == 0x7000)
                        nv_fb->r_dma = NvEvoFB16;
                else
                        nv_fb->r_dma = NvEvoVRAM_LP;

                switch (fb->depth) {
                case  8: nv_fb->r_format = NV50_EVO_CRTC_FB_DEPTH_8; break;
                case 15: nv_fb->r_format = NV50_EVO_CRTC_FB_DEPTH_15; break;
                case 16: nv_fb->r_format = NV50_EVO_CRTC_FB_DEPTH_16; break;
                case 24:
                case 32: nv_fb->r_format = NV50_EVO_CRTC_FB_DEPTH_24; break;
                case 30: nv_fb->r_format = NV50_EVO_CRTC_FB_DEPTH_30; break;
                default:
                         NV_ERROR(dev, "unknown depth %d\n", fb->depth);
                         return -EINVAL;
                }

                if (dev_priv->chipset == 0x50)
                        nv_fb->r_format |= (tile_flags << 8);

                if (!tile_flags) {
                        if (dev_priv->card_type < NV_D0)
                                nv_fb->r_pitch = 0x00100000 | fb->pitches[0];
                        else
                                nv_fb->r_pitch = 0x01000000 | fb->pitches[0];
                } else {
                        u32 mode = nvbo->tile_mode;
                        if (dev_priv->card_type >= NV_C0)
                                mode >>= 4;
                        nv_fb->r_pitch = ((fb->pitches[0] / 4) << 4) | mode;
                }
        }

        return 0;
}

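/*
 * The DRM ->fb_create() hook: look up the GEM object named by userspace
 * and wrap it in a nouveau_framebuffer.
 */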
static struct drm_framebuffer *
nouveau_user_framebuffer_create(struct drm_device *dev,
                                struct drm_file *file_priv,
                                struct drm_mode_fb_cmd2 *mode_cmd)
{
        struct nouveau_framebuffer *nouveau_fb;
        struct drm_gem_object *gem;
        int ret;

        gem = drm_gem_object_lookup(dev, file_priv, mode_cmd->handles[0]);
        if (!gem)
                return ERR_PTR(-ENOENT);

        nouveau_fb = kzalloc(sizeof(struct nouveau_framebuffer), GFP_KERNEL);
        if (!nouveau_fb) {
                drm_gem_object_unreference_unlocked(gem);
                return ERR_PTR(-ENOMEM);
        }

        ret = nouveau_framebuffer_init(dev, nouveau_fb, mode_cmd, nouveau_gem_object(gem));
        if (ret) {
                drm_gem_object_unreference_unlocked(gem);
                return ERR_PTR(ret);
        }

        return &nouveau_fb->base;
}

static const struct drm_mode_config_funcs nouveau_mode_config_funcs = {
        .fb_create = nouveau_user_framebuffer_create,
        .output_poll_changed = nouveau_fbcon_output_poll_changed,
};


struct nouveau_drm_prop_enum_list {
        u8 gen_mask;
        int type;
        char *name;
};

static struct nouveau_drm_prop_enum_list underscan[] = {
        { 6, UNDERSCAN_AUTO, "auto" },
        { 6, UNDERSCAN_OFF, "off" },
        { 6, UNDERSCAN_ON, "on" },
        {}
};

static struct nouveau_drm_prop_enum_list dither_mode[] = {
        { 7, DITHERING_MODE_AUTO, "auto" },
        { 7, DITHERING_MODE_OFF, "off" },
        { 1, DITHERING_MODE_ON, "on" },
        { 6, DITHERING_MODE_STATIC2X2, "static 2x2" },
        { 6, DITHERING_MODE_DYNAMIC2X2, "dynamic 2x2" },
        { 4, DITHERING_MODE_TEMPORAL, "temporal" },
        {}
};

static struct nouveau_drm_prop_enum_list dither_depth[] = {
        { 6, DITHERING_DEPTH_AUTO, "auto" },
        { 6, DITHERING_DEPTH_6BPC, "6 bpc" },
        { 6, DITHERING_DEPTH_8BPC, "8 bpc" },
        {}
};

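/*
 * Build a DRM enum property containing only the entries whose gen_mask
 * includes the current display generation (bit 0 = pre-NV50, bit 1 =
 * NV50 family, bit 2 = NVD0 and later, matching the "gen" value computed
 * in nouveau_display_create()).
 */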
#define PROP_ENUM(p,gen,n,list) do {                                          \
        struct nouveau_drm_prop_enum_list *l = (list);                        \
        int c = 0;                                                            \
        while (l->gen_mask) {                                                 \
                if (l->gen_mask & (1 << (gen)))                               \
                        c++;                                                  \
                l++;                                                          \
        }                                                                     \
        if (c) {                                                              \
                p = drm_property_create(dev, DRM_MODE_PROP_ENUM, n, c);       \
                l = (list);                                                   \
                c = 0;                                                        \
                while (p && l->gen_mask) {                                    \
                        if (l->gen_mask & (1 << (gen))) {                     \
                                drm_property_add_enum(p, c, l->type, l->name); \
                                c++;                                          \
                        }                                                     \
                        l++;                                                  \
                }                                                             \
        }                                                                     \
} while (0)

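/*
 * Bring the display engine up: run the per-family init, make sure the
 * internal panel is powered, then enable output polling and hotplug
 * interrupts.
 */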
int
nouveau_display_init(struct drm_device *dev)
{
        struct drm_nouveau_private *dev_priv = dev->dev_private;
        struct nouveau_display_engine *disp = &dev_priv->engine.display;
        struct drm_connector *connector;
        int ret;

        ret = disp->init(dev);
        if (ret)
                return ret;

        /* power on internal panel if it's not already.  the init tables of
         * some vbios default this to off for some reason, causing the
         * panel to not work after resume
         */
        if (nouveau_gpio_func_get(dev, DCB_GPIO_PANEL_POWER) == 0) {
                nouveau_gpio_func_set(dev, DCB_GPIO_PANEL_POWER, true);
                msleep(300);
        }

        /* enable polling for external displays */
        drm_kms_helper_poll_enable(dev);

        /* enable hotplug interrupts */
        list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
                struct nouveau_connector *conn = nouveau_connector(connector);
                nouveau_gpio_irq(dev, 0, conn->hpd, 0xff, true);
        }

        return ret;
}

void
nouveau_display_fini(struct drm_device *dev)
{
        struct drm_nouveau_private *dev_priv = dev->dev_private;
        struct nouveau_display_engine *disp = &dev_priv->engine.display;
        struct drm_connector *connector;

        /* disable hotplug interrupts */
        list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
                struct nouveau_connector *conn = nouveau_connector(connector);
                nouveau_gpio_irq(dev, 0, conn->hpd, 0xff, false);
        }

        drm_kms_helper_poll_disable(dev);
        disp->fini(dev);
}

static void
nouveau_display_vblank_notify(void *data, int crtc)
{
        drm_handle_vblank(data, crtc);
}

static void
nouveau_display_vblank_get(void *data, int crtc)
{
        drm_vblank_get(data, crtc);
}

static void
nouveau_display_vblank_put(void *data, int crtc)
{
        drm_vblank_put(data, crtc);
}

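/*
 * One-time KMS setup: initialise the mode config, create the connector
 * properties appropriate to this display generation (gen 0 = pre-NV50,
 * gen 1 = NV50 family, gen 2 = NVD0 and later), set the framebuffer size
 * limits, and initialise output polling and vblank handling.
 */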
int
nouveau_display_create(struct drm_device *dev)
{
        struct drm_nouveau_private *dev_priv = dev->dev_private;
        struct nouveau_display_engine *disp = &dev_priv->engine.display;
        int ret, gen;

        drm_mode_config_init(dev);
        drm_mode_create_scaling_mode_property(dev);
        drm_mode_create_dvi_i_properties(dev);

        if (dev_priv->card_type < NV_50)
                gen = 0;
        else
        if (dev_priv->card_type < NV_D0)
                gen = 1;
        else
                gen = 2;

        PROP_ENUM(disp->dithering_mode, gen, "dithering mode", dither_mode);
        PROP_ENUM(disp->dithering_depth, gen, "dithering depth", dither_depth);
        PROP_ENUM(disp->underscan_property, gen, "underscan", underscan);

        disp->underscan_hborder_property =
                drm_property_create_range(dev, 0, "underscan hborder", 0, 128);

        disp->underscan_vborder_property =
                drm_property_create_range(dev, 0, "underscan vborder", 0, 128);

        if (gen == 1) {
                disp->vibrant_hue_property =
                        drm_property_create(dev, DRM_MODE_PROP_RANGE,
                                            "vibrant hue", 2);
                disp->vibrant_hue_property->values[0] = 0;
                disp->vibrant_hue_property->values[1] = 180; /* -90..+90 */

                disp->color_vibrance_property =
                        drm_property_create(dev, DRM_MODE_PROP_RANGE,
                                            "color vibrance", 2);
                disp->color_vibrance_property->values[0] = 0;
                disp->color_vibrance_property->values[1] = 200; /* -100..+100 */
        }

        dev->mode_config.funcs = &nouveau_mode_config_funcs;
        dev->mode_config.fb_base = pci_resource_start(dev->pdev, 1);

        dev->mode_config.min_width = 0;
        dev->mode_config.min_height = 0;
        if (dev_priv->card_type < NV_10) {
                dev->mode_config.max_width = 2048;
                dev->mode_config.max_height = 2048;
        } else
        if (dev_priv->card_type < NV_50) {
                dev->mode_config.max_width = 4096;
                dev->mode_config.max_height = 4096;
        } else {
                dev->mode_config.max_width = 8192;
                dev->mode_config.max_height = 8192;
        }

        dev->mode_config.preferred_depth = 24;
        dev->mode_config.prefer_shadow = 1;

        drm_kms_helper_poll_init(dev);
        drm_kms_helper_poll_disable(dev);

        ret = disp->create(dev);
        if (ret)
                goto disp_create_err;

        if (dev->mode_config.num_crtc) {
                ret = drm_vblank_init(dev, dev->mode_config.num_crtc);
                if (ret)
                        goto vblank_err;
        }

        return 0;

vblank_err:
        disp->destroy(dev);
disp_create_err:
        drm_kms_helper_poll_fini(dev);
        drm_mode_config_cleanup(dev);
        return ret;
}

void
nouveau_display_destroy(struct drm_device *dev)
{
        struct drm_nouveau_private *dev_priv = dev->dev_private;
        struct nouveau_display_engine *disp = &dev_priv->engine.display;

        drm_vblank_cleanup(dev);

        disp->destroy(dev);

        drm_kms_helper_poll_fini(dev);
        drm_mode_config_cleanup(dev);
}

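/*
 * Vblank interrupt enable/disable.  The register layout differs per
 * generation: NVD0+ uses a per-head display interrupt register, NV50
 * uses PDISPLAY_INTR_EN_1, and earlier chips use the per-CRTC PCRTC
 * interrupt enable.
 */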
int
nouveau_vblank_enable(struct drm_device *dev, int crtc)
{
        struct drm_nouveau_private *dev_priv = dev->dev_private;

        if (dev_priv->card_type >= NV_D0)
                nv_mask(dev, 0x6100c0 + (crtc * 0x800), 1, 1);
        else
        if (dev_priv->card_type >= NV_50)
                nv_mask(dev, NV50_PDISPLAY_INTR_EN_1, 0,
                        NV50_PDISPLAY_INTR_EN_1_VBLANK_CRTC_(crtc));
        else
                NVWriteCRTC(dev, crtc, NV_PCRTC_INTR_EN_0,
                            NV_PCRTC_INTR_0_VBLANK);

        return 0;
}

void
nouveau_vblank_disable(struct drm_device *dev, int crtc)
{
        struct drm_nouveau_private *dev_priv = dev->dev_private;

        if (dev_priv->card_type >= NV_D0)
                nv_mask(dev, 0x6100c0 + (crtc * 0x800), 1, 0);
        else
        if (dev_priv->card_type >= NV_50)
                nv_mask(dev, NV50_PDISPLAY_INTR_EN_1,
                        NV50_PDISPLAY_INTR_EN_1_VBLANK_CRTC_(crtc), 0);
        else
                NVWriteCRTC(dev, crtc, NV_PCRTC_INTR_EN_0, 0);
}

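/*
 * Pin the new framebuffer into VRAM and reserve both the old and new
 * buffer objects so neither can be moved or evicted while the flip is
 * in flight.
 */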
static int
nouveau_page_flip_reserve(struct nouveau_bo *old_bo,
                          struct nouveau_bo *new_bo)
{
        int ret;

        ret = nouveau_bo_pin(new_bo, TTM_PL_FLAG_VRAM);
        if (ret)
                return ret;

        ret = ttm_bo_reserve(&new_bo->bo, false, false, false, 0);
        if (ret)
                goto fail;

        ret = ttm_bo_reserve(&old_bo->bo, false, false, false, 0);
        if (ret)
                goto fail_unreserve;

        return 0;

fail_unreserve:
        ttm_bo_unreserve(&new_bo->bo);
fail:
        nouveau_bo_unpin(new_bo);
        return ret;
}

static void
nouveau_page_flip_unreserve(struct nouveau_bo *old_bo,
                            struct nouveau_bo *new_bo,
                            struct nouveau_fence *fence)
{
        nouveau_bo_fence(new_bo, fence);
        ttm_bo_unreserve(&new_bo->bo);

        nouveau_bo_fence(old_bo, fence);
        ttm_bo_unreserve(&old_bo->bo);

        nouveau_bo_unpin(old_bo);
}

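/*
 * Queue the flip state on the channel's pending-flip list, synchronise
 * with any rendering still outstanding on the old framebuffer, then emit
 * the flip method followed by a fence marking its completion.
 */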
static int
nouveau_page_flip_emit(struct nouveau_channel *chan,
                       struct nouveau_bo *old_bo,
                       struct nouveau_bo *new_bo,
                       struct nouveau_page_flip_state *s,
                       struct nouveau_fence **pfence)
{
        struct nouveau_fence_chan *fctx = chan->fence;
        struct drm_device *dev = nouveau_drv(chan->drm);
        struct drm_nouveau_private *dev_priv = dev->dev_private;
        unsigned long flags;
        int ret;

        /* Queue it to the pending list */
        spin_lock_irqsave(&dev->event_lock, flags);
        list_add_tail(&s->head, &fctx->flip);
        spin_unlock_irqrestore(&dev->event_lock, flags);

        /* Synchronize with the old framebuffer */
        ret = nouveau_fence_sync(old_bo->bo.sync_obj, chan);
        if (ret)
                goto fail;

        /* Emit the pageflip */
        ret = RING_SPACE(chan, 3);
        if (ret)
                goto fail;

        if (dev_priv->card_type < NV_C0) {
                BEGIN_NV04(chan, NvSubSw, NV_SW_PAGE_FLIP, 1);
                OUT_RING  (chan, 0x00000000);
                OUT_RING  (chan, 0x00000000);
        } else {
                BEGIN_NVC0(chan, 0, NV10_SUBCHAN_REF_CNT, 1);
                OUT_RING  (chan, 0);
                BEGIN_IMC0(chan, 0, NVSW_SUBCHAN_PAGE_FLIP, 0x0000);
        }
        FIRE_RING (chan);

        ret = nouveau_fence_new(chan, pfence);
        if (ret)
                goto fail;

        return 0;
fail:
        spin_lock_irqsave(&dev->event_lock, flags);
        list_del(&s->head);
        spin_unlock_irqrestore(&dev->event_lock, flags);
        return ret;
}

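/*
 * The DRM ->page_flip() entry point: reserve the buffers, pick the
 * channel the flip will be emitted on (preferring the channel that last
 * rendered to the new framebuffer), kick the display engine on NV50+,
 * emit the flip, and update crtc->fb.
 */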
int
nouveau_crtc_page_flip(struct drm_crtc *crtc, struct drm_framebuffer *fb,
                       struct drm_pending_vblank_event *event)
{
        struct drm_device *dev = crtc->dev;
        struct drm_nouveau_private *dev_priv = dev->dev_private;
        struct nouveau_bo *old_bo = nouveau_framebuffer(crtc->fb)->nvbo;
        struct nouveau_bo *new_bo = nouveau_framebuffer(fb)->nvbo;
        struct nouveau_page_flip_state *s;
        struct nouveau_channel *chan = NULL;
        struct nouveau_fence *fence;
        int ret;

        if (!nvdrm_channel(dev))
                return -ENODEV;

        s = kzalloc(sizeof(*s), GFP_KERNEL);
        if (!s)
                return -ENOMEM;

        /* Don't let the buffers go away while we flip */
        ret = nouveau_page_flip_reserve(old_bo, new_bo);
        if (ret)
                goto fail_free;

        /* Initialize a page flip struct */
        *s = (struct nouveau_page_flip_state)
                { { }, event, nouveau_crtc(crtc)->index,
                  fb->bits_per_pixel, fb->pitches[0], crtc->x, crtc->y,
                  new_bo->bo.offset };

        /* Choose the channel the flip will be handled in */
        fence = new_bo->bo.sync_obj;
        if (fence)
                chan = fence->channel;
        if (!chan)
                chan = nvdrm_channel(dev);
        mutex_lock(nvchan_mutex(chan));

        /* Emit a page flip */
        if (dev_priv->card_type >= NV_50) {
                if (dev_priv->card_type >= NV_D0)
                        ret = nvd0_display_flip_next(crtc, fb, chan, 0);
                else
                        ret = nv50_display_flip_next(crtc, fb, chan);
                if (ret) {
                        mutex_unlock(nvchan_mutex(chan));
                        goto fail_unreserve;
                }
        }

        ret = nouveau_page_flip_emit(chan, old_bo, new_bo, s, &fence);
        mutex_unlock(nvchan_mutex(chan));
        if (ret)
                goto fail_unreserve;

        /* Update the crtc struct and cleanup */
        crtc->fb = fb;

        nouveau_page_flip_unreserve(old_bo, new_bo, fence);
        nouveau_fence_unref(&fence);
        return 0;

fail_unreserve:
        nouveau_page_flip_unreserve(old_bo, new_bo, NULL);
fail_free:
        kfree(s);
        return ret;
}

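/*
 * Called when a flip previously emitted on the channel has executed: pop
 * the oldest pending flip state off the channel's list, deliver the
 * vblank event to userspace if one was requested, and hand the state
 * back to the caller.
 */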
int
nouveau_finish_page_flip(struct nouveau_channel *chan,
                         struct nouveau_page_flip_state *ps)
{
        struct nouveau_fence_chan *fctx = chan->fence;
        struct drm_device *dev = nouveau_drv(chan->drm);
        struct nouveau_page_flip_state *s;
        unsigned long flags;

        spin_lock_irqsave(&dev->event_lock, flags);

        if (list_empty(&fctx->flip)) {
                NV_ERROR(dev, "unexpected pageflip\n");
                spin_unlock_irqrestore(&dev->event_lock, flags);
                return -EINVAL;
        }

        s = list_first_entry(&fctx->flip, struct nouveau_page_flip_state, head);
        if (s->event) {
                struct drm_pending_vblank_event *e = s->event;
                struct timeval now;

                do_gettimeofday(&now);
                e->event.sequence = 0;
                e->event.tv_sec = now.tv_sec;
                e->event.tv_usec = now.tv_usec;
                list_add_tail(&e->base.link, &e->base.file_priv->event_list);
                wake_up_interruptible(&e->base.file_priv->event_wait);
        }

        list_del(&s->head);
        if (ps)
                *ps = *s;
        kfree(s);

        spin_unlock_irqrestore(&dev->event_lock, flags);
        return 0;
}

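/*
 * Flip-completion handler: finish the pending flip and, on pre-NV50
 * hardware where the flip is not handled by the display engine, program
 * the new scanout offset directly.
 */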
int
nouveau_flip_complete(void *data)
{
        struct nouveau_channel *chan = data;
        struct drm_device *dev = nouveau_drv(chan->drm);
        struct drm_nouveau_private *dev_priv = dev->dev_private;
        struct nouveau_page_flip_state state;

        if (!nouveau_finish_page_flip(chan, &state)) {
                if (dev_priv->card_type < NV_50) {
                        nv_set_crtc_base(dev, state.crtc, state.offset +
                                         state.y * state.pitch +
                                         state.x * state.bpp / 8);
                }
        }

        return 0;
}

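/*
 * Dumb buffer support: allocate a linear VRAM buffer with the pitch
 * rounded up to 256 bytes and the size rounded up to a whole page, and
 * return a GEM handle for it.
 */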
int
nouveau_display_dumb_create(struct drm_file *file_priv, struct drm_device *dev,
                            struct drm_mode_create_dumb *args)
{
        struct nouveau_bo *bo;
        int ret;

        args->pitch = roundup(args->width * (args->bpp / 8), 256);
        args->size = args->pitch * args->height;
        args->size = roundup(args->size, PAGE_SIZE);

        ret = nouveau_gem_new(dev, args->size, 0, NOUVEAU_GEM_DOMAIN_VRAM, 0, 0, &bo);
        if (ret)
                return ret;

        ret = drm_gem_handle_create(file_priv, bo->gem, &args->handle);
        drm_gem_object_unreference_unlocked(bo->gem);
        return ret;
}

int
nouveau_display_dumb_destroy(struct drm_file *file_priv, struct drm_device *dev,
                             uint32_t handle)
{
        return drm_gem_handle_delete(file_priv, handle);
}

int
nouveau_display_dumb_map_offset(struct drm_file *file_priv,
                                struct drm_device *dev,
                                uint32_t handle, uint64_t *poffset)
{
        struct drm_gem_object *gem;

        gem = drm_gem_object_lookup(dev, file_priv, handle);
        if (gem) {
                struct nouveau_bo *bo = gem->driver_private;
                *poffset = bo->bo.addr_space_offset;
                drm_gem_object_unreference_unlocked(gem);
                return 0;
        }

        return -ENOENT;
}