drm/fbdev: rework output polling to be back in the core. (v4)
drivers/gpu/drm/nouveau/nouveau_state.c
/*
 * Copyright 2005 Stephane Marchesin
 * Copyright 2008 Stuart Bennett
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

#include <linux/swab.h>
#include <linux/slab.h>
#include "drmP.h"
#include "drm.h"
#include "drm_sarea.h"
#include "drm_crtc_helper.h"
#include <linux/vgaarb.h>
#include <linux/vga_switcheroo.h>

#include "nouveau_drv.h"
#include "nouveau_drm.h"
#include "nouveau_fbcon.h"
#include "nv50_display.h"

static void nouveau_stub_takedown(struct drm_device *dev) {}

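/*
 * Fill in the per-chipset engine function pointers.  Each chipset family
 * (NV04/NV10/NV20/NV30/NV40/NV50) gets its own implementations of the
 * instmem, MC, PTIMER, PFB, PGRAPH and PFIFO hooks; later families reuse
 * earlier code where the hardware interface did not change.
 */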
static int nouveau_init_engine_ptrs(struct drm_device *dev)
{
        struct drm_nouveau_private *dev_priv = dev->dev_private;
        struct nouveau_engine *engine = &dev_priv->engine;

        switch (dev_priv->chipset & 0xf0) {
        case 0x00:
                engine->instmem.init = nv04_instmem_init;
                engine->instmem.takedown = nv04_instmem_takedown;
                engine->instmem.suspend = nv04_instmem_suspend;
                engine->instmem.resume = nv04_instmem_resume;
                engine->instmem.populate = nv04_instmem_populate;
                engine->instmem.clear = nv04_instmem_clear;
                engine->instmem.bind = nv04_instmem_bind;
                engine->instmem.unbind = nv04_instmem_unbind;
                engine->instmem.prepare_access = nv04_instmem_prepare_access;
                engine->instmem.finish_access = nv04_instmem_finish_access;
                engine->mc.init = nv04_mc_init;
                engine->mc.takedown = nv04_mc_takedown;
                engine->timer.init = nv04_timer_init;
                engine->timer.read = nv04_timer_read;
                engine->timer.takedown = nv04_timer_takedown;
                engine->fb.init = nv04_fb_init;
                engine->fb.takedown = nv04_fb_takedown;
                engine->graph.grclass = nv04_graph_grclass;
                engine->graph.init = nv04_graph_init;
                engine->graph.takedown = nv04_graph_takedown;
                engine->graph.fifo_access = nv04_graph_fifo_access;
                engine->graph.channel = nv04_graph_channel;
                engine->graph.create_context = nv04_graph_create_context;
                engine->graph.destroy_context = nv04_graph_destroy_context;
                engine->graph.load_context = nv04_graph_load_context;
                engine->graph.unload_context = nv04_graph_unload_context;
                engine->fifo.channels = 16;
                engine->fifo.init = nv04_fifo_init;
                engine->fifo.takedown = nouveau_stub_takedown;
                engine->fifo.disable = nv04_fifo_disable;
                engine->fifo.enable = nv04_fifo_enable;
                engine->fifo.reassign = nv04_fifo_reassign;
                engine->fifo.cache_flush = nv04_fifo_cache_flush;
                engine->fifo.cache_pull = nv04_fifo_cache_pull;
                engine->fifo.channel_id = nv04_fifo_channel_id;
                engine->fifo.create_context = nv04_fifo_create_context;
                engine->fifo.destroy_context = nv04_fifo_destroy_context;
                engine->fifo.load_context = nv04_fifo_load_context;
                engine->fifo.unload_context = nv04_fifo_unload_context;
                break;
        case 0x10:
                engine->instmem.init = nv04_instmem_init;
                engine->instmem.takedown = nv04_instmem_takedown;
                engine->instmem.suspend = nv04_instmem_suspend;
                engine->instmem.resume = nv04_instmem_resume;
                engine->instmem.populate = nv04_instmem_populate;
                engine->instmem.clear = nv04_instmem_clear;
                engine->instmem.bind = nv04_instmem_bind;
                engine->instmem.unbind = nv04_instmem_unbind;
                engine->instmem.prepare_access = nv04_instmem_prepare_access;
                engine->instmem.finish_access = nv04_instmem_finish_access;
                engine->mc.init = nv04_mc_init;
                engine->mc.takedown = nv04_mc_takedown;
                engine->timer.init = nv04_timer_init;
                engine->timer.read = nv04_timer_read;
                engine->timer.takedown = nv04_timer_takedown;
                engine->fb.init = nv10_fb_init;
                engine->fb.takedown = nv10_fb_takedown;
                engine->fb.set_region_tiling = nv10_fb_set_region_tiling;
                engine->graph.grclass = nv10_graph_grclass;
                engine->graph.init = nv10_graph_init;
                engine->graph.takedown = nv10_graph_takedown;
                engine->graph.channel = nv10_graph_channel;
                engine->graph.create_context = nv10_graph_create_context;
                engine->graph.destroy_context = nv10_graph_destroy_context;
                engine->graph.fifo_access = nv04_graph_fifo_access;
                engine->graph.load_context = nv10_graph_load_context;
                engine->graph.unload_context = nv10_graph_unload_context;
                engine->graph.set_region_tiling = nv10_graph_set_region_tiling;
                engine->fifo.channels = 32;
                engine->fifo.init = nv10_fifo_init;
                engine->fifo.takedown = nouveau_stub_takedown;
                engine->fifo.disable = nv04_fifo_disable;
                engine->fifo.enable = nv04_fifo_enable;
                engine->fifo.reassign = nv04_fifo_reassign;
                engine->fifo.cache_flush = nv04_fifo_cache_flush;
                engine->fifo.cache_pull = nv04_fifo_cache_pull;
                engine->fifo.channel_id = nv10_fifo_channel_id;
                engine->fifo.create_context = nv10_fifo_create_context;
                engine->fifo.destroy_context = nv10_fifo_destroy_context;
                engine->fifo.load_context = nv10_fifo_load_context;
                engine->fifo.unload_context = nv10_fifo_unload_context;
                break;
        case 0x20:
                engine->instmem.init = nv04_instmem_init;
                engine->instmem.takedown = nv04_instmem_takedown;
                engine->instmem.suspend = nv04_instmem_suspend;
                engine->instmem.resume = nv04_instmem_resume;
                engine->instmem.populate = nv04_instmem_populate;
                engine->instmem.clear = nv04_instmem_clear;
                engine->instmem.bind = nv04_instmem_bind;
                engine->instmem.unbind = nv04_instmem_unbind;
                engine->instmem.prepare_access = nv04_instmem_prepare_access;
                engine->instmem.finish_access = nv04_instmem_finish_access;
                engine->mc.init = nv04_mc_init;
                engine->mc.takedown = nv04_mc_takedown;
                engine->timer.init = nv04_timer_init;
                engine->timer.read = nv04_timer_read;
                engine->timer.takedown = nv04_timer_takedown;
                engine->fb.init = nv10_fb_init;
                engine->fb.takedown = nv10_fb_takedown;
                engine->fb.set_region_tiling = nv10_fb_set_region_tiling;
                engine->graph.grclass = nv20_graph_grclass;
                engine->graph.init = nv20_graph_init;
                engine->graph.takedown = nv20_graph_takedown;
                engine->graph.channel = nv10_graph_channel;
                engine->graph.create_context = nv20_graph_create_context;
                engine->graph.destroy_context = nv20_graph_destroy_context;
                engine->graph.fifo_access = nv04_graph_fifo_access;
                engine->graph.load_context = nv20_graph_load_context;
                engine->graph.unload_context = nv20_graph_unload_context;
                engine->graph.set_region_tiling = nv20_graph_set_region_tiling;
                engine->fifo.channels = 32;
                engine->fifo.init = nv10_fifo_init;
                engine->fifo.takedown = nouveau_stub_takedown;
                engine->fifo.disable = nv04_fifo_disable;
                engine->fifo.enable = nv04_fifo_enable;
                engine->fifo.reassign = nv04_fifo_reassign;
                engine->fifo.cache_flush = nv04_fifo_cache_flush;
                engine->fifo.cache_pull = nv04_fifo_cache_pull;
                engine->fifo.channel_id = nv10_fifo_channel_id;
                engine->fifo.create_context = nv10_fifo_create_context;
                engine->fifo.destroy_context = nv10_fifo_destroy_context;
                engine->fifo.load_context = nv10_fifo_load_context;
                engine->fifo.unload_context = nv10_fifo_unload_context;
                break;
        case 0x30:
                engine->instmem.init = nv04_instmem_init;
                engine->instmem.takedown = nv04_instmem_takedown;
                engine->instmem.suspend = nv04_instmem_suspend;
                engine->instmem.resume = nv04_instmem_resume;
                engine->instmem.populate = nv04_instmem_populate;
                engine->instmem.clear = nv04_instmem_clear;
                engine->instmem.bind = nv04_instmem_bind;
                engine->instmem.unbind = nv04_instmem_unbind;
                engine->instmem.prepare_access = nv04_instmem_prepare_access;
                engine->instmem.finish_access = nv04_instmem_finish_access;
                engine->mc.init = nv04_mc_init;
                engine->mc.takedown = nv04_mc_takedown;
                engine->timer.init = nv04_timer_init;
                engine->timer.read = nv04_timer_read;
                engine->timer.takedown = nv04_timer_takedown;
                engine->fb.init = nv10_fb_init;
                engine->fb.takedown = nv10_fb_takedown;
                engine->fb.set_region_tiling = nv10_fb_set_region_tiling;
                engine->graph.grclass = nv30_graph_grclass;
                engine->graph.init = nv30_graph_init;
                engine->graph.takedown = nv20_graph_takedown;
                engine->graph.fifo_access = nv04_graph_fifo_access;
                engine->graph.channel = nv10_graph_channel;
                engine->graph.create_context = nv20_graph_create_context;
                engine->graph.destroy_context = nv20_graph_destroy_context;
                engine->graph.load_context = nv20_graph_load_context;
                engine->graph.unload_context = nv20_graph_unload_context;
                engine->graph.set_region_tiling = nv20_graph_set_region_tiling;
                engine->fifo.channels = 32;
                engine->fifo.init = nv10_fifo_init;
                engine->fifo.takedown = nouveau_stub_takedown;
                engine->fifo.disable = nv04_fifo_disable;
                engine->fifo.enable = nv04_fifo_enable;
                engine->fifo.reassign = nv04_fifo_reassign;
                engine->fifo.cache_flush = nv04_fifo_cache_flush;
                engine->fifo.cache_pull = nv04_fifo_cache_pull;
                engine->fifo.channel_id = nv10_fifo_channel_id;
                engine->fifo.create_context = nv10_fifo_create_context;
                engine->fifo.destroy_context = nv10_fifo_destroy_context;
                engine->fifo.load_context = nv10_fifo_load_context;
                engine->fifo.unload_context = nv10_fifo_unload_context;
                break;
        case 0x40:
        case 0x60:
                engine->instmem.init = nv04_instmem_init;
                engine->instmem.takedown = nv04_instmem_takedown;
                engine->instmem.suspend = nv04_instmem_suspend;
                engine->instmem.resume = nv04_instmem_resume;
                engine->instmem.populate = nv04_instmem_populate;
                engine->instmem.clear = nv04_instmem_clear;
                engine->instmem.bind = nv04_instmem_bind;
                engine->instmem.unbind = nv04_instmem_unbind;
                engine->instmem.prepare_access = nv04_instmem_prepare_access;
                engine->instmem.finish_access = nv04_instmem_finish_access;
                engine->mc.init = nv40_mc_init;
                engine->mc.takedown = nv40_mc_takedown;
                engine->timer.init = nv04_timer_init;
                engine->timer.read = nv04_timer_read;
                engine->timer.takedown = nv04_timer_takedown;
                engine->fb.init = nv40_fb_init;
                engine->fb.takedown = nv40_fb_takedown;
                engine->fb.set_region_tiling = nv40_fb_set_region_tiling;
                engine->graph.grclass = nv40_graph_grclass;
                engine->graph.init = nv40_graph_init;
                engine->graph.takedown = nv40_graph_takedown;
                engine->graph.fifo_access = nv04_graph_fifo_access;
                engine->graph.channel = nv40_graph_channel;
                engine->graph.create_context = nv40_graph_create_context;
                engine->graph.destroy_context = nv40_graph_destroy_context;
                engine->graph.load_context = nv40_graph_load_context;
                engine->graph.unload_context = nv40_graph_unload_context;
                engine->graph.set_region_tiling = nv40_graph_set_region_tiling;
                engine->fifo.channels = 32;
                engine->fifo.init = nv40_fifo_init;
                engine->fifo.takedown = nouveau_stub_takedown;
                engine->fifo.disable = nv04_fifo_disable;
                engine->fifo.enable = nv04_fifo_enable;
                engine->fifo.reassign = nv04_fifo_reassign;
                engine->fifo.cache_flush = nv04_fifo_cache_flush;
                engine->fifo.cache_pull = nv04_fifo_cache_pull;
                engine->fifo.channel_id = nv10_fifo_channel_id;
                engine->fifo.create_context = nv40_fifo_create_context;
                engine->fifo.destroy_context = nv40_fifo_destroy_context;
                engine->fifo.load_context = nv40_fifo_load_context;
                engine->fifo.unload_context = nv40_fifo_unload_context;
                break;
        case 0x50:
        case 0x80: /* gotta love NVIDIA's consistency.. */
        case 0x90:
        case 0xA0:
                engine->instmem.init = nv50_instmem_init;
                engine->instmem.takedown = nv50_instmem_takedown;
                engine->instmem.suspend = nv50_instmem_suspend;
                engine->instmem.resume = nv50_instmem_resume;
                engine->instmem.populate = nv50_instmem_populate;
                engine->instmem.clear = nv50_instmem_clear;
                engine->instmem.bind = nv50_instmem_bind;
                engine->instmem.unbind = nv50_instmem_unbind;
                engine->instmem.prepare_access = nv50_instmem_prepare_access;
                engine->instmem.finish_access = nv50_instmem_finish_access;
                engine->mc.init = nv50_mc_init;
                engine->mc.takedown = nv50_mc_takedown;
                engine->timer.init = nv04_timer_init;
                engine->timer.read = nv04_timer_read;
                engine->timer.takedown = nv04_timer_takedown;
                engine->fb.init = nv50_fb_init;
                engine->fb.takedown = nv50_fb_takedown;
                engine->graph.grclass = nv50_graph_grclass;
                engine->graph.init = nv50_graph_init;
                engine->graph.takedown = nv50_graph_takedown;
                engine->graph.fifo_access = nv50_graph_fifo_access;
                engine->graph.channel = nv50_graph_channel;
                engine->graph.create_context = nv50_graph_create_context;
                engine->graph.destroy_context = nv50_graph_destroy_context;
                engine->graph.load_context = nv50_graph_load_context;
                engine->graph.unload_context = nv50_graph_unload_context;
                engine->fifo.channels = 128;
                engine->fifo.init = nv50_fifo_init;
                engine->fifo.takedown = nv50_fifo_takedown;
                engine->fifo.disable = nv04_fifo_disable;
                engine->fifo.enable = nv04_fifo_enable;
                engine->fifo.reassign = nv04_fifo_reassign;
                engine->fifo.channel_id = nv50_fifo_channel_id;
                engine->fifo.create_context = nv50_fifo_create_context;
                engine->fifo.destroy_context = nv50_fifo_destroy_context;
                engine->fifo.load_context = nv50_fifo_load_context;
                engine->fifo.unload_context = nv50_fifo_unload_context;
                break;
        default:
                NV_ERROR(dev, "NV%02x unsupported\n", dev_priv->chipset);
                return 1;
        }

        return 0;
}

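/*
 * VGA arbiter callback: tell the chip whether to decode legacy VGA cycles.
 * The register written (0x88054 on >=NV40, 0x1854 before that) appears to
 * gate legacy VGA decoding; the return value reports which VGA resources
 * this device still decodes.
 */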
static unsigned int
nouveau_vga_set_decode(void *priv, bool state)
{
        struct drm_device *dev = priv;
        struct drm_nouveau_private *dev_priv = dev->dev_private;

        if (dev_priv->chipset >= 0x40)
                nv_wr32(dev, 0x88054, state);
        else
                nv_wr32(dev, 0x1854, state);

        if (state)
                return VGA_RSRC_LEGACY_IO | VGA_RSRC_LEGACY_MEM |
                       VGA_RSRC_NORMAL_IO | VGA_RSRC_NORMAL_MEM;
        else
                return VGA_RSRC_NORMAL_IO | VGA_RSRC_NORMAL_MEM;
}

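/*
 * Set up the kernel's own channel (used, among other things, for fbcon
 * acceleration).  Alongside the channel itself, create DMA objects covering
 * all of VRAM (NvDmaVRAM) and the GART aperture (NvDmaGART).
 */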
static int
nouveau_card_init_channel(struct drm_device *dev)
{
        struct drm_nouveau_private *dev_priv = dev->dev_private;
        struct nouveau_gpuobj *gpuobj;
        int ret;

        ret = nouveau_channel_alloc(dev, &dev_priv->channel,
                                    (struct drm_file *)-2,
                                    NvDmaFB, NvDmaTT);
        if (ret)
                return ret;

        gpuobj = NULL;
        ret = nouveau_gpuobj_dma_new(dev_priv->channel, NV_CLASS_DMA_IN_MEMORY,
                                     0, dev_priv->vram_size,
                                     NV_DMA_ACCESS_RW, NV_DMA_TARGET_VIDMEM,
                                     &gpuobj);
        if (ret)
                goto out_err;

        ret = nouveau_gpuobj_ref_add(dev, dev_priv->channel, NvDmaVRAM,
                                     gpuobj, NULL);
        if (ret)
                goto out_err;

        gpuobj = NULL;
        ret = nouveau_gpuobj_gart_dma_new(dev_priv->channel, 0,
                                          dev_priv->gart_info.aper_size,
                                          NV_DMA_ACCESS_RW, &gpuobj, NULL);
        if (ret)
                goto out_err;

        ret = nouveau_gpuobj_ref_add(dev, dev_priv->channel, NvDmaGART,
                                     gpuobj, NULL);
        if (ret)
                goto out_err;

        return 0;
out_err:
        nouveau_gpuobj_del(dev, &gpuobj);
        nouveau_channel_free(dev_priv->channel);
        dev_priv->channel = NULL;
        return ret;
}

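/*
 * vga_switcheroo callbacks: on hybrid-graphics machines the discrete GPU can
 * be switched on and off at runtime, so map the switch events onto the normal
 * PCI suspend/resume paths, and only allow a switch while no client has the
 * device open.
 */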
static void nouveau_switcheroo_set_state(struct pci_dev *pdev,
                                         enum vga_switcheroo_state state)
{
        pm_message_t pmm = { .event = PM_EVENT_SUSPEND };
        if (state == VGA_SWITCHEROO_ON) {
                printk(KERN_ERR "VGA switcheroo: switched nouveau on\n");
                nouveau_pci_resume(pdev);
        } else {
                printk(KERN_ERR "VGA switcheroo: switched nouveau off\n");
                nouveau_pci_suspend(pdev, pmm);
        }
}

static bool nouveau_switcheroo_can_switch(struct pci_dev *pdev)
{
        struct drm_device *dev = pci_get_drvdata(pdev);
        bool can_switch;

        spin_lock(&dev->count_lock);
        can_switch = (dev->open_count == 0);
        spin_unlock(&dev->count_lock);
        return can_switch;
}

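/*
 * Bring the card up.  Rough order: register with the VGA arbiter and
 * vga_switcheroo, hook up the engine pointers, parse/run the BIOS tables,
 * size memory, then initialise instmem, the memory manager, gpuobjs, PMC,
 * PTIMER, PFB and (unless acceleration is disabled) PGRAPH and PFIFO,
 * install the IRQ handler, create the kernel channel and, under KMS,
 * the display and fbcon.
 */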
int
nouveau_card_init(struct drm_device *dev)
{
        struct drm_nouveau_private *dev_priv = dev->dev_private;
        struct nouveau_engine *engine;
        int ret;

        NV_DEBUG(dev, "prev state = %d\n", dev_priv->init_state);

        if (dev_priv->init_state == NOUVEAU_CARD_INIT_DONE)
                return 0;

        vga_client_register(dev->pdev, dev, NULL, nouveau_vga_set_decode);
        vga_switcheroo_register_client(dev->pdev, nouveau_switcheroo_set_state,
                                       nouveau_switcheroo_can_switch);

        /* Initialise internal driver API hooks */
        ret = nouveau_init_engine_ptrs(dev);
        if (ret)
                goto out;
        engine = &dev_priv->engine;
        dev_priv->init_state = NOUVEAU_CARD_INIT_FAILED;
        spin_lock_init(&dev_priv->context_switch_lock);

        /* Parse BIOS tables / Run init tables if card not POSTed */
        if (drm_core_check_feature(dev, DRIVER_MODESET)) {
                ret = nouveau_bios_init(dev);
                if (ret)
                        goto out;
        }

        ret = nouveau_mem_detect(dev);
        if (ret)
                goto out_bios;

        ret = nouveau_gpuobj_early_init(dev);
        if (ret)
                goto out_bios;

        /* Initialise instance memory, must happen before mem_init so we
         * know exactly how much VRAM we're able to use for "normal"
         * purposes.
         */
        ret = engine->instmem.init(dev);
        if (ret)
                goto out_gpuobj_early;

        /* Set up the memory manager */
        ret = nouveau_mem_init(dev);
        if (ret)
                goto out_instmem;

        ret = nouveau_gpuobj_init(dev);
        if (ret)
                goto out_mem;

        /* PMC */
        ret = engine->mc.init(dev);
        if (ret)
                goto out_gpuobj;

        /* PTIMER */
        ret = engine->timer.init(dev);
        if (ret)
                goto out_mc;

        /* PFB */
        ret = engine->fb.init(dev);
        if (ret)
                goto out_timer;

        if (nouveau_noaccel)
                engine->graph.accel_blocked = true;
        else {
                /* PGRAPH */
                ret = engine->graph.init(dev);
                if (ret)
                        goto out_fb;

                /* PFIFO */
                ret = engine->fifo.init(dev);
                if (ret)
                        goto out_graph;
        }

        /* this calls irq_preinstall, registers the irq handler and
         * calls irq_postinstall
         */
        ret = drm_irq_install(dev);
        if (ret)
                goto out_fifo;

        ret = drm_vblank_init(dev, 0);
        if (ret)
                goto out_irq;

        /* what about PVIDEO/PCRTC/PRAMDAC etc? */

        if (!engine->graph.accel_blocked) {
                ret = nouveau_card_init_channel(dev);
                if (ret)
                        goto out_irq;
        }

        if (drm_core_check_feature(dev, DRIVER_MODESET)) {
                if (dev_priv->card_type >= NV_50)
                        ret = nv50_display_create(dev);
                else
                        ret = nv04_display_create(dev);
                if (ret)
                        goto out_channel;
        }

        ret = nouveau_backlight_init(dev);
        if (ret)
                NV_ERROR(dev, "Error %d registering backlight\n", ret);

        dev_priv->init_state = NOUVEAU_CARD_INIT_DONE;

        if (drm_core_check_feature(dev, DRIVER_MODESET)) {
                nouveau_fbcon_init(dev);
                drm_kms_helper_poll_init(dev);
        }

        return 0;

out_channel:
        if (dev_priv->channel) {
                nouveau_channel_free(dev_priv->channel);
                dev_priv->channel = NULL;
        }
out_irq:
        drm_irq_uninstall(dev);
out_fifo:
        if (!nouveau_noaccel)
                engine->fifo.takedown(dev);
out_graph:
        if (!nouveau_noaccel)
                engine->graph.takedown(dev);
out_fb:
        engine->fb.takedown(dev);
out_timer:
        engine->timer.takedown(dev);
out_mc:
        engine->mc.takedown(dev);
out_gpuobj:
        nouveau_gpuobj_takedown(dev);
out_mem:
        nouveau_sgdma_takedown(dev);
        nouveau_mem_close(dev);
out_instmem:
        engine->instmem.takedown(dev);
out_gpuobj_early:
        nouveau_gpuobj_late_takedown(dev);
out_bios:
        nouveau_bios_takedown(dev);
out:
        vga_client_register(dev->pdev, NULL, NULL, NULL);
        return ret;
}

static void nouveau_card_takedown(struct drm_device *dev)
{
        struct drm_nouveau_private *dev_priv = dev->dev_private;
        struct nouveau_engine *engine = &dev_priv->engine;

        NV_DEBUG(dev, "prev state = %d\n", dev_priv->init_state);

        if (dev_priv->init_state != NOUVEAU_CARD_INIT_DOWN) {

                nouveau_backlight_exit(dev);

                if (dev_priv->channel) {
                        nouveau_channel_free(dev_priv->channel);
                        dev_priv->channel = NULL;
                }

                if (!nouveau_noaccel) {
                        engine->fifo.takedown(dev);
                        engine->graph.takedown(dev);
                }
                engine->fb.takedown(dev);
                engine->timer.takedown(dev);
                engine->mc.takedown(dev);

                mutex_lock(&dev->struct_mutex);
                ttm_bo_clean_mm(&dev_priv->ttm.bdev, TTM_PL_VRAM);
                ttm_bo_clean_mm(&dev_priv->ttm.bdev, TTM_PL_TT);
                mutex_unlock(&dev->struct_mutex);
                nouveau_sgdma_takedown(dev);

                nouveau_gpuobj_takedown(dev);
                nouveau_mem_close(dev);
                engine->instmem.takedown(dev);

                if (drm_core_check_feature(dev, DRIVER_MODESET))
                        drm_irq_uninstall(dev);

                nouveau_gpuobj_late_takedown(dev);
                nouveau_bios_takedown(dev);

                vga_client_register(dev->pdev, NULL, NULL, NULL);

                dev_priv->init_state = NOUVEAU_CARD_INIT_DOWN;
        }
}

/* A client is going away; release whatever was allocated for its
 * file_priv */
void nouveau_preclose(struct drm_device *dev, struct drm_file *file_priv)
{
        nouveau_channel_cleanup(dev, file_priv);
}

/* first module load, set up the mmio/fb mapping */
/* KMS: we need mmio at load time, not when the first drm client opens. */
int nouveau_firstopen(struct drm_device *dev)
{
        return 0;
}

/* if we have an OF card, copy vbios to RAMIN */
static void nouveau_OF_copy_vbios_to_ramin(struct drm_device *dev)
{
#if defined(__powerpc__)
        int size, i;
        const uint32_t *bios;
        struct device_node *dn = pci_device_to_OF_node(dev->pdev);
        if (!dn) {
                NV_INFO(dev, "Unable to get the OF node\n");
                return;
        }

        bios = of_get_property(dn, "NVDA,BMP", &size);
        if (bios) {
                for (i = 0; i < size; i += 4)
                        nv_wi32(dev, i, bios[i/4]);
                NV_INFO(dev, "OF bios successfully copied (%d bytes)\n", size);
        } else {
                NV_INFO(dev, "Unable to get the OF bios\n");
        }
#endif
}

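/*
 * Build the aperture list handed to remove_conflicting_framebuffers():
 * BAR 1 (the linear framebuffer) always, plus BARs 2 and 3 when present,
 * so that any firmware framebuffer driver (e.g. vesafb/efifb) sitting on
 * them is kicked out before KMS takes over.
 */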
static struct apertures_struct *nouveau_get_apertures(struct drm_device *dev)
{
        struct pci_dev *pdev = dev->pdev;
        struct apertures_struct *aper = alloc_apertures(3);
        if (!aper)
                return NULL;

        aper->ranges[0].base = pci_resource_start(pdev, 1);
        aper->ranges[0].size = pci_resource_len(pdev, 1);
        aper->count = 1;

        if (pci_resource_len(pdev, 2)) {
                aper->ranges[aper->count].base = pci_resource_start(pdev, 2);
                aper->ranges[aper->count].size = pci_resource_len(pdev, 2);
                aper->count++;
        }

        if (pci_resource_len(pdev, 3)) {
                aper->ranges[aper->count].base = pci_resource_start(pdev, 3);
                aper->ranges[aper->count].size = pci_resource_len(pdev, 3);
                aper->count++;
        }

        return aper;
}

static int nouveau_remove_conflicting_drivers(struct drm_device *dev)
{
        struct drm_nouveau_private *dev_priv = dev->dev_private;
        bool primary = false;

        dev_priv->apertures = nouveau_get_apertures(dev);
        if (!dev_priv->apertures)
                return -ENOMEM;

#ifdef CONFIG_X86
        primary = dev->pdev->resource[PCI_ROM_RESOURCE].flags & IORESOURCE_ROM_SHADOW;
#endif

        remove_conflicting_framebuffers(dev_priv->apertures, "nouveaufb", primary);
        return 0;
}

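/*
 * Driver load: map the BAR0 register aperture, work out which chipset we
 * are on from PMC_BOOT_0, map RAMIN (its own BAR on >=NV40, a window into
 * BAR0 otherwise), and under KMS kick out conflicting framebuffers and
 * initialise the card right away.
 */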
int nouveau_load(struct drm_device *dev, unsigned long flags)
{
        struct drm_nouveau_private *dev_priv;
        uint32_t reg0;
        resource_size_t mmio_start_offs;

        dev_priv = kzalloc(sizeof(*dev_priv), GFP_KERNEL);
        if (!dev_priv)
                return -ENOMEM;
        dev->dev_private = dev_priv;
        dev_priv->dev = dev;

        dev_priv->flags = flags & NOUVEAU_FLAGS;
        dev_priv->init_state = NOUVEAU_CARD_INIT_DOWN;

        NV_DEBUG(dev, "vendor: 0x%X device: 0x%X class: 0x%X\n",
                 dev->pci_vendor, dev->pci_device, dev->pdev->class);

        dev_priv->wq = create_workqueue("nouveau");
        if (!dev_priv->wq)
                return -EINVAL;

        /* resource 0 is mmio regs */
        /* resource 1 is linear FB */
        /* resource 2 is RAMIN (mmio regs + 0x1000000) */
        /* resource 6 is bios */

        /* map the mmio regs */
        mmio_start_offs = pci_resource_start(dev->pdev, 0);
        dev_priv->mmio = ioremap(mmio_start_offs, 0x00800000);
        if (!dev_priv->mmio) {
                NV_ERROR(dev, "Unable to initialize the mmio mapping. "
                         "Please report your setup to " DRIVER_EMAIL "\n");
                return -EINVAL;
        }
        NV_DEBUG(dev, "regs mapped ok at 0x%llx\n",
                 (unsigned long long)mmio_start_offs);

#ifdef __BIG_ENDIAN
        /* Put the card in BE mode if it's not */
        if (nv_rd32(dev, NV03_PMC_BOOT_1))
                nv_wr32(dev, NV03_PMC_BOOT_1, 0x00000001);

        DRM_MEMORYBARRIER();
#endif

        /* Time to determine the card architecture */
        reg0 = nv_rd32(dev, NV03_PMC_BOOT_0);

        /* We're dealing with >=NV10 */
        if ((reg0 & 0x0f000000) > 0) {
                /* Bits 27-20 contain the architecture in hex */
                dev_priv->chipset = (reg0 & 0xff00000) >> 20;
        /* NV04 or NV05 */
        } else if ((reg0 & 0xff00fff0) == 0x20004000) {
                if (reg0 & 0x00f00000)
                        dev_priv->chipset = 0x05;
                else
                        dev_priv->chipset = 0x04;
        } else
                dev_priv->chipset = 0xff;

        switch (dev_priv->chipset & 0xf0) {
        case 0x00:
        case 0x10:
        case 0x20:
        case 0x30:
                dev_priv->card_type = dev_priv->chipset & 0xf0;
                break;
        case 0x40:
        case 0x60:
                dev_priv->card_type = NV_40;
                break;
        case 0x50:
        case 0x80:
        case 0x90:
        case 0xa0:
                dev_priv->card_type = NV_50;
                break;
        default:
                NV_INFO(dev, "Unsupported chipset 0x%08x\n", reg0);
                return -EINVAL;
        }

        NV_INFO(dev, "Detected an NV%2x generation card (0x%08x)\n",
                dev_priv->card_type, reg0);

        if (drm_core_check_feature(dev, DRIVER_MODESET)) {
                int ret = nouveau_remove_conflicting_drivers(dev);
                if (ret)
                        return ret;
        }

        /* map larger RAMIN aperture on NV40 cards */
        dev_priv->ramin = NULL;
        if (dev_priv->card_type >= NV_40) {
                int ramin_bar = 2;
                if (pci_resource_len(dev->pdev, ramin_bar) == 0)
                        ramin_bar = 3;

                dev_priv->ramin_size = pci_resource_len(dev->pdev, ramin_bar);
                dev_priv->ramin = ioremap(
                                pci_resource_start(dev->pdev, ramin_bar),
                                dev_priv->ramin_size);
                if (!dev_priv->ramin) {
                        NV_ERROR(dev, "Failed to init RAMIN mapping, "
                                      "limited instance memory available\n");
                }
        }

        /* On older cards (or if the above failed), create a map covering
         * the BAR0 PRAMIN aperture */
        if (!dev_priv->ramin) {
                dev_priv->ramin_size = 1 * 1024 * 1024;
                dev_priv->ramin = ioremap(mmio_start_offs + NV_RAMIN,
                                          dev_priv->ramin_size);
                if (!dev_priv->ramin) {
                        NV_ERROR(dev, "Failed to map BAR0 PRAMIN.\n");
                        return -ENOMEM;
                }
        }

        nouveau_OF_copy_vbios_to_ramin(dev);

        /* Special flags */
        if (dev->pci_device == 0x01a0)
                dev_priv->flags |= NV_NFORCE;
        else if (dev->pci_device == 0x01f0)
                dev_priv->flags |= NV_NFORCE2;

        /* For kernel modesetting, init card now and bring up fbcon */
        if (drm_core_check_feature(dev, DRIVER_MODESET)) {
                int ret = nouveau_card_init(dev);
                if (ret)
                        return ret;
        }

        return 0;
}

static void nouveau_close(struct drm_device *dev)
{
        struct drm_nouveau_private *dev_priv = dev->dev_private;

        /* In the case of an error dev_priv may not be allocated yet */
        if (dev_priv)
                nouveau_card_takedown(dev);
}

/* KMS: we need mmio at load time, not when the first drm client opens. */
void nouveau_lastclose(struct drm_device *dev)
{
        if (drm_core_check_feature(dev, DRIVER_MODESET))
                return;

        nouveau_close(dev);
}

int nouveau_unload(struct drm_device *dev)
{
        struct drm_nouveau_private *dev_priv = dev->dev_private;

        if (drm_core_check_feature(dev, DRIVER_MODESET)) {
                drm_kms_helper_poll_fini(dev);
                nouveau_fbcon_fini(dev);
                if (dev_priv->card_type >= NV_50)
                        nv50_display_destroy(dev);
                else
                        nv04_display_destroy(dev);
                nouveau_close(dev);
        }

        iounmap(dev_priv->mmio);
        iounmap(dev_priv->ramin);

        kfree(dev_priv);
        dev->dev_private = NULL;
        return 0;
}

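/*
 * GETPARAM/SETPARAM ioctls: let userspace (libdrm and friends) query the
 * chipset, bus type and memory layout.  No settable parameters exist at the
 * moment, so SETPARAM rejects everything it is given.
 */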
int nouveau_ioctl_getparam(struct drm_device *dev, void *data,
                           struct drm_file *file_priv)
{
        struct drm_nouveau_private *dev_priv = dev->dev_private;
        struct drm_nouveau_getparam *getparam = data;

        NOUVEAU_CHECK_INITIALISED_WITH_RETURN;

        switch (getparam->param) {
        case NOUVEAU_GETPARAM_CHIPSET_ID:
                getparam->value = dev_priv->chipset;
                break;
        case NOUVEAU_GETPARAM_PCI_VENDOR:
                getparam->value = dev->pci_vendor;
                break;
        case NOUVEAU_GETPARAM_PCI_DEVICE:
                getparam->value = dev->pci_device;
                break;
        case NOUVEAU_GETPARAM_BUS_TYPE:
                if (drm_device_is_agp(dev))
                        getparam->value = NV_AGP;
                else if (drm_device_is_pcie(dev))
                        getparam->value = NV_PCIE;
                else
                        getparam->value = NV_PCI;
                break;
        case NOUVEAU_GETPARAM_FB_PHYSICAL:
                getparam->value = dev_priv->fb_phys;
                break;
        case NOUVEAU_GETPARAM_AGP_PHYSICAL:
                getparam->value = dev_priv->gart_info.aper_base;
                break;
        case NOUVEAU_GETPARAM_PCI_PHYSICAL:
                if (dev->sg) {
                        getparam->value = (unsigned long)dev->sg->virtual;
                } else {
                        NV_ERROR(dev, "Requested PCIGART address, "
                                      "while no PCIGART was created\n");
                        return -EINVAL;
                }
                break;
        case NOUVEAU_GETPARAM_FB_SIZE:
                getparam->value = dev_priv->fb_available_size;
                break;
        case NOUVEAU_GETPARAM_AGP_SIZE:
                getparam->value = dev_priv->gart_info.aper_size;
                break;
        case NOUVEAU_GETPARAM_VM_VRAM_BASE:
                getparam->value = dev_priv->vm_vram_base;
                break;
        case NOUVEAU_GETPARAM_GRAPH_UNITS:
                /* NV40 and NV50 versions are quite different, but the
                 * register address is the same; userspace is expected to
                 * know the card family anyway... */
                if (dev_priv->chipset >= 0x40) {
                        getparam->value = nv_rd32(dev, NV40_PMC_GRAPH_UNITS);
                        break;
                }
                /* FALLTHRU */
        default:
                NV_ERROR(dev, "unknown parameter %lld\n", getparam->param);
                return -EINVAL;
        }

        return 0;
}

int
nouveau_ioctl_setparam(struct drm_device *dev, void *data,
                       struct drm_file *file_priv)
{
        struct drm_nouveau_setparam *setparam = data;

        NOUVEAU_CHECK_INITIALISED_WITH_RETURN;

        switch (setparam->param) {
        default:
                NV_ERROR(dev, "unknown parameter %lld\n", setparam->param);
                return -EINVAL;
        }

        return 0;
}

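/*
 * Busy-wait helpers.  The timeout below is measured with the PTIMER
 * engine's read() hook (nanoseconds on the chips handled here), and the
 * nv_wait() macro used elsewhere in the driver wraps nouveau_wait_until()
 * with a default timeout.
 */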
/* Wait until (value(reg) & mask) == val, or until the timeout expires */
bool nouveau_wait_until(struct drm_device *dev, uint64_t timeout,
                        uint32_t reg, uint32_t mask, uint32_t val)
{
        struct drm_nouveau_private *dev_priv = dev->dev_private;
        struct nouveau_timer_engine *ptimer = &dev_priv->engine.timer;
        uint64_t start = ptimer->read(dev);

        do {
                if ((nv_rd32(dev, reg) & mask) == val)
                        return true;
        } while (ptimer->read(dev) - start < timeout);

        return false;
}

/* Waits for PGRAPH to go completely idle */
bool nouveau_wait_for_idle(struct drm_device *dev)
{
        if (!nv_wait(NV04_PGRAPH_STATUS, 0xffffffff, 0x00000000)) {
                NV_ERROR(dev, "PGRAPH idle timed out with status 0x%08x\n",
                         nv_rd32(dev, NV04_PGRAPH_STATUS));
                return false;
        }

        return true;
}