drm/nouveau: port all engines to new engine module format
drivers/gpu/drm/nouveau/core/engine/graph/nv04.c
/*
 * Copyright 2007 Stephane Marchesin
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

#include <core/os.h>
#include <core/class.h>
#include <core/handle.h>
#include <core/namedb.h>

#include <subdev/fb.h>
#include <subdev/instmem.h>
#include <subdev/timer.h>

#include <engine/fifo.h>
#include <engine/graph.h>

#include "regs.h"

static u32
nv04_graph_ctx_regs[] = {
	0x0040053c,
	0x00400544,
	0x00400540,
	0x00400548,
	NV04_PGRAPH_CTX_SWITCH1,
	NV04_PGRAPH_CTX_SWITCH2,
	NV04_PGRAPH_CTX_SWITCH3,
	NV04_PGRAPH_CTX_SWITCH4,
	NV04_PGRAPH_CTX_CACHE1,
	NV04_PGRAPH_CTX_CACHE2,
	NV04_PGRAPH_CTX_CACHE3,
	NV04_PGRAPH_CTX_CACHE4,
	0x00400184,
	0x004001a4,
	0x004001c4,
	0x004001e4,
	0x00400188,
	0x004001a8,
	0x004001c8,
	0x004001e8,
	0x0040018c,
	0x004001ac,
	0x004001cc,
	0x004001ec,
	0x00400190,
	0x004001b0,
	0x004001d0,
	0x004001f0,
	0x00400194,
	0x004001b4,
	0x004001d4,
	0x004001f4,
	0x00400198,
	0x004001b8,
	0x004001d8,
	0x004001f8,
	0x0040019c,
	0x004001bc,
	0x004001dc,
	0x004001fc,
	0x00400174,
	NV04_PGRAPH_DMA_START_0,
	NV04_PGRAPH_DMA_START_1,
	NV04_PGRAPH_DMA_LENGTH,
	NV04_PGRAPH_DMA_MISC,
	NV04_PGRAPH_DMA_PITCH,
	NV04_PGRAPH_BOFFSET0,
	NV04_PGRAPH_BBASE0,
	NV04_PGRAPH_BLIMIT0,
	NV04_PGRAPH_BOFFSET1,
	NV04_PGRAPH_BBASE1,
	NV04_PGRAPH_BLIMIT1,
	NV04_PGRAPH_BOFFSET2,
	NV04_PGRAPH_BBASE2,
	NV04_PGRAPH_BLIMIT2,
	NV04_PGRAPH_BOFFSET3,
	NV04_PGRAPH_BBASE3,
	NV04_PGRAPH_BLIMIT3,
	NV04_PGRAPH_BOFFSET4,
	NV04_PGRAPH_BBASE4,
	NV04_PGRAPH_BLIMIT4,
	NV04_PGRAPH_BOFFSET5,
	NV04_PGRAPH_BBASE5,
	NV04_PGRAPH_BLIMIT5,
	NV04_PGRAPH_BPITCH0,
	NV04_PGRAPH_BPITCH1,
	NV04_PGRAPH_BPITCH2,
	NV04_PGRAPH_BPITCH3,
	NV04_PGRAPH_BPITCH4,
	NV04_PGRAPH_SURFACE,
	NV04_PGRAPH_STATE,
	NV04_PGRAPH_BSWIZZLE2,
	NV04_PGRAPH_BSWIZZLE5,
	NV04_PGRAPH_BPIXEL,
	NV04_PGRAPH_NOTIFY,
	NV04_PGRAPH_PATT_COLOR0,
	NV04_PGRAPH_PATT_COLOR1,
	NV04_PGRAPH_PATT_COLORRAM+0x00,
	NV04_PGRAPH_PATT_COLORRAM+0x04,
	NV04_PGRAPH_PATT_COLORRAM+0x08,
	NV04_PGRAPH_PATT_COLORRAM+0x0c,
	NV04_PGRAPH_PATT_COLORRAM+0x10,
	NV04_PGRAPH_PATT_COLORRAM+0x14,
	NV04_PGRAPH_PATT_COLORRAM+0x18,
	NV04_PGRAPH_PATT_COLORRAM+0x1c,
	NV04_PGRAPH_PATT_COLORRAM+0x20,
	NV04_PGRAPH_PATT_COLORRAM+0x24,
	NV04_PGRAPH_PATT_COLORRAM+0x28,
	NV04_PGRAPH_PATT_COLORRAM+0x2c,
	NV04_PGRAPH_PATT_COLORRAM+0x30,
	NV04_PGRAPH_PATT_COLORRAM+0x34,
	NV04_PGRAPH_PATT_COLORRAM+0x38,
	NV04_PGRAPH_PATT_COLORRAM+0x3c,
	NV04_PGRAPH_PATT_COLORRAM+0x40,
	NV04_PGRAPH_PATT_COLORRAM+0x44,
	NV04_PGRAPH_PATT_COLORRAM+0x48,
	NV04_PGRAPH_PATT_COLORRAM+0x4c,
	NV04_PGRAPH_PATT_COLORRAM+0x50,
	NV04_PGRAPH_PATT_COLORRAM+0x54,
	NV04_PGRAPH_PATT_COLORRAM+0x58,
	NV04_PGRAPH_PATT_COLORRAM+0x5c,
	NV04_PGRAPH_PATT_COLORRAM+0x60,
	NV04_PGRAPH_PATT_COLORRAM+0x64,
	NV04_PGRAPH_PATT_COLORRAM+0x68,
	NV04_PGRAPH_PATT_COLORRAM+0x6c,
	NV04_PGRAPH_PATT_COLORRAM+0x70,
	NV04_PGRAPH_PATT_COLORRAM+0x74,
	NV04_PGRAPH_PATT_COLORRAM+0x78,
	NV04_PGRAPH_PATT_COLORRAM+0x7c,
	NV04_PGRAPH_PATT_COLORRAM+0x80,
	NV04_PGRAPH_PATT_COLORRAM+0x84,
	NV04_PGRAPH_PATT_COLORRAM+0x88,
	NV04_PGRAPH_PATT_COLORRAM+0x8c,
	NV04_PGRAPH_PATT_COLORRAM+0x90,
	NV04_PGRAPH_PATT_COLORRAM+0x94,
	NV04_PGRAPH_PATT_COLORRAM+0x98,
	NV04_PGRAPH_PATT_COLORRAM+0x9c,
	NV04_PGRAPH_PATT_COLORRAM+0xa0,
	NV04_PGRAPH_PATT_COLORRAM+0xa4,
	NV04_PGRAPH_PATT_COLORRAM+0xa8,
	NV04_PGRAPH_PATT_COLORRAM+0xac,
	NV04_PGRAPH_PATT_COLORRAM+0xb0,
	NV04_PGRAPH_PATT_COLORRAM+0xb4,
	NV04_PGRAPH_PATT_COLORRAM+0xb8,
	NV04_PGRAPH_PATT_COLORRAM+0xbc,
	NV04_PGRAPH_PATT_COLORRAM+0xc0,
	NV04_PGRAPH_PATT_COLORRAM+0xc4,
	NV04_PGRAPH_PATT_COLORRAM+0xc8,
	NV04_PGRAPH_PATT_COLORRAM+0xcc,
	NV04_PGRAPH_PATT_COLORRAM+0xd0,
	NV04_PGRAPH_PATT_COLORRAM+0xd4,
	NV04_PGRAPH_PATT_COLORRAM+0xd8,
	NV04_PGRAPH_PATT_COLORRAM+0xdc,
	NV04_PGRAPH_PATT_COLORRAM+0xe0,
	NV04_PGRAPH_PATT_COLORRAM+0xe4,
	NV04_PGRAPH_PATT_COLORRAM+0xe8,
	NV04_PGRAPH_PATT_COLORRAM+0xec,
	NV04_PGRAPH_PATT_COLORRAM+0xf0,
	NV04_PGRAPH_PATT_COLORRAM+0xf4,
	NV04_PGRAPH_PATT_COLORRAM+0xf8,
	NV04_PGRAPH_PATT_COLORRAM+0xfc,
	NV04_PGRAPH_PATTERN,
	0x0040080c,
	NV04_PGRAPH_PATTERN_SHAPE,
	0x00400600,
	NV04_PGRAPH_ROP3,
	NV04_PGRAPH_CHROMA,
	NV04_PGRAPH_BETA_AND,
	NV04_PGRAPH_BETA_PREMULT,
	NV04_PGRAPH_CONTROL0,
	NV04_PGRAPH_CONTROL1,
	NV04_PGRAPH_CONTROL2,
	NV04_PGRAPH_BLEND,
	NV04_PGRAPH_STORED_FMT,
	NV04_PGRAPH_SOURCE_COLOR,
	0x00400560,
	0x00400568,
	0x00400564,
	0x0040056c,
	0x00400400,
	0x00400480,
	0x00400404,
	0x00400484,
	0x00400408,
	0x00400488,
	0x0040040c,
	0x0040048c,
	0x00400410,
	0x00400490,
	0x00400414,
	0x00400494,
	0x00400418,
	0x00400498,
	0x0040041c,
	0x0040049c,
	0x00400420,
	0x004004a0,
	0x00400424,
	0x004004a4,
	0x00400428,
	0x004004a8,
	0x0040042c,
	0x004004ac,
	0x00400430,
	0x004004b0,
	0x00400434,
	0x004004b4,
	0x00400438,
	0x004004b8,
	0x0040043c,
	0x004004bc,
	0x00400440,
	0x004004c0,
	0x00400444,
	0x004004c4,
	0x00400448,
	0x004004c8,
	0x0040044c,
	0x004004cc,
	0x00400450,
	0x004004d0,
	0x00400454,
	0x004004d4,
	0x00400458,
	0x004004d8,
	0x0040045c,
	0x004004dc,
	0x00400460,
	0x004004e0,
	0x00400464,
	0x004004e4,
	0x00400468,
	0x004004e8,
	0x0040046c,
	0x004004ec,
	0x00400470,
	0x004004f0,
	0x00400474,
	0x004004f4,
	0x00400478,
	0x004004f8,
	0x0040047c,
	0x004004fc,
	0x00400534,
	0x00400538,
	0x00400514,
	0x00400518,
	0x0040051c,
	0x00400520,
	0x00400524,
	0x00400528,
	0x0040052c,
	0x00400530,
	0x00400d00,
	0x00400d40,
	0x00400d80,
	0x00400d04,
	0x00400d44,
	0x00400d84,
	0x00400d08,
	0x00400d48,
	0x00400d88,
	0x00400d0c,
	0x00400d4c,
	0x00400d8c,
	0x00400d10,
	0x00400d50,
	0x00400d90,
	0x00400d14,
	0x00400d54,
	0x00400d94,
	0x00400d18,
	0x00400d58,
	0x00400d98,
	0x00400d1c,
	0x00400d5c,
	0x00400d9c,
	0x00400d20,
	0x00400d60,
	0x00400da0,
	0x00400d24,
	0x00400d64,
	0x00400da4,
	0x00400d28,
	0x00400d68,
	0x00400da8,
	0x00400d2c,
	0x00400d6c,
	0x00400dac,
	0x00400d30,
	0x00400d70,
	0x00400db0,
	0x00400d34,
	0x00400d74,
	0x00400db4,
	0x00400d38,
	0x00400d78,
	0x00400db8,
	0x00400d3c,
	0x00400d7c,
	0x00400dbc,
	0x00400590,
	0x00400594,
	0x00400598,
	0x0040059c,
	0x004005a8,
	0x004005ac,
	0x004005b0,
	0x004005b4,
	0x004005c0,
	0x004005c4,
	0x004005c8,
	0x004005cc,
	0x004005d0,
	0x004005d4,
	0x004005d8,
	0x004005dc,
	0x004005e0,
	NV04_PGRAPH_PASSTHRU_0,
	NV04_PGRAPH_PASSTHRU_1,
	NV04_PGRAPH_PASSTHRU_2,
	NV04_PGRAPH_DVD_COLORFMT,
	NV04_PGRAPH_SCALED_FORMAT,
	NV04_PGRAPH_MISC24_0,
	NV04_PGRAPH_MISC24_1,
	NV04_PGRAPH_MISC24_2,
	0x00400500,
	0x00400504,
	NV04_PGRAPH_VALID1,
	NV04_PGRAPH_VALID2,
	NV04_PGRAPH_DEBUG_3
};

struct nv04_graph_priv {
	struct nouveau_graph base;
	struct nv04_graph_chan *chan[16];
	spinlock_t lock;
};

struct nv04_graph_chan {
	struct nouveau_object base;
	int chid;
	u32 nv04[ARRAY_SIZE(nv04_graph_ctx_regs)];
};


static inline struct nv04_graph_priv *
nv04_graph_priv(struct nv04_graph_chan *chan)
{
	return (void *)nv_object(chan)->engine;
}

/*******************************************************************************
 * Graphics object classes
 ******************************************************************************/

/*
 * Software methods, why they are needed, and how they all work:
 *
 * NV04 and NV05 keep most of the state in PGRAPH context itself, but some
 * 2d engine settings are kept inside the grobjs themselves. The grobjs are
 * 3 words long on both. grobj format on NV04 is:
 *
 * word 0:
 *  - bits 0-7: class
 *  - bit 12: color key active
 *  - bit 13: clip rect active
 *  - bit 14: if set, destination surface is swizzled and taken from buffer 5
 *            [set by NV04_SWIZZLED_SURFACE], otherwise it's linear and taken
 *            from buffer 0 [set by NV04_CONTEXT_SURFACES_2D or
 *            NV03_CONTEXT_SURFACE_DST].
 *  - bits 15-17: 2d operation [aka patch config]
 *  - bit 24: patch valid [enables rendering using this object]
 *  - bit 25: surf3d valid [for tex_tri and multitex_tri only]
 * word 1:
 *  - bits 0-1: mono format
 *  - bits 8-13: color format
 *  - bits 16-31: DMA_NOTIFY instance
 * word 2:
 *  - bits 0-15: DMA_A instance
 *  - bits 16-31: DMA_B instance
 *
 * On NV05 it's:
 *
 * word 0:
 *  - bits 0-7: class
 *  - bit 12: color key active
 *  - bit 13: clip rect active
 *  - bit 14: if set, destination surface is swizzled and taken from buffer 5
 *            [set by NV04_SWIZZLED_SURFACE], otherwise it's linear and taken
 *            from buffer 0 [set by NV04_CONTEXT_SURFACES_2D or
 *            NV03_CONTEXT_SURFACE_DST].
 *  - bits 15-17: 2d operation [aka patch config]
 *  - bits 20-22: dither mode
 *  - bit 24: patch valid [enables rendering using this object]
 *  - bit 25: surface_dst/surface_color/surf2d/surf3d valid
 *  - bit 26: surface_src/surface_zeta valid
 *  - bit 27: pattern valid
 *  - bit 28: rop valid
 *  - bit 29: beta1 valid
 *  - bit 30: beta4 valid
 * word 1:
 *  - bits 0-1: mono format
 *  - bits 8-13: color format
 *  - bits 16-31: DMA_NOTIFY instance
 * word 2:
 *  - bits 0-15: DMA_A instance
 *  - bits 16-31: DMA_B instance
 *
 * NV05 will set/unset the relevant valid bits when you poke the relevant
 * object-binding methods with object of the proper type, or with the NULL
 * type. It'll only allow rendering using the grobj if all needed objects
 * are bound. The needed set of objects depends on selected operation: for
 * example rop object is needed by ROP_AND, but not by SRCCOPY_AND.
 *
 * NV04 doesn't have these methods implemented at all, and doesn't have the
 * relevant bits in grobj. Instead, it'll allow rendering whenever bit 24
 * is set. So we have to emulate them in software, internally keeping the
 * same bits as NV05 does. Since grobjs are aligned to 16 bytes on nv04,
 * but the last word isn't actually used for anything, we abuse it for this
 * purpose.
 *
 * Actually, NV05 can optionally check bit 24 too, but we disable this since
 * there's no use for it.
 *
 * For unknown reasons, NV04 implements surf3d binding in hardware as an
 * exception. Also for unknown reasons, NV04 doesn't implement the clipping
 * methods on the surf3d object, so we have to emulate them too.
 */

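/*
 * Editorial sketch, not part of the original driver (kept inside #if 0 so it
 * is never built): an illustration of how word 0 of an NV04 grobj, as laid
 * out in the comment above, decodes into its fields. The helper name and the
 * local variable names are hypothetical.
 */
#if 0
static inline void
nv04_grobj_decode_word0_example(u32 word0)
{
	u32 class  = word0 & 0x000000ff;	/* bits 0-7: object class */
	u32 chroma = word0 & 0x00001000;	/* bit 12: color key active */
	u32 clip   = word0 & 0x00002000;	/* bit 13: clip rect active */
	u32 swz    = word0 & 0x00004000;	/* bit 14: swizzled destination */
	u32 op     = (word0 >> 15) & 0x7;	/* bits 15-17: 2d operation */
	u32 valid  = word0 & 0x01000000;	/* bit 24: patch valid */

	/* nv04_graph_set_ctx1() below rewrites this word with mask/value
	 * pairs, e.g. (0x00038000, op << 15) to change the operation field. */
	(void)class; (void)chroma; (void)clip; (void)swz; (void)op; (void)valid;
}
#endif
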
static void
nv04_graph_set_ctx1(struct nouveau_object *object, u32 mask, u32 value)
{
	struct nv04_graph_priv *priv = (void *)object->engine;
	int subc = (nv_rd32(priv, NV04_PGRAPH_TRAPPED_ADDR) >> 13) & 0x7;
	u32 tmp;

	tmp = nv_ro32(object, 0x00);
	tmp &= ~mask;
	tmp |= value;
	nv_wo32(object, 0x00, tmp);

	nv_wr32(priv, NV04_PGRAPH_CTX_SWITCH1, tmp);
	nv_wr32(priv, NV04_PGRAPH_CTX_CACHE1 + (subc<<2), tmp);
}

static void
nv04_graph_set_ctx_val(struct nouveau_object *object, u32 mask, u32 value)
{
	int class, op, valid = 1;
	u32 tmp, ctx1;

	ctx1 = nv_ro32(object, 0x00);
	class = ctx1 & 0xff;
	op = (ctx1 >> 15) & 7;

	tmp = nv_ro32(object, 0x0c);
	tmp &= ~mask;
	tmp |= value;
	nv_wo32(object, 0x0c, tmp);

	/* check for valid surf2d/surf_dst/surf_color */
	if (!(tmp & 0x02000000))
		valid = 0;
	/* check for valid surf_src/surf_zeta */
	if ((class == 0x1f || class == 0x48) && !(tmp & 0x04000000))
		valid = 0;

	switch (op) {
	/* SRCCOPY_AND, SRCCOPY: no extra objects required */
	case 0:
	case 3:
		break;
	/* ROP_AND: requires pattern and rop */
	case 1:
		if (!(tmp & 0x18000000))
			valid = 0;
		break;
	/* BLEND_AND: requires beta1 */
	case 2:
		if (!(tmp & 0x20000000))
			valid = 0;
		break;
	/* SRCCOPY_PREMULT, BLEND_PREMULT: beta4 required */
	case 4:
	case 5:
		if (!(tmp & 0x40000000))
			valid = 0;
		break;
	}

	nv04_graph_set_ctx1(object, 0x01000000, valid << 24);
}

static int
nv04_graph_mthd_set_operation(struct nouveau_object *object, u32 mthd,
			      void *args, u32 size)
{
	u32 class = nv_ro32(object, 0) & 0xff;
	u32 data = *(u32 *)args;
	if (data > 5)
		return 1;
	/* Old versions of the objects only accept first three operations. */
	if (data > 2 && class < 0x40)
		return 1;
	nv04_graph_set_ctx1(object, 0x00038000, data << 15);
	/* changing operation changes set of objects needed for validation */
	nv04_graph_set_ctx_val(object, 0, 0);
	return 0;
}

static int
nv04_graph_mthd_surf3d_clip_h(struct nouveau_object *object, u32 mthd,
			      void *args, u32 size)
{
	struct nv04_graph_priv *priv = (void *)object->engine;
	u32 data = *(u32 *)args;
	u32 min = data & 0xffff, max;
	u32 w = data >> 16;
	if (min & 0x8000)
		/* too large */
		return 1;
	if (w & 0x8000)
		/* yes, it accepts negative for some reason. */
		w |= 0xffff0000;
	max = min + w;
	max &= 0x3ffff;
	nv_wr32(priv, 0x40053c, min);
	nv_wr32(priv, 0x400544, max);
	return 0;
}

static int
nv04_graph_mthd_surf3d_clip_v(struct nouveau_object *object, u32 mthd,
			      void *args, u32 size)
{
	struct nv04_graph_priv *priv = (void *)object->engine;
	u32 data = *(u32 *)args;
	u32 min = data & 0xffff, max;
	u32 w = data >> 16;
	if (min & 0x8000)
		/* too large */
		return 1;
	if (w & 0x8000)
		/* yes, it accepts negative for some reason. */
		w |= 0xffff0000;
	max = min + w;
	max &= 0x3ffff;
	nv_wr32(priv, 0x400540, min);
	nv_wr32(priv, 0x400548, max);
	return 0;
}

static u16
nv04_graph_mthd_bind_class(struct nouveau_object *object, u32 *args, u32 size)
{
	struct nouveau_instmem *imem = nouveau_instmem(object);
	u32 inst = *(u32 *)args << 4;
	return nv_ro32(imem, inst);
}

static int
nv04_graph_mthd_bind_surf2d(struct nouveau_object *object, u32 mthd,
			    void *args, u32 size)
{
	switch (nv04_graph_mthd_bind_class(object, args, size)) {
	case 0x30:
		nv04_graph_set_ctx1(object, 0x00004000, 0);
		nv04_graph_set_ctx_val(object, 0x02000000, 0);
		return 0;
	case 0x42:
		nv04_graph_set_ctx1(object, 0x00004000, 0);
		nv04_graph_set_ctx_val(object, 0x02000000, 0x02000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_surf2d_swzsurf(struct nouveau_object *object, u32 mthd,
				    void *args, u32 size)
{
	switch (nv04_graph_mthd_bind_class(object, args, size)) {
	case 0x30:
		nv04_graph_set_ctx1(object, 0x00004000, 0);
		nv04_graph_set_ctx_val(object, 0x02000000, 0);
		return 0;
	case 0x42:
		nv04_graph_set_ctx1(object, 0x00004000, 0);
		nv04_graph_set_ctx_val(object, 0x02000000, 0x02000000);
		return 0;
	case 0x52:
		nv04_graph_set_ctx1(object, 0x00004000, 0x00004000);
		nv04_graph_set_ctx_val(object, 0x02000000, 0x02000000);
		return 0;
	}
	return 1;
}

static int
nv01_graph_mthd_bind_patt(struct nouveau_object *object, u32 mthd,
			  void *args, u32 size)
{
	switch (nv04_graph_mthd_bind_class(object, args, size)) {
	case 0x30:
		nv04_graph_set_ctx_val(object, 0x08000000, 0);
		return 0;
	case 0x18:
		nv04_graph_set_ctx_val(object, 0x08000000, 0x08000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_patt(struct nouveau_object *object, u32 mthd,
			  void *args, u32 size)
{
	switch (nv04_graph_mthd_bind_class(object, args, size)) {
	case 0x30:
		nv04_graph_set_ctx_val(object, 0x08000000, 0);
		return 0;
	case 0x44:
		nv04_graph_set_ctx_val(object, 0x08000000, 0x08000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_rop(struct nouveau_object *object, u32 mthd,
			 void *args, u32 size)
{
	switch (nv04_graph_mthd_bind_class(object, args, size)) {
	case 0x30:
		nv04_graph_set_ctx_val(object, 0x10000000, 0);
		return 0;
	case 0x43:
		nv04_graph_set_ctx_val(object, 0x10000000, 0x10000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_beta1(struct nouveau_object *object, u32 mthd,
			   void *args, u32 size)
{
	switch (nv04_graph_mthd_bind_class(object, args, size)) {
	case 0x30:
		nv04_graph_set_ctx_val(object, 0x20000000, 0);
		return 0;
	case 0x12:
		nv04_graph_set_ctx_val(object, 0x20000000, 0x20000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_beta4(struct nouveau_object *object, u32 mthd,
			   void *args, u32 size)
{
	switch (nv04_graph_mthd_bind_class(object, args, size)) {
	case 0x30:
		nv04_graph_set_ctx_val(object, 0x40000000, 0);
		return 0;
	case 0x72:
		nv04_graph_set_ctx_val(object, 0x40000000, 0x40000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_surf_dst(struct nouveau_object *object, u32 mthd,
			      void *args, u32 size)
{
	switch (nv04_graph_mthd_bind_class(object, args, size)) {
	case 0x30:
		nv04_graph_set_ctx_val(object, 0x02000000, 0);
		return 0;
	case 0x58:
		nv04_graph_set_ctx_val(object, 0x02000000, 0x02000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_surf_src(struct nouveau_object *object, u32 mthd,
			      void *args, u32 size)
{
	switch (nv04_graph_mthd_bind_class(object, args, size)) {
	case 0x30:
		nv04_graph_set_ctx_val(object, 0x04000000, 0);
		return 0;
	case 0x59:
		nv04_graph_set_ctx_val(object, 0x04000000, 0x04000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_surf_color(struct nouveau_object *object, u32 mthd,
				void *args, u32 size)
{
	switch (nv04_graph_mthd_bind_class(object, args, size)) {
	case 0x30:
		nv04_graph_set_ctx_val(object, 0x02000000, 0);
		return 0;
	case 0x5a:
		nv04_graph_set_ctx_val(object, 0x02000000, 0x02000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_surf_zeta(struct nouveau_object *object, u32 mthd,
			       void *args, u32 size)
{
	switch (nv04_graph_mthd_bind_class(object, args, size)) {
	case 0x30:
		nv04_graph_set_ctx_val(object, 0x04000000, 0);
		return 0;
	case 0x5b:
		nv04_graph_set_ctx_val(object, 0x04000000, 0x04000000);
		return 0;
	}
	return 1;
}

static int
nv01_graph_mthd_bind_clip(struct nouveau_object *object, u32 mthd,
			  void *args, u32 size)
{
	switch (nv04_graph_mthd_bind_class(object, args, size)) {
	case 0x30:
		nv04_graph_set_ctx1(object, 0x2000, 0);
		return 0;
	case 0x19:
		nv04_graph_set_ctx1(object, 0x2000, 0x2000);
		return 0;
	}
	return 1;
}

static int
nv01_graph_mthd_bind_chroma(struct nouveau_object *object, u32 mthd,
			    void *args, u32 size)
{
	switch (nv04_graph_mthd_bind_class(object, args, size)) {
	case 0x30:
		nv04_graph_set_ctx1(object, 0x1000, 0);
		return 0;
	/* Yes, for some reason even the old versions of objects
	 * accept 0x57 and not 0x17. Consistency be damned.
	 */
	case 0x57:
		nv04_graph_set_ctx1(object, 0x1000, 0x1000);
		return 0;
	}
	return 1;
}

static struct nouveau_omthds
nv03_graph_gdi_omthds[] = {
	{ 0x0184, nv01_graph_mthd_bind_patt },
	{ 0x0188, nv04_graph_mthd_bind_rop },
	{ 0x018c, nv04_graph_mthd_bind_beta1 },
	{ 0x0190, nv04_graph_mthd_bind_surf_dst },
	{ 0x02fc, nv04_graph_mthd_set_operation },
	{}
};

static struct nouveau_omthds
nv04_graph_gdi_omthds[] = {
	{ 0x0188, nv04_graph_mthd_bind_patt },
	{ 0x018c, nv04_graph_mthd_bind_rop },
	{ 0x0190, nv04_graph_mthd_bind_beta1 },
	{ 0x0194, nv04_graph_mthd_bind_beta4 },
	{ 0x0198, nv04_graph_mthd_bind_surf2d },
	{ 0x02fc, nv04_graph_mthd_set_operation },
	{}
};

static struct nouveau_omthds
nv01_graph_blit_omthds[] = {
	{ 0x0184, nv01_graph_mthd_bind_chroma },
	{ 0x0188, nv01_graph_mthd_bind_clip },
	{ 0x018c, nv01_graph_mthd_bind_patt },
	{ 0x0190, nv04_graph_mthd_bind_rop },
	{ 0x0194, nv04_graph_mthd_bind_beta1 },
	{ 0x0198, nv04_graph_mthd_bind_surf_dst },
	{ 0x019c, nv04_graph_mthd_bind_surf_src },
	{ 0x02fc, nv04_graph_mthd_set_operation },
	{}
};

static struct nouveau_omthds
nv04_graph_blit_omthds[] = {
	{ 0x0184, nv01_graph_mthd_bind_chroma },
	{ 0x0188, nv01_graph_mthd_bind_clip },
	{ 0x018c, nv04_graph_mthd_bind_patt },
	{ 0x0190, nv04_graph_mthd_bind_rop },
	{ 0x0194, nv04_graph_mthd_bind_beta1 },
	{ 0x0198, nv04_graph_mthd_bind_beta4 },
	{ 0x019c, nv04_graph_mthd_bind_surf2d },
	{ 0x02fc, nv04_graph_mthd_set_operation },
	{}
};

static struct nouveau_omthds
nv04_graph_iifc_omthds[] = {
	{ 0x0188, nv01_graph_mthd_bind_chroma },
	{ 0x018c, nv01_graph_mthd_bind_clip },
	{ 0x0190, nv04_graph_mthd_bind_patt },
	{ 0x0194, nv04_graph_mthd_bind_rop },
	{ 0x0198, nv04_graph_mthd_bind_beta1 },
	{ 0x019c, nv04_graph_mthd_bind_beta4 },
	{ 0x01a0, nv04_graph_mthd_bind_surf2d_swzsurf },
	{ 0x03e4, nv04_graph_mthd_set_operation },
	{}
};

static struct nouveau_omthds
nv01_graph_ifc_omthds[] = {
	{ 0x0184, nv01_graph_mthd_bind_chroma },
	{ 0x0188, nv01_graph_mthd_bind_clip },
	{ 0x018c, nv01_graph_mthd_bind_patt },
	{ 0x0190, nv04_graph_mthd_bind_rop },
	{ 0x0194, nv04_graph_mthd_bind_beta1 },
	{ 0x0198, nv04_graph_mthd_bind_surf_dst },
	{ 0x02fc, nv04_graph_mthd_set_operation },
	{}
};

static struct nouveau_omthds
nv04_graph_ifc_omthds[] = {
	{ 0x0184, nv01_graph_mthd_bind_chroma },
	{ 0x0188, nv01_graph_mthd_bind_clip },
	{ 0x018c, nv04_graph_mthd_bind_patt },
	{ 0x0190, nv04_graph_mthd_bind_rop },
	{ 0x0194, nv04_graph_mthd_bind_beta1 },
	{ 0x0198, nv04_graph_mthd_bind_beta4 },
	{ 0x019c, nv04_graph_mthd_bind_surf2d },
	{ 0x02fc, nv04_graph_mthd_set_operation },
	{}
};

static struct nouveau_omthds
nv03_graph_sifc_omthds[] = {
	{ 0x0184, nv01_graph_mthd_bind_chroma },
	{ 0x0188, nv01_graph_mthd_bind_patt },
	{ 0x018c, nv04_graph_mthd_bind_rop },
	{ 0x0190, nv04_graph_mthd_bind_beta1 },
	{ 0x0194, nv04_graph_mthd_bind_surf_dst },
	{ 0x02fc, nv04_graph_mthd_set_operation },
	{}
};

static struct nouveau_omthds
nv04_graph_sifc_omthds[] = {
	{ 0x0184, nv01_graph_mthd_bind_chroma },
	{ 0x0188, nv04_graph_mthd_bind_patt },
	{ 0x018c, nv04_graph_mthd_bind_rop },
	{ 0x0190, nv04_graph_mthd_bind_beta1 },
	{ 0x0194, nv04_graph_mthd_bind_beta4 },
	{ 0x0198, nv04_graph_mthd_bind_surf2d },
	{ 0x02fc, nv04_graph_mthd_set_operation },
	{}
};

static struct nouveau_omthds
nv03_graph_sifm_omthds[] = {
	{ 0x0188, nv01_graph_mthd_bind_patt },
	{ 0x018c, nv04_graph_mthd_bind_rop },
	{ 0x0190, nv04_graph_mthd_bind_beta1 },
	{ 0x0194, nv04_graph_mthd_bind_surf_dst },
	{ 0x0304, nv04_graph_mthd_set_operation },
	{}
};

static struct nouveau_omthds
nv04_graph_sifm_omthds[] = {
	{ 0x0188, nv04_graph_mthd_bind_patt },
	{ 0x018c, nv04_graph_mthd_bind_rop },
	{ 0x0190, nv04_graph_mthd_bind_beta1 },
	{ 0x0194, nv04_graph_mthd_bind_beta4 },
	{ 0x0198, nv04_graph_mthd_bind_surf2d },
	{ 0x0304, nv04_graph_mthd_set_operation },
	{}
};

static struct nouveau_omthds
nv04_graph_surf3d_omthds[] = {
	{ 0x02f8, nv04_graph_mthd_surf3d_clip_h },
	{ 0x02fc, nv04_graph_mthd_surf3d_clip_v },
	{}
};

static struct nouveau_omthds
nv03_graph_ttri_omthds[] = {
	{ 0x0188, nv01_graph_mthd_bind_clip },
	{ 0x018c, nv04_graph_mthd_bind_surf_color },
	{ 0x0190, nv04_graph_mthd_bind_surf_zeta },
	{}
};

static struct nouveau_omthds
nv01_graph_prim_omthds[] = {
	{ 0x0184, nv01_graph_mthd_bind_clip },
	{ 0x0188, nv01_graph_mthd_bind_patt },
	{ 0x018c, nv04_graph_mthd_bind_rop },
	{ 0x0190, nv04_graph_mthd_bind_beta1 },
	{ 0x0194, nv04_graph_mthd_bind_surf_dst },
	{ 0x02fc, nv04_graph_mthd_set_operation },
	{}
};

static struct nouveau_omthds
nv04_graph_prim_omthds[] = {
	{ 0x0184, nv01_graph_mthd_bind_clip },
	{ 0x0188, nv04_graph_mthd_bind_patt },
	{ 0x018c, nv04_graph_mthd_bind_rop },
	{ 0x0190, nv04_graph_mthd_bind_beta1 },
	{ 0x0194, nv04_graph_mthd_bind_beta4 },
	{ 0x0198, nv04_graph_mthd_bind_surf2d },
	{ 0x02fc, nv04_graph_mthd_set_operation },
	{}
};

static int
nv04_graph_object_ctor(struct nouveau_object *parent,
		       struct nouveau_object *engine,
		       struct nouveau_oclass *oclass, void *data, u32 size,
		       struct nouveau_object **pobject)
{
	struct nouveau_gpuobj *obj;
	int ret;

	ret = nouveau_gpuobj_create(parent, engine, oclass, 0, parent,
				    16, 16, 0, &obj);
	*pobject = nv_object(obj);
	if (ret)
		return ret;

	nv_wo32(obj, 0x00, nv_mclass(obj));
#ifdef __BIG_ENDIAN
	nv_mo32(obj, 0x00, 0x00080000, 0x00080000);
#endif
	nv_wo32(obj, 0x04, 0x00000000);
	nv_wo32(obj, 0x08, 0x00000000);
	nv_wo32(obj, 0x0c, 0x00000000);
	return 0;
}

struct nouveau_ofuncs
nv04_graph_ofuncs = {
	.ctor = nv04_graph_object_ctor,
	.dtor = _nouveau_gpuobj_dtor,
	.init = _nouveau_gpuobj_init,
	.fini = _nouveau_gpuobj_fini,
	.rd32 = _nouveau_gpuobj_rd32,
	.wr32 = _nouveau_gpuobj_wr32,
};

static struct nouveau_oclass
nv04_graph_sclass[] = {
	{ 0x0012, &nv04_graph_ofuncs }, /* beta1 */
	{ 0x0017, &nv04_graph_ofuncs }, /* chroma */
	{ 0x0018, &nv04_graph_ofuncs }, /* pattern (nv01) */
	{ 0x0019, &nv04_graph_ofuncs }, /* clip */
	{ 0x001c, &nv04_graph_ofuncs, nv01_graph_prim_omthds }, /* line */
	{ 0x001d, &nv04_graph_ofuncs, nv01_graph_prim_omthds }, /* tri */
	{ 0x001e, &nv04_graph_ofuncs, nv01_graph_prim_omthds }, /* rect */
	{ 0x001f, &nv04_graph_ofuncs, nv01_graph_blit_omthds },
	{ 0x0021, &nv04_graph_ofuncs, nv01_graph_ifc_omthds },
	{ 0x0030, &nv04_graph_ofuncs }, /* null */
	{ 0x0036, &nv04_graph_ofuncs, nv03_graph_sifc_omthds },
	{ 0x0037, &nv04_graph_ofuncs, nv03_graph_sifm_omthds },
	{ 0x0038, &nv04_graph_ofuncs }, /* dvd subpicture */
	{ 0x0039, &nv04_graph_ofuncs }, /* m2mf */
	{ 0x0042, &nv04_graph_ofuncs }, /* surf2d */
	{ 0x0043, &nv04_graph_ofuncs }, /* rop */
	{ 0x0044, &nv04_graph_ofuncs }, /* pattern */
	{ 0x0048, &nv04_graph_ofuncs, nv03_graph_ttri_omthds },
	{ 0x004a, &nv04_graph_ofuncs, nv04_graph_gdi_omthds },
	{ 0x004b, &nv04_graph_ofuncs, nv03_graph_gdi_omthds },
	{ 0x0052, &nv04_graph_ofuncs }, /* swzsurf */
	{ 0x0053, &nv04_graph_ofuncs, nv04_graph_surf3d_omthds },
	{ 0x0054, &nv04_graph_ofuncs }, /* ttri */
	{ 0x0055, &nv04_graph_ofuncs }, /* mtri */
	{ 0x0057, &nv04_graph_ofuncs }, /* chroma */
	{ 0x0058, &nv04_graph_ofuncs }, /* surf_dst */
	{ 0x0059, &nv04_graph_ofuncs }, /* surf_src */
	{ 0x005a, &nv04_graph_ofuncs }, /* surf_color */
	{ 0x005b, &nv04_graph_ofuncs }, /* surf_zeta */
	{ 0x005c, &nv04_graph_ofuncs, nv04_graph_prim_omthds }, /* line */
	{ 0x005d, &nv04_graph_ofuncs, nv04_graph_prim_omthds }, /* tri */
	{ 0x005e, &nv04_graph_ofuncs, nv04_graph_prim_omthds }, /* rect */
	{ 0x005f, &nv04_graph_ofuncs, nv04_graph_blit_omthds },
	{ 0x0060, &nv04_graph_ofuncs, nv04_graph_iifc_omthds },
	{ 0x0061, &nv04_graph_ofuncs, nv04_graph_ifc_omthds },
	{ 0x0064, &nv04_graph_ofuncs }, /* iifc (nv05) */
	{ 0x0065, &nv04_graph_ofuncs }, /* ifc (nv05) */
	{ 0x0066, &nv04_graph_ofuncs }, /* sifc (nv05) */
	{ 0x0072, &nv04_graph_ofuncs }, /* beta4 */
	{ 0x0076, &nv04_graph_ofuncs, nv04_graph_sifc_omthds },
	{ 0x0077, &nv04_graph_ofuncs, nv04_graph_sifm_omthds },
	{},
};

/*******************************************************************************
 * PGRAPH context
 ******************************************************************************/

static struct nv04_graph_chan *
nv04_graph_channel(struct nv04_graph_priv *priv)
{
	struct nv04_graph_chan *chan = NULL;
	if (nv_rd32(priv, NV04_PGRAPH_CTX_CONTROL) & 0x00010000) {
		int chid = nv_rd32(priv, NV04_PGRAPH_CTX_USER) >> 24;
		if (chid < ARRAY_SIZE(priv->chan))
			chan = priv->chan[chid];
	}
	return chan;
}

static int
nv04_graph_load_context(struct nv04_graph_chan *chan, int chid)
{
	struct nv04_graph_priv *priv = nv04_graph_priv(chan);
	int i;

	for (i = 0; i < ARRAY_SIZE(nv04_graph_ctx_regs); i++)
		nv_wr32(priv, nv04_graph_ctx_regs[i], chan->nv04[i]);

	nv_wr32(priv, NV04_PGRAPH_CTX_CONTROL, 0x10010100);
	nv_mask(priv, NV04_PGRAPH_CTX_USER, 0xff000000, chid << 24);
	nv_mask(priv, NV04_PGRAPH_FFINTFC_ST2, 0xfff00000, 0x00000000);
	return 0;
}

static int
nv04_graph_unload_context(struct nv04_graph_chan *chan)
{
	struct nv04_graph_priv *priv = nv04_graph_priv(chan);
	int i;

	for (i = 0; i < ARRAY_SIZE(nv04_graph_ctx_regs); i++)
		chan->nv04[i] = nv_rd32(priv, nv04_graph_ctx_regs[i]);

	nv_wr32(priv, NV04_PGRAPH_CTX_CONTROL, 0x10000000);
	nv_mask(priv, NV04_PGRAPH_CTX_USER, 0xff000000, 0x0f000000);
	return 0;
}

static void
nv04_graph_context_switch(struct nv04_graph_priv *priv)
{
	struct nv04_graph_chan *prev = NULL;
	struct nv04_graph_chan *next = NULL;
	unsigned long flags;
	int chid;

	spin_lock_irqsave(&priv->lock, flags);
	nv04_graph_idle(priv);

	/* If previous context is valid, we need to save it */
	prev = nv04_graph_channel(priv);
	if (prev)
		nv04_graph_unload_context(prev);

	/* load context for next channel */
	chid = (nv_rd32(priv, NV04_PGRAPH_TRAPPED_ADDR) >> 24) & 0x0f;
	next = priv->chan[chid];
	if (next)
		nv04_graph_load_context(next, chid);

	spin_unlock_irqrestore(&priv->lock, flags);
}

static u32 *ctx_reg(struct nv04_graph_chan *chan, u32 reg)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(nv04_graph_ctx_regs); i++) {
		if (nv04_graph_ctx_regs[i] == reg)
			return &chan->nv04[i];
	}

	return NULL;
}

static int
nv04_graph_context_ctor(struct nouveau_object *parent,
			struct nouveau_object *engine,
			struct nouveau_oclass *oclass, void *data, u32 size,
			struct nouveau_object **pobject)
{
	struct nouveau_fifo_chan *fifo = (void *)parent;
	struct nv04_graph_priv *priv = (void *)engine;
	struct nv04_graph_chan *chan;
	unsigned long flags;
	int ret;

	ret = nouveau_object_create(parent, engine, oclass, 0, &chan);
	*pobject = nv_object(chan);
	if (ret)
		return ret;

	spin_lock_irqsave(&priv->lock, flags);
	if (priv->chan[fifo->chid]) {
		*pobject = nv_object(priv->chan[fifo->chid]);
		atomic_inc(&(*pobject)->refcount);
		spin_unlock_irqrestore(&priv->lock, flags);
		nouveau_object_destroy(&chan->base);
		return 1;
	}

	*ctx_reg(chan, NV04_PGRAPH_DEBUG_3) = 0xfad4ff31;

	priv->chan[fifo->chid] = chan;
	chan->chid = fifo->chid;
	spin_unlock_irqrestore(&priv->lock, flags);
	return 0;
}

static void
nv04_graph_context_dtor(struct nouveau_object *object)
{
	struct nv04_graph_priv *priv = (void *)object->engine;
	struct nv04_graph_chan *chan = (void *)object;
	unsigned long flags;

	spin_lock_irqsave(&priv->lock, flags);
	priv->chan[chan->chid] = NULL;
	spin_unlock_irqrestore(&priv->lock, flags);

	nouveau_object_destroy(&chan->base);
}

static int
nv04_graph_context_fini(struct nouveau_object *object, bool suspend)
{
	struct nv04_graph_priv *priv = (void *)object->engine;
	struct nv04_graph_chan *chan = (void *)object;
	unsigned long flags;

	spin_lock_irqsave(&priv->lock, flags);
	nv_mask(priv, NV04_PGRAPH_FIFO, 0x00000001, 0x00000000);
	if (nv04_graph_channel(priv) == chan)
		nv04_graph_unload_context(chan);
	nv_mask(priv, NV04_PGRAPH_FIFO, 0x00000001, 0x00000001);
	spin_unlock_irqrestore(&priv->lock, flags);

	return nouveau_object_fini(&chan->base, suspend);
}

static struct nouveau_oclass
nv04_graph_cclass = {
	.handle = NV_ENGCTX(GR, 0x04),
	.ofuncs = &(struct nouveau_ofuncs) {
		.ctor = nv04_graph_context_ctor,
		.dtor = nv04_graph_context_dtor,
		.init = nouveau_object_init,
		.fini = nv04_graph_context_fini,
	},
};

/*******************************************************************************
 * PGRAPH engine/subdev functions
 ******************************************************************************/

bool
nv04_graph_idle(void *obj)
{
	struct nouveau_graph *graph = nouveau_graph(obj);
	u32 mask = 0xffffffff;

	if (nv_device(obj)->card_type == NV_40)
		mask &= ~NV40_PGRAPH_STATUS_SYNC_STALL;

	if (!nv_wait(graph, NV04_PGRAPH_STATUS, mask, 0)) {
		nv_error(graph, "idle timed out with status 0x%08x\n",
			 nv_rd32(graph, NV04_PGRAPH_STATUS));
		return false;
	}

	return true;
}

static struct nouveau_bitfield
nv04_graph_intr_name[] = {
	{ NV_PGRAPH_INTR_NOTIFY, "NOTIFY" },
	{}
};

static struct nouveau_bitfield
nv04_graph_nstatus[] = {
	{ NV04_PGRAPH_NSTATUS_STATE_IN_USE, "STATE_IN_USE" },
	{ NV04_PGRAPH_NSTATUS_INVALID_STATE, "INVALID_STATE" },
	{ NV04_PGRAPH_NSTATUS_BAD_ARGUMENT, "BAD_ARGUMENT" },
	{ NV04_PGRAPH_NSTATUS_PROTECTION_FAULT, "PROTECTION_FAULT" },
	{}
};

struct nouveau_bitfield
nv04_graph_nsource[] = {
	{ NV03_PGRAPH_NSOURCE_NOTIFICATION, "NOTIFICATION" },
	{ NV03_PGRAPH_NSOURCE_DATA_ERROR, "DATA_ERROR" },
	{ NV03_PGRAPH_NSOURCE_PROTECTION_ERROR, "PROTECTION_ERROR" },
	{ NV03_PGRAPH_NSOURCE_RANGE_EXCEPTION, "RANGE_EXCEPTION" },
	{ NV03_PGRAPH_NSOURCE_LIMIT_COLOR, "LIMIT_COLOR" },
	{ NV03_PGRAPH_NSOURCE_LIMIT_ZETA, "LIMIT_ZETA" },
	{ NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD, "ILLEGAL_MTHD" },
	{ NV03_PGRAPH_NSOURCE_DMA_R_PROTECTION, "DMA_R_PROTECTION" },
	{ NV03_PGRAPH_NSOURCE_DMA_W_PROTECTION, "DMA_W_PROTECTION" },
	{ NV03_PGRAPH_NSOURCE_FORMAT_EXCEPTION, "FORMAT_EXCEPTION" },
	{ NV03_PGRAPH_NSOURCE_PATCH_EXCEPTION, "PATCH_EXCEPTION" },
	{ NV03_PGRAPH_NSOURCE_STATE_INVALID, "STATE_INVALID" },
	{ NV03_PGRAPH_NSOURCE_DOUBLE_NOTIFY, "DOUBLE_NOTIFY" },
	{ NV03_PGRAPH_NSOURCE_NOTIFY_IN_USE, "NOTIFY_IN_USE" },
	{ NV03_PGRAPH_NSOURCE_METHOD_CNT, "METHOD_CNT" },
	{ NV03_PGRAPH_NSOURCE_BFR_NOTIFICATION, "BFR_NOTIFICATION" },
	{ NV03_PGRAPH_NSOURCE_DMA_VTX_PROTECTION, "DMA_VTX_PROTECTION" },
	{ NV03_PGRAPH_NSOURCE_DMA_WIDTH_A, "DMA_WIDTH_A" },
	{ NV03_PGRAPH_NSOURCE_DMA_WIDTH_B, "DMA_WIDTH_B" },
	{}
};

static void
nv04_graph_intr(struct nouveau_subdev *subdev)
{
	struct nv04_graph_priv *priv = (void *)subdev;
	struct nv04_graph_chan *chan = NULL;
	struct nouveau_namedb *namedb = NULL;
	struct nouveau_handle *handle = NULL;
	u32 stat = nv_rd32(priv, NV03_PGRAPH_INTR);
	u32 nsource = nv_rd32(priv, NV03_PGRAPH_NSOURCE);
	u32 nstatus = nv_rd32(priv, NV03_PGRAPH_NSTATUS);
	u32 addr = nv_rd32(priv, NV04_PGRAPH_TRAPPED_ADDR);
	u32 chid = (addr & 0x0f000000) >> 24;
	u32 subc = (addr & 0x0000e000) >> 13;
	u32 mthd = (addr & 0x00001ffc);
	u32 data = nv_rd32(priv, NV04_PGRAPH_TRAPPED_DATA);
	u32 class = nv_rd32(priv, 0x400180 + subc * 4) & 0xff;
	u32 inst = (nv_rd32(priv, 0x40016c) & 0xffff) << 4;
	u32 show = stat;
	unsigned long flags;

	spin_lock_irqsave(&priv->lock, flags);
	chan = priv->chan[chid];
	if (chan)
		namedb = (void *)nv_pclass(nv_object(chan), NV_NAMEDB_CLASS);
	spin_unlock_irqrestore(&priv->lock, flags);

	if (stat & NV_PGRAPH_INTR_NOTIFY) {
		if (chan && (nsource & NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD)) {
			handle = nouveau_namedb_get_vinst(namedb, inst);
			if (handle && !nv_call(handle->object, mthd, data))
				show &= ~NV_PGRAPH_INTR_NOTIFY;
		}
	}

	if (stat & NV_PGRAPH_INTR_CONTEXT_SWITCH) {
		nv_wr32(priv, NV03_PGRAPH_INTR, NV_PGRAPH_INTR_CONTEXT_SWITCH);
		stat &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
		show &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
		nv04_graph_context_switch(priv);
	}

	nv_wr32(priv, NV03_PGRAPH_INTR, stat);
	nv_wr32(priv, NV04_PGRAPH_FIFO, 0x00000001);

	if (show) {
		nv_error(priv, "");
		nouveau_bitfield_print(nv04_graph_intr_name, show);
		printk(" nsource:");
		nouveau_bitfield_print(nv04_graph_nsource, nsource);
		printk(" nstatus:");
		nouveau_bitfield_print(nv04_graph_nstatus, nstatus);
		printk("\n");
		nv_error(priv, "ch %d/%d class 0x%04x "
			       "mthd 0x%04x data 0x%08x\n",
			 chid, subc, class, mthd, data);
	}

	nouveau_namedb_put(handle);
}

static int
nv04_graph_ctor(struct nouveau_object *parent, struct nouveau_object *engine,
		struct nouveau_oclass *oclass, void *data, u32 size,
		struct nouveau_object **pobject)
{
	struct nv04_graph_priv *priv;
	int ret;

	ret = nouveau_graph_create(parent, engine, oclass, true, &priv);
	*pobject = nv_object(priv);
	if (ret)
		return ret;

	nv_subdev(priv)->unit = 0x00001000;
	nv_subdev(priv)->intr = nv04_graph_intr;
	nv_engine(priv)->cclass = &nv04_graph_cclass;
	nv_engine(priv)->sclass = nv04_graph_sclass;
	spin_lock_init(&priv->lock);
	return 0;
}

static int
nv04_graph_init(struct nouveau_object *object)
{
	struct nouveau_engine *engine = nv_engine(object);
	struct nv04_graph_priv *priv = (void *)engine;
	int ret;

	ret = nouveau_graph_init(&priv->base);
	if (ret)
		return ret;

	/* Enable PGRAPH interrupts */
	nv_wr32(priv, NV03_PGRAPH_INTR, 0xFFFFFFFF);
	nv_wr32(priv, NV03_PGRAPH_INTR_EN, 0xFFFFFFFF);

	nv_wr32(priv, NV04_PGRAPH_VALID1, 0);
	nv_wr32(priv, NV04_PGRAPH_VALID2, 0);
	/*nv_wr32(priv, NV04_PGRAPH_DEBUG_0, 0x000001FF);
	nv_wr32(priv, NV04_PGRAPH_DEBUG_0, 0x001FFFFF);*/
	nv_wr32(priv, NV04_PGRAPH_DEBUG_0, 0x1231c000);
	/*1231C000 blob, 001 haiku*/
	/*V_WRITE(NV04_PGRAPH_DEBUG_1, 0xf2d91100);*/
	nv_wr32(priv, NV04_PGRAPH_DEBUG_1, 0x72111100);
	/*0x72111100 blob , 01 haiku*/
	/*nv_wr32(priv, NV04_PGRAPH_DEBUG_2, 0x11d5f870);*/
	nv_wr32(priv, NV04_PGRAPH_DEBUG_2, 0x11d5f071);
	/*haiku same*/

	/*nv_wr32(priv, NV04_PGRAPH_DEBUG_3, 0xfad4ff31);*/
	nv_wr32(priv, NV04_PGRAPH_DEBUG_3, 0xf0d4ff31);
	/*haiku and blob 10d4*/

	nv_wr32(priv, NV04_PGRAPH_STATE, 0xFFFFFFFF);
	nv_wr32(priv, NV04_PGRAPH_CTX_CONTROL, 0x10000100);
	nv_mask(priv, NV04_PGRAPH_CTX_USER, 0xff000000, 0x0f000000);

	/* These don't belong here, they're part of a per-channel context */
	nv_wr32(priv, NV04_PGRAPH_PATTERN_SHAPE, 0x00000000);
	nv_wr32(priv, NV04_PGRAPH_BETA_AND, 0xFFFFFFFF);
	return 0;
}

struct nouveau_oclass
nv04_graph_oclass = {
	.handle = NV_ENGINE(GR, 0x04),
	.ofuncs = &(struct nouveau_ofuncs) {
		.ctor = nv04_graph_ctor,
		.dtor = _nouveau_graph_dtor,
		.init = nv04_graph_init,
		.fini = _nouveau_graph_fini,
	},
};