/*	$NetBSD: nouveau_engine_graph_nv04.c,v 1.1.1.1 2014/08/06 12:36:26 riastradh Exp $	*/

/*
 * Copyright 2007 Stephane Marchesin
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

#include <sys/cdefs.h>
__KERNEL_RCSID(0, "$NetBSD: nouveau_engine_graph_nv04.c,v 1.1.1.1 2014/08/06 12:36:26 riastradh Exp $");

#include <core/client.h>
#include <core/os.h>
#include <core/class.h>
#include <core/handle.h>
#include <core/namedb.h>

#include <subdev/fb.h>
#include <subdev/instmem.h>
#include <subdev/timer.h>

#include <engine/fifo.h>
#include <engine/graph.h>

#include "regs.h"

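/*
 * PGRAPH register offsets that make up a channel's graphics context.
 * nv04_graph_load_context()/nv04_graph_unload_context() below copy these
 * registers to and from the per-channel chan->nv04[] shadow array.
 */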
static u32
nv04_graph_ctx_regs[] = {
	0x0040053c,
	0x00400544,
	0x00400540,
	0x00400548,
	NV04_PGRAPH_CTX_SWITCH1,
	NV04_PGRAPH_CTX_SWITCH2,
	NV04_PGRAPH_CTX_SWITCH3,
	NV04_PGRAPH_CTX_SWITCH4,
	NV04_PGRAPH_CTX_CACHE1,
	NV04_PGRAPH_CTX_CACHE2,
	NV04_PGRAPH_CTX_CACHE3,
	NV04_PGRAPH_CTX_CACHE4,
	0x00400184,
	0x004001a4,
	0x004001c4,
	0x004001e4,
	0x00400188,
	0x004001a8,
	0x004001c8,
	0x004001e8,
	0x0040018c,
	0x004001ac,
	0x004001cc,
	0x004001ec,
	0x00400190,
	0x004001b0,
	0x004001d0,
	0x004001f0,
	0x00400194,
	0x004001b4,
	0x004001d4,
	0x004001f4,
	0x00400198,
	0x004001b8,
	0x004001d8,
	0x004001f8,
	0x0040019c,
	0x004001bc,
	0x004001dc,
	0x004001fc,
	0x00400174,
	NV04_PGRAPH_DMA_START_0,
	NV04_PGRAPH_DMA_START_1,
	NV04_PGRAPH_DMA_LENGTH,
	NV04_PGRAPH_DMA_MISC,
	NV04_PGRAPH_DMA_PITCH,
	NV04_PGRAPH_BOFFSET0,
	NV04_PGRAPH_BBASE0,
	NV04_PGRAPH_BLIMIT0,
	NV04_PGRAPH_BOFFSET1,
	NV04_PGRAPH_BBASE1,
	NV04_PGRAPH_BLIMIT1,
	NV04_PGRAPH_BOFFSET2,
	NV04_PGRAPH_BBASE2,
	NV04_PGRAPH_BLIMIT2,
	NV04_PGRAPH_BOFFSET3,
	NV04_PGRAPH_BBASE3,
	NV04_PGRAPH_BLIMIT3,
	NV04_PGRAPH_BOFFSET4,
	NV04_PGRAPH_BBASE4,
	NV04_PGRAPH_BLIMIT4,
	NV04_PGRAPH_BOFFSET5,
	NV04_PGRAPH_BBASE5,
	NV04_PGRAPH_BLIMIT5,
	NV04_PGRAPH_BPITCH0,
	NV04_PGRAPH_BPITCH1,
	NV04_PGRAPH_BPITCH2,
	NV04_PGRAPH_BPITCH3,
	NV04_PGRAPH_BPITCH4,
	NV04_PGRAPH_SURFACE,
	NV04_PGRAPH_STATE,
	NV04_PGRAPH_BSWIZZLE2,
	NV04_PGRAPH_BSWIZZLE5,
	NV04_PGRAPH_BPIXEL,
	NV04_PGRAPH_NOTIFY,
	NV04_PGRAPH_PATT_COLOR0,
	NV04_PGRAPH_PATT_COLOR1,
	NV04_PGRAPH_PATT_COLORRAM+0x00,
	NV04_PGRAPH_PATT_COLORRAM+0x04,
	NV04_PGRAPH_PATT_COLORRAM+0x08,
	NV04_PGRAPH_PATT_COLORRAM+0x0c,
	NV04_PGRAPH_PATT_COLORRAM+0x10,
	NV04_PGRAPH_PATT_COLORRAM+0x14,
	NV04_PGRAPH_PATT_COLORRAM+0x18,
	NV04_PGRAPH_PATT_COLORRAM+0x1c,
	NV04_PGRAPH_PATT_COLORRAM+0x20,
	NV04_PGRAPH_PATT_COLORRAM+0x24,
	NV04_PGRAPH_PATT_COLORRAM+0x28,
	NV04_PGRAPH_PATT_COLORRAM+0x2c,
	NV04_PGRAPH_PATT_COLORRAM+0x30,
	NV04_PGRAPH_PATT_COLORRAM+0x34,
	NV04_PGRAPH_PATT_COLORRAM+0x38,
	NV04_PGRAPH_PATT_COLORRAM+0x3c,
	NV04_PGRAPH_PATT_COLORRAM+0x40,
	NV04_PGRAPH_PATT_COLORRAM+0x44,
	NV04_PGRAPH_PATT_COLORRAM+0x48,
	NV04_PGRAPH_PATT_COLORRAM+0x4c,
	NV04_PGRAPH_PATT_COLORRAM+0x50,
	NV04_PGRAPH_PATT_COLORRAM+0x54,
	NV04_PGRAPH_PATT_COLORRAM+0x58,
	NV04_PGRAPH_PATT_COLORRAM+0x5c,
	NV04_PGRAPH_PATT_COLORRAM+0x60,
	NV04_PGRAPH_PATT_COLORRAM+0x64,
	NV04_PGRAPH_PATT_COLORRAM+0x68,
	NV04_PGRAPH_PATT_COLORRAM+0x6c,
	NV04_PGRAPH_PATT_COLORRAM+0x70,
	NV04_PGRAPH_PATT_COLORRAM+0x74,
	NV04_PGRAPH_PATT_COLORRAM+0x78,
	NV04_PGRAPH_PATT_COLORRAM+0x7c,
	NV04_PGRAPH_PATT_COLORRAM+0x80,
	NV04_PGRAPH_PATT_COLORRAM+0x84,
	NV04_PGRAPH_PATT_COLORRAM+0x88,
	NV04_PGRAPH_PATT_COLORRAM+0x8c,
	NV04_PGRAPH_PATT_COLORRAM+0x90,
	NV04_PGRAPH_PATT_COLORRAM+0x94,
	NV04_PGRAPH_PATT_COLORRAM+0x98,
	NV04_PGRAPH_PATT_COLORRAM+0x9c,
	NV04_PGRAPH_PATT_COLORRAM+0xa0,
	NV04_PGRAPH_PATT_COLORRAM+0xa4,
	NV04_PGRAPH_PATT_COLORRAM+0xa8,
	NV04_PGRAPH_PATT_COLORRAM+0xac,
	NV04_PGRAPH_PATT_COLORRAM+0xb0,
	NV04_PGRAPH_PATT_COLORRAM+0xb4,
	NV04_PGRAPH_PATT_COLORRAM+0xb8,
	NV04_PGRAPH_PATT_COLORRAM+0xbc,
	NV04_PGRAPH_PATT_COLORRAM+0xc0,
	NV04_PGRAPH_PATT_COLORRAM+0xc4,
	NV04_PGRAPH_PATT_COLORRAM+0xc8,
	NV04_PGRAPH_PATT_COLORRAM+0xcc,
	NV04_PGRAPH_PATT_COLORRAM+0xd0,
	NV04_PGRAPH_PATT_COLORRAM+0xd4,
	NV04_PGRAPH_PATT_COLORRAM+0xd8,
	NV04_PGRAPH_PATT_COLORRAM+0xdc,
	NV04_PGRAPH_PATT_COLORRAM+0xe0,
	NV04_PGRAPH_PATT_COLORRAM+0xe4,
	NV04_PGRAPH_PATT_COLORRAM+0xe8,
	NV04_PGRAPH_PATT_COLORRAM+0xec,
	NV04_PGRAPH_PATT_COLORRAM+0xf0,
	NV04_PGRAPH_PATT_COLORRAM+0xf4,
	NV04_PGRAPH_PATT_COLORRAM+0xf8,
	NV04_PGRAPH_PATT_COLORRAM+0xfc,
	NV04_PGRAPH_PATTERN,
	0x0040080c,
	NV04_PGRAPH_PATTERN_SHAPE,
	0x00400600,
	NV04_PGRAPH_ROP3,
	NV04_PGRAPH_CHROMA,
	NV04_PGRAPH_BETA_AND,
	NV04_PGRAPH_BETA_PREMULT,
	NV04_PGRAPH_CONTROL0,
	NV04_PGRAPH_CONTROL1,
	NV04_PGRAPH_CONTROL2,
	NV04_PGRAPH_BLEND,
	NV04_PGRAPH_STORED_FMT,
	NV04_PGRAPH_SOURCE_COLOR,
	0x00400560,
	0x00400568,
	0x00400564,
	0x0040056c,
	0x00400400,
	0x00400480,
	0x00400404,
	0x00400484,
	0x00400408,
	0x00400488,
	0x0040040c,
	0x0040048c,
	0x00400410,
	0x00400490,
	0x00400414,
	0x00400494,
	0x00400418,
	0x00400498,
	0x0040041c,
	0x0040049c,
	0x00400420,
	0x004004a0,
	0x00400424,
	0x004004a4,
	0x00400428,
	0x004004a8,
	0x0040042c,
	0x004004ac,
	0x00400430,
	0x004004b0,
	0x00400434,
	0x004004b4,
	0x00400438,
	0x004004b8,
	0x0040043c,
	0x004004bc,
	0x00400440,
	0x004004c0,
	0x00400444,
	0x004004c4,
	0x00400448,
	0x004004c8,
	0x0040044c,
	0x004004cc,
	0x00400450,
	0x004004d0,
	0x00400454,
	0x004004d4,
	0x00400458,
	0x004004d8,
	0x0040045c,
	0x004004dc,
	0x00400460,
	0x004004e0,
	0x00400464,
	0x004004e4,
	0x00400468,
	0x004004e8,
	0x0040046c,
	0x004004ec,
	0x00400470,
	0x004004f0,
	0x00400474,
	0x004004f4,
	0x00400478,
	0x004004f8,
	0x0040047c,
	0x004004fc,
	0x00400534,
	0x00400538,
	0x00400514,
	0x00400518,
	0x0040051c,
	0x00400520,
	0x00400524,
	0x00400528,
	0x0040052c,
	0x00400530,
	0x00400d00,
	0x00400d40,
	0x00400d80,
	0x00400d04,
	0x00400d44,
	0x00400d84,
	0x00400d08,
	0x00400d48,
	0x00400d88,
	0x00400d0c,
	0x00400d4c,
	0x00400d8c,
	0x00400d10,
	0x00400d50,
	0x00400d90,
	0x00400d14,
	0x00400d54,
	0x00400d94,
	0x00400d18,
	0x00400d58,
	0x00400d98,
	0x00400d1c,
	0x00400d5c,
	0x00400d9c,
	0x00400d20,
	0x00400d60,
	0x00400da0,
	0x00400d24,
	0x00400d64,
	0x00400da4,
	0x00400d28,
	0x00400d68,
	0x00400da8,
	0x00400d2c,
	0x00400d6c,
	0x00400dac,
	0x00400d30,
	0x00400d70,
	0x00400db0,
	0x00400d34,
	0x00400d74,
	0x00400db4,
	0x00400d38,
	0x00400d78,
	0x00400db8,
	0x00400d3c,
	0x00400d7c,
	0x00400dbc,
	0x00400590,
	0x00400594,
	0x00400598,
	0x0040059c,
	0x004005a8,
	0x004005ac,
	0x004005b0,
	0x004005b4,
	0x004005c0,
	0x004005c4,
	0x004005c8,
	0x004005cc,
	0x004005d0,
	0x004005d4,
	0x004005d8,
	0x004005dc,
	0x004005e0,
	NV04_PGRAPH_PASSTHRU_0,
	NV04_PGRAPH_PASSTHRU_1,
	NV04_PGRAPH_PASSTHRU_2,
	NV04_PGRAPH_DVD_COLORFMT,
	NV04_PGRAPH_SCALED_FORMAT,
	NV04_PGRAPH_MISC24_0,
	NV04_PGRAPH_MISC24_1,
	NV04_PGRAPH_MISC24_2,
	0x00400500,
	0x00400504,
	NV04_PGRAPH_VALID1,
	NV04_PGRAPH_VALID2,
	NV04_PGRAPH_DEBUG_3
};

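/* Engine state: software contexts indexed by FIFO channel id, under ->lock. */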
struct nv04_graph_priv {
	struct nouveau_graph base;
	struct nv04_graph_chan *chan[16];
	spinlock_t lock;
};

struct nv04_graph_chan {
	struct nouveau_object base;
	int chid;
	u32 nv04[ARRAY_SIZE(nv04_graph_ctx_regs)];
};


static inline struct nv04_graph_priv *
nv04_graph_priv(struct nv04_graph_chan *chan)
{
	return (void *)nv_object(chan)->engine;
}

/*******************************************************************************
 * Graphics object classes
 ******************************************************************************/

/*
 * Software methods, why they are needed, and how they all work:
 *
 * NV04 and NV05 keep most of the state in PGRAPH context itself, but some
 * 2d engine settings are kept inside the grobjs themselves. The grobjs are
 * 3 words long on both. grobj format on NV04 is:
 *
 * word 0:
 *  - bits 0-7: class
 *  - bit 12: color key active
 *  - bit 13: clip rect active
 *  - bit 14: if set, destination surface is swizzled and taken from buffer 5
 *            [set by NV04_SWIZZLED_SURFACE], otherwise it's linear and taken
 *            from buffer 0 [set by NV04_CONTEXT_SURFACES_2D or
 *            NV03_CONTEXT_SURFACE_DST].
 *  - bits 15-17: 2d operation [aka patch config]
 *  - bit 24: patch valid [enables rendering using this object]
 *  - bit 25: surf3d valid [for tex_tri and multitex_tri only]
 * word 1:
 *  - bits 0-1: mono format
 *  - bits 8-13: color format
 *  - bits 16-31: DMA_NOTIFY instance
 * word 2:
 *  - bits 0-15: DMA_A instance
 *  - bits 16-31: DMA_B instance
 *
 * On NV05 it's:
 *
 * word 0:
 *  - bits 0-7: class
 *  - bit 12: color key active
 *  - bit 13: clip rect active
 *  - bit 14: if set, destination surface is swizzled and taken from buffer 5
 *            [set by NV04_SWIZZLED_SURFACE], otherwise it's linear and taken
 *            from buffer 0 [set by NV04_CONTEXT_SURFACES_2D or
 *            NV03_CONTEXT_SURFACE_DST].
 *  - bits 15-17: 2d operation [aka patch config]
 *  - bits 20-22: dither mode
 *  - bit 24: patch valid [enables rendering using this object]
 *  - bit 25: surface_dst/surface_color/surf2d/surf3d valid
 *  - bit 26: surface_src/surface_zeta valid
 *  - bit 27: pattern valid
 *  - bit 28: rop valid
 *  - bit 29: beta1 valid
 *  - bit 30: beta4 valid
 * word 1:
 *  - bits 0-1: mono format
 *  - bits 8-13: color format
 *  - bits 16-31: DMA_NOTIFY instance
 * word 2:
 *  - bits 0-15: DMA_A instance
 *  - bits 16-31: DMA_B instance
 *
 * NV05 will set/unset the relevant valid bits when you poke the relevant
 * object-binding methods with an object of the proper type, or with the NULL
 * type. It'll only allow rendering using the grobj if all needed objects
 * are bound. The needed set of objects depends on the selected operation:
 * for example, a rop object is needed by ROP_AND, but not by SRCCOPY_AND.
 *
 * NV04 doesn't have these methods implemented at all, and doesn't have the
 * relevant bits in the grobj. Instead, it'll allow rendering whenever bit 24
 * is set. So we have to emulate them in software, internally keeping the
 * same bits as NV05 does. Since grobjs are aligned to 16 bytes on nv04,
 * but the last word isn't actually used for anything, we abuse it for this
 * purpose.
 *
 * Actually, NV05 can optionally check bit 24 too, but we disable this since
 * there's no use for it.
 *
 * For unknown reasons, NV04 implements surf3d binding in hardware as an
 * exception. Also for unknown reasons, NV04 doesn't implement the clipping
 * methods on the surf3d object, so we have to emulate them too.
 */
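/*
 * Illustrative decode (hypothetical value, not read from hardware): a
 * word 0 of 0x0100801f would mean class 0x1f, 2d operation 1 (ROP_AND)
 * in bits 15-17, and bit 24 (patch valid) set.
 */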
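/* Read-modify-write a grobj's first word and mirror the result into the
 * active PGRAPH context-switch register and the cache slot of the
 * subchannel that trapped. */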
static void
nv04_graph_set_ctx1(struct nouveau_object *object, u32 mask, u32 value)
{
	struct nv04_graph_priv *priv = (void *)object->engine;
	int subc = (nv_rd32(priv, NV04_PGRAPH_TRAPPED_ADDR) >> 13) & 0x7;
	u32 tmp;

	tmp  = nv_ro32(object, 0x00);
	tmp &= ~mask;
	tmp |= value;
	nv_wo32(object, 0x00, tmp);

	nv_wr32(priv, NV04_PGRAPH_CTX_SWITCH1, tmp);
	nv_wr32(priv, NV04_PGRAPH_CTX_CACHE1 + (subc << 2), tmp);
}

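/* Update the software-tracked valid bits kept in the otherwise unused
 * fourth grobj word, then recompute bit 24 (patch valid) following the
 * NV05 rules described in the comment above. */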
static void
nv04_graph_set_ctx_val(struct nouveau_object *object, u32 mask, u32 value)
{
	int class, op, valid = 1;
	u32 tmp, ctx1;

	ctx1 = nv_ro32(object, 0x00);
	class = ctx1 & 0xff;
	op = (ctx1 >> 15) & 7;

	tmp = nv_ro32(object, 0x0c);
	tmp &= ~mask;
	tmp |= value;
	nv_wo32(object, 0x0c, tmp);

	/* check for valid surf2d/surf_dst/surf_color */
	if (!(tmp & 0x02000000))
		valid = 0;
	/* check for valid surf_src/surf_zeta */
	if ((class == 0x1f || class == 0x48) && !(tmp & 0x04000000))
		valid = 0;

	switch (op) {
	/* SRCCOPY_AND, SRCCOPY: no extra objects required */
	case 0:
	case 3:
		break;
	/* ROP_AND: requires pattern and rop */
	case 1:
		if (!(tmp & 0x18000000))
			valid = 0;
		break;
	/* BLEND_AND: requires beta1 */
	case 2:
		if (!(tmp & 0x20000000))
			valid = 0;
		break;
	/* SRCCOPY_PREMULT, BLEND_PREMULT: beta4 required */
	case 4:
	case 5:
		if (!(tmp & 0x40000000))
			valid = 0;
		break;
	}

	nv04_graph_set_ctx1(object, 0x01000000, valid << 24);
}

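/* SET_OPERATION software method: reject out-of-range operations, store the
 * operation in bits 15-17 of word 0 and re-evaluate the valid bits, since
 * the set of required objects depends on the operation. */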
static int
nv04_graph_mthd_set_operation(struct nouveau_object *object, u32 mthd,
			      void *args, u32 size)
{
	u32 class = nv_ro32(object, 0) & 0xff;
	u32 data = *(u32 *)args;
	if (data > 5)
		return 1;
	/* Old versions of the objects only accept the first three operations. */
	if (data > 2 && class < 0x40)
		return 1;
	nv04_graph_set_ctx1(object, 0x00038000, data << 15);
	/* changing the operation changes the set of objects needed for validation */
	nv04_graph_set_ctx_val(object, 0, 0);
	return 0;
}

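/* Emulated surf3d clip methods: the packed argument holds the minimum in
 * the low 16 bits and a (possibly negative) extent in the high 16 bits;
 * the computed min/max are written straight to the PGRAPH clip registers. */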
static int
nv04_graph_mthd_surf3d_clip_h(struct nouveau_object *object, u32 mthd,
			      void *args, u32 size)
{
	struct nv04_graph_priv *priv = (void *)object->engine;
	u32 data = *(u32 *)args;
	u32 min = data & 0xffff, max;
	u32 w = data >> 16;
	if (min & 0x8000)
		/* too large */
		return 1;
	if (w & 0x8000)
		/* yes, it accepts negative for some reason. */
		w |= 0xffff0000;
	max = min + w;
	max &= 0x3ffff;
	nv_wr32(priv, 0x40053c, min);
	nv_wr32(priv, 0x400544, max);
	return 0;
}

static int
nv04_graph_mthd_surf3d_clip_v(struct nouveau_object *object, u32 mthd,
			      void *args, u32 size)
{
	struct nv04_graph_priv *priv = (void *)object->engine;
	u32 data = *(u32 *)args;
	u32 min = data & 0xffff, max;
	u32 w = data >> 16;
	if (min & 0x8000)
		/* too large */
		return 1;
	if (w & 0x8000)
		/* yes, it accepts negative for some reason. */
		w |= 0xffff0000;
	max = min + w;
	max &= 0x3ffff;
	nv_wr32(priv, 0x400540, min);
	nv_wr32(priv, 0x400548, max);
	return 0;
}

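/* Resolve the class of the object being bound by reading the first word of
 * its instance from instmem (the method argument is the instance >> 4). */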
static u16
nv04_graph_mthd_bind_class(struct nouveau_object *object, u32 *args, u32 size)
{
	struct nouveau_instmem *imem = nouveau_instmem(object);
	u32 inst = *(u32 *)args << 4;
	return nv_ro32(imem, inst);
}

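/*
 * The bind_* software methods below map the class of the bound object to
 * the corresponding valid bit (class 0x30 is the null object and clears
 * the bit); unknown classes are rejected.
 */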
static int
nv04_graph_mthd_bind_surf2d(struct nouveau_object *object, u32 mthd,
			    void *args, u32 size)
{
	switch (nv04_graph_mthd_bind_class(object, args, size)) {
	case 0x30:
		nv04_graph_set_ctx1(object, 0x00004000, 0);
		nv04_graph_set_ctx_val(object, 0x02000000, 0);
		return 0;
	case 0x42:
		nv04_graph_set_ctx1(object, 0x00004000, 0);
		nv04_graph_set_ctx_val(object, 0x02000000, 0x02000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_surf2d_swzsurf(struct nouveau_object *object, u32 mthd,
				    void *args, u32 size)
{
	switch (nv04_graph_mthd_bind_class(object, args, size)) {
	case 0x30:
		nv04_graph_set_ctx1(object, 0x00004000, 0);
		nv04_graph_set_ctx_val(object, 0x02000000, 0);
		return 0;
	case 0x42:
		nv04_graph_set_ctx1(object, 0x00004000, 0);
		nv04_graph_set_ctx_val(object, 0x02000000, 0x02000000);
		return 0;
	case 0x52:
		nv04_graph_set_ctx1(object, 0x00004000, 0x00004000);
		nv04_graph_set_ctx_val(object, 0x02000000, 0x02000000);
		return 0;
	}
	return 1;
}

static int
nv01_graph_mthd_bind_patt(struct nouveau_object *object, u32 mthd,
			  void *args, u32 size)
{
	switch (nv04_graph_mthd_bind_class(object, args, size)) {
	case 0x30:
		nv04_graph_set_ctx_val(object, 0x08000000, 0);
		return 0;
	case 0x18:
		nv04_graph_set_ctx_val(object, 0x08000000, 0x08000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_patt(struct nouveau_object *object, u32 mthd,
			  void *args, u32 size)
{
	switch (nv04_graph_mthd_bind_class(object, args, size)) {
	case 0x30:
		nv04_graph_set_ctx_val(object, 0x08000000, 0);
		return 0;
	case 0x44:
		nv04_graph_set_ctx_val(object, 0x08000000, 0x08000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_rop(struct nouveau_object *object, u32 mthd,
			 void *args, u32 size)
{
	switch (nv04_graph_mthd_bind_class(object, args, size)) {
	case 0x30:
		nv04_graph_set_ctx_val(object, 0x10000000, 0);
		return 0;
	case 0x43:
		nv04_graph_set_ctx_val(object, 0x10000000, 0x10000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_beta1(struct nouveau_object *object, u32 mthd,
			   void *args, u32 size)
{
	switch (nv04_graph_mthd_bind_class(object, args, size)) {
	case 0x30:
		nv04_graph_set_ctx_val(object, 0x20000000, 0);
		return 0;
	case 0x12:
		nv04_graph_set_ctx_val(object, 0x20000000, 0x20000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_beta4(struct nouveau_object *object, u32 mthd,
			   void *args, u32 size)
{
	switch (nv04_graph_mthd_bind_class(object, args, size)) {
	case 0x30:
		nv04_graph_set_ctx_val(object, 0x40000000, 0);
		return 0;
	case 0x72:
		nv04_graph_set_ctx_val(object, 0x40000000, 0x40000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_surf_dst(struct nouveau_object *object, u32 mthd,
			      void *args, u32 size)
{
	switch (nv04_graph_mthd_bind_class(object, args, size)) {
	case 0x30:
		nv04_graph_set_ctx_val(object, 0x02000000, 0);
		return 0;
	case 0x58:
		nv04_graph_set_ctx_val(object, 0x02000000, 0x02000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_surf_src(struct nouveau_object *object, u32 mthd,
			      void *args, u32 size)
{
	switch (nv04_graph_mthd_bind_class(object, args, size)) {
	case 0x30:
		nv04_graph_set_ctx_val(object, 0x04000000, 0);
		return 0;
	case 0x59:
		nv04_graph_set_ctx_val(object, 0x04000000, 0x04000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_surf_color(struct nouveau_object *object, u32 mthd,
				void *args, u32 size)
{
	switch (nv04_graph_mthd_bind_class(object, args, size)) {
	case 0x30:
		nv04_graph_set_ctx_val(object, 0x02000000, 0);
		return 0;
	case 0x5a:
		nv04_graph_set_ctx_val(object, 0x02000000, 0x02000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_surf_zeta(struct nouveau_object *object, u32 mthd,
			       void *args, u32 size)
{
	switch (nv04_graph_mthd_bind_class(object, args, size)) {
	case 0x30:
		nv04_graph_set_ctx_val(object, 0x04000000, 0);
		return 0;
	case 0x5b:
		nv04_graph_set_ctx_val(object, 0x04000000, 0x04000000);
		return 0;
	}
	return 1;
}

static int
nv01_graph_mthd_bind_clip(struct nouveau_object *object, u32 mthd,
			  void *args, u32 size)
{
	switch (nv04_graph_mthd_bind_class(object, args, size)) {
	case 0x30:
		nv04_graph_set_ctx1(object, 0x2000, 0);
		return 0;
	case 0x19:
		nv04_graph_set_ctx1(object, 0x2000, 0x2000);
		return 0;
	}
	return 1;
}

static int
nv01_graph_mthd_bind_chroma(struct nouveau_object *object, u32 mthd,
			    void *args, u32 size)
{
	switch (nv04_graph_mthd_bind_class(object, args, size)) {
	case 0x30:
		nv04_graph_set_ctx1(object, 0x1000, 0);
		return 0;
	/* Yes, for some reason even the old versions of objects
	 * accept 0x57 and not 0x17. Consistency be damned.
	 */
	case 0x57:
		nv04_graph_set_ctx1(object, 0x1000, 0x1000);
		return 0;
	}
	return 1;
}

static struct nouveau_omthds
nv03_graph_gdi_omthds[] = {
	{ 0x0184, 0x0184, nv01_graph_mthd_bind_patt },
	{ 0x0188, 0x0188, nv04_graph_mthd_bind_rop },
	{ 0x018c, 0x018c, nv04_graph_mthd_bind_beta1 },
	{ 0x0190, 0x0190, nv04_graph_mthd_bind_surf_dst },
	{ 0x02fc, 0x02fc, nv04_graph_mthd_set_operation },
	{}
};

static struct nouveau_omthds
nv04_graph_gdi_omthds[] = {
	{ 0x0188, 0x0188, nv04_graph_mthd_bind_patt },
	{ 0x018c, 0x018c, nv04_graph_mthd_bind_rop },
	{ 0x0190, 0x0190, nv04_graph_mthd_bind_beta1 },
	{ 0x0194, 0x0194, nv04_graph_mthd_bind_beta4 },
	{ 0x0198, 0x0198, nv04_graph_mthd_bind_surf2d },
	{ 0x02fc, 0x02fc, nv04_graph_mthd_set_operation },
	{}
};

static struct nouveau_omthds
nv01_graph_blit_omthds[] = {
	{ 0x0184, 0x0184, nv01_graph_mthd_bind_chroma },
	{ 0x0188, 0x0188, nv01_graph_mthd_bind_clip },
	{ 0x018c, 0x018c, nv01_graph_mthd_bind_patt },
	{ 0x0190, 0x0190, nv04_graph_mthd_bind_rop },
	{ 0x0194, 0x0194, nv04_graph_mthd_bind_beta1 },
	{ 0x0198, 0x0198, nv04_graph_mthd_bind_surf_dst },
	{ 0x019c, 0x019c, nv04_graph_mthd_bind_surf_src },
	{ 0x02fc, 0x02fc, nv04_graph_mthd_set_operation },
	{}
};

static struct nouveau_omthds
nv04_graph_blit_omthds[] = {
	{ 0x0184, 0x0184, nv01_graph_mthd_bind_chroma },
	{ 0x0188, 0x0188, nv01_graph_mthd_bind_clip },
	{ 0x018c, 0x018c, nv04_graph_mthd_bind_patt },
	{ 0x0190, 0x0190, nv04_graph_mthd_bind_rop },
	{ 0x0194, 0x0194, nv04_graph_mthd_bind_beta1 },
	{ 0x0198, 0x0198, nv04_graph_mthd_bind_beta4 },
	{ 0x019c, 0x019c, nv04_graph_mthd_bind_surf2d },
	{ 0x02fc, 0x02fc, nv04_graph_mthd_set_operation },
	{}
};

static struct nouveau_omthds
nv04_graph_iifc_omthds[] = {
	{ 0x0188, 0x0188, nv01_graph_mthd_bind_chroma },
	{ 0x018c, 0x018c, nv01_graph_mthd_bind_clip },
	{ 0x0190, 0x0190, nv04_graph_mthd_bind_patt },
	{ 0x0194, 0x0194, nv04_graph_mthd_bind_rop },
	{ 0x0198, 0x0198, nv04_graph_mthd_bind_beta1 },
	{ 0x019c, 0x019c, nv04_graph_mthd_bind_beta4 },
	{ 0x01a0, 0x01a0, nv04_graph_mthd_bind_surf2d_swzsurf },
	{ 0x03e4, 0x03e4, nv04_graph_mthd_set_operation },
	{}
};

static struct nouveau_omthds
nv01_graph_ifc_omthds[] = {
	{ 0x0184, 0x0184, nv01_graph_mthd_bind_chroma },
	{ 0x0188, 0x0188, nv01_graph_mthd_bind_clip },
	{ 0x018c, 0x018c, nv01_graph_mthd_bind_patt },
	{ 0x0190, 0x0190, nv04_graph_mthd_bind_rop },
	{ 0x0194, 0x0194, nv04_graph_mthd_bind_beta1 },
	{ 0x0198, 0x0198, nv04_graph_mthd_bind_surf_dst },
	{ 0x02fc, 0x02fc, nv04_graph_mthd_set_operation },
	{}
};

static struct nouveau_omthds
nv04_graph_ifc_omthds[] = {
	{ 0x0184, 0x0184, nv01_graph_mthd_bind_chroma },
	{ 0x0188, 0x0188, nv01_graph_mthd_bind_clip },
	{ 0x018c, 0x018c, nv04_graph_mthd_bind_patt },
	{ 0x0190, 0x0190, nv04_graph_mthd_bind_rop },
	{ 0x0194, 0x0194, nv04_graph_mthd_bind_beta1 },
	{ 0x0198, 0x0198, nv04_graph_mthd_bind_beta4 },
	{ 0x019c, 0x019c, nv04_graph_mthd_bind_surf2d },
	{ 0x02fc, 0x02fc, nv04_graph_mthd_set_operation },
	{}
};

static struct nouveau_omthds
nv03_graph_sifc_omthds[] = {
	{ 0x0184, 0x0184, nv01_graph_mthd_bind_chroma },
	{ 0x0188, 0x0188, nv01_graph_mthd_bind_patt },
	{ 0x018c, 0x018c, nv04_graph_mthd_bind_rop },
	{ 0x0190, 0x0190, nv04_graph_mthd_bind_beta1 },
	{ 0x0194, 0x0194, nv04_graph_mthd_bind_surf_dst },
	{ 0x02fc, 0x02fc, nv04_graph_mthd_set_operation },
	{}
};

static struct nouveau_omthds
nv04_graph_sifc_omthds[] = {
	{ 0x0184, 0x0184, nv01_graph_mthd_bind_chroma },
	{ 0x0188, 0x0188, nv04_graph_mthd_bind_patt },
	{ 0x018c, 0x018c, nv04_graph_mthd_bind_rop },
	{ 0x0190, 0x0190, nv04_graph_mthd_bind_beta1 },
	{ 0x0194, 0x0194, nv04_graph_mthd_bind_beta4 },
	{ 0x0198, 0x0198, nv04_graph_mthd_bind_surf2d },
	{ 0x02fc, 0x02fc, nv04_graph_mthd_set_operation },
	{}
};

static struct nouveau_omthds
nv03_graph_sifm_omthds[] = {
	{ 0x0188, 0x0188, nv01_graph_mthd_bind_patt },
	{ 0x018c, 0x018c, nv04_graph_mthd_bind_rop },
	{ 0x0190, 0x0190, nv04_graph_mthd_bind_beta1 },
	{ 0x0194, 0x0194, nv04_graph_mthd_bind_surf_dst },
	{ 0x0304, 0x0304, nv04_graph_mthd_set_operation },
	{}
};

static struct nouveau_omthds
nv04_graph_sifm_omthds[] = {
	{ 0x0188, 0x0188, nv04_graph_mthd_bind_patt },
	{ 0x018c, 0x018c, nv04_graph_mthd_bind_rop },
	{ 0x0190, 0x0190, nv04_graph_mthd_bind_beta1 },
	{ 0x0194, 0x0194, nv04_graph_mthd_bind_beta4 },
	{ 0x0198, 0x0198, nv04_graph_mthd_bind_surf2d },
	{ 0x0304, 0x0304, nv04_graph_mthd_set_operation },
	{}
};

static struct nouveau_omthds
nv04_graph_surf3d_omthds[] = {
	{ 0x02f8, 0x02f8, nv04_graph_mthd_surf3d_clip_h },
	{ 0x02fc, 0x02fc, nv04_graph_mthd_surf3d_clip_v },
	{}
};

static struct nouveau_omthds
nv03_graph_ttri_omthds[] = {
	{ 0x0188, 0x0188, nv01_graph_mthd_bind_clip },
	{ 0x018c, 0x018c, nv04_graph_mthd_bind_surf_color },
	{ 0x0190, 0x0190, nv04_graph_mthd_bind_surf_zeta },
	{}
};

static struct nouveau_omthds
nv01_graph_prim_omthds[] = {
	{ 0x0184, 0x0184, nv01_graph_mthd_bind_clip },
	{ 0x0188, 0x0188, nv01_graph_mthd_bind_patt },
	{ 0x018c, 0x018c, nv04_graph_mthd_bind_rop },
	{ 0x0190, 0x0190, nv04_graph_mthd_bind_beta1 },
	{ 0x0194, 0x0194, nv04_graph_mthd_bind_surf_dst },
	{ 0x02fc, 0x02fc, nv04_graph_mthd_set_operation },
	{}
};

static struct nouveau_omthds
nv04_graph_prim_omthds[] = {
	{ 0x0184, 0x0184, nv01_graph_mthd_bind_clip },
	{ 0x0188, 0x0188, nv04_graph_mthd_bind_patt },
	{ 0x018c, 0x018c, nv04_graph_mthd_bind_rop },
	{ 0x0190, 0x0190, nv04_graph_mthd_bind_beta1 },
	{ 0x0194, 0x0194, nv04_graph_mthd_bind_beta4 },
	{ 0x0198, 0x0198, nv04_graph_mthd_bind_surf2d },
	{ 0x02fc, 0x02fc, nv04_graph_mthd_set_operation },
	{}
};

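/* Graphics objects are 16-byte gpuobjs; word 0 holds the class (plus the
 * big-endian flag where applicable), the remaining words start out zero. */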
static int
nv04_graph_object_ctor(struct nouveau_object *parent,
		       struct nouveau_object *engine,
		       struct nouveau_oclass *oclass, void *data, u32 size,
		       struct nouveau_object **pobject)
{
	struct nouveau_gpuobj *obj;
	int ret;

	ret = nouveau_gpuobj_create(parent, engine, oclass, 0, parent,
				    16, 16, 0, &obj);
	*pobject = nv_object(obj);
	if (ret)
		return ret;

	nv_wo32(obj, 0x00, nv_mclass(obj));
#ifdef __BIG_ENDIAN
	nv_mo32(obj, 0x00, 0x00080000, 0x00080000);
#endif
	nv_wo32(obj, 0x04, 0x00000000);
	nv_wo32(obj, 0x08, 0x00000000);
	nv_wo32(obj, 0x0c, 0x00000000);
	return 0;
}

struct nouveau_ofuncs
nv04_graph_ofuncs = {
	.ctor = nv04_graph_object_ctor,
	.dtor = _nouveau_gpuobj_dtor,
	.init = _nouveau_gpuobj_init,
	.fini = _nouveau_gpuobj_fini,
	.rd32 = _nouveau_gpuobj_rd32,
	.wr32 = _nouveau_gpuobj_wr32,
};

static struct nouveau_oclass
nv04_graph_sclass[] = {
	{ 0x0012, &nv04_graph_ofuncs }, /* beta1 */
	{ 0x0017, &nv04_graph_ofuncs }, /* chroma */
	{ 0x0018, &nv04_graph_ofuncs }, /* pattern (nv01) */
	{ 0x0019, &nv04_graph_ofuncs }, /* clip */
	{ 0x001c, &nv04_graph_ofuncs, nv01_graph_prim_omthds }, /* line */
	{ 0x001d, &nv04_graph_ofuncs, nv01_graph_prim_omthds }, /* tri */
	{ 0x001e, &nv04_graph_ofuncs, nv01_graph_prim_omthds }, /* rect */
	{ 0x001f, &nv04_graph_ofuncs, nv01_graph_blit_omthds },
	{ 0x0021, &nv04_graph_ofuncs, nv01_graph_ifc_omthds },
	{ 0x0030, &nv04_graph_ofuncs }, /* null */
	{ 0x0036, &nv04_graph_ofuncs, nv03_graph_sifc_omthds },
	{ 0x0037, &nv04_graph_ofuncs, nv03_graph_sifm_omthds },
	{ 0x0038, &nv04_graph_ofuncs }, /* dvd subpicture */
	{ 0x0039, &nv04_graph_ofuncs }, /* m2mf */
	{ 0x0042, &nv04_graph_ofuncs }, /* surf2d */
	{ 0x0043, &nv04_graph_ofuncs }, /* rop */
	{ 0x0044, &nv04_graph_ofuncs }, /* pattern */
	{ 0x0048, &nv04_graph_ofuncs, nv03_graph_ttri_omthds },
	{ 0x004a, &nv04_graph_ofuncs, nv04_graph_gdi_omthds },
	{ 0x004b, &nv04_graph_ofuncs, nv03_graph_gdi_omthds },
	{ 0x0052, &nv04_graph_ofuncs }, /* swzsurf */
	{ 0x0053, &nv04_graph_ofuncs, nv04_graph_surf3d_omthds },
	{ 0x0054, &nv04_graph_ofuncs }, /* ttri */
	{ 0x0055, &nv04_graph_ofuncs }, /* mtri */
	{ 0x0057, &nv04_graph_ofuncs }, /* chroma */
	{ 0x0058, &nv04_graph_ofuncs }, /* surf_dst */
	{ 0x0059, &nv04_graph_ofuncs }, /* surf_src */
	{ 0x005a, &nv04_graph_ofuncs }, /* surf_color */
	{ 0x005b, &nv04_graph_ofuncs }, /* surf_zeta */
	{ 0x005c, &nv04_graph_ofuncs, nv04_graph_prim_omthds }, /* line */
	{ 0x005d, &nv04_graph_ofuncs, nv04_graph_prim_omthds }, /* tri */
	{ 0x005e, &nv04_graph_ofuncs, nv04_graph_prim_omthds }, /* rect */
	{ 0x005f, &nv04_graph_ofuncs, nv04_graph_blit_omthds },
	{ 0x0060, &nv04_graph_ofuncs, nv04_graph_iifc_omthds },
	{ 0x0061, &nv04_graph_ofuncs, nv04_graph_ifc_omthds },
	{ 0x0064, &nv04_graph_ofuncs }, /* iifc (nv05) */
	{ 0x0065, &nv04_graph_ofuncs }, /* ifc (nv05) */
	{ 0x0066, &nv04_graph_ofuncs }, /* sifc (nv05) */
	{ 0x0072, &nv04_graph_ofuncs }, /* beta4 */
	{ 0x0076, &nv04_graph_ofuncs, nv04_graph_sifc_omthds },
	{ 0x0077, &nv04_graph_ofuncs, nv04_graph_sifm_omthds },
	{},
};

/*******************************************************************************
 * PGRAPH context
 ******************************************************************************/

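/* Return the software context of the channel that currently owns PGRAPH,
 * if any. */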
static struct nv04_graph_chan *
nv04_graph_channel(struct nv04_graph_priv *priv)
{
	struct nv04_graph_chan *chan = NULL;
	if (nv_rd32(priv, NV04_PGRAPH_CTX_CONTROL) & 0x00010000) {
		int chid = nv_rd32(priv, NV04_PGRAPH_CTX_USER) >> 24;
		if (chid < ARRAY_SIZE(priv->chan))
			chan = priv->chan[chid];
	}
	return chan;
}

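/* Copy the shadow context into the PGRAPH registers and mark the channel
 * as the current owner; nv04_graph_unload_context() does the reverse. */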
static int
nv04_graph_load_context(struct nv04_graph_chan *chan, int chid)
{
	struct nv04_graph_priv *priv = nv04_graph_priv(chan);
	int i;

	for (i = 0; i < ARRAY_SIZE(nv04_graph_ctx_regs); i++)
		nv_wr32(priv, nv04_graph_ctx_regs[i], chan->nv04[i]);

	nv_wr32(priv, NV04_PGRAPH_CTX_CONTROL, 0x10010100);
	nv_mask(priv, NV04_PGRAPH_CTX_USER, 0xff000000, chid << 24);
	nv_mask(priv, NV04_PGRAPH_FFINTFC_ST2, 0xfff00000, 0x00000000);
	return 0;
}

static int
nv04_graph_unload_context(struct nv04_graph_chan *chan)
{
	struct nv04_graph_priv *priv = nv04_graph_priv(chan);
	int i;

	for (i = 0; i < ARRAY_SIZE(nv04_graph_ctx_regs); i++)
		chan->nv04[i] = nv_rd32(priv, nv04_graph_ctx_regs[i]);

	nv_wr32(priv, NV04_PGRAPH_CTX_CONTROL, 0x10000000);
	nv_mask(priv, NV04_PGRAPH_CTX_USER, 0xff000000, 0x0f000000);
	return 0;
}

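/* Switch PGRAPH to the channel that caused the context-switch interrupt:
 * save the current owner's registers, then load the new channel's. */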
static void
nv04_graph_context_switch(struct nv04_graph_priv *priv)
{
	struct nv04_graph_chan *prev = NULL;
	struct nv04_graph_chan *next = NULL;
	unsigned long flags;
	int chid;

	spin_lock_irqsave(&priv->lock, flags);
	nv04_graph_idle(priv);

	/* If previous context is valid, we need to save it */
	prev = nv04_graph_channel(priv);
	if (prev)
		nv04_graph_unload_context(prev);

	/* load context for next channel */
	chid = (nv_rd32(priv, NV04_PGRAPH_TRAPPED_ADDR) >> 24) & 0x0f;
	next = priv->chan[chid];
	if (next)
		nv04_graph_load_context(next, chid);

	spin_unlock_irqrestore(&priv->lock, flags);
}

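/* Locate a register's slot in the shadow context array. */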
static u32 *ctx_reg(struct nv04_graph_chan *chan, u32 reg)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(nv04_graph_ctx_regs); i++) {
		if (nv04_graph_ctx_regs[i] == reg)
			return &chan->nv04[i];
	}

	return NULL;
}

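/* PGRAPH contexts on nv04 are purely software state; new contexts start
 * with the default DEBUG_3 value, and creating a second context for a
 * channel just takes a reference on the existing one. */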
static int
nv04_graph_context_ctor(struct nouveau_object *parent,
			struct nouveau_object *engine,
			struct nouveau_oclass *oclass, void *data, u32 size,
			struct nouveau_object **pobject)
{
	struct nouveau_fifo_chan *fifo = (void *)parent;
	struct nv04_graph_priv *priv = (void *)engine;
	struct nv04_graph_chan *chan;
	unsigned long flags;
	int ret;

	ret = nouveau_object_create(parent, engine, oclass, 0, &chan);
	*pobject = nv_object(chan);
	if (ret)
		return ret;

	spin_lock_irqsave(&priv->lock, flags);
	if (priv->chan[fifo->chid]) {
		*pobject = nv_object(priv->chan[fifo->chid]);
		atomic_inc(&(*pobject)->refcount);
		spin_unlock_irqrestore(&priv->lock, flags);
		nouveau_object_destroy(&chan->base);
		return 1;
	}

	*ctx_reg(chan, NV04_PGRAPH_DEBUG_3) = 0xfad4ff31;

	priv->chan[fifo->chid] = chan;
	chan->chid = fifo->chid;
	spin_unlock_irqrestore(&priv->lock, flags);
	return 0;
}

static void
nv04_graph_context_dtor(struct nouveau_object *object)
{
	struct nv04_graph_priv *priv = (void *)object->engine;
	struct nv04_graph_chan *chan = (void *)object;
	unsigned long flags;

	spin_lock_irqsave(&priv->lock, flags);
	priv->chan[chan->chid] = NULL;
	spin_unlock_irqrestore(&priv->lock, flags);

	nouveau_object_destroy(&chan->base);
}

static int
nv04_graph_context_fini(struct nouveau_object *object, bool suspend)
{
	struct nv04_graph_priv *priv = (void *)object->engine;
	struct nv04_graph_chan *chan = (void *)object;
	unsigned long flags;

	spin_lock_irqsave(&priv->lock, flags);
	nv_mask(priv, NV04_PGRAPH_FIFO, 0x00000001, 0x00000000);
	if (nv04_graph_channel(priv) == chan)
		nv04_graph_unload_context(chan);
	nv_mask(priv, NV04_PGRAPH_FIFO, 0x00000001, 0x00000001);
	spin_unlock_irqrestore(&priv->lock, flags);

	return nouveau_object_fini(&chan->base, suspend);
}

static struct nouveau_oclass
nv04_graph_cclass = {
	.handle = NV_ENGCTX(GR, 0x04),
	.ofuncs = &(struct nouveau_ofuncs) {
		.ctor = nv04_graph_context_ctor,
		.dtor = nv04_graph_context_dtor,
		.init = nouveau_object_init,
		.fini = nv04_graph_context_fini,
	},
};

/*******************************************************************************
 * PGRAPH engine/subdev functions
 ******************************************************************************/

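/* Wait for PGRAPH to go idle; on NV40 the SYNC_STALL status bit is ignored. */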
bool
nv04_graph_idle(void *obj)
{
	struct nouveau_graph *graph = nouveau_graph(obj);
	u32 mask = 0xffffffff;

	if (nv_device(obj)->card_type == NV_40)
		mask &= ~NV40_PGRAPH_STATUS_SYNC_STALL;

	if (!nv_wait(graph, NV04_PGRAPH_STATUS, mask, 0)) {
		nv_error(graph, "idle timed out with status 0x%08x\n",
			 nv_rd32(graph, NV04_PGRAPH_STATUS));
		return false;
	}

	return true;
}

static const struct nouveau_bitfield
nv04_graph_intr_name[] = {
	{ NV_PGRAPH_INTR_NOTIFY, "NOTIFY" },
	{}
};

static const struct nouveau_bitfield
nv04_graph_nstatus[] = {
	{ NV04_PGRAPH_NSTATUS_STATE_IN_USE,       "STATE_IN_USE" },
	{ NV04_PGRAPH_NSTATUS_INVALID_STATE,      "INVALID_STATE" },
	{ NV04_PGRAPH_NSTATUS_BAD_ARGUMENT,       "BAD_ARGUMENT" },
	{ NV04_PGRAPH_NSTATUS_PROTECTION_FAULT,   "PROTECTION_FAULT" },
	{}
};

const struct nouveau_bitfield
nv04_graph_nsource[] = {
	{ NV03_PGRAPH_NSOURCE_NOTIFICATION,       "NOTIFICATION" },
	{ NV03_PGRAPH_NSOURCE_DATA_ERROR,         "DATA_ERROR" },
	{ NV03_PGRAPH_NSOURCE_PROTECTION_ERROR,   "PROTECTION_ERROR" },
	{ NV03_PGRAPH_NSOURCE_RANGE_EXCEPTION,    "RANGE_EXCEPTION" },
	{ NV03_PGRAPH_NSOURCE_LIMIT_COLOR,        "LIMIT_COLOR" },
	{ NV03_PGRAPH_NSOURCE_LIMIT_ZETA,         "LIMIT_ZETA" },
	{ NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD,       "ILLEGAL_MTHD" },
	{ NV03_PGRAPH_NSOURCE_DMA_R_PROTECTION,   "DMA_R_PROTECTION" },
	{ NV03_PGRAPH_NSOURCE_DMA_W_PROTECTION,   "DMA_W_PROTECTION" },
	{ NV03_PGRAPH_NSOURCE_FORMAT_EXCEPTION,   "FORMAT_EXCEPTION" },
	{ NV03_PGRAPH_NSOURCE_PATCH_EXCEPTION,    "PATCH_EXCEPTION" },
	{ NV03_PGRAPH_NSOURCE_STATE_INVALID,      "STATE_INVALID" },
	{ NV03_PGRAPH_NSOURCE_DOUBLE_NOTIFY,      "DOUBLE_NOTIFY" },
	{ NV03_PGRAPH_NSOURCE_NOTIFY_IN_USE,      "NOTIFY_IN_USE" },
	{ NV03_PGRAPH_NSOURCE_METHOD_CNT,         "METHOD_CNT" },
	{ NV03_PGRAPH_NSOURCE_BFR_NOTIFICATION,   "BFR_NOTIFICATION" },
	{ NV03_PGRAPH_NSOURCE_DMA_VTX_PROTECTION, "DMA_VTX_PROTECTION" },
	{ NV03_PGRAPH_NSOURCE_DMA_WIDTH_A,        "DMA_WIDTH_A" },
	{ NV03_PGRAPH_NSOURCE_DMA_WIDTH_B,        "DMA_WIDTH_B" },
	{}
};

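/* PGRAPH interrupt handler: decode the trapped method, let software methods
 * handle ILLEGAL_MTHD notifications, perform context switches, and log
 * anything left over. */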
static void
nv04_graph_intr(struct nouveau_subdev *subdev)
{
	struct nv04_graph_priv *priv = (void *)subdev;
	struct nv04_graph_chan *chan = NULL;
	struct nouveau_namedb *namedb = NULL;
	struct nouveau_handle *handle = NULL;
	u32 stat = nv_rd32(priv, NV03_PGRAPH_INTR);
	u32 nsource = nv_rd32(priv, NV03_PGRAPH_NSOURCE);
	u32 nstatus = nv_rd32(priv, NV03_PGRAPH_NSTATUS);
	u32 addr = nv_rd32(priv, NV04_PGRAPH_TRAPPED_ADDR);
	u32 chid = (addr & 0x0f000000) >> 24;
	u32 subc = (addr & 0x0000e000) >> 13;
	u32 mthd = (addr & 0x00001ffc);
	u32 data = nv_rd32(priv, NV04_PGRAPH_TRAPPED_DATA);
	u32 class = nv_rd32(priv, 0x400180 + subc * 4) & 0xff;
	u32 inst = (nv_rd32(priv, 0x40016c) & 0xffff) << 4;
	u32 show = stat;
	unsigned long flags;

	spin_lock_irqsave(&priv->lock, flags);
	chan = priv->chan[chid];
	if (chan)
		namedb = (void *)nv_pclass(nv_object(chan), NV_NAMEDB_CLASS);
	spin_unlock_irqrestore(&priv->lock, flags);

	if (stat & NV_PGRAPH_INTR_NOTIFY) {
		if (chan && (nsource & NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD)) {
			handle = nouveau_namedb_get_vinst(namedb, inst);
			if (handle && !nv_call(handle->object, mthd, data))
				show &= ~NV_PGRAPH_INTR_NOTIFY;
		}
	}

	if (stat & NV_PGRAPH_INTR_CONTEXT_SWITCH) {
		nv_wr32(priv, NV03_PGRAPH_INTR, NV_PGRAPH_INTR_CONTEXT_SWITCH);
		stat &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
		show &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
		nv04_graph_context_switch(priv);
	}

	nv_wr32(priv, NV03_PGRAPH_INTR, stat);
	nv_wr32(priv, NV04_PGRAPH_FIFO, 0x00000001);

	if (show) {
		nv_error(priv, "%s", "");
		nouveau_bitfield_print(nv04_graph_intr_name, show);
		pr_cont(" nsource:");
		nouveau_bitfield_print(nv04_graph_nsource, nsource);
		pr_cont(" nstatus:");
		nouveau_bitfield_print(nv04_graph_nstatus, nstatus);
		pr_cont("\n");
		nv_error(priv,
			 "ch %d [%s] subc %d class 0x%04x mthd 0x%04x data 0x%08x\n",
			 chid, nouveau_client_name(chan), subc, class, mthd,
			 data);
	}

	nouveau_namedb_put(handle);
}

static int
nv04_graph_ctor(struct nouveau_object *parent, struct nouveau_object *engine,
		struct nouveau_oclass *oclass, void *data, u32 size,
		struct nouveau_object **pobject)
{
	struct nv04_graph_priv *priv;
	int ret;

	ret = nouveau_graph_create(parent, engine, oclass, true, &priv);
	*pobject = nv_object(priv);
	if (ret)
		return ret;

	nv_subdev(priv)->unit = 0x00001000;
	nv_subdev(priv)->intr = nv04_graph_intr;
	nv_engine(priv)->cclass = &nv04_graph_cclass;
	nv_engine(priv)->sclass = nv04_graph_sclass;
	spin_lock_init(&priv->lock);
	return 0;
}

static int
nv04_graph_init(struct nouveau_object *object)
{
	struct nouveau_engine *engine = nv_engine(object);
	struct nv04_graph_priv *priv = (void *)engine;
	int ret;

	ret = nouveau_graph_init(&priv->base);
	if (ret)
		return ret;

	/* Enable PGRAPH interrupts */
	nv_wr32(priv, NV03_PGRAPH_INTR, 0xFFFFFFFF);
	nv_wr32(priv, NV03_PGRAPH_INTR_EN, 0xFFFFFFFF);

	nv_wr32(priv, NV04_PGRAPH_VALID1, 0);
	nv_wr32(priv, NV04_PGRAPH_VALID2, 0);
	/*nv_wr32(priv, NV04_PGRAPH_DEBUG_0, 0x000001FF);
	  nv_wr32(priv, NV04_PGRAPH_DEBUG_0, 0x001FFFFF);*/
	nv_wr32(priv, NV04_PGRAPH_DEBUG_0, 0x1231c000);
	/*1231C000 blob, 001 haiku*/
	/*V_WRITE(NV04_PGRAPH_DEBUG_1, 0xf2d91100);*/
	nv_wr32(priv, NV04_PGRAPH_DEBUG_1, 0x72111100);
	/*0x72111100 blob, 01 haiku*/
	/*nv_wr32(priv, NV04_PGRAPH_DEBUG_2, 0x11d5f870);*/
	nv_wr32(priv, NV04_PGRAPH_DEBUG_2, 0x11d5f071);
	/*haiku same*/

	/*nv_wr32(priv, NV04_PGRAPH_DEBUG_3, 0xfad4ff31);*/
	nv_wr32(priv, NV04_PGRAPH_DEBUG_3, 0xf0d4ff31);
	/*haiku and blob 10d4*/

	nv_wr32(priv, NV04_PGRAPH_STATE, 0xFFFFFFFF);
	nv_wr32(priv, NV04_PGRAPH_CTX_CONTROL, 0x10000100);
	nv_mask(priv, NV04_PGRAPH_CTX_USER, 0xff000000, 0x0f000000);

	/* These don't belong here, they're part of a per-channel context */
	nv_wr32(priv, NV04_PGRAPH_PATTERN_SHAPE, 0x00000000);
	nv_wr32(priv, NV04_PGRAPH_BETA_AND, 0xFFFFFFFF);
	return 0;
}

struct nouveau_oclass
nv04_graph_oclass = {
	.handle = NV_ENGINE(GR, 0x04),
	.ofuncs = &(struct nouveau_ofuncs) {
		.ctor = nv04_graph_ctor,
		.dtor = _nouveau_graph_dtor,
		.init = nv04_graph_init,
		.fini = _nouveau_graph_fini,
	},
};