/*
 * Copyright 2007 Stephane Marchesin
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

#include <core/client.h>
#include <core/os.h>
#include <core/class.h>
#include <core/handle.h>
#include <core/namedb.h>

#include <subdev/fb.h>
#include <subdev/instmem.h>
#include <subdev/timer.h>

#include <engine/fifo.h>
#include <engine/graph.h>

#include "regs.h"

static u32
nv04_graph_ctx_regs[] = {
	0x0040053c,
	0x00400544,
	0x00400540,
	0x00400548,
	NV04_PGRAPH_CTX_SWITCH1,
	NV04_PGRAPH_CTX_SWITCH2,
	NV04_PGRAPH_CTX_SWITCH3,
	NV04_PGRAPH_CTX_SWITCH4,
	NV04_PGRAPH_CTX_CACHE1,
	NV04_PGRAPH_CTX_CACHE2,
	NV04_PGRAPH_CTX_CACHE3,
	NV04_PGRAPH_CTX_CACHE4,
	0x00400184,
	0x004001a4,
	0x004001c4,
	0x004001e4,
	0x00400188,
	0x004001a8,
	0x004001c8,
	0x004001e8,
	0x0040018c,
	0x004001ac,
	0x004001cc,
	0x004001ec,
	0x00400190,
	0x004001b0,
	0x004001d0,
	0x004001f0,
	0x00400194,
	0x004001b4,
	0x004001d4,
	0x004001f4,
	0x00400198,
	0x004001b8,
	0x004001d8,
	0x004001f8,
	0x0040019c,
	0x004001bc,
	0x004001dc,
	0x004001fc,
	0x00400174,
	NV04_PGRAPH_DMA_START_0,
	NV04_PGRAPH_DMA_START_1,
	NV04_PGRAPH_DMA_LENGTH,
	NV04_PGRAPH_DMA_MISC,
	NV04_PGRAPH_DMA_PITCH,
	NV04_PGRAPH_BOFFSET0,
	NV04_PGRAPH_BBASE0,
	NV04_PGRAPH_BLIMIT0,
	NV04_PGRAPH_BOFFSET1,
	NV04_PGRAPH_BBASE1,
	NV04_PGRAPH_BLIMIT1,
	NV04_PGRAPH_BOFFSET2,
	NV04_PGRAPH_BBASE2,
	NV04_PGRAPH_BLIMIT2,
	NV04_PGRAPH_BOFFSET3,
	NV04_PGRAPH_BBASE3,
	NV04_PGRAPH_BLIMIT3,
	NV04_PGRAPH_BOFFSET4,
	NV04_PGRAPH_BBASE4,
	NV04_PGRAPH_BLIMIT4,
	NV04_PGRAPH_BOFFSET5,
	NV04_PGRAPH_BBASE5,
	NV04_PGRAPH_BLIMIT5,
	NV04_PGRAPH_BPITCH0,
	NV04_PGRAPH_BPITCH1,
	NV04_PGRAPH_BPITCH2,
	NV04_PGRAPH_BPITCH3,
	NV04_PGRAPH_BPITCH4,
	NV04_PGRAPH_SURFACE,
	NV04_PGRAPH_STATE,
	NV04_PGRAPH_BSWIZZLE2,
	NV04_PGRAPH_BSWIZZLE5,
	NV04_PGRAPH_BPIXEL,
	NV04_PGRAPH_NOTIFY,
	NV04_PGRAPH_PATT_COLOR0,
	NV04_PGRAPH_PATT_COLOR1,
	NV04_PGRAPH_PATT_COLORRAM+0x00,
	NV04_PGRAPH_PATT_COLORRAM+0x04,
	NV04_PGRAPH_PATT_COLORRAM+0x08,
	NV04_PGRAPH_PATT_COLORRAM+0x0c,
	NV04_PGRAPH_PATT_COLORRAM+0x10,
	NV04_PGRAPH_PATT_COLORRAM+0x14,
	NV04_PGRAPH_PATT_COLORRAM+0x18,
	NV04_PGRAPH_PATT_COLORRAM+0x1c,
	NV04_PGRAPH_PATT_COLORRAM+0x20,
	NV04_PGRAPH_PATT_COLORRAM+0x24,
	NV04_PGRAPH_PATT_COLORRAM+0x28,
	NV04_PGRAPH_PATT_COLORRAM+0x2c,
	NV04_PGRAPH_PATT_COLORRAM+0x30,
	NV04_PGRAPH_PATT_COLORRAM+0x34,
	NV04_PGRAPH_PATT_COLORRAM+0x38,
	NV04_PGRAPH_PATT_COLORRAM+0x3c,
	NV04_PGRAPH_PATT_COLORRAM+0x40,
	NV04_PGRAPH_PATT_COLORRAM+0x44,
	NV04_PGRAPH_PATT_COLORRAM+0x48,
	NV04_PGRAPH_PATT_COLORRAM+0x4c,
	NV04_PGRAPH_PATT_COLORRAM+0x50,
	NV04_PGRAPH_PATT_COLORRAM+0x54,
	NV04_PGRAPH_PATT_COLORRAM+0x58,
	NV04_PGRAPH_PATT_COLORRAM+0x5c,
	NV04_PGRAPH_PATT_COLORRAM+0x60,
	NV04_PGRAPH_PATT_COLORRAM+0x64,
	NV04_PGRAPH_PATT_COLORRAM+0x68,
	NV04_PGRAPH_PATT_COLORRAM+0x6c,
	NV04_PGRAPH_PATT_COLORRAM+0x70,
	NV04_PGRAPH_PATT_COLORRAM+0x74,
	NV04_PGRAPH_PATT_COLORRAM+0x78,
	NV04_PGRAPH_PATT_COLORRAM+0x7c,
	NV04_PGRAPH_PATT_COLORRAM+0x80,
	NV04_PGRAPH_PATT_COLORRAM+0x84,
	NV04_PGRAPH_PATT_COLORRAM+0x88,
	NV04_PGRAPH_PATT_COLORRAM+0x8c,
	NV04_PGRAPH_PATT_COLORRAM+0x90,
	NV04_PGRAPH_PATT_COLORRAM+0x94,
	NV04_PGRAPH_PATT_COLORRAM+0x98,
	NV04_PGRAPH_PATT_COLORRAM+0x9c,
	NV04_PGRAPH_PATT_COLORRAM+0xa0,
	NV04_PGRAPH_PATT_COLORRAM+0xa4,
	NV04_PGRAPH_PATT_COLORRAM+0xa8,
	NV04_PGRAPH_PATT_COLORRAM+0xac,
	NV04_PGRAPH_PATT_COLORRAM+0xb0,
	NV04_PGRAPH_PATT_COLORRAM+0xb4,
	NV04_PGRAPH_PATT_COLORRAM+0xb8,
	NV04_PGRAPH_PATT_COLORRAM+0xbc,
	NV04_PGRAPH_PATT_COLORRAM+0xc0,
	NV04_PGRAPH_PATT_COLORRAM+0xc4,
	NV04_PGRAPH_PATT_COLORRAM+0xc8,
	NV04_PGRAPH_PATT_COLORRAM+0xcc,
	NV04_PGRAPH_PATT_COLORRAM+0xd0,
	NV04_PGRAPH_PATT_COLORRAM+0xd4,
	NV04_PGRAPH_PATT_COLORRAM+0xd8,
	NV04_PGRAPH_PATT_COLORRAM+0xdc,
	NV04_PGRAPH_PATT_COLORRAM+0xe0,
	NV04_PGRAPH_PATT_COLORRAM+0xe4,
	NV04_PGRAPH_PATT_COLORRAM+0xe8,
	NV04_PGRAPH_PATT_COLORRAM+0xec,
	NV04_PGRAPH_PATT_COLORRAM+0xf0,
	NV04_PGRAPH_PATT_COLORRAM+0xf4,
	NV04_PGRAPH_PATT_COLORRAM+0xf8,
	NV04_PGRAPH_PATT_COLORRAM+0xfc,
	NV04_PGRAPH_PATTERN,
	0x0040080c,
	NV04_PGRAPH_PATTERN_SHAPE,
	0x00400600,
	NV04_PGRAPH_ROP3,
	NV04_PGRAPH_CHROMA,
	NV04_PGRAPH_BETA_AND,
	NV04_PGRAPH_BETA_PREMULT,
	NV04_PGRAPH_CONTROL0,
	NV04_PGRAPH_CONTROL1,
	NV04_PGRAPH_CONTROL2,
	NV04_PGRAPH_BLEND,
	NV04_PGRAPH_STORED_FMT,
	NV04_PGRAPH_SOURCE_COLOR,
	0x00400560,
	0x00400568,
	0x00400564,
	0x0040056c,
	0x00400400,
	0x00400480,
	0x00400404,
	0x00400484,
	0x00400408,
	0x00400488,
	0x0040040c,
	0x0040048c,
	0x00400410,
	0x00400490,
	0x00400414,
	0x00400494,
	0x00400418,
	0x00400498,
	0x0040041c,
	0x0040049c,
	0x00400420,
	0x004004a0,
	0x00400424,
	0x004004a4,
	0x00400428,
	0x004004a8,
	0x0040042c,
	0x004004ac,
	0x00400430,
	0x004004b0,
	0x00400434,
	0x004004b4,
	0x00400438,
	0x004004b8,
	0x0040043c,
	0x004004bc,
	0x00400440,
	0x004004c0,
	0x00400444,
	0x004004c4,
	0x00400448,
	0x004004c8,
	0x0040044c,
	0x004004cc,
	0x00400450,
	0x004004d0,
	0x00400454,
	0x004004d4,
	0x00400458,
	0x004004d8,
	0x0040045c,
	0x004004dc,
	0x00400460,
	0x004004e0,
	0x00400464,
	0x004004e4,
	0x00400468,
	0x004004e8,
	0x0040046c,
	0x004004ec,
	0x00400470,
	0x004004f0,
	0x00400474,
	0x004004f4,
	0x00400478,
	0x004004f8,
	0x0040047c,
	0x004004fc,
	0x00400534,
	0x00400538,
	0x00400514,
	0x00400518,
	0x0040051c,
	0x00400520,
	0x00400524,
	0x00400528,
	0x0040052c,
	0x00400530,
	0x00400d00,
	0x00400d40,
	0x00400d80,
	0x00400d04,
	0x00400d44,
	0x00400d84,
	0x00400d08,
	0x00400d48,
	0x00400d88,
	0x00400d0c,
	0x00400d4c,
	0x00400d8c,
	0x00400d10,
	0x00400d50,
	0x00400d90,
	0x00400d14,
	0x00400d54,
	0x00400d94,
	0x00400d18,
	0x00400d58,
	0x00400d98,
	0x00400d1c,
	0x00400d5c,
	0x00400d9c,
	0x00400d20,
	0x00400d60,
	0x00400da0,
	0x00400d24,
	0x00400d64,
	0x00400da4,
	0x00400d28,
	0x00400d68,
	0x00400da8,
	0x00400d2c,
	0x00400d6c,
	0x00400dac,
	0x00400d30,
	0x00400d70,
	0x00400db0,
	0x00400d34,
	0x00400d74,
	0x00400db4,
	0x00400d38,
	0x00400d78,
	0x00400db8,
	0x00400d3c,
	0x00400d7c,
	0x00400dbc,
	0x00400590,
	0x00400594,
	0x00400598,
	0x0040059c,
	0x004005a8,
	0x004005ac,
	0x004005b0,
	0x004005b4,
	0x004005c0,
	0x004005c4,
	0x004005c8,
	0x004005cc,
	0x004005d0,
	0x004005d4,
	0x004005d8,
	0x004005dc,
	0x004005e0,
	NV04_PGRAPH_PASSTHRU_0,
	NV04_PGRAPH_PASSTHRU_1,
	NV04_PGRAPH_PASSTHRU_2,
	NV04_PGRAPH_DVD_COLORFMT,
	NV04_PGRAPH_SCALED_FORMAT,
	NV04_PGRAPH_MISC24_0,
	NV04_PGRAPH_MISC24_1,
	NV04_PGRAPH_MISC24_2,
	0x00400500,
	0x00400504,
	NV04_PGRAPH_VALID1,
	NV04_PGRAPH_VALID2,
	NV04_PGRAPH_DEBUG_3
};

struct nv04_graph_priv {
	struct nouveau_graph base;
	struct nv04_graph_chan *chan[16];
	spinlock_t lock;
};

struct nv04_graph_chan {
	struct nouveau_object base;
	int chid;
	u32 nv04[ARRAY_SIZE(nv04_graph_ctx_regs)];
};


static inline struct nv04_graph_priv *
nv04_graph_priv(struct nv04_graph_chan *chan)
{
	return (void *)nv_object(chan)->engine;
}

/*******************************************************************************
 * Graphics object classes
 ******************************************************************************/

/*
 * Software methods, why they are needed, and how they all work:
 *
 * NV04 and NV05 keep most of the state in PGRAPH context itself, but some
 * 2d engine settings are kept inside the grobjs themselves. The grobjs are
 * 3 words long on both. grobj format on NV04 is:
 *
 * word 0:
 *  - bits 0-7: class
 *  - bit 12: color key active
 *  - bit 13: clip rect active
 *  - bit 14: if set, destination surface is swizzled and taken from buffer 5
 *            [set by NV04_SWIZZLED_SURFACE], otherwise it's linear and taken
 *            from buffer 0 [set by NV04_CONTEXT_SURFACES_2D or
 *            NV03_CONTEXT_SURFACE_DST].
 *  - bits 15-17: 2d operation [aka patch config]
 *  - bit 24: patch valid [enables rendering using this object]
 *  - bit 25: surf3d valid [for tex_tri and multitex_tri only]
 * word 1:
 *  - bits 0-1: mono format
 *  - bits 8-13: color format
 *  - bits 16-31: DMA_NOTIFY instance
 * word 2:
 *  - bits 0-15: DMA_A instance
 *  - bits 16-31: DMA_B instance
 *
 * On NV05 it's:
 *
 * word 0:
 *  - bits 0-7: class
 *  - bit 12: color key active
 *  - bit 13: clip rect active
 *  - bit 14: if set, destination surface is swizzled and taken from buffer 5
 *            [set by NV04_SWIZZLED_SURFACE], otherwise it's linear and taken
 *            from buffer 0 [set by NV04_CONTEXT_SURFACES_2D or
 *            NV03_CONTEXT_SURFACE_DST].
 *  - bits 15-17: 2d operation [aka patch config]
 *  - bits 20-22: dither mode
 *  - bit 24: patch valid [enables rendering using this object]
 *  - bit 25: surface_dst/surface_color/surf2d/surf3d valid
 *  - bit 26: surface_src/surface_zeta valid
 *  - bit 27: pattern valid
 *  - bit 28: rop valid
 *  - bit 29: beta1 valid
 *  - bit 30: beta4 valid
 * word 1:
 *  - bits 0-1: mono format
 *  - bits 8-13: color format
 *  - bits 16-31: DMA_NOTIFY instance
 * word 2:
 *  - bits 0-15: DMA_A instance
 *  - bits 16-31: DMA_B instance
 *
 * NV05 will set/unset the relevant valid bits when you poke the relevant
 * object-binding methods with object of the proper type, or with the NULL
 * type. It'll only allow rendering using the grobj if all needed objects
 * are bound. The needed set of objects depends on selected operation: for
 * example rop object is needed by ROP_AND, but not by SRCCOPY_AND.
 *
 * NV04 doesn't have these methods implemented at all, and doesn't have the
 * relevant bits in grobj. Instead, it'll allow rendering whenever bit 24
 * is set. So we have to emulate them in software, internally keeping the
 * same bits as NV05 does. Since grobjs are aligned to 16 bytes on nv04,
 * but the last word isn't actually used for anything, we abuse it for this
 * purpose.
 *
 * Actually, NV05 can optionally check bit 24 too, but we disable this since
 * there's no use for it.
 *
 * For unknown reasons, NV04 implements surf3d binding in hardware as an
 * exception. Also for unknown reasons, NV04 doesn't implement the clipping
 * methods on the surf3d object, so we have to emulate them too.
 */

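/* Update bits in the grobj's first context word, and mirror the new value
 * into the live PGRAPH state (CTX_SWITCH1 and the CTX_CACHE1 slot of the
 * subchannel that trapped) so the change takes effect immediately.
 */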
static void
nv04_graph_set_ctx1(struct nouveau_object *object, u32 mask, u32 value)
{
	struct nv04_graph_priv *priv = (void *)object->engine;
	int subc = (nv_rd32(priv, NV04_PGRAPH_TRAPPED_ADDR) >> 13) & 0x7;
	u32 tmp;

	tmp = nv_ro32(object, 0x00);
	tmp &= ~mask;
	tmp |= value;
	nv_wo32(object, 0x00, tmp);

	nv_wr32(priv, NV04_PGRAPH_CTX_SWITCH1, tmp);
	nv_wr32(priv, NV04_PGRAPH_CTX_CACHE1 + (subc<<2), tmp);
}

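/* Emulate the NV05-style object-valid bits in the otherwise unused fourth
 * grobj word, then recompute bit 24 (patch valid) according to which
 * objects the currently selected 2d operation requires.
 */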
static void
nv04_graph_set_ctx_val(struct nouveau_object *object, u32 mask, u32 value)
{
	int class, op, valid = 1;
	u32 tmp, ctx1;

	ctx1 = nv_ro32(object, 0x00);
	class = ctx1 & 0xff;
	op = (ctx1 >> 15) & 7;

	tmp = nv_ro32(object, 0x0c);
	tmp &= ~mask;
	tmp |= value;
	nv_wo32(object, 0x0c, tmp);

	/* check for valid surf2d/surf_dst/surf_color */
	if (!(tmp & 0x02000000))
		valid = 0;
	/* check for valid surf_src/surf_zeta */
	if ((class == 0x1f || class == 0x48) && !(tmp & 0x04000000))
		valid = 0;

	switch (op) {
	/* SRCCOPY_AND, SRCCOPY: no extra objects required */
	case 0:
	case 3:
		break;
	/* ROP_AND: requires pattern and rop */
	case 1:
		if (!(tmp & 0x18000000))
			valid = 0;
		break;
	/* BLEND_AND: requires beta1 */
	case 2:
		if (!(tmp & 0x20000000))
			valid = 0;
		break;
	/* SRCCOPY_PREMULT, BLEND_PREMULT: beta4 required */
	case 4:
	case 5:
		if (!(tmp & 0x40000000))
			valid = 0;
		break;
	}

	nv04_graph_set_ctx1(object, 0x01000000, valid << 24);
}

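/* Software method: select the 2d operation (ctx1 bits 15-17).  Old object
 * classes (< 0x40) only accept the first three operations, and changing
 * the operation changes which bound objects are required, so revalidate.
 */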
static int
nv04_graph_mthd_set_operation(struct nouveau_object *object, u32 mthd,
			      void *args, u32 size)
{
	u32 class = nv_ro32(object, 0) & 0xff;
	u32 data = *(u32 *)args;
	if (data > 5)
		return 1;
	/* Old versions of the objects only accept first three operations. */
	if (data > 2 && class < 0x40)
		return 1;
	nv04_graph_set_ctx1(object, 0x00038000, data << 15);
	/* changing operation changes set of objects needed for validation */
	nv04_graph_set_ctx_val(object, 0, 0);
	return 0;
}

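/* NV04 lacks the surf3d clipping methods in hardware (see the comment
 * above), so emulate them: unpack the packed min/extent argument (the
 * extent may be negative) and write the resulting min/max values directly
 * to the PGRAPH clip registers.
 */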
static int
nv04_graph_mthd_surf3d_clip_h(struct nouveau_object *object, u32 mthd,
			      void *args, u32 size)
{
	struct nv04_graph_priv *priv = (void *)object->engine;
	u32 data = *(u32 *)args;
	u32 min = data & 0xffff, max;
	u32 w = data >> 16;
	if (min & 0x8000)
		/* too large */
		return 1;
	if (w & 0x8000)
		/* yes, it accepts negative for some reason. */
		w |= 0xffff0000;
	max = min + w;
	max &= 0x3ffff;
	nv_wr32(priv, 0x40053c, min);
	nv_wr32(priv, 0x400544, max);
	return 0;
}

static int
nv04_graph_mthd_surf3d_clip_v(struct nouveau_object *object, u32 mthd,
			      void *args, u32 size)
{
	struct nv04_graph_priv *priv = (void *)object->engine;
	u32 data = *(u32 *)args;
	u32 min = data & 0xffff, max;
	u32 w = data >> 16;
	if (min & 0x8000)
		/* too large */
		return 1;
	if (w & 0x8000)
		/* yes, it accepts negative for some reason. */
		w |= 0xffff0000;
	max = min + w;
	max &= 0x3ffff;
	nv_wr32(priv, 0x400540, min);
	nv_wr32(priv, 0x400548, max);
	return 0;
}

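/* Look up the class of the object being bound: the method argument is the
 * object's instance address >> 4, so read its first word from instance
 * memory, which holds the class in its low bits.
 */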
static u16
nv04_graph_mthd_bind_class(struct nouveau_object *object, u32 *args, u32 size)
{
	struct nouveau_instmem *imem = nouveau_instmem(object);
	u32 inst = *(u32 *)args << 4;
	return nv_ro32(imem, inst);
}

static int
nv04_graph_mthd_bind_surf2d(struct nouveau_object *object, u32 mthd,
			    void *args, u32 size)
{
	switch (nv04_graph_mthd_bind_class(object, args, size)) {
	case 0x30:
		nv04_graph_set_ctx1(object, 0x00004000, 0);
		nv04_graph_set_ctx_val(object, 0x02000000, 0);
		return 0;
	case 0x42:
		nv04_graph_set_ctx1(object, 0x00004000, 0);
		nv04_graph_set_ctx_val(object, 0x02000000, 0x02000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_surf2d_swzsurf(struct nouveau_object *object, u32 mthd,
				    void *args, u32 size)
{
	switch (nv04_graph_mthd_bind_class(object, args, size)) {
	case 0x30:
		nv04_graph_set_ctx1(object, 0x00004000, 0);
		nv04_graph_set_ctx_val(object, 0x02000000, 0);
		return 0;
	case 0x42:
		nv04_graph_set_ctx1(object, 0x00004000, 0);
		nv04_graph_set_ctx_val(object, 0x02000000, 0x02000000);
		return 0;
	case 0x52:
		nv04_graph_set_ctx1(object, 0x00004000, 0x00004000);
		nv04_graph_set_ctx_val(object, 0x02000000, 0x02000000);
		return 0;
	}
	return 1;
}

static int
nv01_graph_mthd_bind_patt(struct nouveau_object *object, u32 mthd,
			  void *args, u32 size)
{
	switch (nv04_graph_mthd_bind_class(object, args, size)) {
	case 0x30:
		nv04_graph_set_ctx_val(object, 0x08000000, 0);
		return 0;
	case 0x18:
		nv04_graph_set_ctx_val(object, 0x08000000, 0x08000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_patt(struct nouveau_object *object, u32 mthd,
			  void *args, u32 size)
{
	switch (nv04_graph_mthd_bind_class(object, args, size)) {
	case 0x30:
		nv04_graph_set_ctx_val(object, 0x08000000, 0);
		return 0;
	case 0x44:
		nv04_graph_set_ctx_val(object, 0x08000000, 0x08000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_rop(struct nouveau_object *object, u32 mthd,
			 void *args, u32 size)
{
	switch (nv04_graph_mthd_bind_class(object, args, size)) {
	case 0x30:
		nv04_graph_set_ctx_val(object, 0x10000000, 0);
		return 0;
	case 0x43:
		nv04_graph_set_ctx_val(object, 0x10000000, 0x10000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_beta1(struct nouveau_object *object, u32 mthd,
			   void *args, u32 size)
{
	switch (nv04_graph_mthd_bind_class(object, args, size)) {
	case 0x30:
		nv04_graph_set_ctx_val(object, 0x20000000, 0);
		return 0;
	case 0x12:
		nv04_graph_set_ctx_val(object, 0x20000000, 0x20000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_beta4(struct nouveau_object *object, u32 mthd,
			   void *args, u32 size)
{
	switch (nv04_graph_mthd_bind_class(object, args, size)) {
	case 0x30:
		nv04_graph_set_ctx_val(object, 0x40000000, 0);
		return 0;
	case 0x72:
		nv04_graph_set_ctx_val(object, 0x40000000, 0x40000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_surf_dst(struct nouveau_object *object, u32 mthd,
			      void *args, u32 size)
{
	switch (nv04_graph_mthd_bind_class(object, args, size)) {
	case 0x30:
		nv04_graph_set_ctx_val(object, 0x02000000, 0);
		return 0;
	case 0x58:
		nv04_graph_set_ctx_val(object, 0x02000000, 0x02000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_surf_src(struct nouveau_object *object, u32 mthd,
			      void *args, u32 size)
{
	switch (nv04_graph_mthd_bind_class(object, args, size)) {
	case 0x30:
		nv04_graph_set_ctx_val(object, 0x04000000, 0);
		return 0;
	case 0x59:
		nv04_graph_set_ctx_val(object, 0x04000000, 0x04000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_surf_color(struct nouveau_object *object, u32 mthd,
				void *args, u32 size)
{
	switch (nv04_graph_mthd_bind_class(object, args, size)) {
	case 0x30:
		nv04_graph_set_ctx_val(object, 0x02000000, 0);
		return 0;
	case 0x5a:
		nv04_graph_set_ctx_val(object, 0x02000000, 0x02000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_surf_zeta(struct nouveau_object *object, u32 mthd,
			       void *args, u32 size)
{
	switch (nv04_graph_mthd_bind_class(object, args, size)) {
	case 0x30:
		nv04_graph_set_ctx_val(object, 0x04000000, 0);
		return 0;
	case 0x5b:
		nv04_graph_set_ctx_val(object, 0x04000000, 0x04000000);
		return 0;
	}
	return 1;
}

static int
nv01_graph_mthd_bind_clip(struct nouveau_object *object, u32 mthd,
			  void *args, u32 size)
{
	switch (nv04_graph_mthd_bind_class(object, args, size)) {
	case 0x30:
		nv04_graph_set_ctx1(object, 0x2000, 0);
		return 0;
	case 0x19:
		nv04_graph_set_ctx1(object, 0x2000, 0x2000);
		return 0;
	}
	return 1;
}

static int
nv01_graph_mthd_bind_chroma(struct nouveau_object *object, u32 mthd,
			    void *args, u32 size)
{
	switch (nv04_graph_mthd_bind_class(object, args, size)) {
	case 0x30:
		nv04_graph_set_ctx1(object, 0x1000, 0);
		return 0;
	/* Yes, for some reason even the old versions of objects
	 * accept 0x57 and not 0x17. Consistency be damned.
	 */
	case 0x57:
		nv04_graph_set_ctx1(object, 0x1000, 0x1000);
		return 0;
	}
	return 1;
}

static struct nouveau_omthds
nv03_graph_gdi_omthds[] = {
	{ 0x0184, 0x0184, nv01_graph_mthd_bind_patt },
	{ 0x0188, 0x0188, nv04_graph_mthd_bind_rop },
	{ 0x018c, 0x018c, nv04_graph_mthd_bind_beta1 },
	{ 0x0190, 0x0190, nv04_graph_mthd_bind_surf_dst },
	{ 0x02fc, 0x02fc, nv04_graph_mthd_set_operation },
	{}
};

static struct nouveau_omthds
nv04_graph_gdi_omthds[] = {
	{ 0x0188, 0x0188, nv04_graph_mthd_bind_patt },
	{ 0x018c, 0x018c, nv04_graph_mthd_bind_rop },
	{ 0x0190, 0x0190, nv04_graph_mthd_bind_beta1 },
	{ 0x0194, 0x0194, nv04_graph_mthd_bind_beta4 },
	{ 0x0198, 0x0198, nv04_graph_mthd_bind_surf2d },
	{ 0x02fc, 0x02fc, nv04_graph_mthd_set_operation },
	{}
};

static struct nouveau_omthds
nv01_graph_blit_omthds[] = {
	{ 0x0184, 0x0184, nv01_graph_mthd_bind_chroma },
	{ 0x0188, 0x0188, nv01_graph_mthd_bind_clip },
	{ 0x018c, 0x018c, nv01_graph_mthd_bind_patt },
	{ 0x0190, 0x0190, nv04_graph_mthd_bind_rop },
	{ 0x0194, 0x0194, nv04_graph_mthd_bind_beta1 },
	{ 0x0198, 0x0198, nv04_graph_mthd_bind_surf_dst },
	{ 0x019c, 0x019c, nv04_graph_mthd_bind_surf_src },
	{ 0x02fc, 0x02fc, nv04_graph_mthd_set_operation },
	{}
};

static struct nouveau_omthds
nv04_graph_blit_omthds[] = {
	{ 0x0184, 0x0184, nv01_graph_mthd_bind_chroma },
	{ 0x0188, 0x0188, nv01_graph_mthd_bind_clip },
	{ 0x018c, 0x018c, nv04_graph_mthd_bind_patt },
	{ 0x0190, 0x0190, nv04_graph_mthd_bind_rop },
	{ 0x0194, 0x0194, nv04_graph_mthd_bind_beta1 },
	{ 0x0198, 0x0198, nv04_graph_mthd_bind_beta4 },
	{ 0x019c, 0x019c, nv04_graph_mthd_bind_surf2d },
	{ 0x02fc, 0x02fc, nv04_graph_mthd_set_operation },
	{}
};

static struct nouveau_omthds
nv04_graph_iifc_omthds[] = {
	{ 0x0188, 0x0188, nv01_graph_mthd_bind_chroma },
	{ 0x018c, 0x018c, nv01_graph_mthd_bind_clip },
	{ 0x0190, 0x0190, nv04_graph_mthd_bind_patt },
	{ 0x0194, 0x0194, nv04_graph_mthd_bind_rop },
	{ 0x0198, 0x0198, nv04_graph_mthd_bind_beta1 },
	{ 0x019c, 0x019c, nv04_graph_mthd_bind_beta4 },
	{ 0x01a0, 0x01a0, nv04_graph_mthd_bind_surf2d_swzsurf },
	{ 0x03e4, 0x03e4, nv04_graph_mthd_set_operation },
	{}
};

static struct nouveau_omthds
nv01_graph_ifc_omthds[] = {
	{ 0x0184, 0x0184, nv01_graph_mthd_bind_chroma },
	{ 0x0188, 0x0188, nv01_graph_mthd_bind_clip },
	{ 0x018c, 0x018c, nv01_graph_mthd_bind_patt },
	{ 0x0190, 0x0190, nv04_graph_mthd_bind_rop },
	{ 0x0194, 0x0194, nv04_graph_mthd_bind_beta1 },
	{ 0x0198, 0x0198, nv04_graph_mthd_bind_surf_dst },
	{ 0x02fc, 0x02fc, nv04_graph_mthd_set_operation },
	{}
};

static struct nouveau_omthds
nv04_graph_ifc_omthds[] = {
	{ 0x0184, 0x0184, nv01_graph_mthd_bind_chroma },
	{ 0x0188, 0x0188, nv01_graph_mthd_bind_clip },
	{ 0x018c, 0x018c, nv04_graph_mthd_bind_patt },
	{ 0x0190, 0x0190, nv04_graph_mthd_bind_rop },
	{ 0x0194, 0x0194, nv04_graph_mthd_bind_beta1 },
	{ 0x0198, 0x0198, nv04_graph_mthd_bind_beta4 },
	{ 0x019c, 0x019c, nv04_graph_mthd_bind_surf2d },
	{ 0x02fc, 0x02fc, nv04_graph_mthd_set_operation },
	{}
};

static struct nouveau_omthds
nv03_graph_sifc_omthds[] = {
	{ 0x0184, 0x0184, nv01_graph_mthd_bind_chroma },
	{ 0x0188, 0x0188, nv01_graph_mthd_bind_patt },
	{ 0x018c, 0x018c, nv04_graph_mthd_bind_rop },
	{ 0x0190, 0x0190, nv04_graph_mthd_bind_beta1 },
	{ 0x0194, 0x0194, nv04_graph_mthd_bind_surf_dst },
	{ 0x02fc, 0x02fc, nv04_graph_mthd_set_operation },
	{}
};

static struct nouveau_omthds
nv04_graph_sifc_omthds[] = {
	{ 0x0184, 0x0184, nv01_graph_mthd_bind_chroma },
	{ 0x0188, 0x0188, nv04_graph_mthd_bind_patt },
	{ 0x018c, 0x018c, nv04_graph_mthd_bind_rop },
	{ 0x0190, 0x0190, nv04_graph_mthd_bind_beta1 },
	{ 0x0194, 0x0194, nv04_graph_mthd_bind_beta4 },
	{ 0x0198, 0x0198, nv04_graph_mthd_bind_surf2d },
	{ 0x02fc, 0x02fc, nv04_graph_mthd_set_operation },
	{}
};

static struct nouveau_omthds
nv03_graph_sifm_omthds[] = {
	{ 0x0188, 0x0188, nv01_graph_mthd_bind_patt },
	{ 0x018c, 0x018c, nv04_graph_mthd_bind_rop },
	{ 0x0190, 0x0190, nv04_graph_mthd_bind_beta1 },
	{ 0x0194, 0x0194, nv04_graph_mthd_bind_surf_dst },
	{ 0x0304, 0x0304, nv04_graph_mthd_set_operation },
	{}
};

static struct nouveau_omthds
nv04_graph_sifm_omthds[] = {
	{ 0x0188, 0x0188, nv04_graph_mthd_bind_patt },
	{ 0x018c, 0x018c, nv04_graph_mthd_bind_rop },
	{ 0x0190, 0x0190, nv04_graph_mthd_bind_beta1 },
	{ 0x0194, 0x0194, nv04_graph_mthd_bind_beta4 },
	{ 0x0198, 0x0198, nv04_graph_mthd_bind_surf2d },
	{ 0x0304, 0x0304, nv04_graph_mthd_set_operation },
	{}
};

static struct nouveau_omthds
nv04_graph_surf3d_omthds[] = {
	{ 0x02f8, 0x02f8, nv04_graph_mthd_surf3d_clip_h },
	{ 0x02fc, 0x02fc, nv04_graph_mthd_surf3d_clip_v },
	{}
};

static struct nouveau_omthds
nv03_graph_ttri_omthds[] = {
	{ 0x0188, 0x0188, nv01_graph_mthd_bind_clip },
	{ 0x018c, 0x018c, nv04_graph_mthd_bind_surf_color },
	{ 0x0190, 0x0190, nv04_graph_mthd_bind_surf_zeta },
	{}
};

static struct nouveau_omthds
nv01_graph_prim_omthds[] = {
	{ 0x0184, 0x0184, nv01_graph_mthd_bind_clip },
	{ 0x0188, 0x0188, nv01_graph_mthd_bind_patt },
	{ 0x018c, 0x018c, nv04_graph_mthd_bind_rop },
	{ 0x0190, 0x0190, nv04_graph_mthd_bind_beta1 },
	{ 0x0194, 0x0194, nv04_graph_mthd_bind_surf_dst },
	{ 0x02fc, 0x02fc, nv04_graph_mthd_set_operation },
	{}
};

static struct nouveau_omthds
nv04_graph_prim_omthds[] = {
	{ 0x0184, 0x0184, nv01_graph_mthd_bind_clip },
	{ 0x0188, 0x0188, nv04_graph_mthd_bind_patt },
	{ 0x018c, 0x018c, nv04_graph_mthd_bind_rop },
	{ 0x0190, 0x0190, nv04_graph_mthd_bind_beta1 },
	{ 0x0194, 0x0194, nv04_graph_mthd_bind_beta4 },
	{ 0x0198, 0x0198, nv04_graph_mthd_bind_surf2d },
	{ 0x02fc, 0x02fc, nv04_graph_mthd_set_operation },
	{}
};

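/* Construct a 16-byte grobj: word 0 holds the class (plus the big-endian
 * flag where needed), the remaining words are cleared.  The last word is
 * later (ab)used by the software methods above to track the NV05-style
 * valid bits.
 */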
static int
nv04_graph_object_ctor(struct nouveau_object *parent,
		       struct nouveau_object *engine,
		       struct nouveau_oclass *oclass, void *data, u32 size,
		       struct nouveau_object **pobject)
{
	struct nouveau_gpuobj *obj;
	int ret;

	ret = nouveau_gpuobj_create(parent, engine, oclass, 0, parent,
				    16, 16, 0, &obj);
	*pobject = nv_object(obj);
	if (ret)
		return ret;

	nv_wo32(obj, 0x00, nv_mclass(obj));
#ifdef __BIG_ENDIAN
	nv_mo32(obj, 0x00, 0x00080000, 0x00080000);
#endif
	nv_wo32(obj, 0x04, 0x00000000);
	nv_wo32(obj, 0x08, 0x00000000);
	nv_wo32(obj, 0x0c, 0x00000000);
	return 0;
}

struct nouveau_ofuncs
nv04_graph_ofuncs = {
	.ctor = nv04_graph_object_ctor,
	.dtor = _nouveau_gpuobj_dtor,
	.init = _nouveau_gpuobj_init,
	.fini = _nouveau_gpuobj_fini,
	.rd32 = _nouveau_gpuobj_rd32,
	.wr32 = _nouveau_gpuobj_wr32,
};

static struct nouveau_oclass
nv04_graph_sclass[] = {
	{ 0x0012, &nv04_graph_ofuncs }, /* beta1 */
	{ 0x0017, &nv04_graph_ofuncs }, /* chroma */
	{ 0x0018, &nv04_graph_ofuncs }, /* pattern (nv01) */
	{ 0x0019, &nv04_graph_ofuncs }, /* clip */
	{ 0x001c, &nv04_graph_ofuncs, nv01_graph_prim_omthds }, /* line */
	{ 0x001d, &nv04_graph_ofuncs, nv01_graph_prim_omthds }, /* tri */
	{ 0x001e, &nv04_graph_ofuncs, nv01_graph_prim_omthds }, /* rect */
	{ 0x001f, &nv04_graph_ofuncs, nv01_graph_blit_omthds },
	{ 0x0021, &nv04_graph_ofuncs, nv01_graph_ifc_omthds },
	{ 0x0030, &nv04_graph_ofuncs }, /* null */
	{ 0x0036, &nv04_graph_ofuncs, nv03_graph_sifc_omthds },
	{ 0x0037, &nv04_graph_ofuncs, nv03_graph_sifm_omthds },
	{ 0x0038, &nv04_graph_ofuncs }, /* dvd subpicture */
	{ 0x0039, &nv04_graph_ofuncs }, /* m2mf */
	{ 0x0042, &nv04_graph_ofuncs }, /* surf2d */
	{ 0x0043, &nv04_graph_ofuncs }, /* rop */
	{ 0x0044, &nv04_graph_ofuncs }, /* pattern */
	{ 0x0048, &nv04_graph_ofuncs, nv03_graph_ttri_omthds },
	{ 0x004a, &nv04_graph_ofuncs, nv04_graph_gdi_omthds },
	{ 0x004b, &nv04_graph_ofuncs, nv03_graph_gdi_omthds },
	{ 0x0052, &nv04_graph_ofuncs }, /* swzsurf */
	{ 0x0053, &nv04_graph_ofuncs, nv04_graph_surf3d_omthds },
	{ 0x0054, &nv04_graph_ofuncs }, /* ttri */
	{ 0x0055, &nv04_graph_ofuncs }, /* mtri */
	{ 0x0057, &nv04_graph_ofuncs }, /* chroma */
	{ 0x0058, &nv04_graph_ofuncs }, /* surf_dst */
	{ 0x0059, &nv04_graph_ofuncs }, /* surf_src */
	{ 0x005a, &nv04_graph_ofuncs }, /* surf_color */
	{ 0x005b, &nv04_graph_ofuncs }, /* surf_zeta */
	{ 0x005c, &nv04_graph_ofuncs, nv04_graph_prim_omthds }, /* line */
	{ 0x005d, &nv04_graph_ofuncs, nv04_graph_prim_omthds }, /* tri */
	{ 0x005e, &nv04_graph_ofuncs, nv04_graph_prim_omthds }, /* rect */
	{ 0x005f, &nv04_graph_ofuncs, nv04_graph_blit_omthds },
	{ 0x0060, &nv04_graph_ofuncs, nv04_graph_iifc_omthds },
	{ 0x0061, &nv04_graph_ofuncs, nv04_graph_ifc_omthds },
	{ 0x0064, &nv04_graph_ofuncs }, /* iifc (nv05) */
	{ 0x0065, &nv04_graph_ofuncs }, /* ifc (nv05) */
	{ 0x0066, &nv04_graph_ofuncs }, /* sifc (nv05) */
	{ 0x0072, &nv04_graph_ofuncs }, /* beta4 */
	{ 0x0076, &nv04_graph_ofuncs, nv04_graph_sifc_omthds },
	{ 0x0077, &nv04_graph_ofuncs, nv04_graph_sifm_omthds },
	{},
};

/*******************************************************************************
 * PGRAPH context
 ******************************************************************************/

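/* Return the software context of the channel whose state is currently
 * loaded into PGRAPH, or NULL if no context is marked valid.
 */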
static struct nv04_graph_chan *
nv04_graph_channel(struct nv04_graph_priv *priv)
{
	struct nv04_graph_chan *chan = NULL;
	if (nv_rd32(priv, NV04_PGRAPH_CTX_CONTROL) & 0x00010000) {
		int chid = nv_rd32(priv, NV04_PGRAPH_CTX_USER) >> 24;
		if (chid < ARRAY_SIZE(priv->chan))
			chan = priv->chan[chid];
	}
	return chan;
}

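/* Restore a channel's saved copy of every register in nv04_graph_ctx_regs,
 * mark the hardware context valid and select the channel in CTX_USER.
 */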
static int
nv04_graph_load_context(struct nv04_graph_chan *chan, int chid)
{
	struct nv04_graph_priv *priv = nv04_graph_priv(chan);
	int i;

	for (i = 0; i < ARRAY_SIZE(nv04_graph_ctx_regs); i++)
		nv_wr32(priv, nv04_graph_ctx_regs[i], chan->nv04[i]);

	nv_wr32(priv, NV04_PGRAPH_CTX_CONTROL, 0x10010100);
	nv_mask(priv, NV04_PGRAPH_CTX_USER, 0xff000000, chid << 24);
	nv_mask(priv, NV04_PGRAPH_FFINTFC_ST2, 0xfff00000, 0x00000000);
	return 0;
}

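/* Save the current PGRAPH register state into the channel's software copy
 * and mark the hardware context invalid (channel id 0xf).
 */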
static int
nv04_graph_unload_context(struct nv04_graph_chan *chan)
{
	struct nv04_graph_priv *priv = nv04_graph_priv(chan);
	int i;

	for (i = 0; i < ARRAY_SIZE(nv04_graph_ctx_regs); i++)
		chan->nv04[i] = nv_rd32(priv, nv04_graph_ctx_regs[i]);

	nv_wr32(priv, NV04_PGRAPH_CTX_CONTROL, 0x10000000);
	nv_mask(priv, NV04_PGRAPH_CTX_USER, 0xff000000, 0x0f000000);
	return 0;
}

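/* Handle a PGRAPH context switch request: wait for the engine to idle,
 * save the outgoing channel's state (if any) and load the context of the
 * channel that caused the trap.
 */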
static void
nv04_graph_context_switch(struct nv04_graph_priv *priv)
{
	struct nv04_graph_chan *prev = NULL;
	struct nv04_graph_chan *next = NULL;
	unsigned long flags;
	int chid;

	spin_lock_irqsave(&priv->lock, flags);
	nv04_graph_idle(priv);

	/* If previous context is valid, we need to save it */
	prev = nv04_graph_channel(priv);
	if (prev)
		nv04_graph_unload_context(prev);

	/* load context for next channel */
	chid = (nv_rd32(priv, NV04_PGRAPH_TRAPPED_ADDR) >> 24) & 0x0f;
	next = priv->chan[chid];
	if (next)
		nv04_graph_load_context(next, chid);

	spin_unlock_irqrestore(&priv->lock, flags);
}

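/* Find the slot in a channel's saved-register array that shadows the given
 * PGRAPH register, or NULL if the register isn't part of the context.
 */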
static u32 *ctx_reg(struct nv04_graph_chan *chan, u32 reg)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(nv04_graph_ctx_regs); i++) {
		if (nv04_graph_ctx_regs[i] == reg)
			return &chan->nv04[i];
	}

	return NULL;
}

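/* Per-channel PGRAPH context: purely a software copy of the register list
 * above.  If the channel already has a context, reuse it rather than
 * allocating a second copy.
 */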
static int
nv04_graph_context_ctor(struct nouveau_object *parent,
			struct nouveau_object *engine,
			struct nouveau_oclass *oclass, void *data, u32 size,
			struct nouveau_object **pobject)
{
	struct nouveau_fifo_chan *fifo = (void *)parent;
	struct nv04_graph_priv *priv = (void *)engine;
	struct nv04_graph_chan *chan;
	unsigned long flags;
	int ret;

	ret = nouveau_object_create(parent, engine, oclass, 0, &chan);
	*pobject = nv_object(chan);
	if (ret)
		return ret;

	spin_lock_irqsave(&priv->lock, flags);
	if (priv->chan[fifo->chid]) {
		*pobject = nv_object(priv->chan[fifo->chid]);
		atomic_inc(&(*pobject)->refcount);
		spin_unlock_irqrestore(&priv->lock, flags);
		nouveau_object_destroy(&chan->base);
		return 1;
	}

	*ctx_reg(chan, NV04_PGRAPH_DEBUG_3) = 0xfad4ff31;

	priv->chan[fifo->chid] = chan;
	chan->chid = fifo->chid;
	spin_unlock_irqrestore(&priv->lock, flags);
	return 0;
}

static void
nv04_graph_context_dtor(struct nouveau_object *object)
{
	struct nv04_graph_priv *priv = (void *)object->engine;
	struct nv04_graph_chan *chan = (void *)object;
	unsigned long flags;

	spin_lock_irqsave(&priv->lock, flags);
	priv->chan[chan->chid] = NULL;
	spin_unlock_irqrestore(&priv->lock, flags);

	nouveau_object_destroy(&chan->base);
}

static int
nv04_graph_context_fini(struct nouveau_object *object, bool suspend)
{
	struct nv04_graph_priv *priv = (void *)object->engine;
	struct nv04_graph_chan *chan = (void *)object;
	unsigned long flags;

	spin_lock_irqsave(&priv->lock, flags);
	nv_mask(priv, NV04_PGRAPH_FIFO, 0x00000001, 0x00000000);
	if (nv04_graph_channel(priv) == chan)
		nv04_graph_unload_context(chan);
	nv_mask(priv, NV04_PGRAPH_FIFO, 0x00000001, 0x00000001);
	spin_unlock_irqrestore(&priv->lock, flags);

	return nouveau_object_fini(&chan->base, suspend);
}

static struct nouveau_oclass
nv04_graph_cclass = {
	.handle = NV_ENGCTX(GR, 0x04),
	.ofuncs = &(struct nouveau_ofuncs) {
		.ctor = nv04_graph_context_ctor,
		.dtor = nv04_graph_context_dtor,
		.init = nouveau_object_init,
		.fini = nv04_graph_context_fini,
	},
};

/*******************************************************************************
 * PGRAPH engine/subdev functions
 ******************************************************************************/

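/* Wait for PGRAPH to report idle; NV40-family parts mask out the
 * SYNC_STALL status bit here.
 */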
bool
nv04_graph_idle(void *obj)
{
	struct nouveau_graph *graph = nouveau_graph(obj);
	u32 mask = 0xffffffff;

	if (nv_device(obj)->card_type == NV_40)
		mask &= ~NV40_PGRAPH_STATUS_SYNC_STALL;

	if (!nv_wait(graph, NV04_PGRAPH_STATUS, mask, 0)) {
		nv_error(graph, "idle timed out with status 0x%08x\n",
			 nv_rd32(graph, NV04_PGRAPH_STATUS));
		return false;
	}

	return true;
}

static const struct nouveau_bitfield
nv04_graph_intr_name[] = {
	{ NV_PGRAPH_INTR_NOTIFY, "NOTIFY" },
	{}
};

static const struct nouveau_bitfield
nv04_graph_nstatus[] = {
	{ NV04_PGRAPH_NSTATUS_STATE_IN_USE, "STATE_IN_USE" },
	{ NV04_PGRAPH_NSTATUS_INVALID_STATE, "INVALID_STATE" },
	{ NV04_PGRAPH_NSTATUS_BAD_ARGUMENT, "BAD_ARGUMENT" },
	{ NV04_PGRAPH_NSTATUS_PROTECTION_FAULT, "PROTECTION_FAULT" },
	{}
};

const struct nouveau_bitfield
nv04_graph_nsource[] = {
	{ NV03_PGRAPH_NSOURCE_NOTIFICATION, "NOTIFICATION" },
	{ NV03_PGRAPH_NSOURCE_DATA_ERROR, "DATA_ERROR" },
	{ NV03_PGRAPH_NSOURCE_PROTECTION_ERROR, "PROTECTION_ERROR" },
	{ NV03_PGRAPH_NSOURCE_RANGE_EXCEPTION, "RANGE_EXCEPTION" },
	{ NV03_PGRAPH_NSOURCE_LIMIT_COLOR, "LIMIT_COLOR" },
	{ NV03_PGRAPH_NSOURCE_LIMIT_ZETA, "LIMIT_ZETA" },
	{ NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD, "ILLEGAL_MTHD" },
	{ NV03_PGRAPH_NSOURCE_DMA_R_PROTECTION, "DMA_R_PROTECTION" },
	{ NV03_PGRAPH_NSOURCE_DMA_W_PROTECTION, "DMA_W_PROTECTION" },
	{ NV03_PGRAPH_NSOURCE_FORMAT_EXCEPTION, "FORMAT_EXCEPTION" },
	{ NV03_PGRAPH_NSOURCE_PATCH_EXCEPTION, "PATCH_EXCEPTION" },
	{ NV03_PGRAPH_NSOURCE_STATE_INVALID, "STATE_INVALID" },
	{ NV03_PGRAPH_NSOURCE_DOUBLE_NOTIFY, "DOUBLE_NOTIFY" },
	{ NV03_PGRAPH_NSOURCE_NOTIFY_IN_USE, "NOTIFY_IN_USE" },
	{ NV03_PGRAPH_NSOURCE_METHOD_CNT, "METHOD_CNT" },
	{ NV03_PGRAPH_NSOURCE_BFR_NOTIFICATION, "BFR_NOTIFICATION" },
	{ NV03_PGRAPH_NSOURCE_DMA_VTX_PROTECTION, "DMA_VTX_PROTECTION" },
	{ NV03_PGRAPH_NSOURCE_DMA_WIDTH_A, "DMA_WIDTH_A" },
	{ NV03_PGRAPH_NSOURCE_DMA_WIDTH_B, "DMA_WIDTH_B" },
	{}
};

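/* PGRAPH interrupt handler.  ILLEGAL_MTHD notifications are first offered
 * to the software methods of the object at the trapped instance address;
 * CONTEXT_SWITCH interrupts trigger a software context switch, and
 * anything left unhandled is logged.
 */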
static void
nv04_graph_intr(struct nouveau_subdev *subdev)
{
	struct nv04_graph_priv *priv = (void *)subdev;
	struct nv04_graph_chan *chan = NULL;
	struct nouveau_namedb *namedb = NULL;
	struct nouveau_handle *handle = NULL;
	u32 stat = nv_rd32(priv, NV03_PGRAPH_INTR);
	u32 nsource = nv_rd32(priv, NV03_PGRAPH_NSOURCE);
	u32 nstatus = nv_rd32(priv, NV03_PGRAPH_NSTATUS);
	u32 addr = nv_rd32(priv, NV04_PGRAPH_TRAPPED_ADDR);
	u32 chid = (addr & 0x0f000000) >> 24;
	u32 subc = (addr & 0x0000e000) >> 13;
	u32 mthd = (addr & 0x00001ffc);
	u32 data = nv_rd32(priv, NV04_PGRAPH_TRAPPED_DATA);
	u32 class = nv_rd32(priv, 0x400180 + subc * 4) & 0xff;
	u32 inst = (nv_rd32(priv, 0x40016c) & 0xffff) << 4;
	u32 show = stat;
	unsigned long flags;

	spin_lock_irqsave(&priv->lock, flags);
	chan = priv->chan[chid];
	if (chan)
		namedb = (void *)nv_pclass(nv_object(chan), NV_NAMEDB_CLASS);
	spin_unlock_irqrestore(&priv->lock, flags);

	if (stat & NV_PGRAPH_INTR_NOTIFY) {
		if (chan && (nsource & NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD)) {
			handle = nouveau_namedb_get_vinst(namedb, inst);
			if (handle && !nv_call(handle->object, mthd, data))
				show &= ~NV_PGRAPH_INTR_NOTIFY;
		}
	}

	if (stat & NV_PGRAPH_INTR_CONTEXT_SWITCH) {
		nv_wr32(priv, NV03_PGRAPH_INTR, NV_PGRAPH_INTR_CONTEXT_SWITCH);
		stat &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
		show &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
		nv04_graph_context_switch(priv);
	}

	nv_wr32(priv, NV03_PGRAPH_INTR, stat);
	nv_wr32(priv, NV04_PGRAPH_FIFO, 0x00000001);

	if (show) {
		nv_error(priv, "%s", "");
		nouveau_bitfield_print(nv04_graph_intr_name, show);
		pr_cont(" nsource:");
		nouveau_bitfield_print(nv04_graph_nsource, nsource);
		pr_cont(" nstatus:");
		nouveau_bitfield_print(nv04_graph_nstatus, nstatus);
		pr_cont("\n");
		nv_error(priv,
			 "ch %d [%s] subc %d class 0x%04x mthd 0x%04x data 0x%08x\n",
			 chid, nouveau_client_name(chan), subc, class, mthd,
			 data);
	}

	nouveau_namedb_put(handle);
}

static int
nv04_graph_ctor(struct nouveau_object *parent, struct nouveau_object *engine,
		struct nouveau_oclass *oclass, void *data, u32 size,
		struct nouveau_object **pobject)
{
	struct nv04_graph_priv *priv;
	int ret;

	ret = nouveau_graph_create(parent, engine, oclass, true, &priv);
	*pobject = nv_object(priv);
	if (ret)
		return ret;

	nv_subdev(priv)->unit = 0x00001000;
	nv_subdev(priv)->intr = nv04_graph_intr;
	nv_engine(priv)->cclass = &nv04_graph_cclass;
	nv_engine(priv)->sclass = nv04_graph_sclass;
	spin_lock_init(&priv->lock);
	return 0;
}

static int
nv04_graph_init(struct nouveau_object *object)
{
	struct nouveau_engine *engine = nv_engine(object);
	struct nv04_graph_priv *priv = (void *)engine;
	int ret;

	ret = nouveau_graph_init(&priv->base);
	if (ret)
		return ret;

	/* Enable PGRAPH interrupts */
	nv_wr32(priv, NV03_PGRAPH_INTR, 0xFFFFFFFF);
	nv_wr32(priv, NV03_PGRAPH_INTR_EN, 0xFFFFFFFF);

	nv_wr32(priv, NV04_PGRAPH_VALID1, 0);
	nv_wr32(priv, NV04_PGRAPH_VALID2, 0);
	/*nv_wr32(priv, NV04_PGRAPH_DEBUG_0, 0x000001FF);
	nv_wr32(priv, NV04_PGRAPH_DEBUG_0, 0x001FFFFF);*/
	nv_wr32(priv, NV04_PGRAPH_DEBUG_0, 0x1231c000);
	/*1231C000 blob, 001 haiku*/
	/*V_WRITE(NV04_PGRAPH_DEBUG_1, 0xf2d91100);*/
	nv_wr32(priv, NV04_PGRAPH_DEBUG_1, 0x72111100);
	/*0x72111100 blob , 01 haiku*/
	/*nv_wr32(priv, NV04_PGRAPH_DEBUG_2, 0x11d5f870);*/
	nv_wr32(priv, NV04_PGRAPH_DEBUG_2, 0x11d5f071);
	/*haiku same*/

	/*nv_wr32(priv, NV04_PGRAPH_DEBUG_3, 0xfad4ff31);*/
	nv_wr32(priv, NV04_PGRAPH_DEBUG_3, 0xf0d4ff31);
	/*haiku and blob 10d4*/

	nv_wr32(priv, NV04_PGRAPH_STATE, 0xFFFFFFFF);
	nv_wr32(priv, NV04_PGRAPH_CTX_CONTROL, 0x10000100);
	nv_mask(priv, NV04_PGRAPH_CTX_USER, 0xff000000, 0x0f000000);

	/* These don't belong here, they're part of a per-channel context */
	nv_wr32(priv, NV04_PGRAPH_PATTERN_SHAPE, 0x00000000);
	nv_wr32(priv, NV04_PGRAPH_BETA_AND, 0xFFFFFFFF);
	return 0;
}

struct nouveau_oclass
nv04_graph_oclass = {
	.handle = NV_ENGINE(GR, 0x04),
	.ofuncs = &(struct nouveau_ofuncs) {
		.ctor = nv04_graph_ctor,
		.dtor = _nouveau_graph_dtor,
		.init = nv04_graph_init,
		.fini = _nouveau_graph_fini,
	},
};