/*
 * Copyright 2007 Stephane Marchesin
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

#include "drmP.h"
#include "drm.h"
#include "nouveau_drm.h"
#include "nouveau_drv.h"
#include "nouveau_hw.h"
#include "nouveau_util.h"
#include "nouveau_ramht.h"

struct nv04_graph_engine {
	struct nouveau_exec_engine base;
};

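/* PGRAPH MMIO registers that make up a channel's graphics context on
 * NV04/NV05; they are saved and restored by the context switch code below.
 */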
static uint32_t nv04_graph_ctx_regs[] = {
	0x0040053c,
	0x00400544,
	0x00400540,
	0x00400548,
	NV04_PGRAPH_CTX_SWITCH1,
	NV04_PGRAPH_CTX_SWITCH2,
	NV04_PGRAPH_CTX_SWITCH3,
	NV04_PGRAPH_CTX_SWITCH4,
	NV04_PGRAPH_CTX_CACHE1,
	NV04_PGRAPH_CTX_CACHE2,
	NV04_PGRAPH_CTX_CACHE3,
	NV04_PGRAPH_CTX_CACHE4,
	0x00400184,
	0x004001a4,
	0x004001c4,
	0x004001e4,
	0x00400188,
	0x004001a8,
	0x004001c8,
	0x004001e8,
	0x0040018c,
	0x004001ac,
	0x004001cc,
	0x004001ec,
	0x00400190,
	0x004001b0,
	0x004001d0,
	0x004001f0,
	0x00400194,
	0x004001b4,
	0x004001d4,
	0x004001f4,
	0x00400198,
	0x004001b8,
	0x004001d8,
	0x004001f8,
	0x0040019c,
	0x004001bc,
	0x004001dc,
	0x004001fc,
	0x00400174,
	NV04_PGRAPH_DMA_START_0,
	NV04_PGRAPH_DMA_START_1,
	NV04_PGRAPH_DMA_LENGTH,
	NV04_PGRAPH_DMA_MISC,
	NV04_PGRAPH_DMA_PITCH,
	NV04_PGRAPH_BOFFSET0,
	NV04_PGRAPH_BBASE0,
	NV04_PGRAPH_BLIMIT0,
	NV04_PGRAPH_BOFFSET1,
	NV04_PGRAPH_BBASE1,
	NV04_PGRAPH_BLIMIT1,
	NV04_PGRAPH_BOFFSET2,
	NV04_PGRAPH_BBASE2,
	NV04_PGRAPH_BLIMIT2,
	NV04_PGRAPH_BOFFSET3,
	NV04_PGRAPH_BBASE3,
	NV04_PGRAPH_BLIMIT3,
	NV04_PGRAPH_BOFFSET4,
	NV04_PGRAPH_BBASE4,
	NV04_PGRAPH_BLIMIT4,
	NV04_PGRAPH_BOFFSET5,
	NV04_PGRAPH_BBASE5,
	NV04_PGRAPH_BLIMIT5,
	NV04_PGRAPH_BPITCH0,
	NV04_PGRAPH_BPITCH1,
	NV04_PGRAPH_BPITCH2,
	NV04_PGRAPH_BPITCH3,
	NV04_PGRAPH_BPITCH4,
	NV04_PGRAPH_SURFACE,
	NV04_PGRAPH_STATE,
	NV04_PGRAPH_BSWIZZLE2,
	NV04_PGRAPH_BSWIZZLE5,
	NV04_PGRAPH_BPIXEL,
	NV04_PGRAPH_NOTIFY,
	NV04_PGRAPH_PATT_COLOR0,
	NV04_PGRAPH_PATT_COLOR1,
	NV04_PGRAPH_PATT_COLORRAM+0x00,
	NV04_PGRAPH_PATT_COLORRAM+0x04,
	NV04_PGRAPH_PATT_COLORRAM+0x08,
	NV04_PGRAPH_PATT_COLORRAM+0x0c,
	NV04_PGRAPH_PATT_COLORRAM+0x10,
	NV04_PGRAPH_PATT_COLORRAM+0x14,
	NV04_PGRAPH_PATT_COLORRAM+0x18,
	NV04_PGRAPH_PATT_COLORRAM+0x1c,
	NV04_PGRAPH_PATT_COLORRAM+0x20,
	NV04_PGRAPH_PATT_COLORRAM+0x24,
	NV04_PGRAPH_PATT_COLORRAM+0x28,
	NV04_PGRAPH_PATT_COLORRAM+0x2c,
	NV04_PGRAPH_PATT_COLORRAM+0x30,
	NV04_PGRAPH_PATT_COLORRAM+0x34,
	NV04_PGRAPH_PATT_COLORRAM+0x38,
	NV04_PGRAPH_PATT_COLORRAM+0x3c,
	NV04_PGRAPH_PATT_COLORRAM+0x40,
	NV04_PGRAPH_PATT_COLORRAM+0x44,
	NV04_PGRAPH_PATT_COLORRAM+0x48,
	NV04_PGRAPH_PATT_COLORRAM+0x4c,
	NV04_PGRAPH_PATT_COLORRAM+0x50,
	NV04_PGRAPH_PATT_COLORRAM+0x54,
	NV04_PGRAPH_PATT_COLORRAM+0x58,
	NV04_PGRAPH_PATT_COLORRAM+0x5c,
	NV04_PGRAPH_PATT_COLORRAM+0x60,
	NV04_PGRAPH_PATT_COLORRAM+0x64,
	NV04_PGRAPH_PATT_COLORRAM+0x68,
	NV04_PGRAPH_PATT_COLORRAM+0x6c,
	NV04_PGRAPH_PATT_COLORRAM+0x70,
	NV04_PGRAPH_PATT_COLORRAM+0x74,
	NV04_PGRAPH_PATT_COLORRAM+0x78,
	NV04_PGRAPH_PATT_COLORRAM+0x7c,
	NV04_PGRAPH_PATT_COLORRAM+0x80,
	NV04_PGRAPH_PATT_COLORRAM+0x84,
	NV04_PGRAPH_PATT_COLORRAM+0x88,
	NV04_PGRAPH_PATT_COLORRAM+0x8c,
	NV04_PGRAPH_PATT_COLORRAM+0x90,
	NV04_PGRAPH_PATT_COLORRAM+0x94,
	NV04_PGRAPH_PATT_COLORRAM+0x98,
	NV04_PGRAPH_PATT_COLORRAM+0x9c,
	NV04_PGRAPH_PATT_COLORRAM+0xa0,
	NV04_PGRAPH_PATT_COLORRAM+0xa4,
	NV04_PGRAPH_PATT_COLORRAM+0xa8,
	NV04_PGRAPH_PATT_COLORRAM+0xac,
	NV04_PGRAPH_PATT_COLORRAM+0xb0,
	NV04_PGRAPH_PATT_COLORRAM+0xb4,
	NV04_PGRAPH_PATT_COLORRAM+0xb8,
	NV04_PGRAPH_PATT_COLORRAM+0xbc,
	NV04_PGRAPH_PATT_COLORRAM+0xc0,
	NV04_PGRAPH_PATT_COLORRAM+0xc4,
	NV04_PGRAPH_PATT_COLORRAM+0xc8,
	NV04_PGRAPH_PATT_COLORRAM+0xcc,
	NV04_PGRAPH_PATT_COLORRAM+0xd0,
	NV04_PGRAPH_PATT_COLORRAM+0xd4,
	NV04_PGRAPH_PATT_COLORRAM+0xd8,
	NV04_PGRAPH_PATT_COLORRAM+0xdc,
	NV04_PGRAPH_PATT_COLORRAM+0xe0,
	NV04_PGRAPH_PATT_COLORRAM+0xe4,
	NV04_PGRAPH_PATT_COLORRAM+0xe8,
	NV04_PGRAPH_PATT_COLORRAM+0xec,
	NV04_PGRAPH_PATT_COLORRAM+0xf0,
	NV04_PGRAPH_PATT_COLORRAM+0xf4,
	NV04_PGRAPH_PATT_COLORRAM+0xf8,
	NV04_PGRAPH_PATT_COLORRAM+0xfc,
	NV04_PGRAPH_PATTERN,
	0x0040080c,
	NV04_PGRAPH_PATTERN_SHAPE,
	0x00400600,
	NV04_PGRAPH_ROP3,
	NV04_PGRAPH_CHROMA,
	NV04_PGRAPH_BETA_AND,
	NV04_PGRAPH_BETA_PREMULT,
	NV04_PGRAPH_CONTROL0,
	NV04_PGRAPH_CONTROL1,
	NV04_PGRAPH_CONTROL2,
	NV04_PGRAPH_BLEND,
	NV04_PGRAPH_STORED_FMT,
	NV04_PGRAPH_SOURCE_COLOR,
	0x00400560,
	0x00400568,
	0x00400564,
	0x0040056c,
	0x00400400,
	0x00400480,
	0x00400404,
	0x00400484,
	0x00400408,
	0x00400488,
	0x0040040c,
	0x0040048c,
	0x00400410,
	0x00400490,
	0x00400414,
	0x00400494,
	0x00400418,
	0x00400498,
	0x0040041c,
	0x0040049c,
	0x00400420,
	0x004004a0,
	0x00400424,
	0x004004a4,
	0x00400428,
	0x004004a8,
	0x0040042c,
	0x004004ac,
	0x00400430,
	0x004004b0,
	0x00400434,
	0x004004b4,
	0x00400438,
	0x004004b8,
	0x0040043c,
	0x004004bc,
	0x00400440,
	0x004004c0,
	0x00400444,
	0x004004c4,
	0x00400448,
	0x004004c8,
	0x0040044c,
	0x004004cc,
	0x00400450,
	0x004004d0,
	0x00400454,
	0x004004d4,
	0x00400458,
	0x004004d8,
	0x0040045c,
	0x004004dc,
	0x00400460,
	0x004004e0,
	0x00400464,
	0x004004e4,
	0x00400468,
	0x004004e8,
	0x0040046c,
	0x004004ec,
	0x00400470,
	0x004004f0,
	0x00400474,
	0x004004f4,
	0x00400478,
	0x004004f8,
	0x0040047c,
	0x004004fc,
	0x00400534,
	0x00400538,
	0x00400514,
	0x00400518,
	0x0040051c,
	0x00400520,
	0x00400524,
	0x00400528,
	0x0040052c,
	0x00400530,
	0x00400d00,
	0x00400d40,
	0x00400d80,
	0x00400d04,
	0x00400d44,
	0x00400d84,
	0x00400d08,
	0x00400d48,
	0x00400d88,
	0x00400d0c,
	0x00400d4c,
	0x00400d8c,
	0x00400d10,
	0x00400d50,
	0x00400d90,
	0x00400d14,
	0x00400d54,
	0x00400d94,
	0x00400d18,
	0x00400d58,
	0x00400d98,
	0x00400d1c,
	0x00400d5c,
	0x00400d9c,
	0x00400d20,
	0x00400d60,
	0x00400da0,
	0x00400d24,
	0x00400d64,
	0x00400da4,
	0x00400d28,
	0x00400d68,
	0x00400da8,
	0x00400d2c,
	0x00400d6c,
	0x00400dac,
	0x00400d30,
	0x00400d70,
	0x00400db0,
	0x00400d34,
	0x00400d74,
	0x00400db4,
	0x00400d38,
	0x00400d78,
	0x00400db8,
	0x00400d3c,
	0x00400d7c,
	0x00400dbc,
	0x00400590,
	0x00400594,
	0x00400598,
	0x0040059c,
	0x004005a8,
	0x004005ac,
	0x004005b0,
	0x004005b4,
	0x004005c0,
	0x004005c4,
	0x004005c8,
	0x004005cc,
	0x004005d0,
	0x004005d4,
	0x004005d8,
	0x004005dc,
	0x004005e0,
	NV04_PGRAPH_PASSTHRU_0,
	NV04_PGRAPH_PASSTHRU_1,
	NV04_PGRAPH_PASSTHRU_2,
	NV04_PGRAPH_DVD_COLORFMT,
	NV04_PGRAPH_SCALED_FORMAT,
	NV04_PGRAPH_MISC24_0,
	NV04_PGRAPH_MISC24_1,
	NV04_PGRAPH_MISC24_2,
	0x00400500,
	0x00400504,
	NV04_PGRAPH_VALID1,
	NV04_PGRAPH_VALID2,
	NV04_PGRAPH_DEBUG_3
};

struct graph_state {
	uint32_t nv04[ARRAY_SIZE(nv04_graph_ctx_regs)];
};

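/* Return the channel whose context is currently resident in PGRAPH, or
 * NULL if no valid channel is loaded.
 */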
static struct nouveau_channel *
nv04_graph_channel(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	int chid = dev_priv->engine.fifo.channels;

	if (nv_rd32(dev, NV04_PGRAPH_CTX_CONTROL) & 0x00010000)
		chid = nv_rd32(dev, NV04_PGRAPH_CTX_USER) >> 24;

	if (chid >= dev_priv->engine.fifo.channels)
		return NULL;

	return dev_priv->channels.ptr[chid];
}

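/* Look up the slot in the software context image that shadows the given
 * PGRAPH register.
 */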
static uint32_t *ctx_reg(struct graph_state *ctx, uint32_t reg)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(nv04_graph_ctx_regs); i++) {
		if (nv04_graph_ctx_regs[i] == reg)
			return &ctx->nv04[i];
	}

	return NULL;
}

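/* Copy a channel's software context image into the PGRAPH registers and
 * mark that channel as the current owner of the engine.
 */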
static int
nv04_graph_load_context(struct nouveau_channel *chan)
{
	struct graph_state *pgraph_ctx = chan->engctx[NVOBJ_ENGINE_GR];
	struct drm_device *dev = chan->dev;
	uint32_t tmp;
	int i;

	for (i = 0; i < ARRAY_SIZE(nv04_graph_ctx_regs); i++)
		nv_wr32(dev, nv04_graph_ctx_regs[i], pgraph_ctx->nv04[i]);

	nv_wr32(dev, NV04_PGRAPH_CTX_CONTROL, 0x10010100);

	tmp  = nv_rd32(dev, NV04_PGRAPH_CTX_USER) & 0x00ffffff;
	nv_wr32(dev, NV04_PGRAPH_CTX_USER, tmp | chan->id << 24);

	tmp = nv_rd32(dev, NV04_PGRAPH_FFINTFC_ST2);
	nv_wr32(dev, NV04_PGRAPH_FFINTFC_ST2, tmp & 0x000fffff);

	return 0;
}

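/* Save the PGRAPH registers of the currently loaded channel back into its
 * software context image and mark the engine as unowned.
 */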
static int
nv04_graph_unload_context(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_channel *chan = NULL;
	struct graph_state *ctx;
	uint32_t tmp;
	int i;

	chan = nv04_graph_channel(dev);
	if (!chan)
		return 0;
	ctx = chan->engctx[NVOBJ_ENGINE_GR];

	for (i = 0; i < ARRAY_SIZE(nv04_graph_ctx_regs); i++)
		ctx->nv04[i] = nv_rd32(dev, nv04_graph_ctx_regs[i]);

	nv_wr32(dev, NV04_PGRAPH_CTX_CONTROL, 0x10000000);
	tmp  = nv_rd32(dev, NV04_PGRAPH_CTX_USER) & 0x00ffffff;
	tmp |= (dev_priv->engine.fifo.channels - 1) << 24;
	nv_wr32(dev, NV04_PGRAPH_CTX_USER, tmp);
	return 0;
}

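/* Allocate the per-channel software context image for PGRAPH. */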
static int
nv04_graph_context_new(struct nouveau_channel *chan, int engine)
{
	struct graph_state *pgraph_ctx;
	NV_DEBUG(chan->dev, "nv04_graph_context_create %d\n", chan->id);

	pgraph_ctx = kzalloc(sizeof(*pgraph_ctx), GFP_KERNEL);
	if (pgraph_ctx == NULL)
		return -ENOMEM;

	*ctx_reg(pgraph_ctx, NV04_PGRAPH_DEBUG_3) = 0xfad4ff31;

	chan->engctx[engine] = pgraph_ctx;
	return 0;
}

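/* Tear down a channel's PGRAPH context, unloading it from the hardware
 * first if it happens to be the active one.
 */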
static void
nv04_graph_context_del(struct nouveau_channel *chan, int engine)
{
	struct drm_device *dev = chan->dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct graph_state *pgraph_ctx = chan->engctx[engine];
	unsigned long flags;

	spin_lock_irqsave(&dev_priv->context_switch_lock, flags);
	nv_mask(dev, NV04_PGRAPH_FIFO, 0x00000001, 0x00000000);

	/* Unload the context if it's the currently active one */
	if (nv04_graph_channel(dev) == chan)
		nv04_graph_unload_context(dev);

	nv_mask(dev, NV04_PGRAPH_FIFO, 0x00000001, 0x00000001);
	spin_unlock_irqrestore(&dev_priv->context_switch_lock, flags);

	/* Free the context resources */
	kfree(pgraph_ctx);
	chan->engctx[engine] = NULL;
}

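/* Create a graphics object (grobj) of the given class and insert it into
 * the channel's RAMHT under the supplied handle.
 */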
int
nv04_graph_object_new(struct nouveau_channel *chan, int engine,
		      u32 handle, u16 class)
{
	struct drm_device *dev = chan->dev;
	struct nouveau_gpuobj *obj = NULL;
	int ret;

	ret = nouveau_gpuobj_new(dev, chan, 16, 16, NVOBJ_FLAG_ZERO_FREE, &obj);
	if (ret)
		return ret;
	obj->engine = 1;
	obj->class  = class;

#ifdef __BIG_ENDIAN
	nv_wo32(obj, 0x00, 0x00080000 | class);
#else
	nv_wo32(obj, 0x00, class);
#endif
	nv_wo32(obj, 0x04, 0x00000000);
	nv_wo32(obj, 0x08, 0x00000000);
	nv_wo32(obj, 0x0c, 0x00000000);

	ret = nouveau_ramht_insert(chan, handle, obj);
	nouveau_gpuobj_ref(NULL, &obj);
	return ret;
}

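/* Bring PGRAPH out of reset and program its initial global state. */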
static int
nv04_graph_init(struct drm_device *dev, int engine)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	uint32_t tmp;

	nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) &
			~NV_PMC_ENABLE_PGRAPH);
	nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) |
			 NV_PMC_ENABLE_PGRAPH);

	/* Enable PGRAPH interrupts */
	nv_wr32(dev, NV03_PGRAPH_INTR, 0xFFFFFFFF);
	nv_wr32(dev, NV03_PGRAPH_INTR_EN, 0xFFFFFFFF);

	nv_wr32(dev, NV04_PGRAPH_VALID1, 0);
	nv_wr32(dev, NV04_PGRAPH_VALID2, 0);
	/*nv_wr32(dev, NV04_PGRAPH_DEBUG_0, 0x000001FF);
	nv_wr32(dev, NV04_PGRAPH_DEBUG_0, 0x001FFFFF);*/
	nv_wr32(dev, NV04_PGRAPH_DEBUG_0, 0x1231c000);
	/*1231C000 blob, 001 haiku*/
	/*V_WRITE(NV04_PGRAPH_DEBUG_1, 0xf2d91100);*/
	nv_wr32(dev, NV04_PGRAPH_DEBUG_1, 0x72111100);
	/*0x72111100 blob , 01 haiku*/
	/*nv_wr32(dev, NV04_PGRAPH_DEBUG_2, 0x11d5f870);*/
	nv_wr32(dev, NV04_PGRAPH_DEBUG_2, 0x11d5f071);
	/*haiku same*/

	/*nv_wr32(dev, NV04_PGRAPH_DEBUG_3, 0xfad4ff31);*/
	nv_wr32(dev, NV04_PGRAPH_DEBUG_3, 0xf0d4ff31);
	/*haiku and blob 10d4*/

	nv_wr32(dev, NV04_PGRAPH_STATE        , 0xFFFFFFFF);
	nv_wr32(dev, NV04_PGRAPH_CTX_CONTROL  , 0x10000100);
	tmp  = nv_rd32(dev, NV04_PGRAPH_CTX_USER) & 0x00ffffff;
	tmp |= (dev_priv->engine.fifo.channels - 1) << 24;
	nv_wr32(dev, NV04_PGRAPH_CTX_USER, tmp);

	/* These don't belong here, they're part of a per-channel context */
	nv_wr32(dev, NV04_PGRAPH_PATTERN_SHAPE, 0x00000000);
	nv_wr32(dev, NV04_PGRAPH_BETA_AND     , 0xFFFFFFFF);

	return 0;
}

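/* Idle and disable PGRAPH, saving the current context; fails with -EBUSY
 * on suspend if the engine refuses to go idle.
 */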
static int
nv04_graph_fini(struct drm_device *dev, int engine, bool suspend)
{
	nv_mask(dev, NV04_PGRAPH_FIFO, 0x00000001, 0x00000000);
	if (!nv_wait(dev, NV04_PGRAPH_STATUS, ~0, 0) && suspend) {
		nv_mask(dev, NV04_PGRAPH_FIFO, 0x00000001, 0x00000001);
		return -EBUSY;
	}
	nv04_graph_unload_context(dev);
	nv_wr32(dev, NV03_PGRAPH_INTR_EN, 0x00000000);
	return 0;
}

static int
nv04_graph_mthd_set_ref(struct nouveau_channel *chan,
			u32 class, u32 mthd, u32 data)
{
	atomic_set(&chan->fence.last_sequence_irq, data);
	return 0;
}

int
nv04_graph_mthd_page_flip(struct nouveau_channel *chan,
			  u32 class, u32 mthd, u32 data)
{
	struct drm_device *dev = chan->dev;
	struct nouveau_page_flip_state s;

	if (!nouveau_finish_page_flip(chan, &s))
		nv_set_crtc_base(dev, s.crtc,
				 s.offset + s.y * s.pitch + s.x * s.bpp / 8);

	return 0;
}

/*
 * Software methods, why they are needed, and how they all work:
 *
 * NV04 and NV05 keep most of the state in PGRAPH context itself, but some
 * 2d engine settings are kept inside the grobjs themselves. The grobjs are
 * 3 words long on both. grobj format on NV04 is:
 *
 * word 0:
 *  - bits 0-7: class
 *  - bit 12: color key active
 *  - bit 13: clip rect active
 *  - bit 14: if set, destination surface is swizzled and taken from buffer 5
 *            [set by NV04_SWIZZLED_SURFACE], otherwise it's linear and taken
 *            from buffer 0 [set by NV04_CONTEXT_SURFACES_2D or
 *            NV03_CONTEXT_SURFACE_DST].
 *  - bits 15-17: 2d operation [aka patch config]
 *  - bit 24: patch valid [enables rendering using this object]
 *  - bit 25: surf3d valid [for tex_tri and multitex_tri only]
 * word 1:
 *  - bits 0-1: mono format
 *  - bits 8-13: color format
 *  - bits 16-31: DMA_NOTIFY instance
 * word 2:
 *  - bits 0-15: DMA_A instance
 *  - bits 16-31: DMA_B instance
 *
 * On NV05 it's:
 *
 * word 0:
 *  - bits 0-7: class
 *  - bit 12: color key active
 *  - bit 13: clip rect active
 *  - bit 14: if set, destination surface is swizzled and taken from buffer 5
 *            [set by NV04_SWIZZLED_SURFACE], otherwise it's linear and taken
 *            from buffer 0 [set by NV04_CONTEXT_SURFACES_2D or
 *            NV03_CONTEXT_SURFACE_DST].
 *  - bits 15-17: 2d operation [aka patch config]
 *  - bits 20-22: dither mode
 *  - bit 24: patch valid [enables rendering using this object]
 *  - bit 25: surface_dst/surface_color/surf2d/surf3d valid
 *  - bit 26: surface_src/surface_zeta valid
 *  - bit 27: pattern valid
 *  - bit 28: rop valid
 *  - bit 29: beta1 valid
 *  - bit 30: beta4 valid
 * word 1:
 *  - bits 0-1: mono format
 *  - bits 8-13: color format
 *  - bits 16-31: DMA_NOTIFY instance
 * word 2:
 *  - bits 0-15: DMA_A instance
 *  - bits 16-31: DMA_B instance
 *
 * NV05 will set/unset the relevant valid bits when you poke the relevant
 * object-binding methods with an object of the proper type, or with the
 * NULL type. It'll only allow rendering using the grobj if all needed
 * objects are bound. The needed set of objects depends on the selected
 * operation: for example a rop object is needed by ROP_AND, but not by
 * SRCCOPY_AND.
 *
 * NV04 doesn't have these methods implemented at all, and doesn't have the
 * relevant bits in the grobj. Instead, it'll allow rendering whenever bit
 * 24 is set. So we have to emulate them in software, internally keeping
 * the same bits as NV05 does. Since grobjs are aligned to 16 bytes on
 * nv04, but the last word isn't actually used for anything, we abuse it
 * for this purpose.
 *
 * Actually, NV05 can optionally check bit 24 too, but we disable this
 * since there's no use for it.
 *
 * For unknown reasons, NV04 implements surf3d binding in hardware as an
 * exception. Also for unknown reasons, NV04 doesn't implement the clipping
 * methods on the surf3d object, so we have to emulate them too.
 */

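/* Update bits in word 0 of the grobj currently bound to the trapping
 * subchannel, mirroring the change into the PGRAPH context cache.
 */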
static void
nv04_graph_set_ctx1(struct nouveau_channel *chan, u32 mask, u32 value)
{
	struct drm_device *dev = chan->dev;
	u32 instance = (nv_rd32(dev, NV04_PGRAPH_CTX_SWITCH4) & 0xffff) << 4;
	int subc = (nv_rd32(dev, NV04_PGRAPH_TRAPPED_ADDR) >> 13) & 0x7;
	u32 tmp;

	tmp  = nv_ri32(dev, instance);
	tmp &= ~mask;
	tmp |= value;

	nv_wi32(dev, instance, tmp);
	nv_wr32(dev, NV04_PGRAPH_CTX_SWITCH1, tmp);
	nv_wr32(dev, NV04_PGRAPH_CTX_CACHE1 + (subc<<2), tmp);
}

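/* Update the emulated NV05-style valid bits kept in the last word of the
 * bound grobj and recompute whether the patch as a whole is valid for the
 * selected operation (see the big comment above).
 */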
static void
nv04_graph_set_ctx_val(struct nouveau_channel *chan, u32 mask, u32 value)
{
	struct drm_device *dev = chan->dev;
	u32 instance = (nv_rd32(dev, NV04_PGRAPH_CTX_SWITCH4) & 0xffff) << 4;
	u32 tmp, ctx1;
	int class, op, valid = 1;

	ctx1 = nv_ri32(dev, instance);
	class = ctx1 & 0xff;
	op = (ctx1 >> 15) & 7;
	tmp  = nv_ri32(dev, instance + 0xc);
	tmp &= ~mask;
	tmp |= value;
	nv_wi32(dev, instance + 0xc, tmp);

	/* check for valid surf2d/surf_dst/surf_color */
	if (!(tmp & 0x02000000))
		valid = 0;
	/* check for valid surf_src/surf_zeta */
	if ((class == 0x1f || class == 0x48) && !(tmp & 0x04000000))
		valid = 0;

	switch (op) {
	/* SRCCOPY_AND, SRCCOPY: no extra objects required */
	case 0:
	case 3:
		break;
	/* ROP_AND: requires pattern and rop */
	case 1:
		if (!(tmp & 0x18000000))
			valid = 0;
		break;
	/* BLEND_AND: requires beta1 */
	case 2:
		if (!(tmp & 0x20000000))
			valid = 0;
		break;
	/* SRCCOPY_PREMULT, BLEND_PREMULT: beta4 required */
	case 4:
	case 5:
		if (!(tmp & 0x40000000))
			valid = 0;
		break;
	}

	nv04_graph_set_ctx1(chan, 0x01000000, valid << 24);
}

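/* Software implementation of the SET_OPERATION method: store the 2d
 * operation in the grobj and re-run validation, since the required set of
 * bound objects depends on the operation.
 */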
static int
nv04_graph_mthd_set_operation(struct nouveau_channel *chan,
			      u32 class, u32 mthd, u32 data)
{
	if (data > 5)
		return 1;
	/* Old versions of the objects only accept first three operations. */
	if (data > 2 && class < 0x40)
		return 1;
	nv04_graph_set_ctx1(chan, 0x00038000, data << 15);
	/* changing operation changes set of objects needed for validation */
	nv04_graph_set_ctx_val(chan, 0, 0);
	return 0;
}

static int
nv04_graph_mthd_surf3d_clip_h(struct nouveau_channel *chan,
			      u32 class, u32 mthd, u32 data)
{
	uint32_t min = data & 0xffff, max;
	uint32_t w = data >> 16;
	if (min & 0x8000)
		/* too large */
		return 1;
	if (w & 0x8000)
		/* yes, it accepts negative for some reason. */
		w |= 0xffff0000;
	max = min + w;
	max &= 0x3ffff;
	nv_wr32(chan->dev, 0x40053c, min);
	nv_wr32(chan->dev, 0x400544, max);
	return 0;
}

static int
nv04_graph_mthd_surf3d_clip_v(struct nouveau_channel *chan,
			      u32 class, u32 mthd, u32 data)
{
	uint32_t min = data & 0xffff, max;
	uint32_t w = data >> 16;
	if (min & 0x8000)
		/* too large */
		return 1;
	if (w & 0x8000)
		/* yes, it accepts negative for some reason. */
		w |= 0xffff0000;
	max = min + w;
	max &= 0x3ffff;
	nv_wr32(chan->dev, 0x400540, min);
	nv_wr32(chan->dev, 0x400548, max);
	return 0;
}

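/* The bind_* methods below emulate NV05-style object binding: they look at
 * the class of the object being bound (read from instance memory) and set
 * or clear the corresponding valid bit.
 */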
static int
nv04_graph_mthd_bind_surf2d(struct nouveau_channel *chan,
			    u32 class, u32 mthd, u32 data)
{
	switch (nv_ri32(chan->dev, data << 4) & 0xff) {
	case 0x30:
		nv04_graph_set_ctx1(chan, 0x00004000, 0);
		nv04_graph_set_ctx_val(chan, 0x02000000, 0);
		return 0;
	case 0x42:
		nv04_graph_set_ctx1(chan, 0x00004000, 0);
		nv04_graph_set_ctx_val(chan, 0x02000000, 0x02000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_surf2d_swzsurf(struct nouveau_channel *chan,
				    u32 class, u32 mthd, u32 data)
{
	switch (nv_ri32(chan->dev, data << 4) & 0xff) {
	case 0x30:
		nv04_graph_set_ctx1(chan, 0x00004000, 0);
		nv04_graph_set_ctx_val(chan, 0x02000000, 0);
		return 0;
	case 0x42:
		nv04_graph_set_ctx1(chan, 0x00004000, 0);
		nv04_graph_set_ctx_val(chan, 0x02000000, 0x02000000);
		return 0;
	case 0x52:
		nv04_graph_set_ctx1(chan, 0x00004000, 0x00004000);
		nv04_graph_set_ctx_val(chan, 0x02000000, 0x02000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_nv01_patt(struct nouveau_channel *chan,
			       u32 class, u32 mthd, u32 data)
{
	switch (nv_ri32(chan->dev, data << 4) & 0xff) {
	case 0x30:
		nv04_graph_set_ctx_val(chan, 0x08000000, 0);
		return 0;
	case 0x18:
		nv04_graph_set_ctx_val(chan, 0x08000000, 0x08000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_nv04_patt(struct nouveau_channel *chan,
			       u32 class, u32 mthd, u32 data)
{
	switch (nv_ri32(chan->dev, data << 4) & 0xff) {
	case 0x30:
		nv04_graph_set_ctx_val(chan, 0x08000000, 0);
		return 0;
	case 0x44:
		nv04_graph_set_ctx_val(chan, 0x08000000, 0x08000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_rop(struct nouveau_channel *chan,
			 u32 class, u32 mthd, u32 data)
{
	switch (nv_ri32(chan->dev, data << 4) & 0xff) {
	case 0x30:
		nv04_graph_set_ctx_val(chan, 0x10000000, 0);
		return 0;
	case 0x43:
		nv04_graph_set_ctx_val(chan, 0x10000000, 0x10000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_beta1(struct nouveau_channel *chan,
			   u32 class, u32 mthd, u32 data)
{
	switch (nv_ri32(chan->dev, data << 4) & 0xff) {
	case 0x30:
		nv04_graph_set_ctx_val(chan, 0x20000000, 0);
		return 0;
	case 0x12:
		nv04_graph_set_ctx_val(chan, 0x20000000, 0x20000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_beta4(struct nouveau_channel *chan,
			   u32 class, u32 mthd, u32 data)
{
	switch (nv_ri32(chan->dev, data << 4) & 0xff) {
	case 0x30:
		nv04_graph_set_ctx_val(chan, 0x40000000, 0);
		return 0;
	case 0x72:
		nv04_graph_set_ctx_val(chan, 0x40000000, 0x40000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_surf_dst(struct nouveau_channel *chan,
			      u32 class, u32 mthd, u32 data)
{
	switch (nv_ri32(chan->dev, data << 4) & 0xff) {
	case 0x30:
		nv04_graph_set_ctx_val(chan, 0x02000000, 0);
		return 0;
	case 0x58:
		nv04_graph_set_ctx_val(chan, 0x02000000, 0x02000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_surf_src(struct nouveau_channel *chan,
			      u32 class, u32 mthd, u32 data)
{
	switch (nv_ri32(chan->dev, data << 4) & 0xff) {
	case 0x30:
		nv04_graph_set_ctx_val(chan, 0x04000000, 0);
		return 0;
	case 0x59:
		nv04_graph_set_ctx_val(chan, 0x04000000, 0x04000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_surf_color(struct nouveau_channel *chan,
				u32 class, u32 mthd, u32 data)
{
	switch (nv_ri32(chan->dev, data << 4) & 0xff) {
	case 0x30:
		nv04_graph_set_ctx_val(chan, 0x02000000, 0);
		return 0;
	case 0x5a:
		nv04_graph_set_ctx_val(chan, 0x02000000, 0x02000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_surf_zeta(struct nouveau_channel *chan,
			       u32 class, u32 mthd, u32 data)
{
	switch (nv_ri32(chan->dev, data << 4) & 0xff) {
	case 0x30:
		nv04_graph_set_ctx_val(chan, 0x04000000, 0);
		return 0;
	case 0x5b:
		nv04_graph_set_ctx_val(chan, 0x04000000, 0x04000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_clip(struct nouveau_channel *chan,
			  u32 class, u32 mthd, u32 data)
{
	switch (nv_ri32(chan->dev, data << 4) & 0xff) {
	case 0x30:
		nv04_graph_set_ctx1(chan, 0x2000, 0);
		return 0;
	case 0x19:
		nv04_graph_set_ctx1(chan, 0x2000, 0x2000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_chroma(struct nouveau_channel *chan,
			    u32 class, u32 mthd, u32 data)
{
	switch (nv_ri32(chan->dev, data << 4) & 0xff) {
	case 0x30:
		nv04_graph_set_ctx1(chan, 0x1000, 0);
		return 0;
	/* Yes, for some reason even the old versions of objects
	 * accept 0x57 and not 0x17. Consistency be damned.
	 */
	case 0x57:
		nv04_graph_set_ctx1(chan, 0x1000, 0x1000);
		return 0;
	}
	return 1;
}

static struct nouveau_bitfield nv04_graph_intr[] = {
	{ NV_PGRAPH_INTR_NOTIFY, "NOTIFY" },
	{}
};

static struct nouveau_bitfield nv04_graph_nstatus[] = {
	{ NV04_PGRAPH_NSTATUS_STATE_IN_USE,       "STATE_IN_USE" },
	{ NV04_PGRAPH_NSTATUS_INVALID_STATE,      "INVALID_STATE" },
	{ NV04_PGRAPH_NSTATUS_BAD_ARGUMENT,       "BAD_ARGUMENT" },
	{ NV04_PGRAPH_NSTATUS_PROTECTION_FAULT,   "PROTECTION_FAULT" },
	{}
};

struct nouveau_bitfield nv04_graph_nsource[] = {
	{ NV03_PGRAPH_NSOURCE_NOTIFICATION,       "NOTIFICATION" },
	{ NV03_PGRAPH_NSOURCE_DATA_ERROR,         "DATA_ERROR" },
	{ NV03_PGRAPH_NSOURCE_PROTECTION_ERROR,   "PROTECTION_ERROR" },
	{ NV03_PGRAPH_NSOURCE_RANGE_EXCEPTION,    "RANGE_EXCEPTION" },
	{ NV03_PGRAPH_NSOURCE_LIMIT_COLOR,        "LIMIT_COLOR" },
	{ NV03_PGRAPH_NSOURCE_LIMIT_ZETA,         "LIMIT_ZETA" },
	{ NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD,       "ILLEGAL_MTHD" },
	{ NV03_PGRAPH_NSOURCE_DMA_R_PROTECTION,   "DMA_R_PROTECTION" },
	{ NV03_PGRAPH_NSOURCE_DMA_W_PROTECTION,   "DMA_W_PROTECTION" },
	{ NV03_PGRAPH_NSOURCE_FORMAT_EXCEPTION,   "FORMAT_EXCEPTION" },
	{ NV03_PGRAPH_NSOURCE_PATCH_EXCEPTION,    "PATCH_EXCEPTION" },
	{ NV03_PGRAPH_NSOURCE_STATE_INVALID,      "STATE_INVALID" },
	{ NV03_PGRAPH_NSOURCE_DOUBLE_NOTIFY,      "DOUBLE_NOTIFY" },
	{ NV03_PGRAPH_NSOURCE_NOTIFY_IN_USE,      "NOTIFY_IN_USE" },
	{ NV03_PGRAPH_NSOURCE_METHOD_CNT,         "METHOD_CNT" },
	{ NV03_PGRAPH_NSOURCE_BFR_NOTIFICATION,   "BFR_NOTIFICATION" },
	{ NV03_PGRAPH_NSOURCE_DMA_VTX_PROTECTION, "DMA_VTX_PROTECTION" },
	{ NV03_PGRAPH_NSOURCE_DMA_WIDTH_A,        "DMA_WIDTH_A" },
	{ NV03_PGRAPH_NSOURCE_DMA_WIDTH_B,        "DMA_WIDTH_B" },
	{}
};

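/* Switch PGRAPH to the channel currently active on PFIFO. */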
static void
nv04_graph_context_switch(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_channel *chan = NULL;
	int chid;

	nouveau_wait_for_idle(dev);

	/* If previous context is valid, we need to save it */
	nv04_graph_unload_context(dev);

	/* Load context for next channel */
	chid = dev_priv->engine.fifo.channel_id(dev);
	chan = dev_priv->channels.ptr[chid];
	if (chan)
		nv04_graph_load_context(chan);
}

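/* PGRAPH interrupt handler: service ILLEGAL_MTHD notifications via the
 * software method tables and handle context switch requests.
 */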
static void
nv04_graph_isr(struct drm_device *dev)
{
	u32 stat;

	while ((stat = nv_rd32(dev, NV03_PGRAPH_INTR))) {
		u32 nsource = nv_rd32(dev, NV03_PGRAPH_NSOURCE);
		u32 nstatus = nv_rd32(dev, NV03_PGRAPH_NSTATUS);
		u32 addr = nv_rd32(dev, NV04_PGRAPH_TRAPPED_ADDR);
		u32 chid = (addr & 0x0f000000) >> 24;
		u32 subc = (addr & 0x0000e000) >> 13;
		u32 mthd = (addr & 0x00001ffc);
		u32 data = nv_rd32(dev, NV04_PGRAPH_TRAPPED_DATA);
		u32 class = nv_rd32(dev, 0x400180 + subc * 4) & 0xff;
		u32 show = stat;

		if (stat & NV_PGRAPH_INTR_NOTIFY) {
			if (nsource & NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD) {
				if (!nouveau_gpuobj_mthd_call2(dev, chid, class, mthd, data))
					show &= ~NV_PGRAPH_INTR_NOTIFY;
			}
		}

		if (stat & NV_PGRAPH_INTR_CONTEXT_SWITCH) {
			nv_wr32(dev, NV03_PGRAPH_INTR, NV_PGRAPH_INTR_CONTEXT_SWITCH);
			stat &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
			show &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
			nv04_graph_context_switch(dev);
		}

		nv_wr32(dev, NV03_PGRAPH_INTR, stat);
		nv_wr32(dev, NV04_PGRAPH_FIFO, 0x00000001);

		if (show && nouveau_ratelimit()) {
			NV_INFO(dev, "PGRAPH -");
			nouveau_bitfield_print(nv04_graph_intr, show);
			printk(" nsource:");
			nouveau_bitfield_print(nv04_graph_nsource, nsource);
			printk(" nstatus:");
			nouveau_bitfield_print(nv04_graph_nstatus, nstatus);
			printk("\n");
			NV_INFO(dev, "PGRAPH - ch %d/%d class 0x%04x "
				     "mthd 0x%04x data 0x%08x\n",
				chid, subc, class, mthd, data);
		}
	}
}

static void
nv04_graph_destroy(struct drm_device *dev, int engine)
{
	struct nv04_graph_engine *pgraph = nv_engine(dev, engine);

	nouveau_irq_unregister(dev, 12);

	NVOBJ_ENGINE_DEL(dev, GR);
	kfree(pgraph);
}

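/* Register the NV04/NV05 PGRAPH engine, its interrupt handler and the
 * object classes (and software methods) it supports.
 */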
int
nv04_graph_create(struct drm_device *dev)
{
	struct nv04_graph_engine *pgraph;

	pgraph = kzalloc(sizeof(*pgraph), GFP_KERNEL);
	if (!pgraph)
		return -ENOMEM;

	pgraph->base.destroy = nv04_graph_destroy;
	pgraph->base.init = nv04_graph_init;
	pgraph->base.fini = nv04_graph_fini;
	pgraph->base.context_new = nv04_graph_context_new;
	pgraph->base.context_del = nv04_graph_context_del;
	pgraph->base.object_new = nv04_graph_object_new;

	NVOBJ_ENGINE_ADD(dev, GR, &pgraph->base);
	nouveau_irq_register(dev, 12, nv04_graph_isr);

	/* dvd subpicture */
	NVOBJ_CLASS(dev, 0x0038, GR);

	/* m2mf */
	NVOBJ_CLASS(dev, 0x0039, GR);

	/* nv03 gdirect */
	NVOBJ_CLASS(dev, 0x004b, GR);
	NVOBJ_MTHD (dev, 0x004b, 0x0184, nv04_graph_mthd_bind_nv01_patt);
	NVOBJ_MTHD (dev, 0x004b, 0x0188, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x004b, 0x018c, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x004b, 0x0190, nv04_graph_mthd_bind_surf_dst);
	NVOBJ_MTHD (dev, 0x004b, 0x02fc, nv04_graph_mthd_set_operation);

	/* nv04 gdirect */
	NVOBJ_CLASS(dev, 0x004a, GR);
	NVOBJ_MTHD (dev, 0x004a, 0x0188, nv04_graph_mthd_bind_nv04_patt);
	NVOBJ_MTHD (dev, 0x004a, 0x018c, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x004a, 0x0190, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x004a, 0x0194, nv04_graph_mthd_bind_beta4);
	NVOBJ_MTHD (dev, 0x004a, 0x0198, nv04_graph_mthd_bind_surf2d);
	NVOBJ_MTHD (dev, 0x004a, 0x02fc, nv04_graph_mthd_set_operation);

	/* nv01 imageblit */
	NVOBJ_CLASS(dev, 0x001f, GR);
	NVOBJ_MTHD (dev, 0x001f, 0x0184, nv04_graph_mthd_bind_chroma);
	NVOBJ_MTHD (dev, 0x001f, 0x0188, nv04_graph_mthd_bind_clip);
	NVOBJ_MTHD (dev, 0x001f, 0x018c, nv04_graph_mthd_bind_nv01_patt);
	NVOBJ_MTHD (dev, 0x001f, 0x0190, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x001f, 0x0194, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x001f, 0x0198, nv04_graph_mthd_bind_surf_dst);
	NVOBJ_MTHD (dev, 0x001f, 0x019c, nv04_graph_mthd_bind_surf_src);
	NVOBJ_MTHD (dev, 0x001f, 0x02fc, nv04_graph_mthd_set_operation);

	/* nv04 imageblit */
	NVOBJ_CLASS(dev, 0x005f, GR);
	NVOBJ_MTHD (dev, 0x005f, 0x0184, nv04_graph_mthd_bind_chroma);
	NVOBJ_MTHD (dev, 0x005f, 0x0188, nv04_graph_mthd_bind_clip);
	NVOBJ_MTHD (dev, 0x005f, 0x018c, nv04_graph_mthd_bind_nv04_patt);
	NVOBJ_MTHD (dev, 0x005f, 0x0190, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x005f, 0x0194, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x005f, 0x0198, nv04_graph_mthd_bind_beta4);
	NVOBJ_MTHD (dev, 0x005f, 0x019c, nv04_graph_mthd_bind_surf2d);
	NVOBJ_MTHD (dev, 0x005f, 0x02fc, nv04_graph_mthd_set_operation);

	/* nv04 iifc */
	NVOBJ_CLASS(dev, 0x0060, GR);
	NVOBJ_MTHD (dev, 0x0060, 0x0188, nv04_graph_mthd_bind_chroma);
	NVOBJ_MTHD (dev, 0x0060, 0x018c, nv04_graph_mthd_bind_clip);
	NVOBJ_MTHD (dev, 0x0060, 0x0190, nv04_graph_mthd_bind_nv04_patt);
	NVOBJ_MTHD (dev, 0x0060, 0x0194, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x0060, 0x0198, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x0060, 0x019c, nv04_graph_mthd_bind_beta4);
	NVOBJ_MTHD (dev, 0x0060, 0x01a0, nv04_graph_mthd_bind_surf2d_swzsurf);
	NVOBJ_MTHD (dev, 0x0060, 0x03e4, nv04_graph_mthd_set_operation);

	/* nv05 iifc */
	NVOBJ_CLASS(dev, 0x0064, GR);

	/* nv01 ifc */
	NVOBJ_CLASS(dev, 0x0021, GR);
	NVOBJ_MTHD (dev, 0x0021, 0x0184, nv04_graph_mthd_bind_chroma);
	NVOBJ_MTHD (dev, 0x0021, 0x0188, nv04_graph_mthd_bind_clip);
	NVOBJ_MTHD (dev, 0x0021, 0x018c, nv04_graph_mthd_bind_nv01_patt);
	NVOBJ_MTHD (dev, 0x0021, 0x0190, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x0021, 0x0194, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x0021, 0x0198, nv04_graph_mthd_bind_surf_dst);
	NVOBJ_MTHD (dev, 0x0021, 0x02fc, nv04_graph_mthd_set_operation);

	/* nv04 ifc */
	NVOBJ_CLASS(dev, 0x0061, GR);
	NVOBJ_MTHD (dev, 0x0061, 0x0184, nv04_graph_mthd_bind_chroma);
	NVOBJ_MTHD (dev, 0x0061, 0x0188, nv04_graph_mthd_bind_clip);
	NVOBJ_MTHD (dev, 0x0061, 0x018c, nv04_graph_mthd_bind_nv04_patt);
	NVOBJ_MTHD (dev, 0x0061, 0x0190, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x0061, 0x0194, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x0061, 0x0198, nv04_graph_mthd_bind_beta4);
	NVOBJ_MTHD (dev, 0x0061, 0x019c, nv04_graph_mthd_bind_surf2d);
	NVOBJ_MTHD (dev, 0x0061, 0x02fc, nv04_graph_mthd_set_operation);

	/* nv05 ifc */
	NVOBJ_CLASS(dev, 0x0065, GR);

	/* nv03 sifc */
	NVOBJ_CLASS(dev, 0x0036, GR);
	NVOBJ_MTHD (dev, 0x0036, 0x0184, nv04_graph_mthd_bind_chroma);
	NVOBJ_MTHD (dev, 0x0036, 0x0188, nv04_graph_mthd_bind_nv01_patt);
	NVOBJ_MTHD (dev, 0x0036, 0x018c, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x0036, 0x0190, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x0036, 0x0194, nv04_graph_mthd_bind_surf_dst);
	NVOBJ_MTHD (dev, 0x0036, 0x02fc, nv04_graph_mthd_set_operation);

	/* nv04 sifc */
	NVOBJ_CLASS(dev, 0x0076, GR);
	NVOBJ_MTHD (dev, 0x0076, 0x0184, nv04_graph_mthd_bind_chroma);
	NVOBJ_MTHD (dev, 0x0076, 0x0188, nv04_graph_mthd_bind_nv04_patt);
	NVOBJ_MTHD (dev, 0x0076, 0x018c, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x0076, 0x0190, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x0076, 0x0194, nv04_graph_mthd_bind_beta4);
	NVOBJ_MTHD (dev, 0x0076, 0x0198, nv04_graph_mthd_bind_surf2d);
	NVOBJ_MTHD (dev, 0x0076, 0x02fc, nv04_graph_mthd_set_operation);

	/* nv05 sifc */
	NVOBJ_CLASS(dev, 0x0066, GR);

	/* nv03 sifm */
	NVOBJ_CLASS(dev, 0x0037, GR);
	NVOBJ_MTHD (dev, 0x0037, 0x0188, nv04_graph_mthd_bind_nv01_patt);
	NVOBJ_MTHD (dev, 0x0037, 0x018c, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x0037, 0x0190, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x0037, 0x0194, nv04_graph_mthd_bind_surf_dst);
	NVOBJ_MTHD (dev, 0x0037, 0x0304, nv04_graph_mthd_set_operation);

	/* nv04 sifm */
	NVOBJ_CLASS(dev, 0x0077, GR);
	NVOBJ_MTHD (dev, 0x0077, 0x0188, nv04_graph_mthd_bind_nv04_patt);
	NVOBJ_MTHD (dev, 0x0077, 0x018c, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x0077, 0x0190, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x0077, 0x0194, nv04_graph_mthd_bind_beta4);
	NVOBJ_MTHD (dev, 0x0077, 0x0198, nv04_graph_mthd_bind_surf2d_swzsurf);
	NVOBJ_MTHD (dev, 0x0077, 0x0304, nv04_graph_mthd_set_operation);

	/* null */
	NVOBJ_CLASS(dev, 0x0030, GR);

	/* surf2d */
	NVOBJ_CLASS(dev, 0x0042, GR);

	/* rop */
	NVOBJ_CLASS(dev, 0x0043, GR);

	/* beta1 */
	NVOBJ_CLASS(dev, 0x0012, GR);

	/* beta4 */
	NVOBJ_CLASS(dev, 0x0072, GR);

	/* cliprect */
	NVOBJ_CLASS(dev, 0x0019, GR);

	/* nv01 pattern */
	NVOBJ_CLASS(dev, 0x0018, GR);

	/* nv04 pattern */
	NVOBJ_CLASS(dev, 0x0044, GR);

	/* swzsurf */
	NVOBJ_CLASS(dev, 0x0052, GR);

	/* surf3d */
	NVOBJ_CLASS(dev, 0x0053, GR);
	NVOBJ_MTHD (dev, 0x0053, 0x02f8, nv04_graph_mthd_surf3d_clip_h);
	NVOBJ_MTHD (dev, 0x0053, 0x02fc, nv04_graph_mthd_surf3d_clip_v);

	/* nv03 tex_tri */
	NVOBJ_CLASS(dev, 0x0048, GR);
	NVOBJ_MTHD (dev, 0x0048, 0x0188, nv04_graph_mthd_bind_clip);
	NVOBJ_MTHD (dev, 0x0048, 0x018c, nv04_graph_mthd_bind_surf_color);
	NVOBJ_MTHD (dev, 0x0048, 0x0190, nv04_graph_mthd_bind_surf_zeta);

	/* tex_tri */
	NVOBJ_CLASS(dev, 0x0054, GR);

	/* multitex_tri */
	NVOBJ_CLASS(dev, 0x0055, GR);

	/* nv01 chroma */
	NVOBJ_CLASS(dev, 0x0017, GR);

	/* nv04 chroma */
	NVOBJ_CLASS(dev, 0x0057, GR);

	/* surf_dst */
	NVOBJ_CLASS(dev, 0x0058, GR);

	/* surf_src */
	NVOBJ_CLASS(dev, 0x0059, GR);

	/* surf_color */
	NVOBJ_CLASS(dev, 0x005a, GR);

	/* surf_zeta */
	NVOBJ_CLASS(dev, 0x005b, GR);

	/* nv01 line */
	NVOBJ_CLASS(dev, 0x001c, GR);
	NVOBJ_MTHD (dev, 0x001c, 0x0184, nv04_graph_mthd_bind_clip);
	NVOBJ_MTHD (dev, 0x001c, 0x0188, nv04_graph_mthd_bind_nv01_patt);
	NVOBJ_MTHD (dev, 0x001c, 0x018c, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x001c, 0x0190, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x001c, 0x0194, nv04_graph_mthd_bind_surf_dst);
	NVOBJ_MTHD (dev, 0x001c, 0x02fc, nv04_graph_mthd_set_operation);

	/* nv04 line */
	NVOBJ_CLASS(dev, 0x005c, GR);
	NVOBJ_MTHD (dev, 0x005c, 0x0184, nv04_graph_mthd_bind_clip);
	NVOBJ_MTHD (dev, 0x005c, 0x0188, nv04_graph_mthd_bind_nv04_patt);
	NVOBJ_MTHD (dev, 0x005c, 0x018c, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x005c, 0x0190, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x005c, 0x0194, nv04_graph_mthd_bind_beta4);
	NVOBJ_MTHD (dev, 0x005c, 0x0198, nv04_graph_mthd_bind_surf2d);
	NVOBJ_MTHD (dev, 0x005c, 0x02fc, nv04_graph_mthd_set_operation);

	/* nv01 tri */
	NVOBJ_CLASS(dev, 0x001d, GR);
	NVOBJ_MTHD (dev, 0x001d, 0x0184, nv04_graph_mthd_bind_clip);
	NVOBJ_MTHD (dev, 0x001d, 0x0188, nv04_graph_mthd_bind_nv01_patt);
	NVOBJ_MTHD (dev, 0x001d, 0x018c, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x001d, 0x0190, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x001d, 0x0194, nv04_graph_mthd_bind_surf_dst);
	NVOBJ_MTHD (dev, 0x001d, 0x02fc, nv04_graph_mthd_set_operation);

	/* nv04 tri */
	NVOBJ_CLASS(dev, 0x005d, GR);
	NVOBJ_MTHD (dev, 0x005d, 0x0184, nv04_graph_mthd_bind_clip);
	NVOBJ_MTHD (dev, 0x005d, 0x0188, nv04_graph_mthd_bind_nv04_patt);
	NVOBJ_MTHD (dev, 0x005d, 0x018c, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x005d, 0x0190, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x005d, 0x0194, nv04_graph_mthd_bind_beta4);
	NVOBJ_MTHD (dev, 0x005d, 0x0198, nv04_graph_mthd_bind_surf2d);
	NVOBJ_MTHD (dev, 0x005d, 0x02fc, nv04_graph_mthd_set_operation);

	/* nv01 rect */
	NVOBJ_CLASS(dev, 0x001e, GR);
	NVOBJ_MTHD (dev, 0x001e, 0x0184, nv04_graph_mthd_bind_clip);
	NVOBJ_MTHD (dev, 0x001e, 0x0188, nv04_graph_mthd_bind_nv01_patt);
	NVOBJ_MTHD (dev, 0x001e, 0x018c, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x001e, 0x0190, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x001e, 0x0194, nv04_graph_mthd_bind_surf_dst);
	NVOBJ_MTHD (dev, 0x001e, 0x02fc, nv04_graph_mthd_set_operation);

	/* nv04 rect */
	NVOBJ_CLASS(dev, 0x005e, GR);
	NVOBJ_MTHD (dev, 0x005e, 0x0184, nv04_graph_mthd_bind_clip);
	NVOBJ_MTHD (dev, 0x005e, 0x0188, nv04_graph_mthd_bind_nv04_patt);
	NVOBJ_MTHD (dev, 0x005e, 0x018c, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x005e, 0x0190, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x005e, 0x0194, nv04_graph_mthd_bind_beta4);
	NVOBJ_MTHD (dev, 0x005e, 0x0198, nv04_graph_mthd_bind_surf2d);
	NVOBJ_MTHD (dev, 0x005e, 0x02fc, nv04_graph_mthd_set_operation);

	/* nvsw */
	NVOBJ_CLASS(dev, 0x506e, SW);
	NVOBJ_MTHD (dev, 0x506e, 0x0150, nv04_graph_mthd_set_ref);
	NVOBJ_MTHD (dev, 0x506e, 0x0500, nv04_graph_mthd_page_flip);
	return 0;
}