1 /*
2 * Copyright 2007 Matthieu CASTET <castet.matthieu@free.fr>
3 * All Rights Reserved.
4 *
5 * Permission is hereby granted, free of charge, to any person obtaining a
6 * copy of this software and associated documentation files (the "Software"),
7 * to deal in the Software without restriction, including without limitation
8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9 * and/or sell copies of the Software, and to permit persons to whom the
10 * Software is furnished to do so, subject to the following conditions:
11 *
12 * The above copyright notice and this permission notice (including the next
13 * paragraph) shall be included in all copies or substantial portions of the
14 * Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
20 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
21 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
22 * DEALINGS IN THE SOFTWARE.
23 */
24
25 #include "drmP.h"
26 #include "drm.h"
27 #include "nouveau_drm.h"
28 #include "nouveau_drv.h"
29 #include "nouveau_util.h"
30
31 static int nv10_graph_register(struct drm_device *);
32 static void nv10_graph_isr(struct drm_device *);
33
34 #define NV10_FIFO_NUMBER 32
35
/* Software snapshot of the PGRAPH transform/vertex pipe.  Each member
 * holds the words readable at the corresponding NV10_PGRAPH_PIPE_ADDRESS
 * range; array sizes are the byte length of the range divided by 4. */
struct pipe_state {
	uint32_t pipe_0x0000[0x040/4];
	uint32_t pipe_0x0040[0x010/4];
	uint32_t pipe_0x0200[0x0c0/4];
	uint32_t pipe_0x4400[0x080/4];
	uint32_t pipe_0x6400[0x3b0/4];
	uint32_t pipe_0x6800[0x2f0/4];
	uint32_t pipe_0x6c00[0x030/4];
	uint32_t pipe_0x7000[0x130/4];
	uint32_t pipe_0x7400[0x0c0/4];
	uint32_t pipe_0x7800[0x0c0/4];
};
48
/* PGRAPH registers that make up the software-saved NV10 channel context.
 * Order matters: graph_state::nv10 is indexed by position in this table
 * (see nv10_graph_ctx_regs_find_offset() and NV_WRITE_CTX). */
static int nv10_graph_ctx_regs[] = {
	NV10_PGRAPH_CTX_SWITCH(0),
	NV10_PGRAPH_CTX_SWITCH(1),
	NV10_PGRAPH_CTX_SWITCH(2),
	NV10_PGRAPH_CTX_SWITCH(3),
	NV10_PGRAPH_CTX_SWITCH(4),
	NV10_PGRAPH_CTX_CACHE(0, 0),
	NV10_PGRAPH_CTX_CACHE(0, 1),
	NV10_PGRAPH_CTX_CACHE(0, 2),
	NV10_PGRAPH_CTX_CACHE(0, 3),
	NV10_PGRAPH_CTX_CACHE(0, 4),
	NV10_PGRAPH_CTX_CACHE(1, 0),
	NV10_PGRAPH_CTX_CACHE(1, 1),
	NV10_PGRAPH_CTX_CACHE(1, 2),
	NV10_PGRAPH_CTX_CACHE(1, 3),
	NV10_PGRAPH_CTX_CACHE(1, 4),
	NV10_PGRAPH_CTX_CACHE(2, 0),
	NV10_PGRAPH_CTX_CACHE(2, 1),
	NV10_PGRAPH_CTX_CACHE(2, 2),
	NV10_PGRAPH_CTX_CACHE(2, 3),
	NV10_PGRAPH_CTX_CACHE(2, 4),
	NV10_PGRAPH_CTX_CACHE(3, 0),
	NV10_PGRAPH_CTX_CACHE(3, 1),
	NV10_PGRAPH_CTX_CACHE(3, 2),
	NV10_PGRAPH_CTX_CACHE(3, 3),
	NV10_PGRAPH_CTX_CACHE(3, 4),
	NV10_PGRAPH_CTX_CACHE(4, 0),
	NV10_PGRAPH_CTX_CACHE(4, 1),
	NV10_PGRAPH_CTX_CACHE(4, 2),
	NV10_PGRAPH_CTX_CACHE(4, 3),
	NV10_PGRAPH_CTX_CACHE(4, 4),
	NV10_PGRAPH_CTX_CACHE(5, 0),
	NV10_PGRAPH_CTX_CACHE(5, 1),
	NV10_PGRAPH_CTX_CACHE(5, 2),
	NV10_PGRAPH_CTX_CACHE(5, 3),
	NV10_PGRAPH_CTX_CACHE(5, 4),
	NV10_PGRAPH_CTX_CACHE(6, 0),
	NV10_PGRAPH_CTX_CACHE(6, 1),
	NV10_PGRAPH_CTX_CACHE(6, 2),
	NV10_PGRAPH_CTX_CACHE(6, 3),
	NV10_PGRAPH_CTX_CACHE(6, 4),
	NV10_PGRAPH_CTX_CACHE(7, 0),
	NV10_PGRAPH_CTX_CACHE(7, 1),
	NV10_PGRAPH_CTX_CACHE(7, 2),
	NV10_PGRAPH_CTX_CACHE(7, 3),
	NV10_PGRAPH_CTX_CACHE(7, 4),
	NV10_PGRAPH_CTX_USER,
	NV04_PGRAPH_DMA_START_0,
	NV04_PGRAPH_DMA_START_1,
	NV04_PGRAPH_DMA_LENGTH,
	NV04_PGRAPH_DMA_MISC,
	NV10_PGRAPH_DMA_PITCH,
	NV04_PGRAPH_BOFFSET0,
	NV04_PGRAPH_BBASE0,
	NV04_PGRAPH_BLIMIT0,
	NV04_PGRAPH_BOFFSET1,
	NV04_PGRAPH_BBASE1,
	NV04_PGRAPH_BLIMIT1,
	NV04_PGRAPH_BOFFSET2,
	NV04_PGRAPH_BBASE2,
	NV04_PGRAPH_BLIMIT2,
	NV04_PGRAPH_BOFFSET3,
	NV04_PGRAPH_BBASE3,
	NV04_PGRAPH_BLIMIT3,
	NV04_PGRAPH_BOFFSET4,
	NV04_PGRAPH_BBASE4,
	NV04_PGRAPH_BLIMIT4,
	NV04_PGRAPH_BOFFSET5,
	NV04_PGRAPH_BBASE5,
	NV04_PGRAPH_BLIMIT5,
	NV04_PGRAPH_BPITCH0,
	NV04_PGRAPH_BPITCH1,
	NV04_PGRAPH_BPITCH2,
	NV04_PGRAPH_BPITCH3,
	NV04_PGRAPH_BPITCH4,
	NV10_PGRAPH_SURFACE,
	NV10_PGRAPH_STATE,
	NV04_PGRAPH_BSWIZZLE2,
	NV04_PGRAPH_BSWIZZLE5,
	NV04_PGRAPH_BPIXEL,
	NV10_PGRAPH_NOTIFY,
	NV04_PGRAPH_PATT_COLOR0,
	NV04_PGRAPH_PATT_COLOR1,
	NV04_PGRAPH_PATT_COLORRAM, /* 64 values from 0x400900 to 0x4009fc */
	0x00400904,
	0x00400908,
	0x0040090c,
	0x00400910,
	0x00400914,
	0x00400918,
	0x0040091c,
	0x00400920,
	0x00400924,
	0x00400928,
	0x0040092c,
	0x00400930,
	0x00400934,
	0x00400938,
	0x0040093c,
	0x00400940,
	0x00400944,
	0x00400948,
	0x0040094c,
	0x00400950,
	0x00400954,
	0x00400958,
	0x0040095c,
	0x00400960,
	0x00400964,
	0x00400968,
	0x0040096c,
	0x00400970,
	0x00400974,
	0x00400978,
	0x0040097c,
	0x00400980,
	0x00400984,
	0x00400988,
	0x0040098c,
	0x00400990,
	0x00400994,
	0x00400998,
	0x0040099c,
	0x004009a0,
	0x004009a4,
	0x004009a8,
	0x004009ac,
	0x004009b0,
	0x004009b4,
	0x004009b8,
	0x004009bc,
	0x004009c0,
	0x004009c4,
	0x004009c8,
	0x004009cc,
	0x004009d0,
	0x004009d4,
	0x004009d8,
	0x004009dc,
	0x004009e0,
	0x004009e4,
	0x004009e8,
	0x004009ec,
	0x004009f0,
	0x004009f4,
	0x004009f8,
	0x004009fc,
	NV04_PGRAPH_PATTERN,	/* 2 values from 0x400808 to 0x40080c */
	0x0040080c,
	NV04_PGRAPH_PATTERN_SHAPE,
	NV03_PGRAPH_MONO_COLOR0,
	NV04_PGRAPH_ROP3,
	NV04_PGRAPH_CHROMA,
	NV04_PGRAPH_BETA_AND,
	NV04_PGRAPH_BETA_PREMULT,
	0x00400e70,
	0x00400e74,
	0x00400e78,
	0x00400e7c,
	0x00400e80,
	0x00400e84,
	0x00400e88,
	0x00400e8c,
	0x00400ea0,
	0x00400ea4,
	0x00400ea8,
	0x00400e90,
	0x00400e94,
	0x00400e98,
	0x00400e9c,
	NV10_PGRAPH_WINDOWCLIP_HORIZONTAL, /* 8 values from 0x400f00-0x400f1c */
	NV10_PGRAPH_WINDOWCLIP_VERTICAL,   /* 8 values from 0x400f20-0x400f3c */
	0x00400f04,
	0x00400f24,
	0x00400f08,
	0x00400f28,
	0x00400f0c,
	0x00400f2c,
	0x00400f10,
	0x00400f30,
	0x00400f14,
	0x00400f34,
	0x00400f18,
	0x00400f38,
	0x00400f1c,
	0x00400f3c,
	NV10_PGRAPH_XFMODE0,
	NV10_PGRAPH_XFMODE1,
	NV10_PGRAPH_GLOBALSTATE0,
	NV10_PGRAPH_GLOBALSTATE1,
	NV04_PGRAPH_STORED_FMT,
	NV04_PGRAPH_SOURCE_COLOR,
	NV03_PGRAPH_ABS_X_RAM,	/* 32 values from 0x400400 to 0x40047c */
	NV03_PGRAPH_ABS_Y_RAM,	/* 32 values from 0x400480 to 0x4004fc */
	0x00400404,
	0x00400484,
	0x00400408,
	0x00400488,
	0x0040040c,
	0x0040048c,
	0x00400410,
	0x00400490,
	0x00400414,
	0x00400494,
	0x00400418,
	0x00400498,
	0x0040041c,
	0x0040049c,
	0x00400420,
	0x004004a0,
	0x00400424,
	0x004004a4,
	0x00400428,
	0x004004a8,
	0x0040042c,
	0x004004ac,
	0x00400430,
	0x004004b0,
	0x00400434,
	0x004004b4,
	0x00400438,
	0x004004b8,
	0x0040043c,
	0x004004bc,
	0x00400440,
	0x004004c0,
	0x00400444,
	0x004004c4,
	0x00400448,
	0x004004c8,
	0x0040044c,
	0x004004cc,
	0x00400450,
	0x004004d0,
	0x00400454,
	0x004004d4,
	0x00400458,
	0x004004d8,
	0x0040045c,
	0x004004dc,
	0x00400460,
	0x004004e0,
	0x00400464,
	0x004004e4,
	0x00400468,
	0x004004e8,
	0x0040046c,
	0x004004ec,
	0x00400470,
	0x004004f0,
	0x00400474,
	0x004004f4,
	0x00400478,
	0x004004f8,
	0x0040047c,
	0x004004fc,
	NV03_PGRAPH_ABS_UCLIP_XMIN,
	NV03_PGRAPH_ABS_UCLIP_XMAX,
	NV03_PGRAPH_ABS_UCLIP_YMIN,
	NV03_PGRAPH_ABS_UCLIP_YMAX,
	0x00400550,
	0x00400558,
	0x00400554,
	0x0040055c,
	NV03_PGRAPH_ABS_UCLIPA_XMIN,
	NV03_PGRAPH_ABS_UCLIPA_XMAX,
	NV03_PGRAPH_ABS_UCLIPA_YMIN,
	NV03_PGRAPH_ABS_UCLIPA_YMAX,
	NV03_PGRAPH_ABS_ICLIP_XMAX,
	NV03_PGRAPH_ABS_ICLIP_YMAX,
	NV03_PGRAPH_XY_LOGIC_MISC0,
	NV03_PGRAPH_XY_LOGIC_MISC1,
	NV03_PGRAPH_XY_LOGIC_MISC2,
	NV03_PGRAPH_XY_LOGIC_MISC3,
	NV03_PGRAPH_CLIPX_0,
	NV03_PGRAPH_CLIPX_1,
	NV03_PGRAPH_CLIPY_0,
	NV03_PGRAPH_CLIPY_1,
	NV10_PGRAPH_COMBINER0_IN_ALPHA,
	NV10_PGRAPH_COMBINER1_IN_ALPHA,
	NV10_PGRAPH_COMBINER0_IN_RGB,
	NV10_PGRAPH_COMBINER1_IN_RGB,
	NV10_PGRAPH_COMBINER_COLOR0,
	NV10_PGRAPH_COMBINER_COLOR1,
	NV10_PGRAPH_COMBINER0_OUT_ALPHA,
	NV10_PGRAPH_COMBINER1_OUT_ALPHA,
	NV10_PGRAPH_COMBINER0_OUT_RGB,
	NV10_PGRAPH_COMBINER1_OUT_RGB,
	NV10_PGRAPH_COMBINER_FINAL0,
	NV10_PGRAPH_COMBINER_FINAL1,
	0x00400e00,
	0x00400e04,
	0x00400e08,
	0x00400e0c,
	0x00400e10,
	0x00400e14,
	0x00400e18,
	0x00400e1c,
	0x00400e20,
	0x00400e24,
	0x00400e28,
	0x00400e2c,
	0x00400e30,
	0x00400e34,
	0x00400e38,
	0x00400e3c,
	NV04_PGRAPH_PASSTHRU_0,
	NV04_PGRAPH_PASSTHRU_1,
	NV04_PGRAPH_PASSTHRU_2,
	NV10_PGRAPH_DIMX_TEXTURE,
	NV10_PGRAPH_WDIMX_TEXTURE,
	NV10_PGRAPH_DVD_COLORFMT,
	NV10_PGRAPH_SCALED_FORMAT,
	NV04_PGRAPH_MISC24_0,
	NV04_PGRAPH_MISC24_1,
	NV04_PGRAPH_MISC24_2,
	NV03_PGRAPH_X_MISC,
	NV03_PGRAPH_Y_MISC,
	NV04_PGRAPH_VALID1,
	NV04_PGRAPH_VALID2,
};
370
/* Extra registers saved/restored on chipsets >= 0x17, in addition to
 * nv10_graph_ctx_regs[].  Indexed by position into graph_state::nv17
 * (see nv17_graph_ctx_regs_find_offset() and NV17_WRITE_CTX). */
static int nv17_graph_ctx_regs[] = {
	NV10_PGRAPH_DEBUG_4,
	0x004006b0,
	0x00400eac,
	0x00400eb0,
	0x00400eb4,
	0x00400eb8,
	0x00400ebc,
	0x00400ec0,
	0x00400ec4,
	0x00400ec8,
	0x00400ecc,
	0x00400ed0,
	0x00400ed4,
	0x00400ed8,
	0x00400edc,
	0x00400ee0,
	0x00400a00,
	0x00400a04,
};
391
/* Per-channel software copy of the PGRAPH context.  nv10[]/nv17[] hold
 * register values in the same order as the corresponding *_ctx_regs
 * tables above; pipe_state mirrors the transform pipe; lma_window
 * buffers the four NV17 LMA window method arguments. */
struct graph_state {
	int nv10[ARRAY_SIZE(nv10_graph_ctx_regs)];
	int nv17[ARRAY_SIZE(nv17_graph_ctx_regs)];
	struct pipe_state pipe_state;
	uint32_t lma_window[4];
};
398
/* Read ARRAY_SIZE(state) pipe words starting at pipe address 'addr'
 * into 'state'.  'state' must be an actual array (not a pointer),
 * since ARRAY_SIZE is applied to it.  The hardware auto-increments
 * the pipe address on each PIPE_DATA access. */
#define PIPE_SAVE(dev, state, addr)					\
	do {								\
		int __i;						\
		nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, addr);		\
		for (__i = 0; __i < ARRAY_SIZE(state); __i++)		\
			state[__i] = nv_rd32(dev, NV10_PGRAPH_PIPE_DATA); \
	} while (0)
406
/* Counterpart of PIPE_SAVE: write ARRAY_SIZE(state) words from 'state'
 * back to the pipe starting at pipe address 'addr'. */
#define PIPE_RESTORE(dev, state, addr)					\
	do {								\
		int __i;						\
		nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, addr);		\
		for (__i = 0; __i < ARRAY_SIZE(state); __i++)		\
			nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, state[__i]); \
	} while (0)
414
/* Capture the whole transform pipe into the channel's software context
 * so nv10_graph_load_pipe() can re-upload it later.  The save order
 * mirrors the restore order used there (0x0200 first on restore). */
static void nv10_graph_save_pipe(struct nouveau_channel *chan)
{
	struct drm_device *dev = chan->dev;
	struct graph_state *pgraph_ctx = chan->pgraph_ctx;
	struct pipe_state *pipe = &pgraph_ctx->pipe_state;

	PIPE_SAVE(dev, pipe->pipe_0x4400, 0x4400);
	PIPE_SAVE(dev, pipe->pipe_0x0200, 0x0200);
	PIPE_SAVE(dev, pipe->pipe_0x6400, 0x6400);
	PIPE_SAVE(dev, pipe->pipe_0x6800, 0x6800);
	PIPE_SAVE(dev, pipe->pipe_0x6c00, 0x6c00);
	PIPE_SAVE(dev, pipe->pipe_0x7000, 0x7000);
	PIPE_SAVE(dev, pipe->pipe_0x7400, 0x7400);
	PIPE_SAVE(dev, pipe->pipe_0x7800, 0x7800);
	PIPE_SAVE(dev, pipe->pipe_0x0040, 0x0040);
	PIPE_SAVE(dev, pipe->pipe_0x0000, 0x0000);
}
432
/* Upload the saved transform pipe state back to the hardware.  XFMODE
 * is forced to a known value and a few pipe ranges are seeded with
 * fixed constants before the restore, then XFMODE is put back; the
 * exact sequence and the idle waits are order-sensitive. */
static void nv10_graph_load_pipe(struct nouveau_channel *chan)
{
	struct drm_device *dev = chan->dev;
	struct graph_state *pgraph_ctx = chan->pgraph_ctx;
	struct pipe_state *pipe = &pgraph_ctx->pipe_state;
	uint32_t xfmode0, xfmode1;
	int i;

	nouveau_wait_for_idle(dev);
	/* XXX check haiku comments */
	xfmode0 = nv_rd32(dev, NV10_PGRAPH_XFMODE0);
	xfmode1 = nv_rd32(dev, NV10_PGRAPH_XFMODE1);
	nv_wr32(dev, NV10_PGRAPH_XFMODE0, 0x10000000);
	nv_wr32(dev, NV10_PGRAPH_XFMODE1, 0x00000000);
	/* Seed 0x64c0: four 1.0f (0x3f800000) then four zeros. */
	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x000064c0);
	for (i = 0; i < 4; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x3f800000);
	for (i = 0; i < 4; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x00000000);

	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x00006ab0);
	for (i = 0; i < 3; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x3f800000);

	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x00006a80);
	for (i = 0; i < 3; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x00000000);

	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x00000040);
	nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x00000008);


	/* 0x0200 must go in before the XFMODE restore. */
	PIPE_RESTORE(dev, pipe->pipe_0x0200, 0x0200);
	nouveau_wait_for_idle(dev);

	/* restore XFMODE */
	nv_wr32(dev, NV10_PGRAPH_XFMODE0, xfmode0);
	nv_wr32(dev, NV10_PGRAPH_XFMODE1, xfmode1);
	PIPE_RESTORE(dev, pipe->pipe_0x6400, 0x6400);
	PIPE_RESTORE(dev, pipe->pipe_0x6800, 0x6800);
	PIPE_RESTORE(dev, pipe->pipe_0x6c00, 0x6c00);
	PIPE_RESTORE(dev, pipe->pipe_0x7000, 0x7000);
	PIPE_RESTORE(dev, pipe->pipe_0x7400, 0x7400);
	PIPE_RESTORE(dev, pipe->pipe_0x7800, 0x7800);
	PIPE_RESTORE(dev, pipe->pipe_0x4400, 0x4400);
	PIPE_RESTORE(dev, pipe->pipe_0x0000, 0x0000);
	PIPE_RESTORE(dev, pipe->pipe_0x0040, 0x0040);
	nouveau_wait_for_idle(dev);
}
482
/* Fill the software pipe image for a newly created channel with its
 * initial values.  Nothing touches the hardware here; the image is
 * uploaded by nv10_graph_load_pipe() on context load.  The recurring
 * bit patterns are IEEE-754 floats: 0x3f800000 = 1.0f, 0x3f000000 =
 * 0.5f, 0xbf800000 = -1.0f, 0x40000000 = 2.0f (0x7149f2ca is a large
 * float constant — presumably a "far plane"-style default; exact
 * semantics undocumented). */
static void nv10_graph_create_pipe(struct nouveau_channel *chan)
{
	struct drm_device *dev = chan->dev;
	struct graph_state *pgraph_ctx = chan->pgraph_ctx;
	struct pipe_state *fifo_pipe_state = &pgraph_ctx->pipe_state;
	uint32_t *fifo_pipe_state_addr;
	int i;
/* Point the write cursor at the start of pipe_<addr>. */
#define PIPE_INIT(addr) \
	do { \
		fifo_pipe_state_addr = fifo_pipe_state->pipe_##addr; \
	} while (0)
/* Verify the writes since PIPE_INIT(addr) filled the array exactly. */
#define PIPE_INIT_END(addr) \
	do { \
		uint32_t *__end_addr = fifo_pipe_state->pipe_##addr + \
				ARRAY_SIZE(fifo_pipe_state->pipe_##addr); \
		if (fifo_pipe_state_addr != __end_addr) \
			NV_ERROR(dev, "incomplete pipe init for 0x%x : %p/%p\n", \
				addr, fifo_pipe_state_addr, __end_addr); \
	} while (0)
/* Append one word at the cursor and advance it. */
#define NV_WRITE_PIPE_INIT(value) *(fifo_pipe_state_addr++) = value

	PIPE_INIT(0x0200);
	for (i = 0; i < 48; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x0200);

	PIPE_INIT(0x6400);
	for (i = 0; i < 211; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	NV_WRITE_PIPE_INIT(0x40000000);
	NV_WRITE_PIPE_INIT(0x40000000);
	NV_WRITE_PIPE_INIT(0x40000000);
	NV_WRITE_PIPE_INIT(0x40000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x3f000000);
	NV_WRITE_PIPE_INIT(0x3f000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	PIPE_INIT_END(0x6400);

	PIPE_INIT(0x6800);
	for (i = 0; i < 162; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	for (i = 0; i < 25; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x6800);

	PIPE_INIT(0x6c00);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0xbf800000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x6c00);

	PIPE_INIT(0x7000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	for (i = 0; i < 35; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x7000);

	PIPE_INIT(0x7400);
	for (i = 0; i < 48; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x7400);

	PIPE_INIT(0x7800);
	for (i = 0; i < 48; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x7800);

	PIPE_INIT(0x4400);
	for (i = 0; i < 32; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x4400);

	PIPE_INIT(0x0000);
	for (i = 0; i < 16; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x0000);

	PIPE_INIT(0x0040);
	for (i = 0; i < 4; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x0040);

#undef PIPE_INIT
#undef PIPE_INIT_END
#undef NV_WRITE_PIPE_INIT
}
637
nv10_graph_ctx_regs_find_offset(struct drm_device * dev,int reg)638 static int nv10_graph_ctx_regs_find_offset(struct drm_device *dev, int reg)
639 {
640 int i;
641 for (i = 0; i < ARRAY_SIZE(nv10_graph_ctx_regs); i++) {
642 if (nv10_graph_ctx_regs[i] == reg)
643 return i;
644 }
645 NV_ERROR(dev, "unknow offset nv10_ctx_regs %d\n", reg);
646 return -1;
647 }
648
nv17_graph_ctx_regs_find_offset(struct drm_device * dev,int reg)649 static int nv17_graph_ctx_regs_find_offset(struct drm_device *dev, int reg)
650 {
651 int i;
652 for (i = 0; i < ARRAY_SIZE(nv17_graph_ctx_regs); i++) {
653 if (nv17_graph_ctx_regs[i] == reg)
654 return i;
655 }
656 NV_ERROR(dev, "unknow offset nv17_ctx_regs %d\n", reg);
657 return -1;
658 }
659
/* Restore the hidden state set by NV10TCL_DMA_VTXBUF (celsius method
 * 0x18c).  That state cannot be written over MMIO, so the method is
 * replayed through the PGRAPH FIFO interface: find a celsius object in
 * the context cache, temporarily switch to its subchannel, inject the
 * method with 'inst' as data, then restore the FIFO and ctx-switch
 * state exactly as it was. */
static void nv10_graph_load_dma_vtxbuf(struct nouveau_channel *chan,
				       uint32_t inst)
{
	struct drm_device *dev = chan->dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_pgraph_engine *pgraph = &dev_priv->engine.graph;
	uint32_t st2, st2_dl, st2_dh, fifo_ptr, fifo[0x60/4];
	uint32_t ctx_user, ctx_switch[5];
	int i, subchan = -1;

	/* NV10TCL_DMA_VTXBUF (method 0x18c) modifies hidden state
	 * that cannot be restored via MMIO. Do it through the FIFO
	 * instead.
	 */

	/* Look for a celsius object */
	for (i = 0; i < 8; i++) {
		/* celsius class ids per chipset: 0x56, 0x96 or 0x99
		 * (see the class registration at the end of the file) */
		int class = nv_rd32(dev, NV10_PGRAPH_CTX_CACHE(i, 0)) & 0xfff;

		if (class == 0x56 || class == 0x96 || class == 0x99) {
			subchan = i;
			break;
		}
	}

	/* Nothing to do without a celsius object or a vtxbuf instance. */
	if (subchan < 0 || !inst)
		return;

	/* Save the current ctx object */
	ctx_user = nv_rd32(dev, NV10_PGRAPH_CTX_USER);
	for (i = 0; i < 5; i++)
		ctx_switch[i] = nv_rd32(dev, NV10_PGRAPH_CTX_SWITCH(i));

	/* Save the FIFO state */
	st2 = nv_rd32(dev, NV10_PGRAPH_FFINTFC_ST2);
	st2_dl = nv_rd32(dev, NV10_PGRAPH_FFINTFC_ST2_DL);
	st2_dh = nv_rd32(dev, NV10_PGRAPH_FFINTFC_ST2_DH);
	fifo_ptr = nv_rd32(dev, NV10_PGRAPH_FFINTFC_FIFO_PTR);

	for (i = 0; i < ARRAY_SIZE(fifo); i++)
		fifo[i] = nv_rd32(dev, 0x4007a0 + 4 * i);

	/* Switch to the celsius subchannel */
	for (i = 0; i < 5; i++)
		nv_wr32(dev, NV10_PGRAPH_CTX_SWITCH(i),
			nv_rd32(dev, NV10_PGRAPH_CTX_CACHE(subchan, i)));
	nv_mask(dev, NV10_PGRAPH_CTX_USER, 0xe000, subchan << 13);

	/* Inject NV10TCL_DMA_VTXBUF */
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_FIFO_PTR, 0);
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_ST2,
		0x2c000000 | chan->id << 20 | subchan << 16 | 0x18c);
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_ST2_DL, inst);
	nv_mask(dev, NV10_PGRAPH_CTX_CONTROL, 0, 0x10000);
	/* Toggle fifo access to make PGRAPH consume the injected method. */
	pgraph->fifo_access(dev, true);
	pgraph->fifo_access(dev, false);

	/* Restore the FIFO state */
	for (i = 0; i < ARRAY_SIZE(fifo); i++)
		nv_wr32(dev, 0x4007a0 + 4 * i, fifo[i]);

	nv_wr32(dev, NV10_PGRAPH_FFINTFC_FIFO_PTR, fifo_ptr);
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_ST2, st2);
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_ST2_DL, st2_dl);
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_ST2_DH, st2_dh);

	/* Restore the current ctx object */
	for (i = 0; i < 5; i++)
		nv_wr32(dev, NV10_PGRAPH_CTX_SWITCH(i), ctx_switch[i]);
	nv_wr32(dev, NV10_PGRAPH_CTX_USER, ctx_user);
}
731
/* Load a channel's saved PGRAPH context into the hardware: the MMIO
 * register images (nv10[], plus nv17[] on >= 0x17 chipsets), the
 * transform pipe, and the hidden DMA_VTXBUF state; then mark the
 * context valid and bind CTX_USER to this channel id.  Returns 0. */
int nv10_graph_load_context(struct nouveau_channel *chan)
{
	struct drm_device *dev = chan->dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct graph_state *pgraph_ctx = chan->pgraph_ctx;
	uint32_t tmp;
	int i;

	for (i = 0; i < ARRAY_SIZE(nv10_graph_ctx_regs); i++)
		nv_wr32(dev, nv10_graph_ctx_regs[i], pgraph_ctx->nv10[i]);
	if (dev_priv->chipset >= 0x17) {
		for (i = 0; i < ARRAY_SIZE(nv17_graph_ctx_regs); i++)
			nv_wr32(dev, nv17_graph_ctx_regs[i],
						pgraph_ctx->nv17[i]);
	}

	nv10_graph_load_pipe(chan);
	/* vtxbuf instance was saved in the low half of GLOBALSTATE1 */
	nv10_graph_load_dma_vtxbuf(chan, (nv_rd32(dev, NV10_PGRAPH_GLOBALSTATE1)
					  & 0xffff));

	nv_wr32(dev, NV10_PGRAPH_CTX_CONTROL, 0x10010100);
	/* channel id lives in the top byte of CTX_USER */
	tmp = nv_rd32(dev, NV10_PGRAPH_CTX_USER);
	nv_wr32(dev, NV10_PGRAPH_CTX_USER, (tmp & 0xffffff) | chan->id << 24);
	tmp = nv_rd32(dev, NV10_PGRAPH_FFINTFC_ST2);
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_ST2, tmp & 0xcfffffff);
	return 0;
}
759
760 int
nv10_graph_unload_context(struct drm_device * dev)761 nv10_graph_unload_context(struct drm_device *dev)
762 {
763 struct drm_nouveau_private *dev_priv = dev->dev_private;
764 struct nouveau_pgraph_engine *pgraph = &dev_priv->engine.graph;
765 struct nouveau_fifo_engine *pfifo = &dev_priv->engine.fifo;
766 struct nouveau_channel *chan;
767 struct graph_state *ctx;
768 uint32_t tmp;
769 int i;
770
771 chan = pgraph->channel(dev);
772 if (!chan)
773 return 0;
774 ctx = chan->pgraph_ctx;
775
776 for (i = 0; i < ARRAY_SIZE(nv10_graph_ctx_regs); i++)
777 ctx->nv10[i] = nv_rd32(dev, nv10_graph_ctx_regs[i]);
778
779 if (dev_priv->chipset >= 0x17) {
780 for (i = 0; i < ARRAY_SIZE(nv17_graph_ctx_regs); i++)
781 ctx->nv17[i] = nv_rd32(dev, nv17_graph_ctx_regs[i]);
782 }
783
784 nv10_graph_save_pipe(chan);
785
786 nv_wr32(dev, NV10_PGRAPH_CTX_CONTROL, 0x10000000);
787 tmp = nv_rd32(dev, NV10_PGRAPH_CTX_USER) & 0x00ffffff;
788 tmp |= (pfifo->channels - 1) << 24;
789 nv_wr32(dev, NV10_PGRAPH_CTX_USER, tmp);
790 return 0;
791 }
792
793 static void
nv10_graph_context_switch(struct drm_device * dev)794 nv10_graph_context_switch(struct drm_device *dev)
795 {
796 struct drm_nouveau_private *dev_priv = dev->dev_private;
797 struct nouveau_channel *chan = NULL;
798 int chid;
799
800 nouveau_wait_for_idle(dev);
801
802 /* If previous context is valid, we need to save it */
803 nv10_graph_unload_context(dev);
804
805 /* Load context for next channel */
806 chid = (nv_rd32(dev, NV04_PGRAPH_TRAPPED_ADDR) >> 20) & 0x1f;
807 chan = dev_priv->channels.ptr[chid];
808 if (chan && chan->pgraph_ctx)
809 nv10_graph_load_context(chan);
810 }
811
/* Store 'val' into the channel's saved nv10 context image at the slot
 * for register 'reg'.  find_offset() returns -1 on failure and a
 * 0-based index on success, so the guard must accept offset 0 — with
 * the previous '> 0' test a write to the first table entry
 * (NV10_PGRAPH_CTX_SWITCH(0)) would be silently dropped. */
#define NV_WRITE_CTX(reg, val) do { \
	int offset = nv10_graph_ctx_regs_find_offset(dev, reg); \
	if (offset >= 0) \
		pgraph_ctx->nv10[offset] = val; \
	} while (0)
817
/* Same as NV_WRITE_CTX, but for the nv17 extra-register image.  The
 * guard accepts offset 0 (a valid index for the first table entry,
 * NV10_PGRAPH_DEBUG_4); only the -1 failure return is rejected. */
#define NV17_WRITE_CTX(reg, val) do { \
	int offset = nv17_graph_ctx_regs_find_offset(dev, reg); \
	if (offset >= 0) \
		pgraph_ctx->nv17[offset] = val; \
	} while (0)
823
824 struct nouveau_channel *
nv10_graph_channel(struct drm_device * dev)825 nv10_graph_channel(struct drm_device *dev)
826 {
827 struct drm_nouveau_private *dev_priv = dev->dev_private;
828 int chid = dev_priv->engine.fifo.channels;
829
830 if (nv_rd32(dev, NV10_PGRAPH_CTX_CONTROL) & 0x00010000)
831 chid = nv_rd32(dev, NV10_PGRAPH_CTX_USER) >> 24;
832
833 if (chid >= dev_priv->engine.fifo.channels)
834 return NULL;
835
836 return dev_priv->channels.ptr[chid];
837 }
838
/* Allocate and initialize the software PGRAPH context for a channel:
 * zeroed register images with a handful of non-default seeds, the
 * initial pipe image, and CTX_USER bound to the channel id.
 * Returns 0, or -ENOMEM if the context allocation fails. */
int nv10_graph_create_context(struct nouveau_channel *chan)
{
	struct drm_device *dev = chan->dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct graph_state *pgraph_ctx;

	NV_DEBUG(dev, "nv10_graph_context_create %d\n", chan->id);

	chan->pgraph_ctx = pgraph_ctx = kzalloc(sizeof(*pgraph_ctx),
						GFP_KERNEL);
	if (pgraph_ctx == NULL)
		return -ENOMEM;


	NV_WRITE_CTX(0x00400e88, 0x08000000);
	NV_WRITE_CTX(0x00400e9c, 0x4b7fffff);
	NV_WRITE_CTX(NV03_PGRAPH_XY_LOGIC_MISC0, 0x0001ffff);
	NV_WRITE_CTX(0x00400e10, 0x00001000);
	NV_WRITE_CTX(0x00400e14, 0x00001000);
	NV_WRITE_CTX(0x00400e30, 0x00080008);
	NV_WRITE_CTX(0x00400e34, 0x00080008);
	if (dev_priv->chipset >= 0x17) {
		/* is it really needed ??? */
		/* seed from current hw values rather than constants */
		NV17_WRITE_CTX(NV10_PGRAPH_DEBUG_4,
					nv_rd32(dev, NV10_PGRAPH_DEBUG_4));
		NV17_WRITE_CTX(0x004006b0, nv_rd32(dev, 0x004006b0));
		NV17_WRITE_CTX(0x00400eac, 0x0fff0000);
		NV17_WRITE_CTX(0x00400eb0, 0x0fff0000);
		NV17_WRITE_CTX(0x00400ec0, 0x00000080);
		NV17_WRITE_CTX(0x00400ed0, 0x00000080);
	}
	NV_WRITE_CTX(NV10_PGRAPH_CTX_USER, chan->id << 24);

	nv10_graph_create_pipe(chan);
	return 0;
}
875
/* Tear down a channel's PGRAPH context.  Runs with the context-switch
 * lock held and PGRAPH fifo access disabled so the context cannot be
 * switched in/out while it is being freed. */
void nv10_graph_destroy_context(struct nouveau_channel *chan)
{
	struct drm_device *dev = chan->dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_pgraph_engine *pgraph = &dev_priv->engine.graph;
	struct graph_state *pgraph_ctx = chan->pgraph_ctx;
	unsigned long flags;

	spin_lock_irqsave(&dev_priv->context_switch_lock, flags);
	pgraph->fifo_access(dev, false);

	/* Unload the context if it's the currently active one */
	if (pgraph->channel(dev) == chan)
		pgraph->unload_context(dev);

	/* Free the context resources */
	kfree(pgraph_ctx);
	chan->pgraph_ctx = NULL;

	pgraph->fifo_access(dev, true);
	spin_unlock_irqrestore(&dev_priv->context_switch_lock, flags);
}
898
899 void
nv10_graph_set_tile_region(struct drm_device * dev,int i)900 nv10_graph_set_tile_region(struct drm_device *dev, int i)
901 {
902 struct drm_nouveau_private *dev_priv = dev->dev_private;
903 struct nouveau_tile_reg *tile = &dev_priv->tile.reg[i];
904
905 nv_wr32(dev, NV10_PGRAPH_TLIMIT(i), tile->limit);
906 nv_wr32(dev, NV10_PGRAPH_TSIZE(i), tile->pitch);
907 nv_wr32(dev, NV10_PGRAPH_TILE(i), tile->addr);
908 }
909
/* Bring up the PGRAPH engine: reset it via PMC, register classes and
 * the IRQ handler, program debug/state registers, clear tiling and
 * ctx-switch state, and park CTX_USER on an invalid channel id.
 * Returns 0 on success or the error from nv10_graph_register(). */
int nv10_graph_init(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	uint32_t tmp;
	int ret, i;

	/* Pulse the PGRAPH enable bit in PMC low then high to reset it. */
	nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) &
			~NV_PMC_ENABLE_PGRAPH);
	nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) |
			 NV_PMC_ENABLE_PGRAPH);

	ret = nv10_graph_register(dev);
	if (ret)
		return ret;

	/* Ack any stale interrupts, then enable them all. */
	nouveau_irq_register(dev, 12, nv10_graph_isr);
	nv_wr32(dev, NV03_PGRAPH_INTR   , 0xFFFFFFFF);
	nv_wr32(dev, NV03_PGRAPH_INTR_EN, 0xFFFFFFFF);

	nv_wr32(dev, NV04_PGRAPH_DEBUG_0, 0xFFFFFFFF);
	nv_wr32(dev, NV04_PGRAPH_DEBUG_0, 0x00000000);
	nv_wr32(dev, NV04_PGRAPH_DEBUG_1, 0x00118700);
	/* nv_wr32(dev, NV04_PGRAPH_DEBUG_2, 0x24E00810); */ /* 0x25f92ad9 */
	nv_wr32(dev, NV04_PGRAPH_DEBUG_2, 0x25f92ad9);
	nv_wr32(dev, NV04_PGRAPH_DEBUG_3, 0x55DE0830 |
				(1<<29) |
				(1<<31));
	if (dev_priv->chipset >= 0x17) {
		nv_wr32(dev, NV10_PGRAPH_DEBUG_4, 0x1f000000);
		nv_wr32(dev, 0x400a10, 0x3ff3fb6);
		nv_wr32(dev, 0x400838, 0x2f8684);
		nv_wr32(dev, 0x40083c, 0x115f3f);
		nv_wr32(dev, 0x004006b0, 0x40000020);
	} else
		nv_wr32(dev, NV10_PGRAPH_DEBUG_4, 0x00000000);

	/* Turn all the tiling regions off. */
	for (i = 0; i < NV10_PFB_TILE__SIZE; i++)
		nv10_graph_set_tile_region(dev, i);

	nv_wr32(dev, NV10_PGRAPH_CTX_SWITCH(0), 0x00000000);
	nv_wr32(dev, NV10_PGRAPH_CTX_SWITCH(1), 0x00000000);
	nv_wr32(dev, NV10_PGRAPH_CTX_SWITCH(2), 0x00000000);
	nv_wr32(dev, NV10_PGRAPH_CTX_SWITCH(3), 0x00000000);
	nv_wr32(dev, NV10_PGRAPH_CTX_SWITCH(4), 0x00000000);
	nv_wr32(dev, NV10_PGRAPH_STATE, 0xFFFFFFFF);

	/* No context loaded yet: point CTX_USER at the last channel id. */
	tmp  = nv_rd32(dev, NV10_PGRAPH_CTX_USER) & 0x00ffffff;
	tmp |= (dev_priv->engine.fifo.channels - 1) << 24;
	nv_wr32(dev, NV10_PGRAPH_CTX_USER, tmp);
	nv_wr32(dev, NV10_PGRAPH_CTX_CONTROL, 0x10000100);
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_ST2, 0x08000000);

	return 0;
}
965
/* Shut down PGRAPH: mask all its interrupts first, then unhook the
 * IRQ handler registered in nv10_graph_init(). */
void nv10_graph_takedown(struct drm_device *dev)
{
	nv_wr32(dev, NV03_PGRAPH_INTR_EN, 0x00000000);
	nouveau_irq_unregister(dev, 12);
}
971
972 static int
nv17_graph_mthd_lma_window(struct nouveau_channel * chan,u32 class,u32 mthd,u32 data)973 nv17_graph_mthd_lma_window(struct nouveau_channel *chan,
974 u32 class, u32 mthd, u32 data)
975 {
976 struct drm_device *dev = chan->dev;
977 struct graph_state *ctx = chan->pgraph_ctx;
978 struct pipe_state *pipe = &ctx->pipe_state;
979 uint32_t pipe_0x0040[1], pipe_0x64c0[8], pipe_0x6a80[3], pipe_0x6ab0[3];
980 uint32_t xfmode0, xfmode1;
981 int i;
982
983 ctx->lma_window[(mthd - 0x1638) / 4] = data;
984
985 if (mthd != 0x1644)
986 return 0;
987
988 nouveau_wait_for_idle(dev);
989
990 PIPE_SAVE(dev, pipe_0x0040, 0x0040);
991 PIPE_SAVE(dev, pipe->pipe_0x0200, 0x0200);
992
993 PIPE_RESTORE(dev, ctx->lma_window, 0x6790);
994
995 nouveau_wait_for_idle(dev);
996
997 xfmode0 = nv_rd32(dev, NV10_PGRAPH_XFMODE0);
998 xfmode1 = nv_rd32(dev, NV10_PGRAPH_XFMODE1);
999
1000 PIPE_SAVE(dev, pipe->pipe_0x4400, 0x4400);
1001 PIPE_SAVE(dev, pipe_0x64c0, 0x64c0);
1002 PIPE_SAVE(dev, pipe_0x6ab0, 0x6ab0);
1003 PIPE_SAVE(dev, pipe_0x6a80, 0x6a80);
1004
1005 nouveau_wait_for_idle(dev);
1006
1007 nv_wr32(dev, NV10_PGRAPH_XFMODE0, 0x10000000);
1008 nv_wr32(dev, NV10_PGRAPH_XFMODE1, 0x00000000);
1009 nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x000064c0);
1010 for (i = 0; i < 4; i++)
1011 nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x3f800000);
1012 for (i = 0; i < 4; i++)
1013 nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x00000000);
1014
1015 nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x00006ab0);
1016 for (i = 0; i < 3; i++)
1017 nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x3f800000);
1018
1019 nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x00006a80);
1020 for (i = 0; i < 3; i++)
1021 nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x00000000);
1022
1023 nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x00000040);
1024 nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x00000008);
1025
1026 PIPE_RESTORE(dev, pipe->pipe_0x0200, 0x0200);
1027
1028 nouveau_wait_for_idle(dev);
1029
1030 PIPE_RESTORE(dev, pipe_0x0040, 0x0040);
1031
1032 nv_wr32(dev, NV10_PGRAPH_XFMODE0, xfmode0);
1033 nv_wr32(dev, NV10_PGRAPH_XFMODE1, xfmode1);
1034
1035 PIPE_RESTORE(dev, pipe_0x64c0, 0x64c0);
1036 PIPE_RESTORE(dev, pipe_0x6ab0, 0x6ab0);
1037 PIPE_RESTORE(dev, pipe_0x6a80, 0x6a80);
1038 PIPE_RESTORE(dev, pipe->pipe_0x4400, 0x4400);
1039
1040 nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x000000c0);
1041 nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x00000000);
1042
1043 nouveau_wait_for_idle(dev);
1044
1045 return 0;
1046 }
1047
1048 static int
nv17_graph_mthd_lma_enable(struct nouveau_channel * chan,u32 class,u32 mthd,u32 data)1049 nv17_graph_mthd_lma_enable(struct nouveau_channel *chan,
1050 u32 class, u32 mthd, u32 data)
1051 {
1052 struct drm_device *dev = chan->dev;
1053
1054 nouveau_wait_for_idle(dev);
1055
1056 nv_wr32(dev, NV10_PGRAPH_DEBUG_4,
1057 nv_rd32(dev, NV10_PGRAPH_DEBUG_4) | 0x1 << 8);
1058 nv_wr32(dev, 0x004006b0,
1059 nv_rd32(dev, 0x004006b0) | 0x8 << 24);
1060
1061 return 0;
1062 }
1063
1064 static int
nv10_graph_register(struct drm_device * dev)1065 nv10_graph_register(struct drm_device *dev)
1066 {
1067 struct drm_nouveau_private *dev_priv = dev->dev_private;
1068
1069 if (dev_priv->engine.graph.registered)
1070 return 0;
1071
1072 NVOBJ_CLASS(dev, 0x506e, SW); /* nvsw */
1073 NVOBJ_CLASS(dev, 0x0030, GR); /* null */
1074 NVOBJ_CLASS(dev, 0x0039, GR); /* m2mf */
1075 NVOBJ_CLASS(dev, 0x004a, GR); /* gdirect */
1076 NVOBJ_CLASS(dev, 0x005f, GR); /* imageblit */
1077 NVOBJ_CLASS(dev, 0x009f, GR); /* imageblit (nv12) */
1078 NVOBJ_CLASS(dev, 0x008a, GR); /* ifc */
1079 NVOBJ_CLASS(dev, 0x0089, GR); /* sifm */
1080 NVOBJ_CLASS(dev, 0x0062, GR); /* surf2d */
1081 NVOBJ_CLASS(dev, 0x0043, GR); /* rop */
1082 NVOBJ_CLASS(dev, 0x0012, GR); /* beta1 */
1083 NVOBJ_CLASS(dev, 0x0072, GR); /* beta4 */
1084 NVOBJ_CLASS(dev, 0x0019, GR); /* cliprect */
1085 NVOBJ_CLASS(dev, 0x0044, GR); /* pattern */
1086 NVOBJ_CLASS(dev, 0x0052, GR); /* swzsurf */
1087 NVOBJ_CLASS(dev, 0x0093, GR); /* surf3d */
1088 NVOBJ_CLASS(dev, 0x0094, GR); /* tex_tri */
1089 NVOBJ_CLASS(dev, 0x0095, GR); /* multitex_tri */
1090
1091 /* celcius */
1092 if (dev_priv->chipset <= 0x10) {
1093 NVOBJ_CLASS(dev, 0x0056, GR);
1094 } else
1095 if (dev_priv->chipset < 0x17 || dev_priv->chipset == 0x1a) {
1096 NVOBJ_CLASS(dev, 0x0096, GR);
1097 } else {
1098 NVOBJ_CLASS(dev, 0x0099, GR);
1099 NVOBJ_MTHD (dev, 0x0099, 0x1638, nv17_graph_mthd_lma_window);
1100 NVOBJ_MTHD (dev, 0x0099, 0x163c, nv17_graph_mthd_lma_window);
1101 NVOBJ_MTHD (dev, 0x0099, 0x1640, nv17_graph_mthd_lma_window);
1102 NVOBJ_MTHD (dev, 0x0099, 0x1644, nv17_graph_mthd_lma_window);
1103 NVOBJ_MTHD (dev, 0x0099, 0x1658, nv17_graph_mthd_lma_enable);
1104 }
1105
1106 /* nvsw */
1107 NVOBJ_CLASS(dev, 0x506e, SW);
1108 NVOBJ_MTHD (dev, 0x506e, 0x0500, nv04_graph_mthd_page_flip);
1109
1110 dev_priv->engine.graph.registered = true;
1111 return 0;
1112 }
1113
/* Human-readable names for the PGRAPH interrupt status bits, printed
 * via nouveau_bitfield_print() when an interrupt goes unhandled.
 * Empty-initializer entry terminates the table. */
struct nouveau_bitfield nv10_graph_intr[] = {
	{ NV_PGRAPH_INTR_NOTIFY, "NOTIFY" },
	{ NV_PGRAPH_INTR_ERROR,  "ERROR"  },
	{}
};
1119
1120 struct nouveau_bitfield nv10_graph_nstatus[] =
1121 {
1122 { NV10_PGRAPH_NSTATUS_STATE_IN_USE, "STATE_IN_USE" },
1123 { NV10_PGRAPH_NSTATUS_INVALID_STATE, "INVALID_STATE" },
1124 { NV10_PGRAPH_NSTATUS_BAD_ARGUMENT, "BAD_ARGUMENT" },
1125 { NV10_PGRAPH_NSTATUS_PROTECTION_FAULT, "PROTECTION_FAULT" },
1126 {}
1127 };
1128
/*
 * PGRAPH interrupt service routine for NV10-family chips.
 *
 * Loops until NV03_PGRAPH_INTR reads back zero.  For each pending
 * interrupt it decodes the trapped access (channel, subchannel,
 * method, data, object class), lets registered software methods
 * handle ILLEGAL_MTHD errors, performs context switches itself, and
 * (rate-limited) logs anything left unhandled.
 */
static void
nv10_graph_isr(struct drm_device *dev)
{
	u32 stat;

	while ((stat = nv_rd32(dev, NV03_PGRAPH_INTR))) {
		u32 nsource = nv_rd32(dev, NV03_PGRAPH_NSOURCE);
		u32 nstatus = nv_rd32(dev, NV03_PGRAPH_NSTATUS);
		u32 addr = nv_rd32(dev, NV04_PGRAPH_TRAPPED_ADDR);
		u32 chid = (addr & 0x01f00000) >> 20;	/* channel id, bits 24:20 */
		u32 subc = (addr & 0x00070000) >> 16;	/* subchannel, bits 18:16 */
		u32 mthd = (addr & 0x00001ffc);		/* method offset, word-aligned */
		u32 data = nv_rd32(dev, NV04_PGRAPH_TRAPPED_DATA);
		u32 class = nv_rd32(dev, 0x400160 + subc * 4) & 0xfff;
		u32 show = stat;	/* bits still unhandled, for logging */

		if (stat & NV_PGRAPH_INTR_ERROR) {
			if (nsource & NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD) {
				/* Offer the trap to the registered software
				 * methods; on success (returns 0) suppress
				 * the ERROR report. */
				if (!nouveau_gpuobj_mthd_call2(dev, chid, class, mthd, data))
					show &= ~NV_PGRAPH_INTR_ERROR;
			}
		}

		if (stat & NV_PGRAPH_INTR_CONTEXT_SWITCH) {
			/* Ack the context-switch bit separately, before
			 * performing the switch. */
			nv_wr32(dev, NV03_PGRAPH_INTR, NV_PGRAPH_INTR_CONTEXT_SWITCH);
			stat &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
			show &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
			nv10_graph_context_switch(dev);
		}

		/* Ack all remaining interrupt bits and kick PGRAPH's FIFO
		 * access back on. */
		nv_wr32(dev, NV03_PGRAPH_INTR, stat);
		nv_wr32(dev, NV04_PGRAPH_FIFO, 0x00000001);

		/* Rate-limited dump of whatever nobody handled. */
		if (show && nouveau_ratelimit()) {
			NV_INFO(dev, "PGRAPH -");
			nouveau_bitfield_print(nv10_graph_intr, show);
			printk(" nsource:");
			nouveau_bitfield_print(nv04_graph_nsource, nsource);
			printk(" nstatus:");
			nouveau_bitfield_print(nv10_graph_nstatus, nstatus);
			printk("\n");
			NV_INFO(dev, "PGRAPH - ch %d/%d class 0x%04x "
				     "mthd 0x%04x data 0x%08x\n",
				chid, subc, class, mthd, data);
		}
	}
}
1176