Searched refs:nv_rd32 (Results 1 – 25 of 67) sorted by relevance
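
nv_rd32() is nouveau's 32-bit MMIO register read helper (paired with nv_wr32() for writes); it takes the struct drm_device pointer and a register offset. Most of the hits below are read-modify-write sequences on hardware registers. The following is a minimal sketch of that idiom, modelled on the nv50_graph_fifo_access() hits; the function name and mask value are illustrative, and the header providing the accessors is assumed.

#include "nouveau_drv.h"	/* nv_rd32()/nv_wr32(), struct drm_device (assumed) */

/* Toggle some enable bits with a read-modify-write cycle (sketch). */
static void pgraph_fifo_access_sketch(struct drm_device *dev, bool enabled)
{
	const u32 mask = 0x00010001;		/* illustrative bit mask */
	u32 val = nv_rd32(dev, 0x400500);	/* read the current value */

	if (enabled)
		nv_wr32(dev, 0x400500, val | mask);	/* set the bits */
	else
		nv_wr32(dev, 0x400500, val & ~mask);	/* clear the bits */
}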

/linux-3.4.99/drivers/gpu/drm/nouveau/
nv50_graph.c
49 nv_wr32(dev, 0x400500, nv_rd32(dev, 0x400500) | mask); in nv50_graph_fifo_access()
51 nv_wr32(dev, 0x400500, nv_rd32(dev, 0x400500) & ~mask); in nv50_graph_fifo_access()
67 inst = nv_rd32(dev, NV50_PGRAPH_CTXCTL_CUR); in nv50_graph_channel()
85 uint32_t fifo = nv_rd32(dev, 0x400500); in nv50_graph_do_load_context()
89 nv_wr32(dev, 0x400824, nv_rd32(dev, 0x400824) | 0x40); in nv50_graph_do_load_context()
90 nv_wr32(dev, 0x400320, nv_rd32(dev, 0x400320) | 0x11); in nv50_graph_do_load_context()
92 (void)nv_rd32(dev, 0x400040); in nv50_graph_do_load_context()
94 nv_wr32(dev, 0x400304, nv_rd32(dev, 0x400304) | 1); in nv50_graph_do_load_context()
108 inst = nv_rd32(dev, NV50_PGRAPH_CTXCTL_CUR); in nv50_graph_unload_context()
115 nv_wr32(dev, 0x400824, nv_rd32(dev, 0x400824) | 0x20); in nv50_graph_unload_context()
[all …]
nv40_fifo.c
71 nv_rd32(dev, NV04_PFIFO_MODE) | (1 << chan->id)); in nv40_fifo_create_context()
92 tmp2 = nv_rd32(dev, 0x2058) & 0xFFF; in nv40_fifo_do_load_context()
109 tmp = nv_rd32(dev, NV04_PFIFO_DMA_TIMESLICE) & ~0x1FFFF; in nv40_fifo_do_load_context()
138 tmp = nv_rd32(dev, NV04_PFIFO_CACHE1_DMA_CTL) & ~(1 << 31); in nv40_fifo_load_context()
157 nv_wi32(dev, fc + 0, nv_rd32(dev, NV04_PFIFO_CACHE1_DMA_PUT)); in nv40_fifo_unload_context()
158 nv_wi32(dev, fc + 4, nv_rd32(dev, NV04_PFIFO_CACHE1_DMA_GET)); in nv40_fifo_unload_context()
159 nv_wi32(dev, fc + 8, nv_rd32(dev, NV10_PFIFO_CACHE1_REF_CNT)); in nv40_fifo_unload_context()
160 nv_wi32(dev, fc + 12, nv_rd32(dev, NV04_PFIFO_CACHE1_DMA_INSTANCE)); in nv40_fifo_unload_context()
161 nv_wi32(dev, fc + 16, nv_rd32(dev, NV04_PFIFO_CACHE1_DMA_DCOUNT)); in nv40_fifo_unload_context()
162 nv_wi32(dev, fc + 20, nv_rd32(dev, NV04_PFIFO_CACHE1_DMA_STATE)); in nv40_fifo_unload_context()
[all …]
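
The nv40_fifo_unload_context() hits just above save the active channel's pusher state by copying CACHE1 registers into the channel's RAMFC slot in instance memory via nv_wi32(). A condensed sketch of that save, assuming the same accessors and register names as the sketch near the top; only register/offset pairs visible in the hits are repeated, and the table-driven loop is an illustrative restructuring.

/* Copy a few CACHE1 DMA registers into the channel's RAMFC (sketch). */
static void ramfc_save_sketch(struct drm_device *dev, u32 fc)
{
	static const struct { u32 off; u32 reg; } map[] = {
		{  0, NV04_PFIFO_CACHE1_DMA_PUT },
		{  4, NV04_PFIFO_CACHE1_DMA_GET },
		{  8, NV10_PFIFO_CACHE1_REF_CNT },
		{ 12, NV04_PFIFO_CACHE1_DMA_INSTANCE },
		{ 16, NV04_PFIFO_CACHE1_DMA_DCOUNT },
		{ 20, NV04_PFIFO_CACHE1_DMA_STATE },
	};
	int i;

	for (i = 0; i < ARRAY_SIZE(map); i++)
		nv_wi32(dev, fc + map[i].off, nv_rd32(dev, map[i].reg));
}
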
nv50_fifo.c
101 nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) & ~pmc_e); in nv50_fifo_init_reset()
102 nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) | pmc_e); in nv50_fifo_init_reset()
223 return nv_rd32(dev, NV03_PFIFO_CACHE1_PUSH1) & in nv50_fifo_channel_id()
439 nv_wo32(ramfc, 0x00, nv_rd32(dev, 0x3330)); in nv50_fifo_unload_context()
440 nv_wo32(ramfc, 0x04, nv_rd32(dev, 0x3334)); in nv50_fifo_unload_context()
441 nv_wo32(ramfc, 0x08, nv_rd32(dev, 0x3240)); in nv50_fifo_unload_context()
442 nv_wo32(ramfc, 0x0c, nv_rd32(dev, 0x3320)); in nv50_fifo_unload_context()
443 nv_wo32(ramfc, 0x10, nv_rd32(dev, 0x3244)); in nv50_fifo_unload_context()
444 nv_wo32(ramfc, 0x14, nv_rd32(dev, 0x3328)); in nv50_fifo_unload_context()
445 nv_wo32(ramfc, 0x18, nv_rd32(dev, 0x3368)); in nv50_fifo_unload_context()
[all …]
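
The nv50_fifo_init_reset() hits just above show another recurring idiom: an engine is reset by clearing and then re-setting its bit in NV03_PMC_ENABLE. A short sketch of that pulse, assuming the same accessors and register definitions as the sketch near the top (the wrapper name is illustrative).

/* Pulse an engine's PMC enable bit to reset it (sketch). */
static void pmc_engine_reset_sketch(struct drm_device *dev, u32 pmc_e)
{
	nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) & ~pmc_e);
	nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) |  pmc_e);
}
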
nv04_fifo.c
51 tmp = nv_rd32(dev, NV04_PFIFO_CACHE1_DMA_PUSH); in nv04_fifo_disable()
54 tmp = nv_rd32(dev, NV03_PFIFO_CACHE1_PULL1); in nv04_fifo_disable()
68 uint32_t reassign = nv_rd32(dev, NV03_PFIFO_CACHES); in nv04_fifo_reassign()
92 if (nv_rd32(dev, NV04_PFIFO_CACHE1_PULL0) & in nv04_fifo_cache_pull()
106 return nv_rd32(dev, NV03_PFIFO_CACHE1_PUSH1) & in nv04_fifo_channel_id()
150 nv_rd32(dev, NV04_PFIFO_MODE) | (1 << chan->id)); in nv04_fifo_create_context()
219 tmp = nv_rd32(chan->dev, NV04_PFIFO_CACHE1_DMA_CTL) & ~(1 << 31); in nv04_fifo_load_context()
244 RAMFC_WR(DMA_PUT, nv_rd32(dev, NV04_PFIFO_CACHE1_DMA_PUT)); in nv04_fifo_unload_context()
245 RAMFC_WR(DMA_GET, nv_rd32(dev, NV04_PFIFO_CACHE1_DMA_GET)); in nv04_fifo_unload_context()
246 tmp = nv_rd32(dev, NV04_PFIFO_CACHE1_DMA_DCOUNT) << 16; in nv04_fifo_unload_context()
[all …]
nv10_fifo.c
38 return nv_rd32(dev, NV03_PFIFO_CACHE1_PUSH1) & in nv10_fifo_channel_id()
77 nv_rd32(dev, NV04_PFIFO_MODE) | (1 << chan->id)); in nv10_fifo_create_context()
128 tmp = nv_rd32(dev, NV04_PFIFO_CACHE1_DMA_CTL) & ~(1 << 31); in nv10_fifo_load_context()
147 nv_wi32(dev, fc + 0, nv_rd32(dev, NV04_PFIFO_CACHE1_DMA_PUT)); in nv10_fifo_unload_context()
148 nv_wi32(dev, fc + 4, nv_rd32(dev, NV04_PFIFO_CACHE1_DMA_GET)); in nv10_fifo_unload_context()
149 nv_wi32(dev, fc + 8, nv_rd32(dev, NV10_PFIFO_CACHE1_REF_CNT)); in nv10_fifo_unload_context()
150 tmp = nv_rd32(dev, NV04_PFIFO_CACHE1_DMA_INSTANCE) & 0xFFFF; in nv10_fifo_unload_context()
151 tmp |= (nv_rd32(dev, NV04_PFIFO_CACHE1_DMA_DCOUNT) << 16); in nv10_fifo_unload_context()
153 nv_wi32(dev, fc + 16, nv_rd32(dev, NV04_PFIFO_CACHE1_DMA_STATE)); in nv10_fifo_unload_context()
154 nv_wi32(dev, fc + 20, nv_rd32(dev, NV04_PFIFO_CACHE1_DMA_FETCH)); in nv10_fifo_unload_context()
[all …]
nv40_graph.c
68 if ((nv_rd32(dev, 0x003204) & 0x0000001f) == chan->id) in nv40_graph_context_new()
88 if (nv_rd32(dev, 0x40032c) == inst) in nv40_graph_context_del()
90 if (nv_rd32(dev, 0x400330) == inst) in nv40_graph_context_del()
191 nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) & in nv40_graph_init()
193 nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) | in nv40_graph_init()
229 j = nv_rd32(dev, 0x1540) & 0xff; in nv40_graph_init()
310 nv_wr32(dev, 0x4009A4, nv_rd32(dev, NV04_PFB_CFG0)); in nv40_graph_init()
311 nv_wr32(dev, 0x4009A8, nv_rd32(dev, NV04_PFB_CFG1)); in nv40_graph_init()
312 nv_wr32(dev, 0x4069A4, nv_rd32(dev, NV04_PFB_CFG0)); in nv40_graph_init()
313 nv_wr32(dev, 0x4069A8, nv_rd32(dev, NV04_PFB_CFG1)); in nv40_graph_init()
[all …]
nv50_display.c
64 mask |= nv_rd32(dev, NV50_PDISPLAY_SOR_MODE_CTRL_C(i)); in nv50_display_active_crtcs()
67 mask |= nv_rd32(dev, NV90_PDISPLAY_SOR_MODE_CTRL_C(i)); in nv50_display_active_crtcs()
71 mask |= nv_rd32(dev, NV50_PDISPLAY_DAC_MODE_CTRL_C(i)); in nv50_display_active_crtcs()
94 u32 ctrl = nv_rd32(dev, 0x610200); in nv50_display_early_init()
172 nv_wr32(dev, 0x00610184, nv_rd32(dev, 0x00614004)); in nv50_display_init()
180 val = nv_rd32(dev, 0x00616100 + (i * 0x800)); in nv50_display_init()
182 val = nv_rd32(dev, 0x00616104 + (i * 0x800)); in nv50_display_init()
184 val = nv_rd32(dev, 0x00616108 + (i * 0x800)); in nv50_display_init()
186 val = nv_rd32(dev, 0x0061610c + (i * 0x800)); in nv50_display_init()
192 val = nv_rd32(dev, 0x0061a000 + (i * 0x800)); in nv50_display_init()
[all …]
nv50_pm.c
60 return nv_rd32(dev, 0x004700); in read_div()
64 return nv_rd32(dev, 0x004800); in read_div()
75 u32 rsel = nv_rd32(dev, 0x00e18c); in read_pll_src()
91 coef = nv_rd32(dev, 0x00e81c + (id * 0x0c)); in read_pll_src()
100 coef = nv_rd32(dev, 0x00e81c); in read_pll_src()
108 rsel = nv_rd32(dev, 0x00c050); in read_pll_src()
126 coef = nv_rd32(dev, 0x00e81c + (id * 0x28)); in read_pll_src()
127 P = (nv_rd32(dev, 0x00e824 + (id * 0x28)) >> 16) & 7; in read_pll_src()
144 u32 src, mast = nv_rd32(dev, 0x00c040); in read_pll_ref()
175 u32 mast = nv_rd32(dev, 0x00c040); in read_pll()
[all …]
nv10_graph.c
403 state[__i] = nv_rd32(dev, NV10_PGRAPH_PIPE_DATA); \
442 xfmode0 = nv_rd32(dev, NV10_PGRAPH_XFMODE0); in nv10_graph_load_pipe()
443 xfmode1 = nv_rd32(dev, NV10_PGRAPH_XFMODE1); in nv10_graph_load_pipe()
674 int class = nv_rd32(dev, NV10_PGRAPH_CTX_CACHE(i, 0)) & 0xfff; in nv10_graph_load_dma_vtxbuf()
686 ctx_user = nv_rd32(dev, NV10_PGRAPH_CTX_USER); in nv10_graph_load_dma_vtxbuf()
688 ctx_switch[i] = nv_rd32(dev, NV10_PGRAPH_CTX_SWITCH(i)); in nv10_graph_load_dma_vtxbuf()
691 st2 = nv_rd32(dev, NV10_PGRAPH_FFINTFC_ST2); in nv10_graph_load_dma_vtxbuf()
692 st2_dl = nv_rd32(dev, NV10_PGRAPH_FFINTFC_ST2_DL); in nv10_graph_load_dma_vtxbuf()
693 st2_dh = nv_rd32(dev, NV10_PGRAPH_FFINTFC_ST2_DH); in nv10_graph_load_dma_vtxbuf()
694 fifo_ptr = nv_rd32(dev, NV10_PGRAPH_FFINTFC_FIFO_PTR); in nv10_graph_load_dma_vtxbuf()
[all …]
nv04_timer.c
39 if (!nv_rd32(dev, NV04_PTIMER_NUMERATOR) || in nv04_timer_init()
40 !nv_rd32(dev, NV04_PTIMER_DENOMINATOR)) { in nv04_timer_init()
74 hi = nv_rd32(dev, NV04_PTIMER_TIME_1); in nv04_timer_read()
75 lo = nv_rd32(dev, NV04_PTIMER_TIME_0); in nv04_timer_read()
76 } while (hi != nv_rd32(dev, NV04_PTIMER_TIME_1)); in nv04_timer_read()
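The nv04_timer_read() hits just above are the classic split-counter read: the high word is re-read until it is stable, so a low-word wraparound between the two reads cannot go unnoticed. A sketch of the complete loop, assuming the same accessors as the sketch near the top; only the three reads appear in the hits, and the final combination into a 64-bit value is assumed.

/* Read the split PTIMER counter consistently (sketch). */
static u64 ptimer_read_sketch(struct drm_device *dev)
{
	u32 hi, lo;

	do {
		hi = nv_rd32(dev, NV04_PTIMER_TIME_1);
		lo = nv_rd32(dev, NV04_PTIMER_TIME_0);
	} while (hi != nv_rd32(dev, NV04_PTIMER_TIME_1));	/* retry if the high word moved */

	return ((u64)hi << 32) | lo;
}
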
nv50_gpio.c
65 return !!(nv_rd32(dev, reg) & (4 << shift)); in nv50_gpio_sense()
90 return !!(nv_rd32(dev, 0x00d610 + (line * 4)) & 0x00004000); in nvd0_gpio_sense()
100 intr0 = nv_rd32(dev, 0xe054) & nv_rd32(dev, 0xe050); in nv50_gpio_isr()
102 intr1 = nv_rd32(dev, 0xe074) & nv_rd32(dev, 0xe070); in nv50_gpio_isr()
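The nv50_gpio_sense()/nvd0_gpio_sense() hits just above reduce a register read to a boolean line state with a double negation. A one-line sketch of that idiom (the wrapper and its parameters mirror the hit; nothing else is added).

/* Report a GPIO line's input state as 0 or 1 (sketch). */
static int gpio_sense_sketch(struct drm_device *dev, u32 reg, int shift)
{
	return !!(nv_rd32(dev, reg) & (4 << shift));
}
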
nv40_pm.c
36 u32 ctrl = nv_rd32(dev, reg + 0x00); in read_pll_1()
51 u32 ctrl = nv_rd32(dev, reg + 0x00); in read_pll_2()
52 u32 coef = nv_rd32(dev, reg + 0x04); in read_pll_2()
91 u32 ctrl = nv_rd32(dev, 0x00c040); in nv40_pm_clocks_get()
215 if ((nv_rd32(dev, 0x400760) & 0x000000f0) >> 4 != in nv40_pm_gr_idle()
216 (nv_rd32(dev, 0x400760) & 0x0000000f)) in nv40_pm_gr_idle()
219 if (nv_rd32(dev, 0x400700)) in nv40_pm_gr_idle()
238 u32 vbl = nv_rd32(dev, 0x600808 + (i * 0x2000)); in nv40_pm_clocks_set()
241 if (vbl != nv_rd32(dev, 0x600808 + (i * 0x2000))) { in nv40_pm_clocks_set()
357 u32 reg = nv_rd32(dev, 0x0010f0); in nv40_pm_pwm_get()
[all …]
nvc0_fifo.c
58 if (!(nv_rd32(dev, 0x3004 + (i * 8)) & 1)) in nvc0_fifo_playlist_update()
180 NV_WARN(dev, "0x2634 != chid: 0x%08x\n", nv_rd32(dev, 0x2634)); in nvc0_fifo_destroy_context()
213 if (!(nv_rd32(dev, 0x003004 + (i * 8)) & 1)) in nvc0_fifo_unload_context()
220 i, nv_rd32(dev, 0x002634)); in nvc0_fifo_unload_context()
311 priv->spoon_nr = hweight32(nv_rd32(dev, 0x002204)); in nvc0_fifo_init()
418 u32 inst = nv_rd32(dev, 0x2800 + (unit * 0x10)); in nvc0_fifo_isr_vm_fault()
419 u32 valo = nv_rd32(dev, 0x2804 + (unit * 0x10)); in nvc0_fifo_isr_vm_fault()
420 u32 vahi = nv_rd32(dev, 0x2808 + (unit * 0x10)); in nvc0_fifo_isr_vm_fault()
421 u32 stat = nv_rd32(dev, 0x280c + (unit * 0x10)); in nvc0_fifo_isr_vm_fault()
460 u32 stat = nv_rd32(dev, 0x040108 + (unit * 0x2000)); in nvc0_fifo_isr_subfifo_intr()
[all …]
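
The nvc0_fifo_isr_vm_fault() hits just above read a four-register fault record at a 0x10 stride per fault unit. A sketch that gathers those reads into one record, assuming the same accessors; the struct layout and the high/low address combination are assumptions, only the four register reads come from the hits.

/* Hypothetical container for one VM fault record (sketch). */
struct vm_fault_sketch {
	u32 inst;	/* 0x2800 + unit*0x10: faulting instance */
	u32 stat;	/* 0x280c + unit*0x10: status bits */
	u64 vaddr;	/* 0x2804/0x2808: low/high words of the virtual address */
};

static void read_vm_fault_sketch(struct drm_device *dev, int unit,
				 struct vm_fault_sketch *f)
{
	u32 valo = nv_rd32(dev, 0x2804 + (unit * 0x10));
	u32 vahi = nv_rd32(dev, 0x2808 + (unit * 0x10));

	f->inst  = nv_rd32(dev, 0x2800 + (unit * 0x10));
	f->stat  = nv_rd32(dev, 0x280c + (unit * 0x10));
	f->vaddr = ((u64)vahi << 32) | valo;	/* assumed hi/lo order */
}
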
nva3_copy.c
91 if (nv_rd32(dev, 0x104050) == inst) in nva3_copy_context_del()
94 if (nv_rd32(dev, 0x104054) == inst) in nva3_copy_context_del()
167 u32 dispatch = nv_rd32(dev, 0x10401c); in nva3_copy_isr()
168 u32 stat = nv_rd32(dev, 0x104008) & dispatch & ~(dispatch >> 16); in nva3_copy_isr()
169 u32 inst = nv_rd32(dev, 0x104050) & 0x3fffffff; in nva3_copy_isr()
170 u32 ssta = nv_rd32(dev, 0x104040) & 0x0000ffff; in nva3_copy_isr()
171 u32 addr = nv_rd32(dev, 0x104040) >> 16; in nva3_copy_isr()
174 u32 data = nv_rd32(dev, 0x104044); in nva3_copy_isr()
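The nva3_copy_isr() hits just above (and the similar nvc0_copy_isr() ones further down) read the pending-interrupt register and immediately mask it with the dispatch register, dropping bits whose counterpart in the upper half is set. A sketch of just that gathering step, assuming the same accessors; the NV_DEBUG printout is illustrative and no acknowledge write is shown.

/* Gather and mask PCOPY interrupt state as in the hits (sketch). */
static void copy_isr_sketch(struct drm_device *dev)
{
	u32 dispatch = nv_rd32(dev, 0x10401c);
	u32 stat = nv_rd32(dev, 0x104008) & dispatch & ~(dispatch >> 16);
	u32 ssta = nv_rd32(dev, 0x104040) & 0x0000ffff;
	u32 addr = nv_rd32(dev, 0x104040) >> 16;
	u32 data = nv_rd32(dev, 0x104044);

	if (stat)
		NV_DEBUG(dev, "PCOPY: stat 0x%08x ssta 0x%04x addr 0x%04x data 0x%08x\n",
			 stat, ssta, addr, data);
}
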
nva3_pm.c
36 u32 sctl = nv_rd32(dev, 0x4120 + (clk * 4)); in read_vco()
52 return nv_rd32(dev, 0x00471c) * 1000; in read_clk()
58 sctl = nv_rd32(dev, 0x4120 + (clk * 4)); in read_clk()
81 u32 ctrl = nv_rd32(dev, pll + 0); in read_pll()
86 u32 coef = nv_rd32(dev, pll + 4); in read_pll()
172 reg->clk = nv_rd32(dev, 0x4120 + (clk * 4)); in calc_clk()
192 cntl = nv_rd32(dev, ctrl) & 0xfffffff2; in prog_pll()
283 if (!(nv_rd32(dev, 0x400304) & 0x00000001)) in nva3_pm_grcp_idle()
285 if (nv_rd32(dev, 0x400308) == 0x0050001c) in nva3_pm_grcp_idle()
324 if (nv_rd32(dev, 0x4000) & 0x00000008) in nva3_pm_clocks_set()
[all …]
nv31_mpeg.c
75 if ((nv_rd32(dev, 0x003204) & 0x1f) == chan->id) in nv40_mpeg_context_new()
96 if (nv_rd32(dev, 0x00b318) == inst) in nv40_mpeg_context_del()
154 NV_ERROR(dev, "PMPEG init: 0x%08x\n", nv_rd32(dev, 0x00b200)); in nv31_mpeg_init()
247 u32 inst = (nv_rd32(dev, 0x00b318) & 0x000fffff) << 4; in nv31_mpeg_isr()
249 u32 stat = nv_rd32(dev, 0x00b100); in nv31_mpeg_isr()
250 u32 type = nv_rd32(dev, 0x00b230); in nv31_mpeg_isr()
251 u32 mthd = nv_rd32(dev, 0x00b234); in nv31_mpeg_isr()
252 u32 data = nv_rd32(dev, 0x00b238); in nv31_mpeg_isr()
280 if (nv_rd32(dev, 0x00b100)) in nv31_vpe_isr()
283 if (nv_rd32(dev, 0x00b800)) { in nv31_vpe_isr()
[all …]
nv50_mpeg.c
94 if (nv_rd32(dev, 0x00b318) == inst) in nv50_mpeg_context_del()
155 NV_ERROR(dev, "PMPEG init: 0x%08x\n", nv_rd32(dev, 0x00b200)); in nv50_mpeg_init()
174 u32 stat = nv_rd32(dev, 0x00b100); in nv50_mpeg_isr()
175 u32 type = nv_rd32(dev, 0x00b230); in nv50_mpeg_isr()
176 u32 mthd = nv_rd32(dev, 0x00b234); in nv50_mpeg_isr()
177 u32 data = nv_rd32(dev, 0x00b238); in nv50_mpeg_isr()
201 if (nv_rd32(dev, 0x00b100)) in nv50_vpe_isr()
204 if (nv_rd32(dev, 0x00b800)) { in nv50_vpe_isr()
205 u32 stat = nv_rd32(dev, 0x00b800); in nv50_vpe_isr()
nouveau_backlight.c
46 int val = (nv_rd32(dev, NV40_PMC_BACKLIGHT) & NV40_PMC_BACKLIGHT_MASK) in nv40_get_intensity()
57 int reg = nv_rd32(dev, NV40_PMC_BACKLIGHT); in nv40_set_intensity()
79 if (!(nv_rd32(dev, NV40_PMC_BACKLIGHT) & NV40_PMC_BACKLIGHT_MASK)) in nv40_backlight_init()
106 val = nv_rd32(dev, NV50_PDISP_SOR_PWM_CTL(or)); in nv50_get_intensity()
139 div = nv_rd32(dev, NV50_PDISP_SOR_PWM_DIV(or)); in nva3_get_intensity()
140 val = nv_rd32(dev, NV50_PDISP_SOR_PWM_CTL(or)); in nva3_get_intensity()
156 div = nv_rd32(dev, NV50_PDISP_SOR_PWM_DIV(or)); in nva3_set_intensity()
191 if (!nv_rd32(dev, NV50_PDISP_SOR_PWM_CTL(nv_encoder->or))) in nv50_backlight_init()
nvc0_vram.c
109 u32 parts = nv_rd32(dev, 0x022438); in nvc0_vram_init()
110 u32 pmask = nv_rd32(dev, 0x022554); in nvc0_vram_init()
111 u32 bsize = nv_rd32(dev, 0x10f20c); in nvc0_vram_init()
116 NV_DEBUG(dev, "0x100800: 0x%08x\n", nv_rd32(dev, 0x100800)); in nvc0_vram_init()
120 dev_priv->vram_rank_B = !!(nv_rd32(dev, 0x10f200) & 0x00000004); in nvc0_vram_init()
125 u32 psize = nv_rd32(dev, 0x11020c + (part * 0x1000)); in nvc0_vram_init()
nv50_vram.c
149 r0 = nv_rd32(dev, 0x100200); in nv50_vram_rblock()
150 r4 = nv_rd32(dev, 0x100204); in nv50_vram_rblock()
151 rt = nv_rd32(dev, 0x100250); in nv50_vram_rblock()
152 ru = nv_rd32(dev, 0x001540); in nv50_vram_rblock()
192 u32 pfb714 = nv_rd32(dev, 0x100714); in nv50_vram_init()
210 dev_priv->vram_rank_B = !!(nv_rd32(dev, 0x100200) & 0x4); in nv50_vram_init()
211 dev_priv->vram_size = nv_rd32(dev, 0x10020c); in nv50_vram_init()
219 dev_priv->vram_sys_base = (u64)nv_rd32(dev, 0x100e10) << 12; in nv50_vram_init()
nv40_fb.c
60 vinst = nv_rd32(dev, 0x10020c); in nv44_fb_init_gart()
86 u32 pbus1218 = nv_rd32(dev, 0x001218); in nv40_fb_vram_init()
95 u32 pfb914 = nv_rd32(dev, 0x100914); in nv40_fb_vram_init()
104 u32 pfb474 = nv_rd32(dev, 0x100474); in nv40_fb_vram_init()
115 dev_priv->vram_size = nv_rd32(dev, 0x10020c) & 0xff000000; in nv40_fb_vram_init()
137 tmp = nv_rd32(dev, NV10_PFB_CLOSE_PAGE2); in nv40_fb_init()
nv20_graph.c
64 tmp = nv_rd32(dev, NV10_PGRAPH_CTX_USER) & 0x00ffffff; in nv20_graph_unload_context()
506 nv_rd32(dev, NV03_PMC_ENABLE) & ~NV_PMC_ENABLE_PGRAPH); in nv20_graph_init()
508 nv_rd32(dev, NV03_PMC_ENABLE) | NV_PMC_ENABLE_PGRAPH); in nv20_graph_init()
548 nv_wr32(dev, 0x4009a0, nv_rd32(dev, 0x100324)); in nv20_graph_init()
550 nv_wr32(dev, NV10_PGRAPH_RDI_DATA, nv_rd32(dev, 0x100324)); in nv20_graph_init()
555 tmp = nv_rd32(dev, NV10_PGRAPH_SURFACE) & 0x0007ff00; in nv20_graph_init()
557 tmp = nv_rd32(dev, NV10_PGRAPH_SURFACE) | 0x00020100; in nv20_graph_init()
562 nv_wr32(dev, 0x4009A4, nv_rd32(dev, NV04_PFB_CFG0)); in nv20_graph_init()
563 nv_wr32(dev, 0x4009A8, nv_rd32(dev, NV04_PFB_CFG1)); in nv20_graph_init()
565 nv_wr32(dev, NV10_PGRAPH_RDI_DATA , nv_rd32(dev, NV04_PFB_CFG0)); in nv20_graph_init()
[all …]
nvc0_graph.c
41 nv_rd32(dev, base + 0x400)); in nvc0_graph_ctxctl_debug_unit()
43 nv_rd32(dev, base + 0x800), nv_rd32(dev, base + 0x804), in nvc0_graph_ctxctl_debug_unit()
44 nv_rd32(dev, base + 0x808), nv_rd32(dev, base + 0x80c)); in nvc0_graph_ctxctl_debug_unit()
46 nv_rd32(dev, base + 0x810), nv_rd32(dev, base + 0x814), in nvc0_graph_ctxctl_debug_unit()
47 nv_rd32(dev, base + 0x818), nv_rd32(dev, base + 0x81c)); in nvc0_graph_ctxctl_debug_unit()
53 u32 gpcnr = nv_rd32(dev, 0x409604) & 0xffff; in nvc0_graph_ctxctl_debug()
411 nv_wr32(dev, GPC_BCAST(0x08ac), nv_rd32(dev, 0x100800)); in nvc0_graph_init_gpc_0()
532 priv->grctx_size = nv_rd32(dev, 0x409804); in nvc0_graph_init_ctxctl()
562 priv->grctx_size = nv_rd32(dev, 0x409800); in nvc0_graph_init_ctxctl()
644 u32 ustat = nv_rd32(dev, 0x409c18); in nvc0_graph_ctxctl_isr()
[all …]
nv84_crypt.c
79 if (nv_rd32(dev, 0x102188) == inst) in nv84_crypt_context_del()
81 if (nv_rd32(dev, 0x10218c) == inst) in nv84_crypt_context_del()
123 u32 stat = nv_rd32(dev, 0x102130); in nv84_crypt_isr()
124 u32 mthd = nv_rd32(dev, 0x102190); in nv84_crypt_isr()
125 u32 data = nv_rd32(dev, 0x102194); in nv84_crypt_isr()
126 u32 inst = nv_rd32(dev, 0x102188) & 0x7fffffff; in nv84_crypt_isr()
nvc0_copy.c
86 if (nv_rd32(dev, pcopy->fuc + 0x050) == inst) in nvc0_copy_context_del()
89 if (nv_rd32(dev, pcopy->fuc + 0x054) == inst) in nvc0_copy_context_del()
157 u32 disp = nv_rd32(dev, pcopy->fuc + 0x01c); in nvc0_copy_isr()
158 u32 stat = nv_rd32(dev, pcopy->fuc + 0x008) & disp & ~(disp >> 16); in nvc0_copy_isr()
159 u64 inst = (u64)(nv_rd32(dev, pcopy->fuc + 0x050) & 0x0fffffff) << 12; in nvc0_copy_isr()
161 u32 ssta = nv_rd32(dev, pcopy->fuc + 0x040) & 0x0000ffff; in nvc0_copy_isr()
162 u32 addr = nv_rd32(dev, pcopy->fuc + 0x040) >> 16; in nvc0_copy_isr()
165 u32 data = nv_rd32(dev, pcopy->fuc + 0x044); in nvc0_copy_isr()
