Lines Matching refs:nvkm_rd32

71 u32 tmp = nvkm_rd32(device, NV04_PFIFO_CACHE1_PULL0);  in nv04_fifo_pause()
76 if (nvkm_rd32(device, NV04_PFIFO_CACHE1_PULL0) & in nv04_fifo_pause()
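The two reads above form a poll loop: nv04_fifo_pause() keeps re-reading NV04_PFIFO_CACHE1_PULL0 until the puller reports idle. Below is a minimal, self-contained sketch of that pattern; nvkm_rd32()/nvkm_wr32() are mocked over a plain array so it runs in user space, and the "hash busy" bit value is an assumption for illustration, not taken from the driver headers.

/*
 * Hedged sketch only: mocked register accessors, not the kernel's nvkm API.
 * The PULL0 offset matches the raw 0x003250 read seen later in this listing;
 * the busy-bit value below is assumed for the sake of the example.
 */
#include <stdint.h>
#include <stdio.h>

#define NV04_PFIFO_CACHE1_PULL0            0x003250
#define NV04_PFIFO_CACHE1_PULL0_HASH_BUSY  0x00000010  /* assumed bit for this sketch */

struct nvkm_device { uint32_t mmio[0x10000 / 4]; };    /* fake BAR0 window */

static uint32_t nvkm_rd32(struct nvkm_device *d, uint32_t addr) { return d->mmio[addr / 4]; }
static void nvkm_wr32(struct nvkm_device *d, uint32_t addr, uint32_t v) { d->mmio[addr / 4] = v; }

int main(void)
{
	struct nvkm_device dev = { { 0 } };
	int timeout = 2000;

	/* Pretend the puller is busy at first. */
	nvkm_wr32(&dev, NV04_PFIFO_CACHE1_PULL0, NV04_PFIFO_CACHE1_PULL0_HASH_BUSY);

	while (timeout--) {
		uint32_t tmp = nvkm_rd32(&dev, NV04_PFIFO_CACHE1_PULL0);
		if (!(tmp & NV04_PFIFO_CACHE1_PULL0_HASH_BUSY))
			break;
		/* a real driver would delay here (nvkm_msec-style); the mock just clears the bit */
		nvkm_wr32(&dev, NV04_PFIFO_CACHE1_PULL0, 0);
	}

	printf("puller idle, PULL0=0x%08x\n", nvkm_rd32(&dev, NV04_PFIFO_CACHE1_PULL0));
	return 0;
}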
146 u32 engine = nvkm_rd32(device, 0x003280); in nv04_fifo_swmthd()
154 data = nvkm_rd32(device, 0x003258) & 0x0000ffff; in nv04_fifo_swmthd()
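nv04_fifo_swmthd() reads the engine selector at 0x003280 and keeps only the low 16 bits of the data word at 0x003258. A small illustrative sketch of that masking, with mocked reads (the stored values are invented for the example):

#include <stdint.h>
#include <stdio.h>

static uint32_t fake_regs[0x10000 / 4];        /* stand-in for the MMIO window */

static uint32_t nvkm_rd32_mock(uint32_t addr) { return fake_regs[addr / 4]; }

int main(void)
{
	fake_regs[0x003280 / 4] = 0x00000001;      /* pretend engine bits are set     */
	fake_regs[0x003258 / 4] = 0xdead0042;      /* method data, only low 16 wanted */

	uint32_t engine = nvkm_rd32_mock(0x003280);
	uint32_t data   = nvkm_rd32_mock(0x003258) & 0x0000ffff;

	printf("engine=0x%08x data=0x%04x\n", engine, data);
	return 0;
}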
175 u32 pull0 = nvkm_rd32(device, 0x003250); in nv04_fifo_cache_error()
187 mthd = nvkm_rd32(device, NV04_PFIFO_CACHE1_METHOD(ptr)); in nv04_fifo_cache_error()
188 data = nvkm_rd32(device, NV04_PFIFO_CACHE1_DATA(ptr)); in nv04_fifo_cache_error()
190 mthd = nvkm_rd32(device, NV40_PFIFO_CACHE1_METHOD(ptr)); in nv04_fifo_cache_error()
191 data = nvkm_rd32(device, NV40_PFIFO_CACHE1_DATA(ptr)); in nv04_fifo_cache_error()
208 nvkm_rd32(device, NV03_PFIFO_CACHE1_PUSH0) & ~1); in nv04_fifo_cache_error()
211 nvkm_rd32(device, NV03_PFIFO_CACHE1_PUSH0) | 1); in nv04_fifo_cache_error()
215 nvkm_rd32(device, NV04_PFIFO_CACHE1_DMA_PUSH) | 1); in nv04_fifo_cache_error()
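The last three reads in nv04_fifo_cache_error() are part of a read-modify-write sequence: fetch PUSH0, write it back with bit 0 cleared to stop the pusher, set bit 0 again to restart it, then OR bit 0 into DMA_PUSH. A sketch of that pattern with mocked accessors is below; the PUSH0 offset is a placeholder for this example, while the DMA_PUSH offset matches the raw 0x003220 read later in the listing.

#include <stdint.h>
#include <stdio.h>

#define PUSH0     0x003200   /* placeholder offset for NV03_PFIFO_CACHE1_PUSH0      */
#define DMA_PUSH  0x003220   /* matches the raw 0x003220 read in nv04_fifo_dma_pusher */

static uint32_t regs[0x10000 / 4];

static uint32_t rd32(uint32_t a)             { return regs[a / 4]; }
static void     wr32(uint32_t a, uint32_t v) { regs[a / 4] = v; }

int main(void)
{
	regs[PUSH0 / 4]    = 0x00000101;          /* arbitrary starting state          */
	regs[DMA_PUSH / 4] = 0x00000000;

	wr32(PUSH0, rd32(PUSH0) & ~1u);           /* stop the pusher while recovering  */
	wr32(PUSH0, rd32(PUSH0) | 1u);            /* ...then turn it back on           */
	wr32(DMA_PUSH, rd32(DMA_PUSH) | 1u);      /* re-enable DMA push as well        */

	printf("PUSH0=0x%08x DMA_PUSH=0x%08x\n", rd32(PUSH0), rd32(DMA_PUSH));
	return 0;
}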
224 u32 dma_get = nvkm_rd32(device, 0x003244); in nv04_fifo_dma_pusher()
225 u32 dma_put = nvkm_rd32(device, 0x003240); in nv04_fifo_dma_pusher()
226 u32 push = nvkm_rd32(device, 0x003220); in nv04_fifo_dma_pusher()
227 u32 state = nvkm_rd32(device, 0x003228); in nv04_fifo_dma_pusher()
235 u32 ho_get = nvkm_rd32(device, 0x003328); in nv04_fifo_dma_pusher()
236 u32 ho_put = nvkm_rd32(device, 0x003320); in nv04_fifo_dma_pusher()
237 u32 ib_get = nvkm_rd32(device, 0x003334); in nv04_fifo_dma_pusher()
238 u32 ib_put = nvkm_rd32(device, 0x003330); in nv04_fifo_dma_pusher()
277 u32 mask = nvkm_rd32(device, NV03_PFIFO_INTR_EN_0); in nv04_fifo_intr()
278 u32 stat = nvkm_rd32(device, NV03_PFIFO_INTR_0) & mask; in nv04_fifo_intr()
281 reassign = nvkm_rd32(device, NV03_PFIFO_CACHES) & 1; in nv04_fifo_intr()
284 chid = nvkm_rd32(device, NV03_PFIFO_CACHE1_PUSH1) & (fifo->base.nr - 1); in nv04_fifo_intr()
285 get = nvkm_rd32(device, NV03_PFIFO_CACHE1_GET); in nv04_fifo_intr()
301 sem = nvkm_rd32(device, NV10_PFIFO_CACHE1_SEMAPHORE); in nv04_fifo_intr()
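The interrupt path reads the enable mask and the raw status, ANDs them to get the pending bits, and takes the active channel id from the low bits of CACHE1_PUSH1. A sketch of that dispatch arithmetic with mocked values; the channel count is assumed to be a power of two, as implied by the "& (fifo->base.nr - 1)" above.

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint32_t intr_en  = 0x00000111;          /* pretend NV03_PFIFO_INTR_EN_0 */
	uint32_t intr_raw = 0x00000110;          /* pretend NV03_PFIFO_INTR_0    */
	uint32_t push1    = 0x00000013;          /* pretend CACHE1_PUSH1         */
	uint32_t fifo_nr  = 16;                  /* assumed channel count        */

	uint32_t stat = intr_raw & intr_en;      /* only enabled interrupts matter */
	uint32_t chid = push1 & (fifo_nr - 1);   /* channel that raised them       */

	printf("stat=0x%08x chid=%u\n", stat, chid);
	return 0;
}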