/linux-5.19.10/drivers/gpu/drm/nouveau/nvkm/engine/gr/ |
D | nv50.c |
    35   return nvkm_rd32(gr->engine.subdev.device, 0x1540);  in nv50_gr_units()
    244  u32 e0c = nvkm_rd32(device, ustatus_addr + 0x04);  in nv50_gr_prop_trap()
    245  u32 e10 = nvkm_rd32(device, ustatus_addr + 0x08);  in nv50_gr_prop_trap()
    246  u32 e14 = nvkm_rd32(device, ustatus_addr + 0x0c);  in nv50_gr_prop_trap()
    247  u32 e18 = nvkm_rd32(device, ustatus_addr + 0x10);  in nv50_gr_prop_trap()
    248  u32 e1c = nvkm_rd32(device, ustatus_addr + 0x14);  in nv50_gr_prop_trap()
    249  u32 e20 = nvkm_rd32(device, ustatus_addr + 0x18);  in nv50_gr_prop_trap()
    250  u32 e24 = nvkm_rd32(device, ustatus_addr + 0x1c);  in nv50_gr_prop_trap()
    286  u32 units = nvkm_rd32(device, 0x1540);  in nv50_gr_mp_trap()
    298  mp10 = nvkm_rd32(device, addr + 0x10);  in nv50_gr_mp_trap()
    [all …]
|
D | nv40.c |
    36   return nvkm_rd32(gr->engine.subdev.device, 0x1540);  in nv40_gr_units()
    103  if (nvkm_rd32(device, 0x40032c) == inst) {  in nv40_gr_chan_fini()
    110  if (!(nvkm_rd32(device, 0x400300) & 0x00000001))  in nv40_gr_chan_fini()
    113  u32 insn = nvkm_rd32(device, 0x400308);  in nv40_gr_chan_fini()
    122  if (nvkm_rd32(device, 0x400330) == inst)  in nv40_gr_chan_fini()
    238  u32 stat = nvkm_rd32(device, NV03_PGRAPH_INTR);  in nv40_gr_intr()
    239  u32 nsource = nvkm_rd32(device, NV03_PGRAPH_NSOURCE);  in nv40_gr_intr()
    240  u32 nstatus = nvkm_rd32(device, NV03_PGRAPH_NSTATUS);  in nv40_gr_intr()
    241  u32 inst = nvkm_rd32(device, 0x40032c) & 0x000fffff;  in nv40_gr_intr()
    242  u32 addr = nvkm_rd32(device, NV04_PGRAPH_TRAPPED_ADDR);  in nv40_gr_intr()
    [all …]
|
D | g84.c |
    134  for (tmp = nvkm_rd32(device, 0x400380); tmp && idle; tmp >>= 3) {  in g84_gr_tlb_flush()
    139  for (tmp = nvkm_rd32(device, 0x400384); tmp && idle; tmp >>= 3) {  in g84_gr_tlb_flush()
    144  for (tmp = nvkm_rd32(device, 0x400388); tmp && idle; tmp >>= 3) {  in g84_gr_tlb_flush()
    154  tmp = nvkm_rd32(device, 0x400700);  in g84_gr_tlb_flush()
    159  nvkm_rd32(device, 0x400380));  in g84_gr_tlb_flush()
    161  nvkm_rd32(device, 0x400384));  in g84_gr_tlb_flush()
    163  nvkm_rd32(device, 0x400388));  in g84_gr_tlb_flush()
    169  if (!(nvkm_rd32(device, 0x100c80) & 0x00000001))  in g84_gr_tlb_flush()
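The g84_gr_tlb_flush() hits above poll three status registers (0x400380/0x400384/0x400388) and walk each value in 3-bit groups ("tmp >>= 3") before deciding the engine is idle enough to flush. The sketch below reproduces only that scan pattern; nvkm_rd32() is replaced by a stub with a canned value, and the per-group busy test ((tmp & 7) == 1) is an assumption of this sketch rather than something visible in the listing.

```c
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Stand-in for nvkm_rd32(device, addr); returns a canned value so the
 * example runs outside the kernel. Illustration only. */
static uint32_t rd32_stub(uint32_t addr)
{
	return addr == 0x400380 ? 0x00000041 : 0x00000000;
}

/* Walk a status word three bits at a time, as the loops in
 * g84_gr_tlb_flush() do; which 3-bit pattern means "busy" is an
 * assumption here. */
static bool units_idle(uint32_t addr)
{
	uint32_t tmp;

	for (tmp = rd32_stub(addr); tmp; tmp >>= 3) {
		if ((tmp & 7) == 1)
			return false;
	}
	return true;
}

int main(void)
{
	printf("0x400380 idle: %d\n", units_idle(0x400380));
	printf("0x400384 idle: %d\n", units_idle(0x400384));
	return 0;
}
```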
|
D | nv20.c |
    39   if (nvkm_rd32(device, 0x400144) & 0x00010000)  in nv20_gr_chan_fini()
    40   chid = (nvkm_rd32(device, 0x400148) & 0x1f000000) >> 24;  in nv20_gr_chan_fini()
    45   if (!nvkm_rd32(device, 0x400700))  in nv20_gr_chan_fini()
    186  u32 stat = nvkm_rd32(device, NV03_PGRAPH_INTR);  in nv20_gr_intr()
    187  u32 nsource = nvkm_rd32(device, NV03_PGRAPH_NSOURCE);  in nv20_gr_intr()
    188  u32 nstatus = nvkm_rd32(device, NV03_PGRAPH_NSTATUS);  in nv20_gr_intr()
    189  u32 addr = nvkm_rd32(device, NV04_PGRAPH_TRAPPED_ADDR);  in nv20_gr_intr()
    193  u32 data = nvkm_rd32(device, NV04_PGRAPH_TRAPPED_DATA);  in nv20_gr_intr()
    194  u32 class = nvkm_rd32(device, 0x400160 + subc * 4) & 0xfff;  in nv20_gr_intr()
    244  if (!nvkm_rd32(device, 0x400700))  in nv20_gr_init()
    [all …]
|
D | gf100.c |
    721  return nvkm_rd32(gr->engine.subdev.device, 0x409b00);  in gf100_gr_ctxsw_inst()
    734  u32 stat = nvkm_rd32(device, 0x409804);  in gf100_gr_fecs_ctrl_ctxsw()
    783  u32 stat = nvkm_rd32(device, 0x409800);  in gf100_gr_fecs_bind_pointer()
    803  if (nvkm_rd32(device, 0x409800) == 0x00000001)  in gf100_gr_fecs_set_reglist_virtual_address()
    820  if (nvkm_rd32(device, 0x409800) == 0x00000001)  in gf100_gr_fecs_set_reglist_bind_instance()
    836  if ((*psize = nvkm_rd32(device, 0x409800)))  in gf100_gr_fecs_discover_reglist_image_size()
    874  if ((*psize = nvkm_rd32(device, 0x409800)))  in gf100_gr_fecs_discover_pm_image_size()
    890  if ((*psize = nvkm_rd32(device, 0x409800)))  in gf100_gr_fecs_discover_zcull_image_size()
    906  if ((*psize = nvkm_rd32(device, 0x409800)))  in gf100_gr_fecs_discover_image_size()
    928  u32 trace = nvkm_rd32(gr->base.engine.subdev.device, 0x40981c);  in gf100_gr_chsw_load()
    [all …]
|
/linux-5.19.10/drivers/gpu/drm/nouveau/nvkm/subdev/pmu/ |
D | gt215.c |
    39   addr = nvkm_rd32(device, 0x10a4a0);  in gt215_pmu_send()
    41   u32 tmp = nvkm_rd32(device, 0x10a4b0);  in gt215_pmu_send()
    61   } while (nvkm_rd32(device, 0x10a580) != 0x00000001);  in gt215_pmu_send()
    94   u32 addr = nvkm_rd32(device, 0x10a4cc);  in gt215_pmu_recv()
    95   if (addr == nvkm_rd32(device, 0x10a4c8))  in gt215_pmu_recv()
    101  } while (nvkm_rd32(device, 0x10a580) != 0x00000002);  in gt215_pmu_recv()
    106  process = nvkm_rd32(device, 0x10a1c4);  in gt215_pmu_recv()
    107  message = nvkm_rd32(device, 0x10a1c4);  in gt215_pmu_recv()
    108  data0 = nvkm_rd32(device, 0x10a1c4);  in gt215_pmu_recv()
    109  data1 = nvkm_rd32(device, 0x10a1c4);  in gt215_pmu_recv()
    [all …]
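Both gt215_pmu_send() (hit 61) and gt215_pmu_recv() (hit 101) spin on register 0x10a580 until it holds a specific token (0x1 or 0x2) before touching the message FIFO, and the receive path then pulls four consecutive words (process, message, data0, data1) from 0x10a1c4. A minimal user-space sketch of that wait-for-token pattern follows; the stubbed register read and the bounded retry count are assumptions for illustration, not the driver's actual timeout handling.

```c
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Stand-in for nvkm_rd32(device, 0x10a580); pretends the PMU takes a
 * couple of polls before publishing the expected token. */
static uint32_t rd32_10a580(void)
{
	static int polls;

	return ++polls < 3 ? 0x00000000 : 0x00000001;
}

/* Spin until the handshake register holds the wanted token, as the
 * do/while loops in gt215_pmu_send()/gt215_pmu_recv() do. The retry
 * cap is an assumption of this sketch. */
static bool wait_for_token(uint32_t token, int retries)
{
	do {
		if (rd32_10a580() == token)
			return true;
	} while (--retries > 0);
	return false;
}

int main(void)
{
	printf("send slot acquired: %d\n", wait_for_token(0x00000001, 16));
	return 0;
}
```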
|
/linux-5.19.10/drivers/gpu/drm/nouveau/nvkm/subdev/privring/ |
D | gf100.c |
    31   u32 addr = nvkm_rd32(device, 0x122120 + (i * 0x0400));  in gf100_privring_intr_hub()
    32   u32 data = nvkm_rd32(device, 0x122124 + (i * 0x0400));  in gf100_privring_intr_hub()
    33   u32 stat = nvkm_rd32(device, 0x122128 + (i * 0x0400));  in gf100_privring_intr_hub()
    41   u32 addr = nvkm_rd32(device, 0x124120 + (i * 0x0400));  in gf100_privring_intr_rop()
    42   u32 data = nvkm_rd32(device, 0x124124 + (i * 0x0400));  in gf100_privring_intr_rop()
    43   u32 stat = nvkm_rd32(device, 0x124128 + (i * 0x0400));  in gf100_privring_intr_rop()
    51   u32 addr = nvkm_rd32(device, 0x128120 + (i * 0x0400));  in gf100_privring_intr_gpc()
    52   u32 data = nvkm_rd32(device, 0x128124 + (i * 0x0400));  in gf100_privring_intr_gpc()
    53   u32 stat = nvkm_rd32(device, 0x128128 + (i * 0x0400));  in gf100_privring_intr_gpc()
    61   u32 intr0 = nvkm_rd32(device, 0x121c58);  in gf100_privring_intr()
    [all …]
|
D | gk104.c |
    31   u32 addr = nvkm_rd32(device, 0x122120 + (i * 0x0800));  in gk104_privring_intr_hub()
    32   u32 data = nvkm_rd32(device, 0x122124 + (i * 0x0800));  in gk104_privring_intr_hub()
    33   u32 stat = nvkm_rd32(device, 0x122128 + (i * 0x0800));  in gk104_privring_intr_hub()
    41   u32 addr = nvkm_rd32(device, 0x124120 + (i * 0x0800));  in gk104_privring_intr_rop()
    42   u32 data = nvkm_rd32(device, 0x124124 + (i * 0x0800));  in gk104_privring_intr_rop()
    43   u32 stat = nvkm_rd32(device, 0x124128 + (i * 0x0800));  in gk104_privring_intr_rop()
    51   u32 addr = nvkm_rd32(device, 0x128120 + (i * 0x0800));  in gk104_privring_intr_gpc()
    52   u32 data = nvkm_rd32(device, 0x128124 + (i * 0x0800));  in gk104_privring_intr_gpc()
    53   u32 stat = nvkm_rd32(device, 0x128128 + (i * 0x0800));  in gk104_privring_intr_gpc()
    61   u32 intr0 = nvkm_rd32(device, 0x120058);  in gk104_privring_intr()
    [all …]
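The gf100 and gk104 privring handlers above read the same (addr, data, stat) triplet per faulting unit; only the base (HUB 0x122120, ROP 0x124120, GPC 0x128120) and the per-unit stride (0x0400 on gf100, 0x0800 on gk104) differ. The sketch below shows that address arithmetic with a stubbed register read; the stub values are invented and the helper name is hypothetical.

```c
#include <stdint.h>
#include <stdio.h>

/* Stand-in for nvkm_rd32(device, addr); echoes the offset so the
 * computed addresses are visible in the output. Illustration only. */
static uint32_t rd32_stub(uint32_t addr)
{
	return addr;
}

struct privring_fault {
	uint32_t addr, data, stat;
};

/* Read the per-unit fault triplet as the hits above do:
 * base + 0x0/0x4/0x8, stepped by a per-generation stride. */
static struct privring_fault
privring_fault_read(uint32_t base, uint32_t stride, int unit)
{
	struct privring_fault f;

	f.addr = rd32_stub(base + 0x0 + unit * stride);
	f.data = rd32_stub(base + 0x4 + unit * stride);
	f.stat = rd32_stub(base + 0x8 + unit * stride);
	return f;
}

int main(void)
{
	/* HUB unit 1: gf100 layout uses a 0x0400 stride, gk104 0x0800. */
	struct privring_fault gf100 = privring_fault_read(0x122120, 0x0400, 1);
	struct privring_fault gk104 = privring_fault_read(0x122120, 0x0800, 1);

	printf("gf100: %08x %08x %08x\n", gf100.addr, gf100.data, gf100.stat);
	printf("gk104: %08x %08x %08x\n", gk104.addr, gk104.data, gk104.stat);
	return 0;
}
```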
|
/linux-5.19.10/drivers/gpu/drm/nouveau/nvkm/engine/fifo/ |
D | nv04.c |
    71   u32 tmp = nvkm_rd32(device, NV04_PFIFO_CACHE1_PULL0);  in nv04_fifo_pause()
    76   if (nvkm_rd32(device, NV04_PFIFO_CACHE1_PULL0) &  in nv04_fifo_pause()
    146  u32 engine = nvkm_rd32(device, 0x003280);  in nv04_fifo_swmthd()
    154  data = nvkm_rd32(device, 0x003258) & 0x0000ffff;  in nv04_fifo_swmthd()
    175  u32 pull0 = nvkm_rd32(device, 0x003250);  in nv04_fifo_cache_error()
    187  mthd = nvkm_rd32(device, NV04_PFIFO_CACHE1_METHOD(ptr));  in nv04_fifo_cache_error()
    188  data = nvkm_rd32(device, NV04_PFIFO_CACHE1_DATA(ptr));  in nv04_fifo_cache_error()
    190  mthd = nvkm_rd32(device, NV40_PFIFO_CACHE1_METHOD(ptr));  in nv04_fifo_cache_error()
    191  data = nvkm_rd32(device, NV40_PFIFO_CACHE1_DATA(ptr));  in nv04_fifo_cache_error()
    208  nvkm_rd32(device, NV03_PFIFO_CACHE1_PUSH0) & ~1);  in nv04_fifo_cache_error()
    [all …]
|
D | gf100.c |
    86   !(nvkm_rd32(device, 0x00227c) & 0x00100000),  in gf100_fifo_runlist_commit()
    327  u32 stat = nvkm_rd32(device, 0x002640 + (engn * 0x04));  in gf100_fifo_intr_sched_ctxsw()
    355  u32 intr = nvkm_rd32(device, 0x00254c);  in gf100_fifo_intr_sched()
    376  u32 inst = nvkm_rd32(device, 0x002800 + (unit * 0x10));  in gf100_fifo_intr_fault()
    377  u32 valo = nvkm_rd32(device, 0x002804 + (unit * 0x10));  in gf100_fifo_intr_fault()
    378  u32 vahi = nvkm_rd32(device, 0x002808 + (unit * 0x10));  in gf100_fifo_intr_fault()
    379  u32 type = nvkm_rd32(device, 0x00280c + (unit * 0x10));  in gf100_fifo_intr_fault()
    409  u32 stat = nvkm_rd32(device, 0x040108 + (unit * 0x2000));  in gf100_fifo_intr_pbdma()
    410  u32 addr = nvkm_rd32(device, 0x0400c0 + (unit * 0x2000));  in gf100_fifo_intr_pbdma()
    411  u32 data = nvkm_rd32(device, 0x0400c4 + (unit * 0x2000));  in gf100_fifo_intr_pbdma()
    [all …]
|
/linux-5.19.10/drivers/gpu/drm/nouveau/nvkm/engine/disp/ |
D | headgv100.c |
    44   *vline = nvkm_rd32(device, 0x616330 + hoff) & 0x0000ffff;  in gv100_head_rgpos()
    45   *hline = nvkm_rd32(device, 0x616334 + hoff) & 0x0000ffff;  in gv100_head_rgpos()
    55   data = nvkm_rd32(device, 0x682064 + hoff);  in gv100_head_state()
    58   data = nvkm_rd32(device, 0x682068 + hoff);  in gv100_head_state()
    61   data = nvkm_rd32(device, 0x68206c + hoff);  in gv100_head_state()
    64   data = nvkm_rd32(device, 0x682070 + hoff);  in gv100_head_state()
    67   state->hz = nvkm_rd32(device, 0x68200c + hoff);  in gv100_head_state()
    69   data = nvkm_rd32(device, 0x682004 + hoff);  in gv100_head_state()
    94   if (!(nvkm_rd32(device, 0x610060) & (0x00000001 << id)))  in gv100_head_new()
    103  *pmask = nvkm_rd32(device, 0x610060) & 0x000000ff;  in gv100_head_cnt()
    [all …]
|
D | gf119.c |
    45   mask[head->id] = nvkm_rd32(device, 0x6101d4 + (head->id * 0x800));  in gf119_disp_super()
    94   u32 stat = nvkm_rd32(device, 0x6101f0 + (chid * 12));  in gf119_disp_intr_error()
    97   u32 data = nvkm_rd32(device, 0x6101f4 + (chid * 12));  in gf119_disp_intr_error()
    98   u32 code = nvkm_rd32(device, 0x6101f8 + (chid * 12));  in gf119_disp_intr_error()
    127  u32 intr = nvkm_rd32(device, 0x610088);  in gf119_disp_intr()
    130  u32 stat = nvkm_rd32(device, 0x61008c);  in gf119_disp_intr()
    140  u32 stat = nvkm_rd32(device, 0x61009c);  in gf119_disp_intr()
    148  u32 stat = nvkm_rd32(device, 0x6100ac);  in gf119_disp_intr()
    168  u32 stat = nvkm_rd32(device, 0x6100bc + hoff);  in gf119_disp_intr()
    172  nvkm_rd32(device, 0x6100c0 + hoff);  in gf119_disp_intr()
    [all …]
|
D | gv100.c |
    35   *pmask = nvkm_rd32(device, 0x610064);  in gv100_disp_wndw_cnt()
    36   return (nvkm_rd32(device, 0x610074) & 0x03f00000) >> 20;  in gv100_disp_wndw_cnt()
    47   u32 stat = nvkm_rd32(device, 0x6107a8);  in gv100_disp_super()
    52   mask[head->id] = nvkm_rd32(device, 0x6107ac + (head->id * 4));  in gv100_disp_super()
    101  u32 stat = nvkm_rd32(device, 0x611020 + (chid * 12));  in gv100_disp_exception()
    113  u32 data = nvkm_rd32(device, 0x611024 + (chid * 12));  in gv100_disp_exception()
    114  u32 code = nvkm_rd32(device, 0x611028 + (chid * 12));  in gv100_disp_exception()
    143  u32 stat = nvkm_rd32(device, 0x611c30);  in gv100_disp_intr_ctrl_disp()
    165  unsigned long wndws = nvkm_rd32(device, 0x611858);  in gv100_disp_intr_ctrl_disp()
    166  unsigned long other = nvkm_rd32(device, 0x61185c);  in gv100_disp_intr_ctrl_disp()
    [all …]
|
D | headgf119.c |
    56   data = nvkm_rd32(device, 0x640414 + hoff);  in gf119_head_state()
    59   data = nvkm_rd32(device, 0x640418 + hoff);  in gf119_head_state()
    62   data = nvkm_rd32(device, 0x64041c + hoff);  in gf119_head_state()
    65   data = nvkm_rd32(device, 0x640420 + hoff);  in gf119_head_state()
    68   state->hz = nvkm_rd32(device, 0x640450 + hoff);  in gf119_head_state()
    70   data = nvkm_rd32(device, 0x640404 + hoff);  in gf119_head_state()
    102  *pmask = nvkm_rd32(device, 0x612004) & 0x0000000f;  in gf119_head_cnt()
    103  return nvkm_rd32(device, 0x022448);  in gf119_head_cnt()
|
D | headnv50.c |
    53   *vline = nvkm_rd32(device, 0x616340 + hoff) & 0x0000ffff;  in nv50_head_rgpos()
    54   *hline = nvkm_rd32(device, 0x616344 + hoff) & 0x0000ffff;  in nv50_head_rgpos()
    64   data = nvkm_rd32(device, 0x610ae8 + hoff);  in nv50_head_state()
    67   data = nvkm_rd32(device, 0x610af0 + hoff);  in nv50_head_state()
    70   data = nvkm_rd32(device, 0x610af8 + hoff);  in nv50_head_state()
    73   data = nvkm_rd32(device, 0x610b00 + hoff);  in nv50_head_state()
    76   state->hz = (nvkm_rd32(device, 0x610ad0 + hoff) & 0x003fffff) * 1000;  in nv50_head_state()
|
D | sorg94.c |
    65   data[0] = nvkm_rd32(device, 0x61c118 + loff) & ~(0x000000ff << shift);  in g94_sor_dp_drive()
    66   data[1] = nvkm_rd32(device, 0x61c120 + loff) & ~(0x000000ff << shift);  in g94_sor_dp_drive()
    67   data[2] = nvkm_rd32(device, 0x61c130 + loff);  in g94_sor_dp_drive()
    108  if (!(nvkm_rd32(device, 0x61c034 + soff) & 0x80000000))  in g94_sor_dp_power()
    139  switch (nvkm_rd32(device, 0x614300 + soff) & 0x00030000) {  in g94_sor_war_needed()
    162  clksor = nvkm_rd32(device, 0x614300 + nv50_ior_base(ior));  in g94_sor_war_update_sppll1()
    189  sorpwr = nvkm_rd32(device, 0x61c004 + soff);  in g94_sor_war_3()
    191  u32 seqctl = nvkm_rd32(device, 0x61c030 + soff);  in g94_sor_war_3()
    198  if (!(nvkm_rd32(device, 0x61c030 + soff) & 0x10000000))  in g94_sor_war_3()
    203  if (!(nvkm_rd32(device, 0x61c030 + soff) & 0x10000000))  in g94_sor_war_3()
    [all …]
|
D | piocnv50.c |
    40   if (!(nvkm_rd32(device, 0x610200 + (ctrl * 0x10)) & 0x00030000))  in nv50_disp_pioc_fini()
    44   nvkm_rd32(device, 0x610200 + (ctrl * 0x10)));  in nv50_disp_pioc_fini()
    59   if (!(nvkm_rd32(device, 0x610200 + (ctrl * 0x10)) & 0x00030000))  in nv50_disp_pioc_init()
    63   nvkm_rd32(device, 0x610200 + (ctrl * 0x10)));  in nv50_disp_pioc_init()
    69   u32 tmp = nvkm_rd32(device, 0x610200 + (ctrl * 0x10));  in nv50_disp_pioc_init()
    74   nvkm_rd32(device, 0x610200 + (ctrl * 0x10)));  in nv50_disp_pioc_init()
|
D | corenv50.c |
    176  if (!(nvkm_rd32(device, 0x610200) & 0x001e0000))  in nv50_disp_core_fini()
    180  nvkm_rd32(device, 0x610200));  in nv50_disp_core_fini()
    183  chan->suspend_put = nvkm_rd32(device, 0x640000);  in nv50_disp_core_fini()
    193  if ((nvkm_rd32(device, 0x610200) & 0x009f0000) == 0x00020000)  in nv50_disp_core_init()
    195  if ((nvkm_rd32(device, 0x610200) & 0x003f0000) == 0x00030000)  in nv50_disp_core_init()
    208  if (!(nvkm_rd32(device, 0x610200) & 0x80000000))  in nv50_disp_core_init()
    212  nvkm_rd32(device, 0x610200));  in nv50_disp_core_init()
|
/linux-5.19.10/drivers/gpu/drm/nouveau/nvkm/engine/pm/ |
D | gf100.c |
    152  case 0: ctr->ctr = nvkm_rd32(device, dom->addr + 0x08c); break;  in gf100_perfctr_read()
    153  case 1: ctr->ctr = nvkm_rd32(device, dom->addr + 0x088); break;  in gf100_perfctr_read()
    154  case 2: ctr->ctr = nvkm_rd32(device, dom->addr + 0x080); break;  in gf100_perfctr_read()
    155  case 3: ctr->ctr = nvkm_rd32(device, dom->addr + 0x090); break;  in gf100_perfctr_read()
    157  dom->clk = nvkm_rd32(device, dom->addr + 0x070);  in gf100_perfctr_read()
    210  mask = (1 << nvkm_rd32(device, 0x022430)) - 1;  in gf100_pm_new_()
    211  mask &= ~nvkm_rd32(device, 0x022504);  in gf100_pm_new_()
    212  mask &= ~nvkm_rd32(device, 0x022584);  in gf100_pm_new_()
    220  mask = (1 << nvkm_rd32(device, 0x022438)) - 1;  in gf100_pm_new_()
    221  mask &= ~nvkm_rd32(device, 0x022548);  in gf100_pm_new_()
    [all …]
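In gf100_pm_new_() (hits 210-212 and 220-221 above) the set of usable performance-monitor units is built the same way twice: one register reports how many units exist, and further registers flag units to exclude. The sketch below reproduces that mask construction with invented register values; the helper names are hypothetical stand-ins for the nvkm_rd32() calls.

```c
#include <stdint.h>
#include <stdio.h>

/* Hypothetical stand-ins for the nvkm_rd32() calls in gf100_pm_new_();
 * the returned values are invented for this sketch. */
static uint32_t unit_count(void) { return 4; }          /* e.g. 0x022430 */
static uint32_t disable_a(void)  { return 0x00000002; } /* e.g. 0x022504 */
static uint32_t disable_b(void)  { return 0x00000008; } /* e.g. 0x022584 */

int main(void)
{
	/* Same shape as the hits above: set unit_count() low bits, then
	 * clear every unit flagged by either disable register. */
	uint32_t mask = (1u << unit_count()) - 1;

	mask &= ~disable_a();
	mask &= ~disable_b();

	printf("usable units: 0x%08x\n", mask); /* 0x00000005 here */
	return 0;
}
```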
|
/linux-5.19.10/drivers/gpu/drm/nouveau/nvkm/subdev/bus/ |
D | nv31.c |
    35   u32 stat = nvkm_rd32(device, 0x001100) & nvkm_rd32(device, 0x001140);  in nv31_bus_intr()
    36   u32 gpio = nvkm_rd32(device, 0x001104) & nvkm_rd32(device, 0x001144);  in nv31_bus_intr()
    45   u32 addr = nvkm_rd32(device, 0x009084);  in nv31_bus_intr()
    46   u32 data = nvkm_rd32(device, 0x009088);  in nv31_bus_intr()
|
/linux-5.19.10/drivers/gpu/drm/nouveau/nvkm/subdev/bios/ |
D | shadowramin.c |
    37   *(u32 *)&bios->data[i] = nvkm_rd32(device, 0x700000 + i);  in pramin_read()
    71   addr = nvkm_rd32(device, 0x021c04);  in pramin_init()
    74   addr = nvkm_rd32(device, 0x022500);  in pramin_init()
    85   addr = nvkm_rd32(device, 0x625f04);  in pramin_init()
    87   addr = nvkm_rd32(device, 0x619f04);  in pramin_init()
    100  addr = (u64)nvkm_rd32(device, 0x001700) << 16;  in pramin_init()
    111  priv->bar0 = nvkm_rd32(device, 0x001700);  in pramin_init()
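Hit 37 above is pramin_read() filling the shadow copy of the VBIOS one 32-bit word at a time from the PRAMIN window at 0x700000. The sketch below shows the same copy loop with a stubbed read; memcpy() replaces the driver's type-punned store purely to keep the example strict-aliasing clean, and the stub contents are invented.

```c
#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Stand-in for nvkm_rd32(device, 0x700000 + i); returns a dummy
 * pattern instead of real PRAMIN contents. Illustration only. */
static uint32_t pramin_rd32(uint32_t offset)
{
	return 0x55aa0000u | offset;
}

/* Copy `size` bytes out of the PRAMIN window, 32 bits per read, as
 * pramin_read() does in shadowramin.c. `size` is assumed to be a
 * multiple of 4 here. */
static void pramin_copy(uint8_t *dst, uint32_t size)
{
	for (uint32_t i = 0; i < size; i += 4) {
		uint32_t word = pramin_rd32(i);

		memcpy(dst + i, &word, sizeof(word));
	}
}

int main(void)
{
	uint8_t buf[16];

	pramin_copy(buf, sizeof(buf));
	printf("first bytes: %02x %02x %02x %02x\n",
	       buf[0], buf[1], buf[2], buf[3]);
	return 0;
}
```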
|
/linux-5.19.10/drivers/gpu/drm/nouveau/nvkm/subdev/gpio/ |
D | g94.c |
    30   u32 intr0 = nvkm_rd32(device, 0x00e054);  in g94_gpio_intr_stat()
    31   u32 intr1 = nvkm_rd32(device, 0x00e074);  in g94_gpio_intr_stat()
    32   u32 stat0 = nvkm_rd32(device, 0x00e050) & intr0;  in g94_gpio_intr_stat()
    33   u32 stat1 = nvkm_rd32(device, 0x00e070) & intr1;  in g94_gpio_intr_stat()
    44   u32 inte0 = nvkm_rd32(device, 0x00e050);  in g94_gpio_intr_mask()
    45   u32 inte1 = nvkm_rd32(device, 0x00e070);  in g94_gpio_intr_mask()
|
D | gk104.c |
    30   u32 intr0 = nvkm_rd32(device, 0x00dc00);  in gk104_gpio_intr_stat()
    31   u32 intr1 = nvkm_rd32(device, 0x00dc80);  in gk104_gpio_intr_stat()
    32   u32 stat0 = nvkm_rd32(device, 0x00dc08) & intr0;  in gk104_gpio_intr_stat()
    33   u32 stat1 = nvkm_rd32(device, 0x00dc88) & intr1;  in gk104_gpio_intr_stat()
    44   u32 inte0 = nvkm_rd32(device, 0x00dc08);  in gk104_gpio_intr_mask()
    45   u32 inte1 = nvkm_rd32(device, 0x00dc88);  in gk104_gpio_intr_mask()
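g94_gpio_intr_stat() and gk104_gpio_intr_stat() above compute the pending set for each of two GPIO interrupt banks by ANDing two registers per bank (0x00e054 with 0x00e050 and 0x00e074 with 0x00e070 on g94; 0x00dc00 with 0x00dc08 and 0x00dc80 with 0x00dc88 on gk104). The sketch below mirrors that masking; which register of each pair is the raw latch and which the enable mask is not visible in the listing, so the stub names and values are purely illustrative.

```c
#include <stdint.h>
#include <stdio.h>

/* Stand-ins for the four nvkm_rd32() calls per implementation;
 * values invented for this sketch. */
static uint32_t bank0_intr(void) { return 0x00000011; } /* e.g. 0x00e054 / 0x00dc00 */
static uint32_t bank0_mask(void) { return 0x00000001; } /* e.g. 0x00e050 / 0x00dc08 */
static uint32_t bank1_intr(void) { return 0x00000100; } /* e.g. 0x00e074 / 0x00dc80 */
static uint32_t bank1_mask(void) { return 0xffffffff; } /* e.g. 0x00e070 / 0x00dc88 */

int main(void)
{
	/* A GPIO line is reported only when its bit is set in both
	 * registers of its bank, exactly as the hits above compute
	 * stat0 and stat1. */
	uint32_t stat0 = bank0_mask() & bank0_intr();
	uint32_t stat1 = bank1_mask() & bank1_intr();

	printf("stat0=0x%08x stat1=0x%08x\n", stat0, stat1);
	return 0;
}
```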
|
/linux-5.19.10/drivers/gpu/drm/nouveau/nvkm/subdev/therm/ |
D | nv40.c |
    63   return nvkm_rd32(device, 0x15b4) & 0x3fff;  in nv40_sensor_setup()
    67   return nvkm_rd32(device, 0x15b4) & 0xff;  in nv40_sensor_setup()
    82   core_temp = nvkm_rd32(device, 0x15b4) & 0x3fff;  in nv40_temp_get()
    85   core_temp = nvkm_rd32(device, 0x15b4) & 0xff;  in nv40_temp_get()
    126  u32 reg = nvkm_rd32(device, 0x0010f0);  in nv40_fan_pwm_get()
    134  u32 reg = nvkm_rd32(device, 0x0015f4);  in nv40_fan_pwm_get()
    136  *divs = nvkm_rd32(device, 0x0015f8);  in nv40_fan_pwm_get()
    172  uint32_t stat = nvkm_rd32(device, 0x1100);  in nv40_therm_intr()
|
/linux-5.19.10/drivers/gpu/drm/nouveau/nvkm/subdev/clk/ |
D | nv50.c |
    41   return nvkm_rd32(device, 0x004700);  in read_div()
    45   return nvkm_rd32(device, 0x004800);  in read_div()
    57   u32 rsel = nvkm_rd32(device, 0x00e18c);  in read_pll_src()
    73   coef = nvkm_rd32(device, 0x00e81c + (id * 0x0c));  in read_pll_src()
    82   coef = nvkm_rd32(device, 0x00e81c);  in read_pll_src()
    90   rsel = nvkm_rd32(device, 0x00c050);  in read_pll_src()
    108  coef = nvkm_rd32(device, 0x00e81c + (id * 0x28));  in read_pll_src()
    109  P = (nvkm_rd32(device, 0x00e824 + (id * 0x28)) >> 16) & 7;  in read_pll_src()
    129  u32 src, mast = nvkm_rd32(device, 0x00c040);  in read_pll_ref()
    161  u32 mast = nvkm_rd32(device, 0x00c040);  in read_pll()
    [all …]
|