Lines Matching refs:disp

40 struct nvkm_device *device = ior->disp->engine.subdev.device; in gv100_sor_hda_device_entry()
56 struct nvkm_device *device = sor->disp->engine.subdev.device; in gv100_sor_dp_watermark()
65 struct nvkm_device *device = sor->disp->engine.subdev.device; in gv100_sor_dp_audio_sym()
75 struct nvkm_device *device = sor->disp->engine.subdev.device; in gv100_sor_dp_audio()
103 struct nvkm_device *device = ior->disp->engine.subdev.device; in gv100_sor_hdmi_ctrl()
164 struct nvkm_device *device = sor->disp->engine.subdev.device; in gv100_sor_state()
202 gv100_sor_new(struct nvkm_disp *disp, int id) in gv100_sor_new() argument
204 struct nvkm_device *device = disp->engine.subdev.device; in gv100_sor_new()
210 return nvkm_ior_new_(&gv100_sor, disp, SOR, id, hda & BIT(id)); in gv100_sor_new()
214 gv100_sor_cnt(struct nvkm_disp *disp, unsigned long *pmask) in gv100_sor_cnt() argument
216 struct nvkm_device *device = disp->engine.subdev.device; in gv100_sor_cnt()
225 struct nvkm_device *device = head->disp->engine.subdev.device; in gv100_head_vblank_put()
232 struct nvkm_device *device = head->disp->engine.subdev.device; in gv100_head_vblank_get()
239 struct nvkm_device *device = head->disp->engine.subdev.device; in gv100_head_rgpos()
249 struct nvkm_device *device = head->disp->engine.subdev.device; in gv100_head_state()
289 gv100_head_new(struct nvkm_disp *disp, int id) in gv100_head_new() argument
291 struct nvkm_device *device = disp->engine.subdev.device; in gv100_head_new()
296 return nvkm_head_new_(&gv100_head, disp, id); in gv100_head_new()
300 gv100_head_cnt(struct nvkm_disp *disp, unsigned long *pmask) in gv100_head_cnt() argument
302 struct nvkm_device *device = disp->engine.subdev.device; in gv100_head_cnt()
322 struct nvkm_device *device = chan->disp->engine.subdev.device; in gv100_disp_dmac_idle()
336 return nvkm_ramht_insert(chan->disp->ramht, object, chan->chid.user, -9, handle, in gv100_disp_dmac_bind()
343 struct nvkm_device *device = chan->disp->engine.subdev.device; in gv100_disp_dmac_fini()
355 struct nvkm_subdev *subdev = &chan->disp->engine.subdev; in gv100_disp_dmac_init()
375 struct nvkm_device *device = chan->disp->engine.subdev.device; in gv100_disp_wimm_intr()
501 struct nvkm_device *device = chan->disp->engine.subdev.device; in gv100_disp_wndw_intr()
526 gv100_disp_wndw_cnt(struct nvkm_disp *disp, unsigned long *pmask) in gv100_disp_wndw_cnt() argument
528 struct nvkm_device *device = disp->engine.subdev.device; in gv100_disp_wndw_cnt()
537 struct nvkm_device *device = chan->disp->engine.subdev.device; in gv100_disp_curs_idle()
550 struct nvkm_device *device = chan->disp->engine.subdev.device; in gv100_disp_curs_intr()
559 struct nvkm_device *device = chan->disp->engine.subdev.device; in gv100_disp_curs_fini()
569 struct nvkm_subdev *subdev = &chan->disp->engine.subdev; in gv100_disp_curs_init()
704 struct nvkm_device *device = chan->disp->engine.subdev.device; in gv100_disp_core_idle()
723 struct nvkm_device *device = chan->disp->engine.subdev.device; in gv100_disp_core_intr()
732 struct nvkm_device *device = chan->disp->engine.subdev.device; in gv100_disp_core_fini()
742 struct nvkm_subdev *subdev = &chan->disp->engine.subdev; in gv100_disp_core_init()
778 struct nvkm_disp *disp; member
786 struct nvkm_device *device = caps->disp->engine.subdev.device; in gv100_disp_caps_map()
802 struct nvkm_disp *disp = nvkm_udisp(oclass->parent); in gv100_disp_caps_new() local
810 caps->disp = disp; in gv100_disp_caps_new()
817 struct nvkm_disp *disp = container_of(work, struct nvkm_disp, super.work); in gv100_disp_super() local
818 struct nvkm_subdev *subdev = &disp->engine.subdev; in gv100_disp_super()
823 mutex_lock(&disp->super.mutex); in gv100_disp_super()
826 nvkm_debug(subdev, "supervisor %d: %08x\n", ffs(disp->super.pending), stat); in gv100_disp_super()
827 list_for_each_entry(head, &disp->heads, head) { in gv100_disp_super()
832 if (disp->super.pending & 0x00000001) { in gv100_disp_super()
833 nv50_disp_chan_mthd(disp->chan[0], NV_DBG_DEBUG); in gv100_disp_super()
834 nv50_disp_super_1(disp); in gv100_disp_super()
835 list_for_each_entry(head, &disp->heads, head) { in gv100_disp_super()
838 nv50_disp_super_1_0(disp, head); in gv100_disp_super()
841 if (disp->super.pending & 0x00000002) { in gv100_disp_super()
842 list_for_each_entry(head, &disp->heads, head) { in gv100_disp_super()
845 nv50_disp_super_2_0(disp, head); in gv100_disp_super()
847 nvkm_outp_route(disp); in gv100_disp_super()
848 list_for_each_entry(head, &disp->heads, head) { in gv100_disp_super()
851 nv50_disp_super_2_1(disp, head); in gv100_disp_super()
853 list_for_each_entry(head, &disp->heads, head) { in gv100_disp_super()
856 nv50_disp_super_2_2(disp, head); in gv100_disp_super()
859 if (disp->super.pending & 0x00000004) { in gv100_disp_super()
860 list_for_each_entry(head, &disp->heads, head) { in gv100_disp_super()
863 nv50_disp_super_3_0(disp, head); in gv100_disp_super()
867 list_for_each_entry(head, &disp->heads, head) in gv100_disp_super()
871 mutex_unlock(&disp->super.mutex); in gv100_disp_super()
875 gv100_disp_exception(struct nvkm_disp *disp, int chid) in gv100_disp_exception() argument
877 struct nvkm_subdev *subdev = &disp->engine.subdev; in gv100_disp_exception()
903 if (chid < ARRAY_SIZE(disp->chan) && disp->chan[chid]) { in gv100_disp_exception()
906 nv50_disp_chan_mthd(disp->chan[chid], NV_DBG_ERROR); in gv100_disp_exception()
917 gv100_disp_intr_ctrl_disp(struct nvkm_disp *disp) in gv100_disp_intr_ctrl_disp() argument
919 struct nvkm_subdev *subdev = &disp->engine.subdev; in gv100_disp_intr_ctrl_disp()
924 disp->super.pending = (stat & 0x00000007); in gv100_disp_intr_ctrl_disp()
925 queue_work(disp->super.wq, &disp->super.work); in gv100_disp_intr_ctrl_disp()
926 nvkm_wr32(device, 0x611860, disp->super.pending); in gv100_disp_intr_ctrl_disp()
952 nv50_disp_chan_uevent_send(disp, 0); in gv100_disp_intr_ctrl_disp()
955 for_each_set_bit(wndw, &wndws, disp->wndw.nr) { in gv100_disp_intr_ctrl_disp()
956 nv50_disp_chan_uevent_send(disp, 1 + wndw); in gv100_disp_intr_ctrl_disp()
965 gv100_disp_intr_exc_other(struct nvkm_disp *disp) in gv100_disp_intr_exc_other() argument
967 struct nvkm_subdev *subdev = &disp->engine.subdev; in gv100_disp_intr_exc_other()
975 gv100_disp_exception(disp, 0); in gv100_disp_intr_exc_other()
980 for_each_set_bit(head, &mask, disp->wndw.nr) { in gv100_disp_intr_exc_other()
982 gv100_disp_exception(disp, 73 + head); in gv100_disp_intr_exc_other()
994 gv100_disp_intr_exc_winim(struct nvkm_disp *disp) in gv100_disp_intr_exc_winim() argument
996 struct nvkm_subdev *subdev = &disp->engine.subdev; in gv100_disp_intr_exc_winim()
1001 for_each_set_bit(wndw, &stat, disp->wndw.nr) { in gv100_disp_intr_exc_winim()
1003 gv100_disp_exception(disp, 33 + wndw); in gv100_disp_intr_exc_winim()
1014 gv100_disp_intr_exc_win(struct nvkm_disp *disp) in gv100_disp_intr_exc_win() argument
1016 struct nvkm_subdev *subdev = &disp->engine.subdev; in gv100_disp_intr_exc_win()
1021 for_each_set_bit(wndw, &stat, disp->wndw.nr) { in gv100_disp_intr_exc_win()
1023 gv100_disp_exception(disp, 1 + wndw); in gv100_disp_intr_exc_win()
1034 gv100_disp_intr_head_timing(struct nvkm_disp *disp, int head) in gv100_disp_intr_head_timing() argument
1036 struct nvkm_subdev *subdev = &disp->engine.subdev; in gv100_disp_intr_head_timing()
1047 nvkm_disp_vblank(disp, head); in gv100_disp_intr_head_timing()
1059 gv100_disp_intr(struct nvkm_disp *disp) in gv100_disp_intr() argument
1061 struct nvkm_subdev *subdev = &disp->engine.subdev; in gv100_disp_intr()
1069 gv100_disp_intr_head_timing(disp, head); in gv100_disp_intr()
1075 gv100_disp_intr_exc_win(disp); in gv100_disp_intr()
1080 gv100_disp_intr_exc_winim(disp); in gv100_disp_intr()
1085 gv100_disp_intr_exc_other(disp); in gv100_disp_intr()
1090 gv100_disp_intr_ctrl_disp(disp); in gv100_disp_intr()
1099 gv100_disp_fini(struct nvkm_disp *disp) in gv100_disp_fini() argument
1101 struct nvkm_device *device = disp->engine.subdev.device; in gv100_disp_fini()
1106 gv100_disp_init(struct nvkm_disp *disp) in gv100_disp_init() argument
1108 struct nvkm_device *device = disp->engine.subdev.device; in gv100_disp_init()
1128 for (i = 0; i < disp->sor.nr; i++) { in gv100_disp_init()
1135 list_for_each_entry(head, &disp->heads, head) { in gv100_disp_init()
1150 for (i = 0; i < disp->wndw.nr; i++) { in gv100_disp_init()
1167 switch (nvkm_memory_target(disp->inst->memory)) { in gv100_disp_init()
1175 nvkm_wr32(device, 0x610014, disp->inst->addr >> 16); in gv100_disp_init()
1182 nvkm_wr32(device, 0x611cec, disp->head.mask << 16 | in gv100_disp_init()
1187 nvkm_wr32(device, 0x611ce8, disp->wndw.mask); /* MSK. */ in gv100_disp_init()
1191 nvkm_wr32(device, 0x611ce4, disp->wndw.mask); /* MSK. */ in gv100_disp_init()
1195 list_for_each_entry(head, &disp->heads, head) { in gv100_disp_init()
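
Nearly every entry above follows the same idiom: a gv100 display function first resolves the chip's register interface from whatever object it was handed (ior, sor, head, chan, caps, or the disp itself) by walking disp->engine.subdev.device, then programs display registers through that device pointer. As a rough, hedged illustration of that pattern (not a copy of any listed function; the 0x611860 write, the disp->super fields, and the nvkm_wr32()/queue_work() calls appear in the lines above, while the function name and the stat parameter are assumptions made for the example):

/*
 * Illustrative sketch only: mirrors the access pattern repeated through
 * the listing (resolve the nvkm_device from the disp object, then touch
 * display registers through it).  The 0x611860 acknowledge write and the
 * disp->super fields are taken from the gv100_disp_intr_ctrl_disp()
 * entries; everything else here is simplified.
 */
static void
example_ack_supervisor(struct nvkm_disp *disp, u32 stat)
{
	/* Every entry above starts by resolving the device like this. */
	struct nvkm_device *device = disp->engine.subdev.device;

	/* Record the pending supervisor requests, kick the supervisor
	 * worker, and acknowledge the interrupt, following the shape of
	 * gv100_disp_intr_ctrl_disp() in the listing. */
	disp->super.pending = (stat & 0x00000007);
	queue_work(disp->super.wq, &disp->super.work);
	nvkm_wr32(device, 0x611860, disp->super.pending);
}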