
Searched refs:engn (Results 1 – 25 of 30) sorted by relevance


/linux-6.6.21/drivers/gpu/drm/nouveau/nvkm/engine/fifo/
cgrp.c
37 struct nvkm_engn *engn = ectx->engn; in nvkm_cgrp_ectx_put() local
40 CGRP_TRACE(cgrp, "dtor ectx %d[%s]", engn->id, engn->engine->subdev.name); in nvkm_cgrp_ectx_put()
51 nvkm_cgrp_ectx_get(struct nvkm_cgrp *cgrp, struct nvkm_engn *engn, struct nvkm_ectx **pectx, in nvkm_cgrp_ectx_get() argument
54 struct nvkm_engine *engine = engn->engine; in nvkm_cgrp_ectx_get()
63 ectx = nvkm_list_find(ectx, &cgrp->ectxs, head, ectx->engn == engn); in nvkm_cgrp_ectx_get()
71 CGRP_TRACE(cgrp, "ctor ectx %d[%s]", engn->id, engn->engine->subdev.name); in nvkm_cgrp_ectx_get()
75 ectx->engn = engn; in nvkm_cgrp_ectx_get()
98 struct nvkm_engn *engn = vctx->ectx->engn; in nvkm_cgrp_vctx_put() local
101 CGRP_TRACE(cgrp, "dtor vctx %d[%s]", engn->id, engn->engine->subdev.name); in nvkm_cgrp_vctx_put()
107 atomic_dec(&vctx->vmm->engref[engn->engine->subdev.type]); in nvkm_cgrp_vctx_put()
[all …]
runl.c
34 nvkm_engn_cgrp_get(struct nvkm_engn *engn, unsigned long *pirqflags) in nvkm_engn_cgrp_get() argument
41 id = engn->func->cxid(engn, &cgid); in nvkm_engn_cgrp_get()
46 chan = nvkm_runl_chan_get_chid(engn->runl, id, pirqflags); in nvkm_engn_cgrp_get()
50 cgrp = nvkm_runl_cgrp_get_cgid(engn->runl, id, pirqflags); in nvkm_engn_cgrp_get()
63 struct nvkm_engn *engn; in nvkm_runl_rc() local
101 nvkm_runl_foreach_engn_cond(engn, runl, engn->func->cxid) { in nvkm_runl_rc()
102 cgrp = nvkm_engn_cgrp_get(engn, &flags); in nvkm_runl_rc()
104 ENGN_DEBUG(engn, "cxid not valid"); in nvkm_runl_rc()
111 ENGN_DEBUG(engn, "cxid not in recovery"); in nvkm_runl_rc()
115 ENGN_DEBUG(engn, "resetting..."); in nvkm_runl_rc()
[all …]
gf100.c
161 gf100_ectx_bind(struct nvkm_engn *engn, struct nvkm_cctx *cctx, struct nvkm_chan *chan) in gf100_ectx_bind() argument
166 switch (engn->engine->subdev.type) { in gf100_ectx_bind()
169 case NVKM_ENGINE_CE : ptr0 = 0x0230 + (engn->engine->subdev.inst * 0x10); break; in gf100_ectx_bind()
190 gf100_ectx_ctor(struct nvkm_engn *engn, struct nvkm_vctx *vctx) in gf100_ectx_ctor() argument
202 gf100_engn_mmu_fault_triggered(struct nvkm_engn *engn) in gf100_engn_mmu_fault_triggered() argument
204 struct nvkm_runl *runl = engn->runl; in gf100_engn_mmu_fault_triggered()
207 u32 data = nvkm_rd32(device, 0x002a30 + (engn->id * 4)); in gf100_engn_mmu_fault_triggered()
209 ENGN_DEBUG(engn, "%08x: mmu fault triggered", data); in gf100_engn_mmu_fault_triggered()
214 nvkm_mask(device, 0x002a30 + (engn->id * 4), 0x00000100, 0x00000000); in gf100_engn_mmu_fault_triggered()
222 gf100_engn_mmu_fault_trigger(struct nvkm_engn *engn) in gf100_engn_mmu_fault_trigger() argument
[all …]
uchan.c
87 nvkm_chan_cctx_bind(chan, ectx->engn, NULL); in nvkm_uchan_object_fini_1()
120 nvkm_chan_cctx_bind(chan, ectx->engn, cctx); in nvkm_uchan_object_init_0()
134 struct nvkm_engn *engn; in nvkm_uchan_object_dtor() local
139 engn = uobj->cctx->vctx->ectx->engn; in nvkm_uchan_object_dtor()
140 if (engn->func->ramht_del) in nvkm_uchan_object_dtor()
141 engn->func->ramht_del(uobj->chan, uobj->hash); in nvkm_uchan_object_dtor()
159 struct nvkm_engn *engn; in nvkm_uchan_object_new() local
164 engn = nvkm_runl_find_engn(engn, cgrp->runl, engn->engine == oclass->engine); in nvkm_uchan_object_new()
165 if (WARN_ON(!engn)) in nvkm_uchan_object_new()
177 ret = nvkm_chan_cctx_get(chan, engn, &uobj->cctx, oclass->client); in nvkm_uchan_object_new()
[all …]
chan.c
42 nvkm_chan_cctx_bind(struct nvkm_chan *chan, struct nvkm_engn *engn, struct nvkm_cctx *cctx) in nvkm_chan_cctx_bind() argument
46 struct nvkm_engine *engine = engn->engine; in nvkm_chan_cctx_bind()
48 if (!engn->func->bind) in nvkm_chan_cctx_bind()
51 CHAN_TRACE(chan, "%sbind cctx %d[%s]", cctx ? "" : "un", engn->id, engine->subdev.name); in nvkm_chan_cctx_bind()
63 engn->func->bind(engn, cctx, chan); in nvkm_chan_cctx_bind()
78 struct nvkm_engn *engn = cctx->vctx->ectx->engn; in nvkm_chan_cctx_put() local
81 CHAN_TRACE(chan, "dtor cctx %d[%s]", engn->id, engn->engine->subdev.name); in nvkm_chan_cctx_put()
93 nvkm_chan_cctx_get(struct nvkm_chan *chan, struct nvkm_engn *engn, struct nvkm_cctx **pcctx, in nvkm_chan_cctx_get() argument
104 cctx->vctx->ectx->engn == engn && cctx->vctx->vmm == chan->vmm); in nvkm_chan_cctx_get()
113 ret = nvkm_cgrp_vctx_get(cgrp, engn, chan, &vctx, client); in nvkm_chan_cctx_get()
[all …]
g98.c
39 nvkm_runl_add(runl, 1, fifo->func->engn, NVKM_ENGINE_GR, 0); in g98_fifo_runl_ctor()
40 nvkm_runl_add(runl, 2, fifo->func->engn, NVKM_ENGINE_MSPPP, 0); in g98_fifo_runl_ctor()
41 nvkm_runl_add(runl, 3, fifo->func->engn, NVKM_ENGINE_CE, 0); in g98_fifo_runl_ctor()
42 nvkm_runl_add(runl, 4, fifo->func->engn, NVKM_ENGINE_MSPDEC, 0); in g98_fifo_runl_ctor()
43 nvkm_runl_add(runl, 5, fifo->func->engn, NVKM_ENGINE_SEC, 0); in g98_fifo_runl_ctor()
44 nvkm_runl_add(runl, 6, fifo->func->engn, NVKM_ENGINE_MSVLD, 0); in g98_fifo_runl_ctor()
59 .engn = &g84_engn,
ga100.c
122 ga100_engn_cxid(struct nvkm_engn *engn, bool *cgid) in ga100_engn_cxid() argument
124 struct nvkm_runl *runl = engn->runl; in ga100_engn_cxid()
126 u32 stat = nvkm_rd32(device, runl->addr + 0x200 + engn->id * 0x40); in ga100_engn_cxid()
128 ENGN_DEBUG(engn, "status %08x", stat); in ga100_engn_cxid()
137 if (nvkm_engine_chsw_load(engn->engine)) in ga100_engn_cxid()
149 ga100_engn_nonstall(struct nvkm_engn *engn) in ga100_engn_nonstall() argument
151 struct nvkm_engine *engine = engn->engine; in ga100_engn_nonstall()
333 struct nvkm_engn *engn; in ga100_runl_intr() local
346 nvkm_runl_foreach_engn_cond(engn, runl, stat & BIT(engn->id)) { in ga100_runl_intr()
347 info = nvkm_rd32(device, runl->addr + 0x224 + (engn->id * 0x40)); in ga100_runl_intr()
[all …]
base.c
42 struct nvkm_engn *engn; in nvkm_fifo_ctxsw_in_progress() local
45 nvkm_runl_foreach_engn(engn, runl) { in nvkm_fifo_ctxsw_in_progress()
46 if (engn->engine == engine) in nvkm_fifo_ctxsw_in_progress()
47 return engn->func->chsw ? engn->func->chsw(engn) : false; in nvkm_fifo_ctxsw_in_progress()
78 if (oclass->engn == &fifo->func->cgrp.user) in nvkm_fifo_class_new()
81 if (oclass->engn == &fifo->func->chan.user) in nvkm_fifo_class_new()
105 oclass->engn = &fifo->func->cgrp.user; in nvkm_fifo_class_get()
115 oclass->engn = &fifo->func->chan.user; in nvkm_fifo_class_get()
173 struct nvkm_engn *engn; in nvkm_fifo_info() local
191 nvkm_runl_foreach_engn(engn, runl) { in nvkm_fifo_info()
[all …]
g84.c
106 g84_ectx_bind(struct nvkm_engn *engn, struct nvkm_cctx *cctx, struct nvkm_chan *chan) in g84_ectx_bind() argument
113 switch (engn->engine->subdev.type) { in g84_ectx_bind()
130 save = nvkm_mask(device, 0x002520, 0x0000003f, BIT(engn->id - 1)); in g84_ectx_bind()
200 nvkm_runl_add(runl, 1, fifo->func->engn, NVKM_ENGINE_GR, 0); in g84_fifo_runl_ctor()
201 nvkm_runl_add(runl, 2, fifo->func->engn, NVKM_ENGINE_MPEG, 0); in g84_fifo_runl_ctor()
202 nvkm_runl_add(runl, 3, fifo->func->engn, NVKM_ENGINE_ME, 0); in g84_fifo_runl_ctor()
203 nvkm_runl_add(runl, 4, fifo->func->engn, NVKM_ENGINE_VP, 0); in g84_fifo_runl_ctor()
204 nvkm_runl_add(runl, 5, fifo->func->engn, NVKM_ENGINE_CIPHER, 0); in g84_fifo_runl_ctor()
205 nvkm_runl_add(runl, 6, fifo->func->engn, NVKM_ENGINE_BSP, 0); in g84_fifo_runl_ctor()
220 .engn = &g84_engn,
tu102.c
136 tu102_fifo_intr_ctxsw_timeout_info(struct nvkm_engn *engn, u32 info) in tu102_fifo_intr_ctxsw_timeout_info() argument
138 struct nvkm_runl *runl = engn->runl; in tu102_fifo_intr_ctxsw_timeout_info()
143 ENGN_DEBUG(engn, "CTXSW_TIMEOUT %08x", info); in tu102_fifo_intr_ctxsw_timeout_info()
172 struct nvkm_engn *engn; in tu102_fifo_intr_ctxsw_timeout() local
177 nvkm_runl_foreach_engn_cond(engn, runl, engm & BIT(engn->id)) { in tu102_fifo_intr_ctxsw_timeout()
178 info = nvkm_rd32(device, 0x003200 + (engn->id * 4)); in tu102_fifo_intr_ctxsw_timeout()
179 tu102_fifo_intr_ctxsw_timeout_info(engn, info); in tu102_fifo_intr_ctxsw_timeout()
275 .engn = &gv100_engn,
gv100.c
92 gv100_ectx_bind(struct nvkm_engn *engn, struct nvkm_cctx *cctx, struct nvkm_chan *chan) in gv100_ectx_bind() argument
117 gv100_ectx_ce_bind(struct nvkm_engn *engn, struct nvkm_cctx *cctx, struct nvkm_chan *chan) in gv100_ectx_ce_bind() argument
129 gv100_ectx_ce_ctor(struct nvkm_engn *engn, struct nvkm_vctx *vctx) in gv100_ectx_ce_ctor() argument
459 struct nvkm_engn *engn; in gv100_fifo_intr_ctxsw_timeout() local
462 nvkm_runl_foreach_engn_cond(engn, runl, engm & BIT(engn->id)) in gv100_fifo_intr_ctxsw_timeout()
463 nvkm_runl_rc_engn(runl, engn); in gv100_fifo_intr_ctxsw_timeout()
481 .engn = &gv100_engn,
runl.h
114 #define nvkm_runl_find_engn(engn,runl,cond) nvkm_list_find(engn, &(runl)->engns, head, (cond)) argument
119 #define nvkm_runl_foreach_engn(engn,runl) list_for_each_entry((engn), &(runl)->engns, head) argument
120 #define nvkm_runl_foreach_engn_cond(engn,runl,cond) \ argument
121 nvkm_list_foreach(engn, &(runl)->engns, head, (cond))
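
Aside: the three runl.h macros above are thin wrappers over intrusive-list iteration across runl->engns, and the cond variants simply skip entries for which the condition is false (which is how the runl.c and base.c hits walk only engines with a given callback). The sketch below is a minimal, self-contained model of that pattern; only the engn/runl/engns/head names come from the hits above, and the list_for_each_entry/container_of definitions are simplified stand-ins for the real ones in <linux/list.h> (GCC/Clang only, since it uses __typeof__):

    /* Sketch only: models what nvkm_runl_foreach_engn(engn, runl)
     * expands to; struct engn/runl are stand-ins for the nvkm types. */
    #include <stddef.h>
    #include <stdio.h>

    struct list_head { struct list_head *next, *prev; };

    #define container_of(ptr, type, member) \
            ((type *)((char *)(ptr) - offsetof(type, member)))

    /* kernel-style list_for_each_entry(pos, head, member) */
    #define list_for_each_entry(pos, head, member)                            \
            for (pos = container_of((head)->next, __typeof__(*pos), member);  \
                 &pos->member != (head);                                      \
                 pos = container_of(pos->member.next, __typeof__(*pos), member))

    struct engn { int id; struct list_head head; };   /* cf. nvkm_engn */
    struct runl { struct list_head engns; };          /* cf. nvkm_runl */

    int main(void)
    {
            struct engn e0 = { .id = 0 }, e1 = { .id = 1 };
            struct runl runl;
            struct engn *engn;

            /* hand-build the circular list: engns -> e0 -> e1 -> engns */
            runl.engns.next = &e0.head; runl.engns.prev = &e1.head;
            e0.head.next = &e1.head;    e0.head.prev = &runl.engns;
            e1.head.next = &runl.engns; e1.head.prev = &e0.head;

            /* what nvkm_runl_foreach_engn(engn, runl) expands to */
            list_for_each_entry(engn, &runl.engns, head)
                    printf("engn %d\n", engn->id);
            return 0;
    }

Because the list nodes are embedded in the entries themselves, iteration needs no allocation and container_of recovers the enclosing struct from the node address.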
gk104.c
134 gk104_ectx_bind(struct nvkm_engn *engn, struct nvkm_cctx *cctx, struct nvkm_chan *chan) in gk104_ectx_bind() argument
139 switch (engn->engine->subdev.type) { in gk104_ectx_bind()
153 if (!engn->engine->subdev.inst) in gk104_ectx_bind()
178 gk104_ectx_ctor(struct nvkm_engn *engn, struct nvkm_vctx *vctx) in gk104_ectx_ctor() argument
204 gk104_engn_status(struct nvkm_engn *engn, struct gk104_engn_status *status) in gk104_engn_status() argument
206 u32 stat = nvkm_rd32(engn->runl->fifo->engine.subdev.device, 0x002640 + (engn->id * 0x08)); in gk104_engn_status()
221 if (nvkm_engine_chsw_load(engn->engine)) in gk104_engn_status()
236 ENGN_DEBUG(engn, "%08x: busy %d faulted %d chsw %d save %d load %d %sid %d%s-> %sid %d%s", in gk104_engn_status()
245 gk104_engn_cxid(struct nvkm_engn *engn, bool *cgid) in gk104_engn_cxid() argument
249 gk104_engn_status(engn, &status); in gk104_engn_cxid()
[all …]
nv40.c
111 nv40_eobj_ramht_add(struct nvkm_engn *engn, struct nvkm_object *eobj, struct nvkm_chan *chan) in nv40_eobj_ramht_add() argument
115 u32 context = chan->id << 23 | engn->id << 20; in nv40_eobj_ramht_add()
125 nv40_ectx_bind(struct nvkm_engn *engn, struct nvkm_cctx *cctx, struct nvkm_chan *chan) in nv40_ectx_bind() argument
133 switch (engn->engine->subdev.type) { in nv40_ectx_bind()
241 .engn = &nv40_engn,
nv50.c
42 nv50_eobj_ramht_add(struct nvkm_engn *engn, struct nvkm_object *eobj, struct nvkm_chan *chan) in nv50_eobj_ramht_add() argument
44 return nvkm_ramht_insert(chan->ramht, eobj, 0, 4, eobj->handle, engn->id << 20); in nv50_eobj_ramht_add()
150 nv50_ectx_bind(struct nvkm_engn *engn, struct nvkm_cctx *cctx, struct nvkm_chan *chan) in nv50_ectx_bind() argument
157 switch (engn->engine->subdev.type) { in nv50_ectx_bind()
387 .engn = &nv50_engn,
nv04.c
192 nv04_eobj_ramht_add(struct nvkm_engn *engn, struct nvkm_object *eobj, struct nvkm_chan *chan) in nv04_eobj_ramht_add() argument
196 u32 context = 0x80000000 | chan->id << 24 | engn->id << 16; in nv04_eobj_ramht_add()
504 nvkm_runl_add(runl, 1, fifo->func->engn , NVKM_ENGINE_GR, 0); in nv04_fifo_runl_ctor()
505 nvkm_runl_add(runl, 2, fifo->func->engn , NVKM_ENGINE_MPEG, 0); /* NV31- */ in nv04_fifo_runl_ctor()
532 .engn = &nv04_engn,
ga102.c
34 .engn = &ga100_engn,
gk20a.c
41 .engn = &gk104_engn,
gm200.c
55 .engn = &gk104_engn,
gk208.c
66 .engn = &gk104_engn,
nv10.c
102 .engn = &nv04_engn,
cgrp.h
20 struct nvkm_engn *engn; member
/linux-6.6.21/drivers/gpu/drm/nouveau/nvkm/engine/sw/
base.c
54 const struct nvkm_sw_chan_sclass *sclass = oclass->engn; in nvkm_sw_oclass_new()
66 oclass->engn = &sw->func->sclass[index]; in nvkm_sw_oclass_get()
/linux-6.6.21/drivers/gpu/drm/nouveau/include/nvkm/core/
oclass.h
22 const void *engn; member
/linux-6.6.21/drivers/gpu/drm/nouveau/nvkm/engine/dma/
base.c
74 sclass->engn = oclass; in nvkm_dma_oclass_base_get()
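
The last three hits show the idiom behind oclass->engn: oclass.h declares it as a typeless cookie (const void *), the producer side stashes a concrete pointer in it (dma/base.c above), and the consumer casts it back (sw/base.c above). A minimal standalone sketch of that store-and-cast pattern; all names below are hypothetical stand-ins, not the nvkm definitions:

    #include <stdio.h>

    /* opaque per-engine cookie, modeled on 'const void *engn' in oclass.h */
    struct oclass { const void *engn; };

    /* one concrete type a producer might stash behind the cookie */
    struct sw_sclass { const char *name; };

    /* consumer side: cast the cookie back to the type the producer
     * stored, as nvkm_sw_oclass_new() does in the hit above */
    static void object_new(const struct oclass *oclass)
    {
            const struct sw_sclass *sclass = oclass->engn;
            printf("constructing %s\n", sclass->name);
    }

    int main(void)
    {
            static const struct sw_sclass sclass = { .name = "sw0" };
            struct oclass oclass = { .engn = &sclass };  /* producer side */
            object_new(&oclass);
            return 0;
    }

The cast is unchecked, so producer and consumer must agree out of band on which concrete type sits behind the cookie.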
