
Searched refs:runl (Results 1 – 25 of 34) sorted by relevance

/linux/drivers/gpu/drm/nouveau/nvkm/engine/fifo/
runl.c
46 chan = nvkm_runl_chan_get_chid(engn->runl, id, pirqflags); in nvkm_engn_cgrp_get()
50 cgrp = nvkm_runl_cgrp_get_cgid(engn->runl, id, pirqflags); in nvkm_engn_cgrp_get()
58 nvkm_runl_rc(struct nvkm_runl *runl) in nvkm_runl_rc() argument
60 struct nvkm_fifo *fifo = runl->fifo; in nvkm_runl_rc()
69 BUG_ON(!mutex_is_locked(&runl->mutex)); in nvkm_runl_rc()
70 rc = atomic_xchg(&runl->rc_pending, 0); in nvkm_runl_rc()
75 nvkm_runl_foreach_cgrp_safe(cgrp, gtmp, runl) { in nvkm_runl_rc()
87 if (runl->func->preempt) { in nvkm_runl_rc()
88 for (i = 0; i < runl->runq_nr; i++) { in nvkm_runl_rc()
89 struct nvkm_runq *runq = runl->runq[i]; in nvkm_runl_rc()
[all …]
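
The nvkm_runl_rc() hits above show the recovery path claiming work under runl->mutex with atomic_xchg(&runl->rc_pending, 0), so each trigger is consumed exactly once. Below is a minimal userspace sketch of that claim-and-clear idiom, using C11 atomics and a pthread mutex as stand-ins for the kernel's atomic_t and struct mutex; all names here are illustrative, not nouveau's:

#include <pthread.h>
#include <stdatomic.h>
#include <stdio.h>

struct runl {
	pthread_mutex_t mutex;  /* serialises recovery, like runl->mutex */
	atomic_int rc_pending;  /* set by the trigger side, consumed below */
};

/* Mark the runlist as needing recovery; callable from any context. */
static void runl_rc_trigger(struct runl *runl)
{
	atomic_store(&runl->rc_pending, 1);
}

/* Handle recovery at most once per trigger: the exchange claims and
 * clears the flag in one step, so concurrent triggers are never lost
 * and never handled twice. */
static void runl_rc(struct runl *runl)
{
	pthread_mutex_lock(&runl->mutex);
	if (atomic_exchange(&runl->rc_pending, 0))
		printf("recovering runlist\n");
	pthread_mutex_unlock(&runl->mutex);
}

int main(void)
{
	struct runl runl = { PTHREAD_MUTEX_INITIALIZER, 0 };

	runl_rc_trigger(&runl);
	runl_rc(&runl);  /* consumes the trigger and recovers */
	runl_rc(&runl);  /* nothing pending: no-op */
	return 0;
}

The exchange is what makes re-triggering during recovery safe: a flag set after the xchg is simply picked up by the next pass.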
ga100.c
39 return (chan->cgrp->runl->doorbell << 16) | chan->id; in ga100_chan_doorbell_handle()
45 struct nvkm_runl *runl = chan->cgrp->runl; in ga100_chan_stop() local
47 nvkm_wr32(runl->fifo->engine.subdev.device, runl->chan + (chan->id * 4), 0x00000003); in ga100_chan_stop()
53 struct nvkm_runl *runl = chan->cgrp->runl; in ga100_chan_start() local
54 struct nvkm_device *device = runl->fifo->engine.subdev.device; in ga100_chan_start()
57 nvkm_wr32(device, runl->chan + (chan->id * 4), 0x00000002); in ga100_chan_start()
58 nvkm_wr32(device, runl->addr + 0x0090, (gfid << 16) | chan->id); /* INTERNAL_DOORBELL. */ in ga100_chan_start()
64 struct nvkm_runl *runl = chan->cgrp->runl; in ga100_chan_unbind() local
66 nvkm_wr32(runl->fifo->engine.subdev.device, runl->chan + (chan->id * 4), 0xffffffff); in ga100_chan_unbind()
84 nvkm_wo32(chan->inst, 0x0f8, 0x80000000 | chan->cgrp->runl->nonstall.vector); in ga100_chan_ramfc_write()
[all …]
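
ga100_chan_doorbell_handle() above packs the runlist's doorbell index into bits 31:16 and the channel id into bits 15:0 of a single handle. A standalone sketch of that packing (the unpack asserts are my addition for illustration):

#include <assert.h>
#include <stdint.h>

/* Pack as in ga100_chan_doorbell_handle(): doorbell index in bits 31:16,
 * channel id in bits 15:0. */
static uint32_t doorbell_handle(uint32_t doorbell, uint32_t chid)
{
	return (doorbell << 16) | (chid & 0xffff);
}

int main(void)
{
	uint32_t handle = doorbell_handle(3, 42);

	assert((handle >> 16) == 3);      /* recover the doorbell index */
	assert((handle & 0xffff) == 42);  /* recover the channel id */
	return 0;
}

The tu102.c and r535.c results further down use the same layout with runl->id in the high half.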
nv50.c
50 struct nvkm_device *device = chan->cgrp->runl->fifo->engine.subdev.device; in nv50_chan_stop()
58 struct nvkm_device *device = chan->cgrp->runl->fifo->engine.subdev.device; in nv50_chan_start()
66 struct nvkm_device *device = chan->cgrp->runl->fifo->engine.subdev.device; in nv50_chan_unbind()
74 struct nvkm_device *device = chan->cgrp->runl->fifo->engine.subdev.device; in nv50_chan_bind()
82 struct nvkm_device *device = chan->cgrp->runl->fifo->engine.subdev.device; in nv50_chan_ramfc_write()
152 struct nvkm_subdev *subdev = &chan->cgrp->runl->fifo->engine.subdev; in nv50_ectx_bind()
218 nv50_runl_pending(struct nvkm_runl *runl) in nv50_runl_pending() argument
220 return nvkm_rd32(runl->fifo->engine.subdev.device, 0x0032ec) & 0x00000100; in nv50_runl_pending()
224 nv50_runl_wait(struct nvkm_runl *runl) in nv50_runl_wait() argument
226 struct nvkm_fifo *fifo = runl->fifo; in nv50_runl_wait()
[all …]
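
nv50_runl_pending() above is a single-bit MMIO test (bit 8 of register 0x0032ec), and nv50_runl_wait() builds a wait loop on top of it; the wait body is truncated in the snippet, so the sketch below only shows the bounded-poll shape such a wait needs, with a fake register read standing in for nvkm_rd32():

#include <stdbool.h>
#include <stdio.h>

/* Fake MMIO read standing in for nvkm_rd32(device, 0x0032ec): reports
 * "pending" for the first couple of polls, then clears. */
static unsigned int polls;

static bool runl_pending(void)
{
	return ++polls < 3;
}

/* Bounded poll: retry the pending bit with a budget rather than
 * spinning forever on a wedged runlist. */
static int runl_wait(int budget)
{
	while (budget--) {
		if (!runl_pending())
			return 0;   /* runlist update has landed */
	}
	return -1;                  /* budget exhausted, treat as timeout */
}

int main(void)
{
	printf("wait returned %d after %u polls\n", runl_wait(1000), polls);
	return 0;
}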
chan.c
44 struct nvkm_runl *runl = cgrp->runl; in nvkm_chan_cctx_bind() local
56 nvkm_runl_block(runl); in nvkm_chan_cctx_bind()
66 nvkm_runl_allow(runl); in nvkm_chan_cctx_bind()
138 struct nvkm_runl *runl = chan->cgrp->runl; in nvkm_chan_preempt_locked() local
145 return nvkm_runl_preempt_wait(runl); in nvkm_chan_preempt_locked()
156 mutex_lock(&chan->cgrp->runl->mutex); in nvkm_chan_preempt()
158 mutex_unlock(&chan->cgrp->runl->mutex); in nvkm_chan_preempt()
166 struct nvkm_runl *runl = cgrp->runl; in nvkm_chan_remove_locked() local
173 runl->cgrp_nr--; in nvkm_chan_remove_locked()
176 runl->chan_nr--; in nvkm_chan_remove_locked()
[all …]
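
The chan.c hits show the usual locked/_locked split: nvkm_chan_preempt() takes cgrp->runl->mutex and calls nvkm_chan_preempt_locked(), which paths that already hold the mutex can call directly. A generic sketch of the idiom (names hypothetical):

#include <pthread.h>
#include <stdio.h>

static pthread_mutex_t runl_mutex = PTHREAD_MUTEX_INITIALIZER;

/* Caller must already hold runl_mutex, as with nvkm_chan_preempt_locked(). */
static int chan_preempt_locked(int chid)
{
	printf("preempting channel %d\n", chid);
	return 0;  /* the real code would now wait for the preempt */
}

/* Public entry: take the runlist mutex, delegate to the _locked variant. */
static int chan_preempt(int chid)
{
	pthread_mutex_lock(&runl_mutex);
	int ret = chan_preempt_locked(chid);
	pthread_mutex_unlock(&runl_mutex);
	return ret;
}

int main(void)
{
	return chan_preempt(7);
}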
g98.c
31 struct nvkm_runl *runl; in g98_fifo_runl_ctor() local
33 runl = nvkm_runl_new(fifo, 0, 0, 0); in g98_fifo_runl_ctor()
34 if (IS_ERR(runl)) in g98_fifo_runl_ctor()
35 return PTR_ERR(runl); in g98_fifo_runl_ctor()
37 nvkm_runl_add(runl, 0, fifo->func->engn_sw, NVKM_ENGINE_SW, 0); in g98_fifo_runl_ctor()
38 nvkm_runl_add(runl, 0, fifo->func->engn_sw, NVKM_ENGINE_DMAOBJ, 0); in g98_fifo_runl_ctor()
39 nvkm_runl_add(runl, 1, fifo->func->engn, NVKM_ENGINE_GR, 0); in g98_fifo_runl_ctor()
40 nvkm_runl_add(runl, 2, fifo->func->engn, NVKM_ENGINE_MSPPP, 0); in g98_fifo_runl_ctor()
41 nvkm_runl_add(runl, 3, fifo->func->engn, NVKM_ENGINE_CE, 0); in g98_fifo_runl_ctor()
42 nvkm_runl_add(runl, 4, fifo->func->engn, NVKM_ENGINE_MSPDEC, 0); in g98_fifo_runl_ctor()
[all …]
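
g98_fifo_runl_ctor() above is the whole construction recipe for a single-runlist chipset: nvkm_runl_new() allocates the runlist, then one nvkm_runl_add() per engine binds it to a runlist entry id (note SW and DMAOBJ share entry 0). The userspace mock below imitates that shape; every type and helper in it is a stand-in, not nouveau's API:

#include <stdio.h>

enum engine { ENGINE_SW, ENGINE_DMAOBJ, ENGINE_GR, ENGINE_CE };

struct engn {
	int entry;          /* runlist entry id */
	enum engine type;
};

struct runl {
	struct engn engns[8];
	int engn_nr;
};

/* Register an engine on the runlist at a fixed entry id, in the spirit
 * of nvkm_runl_add(runl, entry, func, engine, 0). */
static void runl_add(struct runl *runl, int entry, enum engine type)
{
	runl->engns[runl->engn_nr++] = (struct engn){ entry, type };
}

int main(void)
{
	struct runl runl = { 0 };

	/* Mirror the g98 ordering: SW and DMAOBJ share entry 0. */
	runl_add(&runl, 0, ENGINE_SW);
	runl_add(&runl, 0, ENGINE_DMAOBJ);
	runl_add(&runl, 1, ENGINE_GR);
	runl_add(&runl, 3, ENGINE_CE);

	for (int i = 0; i < runl.engn_nr; i++)
		printf("entry %d -> engine %d\n",
		       runl.engns[i].entry, runl.engns[i].type);
	return 0;
}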
base.c
41 struct nvkm_runl *runl; in nvkm_fifo_ctxsw_in_progress() local
44 nvkm_runl_foreach(runl, engine->subdev.device->fifo) { in nvkm_fifo_ctxsw_in_progress()
45 nvkm_runl_foreach_engn(engn, runl) { in nvkm_fifo_ctxsw_in_progress()
128 struct nvkm_runl *runl; in nvkm_fifo_fini() local
132 nvkm_runl_foreach(runl, fifo) in nvkm_fifo_fini()
133 nvkm_runl_fini(runl); in nvkm_fifo_fini()
143 struct nvkm_runl *runl; in nvkm_fifo_init() local
156 nvkm_runl_foreach(runl, fifo) { in nvkm_fifo_init()
157 if (runl->func->init) in nvkm_fifo_init()
158 runl->func->init(runl); in nvkm_fifo_init()
[all …]
gk104.c
42 struct nvkm_device *device = chan->cgrp->runl->fifo->engine.subdev.device; in gk104_chan_stop()
50 struct nvkm_device *device = chan->cgrp->runl->fifo->engine.subdev.device; in gk104_chan_start()
58 struct nvkm_device *device = chan->cgrp->runl->fifo->engine.subdev.device; in gk104_chan_unbind()
66 struct nvkm_device *device = chan->cgrp->runl->fifo->engine.subdev.device; in gk104_chan_bind_inst()
74 struct nvkm_runl *runl = chan->cgrp->runl; in gk104_chan_bind() local
75 struct nvkm_device *device = runl->fifo->engine.subdev.device; in gk104_chan_bind()
77 nvkm_mask(device, 0x800004 + (chan->id * 8), 0x000f0000, runl->id << 16); in gk104_chan_bind()
206 u32 stat = nvkm_rd32(engn->runl->fifo->engine.subdev.device, 0x002640 + (engn->id * 0x08)); in gk104_engn_status()
404 gk104_runl_fault_clear(struct nvkm_runl *runl) in gk104_runl_fault_clear() argument
406 nvkm_wr32(runl->fifo->engine.subdev.device, 0x00262c, BIT(runl->id)); in gk104_runl_fault_clear()
[all …]
tu102.c
37 return (chan->cgrp->runl->id << 16) | chan->id; in tu102_chan_doorbell_handle()
43 struct nvkm_device *device = chan->cgrp->runl->fifo->engine.subdev.device; in tu102_chan_start()
63 tu102_runl_pending(struct nvkm_runl *runl) in tu102_runl_pending() argument
65 struct nvkm_device *device = runl->fifo->engine.subdev.device; in tu102_runl_pending()
67 return nvkm_rd32(device, 0x002b0c + (runl->id * 0x10)) & 0x00008000; in tu102_runl_pending()
71 tu102_runl_commit(struct nvkm_runl *runl, struct nvkm_memory *memory, u32 start, int count) in tu102_runl_commit() argument
73 struct nvkm_device *device = runl->fifo->engine.subdev.device; in tu102_runl_commit()
77 nvkm_wr32(device, 0x002b00 + (runl->id * 0x10), lower_32_bits(addr)); in tu102_runl_commit()
78 nvkm_wr32(device, 0x002b04 + (runl->id * 0x10), upper_32_bits(addr)); in tu102_runl_commit()
79 nvkm_wr32(device, 0x002b08 + (runl->id * 0x10), count); in tu102_runl_commit()
[all …]
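
tu102_runl_commit() above shows the Turing submission interface: each runlist owns a 0x10-stride register block at 0x002b00 taking the runlist buffer address as two 32-bit halves, then an entry count. The sketch below replays those writes against a mock register file; the offsets come straight from the snippet, everything else is invented for illustration:

#include <stdint.h>
#include <stdio.h>

static uint32_t mmio[0x4000];  /* mock register file, word indexed */

static void wr32(uint32_t addr, uint32_t data)
{
	mmio[addr / 4] = data;
	printf("wr32 %06x <- %08x\n", (unsigned)addr, (unsigned)data);
}

/* Mirror tu102_runl_commit(): base lo at +0x0, base hi at +0x4, entry
 * count at +0x8, inside the 0x10-stride block for this runlist. The
 * count write is what actually kicks off the runlist update. */
static void runl_commit(int runl_id, uint64_t addr, int count)
{
	uint32_t base = 0x002b00 + (runl_id * 0x10);

	wr32(base + 0x0, (uint32_t)addr);          /* lower_32_bits(addr) */
	wr32(base + 0x4, (uint32_t)(addr >> 32));  /* upper_32_bits(addr) */
	wr32(base + 0x8, (uint32_t)count);
}

int main(void)
{
	runl_commit(1, 0x100000000ull, 4);
	return 0;
}

tu102_runl_pending() in the same snippet then watches bit 15 of 0x002b0c + (runl->id * 0x10) to learn when the update has been consumed.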
runl.h
25 struct nvkm_runl *runl; member
41 RUNL_PRINT((e)->runl, l, p, "%02d[%8s]:"f, (e)->id, (e)->engine->subdev.name, ##a)
120 #define nvkm_runl_find_engn(engn,runl,cond) nvkm_list_find(engn, &(runl)->engns, head, (cond)) argument
123 #define nvkm_runl_foreach(runl,fifo) list_for_each_entry((runl), &(fifo)->runls, head) argument
124 #define nvkm_runl_foreach_cond(runl,fifo,cond) nvkm_list_foreach(runl, &(fifo)->runls, head, (cond)) argument
125 #define nvkm_runl_foreach_engn(engn,runl) list_for_each_entry((engn), &(runl)->engns, head) argument
126 #define nvkm_runl_foreach_engn_cond(engn,runl,cond) \ argument
127 nvkm_list_foreach(engn, &(runl)->engns, head, (cond))
128 #define nvkm_runl_foreach_cgrp(cgrp,runl) list_for_each_entry((cgrp), &(runl)->cgrps, head) argument
129 #define nvkm_runl_foreach_cgrp_safe(cgrp,gtmp,runl) \ argument
[all …]
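
The runl.h hits are thin iteration/find wrappers over the kernel's intrusive lists (fifo->runls, runl->engns, runl->cgrps). To show how such a pair expands, here is a self-contained analogue over a hand-rolled singly linked list; the real macros sit on list_for_each_entry()/nvkm_list_find, so this is only a structural sketch:

#include <stddef.h>
#include <stdio.h>

struct engn {
	int id;
	struct engn *next;  /* stand-in for the kernel's list_head linkage */
};

struct runl {
	struct engn *engns;
};

/* Walk every engine on a runlist, like nvkm_runl_foreach_engn(). */
#define runl_foreach_engn(engn, runl) \
	for ((engn) = (runl)->engns; (engn); (engn) = (engn)->next)

/* Leave engn at the first match of cond, or NULL if none matches,
 * like nvkm_runl_find_engn(engn, runl, cond). */
#define runl_find_engn(engn, runl, cond) \
	do { \
		runl_foreach_engn(engn, runl) \
			if (cond) \
				break; \
	} while (0)

int main(void)
{
	struct engn ce = { .id = 3, .next = NULL };
	struct engn gr = { .id = 1, .next = &ce };
	struct runl runl = { .engns = &gr };
	struct engn *engn;

	/* Same shape as the uchan.c hit: find the engine backing a class. */
	runl_find_engn(engn, &runl, engn->id == 3);
	printf("found engine %d\n", engn ? engn->id : -1);
	return 0;
}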
gf100.c
43 nvkm_wr32(chan->cgrp->runl->fifo->engine.subdev.device, 0x002634, chan->id); in gf100_chan_preempt()
49 struct nvkm_device *device = chan->cgrp->runl->fifo->engine.subdev.device; in gf100_chan_stop()
57 struct nvkm_device *device = chan->cgrp->runl->fifo->engine.subdev.device; in gf100_chan_start()
67 struct nvkm_fifo *fifo = chan->cgrp->runl->fifo; in gf100_chan_unbind()
79 struct nvkm_device *device = chan->cgrp->runl->fifo->engine.subdev.device; in gf100_chan_bind()
204 struct nvkm_runl *runl = engn->runl; in gf100_engn_mmu_fault_triggered() local
205 struct nvkm_fifo *fifo = runl->fifo; in gf100_engn_mmu_fault_triggered()
215 if (atomic_dec_and_test(&runl->rc_triggered)) in gf100_engn_mmu_fault_triggered()
224 struct nvkm_runl *runl = engn->runl; in gf100_engn_mmu_fault_trigger() local
225 struct nvkm_fifo *fifo = runl->fifo; in gf100_engn_mmu_fault_trigger()
[all …]
g84.c
37 struct nvkm_device *device = chan->cgrp->runl->fifo->engine.subdev.device; in g84_chan_bind()
45 struct nvkm_device *device = chan->cgrp->runl->fifo->engine.subdev.device; in g84_chan_ramfc_write()
108 struct nvkm_subdev *subdev = &chan->cgrp->runl->fifo->engine.subdev; in g84_ectx_bind()
192 struct nvkm_runl *runl; in g84_fifo_runl_ctor() local
194 runl = nvkm_runl_new(fifo, 0, 0, 0); in g84_fifo_runl_ctor()
195 if (IS_ERR(runl)) in g84_fifo_runl_ctor()
196 return PTR_ERR(runl); in g84_fifo_runl_ctor()
198 nvkm_runl_add(runl, 0, fifo->func->engn_sw, NVKM_ENGINE_SW, 0); in g84_fifo_runl_ctor()
199 nvkm_runl_add(runl, 0, fifo->func->engn_sw, NVKM_ENGINE_DMAOBJ, 0); in g84_fifo_runl_ctor()
200 nvkm_runl_add(runl, 1, fifo->func->engn, NVKM_ENGINE_GR, 0); in g84_fifo_runl_ctor()
[all …]
r535.c
51 return (chan->cgrp->runl->id << 16) | chan->id; in r535_chan_doorbell_handle()
67 struct nvkm_fifo *fifo = chan->cgrp->runl->fifo; in r535_chan_ramfc_clear()
82 struct nvkm_fifo *fifo = chan->cgrp->runl->fifo; in r535_chan_ramfc_write()
97 nvkm_runl_foreach_engn(engn, chan->cgrp->runl) { in r535_chan_ramfc_write()
230 struct nvkm_runl *runl = chan->cgrp->runl; in r535_chan_id_put() local
231 struct nvkm_fifo *fifo = runl->fifo; in r535_chan_id_put()
243 nvkm_chid_put(runl->chid, userd->chid, &chan->cgrp->lock); in r535_chan_id_put()
259 struct nvkm_runl *runl = chan->cgrp->runl; in r535_chan_id_get_locked() local
260 struct nvkm_fifo *fifo = runl->fifo; in r535_chan_id_get_locked()
267 RUNL_DEBUG(runl, "ouserd %llx", ouserd); in r535_chan_id_get_locked()
[all …]
cgrp.c
178 struct nvkm_runl *runl = cgrp->runl; in nvkm_cgrp_del() local
180 if (runl->cgid) in nvkm_cgrp_del()
181 nvkm_chid_put(runl->cgid, cgrp->id, &cgrp->lock); in nvkm_cgrp_del()
222 nvkm_cgrp_new(struct nvkm_runl *runl, const char *name, struct nvkm_vmm *vmm, bool hw, in nvkm_cgrp_new() argument
230 cgrp->func = runl->fifo->func->cgrp.func; in nvkm_cgrp_new()
232 cgrp->runl = runl; in nvkm_cgrp_new()
245 if (runl->cgid) { in nvkm_cgrp_new()
246 cgrp->id = nvkm_chid_get(runl->cgid, cgrp); in nvkm_cgrp_new()
248 RUNL_ERROR(runl, "!cgids"); in nvkm_cgrp_new()
ucgrp.c
42 return nvkm_uchan_new(cgrp->runl->fifo, cgrp, oclass, argv, argc, pobject); in nvkm_ucgrp_chan_new()
49 struct nvkm_fifo *fifo = cgrp->runl->fifo; in nvkm_ucgrp_sclass()
85 struct nvkm_runl *runl; in nvkm_ucgrp_new() local
98 runl = nvkm_runl_get(fifo, args->v0.runlist, 0); in nvkm_ucgrp_new()
99 if (!runl) in nvkm_ucgrp_new()
115 ret = nvkm_cgrp_new(runl, args->v0.name, vmm, true, &ucgrp->cgrp); in nvkm_ucgrp_new()
uchan.c
45 struct nvkm_runl *runl = chan->cgrp->runl; in nvkm_uchan_uevent() local
55 return nvkm_uevent_add(uevent, &runl->fifo->nonstall.event, runl->id, in nvkm_uchan_uevent()
58 return nvkm_uevent_add(uevent, &runl->chid->event, chan->id, in nvkm_uchan_uevent()
164 engn = nvkm_runl_find_engn(engn, cgrp->runl, engn->engine == oclass->engine); in nvkm_uchan_object_new()
210 nvkm_runl_foreach_engn(engn, chan->cgrp->runl) { in nvkm_uchan_sclass()
215 if (engn->runl->func->runqs) { in nvkm_uchan_sclass()
259 struct nvkm_device *device = chan->cgrp->runl->fifo->engine.subdev.device; in nvkm_uchan_map()
334 struct nvkm_runl *runl; in nvkm_uchan_new() local
350 runl = nvkm_runl_get(fifo, args->v0.runlist, 0); in nvkm_uchan_new()
351 if (!runl) in nvkm_uchan_new()
[all …]
nv04.c
43 struct nvkm_fifo *fifo = chan->cgrp->runl->fifo; in nv04_chan_stop()
94 struct nvkm_fifo *fifo = chan->cgrp->runl->fifo; in nv04_chan_start()
105 struct nvkm_memory *ramfc = chan->cgrp->runl->fifo->engine.subdev.device->imem->ramfc; in nv04_chan_ramfc_clear()
118 struct nvkm_memory *ramfc = chan->cgrp->runl->fifo->engine.subdev.device->imem->ramfc; in nv04_chan_ramfc_write()
183 struct nvkm_fifo *fifo = chan->cgrp->runl->fifo; in nv04_eobj_ramht_del()
194 struct nvkm_fifo *fifo = chan->cgrp->runl->fifo; in nv04_eobj_ramht_add()
496 struct nvkm_runl *runl; in nv04_fifo_runl_ctor() local
498 runl = nvkm_runl_new(fifo, 0, 0, 0); in nv04_fifo_runl_ctor()
499 if (IS_ERR(runl)) in nv04_fifo_runl_ctor()
500 return PTR_ERR(runl); in nv04_fifo_runl_ctor()
[all …]
gv100.c
177 gv100_runl_preempt(struct nvkm_runl *runl) in gv100_runl_preempt() argument
179 nvkm_wr32(runl->fifo->engine.subdev.device, 0x002638, BIT(runl->id)); in gv100_runl_preempt()
458 struct nvkm_runl *runl; in gv100_fifo_intr_ctxsw_timeout() local
461 nvkm_runl_foreach(runl, fifo) { in gv100_fifo_intr_ctxsw_timeout()
462 nvkm_runl_foreach_engn_cond(engn, runl, engm & BIT(engn->id)) in gv100_fifo_intr_ctxsw_timeout()
463 nvkm_runl_rc_engn(runl, engn); in gv100_fifo_intr_ctxsw_timeout()
479 .runl = &gv100_runl,
cgrp.h
34 struct nvkm_runl *runl; member
71 #define CGRP_PRCLI(c,l,p,f,a...) RUNL_PRINT((c)->runl, l, p, "%04x:[%s]"f, (c)->id, (c)->name, ##a)
72 #define CGRP_PRINT(c,l,p,f,a...) RUNL_PRINT((c)->runl, l, p, "%04x:"f, (c)->id, ##a)
nv40.c
41 struct nvkm_memory *ramfc = chan->cgrp->runl->fifo->engine.subdev.device->imem->ramfc; in nv40_chan_ramfc_write()
113 struct nvkm_fifo *fifo = chan->cgrp->runl->fifo; in nv40_eobj_ramht_add()
127 struct nvkm_fifo *fifo = chan->cgrp->runl->fifo; in nv40_ectx_bind()
240 .runl = &nv04_runl,
nv10.c
39 struct nvkm_memory *ramfc = chan->cgrp->runl->fifo->engine.subdev.device->imem->ramfc; in nv10_chan_ramfc_write()
101 .runl = &nv04_runl,
gk110.c
63 nvkm_wr32(cgrp->runl->fifo->engine.subdev.device, 0x002634, 0x01000000 | cgrp->id); in gk110_cgrp_preempt()
119 .runl = &gk110_runl,
nv17.c
40 struct nvkm_memory *ramfc = chan->cgrp->runl->fifo->engine.subdev.device->imem->ramfc; in nv17_chan_ramfc_write()
130 .runl = &nv04_runl,
ga102.c
34 .runl = &ga100_runl,
gk20a.c
39 .runl = &gk110_runl,
/linux/drivers/gpu/drm/nouveau/
nouveau_chan.c
519 kfree(drm->runl); in nouveau_channels_fini()
548 drm->runl = kcalloc(drm->runl_nr, sizeof(*drm->runl), GFP_KERNEL); in nouveau_channels_init()
549 if (!drm->runl) in nouveau_channels_init()
564 drm->runl[i].chan_nr = args.v.channels.data; in nouveau_channels_init()
565 drm->runl[i].chan_id_base = drm->chan_total; in nouveau_channels_init()
566 drm->runl[i].context_base = dma_fence_context_alloc(drm->runl[i].chan_nr); in nouveau_channels_init()
568 drm->chan_total += drm->runl[i].chan_nr; in nouveau_channels_init()
571 drm->runl[0].context_base = dma_fence_context_alloc(drm->chan_nr); in nouveau_channels_init()
573 drm->runl[i].context_base = drm->runl[0].context_base; in nouveau_channels_init()
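
The nouveau_chan.c matches show nouveau_channels_init() sizing a per-runlist table, giving each runlist a disjoint global channel-id range via the running chan_id_base/chan_total offsets, and one dma_fence context base per runlist. A standalone sketch of just the id-range bookkeeping (field names mirror the snippet; the counts are made up):

#include <stdio.h>

struct runl_info {
	int chan_nr;       /* channels available on this runlist */
	int chan_id_base;  /* first global channel id for this runlist */
};

int main(void)
{
	/* Per-runlist channel counts, as the FIFO query would report them. */
	int chan_nr[] = { 512, 128, 64 };
	struct runl_info runl[3];
	int chan_total = 0;

	for (int i = 0; i < 3; i++) {
		runl[i].chan_nr = chan_nr[i];
		runl[i].chan_id_base = chan_total;  /* disjoint id ranges */
		chan_total += runl[i].chan_nr;
	}

	for (int i = 0; i < 3; i++)
		printf("runlist %d: global ids %d..%d\n", i,
		       runl[i].chan_id_base,
		       runl[i].chan_id_base + runl[i].chan_nr - 1);
	return 0;
}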
