/linux/drivers/gpu/drm/nouveau/nvkm/core/

ioctl.c
   42  nvkm_ioctl_sclass_(struct nvkm_object *object, int index, struct nvkm_oclass *oclass)
   46  oclass->ctor = nvkm_uevent_new;
   47  oclass->base.minver = 0;
   48  oclass->base.maxver = 0;
   49  oclass->base.oclass = NVIF_CLASS_EVENT;
   54  return object->func->sclass(object, index, oclass);
   66  struct nvkm_oclass oclass = { .client = client };
   73  if (size != args->v0.count * sizeof(args->v0.oclass[0]))
   76  while (nvkm_ioctl_sclass_(object, i, &oclass) >= 0) {
   78  args->v0.oclass[i].oclass = oclass.base.oclass;
   [all …]

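The while loop at source lines 76–78 is the heart of the sclass query: the ioctl keeps asking the object for its class at the next index until the getter returns a negative value, copying each reported class id back to userspace. A minimal user-space sketch of that enumeration pattern, with toy_* stand-in types and made-up class numbers rather than the real nvkm structures:

```c
/* Sketch of the index-based class enumeration used by nvkm_ioctl_sclass():
 * keep querying until the getter reports no more classes (negative return).
 * Types, names and class ids here are simplified stand-ins, not the kernel's. */
#include <stdio.h>

struct toy_oclass { int oclass; int minver, maxver; };

/* Pretend the object supports three classes; any index past the end fails. */
static int toy_sclass_get(int index, struct toy_oclass *oc)
{
    static const struct toy_oclass classes[] = {
        { 0x906f, 0, 0 }, { 0xb06f, 0, 0 }, { 0xc36f, 0, 0 },
    };
    if (index < 0 || index >= 3)
        return -1;  /* no class at this index */
    *oc = classes[index];
    return index;
}

int main(void)
{
    struct toy_oclass oc;
    int i = 0;

    /* Same shape as: while (nvkm_ioctl_sclass_(object, i, &oclass) >= 0) */
    while (toy_sclass_get(i, &oc) >= 0) {
        printf("class[%d] = 0x%04x\n", i, oc.oclass);
        i++;
    }
    return 0;
}
```

The same index-until-failure shape recurs in the per-object sclass implementations further down the listing (ucgrp.c, sw/base.c, ummu.c).
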
client.c
   34  nvkm_uclient_new(const struct nvkm_oclass *oclass, void *argv, u32 argc,
   45  ret = nvkm_client_new(args->v0.name, oclass->client->device, NULL,
   46  NULL, oclass->client->event, &client);
   52  client->object.client = oclass->client;
   53  client->object.handle = oclass->handle;
   54  client->object.object = oclass->object;
   55  client->debug = oclass->client->debug;
   62  .oclass = NVIF_CLASS_CLIENT,
   69  nvkm_client_child_new(const struct nvkm_oclass *oclass,
   72  return oclass->base.ctor(oclass, data, size, pobject);
   [all …]

object.c
  261  const struct nvkm_oclass *oclass, struct nvkm_object *object)
  264  object->client = oclass->client;
  265  object->engine = nvkm_engine_ref(oclass->engine);
  266  object->oclass = oclass->base.oclass;
  267  object->handle = oclass->handle;
  268  object->object = oclass->object;
  277  const struct nvkm_oclass *oclass, void *data, u32 size,
  283  nvkm_object_ctor(func, oclass, *pobject);
  294  nvkm_object_new(const struct nvkm_oclass *oclass, void *data, u32 size,
  298  oclass->base.func ? oclass->base.func : &nvkm_object_func;
  [all …]

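nvkm_object_new() (source line 294) resolves the function table to use: oclass->base.func when the class supplies one, otherwise the generic &nvkm_object_func (line 298); nvkm_object_ctor() then copies the identity fields (client, engine, class id, handle) from the oclass descriptor into the new object. A rough, self-contained sketch of that fallback-vtable idea, using simplified toy_* types rather than the kernel's:

```c
/* Sketch of the "use the class's func table, else the generic one" fallback
 * seen in nvkm_object_new(); all types and names are simplified stand-ins. */
#include <stdio.h>
#include <stddef.h>

struct toy_func { const char *name; };

static const struct toy_func toy_default_func = { "generic-object" };

struct toy_sclass {
    int oclass;
    const struct toy_func *func;    /* may be NULL */
};

struct toy_object {
    const struct toy_func *func;
    int oclass;
    unsigned int handle;
};

static void toy_object_ctor(const struct toy_func *func,
                            const struct toy_sclass *sclass,
                            unsigned int handle, struct toy_object *obj)
{
    obj->func = func;
    obj->oclass = sclass->oclass;   /* cf. object->oclass = oclass->base.oclass */
    obj->handle = handle;
}

int main(void)
{
    struct toy_func special = { "special-object" };
    struct toy_sclass plain = { 0x0000, NULL };
    struct toy_sclass fancy = { 0xc36f, &special };
    struct toy_object a, b;

    /* cf. oclass->base.func ? oclass->base.func : &nvkm_object_func */
    toy_object_ctor(plain.func ? plain.func : &toy_default_func, &plain, 1, &a);
    toy_object_ctor(fancy.func ? fancy.func : &toy_default_func, &fancy, 2, &b);

    printf("a uses %s, b uses %s\n", a.func->name, b.func->name);
    return 0;
}
```

Keeping the fallback in one place means a class implementation only needs to provide a func table when it actually overrides the defaults.
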
oproxy.c
   68  struct nvkm_oclass *oclass)
   71  oclass->parent = oproxy->object;
   74  return oproxy->object->func->sclass(oproxy->object, index, oclass);
  171  const struct nvkm_oclass *oclass, struct nvkm_oproxy *oproxy)
  173  nvkm_object_ctor(&nvkm_oproxy_func, oclass, &oproxy->base);
  179  const struct nvkm_oclass *oclass, struct nvkm_oproxy **poproxy)
  183  nvkm_oproxy_ctor(func, oclass, *poproxy);

/linux/drivers/gpu/drm/nouveau/nvkm/engine/disp/

chan.c
   79  nvkm_disp_chan_child_new(const struct nvkm_oclass *oclass, void *argv, u32 argc,
   82  struct nvkm_disp_chan *chan = nvkm_disp_chan(oclass->parent);
   85  const struct nvkm_device_oclass *sclass = oclass->priv;
   91  nvkm_oproxy_ctor(&nvkm_disp_chan_child_func_, oclass, &object->oproxy);
   95  ret = sclass->ctor(device, oclass, argv, argc, &object->oproxy.object);
   99  object->hash = chan->func->bind(chan, object->oproxy.object, oclass->handle);
  111  const struct nvkm_device_oclass *oclass = NULL;
  119  sclass->engine->func->base.sclass(sclass, index, &oclass);
  120  if (oclass) {
  122  sclass->priv = oclass;
  [all …]

/linux/drivers/gpu/drm/nouveau/nvkm/engine/fifo/

uchan.c
  154  nvkm_uchan_object_new(const struct nvkm_oclass *oclass, void *argv, u32 argc,
  157  struct nvkm_chan *chan = nvkm_uchan(oclass->parent)->chan;
  164  engn = nvkm_runl_find_engn(engn, cgrp->runl, engn->engine == oclass->engine);
  172  nvkm_oproxy_ctor(&nvkm_uchan_object, oclass, &uobj->oproxy);
  177  ret = nvkm_chan_cctx_get(chan, engn, &uobj->cctx, oclass->client);
  182  ret = oclass->base.ctor(&(const struct nvkm_oclass) {
  183  .base = oclass->base,
  184  .engn = oclass->engn,
  185  .handle = oclass->handle,
  186  .object = oclass->object,
  [all …]

ucgrp.c
   37  nvkm_ucgrp_chan_new(const struct nvkm_oclass *oclass, void *argv, u32 argc,
   40  struct nvkm_cgrp *cgrp = nvkm_ucgrp(oclass->parent)->cgrp;
   42  return nvkm_uchan_new(cgrp->runl->fifo, cgrp, oclass, argv, argc, pobject);
   46  nvkm_ucgrp_sclass(struct nvkm_object *object, int index, struct nvkm_oclass *oclass)
   54  if (chan->user.oclass) {
   56  oclass->base = chan->user;
   57  oclass->ctor = nvkm_ucgrp_chan_new;
   81  nvkm_ucgrp_new(struct nvkm_fifo *fifo, const struct nvkm_oclass *oclass, void *argv, u32 argc,
  102  vmm = nvkm_uvmm_search(oclass->client, args->v0.vmm);
  112  nvkm_object_ctor(&nvkm_ucgrp, oclass, &ucgrp->object);

base.c
   73  nvkm_fifo_class_new(struct nvkm_device *device, const struct nvkm_oclass *oclass,
   76  struct nvkm_fifo *fifo = nvkm_fifo(oclass->engine);
   78  if (oclass->engn == &fifo->func->cgrp.user)
   79  return nvkm_ucgrp_new(fifo, oclass, argv, argc, pobject);
   81  if (oclass->engn == &fifo->func->chan.user)
   82  return nvkm_uchan_new(fifo, NULL, oclass, argv, argc, pobject);
   94  nvkm_fifo_class_get(struct nvkm_oclass *oclass, int index, const struct nvkm_device_oclass **class)
   96  struct nvkm_fifo *fifo = nvkm_fifo(oclass->engine);
  102  if (cgrp->user.oclass) {
  104  oclass->base = cgrp->user;
  [all …]

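nvkm_fifo_class_new() (source lines 78–82) does not switch on the class number at all: the class getter stores in oclass->engn a pointer to the fifo function-table entry the class came from, and the constructor compares that pointer against &fifo->func->cgrp.user and &fifo->func->chan.user to decide whether to build a channel group or a channel. A small sketch of dispatching on the identity of the table entry (toy_* names and class ids are stand-ins):

```c
/* Sketch of pointer-identity dispatch as in nvkm_fifo_class_new():
 * the getter records which table entry a class came from, and the
 * constructor later compares that pointer to decide what to build. */
#include <stdio.h>

struct toy_user { int oclass; };

struct toy_fifo_func {
    struct toy_user cgrp;   /* cf. fifo->func->cgrp.user */
    struct toy_user chan;   /* cf. fifo->func->chan.user */
};

static const struct toy_fifo_func toy_func = {
    .cgrp = { 0xa06c },
    .chan = { 0xc36f },
};

static int toy_class_new(const struct toy_fifo_func *func, const void *engn)
{
    if (engn == &func->cgrp) {  /* cf. oclass->engn == &fifo->func->cgrp.user */
        printf("new channel group (0x%04x)\n", func->cgrp.oclass);
        return 0;
    }
    if (engn == &func->chan) {
        printf("new channel (0x%04x)\n", func->chan.oclass);
        return 0;
    }
    return -1;
}

int main(void)
{
    toy_class_new(&toy_func, &toy_func.cgrp);
    toy_class_new(&toy_func, &toy_func.chan);
    return 0;
}
```
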
/linux/drivers/gpu/drm/nouveau/nvkm/engine/sw/

base.c
   50  nvkm_sw_oclass_new(const struct nvkm_oclass *oclass, void *data, u32 size,
   53  struct nvkm_sw_chan *chan = nvkm_sw_chan(oclass->parent);
   54  const struct nvkm_sw_chan_sclass *sclass = oclass->engn;
   55  return sclass->ctor(chan, oclass, data, size, pobject);
   59  nvkm_sw_oclass_get(struct nvkm_oclass *oclass, int index)
   61  struct nvkm_sw *sw = nvkm_sw(oclass->engine);
   66  oclass->engn = &sw->func->sclass[index];
   67  oclass->base = sw->func->sclass[index].base;
   68  oclass->base.ctor = nvkm_sw_oclass_new;
   77  nvkm_sw_cclass_get(struct nvkm_chan *fifoch, const struct nvkm_oclass *oclass,
   [all …]

nvsw.c
   62  const struct nvkm_oclass *oclass, void *data, u32 size,
   71  nvkm_object_ctor(&nvkm_nvsw_, oclass, &nvsw->object);
   82  nvkm_nvsw_new(struct nvkm_sw_chan *chan, const struct nvkm_oclass *oclass,
   85  return nvkm_nvsw_new_(&nvkm_nvsw, chan, oclass, data, size, pobject);

/linux/drivers/gpu/drm/nouveau/nvkm/engine/dma/

base.c
   33  const struct nvkm_oclass *oclass, void *data, u32 size,
   36  struct nvkm_dma *dma = nvkm_dma(oclass->engine);
   40  ret = dma->func->class_new(dma, oclass, data, size, &dmaobj);
   52  nvkm_dma_oclass_fifo_new(const struct nvkm_oclass *oclass, void *data, u32 size,
   55  return nvkm_dma_oclass_new(oclass->engine->subdev.device,
   56  oclass, data, size, pobject);
   72  const struct nvkm_sclass *oclass = &nvkm_dma_sclass[index];
   73  sclass->base = oclass[0];
   74  sclass->engn = oclass;
   82  nvkm_dma_oclass_fifo_get(struct nvkm_oclass *oclass, int index)
   [all …]

/linux/drivers/gpu/drm/nouveau/nvkm/engine/ce/

r535.c
   52  r535_ce_obj_ctor(const struct nvkm_oclass *oclass, void *argv, u32 argc,
   55  struct nvkm_chan *chan = nvkm_uchan_chan(oclass->parent);
   62  nvkm_object_ctor(&r535_ce_obj, oclass, &obj->object);
   65  args = nvkm_gsp_rm_alloc_get(&chan->rm.object, oclass->handle, oclass->base.oclass,
   71  args->engineType = NV2080_ENGINE_TYPE_COPY0 + oclass->engine->subdev.inst;
   90  for (nclass = 0; hw->sclass[nclass].oclass; nclass++);
   99  rm->sclass[i].oclass = hw->sclass[i].oclass;

/linux/drivers/gpu/drm/nouveau/nvkm/engine/nvjpg/

r535.c
   51  r535_nvjpg_obj_ctor(const struct nvkm_oclass *oclass, void *argv, u32 argc,
   54  struct nvkm_chan *chan = nvkm_uchan_chan(oclass->parent);
   61  nvkm_object_ctor(&r535_nvjpg_obj, oclass, &obj->object);
   64  args = nvkm_gsp_rm_alloc_get(&chan->rm.object, oclass->handle, oclass->base.oclass,
   70  args->engineInstance = oclass->engine->subdev.inst;
   89  for (nclass = 0; hw->sclass[nclass].oclass; nclass++);
   98  rm->sclass[i].oclass = hw->sclass[i].oclass;

/linux/drivers/gpu/drm/nouveau/nvkm/engine/nvdec/

r535.c
   51  r535_nvdec_obj_ctor(const struct nvkm_oclass *oclass, void *argv, u32 argc,
   54  struct nvkm_chan *chan = nvkm_uchan_chan(oclass->parent);
   61  nvkm_object_ctor(&r535_nvdec_obj, oclass, &obj->object);
   64  args = nvkm_gsp_rm_alloc_get(&chan->rm.object, oclass->handle, oclass->base.oclass,
   70  args->engineInstance = oclass->engine->subdev.inst;
   91  for (nclass = 0; hw->sclass[nclass].oclass; nclass++);
  100  rm->sclass[i].oclass = hw->sclass[i].oclass;

/linux/drivers/gpu/drm/nouveau/nvkm/engine/nvenc/

r535.c
   51  r535_nvenc_obj_ctor(const struct nvkm_oclass *oclass, void *argv, u32 argc,
   54  struct nvkm_chan *chan = nvkm_uchan_chan(oclass->parent);
   61  nvkm_object_ctor(&r535_nvenc_obj, oclass, &obj->object);
   64  args = nvkm_gsp_rm_alloc_get(&chan->rm.object, oclass->handle, oclass->base.oclass,
   70  args->engineInstance = oclass->engine->subdev.inst;
   91  for (nclass = 0; hw->sclass[nclass].oclass; nclass++);
  100  rm->sclass[i].oclass = hw->sclass[i].oclass;

/linux/drivers/gpu/drm/nouveau/nvkm/engine/ofa/

r535.c
   52  r535_ofa_obj_ctor(const struct nvkm_oclass *oclass, void *argv, u32 argc,
   55  struct nvkm_chan *chan = nvkm_uchan_chan(oclass->parent);
   62  nvkm_object_ctor(&r535_ofa_obj, oclass, &obj->object);
   65  args = nvkm_gsp_rm_alloc_get(&chan->rm.object, oclass->handle, oclass->base.oclass,
   89  for (nclass = 0; hw->sclass[nclass].oclass; nclass++);
   98  rm->sclass[i].oclass = hw->sclass[i].oclass;

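The five r535.c hits above (ce, nvjpg, nvdec, nvenc, ofa) share the same tail: the hardware variant's class list is a zero-terminated array, so the GSP-RM-backed variant counts it with an empty for loop and then copies each oclass id into its own table while substituting its own constructor. A toy version of that count-and-copy idiom, with stand-in types and hypothetical names:

```c
/* Sketch of the count-and-copy idiom from the r535_*_new() functions:
 * the source class list is terminated by an entry whose oclass is 0.
 * Types, names and class ids are illustrative stand-ins. */
#include <stdio.h>
#include <stdlib.h>

struct toy_sclass { int oclass; const char *ctor; };

static const struct toy_sclass hw_sclass[] = {
    { 0xc7b0, "hw_ctor" },
    { 0xc9b0, "hw_ctor" },
    { 0 }                   /* terminator */
};

int main(void)
{
    int nclass, i;
    struct toy_sclass *rm_sclass;

    /* cf. for (nclass = 0; hw->sclass[nclass].oclass; nclass++); */
    for (nclass = 0; hw_sclass[nclass].oclass; nclass++)
        ;

    rm_sclass = calloc(nclass + 1, sizeof(*rm_sclass));
    if (!rm_sclass)
        return 1;

    for (i = 0; i < nclass; i++) {
        rm_sclass[i].oclass = hw_sclass[i].oclass;  /* keep the class id */
        rm_sclass[i].ctor = "r535_obj_ctor";        /* swap in a different constructor */
    }

    for (i = 0; i < nclass; i++)
        printf("0x%04x -> %s\n", rm_sclass[i].oclass, rm_sclass[i].ctor);

    free(rm_sclass);
    return 0;
}
```
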
/linux/drivers/gpu/drm/nouveau/nvkm/engine/gr/

base.c
   89  nvkm_gr_oclass_get(struct nvkm_oclass *oclass, int index)
   91  struct nvkm_gr *gr = nvkm_gr(oclass->engine);
   95  int ret = gr->func->object_get(gr, index, &oclass->base);
   96  if (oclass->base.oclass)
  101  while (gr->func->sclass[c].oclass) {
  103  oclass->base = gr->func->sclass[index];
  112  nvkm_gr_cclass_new(struct nvkm_chan *chan, const struct nvkm_oclass *oclass,
  115  struct nvkm_gr *gr = nvkm_gr(oclass->engine);
  117  return gr->func->chan_new(gr, chan, oclass, pobject);

/linux/drivers/gpu/drm/nouveau/nvkm/engine/device/

user.c
  260  nvkm_udevice_child_new(const struct nvkm_oclass *oclass,
  263  struct nvkm_udevice *udev = nvkm_udevice(oclass->parent);
  264  const struct nvkm_device_oclass *sclass = oclass->priv;
  265  return sclass->ctor(udev->device, oclass, data, size, pobject);
  270  struct nvkm_oclass *oclass)
  285  oclass->engine = engine;
  287  index -= engine->func->base.sclass(oclass, index, &sclass);
  302  oclass->base = sclass->base;
  303  oclass->engine = NULL;
  306  oclass->ctor = nvkm_udevice_child_new;
  [all …]

/linux/drivers/gpu/drm/nouveau/include/nvif/

object.h
    7  s32 oclass;
   17  s32 oclass;
   32  s32 oclass, void *, u32, struct nvif_object *);
   70  s32 oclass;
   83  for (i = 0; ret < 0 && mclass[i].oclass; i++) { \
   85  if (mclass[i].oclass == sclass[j].oclass && \
  103  for (_cid = 0; _mclass[_cid].oclass; _cid++) { \
  104  if (_mclass[_cid].oclass == _oclass) \
  107  _cid = _mclass[_cid].oclass ? _cid : -ENOSYS; \

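The two backslash-continued loops quoted from object.h are the class-negotiation helpers: walk a preference-ordered, zero-terminated list of classes the driver wants (mclass) and stop at the first entry that also appears in the list the object reports (sclass), or map a known class id back to its index, falling back to -ENOSYS when nothing matches. A stand-alone sketch of the first of those loops, with simplified types in place of the real nvif structures:

```c
/* Sketch of the preference-ordered class match done by the nvif helpers:
 * mclass[] is the driver's wish list (best first, 0-terminated), sclass[]
 * is what the object actually supports; return the index of the first
 * supported mclass entry, or -1. Types and ids are stand-ins. */
#include <stdio.h>

struct toy_mclass { int oclass; int version; };

static int toy_mclass_match(const struct toy_mclass *mclass,
                            const int *sclass, int nsclass)
{
    int i, j;

    for (i = 0; mclass[i].oclass; i++) {        /* cf. mclass[i].oclass test */
        for (j = 0; j < nsclass; j++) {
            if (mclass[i].oclass == sclass[j])
                return i;                       /* first (most preferred) hit */
        }
    }
    return -1;
}

int main(void)
{
    static const struct toy_mclass wanted[] = {
        { 0xc36f, 0 }, { 0xb06f, 0 }, { 0x906f, 0 }, { 0 }
    };
    static const int supported[] = { 0x906f, 0xb06f };

    int idx = toy_mclass_match(wanted, supported, 2);
    if (idx >= 0)
        printf("using class 0x%04x\n", wanted[idx].oclass);
    return 0;
}
```
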
/linux/drivers/gpu/drm/nouveau/dispnv50/

oimm507b.c
   28  s32 oclass, struct nv50_wndw *wndw)
   36  ret = nvif_object_ctor(&disp->disp->object, "kmsOvim", 0, oclass,
   39  NV_ERROR(drm, "oimm%04x allocation failed: %d\n", oclass, ret);
   49  oimm507b_init(struct nouveau_drm *drm, s32 oclass, struct nv50_wndw *wndw)
   51  return oimm507b_init_(&curs507a, drm, oclass, wndw);

wimmc37b.c
   69  s32 oclass, struct nv50_wndw *wndw)
   77  &oclass, 0, &args, sizeof(args), -1,
   80  NV_ERROR(drm, "wimm%04x allocation failed: %d\n", oclass, ret);
   90  wimmc37b_init(struct nouveau_drm *drm, s32 oclass, struct nv50_wndw *wndw)
   92  return wimmc37b_init_(&wimmc37b, drm, oclass, wndw);

/linux/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/

ummu.c
   33  struct nvkm_oclass *oclass)
   37  if (mmu->func->mem.user.oclass) {
   39  oclass->base = mmu->func->mem.user;
   40  oclass->ctor = nvkm_umem_new;
   45  if (mmu->func->vmm.user.oclass) {
   47  oclass->base = mmu->func->vmm.user;
   48  oclass->ctor = nvkm_uvmm_new;
  153  nvkm_ummu_new(struct nvkm_device *device, const struct nvkm_oclass *oclass,
  177  nvkm_object_ctor(&nvkm_ummu, oclass, &ummu->object);

/linux/drivers/gpu/drm/nouveau/

nouveau_abi16.c
  534  s32 oclass = 0;
  553  for (i = 0; !oclass && i < ret; i++) {
  554  switch (sclass[i].oclass) {
  559  oclass = sclass[i].oclass;
  569  if ((sclass[i].oclass & 0x00ff) == 0x00b1) {
  570  oclass = sclass[i].oclass;
  578  if ((sclass[i].oclass & 0x00ff) == 0x00b2) {
  579  oclass = sclass[i].oclass;
  587  if ((sclass[i].oclass & 0x00ff) == 0x00b3) {
  588  oclass = sclass[i].oclass;
  [all …]

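nouveau_abi16_ioctl_grobj_alloc() is the consumer side of the negotiation: it fetches the supported class list, first switches on a few specific class ids (line 554), and then falls back to matching only the low byte of the class id (the 0x00b1/0x00b2/0x00b3 checks), which selects an engine family regardless of the per-generation class number. A small illustrative sketch of that low-byte family match; the class values and family byte below are made up:

```c
/* Sketch of the "pick a class whose low byte identifies the engine family"
 * selection used in nouveau_abi16_ioctl_grobj_alloc(). The class list and
 * the family byte here are illustrative stand-ins. */
#include <stdio.h>

int main(void)
{
    static const int sclass[] = { 0x902d, 0xa0b5, 0xc1b1, 0xc4b1 };
    const int nsclass = (int)(sizeof(sclass) / sizeof(sclass[0]));
    const int family = 0xb1;    /* hypothetical family byte */
    int oclass = 0;
    int i;

    for (i = 0; !oclass && i < nsclass; i++) {
        /* cf. if ((sclass[i].oclass & 0x00ff) == 0x00b1) */
        if ((sclass[i] & 0x00ff) == family)
            oclass = sclass[i];
    }

    if (oclass)
        printf("allocating object of class 0x%04x\n", oclass);
    else
        printf("no suitable class\n");
    return 0;
}
```
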
/linux/drivers/gpu/drm/nouveau/nvif/

object.c
   67  size = sizeof(*args) + cnt * sizeof(args->sclass.oclass[0]);
   87  (*psclass)[i].oclass = args->sclass.oclass[i].oclass;
   88  (*psclass)[i].minver = args->sclass.oclass[i].minver;
   89  (*psclass)[i].maxver = args->sclass.oclass[i].maxver;
  230  s32 oclass, void *data, u32 size, struct nvif_object *object)
  241  object->oclass = oclass;
  266  args->new.oclass = oclass;

disp.c
   36  nvif_disp_ctor(struct nvif_device *device, const char *name, s32 oclass, struct nvif_disp *disp)
   61  cid = nvif_sclass(&device->object, disps, oclass);
   64  NVIF_DEBUG(&device->object, "[NEW disp%04x] not supported", oclass);
   71  disps[cid].oclass, &args, sizeof(args), &disp->object);
   72  NVIF_ERRON(ret, &device->object, "[NEW disp%04x]", disps[cid].oclass);