Lines matching refs:imem (all hits are in the nouveau driver, drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c)
48 struct nv50_instmem *imem; member
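Every match below flows through this back-pointer: each nv50_instobj carries a reference to its owning nv50_instmem. A minimal sketch of the assumed layout, relying on <linux/list.h>, <linux/refcount.h>, <linux/mutex.h> and nouveau's nvkm types; only the fields that actually appear in these matches are taken from the listing, the rest is illustrative:

struct nv50_instmem {
    struct nvkm_instmem base;  /* provides the .lock, .mutex and .subdev used below */
    u64 addr;                  /* cached PRAMIN window base (lines 67/87) */
    struct list_head lru;      /* instobjs whose BAR2 maps may be evicted (line 430) */
};

struct nv50_instobj {
    struct nvkm_instobj base;
    struct nv50_instmem *imem; /* the "member" match at line 48 */
    refcount_t maps;           /* mapping refcount (line 201) */
    struct list_head lru;      /* node on imem->lru (line 207) */
};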
60 struct nv50_instmem *imem = iobj->imem; in nv50_instobj_wr32_slow() local
61 struct nvkm_device *device = imem->base.subdev.device; in nv50_instobj_wr32_slow()
66 spin_lock_irqsave(&imem->base.lock, flags); in nv50_instobj_wr32_slow()
67 if (unlikely(imem->addr != base)) { in nv50_instobj_wr32_slow()
69 imem->addr = base; in nv50_instobj_wr32_slow()
72 spin_unlock_irqrestore(&imem->base.lock, flags); in nv50_instobj_wr32_slow()
79 struct nv50_instmem *imem = iobj->imem; in nv50_instobj_rd32_slow() local
80 struct nvkm_device *device = imem->base.subdev.device; in nv50_instobj_rd32_slow()
86 spin_lock_irqsave(&imem->base.lock, flags); in nv50_instobj_rd32_slow()
87 if (unlikely(imem->addr != base)) { in nv50_instobj_rd32_slow()
89 imem->addr = base; in nv50_instobj_rd32_slow()
92 spin_unlock_irqrestore(&imem->base.lock, flags); in nv50_instobj_rd32_slow()
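Lines 60-92 are the two slow-path accessors, and they are mirror images: take the instmem spinlock, re-point the chip's 1MiB PRAMIN window only when the cached base (imem->addr) differs from the one needed, then access through the window. A hedged reconstruction of the write side; the register offsets (0x001700 for the window base, 0x700000 for the aperture) and the address split are my reading of the NV50 PRAMIN scheme, not quoted from the matches:

static void
nv50_instobj_wr32_slow(struct nvkm_memory *memory, u64 offset, u32 data)
{
    struct nv50_instobj *iobj = nv50_instobj(memory);
    struct nv50_instmem *imem = iobj->imem;
    struct nvkm_device *device = imem->base.subdev.device;
    u64 base = (nvkm_memory_addr(memory) + offset) & 0xffffff00000ULL;
    u64 addr = (nvkm_memory_addr(memory) + offset) & 0x000000fffffULL;
    unsigned long flags;

    spin_lock_irqsave(&imem->base.lock, flags);
    if (unlikely(imem->addr != base)) {
        /* Move the PRAMIN window, and remember where it points so
         * back-to-back accesses within the same 1MiB skip this write. */
        nvkm_wr32(device, 0x001700, base >> 16);
        imem->addr = base;
    }
    nvkm_wr32(device, 0x700000 + addr, data);
    spin_unlock_irqrestore(&imem->base.lock, flags);
}

nv50_instobj_rd32_slow() (lines 79-92) has the same shape, with nvkm_rd32() in place of the final write.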
123 struct nv50_instmem *imem = iobj->imem; in nv50_instobj_kmap() local
126 struct nvkm_subdev *subdev = &imem->base.subdev; in nv50_instobj_kmap()
137 mutex_unlock(&imem->base.mutex); in nv50_instobj_kmap()
142 mutex_lock(&imem->base.mutex); in nv50_instobj_kmap()
143 eobj = list_first_entry_or_null(&imem->lru, typeof(*eobj), lru); in nv50_instobj_kmap()
155 mutex_unlock(&imem->base.mutex); in nv50_instobj_kmap()
164 mutex_lock(&imem->base.mutex); in nv50_instobj_kmap()
167 mutex_unlock(&imem->base.mutex); in nv50_instobj_kmap()
169 mutex_lock(&imem->base.mutex); in nv50_instobj_kmap()
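Lines 123-169 explain the mutex churn in nv50_instobj_kmap(): the BAR2 allocation can sleep, so the mutex is dropped around it, and on failure the lock is re-taken to pick an eviction victim off imem->lru (line 143) before retrying. A condensed sketch of that retry loop; the eobj->bar and eobj->map fields and the final mapping step are assumptions:

static void
nv50_instobj_kmap(struct nv50_instobj *iobj, struct nvkm_vmm *vmm)
{
    struct nv50_instmem *imem = iobj->imem;
    struct nv50_instobj *eobj;
    struct nvkm_vma *bar = NULL, *ebar;
    u64 size = nvkm_memory_size(&iobj->base.memory);
    void __iomem *emap;
    int ret;

    /* Entered with imem->base.mutex held; drop it while allocating
     * BAR2 address space, which may sleep. */
    mutex_unlock(&imem->base.mutex);
    while ((ret = nvkm_vmm_get(vmm, 12, size, &bar))) {
        /* BAR2 exhausted: evict the least-recently-used mapping and
         * retry until the allocation succeeds or the LRU runs dry. */
        mutex_lock(&imem->base.mutex);
        eobj = list_first_entry_or_null(&imem->lru, typeof(*eobj), lru);
        if (eobj) {
            list_del_init(&eobj->lru);
            ebar = eobj->bar;   /* assumed field: victim's BAR2 VMA */
            eobj->bar = NULL;
            emap = eobj->map;   /* assumed field: victim's CPU map */
            eobj->map = NULL;
        }
        mutex_unlock(&imem->base.mutex);
        if (!eobj)
            break;
        iounmap(emap);
        nvkm_vmm_put(vmm, &ebar);
    }

    /* ... map 'bar' and install iobj->map (omitted in this sketch) ... */
    mutex_lock(&imem->base.mutex);  /* caller expects the mutex held */
}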
195 struct nv50_instmem *imem = iobj->imem; in nv50_instobj_release() local
196 struct nvkm_subdev *subdev = &imem->base.subdev; in nv50_instobj_release()
201 if (refcount_dec_and_mutex_lock(&iobj->maps, &imem->base.mutex)) { in nv50_instobj_release()
207 list_add_tail(&iobj->lru, &imem->lru); in nv50_instobj_release()
212 mutex_unlock(&imem->base.mutex); in nv50_instobj_release()
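Line 201 is the load-bearing call: refcount_dec_and_mutex_lock() only takes the mutex when the count actually drops to zero, so the LRU insertion on line 207 runs exactly once, as the last user of the mapping goes away. A minimal sketch; the real function likely also flushes writes through the subdev referenced on line 196, which is omitted here:

static void
nv50_instobj_release(struct nvkm_memory *memory)
{
    struct nv50_instobj *iobj = nv50_instobj(memory);
    struct nv50_instmem *imem = iobj->imem;

    if (refcount_dec_and_mutex_lock(&iobj->maps, &imem->base.mutex)) {
        /* Last reference gone: park the object on the eviction LRU
         * so nv50_instobj_kmap() may reclaim its BAR2 space. */
        list_add_tail(&iobj->lru, &imem->lru);
        mutex_unlock(&imem->base.mutex);
    }
}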
220 struct nvkm_instmem *imem = &iobj->imem->base; in nv50_instobj_acquire() local
234 mutex_lock(&imem->mutex); in nv50_instobj_acquire()
236 mutex_unlock(&imem->mutex); in nv50_instobj_acquire()
241 if ((vmm = nvkm_bar_bar2_vmm(imem->subdev.device))) { in nv50_instobj_acquire()
261 mutex_unlock(&imem->mutex); in nv50_instobj_acquire()
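Lines 220-261 are the acquire side. Note the type switch on line 220: here imem is the base nvkm_instmem, not the nv50 wrapper. Under the mutex an already-mapped object is pulled back off the LRU so it cannot be evicted while in use; otherwise the BAR2 VMM is looked up (line 241) and the object mapped. A condensed sketch, assuming a refcount_inc_not_zero() fast path and the iobj->map field from earlier:

static void __iomem *
nv50_instobj_acquire(struct nvkm_memory *memory)
{
    struct nv50_instobj *iobj = nv50_instobj(memory);
    struct nvkm_instmem *imem = &iobj->imem->base;
    struct nvkm_vmm *vmm;
    void __iomem *map;

    /* Fast path: already mapped, just take another reference. */
    if (refcount_inc_not_zero(&iobj->maps))
        return iobj->map;

    mutex_lock(&imem->mutex);
    if (!refcount_inc_not_zero(&iobj->maps)) {  /* re-check under lock */
        if ((vmm = nvkm_bar_bar2_vmm(imem->subdev.device)))
            nv50_instobj_kmap(iobj, vmm);
        /* Exclude the object from eviction while it is in use. */
        list_del_init(&iobj->lru);
        refcount_set(&iobj->maps, 1);
    }
    map = iobj->map;
    mutex_unlock(&imem->mutex);
    return map;
}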
269 struct nvkm_instmem *imem = &iobj->imem->base; in nv50_instobj_boot() local
274 mutex_lock(&imem->mutex); in nv50_instobj_boot()
281 nvkm_instmem_boot(imem); in nv50_instobj_boot()
282 mutex_unlock(&imem->mutex); in nv50_instobj_boot()
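Lines 269-282 take the same mutex around nvkm_instmem_boot(), which by its placement reads as the one-shot switch to the BAR2-mapped access path once the VMM is usable. A sketch of the shape, with the parameter list assumed from nvkm's boot hook convention:

static void
nv50_instobj_boot(struct nvkm_memory *memory, struct nvkm_vmm *vmm)
{
    struct nv50_instobj *iobj = nv50_instobj(memory);
    struct nvkm_instmem *imem = &iobj->imem->base;

    mutex_lock(&imem->mutex);
    nv50_instobj_kmap(iobj, vmm);   /* map this object into BAR2 */
    nvkm_instmem_boot(imem);        /* flip instmem into post-boot mode */
    mutex_unlock(&imem->mutex);
}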
320 struct nvkm_instmem *imem = &iobj->imem->base; in nv50_instobj_dtor() local
324 mutex_lock(&imem->mutex); in nv50_instobj_dtor()
329 mutex_unlock(&imem->mutex); in nv50_instobj_dtor()
332 struct nvkm_vmm *vmm = nvkm_bar_bar2_vmm(imem->subdev.device); in nv50_instobj_dtor()
339 nvkm_instobj_dtor(imem, &iobj->base); in nv50_instobj_dtor()
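The destructor (lines 320-339) mirrors acquire: detach from the LRU and steal the BAR2 VMA and CPU map under the mutex, tear them down outside it, then hand the base object back via nvkm_instobj_dtor() (line 339). The lookup on line 332 suggests the BAR2 VMM must be fetched fresh because it can already be gone during teardown. Sketch:

static void *
nv50_instobj_dtor(struct nvkm_memory *memory)
{
    struct nv50_instobj *iobj = nv50_instobj(memory);
    struct nvkm_instmem *imem = &iobj->imem->base;
    struct nvkm_vma *bar;
    void __iomem *map;

    mutex_lock(&imem->mutex);
    list_del_init(&iobj->lru);  /* safe even if not currently queued */
    map = iobj->map;            /* assumed fields, as above */
    bar = iobj->bar;
    mutex_unlock(&imem->mutex);

    if (map) {
        struct nvkm_vmm *vmm = nvkm_bar_bar2_vmm(imem->subdev.device);
        iounmap(map);
        if (vmm)                /* may be NULL while BAR itself dies */
            nvkm_vmm_put(vmm, &bar);
    }

    nvkm_instobj_dtor(imem, &iobj->base);
    return iobj;
}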
360 struct nv50_instmem *imem = nv50_instmem(base); in nv50_instobj_wrap() local
367 nvkm_instobj_ctor(&nv50_instobj_func, &imem->base, &iobj->base); in nv50_instobj_wrap()
368 iobj->imem = imem; in nv50_instobj_wrap()
377 nv50_instobj_new(struct nvkm_instmem *imem, u32 size, u32 align, bool zero, in nv50_instobj_new() argument
384 ret = nvkm_ram_get(imem->subdev.device, 0, 1, page, size, true, true, &ram); in nv50_instobj_new()
388 ret = nv50_instobj_wrap(imem, ram, pmemory); in nv50_instobj_new()
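Lines 360-388 are the two constructors. nv50_instobj_wrap() downcasts the base instmem (line 360), runs the common ctor (line 367) and stores the back-pointer (line 368); nv50_instobj_new() first carves backing VRAM out with nvkm_ram_get() (line 384), then delegates to wrap (line 388). A sketch of the pair; the iobj->ram field and the error unwinding are assumptions:

static int
nv50_instobj_wrap(struct nvkm_instmem *base,
                  struct nvkm_memory *memory, struct nvkm_memory **pmemory)
{
    struct nv50_instmem *imem = nv50_instmem(base);
    struct nv50_instobj *iobj;

    if (!(iobj = kzalloc(sizeof(*iobj), GFP_KERNEL)))
        return -ENOMEM;
    *pmemory = &iobj->base.memory;

    nvkm_instobj_ctor(&nv50_instobj_func, &imem->base, &iobj->base);
    iobj->imem = imem;
    refcount_set(&iobj->maps, 0);
    INIT_LIST_HEAD(&iobj->lru);
    iobj->ram = memory;         /* assumed field: wrapped backing VRAM */
    return 0;
}

static int
nv50_instobj_new(struct nvkm_instmem *imem, u32 size, u32 align, bool zero,
                 struct nvkm_memory **pmemory)
{
    u8 page = max(order_base_2(align), 12);
    struct nvkm_memory *ram;
    int ret;

    ret = nvkm_ram_get(imem->subdev.device, 0, 1, page, size, true, true, &ram);
    if (ret)
        return ret;

    ret = nv50_instobj_wrap(imem, ram, pmemory);
    if (ret)
        nvkm_memory_unref(&ram);
    return ret;
}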
425 struct nv50_instmem *imem; in nv50_instmem_new_() local
427 if (!(imem = kzalloc(sizeof(*imem), GFP_KERNEL))) in nv50_instmem_new_()
429 nvkm_instmem_ctor(func, device, type, inst, &imem->base); in nv50_instmem_new_()
430 INIT_LIST_HEAD(&imem->lru); in nv50_instmem_new_()
431 *pimem = &imem->base; in nv50_instmem_new_()
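Finally, lines 425-431 give nearly the whole instmem constructor: allocate (line 427), run the common ctor (line 429), initialise the eviction LRU (line 430), and publish through the out-parameter (line 431). Filled out with the obvious return paths; the parameter types are inferred from the ctor call on line 429:

int
nv50_instmem_new_(const struct nvkm_instmem_func *func,
                  struct nvkm_device *device, enum nvkm_subdev_type type,
                  int inst, struct nvkm_instmem **pimem)
{
    struct nv50_instmem *imem;

    if (!(imem = kzalloc(sizeof(*imem), GFP_KERNEL)))
        return -ENOMEM;
    nvkm_instmem_ctor(func, device, type, inst, &imem->base);
    INIT_LIST_HEAD(&imem->lru);
    *pimem = &imem->base;
    return 0;
}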