Home
last modified time | relevance | path

Searched refs: nvbo (Results 1 – 13 of 13) sorted by relevance

/linux/drivers/gpu/drm/nouveau/
H A Dnouveau_bo.c141 struct nouveau_bo *nvbo = nouveau_bo(bo); in nouveau_bo_del_ttm() local
143 WARN_ON(nvbo->bo.pin_count > 0); in nouveau_bo_del_ttm()
145 nv10_bo_put_tile_region(dev, nvbo->tile, NULL); in nouveau_bo_del_ttm()
158 if (nvbo->no_share) in nouveau_bo_del_ttm()
159 drm_gem_object_put(nvbo->r_obj); in nouveau_bo_del_ttm()
166 kfree(nvbo); in nouveau_bo_del_ttm()
178 nouveau_bo_fixup_align(struct nouveau_bo *nvbo, int *align, u64 *size) in nouveau_bo_fixup_align() argument
180 struct nouveau_drm *drm = nouveau_bdev(nvbo->bo.bdev); in nouveau_bo_fixup_align()
184 if (nvbo->mode) { in nouveau_bo_fixup_align()
187 *size = roundup_64(*size, 64 * nvbo->mode); in nouveau_bo_fixup_align()
[all …]
H A Dnouveau_gem.c79 struct nouveau_bo *nvbo = nouveau_gem_object(gem); in nouveau_gem_object_del() local
80 struct nouveau_drm *drm = nouveau_bdev(nvbo->bo.bdev); in nouveau_gem_object_del()
90 ttm_bo_fini(&nvbo->bo); in nouveau_gem_object_del()
100 struct nouveau_bo *nvbo = nouveau_gem_object(gem); in nouveau_gem_object_open() local
101 struct nouveau_drm *drm = nouveau_bdev(nvbo->bo.bdev); in nouveau_gem_object_open()
111 if (nvbo->no_share && uvmm && in nouveau_gem_object_open()
112 drm_gpuvm_resv(&uvmm->base) != nvbo->bo.base.resv) in nouveau_gem_object_open()
115 ret = ttm_bo_reserve(&nvbo->bo, false, false, NULL); in nouveau_gem_object_open()
127 ret = nouveau_vma_new(nvbo, vmm, &vma); in nouveau_gem_object_open()
133 ttm_bo_unreserve(&nvbo->bo); in nouveau_gem_object_open()
[all …]
H A Dnouveau_vmm.c49 nouveau_vma_find(struct nouveau_bo *nvbo, struct nouveau_vmm *vmm) in nouveau_vma_find() argument
53 list_for_each_entry(vma, &nvbo->vma_list, head) { in nouveau_vma_find()
77 nouveau_vma_new(struct nouveau_bo *nvbo, struct nouveau_vmm *vmm, in nouveau_vma_new() argument
80 struct nouveau_mem *mem = nouveau_mem(nvbo->bo.resource); in nouveau_vma_new()
85 if ((vma = *pvma = nouveau_vma_find(nvbo, vmm))) { in nouveau_vma_new()
97 list_add_tail(&vma->head, &nvbo->vma_list); in nouveau_vma_new()
99 if (nvbo->bo.resource->mem_type != TTM_PL_SYSTEM && in nouveau_vma_new()
100 mem->mem.page == nvbo->page) { in nouveau_vma_new()
H A Dnouveau_ttm.c69 struct nouveau_bo *nvbo = nouveau_bo(bo); in nouveau_vram_manager_new() local
76 ret = nouveau_mem_new(drm, nvbo->kind, nvbo->comp, res); in nouveau_vram_manager_new()
82 ret = nouveau_mem_vram(*res, nvbo->contig, nvbo->page); in nouveau_vram_manager_new()
104 struct nouveau_bo *nvbo = nouveau_bo(bo); in nouveau_gart_manager_new() local
108 ret = nouveau_mem_new(drm, nvbo->kind, nvbo->comp, res); in nouveau_gart_manager_new()
130 struct nouveau_bo *nvbo = nouveau_bo(bo); in nv04_gart_manager_new() local
135 ret = nouveau_mem_new(drm, nvbo->kind, nvbo->comp, res); in nv04_gart_manager_new()
H A Dnouveau_display.c174 const struct nouveau_bo *nvbo = nouveau_gem_object(fb->obj[0]); in nouveau_framebuffer_get_layout() local
176 *tile_mode = nvbo->mode; in nouveau_framebuffer_get_layout()
177 *kind = nvbo->kind; in nouveau_framebuffer_get_layout()
226 nouveau_check_bl_size(struct nouveau_drm *drm, struct nouveau_bo *nvbo, in nouveau_check_bl_size() argument
247 bl_size, nvbo->bo.base.size); in nouveau_check_bl_size()
249 if (bl_size + offset > nvbo->bo.base.size) in nouveau_check_bl_size()
263 struct nouveau_bo *nvbo = nouveau_gem_object(gem); in nouveau_framebuffer_new() local
295 tile_mode = nvbo->mode; in nouveau_framebuffer_new()
296 kind = nvbo->kind; in nouveau_framebuffer_new()
305 ret = nouveau_check_bl_size(drm, nvbo, in nouveau_framebuffer_new()
[all …]
H A Dnouveau_sgdma.c71 struct nouveau_bo *nvbo = nouveau_bo(bo); in nouveau_sgdma_create_ttm() local
75 if (nvbo->force_coherent) in nouveau_sgdma_create_ttm()
H A Dnouveau_uvmm.c470 struct nouveau_bo *nvbo = nouveau_gem_object(op->gem.obj); in select_page_shift() local
480 if (op_map_aligned_to_page_shift(op, nvbo->page)) in select_page_shift()
481 return nvbo->page; in select_page_shift()
483 struct nouveau_mem *mem = nouveau_mem(nvbo->bo.resource); in select_page_shift()
490 if (vmm->page[i].shift >= nvbo->page) in select_page_shift()
847 struct nouveau_bo *nvbo = nouveau_gem_object(uvma->va.gem.obj); in op_map() local
849 nouveau_uvma_map(uvma, nouveau_mem(nvbo->bo.resource)); in op_map()
1827 nouveau_uvmm_bo_map_all(struct nouveau_bo *nvbo, struct nouveau_mem *mem) in nouveau_uvmm_bo_map_all() argument
1829 struct drm_gem_object *obj = &nvbo->bo.base; in nouveau_uvmm_bo_map_all()
1846 nouveau_uvmm_bo_unmap_all(struct nouveau_bo *nvbo) in nouveau_uvmm_bo_unmap_all() argument
[all …]
H A Dnouveau_crtc.h57 struct nouveau_bo *nvbo; member
H A Dnouveau_fence.c340 nouveau_fence_sync(struct nouveau_bo *nvbo, struct nouveau_channel *chan, in nouveau_fence_sync() argument
344 struct dma_resv *resv = nvbo->bo.base.resv; in nouveau_fence_sync()
/linux/drivers/gpu/drm/nouveau/dispnv04/
H A Ddisp.c80 struct nouveau_bo *nvbo; in nv04_display_fini() local
84 nvbo = nouveau_gem_object(fb->obj[0]); in nv04_display_fini()
85 nouveau_bo_unpin(nvbo); in nv04_display_fini()
90 if (nv_crtc->cursor.nvbo) { in nv04_display_fini()
92 nouveau_bo_unmap(nv_crtc->cursor.nvbo); in nv04_display_fini()
93 nouveau_bo_unpin(nv_crtc->cursor.nvbo); in nv04_display_fini()
132 struct nouveau_bo *nvbo; in nv04_display_init() local
136 nvbo = nouveau_gem_object(fb->obj[0]); in nv04_display_init()
137 ret = nouveau_bo_pin(nvbo, NOUVEAU_GEM_DOMAIN_VRAM, true); in nv04_display_init()
144 if (!nv_crtc->cursor.nvbo) in nv04_display_init()
[all …]
H A Doverlay.c126 struct nouveau_bo *nvbo; in nv10_update_plane() local
144 nvbo = nouveau_gem_object(fb->obj[0]); in nv10_update_plane()
145 ret = nouveau_bo_pin(nvbo, NOUVEAU_GEM_DOMAIN_VRAM, false); in nv10_update_plane()
149 nv_plane->cur = nvbo; in nv10_update_plane()
155 nvif_wr32(dev, NV_PVIDEO_OFFSET_BUFF(flip), nvbo->offset); in nv10_update_plane()
177 nvbo->offset + fb->offsets[1]); in nv10_update_plane()
375 struct nouveau_bo *nvbo; in nv04_update_plane() local
390 nvbo = nouveau_gem_object(fb->obj[0]); in nv04_update_plane()
391 ret = nouveau_bo_pin(nvbo, NOUVEAU_GEM_DOMAIN_VRAM, false); in nv04_update_plane()
395 nv_plane->cur = nvbo; in nv04_update_plane()
[all …]
H A Dcrtc.c614 struct nouveau_bo *nvbo = nouveau_gem_object(fb->obj[0]); in nv_crtc_swap_fbs() local
618 ret = nouveau_bo_pin(nvbo, NOUVEAU_GEM_DOMAIN_VRAM, false); in nv_crtc_swap_fbs()
627 drm_gem_object_get(&nvbo->bo.base); in nv_crtc_swap_fbs()
628 disp->image[nv_crtc->index] = nvbo; in nv_crtc_swap_fbs()
771 nouveau_bo_unpin_del(&nv_crtc->cursor.nvbo); in nv_crtc_destroy()
846 struct nouveau_bo *nvbo; in nv04_crtc_do_mode_set_base() local
860 nvbo = nouveau_gem_object(drm_fb->obj[0]); in nv04_crtc_do_mode_set_base()
861 nv_crtc->fb.offset = nvbo->offset; in nv04_crtc_do_mode_set_base()
1008 nv11_cursor_upload(dev, cursor, nv_crtc->cursor.nvbo); in nv04_crtc_cursor_set()
1010 nv04_cursor_upload(dev, cursor, nv_crtc->cursor.nvbo); in nv04_crtc_cursor_set()
[all …]
/linux/drivers/gpu/drm/nouveau/dispnv50/
H A Dwndw.c527 struct nouveau_bo *nvbo; in nv50_wndw_cleanup_fb() local
533 nvbo = nouveau_gem_object(old_state->fb->obj[0]); in nv50_wndw_cleanup_fb()
534 nouveau_bo_unpin(nvbo); in nv50_wndw_cleanup_fb()
544 struct nouveau_bo *nvbo; in nv50_wndw_prepare_fb() local
553 nvbo = nouveau_gem_object(fb->obj[0]); in nv50_wndw_prepare_fb()
554 ret = nouveau_bo_pin(nvbo, NOUVEAU_GEM_DOMAIN_VRAM, true); in nv50_wndw_prepare_fb()
562 nouveau_bo_unpin(nvbo); in nv50_wndw_prepare_fb()
583 asyw->image.offset[0] = nvbo->offset; in nv50_wndw_prepare_fb()
654 struct nouveau_bo *nvbo; in nv50_wndw_get_scanout_buffer() local
665 nvbo = nouveau_gem_object(fb->obj[0]); in nv50_wndw_get_scanout_buffer()
[all …]