Lines matching refs:ppgtt in drivers/gpu/drm/i915/gt/gen6_ppgtt.c (Linux i915 driver)

19 static void gen6_write_pde(const struct gen6_ppgtt *ppgtt,  in gen6_write_pde()  argument
23 dma_addr_t addr = pt ? px_dma(pt) : px_dma(ppgtt->base.vm.scratch[1]); in gen6_write_pde()
27 ppgtt->pd_addr + pde); in gen6_write_pde()
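
The gen6_write_pde() fragments show the pattern: take the page table's DMA address, or fall back to the vm's scratch page when the slot is empty, and write the encoded entry through the pd_addr I/O mapping at index pde. A minimal user-space model of that shape; pde_encode() and the plain arrays are stand-ins invented here, not the driver's encoding.

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

#define N_PDES 512

struct pt { uint64_t dma; };           /* stand-in for a page table   */

static uint64_t scratch_dma = 0x1000;  /* stand-in for vm.scratch[1]  */
static uint32_t pd_mmio[N_PDES];       /* stand-in for ppgtt->pd_addr */

/* Invented encoding: real hardware packs address bits and flags. */
static uint32_t pde_encode(uint64_t addr)
{
	return (uint32_t)addr | 1;     /* hypothetical "valid" bit    */
}

static void write_pde(unsigned int pde, const struct pt *pt)
{
	/* Fall back to the scratch table when the slot is unpopulated. */
	uint64_t addr = pt ? pt->dma : scratch_dma;

	pd_mmio[pde] = pde_encode(addr);
}

int main(void)
{
	struct pt pt = { .dma = 0x200000 };

	write_pde(0, &pt);             /* populated slot   */
	write_pde(1, NULL);            /* scratch fallback */
	printf("%#" PRIx32 " %#" PRIx32 "\n", pd_mmio[0], pd_mmio[1]);
	return 0;
}
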
77 struct gen6_ppgtt * const ppgtt = to_gen6_ppgtt(i915_vm_to_ppgtt(vm)); in gen6_ppgtt_clear_range() local
86 i915_pt_entry(ppgtt->base.pd, pde++); in gen6_ppgtt_clear_range()
94 ppgtt->scan_for_unused_pt = true; in gen6_ppgtt_clear_range()
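
gen6_ppgtt_clear_range() walks page tables via i915_pt_entry() and, rather than freeing emptied tables inline, sets scan_for_unused_pt so a later unbind pass can reclaim them. A sketch of that defer-the-reclaim shape; the "used" counter is a hypothetical stand-in for however the driver tracks occupancy.

#include <stdbool.h>
#include <stdint.h>

#define PTES_PER_PT 1024

struct pt {
	uint32_t pte[PTES_PER_PT];
	unsigned int used;             /* invented occupancy counter */
};

struct ppgtt {
	struct pt *pd[512];
	bool scan_for_unused_pt;
	uint32_t scratch_pte;
};

static void clear_range(struct ppgtt *vm, unsigned int pde,
			unsigned int first, unsigned int count)
{
	struct pt *pt = vm->pd[pde];

	while (count--) {
		pt->pte[first++] = vm->scratch_pte;
		pt->used--;
	}

	/* Defer reclaim: only note that empty tables may now exist. */
	if (!pt->used)
		vm->scan_for_unused_pt = true;
}

int main(void)
{
	static struct pt pt0 = { .used = 2 };
	struct ppgtt vm = { .pd = { &pt0 }, .scratch_pte = 0xdeadbeef };

	clear_range(&vm, 0, 0, 2);
	return vm.scan_for_unused_pt ? 0 : 1;
}
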
115 struct i915_ppgtt *ppgtt = i915_vm_to_ppgtt(vm); in gen6_ppgtt_insert_entries() local
116 struct i915_page_directory * const pd = ppgtt->pd; in gen6_ppgtt_insert_entries()
150 static void gen6_flush_pd(struct gen6_ppgtt *ppgtt, u64 start, u64 end) in gen6_flush_pd() argument
152 struct i915_page_directory * const pd = ppgtt->base.pd; in gen6_flush_pd()
159 mutex_lock(&ppgtt->flush); in gen6_flush_pd()
162 gen6_write_pde(ppgtt, pde, pt); in gen6_flush_pd()
165 ioread32(ppgtt->pd_addr + pde - 1); in gen6_flush_pd()
166 gen6_ggtt_invalidate(ppgtt->base.vm.gt->ggtt); in gen6_flush_pd()
169 mutex_unlock(&ppgtt->flush); in gen6_flush_pd()
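
gen6_flush_pd() serialises PD rewrites with ppgtt->flush, writes each PDE, then reads back the last entry written (the ioread32 on line 165) so the posted MMIO writes complete before gen6_ggtt_invalidate() kicks the TLB. A user-space model of that write/read-back/invalidate ordering, with a pthread mutex and plain volatile stores as stand-ins for the real locking and iowrite32/ioread32.

#include <pthread.h>
#include <stdint.h>

#define N_PDES 512

struct ppgtt {
	pthread_mutex_t flush;         /* serialises PD rewrites */
	volatile uint32_t pd_mmio[N_PDES];
};

static void tlb_invalidate(void) { /* stand-in for gen6_ggtt_invalidate() */ }

static void flush_pd(struct ppgtt *vm, unsigned int from, unsigned int to,
		     const uint32_t *pdes)
{
	unsigned int pde;

	pthread_mutex_lock(&vm->flush);

	for (pde = from; pde < to; pde++)
		vm->pd_mmio[pde] = pdes[pde];

	/* Read back the last entry so the writes are posted first. */
	(void)vm->pd_mmio[to - 1];
	tlb_invalidate();

	pthread_mutex_unlock(&vm->flush);
}

int main(void)
{
	static struct ppgtt vm = { .flush = PTHREAD_MUTEX_INITIALIZER };
	uint32_t pdes[N_PDES] = { 0 };

	flush_pd(&vm, 0, 4, pdes);
	return 0;
}
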
176 struct gen6_ppgtt *ppgtt = to_gen6_ppgtt(i915_vm_to_ppgtt(vm)); in gen6_alloc_va_range() local
177 struct i915_page_directory * const pd = ppgtt->base.pd; in gen6_alloc_va_range()
211 if (flush && i915_vma_is_bound(ppgtt->vma, I915_VMA_GLOBAL_BIND)) { in gen6_alloc_va_range()
215 gen6_flush_pd(ppgtt, from, start); in gen6_alloc_va_range()
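
gen6_alloc_va_range() only calls gen6_flush_pd() when new PDEs were written and the directory is actually live in the GGTT (the i915_vma_is_bound() check on line 211); an unbound directory gets fully flushed later by pd_vma_bind(). A sketch of that allocate-then-conditionally-flush shape, with made-up types; "bound" plays the role of I915_VMA_GLOBAL_BIND.

#include <stdbool.h>
#include <stdlib.h>

struct pt { int dummy; };

struct ppgtt {
	struct pt *pd[512];
	bool bound;                    /* directory live in the GGTT? */
};

static void flush_pd(struct ppgtt *vm, unsigned int from, unsigned int to)
{
	/* stand-in for gen6_flush_pd() */
	(void)vm; (void)from; (void)to;
}

static int alloc_va_range(struct ppgtt *vm, unsigned int from, unsigned int to)
{
	bool flush = false;
	unsigned int pde;

	for (pde = from; pde < to; pde++) {
		if (vm->pd[pde])
			continue;      /* already populated */

		vm->pd[pde] = calloc(1, sizeof(struct pt));
		if (!vm->pd[pde])
			return -1;
		flush = true;          /* a new PDE must reach hardware */
	}

	/* Skip the MMIO writes while the directory is not yet bound. */
	if (flush && vm->bound)
		flush_pd(vm, from, to);

	return 0;
}

int main(void)
{
	struct ppgtt vm = { .bound = true };

	return alloc_va_range(&vm, 0, 8);
}
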
219 static int gen6_ppgtt_init_scratch(struct gen6_ppgtt *ppgtt) in gen6_ppgtt_init_scratch() argument
221 struct i915_address_space * const vm = &ppgtt->base.vm; in gen6_ppgtt_init_scratch()
256 static void gen6_ppgtt_free_pd(struct gen6_ppgtt *ppgtt) in gen6_ppgtt_free_pd() argument
258 struct i915_page_directory * const pd = ppgtt->base.pd; in gen6_ppgtt_free_pd()
264 free_pt(&ppgtt->base.vm, pt); in gen6_ppgtt_free_pd()
269 struct gen6_ppgtt *ppgtt = to_gen6_ppgtt(i915_vm_to_ppgtt(vm)); in gen6_ppgtt_cleanup() local
271 gen6_ppgtt_free_pd(ppgtt); in gen6_ppgtt_cleanup()
274 if (ppgtt->base.pd) in gen6_ppgtt_cleanup()
275 free_pd(&ppgtt->base.vm, ppgtt->base.pd); in gen6_ppgtt_cleanup()
277 mutex_destroy(&ppgtt->flush); in gen6_ppgtt_cleanup()
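
The teardown fragments split into two steps: gen6_ppgtt_free_pd() frees every per-PDE page table, then gen6_ppgtt_cleanup() frees the directory itself, guarded by a NULL check because creation can fail before the top-level pd exists, and finally destroys the flush mutex. A compact model of that ordering, with simplified types.

#include <pthread.h>
#include <stdlib.h>

#define N_PDES 512

struct pt { int dummy; };

struct ppgtt {
	struct pt **pd;                /* NULL until fully created */
	pthread_mutex_t flush;
};

static void ppgtt_cleanup(struct ppgtt *vm)
{
	unsigned int pde;

	if (vm->pd) {
		for (pde = 0; pde < N_PDES; pde++)
			free(vm->pd[pde]);   /* free(NULL) is a no-op */
		free(vm->pd);
	}

	pthread_mutex_destroy(&vm->flush);
}

int main(void)
{
	struct ppgtt vm = { .flush = PTHREAD_MUTEX_INITIALIZER };

	vm.pd = calloc(N_PDES, sizeof(*vm.pd));
	ppgtt_cleanup(&vm);
	return 0;
}
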
287 struct gen6_ppgtt *ppgtt = vma_res->private; in pd_vma_bind() local
290 ppgtt->pp_dir = ggtt_offset * sizeof(gen6_pte_t) << 10; in pd_vma_bind()
291 ppgtt->pd_addr = (gen6_pte_t __iomem *)ggtt->gsm + ggtt_offset; in pd_vma_bind()
293 gen6_flush_pd(ppgtt, 0, ppgtt->base.vm.total); in pd_vma_bind()
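
pd_vma_bind() derives two things from the directory's GGTT offset: pp_dir, the value later handed to hardware (the sizeof(gen6_pte_t) multiply and <<10 presumably match the register format the hardware expects, an assumption here), and pd_addr, the CPU pointer into the GSM, before rewriting the whole directory. A sketch of just that derivation.

#include <stdint.h>
#include <stdio.h>

typedef uint32_t gen6_pte_t;

struct ppgtt {
	uint32_t pp_dir;               /* value handed to hardware   */
	volatile gen6_pte_t *pd_addr;  /* CPU mapping of the entries */
};

static void pd_bind(struct ppgtt *vm, volatile gen6_pte_t *gsm,
		    uint64_t ggtt_offset /* in pages */)
{
	vm->pp_dir = ggtt_offset * sizeof(gen6_pte_t) << 10;
	vm->pd_addr = gsm + ggtt_offset;

	/* The real bind then calls gen6_flush_pd() over the whole vm. */
}

int main(void)
{
	static volatile gen6_pte_t gsm[4096];
	struct ppgtt vm;

	pd_bind(&vm, gsm, 16);
	printf("pp_dir=%#x\n", vm.pp_dir);
	return 0;
}
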
299 struct gen6_ppgtt *ppgtt = vma_res->private; in pd_vma_unbind() local
300 struct i915_page_directory * const pd = ppgtt->base.pd; in pd_vma_unbind()
304 if (!ppgtt->scan_for_unused_pt) in pd_vma_unbind()
308 gen6_for_all_pdes(pt, ppgtt->base.pd, pde) { in pd_vma_unbind()
312 free_pt(&ppgtt->base.vm, pt); in pd_vma_unbind()
316 ppgtt->scan_for_unused_pt = false; in pd_vma_unbind()
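
pd_vma_unbind() is where the flag set by clear_range pays off: only when scan_for_unused_pt is set does it sweep all PDEs and free the tables with no live entries, then clear the flag. A model of that gated sweep; "used" is again an invented stand-in for occupancy tracking.

#include <stdbool.h>
#include <stdlib.h>

#define N_PDES 512

struct pt { unsigned int used; };

struct ppgtt {
	struct pt *pd[N_PDES];
	bool scan_for_unused_pt;
};

static void pd_unbind(struct ppgtt *vm)
{
	unsigned int pde;

	if (!vm->scan_for_unused_pt)
		return;                /* nothing was cleared: skip the sweep */

	for (pde = 0; pde < N_PDES; pde++) {
		struct pt *pt = vm->pd[pde];

		if (pt && !pt->used) {
			free(pt);
			vm->pd[pde] = NULL;
		}
	}

	vm->scan_for_unused_pt = false;
}

int main(void)
{
	struct ppgtt vm = { .scan_for_unused_pt = true };

	vm.pd[0] = calloc(1, sizeof(struct pt));
	pd_unbind(&vm);
	return vm.pd[0] ? 1 : 0;
}
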
326 struct gen6_ppgtt *ppgtt = to_gen6_ppgtt(base); in gen6_ppgtt_pin() local
329 GEM_BUG_ON(!kref_read(&ppgtt->base.vm.ref)); in gen6_ppgtt_pin()
337 if (atomic_add_unless(&ppgtt->pin_count, 1, 0)) in gen6_ppgtt_pin()
341 err = i915_vm_lock_objects(&ppgtt->base.vm, ww); in gen6_ppgtt_pin()
350 if (!atomic_read(&ppgtt->pin_count)) { in gen6_ppgtt_pin()
351 err = i915_ggtt_pin(ppgtt->vma, ww, GEN6_PD_ALIGN, PIN_HIGH); in gen6_ppgtt_pin()
353 GEM_BUG_ON(ppgtt->vma->fence); in gen6_ppgtt_pin()
354 clear_bit(I915_VMA_CAN_FENCE_BIT, __i915_vma_flags(ppgtt->vma)); in gen6_ppgtt_pin()
357 atomic_inc(&ppgtt->pin_count); in gen6_ppgtt_pin()
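
gen6_ppgtt_pin() is a classic optimistic pin: atomic_add_unless(&pin_count, 1, 0) takes another reference only if the object is already pinned, and the first pin falls into a locked slow path that re-checks pin_count before the expensive i915_ggtt_pin(), then publishes with an increment. A self-contained C11 model of the same shape; the CAS loop reimplements atomic_add_unless, and the mutex is a stand-in for the driver's ww locking.

#include <pthread.h>
#include <stdatomic.h>
#include <stdbool.h>

struct ppgtt {
	atomic_int pin_count;
	pthread_mutex_t lock;          /* stand-in for the ww context */
	bool bound;
};

/* C11 equivalent of the kernel's atomic_add_unless(v, 1, 0). */
static bool add_unless_zero(atomic_int *v)
{
	int old = atomic_load(v);

	while (old != 0)
		if (atomic_compare_exchange_weak(v, &old, old + 1))
			return true;
	return false;
}

static int ppgtt_pin(struct ppgtt *vm)
{
	/* Fast path: already pinned, just take another reference. */
	if (add_unless_zero(&vm->pin_count))
		return 0;

	pthread_mutex_lock(&vm->lock);
	if (atomic_load(&vm->pin_count) == 0)
		vm->bound = true;      /* stand-in for i915_ggtt_pin() */
	atomic_fetch_add(&vm->pin_count, 1);
	pthread_mutex_unlock(&vm->lock);
	return 0;
}

int main(void)
{
	struct ppgtt vm = { .lock = PTHREAD_MUTEX_INITIALIZER };

	ppgtt_pin(&vm);                /* slow path */
	ppgtt_pin(&vm);                /* fast path */
	return atomic_load(&vm.pin_count) == 2 ? 0 : 1;
}
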
380 gen6_alloc_top_pd(struct gen6_ppgtt *ppgtt) in gen6_alloc_top_pd() argument
382 struct i915_ggtt * const ggtt = ppgtt->base.vm.gt->ggtt; in gen6_alloc_top_pd()
390 pd->pt.base = __i915_gem_object_create_internal(ppgtt->base.vm.gt->i915, in gen6_alloc_top_pd()
399 pd->pt.base->base.resv = i915_vm_resv_get(&ppgtt->base.vm); in gen6_alloc_top_pd()
400 pd->pt.base->shares_resv_from = &ppgtt->base.vm; in gen6_alloc_top_pd()
402 ppgtt->vma = i915_vma_instance(pd->pt.base, &ggtt->vm, NULL); in gen6_alloc_top_pd()
403 if (IS_ERR(ppgtt->vma)) { in gen6_alloc_top_pd()
404 err = PTR_ERR(ppgtt->vma); in gen6_alloc_top_pd()
405 ppgtt->vma = NULL; in gen6_alloc_top_pd()
410 ppgtt->vma->ops = &pd_vma_ops; in gen6_alloc_top_pd()
411 ppgtt->vma->private = ppgtt; in gen6_alloc_top_pd()
415 free_pd(&ppgtt->base.vm, pd); in gen6_alloc_top_pd()
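
gen6_alloc_top_pd() leans on the kernel's ERR_PTR convention: i915_vma_instance() returns either a valid pointer or an encoded errno, and on failure the function clears ppgtt->vma (lines 404-405) so no ERR_PTR is left behind, then unwinds the partially built pd. A user-space imitation of that convention; err_ptr/ptr_err/is_err are simplified stand-ins for the kernel helpers.

#include <errno.h>
#include <stdint.h>
#include <stdlib.h>

static void *err_ptr(long err)      { return (void *)err; }
static long  ptr_err(const void *p) { return (long)p; }
static int   is_err(const void *p)
{
	return (uintptr_t)p >= (uintptr_t)-4095;
}

struct pd  { struct vma *vma; };
struct vma { int dummy; };

static struct vma *vma_instance(void)
{
	return err_ptr(-ENOMEM);       /* simulate a failing allocation */
}

static struct pd *alloc_top_pd(long *err)
{
	struct pd *pd = calloc(1, sizeof(*pd));

	if (!pd)
		return err_ptr(-ENOMEM);

	pd->vma = vma_instance();
	if (is_err(pd->vma)) {
		*err = ptr_err(pd->vma);
		pd->vma = NULL;        /* never keep an ERR_PTR around */
		free(pd);              /* unwind the partial allocation */
		return err_ptr(*err);
	}
	return pd;
}

int main(void)
{
	long err = 0;
	struct pd *pd = alloc_top_pd(&err);

	return is_err(pd) && err == -ENOMEM ? 0 : 1;
}
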
421 struct gen6_ppgtt *ppgtt = to_gen6_ppgtt(base); in gen6_ppgtt_unpin() local
423 GEM_BUG_ON(!atomic_read(&ppgtt->pin_count)); in gen6_ppgtt_unpin()
424 if (atomic_dec_and_test(&ppgtt->pin_count)) in gen6_ppgtt_unpin()
425 i915_vma_unpin(ppgtt->vma); in gen6_ppgtt_unpin()
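
gen6_ppgtt_unpin() is the mirror image: atomic_dec_and_test() makes the final unpin, and only that one, drop the underlying vma. A matching model; "bound" again stands in for the GGTT binding.

#include <stdatomic.h>
#include <stdbool.h>

struct ppgtt { atomic_int pin_count; bool bound; };

static void ppgtt_unpin(struct ppgtt *vm)
{
	/* atomic_dec_and_test(): true only for the 1 -> 0 transition. */
	if (atomic_fetch_sub(&vm->pin_count, 1) == 1)
		vm->bound = false;     /* stand-in for i915_vma_unpin() */
}

int main(void)
{
	struct ppgtt vm = { .pin_count = 2, .bound = true };

	ppgtt_unpin(&vm);              /* still pinned            */
	ppgtt_unpin(&vm);              /* last reference: unbinds */
	return vm.bound ? 1 : 0;
}
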
431 struct gen6_ppgtt *ppgtt; in gen6_ppgtt_create() local
434 ppgtt = kzalloc(sizeof(*ppgtt), GFP_KERNEL); in gen6_ppgtt_create()
435 if (!ppgtt) in gen6_ppgtt_create()
438 mutex_init(&ppgtt->flush); in gen6_ppgtt_create()
440 ppgtt_init(&ppgtt->base, gt, 0); in gen6_ppgtt_create()
441 ppgtt->base.vm.pd_shift = ilog2(SZ_4K * SZ_4K / sizeof(gen6_pte_t)); in gen6_ppgtt_create()
442 ppgtt->base.vm.top = 1; in gen6_ppgtt_create()
444 ppgtt->base.vm.bind_async_flags = I915_VMA_LOCAL_BIND; in gen6_ppgtt_create()
445 ppgtt->base.vm.allocate_va_range = gen6_alloc_va_range; in gen6_ppgtt_create()
446 ppgtt->base.vm.clear_range = gen6_ppgtt_clear_range; in gen6_ppgtt_create()
447 ppgtt->base.vm.insert_entries = gen6_ppgtt_insert_entries; in gen6_ppgtt_create()
448 ppgtt->base.vm.cleanup = gen6_ppgtt_cleanup; in gen6_ppgtt_create()
450 ppgtt->base.vm.alloc_pt_dma = alloc_pt_dma; in gen6_ppgtt_create()
451 ppgtt->base.vm.alloc_scratch_dma = alloc_pt_dma; in gen6_ppgtt_create()
452 ppgtt->base.vm.pte_encode = ggtt->vm.pte_encode; in gen6_ppgtt_create()
454 err = gen6_ppgtt_init_scratch(ppgtt); in gen6_ppgtt_create()
458 ppgtt->base.pd = gen6_alloc_top_pd(ppgtt); in gen6_ppgtt_create()
459 if (IS_ERR(ppgtt->base.pd)) { in gen6_ppgtt_create()
460 err = PTR_ERR(ppgtt->base.pd); in gen6_ppgtt_create()
464 return &ppgtt->base; in gen6_ppgtt_create()
467 i915_vm_put(&ppgtt->base.vm); in gen6_ppgtt_create()
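
gen6_ppgtt_create() mostly wires up an ops table, but the pd_shift line carries real arithmetic: a 4 KiB page table holds SZ_4K / sizeof(gen6_pte_t) = 1024 PTEs, each mapping 4 KiB, so one PDE spans 4 MiB and ilog2 yields 22. A sketch that checks that arithmetic and imitates the ops-table setup; the vm_ops type and its members are invented here.

#include <stdint.h>
#include <stdio.h>

#define SZ_4K 4096u
typedef uint32_t gen6_pte_t;

struct vm_ops {
	int  (*allocate_va_range)(void);
	void (*clear_range)(void);
	void (*cleanup)(void);
};

static int  alloc_va(void) { return 0; }
static void clr(void)      { }
static void cleanup(void)  { }

static unsigned int ilog2_u32(uint32_t v)
{
	unsigned int r = 0;

	while (v >>= 1)
		r++;
	return r;
}

int main(void)
{
	struct vm_ops ops = {
		.allocate_va_range = alloc_va,
		.clear_range = clr,
		.cleanup = cleanup,
	};
	/* 1024 PTEs per table, 4 KiB per PTE: each PDE spans 4 MiB. */
	unsigned int pd_shift = ilog2_u32(SZ_4K * SZ_4K / sizeof(gen6_pte_t));

	printf("pd_shift=%u (covers %u MiB per PDE)\n",
	       pd_shift, (1u << pd_shift) >> 20);
	(void)ops;
	return pd_shift == 22 ? 0 : 1;
}
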