
Searched refs:ndw (Results 1 – 23 of 23) sorted by relevance

/linux/drivers/gpu/drm/radeon/
ni_dma.c
319 unsigned ndw; in cayman_dma_vm_copy_pages() local
322 ndw = count * 2; in cayman_dma_vm_copy_pages()
323 if (ndw > 0xFFFFE) in cayman_dma_vm_copy_pages()
324 ndw = 0xFFFFE; in cayman_dma_vm_copy_pages()
327 0, 0, ndw); in cayman_dma_vm_copy_pages()
333 pe += ndw * 4; in cayman_dma_vm_copy_pages()
334 src += ndw * 4; in cayman_dma_vm_copy_pages()
335 count -= ndw / 2; in cayman_dma_vm_copy_pages()
359 unsigned ndw; in cayman_dma_vm_write_pages() local
362 ndw = count * 2; in cayman_dma_vm_write_pages()
[all …]
si_dma.c
111 unsigned ndw; in si_dma_vm_write_pages() local
114 ndw = count * 2; in si_dma_vm_write_pages()
115 if (ndw > 0xFFFFE) in si_dma_vm_write_pages()
116 ndw = 0xFFFFE; in si_dma_vm_write_pages()
119 ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 0, ndw); in si_dma_vm_write_pages()
122 for (; ndw > 0; ndw -= 2, --count, pe += 8) { in si_dma_vm_write_pages()
158 unsigned ndw; in si_dma_vm_set_pages() local
161 ndw = count * 2; in si_dma_vm_set_pages()
162 if (ndw > 0xFFFFE) in si_dma_vm_set_pages()
163 ndw = 0xFFFFE; in si_dma_vm_set_pages()
[all …]
radeon_ring.c
109 int radeon_ring_alloc(struct radeon_device *rdev, struct radeon_ring *ring, unsigned ndw) in radeon_ring_alloc() argument
114 if (ndw > (ring->ring_size / 4)) in radeon_ring_alloc()
119 ndw = (ndw + ring->align_mask) & ~ring->align_mask; in radeon_ring_alloc()
120 while (ndw > (ring->ring_free_dw - 1)) { in radeon_ring_alloc()
122 if (ndw < ring->ring_free_dw) { in radeon_ring_alloc()
129 ring->count_dw = ndw; in radeon_ring_alloc()
145 int radeon_ring_lock(struct radeon_device *rdev, struct radeon_ring *ring, unsigned ndw) in radeon_ring_lock() argument
150 r = radeon_ring_alloc(rdev, ring, ndw); in radeon_ring_lock()
cik_sdma.c
847 unsigned ndw; in cik_sdma_vm_write_pages() local
850 ndw = count * 2; in cik_sdma_vm_write_pages()
851 if (ndw > 0xFFFFE) in cik_sdma_vm_write_pages()
852 ndw = 0xFFFFE; in cik_sdma_vm_write_pages()
859 ib->ptr[ib->length_dw++] = ndw; in cik_sdma_vm_write_pages()
860 for (; ndw > 0; ndw -= 2, --count, pe += 8) { in cik_sdma_vm_write_pages()
896 unsigned ndw; in cik_sdma_vm_set_pages() local
899 ndw = count; in cik_sdma_vm_set_pages()
900 if (ndw > 0x7FFFF) in cik_sdma_vm_set_pages()
901 ndw = 0x7FFFF; in cik_sdma_vm_set_pages()
[all …]
radeon_vm.c
647 unsigned count = 0, pt_idx, ndw; in radeon_vm_update_page_directory() local
652 ndw = 64; in radeon_vm_update_page_directory()
655 ndw += vm->max_pde_used * 6; in radeon_vm_update_page_directory()
658 if (ndw > 0xfffff) in radeon_vm_update_page_directory()
661 r = radeon_ib_get(rdev, R600_RING_TYPE_DMA_INDEX, &ib, NULL, ndw * 4); in radeon_vm_update_page_directory()
705 WARN_ON(ib.length_dw > ndw); in radeon_vm_update_page_directory()
917 unsigned nptes, ncmds, ndw; in radeon_vm_bo_update() local
973 ndw = 64; in radeon_vm_bo_update()
978 ndw += ncmds * 7; in radeon_vm_bo_update()
982 ndw += ncmds * 4; in radeon_vm_bo_update()
[all …]
r100.c
913 unsigned ndw; in r100_copy_blit() local
925 ndw = 64 + (10 * num_loops); in r100_copy_blit()
926 r = radeon_ring_lock(rdev, ring, ndw); in r100_copy_blit()
928 DRM_ERROR("radeon: moving bo (%d) asking for %u dw.\n", r, ndw); in r100_copy_blit()
radeon.h
978 int radeon_ring_alloc(struct radeon_device *rdev, struct radeon_ring *cp, unsigned ndw);
979 int radeon_ring_lock(struct radeon_device *rdev, struct radeon_ring *cp, unsigned ndw);
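Note: the radeon DMA matches above share one chunking pattern: each page-table entry takes two dwords, so ndw = count * 2 is clamped to the 0xFFFFE dword limit of a single DMA packet, and the helper loops until every entry has been written. A minimal standalone sketch of that logic, with the hypothetical emit_write_packet() standing in for the hardware-specific packet encoding:

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical stand-in for the hardware-specific packet emission. */
    static void emit_write_packet(uint64_t pe, unsigned ndw)
    {
            printf("packet: pe=0x%llx, %u dwords\n", (unsigned long long)pe, ndw);
    }

    /* Chunking as seen in cayman_dma_vm_copy_pages()/si_dma_vm_write_pages():
     * split 'count' PTE updates into packets of at most 0xFFFFE dwords. */
    static void write_ptes_chunked(uint64_t pe, unsigned count)
    {
            while (count) {
                    unsigned ndw = count * 2;

                    if (ndw > 0xFFFFE)
                            ndw = 0xFFFFE;

                    emit_write_packet(pe, ndw);

                    pe += ndw * 4;      /* a dword is 4 bytes */
                    count -= ndw / 2;   /* two dwords per entry */
            }
    }

    int main(void)
    {
            write_ptes_chunked(0x1000, 600000); /* large enough to need two packets */
            return 0;
    }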
/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_vm_sdma.c
49 unsigned int ndw; in amdgpu_vm_sdma_alloc_job() local
53 ndw = AMDGPU_VM_SDMA_MIN_NUM_DW; in amdgpu_vm_sdma_alloc_job()
55 ndw += count * 2; in amdgpu_vm_sdma_alloc_job()
56 ndw = min(ndw, AMDGPU_VM_SDMA_MAX_NUM_DW); in amdgpu_vm_sdma_alloc_job()
59 ndw * 4, pool, &p->job); in amdgpu_vm_sdma_alloc_job()
63 p->num_dw_left = ndw; in amdgpu_vm_sdma_alloc_job()
225 unsigned int i, ndw, nptes; in amdgpu_vm_sdma_update() local
244 ndw = p->num_dw_left; in amdgpu_vm_sdma_update()
245 ndw -= p->job->ibs->length_dw; in amdgpu_vm_sdma_update()
247 if (ndw < 32) { in amdgpu_vm_sdma_update()
[all …]
si_dma.c
335 unsigned ndw = count * 2; in si_dma_vm_write_pte() local
337 ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 0, ndw); in si_dma_vm_write_pte()
340 for (; ndw > 0; ndw -= 2) { in si_dma_vm_write_pte()
365 unsigned ndw; in si_dma_vm_set_pte_pde() local
368 ndw = count * 2; in si_dma_vm_set_pte_pde()
369 if (ndw > 0xFFFFE) in si_dma_vm_set_pte_pde()
370 ndw = 0xFFFFE; in si_dma_vm_set_pte_pde()
378 ib->ptr[ib->length_dw++] = DMA_PTE_PDE_PACKET(ndw); in si_dma_vm_set_pte_pde()
387 pe += ndw * 4; in si_dma_vm_set_pte_pde()
388 addr += (ndw / 2) * incr; in si_dma_vm_set_pte_pde()
[all …]
amdgpu_ring.c
81 int amdgpu_ring_alloc(struct amdgpu_ring *ring, unsigned int ndw) in amdgpu_ring_alloc() argument
85 ndw = (ndw + ring->funcs->align_mask) & ~ring->funcs->align_mask; in amdgpu_ring_alloc()
90 if (WARN_ON_ONCE(ndw > ring->max_dw)) in amdgpu_ring_alloc()
93 ring->count_dw = ndw; in amdgpu_ring_alloc()
amdgpu_gmc.c
685 unsigned int ndw; in amdgpu_gmc_flush_gpu_tlb_pasid() local
713 ndw = kiq->pmf->invalidate_tlbs_size + 8; in amdgpu_gmc_flush_gpu_tlb_pasid()
716 ndw += kiq->pmf->invalidate_tlbs_size; in amdgpu_gmc_flush_gpu_tlb_pasid()
719 ndw += kiq->pmf->invalidate_tlbs_size; in amdgpu_gmc_flush_gpu_tlb_pasid()
722 r = amdgpu_ring_alloc(ring, ndw); in amdgpu_gmc_flush_gpu_tlb_pasid()
sdma_v2_4.c
682 unsigned ndw = count * 2; in sdma_v2_4_vm_write_pte() local
688 ib->ptr[ib->length_dw++] = ndw; in sdma_v2_4_vm_write_pte()
689 for (; ndw > 0; ndw -= 2) { in sdma_v2_4_vm_write_pte()
cik_sdma.c
747 unsigned ndw = count * 2; in cik_sdma_vm_write_pte() local
753 ib->ptr[ib->length_dw++] = ndw; in cik_sdma_vm_write_pte()
754 for (; ndw > 0; ndw -= 2) { in cik_sdma_vm_write_pte()
sdma_v3_0.c
955 unsigned ndw = count * 2; in sdma_v3_0_vm_write_pte() local
961 ib->ptr[ib->length_dw++] = ndw; in sdma_v3_0_vm_write_pte()
962 for (; ndw > 0; ndw -= 2) { in sdma_v3_0_vm_write_pte()
sdma_v6_0.c
1095 unsigned ndw = count * 2; in sdma_v6_0_vm_write_pte() local
1101 ib->ptr[ib->length_dw++] = ndw - 1; in sdma_v6_0_vm_write_pte()
1102 for (; ndw > 0; ndw -= 2) { in sdma_v6_0_vm_write_pte()
sdma_v7_0.c
1117 unsigned ndw = count * 2; in sdma_v7_0_vm_write_pte() local
1123 ib->ptr[ib->length_dw++] = ndw - 1; in sdma_v7_0_vm_write_pte()
1124 for (; ndw > 0; ndw -= 2) { in sdma_v7_0_vm_write_pte()
sdma_v5_2.c
1074 unsigned ndw = count * 2; in sdma_v5_2_vm_write_pte() local
1080 ib->ptr[ib->length_dw++] = ndw - 1; in sdma_v5_2_vm_write_pte()
1081 for (; ndw > 0; ndw -= 2) { in sdma_v5_2_vm_write_pte()
sdma_v5_0.c
1225 unsigned ndw = count * 2; in sdma_v5_0_vm_write_pte() local
1231 ib->ptr[ib->length_dw++] = ndw - 1; in sdma_v5_0_vm_write_pte()
1232 for (; ndw > 0; ndw -= 2) { in sdma_v5_0_vm_write_pte()
sdma_v4_4_2.c
1163 unsigned ndw = count * 2; in sdma_v4_4_2_vm_write_pte() local
1169 ib->ptr[ib->length_dw++] = ndw - 1; in sdma_v4_4_2_vm_write_pte()
1170 for (; ndw > 0; ndw -= 2) { in sdma_v4_4_2_vm_write_pte()
sdma_v4_0.c
1618 unsigned ndw = count * 2; in sdma_v4_0_vm_write_pte() local
1624 ib->ptr[ib->length_dw++] = ndw - 1; in sdma_v4_0_vm_write_pte()
1625 for (; ndw > 0; ndw -= 2) { in sdma_v4_0_vm_write_pte()
amdgpu_ring.h
342 int amdgpu_ring_alloc(struct amdgpu_ring *ring, unsigned ndw);
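Note: the ring allocation matches above (radeon_ring_alloc() and amdgpu_ring_alloc()) round the requested dword count up to the ring's alignment with a mask, i.e. ndw = (ndw + align_mask) & ~align_mask. A small sketch with an assumed 8-dword alignment (mask 0x7):

    #include <assert.h>

    /* Round a dword request up to the ring alignment; align_mask is
     * (alignment - 1) for a power-of-two alignment. */
    static unsigned align_ndw(unsigned ndw, unsigned align_mask)
    {
            return (ndw + align_mask) & ~align_mask;
    }

    int main(void)
    {
            assert(align_ndw(10, 0x7) == 16);   /* rounded up */
            assert(align_ndw(16, 0x7) == 16);   /* already aligned */
            return 0;
    }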
/linux/drivers/crypto/aspeed/
aspeed-acry.c
253 int nbits, ndw; in aspeed_acry_rsa_ctx_copy() local
273 ndw = DIV_ROUND_UP(nbytes, BYTES_PER_DWORD); in aspeed_acry_rsa_ctx_copy()
279 for (j = ndw; j > 0; j--) { in aspeed_acry_rsa_ctx_copy()
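Note: in the aspeed-acry.c match, ndw is the RSA operand length rounded up to whole dwords. A sketch of that conversion, assuming BYTES_PER_DWORD is 4:

    /* DIV_ROUND_UP as defined in the kernel, repeated here so the sketch
     * stands alone. */
    #define DIV_ROUND_UP(n, d)  (((n) + (d) - 1) / (d))
    #define BYTES_PER_DWORD     4   /* assumed: one dword = 4 bytes */

    /* e.g. a 2048-bit (256-byte) modulus needs 64 dwords */
    static unsigned bytes_to_dwords(unsigned nbytes)
    {
            return DIV_ROUND_UP(nbytes, BYTES_PER_DWORD);
    }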
/linux/kernel/rcu/
tree_nocb.h
964 int ndw; in do_nocb_deferred_wakeup_common() local
972 ndw = rdp_gp->nocb_defer_wakeup; in do_nocb_deferred_wakeup_common()
973 ret = __wake_nocb_gp(rdp_gp, rdp, ndw == RCU_NOCB_WAKE_FORCE, flags); in do_nocb_deferred_wakeup_common()