
Searched refs: length_dw (Results 1 – 25 of 31), sorted by relevance


/linux/drivers/gpu/drm/radeon/
si_dma.c
     78  ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY,  in si_dma_vm_copy_pages()
     80  ib->ptr[ib->length_dw++] = lower_32_bits(pe);  in si_dma_vm_copy_pages()
     81  ib->ptr[ib->length_dw++] = lower_32_bits(src);  in si_dma_vm_copy_pages()
     82  ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;  in si_dma_vm_copy_pages()
     83  ib->ptr[ib->length_dw++] = upper_32_bits(src) & 0xff;  in si_dma_vm_copy_pages()
    119  ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 0, ndw);  in si_dma_vm_write_pages()
    120  ib->ptr[ib->length_dw++] = pe;  in si_dma_vm_write_pages()
    121  ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;  in si_dma_vm_write_pages()
    132  ib->ptr[ib->length_dw++] = value;  in si_dma_vm_write_pages()
    133  ib->ptr[ib->length_dw++] = upper_32_bits(value);  in si_dma_vm_write_pages()
    [all …]
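
Taken together, the GPU-driver hits follow a single pattern: length_dw is a cursor that advances one 32-bit dword at a time while the CPU fills an indirect buffer (IB), and its final value is the IB's size in dwords. A minimal sketch of that pattern, assuming a simplified stand-in for the driver's IB struct (the DMA_PACKET() header encoding is omitted; lower_32_bits()/upper_32_bits() mimic the kernel helpers):

    #include <stdint.h>

    struct ib {
            uint32_t *ptr;        /* CPU mapping of the IB dwords */
            uint32_t length_dw;   /* dwords written so far */
    };

    static uint32_t lower_32_bits(uint64_t v) { return (uint32_t)v; }
    static uint32_t upper_32_bits(uint64_t v) { return (uint32_t)(v >> 32); }

    /* Emit a COPY packet the way si_dma_vm_copy_pages() does: header
     * first, then low halves and high bytes of the destination (pe) and
     * source addresses. 'header' stands in for DMA_PACKET(...). */
    static void emit_copy(struct ib *ib, uint64_t pe, uint64_t src, uint32_t header)
    {
            ib->ptr[ib->length_dw++] = header;
            ib->ptr[ib->length_dw++] = lower_32_bits(pe);
            ib->ptr[ib->length_dw++] = lower_32_bits(src);
            ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;
            ib->ptr[ib->length_dw++] = upper_32_bits(src) & 0xff;
    }
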
ni_dma.c
    145  radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF));  in cayman_dma_ring_ib_execute()
    326  ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY,  in cayman_dma_vm_copy_pages()
    328  ib->ptr[ib->length_dw++] = lower_32_bits(pe);  in cayman_dma_vm_copy_pages()
    329  ib->ptr[ib->length_dw++] = lower_32_bits(src);  in cayman_dma_vm_copy_pages()
    330  ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;  in cayman_dma_vm_copy_pages()
    331  ib->ptr[ib->length_dw++] = upper_32_bits(src) & 0xff;  in cayman_dma_vm_copy_pages()
    367  ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_WRITE,  in cayman_dma_vm_write_pages()
    369  ib->ptr[ib->length_dw++] = pe;  in cayman_dma_vm_write_pages()
    370  ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;  in cayman_dma_vm_write_pages()
    381  ib->ptr[ib->length_dw++] = value;  in cayman_dma_vm_write_pages()
    [all …]
radeon_cs.c
     95  p->nrelocs = chunk->length_dw / 4;  in radeon_cs_parser_relocs()
    313  p->chunks[i].length_dw = user_chunk.length_dw;  in radeon_cs_parser_init()
    320  if (p->chunks[i].length_dw == 0)  in radeon_cs_parser_init()
    326  if (p->chunks[i].length_dw == 0)  in radeon_cs_parser_init()
    332  if (p->chunks[i].length_dw == 0)  in radeon_cs_parser_init()
    336  size = p->chunks[i].length_dw;  in radeon_cs_parser_init()
    357  if (p->chunks[i].length_dw > 1)  in radeon_cs_parser_init()
    359  if (p->chunks[i].length_dw > 2)  in radeon_cs_parser_init()
    562  if (parser->const_ib.length_dw) {  in radeon_cs_ib_vm_chunk()
    628  if (ib_chunk->length_dw > RADEON_IB_VM_MAX_SIZE) {  in radeon_cs_ib_fill()
    [all …]
cik_sdma.c
    156  radeon_ring_write(ring, ib->length_dw);  in cik_sdma_ring_ib_execute()
    731  ib.length_dw = 5;  in cik_sdma_ib_test()
    812  ib->ptr[ib->length_dw++] = SDMA_PACKET(SDMA_OPCODE_COPY,  in cik_sdma_vm_copy_pages()
    814  ib->ptr[ib->length_dw++] = bytes;  in cik_sdma_vm_copy_pages()
    815  ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */  in cik_sdma_vm_copy_pages()
    816  ib->ptr[ib->length_dw++] = lower_32_bits(src);  in cik_sdma_vm_copy_pages()
    817  ib->ptr[ib->length_dw++] = upper_32_bits(src);  in cik_sdma_vm_copy_pages()
    818  ib->ptr[ib->length_dw++] = lower_32_bits(pe);  in cik_sdma_vm_copy_pages()
    819  ib->ptr[ib->length_dw++] = upper_32_bits(pe);  in cik_sdma_vm_copy_pages()
    855  ib->ptr[ib->length_dw++] = SDMA_PACKET(SDMA_OPCODE_WRITE,  in cik_sdma_vm_write_pages()
    [all …]
radeon_vm.c
    409  ib.length_dw = 0;  in radeon_vm_clear_bo()
    413  WARN_ON(ib.length_dw > 64);  in radeon_vm_clear_bo()
    661  ib.length_dw = 0;  in radeon_vm_update_page_directory()
    698  if (ib.length_dw != 0) {  in radeon_vm_update_page_directory()
    702  WARN_ON(ib.length_dw > ndw);  in radeon_vm_update_page_directory()
    999  ib.length_dw = 0;  in radeon_vm_bo_update()
   1017  WARN_ON(ib.length_dw > ndw);  in radeon_vm_bo_update()
r600_dma.c
    362  ib.length_dw = 4;  in r600_dma_ib_test()
    426  radeon_ring_write(ring, (ib->length_dw << 16) | (upper_32_bits(ib->gpu_addr) & 0xFF));  in r600_dma_ring_ib_execute()
evergreen_dma.c
     89  radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF));  in evergreen_dma_ring_ib_execute()
radeon_ib.c
    133  if (!ib->length_dw || !ring->ready) {  in radeon_ib_schedule()
radeon_trace.h
     41  __entry->dw = p->chunk_ib->length_dw;
r600_cs.c
   2328  } while (p->idx < p->chunk_ib->length_dw);  in r600_cs_parse()
   2330  for (r = 0; r < p->ib.length_dw; r++) {  in r600_cs_parse()
   2396  if (p->idx >= ib_chunk->length_dw) {  in r600_dma_cs_parse()
   2398  p->idx, ib_chunk->length_dw);  in r600_dma_cs_parse()
   2534  } while (p->idx < p->chunk_ib->length_dw);  in r600_dma_cs_parse()
   2536  for (r = 0; r < p->ib->length_dw; r++) {  in r600_dma_cs_parse()
evergreen_cs.c
   2867  } while (p->idx < p->chunk_ib->length_dw);  in evergreen_cs_parse()
   2869  for (r = 0; r < p->ib.length_dw; r++) {  in evergreen_cs_parse()
   2899  if (p->idx >= ib_chunk->length_dw) {  in evergreen_dma_cs_parse()
   2901  p->idx, ib_chunk->length_dw);  in evergreen_dma_cs_parse()
   3306  } while (p->idx < p->chunk_ib->length_dw);  in evergreen_dma_cs_parse()
   3308  for (r = 0; r < p->ib->length_dw; r++) {  in evergreen_dma_cs_parse()
   3648  } while (idx < ib->length_dw);  in evergreen_ib_parse()
   3754  } while (idx < ib->length_dw);  in evergreen_dma_ib_parse()
r100.c
   2102  } while (p->idx < p->chunk_ib->length_dw);  in r100_cs_parse()
   3722  radeon_ring_write(ring, ib->length_dw);  in r100_ring_ib_execute()
   3752  ib.length_dw = 8;  in r100_ib_test()
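
At submission time the accumulated length_dw is handed to the hardware: the *_ring_ib_execute() hits write it onto the ring, either as a bare dword (r100, cik_sdma) or packed next to the high bits of the IB's GPU address with an ASIC-specific shift (16 on r600 DMA, 12 on evergreen/cayman DMA). A hedged sketch, reusing struct ib and the helpers from the previous sketch; ring_write() is a stand-in for radeon_ring_write():

    struct ring { uint32_t buf[256]; unsigned wptr; };

    static void ring_write(struct ring *r, uint32_t dw)
    {
            r->buf[r->wptr++] = dw;    /* stand-in for radeon_ring_write() */
    }

    /* Pack the dword count and the IB address high bits into one dword,
     * as the r600/evergreen/cayman DMA hooks do; 'shift' is ASIC-specific. */
    static void dispatch_ib(struct ring *ring, const struct ib *ib,
                            uint64_t gpu_addr, unsigned shift)
    {
            ring_write(ring, (ib->length_dw << shift) |
                             (upper_32_bits(gpu_addr) & 0xFF));
    }
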
/linux/drivers/gpu/drm/amd/amdgpu/
sdma_v6_0.c
    288  amdgpu_ring_write(ring, ib->length_dw);  in sdma_v6_0_ring_emit_ib()
   1018  ib.length_dw = 8;  in sdma_v6_0_ring_test_ib()
   1066  ib->ptr[ib->length_dw++] = SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_COPY) |  in sdma_v6_0_vm_copy_pte()
   1068  ib->ptr[ib->length_dw++] = bytes - 1;  in sdma_v6_0_vm_copy_pte()
   1069  ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */  in sdma_v6_0_vm_copy_pte()
   1070  ib->ptr[ib->length_dw++] = lower_32_bits(src);  in sdma_v6_0_vm_copy_pte()
   1071  ib->ptr[ib->length_dw++] = upper_32_bits(src);  in sdma_v6_0_vm_copy_pte()
   1072  ib->ptr[ib->length_dw++] = lower_32_bits(pe);  in sdma_v6_0_vm_copy_pte()
   1073  ib->ptr[ib->length_dw++] = upper_32_bits(pe);  in sdma_v6_0_vm_copy_pte()
   1094  ib->ptr[ib->length_dw++] = SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_WRITE) |  in sdma_v6_0_vm_write_pte()
   [all …]
sdma_v5_0.c
    453  amdgpu_ring_write(ring, ib->length_dw);  in sdma_v5_0_ring_emit_ib()
   1112  ib.length_dw = 8;  in sdma_v5_0_ring_test_ib()
   1160  ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_COPY) |  in sdma_v5_0_vm_copy_pte()
   1162  ib->ptr[ib->length_dw++] = bytes - 1;  in sdma_v5_0_vm_copy_pte()
   1163  ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */  in sdma_v5_0_vm_copy_pte()
   1164  ib->ptr[ib->length_dw++] = lower_32_bits(src);  in sdma_v5_0_vm_copy_pte()
   1165  ib->ptr[ib->length_dw++] = upper_32_bits(src);  in sdma_v5_0_vm_copy_pte()
   1166  ib->ptr[ib->length_dw++] = lower_32_bits(pe);  in sdma_v5_0_vm_copy_pte()
   1167  ib->ptr[ib->length_dw++] = upper_32_bits(pe);  in sdma_v5_0_vm_copy_pte()
   1188  ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) |  in sdma_v5_0_vm_write_pte()
   [all …]
sdma_v5_2.c
    301  amdgpu_ring_write(ring, ib->length_dw);  in sdma_v5_2_ring_emit_ib()
   1011  ib.length_dw = 8;  in sdma_v5_2_ring_test_ib()
   1059  ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_COPY) |  in sdma_v5_2_vm_copy_pte()
   1061  ib->ptr[ib->length_dw++] = bytes - 1;  in sdma_v5_2_vm_copy_pte()
   1062  ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */  in sdma_v5_2_vm_copy_pte()
   1063  ib->ptr[ib->length_dw++] = lower_32_bits(src);  in sdma_v5_2_vm_copy_pte()
   1064  ib->ptr[ib->length_dw++] = upper_32_bits(src);  in sdma_v5_2_vm_copy_pte()
   1065  ib->ptr[ib->length_dw++] = lower_32_bits(pe);  in sdma_v5_2_vm_copy_pte()
   1066  ib->ptr[ib->length_dw++] = upper_32_bits(pe);  in sdma_v5_2_vm_copy_pte()
   1087  ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) |  in sdma_v5_2_vm_write_pte()
   [all …]
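
One generation detail is visible in these hits: cik_sdma.c programs the COPY packet's count as the raw byte count (= bytes), while the SDMA v5.x/v6.x code programs bytes - 1, i.e. what appears to be a zero-based count. A tiny hedged helper capturing the difference:

    /* Hypothetical helper; 'zero_based' selects the SDMA v5/v6 encoding
     * (bytes - 1) over the CIK one (bytes), per the hits above. */
    static uint32_t sdma_copy_count(uint32_t bytes, int zero_based)
    {
            return zero_based ? bytes - 1 : bytes;
    }
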
amdgpu_cs.c
    213  p->chunks[i].length_dw = user_chunk.length_dw;  in amdgpu_cs_pass1()
    215  size = p->chunks[i].length_dw;  in amdgpu_cs_pass1()
    397  ib->length_dw = chunk_ib->ib_bytes / 4;  in amdgpu_cs_p2_ib()
    410  num_deps = chunk->length_dw * 4 /  in amdgpu_cs_p2_dependencies()
    481  num_deps = chunk->length_dw * 4 /  in amdgpu_cs_p2_syncobj_in()
    499  num_deps = chunk->length_dw * 4 /  in amdgpu_cs_p2_syncobj_timeline_wait()
    519  num_deps = chunk->length_dw * 4 /  in amdgpu_cs_p2_syncobj_out()
    552  num_deps = chunk->length_dw * 4 /  in amdgpu_cs_p2_syncobj_timeline_signal()
   1045  if ((va_start + ib->length_dw * 4) >  in amdgpu_cs_patch_ibs()
   1059  memcpy(ib->ptr, kptr, ib->length_dw * 4);  in amdgpu_cs_patch_ibs()
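
The amdgpu_cs.c hits show the unit convention: length_dw always counts 32-bit dwords, so the CS code converts with a constant factor of 4. The IB size arrives from userspace in bytes (ib_bytes / 4), per-chunk element counts divide the chunk's byte size by the element size, and the memcpy()/range checks scale back up with * 4. A sketch of those conversions; chunk_dep is a hypothetical stand-in for the real per-chunk element types such as drm_amdgpu_cs_chunk_dep:

    struct chunk_dep { uint32_t ip_type, ring, pad, handle; };  /* hypothetical layout */

    static void length_dw_conversions(void)
    {
            uint32_t ib_bytes  = 256;                        /* from userspace */
            uint32_t length_dw = ib_bytes / 4;               /* bytes -> dwords */
            uint64_t copy_sz   = (uint64_t)length_dw * 4;    /* dwords -> bytes */
            uint32_t num_deps  = length_dw * 4 / sizeof(struct chunk_dep);

            (void)copy_sz; (void)num_deps;
    }
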
amdgpu_cs.h
     39  uint32_t length_dw;  member
vcn_sw_ring.c
     53  amdgpu_ring_write(ring, ib->length_dw);  in vcn_dec_sw_ring_emit_ib()
amdgpu_ring.c
    169  u32 count = ib->length_dw & align_mask;  in amdgpu_ring_generic_pad_ib()
    174  memset32(&ib->ptr[ib->length_dw], ring->funcs->nop, count);  in amdgpu_ring_generic_pad_ib()
    176  ib->length_dw += count;  in amdgpu_ring_generic_pad_ib()
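
The amdgpu_ring.c hits are the padding path: before submission the IB is padded with NOP dwords until length_dw reaches the ring's alignment. Note the cross-reference lists only matching lines, so the code between lines 169 and 174 that turns the remainder into a pad count is elided here. A sketch of a correct pad under that reading, again reusing struct ib:

    static void pad_ib(struct ib *ib, uint32_t align_mask, uint32_t nop)
    {
            uint32_t rem   = ib->length_dw & align_mask;      /* dwords past the boundary */
            uint32_t count = rem ? align_mask + 1 - rem : 0;  /* dwords still needed */

            while (count--)
                    ib->ptr[ib->length_dw++] = nop;           /* memset32() in the kernel */
    }
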
amdgpu_jpeg.c
    212  ib->length_dw = 16;  in amdgpu_jpeg_dec_set_reg()
    571  for (i = 0; i < ib->length_dw ; i += 2) {  in amdgpu_jpeg_dec_parse_cs()
amdgpu_ring.h
    104  uint32_t length_dw;  member
amdgpu_amdkfd.c
    683  ib->length_dw = ib_len;  in amdgpu_amdkfd_submit_ib()
/linux/drivers/net/ethernet/qlogic/qed/
qed_hw.c
    469  le16_to_cpu(p_command->length_dw),  in qed_dmae_post_command()
    484  le16_to_cpu(p_command->length_dw),  in qed_dmae_post_command()
    616  u32 length_dw)  in qed_dmae_execute_sub_operation()  argument
    634  length_dw * sizeof(u32));  in qed_dmae_execute_sub_operation()
    655  cmd->length_dw = cpu_to_le16((u16)length_dw);  in qed_dmae_execute_sub_operation()
    664  src_addr, dst_addr, length_dw);  in qed_dmae_execute_sub_operation()
    671  length_dw * sizeof(u32));  in qed_dmae_execute_sub_operation()
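
Outside the GPU drivers, qed uses the same dword convention in its DMAE engine: length_dw lives in the hardware command as a little-endian u16, and byte sizes derive from it as length_dw * sizeof(u32). A sketch with a stand-in command struct carrying only the field the hits show; my_cpu_to_le16() assumes a little-endian host, where the kernel's cpu_to_le16() is a no-op:

    struct dmae_cmd { uint16_t length_dw; /* other fields omitted */ };

    static uint16_t my_cpu_to_le16(uint16_t v) { return v; }    /* LE host assumed */

    static uint32_t setup_dmae_cmd(struct dmae_cmd *cmd, uint32_t length_dw)
    {
            cmd->length_dw = my_cpu_to_le16((uint16_t)length_dw);
            return length_dw * (uint32_t)sizeof(uint32_t);      /* bytes to move */
    }
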
/linux/include/uapi/drm/
radeon_drm.h
    969  __u32 length_dw;  member
amdgpu_drm.h
    961  __u32 length_dw;  member
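
Both UAPI headers declare length_dw as a __u32 inside the command-submission chunk descriptor that userspace hands to the CS ioctl. The amdgpu shape, reproduced from memory of include/uapi/drm/amdgpu_drm.h, so treat the fields around length_dw as illustrative (the radeon struct is analogous):

    #include <linux/types.h>

    struct my_amdgpu_cs_chunk {          /* mirrors struct drm_amdgpu_cs_chunk */
            __u32 chunk_id;              /* AMDGPU_CHUNK_ID_* */
            __u32 length_dw;             /* payload size in 32-bit dwords */
            __u64 chunk_data;            /* user pointer to the payload */
    };
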
