| /linux/drivers/gpu/drm/radeon/ |
| H A D | si_dma.c |
      78   ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY,   in si_dma_vm_copy_pages()
      80   ib->ptr[ib->length_dw++] = lower_32_bits(pe);   in si_dma_vm_copy_pages()
      81   ib->ptr[ib->length_dw++] = lower_32_bits(src);   in si_dma_vm_copy_pages()
      82   ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;   in si_dma_vm_copy_pages()
      83   ib->ptr[ib->length_dw++] = upper_32_bits(src) & 0xff;   in si_dma_vm_copy_pages()
     119   ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 0, ndw);   in si_dma_vm_write_pages()
     120   ib->ptr[ib->length_dw++] = pe;   in si_dma_vm_write_pages()
     121   ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;   in si_dma_vm_write_pages()
     132   ib->ptr[ib->length_dw++] = value;   in si_dma_vm_write_pages()
     133   ib->ptr[ib->length_dw++] = upper_32_bits(value);   in si_dma_vm_write_pages()
     [all …]
|
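Every si_dma.c hit above follows one pattern: `length_dw` is a dword write cursor into the indirect buffer (IB), post-incremented once per emitted dword, and the high address dwords are masked to 8 bits because these parts use 40-bit GPU addresses. The stand-alone sketch below illustrates only that cursor pattern; `struct fake_ib`, `fake_dma_packet()` and the opcode value are invented stand-ins, not the kernel's `struct radeon_ib` or the real `DMA_PACKET()` encoding. The other DMA backends listed below (ni_dma.c, cik_sdma.c, the amdgpu SDMA files) build their packets the same way.

```c
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

/* Simplified stand-in for the kernel's IB object: ptr is the CPU mapping of
 * the command buffer, length_dw is the dword write cursor. */
struct fake_ib {
        uint32_t *ptr;
        uint32_t length_dw;
};

static inline uint32_t lower_32_bits(uint64_t v) { return (uint32_t)v; }
static inline uint32_t upper_32_bits(uint64_t v) { return (uint32_t)(v >> 32); }

/* Invented header builder, standing in for DMA_PACKET(cmd, ..., n):
 * opcode in the top nibble, dword count in the low bits. */
static uint32_t fake_dma_packet(uint32_t opcode, uint32_t ndw)
{
        return (opcode << 28) | (ndw & 0xfffff);
}

/* Emit one "copy page-table entries" packet the way si_dma_vm_copy_pages()
 * does: header, low/high destination (pe) and source addresses, one dword
 * at a time through the post-incremented cursor. */
static void emit_copy_pte(struct fake_ib *ib, uint64_t pe, uint64_t src,
                          uint32_t ndw)
{
        ib->ptr[ib->length_dw++] = fake_dma_packet(0x3 /* copy */, ndw);
        ib->ptr[ib->length_dw++] = lower_32_bits(pe);
        ib->ptr[ib->length_dw++] = lower_32_bits(src);
        ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;  /* 40-bit address */
        ib->ptr[ib->length_dw++] = upper_32_bits(src) & 0xff;
}

int main(void)
{
        struct fake_ib ib = { .ptr = calloc(64, sizeof(uint32_t)), .length_dw = 0 };

        emit_copy_pte(&ib, 0x12345678000ULL, 0x9abcdef0000ULL, 8);
        printf("packet occupies %u dwords\n", ib.length_dw);   /* 5 */

        for (uint32_t i = 0; i < ib.length_dw; i++)
                printf("  dw[%u] = 0x%08x\n", i, ib.ptr[i]);

        free(ib.ptr);
        return 0;
}
```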
| H A D | ni_dma.c |
     145   radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF));   in cayman_dma_ring_ib_execute()
     326   ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY,   in cayman_dma_vm_copy_pages()
     328   ib->ptr[ib->length_dw++] = lower_32_bits(pe);   in cayman_dma_vm_copy_pages()
     329   ib->ptr[ib->length_dw++] = lower_32_bits(src);   in cayman_dma_vm_copy_pages()
     330   ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;   in cayman_dma_vm_copy_pages()
     331   ib->ptr[ib->length_dw++] = upper_32_bits(src) & 0xff;   in cayman_dma_vm_copy_pages()
     367   ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_WRITE,   in cayman_dma_vm_write_pages()
     369   ib->ptr[ib->length_dw++] = pe;   in cayman_dma_vm_write_pages()
     370   ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff;   in cayman_dma_vm_write_pages()
     381   ib->ptr[ib->length_dw++] = value;   in cayman_dma_vm_write_pages()
     [all …]
|
| H A D | radeon_cs.c |
      95   p->nrelocs = chunk->length_dw / 4;   in radeon_cs_parser_relocs()
     313   p->chunks[i].length_dw = user_chunk.length_dw;   in radeon_cs_parser_init()
     320   if (p->chunks[i].length_dw == 0)   in radeon_cs_parser_init()
     326   if (p->chunks[i].length_dw == 0)   in radeon_cs_parser_init()
     332   if (p->chunks[i].length_dw == 0)   in radeon_cs_parser_init()
     336   size = p->chunks[i].length_dw;   in radeon_cs_parser_init()
     357   if (p->chunks[i].length_dw > 1)   in radeon_cs_parser_init()
     359   if (p->chunks[i].length_dw > 2)   in radeon_cs_parser_init()
     562   if (parser->const_ib.length_dw) {   in radeon_cs_ib_vm_chunk()
     628   if (ib_chunk->length_dw > RADEON_IB_VM_MAX_SIZE) {   in radeon_cs_ib_fill()
     [all …]
|
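radeon_cs_parser_init() above copies each user chunk's `length_dw`, rejects empty chunks, and later sizes its copies in bytes; radeon_cs_parser_relocs() turns the relocation chunk's dword count into an entry count by dividing by 4 (four dwords per relocation). A hedged arithmetic sketch of those conversions follows; the bound and variable names are illustrative, not the driver's actual limits.

```c
#include <stdint.h>
#include <stdio.h>

#define FAKE_MAX_CHUNK_DW  (64 * 1024)    /* illustrative sanity bound */

int main(void)
{
        uint32_t length_dw = 96;          /* chunk size reported by userspace, in dwords */

        /* Mirror of the zero-length / oversized checks in the parser. */
        if (length_dw == 0 || length_dw > FAKE_MAX_CHUNK_DW) {
                fprintf(stderr, "invalid chunk size\n");
                return 1;
        }

        size_t size_bytes = (size_t)length_dw * 4;   /* bytes to copy from userspace */
        uint32_t nrelocs = length_dw / 4;            /* 4 dwords per relocation entry */

        printf("chunk: %u dwords = %zu bytes, %u relocations\n",
               length_dw, size_bytes, nrelocs);
        return 0;
}
```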
| H A D | cik_sdma.c |
     156   radeon_ring_write(ring, ib->length_dw);   in cik_sdma_ring_ib_execute()
     731   ib.length_dw = 5;   in cik_sdma_ib_test()
     812   ib->ptr[ib->length_dw++] = SDMA_PACKET(SDMA_OPCODE_COPY,   in cik_sdma_vm_copy_pages()
     814   ib->ptr[ib->length_dw++] = bytes;   in cik_sdma_vm_copy_pages()
     815   ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */   in cik_sdma_vm_copy_pages()
     816   ib->ptr[ib->length_dw++] = lower_32_bits(src);   in cik_sdma_vm_copy_pages()
     817   ib->ptr[ib->length_dw++] = upper_32_bits(src);   in cik_sdma_vm_copy_pages()
     818   ib->ptr[ib->length_dw++] = lower_32_bits(pe);   in cik_sdma_vm_copy_pages()
     819   ib->ptr[ib->length_dw++] = upper_32_bits(pe);   in cik_sdma_vm_copy_pages()
     855   ib->ptr[ib->length_dw++] = SDMA_PACKET(SDMA_OPCODE_WRITE,   in cik_sdma_vm_write_pages()
     [all …]
|
| H A D | radeon_vm.c |
     409   ib.length_dw = 0;   in radeon_vm_clear_bo()
     413   WARN_ON(ib.length_dw > 64);   in radeon_vm_clear_bo()
     661   ib.length_dw = 0;   in radeon_vm_update_page_directory()
     698   if (ib.length_dw != 0) {   in radeon_vm_update_page_directory()
     702   WARN_ON(ib.length_dw > ndw);   in radeon_vm_update_page_directory()
     999   ib.length_dw = 0;   in radeon_vm_bo_update()
    1017   WARN_ON(ib.length_dw > ndw);   in radeon_vm_bo_update()
|
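The radeon_vm.c hits show a reserve/fill/verify discipline: the cursor starts at 0, the page-table helpers append dwords, and a WARN_ON afterwards asserts the buffer never grew past the `ndw` dwords reserved for it. Below is a minimal stand-alone sketch of that discipline; the struct and the `fake_set_pages()` helper are made up for illustration.

```c
#include <assert.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

struct fake_ib {
        uint32_t *ptr;
        uint32_t length_dw;
};

/* Hypothetical "write N page-table entries" helper: one header dword plus
 * one dword per entry, appended through the length_dw cursor. */
static void fake_set_pages(struct fake_ib *ib, uint32_t npages)
{
        ib->ptr[ib->length_dw++] = 0xdead0000u | npages;   /* header placeholder */
        for (uint32_t i = 0; i < npages; i++)
                ib->ptr[ib->length_dw++] = i;              /* PTE placeholder */
}

int main(void)
{
        uint32_t npages = 16;
        uint32_t ndw = 1 + npages;                         /* worst case, reserved up front */
        struct fake_ib ib = { .ptr = calloc(ndw, sizeof(uint32_t)), .length_dw = 0 };

        fake_set_pages(&ib, npages);

        /* Same invariant as WARN_ON(ib.length_dw > ndw): the fill must stay
         * within the reservation or the IB allocation has been overrun. */
        assert(ib.length_dw <= ndw);
        printf("used %u of %u reserved dwords\n", ib.length_dw, ndw);

        free(ib.ptr);
        return 0;
}
```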
| H A D | r600_dma.c |
     362   ib.length_dw = 4;   in r600_dma_ib_test()
     426   radeon_ring_write(ring, (ib->length_dw << 16) | (upper_32_bits(ib->gpu_addr) & 0xFF));   in r600_dma_ring_ib_execute()
|
| H A D | evergreen_dma.c | 89 radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF)); in evergreen_dma_ring_ib_execute()
|
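The *_ring_ib_execute() hits in ni_dma.c, r600_dma.c and evergreen_dma.c above pack the IB's dword count together with the top byte of its GPU address into a single ring dword; the shift is 16 on the r600 DMA ring and 12 on evergreen/cayman, as the snippets show. A small sketch of that packing (the address value is made up):

```c
#include <stdint.h>
#include <stdio.h>

static inline uint32_t upper_32_bits(uint64_t v) { return (uint32_t)(v >> 32); }

/* (length_dw << shift) | high address byte, as in the ring_ib_execute hits;
 * only 8 bits of the upper address survive because the address is 40 bits. */
static uint32_t pack_ib_size_word(uint32_t length_dw, uint64_t gpu_addr,
                                  unsigned int shift)
{
        return (length_dw << shift) | (upper_32_bits(gpu_addr) & 0xFF);
}

int main(void)
{
        uint64_t gpu_addr = 0x0000001234560000ULL;   /* example 40-bit IB address */

        printf("r600 DMA ring word:    0x%08x\n", pack_ib_size_word(64, gpu_addr, 16));
        printf("evergreen/cayman word: 0x%08x\n", pack_ib_size_word(64, gpu_addr, 12));
        return 0;
}
```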
| H A D | radeon_ib.c | 133 if (!ib->length_dw || !ring->ready) { in radeon_ib_schedule()
|
| H A D | radeon_trace.h | 41 __entry->dw = p->chunk_ib->length_dw;
|
| H A D | r600_cs.c |
    2328   } while (p->idx < p->chunk_ib->length_dw);   in r600_cs_parse()
    2330   for (r = 0; r < p->ib.length_dw; r++) {   in r600_cs_parse()
    2396   if (p->idx >= ib_chunk->length_dw) {   in r600_dma_cs_parse()
    2398   p->idx, ib_chunk->length_dw);   in r600_dma_cs_parse()
    2534   } while (p->idx < p->chunk_ib->length_dw);   in r600_dma_cs_parse()
    2536   for (r = 0; r < p->ib->length_dw; r++) {   in r600_dma_cs_parse()
|
| H A D | evergreen_cs.c |
    2867   } while (p->idx < p->chunk_ib->length_dw);   in evergreen_cs_parse()
    2869   for (r = 0; r < p->ib.length_dw; r++) {   in evergreen_cs_parse()
    2899   if (p->idx >= ib_chunk->length_dw) {   in evergreen_dma_cs_parse()
    2901   p->idx, ib_chunk->length_dw);   in evergreen_dma_cs_parse()
    3306   } while (p->idx < p->chunk_ib->length_dw);   in evergreen_dma_cs_parse()
    3308   for (r = 0; r < p->ib->length_dw; r++) {   in evergreen_dma_cs_parse()
    3648   } while (idx < ib->length_dw);   in evergreen_ib_parse()
    3754   } while (idx < ib->length_dw);   in evergreen_dma_ib_parse()
|
| H A D | r100.c |
    2102   } while (p->idx < p->chunk_ib->length_dw);   in r100_cs_parse()
    3722   radeon_ring_write(ring, ib->length_dw);   in r100_ring_ib_execute()
    3752   ib.length_dw = 8;   in r100_ib_test()
|
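The r600_cs.c, evergreen_cs.c and r100.c hits above show the command-stream checkers walking the IB with an index that is advanced packet by packet until it reaches `length_dw`, with an explicit error when a packet would run past the end. The stand-alone loop below reproduces only that bounded walk; the one-byte "payload count" packet format is invented for the example and is not the PM4 encoding.

```c
#include <stdint.h>
#include <stdio.h>

int main(void)
{
        /* Toy stream: each header's low byte is the number of payload dwords. */
        uint32_t ib[] = { 0x00000002, 0x11, 0x22,   /* packet with 2 payload dwords */
                          0x00000001, 0x33,         /* packet with 1 payload dword  */
                          0x00000000 };             /* packet with no payload       */
        uint32_t length_dw = sizeof(ib) / sizeof(ib[0]);
        uint32_t idx = 0;

        do {
                uint32_t count = ib[idx] & 0xff;

                if (idx + 1 + count > length_dw) {
                        /* Same failure the parsers report when idx reaches
                         * length_dw while a packet still expects more data. */
                        fprintf(stderr, "packet overruns IB at dw %u\n", idx);
                        return 1;
                }
                printf("packet at dw %u, %u payload dwords\n", idx, count);
                idx += 1 + count;
        } while (idx < length_dw);

        return 0;
}
```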
| /linux/drivers/gpu/drm/amd/amdgpu/ |
| H A D | sdma_v6_0.c |
     288   amdgpu_ring_write(ring, ib->length_dw);   in sdma_v6_0_ring_emit_ib()
    1018   ib.length_dw = 8;   in sdma_v6_0_ring_test_ib()
    1066   ib->ptr[ib->length_dw++] = SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_COPY) |   in sdma_v6_0_vm_copy_pte()
    1068   ib->ptr[ib->length_dw++] = bytes - 1;   in sdma_v6_0_vm_copy_pte()
    1069   ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */   in sdma_v6_0_vm_copy_pte()
    1070   ib->ptr[ib->length_dw++] = lower_32_bits(src);   in sdma_v6_0_vm_copy_pte()
    1071   ib->ptr[ib->length_dw++] = upper_32_bits(src);   in sdma_v6_0_vm_copy_pte()
    1072   ib->ptr[ib->length_dw++] = lower_32_bits(pe);   in sdma_v6_0_vm_copy_pte()
    1073   ib->ptr[ib->length_dw++] = upper_32_bits(pe);   in sdma_v6_0_vm_copy_pte()
    1094   ib->ptr[ib->length_dw++] = SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_WRITE) |   in sdma_v6_0_vm_write_pte()
    [all …]
|
| H A D | sdma_v5_0.c |
     453   amdgpu_ring_write(ring, ib->length_dw);   in sdma_v5_0_ring_emit_ib()
    1112   ib.length_dw = 8;   in sdma_v5_0_ring_test_ib()
    1160   ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_COPY) |   in sdma_v5_0_vm_copy_pte()
    1162   ib->ptr[ib->length_dw++] = bytes - 1;   in sdma_v5_0_vm_copy_pte()
    1163   ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */   in sdma_v5_0_vm_copy_pte()
    1164   ib->ptr[ib->length_dw++] = lower_32_bits(src);   in sdma_v5_0_vm_copy_pte()
    1165   ib->ptr[ib->length_dw++] = upper_32_bits(src);   in sdma_v5_0_vm_copy_pte()
    1166   ib->ptr[ib->length_dw++] = lower_32_bits(pe);   in sdma_v5_0_vm_copy_pte()
    1167   ib->ptr[ib->length_dw++] = upper_32_bits(pe);   in sdma_v5_0_vm_copy_pte()
    1188   ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) |   in sdma_v5_0_vm_write_pte()
    [all …]
|
| H A D | sdma_v5_2.c |
     301   amdgpu_ring_write(ring, ib->length_dw);   in sdma_v5_2_ring_emit_ib()
    1011   ib.length_dw = 8;   in sdma_v5_2_ring_test_ib()
    1059   ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_COPY) |   in sdma_v5_2_vm_copy_pte()
    1061   ib->ptr[ib->length_dw++] = bytes - 1;   in sdma_v5_2_vm_copy_pte()
    1062   ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */   in sdma_v5_2_vm_copy_pte()
    1063   ib->ptr[ib->length_dw++] = lower_32_bits(src);   in sdma_v5_2_vm_copy_pte()
    1064   ib->ptr[ib->length_dw++] = upper_32_bits(src);   in sdma_v5_2_vm_copy_pte()
    1065   ib->ptr[ib->length_dw++] = lower_32_bits(pe);   in sdma_v5_2_vm_copy_pte()
    1066   ib->ptr[ib->length_dw++] = upper_32_bits(pe);   in sdma_v5_2_vm_copy_pte()
    1087   ib->ptr[ib->length_dw++] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) |   in sdma_v5_2_vm_write_pte()
    [all …]
|
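The three SDMA files above (sdma_v5_0.c, sdma_v5_2.c, sdma_v6_0.c) emit their copy-PTE packets exactly like the older cik_sdma.c code further up, with one visible difference: the count dword is written as `bytes - 1` rather than `bytes`, i.e. the newer packet format appears to encode the size minus one. A trivial sketch of the two encodings; the packet context around the count dword is omitted.

```c
#include <stdint.h>
#include <stdio.h>

/* Count dword for a linear-copy packet: CIK writes the byte count directly,
 * the SDMA v5/v6 code above writes it minus one. */
static uint32_t copy_count_dword(uint32_t bytes, int minus_one_encoding)
{
        return minus_one_encoding ? bytes - 1 : bytes;
}

int main(void)
{
        uint32_t bytes = 8 * 4096;   /* e.g. eight pages of PTEs to copy */

        printf("cik_sdma count dword:   %u\n", copy_count_dword(bytes, 0));
        printf("sdma v5/v6 count dword: %u\n", copy_count_dword(bytes, 1));
        return 0;
}
```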
| H A D | amdgpu_cs.c |
     213   p->chunks[i].length_dw = user_chunk.length_dw;   in amdgpu_cs_pass1()
     215   size = p->chunks[i].length_dw;   in amdgpu_cs_pass1()
     397   ib->length_dw = chunk_ib->ib_bytes / 4;   in amdgpu_cs_p2_ib()
     410   num_deps = chunk->length_dw * 4 /   in amdgpu_cs_p2_dependencies()
     481   num_deps = chunk->length_dw * 4 /   in amdgpu_cs_p2_syncobj_in()
     499   num_deps = chunk->length_dw * 4 /   in amdgpu_cs_p2_syncobj_timeline_wait()
     519   num_deps = chunk->length_dw * 4 /   in amdgpu_cs_p2_syncobj_out()
     552   num_deps = chunk->length_dw * 4 /   in amdgpu_cs_p2_syncobj_timeline_signal()
    1045   if ((va_start + ib->length_dw * 4) >   in amdgpu_cs_patch_ibs()
    1059   memcpy(ib->ptr, kptr, ib->length_dw * 4);   in amdgpu_cs_patch_ibs()
|
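amdgpu_cs.c converts in both directions: the IB chunk supplies its size in bytes, so `ib->length_dw = ib_bytes / 4`, while dependency and syncobj chunks supply `length_dw`, so the entry count is `length_dw * 4 / sizeof(entry)`. The sketch below redoes that arithmetic with a stand-in record type; it is not the real drm_amdgpu_cs_chunk_dep layout.

```c
#include <stdint.h>
#include <stdio.h>

/* Stand-in dependency record; only its size matters for the arithmetic. */
struct fake_dep {
        uint32_t ip_type;
        uint32_t ring;
        uint64_t handle;
};

int main(void)
{
        uint32_t ib_bytes = 1024;                     /* from the IB chunk */
        uint32_t ib_length_dw = ib_bytes / 4;         /* as in amdgpu_cs_p2_ib() */

        uint32_t chunk_length_dw = 12;                /* dependency chunk, in dwords */
        uint32_t num_deps = chunk_length_dw * 4 / sizeof(struct fake_dep);

        printf("IB: %u bytes -> %u dwords\n", ib_bytes, ib_length_dw);
        printf("dep chunk: %u dwords -> %u entries of %zu bytes each\n",
               chunk_length_dw, num_deps, sizeof(struct fake_dep));
        return 0;
}
```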
| H A D | amdgpu_cs.h | 39 uint32_t length_dw; member
|
| H A D | vcn_sw_ring.c | 53 amdgpu_ring_write(ring, ib->length_dw); in vcn_dec_sw_ring_emit_ib()
|
| H A D | amdgpu_ring.c |
     169   u32 count = ib->length_dw & align_mask;   in amdgpu_ring_generic_pad_ib()
     174   memset32(&ib->ptr[ib->length_dw], ring->funcs->nop, count);   in amdgpu_ring_generic_pad_ib()
     176   ib->length_dw += count;   in amdgpu_ring_generic_pad_ib()
|
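amdgpu_ring_generic_pad_ib() above pads the IB with the ring's NOP dword until `length_dw` satisfies the ring's alignment mask; the kernel computes the pad count and fills it with memset32() in one go. The sketch below reaches the same aligned end state with a simple loop; the NOP value and the 8-dword alignment are illustrative, the real values come from ring->funcs.

```c
#include <stdint.h>
#include <stdio.h>

#define FAKE_NOP        0x80000000u   /* placeholder NOP dword */
#define FAKE_ALIGN_MASK 0x7u          /* pad to a multiple of 8 dwords */

struct fake_ib {
        uint32_t ptr[64];
        uint32_t length_dw;
};

/* Append NOPs until length_dw is a multiple of (align_mask + 1). */
static void pad_ib(struct fake_ib *ib, uint32_t align_mask, uint32_t nop)
{
        while (ib->length_dw & align_mask)
                ib->ptr[ib->length_dw++] = nop;
}

int main(void)
{
        struct fake_ib ib = { .length_dw = 13 };   /* pretend 13 dwords were emitted */

        pad_ib(&ib, FAKE_ALIGN_MASK, FAKE_NOP);
        printf("padded from 13 to %u dwords\n", ib.length_dw);   /* 16 */
        return 0;
}
```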
| H A D | amdgpu_jpeg.c |
     212   ib->length_dw = 16;   in amdgpu_jpeg_dec_set_reg()
     571   for (i = 0; i < ib->length_dw ; i += 2) {   in amdgpu_jpeg_dec_parse_cs()
|
| H A D | amdgpu_ring.h | 104 uint32_t length_dw; member
|
| H A D | amdgpu_amdkfd.c | 683 ib->length_dw = ib_len; in amdgpu_amdkfd_submit_ib()
|
| /linux/drivers/net/ethernet/qlogic/qed/ |
| H A D | qed_hw.c |
     469   le16_to_cpu(p_command->length_dw),   in qed_dmae_post_command()
     484   le16_to_cpu(p_command->length_dw),   in qed_dmae_post_command()
     616   u32 length_dw)   in qed_dmae_execute_sub_operation() argument
     634   length_dw * sizeof(u32));   in qed_dmae_execute_sub_operation()
     655   cmd->length_dw = cpu_to_le16((u16)length_dw);   in qed_dmae_execute_sub_operation()
     664   src_addr, dst_addr, length_dw);   in qed_dmae_execute_sub_operation()
     671   length_dw * sizeof(u32));   in qed_dmae_execute_sub_operation()
|
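In qed_hw.c the DMAE command's `length_dw` is a little-endian 16-bit dword count: it is stored with cpu_to_le16(), read back with le16_to_cpu(), and multiplied by sizeof(u32) wherever a byte size is needed. A host-side sketch of those conversions; the two helpers below are identity functions standing in for the kernel's endian macros on a little-endian machine.

```c
#include <stdint.h>
#include <stdio.h>

/* Stand-ins for cpu_to_le16()/le16_to_cpu(); identity on little-endian hosts,
 * the kernel macros also handle big-endian builds. */
static uint16_t to_le16(uint16_t v)   { return v; }
static uint16_t from_le16(uint16_t v) { return v; }

int main(void)
{
        uint32_t length_dw = 0x40;                     /* dwords in one DMAE sub-operation */
        uint16_t wire = to_le16((uint16_t)length_dw);  /* cmd->length_dw = cpu_to_le16(...) */
        size_t bytes = (size_t)from_le16(wire) * sizeof(uint32_t);

        printf("DMAE command moves %u dwords = %zu bytes\n", from_le16(wire), bytes);
        return 0;
}
```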
| /linux/include/uapi/drm/ |
| H A D | radeon_drm.h | 969 __u32 length_dw; member
|
| H A D | amdgpu_drm.h | 961 __u32 length_dw; member
|
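Both uapi headers expose `length_dw` as the dword size of a CS chunk that userspace fills in before submitting the ioctl, which is where the kernel-side copies in radeon_cs_parser_init() and amdgpu_cs_pass1() above get their sizes. The userspace-style sketch below only mimics the shape of such a descriptor (id, dword length, pointer); it is not the exact drm_radeon_cs_chunk or drm_amdgpu_cs_chunk layout.

```c
#include <stdint.h>
#include <stdio.h>

/* Shape-alike of a CS chunk descriptor: what kind of chunk, how many dwords
 * of payload, and where the payload lives in user memory. */
struct fake_cs_chunk {
        uint32_t chunk_id;
        uint32_t length_dw;
        uint64_t chunk_data;          /* user pointer cast to a 64-bit handle */
};

int main(void)
{
        uint32_t ib_payload[8] = { 0 };               /* pretend command stream */
        struct fake_cs_chunk chunk = {
                .chunk_id   = 1,                      /* e.g. "IB chunk" */
                .length_dw  = sizeof(ib_payload) / sizeof(uint32_t),
                .chunk_data = (uint64_t)(uintptr_t)ib_payload,
        };

        /* The kernel copies length_dw * 4 bytes from chunk_data. */
        printf("chunk %u: %u dwords (%zu bytes) at 0x%llx\n",
               chunk.chunk_id, chunk.length_dw,
               (size_t)chunk.length_dw * 4,
               (unsigned long long)chunk.chunk_data);
        return 0;
}
```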