| /linux/drivers/media/usb/pvrusb2/ |
| pvrusb2-debugifc.c |
| 50 | const char *wptr; | in debugifc_isolate_word() local |
| 55 | wptr = NULL; | in debugifc_isolate_word() |
| 63 | wptr = buf; | in debugifc_isolate_word() |
| 68 | *wstrPtr = wptr; | in debugifc_isolate_word() |
| 177 | const char *wptr; | in pvr2_debugifc_do1cmd() local |
| 181 | scnt = debugifc_isolate_word(buf,count,&wptr,&wlen); | in pvr2_debugifc_do1cmd() |
| 184 | if (!wptr) return 0; | in pvr2_debugifc_do1cmd() |
| 186 | pvr2_trace(PVR2_TRACE_DEBUGIFC,"debugifc cmd: \"%.*s\"",wlen,wptr); | in pvr2_debugifc_do1cmd() |
| 187 | if (debugifc_match_keyword(wptr,wlen,"reset")) { | in pvr2_debugifc_do1cmd() |
| 188 | scnt = debugifc_isolate_word(buf,count,&wptr,&wlen); | in pvr2_debugifc_do1cmd() |
| [all …] |
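The calls above imply a small parser: debugifc_isolate_word() appears to skip leading whitespace, point wptr at the next word (or NULL when there is none), store its length in wlen, and return how many bytes of the input were consumed. A minimal sketch of that pattern follows; isolate_word and the whitespace-delimited word rule are assumptions for illustration, not the pvrusb2 implementation.

    #include <ctype.h>
    #include <stddef.h>

    /*
     * Return the number of bytes consumed from buf. *wstrPtr is set to the
     * start of the first word (or NULL if the buffer holds none) and *wlenPtr
     * to the word's length.
     */
    static size_t isolate_word(const char *buf, size_t count,
                               const char **wstrPtr, size_t *wlenPtr)
    {
        const char *wptr = NULL;
        size_t consumed = 0, wlen = 0;

        /* Skip leading whitespace. */
        while (consumed < count && isspace((unsigned char)buf[consumed]))
            consumed++;

        /* Mark the word and measure it up to the next whitespace byte. */
        if (consumed < count) {
            wptr = buf + consumed;
            while (consumed < count && !isspace((unsigned char)buf[consumed])) {
                consumed++;
                wlen++;
            }
        }

        *wstrPtr = wptr;
        *wlenPtr = wlen;
        return consumed;
    }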
|
| /linux/drivers/media/platform/amphion/ |
| vpu_rpc.c |
| 39 | ptr1 = desc->wptr; | in vpu_rpc_check_buffer_space() |
| 43 | ptr2 = desc->wptr; | in vpu_rpc_check_buffer_space() |
| 61 | u32 wptr; | in vpu_rpc_send_cmd_buf() local |
| 70 | wptr = desc->wptr; | in vpu_rpc_send_cmd_buf() |
| 71 | data = (u32 *)(shared->cmd_mem_vir + desc->wptr - desc->start); | in vpu_rpc_send_cmd_buf() |
| 76 | wptr += 4; | in vpu_rpc_send_cmd_buf() |
| 78 | if (wptr >= desc->end) { | in vpu_rpc_send_cmd_buf() |
| 79 | wptr = desc->start; | in vpu_rpc_send_cmd_buf() |
| 85 | wptr += 4; | in vpu_rpc_send_cmd_buf() |
| 87 | if (wptr >= desc->end) { | in vpu_rpc_send_cmd_buf() |
| [all …] |
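These lines show a command buffer addressed by byte offsets between desc->start and desc->end: each 32-bit word advances the write offset by 4, and reaching the end wraps it back to the start. A self-contained sketch of that wrap rule, with illustrative names (struct ring_desc and ring_push_word are not the amphion API):

    #include <stdint.h>

    struct ring_desc {
        uint32_t start;  /* first byte offset of the ring */
        uint32_t end;    /* one past the last byte offset */
        uint32_t wptr;   /* current write offset, in bytes */
    };

    /*
     * Append one 32-bit word at the write offset and wrap back to the start
     * of the region when the end is reached. 'base' is the CPU mapping of
     * offset desc->start.
     */
    static void ring_push_word(struct ring_desc *desc, uint32_t *base, uint32_t val)
    {
        uint32_t wptr = desc->wptr;

        base[(wptr - desc->start) / 4] = val;
        wptr += 4;
        if (wptr >= desc->end)
            wptr = desc->start;
        desc->wptr = wptr;
    }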
|
| /linux/drivers/gpu/drm/amd/amdgpu/ |
| iceland_ih.c |
| 196 | u32 wptr, tmp; | in iceland_ih_get_wptr() local |
| 198 | wptr = le32_to_cpu(*ih->wptr_cpu); | in iceland_ih_get_wptr() |
| 203 | if (!REG_GET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW)) | in iceland_ih_get_wptr() |
| 207 | wptr = RREG32(mmIH_RB_WPTR); | in iceland_ih_get_wptr() |
| 209 | if (!REG_GET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW)) | in iceland_ih_get_wptr() |
| 212 | wptr = REG_SET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW, 0); | in iceland_ih_get_wptr() |
| 218 | wptr, ih->rptr, (wptr + 16) & ih->ptr_mask); | in iceland_ih_get_wptr() |
| 219 | ih->rptr = (wptr + 16) & ih->ptr_mask; | in iceland_ih_get_wptr() |
| 231 | return (wptr & ih->ptr_mask); | in iceland_ih_get_wptr() |
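The same get_wptr shape repeats in the cz, tonga, si and cik handlers below: read the write pointer the hardware mirrored into system memory, and if its overflow flag is set, re-read the register, clear the flag, and push rptr past the oldest (overwritten) slot, 16 bytes per IH entry on these parts. The sketch keeps only that control flow; the flag value, the register accessor and the struct are simplified stand-ins, and the le32_to_cpu conversion is omitted on the assumption of a little-endian host.

    #include <stdint.h>

    #define IH_WPTR_OVERFLOW 0x1u  /* assumed flag bit standing in for IH_RB_WPTR's RB_OVERFLOW field */

    struct ih_ring {
        volatile uint32_t *wptr_cpu; /* write pointer mirrored to system memory */
        uint32_t rptr;
        uint32_t ptr_mask;           /* ring size in bytes minus 1 (power of two) */
    };

    /* Hypothetical MMIO accessor standing in for RREG32(mmIH_RB_WPTR). */
    extern uint32_t read_ih_wptr_register(void);

    static uint32_t ih_get_wptr(struct ih_ring *ih)
    {
        uint32_t wptr = *ih->wptr_cpu;

        if (wptr & IH_WPTR_OVERFLOW) {
            /* The CPU copy may be stale; re-check the register before acting. */
            wptr = read_ih_wptr_register();
            if (wptr & IH_WPTR_OVERFLOW) {
                wptr &= ~IH_WPTR_OVERFLOW;
                /*
                 * The ring wrapped over unread entries: skip the oldest one
                 * so rptr points at valid data again.
                 */
                ih->rptr = (wptr + 16) & ih->ptr_mask;
            }
        }
        return wptr & ih->ptr_mask;
    }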
|
| cz_ih.c |
| 196 | u32 wptr, tmp; | in cz_ih_get_wptr() local |
| 198 | wptr = le32_to_cpu(*ih->wptr_cpu); | in cz_ih_get_wptr() |
| 203 | if (!REG_GET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW)) | in cz_ih_get_wptr() |
| 207 | wptr = RREG32(mmIH_RB_WPTR); | in cz_ih_get_wptr() |
| 209 | if (!REG_GET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW)) | in cz_ih_get_wptr() |
| 212 | wptr = REG_SET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW, 0); | in cz_ih_get_wptr() |
| 219 | wptr, ih->rptr, (wptr + 16) & ih->ptr_mask); | in cz_ih_get_wptr() |
| 220 | ih->rptr = (wptr + 16) & ih->ptr_mask; | in cz_ih_get_wptr() |
| 232 | return (wptr & ih->ptr_mask); | in cz_ih_get_wptr() |
|
| tonga_ih.c |
| 198 | u32 wptr, tmp; | in tonga_ih_get_wptr() local |
| 200 | wptr = le32_to_cpu(*ih->wptr_cpu); | in tonga_ih_get_wptr() |
| 205 | if (!REG_GET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW)) | in tonga_ih_get_wptr() |
| 209 | wptr = RREG32(mmIH_RB_WPTR); | in tonga_ih_get_wptr() |
| 211 | if (!REG_GET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW)) | in tonga_ih_get_wptr() |
| 214 | wptr = REG_SET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW, 0); | in tonga_ih_get_wptr() |
| 222 | wptr, ih->rptr, (wptr + 16) & ih->ptr_mask); | in tonga_ih_get_wptr() |
| 223 | ih->rptr = (wptr + 16) & ih->ptr_mask; | in tonga_ih_get_wptr() |
| 235 | return (wptr & ih->ptr_mask); | in tonga_ih_get_wptr() |
|
| si_ih.c |
| 114 | u32 wptr, tmp; | in si_ih_get_wptr() local |
| 116 | wptr = le32_to_cpu(*ih->wptr_cpu); | in si_ih_get_wptr() |
| 121 | if (wptr & IH_RB_WPTR__RB_OVERFLOW_MASK) { | in si_ih_get_wptr() |
| 122 | wptr &= ~IH_RB_WPTR__RB_OVERFLOW_MASK; | in si_ih_get_wptr() |
| 124 | wptr, ih->rptr, (wptr + 16) & ih->ptr_mask); | in si_ih_get_wptr() |
| 125 | ih->rptr = (wptr + 16) & ih->ptr_mask; | in si_ih_get_wptr() |
| 138 | return (wptr & ih->ptr_mask); | in si_ih_get_wptr() |
|
| cik_ih.c |
| 194 | u32 wptr, tmp; | in cik_ih_get_wptr() local |
| 196 | wptr = le32_to_cpu(*ih->wptr_cpu); | in cik_ih_get_wptr() |
| 201 | if (wptr & IH_RB_WPTR__RB_OVERFLOW_MASK) { | in cik_ih_get_wptr() |
| 202 | wptr &= ~IH_RB_WPTR__RB_OVERFLOW_MASK; | in cik_ih_get_wptr() |
| 208 | wptr, ih->rptr, (wptr + 16) & ih->ptr_mask); | in cik_ih_get_wptr() |
| 209 | ih->rptr = (wptr + 16) & ih->ptr_mask; | in cik_ih_get_wptr() |
| 222 | return (wptr & ih->ptr_mask); | in cik_ih_get_wptr() |
|
| amdgpu_vpe.c |
| 628 | ret = ring->wptr & ring->buf_mask; | in vpe_ring_init_cond_exec() |
| 725 | uint64_t wptr; | in vpe_ring_get_wptr() local |
| 728 | wptr = atomic64_read((atomic64_t *)ring->wptr_cpu_addr); | in vpe_ring_get_wptr() |
| 729 | dev_dbg(adev->dev, "wptr/doorbell before shift == 0x%016llx\n", wptr); | in vpe_ring_get_wptr() |
| 731 | wptr = RREG32(vpe_get_reg_offset(vpe, ring->me, vpe->regs.queue0_rb_wptr_hi)); | in vpe_ring_get_wptr() |
| 732 | wptr = wptr << 32; | in vpe_ring_get_wptr() |
| 733 | wptr |= RREG32(vpe_get_reg_offset(vpe, ring->me, vpe->regs.queue0_rb_wptr_lo)); | in vpe_ring_get_wptr() |
| 734 | dev_dbg(adev->dev, "wptr before shift [%i] == 0x%016llx\n", ring->me, wptr); | in vpe_ring_get_wptr() |
| 737 | return (wptr >> 2); | in vpe_ring_get_wptr() |
| 751 | lower_32_bits(ring->wptr << 2), | in vpe_ring_set_wptr() |
| [all …] |
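vpe_ring_get_wptr() reads the 64-bit write pointer either from an atomically updated copy in CPU memory or by combining the hi/lo halves of two 32-bit registers, then shifts right by 2 because the hardware counts bytes while the ring code counts dwords; vpe_ring_set_wptr() shifts left by 2 on the way back out. A sketch of just the register path and the unit conversion, with hypothetical accessors in place of the driver's RREG32() calls:

    #include <stdint.h>

    /* Hypothetical 32-bit MMIO reads for the two halves of the write pointer. */
    extern uint32_t read_rb_wptr_hi(void);
    extern uint32_t read_rb_wptr_lo(void);

    /* Hardware tracks the pointer in bytes; the ring code works in dwords. */
    static uint64_t get_wptr_dwords(void)
    {
        uint64_t wptr = (uint64_t)read_rb_wptr_hi() << 32;

        wptr |= read_rb_wptr_lo();
        return wptr >> 2;                 /* bytes -> dwords */
    }

    static uint64_t wptr_dwords_to_doorbell(uint64_t wptr)
    {
        return wptr << 2;                 /* dwords -> bytes for the doorbell */
    }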
|
| amdgpu_ring_mux.c |
| 213 | void amdgpu_ring_mux_set_wptr(struct amdgpu_ring_mux *mux, struct amdgpu_ring *ring, u64 wptr) | in amdgpu_ring_mux_set_wptr() argument |
| 239 | e->sw_wptr = wptr; | in amdgpu_ring_mux_set_wptr() |
| 240 | e->start_ptr_in_hw_ring = mux->real_ring->wptr; | in amdgpu_ring_mux_set_wptr() |
| 243 | if (ring->hw_prio > AMDGPU_RING_PRIO_DEFAULT || mux->wptr_resubmit < wptr) { | in amdgpu_ring_mux_set_wptr() |
| 244 | amdgpu_ring_mux_copy_pkt_from_sw_ring(mux, ring, e->sw_cptr, wptr); | in amdgpu_ring_mux_set_wptr() |
| 245 | e->end_ptr_in_hw_ring = mux->real_ring->wptr; | in amdgpu_ring_mux_set_wptr() |
| 248 | e->end_ptr_in_hw_ring = mux->real_ring->wptr; | in amdgpu_ring_mux_set_wptr() |
| 340 | amdgpu_ring_mux_set_wptr(mux, ring, ring->wptr); | in amdgpu_sw_ring_set_wptr_gfx() |
| 428 | offset = ring->wptr & ring->buf_mask; | in amdgpu_sw_ring_ib_mark_offset() |
| 454 | chunk->start = ring->wptr; | in amdgpu_ring_mux_start_ib() |
| [all …] |
|
| amdgpu_cper.c |
| 483 | wptr_old = ring->wptr; | in amdgpu_cper_ring_write() |
| 487 | ent_sz = amdgpu_cper_ring_get_ent_sz(ring, ring->wptr); | in amdgpu_cper_ring_write() |
| 490 | memcpy(&ring->ring[ring->wptr], s, chunk); | in amdgpu_cper_ring_write() |
| 492 | ring->wptr += (chunk >> 2); | in amdgpu_cper_ring_write() |
| 493 | ring->wptr &= ring->ptr_mask; | in amdgpu_cper_ring_write() |
| 502 | if (((wptr_old < rptr) && (rptr <= ring->wptr)) || | in amdgpu_cper_ring_write() |
| 503 | ((ring->wptr < wptr_old) && (wptr_old < rptr)) || | in amdgpu_cper_ring_write() |
| 504 | ((rptr <= ring->wptr) && (ring->wptr < wptr_old))) { | in amdgpu_cper_ring_write() |
| 505 | pos = (ring->wptr + 1) & ring->ptr_mask; | in amdgpu_cper_ring_write() |
| 530 | return ring->wptr; | in amdgpu_cper_ring_get_wptr() |
| [all …] |
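The three-way comparison at lines 502-504 decides whether the region just written ran over the read pointer; two of the arms cover the case where the write wrapped around the end of the ring. Restated as a standalone predicate (the names are illustrative, not the driver's):

    #include <stdbool.h>
    #include <stdint.h>

    /*
     * After advancing the write pointer (possibly wrapping), report whether
     * the written region swallowed the read pointer, i.e. whether rptr lies
     * in the interval (wptr_old, wptr_new] measured forward around the ring.
     */
    static bool write_passed_reader(uint32_t wptr_old, uint32_t wptr_new, uint32_t rptr)
    {
        return ((wptr_old < rptr) && (rptr <= wptr_new)) ||      /* no wrap */
               ((wptr_new < wptr_old) && (wptr_old < rptr)) ||   /* wrapped; rptr before the end */
               ((rptr <= wptr_new) && (wptr_new < wptr_old));    /* wrapped; rptr after the wrap */
    }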
|
| vcn_v3_0.c |
| 387 | ring->wptr = 0; | in vcn_v3_0_hw_init() |
| 400 | ring->wptr = 0; | in vcn_v3_0_hw_init() |
| 1170 | ring->wptr = RREG32_SOC15(VCN, inst_idx, mmUVD_RBC_RB_RPTR); | in vcn_v3_0_start_dpg_mode() |
| 1172 | lower_32_bits(ring->wptr)); | in vcn_v3_0_start_dpg_mode() |
| 1176 | fw_shared->rb.wptr = lower_32_bits(ring->wptr); | in vcn_v3_0_start_dpg_mode() |
| 1350 | ring->wptr = RREG32_SOC15(VCN, i, mmUVD_RBC_RB_RPTR); | in vcn_v3_0_start() |
| 1352 | lower_32_bits(ring->wptr)); | in vcn_v3_0_start() |
| 1353 | fw_shared->rb.wptr = lower_32_bits(ring->wptr); | in vcn_v3_0_start() |
| 1360 | WREG32_SOC15(VCN, i, mmUVD_RB_RPTR, lower_32_bits(ring->wptr)); | in vcn_v3_0_start() |
| 1361 | WREG32_SOC15(VCN, i, mmUVD_RB_WPTR, lower_32_bits(ring->wptr)); | in vcn_v3_0_start() |
| [all …] |
|
| sdma_v6_0.c |
| 154 | ret = ring->wptr & ring->buf_mask; | in sdma_v6_0_ring_init_cond_exec() |
| 188 | u64 wptr = 0; | in sdma_v6_0_ring_get_wptr() local |
| 192 | wptr = READ_ONCE(*((u64 *)ring->wptr_cpu_addr)); | in sdma_v6_0_ring_get_wptr() |
| 193 | DRM_DEBUG("wptr/doorbell before shift == 0x%016llx\n", wptr); | in sdma_v6_0_ring_get_wptr() |
| 196 | return wptr >> 2; | in sdma_v6_0_ring_get_wptr() |
| 216 | lower_32_bits(ring->wptr << 2), | in sdma_v6_0_ring_set_wptr() |
| 217 | upper_32_bits(ring->wptr << 2)); | in sdma_v6_0_ring_set_wptr() |
| 220 | ring->wptr << 2); | in sdma_v6_0_ring_set_wptr() |
| 222 | ring->doorbell_index, ring->wptr << 2); | in sdma_v6_0_ring_set_wptr() |
| 223 | WDOORBELL64(ring->doorbell_index, ring->wptr << 2); | in sdma_v6_0_ring_set_wptr() |
| [all …] |
|
| amdgpu_ring.c |
| 95 | ring->wptr_old = ring->wptr; | in amdgpu_ring_alloc() |
| 120 | ring->wptr_old = ring->wptr; | in amdgpu_ring_alloc_reemit() |
| 137 | occupied = ring->wptr & ring->buf_mask; | in amdgpu_ring_insert_nop() |
| 148 | ring->wptr += count; | in amdgpu_ring_insert_nop() |
| 149 | ring->wptr &= ring->ptr_mask; | in amdgpu_ring_insert_nop() |
| 193 | (ring->wptr & ring->funcs->align_mask); | in amdgpu_ring_commit() |
| 215 | ring->wptr = ring->wptr_old; | in amdgpu_ring_undo() |
| 359 | ring->wptr = *ring->rptr_cpu_addr = 0; | in amdgpu_ring_init() |
| 589 | early[2] = ring->wptr & ring->buf_mask; | in amdgpu_debugfs_ring_read() |
| 812 | ring->wptr = 0; | in amdgpu_ring_init_mqd() |
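Two habits of this ring code are worth restating: a submission snapshots wptr into wptr_old so amdgpu_ring_undo() can roll everything back, and amdgpu_ring_commit() pads with NOP packets until wptr meets the engine's alignment. A sketch of both, under an assumed minimal ring struct (not the amdgpu one):

    #include <stdint.h>

    struct ring {
        uint32_t *buf;
        uint64_t wptr;        /* write position in dwords, masked on use */
        uint64_t wptr_old;    /* snapshot taken when the submission began */
        uint64_t buf_mask;    /* number of dwords in the buffer minus 1 */
        uint64_t ptr_mask;    /* wrap mask for the pointer itself */
        uint32_t align_mask;  /* required commit alignment minus 1 */
        uint32_t nop;         /* packet the engine treats as a no-op */
    };

    static void ring_write(struct ring *r, uint32_t v)
    {
        r->buf[r->wptr++ & r->buf_mask] = v;
        r->wptr &= r->ptr_mask;
    }

    /* Pad with NOPs until the write pointer reaches the required alignment. */
    static void ring_commit_pad(struct ring *r)
    {
        uint32_t count = r->align_mask + 1 - (uint32_t)(r->wptr & r->align_mask);

        count &= r->align_mask;          /* already aligned -> nothing to write */
        while (count--)
            ring_write(r, r->nop);
    }

    /* Abandon everything written since the submission began. */
    static void ring_undo(struct ring *r)
    {
        r->wptr = r->wptr_old;
    }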
|
| amdgpu_fence.c |
| 250 | drv->signalled_wptr = am_fence->wptr; | in amdgpu_fence_process() |
| 744 | af->wptr = af->ring->wptr; | in amdgpu_fence_save_wptr() |
| 765 | u64 wptr; | in amdgpu_ring_backup_unprocessed_commands() local |
| 770 | wptr = ring->fence_drv.signalled_wptr; | in amdgpu_ring_backup_unprocessed_commands() |
| 788 | amdgpu_ring_backup_unprocessed_command(ring, wptr, | in amdgpu_ring_backup_unprocessed_commands() |
| 789 | fence->wptr); | in amdgpu_ring_backup_unprocessed_commands() |
| 790 | wptr = fence->wptr; | in amdgpu_ring_backup_unprocessed_commands() |
|
| amdgpu_ring.h |
| 148 | u64 wptr; | member |
| 319 | u64 wptr; | member |
| 488 | ring->ring[ring->wptr++ & ring->buf_mask] = v; | in amdgpu_ring_write() |
| 489 | ring->wptr &= ring->ptr_mask; | in amdgpu_ring_write() |
| 498 | occupied = ring->wptr & ring->buf_mask; | in amdgpu_ring_write_multiple() |
| 513 | ring->wptr += count_dw; | in amdgpu_ring_write_multiple() |
| 514 | ring->wptr &= ring->ptr_mask; | in amdgpu_ring_write_multiple() |
| 536 | cur = (ring->wptr - 1) & ring->buf_mask; | in amdgpu_ring_patch_cond_exec() |
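amdgpu_ring_write() is the masked single-dword append (lines 488-489), and amdgpu_ring_write_multiple() bulk-copies a block, using "occupied" to find where the current position falls inside the buffer. A sketch of the bulk path that splits the copy at the wrap point; the chunking detail is inferred from the occupied/buf_mask usage rather than shown above, and the caller is assumed to have reserved enough space:

    #include <stdint.h>
    #include <string.h>

    /*
     * Copy count_dw dwords into a power-of-two ring, splitting the copy where
     * the buffer wraps. buf_mask is the dword capacity minus 1; ptr_mask wraps
     * the running write pointer.
     */
    static void ring_write_multiple(uint32_t *buf, uint64_t *wptr,
                                    uint64_t buf_mask, uint64_t ptr_mask,
                                    const uint32_t *src, uint32_t count_dw)
    {
        uint64_t occupied = *wptr & buf_mask;
        uint64_t chunk1 = buf_mask + 1 - occupied;   /* room before the wrap */

        if (chunk1 > count_dw)
            chunk1 = count_dw;

        memcpy(&buf[occupied], src, chunk1 * sizeof(uint32_t));
        if (count_dw > chunk1)
            memcpy(&buf[0], src + chunk1, (count_dw - chunk1) * sizeof(uint32_t));

        *wptr += count_dw;
        *wptr &= ptr_mask;
    }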
|
| /linux/drivers/net/ethernet/tehuti/ |
| tehuti.c |
| 171 | f->wptr = 0; | in bdx_fifo_init() |
| 1101 | rxfd = (struct rxf_desc *)(f->m.va + f->m.wptr); | in bdx_rx_alloc_skbs() |
| 1109 | f->m.wptr += sizeof(struct rxf_desc); | in bdx_rx_alloc_skbs() |
| 1110 | delta = f->m.wptr - f->m.memsz; | in bdx_rx_alloc_skbs() |
| 1112 | f->m.wptr = delta; | in bdx_rx_alloc_skbs() |
| 1121 | WRITE_REG(priv, f->m.reg_WPTR, f->m.wptr & TXF_WPTR_WR_PTR); | in bdx_rx_alloc_skbs() |
| 1156 | rxfd = (struct rxf_desc *)(f->m.va + f->m.wptr); | in bdx_recycle_skb() |
| 1164 | f->m.wptr += sizeof(struct rxf_desc); | in bdx_recycle_skb() |
| 1165 | delta = f->m.wptr - f->m.memsz; | in bdx_recycle_skb() |
| 1167 | f->m.wptr = delta; | in bdx_recycle_skb() |
| [all …] |
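Both tehuti.c here and tn40.c below advance a byte-offset write pointer by one descriptor, wrap it by the overshoot ("delta") when it passes the end of FIFO memory, and publish the low bits to the hardware write-pointer register. A sketch with illustrative names; the `if (delta >= 0)` test is implied by the surrounding lines rather than visible in them:

    #include <stdint.h>

    struct fifo {
        uint8_t *va;    /* CPU virtual address of the FIFO memory */
        int memsz;      /* FIFO size in bytes */
        int wptr;       /* write offset in bytes */
    };

    /*
     * Advance the write offset by one descriptor, wrap by the overshoot when
     * the end of the FIFO is passed, and return the value that would be
     * written to the hardware write-pointer register.
     */
    static int fifo_advance(struct fifo *f, int desc_sz, int wptr_field_mask)
    {
        int delta;

        f->wptr += desc_sz;
        delta = f->wptr - f->memsz;
        if (delta >= 0)
            f->wptr = delta;

        return f->wptr & wptr_field_mask;   /* e.g. TXF_WPTR_WR_PTR */
    }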
|
| tn40.c |
| 54 | f->wptr = 0; | in tn40_fifo_alloc() |
| 212 | rxfd = (struct tn40_rxf_desc *)(f->m.va + f->m.wptr); | in tn40_set_rx_desc() |
| 218 | f->m.wptr += sizeof(struct tn40_rxf_desc); | in tn40_set_rx_desc() |
| 219 | delta = f->m.wptr - f->m.memsz; | in tn40_set_rx_desc() |
| 221 | f->m.wptr = delta; | in tn40_set_rx_desc() |
| 261 | f->m.wptr & TN40_TXF_WPTR_WR_PTR); | in tn40_rx_alloc_buffers() |
| 263 | f->m.reg_wptr, f->m.wptr & TN40_TXF_WPTR_WR_PTR); | in tn40_rx_alloc_buffers() |
| 281 | tn40_write_reg(priv, f->m.reg_wptr, f->m.wptr & TN40_TXF_WPTR_WR_PTR); | in tn40_recycle_rx_buffer() |
| 296 | f->m.wptr = tn40_read_reg(priv, f->m.reg_wptr) & TN40_TXF_WPTR_WR_PTR; | in tn40_rx_receive() |
| 297 | size = f->m.wptr - f->m.rptr; | in tn40_rx_receive() |
| [all …] |
|
| /linux/drivers/crypto/ccp/ |
| tee-dev.c |
| 104 | tee->rb_mgr.wptr = 0; | in tee_init_ring() |
| 230 | (tee->rb_mgr.ring_start + tee->rb_mgr.wptr); | in tee_submit_cmd() |
| 237 | if (!(tee->rb_mgr.wptr + sizeof(struct tee_ring_cmd) == rptr || | in tee_submit_cmd() |
| 242 | rptr, tee->rb_mgr.wptr); | in tee_submit_cmd() |
| 252 | (tee->rb_mgr.wptr + sizeof(struct tee_ring_cmd) == rptr || | in tee_submit_cmd() |
| 255 | rptr, tee->rb_mgr.wptr, cmd->flag); | in tee_submit_cmd() |
| 278 | tee->rb_mgr.wptr += sizeof(struct tee_ring_cmd); | in tee_submit_cmd() |
| 279 | if (tee->rb_mgr.wptr >= tee->rb_mgr.ring_size) | in tee_submit_cmd() |
| 280 | tee->rb_mgr.wptr = 0; | in tee_submit_cmd() |
| 283 | iowrite32(tee->rb_mgr.wptr, tee->io_regs + tee->vdata->ring_wptr_reg); | in tee_submit_cmd() |
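tee_submit_cmd() treats the ring as full when advancing the write pointer by one command slot would land it on the read pointer, then advances wptr by the slot size, wraps it at ring_size, and writes it to the device's ring_wptr register. The sketch below is a simplified reading of that check (the driver's actual condition has an extra arm not visible in these lines); names are illustrative:

    #include <stdbool.h>
    #include <stdint.h>

    struct cmd_ring {
        uint32_t wptr;        /* write offset in bytes */
        uint32_t ring_size;   /* total size in bytes, a multiple of the slot size */
    };

    /*
     * Full means the next slot (with wrap-around) is the one the other side
     * has not read yet.
     */
    static bool ring_would_be_full(const struct cmd_ring *r, uint32_t rptr, uint32_t slot_sz)
    {
        uint32_t next = r->wptr + slot_sz;

        if (next >= r->ring_size)
            next = 0;
        return next == rptr;
    }

    /*
     * Advance by one slot, wrapping at the end; the returned offset is what
     * would then be written to the device's write-pointer register.
     */
    static uint32_t ring_advance(struct cmd_ring *r, uint32_t slot_sz)
    {
        r->wptr += slot_sz;
        if (r->wptr >= r->ring_size)
            r->wptr = 0;
        return r->wptr;
    }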
|
| /linux/drivers/gpu/drm/radeon/ |
| radeon_ring.c |
| 89 | ring->ring_free_dw -= ring->wptr; | in radeon_ring_free_size() |
| 130 | ring->wptr_old = ring->wptr; | in radeon_ring_alloc() |
| 178 | while (ring->wptr & ring->align_mask) { | in radeon_ring_commit() |
| 216 | ring->wptr = ring->wptr_old; | in radeon_ring_undo() |
| 316 | size = ring->wptr + (ring->ring_size / 4); | in radeon_ring_backup() |
| 472 | uint32_t rptr, wptr, rptr_next; | in radeon_debugfs_ring_info_show() local |
| 478 | wptr = radeon_ring_get_wptr(rdev, ring); | in radeon_debugfs_ring_info_show() |
| 480 | wptr, wptr); | in radeon_debugfs_ring_info_show() |
| 494 | ring->wptr, ring->wptr); | in radeon_debugfs_ring_info_show() |
|
| vce_v1_0.c |
| 98 | WREG32(VCE_RB_WPTR, ring->wptr); | in vce_v1_0_set_wptr() |
| 100 | WREG32(VCE_RB_WPTR2, ring->wptr); | in vce_v1_0_set_wptr() |
| 298 | WREG32(VCE_RB_RPTR, ring->wptr); | in vce_v1_0_start() |
| 299 | WREG32(VCE_RB_WPTR, ring->wptr); | in vce_v1_0_start() |
| 305 | WREG32(VCE_RB_RPTR2, ring->wptr); | in vce_v1_0_start() |
| 306 | WREG32(VCE_RB_WPTR2, ring->wptr); | in vce_v1_0_start() |
|
| /linux/drivers/gpu/drm/nouveau/nvkm/subdev/gsp/rm/r535/ |
| rpc.c |
| 153 | u32 wptr = *gsp->msgq.wptr; | in r535_gsp_msgq_wait() local |
| 155 | used = wptr + gsp->msgq.cnt - rptr; | in r535_gsp_msgq_wait() |
| 361 | u32 wptr, size, step, len; | in r535_gsp_cmdq_push() local |
| 377 | wptr = *gsp->cmdq.wptr; | in r535_gsp_cmdq_push() |
| 380 | free = *gsp->cmdq.rptr + gsp->cmdq.cnt - wptr - 1; | in r535_gsp_cmdq_push() |
| 394 | cqe = (void *)((u8 *)gsp->shm.cmdq.ptr + 0x1000 + wptr * 0x1000); | in r535_gsp_cmdq_push() |
| 395 | step = min_t(u32, free, (gsp->cmdq.cnt - wptr)); | in r535_gsp_cmdq_push() |
| 400 | wptr += DIV_ROUND_UP(size, 0x1000); | in r535_gsp_cmdq_push() |
| 401 | if (wptr == gsp->cmdq.cnt) | in r535_gsp_cmdq_push() |
| 402 | wptr = 0; | in r535_gsp_cmdq_push() |
| [all …] |
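The expressions at lines 155 and 380 are the standard occupancy formulas for a circular queue indexed in fixed-size slots (4 KiB pages in these GSP queues): used = wptr + cnt - rptr and free = rptr + cnt - wptr - 1, each reduced modulo cnt, with one slot kept in reserve so that wptr == rptr can only mean "empty". A sketch with the modulo reduction written out (it is implied rather than shown above):

    #include <stdint.h>

    /* Readable slots in a queue whose pointers are indices in [0, cnt). */
    static uint32_t queue_used(uint32_t wptr, uint32_t rptr, uint32_t cnt)
    {
        uint32_t used = wptr + cnt - rptr;

        if (used >= cnt)
            used -= cnt;
        return used;
    }

    /* Writable slots, keeping one in reserve to distinguish full from empty. */
    static uint32_t queue_free(uint32_t wptr, uint32_t rptr, uint32_t cnt)
    {
        uint32_t free = rptr + cnt - wptr - 1;

        if (free >= cnt)
            free -= cnt;
        return free;
    }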
|
| /linux/drivers/net/ppp/ |
| ppp_deflate.c |
| 190 | unsigned char *wptr; | in z_compress() local |
| 204 | wptr = obuf; | in z_compress() |
| 209 | wptr[0] = PPP_ADDRESS(rptr); | in z_compress() |
| 210 | wptr[1] = PPP_CONTROL(rptr); | in z_compress() |
| 211 | put_unaligned_be16(PPP_COMP, wptr + 2); | in z_compress() |
| 212 | wptr += PPP_HDRLEN; | in z_compress() |
| 213 | put_unaligned_be16(state->seqno, wptr); | in z_compress() |
| 214 | wptr += DEFLATE_OVHD; | in z_compress() |
| 216 | state->strm.next_out = wptr; | in z_compress() |
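z_compress() uses wptr as a cursor while it lays out the compressed frame's header in front of the deflate output: address and control bytes, the 16-bit PPP_COMP protocol, then a 16-bit sequence number, after which zlib's next_out takes over. A standalone sketch of that header construction; the byte counts (4-byte PPP header, 2-byte sequence overhead) are read off the writes above, and the helper names are not from the driver:

    #include <stdint.h>

    /* Store a 16-bit value big-endian at a possibly unaligned address. */
    static void put_be16(uint8_t *p, uint16_t v)
    {
        p[0] = (uint8_t)(v >> 8);
        p[1] = (uint8_t)v;
    }

    /*
     * Build the compressed-frame header and return the write pointer
     * positioned where the compressed payload (zlib's next_out) begins.
     */
    static uint8_t *build_comp_header(uint8_t *obuf, uint8_t address, uint8_t control,
                                      uint16_t comp_proto, uint16_t seqno)
    {
        uint8_t *wptr = obuf;

        wptr[0] = address;
        wptr[1] = control;
        put_be16(wptr + 2, comp_proto);
        wptr += 4;            /* address + control + 2-byte protocol */
        put_be16(wptr, seqno);
        wptr += 2;            /* sequence-number overhead */
        return wptr;
    }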
|
| /linux/drivers/video/fbdev/ |
| maxinefb.c |
| 67 | unsigned char *wptr; | in maxinefb_ims332_write_register() local |
| 69 | wptr = regs + 0xa0000 + (regno << 4); | in maxinefb_ims332_write_register() |
| 71 | *((volatile unsigned short *) (wptr)) = val; | in maxinefb_ims332_write_register() |
|
| /linux/drivers/gpu/drm/msm/adreno/ |
| a5xx_preempt.c |
| 43 | uint32_t wptr; | in update_wptr() local |
| 49 | wptr = get_wptr(ring); | in update_wptr() |
| 52 | gpu_write(gpu, REG_A5XX_CP_RB_WPTR, wptr); | in update_wptr() |
| 149 | a5xx_gpu->preempt[ring->id]->wptr = get_wptr(ring); | in a5xx_preempt_trigger() |
| 233 | a5xx_gpu->preempt[i]->wptr = 0; | in a5xx_preempt_hw_init() |
|
| /linux/drivers/tty/serial/ |
| men_z135_uart.c |
| 300 | u32 wptr; | in men_z135_handle_tx() local |
| 317 | wptr = ioread32(port->membase + MEN_Z135_TX_CTRL); | in men_z135_handle_tx() |
| 318 | txc = (wptr >> 16) & 0x3ff; | in men_z135_handle_tx() |
| 319 | wptr &= 0x3ff; | in men_z135_handle_tx() |
| 335 | if (align && qlen >= 3 && BYTES_TO_ALIGN(wptr)) | in men_z135_handle_tx() |
| 336 | n = 4 - BYTES_TO_ALIGN(wptr); | in men_z135_handle_tx() |
| 450 | u32 wptr; | in men_z135_tx_empty() local |
| 453 | wptr = ioread32(port->membase + MEN_Z135_TX_CTRL); | in men_z135_tx_empty() |
| 454 | txc = (wptr >> 16) & 0x3ff; | in men_z135_tx_empty() |
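Here wptr is first the raw TX control word and then, after masking, the FIFO write offset: the transmit character count sits in bits 25..16 and the write offset in bits 9..0, and an unaligned offset is topped up to the next 4-byte boundary before bulk writes. A sketch of the unpacking and the alignment top-up; the BYTES_TO_ALIGN definition is an assumption consistent with its use at lines 335-336:

    #include <stdint.h>

    /* Assumed: low two bits give the distance past the last 4-byte boundary. */
    #define BYTES_TO_ALIGN(x)   ((x) & 0x3)

    /* Split the TX control word into its two 10-bit fields. */
    static void unpack_tx_ctrl(uint32_t tx_ctrl, uint32_t *txc, uint32_t *wptr)
    {
        *txc  = (tx_ctrl >> 16) & 0x3ff;
        *wptr = tx_ctrl & 0x3ff;
    }

    /* Bytes to write first so that the FIFO offset becomes 32-bit aligned. */
    static uint32_t bytes_until_aligned(uint32_t wptr)
    {
        return BYTES_TO_ALIGN(wptr) ? 4 - BYTES_TO_ALIGN(wptr) : 0;
    }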
|