
Searched refs:wptr (Results 1 – 25 of 97) sorted by relevance


/drivers/media/usb/pvrusb2/
pvrusb2-debugifc.c
55 const char *wptr; in debugifc_isolate_word() local
60 wptr = NULL; in debugifc_isolate_word()
68 wptr = buf; in debugifc_isolate_word()
73 *wstrPtr = wptr; in debugifc_isolate_word()
182 const char *wptr; in pvr2_debugifc_do1cmd() local
186 scnt = debugifc_isolate_word(buf,count,&wptr,&wlen); in pvr2_debugifc_do1cmd()
189 if (!wptr) return 0; in pvr2_debugifc_do1cmd()
191 pvr2_trace(PVR2_TRACE_DEBUGIFC,"debugifc cmd: \"%.*s\"",wlen,wptr); in pvr2_debugifc_do1cmd()
192 if (debugifc_match_keyword(wptr,wlen,"reset")) { in pvr2_debugifc_do1cmd()
193 scnt = debugifc_isolate_word(buf,count,&wptr,&wlen); in pvr2_debugifc_do1cmd()
[all …]
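
The pvrusb2 hits use wptr as a word pointer while tokenizing a debug-interface command string: debugifc_isolate_word() skips whitespace, points wptr at the next word, and pvr2_debugifc_do1cmd() matches that word against keywords such as "reset". A minimal sketch of that kind of word isolation, with hypothetical names rather than the driver's actual signature:

#include <stddef.h>
#include <ctype.h>

/* Return the number of bytes consumed from buf. On return, *wstrp points
 * at the first word and *wlenp holds its length; *wstrp is NULL when the
 * buffer contains only whitespace. (Illustrative sketch only.) */
static size_t isolate_word(const char *buf, size_t count,
                           const char **wstrp, size_t *wlenp)
{
    const char *wptr = NULL;
    size_t consumed = 0, wlen = 0;

    /* Skip leading whitespace. */
    while (consumed < count && isspace((unsigned char)buf[consumed]))
        consumed++;

    /* Collect the word, if any. */
    if (consumed < count) {
        wptr = buf + consumed;
        while (consumed < count && !isspace((unsigned char)buf[consumed])) {
            consumed++;
            wlen++;
        }
    }

    *wstrp = wptr;
    *wlenp = wlen;
    return consumed;
}
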
/drivers/gpu/drm/amd/amdgpu/
amdgpu_ih.c
151 uint32_t wptr = le32_to_cpu(*ih->wptr_cpu) >> 2; in amdgpu_ih_ring_write() local
155 ih->ring[wptr++] = cpu_to_le32(iv[i]); in amdgpu_ih_ring_write()
157 wptr <<= 2; in amdgpu_ih_ring_write()
158 wptr &= ih->ptr_mask; in amdgpu_ih_ring_write()
161 if (wptr != READ_ONCE(ih->rptr)) { in amdgpu_ih_ring_write()
163 WRITE_ONCE(*ih->wptr_cpu, cpu_to_le32(wptr)); in amdgpu_ih_ring_write()
227 u32 wptr; in amdgpu_ih_process() local
232 wptr = amdgpu_ih_get_wptr(adev, ih); in amdgpu_ih_process()
236 DRM_DEBUG("%s: rptr %d, wptr %d\n", __func__, ih->rptr, wptr); in amdgpu_ih_process()
241 while (ih->rptr != wptr && --count) { in amdgpu_ih_process()
[all …]
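
In amdgpu_ih.c, wptr is the producer index of the interrupt-handling (IH) software ring: amdgpu_ih_ring_write() stores the entry, masks the index so it wraps, and only publishes the new wptr if it has not run into rptr, while amdgpu_ih_process() drains the ring until rptr catches up with wptr. A generic, kernel-helper-free sketch of that producer pattern, offered as an illustration rather than the driver's code:

#include <stdint.h>
#include <stdbool.h>

struct dw_ring {
    uint32_t *ring;     /* backing storage, power-of-two number of dwords */
    uint32_t ptr_mask;  /* number of dwords - 1 */
    uint32_t rptr;      /* consumer index (dwords) */
    uint32_t wptr;      /* producer index (dwords) */
};

/* Append 'num' dwords; returns false (and drops the entry) on overflow. */
static bool ring_write(struct dw_ring *r, const uint32_t *iv, unsigned num)
{
    uint32_t wptr = r->wptr;
    unsigned i;

    for (i = 0; i < num; i++)
        r->ring[wptr++ & r->ptr_mask] = iv[i];
    wptr &= r->ptr_mask;

    /* Publish the new write pointer only if it did not collide with rptr. */
    if (wptr == r->rptr)
        return false;   /* ring full: entry is discarded */
    r->wptr = wptr;
    return true;
}
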
iceland_ih.c
193 u32 wptr, tmp; in iceland_ih_get_wptr() local
195 wptr = le32_to_cpu(*ih->wptr_cpu); in iceland_ih_get_wptr()
197 if (!REG_GET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW)) in iceland_ih_get_wptr()
201 wptr = RREG32(mmIH_RB_WPTR); in iceland_ih_get_wptr()
203 if (!REG_GET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW)) in iceland_ih_get_wptr()
206 wptr = REG_SET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW, 0); in iceland_ih_get_wptr()
212 wptr, ih->rptr, (wptr + 16) & ih->ptr_mask); in iceland_ih_get_wptr()
213 ih->rptr = (wptr + 16) & ih->ptr_mask; in iceland_ih_get_wptr()
220 return (wptr & ih->ptr_mask); in iceland_ih_get_wptr()
cz_ih.c
193 u32 wptr, tmp; in cz_ih_get_wptr() local
195 wptr = le32_to_cpu(*ih->wptr_cpu); in cz_ih_get_wptr()
197 if (!REG_GET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW)) in cz_ih_get_wptr()
201 wptr = RREG32(mmIH_RB_WPTR); in cz_ih_get_wptr()
203 if (!REG_GET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW)) in cz_ih_get_wptr()
206 wptr = REG_SET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW, 0); in cz_ih_get_wptr()
213 wptr, ih->rptr, (wptr + 16) & ih->ptr_mask); in cz_ih_get_wptr()
214 ih->rptr = (wptr + 16) & ih->ptr_mask; in cz_ih_get_wptr()
221 return (wptr & ih->ptr_mask); in cz_ih_get_wptr()
tonga_ih.c
195 u32 wptr, tmp; in tonga_ih_get_wptr() local
197 wptr = le32_to_cpu(*ih->wptr_cpu); in tonga_ih_get_wptr()
199 if (!REG_GET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW)) in tonga_ih_get_wptr()
203 wptr = RREG32(mmIH_RB_WPTR); in tonga_ih_get_wptr()
205 if (!REG_GET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW)) in tonga_ih_get_wptr()
208 wptr = REG_SET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW, 0); in tonga_ih_get_wptr()
216 wptr, ih->rptr, (wptr + 16) & ih->ptr_mask); in tonga_ih_get_wptr()
217 ih->rptr = (wptr + 16) & ih->ptr_mask; in tonga_ih_get_wptr()
223 return (wptr & ih->ptr_mask); in tonga_ih_get_wptr()
si_ih.c
110 u32 wptr, tmp; in si_ih_get_wptr() local
112 wptr = le32_to_cpu(*ih->wptr_cpu); in si_ih_get_wptr()
114 if (wptr & IH_RB_WPTR__RB_OVERFLOW_MASK) { in si_ih_get_wptr()
115 wptr &= ~IH_RB_WPTR__RB_OVERFLOW_MASK; in si_ih_get_wptr()
117 wptr, ih->rptr, (wptr + 16) & ih->ptr_mask); in si_ih_get_wptr()
118 ih->rptr = (wptr + 16) & ih->ptr_mask; in si_ih_get_wptr()
123 return (wptr & ih->ptr_mask); in si_ih_get_wptr()
cik_ih.c
191 u32 wptr, tmp; in cik_ih_get_wptr() local
193 wptr = le32_to_cpu(*ih->wptr_cpu); in cik_ih_get_wptr()
195 if (wptr & IH_RB_WPTR__RB_OVERFLOW_MASK) { in cik_ih_get_wptr()
196 wptr &= ~IH_RB_WPTR__RB_OVERFLOW_MASK; in cik_ih_get_wptr()
202 wptr, ih->rptr, (wptr + 16) & ih->ptr_mask); in cik_ih_get_wptr()
203 ih->rptr = (wptr + 16) & ih->ptr_mask; in cik_ih_get_wptr()
208 return (wptr & ih->ptr_mask); in cik_ih_get_wptr()
navi10_ih.c
423 u32 wptr, tmp; in navi10_ih_get_wptr() local
426 wptr = le32_to_cpu(*ih->wptr_cpu); in navi10_ih_get_wptr()
429 if (!REG_GET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW)) in navi10_ih_get_wptr()
432 wptr = RREG32_NO_KIQ(ih_regs->ih_rb_wptr); in navi10_ih_get_wptr()
433 if (!REG_GET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW)) in navi10_ih_get_wptr()
435 wptr = REG_SET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW, 0); in navi10_ih_get_wptr()
441 tmp = (wptr + 32) & ih->ptr_mask; in navi10_ih_get_wptr()
444 wptr, ih->rptr, tmp); in navi10_ih_get_wptr()
451 return (wptr & ih->ptr_mask); in navi10_ih_get_wptr()
519 uint32_t wptr = cpu_to_le32(entry->src_data[0]); in navi10_ih_self_irq() local
[all …]
vega10_ih.c
338 u32 wptr, tmp; in vega10_ih_get_wptr() local
345 wptr = le32_to_cpu(*ih->wptr_cpu); in vega10_ih_get_wptr()
347 if (!REG_GET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW)) in vega10_ih_get_wptr()
354 wptr = RREG32_NO_KIQ(ih_regs->ih_rb_wptr); in vega10_ih_get_wptr()
355 if (!REG_GET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW)) in vega10_ih_get_wptr()
358 wptr = REG_SET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW, 0); in vega10_ih_get_wptr()
364 tmp = (wptr + 32) & ih->ptr_mask; in vega10_ih_get_wptr()
367 wptr, ih->rptr, tmp); in vega10_ih_get_wptr()
375 return (wptr & ih->ptr_mask); in vega10_ih_get_wptr()
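The per-ASIC *_ih_get_wptr() variants in these results (iceland, cz, tonga, si, cik, navi10, vega10 above, vega20 further down) all follow one pattern: read the write pointer from the CPU shadow copy, and if the overflow bit is set, re-read it from the hardware register, clear the bit, and move rptr just past the slot the hardware may have overwritten. A hedged, register-access-free sketch of that logic; the constants and the register stub are illustrative, not the real field definitions:

#include <stdint.h>

#define RB_OVERFLOW_BIT (1u << 31)  /* illustrative flag position */
#define ENTRY_SIZE      16          /* one ring entry, in the ring's addressing units (illustrative) */

struct ih_ring {
    volatile uint32_t *wptr_cpu;  /* shadow write pointer updated by the hardware */
    uint32_t ptr_mask;            /* ring size - 1 */
    uint32_t rptr;                /* software read pointer */
};

/* Stand-in for the MMIO read of the hardware write-pointer register. */
static uint32_t read_wptr_register(void)
{
    return 0; /* placeholder; a real driver reads the IH_RB_WPTR register */
}

static uint32_t ih_get_wptr(struct ih_ring *ih)
{
    uint32_t wptr = *ih->wptr_cpu;

    if (wptr & RB_OVERFLOW_BIT) {
        /* The shadow copy can lag the hardware; take the register value. */
        wptr = read_wptr_register();
        if (wptr & RB_OVERFLOW_BIT) {
            wptr &= ~RB_OVERFLOW_BIT;
            /* The ring wrapped over unread entries; the slot at wptr is the
             * oldest one and may be mid-update, so skip past it. */
            ih->rptr = (wptr + ENTRY_SIZE) & ih->ptr_mask;
        }
    }
    return wptr & ih->ptr_mask;
}
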
sdma_v5_2.c
210 ret = ring->wptr & ring->buf_mask;/* this is the offset we need patch later */ in sdma_v5_2_ring_init_cond_exec()
224 cur = (ring->wptr - 1) & ring->buf_mask; in sdma_v5_2_ring_patch_cond_exec()
259 u64 wptr; in sdma_v5_2_ring_get_wptr() local
263 wptr = READ_ONCE(*((u64 *)&adev->wb.wb[ring->wptr_offs])); in sdma_v5_2_ring_get_wptr()
264 DRM_DEBUG("wptr/doorbell before shift == 0x%016llx\n", wptr); in sdma_v5_2_ring_get_wptr()
266 wptr = RREG32(sdma_v5_2_get_reg_offset(adev, ring->me, mmSDMA0_GFX_RB_WPTR_HI)); in sdma_v5_2_ring_get_wptr()
267 wptr = wptr << 32; in sdma_v5_2_ring_get_wptr()
268 wptr |= RREG32(sdma_v5_2_get_reg_offset(adev, ring->me, mmSDMA0_GFX_RB_WPTR)); in sdma_v5_2_ring_get_wptr()
269 DRM_DEBUG("wptr before shift [%i] wptr == 0x%016llx\n", ring->me, wptr); in sdma_v5_2_ring_get_wptr()
272 return wptr >> 2; in sdma_v5_2_ring_get_wptr()
[all …]
vcn_v2_0.c
918 ring->wptr = RREG32_SOC15(UVD, 0, mmUVD_RBC_RB_RPTR); in vcn_v2_0_start_dpg_mode()
920 lower_32_bits(ring->wptr)); in vcn_v2_0_start_dpg_mode()
1076 ring->wptr = RREG32_SOC15(UVD, 0, mmUVD_RBC_RB_RPTR); in vcn_v2_0_start()
1078 lower_32_bits(ring->wptr)); in vcn_v2_0_start()
1083 WREG32_SOC15(UVD, 0, mmUVD_RB_RPTR, lower_32_bits(ring->wptr)); in vcn_v2_0_start()
1084 WREG32_SOC15(UVD, 0, mmUVD_RB_WPTR, lower_32_bits(ring->wptr)); in vcn_v2_0_start()
1092 WREG32_SOC15(UVD, 0, mmUVD_RB_RPTR2, lower_32_bits(ring->wptr)); in vcn_v2_0_start()
1093 WREG32_SOC15(UVD, 0, mmUVD_RB_WPTR2, lower_32_bits(ring->wptr)); in vcn_v2_0_start()
1234 ring->wptr = 0; in vcn_v2_0_pause_dpg_mode()
1238 WREG32_SOC15(UVD, 0, mmUVD_RB_RPTR, lower_32_bits(ring->wptr)); in vcn_v2_0_pause_dpg_mode()
[all …]
vcn_v3_0.c
332 ring->wptr = 0; in vcn_v3_0_hw_init()
344 ring->wptr = 0; in vcn_v3_0_hw_init()
1092 ring->wptr = RREG32_SOC15(VCN, inst_idx, mmUVD_RBC_RB_RPTR); in vcn_v3_0_start_dpg_mode()
1094 lower_32_bits(ring->wptr)); in vcn_v3_0_start_dpg_mode()
1098 fw_shared->rb.wptr = lower_32_bits(ring->wptr); in vcn_v3_0_start_dpg_mode()
1268 ring->wptr = RREG32_SOC15(VCN, i, mmUVD_RBC_RB_RPTR); in vcn_v3_0_start()
1270 lower_32_bits(ring->wptr)); in vcn_v3_0_start()
1271 fw_shared->rb.wptr = lower_32_bits(ring->wptr); in vcn_v3_0_start()
1277 WREG32_SOC15(VCN, i, mmUVD_RB_RPTR, lower_32_bits(ring->wptr)); in vcn_v3_0_start()
1278 WREG32_SOC15(VCN, i, mmUVD_RB_WPTR, lower_32_bits(ring->wptr)); in vcn_v3_0_start()
[all …]
sdma_v4_0.c
739 u64 wptr; in sdma_v4_0_ring_get_wptr() local
743 wptr = READ_ONCE(*((u64 *)&adev->wb.wb[ring->wptr_offs])); in sdma_v4_0_ring_get_wptr()
744 DRM_DEBUG("wptr/doorbell before shift == 0x%016llx\n", wptr); in sdma_v4_0_ring_get_wptr()
746 wptr = RREG32_SDMA(ring->me, mmSDMA0_GFX_RB_WPTR_HI); in sdma_v4_0_ring_get_wptr()
747 wptr = wptr << 32; in sdma_v4_0_ring_get_wptr()
748 wptr |= RREG32_SDMA(ring->me, mmSDMA0_GFX_RB_WPTR); in sdma_v4_0_ring_get_wptr()
750 ring->me, wptr); in sdma_v4_0_ring_get_wptr()
753 return wptr >> 2; in sdma_v4_0_ring_get_wptr()
776 lower_32_bits(ring->wptr << 2), in sdma_v4_0_ring_set_wptr()
777 upper_32_bits(ring->wptr << 2)); in sdma_v4_0_ring_set_wptr()
[all …]
vega20_ih.c
389 u32 wptr, tmp; in vega20_ih_get_wptr() local
396 wptr = le32_to_cpu(*ih->wptr_cpu); in vega20_ih_get_wptr()
398 if (!REG_GET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW)) in vega20_ih_get_wptr()
405 wptr = RREG32_NO_KIQ(ih_regs->ih_rb_wptr); in vega20_ih_get_wptr()
406 if (!REG_GET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW)) in vega20_ih_get_wptr()
409 wptr = REG_SET_FIELD(wptr, IH_RB_WPTR, RB_OVERFLOW, 0); in vega20_ih_get_wptr()
415 tmp = (wptr + 32) & ih->ptr_mask; in vega20_ih_get_wptr()
418 wptr, ih->rptr, tmp); in vega20_ih_get_wptr()
426 return (wptr & ih->ptr_mask); in vega20_ih_get_wptr()
sdma_v5_0.c
323 ret = ring->wptr & ring->buf_mask;/* this is the offset we need patch later */ in sdma_v5_0_ring_init_cond_exec()
337 cur = (ring->wptr - 1) & ring->buf_mask; in sdma_v5_0_ring_patch_cond_exec()
372 u64 wptr; in sdma_v5_0_ring_get_wptr() local
376 wptr = READ_ONCE(*((u64 *)&adev->wb.wb[ring->wptr_offs])); in sdma_v5_0_ring_get_wptr()
377 DRM_DEBUG("wptr/doorbell before shift == 0x%016llx\n", wptr); in sdma_v5_0_ring_get_wptr()
379 wptr = RREG32_SOC15_IP(GC, sdma_v5_0_get_reg_offset(adev, ring->me, mmSDMA0_GFX_RB_WPTR_HI)); in sdma_v5_0_ring_get_wptr()
380 wptr = wptr << 32; in sdma_v5_0_ring_get_wptr()
381 wptr |= RREG32_SOC15_IP(GC, sdma_v5_0_get_reg_offset(adev, ring->me, mmSDMA0_GFX_RB_WPTR)); in sdma_v5_0_ring_get_wptr()
382 DRM_DEBUG("wptr before shift [%i] wptr == 0x%016llx\n", ring->me, wptr); in sdma_v5_0_ring_get_wptr()
385 return wptr >> 2; in sdma_v5_0_ring_get_wptr()
[all …]
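
The SDMA get_wptr() hits (sdma_v4_0, sdma_v5_0, sdma_v5_2) assemble a 64-bit byte offset either from a write-back slot in system memory or from a HI/LO register pair, then shift right by 2 to convert it to dwords. A minimal sketch of that assembly, using hypothetical reader stubs rather than the driver's register interface:

#include <stdint.h>
#include <stdbool.h>

/* Stand-ins for a 64-bit write-back slot and two 32-bit MMIO reads. */
static uint64_t read_writeback_slot(void) { return 0; }
static uint32_t read_rb_wptr_hi(void)     { return 0; }
static uint32_t read_rb_wptr_lo(void)     { return 0; }

/* Returns the ring write pointer in dwords. */
static uint64_t sdma_get_wptr(bool use_writeback)
{
    uint64_t wptr;

    if (use_writeback) {
        /* Hardware mirrors the doorbell value into system memory. */
        wptr = read_writeback_slot();
    } else {
        /* Otherwise combine the HI/LO halves of the register pair. */
        wptr = (uint64_t)read_rb_wptr_hi() << 32;
        wptr |= read_rb_wptr_lo();
    }

    /* The registers hold a byte offset; the ring is indexed in dwords. */
    return wptr >> 2;
}
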
/drivers/net/ppp/
bsd_comp.c
580 unsigned char *wptr; in bsd_compress() local
586 if (wptr) \ in bsd_compress()
588 *wptr++ = (unsigned char) (v); \ in bsd_compress()
591 wptr = NULL; \ in bsd_compress()
630 wptr = obuf; in bsd_compress()
639 if (wptr) in bsd_compress()
641 *wptr++ = PPP_ADDRESS(rptr); in bsd_compress()
642 *wptr++ = PPP_CONTROL(rptr); in bsd_compress()
643 *wptr++ = 0; in bsd_compress()
644 *wptr++ = PPP_COMP; in bsd_compress()
[all …]
ppp_deflate.c
190 unsigned char *wptr; in z_compress() local
204 wptr = obuf; in z_compress()
209 wptr[0] = PPP_ADDRESS(rptr); in z_compress()
210 wptr[1] = PPP_CONTROL(rptr); in z_compress()
211 put_unaligned_be16(PPP_COMP, wptr + 2); in z_compress()
212 wptr += PPP_HDRLEN; in z_compress()
213 put_unaligned_be16(state->seqno, wptr); in z_compress()
214 wptr += DEFLATE_OVHD; in z_compress()
216 state->strm.next_out = wptr; in z_compress()
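In the PPP compressors, wptr is simply an output cursor: z_compress() writes the PPP address, control and protocol bytes plus a big-endian sequence number, then hands the rest of the buffer to zlib via strm.next_out. A sketch of that header construction with plain byte stores; the constants reflect standard PPP framing and should be checked against ppp_defs.h rather than taken from here:

#include <stdint.h>

#define PPP_ALLSTATIONS 0xff    /* address byte */
#define PPP_UI          0x03    /* control byte */
#define PPP_COMP        0x00fd  /* compressed-datagram protocol */

/* Write the 4-byte PPP header plus the 2-byte deflate sequence number and
 * return the cursor positioned where the compressed payload starts. */
static unsigned char *write_comp_header(unsigned char *obuf, uint16_t seqno)
{
    unsigned char *wptr = obuf;

    *wptr++ = PPP_ALLSTATIONS;
    *wptr++ = PPP_UI;
    *wptr++ = PPP_COMP >> 8;    /* protocol, big endian */
    *wptr++ = PPP_COMP & 0xff;
    *wptr++ = seqno >> 8;       /* sequence number, big endian */
    *wptr++ = seqno & 0xff;

    return wptr;
}
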
/drivers/net/ethernet/tehuti/
tehuti.c
171 f->wptr = 0; in bdx_fifo_init()
1099 rxfd = (struct rxf_desc *)(f->m.va + f->m.wptr); in bdx_rx_alloc_skbs()
1107 f->m.wptr += sizeof(struct rxf_desc); in bdx_rx_alloc_skbs()
1108 delta = f->m.wptr - f->m.memsz; in bdx_rx_alloc_skbs()
1110 f->m.wptr = delta; in bdx_rx_alloc_skbs()
1119 WRITE_REG(priv, f->m.reg_WPTR, f->m.wptr & TXF_WPTR_WR_PTR); in bdx_rx_alloc_skbs()
1154 rxfd = (struct rxf_desc *)(f->m.va + f->m.wptr); in bdx_recycle_skb()
1162 f->m.wptr += sizeof(struct rxf_desc); in bdx_recycle_skb()
1163 delta = f->m.wptr - f->m.memsz; in bdx_recycle_skb()
1165 f->m.wptr = delta; in bdx_recycle_skb()
[all …]
/drivers/gpu/drm/radeon/
radeon_ring.c
87 ring->ring_free_dw -= ring->wptr; in radeon_ring_free_size()
128 ring->wptr_old = ring->wptr; in radeon_ring_alloc()
176 while (ring->wptr & ring->align_mask) { in radeon_ring_commit()
214 ring->wptr = ring->wptr_old; in radeon_ring_undo()
314 size = ring->wptr + (ring->ring_size / 4); in radeon_ring_backup()
470 uint32_t rptr, wptr, rptr_next; in radeon_debugfs_ring_info_show() local
476 wptr = radeon_ring_get_wptr(rdev, ring); in radeon_debugfs_ring_info_show()
478 wptr, wptr); in radeon_debugfs_ring_info_show()
492 ring->wptr, ring->wptr); in radeon_debugfs_ring_info_show()
vce_v1_0.c
98 WREG32(VCE_RB_WPTR, ring->wptr); in vce_v1_0_set_wptr()
100 WREG32(VCE_RB_WPTR2, ring->wptr); in vce_v1_0_set_wptr()
298 WREG32(VCE_RB_RPTR, ring->wptr); in vce_v1_0_start()
299 WREG32(VCE_RB_WPTR, ring->wptr); in vce_v1_0_start()
305 WREG32(VCE_RB_RPTR2, ring->wptr); in vce_v1_0_start()
306 WREG32(VCE_RB_WPTR2, ring->wptr); in vce_v1_0_start()
/drivers/gpu/drm/amd/amdkfd/
kfd_kernel_queue.c
233 uint32_t wptr, rptr; in kq_acquire_packet_buffer() local
243 wptr = kq->pending_wptr; in kq_acquire_packet_buffer()
249 pr_debug("wptr: %d\n", wptr); in kq_acquire_packet_buffer()
252 available_size = (rptr + queue_size_dwords - 1 - wptr) % in kq_acquire_packet_buffer()
263 if (wptr + packet_size_in_dwords >= queue_size_dwords) { in kq_acquire_packet_buffer()
271 while (wptr > 0) { in kq_acquire_packet_buffer()
272 queue_address[wptr] = kq->nop_packet; in kq_acquire_packet_buffer()
273 wptr = (wptr + 1) % queue_size_dwords; in kq_acquire_packet_buffer()
278 *buffer_ptr = &queue_address[wptr]; in kq_acquire_packet_buffer()
279 kq->pending_wptr = wptr + packet_size_in_dwords; in kq_acquire_packet_buffer()
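kq_acquire_packet_buffer() reserves space in a dword ring: free space is (rptr + size - 1 - wptr) mod size, and if a packet would straddle the end of the ring the remaining slots are padded with NOP packets so the packet always occupies one contiguous run. A simplified, hypothetical sketch of that reservation logic:

#include <stdint.h>
#include <stddef.h>

#define NOP_PACKET 0u  /* illustrative NOP encoding */

/* Reserve 'pkt_dwords' contiguous dwords in a circular queue of
 * 'size_dwords' entries. Returns a pointer to the reserved area and
 * advances *pending_wptr, or NULL if there is not enough free space. */
static uint32_t *acquire_packet_buffer(uint32_t *queue, unsigned size_dwords,
                                       unsigned rptr, unsigned *pending_wptr,
                                       unsigned pkt_dwords)
{
    unsigned wptr = *pending_wptr;
    unsigned available = (rptr + size_dwords - 1 - wptr) % size_dwords;

    if (pkt_dwords > available)
        return NULL;

    /* If the packet would cross the end of the ring, pad with NOPs up to
     * the wrap point so the caller gets one contiguous region at offset 0. */
    if (wptr + pkt_dwords >= size_dwords) {
        if (pkt_dwords >= rptr)     /* not enough room at the start either */
            return NULL;
        while (wptr > 0) {
            queue[wptr] = NOP_PACKET;
            wptr = (wptr + 1) % size_dwords;
        }
    }

    *pending_wptr = wptr + pkt_dwords;
    return &queue[wptr];
}
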
/drivers/crypto/ccp/
tee-dev.c
124 tee->rb_mgr.wptr = 0; in tee_init_ring()
259 (tee->rb_mgr.ring_start + tee->rb_mgr.wptr); in tee_submit_cmd()
266 if (!(tee->rb_mgr.wptr + sizeof(struct tee_ring_cmd) == rptr || in tee_submit_cmd()
271 rptr, tee->rb_mgr.wptr); in tee_submit_cmd()
281 (tee->rb_mgr.wptr + sizeof(struct tee_ring_cmd) == rptr || in tee_submit_cmd()
284 rptr, tee->rb_mgr.wptr, cmd->flag); in tee_submit_cmd()
307 tee->rb_mgr.wptr += sizeof(struct tee_ring_cmd); in tee_submit_cmd()
308 if (tee->rb_mgr.wptr >= tee->rb_mgr.ring_size) in tee_submit_cmd()
309 tee->rb_mgr.wptr = 0; in tee_submit_cmd()
312 iowrite32(tee->rb_mgr.wptr, tee->io_regs + tee->vdata->ring_wptr_reg); in tee_submit_cmd()
/drivers/gpu/drm/msm/adreno/
adreno_gpu.c
468 uint32_t wptr; in adreno_flush() local
478 wptr = get_wptr(ring); in adreno_flush()
483 gpu_write(gpu, reg, wptr); in adreno_flush()
489 uint32_t wptr = get_wptr(ring); in adreno_idle() local
492 if (!spin_until(get_rptr(adreno_gpu, ring) == wptr)) in adreno_idle()
497 gpu->name, ring->id, get_rptr(adreno_gpu, ring), wptr); in adreno_idle()
518 state->ring[i].wptr = get_wptr(gpu->rb[i]); in adreno_gpu_state_get()
521 size = state->ring[i].wptr; in adreno_gpu_state_get()
524 for (j = state->ring[i].wptr; j < MSM_GPU_RINGBUFFER_SZ >> 2; j++) in adreno_gpu_state_get()
713 drm_printf(p, " wptr: %d\n", state->ring[i].wptr); in adreno_show()
[all …]
a5xx_preempt.c
43 uint32_t wptr; in update_wptr() local
49 wptr = get_wptr(ring); in update_wptr()
52 gpu_write(gpu, REG_A5XX_CP_RB_WPTR, wptr); in update_wptr()
135 a5xx_gpu->preempt[ring->id]->wptr = get_wptr(ring); in a5xx_preempt_trigger()
208 a5xx_gpu->preempt[i]->wptr = 0; in a5xx_preempt_hw_init()
/drivers/video/fbdev/
maxinefb.c
67 unsigned char *wptr; in maxinefb_ims332_write_register() local
69 wptr = regs + 0xa0000 + (regno << 4); in maxinefb_ims332_write_register()
71 *((volatile unsigned short *) (wptr)) = val; in maxinefb_ims332_write_register()
