
Searched refs:seqno (Results 1 – 25 of 45) sorted by relevance


/drivers/media/pci/saa7164/
saa7164-cmd.c
36 ret = dev->cmds[i].seqno; in saa7164_cmd_alloc_seqno()
45 static void saa7164_cmd_free_seqno(struct saa7164_dev *dev, u8 seqno) in saa7164_cmd_free_seqno() argument
48 if ((dev->cmds[seqno].inuse == 1) && in saa7164_cmd_free_seqno()
49 (dev->cmds[seqno].seqno == seqno)) { in saa7164_cmd_free_seqno()
50 dev->cmds[seqno].inuse = 0; in saa7164_cmd_free_seqno()
51 dev->cmds[seqno].signalled = 0; in saa7164_cmd_free_seqno()
52 dev->cmds[seqno].timeout = 0; in saa7164_cmd_free_seqno()
57 static void saa7164_cmd_timeout_seqno(struct saa7164_dev *dev, u8 seqno) in saa7164_cmd_timeout_seqno() argument
60 if ((dev->cmds[seqno].inuse == 1) && in saa7164_cmd_timeout_seqno()
61 (dev->cmds[seqno].seqno == seqno)) { in saa7164_cmd_timeout_seqno()
[all …]
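
The saa7164 hits above hand out and reclaim command slots by sequence number from a small fixed table, marking each slot inuse and double-checking that cmds[seqno].seqno still matches before freeing. Below is a minimal userspace sketch of that slot-pool idea; the pool size and names are illustrative, and the driver's locking and the signalled/timeout bookkeeping are left out.

#include <stdio.h>

#define NR_CMDS 8   /* illustrative pool size, not the driver's */

struct cmd_slot {
    int inuse;
    unsigned char seqno;
};

static struct cmd_slot cmds[NR_CMDS];

/* Hand out a free slot index as the command's seqno, or -1 if exhausted. */
static int cmd_alloc_seqno(void)
{
    for (int i = 0; i < NR_CMDS; i++) {
        if (!cmds[i].inuse) {
            cmds[i].inuse = 1;
            cmds[i].seqno = (unsigned char)i;
            return i;
        }
    }
    return -1;
}

/* Release a slot, but only if it really is the one that was handed out. */
static void cmd_free_seqno(unsigned char seqno)
{
    if (seqno < NR_CMDS && cmds[seqno].inuse && cmds[seqno].seqno == seqno)
        cmds[seqno].inuse = 0;
}

int main(void)
{
    int a = cmd_alloc_seqno();
    int b = cmd_alloc_seqno();

    printf("allocated seqnos %d and %d\n", a, b);
    cmd_free_seqno((unsigned char)a);
    printf("seqno %d is free again: next alloc = %d\n", a, cmd_alloc_seqno());
    return 0;
}
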
saa7164-bus.c
118 dprintk(DBGLVL_BUS, " .seqno = %d\n", m->seqno); in saa7164_bus_dumpmsg()
391 (msg_tmp.seqno != msg->seqno) || (msg_tmp.size != msg->size)) { in saa7164_bus_get()
saa7164-types.h
101 u8 seqno; member
116 u8 seqno; member
/drivers/gpu/drm/radeon/
radeon_trace.h
32 TP_PROTO(struct drm_device *dev, u32 seqno),
34 TP_ARGS(dev, seqno),
38 __field(u32, seqno)
43 __entry->seqno = seqno;
46 TP_printk("dev=%u, seqno=%u", __entry->dev, __entry->seqno)
51 TP_PROTO(struct drm_device *dev, u32 seqno),
53 TP_ARGS(dev, seqno)
58 TP_PROTO(struct drm_device *dev, u32 seqno),
60 TP_ARGS(dev, seqno)
65 TP_PROTO(struct drm_device *dev, u32 seqno),
[all …]
/drivers/gpu/drm/i915/
i915_trace.h
232 TP_PROTO(struct intel_ring_buffer *ring, u32 seqno, u32 flags),
233 TP_ARGS(ring, seqno, flags),
238 __field(u32, seqno)
245 __entry->seqno = seqno;
247 i915_trace_irq_get(ring, seqno);
251 __entry->dev, __entry->ring, __entry->seqno, __entry->flags)
278 TP_PROTO(struct intel_ring_buffer *ring, u32 seqno),
279 TP_ARGS(ring, seqno),
284 __field(u32, seqno)
290 __entry->seqno = seqno;
[all …]
i915_gem.c
951 i915_gem_check_olr(struct intel_ring_buffer *ring, u32 seqno) in i915_gem_check_olr() argument
958 if (seqno == ring->outstanding_lazy_request) in i915_gem_check_olr()
982 static int __wait_seqno(struct intel_ring_buffer *ring, u32 seqno, in __wait_seqno() argument
993 if (i915_seqno_passed(ring->get_seqno(ring, true), seqno)) in __wait_seqno()
996 trace_i915_gem_request_wait_begin(ring, seqno); in __wait_seqno()
1012 (i915_seqno_passed(ring->get_seqno(ring, false), seqno) || \ in __wait_seqno()
1039 trace_i915_gem_request_wait_end(ring, seqno); in __wait_seqno()
1067 i915_wait_seqno(struct intel_ring_buffer *ring, uint32_t seqno) in i915_wait_seqno() argument
1075 BUG_ON(seqno == 0); in i915_wait_seqno()
1081 ret = i915_gem_check_olr(ring, seqno); in i915_wait_seqno()
[all …]
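
__wait_seqno() and i915_wait_seqno() above return early once i915_seqno_passed() reports that the ring's current seqno has reached the target. The helper itself is not among these results; the usual wrap-safe way to write such a check is a signed 32-bit difference, sketched here as plain userspace C under that assumption.

#include <assert.h>
#include <stdbool.h>
#include <stdint.h>

/* Wrap-safe "has the counter reached target?" via signed difference.
 * Correct as long as the two values are less than 2^31 apart. */
static bool seqno_passed(uint32_t current, uint32_t target)
{
    return (int32_t)(current - target) >= 0;
}

int main(void)
{
    assert(seqno_passed(10, 5));
    assert(!seqno_passed(5, 10));
    /* Works across the 32-bit wrap: 0x00000002 comes "after" 0xfffffffe. */
    assert(seqno_passed(2, 0xfffffffeu));
    return 0;
}
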
intel_ringbuffer.h
94 u32 seqno);
103 u32 seqno);
220 void intel_ring_init_seqno(struct intel_ring_buffer *ring, u32 seqno);
242 static inline void i915_trace_irq_get(struct intel_ring_buffer *ring, u32 seqno) in i915_trace_irq_get() argument
245 ring->trace_irq_seqno = seqno; in i915_trace_irq_get()
intel_ringbuffer.c
621 u32 seqno) in i915_gem_has_seqno_wrapped() argument
624 return dev_priv->last_seqno < seqno; in i915_gem_has_seqno_wrapped()
637 u32 seqno) in gen6_ring_sync() argument
648 seqno -= 1; in gen6_ring_sync()
658 if (likely(!i915_gem_has_seqno_wrapped(waiter->dev, seqno))) { in gen6_ring_sync()
662 intel_ring_emit(waiter, seqno); in gen6_ring_sync()
752 ring_set_seqno(struct intel_ring_buffer *ring, u32 seqno) in ring_set_seqno() argument
754 intel_write_status_page(ring, I915_GEM_HWS_INDEX, seqno); in ring_set_seqno()
765 pc_render_set_seqno(struct intel_ring_buffer *ring, u32 seqno) in pc_render_set_seqno() argument
768 pc->cpu_page[0] = seqno; in pc_render_set_seqno()
[all …]
/drivers/gpu/drm/vmwgfx/
vmwgfx_irq.c
63 static bool vmw_fifo_idle(struct vmw_private *dev_priv, uint32_t seqno) in vmw_fifo_idle() argument
78 uint32_t seqno = ioread32(fifo_mem + SVGA_FIFO_FENCE); in vmw_update_seqno() local
80 if (dev_priv->last_read_seqno != seqno) { in vmw_update_seqno()
81 dev_priv->last_read_seqno = seqno; in vmw_update_seqno()
82 vmw_marker_pull(&fifo_state->marker_queue, seqno); in vmw_update_seqno()
88 uint32_t seqno) in vmw_seqno_passed() argument
93 if (likely(dev_priv->last_read_seqno - seqno < VMW_FENCE_WRAP)) in vmw_seqno_passed()
98 if (likely(dev_priv->last_read_seqno - seqno < VMW_FENCE_WRAP)) in vmw_seqno_passed()
102 vmw_fifo_idle(dev_priv, seqno)) in vmw_seqno_passed()
110 ret = ((atomic_read(&dev_priv->marker_seq) - seqno) in vmw_seqno_passed()
[all …]
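
vmw_seqno_passed() above uses the other common idiom: an unsigned difference compared against a wrap window (VMW_FENCE_WRAP), so a seqno counts as passed while it stays within that window behind the last value read from the FIFO. A minimal sketch of that window check follows; the window size used here is illustrative, not the driver's constant.

#include <assert.h>
#include <stdbool.h>
#include <stdint.h>

#define FENCE_WRAP (1u << 24)   /* illustrative window size */

/* True if 'seqno' is at or behind 'last_read' within the wrap window. */
static bool seqno_passed(uint32_t last_read, uint32_t seqno)
{
    return last_read - seqno < FENCE_WRAP;
}

int main(void)
{
    assert(seqno_passed(100, 90));
    assert(!seqno_passed(90, 100));
    /* Still correct just after the 32-bit counter wraps. */
    assert(seqno_passed(5, 0xfffffff0u));
    return 0;
}
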
vmwgfx_marker.c
33 uint32_t seqno; member
57 uint32_t seqno) in vmw_marker_push() argument
64 marker->seqno = seqno; in vmw_marker_push()
91 if (signaled_seqno - marker->seqno > (1 << 30)) in vmw_marker_pull()
148 uint32_t seqno; in vmw_wait_lag() local
154 seqno = atomic_read(&dev_priv->marker_seq); in vmw_wait_lag()
158 seqno = marker->seqno; in vmw_wait_lag()
162 ret = vmw_wait_seqno(dev_priv, false, seqno, true, in vmw_wait_lag()
168 (void) vmw_marker_pull(queue, seqno); in vmw_wait_lag()
vmwgfx_fence.c
211 u32 seqno, in vmw_fence_obj_init() argument
219 fence->seqno = seqno; in vmw_fence_obj_init()
329 iowrite32(fence->seqno, in vmw_fence_goal_new_locked()
365 goal_seqno - fence->seqno < VMW_FENCE_WRAP)) in vmw_fence_goal_check_locked()
368 iowrite32(fence->seqno, fifo_mem + SVGA_FIFO_FENCE_GOAL); in vmw_fence_goal_check_locked()
380 uint32_t seqno, new_seqno; in vmw_fences_update() local
383 seqno = ioread32(fifo_mem + SVGA_FIFO_FENCE); in vmw_fences_update()
387 if (seqno - fence->seqno < VMW_FENCE_WRAP) { in vmw_fences_update()
399 needs_rerun = vmw_fence_goal_new_locked(fman, seqno); in vmw_fences_update()
413 if (new_seqno != seqno) { in vmw_fences_update()
[all …]
vmwgfx_fifo.c
468 int vmw_fifo_send_fence(struct vmw_private *dev_priv, uint32_t *seqno) in vmw_fifo_send_fence() argument
478 *seqno = atomic_read(&dev_priv->marker_seq); in vmw_fifo_send_fence()
480 (void)vmw_fallback_wait(dev_priv, false, true, *seqno, in vmw_fifo_send_fence()
486 *seqno = atomic_add_return(1, &dev_priv->marker_seq); in vmw_fifo_send_fence()
487 } while (*seqno == 0); in vmw_fifo_send_fence()
504 iowrite32(*seqno, &cmd_fence->fence); in vmw_fifo_send_fence()
506 (void) vmw_marker_push(&fifo_state->marker_queue, *seqno); in vmw_fifo_send_fence()
vmwgfx_fence.h
54 u32 seqno; member
87 uint32_t seqno,
vmwgfx_drv.h
543 uint32_t *seqno);
610 uint32_t seqno, bool interruptible,
616 uint32_t seqno);
620 uint32_t seqno,
638 uint32_t seqno);
/drivers/net/ppp/
ppp_deflate.c
28 int seqno; member
153 state->seqno = 0; in z_comp_init()
173 state->seqno = 0; in z_comp_reset()
216 put_unaligned_be16(state->seqno, wptr); in z_compress()
221 ++state->seqno; in z_compress()
368 state->seqno = 0; in z_decomp_init()
389 state->seqno = 0; in z_decomp_reset()
431 if (seq != (state->seqno & 0xffff)) { in z_decompress()
434 state->unit, seq, state->seqno & 0xffff); in z_decompress()
437 ++state->seqno; in z_decompress()
[all …]
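
In ppp_deflate each compressed frame carries a 16-bit big-endian sequence number ahead of the deflate payload (put_unaligned_be16 in z_compress), and z_decompress rejects a frame whose header does not match state->seqno modulo 2^16; both counters advance by one per packet. A small sketch of that framing with the byte packing written out by hand; the compression itself and the struct names are left as illustrative placeholders.

#include <stdint.h>
#include <stdio.h>

struct comp_state {
    int seqno;      /* sequence number of the next packet */
};

/* Write the low 16 bits of the counter, big-endian, then advance it. */
static void put_seqno(struct comp_state *st, unsigned char *hdr)
{
    hdr[0] = (st->seqno >> 8) & 0xff;
    hdr[1] = st->seqno & 0xff;
    ++st->seqno;
}

/* Check an incoming header against our counter; returns 0 on a match. */
static int check_seqno(struct comp_state *st, const unsigned char *hdr)
{
    unsigned int seq = ((unsigned int)hdr[0] << 8) | hdr[1];

    if (seq != (unsigned int)(st->seqno & 0xffff)) {
        fprintf(stderr, "bad seq %u, expected %u\n",
                seq, (unsigned int)(st->seqno & 0xffff));
        return -1;
    }
    ++st->seqno;
    return 0;
}

int main(void)
{
    struct comp_state tx = { 0 }, rx = { 0 };
    unsigned char hdr[2];

    put_seqno(&tx, hdr);
    printf("first packet %s\n", check_seqno(&rx, hdr) ? "rejected" : "accepted");
    return 0;
}
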
bsd_comp.c
144 unsigned short seqno; /* sequence # of next packet */ member
317 db->seqno = 0; in bsd_reset()
645 *wptr++ = db->seqno >> 8; in bsd_compress()
646 *wptr++ = db->seqno; in bsd_compress()
745 ++db->seqno; in bsd_compress()
875 if (seq != db->seqno) in bsd_decompress()
880 db->unit, seq, db->seqno - 1); in bsd_decompress()
885 ++db->seqno; in bsd_decompress()
958 max_ent, explen, db->seqno); in bsd_decompress()
1122 db->unit, db->seqno - 1); in bsd_decompress()
/drivers/isdn/i4l/
isdn_bsdcomp.c
123 u16 seqno; /* sequence # of next packet */ member
269 db->seqno = 0; in bsd_reset()
505 v[0] = db->seqno >> 8; in bsd_compress()
506 v[1] = db->seqno; in bsd_compress()
588 ++db->seqno; in bsd_compress()
689 if (seq != db->seqno) { in bsd_decompress()
692 db->unit, seq, db->seqno - 1); in bsd_decompress()
697 ++db->seqno; in bsd_decompress()
753 max_ent, skb_out->len, db->seqno); in bsd_decompress()
888 db->unit, db->seqno - 1); in bsd_decompress()
/drivers/tty/hvc/
hvsi.c
88 atomic_t seqno; /* HVSI packet sequence number */ member
224 header->seqno); in dump_packet()
298 packet.hdr.seqno = atomic_inc_return(&hp->seqno); in hvsi_version_respond()
322 hvsi_version_respond(hp, query->hdr.seqno); in hvsi_recv_query()
558 packet.hdr.seqno = atomic_inc_return(&hp->seqno); in hvsi_query()
600 packet.hdr.seqno = atomic_inc_return(&hp->seqno); in hvsi_set_mctrl()
683 packet.hdr.seqno = atomic_inc_return(&hp->seqno); in hvsi_put_chars()
700 packet.hdr.seqno = atomic_inc_return(&hp->seqno); in hvsi_close_protocol()
729 atomic_set(&hp->seqno, 0); in hvsi_open()
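
hvsi stamps every outgoing packet header with atomic_inc_return(&hp->seqno) and resets the counter to 0 when the connection is opened, so concurrent senders each get a distinct, monotonically increasing value. The same pattern in portable C11 atomics; the structure and field names here are illustrative, not the driver's.

#include <stdatomic.h>
#include <stdio.h>

struct hv_console {
    atomic_uint seqno;      /* per-connection packet sequence number */
};

/* Equivalent of the kernel's atomic_inc_return(): add 1, return the new value. */
static unsigned int next_seqno(struct hv_console *hp)
{
    return atomic_fetch_add(&hp->seqno, 1) + 1;
}

int main(void)
{
    struct hv_console hp;
    unsigned int a, b, c;

    atomic_init(&hp.seqno, 0);          /* reset on open, as in hvsi_open() */
    a = next_seqno(&hp);
    b = next_seqno(&hp);
    c = next_seqno(&hp);
    printf("%u %u %u\n", a, b, c);      /* prints 1 2 3 */
    return 0;
}
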
hvsi_lib.c
12 packet->seqno = atomic_inc_return(&pv->seqno); in hvsi_send_packet()
24 atomic_set(&pv->seqno, 0); in hvsi_start_handshake()
101 r.query_seqno = pkt->hdr.seqno; in hvsi_got_query()
268 q.hdr.seqno = atomic_inc_return(&pv->seqno); in hvsilib_read_mctrl()
/drivers/net/wireless/ath/ath9k/
xmit.c
65 int seqno);
136 static void ath_send_bar(struct ath_atx_tid *tid, u16 seqno) in ath_send_bar() argument
139 seqno << IEEE80211_SEQ_SEQ_SHIFT); in ath_send_bar()
177 ath_tx_update_baw(sc, tid, bf->bf_state.seqno); in ath_tx_flush_tid()
194 int seqno) in ath_tx_update_baw() argument
198 index = ATH_BA_INDEX(tid->seq_start, seqno); in ath_tx_update_baw()
212 u16 seqno) in ath_tx_addto_baw() argument
216 index = ATH_BA_INDEX(tid->seq_start, seqno); in ath_tx_addto_baw()
257 ath_tx_update_baw(sc, tid, bf->bf_state.seqno); in ath_tid_drain()
351 ba_index = ATH_BA_INDEX(seq_st, bf->bf_state.seqno); in ath_tx_count_frames()
[all …]
/drivers/net/ethernet/sfc/
mcdi.c
77 u32 xflags, seqno; in efx_mcdi_copyin() local
82 seqno = mcdi->seqno & SEQ_MASK; in efx_mcdi_copyin()
92 MCDI_HEADER_SEQ, seqno, in efx_mcdi_copyin()
173 } else if ((respseq ^ mcdi->seqno) & SEQ_MASK) { in efx_mcdi_poll()
176 respseq, mcdi->seqno); in efx_mcdi_poll()
301 static void efx_mcdi_ev_cpl(struct efx_nic *efx, unsigned int seqno, in efx_mcdi_ev_cpl() argument
309 if ((seqno ^ mcdi->seqno) & SEQ_MASK) { in efx_mcdi_ev_cpl()
316 "seq 0x%x\n", seqno, mcdi->seqno); in efx_mcdi_ev_cpl()
350 ++mcdi->seqno; in efx_mcdi_rpc_start()
375 ++mcdi->seqno; in efx_mcdi_rpc_finish()
/drivers/bluetooth/
hci_bcsp.c
353 u8 seqno; in bcsp_pkt_cull() local
358 seqno = bcsp->msgq_txseq; in bcsp_pkt_cull()
361 if (bcsp->rxack == seqno) in bcsp_pkt_cull()
364 seqno = (seqno - 1) & 0x07; in bcsp_pkt_cull()
367 if (bcsp->rxack != seqno) in bcsp_pkt_cull()
372 (seqno - 1) & 0x07); in bcsp_pkt_cull()
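
bcsp_pkt_cull() above walks backwards from the driver's transmit sequence number toward the peer's acknowledgement, one step at a time modulo 8 (BCSP sequence numbers are 3 bits wide), to work out how many unacked packets the ack covers. The sketch below mirrors that walk; reading rxack as "the next seqno the peer expects" is an assumption about the protocol, and the queue handling around it is omitted.

#include <stdio.h>

/* Given the next seqno we would transmit, the peer's ack and the number of
 * packets awaiting acknowledgement, return how many of them are now covered.
 * All sequence arithmetic is taken mod 8. */
static int pkts_to_remove(unsigned char msgq_txseq, unsigned char rxack,
                          int unacked)
{
    unsigned char seqno = msgq_txseq;
    int pkts = unacked;

    while (pkts) {
        if (rxack == seqno)
            break;
        seqno = (seqno - 1) & 0x07;
        pkts--;
    }
    if (rxack != seqno)
        return 0;       /* ack does not match anything we sent */
    return pkts;
}

int main(void)
{
    /* Next tx seqno is 2, four packets (6, 7, 0, 1) await an ack, and the
     * peer now expects 0: packets 6 and 7 are covered. */
    printf("%d packets acked\n", pkts_to_remove(2, 0, 4));
    return 0;
}
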
/drivers/net/wireless/rt2x00/
rt2x00queue.c
222 u16 seqno; in rt2x00queue_create_tx_descriptor_seq() local
255 seqno = atomic_add_return(0x10, &intf->seqno); in rt2x00queue_create_tx_descriptor_seq()
257 seqno = atomic_read(&intf->seqno); in rt2x00queue_create_tx_descriptor_seq()
260 hdr->seq_ctrl |= cpu_to_le16(seqno); in rt2x00queue_create_tx_descriptor_seq()
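
rt2x00 keeps the whole 16-bit 802.11 Sequence Control value in one atomic counter: the sequence number occupies the upper 12 bits and the fragment number the low 4 bits, so atomic_add_return(0x10, &intf->seqno) advances the sequence number by exactly one while leaving the fragment bits alone. A small sketch of that trick; the macro names below mirror the kernel's IEEE80211_SEQ_* definitions but are redefined locally.

#include <stdatomic.h>
#include <stdint.h>
#include <stdio.h>

#define SEQ_SEQ_SHIFT 4
#define SEQ_SEQ_MASK  0xfff0u
#define SEQ_FRAG_MASK 0x000fu

int main(void)
{
    atomic_uint seqno;
    atomic_init(&seqno, 0);

    for (int i = 0; i < 3; i++) {
        /* Equivalent of atomic_add_return(0x10, ...): +1 in sequence-number
         * units, fragment bits untouched. */
        uint16_t seq_ctrl = (uint16_t)(atomic_fetch_add(&seqno, 0x10) + 0x10);

        printf("seq_ctrl=0x%04x  seq=%u  frag=%u\n",
               (unsigned)seq_ctrl,
               (unsigned)((seq_ctrl & SEQ_SEQ_MASK) >> SEQ_SEQ_SHIFT),
               (unsigned)(seq_ctrl & SEQ_FRAG_MASK));
    }
    return 0;
}
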
/drivers/net/wireless/ath/ath6kl/
htc_pipe.c
167 packet->info.tx.seqno = ep->seqno; in get_htc_packet_credit_based()
168 ep->seqno++; in get_htc_packet_credit_based()
194 packet->info.tx.seqno = ep->seqno; in get_htc_packet()
197 ep->seqno++; in get_htc_packet()
248 htc_hdr->ctrl[1] = (u8) packet->info.tx.seqno; in htc_issue_packets()
htc.h
270 int seqno; member
520 u8 seqno; member
