
Searched refs:tq (Results 1 – 18 of 18) sorted by relevance

/drivers/net/vmxnet3/
vmxnet3_drv.c
103 vmxnet3_tq_stopped(struct vmxnet3_tx_queue *tq, struct vmxnet3_adapter *adapter) in vmxnet3_tq_stopped() argument
105 return tq->stopped; in vmxnet3_tq_stopped()
110 vmxnet3_tq_start(struct vmxnet3_tx_queue *tq, struct vmxnet3_adapter *adapter) in vmxnet3_tq_start() argument
112 tq->stopped = false; in vmxnet3_tq_start()
113 netif_start_subqueue(adapter->netdev, tq - adapter->tx_queue); in vmxnet3_tq_start()
118 vmxnet3_tq_wake(struct vmxnet3_tx_queue *tq, struct vmxnet3_adapter *adapter) in vmxnet3_tq_wake() argument
120 tq->stopped = false; in vmxnet3_tq_wake()
121 netif_wake_subqueue(adapter->netdev, (tq - adapter->tx_queue)); in vmxnet3_tq_wake()
126 vmxnet3_tq_stop(struct vmxnet3_tx_queue *tq, struct vmxnet3_adapter *adapter) in vmxnet3_tq_stop() argument
128 tq->stopped = true; in vmxnet3_tq_stop()
[all …]
vmxnet3_ethtool.c
478 struct vmxnet3_tx_queue *tq = &adapter->tx_queue[i]; in vmxnet3_get_regs() local
483 buf[j++] = VMXNET3_GET_ADDR_LO(tq->tx_ring.basePA); in vmxnet3_get_regs()
484 buf[j++] = VMXNET3_GET_ADDR_HI(tq->tx_ring.basePA); in vmxnet3_get_regs()
485 buf[j++] = tq->tx_ring.size; in vmxnet3_get_regs()
486 buf[j++] = tq->tx_ring.next2fill; in vmxnet3_get_regs()
487 buf[j++] = tq->tx_ring.next2comp; in vmxnet3_get_regs()
488 buf[j++] = tq->tx_ring.gen; in vmxnet3_get_regs()
490 buf[j++] = VMXNET3_GET_ADDR_LO(tq->data_ring.basePA); in vmxnet3_get_regs()
491 buf[j++] = VMXNET3_GET_ADDR_HI(tq->data_ring.basePA); in vmxnet3_get_regs()
492 buf[j++] = tq->data_ring.size; in vmxnet3_get_regs()
[all …]
vmxnet3_int.h
413 #define VMXNET3_WAKE_QUEUE_THRESHOLD(tq) (5) argument
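
Taken together, the vmxnet3 matches show the usual stop/wake flow control for a multi-queue NIC: the subqueue index handed to netif_start_subqueue()/netif_wake_subqueue() is recovered by pointer arithmetic (tq - adapter->tx_queue), and VMXNET3_WAKE_QUEUE_THRESHOLD() (a constant 5) provides the wake hysteresis. A minimal completion-path sketch using these helpers; demo_tx_complete() is a made-up name, not the driver's actual handler:

    /* Illustrative only: wake a stopped subqueue once the ring has
     * more free descriptors than the wake threshold.
     */
    static void demo_tx_complete(struct vmxnet3_tx_queue *tq,
                                 struct vmxnet3_adapter *adapter)
    {
            /* ... reclaim completed descriptors here ... */
            if (vmxnet3_tq_stopped(tq, adapter) &&
                vmxnet3_cmd_ring_desc_avail(&tq->tx_ring) >
                            VMXNET3_WAKE_QUEUE_THRESHOLD(tq))
                    vmxnet3_tq_wake(tq, adapter);
    }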
/drivers/net/wireless/ath/ath5k/
qcu.c
286 struct ath5k_txq_info *tq = &ah->ah_txq[queue]; in ath5k_hw_set_tx_retry_limits() local
292 (tq->tqi_cw_min << AR5K_NODCU_RETRY_LMT_CW_MIN_S) in ath5k_hw_set_tx_retry_limits()
326 struct ath5k_txq_info *tq = &ah->ah_txq[queue]; in ath5k_hw_reset_tx_queue() local
333 (tq->tqi_type == AR5K_TX_QUEUE_INACTIVE)) in ath5k_hw_reset_tx_queue()
341 AR5K_REG_SM(tq->tqi_cw_min, AR5K_DCU_LCL_IFS_CW_MIN) | in ath5k_hw_reset_tx_queue()
342 AR5K_REG_SM(tq->tqi_cw_max, AR5K_DCU_LCL_IFS_CW_MAX) | in ath5k_hw_reset_tx_queue()
343 AR5K_REG_SM(tq->tqi_aifs, AR5K_DCU_LCL_IFS_AIFS), in ath5k_hw_reset_tx_queue()
366 if (tq->tqi_cbr_period) { in ath5k_hw_reset_tx_queue()
367 ath5k_hw_reg_write(ah, AR5K_REG_SM(tq->tqi_cbr_period, in ath5k_hw_reset_tx_queue()
369 AR5K_REG_SM(tq->tqi_cbr_overflow_limit, in ath5k_hw_reset_tx_queue()
[all …]
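
The ath5k matches all follow the AR5K_REG_SM() idiom: shift a field value to its register position and mask off anything that overflows the field. A self-contained sketch of the same shift-and-mask pattern; the DEMO_* masks and shifts are invented for illustration:

    #include <stdint.h>
    #include <stdio.h>

    #define DEMO_CW_MIN   0x000003ff  /* field mask  */
    #define DEMO_CW_MIN_S 0           /* field shift */
    #define DEMO_CW_MAX   0x000ffc00
    #define DEMO_CW_MAX_S 10

    /* Same shape as AR5K_REG_SM(_val, _flags): shift, then mask. */
    #define DEMO_SM(v, f) (((uint32_t)(v) << f##_S) & (f))

    int main(void)
    {
            uint32_t ifs = DEMO_SM(15, DEMO_CW_MIN) | DEMO_SM(1023, DEMO_CW_MAX);
            printf("0x%08x\n", ifs); /* 0x000ffc0f */
            return 0;
    }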
/drivers/net/ethernet/chelsio/cxgb4vf/
sge.c
233 static inline unsigned int txq_avail(const struct sge_txq *tq) in txq_avail() argument
235 return tq->size - 1 - tq->in_use; in txq_avail()
307 const struct ulptx_sgl *sgl, const struct sge_txq *tq) in unmap_sgl() argument
326 if (likely((u8 *)(p + 1) <= (u8 *)tq->stat)) { in unmap_sgl()
333 } else if ((u8 *)p == (u8 *)tq->stat) { in unmap_sgl()
334 p = (const struct ulptx_sge_pair *)tq->desc; in unmap_sgl()
336 } else if ((u8 *)p + 8 == (u8 *)tq->stat) { in unmap_sgl()
337 const __be64 *addr = (const __be64 *)tq->desc; in unmap_sgl()
345 const __be64 *addr = (const __be64 *)tq->desc; in unmap_sgl()
357 if ((u8 *)p == (u8 *)tq->stat) in unmap_sgl()
[all …]
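
txq_avail() above encodes the classic ring-buffer convention of keeping one descriptor permanently unused, so a full ring is distinguishable from an empty one. A runnable stand-in with illustrative names:

    #include <stdio.h>

    struct demo_ring {
            unsigned int size;   /* total descriptors */
            unsigned int in_use; /* currently posted  */
    };

    /* Mirrors txq_avail(): one slot is reserved, hence "size - 1". */
    static unsigned int demo_avail(const struct demo_ring *r)
    {
            return r->size - 1 - r->in_use;
    }

    int main(void)
    {
            struct demo_ring r = { .size = 1024, .in_use = 1000 };
            printf("%u descriptors free\n", demo_avail(&r)); /* 23 */
            return 0;
    }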
cxgb4vf_main.c
535 struct sge_txq *tq; in fwevtq_handler() local
552 tq = s->egr_map[eq_idx]; in fwevtq_handler()
553 if (unlikely(tq == NULL)) { in fwevtq_handler()
558 txq = container_of(tq, struct sge_eth_txq, q); in fwevtq_handler()
559 if (unlikely(tq->abs_id != qid)) { in fwevtq_handler()
562 qid, tq->abs_id); in fwevtq_handler()
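
fwevtq_handler() looks up a bare struct sge_txq in the egress map and then recovers the enclosing struct sge_eth_txq with container_of(). The same trick in plain C, with simplified stand-in structures:

    #include <stddef.h>
    #include <stdio.h>

    /* Userspace equivalent of the kernel's container_of(). */
    #define demo_container_of(ptr, type, member) \
            ((type *)((char *)(ptr) - offsetof(type, member)))

    struct demo_txq     { unsigned int abs_id; };
    struct demo_eth_txq { int stats; struct demo_txq q; };

    int main(void)
    {
            struct demo_eth_txq eth = { .stats = 7, .q = { .abs_id = 42 } };
            struct demo_txq *tq = &eth.q;  /* what the egress map stores */
            struct demo_eth_txq *back =
                    demo_container_of(tq, struct demo_eth_txq, q);
            printf("%d\n", back->stats); /* prints 7 */
            return 0;
    }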
/drivers/media/v4l2-core/
v4l2-jpeg.c
211 int c, h_v, tq; in jpeg_parse_frame_header() local
232 tq = jpeg_get_byte(stream); in jpeg_parse_frame_header()
233 if (tq < 0) in jpeg_parse_frame_header()
234 return tq; in jpeg_parse_frame_header()
241 component->quantization_table_selector = tq; in jpeg_parse_frame_header()
317 u8 pq, tq, *qk; in jpeg_parse_quantization_tables() local
335 tq = pq_tq & 0xf; in jpeg_parse_quantization_tables()
336 if (tq > 3) in jpeg_parse_quantization_tables()
346 tables[tq].start = qk; in jpeg_parse_quantization_tables()
347 tables[tq].length = pq ? 128 : 64; in jpeg_parse_quantization_tables()
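
In a JPEG DQT segment, each table starts with a combined Pq/Tq byte: the high nibble Pq selects 8- or 16-bit precision (64 vs. 128 bytes of table data) and the low nibble Tq is the destination slot, which ITU-T T.81 limits to 0-3, exactly the checks visible above. A runnable sketch:

    #include <stdio.h>

    int main(void)
    {
            unsigned char pq_tq = 0x12;    /* example byte: Pq = 1, Tq = 2 */
            unsigned int pq = pq_tq >> 4;  /* 0: 8-bit, 1: 16-bit          */
            unsigned int tq = pq_tq & 0xf; /* destination slot             */

            if (tq > 3)
                    return 1;              /* invalid table id             */
            printf("slot %u, %u bytes of table data\n", tq, pq ? 128 : 64);
            return 0;
    }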
/drivers/net/
ifb.c
51 struct sk_buff_head tq; member
69 skb = skb_peek(&txp->tq); in ifb_ri_tasklet()
73 skb_queue_splice_tail_init(&txp->rq, &txp->tq); in ifb_ri_tasklet()
77 while ((skb = __skb_dequeue(&txp->tq)) != NULL) { in ifb_ri_tasklet()
94 if (skb_queue_len(&txp->tq) != 0) in ifb_ri_tasklet()
172 __skb_queue_head_init(&txp->tq); in ifb_dev_init()
206 __skb_queue_purge(&txp->tq); in ifb_dev_free()
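
ifb keeps two skb lists per queue: producers feed rq, and the tasklet moves everything onto its private tq in one splice before draining it. A kernel-style sketch of that two-list pattern; locking here is simplified and the function name is invented:

    #include <linux/skbuff.h>

    static void demo_drain(struct sk_buff_head *rq, struct sk_buff_head *tq)
    {
            struct sk_buff *skb;

            spin_lock(&rq->lock);
            skb_queue_splice_tail_init(rq, tq); /* rq is now empty */
            spin_unlock(&rq->lock);

            while ((skb = __skb_dequeue(tq)) != NULL)
                    dev_kfree_skb(skb);         /* the real tasklet transmits */
    }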
/drivers/input/serio/
hp_sdc.c
190 curr = hp_sdc.tq[hp_sdc.rcurr]; in hp_sdc_take()
316 curr = hp_sdc.tq[hp_sdc.rcurr]; in hp_sdc_tasklet()
378 if (hp_sdc.tq[curridx] != NULL) in hp_sdc_put()
392 if (hp_sdc.tq[curridx] != NULL) in hp_sdc_put()
412 curr = hp_sdc.tq[curridx]; in hp_sdc_put()
416 hp_sdc.tq[curridx] = NULL; in hp_sdc_put()
430 hp_sdc.tq[curridx] = NULL; in hp_sdc_put()
573 hp_sdc.tq[curridx] = NULL; in hp_sdc_put()
609 if (hp_sdc.tq[i] == this) in __hp_sdc_enqueue_transaction()
617 if (hp_sdc.tq[i] == NULL) { in __hp_sdc_enqueue_transaction()
[all …]
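
hp_sdc.tq[] is a fixed array of transaction slots in which NULL marks a free entry: enqueue scans for a duplicate, then claims the first NULL slot, and completion writes NULL back, matching the checks at lines 609 and 617 above. A self-contained stand-in:

    #include <stdio.h>

    #define NSLOTS 8
    static void *slots[NSLOTS];

    static int demo_enqueue(void *t)
    {
            int i;

            for (i = 0; i < NSLOTS; i++)
                    if (slots[i] == t)
                            return -1;    /* already queued */
            for (i = 0; i < NSLOTS; i++)
                    if (slots[i] == NULL) {
                            slots[i] = t; /* claim the free slot */
                            return i;
                    }
            return -1;                    /* queue full */
    }

    int main(void)
    {
            int x;
            printf("slot %d\n", demo_enqueue(&x)); /* slot 0 */
            return 0;
    }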
/drivers/input/keyboard/
sunkbd.c
63 struct work_struct tq; member
103 schedule_work(&sunkbd->tq); in sunkbd_interrupt()
230 struct sunkbd *sunkbd = container_of(work, struct sunkbd, tq); in sunkbd_reinit()
253 cancel_work_sync(&sunkbd->tq); in sunkbd_enable()
277 INIT_WORK(&sunkbd->tq, sunkbd_reinit); in sunkbd_connect()
lkkbd.c
270 struct work_struct tq; member
457 schedule_work(&lk->tq); in lkkbd_interrupt()
568 struct lkkbd *lk = container_of(work, struct lkkbd, tq); in lkkbd_reinit()
623 INIT_WORK(&lk->tq, lkkbd_reinit); in lkkbd_connect()
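
Both keyboard drivers use the same deferral pattern: the interrupt handler only calls schedule_work() on the tq work item, and the heavyweight re-initialisation runs later in process context, where sleeping is allowed. A kernel-style sketch with illustrative names:

    #include <linux/workqueue.h>

    struct demo_kbd {
            struct work_struct tq;
            /* ... serio port, input device, state ... */
    };

    static void demo_reinit(struct work_struct *work)
    {
            struct demo_kbd *kbd = container_of(work, struct demo_kbd, tq);

            (void)kbd; /* slow re-initialisation; safe to sleep here */
    }

    static void demo_setup(struct demo_kbd *kbd)
    {
            INIT_WORK(&kbd->tq, demo_reinit);
            /* IRQ handler: schedule_work(&kbd->tq);    */
            /* teardown:    cancel_work_sync(&kbd->tq); */
    }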
/drivers/gpu/drm/i915/gem/selftests/
i915_gem_context.c
656 struct i915_request *tq[5] = {}; in igt_ctx_exec() local
714 err = throttle(ce, tq, ARRAY_SIZE(tq)); in igt_ctx_exec()
749 throttle_release(tq, ARRAY_SIZE(tq)); in igt_ctx_exec()
766 struct i915_request *tq[5] = {}; in igt_shared_ctx_exec() local
852 err = throttle(ce, tq, ARRAY_SIZE(tq)); in igt_shared_ctx_exec()
888 throttle_release(tq, ARRAY_SIZE(tq)); in igt_shared_ctx_exec()
1355 struct i915_request *tq[5] = {}; in igt_ctx_readonly() local
1430 err = throttle(ce, tq, ARRAY_SIZE(tq)); in igt_ctx_readonly()
1466 throttle_release(tq, ARRAY_SIZE(tq)); in igt_ctx_readonly()
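
In the i915 selftests, tq[5] appears to act as a small window of in-flight requests that throttle() refills and throttle_release() drops. A generic sliding-window throttle sketch under that assumption; demo_wait()/demo_put() are stand-ins, not the i915 helpers:

    #define DEMO_WINDOW 5

    struct demo_req { int done; };

    static int  demo_wait(struct demo_req *rq) { (void)rq; return 0; }
    static void demo_put(struct demo_req *rq)  { (void)rq; }

    /* Block on the oldest outstanding item, slide the window,
     * and admit the newest one.
     */
    static int demo_throttle(struct demo_req *w[DEMO_WINDOW],
                             struct demo_req *newest)
    {
            int err = 0, i;

            if (w[0]) {
                    err = demo_wait(w[0]);
                    demo_put(w[0]);
            }
            for (i = 0; i < DEMO_WINDOW - 1; i++)
                    w[i] = w[i + 1];
            w[DEMO_WINDOW - 1] = newest;
            return err;
    }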
/drivers/net/can/dev/
dev.c
204 bt->tq = (u32)v64; in can_calc_bittiming()
258 brp64 = (u64)priv->clock.freq * (u64)bt->tq; in can_fixup_bittiming()
309 if (!bt->tq && bt->bitrate && btc) in can_get_bittiming()
311 else if (bt->tq && !bt->bitrate && btc) in can_get_bittiming()
313 else if (!bt->tq && bt->bitrate && bitrate_const) in can_get_bittiming()
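
The CAN core treats tq (the time quantum in nanoseconds) and bitrate as alternative inputs: the brp64 line above multiplies the clock frequency by tq before dividing by 10^9 to get the bit-rate prescaler, and the bit rate follows from the number of quanta per bit. A runnable arithmetic sketch:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
            uint64_t clock_freq = 8000000;   /* 8 MHz CAN clock          */
            uint64_t tq_ns      = 125;       /* requested time quantum   */
            uint64_t quanta     = 8;         /* quanta per bit (example) */
            uint64_t brp = clock_freq * tq_ns / 1000000000ULL;

            printf("brp = %llu, bitrate = %llu bit/s\n",
                   (unsigned long long)brp,
                   (unsigned long long)(1000000000ULL / (tq_ns * quanta)));
            return 0; /* brp = 1, bitrate = 1000000 bit/s */
    }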
/drivers/net/can/usb/
ucan.c
140 __le32 tq; /* Time quanta (TQ) in nanoseconds */ member
1249 cmd_set_bittiming->tq = cpu_to_le32(up->can.bittiming.tq); in ucan_set_bittiming()
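
ucan stores tq little-endian in its USB command buffer, hence the cpu_to_le32() conversion. The userspace equivalent is htole32() from glibc's <endian.h> (assumed here; a no-op on little-endian hosts):

    #include <endian.h>
    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
            uint32_t tq_ns = 125;
            uint32_t wire  = htole32(tq_ns); /* fixed wire byte order */

            printf("0x%08x\n", wire);
            return 0;
    }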
/drivers/net/can/
Kconfig
71 arguments "tq", "prop_seg", "phase_seg1", "phase_seg2" and "sjw".
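
The netlink arguments the Kconfig text names map directly onto struct can_bittiming. A hedged userspace sketch using libsocketcan (assuming its can_set_bittiming() API is available; run as root with the interface down):

    #include <libsocketcan.h>

    int demo_configure(void)
    {
            struct can_bittiming bt = {
                    .tq         = 125, /* ns */
                    .prop_seg   = 6,
                    .phase_seg1 = 7,
                    .phase_seg2 = 2,
                    .sjw        = 1,
            };

            return can_set_bittiming("can0", &bt);
    }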
/drivers/perf/
xgene_pmu.c
300 XGENE_PMU_EVENT_ATTR(tq-full, 0x08),
403 XGENE_PMU_EVENT_ATTR(tq-bank-conflict-issue-stall, 0x0b),
404 XGENE_PMU_EVENT_ATTR(tq-full, 0x0c),
/drivers/tty/ipwireless/
hardware.c
1735 struct ipw_tx_packet *tp, *tq; in ipwireless_hardware_free() local
1745 list_for_each_entry_safe(tp, tq, &hw->tx_queue[i], queue) { in ipwireless_hardware_free()
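
Here tq is simply the lookahead cursor of list_for_each_entry_safe(), which lets ipwireless_hardware_free() delete and free the current entry mid-walk. A kernel-style sketch with illustrative structure names:

    #include <linux/list.h>
    #include <linux/slab.h>

    struct demo_packet {
            struct list_head queue;
    };

    static void demo_free_all(struct list_head *head)
    {
            struct demo_packet *tp, *tq;

            /* 'tq' already points at the next node, so freeing 'tp' is safe */
            list_for_each_entry_safe(tp, tq, head, queue) {
                    list_del(&tp->queue);
                    kfree(tp);
            }
    }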
/drivers/net/ethernet/chelsio/cxgb4/
sge.c
2059 struct sge_txq *tq = &txq->q; in cxgb4_vf_eth_xmit() local
2067 if (unlikely((void *)sgl == (void *)tq->stat)) { in cxgb4_vf_eth_xmit()
2068 sgl = (void *)tq->desc; in cxgb4_vf_eth_xmit()
2069 end = (void *)((void *)tq->desc + in cxgb4_vf_eth_xmit()
2070 ((void *)end - (void *)tq->stat)); in cxgb4_vf_eth_xmit()
2073 cxgb4_write_sgl(skb, tq, sgl, end, 0, sgl_sdesc->addr); in cxgb4_vf_eth_xmit()
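
Both cxgb4 and cxgb4vf place a status page (tq->stat) at the end of the descriptor ring; when an SGL write would land on it, the pointer wraps back to tq->desc and 'end' is shifted by the same amount. A plain-pointer sketch of that wrap arithmetic with made-up sizes:

    #include <stdio.h>

    int main(void)
    {
            char ring[128];
            char *desc = ring;       /* start of the descriptor ring */
            char *stat = ring + 56;  /* "status page" sentinel       */
            char *sgl  = stat;       /* write pointer hit the end    */
            char *end  = stat + 24;  /* 24 bytes extend past it      */

            if (sgl == stat) {       /* same test as the driver's    */
                    sgl = desc;
                    end = desc + (end - stat);
            }
            printf("sgl at offset %td, end at %td\n", sgl - ring, end - ring);
            return 0; /* sgl at offset 0, end at 24 */
    }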