Lines matching refs:txq
References to the identifier txq in the Marvell mvpp2 Ethernet driver. Each hit gives the source line number, the matching code fragment, and the enclosing function; a trailing "argument" or "local" notes how txq is used at that site.
224 unsigned int txq) in mvpp2_txdesc_txq_set() argument
227 tx_desc->pp21.phys_txq = txq; in mvpp2_txdesc_txq_set()
229 tx_desc->pp22.phys_txq = txq; in mvpp2_txdesc_txq_set()
344 static inline int mvpp2_txq_phys(int port, int txq) in mvpp2_txq_phys() argument
346 return (MVPP2_MAX_TCONT + port) * MVPP2_MAX_TXQ + txq; in mvpp2_txq_phys()
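The hit at line 346 spells out the whole port-to-physical queue mapping: a port's logical queues are laid out after the TCONT queues, MVPP2_MAX_TXQ entries per port. Below is a minimal standalone sketch of that formula; the constant values and the MAX_TCONT/MAX_TXQ names are illustrative assumptions, not the driver's definitions.

/* Sketch only: models the numbering formula quoted at line 346.
 * The two constant values are assumed for illustration. */
#include <stdio.h>

#define MAX_TCONT 6    /* assumed number of TCONT queue groups skipped first */
#define MAX_TXQ   8    /* assumed Tx queues per group */

static int txq_phys(int port, int txq)
{
        return (MAX_TCONT + port) * MAX_TXQ + txq;
}

int main(void)
{
        printf("port 1, logical txq 3 -> physical txq %d\n", txq_phys(1, 3));
        return 0;
}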
2025 struct mvpp2_tx_queue *txq = port->txqs[queue]; in mvpp2_egress_enable() local
2027 if (txq->descs) in mvpp2_egress_enable()
2131 mvpp2_txq_next_desc_get(struct mvpp2_tx_queue *txq) in mvpp2_txq_next_desc_get() argument
2133 int tx_desc = txq->next_desc_to_proc; in mvpp2_txq_next_desc_get()
2135 txq->next_desc_to_proc = MVPP2_QUEUE_NEXT_DESC(txq, tx_desc); in mvpp2_txq_next_desc_get()
2136 return txq->descs + tx_desc; in mvpp2_txq_next_desc_get()
2183 struct mvpp2_tx_queue *txq, int num) in mvpp2_txq_alloc_reserved_desc() argument
2189 val = (txq->id << MVPP2_TXQ_RSVD_REQ_Q_OFFSET) | num; in mvpp2_txq_alloc_reserved_desc()
2201 struct mvpp2_tx_queue *txq, in mvpp2_txq_reserved_desc_num_proc() argument
2220 txq_pcpu_aux = per_cpu_ptr(txq->pcpu, thread); in mvpp2_txq_reserved_desc_num_proc()
2229 (txq->size - (MVPP2_MAX_THREADS * MVPP2_CPU_DESC_CHUNK))) in mvpp2_txq_reserved_desc_num_proc()
2232 txq_pcpu->reserved_num += mvpp2_txq_alloc_reserved_desc(port, txq, req); in mvpp2_txq_reserved_desc_num_proc()
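Lines 2183-2232 hint at how descriptors are handed out: each thread keeps a private reserve (reserved_num) and tops it up in whole chunks, while the check at line 2229 keeps a chunk of headroom per thread. The sketch below is a user-space model of that accounting under assumed constants (NTHREADS, DESC_CHUNK) and an assumed top-up policy; it is not the driver's code.

/* Sketch: per-thread reservation model implied by lines 2183-2232. */
#include <stdio.h>

#define NTHREADS    4           /* assumed */
#define DESC_CHUNK  64          /* assumed per-thread allocation unit */

struct txq_model {
        int size;                       /* total ring descriptors     */
        int reserved[NTHREADS];         /* per-thread private reserve */
        int in_flight[NTHREADS];        /* per-thread used (count)    */
};

/* Try to guarantee 'num' descriptors to 'thread'; returns 0 on success. */
static int reserve_desc(struct txq_model *q, int thread, int num)
{
        int used = 0, req, t;

        if (q->reserved[thread] >= num)
                return 0;

        for (t = 0; t < NTHREADS; t++)
                used += q->in_flight[t] + q->reserved[t];

        req = num - q->reserved[thread];
        if (req < DESC_CHUNK)
                req = DESC_CHUNK;

        /* Keep a chunk of headroom per thread, in the spirit of the
         * size - (MVPP2_MAX_THREADS * MVPP2_CPU_DESC_CHUNK) check at 2229. */
        if (used + req > q->size - NTHREADS * DESC_CHUNK)
                return -1;

        q->reserved[thread] += req;
        return 0;
}

int main(void)
{
        struct txq_model q = { .size = 1024 };

        printf("reserve 16 for thread 0: %d\n", reserve_desc(&q, 0, 16));
        printf("thread 0 now holds %d reserved descriptors\n", q.reserved[0]);
        return 0;
}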
2243 static void mvpp2_txq_desc_put(struct mvpp2_tx_queue *txq) in mvpp2_txq_desc_put() argument
2245 if (txq->next_desc_to_proc == 0) in mvpp2_txq_desc_put()
2246 txq->next_desc_to_proc = txq->last_desc - 1; in mvpp2_txq_desc_put()
2248 txq->next_desc_to_proc--; in mvpp2_txq_desc_put()
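Taken together, lines 2133-2136 and 2245-2248 describe the software index into the descriptor ring: "get" hands out the slot at next_desc_to_proc and advances, while "put" rolls the index back when a slot has to be released again (see the calls at 3346, 3922 and 4041 below, all on error paths). A standalone model of that index arithmetic follows; the wrap rule for the advance is assumed, the rewind target is taken from line 2246 as shown.

/* Sketch: user-space model of the next_desc_to_proc handling at
 * lines 2133-2136 (get) and 2245-2248 (put). */
#include <stdio.h>

struct ring_model {
        int last_desc;             /* ring size - 1 */
        int next_desc_to_proc;     /* next free slot index */
};

static int ring_next_desc_get(struct ring_model *r)
{
        int idx = r->next_desc_to_proc;

        /* assumed: advance by one, wrapping to the start of the ring */
        r->next_desc_to_proc = (idx < r->last_desc) ? idx + 1 : 0;
        return idx;
}

static void ring_desc_put(struct ring_model *r)
{
        if (r->next_desc_to_proc == 0)
                r->next_desc_to_proc = r->last_desc - 1;
        else
                r->next_desc_to_proc--;
}

int main(void)
{
        struct ring_model r = { .last_desc = 7, .next_desc_to_proc = 0 };

        printf("got slot %d, next is %d\n",
               ring_next_desc_get(&r), r.next_desc_to_proc);
        ring_desc_put(&r);    /* undo the get, e.g. after a mapping failure */
        printf("after put, next is %d\n", r.next_desc_to_proc);
        return 0;
}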
2293 struct mvpp2_tx_queue *txq) in mvpp2_txq_sent_desc_proc() argument
2300 MVPP2_TXQ_SENT_REG(txq->id)); in mvpp2_txq_sent_desc_proc()
2331 int txq, tx_port_num; in mvpp2_txp_max_tx_size_set() local
2360 for (txq = 0; txq < port->ntxqs; txq++) { in mvpp2_txp_max_tx_size_set()
2362 MVPP2_TXQ_SCHED_TOKEN_SIZE_REG(txq)); in mvpp2_txp_max_tx_size_set()
2370 MVPP2_TXQ_SCHED_TOKEN_SIZE_REG(txq), in mvpp2_txp_max_tx_size_set()
2396 struct mvpp2_tx_queue *txq) in mvpp2_tx_pkts_coal_set() argument
2401 if (txq->done_pkts_coal > MVPP2_TXQ_THRESH_MASK) in mvpp2_tx_pkts_coal_set()
2402 txq->done_pkts_coal = MVPP2_TXQ_THRESH_MASK; in mvpp2_tx_pkts_coal_set()
2404 val = (txq->done_pkts_coal << MVPP2_TXQ_THRESH_OFFSET); in mvpp2_tx_pkts_coal_set()
2407 mvpp2_thread_write(port->priv, thread, MVPP2_TXQ_NUM_REG, txq->id); in mvpp2_tx_pkts_coal_set()
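Lines 2401-2404 show the packet-coalescing threshold being clamped to the register field and shifted into place before it is written for the selected queue. A tiny sketch of that clamp-and-pack, with assumed mask and offset values:

/* Sketch: clamp-and-pack seen at lines 2401-2404. Mask and offset
 * values below are illustrative assumptions. */
#include <stdio.h>

#define TXQ_THRESH_MASK   0x3fffu   /* assumed field width */
#define TXQ_THRESH_OFFSET 16        /* assumed field position */

static unsigned int pack_done_pkts_coal(unsigned int pkts)
{
        if (pkts > TXQ_THRESH_MASK)
                pkts = TXQ_THRESH_MASK;            /* clamp, as at 2401-2402 */
        return pkts << TXQ_THRESH_OFFSET;          /* pack, as at 2404 */
}

int main(void)
{
        printf("register value: 0x%08x\n", pack_done_pkts_coal(64));
        return 0;
}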
2466 struct mvpp2_tx_queue *txq, in mvpp2_txq_bufs_free() argument
2506 static void mvpp2_txq_done(struct mvpp2_port *port, struct mvpp2_tx_queue *txq, in mvpp2_txq_done() argument
2509 struct netdev_queue *nq = netdev_get_tx_queue(port->dev, txq->log_id); in mvpp2_txq_done()
2515 tx_done = mvpp2_txq_sent_desc_proc(port, txq); in mvpp2_txq_done()
2518 mvpp2_txq_bufs_free(port, txq, txq_pcpu, tx_done); in mvpp2_txq_done()
2530 struct mvpp2_tx_queue *txq; in mvpp2_tx_done() local
2535 txq = mvpp2_get_tx_queue(port, cause); in mvpp2_tx_done()
2536 if (!txq) in mvpp2_tx_done()
2539 txq_pcpu = per_cpu_ptr(txq->pcpu, thread); in mvpp2_tx_done()
2542 mvpp2_txq_done(port, txq, txq_pcpu); in mvpp2_tx_done()
2546 cause &= ~(1 << txq->log_id); in mvpp2_tx_done()
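Lines 2530-2546 outline the tx-done loop: the cause bitmap names the logical queues with completed work, and each bit is cleared once its queue has been reaped. A standalone model of that walk; which pending bit is taken first is not visible in the hits and is assumed here.

/* Sketch: cause-bitmap walk implied by lines 2530-2546. */
#include <stdio.h>

static void reap_txq(unsigned int log_id)
{
        printf("tx-done on logical queue %u\n", log_id);
}

static void tx_done(unsigned int cause)
{
        while (cause) {
                unsigned int log_id = 0;

                while (!(cause & (1u << log_id)))   /* find a pending queue */
                        log_id++;

                reap_txq(log_id);
                cause &= ~(1u << log_id);           /* mirrors line 2546 */
        }
}

int main(void)
{
        tx_done(0x5);   /* queues 0 and 2 pending */
        return 0;
}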
2736 struct mvpp2_tx_queue *txq) in mvpp2_txq_init() argument
2743 txq->size = port->tx_ring_size; in mvpp2_txq_init()
2746 txq->descs = dma_alloc_coherent(port->dev->dev.parent, in mvpp2_txq_init()
2747 txq->size * MVPP2_DESC_ALIGNED_SIZE, in mvpp2_txq_init()
2748 &txq->descs_dma, GFP_KERNEL); in mvpp2_txq_init()
2749 if (!txq->descs) in mvpp2_txq_init()
2752 txq->last_desc = txq->size - 1; in mvpp2_txq_init()
2756 mvpp2_thread_write(port->priv, thread, MVPP2_TXQ_NUM_REG, txq->id); in mvpp2_txq_init()
2758 txq->descs_dma); in mvpp2_txq_init()
2760 txq->size & MVPP2_TXQ_DESC_SIZE_MASK); in mvpp2_txq_init()
2763 txq->id << MVPP2_TXQ_RSVD_CLR_OFFSET); in mvpp2_txq_init()
2775 (txq->log_id * desc_per_txq); in mvpp2_txq_init()
2786 val = mvpp2_read(port->priv, MVPP2_TXQ_SCHED_REFILL_REG(txq->log_id)); in mvpp2_txq_init()
2790 mvpp2_write(port->priv, MVPP2_TXQ_SCHED_REFILL_REG(txq->log_id), val); in mvpp2_txq_init()
2793 mvpp2_write(port->priv, MVPP2_TXQ_SCHED_TOKEN_SIZE_REG(txq->log_id), in mvpp2_txq_init()
2797 txq_pcpu = per_cpu_ptr(txq->pcpu, thread); in mvpp2_txq_init()
2798 txq_pcpu->size = txq->size; in mvpp2_txq_init()
2811 txq_pcpu->stop_threshold = txq->size - MVPP2_MAX_SKB_DESCS; in mvpp2_txq_init()
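Line 2811 sets the per-thread stop threshold to the ring size minus the worst-case number of descriptors one skb may need, so the netdev queue can be stopped before a transmit could fail for lack of slots. A small model of that bookkeeping, with an assumed value for the per-skb maximum:

/* Sketch: per-thread sizing set up at lines 2798 and 2811.
 * MAX_SKB_DESCS below is an assumed value, not the driver's. */
#include <stdbool.h>
#include <stdio.h>

#define MAX_SKB_DESCS 21   /* assumed worst-case descriptors per skb */

struct txq_pcpu_model {
        int size;             /* ring size, copied from the queue */
        int count;            /* descriptors currently in flight  */
        int stop_threshold;
};

static void txq_pcpu_init(struct txq_pcpu_model *p, int ring_size)
{
        p->size = ring_size;
        p->count = 0;
        p->stop_threshold = ring_size - MAX_SKB_DESCS;
}

static bool must_stop(const struct txq_pcpu_model *p)
{
        return p->count >= p->stop_threshold;
}

int main(void)
{
        struct txq_pcpu_model p;

        txq_pcpu_init(&p, 1024);
        p.count = 1010;
        printf("stop the netdev queue: %s\n", must_stop(&p) ? "yes" : "no");
        return 0;
}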
2828 struct mvpp2_tx_queue *txq) in mvpp2_txq_deinit() argument
2834 txq_pcpu = per_cpu_ptr(txq->pcpu, thread); in mvpp2_txq_deinit()
2846 if (txq->descs) in mvpp2_txq_deinit()
2848 txq->size * MVPP2_DESC_ALIGNED_SIZE, in mvpp2_txq_deinit()
2849 txq->descs, txq->descs_dma); in mvpp2_txq_deinit()
2851 txq->descs = NULL; in mvpp2_txq_deinit()
2852 txq->last_desc = 0; in mvpp2_txq_deinit()
2853 txq->next_desc_to_proc = 0; in mvpp2_txq_deinit()
2854 txq->descs_dma = 0; in mvpp2_txq_deinit()
2857 mvpp2_write(port->priv, MVPP2_TXQ_SCHED_TOKEN_CNTR_REG(txq->log_id), 0); in mvpp2_txq_deinit()
2861 mvpp2_thread_write(port->priv, thread, MVPP2_TXQ_NUM_REG, txq->id); in mvpp2_txq_deinit()
2868 static void mvpp2_txq_clean(struct mvpp2_port *port, struct mvpp2_tx_queue *txq) in mvpp2_txq_clean() argument
2875 mvpp2_thread_write(port->priv, thread, MVPP2_TXQ_NUM_REG, txq->id); in mvpp2_txq_clean()
2888 port->id, txq->log_id); in mvpp2_txq_clean()
2904 txq_pcpu = per_cpu_ptr(txq->pcpu, thread); in mvpp2_txq_clean()
2907 mvpp2_txq_bufs_free(port, txq, txq_pcpu, txq_pcpu->count); in mvpp2_txq_clean()
2919 struct mvpp2_tx_queue *txq; in mvpp2_cleanup_txqs() local
2930 txq = port->txqs[queue]; in mvpp2_cleanup_txqs()
2931 mvpp2_txq_clean(port, txq); in mvpp2_cleanup_txqs()
2932 mvpp2_txq_deinit(port, txq); in mvpp2_cleanup_txqs()
2970 struct mvpp2_tx_queue *txq; in mvpp2_setup_txqs() local
2974 txq = port->txqs[queue]; in mvpp2_setup_txqs()
2975 err = mvpp2_txq_init(port, txq); in mvpp2_setup_txqs()
2987 txq = port->txqs[queue]; in mvpp2_setup_txqs()
2988 mvpp2_tx_pkts_coal_set(port, txq); in mvpp2_setup_txqs()
3285 struct mvpp2_tx_queue *txq; in mvpp2_xdp_finish_tx() local
3288 txq = port->txqs[txq_id]; in mvpp2_xdp_finish_tx()
3289 txq_pcpu = per_cpu_ptr(txq->pcpu, thread); in mvpp2_xdp_finish_tx()
3305 if (!port->has_tx_irqs && txq_pcpu->count >= txq->done_pkts_coal) in mvpp2_xdp_finish_tx()
3306 mvpp2_txq_done(port, txq, txq_pcpu); in mvpp2_xdp_finish_tx()
3320 struct mvpp2_tx_queue *txq; in mvpp2_xdp_submit_frame() local
3324 txq = port->txqs[txq_id]; in mvpp2_xdp_submit_frame()
3325 txq_pcpu = per_cpu_ptr(txq->pcpu, thread); in mvpp2_xdp_submit_frame()
3330 mvpp2_txq_reserved_desc_num_proc(port, txq, txq_pcpu, 1)) { in mvpp2_xdp_submit_frame()
3337 mvpp2_txdesc_txq_set(port, tx_desc, txq->id); in mvpp2_xdp_submit_frame()
3346 mvpp2_txq_desc_put(txq); in mvpp2_xdp_submit_frame()
3720 tx_desc_unmap_put(struct mvpp2_port *port, struct mvpp2_tx_queue *txq, in tx_desc_unmap_put() argument
3724 struct mvpp2_txq_pcpu *txq_pcpu = per_cpu_ptr(txq->pcpu, thread); in tx_desc_unmap_put()
3733 mvpp2_txq_desc_put(txq); in tx_desc_unmap_put()
3827 struct mvpp2_tx_queue *txq) in mvpp2_tx_frag_process() argument
3830 struct mvpp2_txq_pcpu *txq_pcpu = per_cpu_ptr(txq->pcpu, thread); in mvpp2_tx_frag_process()
3841 mvpp2_txdesc_txq_set(port, tx_desc, txq->id); in mvpp2_tx_frag_process()
3848 mvpp2_txq_desc_put(txq); in mvpp2_tx_frag_process()
3872 tx_desc = txq->descs + i; in mvpp2_tx_frag_process()
3873 tx_desc_unmap_put(port, txq, tx_desc); in mvpp2_tx_frag_process()
3881 struct mvpp2_tx_queue *txq, in mvpp2_tso_put_hdr() argument
3891 mvpp2_txdesc_txq_set(port, tx_desc, txq->id); in mvpp2_tso_put_hdr()
3906 struct mvpp2_tx_queue *txq, in mvpp2_tso_put_data() argument
3916 mvpp2_txdesc_txq_set(port, tx_desc, txq->id); in mvpp2_tso_put_data()
3922 mvpp2_txq_desc_put(txq); in mvpp2_tso_put_data()
3943 struct mvpp2_tx_queue *txq, in mvpp2_tx_tso() argument
3953 mvpp2_txq_reserved_desc_num_proc(port, txq, txq_pcpu, in mvpp2_tx_tso()
3969 mvpp2_tso_put_hdr(skb, dev, txq, aggr_txq, txq_pcpu, hdr_sz); in mvpp2_tx_tso()
3976 if (mvpp2_tso_put_data(skb, dev, &tso, txq, aggr_txq, in mvpp2_tx_tso()
3987 struct mvpp2_tx_desc *tx_desc = txq->descs + i; in mvpp2_tx_tso()
3988 tx_desc_unmap_put(port, txq, tx_desc); in mvpp2_tx_tso()
3997 struct mvpp2_tx_queue *txq, *aggr_txq; in mvpp2_tx() local
4010 txq = port->txqs[txq_id]; in mvpp2_tx()
4011 txq_pcpu = per_cpu_ptr(txq->pcpu, thread); in mvpp2_tx()
4018 frags = mvpp2_tx_tso(skb, dev, txq, aggr_txq, txq_pcpu); in mvpp2_tx()
4025 mvpp2_txq_reserved_desc_num_proc(port, txq, txq_pcpu, frags)) { in mvpp2_tx()
4035 mvpp2_txdesc_txq_set(port, tx_desc, txq->id); in mvpp2_tx()
4041 mvpp2_txq_desc_put(txq); in mvpp2_tx()
4062 if (mvpp2_tx_frag_process(port, skb, aggr_txq, txq)) { in mvpp2_tx()
4063 tx_desc_unmap_put(port, txq, tx_desc); in mvpp2_tx()
4094 if (!port->has_tx_irqs && txq_pcpu->count >= txq->done_pkts_coal) in mvpp2_tx()
4095 mvpp2_txq_done(port, txq, txq_pcpu); in mvpp2_tx()
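Lines 4094-4095 (and 3305-3306 above) show the polling fallback: when the port has no Tx interrupts, completions are reaped straight from the transmit path once a thread has queued done_pkts_coal packets since the last cleanup. A minimal sketch of that decision, using model struct names rather than driver symbols:

/* Sketch: the reap-from-xmit check quoted at 3305-3306 and 4094-4095. */
#include <stdbool.h>
#include <stdio.h>

struct coal_txq_model  { unsigned int done_pkts_coal; };
struct coal_pcpu_model { unsigned int count; };

static bool reap_from_xmit(bool has_tx_irqs,
                           const struct coal_txq_model *txq,
                           const struct coal_pcpu_model *pcpu)
{
        return !has_tx_irqs && pcpu->count >= txq->done_pkts_coal;
}

int main(void)
{
        struct coal_txq_model txq = { .done_pkts_coal = 64 };
        struct coal_pcpu_model pcpu = { .count = 80 };

        printf("reap now: %s\n",
               reap_from_xmit(false, &txq, &pcpu) ? "yes" : "no");
        return 0;
}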
5055 struct mvpp2_tx_queue *txq = port->txqs[queue]; in mvpp2_ethtool_set_coalesce() local
5057 txq->done_pkts_coal = c->tx_max_coalesced_frames; in mvpp2_ethtool_set_coalesce()
5060 mvpp2_tx_pkts_coal_set(port, txq); in mvpp2_ethtool_set_coalesce()
5589 struct mvpp2_tx_queue *txq; in mvpp2_port_init() local
5591 txq = devm_kzalloc(dev, sizeof(*txq), GFP_KERNEL); in mvpp2_port_init()
5592 if (!txq) { in mvpp2_port_init()
5597 txq->pcpu = alloc_percpu(struct mvpp2_txq_pcpu); in mvpp2_port_init()
5598 if (!txq->pcpu) { in mvpp2_port_init()
5603 txq->id = queue_phy_id; in mvpp2_port_init()
5604 txq->log_id = queue; in mvpp2_port_init()
5605 txq->done_pkts_coal = MVPP2_TXDONE_COAL_PKTS_THRESH; in mvpp2_port_init()
5607 txq_pcpu = per_cpu_ptr(txq->pcpu, thread); in mvpp2_port_init()
5611 port->txqs[queue] = txq; in mvpp2_port_init()