Searched refs:q_id (Results 1 – 25 of 45) sorted by relevance

/drivers/net/ethernet/huawei/hinic/
hinic_hw_eqs.c
33 HINIC_CSR_AEQ_CONS_IDX_ADDR((eq)->q_id) : \
34 HINIC_CSR_CEQ_CONS_IDX_ADDR((eq)->q_id))
37 HINIC_CSR_AEQ_PROD_IDX_ADDR((eq)->q_id) : \
38 HINIC_CSR_CEQ_PROD_IDX_ADDR((eq)->q_id))
41 HINIC_CSR_AEQ_HI_PHYS_ADDR_REG((eq)->q_id, pg_num) : \
42 HINIC_CSR_CEQ_HI_PHYS_ADDR_REG((eq)->q_id, pg_num))
45 HINIC_CSR_AEQ_LO_PHYS_ADDR_REG((eq)->q_id, pg_num) : \
46 HINIC_CSR_CEQ_LO_PHYS_ADDR_REG((eq)->q_id, pg_num))
80 container_of((eq) - (eq)->q_id, struct hinic_aeqs, aeq[0])
83 container_of((eq) - (eq)->q_id, struct hinic_ceqs, ceq[0])
[all …]
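
A note on the hinic_hw_eqs.c hits at source lines 80 and 83: each event queue stores its own array index in ->q_id, so subtracting that index from the element pointer lands on element 0 of the embedded array, and container_of() on element 0 recovers the enclosing hinic_aeqs/hinic_ceqs. A minimal userspace sketch of the same idiom (struct names here are illustrative, not the driver's):

    #include <stdio.h>
    #include <stddef.h>

    /* userspace stand-in for the kernel's container_of() */
    #define container_of(ptr, type, member) \
            ((type *)((char *)(ptr) - offsetof(type, member)))

    struct eq { int q_id; };

    struct eqs {
            int other_state;        /* fields before the array */
            struct eq eq[4];        /* embedded array of queues */
    };

    int main(void)
    {
            struct eqs parent;

            for (int i = 0; i < 4; i++)
                    parent.eq[i].q_id = i;

            struct eq *e = &parent.eq[2];
            /* e minus its own index is &parent.eq[0], so
             * container_of() on eq[0] yields the parent: */
            struct eqs *p = container_of(e - e->q_id, struct eqs, eq[0]);

            printf("%s\n", p == &parent ? "recovered" : "broken");
            return 0;
    }
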
hinic_hw_csr.h
88 #define HINIC_CSR_AEQ_HI_PHYS_ADDR_REG(q_id, pg_num) \ argument
89 (HINIC_CSR_AEQ_MTT_OFF(q_id) + \
92 #define HINIC_CSR_CEQ_HI_PHYS_ADDR_REG(q_id, pg_num) \ argument
93 (HINIC_CSR_CEQ_MTT_OFF(q_id) + \
96 #define HINIC_CSR_AEQ_LO_PHYS_ADDR_REG(q_id, pg_num) \ argument
97 (HINIC_CSR_AEQ_MTT_OFF(q_id) + \
100 #define HINIC_CSR_CEQ_LO_PHYS_ADDR_REG(q_id, pg_num) \ argument
101 (HINIC_CSR_CEQ_MTT_OFF(q_id) + \
hinic_hw_io.c
30 #define CI_ADDR(base_addr, q_id) ((base_addr) + \ argument
31 (q_id) * CI_Q_ADDR_SIZE)
132 base_qpn + qp->q_id); in write_sq_ctxts()
176 base_qpn + qp->q_id); in write_rq_ctxts()
272 struct hinic_qp *qp, int q_id, in init_qp() argument
281 qp->q_id = q_id; in init_qp()
283 err = hinic_wq_allocate(&func_to_io->wqs, &func_to_io->sq_wq[q_id], in init_qp()
291 err = hinic_wq_allocate(&func_to_io->wqs, &func_to_io->rq_wq[q_id], in init_qp()
306 func_to_io->sq_db[q_id] = db_base; in init_qp()
308 qp->sq.qid = q_id; in init_qp()
[all …]
hinic_tx.c
493 u16 prod_idx, q_id = skb->queue_mapping; in hinic_lb_xmit_frame() local
501 txq = &nic_dev->txqs[q_id]; in hinic_lb_xmit_frame()
513 netif_stop_subqueue(netdev, qp->q_id); in hinic_lb_xmit_frame()
517 netif_wake_subqueue(nic_dev->netdev, qp->q_id); in hinic_lb_xmit_frame()
536 netdev_txq = netdev_get_tx_queue(netdev, q_id); in hinic_lb_xmit_frame()
554 u16 prod_idx, q_id = skb->queue_mapping; in hinic_xmit_frame() local
562 txq = &nic_dev->txqs[q_id]; in hinic_xmit_frame()
594 netif_stop_subqueue(netdev, qp->q_id); in hinic_xmit_frame()
601 netif_wake_subqueue(nic_dev->netdev, qp->q_id); in hinic_xmit_frame()
625 netdev_txq = netdev_get_tx_queue(netdev, q_id); in hinic_xmit_frame()
[all …]
hinic_hw_cmdq.h
118 u32 q_id; member
181 enum hinic_set_arm_qtype q_type, u32 q_id);
hinic_rx.c
421 skb_record_rx_queue(skb, qp->q_id); in rxq_recv()
531 intr_coal = &nic_dev->rx_intr_coalesce[qp->q_id]; in rx_request_irq()
548 cpumask_set_cpu(qp->q_id % num_online_cpus(), &rq->affinity_mask); in rx_request_irq()
593 "%s_rxq%d", netdev->name, qp->q_id); in hinic_init_rxq()
hinic_main.c
818 u16 num_sqs, q_id; in hinic_tx_timeout() local
824 for (q_id = 0; q_id < num_sqs; q_id++) { in hinic_tx_timeout()
825 if (!netif_xmit_stopped(netdev_get_tx_queue(netdev, q_id))) in hinic_tx_timeout()
828 sq = hinic_hwdev_get_sq(nic_dev->hwdev, q_id); in hinic_tx_timeout()
833 q_id, sw_pi, hw_ci, sw_ci, in hinic_tx_timeout()
834 nic_dev->txqs[q_id].napi.state); in hinic_tx_timeout()
hinic_hw_qp.c
42 #define SQ_CTXT_OFFSET(max_sqs, max_rqs, q_id) \ argument
43 (((max_rqs) + (max_sqs)) * CTXT_RSVD + (q_id) * Q_CTXT_SIZE)
45 #define RQ_CTXT_OFFSET(max_sqs, max_rqs, q_id) \ argument
47 (max_sqs + (q_id)) * Q_CTXT_SIZE)
628 HINIC_SQ_DB_INFO_SET(qp->q_id, QID)); in sq_prepare_db()
hinic_hw_cmdq.c
513 enum hinic_set_arm_qtype q_type, u32 q_id) in hinic_set_arm_bit() argument
522 arm_bit.q_id = q_id; in hinic_set_arm_bit()
526 dev_err(&pdev->dev, "Failed to set arm for qid %d\n", q_id); in hinic_set_arm_bit()
hinic_ethtool.c
676 static int set_queue_coalesce(struct hinic_dev *nic_dev, u16 q_id, in set_queue_coalesce() argument
686 intr_coal = set_rx_coal ? &nic_dev->rx_intr_coalesce[q_id] : in set_queue_coalesce()
687 &nic_dev->tx_intr_coalesce[q_id]; in set_queue_coalesce()
696 q_id >= nic_dev->num_qps) in set_queue_coalesce()
699 msix_idx = set_rx_coal ? nic_dev->rxqs[q_id].rq->msix_entry : in set_queue_coalesce()
700 nic_dev->txqs[q_id].sq->msix_entry; in set_queue_coalesce()
710 set_rx_coal ? "rx" : "tx", q_id); in set_queue_coalesce()
hinic_hw_qp.h
125 u16 q_id; member
hinic_hw_eqs.h
177 int q_id; member
/drivers/net/ethernet/intel/ice/
ice_base.c
660 u16 q_id, q_base; in ice_vsi_map_rings_to_vectors() local
670 for (q_id = q_base; q_id < (q_base + tx_rings_per_v); q_id++) { in ice_vsi_map_rings_to_vectors()
671 struct ice_ring *tx_ring = vsi->tx_rings[q_id]; in ice_vsi_map_rings_to_vectors()
687 for (q_id = q_base; q_id < (q_base + rx_rings_per_v); q_id++) { in ice_vsi_map_rings_to_vectors()
688 struct ice_ring *rx_ring = vsi->rx_rings[q_id]; in ice_vsi_map_rings_to_vectors()
902 &txq_meta->q_id, &txq_meta->q_teid, rst_src, in ice_vsi_stop_tx_ring()
943 txq_meta->q_id = ring->reg_idx; in ice_fill_txq_meta()
ice_idc.c
132 u16 q_id; in ice_del_rdma_qset() local
143 q_id = qset->qs_handle; in ice_del_rdma_qset()
148 return ice_dis_vsi_rdma_qset(vsi->port_info, 1, &teid, &q_id); in ice_del_rdma_qset()
ice_switch.h
106 u16 q_id:11; member
ice_common.c
3799 u16 item_size = struct_size(item, q_id, item->num_qs); in ice_aq_dis_lan_txq()
3821 le16_to_cpu(qg_list[0].q_id[0]), in ice_aq_dis_lan_txq()
4273 buf_size = struct_size(qg_list, q_id, 1); in ice_dis_vsi_txq()
4299 qg_list->q_id[0] = cpu_to_le16(q_ids[i]); in ice_dis_vsi_txq()
4480 u16 *q_id) in ice_dis_vsi_rdma_qset() argument
4493 qg_size = struct_size(qg_list, q_id, 1); in ice_dis_vsi_rdma_qset()
4509 qg_list->q_id[0] = in ice_dis_vsi_rdma_qset()
4510 cpu_to_le16(q_id[i] | in ice_dis_vsi_rdma_qset()
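
The ice_common.c hits (source lines 3799, 4273, 4493) size their firmware command buffers with struct_size(), the <linux/overflow.h> helper for structures that end in a flexible array member: it returns sizeof(*p) plus n trailing elements, with overflow checking in the kernel version. A hedged userspace equivalent, using a reduced stand-in for the queue-group buffer (field names are illustrative):

    #include <stdio.h>
    #include <stdlib.h>
    #include <stdint.h>

    /* simplified stand-in for the kernel's struct_size(); the real
     * helper also saturates instead of wrapping on overflow */
    #define struct_size(p, member, n) \
            (sizeof(*(p)) + (n) * sizeof((p)->member[0]))

    /* reduced shape of the disable-Tx-queue-group buffer: a count
     * followed by a flexible array of queue IDs */
    struct qg_list {
            uint8_t num_qs;
            uint16_t q_id[];        /* flexible array member */
    };

    int main(void)
    {
            struct qg_list *qg;
            size_t sz = struct_size(qg, q_id, 1);   /* header + 1 ID */

            qg = calloc(1, sz);
            if (!qg)
                    return 1;
            qg->num_qs = 1;
            qg->q_id[0] = 42;       /* driver: cpu_to_le16(q_ids[i]) */

            printf("buffer size: %zu bytes\n", sz);
            free(qg);
            return 0;
    }
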
/drivers/net/wireless/intel/iwlwifi/pcie/
tx.c
87 txq_id != trans->txqs.cmd.q_id && in iwl_pcie_txq_inc_wr_ptr()
217 if (txq_id != trans->txqs.cmd.q_id) { in iwl_pcie_txq_unmap()
229 txq_id == trans->txqs.cmd.q_id) in iwl_pcie_txq_unmap()
265 if (txq_id == trans->txqs.cmd.q_id) in iwl_pcie_txq_free()
328 iwl_trans_ac_txq_enable(trans, trans->txqs.cmd.q_id, in iwl_pcie_tx_start()
536 bool cmd_queue = (txq_id == trans->txqs.cmd.q_id); in iwl_pcie_tx_alloc()
590 bool cmd_queue = (txq_id == trans->txqs.cmd.q_id); in iwl_pcie_tx_init()
752 if (txq_id == trans->txqs.cmd.q_id && in iwl_trans_pcie_txq_enable()
760 if (txq_id != trans->txqs.cmd.q_id) in iwl_trans_pcie_txq_enable()
830 if (txq_id == trans->txqs.cmd.q_id && in iwl_trans_pcie_txq_enable()
[all …]
tx-gen2.c
31 struct iwl_txq *txq = trans->txqs.txq[trans->txqs.cmd.q_id]; in iwl_pcie_gen2_enqueue_hcmd()
143 cpu_to_le16(QUEUE_TO_SEQ(trans->txqs.cmd.q_id) | in iwl_pcie_gen2_enqueue_hcmd()
191 cmd_size, txq->write_ptr, idx, trans->txqs.cmd.q_id); in iwl_pcie_gen2_enqueue_hcmd()
ctxt-info.c
221 cpu_to_le64(trans->txqs.txq[trans->txqs.cmd.q_id]->dma_addr); in iwl_pcie_ctxt_info_init()
/drivers/crypto/hisilicon/sec/
sec_drv.c
688 int q_id; in sec_isr_handle() local
693 q_id = outorder_msg->data & SEC_OUT_BD_INFO_Q_ID_M; in sec_isr_handle()
694 msg = msg_ring->vaddr + q_id; in sec_isr_handle()
701 set_bit(q_id, queue->unprocessed); in sec_isr_handle()
702 if (q_id == queue->expected) in sec_isr_handle()
719 q_id = outorder_msg->data & SEC_OUT_BD_INFO_Q_ID_M; in sec_isr_handle()
720 msg = msg_ring->vaddr + q_id; in sec_isr_handle()
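
The sec_drv.c hits (source lines 701 and 702) outline how the SEC engine handles completions that arrive out of order: each finished descriptor's q_id is flagged in an unprocessed bitmap, and results are consumed in ring order only once queue->expected has completed. A minimal illustration of that reordering logic (the ring size and names are assumptions, not the driver's):

    #include <stdbool.h>
    #include <stdio.h>

    #define RING_SIZE 8

    static bool unprocessed[RING_SIZE];     /* stand-in for the bitmap */
    static int expected;                    /* next slot owed in order */

    /* called per completion with the finished descriptor's slot */
    static void complete(int q_id)
    {
            unprocessed[q_id] = true;
            /* drain every slot that is now ready, in ring order */
            while (unprocessed[expected]) {
                    printf("consume slot %d\n", expected);
                    unprocessed[expected] = false;
                    expected = (expected + 1) % RING_SIZE;
            }
    }

    int main(void)
    {
            /* out-of-order arrival: slot 1 is held until 0 lands */
            complete(1);
            complete(0);    /* consumes 0, then the buffered 1 */
            complete(2);
            return 0;
    }
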
/drivers/net/ethernet/hisilicon/hns3/hns3pf/
hclge_tm.h
254 int hclge_tm_get_q_to_qs_map(struct hclge_dev *hdev, u16 q_id, u16 *qset_id);
255 int hclge_tm_get_q_to_tc(struct hclge_dev *hdev, u16 q_id, u8 *tc_id);
hclge_tm.c
303 u16 q_id, u16 qs_id) in hclge_tm_q_to_qs_map_cfg() argument
314 map->nq_id = cpu_to_le16(q_id); in hclge_tm_q_to_qs_map_cfg()
1814 int hclge_tm_get_q_to_qs_map(struct hclge_dev *hdev, u16 q_id, u16 *qset_id) in hclge_tm_get_q_to_qs_map() argument
1824 map->nq_id = cpu_to_le16(q_id); in hclge_tm_get_q_to_qs_map()
1852 int hclge_tm_get_q_to_tc(struct hclge_dev *hdev, u16 q_id, u8 *tc_id) in hclge_tm_get_q_to_tc() argument
1862 tc->queue_id = cpu_to_le16(q_id); in hclge_tm_get_q_to_tc()
/drivers/net/hyperv/
hyperv_net.h
728 u16 q_id; member
739 u16 q_id; member
745 u16 q_id; member
790 u16 q_id; member
/drivers/net/wireless/intel/iwlwifi/queue/
tx.c
810 if (txq_id != trans->txqs.cmd.q_id) { in iwl_txq_gen2_unmap()
880 if (txq_id == trans->txqs.cmd.q_id) in iwl_txq_gen2_free()
1286 (txq_id == trans->txqs.cmd.q_id)); in iwl_txq_gen2_init()
1444 if (txq_id != trans->txqs.cmd.q_id) in iwl_txq_gen1_inval_byte_cnt_tbl()
1531 if (WARN_ON(txq_id == trans->txqs.cmd.q_id)) in iwl_txq_reclaim()
1714 struct iwl_txq *txq = trans->txqs.txq[trans->txqs.cmd.q_id]; in iwl_trans_txq_send_hcmd_sync()
/drivers/net/ethernet/hisilicon/hns/
hns_dsaf_main.c
250 u32 q_id, q_num_per_port; in hns_dsaf_mix_def_qid_cfg() local
256 for (i = 0, q_id = 0; i < DSAF_SERVICE_NW_NUM; i++) { in hns_dsaf_mix_def_qid_cfg()
259 0xff, 0, q_id); in hns_dsaf_mix_def_qid_cfg()
260 q_id += q_num_per_port; in hns_dsaf_mix_def_qid_cfg()
267 u32 q_id, q_num_per_port; in hns_dsaf_inner_qid_cfg() local
276 for (mac_id = 0, q_id = 0; mac_id < DSAF_SERVICE_NW_NUM; mac_id++) { in hns_dsaf_inner_qid_cfg()
281 q_id); in hns_dsaf_inner_qid_cfg()
282 q_id += q_num_per_port; in hns_dsaf_inner_qid_cfg()
