
Lines Matching refs:reg_idx

24 u16 reg_idx, pool; in ixgbe_cache_ring_dcb_sriov() local
36 reg_idx = vmdq->offset * __ALIGN_MASK(1, ~vmdq->mask); in ixgbe_cache_ring_dcb_sriov()
37 for (i = 0, pool = 0; i < adapter->num_rx_queues; i++, reg_idx++) { in ixgbe_cache_ring_dcb_sriov()
39 if ((reg_idx & ~vmdq->mask) >= tcs) { in ixgbe_cache_ring_dcb_sriov()
41 reg_idx = __ALIGN_MASK(reg_idx, ~vmdq->mask); in ixgbe_cache_ring_dcb_sriov()
43 adapter->rx_ring[i]->reg_idx = reg_idx; in ixgbe_cache_ring_dcb_sriov()
47 reg_idx = vmdq->offset * __ALIGN_MASK(1, ~vmdq->mask); in ixgbe_cache_ring_dcb_sriov()
48 for (i = 0; i < adapter->num_tx_queues; i++, reg_idx++) { in ixgbe_cache_ring_dcb_sriov()
50 if ((reg_idx & ~vmdq->mask) >= tcs) in ixgbe_cache_ring_dcb_sriov()
51 reg_idx = __ALIGN_MASK(reg_idx, ~vmdq->mask); in ixgbe_cache_ring_dcb_sriov()
52 adapter->tx_ring[i]->reg_idx = reg_idx; in ixgbe_cache_ring_dcb_sriov()
69 reg_idx = (vmdq->offset + vmdq->indices) * queues_per_pool; in ixgbe_cache_ring_dcb_sriov()
71 reg_idx = __ALIGN_MASK(reg_idx, ~vmdq->mask) + fcoe_tc; in ixgbe_cache_ring_dcb_sriov()
72 adapter->rx_ring[i]->reg_idx = reg_idx; in ixgbe_cache_ring_dcb_sriov()
74 reg_idx++; in ixgbe_cache_ring_dcb_sriov()
77 reg_idx = (vmdq->offset + vmdq->indices) * queues_per_pool; in ixgbe_cache_ring_dcb_sriov()
79 reg_idx = __ALIGN_MASK(reg_idx, ~vmdq->mask) + fcoe_tc; in ixgbe_cache_ring_dcb_sriov()
80 adapter->tx_ring[i]->reg_idx = reg_idx; in ixgbe_cache_ring_dcb_sriov()
81 reg_idx++; in ixgbe_cache_ring_dcb_sriov()
163 adapter->tx_ring[offset + i]->reg_idx = tx_idx; in ixgbe_cache_ring_dcb()
164 adapter->rx_ring[offset + i]->reg_idx = rx_idx; in ixgbe_cache_ring_dcb()
190 u16 reg_idx, pool; in ixgbe_cache_ring_sriov() local
199 reg_idx = vmdq->offset * __ALIGN_MASK(1, ~vmdq->mask); in ixgbe_cache_ring_sriov()
200 for (i = 0; i < adapter->num_rx_queues; i++, reg_idx++) { in ixgbe_cache_ring_sriov()
207 if ((reg_idx & ~vmdq->mask) >= rss->indices) { in ixgbe_cache_ring_sriov()
209 reg_idx = __ALIGN_MASK(reg_idx, ~vmdq->mask); in ixgbe_cache_ring_sriov()
211 adapter->rx_ring[i]->reg_idx = reg_idx; in ixgbe_cache_ring_sriov()
217 for (; i < adapter->num_rx_queues; i++, reg_idx++) { in ixgbe_cache_ring_sriov()
218 adapter->rx_ring[i]->reg_idx = reg_idx; in ixgbe_cache_ring_sriov()
223 reg_idx = vmdq->offset * __ALIGN_MASK(1, ~vmdq->mask); in ixgbe_cache_ring_sriov()
224 for (i = 0; i < adapter->num_tx_queues; i++, reg_idx++) { in ixgbe_cache_ring_sriov()
231 if ((reg_idx & rss->mask) >= rss->indices) in ixgbe_cache_ring_sriov()
232 reg_idx = __ALIGN_MASK(reg_idx, ~vmdq->mask); in ixgbe_cache_ring_sriov()
233 adapter->tx_ring[i]->reg_idx = reg_idx; in ixgbe_cache_ring_sriov()
238 for (; i < adapter->num_tx_queues; i++, reg_idx++) in ixgbe_cache_ring_sriov()
239 adapter->tx_ring[i]->reg_idx = reg_idx; in ixgbe_cache_ring_sriov()
255 int i, reg_idx; in ixgbe_cache_ring_rss() local
258 adapter->rx_ring[i]->reg_idx = i; in ixgbe_cache_ring_rss()
261 for (i = 0, reg_idx = 0; i < adapter->num_tx_queues; i++, reg_idx++) in ixgbe_cache_ring_rss()
262 adapter->tx_ring[i]->reg_idx = reg_idx; in ixgbe_cache_ring_rss()
263 for (i = 0; i < adapter->num_xdp_queues; i++, reg_idx++) in ixgbe_cache_ring_rss()
264 adapter->xdp_ring[i]->reg_idx = reg_idx; in ixgbe_cache_ring_rss()
283 adapter->rx_ring[0]->reg_idx = 0; in ixgbe_cache_ring_register()
284 adapter->tx_ring[0]->reg_idx = 0; in ixgbe_cache_ring_register()
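Every function in this listing seeds and advances reg_idx with the same __ALIGN_MASK() rounding, so a minimal standalone sketch of that arithmetic may help when reading the fragments. The macro body below has the same shape as the kernel's generic alignment helper; the pool size, offset, and names (pool_mask, vmdq_offset) are made-up illustrative values, not taken from the ixgbe driver or any real hardware configuration.

/* align_demo.c -- standalone illustration, not driver code */
#include <stdio.h>

/* Same shape as the kernel's alignment helper: round x up to the next
 * (mask + 1) boundary, where mask is "alignment - 1". */
#define __ALIGN_MASK(x, mask) (((x) + (mask)) & ~(mask))

int main(void)
{
	/* Hypothetical layout: 4 queue registers per VMDq pool, so the
	 * low-bit mask (the "~vmdq->mask" of the listing) is 4 - 1 = 3. */
	unsigned int pool_mask = 3;
	unsigned int vmdq_offset = 2;	/* hypothetical offset of the PF pools */

	/* "vmdq->offset * __ALIGN_MASK(1, ~vmdq->mask)": rounding 1 up to the
	 * pool size turns the pool offset into a starting register index. */
	unsigned int reg_idx = vmdq_offset * __ALIGN_MASK(1, pool_mask);
	printf("first reg_idx of pool %u: %u\n", vmdq_offset, reg_idx);	/* 8 */

	/* "reg_idx = __ALIGN_MASK(reg_idx, ~vmdq->mask)": once a pool's share
	 * of queues is used up, jump reg_idx to the next pool boundary. */
	reg_idx += 2;			/* pretend two queues were assigned */
	reg_idx = __ALIGN_MASK(reg_idx, pool_mask);
	printf("next pool starts at reg_idx: %u\n", reg_idx);	/* 12 */

	return 0;
}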