Lines Matching refs:reg_idx

48 	u16 reg_idx;  in ixgbe_cache_ring_dcb_sriov()  local
60 reg_idx = vmdq->offset * __ALIGN_MASK(1, ~vmdq->mask); in ixgbe_cache_ring_dcb_sriov()
61 for (i = 0; i < adapter->num_rx_queues; i++, reg_idx++) { in ixgbe_cache_ring_dcb_sriov()
63 if ((reg_idx & ~vmdq->mask) >= tcs) in ixgbe_cache_ring_dcb_sriov()
64 reg_idx = __ALIGN_MASK(reg_idx, ~vmdq->mask); in ixgbe_cache_ring_dcb_sriov()
65 adapter->rx_ring[i]->reg_idx = reg_idx; in ixgbe_cache_ring_dcb_sriov()
68 reg_idx = vmdq->offset * __ALIGN_MASK(1, ~vmdq->mask); in ixgbe_cache_ring_dcb_sriov()
69 for (i = 0; i < adapter->num_tx_queues; i++, reg_idx++) { in ixgbe_cache_ring_dcb_sriov()
71 if ((reg_idx & ~vmdq->mask) >= tcs) in ixgbe_cache_ring_dcb_sriov()
72 reg_idx = __ALIGN_MASK(reg_idx, ~vmdq->mask); in ixgbe_cache_ring_dcb_sriov()
73 adapter->tx_ring[i]->reg_idx = reg_idx; in ixgbe_cache_ring_dcb_sriov()
90 reg_idx = (vmdq->offset + vmdq->indices) * queues_per_pool; in ixgbe_cache_ring_dcb_sriov()
92 reg_idx = __ALIGN_MASK(reg_idx, ~vmdq->mask) + fcoe_tc; in ixgbe_cache_ring_dcb_sriov()
93 adapter->rx_ring[i]->reg_idx = reg_idx; in ixgbe_cache_ring_dcb_sriov()
94 reg_idx++; in ixgbe_cache_ring_dcb_sriov()
97 reg_idx = (vmdq->offset + vmdq->indices) * queues_per_pool; in ixgbe_cache_ring_dcb_sriov()
99 reg_idx = __ALIGN_MASK(reg_idx, ~vmdq->mask) + fcoe_tc; in ixgbe_cache_ring_dcb_sriov()
100 adapter->tx_ring[i]->reg_idx = reg_idx; in ixgbe_cache_ring_dcb_sriov()
101 reg_idx++; in ixgbe_cache_ring_dcb_sriov()
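
The matches above trace how ixgbe_cache_ring_dcb_sriov() walks hardware register indices: reg_idx starts at the first queue of the adapter's VMDq pool (vmdq->offset times the pool size, line 60), and whenever the within-pool index reaches the number of traffic classes it is rounded up to the start of the next pool with __ALIGN_MASK() (lines 63-64); lines 90-101 then place the FCoE rings just past the last pool, offset into the FCoE traffic class. A minimal user-space sketch of that walk follows; the pool mask, offset and queue counts are made-up example values, and only __ALIGN_MASK() is the real kernel macro.

#include <stdio.h>

/* Kernel macro: rounds x up to the next multiple of (mask + 1),
 * where mask is of the form 2^n - 1. */
#define __ALIGN_MASK(x, mask)	(((x) + (mask)) & ~(mask))

int main(void)
{
	/* Hypothetical values: 8 queues per pool (so ~vmdq->mask == 0x7),
	 * pool offset 2, 4 traffic classes, 12 Rx queues to map. */
	unsigned int mask = 0x7;	/* plays the role of ~vmdq->mask  */
	unsigned int offset = 2;	/* plays the role of vmdq->offset */
	unsigned int tcs = 4;
	unsigned int num_rx_queues = 12;
	unsigned int i, reg_idx;

	/* First hardware queue of the pool, as at line 60 above. */
	reg_idx = offset * __ALIGN_MASK(1, mask);

	for (i = 0; i < num_rx_queues; i++, reg_idx++) {
		/* Past the per-pool TC count: jump to the start of the
		 * next pool, as at lines 63-64 above. */
		if ((reg_idx & mask) >= tcs)
			reg_idx = __ALIGN_MASK(reg_idx, mask);
		printf("rx_ring[%2u] -> reg_idx %u\n", i, reg_idx);
	}
	return 0;
}

With those numbers the printed mapping is 16-19, 24-27 and 32-35: each pool contributes one register index per traffic class before the walk skips ahead.
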
181 adapter->tx_ring[offset + i]->reg_idx = tx_idx; in ixgbe_cache_ring_dcb()
182 adapter->rx_ring[offset + i]->reg_idx = rx_idx; in ixgbe_cache_ring_dcb()

208 u16 reg_idx; in ixgbe_cache_ring_sriov() local
215 reg_idx = vmdq->offset * __ALIGN_MASK(1, ~vmdq->mask); in ixgbe_cache_ring_sriov()
216 for (i = 0; i < adapter->num_rx_queues; i++, reg_idx++) { in ixgbe_cache_ring_sriov()
223 if ((reg_idx & ~vmdq->mask) >= rss->indices) in ixgbe_cache_ring_sriov()
224 reg_idx = __ALIGN_MASK(reg_idx, ~vmdq->mask); in ixgbe_cache_ring_sriov()
225 adapter->rx_ring[i]->reg_idx = reg_idx; in ixgbe_cache_ring_sriov()
230 for (; i < adapter->num_rx_queues; i++, reg_idx++) in ixgbe_cache_ring_sriov()
231 adapter->rx_ring[i]->reg_idx = reg_idx; in ixgbe_cache_ring_sriov()
234 reg_idx = vmdq->offset * __ALIGN_MASK(1, ~vmdq->mask); in ixgbe_cache_ring_sriov()
235 for (i = 0; i < adapter->num_tx_queues; i++, reg_idx++) { in ixgbe_cache_ring_sriov()
242 if ((reg_idx & rss->mask) >= rss->indices) in ixgbe_cache_ring_sriov()
243 reg_idx = __ALIGN_MASK(reg_idx, ~vmdq->mask); in ixgbe_cache_ring_sriov()
244 adapter->tx_ring[i]->reg_idx = reg_idx; in ixgbe_cache_ring_sriov()
249 for (; i < adapter->num_tx_queues; i++, reg_idx++) in ixgbe_cache_ring_sriov()
250 adapter->tx_ring[i]->reg_idx = reg_idx; in ixgbe_cache_ring_sriov()
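
ixgbe_cache_ring_sriov() uses the same rounding, but the per-pool cutoff is rss->indices rather than the traffic-class count (lines 223-224 and 242-243), and any queues left over after the RSS block (the FCoE queues, lines 230-231 and 249-250) simply take consecutive register indices. The rounding itself is the standard kernel __ALIGN_MASK() macro; a small self-checking sketch of its behavior, assuming four queues per pool (so ~vmdq->mask == 0x3):

#include <assert.h>

#define __ALIGN_MASK(x, mask)	(((x) + (mask)) & ~(mask))

int main(void)
{
	/* Rounds up to the next multiple of 4, i.e. the next pool start. */
	assert(__ALIGN_MASK(1, 0x3) == 4);	/* pool size, as computed at line 215 */
	assert(__ALIGN_MASK(5, 0x3) == 8);	/* mid-pool: skip to the next pool    */
	assert(__ALIGN_MASK(8, 0x3) == 8);	/* already on a pool boundary         */
	return 0;
}
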
269 adapter->rx_ring[i]->reg_idx = i; in ixgbe_cache_ring_rss()
271 adapter->tx_ring[i]->reg_idx = i; in ixgbe_cache_ring_rss()

290 adapter->rx_ring[0]->reg_idx = 0; in ixgbe_cache_ring_register()
291 adapter->tx_ring[0]->reg_idx = 0; in ixgbe_cache_ring_register()
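
The last two matches are the single-queue default in ixgbe_cache_ring_register(): ring 0 is mapped to register index 0. Below is a self-contained sketch of how such a default typically combines with the feature-specific helpers matched above; the dispatch order and stub names are assumptions, and only the two default assignments correspond to lines 290-291.

#include <stdbool.h>
#include <stdio.h>

struct ring { unsigned int reg_idx; };
struct adapter { struct ring rx0, tx0; };

/* Stand-ins for the feature-specific helpers; each would return true
 * once it has written the reg_idx values itself. */
static bool cache_dcb_sriov(struct adapter *a) { (void)a; return false; }
static bool cache_dcb(struct adapter *a)       { (void)a; return false; }
static bool cache_sriov(struct adapter *a)     { (void)a; return false; }

static void cache_ring_register(struct adapter *a)
{
	/* Single-queue default, as at lines 290-291 above. */
	a->rx0.reg_idx = 0;
	a->tx0.reg_idx = 0;

	/* The first helper that applies keeps its mapping. */
	if (cache_dcb_sriov(a))
		return;
	if (cache_dcb(a))
		return;
	if (cache_sriov(a))
		return;
	/* An RSS identity mapping, as at lines 269-271, would go here. */
}

int main(void)
{
	struct adapter a;

	cache_ring_register(&a);
	printf("rx0 -> %u, tx0 -> %u\n", a.rx0.reg_idx, a.tx0.reg_idx);
	return 0;
}
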