Lines Matching refs:ring (cross-reference hits for the identifier "ring" in the b43legacy DMA code; the leading number on each hit is its source-file line, and the trailing "in func() argument/local" note records how "ring" is used at that site)
45 struct b43legacy_dmadesc32 *op32_idx2desc(struct b43legacy_dmaring *ring, in op32_idx2desc() argument
51 *meta = &(ring->meta[slot]); in op32_idx2desc()
52 desc = ring->descbase; in op32_idx2desc()
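The hits at 45-52 cover essentially the whole body of op32_idx2desc(); reconstructed as a sketch (the final pointer arithmetic is inferred from desc = ring->descbase plus the slot argument):

static struct b43legacy_dmadesc32 *op32_idx2desc(struct b43legacy_dmaring *ring,
						 int slot,
						 struct b43legacy_dmadesc_meta **meta)
{
	struct b43legacy_dmadesc32 *desc;

	*meta = &(ring->meta[slot]);	/* driver-side bookkeeping for the slot */
	desc = ring->descbase;		/* base of the coherent descriptor array */
	desc = &(desc[slot]);		/* the slot's hardware descriptor */

	return desc;
}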
58 static void op32_fill_descriptor(struct b43legacy_dmaring *ring, in op32_fill_descriptor() argument
63 struct b43legacy_dmadesc32 *descbase = ring->descbase; in op32_fill_descriptor()
70 B43legacy_WARN_ON(!(slot >= 0 && slot < ring->nr_slots)); in op32_fill_descriptor()
75 addr |= ring->dev->dma.translation; in op32_fill_descriptor()
76 ctl = (bufsize - ring->frameoffset) in op32_fill_descriptor()
78 if (slot == ring->nr_slots - 1) in op32_fill_descriptor()
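Lines 58-78 outline the descriptor-fill path. A condensed reconstruction follows; the SSB_DMA_* address split, the DCTL flag names beyond DTABLEEND, and the final little-endian stores are assumptions consistent with a 32-bit Broadcom DMA descriptor, not verbatim source:

static void op32_fill_descriptor(struct b43legacy_dmaring *ring,
				 struct b43legacy_dmadesc32 *desc,
				 dma_addr_t dmaaddr, u16 bufsize,
				 int start, int end, int irq)
{
	struct b43legacy_dmadesc32 *descbase = ring->descbase;
	int slot = (int)(desc - descbase);	/* recover the slot index */
	u32 addr, addrext, ctl;

	B43legacy_WARN_ON(!(slot >= 0 && slot < ring->nr_slots));

	/* Split the bus address into the in-window part and the
	 * address-extension bits, then fold in the core's translation. */
	addr = (u32)(dmaaddr & ~SSB_DMA_TRANSLATION_MASK);
	addrext = (u32)(dmaaddr & SSB_DMA_TRANSLATION_MASK)
		  >> SSB_DMA_TRANSLATION_SHIFT;
	addr |= ring->dev->dma.translation;

	ctl = (bufsize - ring->frameoffset) & B43legacy_DMA32_DCTL_BYTECNT;
	if (slot == ring->nr_slots - 1)
		ctl |= B43legacy_DMA32_DCTL_DTABLEEND;	/* ring wraps here */
	if (start)
		ctl |= B43legacy_DMA32_DCTL_FRAMESTART;
	if (end)
		ctl |= B43legacy_DMA32_DCTL_FRAMEEND;
	if (irq)
		ctl |= B43legacy_DMA32_DCTL_IRQ;
	ctl |= (addrext << B43legacy_DMA32_DCTL_ADDREXT_SHIFT)
	       & B43legacy_DMA32_DCTL_ADDREXT_MASK;

	desc->control = cpu_to_le32(ctl);
	desc->address = cpu_to_le32(addr);
}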
93 static void op32_poke_tx(struct b43legacy_dmaring *ring, int slot) in op32_poke_tx() argument
95 b43legacy_dma_write(ring, B43legacy_DMA32_TXINDEX, in op32_poke_tx()
99 static void op32_tx_suspend(struct b43legacy_dmaring *ring) in op32_tx_suspend() argument
101 b43legacy_dma_write(ring, B43legacy_DMA32_TXCTL, in op32_tx_suspend()
102 b43legacy_dma_read(ring, B43legacy_DMA32_TXCTL) in op32_tx_suspend()
106 static void op32_tx_resume(struct b43legacy_dmaring *ring) in op32_tx_resume() argument
108 b43legacy_dma_write(ring, B43legacy_DMA32_TXCTL, in op32_tx_resume()
109 b43legacy_dma_read(ring, B43legacy_DMA32_TXCTL) in op32_tx_resume()
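The three small register helpers at 93-109 complete as below; that TXINDEX takes a byte offset (hence the descriptor-size multiply) and the B43legacy_DMA32_TXSUSPEND bit name are assumptions matching the read-modify-write shape of the fragments:

static void op32_poke_tx(struct b43legacy_dmaring *ring, int slot)
{
	/* Advance the hardware's TX tail pointer past the new frame. */
	b43legacy_dma_write(ring, B43legacy_DMA32_TXINDEX,
			    (u32)(slot * sizeof(struct b43legacy_dmadesc32)));
}

static void op32_tx_suspend(struct b43legacy_dmaring *ring)
{
	/* Read-modify-write TXCTL: set the suspend bit. */
	b43legacy_dma_write(ring, B43legacy_DMA32_TXCTL,
			    b43legacy_dma_read(ring, B43legacy_DMA32_TXCTL)
			    | B43legacy_DMA32_TXSUSPEND);
}

static void op32_tx_resume(struct b43legacy_dmaring *ring)
{
	/* Clear the same bit to let transmission continue. */
	b43legacy_dma_write(ring, B43legacy_DMA32_TXCTL,
			    b43legacy_dma_read(ring, B43legacy_DMA32_TXCTL)
			    & ~B43legacy_DMA32_TXSUSPEND);
}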
113 static int op32_get_current_rxslot(struct b43legacy_dmaring *ring) in op32_get_current_rxslot() argument
117 val = b43legacy_dma_read(ring, B43legacy_DMA32_RXSTATUS); in op32_get_current_rxslot()
123 static void op32_set_current_rxslot(struct b43legacy_dmaring *ring, in op32_set_current_rxslot() argument
126 b43legacy_dma_write(ring, B43legacy_DMA32_RXINDEX, in op32_set_current_rxslot()
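The RX-slot accessors at 113-126, completed as a sketch; the RXDPTR field mask and the bytes-to-slot division mirror op32_poke_tx() above and are assumptions to that extent:

static int op32_get_current_rxslot(struct b43legacy_dmaring *ring)
{
	u32 val;

	val = b43legacy_dma_read(ring, B43legacy_DMA32_RXSTATUS);
	val &= B43legacy_DMA32_RXDPTR;		/* current descriptor pointer */

	return (val / sizeof(struct b43legacy_dmadesc32));	/* bytes -> slot */
}

static void op32_set_current_rxslot(struct b43legacy_dmaring *ring,
				    int slot)
{
	b43legacy_dma_write(ring, B43legacy_DMA32_RXINDEX,
			    (u32)(slot * sizeof(struct b43legacy_dmadesc32)));
}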
130 static inline int free_slots(struct b43legacy_dmaring *ring) in free_slots() argument
132 return (ring->nr_slots - ring->used_slots); in free_slots()
135 static inline int next_slot(struct b43legacy_dmaring *ring, int slot) in next_slot() argument
137 B43legacy_WARN_ON(!(slot >= -1 && slot <= ring->nr_slots - 1)); in next_slot()
138 if (slot == ring->nr_slots - 1) in next_slot()
143 static inline int prev_slot(struct b43legacy_dmaring *ring, int slot) in prev_slot() argument
145 B43legacy_WARN_ON(!(slot >= 0 && slot <= ring->nr_slots - 1)); in prev_slot()
147 return ring->nr_slots - 1; in prev_slot()
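The slot arithmetic at 130-147 is fully determined once the wrap cases are filled in:

static inline int next_slot(struct b43legacy_dmaring *ring, int slot)
{
	/* -1 is the "no slot used yet" sentinel for current_slot. */
	B43legacy_WARN_ON(!(slot >= -1 && slot <= ring->nr_slots - 1));
	if (slot == ring->nr_slots - 1)
		return 0;			/* wrap to the start */
	return slot + 1;
}

static inline int prev_slot(struct b43legacy_dmaring *ring, int slot)
{
	B43legacy_WARN_ON(!(slot >= 0 && slot <= ring->nr_slots - 1));
	if (slot == 0)
		return ring->nr_slots - 1;	/* wrap to the end */
	return slot - 1;
}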
152 static void update_max_used_slots(struct b43legacy_dmaring *ring, in update_max_used_slots() argument
155 if (current_used_slots <= ring->max_used_slots) in update_max_used_slots()
157 ring->max_used_slots = current_used_slots; in update_max_used_slots()
158 if (b43legacy_debug(ring->dev, B43legacy_DBG_DMAVERBOSE)) in update_max_used_slots()
159 b43legacydbg(ring->dev->wl, in update_max_used_slots()
161 ring->max_used_slots, in update_max_used_slots()
162 ring->tx ? "TX" : "RX", in update_max_used_slots()
163 ring->index); in update_max_used_slots()
167 void update_max_used_slots(struct b43legacy_dmaring *ring, in update_max_used_slots() argument
174 int request_slot(struct b43legacy_dmaring *ring) in request_slot() argument
178 B43legacy_WARN_ON(!ring->tx); in request_slot()
179 B43legacy_WARN_ON(ring->stopped); in request_slot()
180 B43legacy_WARN_ON(free_slots(ring) == 0); in request_slot()
182 slot = next_slot(ring, ring->current_slot); in request_slot()
183 ring->current_slot = slot; in request_slot()
184 ring->used_slots++; in request_slot()
186 update_max_used_slots(ring, ring->used_slots); in request_slot()
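request_slot() at 174-186 is almost entirely visible in its hits; assembled (only the return of the claimed slot is inferred):

static int request_slot(struct b43legacy_dmaring *ring)
{
	int slot;

	B43legacy_WARN_ON(!ring->tx);		/* TX rings only */
	B43legacy_WARN_ON(ring->stopped);	/* caller ensures free space */
	B43legacy_WARN_ON(free_slots(ring) == 0);

	slot = next_slot(ring, ring->current_slot);
	ring->current_slot = slot;
	ring->used_slots++;

	update_max_used_slots(ring, ring->used_slots);	/* debug stat, 152-167 */

	return slot;
}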
196 struct b43legacy_dmaring *ring; in priority_to_txring() local
207 ring = dev->dma.tx_ring3; in priority_to_txring()
210 ring = dev->dma.tx_ring2; in priority_to_txring()
213 ring = dev->dma.tx_ring1; in priority_to_txring()
216 ring = dev->dma.tx_ring0; in priority_to_txring()
219 ring = dev->dma.tx_ring4; in priority_to_txring()
222 ring = dev->dma.tx_ring5; in priority_to_txring()
226 return ring; in priority_to_txring()
230 static inline int txring_to_priority(struct b43legacy_dmaring *ring) in txring_to_priority() argument
238 return idx_to_prio[ring->index]; in txring_to_priority()
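The switch at 196-226 maps mac80211 queue priority (0 = highest) to controller index 3, 2, 1, 0, 4, 5; txring_to_priority() inverts it through the idx_to_prio table referenced at 238, whose contents follow from that mapping (the exact declaration in the driver is assumed):

static inline int txring_to_priority(struct b43legacy_dmaring *ring)
{
	/* Ring index -> queue priority, inverse of priority_to_txring(). */
	static const u8 idx_to_prio[] = { 3, 2, 1, 0, 4, 5 };

	return idx_to_prio[ring->index];
}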
260 dma_addr_t map_descbuffer(struct b43legacy_dmaring *ring, in map_descbuffer() argument
268 dmaaddr = dma_map_single(ring->dev->dev->dma_dev, in map_descbuffer()
272 dmaaddr = dma_map_single(ring->dev->dev->dma_dev, in map_descbuffer()
280 void unmap_descbuffer(struct b43legacy_dmaring *ring, in unmap_descbuffer() argument
286 dma_unmap_single(ring->dev->dev->dma_dev, in unmap_descbuffer()
290 dma_unmap_single(ring->dev->dev->dma_dev, in unmap_descbuffer()
296 void sync_descbuffer_for_cpu(struct b43legacy_dmaring *ring, in sync_descbuffer_for_cpu() argument
300 B43legacy_WARN_ON(ring->tx); in sync_descbuffer_for_cpu()
302 dma_sync_single_for_cpu(ring->dev->dev->dma_dev, in sync_descbuffer_for_cpu()
307 void sync_descbuffer_for_device(struct b43legacy_dmaring *ring, in sync_descbuffer_for_device() argument
311 B43legacy_WARN_ON(ring->tx); in sync_descbuffer_for_device()
313 dma_sync_single_for_device(ring->dev->dev->dma_dev, in sync_descbuffer_for_device()
318 void free_descriptor_buffer(struct b43legacy_dmaring *ring, in free_descriptor_buffer() argument
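map_descbuffer()/unmap_descbuffer() at 260-290 differ only in the DMA direction chosen from the tx flag, and the sync helpers at 296-313 are their RX-only streaming-sync counterparts. The mapping half, completed as a sketch:

static dma_addr_t map_descbuffer(struct b43legacy_dmaring *ring,
				 unsigned char *buf, size_t len, int tx)
{
	/* TX buffers are read by the device, RX buffers written by it;
	 * the same tx flag must be passed to unmap_descbuffer() later. */
	if (tx)
		return dma_map_single(ring->dev->dev->dma_dev,
				      buf, len, DMA_TO_DEVICE);
	return dma_map_single(ring->dev->dev->dma_dev,
			      buf, len, DMA_FROM_DEVICE);
}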
331 static int alloc_ringmemory(struct b43legacy_dmaring *ring) in alloc_ringmemory() argument
334 ring->descbase = dma_zalloc_coherent(ring->dev->dev->dma_dev, in alloc_ringmemory()
336 &(ring->dmabase), GFP_KERNEL); in alloc_ringmemory()
337 if (!ring->descbase) in alloc_ringmemory()
343 static void free_ringmemory(struct b43legacy_dmaring *ring) in free_ringmemory() argument
345 dma_free_coherent(ring->dev->dev->dma_dev, B43legacy_DMA_RINGMEMSIZE, in free_ringmemory()
346 ring->descbase, ring->dmabase); in free_ringmemory()
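Ring memory management at 331-346 in full; B43legacy_DMA_RINGMEMSIZE covers the whole descriptor table in one coherent allocation (dma_zalloc_coherent() dates this listing to a pre-5.0 kernel, where that call also zeroed the block):

static int alloc_ringmemory(struct b43legacy_dmaring *ring)
{
	ring->descbase = dma_zalloc_coherent(ring->dev->dev->dma_dev,
					     B43legacy_DMA_RINGMEMSIZE,
					     &(ring->dmabase), GFP_KERNEL);
	if (!ring->descbase)
		return -ENOMEM;

	return 0;
}

static void free_ringmemory(struct b43legacy_dmaring *ring)
{
	dma_free_coherent(ring->dev->dev->dma_dev, B43legacy_DMA_RINGMEMSIZE,
			  ring->descbase, ring->dmabase);
}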
424 static bool b43legacy_dma_mapping_error(struct b43legacy_dmaring *ring, in b43legacy_dma_mapping_error() argument
429 if (unlikely(dma_mapping_error(ring->dev->dev->dma_dev, addr))) in b43legacy_dma_mapping_error()
432 switch (ring->type) { in b43legacy_dma_mapping_error()
448 unmap_descbuffer(ring, addr, buffersize, dma_to_device); in b43legacy_dma_mapping_error()
453 static int setup_rx_descbuffer(struct b43legacy_dmaring *ring, in setup_rx_descbuffer() argument
463 B43legacy_WARN_ON(ring->tx); in setup_rx_descbuffer()
465 skb = __dev_alloc_skb(ring->rx_buffersize, gfp_flags); in setup_rx_descbuffer()
468 dmaaddr = map_descbuffer(ring, skb->data, in setup_rx_descbuffer()
469 ring->rx_buffersize, 0); in setup_rx_descbuffer()
470 if (b43legacy_dma_mapping_error(ring, dmaaddr, ring->rx_buffersize, 0)) { in setup_rx_descbuffer()
476 skb = __dev_alloc_skb(ring->rx_buffersize, gfp_flags); in setup_rx_descbuffer()
479 dmaaddr = map_descbuffer(ring, skb->data, in setup_rx_descbuffer()
480 ring->rx_buffersize, 0); in setup_rx_descbuffer()
483 if (b43legacy_dma_mapping_error(ring, dmaaddr, ring->rx_buffersize, 0)) { in setup_rx_descbuffer()
490 op32_fill_descriptor(ring, desc, dmaaddr, ring->rx_buffersize, 0, 0, 0); in setup_rx_descbuffer()
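setup_rx_descbuffer() at 453-490 allocates one skb per RX slot and attempts the mapping twice, at 465-470 and again at 476-483. A sketch of that retry, assuming the second allocation adds GFP_DMA to reach a lower zone (the fragments only show that a second __dev_alloc_skb()/map_descbuffer() pair follows the first failure):

	skb = __dev_alloc_skb(ring->rx_buffersize, gfp_flags);
	if (!skb)
		return -ENOMEM;
	dmaaddr = map_descbuffer(ring, skb->data, ring->rx_buffersize, 0);
	if (b43legacy_dma_mapping_error(ring, dmaaddr,
					ring->rx_buffersize, 0)) {
		/* Retry from a DMA-reachable zone (assumed GFP_DMA). */
		dev_kfree_skb_any(skb);
		gfp_flags |= GFP_DMA;
		skb = __dev_alloc_skb(ring->rx_buffersize, gfp_flags);
		if (!skb)
			return -ENOMEM;
		dmaaddr = map_descbuffer(ring, skb->data,
					 ring->rx_buffersize, 0);
		if (b43legacy_dma_mapping_error(ring, dmaaddr,
						ring->rx_buffersize, 0)) {
			dev_kfree_skb_any(skb);
			return -EIO;
		}
	}
	/* ...then line 490 hands the buffer to hardware:
	 * op32_fill_descriptor(ring, desc, dmaaddr, rx_buffersize, 0, 0, 0). */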
503 static int alloc_initial_descbuffers(struct b43legacy_dmaring *ring) in alloc_initial_descbuffers() argument
510 for (i = 0; i < ring->nr_slots; i++) { in alloc_initial_descbuffers()
511 desc = op32_idx2desc(ring, i, &meta); in alloc_initial_descbuffers()
513 err = setup_rx_descbuffer(ring, desc, meta, GFP_KERNEL); in alloc_initial_descbuffers()
515 b43legacyerr(ring->dev->wl, in alloc_initial_descbuffers()
521 ring->used_slots = ring->nr_slots; in alloc_initial_descbuffers()
528 desc = op32_idx2desc(ring, i, &meta); in alloc_initial_descbuffers()
530 unmap_descbuffer(ring, meta->dmaaddr, ring->rx_buffersize, 0); in alloc_initial_descbuffers()
540 static int dmacontroller_setup(struct b43legacy_dmaring *ring) in dmacontroller_setup() argument
545 u32 trans = ring->dev->dma.translation; in dmacontroller_setup()
546 u32 ringbase = (u32)(ring->dmabase); in dmacontroller_setup()
548 if (ring->tx) { in dmacontroller_setup()
554 b43legacy_dma_write(ring, B43legacy_DMA32_TXCTL, value); in dmacontroller_setup()
555 b43legacy_dma_write(ring, B43legacy_DMA32_TXRING, in dmacontroller_setup()
559 err = alloc_initial_descbuffers(ring); in dmacontroller_setup()
565 value = (ring->frameoffset << in dmacontroller_setup()
570 b43legacy_dma_write(ring, B43legacy_DMA32_RXCTL, value); in dmacontroller_setup()
571 b43legacy_dma_write(ring, B43legacy_DMA32_RXRING, in dmacontroller_setup()
574 b43legacy_dma_write(ring, B43legacy_DMA32_RXINDEX, 200); in dmacontroller_setup()
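dmacontroller_setup() at 540-574, condensed; the enable-bit and frame-offset constants beyond those implied by the hits are assumptions, and the ADDREXT handling present in the full function is elided here:

static int dmacontroller_setup(struct b43legacy_dmaring *ring)
{
	int err = 0;
	u32 value;
	u32 trans = ring->dev->dma.translation;
	u32 ringbase = (u32)(ring->dmabase);

	if (ring->tx) {
		value = B43legacy_DMA32_TXENABLE;
		b43legacy_dma_write(ring, B43legacy_DMA32_TXCTL, value);
		/* Ring base = low address bits OR'd with the backplane
		 * translation, exactly as buffer addresses are. */
		b43legacy_dma_write(ring, B43legacy_DMA32_TXRING,
				    (ringbase & ~SSB_DMA_TRANSLATION_MASK)
				    | trans);
	} else {
		err = alloc_initial_descbuffers(ring);	/* fill all RX slots */
		if (err)
			goto out;
		value = (ring->frameoffset << B43legacy_DMA32_RXFROFF_SHIFT);
		value |= B43legacy_DMA32_RXENABLE;
		b43legacy_dma_write(ring, B43legacy_DMA32_RXCTL, value);
		b43legacy_dma_write(ring, B43legacy_DMA32_RXRING,
				    (ringbase & ~SSB_DMA_TRANSLATION_MASK)
				    | trans);
		b43legacy_dma_write(ring, B43legacy_DMA32_RXINDEX, 200);
	}
out:
	return err;
}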
582 static void dmacontroller_cleanup(struct b43legacy_dmaring *ring) in dmacontroller_cleanup() argument
584 if (ring->tx) { in dmacontroller_cleanup()
585 b43legacy_dmacontroller_tx_reset(ring->dev, ring->mmio_base, in dmacontroller_cleanup()
586 ring->type); in dmacontroller_cleanup()
587 b43legacy_dma_write(ring, B43legacy_DMA32_TXRING, 0); in dmacontroller_cleanup()
589 b43legacy_dmacontroller_rx_reset(ring->dev, ring->mmio_base, in dmacontroller_cleanup()
590 ring->type); in dmacontroller_cleanup()
591 b43legacy_dma_write(ring, B43legacy_DMA32_RXRING, 0); in dmacontroller_cleanup()
595 static void free_all_descbuffers(struct b43legacy_dmaring *ring) in free_all_descbuffers() argument
600 if (!ring->used_slots) in free_all_descbuffers()
602 for (i = 0; i < ring->nr_slots; i++) { in free_all_descbuffers()
603 op32_idx2desc(ring, i, &meta); in free_all_descbuffers()
606 B43legacy_WARN_ON(!ring->tx); in free_all_descbuffers()
609 if (ring->tx) in free_all_descbuffers()
610 unmap_descbuffer(ring, meta->dmaaddr, in free_all_descbuffers()
613 unmap_descbuffer(ring, meta->dmaaddr, in free_all_descbuffers()
614 ring->rx_buffersize, 0); in free_all_descbuffers()
615 free_descriptor_buffer(ring, meta, 0); in free_all_descbuffers()
653 struct b43legacy_dmaring *ring; in b43legacy_setup_dmaring() local
658 ring = kzalloc(sizeof(*ring), GFP_KERNEL); in b43legacy_setup_dmaring()
659 if (!ring) in b43legacy_setup_dmaring()
661 ring->type = type; in b43legacy_setup_dmaring()
662 ring->dev = dev; in b43legacy_setup_dmaring()
668 ring->meta = kcalloc(nr_slots, sizeof(struct b43legacy_dmadesc_meta), in b43legacy_setup_dmaring()
670 if (!ring->meta) in b43legacy_setup_dmaring()
673 ring->txhdr_cache = kcalloc(nr_slots, in b43legacy_setup_dmaring()
676 if (!ring->txhdr_cache) in b43legacy_setup_dmaring()
680 dma_test = dma_map_single(dev->dev->dma_dev, ring->txhdr_cache, in b43legacy_setup_dmaring()
684 if (b43legacy_dma_mapping_error(ring, dma_test, in b43legacy_setup_dmaring()
687 kfree(ring->txhdr_cache); in b43legacy_setup_dmaring()
688 ring->txhdr_cache = kcalloc(nr_slots, in b43legacy_setup_dmaring()
691 if (!ring->txhdr_cache) in b43legacy_setup_dmaring()
695 ring->txhdr_cache, in b43legacy_setup_dmaring()
699 if (b43legacy_dma_mapping_error(ring, dma_test, in b43legacy_setup_dmaring()
709 ring->nr_slots = nr_slots; in b43legacy_setup_dmaring()
710 ring->mmio_base = b43legacy_dmacontroller_base(type, controller_index); in b43legacy_setup_dmaring()
711 ring->index = controller_index; in b43legacy_setup_dmaring()
713 ring->tx = true; in b43legacy_setup_dmaring()
714 ring->current_slot = -1; in b43legacy_setup_dmaring()
716 if (ring->index == 0) { in b43legacy_setup_dmaring()
717 ring->rx_buffersize = B43legacy_DMA0_RX_BUFFERSIZE; in b43legacy_setup_dmaring()
718 ring->frameoffset = B43legacy_DMA0_RX_FRAMEOFFSET; in b43legacy_setup_dmaring()
719 } else if (ring->index == 3) { in b43legacy_setup_dmaring()
720 ring->rx_buffersize = B43legacy_DMA3_RX_BUFFERSIZE; in b43legacy_setup_dmaring()
721 ring->frameoffset = B43legacy_DMA3_RX_FRAMEOFFSET; in b43legacy_setup_dmaring()
726 ring->last_injected_overflow = jiffies; in b43legacy_setup_dmaring()
729 err = alloc_ringmemory(ring); in b43legacy_setup_dmaring()
732 err = dmacontroller_setup(ring); in b43legacy_setup_dmaring()
737 return ring; in b43legacy_setup_dmaring()
740 free_ringmemory(ring); in b43legacy_setup_dmaring()
742 kfree(ring->txhdr_cache); in b43legacy_setup_dmaring()
744 kfree(ring->meta); in b43legacy_setup_dmaring()
746 kfree(ring); in b43legacy_setup_dmaring()
747 ring = NULL; in b43legacy_setup_dmaring()
752 static void b43legacy_destroy_dmaring(struct b43legacy_dmaring *ring) in b43legacy_destroy_dmaring() argument
754 if (!ring) in b43legacy_destroy_dmaring()
757 b43legacydbg(ring->dev->wl, "DMA-%u 0x%04X (%s) max used slots:" in b43legacy_destroy_dmaring()
758 " %d/%d\n", (unsigned int)(ring->type), ring->mmio_base, in b43legacy_destroy_dmaring()
759 (ring->tx) ? "TX" : "RX", ring->max_used_slots, in b43legacy_destroy_dmaring()
760 ring->nr_slots); in b43legacy_destroy_dmaring()
764 dmacontroller_cleanup(ring); in b43legacy_destroy_dmaring()
765 free_all_descbuffers(ring); in b43legacy_destroy_dmaring()
766 free_ringmemory(ring); in b43legacy_destroy_dmaring()
768 kfree(ring->txhdr_cache); in b43legacy_destroy_dmaring()
769 kfree(ring->meta); in b43legacy_destroy_dmaring()
770 kfree(ring); in b43legacy_destroy_dmaring()
840 struct b43legacy_dmaring *ring; in b43legacy_dma_init() local
864 ring = b43legacy_setup_dmaring(dev, 0, 1, type); in b43legacy_dma_init()
865 if (!ring) in b43legacy_dma_init()
867 dma->tx_ring0 = ring; in b43legacy_dma_init()
869 ring = b43legacy_setup_dmaring(dev, 1, 1, type); in b43legacy_dma_init()
870 if (!ring) in b43legacy_dma_init()
872 dma->tx_ring1 = ring; in b43legacy_dma_init()
874 ring = b43legacy_setup_dmaring(dev, 2, 1, type); in b43legacy_dma_init()
875 if (!ring) in b43legacy_dma_init()
877 dma->tx_ring2 = ring; in b43legacy_dma_init()
879 ring = b43legacy_setup_dmaring(dev, 3, 1, type); in b43legacy_dma_init()
880 if (!ring) in b43legacy_dma_init()
882 dma->tx_ring3 = ring; in b43legacy_dma_init()
884 ring = b43legacy_setup_dmaring(dev, 4, 1, type); in b43legacy_dma_init()
885 if (!ring) in b43legacy_dma_init()
887 dma->tx_ring4 = ring; in b43legacy_dma_init()
889 ring = b43legacy_setup_dmaring(dev, 5, 1, type); in b43legacy_dma_init()
890 if (!ring) in b43legacy_dma_init()
892 dma->tx_ring5 = ring; in b43legacy_dma_init()
895 ring = b43legacy_setup_dmaring(dev, 0, 0, type); in b43legacy_dma_init()
896 if (!ring) in b43legacy_dma_init()
898 dma->rx_ring0 = ring; in b43legacy_dma_init()
901 ring = b43legacy_setup_dmaring(dev, 3, 0, type); in b43legacy_dma_init()
902 if (!ring) in b43legacy_dma_init()
904 dma->rx_ring3 = ring; in b43legacy_dma_init()
937 static u16 generate_cookie(struct b43legacy_dmaring *ring, in generate_cookie() argument
948 switch (ring->index) { in generate_cookie()
980 struct b43legacy_dmaring *ring = NULL; in parse_cookie() local
984 ring = dma->tx_ring0; in parse_cookie()
987 ring = dma->tx_ring1; in parse_cookie()
990 ring = dma->tx_ring2; in parse_cookie()
993 ring = dma->tx_ring3; in parse_cookie()
996 ring = dma->tx_ring4; in parse_cookie()
999 ring = dma->tx_ring5; in parse_cookie()
1005 B43legacy_WARN_ON(!(ring && *slot >= 0 && *slot < ring->nr_slots)); in parse_cookie()
1007 return ring; in parse_cookie()
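generate_cookie()/parse_cookie() at 937-1007 round-trip a (ring, slot) pair through the 16-bit cookie the firmware echoes back in TX status reports: the high nibble identifies the TX ring, the low 12 bits the slot. A sketch of the encoder (the concrete nibble values 0xA000-0xF000 are assumptions; the hits only show the six-way switches):

static u16 generate_cookie(struct b43legacy_dmaring *ring, int slot)
{
	/* One ID nibble per TX controller, indexable by ring->index. */
	static const u16 ring_id[] = {
		0xA000, 0xB000, 0xC000, 0xD000, 0xE000, 0xF000,
	};
	u16 cookie = ring_id[ring->index];

	B43legacy_WARN_ON(!(((u16)slot & 0xF000) == 0x0000));
	cookie |= (u16)slot;

	return cookie;
}

parse_cookie() inverts this: it switches on (cookie & 0xF000) to pick tx_ring0..tx_ring5, recovers *slot = cookie & 0x0FFF, then sanity-checks both, as the warning at 1005 shows.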
1010 static int dma_tx_fragment(struct b43legacy_dmaring *ring, in dma_tx_fragment() argument
1026 old_top_slot = ring->current_slot; in dma_tx_fragment()
1027 old_used_slots = ring->used_slots; in dma_tx_fragment()
1030 slot = request_slot(ring); in dma_tx_fragment()
1031 desc = op32_idx2desc(ring, slot, &meta_hdr); in dma_tx_fragment()
1034 header = &(ring->txhdr_cache[slot * sizeof( in dma_tx_fragment()
1036 err = b43legacy_generate_txhdr(ring->dev, header, in dma_tx_fragment()
1038 generate_cookie(ring, slot)); in dma_tx_fragment()
1040 ring->current_slot = old_top_slot; in dma_tx_fragment()
1041 ring->used_slots = old_used_slots; in dma_tx_fragment()
1045 meta_hdr->dmaaddr = map_descbuffer(ring, (unsigned char *)header, in dma_tx_fragment()
1047 if (b43legacy_dma_mapping_error(ring, meta_hdr->dmaaddr, in dma_tx_fragment()
1049 ring->current_slot = old_top_slot; in dma_tx_fragment()
1050 ring->used_slots = old_used_slots; in dma_tx_fragment()
1053 op32_fill_descriptor(ring, desc, meta_hdr->dmaaddr, in dma_tx_fragment()
1057 slot = request_slot(ring); in dma_tx_fragment()
1058 desc = op32_idx2desc(ring, slot, &meta); in dma_tx_fragment()
1064 meta->dmaaddr = map_descbuffer(ring, skb->data, skb->len, 1); in dma_tx_fragment()
1066 if (b43legacy_dma_mapping_error(ring, meta->dmaaddr, skb->len, 1)) { in dma_tx_fragment()
1069 ring->current_slot = old_top_slot; in dma_tx_fragment()
1070 ring->used_slots = old_used_slots; in dma_tx_fragment()
1085 meta->dmaaddr = map_descbuffer(ring, skb->data, skb->len, 1); in dma_tx_fragment()
1086 if (b43legacy_dma_mapping_error(ring, meta->dmaaddr, skb->len, 1)) { in dma_tx_fragment()
1087 ring->current_slot = old_top_slot; in dma_tx_fragment()
1088 ring->used_slots = old_used_slots; in dma_tx_fragment()
1094 op32_fill_descriptor(ring, desc, meta->dmaaddr, in dma_tx_fragment()
1099 op32_poke_tx(ring, next_slot(ring, slot)); in dma_tx_fragment()
1105 unmap_descbuffer(ring, meta_hdr->dmaaddr, in dma_tx_fragment()
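dma_tx_fragment() at 1010-1105 uses two descriptors per frame: one for the firmware TX header built in txhdr_cache, one for the frame body. Its defining pattern is the snapshot/rollback visible at 1026-1027 and repeated at every failure point. A condensed sketch (the b43legacy_generate_txhdr() signature, the txhdr_fw3 struct name, and the error codes are assumptions; the bounce-buffer retry at 1066-1088 is elided):

static int dma_tx_fragment(struct b43legacy_dmaring *ring,
			   struct sk_buff **in_skb)
{
	struct sk_buff *skb = *in_skb;
	struct ieee80211_tx_info *info = IEEE80211_SKB_CB(skb);
	struct b43legacy_dmadesc32 *desc;
	struct b43legacy_dmadesc_meta *meta;
	struct b43legacy_dmadesc_meta *meta_hdr;
	u8 *header;
	int slot, err;
	int old_top_slot = ring->current_slot;	/* snapshot for rollback */
	int old_used_slots = ring->used_slots;

	/* Slot 1 of 2: the firmware TX header. */
	slot = request_slot(ring);
	desc = op32_idx2desc(ring, slot, &meta_hdr);
	header = &(ring->txhdr_cache[slot *
				     sizeof(struct b43legacy_txhdr_fw3)]);
	err = b43legacy_generate_txhdr(ring->dev, header, skb->data,
				       skb->len, info,
				       generate_cookie(ring, slot));
	if (unlikely(err))
		goto err_rollback;
	meta_hdr->dmaaddr = map_descbuffer(ring, (unsigned char *)header,
					   sizeof(struct b43legacy_txhdr_fw3),
					   1);
	if (b43legacy_dma_mapping_error(ring, meta_hdr->dmaaddr,
					sizeof(struct b43legacy_txhdr_fw3),
					1)) {
		err = -EIO;
		goto err_rollback;
	}
	op32_fill_descriptor(ring, desc, meta_hdr->dmaaddr,
			     sizeof(struct b43legacy_txhdr_fw3), 1, 0, 0);

	/* Slot 2 of 2: the frame body, IRQ on completion. */
	slot = request_slot(ring);
	desc = op32_idx2desc(ring, slot, &meta);
	meta->skb = skb;
	meta->dmaaddr = map_descbuffer(ring, skb->data, skb->len, 1);
	if (b43legacy_dma_mapping_error(ring, meta->dmaaddr, skb->len, 1)) {
		unmap_descbuffer(ring, meta_hdr->dmaaddr,
				 sizeof(struct b43legacy_txhdr_fw3), 1);
		err = -EIO;
		goto err_rollback;
	}
	op32_fill_descriptor(ring, desc, meta->dmaaddr, skb->len, 0, 1, 1);

	wmb();	/* descriptors must be visible before poking hardware */
	op32_poke_tx(ring, next_slot(ring, slot));
	return 0;

err_rollback:
	/* Restore the entry snapshot so a failed frame leaves the ring
	 * exactly as it found it. */
	ring->current_slot = old_top_slot;
	ring->used_slots = old_used_slots;
	return err;
}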
1111 int should_inject_overflow(struct b43legacy_dmaring *ring) in should_inject_overflow() argument
1114 if (unlikely(b43legacy_debug(ring->dev, in should_inject_overflow()
1120 next_overflow = ring->last_injected_overflow + HZ; in should_inject_overflow()
1122 ring->last_injected_overflow = jiffies; in should_inject_overflow()
1123 b43legacydbg(ring->dev->wl, in should_inject_overflow()
1125 "DMA controller %d\n", ring->index); in should_inject_overflow()
1136 struct b43legacy_dmaring *ring; in b43legacy_dma_tx() local
1139 ring = priority_to_txring(dev, skb_get_queue_mapping(skb)); in b43legacy_dma_tx()
1140 B43legacy_WARN_ON(!ring->tx); in b43legacy_dma_tx()
1142 if (unlikely(ring->stopped)) { in b43legacy_dma_tx()
1152 if (unlikely(WARN_ON(free_slots(ring) < SLOTS_PER_PACKET))) { in b43legacy_dma_tx()
1161 err = dma_tx_fragment(ring, &skb); in b43legacy_dma_tx()
1172 if ((free_slots(ring) < SLOTS_PER_PACKET) || in b43legacy_dma_tx()
1173 should_inject_overflow(ring)) { in b43legacy_dma_tx()
1178 ring->stopped = true; in b43legacy_dma_tx()
1181 ring->index); in b43legacy_dma_tx()
1189 struct b43legacy_dmaring *ring; in b43legacy_dma_handle_txstatus() local
1195 ring = parse_cookie(dev, status->cookie, &slot); in b43legacy_dma_handle_txstatus()
1196 if (unlikely(!ring)) in b43legacy_dma_handle_txstatus()
1198 B43legacy_WARN_ON(!ring->tx); in b43legacy_dma_handle_txstatus()
1203 firstused = ring->current_slot - ring->used_slots + 1; in b43legacy_dma_handle_txstatus()
1205 firstused = ring->nr_slots + firstused; in b43legacy_dma_handle_txstatus()
1212 ring->index, firstused, slot); in b43legacy_dma_handle_txstatus()
1217 B43legacy_WARN_ON(!(slot >= 0 && slot < ring->nr_slots)); in b43legacy_dma_handle_txstatus()
1218 op32_idx2desc(ring, slot, &meta); in b43legacy_dma_handle_txstatus()
1221 unmap_descbuffer(ring, meta->dmaaddr, in b43legacy_dma_handle_txstatus()
1224 unmap_descbuffer(ring, meta->dmaaddr, in b43legacy_dma_handle_txstatus()
1279 ring->used_slots--; in b43legacy_dma_handle_txstatus()
1283 slot = next_slot(ring, slot); in b43legacy_dma_handle_txstatus()
1286 if (ring->stopped) { in b43legacy_dma_handle_txstatus()
1287 B43legacy_WARN_ON(free_slots(ring) < SLOTS_PER_PACKET); in b43legacy_dma_handle_txstatus()
1288 ring->stopped = false; in b43legacy_dma_handle_txstatus()
1291 if (dev->wl->tx_queue_stopped[ring->queue_prio]) { in b43legacy_dma_handle_txstatus()
1292 dev->wl->tx_queue_stopped[ring->queue_prio] = 0; in b43legacy_dma_handle_txstatus()
1296 ieee80211_wake_queue(dev->wl->hw, ring->queue_prio); in b43legacy_dma_handle_txstatus()
1299 ring->index); in b43legacy_dma_handle_txstatus()
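The firstused computation at 1203-1205 recovers the oldest hardware-owned slot from current_slot and used_slots, wrapping via nr_slots when the subtraction goes negative. A concrete case, pure ring arithmetic:

	/* current_slot = 2, used_slots = 5, nr_slots = 64:
	 * firstused = 2 - 5 + 1 = -2, so wrap: firstused = -2 + 64 = 62.
	 * Slots 62, 63, 0, 1, 2 are the five still in flight, and the
	 * slot parsed from the cookie (1217) should equal firstused. */
	firstused = ring->current_slot - ring->used_slots + 1;
	if (firstused < 0)
		firstused = ring->nr_slots + firstused;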
1305 static void dma_rx(struct b43legacy_dmaring *ring, in dma_rx() argument
1316 desc = op32_idx2desc(ring, *slot, &meta); in dma_rx()
1318 sync_descbuffer_for_cpu(ring, meta->dmaaddr, ring->rx_buffersize); in dma_rx()
1321 if (ring->index == 3) { in dma_rx()
1334 b43legacy_handle_hwtxstatus(ring->dev, hw); in dma_rx()
1336 sync_descbuffer_for_device(ring, meta->dmaaddr, in dma_rx()
1337 ring->rx_buffersize); in dma_rx()
1353 sync_descbuffer_for_device(ring, meta->dmaaddr, in dma_rx()
1354 ring->rx_buffersize); in dma_rx()
1358 if (unlikely(len > ring->rx_buffersize)) { in dma_rx()
1368 desc = op32_idx2desc(ring, *slot, &meta); in dma_rx()
1370 sync_descbuffer_for_device(ring, meta->dmaaddr, in dma_rx()
1371 ring->rx_buffersize); in dma_rx()
1372 *slot = next_slot(ring, *slot); in dma_rx()
1374 tmp -= ring->rx_buffersize; in dma_rx()
1378 b43legacyerr(ring->dev->wl, "DMA RX buffer too small " in dma_rx()
1380 len, ring->rx_buffersize, cnt); in dma_rx()
1385 err = setup_rx_descbuffer(ring, desc, meta, GFP_ATOMIC); in dma_rx()
1387 b43legacydbg(ring->dev->wl, "DMA RX: setup_rx_descbuffer()" in dma_rx()
1389 sync_descbuffer_for_device(ring, dmaaddr, in dma_rx()
1390 ring->rx_buffersize); in dma_rx()
1394 unmap_descbuffer(ring, dmaaddr, ring->rx_buffersize, 0); in dma_rx()
1395 skb_put(skb, len + ring->frameoffset); in dma_rx()
1396 skb_pull(skb, ring->frameoffset); in dma_rx()
1398 b43legacy_rx(ring->dev, skb, rxhdr); in dma_rx()
1403 void b43legacy_dma_rx(struct b43legacy_dmaring *ring) in b43legacy_dma_rx() argument
1409 B43legacy_WARN_ON(ring->tx); in b43legacy_dma_rx()
1410 current_slot = op32_get_current_rxslot(ring); in b43legacy_dma_rx()
1412 ring->nr_slots)); in b43legacy_dma_rx()
1414 slot = ring->current_slot; in b43legacy_dma_rx()
1415 for (; slot != current_slot; slot = next_slot(ring, slot)) { in b43legacy_dma_rx()
1416 dma_rx(ring, &slot); in b43legacy_dma_rx()
1417 update_max_used_slots(ring, ++used_slots); in b43legacy_dma_rx()
1419 op32_set_current_rxslot(ring, slot); in b43legacy_dma_rx()
1420 ring->current_slot = slot; in b43legacy_dma_rx()
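b43legacy_dma_rx() at 1403-1420 drains everything between the driver's last position and the hardware write pointer, then hands the slots back. Assembled from the hits:

void b43legacy_dma_rx(struct b43legacy_dmaring *ring)
{
	int slot;
	int current_slot;
	int used_slots = 0;

	B43legacy_WARN_ON(ring->tx);
	/* Where has hardware written up to? */
	current_slot = op32_get_current_rxslot(ring);
	B43legacy_WARN_ON(!(current_slot >= 0 && current_slot <
			    ring->nr_slots));

	/* Consume every filled slot, then let hardware reuse them by
	 * advancing the RX index. */
	slot = ring->current_slot;
	for (; slot != current_slot; slot = next_slot(ring, slot)) {
		dma_rx(ring, &slot);
		update_max_used_slots(ring, ++used_slots);
	}
	op32_set_current_rxslot(ring, slot);
	ring->current_slot = slot;
}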
1423 static void b43legacy_dma_tx_suspend_ring(struct b43legacy_dmaring *ring) in b43legacy_dma_tx_suspend_ring() argument
1425 B43legacy_WARN_ON(!ring->tx); in b43legacy_dma_tx_suspend_ring()
1426 op32_tx_suspend(ring); in b43legacy_dma_tx_suspend_ring()
1429 static void b43legacy_dma_tx_resume_ring(struct b43legacy_dmaring *ring) in b43legacy_dma_tx_resume_ring() argument
1431 B43legacy_WARN_ON(!ring->tx); in b43legacy_dma_tx_resume_ring()
1432 op32_tx_resume(ring); in b43legacy_dma_tx_resume_ring()