Lines matching references to "ring" in the b43legacy DMA code (dma.c).
Matches are grouped by function below; the left-hand number is the line
number in dma.c, and each group notes whether "ring" is a function
argument or a local variable there.
op32_idx2desc(), ring as argument:
  32  struct b43legacy_dmadesc32 *op32_idx2desc(struct b43legacy_dmaring *ring,
  38  *meta = &(ring->meta[slot]);
  39  desc = ring->descbase;
op32_fill_descriptor(), ring as argument:
  45  static void op32_fill_descriptor(struct b43legacy_dmaring *ring,
  50  struct b43legacy_dmadesc32 *descbase = ring->descbase;
  57  B43legacy_WARN_ON(!(slot >= 0 && slot < ring->nr_slots));
  62  addr |= ring->dev->dma.translation;
  63  ctl = (bufsize - ring->frameoffset)
  65  if (slot == ring->nr_slots - 1)
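The op32_fill_descriptor() fragments show the 32-bit descriptor being packed: the DMA address is OR-ed with a card-specific translation constant, the control word carries the byte count (the RX path subtracts the frame offset from the buffer size first), and the final slot is flagged as the end of the descriptor table. A minimal standalone sketch of that packing; the bit masks are stand-ins for the real B43legacy_DMA32_DCTL_* constants, which this listing does not show:

#include <stdint.h>

/* Stand-ins for the real B43legacy_DMA32_DCTL_* control-word masks
 * (the values here are assumptions for illustration only). */
#define DCTL_BYTECNT    0x00001FFFu  /* buffer byte count                */
#define DCTL_DTABLEEND  0x10000000u  /* last descriptor in the table     */
#define DCTL_IRQ        0x20000000u  /* raise an interrupt on completion */
#define DCTL_FRAMESTART 0x80000000u  /* first descriptor of a frame      */

struct dmadesc32 {
        uint32_t control;
        uint32_t address;
};

static void fill_descriptor32(struct dmadesc32 *desc, uint32_t dmaaddr,
                              uint32_t translation, uint32_t bytecnt,
                              int is_last_slot, int start, int irq)
{
        uint32_t ctl = bytecnt & DCTL_BYTECNT;

        if (is_last_slot)
                ctl |= DCTL_DTABLEEND;  /* the ring wraps after this slot */
        if (start)
                ctl |= DCTL_FRAMESTART;
        if (irq)
                ctl |= DCTL_IRQ;
        /* The card addresses host memory through a translation window,
         * hence the OR with the translation constant (line 62 above). */
        desc->address = dmaaddr | translation;
        desc->control = ctl;
}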
op32_poke_tx(), ring as argument:
  80  static void op32_poke_tx(struct b43legacy_dmaring *ring, int slot)
  82  b43legacy_dma_write(ring, B43legacy_DMA32_TXINDEX,
op32_tx_suspend(), ring as argument:
  86  static void op32_tx_suspend(struct b43legacy_dmaring *ring)
  88  b43legacy_dma_write(ring, B43legacy_DMA32_TXCTL,
  89  b43legacy_dma_read(ring, B43legacy_DMA32_TXCTL)

op32_tx_resume(), ring as argument:
  93  static void op32_tx_resume(struct b43legacy_dmaring *ring)
  95  b43legacy_dma_write(ring, B43legacy_DMA32_TXCTL,
  96  b43legacy_dma_read(ring, B43legacy_DMA32_TXCTL)
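op32_tx_suspend() and op32_tx_resume() are read-modify-write updates of the TX control register; the listing elides the bit that gets set or cleared. A sketch of the pattern, with a hypothetical suspend bit and dma_read()/dma_write() helpers standing in for b43legacy_dma_read()/b43legacy_dma_write():

#include <stdint.h>

#define DMA32_TXCTL      0x00    /* TX control register (offset assumed) */
#define DMA32_TXSUSPEND  0x0002  /* hypothetical suspend-request bit     */

struct dmaring;                  /* opaque here */
uint32_t dma_read(struct dmaring *ring, uint16_t offset);
void dma_write(struct dmaring *ring, uint16_t offset, uint32_t value);

static void tx_suspend(struct dmaring *ring)
{
        dma_write(ring, DMA32_TXCTL,
                  dma_read(ring, DMA32_TXCTL) | DMA32_TXSUSPEND);
}

static void tx_resume(struct dmaring *ring)
{
        dma_write(ring, DMA32_TXCTL,
                  dma_read(ring, DMA32_TXCTL) & ~DMA32_TXSUSPEND);
}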
op32_get_current_rxslot(), ring as argument:
  100  static int op32_get_current_rxslot(struct b43legacy_dmaring *ring)
  104  val = b43legacy_dma_read(ring, B43legacy_DMA32_RXSTATUS);

op32_set_current_rxslot(), ring as argument:
  110  static void op32_set_current_rxslot(struct b43legacy_dmaring *ring,
  113  b43legacy_dma_write(ring, B43legacy_DMA32_RXINDEX,
free_slots(), ring as argument:
  117  static inline int free_slots(struct b43legacy_dmaring *ring)
  119  return (ring->nr_slots - ring->used_slots);

next_slot(), ring as argument:
  122  static inline int next_slot(struct b43legacy_dmaring *ring, int slot)
  124  B43legacy_WARN_ON(!(slot >= -1 && slot <= ring->nr_slots - 1));
  125  if (slot == ring->nr_slots - 1)

prev_slot(), ring as argument:
  130  static inline int prev_slot(struct b43legacy_dmaring *ring, int slot)
  132  B43legacy_WARN_ON(!(slot >= 0 && slot <= ring->nr_slots - 1));
  134  return ring->nr_slots - 1;
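free_slots(), next_slot() and prev_slot() implement the usual ring-buffer index arithmetic; note that next_slot() accepts -1, which the TX path uses to mean "ring is empty, start at slot 0". Restated as standalone C:

#include <assert.h>

/* Ring-index arithmetic matching the fragments above. next_slot()
 * accepts -1 because an empty TX ring has current_slot == -1 and
 * must advance to slot 0. */
static int next_slot(int slot, int nr_slots)
{
        assert(slot >= -1 && slot <= nr_slots - 1);
        if (slot == nr_slots - 1)
                return 0;               /* wrap around */
        return slot + 1;
}

static int prev_slot(int slot, int nr_slots)
{
        assert(slot >= 0 && slot <= nr_slots - 1);
        if (slot == 0)
                return nr_slots - 1;    /* wrap around */
        return slot - 1;
}

static int free_slots(int nr_slots, int used_slots)
{
        return nr_slots - used_slots;
}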
update_max_used_slots(), ring as argument:
  139  static void update_max_used_slots(struct b43legacy_dmaring *ring,
  142  if (current_used_slots <= ring->max_used_slots)
  144  ring->max_used_slots = current_used_slots;
  145  if (b43legacy_debug(ring->dev, B43legacy_DBG_DMAVERBOSE))
  146  b43legacydbg(ring->dev->wl,
  148  ring->max_used_slots,
  149  ring->tx ? "TX" : "RX",
  150  ring->index);

update_max_used_slots(), ring as argument (second, presumably non-debug, definition):
  154  void update_max_used_slots(struct b43legacy_dmaring *ring,
request_slot(), ring as argument:
  161  int request_slot(struct b43legacy_dmaring *ring)
  165  B43legacy_WARN_ON(!ring->tx);
  166  B43legacy_WARN_ON(ring->stopped);
  167  B43legacy_WARN_ON(free_slots(ring) == 0);
  169  slot = next_slot(ring, ring->current_slot);
  170  ring->current_slot = slot;
  171  ring->used_slots++;
  173  update_max_used_slots(ring, ring->used_slots);
priority_to_txring(), ring as local:
  183  struct b43legacy_dmaring *ring;
  194  ring = dev->dma.tx_ring3;
  197  ring = dev->dma.tx_ring2;
  200  ring = dev->dma.tx_ring1;
  203  ring = dev->dma.tx_ring0;
  206  ring = dev->dma.tx_ring4;
  209  ring = dev->dma.tx_ring5;
  213  return ring;
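priority_to_txring() picks one of six TX rings from a mac80211 queue priority; the ordering of the assignments above suggests priority 0 (highest) lands on tx_ring3 and priority 3 on tx_ring0, with rings 4 and 5 serving the remaining classes. A sketch of such a switch; the exact case labels are an assumption, not confirmed by the listing:

struct ring;    /* opaque */

struct dma {    /* the six TX controllers from the listing */
        struct ring *tx_ring0, *tx_ring1, *tx_ring2,
                    *tx_ring3, *tx_ring4, *tx_ring5;
};

/* Assumed mapping from an 802.11 queue priority (0 = highest)
 * to a TX ring; only the ring3..ring5 ordering is visible above. */
static struct ring *priority_to_txring(struct dma *dma, int queue_priority)
{
        switch (queue_priority) {
        case 0:  return dma->tx_ring3;
        case 1:  return dma->tx_ring2;
        case 2:  return dma->tx_ring1;
        case 3:  return dma->tx_ring0;
        case 4:  return dma->tx_ring4;
        case 5:  return dma->tx_ring5;
        default: return dma->tx_ring1; /* conservative fallback */
        }
}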
map_descbuffer(), ring as argument:
  234  dma_addr_t map_descbuffer(struct b43legacy_dmaring *ring,
  242  dmaaddr = dma_map_single(ring->dev->dev->dma_dev,
  246  dmaaddr = dma_map_single(ring->dev->dev->dma_dev,

unmap_descbuffer(), ring as argument:
  254  void unmap_descbuffer(struct b43legacy_dmaring *ring,
  260  dma_unmap_single(ring->dev->dev->dma_dev,
  264  dma_unmap_single(ring->dev->dev->dma_dev,
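map_descbuffer() and unmap_descbuffer() are thin wrappers over the streaming DMA API; the two dma_map_single() calls above correspond to the two mapping directions. A kernel-style sketch of such a wrapper (the real functions take the ring and derive ring->dev->dev->dma_dev from it):

#include <linux/dma-mapping.h>

/* TX buffers are read by the device, RX buffers are written by it,
 * so the mapping direction follows the ring's direction. */
static dma_addr_t map_descbuffer(struct device *dma_dev, void *buf,
                                 size_t len, bool tx)
{
        return dma_map_single(dma_dev, buf, len,
                              tx ? DMA_TO_DEVICE : DMA_FROM_DEVICE);
}

static void unmap_descbuffer(struct device *dma_dev, dma_addr_t addr,
                             size_t len, bool tx)
{
        dma_unmap_single(dma_dev, addr, len,
                         tx ? DMA_TO_DEVICE : DMA_FROM_DEVICE);
}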
sync_descbuffer_for_cpu(), ring as argument:
  270  void sync_descbuffer_for_cpu(struct b43legacy_dmaring *ring,
  274  B43legacy_WARN_ON(ring->tx);
  276  dma_sync_single_for_cpu(ring->dev->dev->dma_dev,

sync_descbuffer_for_device(), ring as argument:
  281  void sync_descbuffer_for_device(struct b43legacy_dmaring *ring,
  285  B43legacy_WARN_ON(ring->tx);
  287  dma_sync_single_for_device(ring->dev->dev->dma_dev,

free_descriptor_buffer(), ring as argument:
  292  void free_descriptor_buffer(struct b43legacy_dmaring *ring,
alloc_ringmemory(), ring as argument:
  305  static int alloc_ringmemory(struct b43legacy_dmaring *ring)
  308  ring->descbase = dma_alloc_coherent(ring->dev->dev->dma_dev,
  310  &(ring->dmabase), GFP_KERNEL);
  311  if (!ring->descbase)

free_ringmemory(), ring as argument:
  317  static void free_ringmemory(struct b43legacy_dmaring *ring)
  319  dma_free_coherent(ring->dev->dev->dma_dev, B43legacy_DMA_RINGMEMSIZE,
  320  ring->descbase, ring->dmabase);
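alloc_ringmemory()/free_ringmemory() place the descriptor table in coherent DMA memory, so descriptor writes need no explicit sync calls. A sketch of the pair; the one-page size is an assumption standing in for B43legacy_DMA_RINGMEMSIZE:

#include <linux/dma-mapping.h>

#define RINGMEMSIZE 4096        /* assumed value of B43legacy_DMA_RINGMEMSIZE */

/* The descriptor table lives in coherent DMA memory: CPU stores to
 * descriptors are visible to the device without explicit syncs. */
static void *alloc_ringmemory(struct device *dma_dev, dma_addr_t *dmabase)
{
        return dma_alloc_coherent(dma_dev, RINGMEMSIZE, dmabase, GFP_KERNEL);
}

static void free_ringmemory(struct device *dma_dev, void *descbase,
                            dma_addr_t dmabase)
{
        dma_free_coherent(dma_dev, RINGMEMSIZE, descbase, dmabase);
}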
b43legacy_dma_mapping_error(), ring as argument:
  398  static bool b43legacy_dma_mapping_error(struct b43legacy_dmaring *ring,
  403  if (unlikely(dma_mapping_error(ring->dev->dev->dma_dev, addr)))
  406  switch (ring->type) {
  422  unmap_descbuffer(ring, addr, buffersize, dma_to_device);
setup_rx_descbuffer(), ring as argument:
  427  static int setup_rx_descbuffer(struct b43legacy_dmaring *ring,
  437  B43legacy_WARN_ON(ring->tx);
  439  skb = __dev_alloc_skb(ring->rx_buffersize, gfp_flags);
  442  dmaaddr = map_descbuffer(ring, skb->data,
  443  ring->rx_buffersize, 0);
  444  if (b43legacy_dma_mapping_error(ring, dmaaddr, ring->rx_buffersize, 0)) {
  450  skb = __dev_alloc_skb(ring->rx_buffersize, gfp_flags);
  453  dmaaddr = map_descbuffer(ring, skb->data,
  454  ring->rx_buffersize, 0);
  457  if (b43legacy_dma_mapping_error(ring, dmaaddr, ring->rx_buffersize, 0)) {
  464  op32_fill_descriptor(ring, desc, dmaaddr, ring->rx_buffersize, 0, 0, 0);
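setup_rx_descbuffer() allocates an skb and maps it for the device; the duplicated alloc/map/error-check sequence in the listing indicates one retry when the first mapping is rejected, presumably re-allocating from ZONE_DMA so the buffer falls inside the card's address range. A kernel-style sketch of that shape (the helper name and the GFP_DMA fallback are assumptions):

#include <linux/skbuff.h>
#include <linux/dma-mapping.h>

static int setup_rx_buffer(struct device *dma_dev, size_t bufsize,
                           gfp_t gfp_flags, struct sk_buff **skbp,
                           dma_addr_t *addrp)
{
        struct sk_buff *skb = __dev_alloc_skb(bufsize, gfp_flags);
        dma_addr_t addr;

        if (!skb)
                return -ENOMEM;
        addr = dma_map_single(dma_dev, skb->data, bufsize, DMA_FROM_DEVICE);
        if (dma_mapping_error(dma_dev, addr)) {
                /* Assumed fallback: retry from ZONE_DMA so the buffer
                 * lands below the device's addressing limit. */
                dev_kfree_skb_any(skb);
                skb = __dev_alloc_skb(bufsize, gfp_flags | GFP_DMA);
                if (!skb)
                        return -ENOMEM;
                addr = dma_map_single(dma_dev, skb->data, bufsize,
                                      DMA_FROM_DEVICE);
                if (dma_mapping_error(dma_dev, addr)) {
                        dev_kfree_skb_any(skb);
                        return -EIO;
                }
        }
        *skbp = skb;
        *addrp = addr;
        return 0;
}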
alloc_initial_descbuffers(), ring as argument:
  477  static int alloc_initial_descbuffers(struct b43legacy_dmaring *ring)
  484  for (i = 0; i < ring->nr_slots; i++) {
  485  desc = op32_idx2desc(ring, i, &meta);
  487  err = setup_rx_descbuffer(ring, desc, meta, GFP_KERNEL);
  489  b43legacyerr(ring->dev->wl,
  495  ring->used_slots = ring->nr_slots;
  502  desc = op32_idx2desc(ring, i, &meta);
  504  unmap_descbuffer(ring, meta->dmaaddr, ring->rx_buffersize, 0);
dmacontroller_setup(), ring as argument:
  514  static int dmacontroller_setup(struct b43legacy_dmaring *ring)
  519  u32 trans = ring->dev->dma.translation;
  520  u32 ringbase = (u32)(ring->dmabase);
  522  if (ring->tx) {
  528  b43legacy_dma_write(ring, B43legacy_DMA32_TXCTL, value);
  529  b43legacy_dma_write(ring, B43legacy_DMA32_TXRING,
  533  err = alloc_initial_descbuffers(ring);
  539  value = (ring->frameoffset <<
  544  b43legacy_dma_write(ring, B43legacy_DMA32_RXCTL, value);
  545  b43legacy_dma_write(ring, B43legacy_DMA32_RXRING,
  548  b43legacy_dma_write(ring, B43legacy_DMA32_RXINDEX, 200);
dmacontroller_cleanup(), ring as argument:
  556  static void dmacontroller_cleanup(struct b43legacy_dmaring *ring)
  558  if (ring->tx) {
  559  b43legacy_dmacontroller_tx_reset(ring->dev, ring->mmio_base,
  560  ring->type);
  561  b43legacy_dma_write(ring, B43legacy_DMA32_TXRING, 0);
  563  b43legacy_dmacontroller_rx_reset(ring->dev, ring->mmio_base,
  564  ring->type);
  565  b43legacy_dma_write(ring, B43legacy_DMA32_RXRING, 0);
free_all_descbuffers(), ring as argument:
  569  static void free_all_descbuffers(struct b43legacy_dmaring *ring)
  574  if (!ring->used_slots)
  576  for (i = 0; i < ring->nr_slots; i++) {
  577  op32_idx2desc(ring, i, &meta);
  580  B43legacy_WARN_ON(!ring->tx);
  583  if (ring->tx)
  584  unmap_descbuffer(ring, meta->dmaaddr,
  587  unmap_descbuffer(ring, meta->dmaaddr,
  588  ring->rx_buffersize, 0);
  589  free_descriptor_buffer(ring, meta, 0);
b43legacy_setup_dmaring(), ring as local:
  616  struct b43legacy_dmaring *ring;
  621  ring = kzalloc(sizeof(*ring), GFP_KERNEL);
  622  if (!ring)
  624  ring->type = type;
  625  ring->dev = dev;
  631  ring->meta = kcalloc(nr_slots, sizeof(struct b43legacy_dmadesc_meta),
  633  if (!ring->meta)
  636  ring->txhdr_cache = kcalloc(nr_slots,
  639  if (!ring->txhdr_cache)
  643  dma_test = dma_map_single(dev->dev->dma_dev, ring->txhdr_cache,
  647  if (b43legacy_dma_mapping_error(ring, dma_test,
  650  kfree(ring->txhdr_cache);
  651  ring->txhdr_cache = kcalloc(nr_slots,
  654  if (!ring->txhdr_cache)
  658  ring->txhdr_cache,
  662  if (b43legacy_dma_mapping_error(ring, dma_test,
  672  ring->nr_slots = nr_slots;
  673  ring->mmio_base = b43legacy_dmacontroller_base(type, controller_index);
  674  ring->index = controller_index;
  676  ring->tx = true;
  677  ring->current_slot = -1;
  679  if (ring->index == 0) {
  680  ring->rx_buffersize = B43legacy_DMA0_RX_BUFFERSIZE;
  681  ring->frameoffset = B43legacy_DMA0_RX_FRAMEOFFSET;
  682  } else if (ring->index == 3) {
  683  ring->rx_buffersize = B43legacy_DMA3_RX_BUFFERSIZE;
  684  ring->frameoffset = B43legacy_DMA3_RX_FRAMEOFFSET;
  689  ring->last_injected_overflow = jiffies;
  692  err = alloc_ringmemory(ring);
  695  err = dmacontroller_setup(ring);
  700  return ring;
  703  free_ringmemory(ring);
  705  kfree(ring->txhdr_cache);
  707  kfree(ring->meta);
  709  kfree(ring);
  710  ring = NULL;
b43legacy_destroy_dmaring(), ring as argument:
  715  static void b43legacy_destroy_dmaring(struct b43legacy_dmaring *ring)
  717  if (!ring)
  720  b43legacydbg(ring->dev->wl, "DMA-%u 0x%04X (%s) max used slots:"
  721  " %d/%d\n", (unsigned int)(ring->type), ring->mmio_base,
  722  (ring->tx) ? "TX" : "RX", ring->max_used_slots,
  723  ring->nr_slots);
  727  dmacontroller_cleanup(ring);
  728  free_all_descbuffers(ring);
  729  free_ringmemory(ring);
  731  kfree(ring->txhdr_cache);
  732  kfree(ring->meta);
  733  kfree(ring);
b43legacy_dma_init(), ring as local:
  766  struct b43legacy_dmaring *ring;
  787  ring = b43legacy_setup_dmaring(dev, 0, 1, type);
  788  if (!ring)
  790  dma->tx_ring0 = ring;
  792  ring = b43legacy_setup_dmaring(dev, 1, 1, type);
  793  if (!ring)
  795  dma->tx_ring1 = ring;
  797  ring = b43legacy_setup_dmaring(dev, 2, 1, type);
  798  if (!ring)
  800  dma->tx_ring2 = ring;
  802  ring = b43legacy_setup_dmaring(dev, 3, 1, type);
  803  if (!ring)
  805  dma->tx_ring3 = ring;
  807  ring = b43legacy_setup_dmaring(dev, 4, 1, type);
  808  if (!ring)
  810  dma->tx_ring4 = ring;
  812  ring = b43legacy_setup_dmaring(dev, 5, 1, type);
  813  if (!ring)
  815  dma->tx_ring5 = ring;
  818  ring = b43legacy_setup_dmaring(dev, 0, 0, type);
  819  if (!ring)
  821  dma->rx_ring0 = ring;
  824  ring = b43legacy_setup_dmaring(dev, 3, 0, type);
  825  if (!ring)
  827  dma->rx_ring3 = ring;
generate_cookie(), ring as argument:
  860  static u16 generate_cookie(struct b43legacy_dmaring *ring,
  871  switch (ring->index) {

parse_cookie(), ring as local:
  903  struct b43legacy_dmaring *ring = NULL;
  907  ring = dma->tx_ring0;
  910  ring = dma->tx_ring1;
  913  ring = dma->tx_ring2;
  916  ring = dma->tx_ring3;
  919  ring = dma->tx_ring4;
  922  ring = dma->tx_ring5;
  928  B43legacy_WARN_ON(!(ring && *slot >= 0 && *slot < ring->nr_slots));
  930  return ring;
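generate_cookie() and parse_cookie() round-trip a 16-bit TX cookie between the driver and the card's TX-status reports: a switch on ring->index picks a controller ID, and parse_cookie() switches back to the ring and recovers the slot (which the WARN_ON above range-checks). A standalone sketch of one plausible packing: 4-bit ring ID in the top nibble, 12-bit slot below; the concrete ID values are assumptions:

#include <assert.h>
#include <stdint.h>

/* Assumed packing: upper 4 bits select the DMA controller, the low
 * 12 bits carry the slot; the 0xA base keeps the cookie nonzero. */
static uint16_t generate_cookie(int ring_index, int slot)
{
        assert(ring_index >= 0 && ring_index <= 5);
        assert(slot >= 0 && slot <= 0x0FFF);
        return (uint16_t)(((ring_index + 0xA) << 12) | slot);
}

static int parse_cookie(uint16_t cookie, int *slot)
{
        *slot = cookie & 0x0FFF;
        return (cookie >> 12) - 0xA;    /* recover the ring index */
}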
dma_tx_fragment(), ring as argument:
  933  static int dma_tx_fragment(struct b43legacy_dmaring *ring,
  949  old_top_slot = ring->current_slot;
  950  old_used_slots = ring->used_slots;
  953  slot = request_slot(ring);
  954  desc = op32_idx2desc(ring, slot, &meta_hdr);
  957  header = &(ring->txhdr_cache[slot * sizeof(
  959  err = b43legacy_generate_txhdr(ring->dev, header,
  961  generate_cookie(ring, slot));
  963  ring->current_slot = old_top_slot;
  964  ring->used_slots = old_used_slots;
  968  meta_hdr->dmaaddr = map_descbuffer(ring, (unsigned char *)header,
  970  if (b43legacy_dma_mapping_error(ring, meta_hdr->dmaaddr,
  972  ring->current_slot = old_top_slot;
  973  ring->used_slots = old_used_slots;
  976  op32_fill_descriptor(ring, desc, meta_hdr->dmaaddr,
  980  slot = request_slot(ring);
  981  desc = op32_idx2desc(ring, slot, &meta);
  987  meta->dmaaddr = map_descbuffer(ring, skb->data, skb->len, 1);
  989  if (b43legacy_dma_mapping_error(ring, meta->dmaaddr, skb->len, 1)) {
  992  ring->current_slot = old_top_slot;
  993  ring->used_slots = old_used_slots;
 1008  meta->dmaaddr = map_descbuffer(ring, skb->data, skb->len, 1);
 1009  if (b43legacy_dma_mapping_error(ring, meta->dmaaddr, skb->len, 1)) {
 1010  ring->current_slot = old_top_slot;
 1011  ring->used_slots = old_used_slots;
 1017  op32_fill_descriptor(ring, desc, meta->dmaaddr,
 1022  op32_poke_tx(ring, next_slot(ring, slot));
 1028  unmap_descbuffer(ring, meta_hdr->dmaaddr,
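Throughout dma_tx_fragment(), every error path restores current_slot and used_slots from the values saved at entry (old_top_slot/old_used_slots), so a frame that fails header generation or DMA mapping leaves the ring exactly as it found it. The shape of that transaction-style rollback, with a placeholder for the real work:

struct txring {
        int current_slot;
        int used_slots;
};

int claim_slots_and_map(struct txring *ring);  /* placeholder for the real work */

static int tx_fragment(struct txring *ring)
{
        int old_top_slot = ring->current_slot;  /* snapshot before touching */
        int old_used_slots = ring->used_slots;  /* the ring state           */
        int err;

        err = claim_slots_and_map(ring);  /* header slot, data slot, DMA maps */
        if (err) {
                /* Roll back, so the failed frame never becomes
                 * visible to the hardware. */
                ring->current_slot = old_top_slot;
                ring->used_slots = old_used_slots;
                return err;
        }
        return 0;
}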
should_inject_overflow(), ring as argument:
 1034  int should_inject_overflow(struct b43legacy_dmaring *ring)
 1037  if (unlikely(b43legacy_debug(ring->dev,
 1043  next_overflow = ring->last_injected_overflow + HZ;
 1045  ring->last_injected_overflow = jiffies;
 1046  b43legacydbg(ring->dev->wl,
 1048  "DMA controller %d\n", ring->index);
b43legacy_dma_tx(), ring as local:
 1059  struct b43legacy_dmaring *ring;
 1062  ring = priority_to_txring(dev, skb_get_queue_mapping(skb));
 1063  B43legacy_WARN_ON(!ring->tx);
 1065  if (unlikely(ring->stopped)) {
 1075  if (WARN_ON(free_slots(ring) < SLOTS_PER_PACKET)) {
 1084  err = dma_tx_fragment(ring, &skb);
 1095  if ((free_slots(ring) < SLOTS_PER_PACKET) ||
 1096  should_inject_overflow(ring)) {
 1101  ring->stopped = true;
 1104  ring->index);
b43legacy_dma_handle_txstatus(), ring as local:
 1112  struct b43legacy_dmaring *ring;
 1118  ring = parse_cookie(dev, status->cookie, &slot);
 1119  if (unlikely(!ring))
 1121  B43legacy_WARN_ON(!ring->tx);
 1126  firstused = ring->current_slot - ring->used_slots + 1;
 1128  firstused = ring->nr_slots + firstused;
 1135  ring->index, firstused, slot);
 1140  B43legacy_WARN_ON(!(slot >= 0 && slot < ring->nr_slots));
 1141  op32_idx2desc(ring, slot, &meta);
 1144  unmap_descbuffer(ring, meta->dmaaddr,
 1147  unmap_descbuffer(ring, meta->dmaaddr,
 1202  ring->used_slots--;
 1206  slot = next_slot(ring, slot);
 1209  if (ring->stopped) {
 1210  B43legacy_WARN_ON(free_slots(ring) < SLOTS_PER_PACKET);
 1211  ring->stopped = false;
 1214  if (dev->wl->tx_queue_stopped[ring->queue_prio]) {
 1215  dev->wl->tx_queue_stopped[ring->queue_prio] = 0;
 1219  ieee80211_wake_queue(dev->wl->hw, ring->queue_prio);
 1222  ring->index);
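Before walking the completed slots, the handler computes the oldest in-flight slot (lines 1126 and 1128 above): current_slot points at the newest claimed slot, so the first used one is current_slot - used_slots + 1, wrapped by adding nr_slots when the subtraction goes negative. As plain arithmetic:

/* Oldest in-flight slot, given the newest used slot and the count. */
static int first_used_slot(int current_slot, int used_slots, int nr_slots)
{
        int firstused = current_slot - used_slots + 1;

        if (firstused < 0)
                firstused += nr_slots;  /* wrap around the ring */
        return firstused;
}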
dma_rx(), ring as argument:
 1228  static void dma_rx(struct b43legacy_dmaring *ring,
 1239  desc = op32_idx2desc(ring, *slot, &meta);
 1241  sync_descbuffer_for_cpu(ring, meta->dmaaddr, ring->rx_buffersize);
 1244  if (ring->index == 3) {
 1257  b43legacy_handle_hwtxstatus(ring->dev, hw);
 1259  sync_descbuffer_for_device(ring, meta->dmaaddr,
 1260  ring->rx_buffersize);
 1276  sync_descbuffer_for_device(ring, meta->dmaaddr,
 1277  ring->rx_buffersize);
 1281  if (unlikely(len > ring->rx_buffersize)) {
 1291  desc = op32_idx2desc(ring, *slot, &meta);
 1293  sync_descbuffer_for_device(ring, meta->dmaaddr,
 1294  ring->rx_buffersize);
 1295  *slot = next_slot(ring, *slot);
 1297  tmp -= ring->rx_buffersize;
 1301  b43legacyerr(ring->dev->wl, "DMA RX buffer too small "
 1303  len, ring->rx_buffersize, cnt);
 1308  err = setup_rx_descbuffer(ring, desc, meta, GFP_ATOMIC);
 1310  b43legacydbg(ring->dev->wl, "DMA RX: setup_rx_descbuffer()"
 1312  sync_descbuffer_for_device(ring, dmaaddr,
 1313  ring->rx_buffersize);
 1317  unmap_descbuffer(ring, dmaaddr, ring->rx_buffersize, 0);
 1318  skb_put(skb, len + ring->frameoffset);
 1319  skb_pull(skb, ring->frameoffset);
 1321  b43legacy_rx(ring->dev, skb, rxhdr);
b43legacy_dma_rx(), ring as argument:
 1326  void b43legacy_dma_rx(struct b43legacy_dmaring *ring)
 1332  B43legacy_WARN_ON(ring->tx);
 1333  current_slot = op32_get_current_rxslot(ring);
 1335  ring->nr_slots));
 1337  slot = ring->current_slot;
 1338  for (; slot != current_slot; slot = next_slot(ring, slot)) {
 1339  dma_rx(ring, &slot);
 1340  update_max_used_slots(ring, ++used_slots);
 1342  op32_set_current_rxslot(ring, slot);
 1343  ring->current_slot = slot;
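b43legacy_dma_rx() is the RX consumer loop: read the hardware's current slot, process every frame from the driver's cursor up to (but not including) that slot (dma_rx() may advance the slot further for oversized frames), then write the cursor back so the device can refill those slots. A sketch with hypothetical helpers:

struct rxring;
int  hw_read_current_rxslot(struct rxring *ring);
void hw_write_current_rxslot(struct rxring *ring, int slot);
void dma_rx_one(struct rxring *ring, int *slot);  /* may consume >1 slot */
int  ring_next_slot(struct rxring *ring, int slot);
int  ring_cursor(struct rxring *ring);
void ring_set_cursor(struct rxring *ring, int slot);

static void dma_rx_poll(struct rxring *ring)
{
        int hw_slot = hw_read_current_rxslot(ring);
        int slot = ring_cursor(ring);

        for (; slot != hw_slot; slot = ring_next_slot(ring, slot))
                dma_rx_one(ring, &slot);        /* handle one received frame */

        hw_write_current_rxslot(ring, slot);    /* return slots to the device */
        ring_set_cursor(ring, slot);
}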
b43legacy_dma_tx_suspend_ring(), ring as argument:
 1346  static void b43legacy_dma_tx_suspend_ring(struct b43legacy_dmaring *ring)
 1348  B43legacy_WARN_ON(!ring->tx);
 1349  op32_tx_suspend(ring);

b43legacy_dma_tx_resume_ring(), ring as argument:
 1352  static void b43legacy_dma_tx_resume_ring(struct b43legacy_dmaring *ring)
 1354  B43legacy_WARN_ON(!ring->tx);
 1355  op32_tx_resume(ring);