Lines matching refs: ring — identifier cross-reference hits for `ring` in the b43 wireless driver's DMA code (drivers/net/wireless/broadcom/b43/dma.c). Each entry is: <source line number> <matched line> in <enclosing function>().
72 struct b43_dmadesc_generic *op32_idx2desc(struct b43_dmaring *ring, in op32_idx2desc() argument
78 *meta = &(ring->meta[slot]); in op32_idx2desc()
79 desc = ring->descbase; in op32_idx2desc()
85 static void op32_fill_descriptor(struct b43_dmaring *ring, in op32_fill_descriptor() argument
90 struct b43_dmadesc32 *descbase = ring->descbase; in op32_fill_descriptor()
97 B43_WARN_ON(!(slot >= 0 && slot < ring->nr_slots)); in op32_fill_descriptor()
99 addr = b43_dma_address(&ring->dev->dma, dmaaddr, B43_DMA_ADDR_LOW); in op32_fill_descriptor()
100 addrext = b43_dma_address(&ring->dev->dma, dmaaddr, B43_DMA_ADDR_EXT); in op32_fill_descriptor()
103 if (slot == ring->nr_slots - 1) in op32_fill_descriptor()
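
The op32_fill_descriptor() fragments above pack a buffer length, a set of frame flags, and the buffer's DMA address into one 32-bit descriptor; the last slot additionally gets a table-end bit so the engine wraps to slot 0. A minimal user-space sketch of that control-word packing — the DCTL_* bit values and struct desc32 here are illustrative stand-ins for the driver's B43_DMA32_DCTL_* constants and struct b43_dmadesc32, not the real register layout:

    #include <stdint.h>
    #include <stdbool.h>

    /* Hypothetical stand-ins for the B43_DMA32_DCTL_* bits. */
    #define DCTL_BYTECNT    0x00001FFFu   /* buffer length field */
    #define DCTL_TABLEEND   0x10000000u   /* last descriptor in the table */
    #define DCTL_IRQ        0x20000000u   /* interrupt on completion */
    #define DCTL_FRAMEEND   0x40000000u   /* buffer ends a frame */
    #define DCTL_FRAMESTART 0x80000000u   /* buffer starts a frame */

    struct desc32 {
    	uint32_t control;
    	uint32_t address;
    };

    /* Pack one descriptor the way the fragments suggest: length and
     * flags in the control word, the low DMA address word in the
     * address word (the driver also mixes in a bus-translation field
     * and converts to little-endian, both omitted here). */
    static void fill_desc32(struct desc32 *d, int slot, int nr_slots,
    			uint32_t dmaaddr, uint16_t bufsize,
    			bool start, bool end, bool irq)
    {
    	uint32_t ctl = bufsize & DCTL_BYTECNT;

    	if (slot == nr_slots - 1)
    		ctl |= DCTL_TABLEEND;    /* wrap point of the ring */
    	if (start)
    		ctl |= DCTL_FRAMESTART;
    	if (end)
    		ctl |= DCTL_FRAMEEND;
    	if (irq)
    		ctl |= DCTL_IRQ;

    	d->control = ctl;
    	d->address = dmaaddr;
    }
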
118 static void op32_poke_tx(struct b43_dmaring *ring, int slot) in op32_poke_tx() argument
120 b43_dma_write(ring, B43_DMA32_TXINDEX, in op32_poke_tx()
124 static void op32_tx_suspend(struct b43_dmaring *ring) in op32_tx_suspend() argument
126 b43_dma_write(ring, B43_DMA32_TXCTL, b43_dma_read(ring, B43_DMA32_TXCTL) in op32_tx_suspend()
130 static void op32_tx_resume(struct b43_dmaring *ring) in op32_tx_resume() argument
132 b43_dma_write(ring, B43_DMA32_TXCTL, b43_dma_read(ring, B43_DMA32_TXCTL) in op32_tx_resume()
136 static int op32_get_current_rxslot(struct b43_dmaring *ring) in op32_get_current_rxslot() argument
140 val = b43_dma_read(ring, B43_DMA32_RXSTATUS); in op32_get_current_rxslot()
146 static void op32_set_current_rxslot(struct b43_dmaring *ring, int slot) in op32_set_current_rxslot() argument
148 b43_dma_write(ring, B43_DMA32_RXINDEX, in op32_set_current_rxslot()
164 struct b43_dmadesc_generic *op64_idx2desc(struct b43_dmaring *ring, in op64_idx2desc() argument
170 *meta = &(ring->meta[slot]); in op64_idx2desc()
171 desc = ring->descbase; in op64_idx2desc()
177 static void op64_fill_descriptor(struct b43_dmaring *ring, in op64_fill_descriptor() argument
182 struct b43_dmadesc64 *descbase = ring->descbase; in op64_fill_descriptor()
189 B43_WARN_ON(!(slot >= 0 && slot < ring->nr_slots)); in op64_fill_descriptor()
191 addrlo = b43_dma_address(&ring->dev->dma, dmaaddr, B43_DMA_ADDR_LOW); in op64_fill_descriptor()
192 addrhi = b43_dma_address(&ring->dev->dma, dmaaddr, B43_DMA_ADDR_HIGH); in op64_fill_descriptor()
193 addrext = b43_dma_address(&ring->dev->dma, dmaaddr, B43_DMA_ADDR_EXT); in op64_fill_descriptor()
195 if (slot == ring->nr_slots - 1) in op64_fill_descriptor()
213 static void op64_poke_tx(struct b43_dmaring *ring, int slot) in op64_poke_tx() argument
215 b43_dma_write(ring, B43_DMA64_TXINDEX, in op64_poke_tx()
219 static void op64_tx_suspend(struct b43_dmaring *ring) in op64_tx_suspend() argument
221 b43_dma_write(ring, B43_DMA64_TXCTL, b43_dma_read(ring, B43_DMA64_TXCTL) in op64_tx_suspend()
225 static void op64_tx_resume(struct b43_dmaring *ring) in op64_tx_resume() argument
227 b43_dma_write(ring, B43_DMA64_TXCTL, b43_dma_read(ring, B43_DMA64_TXCTL) in op64_tx_resume()
231 static int op64_get_current_rxslot(struct b43_dmaring *ring) in op64_get_current_rxslot() argument
235 val = b43_dma_read(ring, B43_DMA64_RXSTATUS); in op64_get_current_rxslot()
241 static void op64_set_current_rxslot(struct b43_dmaring *ring, int slot) in op64_set_current_rxslot() argument
243 b43_dma_write(ring, B43_DMA64_RXINDEX, in op64_set_current_rxslot()
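
The op64_* group mirrors the op32_* group operation for operation; the rest of the driver reaches them only through a function-pointer table (the ring->ops assignments at lines 861/863 below), so nothing outside these two groups branches on the descriptor width. A reduced sketch of that dispatch pattern — struct dma_ops, pick_ops() and the *_model names are illustrative, and the table is cut down to two entries:

    struct dmaring;                 /* stands in for struct b43_dmaring */

    /* Reduced model of struct b43_dma_ops: one vtable per register
     * layout, filled with the op32_* or op64_* implementations. */
    struct dma_ops {
    	void (*poke_tx)(struct dmaring *ring, int slot);
    	int  (*get_current_rxslot)(struct dmaring *ring);
    };

    extern const struct dma_ops dma32_ops_model, dma64_ops_model;

    /* Mirrors the ring->ops selection in b43_setup_dmaring(). */
    static const struct dma_ops *pick_ops(int is_64bit)
    {
    	return is_64bit ? &dma64_ops_model : &dma32_ops_model;
    }
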
257 static inline int free_slots(struct b43_dmaring *ring) in free_slots() argument
259 return (ring->nr_slots - ring->used_slots); in free_slots()
262 static inline int next_slot(struct b43_dmaring *ring, int slot) in next_slot() argument
264 B43_WARN_ON(!(slot >= -1 && slot <= ring->nr_slots - 1)); in next_slot()
265 if (slot == ring->nr_slots - 1) in next_slot()
270 static inline int prev_slot(struct b43_dmaring *ring, int slot) in prev_slot() argument
272 B43_WARN_ON(!(slot >= 0 && slot <= ring->nr_slots - 1)); in prev_slot()
274 return ring->nr_slots - 1; in prev_slot()
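
free_slots(), next_slot() and prev_slot() are plain modular arithmetic over the descriptor table. A self-contained model of the three helpers (struct ring_model is a cut-down stand-in for the b43_dmaring fields the fragments use; the asserts mirror the B43_WARN_ON range checks):

    #include <assert.h>

    struct ring_model {
    	int nr_slots;    /* total descriptor slots in the ring */
    	int used_slots;  /* slots currently holding buffers */
    };

    static int free_slots(const struct ring_model *r)
    {
    	return r->nr_slots - r->used_slots;
    }

    /* Advance one slot, wrapping at the end of the table.  Slot -1 is
     * the "nothing used yet" state, so next_slot(-1) yields slot 0. */
    static int next_slot(const struct ring_model *r, int slot)
    {
    	assert(slot >= -1 && slot <= r->nr_slots - 1);
    	if (slot == r->nr_slots - 1)
    		return 0;
    	return slot + 1;
    }

    static int prev_slot(const struct ring_model *r, int slot)
    {
    	assert(slot >= 0 && slot <= r->nr_slots - 1);
    	if (slot == 0)
    		return r->nr_slots - 1;
    	return slot - 1;
    }
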
279 static void update_max_used_slots(struct b43_dmaring *ring, in update_max_used_slots() argument
282 if (current_used_slots <= ring->max_used_slots) in update_max_used_slots()
284 ring->max_used_slots = current_used_slots; in update_max_used_slots()
285 if (b43_debug(ring->dev, B43_DBG_DMAVERBOSE)) { in update_max_used_slots()
286 b43dbg(ring->dev->wl, in update_max_used_slots()
288 ring->max_used_slots, in update_max_used_slots()
289 ring->tx ? "TX" : "RX", ring->index); in update_max_used_slots()
294 void update_max_used_slots(struct b43_dmaring *ring, int current_used_slots) in update_max_used_slots() argument
300 static inline int request_slot(struct b43_dmaring *ring) in request_slot() argument
304 B43_WARN_ON(!ring->tx); in request_slot()
305 B43_WARN_ON(ring->stopped); in request_slot()
306 B43_WARN_ON(free_slots(ring) == 0); in request_slot()
308 slot = next_slot(ring, ring->current_slot); in request_slot()
309 ring->current_slot = slot; in request_slot()
310 ring->used_slots++; in request_slot()
312 update_max_used_slots(ring, ring->used_slots); in request_slot()
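
request_slot() combines those helpers: it claims the slot after current_slot, advances the cursor and bumps the usage counter (the caller must have checked free_slots() first, per the B43_WARN_ON at line 306). A sketch building on the ring_model definitions above; tx_ring_model is an illustrative name:

    /* Extends the ring_model sketch above with a TX write cursor
     * (current_slot starts at -1, matching line 866 below). */
    struct tx_ring_model {
    	struct ring_model r;
    	int current_slot;    /* last claimed slot */
    };

    static int request_slot(struct tx_ring_model *t)
    {
    	int slot;

    	assert(free_slots(&t->r) > 0);   /* caller's responsibility */

    	slot = next_slot(&t->r, t->current_slot);
    	t->current_slot = slot;
    	t->r.used_slots++;

    	return slot;
    }
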
347 dma_addr_t map_descbuffer(struct b43_dmaring *ring, in map_descbuffer() argument
353 dmaaddr = dma_map_single(ring->dev->dev->dma_dev, in map_descbuffer()
356 dmaaddr = dma_map_single(ring->dev->dev->dma_dev, in map_descbuffer()
364 void unmap_descbuffer(struct b43_dmaring *ring, in unmap_descbuffer() argument
368 dma_unmap_single(ring->dev->dev->dma_dev, in unmap_descbuffer()
371 dma_unmap_single(ring->dev->dev->dma_dev, in unmap_descbuffer()
377 void sync_descbuffer_for_cpu(struct b43_dmaring *ring, in sync_descbuffer_for_cpu() argument
380 B43_WARN_ON(ring->tx); in sync_descbuffer_for_cpu()
381 dma_sync_single_for_cpu(ring->dev->dev->dma_dev, in sync_descbuffer_for_cpu()
386 void sync_descbuffer_for_device(struct b43_dmaring *ring, in sync_descbuffer_for_device() argument
389 B43_WARN_ON(ring->tx); in sync_descbuffer_for_device()
390 dma_sync_single_for_device(ring->dev->dev->dma_dev, in sync_descbuffer_for_device()
395 void free_descriptor_buffer(struct b43_dmaring *ring, in free_descriptor_buffer() argument
399 if (ring->tx) in free_descriptor_buffer()
400 ieee80211_free_txskb(ring->dev->wl->hw, meta->skb); in free_descriptor_buffer()
407 static int alloc_ringmemory(struct b43_dmaring *ring) in alloc_ringmemory() argument
418 u16 ring_mem_size = (ring->type == B43_DMA_64BIT) ? in alloc_ringmemory()
421 ring->descbase = dma_alloc_coherent(ring->dev->dev->dma_dev, in alloc_ringmemory()
422 ring_mem_size, &(ring->dmabase), in alloc_ringmemory()
424 if (!ring->descbase) in alloc_ringmemory()
430 static void free_ringmemory(struct b43_dmaring *ring) in free_ringmemory() argument
432 u16 ring_mem_size = (ring->type == B43_DMA_64BIT) ? in free_ringmemory()
434 dma_free_coherent(ring->dev->dev->dma_dev, ring_mem_size, in free_ringmemory()
435 ring->descbase, ring->dmabase); in free_ringmemory()
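
alloc_ringmemory()/free_ringmemory() use the kernel's coherent DMA API, so CPU and device share one view of the descriptor table without per-access syncs; the table size depends on whether the 32-bit or 64-bit engine is in use. A hedged kernel-style sketch of the pattern — dma_alloc_coherent()/dma_free_coherent() are the real API visible in the fragments, while struct ring_mem and the *_sketch function names are reduced stand-ins:

    #include <linux/dma-mapping.h>
    #include <linux/errno.h>
    #include <linux/gfp.h>

    struct ring_mem {
    	void       *descbase;  /* CPU address of the descriptor table */
    	dma_addr_t  dmabase;   /* bus address programmed into TXRING/RXRING */
    };

    /* One coherent allocation holds the whole descriptor table. */
    static int alloc_ringmemory_sketch(struct device *dma_dev,
    				   struct ring_mem *m, size_t ring_mem_size)
    {
    	m->descbase = dma_alloc_coherent(dma_dev, ring_mem_size,
    					 &m->dmabase, GFP_KERNEL);
    	if (!m->descbase)
    		return -ENOMEM;
    	return 0;
    }

    static void free_ringmemory_sketch(struct device *dma_dev,
    				   struct ring_mem *m, size_t ring_mem_size)
    {
    	dma_free_coherent(dma_dev, ring_mem_size, m->descbase, m->dmabase);
    }
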
538 static bool b43_dma_mapping_error(struct b43_dmaring *ring, in b43_dma_mapping_error() argument
542 if (unlikely(dma_mapping_error(ring->dev->dev->dma_dev, addr))) in b43_dma_mapping_error()
545 switch (ring->type) { in b43_dma_mapping_error()
565 unmap_descbuffer(ring, addr, buffersize, dma_to_device); in b43_dma_mapping_error()
570 static bool b43_rx_buffer_is_poisoned(struct b43_dmaring *ring, struct sk_buff *skb) in b43_rx_buffer_is_poisoned() argument
572 unsigned char *f = skb->data + ring->frameoffset; in b43_rx_buffer_is_poisoned()
577 static void b43_poison_rx_buffer(struct b43_dmaring *ring, struct sk_buff *skb) in b43_poison_rx_buffer() argument
587 B43_WARN_ON(ring->rx_buffersize < ring->frameoffset + sizeof(struct b43_plcp_hdr6) + 2); in b43_poison_rx_buffer()
588 frame = skb->data + ring->frameoffset; in b43_poison_rx_buffer()
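
The poison scheme writes a known pattern into every fresh RX buffer; if the pattern is still intact when the engine claims a frame arrived, the DMA transfer never actually happened and the buffer is dropped (see line 1616 below). A minimal model consistent with the `== 0xFF` test at line 572 — the 8-byte length and the helper names are illustrative:

    #include <stdbool.h>
    #include <string.h>

    #define POISON_LEN 8   /* bytes covered by the check at line 572 */

    static void poison_rx_buffer(unsigned char *frame)
    {
    	memset(frame, 0xFF, POISON_LEN);
    }

    /* ANDing all eight bytes yields 0xFF only if every byte is still
     * 0xFF, i.e. the device never overwrote the poison pattern. */
    static bool rx_buffer_is_poisoned(const unsigned char *f)
    {
    	return (f[0] & f[1] & f[2] & f[3] & f[4] & f[5] & f[6] & f[7]) == 0xFF;
    }
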
592 static int setup_rx_descbuffer(struct b43_dmaring *ring, in setup_rx_descbuffer() argument
599 B43_WARN_ON(ring->tx); in setup_rx_descbuffer()
601 skb = __dev_alloc_skb(ring->rx_buffersize, gfp_flags); in setup_rx_descbuffer()
604 b43_poison_rx_buffer(ring, skb); in setup_rx_descbuffer()
605 dmaaddr = map_descbuffer(ring, skb->data, ring->rx_buffersize, 0); in setup_rx_descbuffer()
606 if (b43_dma_mapping_error(ring, dmaaddr, ring->rx_buffersize, 0)) { in setup_rx_descbuffer()
612 skb = __dev_alloc_skb(ring->rx_buffersize, gfp_flags); in setup_rx_descbuffer()
615 b43_poison_rx_buffer(ring, skb); in setup_rx_descbuffer()
616 dmaaddr = map_descbuffer(ring, skb->data, in setup_rx_descbuffer()
617 ring->rx_buffersize, 0); in setup_rx_descbuffer()
618 if (b43_dma_mapping_error(ring, dmaaddr, ring->rx_buffersize, 0)) { in setup_rx_descbuffer()
619 b43err(ring->dev->wl, "RX DMA buffer allocation failed\n"); in setup_rx_descbuffer()
627 ring->ops->fill_descriptor(ring, desc, dmaaddr, in setup_rx_descbuffer()
628 ring->rx_buffersize, 0, 0, 0); in setup_rx_descbuffer()
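
setup_rx_descbuffer() allocates a buffer, poisons it, maps it for DMA, and on a mapping failure frees it and retries exactly once from the low-memory (GFP_DMA) pool before giving up (lines 601–619). The control flow, modeled with hypothetical alloc_buf()/map_buf() stubs (the poisoning step is elided here):

    #include <stdbool.h>
    #include <stdlib.h>

    /* Hypothetical stand-ins for skb allocation and DMA mapping. */
    extern unsigned char *alloc_buf(size_t size, bool low_mem);
    extern bool map_buf(unsigned char *buf, size_t size, unsigned long *dma);

    static int setup_rx_buf_model(size_t bufsize, unsigned char **out,
    			      unsigned long *dma)
    {
    	unsigned char *buf = alloc_buf(bufsize, false);

    	if (!buf)
    		return -1;
    	if (!map_buf(buf, bufsize, dma)) {
    		/* Mapping failed: retry once from the low-memory
    		 * pool, mirroring the GFP_DMA fallback. */
    		free(buf);
    		buf = alloc_buf(bufsize, true);
    		if (!buf)
    			return -1;
    		if (!map_buf(buf, bufsize, dma)) {
    			free(buf);
    			return -1;  /* "RX DMA buffer allocation failed" */
    		}
    	}
    	*out = buf;
    	return 0;
    }
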
636 static int alloc_initial_descbuffers(struct b43_dmaring *ring) in alloc_initial_descbuffers() argument
642 for (i = 0; i < ring->nr_slots; i++) { in alloc_initial_descbuffers()
643 desc = ring->ops->idx2desc(ring, i, &meta); in alloc_initial_descbuffers()
645 err = setup_rx_descbuffer(ring, desc, meta, GFP_KERNEL); in alloc_initial_descbuffers()
647 b43err(ring->dev->wl, in alloc_initial_descbuffers()
653 ring->used_slots = ring->nr_slots; in alloc_initial_descbuffers()
660 desc = ring->ops->idx2desc(ring, i, &meta); in alloc_initial_descbuffers()
662 unmap_descbuffer(ring, meta->dmaaddr, ring->rx_buffersize, 0); in alloc_initial_descbuffers()
672 static int dmacontroller_setup(struct b43_dmaring *ring) in dmacontroller_setup() argument
677 bool parity = ring->dev->dma.parity; in dmacontroller_setup()
681 if (ring->tx) { in dmacontroller_setup()
682 if (ring->type == B43_DMA_64BIT) { in dmacontroller_setup()
683 u64 ringbase = (u64) (ring->dmabase); in dmacontroller_setup()
684 addrext = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_EXT); in dmacontroller_setup()
685 addrlo = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_LOW); in dmacontroller_setup()
686 addrhi = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_HIGH); in dmacontroller_setup()
693 b43_dma_write(ring, B43_DMA64_TXCTL, value); in dmacontroller_setup()
694 b43_dma_write(ring, B43_DMA64_TXRINGLO, addrlo); in dmacontroller_setup()
695 b43_dma_write(ring, B43_DMA64_TXRINGHI, addrhi); in dmacontroller_setup()
697 u32 ringbase = (u32) (ring->dmabase); in dmacontroller_setup()
698 addrext = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_EXT); in dmacontroller_setup()
699 addrlo = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_LOW); in dmacontroller_setup()
706 b43_dma_write(ring, B43_DMA32_TXCTL, value); in dmacontroller_setup()
707 b43_dma_write(ring, B43_DMA32_TXRING, addrlo); in dmacontroller_setup()
710 err = alloc_initial_descbuffers(ring); in dmacontroller_setup()
713 if (ring->type == B43_DMA_64BIT) { in dmacontroller_setup()
714 u64 ringbase = (u64) (ring->dmabase); in dmacontroller_setup()
715 addrext = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_EXT); in dmacontroller_setup()
716 addrlo = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_LOW); in dmacontroller_setup()
717 addrhi = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_HIGH); in dmacontroller_setup()
719 value = (ring->frameoffset << B43_DMA64_RXFROFF_SHIFT); in dmacontroller_setup()
725 b43_dma_write(ring, B43_DMA64_RXCTL, value); in dmacontroller_setup()
726 b43_dma_write(ring, B43_DMA64_RXRINGLO, addrlo); in dmacontroller_setup()
727 b43_dma_write(ring, B43_DMA64_RXRINGHI, addrhi); in dmacontroller_setup()
728 b43_dma_write(ring, B43_DMA64_RXINDEX, ring->nr_slots * in dmacontroller_setup()
731 u32 ringbase = (u32) (ring->dmabase); in dmacontroller_setup()
732 addrext = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_EXT); in dmacontroller_setup()
733 addrlo = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_LOW); in dmacontroller_setup()
735 value = (ring->frameoffset << B43_DMA32_RXFROFF_SHIFT); in dmacontroller_setup()
741 b43_dma_write(ring, B43_DMA32_RXCTL, value); in dmacontroller_setup()
742 b43_dma_write(ring, B43_DMA32_RXRING, addrlo); in dmacontroller_setup()
743 b43_dma_write(ring, B43_DMA32_RXINDEX, ring->nr_slots * in dmacontroller_setup()
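
Throughout dmacontroller_setup(), b43_dma_address() splits the ring base address into a low word, a high word, and a small "address extension" field before the pieces are written to the TXRING/RXRING registers. A sketch of that split, assuming an SSB-style layout where the top two bits of an address word carry a routing/translation field — the 0xC0000000 mask and shift of 30 are assumptions for illustration, and the driver's handling of the translation bits is more involved than shown:

    #include <stdint.h>

    #define ADDR_TRANSLATION_MASK  0xC0000000u  /* assumed routing bits */
    #define ADDR_TRANSLATION_SHIFT 30

    /* Split a 64-bit bus address into the values written to the
     * RINGLO/RINGHI registers plus the address-extension field. */
    static void split_dma_address(uint64_t dmaaddr,
    			      uint32_t *lo, uint32_t *hi, uint32_t *ext)
    {
    	*lo  = (uint32_t)dmaaddr;
    	*hi  = (uint32_t)(dmaaddr >> 32) & ~ADDR_TRANSLATION_MASK;
    	*ext = ((uint32_t)(dmaaddr >> 32) & ADDR_TRANSLATION_MASK)
    	       >> ADDR_TRANSLATION_SHIFT;
    }
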
753 static void dmacontroller_cleanup(struct b43_dmaring *ring) in dmacontroller_cleanup() argument
755 if (ring->tx) { in dmacontroller_cleanup()
756 b43_dmacontroller_tx_reset(ring->dev, ring->mmio_base, in dmacontroller_cleanup()
757 ring->type); in dmacontroller_cleanup()
758 if (ring->type == B43_DMA_64BIT) { in dmacontroller_cleanup()
759 b43_dma_write(ring, B43_DMA64_TXRINGLO, 0); in dmacontroller_cleanup()
760 b43_dma_write(ring, B43_DMA64_TXRINGHI, 0); in dmacontroller_cleanup()
762 b43_dma_write(ring, B43_DMA32_TXRING, 0); in dmacontroller_cleanup()
764 b43_dmacontroller_rx_reset(ring->dev, ring->mmio_base, in dmacontroller_cleanup()
765 ring->type); in dmacontroller_cleanup()
766 if (ring->type == B43_DMA_64BIT) { in dmacontroller_cleanup()
767 b43_dma_write(ring, B43_DMA64_RXRINGLO, 0); in dmacontroller_cleanup()
768 b43_dma_write(ring, B43_DMA64_RXRINGHI, 0); in dmacontroller_cleanup()
770 b43_dma_write(ring, B43_DMA32_RXRING, 0); in dmacontroller_cleanup()
774 static void free_all_descbuffers(struct b43_dmaring *ring) in free_all_descbuffers() argument
779 if (!ring->used_slots) in free_all_descbuffers()
781 for (i = 0; i < ring->nr_slots; i++) { in free_all_descbuffers()
783 ring->ops->idx2desc(ring, i, &meta); in free_all_descbuffers()
786 B43_WARN_ON(!ring->tx); in free_all_descbuffers()
789 if (ring->tx) { in free_all_descbuffers()
790 unmap_descbuffer(ring, meta->dmaaddr, in free_all_descbuffers()
793 unmap_descbuffer(ring, meta->dmaaddr, in free_all_descbuffers()
794 ring->rx_buffersize, 0); in free_all_descbuffers()
796 free_descriptor_buffer(ring, meta); in free_all_descbuffers()
837 struct b43_dmaring *ring; in b43_setup_dmaring() local
841 ring = kzalloc(sizeof(*ring), GFP_KERNEL); in b43_setup_dmaring()
842 if (!ring) in b43_setup_dmaring()
845 ring->nr_slots = B43_RXRING_SLOTS; in b43_setup_dmaring()
847 ring->nr_slots = B43_TXRING_SLOTS; in b43_setup_dmaring()
849 ring->meta = kcalloc(ring->nr_slots, sizeof(struct b43_dmadesc_meta), in b43_setup_dmaring()
851 if (!ring->meta) in b43_setup_dmaring()
853 for (i = 0; i < ring->nr_slots; i++) in b43_setup_dmaring()
854 	ring->meta[i].skb = B43_DMA_PTR_POISON; in b43_setup_dmaring()
856 ring->type = type; in b43_setup_dmaring()
857 ring->dev = dev; in b43_setup_dmaring()
858 ring->mmio_base = b43_dmacontroller_base(type, controller_index); in b43_setup_dmaring()
859 ring->index = controller_index; in b43_setup_dmaring()
861 ring->ops = &dma64_ops; in b43_setup_dmaring()
863 ring->ops = &dma32_ops; in b43_setup_dmaring()
865 ring->tx = true; in b43_setup_dmaring()
866 ring->current_slot = -1; in b43_setup_dmaring()
868 if (ring->index == 0) { in b43_setup_dmaring()
871 ring->rx_buffersize = B43_DMA0_RX_FW598_BUFSIZE; in b43_setup_dmaring()
872 ring->frameoffset = B43_DMA0_RX_FW598_FO; in b43_setup_dmaring()
876 ring->rx_buffersize = B43_DMA0_RX_FW351_BUFSIZE; in b43_setup_dmaring()
877 ring->frameoffset = B43_DMA0_RX_FW351_FO; in b43_setup_dmaring()
884 ring->last_injected_overflow = jiffies; in b43_setup_dmaring()
891 ring->txhdr_cache = kcalloc(ring->nr_slots / TX_SLOTS_PER_FRAME, in b43_setup_dmaring()
894 if (!ring->txhdr_cache) in b43_setup_dmaring()
899 ring->txhdr_cache, in b43_setup_dmaring()
903 if (b43_dma_mapping_error(ring, dma_test, in b43_setup_dmaring()
906 kfree(ring->txhdr_cache); in b43_setup_dmaring()
907 ring->txhdr_cache = kcalloc(ring->nr_slots / TX_SLOTS_PER_FRAME, in b43_setup_dmaring()
910 if (!ring->txhdr_cache) in b43_setup_dmaring()
914 ring->txhdr_cache, in b43_setup_dmaring()
918 if (b43_dma_mapping_error(ring, dma_test, in b43_setup_dmaring()
932 err = alloc_ringmemory(ring); in b43_setup_dmaring()
935 err = dmacontroller_setup(ring); in b43_setup_dmaring()
940 return ring; in b43_setup_dmaring()
943 free_ringmemory(ring); in b43_setup_dmaring()
945 kfree(ring->txhdr_cache); in b43_setup_dmaring()
947 kfree(ring->meta); in b43_setup_dmaring()
949 kfree(ring); in b43_setup_dmaring()
950 ring = NULL; in b43_setup_dmaring()
966 static void b43_destroy_dmaring(struct b43_dmaring *ring, in b43_destroy_dmaring() argument
969 if (!ring) in b43_destroy_dmaring()
975 u64 failed_packets = ring->nr_failed_tx_packets; in b43_destroy_dmaring()
976 u64 succeed_packets = ring->nr_succeed_tx_packets; in b43_destroy_dmaring()
983 average_tries = divide(ring->nr_total_packet_tries * 100, nr_packets); in b43_destroy_dmaring()
985 b43dbg(ring->dev->wl, "DMA-%u %s: " in b43_destroy_dmaring()
988 (unsigned int)(ring->type), ringname, in b43_destroy_dmaring()
989 ring->max_used_slots, in b43_destroy_dmaring()
990 ring->nr_slots, in b43_destroy_dmaring()
1003 dmacontroller_cleanup(ring); in b43_destroy_dmaring()
1004 free_all_descbuffers(ring); in b43_destroy_dmaring()
1005 free_ringmemory(ring); in b43_destroy_dmaring()
1007 kfree(ring->txhdr_cache); in b43_destroy_dmaring()
1008 kfree(ring->meta); in b43_destroy_dmaring()
1009 kfree(ring); in b43_destroy_dmaring()
1012 #define destroy_ring(dma, ring) do { \ argument
1013 b43_destroy_dmaring((dma)->ring, __stringify(ring)); \
1014 (dma)->ring = NULL; \
1136 static u16 generate_cookie(struct b43_dmaring *ring, int slot) in generate_cookie() argument
1148 cookie = (((u16)ring->index + 1) << 12); in generate_cookie()
1160 struct b43_dmaring *ring = NULL; in parse_cookie() local
1164 ring = dma->tx_ring_AC_BK; in parse_cookie()
1167 ring = dma->tx_ring_AC_BE; in parse_cookie()
1170 ring = dma->tx_ring_AC_VI; in parse_cookie()
1173 ring = dma->tx_ring_AC_VO; in parse_cookie()
1176 ring = dma->tx_ring_mcast; in parse_cookie()
1180 if (unlikely(!ring || *slot < 0 || *slot >= ring->nr_slots)) { in parse_cookie()
1186 return ring; in parse_cookie()
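
generate_cookie()/parse_cookie() round-trip a (ring, slot) pair through the 16-bit cookie the firmware echoes back in TX status reports: DMA controller index plus one in the upper four bits (line 1148), slot number in the lower twelve. A self-contained model of the encode/decode (the bounds check against nr_slots at line 1180 is reduced to a range test here):

    #include <assert.h>
    #include <stdint.h>

    static uint16_t make_cookie(int ring_index, int slot)
    {
    	assert((slot & ~0x0FFF) == 0);    /* slot must fit in 12 bits */
    	return (uint16_t)(((ring_index + 1) << 12) | slot);
    }

    /* Returns the ring index (0..4 for the BK/BE/VI/VO/mcast rings in
     * the switch above) and the slot, or -1 for a corrupt cookie. */
    static int parse_cookie_model(uint16_t cookie, int *slot)
    {
    	int ring_index = (cookie >> 12) - 1;

    	*slot = cookie & 0x0FFF;
    	if (ring_index < 0 || ring_index > 4)
    		return -1;
    	return ring_index;
    }
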
1189 static int dma_tx_fragment(struct b43_dmaring *ring, in dma_tx_fragment() argument
1192 const struct b43_dma_ops *ops = ring->ops; in dma_tx_fragment()
1202 size_t hdrsize = b43_txhdr_size(ring->dev); in dma_tx_fragment()
1209 old_top_slot = ring->current_slot; in dma_tx_fragment()
1210 old_used_slots = ring->used_slots; in dma_tx_fragment()
1213 slot = request_slot(ring); in dma_tx_fragment()
1214 desc = ops->idx2desc(ring, slot, &meta_hdr); in dma_tx_fragment()
1217 header = &(ring->txhdr_cache[(slot / TX_SLOTS_PER_FRAME) * hdrsize]); in dma_tx_fragment()
1218 cookie = generate_cookie(ring, slot); in dma_tx_fragment()
1219 err = b43_generate_txhdr(ring->dev, header, in dma_tx_fragment()
1222 ring->current_slot = old_top_slot; in dma_tx_fragment()
1223 ring->used_slots = old_used_slots; in dma_tx_fragment()
1227 meta_hdr->dmaaddr = map_descbuffer(ring, (unsigned char *)header, in dma_tx_fragment()
1229 if (b43_dma_mapping_error(ring, meta_hdr->dmaaddr, hdrsize, 1)) { in dma_tx_fragment()
1230 ring->current_slot = old_top_slot; in dma_tx_fragment()
1231 ring->used_slots = old_used_slots; in dma_tx_fragment()
1234 ops->fill_descriptor(ring, desc, meta_hdr->dmaaddr, in dma_tx_fragment()
1238 slot = request_slot(ring); in dma_tx_fragment()
1239 desc = ops->idx2desc(ring, slot, &meta); in dma_tx_fragment()
1246 meta->dmaaddr = map_descbuffer(ring, skb->data, skb->len, 1); in dma_tx_fragment()
1248 if (b43_dma_mapping_error(ring, meta->dmaaddr, skb->len, 1)) { in dma_tx_fragment()
1252 ring->current_slot = old_top_slot; in dma_tx_fragment()
1253 ring->used_slots = old_used_slots; in dma_tx_fragment()
1258 meta->dmaaddr = map_descbuffer(ring, priv_info->bouncebuffer, skb->len, 1); in dma_tx_fragment()
1259 if (b43_dma_mapping_error(ring, meta->dmaaddr, skb->len, 1)) { in dma_tx_fragment()
1262 ring->current_slot = old_top_slot; in dma_tx_fragment()
1263 ring->used_slots = old_used_slots; in dma_tx_fragment()
1269 ops->fill_descriptor(ring, desc, meta->dmaaddr, skb->len, 0, 1, 1); in dma_tx_fragment()
1274 b43_shm_write16(ring->dev, B43_SHM_SHARED, in dma_tx_fragment()
1279 ops->poke_tx(ring, next_slot(ring, slot)); in dma_tx_fragment()
1283 unmap_descbuffer(ring, meta_hdr->dmaaddr, in dma_tx_fragment()
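
dma_tx_fragment() consumes two slots per frame (TX_SLOTS_PER_FRAME): one for the driver-built TX header, one for the skb payload. The saved old_top_slot/old_used_slots (lines 1209–1210) let it roll the ring back whenever header generation or a DMA mapping fails, so half-claimed slots are never leaked. A sketch of that claim-and-rollback pattern, building on the tx_ring_model sketch earlier; fill_header() and map_body() are hypothetical stand-ins for the real header/body setup steps:

    /* Hypothetical per-slot setup steps; return <0 on failure. */
    extern int fill_header(int slot);
    extern int map_body(int slot);

    static int tx_fragment_model(struct tx_ring_model *t)
    {
    	int old_top_slot   = t->current_slot;
    	int old_used_slots = t->r.used_slots;
    	int header_slot    = request_slot(t);
    	int body_slot      = request_slot(t);

    	if (fill_header(header_slot) < 0 || map_body(body_slot) < 0) {
    		/* Roll back so the half-claimed slots get reused. */
    		t->current_slot = old_top_slot;
    		t->r.used_slots = old_used_slots;
    		return -1;
    	}
    	return 0;
    }
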
1288 static inline int should_inject_overflow(struct b43_dmaring *ring) in should_inject_overflow() argument
1291 if (unlikely(b43_debug(ring->dev, B43_DBG_DMAOVERFLOW))) { in should_inject_overflow()
1296 next_overflow = ring->last_injected_overflow + HZ; in should_inject_overflow()
1298 ring->last_injected_overflow = jiffies; in should_inject_overflow()
1299 b43dbg(ring->dev->wl, in should_inject_overflow()
1301 "DMA controller %d\n", ring->index); in should_inject_overflow()
1313 struct b43_dmaring *ring; in select_ring_by_priority() local
1322 ring = dev->dma.tx_ring_AC_VO; in select_ring_by_priority()
1325 ring = dev->dma.tx_ring_AC_VI; in select_ring_by_priority()
1328 ring = dev->dma.tx_ring_AC_BE; in select_ring_by_priority()
1331 ring = dev->dma.tx_ring_AC_BK; in select_ring_by_priority()
1335 ring = dev->dma.tx_ring_AC_BE; in select_ring_by_priority()
1337 return ring; in select_ring_by_priority()
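
select_ring_by_priority() maps mac80211 queue priorities (0 = highest) onto the four QoS access-category rings, and falls back to the best-effort ring when QoS is disabled (line 1335). A compact model of the mapping visible in lines 1322–1331; the out-of-range default is assumed from the driver's warn-and-fallthrough style:

    enum ac_ring { AC_BK, AC_BE, AC_VI, AC_VO };

    /* queue_prio 0 is the highest priority (voice), 3 the lowest
     * (background). */
    static enum ac_ring select_ring_model(int queue_prio, int qos_enabled)
    {
    	if (!qos_enabled)
    		return AC_BE;
    	switch (queue_prio) {
    	case 0:  return AC_VO;
    	case 1:  return AC_VI;
    	case 2:  return AC_BE;
    	case 3:  return AC_BK;
    	default: return AC_VO;  /* assumed: warn, treat as priority 0 */
    	}
    }
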
1342 struct b43_dmaring *ring; in b43_dma_tx() local
1350 ring = dev->dma.tx_ring_mcast; in b43_dma_tx()
1356 ring = select_ring_by_priority( in b43_dma_tx()
1360 B43_WARN_ON(!ring->tx); in b43_dma_tx()
1362 if (unlikely(ring->stopped)) { in b43_dma_tx()
1373 if (WARN_ON(free_slots(ring) < TX_SLOTS_PER_FRAME)) { in b43_dma_tx()
1384 ring->queue_prio = skb_get_queue_mapping(skb); in b43_dma_tx()
1386 err = dma_tx_fragment(ring, skb); in b43_dma_tx()
1398 if ((free_slots(ring) < TX_SLOTS_PER_FRAME) || in b43_dma_tx()
1399 should_inject_overflow(ring)) { in b43_dma_tx()
1404 ring->stopped = true; in b43_dma_tx()
1406 b43dbg(dev->wl, "Stopped TX ring %d\n", ring->index); in b43_dma_tx()
1418 struct b43_dmaring *ring; in b43_dma_handle_txstatus() local
1427 ring = parse_cookie(dev, status->cookie, &slot); in b43_dma_handle_txstatus()
1428 if (unlikely(!ring)) in b43_dma_handle_txstatus()
1430 B43_WARN_ON(!ring->tx); in b43_dma_handle_txstatus()
1435 firstused = ring->current_slot - ring->used_slots + 1; in b43_dma_handle_txstatus()
1437 firstused = ring->nr_slots + firstused; in b43_dma_handle_txstatus()
1444 if (slot == next_slot(ring, next_slot(ring, firstused))) { in b43_dma_handle_txstatus()
1454 ring->index, slot); in b43_dma_handle_txstatus()
1465 ring->index, firstused, slot); in b43_dma_handle_txstatus()
1472 ops = ring->ops; in b43_dma_handle_txstatus()
1474 B43_WARN_ON(slot < 0 || slot >= ring->nr_slots); in b43_dma_handle_txstatus()
1476 ops->idx2desc(ring, slot, &meta); in b43_dma_handle_txstatus()
1481 slot, firstused, ring->index); in b43_dma_handle_txstatus()
1489 unmap_descbuffer(ring, meta->dmaaddr, in b43_dma_handle_txstatus()
1494 unmap_descbuffer(ring, meta->dmaaddr, in b43_dma_handle_txstatus()
1507 slot, firstused, ring->index); in b43_dma_handle_txstatus()
1529 ring->nr_succeed_tx_packets++; in b43_dma_handle_txstatus()
1531 ring->nr_failed_tx_packets++; in b43_dma_handle_txstatus()
1532 ring->nr_total_packet_tries += status->frame_count; in b43_dma_handle_txstatus()
1546 slot, firstused, ring->index); in b43_dma_handle_txstatus()
1552 ring->used_slots--; in b43_dma_handle_txstatus()
1559 slot = next_slot(ring, slot); in b43_dma_handle_txstatus()
1563 if (ring->stopped) { in b43_dma_handle_txstatus()
1564 B43_WARN_ON(free_slots(ring) < TX_SLOTS_PER_FRAME); in b43_dma_handle_txstatus()
1565 ring->stopped = false; in b43_dma_handle_txstatus()
1568 if (dev->wl->tx_queue_stopped[ring->queue_prio]) { in b43_dma_handle_txstatus()
1569 dev->wl->tx_queue_stopped[ring->queue_prio] = false; in b43_dma_handle_txstatus()
1573 ieee80211_wake_queue(dev->wl->hw, ring->queue_prio); in b43_dma_handle_txstatus()
1575 b43dbg(dev->wl, "Woke up TX ring %d\n", ring->index); in b43_dma_handle_txstatus()
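
In b43_dma_handle_txstatus() above, "firstused" (lines 1435–1437) is the oldest in-flight slot: current_slot points at the newest claimed slot, so stepping back used_slots - 1 positions, wrapped back into range when the subtraction goes negative, yields the slot the next TX status report should describe. A worked model:

    #include <stdio.h>

    static int first_used_slot(int current_slot, int used_slots,
    			   int nr_slots)
    {
    	int firstused = current_slot - used_slots + 1;

    	if (firstused < 0)
    		firstused = nr_slots + firstused;  /* wrap around */
    	return firstused;
    }

    int main(void)
    {
    	/* 256-slot ring, newest used slot 3, five slots in flight:
    	 * the oldest wrapped around to slot 255. */
    	printf("%d\n", first_used_slot(3, 5, 256));   /* prints 255 */
    	return 0;
    }
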
1582 static void dma_rx(struct b43_dmaring *ring, int *slot) in dma_rx() argument
1584 const struct b43_dma_ops *ops = ring->ops; in dma_rx()
1593 desc = ops->idx2desc(ring, *slot, &meta); in dma_rx()
1595 sync_descbuffer_for_cpu(ring, meta->dmaaddr, ring->rx_buffersize); in dma_rx()
1613 if (unlikely(b43_rx_buffer_is_poisoned(ring, skb))) { in dma_rx()
1616 b43dbg(ring->dev->wl, "DMA RX: Dropping poisoned buffer.\n"); in dma_rx()
1620 if (unlikely(len + ring->frameoffset > ring->rx_buffersize)) { in dma_rx()
1630 desc = ops->idx2desc(ring, *slot, &meta); in dma_rx()
1632 b43_poison_rx_buffer(ring, meta->skb); in dma_rx()
1633 sync_descbuffer_for_device(ring, meta->dmaaddr, in dma_rx()
1634 ring->rx_buffersize); in dma_rx()
1635 *slot = next_slot(ring, *slot); in dma_rx()
1637 tmp -= ring->rx_buffersize; in dma_rx()
1641 b43err(ring->dev->wl, "DMA RX buffer too small " in dma_rx()
1643 len, ring->rx_buffersize, cnt); in dma_rx()
1648 err = setup_rx_descbuffer(ring, desc, meta, GFP_ATOMIC); in dma_rx()
1650 b43dbg(ring->dev->wl, "DMA RX: setup_rx_descbuffer() failed\n"); in dma_rx()
1654 unmap_descbuffer(ring, dmaaddr, ring->rx_buffersize, 0); in dma_rx()
1655 skb_put(skb, len + ring->frameoffset); in dma_rx()
1656 skb_pull(skb, ring->frameoffset); in dma_rx()
1658 b43_rx(ring->dev, skb, rxhdr); in dma_rx()
1664 b43_poison_rx_buffer(ring, skb); in dma_rx()
1665 sync_descbuffer_for_device(ring, dmaaddr, ring->rx_buffersize); in dma_rx()
1668 void b43_dma_handle_rx_overflow(struct b43_dmaring *ring) in b43_dma_handle_rx_overflow() argument
1672 B43_WARN_ON(ring->tx); in b43_dma_handle_rx_overflow()
1682 current_slot = ring->ops->get_current_rxslot(ring); in b43_dma_handle_rx_overflow()
1683 previous_slot = prev_slot(ring, current_slot); in b43_dma_handle_rx_overflow()
1684 ring->ops->set_current_rxslot(ring, previous_slot); in b43_dma_handle_rx_overflow()
1687 void b43_dma_rx(struct b43_dmaring *ring) in b43_dma_rx() argument
1689 const struct b43_dma_ops *ops = ring->ops; in b43_dma_rx()
1693 B43_WARN_ON(ring->tx); in b43_dma_rx()
1694 current_slot = ops->get_current_rxslot(ring); in b43_dma_rx()
1695 B43_WARN_ON(!(current_slot >= 0 && current_slot < ring->nr_slots)); in b43_dma_rx()
1697 slot = ring->current_slot; in b43_dma_rx()
1698 for (; slot != current_slot; slot = next_slot(ring, slot)) { in b43_dma_rx()
1699 dma_rx(ring, &slot); in b43_dma_rx()
1700 update_max_used_slots(ring, ++used_slots); in b43_dma_rx()
1703 ops->set_current_rxslot(ring, slot); in b43_dma_rx()
1704 ring->current_slot = slot; in b43_dma_rx()
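
b43_dma_rx() polls: it reads the slot the hardware will fill next (via get_current_rxslot), consumes every slot from the driver's cursor up to but not including it, then writes the cursor back so the device knows those buffers may be reused. The loop shape, modeled with hypothetical hw_current_rxslot()/hw_set_rxslot()/handle_rx_slot() stubs (the real dma_rx() may advance the slot further itself when it drops an oversized multi-buffer frame):

    /* Hypothetical stand-ins for the RXSTATUS/RXINDEX register
     * accesses and the per-slot RX handler. */
    extern int  hw_current_rxslot(void);
    extern void handle_rx_slot(int slot);
    extern void hw_set_rxslot(int slot);

    static int rx_poll_model(int current_slot, int nr_slots)
    {
    	int hw_slot = hw_current_rxslot();
    	int slot;

    	for (slot = current_slot; slot != hw_slot;
    	     slot = (slot + 1) % nr_slots)
    		handle_rx_slot(slot);

    	hw_set_rxslot(slot);   /* hand the consumed slots back */
    	return slot;           /* new driver-side cursor */
    }
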
1707 static void b43_dma_tx_suspend_ring(struct b43_dmaring *ring) in b43_dma_tx_suspend_ring() argument
1709 B43_WARN_ON(!ring->tx); in b43_dma_tx_suspend_ring()
1710 ring->ops->tx_suspend(ring); in b43_dma_tx_suspend_ring()
1713 static void b43_dma_tx_resume_ring(struct b43_dmaring *ring) in b43_dma_tx_resume_ring() argument
1715 B43_WARN_ON(!ring->tx); in b43_dma_tx_resume_ring()
1716 ring->ops->tx_resume(ring); in b43_dma_tx_resume_ring()