Lines matching refs:re (the ring-entry pointer, struct rx_ring_info / struct tx_ring_info, in the sky2 Ethernet driver, sky2.c)

1155 struct rx_ring_info *re; in sky2_get_rx_data_size() local
1162 BUG_ON(sky2->rx_nfrags > ARRAY_SIZE(re->frag_addr)); in sky2_get_rx_data_size()
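
Note: these two matches are the whole life of re in sky2_get_rx_data_size(). The pointer is declared only so ARRAY_SIZE() can be applied to its frag_addr[] member and is never dereferenced. A minimal sketch of the surrounding function; apart from the two matched lines, the size arithmetic is paraphrased, not quoted:

    static unsigned sky2_get_rx_data_size(struct sky2_port *sky2)
    {
            struct rx_ring_info *re;        /* only feeds ARRAY_SIZE() below */
            unsigned size;

            /* frame data plus headers, rounded up (paraphrased) */
            size = roundup(sky2->netdev->mtu + ETH_HLEN + VLAN_HLEN, 8);
            sky2->rx_data_size = size;

            /* pages needed beyond the linear buffer; 0 for normal MTUs */
            sky2->rx_nfrags = size >> PAGE_SHIFT;
            BUG_ON(sky2->rx_nfrags > ARRAY_SIZE(re->frag_addr));

            return size;
    }
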
1196 const struct rx_ring_info *re) in sky2_rx_submit() argument
1200 sky2_rx_add(sky2, OP_PACKET, re->data_addr, sky2->rx_data_size); in sky2_rx_submit()
1202 for (i = 0; i < skb_shinfo(re->skb)->nr_frags; i++) in sky2_rx_submit()
1203 sky2_rx_add(sky2, OP_BUFFER, re->frag_addr[i], PAGE_SIZE); in sky2_rx_submit()
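
Reassembled, the matches above are essentially all of sky2_rx_submit(): one OP_PACKET descriptor for the linear buffer, then one OP_BUFFER descriptor per page fragment. Only the local declaration, braces, and comments are added here:

    static void sky2_rx_submit(struct sky2_port *sky2,
                               const struct rx_ring_info *re)
    {
            int i;

            /* linear data buffer first ... */
            sky2_rx_add(sky2, OP_PACKET, re->data_addr, sky2->rx_data_size);

            /* ... then one descriptor per page fragment (jumbo frames) */
            for (i = 0; i < skb_shinfo(re->skb)->nr_frags; i++)
                    sky2_rx_add(sky2, OP_BUFFER, re->frag_addr[i], PAGE_SIZE);
    }
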
1207 static int sky2_rx_map_skb(struct pci_dev *pdev, struct rx_ring_info *re, in sky2_rx_map_skb() argument
1210 struct sk_buff *skb = re->skb; in sky2_rx_map_skb()
1213 re->data_addr = dma_map_single(&pdev->dev, skb->data, size, in sky2_rx_map_skb()
1215 if (dma_mapping_error(&pdev->dev, re->data_addr)) in sky2_rx_map_skb()
1218 dma_unmap_len_set(re, data_size, size); in sky2_rx_map_skb()
1223 re->frag_addr[i] = skb_frag_dma_map(&pdev->dev, frag, 0, in sky2_rx_map_skb()
1227 if (dma_mapping_error(&pdev->dev, re->frag_addr[i])) in sky2_rx_map_skb()
1234 dma_unmap_page(&pdev->dev, re->frag_addr[i], in sky2_rx_map_skb()
1239 dma_unmap_single(&pdev->dev, re->data_addr, in sky2_rx_map_skb()
1240 dma_unmap_len(re, data_size), DMA_FROM_DEVICE); in sky2_rx_map_skb()
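
The matches in sky2_rx_map_skb() form the standard DMA error-unwind pattern: map the linear part, record its length via dma_unmap_len_set() for the later unmap, map each fragment, and on failure undo everything mapped so far in reverse. Reassembled around the matched lines; the label names are paraphrased:

    static int sky2_rx_map_skb(struct pci_dev *pdev, struct rx_ring_info *re,
                               unsigned size)
    {
            struct sk_buff *skb = re->skb;
            int i;

            re->data_addr = dma_map_single(&pdev->dev, skb->data, size,
                                           DMA_FROM_DEVICE);
            if (dma_mapping_error(&pdev->dev, re->data_addr))
                    goto mapping_error;

            dma_unmap_len_set(re, data_size, size);

            for (i = 0; i < skb_shinfo(skb)->nr_frags; i++) {
                    skb_frag_t *frag = &skb_shinfo(skb)->frags[i];

                    re->frag_addr[i] = skb_frag_dma_map(&pdev->dev, frag, 0,
                                                        skb_frag_size(frag),
                                                        DMA_FROM_DEVICE);
                    if (dma_mapping_error(&pdev->dev, re->frag_addr[i]))
                            goto map_page_error;
            }
            return 0;

    map_page_error:
            /* unwind the fragments mapped before the failure */
            while (--i >= 0)
                    dma_unmap_page(&pdev->dev, re->frag_addr[i],
                                   skb_frag_size(&skb_shinfo(skb)->frags[i]),
                                   DMA_FROM_DEVICE);

            dma_unmap_single(&pdev->dev, re->data_addr,
                             dma_unmap_len(re, data_size), DMA_FROM_DEVICE);

    mapping_error:
            return -EIO;
    }
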
1249 static void sky2_rx_unmap_skb(struct pci_dev *pdev, struct rx_ring_info *re) in sky2_rx_unmap_skb() argument
1251 struct sk_buff *skb = re->skb; in sky2_rx_unmap_skb()
1254 dma_unmap_single(&pdev->dev, re->data_addr, in sky2_rx_unmap_skb()
1255 dma_unmap_len(re, data_size), DMA_FROM_DEVICE); in sky2_rx_unmap_skb()
1258 dma_unmap_page(&pdev->dev, re->frag_addr[i], in sky2_rx_unmap_skb()
1355 struct rx_ring_info *re = sky2->rx_ring + i; in sky2_rx_clean() local
1357 if (re->skb) { in sky2_rx_clean()
1358 sky2_rx_unmap_skb(sky2->hw->pdev, re); in sky2_rx_clean()
1359 kfree_skb(re->skb); in sky2_rx_clean()
1360 re->skb = NULL; in sky2_rx_clean()
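
sky2_rx_unmap_skb() is the exact inverse of the map routine, and sky2_rx_clean() walks the ring calling it before freeing each skb. Reassembled from the matches; the clean-path loop header is paraphrased:

    static void sky2_rx_unmap_skb(struct pci_dev *pdev, struct rx_ring_info *re)
    {
            struct sk_buff *skb = re->skb;
            int i;

            dma_unmap_single(&pdev->dev, re->data_addr,
                             dma_unmap_len(re, data_size), DMA_FROM_DEVICE);

            for (i = 0; i < skb_shinfo(skb)->nr_frags; i++)
                    dma_unmap_page(&pdev->dev, re->frag_addr[i],
                                   skb_frag_size(&skb_shinfo(skb)->frags[i]),
                                   DMA_FROM_DEVICE);
    }

    /* sky2_rx_clean(), per ring slot (loop header paraphrased): */
    for (i = 0; i < sky2->rx_pending; i++) {
            struct rx_ring_info *re = sky2->rx_ring + i;

            if (re->skb) {
                    sky2_rx_unmap_skb(sky2->hw->pdev, re);
                    kfree_skb(re->skb);
                    re->skb = NULL;         /* slot is now empty */
            }
    }
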
1494 struct rx_ring_info *re = sky2->rx_ring + i; in sky2_alloc_rx_skbs() local
1496 re->skb = sky2_rx_alloc(sky2, GFP_KERNEL); in sky2_alloc_rx_skbs()
1497 if (!re->skb) in sky2_alloc_rx_skbs()
1500 if (sky2_rx_map_skb(hw->pdev, re, sky2->rx_data_size)) { in sky2_alloc_rx_skbs()
1501 dev_kfree_skb(re->skb); in sky2_alloc_rx_skbs()
1502 re->skb = NULL; in sky2_alloc_rx_skbs()
1521 struct rx_ring_info *re; in sky2_rx_start() local
1549 re = sky2->rx_ring + i; in sky2_rx_start()
1550 sky2_rx_submit(sky2, re); in sky2_rx_start()
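
Allocation and hardware posting are two separate passes over the same ring: sky2_alloc_rx_skbs() fills each rx_ring_info with a freshly allocated, DMA-mapped skb, and sky2_rx_start() later hands every prepared entry to the chip through sky2_rx_submit(). A sketch of both loops; the loop headers and error returns are paraphrased:

    /* sky2_alloc_rx_skbs(): populate and map every ring entry */
    for (i = 0; i < sky2->rx_pending; i++) {
            struct rx_ring_info *re = sky2->rx_ring + i;

            re->skb = sky2_rx_alloc(sky2, GFP_KERNEL);
            if (!re->skb)
                    return -ENOMEM;         /* caller unwinds (assumed) */

            if (sky2_rx_map_skb(hw->pdev, re, sky2->rx_data_size)) {
                    dev_kfree_skb(re->skb);
                    re->skb = NULL;
                    return -ENOMEM;
            }
    }

    /* sky2_rx_start(): post each prepared entry to the hardware */
    for (i = 0; i < sky2->rx_pending; i++) {
            re = sky2->rx_ring + i;
            sky2_rx_submit(sky2, re);
    }
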
1808 static void sky2_tx_unmap(struct pci_dev *pdev, struct tx_ring_info *re) in sky2_tx_unmap() argument
1810 if (re->flags & TX_MAP_SINGLE) in sky2_tx_unmap()
1811 dma_unmap_single(&pdev->dev, dma_unmap_addr(re, mapaddr), in sky2_tx_unmap()
1812 dma_unmap_len(re, maplen), DMA_TO_DEVICE); in sky2_tx_unmap()
1813 else if (re->flags & TX_MAP_PAGE) in sky2_tx_unmap()
1814 dma_unmap_page(&pdev->dev, dma_unmap_addr(re, mapaddr), in sky2_tx_unmap()
1815 dma_unmap_len(re, maplen), DMA_TO_DEVICE); in sky2_tx_unmap()
1816 re->flags = 0; in sky2_tx_unmap()
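
The matches above give sky2_tx_unmap() in full; only braces and comments are added. A TX ring entry records in re->flags how it was mapped, so teardown can pick the matching unmap call and then mark the slot as owning nothing:

    static void sky2_tx_unmap(struct pci_dev *pdev, struct tx_ring_info *re)
    {
            if (re->flags & TX_MAP_SINGLE)
                    /* linear skb data, mapped with dma_map_single() */
                    dma_unmap_single(&pdev->dev, dma_unmap_addr(re, mapaddr),
                                     dma_unmap_len(re, maplen), DMA_TO_DEVICE);
            else if (re->flags & TX_MAP_PAGE)
                    /* page fragment, mapped with skb_frag_dma_map() */
                    dma_unmap_page(&pdev->dev, dma_unmap_addr(re, mapaddr),
                                   dma_unmap_len(re, maplen), DMA_TO_DEVICE);
            re->flags = 0;  /* slot no longer holds a DMA mapping */
    }
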
1831 struct tx_ring_info *re; in sky2_xmit_frame() local
1923 re = sky2->tx_ring + slot; in sky2_xmit_frame()
1924 re->flags = TX_MAP_SINGLE; in sky2_xmit_frame()
1925 dma_unmap_addr_set(re, mapaddr, mapping); in sky2_xmit_frame()
1926 dma_unmap_len_set(re, maplen, len); in sky2_xmit_frame()
1952 re = sky2->tx_ring + slot; in sky2_xmit_frame()
1953 re->flags = TX_MAP_PAGE; in sky2_xmit_frame()
1954 dma_unmap_addr_set(re, mapaddr, mapping); in sky2_xmit_frame()
1955 dma_unmap_len_set(re, maplen, skb_frag_size(frag)); in sky2_xmit_frame()
1964 re->skb = skb; in sky2_xmit_frame()
1979 re = sky2->tx_ring + i; in sky2_xmit_frame()
1981 sky2_tx_unmap(hw->pdev, re); in sky2_xmit_frame()
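
Within sky2_xmit_frame(), the matched lines are the unmap bookkeeping for transmit: the linear part of the skb is recorded as TX_MAP_SINGLE, each fragment as TX_MAP_PAGE, the skb pointer is stored only on the frame's final slot, and a fragment-mapping failure walks the already-filled slots back through sky2_tx_unmap(). A condensed sketch; slot advancement, the descriptor writes, and the error-loop header are paraphrased:

    /* linear part of the skb */
    re = sky2->tx_ring + slot;
    re->flags = TX_MAP_SINGLE;
    dma_unmap_addr_set(re, mapaddr, mapping);
    dma_unmap_len_set(re, maplen, len);

    /* each page fragment gets its own slot */
    re = sky2->tx_ring + slot;
    re->flags = TX_MAP_PAGE;
    dma_unmap_addr_set(re, mapaddr, mapping);
    dma_unmap_len_set(re, maplen, skb_frag_size(frag));

    /* only the frame's last slot remembers the skb */
    re->skb = skb;

    /* mapping error: release every slot consumed so far */
    for (i = sky2->tx_prod; i != slot; i = RING_NEXT(i, sky2->tx_ring_size)) {
            re = sky2->tx_ring + i;
            sky2_tx_unmap(hw->pdev, re);
    }
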
2011 struct tx_ring_info *re = sky2->tx_ring + idx; in sky2_tx_complete() local
2012 struct sk_buff *skb = re->skb; in sky2_tx_complete()
2014 sky2_tx_unmap(sky2->hw->pdev, re); in sky2_tx_complete()
2023 re->skb = NULL; in sky2_tx_complete()
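
sky2_tx_complete() consumes the ring from tx_cons up to the index the hardware reports done, unmapping every slot and freeing an skb only where one was stored (the frame's last slot, see 1964 above). A sketch of the loop body; the walk header and the statistics updates are paraphrased:

    /* for each idx from sky2->tx_cons to done: */
    struct tx_ring_info *re = sky2->tx_ring + idx;
    struct sk_buff *skb = re->skb;

    sky2_tx_unmap(sky2->hw->pdev, re);

    if (skb) {
            /* last slot of a completed frame: account, then release */
            re->skb = NULL;
            dev_kfree_skb_any(skb);
    }
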
2448 static inline bool needs_copy(const struct rx_ring_info *re, in needs_copy() argument
2453 if (!IS_ALIGNED(re->data_addr + ETH_HLEN, sizeof(u32))) in needs_copy()
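
needs_copy() chooses between the two receive strategies that follow: short frames, and (on architectures without efficient unaligned access) frames whose IP header would land misaligned, are copied into a fresh skb; everything else has its buffer swapped out and passed up. A reconstruction; the CONFIG guard and the copybreak module parameter reflect my reading of the driver rather than the matches:

    static inline bool needs_copy(const struct rx_ring_info *re,
                                  unsigned length)
    {
    #ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
            /* the IP header must end up word-aligned for the stack */
            if (!IS_ALIGNED(re->data_addr + ETH_HLEN, sizeof(u32)))
                    return true;
    #endif
            return length < copybreak;      /* small frame: cheaper to copy */
    }
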
2461 const struct rx_ring_info *re, in receive_copy() argument
2468 dma_sync_single_for_cpu(&sky2->hw->pdev->dev, re->data_addr, in receive_copy()
2470 skb_copy_from_linear_data(re->skb, skb->data, length); in receive_copy()
2471 skb->ip_summed = re->skb->ip_summed; in receive_copy()
2472 skb->csum = re->skb->csum; in receive_copy()
2473 skb_copy_hash(skb, re->skb); in receive_copy()
2474 __vlan_hwaccel_copy_tag(skb, re->skb); in receive_copy()
2477 re->data_addr, length, in receive_copy()
2479 __vlan_hwaccel_clear_tag(re->skb); in receive_copy()
2480 skb_clear_hash(re->skb); in receive_copy()
2481 re->skb->ip_summed = CHECKSUM_NONE; in receive_copy()
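
receive_copy() leaves the DMA mapping in place: sync the buffer to the CPU, copy the frame plus its checksum, hash, and VLAN-tag metadata into a small new skb, hand the buffer back to the device, then scrub that metadata off the recycled ring skb so stale state cannot leak into the next frame. Reassembled around the matches; the allocation call and skb_put() are paraphrased:

    static struct sk_buff *receive_copy(struct sky2_port *sky2,
                                        const struct rx_ring_info *re,
                                        unsigned length)
    {
            struct sk_buff *skb;

            skb = netdev_alloc_skb_ip_align(sky2->netdev, length);
            if (likely(skb)) {
                    dma_sync_single_for_cpu(&sky2->hw->pdev->dev, re->data_addr,
                                            length, DMA_FROM_DEVICE);
                    skb_copy_from_linear_data(re->skb, skb->data, length);
                    skb->ip_summed = re->skb->ip_summed;
                    skb->csum = re->skb->csum;
                    skb_copy_hash(skb, re->skb);
                    __vlan_hwaccel_copy_tag(skb, re->skb);

                    dma_sync_single_for_device(&sky2->hw->pdev->dev,
                                               re->data_addr, length,
                                               DMA_FROM_DEVICE);

                    /* ring skb is recycled: clear what 'skb' now owns */
                    __vlan_hwaccel_clear_tag(re->skb);
                    skb_clear_hash(re->skb);
                    re->skb->ip_summed = CHECKSUM_NONE;

                    skb_put(skb, length);
            }
            return skb;
    }
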
2522 struct rx_ring_info *re, in receive_new() argument
2536 skb = re->skb; in receive_new()
2537 sky2_rx_unmap_skb(sky2->hw->pdev, re); in receive_new()
2539 *re = nre; in receive_new()
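
receive_new() is the zero-copy path: allocate and map a replacement buffer first, and only if that succeeds unmap the old one and hand it up; the ring slot is then overwritten wholesale (*re = nre). A sketch; the local nre, the failure handling, and the fragment case follow my reading of the driver:

    static struct sk_buff *receive_new(struct sky2_port *sky2,
                                       struct rx_ring_info *re,
                                       unsigned int length)
    {
            struct rx_ring_info nre;        /* the replacement entry */
            struct sk_buff *skb;

            nre.skb = sky2_rx_alloc(sky2, GFP_ATOMIC);
            if (unlikely(!nre.skb))
                    return NULL;            /* keep old buffer, drop frame */

            if (sky2_rx_map_skb(sky2->hw->pdev, &nre, sky2->rx_data_size)) {
                    dev_kfree_skb(nre.skb);
                    return NULL;
            }

            skb = re->skb;
            sky2_rx_unmap_skb(sky2->hw->pdev, re);
            *re = nre;                      /* slot now owns the replacement */

            skb_put(skb, length);           /* jumbo fragment case elided */
            return skb;
    }
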
2561 struct rx_ring_info *re = sky2->rx_ring + sky2->rx_next; in sky2_receive() local
2572 if (skb_vlan_tag_present(re->skb)) in sky2_receive()
2595 if (needs_copy(re, length)) in sky2_receive()
2596 skb = receive_copy(sky2, re, length); in sky2_receive()
2598 skb = receive_new(sky2, re, length); in sky2_receive()
2603 sky2_rx_submit(sky2, re); in sky2_receive()
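
sky2_receive() ties it together: look up the current ring entry, let needs_copy() pick copy vs. swap, and in either case re-post the entry (with its original or replacement buffer) to the hardware via sky2_rx_submit(). A condensed sketch of the control flow around the matches; status and length validation are elided:

    struct rx_ring_info *re = sky2->rx_ring + sky2->rx_next;
    struct sk_buff *skb = NULL;

    /* the chip strips any VLAN tag into the ring skb (2572 above);
     * the length accounting for that tag is paraphrased */
    if (skb_vlan_tag_present(re->skb))
            length -= VLAN_HLEN;

    if (needs_copy(re, length))
            skb = receive_copy(sky2, re, length);   /* buffer recycled */
    else
            skb = receive_new(sky2, re, length);    /* buffer replaced */

    /* the slot goes back to the chip either way */
    sky2_rx_submit(sky2, re);
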