
Searched refs:rx_queue (Results 1 – 25 of 112) sorted by relevance


/linux/drivers/net/ethernet/sfc/falcon/
rx.c
115 index = rx_queue->page_remove & rx_queue->page_ptr_mask; in ef4_reuse_page()
122 if (rx_queue->page_remove != rx_queue->page_add) in ef4_reuse_page()
191 index = rx_queue->added_count & rx_queue->ptr_mask; in ef4_init_rx_buffers()
256 index = rx_queue->page_add & rx_queue->page_ptr_mask; in ef4_recycle_rx_page()
367 if (rx_queue->added_count == rx_queue->removed_count) in ef4_fast_push_rx_descriptors()
376 rx_queue->added_count - rx_queue->removed_count); in ef4_fast_push_rx_descriptors()
379 if (rx_queue->notified_count != rx_queue->added_count) in ef4_fast_push_rx_descriptors()
747 netif_dbg(rx_queue->efx, drv, rx_queue->efx->net_dev, in ef4_init_rx_queue()
758 rx_queue->page_add = rx_queue->page_ptr_mask + 1; in ef4_init_rx_queue()
789 netif_dbg(rx_queue->efx, drv, rx_queue->efx->net_dev, in ef4_fini_rx_queue()
[all …]
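
The rx.c matches above all use one idiom: added_count and removed_count are free-running counters, the slot index is the counter masked with ptr_mask (ring size minus one, ring size a power of two), and the fill level is just the difference of the two counters. Below is a minimal userspace sketch of that idiom only; the demo_ring type and its helpers are invented for illustration and are not the driver's code.

/*
 * Minimal userspace sketch of the free-running-counter ring indexing
 * seen in the matches above (counters masked with ptr_mask).
 * All names here are illustrative, not the driver's.
 */
#include <stdio.h>

#define RING_SIZE 8                      /* must be a power of two */

struct demo_ring {
	unsigned int added_count;        /* free-running producer counter */
	unsigned int removed_count;      /* free-running consumer counter */
	unsigned int ptr_mask;           /* RING_SIZE - 1 */
	int entries[RING_SIZE];
};

static int demo_ring_fill_level(const struct demo_ring *r)
{
	/* The producer never runs more than RING_SIZE ahead of the
	 * consumer, so plain subtraction gives the fill level even
	 * after the counters themselves wrap. */
	return r->added_count - r->removed_count;
}

static void demo_ring_push(struct demo_ring *r, int value)
{
	unsigned int index = r->added_count & r->ptr_mask;

	r->entries[index] = value;
	r->added_count++;
}

static int demo_ring_pop(struct demo_ring *r)
{
	unsigned int index = r->removed_count & r->ptr_mask;

	r->removed_count++;
	return r->entries[index];
}

int main(void)
{
	struct demo_ring ring = { .ptr_mask = RING_SIZE - 1 };

	for (int i = 0; i < 5; i++)
		demo_ring_push(&ring, i);

	printf("fill level: %d\n", demo_ring_fill_level(&ring));
	while (demo_ring_fill_level(&ring) > 0)
		printf("popped %d\n", demo_ring_pop(&ring));

	return 0;
}
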
farch.c
497 rx_queue, in ef4_farch_rx_write()
498 rx_queue->notified_count & rx_queue->ptr_mask); in ef4_farch_rx_write()
503 write_ptr = rx_queue->added_count & rx_queue->ptr_mask; in ef4_farch_rx_write()
536 ef4_rx_queue_index(rx_queue), rx_queue->rxd.index, in ef4_farch_rx_init()
537 rx_queue->rxd.index + rx_queue->rxd.entries - 1); in ef4_farch_rx_init()
592 ef4_free_special_buffer(rx_queue->efx, &rx_queue->rxd); in ef4_farch_rx_remove()
1014 rx_queue->removed_count & rx_queue->ptr_mask, in ef4_farch_handle_rx_event()
1016 rx_queue->removed_count += rx_queue->scatter_n; in ef4_farch_handle_rx_event()
1028 rx_queue->removed_count & rx_queue->ptr_mask, in ef4_farch_handle_rx_event()
1079 rx_queue->removed_count & rx_queue->ptr_mask, in ef4_farch_handle_rx_event()
[all …]
nic.h
112 return ((ef4_qword_t *) (rx_queue->rxd.buf.addr)) + index; in ef4_rx_desc()
335 return rx_queue->efx->type->rx_probe(rx_queue); in ef4_nic_probe_rx()
339 rx_queue->efx->type->rx_init(rx_queue); in ef4_nic_init_rx()
343 rx_queue->efx->type->rx_remove(rx_queue); in ef4_nic_remove_rx()
347 rx_queue->efx->type->rx_write(rx_queue); in ef4_nic_notify_rx_desc()
351 rx_queue->efx->type->rx_defer_refill(rx_queue); in ef4_nic_generate_fill_event()
390 int ef4_farch_rx_probe(struct ef4_rx_queue *rx_queue);
391 void ef4_farch_rx_init(struct ef4_rx_queue *rx_queue);
392 void ef4_farch_rx_fini(struct ef4_rx_queue *rx_queue);
393 void ef4_farch_rx_remove(struct ef4_rx_queue *rx_queue);
[all …]
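
The nic.h matches dispatch every rx operation through rx_queue->efx->type, a per-NIC-type table of function pointers. The standalone sketch below shows that shape of indirection only; all demo_* names and the fake_* implementation are made up for illustration.

/*
 * Userspace sketch of a per-device-type operations table, mirroring the
 * rx_queue->efx->type->rx_*() dispatch in the matches above.
 */
#include <stdio.h>

struct demo_rx_queue;

struct demo_nic_type {
	int  (*rx_probe)(struct demo_rx_queue *rxq);
	void (*rx_init)(struct demo_rx_queue *rxq);
	void (*rx_remove)(struct demo_rx_queue *rxq);
};

struct demo_nic {
	const struct demo_nic_type *type;
};

struct demo_rx_queue {
	struct demo_nic *nic;
};

/* Thin wrappers dispatch through the type table. */
static inline int demo_nic_probe_rx(struct demo_rx_queue *rxq)
{
	return rxq->nic->type->rx_probe(rxq);
}

static inline void demo_nic_init_rx(struct demo_rx_queue *rxq)
{
	rxq->nic->type->rx_init(rxq);
}

/* One concrete implementation of the table. */
static int fake_rx_probe(struct demo_rx_queue *rxq)
{
	(void)rxq;
	printf("probe\n");
	return 0;
}

static void fake_rx_init(struct demo_rx_queue *rxq)
{
	(void)rxq;
	printf("init\n");
}

static void fake_rx_remove(struct demo_rx_queue *rxq)
{
	(void)rxq;
	printf("remove\n");
}

static const struct demo_nic_type fake_type = {
	.rx_probe  = fake_rx_probe,
	.rx_init   = fake_rx_init,
	.rx_remove = fake_rx_remove,
};

int main(void)
{
	struct demo_nic nic = { .type = &fake_type };
	struct demo_rx_queue rxq = { .nic = &nic };

	if (demo_nic_probe_rx(&rxq) == 0)
		demo_nic_init_rx(&rxq);
	nic.type->rx_remove(&rxq);
	return 0;
}
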
net_driver.h
444 struct ef4_rx_queue rx_queue; member
1090 int (*rx_probe)(struct ef4_rx_queue *rx_queue);
1091 void (*rx_init)(struct ef4_rx_queue *rx_queue);
1092 void (*rx_remove)(struct ef4_rx_queue *rx_queue);
1093 void (*rx_write)(struct ef4_rx_queue *rx_queue);
1236 return channel->rx_queue.core_index >= 0; in ef4_channel_has_rx_queue()
1243 return &channel->rx_queue; in ef4_channel_get_rx_queue()
1251 for (_rx_queue = &(_channel)->rx_queue; \
1258 return container_of(rx_queue, struct ef4_channel, rx_queue); in ef4_rx_queue_channel()
1263 return ef4_rx_queue_channel(rx_queue)->channel; in ef4_rx_queue_index()
[all …]
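
The net_driver.h match recovers the enclosing channel from an embedded rx_queue member with container_of(). A self-contained userspace sketch of that pattern, with invented demo_* types, looks like this:

/*
 * Sketch of container_of(): recover the enclosing structure from a
 * pointer to a member embedded inside it.  demo_* names are
 * illustrative only.
 */
#include <stddef.h>
#include <stdio.h>

#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct demo_rx_queue {
	int core_index;
};

struct demo_channel {
	int channel;                     /* channel number */
	struct demo_rx_queue rx_queue;   /* embedded, not a pointer */
};

static struct demo_channel *demo_rx_queue_channel(struct demo_rx_queue *rxq)
{
	return container_of(rxq, struct demo_channel, rx_queue);
}

int main(void)
{
	struct demo_channel ch = { .channel = 3 };
	struct demo_rx_queue *rxq = &ch.rx_queue;

	/* Walk back from the member to its parent channel. */
	printf("channel index: %d\n", demo_rx_queue_channel(rxq)->channel);
	return 0;
}
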
efx.h
40 int ef4_probe_rx_queue(struct ef4_rx_queue *rx_queue);
41 void ef4_remove_rx_queue(struct ef4_rx_queue *rx_queue);
42 void ef4_init_rx_queue(struct ef4_rx_queue *rx_queue);
43 void ef4_fini_rx_queue(struct ef4_rx_queue *rx_queue);
44 void ef4_fast_push_rx_descriptors(struct ef4_rx_queue *rx_queue, bool atomic);
47 void ef4_rx_packet(struct ef4_rx_queue *rx_queue, unsigned int index,
54 void ef4_schedule_slow_fill(struct ef4_rx_queue *rx_queue);
efx.c
428 struct ef4_rx_queue *rx_queue; in ef4_alloc_channel() local
447 rx_queue = &channel->rx_queue; in ef4_alloc_channel()
448 rx_queue->efx = efx; in ef4_alloc_channel()
461 struct ef4_rx_queue *rx_queue; in ef4_copy_channel() local
485 rx_queue = &channel->rx_queue; in ef4_copy_channel()
486 rx_queue->buffer = NULL; in ef4_copy_channel()
487 memset(&rx_queue->rxd, 0, sizeof(rx_queue->rxd)); in ef4_copy_channel()
496 struct ef4_rx_queue *rx_queue; in ef4_probe_channel() local
598 struct ef4_rx_queue *rx_queue; in ef4_start_datapath() local
789 rx_queue->rxd.index + in ef4_realloc_channels()
[all …]
/linux/drivers/net/ethernet/sfc/
rx_common.c
50 index = rx_queue->page_remove & rx_queue->page_ptr_mask; in efx_reuse_page()
57 if (rx_queue->page_remove != rx_queue->page_add) in efx_reuse_page()
92 index = rx_queue->page_add & rx_queue->page_ptr_mask; in efx_recycle_rx_page()
239 netif_dbg(rx_queue->efx, drv, rx_queue->efx->net_dev, in efx_init_rx_queue()
250 rx_queue->page_add = rx_queue->page_ptr_mask + 1; in efx_init_rx_queue()
293 netif_dbg(rx_queue->efx, drv, rx_queue->efx->net_dev, in efx_fini_rx_queue()
300 for (i = rx_queue->removed_count; i < rx_queue->added_count; in efx_fini_rx_queue()
319 netif_dbg(rx_queue->efx, drv, rx_queue->efx->net_dev, in efx_remove_rx_queue()
420 index = rx_queue->added_count & rx_queue->ptr_mask; in efx_init_rx_buffers()
509 rx_queue->added_count - rx_queue->removed_count); in efx_fast_push_rx_descriptors()
[all …]
ef100_rx.c
90 struct efx_rx_queue *rx_queue = in __ef100_rx_packet() local
109 struct efx_nic *efx = rx_queue->efx; in ef100_rx_packet()
111 ++rx_queue->rx_packets; in ef100_rx_packet()
144 ef100_rx_packet(rx_queue, in efx_ef100_ev_rx()
145 rx_queue->removed_count & rx_queue->ptr_mask); in efx_ef100_ev_rx()
146 ++rx_queue->removed_count; in efx_ef100_ev_rx()
157 while (rx_queue->notified_count != rx_queue->added_count) { in ef100_rx_write()
158 idx = rx_queue->notified_count & rx_queue->ptr_mask; in ef100_rx_write()
160 rxd = efx_rx_desc(rx_queue, idx); in ef100_rx_write()
164 ++rx_queue->notified_count; in ef100_rx_write()
[all …]
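
The ef100_rx.c match walks notified_count up to added_count, writing one descriptor per step, so only buffers that software has posted but not yet announced to hardware get pushed. A small sketch of that loop, with the hardware write replaced by a printf and all names invented, is below.

/*
 * Sketch of the "push what has been added but not yet notified" loop
 * from the ef100_rx.c match above.
 */
#include <stdio.h>

#define RING_SIZE 8                      /* power of two */

struct demo_rxq {
	unsigned int added_count;        /* buffers posted by software */
	unsigned int notified_count;     /* buffers announced to "hardware" */
	unsigned int ptr_mask;
};

static void demo_rx_write(struct demo_rxq *rxq)
{
	while (rxq->notified_count != rxq->added_count) {
		unsigned int idx = rxq->notified_count & rxq->ptr_mask;

		printf("write descriptor at slot %u\n", idx);
		++rxq->notified_count;
	}
	/* A real driver would ring a doorbell register here. */
}

int main(void)
{
	struct demo_rxq rxq = { .ptr_mask = RING_SIZE - 1 };

	rxq.added_count = 3;             /* pretend three buffers were posted */
	demo_rx_write(&rxq);
	return 0;
}
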
rx.c
47 struct efx_nic *efx = rx_queue->efx; in efx_rx_packet__check_len()
124 struct efx_nic *efx = rx_queue->efx; in efx_rx_packet()
128 rx_queue->rx_packets++; in efx_rx_packet()
130 rx_buf = efx_rx_buffer(rx_queue, index); in efx_rx_packet()
150 efx_rx_queue_index(rx_queue), index, in efx_rx_packet()
197 rx_buf = efx_rx_buffer(rx_queue, index); in efx_rx_packet()
217 struct efx_rx_queue *rx_queue; in efx_rx_deliver() local
255 struct efx_rx_queue *rx_queue; in efx_do_xdp() local
271 efx_free_rx_buffers(rx_queue, rx_buf, in efx_do_xdp()
378 struct efx_rx_queue *rx_queue; in __efx_rx_packet() local
[all …]
nic_common.h
100 efx_rx_desc(struct efx_rx_queue *rx_queue, unsigned int index) in efx_rx_desc() argument
102 return ((efx_qword_t *) (rx_queue->rxd.buf.addr)) + index; in efx_rx_desc()
140 static inline int efx_nic_probe_rx(struct efx_rx_queue *rx_queue) in efx_nic_probe_rx() argument
142 return rx_queue->efx->type->rx_probe(rx_queue); in efx_nic_probe_rx()
144 static inline void efx_nic_init_rx(struct efx_rx_queue *rx_queue) in efx_nic_init_rx() argument
146 rx_queue->efx->type->rx_init(rx_queue); in efx_nic_init_rx()
148 static inline void efx_nic_remove_rx(struct efx_rx_queue *rx_queue) in efx_nic_remove_rx() argument
150 rx_queue->efx->type->rx_remove(rx_queue); in efx_nic_remove_rx()
152 static inline void efx_nic_notify_rx_desc(struct efx_rx_queue *rx_queue) in efx_nic_notify_rx_desc() argument
154 rx_queue->efx->type->rx_write(rx_queue); in efx_nic_notify_rx_desc()
[all …]
mcdi_functions.c
272 return efx_nic_alloc_buffer(rx_queue->efx, &rx_queue->rxd.buf, in efx_mcdi_rx_probe()
273 (rx_queue->ptr_mask + 1) * in efx_mcdi_rx_probe()
283 struct efx_nic *efx = rx_queue->efx; in efx_mcdi_rx_init()
290 rx_queue->scatter_n = 0; in efx_mcdi_rx_init()
291 rx_queue->scatter_len = 0; in efx_mcdi_rx_init()
301 efx_rx_queue_index(rx_queue)); in efx_mcdi_rx_init()
323 efx_rx_queue_index(rx_queue)); in efx_mcdi_rx_init()
328 efx_nic_free_buffer(rx_queue->efx, &rx_queue->rxd.buf); in efx_mcdi_rx_remove()
335 struct efx_nic *efx = rx_queue->efx; in efx_mcdi_rx_fini()
358 struct efx_rx_queue *rx_queue; in efx_fini_dmaq() local
[all …]
farch.c
490 rx_queue, in efx_farch_rx_write()
491 rx_queue->notified_count & rx_queue->ptr_mask); in efx_farch_rx_write()
496 write_ptr = rx_queue->added_count & rx_queue->ptr_mask; in efx_farch_rx_write()
523 efx_rx_queue_index(rx_queue), rx_queue->rxd.index, in efx_farch_rx_init()
524 rx_queue->rxd.index + rx_queue->rxd.entries - 1); in efx_farch_rx_init()
579 efx_free_special_buffer(rx_queue->efx, &rx_queue->rxd); in efx_farch_rx_remove()
1005 rx_queue->removed_count & rx_queue->ptr_mask, in efx_farch_handle_rx_event()
1007 rx_queue->removed_count += rx_queue->scatter_n; in efx_farch_handle_rx_event()
1019 rx_queue->removed_count & rx_queue->ptr_mask, in efx_farch_handle_rx_event()
1070 rx_queue->removed_count & rx_queue->ptr_mask, in efx_farch_handle_rx_event()
[all …]
rx_common.h
49 int efx_probe_rx_queue(struct efx_rx_queue *rx_queue);
50 void efx_init_rx_queue(struct efx_rx_queue *rx_queue);
51 void efx_fini_rx_queue(struct efx_rx_queue *rx_queue);
52 void efx_remove_rx_queue(struct efx_rx_queue *rx_queue);
53 void efx_destroy_rx_queue(struct efx_rx_queue *rx_queue);
55 void efx_init_rx_buffer(struct efx_rx_queue *rx_queue,
69 void efx_free_rx_buffers(struct efx_rx_queue *rx_queue,
73 void efx_schedule_slow_fill(struct efx_rx_queue *rx_queue);
75 void efx_fast_push_rx_descriptors(struct efx_rx_queue *rx_queue, bool atomic);
efx_channels.c
531 struct efx_rx_queue *rx_queue; in efx_alloc_channel() local
556 rx_queue = &channel->rx_queue; in efx_alloc_channel()
557 rx_queue->efx = efx; in efx_alloc_channel()
601 struct efx_rx_queue *rx_queue; in efx_copy_channel() local
627 rx_queue = &channel->rx_queue; in efx_copy_channel()
628 rx_queue->buffer = NULL; in efx_copy_channel()
629 memset(&rx_queue->rxd, 0, sizeof(rx_queue->rxd)); in efx_copy_channel()
641 struct efx_rx_queue *rx_queue; in efx_probe_channel() local
743 struct efx_rx_queue *rx_queue; in efx_remove_channel() local
791 rx_queue->rxd.index + in efx_realloc_channels()
[all …]
net_driver.h
548 struct efx_rx_queue rx_queue; member
1381 int (*rx_probe)(struct efx_rx_queue *rx_queue);
1382 void (*rx_init)(struct efx_rx_queue *rx_queue);
1573 return channel->rx_queue.core_index >= 0; in efx_channel_has_rx_queue()
1580 return &channel->rx_queue; in efx_channel_get_rx_queue()
1588 for (_rx_queue = &(_channel)->rx_queue; \
1595 return container_of(rx_queue, struct efx_channel, rx_queue); in efx_rx_queue_channel()
1600 return efx_rx_queue_channel(rx_queue)->channel; in efx_rx_queue_index()
1609 return &rx_queue->buffer[index]; in efx_rx_buffer()
1615 if (unlikely(rx_buf == efx_rx_buffer(rx_queue, rx_queue->ptr_mask))) in efx_rx_buf_next()
[all …]
mcdi_functions.h
25 int efx_mcdi_rx_probe(struct efx_rx_queue *rx_queue);
26 void efx_mcdi_rx_init(struct efx_rx_queue *rx_queue);
27 void efx_mcdi_rx_remove(struct efx_rx_queue *rx_queue);
28 void efx_mcdi_rx_fini(struct efx_rx_queue *rx_queue);
ef10.c
2582 rx_queue, in efx_ef10_rx_write()
2583 rx_queue->notified_count & rx_queue->ptr_mask); in efx_ef10_rx_write()
2671 netif_dbg(rx_queue->efx, hw, rx_queue->efx->net_dev, in efx_ef10_handle_rx_abort()
2673 rx_queue->scatter_n); in efx_ef10_handle_rx_abort()
2675 rx_desc_ptr = rx_queue->removed_count & rx_queue->ptr_mask; in efx_ef10_handle_rx_abort()
2677 efx_rx_packet(rx_queue, rx_desc_ptr, rx_queue->scatter_n, in efx_ef10_handle_rx_abort()
2680 rx_queue->removed_count += rx_queue->scatter_n; in efx_ef10_handle_rx_abort()
2857 ++rx_queue->scatter_n; in efx_ef10_handle_rx_event()
2906 rx_queue->removed_count & rx_queue->ptr_mask, in efx_ef10_handle_rx_event()
2907 rx_queue->scatter_n, rx_queue->scatter_len, in efx_ef10_handle_rx_event()
[all …]
nic.h
321 int efx_farch_rx_probe(struct efx_rx_queue *rx_queue);
322 void efx_farch_rx_init(struct efx_rx_queue *rx_queue);
323 void efx_farch_rx_fini(struct efx_rx_queue *rx_queue);
324 void efx_farch_rx_remove(struct efx_rx_queue *rx_queue);
325 void efx_farch_rx_write(struct efx_rx_queue *rx_queue);
326 void efx_farch_rx_defer_refill(struct efx_rx_queue *rx_queue);
/linux/drivers/net/ethernet/freescale/
gianfar.c
117 if (bdp == rx_queue->rx_bd_base + rx_queue->rx_ring_size - 1) in gfar_init_rxbdp()
552 if (!grp->rx_queue) in gfar_parse_group()
553 grp->rx_queue = priv->rx_queue[i]; in gfar_parse_group()
1142 rx_queue = priv->rx_queue[i]; in free_skb_resources()
1317 rx_queue = priv->rx_queue[i]; in gfar_init_bds()
1326 gfar_alloc_rx_buffs(rx_queue, gfar_rxbd_unused(rx_queue)); in gfar_init_bds()
1373 rx_queue = priv->rx_queue[i]; in gfar_alloc_skb_resources()
1397 rx_queue = priv->rx_queue[i]; in gfar_alloc_skb_resources()
1398 rx_queue->rx_buff = kcalloc(rx_queue->rx_ring_size, in gfar_alloc_skb_resources()
1549 rx_queue = priv->rx_queue[i]; in gfar_update_link_state()
[all …]
/linux/drivers/net/ethernet/ibm/
ibmveth.c
105 return be32_to_cpu(adapter->rx_queue.queue_addr[adapter->rx_queue.index].flags_off); in ibmveth_rxq_flags()
136 return be32_to_cpu(adapter->rx_queue.queue_addr[adapter->rx_queue.index].length); in ibmveth_rxq_frame_length()
397 u64 correlator = adapter->rx_queue.queue_addr[adapter->rx_queue.index].correlator; in ibmveth_rxq_get_buffer()
440 if (++adapter->rx_queue.index == adapter->rx_queue.num_slots) { in ibmveth_rxq_recycle_buffer()
441 adapter->rx_queue.index = 0; in ibmveth_rxq_recycle_buffer()
442 adapter->rx_queue.toggle = !adapter->rx_queue.toggle; in ibmveth_rxq_recycle_buffer()
453 if (++adapter->rx_queue.index == adapter->rx_queue.num_slots) { in ibmveth_rxq_harvest_buffer()
454 adapter->rx_queue.index = 0; in ibmveth_rxq_harvest_buffer()
455 adapter->rx_queue.toggle = !adapter->rx_queue.toggle; in ibmveth_rxq_harvest_buffer()
541 adapter->rx_queue.index = 0; in ibmveth_open()
[all …]
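
The ibmveth matches advance rx_queue.index and, on wrap, reset it to zero while flipping a toggle flag, which lets the driver distinguish freshly written ring entries from stale ones. A minimal sketch of that advance, using invented demo_* names, is shown here.

/*
 * Sketch of the index/toggle advance seen in the ibmveth matches: when
 * the index wraps past the last slot it resets to zero and the toggle
 * bit flips.
 */
#include <stdbool.h>
#include <stdio.h>

struct demo_rxq_state {
	unsigned int index;
	unsigned int num_slots;
	bool toggle;
};

static void demo_rxq_advance(struct demo_rxq_state *rxq)
{
	if (++rxq->index == rxq->num_slots) {
		rxq->index = 0;
		rxq->toggle = !rxq->toggle;
	}
}

int main(void)
{
	struct demo_rxq_state rxq = { .num_slots = 4, .toggle = true };

	for (int i = 0; i < 9; i++) {
		printf("index=%u toggle=%d\n", rxq.index, rxq.toggle);
		demo_rxq_advance(&rxq);
	}
	return 0;
}
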
/linux/tools/testing/selftests/bpf/progs/
bpf_iter_tcp4.c
85 int rx_queue; in dump_tcp_sock() local
116 rx_queue = sp->sk_ack_backlog; in dump_tcp_sock()
118 rx_queue = tp->rcv_nxt - tp->copied_seq; in dump_tcp_sock()
119 if (rx_queue < 0) in dump_tcp_sock()
120 rx_queue = 0; in dump_tcp_sock()
127 tp->write_seq - tp->snd_una, rx_queue, in dump_tcp_sock()
bpf_iter_tcp6.c
85 int rx_queue; in dump_tcp6_sock() local
116 rx_queue = sp->sk_ack_backlog; in dump_tcp6_sock()
118 rx_queue = tp->tcp.rcv_nxt - tp->tcp.copied_seq; in dump_tcp6_sock()
119 if (rx_queue < 0) in dump_tcp6_sock()
120 rx_queue = 0; in dump_tcp6_sock()
131 tp->tcp.write_seq - tp->tcp.snd_una, rx_queue, in dump_tcp6_sock()
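
Both BPF iterator programs compute rx_queue the same way: a listening socket reports its accept backlog, any other socket reports the unread byte count rcv_nxt - copied_seq, clamped at zero. The plain C sketch below reproduces only that arithmetic; demo_tcp_state is a stand-in, not the kernel's tcp_sock.

/*
 * Plain C sketch of the rx_queue computation in the bpf_iter_tcp4/6
 * matches above.  The struct is illustrative, not the kernel's.
 */
#include <stdio.h>

struct demo_tcp_state {
	int listening;           /* nonzero for a LISTEN socket */
	unsigned int sk_ack_backlog;
	unsigned int rcv_nxt;    /* next sequence expected from the peer */
	unsigned int copied_seq; /* head of the yet-unread data */
};

static int demo_rx_queue(const struct demo_tcp_state *tp)
{
	int rx_queue;

	if (tp->listening)
		rx_queue = tp->sk_ack_backlog;
	else
		rx_queue = tp->rcv_nxt - tp->copied_seq;

	/* Sequence-number races can make the difference negative;
	 * report zero in that case, as the iterator programs do. */
	if (rx_queue < 0)
		rx_queue = 0;
	return rx_queue;
}

int main(void)
{
	struct demo_tcp_state est = { .rcv_nxt = 1500, .copied_seq = 1000 };

	printf("rx_queue = %d\n", demo_rx_queue(&est));   /* prints 500 */
	return 0;
}
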
/linux/drivers/net/xen-netback/
rx.c
89 spin_lock_irqsave(&queue->rx_queue.lock, flags); in xenvif_rx_queue_tail()
98 if (skb_queue_empty(&queue->rx_queue)) in xenvif_rx_queue_tail()
101 __skb_queue_tail(&queue->rx_queue, skb); in xenvif_rx_queue_tail()
106 spin_unlock_irqrestore(&queue->rx_queue.lock, flags); in xenvif_rx_queue_tail()
113 spin_lock_irq(&queue->rx_queue.lock); in xenvif_rx_dequeue()
115 skb = __skb_dequeue(&queue->rx_queue); in xenvif_rx_dequeue()
117 xenvif_update_needed_slots(queue, skb_peek(&queue->rx_queue)); in xenvif_rx_dequeue()
128 spin_unlock_irq(&queue->rx_queue.lock); in xenvif_rx_dequeue()
146 skb = skb_peek(&queue->rx_queue); in xenvif_rx_queue_drop_expired()
549 skb = skb_peek(&queue->rx_queue); in xenvif_rx_queue_timeout()
[all …]
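
In the xen-netback matches, rx_queue is an sk_buff_head whose embedded lock is taken by the public enqueue/dequeue helpers, while the double-underscore list primitives assume the lock is already held. The sketch below mirrors that split with a pthread mutex standing in for the spinlock; the demo_* names are illustrative only.

/*
 * Userspace sketch of a queue that embeds its own lock: the
 * double-underscore helper assumes the lock is held, the public
 * helpers take it.  Build with -pthread.
 */
#include <pthread.h>
#include <stdio.h>
#include <stdlib.h>

struct demo_pkt {
	int id;
	struct demo_pkt *next;
};

struct demo_queue {
	pthread_mutex_t lock;
	struct demo_pkt *head, *tail;
};

/* Caller must hold queue->lock. */
static void __demo_queue_tail(struct demo_queue *q, struct demo_pkt *pkt)
{
	pkt->next = NULL;
	if (q->tail)
		q->tail->next = pkt;
	else
		q->head = pkt;
	q->tail = pkt;
}

static void demo_queue_tail(struct demo_queue *q, struct demo_pkt *pkt)
{
	pthread_mutex_lock(&q->lock);
	__demo_queue_tail(q, pkt);
	pthread_mutex_unlock(&q->lock);
}

static struct demo_pkt *demo_dequeue(struct demo_queue *q)
{
	struct demo_pkt *pkt;

	pthread_mutex_lock(&q->lock);
	pkt = q->head;
	if (pkt) {
		q->head = pkt->next;
		if (!q->head)
			q->tail = NULL;
	}
	pthread_mutex_unlock(&q->lock);
	return pkt;
}

int main(void)
{
	struct demo_queue q = { .lock = PTHREAD_MUTEX_INITIALIZER };
	struct demo_pkt *pkt = malloc(sizeof(*pkt));

	pkt->id = 1;
	demo_queue_tail(&q, pkt);
	pkt = demo_dequeue(&q);
	printf("dequeued packet %d\n", pkt ? pkt->id : -1);
	free(pkt);
	return 0;
}
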
/linux/drivers/net/wan/
hdlc_x25.c
28 struct sk_buff_head rx_queue; member
42 struct sk_buff *skb = skb_dequeue(&x25st->rx_queue); in x25_rx_queue_kick()
46 skb = skb_dequeue(&x25st->rx_queue); in x25_rx_queue_kick()
67 skb_queue_tail(&x25st->rx_queue, skb); in x25_connect_disconnect()
98 skb_queue_tail(&x25st->rx_queue, skb); in x25_data_indication()
348 skb_queue_head_init(&state(hdlc)->rx_queue); in x25_ioctl()
lapbether.c
57 struct sk_buff_head rx_queue; member
96 skb = skb_dequeue(&lapbeth->rx_queue); in lapbeth_napi_poll()
177 skb_queue_tail(&lapbeth->rx_queue, skb); in lapbeth_data_indication()
276 skb_queue_tail(&lapbeth->rx_queue, skb); in lapbeth_connected()
294 skb_queue_tail(&lapbeth->rx_queue, skb); in lapbeth_disconnected()
410 skb_queue_head_init(&lapbeth->rx_queue); in lapbeth_new_device()

