Searched refs:rbi (Results 1 – 6 of 6) sorted by relevance
/linux/drivers/hv/
ring_buffer.c
   57   if (READ_ONCE(rbi->ring_buffer->interrupt_mask))  in hv_signal_on_write()
  141   dsize = rbi->ring_datasize;  in hv_get_ringbuffer_availbytes()
  441   u32 priv_read_loc = rbi->priv_read_index;  in hv_pkt_iter_avail()
  462   return (struct vmpacket_descriptor *)(hv_get_ring_buffer(rbi) + rbi->priv_read_index);  in hv_pkt_iter_first_raw()
  481   bytes_avail = min(rbi->pkt_buffer_size, hv_pkt_iter_avail(rbi));  in hv_pkt_iter_first()
  533   u32 dsize = rbi->ring_datasize;  in __hv_pkt_iter_next()
  538   if (rbi->priv_read_index >= dsize)  in __hv_pkt_iter_next()
  539   rbi->priv_read_index -= dsize;  in __hv_pkt_iter_next()
  554   rbi->priv_read_index;  in hv_pkt_iter_bytes_read()
  589   rbi->ring_buffer->read_index = rbi->priv_read_index;  in hv_pkt_iter_close()
  [all …]
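The __hv_pkt_iter_next() hits at lines 533-539 show the iterator advancing a private read cursor and folding it back into the ring with a single subtraction, and hv_pkt_iter_close() at line 589 publishing it. Below is a minimal user-space sketch of that wraparound pattern; the struct and function names are illustrative, not the kernel's.

/*
 * Minimal sketch of the wraparound step at lines 533-539 above: the
 * iterator's private read cursor is advanced past a packet and folded
 * back into [0, dsize), then published on close (line 589).
 */
#include <stdint.h>
#include <stdio.h>

struct ring_info {
        uint32_t ring_datasize;         /* usable bytes in the ring data area */
        uint32_t priv_read_index;       /* iterator-local cursor */
        uint32_t read_index;            /* cursor visible to the other side */
};

static void pkt_iter_next(struct ring_info *rbi, uint32_t pkt_len)
{
        uint32_t dsize = rbi->ring_datasize;

        /* one subtraction suffices because pkt_len is assumed <= dsize */
        rbi->priv_read_index += pkt_len;
        if (rbi->priv_read_index >= dsize)
                rbi->priv_read_index -= dsize;
}

static void pkt_iter_close(struct ring_info *rbi)
{
        /* publish the private cursor, as line 589 above does */
        rbi->read_index = rbi->priv_read_index;
}

int main(void)
{
        struct ring_info rbi = { .ring_datasize = 4096, .priv_read_index = 4000 };

        pkt_iter_next(&rbi, 200);       /* 4200 wraps to 104 */
        pkt_iter_close(&rbi);
        printf("read_index = %u\n", (unsigned)rbi.read_index);
        return 0;
}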
vmbus_drv.c
 1723   mutex_lock(&rbi->ring_buffer_mutex);  in out_mask_show()
 1724   if (!rbi->ring_buffer) {  in out_mask_show()
 1730   mutex_unlock(&rbi->ring_buffer_mutex);  in out_mask_show()
 1740   mutex_lock(&rbi->ring_buffer_mutex);  in in_mask_show()
 1741   if (!rbi->ring_buffer) {  in in_mask_show()
 1747   mutex_unlock(&rbi->ring_buffer_mutex);  in in_mask_show()
 1757   mutex_lock(&rbi->ring_buffer_mutex);  in read_avail_show()
 1758   if (!rbi->ring_buffer) {  in read_avail_show()
 1764   mutex_unlock(&rbi->ring_buffer_mutex);  in read_avail_show()
 1774   mutex_lock(&rbi->ring_buffer_mutex);  in write_avail_show()
 [all …]
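Every vmbus sysfs handler above repeats the same guard: take ring_buffer_mutex, check that ring_buffer is still mapped, and only then read from it. A standalone sketch of that lock/check/compute/unlock shape, with a pthread mutex standing in for the kernel mutex and illustrative names:

#include <pthread.h>
#include <stdio.h>

struct ring_buffer_info {
        pthread_mutex_t ring_buffer_mutex;
        unsigned int *ring_buffer;      /* NULL once the channel is torn down */
};

static int read_avail_show(struct ring_buffer_info *rbi, char *buf, size_t len)
{
        int ret;

        pthread_mutex_lock(&rbi->ring_buffer_mutex);
        if (!rbi->ring_buffer) {
                pthread_mutex_unlock(&rbi->ring_buffer_mutex);
                return -1;              /* channel gone; nothing to report */
        }
        ret = snprintf(buf, len, "%u\n", *rbi->ring_buffer);
        pthread_mutex_unlock(&rbi->ring_buffer_mutex);
        return ret;
}

int main(void)
{
        unsigned int avail = 512;
        struct ring_buffer_info rbi = {
                .ring_buffer_mutex = PTHREAD_MUTEX_INITIALIZER,
                .ring_buffer = &avail,
        };
        char buf[32];

        if (read_avail_show(&rbi, buf, sizeof(buf)) > 0)
                fputs(buf, stdout);
        return 0;
}

Checking ring_buffer under the mutex is what keeps a sysfs read from racing with channel teardown, which is why each handler repeats the same three lines.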
/linux/include/linux/ |
hyperv.h
  195   dsize = rbi->ring_datasize;  in hv_get_bytes_to_read()
  196   read_loc = rbi->ring_buffer->read_index;  in hv_get_bytes_to_read()
  209   dsize = rbi->ring_datasize;  in hv_get_bytes_to_write()
  210   read_loc = READ_ONCE(rbi->ring_buffer->read_index);  in hv_get_bytes_to_write()
  211   write_loc = rbi->ring_buffer->write_index;  in hv_get_bytes_to_write()
  219   const struct hv_ring_buffer_info *rbi)  in hv_get_avail_to_write_percent() argument
  221   u32 avail_write = hv_get_bytes_to_write(rbi);  in hv_get_avail_to_write_percent()
  225   rbi->ring_size_div10_reciprocal);  in hv_get_avail_to_write_percent()
 1618   rbi->ring_buffer->interrupt_mask = 1;  in hv_begin_read()
 1630   rbi->ring_buffer->interrupt_mask = 0;  in hv_end_read()
  [all …]
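hv_get_bytes_to_read() and hv_get_bytes_to_write() reduce to circular-buffer arithmetic over the two indices; hv_get_avail_to_write_percent() then scales the write headroom by the ring size, which the kernel does with a precomputed reciprocal (ring_size_div10_reciprocal) to avoid a runtime division. A sketch of that arithmetic as plain functions, not the kernel's inlines:

#include <stdint.h>
#include <stdio.h>

static uint32_t bytes_to_read(uint32_t dsize, uint32_t read_loc, uint32_t write_loc)
{
        /* whatever the writer is ahead of the reader, possibly wrapped */
        return (write_loc >= read_loc) ? write_loc - read_loc
                                       : (dsize - read_loc) + write_loc;
}

static uint32_t bytes_to_write(uint32_t dsize, uint32_t read_loc, uint32_t write_loc)
{
        /*
         * The complement of the above; the writer elsewhere avoids filling
         * the ring completely so full and empty index states stay
         * distinguishable.
         */
        return (write_loc >= read_loc) ? dsize - (write_loc - read_loc)
                                       : read_loc - write_loc;
}

int main(void)
{
        uint32_t dsize = 4096, rd = 1000, wr = 3000;

        printf("avail to read  = %u\n", (unsigned)bytes_to_read(dsize, rd, wr));   /* 2000 */
        printf("avail to write = %u\n", (unsigned)bytes_to_write(dsize, rd, wr));  /* 2096 */
        return 0;
}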
/linux/drivers/net/vmxnet3/ |
vmxnet3_drv.c
  578   rbi->len,  in vmxnet3_rq_alloc_rx_buf()
  587   rbi->skb->data, rbi->len,  in vmxnet3_rq_alloc_rx_buf()
  615   put_page(rbi->page);  in vmxnet3_rq_alloc_rx_buf()
 1419   ctx->skb = rbi->skb;  in vmxnet3_rq_rx_complete()
 1447   ctx->skb = rbi->skb;  in vmxnet3_rq_rx_complete()
 1468   rbi->dma_addr,  in vmxnet3_rq_rx_complete()
 1469   rbi->len,  in vmxnet3_rq_rx_complete()
 1473   rbi->skb = new_skb;  in vmxnet3_rq_rx_complete()
 1476   rxd->len = rbi->len;  in vmxnet3_rq_rx_complete()
 1561   rbi->dma_addr, rbi->len,  in vmxnet3_rq_rx_complete()
 [all …]
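In vmxnet3, rbi is a receive buffer descriptor, and the vmxnet3_rq_rx_complete() hits show a filled buffer being handed up only after a replacement (new_skb at line 1473) is in hand, so the ring entry can be repointed at the fresh buffer. A sketch of that replace-then-repost pattern, with malloc() standing in for skb and DMA allocation and illustrative names:

#include <stdlib.h>

struct rx_buf_info {                    /* plays the role of vmxnet3's rbi */
        void   *buf;
        size_t  len;
};

/* Returns the filled buffer to pass up the stack, or NULL to drop. */
static void *rx_complete(struct rx_buf_info *rbi)
{
        void *filled = rbi->buf;
        void *fresh  = malloc(rbi->len);

        if (!fresh)
                return NULL;            /* keep the old buffer, drop this packet */

        rbi->buf = fresh;               /* descriptor now owns the new buffer */
        return filled;                  /* caller consumes and frees this one */
}

int main(void)
{
        struct rx_buf_info rbi = { .buf = malloc(2048), .len = 2048 };
        void *pkt = rx_complete(&rbi);

        free(pkt);                      /* "consume" the received data */
        free(rbi.buf);
        return 0;
}

Allocating the replacement before consuming the old buffer means an allocation failure degrades to one dropped packet rather than a hole in the rx ring.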
/linux/drivers/md/ |
raid5.c
 1075   struct bio *bi, *rbi;  in ops_run_io() local
 1250   rbi->bi_opf, i);  in ops_run_io()
 1263   rbi->bi_vcnt = 1;  in ops_run_io()
 1274   rbi->bi_vcnt = 0;  in ops_run_io()
 1392   rbi = dev->read;  in ops_complete_biofill()
 1394   while (rbi && rbi->bi_iter.bi_sector <  in ops_complete_biofill()
 1397   bio_endio(rbi);  in ops_complete_biofill()
 1398   rbi = rbi2;  in ops_complete_biofill()
 1422   struct bio *rbi;  in ops_run_biofill() local
 1427   while (rbi && rbi->bi_iter.bi_sector <  in ops_run_biofill()
 [all …]
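In raid5.c, rbi is a read bio, and ops_complete_biofill() walks the chain of bios overlapping a stripe and ends each one. The next pointer (rbi2 in the kernel) is fetched before bio_endio(), because completing a bio may free it. A sketch of that walk over a plain linked list standing in for the bio chain:

#include <stdlib.h>

struct fake_bio {
        unsigned long    sector;
        struct fake_bio *next;
};

static void complete_range(struct fake_bio *rbi, unsigned long stripe_end)
{
        while (rbi && rbi->sector < stripe_end) {
                struct fake_bio *rbi2 = rbi->next;      /* grab before freeing */

                free(rbi);                              /* stands in for bio_endio() */
                rbi = rbi2;
        }
}

int main(void)
{
        struct fake_bio *b2 = malloc(sizeof(*b2));
        struct fake_bio *b1 = malloc(sizeof(*b1));

        b2->sector = 8;  b2->next = NULL;
        b1->sector = 0;  b1->next = b2;
        complete_range(b1, 16);                         /* frees both */
        return 0;
}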
/linux/drivers/net/hyperv/ |
netvsc.c
 1578   struct hv_ring_buffer_info *rbi = &channel->inbound;  in netvsc_channel_cb() local
 1581   prefetch(hv_get_ring_buffer(rbi) + rbi->priv_read_index);  in netvsc_channel_cb()
 1585   hv_begin_read(rbi);  in netvsc_channel_cb()
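netvsc_channel_cb() prefetches the location of the next descriptor in the inbound ring and then calls hv_begin_read(), which (per the hyperv.h hit at line 1618) sets interrupt_mask so the rest of the ring is drained by polling. A rough standalone sketch of that poll-entry step, using the GCC/Clang __builtin_prefetch and plain-C stand-ins for the kernel structures:

#include <stdint.h>

struct ring_buffer {
        volatile uint32_t interrupt_mask;
        uint8_t data[4096];
};

struct ring_info {
        struct ring_buffer *ring_buffer;
        uint32_t priv_read_index;
};

static void channel_cb(struct ring_info *rbi)
{
        /* warm the line holding the next packet descriptor */
        __builtin_prefetch(rbi->ring_buffer->data + rbi->priv_read_index);

        /* enter polling mode: no more host signalling until we are done */
        rbi->ring_buffer->interrupt_mask = 1;

        /* ... drain packets here, then clear the mask and re-check ... */
}

int main(void)
{
        static struct ring_buffer rb;
        struct ring_info rbi = { .ring_buffer = &rb, .priv_read_index = 0 };

        channel_cb(&rbi);
        return 0;
}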
Completed in 39 milliseconds