/linux/tools/virtio/ringtest/ |
virtio_ring_0_9.c
    75   memset(p, 0, vring_size(ring_size, 0x1000));   in alloc_ring()
    76   vring_init(&ring, ring_size, p, 0x1000);   in alloc_ring()
    85   for (i = 0; i < ring_size - 1; i++)   in alloc_ring()
    89   guest.num_free = ring_size;   in alloc_ring()
    90   data = malloc(ring_size * sizeof *data);   in alloc_ring()
    95   memset(data, 0, ring_size * sizeof *data);   in alloc_ring()
    136  ring.avail->ring[avail & (ring_size - 1)] =   in add_inbuf()
    165  index &= ring_size - 1;   in get_buf()
    283  used_idx &= ring_size - 1;   in use_buf()
    284  desc = &ring.desc[head & (ring_size - 1)];   in use_buf()
    [all …]
|
ring.c
    82   ret = posix_memalign((void **)&ring, 0x1000, ring_size * sizeof *ring);   in alloc_ring()
    97   for (i = 0; i < ring_size; ++i) {   in alloc_ring()
    103  guest.num_free = ring_size;   in alloc_ring()
    104  data = calloc(ring_size, sizeof(*data));   in alloc_ring()
    120  head = (ring_size - 1) & (guest.avail_idx++);   in add_inbuf()
    145  unsigned head = (ring_size - 1) & guest.last_used_idx;   in get_buf()
    154  index = ring[head].index & (ring_size - 1);   in get_buf()
    166  unsigned head = (ring_size - 1) & guest.last_used_idx;   in used_empty()
    221  unsigned head = (ring_size - 1) & host.used_idx;   in avail_empty()
    228  unsigned head = (ring_size - 1) & host.used_idx;   in use_buf()
|
main.c
    29   unsigned ring_size = 256;   variable
    288  ring_size,   in help()
    322  ring_size = strtol(optarg, &endptr, 0);   in main()
    323  assert(ring_size && !(ring_size & (ring_size - 1)));   in main()
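The ringtest hits above all depend on the ring size being a power of two: a free-running index is reduced to a slot with "& (ring_size - 1)" instead of a modulo, and main.c rejects any other size at startup. A minimal userspace sketch of that idiom (not the ringtest code itself; names and the loop are illustrative):

#include <assert.h>
#include <stdio.h>

static unsigned ring_size = 256;	/* default, as at main.c line 29 */

int main(void)
{
	unsigned avail_idx = 0;
	unsigned i;

	/* same shape as the check at main.c line 323: non-zero power of two */
	assert(ring_size && !(ring_size & (ring_size - 1)));

	for (i = 0; i < 1000; i++) {
		/* free-running index reduced to a slot, as at ring.c line 120 */
		unsigned head = (ring_size - 1) & (avail_idx++);

		if (i % 300 == 0)
			printf("index %u -> slot %u\n", avail_idx - 1, head);
	}
	return 0;
}

The power-of-two restriction is what lets the index wrap naturally at the type's width while the mask keeps the slot number in range.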
|
/linux/include/xen/interface/io/ |
ring.h
    348  static inline RING_IDX name##_mask(RING_IDX idx, RING_IDX ring_size) \
    350  return idx & (ring_size - 1); \
    355  RING_IDX ring_size) \
    357  return buf + name##_mask(idx, ring_size); \
    365  RING_IDX ring_size) \
    368  size <= ring_size - *masked_cons) { \
    371  memcpy(opaque, buf + *masked_cons, ring_size - *masked_cons); \
    372  memcpy((unsigned char *)opaque + ring_size - *masked_cons, buf, \
    373  size - (ring_size - *masked_cons)); \
    375  *masked_cons = name##_mask(*masked_cons + size, ring_size); \
    [all …]
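The Xen flex-ring macros shown here mask indices into a power-of-two byte ring and split a read that crosses the end of the buffer into two copies. A hedged plain-C sketch of that wrap-aware read, with the generated name##_* helpers flattened into ordinary functions:

#include <string.h>

/* mask helper, as generated around ring.h lines 348-350 */
static unsigned flex_ring_mask(unsigned idx, unsigned ring_size)
{
	return idx & (ring_size - 1);
}

/* wrap-aware read: one memcpy if the span is contiguous, two if it wraps */
static void flex_ring_read(void *opaque, const unsigned char *buf,
			   unsigned *masked_cons, unsigned size,
			   unsigned ring_size)
{
	if (size <= ring_size - *masked_cons) {
		memcpy(opaque, buf + *masked_cons, size);
	} else {
		unsigned first = ring_size - *masked_cons;

		memcpy(opaque, buf + *masked_cons, first);
		memcpy((unsigned char *)opaque + first, buf, size - first);
	}
	*masked_cons = flex_ring_mask(*masked_cons + size, ring_size);
}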
|
/linux/drivers/gpu/drm/amd/amdgpu/ |
amdgpu_ih.c
    42   unsigned ring_size, bool use_bus_addr)   in amdgpu_ih_ring_init() argument
    48   rb_bufsz = order_base_2(ring_size / 4);   in amdgpu_ih_ring_init()
    49   ring_size = (1 << rb_bufsz) * 4;   in amdgpu_ih_ring_init()
    50   ih->ring_size = ring_size;   in amdgpu_ih_ring_init()
    51   ih->ptr_mask = ih->ring_size - 1;   in amdgpu_ih_ring_init()
    64   ih->ring = dma_alloc_coherent(adev->dev, ih->ring_size + 8,   in amdgpu_ih_ring_init()
    70   ih->wptr_addr = dma_addr + ih->ring_size;   in amdgpu_ih_ring_init()
    71   ih->wptr_cpu = &ih->ring[ih->ring_size / 4];   in amdgpu_ih_ring_init()
    72   ih->rptr_addr = dma_addr + ih->ring_size + 4;   in amdgpu_ih_ring_init()
    73   ih->rptr_cpu = &ih->ring[(ih->ring_size / 4) + 1];   in amdgpu_ih_ring_init()
    [all …]
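amdgpu_ih_ring_init() rounds the requested byte size up to a power-of-two count of 32-bit entries, derives ptr_mask from it, and allocates 8 extra bytes so the write- and read-pointer shadows can live directly after the ring. A sketch of the sizing arithmetic only; order_base_2() is emulated here because the real helper lives in the kernel's log2.h:

/* stand-in for order_base_2(): ceil(log2(n)) for n >= 1 */
static unsigned order_base_2_approx(unsigned n)
{
	unsigned order = 0;

	while ((1u << order) < n)
		order++;
	return order;
}

static void ih_size_setup(unsigned ring_size,
			  unsigned *size_bytes, unsigned *ptr_mask)
{
	unsigned rb_bufsz = order_base_2_approx(ring_size / 4);

	ring_size = (1u << rb_bufsz) * 4;	/* power-of-two bytes, 4-byte entries */
	*size_bytes = ring_size;
	*ptr_mask = ring_size - 1;		/* byte-offset mask, as at line 51 */
}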
|
navi10_ih.c
    53   if (adev->irq.ih.ring_size) {   in navi10_ih_init_register_offset()
    66   if (adev->irq.ih1.ring_size) {   in navi10_ih_init_register_offset()
    77   if (adev->irq.ih2.ring_size) {   in navi10_ih_init_register_offset()
    203  if (ih[i]->ring_size) {   in navi10_ih_toggle_interrupts()
    215  int rb_bufsz = order_base_2(ih->ring_size / 4);   in navi10_ih_rb_cntl()
    353  if (ih[i]->ring_size) {   in navi10_ih_irq_init()
    373  if (adev->irq.ih_soft.ring_size)   in navi10_ih_irq_init()
    466  if ((v < ih->ring_size) && (v != ih->rptr))   in navi10_ih_irq_rearm()
    570  adev->irq.ih1.ring_size = 0;   in navi10_ih_sw_init()
    571  adev->irq.ih2.ring_size = 0;   in navi10_ih_sw_init()
|
psp_v11_0_8.c
    43   ring->ring_size = 0x1000;   in psp_v11_0_8_ring_init()
    44   ret = amdgpu_bo_create_kernel(adev, ring->ring_size, PAGE_SIZE,   in psp_v11_0_8_ring_init()
    50   ring->ring_size = 0;   in psp_v11_0_8_ring_init()
    135  psp_ring_reg = ring->ring_size;   in psp_v11_0_8_ring_create()
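The psp_v*_ring_init() hits in this directory share one pattern: the ring gets a fixed 4 KiB size, and if the backing buffer cannot be allocated the size is reset to 0 so later code, which gates work on a non-zero ring_size, treats the ring as absent. A sketch of the control flow under that assumption; the allocator callback is a hypothetical stand-in for amdgpu_bo_create_kernel():

struct demo_psp_ring {
	unsigned ring_size;
	void *ring_mem;
};

/* alloc_buf is an illustrative callback, not a kernel API */
static int demo_psp_ring_init(struct demo_psp_ring *ring,
			      void *(*alloc_buf)(unsigned size))
{
	ring->ring_size = 0x1000;		/* fixed 4 KiB ring */
	ring->ring_mem = alloc_buf(ring->ring_size);
	if (!ring->ring_mem) {
		ring->ring_size = 0;		/* later code skips a zero-sized ring */
		return -1;
	}
	return 0;
}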
|
vega10_ih.c
    51   if (adev->irq.ih.ring_size) {   in vega10_ih_init_register_offset()
    64   if (adev->irq.ih1.ring_size) {   in vega10_ih_init_register_offset()
    75   if (adev->irq.ih2.ring_size) {   in vega10_ih_init_register_offset()
    148  if (ih[i]->ring_size) {   in vega10_ih_toggle_interrupts()
    160  int rb_bufsz = order_base_2(ih->ring_size / 4);   in vega10_ih_rb_cntl()
    285  if (ih[i]->ring_size) {   in vega10_ih_irq_init()
    299  if (adev->irq.ih_soft.ring_size)   in vega10_ih_irq_init()
    392  if ((v < ih->ring_size) && (v != ih->rptr))   in vega10_ih_irq_rearm()
|
vega20_ih.c
    54   if (adev->irq.ih.ring_size) {   in vega20_ih_init_register_offset()
    67   if (adev->irq.ih1.ring_size) {   in vega20_ih_init_register_offset()
    78   if (adev->irq.ih2.ring_size) {   in vega20_ih_init_register_offset()
    152  if (ih[i]->ring_size) {   in vega20_ih_toggle_interrupts()
    164  int rb_bufsz = order_base_2(ih->ring_size / 4);   in vega20_ih_rb_cntl()
    334  if (ih[i]->ring_size) {   in vega20_ih_irq_init()
    350  if (adev->irq.ih_soft.ring_size)   in vega20_ih_irq_init()
    444  if ((v < ih->ring_size) && (v != ih->rptr))   in vega20_ih_irq_rearm()
|
amdgpu_ring.c
    244  ring->ring_size = roundup_pow_of_two(max_dw * 4 * sched_hw_submission);   in amdgpu_ring_init()
    246  ring->buf_mask = (ring->ring_size / 4) - 1;   in amdgpu_ring_init()
    251  r = amdgpu_bo_create_kernel(adev, ring->ring_size + ring->funcs->extra_dw, PAGE_SIZE,   in amdgpu_ring_init()
    394  if (*pos >= (ring->ring_size + 12))   in amdgpu_debugfs_ring_read()
    429  ring->ring_size + 12);   in amdgpu_debugfs_ring_init()
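Here the ring is sized in bytes, rounded up to a power of two large enough for max_dw dwords per submission times the scheduler's hardware submission count, and buf_mask then indexes the ring in dwords. A sketch of that arithmetic only; roundup_pow_of_two() is emulated because the kernel helper lives in log2.h:

/* stand-in for roundup_pow_of_two() */
static unsigned long roundup_pow_of_two_approx(unsigned long n)
{
	unsigned long p = 1;

	while (p < n)
		p <<= 1;
	return p;
}

static void ring_sizing(unsigned max_dw, unsigned sched_hw_submission,
			unsigned long *ring_size, unsigned long *buf_mask)
{
	/* bytes: max_dw dwords per submission, times the hw submission count */
	*ring_size = roundup_pow_of_two_approx((unsigned long)max_dw * 4 *
					       sched_hw_submission);
	*buf_mask = (*ring_size / 4) - 1;	/* mask for dword offsets */
}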
|
psp_v10_0.c
    141  ring->ring_size = 0x1000;   in psp_v10_0_ring_init()
    142  ret = amdgpu_bo_create_kernel(adev, ring->ring_size, PAGE_SIZE,   in psp_v10_0_ring_init()
    148  ring->ring_size = 0;   in psp_v10_0_ring_init()
    170  psp_ring_reg = ring->ring_size;   in psp_v10_0_ring_create()
|
psp_v3_1.c
    174  ring->ring_size = 0x1000;   in psp_v3_1_ring_init()
    175  ret = amdgpu_bo_create_kernel(adev, ring->ring_size, PAGE_SIZE,   in psp_v3_1_ring_init()
    181  ring->ring_size = 0;   in psp_v3_1_ring_init()
    265  psp_ring_reg = ring->ring_size;   in psp_v3_1_ring_create()
|
psp_v13_0.c
    231  ring->ring_size = 0x1000;   in psp_v13_0_ring_init()
    232  ret = amdgpu_bo_create_kernel(adev, ring->ring_size, PAGE_SIZE,   in psp_v13_0_ring_init()
    238  ring->ring_size = 0;   in psp_v13_0_ring_init()
    323  psp_ring_reg = ring->ring_size;   in psp_v13_0_ring_create()
|
amdgpu_ih.h
    49   unsigned ring_size;   member
    88   unsigned ring_size, bool use_bus_addr);
|
/linux/drivers/crypto/ccp/ |
tee-dev.c
    24   static int tee_alloc_ring(struct psp_tee_device *tee, int ring_size)   in tee_alloc_ring() argument
    29   if (!ring_size)   in tee_alloc_ring()
    35   start_addr = (void *)__get_free_pages(GFP_KERNEL, get_order(ring_size));   in tee_alloc_ring()
    39   memset(start_addr, 0x0, ring_size);   in tee_alloc_ring()
    41   rb_mgr->ring_size = ring_size;   in tee_alloc_ring()
    56   get_order(rb_mgr->ring_size));   in tee_free_ring()
    59   rb_mgr->ring_size = 0;   in tee_free_ring()
    95   cmd->size = tee->rb_mgr.ring_size;   in tee_alloc_cmd_buffer()
    110  int ring_size = MAX_RING_BUFFER_ENTRIES * sizeof(struct tee_ring_cmd);   in tee_init_ring() local
    118  ret = tee_alloc_ring(tee, ring_size);   in tee_init_ring()
    [all …]
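tee_init_ring() computes the ring size as an entry count times the per-command size, and tee_alloc_ring()/tee_free_ring() back it with whole pages, converting the byte count to a page order with get_order() at both allocation and free. A sketch of that conversion only; the constants and helper are stand-ins, since the real get_order() and PAGE_SIZE come from the kernel:

#define DEMO_PAGE_SHIFT	12
#define DEMO_PAGE_SIZE	(1UL << DEMO_PAGE_SHIFT)

/* stand-in for get_order(): pages needed, rounded up, as a power-of-two order */
static int get_order_approx(unsigned long size)
{
	unsigned long pages = (size + DEMO_PAGE_SIZE - 1) >> DEMO_PAGE_SHIFT;
	int order = 0;

	while ((1UL << order) < pages)
		order++;
	return order;
}

For example, a 12 KiB ring would round up to 16 KiB (order 2) on a 4 KiB-page system, and the same order must be passed when the pages are freed.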
|
/linux/drivers/gpu/drm/radeon/ |
radeon_ring.c
    86   ring->ring_free_dw = rptr + (ring->ring_size / 4);   in radeon_ring_free_size()
    91   ring->ring_free_dw = ring->ring_size / 4;   in radeon_ring_free_size()
    112  if (ndw > (ring->ring_size / 4))   in radeon_ring_alloc()
    314  size = ring->wptr + (ring->ring_size / 4);   in radeon_ring_backup()
    381  int radeon_ring_init(struct radeon_device *rdev, struct radeon_ring *ring, unsigned ring_size,   in radeon_ring_init() argument
    386  ring->ring_size = ring_size;   in radeon_ring_init()
    392  r = radeon_bo_create(rdev, ring->ring_size, PAGE_SIZE, true,   in radeon_ring_init()
    417  ring->ptr_mask = (ring->ring_size / 4) - 1;   in radeon_ring_init()
    418  ring->ring_free_dw = ring->ring_size / 4;   in radeon_ring_init()
    474  count = (ring->ring_size / 4) - ring->ring_free_dw;   in radeon_debugfs_ring_info_show()
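radeon keeps ring_size in bytes but does its free-space accounting in dwords: ptr_mask is (ring_size / 4) - 1, and the free count is derived from the read and write pointers with that mask, with a result of zero treated as a completely empty ring (lines 86 and 91). A hedged sketch of that computation, collapsed into one function for illustration:

static unsigned ring_free_dw(unsigned rptr, unsigned wptr,
			     unsigned ring_size_bytes)
{
	unsigned num_dw = ring_size_bytes / 4;
	/* works because num_dw is a power of two */
	unsigned free = (rptr + num_dw - wptr) & (num_dw - 1);

	return free ? free : num_dw;	/* 0 means the ring is empty, all free */
}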
|
/linux/drivers/xen/ |
evtchn.c
    67   unsigned int ring_size;   member
    98   return idx & (u->ring_size - 1);   in evtchn_ring_offset()
    178  if ((prod - cons) < u->ring_size) {   in evtchn_interrupt()
    235  if (((c ^ p) & u->ring_size) != 0) {   in evtchn_read()
    236  bytes1 = (u->ring_size - evtchn_ring_offset(u, c)) *   in evtchn_read()
    322  if (u->nr_evtchns <= u->ring_size)   in evtchn_resize_ring()
    325  if (u->ring_size == 0)   in evtchn_resize_ring()
    328  new_size = 2 * u->ring_size;   in evtchn_resize_ring()
    355  memcpy(new_ring + u->ring_size, old_ring,   in evtchn_resize_ring()
    356  u->ring_size * sizeof(*u->ring));   in evtchn_resize_ring()
    [all …]
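The evtchn ring grows by doubling while staying a power of two, and the producer/consumer indices keep running; the resize path copies the old contents into the enlarged ring so every still-pending masked index lands on valid data (the hit at line 355 is the copy into the upper half). A userspace sketch under those assumptions; the initial size and the duplicate-copy detail are illustrative, not a verbatim transcription of the driver:

#include <stdlib.h>
#include <string.h>

struct evt_ring {
	unsigned int *ring;
	unsigned int ring_size;		/* always a power of two */
};

static int demo_resize_ring(struct evt_ring *u)
{
	unsigned int new_size = u->ring_size ? 2 * u->ring_size : 64;
	unsigned int *new_ring = malloc(new_size * sizeof(*new_ring));

	if (!new_ring)
		return -1;

	if (u->ring_size) {
		/* copy into both halves so wrapped and full rings stay valid */
		memcpy(new_ring, u->ring, u->ring_size * sizeof(*u->ring));
		memcpy(new_ring + u->ring_size, u->ring,
		       u->ring_size * sizeof(*u->ring));
	}
	free(u->ring);
	u->ring = new_ring;
	u->ring_size = new_size;
	return 0;
}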
|
/linux/drivers/net/ethernet/amazon/ena/ |
ena_netdev.h
    84   #define ENA_TX_RING_IDX_NEXT(idx, ring_size) (((idx) + 1) & ((ring_size) - 1))   argument
    86   #define ENA_RX_RING_IDX_NEXT(idx, ring_size) (((idx) + 1) & ((ring_size) - 1))   argument
    87   #define ENA_RX_RING_IDX_ADD(idx, n, ring_size) \   argument
    88   (((idx) + (n)) & ((ring_size) - 1))
    279  int ring_size;   member
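These ENA macros assume a power-of-two ring_size and advance an index by masking rather than comparing against the end. A generic sketch of the same pattern with a tiny usage (the macro names below are illustrative, not the driver's):

#include <stdio.h>

#define RING_IDX_NEXT(idx, ring_size)	(((idx) + 1) & ((ring_size) - 1))
#define RING_IDX_ADD(idx, n, ring_size)	(((idx) + (n)) & ((ring_size) - 1))

int main(void)
{
	unsigned idx = 254, ring_size = 256;

	idx = RING_IDX_NEXT(idx, ring_size);	/* 255 */
	idx = RING_IDX_ADD(idx, 3, ring_size);	/* wraps around to 2 */
	printf("%u\n", idx);
	return 0;
}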
|
/linux/drivers/crypto/qat/qat_common/ |
adf_transport.c
    85   ADF_MAX_INFLIGHTS(ring->ring_size, ring->msg_size)) {   in adf_send_message()
    95   ADF_RING_SIZE_MODULO(ring->ring_size));   in adf_send_message()
    116  ADF_RING_SIZE_MODULO(ring->ring_size));   in adf_handle_response()
    131  u32 ring_config = BUILD_RING_CONFIG(ring->ring_size);   in adf_configure_tx_ring()
    143  BUILD_RESP_RING_CONFIG(ring->ring_size,   in adf_configure_rx_ring()
    160  ADF_SIZE_TO_RING_SIZE_IN_BYTES(ring->ring_size);   in adf_init_ring()
    186  ring->ring_size);   in adf_init_ring()
    198  ADF_SIZE_TO_RING_SIZE_IN_BYTES(ring->ring_size);   in adf_cleanup_ring()
    263  ring->ring_size = adf_verify_ring_size(msg_size, num_msgs);   in adf_create_ring()
|
/linux/drivers/net/ethernet/atheros/ |
ag71xx.c
    794   int ring_mask, ring_size;   in ag71xx_tx_packets() local
    798   ring_size = BIT(ring->order);   in ag71xx_tx_packets()
    1225  int ring_size = BIT(ring->order);   in ag71xx_ring_tx_init() local
    1226  int ring_mask = ring_size - 1;   in ag71xx_ring_tx_init()
    1229  for (i = 0; i < ring_size; i++) {   in ag71xx_ring_tx_init()
    1250  int ring_size = BIT(ring->order);   in ag71xx_ring_rx_clean() local
    1256  for (i = 0; i < ring_size; i++)   in ag71xx_ring_rx_clean()
    1370  int ring_size, tx_size;   in ag71xx_rings_init() local
    1400  int ring_size;   in ag71xx_rings_free() local
    1576  ring_size = BIT(ring->order);   in ag71xx_hard_start_xmit()
    [all …]
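ag71xx stores only the ring's order (log2 of the descriptor count) and derives the size and mask locally wherever they are needed, so the ring is a power of two by construction. A sketch of that derivation; BIT() is the kernel's (1UL << n), emulated here, and the struct is a stand-in:

struct demo_ring {
	unsigned order;			/* log2 of the descriptor count */
};

static void ring_dims(const struct demo_ring *ring,
		      unsigned *ring_size, unsigned *ring_mask)
{
	*ring_size = 1u << ring->order;	/* the driver writes BIT(ring->order) */
	*ring_mask = *ring_size - 1;	/* e.g. order 8 -> 256 slots, mask 0xff */
}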
|
/linux/arch/powerpc/platforms/pasemi/ |
dma_lib.c
    240  int pasemi_dma_alloc_ring(struct pasemi_dmachan *chan, int ring_size)   in pasemi_dma_alloc_ring() argument
    244  chan->ring_size = ring_size;   in pasemi_dma_alloc_ring()
    247  ring_size * sizeof(u64),   in pasemi_dma_alloc_ring()
    266  dma_free_coherent(&dma_pdev->dev, chan->ring_size * sizeof(u64),   in pasemi_dma_free_ring()
    269  chan->ring_size = 0;   in pasemi_dma_free_ring()
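Here ring_size is an element count of 64-bit descriptors, and the same ring_size * sizeof(u64) byte size must be used for both the coherent allocation and the free, after which the stored size is zeroed. A userspace sketch of that bookkeeping; calloc()/free() stand in for dma_alloc_coherent()/dma_free_coherent() purely for illustration:

#include <stdint.h>
#include <stdlib.h>

struct demo_chan {
	uint64_t *ring_virt;
	int ring_size;			/* number of u64 descriptors */
};

static int demo_alloc_ring(struct demo_chan *chan, int ring_size)
{
	chan->ring_size = ring_size;
	chan->ring_virt = calloc(ring_size, sizeof(uint64_t));
	return chan->ring_virt ? 0 : -1;
}

static void demo_free_ring(struct demo_chan *chan)
{
	free(chan->ring_virt);		/* same ring_size * sizeof(u64) bytes */
	chan->ring_virt = NULL;
	chan->ring_size = 0;
}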
|
/linux/drivers/net/ethernet/pensando/ionic/ |
ionic_dev.c
    515  .q_init.ring_size = ilog2(q->num_descs),   in ionic_dev_cmd_adminq_init()
    532  unsigned int ring_size;   in ionic_cq_init() local
    537  ring_size = ilog2(num_descs);   in ionic_cq_init()
    538  if (ring_size < 2 || ring_size > 16)   in ionic_cq_init()
    600  unsigned int ring_size;   in ionic_q_init() local
    605  ring_size = ilog2(num_descs);   in ionic_q_init()
    606  if (ring_size < 2 || ring_size > 16)   in ionic_q_init()
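ionic hands the device the ring size as log2 of the descriptor count, so the driver validates that ilog2(num_descs) falls in [2, 16] (4 to 65536 descriptors). A sketch of that validation; ilog2() is emulated here, and the extra power-of-two check is an assumption added for illustration rather than a line from the driver:

/* stand-in for ilog2(): floor(log2(n)), -1 for n == 0 */
static int ilog2_approx(unsigned n)
{
	int log = -1;

	while (n) {
		n >>= 1;
		log++;
	}
	return log;
}

static int check_ring_size(unsigned num_descs)
{
	int ring_size = ilog2_approx(num_descs);

	if (ring_size < 2 || ring_size > 16)
		return -1;			/* the driver returns -EINVAL here */
	if (num_descs & (num_descs - 1))
		return -1;			/* assumption: count must be a power of two */
	return ring_size;
}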
|
/linux/tools/testing/selftests/bpf/progs/ |
test_ringbuf.c
    31  long ring_size = 0;   variable
    72  ring_size = bpf_ringbuf_query(&ringbuf, BPF_RB_RING_SIZE);   in test_ringbuf()
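In the BPF selftest the program asks the kernel for its ring buffer's size with bpf_ringbuf_query(..., BPF_RB_RING_SIZE) and stores it in a global the userspace side can read. A hedged sketch of a program doing the same query; the map size, section name, and program layout here are illustrative and differ from the actual selftest:

#include <linux/bpf.h>
#include <bpf/bpf_helpers.h>

struct {
	__uint(type, BPF_MAP_TYPE_RINGBUF);
	__uint(max_entries, 4096);	/* assumption: one page, power of two */
} ringbuf SEC(".maps");

long ring_size = 0;			/* read back by the userspace test */

SEC("tp/syscalls/sys_enter_getpgid")
int query_ring_size(void *ctx)
{
	ring_size = bpf_ringbuf_query(&ringbuf, BPF_RB_RING_SIZE);
	return 0;
}

char _license[] SEC("license") = "GPL";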
|
/linux/drivers/gpu/drm/r128/ |
r128_ioc32.c
    45  int ring_size;   member
    77  init.ring_size = init32.ring_size;   in compat_r128_init()
|
/linux/drivers/net/ethernet/ti/ |
cpmac.c
    191   int ring_size;   member
    684   for (i = 0; i < priv->ring_size; i++) {   in cpmac_clear_rx()
    829   ring->rx_pending = priv->ring_size;   in cpmac_get_ringparam()
    842   priv->ring_size = ring->rx_pending;   in cpmac_set_ringparam()
    925   size = priv->ring_size + CPMAC_QUEUES;   in cpmac_open()
    939   for (i = 0, desc = priv->rx_head; i < priv->ring_size; i++, desc++) {   in cpmac_open()
    952   desc->next = &priv->rx_head[(i + 1) % priv->ring_size];   in cpmac_open()
    978   for (i = 0; i < priv->ring_size; i++) {   in cpmac_open()
    1024  for (i = 0; i < priv->ring_size; i++) {   in cpmac_stop()
    1035  (CPMAC_QUEUES + priv->ring_size),   in cpmac_stop()
    [all …]
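The hit at cpmac.c line 952 shows the other common way to build a ring: an array of descriptors is linked into a circle by pointing each entry's next at the following slot, with a modulo wrapping the last entry back to the first. Because the wrap uses %, the size does not have to be a power of two here, unlike the masking idioms above. A sketch with a stand-in descriptor type:

struct demo_desc {
	struct demo_desc *next;
	/* hardware fields omitted */
};

static void link_rx_ring(struct demo_desc *rx_head, int ring_size)
{
	int i;

	for (i = 0; i < ring_size; i++)
		rx_head[i].next = &rx_head[(i + 1) % ring_size];
}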
|