/linux/tools/testing/selftests/bpf/

test_lru_map.c
    260  unsigned int batch_size;  [in test_lru_sanity1(), local]
    273  batch_size = tgt_free / 2;  [in test_lru_sanity1()]
    276  map_size = tgt_free + batch_size;  [in test_lru_sanity1()]
    292  end_key = 1 + batch_size;  [in test_lru_sanity1()]
    337  unsigned int batch_size;  [in test_lru_sanity2(), local]
    350  batch_size = tgt_free / 2;  [in test_lru_sanity2()]
    392  end_key = 1 + batch_size;  [in test_lru_sanity2()]
    444  unsigned int batch_size;  [in test_lru_sanity3(), local]
    457  batch_size = tgt_free / 2;  [in test_lru_sanity3()]
    476  end_key = tgt_free + batch_size;  [in test_lru_sanity3()]
    [all …]

xdpxceiver.c
    717  static void complete_pkts(struct xsk_socket_info *xsk, int batch_size)  [in complete_pkts(), argument]
    725  rcvd = xsk_ring_cons__peek(&xsk->umem->cq, batch_size, &idx);  [in complete_pkts()]

/linux/samples/bpf/

xdpsock_user.c
    1177  int batch_size)  [in complete_tx_only(), argument]
    1273  batch_size) {  [in tx_only()]
    1274  complete_tx_only(xsk, batch_size);  [in tx_only()]
    1279  for (i = 0; i < batch_size; i++) {  [in tx_only()]
    1286  xsk_ring_prod__submit(&xsk->tx, batch_size);  [in tx_only()]
    1287  xsk->ring_stats.tx_npkts += batch_size;  [in tx_only()]
    1288  xsk->outstanding_tx += batch_size;  [in tx_only()]
    1289  *frame_nb += batch_size;  [in tx_only()]
    1291  complete_tx_only(xsk, batch_size);  [in tx_only()]
    1334  int batch_size = get_batch_size(pkt_cnt);  [in tx_only_all(), local]
    [all …]

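The xdpsock hits show the usual AF_XDP transmit pattern: push batch_size descriptors onto the TX ring in one go, add batch_size to an outstanding counter, and later drain up to batch_size completions. Below is a minimal, self-contained sketch of just that bookkeeping; struct toy_xsk and the simplified tx_only()/complete_tx_only() here are hypothetical stand-ins for the sample's helpers, not the libbpf/libxdp xsk API.

#include <stdio.h>

#define RING_SIZE 64        /* toy ring capacity */
#define BATCH     16        /* frames submitted per call, like batch_size */

struct toy_xsk {
    unsigned int tx_free;         /* free slots on the TX ring */
    unsigned int outstanding_tx;  /* submitted but not yet completed */
    unsigned long tx_npkts;       /* running transmit counter */
};

/* Pretend the NIC completed up to 'budget' frames; return how many. */
static unsigned int complete_tx_only(struct toy_xsk *xsk, unsigned int budget)
{
    unsigned int done = budget < xsk->outstanding_tx ? budget : xsk->outstanding_tx;

    xsk->outstanding_tx -= done;
    xsk->tx_free += done;         /* completed slots can be reused */
    return done;
}

/* Submit one batch if the ring has room, mirroring the shape of tx_only(). */
static void tx_only(struct toy_xsk *xsk, unsigned int batch_size)
{
    if (xsk->tx_free < batch_size) {
        /* Ring is full: reap completions before trying again. */
        complete_tx_only(xsk, batch_size);
        return;
    }

    xsk->tx_free -= batch_size;          /* "reserve" batch_size descriptors */
    xsk->tx_npkts += batch_size;         /* stats, like ring_stats.tx_npkts */
    xsk->outstanding_tx += batch_size;   /* remember what still needs completing */

    complete_tx_only(xsk, batch_size);   /* opportunistically drain completions */
}

int main(void)
{
    struct toy_xsk xsk = { .tx_free = RING_SIZE };

    for (int i = 0; i < 8; i++)
        tx_only(&xsk, BATCH);

    printf("sent %lu, still outstanding %u\n", xsk.tx_npkts, xsk.outstanding_tx);
    return 0;
}
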
/linux/drivers/gpu/drm/i915/gt/

intel_renderstate.c
    88  so->batch_size = rodata->batch_items * sizeof(u32);  [in render_state_setup()]
    223  so->batch_offset, so->batch_size,  [in intel_renderstate_emit()]

intel_renderstate.h
    40  u32 batch_size;  [member]

/linux/drivers/net/ethernet/sfc/falcon/

rx.c
    334  unsigned int fill_level, batch_size;  [in ef4_fast_push_rx_descriptors(), local]
    352  batch_size = efx->rx_pages_per_batch * efx->rx_bufs_per_page;  [in ef4_fast_push_rx_descriptors()]
    354  EF4_BUG_ON_PARANOID(space < batch_size);  [in ef4_fast_push_rx_descriptors()]
    371  } while ((space -= batch_size) >= batch_size);  [in ef4_fast_push_rx_descriptors()]

/linux/drivers/net/ethernet/sfc/

rx_common.c
    469  unsigned int fill_level, batch_size;  [in efx_fast_push_rx_descriptors(), local]
    487  batch_size = efx->rx_pages_per_batch * efx->rx_bufs_per_page;  [in efx_fast_push_rx_descriptors()]
    489  EFX_WARN_ON_ONCE_PARANOID(space < batch_size);  [in efx_fast_push_rx_descriptors()]
    504  } while ((space -= batch_size) >= batch_size);  [in efx_fast_push_rx_descriptors()]

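Both sfc variants refill the RX ring only in whole batches: batch_size is pages-per-batch times buffers-per-page, and the do/while loop keeps adding one batch while at least a full batch of space remains. A self-contained sketch of that loop shape follows; the constants and the add_one_batch() helper are invented for illustration and stand in for the driver's buffer allocation.

#include <stdio.h>

#define RX_PAGES_PER_BATCH 4
#define RX_BUFS_PER_PAGE   2
#define RX_MAX_FILL        64

/* Stand-in for queuing one batch of fresh RX buffers to the hardware. */
static void add_one_batch(unsigned int *fill_level, unsigned int batch_size)
{
    *fill_level += batch_size;
}

/* Refill in whole batches only, mirroring efx_fast_push_rx_descriptors(). */
static void fast_push_rx_descriptors(unsigned int *fill_level)
{
    unsigned int batch_size = RX_PAGES_PER_BATCH * RX_BUFS_PER_PAGE;
    unsigned int space = RX_MAX_FILL - *fill_level;

    if (space < batch_size)   /* not even one full batch fits: do nothing */
        return;

    do {
        add_one_batch(fill_level, batch_size);
    } while ((space -= batch_size) >= batch_size);
}

int main(void)
{
    unsigned int fill_level = 13;

    fast_push_rx_descriptors(&fill_level);
    /* 64 - 13 = 51 free slots -> 6 batches of 8 pushed, 3 slots left unfilled */
    printf("fill_level = %u\n", fill_level);
    return 0;
}
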
/linux/drivers/dma/idxd/

sysfs.c
    876  u64 batch_size;  [in wq_max_batch_size_store(), local]
    882  rc = __get_sysfs_u64(buf, &batch_size);  [in wq_max_batch_size_store()]
    886  if (batch_size > idxd->max_batch_size)  [in wq_max_batch_size_store()]
    889  wq->max_batch_size = (u32)batch_size;  [in wq_max_batch_size_store()]

/linux/drivers/md/

raid5.c
    6378  int i, batch_size = 0, hash;  [in handle_active_stripes(), local]
    6383  batch[batch_size++] = sh;  [in handle_active_stripes()]
    6385  if (batch_size == 0) {  [in handle_active_stripes()]
    6393  return batch_size;  [in handle_active_stripes()]
    6419  return batch_size;  [in handle_active_stripes()]
    6438  int batch_size, released;  [in raid5_do_work(), local]
    6445  if (!batch_size && !released)  [in raid5_do_work()]
    6447  handled += batch_size;  [in raid5_do_work()]
    6489  int batch_size, released;  [in raid5d(), local]
    6520  if (!batch_size && !released)  [in raid5d()]
    [all …]

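In raid5, handle_active_stripes() pulls pending stripes into a local batch array and returns how many it grabbed; the worker then loops until a pass returns zero (and nothing was released). A stripped-down sketch of that collect-and-return-count pattern is below; the pending counter, take_one(), handle_active_items() and MAX_BATCH are invented stand-ins, not the raid5 data structures.

#include <stdio.h>

#define MAX_BATCH 8       /* cap per pass, like MAX_STRIPE_BATCH */

static int pending = 21;  /* pretend 21 work items are queued */

/* Pop one queued item, or return 0 when the queue is empty. */
static int take_one(void)
{
    if (pending == 0)
        return 0;
    pending--;
    return 1;
}

/* Fill batch[] with up to MAX_BATCH items and report how many were taken. */
static int handle_active_items(int batch[MAX_BATCH])
{
    int batch_size = 0;

    while (batch_size < MAX_BATCH && take_one()) {
        batch[batch_size] = batch_size;   /* record the item (toy payload) */
        batch_size++;
    }

    /* ... the real code now processes the whole batch ... */
    return batch_size;
}

/* Worker loop: keep taking batches until a pass finds nothing to do. */
static int worker(void)
{
    int batch[MAX_BATCH];
    int handled = 0;

    for (;;) {
        int batch_size = handle_active_items(batch);

        if (!batch_size)      /* the real loop also checks 'released' */
            break;
        handled += batch_size;
    }
    return handled;
}

int main(void)
{
    printf("handled %d items\n", worker());   /* prints: handled 21 items */
    return 0;
}
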
/linux/fs/btrfs/

tree-log.c
    3746  int batch_size = 0;  [in process_dir_items_leaf(), local]
    3834  if (batch_size > 0) {  [in process_dir_items_leaf()]
    3836  batch_start, batch_size);  [in process_dir_items_leaf()]
    3839  batch_size = 0;  [in process_dir_items_leaf()]
    3843  if (batch_size == 0)  [in process_dir_items_leaf()]
    3845  batch_size++;  [in process_dir_items_leaf()]
    3848  if (batch_size > 0) {  [in process_dir_items_leaf()]
    3852  batch_start, batch_size);  [in process_dir_items_leaf()]

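The btrfs hits follow the usual run-batching shape: walk the items, grow the current batch while consecutive items qualify, flush the batch when an item has to be skipped, and flush whatever is left after the loop. A generic sketch of that shape follows; the should_copy() predicate and flush_batch() callback are placeholders for illustration, not btrfs functions.

#include <stdio.h>

/* Placeholder: decide whether item i should be copied. */
static int should_copy(int i)
{
    return i % 5 != 0;   /* arbitrary rule so some items get skipped */
}

/* Placeholder: process one contiguous run [batch_start, batch_start + batch_size). */
static void flush_batch(int batch_start, int batch_size)
{
    printf("copy %d item(s) starting at %d\n", batch_size, batch_start);
}

static void process_items(int nr_items)
{
    int batch_start = 0;
    int batch_size = 0;

    for (int i = 0; i < nr_items; i++) {
        if (!should_copy(i)) {
            /* Run is broken: flush what has accumulated so far. */
            if (batch_size > 0) {
                flush_batch(batch_start, batch_size);
                batch_size = 0;
            }
            continue;
        }
        if (batch_size == 0)     /* first item of a new run */
            batch_start = i;
        batch_size++;
    }

    if (batch_size > 0)          /* trailing run left after the loop */
        flush_batch(batch_start, batch_size);
}

int main(void)
{
    process_items(12);
    return 0;
}
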
/linux/drivers/net/wireless/mediatek/mt76/

mt76_connac_mcu.c
    1989  int i, n_chan, batch_size, idx = 0, tx_power, last_ch;  [in mt76_connac_mcu_rate_txpower_band(), local]
    2009  batch_size = DIV_ROUND_UP(n_chan, batch_len);  [in mt76_connac_mcu_rate_txpower_band()]
    2018  for (i = 0; i < batch_size; i++) {  [in mt76_connac_mcu_rate_txpower_band()]
    2023  num_ch = i == batch_size - 1 ? n_chan % batch_len : batch_len;  [in mt76_connac_mcu_rate_txpower_band()]

/linux/drivers/media/pci/tw5864/

tw5864-video.c
    1323  int batch_size = H264_BUF_CNT;  [in tw5864_handle_frame_task(), local]
    1326  while (dev->h264_buf_r_index != dev->h264_buf_w_index && batch_size--) {  [in tw5864_handle_frame_task()]

/linux/mm/

vmscan.c
    716  long batch_size = shrinker->batch ? shrinker->batch  [in do_shrink_slab(), local]
    766  while (total_scan >= batch_size ||  [in do_shrink_slab()]
    769  unsigned long nr_to_scan = min(batch_size, total_scan);  [in do_shrink_slab()]

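do_shrink_slab() lets each shrinker choose its own batch size (falling back to a default when shrinker->batch is 0) and then asks for at most batch_size objects per callback until the scan target is consumed. A condensed, self-contained sketch of that loop follows; struct toy_shrinker, scan_cache() and DEFAULT_BATCH are simplified stand-ins for the kernel's struct shrinker interface, and the second half of the real loop condition is omitted.

#include <stdio.h>

#define DEFAULT_BATCH 128   /* stand-in for the kernel's default batch */

struct toy_shrinker {
    long batch;                          /* 0 means "use the default" */
    long (*scan_cb)(long nr_to_scan);    /* returns how many objects it freed */
};

/* Free at most nr_to_scan objects from a pretend cache of 1000 entries. */
static long cache_objects = 1000;
static long scan_cache(long nr_to_scan)
{
    long freed = nr_to_scan < cache_objects ? nr_to_scan : cache_objects;

    cache_objects -= freed;
    return freed;
}

/* Scan 'total_scan' objects in batch_size chunks, like do_shrink_slab(). */
static long do_shrink_slab(struct toy_shrinker *shrinker, long total_scan)
{
    long batch_size = shrinker->batch ? shrinker->batch : DEFAULT_BATCH;
    long freed = 0;

    while (total_scan >= batch_size) {
        long nr_to_scan = batch_size < total_scan ? batch_size : total_scan;

        freed += shrinker->scan_cb(nr_to_scan);
        total_scan -= nr_to_scan;
    }
    return freed;
}

int main(void)
{
    struct toy_shrinker shrinker = { .batch = 0, .scan_cb = scan_cache };

    /* 500 requested -> three full batches of 128; the sub-batch remainder is skipped */
    printf("freed %ld objects\n", do_shrink_slab(&shrinker, 500));
    return 0;
}
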
memory.c
    1818  const int batch_size = min_t(int, pages_to_write_in_pmd, 8);  [in insert_pages(), local]
    1821  for (pte = start_pte; pte_idx < batch_size; ++pte, ++pte_idx) {  [in insert_pages()]
    1834  pages_to_write_in_pmd -= batch_size;  [in insert_pages()]
    1835  remaining_pages_total -= batch_size;  [in insert_pages()]

/linux/tools/perf/scripts/python/

exported-sql-viewer.py
    2620  def FetchBatch(self, batch_size):  [argument]
    2622  while batch_size > fetched:
    2641  batch_size = min(glb_chunk_sz, target - self.fetched)
    2642  self.FetchBatch(batch_size)