Searched refs:chunk_size (Results 1 – 25 of 130) sorted by relevance

/linux/drivers/md/
dm-exception-store.c
145 unsigned chunk_size; in set_chunk_size() local
147 if (kstrtouint(chunk_size_arg, 10, &chunk_size)) { in set_chunk_size()
152 if (!chunk_size) { in set_chunk_size()
161 unsigned chunk_size, in dm_exception_store_set_chunk_size() argument
165 if (!is_power_of_2(chunk_size)) { in dm_exception_store_set_chunk_size()
171 if (chunk_size % in dm_exception_store_set_chunk_size()
173 chunk_size % in dm_exception_store_set_chunk_size()
179 if (chunk_size > INT_MAX >> SECTOR_SHIFT) { in dm_exception_store_set_chunk_size()
184 store->chunk_size = chunk_size; in dm_exception_store_set_chunk_size()
185 store->chunk_mask = chunk_size - 1; in dm_exception_store_set_chunk_size()
[all …]
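
The dm-exception-store.c hits above parse a chunk-size string, reject zero, non-power-of-two and oversized values, and cache chunk_size - 1 as a mask. A minimal userspace sketch of that validation pattern (the divisibility checks against the underlying devices are omitted; this is an illustration, not the DM code itself):

#include <limits.h>
#include <stdbool.h>
#include <stdio.h>

#define SECTOR_SHIFT 9    /* 512-byte sectors, as in the kernel */

static bool is_power_of_2(unsigned int n)
{
    return n != 0 && (n & (n - 1)) == 0;
}

/* Mirrors the checks visible in dm_exception_store_set_chunk_size(). */
static int set_chunk_size(unsigned int chunk_size,
                          unsigned int *out_size, unsigned int *out_mask)
{
    if (!chunk_size)
        return -1;                        /* zero chunk size */
    if (!is_power_of_2(chunk_size))
        return -1;                        /* must be a power of two */
    if (chunk_size > INT_MAX >> SECTOR_SHIFT)
        return -1;                        /* would overflow byte arithmetic */

    *out_size = chunk_size;
    *out_mask = chunk_size - 1;           /* cheap modulo for powers of two */
    return 0;
}

int main(void)
{
    unsigned int size, mask;

    if (set_chunk_size(16, &size, &mask) == 0)
        printf("chunk_size=%u chunk_mask=0x%x\n", size, mask);
    return 0;
}
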
dm-unstripe.c
21 uint32_t chunk_size; member
61 if (kstrtouint(argv[1], 10, &uc->chunk_size) || !uc->chunk_size) { in unstripe_ctr()
87 uc->unstripe_offset = uc->unstripe * uc->chunk_size; in unstripe_ctr()
88 uc->unstripe_width = (uc->stripes - 1) * uc->chunk_size; in unstripe_ctr()
89 uc->chunk_shift = is_power_of_2(uc->chunk_size) ? fls(uc->chunk_size) - 1 : 0; in unstripe_ctr()
92 if (sector_div(tmp_len, uc->chunk_size)) { in unstripe_ctr()
97 if (dm_set_target_max_io_len(ti, uc->chunk_size)) { in unstripe_ctr()
126 sector_div(tmp_sector, uc->chunk_size); in map_to_core()
156 uc->stripes, (unsigned long long)uc->chunk_size, uc->unstripe, in unstripe_status()
179 limits->chunk_sectors = uc->chunk_size; in unstripe_io_hints()
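
The dm-unstripe hits compute the offset and width of the extracted stripe and, when the chunk size is a power of two, a shift that replaces the division. A rough userspace sketch of that arithmetic (fls() approximated with a GCC/Clang builtin, illustrative values; not the driver code):

#include <stdint.h>
#include <stdio.h>

/* fls()-style "find last set bit", 1-based, 0 if no bit is set. */
static unsigned int fls32(uint32_t x)
{
    return x ? 32 - (unsigned int)__builtin_clz(x) : 0;
}

int main(void)
{
    uint32_t chunk_size = 128;  /* sectors per chunk (example) */
    uint32_t stripes = 4;       /* stripes in the original volume */
    uint32_t unstripe = 1;      /* which stripe to extract */

    uint64_t unstripe_offset = (uint64_t)unstripe * chunk_size;
    uint64_t unstripe_width = (uint64_t)(stripes - 1) * chunk_size;
    uint32_t chunk_shift = (chunk_size & (chunk_size - 1)) == 0 ?
                           fls32(chunk_size) - 1 : 0;

    printf("offset=%llu width=%llu shift=%u\n",
           (unsigned long long)unstripe_offset,
           (unsigned long long)unstripe_width, chunk_shift);
    return 0;
}
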
dm-snap-persistent.c
81 __le32 chunk_size; member
234 .sector = ps->store->chunk_size * chunk, in chunk_io()
235 .count = ps->store->chunk_size, in chunk_io()
307 unsigned chunk_size; in read_header() local
315 if (!ps->store->chunk_size) { in read_header()
352 chunk_size = le32_to_cpu(dh->chunk_size); in read_header()
354 if (ps->store->chunk_size == chunk_size) in read_header()
360 chunk_size, ps->store->chunk_size); in read_header()
369 chunk_size, chunk_err); in read_header()
391 dh->chunk_size = cpu_to_le32(ps->store->chunk_size); in write_header()
[all …]
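
dm-snap-persistent.c stores chunk_size in the on-disk header as a little-endian 32-bit field and compares it against the configured value when the header is read back. A small sketch of that round-trip (hand-rolled le32 helpers instead of the kernel's cpu_to_le32/le32_to_cpu; not the snapshot format itself):

#include <stdint.h>
#include <stdio.h>

/* Encode/decode a little-endian u32 regardless of host endianness. */
static void put_le32(uint8_t *p, uint32_t v)
{
    p[0] = v & 0xff; p[1] = (v >> 8) & 0xff;
    p[2] = (v >> 16) & 0xff; p[3] = (v >> 24) & 0xff;
}

static uint32_t get_le32(const uint8_t *p)
{
    return (uint32_t)p[0] | (uint32_t)p[1] << 8 |
           (uint32_t)p[2] << 16 | (uint32_t)p[3] << 24;
}

int main(void)
{
    uint8_t header_chunk_size[4];
    uint32_t configured = 16;   /* chunks of 16 sectors (example) */

    put_le32(header_chunk_size, configured);        /* write_header() side */
    uint32_t on_disk = get_le32(header_chunk_size); /* read_header() side  */

    if (on_disk != configured)
        fprintf(stderr, "chunk size mismatch: %u vs %u\n", on_disk, configured);
    else
        printf("chunk size %u matches header\n", on_disk);
    return 0;
}
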
dm-stripe.c
35 uint32_t chunk_size; member
90 uint32_t chunk_size; in stripe_ctr() local
104 if (kstrtouint(argv[1], 10, &chunk_size) || !chunk_size) { in stripe_ctr()
117 if (sector_div(tmp_len, chunk_size)) { in stripe_ctr()
163 sc->chunk_size = chunk_size; in stripe_ctr()
164 if (chunk_size & (chunk_size - 1)) in stripe_ctr()
167 sc->chunk_size_shift = __ffs(chunk_size); in stripe_ctr()
224 chunk *= sc->chunk_size; in stripe_map_sector()
426 (unsigned long long)sc->chunk_size); in stripe_status()
505 unsigned chunk_size = sc->chunk_size << SECTOR_SHIFT; in stripe_io_hints() local
[all …]
dm-snap-transient.c
45 if (size < (tc->next_free + store->chunk_size)) in transient_prepare_exception()
49 tc->next_free += store->chunk_size; in transient_prepare_exception()
97 DMEMIT(" N %llu", (unsigned long long)store->chunk_size); in transient_status()
/linux/tools/testing/selftests/net/
tcp_mmap.c
85 static size_t chunk_size = 512*1024; variable
193 zc.length = chunk_size; in child_thread()
201 assert(zc.length <= chunk_size); in child_thread()
212 assert(zc.recv_skip_hint <= chunk_size); in child_thread()
223 while (sub < chunk_size) { in child_thread()
224 lu = read(fd, buffer + sub, chunk_size - sub); in child_thread()
263 munmap(raddr, chunk_size + map_align); in child_thread()
317 rcvlowat = chunk_size; in do_accept()
423 chunk_size = atol(optarg); in main()
506 if (wr > chunk_size) in main()
[all …]
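
In tcp_mmap.c, whatever the zero-copy path does not deliver is consumed with plain read() in a loop until the chunk is complete. A hedged sketch of that short-read loop (no TCP_ZEROCOPY_RECEIVE here, error handling reduced to the essentials):

#include <errno.h>
#include <stdio.h>
#include <unistd.h>

/* Read up to chunk_size bytes, retrying on short reads, as the selftest
 * does for the remainder a zero-copy receive leaves behind. */
static ssize_t read_full_chunk(int fd, char *buffer, size_t chunk_size)
{
    size_t sub = 0;

    while (sub < chunk_size) {
        ssize_t lu = read(fd, buffer + sub, chunk_size - sub);

        if (lu == 0)
            break;                /* EOF / peer closed */
        if (lu < 0) {
            if (errno == EINTR)
                continue;
            return -1;
        }
        sub += (size_t)lu;
    }
    return (ssize_t)sub;
}

int main(void)
{
    static char buffer[512 * 1024];  /* matches the selftest's default chunk */
    ssize_t got = read_full_chunk(STDIN_FILENO, buffer, sizeof(buffer));

    printf("read %zd bytes\n", got);
    return 0;
}
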
/linux/drivers/gpu/drm/i915/
i915_buddy.c
74 if (size < chunk_size) in i915_buddy_init()
77 if (chunk_size < PAGE_SIZE) in i915_buddy_init()
80 if (!is_power_of_2(chunk_size)) in i915_buddy_init()
83 size = round_down(size, chunk_size); in i915_buddy_init()
87 mm->chunk_size = chunk_size; in i915_buddy_init()
88 mm->max_order = ilog2(size) - ilog2(chunk_size); in i915_buddy_init()
122 order = ilog2(root_size) - ilog2(chunk_size); in i915_buddy_init()
180 offset + (mm->chunk_size << block_order)); in split_block()
333 if (size < mm->chunk_size) in i915_buddy_alloc_range()
336 if (!IS_ALIGNED(size | start, mm->chunk_size)) in i915_buddy_alloc_range()
[all …]
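
i915_buddy_init() rejects chunk sizes below PAGE_SIZE or not a power of two, rounds the managed size down to a chunk multiple, and derives the maximum buddy order from the ratio of the two sizes. A small sketch of that setup math (ilog2 and round_down open-coded for userspace; an illustration, not the i915 API):

#include <stdint.h>
#include <stdio.h>

#define PAGE_SIZE 4096ULL   /* assumed host page size for illustration */

static unsigned int ilog2_u64(uint64_t v)
{
    return 63 - (unsigned int)__builtin_clzll(v);   /* v must be non-zero */
}

int main(void)
{
    uint64_t size = (256ULL << 20) + 12345; /* example region size */
    uint64_t chunk_size = PAGE_SIZE;        /* minimum allocatable block */

    if (size < chunk_size)
        return 1;
    if (chunk_size < PAGE_SIZE || (chunk_size & (chunk_size - 1)))
        return 1;                           /* mirror the init checks */

    size -= size % chunk_size;              /* round_down(size, chunk_size) */
    unsigned int max_order = ilog2_u64(size) - ilog2_u64(chunk_size);

    printf("usable=%llu max_order=%u\n", (unsigned long long)size, max_order);
    return 0;
}
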
i915_ttm_buddy_manager.c
61 GEM_BUG_ON(min_page_size < mm->chunk_size); in i915_ttm_buddy_man_alloc()
62 min_order = ilog2(min_page_size) - ilog2(mm->chunk_size); in i915_ttm_buddy_man_alloc()
65 min_order = ilog2(size) - ilog2(mm->chunk_size); in i915_ttm_buddy_man_alloc()
73 n_pages = size >> ilog2(mm->chunk_size); in i915_ttm_buddy_man_alloc()
183 u64 chunk_size) in i915_ttm_buddy_man_init() argument
193 err = i915_buddy_init(&bman->mm, size, chunk_size); in i915_ttm_buddy_man_init()
199 GEM_BUG_ON(default_page_size < chunk_size); in i915_ttm_buddy_man_init()
i915_buddy.h
72 u64 chunk_size; member
117 return mm->chunk_size << i915_buddy_block_order(block); in i915_buddy_block_size()
120 int i915_buddy_init(struct i915_buddy_mm *mm, u64 size, u64 chunk_size);
/linux/net/xdp/
xdp_umem.c
155 u32 npgs_rem, chunk_size = mr->chunk_size, headroom = mr->headroom; in xdp_umem_reg() local
161 if (chunk_size < XDP_UMEM_MIN_CHUNK_SIZE || chunk_size > PAGE_SIZE) { in xdp_umem_reg()
174 if (!unaligned_chunks && !is_power_of_2(chunk_size)) in xdp_umem_reg()
193 chunks = (unsigned int)div_u64_rem(size, chunk_size, &chunks_rem); in xdp_umem_reg()
200 if (headroom >= chunk_size - XDP_PACKET_HEADROOM) in xdp_umem_reg()
205 umem->chunk_size = chunk_size; in xdp_umem_reg()
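
xdp_umem_reg() bounds chunk_size between a minimum and PAGE_SIZE, insists on a power of two unless unaligned chunks were requested, and rejects headroom that leaves no room for a frame. A userspace sketch of those checks (the two named constants and their values are assumptions for illustration, not taken from the uapi headers):

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define PAGE_SIZE           4096u
#define UMEM_MIN_CHUNK_SIZE 2048u   /* assumed value for illustration */
#define PACKET_HEADROOM     256u    /* assumed value for illustration */

/* Sketch of the sanity checks applied before a UMEM registration is
 * accepted; not the kernel function itself. */
static bool umem_config_ok(uint64_t size, uint32_t chunk_size,
                           uint32_t headroom, bool unaligned_chunks)
{
    if (chunk_size < UMEM_MIN_CHUNK_SIZE || chunk_size > PAGE_SIZE)
        return false;
    if (!unaligned_chunks && (chunk_size & (chunk_size - 1)))
        return false;               /* aligned mode needs a power of two */
    if (headroom >= chunk_size - PACKET_HEADROOM)
        return false;               /* nothing left for packet data */
    return size / chunk_size != 0;  /* at least one whole chunk fits */
}

int main(void)
{
    printf("valid: %d\n", umem_config_ok(1 << 20, 2048, 0, false));
    return 0;
}
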
xsk_buff_pool.c
61 pool->chunk_mask = ~((u64)umem->chunk_size - 1); in xp_create_and_assign_umem()
66 pool->chunk_size = umem->chunk_size; in xp_create_and_assign_umem()
67 pool->chunk_shift = ffs(umem->chunk_size) - 1; in xp_create_and_assign_umem()
69 pool->frame_len = umem->chunk_size - umem->headroom - in xp_create_and_assign_umem()
85 xskb->xdp.frame_sz = umem->chunk_size - umem->headroom; in xp_create_and_assign_umem()
90 xp_init_xskb_addr(xskb, pool, i * pool->chunk_size); in xp_create_and_assign_umem()
435 return xp_desc_crosses_non_contig_pg(pool, addr, pool->chunk_size); in xp_addr_crosses_non_contig_pg()
442 *addr + pool->chunk_size > pool->addrs_cnt || in xp_check_unaligned()
/linux/drivers/net/ethernet/mellanox/mlx5/core/steering/
dr_icm_pool.c
242 enum mlx5dr_icm_chunk_size chunk_size, in dr_icm_chunk_create() argument
260 mlx5dr_icm_pool_chunk_size_to_entries(chunk_size); in dr_icm_chunk_create()
268 chunk_size); in dr_icm_chunk_create()
323 enum mlx5dr_icm_chunk_size chunk_size, in dr_icm_handle_buddies_get_mem() argument
335 chunk_size, seg); in dr_icm_handle_buddies_get_mem()
343 chunk_size); in dr_icm_handle_buddies_get_mem()
353 chunk_size); in dr_icm_handle_buddies_get_mem()
372 enum mlx5dr_icm_chunk_size chunk_size) in mlx5dr_icm_alloc_chunk() argument
379 if (chunk_size > pool->max_log_chunk_sz) in mlx5dr_icm_alloc_chunk()
388 chunk = dr_icm_chunk_create(pool, chunk_size, buddy, seg); in mlx5dr_icm_alloc_chunk()
[all …]
/linux/drivers/gpu/drm/i915/selftests/
i915_buddy.c
58 if (block_size < mm->chunk_size) { in igt_check_block()
68 if (!IS_ALIGNED(block_size, mm->chunk_size)) { in igt_check_block()
73 if (!IS_ALIGNED(offset, mm->chunk_size)) { in igt_check_block()
294 *chunk_size = (u64)ms << 12; in igt_mm_config()
303 u64 chunk_size; in igt_buddy_alloc_smoke() local
308 igt_mm_config(&mm_size, &chunk_size); in igt_buddy_alloc_smoke()
312 err = i915_buddy_init(&mm, mm_size, chunk_size); in igt_buddy_alloc_smoke()
635 u64 chunk_size; in igt_buddy_alloc_range() local
641 igt_mm_config(&size, &chunk_size); in igt_buddy_alloc_range()
645 err = i915_buddy_init(&mm, size, chunk_size); in igt_buddy_alloc_range()
[all …]
/linux/arch/x86/platform/olpc/
olpc_dt.c
131 const size_t chunk_size = max(PAGE_SIZE, size); in prom_early_alloc() local
139 res = memblock_alloc(chunk_size, SMP_CACHE_BYTES); in prom_early_alloc()
142 chunk_size); in prom_early_alloc()
144 prom_early_allocated += chunk_size; in prom_early_alloc()
145 memset(res, 0, chunk_size); in prom_early_alloc()
146 free_mem = chunk_size; in prom_early_alloc()
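
prom_early_alloc() in olpc_dt.c grabs at least a page at a time (max(PAGE_SIZE, size)), zeroes it, and serves later small requests out of the leftover free_mem. A crude userspace sketch of that chunked bump allocator (malloc stands in for memblock_alloc; an illustration only, not the OLPC code):

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define PAGE_SIZE 4096u

static char *cur;               /* unused part of the current chunk */
static size_t free_mem;         /* bytes left in the current chunk */
static size_t total_allocated;  /* like prom_early_allocated */

static void *early_alloc(size_t size)
{
    if (free_mem < size) {
        size_t chunk_size = size > PAGE_SIZE ? size : PAGE_SIZE;

        cur = malloc(chunk_size);   /* memblock_alloc() stand-in */
        if (!cur)
            return NULL;
        memset(cur, 0, chunk_size);
        total_allocated += chunk_size;
        free_mem = chunk_size;
    }

    void *res = cur;
    cur += size;
    free_mem -= size;
    return res;
}

int main(void)
{
    early_alloc(64);
    early_alloc(64);
    printf("allocated %zu bytes of backing chunks\n", total_allocated);
    return 0;
}
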
/linux/drivers/net/ethernet/mellanox/mlxsw/
i2c.c
320 int off = mlxsw_i2c->cmd.mb_off_in, chunk_size, i, j; in mlxsw_i2c_write() local
336 write_tran.len = MLXSW_I2C_ADDR_WIDTH + chunk_size; in mlxsw_i2c_write()
339 mlxsw_i2c->block_size * i, chunk_size); in mlxsw_i2c_write()
359 off += chunk_size; in mlxsw_i2c_write()
360 in_mbox_size -= chunk_size; in mlxsw_i2c_write()
399 int num, chunk_size, reg_size, i, j; in mlxsw_i2c_cmd() local
447 chunk_size = (reg_size > mlxsw_i2c->block_size) ? in mlxsw_i2c_cmd()
449 read_tran[1].len = chunk_size; in mlxsw_i2c_cmd()
471 off += chunk_size; in mlxsw_i2c_cmd()
472 reg_size -= chunk_size; in mlxsw_i2c_cmd()
[all …]
/linux/drivers/rtc/
rtc-isl12026.c
328 size_t chunk_size, num_written; in isl12026_nvm_write() local
348 chunk_size = round_down(offset, ISL12026_PAGESIZE) + in isl12026_nvm_write()
350 chunk_size = min(bytes, chunk_size); in isl12026_nvm_write()
355 memcpy(payload + 2, v + num_written, chunk_size); in isl12026_nvm_write()
358 msgs[0].len = chunk_size + 2; in isl12026_nvm_write()
368 bytes -= chunk_size; in isl12026_nvm_write()
369 offset += chunk_size; in isl12026_nvm_write()
370 num_written += chunk_size; in isl12026_nvm_write()
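
isl12026_nvm_write() caps each I2C write so it never crosses an EEPROM page boundary: the chunk is the distance from the current offset to the end of its page, clamped to the bytes remaining. A hedged sketch of that chunking loop (the I2C transfer is replaced by a printf and the page size is an assumed value):

#include <stddef.h>
#include <stdio.h>

#define PAGESIZE 16u    /* assumed EEPROM page size for illustration */

/* Split a write into chunks that never cross a page boundary, as the
 * rtc-isl12026 NVM write path does before each transfer. */
static void nvm_write(size_t offset, size_t bytes)
{
    while (bytes) {
        /* distance to the end of the current page, capped to what's left */
        size_t chunk_size = (offset / PAGESIZE) * PAGESIZE + PAGESIZE - offset;

        if (chunk_size > bytes)
            chunk_size = bytes;

        /* the i2c_transfer() of payload[offset..] would go here */
        printf("write %zu bytes at offset %zu\n", chunk_size, offset);

        bytes -= chunk_size;
        offset += chunk_size;
    }
}

int main(void)
{
    nvm_write(10, 33);  /* 33 bytes starting mid-page: 6 + 16 + 11 */
    return 0;
}
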
/linux/drivers/rpmsg/
qcom_glink_native.c
819 __le32 chunk_size; in qcom_glink_rx_data() member
822 unsigned int chunk_size; in qcom_glink_rx_data() local
835 chunk_size = le32_to_cpu(hdr.chunk_size); in qcom_glink_rx_data()
1299 __le32 chunk_size; in __qcom_glink_send() member
1304 int chunk_size = len; in __qcom_glink_send() local
1340 chunk_size = SZ_8K; in __qcom_glink_send()
1346 req.chunk_size = cpu_to_le32(chunk_size); in __qcom_glink_send()
1359 chunk_size = left_size; in __qcom_glink_send()
1360 if (chunk_size > SZ_8K) in __qcom_glink_send()
1361 chunk_size = SZ_8K; in __qcom_glink_send()
[all …]
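
__qcom_glink_send() caps each transmit command at 8 KiB and loops, shrinking the final chunk to whatever remains. A simplified sketch of that fragmentation loop (SZ_8K spelled out, the FIFO write replaced by a printf, and left_size is just this sketch's name for the remaining byte count; not the GLINK protocol code):

#include <stddef.h>
#include <stdio.h>

#define SZ_8K (8 * 1024)

/* Send a buffer as a series of chunks no larger than 8 KiB, mirroring
 * the fragmentation loop visible in the hits above. */
static void send_chunked(size_t len)
{
    size_t chunk_size = len;
    size_t left_size = 0;

    if (chunk_size > SZ_8K) {       /* too big for one command */
        chunk_size = SZ_8K;
        left_size = len - chunk_size;
    }

    printf("first chunk: %zu bytes, %zu left\n", chunk_size, left_size);

    while (left_size > 0) {
        chunk_size = left_size;
        if (chunk_size > SZ_8K)
            chunk_size = SZ_8K;
        left_size -= chunk_size;

        printf("continuation chunk: %zu bytes, %zu left\n",
               chunk_size, left_size);
    }
}

int main(void)
{
    send_chunked(20000);    /* 8192 + 8192 + 3616 bytes */
    return 0;
}
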
/linux/fs/nilfs2/
dir.c
120 unsigned int chunk_size = nilfs_chunk_size(dir); in nilfs_check_page() local
129 if (limit & (chunk_size - 1)) in nilfs_check_page()
144 if (((offs + rec_len - 1) ^ offs) & ~(chunk_size-1)) in nilfs_check_page()
443 unsigned int chunk_size = nilfs_chunk_size(dir); in nilfs_add_link() local
475 rec_len = chunk_size; in nilfs_add_link()
476 de->rec_len = nilfs_rec_len_to_disk(chunk_size); in nilfs_add_link()
583 unsigned int chunk_size = nilfs_chunk_size(inode); in nilfs_make_empty() local
591 err = nilfs_prepare_chunk(page, 0, chunk_size); in nilfs_make_empty()
597 memset(kaddr, 0, chunk_size); in nilfs_make_empty()
607 de->rec_len = nilfs_rec_len_to_disk(chunk_size - NILFS_DIR_REC_LEN(1)); in nilfs_make_empty()
[all …]
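
nilfs_check_page() (like the matching ext2 code further down) verifies that a directory entry never straddles a chunk boundary by XOR-ing the first and last byte offsets of the record and masking off the in-chunk bits. A tiny sketch of that trick (chunk_size must be a power of two; illustrative only):

#include <stdbool.h>
#include <stdio.h>

/* True if the record [offs, offs + rec_len) stays inside one chunk.
 * The XOR of the first and last offsets has a bit above the chunk mask
 * exactly when the two offsets land in different chunks. */
static bool rec_within_chunk(unsigned int offs, unsigned int rec_len,
                             unsigned int chunk_size)
{
    return (((offs + rec_len - 1) ^ offs) & ~(chunk_size - 1)) == 0;
}

int main(void)
{
    unsigned int chunk_size = 4096;

    printf("%d\n", rec_within_chunk(4080, 16, chunk_size));    /* 1: fits  */
    printf("%d\n", rec_within_chunk(4080, 32, chunk_size));    /* 0: spans */
    return 0;
}
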
/linux/arch/x86/kernel/cpu/mtrr/
cleanup.c
474 u64 chunk_size, u64 gran_size) in x86_setup_var_mtrrs() argument
483 var_state.chunk_sizek = chunk_size >> 10; in x86_setup_var_mtrrs()
590 mtrr_calc_range_state(u64 chunk_size, u64 gran_size, in mtrr_calc_range_state() argument
615 result[i].chunk_sizek = chunk_size >> 10; in mtrr_calc_range_state()
687 u64 chunk_size, gran_size; in mtrr_cleanup() local
758 for (chunk_size = gran_size; chunk_size < (1ULL<<32); in mtrr_cleanup()
759 chunk_size <<= 1) { in mtrr_cleanup()
764 mtrr_calc_range_state(chunk_size, gran_size, in mtrr_cleanup()
784 chunk_size = result[i].chunk_sizek; in mtrr_cleanup()
785 chunk_size <<= 10; in mtrr_cleanup()
[all …]
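
mtrr_cleanup() does not pick one chunk size: it sweeps chunk_size upward from gran_size by doubling, scores each candidate layout via mtrr_calc_range_state(), and later converts the winner's chunk_sizek back to bytes. A bare sketch of that doubling sweep (the scoring is stubbed out; only the loop shape is taken from the hits above):

#include <stdint.h>
#include <stdio.h>

/* Stand-in for mtrr_calc_range_state(): pretend to score a layout. */
static unsigned int score_layout(uint64_t chunk_size, uint64_t gran_size)
{
    return (unsigned int)((chunk_size >> 10) + (gran_size >> 10));
}

int main(void)
{
    uint64_t gran_size = 64 << 10;  /* 64 KiB granularity (example) */

    for (uint64_t chunk_size = gran_size; chunk_size < (1ULL << 32);
         chunk_size <<= 1) {
        unsigned int chunk_sizek = (unsigned int)(chunk_size >> 10);

        printf("try chunk %u KiB -> score %u\n",
               chunk_sizek, score_layout(chunk_size, gran_size));
    }
    return 0;
}
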
/linux/fs/ext2/
dir.c
113 unsigned chunk_size = ext2_chunk_size(dir); in ext2_check_page() local
122 if (limit & (chunk_size - 1)) in ext2_check_page()
137 if (unlikely(((offs + rec_len - 1) ^ offs) & ~(chunk_size-1))) in ext2_check_page()
492 unsigned chunk_size = ext2_chunk_size(dir); in ext2_add_link() local
525 rec_len = chunk_size; in ext2_add_link()
526 de->rec_len = ext2_rec_len_to_disk(chunk_size); in ext2_add_link()
632 unsigned chunk_size = ext2_chunk_size(inode); in ext2_make_empty() local
640 err = ext2_prepare_chunk(page, 0, chunk_size); in ext2_make_empty()
646 memset(kaddr, 0, chunk_size); in ext2_make_empty()
656 de->rec_len = ext2_rec_len_to_disk(chunk_size - EXT2_DIR_REC_LEN(1)); in ext2_make_empty()
[all …]
/linux/drivers/net/wireless/marvell/libertas/
if_sdio.c
444 u32 chunk_size; in if_sdio_prog_helper() local
473 chunk_size = min_t(size_t, size, 60); in if_sdio_prog_helper()
485 firmware += chunk_size; in if_sdio_prog_helper()
486 size -= chunk_size; in if_sdio_prog_helper()
540 u32 chunk_size; in if_sdio_prog_real() local
611 chunk_size = min_t(size_t, req_size, 512); in if_sdio_prog_real()
613 memcpy(chunk_buffer, firmware, chunk_size); in if_sdio_prog_real()
619 chunk_buffer, roundup(chunk_size, 32)); in if_sdio_prog_real()
623 firmware += chunk_size; in if_sdio_prog_real()
624 size -= chunk_size; in if_sdio_prog_real()
[all …]
/linux/Documentation/admin-guide/device-mapper/
striped.rst
36 my $chunk_size = 128 * 2;
54 $stripe_dev_size -= $stripe_dev_size % ($chunk_size * $num_devs);
56 $table = "0 $stripe_dev_size striped $num_devs $chunk_size";
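
The striped.rst example rounds the stripe device size down to a multiple of chunk_size * num_devs before emitting the dm-table line. The same arithmetic in C (the size and device count are made-up example values; the device list at the end of the table line is elided):

#include <stdio.h>

int main(void)
{
    unsigned long long chunk_size = 128 * 2;       /* 128 KiB in 512-byte sectors */
    unsigned long long num_devs = 2;               /* assumed device count */
    unsigned long long stripe_dev_size = 1000000;  /* assumed size in sectors */

    /* Round down so the table length is a whole number of full stripes. */
    stripe_dev_size -= stripe_dev_size % (chunk_size * num_devs);

    printf("0 %llu striped %llu %llu ...\n",
           stripe_dev_size, num_devs, chunk_size);
    return 0;
}
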
/linux/drivers/net/wireless/ath/wcn36xx/
dxe.c
238 int i, chunk_size = pool->chunk_size; in wcn36xx_dxe_init_tx_bd() local
249 bd_phy_addr += chunk_size; in wcn36xx_dxe_init_tx_bd()
250 bd_cpu_addr += chunk_size; in wcn36xx_dxe_init_tx_bd()
678 wcn->mgmt_mem_pool.chunk_size = WCN36XX_BD_CHUNK_SIZE + in wcn36xx_dxe_allocate_mem_pools()
681 s = wcn->mgmt_mem_pool.chunk_size * WCN36XX_DXE_CH_DESC_NUMB_TX_H; in wcn36xx_dxe_allocate_mem_pools()
693 wcn->data_mem_pool.chunk_size = WCN36XX_BD_CHUNK_SIZE + in wcn36xx_dxe_allocate_mem_pools()
696 s = wcn->data_mem_pool.chunk_size * WCN36XX_DXE_CH_DESC_NUMB_TX_L; in wcn36xx_dxe_allocate_mem_pools()
716 dma_free_coherent(wcn->dev, wcn->mgmt_mem_pool.chunk_size * in wcn36xx_dxe_free_mem_pools()
722 dma_free_coherent(wcn->dev, wcn->data_mem_pool.chunk_size * in wcn36xx_dxe_free_mem_pools()
/linux/drivers/gpu/drm/amd/display/dc/dcn21/
dcn21_hubp.c
151 CHUNK_SIZE, rq_regs->rq_regs_l.chunk_size, in hubp21_program_requestor()
160 CHUNK_SIZE_C, rq_regs->rq_regs_c.chunk_size, in hubp21_program_requestor()
273 CHUNK_SIZE, &rq_regs.rq_regs_l.chunk_size, in hubp21_validate_dml_output()
282 CHUNK_SIZE_C, &rq_regs.rq_regs_c.chunk_size, in hubp21_validate_dml_output()
306 if (rq_regs.rq_regs_l.chunk_size != dml_rq_regs->rq_regs_l.chunk_size) in hubp21_validate_dml_output()
308 dml_rq_regs->rq_regs_l.chunk_size, rq_regs.rq_regs_l.chunk_size); in hubp21_validate_dml_output()
331 if (rq_regs.rq_regs_c.chunk_size != dml_rq_regs->rq_regs_c.chunk_size) in hubp21_validate_dml_output()
333 dml_rq_regs->rq_regs_c.chunk_size, rq_regs.rq_regs_c.chunk_size); in hubp21_validate_dml_output()
/linux/arch/x86/xen/
setup.c
727 phys_addr_t mem_end, addr, size, chunk_size; local
803 chunk_size = size; in xen_memory_setup()
808 chunk_size = min(size, mem_end - addr); in xen_memory_setup()
810 chunk_size = min(size, PFN_PHYS(extra_pages)); in xen_memory_setup()
812 n_pfns = PFN_DOWN(addr + chunk_size) - pfn_s; in xen_memory_setup()
821 xen_align_and_add_e820_region(addr, chunk_size, type); in xen_memory_setup()
823 addr += chunk_size; in xen_memory_setup()
824 size -= chunk_size; in xen_memory_setup()

Completed in 50 milliseconds
