Searched refs:chunk_size (Results 1 – 25 of 135) sorted by relevance


/linux-5.19.10/drivers/md/
dm-exception-store.c
145 unsigned chunk_size; in set_chunk_size() local
147 if (kstrtouint(chunk_size_arg, 10, &chunk_size)) { in set_chunk_size()
152 if (!chunk_size) { in set_chunk_size()
153 store->chunk_size = store->chunk_mask = store->chunk_shift = 0; in set_chunk_size()
157 return dm_exception_store_set_chunk_size(store, chunk_size, error); in set_chunk_size()
161 unsigned chunk_size, in dm_exception_store_set_chunk_size() argument
165 if (!is_power_of_2(chunk_size)) { in dm_exception_store_set_chunk_size()
171 if (chunk_size % in dm_exception_store_set_chunk_size()
173 chunk_size % in dm_exception_store_set_chunk_size()
179 if (chunk_size > INT_MAX >> SECTOR_SHIFT) { in dm_exception_store_set_chunk_size()
[all …]
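
The validation visible in dm_exception_store_set_chunk_size() — reject a chunk size that is not a power of two or that would overflow once converted from sectors to bytes — can be sketched as a standalone helper (illustrative only, not the kernel function):

#include <limits.h>
#include <stdio.h>

#define SECTOR_SHIFT 9  /* a sector is 512 bytes, as in the kernel */

/* Illustrative check mirroring the constraints above: chunk_size is in
 * 512-byte sectors, must be a power of two, and must not overflow an int
 * when expressed in bytes. */
static int chunk_size_valid(unsigned chunk_size)
{
	if (chunk_size == 0)
		return 1;                        /* 0 is allowed: it clears the chunk parameters */
	if (chunk_size & (chunk_size - 1))
		return 0;                        /* not a power of two */
	if (chunk_size > INT_MAX >> SECTOR_SHIFT)
		return 0;                        /* would overflow once shifted to bytes */
	return 1;
}

int main(void)
{
	printf("%d %d %d\n",
	       chunk_size_valid(16),             /* 1: 8 KiB chunks */
	       chunk_size_valid(24),             /* 0: not a power of two */
	       chunk_size_valid(1u << 23));      /* 0: too large in bytes */
	return 0;
}
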
dm-stripe.c
35 uint32_t chunk_size; member
90 uint32_t chunk_size; in stripe_ctr() local
104 if (kstrtouint(argv[1], 10, &chunk_size) || !chunk_size) { in stripe_ctr()
117 if (sector_div(tmp_len, chunk_size)) { in stripe_ctr()
151 r = dm_set_target_max_io_len(ti, chunk_size); in stripe_ctr()
162 sc->chunk_size = chunk_size; in stripe_ctr()
163 if (chunk_size & (chunk_size - 1)) in stripe_ctr()
166 sc->chunk_size_shift = __ffs(chunk_size); in stripe_ctr()
209 chunk_offset = sector_div(chunk, sc->chunk_size); in stripe_map_sector()
211 chunk_offset = chunk & (sc->chunk_size - 1); in stripe_map_sector()
[all …]
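
dm-stripe keeps two mapping paths: a generic one using sector_div() and a fast path for power-of-two chunk sizes, where the in-chunk offset is a mask and the chunk index a shift. A rough userspace sketch of that mapping, with assumed field names rather than the kernel struct:

#include <stdint.h>
#include <stdio.h>

/* Hypothetical mirror of the fields used by the mapping above. */
struct stripe_conf {
	uint32_t chunk_size;        /* in sectors */
	int      chunk_size_shift;  /* log2(chunk_size), or -1 if not a power of two */
};

static void map_sector(const struct stripe_conf *sc, uint64_t sector,
		       uint64_t *chunk, uint64_t *chunk_offset)
{
	if (sc->chunk_size_shift < 0) {
		/* generic path: division and modulo */
		*chunk_offset = sector % sc->chunk_size;
		*chunk = sector / sc->chunk_size;
	} else {
		/* power-of-two fast path: mask and shift only */
		*chunk_offset = sector & (sc->chunk_size - 1);
		*chunk = sector >> sc->chunk_size_shift;
	}
}

int main(void)
{
	struct stripe_conf sc = { .chunk_size = 256, .chunk_size_shift = 8 };
	uint64_t chunk, off;

	map_sector(&sc, 1000, &chunk, &off);
	printf("chunk=%llu offset=%llu\n",
	       (unsigned long long)chunk, (unsigned long long)off);  /* 3, 232 */
	return 0;
}
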
dm-unstripe.c
21 uint32_t chunk_size; member
61 if (kstrtouint(argv[1], 10, &uc->chunk_size) || !uc->chunk_size) { in unstripe_ctr()
87 uc->unstripe_offset = uc->unstripe * uc->chunk_size; in unstripe_ctr()
88 uc->unstripe_width = (uc->stripes - 1) * uc->chunk_size; in unstripe_ctr()
89 uc->chunk_shift = is_power_of_2(uc->chunk_size) ? fls(uc->chunk_size) - 1 : 0; in unstripe_ctr()
92 if (sector_div(tmp_len, uc->chunk_size)) { in unstripe_ctr()
97 if (dm_set_target_max_io_len(ti, uc->chunk_size)) { in unstripe_ctr()
126 sector_div(tmp_sector, uc->chunk_size); in map_to_core()
156 uc->stripes, (unsigned long long)uc->chunk_size, uc->unstripe, in unstripe_status()
179 limits->chunk_sectors = uc->chunk_size; in unstripe_io_hints()
dm-snap-persistent.c
81 __le32 chunk_size; member
172 len = ps->store->chunk_size << SECTOR_SHIFT; in alloc_area()
234 .sector = ps->store->chunk_size * chunk, in chunk_io()
235 .count = ps->store->chunk_size, in chunk_io()
294 memset(ps->area, 0, ps->store->chunk_size << SECTOR_SHIFT); in zero_memory_area()
307 unsigned chunk_size; in read_header() local
315 if (!ps->store->chunk_size) { in read_header()
316 ps->store->chunk_size = max(DM_CHUNK_SIZE_DEFAULT_SECTORS, in read_header()
319 ps->store->chunk_mask = ps->store->chunk_size - 1; in read_header()
320 ps->store->chunk_shift = __ffs(ps->store->chunk_size); in read_header()
[all …]
dm-snap-transient.c
45 if (size < (tc->next_free + store->chunk_size)) in transient_prepare_exception()
49 tc->next_free += store->chunk_size; in transient_prepare_exception()
97 DMEMIT(" N %llu", (unsigned long long)store->chunk_size); in transient_status()
/linux-5.19.10/tools/testing/selftests/net/
tcp_mmap.c
85 static size_t chunk_size = 512*1024; variable
168 buffer = mmap_large_buffer(chunk_size, &buffer_sz); in child_thread()
174 raddr = mmap(NULL, chunk_size + map_align, PROT_READ, flags, fd, 0); in child_thread()
193 zc.length = chunk_size; in child_thread()
201 assert(zc.length <= chunk_size); in child_thread()
212 assert(zc.recv_skip_hint <= chunk_size); in child_thread()
223 while (sub < chunk_size) { in child_thread()
224 lu = read(fd, buffer + sub, chunk_size - sub); in child_thread()
263 munmap(raddr, chunk_size + map_align); in child_thread()
317 rcvlowat = chunk_size; in do_accept()
[all …]
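
The non-zerocopy path in tcp_mmap.c is the usual "keep reading until the chunk is full" loop, since read() may return fewer bytes than requested. A minimal standalone version, assuming nothing about the descriptor on the other end:

#include <errno.h>
#include <stddef.h>
#include <unistd.h>

/* Read up to chunk_size bytes, retrying short reads until the chunk is
 * full or EOF is hit. Returns bytes read, or -1 on error. */
static ssize_t read_chunk(int fd, char *buffer, size_t chunk_size)
{
	size_t sub = 0;

	while (sub < chunk_size) {
		ssize_t lu = read(fd, buffer + sub, chunk_size - sub);

		if (lu == 0)
			break;            /* EOF before the chunk was full */
		if (lu < 0) {
			if (errno == EINTR)
				continue;
			return -1;
		}
		sub += (size_t)lu;
	}
	return (ssize_t)sub;
}

int main(void)
{
	char buf[4096];
	ssize_t n = read_chunk(0, buf, sizeof(buf));   /* fill one chunk from stdin */

	return n < 0;
}
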
/linux-5.19.10/net/xdp/
xdp_umem.c
155 u32 npgs_rem, chunk_size = mr->chunk_size, headroom = mr->headroom; in xdp_umem_reg() local
161 if (chunk_size < XDP_UMEM_MIN_CHUNK_SIZE || chunk_size > PAGE_SIZE) { in xdp_umem_reg()
174 if (!unaligned_chunks && !is_power_of_2(chunk_size)) in xdp_umem_reg()
193 chunks = (unsigned int)div_u64_rem(size, chunk_size, &chunks_rem); in xdp_umem_reg()
200 if (headroom >= chunk_size - XDP_PACKET_HEADROOM) in xdp_umem_reg()
205 umem->chunk_size = chunk_size; in xdp_umem_reg()
xsk_buff_pool.c
76 pool->chunk_mask = ~((u64)umem->chunk_size - 1); in xp_create_and_assign_umem()
81 pool->chunk_size = umem->chunk_size; in xp_create_and_assign_umem()
82 pool->chunk_shift = ffs(umem->chunk_size) - 1; in xp_create_and_assign_umem()
84 pool->frame_len = umem->chunk_size - umem->headroom - in xp_create_and_assign_umem()
100 xskb->xdp.frame_sz = umem->chunk_size - umem->headroom; in xp_create_and_assign_umem()
105 xp_init_xskb_addr(xskb, pool, i * pool->chunk_size); in xp_create_and_assign_umem()
455 return xp_desc_crosses_non_contig_pg(pool, addr, pool->chunk_size); in xp_addr_crosses_non_contig_pg()
462 *addr + pool->chunk_size > pool->addrs_cnt || in xp_check_unaligned()
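
xsk_buff_pool derives everything from the chunk size: a mask that rounds an address down to its chunk, a shift for the chunk index, and the usable frame length once headroom is subtracted. A hedged sketch of that arithmetic; the 256-byte XDP_PACKET_HEADROOM used to complete the truncated subtraction is an assumption here:

#include <stdint.h>
#include <stdio.h>
#include <strings.h>      /* ffs() */

#define XDP_PACKET_HEADROOM 256   /* assumed fixed headroom, as on the truncated line */

int main(void)
{
	uint32_t chunk_size = 2048;   /* must be a power of two when chunks are aligned */
	uint32_t headroom = 64;       /* extra headroom requested by the application */

	uint64_t chunk_mask = ~((uint64_t)chunk_size - 1);   /* rounds an addr down to its chunk */
	int chunk_shift = ffs(chunk_size) - 1;               /* log2(chunk_size) */
	uint32_t frame_len = chunk_size - headroom - XDP_PACKET_HEADROOM;

	uint64_t addr = 5000;

	printf("chunk base=%llu index=%llu frame_len=%u\n",
	       (unsigned long long)(addr & chunk_mask),
	       (unsigned long long)(addr >> chunk_shift),
	       frame_len);                /* chunk base=4096 index=2 frame_len=1728 */
	return 0;
}
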
/linux-5.19.10/drivers/gpu/drm/
drm_buddy.c
79 int drm_buddy_init(struct drm_buddy *mm, u64 size, u64 chunk_size) in drm_buddy_init() argument
84 if (size < chunk_size) in drm_buddy_init()
87 if (chunk_size < PAGE_SIZE) in drm_buddy_init()
90 if (!is_power_of_2(chunk_size)) in drm_buddy_init()
93 size = round_down(size, chunk_size); in drm_buddy_init()
97 mm->chunk_size = chunk_size; in drm_buddy_init()
98 mm->max_order = ilog2(size) - ilog2(chunk_size); in drm_buddy_init()
132 order = ilog2(root_size) - ilog2(chunk_size); in drm_buddy_init()
141 BUG_ON(drm_buddy_block_size(mm, root) < chunk_size); in drm_buddy_init()
199 offset + (mm->chunk_size << block_order)); in split_block()
[all …]
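
In drm_buddy, chunk_size is the smallest allocatable block and every block is chunk_size shifted left by its order, so max_order is simply the difference of the two logarithms. A small sketch of that relationship (ilog2_u64() stands in for the kernel's ilog2()):

#include <stdint.h>
#include <stdio.h>

/* Floor log2, standing in for the kernel's ilog2(). */
static unsigned ilog2_u64(uint64_t v)
{
	unsigned r = 0;

	while (v >>= 1)
		r++;
	return r;
}

int main(void)
{
	uint64_t size = 1ULL << 30;      /* 1 GiB address space to manage */
	uint64_t chunk_size = 4096;      /* minimum block, must be a power of two */

	unsigned max_order = ilog2_u64(size) - ilog2_u64(chunk_size);   /* 30 - 12 = 18 */

	for (unsigned order = 0; order <= max_order; order += 6)
		printf("order %2u -> block size %llu bytes\n",
		       order, (unsigned long long)(chunk_size << order));
	return 0;
}
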
/linux-5.19.10/arch/x86/platform/olpc/
olpc_dt.c
131 const size_t chunk_size = max(PAGE_SIZE, size); in prom_early_alloc() local
139 res = memblock_alloc(chunk_size, SMP_CACHE_BYTES); in prom_early_alloc()
142 chunk_size); in prom_early_alloc()
144 prom_early_allocated += chunk_size; in prom_early_alloc()
145 memset(res, 0, chunk_size); in prom_early_alloc()
146 free_mem = chunk_size; in prom_early_alloc()
/linux-5.19.10/drivers/gpu/drm/selftests/
test-drm_buddy.c
24 static inline u64 get_size(int order, u64 chunk_size) in get_size() argument
26 return (1 << order) * chunk_size; in get_size()
101 if (block_size < mm->chunk_size) { in igt_check_block()
111 if (!IS_ALIGNED(block_size, mm->chunk_size)) { in igt_check_block()
116 if (!IS_ALIGNED(offset, mm->chunk_size)) { in igt_check_block()
320 static void igt_mm_config(u64 *size, u64 *chunk_size) in igt_mm_config() argument
337 *chunk_size = (u64)ms << 12; in igt_mm_config()
473 u64 mm_size, min_page_size, chunk_size, start = 0; in igt_buddy_alloc_smoke() local
482 igt_mm_config(&mm_size, &chunk_size); in igt_buddy_alloc_smoke()
484 err = drm_buddy_init(&mm, mm_size, chunk_size); in igt_buddy_alloc_smoke()
[all …]
/linux-5.19.10/drivers/net/ethernet/mellanox/mlx5/core/steering/
dr_icm_pool.c
319 enum mlx5dr_icm_chunk_size chunk_size, in dr_icm_chunk_create() argument
333 chunk->size = chunk_size; in dr_icm_chunk_create()
390 enum mlx5dr_icm_chunk_size chunk_size, in dr_icm_handle_buddies_get_mem() argument
402 chunk_size, seg); in dr_icm_handle_buddies_get_mem()
410 chunk_size); in dr_icm_handle_buddies_get_mem()
420 chunk_size); in dr_icm_handle_buddies_get_mem()
439 enum mlx5dr_icm_chunk_size chunk_size) in mlx5dr_icm_alloc_chunk() argument
446 if (chunk_size > pool->max_log_chunk_sz) in mlx5dr_icm_alloc_chunk()
451 ret = dr_icm_handle_buddies_get_mem(pool, chunk_size, &buddy, &seg); in mlx5dr_icm_alloc_chunk()
455 chunk = dr_icm_chunk_create(pool, chunk_size, buddy, seg); in mlx5dr_icm_alloc_chunk()
[all …]
/linux-5.19.10/sound/soc/sof/
ipc4.c
388 size_t chunk_size; in sof_ipc4_set_get_data() local
419 chunk_size = payload_limit; in sof_ipc4_set_get_data()
421 chunk_size = remaining; in sof_ipc4_set_get_data()
433 tx.data_size = chunk_size; in sof_ipc4_set_get_data()
436 tx_size = chunk_size; in sof_ipc4_set_get_data()
441 rx.data_size = chunk_size; in sof_ipc4_set_get_data()
445 rx_size = chunk_size; in sof_ipc4_set_get_data()
469 if (rx_size < chunk_size) { in sof_ipc4_set_get_data()
470 chunk_size = rx_size; in sof_ipc4_set_get_data()
477 offset += chunk_size; in sof_ipc4_set_get_data()
[all …]
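
The IPC4 path is a generic "move a large blob in bounded chunks" loop: each pass transfers min(remaining, payload_limit) bytes and advances the offset, and the receive side may shrink the final chunk. A simplified sketch of the send direction, with a stubbed-out transfer function:

#include <stddef.h>
#include <stdio.h>
#include <string.h>

/* Stand-in for the mailbox transfer; real code would talk to firmware. */
static int send_chunk(const void *data, size_t size, size_t offset)
{
	(void)data;
	printf("tx %zu bytes at offset %zu\n", size, offset);
	return 0;
}

/* Push `total` bytes of `payload` in chunks no larger than payload_limit,
 * mirroring the structure of the loop above (illustrative only). */
static int set_data_chunked(const char *payload, size_t total, size_t payload_limit)
{
	size_t offset = 0;
	size_t remaining = total;

	while (remaining) {
		size_t chunk_size = remaining > payload_limit ? payload_limit
							      : remaining;
		int ret = send_chunk(payload + offset, chunk_size, offset);

		if (ret)
			return ret;
		offset += chunk_size;
		remaining -= chunk_size;
	}
	return 0;
}

int main(void)
{
	char blob[1000];

	memset(blob, 0xab, sizeof(blob));
	return set_data_chunked(blob, sizeof(blob), 384);   /* 384 + 384 + 232 */
}
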
/linux-5.19.10/drivers/net/ethernet/mellanox/mlxsw/
i2c.c
320 int off = mlxsw_i2c->cmd.mb_off_in, chunk_size, i, j; in mlxsw_i2c_write() local
334 chunk_size = (in_mbox_size > mlxsw_i2c->block_size) ? in mlxsw_i2c_write()
336 write_tran.len = MLXSW_I2C_ADDR_WIDTH + chunk_size; in mlxsw_i2c_write()
339 mlxsw_i2c->block_size * i, chunk_size); in mlxsw_i2c_write()
359 off += chunk_size; in mlxsw_i2c_write()
360 in_mbox_size -= chunk_size; in mlxsw_i2c_write()
399 int num, chunk_size, reg_size, i, j; in mlxsw_i2c_cmd() local
447 chunk_size = (reg_size > mlxsw_i2c->block_size) ? in mlxsw_i2c_cmd()
449 read_tran[1].len = chunk_size; in mlxsw_i2c_cmd()
471 off += chunk_size; in mlxsw_i2c_cmd()
[all …]
/linux-5.19.10/drivers/rtc/
rtc-isl12026.c
328 size_t chunk_size, num_written; in isl12026_nvm_write() local
348 chunk_size = round_down(offset, ISL12026_PAGESIZE) + in isl12026_nvm_write()
350 chunk_size = min(bytes, chunk_size); in isl12026_nvm_write()
355 memcpy(payload + 2, v + num_written, chunk_size); in isl12026_nvm_write()
358 msgs[0].len = chunk_size + 2; in isl12026_nvm_write()
368 bytes -= chunk_size; in isl12026_nvm_write()
369 offset += chunk_size; in isl12026_nvm_write()
370 num_written += chunk_size; in isl12026_nvm_write()
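
The NVM write sizes each chunk so that no transfer crosses a device page boundary: the first chunk only runs to the end of the current page, full pages follow, and the last chunk is whatever remains. The boundary arithmetic, shown standalone with an assumed 16-byte page:

#include <stddef.h>
#include <stdio.h>

#define PAGESIZE 16   /* assumed EEPROM page size, for illustration only */

static size_t round_down(size_t v, size_t align)
{
	return v - (v % align);
}

int main(void)
{
	size_t offset = 10;   /* starting byte offset inside the NVM */
	size_t bytes = 40;    /* total bytes to write */

	while (bytes) {
		/* bytes left in the current page, then cap at what remains */
		size_t chunk_size = round_down(offset, PAGESIZE) + PAGESIZE - offset;

		if (chunk_size > bytes)
			chunk_size = bytes;

		printf("write %zu bytes at offset %zu\n", chunk_size, offset);
		bytes -= chunk_size;
		offset += chunk_size;
	}
	return 0;   /* writes of 6, 16, 16, 2 bytes, none crossing a page */
}
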
/linux-5.19.10/drivers/rpmsg/
qcom_glink_native.c
819 __le32 chunk_size; in qcom_glink_rx_data() member
822 unsigned int chunk_size; in qcom_glink_rx_data() local
835 chunk_size = le32_to_cpu(hdr.chunk_size); in qcom_glink_rx_data()
838 if (avail < sizeof(hdr) + chunk_size) { in qcom_glink_rx_data()
861 intent->data = kmalloc(chunk_size + left_size, in qcom_glink_rx_data()
869 intent->size = chunk_size + left_size; in qcom_glink_rx_data()
892 if (intent->size - intent->offset < chunk_size) { in qcom_glink_rx_data()
900 sizeof(hdr), chunk_size); in qcom_glink_rx_data()
901 intent->offset += chunk_size; in qcom_glink_rx_data()
922 qcom_glink_rx_advance(glink, ALIGN(sizeof(hdr) + chunk_size, 8)); in qcom_glink_rx_data()
[all …]
/linux-5.19.10/fs/nilfs2/
dir.c
120 unsigned int chunk_size = nilfs_chunk_size(dir); in nilfs_check_page() local
129 if (limit & (chunk_size - 1)) in nilfs_check_page()
144 if (((offs + rec_len - 1) ^ offs) & ~(chunk_size-1)) in nilfs_check_page()
443 unsigned int chunk_size = nilfs_chunk_size(dir); in nilfs_add_link() local
475 rec_len = chunk_size; in nilfs_add_link()
476 de->rec_len = nilfs_rec_len_to_disk(chunk_size); in nilfs_add_link()
583 unsigned int chunk_size = nilfs_chunk_size(inode); in nilfs_make_empty() local
591 err = nilfs_prepare_chunk(page, 0, chunk_size); in nilfs_make_empty()
597 memset(kaddr, 0, chunk_size); in nilfs_make_empty()
607 de->rec_len = nilfs_rec_len_to_disk(chunk_size - NILFS_DIR_REC_LEN(1)); in nilfs_make_empty()
[all …]
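
Both directory checkers (nilfs2 here, ext2 further down) use the same XOR trick: a record at offset offs with length rec_len crosses a chunk boundary exactly when its first and last bytes differ in the bits above the chunk mask. A quick standalone check of that expression (chunk_size must be a power of two):

#include <stdio.h>

/* Non-zero when [offs, offs + rec_len) straddles a chunk boundary. */
static unsigned crosses_chunk(unsigned offs, unsigned rec_len, unsigned chunk_size)
{
	return ((offs + rec_len - 1) ^ offs) & ~(chunk_size - 1);
}

int main(void)
{
	unsigned chunk_size = 1024;

	printf("%u\n", !!crosses_chunk(1000, 24, chunk_size));   /* 0: last byte is 1023 */
	printf("%u\n", !!crosses_chunk(1000, 25, chunk_size));   /* 1: last byte is 1024 */
	return 0;
}
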
/linux-5.19.10/drivers/gpu/drm/i915/
i915_ttm_buddy_manager.c
72 GEM_BUG_ON(min_page_size < mm->chunk_size); in i915_ttm_buddy_man_alloc()
82 pages = size >> ilog2(mm->chunk_size); in i915_ttm_buddy_man_alloc()
92 n_pages = size >> ilog2(mm->chunk_size); in i915_ttm_buddy_man_alloc()
242 u64 chunk_size) in i915_ttm_buddy_man_init() argument
252 err = drm_buddy_init(&bman->mm, size, chunk_size); in i915_ttm_buddy_man_init()
258 GEM_BUG_ON(default_page_size < chunk_size); in i915_ttm_buddy_man_init()
340 size, mm->chunk_size, in i915_ttm_buddy_man_reserve()
/linux-5.19.10/arch/x86/kernel/cpu/mtrr/
cleanup.c
474 u64 chunk_size, u64 gran_size) in x86_setup_var_mtrrs() argument
483 var_state.chunk_sizek = chunk_size >> 10; in x86_setup_var_mtrrs()
590 mtrr_calc_range_state(u64 chunk_size, u64 gran_size, in mtrr_calc_range_state() argument
607 num_reg = x86_setup_var_mtrrs(range, nr_range, chunk_size, gran_size); in mtrr_calc_range_state()
615 result[i].chunk_sizek = chunk_size >> 10; in mtrr_calc_range_state()
687 u64 chunk_size, gran_size; in mtrr_cleanup() local
758 for (chunk_size = gran_size; chunk_size < (1ULL<<32); in mtrr_cleanup()
759 chunk_size <<= 1) { in mtrr_cleanup()
764 mtrr_calc_range_state(chunk_size, gran_size, in mtrr_cleanup()
784 chunk_size = result[i].chunk_sizek; in mtrr_cleanup()
[all …]
/linux-5.19.10/fs/ext2/
dir.c
113 unsigned chunk_size = ext2_chunk_size(dir); in ext2_check_page() local
122 if (limit & (chunk_size - 1)) in ext2_check_page()
137 if (unlikely(((offs + rec_len - 1) ^ offs) & ~(chunk_size-1))) in ext2_check_page()
492 unsigned chunk_size = ext2_chunk_size(dir); in ext2_add_link() local
525 rec_len = chunk_size; in ext2_add_link()
526 de->rec_len = ext2_rec_len_to_disk(chunk_size); in ext2_add_link()
632 unsigned chunk_size = ext2_chunk_size(inode); in ext2_make_empty() local
640 err = ext2_prepare_chunk(page, 0, chunk_size); in ext2_make_empty()
646 memset(kaddr, 0, chunk_size); in ext2_make_empty()
656 de->rec_len = ext2_rec_len_to_disk(chunk_size - EXT2_DIR_REC_LEN(1)); in ext2_make_empty()
[all …]
/linux-5.19.10/include/drm/
drm_buddy.h
85 u64 chunk_size; member
130 return mm->chunk_size << drm_buddy_block_order(block); in drm_buddy_block_size()
133 int drm_buddy_init(struct drm_buddy *mm, u64 size, u64 chunk_size);
/linux-5.19.10/drivers/platform/x86/intel/ifs/
load.c
59 int i, num_chunks, chunk_size; in copy_hashes_authenticate_chunks() local
71 chunk_size = hashes_status.chunk_size * 1024; in copy_hashes_authenticate_chunks()
89 linear_addr = base + i * chunk_size; in copy_hashes_authenticate_chunks()
/linux-5.19.10/Documentation/admin-guide/device-mapper/
striped.rst
36 my $chunk_size = 128 * 2;
54 $stripe_dev_size -= $stripe_dev_size % ($chunk_size * $num_devs);
56 $table = "0 $stripe_dev_size striped $num_devs $chunk_size";
/linux-5.19.10/drivers/net/wireless/marvell/libertas/
if_sdio.c
444 u32 chunk_size; in if_sdio_prog_helper() local
473 chunk_size = min_t(size_t, size, 60); in if_sdio_prog_helper()
475 *((__le32*)chunk_buffer) = cpu_to_le32(chunk_size); in if_sdio_prog_helper()
476 memcpy(chunk_buffer + 4, firmware, chunk_size); in if_sdio_prog_helper()
485 firmware += chunk_size; in if_sdio_prog_helper()
486 size -= chunk_size; in if_sdio_prog_helper()
540 u32 chunk_size; in if_sdio_prog_real() local
611 chunk_size = min_t(size_t, req_size, 512); in if_sdio_prog_real()
613 memcpy(chunk_buffer, firmware, chunk_size); in if_sdio_prog_real()
619 chunk_buffer, roundup(chunk_size, 32)); in if_sdio_prog_real()
[all …]
/linux-5.19.10/drivers/gpu/drm/amd/amdkfd/
kfd_device.c
59 unsigned int chunk_size);
928 unsigned int chunk_size) in kfd_gtt_sa_init() argument
930 if (WARN_ON(buf_size < chunk_size)) in kfd_gtt_sa_init()
934 if (WARN_ON(chunk_size == 0)) in kfd_gtt_sa_init()
937 kfd->gtt_sa_chunk_size = chunk_size; in kfd_gtt_sa_init()
938 kfd->gtt_sa_num_of_chunks = buf_size / chunk_size; in kfd_gtt_sa_init()
961 unsigned int chunk_size) in kfd_gtt_sa_calc_gpu_addr() argument
963 return start_addr + bit_num * chunk_size; in kfd_gtt_sa_calc_gpu_addr()
968 unsigned int chunk_size) in kfd_gtt_sa_calc_cpu_addr() argument
970 return (uint32_t *) ((uint64_t) start_addr + bit_num * chunk_size); in kfd_gtt_sa_calc_cpu_addr()
