/Linux-v5.10/drivers/md/ |
D | dm-exception-store.c |
    145 unsigned chunk_size; in set_chunk_size() local
    147 if (kstrtouint(chunk_size_arg, 10, &chunk_size)) { in set_chunk_size()
    152 if (!chunk_size) { in set_chunk_size()
    153 store->chunk_size = store->chunk_mask = store->chunk_shift = 0; in set_chunk_size()
    157 return dm_exception_store_set_chunk_size(store, chunk_size, error); in set_chunk_size()
    161 unsigned chunk_size, in dm_exception_store_set_chunk_size() argument
    165 if (!is_power_of_2(chunk_size)) { in dm_exception_store_set_chunk_size()
    171 if (chunk_size % in dm_exception_store_set_chunk_size()
    173 chunk_size % in dm_exception_store_set_chunk_size()
    179 if (chunk_size > INT_MAX >> SECTOR_SHIFT) { in dm_exception_store_set_chunk_size()
    [all …]
|
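The dm_exception_store_set_chunk_size() hits above validate a snapshot chunk size: it must be a power of two, cover whole logical blocks of the underlying devices, and keep its byte size representable in an int. Below is a minimal userspace sketch of those three checks, assuming 512-byte sectors; check_chunk_size(), its logical_block_size parameter and the error strings are illustrative stand-ins, not the kernel function.

```c
/* Sketch of the chunk-size checks visible above; not the kernel code. */
#include <limits.h>
#include <stdbool.h>
#include <stdio.h>

#define SECTOR_SHIFT 9

static bool is_power_of_2(unsigned long n)
{
    return n != 0 && (n & (n - 1)) == 0;
}

/* chunk_size is in 512-byte sectors, as in the device-mapper targets. */
static const char *check_chunk_size(unsigned chunk_size,
                                    unsigned logical_block_size)
{
    if (!is_power_of_2(chunk_size))
        return "chunk size is not a power of 2";
    /* Chunk (in sectors) must cover whole device logical blocks. */
    if (chunk_size % (logical_block_size >> SECTOR_SHIFT))
        return "chunk size is not a multiple of device block size";
    /* Keep the byte size representable once shifted to bytes. */
    if (chunk_size > INT_MAX >> SECTOR_SHIFT)
        return "chunk size is too high";
    return NULL; /* valid */
}

int main(void)
{
    const char *err = check_chunk_size(16, 4096); /* 16 sectors = 8 KiB */
    printf("16-sector chunk on 4K device: %s\n", err ? err : "ok");
    err = check_chunk_size(3, 512);
    printf("3-sector chunk: %s\n", err ? err : "ok");
    return 0;
}
```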
D | dm-unstripe.c |
    21 uint32_t chunk_size; member
    61 if (kstrtouint(argv[1], 10, &uc->chunk_size) || !uc->chunk_size) { in unstripe_ctr()
    87 uc->unstripe_offset = uc->unstripe * uc->chunk_size; in unstripe_ctr()
    88 uc->unstripe_width = (uc->stripes - 1) * uc->chunk_size; in unstripe_ctr()
    89 uc->chunk_shift = is_power_of_2(uc->chunk_size) ? fls(uc->chunk_size) - 1 : 0; in unstripe_ctr()
    92 if (sector_div(tmp_len, uc->chunk_size)) { in unstripe_ctr()
    97 if (dm_set_target_max_io_len(ti, uc->chunk_size)) { in unstripe_ctr()
    126 sector_div(tmp_sector, uc->chunk_size); in map_to_core()
    156 uc->stripes, (unsigned long long)uc->chunk_size, uc->unstripe, in unstripe_status()
    175 limits->chunk_sectors = uc->chunk_size; in unstripe_io_hints()
|
D | dm-snap-persistent.c |
    81 __le32 chunk_size; member
    172 len = ps->store->chunk_size << SECTOR_SHIFT; in alloc_area()
    234 .sector = ps->store->chunk_size * chunk, in chunk_io()
    235 .count = ps->store->chunk_size, in chunk_io()
    294 memset(ps->area, 0, ps->store->chunk_size << SECTOR_SHIFT); in zero_memory_area()
    307 unsigned chunk_size; in read_header() local
    315 if (!ps->store->chunk_size) { in read_header()
    316 ps->store->chunk_size = max(DM_CHUNK_SIZE_DEFAULT_SECTORS, in read_header()
    319 ps->store->chunk_mask = ps->store->chunk_size - 1; in read_header()
    320 ps->store->chunk_shift = __ffs(ps->store->chunk_size); in read_header()
    [all …]
|
D | dm-stripe.c |
    35 uint32_t chunk_size; member
    90 uint32_t chunk_size; in stripe_ctr() local
    104 if (kstrtouint(argv[1], 10, &chunk_size) || !chunk_size) { in stripe_ctr()
    117 if (sector_div(tmp_len, chunk_size)) { in stripe_ctr()
    151 r = dm_set_target_max_io_len(ti, chunk_size); in stripe_ctr()
    163 sc->chunk_size = chunk_size; in stripe_ctr()
    164 if (chunk_size & (chunk_size - 1)) in stripe_ctr()
    167 sc->chunk_size_shift = __ffs(chunk_size); in stripe_ctr()
    210 chunk_offset = sector_div(chunk, sc->chunk_size); in stripe_map_sector()
    212 chunk_offset = chunk & (sc->chunk_size - 1); in stripe_map_sector()
    [all …]
|
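dm-stripe above records a chunk_size_shift only when the chunk size is a power of two, and stripe_map_sector() then splits a sector into a chunk number and in-chunk offset with either a mask/shift fast path or a full divide. A rough userspace sketch of that two-path mapping follows; struct stripe_conf and map_sector() are names chosen here, and the kernel's sector_div() is replaced by plain C division.

```c
/* Sketch of the sector -> (stripe, chunk, offset) math used by dm-stripe
 * (see the stripe_map_sector() hits above).  Illustrative, not the driver. */
#include <stdint.h>
#include <stdio.h>

struct stripe_conf {
    uint32_t stripes;
    uint32_t chunk_size;        /* in sectors */
    int32_t  chunk_size_shift;  /* -1 if chunk_size is not a power of 2 */
};

static void map_sector(const struct stripe_conf *sc, uint64_t sector,
                       uint32_t *stripe, uint64_t *chunk, uint32_t *chunk_offset)
{
    uint64_t c = sector;

    if (sc->chunk_size_shift < 0) {
        /* Generic path: full 64-bit divide. */
        *chunk_offset = c % sc->chunk_size;
        c /= sc->chunk_size;
    } else {
        /* Power-of-two fast path: mask and shift. */
        *chunk_offset = c & (sc->chunk_size - 1);
        c >>= sc->chunk_size_shift;
    }
    /* Chunks are dealt out round-robin across the stripes. */
    *stripe = c % sc->stripes;
    *chunk = c / sc->stripes;
}

int main(void)
{
    struct stripe_conf sc = { .stripes = 4, .chunk_size = 256,
                              .chunk_size_shift = 8 }; /* 256 = 1 << 8 */
    uint32_t stripe, off;
    uint64_t chunk;

    map_sector(&sc, 1000000, &stripe, &chunk, &off);
    printf("sector 1000000 -> stripe %u, chunk %llu, offset %u\n",
           stripe, (unsigned long long)chunk, off);
    return 0;
}
```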
D | dm-snap-transient.c |
    45 if (size < (tc->next_free + store->chunk_size)) in transient_prepare_exception()
    49 tc->next_free += store->chunk_size; in transient_prepare_exception()
    97 DMEMIT(" N %llu", (unsigned long long)store->chunk_size); in transient_status()
|
/Linux-v5.10/tools/testing/selftests/net/ |
D | tcp_mmap.c |
    85 static size_t chunk_size = 512*1024; variable
    168 buffer = mmap_large_buffer(chunk_size, &buffer_sz); in child_thread()
    174 raddr = mmap(NULL, chunk_size + map_align, PROT_READ, flags, fd, 0); in child_thread()
    193 zc.length = chunk_size; in child_thread()
    201 assert(zc.length <= chunk_size); in child_thread()
    212 assert(zc.recv_skip_hint <= chunk_size); in child_thread()
    223 while (sub < chunk_size) { in child_thread()
    224 lu = read(fd, buffer + sub, chunk_size - sub); in child_thread()
    263 munmap(raddr, chunk_size + map_align); in child_thread()
    317 rcvlowat = chunk_size; in do_accept()
    [all …]
|
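The tcp_mmap selftest above consumes data in chunk_size pieces, preferring TCP zerocopy receive and falling back to a read() loop until a full chunk has arrived (the while/read hits). Below is a generic sketch of that fallback loop only, written independently of the selftest; read_chunk() is a name chosen here.

```c
/* Sketch of the "read() until a full chunk is consumed" fallback visible
 * above in tcp_mmap.c; a generic helper, not the selftest itself. */
#include <errno.h>
#include <stddef.h>
#include <sys/types.h>
#include <unistd.h>

/* Read exactly chunk_size bytes from fd, retrying short reads.
 * Returns bytes read, or -1 on error / early EOF. */
static ssize_t read_chunk(int fd, char *buffer, size_t chunk_size)
{
    size_t sub = 0;

    while (sub < chunk_size) {
        ssize_t lu = read(fd, buffer + sub, chunk_size - sub);

        if (lu == 0)            /* peer closed before a full chunk */
            return -1;
        if (lu < 0) {
            if (errno == EINTR)
                continue;
            return -1;
        }
        sub += (size_t)lu;
    }
    return (ssize_t)sub;
}

int main(void)
{
    char buf[4096];

    /* Example: pull one 4 KiB chunk from stdin. */
    return read_chunk(STDIN_FILENO, buf, sizeof(buf)) < 0 ? 1 : 0;
}
```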
/Linux-v5.10/drivers/gpu/drm/i915/ |
D | i915_buddy.c |
    93 int i915_buddy_init(struct i915_buddy_mm *mm, u64 size, u64 chunk_size) in i915_buddy_init() argument
    98 if (size < chunk_size) in i915_buddy_init()
    101 if (chunk_size < PAGE_SIZE) in i915_buddy_init()
    104 if (!is_power_of_2(chunk_size)) in i915_buddy_init()
    107 size = round_down(size, chunk_size); in i915_buddy_init()
    110 mm->chunk_size = chunk_size; in i915_buddy_init()
    111 mm->max_order = ilog2(size) - ilog2(chunk_size); in i915_buddy_init()
    145 order = ilog2(root_size) - ilog2(chunk_size); in i915_buddy_init()
    154 GEM_BUG_ON(i915_buddy_block_size(mm, root) < chunk_size); in i915_buddy_init()
    201 offset + (mm->chunk_size << block_order)); in split_block()
    [all …]
|
D | i915_buddy.h |
    66 u64 chunk_size; member
    110 return mm->chunk_size << i915_buddy_block_order(block); in i915_buddy_block_size()
    113 int i915_buddy_init(struct i915_buddy_mm *mm, u64 size, u64 chunk_size);
|
D | intel_memory_region.c |
    77 GEM_BUG_ON(!IS_ALIGNED(size, mem->mm.chunk_size)); in __intel_memory_region_get_pages_buddy()
    82 ilog2(mem->mm.chunk_size); in __intel_memory_region_get_pages_buddy()
    87 min_order = ilog2(size) - ilog2(mem->mm.chunk_size); in __intel_memory_region_get_pages_buddy()
    93 n_pages = size >> ilog2(mem->mm.chunk_size); in __intel_memory_region_get_pages_buddy()
|
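In the i915 buddy allocator above, a block spans chunk_size << order bytes (i915_buddy_block_size()) and the largest order is ilog2(size) - ilog2(chunk_size); intel_memory_region.c applies the same arithmetic to derive a minimum order and a page count for an allocation. Below is a small userspace sketch of that order math, assuming a 4 KiB minimum chunk; ilog2_u64() is a stand-in for the kernel's ilog2().

```c
/* Sketch of the buddy-order arithmetic visible in i915_buddy_init() and
 * i915_buddy_block_size() above; illustrative userspace code, not the driver. */
#include <stdint.h>
#include <stdio.h>

static unsigned ilog2_u64(uint64_t v)
{
    unsigned r = 0;

    while (v >>= 1)
        r++;
    return r;
}

int main(void)
{
    uint64_t size = 256ull << 20;   /* 256 MiB region               */
    uint64_t chunk_size = 4096;     /* minimum (order-0) block size */

    /* The region size is rounded down to a chunk_size multiple first. */
    size &= ~(chunk_size - 1);

    unsigned max_order = ilog2_u64(size) - ilog2_u64(chunk_size);

    for (unsigned order = 0; order <= max_order; order += 4)
        printf("order %2u -> block size %llu bytes\n", order,
               (unsigned long long)(chunk_size << order));
    return 0;
}
```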
/Linux-v5.10/net/xdp/ |
D | xdp_umem.c |
    156 u32 npgs_rem, chunk_size = mr->chunk_size, headroom = mr->headroom; in xdp_umem_reg() local
    162 if (chunk_size < XDP_UMEM_MIN_CHUNK_SIZE || chunk_size > PAGE_SIZE) { in xdp_umem_reg()
    175 if (!unaligned_chunks && !is_power_of_2(chunk_size)) in xdp_umem_reg()
    194 chunks = (unsigned int)div_u64_rem(size, chunk_size, &chunks_rem); in xdp_umem_reg()
    201 if (headroom >= chunk_size - XDP_PACKET_HEADROOM) in xdp_umem_reg()
    206 umem->chunk_size = chunk_size; in xdp_umem_reg()
|
D | xsk_buff_pool.c |
    60 pool->chunk_mask = ~((u64)umem->chunk_size - 1); in xp_create_and_assign_umem()
    65 pool->chunk_size = umem->chunk_size; in xp_create_and_assign_umem()
    67 pool->frame_len = umem->chunk_size - umem->headroom - in xp_create_and_assign_umem()
    84 xskb->xdp.frame_sz = umem->chunk_size - umem->headroom; in xp_create_and_assign_umem()
    431 return xp_desc_crosses_non_contig_pg(pool, addr, pool->chunk_size); in xp_addr_crosses_non_contig_pg()
    438 *addr + pool->chunk_size > pool->addrs_cnt || in xp_check_unaligned()
|
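xdp_umem_reg() above bounds chunk_size between XDP_UMEM_MIN_CHUNK_SIZE and PAGE_SIZE, requires a power of two unless unaligned chunks are enabled, checks the requested headroom, and derives the chunk count from the UMEM size; xsk_buff_pool.c then turns the chunk size into a chunk_mask and a frame_len. A compact sketch of those steps follows, with the v5.10 constant values hard-coded and a 4 KiB page assumed; umem_reg_check() is an illustrative stand-in, not the kernel function.

```c
/* Sketch of the UMEM chunk checks and derived pool fields listed above. */
#include <errno.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define PAGE_SIZE               4096u
#define XDP_UMEM_MIN_CHUNK_SIZE 2048u
#define XDP_PACKET_HEADROOM     256u

static bool is_power_of_2(uint32_t n)
{
    return n != 0 && (n & (n - 1)) == 0;
}

static int umem_reg_check(uint64_t size, uint32_t chunk_size,
                          uint32_t headroom, bool unaligned_chunks)
{
    if (chunk_size < XDP_UMEM_MIN_CHUNK_SIZE || chunk_size > PAGE_SIZE)
        return -EINVAL;           /* chunk must fit in one page       */
    if (!unaligned_chunks && !is_power_of_2(chunk_size))
        return -EINVAL;           /* aligned mode needs a power of 2  */
    if (headroom >= chunk_size - XDP_PACKET_HEADROOM)
        return -EINVAL;           /* leave room for packet data       */

    uint64_t chunks = size / chunk_size;

    if (!chunks || chunks > UINT32_MAX)
        return -EINVAL;
    if (!unaligned_chunks && (size % chunk_size))
        return -EINVAL;           /* UMEM must divide evenly          */
    return 0;
}

int main(void)
{
    uint32_t chunk_size = 2048, headroom = 0;

    if (umem_reg_check(64ull << 20, chunk_size, headroom, false) == 0) {
        /* Fields the buffer pool derives from the chunk size. */
        uint64_t chunk_mask = ~((uint64_t)chunk_size - 1);
        uint32_t frame_len = chunk_size - headroom - XDP_PACKET_HEADROOM;

        printf("chunk_mask=0x%llx frame_len=%u\n",
               (unsigned long long)chunk_mask, frame_len);
    }
    return 0;
}
```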
/Linux-v5.10/drivers/gpu/drm/i915/selftests/ |
D | i915_buddy.c |
    58 if (block_size < mm->chunk_size) { in igt_check_block()
    68 if (!IS_ALIGNED(block_size, mm->chunk_size)) { in igt_check_block()
    73 if (!IS_ALIGNED(offset, mm->chunk_size)) { in igt_check_block()
    279 static void igt_mm_config(u64 *size, u64 *chunk_size) in igt_mm_config() argument
    296 *chunk_size = (u64)ms << 12; in igt_mm_config()
    305 u64 chunk_size; in igt_buddy_alloc_smoke() local
    310 igt_mm_config(&mm_size, &chunk_size); in igt_buddy_alloc_smoke()
    312 pr_info("buddy_init with size=%llx, chunk_size=%llx\n", mm_size, chunk_size); in igt_buddy_alloc_smoke()
    314 err = i915_buddy_init(&mm, mm_size, chunk_size); in igt_buddy_alloc_smoke()
    637 u64 chunk_size; in igt_buddy_alloc_range() local
    [all …]
|
/Linux-v5.10/arch/x86/platform/olpc/ |
D | olpc_dt.c |
    131 const size_t chunk_size = max(PAGE_SIZE, size); in prom_early_alloc() local
    139 res = memblock_alloc(chunk_size, SMP_CACHE_BYTES); in prom_early_alloc()
    142 chunk_size); in prom_early_alloc()
    144 prom_early_allocated += chunk_size; in prom_early_alloc()
    145 memset(res, 0, chunk_size); in prom_early_alloc()
    146 free_mem = chunk_size; in prom_early_alloc()
|
/Linux-v5.10/drivers/net/ethernet/mellanox/mlxsw/ |
D | i2c.c |
    320 int off = mlxsw_i2c->cmd.mb_off_in, chunk_size, i, j; in mlxsw_i2c_write() local
    334 chunk_size = (in_mbox_size > mlxsw_i2c->block_size) ? in mlxsw_i2c_write()
    336 write_tran.len = MLXSW_I2C_ADDR_WIDTH + chunk_size; in mlxsw_i2c_write()
    339 mlxsw_i2c->block_size * i, chunk_size); in mlxsw_i2c_write()
    359 off += chunk_size; in mlxsw_i2c_write()
    360 in_mbox_size -= chunk_size; in mlxsw_i2c_write()
    399 int num, chunk_size, reg_size, i, j; in mlxsw_i2c_cmd() local
    447 chunk_size = (reg_size > mlxsw_i2c->block_size) ? in mlxsw_i2c_cmd()
    449 read_tran[1].len = chunk_size; in mlxsw_i2c_cmd()
    471 off += chunk_size; in mlxsw_i2c_cmd()
    [all …]
|
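mlxsw_i2c_write() and mlxsw_i2c_cmd() above move a command mailbox across the I2C bus in chunks capped at the adapter's block_size, clamping the last chunk and advancing the offset as they go. The following is a generic sketch of that chunking loop; xfer() is a placeholder callback, not part of the mlxsw driver's API.

```c
/* Generic sketch of a block_size-limited chunked write, in the spirit of
 * the mlxsw_i2c_write() hits above.  xfer() stands in for the real transfer. */
#include <stddef.h>
#include <stdio.h>

typedef int (*xfer_fn)(unsigned off, const unsigned char *buf, size_t len);

static int write_chunked(xfer_fn xfer, unsigned off,
                         const unsigned char *buf, size_t len,
                         size_t block_size)
{
    while (len) {
        /* Clamp the final chunk to whatever is left. */
        size_t chunk_size = len > block_size ? block_size : len;
        int err = xfer(off, buf, chunk_size);

        if (err)
            return err;
        buf += chunk_size;
        off += chunk_size;
        len -= chunk_size;
    }
    return 0;
}

static int dummy_xfer(unsigned off, const unsigned char *buf, size_t len)
{
    (void)buf;
    printf("xfer %zu bytes at offset 0x%x\n", len, off);
    return 0;
}

int main(void)
{
    unsigned char mbox[100] = { 0 };

    return write_chunked(dummy_xfer, 0x1000, mbox, sizeof(mbox), 32);
}
```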
/Linux-v5.10/drivers/rtc/ |
D | rtc-isl12026.c |
    328 size_t chunk_size, num_written; in isl12026_nvm_write() local
    348 chunk_size = round_down(offset, ISL12026_PAGESIZE) + in isl12026_nvm_write()
    350 chunk_size = min(bytes, chunk_size); in isl12026_nvm_write()
    355 memcpy(payload + 2, v + num_written, chunk_size); in isl12026_nvm_write()
    358 msgs[0].len = chunk_size + 2; in isl12026_nvm_write()
    368 bytes -= chunk_size; in isl12026_nvm_write()
    369 offset += chunk_size; in isl12026_nvm_write()
    370 num_written += chunk_size; in isl12026_nvm_write()
|
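isl12026_nvm_write() above never lets a single I2C write cross an EEPROM page: each chunk runs from the current offset to the next page boundary and is then clamped to the bytes still pending. A sketch of that slicing follows, using a 16-byte page purely for illustration; the actual write is replaced by a printf().

```c
/* Sketch of the page-bounded chunking in isl12026_nvm_write() above. */
#include <stddef.h>
#include <stdio.h>

#define PAGESIZE 16u

static size_t round_down_u(size_t x, size_t align)
{
    return x - (x % align);
}

static void nvm_write(unsigned offset, size_t bytes)
{
    size_t num_written = 0;

    while (bytes) {
        /* Distance from 'offset' to the end of its page... */
        size_t chunk_size = round_down_u(offset, PAGESIZE)
                            + PAGESIZE - offset;
        /* ...clamped to what is left to write. */
        if (chunk_size > bytes)
            chunk_size = bytes;

        printf("write %zu bytes at offset %u\n", chunk_size, offset);

        bytes -= chunk_size;
        offset += chunk_size;
        num_written += chunk_size;
    }
    printf("total written: %zu\n", num_written);
}

int main(void)
{
    nvm_write(10, 40); /* starts mid-page: 6, 16, 16, 2 byte chunks */
    return 0;
}
```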
/Linux-v5.10/fs/nilfs2/ |
D | dir.c |
    120 unsigned int chunk_size = nilfs_chunk_size(dir); in nilfs_check_page() local
    129 if (limit & (chunk_size - 1)) in nilfs_check_page()
    144 if (((offs + rec_len - 1) ^ offs) & ~(chunk_size-1)) in nilfs_check_page()
    443 unsigned int chunk_size = nilfs_chunk_size(dir); in nilfs_add_link() local
    475 rec_len = chunk_size; in nilfs_add_link()
    476 de->rec_len = nilfs_rec_len_to_disk(chunk_size); in nilfs_add_link()
    583 unsigned int chunk_size = nilfs_chunk_size(inode); in nilfs_make_empty() local
    591 err = nilfs_prepare_chunk(page, 0, chunk_size); in nilfs_make_empty()
    597 memset(kaddr, 0, chunk_size); in nilfs_make_empty()
    607 de->rec_len = nilfs_rec_len_to_disk(chunk_size - NILFS_DIR_REC_LEN(1)); in nilfs_make_empty()
    [all …]
|
/Linux-v5.10/arch/x86/kernel/cpu/mtrr/ |
D | cleanup.c |
    474 u64 chunk_size, u64 gran_size) in x86_setup_var_mtrrs() argument
    483 var_state.chunk_sizek = chunk_size >> 10; in x86_setup_var_mtrrs()
    590 mtrr_calc_range_state(u64 chunk_size, u64 gran_size, in mtrr_calc_range_state() argument
    607 num_reg = x86_setup_var_mtrrs(range, nr_range, chunk_size, gran_size); in mtrr_calc_range_state()
    615 result[i].chunk_sizek = chunk_size >> 10; in mtrr_calc_range_state()
    687 u64 chunk_size, gran_size; in mtrr_cleanup() local
    758 for (chunk_size = gran_size; chunk_size < (1ULL<<32); in mtrr_cleanup()
    759 chunk_size <<= 1) { in mtrr_cleanup()
    764 mtrr_calc_range_state(chunk_size, gran_size, in mtrr_cleanup()
    784 chunk_size = result[i].chunk_sizek; in mtrr_cleanup()
    [all …]
|
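mtrr_cleanup() above searches for a workable (gran_size, chunk_size) pair by doubling chunk_size from gran_size up to 4 GiB and scoring each combination via mtrr_calc_range_state(). The skeleton below reproduces only that doubling sweep; score() is a placeholder metric, not the kernel's register/coverage accounting, and the gran_size starting point is an assumption for illustration.

```c
/* Skeleton of the (gran_size, chunk_size) sweep seen in mtrr_cleanup() above. */
#include <stdint.h>
#include <stdio.h>

static uint64_t score(uint64_t chunk_size, uint64_t gran_size)
{
    /* Stand-in metric: prefer the smallest sizes (the kernel instead scores
     * by MTRR register count and lost coverage). */
    return chunk_size + gran_size;
}

int main(void)
{
    uint64_t best = UINT64_MAX, best_chunk = 0, best_gran = 0;

    for (uint64_t gran_size = 64 << 10; gran_size < (1ULL << 32);
         gran_size <<= 1) {
        for (uint64_t chunk_size = gran_size;
             chunk_size < (1ULL << 32); chunk_size <<= 1) {
            uint64_t s = score(chunk_size, gran_size);

            if (s < best) {
                best = s;
                best_chunk = chunk_size;
                best_gran = gran_size;
            }
        }
    }
    printf("best chunk_size=%llu gran_size=%llu\n",
           (unsigned long long)best_chunk, (unsigned long long)best_gran);
    return 0;
}
```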
/Linux-v5.10/fs/ext2/ |
D | dir.c |
    119 unsigned chunk_size = ext2_chunk_size(dir); in ext2_check_page() local
    129 if (limit & (chunk_size - 1)) in ext2_check_page()
    144 if (unlikely(((offs + rec_len - 1) ^ offs) & ~(chunk_size-1))) in ext2_check_page()
    465 unsigned chunk_size = ext2_chunk_size(dir); in ext2_add_link() local
    497 rec_len = chunk_size; in ext2_add_link()
    498 de->rec_len = ext2_rec_len_to_disk(chunk_size); in ext2_add_link()
    605 unsigned chunk_size = ext2_chunk_size(inode); in ext2_make_empty() local
    613 err = ext2_prepare_chunk(page, 0, chunk_size); in ext2_make_empty()
    619 memset(kaddr, 0, chunk_size); in ext2_make_empty()
    629 de->rec_len = ext2_rec_len_to_disk(chunk_size - EXT2_DIR_REC_LEN(1)); in ext2_make_empty()
    [all …]
|
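Both nilfs_check_page() and ext2_check_page() above require the directory page limit to be a chunk multiple and reject any record whose first and last bytes land in different chunks, using ((offs + rec_len - 1) ^ offs) & ~(chunk_size - 1). A short sketch of that boundary test with a worked example; it relies on chunk_size being a power of two, as it is for these filesystems.

```c
/* Sketch of the "record must not cross a chunk boundary" test shared by the
 * ext2 and nilfs2 directory code above: XOR the first and last byte offsets
 * of a record and mask off the in-chunk bits; the result is non-zero exactly
 * when the two offsets fall in different chunks. */
#include <stdbool.h>
#include <stdio.h>

static bool crosses_chunk(unsigned offs, unsigned rec_len, unsigned chunk_size)
{
    return (((offs + rec_len - 1) ^ offs) & ~(chunk_size - 1)) != 0;
}

int main(void)
{
    unsigned chunk_size = 1024; /* directory chunk = block size */

    /* Record fully inside the first chunk: accepted. */
    printf("offs=100 len=200 -> crosses=%d\n",
           crosses_chunk(100, 200, chunk_size));
    /* Record starting at 1000 and running 100 bytes spills into the
     * second chunk: rejected by the page check. */
    printf("offs=1000 len=100 -> crosses=%d\n",
           crosses_chunk(1000, 100, chunk_size));
    return 0;
}
```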
/Linux-v5.10/Documentation/admin-guide/device-mapper/ |
D | striped.rst |
    36 my $chunk_size = 128 * 2;
    54 $stripe_dev_size -= $stripe_dev_size % ($chunk_size * $num_devs);
    56 $table = "0 $stripe_dev_size striped $num_devs $chunk_size";
|
/Linux-v5.10/drivers/net/wireless/marvell/libertas/ |
D | if_sdio.c |
    444 u32 chunk_size; in if_sdio_prog_helper() local
    473 chunk_size = min_t(size_t, size, 60); in if_sdio_prog_helper()
    475 *((__le32*)chunk_buffer) = cpu_to_le32(chunk_size); in if_sdio_prog_helper()
    476 memcpy(chunk_buffer + 4, firmware, chunk_size); in if_sdio_prog_helper()
    485 firmware += chunk_size; in if_sdio_prog_helper()
    486 size -= chunk_size; in if_sdio_prog_helper()
    540 u32 chunk_size; in if_sdio_prog_real() local
    611 chunk_size = min_t(size_t, req_size, 512); in if_sdio_prog_real()
    613 memcpy(chunk_buffer, firmware, chunk_size); in if_sdio_prog_real()
    619 chunk_buffer, roundup(chunk_size, 32)); in if_sdio_prog_real()
    [all …]
|
/Linux-v5.10/drivers/net/wireless/ath/wcn36xx/ |
D | dxe.c |
    238 int i, chunk_size = pool->chunk_size; in wcn36xx_dxe_init_tx_bd() local
    249 bd_phy_addr += chunk_size; in wcn36xx_dxe_init_tx_bd()
    250 bd_cpu_addr += chunk_size; in wcn36xx_dxe_init_tx_bd()
    675 wcn->mgmt_mem_pool.chunk_size = WCN36XX_BD_CHUNK_SIZE + in wcn36xx_dxe_allocate_mem_pools()
    678 s = wcn->mgmt_mem_pool.chunk_size * WCN36XX_DXE_CH_DESC_NUMB_TX_H; in wcn36xx_dxe_allocate_mem_pools()
    690 wcn->data_mem_pool.chunk_size = WCN36XX_BD_CHUNK_SIZE + in wcn36xx_dxe_allocate_mem_pools()
    693 s = wcn->data_mem_pool.chunk_size * WCN36XX_DXE_CH_DESC_NUMB_TX_L; in wcn36xx_dxe_allocate_mem_pools()
    713 dma_free_coherent(wcn->dev, wcn->mgmt_mem_pool.chunk_size * in wcn36xx_dxe_free_mem_pools()
    719 dma_free_coherent(wcn->dev, wcn->data_mem_pool.chunk_size * in wcn36xx_dxe_free_mem_pools()
|
/Linux-v5.10/drivers/gpu/drm/amd/display/dc/dcn21/ |
D | dcn21_hubp.c |
    151 CHUNK_SIZE, rq_regs->rq_regs_l.chunk_size, in hubp21_program_requestor()
    160 CHUNK_SIZE_C, rq_regs->rq_regs_c.chunk_size, in hubp21_program_requestor()
    273 CHUNK_SIZE, &rq_regs.rq_regs_l.chunk_size, in hubp21_validate_dml_output()
    282 CHUNK_SIZE_C, &rq_regs.rq_regs_c.chunk_size, in hubp21_validate_dml_output()
    306 if (rq_regs.rq_regs_l.chunk_size != dml_rq_regs->rq_regs_l.chunk_size) in hubp21_validate_dml_output()
    308 dml_rq_regs->rq_regs_l.chunk_size, rq_regs.rq_regs_l.chunk_size); in hubp21_validate_dml_output()
    331 if (rq_regs.rq_regs_c.chunk_size != dml_rq_regs->rq_regs_c.chunk_size) in hubp21_validate_dml_output()
    333 dml_rq_regs->rq_regs_c.chunk_size, rq_regs.rq_regs_c.chunk_size); in hubp21_validate_dml_output()
|
/Linux-v5.10/include/uapi/linux/raid/ |
D | md_u.h |
    106 int chunk_size; /* 1 chunk size in bytes */ member
    152 int chunk_size; /* in bytes */ member
|
/Linux-v5.10/arch/x86/xen/ |
D | setup.c |
    731 phys_addr_t mem_end, addr, size, chunk_size; in xen_memory_setup() local
    814 chunk_size = size; in xen_memory_setup()
    819 chunk_size = min(size, mem_end - addr); in xen_memory_setup()
    821 chunk_size = min(size, PFN_PHYS(extra_pages)); in xen_memory_setup()
    823 n_pfns = PFN_DOWN(addr + chunk_size) - pfn_s; in xen_memory_setup()
    832 xen_align_and_add_e820_region(addr, chunk_size, type); in xen_memory_setup()
    834 addr += chunk_size; in xen_memory_setup()
    835 size -= chunk_size; in xen_memory_setup()
|
/Linux-v5.10/drivers/net/ethernet/mellanox/mlx5/core/en/xsk/ |
D | setup.c |
    19 if (xsk->chunk_size > PAGE_SIZE || in mlx5e_validate_xsk_param()
    20 xsk->chunk_size < MLX5E_MIN_XSK_CHUNK_SIZE) in mlx5e_validate_xsk_param()
    24 if (mlx5e_rx_get_min_frag_sz(params, xsk) > xsk->chunk_size) in mlx5e_validate_xsk_param()
|