/Linux-v5.4/tools/testing/selftests/vm/
  mlock-random-test.c
    138  int test_mlock_within_limit(char *p, int alloc_size)   in test_mlock_within_limit() argument
    147  if (cur.rlim_cur < alloc_size) {   in test_mlock_within_limit()
    149  alloc_size, (unsigned int)cur.rlim_cur);   in test_mlock_within_limit()
    162  int lock_size = rand() % alloc_size;   in test_mlock_within_limit()
    163  int start_offset = rand() % (alloc_size - lock_size);   in test_mlock_within_limit()
    174  p, alloc_size,   in test_mlock_within_limit()
    190  if (locked_vm_size > PAGE_ALIGN(alloc_size, page_size) + page_size) {   in test_mlock_within_limit()
    192  locked_vm_size, alloc_size);   in test_mlock_within_limit()
    215  int test_mlock_outof_limit(char *p, int alloc_size)   in test_mlock_outof_limit() argument
    223  if (cur.rlim_cur >= alloc_size) {   in test_mlock_outof_limit()
    [all …]
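The mlock-random-test hits above exercise mlock() on random sub-ranges of a mapping after checking RLIMIT_MEMLOCK. A minimal userspace sketch of that pattern; the 1 MiB size and the messages are illustrative, not taken from the selftest:

#include <stdio.h>
#include <stdlib.h>
#include <sys/mman.h>
#include <sys/resource.h>

int main(void)
{
    const size_t alloc_size = 1 << 20;  /* 1 MiB test mapping */
    struct rlimit cur;

    if (getrlimit(RLIMIT_MEMLOCK, &cur) != 0) {
        perror("getrlimit");
        return 1;
    }
    /* As in the selftest, locking only makes sense if the limit covers the mapping. */
    if (cur.rlim_cur < alloc_size) {
        fprintf(stderr, "need RLIMIT_MEMLOCK >= %zu, have %llu\n",
                alloc_size, (unsigned long long)cur.rlim_cur);
        return 1;
    }

    char *p = mmap(NULL, alloc_size, PROT_READ | PROT_WRITE,
                   MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
    if (p == MAP_FAILED) {
        perror("mmap");
        return 1;
    }

    /* Lock a random sub-range, as the selftest does in a loop. */
    size_t lock_size = (size_t)rand() % alloc_size;
    size_t start_offset = (size_t)rand() % (alloc_size - lock_size);

    if (mlock(p + start_offset, lock_size) != 0)
        perror("mlock");
    else
        munlock(p + start_offset, lock_size);

    munmap(p, alloc_size);
    return 0;
}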
/Linux-v5.4/drivers/net/ethernet/mellanox/mlxsw/
  spectrum1_kvdl.c
    35  unsigned int alloc_size;   member
    49  .alloc_size = MLXSW_SP1_KVDL_##id##_ALLOC_SIZE, \
    72  unsigned int alloc_size)   in mlxsw_sp1_kvdl_alloc_size_part() argument
    79  if (alloc_size <= part->info.alloc_size &&   in mlxsw_sp1_kvdl_alloc_size_part()
    81  part->info.alloc_size <= min_part->info.alloc_size))   in mlxsw_sp1_kvdl_alloc_size_part()
    108  return info->start_index + entry_index * info->alloc_size;   in mlxsw_sp1_kvdl_to_kvdl_index()
    115  return (kvdl_index - info->start_index) / info->alloc_size;   in mlxsw_sp1_kvdl_to_entry_index()
    125  info->alloc_size;   in mlxsw_sp1_kvdl_part_alloc()
    190  *p_alloc_size = part->info.alloc_size;   in mlxsw_sp1_kvdl_alloc_size_query()
    227  nr_entries = div_u64(resource_size, info->alloc_size);   in mlxsw_sp1_kvdl_part_init()
    [all …]
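In the spectrum1_kvdl hits above, each partition hands out fixed-size chunks and converts between an entry index and a KVDL index using the partition's start_index and alloc_size (lines 108 and 115). A minimal sketch of that arithmetic with made-up values; the real sizes come from the MLXSW_SP1_KVDL_*_ALLOC_SIZE constants:

#include <stdio.h>

/* Hypothetical partition description mirroring the part_info fields used above. */
struct part_info {
    unsigned int start_index;   /* first index owned by the partition */
    unsigned int alloc_size;    /* indexes consumed per allocation */
};

static unsigned int to_kvdl_index(const struct part_info *info,
                                  unsigned int entry_index)
{
    return info->start_index + entry_index * info->alloc_size;
}

static unsigned int to_entry_index(const struct part_info *info,
                                   unsigned int kvdl_index)
{
    return (kvdl_index - info->start_index) / info->alloc_size;
}

int main(void)
{
    struct part_info info = { .start_index = 0x100, .alloc_size = 32 };
    unsigned int kvdl = to_kvdl_index(&info, 3);

    printf("entry 3 -> kvdl 0x%x -> entry %u\n",
           kvdl, to_entry_index(&info, kvdl));
    return 0;
}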
/Linux-v5.4/kernel/dma/
  swiotlb.c
    203  size_t alloc_size;   in swiotlb_init_with_tbl() local
    216  alloc_size = PAGE_ALIGN(io_tlb_nslabs * sizeof(int));   in swiotlb_init_with_tbl()
    217  io_tlb_list = memblock_alloc(alloc_size, PAGE_SIZE);   in swiotlb_init_with_tbl()
    220  __func__, alloc_size, PAGE_SIZE);   in swiotlb_init_with_tbl()
    222  alloc_size = PAGE_ALIGN(io_tlb_nslabs * sizeof(phys_addr_t));   in swiotlb_init_with_tbl()
    223  io_tlb_orig_addr = memblock_alloc(alloc_size, PAGE_SIZE);   in swiotlb_init_with_tbl()
    226  __func__, alloc_size, PAGE_SIZE);   in swiotlb_init_with_tbl()
    449  size_t alloc_size,   in swiotlb_tbl_map_single() argument
    468  if (mapping_size > alloc_size) {   in swiotlb_tbl_map_single()
    470  mapping_size, alloc_size);   in swiotlb_tbl_map_single()
    [all …]
  direct.c
    88  size_t alloc_size = PAGE_ALIGN(size);   in __dma_direct_alloc_pages() local
    100  page = dma_alloc_contiguous(dev, alloc_size, gfp);   in __dma_direct_alloc_pages()
    102  dma_free_contiguous(dev, page, alloc_size);   in __dma_direct_alloc_pages()
    107  page = alloc_pages_node(node, gfp, get_order(alloc_size));   in __dma_direct_alloc_pages()
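Both kernel/dma files above round the requested size up to whole pages before allocating: __dma_direct_alloc_pages() page-aligns the mapping size, and swiotlb_init_with_tbl() sizes its io_tlb_list and io_tlb_orig_addr bookkeeping arrays the same way before calling memblock_alloc(). A standalone sketch of just the sizing arithmetic, using local stand-ins for the kernel's PAGE_SIZE and PAGE_ALIGN():

#include <stdio.h>
#include <stdint.h>

/* Local stand-ins for the kernel macros; 4 KiB pages assumed. */
#define PAGE_SIZE       4096UL
#define ALIGN_UP(x, a)  (((x) + (a) - 1) & ~((a) - 1))
#define PAGE_ALIGN(x)   ALIGN_UP(x, PAGE_SIZE)

int main(void)
{
    unsigned long io_tlb_nslabs = 32768;    /* example slab count */
    /* uint64_t stands in for phys_addr_t here. */
    size_t list_size = PAGE_ALIGN(io_tlb_nslabs * sizeof(int));
    size_t orig_size = PAGE_ALIGN(io_tlb_nslabs * sizeof(uint64_t));

    /* In the kernel both results go to memblock_alloc(size, PAGE_SIZE). */
    printf("io_tlb_list:      %zu bytes\n", list_size);
    printf("io_tlb_orig_addr: %zu bytes\n", orig_size);
    return 0;
}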
/Linux-v5.4/tools/testing/selftests/powerpc/stringloops/
  memcmp.c
    76  unsigned long alloc_size = comp_size + MAX_OFFSET_DIFF_S1_S2;   in testcase() local
    79  s1 = memalign(128, alloc_size);   in testcase()
    85  s2 = memalign(128, alloc_size);   in testcase()
    99  for (j = 0; j < alloc_size; j++)   in testcase()
    125  for (j = 0; j < alloc_size; j++)   in testcase()
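The powerpc memcmp selftest allocates both comparison buffers 128-byte aligned, padding alloc_size by the maximum offset difference it will apply between s1 and s2. A hedged userspace sketch using posix_memalign(); the padding constant and buffer size are illustrative, not the selftest's values:

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define MAX_OFFSET_DIFF_S1_S2 48    /* illustrative padding */

int main(void)
{
    size_t comp_size = 4096;
    size_t alloc_size = comp_size + MAX_OFFSET_DIFF_S1_S2;
    char *s1, *s2;

    /* 128-byte alignment, as the selftest requests via memalign(128, ...). */
    if (posix_memalign((void **)&s1, 128, alloc_size) ||
        posix_memalign((void **)&s2, 128, alloc_size)) {
        fprintf(stderr, "posix_memalign failed\n");
        return 1;
    }

    /* Fill both buffers identically, then compare a window inside them. */
    for (size_t j = 0; j < alloc_size; j++)
        s1[j] = s2[j] = (char)rand();

    printf("memcmp: %d\n", memcmp(s1, s2, comp_size));
    free(s1);
    free(s2);
    return 0;
}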
/Linux-v5.4/lib/
  scatterlist.c
    203  unsigned int alloc_size = table->orig_nents;   in __sg_free_table() local
    212  if (alloc_size > curr_max_ents) {   in __sg_free_table()
    214  alloc_size = curr_max_ents;   in __sg_free_table()
    215  sg_size = alloc_size - 1;   in __sg_free_table()
    217  sg_size = alloc_size;   in __sg_free_table()
    225  free_fn(sgl, alloc_size);   in __sg_free_table()
    288  unsigned int sg_size, alloc_size = left;   in __sg_alloc_table() local
    290  if (alloc_size > curr_max_ents) {   in __sg_alloc_table()
    291  alloc_size = curr_max_ents;   in __sg_alloc_table()
    292  sg_size = alloc_size - 1;   in __sg_alloc_table()
    [all …]
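In __sg_alloc_table() above, a request for nents scatterlist entries is carved into chunks of at most curr_max_ents; every chunk that is followed by another one gives up its last slot to the chain pointer, which is why sg_size drops to alloc_size - 1 (lines 290-292). A small sketch that reproduces just the chunk-size arithmetic; the max_ents value is illustrative:

#include <stdio.h>

/* Walk the same chunking loop as __sg_alloc_table(), printing the sizes
 * that would be allocated.  max_ents stands in for curr_max_ents. */
static void sg_chunk_sizes(unsigned int nents, unsigned int max_ents)
{
    unsigned int left = nents;

    do {
        unsigned int sg_size, alloc_size = left;

        if (alloc_size > max_ents) {
            alloc_size = max_ents;
            sg_size = alloc_size - 1;   /* last entry becomes the chain link */
        } else {
            sg_size = alloc_size;
        }
        left -= sg_size;

        printf("allocate %u entries, %u usable, %u still needed\n",
               alloc_size, sg_size, left);
    } while (left);
}

int main(void)
{
    sg_chunk_sizes(300, 128);   /* e.g. 300 entries, 128 entries per chunk */
    return 0;
}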
/Linux-v5.4/mm/
  percpu.c
    1314  size_t alloc_size;   in pcpu_alloc_first_chunk() local
    1330  alloc_size = sizeof(struct pcpu_chunk) +   in pcpu_alloc_first_chunk()
    1332  chunk = memblock_alloc(alloc_size, SMP_CACHE_BYTES);   in pcpu_alloc_first_chunk()
    1335  alloc_size);   in pcpu_alloc_first_chunk()
    1346  alloc_size = BITS_TO_LONGS(region_bits) * sizeof(chunk->alloc_map[0]);   in pcpu_alloc_first_chunk()
    1347  chunk->alloc_map = memblock_alloc(alloc_size, SMP_CACHE_BYTES);   in pcpu_alloc_first_chunk()
    1350  alloc_size);   in pcpu_alloc_first_chunk()
    1352  alloc_size =   in pcpu_alloc_first_chunk()
    1354  chunk->bound_map = memblock_alloc(alloc_size, SMP_CACHE_BYTES);   in pcpu_alloc_first_chunk()
    1357  alloc_size);   in pcpu_alloc_first_chunk()
    [all …]
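pcpu_alloc_first_chunk() above sizes the chunk's allocation bitmaps as BITS_TO_LONGS(bits) * sizeof(long) before handing the result to memblock_alloc() (line 1346). The rounding can be reproduced with a local BITS_TO_LONGS, for example:

#include <stdio.h>
#include <limits.h>

/* Local equivalent of the kernel's BITS_TO_LONGS(): round a bit count
 * up to a whole number of unsigned longs. */
#define BITS_PER_LONG        (sizeof(unsigned long) * CHAR_BIT)
#define BITS_TO_LONGS(nbits) (((nbits) + BITS_PER_LONG - 1) / BITS_PER_LONG)

int main(void)
{
    unsigned long region_bits = 1000;   /* example: 1000 allocation units */
    size_t alloc_size = BITS_TO_LONGS(region_bits) * sizeof(unsigned long);

    printf("bitmap for %lu bits needs %zu bytes (%zu longs)\n",
           region_bits, alloc_size, (size_t)BITS_TO_LONGS(region_bits));
    return 0;
}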
/Linux-v5.4/fs/ocfs2/
  move_extents.c
    742  static void ocfs2_calc_extent_defrag_len(u32 *alloc_size, u32 *len_defraged,   in ocfs2_calc_extent_defrag_len() argument
    745  if ((*alloc_size + *len_defraged) < threshold) {   in ocfs2_calc_extent_defrag_len()
    749  *len_defraged += *alloc_size;   in ocfs2_calc_extent_defrag_len()
    764  *alloc_size = threshold - *len_defraged;   in ocfs2_calc_extent_defrag_len()
    773  u32 cpos, phys_cpos, move_start, len_to_move, alloc_size;   in __ocfs2_move_extents_range() local
    830  ret = ocfs2_get_clusters(inode, cpos, &phys_cpos, &alloc_size,   in __ocfs2_move_extents_range()
    837  if (alloc_size > len_to_move)   in __ocfs2_move_extents_range()
    838  alloc_size = len_to_move;   in __ocfs2_move_extents_range()
    854  ocfs2_calc_extent_defrag_len(&alloc_size, &len_defraged,   in __ocfs2_move_extents_range()
    866  cpos, phys_cpos, alloc_size, len_defraged);   in __ocfs2_move_extents_range()
    [all …]
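ocfs2_calc_extent_defrag_len() above caps how much of the current extent is defragmented in one pass: while the running total stays under the threshold the whole extent is taken, otherwise alloc_size is trimmed so the total lands exactly on the threshold (line 764). A simplified sketch of that clamping; the real helper also sets a skip flag for large extents, which is omitted here:

#include <stdio.h>
#include <stdint.h>

/* Clamp the next defrag chunk so len_defraged never exceeds threshold.
 * Simplified: the real ocfs2 helper also handles a "skip" case. */
static void calc_extent_defrag_len(uint32_t *alloc_size, uint32_t *len_defraged,
                                   uint32_t threshold)
{
    if (*alloc_size + *len_defraged < threshold) {
        /* Whole extent fits under the threshold: take all of it. */
        *len_defraged += *alloc_size;
    } else {
        /* Take only what is needed to reach the threshold, then reset. */
        *alloc_size = threshold - *len_defraged;
        *len_defraged = 0;
    }
}

int main(void)
{
    uint32_t alloc_size = 300, len_defraged = 900, threshold = 1024;

    calc_extent_defrag_len(&alloc_size, &len_defraged, threshold);
    printf("defrag %u clusters now, %u carried over\n", alloc_size, len_defraged);
    return 0;
}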
/Linux-v5.4/drivers/gpu/drm/vmwgfx/
  vmwgfx_simple_resource.c
    156  size_t alloc_size;   in vmw_simple_resource_create_ioctl() local
    160  alloc_size = offsetof(struct vmw_user_simple_resource, simple) +   in vmw_simple_resource_create_ioctl()
    162  account_size = ttm_round_pot(alloc_size) + VMW_IDA_ACC_SIZE +   in vmw_simple_resource_create_ioctl()
    180  usimple = kzalloc(alloc_size, GFP_KERNEL);   in vmw_simple_resource_create_ioctl()
/Linux-v5.4/net/openvswitch/
  vport.c
    122  size_t alloc_size;   in ovs_vport_alloc() local
    124  alloc_size = sizeof(struct vport);   in ovs_vport_alloc()
    126  alloc_size = ALIGN(alloc_size, VPORT_ALIGN);   in ovs_vport_alloc()
    127  alloc_size += priv_size;   in ovs_vport_alloc()
    130  vport = kzalloc(alloc_size, GFP_KERNEL);   in ovs_vport_alloc()
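ovs_vport_alloc() above is the common "public struct followed by an aligned driver-private area" pattern: priv_size is appended after sizeof(struct vport) rounded up to VPORT_ALIGN, and one kzalloc() covers both. A userspace sketch of the same layout; the struct fields and the alignment value are illustrative, not Open vSwitch's:

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define VPORT_ALIGN     8   /* illustrative alignment */
#define ALIGN_UP(x, a)  (((x) + (a) - 1) & ~((size_t)(a) - 1))

/* Hypothetical public part of the object. */
struct vport {
    int ifindex;
    char name[16];
};

/* The private area starts right after the aligned public struct. */
static void *vport_priv(struct vport *vport)
{
    return (char *)vport + ALIGN_UP(sizeof(struct vport), VPORT_ALIGN);
}

static struct vport *vport_alloc(size_t priv_size)
{
    size_t alloc_size = ALIGN_UP(sizeof(struct vport), VPORT_ALIGN) + priv_size;

    return calloc(1, alloc_size);   /* calloc mirrors kzalloc's zeroing */
}

int main(void)
{
    struct vport *vp = vport_alloc(64);

    if (!vp)
        return 1;
    memset(vport_priv(vp), 0xab, 64);   /* driver-private scratch space */
    printf("priv area at offset %td\n",
           (char *)vport_priv(vp) - (char *)vp);
    free(vp);
    return 0;
}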
/Linux-v5.4/drivers/md/
  dm-stats.c
    81  static bool __check_shared_memory(size_t alloc_size)   in __check_shared_memory() argument
    85  a = shared_memory_amount + alloc_size;   in __check_shared_memory()
    97  static bool check_shared_memory(size_t alloc_size)   in check_shared_memory() argument
    103  ret = __check_shared_memory(alloc_size);   in check_shared_memory()
    110  static bool claim_shared_memory(size_t alloc_size)   in claim_shared_memory() argument
    114  if (!__check_shared_memory(alloc_size)) {   in claim_shared_memory()
    119  shared_memory_amount += alloc_size;   in claim_shared_memory()
    126  static void free_shared_memory(size_t alloc_size)   in free_shared_memory() argument
    132  if (WARN_ON_ONCE(shared_memory_amount < alloc_size)) {   in free_shared_memory()
    138  shared_memory_amount -= alloc_size;   in free_shared_memory()
    [all …]
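dm-stats above keeps a global running total of the memory its statistics consume: claim_shared_memory() rechecks the limit and bumps the counter under a lock, and free_shared_memory() subtracts it again, warning on underflow. A simplified userspace sketch of that accounting; the 16 MiB cap and the pthread mutex stand in for the module's own limit and lock:

#include <pthread.h>
#include <stdbool.h>
#include <stdio.h>

#define SHARED_MEMORY_LIMIT (16UL << 20)    /* illustrative 16 MiB cap */

static pthread_mutex_t shared_memory_mutex = PTHREAD_MUTEX_INITIALIZER;
static size_t shared_memory_amount;

/* Caller must hold shared_memory_mutex. */
static bool __check_shared_memory(size_t alloc_size)
{
    return shared_memory_amount + alloc_size <= SHARED_MEMORY_LIMIT;
}

static bool claim_shared_memory(size_t alloc_size)
{
    bool ok;

    pthread_mutex_lock(&shared_memory_mutex);
    ok = __check_shared_memory(alloc_size);
    if (ok)
        shared_memory_amount += alloc_size;
    pthread_mutex_unlock(&shared_memory_mutex);
    return ok;
}

static void free_shared_memory(size_t alloc_size)
{
    pthread_mutex_lock(&shared_memory_mutex);
    if (shared_memory_amount < alloc_size)
        fprintf(stderr, "shared memory accounting underflow\n");
    else
        shared_memory_amount -= alloc_size;
    pthread_mutex_unlock(&shared_memory_mutex);
}

int main(void)
{
    if (claim_shared_memory(4096)) {
        printf("claimed 4096 bytes, total %zu\n", shared_memory_amount);
        free_shared_memory(4096);
    }
    return 0;
}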
/Linux-v5.4/tools/lib/api/fs/
  fs.c
    360  size_t size = 0, alloc_size = 0;   in filename__read_str() local
    370  if (size == alloc_size) {   in filename__read_str()
    371  alloc_size += BUFSIZ;   in filename__read_str()
    372  nbf = realloc(bf, alloc_size);   in filename__read_str()
    381  n = read(fd, bf + size, alloc_size - size);   in filename__read_str()
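filename__read_str() above reads a file of unknown length by growing its buffer in BUFSIZ steps with realloc() and reading into the unused tail until read() returns 0. A standalone sketch of the same loop, with error handling trimmed to the essentials:

#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <fcntl.h>

/* Read the whole file into a heap buffer, growing it BUFSIZ bytes at a time. */
static char *read_str(const char *path, size_t *sizep)
{
    size_t size = 0, alloc_size = 0;
    char *bf = NULL, *nbf;
    ssize_t n;
    int fd = open(path, O_RDONLY);

    if (fd < 0)
        return NULL;

    do {
        if (size == alloc_size) {
            alloc_size += BUFSIZ;
            nbf = realloc(bf, alloc_size);
            if (!nbf) {
                free(bf);
                close(fd);
                return NULL;
            }
            bf = nbf;
        }
        n = read(fd, bf + size, alloc_size - size);
        if (n > 0)
            size += (size_t)n;
    } while (n > 0);

    close(fd);
    if (n < 0) {
        free(bf);
        return NULL;
    }
    *sizep = size;
    return bf;
}

int main(void)
{
    size_t size;
    char *s = read_str("/proc/version", &size);

    if (s) {
        fwrite(s, 1, size, stdout);
        free(s);
    }
    return 0;
}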
/Linux-v5.4/drivers/cpufreq/
  cpufreq_stats.c
    173  unsigned int alloc_size;   in cpufreq_stats_create_table() local
    188  alloc_size = count * sizeof(int) + count * sizeof(u64);   in cpufreq_stats_create_table()
    190  alloc_size += count * count * sizeof(int);   in cpufreq_stats_create_table()
    193  stats->time_in_state = kzalloc(alloc_size, GFP_KERNEL);   in cpufreq_stats_create_table()
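cpufreq_stats_create_table() above makes one allocation sized for the per-state residency counters plus the frequency table (and a count * count transition table, line 190), then carves the arrays out of that single buffer. A hedged sketch of the sizing and carving; the struct layout is illustrative and the transition table is left out:

#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>

/* Illustrative stats object: one u64 residency counter and one frequency
 * value per state, carved out of a single allocation. */
struct freq_stats {
    unsigned int count;
    uint64_t *time_in_state;
    unsigned int *freq_table;
};

static int freq_stats_init(struct freq_stats *stats, unsigned int count)
{
    size_t alloc_size = count * sizeof(uint64_t) + count * sizeof(unsigned int);
    void *buf = calloc(1, alloc_size);

    if (!buf)
        return -1;

    stats->count = count;
    stats->time_in_state = buf;     /* first count u64 counters */
    stats->freq_table = (unsigned int *)(stats->time_in_state + count);
    return 0;
}

int main(void)
{
    struct freq_stats stats;

    if (freq_stats_init(&stats, 4))
        return 1;
    stats.freq_table[0] = 1800000;  /* kHz, example */
    stats.time_in_state[0] = 12345;
    printf("freq %u kHz: %llu ticks\n", stats.freq_table[0],
           (unsigned long long)stats.time_in_state[0]);
    free(stats.time_in_state);      /* frees the whole carved buffer */
    return 0;
}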
/Linux-v5.4/include/linux/
  swiotlb.h
    51  size_t alloc_size,
    58  size_t alloc_size,
/Linux-v5.4/arch/powerpc/kernel/
  eeh_pe.c
    49  size_t alloc_size;   in eeh_pe_alloc() local
    51  alloc_size = sizeof(struct eeh_pe);   in eeh_pe_alloc()
    53  alloc_size = ALIGN(alloc_size, cache_line_size());   in eeh_pe_alloc()
    54  alloc_size += eeh_pe_aux_size;   in eeh_pe_alloc()
    58  pe = kzalloc(alloc_size, GFP_KERNEL);   in eeh_pe_alloc()
/Linux-v5.4/drivers/iommu/
  dma-iommu.c
    925  size_t alloc_size = PAGE_ALIGN(size);   in __iommu_dma_free() local
    926  int count = alloc_size >> PAGE_SHIFT;   in __iommu_dma_free()
    931  dma_free_from_pool(cpu_addr, alloc_size))   in __iommu_dma_free()
    942  dma_common_free_remap(cpu_addr, alloc_size);   in __iommu_dma_free()
    951  dma_free_contiguous(dev, page, alloc_size);   in __iommu_dma_free()
    965  size_t alloc_size = PAGE_ALIGN(size);   in iommu_dma_alloc_pages() local
    970  page = dma_alloc_contiguous(dev, alloc_size, gfp);   in iommu_dma_alloc_pages()
    972  page = alloc_pages_node(node, gfp, get_order(alloc_size));   in iommu_dma_alloc_pages()
    979  cpu_addr = dma_common_contiguous_remap(page, alloc_size,   in iommu_dma_alloc_pages()
    991  memset(cpu_addr, 0, alloc_size);   in iommu_dma_alloc_pages()
    [all …]
/Linux-v5.4/drivers/mtd/parsers/
  cmdlinepart.c
    168  int alloc_size;   in newpart() local
    171  alloc_size = *num_parts * sizeof(struct mtd_partition) +   in newpart()
    174  parts = kzalloc(alloc_size, GFP_KERNEL);   in newpart()
/Linux-v5.4/drivers/i2c/busses/
  i2c-cros-ec-tunnel.c
    181  int alloc_size;   in ec_i2c_xfer() local
    198  alloc_size = max(request_len, response_len);   in ec_i2c_xfer()
    199  msg = kmalloc(sizeof(*msg) + alloc_size, GFP_KERNEL);   in ec_i2c_xfer()
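ec_i2c_xfer() above sizes one buffer for whichever direction is larger, max(request_len, response_len), and appends it to the message header in a single kmalloc(). A userspace sketch of that pattern with a flexible array member; the struct layout is hypothetical, not the cros_ec host-command format:

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define MAX(a, b)   ((a) > (b) ? (a) : (b))

/* Hypothetical command header followed by an in/out payload area. */
struct ec_msg {
    unsigned int command;
    size_t insize;          /* bytes expected back */
    size_t outsize;         /* bytes sent */
    unsigned char data[];   /* shared request/response buffer */
};

static struct ec_msg *ec_msg_alloc(size_t request_len, size_t response_len)
{
    size_t alloc_size = MAX(request_len, response_len);
    struct ec_msg *msg = calloc(1, sizeof(*msg) + alloc_size);

    if (msg) {
        msg->outsize = request_len;
        msg->insize = response_len;
    }
    return msg;
}

int main(void)
{
    struct ec_msg *msg = ec_msg_alloc(16, 64);

    if (!msg)
        return 1;
    memset(msg->data, 0, msg->insize);  /* payload area covers the larger side */
    printf("payload capacity: %zu bytes\n", MAX(msg->outsize, msg->insize));
    free(msg);
    return 0;
}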
/Linux-v5.4/drivers/usb/host/
  ehci-dbg.c
    339  size_t alloc_size;   member
    482  size = buf->alloc_size;   in fill_async_buffer()
    524  size = buf->alloc_size;   in fill_bandwidth_buffer()
    640  size = buf->alloc_size;   in fill_periodic_buffer()
    736  return buf->alloc_size - size;   in fill_periodic_buffer()
    768  size = buf->alloc_size;   in fill_registers_buffer()
    912  return buf->alloc_size - size;   in fill_registers_buffer()
    926  buf->alloc_size = PAGE_SIZE;   in alloc_buffer()
    937  buf->output_buf = vmalloc(buf->alloc_size);   in fill_buffer()
    1013  buf->alloc_size = (sizeof(void *) == 4 ? 6 : 8) * PAGE_SIZE;   in debug_periodic_open()
/Linux-v5.4/drivers/gpu/drm/ttm/
  ttm_page_alloc.c
    88  unsigned alloc_size;   member
    181  m->options.alloc_size = val;   in ttm_pool_store()
    199  val = m->options.alloc_size;   in ttm_pool_show()
    593  unsigned alloc_size = _manager->options.alloc_size;   in ttm_page_pool_fill_locked() local
    603  cstate, alloc_size, 0);   in ttm_page_pool_fill_locked()
    609  pool->npages += alloc_size;   in ttm_page_pool_fill_locked()
    995  _manager->options.alloc_size = NUM_PAGES_TO_ALLOC;   in ttm_page_alloc_init()
/Linux-v5.4/net/ieee802154/
  core.c
    101  size_t alloc_size;   in wpan_phy_new() local
    103  alloc_size = sizeof(*rdev) + priv_size;   in wpan_phy_new()
    104  rdev = kzalloc(alloc_size, GFP_KERNEL);   in wpan_phy_new()
/Linux-v5.4/drivers/gpu/drm/amd/amdgpu/
  amdgpu_ib.c
    133  uint32_t status = 0, alloc_size;   in amdgpu_ib_schedule() local
    163  alloc_size = ring->funcs->emit_frame_size + num_ibs *   in amdgpu_ib_schedule()
    166  r = amdgpu_ring_alloc(ring, alloc_size);   in amdgpu_ib_schedule()
/Linux-v5.4/arch/x86/kernel/
  ldt.c
    68  unsigned int alloc_size;   in alloc_ldt_struct() local
    78  alloc_size = num_entries * LDT_ENTRY_SIZE;   in alloc_ldt_struct()
    86  if (alloc_size > PAGE_SIZE)   in alloc_ldt_struct()
    87  new_ldt->entries = vzalloc(alloc_size);   in alloc_ldt_struct()
/Linux-v5.4/drivers/dca/
  dca-core.c
    312  int alloc_size;   in alloc_dca_provider() local
    314  alloc_size = (sizeof(*dca) + priv_size);   in alloc_dca_provider()
    315  dca = kzalloc(alloc_size, GFP_KERNEL);   in alloc_dca_provider()
/Linux-v5.4/drivers/net/phy/
  mdio_bus.c
    148  size_t alloc_size;   in mdiobus_alloc_size() local
    153  alloc_size = aligned_size + size;   in mdiobus_alloc_size()
    155  alloc_size = sizeof(*bus);   in mdiobus_alloc_size()
    157  bus = kzalloc(alloc_size, GFP_KERNEL);   in mdiobus_alloc_size()