/Linux-v5.10/lib/ |
D | stackdepot.c |
      105  u32 hash, void **prealloc, gfp_t alloc_flags)          in depot_alloc_stack() argument
      237  gfp_t alloc_flags)                                      in stack_depot_save() argument
      276  alloc_flags &= ~GFP_ZONEMASK;                           in stack_depot_save()
      277  alloc_flags &= (GFP_ATOMIC | GFP_KERNEL);               in stack_depot_save()
      278  alloc_flags |= __GFP_NOWARN;                            in stack_depot_save()
      279  page = alloc_pages(alloc_flags, STACK_ALLOC_ORDER);     in stack_depot_save()
      290  hash, &prealloc, alloc_flags);                          in stack_depot_save()
|
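The stackdepot.c hits above show the depot sanitizing the caller's gfp mask before it calls alloc_pages() for new depot pool pages: zone placement bits are stripped, only the GFP_ATOMIC/GFP_KERNEL context bits are kept, and __GFP_NOWARN is added so a failure stays quiet. A minimal userspace sketch of that masking, using illustrative stand-in flag values rather than the kernel's real GFP encodings:

    #include <stdint.h>
    #include <stdio.h>

    typedef uint32_t gfp_t;

    #define GFP_ZONEMASK  0x000fu  /* stand-in: zone placement bits */
    #define GFP_ATOMIC    0x0020u  /* stand-in: caller cannot sleep */
    #define GFP_KERNEL    0x00c0u  /* stand-in: normal sleeping allocation */
    #define __GFP_NOWARN  0x0200u  /* stand-in: suppress failure warnings */

    /* Mirror of the masking shown in stack_depot_save(): zone hints make no
     * sense for the depot's own pool, only the atomic/kernel context bits are
     * kept, and allocation failure is handled without a warning. */
    static gfp_t sanitize_depot_flags(gfp_t alloc_flags)
    {
        alloc_flags &= ~GFP_ZONEMASK;
        alloc_flags &= (GFP_ATOMIC | GFP_KERNEL);
        alloc_flags |= __GFP_NOWARN;
        return alloc_flags;
    }

    int main(void)
    {
        /* Bogus zone bits (0x3) get stripped, __GFP_NOWARN gets added. */
        printf("0x%04x\n", (unsigned int)sanitize_depot_flags(GFP_KERNEL | 0x3u));
        return 0;
    }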
/Linux-v5.10/mm/ |
D | page_alloc.c |
      2281  unsigned int alloc_flags)                                             in prep_new_page() argument
      2297  if (alloc_flags & ALLOC_NO_WATERMARKS)                                in prep_new_page()
      2517  unsigned int alloc_flags, int start_type, bool whole_block)           in steal_suitable_fallback() argument
      2544  if (alloc_flags & ALLOC_KSWAPD)                                       in steal_suitable_fallback()
      2760  unsigned int alloc_flags)                                             in __rmqueue_fallback() argument
      2774  if (alloc_flags & ALLOC_NOFRAGMENT)                                   in __rmqueue_fallback()
      2826  steal_suitable_fallback(zone, page, alloc_flags, start_migratetype,   in __rmqueue_fallback()
      2842  unsigned int alloc_flags)                                             in __rmqueue() argument
      2852  if (alloc_flags & ALLOC_CMA &&                                        in __rmqueue()
      2863  if (alloc_flags & ALLOC_CMA)                                          in __rmqueue()
      [all …]
|
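The page_alloc.c hits test an allocator-internal alloc_flags word (ALLOC_NO_WATERMARKS, ALLOC_KSWAPD, ALLOC_NOFRAGMENT, ALLOC_CMA) rather than a gfp mask; each policy is a single bit that the hot paths probe with a bitwise AND. A small stand-alone sketch of that bit-test style, with stand-in bit values and hypothetical helper names:

    #include <stdbool.h>
    #include <stdio.h>

    #define ALLOC_NO_WATERMARKS  0x04u   /* stand-in: may ignore zone watermarks */
    #define ALLOC_CMA            0x80u   /* stand-in: CMA pageblocks are usable */

    /* Callers holding ALLOC_NO_WATERMARKS may dip below the zone watermarks,
     * so no watermark check is needed for them. */
    static bool must_check_watermarks(unsigned int alloc_flags)
    {
        return !(alloc_flags & ALLOC_NO_WATERMARKS);
    }

    /* Only allocations that set ALLOC_CMA are allowed to take CMA pages. */
    static bool may_use_cma(unsigned int alloc_flags)
    {
        return alloc_flags & ALLOC_CMA;
    }

    int main(void)
    {
        unsigned int alloc_flags = ALLOC_CMA;

        printf("check watermarks: %d, may use CMA: %d\n",
               must_check_watermarks(alloc_flags), may_use_cma(alloc_flags));
        return 0;
    }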
D | compaction.c |
      2081  unsigned int alloc_flags,                                                in __compaction_suitable() argument
      2090  watermark = wmark_pages(zone, alloc_flags & ALLOC_WMARK_MASK);           in __compaction_suitable()
      2096  alloc_flags))                                                            in __compaction_suitable()
      2124  unsigned int alloc_flags,                                                in compaction_suitable() argument
      2130  ret = __compaction_suitable(zone, order, alloc_flags, highest_zoneidx,   in compaction_suitable()
      2162  int alloc_flags)                                                         in compaction_zonelist_suitable() argument
      2184  compact_result = __compaction_suitable(zone, order, alloc_flags,         in compaction_zonelist_suitable()
      2215  ret = compaction_suitable(cc->zone, cc->order, cc->alloc_flags,          in compact_zone()
      2407  unsigned int alloc_flags, int highest_zoneidx,                           in compact_zone_order() argument
      2418  .alloc_flags = alloc_flags,                                              in compact_zone_order()
      [all …]
|
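The compaction.c hits reuse the low bits of the compactor's alloc_flags as an index selecting which per-zone watermark the suitability check compares against: wmark_pages(zone, alloc_flags & ALLOC_WMARK_MASK). A compile-and-run sketch of that index-in-the-low-bits trick, with stand-in values and a simplified zone structure:

    #include <stdio.h>

    #define ALLOC_WMARK_MIN   0u
    #define ALLOC_WMARK_LOW   1u
    #define ALLOC_WMARK_HIGH  2u
    #define ALLOC_WMARK_MASK  0x3u   /* stand-in width of the index field */

    struct zone_sketch {
        unsigned long watermark[3];   /* min, low, high, in pages */
    };

    /* The watermark index lives in the low bits of alloc_flags; higher bits
     * carry unrelated policy flags and are masked off here. */
    static unsigned long wmark_pages_sketch(const struct zone_sketch *z,
                                            unsigned int alloc_flags)
    {
        return z->watermark[alloc_flags & ALLOC_WMARK_MASK];
    }

    int main(void)
    {
        struct zone_sketch z = { { 128, 256, 384 } };

        /* The extra 0x100 policy bit does not disturb the index. */
        printf("%lu\n", wmark_pages_sketch(&z, ALLOC_WMARK_LOW | 0x100u));
        return 0;
    }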
D | internal.h | 233 const unsigned int alloc_flags; /* alloc flags of a direct compactor */ member
|
/Linux-v5.10/include/linux/ |
D | compaction.h |
       94  unsigned int order, unsigned int alloc_flags,
       99  unsigned int alloc_flags, int highest_zoneidx);
      180  int alloc_flags);
      192  int alloc_flags, int highest_zoneidx)   in compaction_suitable() argument
|
D | mmzone.h |
      859  int highest_zoneidx, unsigned int alloc_flags,
      863  unsigned int alloc_flags);
|
/Linux-v5.10/drivers/md/ |
D | dm-zoned-reclaim.c |
      286  int alloc_flags = DMZ_ALLOC_SEQ;                                           in dmz_reclaim_rnd_data() local
      292  alloc_flags | DMZ_ALLOC_RECLAIM);                                          in dmz_reclaim_rnd_data()
      293  if (!szone && alloc_flags == DMZ_ALLOC_SEQ && dmz_nr_cache_zones(zmd)) {   in dmz_reclaim_rnd_data()
      294  alloc_flags = DMZ_ALLOC_RND;                                               in dmz_reclaim_rnd_data()
|
D | dm-zoned-metadata.c |
      2063  int alloc_flags = zmd->nr_cache ? DMZ_ALLOC_CACHE : DMZ_ALLOC_RND;   in dmz_get_chunk_mapping() local
      2078  dzone = dmz_alloc_zone(zmd, 0, alloc_flags);                         in dmz_get_chunk_mapping()
      2175  int alloc_flags = zmd->nr_cache ? DMZ_ALLOC_CACHE : DMZ_ALLOC_RND;   in dmz_get_chunk_buffer()  local
      2184  bzone = dmz_alloc_zone(zmd, 0, alloc_flags);                         in dmz_get_chunk_buffer()
|
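Both dm-zoned hits above pick a zone-allocation policy up front (sequential vs. random vs. cache zones), and the reclaim path switches policy when the preferred class has no free zone. A hypothetical stand-alone sketch of that retry-with-a-different-flag pattern; the flag values, types and alloc_zone() stub stand in for the driver's dmz_alloc_zone() machinery, and the retry loop itself is inferred from the reassignment at line 294 rather than shown in the excerpt:

    #include <stdio.h>

    #define DMZ_ALLOC_RND      0x1   /* stand-in flag values */
    #define DMZ_ALLOC_SEQ      0x2
    #define DMZ_ALLOC_RECLAIM  0x8

    struct dm_zone { int id; };

    /* Stub allocator: pretend every sequential zone is currently busy. */
    static struct dm_zone *alloc_zone(int flags)
    {
        static struct dm_zone rnd_zone = { 42 };

        if (flags & DMZ_ALLOC_SEQ)
            return NULL;
        return &rnd_zone;
    }

    /* Prefer a sequential zone for reclaim; if none is free and the setup
     * has cache zones, retry the allocation with a random zone instead. */
    static struct dm_zone *get_reclaim_target(int have_cache_zones)
    {
        int alloc_flags = DMZ_ALLOC_SEQ;
        struct dm_zone *zone;

    again:
        zone = alloc_zone(alloc_flags | DMZ_ALLOC_RECLAIM);
        if (!zone && alloc_flags == DMZ_ALLOC_SEQ && have_cache_zones) {
            alloc_flags = DMZ_ALLOC_RND;
            goto again;
        }
        return zone;
    }

    int main(void)
    {
        struct dm_zone *zone = get_reclaim_target(1);

        printf("got zone %d\n", zone ? zone->id : -1);
        return 0;
    }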
/Linux-v5.10/drivers/gpu/drm/amd/amdgpu/ |
D | amdgpu_amdkfd_gpuvm.c |
       406  bool coherent = mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_COHERENT;   in get_pte_flags()
       410  if (mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_WRITABLE)               in get_pte_flags()
       412  if (mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_EXECUTABLE)             in get_pte_flags()
       417  if (mem->alloc_flags & KFD_IOC_ALLOC_MEM_FLAGS_VRAM) {                 in get_pte_flags()
      1158  u64 alloc_flags;                                                        in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu() local
      1166  alloc_flags = AMDGPU_GEM_CREATE_VRAM_WIPE_ON_RELEASE;                   in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
      1167  alloc_flags |= (flags & KFD_IOC_ALLOC_MEM_FLAGS_PUBLIC) ?               in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
      1172  alloc_flags = 0;                                                        in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
      1176  alloc_flags = 0;                                                        in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
      1185  alloc_flags = 0;                                                        in amdgpu_amdkfd_gpuvm_alloc_memory_of_gpu()
      [all …]
|
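In the amdgpu_amdkfd_gpuvm.c hits, the user-facing KFD allocation flags stored in mem->alloc_flags are tested bit by bit to derive GPU page-table attributes for the mapping, while the allocation path separately builds GEM creation flags from them. A stand-alone sketch of the first translation; the flag and attribute values below are illustrative stand-ins, not the driver's real encodings:

    #include <stdint.h>
    #include <stdio.h>

    #define ALLOC_MEM_FLAGS_WRITABLE    (1u << 0)   /* stand-in user-facing flags */
    #define ALLOC_MEM_FLAGS_EXECUTABLE  (1u << 1)
    #define ALLOC_MEM_FLAGS_COHERENT    (1u << 2)

    #define PTE_READABLE   (1u << 0)   /* stand-in page-table attribute bits */
    #define PTE_WRITEABLE  (1u << 1)
    #define PTE_EXECUTABLE (1u << 2)
    #define PTE_SNOOPED    (1u << 3)

    /* Translate the allocation flags of a buffer into the attributes its GPU
     * mapping should carry; mappings are always at least readable. */
    static uint32_t get_pte_flags_sketch(uint32_t alloc_flags)
    {
        uint32_t pte_flags = PTE_READABLE;

        if (alloc_flags & ALLOC_MEM_FLAGS_WRITABLE)
            pte_flags |= PTE_WRITEABLE;
        if (alloc_flags & ALLOC_MEM_FLAGS_EXECUTABLE)
            pte_flags |= PTE_EXECUTABLE;
        if (alloc_flags & ALLOC_MEM_FLAGS_COHERENT)
            pte_flags |= PTE_SNOOPED;   /* cache-coherent access */

        return pte_flags;
    }

    int main(void)
    {
        printf("0x%x\n", get_pte_flags_sketch(ALLOC_MEM_FLAGS_WRITABLE |
                                              ALLOC_MEM_FLAGS_COHERENT));
        return 0;
    }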
D | amdgpu_amdkfd.h | 61 uint32_t alloc_flags; member
|
/Linux-v5.10/fs/btrfs/ |
D | block-group.c |
      2224  u64 alloc_flags;                                                            in btrfs_inc_block_group_ro() local
      2255  alloc_flags = btrfs_get_alloc_profile(fs_info, cache->flags);               in btrfs_inc_block_group_ro()
      2256  if (alloc_flags != cache->flags) {                                          in btrfs_inc_block_group_ro()
      2257  ret = btrfs_chunk_alloc(trans, alloc_flags,                                 in btrfs_inc_block_group_ro()
      2275  alloc_flags = btrfs_get_alloc_profile(fs_info, cache->space_info->flags);   in btrfs_inc_block_group_ro()
      2276  ret = btrfs_chunk_alloc(trans, alloc_flags, CHUNK_ALLOC_FORCE);             in btrfs_inc_block_group_ro()
      2282  alloc_flags = btrfs_get_alloc_profile(fs_info, cache->flags);               in btrfs_inc_block_group_ro()
      2284  check_system_chunk(trans, alloc_flags);                                     in btrfs_inc_block_group_ro()
      3062  u64 alloc_flags = btrfs_get_alloc_profile(trans->fs_info, type);            in btrfs_force_chunk_alloc() local
      3064  return btrfs_chunk_alloc(trans, alloc_flags, CHUNK_ALLOC_FORCE);            in btrfs_force_chunk_alloc()
|
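The block-group.c hits recompute the currently desired allocation profile with btrfs_get_alloc_profile() and, when it no longer matches the block group's recorded flags, call btrfs_chunk_alloc() (optionally with CHUNK_ALLOC_FORCE) before continuing. A skeletal sketch of that recompute-and-compare step; the helpers and values here are hypothetical stubs standing in for the btrfs functions:

    #include <stdint.h>
    #include <stdio.h>

    typedef uint64_t u64;

    struct block_group_sketch {
        u64 flags;   /* profile the group was created with */
    };

    /* Stub: pretend the target profile changed since the group was created. */
    static u64 get_alloc_profile(u64 flags)
    {
        (void)flags;
        return 0x20;   /* stand-in profile bits */
    }

    /* Stub chunk allocator: always succeeds. */
    static int chunk_alloc(u64 alloc_flags, int force)
    {
        printf("allocating chunk with profile 0x%llx (force=%d)\n",
               (unsigned long long)alloc_flags, force);
        return 0;
    }

    static int make_block_group_ro(struct block_group_sketch *cache)
    {
        u64 alloc_flags = get_alloc_profile(cache->flags);
        int ret = 0;

        /* Profile changed since this group was created: make sure a chunk
         * with the new profile exists before flipping the group read-only. */
        if (alloc_flags != cache->flags)
            ret = chunk_alloc(alloc_flags, 0);

        return ret;
    }

    int main(void)
    {
        struct block_group_sketch cache = { .flags = 0x10 };

        return make_block_group_ro(&cache);
    }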
/Linux-v5.10/drivers/base/regmap/ |
D | internal.h | 60 gfp_t alloc_flags; member
|
D | regmap.c |
       784  map->alloc_flags = GFP_ATOMIC;                                  in __regmap_init()
       786  map->alloc_flags = GFP_KERNEL;                                  in __regmap_init()
      2253  wval = kmemdup(val, val_count * val_bytes, map->alloc_flags);   in regmap_bulk_write()
|
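The regmap.c hits show the decide-once pattern: at init time the map records whether later allocations may sleep, and helpers such as regmap_bulk_write() reuse that stored gfp value in kmemdup(). A minimal userspace sketch of the same idea; the GFP values and dup_buffer() are stand-ins, since malloc() has no atomic/sleeping distinction:

    #include <stdlib.h>
    #include <string.h>

    typedef unsigned int gfp_t;
    #define GFP_ATOMIC  0x1u   /* stand-in: caller cannot sleep */
    #define GFP_KERNEL  0x2u   /* stand-in: caller may sleep */

    struct regmap_sketch {
        int fast_io;          /* accessed under a spinlock when set */
        gfp_t alloc_flags;    /* allocation context for later helpers */
    };

    /* Decide the allocation context once, when the map is created. */
    static void regmap_init_sketch(struct regmap_sketch *map, int fast_io)
    {
        map->fast_io = fast_io;
        map->alloc_flags = fast_io ? GFP_ATOMIC : GFP_KERNEL;
    }

    /* Stand-in for kmemdup(): the gfp argument is only carried through so
     * the sketch compiles; it does not change malloc()'s behaviour. */
    static void *dup_buffer(const void *src, size_t len, gfp_t gfp)
    {
        void *p = malloc(len);

        (void)gfp;
        return p ? memcpy(p, src, len) : NULL;
    }

    int main(void)
    {
        struct regmap_sketch map;
        unsigned char regs[4] = { 1, 2, 3, 4 };

        regmap_init_sketch(&map, 1);
        free(dup_buffer(regs, sizeof(regs), map.alloc_flags));
        return 0;
    }

The mlx5 cmd.c hit below makes the same decide-once choice (completion callback present → GFP_ATOMIC, otherwise GFP_KERNEL) right before its kzalloc(), and the s390 KVM hit starts from GFP_KERNEL and conditionally ORs in GFP_DMA before get_zeroed_page().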
/Linux-v5.10/drivers/iommu/ |
D | dma-iommu.c |
      544  gfp_t alloc_flags = gfp;                            in __iommu_dma_alloc_pages() local
      548  alloc_flags |= __GFP_NORETRY;                       in __iommu_dma_alloc_pages()
      549  page = alloc_pages_node(nid, alloc_flags, order);   in __iommu_dma_alloc_pages()
|
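The dma-iommu.c hit copies the caller's gfp into a local alloc_flags and ORs in __GFP_NORETRY for the opportunistic high-order attempts, so a failure is cheap and the code can fall back to smaller orders. A stand-alone sketch of that fail-fast-then-fall-back pattern; the flag value and the page-allocator stub are illustrative stand-ins for alloc_pages_node():

    #include <stdio.h>

    typedef unsigned int gfp_t;
    #define __GFP_NORETRY  0x1000u   /* stand-in: give up early, no heavy reclaim */

    struct page { int order; };

    /* Stub page allocator: pretend only order-0 requests ever succeed. */
    static struct page *alloc_pages_stub(gfp_t gfp, unsigned int order)
    {
        static struct page base_page = { 0 };

        (void)gfp;
        return order ? NULL : &base_page;
    }

    /* Try the largest order first; high-order attempts are best effort, so
     * OR in __GFP_NORETRY and drop to a smaller order when they fail. */
    static struct page *alloc_high_order_first(gfp_t gfp, unsigned int max_order)
    {
        for (unsigned int order = max_order; ; order--) {
            gfp_t alloc_flags = gfp;
            struct page *page;

            if (order)
                alloc_flags |= __GFP_NORETRY;

            page = alloc_pages_stub(alloc_flags, order);
            if (page || !order)
                return page;   /* success, or order-0 also failed */
        }
    }

    int main(void)
    {
        struct page *page = alloc_high_order_first(0, 4);

        printf("%s\n", page ? "allocated order-0 page" : "allocation failed");
        return 0;
    }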
/Linux-v5.10/drivers/net/ethernet/mellanox/mlx5/core/ |
D | cmd.c |
      77  gfp_t alloc_flags = cbk ? GFP_ATOMIC : GFP_KERNEL;   in cmd_alloc_ent() local
      80  ent = kzalloc(sizeof(*ent), alloc_flags);            in cmd_alloc_ent()
|
/Linux-v5.10/arch/s390/kvm/ |
D | kvm-s390.c |
      2628  gfp_t alloc_flags = GFP_KERNEL;                                        in kvm_arch_init_vm() local
      2651  alloc_flags |= GFP_DMA;                                                in kvm_arch_init_vm()
      2654  kvm->arch.sca = (struct bsca_block *) get_zeroed_page(alloc_flags);   in kvm_arch_init_vm()
|