Searched refs:gfp_t (Results 1 – 25 of 1254) sorted by relevance


/Linux-v5.15/include/linux/
gfp.h
22 typedef unsigned int __bitwise gfp_t;
72 #define __GFP_DMA ((__force gfp_t)___GFP_DMA)
73 #define __GFP_HIGHMEM ((__force gfp_t)___GFP_HIGHMEM)
74 #define __GFP_DMA32 ((__force gfp_t)___GFP_DMA32)
75 #define __GFP_MOVABLE ((__force gfp_t)___GFP_MOVABLE) /* ZONE_MOVABLE allowed */
105 #define __GFP_RECLAIMABLE ((__force gfp_t)___GFP_RECLAIMABLE)
106 #define __GFP_WRITE ((__force gfp_t)___GFP_WRITE)
107 #define __GFP_HARDWALL ((__force gfp_t)___GFP_HARDWALL)
108 #define __GFP_THISNODE ((__force gfp_t)___GFP_THISNODE)
109 #define __GFP_ACCOUNT ((__force gfp_t)___GFP_ACCOUNT)
[all …]
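The gfp.h hits above are the zone and behaviour modifiers that make up a gfp_t bitmask. A minimal sketch of how a caller typically combines them (module-style code, not from the tree; the helper names and flag choices are illustrative):

	#include <linux/gfp.h>
	#include <linux/slab.h>

	/* GFP_KERNEL may sleep and reclaim; __GFP_ZERO asks for zeroed memory. */
	static void *alloc_zeroed_buffer(size_t len)
	{
		return kmalloc(len, GFP_KERNEL | __GFP_ZERO);
	}

	/* GFP_ATOMIC never sleeps, e.g. for allocations in interrupt context. */
	static void *alloc_in_irq(size_t len)
	{
		return kmalloc(len, GFP_ATOMIC);
	}

	/* Zone modifiers such as __GFP_DMA constrain which zone the page comes from. */
	static struct page *alloc_dma_page(void)
	{
		return alloc_pages(GFP_KERNEL | __GFP_DMA, 0);
	}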
slab.h
184 void * __must_check krealloc(const void *, size_t, gfp_t);
342 static __always_inline enum kmalloc_cache_type kmalloc_type(gfp_t flags) in kmalloc_type()
428 void *__kmalloc(size_t size, gfp_t flags) __assume_kmalloc_alignment __malloc;
429 void *kmem_cache_alloc(struct kmem_cache *, gfp_t flags) __assume_slab_alignment __malloc;
440 int kmem_cache_alloc_bulk(struct kmem_cache *, gfp_t, size_t, void **);
452 void *__kmalloc_node(size_t size, gfp_t flags, int node) __assume_kmalloc_alignment __malloc;
453 void *kmem_cache_alloc_node(struct kmem_cache *, gfp_t flags, int node) __assume_slab_alignment __m…
455 static __always_inline void *__kmalloc_node(size_t size, gfp_t flags, int node) in __kmalloc_node()
460 static __always_inline void *kmem_cache_alloc_node(struct kmem_cache *s, gfp_t flags, int node) in kmem_cache_alloc_node()
467 extern void *kmem_cache_alloc_trace(struct kmem_cache *, gfp_t, size_t) __assume_slab_alignment __m…
[all …]
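The slab.h hits show the allocator entry points that consume a gfp_t. A short sketch of the common calls, with hypothetical names (foo_cache, struct foo) and assuming module context:

	#include <linux/slab.h>
	#include <linux/errno.h>

	struct foo { int a, b; };

	static struct kmem_cache *foo_cache;

	static int foo_demo(void)
	{
		struct foo *f;
		void *buf, *bigger;

		/* Dedicated slab cache, then a GFP_KERNEL allocation from it. */
		foo_cache = kmem_cache_create("foo_cache", sizeof(struct foo), 0, 0, NULL);
		if (!foo_cache)
			return -ENOMEM;
		f = kmem_cache_alloc(foo_cache, GFP_KERNEL);

		/* Generic heap allocation plus a __must_check resize via krealloc(). */
		buf = kmalloc(64, GFP_KERNEL);
		bigger = krealloc(buf, 128, GFP_KERNEL);
		if (!bigger)
			kfree(buf);	/* krealloc() leaves the old buffer intact on failure */
		else
			kfree(bigger);

		if (f)
			kmem_cache_free(foo_cache, f);
		kmem_cache_destroy(foo_cache);
		return 0;
	}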
kmemleak.h
19 gfp_t gfp) __ref;
21 gfp_t gfp) __ref;
23 gfp_t gfp) __ref;
30 extern void kmemleak_scan_area(const void *ptr, size_t size, gfp_t gfp) __ref;
33 gfp_t gfp) __ref;
40 gfp_t gfp) in kmemleak_alloc_recursive()
63 gfp_t gfp) in kmemleak_alloc()
68 gfp_t gfp) in kmemleak_alloc_recursive()
72 gfp_t gfp) in kmemleak_alloc_percpu()
76 gfp_t gfp) in kmemleak_vmalloc()
[all …]
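The kmemleak hooks above take a gfp_t so kmemleak knows how it may allocate its own tracking metadata. A sketch of a custom page-based allocator registering its objects with kmemleak (my_pool_alloc/my_pool_free are hypothetical helpers):

	#include <linux/kmemleak.h>
	#include <linux/gfp.h>
	#include <linux/mm.h>

	/* Tell kmemleak about objects handed out from raw pages so they are
	 * scanned; gfp limits how kmemleak may allocate its metadata. */
	static void *my_pool_alloc(size_t size, gfp_t gfp)
	{
		void *obj = (void *)__get_free_pages(gfp, get_order(size));

		if (obj)
			kmemleak_alloc(obj, size, 1, gfp);
		return obj;
	}

	static void my_pool_free(void *obj, size_t size)
	{
		kmemleak_free(obj);
		free_pages((unsigned long)obj, get_order(size));
	}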
xarray.h
265 #define XA_FLAGS_LOCK_IRQ ((__force gfp_t)XA_LOCK_IRQ)
266 #define XA_FLAGS_LOCK_BH ((__force gfp_t)XA_LOCK_BH)
267 #define XA_FLAGS_TRACK_FREE ((__force gfp_t)4U)
268 #define XA_FLAGS_ZERO_BUSY ((__force gfp_t)8U)
269 #define XA_FLAGS_ALLOC_WRAPPED ((__force gfp_t)16U)
270 #define XA_FLAGS_ACCOUNT ((__force gfp_t)32U)
271 #define XA_FLAGS_MARK(mark) ((__force gfp_t)((1U << __GFP_BITS_SHIFT) << \
297 gfp_t xa_flags;
350 void *xa_store(struct xarray *, unsigned long index, void *entry, gfp_t);
353 void *entry, gfp_t);
[all …]
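The xarray.h hits show that XArray flags reuse the gfp_t type and that xa_store() takes a gfp_t because inserting an entry may need to allocate internal nodes. A minimal usage sketch (the array name and helpers are hypothetical):

	#include <linux/xarray.h>

	static DEFINE_XARRAY(my_objects);

	/* xa_store() returns the old entry or an encoded error; xa_err() unpacks it. */
	static int remember_object(unsigned long index, void *obj)
	{
		return xa_err(xa_store(&my_objects, index, obj, GFP_KERNEL));
	}

	static void *lookup_object(unsigned long index)
	{
		return xa_load(&my_objects, index);
	}

	static void forget_object(unsigned long index)
	{
		xa_erase(&my_objects, index);
	}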
kasan.h
97 void kasan_alloc_pages(struct page *page, unsigned int order, gfp_t flags);
113 unsigned int order, gfp_t flags) in kasan_alloc_pages()
249 void *object, gfp_t flags, bool init);
251 struct kmem_cache *s, void *object, gfp_t flags, bool init) in kasan_slab_alloc()
259 size_t size, gfp_t flags);
261 const void *object, size_t size, gfp_t flags) in kasan_kmalloc()
269 size_t size, gfp_t flags);
271 size_t size, gfp_t flags) in kasan_kmalloc_large()
279 size_t new_size, gfp_t flags);
281 size_t new_size, gfp_t flags) in kasan_krealloc()
[all …]
mempool.h
13 typedef void * (mempool_alloc_t)(gfp_t gfp_mask, void *pool_data);
36 gfp_t gfp_mask, int node_id);
44 gfp_t gfp_mask, int nid);
48 extern void *mempool_alloc(mempool_t *pool, gfp_t gfp_mask) __malloc;
56 void *mempool_alloc_slab(gfp_t gfp_mask, void *pool_data);
77 void *mempool_kmalloc(gfp_t gfp_mask, void *pool_data);
96 void *mempool_alloc_pages(gfp_t gfp_mask, void *pool_data);
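The mempool.h hits are the reserved-pool API built on top of the gfp_t-driven allocators. A sketch of a kmalloc-backed pool (MY_POOL_MIN, my_pool and the helpers are hypothetical):

	#include <linux/mempool.h>
	#include <linux/errno.h>

	#define MY_POOL_MIN	4	/* elements kept in reserve under memory pressure */

	static mempool_t *my_pool;

	static int my_pool_init(void)
	{
		/* Backed by kmalloc()/kfree() via the mempool_kmalloc() helper above. */
		my_pool = mempool_create_kmalloc_pool(MY_POOL_MIN, 256);
		return my_pool ? 0 : -ENOMEM;
	}

	static void my_pool_use(void)
	{
		/* With a sleeping gfp mask the allocation falls back to the
		 * pre-allocated reserve instead of failing. */
		void *buf = mempool_alloc(my_pool, GFP_KERNEL);

		/* ... use buf ... */
		mempool_free(buf, my_pool);
	}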
devcoredump.h
56 gfp_t gfp);
59 void *data, size_t datalen, gfp_t gfp,
65 size_t datalen, gfp_t gfp);
68 size_t datalen, gfp_t gfp) in dev_coredumpv()
75 void *data, size_t datalen, gfp_t gfp, in dev_coredumpm()
84 size_t datalen, gfp_t gfp) in dev_coredumpsg()
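The devcoredump.h hits pass a gfp_t through to the framework that creates the dump device. A sketch of handing a firmware snapshot to devcoredump (my_report_crash is a hypothetical driver helper):

	#include <linux/devcoredump.h>
	#include <linux/vmalloc.h>
	#include <linux/string.h>

	/* dev_coredumpv() takes ownership of the vmalloc'ed buffer and frees it
	 * once userspace has read (or ignored) the dump. */
	static void my_report_crash(struct device *dev, const void *snap, size_t len)
	{
		void *copy = vmalloc(len);

		if (!copy)
			return;
		memcpy(copy, snap, len);
		dev_coredumpv(dev, copy, len, GFP_KERNEL);
	}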
idr.h
32 #define IDR_RT_MARKER (ROOT_IS_IDR | (__force gfp_t) \
112 void idr_preload(gfp_t gfp_mask);
114 int idr_alloc(struct idr *, void *ptr, int start, int end, gfp_t);
116 unsigned long max, gfp_t);
117 int idr_alloc_cyclic(struct idr *, void *ptr, int start, int end, gfp_t);
255 int ida_alloc_range(struct ida *, unsigned int min, unsigned int max, gfp_t);
271 static inline int ida_alloc(struct ida *ida, gfp_t gfp) in ida_alloc()
289 static inline int ida_alloc_min(struct ida *ida, unsigned int min, gfp_t gfp) in ida_alloc_min()
307 static inline int ida_alloc_max(struct ida *ida, unsigned int max, gfp_t gfp) in ida_alloc_max()
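The idr.h hits cover ID allocation, where the gfp_t governs how the underlying bitmap or radix tree may grow. A minimal IDA sketch (the ida name and minor-number use case are illustrative):

	#include <linux/idr.h>

	static DEFINE_IDA(my_minor_ida);

	/* Returns an id in 0..255 or a negative errno. */
	static int my_get_minor(void)
	{
		return ida_alloc_max(&my_minor_ida, 255, GFP_KERNEL);
	}

	static void my_put_minor(int minor)
	{
		ida_free(&my_minor_ida, minor);
	}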
cpuset.h
68 extern bool __cpuset_node_allowed(int node, gfp_t gfp_mask);
70 static inline bool cpuset_node_allowed(int node, gfp_t gfp_mask) in cpuset_node_allowed()
77 static inline bool __cpuset_zone_allowed(struct zone *z, gfp_t gfp_mask) in __cpuset_zone_allowed()
82 static inline bool cpuset_zone_allowed(struct zone *z, gfp_t gfp_mask) in cpuset_zone_allowed()
209 static inline bool cpuset_node_allowed(int node, gfp_t gfp_mask) in cpuset_node_allowed()
214 static inline bool __cpuset_zone_allowed(struct zone *z, gfp_t gfp_mask) in __cpuset_zone_allowed()
219 static inline bool cpuset_zone_allowed(struct zone *z, gfp_t gfp_mask) in cpuset_zone_allowed()
swap.h
380 gfp_t gfp_mask, nodemask_t *mask);
384 gfp_t gfp_mask,
387 gfp_t gfp_mask, bool noswap,
448 gfp_t gfp, void **shadowp);
461 extern struct page *read_swap_cache_async(swp_entry_t, gfp_t,
464 extern struct page *__read_swap_cache_async(swp_entry_t, gfp_t,
467 extern struct page *swap_cluster_readahead(swp_entry_t entry, gfp_t flag,
469 extern struct page *swapin_readahead(swp_entry_t entry, gfp_t flag,
494 extern int add_swap_count_continuation(swp_entry_t, gfp_t);
575 static inline int add_swap_count_continuation(swp_entry_t swp, gfp_t gfp_mask) in add_swap_count_continuation()
[all …]
vmpressure.h
33 extern void vmpressure(gfp_t gfp, struct mem_cgroup *memcg, bool tree,
35 extern void vmpressure_prio(gfp_t gfp, struct mem_cgroup *memcg, int prio);
47 static inline void vmpressure(gfp_t gfp, struct mem_cgroup *memcg, bool tree, in vmpressure()
49 static inline void vmpressure_prio(gfp_t gfp, struct mem_cgroup *memcg, in vmpressure_prio()
fscache.h
205 gfp_t);
212 gfp_t);
213 extern int __fscache_alloc_page(struct fscache_cookie *, struct page *, gfp_t);
214 extern int __fscache_write_page(struct fscache_cookie *, struct page *, loff_t, gfp_t);
219 gfp_t);
574 gfp_t gfp) in fscache_read_or_alloc_page()
625 gfp_t gfp) in fscache_read_or_alloc_pages()
656 gfp_t gfp) in fscache_alloc_page()
707 gfp_t gfp) in fscache_write_page()
793 gfp_t gfp) in fscache_maybe_release_page()
/Linux-v5.15/include/net/sctp/
ulpevent.h
81 gfp_t gfp);
90 gfp_t gfp);
96 gfp_t gfp);
103 gfp_t gfp);
108 gfp_t gfp);
113 __u32 flags, gfp_t gfp);
116 const struct sctp_association *asoc, gfp_t gfp);
120 gfp_t gfp);
124 __u32 indication, gfp_t gfp);
127 const struct sctp_association *asoc, gfp_t gfp);
[all …]
stream_interleave.h
25 int len, __u8 flags, gfp_t gfp);
29 struct sctp_chunk *chunk, gfp_t gfp);
33 struct sctp_chunk *chunk, gfp_t gfp);
34 void (*start_pd)(struct sctp_ulpq *ulpq, gfp_t gfp);
35 void (*abort_pd)(struct sctp_ulpq *ulpq, gfp_t gfp);
auth.h
71 struct sctp_shared_key *sctp_auth_shkey_create(__u16 key_id, gfp_t gfp);
73 int sctp_auth_asoc_init_active_key(struct sctp_association *asoc, gfp_t gfp);
79 gfp_t gfp);
80 int sctp_auth_init_hmacs(struct sctp_endpoint *ep, gfp_t gfp);
94 struct sctp_shared_key *ep_key, gfp_t gfp);
110 int sctp_auth_init(struct sctp_endpoint *ep, gfp_t gfp);
ulpqueue.h
44 int sctp_ulpq_tail_data(struct sctp_ulpq *, struct sctp_chunk *, gfp_t);
50 void sctp_ulpq_renege(struct sctp_ulpq *, struct sctp_chunk *, gfp_t);
53 void sctp_ulpq_partial_delivery(struct sctp_ulpq *, gfp_t);
56 void sctp_ulpq_abort_pd(struct sctp_ulpq *, gfp_t);
stream_sched.h
22 gfp_t gfp);
28 int (*init_sid)(struct sctp_stream *stream, __u16 sid, gfp_t gfp);
48 __u16 value, gfp_t gfp);
54 int sctp_sched_init_sid(struct sctp_stream *stream, __u16 sid, gfp_t gfp);
/Linux-v5.15/net/wireless/
nl80211.h
64 const u8 *buf, size_t len, gfp_t gfp);
67 const u8 *buf, size_t len, gfp_t gfp,
73 bool reconnect, gfp_t gfp);
77 bool reconnect, gfp_t gfp);
80 const u8 *addr, gfp_t gfp);
83 const u8 *addr, gfp_t gfp);
87 gfp_t gfp);
90 struct cfg80211_roam_info *info, gfp_t gfp);
101 int key_id, const u8 *tsc, gfp_t gfp);
110 gfp_t gfp);
[all …]
/Linux-v5.15/tools/virtio/linux/
kernel.h
55 static inline void *kmalloc(size_t s, gfp_t gfp) in kmalloc()
61 static inline void *kmalloc_array(unsigned n, size_t s, gfp_t gfp) in kmalloc_array()
66 static inline void *kzalloc(size_t s, gfp_t gfp) in kzalloc()
74 static inline void *alloc_pages_exact(size_t s, gfp_t gfp) in alloc_pages_exact()
91 static inline void *krealloc(void *p, size_t s, gfp_t gfp) in krealloc()
97 static inline unsigned long __get_free_page(gfp_t gfp) in __get_free_page()
121 static inline void *krealloc_array(void *p, size_t new_n, size_t new_size, gfp_t gfp) in krealloc_array()
/Linux-v5.15/mm/
slab.h
89 struct kmem_cache *kmalloc_slab(size_t, gfp_t);
92 gfp_t kmalloc_fix_flags(gfp_t flags);
203 int __kmem_cache_alloc_bulk(struct kmem_cache *, gfp_t, size_t, void **);
249 gfp_t gfp, bool new_page);
273 size_t objects, gfp_t flags) in memcg_slab_pre_alloc_hook()
298 gfp_t flags, size_t size, in memcg_slab_post_alloc_hook()
378 struct kmem_cache *s, gfp_t gfp, in memcg_alloc_page_obj_cgroups()
390 size_t objects, gfp_t flags) in memcg_slab_pre_alloc_hook()
397 gfp_t flags, size_t size, in memcg_slab_post_alloc_hook()
421 gfp_t gfp) in account_slab_page()
[all …]
/Linux-v5.15/include/drm/
drm_managed.h
47 void *drmm_kmalloc(struct drm_device *dev, size_t size, gfp_t gfp) __malloc;
59 static inline void *drmm_kzalloc(struct drm_device *dev, size_t size, gfp_t gfp) in drmm_kzalloc()
76 size_t n, size_t size, gfp_t flags) in drmm_kmalloc_array()
98 size_t n, size_t size, gfp_t flags) in drmm_kcalloc()
103 char *drmm_kstrdup(struct drm_device *dev, const char *s, gfp_t gfp);
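The drm_managed.h hits are DRM's managed allocators, which take a gfp_t like their slab counterparts but tie the allocation's lifetime to the drm_device. A sketch assuming a driver with a struct drm_device (struct my_state and the helper are hypothetical):

	#include <drm/drm_managed.h>
	#include <linux/errno.h>

	struct my_state {
		int foo;
	};

	/* drmm_* allocations are freed automatically on the final drm_dev_put(),
	 * so there is no explicit kfree() on the teardown path. */
	static int my_driver_init_state(struct drm_device *drm)
	{
		struct my_state *state = drmm_kzalloc(drm, sizeof(*state), GFP_KERNEL);

		if (!state)
			return -ENOMEM;
		state->foo = 1;
		return 0;
	}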
/Linux-v5.15/security/apparmor/include/
label.h
60 gfp_t gfp);
281 bool aa_label_init(struct aa_label *label, int size, gfp_t gfp);
282 struct aa_label *aa_label_alloc(int size, struct aa_proxy *proxy, gfp_t gfp);
302 gfp_t gfp);
305 bool aa_update_label_name(struct aa_ns *ns, struct aa_label *label, gfp_t gfp);
315 int flags, gfp_t gfp);
317 struct aa_label *label, int flags, gfp_t gfp);
319 struct aa_label *label, int flags, gfp_t gfp);
321 struct aa_label *label, int flags, gfp_t gfp);
323 gfp_t gfp);
[all …]
/Linux-v5.15/include/linux/sched/
mm.h
156 static inline gfp_t current_gfp_context(gfp_t flags) in current_gfp_context()
179 extern void fs_reclaim_acquire(gfp_t gfp_mask);
180 extern void fs_reclaim_release(gfp_t gfp_mask);
184 static inline void fs_reclaim_acquire(gfp_t gfp_mask) { } in fs_reclaim_acquire()
185 static inline void fs_reclaim_release(gfp_t gfp_mask) { } in fs_reclaim_release()
196 static inline void might_alloc(gfp_t gfp_mask) in might_alloc()
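The sched/mm.h hits include might_alloc() and the fs_reclaim annotations, which let lockdep treat a gfp_t as a potential-sleep/reclaim point even when no allocation actually happens. A sketch of annotating a caching wrapper (my_cached_alloc is hypothetical):

	#include <linux/sched/mm.h>
	#include <linux/slab.h>

	/* Even when the cached object is returned, might_alloc() makes lockdep
	 * check every caller as if the slow-path allocation had run. */
	static void *my_cached_alloc(void *cached, size_t size, gfp_t gfp)
	{
		might_alloc(gfp);

		if (cached)
			return cached;
		return kmalloc(size, gfp);
	}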
/Linux-v5.15/arch/powerpc/include/asm/
pgalloc.h
8 static inline gfp_t pgtable_gfp_flags(struct mm_struct *mm, gfp_t gfp) in pgtable_gfp_flags()
15 static inline gfp_t pgtable_gfp_flags(struct mm_struct *mm, gfp_t gfp) in pgtable_gfp_flags()
/Linux-v5.15/drivers/gpu/drm/i915/
i915_gpu_error.h
226 i915_gpu_coredump_alloc(struct drm_i915_private *i915, gfp_t gfp);
229 intel_gt_coredump_alloc(struct intel_gt *gt, gfp_t gfp);
232 intel_engine_coredump_alloc(struct intel_engine_cs *engine, gfp_t gfp);
237 gfp_t gfp);
281 i915_gpu_coredump_alloc(struct drm_i915_private *i915, gfp_t gfp) in i915_gpu_coredump_alloc()
287 intel_gt_coredump_alloc(struct intel_gt *gt, gfp_t gfp) in intel_gt_coredump_alloc()
293 intel_engine_coredump_alloc(struct intel_engine_cs *engine, gfp_t gfp) in intel_engine_coredump_alloc()
301 gfp_t gfp) in intel_engine_coredump_add_request()
