Lines Matching refs:flags (include/linux/slab.h, Linux kernel slab allocator API)
141 unsigned int align, slab_flags_t flags,
145 slab_flags_t flags,
355 void *__kmalloc(size_t size, gfp_t flags) __assume_kmalloc_alignment __malloc;
356 void *kmem_cache_alloc(struct kmem_cache *, gfp_t flags) __assume_slab_alignment __malloc;
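These two declarations are the core allocation entry points: __kmalloc() serves variable-size requests from the kmalloc size-class caches, while kmem_cache_alloc() hands out one object from a cache the caller created with kmem_cache_create(). A minimal usage sketch (struct foo and the cache name are invented for illustration):

    #include <linux/slab.h>

    struct foo {
            int id;
            char name[16];
    };

    static struct kmem_cache *foo_cache;

    static int foo_cache_init(void)
    {
            /* One dedicated cache per object type. */
            foo_cache = kmem_cache_create("foo_cache", sizeof(struct foo),
                                          0, SLAB_HWCACHE_ALIGN, NULL);
            return foo_cache ? 0 : -ENOMEM;
    }

    static struct foo *foo_alloc(void)
    {
            /* GFP_KERNEL may sleep; use GFP_ATOMIC in atomic context. */
            return kmem_cache_alloc(foo_cache, GFP_KERNEL);
    }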
379 void *__kmalloc_node(size_t size, gfp_t flags, int node) __assume_kmalloc_alignment __malloc;
380 void *kmem_cache_alloc_node(struct kmem_cache *, gfp_t flags, int node) __assume_slab_alignment __malloc;
382 static __always_inline void *__kmalloc_node(size_t size, gfp_t flags, int node) in __kmalloc_node() argument
384 return __kmalloc(size, flags); in __kmalloc_node()
387 static __always_inline void *kmem_cache_alloc_node(struct kmem_cache *s, gfp_t flags, int node) in kmem_cache_alloc_node() argument
389 return kmem_cache_alloc(s, flags); in kmem_cache_alloc_node()
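The two inline stubs above are the !CONFIG_NUMA fallbacks: on kernels without NUMA support the node argument is silently dropped and the request goes to the ordinary allocator, so callers may pass a node unconditionally. A sketch (buf_alloc_near() is a hypothetical helper):

    #include <linux/slab.h>
    #include <linux/numa.h>   /* NUMA_NO_NODE */

    /* Prefer memory on 'node'; pass NUMA_NO_NODE for no preference.
     * On !CONFIG_NUMA kernels the stub above reduces the node-aware
     * path to a plain __kmalloc(size, GFP_KERNEL). */
    static void *buf_alloc_near(size_t size, int node)
    {
            return kmalloc_node(size, GFP_KERNEL, node);
    }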
412 gfp_t flags, size_t size) in kmem_cache_alloc_trace() argument
414 void *ret = kmem_cache_alloc(s, flags); in kmem_cache_alloc_trace()
416 kasan_kmalloc(s, ret, size, flags); in kmem_cache_alloc_trace()
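kmem_cache_alloc_trace() is what the kmalloc() inline below calls for compile-time-constant sizes: it allocates from the matching size-class cache, then reports the originally requested size to KASAN via kasan_kmalloc(). That is what lets KASAN poison the slack between the requested size and the cache's object size, e.g. (assuming a KASAN-enabled kernel):

    char *p = kmalloc(100, GFP_KERNEL);  /* served from the kmalloc-128 cache */

    p[99] = 0;   /* fine: within the requested 100 bytes */
    p[100] = 0;  /* KASAN out-of-bounds report, even though byte 100 still
                    lies inside the 128-byte slab object */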
432 extern void *kmalloc_order(size_t size, gfp_t flags, unsigned int order) __assume_page_alignment __malloc;
435 extern void *kmalloc_order_trace(size_t size, gfp_t flags, unsigned int order) __assume_page_alignment __malloc;
438 kmalloc_order_trace(size_t size, gfp_t flags, unsigned int order) in kmalloc_order_trace() argument
440 return kmalloc_order(size, flags, order); in kmalloc_order_trace()
444 static __always_inline void *kmalloc_large(size_t size, gfp_t flags) in kmalloc_large() argument
447 return kmalloc_order_trace(size, flags, order); in kmalloc_large()
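Requests above KMALLOC_MAX_CACHE_SIZE skip the slab caches entirely: kmalloc_large() converts the size to a page order with get_order() and lets kmalloc_order() allocate whole contiguous pages, so a large request costs the next power-of-two page count. A sketch, assuming 4 KiB pages:

    #include <linux/slab.h>

    /* get_order() rounds up to the next power-of-two number of pages:
     *   get_order(4096)  == 0  -> 1 page
     *   get_order(12288) == 2  -> 4 pages (3 pages rounds up)
     *   get_order(65536) == 4  -> 16 pages
     */
    void *buf = kmalloc(64 * 1024, GFP_KERNEL); /* constant and large: the
                    kmalloc() inline resolves this to kmalloc_large() */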
501 static __always_inline void *kmalloc(size_t size, gfp_t flags) in kmalloc() argument
505 return kmalloc_large(size, flags); in kmalloc()
507 if (!(flags & GFP_DMA)) { in kmalloc()
514 flags, size); in kmalloc()
518 return __kmalloc(size, flags); in kmalloc()
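So the kmalloc() inline picks its path at compile time: a constant size above KMALLOC_MAX_CACHE_SIZE goes straight to kmalloc_large(); a constant size that fits a size-class cache (and does not ask for GFP_DMA) resolves the cache index at compile time and calls kmem_cache_alloc_trace() directly; everything else falls through to the out-of-line __kmalloc(). Sketch of the three cases (some_runtime_size() is hypothetical):

    void *a = kmalloc(64, GFP_KERNEL);        /* constant, small: compile-time
                                                 cache lookup + alloc_trace  */
    void *b = kmalloc(64 * 1024, GFP_KERNEL); /* constant, large: page
                                                 allocator via kmalloc_large */
    void *c = kmalloc(some_runtime_size(), GFP_KERNEL); /* non-constant:
                                                 out-of-line __kmalloc()     */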
541 static __always_inline void *kmalloc_node(size_t size, gfp_t flags, int node) in kmalloc_node() argument
545 size <= KMALLOC_MAX_CACHE_SIZE && !(flags & GFP_DMA)) { in kmalloc_node()
552 flags, node, size); in kmalloc_node()
555 return __kmalloc_node(size, flags, node); in kmalloc_node()
627 static inline void *kmalloc_array(size_t n, size_t size, gfp_t flags) in kmalloc_array() argument
634 return kmalloc(bytes, flags); in kmalloc_array()
635 return __kmalloc(bytes, flags); in kmalloc_array()
644 static inline void *kcalloc(size_t n, size_t size, gfp_t flags) in kcalloc() argument
646 return kmalloc_array(n, size, flags | __GFP_ZERO); in kcalloc()
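kmalloc_array() exists to guard the n * size multiplication: on overflow it returns NULL instead of a short buffer, and kcalloc() is the same call with __GFP_ZERO added. Counts that come from userspace or hardware should go through these rather than a hand-written multiply. Sketch:

    #include <linux/slab.h>

    struct entry {
            u64 key;
            u64 val;
    };

    /* 'nr' is untrusted, so the multiplication must be overflow-checked. */
    static struct entry *table_alloc(size_t nr)
    {
            return kcalloc(nr, sizeof(struct entry), GFP_KERNEL);
    }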
658 #define kmalloc_track_caller(size, flags) \ argument
659 __kmalloc_track_caller(size, flags, _RET_IP_)
661 static inline void *kmalloc_array_node(size_t n, size_t size, gfp_t flags, in kmalloc_array_node() argument
669 return kmalloc_node(bytes, flags, node); in kmalloc_array_node()
670 return __kmalloc_node(bytes, flags, node); in kmalloc_array_node()
673 static inline void *kcalloc_node(size_t n, size_t size, gfp_t flags, int node) in kcalloc_node() argument
675 return kmalloc_array_node(n, size, flags | __GFP_ZERO, node); in kcalloc_node()
681 #define kmalloc_node_track_caller(size, flags, node) \ argument
682 __kmalloc_node_track_caller(size, flags, node, \
687 #define kmalloc_node_track_caller(size, flags, node) \ argument
688 kmalloc_track_caller(size, flags)
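The _track_caller macros pass _RET_IP_ so that slab debugging attributes an allocation to the wrapper's caller rather than to the wrapper itself; when no node-aware tracking entry point is built, kmalloc_node_track_caller() simply degrades to kmalloc_track_caller() and drops the node. Typical use is inside a small allocation wrapper, sketched here after the pattern of kstrdup() (not the kernel's exact implementation):

    #include <linux/slab.h>
    #include <linux/string.h>

    static char *my_strdup(const char *s, gfp_t gfp)
    {
            size_t len = strlen(s) + 1;
            /* Without _track_caller, every allocation would show up as
             * coming from my_strdup() in slab debug output. */
            char *p = kmalloc_track_caller(len, gfp);

            if (p)
                    memcpy(p, s, len);
            return p;
    }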
695 static inline void *kmem_cache_zalloc(struct kmem_cache *k, gfp_t flags) in kmem_cache_zalloc() argument
697 return kmem_cache_alloc(k, flags | __GFP_ZERO); in kmem_cache_zalloc()
705 static inline void *kzalloc(size_t size, gfp_t flags) in kzalloc() argument
707 return kmalloc(size, flags | __GFP_ZERO); in kzalloc()
716 static inline void *kzalloc_node(size_t size, gfp_t flags, int node) in kzalloc_node() argument
718 return kmalloc_node(size, flags | __GFP_ZERO, node); in kzalloc_node()
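The *zalloc helpers (kmem_cache_zalloc(), kzalloc(), kzalloc_node()) just OR __GFP_ZERO into the flags, so the returned memory is zero-filled and fields need no explicit initialization. Sketch:

    #include <linux/slab.h>
    #include <linux/spinlock.h>

    struct ctx {
            spinlock_t lock;
            int refs;        /* starts at 0 */
            void *priv;      /* starts NULL */
    };

    static struct ctx *ctx_new(int node)
    {
            /* Zeroed allocation, preferably on 'node'. */
            struct ctx *c = kzalloc_node(sizeof(*c), GFP_KERNEL, node);

            if (c)
                    spin_lock_init(&c->lock);
            return c;
    }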