Searched refs:gfpflags_allow_blocking (Results 1 – 25 of 36) sorted by relevance
310 might_sleep_if(gfpflags_allow_blocking(gfp)); in __i915_sw_fence_await_sw_fence()
327 if (!gfpflags_allow_blocking(gfp)) in __i915_sw_fence_await_sw_fence()
444 might_sleep_if(gfpflags_allow_blocking(gfp)); in i915_sw_fence_await_dma_fence()
454 if (!gfpflags_allow_blocking(gfp)) in i915_sw_fence_await_dma_fence()
536 might_sleep_if(gfpflags_allow_blocking(gfp)); in i915_sw_fence_await_reservation()
599 if (!gfpflags_allow_blocking(gfp)) in request_alloc_slow()
630 might_sleep_if(gfpflags_allow_blocking(gfp)); in __i915_request_create()
28 static inline bool gfpflags_allow_blocking(const gfp_t gfp_flags) in gfpflags_allow_blocking() function
154 if (gfpflags_allow_blocking(flag)) in arch_dma_alloc()
243 if (cma && gfpflags_allow_blocking(gfp)) { in dma_alloc_contiguous()
222 if (!gfpflags_allow_blocking(flags)) { in arch_dma_alloc()
116 !gfpflags_allow_blocking(gfp_mask)); in cn_netlink_send_mult()
490 if (gfpflags_allow_blocking(flags)) in __kasan_kmalloc()
535 if (gfpflags_allow_blocking(flags)) in kasan_kmalloc_large()
254 if (!gfpflags_allow_blocking(gfp_mask) && !in_interrupt()) { in radix_tree_node_alloc()
377 WARN_ON_ONCE(!gfpflags_allow_blocking(gfp_mask)); in radix_tree_preload()
389 if (gfpflags_allow_blocking(gfp_mask)) in radix_tree_maybe_preload()
332 if (gfpflags_allow_blocking(gfp)) { in __xas_nomem()
301 might_sleep_if(gfpflags_allow_blocking(gfp_flags)); in get_task_io_context()
323 static inline bool gfpflags_allow_blocking(const gfp_t gfp_flags) in gfpflags_allow_blocking() function
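For reference, the helper located by the two definition hits above (lines 28 and 323 of their respective headers) is a thin test of the direct-reclaim bit in the gfp mask. A minimal sketch of its body, matching include/linux/gfp.h modulo kernel version:

/* Allocation may block (sleep) only if the caller's gfp mask permits
 * direct reclaim; exact header and line number vary by kernel version. */
static inline bool gfpflags_allow_blocking(const gfp_t gfp_flags)
{
	return !!(gfp_flags & __GFP_DIRECT_RECLAIM);
}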
1606 might_sleep_if(gfpflags_allow_blocking(pri)); in skb_unclone()
1635 might_sleep_if(gfpflags_allow_blocking(pri)); in skb_header_unclone()
1681 might_sleep_if(gfpflags_allow_blocking(pri)); in skb_share_check()
1717 might_sleep_if(gfpflags_allow_blocking(pri)); in skb_unshare()
690 might_sleep_if(gfpflags_allow_blocking(gfp_flags)); in on_each_cpu_cond_mask()
327 might_sleep_if(gfpflags_allow_blocking(mem_flags)); in dma_pool_alloc()
565 might_sleep_if(gfpflags_allow_blocking(flags)); in slab_pre_alloc_hook()
2592 if (gfpflags_allow_blocking(local_flags)) in cache_grow_begin()
2634 if (gfpflags_allow_blocking(local_flags)) in cache_grow_begin()
2642 if (gfpflags_allow_blocking(local_flags)) in cache_grow_begin()
2993 might_sleep_if(gfpflags_allow_blocking(flags)); in cache_alloc_debugcheck_before()
682 might_sleep_if(gfpflags_allow_blocking(gfp)); in wb_get_create()
1135 if (gfpflags_allow_blocking(gfp_mask)) { in alloc_vmap_area()
2443 if (gfpflags_allow_blocking(gfp_mask|highmem_mask)) in __vmalloc_area_node()
871 bool can_sleep = gfpflags_allow_blocking(gfp); in z3fold_alloc()
1008 if (IS_ENABLED(CONFIG_DMA_REMAP) && gfpflags_allow_blocking(gfp) && in iommu_dma_alloc()
1013 !gfpflags_allow_blocking(gfp) && !coherent) in iommu_dma_alloc()
651 if (!gfpflags_allow_blocking(gfp)) { in rxrpc_wait_for_channel()
693 if (!prealloc && gfpflags_allow_blocking(mask)) { in __clear_extent_bit()
810 if (gfpflags_allow_blocking(mask)) in __clear_extent_bit()
955 if (!prealloc && gfpflags_allow_blocking(mask)) { in __set_extent_bit()
1130 if (gfpflags_allow_blocking(mask)) in __set_extent_bit()
4408 if (gfpflags_allow_blocking(mask) && in try_release_extent_mapping()
737 allowblock = gfpflags_allow_blocking(gfp); in __dma_alloc()
1472 if (coherent_flag == COHERENT || !gfpflags_allow_blocking(gfp)) in __arm_iommu_alloc_attrs()
1618 bool preload = gfpflags_allow_blocking(gfp); in sctp_assoc_set_id()
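Taken together, the hits fall into two recurring patterns: asserting sleepability with might_sleep_if(), and branching between a possibly-sleeping slow path and an atomic-safe path. The caller below is a hypothetical sketch of both patterns, not code taken from any of the files above (my_alloc_buffer() is an assumed name):

#include <linux/gfp.h>
#include <linux/slab.h>
#include <linux/vmalloc.h>

/* Hypothetical caller illustrating the two patterns seen in the results. */
static void *my_alloc_buffer(size_t len, gfp_t gfp)
{
	/* Pattern 1: warn if a blocking gfp mask is used from atomic context. */
	might_sleep_if(gfpflags_allow_blocking(gfp));

	/* Pattern 2: only take the slow, possibly-sleeping path when the
	 * caller's mask permits direct reclaim; otherwise stay atomic. */
	if (gfpflags_allow_blocking(gfp))
		return vzalloc(len);	/* may sleep to reclaim memory */

	return kzalloc(len, gfp);	/* atomic-safe per the caller's mask */
}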