Searched refs:gfpflags_allow_blocking (Results 1 – 25 of 35) sorted by relevance
306 might_sleep_if(gfpflags_allow_blocking(gfp)); in __i915_sw_fence_await_sw_fence()
321 if (!gfpflags_allow_blocking(gfp)) in __i915_sw_fence_await_sw_fence()
439 might_sleep_if(gfpflags_allow_blocking(gfp)); in i915_sw_fence_await_dma_fence()
449 if (!gfpflags_allow_blocking(gfp)) in i915_sw_fence_await_dma_fence()
496 might_sleep_if(gfpflags_allow_blocking(gfp)); in i915_sw_fence_await_reservation()
28 static inline bool gfpflags_allow_blocking(const gfp_t gfp_flags) in gfpflags_allow_blocking() function
158 if (gfpflags_allow_blocking(flag)) in arch_dma_alloc()
80 if (gfpflags_allow_blocking(gfp)) { in dma_direct_alloc()
129 !gfpflags_allow_blocking(gfp_mask)); in cn_netlink_send_mult()
390 if (!gfpflags_allow_blocking(gfp_mask) && !in_interrupt()) { in radix_tree_node_alloc()
513 WARN_ON_ONCE(!gfpflags_allow_blocking(gfp_mask)); in radix_tree_preload()
525 if (gfpflags_allow_blocking(gfp_mask)) in radix_tree_maybe_preload()
546 WARN_ON_ONCE(!gfpflags_allow_blocking(gfp_mask)); in radix_tree_split_preload()
565 if (!gfpflags_allow_blocking(gfp_mask)) { in radix_tree_maybe_preload_order()
537 if (gfpflags_allow_blocking(flags)) in kasan_kmalloc()
563 if (gfpflags_allow_blocking(flags)) in kasan_kmalloc_large()
324 might_sleep_if(gfpflags_allow_blocking(gfp_flags)); in get_task_io_context()
323 static inline bool gfpflags_allow_blocking(const gfp_t gfp_flags) in gfpflags_allow_blocking() function
1448 might_sleep_if(gfpflags_allow_blocking(pri)); in skb_unclone()
1477 might_sleep_if(gfpflags_allow_blocking(pri)); in skb_header_unclone()
1523 might_sleep_if(gfpflags_allow_blocking(pri)); in skb_share_check()
1559 might_sleep_if(gfpflags_allow_blocking(pri)); in skb_unshare()
421 might_sleep_if(gfpflags_allow_blocking(flags)); in slab_pre_alloc_hook()
328 might_sleep_if(gfpflags_allow_blocking(mem_flags)); in dma_pool_alloc()
2670 if (gfpflags_allow_blocking(local_flags)) in cache_grow_begin()
2706 if (gfpflags_allow_blocking(local_flags)) in cache_grow_begin()
2714 if (gfpflags_allow_blocking(local_flags)) in cache_grow_begin()
3066 might_sleep_if(gfpflags_allow_blocking(flags)); in cache_alloc_debugcheck_before()
543 bool can_sleep = gfpflags_allow_blocking(gfp); in z3fold_alloc()
660 might_sleep_if(gfpflags_allow_blocking(gfp)); in wb_get_create()
525 if (gfpflags_allow_blocking(gfp_mask)) { in alloc_vmap_area()
1696 if (gfpflags_allow_blocking(gfp_mask|highmem_mask)) in __vmalloc_area_node()
105 if (!coherent && !gfpflags_allow_blocking(flags)) { in __dma_alloc()
547 if (!gfpflags_allow_blocking(gfp)) { in __iommu_alloc_attrs()
679 might_sleep_if(gfpflags_allow_blocking(gfp_flags)); in on_each_cpu_cond()
1774 if (gfpflags_allow_blocking(gfp_mask) && (stime > 0)) { in audit_log_start()
655 if (!gfpflags_allow_blocking(gfp)) { in rxrpc_wait_for_channel()
602 if (!prealloc && gfpflags_allow_blocking(mask)) { in __clear_extent_bit()
719 if (gfpflags_allow_blocking(mask)) in __clear_extent_bit()
862 if (!prealloc && gfpflags_allow_blocking(mask)) { in __set_extent_bit()
1037 if (gfpflags_allow_blocking(mask)) in __set_extent_bit()
4231 if (gfpflags_allow_blocking(mask) && in try_release_extent_mapping()
779 allowblock = gfpflags_allow_blocking(gfp); in __dma_alloc()
1532 if (coherent_flag == COHERENT || !gfpflags_allow_blocking(gfp)) in __arm_iommu_alloc_attrs()
1637 bool preload = gfpflags_allow_blocking(gfp); in sctp_assoc_set_id()
2207 if (gfpflags_allow_blocking(sk->sk_allocation)) in sk_page_frag()
489 bool preload = gfpflags_allow_blocking(gfp_mask); in add_client_resource()
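
The hits above cluster around one idiom: a caller tests gfpflags_allow_blocking(), which in kernels of this vintage simply reports whether __GFP_DIRECT_RECLAIM is set in the GFP mask, to decide whether the allocation path may sleep. Callers typically pair the check with might_sleep_if() and fall back to an atomic, non-blocking path when blocking is not allowed. The following is a minimal sketch of that pattern using a hypothetical example_alloc() helper, not code from any of the files listed above:

	/*
	 * Hypothetical sketch only, illustrating the pattern shown by the
	 * search hits: test gfpflags_allow_blocking() to choose between a
	 * sleeping slow path and an atomic fallback.
	 */
	#include <linux/gfp.h>
	#include <linux/kernel.h>
	#include <linux/slab.h>
	#include <linux/vmalloc.h>

	static void *example_alloc(size_t size, gfp_t gfp)
	{
		void *buf;

		/* With CONFIG_DEBUG_ATOMIC_SLEEP, complain if a blocking mask
		 * is passed from a context that must not sleep. */
		might_sleep_if(gfpflags_allow_blocking(gfp));

		buf = kmalloc(size, gfp);
		if (buf)
			return buf;

		/* The caller's mask forbids direct reclaim, so do not take a
		 * sleeping slow path; just report failure. */
		if (!gfpflags_allow_blocking(gfp))
			return NULL;

		return vmalloc(size);	/* slow path; may sleep */
	}

A second recurring idiom in the list (the radix-tree, SCTP and firewire hits that assign "bool preload = gfpflags_allow_blocking(...)") uses the same test to decide whether per-CPU preloading, which may sleep, can be performed before taking a spinlock.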