Searched refs:PAGE_ALLOC_COSTLY_ORDER (Results 1 – 14 of 14) sorted by relevance
1456 watermark = (order > PAGE_ALLOC_COSTLY_ORDER) ? in __compaction_suitable()
1491 if (ret == COMPACT_CONTINUE && (order > PAGE_ALLOC_COSTLY_ORDER)) { in compaction_suitable()
3492 if (order > PAGE_ALLOC_COSTLY_ORDER) in __alloc_pages_may_oom()
3638 if (order > PAGE_ALLOC_COSTLY_ORDER) in should_compact_retry()
3650 min_priority = (order > PAGE_ALLOC_COSTLY_ORDER) ? in should_compact_retry()
3681 if (!order || order > PAGE_ALLOC_COSTLY_ORDER) in should_compact_retry()
3931 if (did_some_progress && order <= PAGE_ALLOC_COSTLY_ORDER) in should_reclaim_retry()
4047 const bool costly_order = order > PAGE_ALLOC_COSTLY_ORDER; in __alloc_pages_slowpath()
4281 WARN_ON_ONCE(order > PAGE_ALLOC_COSTLY_ORDER); in __alloc_pages_slowpath()
2604 (sc->order > PAGE_ALLOC_COSTLY_ORDER || in in_reclaim_compaction()
2940 sc->order > PAGE_ALLOC_COSTLY_ORDER && in shrink_zones()
1669 if (order > PAGE_ALLOC_COSTLY_ORDER) in mem_cgroup_oom()
2232 if (nr_reclaimed && nr_pages <= (1 << PAGE_ALLOC_COSTLY_ORDER)) in try_charge()
3164 static unsigned int slub_max_order = PAGE_ALLOC_COSTLY_ORDER;
370 PAGE_ALLOC_COSTLY_ORDER, ring->node); in xgbe_map_rx_buffer()
37 #define PAGE_ALLOC_COSTLY_ORDER 3 macro
403 order = max_t(int, PAGE_ALLOC_COSTLY_ORDER - 1, 0); in xlgmac_map_rx_buffer()
189 if (size <= (PAGE_SIZE << PAGE_ALLOC_COSTLY_ORDER)) { in ceph_kvmalloc()
128 .. slub_max_order=x (default 3 (PAGE_ALLOC_COSTLY_ORDER))
2053 #define GFS2_SEQ_GOODSIZE min(PAGE_SIZE << PAGE_ALLOC_COSTLY_ORDER, 65536UL)
147 if (size <= (PAGE_SIZE << PAGE_ALLOC_COSTLY_ORDER)) { in bpf_map_area_alloc()
1679 PAGE_ALLOC_COSTLY_ORDER); in unix_dgram_sendmsg()
4647 PAGE_ALLOC_COSTLY_ORDER, in tcp_send_rcvq()
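The ceph_kvmalloc() and bpf_map_area_alloc() hits above show the most common pattern for this constant: requests no larger than PAGE_ALLOC_COSTLY_ORDER pages' worth of memory (order 3, i.e. 8 contiguous pages, 32 KiB with 4 KiB pages) are first tried with the physically contiguous allocator, and anything bigger, or a failed small attempt, falls back to vmalloc(). A minimal sketch of that check, assuming GFP_KERNEL context; the helper name demo_area_alloc() is hypothetical and not taken from any of the files listed:

#include <linux/mm.h>
#include <linux/slab.h>
#include <linux/vmalloc.h>

/* Hypothetical helper mirroring the size check seen in the results above. */
static void *demo_area_alloc(size_t size)
{
	void *area;

	/* PAGE_ALLOC_COSTLY_ORDER is 3, so this covers at most 8 pages. */
	if (size <= (PAGE_SIZE << PAGE_ALLOC_COSTLY_ORDER)) {
		/* __GFP_NOWARN: failure here is expected and handled below. */
		area = kmalloc(size, GFP_KERNEL | __GFP_NOWARN);
		if (area)
			return area;
	}

	/* Above the costly order, or kmalloc() failed: fall back to vmalloc(). */
	return vmalloc(size);
}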