
Searched refs:pfn (Results 1 – 25 of 617) sorted by relevance


/Linux-v5.4/mm/
page_isolation.c
21 unsigned long flags, pfn; in set_migratetype_isolate() local
38 pfn = page_to_pfn(page); in set_migratetype_isolate()
39 arg.start_pfn = pfn; in set_migratetype_isolate()
96 unsigned long pfn, buddy_pfn; in unset_migratetype_isolate() local
115 pfn = page_to_pfn(page); in unset_migratetype_isolate()
116 buddy_pfn = __find_buddy_pfn(pfn, order); in unset_migratetype_isolate()
117 buddy = page + (buddy_pfn - pfn); in unset_migratetype_isolate()
147 __first_valid_page(unsigned long pfn, unsigned long nr_pages) in __first_valid_page() argument
154 page = pfn_to_online_page(pfn + i); in __first_valid_page()
196 unsigned long pfn; in start_isolate_page_range() local
[all …]
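The unset_migratetype_isolate() hits above locate a page's buddy purely by pfn arithmetic: __find_buddy_pfn(pfn, order) flips bit 'order' of the pfn, and the buddy's struct page is then reached by offsetting the current page pointer by the pfn delta. A minimal userspace sketch of that arithmetic (the helper itself lives in mm/internal.h; the pfn and order values below are invented):

    /* Userspace sketch of the buddy-pfn arithmetic used in
     * unset_migratetype_isolate(); not kernel code. */
    #include <stdio.h>

    static unsigned long find_buddy_pfn(unsigned long pfn, unsigned int order)
    {
        /* Buddies of size 2^order differ only in bit 'order' of the pfn. */
        return pfn ^ (1UL << order);
    }

    int main(void)
    {
        unsigned long pfn = 0x1040;     /* arbitrary example pfn */
        unsigned int order = 3;         /* an 8-page buddy pair */
        unsigned long buddy_pfn = find_buddy_pfn(pfn, order);

        /* Mirrors "buddy = page + (buddy_pfn - pfn)" from the listing:
         * the struct page pointer moves by the same (signed) delta. */
        printf("pfn=%#lx order=%u buddy_pfn=%#lx delta=%ld\n",
               pfn, order, buddy_pfn, (long)(buddy_pfn - pfn));
        return 0;
    }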
page_ext.c
117 unsigned long pfn = page_to_pfn(page); in lookup_page_ext() local
130 index = pfn - round_down(node_start_pfn(page_to_nid(page)), in lookup_page_ext()
192 unsigned long pfn = page_to_pfn(page); in lookup_page_ext() local
193 struct mem_section *section = __pfn_to_section(pfn); in lookup_page_ext()
202 return get_entry(section->page_ext, pfn); in lookup_page_ext()
221 static int __meminit init_section_page_ext(unsigned long pfn, int nid) in init_section_page_ext() argument
227 section = __pfn_to_section(pfn); in init_section_page_ext()
251 pfn &= PAGE_SECTION_MASK; in init_section_page_ext()
252 section->page_ext = (void *)base - page_ext_size * pfn; in init_section_page_ext()
273 static void __free_page_ext(unsigned long pfn) in __free_page_ext() argument
[all …]
memory_hotplug.c
227 unsigned long i, pfn, end_pfn, nr_pages; in register_page_bootmem_info_node() local
237 pfn = pgdat->node_start_pfn; in register_page_bootmem_info_node()
241 for (; pfn < end_pfn; pfn += PAGES_PER_SECTION) { in register_page_bootmem_info_node()
248 if (pfn_valid(pfn) && (early_pfn_to_nid(pfn) == node)) in register_page_bootmem_info_node()
249 register_page_bootmem_info_section(pfn); in register_page_bootmem_info_node()
254 static int check_pfn_span(unsigned long pfn, unsigned long nr_pages, in check_pfn_span() argument
272 if (!IS_ALIGNED(pfn, min_align) in check_pfn_span()
275 reason, pfn, pfn + nr_pages - 1); in check_pfn_span()
287 int __ref __add_pages(int nid, unsigned long pfn, unsigned long nr_pages, in __add_pages() argument
298 if (altmap->base_pfn != pfn in __add_pages()
[all …]
memory-failure.c
209 static int kill_proc(struct to_kill *tk, unsigned long pfn, int flags) in kill_proc() argument
216 pfn, t->comm, t->pid); in kill_proc()
362 unsigned long pfn, int flags) in kill_procs() argument
375 pfn, tk->tsk->comm, tk->tsk->pid); in kill_procs()
386 else if (kill_proc(tk, pfn, flags) < 0) in kill_procs()
388 pfn, tk->tsk->comm, tk->tsk->pid); in kill_procs()
593 static int truncate_error_page(struct page *p, unsigned long pfn, in truncate_error_page() argument
603 pfn, err); in truncate_error_page()
607 pfn); in truncate_error_page()
620 pfn); in truncate_error_page()
[all …]
page_owner.c
259 unsigned long pfn = zone->zone_start_pfn, block_end_pfn; in pagetypeinfo_showmixedcount_print() local
260 unsigned long end_pfn = pfn + zone->spanned_pages; in pagetypeinfo_showmixedcount_print()
266 pfn = zone->zone_start_pfn; in pagetypeinfo_showmixedcount_print()
273 for (; pfn < end_pfn; ) { in pagetypeinfo_showmixedcount_print()
274 page = pfn_to_online_page(pfn); in pagetypeinfo_showmixedcount_print()
276 pfn = ALIGN(pfn + 1, MAX_ORDER_NR_PAGES); in pagetypeinfo_showmixedcount_print()
280 block_end_pfn = ALIGN(pfn + 1, pageblock_nr_pages); in pagetypeinfo_showmixedcount_print()
285 for (; pfn < block_end_pfn; pfn++) { in pagetypeinfo_showmixedcount_print()
286 if (!pfn_valid_within(pfn)) in pagetypeinfo_showmixedcount_print()
290 page = pfn_to_page(pfn); in pagetypeinfo_showmixedcount_print()
[all …]
sparse.c
222 static void subsection_mask_set(unsigned long *map, unsigned long pfn, in subsection_mask_set() argument
225 int idx = subsection_map_index(pfn); in subsection_mask_set()
226 int end = subsection_map_index(pfn + nr_pages - 1); in subsection_mask_set()
231 void __init subsection_map_init(unsigned long pfn, unsigned long nr_pages) in subsection_map_init() argument
233 int end_sec = pfn_to_section_nr(pfn + nr_pages - 1); in subsection_map_init()
234 unsigned long nr, start_sec = pfn_to_section_nr(pfn); in subsection_map_init()
244 - (pfn & ~PAGE_SECTION_MASK)); in subsection_map_init()
246 subsection_mask_set(ms->usage->subsection_map, pfn, pfns); in subsection_map_init()
249 pfns, subsection_map_index(pfn), in subsection_map_init()
250 subsection_map_index(pfn + pfns - 1)); in subsection_map_init()
[all …]
page_idle.c
31 static struct page *page_idle_get_page(unsigned long pfn) in page_idle_get_page() argument
36 if (!pfn_valid(pfn)) in page_idle_get_page()
39 page = pfn_to_page(pfn); in page_idle_get_page()
127 unsigned long pfn, end_pfn; in page_idle_bitmap_read() local
133 pfn = pos * BITS_PER_BYTE; in page_idle_bitmap_read()
134 if (pfn >= max_pfn) in page_idle_bitmap_read()
137 end_pfn = pfn + count * BITS_PER_BYTE; in page_idle_bitmap_read()
141 for (; pfn < end_pfn; pfn++) { in page_idle_bitmap_read()
142 bit = pfn % BITMAP_CHUNK_BITS; in page_idle_bitmap_read()
145 page = page_idle_get_page(pfn); in page_idle_bitmap_read()
[all …]
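page_idle_bitmap_read() above translates a position in the idle-page bitmap file into a pfn range: every byte of the file covers BITS_PER_BYTE pages, and each pfn maps to one bit of a 64-bit chunk. A hedged sketch of that offset-to-pfn arithmetic (the offsets below are invented, and BITMAP_CHUNK_BITS = 64 mirrors the u64 chunk size used by mm/page_idle.c):

    /* Sketch of the offset-to-pfn mapping in page_idle_bitmap_read();
     * plain userspace arithmetic, not kernel code. */
    #include <stdio.h>

    #define BITS_PER_BYTE     8
    #define BITMAP_CHUNK_BITS 64    /* bits per u64 chunk of the bitmap */

    int main(void)
    {
        unsigned long pos = 24;     /* byte offset into the bitmap file (invented) */
        unsigned long count = 8;    /* number of bytes read (invented) */

        unsigned long pfn = pos * BITS_PER_BYTE;            /* first pfn covered */
        unsigned long end_pfn = pfn + count * BITS_PER_BYTE;

        printf("read covers pfns [%lu, %lu)\n", pfn, end_pfn);
        printf("pfn %lu sits at bit %lu of its chunk\n",
               pfn + 5, (pfn + 5) % BITMAP_CHUNK_BITS);
        return 0;
    }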
cma.c
83 static void cma_clear_bitmap(struct cma *cma, unsigned long pfn, in cma_clear_bitmap() argument
88 bitmap_no = (pfn - cma->base_pfn) >> cma->order_per_bit; in cma_clear_bitmap()
99 unsigned long base_pfn = cma->base_pfn, pfn = base_pfn; in cma_activate_area() local
110 WARN_ON_ONCE(!pfn_valid(pfn)); in cma_activate_area()
111 zone = page_zone(pfn_to_page(pfn)); in cma_activate_area()
116 base_pfn = pfn; in cma_activate_area()
117 for (j = pageblock_nr_pages; j; --j, pfn++) { in cma_activate_area()
118 WARN_ON_ONCE(!pfn_valid(pfn)); in cma_activate_area()
125 if (page_zone(pfn_to_page(pfn)) != zone) in cma_activate_area()
421 unsigned long pfn = -1; in cma_alloc() local
[all …]
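cma_clear_bitmap() above converts a pfn back into a CMA bitmap index by subtracting the area's base_pfn and shifting by order_per_bit, since one bitmap bit tracks 2^order_per_bit pages. A small sketch of that indexing with made-up values:

    /* Sketch of the pfn -> CMA bitmap index conversion from cma_clear_bitmap();
     * the numbers below are invented for illustration. */
    #include <stdio.h>

    int main(void)
    {
        unsigned long base_pfn = 0x80000;   /* start pfn of the CMA area */
        unsigned int order_per_bit = 2;     /* each bit tracks 4 pages */
        unsigned long pfn = 0x80030;        /* pfn being released */

        unsigned long bitmap_no = (pfn - base_pfn) >> order_per_bit;
        printf("pfn %#lx -> bitmap bit %lu\n", pfn, bitmap_no);
        return 0;
    }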
compaction.c
48 #define block_start_pfn(pfn, order) round_down(pfn, 1UL << (order)) argument
49 #define block_end_pfn(pfn, order) ALIGN((pfn) + 1, 1UL << (order)) argument
50 #define pageblock_start_pfn(pfn) block_start_pfn(pfn, pageblock_order) argument
51 #define pageblock_end_pfn(pfn) block_end_pfn(pfn, pageblock_order) argument
59 unsigned long pfn = page_to_pfn(page); in release_freepages() local
62 if (pfn > high_pfn) in release_freepages()
63 high_pfn = pfn; in release_freepages()
241 __reset_isolation_pfn(struct zone *zone, unsigned long pfn, bool check_source, in __reset_isolation_pfn() argument
244 struct page *page = pfn_to_online_page(pfn); in __reset_isolation_pfn()
272 block_pfn = pageblock_start_pfn(pfn); in __reset_isolation_pfn()
[all …]
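The compaction.c hits start with the pfn-rounding helpers themselves: block_start_pfn() rounds a pfn down to a 2^order boundary and block_end_pfn() rounds up past the current block, with pageblock_order supplying the block size. The userspace restatement below assumes pageblock_order = 9 (512 pages, i.e. 2 MiB blocks with 4 KiB pages), which is only the common x86-64 value:

    /* Userspace restatement of block_start_pfn()/block_end_pfn() as shown
     * above; PAGEBLOCK_ORDER = 9 is an assumption, not a universal value. */
    #include <stdio.h>

    #define PAGEBLOCK_ORDER 9UL

    static unsigned long block_start_pfn(unsigned long pfn, unsigned long order)
    {
        return pfn & ~((1UL << order) - 1);     /* round_down(pfn, 1UL << order) */
    }

    static unsigned long block_end_pfn(unsigned long pfn, unsigned long order)
    {
        /* ALIGN(pfn + 1, 1UL << order): first pfn past the current block */
        return (pfn + (1UL << order)) & ~((1UL << order) - 1);
    }

    int main(void)
    {
        unsigned long pfn = 0x12345;
        printf("pfn %#lx: pageblock [%#lx, %#lx)\n", pfn,
               block_start_pfn(pfn, PAGEBLOCK_ORDER),
               block_end_pfn(pfn, PAGEBLOCK_ORDER));
        return 0;
    }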
/Linux-v5.4/include/linux/
pfn_t.h
29 static inline pfn_t __pfn_to_pfn_t(unsigned long pfn, u64 flags) in __pfn_to_pfn_t() argument
31 pfn_t pfn_t = { .val = pfn | (flags & PFN_FLAGS_MASK), }; in __pfn_to_pfn_t()
37 static inline pfn_t pfn_to_pfn_t(unsigned long pfn) in pfn_to_pfn_t() argument
39 return __pfn_to_pfn_t(pfn, 0); in pfn_to_pfn_t()
47 static inline bool pfn_t_has_page(pfn_t pfn) in pfn_t_has_page() argument
49 return (pfn.val & PFN_MAP) == PFN_MAP || (pfn.val & PFN_DEV) == 0; in pfn_t_has_page()
52 static inline unsigned long pfn_t_to_pfn(pfn_t pfn) in pfn_t_to_pfn() argument
54 return pfn.val & ~PFN_FLAGS_MASK; in pfn_t_to_pfn()
57 static inline struct page *pfn_t_to_page(pfn_t pfn) in pfn_t_to_page() argument
59 if (pfn_t_has_page(pfn)) in pfn_t_to_page()
[all …]
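pfn_t packs type flags such as PFN_DEV and PFN_MAP into the top bits of a u64 whose low bits hold the pfn, which is why pfn_t_to_pfn() simply masks PFN_FLAGS_MASK off again and pfn_t_has_page() can be answered from the flags alone. A sketch of that encoding, assuming 64-bit longs and PAGE_SHIFT = 12; the kernel header remains the reference:

    /* Sketch of the pfn_t encoding from include/linux/pfn_t.h: flags occupy
     * the top PAGE_SHIFT bits of a u64, the pfn the rest. Assumes PAGE_SHIFT = 12. */
    #include <stdio.h>
    #include <stdint.h>
    #include <stdbool.h>

    #define PAGE_SHIFT      12
    #define PFN_FLAGS_MASK  (((uint64_t)((1UL << PAGE_SHIFT) - 1)) << (64 - PAGE_SHIFT))
    #define PFN_DEV         (1ULL << (64 - 3))
    #define PFN_MAP         (1ULL << (64 - 4))

    typedef struct { uint64_t val; } pfn_t;

    static pfn_t __pfn_to_pfn_t(unsigned long pfn, uint64_t flags)
    {
        pfn_t p = { .val = pfn | (flags & PFN_FLAGS_MASK) };
        return p;
    }

    static unsigned long pfn_t_to_pfn(pfn_t p)
    {
        return p.val & ~PFN_FLAGS_MASK;
    }

    static bool pfn_t_has_page(pfn_t p)
    {
        /* device pfns only have a struct page when PFN_MAP is also set */
        return (p.val & PFN_MAP) == PFN_MAP || (p.val & PFN_DEV) == 0;
    }

    int main(void)
    {
        pfn_t p = __pfn_to_pfn_t(0x1234, PFN_DEV | PFN_MAP);
        printf("pfn=%#lx has_page=%d\n", pfn_t_to_pfn(p), pfn_t_has_page(p));
        return 0;
    }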
/Linux-v5.4/arch/x86/xen/
p2m.c
126 static inline unsigned p2m_top_index(unsigned long pfn) in p2m_top_index() argument
128 BUG_ON(pfn >= MAX_P2M_PFN); in p2m_top_index()
129 return pfn / (P2M_MID_PER_PAGE * P2M_PER_PAGE); in p2m_top_index()
132 static inline unsigned p2m_mid_index(unsigned long pfn) in p2m_mid_index() argument
134 return (pfn / P2M_PER_PAGE) % P2M_MID_PER_PAGE; in p2m_mid_index()
137 static inline unsigned p2m_index(unsigned long pfn) in p2m_index() argument
139 return pfn % P2M_PER_PAGE; in p2m_index()
174 static void p2m_init_identity(unsigned long *p2m, unsigned long pfn) in p2m_init_identity() argument
179 p2m[i] = IDENTITY_FRAME(pfn + i); in p2m_init_identity()
219 unsigned long pfn, mfn; in xen_build_mfn_list_list() local
[all …]
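The x86 Xen p2m hits show a three-level table indexed purely by pfn: p2m_top_index() picks the top-level slot, p2m_mid_index() the middle page, and p2m_index() the entry within a leaf page. With 4 KiB pages and 8-byte entries both P2M_PER_PAGE and P2M_MID_PER_PAGE come out to 512, so the split is effectively a base-512 decomposition of the pfn; those sizes are an assumption of the sketch below:

    /* Sketch of the p2m index split from arch/x86/xen/p2m.c, assuming
     * P2M_PER_PAGE = P2M_MID_PER_PAGE = 512 (4 KiB pages, 8-byte entries). */
    #include <stdio.h>

    #define P2M_PER_PAGE      512UL
    #define P2M_MID_PER_PAGE  512UL

    int main(void)
    {
        unsigned long pfn = 0x123456;   /* invented example pfn */
        unsigned long top = pfn / (P2M_MID_PER_PAGE * P2M_PER_PAGE);
        unsigned long mid = (pfn / P2M_PER_PAGE) % P2M_MID_PER_PAGE;
        unsigned long idx = pfn % P2M_PER_PAGE;

        printf("pfn %#lx -> top %lu, mid %lu, idx %lu\n", pfn, top, mid, idx);
        return 0;
    }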
/Linux-v5.4/tools/testing/scatterlist/
main.c
18 #define pfn(...) (unsigned []){ __VA_ARGS__ } macro
26 unsigned *pfn; in main() member
31 { -EINVAL, 1, pfn(0), PAGE_SIZE, PAGE_SIZE + 1, 1 }, in main()
32 { -EINVAL, 1, pfn(0), PAGE_SIZE, 0, 1 }, in main()
33 { -EINVAL, 1, pfn(0), PAGE_SIZE, sgmax + 1, 1 }, in main()
34 { 0, 1, pfn(0), PAGE_SIZE, sgmax, 1 }, in main()
35 { 0, 1, pfn(0), 1, sgmax, 1 }, in main()
36 { 0, 2, pfn(0, 1), 2 * PAGE_SIZE, sgmax, 1 }, in main()
37 { 0, 2, pfn(1, 0), 2 * PAGE_SIZE, sgmax, 2 }, in main()
38 { 0, 3, pfn(0, 1, 2), 3 * PAGE_SIZE, sgmax, 1 }, in main()
[all …]
/Linux-v5.4/arch/x86/include/asm/xen/
page.h
57 extern int xen_alloc_p2m_entry(unsigned long pfn);
59 extern unsigned long get_phys_to_machine(unsigned long pfn);
60 extern bool set_phys_to_machine(unsigned long pfn, unsigned long mfn);
61 extern bool __set_phys_to_machine(unsigned long pfn, unsigned long mfn);
142 static inline unsigned long __pfn_to_mfn(unsigned long pfn) in __pfn_to_mfn() argument
146 if (pfn < xen_p2m_size) in __pfn_to_mfn()
147 mfn = xen_p2m_addr[pfn]; in __pfn_to_mfn()
148 else if (unlikely(pfn < xen_max_p2m_pfn)) in __pfn_to_mfn()
149 return get_phys_to_machine(pfn); in __pfn_to_mfn()
151 return IDENTITY_FRAME(pfn); in __pfn_to_mfn()
[all …]
/Linux-v5.4/arch/arm/xen/
p2m.c
22 unsigned long pfn; member
43 if (new->pfn == entry->pfn) in xen_add_phys_to_mach_entry()
46 if (new->pfn < entry->pfn) in xen_add_phys_to_mach_entry()
58 __func__, &new->pfn, &new->mfn, &entry->pfn, &entry->mfn); in xen_add_phys_to_mach_entry()
63 unsigned long __pfn_to_mfn(unsigned long pfn) in __pfn_to_mfn() argument
72 if (entry->pfn <= pfn && in __pfn_to_mfn()
73 entry->pfn + entry->nr_pages > pfn) { in __pfn_to_mfn()
74 unsigned long mfn = entry->mfn + (pfn - entry->pfn); in __pfn_to_mfn()
78 if (pfn < entry->pfn) in __pfn_to_mfn()
121 bool __set_phys_to_machine_multi(unsigned long pfn, in __set_phys_to_machine_multi() argument
[all …]
/Linux-v5.4/include/asm-generic/
memory_model.h
18 #define arch_pfn_to_nid(pfn) pfn_to_nid(pfn) argument
22 #define arch_local_page_offset(pfn, nid) \ argument
23 ((pfn) - NODE_DATA(nid)->node_start_pfn)
33 #define __pfn_to_page(pfn) (mem_map + ((pfn) - ARCH_PFN_OFFSET)) argument
38 #define __pfn_to_page(pfn) \ argument
39 ({ unsigned long __pfn = (pfn); \
54 #define __pfn_to_page(pfn) (vmemmap + (pfn)) argument
68 #define __pfn_to_page(pfn) \ argument
69 ({ unsigned long __pfn = (pfn); \
79 #define __pfn_to_phys(pfn) PFN_PHYS(pfn) argument
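memory_model.h is where the memory model fixes the pfn <-> struct page mapping: classic FLATMEM offsets into a single mem_map array, the vmemmap variant indexes a virtually contiguous array directly, and __pfn_to_phys() is always just a shift by PAGE_SHIFT. The sketch below restates the FLATMEM arithmetic against a stand-in struct page array; ARCH_PFN_OFFSET = 0 and PAGE_SHIFT = 12 are assumptions:

    /* Userspace restatement of the FLATMEM __pfn_to_page()/__pfn_to_phys()
     * arithmetic; 'struct page' here is a stand-in, not the kernel's. */
    #include <stdio.h>

    #define PAGE_SHIFT       12
    #define ARCH_PFN_OFFSET  0UL    /* assumption: RAM starts at pfn 0 */

    struct page { unsigned long flags; };   /* stand-in */

    static struct page mem_map[1024];       /* pretend mem_map for 1024 pages */

    #define __pfn_to_page(pfn)  (mem_map + ((pfn) - ARCH_PFN_OFFSET))
    #define __page_to_pfn(page) ((unsigned long)((page) - mem_map) + ARCH_PFN_OFFSET)
    #define __pfn_to_phys(pfn)  ((unsigned long long)(pfn) << PAGE_SHIFT)

    int main(void)
    {
        unsigned long pfn = 42;
        struct page *page = __pfn_to_page(pfn);

        printf("pfn %lu -> page %p -> pfn %lu, phys %#llx\n",
               pfn, (void *)page, __page_to_pfn(page), __pfn_to_phys(pfn));
        return 0;
    }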
/Linux-v5.4/drivers/gpu/drm/i915/selftests/
scatterlist.c
48 unsigned long pfn, n; in expect_pfn_sg() local
50 pfn = pt->start; in expect_pfn_sg()
55 if (page_to_pfn(page) != pfn) { in expect_pfn_sg()
57 __func__, who, pfn, page_to_pfn(page)); in expect_pfn_sg()
70 pfn += npages; in expect_pfn_sg()
72 if (pfn != pt->end) { in expect_pfn_sg()
74 __func__, who, pt->end, pfn); in expect_pfn_sg()
86 unsigned long pfn; in expect_pfn_sg_page_iter() local
88 pfn = pt->start; in expect_pfn_sg_page_iter()
92 if (page != pfn_to_page(pfn)) { in expect_pfn_sg_page_iter()
[all …]
/Linux-v5.4/include/trace/events/
cma.h
13 TP_PROTO(unsigned long pfn, const struct page *page,
16 TP_ARGS(pfn, page, count, align),
19 __field(unsigned long, pfn)
26 __entry->pfn = pfn;
33 __entry->pfn,
41 TP_PROTO(unsigned long pfn, const struct page *page,
44 TP_ARGS(pfn, page, count),
47 __field(unsigned long, pfn)
53 __entry->pfn = pfn;
59 __entry->pfn,
kmem.h
159 __field( unsigned long, pfn )
164 __entry->pfn = page_to_pfn(page);
169 pfn_to_page(__entry->pfn),
170 __entry->pfn,
181 __field( unsigned long, pfn )
185 __entry->pfn = page_to_pfn(page);
189 pfn_to_page(__entry->pfn),
190 __entry->pfn)
201 __field( unsigned long, pfn )
208 __entry->pfn = page ? page_to_pfn(page) : -1UL;
[all …]
/Linux-v5.4/arch/arm/mach-omap2/
io.c
70 .pfn = __phys_to_pfn(L3_24XX_PHYS),
76 .pfn = __phys_to_pfn(L4_24XX_PHYS),
86 .pfn = __phys_to_pfn(DSP_MEM_2420_PHYS),
92 .pfn = __phys_to_pfn(DSP_IPI_2420_PHYS),
98 .pfn = __phys_to_pfn(DSP_MMU_2420_PHYS),
110 .pfn = __phys_to_pfn(L4_WK_243X_PHYS),
116 .pfn = __phys_to_pfn(OMAP243X_GPMC_PHYS),
122 .pfn = __phys_to_pfn(OMAP243X_SDRC_PHYS),
128 .pfn = __phys_to_pfn(OMAP243X_SMS_PHYS),
140 .pfn = __phys_to_pfn(L3_34XX_PHYS),
[all …]
/Linux-v5.4/include/xen/arm/
page.h
15 #define phys_to_machine_mapping_valid(pfn) (1) argument
43 unsigned long __pfn_to_mfn(unsigned long pfn);
47 static inline unsigned long pfn_to_gfn(unsigned long pfn) in pfn_to_gfn() argument
49 return pfn; in pfn_to_gfn()
58 static inline unsigned long pfn_to_bfn(unsigned long pfn) in pfn_to_bfn() argument
63 mfn = __pfn_to_mfn(pfn); in pfn_to_bfn()
68 return pfn; in pfn_to_bfn()
96 bool __set_phys_to_machine(unsigned long pfn, unsigned long mfn);
97 bool __set_phys_to_machine_multi(unsigned long pfn, unsigned long mfn,
100 static inline bool set_phys_to_machine(unsigned long pfn, unsigned long mfn) in set_phys_to_machine() argument
[all …]
/Linux-v5.4/arch/s390/kernel/
suspend.c
100 void page_key_read(unsigned long *pfn) in page_key_read() argument
106 page = pfn_to_page(*pfn); in page_key_read()
111 *(unsigned char *) pfn = key; in page_key_read()
118 void page_key_memorize(unsigned long *pfn) in page_key_memorize() argument
120 page_key_wp->data[page_key_wx] = *(unsigned char *) pfn; in page_key_memorize()
121 *(unsigned char *) pfn = 0; in page_key_memorize()
152 int pfn_is_nosave(unsigned long pfn) in pfn_is_nosave() argument
160 if (pfn <= LC_PAGES) in pfn_is_nosave()
162 if (pfn >= nosave_begin_pfn && pfn < nosave_end_pfn) in pfn_is_nosave()
165 if (pfn >= stext_pfn && pfn <= end_rodata_pfn) in pfn_is_nosave()
[all …]
/Linux-v5.4/arch/alpha/include/asm/
mmzone.h
35 #define node_localnr(pfn, nid) ((pfn) - NODE_DATA(nid)->node_start_pfn) argument
82 unsigned long pfn; \
84 pfn = page_to_pfn(page) << 32; \
85 pte_val(pte) = pfn | pgprot_val(pgprot); \
104 #define pfn_to_nid(pfn) pa_to_nid(((u64)(pfn) << PAGE_SHIFT)) argument
105 #define pfn_valid(pfn) \ argument
106 (((pfn) - node_start_pfn(pfn_to_nid(pfn))) < \
107 node_spanned_pages(pfn_to_nid(pfn))) \
/Linux-v5.4/arch/unicore32/mm/
ioremap.c
98 remap_area_sections(unsigned long virt, unsigned long pfn, in remap_area_sections() argument
114 set_pmd(pmd, __pmd(__pfn_to_phys(pfn) | type->prot_sect)); in remap_area_sections()
115 pfn += SZ_4M >> PAGE_SHIFT; in remap_area_sections()
125 void __iomem *__uc32_ioremap_pfn_caller(unsigned long pfn, in __uc32_ioremap_pfn_caller() argument
136 if (pfn >= 0x100000 && (__pfn_to_phys(pfn) & ~SECTION_MASK)) in __uc32_ioremap_pfn_caller()
142 if (pfn_valid(pfn)) { in __uc32_ioremap_pfn_caller()
164 if (!((__pfn_to_phys(pfn) | size | addr) & ~PMD_MASK)) { in __uc32_ioremap_pfn_caller()
166 err = remap_area_sections(addr, pfn, size, type); in __uc32_ioremap_pfn_caller()
168 err = ioremap_page_range(addr, addr + size, __pfn_to_phys(pfn), in __uc32_ioremap_pfn_caller()
185 unsigned long pfn = __phys_to_pfn(phys_addr); in __uc32_ioremap_caller() local
[all …]
/Linux-v5.4/arch/unicore32/kernel/
hibernate.c
72 unsigned long pfn; in resume_physical_mapping_init() local
80 pfn = 0; in resume_physical_mapping_init()
87 if (pfn >= max_low_pfn) in resume_physical_mapping_init()
93 if (pfn >= max_low_pfn) in resume_physical_mapping_init()
104 for (; pte < max_pte; pte++, pfn++) { in resume_physical_mapping_init()
105 if (pfn >= max_low_pfn) in resume_physical_mapping_init()
108 set_pte(pte, pfn_pte(pfn, PAGE_KERNEL_EXEC)); in resume_physical_mapping_init()
142 int pfn_is_nosave(unsigned long pfn) in pfn_is_nosave() argument
147 return (pfn >= begin_pfn) && (pfn < end_pfn); in pfn_is_nosave()
/Linux-v5.4/arch/ia64/include/asm/
page.h
99 extern int ia64_pfn_valid (unsigned long pfn);
101 # define ia64_pfn_valid(pfn) 1 argument
108 # define pfn_to_page(pfn) (vmem_map + (pfn)) argument
109 # define __pfn_to_phys(pfn) PFN_PHYS(pfn) argument
118 # define pfn_valid(pfn) (((pfn) < max_mapnr) && ia64_pfn_valid(pfn)) argument
122 # define pfn_valid(pfn) (((pfn) >= min_low_pfn) && ((pfn) < max_low_pfn) && ia64_pfn_valid(pfn)) argument
127 #define pfn_to_kaddr(pfn) __va((pfn) << PAGE_SHIFT) argument
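The ia64 header spells out two pfn operations that recur throughout this listing: pfn_valid() as a plain range check and pfn_to_kaddr() as a shift to a physical address followed by __va(). The sketch below models __va() as adding a fixed direct-map offset; the pfn limits and the PAGE_OFFSET value are invented for illustration and are not ia64's actual constants:

    /* Sketch of a range-style pfn_valid() and pfn_to_kaddr(); __va() is
     * modelled as a plain add of an invented direct-map offset. */
    #include <stdio.h>
    #include <stdbool.h>

    #define PAGE_SHIFT   12
    #define PAGE_OFFSET  0xffff888000000000ULL  /* illustrative direct-map base */

    static unsigned long min_low_pfn = 0x100;    /* invented limits */
    static unsigned long max_low_pfn = 0x80000;

    static bool pfn_valid(unsigned long pfn)
    {
        return pfn >= min_low_pfn && pfn < max_low_pfn;
    }

    static unsigned long long pfn_to_kaddr(unsigned long pfn)
    {
        /* __va(phys) == phys + PAGE_OFFSET in this simplified model */
        return ((unsigned long long)pfn << PAGE_SHIFT) + PAGE_OFFSET;
    }

    int main(void)
    {
        unsigned long pfn = 0x1234;
        if (pfn_valid(pfn))
            printf("pfn %#lx -> kaddr %#llx\n", pfn, pfn_to_kaddr(pfn));
        return 0;
    }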
