Searched refs:pfn (Results 1 – 25 of 607) sorted by relevance


/Linux-v4.19/include/linux/
pfn_t.h
27 static inline pfn_t __pfn_to_pfn_t(unsigned long pfn, u64 flags) in __pfn_to_pfn_t() argument
29 pfn_t pfn_t = { .val = pfn | (flags & PFN_FLAGS_MASK), }; in __pfn_to_pfn_t()
35 static inline pfn_t pfn_to_pfn_t(unsigned long pfn) in pfn_to_pfn_t() argument
37 return __pfn_to_pfn_t(pfn, 0); in pfn_to_pfn_t()
45 static inline bool pfn_t_has_page(pfn_t pfn) in pfn_t_has_page() argument
47 return (pfn.val & PFN_MAP) == PFN_MAP || (pfn.val & PFN_DEV) == 0; in pfn_t_has_page()
50 static inline unsigned long pfn_t_to_pfn(pfn_t pfn) in pfn_t_to_pfn() argument
52 return pfn.val & ~PFN_FLAGS_MASK; in pfn_t_to_pfn()
55 static inline struct page *pfn_t_to_page(pfn_t pfn) in pfn_t_to_page() argument
57 if (pfn_t_has_page(pfn)) in pfn_t_to_page()
[all …]
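
pfn_t packs a raw page frame number together with PFN_* flag bits kept in the high bits of the value. A minimal sketch of how the helpers above compose; the flag combination and the WARN_ON are illustrative, and the round trip assumes the raw pfn itself has no flag bits set:

    #include <linux/pfn_t.h>

    /* Sketch: pack a raw pfn plus flags into a pfn_t and unpack it again. */
    static void pfn_t_roundtrip(unsigned long raw_pfn)
    {
            pfn_t p = __pfn_to_pfn_t(raw_pfn, PFN_DEV | PFN_MAP);

            /* flag bits sit in PFN_FLAGS_MASK, so pfn_t_to_pfn() masks them off */
            WARN_ON(pfn_t_to_pfn(p) != raw_pfn);

            /* PFN_MAP indicates the pfn has a struct page behind it */
            if (pfn_t_has_page(p))
                    pr_debug("page = %p\n", pfn_t_to_page(p));
    }
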
hmm.h
165 uint64_t pfn) in hmm_pfn_to_page() argument
167 if (pfn == range->values[HMM_PFN_NONE]) in hmm_pfn_to_page()
169 if (pfn == range->values[HMM_PFN_ERROR]) in hmm_pfn_to_page()
171 if (pfn == range->values[HMM_PFN_SPECIAL]) in hmm_pfn_to_page()
173 if (!(pfn & range->flags[HMM_PFN_VALID])) in hmm_pfn_to_page()
175 return pfn_to_page(pfn >> range->pfn_shift); in hmm_pfn_to_page()
185 uint64_t pfn) in hmm_pfn_to_pfn() argument
187 if (pfn == range->values[HMM_PFN_NONE]) in hmm_pfn_to_pfn()
189 if (pfn == range->values[HMM_PFN_ERROR]) in hmm_pfn_to_pfn()
191 if (pfn == range->values[HMM_PFN_SPECIAL]) in hmm_pfn_to_pfn()
[all …]
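
hmm_pfn_to_page() and hmm_pfn_to_pfn() decode the driver-visible uint64_t entries using the per-range flags/values tables and pfn_shift. A minimal sketch of a caller walking a snapshot, assuming the range's pfns[] array has already been filled in (for example via hmm_vma_get_pfns() in this kernel version); the function name is illustrative:

    #include <linux/hmm.h>

    /* Sketch: turn each snapshot entry back into a struct page. */
    static void walk_hmm_snapshot(struct hmm_range *range)
    {
            unsigned long npages = (range->end - range->start) >> PAGE_SHIFT;
            unsigned long i;

            for (i = 0; i < npages; i++) {
                    /* NULL for none/error/special or !HMM_PFN_VALID entries */
                    struct page *page = hmm_pfn_to_page(range, range->pfns[i]);

                    if (page)
                            pr_debug("%#lx -> pfn %#lx\n",
                                     range->start + (i << PAGE_SHIFT),
                                     page_to_pfn(page));
            }
    }
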
/Linux-v4.19/mm/
page_isolation.c
22 unsigned long flags, pfn; in set_migratetype_isolate() local
39 pfn = page_to_pfn(page); in set_migratetype_isolate()
40 arg.start_pfn = pfn; in set_migratetype_isolate()
97 unsigned long pfn, buddy_pfn; in unset_migratetype_isolate() local
116 pfn = page_to_pfn(page); in unset_migratetype_isolate()
117 buddy_pfn = __find_buddy_pfn(pfn, order); in unset_migratetype_isolate()
118 buddy = page + (buddy_pfn - pfn); in unset_migratetype_isolate()
148 __first_valid_page(unsigned long pfn, unsigned long nr_pages) in __first_valid_page() argument
155 if (!pfn_valid_within(pfn + i)) in __first_valid_page()
157 page = pfn_to_online_page(pfn + i); in __first_valid_page()
[all …]
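
Lines 117-118 recover the buddy page from its pfn. In v4.19, __find_buddy_pfn() simply flips the order bit of the pfn, so the buddy's struct page is a fixed offset away in the memmap. A small sketch of that arithmetic (the helper name is illustrative):

    #include <linux/mm.h>

    /*
     * Sketch: what __find_buddy_pfn(pfn, order) computes, written out.
     * E.g. pfn 0x1234 at order 3 has buddy pfn 0x1234 ^ 0x8 = 0x123c,
     * whose struct page sits 8 entries further along the memmap.
     */
    static struct page *buddy_of(struct page *page, unsigned int order)
    {
            unsigned long pfn = page_to_pfn(page);
            unsigned long buddy_pfn = pfn ^ (1UL << order);

            return page + (buddy_pfn - pfn);
    }
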
page_ext.c
125 unsigned long pfn = page_to_pfn(page); in lookup_page_ext() local
138 index = pfn - round_down(node_start_pfn(page_to_nid(page)), in lookup_page_ext()
200 unsigned long pfn = page_to_pfn(page); in lookup_page_ext() local
201 struct mem_section *section = __pfn_to_section(pfn); in lookup_page_ext()
210 return get_entry(section->page_ext, pfn); in lookup_page_ext()
229 static int __meminit init_section_page_ext(unsigned long pfn, int nid) in init_section_page_ext() argument
235 section = __pfn_to_section(pfn); in init_section_page_ext()
259 pfn &= PAGE_SECTION_MASK; in init_section_page_ext()
260 section->page_ext = (void *)base - get_entry_size() * pfn; in init_section_page_ext()
280 static void __free_page_ext(unsigned long pfn) in __free_page_ext() argument
[all …]
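
Line 260 stores a deliberately biased base pointer: the allocation is offset backwards by entry_size times the section's first pfn, so the lookup on line 210 can index with the absolute pfn instead of subtracting the section start on every call. A sketch of that trick with stand-in types; everything here is illustrative:

    /* Sketch: pre-biasing a per-section base so lookups use absolute pfns. */
    struct page_ext_stub { unsigned long flags; };

    static void *bias_base(void *alloc, unsigned long section_start_pfn,
                           size_t entry_size)
    {
            /* mirrors: section->page_ext = (void *)base - get_entry_size() * pfn */
            return (char *)alloc - entry_size * section_start_pfn;
    }

    static struct page_ext_stub *lookup(void *biased, unsigned long pfn,
                                        size_t entry_size)
    {
            /* mirrors get_entry(): base + entry_size * index, with index == pfn */
            return (struct page_ext_stub *)((char *)biased + entry_size * pfn);
    }
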
memory-failure.c
213 static int kill_proc(struct to_kill *tk, unsigned long pfn, int flags) in kill_proc() argument
220 pfn, t->comm, t->pid); in kill_proc()
361 unsigned long pfn, int flags) in kill_procs() argument
374 pfn, tk->tsk->comm, tk->tsk->pid); in kill_procs()
384 else if (kill_proc(tk, pfn, flags) < 0) in kill_procs()
386 pfn, tk->tsk->comm, tk->tsk->pid); in kill_procs()
591 static int truncate_error_page(struct page *p, unsigned long pfn, in truncate_error_page() argument
601 pfn, err); in truncate_error_page()
605 pfn); in truncate_error_page()
618 pfn); in truncate_error_page()
[all …]
memory_hotplug.c
222 unsigned long i, pfn, end_pfn, nr_pages; in register_page_bootmem_info_node() local
232 pfn = pgdat->node_start_pfn; in register_page_bootmem_info_node()
236 for (; pfn < end_pfn; pfn += PAGES_PER_SECTION) { in register_page_bootmem_info_node()
243 if (pfn_valid(pfn) && (early_pfn_to_nid(pfn) == node)) in register_page_bootmem_info_node()
244 register_page_bootmem_info_section(pfn); in register_page_bootmem_info_node()
349 unsigned long pfn; in find_biggest_section_pfn() local
352 pfn = end_pfn - 1; in find_biggest_section_pfn()
353 for (; pfn >= start_pfn; pfn -= PAGES_PER_SECTION) { in find_biggest_section_pfn()
354 ms = __pfn_to_section(pfn); in find_biggest_section_pfn()
359 if (unlikely(pfn_to_nid(pfn) != nid)) in find_biggest_section_pfn()
[all …]
page_owner.c
261 unsigned long pfn = zone->zone_start_pfn, block_end_pfn; in pagetypeinfo_showmixedcount_print() local
262 unsigned long end_pfn = pfn + zone->spanned_pages; in pagetypeinfo_showmixedcount_print()
268 pfn = zone->zone_start_pfn; in pagetypeinfo_showmixedcount_print()
275 for (; pfn < end_pfn; ) { in pagetypeinfo_showmixedcount_print()
276 if (!pfn_valid(pfn)) { in pagetypeinfo_showmixedcount_print()
277 pfn = ALIGN(pfn + 1, MAX_ORDER_NR_PAGES); in pagetypeinfo_showmixedcount_print()
281 block_end_pfn = ALIGN(pfn + 1, pageblock_nr_pages); in pagetypeinfo_showmixedcount_print()
284 page = pfn_to_page(pfn); in pagetypeinfo_showmixedcount_print()
287 for (; pfn < block_end_pfn; pfn++) { in pagetypeinfo_showmixedcount_print()
288 if (!pfn_valid_within(pfn)) in pagetypeinfo_showmixedcount_print()
[all …]
page_idle.c
31 static struct page *page_idle_get_page(unsigned long pfn) in page_idle_get_page() argument
36 if (!pfn_valid(pfn)) in page_idle_get_page()
39 page = pfn_to_page(pfn); in page_idle_get_page()
127 unsigned long pfn, end_pfn; in page_idle_bitmap_read() local
133 pfn = pos * BITS_PER_BYTE; in page_idle_bitmap_read()
134 if (pfn >= max_pfn) in page_idle_bitmap_read()
137 end_pfn = pfn + count * BITS_PER_BYTE; in page_idle_bitmap_read()
141 for (; pfn < end_pfn; pfn++) { in page_idle_bitmap_read()
142 bit = pfn % BITMAP_CHUNK_BITS; in page_idle_bitmap_read()
145 page = page_idle_get_page(pfn); in page_idle_bitmap_read()
[all …]
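
page_idle_bitmap_read() backs the /sys/kernel/mm/page_idle/bitmap file: each 8-byte word covers 64 consecutive pfns (bit pfn % 64 within word pfn / 64, native byte order), and accesses must be whole 8-byte chunks. A userspace sketch of testing one pfn; the pfn value is illustrative, and this needs root plus CONFIG_IDLE_PAGE_TRACKING:

    #include <fcntl.h>
    #include <stdint.h>
    #include <stdio.h>
    #include <unistd.h>

    int main(void)
    {
            unsigned long pfn = 0x12345;            /* illustrative */
            uint64_t chunk;
            off_t off = (pfn / 64) * sizeof(chunk); /* whole 8-byte chunks only */
            int fd = open("/sys/kernel/mm/page_idle/bitmap", O_RDONLY);

            if (fd < 0 || pread(fd, &chunk, sizeof(chunk), off) != (ssize_t)sizeof(chunk))
                    return 1;
            printf("pfn %#lx: %s\n", pfn,
                   (chunk >> (pfn % 64)) & 1 ? "idle" : "not idle (or not a user page)");
            close(fd);
            return 0;
    }
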
cma.c
87 static void cma_clear_bitmap(struct cma *cma, unsigned long pfn, in cma_clear_bitmap() argument
92 bitmap_no = (pfn - cma->base_pfn) >> cma->order_per_bit; in cma_clear_bitmap()
103 unsigned long base_pfn = cma->base_pfn, pfn = base_pfn; in cma_activate_area() local
112 WARN_ON_ONCE(!pfn_valid(pfn)); in cma_activate_area()
113 zone = page_zone(pfn_to_page(pfn)); in cma_activate_area()
118 base_pfn = pfn; in cma_activate_area()
119 for (j = pageblock_nr_pages; j; --j, pfn++) { in cma_activate_area()
120 WARN_ON_ONCE(!pfn_valid(pfn)); in cma_activate_area()
127 if (page_zone(pfn_to_page(pfn)) != zone) in cma_activate_area()
407 unsigned long pfn = -1; in cma_alloc() local
[all …]
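
cma_clear_bitmap() shows the allocation bitmap's granularity: one bit covers 2^order_per_bit pages, so a pfn inside the area maps to bit (pfn - base_pfn) >> order_per_bit. A tiny sketch with a worked value (numbers are illustrative):

    /*
     * Sketch: the bit index computed by cma_clear_bitmap() above.
     * E.g. base_pfn = 0x80000, order_per_bit = 2 (4-page granules):
     * pfns 0x80010..0x80013 all map to bit (0x10 >> 2) = 4.
     */
    static unsigned long cma_bit_for_pfn(unsigned long base_pfn,
                                         unsigned int order_per_bit,
                                         unsigned long pfn)
    {
            return (pfn - base_pfn) >> order_per_bit;
    }
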
/Linux-v4.19/arch/x86/xen/
p2m.c
124 static inline unsigned p2m_top_index(unsigned long pfn) in p2m_top_index() argument
126 BUG_ON(pfn >= MAX_P2M_PFN); in p2m_top_index()
127 return pfn / (P2M_MID_PER_PAGE * P2M_PER_PAGE); in p2m_top_index()
130 static inline unsigned p2m_mid_index(unsigned long pfn) in p2m_mid_index() argument
132 return (pfn / P2M_PER_PAGE) % P2M_MID_PER_PAGE; in p2m_mid_index()
135 static inline unsigned p2m_index(unsigned long pfn) in p2m_index() argument
137 return pfn % P2M_PER_PAGE; in p2m_index()
172 static void p2m_init_identity(unsigned long *p2m, unsigned long pfn) in p2m_init_identity() argument
177 p2m[i] = IDENTITY_FRAME(pfn + i); in p2m_init_identity()
210 unsigned long pfn, mfn; in xen_build_mfn_list_list() local
[all …]
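
p2m_top_index(), p2m_mid_index() and p2m_index() split a pfn across the three p2m levels. On 64-bit x86 with 4 KiB pages both P2M_PER_PAGE and P2M_MID_PER_PAGE work out to 512, so the split is a base-512 decomposition. A sketch with a worked value (the function name is illustrative):

    #include <linux/kernel.h>

    /*
     * Sketch: the three-level split, assuming P2M_PER_PAGE ==
     * P2M_MID_PER_PAGE == 512 (64-bit, 4 KiB pages).
     * E.g. pfn 0x12345 (74565): top = 0, mid = 145, idx = 325,
     * since (0 * 512 + 145) * 512 + 325 == 74565.
     */
    static void p2m_split(unsigned long pfn)
    {
            unsigned top = pfn / (512 * 512);  /* p2m_top_index() */
            unsigned mid = (pfn / 512) % 512;  /* p2m_mid_index() */
            unsigned idx = pfn % 512;          /* p2m_index()     */

            pr_debug("pfn %#lx -> level indices (%u, %u, %u)\n",
                     pfn, top, mid, idx);
    }
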
/Linux-v4.19/tools/testing/scatterlist/
main.c
18 #define pfn(...) (unsigned []){ __VA_ARGS__ } macro
26 unsigned *pfn; in main() member
31 { -EINVAL, 1, pfn(0), PAGE_SIZE, PAGE_SIZE + 1, 1 }, in main()
32 { -EINVAL, 1, pfn(0), PAGE_SIZE, 0, 1 }, in main()
33 { -EINVAL, 1, pfn(0), PAGE_SIZE, sgmax + 1, 1 }, in main()
34 { 0, 1, pfn(0), PAGE_SIZE, sgmax, 1 }, in main()
35 { 0, 1, pfn(0), 1, sgmax, 1 }, in main()
36 { 0, 2, pfn(0, 1), 2 * PAGE_SIZE, sgmax, 1 }, in main()
37 { 0, 2, pfn(1, 0), 2 * PAGE_SIZE, sgmax, 2 }, in main()
38 { 0, 3, pfn(0, 1, 2), 3 * PAGE_SIZE, sgmax, 1 }, in main()
[all …]
/Linux-v4.19/arch/x86/include/asm/xen/
page.h
57 extern int xen_alloc_p2m_entry(unsigned long pfn);
59 extern unsigned long get_phys_to_machine(unsigned long pfn);
60 extern bool set_phys_to_machine(unsigned long pfn, unsigned long mfn);
61 extern bool __set_phys_to_machine(unsigned long pfn, unsigned long mfn);
115 static inline unsigned long __pfn_to_mfn(unsigned long pfn) in __pfn_to_mfn() argument
119 if (pfn < xen_p2m_size) in __pfn_to_mfn()
120 mfn = xen_p2m_addr[pfn]; in __pfn_to_mfn()
121 else if (unlikely(pfn < xen_max_p2m_pfn)) in __pfn_to_mfn()
122 return get_phys_to_machine(pfn); in __pfn_to_mfn()
124 return IDENTITY_FRAME(pfn); in __pfn_to_mfn()
[all …]
/Linux-v4.19/arch/arm/xen/
p2m.c
21 unsigned long pfn; member
42 if (new->pfn == entry->pfn) in xen_add_phys_to_mach_entry()
45 if (new->pfn < entry->pfn) in xen_add_phys_to_mach_entry()
57 __func__, &new->pfn, &new->mfn, &entry->pfn, &entry->mfn); in xen_add_phys_to_mach_entry()
62 unsigned long __pfn_to_mfn(unsigned long pfn) in __pfn_to_mfn() argument
71 if (entry->pfn <= pfn && in __pfn_to_mfn()
72 entry->pfn + entry->nr_pages > pfn) { in __pfn_to_mfn()
74 return entry->mfn + (pfn - entry->pfn); in __pfn_to_mfn()
76 if (pfn < entry->pfn) in __pfn_to_mfn()
119 bool __set_phys_to_machine_multi(unsigned long pfn, in __set_phys_to_machine_multi() argument
[all …]
/Linux-v4.19/include/asm-generic/
memory_model.h
18 #define arch_pfn_to_nid(pfn) pfn_to_nid(pfn) argument
22 #define arch_local_page_offset(pfn, nid) \ argument
23 ((pfn) - NODE_DATA(nid)->node_start_pfn)
33 #define __pfn_to_page(pfn) (mem_map + ((pfn) - ARCH_PFN_OFFSET)) argument
38 #define __pfn_to_page(pfn) \ argument
39 ({ unsigned long __pfn = (pfn); \
54 #define __pfn_to_page(pfn) (vmemmap + (pfn)) argument
68 #define __pfn_to_page(pfn) \ argument
69 ({ unsigned long __pfn = (pfn); \
79 #define __pfn_to_phys(pfn) PFN_PHYS(pfn) argument
/Linux-v4.19/drivers/gpu/drm/i915/selftests/
scatterlist.c
47 unsigned long pfn, n; in expect_pfn_sg() local
49 pfn = pt->start; in expect_pfn_sg()
54 if (page_to_pfn(page) != pfn) { in expect_pfn_sg()
56 __func__, who, pfn, page_to_pfn(page)); in expect_pfn_sg()
69 pfn += npages; in expect_pfn_sg()
71 if (pfn != pt->end) { in expect_pfn_sg()
73 __func__, who, pt->end, pfn); in expect_pfn_sg()
85 unsigned long pfn; in expect_pfn_sg_page_iter() local
87 pfn = pt->start; in expect_pfn_sg_page_iter()
91 if (page != pfn_to_page(pfn)) { in expect_pfn_sg_page_iter()
[all …]
/Linux-v4.19/include/trace/events/
cma.h
13 TP_PROTO(unsigned long pfn, const struct page *page,
16 TP_ARGS(pfn, page, count, align),
19 __field(unsigned long, pfn)
26 __entry->pfn = pfn;
33 __entry->pfn,
41 TP_PROTO(unsigned long pfn, const struct page *page,
44 TP_ARGS(pfn, page, count),
47 __field(unsigned long, pfn)
53 __entry->pfn = pfn;
59 __entry->pfn,
kmem.h
158 __field( unsigned long, pfn )
163 __entry->pfn = page_to_pfn(page);
168 pfn_to_page(__entry->pfn),
169 __entry->pfn,
180 __field( unsigned long, pfn )
184 __entry->pfn = page_to_pfn(page);
188 pfn_to_page(__entry->pfn),
189 __entry->pfn)
200 __field( unsigned long, pfn )
207 __entry->pfn = page ? page_to_pfn(page) : -1UL;
[all …]
/Linux-v4.19/arch/parisc/include/asm/
mmzone.h
34 #define pfn_is_io(pfn) ((pfn & (0xf0000000UL >> PAGE_SHIFT)) == (0xf0000000UL >> PAGE_SHIFT)) argument
37 #define pfn_is_io(pfn) ((pfn & (0xf000000000000000UL >> PAGE_SHIFT)) == (0xf000000000000000UL >> PA… argument
40 static inline int pfn_to_nid(unsigned long pfn) in pfn_to_nid() argument
44 if (unlikely(pfn_is_io(pfn))) in pfn_to_nid()
47 i = pfn >> PFNNID_SHIFT; in pfn_to_nid()
53 static inline int pfn_valid(int pfn) in pfn_valid() argument
55 int nid = pfn_to_nid(pfn); in pfn_valid()
58 return (pfn < node_end_pfn(nid)); in pfn_valid()
/Linux-v4.19/arch/arm/mach-omap2/
io.c
73 .pfn = __phys_to_pfn(L3_24XX_PHYS),
79 .pfn = __phys_to_pfn(L4_24XX_PHYS),
89 .pfn = __phys_to_pfn(DSP_MEM_2420_PHYS),
95 .pfn = __phys_to_pfn(DSP_IPI_2420_PHYS),
101 .pfn = __phys_to_pfn(DSP_MMU_2420_PHYS),
113 .pfn = __phys_to_pfn(L4_WK_243X_PHYS),
119 .pfn = __phys_to_pfn(OMAP243X_GPMC_PHYS),
125 .pfn = __phys_to_pfn(OMAP243X_SDRC_PHYS),
131 .pfn = __phys_to_pfn(OMAP243X_SMS_PHYS),
143 .pfn = __phys_to_pfn(L3_34XX_PHYS),
[all …]
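
These .pfn initializers fill ARM struct map_desc entries, which describe fixed virtual-to-physical I/O mappings installed early via iotable_init(). A sketch of one such entry; the virtual address, physical base and size are illustrative, not real OMAP values:

    #include <linux/init.h>
    #include <linux/sizes.h>
    #include <asm/mach/map.h>

    /* Sketch: a static I/O mapping entry of the kind built above. */
    static struct map_desc example_io_desc[] __initdata = {
            {
                    .virtual = 0xfa000000,                  /* illustrative */
                    .pfn     = __phys_to_pfn(0x48000000),   /* illustrative */
                    .length  = SZ_4M,
                    .type    = MT_DEVICE,
            },
    };
    /* registered with iotable_init(example_io_desc, ARRAY_SIZE(example_io_desc)) */
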
/Linux-v4.19/include/xen/arm/
page.h
15 #define phys_to_machine_mapping_valid(pfn) (1) argument
43 unsigned long __pfn_to_mfn(unsigned long pfn);
47 static inline unsigned long pfn_to_gfn(unsigned long pfn) in pfn_to_gfn() argument
49 return pfn; in pfn_to_gfn()
58 static inline unsigned long pfn_to_bfn(unsigned long pfn) in pfn_to_bfn() argument
63 mfn = __pfn_to_mfn(pfn); in pfn_to_bfn()
68 return pfn; in pfn_to_bfn()
96 bool __set_phys_to_machine(unsigned long pfn, unsigned long mfn);
97 bool __set_phys_to_machine_multi(unsigned long pfn, unsigned long mfn,
100 static inline bool set_phys_to_machine(unsigned long pfn, unsigned long mfn) in set_phys_to_machine() argument
[all …]
/Linux-v4.19/arch/s390/kernel/
suspend.c
100 void page_key_read(unsigned long *pfn) in page_key_read() argument
106 page = pfn_to_page(*pfn); in page_key_read()
111 *(unsigned char *) pfn = key; in page_key_read()
118 void page_key_memorize(unsigned long *pfn) in page_key_memorize() argument
120 page_key_wp->data[page_key_wx] = *(unsigned char *) pfn; in page_key_memorize()
121 *(unsigned char *) pfn = 0; in page_key_memorize()
152 int pfn_is_nosave(unsigned long pfn) in pfn_is_nosave() argument
160 if (pfn <= LC_PAGES) in pfn_is_nosave()
162 if (pfn >= nosave_begin_pfn && pfn < nosave_end_pfn) in pfn_is_nosave()
165 if (pfn >= stext_pfn && pfn <= end_rodata_pfn) in pfn_is_nosave()
[all …]
/Linux-v4.19/arch/x86/power/
hibernate_32.c
86 unsigned long pfn; in resume_physical_mapping_init() local
94 pfn = 0; in resume_physical_mapping_init()
101 if (pfn >= max_low_pfn) in resume_physical_mapping_init()
105 if (pfn >= max_low_pfn) in resume_physical_mapping_init()
113 set_pmd(pmd, pfn_pmd(pfn, PAGE_KERNEL_LARGE_EXEC)); in resume_physical_mapping_init()
114 pfn += PTRS_PER_PTE; in resume_physical_mapping_init()
123 for (; pte < max_pte; pte++, pfn++) { in resume_physical_mapping_init()
124 if (pfn >= max_low_pfn) in resume_physical_mapping_init()
127 set_pte(pte, pfn_pte(pfn, PAGE_KERNEL_EXEC)); in resume_physical_mapping_init()
170 int pfn_is_nosave(unsigned long pfn) in pfn_is_nosave() argument
[all …]
/Linux-v4.19/arch/alpha/include/asm/
mmzone.h
35 #define node_localnr(pfn, nid) ((pfn) - NODE_DATA(nid)->node_start_pfn) argument
82 unsigned long pfn; \
84 pfn = page_to_pfn(page) << 32; \
85 pte_val(pte) = pfn | pgprot_val(pgprot); \
104 #define pfn_to_nid(pfn) pa_to_nid(((u64)(pfn) << PAGE_SHIFT)) argument
105 #define pfn_valid(pfn) \ argument
106 (((pfn) - node_start_pfn(pfn_to_nid(pfn))) < \
107 node_spanned_pages(pfn_to_nid(pfn))) \
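
The alpha pfn_valid() above relies on unsigned arithmetic: a pfn below its node's first pfn wraps around in the subtraction and fails the same comparison as one past the end. Written out as a function for readability (the name is illustrative):

    /* Sketch: the pfn_valid() macro above, unfolded. */
    static inline int alpha_pfn_valid_example(unsigned long pfn)
    {
            int nid = pfn_to_nid(pfn);

            /* unsigned subtraction also rejects pfn < node_start_pfn(nid) */
            return (pfn - node_start_pfn(nid)) < node_spanned_pages(nid);
    }
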
/Linux-v4.19/arch/unicore32/mm/
ioremap.c
102 remap_area_sections(unsigned long virt, unsigned long pfn, in remap_area_sections() argument
118 set_pmd(pmd, __pmd(__pfn_to_phys(pfn) | type->prot_sect)); in remap_area_sections()
119 pfn += SZ_4M >> PAGE_SHIFT; in remap_area_sections()
129 void __iomem *__uc32_ioremap_pfn_caller(unsigned long pfn, in __uc32_ioremap_pfn_caller() argument
140 if (pfn >= 0x100000 && (__pfn_to_phys(pfn) & ~SECTION_MASK)) in __uc32_ioremap_pfn_caller()
146 if (pfn_valid(pfn)) { in __uc32_ioremap_pfn_caller()
168 if (!((__pfn_to_phys(pfn) | size | addr) & ~PMD_MASK)) { in __uc32_ioremap_pfn_caller()
170 err = remap_area_sections(addr, pfn, size, type); in __uc32_ioremap_pfn_caller()
172 err = ioremap_page_range(addr, addr + size, __pfn_to_phys(pfn), in __uc32_ioremap_pfn_caller()
189 unsigned long pfn = __phys_to_pfn(phys_addr); in __uc32_ioremap_caller() local
[all …]
/Linux-v4.19/arch/unicore32/kernel/
hibernate.c
75 unsigned long pfn; in resume_physical_mapping_init() local
83 pfn = 0; in resume_physical_mapping_init()
90 if (pfn >= max_low_pfn) in resume_physical_mapping_init()
96 if (pfn >= max_low_pfn) in resume_physical_mapping_init()
107 for (; pte < max_pte; pte++, pfn++) { in resume_physical_mapping_init()
108 if (pfn >= max_low_pfn) in resume_physical_mapping_init()
111 set_pte(pte, pfn_pte(pfn, PAGE_KERNEL_EXEC)); in resume_physical_mapping_init()
145 int pfn_is_nosave(unsigned long pfn) in pfn_is_nosave() argument
150 return (pfn >= begin_pfn) && (pfn < end_pfn); in pfn_is_nosave()
