Home
last modified time | relevance | path

Searched refs: p4d (Results 1 – 25 of 71) sorted by relevance

123

/Linux-v5.4/include/asm-generic/
D5level-fixup.h16 #define pud_alloc(mm, p4d, address) \ argument
17 ((unlikely(pgd_none(*(p4d))) && __pud_alloc(mm, p4d, address)) ? \
18 NULL : pud_offset(p4d, address))
24 static inline int p4d_none(p4d_t p4d) in p4d_none() argument
29 static inline int p4d_bad(p4d_t p4d) in p4d_bad() argument
34 static inline int p4d_present(p4d_t p4d) in p4d_present() argument
40 #define p4d_ERROR(p4d) do { } while (0) argument
41 #define p4d_clear(p4d) pgd_clear(p4d) argument
42 #define p4d_val(p4d) pgd_val(p4d) argument
43 #define p4d_populate(mm, p4d, pud) pgd_populate(mm, p4d, pud) argument
[all …]
Dpgtable-nopud.h19 typedef struct { p4d_t p4d; } pud_t; member
31 static inline int p4d_none(p4d_t p4d) { return 0; } in p4d_none() argument
32 static inline int p4d_bad(p4d_t p4d) { return 0; } in p4d_bad() argument
33 static inline int p4d_present(p4d_t p4d) { return 1; } in p4d_present() argument
34 static inline void p4d_clear(p4d_t *p4d) { } in p4d_clear() argument
35 #define pud_ERROR(pud) (p4d_ERROR((pud).p4d))
37 #define p4d_populate(mm, p4d, pud) do { } while (0) argument
38 #define p4d_populate_safe(mm, p4d, pud) do { } while (0) argument
45 static inline pud_t *pud_offset(p4d_t *p4d, unsigned long address) in pud_offset() argument
47 return (pud_t *)p4d; in pud_offset()
[all …]
Dpgtable-nop4d.h26 #define p4d_ERROR(p4d) (pgd_ERROR((p4d).pgd)) argument
28 #define pgd_populate(mm, pgd, p4d) do { } while (0) argument
29 #define pgd_populate_safe(mm, pgd, p4d) do { } while (0) argument
Dpgtable.h368 #define p4d_access_permitted(p4d, write) \ argument
369 (p4d_present(p4d) && (!(write) || p4d_write(p4d)))
429 #define set_p4d_safe(p4dp, p4d) \ argument
431 WARN_ON_ONCE(p4d_present(*p4dp) && !p4d_same(*p4dp, p4d)); \
432 set_p4d(p4dp, p4d); \
576 static inline int p4d_none_or_clear_bad(p4d_t *p4d) in p4d_none_or_clear_bad() argument
578 if (p4d_none(*p4d)) in p4d_none_or_clear_bad()
580 if (unlikely(p4d_bad(*p4d))) { in p4d_none_or_clear_bad()
581 p4d_clear_bad(p4d); in p4d_none_or_clear_bad()
1042 int p4d_set_huge(p4d_t *p4d, phys_addr_t addr, pgprot_t prot);
[all …]
/Linux-v5.4/mm/kasan/
Dinit.c50 static inline bool kasan_pud_table(p4d_t p4d) in kasan_pud_table() argument
52 return p4d_page(p4d) == virt_to_page(lm_alias(kasan_early_shadow_pud)); in kasan_pud_table()
55 static inline bool kasan_pud_table(p4d_t p4d) in kasan_pud_table() argument
146 static int __ref zero_pud_populate(p4d_t *p4d, unsigned long addr, in zero_pud_populate() argument
149 pud_t *pud = pud_offset(p4d, addr); in zero_pud_populate()
186 p4d_t *p4d = p4d_offset(pgd, addr); in zero_p4d_populate() local
195 p4d_populate(&init_mm, p4d, in zero_p4d_populate()
197 pud = pud_offset(p4d, addr); in zero_p4d_populate()
206 if (p4d_none(*p4d)) { in zero_p4d_populate()
210 p = pud_alloc(&init_mm, p4d, addr); in zero_p4d_populate()
[all …]
/Linux-v5.4/arch/x86/mm/
Dinit_64.c72 DEFINE_POPULATE(p4d_populate, p4d, pud, init)
73 DEFINE_POPULATE(pgd_populate, pgd, p4d, init)
87 DEFINE_ENTRY(p4d, p4d, init)
186 p4d_t *p4d; in sync_global_pgds_l4() local
190 p4d = p4d_offset(pgd, addr); in sync_global_pgds_l4()
195 if (!p4d_none(*p4d_ref) && !p4d_none(*p4d)) in sync_global_pgds_l4()
196 BUG_ON(p4d_page_vaddr(*p4d) in sync_global_pgds_l4()
199 if (p4d_none(*p4d)) in sync_global_pgds_l4()
200 set_p4d(p4d, *p4d_ref); in sync_global_pgds_l4()
246 p4d_t *p4d = (p4d_t *)spp_getpage(); in fill_p4d() local
[all …]
Dident_map.c67 p4d_t *p4d = p4d_page + p4d_index(addr); in ident_p4d_init() local
74 if (p4d_present(*p4d)) { in ident_p4d_init()
75 pud = pud_offset(p4d, 0); in ident_p4d_init()
83 set_p4d(p4d, __p4d(__pa(pud) | info->kernpg_flag)); in ident_p4d_init()
106 p4d_t *p4d; in kernel_ident_mapping_init() local
113 p4d = p4d_offset(pgd, 0); in kernel_ident_mapping_init()
114 result = ident_p4d_init(info, p4d, addr, next); in kernel_ident_mapping_init()
120 p4d = (p4d_t *)info->alloc_pgt_page(info->context); in kernel_ident_mapping_init()
121 if (!p4d) in kernel_ident_mapping_init()
123 result = ident_p4d_init(info, p4d, addr, next); in kernel_ident_mapping_init()
[all …]
Dkasan_init_64.c106 static void __init kasan_populate_p4d(p4d_t *p4d, unsigned long addr, in kasan_populate_p4d() argument
112 if (p4d_none(*p4d)) { in kasan_populate_p4d()
115 p4d_populate(&init_mm, p4d, p); in kasan_populate_p4d()
118 pud = pud_offset(p4d, addr); in kasan_populate_p4d()
130 p4d_t *p4d; in kasan_populate_pgd() local
138 p4d = p4d_offset(pgd, addr); in kasan_populate_pgd()
141 kasan_populate_p4d(p4d, addr, next, nid); in kasan_populate_pgd()
142 } while (p4d++, addr = next, addr != end); in kasan_populate_pgd()
197 unsigned long p4d; in early_p4d_offset() local
202 p4d = pgd_val(*pgd) & PTE_PFN_MASK; in early_p4d_offset()
[all …]
Dpti.c203 p4d_t *p4d; in pti_user_pagetable_walk_pmd() local
206 p4d = pti_user_pagetable_walk_p4d(address); in pti_user_pagetable_walk_pmd()
207 if (!p4d) in pti_user_pagetable_walk_pmd()
210 BUILD_BUG_ON(p4d_large(*p4d) != 0); in pti_user_pagetable_walk_pmd()
211 if (p4d_none(*p4d)) { in pti_user_pagetable_walk_pmd()
216 set_p4d(p4d, __p4d(_KERNPG_TABLE | __pa(new_pud_page))); in pti_user_pagetable_walk_pmd()
219 pud = pud_offset(p4d, address); in pti_user_pagetable_walk_pmd()
317 p4d_t *p4d; in pti_clone_pgtable() local
327 p4d = p4d_offset(pgd, addr); in pti_clone_pgtable()
328 if (WARN_ON(p4d_none(*p4d))) in pti_clone_pgtable()
[all …]
Dfault.c153 p4d_t *p4d, *p4d_k; in vmalloc_sync_one() local
168 p4d = p4d_offset(pgd, address); in vmalloc_sync_one()
173 pud = pud_offset(p4d, address); in vmalloc_sync_one()
285 p4d_t *p4d; in dump_pagetable() local
298 p4d = p4d_offset(pgd, address); in dump_pagetable()
299 pud = pud_offset(p4d, address); in dump_pagetable()
334 p4d_t *p4d, *p4d_k; in vmalloc_fault() local
363 p4d = p4d_offset(pgd, address); in vmalloc_fault()
368 if (p4d_none(*p4d) && !pgtable_l5_enabled()) { in vmalloc_fault()
369 set_p4d(p4d, *p4d_k); in vmalloc_fault()
[all …]
Dpgtable_32.c31 p4d_t *p4d; in set_pte_vaddr() local
41 p4d = p4d_offset(pgd, vaddr); in set_pte_vaddr()
42 if (p4d_none(*p4d)) { in set_pte_vaddr()
46 pud = pud_offset(p4d, vaddr); in set_pte_vaddr()
Dpageattr.c569 p4d_t *p4d; in lookup_address_in_pgd() local
578 p4d = p4d_offset(pgd, address); in lookup_address_in_pgd()
579 if (p4d_none(*p4d)) in lookup_address_in_pgd()
583 if (p4d_large(*p4d) || !p4d_present(*p4d)) in lookup_address_in_pgd()
584 return (pte_t *)p4d; in lookup_address_in_pgd()
586 pud = pud_offset(p4d, address); in lookup_address_in_pgd()
638 p4d_t *p4d; in lookup_pmd_address() local
645 p4d = p4d_offset(pgd, address); in lookup_pmd_address()
646 if (p4d_none(*p4d) || p4d_large(*p4d) || !p4d_present(*p4d)) in lookup_pmd_address()
649 pud = pud_offset(p4d, address); in lookup_pmd_address()
[all …]
Dmem_encrypt_identity.c108 p4d_t *p4d; in sme_prepare_pgd() local
114 p4d = ppd->pgtable_area; in sme_prepare_pgd()
115 memset(p4d, 0, sizeof(*p4d) * PTRS_PER_P4D); in sme_prepare_pgd()
116 ppd->pgtable_area += sizeof(*p4d) * PTRS_PER_P4D; in sme_prepare_pgd()
117 set_pgd(pgd, __pgd(PGD_FLAGS | __pa(p4d))); in sme_prepare_pgd()
120 p4d = p4d_offset(pgd, ppd->vaddr); in sme_prepare_pgd()
121 if (p4d_none(*p4d)) { in sme_prepare_pgd()
125 set_p4d(p4d, __p4d(P4D_FLAGS | __pa(pud))); in sme_prepare_pgd()
128 pud = pud_offset(p4d, ppd->vaddr); in sme_prepare_pgd()
/Linux-v5.4/arch/x86/include/asm/
Dpgalloc.h138 static inline void p4d_populate(struct mm_struct *mm, p4d_t *p4d, pud_t *pud) in p4d_populate() argument
141 set_p4d(p4d, __p4d(_PAGE_TABLE | __pa(pud))); in p4d_populate()
144 static inline void p4d_populate_safe(struct mm_struct *mm, p4d_t *p4d, pud_t *pud) in p4d_populate_safe() argument
147 set_p4d_safe(p4d, __p4d(_PAGE_TABLE | __pa(pud))); in p4d_populate_safe()
174 static inline void pgd_populate(struct mm_struct *mm, pgd_t *pgd, p4d_t *p4d) in pgd_populate() argument
178 paravirt_alloc_p4d(mm, __pa(p4d) >> PAGE_SHIFT); in pgd_populate()
179 set_pgd(pgd, __pgd(_PAGE_TABLE | __pa(p4d))); in pgd_populate()
182 static inline void pgd_populate_safe(struct mm_struct *mm, pgd_t *pgd, p4d_t *p4d) in pgd_populate_safe() argument
186 paravirt_alloc_p4d(mm, __pa(p4d) >> PAGE_SHIFT); in pgd_populate_safe()
187 set_pgd_safe(pgd, __pgd(_PAGE_TABLE | __pa(p4d))); in pgd_populate_safe()
[all …]
Dpgtable_types.h308 typedef struct { p4dval_t p4d; } p4d_t; member
315 static inline p4dval_t native_p4d_val(p4d_t p4d) in native_p4d_val() argument
317 return p4d.p4d; in native_p4d_val()
327 static inline p4dval_t native_p4d_val(p4d_t p4d) in native_p4d_val() argument
329 return native_pgd_val(p4d.pgd); in native_p4d_val()
350 return (pud_t) { .p4d.pgd = native_make_pgd(val) }; in native_make_pud()
355 return native_pgd_val(pud.p4d.pgd); in native_pud_val()
376 return (pmd_t) { .pud.p4d.pgd = native_make_pgd(val) }; in native_make_pmd()
381 return native_pgd_val(pmd.pud.p4d.pgd); in native_pmd_val()
385 static inline p4dval_t p4d_pfn_mask(p4d_t p4d) in p4d_pfn_mask() argument
[all …]
/Linux-v5.4/lib/
Dioremap.c146 static inline int ioremap_pud_range(p4d_t *p4d, unsigned long addr, in ioremap_pud_range() argument
152 pud = pud_alloc(&init_mm, p4d, addr); in ioremap_pud_range()
167 static int ioremap_try_huge_p4d(p4d_t *p4d, unsigned long addr, in ioremap_try_huge_p4d() argument
183 if (p4d_present(*p4d) && !p4d_free_pud_page(p4d, addr)) in ioremap_try_huge_p4d()
186 return p4d_set_huge(p4d, phys_addr, prot); in ioremap_try_huge_p4d()
192 p4d_t *p4d; in ioremap_p4d_range() local
195 p4d = p4d_alloc(&init_mm, pgd, addr); in ioremap_p4d_range()
196 if (!p4d) in ioremap_p4d_range()
201 if (ioremap_try_huge_p4d(p4d, addr, next, phys_addr, prot)) in ioremap_p4d_range()
204 if (ioremap_pud_range(p4d, addr, next, phys_addr, prot)) in ioremap_p4d_range()
[all …]
/Linux-v5.4/mm/
Dsparse-vmemmap.c180 pud_t * __meminit vmemmap_pud_populate(p4d_t *p4d, unsigned long addr, int node) in vmemmap_pud_populate() argument
182 pud_t *pud = pud_offset(p4d, addr); in vmemmap_pud_populate()
194 p4d_t *p4d = p4d_offset(pgd, addr); in vmemmap_p4d_populate() local
195 if (p4d_none(*p4d)) { in vmemmap_p4d_populate()
199 p4d_populate(&init_mm, p4d, p); in vmemmap_p4d_populate()
201 return p4d; in vmemmap_p4d_populate()
221 p4d_t *p4d; in vmemmap_populate_basepages() local
230 p4d = vmemmap_p4d_populate(pgd, addr, node); in vmemmap_populate_basepages()
231 if (!p4d) in vmemmap_populate_basepages()
233 pud = vmemmap_pud_populate(p4d, addr, node); in vmemmap_populate_basepages()
/Linux-v5.4/arch/x86/power/
Dhibernate_64.c32 p4d_t *p4d = NULL; in set_up_temporary_text_mapping() local
55 p4d = (p4d_t *)get_safe_page(GFP_ATOMIC); in set_up_temporary_text_mapping()
56 if (!p4d) in set_up_temporary_text_mapping()
72 if (p4d) { in set_up_temporary_text_mapping()
74 pgd_t new_pgd = __pgd(__pa(p4d) | pgprot_val(pgtable_prot)); in set_up_temporary_text_mapping()
76 set_p4d(p4d + p4d_index(restore_jump_address), new_p4d); in set_up_temporary_text_mapping()
Dhibernate.c214 p4d_t *p4d; in relocate_restore_code() local
228 p4d = p4d_offset(pgd, relocated_restore_code); in relocate_restore_code()
229 if (p4d_large(*p4d)) { in relocate_restore_code()
230 set_p4d(p4d, __p4d(p4d_val(*p4d) & ~_PAGE_NX)); in relocate_restore_code()
233 pud = pud_offset(p4d, relocated_restore_code); in relocate_restore_code()
Dhibernate_32.c32 p4d_t *p4d; in resume_one_md_table_init() local
42 p4d = p4d_offset(pgd, 0); in resume_one_md_table_init()
43 pud = pud_offset(p4d, 0); in resume_one_md_table_init()
47 p4d = p4d_offset(pgd, 0); in resume_one_md_table_init()
48 pud = pud_offset(p4d, 0); in resume_one_md_table_init()
/Linux-v5.4/arch/x86/kernel/
Dhead64.c120 p4dval_t *p4d; in __startup_64() local
161 p4d = fixup_pointer(&level4_kernel_pgt, physaddr); in __startup_64()
162 p4d[511] += load_delta; in __startup_64()
187 p4d = fixup_pointer(early_dynamic_pgts[(*next_pgt_ptr)++], in __startup_64()
191 pgd[i + 0] = (pgdval_t)p4d + pgtable_flags; in __startup_64()
192 pgd[i + 1] = (pgdval_t)p4d + pgtable_flags; in __startup_64()
195 p4d[(i + 0) % PTRS_PER_P4D] = (pgdval_t)pud + pgtable_flags; in __startup_64()
196 p4d[(i + 1) % PTRS_PER_P4D] = (pgdval_t)pud + pgtable_flags; in __startup_64()
304 p4dval_t p4d, *p4d_p; in __early_make_pgtable() local
336 p4d = *p4d_p; in __early_make_pgtable()
[all …]
Dmachine_kexec_64.c114 free_page((unsigned long)image->arch.p4d); in free_transition_pgtable()
115 image->arch.p4d = NULL; in free_transition_pgtable()
129 p4d_t *p4d; in init_transition_pgtable() local
138 p4d = (p4d_t *)get_zeroed_page(GFP_KERNEL); in init_transition_pgtable()
139 if (!p4d) in init_transition_pgtable()
141 image->arch.p4d = p4d; in init_transition_pgtable()
142 set_pgd(pgd, __pgd(__pa(p4d) | _KERNPG_TABLE)); in init_transition_pgtable()
144 p4d = p4d_offset(pgd, vaddr); in init_transition_pgtable()
145 if (!p4d_present(*p4d)) { in init_transition_pgtable()
150 set_p4d(p4d, __p4d(__pa(pud) | _KERNPG_TABLE)); in init_transition_pgtable()
[all …]
/Linux-v5.4/arch/s390/mm/
Dpage-states.c120 static void mark_kernel_pud(p4d_t *p4d, unsigned long addr, unsigned long end) in mark_kernel_pud() argument
127 pud = pud_offset(p4d, addr); in mark_kernel_pud()
145 p4d_t *p4d; in mark_kernel_p4d() local
148 p4d = p4d_offset(pgd, addr); in mark_kernel_p4d()
151 if (p4d_none(*p4d)) in mark_kernel_p4d()
153 if (!p4d_folded(*p4d)) { in mark_kernel_p4d()
154 page = virt_to_page(p4d_val(*p4d)); in mark_kernel_p4d()
158 mark_kernel_pud(p4d, addr, next); in mark_kernel_p4d()
159 } while (p4d++, addr = next, addr != end); in mark_kernel_p4d()
Ddump_pagetables.c182 p4d_t *p4d, unsigned long addr) in walk_pud_level() argument
189 if ((p4d_val(*p4d) & PAGE_MASK) == __pa(kasan_early_shadow_pud)) { in walk_pud_level()
195 pud = pud_offset(p4d, addr); in walk_pud_level()
215 p4d_t *p4d; in walk_p4d_level() local
225 p4d = p4d_offset(pgd, addr); in walk_p4d_level()
226 for (i = 0; i < PTRS_PER_P4D && addr < max_addr; i++, p4d++) { in walk_p4d_level()
228 if (!p4d_none(*p4d)) in walk_p4d_level()
229 walk_pud_level(m, st, p4d, addr); in walk_p4d_level()
/Linux-v5.4/arch/s390/include/asm/
Dpgalloc.h59 #define p4d_free(mm, p4d) crst_table_free(mm, (unsigned long *) p4d) argument
90 static inline void pgd_populate(struct mm_struct *mm, pgd_t *pgd, p4d_t *p4d) in pgd_populate() argument
92 pgd_val(*pgd) = _REGION1_ENTRY | __pa(p4d); in pgd_populate()
95 static inline void p4d_populate(struct mm_struct *mm, p4d_t *p4d, pud_t *pud) in p4d_populate() argument
97 p4d_val(*p4d) = _REGION2_ENTRY | __pa(pud); in p4d_populate()

123