Searched refs:vaddr_end (Results 1 – 8 of 8) sorted by relevance
/Linux-v4.19/arch/x86/mm/ |
D | mem_encrypt_identity.c |
      72  unsigned long vaddr_end;  member
      85  pgd_end = ppd->vaddr_end & PGDIR_MASK;  in sme_clear_pgd()
     175  while (ppd->vaddr < ppd->vaddr_end) {  in __sme_map_range_pmd()
     185  while (ppd->vaddr < ppd->vaddr_end) {  in __sme_map_range_pte()
     196  unsigned long vaddr_end;  in __sme_map_range() local
     202  vaddr_end = ppd->vaddr_end;  in __sme_map_range()
     205  ppd->vaddr_end = ALIGN(ppd->vaddr, PMD_PAGE_SIZE);  in __sme_map_range()
     209  ppd->vaddr_end = vaddr_end & PMD_PAGE_MASK;  in __sme_map_range()
     213  ppd->vaddr_end = vaddr_end;  in __sme_map_range()
     370  ppd.vaddr_end = workarea_end;  in sme_encrypt_kernel()
     [all …]
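
The __sme_map_range() hits above show the range being carved into an unaligned head, a 2M-aligned body, and an unaligned tail before the PTE/PMD mapping helpers walk [vaddr, vaddr_end). A minimal user-space sketch of that boundary arithmetic follows; the macro values, example addresses, and map_range() printouts are illustrative stand-ins, not the kernel's definitions, and the range is assumed to span more than one 2M page as the SME callers arrange.

#include <stdio.h>

#define PMD_PAGE_SIZE   (1UL << 21)              /* 2 MiB */
#define PMD_PAGE_MASK   (~(PMD_PAGE_SIZE - 1))
#define ALIGN(x, a)     (((x) + (a) - 1) & ~((a) - 1))

static void map_range(unsigned long start, unsigned long end, const char *how)
{
        if (start < end)
                printf("map [%#lx, %#lx) with %s pages\n", start, end, how);
}

int main(void)
{
        /* Arbitrary example range spanning several 2M pages. */
        unsigned long vaddr     = 0xffffffff81001000UL;
        unsigned long vaddr_end = 0xffffffff81a03000UL;

        /* Head up to the first 2M boundary, then whole 2M pages, then the tail. */
        unsigned long head_end = ALIGN(vaddr, PMD_PAGE_SIZE);
        unsigned long body_end = vaddr_end & PMD_PAGE_MASK;

        map_range(vaddr, head_end, "4K");      /* unaligned head */
        map_range(head_end, body_end, "2M");   /* PMD-aligned body */
        map_range(body_end, vaddr_end, "4K");  /* unaligned tail */
        return 0;
}
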
D | mem_encrypt.c |
     255  unsigned long vaddr_end, vaddr_next;  in early_set_memory_enc_dec() local
     262  vaddr_end = vaddr + size;  in early_set_memory_enc_dec()
     264  for (; vaddr < vaddr_end; vaddr = vaddr_next) {  in early_set_memory_enc_dec()
     287  ((vaddr_end - vaddr) >= psize)) {  in early_set_memory_enc_dec()
     305  __pa((vaddr_end & pmask) + psize),  in early_set_memory_enc_dec()
     353  unsigned long vaddr, vaddr_end, npages;  in mem_encrypt_free_decrypted_mem() local
     357  vaddr_end = (unsigned long)__end_bss_decrypted;  in mem_encrypt_free_decrypted_mem()
     358  npages = (vaddr_end - vaddr) >> PAGE_SHIFT;  in mem_encrypt_free_decrypted_mem()
     372  free_init_pages("unused decrypted", vaddr, vaddr_end);  in mem_encrypt_free_decrypted_mem()
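
The early_set_memory_enc_dec() hits show the common x86/mm walk shape: vaddr_end is fixed once as vaddr + size, and each iteration advances to vaddr_next by whatever page size the current step allows, with the (vaddr_end - vaddr) >= psize test keeping a large step inside the range. A hedged user-space sketch of that stepping logic; the alignment test below stands in for the kernel's lookup_address() level check, and the addresses are made up.

#include <stdbool.h>
#include <stdio.h>

#define PAGE_SIZE (1UL << 12)
#define PMD_SIZE  (1UL << 21)

int main(void)
{
        /* Arbitrary example range; the kernel derives vaddr_end from vaddr + size. */
        unsigned long vaddr = 0x7f00001ff000UL;
        unsigned long size  = 2 * PMD_SIZE + 3 * PAGE_SIZE;
        unsigned long vaddr_end = vaddr + size;
        unsigned long vaddr_next;

        for (; vaddr < vaddr_end; vaddr = vaddr_next) {
                /*
                 * Take a whole 2M step only when the address is 2M aligned
                 * and at least 2M of the range remains; otherwise fall back
                 * to a single 4K page.
                 */
                bool use_large = !(vaddr & (PMD_SIZE - 1)) &&
                                 (vaddr_end - vaddr) >= PMD_SIZE;
                unsigned long step = use_large ? PMD_SIZE : PAGE_SIZE;

                printf("%s page at %#lx\n", use_large ? "2M" : "4K", vaddr);
                vaddr_next = vaddr + step;
        }
        return 0;
}
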
D | kaslr.c |
      41  static const unsigned long vaddr_end = CPU_ENTRY_AREA_BASE;  variable
      89  BUILD_BUG_ON(vaddr_start >= vaddr_end);  in kernel_randomize_memory()
      90  BUILD_BUG_ON(vaddr_end != CPU_ENTRY_AREA_BASE);  in kernel_randomize_memory()
      91  BUILD_BUG_ON(vaddr_end > __START_KERNEL_map);  in kernel_randomize_memory()
     112  remain_entropy = vaddr_end - vaddr_start;  in kernel_randomize_memory()
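
In kaslr.c, vaddr_end is a compile-time constant marking where the randomization window stops, the BUILD_BUG_ON() lines assert the layout ordering, and line 112 turns the window size into the entropy budget. A small sketch of those invariants, using _Static_assert in place of BUILD_BUG_ON; VADDR_START and START_KERNEL_MAP are renamed placeholder constants, not the kernel's exact values.

#include <stdio.h>

/* Placeholder layout constants; the real values come from the x86-64 memory-map headers. */
#define VADDR_START          0xffff888000000000UL
#define CPU_ENTRY_AREA_BASE  0xfffffe0000000000UL
#define START_KERNEL_MAP     0xffffffff80000000UL

/* User-space stand-in for the kernel's BUILD_BUG_ON() checks. */
_Static_assert(VADDR_START < CPU_ENTRY_AREA_BASE,
               "randomized regions must start below vaddr_end");
_Static_assert(CPU_ENTRY_AREA_BASE <= START_KERNEL_MAP,
               "vaddr_end must not reach the kernel text mapping");

static const unsigned long vaddr_end = CPU_ENTRY_AREA_BASE;

int main(void)
{
        /* The whole window is the entropy budget split across the randomized regions. */
        unsigned long remain_entropy = vaddr_end - VADDR_START;

        printf("entropy budget: %#lx bytes\n", remain_entropy);
        return 0;
}
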
D | init_64.c |
     392  unsigned long vaddr_end = __START_KERNEL_map + KERNEL_IMAGE_SIZE;  in cleanup_highmap() local
     402  vaddr_end = __START_KERNEL_map + (max_pfn_mapped << PAGE_SHIFT);  in cleanup_highmap()
     404  for (; vaddr + PMD_SIZE - 1 < vaddr_end; pmd++, vaddr += PMD_SIZE) {  in cleanup_highmap()
     700  unsigned long vaddr, vaddr_start, vaddr_end, vaddr_next, paddr_last;  in kernel_physical_mapping_init() local
     704  vaddr_end = (unsigned long)__va(paddr_end);  in kernel_physical_mapping_init()
     707  for (; vaddr < vaddr_end; vaddr = vaddr_next) {  in kernel_physical_mapping_init()
     716  __pa(vaddr_end),  in kernel_physical_mapping_init()
     722  paddr_last = phys_p4d_init(p4d, __pa(vaddr), __pa(vaddr_end),  in kernel_physical_mapping_init()
     735  sync_global_pgds(vaddr_start, vaddr_end - 1);  in kernel_physical_mapping_init()
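
The cleanup_highmap() loop at init_64.c line 404 is worth reading closely: the vaddr + PMD_SIZE - 1 < vaddr_end bound only touches PMD entries that lie entirely inside the range. A tiny sketch of just that bound, with made-up addresses:

#include <stdio.h>

#define PMD_SIZE (1UL << 21)

int main(void)
{
        /* Made-up range whose tail only partially covers the last 2M page. */
        unsigned long vaddr = 0xffffffff80000000UL;
        unsigned long vaddr_end = vaddr + 3 * PMD_SIZE + 0x1000;
        int visited = 0;

        /*
         * Same bound as cleanup_highmap(): a PMD entry is visited only if
         * the whole 2M region it maps fits inside [vaddr, vaddr_end), so
         * the partially covered trailing entry is left untouched.
         */
        for (; vaddr + PMD_SIZE - 1 < vaddr_end; vaddr += PMD_SIZE)
                visited++;

        printf("visited %d full PMD entries\n", visited);
        return 0;
}
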
/Linux-v4.19/arch/x86/kernel/ |
D | head64.c |
     116  unsigned long vaddr, vaddr_end;  in __startup_64() local
     248  vaddr_end = (unsigned long)__end_bss_decrypted;  in __startup_64()
     249  for (; vaddr < vaddr_end; vaddr += PMD_SIZE) {  in __startup_64()
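
head64.c walks the .bss..decrypted section in whole-PMD steps during early boot, with vaddr_end taken from a linker-provided end symbol. A sketch of that loop shape; the symbol values and the "fix up" printout are placeholders for the real linker symbols and early page-table update.

#include <stdio.h>

#define PMD_SIZE (1UL << 21)

/* Stand-ins for the __start_bss_decrypted/__end_bss_decrypted linker symbols. */
static const unsigned long start_bss_decrypted = 0xffffffff82000000UL;
static const unsigned long end_bss_decrypted   = 0xffffffff82400000UL;

int main(void)
{
        unsigned long vaddr = start_bss_decrypted;
        unsigned long vaddr_end = end_bss_decrypted;

        /*
         * One step per 2M mapping; this relies on the section being 2M
         * aligned and 2M sized, which the kernel's linker script arranges.
         */
        for (; vaddr < vaddr_end; vaddr += PMD_SIZE)
                printf("fix up PMD mapping at %#lx\n", vaddr);

        return 0;
}
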
/Linux-v4.19/Documentation/x86/x86_64/ |
D | mm.txt |
      15  vaddr_end for KASLR
      41  vaddr_end for KASLR
/Linux-v4.19/arch/x86/xen/ |
D | mmu_pv.c |
    1085  unsigned long vaddr_end)  in xen_cleanhighmap() argument
    1092  for (; vaddr <= vaddr_end && (pmd < (level2_kernel_pgt + PTRS_PER_PMD));  in xen_cleanhighmap()
    1110  void *vaddr_end = vaddr + size;  in xen_free_ro_pages() local
    1112  for (; vaddr < vaddr_end; vaddr += PAGE_SIZE)  in xen_free_ro_pages()
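
The Xen hits show two different loop bounds: xen_free_ro_pages() uses the usual exclusive vaddr < vaddr_end page walk, while xen_cleanhighmap() combines an inclusive vaddr <= vaddr_end test with a cap at the end of level2_kernel_pgt. A sketch of the latter, with a plain array standing in for the PMD table and made-up addresses:

#include <stdio.h>

#define PMD_SIZE      (1UL << 21)
#define PTRS_PER_PMD  512

/* Stand-in for level2_kernel_pgt: one slot per 2M kernel mapping. */
static unsigned long level2_kernel_pgt[PTRS_PER_PMD];

int main(void)
{
        /* Made-up kernel-image bounds for the demonstration. */
        unsigned long vaddr     = 0xffffffff81000000UL;
        unsigned long vaddr_end = 0xffffffff82000000UL;
        unsigned long *pmd = level2_kernel_pgt +
                             ((vaddr >> 21) & (PTRS_PER_PMD - 1));
        int cleared = 0;

        /*
         * Same two-part bound as xen_cleanhighmap(): the inclusive
         * vaddr <= vaddr_end test also covers the entry that maps
         * vaddr_end itself, and the pointer comparison keeps the walk
         * inside the table.
         */
        for (; vaddr <= vaddr_end && pmd < level2_kernel_pgt + PTRS_PER_PMD;
             pmd++, vaddr += PMD_SIZE) {
                *pmd = 0;
                cleared++;
        }

        printf("cleared %d PMD entries\n", cleared);
        return 0;
}
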
/Linux-v4.19/arch/x86/kvm/ |
D | svm.c |
    6403  unsigned long vaddr, vaddr_end, next_vaddr, npages, size;  in sev_launch_update_data() local
    6422  vaddr_end = vaddr + size;  in sev_launch_update_data()
    6439  for (i = 0; vaddr < vaddr_end; vaddr = next_vaddr, i += pages) {  in sev_launch_update_data()
    6752  unsigned long vaddr, vaddr_end, next_vaddr;  in sev_dbg_crypt() local
    6767  vaddr_end = vaddr + size;  in sev_dbg_crypt()
    6770  for (; vaddr < vaddr_end; vaddr = next_vaddr) {  in sev_dbg_crypt()
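
In svm.c both SEV helpers walk a guest-supplied [vaddr, vaddr + size) range with a next_vaddr cursor; sev_dbg_crypt() in particular processes the data in chunks that never cross a page boundary. A hedged sketch of that chunking; process_chunk() and the example range are made-up placeholders for the per-chunk firmware command and the user-provided parameters.

#include <stdio.h>

#define PAGE_SIZE (1UL << 12)

/* Placeholder for the per-chunk SEV firmware command issued by the kernel. */
static void process_chunk(unsigned long vaddr, unsigned long len)
{
        printf("chunk at %#lx, %lu bytes\n", vaddr, len);
}

int main(void)
{
        /* Made-up, deliberately unaligned guest range. */
        unsigned long vaddr = 0x7f1234567e00UL;
        unsigned long size  = 2 * PAGE_SIZE + 0x300;
        unsigned long vaddr_end = vaddr + size;
        unsigned long next_vaddr;

        for (; vaddr < vaddr_end; vaddr = next_vaddr) {
                /* Clamp each chunk so it never crosses a page boundary. */
                unsigned long len = PAGE_SIZE - (vaddr & (PAGE_SIZE - 1));

                if (len > vaddr_end - vaddr)
                        len = vaddr_end - vaddr;

                process_chunk(vaddr, len);
                next_vaddr = vaddr + len;
        }
        return 0;
}
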