
Searched refs:zero_page (Results 1 – 14 of 14) sorted by relevance

/Linux-v5.15/include/trace/events/
fs_dax.h 65 struct page *zero_page,
67 TP_ARGS(inode, vmf, zero_page, radix_entry),
72 __field(struct page *, zero_page)
81 __entry->zero_page = zero_page;
91 __entry->zero_page,
99 struct page *zero_page, void *radix_entry), \
100 TP_ARGS(inode, vmf, zero_page, radix_entry))
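The fragments above are from the DAX tracepoint header. As a rough, hedged sketch of the same DECLARE_EVENT_CLASS/DEFINE_EVENT pattern (event and class names here are hypothetical, and the usual tracepoint-header boilerplate such as the TRACE_SYSTEM definition and multi-read guards is omitted):

DECLARE_EVENT_CLASS(sketch_dax_pmd_class,
        TP_PROTO(struct inode *inode, struct vm_fault *vmf,
                 struct page *zero_page, void *radix_entry),
        TP_ARGS(inode, vmf, zero_page, radix_entry),
        TP_STRUCT__entry(
                __field(unsigned long, ino)
                __field(struct page *, zero_page)
                __field(void *, radix_entry)
        ),
        TP_fast_assign(
                __entry->ino = inode->i_ino;
                __entry->zero_page = zero_page;
                __entry->radix_entry = radix_entry;
        ),
        TP_printk("ino %#lx zero_page %p radix_entry %p",
                  __entry->ino, __entry->zero_page, __entry->radix_entry)
);

/* Each call site then stamps out a named event from the class. */
DEFINE_EVENT(sketch_dax_pmd_class, sketch_dax_pmd_load_hole,
        TP_PROTO(struct inode *inode, struct vm_fault *vmf,
                 struct page *zero_page, void *radix_entry),
        TP_ARGS(inode, vmf, zero_page, radix_entry));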
/Linux-v5.15/arch/nds32/mm/
init.c 140 void *zero_page; in paging_init() local
152 zero_page = memblock_alloc(PAGE_SIZE, PAGE_SIZE); in paging_init()
153 if (!zero_page) in paging_init()
158 empty_zero_page = virt_to_page(zero_page); in paging_init()
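A minimal sketch of the early-boot idiom in the nds32 hits above, assuming the surrounding paging_init() context; memblock_alloc() returns zeroed memory, so the page only needs to be published as empty_zero_page (architecture-specific cache maintenance is omitted):

        void *zero_page;

        /* Carve one zero-filled page out of the boot allocator. */
        zero_page = memblock_alloc(PAGE_SIZE, PAGE_SIZE);
        if (!zero_page)
                panic("%s: failed to allocate zero page\n", __func__);

        /* Publish it as the architecture-wide empty zero page. */
        empty_zero_page = virt_to_page(zero_page);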
/Linux-v5.15/arch/arm64/kernel/
hibernate.c 434 void *zero_page; in swsusp_arch_resume() local
458 zero_page = (void *)get_safe_page(GFP_ATOMIC); in swsusp_arch_resume()
459 if (!zero_page) { in swsusp_arch_resume()
499 resume_hdr.__hyp_stub_vectors, virt_to_phys(zero_page)); in swsusp_arch_resume()
hibernate-asm.S 25 .macro break_before_make_ttbr_switch zero_page, page_table, tmp, tmp2
26 phys_to_ttbr \tmp, \zero_page
/Linux-v5.15/mm/
huge_memory.c 93 struct page *zero_page; in get_huge_zero_page() local
98 zero_page = alloc_pages((GFP_TRANSHUGE | __GFP_ZERO) & ~__GFP_MOVABLE, in get_huge_zero_page()
100 if (!zero_page) { in get_huge_zero_page()
106 if (cmpxchg(&huge_zero_page, NULL, zero_page)) { in get_huge_zero_page()
108 __free_pages(zero_page, compound_order(zero_page)); in get_huge_zero_page()
111 WRITE_ONCE(huge_zero_pfn, page_to_pfn(zero_page)); in get_huge_zero_page()
159 struct page *zero_page = xchg(&huge_zero_page, NULL); in shrink_huge_zero_page_scan() local
160 BUG_ON(zero_page == NULL); in shrink_huge_zero_page_scan()
162 __free_pages(zero_page, compound_order(zero_page)); in shrink_huge_zero_page_scan()
711 struct page *zero_page) in set_huge_zero_page() argument
[all …]
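A minimal sketch, under stated assumptions, of the racy-initialization pattern visible in get_huge_zero_page() above: allocate a candidate huge zero page, publish it with cmpxchg(), and drop the local copy if another CPU won the race. The singleton variable and function names are hypothetical, and the refcounting/shrinker wiring of the real code is left out:

static struct page *sketch_huge_zero_page;

static struct page *sketch_get_huge_zero_page(void)
{
        struct page *zero_page;

        /* GFP_TRANSHUGE minus __GFP_MOVABLE: this page must never migrate. */
        zero_page = alloc_pages((GFP_TRANSHUGE | __GFP_ZERO) & ~__GFP_MOVABLE,
                                HPAGE_PMD_ORDER);
        if (!zero_page)
                return NULL;

        if (cmpxchg(&sketch_huge_zero_page, NULL, zero_page)) {
                /* Lost the publish race: free our copy, reuse the winner's. */
                __free_pages(zero_page, compound_order(zero_page));
        }
        return READ_ONCE(sketch_huge_zero_page);
}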
/Linux-v5.15/drivers/dma/
bcm2835-dma.c 50 dma_addr_t zero_page; member
750 if (buf_addr == od->zero_page && !c->is_lite_channel) in bcm2835_dma_prep_dma_cyclic()
852 dma_unmap_page_attrs(od->ddev.dev, od->zero_page, PAGE_SIZE, in bcm2835_dma_free()
937 od->zero_page = dma_map_page_attrs(od->ddev.dev, ZERO_PAGE(0), 0, in bcm2835_dma_probe()
940 if (dma_mapping_error(od->ddev.dev, od->zero_page)) { in bcm2835_dma_probe()
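A hedged sketch of the driver pattern in the bcm2835-dma hits: map the global ZERO_PAGE(0) once at probe time so cyclic transfers can stream zeros to the device without a dedicated bounce buffer, then unmap it on teardown. The helper names are illustrative, not the driver's:

static int sketch_map_zero_page(struct device *dev, dma_addr_t *zero_dma)
{
        /* The zero page is never written by the CPU, so CPU syncs can be skipped. */
        *zero_dma = dma_map_page_attrs(dev, ZERO_PAGE(0), 0, PAGE_SIZE,
                                       DMA_TO_DEVICE, DMA_ATTR_SKIP_CPU_SYNC);
        if (dma_mapping_error(dev, *zero_dma))
                return -ENOMEM;
        return 0;
}

static void sketch_unmap_zero_page(struct device *dev, dma_addr_t zero_dma)
{
        dma_unmap_page_attrs(dev, zero_dma, PAGE_SIZE, DMA_TO_DEVICE,
                             DMA_ATTR_SKIP_CPU_SYNC);
}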
/Linux-v5.15/fs/
dax.c 1078 struct page *zero_page; in dax_pmd_load_hole() local
1083 zero_page = mm_get_huge_zero_page(vmf->vma->vm_mm); in dax_pmd_load_hole()
1085 if (unlikely(!zero_page)) in dax_pmd_load_hole()
1088 pfn = page_to_pfn_t(zero_page); in dax_pmd_load_hole()
1108 pmd_entry = mk_pmd(zero_page, vmf->vma->vm_page_prot); in dax_pmd_load_hole()
1112 trace_dax_pmd_load_hole(inode, vmf, zero_page, *entry); in dax_pmd_load_hole()
1118 trace_dax_pmd_load_hole_fallback(inode, vmf, zero_page, *entry); in dax_pmd_load_hole()
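A condensed, non-authoritative sketch of the core of dax_pmd_load_hole() as seen above: a read fault over a hole is backed by the shared huge zero page via a huge PMD entry. Locking, the mapping-tree entry, and the tracepoint calls are omitted; this fragment assumes the fault-handler context:

        struct page *zero_page;
        pmd_t pmd_entry;

        zero_page = mm_get_huge_zero_page(vmf->vma->vm_mm);
        if (unlikely(!zero_page))
                return VM_FAULT_FALLBACK;       /* fall back to PTE-sized faults */

        /* Build a huge PMD entry pointing at the shared zero page ... */
        pmd_entry = mk_pmd(zero_page, vmf->vma->vm_page_prot);
        pmd_entry = pmd_mkhuge(pmd_entry);
        /* ... and install it at the faulting, PMD-aligned address. */
        set_pmd_at(vmf->vma->vm_mm, vmf->address & HPAGE_PMD_MASK,
                   vmf->pmd, pmd_entry);
        return VM_FAULT_NOPAGE;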
/Linux-v5.15/include/target/
target_core_fabric.h 214 u32 length, bool zero_page, bool chainable);
/Linux-v5.15/drivers/nvdimm/
pfn_devs.c 364 void *zero_page = page_address(ZERO_PAGE(0)); in nd_pfn_clear_memmap_errors() local
402 rc = nvdimm_write_bytes(ndns, nsoff, zero_page, in nd_pfn_clear_memmap_errors()
btt.c 511 void *zero_page = page_address(ZERO_PAGE(0)); in arena_clear_freelist_error() local
521 ret = arena_write_bytes(arena, nsoff, zero_page, in arena_clear_freelist_error()
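In both nvdimm hits above, page_address(ZERO_PAGE(0)) serves as a ready-made, always-resident buffer of PAGE_SIZE zeros. A hedged sketch of the idiom, with a hypothetical device type and write helper standing in for nvdimm_write_bytes()/arena_write_bytes():

static int sketch_zero_range(struct sketch_dev *dev, resource_size_t off, size_t len)
{
        /* Borrow the kernel's global zero page instead of allocating a buffer. */
        void *zero_page = page_address(ZERO_PAGE(0));

        while (len) {
                size_t chunk = min_t(size_t, len, PAGE_SIZE);
                /* sketch_write_bytes() is a hypothetical stand-in for the media write helper. */
                int rc = sketch_write_bytes(dev, off, zero_page, chunk);

                if (rc)
                        return rc;
                off += chunk;
                len -= chunk;
        }
        return 0;
}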
/Linux-v5.15/arch/arm/mm/
mmu.c 1724 void *zero_page; in paging_init() local
1747 zero_page = early_alloc(PAGE_SIZE); in paging_init()
1751 empty_zero_page = virt_to_page(zero_page); in paging_init()
/Linux-v5.15/drivers/target/
target_core_transport.c 2681 bool zero_page, bool chainable) in target_alloc_sgl() argument
2683 gfp_t gfp = GFP_KERNEL | (zero_page ? __GFP_ZERO : 0); in target_alloc_sgl()
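Per the line above, the zero_page flag just ORs __GFP_ZERO into the allocation mask. A hedged caller sketch based only on the target_core_fabric.h prototype shown earlier; the length value is illustrative:

        struct scatterlist *sgl;
        unsigned int nents;
        u32 length = 4096;
        int ret;

        /* Ask for a zero-filled, non-chainable scatterlist covering 'length' bytes. */
        ret = target_alloc_sgl(&sgl, &nents, length, true, false);
        if (ret < 0)
                return ret;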
/Linux-v5.15/virt/kvm/
kvm_main.c 3055 const void *zero_page = (const void *) __va(page_to_phys(ZERO_PAGE(0))); in kvm_clear_guest() local
3062 ret = kvm_write_guest_page(kvm, gfn, zero_page, offset, len); in kvm_clear_guest()
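kvm_clear_guest() above zeroes guest memory by repeatedly writing the host zero page into the guest, one page-sized chunk at a time; page_address(ZERO_PAGE(0)) is used below as an equivalent of the __va(page_to_phys(...)) form in the hit. A minimal sketch with a hypothetical function name:

static int sketch_clear_guest(struct kvm *kvm, gpa_t gpa, unsigned long len)
{
        const void *zero_page = page_address(ZERO_PAGE(0));
        gfn_t gfn = gpa >> PAGE_SHIFT;
        int offset = offset_in_page(gpa);

        while (len) {
                /* Clamp each write to the remainder of the current guest page. */
                int seg = min_t(unsigned long, PAGE_SIZE - offset, len);
                int ret = kvm_write_guest_page(kvm, gfn, zero_page, offset, seg);

                if (ret < 0)
                        return ret;
                offset = 0;
                len -= seg;
                ++gfn;
        }
        return 0;
}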
/Linux-v5.15/arch/x86/kvm/vmx/
vmx.c 3564 const void *zero_page = (const void *) __va(page_to_phys(ZERO_PAGE(0))); in init_rmode_tss() local
3569 if (__copy_to_user(ua + PAGE_SIZE * i, zero_page, PAGE_SIZE)) in init_rmode_tss()