Searched refs:PAGE_ALIGNED (Results 1 – 25 of 61) sorted by relevance
123
42 if (!PAGE_ALIGNED(__start_rodata)) { in rodata_test()
46 if (!PAGE_ALIGNED(__end_rodata)) { in rodata_test()
187 VM_BUG_ON(!PAGE_ALIGNED(start)); in vmemmap_remap_range()
188 VM_BUG_ON(!PAGE_ALIGNED(end)); in vmemmap_remap_range()
1630 VM_BUG_ON(!PAGE_ALIGNED(start)); in populate_vma_page_range()
1631 VM_BUG_ON(!PAGE_ALIGNED(end)); in populate_vma_page_range()
1698 VM_BUG_ON(!PAGE_ALIGNED(start)); in faultin_vma_page_range()
1699 VM_BUG_ON(!PAGE_ALIGNED(end)); in faultin_vma_page_range()
1866 if (!PAGE_ALIGNED(uaddr)) { in fault_in_writeable()
1976 if (!PAGE_ALIGNED(uaddr)) { in fault_in_readable()
158 if (!PAGE_ALIGNED(eq_addr) || !PAGE_ALIGNED(cq_addr) || in mana_smc_setup_hwc()
159 !PAGE_ALIGNED(rq_addr) || !PAGE_ALIGNED(sq_addr)) in mana_smc_setup_hwc()
45 if (!PAGE_ALIGNED(addr)) in set_memory_encrypted()
58 if (!PAGE_ALIGNED(addr)) in set_memory_decrypted()
28 if (!PAGE_ALIGNED(base) || !PAGE_ALIGNED(end)) { in sram_check_protect_exec()
133 if (!PAGE_ALIGNED(rmem->base) || !PAGE_ALIGNED(rmem->size)) { in open_dice_probe()
71 (!PAGE_ALIGNED(in->phys_addr) || in entry_is_valid()
72 !PAGE_ALIGNED(in->num_pages << EFI_PAGE_SHIFT))) { in entry_is_valid()
46 BUG_ON(!PAGE_ALIGNED(image->data)); in init_vdso_image()
47 BUG_ON(!PAGE_ALIGNED(image->size)); in init_vdso_image()
106 BUG_ON(!PAGE_ALIGNED(vdso_info.vdso)); in init_vdso()
107 BUG_ON(!PAGE_ALIGNED(vdso_info.size)); in init_vdso()
21 return !PAGE_ALIGNED(md->phys_addr) || in region_is_misaligned()
22 !PAGE_ALIGNED(md->num_pages << EFI_PAGE_SHIFT); in region_is_misaligned()
194 if (!PAGE_ALIGNED(tx) || !PAGE_ALIGNED(rx)) { in do_ffa_rxtx_map()
302 if (!PAGE_ALIGNED(sz)) in __ffa_host_share_ranges()
322 if (!PAGE_ALIGNED(sz)) in __ffa_host_unshare_ranges()
324 if (!PAGE_ALIGNED(phys) || !PAGE_ALIGNED(size)) in __pkvm_init()
311 if (!PAGE_ALIGNED(user_addr) || !PAGE_ALIGNED(length) || !length) in afu_dma_map_region()
352 if (!PAGE_ALIGNED(info->vcpu.start) || in vgic_v2_probe()
353 !PAGE_ALIGNED(resource_size(&info->vcpu))) { in vgic_v2_probe()
228 if (PAGE_ALIGNED(last_pte_table)) { in kernel_page_table()
276 if (PAGE_ALIGNED(last_pmd_table)) { in kernel_ptr_table()
328 if (!PAGE_ALIGNED(base_addr) || !PAGE_ALIGNED(psize)) in pfrt_log_mmap()
76 if (!PAGE_ALIGNED(addr)) { in change_memory_common()
347 if (drm_WARN_ON(&vdev->drm, size == 0 || !PAGE_ALIGNED(size))) in ivpu_bo_alloc()
570 drm_WARN_ON(&vdev->drm, !PAGE_ALIGNED(vpu_addr)); in ivpu_bo_alloc_internal()
571 drm_WARN_ON(&vdev->drm, !PAGE_ALIGNED(size)); in ivpu_bo_alloc_internal()
1244 VM_BUG_ON(!PAGE_ALIGNED(start)); in vmemmap_free()
1245 VM_BUG_ON(!PAGE_ALIGNED(end)); in vmemmap_free()
1540 VM_BUG_ON(!PAGE_ALIGNED(start)); in vmemmap_populate()
1541 VM_BUG_ON(!PAGE_ALIGNED(end)); in vmemmap_populate()
197 if (i != nents - 1 && !PAGE_ALIGNED(sg->length)) in imgu_dmamap_map_sg()
176 if (!PAGE_ALIGNED(addr)) { in xdp_umem_reg()
301 if (policy == ALLOCATE_LEFT && PAGE_ALIGNED(alloc)) in test_alloc()
303 if (policy == ALLOCATE_RIGHT && !PAGE_ALIGNED(alloc)) in test_alloc()
738 if (PAGE_ALIGNED(addr) && PAGE_ALIGNED(next)) { in remove_pte_table()
989 VM_BUG_ON(!PAGE_ALIGNED(addr)); in radix__vmemmap_pte_populate()
284 if (PAGE_ALIGNED(cur_in)) { in lzo_compress_pages()