Lines matching `full:addr` (identifier cross-reference search results; each entry shows the original file line number and the enclosing function)

64 static int ioremap_pte_range(pmd_t *pmd, unsigned long addr,  in ioremap_pte_range()  argument
72 pte = pte_alloc_kernel_track(pmd, addr, mask); in ioremap_pte_range()
77 set_pte_at(&init_mm, addr, pte, pfn_pte(pfn, prot)); in ioremap_pte_range()
79 } while (pte++, addr += PAGE_SIZE, addr != end); in ioremap_pte_range()
84 static int ioremap_try_huge_pmd(pmd_t *pmd, unsigned long addr, in ioremap_try_huge_pmd() argument
91 if ((end - addr) != PMD_SIZE) in ioremap_try_huge_pmd()
94 if (!IS_ALIGNED(addr, PMD_SIZE)) in ioremap_try_huge_pmd()
100 if (pmd_present(*pmd) && !pmd_free_pte_page(pmd, addr)) in ioremap_try_huge_pmd()
106 static inline int ioremap_pmd_range(pud_t *pud, unsigned long addr, in ioremap_pmd_range() argument
113 pmd = pmd_alloc_track(&init_mm, pud, addr, mask); in ioremap_pmd_range()
117 next = pmd_addr_end(addr, end); in ioremap_pmd_range()
119 if (ioremap_try_huge_pmd(pmd, addr, next, phys_addr, prot)) { in ioremap_pmd_range()
124 if (ioremap_pte_range(pmd, addr, next, phys_addr, prot, mask)) in ioremap_pmd_range()
126 } while (pmd++, phys_addr += (next - addr), addr = next, addr != end); in ioremap_pmd_range()
130 static int ioremap_try_huge_pud(pud_t *pud, unsigned long addr, in ioremap_try_huge_pud() argument
137 if ((end - addr) != PUD_SIZE) in ioremap_try_huge_pud()
140 if (!IS_ALIGNED(addr, PUD_SIZE)) in ioremap_try_huge_pud()
146 if (pud_present(*pud) && !pud_free_pmd_page(pud, addr)) in ioremap_try_huge_pud()
152 static inline int ioremap_pud_range(p4d_t *p4d, unsigned long addr, in ioremap_pud_range() argument
159 pud = pud_alloc_track(&init_mm, p4d, addr, mask); in ioremap_pud_range()
163 next = pud_addr_end(addr, end); in ioremap_pud_range()
165 if (ioremap_try_huge_pud(pud, addr, next, phys_addr, prot)) { in ioremap_pud_range()
170 if (ioremap_pmd_range(pud, addr, next, phys_addr, prot, mask)) in ioremap_pud_range()
172 } while (pud++, phys_addr += (next - addr), addr = next, addr != end); in ioremap_pud_range()
176 static int ioremap_try_huge_p4d(p4d_t *p4d, unsigned long addr, in ioremap_try_huge_p4d() argument
183 if ((end - addr) != P4D_SIZE) in ioremap_try_huge_p4d()
186 if (!IS_ALIGNED(addr, P4D_SIZE)) in ioremap_try_huge_p4d()
192 if (p4d_present(*p4d) && !p4d_free_pud_page(p4d, addr)) in ioremap_try_huge_p4d()
198 static inline int ioremap_p4d_range(pgd_t *pgd, unsigned long addr, in ioremap_p4d_range() argument
205 p4d = p4d_alloc_track(&init_mm, pgd, addr, mask); in ioremap_p4d_range()
209 next = p4d_addr_end(addr, end); in ioremap_p4d_range()
211 if (ioremap_try_huge_p4d(p4d, addr, next, phys_addr, prot)) { in ioremap_p4d_range()
216 if (ioremap_pud_range(p4d, addr, next, phys_addr, prot, mask)) in ioremap_p4d_range()
218 } while (p4d++, phys_addr += (next - addr), addr = next, addr != end); in ioremap_p4d_range()
222 int ioremap_page_range(unsigned long addr, in ioremap_page_range() argument
232 BUG_ON(addr >= end); in ioremap_page_range()
234 start = addr; in ioremap_page_range()
235 pgd = pgd_offset_k(addr); in ioremap_page_range()
237 next = pgd_addr_end(addr, end); in ioremap_page_range()
238 err = ioremap_p4d_range(pgd, addr, next, phys_addr, prot, in ioremap_page_range()
242 } while (pgd++, phys_addr += (next - addr), addr = next, addr != end); in ioremap_page_range()
253 void __iomem *ioremap_prot(phys_addr_t addr, size_t size, unsigned long prot) in ioremap_prot() argument
260 last_addr = addr + size - 1; in ioremap_prot()
261 if (!size || last_addr < addr) in ioremap_prot()
265 offset = addr & (~PAGE_MASK); in ioremap_prot()
266 addr -= offset; in ioremap_prot()
273 vaddr = (unsigned long)area->addr; in ioremap_prot()
275 if (ioremap_page_range(vaddr, vaddr + size, addr, __pgprot(prot))) { in ioremap_prot()
284 void iounmap(volatile void __iomem *addr) in iounmap() argument
286 vunmap((void *)((unsigned long)addr & PAGE_MASK)); in iounmap()