
Searched refs:vaddr (results 1 – 25 of 729), sorted by relevance


/Linux-v4.19/arch/m68k/include/asm/
bitops.h 31 static inline void bset_reg_set_bit(int nr, volatile unsigned long *vaddr) in bset_reg_set_bit() argument
33 char *p = (char *)vaddr + (nr ^ 31) / 8; in bset_reg_set_bit()
41 static inline void bset_mem_set_bit(int nr, volatile unsigned long *vaddr) in bset_mem_set_bit() argument
43 char *p = (char *)vaddr + (nr ^ 31) / 8; in bset_mem_set_bit()
50 static inline void bfset_mem_set_bit(int nr, volatile unsigned long *vaddr) in bfset_mem_set_bit() argument
54 : "d" (nr ^ 31), "o" (*vaddr) in bfset_mem_set_bit()
59 #define set_bit(nr, vaddr) bset_reg_set_bit(nr, vaddr) argument
61 #define set_bit(nr, vaddr) bset_mem_set_bit(nr, vaddr) argument
63 #define set_bit(nr, vaddr) (__builtin_constant_p(nr) ? \ argument
64 bset_mem_set_bit(nr, vaddr) : \
[all …]
/Linux-v4.19/arch/parisc/kernel/
pci-dma.c 78 unsigned long vaddr, in map_pte_uncached() argument
82 unsigned long orig_vaddr = vaddr; in map_pte_uncached()
84 vaddr &= ~PMD_MASK; in map_pte_uncached()
85 end = vaddr + size; in map_pte_uncached()
97 vaddr += PAGE_SIZE; in map_pte_uncached()
101 } while (vaddr < end); in map_pte_uncached()
105 static inline int map_pmd_uncached(pmd_t * pmd, unsigned long vaddr, in map_pmd_uncached() argument
109 unsigned long orig_vaddr = vaddr; in map_pmd_uncached()
111 vaddr &= ~PGDIR_MASK; in map_pmd_uncached()
112 end = vaddr + size; in map_pmd_uncached()
[all …]
/Linux-v4.19/arch/arm/mm/
cache-xsc3l2.c 100 unsigned long vaddr; in xsc3_l2_inv_range() local
107 vaddr = -1; /* to force the first mapping */ in xsc3_l2_inv_range()
113 vaddr = l2_map_va(start & ~(CACHE_LINE_SIZE - 1), vaddr); in xsc3_l2_inv_range()
114 xsc3_l2_clean_mva(vaddr); in xsc3_l2_inv_range()
115 xsc3_l2_inv_mva(vaddr); in xsc3_l2_inv_range()
123 vaddr = l2_map_va(start, vaddr); in xsc3_l2_inv_range()
124 xsc3_l2_inv_mva(vaddr); in xsc3_l2_inv_range()
132 vaddr = l2_map_va(start, vaddr); in xsc3_l2_inv_range()
133 xsc3_l2_clean_mva(vaddr); in xsc3_l2_inv_range()
134 xsc3_l2_inv_mva(vaddr); in xsc3_l2_inv_range()
[all …]
highmem.c 23 unsigned long vaddr = __fix_to_virt(idx); in set_fixmap_pte() local
24 pte_t *ptep = pte_offset_kernel(pmd_off_k(vaddr), vaddr); in set_fixmap_pte()
27 local_flush_tlb_kernel_page(vaddr); in set_fixmap_pte()
30 static inline pte_t get_fixmap_pte(unsigned long vaddr) in get_fixmap_pte() argument
32 pte_t *ptep = pte_offset_kernel(pmd_off_k(vaddr), vaddr); in get_fixmap_pte()
58 unsigned long vaddr; in kmap_atomic() local
83 vaddr = __fix_to_virt(idx); in kmap_atomic()
89 BUG_ON(!pte_none(get_fixmap_pte(vaddr))); in kmap_atomic()
98 return (void *)vaddr; in kmap_atomic()
104 unsigned long vaddr = (unsigned long) kvaddr & PAGE_MASK; in __kunmap_atomic() local
[all …]
/Linux-v4.19/arch/sh/mm/
kmap.c 18 #define kmap_get_fixmap_pte(vaddr) \ argument
19 pte_offset_kernel(pmd_offset(pud_offset(pgd_offset_k(vaddr), (vaddr)), (vaddr)), (vaddr))
25 unsigned long vaddr; in kmap_coherent_init() local
28 vaddr = __fix_to_virt(FIX_CMAP_BEGIN); in kmap_coherent_init()
29 kmap_coherent_pte = kmap_get_fixmap_pte(vaddr); in kmap_coherent_init()
35 unsigned long vaddr; in kmap_coherent() local
46 vaddr = __fix_to_virt(idx); in kmap_coherent()
51 return (void *)vaddr; in kmap_coherent()
57 unsigned long vaddr = (unsigned long)kvaddr & PAGE_MASK; in kunmap_coherent() local
58 enum fixed_addresses idx = __virt_to_fix(vaddr); in kunmap_coherent()
[all …]
/Linux-v4.19/arch/microblaze/mm/
consistent.c 66 unsigned long order, vaddr; in arch_dma_alloc() local
84 vaddr = __get_free_pages(gfp, order); in arch_dma_alloc()
85 if (!vaddr) in arch_dma_alloc()
92 flush_dcache_range(virt_to_phys((void *)vaddr), in arch_dma_alloc()
93 virt_to_phys((void *)vaddr) + size); in arch_dma_alloc()
96 ret = (void *)vaddr; in arch_dma_alloc()
115 free_pages(vaddr, order); in arch_dma_alloc()
122 *dma_handle = pa = __virt_to_phys(vaddr); in arch_dma_alloc()
131 page = virt_to_page(vaddr); in arch_dma_alloc()
153 free_pages(vaddr, order); in arch_dma_alloc()
[all …]
highmem.c 38 unsigned long vaddr; in kmap_atomic_prot() local
49 vaddr = __fix_to_virt(FIX_KMAP_BEGIN + idx); in kmap_atomic_prot()
53 set_pte_at(&init_mm, vaddr, kmap_pte-idx, mk_pte(page, prot)); in kmap_atomic_prot()
54 local_flush_tlb_page(NULL, vaddr); in kmap_atomic_prot()
56 return (void *) vaddr; in kmap_atomic_prot()
62 unsigned long vaddr = (unsigned long) kvaddr & PAGE_MASK; in __kunmap_atomic() local
66 if (vaddr < __fix_to_virt(FIX_KMAP_END)) { in __kunmap_atomic()
76 BUG_ON(vaddr != __fix_to_virt(FIX_KMAP_BEGIN + idx)); in __kunmap_atomic()
82 pte_clear(&init_mm, vaddr, kmap_pte-idx); in __kunmap_atomic()
83 local_flush_tlb_page(NULL, vaddr); in __kunmap_atomic()
/Linux-v4.19/arch/nds32/mm/
highmem.c 15 unsigned long vaddr; in kmap() local
19 vaddr = (unsigned long)kmap_high(page); in kmap()
20 return (void *)vaddr; in kmap()
38 unsigned long vaddr, pte; in kmap_atomic() local
50 vaddr = __fix_to_virt(FIX_KMAP_BEGIN + idx); in kmap_atomic()
52 ptep = pte_offset_kernel(pmd_off_k(vaddr), vaddr); in kmap_atomic()
55 __nds32__tlbop_inv(vaddr); in kmap_atomic()
56 __nds32__mtsr_dsb(vaddr, NDS32_SR_TLB_VPN); in kmap_atomic()
59 return (void *)vaddr; in kmap_atomic()
67 unsigned long vaddr = (unsigned long)kvaddr; in __kunmap_atomic() local
[all …]
cacheflush.c 178 void copy_user_page(void *vto, void *vfrom, unsigned long vaddr, in copy_user_page() argument
181 cpu_dcache_wbinval_page((unsigned long)vaddr); in copy_user_page()
182 cpu_icache_inval_page((unsigned long)vaddr); in copy_user_page()
188 void clear_user_page(void *addr, unsigned long vaddr, struct page *page) in clear_user_page() argument
190 cpu_dcache_wbinval_page((unsigned long)vaddr); in clear_user_page()
191 cpu_icache_inval_page((unsigned long)vaddr); in clear_user_page()
198 unsigned long vaddr, struct vm_area_struct *vma) in copy_user_highpage() argument
207 if (aliasing(vaddr, (unsigned long)kfrom)) in copy_user_highpage()
209 vto = kremap0(vaddr, pto); in copy_user_highpage()
210 vfrom = kremap1(vaddr, pfrom); in copy_user_highpage()
[all …]
/Linux-v4.19/arch/sparc/mm/
io-unit.c 95 static unsigned long iounit_get_area(struct iounit_struct *iounit, unsigned long vaddr, int size) in iounit_get_area() argument
101 npages = ((vaddr & ~PAGE_MASK) + size + (PAGE_SIZE-1)) >> PAGE_SHIFT; in iounit_get_area()
110 IOD(("iounit_get_area(%08lx,%d[%d])=", vaddr, size, npages)); in iounit_get_area()
125 panic("iounit_get_area: Couldn't find free iopte slots for (%08lx,%d)\n", vaddr, size); in iounit_get_area()
133 iopte = MKIOPTE(__pa(vaddr & PAGE_MASK)); in iounit_get_area()
134 vaddr = IOUNIT_DMA_BASE + (scan << PAGE_SHIFT) + (vaddr & ~PAGE_MASK); in iounit_get_area()
139 IOD(("%08lx\n", vaddr)); in iounit_get_area()
140 return vaddr; in iounit_get_area()
143 static __u32 iounit_get_scsi_one(struct device *dev, char *vaddr, unsigned long len) in iounit_get_scsi_one() argument
149 ret = iounit_get_area(iounit, (unsigned long)vaddr, len); in iounit_get_scsi_one()
[all …]
highmem.c 54 unsigned long vaddr; in kmap_atomic() local
64 vaddr = __fix_to_virt(FIX_KMAP_BEGIN + idx); in kmap_atomic()
68 __flush_cache_one(vaddr); in kmap_atomic()
79 __flush_tlb_one(vaddr); in kmap_atomic()
84 return (void*) vaddr; in kmap_atomic()
90 unsigned long vaddr = (unsigned long) kvaddr & PAGE_MASK; in __kunmap_atomic() local
93 if (vaddr < FIXADDR_START) { // FIXME in __kunmap_atomic()
106 BUG_ON(vaddr != __fix_to_virt(FIX_KMAP_BEGIN+idx)); in __kunmap_atomic()
110 __flush_cache_one(vaddr); in __kunmap_atomic()
119 pte_clear(&init_mm, vaddr, kmap_pte-idx); in __kunmap_atomic()
[all …]
/Linux-v4.19/drivers/net/ethernet/freescale/fman/
fman_muram.c 47 unsigned long vaddr) in fman_muram_vbase_to_offset() argument
49 return vaddr - (unsigned long)muram->vbase; in fman_muram_vbase_to_offset()
68 void __iomem *vaddr; in fman_muram_init() local
81 vaddr = ioremap(base, size); in fman_muram_init()
82 if (!vaddr) { in fman_muram_init()
87 ret = gen_pool_add_virt(muram->pool, (unsigned long)vaddr, in fman_muram_init()
91 iounmap(vaddr); in fman_muram_init()
95 memset_io(vaddr, 0, (int)size); in fman_muram_init()
97 muram->vbase = vaddr; in fman_muram_init()
134 unsigned long vaddr; in fman_muram_alloc() local
[all …]
/Linux-v4.19/arch/m68k/sun3/
dvma.c 24 static unsigned long dvma_page(unsigned long kaddr, unsigned long vaddr) in dvma_page() argument
36 if(ptelist[(vaddr & 0xff000) >> PAGE_SHIFT] != pte) { in dvma_page()
37 sun3_put_pte(vaddr, pte); in dvma_page()
38 ptelist[(vaddr & 0xff000) >> PAGE_SHIFT] = pte; in dvma_page()
41 return (vaddr + (kaddr & ~PAGE_MASK)); in dvma_page()
50 unsigned long vaddr; in dvma_map_iommu() local
52 vaddr = dvma_btov(baddr); in dvma_map_iommu()
54 end = vaddr + len; in dvma_map_iommu()
56 while(vaddr < end) { in dvma_map_iommu()
57 dvma_page(kaddr, vaddr); in dvma_map_iommu()
[all …]
/Linux-v4.19/arch/x86/mm/
mem_encrypt.c 118 static void __init __sme_early_map_unmap_mem(void *vaddr, unsigned long size, in __sme_early_map_unmap_mem() argument
121 unsigned long paddr = (unsigned long)vaddr - __PAGE_OFFSET; in __sme_early_map_unmap_mem()
129 __early_make_pgtable((unsigned long)vaddr, pmd); in __sme_early_map_unmap_mem()
131 vaddr += PMD_SIZE; in __sme_early_map_unmap_mem()
252 static int __init early_set_memory_enc_dec(unsigned long vaddr, in early_set_memory_enc_dec() argument
261 vaddr_next = vaddr; in early_set_memory_enc_dec()
262 vaddr_end = vaddr + size; in early_set_memory_enc_dec()
264 for (; vaddr < vaddr_end; vaddr = vaddr_next) { in early_set_memory_enc_dec()
265 kpte = lookup_address(vaddr, &level); in early_set_memory_enc_dec()
273 vaddr_next = (vaddr & PAGE_MASK) + PAGE_SIZE; in early_set_memory_enc_dec()
[all …]
init_32.c 109 pmd_t * __init populate_extra_pmd(unsigned long vaddr) in populate_extra_pmd() argument
111 int pgd_idx = pgd_index(vaddr); in populate_extra_pmd()
112 int pmd_idx = pmd_index(vaddr); in populate_extra_pmd()
117 pte_t * __init populate_extra_pte(unsigned long vaddr) in populate_extra_pte() argument
119 int pte_idx = pte_index(vaddr); in populate_extra_pte()
122 pmd = populate_extra_pmd(vaddr); in populate_extra_pte()
134 unsigned long vaddr; in page_table_range_init_count() local
139 vaddr = start; in page_table_range_init_count()
140 pgd_idx = pgd_index(vaddr); in page_table_range_init_count()
141 pmd_idx = pmd_index(vaddr); in page_table_range_init_count()
[all …]
/Linux-v4.19/arch/arm/mach-ixp4xx/include/mach/
io.h 100 const u8 *vaddr = p; in __indirect_writesb() local
103 writeb(*vaddr++, bus_addr); in __indirect_writesb()
125 const u16 *vaddr = p; in __indirect_writesw() local
128 writew(*vaddr++, bus_addr); in __indirect_writesw()
146 const u32 *vaddr = p; in __indirect_writesl() local
148 writel(*vaddr++, bus_addr); in __indirect_writesl()
170 u8 *vaddr = p; in __indirect_readsb() local
173 *vaddr++ = readb(bus_addr); in __indirect_readsb()
195 u16 *vaddr = p; in __indirect_readsw() local
198 *vaddr++ = readw(bus_addr); in __indirect_readsw()
[all …]
/Linux-v4.19/drivers/media/common/videobuf2/
videobuf2-vmalloc.c 26 void *vaddr; member
48 buf->vaddr = vmalloc_user(buf->size); in vb2_vmalloc_alloc()
54 if (!buf->vaddr) { in vb2_vmalloc_alloc()
69 vfree(buf->vaddr); in vb2_vmalloc_put()
74 static void *vb2_vmalloc_get_userptr(struct device *dev, unsigned long vaddr, in vb2_vmalloc_get_userptr() argument
88 offset = vaddr & ~PAGE_MASK; in vb2_vmalloc_get_userptr()
90 vec = vb2_create_framevec(vaddr, size, dma_dir == DMA_FROM_DEVICE || in vb2_vmalloc_get_userptr()
108 buf->vaddr = (__force void *) in vb2_vmalloc_get_userptr()
111 buf->vaddr = vm_map_ram(frame_vector_pages(vec), n_pages, -1, in vb2_vmalloc_get_userptr()
115 if (!buf->vaddr) in vb2_vmalloc_get_userptr()
[all …]
/Linux-v4.19/arch/mips/mm/
highmem.c 49 unsigned long vaddr; in kmap_atomic() local
59 vaddr = __fix_to_virt(FIX_KMAP_BEGIN + idx); in kmap_atomic()
64 local_flush_tlb_one((unsigned long)vaddr); in kmap_atomic()
66 return (void*) vaddr; in kmap_atomic()
72 unsigned long vaddr = (unsigned long) kvaddr & PAGE_MASK; in __kunmap_atomic() local
75 if (vaddr < FIXADDR_START) { // FIXME in __kunmap_atomic()
86 BUG_ON(vaddr != __fix_to_virt(FIX_KMAP_BEGIN + idx)); in __kunmap_atomic()
92 pte_clear(&init_mm, vaddr, kmap_pte-idx); in __kunmap_atomic()
93 local_flush_tlb_one(vaddr); in __kunmap_atomic()
108 unsigned long vaddr; in kmap_atomic_pfn() local
[all …]
pgtable-32.c 35 unsigned long vaddr; in pagetable_init() local
54 vaddr = __fix_to_virt(__end_of_fixed_addresses - 1); in pagetable_init()
55 fixrange_init(vaddr & PMD_MASK, vaddr + FIXADDR_SIZE, pgd_base); in pagetable_init()
61 vaddr = PKMAP_BASE; in pagetable_init()
62 fixrange_init(vaddr & PMD_MASK, vaddr + PAGE_SIZE*LAST_PKMAP, pgd_base); in pagetable_init()
64 pgd = swapper_pg_dir + __pgd_offset(vaddr); in pagetable_init()
65 pud = pud_offset(pgd, vaddr); in pagetable_init()
66 pmd = pmd_offset(pud, vaddr); in pagetable_init()
67 pte = pte_offset_kernel(pmd, vaddr); in pagetable_init()
/Linux-v4.19/arch/m68k/sun3x/
dvma.c 80 unsigned long vaddr, int len) in dvma_map_cpu() argument
87 vaddr &= PAGE_MASK; in dvma_map_cpu()
89 end = PAGE_ALIGN(vaddr + len); in dvma_map_cpu()
91 pr_debug("dvma: mapping kern %08lx to virt %08lx\n", kaddr, vaddr); in dvma_map_cpu()
92 pgd = pgd_offset_k(vaddr); in dvma_map_cpu()
98 if((pmd = pmd_alloc(&init_mm, pgd, vaddr)) == NULL) { in dvma_map_cpu()
103 if((end & PGDIR_MASK) > (vaddr & PGDIR_MASK)) in dvma_map_cpu()
104 end2 = (vaddr + (PGDIR_SIZE-1)) & PGDIR_MASK; in dvma_map_cpu()
112 if((pte = pte_alloc_kernel(pmd, vaddr)) == NULL) { in dvma_map_cpu()
117 if((end2 & PMD_MASK) > (vaddr & PMD_MASK)) in dvma_map_cpu()
[all …]
/Linux-v4.19/mm/
highmem.c 151 struct page *kmap_to_page(void *vaddr) in kmap_to_page() argument
153 unsigned long addr = (unsigned long)vaddr; in kmap_to_page()
216 unsigned long vaddr; in map_new_virtual() local
258 vaddr = PKMAP_ADDR(last_pkmap_nr); in map_new_virtual()
259 set_pte_at(&init_mm, vaddr, in map_new_virtual()
263 set_page_address(page, (void *)vaddr); in map_new_virtual()
265 return vaddr; in map_new_virtual()
278 unsigned long vaddr; in kmap_high() local
285 vaddr = (unsigned long)page_address(page); in kmap_high()
286 if (!vaddr) in kmap_high()
[all …]
/Linux-v4.19/arch/sparc/include/asm/
viking.h 213 static inline unsigned long viking_hwprobe(unsigned long vaddr) in viking_hwprobe() argument
217 vaddr &= PAGE_MASK; in viking_hwprobe()
221 : "r" (vaddr | 0x400), "i" (ASI_M_FLUSH_PROBE)); in viking_hwprobe()
228 : "r" (vaddr | 0x200), "i" (ASI_M_FLUSH_PROBE)); in viking_hwprobe()
230 vaddr &= ~SRMMU_PGDIR_MASK; in viking_hwprobe()
231 vaddr >>= PAGE_SHIFT; in viking_hwprobe()
232 return val | (vaddr << 8); in viking_hwprobe()
238 : "r" (vaddr | 0x100), "i" (ASI_M_FLUSH_PROBE)); in viking_hwprobe()
240 vaddr &= ~SRMMU_REAL_PMD_MASK; in viking_hwprobe()
241 vaddr >>= PAGE_SHIFT; in viking_hwprobe()
[all …]
/Linux-v4.19/arch/powerpc/mm/
highmem.c 35 unsigned long vaddr; in kmap_atomic_prot() local
45 vaddr = __fix_to_virt(FIX_KMAP_BEGIN + idx); in kmap_atomic_prot()
49 __set_pte_at(&init_mm, vaddr, kmap_pte-idx, mk_pte(page, prot), 1); in kmap_atomic_prot()
50 local_flush_tlb_page(NULL, vaddr); in kmap_atomic_prot()
52 return (void*) vaddr; in kmap_atomic_prot()
58 unsigned long vaddr = (unsigned long) kvaddr & PAGE_MASK; in __kunmap_atomic() local
61 if (vaddr < __fix_to_virt(FIX_KMAP_END)) { in __kunmap_atomic()
74 BUG_ON(vaddr != __fix_to_virt(FIX_KMAP_BEGIN + idx)); in __kunmap_atomic()
80 pte_clear(&init_mm, vaddr, kmap_pte-idx); in __kunmap_atomic()
81 local_flush_tlb_page(NULL, vaddr); in __kunmap_atomic()
/Linux-v4.19/arch/um/kernel/
mem.c 92 unsigned long vaddr; in fixrange_init() local
94 vaddr = start; in fixrange_init()
95 i = pgd_index(vaddr); in fixrange_init()
96 j = pmd_index(vaddr); in fixrange_init()
99 for ( ; (i < PTRS_PER_PGD) && (vaddr < end); pgd++, i++) { in fixrange_init()
100 pud = pud_offset(pgd, vaddr); in fixrange_init()
103 pmd = pmd_offset(pud, vaddr); in fixrange_init()
104 for (; (j < PTRS_PER_PMD) && (vaddr < end); pmd++, j++) { in fixrange_init()
106 vaddr += PMD_SIZE; in fixrange_init()
121 unsigned long v, vaddr = FIXADDR_USER_START; in fixaddr_user_init() local
[all …]
/Linux-v4.19/arch/xtensa/mm/
cache.c 61 unsigned long vaddr) in kmap_invalidate_coherent() argument
63 if (!DCACHE_ALIAS_EQ(page_to_phys(page), vaddr)) { in kmap_invalidate_coherent()
81 unsigned long vaddr, unsigned long *paddr) in coherent_kvaddr() argument
83 if (PageHighMem(page) || !DCACHE_ALIAS_EQ(page_to_phys(page), vaddr)) { in coherent_kvaddr()
85 return (void *)(base + (vaddr & DCACHE_ALIAS_MASK)); in coherent_kvaddr()
92 void clear_user_highpage(struct page *page, unsigned long vaddr) in clear_user_highpage() argument
95 void *kvaddr = coherent_kvaddr(page, TLBTEMP_BASE_1, vaddr, &paddr); in clear_user_highpage()
98 kmap_invalidate_coherent(page, vaddr); in clear_user_highpage()
106 unsigned long vaddr, struct vm_area_struct *vma) in copy_user_highpage() argument
109 void *dst_vaddr = coherent_kvaddr(dst, TLBTEMP_BASE_1, vaddr, in copy_user_highpage()
[all …]
