Searched refs:ptes (Results 1 – 25 of 42) sorted by relevance


/Linux-v4.19/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/
vmmnv44.c
28 dma_addr_t *list, u32 ptei, u32 ptes) in nv44_vmm_pgt_fill() argument
38 while (ptes--) { in nv44_vmm_pgt_fill()
74 u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr) in nv44_vmm_pgt_pte() argument
79 const u32 pten = min(ptes, 4 - (ptei & 3)); in nv44_vmm_pgt_pte()
84 ptes -= pten; in nv44_vmm_pgt_pte()
87 while (ptes >= 4) { in nv44_vmm_pgt_pte()
94 ptes -= 4; in nv44_vmm_pgt_pte()
97 if (ptes) { in nv44_vmm_pgt_pte()
98 for (i = 0; i < ptes; i++, addr += 0x1000) in nv44_vmm_pgt_pte()
100 nv44_vmm_pgt_fill(vmm, pt, tmp, ptei, ptes); in nv44_vmm_pgt_pte()
[all …]
vmmnv41.c
28 u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr) in nv41_vmm_pgt_pte() argument
31 while (ptes--) { in nv41_vmm_pgt_pte()
39 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in nv41_vmm_pgt_sgl() argument
41 VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv41_vmm_pgt_pte); in nv41_vmm_pgt_sgl()
46 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in nv41_vmm_pgt_dma() argument
50 while (ptes--) { in nv41_vmm_pgt_dma()
56 VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv41_vmm_pgt_pte); in nv41_vmm_pgt_dma()
62 struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) in nv41_vmm_pgt_unmap() argument
64 VMM_FO032(pt, vmm, ptei * 4, 0, ptes); in nv41_vmm_pgt_unmap()
vmmgp100.c
32 u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr) in gp100_vmm_pgt_pte() argument
36 map->type += ptes * map->ctag; in gp100_vmm_pgt_pte()
38 while (ptes--) { in gp100_vmm_pgt_pte()
46 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in gp100_vmm_pgt_sgl() argument
48 VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, gp100_vmm_pgt_pte); in gp100_vmm_pgt_sgl()
53 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in gp100_vmm_pgt_dma() argument
56 VMM_SPAM(vmm, "DMAA %08x %08x PTE(s)", ptei, ptes); in gp100_vmm_pgt_dma()
58 while (ptes--) { in gp100_vmm_pgt_dma()
67 VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, gp100_vmm_pgt_pte); in gp100_vmm_pgt_dma()
72 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in gp100_vmm_pgt_mem() argument
[all …]
vmmnv04.c
29 u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr) in nv04_vmm_pgt_pte() argument
32 while (ptes--) { in nv04_vmm_pgt_pte()
40 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in nv04_vmm_pgt_sgl() argument
42 VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv04_vmm_pgt_pte); in nv04_vmm_pgt_sgl()
47 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in nv04_vmm_pgt_dma() argument
51 while (ptes--) in nv04_vmm_pgt_dma()
55 VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, nv04_vmm_pgt_pte); in nv04_vmm_pgt_dma()
61 struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) in nv04_vmm_pgt_unmap() argument
63 VMM_FO032(pt, vmm, 8 + (ptei * 4), 0, ptes); in nv04_vmm_pgt_unmap()
vmmnv50.c
33 u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr) in nv50_vmm_pgt_pte() argument
39 map->type += ptes * map->ctag; in nv50_vmm_pgt_pte()
41 while (ptes) { in nv50_vmm_pgt_pte()
44 if (ptes >= pten && IS_ALIGNED(ptei, pten)) in nv50_vmm_pgt_pte()
50 ptes -= pten; in nv50_vmm_pgt_pte()
59 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in nv50_vmm_pgt_sgl() argument
61 VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, nv50_vmm_pgt_pte); in nv50_vmm_pgt_sgl()
66 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in nv50_vmm_pgt_dma() argument
69 VMM_SPAM(vmm, "DMAA %08x %08x PTE(s)", ptei, ptes); in nv50_vmm_pgt_dma()
71 while (ptes--) { in nv50_vmm_pgt_dma()
[all …]
vmm.c
198 const struct nvkm_vmm_desc *desc, u32 ptei, u32 ptes) in nvkm_vmm_unref_sptes() argument
209 for (lpti = ptei >> sptb; ptes; spti = 0, lpti++) { in nvkm_vmm_unref_sptes()
210 const u32 pten = min(sptn - spti, ptes); in nvkm_vmm_unref_sptes()
212 ptes -= pten; in nvkm_vmm_unref_sptes()
222 for (ptes = 1, ptei++; ptei < lpti; ptes++, ptei++) { in nvkm_vmm_unref_sptes()
236 for (ptes = 1, ptei++; ptei < lpti; ptes++, ptei++) { in nvkm_vmm_unref_sptes()
243 TRA(it, "LPTE %05x: U -> S %d PTEs", pteb, ptes); in nvkm_vmm_unref_sptes()
244 pair->func->sparse(vmm, pgt->pt[0], pteb, ptes); in nvkm_vmm_unref_sptes()
251 TRA(it, "LPTE %05x: U -> I %d PTEs", pteb, ptes); in nvkm_vmm_unref_sptes()
252 pair->func->invalid(vmm, pgt->pt[0], pteb, ptes); in nvkm_vmm_unref_sptes()
[all …]
vmmgf100.c
33 u32 ptei, u32 ptes, struct nvkm_vmm_map *map, u64 addr) in gf100_vmm_pgt_pte() argument
39 while (ptes--) { in gf100_vmm_pgt_pte()
48 map->type += ptes * map->ctag; in gf100_vmm_pgt_pte()
50 while (ptes--) { in gf100_vmm_pgt_pte()
59 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in gf100_vmm_pgt_sgl() argument
61 VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, gf100_vmm_pgt_pte); in gf100_vmm_pgt_sgl()
66 u32 ptei, u32 ptes, struct nvkm_vmm_map *map) in gf100_vmm_pgt_dma() argument
69 VMM_SPAM(vmm, "DMAA %08x %08x PTE(s)", ptei, ptes); in gf100_vmm_pgt_dma()
71 while (ptes--) { in gf100_vmm_pgt_dma()
80 VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, gf100_vmm_pgt_pte); in gf100_vmm_pgt_dma()
[all …]
vmmgk104.c
26 struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) in gk104_vmm_lpt_invalid() argument
29 VMM_FO064(pt, vmm, ptei * 8, BIT_ULL(1) /* PRIV. */, ptes); in gk104_vmm_lpt_invalid()
vmmgm200.c
29 struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) in gm200_vmm_pgt_sparse() argument
32 VMM_FO064(pt, vmm, ptei * 8, BIT_ULL(32) /* VOL. */, ptes); in gm200_vmm_pgt_sparse()
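Taken together, these nouveau matches show one recurring shape: each per-generation *_vmm_pgt_pte() helper receives a starting PTE index (ptei) and a count (ptes), then decrements the count while writing one entry per 4 KiB page and stepping addr (nv44 and nv50 add batching around the same core loop). A minimal stand-alone sketch of that loop, using an invented fake_pt layout and PTE encoding rather than the real nvkm format:

#include <stdint.h>
#include <stdio.h>

/* Hypothetical page-table window; the layout and PTE encoding are
 * invented for illustration, not the nvkm_mmu_pt format. */
struct fake_pt {
	uint32_t data[256];
};

/* Starting at index ptei, write ptes entries, one per 4 KiB page. */
static void fake_vmm_pgt_pte(struct fake_pt *pt, uint32_t ptei,
			     uint32_t ptes, uint64_t addr)
{
	while (ptes--) {
		/* Invented encoding: page frame number plus a valid bit. */
		pt->data[ptei++] = (uint32_t)(addr >> 12) << 1 | 1;
		addr += 0x1000;
	}
}

int main(void)
{
	struct fake_pt pt = { { 0 } };

	fake_vmm_pgt_pte(&pt, 4, 3, 0x10000000ull);
	for (int i = 3; i < 8; i++)
		printf("pte[%d] = %08x\n", i, (unsigned)pt.data[i]);
	return 0;
}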
/Linux-v4.19/arch/x86/xen/
grant-table.c
51 pte_t **ptes; member
68 set_pte_at(&init_mm, addr, gnttab_shared_vm_area.ptes[i], in arch_gnttab_map_shared()
90 set_pte_at(&init_mm, addr, gnttab_status_vm_area.ptes[i], in arch_gnttab_map_status()
100 pte_t **ptes; in arch_gnttab_unmap() local
105 ptes = gnttab_status_vm_area.ptes; in arch_gnttab_unmap()
107 ptes = gnttab_shared_vm_area.ptes; in arch_gnttab_unmap()
112 set_pte_at(&init_mm, addr, ptes[i], __pte(0)); in arch_gnttab_unmap()
119 area->ptes = kmalloc_array(nr_frames, sizeof(*area->ptes), GFP_KERNEL); in arch_gnttab_valloc()
120 if (area->ptes == NULL) in arch_gnttab_valloc()
123 area->area = alloc_vm_area(PAGE_SIZE * nr_frames, area->ptes); in arch_gnttab_valloc()
[all …]
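The grant-table matches revolve around keeping a pte_t **ptes array alongside a virtual area: arch_gnttab_valloc() allocates the pointer array and lets alloc_vm_area() fill it, and arch_gnttab_unmap() later clears each recorded slot. A rough userspace model of that bookkeeping, with invented fake_* names standing in for the kernel types and set_pte_at():

#include <stdlib.h>

typedef unsigned long fake_pte_t;      /* stand-in for pte_t */

struct fake_vm_area {
	fake_pte_t **ptes;             /* one saved PTE slot per frame */
	size_t nr_frames;
};

/* Models arch_gnttab_valloc(): allocate the pointer array up front so
 * the area's PTE slots can be recorded while the mapping is built. */
static int fake_valloc(struct fake_vm_area *area, size_t nr_frames)
{
	area->ptes = calloc(nr_frames, sizeof(*area->ptes));
	if (!area->ptes)
		return -1;
	area->nr_frames = nr_frames;
	return 0;
}

/* Models arch_gnttab_unmap(): zero every recorded slot, the moral
 * equivalent of set_pte_at(&init_mm, addr, ptes[i], __pte(0)). */
static void fake_unmap(struct fake_vm_area *area)
{
	for (size_t i = 0; i < area->nr_frames; i++)
		if (area->ptes[i])
			*area->ptes[i] = 0;
}

int main(void)
{
	fake_pte_t slots[2] = { 0x1234, 0x5678 };
	struct fake_vm_area area;

	if (fake_valloc(&area, 2))
		return 1;
	area.ptes[0] = &slots[0];      /* alloc_vm_area() does this part */
	area.ptes[1] = &slots[1];
	fake_unmap(&area);             /* both slots now read back as 0 */
	free(area.ptes);
	return 0;
}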
/Linux-v4.19/block/partitions/
efi.c
352 gpt_header **gpt, gpt_entry **ptes) in is_gpt_valid() argument
357 if (!ptes) in is_gpt_valid()
446 if (!(*ptes = alloc_read_gpt_entries(state, *gpt))) in is_gpt_valid()
450 crc = efi_crc32((const unsigned char *) (*ptes), pt_size); in is_gpt_valid()
461 kfree(*ptes); in is_gpt_valid()
462 *ptes = NULL; in is_gpt_valid()
598 gpt_entry **ptes) in find_valid_gpt() argument
607 if (!ptes) in find_valid_gpt()
647 *ptes = pptes; in find_valid_gpt()
656 *ptes = aptes; in find_valid_gpt()
[all …]
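The is_gpt_valid() matches center on one check: the CRC32 of the whole partition entry array (*ptes) must equal the value recorded in the GPT header, otherwise the entries are freed and *ptes is reset to NULL. A self-contained sketch of that check; the bitwise CRC mirrors the semantics of the kernel's efi_crc32(), and fake_gpt_header is a simplified stand-in for the real little-endian gpt_header:

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

/* Bitwise CRC32 (poly 0xEDB88320, ~0 seed, post-inverted), matching
 * what the kernel's efi_crc32() helper computes. */
static uint32_t crc32_bitwise(const uint8_t *p, size_t len)
{
	uint32_t crc = ~0u;

	while (len--) {
		crc ^= *p++;
		for (int k = 0; k < 8; k++)
			crc = (crc >> 1) ^ (0xEDB88320u & -(crc & 1));
	}
	return ~crc;
}

/* Simplified header: the real gpt_header stores these as __le32. */
struct fake_gpt_header {
	uint32_t num_partition_entries;
	uint32_t sizeof_partition_entry;
	uint32_t partition_entry_array_crc32;
};

/* CRC the whole entry array (ptes) and compare with the header field;
 * on mismatch the caller frees *ptes and NULLs it out. */
static int entries_crc_ok(const struct fake_gpt_header *gpt,
			  const uint8_t *ptes)
{
	size_t pt_size = (size_t)gpt->num_partition_entries *
			 gpt->sizeof_partition_entry;

	return crc32_bitwise(ptes, pt_size) ==
	       gpt->partition_entry_array_crc32;
}

int main(void)
{
	uint8_t entries[128] = { 0 };
	struct fake_gpt_header gpt = { 1, 128, 0 };

	gpt.partition_entry_array_crc32 = crc32_bitwise(entries, 128);
	printf("crc ok: %d\n", entries_crc_ok(&gpt, entries));
	return 0;
}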
/Linux-v4.19/arch/alpha/kernel/
pci_iommu.c
85 arena->ptes = __alloc_bootmem_node(NODE_DATA(nid), mem_size, align, 0); in iommu_arena_new_node()
86 if (!NODE_DATA(nid) || !arena->ptes) { in iommu_arena_new_node()
90 arena->ptes = __alloc_bootmem(mem_size, align, 0); in iommu_arena_new_node()
96 arena->ptes = __alloc_bootmem(mem_size, align, 0); in iommu_arena_new_node()
125 unsigned long *ptes; in iommu_arena_find_pages() local
140 ptes = arena->ptes; in iommu_arena_find_pages()
152 if (ptes[p+i]) in iommu_arena_find_pages()
184 unsigned long *ptes; in iommu_arena_alloc() local
190 ptes = arena->ptes; in iommu_arena_alloc()
203 ptes[p+i] = IOMMU_INVALID_PTE; in iommu_arena_alloc()
[all …]
core_titan.c
327 port->tba[0].csr = virt_to_phys(hose->sg_isa->ptes); in titan_init_one_pachip_port()
335 port->tba[2].csr = virt_to_phys(hose->sg_pci->ptes); in titan_init_one_pachip_port()
462 unsigned long *ptes; in titan_ioremap() local
517 ptes = hose->sg_pci->ptes; in titan_ioremap()
521 pfn = ptes[baddr >> PAGE_SHIFT]; in titan_ioremap()
710 pte = aper->arena->ptes[baddr >> PAGE_SHIFT]; in titan_agp_translate()
core_marvel.c
291 csrs->POx_TBASE[0].csr = virt_to_phys(hose->sg_isa->ptes); in io7_init_hose()
309 csrs->POx_TBASE[2].csr = virt_to_phys(hose->sg_pci->ptes); in io7_init_hose()
687 unsigned long *ptes; in marvel_ioremap() local
742 ptes = hose->sg_pci->ptes; in marvel_ioremap()
746 pfn = ptes[baddr >> PAGE_SHIFT]; in marvel_ioremap()
1045 pte = aper->arena->ptes[baddr >> PAGE_SHIFT]; in marvel_agp_translate()
pci_impl.h
139 unsigned long *ptes; member
core_cia.c
461 arena->ptes[4] = pte0; in verify_tb_operation()
485 arena->ptes[5] = pte0; in verify_tb_operation()
521 arena->ptes[4] = 0; in verify_tb_operation()
522 arena->ptes[5] = 0; in verify_tb_operation()
734 *(vip)CIA_IOC_PCI_T0_BASE = virt_to_phys(hose->sg_isa->ptes) >> 2; in do_init_arch()
core_tsunami.c
335 pchip->tba[0].csr = virt_to_phys(hose->sg_isa->ptes); in tsunami_init_one_pchip()
339 pchip->tba[1].csr = virt_to_phys(hose->sg_pci->ptes); in tsunami_init_one_pchip()
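Across the Alpha matches, arena->ptes is a flat scatter-gather page table whose entries double as an allocation map: iommu_arena_find_pages() scans it for a run of free (zero) slots, and iommu_arena_alloc() then stamps IOMMU_INVALID_PTE into the claimed range. A simplified sketch of that scan, omitting the real code's alignment mask and wrap-around retry:

#include <stddef.h>

/* Walk the pte array looking for n consecutive zero (free) entries;
 * return the start index, or -1 if the arena has no such run. */
static long find_free_range(const unsigned long *ptes, size_t size, size_t n)
{
	size_t p = 0;

	while (p + n <= size) {
		size_t i;

		for (i = 0; i < n; i++)
			if (ptes[p + i])
				break;          /* in use; skip past it */
		if (i == n)
			return (long)p;         /* n consecutive free slots */
		p += i + 1;
	}
	return -1;
}

int main(void)
{
	unsigned long ptes[8] = { 0, 0, 1, 0, 0, 0, 1, 0 };

	return find_free_range(ptes, 8, 3) == 3 ? 0 : 1;
}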
/Linux-v4.19/drivers/staging/gasket/
gasket_page_table.c
294 static bool gasket_is_pte_range_free(struct gasket_page_table_entry *ptes, in gasket_is_pte_range_free() argument
300 if (ptes[i].status != PTE_FREE) in gasket_is_pte_range_free()
447 struct gasket_page_table_entry *ptes, in gasket_perform_mapping() argument
466 ptes[i].page = NULL; in gasket_perform_mapping()
467 ptes[i].offset = offset; in gasket_perform_mapping()
468 ptes[i].dma_addr = pg_tbl->coherent_pages[0].paddr + in gasket_perform_mapping()
483 ptes[i].page = page; in gasket_perform_mapping()
484 ptes[i].offset = offset; in gasket_perform_mapping()
487 ptes[i].dma_addr = in gasket_perform_mapping()
492 __func__, i, &ptes[i], in gasket_perform_mapping()
[all …]
/Linux-v4.19/arch/powerpc/include/asm/
plpar_wrappers.h
171 unsigned long *ptes) in plpar_pte_read_4() argument
179 memcpy(ptes, retbuf, 8*sizeof(unsigned long)); in plpar_pte_read_4()
189 unsigned long *ptes) in plpar_pte_read_4_raw() argument
197 memcpy(ptes, retbuf, 8*sizeof(unsigned long)); in plpar_pte_read_4_raw()
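Both wrappers read four hash page table entries per hypercall: the hcall fills an eight-longword return buffer, and a single memcpy() moves it into the caller's { pteh; ptel; } ptes[4] overlay (visible in lpar.c below). A small model of that convention, with fake_h_read_4() as an invented stand-in for the real H_READ path:

#include <stdio.h>
#include <string.h>

struct hpte_pair {
	unsigned long pteh;     /* HPTE "high" word: valid bit, AVPN, ... */
	unsigned long ptel;     /* HPTE "low" word: real page, permissions */
};

/* Invented stand-in for the hypercall that fills retbuf. */
static void fake_h_read_4(unsigned long hpte_group, unsigned long retbuf[8])
{
	for (int i = 0; i < 8; i++)
		retbuf[i] = hpte_group + i;     /* placeholder HPTE words */
}

static void read_4(unsigned long hpte_group, struct hpte_pair ptes[4])
{
	unsigned long retbuf[8];

	fake_h_read_4(hpte_group, retbuf);
	/* Same move the wrappers perform: 8 longs -> 4 pteh/ptel pairs. */
	memcpy(ptes, retbuf, 8 * sizeof(unsigned long));
}

int main(void)
{
	struct hpte_pair ptes[4];

	read_4(0x1000, ptes);
	printf("pteh0=%lx ptel0=%lx\n", ptes[0].pteh, ptes[0].ptel);
	return 0;
}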
/Linux-v4.19/arch/powerpc/platforms/pseries/
lpar.c
231 } ptes[4]; in manual_hpte_clear_all() local
240 lpar_rc = plpar_pte_read_4_raw(0, i, (void *)ptes); in manual_hpte_clear_all()
247 if ((ptes[j].pteh & HPTE_V_VRMA_MASK) == in manual_hpte_clear_all()
250 if (ptes[j].pteh & HPTE_V_VALID) in manual_hpte_clear_all()
252 &(ptes[j].pteh), &(ptes[j].ptel)); in manual_hpte_clear_all()
339 } ptes[4]; in __pSeries_lpar_hpte_find() local
343 lpar_rc = plpar_pte_read_4(0, hpte_group, (void *)ptes); in __pSeries_lpar_hpte_find()
351 if (HPTE_V_COMPARE(ptes[j].pteh, want_v) && in __pSeries_lpar_hpte_find()
352 (ptes[j].pteh & HPTE_V_VALID)) in __pSeries_lpar_hpte_find()
/Linux-v4.19/arch/powerpc/mm/
dump_hashpagetable.c
247 struct hash_pte ptes[4]; in pseries_find() local
265 lpar_rc = plpar_pte_read_4(0, hpte_group, (void *)ptes); in pseries_find()
270 if (HPTE_V_COMPARE(ptes[j].v, want_v) && in pseries_find()
271 (ptes[j].v & HPTE_V_VALID)) { in pseries_find()
273 *v = ptes[j].v; in pseries_find()
274 *r = ptes[j].r; in pseries_find()
hugetlbpage.c
253 void *ptes[0]; member
265 kmem_cache_free(hugepte_cache, batch->ptes[i]); in hugepd_free_rcu_callback()
288 (*batchp)->ptes[(*batchp)->index++] = hugepte; in hugepd_free()
/Linux-v4.19/arch/x86/kvm/
paging_tmpl.h
91 pt_element_t ptes[PT_MAX_FULL_LEVELS]; member
218 pte = orig_pte = walker->ptes[level - 1]; in FNAME()
259 walker->ptes[level - 1] = pte; in FNAME()
390 walker->ptes[walker->level - 1] = pte; in FNAME()
556 return r || curr_pte != gw->ptes[level - 1]; in FNAME()
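The paging_tmpl.h matches show the guest-walk cache: the walker saves the PTE it read at each level in walker->ptes[level - 1], and a later re-read compares against the saved value to detect that the guest changed the entry mid-walk. An invented miniature of that pattern (not the KVM API):

#include <stdbool.h>
#include <stdint.h>

#define FAKE_MAX_LEVELS 5   /* stands in for PT_MAX_FULL_LEVELS */

/* Records the PTE seen at each paging level, 1-based as in KVM. */
struct fake_walker {
	uint64_t ptes[FAKE_MAX_LEVELS];
};

/* Record the entry observed while walking 'level'. */
static void walker_save(struct fake_walker *w, int level, uint64_t pte)
{
	w->ptes[level - 1] = pte;
}

/* Re-read check: if the current value differs from what the walk saw,
 * the cached translation is stale. */
static bool walker_stale(const struct fake_walker *w, int level,
			 uint64_t curr_pte)
{
	return curr_pte != w->ptes[level - 1];
}

int main(void)
{
	struct fake_walker w = { { 0 } };

	walker_save(&w, 2, 0xabcd);
	return walker_stale(&w, 2, 0xabcd) ? 1 : 0;   /* 0: unchanged */
}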
/Linux-v4.19/mm/
swap_state.c
723 ra_info->ptes = pte; in swap_ra_info()
725 tpte = ra_info->ptes; in swap_ra_info()
749 for (i = 0, pte = ra_info.ptes; i < ra_info.nr_pte; in swap_vma_readahead()
/Linux-v4.19/drivers/xen/xenbus/
xenbus_client.c
678 pte_t *ptes[XENBUS_MAX_RING_GRANTS]; in xenbus_map_ring_valloc_pv() local
693 area = alloc_vm_area(XEN_PAGE_SIZE * nr_grefs, ptes); in xenbus_map_ring_valloc_pv()
700 phys_addrs[i] = arbitrary_virt_to_machine(ptes[i]).maddr; in xenbus_map_ring_valloc_pv()
