/Linux-v6.1/mm/ |
D | memremap.c |
    45  static void devmap_managed_enable_put(struct dev_pagemap *pgmap)  in devmap_managed_enable_put() argument
    47  if (pgmap->type == MEMORY_DEVICE_FS_DAX)  in devmap_managed_enable_put()
    51  static void devmap_managed_enable_get(struct dev_pagemap *pgmap)  in devmap_managed_enable_get() argument
    53  if (pgmap->type == MEMORY_DEVICE_FS_DAX)  in devmap_managed_enable_get()
    57  static void devmap_managed_enable_get(struct dev_pagemap *pgmap)  in devmap_managed_enable_get() argument
    60  static void devmap_managed_enable_put(struct dev_pagemap *pgmap)  in devmap_managed_enable_put() argument
    72  static unsigned long pfn_first(struct dev_pagemap *pgmap, int range_id)  in pfn_first() argument
    74  struct range *range = &pgmap->ranges[range_id];  in pfn_first()
    79  return pfn + vmem_altmap_offset(pgmap_altmap(pgmap));  in pfn_first()
    82  bool pgmap_pfn_valid(struct dev_pagemap *pgmap, unsigned long pfn)  in pgmap_pfn_valid() argument
    [all …]
|
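The pfn_first() hits above are cut off mid-function. For orientation, here is a short sketch reconstructing that helper: when PGMAP_ALTMAP_VALID is set, the leading PFNs of the first range are reserved by the altmap (the struct pages stored on the device itself) and have to be skipped. Names follow the kernel's; treat this as a sketch, not a verbatim copy.

#include <linux/memremap.h>
#include <linux/pfn.h>

/* Sketch of mm/memremap.c:pfn_first(): skip the PFNs that the altmap
 * reserves at the start of the first range for the memmap itself. */
static unsigned long pfn_first(struct dev_pagemap *pgmap, int range_id)
{
        struct range *range = &pgmap->ranges[range_id];
        unsigned long pfn = PHYS_PFN(range->start);

        if (range_id)   /* the altmap applies to the first range only */
                return pfn;
        return pfn + vmem_altmap_offset(pgmap_altmap(pgmap));
}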
D | sparse-vmemmap.c |
    299  struct dev_pagemap *pgmap)  in reuse_compound_section() argument
    301  unsigned long nr_pages = pgmap_vmemmap_nr(pgmap);  in reuse_compound_section()
    303  PHYS_PFN(pgmap->ranges[pgmap->nr_range].start);  in reuse_compound_section()
    328  struct dev_pagemap *pgmap)  in vmemmap_populate_compound_pages() argument
    334  if (reuse_compound_section(start_pfn, pgmap)) {  in vmemmap_populate_compound_pages()
    347  size = min(end - start, pgmap_vmemmap_nr(pgmap) * sizeof(struct page));  in vmemmap_populate_compound_pages()
    378  struct dev_pagemap *pgmap)  in __populate_section_memmap() argument
    389  pgmap && pgmap_vmemmap_nr(pgmap) > 1 && !altmap)  in __populate_section_memmap()
    390  r = vmemmap_populate_compound_pages(pfn, start, end, nid, pgmap);  in __populate_section_memmap()
|
D | sparse.c |
    431  struct dev_pagemap *pgmap)  in __populate_section_memmap() argument
    634  struct dev_pagemap *pgmap)  in populate_section_memmap() argument
    636  return __populate_section_memmap(pfn, nr_pages, nid, altmap, pgmap);  in populate_section_memmap()
    706  struct dev_pagemap *pgmap)  in populate_section_memmap() argument
    830  struct dev_pagemap *pgmap)  in section_activate() argument
    862  memmap = populate_section_memmap(pfn, nr_pages, nid, altmap, pgmap);  in section_activate()
    892  struct dev_pagemap *pgmap)  in sparse_add_section() argument
    903  memmap = section_activate(nid, start_pfn, nr_pages, altmap, pgmap);  in sparse_add_section()
|
D | gup.c |
    28  struct dev_pagemap *pgmap;  member
    528  struct dev_pagemap **pgmap)  in follow_page_pte() argument
    597  *pgmap = get_dev_pagemap(pte_pfn(pte), *pgmap);  in follow_page_pte()
    598  if (*pgmap)  in follow_page_pte()
    721  page = follow_devmap_pmd(vma, address, pmd, flags, &ctx->pgmap);  in follow_pmd_mask()
    727  return follow_page_pte(vma, address, pmd, flags, &ctx->pgmap);  in follow_pmd_mask()
    747  return follow_page_pte(vma, address, pmd, flags, &ctx->pgmap);  in follow_pmd_mask()
    765  follow_page_pte(vma, address, pmd, flags, &ctx->pgmap);  in follow_pmd_mask()
    802  page = follow_devmap_pud(vma, address, pud, flags, &ctx->pgmap);  in follow_pud_mask()
    916  if (ctx.pgmap)  in follow_page()
    [all …]
|
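The gup.c hits show the lookup side of the API: get_dev_pagemap() returns the dev_pagemap covering a PFN with a reference held (optionally reusing a previously returned pgmap, which is what the cached ctx->pgmap pointer is for), and put_dev_pagemap() drops that reference. A minimal sketch of the pairing, using the same get/put sequence that pfn_to_online_page() uses (see the memory_hotplug.c hits below); the helper name is hypothetical:

#include <linux/memremap.h>
#include <linux/types.h>

/* Hypothetical helper: report whether @pfn is device memory backed by a
 * dev_pagemap. */
static bool pfn_has_pgmap(unsigned long pfn)
{
        struct dev_pagemap *pgmap;

        /* Returns NULL for ordinary RAM; otherwise a reference is held. */
        pgmap = get_dev_pagemap(pfn, NULL);
        if (!pgmap)
                return false;

        put_dev_pagemap(pgmap);
        return true;
}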
D | memory-failure.c |
    1572  struct dev_pagemap *pgmap)  in mf_generic_kill_procs() argument
    1601  switch (pgmap->type) {  in mf_generic_kill_procs()
    1917  struct dev_pagemap *pgmap)  in memory_failure_dev_pagemap() argument
    1929  if (!pgmap_pfn_valid(pgmap, pfn))  in memory_failure_dev_pagemap()
    1936  if (pgmap_has_memory_failure(pgmap)) {  in memory_failure_dev_pagemap()
    1937  rc = pgmap->ops->memory_failure(pgmap, pfn, 1, flags);  in memory_failure_dev_pagemap()
    1946  rc = mf_generic_kill_procs(pfn, flags, pgmap);  in memory_failure_dev_pagemap()
    1949  put_dev_pagemap(pgmap);  in memory_failure_dev_pagemap()
    1981  struct dev_pagemap *pgmap;  in memory_failure() local
    2002  pgmap = get_dev_pagemap(pfn, NULL);  in memory_failure()
    [all …]
|
D | memory_hotplug.c |
    262  struct dev_pagemap *pgmap;  in pfn_to_online_page() local
    291  pgmap = get_dev_pagemap(pfn, NULL);  in pfn_to_online_page()
    292  put_dev_pagemap(pgmap);  in pfn_to_online_page()
    295  if (pgmap)  in pfn_to_online_page()
    337  params->pgmap);  in __add_pages()
|
D | huge_memory.c |
    1033  pmd_t *pmd, int flags, struct dev_pagemap **pgmap)  in follow_devmap_pmd() argument
    1065  *pgmap = get_dev_pagemap(pfn, *pgmap);  in follow_devmap_pmd()
    1066  if (!*pgmap)  in follow_devmap_pmd()
    1191  pud_t *pud, int flags, struct dev_pagemap **pgmap)  in follow_devmap_pud() argument
    1225  *pgmap = get_dev_pagemap(pfn, *pgmap);  in follow_devmap_pud()
    1226  if (!*pgmap)  in follow_devmap_pud()
|
/Linux-v6.1/include/linux/ |
D | memremap.h |
    100  int (*memory_failure)(struct dev_pagemap *pgmap, unsigned long pfn,
    142  static inline bool pgmap_has_memory_failure(struct dev_pagemap *pgmap)  in pgmap_has_memory_failure() argument
    144  return pgmap->ops && pgmap->ops->memory_failure;  in pgmap_has_memory_failure()
    147  static inline struct vmem_altmap *pgmap_altmap(struct dev_pagemap *pgmap)  in pgmap_altmap() argument
    149  if (pgmap->flags & PGMAP_ALTMAP_VALID)  in pgmap_altmap()
    150  return &pgmap->altmap;  in pgmap_altmap()
    154  static inline unsigned long pgmap_vmemmap_nr(struct dev_pagemap *pgmap)  in pgmap_vmemmap_nr() argument
    156  return 1 << pgmap->vmemmap_shift;  in pgmap_vmemmap_nr()
    163  page->pgmap->type == MEMORY_DEVICE_PRIVATE;  in is_device_private_page()
    175  page->pgmap->type == MEMORY_DEVICE_PCI_P2PDMA;  in is_pci_p2pdma_page()
    [all …]
|
D | huge_mm.h |
    262  pmd_t *pmd, int flags, struct dev_pagemap **pgmap);
    264  pud_t *pud, int flags, struct dev_pagemap **pgmap);
    424  unsigned long addr, pmd_t *pmd, int flags, struct dev_pagemap **pgmap)  in follow_devmap_pmd() argument
    430  unsigned long addr, pud_t *pud, int flags, struct dev_pagemap **pgmap)  in follow_devmap_pud() argument
|
D | memory_hotplug.h |
    117  struct dev_pagemap *pgmap;  member
    349  struct dev_pagemap *pgmap);
|
/Linux-v6.1/drivers/pci/ |
D | p2pdma.c |
    31  struct dev_pagemap pgmap;  member
    36  static struct pci_p2pdma_pagemap *to_p2p_pgmap(struct dev_pagemap *pgmap)  in to_p2p_pgmap() argument
    38  return container_of(pgmap, struct pci_p2pdma_pagemap, pgmap);  in to_p2p_pgmap()
    169  struct dev_pagemap *pgmap;  in pci_p2pdma_add_resource() local
    196  pgmap = &p2p_pgmap->pgmap;  in pci_p2pdma_add_resource()
    197  pgmap->range.start = pci_resource_start(pdev, bar) + offset;  in pci_p2pdma_add_resource()
    198  pgmap->range.end = pgmap->range.start + size - 1;  in pci_p2pdma_add_resource()
    199  pgmap->nr_range = 1;  in pci_p2pdma_add_resource()
    200  pgmap->type = MEMORY_DEVICE_PCI_P2PDMA;  in pci_p2pdma_add_resource()
    206  addr = devm_memremap_pages(&pdev->dev, pgmap);  in pci_p2pdma_add_resource()
    [all …]
|
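The pci_p2pdma_add_resource() hits above show the usual way a driver gives its memory struct pages: embed a struct dev_pagemap in a driver-private structure, fill in type, range and nr_range, and hand it to devm_memremap_pages(). A condensed sketch of that pattern; the my_p2p_pagemap/my_map_bar names are hypothetical and error handling is trimmed:

#include <linux/err.h>
#include <linux/memremap.h>
#include <linux/pci.h>

/* Hypothetical wrapper, modelled on struct pci_p2pdma_pagemap. */
struct my_p2p_pagemap {
        struct dev_pagemap pgmap;
        struct pci_dev *provider;
};

static void *my_map_bar(struct pci_dev *pdev, int bar, size_t size, u64 offset)
{
        struct my_p2p_pagemap *p2p;
        struct dev_pagemap *pgmap;

        p2p = devm_kzalloc(&pdev->dev, sizeof(*p2p), GFP_KERNEL);
        if (!p2p)
                return ERR_PTR(-ENOMEM);

        p2p->provider = pdev;
        pgmap = &p2p->pgmap;
        pgmap->range.start = pci_resource_start(pdev, bar) + offset;
        pgmap->range.end = pgmap->range.start + size - 1;
        pgmap->nr_range = 1;
        pgmap->type = MEMORY_DEVICE_PCI_P2PDMA;

        /* Creates struct pages covering the BAR; devm unwinds them on unbind.
         * Returns the kernel mapping of the range or an ERR_PTR(). */
        return devm_memremap_pages(&pdev->dev, pgmap);
}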
/Linux-v6.1/drivers/xen/ |
D | unpopulated-alloc.c |
    36  struct dev_pagemap *pgmap;  in fill_list() local
    84  pgmap = kzalloc(sizeof(*pgmap), GFP_KERNEL);  in fill_list()
    85  if (!pgmap) {  in fill_list()
    90  pgmap->type = MEMORY_DEVICE_GENERIC;  in fill_list()
    91  pgmap->range = (struct range) {  in fill_list()
    95  pgmap->nr_range = 1;  in fill_list()
    96  pgmap->owner = res;  in fill_list()
    121  vaddr = memremap_pages(pgmap, NUMA_NO_NODE);  in fill_list()
    139  kfree(pgmap);  in fill_list()
|
/Linux-v6.1/drivers/dax/ |
D | device.c |
    85  if (dev_dax->pgmap->vmemmap_shift)  in dax_set_mapping()
    403  struct dev_pagemap *pgmap;  in dev_dax_probe() local
    416  pgmap = dev_dax->pgmap;  in dev_dax_probe()
    418  if (dev_dax->pgmap) {  in dev_dax_probe()
    424  pgmap = devm_kzalloc(dev,  in dev_dax_probe()
    425  struct_size(pgmap, ranges, dev_dax->nr_range - 1),  in dev_dax_probe()
    427  if (!pgmap)  in dev_dax_probe()
    430  pgmap->nr_range = dev_dax->nr_range;  in dev_dax_probe()
    431  dev_dax->pgmap = pgmap;  in dev_dax_probe()
    435  pgmap->ranges[i] = *range;  in dev_dax_probe()
    [all …]
|
D | pmem.c |
    20  struct dev_pagemap pgmap = { };  in __dax_pmem_probe() local
    34  rc = nvdimm_setup_pfn(nd_pfn, &pgmap);  in __dax_pmem_probe()
    54  range = pgmap.range;  in __dax_pmem_probe()
    65  .pgmap = &pgmap,  in __dax_pmem_probe()
|
D | bus.h | 21 struct dev_pagemap *pgmap; member
|
D | dax-private.h | 70 struct dev_pagemap *pgmap; member
|
D | bus.c |
    395  dev_dax->pgmap = NULL;  in kill_dev_dax()
    1290  kfree(dev_dax->pgmap);  in dev_dax_release()
    1343  if (data->pgmap) {  in devm_create_dev_dax()
    1347  dev_dax->pgmap = kmemdup(data->pgmap,  in devm_create_dev_dax()
    1349  if (!dev_dax->pgmap) {  in devm_create_dev_dax()
    1404  kfree(dev_dax->pgmap);  in devm_create_dev_dax()
|
/Linux-v6.1/tools/testing/nvdimm/test/ |
D | iomap.c |
    99  struct dev_pagemap *pgmap = _pgmap;  in nfit_test_kill() local
    101  WARN_ON(!pgmap);  in nfit_test_kill()
    103  percpu_ref_kill(&pgmap->ref);  in nfit_test_kill()
    105  wait_for_completion(&pgmap->done);  in nfit_test_kill()
    106  percpu_ref_exit(&pgmap->ref);  in nfit_test_kill()
    111  struct dev_pagemap *pgmap = container_of(ref, struct dev_pagemap, ref);  in dev_pagemap_percpu_release() local
    113  complete(&pgmap->done);  in dev_pagemap_percpu_release()
    116  void *__wrap_devm_memremap_pages(struct device *dev, struct dev_pagemap *pgmap)  in __wrap_devm_memremap_pages() argument
    119  resource_size_t offset = pgmap->range.start;  in __wrap_devm_memremap_pages()
    123  return devm_memremap_pages(dev, pgmap);  in __wrap_devm_memremap_pages()
    [all …]
|
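nfit_test_kill() above replays the core of the teardown that memunmap_pages() performs when a dev_pagemap's pages go away: kill the percpu_ref so no new references can be taken, wait for pgmap->done (completed by the release callback once the last reference drops), then exit the ref. Condensed as a sketch:

#include <linux/completion.h>
#include <linux/memremap.h>
#include <linux/percpu-refcount.h>

/* Sketch of the shutdown sequence used by nfit_test_kill() above and by
 * memunmap_pages() in mm/memremap.c. */
static void pgmap_shutdown(struct dev_pagemap *pgmap)
{
        percpu_ref_kill(&pgmap->ref);           /* refuse new references */
        wait_for_completion(&pgmap->done);      /* wait out remaining users */
        percpu_ref_exit(&pgmap->ref);           /* free the percpu counters */
}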
/Linux-v6.1/drivers/nvdimm/ |
D | pmem.c |
    456  static int pmem_pagemap_memory_failure(struct dev_pagemap *pgmap,  in pmem_pagemap_memory_failure() argument
    460  container_of(pgmap, struct pmem_device, pgmap);  in pmem_pagemap_memory_failure()
    499  rc = nvdimm_setup_pfn(nd_pfn, &pmem->pgmap);  in pmem_attach_disk()
    528  pmem->pgmap.owner = pmem;  in pmem_attach_disk()
    531  pmem->pgmap.type = MEMORY_DEVICE_FS_DAX;  in pmem_attach_disk()
    532  pmem->pgmap.ops = &fsdax_pagemap_ops;  in pmem_attach_disk()
    533  addr = devm_memremap_pages(dev, &pmem->pgmap);  in pmem_attach_disk()
    537  range_len(&pmem->pgmap.range);  in pmem_attach_disk()
    539  bb_range = pmem->pgmap.range;  in pmem_attach_disk()
    542  pmem->pgmap.range.start = res->start;  in pmem_attach_disk()
    [all …]
|
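pmem_attach_disk() above points pgmap.ops at fsdax_pagemap_ops, and memory_failure_dev_pagemap() (see the mm/memory-failure.c hits earlier) calls the ->memory_failure() hook instead of the generic kill-procs path when one is present. A hedged sketch of such a hook; struct my_pmem and the function names are hypothetical stand-ins for the pmem driver's own types:

#include <linux/kernel.h>
#include <linux/memremap.h>
#include <linux/pfn.h>

/* Hypothetical driver structure embedding its pagemap, like struct pmem_device. */
struct my_pmem {
        struct dev_pagemap pgmap;
        /* ... device state ... */
};

static int my_pgmap_memory_failure(struct dev_pagemap *pgmap, unsigned long pfn,
                                   unsigned long nr_pages, int mf_flags)
{
        struct my_pmem *pmem = container_of(pgmap, struct my_pmem, pgmap);
        u64 bad_offset = PFN_PHYS(pfn) - pmem->pgmap.range.start;

        /* A real driver would translate the poisoned range into a device
         * offset and notify its dax holder; returning 0 reports the poison
         * as handled back to memory_failure(). */
        pr_debug("poison at device offset %llu, %lu pages\n", bad_offset, nr_pages);
        return 0;
}

static const struct dev_pagemap_ops my_pagemap_ops = {
        /* fsdax_pagemap_ops in pmem.c only sets .memory_failure; device-private
         * pagemaps must also provide .page_free and .migrate_to_ram. */
        .memory_failure = my_pgmap_memory_failure,
};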
D | pfn_devs.c |
    675  static int __nvdimm_setup_pfn(struct nd_pfn *nd_pfn, struct dev_pagemap *pgmap)  in __nvdimm_setup_pfn() argument
    677  struct range *range = &pgmap->range;  in __nvdimm_setup_pfn()
    678  struct vmem_altmap *altmap = &pgmap->altmap;  in __nvdimm_setup_pfn()
    698  pgmap->nr_range = 1;  in __nvdimm_setup_pfn()
    713  pgmap->flags |= PGMAP_ALTMAP_VALID;  in __nvdimm_setup_pfn()
    837  int nvdimm_setup_pfn(struct nd_pfn *nd_pfn, struct dev_pagemap *pgmap)  in nvdimm_setup_pfn() argument
    849  return __nvdimm_setup_pfn(nd_pfn, pgmap);  in nvdimm_setup_pfn()
|
D | pmem.h | 29 struct dev_pagemap pgmap; member
|
D | nd.h |
    656  int nvdimm_setup_pfn(struct nd_pfn *nd_pfn, struct dev_pagemap *pgmap);
    659  struct dev_pagemap *pgmap)  in nvdimm_setup_pfn() argument
|
/Linux-v6.1/drivers/gpu/drm/amd/amdkfd/ |
D | kfd_migrate.c |
    215  return (addr + adev->kfd.dev->pgmap.range.start) >> PAGE_SHIFT;  in svm_migrate_addr_to_pfn()
    245  return (addr - adev->kfd.dev->pgmap.range.start);  in svm_migrate_addr()
    994  struct dev_pagemap *pgmap;  in svm_migrate_init() local
    1003  pgmap = &kfddev->pgmap;  in svm_migrate_init()
    1004  memset(pgmap, 0, sizeof(*pgmap));  in svm_migrate_init()
    1011  pgmap->range.start = adev->gmc.aper_base;  in svm_migrate_init()
    1012  pgmap->range.end = adev->gmc.aper_base + adev->gmc.aper_size - 1;  in svm_migrate_init()
    1013  pgmap->type = MEMORY_DEVICE_COHERENT;  in svm_migrate_init()
    1018  pgmap->range.start = res->start;  in svm_migrate_init()
    1019  pgmap->range.end = res->end;  in svm_migrate_init()
    [all …]
|
D | kfd_svm.h | 201 #define KFD_IS_SVM_API_SUPPORTED(dev) ((dev)->pgmap.type != 0)
|
/Linux-v6.1/fs/fuse/ |
D | virtio_fs.c |
    800  struct dev_pagemap *pgmap;  in virtio_fs_setup_dax() local
    824  pgmap = devm_kzalloc(&vdev->dev, sizeof(*pgmap), GFP_KERNEL);  in virtio_fs_setup_dax()
    825  if (!pgmap)  in virtio_fs_setup_dax()
    828  pgmap->type = MEMORY_DEVICE_FS_DAX;  in virtio_fs_setup_dax()
    835  pgmap->range = (struct range) {  in virtio_fs_setup_dax()
    839  pgmap->nr_range = 1;  in virtio_fs_setup_dax()
    841  fs->window_kaddr = devm_memremap_pages(&vdev->dev, pgmap);  in virtio_fs_setup_dax()
|