/Linux-v5.4/mm/ |
memremap.c
    28  static int devmap_managed_enable_get(struct dev_pagemap *pgmap)  in devmap_managed_enable_get() argument
    30  if (!pgmap->ops || !pgmap->ops->page_free) {  in devmap_managed_enable_get()
    40  static int devmap_managed_enable_get(struct dev_pagemap *pgmap)  in devmap_managed_enable_get() argument
    56  static unsigned long pfn_first(struct dev_pagemap *pgmap)  in pfn_first() argument
    58  return PHYS_PFN(pgmap->res.start) +  in pfn_first()
    59  vmem_altmap_offset(pgmap_altmap(pgmap));  in pfn_first()
    62  static unsigned long pfn_end(struct dev_pagemap *pgmap)  in pfn_end() argument
    64  const struct resource *res = &pgmap->res;  in pfn_end()
    79  static void dev_pagemap_kill(struct dev_pagemap *pgmap)  in dev_pagemap_kill() argument
    81  if (pgmap->ops && pgmap->ops->kill)  in dev_pagemap_kill()
    [all …]
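The static pfn_first()/pfn_end() helpers matched above turn pgmap->res into a pfn range, skipping any pfns an altmap reserved for the memmap itself. A minimal sketch of that arithmetic, assuming the v5.4 field layout (example_walk_device_pfns() is a hypothetical helper, not kernel code):

#include <linux/memremap.h>
#include <linux/mm.h>
#include <linux/pfn.h>

/* Hypothetical helper: visit every device pfn covered by a pgmap,
 * mirroring the pfn_first()/pfn_end() arithmetic in mm/memremap.c. */
static void example_walk_device_pfns(struct dev_pagemap *pgmap)
{
        unsigned long pfn = PHYS_PFN(pgmap->res.start) +
                            vmem_altmap_offset(pgmap_altmap(pgmap));
        unsigned long end_pfn = PHYS_PFN(pgmap->res.end) + 1;

        for (; pfn < end_pfn; pfn++) {
                struct page *page = pfn_to_page(pfn);

                /* every page in the range points back at its pgmap */
                WARN_ON(page->pgmap != pgmap);
        }
}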
|
hmm.c
   218  struct dev_pagemap *pgmap;  member
   425  hmm_vma_walk->pgmap = get_dev_pagemap(pfn,  in hmm_vma_handle_pmd()
   426  hmm_vma_walk->pgmap);  in hmm_vma_handle_pmd()
   427  if (unlikely(!hmm_vma_walk->pgmap))  in hmm_vma_handle_pmd()
   432  if (hmm_vma_walk->pgmap) {  in hmm_vma_handle_pmd()
   433  put_dev_pagemap(hmm_vma_walk->pgmap);  in hmm_vma_handle_pmd()
   434  hmm_vma_walk->pgmap = NULL;  in hmm_vma_handle_pmd()
   530  hmm_vma_walk->pgmap = get_dev_pagemap(pte_pfn(pte),  in hmm_vma_handle_pte()
   531  hmm_vma_walk->pgmap);  in hmm_vma_handle_pte()
   532  if (unlikely(!hmm_vma_walk->pgmap))  in hmm_vma_handle_pte()
   [all …]
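hmm_vma_walk caches the last dev_pagemap it saw: get_dev_pagemap() reuses the passed-in reference while the pfn still falls inside it and only takes a new one otherwise, and the walker drops the final reference when it finishes. A sketch of that pattern, with walker_ctx standing in (hypothetically) for struct hmm_vma_walk:

#include <linux/memremap.h>
#include <linux/errno.h>

struct walker_ctx {
        struct dev_pagemap *pgmap;      /* last pgmap seen, or NULL */
};

static int walker_visit_pfn(struct walker_ctx *ctx, unsigned long pfn)
{
        /* reuses ctx->pgmap if pfn is still inside it, else re-looks up */
        ctx->pgmap = get_dev_pagemap(pfn, ctx->pgmap);
        if (!ctx->pgmap)
                return -EBUSY;  /* not device memory, or pgmap being torn down */
        /* ... safe to use pfn_to_page(pfn) while the reference is held ... */
        return 0;
}

static void walker_finish(struct walker_ctx *ctx)
{
        if (ctx->pgmap) {
                put_dev_pagemap(ctx->pgmap);
                ctx->pgmap = NULL;
        }
}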
|
gup.c
    28  struct dev_pagemap *pgmap;  member
   175  struct dev_pagemap **pgmap)  in follow_page_pte() argument
   219  *pgmap = get_dev_pagemap(pte_pfn(pte), *pgmap);  in follow_page_pte()
   220  if (*pgmap)  in follow_page_pte()
   359  page = follow_devmap_pmd(vma, address, pmd, flags, &ctx->pgmap);  in follow_pmd_mask()
   365  return follow_page_pte(vma, address, pmd, flags, &ctx->pgmap);  in follow_pmd_mask()
   385  return follow_page_pte(vma, address, pmd, flags, &ctx->pgmap);  in follow_pmd_mask()
   415  follow_page_pte(vma, address, pmd, flags, &ctx->pgmap);  in follow_pmd_mask()
   452  page = follow_devmap_pud(vma, address, pud, flags, &ctx->pgmap);  in follow_pud_mask()
   555  if (ctx.pgmap)  in follow_page()
   [all …]
|
memory-failure.c
  1157  struct dev_pagemap *pgmap)  in memory_failure_dev_pagemap() argument
  1184  if (pgmap->type == MEMORY_DEVICE_PRIVATE) {  in memory_failure_dev_pagemap()
  1226  put_dev_pagemap(pgmap);  in memory_failure_dev_pagemap()
  1253  struct dev_pagemap *pgmap;  in memory_failure() local
  1263  pgmap = get_dev_pagemap(pfn, NULL);  in memory_failure()
  1264  if (pgmap)  in memory_failure()
  1266  pgmap);  in memory_failure()
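memory_failure() resolves a poisoned pfn to its pgmap with get_dev_pagemap() before handing it to memory_failure_dev_pagemap(), and pgmap->type selects the device-private handling. A rough sketch of that lookup-and-dispatch shape (example_handle_device_poison() is hypothetical and omits the real unmap/kill work):

#include <linux/memremap.h>
#include <linux/errno.h>

static int example_handle_device_poison(unsigned long pfn)
{
        struct dev_pagemap *pgmap = get_dev_pagemap(pfn, NULL);
        int rc = -ENXIO;

        if (!pgmap)
                return rc;      /* not ZONE_DEVICE memory, or being torn down */

        if (pgmap->type == MEMORY_DEVICE_PRIVATE) {
                /* device-private pages get the unmap/driver-notification path */
                rc = 0;
        }

        put_dev_pagemap(pgmap);
        return rc;
}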
|
huge_memory.c
   934  pmd_t *pmd, int flags, struct dev_pagemap **pgmap)  in follow_devmap_pmd() argument
   967  *pgmap = get_dev_pagemap(pfn, *pgmap);  in follow_devmap_pmd()
   968  if (!*pgmap)  in follow_devmap_pmd()
  1080  pud_t *pud, int flags, struct dev_pagemap **pgmap)  in follow_devmap_pud() argument
  1107  *pgmap = get_dev_pagemap(pfn, *pgmap);  in follow_devmap_pud()
  1108  if (!*pgmap)  in follow_devmap_pud()
|
swap.c
   103  put_dev_pagemap(page->pgmap);  in __put_page()
|
page_alloc.c
  5951  struct dev_pagemap *pgmap)  in memmap_init_zone_device() argument
  5955  struct vmem_altmap *altmap = pgmap_altmap(pgmap);  in memmap_init_zone_device()
  5960  if (WARN_ON_ONCE(!pgmap || zone_idx(zone) != ZONE_DEVICE))  in memmap_init_zone_device()
  5992  page->pgmap = pgmap;  in memmap_init_zone_device()
|
memory.c
  2756  ret = vmf->page->pgmap->ops->migrate_to_ram(vmf);  in do_swap_page()
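The single match above is the CPU-fault path: when userspace touches a device-private page, do_swap_page() calls the owning driver back through pgmap->ops->migrate_to_ram() to bring the data into system RAM. A skeleton of such an op, assuming the v5.4 dev_pagemap_ops layout (the body is a placeholder, not any real driver's logic):

#include <linux/memremap.h>
#include <linux/mm.h>

static vm_fault_t example_migrate_to_ram(struct vm_fault *vmf)
{
        struct page *page = vmf->page;  /* the faulting device-private page */

        /* A real driver would allocate a system page, copy the data back
         * from device memory, and install it in place of the swap entry.
         * Returning VM_FAULT_SIGBUS here only documents the failure path. */
        (void)page;
        return VM_FAULT_SIGBUS;
}

static const struct dev_pagemap_ops example_devmem_ops = {
        .migrate_to_ram = example_migrate_to_ram,
};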
|
/Linux-v5.4/include/linux/ |
memremap.h
    78  void (*kill)(struct dev_pagemap *pgmap);
    83  void (*cleanup)(struct dev_pagemap *pgmap);
   118  static inline struct vmem_altmap *pgmap_altmap(struct dev_pagemap *pgmap)  in pgmap_altmap() argument
   120  if (pgmap->flags & PGMAP_ALTMAP_VALID)  in pgmap_altmap()
   121  return &pgmap->altmap;  in pgmap_altmap()
   126  void *memremap_pages(struct dev_pagemap *pgmap, int nid);
   127  void memunmap_pages(struct dev_pagemap *pgmap);
   128  void *devm_memremap_pages(struct device *dev, struct dev_pagemap *pgmap);
   129  void devm_memunmap_pages(struct device *dev, struct dev_pagemap *pgmap);
   131  struct dev_pagemap *pgmap);
   [all …]
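memremap.h declares the lifecycle API: a driver fills in a struct dev_pagemap and hands it to devm_memremap_pages(), which creates struct pages for the range. A minimal sketch, assuming the v5.4 behaviour that a NULL pgmap->ref makes the core fall back to its internal percpu_ref so no kill()/cleanup() ops are required (example_map_device_memory(), base and size are hypothetical):

#include <linux/memremap.h>
#include <linux/device.h>
#include <linux/ioport.h>
#include <linux/gfp.h>
#include <linux/err.h>
#include <linux/types.h>

static void *example_map_device_memory(struct device *dev,
                                       phys_addr_t base, resource_size_t size)
{
        struct dev_pagemap *pgmap;

        pgmap = devm_kzalloc(dev, sizeof(*pgmap), GFP_KERNEL);
        if (!pgmap)
                return ERR_PTR(-ENOMEM);

        pgmap->res.start = base;
        pgmap->res.end = base + size - 1;
        pgmap->res.flags = IORESOURCE_MEM;
        pgmap->type = MEMORY_DEVICE_DEVDAX;     /* no page_free/migrate ops needed */

        /* returns the kernel virtual address of the mapped range, or ERR_PTR */
        return devm_memremap_pages(dev, pgmap);
}

The p2pdma.c and dax/device.c entries further down follow this same flow, p2pdma filling the resource from a PCI BAR and using MEMORY_DEVICE_PCI_P2PDMA.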
|
huge_mm.h
   242  pmd_t *pmd, int flags, struct dev_pagemap **pgmap);
   244  pud_t *pud, int flags, struct dev_pagemap **pgmap);
   393  unsigned long addr, pmd_t *pmd, int flags, struct dev_pagemap **pgmap)  in follow_devmap_pmd() argument
   399  unsigned long addr, pud_t *pud, int flags, struct dev_pagemap **pgmap)  in follow_devmap_pud() argument
|
mm_types.h
   160  struct dev_pagemap *pgmap;  member
|
mm.h
   978  switch (page->pgmap->type) {  in put_devmap_managed_page()
  1001  page->pgmap->type == MEMORY_DEVICE_PRIVATE;  in is_device_private_page()
  1009  page->pgmap->type == MEMORY_DEVICE_PCI_P2PDMA;  in is_pci_p2pdma_page()
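The mm.h helpers matched above all reduce to a check on page->pgmap->type, so callers can tell what flavour of ZONE_DEVICE page they hold. A small sketch of dispatching on that (describe_zone_device_page() is hypothetical):

#include <linux/mm.h>

static const char *describe_zone_device_page(struct page *page)
{
        if (is_device_private_page(page))
                return "device-private (driver-managed, not directly CPU-addressable)";
        if (is_pci_p2pdma_page(page))
                return "PCI peer-to-peer DMA";
        if (is_zone_device_page(page))
                return "other ZONE_DEVICE (fsdax/devdax)";
        return "regular RAM";
}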
|
/Linux-v5.4/tools/testing/nvdimm/test/ |
iomap.c
   101  struct dev_pagemap *pgmap = _pgmap;  in nfit_test_kill() local
   103  WARN_ON(!pgmap || !pgmap->ref);  in nfit_test_kill()
   105  if (pgmap->ops && pgmap->ops->kill)  in nfit_test_kill()
   106  pgmap->ops->kill(pgmap);  in nfit_test_kill()
   108  percpu_ref_kill(pgmap->ref);  in nfit_test_kill()
   110  if (pgmap->ops && pgmap->ops->cleanup) {  in nfit_test_kill()
   111  pgmap->ops->cleanup(pgmap);  in nfit_test_kill()
   113  wait_for_completion(&pgmap->done);  in nfit_test_kill()
   114  percpu_ref_exit(pgmap->ref);  in nfit_test_kill()
   120  struct dev_pagemap *pgmap =  in dev_pagemap_percpu_release() local
   [all …]
|
/Linux-v5.4/drivers/pci/ |
p2pdma.c
    37  struct dev_pagemap pgmap;  member
    42  static struct pci_p2pdma_pagemap *to_p2p_pgmap(struct dev_pagemap *pgmap)  in to_p2p_pgmap() argument
    44  return container_of(pgmap, struct pci_p2pdma_pagemap, pgmap);  in to_p2p_pgmap()
   161  struct dev_pagemap *pgmap;  in pci_p2pdma_add_resource() local
   187  pgmap = &p2p_pgmap->pgmap;  in pci_p2pdma_add_resource()
   188  pgmap->res.start = pci_resource_start(pdev, bar) + offset;  in pci_p2pdma_add_resource()
   189  pgmap->res.end = pgmap->res.start + size - 1;  in pci_p2pdma_add_resource()
   190  pgmap->res.flags = pci_resource_flags(pdev, bar);  in pci_p2pdma_add_resource()
   191  pgmap->type = MEMORY_DEVICE_PCI_P2PDMA;  in pci_p2pdma_add_resource()
   197  addr = devm_memremap_pages(&pdev->dev, pgmap);  in pci_p2pdma_add_resource()
   [all …]
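p2pdma.c shows the usual embedding idiom: the driver wraps struct dev_pagemap in its own state (struct pci_p2pdma_pagemap) and recovers the wrapper from page->pgmap with container_of(). A generic sketch of the same pattern (example_pgmap and bus_offset are hypothetical stand-ins):

#include <linux/memremap.h>
#include <linux/mm.h>
#include <linux/kernel.h>
#include <linux/types.h>

struct example_pgmap {
        struct dev_pagemap pgmap;
        u64 bus_offset;                 /* driver-private data */
};

static struct example_pgmap *to_example_pgmap(struct dev_pagemap *pgmap)
{
        return container_of(pgmap, struct example_pgmap, pgmap);
}

static u64 example_bus_offset_of_page(struct page *page)
{
        /* page->pgmap points at the embedded member, so container_of()
         * recovers the surrounding driver structure. */
        return to_example_pgmap(page->pgmap)->bus_offset;
}

The nouveau_dmem.c entry at the end of this listing uses the same container_of() trick for its device-private pages.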
|
/Linux-v5.4/drivers/nvdimm/ |
pmem.c
   310  static void pmem_pagemap_cleanup(struct dev_pagemap *pgmap)  in pmem_pagemap_cleanup() argument
   313  container_of(pgmap->ref, struct request_queue, q_usage_counter);  in pmem_pagemap_cleanup()
   318  static void pmem_release_queue(void *pgmap)  in pmem_release_queue() argument
   320  pmem_pagemap_cleanup(pgmap);  in pmem_release_queue()
   323  static void pmem_pagemap_kill(struct dev_pagemap *pgmap)  in pmem_pagemap_kill() argument
   326  container_of(pgmap->ref, struct request_queue, q_usage_counter);  in pmem_pagemap_kill()
   378  rc = nvdimm_setup_pfn(nd_pfn, &pmem->pgmap);  in pmem_attach_disk()
   406  pmem->pgmap.ref = &q->q_usage_counter;  in pmem_attach_disk()
   408  pmem->pgmap.type = MEMORY_DEVICE_FS_DAX;  in pmem_attach_disk()
   409  pmem->pgmap.ops = &fsdax_pagemap_ops;  in pmem_attach_disk()
   [all …]
|
pfn_devs.c
   663  static int __nvdimm_setup_pfn(struct nd_pfn *nd_pfn, struct dev_pagemap *pgmap)  in __nvdimm_setup_pfn() argument
   665  struct resource *res = &pgmap->res;  in __nvdimm_setup_pfn()
   666  struct vmem_altmap *altmap = &pgmap->altmap;  in __nvdimm_setup_pfn()
   700  pgmap->flags |= PGMAP_ALTMAP_VALID;  in __nvdimm_setup_pfn()
   806  int nvdimm_setup_pfn(struct nd_pfn *nd_pfn, struct dev_pagemap *pgmap)  in nvdimm_setup_pfn() argument
   818  return __nvdimm_setup_pfn(nd_pfn, pgmap);  in nvdimm_setup_pfn()
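__nvdimm_setup_pfn() is where the altmap gets populated: when the memmap is to live in the pmem device itself, the driver fills pgmap->altmap and sets PGMAP_ALTMAP_VALID. A simplified sketch of that step, assuming the v5.4 vmem_altmap layout with const base_pfn/reserve fields (example_setup_altmap(), base_pfn and reserve_pfns are hypothetical):

#include <linux/memremap.h>
#include <linux/string.h>

static void example_setup_altmap(struct dev_pagemap *pgmap,
                                 unsigned long base_pfn,
                                 unsigned long reserve_pfns)
{
        /* base_pfn and reserve are const members of struct vmem_altmap,
         * so build a local copy and memcpy it over, as the real
         * __nvdimm_setup_pfn() does. */
        struct vmem_altmap altmap = {
                .base_pfn = base_pfn,           /* first pfn of the range */
                .reserve  = reserve_pfns,       /* pfns set aside for the memmap */
        };

        memcpy(&pgmap->altmap, &altmap, sizeof(altmap));
        pgmap->flags |= PGMAP_ALTMAP_VALID;
}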
|
pmem.h
    26  struct dev_pagemap pgmap;  member
|
nd.h
   378  int nvdimm_setup_pfn(struct nd_pfn *nd_pfn, struct dev_pagemap *pgmap);
   383  struct dev_pagemap *pgmap)  in nvdimm_setup_pfn() argument
|
/Linux-v5.4/drivers/dax/pmem/ |
core.c
    19  struct dev_pagemap pgmap = { };  in __dax_pmem_probe() local
    34  rc = nvdimm_setup_pfn(nd_pfn, &pgmap);  in __dax_pmem_probe()
    53  memcpy(&res, &pgmap.res, sizeof(res));  in __dax_pmem_probe()
    61  dev_dax = __devm_create_dev_dax(dax_region, id, &pgmap, subsys);  in __dax_pmem_probe()
|
/Linux-v5.4/drivers/dax/ |
bus.h
    22  struct dev_pagemap *pgmap, enum dev_dax_subsys subsys);
    25  int id, struct dev_pagemap *pgmap)  in devm_create_dev_dax() argument
    27  return __devm_create_dev_dax(dax_region, id, pgmap, DEV_DAX_BUS);  in devm_create_dev_dax()
|
super.c
   126  struct dev_pagemap *pgmap, *end_pgmap;  in __generic_fsdax_supported() local
   128  pgmap = get_dev_pagemap(pfn_t_to_pfn(pfn), NULL);  in __generic_fsdax_supported()
   130  if (pgmap && pgmap == end_pgmap && pgmap->type == MEMORY_DEVICE_FS_DAX  in __generic_fsdax_supported()
   131  && pfn_t_to_page(pfn)->pgmap == pgmap  in __generic_fsdax_supported()
   132  && pfn_t_to_page(end_pfn)->pgmap == pgmap  in __generic_fsdax_supported()
   136  put_dev_pagemap(pgmap);  in __generic_fsdax_supported()
|
bus.c
   388  struct dev_pagemap *pgmap, enum dev_dax_subsys subsys)  in __devm_create_dev_dax() argument
   404  memcpy(&dev_dax->pgmap, pgmap, sizeof(*pgmap));  in __devm_create_dev_dax()
|
dax-private.h
    53  struct dev_pagemap pgmap;  member
|
device.c
   431  dev_dax->pgmap.type = MEMORY_DEVICE_DEVDAX;  in dev_dax_probe()
   432  addr = devm_memremap_pages(dev, &dev_dax->pgmap);  in dev_dax_probe()
|
/Linux-v5.4/drivers/gpu/drm/nouveau/ |
nouveau_dmem.c
    84  return container_of(page->pgmap, struct nouveau_dmem, pagemap);  in page_to_dmem()
|