/Linux-v5.15/kernel/ |
D | range.c |
      12   int add_range(struct range *range, int az, int nr_range, u64 start, u64 end)    in add_range() argument
      15   return nr_range;    in add_range()
      18   if (nr_range >= az)    in add_range()
      19   return nr_range;    in add_range()
      21   range[nr_range].start = start;    in add_range()
      22   range[nr_range].end = end;    in add_range()
      24   nr_range++;    in add_range()
      26   return nr_range;    in add_range()
      29   int add_range_with_merge(struct range *range, int az, int nr_range,    in add_range_with_merge() argument
      35   return nr_range;    in add_range_with_merge()
      [all …]
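The kernel/range.c hits above cover nearly the whole body of add_range(): append [start, end] to a caller-owned array of az slots and return the updated count. A minimal sketch along those lines; the early return at line 15 is not shown in full by the search output, so the empty-range guard here is an assumption:

    #include <linux/range.h>

    int add_range(struct range *range, int az, int nr_range, u64 start, u64 end)
    {
            /* Assumed guard (condition not visible in the hits): skip empty ranges. */
            if (start >= end)
                    return nr_range;

            /* Array of 'az' slots is full: silently drop the new range. */
            if (nr_range >= az)
                    return nr_range;

            range[nr_range].start = start;
            range[nr_range].end = end;
            nr_range++;

            /* The caller must store the returned count back into its own nr_range. */
            return nr_range;
    }

Because nr_range is passed by value and the updated count is returned, every caller in this listing writes the result back, e.g. the mtrr cleanup.c hit at line 79: nr_range = add_range_with_merge(...).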
|
/Linux-v5.15/arch/x86/mm/ |
D | init.c |
      302  static int __meminit save_mr(struct map_range *mr, int nr_range,    in save_mr() argument
      307  if (nr_range >= NR_RANGE_MR)    in save_mr()
      309  mr[nr_range].start = start_pfn<<PAGE_SHIFT;    in save_mr()
      310  mr[nr_range].end = end_pfn<<PAGE_SHIFT;    in save_mr()
      311  mr[nr_range].page_size_mask = page_size_mask;    in save_mr()
      312  nr_range++;    in save_mr()
      315  return nr_range;    in save_mr()
      323  int nr_range)    in adjust_range_page_size_mask() argument
      327  for (i = 0; i < nr_range; i++) {    in adjust_range_page_size_mask()
      377  static int __meminit split_mem_range(struct map_range *mr, int nr_range,    in split_mem_range() argument
      [all …]
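The init.c hits show save_mr() filling the next slot of a fixed table of NR_RANGE_MR map_range entries, converting page-frame numbers to physical addresses with PAGE_SHIFT. A hedged sketch follows: the remaining parameters are inferred from lines 309-311, the struct map_range shape and the NR_RANGE_MR value are placeholders local to init.c, and the empty-range and overflow handling around lines 306-308 are not visible in the output, so this version simply refuses the entry when the table is full:

    /* Assumed shape of the definitions local to arch/x86/mm/init.c: */
    struct map_range {
            unsigned long start;
            unsigned long end;
            unsigned page_size_mask;
    };
    #define NR_RANGE_MR 5   /* placeholder capacity; the real value is arch-dependent */

    static int save_mr(struct map_range *mr, int nr_range,
                       unsigned long start_pfn, unsigned long end_pfn,
                       unsigned long page_size_mask)
    {
            if (start_pfn < end_pfn) {              /* assumed: skip empty ranges */
                    if (nr_range >= NR_RANGE_MR)
                            return nr_range;        /* assumed overflow handling */
                    mr[nr_range].start = start_pfn << PAGE_SHIFT;
                    mr[nr_range].end = end_pfn << PAGE_SHIFT;
                    mr[nr_range].page_size_mask = page_size_mask;
                    nr_range++;
            }

            return nr_range;
    }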
|
/Linux-v5.15/arch/x86/kernel/cpu/mtrr/ |
D | cleanup.c |
      54   static int __initdata nr_range;    variable
      65   x86_get_mtrr_mem_range(struct range *range, int nr_range,    in x86_get_mtrr_mem_range() argument
      79   nr_range = add_range_with_merge(range, RANGE_NUM, nr_range,    in x86_get_mtrr_mem_range()
      84   for (i = 0; i < nr_range; i++)    in x86_get_mtrr_mem_range()
      126  nr_range = clean_sort_range(range, RANGE_NUM);    in x86_get_mtrr_mem_range()
      129  for (i = 0; i < nr_range; i++)    in x86_get_mtrr_mem_range()
      134  return nr_range;    in x86_get_mtrr_mem_range()
      139  static unsigned long __init sum_ranges(struct range *range, int nr_range)    in sum_ranges() argument
      144  for (i = 0; i < nr_range; i++)    in sum_ranges()
      473  x86_setup_var_mtrrs(struct range *range, int nr_range,    in x86_setup_var_mtrrs() argument
      [all …]
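In the MTRR cleanup code, nr_range is also a file-scope counter (line 54) that is threaded through add_range_with_merge() and refreshed by clean_sort_range() at line 126, while sum_ranges() totals the memory covered by the first nr_range entries. An illustrative helper in that spirit, not a verbatim copy of sum_ranges(); it assumes struct range holds inclusive [start, end] bounds, matching range_len() in include/linux/range.h:

    #include <linux/range.h>

    static unsigned long sum_ranges(struct range *range, int nr_range)
    {
            unsigned long sum = 0;
            int i;

            /* Total bytes covered, assuming inclusive end bounds. */
            for (i = 0; i < nr_range; i++)
                    sum += range[i].end + 1 - range[i].start;

            return sum;
    }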
|
/Linux-v5.15/mm/ |
D | memremap.c |
      87   for (i = 0; i < pgmap->nr_range; i++) {    in pgmap_pfn_valid()
      171  for (i = 0; i < pgmap->nr_range; i++)    in memunmap_pages()
      176  for (i = 0; i < pgmap->nr_range; i++)    in memunmap_pages()
      323  const int nr_range = pgmap->nr_range;    in memremap_pages() local
      326  if (WARN_ONCE(!nr_range, "nr_range must be specified\n"))    in memremap_pages()
      389  pgmap->nr_range = 0;    in memremap_pages()
      391  for (i = 0; i < nr_range; i++) {    in memremap_pages()
      395  pgmap->nr_range++;    in memremap_pages()
      398  if (i < nr_range) {    in memremap_pages()
      400  pgmap->nr_range = nr_range;    in memremap_pages()
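Here nr_range counts the populated entries of the dev_pagemap ranges[] array: memremap_pages() rejects a pgmap with nr_range == 0 (line 326) and re-increments pgmap->nr_range as each range is mapped (lines 389-395), so a failure path only tears down what was actually set up. A hedged sketch of the lookup pattern behind line 87, walking the ranges to find the one containing a pfn; the helper name and return convention are illustrative, not the kernel's pgmap_pfn_valid():

    #include <linux/memremap.h>
    #include <linux/pfn.h>

    /* Illustrative: return the index of the range containing @pfn, or -1. */
    static int pgmap_find_range(struct dev_pagemap *pgmap, unsigned long pfn)
    {
            int i;

            for (i = 0; i < pgmap->nr_range; i++) {
                    struct range *range = &pgmap->ranges[i];

                    if (pfn >= PHYS_PFN(range->start) &&
                        pfn <= PHYS_PFN(range->end))
                            return i;
            }

            return -1;
    }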
|
/Linux-v5.15/include/linux/ |
D | range.h |
      16   int add_range(struct range *range, int az, int nr_range,
      20   int add_range_with_merge(struct range *range, int az, int nr_range,
      27   void sort_range(struct range *range, int nr_range);
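These declarations show the calling convention shared by the users above: the caller owns a fixed-size array, passes its capacity (az) plus the current count, and stores the returned count back. A minimal, hypothetical caller-side sketch; the array size, names, and hex bounds are made up for illustration:

    #include <linux/range.h>

    #define MY_RANGE_NUM 16                 /* hypothetical capacity */

    static struct range my_ranges[MY_RANGE_NUM];

    static int my_build_ranges(void)
    {
            int nr_range = 0;

            /* Each call returns the updated element count. */
            nr_range = add_range_with_merge(my_ranges, MY_RANGE_NUM, nr_range,
                                            0x100000, 0x3fffff);
            nr_range = add_range_with_merge(my_ranges, MY_RANGE_NUM, nr_range,
                                            0x400000, 0x7fffff);

            sort_range(my_ranges, nr_range);

            return nr_range;
    }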
|
D | memremap.h | 119 int nr_range; member
|
/Linux-v5.15/drivers/dax/ |
D | kmem.c |
      65   for (i = 0; i < dev_dax->nr_range; i++) {    in dev_dax_kmem_probe()
      82   data = kzalloc(struct_size(data, res, dev_dax->nr_range), GFP_KERNEL);    in dev_dax_kmem_probe()
      96   for (i = 0; i < dev_dax->nr_range; i++) {    in dev_dax_kmem_probe()
      174  for (i = 0; i < dev_dax->nr_range; i++) {    in dev_dax_kmem_remove()
      196  if (success >= dev_dax->nr_range) {    in dev_dax_kmem_remove()
|
D | bus.c |
      144  for (i = 0; i < dev_dax->nr_range; i++)    in dev_dax_size()
      371  int i = dev_dax->nr_range - 1;    in trim_dev_dax_range()
      381  if (--dev_dax->nr_range == 0) {    in trim_dev_dax_range()
      389  while (dev_dax->nr_range)    in free_dev_dax_ranges()
      767  if (dev_WARN_ONCE(dev, dev_dax->nr_range,    in alloc_dev_dax_range()
      779  * (dev_dax->nr_range + 1), GFP_KERNEL);    in alloc_dev_dax_range()
      785  for (i = 0; i < dev_dax->nr_range; i++)    in alloc_dev_dax_range()
      788  ranges[dev_dax->nr_range++] = (struct dev_dax_range) {    in alloc_dev_dax_range()
      796  dev_dbg(dev, "alloc range[%d]: %pa:%pa\n", dev_dax->nr_range - 1,    in alloc_dev_dax_range()
      806  rc = devm_register_dax_mapping(dev_dax, dev_dax->nr_range - 1);    in alloc_dev_dax_range()
      [all …]
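Unlike the fixed tables above, drivers/dax/bus.c grows a heap-allocated array: alloc_dev_dax_range() reallocates the ranges array to nr_range + 1 entries, appends the new element at index nr_range, and post-increments the count (lines 779 and 788), while trim_dev_dax_range() walks back from nr_range - 1 and releases everything when the count drops to zero. A hedged sketch of that append pattern with hypothetical types and names, not the dev_dax structures themselves:

    #include <linux/errno.h>
    #include <linux/range.h>
    #include <linux/slab.h>

    /* Hypothetical container mirroring the counted-array layout in the hits. */
    struct my_dev {
            int nr_range;
            struct range *ranges;
    };

    /* Illustrative append: grow the array by one slot and bump nr_range. */
    static int my_dev_add_range(struct my_dev *dev, u64 start, u64 end)
    {
            struct range *ranges;

            ranges = krealloc(dev->ranges,
                              sizeof(*ranges) * (dev->nr_range + 1),
                              GFP_KERNEL);
            if (!ranges)
                    return -ENOMEM;

            dev->ranges = ranges;
            dev->ranges[dev->nr_range++] = (struct range) {
                    .start = start,
                    .end = end,
            };

            return 0;
    }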
|
D | device.c |
      59   for (i = 0; i < dev_dax->nr_range; i++) {    in dax_pgoff_to_phys()
      402  if (dev_WARN_ONCE(dev, pgmap && dev_dax->nr_range > 1,    in dev_dax_probe()
      408  * (dev_dax->nr_range - 1), GFP_KERNEL);    in dev_dax_probe()
      411  pgmap->nr_range = dev_dax->nr_range;    in dev_dax_probe()
      414  for (i = 0; i < dev_dax->nr_range; i++) {    in dev_dax_probe()
|
D | dax-private.h | 71 int nr_range; member
|
/Linux-v5.15/tools/testing/nvdimm/ |
D | dax-dev.c | 14 for (i = 0; i < dev_dax->nr_range; i++) { in dax_pgoff_to_phys()
|
/Linux-v5.15/drivers/xen/ |
D | unpopulated-alloc.c | 52 pgmap->nr_range = 1; in fill_list()
|
/Linux-v5.15/drivers/nvdimm/ |
D | pmem.c | 420 pmem->pgmap.nr_range = 1; in pmem_attach_disk()
|
D | pfn_devs.c | 696 pgmap->nr_range = 1; in __nvdimm_setup_pfn()
|
/Linux-v5.15/drivers/pci/ |
D | p2pdma.c | 205 pgmap->nr_range = 1; in pci_p2pdma_add_resource()
|
/Linux-v5.15/drivers/gpu/drm/nouveau/ |
D | nouveau_dmem.c | 254 chunk->pagemap.nr_range = 1; in nouveau_dmem_chunk_alloc()
|
/Linux-v5.15/arch/powerpc/kvm/ |
D | book3s_hv_uvmem.c | 1176 kvmppc_uvmem_pgmap.nr_range = 1; in kvmppc_uvmem_init()
|
/Linux-v5.15/drivers/gpu/drm/amd/amdkfd/ |
D | kfd_migrate.c | 888 pgmap->nr_range = 1; in svm_migrate_init()
|
/Linux-v5.15/lib/ |
D | test_hmm.c | 476 devmem->pagemap.nr_range = 1; in dmirror_allocate_chunk()
|
/Linux-v5.15/fs/fuse/ |
D | virtio_fs.c | 841 pgmap->nr_range = 1; in virtio_fs_setup_dax()
|
/Linux-v5.15/Documentation/vm/ |
D | hmm.rst | 295 pagemap.nr_range = 1;
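The long tail of drivers above (xen unpopulated-alloc, pmem, pfn_devs, p2pdma, nouveau, kvmppc uvmem, amdkfd, test_hmm, virtio_fs) each describe exactly one physical range, so they simply set nr_range = 1 before calling memremap_pages(), which is also what the hmm.rst example documents. A hedged sketch of that single-range setup in the spirit of the hmm.rst snippet; the names, the resource bounds, the choice of MEMORY_DEVICE_PRIVATE, and the placeholder ops and owner are assumptions, not copied from any of the drivers listed:

    #include <linux/ioport.h>
    #include <linux/memremap.h>
    #include <linux/mm.h>

    static void my_page_free(struct page *page)
    {
            /* placeholder: return the backing page to a driver-private pool */
    }

    static vm_fault_t my_migrate_to_ram(struct vm_fault *vmf)
    {
            /* placeholder: migrate device-private data back to system RAM */
            return VM_FAULT_SIGBUS;
    }

    static const struct dev_pagemap_ops my_devmem_ops = {
            .page_free      = my_page_free,
            .migrate_to_ram = my_migrate_to_ram,
    };

    static struct dev_pagemap my_pagemap;

    /* Illustrative single-range setup before memremap_pages(). */
    static void *my_map_device_memory(struct resource *res, int nid)
    {
            my_pagemap.type = MEMORY_DEVICE_PRIVATE;   /* placeholder type */
            my_pagemap.range.start = res->start;
            my_pagemap.range.end = res->end;
            my_pagemap.nr_range = 1;                   /* one entry in range/ranges[] */
            my_pagemap.ops = &my_devmem_ops;
            my_pagemap.owner = &my_pagemap;            /* placeholder grouping token */

            return memremap_pages(&my_pagemap, nid);
    }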
|