Lines Matching refs:nd_mapping

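Every reference below touches the same small set of struct nd_mapping members: the backing nvdimm, the DPA window described by start/size, the labels list guarded by lock, and the cached drvdata handle ndd. A minimal sketch of the structure, reconstructed only from the usage shown in this listing (field types and ordering are assumptions; the authoritative definition lives in the kernel's nvdimm driver headers):

	/* Sketch only: fields inferred from the references below, not the canonical definition. */
	struct nd_mapping {
		struct nvdimm *nvdimm;          /* backing DIMM; compared against in __reserve_free_pmem() */
		u64 start;                      /* first device physical address (DPA) of the mapping */
		u64 size;                       /* span of the mapping; start + size - 1 is the last valid DPA */
		struct list_head labels;        /* label entries walked in namespace_update_uuid(), scan_labels(), ... */
		struct mutex lock;              /* taken around every walk of the labels list */
		struct nvdimm_drvdata *ndd;     /* per-DIMM driver data, normally obtained via to_ndd() */
	};
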
297 struct nd_mapping *nd_mapping = &nd_region->mapping[0]; in nd_namespace_blk_size() local
298 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in nd_namespace_blk_size()
315 struct nd_mapping *nd_mapping = &nd_region->mapping[0]; in __nd_namespace_blk_validate() local
316 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in __nd_namespace_blk_validate()
447 struct nd_mapping *nd_mapping, struct nd_label_id *label_id, in scan_free() argument
451 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in scan_free()
510 struct nd_mapping *nd_mapping = &nd_region->mapping[i]; in shrink_dpa_allocation() local
513 rc = scan_free(nd_region, nd_mapping, label_id, n); in shrink_dpa_allocation()
522 struct nd_region *nd_region, struct nd_mapping *nd_mapping, in init_dpa_allocation() argument
526 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in init_dpa_allocation()
533 first_dpa = nd_mapping->start + nd_mapping->size - n; in init_dpa_allocation()
535 first_dpa = nd_mapping->start; in init_dpa_allocation()
578 struct nd_mapping *nd_mapping = &nd_region->mapping[0]; in space_valid() local
581 .nd_mapping = nd_mapping, in space_valid()
582 .available = nd_mapping->size, in space_valid()
615 struct nd_mapping *nd_mapping, struct nd_label_id *label_id, in scan_allocate() argument
618 resource_size_t mapping_end = nd_mapping->start + nd_mapping->size - 1; in scan_allocate()
620 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in scan_allocate()
629 valid.start = nd_mapping->start; in scan_allocate()
644 if (res->end < nd_mapping->start) in scan_allocate()
648 if (!first++ && res->start > nd_mapping->start) { in scan_allocate()
649 valid.start = nd_mapping->start; in scan_allocate()
762 return init_dpa_allocation(label_id, nd_region, nd_mapping, n); in scan_allocate()
767 struct nd_mapping *nd_mapping, struct nd_label_id *label_id) in merge_dpa() argument
769 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in merge_dpa()
814 struct nd_mapping *nd_mapping = &nd_region->mapping[i]; in __reserve_free_pmem() local
817 if (nd_mapping->nvdimm != nvdimm) in __reserve_free_pmem()
820 n = nd_pmem_available_dpa(nd_region, nd_mapping, &rem); in __reserve_free_pmem()
823 rem = scan_allocate(nd_region, nd_mapping, &label_id, n); in __reserve_free_pmem()
835 struct nd_mapping *nd_mapping) in release_free_pmem() argument
837 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in release_free_pmem()
846 struct nd_mapping *nd_mapping) in reserve_free_pmem() argument
848 struct nvdimm *nvdimm = nd_mapping->nvdimm; in reserve_free_pmem()
854 release_free_pmem(nvdimm_bus, nd_mapping); in reserve_free_pmem()
879 struct nd_mapping *nd_mapping = &nd_region->mapping[i]; in grow_dpa_allocation() local
891 rc = reserve_free_pmem(nvdimm_bus, nd_mapping); in grow_dpa_allocation()
895 rem = scan_allocate(nd_region, nd_mapping, in grow_dpa_allocation()
898 release_free_pmem(nvdimm_bus, nd_mapping); in grow_dpa_allocation()
912 rc = merge_dpa(nd_region, nd_mapping, label_id); in grow_dpa_allocation()
932 struct nd_mapping *nd_mapping = &nd_region->mapping[0]; in nd_namespace_pmem_set_resource() local
933 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in nd_namespace_pmem_set_resource()
947 offset = (res->start - nd_mapping->start) in nd_namespace_pmem_set_resource()
975 struct nd_mapping *nd_mapping; in __size_store() local
1018 nd_mapping = &nd_region->mapping[i]; in __size_store()
1019 ndd = to_ndd(nd_mapping); in __size_store()
1150 struct nd_mapping *nd_mapping = &nd_region->mapping[i]; in nvdimm_namespace_locked() local
1151 struct nvdimm *nvdimm = nd_mapping->nvdimm; in nvdimm_namespace_locked()
1224 struct nd_mapping *nd_mapping = &nd_region->mapping[i]; in namespace_update_uuid() local
1233 if (list_empty(&nd_mapping->labels)) in namespace_update_uuid()
1240 struct nd_mapping *nd_mapping = &nd_region->mapping[i]; in namespace_update_uuid() local
1241 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in namespace_update_uuid()
1250 mutex_lock(&nd_mapping->lock); in namespace_update_uuid()
1251 list_for_each_entry(label_ent, &nd_mapping->labels, list) { in namespace_update_uuid()
1262 mutex_unlock(&nd_mapping->lock); in namespace_update_uuid()
1423 struct nd_mapping *nd_mapping = &nd_region->mapping[i]; in dpa_extents_show() local
1424 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in dpa_extents_show()
1444 struct nd_mapping *nd_mapping = &nd_region->mapping[i]; in btt_claim_class() local
1445 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in btt_claim_class()
1798 struct nd_mapping *nd_mapping = &nd_region->mapping[i]; in has_uuid_at_pos() local
1800 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in has_uuid_at_pos()
1804 list_for_each_entry(label_ent, &nd_mapping->labels, list) { in has_uuid_at_pos()
1856 struct nd_mapping *nd_mapping = &nd_region->mapping[i]; in select_pmem_id() local
1857 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in select_pmem_id()
1862 lockdep_assert_held(&nd_mapping->lock); in select_pmem_id()
1863 list_for_each_entry(label_ent, &nd_mapping->labels, list) { in select_pmem_id()
1881 hw_start = nd_mapping->start; in select_pmem_id()
1882 hw_end = hw_start + nd_mapping->size; in select_pmem_id()
1895 list_move(&label_ent->list, &nd_mapping->labels); in select_pmem_id()
1914 struct nd_mapping *nd_mapping; in create_namespace_pmem() local
1987 nd_mapping = &nd_region->mapping[i]; in create_namespace_pmem()
1988 label_ent = list_first_entry_or_null(&nd_mapping->labels, in create_namespace_pmem()
2006 ndd = to_ndd(nd_mapping); in create_namespace_pmem()
2181 struct nd_mapping *nd_mapping = &nd_region->mapping[0]; in add_namespace_resource() local
2182 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in add_namespace_resource()
2219 struct nd_mapping *nd_mapping = &nd_region->mapping[0]; in create_namespace_blk() local
2221 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in create_namespace_blk()
2306 struct nd_mapping *nd_mapping = &nd_region->mapping[0]; in scan_labels() local
2307 resource_size_t map_end = nd_mapping->start + nd_mapping->size - 1; in scan_labels()
2310 list_for_each_entry_safe(label_ent, e, &nd_mapping->labels, list) { in scan_labels()
2325 if (__le64_to_cpu(nd_label->dpa) < nd_mapping->start || in scan_labels()
2344 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in scan_labels()
2373 nd_mapping_free_labels(nd_mapping); in scan_labels()
2405 nd_mapping = &nd_region->mapping[i]; in scan_labels()
2406 if (list_empty(&nd_mapping->labels)) { in scan_labels()
2412 list_for_each_safe(l, e, &nd_mapping->labels) { in scan_labels()
2417 nd_mapping_free_labels(nd_mapping); in scan_labels()
2418 list_splice_init(&list, &nd_mapping->labels); in scan_labels()
2441 struct nd_mapping *nd_mapping; in create_namespaces() local
2450 nd_mapping = &nd_region->mapping[i]; in create_namespaces()
2451 mutex_lock_nested(&nd_mapping->lock, i); in create_namespaces()
2459 nd_mapping = &nd_region->mapping[reverse]; in create_namespaces()
2460 mutex_unlock(&nd_mapping->lock); in create_namespaces()
2472 struct nd_mapping *nd_mapping = &nd_region->mapping[i]; in deactivate_labels() local
2473 struct nvdimm_drvdata *ndd = nd_mapping->ndd; in deactivate_labels()
2474 struct nvdimm *nvdimm = nd_mapping->nvdimm; in deactivate_labels()
2476 mutex_lock(&nd_mapping->lock); in deactivate_labels()
2477 nd_mapping_free_labels(nd_mapping); in deactivate_labels()
2478 mutex_unlock(&nd_mapping->lock); in deactivate_labels()
2481 nd_mapping->ndd = NULL; in deactivate_labels()
2492 struct nd_mapping *nd_mapping = &nd_region->mapping[i]; in init_active_labels() local
2493 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in init_active_labels()
2494 struct nvdimm *nvdimm = nd_mapping->nvdimm; in init_active_labels()
2511 dev_name(&nd_mapping->nvdimm->dev), in init_active_labels()
2516 nd_mapping->ndd = ndd; in init_active_labels()
2539 mutex_lock(&nd_mapping->lock); in init_active_labels()
2540 list_add_tail(&label_ent->list, &nd_mapping->labels); in init_active_labels()
2541 mutex_unlock(&nd_mapping->lock); in init_active_labels()
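
Read together, the hits reduce to one recurring access pattern: iterate nd_region->mapping[], resolve the per-DIMM drvdata with to_ndd(), and only walk nd_mapping->labels while holding nd_mapping->lock (select_pmem_id() even asserts this with lockdep_assert_held()). A hedged sketch of that pattern follows; the helper name and the ndr_mappings loop bound do not appear in this listing and are assumptions, and the label-entry type name is inferred from the label_ent variables above:

	/* Assumes the nvdimm driver-internal types (struct nd_region, nd_mapping, etc.). */
	static void walk_region_labels(struct nd_region *nd_region)	/* hypothetical helper */
	{
		int i;

		for (i = 0; i < nd_region->ndr_mappings; i++) {	/* loop bound assumed */
			struct nd_mapping *nd_mapping = &nd_region->mapping[i];
			struct nvdimm_drvdata *ndd = to_ndd(nd_mapping);
			struct nd_label_ent *label_ent;

			/* ndd may be absent: deactivate_labels() clears nd_mapping->ndd */
			if (!ndd)
				continue;

			/* the labels list is only traversed under nd_mapping->lock */
			mutex_lock(&nd_mapping->lock);
			list_for_each_entry(label_ent, &nd_mapping->labels, list) {
				/* inspect or move label_ent, as the callers above do */
			}
			mutex_unlock(&nd_mapping->lock);
		}
	}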