Lines matching refs: nd_mapping
295 struct nd_mapping *nd_mapping, struct nd_label_id *label_id, in scan_free() argument
298 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in scan_free()
347 struct nd_mapping *nd_mapping = &nd_region->mapping[i]; in shrink_dpa_allocation() local
350 rc = scan_free(nd_region, nd_mapping, label_id, n); in shrink_dpa_allocation()
359 struct nd_region *nd_region, struct nd_mapping *nd_mapping, in init_dpa_allocation() argument
362 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in init_dpa_allocation()
367 res = nvdimm_allocate_dpa(ndd, label_id, nd_mapping->start, n); in init_dpa_allocation()
433 struct nd_mapping *nd_mapping, struct nd_label_id *label_id, in scan_allocate() argument
436 resource_size_t mapping_end = nd_mapping->start + nd_mapping->size - 1; in scan_allocate()
437 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in scan_allocate()
446 valid.start = nd_mapping->start; in scan_allocate()
461 if (res->end < nd_mapping->start) in scan_allocate()
465 if (!first++ && res->start > nd_mapping->start) { in scan_allocate()
466 valid.start = nd_mapping->start; in scan_allocate()
570 return init_dpa_allocation(label_id, nd_region, nd_mapping, n); in scan_allocate()
575 struct nd_mapping *nd_mapping, struct nd_label_id *label_id) in merge_dpa() argument
577 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in merge_dpa()
622 struct nd_mapping *nd_mapping = &nd_region->mapping[i]; in __reserve_free_pmem() local
625 if (nd_mapping->nvdimm != nvdimm) in __reserve_free_pmem()
628 n = nd_pmem_available_dpa(nd_region, nd_mapping); in __reserve_free_pmem()
631 rem = scan_allocate(nd_region, nd_mapping, &label_id, n); in __reserve_free_pmem()
643 struct nd_mapping *nd_mapping) in release_free_pmem() argument
645 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in release_free_pmem()
672 struct nd_mapping *nd_mapping = &nd_region->mapping[i]; in grow_dpa_allocation() local
676 rem = scan_allocate(nd_region, nd_mapping, label_id, rem); in grow_dpa_allocation()
684 rc = merge_dpa(nd_region, nd_mapping, label_id); in grow_dpa_allocation()
704 struct nd_mapping *nd_mapping = &nd_region->mapping[0]; in nd_namespace_pmem_set_resource() local
705 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in nd_namespace_pmem_set_resource()
719 offset = (res->start - nd_mapping->start) in nd_namespace_pmem_set_resource()
748 struct nd_mapping *nd_mapping; in __size_store() local
785 nd_mapping = &nd_region->mapping[i]; in __size_store()
786 ndd = to_ndd(nd_mapping); in __size_store()
906 struct nd_mapping *nd_mapping = &nd_region->mapping[i]; in nvdimm_namespace_locked() local
907 struct nvdimm *nvdimm = nd_mapping->nvdimm; in nvdimm_namespace_locked()
976 struct nd_mapping *nd_mapping = &nd_region->mapping[i]; in namespace_update_uuid() local
985 if (list_empty(&nd_mapping->labels)) in namespace_update_uuid()
992 struct nd_mapping *nd_mapping = &nd_region->mapping[i]; in namespace_update_uuid() local
993 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in namespace_update_uuid()
1002 mutex_lock(&nd_mapping->lock); in namespace_update_uuid()
1003 list_for_each_entry(label_ent, &nd_mapping->labels, list) { in namespace_update_uuid()
1016 mutex_unlock(&nd_mapping->lock); in namespace_update_uuid()
1153 struct nd_mapping *nd_mapping = &nd_region->mapping[i]; in dpa_extents_show() local
1154 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in dpa_extents_show()
1174 struct nd_mapping *nd_mapping = &nd_region->mapping[i]; in btt_claim_class() local
1175 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in btt_claim_class()
1561 struct nd_mapping *nd_mapping = &nd_region->mapping[i]; in has_uuid_at_pos() local
1563 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in has_uuid_at_pos()
1567 list_for_each_entry(label_ent, &nd_mapping->labels, list) { in has_uuid_at_pos()
1611 struct nd_mapping *nd_mapping = &nd_region->mapping[i]; in select_pmem_id() local
1612 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in select_pmem_id()
1617 lockdep_assert_held(&nd_mapping->lock); in select_pmem_id()
1618 list_for_each_entry(label_ent, &nd_mapping->labels, list) { in select_pmem_id()
1636 hw_start = nd_mapping->start; in select_pmem_id()
1637 hw_end = hw_start + nd_mapping->size; in select_pmem_id()
1651 list_move(&label_ent->list, &nd_mapping->labels); in select_pmem_id()
1663 struct nd_mapping *nd_mapping, in create_namespace_pmem() argument
1666 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in create_namespace_pmem()
1745 nd_mapping = &nd_region->mapping[i]; in create_namespace_pmem()
1746 label_ent = list_first_entry_or_null(&nd_mapping->labels, in create_namespace_pmem()
1755 ndd = to_ndd(nd_mapping); in create_namespace_pmem()
1889 struct nd_mapping *nd_mapping = &nd_region->mapping[0]; in add_namespace_resource() local
1890 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in add_namespace_resource()
1932 struct nd_mapping *nd_mapping = &nd_region->mapping[0]; in scan_labels() local
1933 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in scan_labels()
1934 resource_size_t map_end = nd_mapping->start + nd_mapping->size - 1; in scan_labels()
1937 list_for_each_entry_safe(label_ent, e, &nd_mapping->labels, list) { in scan_labels()
1945 if (nsl_get_dpa(ndd, nd_label) < nd_mapping->start || in scan_labels()
1961 dev = create_namespace_pmem(nd_region, nd_mapping, nd_label); in scan_labels()
1985 nd_mapping_free_labels(nd_mapping); in scan_labels()
2006 nd_mapping = &nd_region->mapping[i]; in scan_labels()
2007 if (list_empty(&nd_mapping->labels)) { in scan_labels()
2013 list_for_each_safe(l, e, &nd_mapping->labels) { in scan_labels()
2018 nd_mapping_free_labels(nd_mapping); in scan_labels()
2019 list_splice_init(&list, &nd_mapping->labels); in scan_labels()
2039 struct nd_mapping *nd_mapping; in create_namespaces() local
2048 nd_mapping = &nd_region->mapping[i]; in create_namespaces()
2049 mutex_lock_nested(&nd_mapping->lock, i); in create_namespaces()
2057 nd_mapping = &nd_region->mapping[reverse]; in create_namespaces()
2058 mutex_unlock(&nd_mapping->lock); in create_namespaces()
2070 struct nd_mapping *nd_mapping = &nd_region->mapping[i]; in deactivate_labels() local
2071 struct nvdimm_drvdata *ndd = nd_mapping->ndd; in deactivate_labels()
2072 struct nvdimm *nvdimm = nd_mapping->nvdimm; in deactivate_labels()
2074 mutex_lock(&nd_mapping->lock); in deactivate_labels()
2075 nd_mapping_free_labels(nd_mapping); in deactivate_labels()
2076 mutex_unlock(&nd_mapping->lock); in deactivate_labels()
2079 nd_mapping->ndd = NULL; in deactivate_labels()
2090 struct nd_mapping *nd_mapping = &nd_region->mapping[i]; in init_active_labels() local
2091 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in init_active_labels()
2092 struct nvdimm *nvdimm = nd_mapping->nvdimm; in init_active_labels()
2109 dev_name(&nd_mapping->nvdimm->dev), in init_active_labels()
2115 nd_mapping->ndd = ndd; in init_active_labels()
2132 mutex_lock(&nd_mapping->lock); in init_active_labels()
2133 list_add_tail(&label_ent->list, &nd_mapping->labels); in init_active_labels()
2134 mutex_unlock(&nd_mapping->lock); in init_active_labels()