Lines Matching refs:nd_mapping

294 		struct nd_mapping *nd_mapping, struct nd_label_id *label_id,  in scan_free()  argument
297 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in scan_free()
346 struct nd_mapping *nd_mapping = &nd_region->mapping[i]; in shrink_dpa_allocation() local
349 rc = scan_free(nd_region, nd_mapping, label_id, n); in shrink_dpa_allocation()
358 struct nd_region *nd_region, struct nd_mapping *nd_mapping, in init_dpa_allocation() argument
361 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in init_dpa_allocation()
366 res = nvdimm_allocate_dpa(ndd, label_id, nd_mapping->start, n); in init_dpa_allocation()
432 struct nd_mapping *nd_mapping, struct nd_label_id *label_id, in scan_allocate() argument
435 resource_size_t mapping_end = nd_mapping->start + nd_mapping->size - 1; in scan_allocate()
436 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in scan_allocate()
445 valid.start = nd_mapping->start; in scan_allocate()
460 if (res->end < nd_mapping->start) in scan_allocate()
464 if (!first++ && res->start > nd_mapping->start) { in scan_allocate()
465 valid.start = nd_mapping->start; in scan_allocate()
569 return init_dpa_allocation(label_id, nd_region, nd_mapping, n); in scan_allocate()
574 struct nd_mapping *nd_mapping, struct nd_label_id *label_id) in merge_dpa() argument
576 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in merge_dpa()
621 struct nd_mapping *nd_mapping = &nd_region->mapping[i]; in __reserve_free_pmem() local
624 if (nd_mapping->nvdimm != nvdimm) in __reserve_free_pmem()
627 n = nd_pmem_available_dpa(nd_region, nd_mapping); in __reserve_free_pmem()
630 rem = scan_allocate(nd_region, nd_mapping, &label_id, n); in __reserve_free_pmem()
642 struct nd_mapping *nd_mapping) in release_free_pmem() argument
644 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in release_free_pmem()
671 struct nd_mapping *nd_mapping = &nd_region->mapping[i]; in grow_dpa_allocation() local
675 rem = scan_allocate(nd_region, nd_mapping, label_id, rem); in grow_dpa_allocation()
683 rc = merge_dpa(nd_region, nd_mapping, label_id); in grow_dpa_allocation()
703 struct nd_mapping *nd_mapping = &nd_region->mapping[0]; in nd_namespace_pmem_set_resource() local
704 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in nd_namespace_pmem_set_resource()
718 offset = (res->start - nd_mapping->start) in nd_namespace_pmem_set_resource()
747 struct nd_mapping *nd_mapping; in __size_store() local
784 nd_mapping = &nd_region->mapping[i]; in __size_store()
785 ndd = to_ndd(nd_mapping); in __size_store()
905 struct nd_mapping *nd_mapping = &nd_region->mapping[i]; in nvdimm_namespace_locked() local
906 struct nvdimm *nvdimm = nd_mapping->nvdimm; in nvdimm_namespace_locked()
975 struct nd_mapping *nd_mapping = &nd_region->mapping[i]; in namespace_update_uuid() local
984 if (list_empty(&nd_mapping->labels)) in namespace_update_uuid()
991 struct nd_mapping *nd_mapping = &nd_region->mapping[i]; in namespace_update_uuid() local
992 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in namespace_update_uuid()
1001 mutex_lock(&nd_mapping->lock); in namespace_update_uuid()
1002 list_for_each_entry(label_ent, &nd_mapping->labels, list) { in namespace_update_uuid()
1015 mutex_unlock(&nd_mapping->lock); in namespace_update_uuid()
1152 struct nd_mapping *nd_mapping = &nd_region->mapping[i]; in dpa_extents_show() local
1153 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in dpa_extents_show()
1173 struct nd_mapping *nd_mapping = &nd_region->mapping[i]; in btt_claim_class() local
1174 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in btt_claim_class()
1560 struct nd_mapping *nd_mapping = &nd_region->mapping[i]; in has_uuid_at_pos() local
1562 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in has_uuid_at_pos()
1566 list_for_each_entry(label_ent, &nd_mapping->labels, list) { in has_uuid_at_pos()
1610 struct nd_mapping *nd_mapping = &nd_region->mapping[i]; in select_pmem_id() local
1611 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in select_pmem_id()
1616 lockdep_assert_held(&nd_mapping->lock); in select_pmem_id()
1617 list_for_each_entry(label_ent, &nd_mapping->labels, list) { in select_pmem_id()
1635 hw_start = nd_mapping->start; in select_pmem_id()
1636 hw_end = hw_start + nd_mapping->size; in select_pmem_id()
1650 list_move(&label_ent->list, &nd_mapping->labels); in select_pmem_id()
1662 struct nd_mapping *nd_mapping, in create_namespace_pmem() argument
1665 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in create_namespace_pmem()
1744 nd_mapping = &nd_region->mapping[i]; in create_namespace_pmem()
1745 label_ent = list_first_entry_or_null(&nd_mapping->labels, in create_namespace_pmem()
1754 ndd = to_ndd(nd_mapping); in create_namespace_pmem()
1888 struct nd_mapping *nd_mapping = &nd_region->mapping[0]; in add_namespace_resource() local
1889 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in add_namespace_resource()
1931 struct nd_mapping *nd_mapping = &nd_region->mapping[0]; in scan_labels() local
1932 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in scan_labels()
1933 resource_size_t map_end = nd_mapping->start + nd_mapping->size - 1; in scan_labels()
1936 list_for_each_entry_safe(label_ent, e, &nd_mapping->labels, list) { in scan_labels()
1944 if (nsl_get_dpa(ndd, nd_label) < nd_mapping->start || in scan_labels()
1960 dev = create_namespace_pmem(nd_region, nd_mapping, nd_label); in scan_labels()
1984 nd_mapping_free_labels(nd_mapping); in scan_labels()
2005 nd_mapping = &nd_region->mapping[i]; in scan_labels()
2006 if (list_empty(&nd_mapping->labels)) { in scan_labels()
2012 list_for_each_safe(l, e, &nd_mapping->labels) { in scan_labels()
2017 nd_mapping_free_labels(nd_mapping); in scan_labels()
2018 list_splice_init(&list, &nd_mapping->labels); in scan_labels()
2038 struct nd_mapping *nd_mapping; in create_namespaces() local
2047 nd_mapping = &nd_region->mapping[i]; in create_namespaces()
2048 mutex_lock_nested(&nd_mapping->lock, i); in create_namespaces()
2056 nd_mapping = &nd_region->mapping[reverse]; in create_namespaces()
2057 mutex_unlock(&nd_mapping->lock); in create_namespaces()
2069 struct nd_mapping *nd_mapping = &nd_region->mapping[i]; in deactivate_labels() local
2070 struct nvdimm_drvdata *ndd = nd_mapping->ndd; in deactivate_labels()
2071 struct nvdimm *nvdimm = nd_mapping->nvdimm; in deactivate_labels()
2073 mutex_lock(&nd_mapping->lock); in deactivate_labels()
2074 nd_mapping_free_labels(nd_mapping); in deactivate_labels()
2075 mutex_unlock(&nd_mapping->lock); in deactivate_labels()
2078 nd_mapping->ndd = NULL; in deactivate_labels()
2089 struct nd_mapping *nd_mapping = &nd_region->mapping[i]; in init_active_labels() local
2090 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in init_active_labels()
2091 struct nvdimm *nvdimm = nd_mapping->nvdimm; in init_active_labels()
2108 dev_name(&nd_mapping->nvdimm->dev), in init_active_labels()
2114 nd_mapping->ndd = ndd; in init_active_labels()
2131 mutex_lock(&nd_mapping->lock); in init_active_labels()
2132 list_add_tail(&label_ent->list, &nd_mapping->labels); in init_active_labels()
2133 mutex_unlock(&nd_mapping->lock); in init_active_labels()