
Lines Matching refs:ndd

273 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in nd_namespace_blk_size() local
281 for_each_dpa_resource(ndd, res) in nd_namespace_blk_size()
291 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in __nd_namespace_blk_validate() local
296 if (!nsblk->uuid || !nsblk->lbasize || !ndd) in __nd_namespace_blk_validate()
301 for_each_dpa_resource(ndd, res) { in __nd_namespace_blk_validate()
320 for_each_dpa_resource(ndd, res) in __nd_namespace_blk_validate()
426 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in scan_free() local
434 for_each_dpa_resource(ndd, res) in scan_free()
443 nd_dbg_dpa(nd_region, ndd, res, "delete %d\n", rc); in scan_free()
444 nvdimm_free_dpa(ndd, res); in scan_free()
461 nd_dbg_dpa(nd_region, ndd, res, "shrink %d\n", rc); in scan_free()
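
The scan_free() hits above illustrate the recurring pattern in this file: each helper fetches the per-DIMM label data with to_ndd(nd_mapping) and then walks its device-physical-address (DPA) resources with for_each_dpa_resource(). A minimal sketch of that walk follows, assuming the internal drivers/nvdimm headers are in scope as they are in namespace_devs.c itself; the helper name count_label_resources() is hypothetical.

/*
 * Illustrative sketch only (count_label_resources() is not a function in
 * namespace_devs.c): count how many DPA resources on one DIMM carry a
 * given label id, mirroring the for_each_dpa_resource() walks seen in
 * scan_free() and nd_namespace_blk_size() above.
 */
static int count_label_resources(struct nvdimm_drvdata *ndd,
                                 struct nd_label_id *label_id)
{
        struct resource *res;
        int count = 0;

        /* for_each_dpa_resource() iterates ndd->dpa.child via ->sibling */
        for_each_dpa_resource(ndd, res)
                if (strcmp(res->name, label_id->id) == 0)
                        count++;

        return count;
}
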
501 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in init_dpa_allocation() local
513 res = nvdimm_allocate_dpa(ndd, label_id, first_dpa, n); in init_dpa_allocation()
517 nd_dbg_dpa(nd_region, ndd, res, "init %d\n", rc); in init_dpa_allocation()
538 static void space_valid(struct nd_region *nd_region, struct nvdimm_drvdata *ndd, in space_valid() argument
600 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in scan_allocate() local
605 for_each_dpa_resource(ndd, res) in scan_allocate()
614 for_each_dpa_resource(ndd, res) { in scan_allocate()
631 space_valid(nd_region, ndd, label_id, NULL, next, exist, in scan_allocate()
642 space_valid(nd_region, ndd, label_id, res, next, exist, in scan_allocate()
653 space_valid(nd_region, ndd, label_id, res, next, exist, in scan_allocate()
701 new_res = nvdimm_allocate_dpa(ndd, label_id, in scan_allocate()
716 nd_dbg_dpa(nd_region, ndd, new_res, "%s(%d) %d\n", in scan_allocate()
741 if ((is_pmem || !ndd->dpa.child) && n == to_allocate) in scan_allocate()
749 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in merge_dpa() local
755 for_each_dpa_resource(ndd, res) { in merge_dpa()
765 nvdimm_free_dpa(ndd, next); in merge_dpa()
767 nd_dbg_dpa(nd_region, ndd, res, "merge %d\n", rc); in merge_dpa()
817 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in release_free_pmem() local
820 for_each_dpa_resource_safe(ndd, res, _res) in release_free_pmem()
822 nvdimm_free_dpa(ndd, res); in release_free_pmem()
913 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in nd_namespace_pmem_set_resource() local
917 if (!ndd) { in nd_namespace_pmem_set_resource()
925 for_each_dpa_resource(ndd, res) in nd_namespace_pmem_set_resource()
956 struct nvdimm_drvdata *ndd; in __size_store() local
999 ndd = to_ndd(nd_mapping); in __size_store()
1005 if (!ndd) in __size_store()
1008 allocated += nvdimm_allocated_dpa(ndd, &label_id); in __size_store()
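
The __size_store() hits show how allocation is accounted per region: for each mapping, to_ndd() yields the DIMM's label data and nvdimm_allocated_dpa() reports how much DPA is already assigned to the namespace's label id. Below is a minimal sketch of that accounting under the same assumptions as the previous sketch; region_allocated_dpa() is a hypothetical name, and unlike __size_store() it skips mappings without label data instead of failing.

/*
 * Illustrative sketch only (region_allocated_dpa() is a hypothetical name):
 * sum the DPA already allocated to one label across every mapping in a
 * region, modeled on the accounting visible in __size_store() above.
 */
static resource_size_t region_allocated_dpa(struct nd_region *nd_region,
                                            struct nd_label_id *label_id)
{
        resource_size_t allocated = 0;
        int i;

        for (i = 0; i < nd_region->ndr_mappings; i++) {
                struct nd_mapping *nd_mapping = &nd_region->mapping[i];
                struct nvdimm_drvdata *ndd = to_ndd(nd_mapping);

                /* __size_store() bails out here; this sketch just skips */
                if (!ndd)
                        continue;

                allocated += nvdimm_allocated_dpa(ndd, label_id);
        }

        return allocated;
}
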
1221 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in namespace_update_uuid() local
1225 for_each_dpa_resource(ndd, res) in namespace_update_uuid()
1238 nsl_get_flags(ndd, nd_label)); in namespace_update_uuid()
1404 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in dpa_extents_show() local
1407 for_each_dpa_resource(ndd, res) in dpa_extents_show()
1425 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in btt_claim_class() local
1432 if (!ndd) { in btt_claim_class()
1437 nsindex = to_namespace_index(ndd, ndd->ns_current); in btt_claim_class()
1843 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in has_uuid_at_pos() local
1853 position = nsl_get_position(ndd, nd_label); in has_uuid_at_pos()
1854 nlabel = nsl_get_nlabel(ndd, nd_label); in has_uuid_at_pos()
1856 if (!nsl_validate_isetcookie(ndd, nd_label, cookie)) in has_uuid_at_pos()
1862 if (!nsl_validate_type_guid(ndd, nd_label, in has_uuid_at_pos()
1867 dev_dbg(ndd->dev, "duplicate entry for uuid\n"); in has_uuid_at_pos()
1893 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in select_pmem_id() local
1919 pmem_start = nsl_get_dpa(ndd, nd_label); in select_pmem_id()
1920 pmem_end = pmem_start + nsl_get_rawsize(ndd, nd_label); in select_pmem_id()
1926 dev_name(ndd->dev), nd_label->uuid); in select_pmem_id()
1946 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in create_namespace_pmem() local
1948 to_namespace_index(ndd, ndd->ns_current); in create_namespace_pmem()
1964 if (!nsl_validate_isetcookie(ndd, nd_label, cookie)) { in create_namespace_pmem()
1967 if (!nsl_validate_isetcookie(ndd, nd_label, altcookie)) in create_namespace_pmem()
2023 struct nvdimm_drvdata *ndd; in create_namespace_pmem() local
2035 ndd = to_ndd(nd_mapping); in create_namespace_pmem()
2036 size += nsl_get_rawsize(ndd, label0); in create_namespace_pmem()
2037 if (nsl_get_position(ndd, label0) != 0) in create_namespace_pmem()
2040 nspm->alt_name = kmemdup(nsl_ref_name(ndd, label0), in create_namespace_pmem()
2044 nspm->lbasize = nsl_get_lbasize(ndd, label0); in create_namespace_pmem()
2046 nsl_get_claim_class(ndd, label0); in create_namespace_pmem()
2074 struct nvdimm_drvdata *ndd, struct nd_namespace_blk *nsblk, in nsblk_add_resource() argument
2087 for_each_dpa_resource(ndd, res) in nsblk_add_resource()
2216 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in add_namespace_resource() local
2231 res = nsblk_add_resource(nd_region, ndd, in add_namespace_resource()
2233 nsl_get_dpa(ndd, nd_label)); in add_namespace_resource()
2236 nd_dbg_dpa(nd_region, ndd, res, "%d assign\n", count); in add_namespace_resource()
2255 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in create_namespace_blk() local
2261 if (!nsl_validate_type_guid(ndd, nd_label, &nd_set->type_guid)) in create_namespace_blk()
2263 if (!nsl_validate_blk_isetcookie(ndd, nd_label, nd_set->cookie2)) in create_namespace_blk()
2273 nsblk->lbasize = nsl_get_lbasize(ndd, nd_label); in create_namespace_blk()
2275 nsblk->common.claim_class = nsl_get_claim_class(ndd, nd_label); in create_namespace_blk()
2278 nsl_get_name(ndd, nd_label, name); in create_namespace_blk()
2284 res = nsblk_add_resource(nd_region, ndd, nsblk, in create_namespace_blk()
2285 nsl_get_dpa(ndd, nd_label)); in create_namespace_blk()
2288 nd_dbg_dpa(nd_region, ndd, res, "%d: assign\n", count); in create_namespace_blk()
2326 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in scan_labels() local
2337 flags = nsl_get_flags(ndd, nd_label); in scan_labels()
2345 if (nsl_get_dpa(ndd, nd_label) < nd_mapping->start || in scan_labels()
2346 nsl_get_dpa(ndd, nd_label) > map_end) in scan_labels()
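
The nsl_*() helpers seen throughout the listing (nsl_get_dpa(), nsl_get_rawsize(), nsl_get_flags(), ...) read fields out of an on-media namespace label via the per-DIMM ndd. A hedged sketch of the kind of bounds test scan_labels() performs above; the name label_in_mapping() is hypothetical, and the end-of-extent check goes beyond what scan_labels() itself does.

/*
 * Illustrative sketch only (label_in_mapping() is a hypothetical name):
 * use the nsl_*() accessors, as scan_labels() does above, to check that a
 * namespace label's DPA extent falls inside its mapping. scan_labels()
 * only range-checks the start address; the end check is extra here.
 */
static bool label_in_mapping(struct nvdimm_drvdata *ndd,
                             struct nd_namespace_label *nd_label,
                             struct nd_mapping *nd_mapping)
{
        u64 dpa = nsl_get_dpa(ndd, nd_label);
        u64 size = nsl_get_rawsize(ndd, nd_label);
        u64 map_end = nd_mapping->start + nd_mapping->size - 1;

        return dpa >= nd_mapping->start && dpa + size - 1 <= map_end;
}
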
2489 struct nvdimm_drvdata *ndd = nd_mapping->ndd; in deactivate_labels() local
2496 put_ndd(ndd); in deactivate_labels()
2497 nd_mapping->ndd = NULL; in deactivate_labels()
2498 if (ndd) in deactivate_labels()
2509 struct nvdimm_drvdata *ndd = to_ndd(nd_mapping); in init_active_labels() local
2518 if (!ndd) { in init_active_labels()
2533 nd_mapping->ndd = ndd; in init_active_labels()
2535 get_ndd(ndd); in init_active_labels()
2537 count = nd_label_active_count(ndd); in init_active_labels()
2538 dev_dbg(ndd->dev, "count: %d\n", count); in init_active_labels()
2547 label = nd_label_active(ndd, j); in init_active_labels()
2549 u32 flags = nsl_get_flags(ndd, label); in init_active_labels()
2552 nsl_set_flags(ndd, label, flags); in init_active_labels()
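
The final two groups show the lifetime rule for ndd on a mapping: init_active_labels() stores the pointer in nd_mapping->ndd and takes a reference with get_ndd(), while deactivate_labels() clears the pointer and drops it with put_ndd(). The sketch below pairs those steps purely as illustration; dump_active_labels() is a hypothetical helper.

/*
 * Illustrative sketch only (dump_active_labels() is a hypothetical name):
 * pin a mapping's label data, walk its active labels the way
 * init_active_labels() does above, then release it the way
 * deactivate_labels() does.
 */
static void dump_active_labels(struct nd_mapping *nd_mapping)
{
        struct nvdimm_drvdata *ndd = to_ndd(nd_mapping);
        int count, j;

        if (!ndd)
                return;

        /* init_active_labels() pattern: publish the pointer, take a ref */
        nd_mapping->ndd = ndd;
        get_ndd(ndd);

        count = nd_label_active_count(ndd);
        dev_dbg(ndd->dev, "count: %d\n", count);
        for (j = 0; j < count; j++) {
                struct nd_namespace_label *label = nd_label_active(ndd, j);

                dev_dbg(ndd->dev, "label %d flags: %#x\n", j,
                        nsl_get_flags(ndd, label));
        }

        /* deactivate_labels() pattern: clear the pointer, drop the ref */
        nd_mapping->ndd = NULL;
        put_ndd(ndd);
}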