
Searched refs:nd_region (Results 1 – 21 of 21) sorted by relevance

/drivers/nvdimm/
region_devs.c
70 int nd_region_activate(struct nd_region *nd_region) in nd_region_activate() argument
74 struct device *dev = &nd_region->dev; in nd_region_activate()
77 nvdimm_bus_lock(&nd_region->dev); in nd_region_activate()
78 for (i = 0; i < nd_region->ndr_mappings; i++) { in nd_region_activate()
79 struct nd_mapping *nd_mapping = &nd_region->mapping[i]; in nd_region_activate()
89 nvdimm_bus_unlock(&nd_region->dev); in nd_region_activate()
100 for (i = 0; i < nd_region->ndr_mappings; i++) { in nd_region_activate()
101 struct nd_mapping *nd_mapping = &nd_region->mapping[i]; in nd_region_activate()
103 int rc = nvdimm_map_flush(&nd_region->dev, nvdimm, i, ndrd); in nd_region_activate()
113 for (i = 0; i < nd_region->ndr_mappings - 1; i++) { in nd_region_activate()
[all …]
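The excerpt above shows the locking pattern nd_region_activate() relies on: every walk of a region's DIMM mappings happens under the bus lock. A minimal sketch of that pattern, assuming kernel context and the declarations from drivers/nvdimm/nd.h (walk_region_mappings() is a hypothetical name for illustration):

static void walk_region_mappings(struct nd_region *nd_region)
{
	int i;

	nvdimm_bus_lock(&nd_region->dev);	/* serialize against bus reconfiguration */
	for (i = 0; i < nd_region->ndr_mappings; i++) {
		struct nd_mapping *nd_mapping = &nd_region->mapping[i];
		struct nvdimm *nvdimm = nd_mapping->nvdimm;

		/* per-DIMM setup, e.g. mapping flush hints, goes here */
		(void)nvdimm;
	}
	nvdimm_bus_unlock(&nd_region->dev);
}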
region.c
25 struct nd_region *nd_region = to_nd_region(dev); in nd_region_probe() local
27 if (nd_region->num_lanes > num_online_cpus() in nd_region_probe()
28 && nd_region->num_lanes < num_possible_cpus() in nd_region_probe()
31 num_online_cpus(), nd_region->num_lanes, in nd_region_probe()
34 nd_region->num_lanes); in nd_region_probe()
37 rc = nd_region_activate(nd_region); in nd_region_probe()
41 rc = nd_blk_region_init(nd_region); in nd_region_probe()
45 rc = nd_region_register_namespaces(nd_region, &err); in nd_region_probe()
56 if (is_nd_pmem(&nd_region->dev)) { in nd_region_probe()
59 if (devm_init_badblocks(dev, &nd_region->bb)) in nd_region_probe()
[all …]
namespace_devs.c
33 struct nd_region *nd_region = to_nd_region(dev->parent); in namespace_pmem_release() local
36 ida_simple_remove(&nd_region->ns_ida, nspm->id); in namespace_pmem_release()
45 struct nd_region *nd_region = to_nd_region(dev->parent); in namespace_blk_release() local
48 ida_simple_remove(&nd_region->ns_ida, nsblk->id); in namespace_blk_release()
140 struct nd_region *nd_region = to_nd_region(dev->parent); in pmem_should_map_pages() local
147 if (!test_bit(ND_REGION_PAGEMAP, &nd_region->flags)) in pmem_should_map_pages()
192 struct nd_region *nd_region = to_nd_region(ndns->dev.parent); in nvdimm_namespace_disk_name() local
209 sprintf(name, "pmem%d.%d%s", nd_region->id, nsidx, in nvdimm_namespace_disk_name()
212 sprintf(name, "pmem%d%s", nd_region->id, in nvdimm_namespace_disk_name()
218 sprintf(name, "ndblk%d.%d%s", nd_region->id, nsblk->id, in nvdimm_namespace_disk_name()
[all …]
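The sprintf() calls above encode the namespace disk-naming convention: "pmem<region-id>.<namespace-index>" for PMEM namespaces (with the ".<index>" suffix dropped for index 0) and "ndblk<region-id>.<namespace-id>" for BLK. A hypothetical helper restating the PMEM branch, where suffix is the optional partition-suffix string from the source:

static void pmem_disk_name(char *name, struct nd_region *nd_region,
		int nsidx, const char *suffix)
{
	if (nsidx)
		sprintf(name, "pmem%d.%d%s", nd_region->id, nsidx, suffix);
	else
		sprintf(name, "pmem%d%s", nd_region->id, suffix);
}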
nd-core.h
83 struct nd_region;
84 void nd_region_create_ns_seed(struct nd_region *nd_region);
85 void nd_region_create_btt_seed(struct nd_region *nd_region);
86 void nd_region_create_pfn_seed(struct nd_region *nd_region);
87 void nd_region_create_dax_seed(struct nd_region *nd_region);
100 struct nd_region;
104 resource_size_t nd_pmem_available_dpa(struct nd_region *nd_region,
106 resource_size_t nd_blk_available_dpa(struct nd_region *nd_region);
107 resource_size_t nd_region_available_dpa(struct nd_region *nd_region);
108 int nd_region_conflict(struct nd_region *nd_region, resource_size_t start,
[all …]
nd.h
153 struct nd_region { struct
181 struct nd_region nd_region; member
273 struct device *nd_btt_create(struct nd_region *nd_region);
286 static inline struct device *nd_btt_create(struct nd_region *nd_region) in nd_btt_create() argument
303 struct device *nd_pfn_create(struct nd_region *nd_region);
320 static inline struct device *nd_pfn_create(struct nd_region *nd_region) in nd_pfn_create() argument
335 struct device *nd_dax_create(struct nd_region *nd_region);
348 static inline struct device *nd_dax_create(struct nd_region *nd_region) in nd_dax_create() argument
354 struct nd_region *to_nd_region(struct device *dev);
355 int nd_region_to_nstype(struct nd_region *nd_region);
[all …]
dax_devs.c
23 struct nd_region *nd_region = to_nd_region(dev->parent); in nd_dax_release() local
29 ida_simple_remove(&nd_region->dax_ida, nd_pfn->id); in nd_dax_release()
61 static struct nd_dax *nd_dax_alloc(struct nd_region *nd_region) in nd_dax_alloc() argument
72 nd_pfn->id = ida_simple_get(&nd_region->dax_ida, 0, 0, GFP_KERNEL); in nd_dax_alloc()
79 dev_set_name(dev, "dax%d.%d", nd_region->id, nd_pfn->id); in nd_dax_alloc()
82 dev->parent = &nd_region->dev; in nd_dax_alloc()
87 struct device *nd_dax_create(struct nd_region *nd_region) in nd_dax_create() argument
92 if (!is_memory(&nd_region->dev)) in nd_dax_create()
95 nd_dax = nd_dax_alloc(nd_region); in nd_dax_create()
109 struct nd_region *nd_region = to_nd_region(ndns->dev.parent); in nd_dax_probe() local
[all …]
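nd_dax_alloc() above follows the same per-region ID pattern the btt and pfn seed devices use below: allocate an ID from the region's ida, name the child "dax<region-id>.<id>", and parent it to the region device. A minimal sketch, assuming <linux/idr.h> and an already-registered region (name_dax_child() is a hypothetical name):

static int name_dax_child(struct nd_region *nd_region, struct device *dev)
{
	int id = ida_simple_get(&nd_region->dax_ida, 0, 0, GFP_KERNEL);

	if (id < 0)
		return id;
	dev_set_name(dev, "dax%d.%d", nd_region->id, id);
	dev->parent = &nd_region->dev;	/* region is the device-model parent */
	return 0;
}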
btt_devs.c
26 struct nd_region *nd_region = to_nd_region(dev->parent); in nd_btt_release() local
31 ida_simple_remove(&nd_region->btt_ida, nd_btt->id); in nd_btt_release()
181 static struct device *__nd_btt_create(struct nd_region *nd_region, in __nd_btt_create() argument
192 nd_btt->id = ida_simple_get(&nd_region->btt_ida, 0, 0, GFP_KERNEL); in __nd_btt_create()
204 dev_set_name(dev, "btt%d.%d", nd_region->id, nd_btt->id); in __nd_btt_create()
205 dev->parent = &nd_region->dev; in __nd_btt_create()
218 ida_simple_remove(&nd_region->btt_ida, nd_btt->id); in __nd_btt_create()
225 struct device *nd_btt_create(struct nd_region *nd_region) in nd_btt_create() argument
227 struct device *dev = __nd_btt_create(nd_region, 0, NULL, NULL); in nd_btt_create()
336 struct nd_region *nd_region = to_nd_region(ndns->dev.parent); in nd_btt_probe() local
[all …]
pfn_devs.c
27 struct nd_region *nd_region = to_nd_region(dev->parent); in nd_pfn_release() local
32 ida_simple_remove(&nd_region->pfn_ida, nd_pfn->id); in nd_pfn_release()
325 static struct nd_pfn *nd_pfn_alloc(struct nd_region *nd_region) in nd_pfn_alloc() argument
334 nd_pfn->id = ida_simple_get(&nd_region->pfn_ida, 0, 0, GFP_KERNEL); in nd_pfn_alloc()
341 dev_set_name(dev, "pfn%d.%d", nd_region->id, nd_pfn->id); in nd_pfn_alloc()
344 dev->parent = &nd_region->dev; in nd_pfn_alloc()
349 struct device *nd_pfn_create(struct nd_region *nd_region) in nd_pfn_create() argument
354 if (!is_memory(&nd_region->dev)) in nd_pfn_create()
357 nd_pfn = nd_pfn_alloc(nd_region); in nd_pfn_create()
499 struct nd_region *nd_region = to_nd_region(ndns->dev.parent); in nd_pfn_probe() local
[all …]
dimm_devs.c
239 struct nd_region *nd_region = &ndbr->nd_region; in nd_blk_region_to_dimm() local
240 struct nd_mapping *nd_mapping = &nd_region->mapping[0]; in nd_blk_region_to_dimm()
424 struct nd_region *nd_region; in alias_dpa_busy() local
432 nd_region = to_nd_region(dev); in alias_dpa_busy()
433 for (i = 0; i < nd_region->ndr_mappings; i++) { in alias_dpa_busy()
434 nd_mapping = &nd_region->mapping[i]; in alias_dpa_busy()
439 if (i >= nd_region->ndr_mappings) in alias_dpa_busy()
499 resource_size_t nd_blk_available_dpa(struct nd_region *nd_region) in nd_blk_available_dpa() argument
501 struct nvdimm_bus *nvdimm_bus = walk_to_nvdimm_bus(&nd_region->dev); in nd_blk_available_dpa()
502 struct nd_mapping *nd_mapping = &nd_region->mapping[0]; in nd_blk_available_dpa()
[all …]
pmem.c
47 static struct nd_region *to_region(struct pmem_device *pmem) in to_region()
178 struct nd_region *nd_region = to_region(pmem); in pmem_make_request() local
181 nvdimm_flush(nd_region); in pmem_make_request()
197 nvdimm_flush(nd_region); in pmem_make_request()
305 struct nd_region *nd_region = to_nd_region(dev->parent); in pmem_attach_disk() local
337 fua = nvdimm_has_flush(nd_region); in pmem_attach_disk()
342 wbc = nvdimm_has_cache(nd_region); in pmem_attach_disk()
408 nvdimm_badblocks_populate(nd_region, &pmem->bb, res); in pmem_attach_disk()
488 struct nd_region *nd_region; in nd_pmem_notify() local
503 nd_region = to_nd_region(ndns->dev.parent); in nd_pmem_notify()
[all …]
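pmem_make_request() above brackets the data transfer with nvdimm_flush() calls: once up front for REQ_PREFLUSH bios and once at the end for REQ_FUA, so that posted writes reach the media before completion is reported. A condensed sketch of that ordering (the per-bvec copy step is elided):

static void pmem_rw_sketch(struct pmem_device *pmem, struct bio *bio)
{
	struct nd_region *nd_region = to_region(pmem);

	if (bio->bi_opf & REQ_PREFLUSH)
		nvdimm_flush(nd_region);	/* drain prior posted writes */

	/* ... transfer each bio_vec to/from persistent memory ... */

	if (bio->bi_opf & REQ_FUA)
		nvdimm_flush(nd_region);	/* make this bio's data durable */

	bio_endio(bio);
}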
label.h
149 struct nd_region;
152 int nd_pmem_namespace_label_update(struct nd_region *nd_region,
154 int nd_blk_namespace_label_update(struct nd_region *nd_region,
label.c
381 struct nd_region *nd_region = NULL; in nd_label_reserve_dpa() local
398 nd_dbg_dpa(nd_region, ndd, res, "reserve\n"); in nd_label_reserve_dpa()
630 static int __pmem_label_update(struct nd_region *nd_region, in __pmem_label_update() argument
635 struct nd_interleave_set *nd_set = nd_region->nd_set; in __pmem_label_update()
651 cookie = nd_region_interleave_set_cookie(nd_region, nsindex); in __pmem_label_update()
674 nd_label->nlabel = __cpu_to_le16(nd_region->ndr_mappings); in __pmem_label_update()
694 nd_dbg_dpa(nd_region, ndd, res, "%s\n", __func__); in __pmem_label_update()
768 static int __blk_label_update(struct nd_region *nd_region, in __blk_label_update() argument
773 struct nd_interleave_set *nd_set = nd_region->nd_set; in __blk_label_update()
849 if (!nsblk_add_resource(nd_region, ndd, nsblk, res->start)) { in __blk_label_update()
[all …]
bus.c
164 void nvdimm_region_notify(struct nd_region *nd_region, enum nvdimm_event event) in nvdimm_region_notify() argument
166 struct nvdimm_bus *nvdimm_bus = walk_to_nvdimm_bus(&nd_region->dev); in nvdimm_region_notify()
172 nd_device_notify(&nd_region->dev, event); in nvdimm_region_notify()
183 struct nd_region *nd_region; in nvdimm_clear_badblocks_region() local
191 nd_region = to_nd_region(dev); in nvdimm_clear_badblocks_region()
192 ndr_end = nd_region->ndr_start + nd_region->ndr_size - 1; in nvdimm_clear_badblocks_region()
195 if (ctx->phys < nd_region->ndr_start in nvdimm_clear_badblocks_region()
199 sector = (ctx->phys - nd_region->ndr_start) / 512; in nvdimm_clear_badblocks_region()
200 badblocks_clear(&nd_region->bb, sector, ctx->cleared / 512); in nvdimm_clear_badblocks_region()
202 if (nd_region->bb_state) in nvdimm_clear_badblocks_region()
[all …]
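The clear-badblocks path above translates a cleared physical address range into 512-byte sectors relative to the region base before updating the badblocks list. Restated as a hypothetical helper, with phys and cleared standing in for the ctx fields in the source:

static void clear_region_badblocks(struct nd_region *nd_region,
		phys_addr_t phys, unsigned int cleared)
{
	sector_t sector = (phys - nd_region->ndr_start) / 512;

	badblocks_clear(&nd_region->bb, sector, cleared / 512);
}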
blk.c
62 struct nd_region *nd_region; in to_ndbr() local
66 nd_region = container_of(parent, struct nd_region, dev); in to_ndbr()
67 return container_of(nd_region, struct nd_blk_region, nd_region); in to_ndbr()
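to_ndbr() above recovers the nd_blk_region that embeds a given nd_region via two container_of() steps (the real helper first walks up the device tree to find the region device). A minimal restatement, assuming dev already points at the region's struct device:

static struct nd_blk_region *dev_to_ndbr(struct device *dev)
{
	struct nd_region *nd_region = container_of(dev, struct nd_region, dev);

	return container_of(nd_region, struct nd_blk_region, nd_region);
}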
claim.c
86 struct nd_region *nd_region = to_nd_region(dev->parent); in is_idle() local
90 seed = nd_region->btt_seed; in is_idle()
92 seed = nd_region->pfn_seed; in is_idle()
94 seed = nd_region->dax_seed; in is_idle()
btt.c
1207 lane = nd_region_acquire_lane(btt->nd_region); in btt_read_pg()
1277 nd_region_release_lane(btt->nd_region, lane); in btt_read_pg()
1289 nd_region_release_lane(btt->nd_region, lane); in btt_read_pg()
1323 lane = nd_region_acquire_lane(btt->nd_region); in btt_write_pg()
1340 nd_region_release_lane(btt->nd_region, lane); in btt_write_pg()
1401 nd_region_release_lane(btt->nd_region, lane); in btt_write_pg()
1419 nd_region_release_lane(btt->nd_region, lane); in btt_write_pg()
1588 u32 lbasize, u8 *uuid, struct nd_region *nd_region) in btt_init() argument
1605 btt->nd_region = nd_region; in btt_init()
1615 if (btt->init_state != INIT_READY && nd_region->ro) { in btt_init()
[all …]
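Every BTT read and write above runs between nd_region_acquire_lane() and nd_region_release_lane(), which gives each in-flight I/O exclusive use of one of the region's num_lanes lanes and its per-lane free-block state. A sketch of that discipline, with do_btt_io() as a hypothetical stand-in for the actual transfer:

static int btt_io_sketch(struct btt *btt)
{
	unsigned int lane = nd_region_acquire_lane(btt->nd_region);
	int rc;

	rc = do_btt_io(btt, lane);	/* hypothetical read/write step */
	nd_region_release_lane(btt->nd_region, lane);
	return rc;
}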
btt.h
233 struct nd_region *nd_region; member
core.c
502 void nvdimm_badblocks_populate(struct nd_region *nd_region, in nvdimm_badblocks_populate() argument
508 if (!is_memory(&nd_region->dev)) { in nvdimm_badblocks_populate()
509 dev_WARN_ONCE(&nd_region->dev, 1, in nvdimm_badblocks_populate()
513 nvdimm_bus = walk_to_nvdimm_bus(&nd_region->dev); in nvdimm_badblocks_populate()
/drivers/acpi/nfit/
nfit.h
91 struct nd_region *nd_region; member
202 struct nd_region *nd_region; member
core.c
1724 struct nd_region *nd_region = to_nd_region(dev); in range_index_show() local
1725 struct nfit_spa *nfit_spa = nd_region_provider_data(nd_region); in range_index_show()
1734 struct nd_region *nd_region = to_nd_region(dev); in ecc_unit_size_show() local
1735 struct nfit_spa *nfit_spa = nd_region_provider_data(nd_region); in ecc_unit_size_show()
1983 nvdimm_flush(nfit_blk->nd_region); in write_blk_ctl()
2032 nvdimm_flush(nfit_blk->nd_region); in acpi_nfit_blk_single_io()
2043 struct nd_region *nd_region = nfit_blk->nd_region; in acpi_nfit_blk_region_do_io() local
2047 lane = nd_region_acquire_lane(nd_region); in acpi_nfit_blk_region_do_io()
2059 nd_region_release_lane(nd_region, lane); in acpi_nfit_blk_region_do_io()
2126 nfit_blk->nd_region = to_nd_region(dev); in acpi_nfit_blk_region_enable()
[all …]
mce.c
73 nvdimm_region_notify(nfit_spa->nd_region, in nfit_handle_mce()