Lines Matching refs:nvdimm

31 struct nvdimm *nvdimm = to_nvdimm(dev); in nvdimm_check_config_data() local
33 if (!nvdimm->cmd_mask || in nvdimm_check_config_data()
34 !test_bit(ND_CMD_GET_CONFIG_DATA, &nvdimm->cmd_mask)) { in nvdimm_check_config_data()
35 if (test_bit(NDD_LABELING, &nvdimm->flags)) in nvdimm_check_config_data()
178 struct nvdimm *nvdimm = to_nvdimm(dev); in nvdimm_set_labeling() local
180 set_bit(NDD_LABELING, &nvdimm->flags); in nvdimm_set_labeling()
185 struct nvdimm *nvdimm = to_nvdimm(dev); in nvdimm_set_locked() local
187 set_bit(NDD_LOCKED, &nvdimm->flags); in nvdimm_set_locked()
192 struct nvdimm *nvdimm = to_nvdimm(dev); in nvdimm_clear_locked() local
194 clear_bit(NDD_LOCKED, &nvdimm->flags); in nvdimm_clear_locked()
199 struct nvdimm *nvdimm = to_nvdimm(dev); in nvdimm_release() local
201 ida_simple_remove(&dimm_ida, nvdimm->id); in nvdimm_release()
202 kfree(nvdimm); in nvdimm_release()
205 struct nvdimm *to_nvdimm(struct device *dev) in to_nvdimm()
207 struct nvdimm *nvdimm = container_of(dev, struct nvdimm, dev); in to_nvdimm() local
210 return nvdimm; in to_nvdimm()
214 struct nvdimm *nd_blk_region_to_dimm(struct nd_blk_region *ndbr) in nd_blk_region_to_dimm()
219 return nd_mapping->nvdimm; in nd_blk_region_to_dimm()
232 struct nvdimm *nvdimm = nd_mapping->nvdimm; in to_ndd() local
234 WARN_ON_ONCE(!is_nvdimm_bus_locked(&nvdimm->dev)); in to_ndd()
236 return dev_get_drvdata(&nvdimm->dev); in to_ndd()
268 const char *nvdimm_name(struct nvdimm *nvdimm) in nvdimm_name() argument
270 return dev_name(&nvdimm->dev); in nvdimm_name()
274 struct kobject *nvdimm_kobj(struct nvdimm *nvdimm) in nvdimm_kobj() argument
276 return &nvdimm->dev.kobj; in nvdimm_kobj()
280 unsigned long nvdimm_cmd_mask(struct nvdimm *nvdimm) in nvdimm_cmd_mask() argument
282 return nvdimm->cmd_mask; in nvdimm_cmd_mask()
286 void *nvdimm_provider_data(struct nvdimm *nvdimm) in nvdimm_provider_data() argument
288 if (nvdimm) in nvdimm_provider_data()
289 return nvdimm->provider_data; in nvdimm_provider_data()
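
The accessors above (nvdimm_name(), nvdimm_kobj(), nvdimm_cmd_mask(), nvdimm_provider_data()) are the exported way for a bus provider to reach back into a registered DIMM rather than dereferencing struct nvdimm directly. A minimal sketch of a provider-side command check built on them; example_check_cmd() and its error-code choices are hypothetical, not part of libnvdimm:

#include <linux/bitops.h>
#include <linux/errno.h>
#include <linux/libnvdimm.h>
#include <linux/printk.h>

/* Hypothetical: reject a vendor command the DIMM did not advertise. */
static int example_check_cmd(struct nvdimm *nvdimm, unsigned int cmd)
{
	unsigned long cmd_mask = nvdimm_cmd_mask(nvdimm);

	if (!nvdimm_provider_data(nvdimm))
		return -ENXIO;		/* registered without provider data */
	if (cmd >= BITS_PER_LONG || !test_bit(cmd, &cmd_mask))
		return -ENOTTY;		/* command not in the advertised mask */

	pr_debug("%s: issuing cmd %u\n", nvdimm_name(nvdimm), cmd);
	return 0;
}
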
297 struct nvdimm *nvdimm = to_nvdimm(dev); in commands_show() local
300 if (!nvdimm->cmd_mask) in commands_show()
303 for_each_set_bit(cmd, &nvdimm->cmd_mask, BITS_PER_LONG) in commands_show()
313 struct nvdimm *nvdimm = to_nvdimm(dev); in flags_show() local
316 test_bit(NDD_ALIASING, &nvdimm->flags) ? "alias " : "", in flags_show()
317 test_bit(NDD_LABELING, &nvdimm->flags) ? "label " : "", in flags_show()
318 test_bit(NDD_LOCKED, &nvdimm->flags) ? "lock " : ""); in flags_show()
325 struct nvdimm *nvdimm = to_nvdimm(dev); in state_show() local
333 return sprintf(buf, "%s\n", atomic_read(&nvdimm->busy) in state_show()
364 struct nvdimm *nvdimm = to_nvdimm(dev); in security_show() local
366 if (test_bit(NVDIMM_SECURITY_OVERWRITE, &nvdimm->sec.flags)) in security_show()
368 if (test_bit(NVDIMM_SECURITY_DISABLED, &nvdimm->sec.flags)) in security_show()
370 if (test_bit(NVDIMM_SECURITY_UNLOCKED, &nvdimm->sec.flags)) in security_show()
372 if (test_bit(NVDIMM_SECURITY_LOCKED, &nvdimm->sec.flags)) in security_show()
380 struct nvdimm *nvdimm = to_nvdimm(dev); in frozen_show() local
383 &nvdimm->sec.flags)); in frozen_show()
422 struct nvdimm *nvdimm = to_nvdimm(dev); in nvdimm_visible() local
426 if (!nvdimm->sec.flags) in nvdimm_visible()
431 if (nvdimm->sec.ops->freeze || nvdimm->sec.ops->disable in nvdimm_visible()
432 || nvdimm->sec.ops->change_key in nvdimm_visible()
433 || nvdimm->sec.ops->erase in nvdimm_visible()
434 || nvdimm->sec.ops->overwrite) in nvdimm_visible()
439 if (nvdimm->sec.ops->freeze) in nvdimm_visible()
451 struct nvdimm *nvdimm = to_nvdimm(dev); in result_show() local
454 if (!nvdimm->fw_ops) in result_show()
458 result = nvdimm->fw_ops->activate_result(nvdimm); in result_show()
480 struct nvdimm *nvdimm = to_nvdimm(dev); in activate_show() local
483 if (!nvdimm->fw_ops) in activate_show()
487 state = nvdimm->fw_ops->activate_state(nvdimm); in activate_show()
505 struct nvdimm *nvdimm = to_nvdimm(dev); in activate_store() local
509 if (!nvdimm->fw_ops) in activate_store()
520 rc = nvdimm->fw_ops->arm(nvdimm, arg); in activate_store()
540 struct nvdimm *nvdimm = to_nvdimm(dev); in nvdimm_firmware_visible() local
545 if (!nvdimm->fw_ops) in nvdimm_firmware_visible()
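
result_show(), activate_show() and activate_store() above only do anything when the DIMM was registered with firmware-activate ops; the listing confirms the three callbacks used are activate_result(), activate_state() and arm(). A rough sketch of a provider implementation, assuming the enum names from include/linux/libnvdimm.h in recent kernels; every example_* name is hypothetical:

#include <linux/errno.h>
#include <linux/libnvdimm.h>

/* Hypothetical firmware-activate backend that never has an update staged. */
static enum nvdimm_fwa_state example_fw_activate_state(struct nvdimm *nvdimm)
{
	return NVDIMM_FWA_IDLE;
}

static enum nvdimm_fwa_result example_fw_activate_result(struct nvdimm *nvdimm)
{
	return NVDIMM_FWA_RESULT_NONE;
}

static int example_fw_arm(struct nvdimm *nvdimm, enum nvdimm_fwa_trigger arg)
{
	return arg == NVDIMM_FWA_ARM ? -ENXIO : 0;	/* nothing staged to arm */
}

static const struct nvdimm_fw_ops example_fw_ops = {
	.activate_state = example_fw_activate_state,
	.activate_result = example_fw_activate_result,
	.arm = example_fw_arm,
};
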
582 struct nvdimm *__nvdimm_create(struct nvdimm_bus *nvdimm_bus, in __nvdimm_create()
589 struct nvdimm *nvdimm = kzalloc(sizeof(*nvdimm), GFP_KERNEL); in __nvdimm_create() local
592 if (!nvdimm) in __nvdimm_create()
595 nvdimm->id = ida_simple_get(&dimm_ida, 0, 0, GFP_KERNEL); in __nvdimm_create()
596 if (nvdimm->id < 0) { in __nvdimm_create()
597 kfree(nvdimm); in __nvdimm_create()
601 nvdimm->dimm_id = dimm_id; in __nvdimm_create()
602 nvdimm->provider_data = provider_data; in __nvdimm_create()
605 nvdimm->flags = flags; in __nvdimm_create()
606 nvdimm->cmd_mask = cmd_mask; in __nvdimm_create()
607 nvdimm->num_flush = num_flush; in __nvdimm_create()
608 nvdimm->flush_wpq = flush_wpq; in __nvdimm_create()
609 atomic_set(&nvdimm->busy, 0); in __nvdimm_create()
610 dev = &nvdimm->dev; in __nvdimm_create()
611 dev_set_name(dev, "nmem%d", nvdimm->id); in __nvdimm_create()
614 dev->devt = MKDEV(nvdimm_major, nvdimm->id); in __nvdimm_create()
616 nvdimm->sec.ops = sec_ops; in __nvdimm_create()
617 nvdimm->fw_ops = fw_ops; in __nvdimm_create()
618 nvdimm->sec.overwrite_tmo = 0; in __nvdimm_create()
619 INIT_DELAYED_WORK(&nvdimm->dwork, nvdimm_security_overwrite_query); in __nvdimm_create()
625 nvdimm->sec.flags = nvdimm_security_flags(nvdimm, NVDIMM_USER); in __nvdimm_create()
626 nvdimm->sec.ext_flags = nvdimm_security_flags(nvdimm, NVDIMM_MASTER); in __nvdimm_create()
629 return nvdimm; in __nvdimm_create()
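
__nvdimm_create() is normally reached through the nvdimm_create() wrapper in include/linux/libnvdimm.h, which passes NULL for the dimm_id, security ops and firmware ops. A rough sketch of a bus provider registering one labeled DIMM during bus enumeration; the bus handle, sysfs groups and example_register_dimm() itself are illustrative, and the wrapper's argument order is assumed from current kernels:

#include <linux/bitops.h>
#include <linux/libnvdimm.h>
#include <linux/ndctl.h>

/* Hypothetical: register one DIMM that supports label reads and writes. */
static struct nvdimm *example_register_dimm(struct nvdimm_bus *bus,
		void *provider_data, const struct attribute_group **groups)
{
	unsigned long flags = 0, cmd_mask = 0;

	set_bit(NDD_LABELING, &flags);		/* DIMM carries namespace labels */
	set_bit(ND_CMD_GET_CONFIG_SIZE, &cmd_mask);
	set_bit(ND_CMD_GET_CONFIG_DATA, &cmd_mask);
	set_bit(ND_CMD_SET_CONFIG_DATA, &cmd_mask);

	/* no write-pending-queue flush hint resources in this sketch */
	return nvdimm_create(bus, provider_data, groups, flags, cmd_mask,
			0, NULL);
}
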
635 struct nvdimm *nvdimm = data; in shutdown_security_notify() local
637 sysfs_put(nvdimm->sec.overwrite_state); in shutdown_security_notify()
642 struct nvdimm *nvdimm = to_nvdimm(dev); in nvdimm_security_setup_events() local
644 if (!nvdimm->sec.flags || !nvdimm->sec.ops in nvdimm_security_setup_events()
645 || !nvdimm->sec.ops->overwrite) in nvdimm_security_setup_events()
647 nvdimm->sec.overwrite_state = sysfs_get_dirent(dev->kobj.sd, "security"); in nvdimm_security_setup_events()
648 if (!nvdimm->sec.overwrite_state) in nvdimm_security_setup_events()
651 return devm_add_action_or_reset(dev, shutdown_security_notify, nvdimm); in nvdimm_security_setup_events()
655 int nvdimm_in_overwrite(struct nvdimm *nvdimm) in nvdimm_in_overwrite() argument
657 return test_bit(NDD_SECURITY_OVERWRITE, &nvdimm->flags); in nvdimm_in_overwrite()
661 int nvdimm_security_freeze(struct nvdimm *nvdimm) in nvdimm_security_freeze() argument
665 WARN_ON_ONCE(!is_nvdimm_bus_locked(&nvdimm->dev)); in nvdimm_security_freeze()
667 if (!nvdimm->sec.ops || !nvdimm->sec.ops->freeze) in nvdimm_security_freeze()
670 if (!nvdimm->sec.flags) in nvdimm_security_freeze()
673 if (test_bit(NDD_SECURITY_OVERWRITE, &nvdimm->flags)) { in nvdimm_security_freeze()
674 dev_warn(&nvdimm->dev, "Overwrite operation in progress.\n"); in nvdimm_security_freeze()
678 rc = nvdimm->sec.ops->freeze(nvdimm); in nvdimm_security_freeze()
679 nvdimm->sec.flags = nvdimm_security_flags(nvdimm, NVDIMM_USER); in nvdimm_security_freeze()
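
nvdimm_security_freeze() above only delegates: ops->freeze() does the hardware work, and the cached sec.flags is then refreshed through nvdimm_security_flags(), which queries the provider's get_flags() callback. A minimal sketch of those two callbacks as they might be passed to __nvdimm_create() as sec_ops, with the hardware hooks stubbed out; all example_* names are hypothetical:

#include <linux/bitops.h>
#include <linux/libnvdimm.h>

/* Hypothetical hardware hooks, stubbed for the sketch. */
static bool example_hw_is_frozen(struct nvdimm *nvdimm) { return false; }
static int example_hw_freeze(struct nvdimm *nvdimm) { return 0; }

static unsigned long example_sec_get_flags(struct nvdimm *nvdimm,
		enum nvdimm_passphrase_type pass_type)
{
	unsigned long flags = 0;

	if (example_hw_is_frozen(nvdimm))
		set_bit(NVDIMM_SECURITY_FROZEN, &flags);
	else
		set_bit(NVDIMM_SECURITY_UNLOCKED, &flags);
	return flags;
}

static int example_sec_freeze(struct nvdimm *nvdimm)
{
	/* lock out passphrase changes until the next power cycle */
	return example_hw_freeze(nvdimm);
}

static const struct nvdimm_security_ops example_sec_ops = {
	.get_flags = example_sec_get_flags,
	.freeze = example_sec_freeze,
};
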
716 if (nd_mapping->nvdimm == info->nd_mapping->nvdimm) in alias_dpa_busy()
850 if (__reserve_free_pmem(&nd_region->dev, nd_mapping->nvdimm)) in nd_pmem_max_contiguous_dpa()