Lines matching refs:nvmem
56 struct nvmem_device *nvmem; member
76 static int __nvmem_reg_read(struct nvmem_device *nvmem, unsigned int offset, in __nvmem_reg_read() argument
79 if (nvmem->reg_read) in __nvmem_reg_read()
80 return nvmem->reg_read(nvmem->priv, offset, val, bytes); in __nvmem_reg_read()
85 static int __nvmem_reg_write(struct nvmem_device *nvmem, unsigned int offset, in __nvmem_reg_write() argument
90 if (nvmem->reg_write) { in __nvmem_reg_write()
91 gpiod_set_value_cansleep(nvmem->wp_gpio, 0); in __nvmem_reg_write()
92 ret = nvmem->reg_write(nvmem->priv, offset, val, bytes); in __nvmem_reg_write()
93 gpiod_set_value_cansleep(nvmem->wp_gpio, 1); in __nvmem_reg_write()
100 static int nvmem_access_with_keepouts(struct nvmem_device *nvmem, in nvmem_access_with_keepouts() argument
107 const struct nvmem_keepout *keepout = nvmem->keepout; in nvmem_access_with_keepouts()
108 const struct nvmem_keepout *keepoutend = keepout + nvmem->nkeepout; in nvmem_access_with_keepouts()
124 rc = __nvmem_reg_write(nvmem, offset, val, ksize); in nvmem_access_with_keepouts()
126 rc = __nvmem_reg_read(nvmem, offset, val, ksize); in nvmem_access_with_keepouts()
156 return __nvmem_reg_write(nvmem, offset, val, ksize); in nvmem_access_with_keepouts()
158 return __nvmem_reg_read(nvmem, offset, val, ksize); in nvmem_access_with_keepouts()
164 static int nvmem_reg_read(struct nvmem_device *nvmem, unsigned int offset, in nvmem_reg_read() argument
167 if (!nvmem->nkeepout) in nvmem_reg_read()
168 return __nvmem_reg_read(nvmem, offset, val, bytes); in nvmem_reg_read()
170 return nvmem_access_with_keepouts(nvmem, offset, val, bytes, false); in nvmem_reg_read()
173 static int nvmem_reg_write(struct nvmem_device *nvmem, unsigned int offset, in nvmem_reg_write() argument
176 if (!nvmem->nkeepout) in nvmem_reg_write()
177 return __nvmem_reg_write(nvmem, offset, val, bytes); in nvmem_reg_write()
179 return nvmem_access_with_keepouts(nvmem, offset, val, bytes, true); in nvmem_reg_write()
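
The reg_read/reg_write hooks dispatched above take (priv, offset, val, bytes), as the calls in __nvmem_reg_read()/__nvmem_reg_write() show. A minimal provider-callback sketch, assuming a hypothetical driver ("foo") whose storage is a memory-mapped window; none of the foo_* names come from the listing:

#include <linux/io.h>
#include <linux/nvmem-provider.h>

struct foo_priv {
	void __iomem *base;	/* backing storage, mapped at probe time */
};

static int foo_nvmem_read(void *priv, unsigned int offset,
			  void *val, size_t bytes)
{
	struct foo_priv *foo = priv;

	memcpy_fromio(val, foo->base + offset, bytes);
	return 0;		/* 0 on success, negative errno on error */
}

static int foo_nvmem_write(void *priv, unsigned int offset,
			   void *val, size_t bytes)
{
	struct foo_priv *foo = priv;

	memcpy_toio(foo->base + offset, val, bytes);
	return 0;
}
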
198 struct nvmem_device *nvmem = to_nvmem_device(dev); in type_show() local
200 return sprintf(buf, "%s\n", nvmem_type_str[nvmem->type]); in type_show()
215 struct nvmem_device *nvmem; in bin_attr_nvmem_read() local
222 nvmem = to_nvmem_device(dev); in bin_attr_nvmem_read()
225 if (pos >= nvmem->size) in bin_attr_nvmem_read()
228 if (!IS_ALIGNED(pos, nvmem->stride)) in bin_attr_nvmem_read()
231 if (count < nvmem->word_size) in bin_attr_nvmem_read()
234 if (pos + count > nvmem->size) in bin_attr_nvmem_read()
235 count = nvmem->size - pos; in bin_attr_nvmem_read()
237 count = round_down(count, nvmem->word_size); in bin_attr_nvmem_read()
239 if (!nvmem->reg_read) in bin_attr_nvmem_read()
242 rc = nvmem_reg_read(nvmem, pos, buf, count); in bin_attr_nvmem_read()
255 struct nvmem_device *nvmem; in bin_attr_nvmem_write() local
262 nvmem = to_nvmem_device(dev); in bin_attr_nvmem_write()
265 if (pos >= nvmem->size) in bin_attr_nvmem_write()
268 if (!IS_ALIGNED(pos, nvmem->stride)) in bin_attr_nvmem_write()
271 if (count < nvmem->word_size) in bin_attr_nvmem_write()
274 if (pos + count > nvmem->size) in bin_attr_nvmem_write()
275 count = nvmem->size - pos; in bin_attr_nvmem_write()
277 count = round_down(count, nvmem->word_size); in bin_attr_nvmem_write()
279 if (!nvmem->reg_write) in bin_attr_nvmem_write()
282 rc = nvmem_reg_write(nvmem, pos, buf, count); in bin_attr_nvmem_write()
290 static umode_t nvmem_bin_attr_get_umode(struct nvmem_device *nvmem) in nvmem_bin_attr_get_umode() argument
294 if (!nvmem->root_only) in nvmem_bin_attr_get_umode()
297 if (!nvmem->read_only) in nvmem_bin_attr_get_umode()
300 if (!nvmem->reg_write) in nvmem_bin_attr_get_umode()
303 if (!nvmem->reg_read) in nvmem_bin_attr_get_umode()
313 struct nvmem_device *nvmem = to_nvmem_device(dev); in nvmem_bin_attr_is_visible() local
315 attr->size = nvmem->size; in nvmem_bin_attr_is_visible()
317 return nvmem_bin_attr_get_umode(nvmem); in nvmem_bin_attr_is_visible()
359 static int nvmem_sysfs_setup_compat(struct nvmem_device *nvmem, in nvmem_sysfs_setup_compat() argument
373 nvmem->eeprom = bin_attr_nvmem_eeprom_compat; in nvmem_sysfs_setup_compat()
374 nvmem->eeprom.attr.mode = nvmem_bin_attr_get_umode(nvmem); in nvmem_sysfs_setup_compat()
375 nvmem->eeprom.size = nvmem->size; in nvmem_sysfs_setup_compat()
377 nvmem->eeprom.attr.key = &eeprom_lock_key; in nvmem_sysfs_setup_compat()
379 nvmem->eeprom.private = &nvmem->dev; in nvmem_sysfs_setup_compat()
380 nvmem->base_dev = config->base_dev; in nvmem_sysfs_setup_compat()
382 rval = device_create_bin_file(nvmem->base_dev, &nvmem->eeprom); in nvmem_sysfs_setup_compat()
384 dev_err(&nvmem->dev, in nvmem_sysfs_setup_compat()
389 nvmem->flags |= FLAG_COMPAT; in nvmem_sysfs_setup_compat()
394 static void nvmem_sysfs_remove_compat(struct nvmem_device *nvmem, in nvmem_sysfs_remove_compat() argument
398 device_remove_bin_file(nvmem->base_dev, &nvmem->eeprom); in nvmem_sysfs_remove_compat()
403 static int nvmem_sysfs_setup_compat(struct nvmem_device *nvmem, in nvmem_sysfs_setup_compat() argument
408 static void nvmem_sysfs_remove_compat(struct nvmem_device *nvmem, in nvmem_sysfs_remove_compat() argument
417 struct nvmem_device *nvmem = to_nvmem_device(dev); in nvmem_release() local
419 ida_free(&nvmem_ida, nvmem->id); in nvmem_release()
420 gpiod_put(nvmem->wp_gpio); in nvmem_release()
421 kfree(nvmem); in nvmem_release()
443 static void nvmem_device_remove_all_cells(const struct nvmem_device *nvmem) in nvmem_device_remove_all_cells() argument
447 list_for_each_entry_safe(cell, p, &nvmem->cells, node) in nvmem_device_remove_all_cells()
454 list_add_tail(&cell->node, &cell->nvmem->cells); in nvmem_cell_entry_add()
459 static int nvmem_cell_info_to_nvmem_cell_entry_nodup(struct nvmem_device *nvmem, in nvmem_cell_info_to_nvmem_cell_entry_nodup() argument
463 cell->nvmem = nvmem; in nvmem_cell_info_to_nvmem_cell_entry_nodup()
476 if (!IS_ALIGNED(cell->offset, nvmem->stride)) { in nvmem_cell_info_to_nvmem_cell_entry_nodup()
477 dev_err(&nvmem->dev, in nvmem_cell_info_to_nvmem_cell_entry_nodup()
479 cell->name ?: "<unknown>", nvmem->stride); in nvmem_cell_info_to_nvmem_cell_entry_nodup()
486 static int nvmem_cell_info_to_nvmem_cell_entry(struct nvmem_device *nvmem, in nvmem_cell_info_to_nvmem_cell_entry() argument
492 err = nvmem_cell_info_to_nvmem_cell_entry_nodup(nvmem, info, cell); in nvmem_cell_info_to_nvmem_cell_entry()
512 static int nvmem_add_cells(struct nvmem_device *nvmem, in nvmem_add_cells() argument
530 rval = nvmem_cell_info_to_nvmem_cell_entry(nvmem, &info[i], cells[i]); in nvmem_add_cells()
578 static int nvmem_add_cells_from_table(struct nvmem_device *nvmem) in nvmem_add_cells_from_table() argument
587 if (strcmp(nvmem_dev_name(nvmem), table->nvmem_name) == 0) { in nvmem_add_cells_from_table()
597 rval = nvmem_cell_info_to_nvmem_cell_entry(nvmem, info, cell); in nvmem_add_cells_from_table()
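
nvmem_add_cells_from_table() above matches tables registered with nvmem_add_cell_table() against the provider's name. A sketch of such a table, assuming a provider registered as "foo-nvmem0"; the cell name and layout are purely illustrative:

#include <linux/kernel.h>
#include <linux/nvmem-consumer.h>
#include <linux/nvmem-provider.h>

static struct nvmem_cell_info foo_cells[] = {
	{
		.name	= "mac-address",	/* illustrative cell */
		.offset	= 0x40,
		.bytes	= 6,
	},
};

static struct nvmem_cell_table foo_cell_table = {
	.nvmem_name	= "foo-nvmem0",		/* must match nvmem_dev_name() */
	.cells		= foo_cells,
	.ncells		= ARRAY_SIZE(foo_cells),
};

/* Called once from the provider's (or board code's) init path. */
static void foo_register_cells(void)
{
	nvmem_add_cell_table(&foo_cell_table);
}
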
614 nvmem_find_cell_entry_by_name(struct nvmem_device *nvmem, const char *cell_id) in nvmem_find_cell_entry_by_name() argument
619 list_for_each_entry(iter, &nvmem->cells, node) { in nvmem_find_cell_entry_by_name()
630 static int nvmem_validate_keepouts(struct nvmem_device *nvmem) in nvmem_validate_keepouts() argument
633 const struct nvmem_keepout *keepout = nvmem->keepout; in nvmem_validate_keepouts()
634 const struct nvmem_keepout *keepoutend = keepout + nvmem->nkeepout; in nvmem_validate_keepouts()
639 dev_err(&nvmem->dev, in nvmem_validate_keepouts()
646 dev_err(&nvmem->dev, in nvmem_validate_keepouts()
656 if ((keepout->end - keepout->start < nvmem->word_size) || in nvmem_validate_keepouts()
658 (keepout->start - cur < nvmem->word_size))) { in nvmem_validate_keepouts()
660 dev_err(&nvmem->dev, in nvmem_validate_keepouts()
667 if (!IS_ALIGNED(keepout->start, nvmem->stride) || in nvmem_validate_keepouts()
668 !IS_ALIGNED(keepout->end, nvmem->stride)) { in nvmem_validate_keepouts()
670 dev_err(&nvmem->dev, in nvmem_validate_keepouts()
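
nvmem_validate_keepouts() above checks that the ranges passed through config->keepout are non-overlapping, in ascending order, at least word_size bytes long, and stride-aligned. A hedged sketch of such a table; the offsets and fill values are made up:

#include <linux/nvmem-provider.h>

/* Ranges the core never hands to reg_read()/reg_write(); reads that
 * overlap a keepout are filled with .value instead. */
static const struct nvmem_keepout foo_keepouts[] = {
	{ .start = 0x10, .end = 0x20, .value = 0xff },
	{ .start = 0x80, .end = 0x90, .value = 0x00 },
};

/* Wired up through the registration config (see the sketch further down):
 *	config.keepout  = foo_keepouts;
 *	config.nkeepout = ARRAY_SIZE(foo_keepouts);
 */
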
683 static int nvmem_add_cells_from_of(struct nvmem_device *nvmem) in nvmem_add_cells_from_of() argument
686 struct device *dev = &nvmem->dev; in nvmem_add_cells_from_of()
709 cell->nvmem = nvmem; in nvmem_add_cells_from_of()
725 if (!IS_ALIGNED(cell->offset, nvmem->stride)) { in nvmem_add_cells_from_of()
727 cell->name, nvmem->stride); in nvmem_add_cells_from_of()
754 struct nvmem_device *nvmem; in nvmem_register() local
763 nvmem = kzalloc(sizeof(*nvmem), GFP_KERNEL); in nvmem_register()
764 if (!nvmem) in nvmem_register()
769 kfree(nvmem); in nvmem_register()
774 nvmem->wp_gpio = config->wp_gpio; in nvmem_register()
776 nvmem->wp_gpio = gpiod_get_optional(config->dev, "wp", in nvmem_register()
778 if (IS_ERR(nvmem->wp_gpio)) { in nvmem_register()
779 ida_free(&nvmem_ida, nvmem->id); in nvmem_register()
780 rval = PTR_ERR(nvmem->wp_gpio); in nvmem_register()
781 kfree(nvmem); in nvmem_register()
785 kref_init(&nvmem->refcnt); in nvmem_register()
786 INIT_LIST_HEAD(&nvmem->cells); in nvmem_register()
788 nvmem->id = rval; in nvmem_register()
789 nvmem->owner = config->owner; in nvmem_register()
790 if (!nvmem->owner && config->dev->driver) in nvmem_register()
791 nvmem->owner = config->dev->driver->owner; in nvmem_register()
792 nvmem->stride = config->stride ?: 1; in nvmem_register()
793 nvmem->word_size = config->word_size ?: 1; in nvmem_register()
794 nvmem->size = config->size; in nvmem_register()
795 nvmem->dev.type = &nvmem_provider_type; in nvmem_register()
796 nvmem->dev.bus = &nvmem_bus_type; in nvmem_register()
797 nvmem->dev.parent = config->dev; in nvmem_register()
798 nvmem->root_only = config->root_only; in nvmem_register()
799 nvmem->priv = config->priv; in nvmem_register()
800 nvmem->type = config->type; in nvmem_register()
801 nvmem->reg_read = config->reg_read; in nvmem_register()
802 nvmem->reg_write = config->reg_write; in nvmem_register()
803 nvmem->cell_post_process = config->cell_post_process; in nvmem_register()
804 nvmem->keepout = config->keepout; in nvmem_register()
805 nvmem->nkeepout = config->nkeepout; in nvmem_register()
807 nvmem->dev.of_node = config->of_node; in nvmem_register()
809 nvmem->dev.of_node = config->dev->of_node; in nvmem_register()
813 rval = dev_set_name(&nvmem->dev, "%s", config->name); in nvmem_register()
816 rval = dev_set_name(&nvmem->dev, "%s%d", config->name, nvmem->id); in nvmem_register()
819 rval = dev_set_name(&nvmem->dev, "%s%d", in nvmem_register()
821 config->name ? config->id : nvmem->id); in nvmem_register()
826 ida_free(&nvmem_ida, nvmem->id); in nvmem_register()
827 kfree(nvmem); in nvmem_register()
831 nvmem->read_only = device_property_present(config->dev, "read-only") || in nvmem_register()
832 config->read_only || !nvmem->reg_write; in nvmem_register()
835 nvmem->dev.groups = nvmem_dev_groups; in nvmem_register()
838 dev_dbg(&nvmem->dev, "Registering nvmem device %s\n", config->name); in nvmem_register()
840 rval = device_register(&nvmem->dev); in nvmem_register()
844 if (nvmem->nkeepout) { in nvmem_register()
845 rval = nvmem_validate_keepouts(nvmem); in nvmem_register()
851 rval = nvmem_sysfs_setup_compat(nvmem, config); in nvmem_register()
857 rval = nvmem_add_cells(nvmem, config->cells, config->ncells); in nvmem_register()
862 rval = nvmem_add_cells_from_table(nvmem); in nvmem_register()
866 rval = nvmem_add_cells_from_of(nvmem); in nvmem_register()
870 blocking_notifier_call_chain(&nvmem_notifier, NVMEM_ADD, nvmem); in nvmem_register()
872 return nvmem; in nvmem_register()
875 nvmem_device_remove_all_cells(nvmem); in nvmem_register()
878 nvmem_sysfs_remove_compat(nvmem, config); in nvmem_register()
880 device_del(&nvmem->dev); in nvmem_register()
882 put_device(&nvmem->dev); in nvmem_register()
890 struct nvmem_device *nvmem; in nvmem_device_release() local
892 nvmem = container_of(kref, struct nvmem_device, refcnt); in nvmem_device_release()
894 blocking_notifier_call_chain(&nvmem_notifier, NVMEM_REMOVE, nvmem); in nvmem_device_release()
896 if (nvmem->flags & FLAG_COMPAT) in nvmem_device_release()
897 device_remove_bin_file(nvmem->base_dev, &nvmem->eeprom); in nvmem_device_release()
899 nvmem_device_remove_all_cells(nvmem); in nvmem_device_release()
900 device_unregister(&nvmem->dev); in nvmem_device_release()
908 void nvmem_unregister(struct nvmem_device *nvmem) in nvmem_unregister() argument
910 if (nvmem) in nvmem_unregister()
911 kref_put(&nvmem->refcnt, nvmem_device_release); in nvmem_unregister()
915 static void devm_nvmem_unregister(void *nvmem) in devm_nvmem_unregister() argument
917 nvmem_unregister(nvmem); in devm_nvmem_unregister()
934 struct nvmem_device *nvmem; in devm_nvmem_register() local
937 nvmem = nvmem_register(config); in devm_nvmem_register()
938 if (IS_ERR(nvmem)) in devm_nvmem_register()
939 return nvmem; in devm_nvmem_register()
941 ret = devm_add_action_or_reset(dev, devm_nvmem_unregister, nvmem); in devm_nvmem_register()
945 return nvmem; in devm_nvmem_register()
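
The fields nvmem_register() copies above (stride, word_size, size, priv, reg_read/reg_write, keepout, ...) all come from struct nvmem_config. A minimal registration sketch for a hypothetical platform driver, reusing the foo_* callbacks and tables sketched earlier; this is an illustration, not a canonical provider. The owner field is left unset, which lets the core fall back to the driver's module as shown above:

#include <linux/err.h>
#include <linux/platform_device.h>
#include <linux/nvmem-provider.h>
#include <linux/slab.h>

static int foo_probe(struct platform_device *pdev)
{
	struct nvmem_device *nvmem;
	struct foo_priv *foo;
	struct nvmem_config config = {
		.dev		= &pdev->dev,
		.name		= "foo-nvmem",
		.id		= NVMEM_DEVID_AUTO,	/* ->name plus instance id */
		.type		= NVMEM_TYPE_OTP,
		.size		= 0x1000,
		.word_size	= 4,
		.stride		= 4,
		.keepout	= foo_keepouts,
		.nkeepout	= ARRAY_SIZE(foo_keepouts),
		.reg_read	= foo_nvmem_read,
		.reg_write	= foo_nvmem_write,
	};

	foo = devm_kzalloc(&pdev->dev, sizeof(*foo), GFP_KERNEL);
	if (!foo)
		return -ENOMEM;

	foo->base = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(foo->base))
		return PTR_ERR(foo->base);

	config.priv = foo;

	/* devm_nvmem_register() ties nvmem_unregister() to driver detach */
	nvmem = devm_nvmem_register(&pdev->dev, &config);
	return PTR_ERR_OR_ZERO(nvmem);
}
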
952 struct nvmem_device *nvmem = NULL; in __nvmem_device_get() local
958 nvmem = to_nvmem_device(dev); in __nvmem_device_get()
960 if (!nvmem) in __nvmem_device_get()
963 if (!try_module_get(nvmem->owner)) { in __nvmem_device_get()
964 dev_err(&nvmem->dev, in __nvmem_device_get()
966 nvmem_dev_name(nvmem)); in __nvmem_device_get()
968 put_device(&nvmem->dev); in __nvmem_device_get()
972 kref_get(&nvmem->refcnt); in __nvmem_device_get()
974 return nvmem; in __nvmem_device_get()
977 static void __nvmem_device_put(struct nvmem_device *nvmem) in __nvmem_device_put() argument
979 put_device(&nvmem->dev); in __nvmem_device_put()
980 module_put(nvmem->owner); in __nvmem_device_put()
981 kref_put(&nvmem->refcnt, nvmem_device_release); in __nvmem_device_put()
998 struct nvmem_device *nvmem; in of_nvmem_device_get() local
1008 nvmem = __nvmem_device_get(nvmem_np, device_match_of_node); in of_nvmem_device_get()
1010 return nvmem; in of_nvmem_device_get()
1027 struct nvmem_device *nvmem; in nvmem_device_get() local
1029 nvmem = of_nvmem_device_get(dev->of_node, dev_name); in nvmem_device_get()
1031 if (!IS_ERR(nvmem) || PTR_ERR(nvmem) == -EPROBE_DEFER) in nvmem_device_get()
1032 return nvmem; in nvmem_device_get()
1058 struct nvmem_device **nvmem = res; in devm_nvmem_device_match() local
1060 if (WARN_ON(!nvmem || !*nvmem)) in devm_nvmem_device_match()
1063 return *nvmem == data; in devm_nvmem_device_match()
1078 void devm_nvmem_device_put(struct device *dev, struct nvmem_device *nvmem) in devm_nvmem_device_put() argument
1083 devm_nvmem_device_match, nvmem); in devm_nvmem_device_put()
1094 void nvmem_device_put(struct nvmem_device *nvmem) in nvmem_device_put() argument
1096 __nvmem_device_put(nvmem); in nvmem_device_put()
1112 struct nvmem_device **ptr, *nvmem; in devm_nvmem_device_get() local
1118 nvmem = nvmem_device_get(dev, id); in devm_nvmem_device_get()
1119 if (!IS_ERR(nvmem)) { in devm_nvmem_device_get()
1120 *ptr = nvmem; in devm_nvmem_device_get()
1126 return nvmem; in devm_nvmem_device_get()
1159 struct nvmem_device *nvmem; in nvmem_cell_get_from_lookup() local
1173 nvmem = __nvmem_device_get((void *)lookup->nvmem_name, in nvmem_cell_get_from_lookup()
1175 if (IS_ERR(nvmem)) { in nvmem_cell_get_from_lookup()
1177 cell = ERR_CAST(nvmem); in nvmem_cell_get_from_lookup()
1181 cell_entry = nvmem_find_cell_entry_by_name(nvmem, in nvmem_cell_get_from_lookup()
1184 __nvmem_device_put(nvmem); in nvmem_cell_get_from_lookup()
1189 __nvmem_device_put(nvmem); in nvmem_cell_get_from_lookup()
1201 nvmem_find_cell_entry_by_node(struct nvmem_device *nvmem, struct device_node *np) in nvmem_find_cell_entry_by_node() argument
1206 list_for_each_entry(iter, &nvmem->cells, node) { in nvmem_find_cell_entry_by_node()
1232 struct nvmem_device *nvmem; in of_nvmem_cell_get() local
1249 nvmem = __nvmem_device_get(nvmem_np, device_match_of_node); in of_nvmem_cell_get()
1251 if (IS_ERR(nvmem)) in of_nvmem_cell_get()
1252 return ERR_CAST(nvmem); in of_nvmem_cell_get()
1254 cell_entry = nvmem_find_cell_entry_by_node(nvmem, cell_np); in of_nvmem_cell_get()
1256 __nvmem_device_put(nvmem); in of_nvmem_cell_get()
1262 __nvmem_device_put(nvmem); in of_nvmem_cell_get()
1369 struct nvmem_device *nvmem = cell->entry->nvmem; in nvmem_cell_put() local
1375 __nvmem_device_put(nvmem); in nvmem_cell_put()
1412 static int __nvmem_cell_read(struct nvmem_device *nvmem, in __nvmem_cell_read() argument
1418 rc = nvmem_reg_read(nvmem, cell->offset, buf, cell->bytes); in __nvmem_cell_read()
1427 if (nvmem->cell_post_process) { in __nvmem_cell_read()
1428 rc = nvmem->cell_post_process(nvmem->priv, id, in __nvmem_cell_read()
1452 struct nvmem_device *nvmem = cell->entry->nvmem; in nvmem_cell_read() local
1456 if (!nvmem) in nvmem_cell_read()
1463 rc = __nvmem_cell_read(nvmem, cell->entry, buf, len, cell->id); in nvmem_cell_read()
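
nvmem_cell_read() above returns a kmalloc()'d buffer holding the (optionally post-processed) cell contents and stores its length through *len. A hedged consumer-side sketch; the cell name "mac-address" is only an example and must match a cell defined by the provider, a cell table or the device tree:

#include <linux/device.h>
#include <linux/nvmem-consumer.h>
#include <linux/slab.h>
#include <linux/string.h>

static int foo_read_mac(struct device *dev, u8 mac[6])
{
	struct nvmem_cell *cell;
	void *buf;
	size_t len;
	int ret = 0;

	cell = nvmem_cell_get(dev, "mac-address");
	if (IS_ERR(cell))
		return PTR_ERR(cell);

	buf = nvmem_cell_read(cell, &len);	/* caller owns the buffer */
	nvmem_cell_put(cell);
	if (IS_ERR(buf))
		return PTR_ERR(buf);

	if (len == 6)
		memcpy(mac, buf, 6);
	else
		ret = -EINVAL;

	kfree(buf);
	return ret;
}
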
1476 struct nvmem_device *nvmem = cell->nvmem; in nvmem_cell_prepare_write_buffer() local
1493 rc = nvmem_reg_read(nvmem, cell->offset, &v, 1); in nvmem_cell_prepare_write_buffer()
1512 rc = nvmem_reg_read(nvmem, in nvmem_cell_prepare_write_buffer()
1528 struct nvmem_device *nvmem = cell->nvmem; in __nvmem_cell_entry_write() local
1531 if (!nvmem || nvmem->read_only || in __nvmem_cell_entry_write()
1541 rc = nvmem_reg_write(nvmem, cell->offset, buf, cell->bytes); in __nvmem_cell_entry_write()
1762 ssize_t nvmem_device_cell_read(struct nvmem_device *nvmem, in nvmem_device_cell_read() argument
1769 if (!nvmem) in nvmem_device_cell_read()
1772 rc = nvmem_cell_info_to_nvmem_cell_entry_nodup(nvmem, info, &cell); in nvmem_device_cell_read()
1776 rc = __nvmem_cell_read(nvmem, &cell, buf, &len, NULL); in nvmem_device_cell_read()
1793 int nvmem_device_cell_write(struct nvmem_device *nvmem, in nvmem_device_cell_write() argument
1799 if (!nvmem) in nvmem_device_cell_write()
1802 rc = nvmem_cell_info_to_nvmem_cell_entry_nodup(nvmem, info, &cell); in nvmem_device_cell_write()
1821 int nvmem_device_read(struct nvmem_device *nvmem, in nvmem_device_read() argument
1827 if (!nvmem) in nvmem_device_read()
1830 rc = nvmem_reg_read(nvmem, offset, buf, bytes); in nvmem_device_read()
1849 int nvmem_device_write(struct nvmem_device *nvmem, in nvmem_device_write() argument
1855 if (!nvmem) in nvmem_device_write()
1858 rc = nvmem_reg_write(nvmem, offset, buf, bytes); in nvmem_device_write()
1936 const char *nvmem_dev_name(struct nvmem_device *nvmem) in nvmem_dev_name() argument
1938 return dev_name(&nvmem->dev); in nvmem_dev_name()
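
For raw, cell-less access, the nvmem_device_* entry points near the end of the listing operate on offsets directly. A short consumer sketch, assuming the calling device references a provider from its device tree node; note that nvmem_device_read() returns the number of bytes read on success:

#include <linux/device.h>
#include <linux/nvmem-consumer.h>

static int foo_dump_first_word(struct device *dev)
{
	struct nvmem_device *nvmem;
	u32 word;
	int ret;

	/* NULL id: use the first provider referenced by this device */
	nvmem = devm_nvmem_device_get(dev, NULL);
	if (IS_ERR(nvmem))
		return PTR_ERR(nvmem);

	ret = nvmem_device_read(nvmem, 0, sizeof(word), &word);
	if (ret < 0)
		return ret;

	dev_info(dev, "first word: %#x (%d bytes read)\n", word, ret);
	return 0;
}
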