Lines matching refs:cell — cross-reference hits for the identifier "cell" in the nvmem core (drivers/nvmem/core.c). Each hit shows the source line number, the matching line, and the enclosing function.
344 struct nvmem_cell *cell = NULL; in nvmem_cell_attr_read() local
349 cell = nvmem_create_cell(entry, entry->name, 0); in nvmem_cell_attr_read()
350 if (IS_ERR(cell)) in nvmem_cell_attr_read()
351 return PTR_ERR(cell); in nvmem_cell_attr_read()
353 if (!cell) in nvmem_cell_attr_read()
356 content = nvmem_cell_read(cell, &cell_sz); in nvmem_cell_attr_read()
367 kfree_const(cell->id); in nvmem_cell_attr_read()
368 kfree(cell); in nvmem_cell_attr_read()
551 static void nvmem_cell_entry_drop(struct nvmem_cell_entry *cell) in nvmem_cell_entry_drop() argument
553 blocking_notifier_call_chain(&nvmem_notifier, NVMEM_CELL_REMOVE, cell); in nvmem_cell_entry_drop()
555 list_del(&cell->node); in nvmem_cell_entry_drop()
557 of_node_put(cell->np); in nvmem_cell_entry_drop()
558 kfree_const(cell->name); in nvmem_cell_entry_drop()
559 kfree(cell); in nvmem_cell_entry_drop()
564 struct nvmem_cell_entry *cell, *p; in nvmem_device_remove_all_cells() local
566 list_for_each_entry_safe(cell, p, &nvmem->cells, node) in nvmem_device_remove_all_cells()
567 nvmem_cell_entry_drop(cell); in nvmem_device_remove_all_cells()
570 static void nvmem_cell_entry_add(struct nvmem_cell_entry *cell) in nvmem_cell_entry_add() argument
573 list_add_tail(&cell->node, &cell->nvmem->cells); in nvmem_cell_entry_add()
575 blocking_notifier_call_chain(&nvmem_notifier, NVMEM_CELL_ADD, cell); in nvmem_cell_entry_add()
580 struct nvmem_cell_entry *cell) in nvmem_cell_info_to_nvmem_cell_entry_nodup() argument
582 cell->nvmem = nvmem; in nvmem_cell_info_to_nvmem_cell_entry_nodup()
583 cell->offset = info->offset; in nvmem_cell_info_to_nvmem_cell_entry_nodup()
584 cell->raw_len = info->raw_len ?: info->bytes; in nvmem_cell_info_to_nvmem_cell_entry_nodup()
585 cell->bytes = info->bytes; in nvmem_cell_info_to_nvmem_cell_entry_nodup()
586 cell->name = info->name; in nvmem_cell_info_to_nvmem_cell_entry_nodup()
587 cell->read_post_process = info->read_post_process; in nvmem_cell_info_to_nvmem_cell_entry_nodup()
588 cell->priv = info->priv; in nvmem_cell_info_to_nvmem_cell_entry_nodup()
590 cell->bit_offset = info->bit_offset; in nvmem_cell_info_to_nvmem_cell_entry_nodup()
591 cell->nbits = info->nbits; in nvmem_cell_info_to_nvmem_cell_entry_nodup()
592 cell->np = info->np; in nvmem_cell_info_to_nvmem_cell_entry_nodup()
594 if (cell->nbits) { in nvmem_cell_info_to_nvmem_cell_entry_nodup()
595 cell->bytes = DIV_ROUND_UP(cell->nbits + cell->bit_offset, in nvmem_cell_info_to_nvmem_cell_entry_nodup()
597 cell->raw_len = ALIGN(cell->bytes, nvmem->word_size); in nvmem_cell_info_to_nvmem_cell_entry_nodup()
600 if (!IS_ALIGNED(cell->offset, nvmem->stride)) { in nvmem_cell_info_to_nvmem_cell_entry_nodup()
603 cell->name ?: "<unknown>", nvmem->stride); in nvmem_cell_info_to_nvmem_cell_entry_nodup()
607 if (!IS_ALIGNED(cell->raw_len, nvmem->word_size)) { in nvmem_cell_info_to_nvmem_cell_entry_nodup()
610 cell->name ?: "<unknown>", cell->raw_len, in nvmem_cell_info_to_nvmem_cell_entry_nodup()
616 cell->raw_len = ALIGN(cell->raw_len, nvmem->word_size); in nvmem_cell_info_to_nvmem_cell_entry_nodup()
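For a bit-level cell (nbits set), the sizes computed above follow directly from nbits and bit_offset. As an illustrative example (the numbers are not taken from the source): with nbits = 10 and bit_offset = 6 on a device with word_size = 2, bytes = DIV_ROUND_UP(10 + 6, 8) = 2 and raw_len = ALIGN(2, 2) = 2, so the read path fetches one full word and nvmem_shift_read_buffer_in_place() later strips the offset and masks the result down to the 10 valid bits.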
624 struct nvmem_cell_entry *cell) in nvmem_cell_info_to_nvmem_cell_entry() argument
628 err = nvmem_cell_info_to_nvmem_cell_entry_nodup(nvmem, info, cell); in nvmem_cell_info_to_nvmem_cell_entry()
632 cell->name = kstrdup_const(info->name, GFP_KERNEL); in nvmem_cell_info_to_nvmem_cell_entry()
633 if (!cell->name) in nvmem_cell_info_to_nvmem_cell_entry()
650 struct nvmem_cell_entry *cell; in nvmem_add_one_cell() local
653 cell = kzalloc(sizeof(*cell), GFP_KERNEL); in nvmem_add_one_cell()
654 if (!cell) in nvmem_add_one_cell()
657 rval = nvmem_cell_info_to_nvmem_cell_entry(nvmem, info, cell); in nvmem_add_one_cell()
659 kfree(cell); in nvmem_add_one_cell()
663 nvmem_cell_entry_add(cell); in nvmem_add_one_cell()
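nvmem_add_one_cell() is the provider-side entry point behind the hits above: it allocates an nvmem_cell_entry, fills it from a struct nvmem_cell_info and links it into nvmem->cells. A minimal provider sketch, with a hypothetical cell name and offset:

#include <linux/nvmem-consumer.h>
#include <linux/nvmem-provider.h>

static int example_register_mac_cell(struct nvmem_device *nvmem)
{
	struct nvmem_cell_info info = {
		.name	= "mac-address",	/* hypothetical cell name */
		.offset	= 0x40,			/* hypothetical byte offset */
		.bytes	= 6,
	};

	/*
	 * Copies info into a new nvmem_cell_entry (the name is duplicated
	 * with kstrdup_const()) and adds it to the device's cell list;
	 * fails if the offset is not aligned to the device stride.
	 */
	return nvmem_add_one_cell(nvmem, &info);
}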
722 struct nvmem_cell_entry *iter, *cell = NULL; in nvmem_find_cell_entry_by_name() local
727 cell = iter; in nvmem_find_cell_entry_by_name()
733 return cell; in nvmem_find_cell_entry_by_name()
1298 struct nvmem_cell *cell; in nvmem_create_cell() local
1301 cell = kzalloc(sizeof(*cell), GFP_KERNEL); in nvmem_create_cell()
1302 if (!cell) in nvmem_create_cell()
1308 kfree(cell); in nvmem_create_cell()
1313 cell->id = name; in nvmem_create_cell()
1314 cell->entry = entry; in nvmem_create_cell()
1315 cell->index = index; in nvmem_create_cell()
1317 return cell; in nvmem_create_cell()
1324 struct nvmem_cell *cell = ERR_PTR(-ENOENT); in nvmem_cell_get_from_lookup() local
1344 cell = ERR_CAST(nvmem); in nvmem_cell_get_from_lookup()
1352 cell = ERR_PTR(-ENOENT); in nvmem_cell_get_from_lookup()
1354 cell = nvmem_create_cell(cell_entry, con_id, 0); in nvmem_cell_get_from_lookup()
1355 if (IS_ERR(cell)) in nvmem_cell_get_from_lookup()
1363 return cell; in nvmem_cell_get_from_lookup()
1376 struct nvmem_cell_entry *iter, *cell = NULL; in nvmem_find_cell_entry_by_node() local
1381 cell = iter; in nvmem_find_cell_entry_by_node()
1387 return cell; in nvmem_find_cell_entry_by_node()
1419 struct nvmem_cell *cell; in of_nvmem_cell_get() local
1482 cell = nvmem_create_cell(cell_entry, id, cell_index); in of_nvmem_cell_get()
1483 if (IS_ERR(cell)) { in of_nvmem_cell_get()
1488 return cell; in of_nvmem_cell_get()
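For reference, of_nvmem_cell_get() resolves the requested id through the consumer node's "nvmem-cell-names"/"nvmem-cells" phandle properties, locates the provider's entry with nvmem_find_cell_entry_by_node(), and, as the hit at 1482 shows, wraps it in a struct nvmem_cell via nvmem_create_cell().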
1507 struct nvmem_cell *cell; in nvmem_cell_get() local
1510 cell = of_nvmem_cell_get(dev->of_node, id); in nvmem_cell_get()
1511 if (!IS_ERR(cell) || PTR_ERR(cell) == -EPROBE_DEFER) in nvmem_cell_get()
1512 return cell; in nvmem_cell_get()
1540 struct nvmem_cell **ptr, *cell; in devm_nvmem_cell_get() local
1546 cell = nvmem_cell_get(dev, id); in devm_nvmem_cell_get()
1547 if (!IS_ERR(cell)) { in devm_nvmem_cell_get()
1548 *ptr = cell; in devm_nvmem_cell_get()
1554 return cell; in devm_nvmem_cell_get()
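devm_nvmem_cell_get() wraps nvmem_cell_get() with a devres action, so the reference is dropped automatically on driver detach (ending in nvmem_cell_put()). A consumer-probe sketch, with a hypothetical cell name; note that nvmem_cell_get() returns -EPROBE_DEFER while the provider is not yet registered, which dev_err_probe() handles quietly:

#include <linux/device.h>
#include <linux/err.h>
#include <linux/nvmem-consumer.h>

static int example_probe_get_cell(struct device *dev)
{
	struct nvmem_cell *cell;

	cell = devm_nvmem_cell_get(dev, "calibration");	/* hypothetical cell name */
	if (IS_ERR(cell))
		return dev_err_probe(dev, PTR_ERR(cell), "no calibration cell\n");

	/* The handle stays valid for the lifetime of the device binding. */
	return 0;
}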
1575 void devm_nvmem_cell_put(struct device *dev, struct nvmem_cell *cell) in devm_nvmem_cell_put() argument
1580 devm_nvmem_cell_match, cell); in devm_nvmem_cell_put()
1591 void nvmem_cell_put(struct nvmem_cell *cell) in nvmem_cell_put() argument
1593 struct nvmem_device *nvmem = cell->entry->nvmem; in nvmem_cell_put()
1595 if (cell->id) in nvmem_cell_put()
1596 kfree_const(cell->id); in nvmem_cell_put()
1598 kfree(cell); in nvmem_cell_put()
1604 static void nvmem_shift_read_buffer_in_place(struct nvmem_cell_entry *cell, void *buf) in nvmem_shift_read_buffer_in_place() argument
1608 int bit_offset = cell->bit_offset; in nvmem_shift_read_buffer_in_place()
1621 for (i = 1; i < cell->bytes; i++) { in nvmem_shift_read_buffer_in_place()
1628 memmove(p, b, cell->bytes - bytes_offset); in nvmem_shift_read_buffer_in_place()
1629 p += cell->bytes - 1; in nvmem_shift_read_buffer_in_place()
1632 p += cell->bytes - 1; in nvmem_shift_read_buffer_in_place()
1636 extra = cell->bytes - DIV_ROUND_UP(cell->nbits, BITS_PER_BYTE); in nvmem_shift_read_buffer_in_place()
1641 if (cell->nbits % BITS_PER_BYTE) in nvmem_shift_read_buffer_in_place()
1642 *p &= GENMASK((cell->nbits % BITS_PER_BYTE) - 1, 0); in nvmem_shift_read_buffer_in_place()
1646 struct nvmem_cell_entry *cell, in __nvmem_cell_read() argument
1651 rc = nvmem_reg_read(nvmem, cell->offset, buf, cell->raw_len); in __nvmem_cell_read()
1657 if (cell->bit_offset || cell->nbits) in __nvmem_cell_read()
1658 nvmem_shift_read_buffer_in_place(cell, buf); in __nvmem_cell_read()
1660 if (cell->read_post_process) { in __nvmem_cell_read()
1661 rc = cell->read_post_process(cell->priv, id, index, in __nvmem_cell_read()
1662 cell->offset, buf, cell->raw_len); in __nvmem_cell_read()
1668 *len = cell->bytes; in __nvmem_cell_read()
1683 void *nvmem_cell_read(struct nvmem_cell *cell, size_t *len) in nvmem_cell_read() argument
1685 struct nvmem_cell_entry *entry = cell->entry; in nvmem_cell_read()
1697 rc = __nvmem_cell_read(nvmem, cell->entry, buf, len, cell->id, cell->index); in nvmem_cell_read()
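nvmem_cell_read() returns a kmalloc'ed buffer of cell->bytes that the caller owns. A one-shot read sketch, with a hypothetical cell name:

#include <linux/err.h>
#include <linux/nvmem-consumer.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/types.h>

static int example_read_mac(struct device *dev, u8 *mac, size_t mac_len)
{
	struct nvmem_cell *cell;
	size_t len;
	void *buf;

	cell = nvmem_cell_get(dev, "mac-address");	/* hypothetical cell name */
	if (IS_ERR(cell))
		return PTR_ERR(cell);

	buf = nvmem_cell_read(cell, &len);
	nvmem_cell_put(cell);		/* the data buffer outlives the cell handle */
	if (IS_ERR(buf))
		return PTR_ERR(buf);

	if (len < mac_len) {
		kfree(buf);
		return -EINVAL;
	}

	memcpy(mac, buf, mac_len);
	kfree(buf);			/* caller must free the buffer returned by nvmem_cell_read() */
	return 0;
}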
1707 static void *nvmem_cell_prepare_write_buffer(struct nvmem_cell_entry *cell, in nvmem_cell_prepare_write_buffer() argument
1710 struct nvmem_device *nvmem = cell->nvmem; in nvmem_cell_prepare_write_buffer()
1711 int i, rc, nbits, bit_offset = cell->bit_offset; in nvmem_cell_prepare_write_buffer()
1714 nbits = cell->nbits; in nvmem_cell_prepare_write_buffer()
1715 buf = kzalloc(cell->bytes, GFP_KERNEL); in nvmem_cell_prepare_write_buffer()
1727 rc = nvmem_reg_read(nvmem, cell->offset, &v, 1); in nvmem_cell_prepare_write_buffer()
1733 for (i = 1; i < cell->bytes; i++) { in nvmem_cell_prepare_write_buffer()
1747 cell->offset + cell->bytes - 1, &v, 1); in nvmem_cell_prepare_write_buffer()
1760 static int __nvmem_cell_entry_write(struct nvmem_cell_entry *cell, void *buf, size_t len) in __nvmem_cell_entry_write() argument
1762 struct nvmem_device *nvmem = cell->nvmem; in __nvmem_cell_entry_write()
1766 (cell->bit_offset == 0 && len != cell->bytes)) in __nvmem_cell_entry_write()
1774 if (cell->read_post_process) in __nvmem_cell_entry_write()
1777 if (cell->bit_offset || cell->nbits) { in __nvmem_cell_entry_write()
1778 if (len != BITS_TO_BYTES(cell->nbits) && len != cell->bytes) in __nvmem_cell_entry_write()
1780 buf = nvmem_cell_prepare_write_buffer(cell, buf, len); in __nvmem_cell_entry_write()
1785 rc = nvmem_reg_write(nvmem, cell->offset, buf, cell->bytes); in __nvmem_cell_entry_write()
1788 if (cell->bit_offset || cell->nbits) in __nvmem_cell_entry_write()
1806 int nvmem_cell_write(struct nvmem_cell *cell, void *buf, size_t len) in nvmem_cell_write() argument
1808 return __nvmem_cell_entry_write(cell->entry, buf, len); in nvmem_cell_write()
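On the write side, __nvmem_cell_entry_write() rejects cells with a read_post_process hook and, for bit-level cells, builds a read-modify-write buffer so bits outside bit_offset/nbits are preserved. A consumer write sketch, with a hypothetical cell name:

#include <linux/err.h>
#include <linux/nvmem-consumer.h>
#include <linux/types.h>

static int example_store_boot_count(struct device *dev, u32 value)
{
	struct nvmem_cell *cell;
	int ret;

	cell = nvmem_cell_get(dev, "boot-count");	/* hypothetical cell name */
	if (IS_ERR(cell))
		return PTR_ERR(cell);

	/* On success nvmem_cell_write() returns the number of bytes written. */
	ret = nvmem_cell_write(cell, &value, sizeof(value));
	nvmem_cell_put(cell);

	return ret < 0 ? ret : 0;
}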
1816 struct nvmem_cell *cell; in nvmem_cell_read_common() local
1820 cell = nvmem_cell_get(dev, cell_id); in nvmem_cell_read_common()
1821 if (IS_ERR(cell)) in nvmem_cell_read_common()
1822 return PTR_ERR(cell); in nvmem_cell_read_common()
1824 buf = nvmem_cell_read(cell, &len); in nvmem_cell_read_common()
1826 nvmem_cell_put(cell); in nvmem_cell_read_common()
1831 nvmem_cell_put(cell); in nvmem_cell_read_common()
1836 nvmem_cell_put(cell); in nvmem_cell_read_common()
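nvmem_cell_read_common() backs the fixed-size helpers such as nvmem_cell_read_u32(). A sketch of the typical call, with a hypothetical cell name:

#include <linux/nvmem-consumer.h>
#include <linux/types.h>

static int example_read_speed_bin(struct device *dev, u32 *speed_bin)
{
	/* Fails with -EINVAL if the cell is not exactly sizeof(u32) bytes long. */
	return nvmem_cell_read_u32(dev, "speed-bin", speed_bin);	/* hypothetical cell */
}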
1905 struct nvmem_cell *cell; in nvmem_cell_read_variable_common() local
1909 cell = nvmem_cell_get(dev, cell_id); in nvmem_cell_read_variable_common()
1910 if (IS_ERR(cell)) in nvmem_cell_read_variable_common()
1911 return cell; in nvmem_cell_read_variable_common()
1913 nbits = cell->entry->nbits; in nvmem_cell_read_variable_common()
1914 buf = nvmem_cell_read(cell, len); in nvmem_cell_read_variable_common()
1915 nvmem_cell_put(cell); in nvmem_cell_read_variable_common()
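nvmem_cell_read_variable_common() likewise backs the variable-length helpers. A sketch using nvmem_cell_read_variable_le_u32(), which accepts a cell of 1 to 4 bytes and zero-extends the little-endian value (cell name hypothetical):

#include <linux/nvmem-consumer.h>
#include <linux/types.h>

static int example_read_soc_revision(struct device *dev, u32 *rev)
{
	return nvmem_cell_read_variable_le_u32(dev, "soc-revision", rev);	/* hypothetical cell */
}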
2009 struct nvmem_cell_entry cell; in nvmem_device_cell_read() local
2016 rc = nvmem_cell_info_to_nvmem_cell_entry_nodup(nvmem, info, &cell); in nvmem_device_cell_read()
2020 rc = __nvmem_cell_read(nvmem, &cell, buf, &len, NULL, 0); in nvmem_device_cell_read()
2040 struct nvmem_cell_entry cell; in nvmem_device_cell_write() local
2046 rc = nvmem_cell_info_to_nvmem_cell_entry_nodup(nvmem, info, &cell); in nvmem_device_cell_write()
2050 return __nvmem_cell_entry_write(&cell, buf, cell.bytes); in nvmem_device_cell_write()
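nvmem_device_cell_read()/nvmem_device_cell_write() operate on an ad-hoc nvmem_cell_info built on the caller's stack rather than on a registered cell, as the _nodup calls above show. A minimal device-level read sketch, assuming a device-tree consumer and hypothetical name/offset:

#include <linux/err.h>
#include <linux/nvmem-consumer.h>
#include <linux/types.h>

static int example_device_cell_read(struct device *dev, u8 *buf)
{
	struct nvmem_device *nvmem;
	struct nvmem_cell_info info = {
		.name	= "scratch",	/* hypothetical */
		.offset	= 0x10,		/* hypothetical byte offset */
		.bytes	= 4,		/* caller's buf must hold at least this much */
	};
	ssize_t ret;

	/* A NULL name selects the consumer node's first "nvmem" phandle. */
	nvmem = devm_nvmem_device_get(dev, NULL);
	if (IS_ERR(nvmem))
		return PTR_ERR(nvmem);

	ret = nvmem_device_cell_read(nvmem, &info, buf);

	return ret < 0 ? ret : 0;
}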