
Searched for refs:iod (results 1 – 18 of 18), sorted by relevance

/linux/drivers/pinctrl/ti/
pinctrl-ti-iodelay.c
209 static int ti_iodelay_pinconf_set(struct ti_iodelay_device *iod, in ti_iodelay_pinconf_set() argument
212 const struct ti_iodelay_reg_data *reg = iod->reg_data; in ti_iodelay_pinconf_set()
213 struct ti_iodelay_reg_values *ival = &iod->reg_init_conf_values; in ti_iodelay_pinconf_set()
214 struct device *dev = iod->dev; in ti_iodelay_pinconf_set()
267 r = regmap_update_bits(iod->regmap, cfg->offset, reg_mask, reg_val); in ti_iodelay_pinconf_set()
284 struct ti_iodelay_device *iod = data; in ti_iodelay_pinconf_deinit_dev() local
285 const struct ti_iodelay_reg_data *reg = iod->reg_data; in ti_iodelay_pinconf_deinit_dev()
288 regmap_update_bits(iod->regmap, reg->reg_global_lock_offset, in ti_iodelay_pinconf_deinit_dev()
300 static int ti_iodelay_pinconf_init_dev(struct ti_iodelay_device *iod) in ti_iodelay_pinconf_init_dev() argument
302 const struct ti_iodelay_reg_data *reg = iod->reg_data; in ti_iodelay_pinconf_init_dev()
[all …]
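
As illustration only, here is a minimal kernel-context sketch of the regmap_update_bits() pattern the hits above rely on: only the bits selected by a mask are rewritten, so the rest of the IODELAY register is preserved. The macro, function, and parameter names below are hypothetical, not taken from the driver.

#include <linux/bits.h>
#include <linux/bitfield.h>
#include <linux/regmap.h>

/* Hypothetical field layout; the real driver derives masks and offsets from
 * its per-SoC ti_iodelay_reg_data tables. */
#define EXAMPLE_DELAY_MASK	GENMASK(11, 0)

static int example_set_delay(struct regmap *map, unsigned int offset,
			     unsigned int delay)
{
	/* Read-modify-write: bits outside the mask keep their current value,
	 * which is why update_bits is used rather than a plain regmap_write(). */
	return regmap_update_bits(map, offset, EXAMPLE_DELAY_MASK,
				  FIELD_PREP(EXAMPLE_DELAY_MASK, delay));
}
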
/linux/drivers/soc/rockchip/
io-domain.c
65 struct rockchip_iodomain *iod; member
74 void (*init)(struct rockchip_iodomain *iod);
88 struct rockchip_iodomain *iod = supply->iod; in rk3568_iodomain_write() local
102 regmap_write(iod->grf, RK3568_PMU_GRF_IO_VSEL2, val0); in rk3568_iodomain_write()
103 regmap_write(iod->grf, RK3568_PMU_GRF_IO_VSEL2, val1); in rk3568_iodomain_write()
117 regmap_write(iod->grf, RK3568_PMU_GRF_IO_VSEL0, val0); in rk3568_iodomain_write()
118 regmap_write(iod->grf, RK3568_PMU_GRF_IO_VSEL1, val1); in rk3568_iodomain_write()
130 struct rockchip_iodomain *iod = supply->iod; in rockchip_iodomain_write() local
141 ret = regmap_write(iod->grf, iod->soc_data->grf_offset, val); in rockchip_iodomain_write()
143 dev_err(iod->dev, "Couldn't write to GRF\n"); in rockchip_iodomain_write()
[all …]
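
A sketch, with made-up type and field names, of the back-pointer pattern visible in the io-domain.c hits: each supply stores a pointer to its parent iodomain so a callback that only receives the supply can still reach the shared GRF regmap and device.

#include <linux/device.h>
#include <linux/regmap.h>

struct example_iodomain {
	struct device *dev;
	struct regmap *grf;		/* shared "general register file" regmap */
};

struct example_supply {
	struct example_iodomain *iod;	/* back-pointer to the parent */
};

static int example_supply_write(struct example_supply *supply, u32 val)
{
	struct example_iodomain *iod = supply->iod;
	int ret;

	/* 0x140 is an illustrative GRF offset, not a real RK3568 register. */
	ret = regmap_write(iod->grf, 0x140, val);
	if (ret)
		dev_err(iod->dev, "Couldn't write to GRF\n");
	return ret;
}
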
/linux/drivers/nvme/target/
pci-epf.c
668 static inline const char *nvmet_pci_epf_iod_name(struct nvmet_pci_epf_iod *iod) in nvmet_pci_epf_iod_name() argument
670 return nvme_opcode_str(iod->sq->qid, iod->cmd.common.opcode); in nvmet_pci_epf_iod_name()
679 struct nvmet_pci_epf_iod *iod; in nvmet_pci_epf_alloc_iod() local
681 iod = mempool_alloc(&ctrl->iod_pool, GFP_KERNEL); in nvmet_pci_epf_alloc_iod()
682 if (unlikely(!iod)) in nvmet_pci_epf_alloc_iod()
685 memset(iod, 0, sizeof(*iod)); in nvmet_pci_epf_alloc_iod()
686 iod->req.cmd = &iod->cmd; in nvmet_pci_epf_alloc_iod()
687 iod->req.cqe = &iod->cqe; in nvmet_pci_epf_alloc_iod()
688 iod->req.port = ctrl->port; in nvmet_pci_epf_alloc_iod()
689 iod->ctrl = ctrl; in nvmet_pci_epf_alloc_iod()
[all …]
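
The pci-epf.c hits show a per-command context (iod) being taken from a mempool and re-initialised. Below is a reduced, hypothetical version of that allocation pattern; the real nvmet_pci_epf_iod carries much more state.

#include <linux/gfp.h>
#include <linux/mempool.h>
#include <linux/string.h>

struct example_iod {
	void *ctrl;
	int qid;
};

static struct example_iod *example_alloc_iod(mempool_t *pool, void *ctrl)
{
	struct example_iod *iod;

	/* A mempool falls back to a pre-filled reserve, so allocation here
	 * only fails if the pool was never populated. */
	iod = mempool_alloc(pool, GFP_KERNEL);
	if (unlikely(!iod))
		return NULL;

	/* Pool elements are recycled, so clear stale state before reuse. */
	memset(iod, 0, sizeof(*iod));
	iod->ctrl = ctrl;
	return iod;
}
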
fc.c
105 struct nvmet_fc_ls_iod *iod; member
251 struct nvmet_fc_ls_iod *iod);
528 struct nvmet_fc_ls_iod *iod; in nvmet_fc_alloc_ls_iodlist() local
531 iod = kcalloc(NVMET_LS_CTX_COUNT, sizeof(struct nvmet_fc_ls_iod), in nvmet_fc_alloc_ls_iodlist()
533 if (!iod) in nvmet_fc_alloc_ls_iodlist()
536 tgtport->iod = iod; in nvmet_fc_alloc_ls_iodlist()
538 for (i = 0; i < NVMET_LS_CTX_COUNT; iod++, i++) { in nvmet_fc_alloc_ls_iodlist()
539 INIT_WORK(&iod->work, nvmet_fc_handle_ls_rqst_work); in nvmet_fc_alloc_ls_iodlist()
540 iod->tgtport = tgtport; in nvmet_fc_alloc_ls_iodlist()
541 list_add_tail(&iod->ls_rcv_list, &tgtport->ls_rcv_list); in nvmet_fc_alloc_ls_iodlist()
[all …]
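
A hedged sketch of the allocation pattern in the fc.c hits: one kcalloc() provides a fixed array of LS contexts, and each element is given a work item and threaded onto a free list. All names below are illustrative.

#include <linux/slab.h>
#include <linux/workqueue.h>
#include <linux/list.h>

#define EXAMPLE_CTX_COUNT 16		/* stands in for NVMET_LS_CTX_COUNT */

struct example_ls_iod {
	struct work_struct work;
	struct list_head rcv_list;
	void *tgtport;
};

static void example_handle_ls_work(struct work_struct *work)
{
	/* request handling would go here */
}

static struct example_ls_iod *example_alloc_iodlist(struct list_head *free_list,
						    void *tgtport)
{
	struct example_ls_iod *iod, *first;
	int i;

	first = iod = kcalloc(EXAMPLE_CTX_COUNT, sizeof(*iod), GFP_KERNEL);
	if (!iod)
		return NULL;

	/* free_list must already be initialised by the caller */
	for (i = 0; i < EXAMPLE_CTX_COUNT; iod++, i++) {
		INIT_WORK(&iod->work, example_handle_ls_work);
		iod->tgtport = tgtport;
		list_add_tail(&iod->rcv_list, free_list);
	}
	return first;
}
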
/linux/tools/testing/selftests/ublk/
file_backed.c
5 static enum io_uring_op ublk_to_uring_op(const struct ublksrv_io_desc *iod, int zc) in ublk_to_uring_op() argument
7 unsigned ublk_op = ublksrv_get_op(iod); in ublk_to_uring_op()
17 const struct ublksrv_io_desc *iod, int tag) in loop_queue_flush_io() argument
19 unsigned ublk_op = ublksrv_get_op(iod); in loop_queue_flush_io()
31 const struct ublksrv_io_desc *iod, int tag) in loop_queue_tgt_rw_io() argument
33 unsigned ublk_op = ublksrv_get_op(iod); in loop_queue_tgt_rw_io()
36 enum io_uring_op op = ublk_to_uring_op(iod, zc | auto_zc); in loop_queue_tgt_rw_io()
48 iod->nr_sectors << 9, in loop_queue_tgt_rw_io()
49 iod->start_sector << 9); in loop_queue_tgt_rw_io()
66 iod in loop_queue_tgt_rw_io()
79 const struct ublksrv_io_desc *iod = ublk_get_iod(q, tag); loop_queue_tgt_io() local
[all …]
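
The selftest converts the descriptor's 512-byte sector fields into byte lengths and offsets with a shift by 9 (nr_sectors << 9, start_sector << 9). A standalone, runnable illustration of that arithmetic, using a cut-down stand-in for struct ublksrv_io_desc:

#include <stdint.h>
#include <stdio.h>

/* Reduced stand-in for the fields used above; the real struct ublksrv_io_desc
 * comes from <linux/ublk_cmd.h>. */
struct example_io_desc {
	uint32_t nr_sectors;
	uint64_t start_sector;
};

int main(void)
{
	struct example_io_desc iod = { .nr_sectors = 8, .start_sector = 2048 };

	/* ublk describes I/O in 512-byte sectors, so shifting by 9 yields the
	 * byte count and byte offset handed to the read/write SQEs. */
	printf("len=%u bytes, off=%llu bytes\n",
	       iod.nr_sectors << 9,
	       (unsigned long long)(iod.start_sector << 9));
	return 0;
}
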
stripe.c
34 const struct ublksrv_io_desc *iod) in calculate_nr_vec() argument
38 loff_t start = iod->start_sector; in calculate_nr_vec()
39 loff_t end = start + iod->nr_sectors; in calculate_nr_vec()
45 const struct ublksrv_io_desc *iod) in alloc_stripe_array() argument
47 unsigned nr_vecs = calculate_nr_vec(conf, iod); in alloc_stripe_array()
73 const struct ublksrv_io_desc *iod, struct stripe_array *s, void *base) in calculate_stripe_array() argument
78 off64_t start = iod->start_sector; in calculate_stripe_array()
79 off64_t end = start + iod->nr_sectors; in calculate_stripe_array()
115 const struct ublksrv_io_desc *iod, int zc) in stripe_to_uring_op() argument
117 unsigned ublk_op = ublksrv_get_op(iod); in stripe_to_uring_op()
127 stripe_queue_tgt_rw_io(struct ublk_thread * t,struct ublk_queue * q,const struct ublksrv_io_desc * iod,int tag) stripe_queue_tgt_rw_io() argument
181 handle_flush(struct ublk_thread * t,struct ublk_queue * q,const struct ublksrv_io_desc * iod,int tag) handle_flush() argument
199 const struct ublksrv_io_desc *iod = ublk_get_iod(q, tag); stripe_queue_tgt_io() local
237 const struct ublksrv_io_desc *iod = ublk_get_iod(q, tag); ublk_stripe_io_done() local
[all …]
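
As a rough illustration of the kind of arithmetic calculate_nr_vec() performs (this is generic striping math, not the selftest's exact formula): with chunks of 1 << shift sectors distributed round-robin over nr_files members, the number of per-member vectors an I/O needs is bounded by the chunks it spans and by the member count.

#include <stdint.h>
#include <stdio.h>

static unsigned int calc_nr_vecs(uint64_t start, uint32_t nr_sectors,
				 unsigned int shift, unsigned int nr_files)
{
	uint64_t first_chunk = start >> shift;
	uint64_t last_chunk = (start + nr_sectors - 1) >> shift;
	uint64_t chunks = last_chunk - first_chunk + 1;

	return chunks < nr_files ? (unsigned int)chunks : nr_files;
}

int main(void)
{
	/* 8 sectors starting at sector 60, 64-sector chunks, 4 members:
	 * the I/O crosses one chunk boundary, so it needs 2 vectors. */
	printf("%u\n", calc_nr_vecs(60, 8, 6, 4));
	return 0;
}
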
null.c
45 static void __setup_nop_io(int tag, const struct ublksrv_io_desc *iod, in __setup_nop_io() argument
48 unsigned ublk_op = ublksrv_get_op(iod); in __setup_nop_io()
54 sqe->len = iod->nr_sectors << 9; /* injected result */ in __setup_nop_io()
61 const struct ublksrv_io_desc *iod = ublk_get_iod(q, tag); in null_queue_zc_io() local
71 __setup_nop_io(tag, iod, sqe[1], q->q_id); in null_queue_zc_io()
84 const struct ublksrv_io_desc *iod = ublk_get_iod(q, tag); in null_queue_auto_zc_io() local
88 __setup_nop_io(tag, iod, sqe[0], q->q_id); in null_queue_auto_zc_io()
118 const struct ublksrv_io_desc *iod = ublk_get_iod(q, tag); in ublk_null_queue_io() local
123 if (auto_zc && !ublk_io_auto_zc_fallback(iod)) in ublk_null_queue_io()
128 ublk_complete_io(t, q, tag, iod->nr_sectors << 9); in ublk_null_queue_io()
fault_inject.c
44 const struct ublksrv_io_desc *iod = ublk_get_iod(q, tag); in ublk_fault_inject_queue_io() local
52 sqe->user_data = build_user_data(tag, ublksrv_get_op(iod), 0, q->q_id, 1); in ublk_fault_inject_queue_io()
64 const struct ublksrv_io_desc *iod = ublk_get_iod(q, tag); in ublk_fault_inject_tgt_io_done() local
70 ublk_complete_io(t, q, tag, iod->nr_sectors << 9); in ublk_fault_inject_tgt_io_done()
kublk.h
206 static inline int ublk_io_auto_zc_fallback(const struct ublksrv_io_desc *iod)
208 return !!(iod->op_flags & UBLK_IO_F_NEED_REG_BUF);
209 ublk_io_auto_zc_fallback(const struct ublksrv_io_desc * iod) ublk_io_auto_zc_fallback() argument
/linux/drivers/nvme/host/
pci.c
521 struct nvme_iod *iod = blk_mq_rq_to_pdu(req); in nvme_pci_init_request() local
524 nvme_req(req)->cmd = &iod->cmd; in nvme_pci_init_request()
653 struct nvme_iod *iod = blk_mq_rq_to_pdu(req); in nvme_pci_avg_seg_size() local
656 if (blk_rq_dma_map_coalesce(&iod->dma_state)) in nvme_pci_avg_seg_size()
664 struct nvme_iod *iod) in nvme_dma_pool() argument
666 if (iod->flags & IOD_SMALL_DESCRIPTOR) in nvme_dma_pool()
693 struct nvme_iod *iod = blk_mq_rq_to_pdu(req); in nvme_free_descriptors() local
694 dma_addr_t dma_addr = nvme_pci_first_desc_dma_addr(&iod->cmd); in nvme_free_descriptors()
697 if (iod->nr_descriptors == 1) { in nvme_free_descriptors()
698 dma_pool_free(nvme_dma_pool(nvmeq, iod), iod->descriptors[0], in nvme_free_descriptors()
[all …]
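
The pci.c hits repeatedly call blk_mq_rq_to_pdu() to reach the per-request nvme_iod. A minimal kernel-context sketch of that pattern with a hypothetical context struct: the PDU lives directly behind struct request (sized by blk_mq_tag_set.cmd_size), so the helper is pointer arithmetic, not an allocation.

#include <linux/blk-mq.h>

struct example_iod {
	unsigned int flags;
	unsigned int nr_descriptors;
};

static struct example_iod *example_rq_to_iod(struct request *req)
{
	/* returns the driver PDU that was allocated together with the request */
	return blk_mq_rq_to_pdu(req);
}
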
/linux/drivers/clk/
clk-versaclock7.c
172 struct vc7_iod_data *iod; member
379 map->src.iod = &vc7->clk_iod[0]; in vc7_get_bank_clk()
383 map->src.iod = &vc7->clk_iod[1]; in vc7_get_bank_clk()
400 map->src.iod = &vc7->clk_iod[1]; in vc7_get_bank_clk()
437 map->src.iod = &vc7->clk_iod[2]; in vc7_get_bank_clk()
463 map->src.iod = &vc7->clk_iod[2]; in vc7_get_bank_clk()
467 map->src.iod = &vc7->clk_iod[3]; in vc7_get_bank_clk()
490 map->src.iod = &vc7->clk_iod[2]; in vc7_get_bank_clk()
494 map->src.iod = &vc7->clk_iod[3]; in vc7_get_bank_clk()
964 struct vc7_iod_data *iod = container_of(hw, struct vc7_iod_data, hw); in vc7_iod_recalc_rate() local
[all …]
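
The vc7_iod_recalc_rate() hit recovers the driver's per-output data from the clk_hw the framework passes in. A sketch of that container_of() pattern with made-up names and a made-up divider calculation:

#include <linux/clk-provider.h>
#include <linux/container_of.h>

struct example_iod_data {
	unsigned int divider;
	struct clk_hw hw;	/* embedded, registered with the clk framework */
};

static unsigned long example_iod_recalc_rate(struct clk_hw *hw,
					     unsigned long parent_rate)
{
	/* Walk back from the embedded clk_hw to the containing driver data. */
	struct example_iod_data *iod =
		container_of(hw, struct example_iod_data, hw);

	return parent_rate / (iod->divider ?: 1);
}
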
/linux/include/uapi/linux/
ublk_cmd.h
435 static inline __u8 ublksrv_get_op(const struct ublksrv_io_desc *iod) in ublksrv_get_op() argument
437 return iod->op_flags & 0xff; in ublksrv_get_op()
440 static inline __u32 ublksrv_get_flags(const struct ublksrv_io_desc *iod) in ublksrv_get_flags() argument
442 return iod->op_flags >> 8; in ublksrv_get_flags()
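
ublksrv_get_op() and ublksrv_get_flags() simply split op_flags into a low opcode byte and the remaining flag bits. A small runnable illustration of that split (the helpers are restated locally so the snippet builds without the UAPI header; real code should include <linux/ublk_cmd.h> and use them directly):

#include <stdint.h>
#include <stdio.h>

static inline uint8_t example_get_op(uint32_t op_flags)
{
	return op_flags & 0xff;		/* low byte: operation code */
}

static inline uint32_t example_get_flags(uint32_t op_flags)
{
	return op_flags >> 8;		/* upper bits: per-I/O flags */
}

int main(void)
{
	uint32_t op_flags = (3u << 8) | 1u;	/* flags = 3, op = 1 (illustrative) */

	printf("op=%u flags=%u\n", example_get_op(op_flags),
	       example_get_flags(op_flags));
	return 0;
}
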
/linux/Documentation/admin-guide/perf/
fujitsu_uncore_pmu.rst
10 mac_iod<iod>_mac<mac>_ch<ch>.
12 pci_iod<iod>_pci<pci>.
15 options in sysfs, see /sys/bus/event_sources/devices/mac_iod<iod>_mac<mac>_ch<ch>/
16 and /sys/bus/event_sources/devices/pci_iod<iod>_pci<pci>/.
/linux/arch/alpha/kernel/
io.c
661 u16 __iomem *iod = (u16 __iomem *) d; in scr_memcpyw() local
673 __raw_writew(tmp, iod++); in scr_memcpyw()
680 memcpy_toio(iod, s, count); in scr_memcpyw()
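
scr_memcpyw() above copies into screen (MMIO) memory, mixing 16-bit __raw_writew() accesses with a bulk memcpy_toio(). A minimal kernel-context sketch of the word-by-word half of that pattern, with illustrative names (nwords counts 16-bit words):

#include <linux/io.h>
#include <linux/types.h>

static void example_copy_words_toio(u16 __iomem *iod, const u16 *s, int nwords)
{
	/* MMIO must go through the io accessors, never plain stores. */
	while (nwords-- > 0)
		__raw_writew(*s++, iod++);
}
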
/linux/drivers/block/
ublk_drv.c
469 struct ublksrv_io_desc *iod = ublk_get_iod(ubq, req->tag); in ublk_setup_iod_zoned() local
500 iod->op_flags = ublk_op | ublk_req_build_flags(req); in ublk_setup_iod_zoned()
501 iod->nr_zones = desc->nr_zones; in ublk_setup_iod_zoned()
502 iod->start_sector = desc->sector; in ublk_setup_iod_zoned()
514 iod->op_flags = ublk_op | ublk_req_build_flags(req); in ublk_setup_iod_zoned()
515 iod->nr_sectors = blk_rq_sectors(req); in ublk_setup_iod_zoned()
516 iod->start_sector = blk_rq_pos(req); in ublk_setup_iod_zoned()
517 iod->addr = io->buf.addr; in ublk_setup_iod_zoned()
1057 struct ublksrv_io_desc *iod = ublk_get_iod(ubq, req->tag); in ublk_setup_iod() local
1084 iod->op_flags = ublk_op | ublk_req_build_flags(req); in ublk_setup_iod()
[all …]
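
ublk_setup_iod() and its zoned variant translate a block-layer request into the descriptor the userspace server reads. A reduced, hypothetical sketch of that translation (the real descriptor is the UAPI struct ublksrv_io_desc, and the real code also folds request flags into op_flags):

#include <linux/blk-mq.h>
#include <linux/types.h>

struct example_io_desc {
	__u32 op_flags;
	__u32 nr_sectors;
	__u64 start_sector;
	__u64 addr;
};

static void example_setup_iod(struct example_io_desc *iod, struct request *req,
			      __u32 ublk_op, __u64 buf_addr)
{
	iod->op_flags = ublk_op;		/* opcode in the low byte */
	iod->nr_sectors = blk_rq_sectors(req);	/* length in 512-byte sectors */
	iod->start_sector = blk_rq_pos(req);	/* offset in 512-byte sectors */
	iod->addr = buf_addr;			/* userspace buffer address */
}
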
/linux/tools/perf/util/
synthetic-events.c
777 struct io_dir iod; in __event__synthesize_thread() local
812 io_dir__init(&iod, open(filename, O_CLOEXEC | O_DIRECTORY | O_RDONLY)); in __event__synthesize_thread()
813 if (iod.dirfd < 0) in __event__synthesize_thread()
816 while ((dent = io_dir__readdir(&iod)) != NULL) { in __event__synthesize_thread()
858 close(iod.dirfd); in __event__synthesize_thread()
machine.c
1383 struct io_dir iod; in maps__set_modules_path_dir() local
1387 io_dir__init(&iod, open(path, O_CLOEXEC | O_DIRECTORY | O_RDONLY)); in maps__set_modules_path_dir()
1388 if (iod.dirfd < 0) { in maps__set_modules_path_dir()
1396 while ((dent = io_dir__readdir(&iod)) != NULL) { in maps__set_modules_path_dir()
1397 if (io_dir__is_dir(&iod, dent)) { in maps__set_modules_path_dir()
1440 close(iod.dirfd); in maps__set_modules_path_dir()
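
Both perf hits follow the same shape: open a directory, iterate its entries, close it. perf's io_dir helpers are internal, buffer-backed replacements for the libc calls, so the runnable sketch below uses plain POSIX opendir()/readdir() to show the equivalent loop.

#include <dirent.h>
#include <stdio.h>

static void example_list_dir(const char *path)
{
	DIR *dir = opendir(path);
	struct dirent *dent;

	if (!dir)
		return;

	while ((dent = readdir(dir)) != NULL)
		printf("%s\n", dent->d_name);

	closedir(dir);
}

int main(void)
{
	example_list_dir("/proc");
	return 0;
}
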
/linux/Documentation/admin-guide/
devices.txt
2625 2 = /dev/vx/iod Volume i/o daemon access node