// SPDX-License-Identifier: GPL-2.0-only
/*
 * i2c-stm32.c
 *
 * Copyright (C) M'boumba Cedric Madianga 2017
 * Author: M'boumba Cedric Madianga <cedric.madianga@gmail.com>
 */

#include "i2c-stm32.h"

/* Functions for DMA support */
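/**
 * stm32_i2c_dma_request() - allocate and configure the I2C TX and RX DMA channels
 * @dev: device requesting the channels
 * @phy_addr: physical base address of the I2C controller registers
 * @txdr_offset: offset of the transmit data register from @phy_addr
 * @rxdr_offset: offset of the receive data register from @phy_addr
 *
 * Returns the allocated struct stm32_i2c_dma on success, an ERR_PTR() on failure.
 */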
struct stm32_i2c_dma *stm32_i2c_dma_request(struct device *dev,
					    dma_addr_t phy_addr,
					    u32 txdr_offset,
					    u32 rxdr_offset)
{
	struct stm32_i2c_dma *dma;
	struct dma_slave_config dma_sconfig;
	int ret;

	dma = devm_kzalloc(dev, sizeof(*dma), GFP_KERNEL);
	if (!dma)
		return ERR_PTR(-ENOMEM);

	/* Request and configure I2C TX dma channel */
	dma->chan_tx = dma_request_chan(dev, "tx");
	if (IS_ERR(dma->chan_tx)) {
		ret = PTR_ERR(dma->chan_tx);
		if (ret != -ENODEV)
			dev_err_probe(dev, ret, "can't request DMA tx channel\n");

		goto fail_al;
	}

	memset(&dma_sconfig, 0, sizeof(dma_sconfig));
	dma_sconfig.dst_addr = phy_addr + txdr_offset;
	dma_sconfig.dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
	dma_sconfig.dst_maxburst = 1;
	dma_sconfig.direction = DMA_MEM_TO_DEV;
	ret = dmaengine_slave_config(dma->chan_tx, &dma_sconfig);
	if (ret < 0) {
		dev_err(dev, "can't configure tx channel\n");
		goto fail_tx;
	}

	/* Request and configure I2C RX dma channel */
	dma->chan_rx = dma_request_chan(dev, "rx");
	if (IS_ERR(dma->chan_rx)) {
		ret = PTR_ERR(dma->chan_rx);
		if (ret != -ENODEV)
			dev_err_probe(dev, ret, "can't request DMA rx channel\n");

		goto fail_tx;
	}

	memset(&dma_sconfig, 0, sizeof(dma_sconfig));
	dma_sconfig.src_addr = phy_addr + rxdr_offset;
	dma_sconfig.src_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
	dma_sconfig.src_maxburst = 1;
	dma_sconfig.direction = DMA_DEV_TO_MEM;
	ret = dmaengine_slave_config(dma->chan_rx, &dma_sconfig);
	if (ret < 0) {
		dev_err(dev, "can't configure rx channel\n");
		goto fail_rx;
	}

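	/* Completion used to track the end of a DMA transfer */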
	init_completion(&dma->dma_complete);

	dev_info(dev, "using %s (tx) and %s (rx) for DMA transfers\n",
		 dma_chan_name(dma->chan_tx), dma_chan_name(dma->chan_rx));

	return dma;

fail_rx:
	dma_release_channel(dma->chan_rx);
fail_tx:
	dma_release_channel(dma->chan_tx);
fail_al:
	devm_kfree(dev, dma);

	return ERR_PTR(ret);
}

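/**
 * stm32_i2c_dma_free() - release the DMA channels and reset the DMA state
 * @dma: DMA data to clean up
 */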
void stm32_i2c_dma_free(struct stm32_i2c_dma *dma)
{
	dma->dma_buf = 0;
	dma->dma_len = 0;

	dma_release_channel(dma->chan_tx);
	dma->chan_tx = NULL;

	dma_release_channel(dma->chan_rx);
	dma->chan_rx = NULL;

	dma->chan_using = NULL;
}

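/**
 * stm32_i2c_prep_dma_xfer() - map the buffer and queue a DMA transaction
 * @dev: device used for the DMA mapping
 * @dma: DMA data
 * @rd_wr: true for a read (RX channel), false for a write (TX channel)
 * @len: transfer length in bytes
 * @buf: buffer to transfer
 * @callback: callback invoked by the DMA engine on completion
 * @dma_async_param: parameter passed to @callback
 *
 * Returns 0 on success, a negative error code otherwise.
 */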
int stm32_i2c_prep_dma_xfer(struct device *dev, struct stm32_i2c_dma *dma,
			    bool rd_wr, u32 len, u8 *buf,
			    dma_async_tx_callback callback,
			    void *dma_async_param)
{
	struct dma_async_tx_descriptor *txdesc;
	int ret;

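	/* Select the channel and direction according to the transfer type */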
	if (rd_wr) {
		dma->chan_using = dma->chan_rx;
		dma->dma_transfer_dir = DMA_DEV_TO_MEM;
		dma->dma_data_dir = DMA_FROM_DEVICE;
	} else {
		dma->chan_using = dma->chan_tx;
		dma->dma_transfer_dir = DMA_MEM_TO_DEV;
		dma->dma_data_dir = DMA_TO_DEVICE;
	}

	dma->dma_len = len;

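	/* Map the message buffer for DMA in the selected direction */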
	dma->dma_buf = dma_map_single(dev, buf, dma->dma_len,
				      dma->dma_data_dir);
	if (dma_mapping_error(dev, dma->dma_buf)) {
		dev_err(dev, "DMA mapping failed\n");
		return -EINVAL;
	}

	txdesc = dmaengine_prep_slave_single(dma->chan_using, dma->dma_buf,
					     dma->dma_len,
					     dma->dma_transfer_dir,
					     DMA_PREP_INTERRUPT);
	if (!txdesc) {
		dev_err(dev, "Not able to get desc for DMA xfer\n");
		ret = -EINVAL;
		goto err;
	}

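	/* Reset the completion before queueing the new transfer */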
	reinit_completion(&dma->dma_complete);

	txdesc->callback = callback;
	txdesc->callback_param = dma_async_param;
	ret = dma_submit_error(dmaengine_submit(txdesc));
	if (ret < 0) {
		dev_err(dev, "DMA submit failed\n");
		goto err;
	}

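	/* Start the pending DMA transaction */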
	dma_async_issue_pending(dma->chan_using);

	return 0;

err:
	dma_unmap_single(dev, dma->dma_buf, dma->dma_len,
			 dma->dma_data_dir);
	return ret;
}