/*
 * Extraction residue (gitweb page header), not part of this source file:
 *   splice: change exported internal do_splice() helper to take kernel offset
 *   [sfrench/cifs-2.6.git] / drivers / i2c / busses / i2c-stm32.c
 */
// SPDX-License-Identifier: GPL-2.0-only
/*
 * i2c-stm32.c
 *
 * Copyright (C) M'boumba Cedric Madianga 2017
 * Author: M'boumba Cedric Madianga <cedric.madianga@gmail.com>
 */

#include "i2c-stm32.h"

/* Functions for DMA support */
12 struct stm32_i2c_dma *stm32_i2c_dma_request(struct device *dev,
13                                             dma_addr_t phy_addr,
14                                             u32 txdr_offset,
15                                             u32 rxdr_offset)
16 {
17         struct stm32_i2c_dma *dma;
18         struct dma_slave_config dma_sconfig;
19         int ret;
20
21         dma = devm_kzalloc(dev, sizeof(*dma), GFP_KERNEL);
22         if (!dma)
23                 return ERR_PTR(-ENOMEM);
24
25         /* Request and configure I2C TX dma channel */
26         dma->chan_tx = dma_request_chan(dev, "tx");
27         if (IS_ERR(dma->chan_tx)) {
28                 ret = PTR_ERR(dma->chan_tx);
29                 if (ret != -EPROBE_DEFER)
30                         dev_err(dev, "can't request DMA tx channel\n");
31                 goto fail_al;
32         }
33
34         memset(&dma_sconfig, 0, sizeof(dma_sconfig));
35         dma_sconfig.dst_addr = phy_addr + txdr_offset;
36         dma_sconfig.dst_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
37         dma_sconfig.dst_maxburst = 1;
38         dma_sconfig.direction = DMA_MEM_TO_DEV;
39         ret = dmaengine_slave_config(dma->chan_tx, &dma_sconfig);
40         if (ret < 0) {
41                 dev_err(dev, "can't configure tx channel\n");
42                 goto fail_tx;
43         }
44
45         /* Request and configure I2C RX dma channel */
46         dma->chan_rx = dma_request_chan(dev, "rx");
47         if (IS_ERR(dma->chan_rx)) {
48                 ret = PTR_ERR(dma->chan_rx);
49                 if (ret != -EPROBE_DEFER)
50                         dev_err(dev, "can't request DMA rx channel\n");
51
52                 goto fail_tx;
53         }
54
55         memset(&dma_sconfig, 0, sizeof(dma_sconfig));
56         dma_sconfig.src_addr = phy_addr + rxdr_offset;
57         dma_sconfig.src_addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
58         dma_sconfig.src_maxburst = 1;
59         dma_sconfig.direction = DMA_DEV_TO_MEM;
60         ret = dmaengine_slave_config(dma->chan_rx, &dma_sconfig);
61         if (ret < 0) {
62                 dev_err(dev, "can't configure rx channel\n");
63                 goto fail_rx;
64         }
65
66         init_completion(&dma->dma_complete);
67
68         dev_info(dev, "using %s (tx) and %s (rx) for DMA transfers\n",
69                  dma_chan_name(dma->chan_tx), dma_chan_name(dma->chan_rx));
70
71         return dma;
72
73 fail_rx:
74         dma_release_channel(dma->chan_rx);
75 fail_tx:
76         dma_release_channel(dma->chan_tx);
77 fail_al:
78         devm_kfree(dev, dma);
79         if (ret != -EPROBE_DEFER)
80                 dev_info(dev, "can't use DMA\n");
81
82         return ERR_PTR(ret);
83 }
84
85 void stm32_i2c_dma_free(struct stm32_i2c_dma *dma)
86 {
87         dma->dma_buf = 0;
88         dma->dma_len = 0;
89
90         dma_release_channel(dma->chan_tx);
91         dma->chan_tx = NULL;
92
93         dma_release_channel(dma->chan_rx);
94         dma->chan_rx = NULL;
95
96         dma->chan_using = NULL;
97 }
98
99 int stm32_i2c_prep_dma_xfer(struct device *dev, struct stm32_i2c_dma *dma,
100                             bool rd_wr, u32 len, u8 *buf,
101                             dma_async_tx_callback callback,
102                             void *dma_async_param)
103 {
104         struct dma_async_tx_descriptor *txdesc;
105         struct device *chan_dev;
106         int ret;
107
108         if (rd_wr) {
109                 dma->chan_using = dma->chan_rx;
110                 dma->dma_transfer_dir = DMA_DEV_TO_MEM;
111                 dma->dma_data_dir = DMA_FROM_DEVICE;
112         } else {
113                 dma->chan_using = dma->chan_tx;
114                 dma->dma_transfer_dir = DMA_MEM_TO_DEV;
115                 dma->dma_data_dir = DMA_TO_DEVICE;
116         }
117
118         dma->dma_len = len;
119         chan_dev = dma->chan_using->device->dev;
120
121         dma->dma_buf = dma_map_single(chan_dev, buf, dma->dma_len,
122                                       dma->dma_data_dir);
123         if (dma_mapping_error(chan_dev, dma->dma_buf)) {
124                 dev_err(dev, "DMA mapping failed\n");
125                 return -EINVAL;
126         }
127
128         txdesc = dmaengine_prep_slave_single(dma->chan_using, dma->dma_buf,
129                                              dma->dma_len,
130                                              dma->dma_transfer_dir,
131                                              DMA_PREP_INTERRUPT);
132         if (!txdesc) {
133                 dev_err(dev, "Not able to get desc for DMA xfer\n");
134                 ret = -EINVAL;
135                 goto err;
136         }
137
138         reinit_completion(&dma->dma_complete);
139
140         txdesc->callback = callback;
141         txdesc->callback_param = dma_async_param;
142         ret = dma_submit_error(dmaengine_submit(txdesc));
143         if (ret < 0) {
144                 dev_err(dev, "DMA submit failed\n");
145                 goto err;
146         }
147
148         dma_async_issue_pending(dma->chan_using);
149
150         return 0;
151
152 err:
153         dma_unmap_single(chan_dev, dma->dma_buf, dma->dma_len,
154                          dma->dma_data_dir);
155         return ret;
156 }