1 /* SPDX-License-Identifier: GPL-2.0+ */
3 * Copyright 2013-2014 Freescale Semiconductor, Inc.
4 * Copyright 2018 Angelo Dureghello <angelo@sysam.it>
6 #ifndef _FSL_EDMA_COMMON_H_
7 #define _FSL_EDMA_COMMON_H_
9 #include <linux/dma-direction.h>
10 #include <linux/platform_device.h>
/*
 * eDMA Control Register (CR) bit flags.
 * NOTE(review): names match the Freescale/NXP eDMA reference-manual
 * fields (debug, round-robin arbitration, halt-on-error, halt,
 * link/minor-loop modes, cancel transfer) -- confirm against the
 * target SoC's reference manual.
 */
13 #define EDMA_CR_EDBG BIT(1)
14 #define EDMA_CR_ERCA BIT(2)
15 #define EDMA_CR_ERGA BIT(3)
16 #define EDMA_CR_HOE BIT(4)
17 #define EDMA_CR_HALT BIT(5)
18 #define EDMA_CR_CLM BIT(6)
19 #define EDMA_CR_EMLM BIT(7)
20 #define EDMA_CR_ECX BIT(16)
21 #define EDMA_CR_CX BIT(17)
/*
 * 5-bit channel-number field for the single-channel set/clear registers
 * (set-enable-error-interrupt, clear-enable-error-interrupt, clear
 * interrupt request, clear error).
 */
23 #define EDMA_SEEI_SEEI(x) ((x) & GENMASK(4, 0))
24 #define EDMA_CEEI_CEEI(x) ((x) & GENMASK(4, 0))
25 #define EDMA_CINT_CINT(x) ((x) & GENMASK(4, 0))
26 #define EDMA_CERR_CERR(x) ((x) & GENMASK(4, 0))
/*
 * TCD ATTR register fields: 3-bit transfer size and 5-bit address
 * modulo, destination side in bits 0-7, source side in bits 8-15
 * (positions grounded by the shift amounts below).
 */
28 #define EDMA_TCD_ATTR_DSIZE(x) (((x) & GENMASK(2, 0)))
29 #define EDMA_TCD_ATTR_DMOD(x) (((x) & GENMASK(4, 0)) << 3)
30 #define EDMA_TCD_ATTR_SSIZE(x) (((x) & GENMASK(2, 0)) << 8)
31 #define EDMA_TCD_ATTR_SMOD(x) (((x) & GENMASK(4, 0)) << 11)
/*
 * Current/beginning major iteration counts are limited to 15 bits;
 * NOTE(review): the masked-off top bit is presumably the E_LINK flag
 * of CITER/BITER -- confirm in the reference manual.
 */
33 #define EDMA_TCD_CITER_CITER(x) ((x) & GENMASK(14, 0))
34 #define EDMA_TCD_BITER_BITER(x) ((x) & GENMASK(14, 0))
/* TCD Control/Status (CSR) bits. */
36 #define EDMA_TCD_CSR_START BIT(0)
37 #define EDMA_TCD_CSR_INT_MAJOR BIT(1)
38 #define EDMA_TCD_CSR_INT_HALF BIT(2)
39 #define EDMA_TCD_CSR_D_REQ BIT(3)
40 #define EDMA_TCD_CSR_E_SG BIT(4)
41 #define EDMA_TCD_CSR_E_LINK BIT(5)
42 #define EDMA_TCD_CSR_ACTIVE BIT(6)
43 #define EDMA_TCD_CSR_DONE BIT(7)
/* DMAMUX channel-config register: enable bit plus 6-bit request source. */
45 #define EDMAMUX_CHCFG_DIS 0x0
46 #define EDMAMUX_CHCFG_ENBL 0x80
47 #define EDMAMUX_CHCFG_SOURCE(n) ((n) & 0x3F)
/*
 * Byte offset of the TCD array from the engine register base.
 * NOTE(review): not used in this chunk -- presumably added to membase
 * when computing per-channel chan->tcd; confirm in the common .c file.
 */
51 #define EDMA_TCD 0x1000
/* Slave bus widths this engine advertises: 1-, 2-, 4- and 8-byte beats. */
53 #define FSL_EDMA_BUSWIDTHS (BIT(DMA_SLAVE_BUSWIDTH_1_BYTE) | \
54 BIT(DMA_SLAVE_BUSWIDTH_2_BYTES) | \
55 BIT(DMA_SLAVE_BUSWIDTH_4_BYTES) | \
56 BIT(DMA_SLAVE_BUSWIDTH_8_BYTES))
/*
 * NOTE(review): this chunk is elided -- the enumerator list of
 * fsl_edma_pm_state, the members of fsl_edma_hw_tcd, and most of the
 * edma_regs definition are missing here (internal numbering jumps
 * 57->62->77->83->85->100).  Do not restructure without the full file.
 */
57 enum fsl_edma_pm_state {
62 struct fsl_edma_hw_tcd {
77 * These are iomem pointers, for both v32 and v64.
83 void __iomem *erql; /* aka erq on v32 */
85 void __iomem *eeil; /* aka eei on v32 */
100 struct fsl_edma_sw_tcd {
/*
 * CPU-visible pointer to the hardware TCD.  NOTE(review): the matching
 * dma_addr_t (DMA-visible address) member appears to be elided here.
 */
102 struct fsl_edma_hw_tcd *vtcd;
/*
 * Per-channel driver state.  NOTE(review): several members are elided
 * from this chunk (internal numbering jumps 108->111, 113->116,
 * 117->119, 119->121).
 */
105 struct fsl_edma_chan {
/* embedded virt-dma channel; recovered by to_fsl_edma_chan() */
106 struct virt_dma_chan vchan;
107 enum dma_status status;
108 enum fsl_edma_pm_state pm_state;
/* owning engine */
111 struct fsl_edma_engine *edma;
/* descriptor currently being transferred, if any */
112 struct fsl_edma_desc *edesc;
113 struct dma_slave_config cfg;
/* pool the hardware TCDs are allocated from */
116 struct dma_pool *tcd_pool;
117 dma_addr_t dma_dev_addr;
119 enum dma_data_direction dma_dir;
/* MMIO pointer to this channel's hardware TCD registers */
121 struct fsl_edma_hw_tcd __iomem *tcd;
/*
 * Software descriptor: one virt-dma descriptor covering a chain of
 * hardware TCDs.  NOTE(review): members between internal lines 126 and
 * 128, and 128 and 130, are elided from this chunk.
 */
124 struct fsl_edma_desc {
125 struct virt_dma_desc vdesc;
126 struct fsl_edma_chan *echan;
128 enum dma_transfer_direction dirn;
/* flexible array member (C99) -- must remain last */
130 struct fsl_edma_sw_tcd tcd[];
/* Per-SoC feature flags, presumably tested against fsl_edma_drvdata. */
133 #define FSL_EDMA_DRV_HAS_DMACLK BIT(0)
134 #define FSL_EDMA_DRV_MUX_SWAP BIT(1)
135 #define FSL_EDMA_DRV_CONFIG32 BIT(2)
136 #define FSL_EDMA_DRV_WRAP_IO BIT(3)
137 #define FSL_EDMA_DRV_EDMA64 BIT(4)
/*
 * Per-SoC quirk/ops table.  NOTE(review): members between internal
 * lines 138 and 141 are elided from this chunk.
 */
138 struct fsl_edma_drvdata {
/* platform-specific IRQ wiring hook called at probe time */
141 int (*setup_irq)(struct platform_device *pdev,
142 struct fsl_edma_engine *fsl_edma);
/*
 * Top-level engine state shared by all channels.  NOTE(review): some
 * members are elided (internal numbering jumps 149->151, 152->157),
 * including the big_endian flag used by the accessors below.
 */
145 struct fsl_edma_engine {
146 struct dma_device dma_dev;
/* engine register base; TCDs sit at membase + EDMA_TCD */
147 void __iomem *membase;
148 void __iomem *muxbase[DMAMUX_NR];
149 struct clk *muxclk[DMAMUX_NR];
151 struct mutex fsl_edma_mutex;
152 const struct fsl_edma_drvdata *drvdata;
157 struct edma_regs regs;
/* flexible array member -- must remain last */
158 struct fsl_edma_chan chans[];
/*
 * Read a TCD register field, selecting a 32- or 16-bit access from the
 * field's size at compile time.
 *
 * @chan is parenthesized so the expansion stays correct if the caller
 * passes a non-trivial expression (CERT PRE01-C); @__name cannot be
 * parenthesized because it is spliced in as a struct member name.
 */
#define edma_read_tcdreg(chan, __name)					\
(sizeof((chan)->tcd->__name) == sizeof(u32) ?				\
	edma_readl((chan)->edma, &(chan)->tcd->__name) :		\
	edma_readw((chan)->edma, &(chan)->tcd->__name))
/*
 * Write @val to a TCD register field, selecting a 32- or 16-bit access
 * from the field's size at compile time.
 *
 * @chan and @val are parenthesized (CERT PRE01-C): without the parens,
 * passing an expression such as `a | b` would expand to
 * `(u32 __force)a | b`, casting only `a`.  @__name cannot be
 * parenthesized because it is spliced in as a struct member name.
 */
#define edma_write_tcdreg(chan, val, __name)				\
(sizeof((chan)->tcd->__name) == sizeof(u32) ?				\
	edma_writel((chan)->edma, (u32 __force)(val), &(chan)->tcd->__name) : \
	edma_writew((chan)->edma, (u16 __force)(val), &(chan)->tcd->__name))
172 * R/W functions for big- or little-endian registers:
173 * The eDMA controller's endianness is independent of the CPU core's endianness.
174 * For the big-endian IP module, the offset of an 8-bit or 16-bit register
175 * is also swapped relative to its offset in the little-endian IP.
/*
 * Endian-aware MMIO readers: the engine's big_endian flag selects the
 * byte-swapping ioread*be() variant.  NOTE(review): the function braces
 * and blank lines are elided from this chunk (internal numbering
 * 177->179, 180->182, 183->185).
 */
177 static inline u32 edma_readl(struct fsl_edma_engine *edma, void __iomem *addr)
179 if (edma->big_endian)
180 return ioread32be(addr);
182 return ioread32(addr);
185 static inline u16 edma_readw(struct fsl_edma_engine *edma, void __iomem *addr)
187 if (edma->big_endian)
188 return ioread16be(addr);
190 return ioread16(addr);
/*
 * Byte/halfword MMIO writers.  In big-endian mode the register offset
 * is XOR-swapped (0x3 for 8-bit, 0x2 for 16-bit) because sub-word
 * registers sit at mirrored offsets in the big-endian IP (see the
 * header comment above).  NOTE(review): braces, edma_writeb's
 * little-endian branch, and edma_writew's `else` keyword are elided
 * from this chunk (internal numbering 198->203, 208->210).
 */
193 static inline void edma_writeb(struct fsl_edma_engine *edma,
194 u8 val, void __iomem *addr)
196 /* swap the reg offset for these in big-endian mode */
197 if (edma->big_endian)
198 iowrite8(val, (void __iomem *)((unsigned long)addr ^ 0x3));
203 static inline void edma_writew(struct fsl_edma_engine *edma,
204 u16 val, void __iomem *addr)
206 /* swap the reg offset for these in big-endian mode */
207 if (edma->big_endian)
208 iowrite16be(val, (void __iomem *)((unsigned long)addr ^ 0x2));
210 iowrite16(val, addr);
/*
 * 32-bit MMIO writer: full-word registers need no offset swap, only the
 * byte-swapping iowrite32be() in big-endian mode.  NOTE(review): braces
 * and the `else` keyword are elided from this chunk (internal numbering
 * 217->219).
 */
213 static inline void edma_writel(struct fsl_edma_engine *edma,
214 u32 val, void __iomem *addr)
216 if (edma->big_endian)
217 iowrite32be(val, addr);
219 iowrite32(val, addr);
/*
 * container_of() helpers mapping the generic dmaengine/virt-dma objects
 * back to the driver's enclosing structures.  NOTE(review): function
 * braces are elided from this chunk.
 */
222 static inline struct fsl_edma_chan *to_fsl_edma_chan(struct dma_chan *chan)
224 return container_of(chan, struct fsl_edma_chan, vchan.chan);
227 static inline struct fsl_edma_desc *to_fsl_edma_desc(struct virt_dma_desc *vd)
229 return container_of(vd, struct fsl_edma_desc, vdesc);
/*
 * Error handling for a channel: mark it failed and idle.  NOTE(review):
 * presumably called from the error interrupt path -- the caller and any
 * elided lines (233, 236) are not visible in this chunk.
 */
232 static inline void fsl_edma_err_chan_handler(struct fsl_edma_chan *fsl_chan)
234 fsl_chan->status = DMA_ERROR;
235 fsl_chan->idle = true;
/*
 * Shared eDMA driver API -- implemented in the common translation unit
 * (presumably fsl-edma-common.c) and called by the per-SoC glue drivers.
 */
/* interrupt-time and hardware-control helpers */
238 void fsl_edma_tx_chan_handler(struct fsl_edma_chan *fsl_chan);
239 void fsl_edma_disable_request(struct fsl_edma_chan *fsl_chan);
240 void fsl_edma_chan_mux(struct fsl_edma_chan *fsl_chan,
241 unsigned int slot, bool enable);
242 void fsl_edma_free_desc(struct virt_dma_desc *vdesc);
/* dmaengine framework callbacks: channel control */
243 int fsl_edma_terminate_all(struct dma_chan *chan);
244 int fsl_edma_pause(struct dma_chan *chan);
245 int fsl_edma_resume(struct dma_chan *chan);
246 int fsl_edma_slave_config(struct dma_chan *chan,
247 struct dma_slave_config *cfg);
248 enum dma_status fsl_edma_tx_status(struct dma_chan *chan,
249 dma_cookie_t cookie, struct dma_tx_state *txstate);
/* dmaengine framework callbacks: descriptor preparation */
250 struct dma_async_tx_descriptor *fsl_edma_prep_dma_cyclic(
251 struct dma_chan *chan, dma_addr_t dma_addr, size_t buf_len,
252 size_t period_len, enum dma_transfer_direction direction,
253 unsigned long flags);
254 struct dma_async_tx_descriptor *fsl_edma_prep_slave_sg(
255 struct dma_chan *chan, struct scatterlist *sgl,
256 unsigned int sg_len, enum dma_transfer_direction direction,
257 unsigned long flags, void *context);
258 struct dma_async_tx_descriptor *fsl_edma_prep_memcpy(
259 struct dma_chan *chan, dma_addr_t dma_dst, dma_addr_t dma_src,
260 size_t len, unsigned long flags);
/* dmaengine framework callbacks: submission and resource management */
261 void fsl_edma_xfer_desc(struct fsl_edma_chan *fsl_chan);
262 void fsl_edma_issue_pending(struct dma_chan *chan);
263 int fsl_edma_alloc_chan_resources(struct dma_chan *chan);
264 void fsl_edma_free_chan_resources(struct dma_chan *chan);
265 void fsl_edma_cleanup_vchan(struct dma_device *dmadev);
266 void fsl_edma_setup_regs(struct fsl_edma_engine *edma);
268 #endif /* _FSL_EDMA_COMMON_H_ */