1 // SPDX-License-Identifier: GPL-2.0+
3 * Direct Memory Access U-Class driver
5 * Copyright (C) 2018 Álvaro Fernández Rojas <noltari@gmail.com>
6 * Copyright (C) 2015 - 2018 Texas Instruments Incorporated <www.ti.com>
7 * Written by Mugunthan V N <mugunthanvnm@ti.com>
9 * Author: Mugunthan V N <mugunthanvnm@ti.com>
12 #define LOG_CATEGORY UCLASS_DMA
19 #include <asm/cache.h>
21 #include <dma-uclass.h>
22 #include <linux/dma-mapping.h>
23 #include <dt-structs.h>
26 #ifdef CONFIG_DMA_CHANNELS
/* Return the dma_ops vtable registered by this DMA provider's driver. */
static inline struct dma_ops *dma_dev_ops(struct udevice *dev)
	return (struct dma_ops *)dev->driver->ops;
32 # if CONFIG_IS_ENABLED(OF_CONTROL)
/*
 * Default translation of "#dma-cells" phandle arguments: the first cell,
 * if present, becomes the channel id. Used when the provider driver does
 * not supply its own of_xlate op.
 *
 * NOTE(review): only a fragment of this function is visible here; the
 * args_count > 1 branch presumably returns -EINVAL after the pr_err —
 * confirm against the full source.
 */
static int dma_of_xlate_default(struct dma *dma,
				struct ofnode_phandle_args *args)
	debug("%s(dma=%p)\n", __func__, dma);

	/* The default xlate understands at most one argument cell */
	if (args->args_count > 1) {
		pr_err("Invalid args_count: %d\n", args->args_count);

	/* First cell selects the channel within the provider */
	dma->id = args->args[0];
/*
 * dma_get_by_index() - Look up and request a DMA channel by "dmas" index.
 * @dev:   consumer device whose DT node carries the "dmas" property
 * @index: position in the "dmas" phandle list
 * @dma:   returns the resolved channel descriptor
 *
 * Resolves the phandle+args at @index, binds/probes the provider device
 * in UCLASS_DMA, translates the cells into @dma (provider of_xlate when
 * available, otherwise dma_of_xlate_default()), and finally requests the
 * channel via dma_request(). Returns 0 on success or a negative error
 * code (errors are also logged with pr_err).
 */
int dma_get_by_index(struct udevice *dev, int index, struct dma *dma)
	struct ofnode_phandle_args args;
	struct udevice *dev_dma;
	const struct dma_ops *ops;

	debug("%s(dev=%p, index=%d, dma=%p)\n", __func__, dev, index, dma);

	/* Parse "dmas"/"#dma-cells" to get the provider node and arg cells */
	ret = dev_read_phandle_with_args(dev, "dmas", "#dma-cells", 0, index,
		pr_err("%s: dev_read_phandle_with_args failed: err=%d\n",

	/* Get (and probe) the provider udevice for the referenced node */
	ret = uclass_get_device_by_ofnode(UCLASS_DMA, args.node, &dev_dma);
		pr_err("%s: uclass_get_device_by_ofnode failed: err=%d\n",

	ops = dma_dev_ops(dev_dma);

	/* Provider-specific translation takes precedence over the default */
	ret = ops->of_xlate(dma, &args);
	ret = dma_of_xlate_default(dma, &args);
		pr_err("of_xlate() failed: %d\n", ret);

	/* Hand the translated channel to the provider for allocation */
	return dma_request(dev_dma, dma);
/*
 * dma_get_by_name() - Look up and request a DMA channel by "dma-names".
 * @dev:  consumer device whose DT node carries "dmas"/"dma-names"
 * @name: entry to find in the "dma-names" string list
 * @dma:  returns the resolved channel descriptor
 *
 * Translates @name into a "dmas" index and delegates to
 * dma_get_by_index(). Returns 0 on success or a negative error code.
 */
int dma_get_by_name(struct udevice *dev, const char *name, struct dma *dma)
	debug("%s(dev=%p, name=%s, dma=%p)\n", __func__, dev, name, dma);

	/* Map the name to its position in the "dmas" list */
	index = dev_read_stringlist_search(dev, "dma-names", name);
		pr_err("dev_read_stringlist_search() failed: %d\n", index);

	return dma_get_by_index(dev, index, dma);
109 # endif /* OF_CONTROL */
/*
 * dma_request() - Ask the provider @dev to allocate the channel in @dma.
 *
 * Thin dispatch to the provider's ->request op. NOTE(review): the guard
 * for a missing ->request op is not visible in this fragment — presumably
 * it returns -ENOSYS when the op is absent; confirm upstream.
 */
int dma_request(struct udevice *dev, struct dma *dma)
	struct dma_ops *ops = dma_dev_ops(dev);

	debug("%s(dev=%p, dma=%p)\n", __func__, dev, dma);

	return ops->request(dma);
/*
 * dma_free() - Release a previously requested channel.
 *
 * Dispatches to the provider's ->rfree op (named rfree to avoid clashing
 * with the C library free()). Uses dma->dev, so @dma must have been
 * obtained via dma_get_by_index()/dma_get_by_name().
 */
int dma_free(struct dma *dma)
	struct dma_ops *ops = dma_dev_ops(dma->dev);

	debug("%s(dma=%p)\n", __func__, dma);

	return ops->rfree(dma);
/*
 * dma_enable() - Enable/start the channel. Dispatches to ->enable.
 * NOTE(review): the missing-op guard is elided in this fragment —
 * presumably returns -ENOSYS when the provider lacks ->enable.
 */
int dma_enable(struct dma *dma)
	struct dma_ops *ops = dma_dev_ops(dma->dev);

	debug("%s(dma=%p)\n", __func__, dma);

	return ops->enable(dma);
/*
 * dma_disable() - Disable/stop the channel. Dispatches to ->disable.
 * NOTE(review): the missing-op guard is elided in this fragment —
 * presumably returns -ENOSYS when the provider lacks ->disable.
 */
int dma_disable(struct dma *dma)
	struct dma_ops *ops = dma_dev_ops(dma->dev);

	debug("%s(dma=%p)\n", __func__, dma);

	return ops->disable(dma);
/*
 * dma_prepare_rcv_buf() - Queue a receive buffer on the channel.
 * @dma:  channel descriptor
 * @dst:  buffer the hardware may write into
 * @size: buffer size in bytes
 *
 * Optional op: the ->prepare_rcv_buf presence is checked here before
 * dispatch (the error value returned when absent is elided from this
 * fragment).
 */
int dma_prepare_rcv_buf(struct dma *dma, void *dst, size_t size)
	struct dma_ops *ops = dma_dev_ops(dma->dev);

	debug("%s(dma=%p)\n", __func__, dma);

	/* Not all providers implement buffer pre-queueing */
	if (!ops->prepare_rcv_buf)

	return ops->prepare_rcv_buf(dma, dst, size);
/*
 * dma_receive() - Receive data on the channel; dispatches to ->receive.
 * @dst points at the buffer pointer the provider fills in; @metadata is
 * provider-specific sideband data. NOTE(review): the missing-op guard is
 * elided in this fragment — presumably returns -ENOSYS.
 */
int dma_receive(struct dma *dma, void **dst, void *metadata)
	struct dma_ops *ops = dma_dev_ops(dma->dev);

	debug("%s(dma=%p)\n", __func__, dma);

	return ops->receive(dma, dst, metadata);
/*
 * dma_send() - Transmit @len bytes from @src on the channel; dispatches
 * to ->send. @metadata is provider-specific sideband data.
 * NOTE(review): the missing-op guard is elided in this fragment —
 * presumably returns -ENOSYS.
 */
int dma_send(struct dma *dma, void *src, size_t len, void *metadata)
	struct dma_ops *ops = dma_dev_ops(dma->dev);

	debug("%s(dma=%p)\n", __func__, dma);

	return ops->send(dma, src, len, metadata);
/*
 * dma_get_cfg() - Fetch provider-specific channel configuration.
 * @cfg_id selects which configuration item; @cfg_data receives a pointer
 * to it. Dispatches to ->get_cfg. NOTE(review): the missing-op guard is
 * elided in this fragment — presumably returns -ENOSYS.
 */
int dma_get_cfg(struct dma *dma, u32 cfg_id, void **cfg_data)
	struct dma_ops *ops = dma_dev_ops(dma->dev);

	debug("%s(dma=%p)\n", __func__, dma);

	return ops->get_cfg(dma, cfg_id, cfg_data);
208 #endif /* CONFIG_DMA_CHANNELS */
/*
 * dma_get_device() - Find the first DMA device supporting @transfer_type.
 * @transfer_type: DMA_SUPPORTS_* capability bit(s) required
 * @devp:          returns the matching (probed) device
 *
 * Walks every device in UCLASS_DMA and matches against the per-device
 * uclass-private "supported" capability mask. Returns -EPROTONOSUPPORT
 * when no device matches. NOTE(review): the success path (*devp = dev;
 * return 0;) is elided from this fragment — confirm upstream.
 */
int dma_get_device(u32 transfer_type, struct udevice **devp)
	for (uclass_first_device(UCLASS_DMA, &dev); dev;
	     uclass_next_device(&dev)) {
		struct dma_dev_priv *uc_priv;

		/* Capability mask is filled in by the provider at probe */
		uc_priv = dev_get_uclass_priv(dev);
		if (uc_priv->supported & transfer_type)

	pr_debug("No DMA device found that supports %x type\n",
	return -EPROTONOSUPPORT;
/*
 * dma_memcpy() - Copy @len bytes from @src to @dst using a DMA engine.
 *
 * Finds a mem-to-mem capable DMA device, maps both buffers for the
 * transfer (handling cache maintenance), runs the provider's ->transfer
 * op, then unmaps so the CPU sees the DMA'd data. Returns the provider's
 * result, or the dma_get_device() error when no engine is available.
 * NOTE(review): the ->transfer presence check is elided from this
 * fragment — presumably returns -ENOSYS when the op is absent.
 */
int dma_memcpy(void *dst, void *src, size_t len)
	const struct dma_ops *ops;
	dma_addr_t destination;

	/* Need an engine that can do pure memory-to-memory transfers */
	ret = dma_get_device(DMA_SUPPORTS_MEM_TO_MEM, &dev);

	ops = device_get_ops(dev);

	/* Clean the areas, so no writeback into the RAM races with DMA */
	destination = dma_map_single(dst, len, DMA_FROM_DEVICE);
	source = dma_map_single(src, len, DMA_TO_DEVICE);

	ret = ops->transfer(dev, DMA_MEM_TO_MEM, destination, source, len);

	/* Clean+Invalidate the areas after, so we can see DMA'd data */
	dma_unmap_single(destination, len, DMA_FROM_DEVICE);
	dma_unmap_single(source, len, DMA_TO_DEVICE);
/*
 * Uclass registration for UCLASS_DMA. DM_UC_FLAG_SEQ_ALIAS lets devices
 * take sequence numbers from "dmaN" DT aliases; per_device_auto makes
 * driver model allocate a struct dma_dev_priv (capability mask) for each
 * device automatically. (The .id/.name members are elided from this
 * fragment.)
 */
UCLASS_DRIVER(dma) = {
	.flags = DM_UC_FLAG_SEQ_ALIAS,
	.per_device_auto = sizeof(struct dma_dev_priv),