/*
 * Direct Memory Access U-Class driver
 *
 * (C) Copyright 2015
 * Texas Instruments Incorporated, <www.ti.com>
 *
 * Author: Mugunthan V N <mugunthanvnm@ti.com>
 *
 * SPDX-License-Identifier: GPL-2.0+
 */

#include <common.h>
#include <dma.h>
#include <dm.h>
#include <dm/uclass-internal.h>
#include <dm/device-internal.h>
#include <errno.h>

DECLARE_GLOBAL_DATA_PTR;

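/*
 * Find the first device in the DMA uclass whose supported-transfer mask
 * includes @transfer_type. Walks the uclass, probing each device in turn,
 * and returns -EPROTONOSUPPORT when no controller matches.
 */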
int dma_get_device(u32 transfer_type, struct udevice **devp)
{
	struct udevice *dev;

	for (uclass_first_device(UCLASS_DMA, &dev); dev;
	     uclass_next_device(&dev)) {
		struct dma_dev_priv *uc_priv;

		uc_priv = dev_get_uclass_priv(dev);
		if (uc_priv->supported & transfer_type)
			break;
	}

	if (!dev) {
		pr_err("No DMA device found that supports %x type\n",
		       transfer_type);
		return -EPROTONOSUPPORT;
	}

	*devp = dev;

	return 0;
}

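/*
 * Copy @len bytes from @src to @dst through the first DMA device that
 * advertises memory-to-memory support. The destination range is
 * invalidated in the data cache first so a pending writeback cannot
 * overwrite the data placed there by the DMA engine.
 */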
int dma_memcpy(void *dst, void *src, size_t len)
{
	struct udevice *dev;
	const struct dma_ops *ops;
	int ret;

	ret = dma_get_device(DMA_SUPPORTS_MEM_TO_MEM, &dev);
	if (ret < 0)
		return ret;

	ops = device_get_ops(dev);
	if (!ops->transfer)
		return -ENOSYS;

	/* Invalidate the area, so no writeback into the RAM races with DMA */
	invalidate_dcache_range((unsigned long)dst, (unsigned long)dst +
				roundup(len, ARCH_DMA_MINALIGN));

	return ops->transfer(dev, DMA_MEM_TO_MEM, dst, src, len);
}

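/*
 * DM_UC_FLAG_SEQ_ALIAS lets "dma" aliases in the device tree fix the
 * sequence numbers of the controllers; each device also gets a
 * struct dma_dev_priv allocated to hold its supported-transfer mask.
 */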
UCLASS_DRIVER(dma) = {
	.id = UCLASS_DMA,
	.name = "dma",
	.flags = DM_UC_FLAG_SEQ_ALIAS,
	.per_device_auto_alloc_size = sizeof(struct dma_dev_priv),
};