/*
 * Direct Memory Access U-Class driver
 *
 * (C) Copyright 2015
 * Texas Instruments Incorporated, <www.ti.com>
 *
 * Author: Mugunthan V N <mugunthanvnm@ti.com>
 *
 * SPDX-License-Identifier: GPL-2.0+
 */
11*4882a593Smuzhiyun
12*4882a593Smuzhiyun #include <common.h>
13*4882a593Smuzhiyun #include <dma.h>
14*4882a593Smuzhiyun #include <dm.h>
15*4882a593Smuzhiyun #include <dm/uclass-internal.h>
16*4882a593Smuzhiyun #include <dm/device-internal.h>
17*4882a593Smuzhiyun #include <errno.h>
18*4882a593Smuzhiyun
19*4882a593Smuzhiyun DECLARE_GLOBAL_DATA_PTR;
20*4882a593Smuzhiyun
/**
 * dma_get_device() - find a DMA device supporting a given transfer type
 *
 * Walks every probed device in the DMA uclass and stops at the first one
 * whose per-uclass private data advertises @transfer_type in its
 * 'supported' capability mask.
 *
 * @transfer_type: DMA_SUPPORTS_* capability bit(s) required of the device
 * @devp:          on success, set to the matching device
 * Return: 0 on success, -EPROTONOSUPPORT when no probed DMA device
 *         supports @transfer_type, or a negative error code propagated
 *         from the uclass iteration.
 */
int dma_get_device(u32 transfer_type, struct udevice **devp)
{
	struct udevice *dev;
	int ret;

	ret = uclass_first_device(UCLASS_DMA, &dev);
	while (dev && !ret) {
		struct dma_dev_priv *uc_priv = dev_get_uclass_priv(dev);

		/* Stop at the first device advertising the capability */
		if (uc_priv->supported & transfer_type)
			break;

		ret = uclass_next_device(&dev);
	}

	if (!dev) {
		pr_err("No DMA device found that supports %x type\n",
		       transfer_type);
		return -EPROTONOSUPPORT;
	}

	*devp = dev;

	return ret;
}
45*4882a593Smuzhiyun
/**
 * dma_memcpy() - copy memory to memory using a DMA engine
 *
 * Locates the first DMA device advertising MEM_TO_MEM support and asks it
 * to transfer @len bytes from @src to @dst.
 *
 * @dst: destination buffer (written by the DMA engine)
 * @src: source buffer
 * @len: number of bytes to copy
 * Return: result of the device's ->transfer() op on success, -ENOSYS if
 *         the device provides no transfer op, or a negative error code if
 *         no suitable DMA device exists.
 */
int dma_memcpy(void *dst, void *src, size_t len)
{
	const struct dma_ops *ops;
	struct udevice *dma_dev;
	int ret;

	ret = dma_get_device(DMA_SUPPORTS_MEM_TO_MEM, &dma_dev);
	if (ret < 0)
		return ret;

	ops = device_get_ops(dma_dev);
	if (!ops->transfer)
		return -ENOSYS;

	/*
	 * Invalidate the destination range so that no dirty cache line is
	 * written back into RAM while the DMA engine is filling it.
	 */
	invalidate_dcache_range((unsigned long)dst,
				(unsigned long)dst +
				roundup(len, ARCH_DMA_MINALIGN));

	return ops->transfer(dma_dev, DMA_MEM_TO_MEM, dst, src, len);
}
66*4882a593Smuzhiyun
/*
 * DMA uclass registration: groups all DMA controllers under UCLASS_DMA and
 * reserves per-device space for struct dma_dev_priv, which holds each
 * device's supported-transfer-type mask (read by dma_get_device() above).
 */
UCLASS_DRIVER(dma) = {
	.id = UCLASS_DMA,
	.name = "dma",
	/* NOTE(review): per driver-model convention this makes device
	 * sequence numbers follow "dma" aliases in the device tree.
	 */
	.flags = DM_UC_FLAG_SEQ_ALIAS,
	.per_device_auto_alloc_size = sizeof(struct dma_dev_priv),
};
73