// SPDX-License-Identifier: GPL-2.0-only
/* 10G controller driver for Samsung SoCs
 *
 * Copyright (C) 2013 Samsung Electronics Co., Ltd.
 * http://www.samsung.com
 *
 * Author: Siva Reddy Kallam <siva.kallam@samsung.com>
 */
#include <linux/delay.h>
#include <linux/export.h>
#include <linux/io.h>
#include <linux/netdevice.h>
#include <linux/phy.h>

#include "sxgbe_common.h"
#include "sxgbe_dma.h"
#include "sxgbe_reg.h"
#include "sxgbe_desc.h"

/* DMA core initialization */
static int sxgbe_dma_init(void __iomem *ioaddr, int fix_burst, int burst_map)
{
	u32 reg_val;

	reg_val = readl(ioaddr + SXGBE_DMA_SYSBUS_MODE_REG);

	/* if fix_burst == 0, set UNDEF = 1 in the DMA_Sys_Mode register.
	 * if fix_burst == 1, UNDEF stays 0 in the DMA_Sys_Mode register.
	 * burst_map is a bitmap for BLEN[4, 8, 16, 32, 64, 128 and 256].
	 * Set burst_map irrespective of the fix_burst value.
	 */
	if (!fix_burst)
		reg_val |= SXGBE_DMA_AXI_UNDEF_BURST;

	/* write burst length map */
	reg_val |= (burst_map << SXGBE_DMA_BLENMAP_LSHIFT);

	writel(reg_val, ioaddr + SXGBE_DMA_SYSBUS_MODE_REG);

	return 0;
}

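/* Illustrative note (not from this file): burst_map is a bitmap in which
 * bit 0 enables BLEN4, bit 1 BLEN8, ... and bit 6 BLEN256. A minimal
 * sketch of a caller that wants fixed bursts of up to 16 beats:
 *
 *	int burst_map = BIT(0) | BIT(1) | BIT(2);	// BLEN4|8|16
 *	sxgbe_dma_init(ioaddr, 1, burst_map);
 *
 * In the real driver both values come from platform data rather than
 * hard-coded constants as above.
 */
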
static void sxgbe_dma_channel_init(void __iomem *ioaddr, int cha_num,
				   int fix_burst, int pbl, dma_addr_t dma_tx,
				   dma_addr_t dma_rx, int t_rsize, int r_rsize)
{
	u32 reg_val;
	dma_addr_t dma_addr;

	reg_val = readl(ioaddr + SXGBE_DMA_CHA_CTL_REG(cha_num));
	/* set the pbl */
	if (fix_burst) {
		reg_val |= SXGBE_DMA_PBL_X8MODE;
		writel(reg_val, ioaddr + SXGBE_DMA_CHA_CTL_REG(cha_num));
		/* program the TX pbl */
		reg_val = readl(ioaddr + SXGBE_DMA_CHA_TXCTL_REG(cha_num));
		reg_val |= (pbl << SXGBE_DMA_TXPBL_LSHIFT);
		writel(reg_val, ioaddr + SXGBE_DMA_CHA_TXCTL_REG(cha_num));
		/* program the RX pbl */
		reg_val = readl(ioaddr + SXGBE_DMA_CHA_RXCTL_REG(cha_num));
		reg_val |= (pbl << SXGBE_DMA_RXPBL_LSHIFT);
		writel(reg_val, ioaddr + SXGBE_DMA_CHA_RXCTL_REG(cha_num));
	}

	/* program desc registers */
	writel(upper_32_bits(dma_tx),
	       ioaddr + SXGBE_DMA_CHA_TXDESC_HADD_REG(cha_num));
	writel(lower_32_bits(dma_tx),
	       ioaddr + SXGBE_DMA_CHA_TXDESC_LADD_REG(cha_num));

	writel(upper_32_bits(dma_rx),
	       ioaddr + SXGBE_DMA_CHA_RXDESC_HADD_REG(cha_num));
	writel(lower_32_bits(dma_rx),
	       ioaddr + SXGBE_DMA_CHA_RXDESC_LADD_REG(cha_num));

	/* program tail pointers */
	/* assumption: the upper 32 bits are constant and
	 * the same as the TX/RX desc list base
	 */
	dma_addr = dma_tx + ((t_rsize - 1) * SXGBE_DESC_SIZE_BYTES);
	writel(lower_32_bits(dma_addr),
	       ioaddr + SXGBE_DMA_CHA_TXDESC_TAILPTR_REG(cha_num));

	/* RX tail pointer (tail-pointer register, analogous to the TX side) */
	dma_addr = dma_rx + ((r_rsize - 1) * SXGBE_DESC_SIZE_BYTES);
	writel(lower_32_bits(dma_addr),
	       ioaddr + SXGBE_DMA_CHA_RXDESC_TAILPTR_REG(cha_num));
	/* program the ring sizes */
	writel(t_rsize - 1, ioaddr + SXGBE_DMA_CHA_TXDESC_RINGLEN_REG(cha_num));
	writel(r_rsize - 1, ioaddr + SXGBE_DMA_CHA_RXDESC_RINGLEN_REG(cha_num));

	/* Enable TX/RX interrupts */
	writel(SXGBE_DMA_ENA_INT,
	       ioaddr + SXGBE_DMA_CHA_INT_ENABLE_REG(cha_num));
}

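/* Worked example (illustrative): with t_rsize = 512 descriptors, the TX
 * tail pointer above is dma_tx + 511 * SXGBE_DESC_SIZE_BYTES, i.e. the
 * bus address of the last descriptor. Writing only the low 32 bits is
 * valid as long as the ring never crosses a 4 GiB boundary, which is the
 * assumption stated in the function. SXGBE_DMA_PBL_X8MODE presumably
 * scales the programmed PBL values by eight, as in related Synopsys DWC
 * cores.
 */
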
static void sxgbe_enable_dma_transmission(void __iomem *ioaddr, int cha_num)
{
	u32 tx_config;

	tx_config = readl(ioaddr + SXGBE_DMA_CHA_TXCTL_REG(cha_num));
	tx_config |= SXGBE_TX_START_DMA;
	writel(tx_config, ioaddr + SXGBE_DMA_CHA_TXCTL_REG(cha_num));
}

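/* Usage sketch (assumed caller, illustrative names): the xmit path
 * writes its descriptors first and only then sets SXGBE_TX_START_DMA so
 * the engine re-reads the ring:
 *
 *	// after publishing new TX descriptors for queue "txq_index"
 *	priv->hw->dma->enable_dma_transmission(priv->ioaddr, txq_index);
 */
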
static void sxgbe_enable_dma_irq(void __iomem *ioaddr, int dma_cnum)
{
	/* Enable TX/RX interrupts */
	writel(SXGBE_DMA_ENA_INT,
	       ioaddr + SXGBE_DMA_CHA_INT_ENABLE_REG(dma_cnum));
}

static void sxgbe_disable_dma_irq(void __iomem *ioaddr, int dma_cnum)
{
	/* Disable TX/RX interrupts */
	writel(0, ioaddr + SXGBE_DMA_CHA_INT_ENABLE_REG(dma_cnum));
}

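/* Pairing sketch (assumed, following the usual NAPI pattern): the hard
 * IRQ handler masks a channel's events, and the poll routine unmasks
 * them once the ring has been serviced:
 *
 *	sxgbe_disable_dma_irq(ioaddr, cnum);	// in the IRQ handler,
 *	...					// then napi_schedule()
 *	sxgbe_enable_dma_irq(ioaddr, cnum);	// when polling completes
 */
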
static void sxgbe_dma_start_tx(void __iomem *ioaddr, int tchannels)
{
	int cnum;
	u32 tx_ctl_reg;

	for (cnum = 0; cnum < tchannels; cnum++) {
		tx_ctl_reg = readl(ioaddr + SXGBE_DMA_CHA_TXCTL_REG(cnum));
		tx_ctl_reg |= SXGBE_TX_ENABLE;
		writel(tx_ctl_reg,
		       ioaddr + SXGBE_DMA_CHA_TXCTL_REG(cnum));
	}
}

static void sxgbe_dma_start_tx_queue(void __iomem *ioaddr, int dma_cnum)
{
	u32 tx_ctl_reg;

	tx_ctl_reg = readl(ioaddr + SXGBE_DMA_CHA_TXCTL_REG(dma_cnum));
	tx_ctl_reg |= SXGBE_TX_ENABLE;
	writel(tx_ctl_reg, ioaddr + SXGBE_DMA_CHA_TXCTL_REG(dma_cnum));
}

static void sxgbe_dma_stop_tx_queue(void __iomem *ioaddr, int dma_cnum)
{
	u32 tx_ctl_reg;

	tx_ctl_reg = readl(ioaddr + SXGBE_DMA_CHA_TXCTL_REG(dma_cnum));
	tx_ctl_reg &= ~(SXGBE_TX_ENABLE);
	writel(tx_ctl_reg, ioaddr + SXGBE_DMA_CHA_TXCTL_REG(dma_cnum));
}

static void sxgbe_dma_stop_tx(void __iomem *ioaddr, int tchannels)
{
	int cnum;
	u32 tx_ctl_reg;

	for (cnum = 0; cnum < tchannels; cnum++) {
		tx_ctl_reg = readl(ioaddr + SXGBE_DMA_CHA_TXCTL_REG(cnum));
		tx_ctl_reg &= ~(SXGBE_TX_ENABLE);
		writel(tx_ctl_reg, ioaddr + SXGBE_DMA_CHA_TXCTL_REG(cnum));
	}
}

static void sxgbe_dma_start_rx(void __iomem *ioaddr, int rchannels)
{
	int cnum;
	u32 rx_ctl_reg;

	for (cnum = 0; cnum < rchannels; cnum++) {
		rx_ctl_reg = readl(ioaddr + SXGBE_DMA_CHA_RXCTL_REG(cnum));
		rx_ctl_reg |= SXGBE_RX_ENABLE;
		writel(rx_ctl_reg,
		       ioaddr + SXGBE_DMA_CHA_RXCTL_REG(cnum));
	}
}

static void sxgbe_dma_stop_rx(void __iomem *ioaddr, int rchannels)
{
	int cnum;
	u32 rx_ctl_reg;

	for (cnum = 0; cnum < rchannels; cnum++) {
		rx_ctl_reg = readl(ioaddr + SXGBE_DMA_CHA_RXCTL_REG(cnum));
		rx_ctl_reg &= ~(SXGBE_RX_ENABLE);
		writel(rx_ctl_reg, ioaddr + SXGBE_DMA_CHA_RXCTL_REG(cnum));
	}
}

static int sxgbe_tx_dma_int_status(void __iomem *ioaddr, int channel_no,
				   struct sxgbe_extra_stats *x)
{
	u32 int_status = readl(ioaddr + SXGBE_DMA_CHA_STATUS_REG(channel_no));
	u32 clear_val = 0;
	u32 ret_val = 0;

	/* TX Normal Interrupt Summary */
	if (likely(int_status & SXGBE_DMA_INT_STATUS_NIS)) {
		x->normal_irq_n++;
		if (int_status & SXGBE_DMA_INT_STATUS_TI) {
			ret_val |= handle_tx;
			x->tx_normal_irq_n++;
			clear_val |= SXGBE_DMA_INT_STATUS_TI;
		}

		if (int_status & SXGBE_DMA_INT_STATUS_TBU) {
			x->tx_underflow_irq++;
			ret_val |= tx_bump_tc;
			clear_val |= SXGBE_DMA_INT_STATUS_TBU;
		}
	} else if (unlikely(int_status & SXGBE_DMA_INT_STATUS_AIS)) {
		/* TX Abnormal Interrupt Summary */
		if (int_status & SXGBE_DMA_INT_STATUS_TPS) {
			ret_val |= tx_hard_error;
			clear_val |= SXGBE_DMA_INT_STATUS_TPS;
			x->tx_process_stopped_irq++;
		}

		if (int_status & SXGBE_DMA_INT_STATUS_FBE) {
			ret_val |= tx_hard_error;
			x->fatal_bus_error_irq++;

			/* Assumption: the FBE bit is the combination of
			 * all the bus access errors and is cleared when
			 * the respective error bits are cleared
			 */

			/* check for actual cause */
			if (int_status & SXGBE_DMA_INT_STATUS_TEB0) {
				x->tx_read_transfer_err++;
				clear_val |= SXGBE_DMA_INT_STATUS_TEB0;
			} else {
				x->tx_write_transfer_err++;
			}

			if (int_status & SXGBE_DMA_INT_STATUS_TEB1) {
				x->tx_desc_access_err++;
				clear_val |= SXGBE_DMA_INT_STATUS_TEB1;
			} else {
				x->tx_buffer_access_err++;
			}

			if (int_status & SXGBE_DMA_INT_STATUS_TEB2) {
				x->tx_data_transfer_err++;
				clear_val |= SXGBE_DMA_INT_STATUS_TEB2;
			}
		}

		/* context descriptor error */
		if (int_status & SXGBE_DMA_INT_STATUS_CTXTERR) {
			x->tx_ctxt_desc_err++;
			clear_val |= SXGBE_DMA_INT_STATUS_CTXTERR;
		}
	}

	/* clear the served bits */
	writel(clear_val, ioaddr + SXGBE_DMA_CHA_STATUS_REG(channel_no));

	return ret_val;
}

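/* Dispatch sketch (assumed caller, illustrative names): the bitmask
 * returned above is built from the handle_tx/tx_bump_tc/tx_hard_error
 * flags declared alongside the driver's common definitions, so a caller
 * can react roughly like this:
 *
 *	int status = priv->hw->dma->tx_dma_int_status(ioaddr, cnum, &x);
 *	if (status & handle_tx)
 *		napi_schedule(&priv->napi);	// reap finished descriptors
 *	if (status & tx_hard_error)
 *		...				// restart/reset the channel
 */
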
static int sxgbe_rx_dma_int_status(void __iomem *ioaddr, int channel_no,
				   struct sxgbe_extra_stats *x)
{
	u32 int_status = readl(ioaddr + SXGBE_DMA_CHA_STATUS_REG(channel_no));
	u32 clear_val = 0;
	u32 ret_val = 0;

	/* RX Normal Interrupt Summary */
	if (likely(int_status & SXGBE_DMA_INT_STATUS_NIS)) {
		x->normal_irq_n++;
		if (int_status & SXGBE_DMA_INT_STATUS_RI) {
			ret_val |= handle_rx;
			x->rx_normal_irq_n++;
			clear_val |= SXGBE_DMA_INT_STATUS_RI;
		}
	} else if (unlikely(int_status & SXGBE_DMA_INT_STATUS_AIS)) {
		/* RX Abnormal Interrupt Summary */
		if (int_status & SXGBE_DMA_INT_STATUS_RBU) {
			ret_val |= rx_bump_tc;
			clear_val |= SXGBE_DMA_INT_STATUS_RBU;
			x->rx_underflow_irq++;
		}

		if (int_status & SXGBE_DMA_INT_STATUS_RPS) {
			ret_val |= rx_hard_error;
			clear_val |= SXGBE_DMA_INT_STATUS_RPS;
			x->rx_process_stopped_irq++;
		}

		if (int_status & SXGBE_DMA_INT_STATUS_FBE) {
			ret_val |= rx_hard_error;
			x->fatal_bus_error_irq++;

			/* Assumption: the FBE bit is the combination of
			 * all the bus access errors and is cleared when
			 * the respective error bits are cleared
			 */

			/* check for actual cause */
			if (int_status & SXGBE_DMA_INT_STATUS_REB0) {
				x->rx_read_transfer_err++;
				clear_val |= SXGBE_DMA_INT_STATUS_REB0;
			} else {
				x->rx_write_transfer_err++;
			}

			if (int_status & SXGBE_DMA_INT_STATUS_REB1) {
				x->rx_desc_access_err++;
				clear_val |= SXGBE_DMA_INT_STATUS_REB1;
			} else {
				x->rx_buffer_access_err++;
			}

			if (int_status & SXGBE_DMA_INT_STATUS_REB2) {
				x->rx_data_transfer_err++;
				clear_val |= SXGBE_DMA_INT_STATUS_REB2;
			}
		}
	}

	/* clear the served bits */
	writel(clear_val, ioaddr + SXGBE_DMA_CHA_STATUS_REG(channel_no));

	return ret_val;
}

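/* Note (interpretation, not from this file): rx_bump_tc on RBU means the
 * receive ring ran out of buffers; the caller is expected to refill the
 * ring and may bump the RX threshold before traffic resumes.
 */
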
/* Program the HW RX Watchdog */
static void sxgbe_dma_rx_watchdog(void __iomem *ioaddr, u32 riwt)
{
	u32 que_num;

	SXGBE_FOR_EACH_QUEUE(SXGBE_RX_QUEUES, que_num) {
		writel(riwt,
		       ioaddr + SXGBE_DMA_CHA_INT_RXWATCHTMR_REG(que_num));
	}
}

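/* Coalescing note (interpretation): a non-zero riwt arms a per-queue
 * watchdog that delays the RI interrupt, so several received frames can
 * be handled per IRQ at the cost of latency. The unit of riwt is
 * hardware-defined; the caller is assumed to convert from microseconds.
 * All RX queues are given the same value here.
 */
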
static void sxgbe_enable_tso(void __iomem *ioaddr, u8 chan_num)
{
	u32 ctrl;

	ctrl = readl(ioaddr + SXGBE_DMA_CHA_TXCTL_REG(chan_num));
	ctrl |= SXGBE_DMA_CHA_TXCTL_TSE_ENABLE;
	writel(ctrl, ioaddr + SXGBE_DMA_CHA_TXCTL_REG(chan_num));
}

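/* Usage sketch (assumed caller): TSE is a per-channel TX control bit, so
 * a driver advertising NETIF_F_TSO would typically enable it on every TX
 * queue at init time:
 *
 *	for (q = 0; q < tx_queue_count; q++)	// illustrative loop
 *		priv->hw->dma->enable_tso(priv->ioaddr, q);
 */
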
static const struct sxgbe_dma_ops sxgbe_dma_ops = {
	.init = sxgbe_dma_init,
	.cha_init = sxgbe_dma_channel_init,
	.enable_dma_transmission = sxgbe_enable_dma_transmission,
	.enable_dma_irq = sxgbe_enable_dma_irq,
	.disable_dma_irq = sxgbe_disable_dma_irq,
	.start_tx = sxgbe_dma_start_tx,
	.start_tx_queue = sxgbe_dma_start_tx_queue,
	.stop_tx = sxgbe_dma_stop_tx,
	.stop_tx_queue = sxgbe_dma_stop_tx_queue,
	.start_rx = sxgbe_dma_start_rx,
	.stop_rx = sxgbe_dma_stop_rx,
	.tx_dma_int_status = sxgbe_tx_dma_int_status,
	.rx_dma_int_status = sxgbe_rx_dma_int_status,
	.rx_watchdog = sxgbe_dma_rx_watchdog,
	.enable_tso = sxgbe_enable_tso,
};

const struct sxgbe_dma_ops *sxgbe_get_dma_ops(void)
{
	return &sxgbe_dma_ops;
}
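
/* Usage sketch (assumed, illustrative): the core driver is expected to
 * bind this vtable into its hardware-ops table once at probe time:
 *
 *	priv->hw->dma = sxgbe_get_dma_ops();
 *	priv->hw->dma->init(priv->ioaddr, fix_burst, burst_map);
 *
 * after which every DMA register access goes through sxgbe_dma_ops.
 */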