/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __SPARC64_IO_H
#define __SPARC64_IO_H

#include <linux/kernel.h>
#include <linux/compiler.h>
#include <linux/types.h>

#include <asm/page.h>      /* IO address mapping routines need this */
#include <asm/asi.h>
#include <asm-generic/pci_iomap.h>

/* BIO layer definitions. */
extern unsigned long kern_base, kern_size;

/* __raw_{read,write}{b,w,l,q} use direct access.
 * They access memory as big-endian, bypassing the cache,
 * by using ASI_PHYS_BYPASS_EC_E.
 */
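/* Illustrative sketch only (not part of this API): a read-modify-write of a
 * big-endian device register through the raw accessors. "regs" and REG_CSR
 * are hypothetical; these helpers imply no ordering against normal memory
 * accesses.
 *
 *	u32 csr;
 *
 *	csr = __raw_readl(regs + REG_CSR);
 *	__raw_writel(csr | 0x1, regs + REG_CSR);
 */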
#define __raw_readb __raw_readb
static inline u8 __raw_readb(const volatile void __iomem *addr)
{
	u8 ret;

	__asm__ __volatile__("lduba\t[%1] %2, %0\t/* pci_raw_readb */"
			     : "=r" (ret)
			     : "r" (addr), "i" (ASI_PHYS_BYPASS_EC_E));

	return ret;
}

#define __raw_readw __raw_readw
static inline u16 __raw_readw(const volatile void __iomem *addr)
{
	u16 ret;

	__asm__ __volatile__("lduha\t[%1] %2, %0\t/* pci_raw_readw */"
			     : "=r" (ret)
			     : "r" (addr), "i" (ASI_PHYS_BYPASS_EC_E));

	return ret;
}

#define __raw_readl __raw_readl
static inline u32 __raw_readl(const volatile void __iomem *addr)
{
	u32 ret;

	__asm__ __volatile__("lduwa\t[%1] %2, %0\t/* pci_raw_readl */"
			     : "=r" (ret)
			     : "r" (addr), "i" (ASI_PHYS_BYPASS_EC_E));

	return ret;
}

#define __raw_readq __raw_readq
static inline u64 __raw_readq(const volatile void __iomem *addr)
{
	u64 ret;

	__asm__ __volatile__("ldxa\t[%1] %2, %0\t/* pci_raw_readq */"
			     : "=r" (ret)
			     : "r" (addr), "i" (ASI_PHYS_BYPASS_EC_E));

	return ret;
}

#define __raw_writeb __raw_writeb
static inline void __raw_writeb(u8 b, const volatile void __iomem *addr)
{
	__asm__ __volatile__("stba\t%r0, [%1] %2\t/* pci_raw_writeb */"
			     : /* no outputs */
			     : "Jr" (b), "r" (addr), "i" (ASI_PHYS_BYPASS_EC_E));
}

#define __raw_writew __raw_writew
static inline void __raw_writew(u16 w, const volatile void __iomem *addr)
{
	__asm__ __volatile__("stha\t%r0, [%1] %2\t/* pci_raw_writew */"
			     : /* no outputs */
			     : "Jr" (w), "r" (addr), "i" (ASI_PHYS_BYPASS_EC_E));
}

#define __raw_writel __raw_writel
static inline void __raw_writel(u32 l, const volatile void __iomem *addr)
{
	__asm__ __volatile__("stwa\t%r0, [%1] %2\t/* pci_raw_writel */"
			     : /* no outputs */
			     : "Jr" (l), "r" (addr), "i" (ASI_PHYS_BYPASS_EC_E));
}

#define __raw_writeq __raw_writeq
static inline void __raw_writeq(u64 q, const volatile void __iomem *addr)
{
	__asm__ __volatile__("stxa\t%r0, [%1] %2\t/* pci_raw_writeq */"
			     : /* no outputs */
			     : "Jr" (q), "r" (addr), "i" (ASI_PHYS_BYPASS_EC_E));
}

/* Memory functions, same as I/O accesses on Ultra.
 * They access memory as little-endian, bypassing the cache,
 * by using ASI_PHYS_BYPASS_EC_E_L.
 */
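/* Illustrative sketch only: a typical little-endian PCI MMIO sequence.
 * "bar", REG_STATUS and REG_CTRL are hypothetical. Note that the "memory"
 * clobber in these accessors keeps the compiler from reordering them
 * against other memory accesses.
 *
 *	u32 status = readl(bar + REG_STATUS);
 *
 *	if (status & 0x1)
 *		writel(0x1, bar + REG_CTRL);
 */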
#define readb readb
#define readb_relaxed readb
static inline u8 readb(const volatile void __iomem *addr)
{
	u8 ret;

	__asm__ __volatile__("lduba\t[%1] %2, %0\t/* pci_readb */"
			     : "=r" (ret)
			     : "r" (addr), "i" (ASI_PHYS_BYPASS_EC_E_L)
			     : "memory");

	return ret;
}

#define readw readw
#define readw_relaxed readw
static inline u16 readw(const volatile void __iomem *addr)
{
	u16 ret;

	__asm__ __volatile__("lduha\t[%1] %2, %0\t/* pci_readw */"
			     : "=r" (ret)
			     : "r" (addr), "i" (ASI_PHYS_BYPASS_EC_E_L)
			     : "memory");

	return ret;
}

#define readl readl
#define readl_relaxed readl
static inline u32 readl(const volatile void __iomem *addr)
{
	u32 ret;

	__asm__ __volatile__("lduwa\t[%1] %2, %0\t/* pci_readl */"
			     : "=r" (ret)
			     : "r" (addr), "i" (ASI_PHYS_BYPASS_EC_E_L)
			     : "memory");

	return ret;
}

#define readq readq
#define readq_relaxed readq
static inline u64 readq(const volatile void __iomem *addr)
{
	u64 ret;

	__asm__ __volatile__("ldxa\t[%1] %2, %0\t/* pci_readq */"
			     : "=r" (ret)
			     : "r" (addr), "i" (ASI_PHYS_BYPASS_EC_E_L)
			     : "memory");

	return ret;
}

#define writeb writeb
#define writeb_relaxed writeb
static inline void writeb(u8 b, volatile void __iomem *addr)
{
	__asm__ __volatile__("stba\t%r0, [%1] %2\t/* pci_writeb */"
			     : /* no outputs */
			     : "Jr" (b), "r" (addr), "i" (ASI_PHYS_BYPASS_EC_E_L)
			     : "memory");
}

#define writew writew
#define writew_relaxed writew
static inline void writew(u16 w, volatile void __iomem *addr)
{
	__asm__ __volatile__("stha\t%r0, [%1] %2\t/* pci_writew */"
			     : /* no outputs */
			     : "Jr" (w), "r" (addr), "i" (ASI_PHYS_BYPASS_EC_E_L)
			     : "memory");
}

#define writel writel
#define writel_relaxed writel
static inline void writel(u32 l, volatile void __iomem *addr)
{
	__asm__ __volatile__("stwa\t%r0, [%1] %2\t/* pci_writel */"
			     : /* no outputs */
			     : "Jr" (l), "r" (addr), "i" (ASI_PHYS_BYPASS_EC_E_L)
			     : "memory");
}

#define writeq writeq
#define writeq_relaxed writeq
static inline void writeq(u64 q, volatile void __iomem *addr)
{
	__asm__ __volatile__("stxa\t%r0, [%1] %2\t/* pci_writeq */"
			     : /* no outputs */
			     : "Jr" (q), "r" (addr), "i" (ASI_PHYS_BYPASS_EC_E_L)
			     : "memory");
}

#define inb inb
static inline u8 inb(unsigned long addr)
{
	return readb((volatile void __iomem *)addr);
}

#define inw inw
static inline u16 inw(unsigned long addr)
{
	return readw((volatile void __iomem *)addr);
}

#define inl inl
static inline u32 inl(unsigned long addr)
{
	return readl((volatile void __iomem *)addr);
}

#define outb outb
static inline void outb(u8 b, unsigned long addr)
{
	writeb(b, (volatile void __iomem *)addr);
}

#define outw outw
static inline void outw(u16 w, unsigned long addr)
{
	writew(w, (volatile void __iomem *)addr);
}

#define outl outl
static inline void outl(u32 l, unsigned long addr)
{
	writel(l, (volatile void __iomem *)addr);
}

#define inb_p(__addr) inb(__addr)
#define outb_p(__b, __addr) outb(__b, __addr)
#define inw_p(__addr) inw(__addr)
#define outw_p(__w, __addr) outw(__w, __addr)
#define inl_p(__addr) inl(__addr)
#define outl_p(__l, __addr) outl(__l, __addr)
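/* Illustrative sketch only: on sparc64 the "port" argument is simply an
 * address inside an I/O space mapping, so inb()/outb() are implemented with
 * the MMIO accessors above. "io_base", "c" and the UART-style offsets below
 * are hypothetical.
 *
 *	u8 lsr = inb(io_base + 0x5);
 *
 *	if (lsr & 0x20)
 *		outb(c, io_base + 0x0);
 */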

void outsb(unsigned long, const void *, unsigned long);
void outsw(unsigned long, const void *, unsigned long);
void outsl(unsigned long, const void *, unsigned long);
void insb(unsigned long, void *, unsigned long);
void insw(unsigned long, void *, unsigned long);
void insl(unsigned long, void *, unsigned long);

static inline void readsb(void __iomem *port, void *buf, unsigned long count)
{
	insb((unsigned long __force)port, buf, count);
}

static inline void readsw(void __iomem *port, void *buf, unsigned long count)
{
	insw((unsigned long __force)port, buf, count);
}

static inline void readsl(void __iomem *port, void *buf, unsigned long count)
{
	insl((unsigned long __force)port, buf, count);
}

static inline void writesb(void __iomem *port, const void *buf, unsigned long count)
{
	outsb((unsigned long __force)port, buf, count);
}

static inline void writesw(void __iomem *port, const void *buf, unsigned long count)
{
	outsw((unsigned long __force)port, buf, count);
}

static inline void writesl(void __iomem *port, const void *buf, unsigned long count)
{
	outsl((unsigned long __force)port, buf, count);
}

#define ioread8_rep(p,d,l)	readsb(p,d,l)
#define ioread16_rep(p,d,l)	readsw(p,d,l)
#define ioread32_rep(p,d,l)	readsl(p,d,l)
#define iowrite8_rep(p,d,l)	writesb(p,d,l)
#define iowrite16_rep(p,d,l)	writesw(p,d,l)
#define iowrite32_rep(p,d,l)	writesl(p,d,l)
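/* Illustrative sketch only: draining or filling a 16-bit data FIFO with the
 * string accessors (or their ioread/iowrite *_rep aliases above). "fifo" and
 * "nwords" (<= 64 here) are hypothetical.
 *
 *	u16 buf[64];
 *
 *	readsw(fifo, buf, nwords);
 *	writesw(fifo, buf, nwords);
 */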

/* Valid I/O space regions can be anywhere, because each supported PCI bus
 * can live in an arbitrary area of the physical address range.
 */
#define IO_SPACE_LIMIT 0xffffffffffffffffUL

/* Now the SBUS variants; the only difference from PCI is that we do
 * not use the little-endian ASIs.
 */
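/* Illustrative sketch only: SBUS registers are big-endian, so these wrappers
 * just forward to the __raw_* accessors. "regs" and SBUS_CFG are
 * hypothetical.
 *
 *	u32 cfg = sbus_readl(regs + SBUS_CFG);
 *
 *	sbus_writel(cfg | 0x1, regs + SBUS_CFG);
 */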
static inline u8 sbus_readb(const volatile void __iomem *addr)
{
	return __raw_readb(addr);
}

static inline u16 sbus_readw(const volatile void __iomem *addr)
{
	return __raw_readw(addr);
}

static inline u32 sbus_readl(const volatile void __iomem *addr)
{
	return __raw_readl(addr);
}

static inline u64 sbus_readq(const volatile void __iomem *addr)
{
	return __raw_readq(addr);
}

static inline void sbus_writeb(u8 b, volatile void __iomem *addr)
{
	__raw_writeb(b, addr);
}

static inline void sbus_writew(u16 w, volatile void __iomem *addr)
{
	__raw_writew(w, addr);
}

static inline void sbus_writel(u32 l, volatile void __iomem *addr)
{
	__raw_writel(l, addr);
}

static inline void sbus_writeq(u64 q, volatile void __iomem *addr)
{
	__raw_writeq(q, addr);
}

static inline void sbus_memset_io(volatile void __iomem *dst, int c, __kernel_size_t n)
{
	while (n--) {
		sbus_writeb(c, dst);
		dst++;
	}
}

static inline void memset_io(volatile void __iomem *dst, int c, __kernel_size_t n)
{
	volatile void __iomem *d = dst;

	while (n--) {
		writeb(c, d);
		d++;
	}
}

static inline void sbus_memcpy_fromio(void *dst, const volatile void __iomem *src,
				      __kernel_size_t n)
{
	char *d = dst;

	while (n--) {
		char tmp = sbus_readb(src);
		*d++ = tmp;
		src++;
	}
}

static inline void memcpy_fromio(void *dst, const volatile void __iomem *src,
				 __kernel_size_t n)
{
	char *d = dst;

	while (n--) {
		char tmp = readb(src);
		*d++ = tmp;
		src++;
	}
}

static inline void sbus_memcpy_toio(volatile void __iomem *dst, const void *src,
				    __kernel_size_t n)
{
	const char *s = src;
	volatile void __iomem *d = dst;

	while (n--) {
		char tmp = *s++;
		sbus_writeb(tmp, d);
		d++;
	}
}

static inline void memcpy_toio(volatile void __iomem *dst, const void *src,
			       __kernel_size_t n)
{
	const char *s = src;
	volatile void __iomem *d = dst;

	while (n--) {
		char tmp = *s++;
		writeb(tmp, d);
		d++;
	}
}
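/* Illustrative sketch only: these helpers copy byte-at-a-time through the
 * accessors above, so they are safe for device memory but not tuned for
 * throughput. "shmem" and "len" are hypothetical.
 *
 *	char hdr[16];
 *
 *	memcpy_fromio(hdr, shmem, sizeof(hdr));
 *	memset_io(shmem, 0, len);
 */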

#ifdef __KERNEL__

/* On sparc64 we have the whole physical IO address space accessible
 * using physically addressed loads and stores, so this does nothing.
 */
static inline void __iomem *ioremap(unsigned long offset, unsigned long size)
{
	return (void __iomem *)offset;
}

#define ioremap_uc(X,Y)		ioremap((X),(Y))
#define ioremap_wc(X,Y)		ioremap((X),(Y))
#define ioremap_wt(X,Y)		ioremap((X),(Y))

static inline void iounmap(volatile void __iomem *addr)
{
}
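/* Illustrative sketch only: since ioremap() simply casts the physical
 * address to an __iomem cookie and iounmap() is a no-op, the usual
 * map/access/unmap pattern still works unchanged. "res" is a hypothetical
 * struct resource describing the device registers.
 *
 *	void __iomem *regs = ioremap(res->start, resource_size(res));
 *
 *	if (regs) {
 *		u32 id = readl(regs);
 *		iounmap(regs);
 *	}
 */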

#define ioread8 readb
#define ioread16 readw
#define ioread16be __raw_readw
#define ioread32 readl
#define ioread32be __raw_readl
#define iowrite8 writeb
#define iowrite16 writew
#define iowrite16be __raw_writew
#define iowrite32 writel
#define iowrite32be __raw_writel

/* Create a virtual mapping cookie for an IO port range */
void __iomem *ioport_map(unsigned long port, unsigned int nr);
void ioport_unmap(void __iomem *);

/* Create a virtual mapping cookie for a PCI BAR (memory or IO) */
struct pci_dev;
void pci_iounmap(struct pci_dev *dev, void __iomem *);
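/* Illustrative sketch only: pci_iomap() (declared by asm-generic/pci_iomap.h,
 * included above) pairs with pci_iounmap() for PCI BARs, just as ioport_map()
 * pairs with ioport_unmap() for legacy I/O ranges. "pdev" and BAR 0 are
 * hypothetical.
 *
 *	void __iomem *bar = pci_iomap(pdev, 0, 0);
 *
 *	if (bar) {
 *		u32 ver = ioread32(bar);
 *		pci_iounmap(pdev, bar);
 *	}
 */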

static inline int sbus_can_dma_64bit(void)
{
	return 1;
}

static inline int sbus_can_burst64(void)
{
	return 1;
}

struct device;
void sbus_set_sbus64(struct device *, int);

/*
 * Convert a physical pointer to a virtual kernel pointer for /dev/mem
 * access
 */
#define xlate_dev_mem_ptr(p) __va(p)

/*
 * Convert a virtual cached pointer to an uncached pointer
 */
#define xlate_dev_kmem_ptr(p) p

#endif

#endif /* !(__SPARC64_IO_H) */