/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_POWERPC_VGA_H_
#define _ASM_POWERPC_VGA_H_

#ifdef __KERNEL__

/*
 * Access to VGA videoram
 *
 * (c) 1998 Martin Mares <mj@ucw.cz>
 */

#include <asm/io.h>

#if defined(CONFIG_VGA_CONSOLE) || defined(CONFIG_MDA_CONSOLE)

#define VT_BUF_HAVE_RW
/*
 * These are only needed for supporting VGA or MDA text mode, which use little
 * endian byte ordering.
 * In other cases, we can optimize by using native byte ordering and
 * <linux/vt_buffer.h> has already done the right job for us.
 */

static inline void scr_writew(u16 val, volatile u16 *addr)
{
	*addr = cpu_to_le16(val);
}

static inline u16 scr_readw(volatile const u16 *addr)
{
	return le16_to_cpu(*addr);
}
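
/*
 * Illustrative sketch, not part of this header: the VT buffer layer goes
 * through these accessors so that a character/attribute cell always reaches
 * VGA/MDA video RAM in little-endian order, even on big-endian powerpc.
 * "cell" below is a hypothetical pointer into a text-mode buffer:
 *
 *	scr_writew(0x0741, cell);	// attribute 0x07, character 'A'
 *	c = scr_readw(cell);		// reads back 0x0741 regardless of
 *					// CPU endianness
 */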

#define VT_BUF_HAVE_MEMSETW
/* The length n is in bytes; memset16() takes a count of 16-bit cells. */
static inline void scr_memsetw(u16 *s, u16 v, unsigned int n)
{
	memset16(s, cpu_to_le16(v), n / 2);
}

#define VT_BUF_HAVE_MEMCPYW
#define VT_BUF_HAVE_MEMMOVEW
#define scr_memcpyw	memcpy
#define scr_memmovew	memmove

#endif /* !CONFIG_VGA_CONSOLE && !CONFIG_MDA_CONSOLE */

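/*
 * VGA_MAP_MEM() turns a legacy VGA framebuffer address into something the
 * kernel can dereference: 64-bit powerpc has to ioremap() the region first,
 * while the 32-bit variant passes the address through unchanged.
 * Illustrative (hypothetical) use, roughly what a text console driver does
 * for the classic colour text buffer:
 *
 *	unsigned long vram = VGA_MAP_MEM(0xb8000, 0x8000);
 */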
#ifdef __powerpc64__
#define VGA_MAP_MEM(x,s) ((unsigned long) ioremap((x), s))
#else
#define VGA_MAP_MEM(x,s) (x)
#endif

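/*
 * Byte accessors used by the console drivers on the mapped VGA memory;
 * plain dereferences suffice here, no byte swapping is involved.
 */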
#define vga_readb(x) (*(x))
#define vga_writeb(x,y) (*(y) = (x))

#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_VGA_H_ */