xref: /OK3568_Linux_fs/u-boot/arch/arm/include/asm/assembler.h (revision 4882a59341e53eb6f0b4789bf948001014eff981)
/*
 *  arch/arm/include/asm/assembler.h
 *
 *  Copyright (C) 1996-2000 Russell King
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 *  This file contains arm architecture specific defines
 *  for the different processors.
 *
 *  Do not include any C declarations in this file - it is included by
 *  assembler source.
 */

#include <config.h>
#include <asm/unified.h>

/*
 * Endian independent macros for shifting bytes within registers.
 */
#ifndef __ARMEB__
#define lspull		lsr
#define lspush		lsl
#define get_byte_0	lsl #0
#define get_byte_1	lsr #8
#define get_byte_2	lsr #16
#define get_byte_3	lsr #24
#define put_byte_0	lsl #0
#define put_byte_1	lsl #8
#define put_byte_2	lsl #16
#define put_byte_3	lsl #24
#else
#define lspull		lsl
#define lspush		lsr
#define get_byte_0	lsr #24
#define get_byte_1	lsr #16
#define get_byte_2	lsr #8
#define get_byte_3	lsl #0
#define put_byte_0	lsl #24
#define put_byte_1	lsl #16
#define put_byte_2	lsl #8
#define put_byte_3	lsl #0
#endif
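/*
 * Illustrative usage (a minimal sketch; the register choices below are
 * arbitrary).  get_byte_N shifts the byte at memory offset N of a loaded
 * word down into bits 7..0 on either endianness, so a byte store needs
 * no extra masking:
 *
 *	ldr	r3, [r1]		@ load one aligned word
 *	mov	r2, r3, get_byte_1	@ byte at offset 1 now in bits 7..0
 *	strb	r2, [r0]		@ strb ignores the upper bits
 *
 * lspull/lspush are typically paired to assemble a misaligned word from
 * two adjacent aligned words, e.g. for a one-byte misalignment:
 *
 *	mov	r3, r4, lspull #8
 *	orr	r3, r3, r5, lspush #24
 */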

/*
 * Data preload for architectures that support it
 */
#if defined(__ARM_ARCH_5E__) || defined(__ARM_ARCH_5TE__) || \
	defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_6J__) || \
	defined(__ARM_ARCH_6T2__) || defined(__ARM_ARCH_6Z__) || \
	defined(__ARM_ARCH_6ZK__) || defined(__ARM_ARCH_7A__) || \
	defined(__ARM_ARCH_7R__)
#define PLD(code...)	code
#else
#define PLD(code...)
#endif
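/*
 * Illustrative usage (a minimal sketch): callers wrap each preload in
 * PLD() so that it compiles away on cores without the pld instruction,
 * e.g.:
 *
 *		PLD(	pld	[r1, #32]	)
 */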

/*
 * All supported cores implement at least Thumb-1, so we can always use
 * 'bx lr' to return.
 */
	.irp	c,,eq,ne,cs,cc,mi,pl,vs,vc,hi,ls,ge,lt,gt,le,hs,lo
	.macro	ret\c, reg
	.ifeqs	"\reg", "lr"
	bx\c	\reg
	.else
	mov\c	pc, \reg
	.endif
	.endm
	.endr
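/*
 * Illustrative expansion (a sketch): with the macros generated above,
 * "ret lr" assembles to "bx lr", while e.g. "retne r4" falls back to
 * "movne pc, r4" because the operand is not the link register.
 */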

/*
 * Cache alignment helper used by the optimized memcpy/memset.
 * In the kernel this is only enabled for Feroceon CPUs.
 * We disable it for Thumb builds since the instruction sequences
 * involved are not written in a Thumb-compatible way.
 */
#if CONFIG_IS_ENABLED(SYS_THUMB_BUILD)
#define CALGN(code...)
#else
#define CALGN(code...) code
#endif
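/*
 * Illustrative usage (a minimal sketch; the register convention here,
 * r0 as the destination pointer, is assumed from the usual memcpy entry
 * arguments): alignment fix-up code is wrapped in CALGN() so it vanishes
 * on Thumb builds, e.g. computing the distance to a 32-byte boundary:
 *
 *	CALGN(	ands	ip, r0, #31		)
 *	CALGN(	rsb	r3, ip, #32		)
 */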