/*
 *  arch/arm/include/asm/assembler.h
 *
 *  Copyright (C) 1996-2000 Russell King
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 *  This file contains arm architecture specific defines
 *  for the different processors.
 *
 *  Do not include any C declarations in this file - it is included by
 *  assembler source.
 */

#include <config.h>

/*
 * Endian independent macros for shifting bytes within registers.
 */
#ifndef __ARMEB__
#define lspull		lsr
#define lspush		lsl
#define get_byte_0	lsl #0
#define get_byte_1	lsr #8
#define get_byte_2	lsr #16
#define get_byte_3	lsr #24
#define put_byte_0	lsl #0
#define put_byte_1	lsl #8
#define put_byte_2	lsl #16
#define put_byte_3	lsl #24
#else
#define lspull		lsl
#define lspush		lsr
#define get_byte_0	lsr #24
#define get_byte_1	lsr #16
#define get_byte_2	lsr #8
#define get_byte_3	lsl #0
#define put_byte_0	lsl #24
#define put_byte_1	lsl #16
#define put_byte_2	lsl #8
#define put_byte_3	lsl #0
#endif

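/*
 * Illustrative usage (not part of the original header): a hedged sketch of
 * how these macros keep byte handling endian independent.  Assuming r7
 * holds a loaded word and r0 a destination pointer, a byte-wise store
 * could look like:
 *
 *	mov	r3, r7, get_byte_0	@ lsl #0 on LE, lsr #24 on BE
 *	strb	r3, [r0], #1
 *	mov	r3, r7, get_byte_1	@ lsr #8 on LE, lsr #16 on BE
 *	strb	r3, [r0], #1
 *
 * and an unaligned word can be assembled from two aligned loads with
 * lspull/lspush (the shift amounts depend on the misalignment):
 *
 *	mov	r3, r4, lspull #8
 *	orr	r3, r3, r5, lspush #24
 */
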
/*
 * Data preload for architectures that support it
 */
#if defined(__ARM_ARCH_5E__) || defined(__ARM_ARCH_5TE__) || \
	defined(__ARM_ARCH_6__) || defined(__ARM_ARCH_6J__) || \
	defined(__ARM_ARCH_6T2__) || defined(__ARM_ARCH_6Z__) || \
	defined(__ARM_ARCH_6ZK__) || defined(__ARM_ARCH_7A__) || \
	defined(__ARM_ARCH_7R__)
#define PLD(code...)	code
#else
#define PLD(code...)
#endif

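/*
 * Illustrative usage (not part of the original header): copy loops can wrap
 * their prefetches in PLD() so the instruction is only emitted where the
 * architecture supports it, e.g.
 *
 *	PLD(	pld	[r1, #64]	)
 *
 * which expands to nothing on cores without a preload instruction.
 */
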
	.irp	c,,eq,ne,cs,cc,mi,pl,vs,vc,hi,ls,ge,lt,gt,le,hs,lo
	.macro	ret\c, reg
#if defined(__ARM_ARCH_5E__) || defined(__ARM_ARCH_5TE__)
	mov\c	pc, \reg
#else
	.ifeqs	"\reg", "lr"
	bx\c	\reg
	.else
	mov\c	pc, \reg
	.endif
#endif
	.endm
	.endr

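/*
 * Illustrative usage (not part of the original header): the .irp loop above
 * generates ret, reteq, retne, ... macros so callers have a single return
 * idiom across architectures, e.g.
 *
 *	ret	lr		@ bx lr where available, else mov pc, lr
 *	reteq	lr		@ conditional variant
 */
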
/*
 * Cache aligned, used for optimized memcpy/memset.
 * In the kernel this is only enabled for Feroceon CPUs.
 * We disable it for Thumb builds since those code paths are not
 * written in a Thumb-ready way.
 */
#ifdef CONFIG_SYS_THUMB_BUILD
#define CALGN(code...)
#else
#define CALGN(code...) code
#endif
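
/*
 * Illustrative usage (not part of the original header, register choices are
 * hypothetical): memcpy-style code keeps its cache-line alignment tuning
 * inside CALGN() so it drops out of Thumb builds, e.g.
 *
 *	CALGN(	ands	ip, r1, #31	)	@ offset within the cache line
 *	CALGN(	beq	1f		)	@ already aligned, skip tuning
 */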