/*
 *  relocate - common relocation function for ARM U-Boot
 *
 *  Copyright (c) 2013  Albert ARIBAUD <albert.u.boot@aribaud.net>
 *
 * SPDX-License-Identifier:	GPL-2.0+
 */

#include <asm-offsets.h>
#include <config.h>
#include <elf.h>
#include <linux/linkage.h>
#ifdef CONFIG_CPU_V7M
#include <asm/armv7m.h>
#endif

/*
 * Default/weak exception vectors relocation routine
 *
 * This routine covers the standard ARM cases: normal (0x00000000),
 * high (0xffff0000) and VBAR. SoCs which do not comply with any of
 * the standard cases must provide their own, strong, version.
 */
	.section	.text.relocate_vectors,"ax",%progbits
	.weak		relocate_vectors

/*
 * relocate_vectors - point the CPU at the relocated exception vectors
 *
 * U-Boot convention: r9 holds the global data (gd) pointer, and
 * gd->relocaddr is the post-relocation base address of the image,
 * where the vector table now lives.
 *
 * Returns via lr. Clobbers: r0-r2 (plus r3-r8, r10 on the copy path).
 */
ENTRY(relocate_vectors)

#ifdef CONFIG_CPU_V7M
	/*
	 * On ARMv7-M we only have to write the new vector address
	 * to VTOR register.
	 */
	ldr	r0, [r9, #GD_RELOCADDR]	/* r0 = gd->relocaddr */
	ldr	r1, =V7M_SCB_BASE
	str	r0, [r1, V7M_SCB_VTOR]
#else
#ifdef CONFIG_HAS_VBAR
	/*
	 * If the ARM processor has the security extensions,
	 * use VBAR to relocate the exception vectors.
	 */
	ldr	r0, [r9, #GD_RELOCADDR]	/* r0 = gd->relocaddr */
	mcr     p15, 0, r0, c12, c0, 0  /* Set VBAR */
#else
	/*
	 * Copy the relocated exception vectors to the
	 * correct address
	 * CP15 c1 V bit gives us the location of the vectors:
	 * 0x00000000 or 0xFFFF0000.
	 */
	ldr	r0, [r9, #GD_RELOCADDR]	/* r0 = gd->relocaddr */
	mrc	p15, 0, r2, c1, c0, 0	/* V bit (bit[13]) in CP15 c1 */
	ands	r2, r2, #(1 << 13)	/* SCTLR.V selects vector base */
	ldreq	r1, =0x00000000		/* If V=0 */
	ldrne	r1, =0xFFFF0000		/* If V=1 */
	/*
	 * Copy 2 x 32 bytes (64 bytes total) from the image start:
	 * the vector table itself plus, presumably, the literal pool
	 * of handler addresses it references — TODO confirm against
	 * the vectors layout in vectors.S.
	 */
	ldmia	r0!, {r2-r8,r10}
	stmia	r1!, {r2-r8,r10}
	ldmia	r0!, {r2-r8,r10}
	stmia	r1!, {r2-r8,r10}
#endif
#endif
	bx	lr

ENDPROC(relocate_vectors)

/*
 * void relocate_code(addr_moni)
 *
 * This function relocates the monitor code.
 *
 * NOTE:
 * To prevent the code below from containing references with an R_ARM_ABS32
 * relocation record type, we never refer to linker-defined symbols directly.
 * Instead, we declare literals which contain their relative location with
 * respect to relocate_code, and at run time, add relocate_code back to them.
 */

/*
 * relocate_code - copy the image to its new address and patch relocations
 *
 * In:   r0 = destination address (addr_moni) for the relocated monitor
 * Out:  image copied to [r0..), all R_ARM_RELATIVE fixups re-based
 * Clobbers: r0-r4, r10-r11, flags; returns via lr (mov pc, lr on ARMv4)
 */
ENTRY(relocate_code)
	ldr	r1, =__image_copy_start	/* r1 <- SRC &__image_copy_start */
	subs	r4, r0, r1		/* r4 <- relocation offset */
	beq	relocate_done		/* skip relocation */
	ldr	r2, =__image_copy_end	/* r2 <- SRC &__image_copy_end */

copy_loop:
	/* 8 bytes per iteration — assumes the copy region is a
	 * multiple of 8 bytes (linker-script aligned; TODO confirm) */
	ldmia	r1!, {r10-r11}		/* copy from source address [r1]    */
	stmia	r0!, {r10-r11}		/* copy to   target address [r0]    */
	cmp	r1, r2			/* until source end address [r2]    */
	blo	copy_loop

	/*
	 * fix .rel.dyn relocations
	 */
	ldr	r2, =__rel_dyn_start	/* r2 <- SRC &__rel_dyn_start */
	ldr	r3, =__rel_dyn_end	/* r3 <- SRC &__rel_dyn_end */
fixloop:
	/* Each Elf32_Rel entry: r0 = r_offset, r1 = r_info */
	ldmia	r2!, {r0-r1}		/* (r0,r1) <- (SRC location,fixup) */
	and	r1, r1, #0xff		/* relocation type = low byte of r_info */
	cmp	r1, #R_ARM_RELATIVE
	bne	fixnext			/* only R_ARM_RELATIVE is handled */

	/* relative fix: increase location by offset */
	add	r0, r0, r4		/* r0 = r_offset rebased to new image */
	ldr	r1, [r0]
	add	r1, r1, r4		/* patch stored address by the offset */
	str	r1, [r0]
fixnext:
	cmp	r2, r3
	blo	fixloop			/* NOTE(review): do-while shape — one
					 * entry is processed before the bounds
					 * check; assumes .rel.dyn is non-empty */

relocate_done:

#ifdef __XSCALE__
	/*
	 * On xscale, icache must be invalidated and write buffers drained,
	 * even with cache disabled - 4.2.7 of xscale core developer's manual
	 */
	mcr	p15, 0, r0, c7, c7, 0	/* invalidate icache */
	mcr	p15, 0, r0, c7, c10, 4	/* drain write buffer */
#endif

	/* ARMv4- don't know bx lr but the assembler fails to see that */

#ifdef __ARM_ARCH_4__
	mov	pc, lr
#else
	bx	lr
#endif

ENDPROC(relocate_code)