/*
 * armboot - Startup Code for XScale CPU-core
 *
 * Copyright (C) 1998 Dan Malek <dmalek@jlc.net>
 * Copyright (C) 1999 Magnus Damm <kieraypc01.p.y.kie.era.ericsson.se>
 * Copyright (C) 2000 Wolfgang Denk <wd@denx.de>
 * Copyright (C) 2001 Alex Zuepke <azu@sysgo.de>
 * Copyright (C) 2001 Marius Groger <mag@sysgo.de>
 * Copyright (C) 2002 Alex Zupke <azu@sysgo.de>
 * Copyright (C) 2002 Gary Jennejohn <garyj@denx.de>
 * Copyright (C) 2002 Kyle Harris <kharris@nexus-tech.net>
 * Copyright (C) 2003 Kai-Uwe Bloem <kai-uwe.bloem@auerswald.de>
 * Copyright (C) 2003 Kshitij <kshitij@ti.com>
 * Copyright (C) 2003 Richard Woodruff <r-woodruff2@ti.com>
 * Copyright (C) 2003 Robert Schwebel <r.schwebel@pengutronix.de>
 * Copyright (C) 2004 Texas Instruments <r-woodruff2@ti.com>
 * Copyright (C) 2010 Marek Vasut <marek.vasut@gmail.com>
 *
 * SPDX-License-Identifier:	GPL-2.0+
 */

#include <asm-offsets.h>
#include <config.h>
#include <version.h>

/*
 * ARM exception vector table.  Must be the very first thing in the
 * image.  Both variants below are laid out to occupy exactly 16 words
 * (64 bytes): 8 vector slots plus the 8 literal words the "ldr pc"
 * slots load their targets from.
 */
.globl _start
_start:	b	reset
#ifdef CONFIG_SPL_BUILD
	/*
	 * SPL has no real exception support: every exception vector
	 * simply parks the CPU in do_hang.
	 */
	ldr	pc, _hang
	ldr	pc, _hang
	ldr	pc, _hang
	ldr	pc, _hang
	ldr	pc, _hang
	ldr	pc, _hang
	ldr	pc, _hang

_hang:
	.word	do_hang
	/* pad literal pool so the table stays exactly 64 bytes long */
	.word	0x12345678
	.word	0x12345678
	.word	0x12345678
	.word	0x12345678
	.word	0x12345678
	.word	0x12345678
	.word	0x12345678			/* now 16*4=64 */
#else
	/* Full U-Boot: route each exception to its handler below. */
	ldr	pc, _undefined_instruction
	ldr	pc, _software_interrupt
	ldr	pc, _prefetch_abort
	ldr	pc, _data_abort
	ldr	pc, _not_used
	ldr	pc, _irq
	ldr	pc, _fiq

/* Literal pool holding the handler addresses for the vectors above. */
_undefined_instruction:	.word undefined_instruction
_software_interrupt:	.word software_interrupt
_prefetch_abort:	.word prefetch_abort
_data_abort:		.word data_abort
_not_used:		.word not_used
_irq:			.word irq
_fiq:			.word fiq
_pad:			.word 0x12345678	/* now 16*4=64 */
#endif	/* CONFIG_SPL_BUILD */
.global _end_vect
_end_vect:

	.balignl 16,0xdeadbeef
/*
 *************************************************************************
 *
 * Startup Code (reset vector)
 *
 * do important init only if we don't start from memory!
 * setup Memory and board specific bits prior to relocation.
 * relocate armboot to ram
 * setup stack
 *
 *************************************************************************
 */

#ifdef CONFIG_USE_IRQ
/* IRQ stack memory (calculated at run-time) */
.globl IRQ_STACK_START
IRQ_STACK_START:
	.word	0x0badc0de

/* FIQ stack memory (calculated at run-time) */
.globl FIQ_STACK_START
FIQ_STACK_START:
	.word	0x0badc0de
#endif

/* IRQ stack memory (calculated at run-time) + 8 bytes */
.globl IRQ_STACK_START_IN
IRQ_STACK_START_IN:
	.word	0x0badc0de

/*
 * the actual reset code
 */

reset:
	/*
	 * set the cpu to SVC32 mode
	 * (0xd3 = SVC mode bits 0x13 with both IRQ and FIQ masked)
	 */
	mrs	r0,cpsr
	bic	r0,r0,#0x1f			@ clear current mode field
	orr	r0,r0,#0xd3			@ SVC mode, I+F disabled
	msr	cpsr,r0

#ifndef CONFIG_SKIP_LOWLEVEL_INIT
	bl	cpu_init_crit			@ flush caches, disable MMU
#endif

#ifdef CONFIG_CPU_PXA25X
	bl	lock_cache_for_stack		@ D-cache-as-RAM for early stack
#endif

	bl	_main				@ generic ARM init; does not return

/*------------------------------------------------------------------------------*/

	.globl	c_runtime_cpu_setup
c_runtime_cpu_setup:

#ifdef CONFIG_CPU_PXA25X
	/*
	 * Unlock (actually, disable) the cache now that board_init_f
	 * is done.  We could do this earlier but we would need to add
	 * a new C runtime hook, whereas c_runtime_cpu_setup already
	 * exists.
	 * As this routine is just a call to cpu_init_crit, let us
	 * tail-optimize and do a simple branch here.
	 */
	b	cpu_init_crit
#else
	bx	lr
#endif

/*
 *************************************************************************
 *
 * CPU_init_critical registers
 *
 * setup important registers
 * setup memory timing
 *
 *************************************************************************
 */
#if !defined(CONFIG_SKIP_LOWLEVEL_INIT) || defined(CONFIG_CPU_PXA25X)
cpu_init_crit:
	/*
	 * flush v4 I/D caches
	 */
	mov	r0, #0
	mcr	p15, 0, r0, c7, c7, 0	/* Invalidate I+D+BTB caches */
	mcr	p15, 0, r0, c8, c7, 0	/* Invalidate Unified TLB */

	/*
	 * disable MMU stuff and caches
	 */
	mrc	p15, 0, r0, c1, c0, 0
	bic	r0, r0, #0x00003300	@ clear bits 13:12, 9:8 (--VI --RS)
	bic	r0, r0, #0x00000087	@ clear bits 7, 2:0 (B--- -CAM)
	orr	r0, r0, #0x00000002	@ set bit 2 (A) Align
	mcr	p15, 0, r0, c1, c0, 0

	mov	pc, lr		/* back to my caller */
#endif	/* !CONFIG_SKIP_LOWLEVEL_INIT || CONFIG_CPU_PXA25X */

#ifndef CONFIG_SPL_BUILD
/*
 *************************************************************************
 *
 * Interrupt handling
 *
 *************************************************************************
 */
@
@ IRQ stack frame.
@ Byte offsets into the register frame carved out on the stack by the
@ save macros below (r0..r12, sp, lr, pc, cpsr, old_r0 = 18 words).
@
#define S_FRAME_SIZE	72

#define S_OLD_R0	68
#define S_PSR		64
#define S_PC		60
#define S_LR		56
#define S_SP		52

#define S_IP		48
#define S_FP		44
#define S_R10		40
#define S_R9		36
#define S_R8		32
#define S_R7		28
#define S_R6		24
#define S_R5		20
#define S_R4		16
#define S_R3		12
#define S_R2		8
#define S_R1		4
#define S_R0		0

#define MODE_SVC	0x13
#define I_BIT		0x80

/*
 * use bad_save_user_regs for abort/prefetch/undef/swi ...
 * use irq_save_user_regs / irq_restore_user_regs for IRQ/FIQ handling
 */

	/*
	 * Build a full register frame on the SVC stack for a fatal
	 * exception, then leave a pointer to it in r0 as the parameter
	 * for the C handler.  Expects get_bad_stack to have stashed the
	 * aborted pc/cpsr at IRQ_STACK_START_IN first.
	 */
	.macro	bad_save_user_regs
	sub	sp, sp, #S_FRAME_SIZE		@ carve out a frame on current user stack
	stmia	sp, {r0 - r12}			@ Save user registers (now in svc mode) r0-r12

	ldr	r2, IRQ_STACK_START_IN		@ set base 2 words into abort stack
	ldmia	r2, {r2 - r3}			@ get values for "aborted" pc and cpsr (into parm regs)
	add	r0, sp, #S_FRAME_SIZE		@ grab pointer to old stack

	add	r5, sp, #S_SP
	mov	r1, lr
	stmia	r5, {r0 - r3}			@ save sp_SVC, lr_SVC, pc, cpsr
	mov	r0, sp				@ save current stack into r0 (param register)
	.endm

	/*
	 * Save the interrupted context on the IRQ/FIQ stack; r0 points
	 * at the frame afterwards (parameter for the C handler).
	 */
	.macro	irq_save_user_regs
	sub	sp, sp, #S_FRAME_SIZE
	stmia	sp, {r0 - r12}			@ Calling r0-r12
	add	r8, sp, #S_PC			@ !!!! R8 NEEDS to be saved !!!! a reserved stack spot would be good.
	stmdb	r8, {sp, lr}^			@ Calling SP, LR (user-mode banked regs)
	str	lr, [r8, #0]			@ Save calling PC
	mrs	r6, spsr
	str	r6, [r8, #4]			@ Save CPSR
	str	r0, [r8, #8]			@ Save OLD_R0
	mov	r0, sp
	.endm

	/* Restore the context saved by irq_save_user_regs and return. */
	.macro	irq_restore_user_regs
	ldmia	sp, {r0 - lr}^			@ Calling r0 - lr (user-mode banked regs)
	mov	r0, r0				@ nop: no banked-reg access right after ldm ^
	ldr	lr, [sp, #S_PC]			@ Get PC
	add	sp, sp, #S_FRAME_SIZE
	subs	pc, lr, #4			@ return & move spsr_svc into cpsr
	.endm

	/*
	 * Entered in the exception's banked mode: stash the aborted
	 * pc (lr) and spsr at IRQ_STACK_START_IN, then switch to
	 * SVC mode so bad_save_user_regs runs on the SVC stack.
	 */
	.macro get_bad_stack
	ldr	r13, IRQ_STACK_START_IN		@ setup our mode stack (enter in banked mode)

	str	lr, [r13]			@ save caller lr in position 0 of saved stack
	mrs	lr, spsr			@ get the spsr
	str	lr, [r13, #4]			@ save spsr in position 1 of saved stack

	mov	r13, #MODE_SVC			@ prepare SVC-Mode
	@ msr	spsr_c, r13
	msr	spsr, r13			@ switch modes, make sure moves will execute
	mov	lr, pc				@ capture return pc
	movs	pc, lr				@ jump to next instruction & switch modes.
	.endm

	/*
	 * SWI variant: already in SVC mode, so no mode switch is
	 * needed — just record lr/spsr, using r0 as a scratch register
	 * that is saved/restored on the current stack.
	 */
	.macro get_bad_stack_swi
	sub	r13, r13, #4			@ space on current stack for scratch reg.
	str	r0, [r13]			@ save R0's value.
	ldr	r0, IRQ_STACK_START_IN		@ get data regions start
	str	lr, [r0]			@ save caller lr in position 0 of saved stack
	mrs	lr, spsr			@ get the spsr
	str	lr, [r0, #4]			@ save spsr in position 1 of saved stack
	ldr	lr, [r0]			@ restore lr
	ldr	r0, [r13]			@ restore r0
	add	r13, r13, #4			@ pop stack entry
	.endm

	.macro get_irq_stack			@ setup IRQ stack
	ldr	sp, IRQ_STACK_START
	.endm

	.macro get_fiq_stack			@ setup FIQ stack
	ldr	sp, FIQ_STACK_START
	.endm
#endif	/* CONFIG_SPL_BUILD */

/*
 * exception handlers
 */
#ifdef CONFIG_SPL_BUILD
	.align	5
do_hang:
	bl	hang				/* hang and never return */
#else	/* !CONFIG_SPL_BUILD */
	.align	5
undefined_instruction:
	get_bad_stack
	bad_save_user_regs
	bl	do_undefined_instruction

	.align	5
software_interrupt:
	get_bad_stack_swi
	bad_save_user_regs
	bl	do_software_interrupt

	.align	5
prefetch_abort:
	get_bad_stack
	bad_save_user_regs
	bl	do_prefetch_abort

	.align	5
data_abort:
	get_bad_stack
	bad_save_user_regs
	bl	do_data_abort

	.align	5
not_used:
	get_bad_stack
	bad_save_user_regs
	bl	do_not_used

#ifdef CONFIG_USE_IRQ

	.align	5
irq:
	get_irq_stack
	irq_save_user_regs
	bl	do_irq
	irq_restore_user_regs

	.align	5
fiq:
	get_fiq_stack
	/* someone ought to write a more efficient fiq_save_user_regs */
	irq_save_user_regs
	bl	do_fiq
	irq_restore_user_regs

#else

	/* IRQ support disabled: treat IRQ/FIQ like fatal exceptions. */
	.align	5
irq:
	get_bad_stack
	bad_save_user_regs
	bl	do_irq

	.align	5
fiq:
	get_bad_stack
	bad_save_user_regs
	bl	do_fiq

#endif
	.align	5
#endif	/* CONFIG_SPL_BUILD */


/*
 * Enable MMU to use DCache as DRAM.
 *
 * This is useful on PXA25x and PXA26x in early bootstages, where there is no
 * other possible memory available to hold stack.
 */
#ifdef CONFIG_CPU_PXA25X
/*
 * CPWAIT: XScale idiom to wait for a CP15 operation to take effect
 * (read a CP15 register, consume the result, then flush the pipeline).
 */
.macro CPWAIT reg
	mrc	p15, 0, \reg, c2, c0, 0
	mov	\reg, \reg
	sub	pc, pc, #4
.endm
lock_cache_for_stack:
	/* Domain access -- enable for all CPs */
	ldr	r0, =0x0000ffff
	mcr	p15, 0, r0, c3, c0, 0

	/* Point TTBR to MMU table */
	ldr	r0, =mmutable
	mcr	p15, 0, r0, c2, c0, 0

	/* Kick in MMU, ICache, DCache, BTB */
	mrc	p15, 0, r0, c1, c0, 0
	bic	r0, #0x1b00
	bic	r0, #0x0087
	orr	r0, #0x1800
	orr	r0, #0x0005
	mcr	p15, 0, r0, c1, c0, 0
	CPWAIT	r0

	/* Unlock Icache, Dcache */
	mcr	p15, 0, r0, c9, c1, 1
	mcr	p15, 0, r0, c9, c2, 1

	/* Flush Icache, Dcache, BTB */
	mcr	p15, 0, r0, c7, c7, 0

	/* Unlock I-TLB, D-TLB */
	mcr	p15, 0, r0, c10, c4, 1
	mcr	p15, 0, r0, c10, c8, 1

	/* Flush TLB */
	mcr	p15, 0, r0, c8, c7, 0

	/* Allocate 4096 bytes of Dcache as RAM */

	/* Drain pending loads and stores */
	mcr	p15, 0, r0, c7, c10, 4

	mov	r4, #0x00			@ r4/r5 = zero pair for strd fills
	mov	r5, #0x00
	mov	r2, #0x01
	mcr	p15, 0, r0, c9, c2, 0		@ enter D-cache lock mode
	CPWAIT	r0

	/* 128 lines reserved (128 x 32bytes = 4096 bytes total) */
	mov	r0, #128
	ldr	r1, =0xfffff000			@ region mapped cached by mmutable below

alloc:
	mcr	p15, 0, r1, c7, c2, 5		@ allocate one D-cache line at r1
	/* Drain pending loads and stores */
	mcr	p15, 0, r0, c7, c10, 4
	strd	r4, [r1], #8			@ zero-fill the 32-byte line
	strd	r4, [r1], #8
	strd	r4, [r1], #8
	strd	r4, [r1], #8
	subs	r0, #0x01
	bne	alloc
	/* Drain pending loads and stores */
	mcr	p15, 0, r0, c7, c10, 4
	mov	r2, #0x00
	mcr	p15, 0, r2, c9, c2, 0		@ leave D-cache lock mode
	CPWAIT	r0

	mov	pc, lr

/*
 * Page table for the early MMU mapping: flat 1:1 sections, everything
 * uncached except the top 1 MiB (0xfff00000), which is cached so it
 * can back the locked D-cache stack above.
 */
.section .mmutable, "a"
mmutable:
	.align	14
	/* 0x00000000 - 0xffe00000 : 1:1, uncached mapping */
	.set	__base, 0
	.rept	0xfff
	.word	(__base << 20) | 0xc12
	.set	__base, __base + 1
	.endr

	/* 0xfff00000 : 1:1, cached mapping */
	.word	(0xfff << 20) | 0x1c1e
#endif	/* CONFIG_CPU_PXA25X */