1*4882a593Smuzhiyun/* 2*4882a593Smuzhiyun * Copyright 2004, 2007-2012 Freescale Semiconductor, Inc. 3*4882a593Smuzhiyun * Copyright (C) 2003 Motorola,Inc. 4*4882a593Smuzhiyun * 5*4882a593Smuzhiyun * SPDX-License-Identifier: GPL-2.0+ 6*4882a593Smuzhiyun */ 7*4882a593Smuzhiyun 8*4882a593Smuzhiyun/* U-Boot Startup Code for Motorola 85xx PowerPC based Embedded Boards 9*4882a593Smuzhiyun * 10*4882a593Smuzhiyun * The processor starts at 0xfffffffc and the code is first executed in the 11*4882a593Smuzhiyun * last 4K page(0xfffff000-0xffffffff) in flash/rom. 12*4882a593Smuzhiyun * 13*4882a593Smuzhiyun */ 14*4882a593Smuzhiyun 15*4882a593Smuzhiyun#include <asm-offsets.h> 16*4882a593Smuzhiyun#include <config.h> 17*4882a593Smuzhiyun#include <mpc85xx.h> 18*4882a593Smuzhiyun#include <version.h> 19*4882a593Smuzhiyun 20*4882a593Smuzhiyun#include <ppc_asm.tmpl> 21*4882a593Smuzhiyun#include <ppc_defs.h> 22*4882a593Smuzhiyun 23*4882a593Smuzhiyun#include <asm/cache.h> 24*4882a593Smuzhiyun#include <asm/mmu.h> 25*4882a593Smuzhiyun 26*4882a593Smuzhiyun#undef MSR_KERNEL 27*4882a593Smuzhiyun#define MSR_KERNEL ( MSR_ME ) /* Machine Check */ 28*4882a593Smuzhiyun 29*4882a593Smuzhiyun#define LAW_EN 0x80000000 30*4882a593Smuzhiyun 31*4882a593Smuzhiyun#if defined(CONFIG_NAND_SPL) || \ 32*4882a593Smuzhiyun (defined(CONFIG_SPL_BUILD) && defined(CONFIG_SPL_INIT_MINIMAL)) 33*4882a593Smuzhiyun#define MINIMAL_SPL 34*4882a593Smuzhiyun#endif 35*4882a593Smuzhiyun 36*4882a593Smuzhiyun#if !defined(CONFIG_SPL) && !defined(CONFIG_SYS_RAMBOOT) && \ 37*4882a593Smuzhiyun !defined(CONFIG_SECURE_BOOT) && !defined(CONFIG_SRIO_PCIE_BOOT_SLAVE) 38*4882a593Smuzhiyun#define NOR_BOOT 39*4882a593Smuzhiyun#endif 40*4882a593Smuzhiyun 41*4882a593Smuzhiyun/* 42*4882a593Smuzhiyun * Set up GOT: Global Offset Table 43*4882a593Smuzhiyun * 44*4882a593Smuzhiyun * Use r12 to access the GOT 45*4882a593Smuzhiyun */ 46*4882a593Smuzhiyun START_GOT 47*4882a593Smuzhiyun GOT_ENTRY(_GOT2_TABLE_) 48*4882a593Smuzhiyun GOT_ENTRY(_FIXUP_TABLE_) 49*4882a593Smuzhiyun 50*4882a593Smuzhiyun#ifndef MINIMAL_SPL 51*4882a593Smuzhiyun GOT_ENTRY(_start) 52*4882a593Smuzhiyun GOT_ENTRY(_start_of_vectors) 53*4882a593Smuzhiyun GOT_ENTRY(_end_of_vectors) 54*4882a593Smuzhiyun GOT_ENTRY(transfer_to_handler) 55*4882a593Smuzhiyun#endif 56*4882a593Smuzhiyun 57*4882a593Smuzhiyun GOT_ENTRY(__init_end) 58*4882a593Smuzhiyun GOT_ENTRY(__bss_end) 59*4882a593Smuzhiyun GOT_ENTRY(__bss_start) 60*4882a593Smuzhiyun END_GOT 61*4882a593Smuzhiyun 62*4882a593Smuzhiyun/* 63*4882a593Smuzhiyun * e500 Startup -- after reset only the last 4KB of the effective 64*4882a593Smuzhiyun * address space is mapped in the MMU L2 TLB1 Entry0. The .bootpg 65*4882a593Smuzhiyun * section is located at THIS LAST page and basically does three 66*4882a593Smuzhiyun * things: clear some registers, set up exception tables and 67*4882a593Smuzhiyun * add more TLB entries for 'larger spaces'(e.g. the boot rom) to 68*4882a593Smuzhiyun * continue the boot procedure. 69*4882a593Smuzhiyun 70*4882a593Smuzhiyun * Once the boot rom is mapped by TLB entries we can proceed 71*4882a593Smuzhiyun * with normal startup. 72*4882a593Smuzhiyun * 73*4882a593Smuzhiyun */ 74*4882a593Smuzhiyun 75*4882a593Smuzhiyun .section .bootpg,"ax" 76*4882a593Smuzhiyun .globl _start_e500 77*4882a593Smuzhiyun 78*4882a593Smuzhiyun_start_e500: 79*4882a593Smuzhiyun/* Enable debug exception */ 80*4882a593Smuzhiyun li r1,MSR_DE 81*4882a593Smuzhiyun mtmsr r1 82*4882a593Smuzhiyun 83*4882a593Smuzhiyun /* 84*4882a593Smuzhiyun * If we got an ePAPR device tree pointer passed in as r3, we need that 85*4882a593Smuzhiyun * later in cpu_init_early_f(). Save it to a safe register before we 86*4882a593Smuzhiyun * clobber it so that we can fetch it from there later. 87*4882a593Smuzhiyun */ 88*4882a593Smuzhiyun mr r24, r3 89*4882a593Smuzhiyun 90*4882a593Smuzhiyun#ifdef CONFIG_SYS_FSL_ERRATUM_A004510 91*4882a593Smuzhiyun mfspr r3,SPRN_SVR 92*4882a593Smuzhiyun rlwinm r3,r3,0,0xff 93*4882a593Smuzhiyun li r4,CONFIG_SYS_FSL_ERRATUM_A004510_SVR_REV 94*4882a593Smuzhiyun cmpw r3,r4 95*4882a593Smuzhiyun beq 1f 96*4882a593Smuzhiyun 97*4882a593Smuzhiyun#ifdef CONFIG_SYS_FSL_ERRATUM_A004510_SVR_REV2 98*4882a593Smuzhiyun li r4,CONFIG_SYS_FSL_ERRATUM_A004510_SVR_REV2 99*4882a593Smuzhiyun cmpw r3,r4 100*4882a593Smuzhiyun beq 1f 101*4882a593Smuzhiyun#endif 102*4882a593Smuzhiyun 103*4882a593Smuzhiyun /* Not a supported revision affected by erratum */ 104*4882a593Smuzhiyun li r27,0 105*4882a593Smuzhiyun b 2f 106*4882a593Smuzhiyun 107*4882a593Smuzhiyun1: li r27,1 /* Remember for later that we have the erratum */ 108*4882a593Smuzhiyun /* Erratum says set bits 55:60 to 001001 */ 109*4882a593Smuzhiyun msync 110*4882a593Smuzhiyun isync 111*4882a593Smuzhiyun mfspr r3,SPRN_HDBCR0 112*4882a593Smuzhiyun li r4,0x48 113*4882a593Smuzhiyun rlwimi r3,r4,0,0x1f8 114*4882a593Smuzhiyun mtspr SPRN_HDBCR0,r3 115*4882a593Smuzhiyun isync 116*4882a593Smuzhiyun2: 117*4882a593Smuzhiyun#endif 118*4882a593Smuzhiyun#ifdef CONFIG_SYS_FSL_ERRATUM_A005125 119*4882a593Smuzhiyun msync 120*4882a593Smuzhiyun isync 121*4882a593Smuzhiyun mfspr r3, SPRN_HDBCR0 122*4882a593Smuzhiyun oris r3, r3, 0x0080 123*4882a593Smuzhiyun mtspr SPRN_HDBCR0, r3 124*4882a593Smuzhiyun#endif 125*4882a593Smuzhiyun 126*4882a593Smuzhiyun 127*4882a593Smuzhiyun#if defined(CONFIG_SECURE_BOOT) && defined(CONFIG_E500MC) && \ 128*4882a593Smuzhiyun !defined(CONFIG_E6500) 129*4882a593Smuzhiyun /* ISBC uses L2 as stack. 130*4882a593Smuzhiyun * Disable L2 cache here so that u-boot can enable it later 131*4882a593Smuzhiyun * as part of it's normal flow 132*4882a593Smuzhiyun */ 133*4882a593Smuzhiyun 134*4882a593Smuzhiyun /* Check if L2 is enabled */ 135*4882a593Smuzhiyun mfspr r3, SPRN_L2CSR0 136*4882a593Smuzhiyun lis r2, L2CSR0_L2E@h 137*4882a593Smuzhiyun ori r2, r2, L2CSR0_L2E@l 138*4882a593Smuzhiyun and. r4, r3, r2 139*4882a593Smuzhiyun beq l2_disabled 140*4882a593Smuzhiyun 141*4882a593Smuzhiyun mfspr r3, SPRN_L2CSR0 142*4882a593Smuzhiyun /* Flush L2 cache */ 143*4882a593Smuzhiyun lis r2,(L2CSR0_L2FL)@h 144*4882a593Smuzhiyun ori r2, r2, (L2CSR0_L2FL)@l 145*4882a593Smuzhiyun or r3, r2, r3 146*4882a593Smuzhiyun sync 147*4882a593Smuzhiyun isync 148*4882a593Smuzhiyun mtspr SPRN_L2CSR0,r3 149*4882a593Smuzhiyun isync 150*4882a593Smuzhiyun1: 151*4882a593Smuzhiyun mfspr r3, SPRN_L2CSR0 152*4882a593Smuzhiyun and. r1, r3, r2 153*4882a593Smuzhiyun bne 1b 154*4882a593Smuzhiyun 155*4882a593Smuzhiyun mfspr r3, SPRN_L2CSR0 156*4882a593Smuzhiyun lis r2, L2CSR0_L2E@h 157*4882a593Smuzhiyun ori r2, r2, L2CSR0_L2E@l 158*4882a593Smuzhiyun andc r4, r3, r2 159*4882a593Smuzhiyun sync 160*4882a593Smuzhiyun isync 161*4882a593Smuzhiyun mtspr SPRN_L2CSR0,r4 162*4882a593Smuzhiyun isync 163*4882a593Smuzhiyun 164*4882a593Smuzhiyunl2_disabled: 165*4882a593Smuzhiyun#endif 166*4882a593Smuzhiyun 167*4882a593Smuzhiyun/* clear registers/arrays not reset by hardware */ 168*4882a593Smuzhiyun 169*4882a593Smuzhiyun /* L1 */ 170*4882a593Smuzhiyun li r0,2 171*4882a593Smuzhiyun mtspr L1CSR0,r0 /* invalidate d-cache */ 172*4882a593Smuzhiyun mtspr L1CSR1,r0 /* invalidate i-cache */ 173*4882a593Smuzhiyun 174*4882a593Smuzhiyun mfspr r1,DBSR 175*4882a593Smuzhiyun mtspr DBSR,r1 /* Clear all valid bits */ 176*4882a593Smuzhiyun 177*4882a593Smuzhiyun 178*4882a593Smuzhiyun .macro create_tlb1_entry esel ts tsize epn wimg rpn perm phy_high scratch 179*4882a593Smuzhiyun lis \scratch, FSL_BOOKE_MAS0(1, \esel, 0)@h 180*4882a593Smuzhiyun ori \scratch, \scratch, FSL_BOOKE_MAS0(1, \esel, 0)@l 181*4882a593Smuzhiyun mtspr MAS0, \scratch 182*4882a593Smuzhiyun lis \scratch, FSL_BOOKE_MAS1(1, 1, 0, \ts, \tsize)@h 183*4882a593Smuzhiyun ori \scratch, \scratch, FSL_BOOKE_MAS1(1, 1, 0, \ts, \tsize)@l 184*4882a593Smuzhiyun mtspr MAS1, \scratch 185*4882a593Smuzhiyun lis \scratch, FSL_BOOKE_MAS2(\epn, \wimg)@h 186*4882a593Smuzhiyun ori \scratch, \scratch, FSL_BOOKE_MAS2(\epn, \wimg)@l 187*4882a593Smuzhiyun mtspr MAS2, \scratch 188*4882a593Smuzhiyun lis \scratch, FSL_BOOKE_MAS3(\rpn, 0, \perm)@h 189*4882a593Smuzhiyun ori \scratch, \scratch, FSL_BOOKE_MAS3(\rpn, 0, \perm)@l 190*4882a593Smuzhiyun mtspr MAS3, \scratch 191*4882a593Smuzhiyun lis \scratch, \phy_high@h 192*4882a593Smuzhiyun ori \scratch, \scratch, \phy_high@l 193*4882a593Smuzhiyun mtspr MAS7, \scratch 194*4882a593Smuzhiyun isync 195*4882a593Smuzhiyun msync 196*4882a593Smuzhiyun tlbwe 197*4882a593Smuzhiyun isync 198*4882a593Smuzhiyun .endm 199*4882a593Smuzhiyun 200*4882a593Smuzhiyun .macro create_tlb0_entry esel ts tsize epn wimg rpn perm phy_high scratch 201*4882a593Smuzhiyun lis \scratch, FSL_BOOKE_MAS0(0, \esel, 0)@h 202*4882a593Smuzhiyun ori \scratch, \scratch, FSL_BOOKE_MAS0(0, \esel, 0)@l 203*4882a593Smuzhiyun mtspr MAS0, \scratch 204*4882a593Smuzhiyun lis \scratch, FSL_BOOKE_MAS1(1, 0, 0, \ts, \tsize)@h 205*4882a593Smuzhiyun ori \scratch, \scratch, FSL_BOOKE_MAS1(1, 0, 0, \ts, \tsize)@l 206*4882a593Smuzhiyun mtspr MAS1, \scratch 207*4882a593Smuzhiyun lis \scratch, FSL_BOOKE_MAS2(\epn, \wimg)@h 208*4882a593Smuzhiyun ori \scratch, \scratch, FSL_BOOKE_MAS2(\epn, \wimg)@l 209*4882a593Smuzhiyun mtspr MAS2, \scratch 210*4882a593Smuzhiyun lis \scratch, FSL_BOOKE_MAS3(\rpn, 0, \perm)@h 211*4882a593Smuzhiyun ori \scratch, \scratch, FSL_BOOKE_MAS3(\rpn, 0, \perm)@l 212*4882a593Smuzhiyun mtspr MAS3, \scratch 213*4882a593Smuzhiyun lis \scratch, \phy_high@h 214*4882a593Smuzhiyun ori \scratch, \scratch, \phy_high@l 215*4882a593Smuzhiyun mtspr MAS7, \scratch 216*4882a593Smuzhiyun isync 217*4882a593Smuzhiyun msync 218*4882a593Smuzhiyun tlbwe 219*4882a593Smuzhiyun isync 220*4882a593Smuzhiyun .endm 221*4882a593Smuzhiyun 222*4882a593Smuzhiyun .macro delete_tlb1_entry esel scratch 223*4882a593Smuzhiyun lis \scratch, FSL_BOOKE_MAS0(1, \esel, 0)@h 224*4882a593Smuzhiyun ori \scratch, \scratch, FSL_BOOKE_MAS0(1, \esel, 0)@l 225*4882a593Smuzhiyun mtspr MAS0, \scratch 226*4882a593Smuzhiyun li \scratch, 0 227*4882a593Smuzhiyun mtspr MAS1, \scratch 228*4882a593Smuzhiyun isync 229*4882a593Smuzhiyun msync 230*4882a593Smuzhiyun tlbwe 231*4882a593Smuzhiyun isync 232*4882a593Smuzhiyun .endm 233*4882a593Smuzhiyun 234*4882a593Smuzhiyun .macro delete_tlb0_entry esel epn wimg scratch 235*4882a593Smuzhiyun lis \scratch, FSL_BOOKE_MAS0(0, \esel, 0)@h 236*4882a593Smuzhiyun ori \scratch, \scratch, FSL_BOOKE_MAS0(0, \esel, 0)@l 237*4882a593Smuzhiyun mtspr MAS0, \scratch 238*4882a593Smuzhiyun li \scratch, 0 239*4882a593Smuzhiyun mtspr MAS1, \scratch 240*4882a593Smuzhiyun lis \scratch, FSL_BOOKE_MAS2(\epn, \wimg)@h 241*4882a593Smuzhiyun ori \scratch, \scratch, FSL_BOOKE_MAS2(\epn, \wimg)@l 242*4882a593Smuzhiyun mtspr MAS2, \scratch 243*4882a593Smuzhiyun isync 244*4882a593Smuzhiyun msync 245*4882a593Smuzhiyun tlbwe 246*4882a593Smuzhiyun isync 247*4882a593Smuzhiyun .endm 248*4882a593Smuzhiyun 249*4882a593Smuzhiyun/* Interrupt vectors do not fit in minimal SPL. */ 250*4882a593Smuzhiyun#if !defined(MINIMAL_SPL) 251*4882a593Smuzhiyun /* Setup interrupt vectors */ 252*4882a593Smuzhiyun lis r1,CONFIG_SYS_MONITOR_BASE@h 253*4882a593Smuzhiyun mtspr IVPR,r1 254*4882a593Smuzhiyun 255*4882a593Smuzhiyun li r4,CriticalInput@l 256*4882a593Smuzhiyun mtspr IVOR0,r4 /* 0: Critical input */ 257*4882a593Smuzhiyun li r4,MachineCheck@l 258*4882a593Smuzhiyun mtspr IVOR1,r4 /* 1: Machine check */ 259*4882a593Smuzhiyun li r4,DataStorage@l 260*4882a593Smuzhiyun mtspr IVOR2,r4 /* 2: Data storage */ 261*4882a593Smuzhiyun li r4,InstStorage@l 262*4882a593Smuzhiyun mtspr IVOR3,r4 /* 3: Instruction storage */ 263*4882a593Smuzhiyun li r4,ExtInterrupt@l 264*4882a593Smuzhiyun mtspr IVOR4,r4 /* 4: External interrupt */ 265*4882a593Smuzhiyun li r4,Alignment@l 266*4882a593Smuzhiyun mtspr IVOR5,r4 /* 5: Alignment */ 267*4882a593Smuzhiyun li r4,ProgramCheck@l 268*4882a593Smuzhiyun mtspr IVOR6,r4 /* 6: Program check */ 269*4882a593Smuzhiyun li r4,FPUnavailable@l 270*4882a593Smuzhiyun mtspr IVOR7,r4 /* 7: floating point unavailable */ 271*4882a593Smuzhiyun li r4,SystemCall@l 272*4882a593Smuzhiyun mtspr IVOR8,r4 /* 8: System call */ 273*4882a593Smuzhiyun /* 9: Auxiliary processor unavailable(unsupported) */ 274*4882a593Smuzhiyun li r4,Decrementer@l 275*4882a593Smuzhiyun mtspr IVOR10,r4 /* 10: Decrementer */ 276*4882a593Smuzhiyun li r4,IntervalTimer@l 277*4882a593Smuzhiyun mtspr IVOR11,r4 /* 11: Interval timer */ 278*4882a593Smuzhiyun li r4,WatchdogTimer@l 279*4882a593Smuzhiyun mtspr IVOR12,r4 /* 12: Watchdog timer */ 280*4882a593Smuzhiyun li r4,DataTLBError@l 281*4882a593Smuzhiyun mtspr IVOR13,r4 /* 13: Data TLB error */ 282*4882a593Smuzhiyun li r4,InstructionTLBError@l 283*4882a593Smuzhiyun mtspr IVOR14,r4 /* 14: Instruction TLB error */ 284*4882a593Smuzhiyun li r4,DebugBreakpoint@l 285*4882a593Smuzhiyun mtspr IVOR15,r4 /* 15: Debug */ 286*4882a593Smuzhiyun#endif 287*4882a593Smuzhiyun 288*4882a593Smuzhiyun /* Clear and set up some registers. */ 289*4882a593Smuzhiyun li r0,0x0000 290*4882a593Smuzhiyun lis r1,0xffff 291*4882a593Smuzhiyun mtspr DEC,r0 /* prevent dec exceptions */ 292*4882a593Smuzhiyun mttbl r0 /* prevent fit & wdt exceptions */ 293*4882a593Smuzhiyun mttbu r0 294*4882a593Smuzhiyun mtspr TSR,r1 /* clear all timer exception status */ 295*4882a593Smuzhiyun mtspr TCR,r0 /* disable all */ 296*4882a593Smuzhiyun mtspr ESR,r0 /* clear exception syndrome register */ 297*4882a593Smuzhiyun mtspr MCSR,r0 /* machine check syndrome register */ 298*4882a593Smuzhiyun mtxer r0 /* clear integer exception register */ 299*4882a593Smuzhiyun 300*4882a593Smuzhiyun#ifdef CONFIG_SYS_BOOK3E_HV 301*4882a593Smuzhiyun mtspr MAS8,r0 /* make sure MAS8 is clear */ 302*4882a593Smuzhiyun#endif 303*4882a593Smuzhiyun 304*4882a593Smuzhiyun /* Enable Time Base and Select Time Base Clock */ 305*4882a593Smuzhiyun lis r0,HID0_EMCP@h /* Enable machine check */ 306*4882a593Smuzhiyun#if defined(CONFIG_ENABLE_36BIT_PHYS) 307*4882a593Smuzhiyun ori r0,r0,HID0_ENMAS7@l /* Enable MAS7 */ 308*4882a593Smuzhiyun#endif 309*4882a593Smuzhiyun#ifndef CONFIG_E500MC 310*4882a593Smuzhiyun ori r0,r0,HID0_TBEN@l /* Enable Timebase */ 311*4882a593Smuzhiyun#endif 312*4882a593Smuzhiyun mtspr HID0,r0 313*4882a593Smuzhiyun 314*4882a593Smuzhiyun#if !defined(CONFIG_E500MC) && !defined(CONFIG_ARCH_QEMU_E500) 315*4882a593Smuzhiyun li r0,(HID1_ASTME|HID1_ABE)@l /* Addr streaming & broadcast */ 316*4882a593Smuzhiyun mfspr r3,PVR 317*4882a593Smuzhiyun andi. r3,r3, 0xff 318*4882a593Smuzhiyun cmpwi r3,0x50@l /* if we are rev 5.0 or greater set MBDD */ 319*4882a593Smuzhiyun blt 1f 320*4882a593Smuzhiyun /* Set MBDD bit also */ 321*4882a593Smuzhiyun ori r0, r0, HID1_MBDD@l 322*4882a593Smuzhiyun1: 323*4882a593Smuzhiyun mtspr HID1,r0 324*4882a593Smuzhiyun#endif 325*4882a593Smuzhiyun 326*4882a593Smuzhiyun#ifdef CONFIG_SYS_FSL_ERRATUM_CPU_A003999 327*4882a593Smuzhiyun mfspr r3,SPRN_HDBCR1 328*4882a593Smuzhiyun oris r3,r3,0x0100 329*4882a593Smuzhiyun mtspr SPRN_HDBCR1,r3 330*4882a593Smuzhiyun#endif 331*4882a593Smuzhiyun 332*4882a593Smuzhiyun /* Enable Branch Prediction */ 333*4882a593Smuzhiyun#if defined(CONFIG_BTB) 334*4882a593Smuzhiyun lis r0,BUCSR_ENABLE@h 335*4882a593Smuzhiyun ori r0,r0,BUCSR_ENABLE@l 336*4882a593Smuzhiyun mtspr SPRN_BUCSR,r0 337*4882a593Smuzhiyun#endif 338*4882a593Smuzhiyun 339*4882a593Smuzhiyun#if defined(CONFIG_SYS_INIT_DBCR) 340*4882a593Smuzhiyun lis r1,0xffff 341*4882a593Smuzhiyun ori r1,r1,0xffff 342*4882a593Smuzhiyun mtspr DBSR,r1 /* Clear all status bits */ 343*4882a593Smuzhiyun lis r0,CONFIG_SYS_INIT_DBCR@h /* DBCR0[IDM] must be set */ 344*4882a593Smuzhiyun ori r0,r0,CONFIG_SYS_INIT_DBCR@l 345*4882a593Smuzhiyun mtspr DBCR0,r0 346*4882a593Smuzhiyun#endif 347*4882a593Smuzhiyun 348*4882a593Smuzhiyun#ifdef CONFIG_ARCH_MPC8569 349*4882a593Smuzhiyun#define CONFIG_SYS_LBC_ADDR (CONFIG_SYS_CCSRBAR_DEFAULT + 0x5000) 350*4882a593Smuzhiyun#define CONFIG_SYS_LBCR_ADDR (CONFIG_SYS_LBC_ADDR + 0xd0) 351*4882a593Smuzhiyun 352*4882a593Smuzhiyun /* MPC8569 Rev.0 silcon needs to set bit 13 of LBCR to allow elBC to 353*4882a593Smuzhiyun * use address space which is more than 12bits, and it must be done in 354*4882a593Smuzhiyun * the 4K boot page. So we set this bit here. 355*4882a593Smuzhiyun */ 356*4882a593Smuzhiyun 357*4882a593Smuzhiyun /* create a temp mapping TLB0[0] for LBCR */ 358*4882a593Smuzhiyun create_tlb0_entry 0, \ 359*4882a593Smuzhiyun 0, BOOKE_PAGESZ_4K, \ 360*4882a593Smuzhiyun CONFIG_SYS_LBC_ADDR, MAS2_I|MAS2_G, \ 361*4882a593Smuzhiyun CONFIG_SYS_LBC_ADDR, MAS3_SW|MAS3_SR, \ 362*4882a593Smuzhiyun 0, r6 363*4882a593Smuzhiyun 364*4882a593Smuzhiyun /* Set LBCR register */ 365*4882a593Smuzhiyun lis r4,CONFIG_SYS_LBCR_ADDR@h 366*4882a593Smuzhiyun ori r4,r4,CONFIG_SYS_LBCR_ADDR@l 367*4882a593Smuzhiyun 368*4882a593Smuzhiyun lis r5,CONFIG_SYS_LBC_LBCR@h 369*4882a593Smuzhiyun ori r5,r5,CONFIG_SYS_LBC_LBCR@l 370*4882a593Smuzhiyun stw r5,0(r4) 371*4882a593Smuzhiyun isync 372*4882a593Smuzhiyun 373*4882a593Smuzhiyun /* invalidate this temp TLB */ 374*4882a593Smuzhiyun lis r4,CONFIG_SYS_LBC_ADDR@h 375*4882a593Smuzhiyun ori r4,r4,CONFIG_SYS_LBC_ADDR@l 376*4882a593Smuzhiyun tlbivax 0,r4 377*4882a593Smuzhiyun isync 378*4882a593Smuzhiyun 379*4882a593Smuzhiyun#endif /* CONFIG_ARCH_MPC8569 */ 380*4882a593Smuzhiyun 381*4882a593Smuzhiyun/* 382*4882a593Smuzhiyun * Search for the TLB that covers the code we're executing, and shrink it 383*4882a593Smuzhiyun * so that it covers only this 4K page. That will ensure that any other 384*4882a593Smuzhiyun * TLB we create won't interfere with it. We assume that the TLB exists, 385*4882a593Smuzhiyun * which is why we don't check the Valid bit of MAS1. We also assume 386*4882a593Smuzhiyun * it is in TLB1. 387*4882a593Smuzhiyun * 388*4882a593Smuzhiyun * This is necessary, for example, when booting from the on-chip ROM, 389*4882a593Smuzhiyun * which (oddly) creates a single 4GB TLB that covers CCSR and DDR. 390*4882a593Smuzhiyun */ 391*4882a593Smuzhiyun bl nexti /* Find our address */ 392*4882a593Smuzhiyunnexti: mflr r1 /* R1 = our PC */ 393*4882a593Smuzhiyun li r2, 0 394*4882a593Smuzhiyun mtspr MAS6, r2 /* Assume the current PID and AS are 0 */ 395*4882a593Smuzhiyun isync 396*4882a593Smuzhiyun msync 397*4882a593Smuzhiyun tlbsx 0, r1 /* This must succeed */ 398*4882a593Smuzhiyun 399*4882a593Smuzhiyun mfspr r14, MAS0 /* Save ESEL for later */ 400*4882a593Smuzhiyun rlwinm r14, r14, 16, 0xfff 401*4882a593Smuzhiyun 402*4882a593Smuzhiyun /* Set the size of the TLB to 4KB */ 403*4882a593Smuzhiyun mfspr r3, MAS1 404*4882a593Smuzhiyun li r2, 0xF80 405*4882a593Smuzhiyun andc r3, r3, r2 /* Clear the TSIZE bits */ 406*4882a593Smuzhiyun ori r3, r3, MAS1_TSIZE(BOOKE_PAGESZ_4K)@l 407*4882a593Smuzhiyun oris r3, r3, MAS1_IPROT@h 408*4882a593Smuzhiyun mtspr MAS1, r3 409*4882a593Smuzhiyun 410*4882a593Smuzhiyun /* 411*4882a593Smuzhiyun * Set the base address of the TLB to our PC. We assume that 412*4882a593Smuzhiyun * virtual == physical. We also assume that MAS2_EPN == MAS3_RPN. 413*4882a593Smuzhiyun */ 414*4882a593Smuzhiyun lis r3, MAS2_EPN@h 415*4882a593Smuzhiyun ori r3, r3, MAS2_EPN@l /* R3 = MAS2_EPN */ 416*4882a593Smuzhiyun 417*4882a593Smuzhiyun and r1, r1, r3 /* Our PC, rounded down to the nearest page */ 418*4882a593Smuzhiyun 419*4882a593Smuzhiyun mfspr r2, MAS2 420*4882a593Smuzhiyun andc r2, r2, r3 421*4882a593Smuzhiyun or r2, r2, r1 422*4882a593Smuzhiyun#ifdef CONFIG_SYS_FSL_ERRATUM_A004510 423*4882a593Smuzhiyun cmpwi r27,0 424*4882a593Smuzhiyun beq 1f 425*4882a593Smuzhiyun andi. r15, r2, MAS2_I|MAS2_G /* save the old I/G for later */ 426*4882a593Smuzhiyun rlwinm r2, r2, 0, ~MAS2_I 427*4882a593Smuzhiyun ori r2, r2, MAS2_G 428*4882a593Smuzhiyun1: 429*4882a593Smuzhiyun#endif 430*4882a593Smuzhiyun mtspr MAS2, r2 /* Set the EPN to our PC base address */ 431*4882a593Smuzhiyun 432*4882a593Smuzhiyun mfspr r2, MAS3 433*4882a593Smuzhiyun andc r2, r2, r3 434*4882a593Smuzhiyun or r2, r2, r1 435*4882a593Smuzhiyun mtspr MAS3, r2 /* Set the RPN to our PC base address */ 436*4882a593Smuzhiyun 437*4882a593Smuzhiyun isync 438*4882a593Smuzhiyun msync 439*4882a593Smuzhiyun tlbwe 440*4882a593Smuzhiyun 441*4882a593Smuzhiyun/* 442*4882a593Smuzhiyun * Clear out any other TLB entries that may exist, to avoid conflicts. 443*4882a593Smuzhiyun * Our TLB entry is in r14. 444*4882a593Smuzhiyun */ 445*4882a593Smuzhiyun li r0, TLBIVAX_ALL | TLBIVAX_TLB0 446*4882a593Smuzhiyun tlbivax 0, r0 447*4882a593Smuzhiyun tlbsync 448*4882a593Smuzhiyun 449*4882a593Smuzhiyun mfspr r4, SPRN_TLB1CFG 450*4882a593Smuzhiyun rlwinm r4, r4, 0, TLBnCFG_NENTRY_MASK 451*4882a593Smuzhiyun 452*4882a593Smuzhiyun li r3, 0 453*4882a593Smuzhiyun mtspr MAS1, r3 454*4882a593Smuzhiyun1: cmpw r3, r14 455*4882a593Smuzhiyun rlwinm r5, r3, 16, MAS0_ESEL_MSK 456*4882a593Smuzhiyun addi r3, r3, 1 457*4882a593Smuzhiyun beq 2f /* skip the entry we're executing from */ 458*4882a593Smuzhiyun 459*4882a593Smuzhiyun oris r5, r5, MAS0_TLBSEL(1)@h 460*4882a593Smuzhiyun mtspr MAS0, r5 461*4882a593Smuzhiyun 462*4882a593Smuzhiyun isync 463*4882a593Smuzhiyun tlbwe 464*4882a593Smuzhiyun isync 465*4882a593Smuzhiyun msync 466*4882a593Smuzhiyun 467*4882a593Smuzhiyun2: cmpw r3, r4 468*4882a593Smuzhiyun blt 1b 469*4882a593Smuzhiyun 470*4882a593Smuzhiyun#if defined(CONFIG_SYS_PPC_E500_DEBUG_TLB) && !defined(MINIMAL_SPL) && \ 471*4882a593Smuzhiyun !defined(CONFIG_SECURE_BOOT) 472*4882a593Smuzhiyun/* 473*4882a593Smuzhiyun * TLB entry for debuggging in AS1 474*4882a593Smuzhiyun * Create temporary TLB entry in AS0 to handle debug exception 475*4882a593Smuzhiyun * As on debug exception MSR is cleared i.e. Address space is changed 476*4882a593Smuzhiyun * to 0. A TLB entry (in AS0) is required to handle debug exception generated 477*4882a593Smuzhiyun * in AS1. 478*4882a593Smuzhiyun */ 479*4882a593Smuzhiyun 480*4882a593Smuzhiyun#ifdef NOR_BOOT 481*4882a593Smuzhiyun/* 482*4882a593Smuzhiyun * TLB entry is created for IVPR + IVOR15 to map on valid OP code address 483*4882a593Smuzhiyun * bacause flash's virtual address maps to 0xff800000 - 0xffffffff. 484*4882a593Smuzhiyun * and this window is outside of 4K boot window. 485*4882a593Smuzhiyun */ 486*4882a593Smuzhiyun create_tlb1_entry CONFIG_SYS_PPC_E500_DEBUG_TLB, \ 487*4882a593Smuzhiyun 0, BOOKE_PAGESZ_4M, \ 488*4882a593Smuzhiyun CONFIG_SYS_MONITOR_BASE & 0xffc00000, MAS2_I|MAS2_G, \ 489*4882a593Smuzhiyun 0xffc00000, MAS3_SX|MAS3_SW|MAS3_SR, \ 490*4882a593Smuzhiyun 0, r6 491*4882a593Smuzhiyun 492*4882a593Smuzhiyun#else 493*4882a593Smuzhiyun/* 494*4882a593Smuzhiyun * TLB entry is created for IVPR + IVOR15 to map on valid OP code address 495*4882a593Smuzhiyun * because "nexti" will resize TLB to 4K 496*4882a593Smuzhiyun */ 497*4882a593Smuzhiyun create_tlb1_entry CONFIG_SYS_PPC_E500_DEBUG_TLB, \ 498*4882a593Smuzhiyun 0, BOOKE_PAGESZ_256K, \ 499*4882a593Smuzhiyun CONFIG_SYS_MONITOR_BASE & 0xfffc0000, MAS2_I, \ 500*4882a593Smuzhiyun CONFIG_SYS_MONITOR_BASE & 0xfffc0000, MAS3_SX|MAS3_SW|MAS3_SR, \ 501*4882a593Smuzhiyun 0, r6 502*4882a593Smuzhiyun#endif 503*4882a593Smuzhiyun#endif 504*4882a593Smuzhiyun 505*4882a593Smuzhiyun/* 506*4882a593Smuzhiyun * Relocate CCSR, if necessary. We relocate CCSR if (obviously) the default 507*4882a593Smuzhiyun * location is not where we want it. This typically happens on a 36-bit 508*4882a593Smuzhiyun * system, where we want to move CCSR to near the top of 36-bit address space. 509*4882a593Smuzhiyun * 510*4882a593Smuzhiyun * To move CCSR, we create two temporary TLBs, one for the old location, and 511*4882a593Smuzhiyun * another for the new location. On CoreNet systems, we also need to create 512*4882a593Smuzhiyun * a special, temporary LAW. 513*4882a593Smuzhiyun * 514*4882a593Smuzhiyun * As a general rule, TLB0 is used for short-term TLBs, and TLB1 is used for 515*4882a593Smuzhiyun * long-term TLBs, so we use TLB0 here. 516*4882a593Smuzhiyun */ 517*4882a593Smuzhiyun#if (CONFIG_SYS_CCSRBAR_DEFAULT != CONFIG_SYS_CCSRBAR_PHYS) 518*4882a593Smuzhiyun 519*4882a593Smuzhiyun#if !defined(CONFIG_SYS_CCSRBAR_PHYS_HIGH) || !defined(CONFIG_SYS_CCSRBAR_PHYS_LOW) 520*4882a593Smuzhiyun#error "CONFIG_SYS_CCSRBAR_PHYS_HIGH and CONFIG_SYS_CCSRBAR_PHYS_LOW) must be defined." 521*4882a593Smuzhiyun#endif 522*4882a593Smuzhiyun 523*4882a593Smuzhiyuncreate_ccsr_new_tlb: 524*4882a593Smuzhiyun /* 525*4882a593Smuzhiyun * Create a TLB for the new location of CCSR. Register R8 is reserved 526*4882a593Smuzhiyun * for the virtual address of this TLB (CONFIG_SYS_CCSRBAR). 527*4882a593Smuzhiyun */ 528*4882a593Smuzhiyun lis r8, CONFIG_SYS_CCSRBAR@h 529*4882a593Smuzhiyun ori r8, r8, CONFIG_SYS_CCSRBAR@l 530*4882a593Smuzhiyun lis r9, (CONFIG_SYS_CCSRBAR + 0x1000)@h 531*4882a593Smuzhiyun ori r9, r9, (CONFIG_SYS_CCSRBAR + 0x1000)@l 532*4882a593Smuzhiyun create_tlb0_entry 0, \ 533*4882a593Smuzhiyun 0, BOOKE_PAGESZ_4K, \ 534*4882a593Smuzhiyun CONFIG_SYS_CCSRBAR, MAS2_I|MAS2_G, \ 535*4882a593Smuzhiyun CONFIG_SYS_CCSRBAR_PHYS_LOW, MAS3_SW|MAS3_SR, \ 536*4882a593Smuzhiyun CONFIG_SYS_CCSRBAR_PHYS_HIGH, r3 537*4882a593Smuzhiyun /* 538*4882a593Smuzhiyun * Create a TLB for the current location of CCSR. Register R9 is reserved 539*4882a593Smuzhiyun * for the virtual address of this TLB (CONFIG_SYS_CCSRBAR + 0x1000). 540*4882a593Smuzhiyun */ 541*4882a593Smuzhiyuncreate_ccsr_old_tlb: 542*4882a593Smuzhiyun create_tlb0_entry 1, \ 543*4882a593Smuzhiyun 0, BOOKE_PAGESZ_4K, \ 544*4882a593Smuzhiyun CONFIG_SYS_CCSRBAR + 0x1000, MAS2_I|MAS2_G, \ 545*4882a593Smuzhiyun CONFIG_SYS_CCSRBAR_DEFAULT, MAS3_SW|MAS3_SR, \ 546*4882a593Smuzhiyun 0, r3 /* The default CCSR address is always a 32-bit number */ 547*4882a593Smuzhiyun 548*4882a593Smuzhiyun 549*4882a593Smuzhiyun /* 550*4882a593Smuzhiyun * We have a TLB for what we think is the current (old) CCSR. Let's 551*4882a593Smuzhiyun * verify that, otherwise we won't be able to move it. 552*4882a593Smuzhiyun * CONFIG_SYS_CCSRBAR_DEFAULT is always a 32-bit number, so we only 553*4882a593Smuzhiyun * need to compare the lower 32 bits of CCSRBAR on CoreNet systems. 554*4882a593Smuzhiyun */ 555*4882a593Smuzhiyunverify_old_ccsr: 556*4882a593Smuzhiyun lis r0, CONFIG_SYS_CCSRBAR_DEFAULT@h 557*4882a593Smuzhiyun ori r0, r0, CONFIG_SYS_CCSRBAR_DEFAULT@l 558*4882a593Smuzhiyun#ifdef CONFIG_FSL_CORENET 559*4882a593Smuzhiyun lwz r1, 4(r9) /* CCSRBARL */ 560*4882a593Smuzhiyun#else 561*4882a593Smuzhiyun lwz r1, 0(r9) /* CCSRBAR, shifted right by 12 */ 562*4882a593Smuzhiyun slwi r1, r1, 12 563*4882a593Smuzhiyun#endif 564*4882a593Smuzhiyun 565*4882a593Smuzhiyun cmpl 0, r0, r1 566*4882a593Smuzhiyun 567*4882a593Smuzhiyun /* 568*4882a593Smuzhiyun * If the value we read from CCSRBARL is not what we expect, then 569*4882a593Smuzhiyun * enter an infinite loop. This will at least allow a debugger to 570*4882a593Smuzhiyun * halt execution and examine TLBs, etc. There's no point in going 571*4882a593Smuzhiyun * on. 572*4882a593Smuzhiyun */ 573*4882a593Smuzhiyuninfinite_debug_loop: 574*4882a593Smuzhiyun bne infinite_debug_loop 575*4882a593Smuzhiyun 576*4882a593Smuzhiyun#ifdef CONFIG_FSL_CORENET 577*4882a593Smuzhiyun 578*4882a593Smuzhiyun#define CCSR_LAWBARH0 (CONFIG_SYS_CCSRBAR + 0x1000) 579*4882a593Smuzhiyun#define LAW_SIZE_4K 0xb 580*4882a593Smuzhiyun#define CCSRBAR_LAWAR (LAW_EN | (0x1e << 20) | LAW_SIZE_4K) 581*4882a593Smuzhiyun#define CCSRAR_C 0x80000000 /* Commit */ 582*4882a593Smuzhiyun 583*4882a593Smuzhiyuncreate_temp_law: 584*4882a593Smuzhiyun /* 585*4882a593Smuzhiyun * On CoreNet systems, we create the temporary LAW using a special LAW 586*4882a593Smuzhiyun * target ID of 0x1e. LAWBARH is at offset 0xc00 in CCSR. 587*4882a593Smuzhiyun */ 588*4882a593Smuzhiyun lis r0, CONFIG_SYS_CCSRBAR_PHYS_HIGH@h 589*4882a593Smuzhiyun ori r0, r0, CONFIG_SYS_CCSRBAR_PHYS_HIGH@l 590*4882a593Smuzhiyun lis r1, CONFIG_SYS_CCSRBAR_PHYS_LOW@h 591*4882a593Smuzhiyun ori r1, r1, CONFIG_SYS_CCSRBAR_PHYS_LOW@l 592*4882a593Smuzhiyun lis r2, CCSRBAR_LAWAR@h 593*4882a593Smuzhiyun ori r2, r2, CCSRBAR_LAWAR@l 594*4882a593Smuzhiyun 595*4882a593Smuzhiyun stw r0, 0xc00(r9) /* LAWBARH0 */ 596*4882a593Smuzhiyun stw r1, 0xc04(r9) /* LAWBARL0 */ 597*4882a593Smuzhiyun sync 598*4882a593Smuzhiyun stw r2, 0xc08(r9) /* LAWAR0 */ 599*4882a593Smuzhiyun 600*4882a593Smuzhiyun /* 601*4882a593Smuzhiyun * Read back from LAWAR to ensure the update is complete. e500mc 602*4882a593Smuzhiyun * cores also require an isync. 603*4882a593Smuzhiyun */ 604*4882a593Smuzhiyun lwz r0, 0xc08(r9) /* LAWAR0 */ 605*4882a593Smuzhiyun isync 606*4882a593Smuzhiyun 607*4882a593Smuzhiyun /* 608*4882a593Smuzhiyun * Read the current CCSRBARH and CCSRBARL using load word instructions. 609*4882a593Smuzhiyun * Follow this with an isync instruction. This forces any outstanding 610*4882a593Smuzhiyun * accesses to configuration space to completion. 611*4882a593Smuzhiyun */ 612*4882a593Smuzhiyunread_old_ccsrbar: 613*4882a593Smuzhiyun lwz r0, 0(r9) /* CCSRBARH */ 614*4882a593Smuzhiyun lwz r0, 4(r9) /* CCSRBARL */ 615*4882a593Smuzhiyun isync 616*4882a593Smuzhiyun 617*4882a593Smuzhiyun /* 618*4882a593Smuzhiyun * Write the new values for CCSRBARH and CCSRBARL to their old 619*4882a593Smuzhiyun * locations. The CCSRBARH has a shadow register. When the CCSRBARH 620*4882a593Smuzhiyun * has a new value written it loads a CCSRBARH shadow register. When 621*4882a593Smuzhiyun * the CCSRBARL is written, the CCSRBARH shadow register contents 622*4882a593Smuzhiyun * along with the CCSRBARL value are loaded into the CCSRBARH and 623*4882a593Smuzhiyun * CCSRBARL registers, respectively. Follow this with a sync 624*4882a593Smuzhiyun * instruction. 625*4882a593Smuzhiyun */ 626*4882a593Smuzhiyunwrite_new_ccsrbar: 627*4882a593Smuzhiyun lis r0, CONFIG_SYS_CCSRBAR_PHYS_HIGH@h 628*4882a593Smuzhiyun ori r0, r0, CONFIG_SYS_CCSRBAR_PHYS_HIGH@l 629*4882a593Smuzhiyun lis r1, CONFIG_SYS_CCSRBAR_PHYS_LOW@h 630*4882a593Smuzhiyun ori r1, r1, CONFIG_SYS_CCSRBAR_PHYS_LOW@l 631*4882a593Smuzhiyun lis r2, CCSRAR_C@h 632*4882a593Smuzhiyun ori r2, r2, CCSRAR_C@l 633*4882a593Smuzhiyun 634*4882a593Smuzhiyun stw r0, 0(r9) /* Write to CCSRBARH */ 635*4882a593Smuzhiyun sync /* Make sure we write to CCSRBARH first */ 636*4882a593Smuzhiyun stw r1, 4(r9) /* Write to CCSRBARL */ 637*4882a593Smuzhiyun sync 638*4882a593Smuzhiyun 639*4882a593Smuzhiyun /* 640*4882a593Smuzhiyun * Write a 1 to the commit bit (C) of CCSRAR at the old location. 641*4882a593Smuzhiyun * Follow this with a sync instruction. 642*4882a593Smuzhiyun */ 643*4882a593Smuzhiyun stw r2, 8(r9) 644*4882a593Smuzhiyun sync 645*4882a593Smuzhiyun 646*4882a593Smuzhiyun /* Delete the temporary LAW */ 647*4882a593Smuzhiyundelete_temp_law: 648*4882a593Smuzhiyun li r1, 0 649*4882a593Smuzhiyun stw r1, 0xc08(r8) 650*4882a593Smuzhiyun sync 651*4882a593Smuzhiyun stw r1, 0xc00(r8) 652*4882a593Smuzhiyun stw r1, 0xc04(r8) 653*4882a593Smuzhiyun sync 654*4882a593Smuzhiyun 655*4882a593Smuzhiyun#else /* #ifdef CONFIG_FSL_CORENET */ 656*4882a593Smuzhiyun 657*4882a593Smuzhiyunwrite_new_ccsrbar: 658*4882a593Smuzhiyun /* 659*4882a593Smuzhiyun * Read the current value of CCSRBAR using a load word instruction 660*4882a593Smuzhiyun * followed by an isync. This forces all accesses to configuration 661*4882a593Smuzhiyun * space to complete. 662*4882a593Smuzhiyun */ 663*4882a593Smuzhiyun sync 664*4882a593Smuzhiyun lwz r0, 0(r9) 665*4882a593Smuzhiyun isync 666*4882a593Smuzhiyun 667*4882a593Smuzhiyun/* CONFIG_SYS_CCSRBAR_PHYS right shifted by 12 */ 668*4882a593Smuzhiyun#define CCSRBAR_PHYS_RS12 ((CONFIG_SYS_CCSRBAR_PHYS_HIGH << 20) | \ 669*4882a593Smuzhiyun (CONFIG_SYS_CCSRBAR_PHYS_LOW >> 12)) 670*4882a593Smuzhiyun 671*4882a593Smuzhiyun /* Write the new value to CCSRBAR. */ 672*4882a593Smuzhiyun lis r0, CCSRBAR_PHYS_RS12@h 673*4882a593Smuzhiyun ori r0, r0, CCSRBAR_PHYS_RS12@l 674*4882a593Smuzhiyun stw r0, 0(r9) 675*4882a593Smuzhiyun sync 676*4882a593Smuzhiyun 677*4882a593Smuzhiyun /* 678*4882a593Smuzhiyun * The manual says to perform a load of an address that does not 679*4882a593Smuzhiyun * access configuration space or the on-chip SRAM using an existing TLB, 680*4882a593Smuzhiyun * but that doesn't appear to be necessary. We will do the isync, 681*4882a593Smuzhiyun * though. 682*4882a593Smuzhiyun */ 683*4882a593Smuzhiyun isync 684*4882a593Smuzhiyun 685*4882a593Smuzhiyun /* 686*4882a593Smuzhiyun * Read the contents of CCSRBAR from its new location, followed by 687*4882a593Smuzhiyun * another isync. 688*4882a593Smuzhiyun */ 689*4882a593Smuzhiyun lwz r0, 0(r8) 690*4882a593Smuzhiyun isync 691*4882a593Smuzhiyun 692*4882a593Smuzhiyun#endif /* #ifdef CONFIG_FSL_CORENET */ 693*4882a593Smuzhiyun 694*4882a593Smuzhiyun /* Delete the temporary TLBs */ 695*4882a593Smuzhiyundelete_temp_tlbs: 696*4882a593Smuzhiyun delete_tlb0_entry 0, CONFIG_SYS_CCSRBAR, MAS2_I|MAS2_G, r3 697*4882a593Smuzhiyun delete_tlb0_entry 1, CONFIG_SYS_CCSRBAR + 0x1000, MAS2_I|MAS2_G, r3 698*4882a593Smuzhiyun 699*4882a593Smuzhiyun#endif /* #if (CONFIG_SYS_CCSRBAR_DEFAULT != CONFIG_SYS_CCSRBAR_PHYS) */ 700*4882a593Smuzhiyun 701*4882a593Smuzhiyun#if defined(CONFIG_SYS_FSL_QORIQ_CHASSIS2) && defined(CONFIG_E6500) 702*4882a593Smuzhiyuncreate_ccsr_l2_tlb: 703*4882a593Smuzhiyun /* 704*4882a593Smuzhiyun * Create a TLB for the MMR location of CCSR 705*4882a593Smuzhiyun * to access L2CSR0 register 706*4882a593Smuzhiyun */ 707*4882a593Smuzhiyun create_tlb0_entry 0, \ 708*4882a593Smuzhiyun 0, BOOKE_PAGESZ_4K, \ 709*4882a593Smuzhiyun CONFIG_SYS_CCSRBAR + 0xC20000, MAS2_I|MAS2_G, \ 710*4882a593Smuzhiyun CONFIG_SYS_CCSRBAR_PHYS_LOW + 0xC20000, MAS3_SW|MAS3_SR, \ 711*4882a593Smuzhiyun CONFIG_SYS_CCSRBAR_PHYS_HIGH, r3 712*4882a593Smuzhiyun 713*4882a593Smuzhiyunenable_l2_cluster_l2: 714*4882a593Smuzhiyun /* enable L2 cache */ 715*4882a593Smuzhiyun lis r3, (CONFIG_SYS_CCSRBAR + 0xC20000)@h 716*4882a593Smuzhiyun ori r3, r3, (CONFIG_SYS_CCSRBAR + 0xC20000)@l 717*4882a593Smuzhiyun li r4, 33 /* stash id */ 718*4882a593Smuzhiyun stw r4, 4(r3) 719*4882a593Smuzhiyun lis r4, (L2CSR0_L2FI|L2CSR0_L2LFC)@h 720*4882a593Smuzhiyun ori r4, r4, (L2CSR0_L2FI|L2CSR0_L2LFC)@l 721*4882a593Smuzhiyun sync 722*4882a593Smuzhiyun stw r4, 0(r3) /* invalidate L2 */ 723*4882a593Smuzhiyun /* Poll till the bits are cleared */ 724*4882a593Smuzhiyun1: sync 725*4882a593Smuzhiyun lwz r0, 0(r3) 726*4882a593Smuzhiyun twi 0, r0, 0 727*4882a593Smuzhiyun isync 728*4882a593Smuzhiyun and. r1, r0, r4 729*4882a593Smuzhiyun bne 1b 730*4882a593Smuzhiyun 731*4882a593Smuzhiyun /* L2PE must be set before L2 cache is enabled */ 732*4882a593Smuzhiyun lis r4, (L2CSR0_L2PE)@h 733*4882a593Smuzhiyun ori r4, r4, (L2CSR0_L2PE)@l 734*4882a593Smuzhiyun sync 735*4882a593Smuzhiyun stw r4, 0(r3) /* enable L2 parity/ECC error checking */ 736*4882a593Smuzhiyun /* Poll till the bit is set */ 737*4882a593Smuzhiyun1: sync 738*4882a593Smuzhiyun lwz r0, 0(r3) 739*4882a593Smuzhiyun twi 0, r0, 0 740*4882a593Smuzhiyun isync 741*4882a593Smuzhiyun and. r1, r0, r4 742*4882a593Smuzhiyun beq 1b 743*4882a593Smuzhiyun 744*4882a593Smuzhiyun lis r4, (L2CSR0_L2E|L2CSR0_L2PE)@h 745*4882a593Smuzhiyun ori r4, r4, (L2CSR0_L2REP_MODE)@l 746*4882a593Smuzhiyun sync 747*4882a593Smuzhiyun stw r4, 0(r3) /* enable L2 */ 748*4882a593Smuzhiyun /* Poll till the bit is set */ 749*4882a593Smuzhiyun1: sync 750*4882a593Smuzhiyun lwz r0, 0(r3) 751*4882a593Smuzhiyun twi 0, r0, 0 752*4882a593Smuzhiyun isync 753*4882a593Smuzhiyun and. r1, r0, r4 754*4882a593Smuzhiyun beq 1b 755*4882a593Smuzhiyun 756*4882a593Smuzhiyundelete_ccsr_l2_tlb: 757*4882a593Smuzhiyun delete_tlb0_entry 0, CONFIG_SYS_CCSRBAR + 0xC20000, MAS2_I|MAS2_G, r3 758*4882a593Smuzhiyun#endif 759*4882a593Smuzhiyun 760*4882a593Smuzhiyun /* 761*4882a593Smuzhiyun * Enable the L1. On e6500, this has to be done 762*4882a593Smuzhiyun * after the L2 is up. 763*4882a593Smuzhiyun */ 764*4882a593Smuzhiyun 765*4882a593Smuzhiyun#ifdef CONFIG_SYS_CACHE_STASHING 766*4882a593Smuzhiyun /* set stash id to (coreID) * 2 + 32 + L1 CT (0) */ 767*4882a593Smuzhiyun li r2,(32 + 0) 768*4882a593Smuzhiyun mtspr L1CSR2,r2 769*4882a593Smuzhiyun#endif 770*4882a593Smuzhiyun 771*4882a593Smuzhiyun /* Enable/invalidate the I-Cache */ 772*4882a593Smuzhiyun lis r2,(L1CSR1_ICFI|L1CSR1_ICLFR)@h 773*4882a593Smuzhiyun ori r2,r2,(L1CSR1_ICFI|L1CSR1_ICLFR)@l 774*4882a593Smuzhiyun mtspr SPRN_L1CSR1,r2 775*4882a593Smuzhiyun1: 776*4882a593Smuzhiyun mfspr r3,SPRN_L1CSR1 777*4882a593Smuzhiyun and. r1,r3,r2 778*4882a593Smuzhiyun bne 1b 779*4882a593Smuzhiyun 780*4882a593Smuzhiyun lis r3,(L1CSR1_CPE|L1CSR1_ICE)@h 781*4882a593Smuzhiyun ori r3,r3,(L1CSR1_CPE|L1CSR1_ICE)@l 782*4882a593Smuzhiyun mtspr SPRN_L1CSR1,r3 783*4882a593Smuzhiyun isync 784*4882a593Smuzhiyun2: 785*4882a593Smuzhiyun mfspr r3,SPRN_L1CSR1 786*4882a593Smuzhiyun andi. r1,r3,L1CSR1_ICE@l 787*4882a593Smuzhiyun beq 2b 788*4882a593Smuzhiyun 789*4882a593Smuzhiyun /* Enable/invalidate the D-Cache */ 790*4882a593Smuzhiyun lis r2,(L1CSR0_DCFI|L1CSR0_DCLFR)@h 791*4882a593Smuzhiyun ori r2,r2,(L1CSR0_DCFI|L1CSR0_DCLFR)@l 792*4882a593Smuzhiyun mtspr SPRN_L1CSR0,r2 793*4882a593Smuzhiyun1: 794*4882a593Smuzhiyun mfspr r3,SPRN_L1CSR0 795*4882a593Smuzhiyun and. r1,r3,r2 796*4882a593Smuzhiyun bne 1b 797*4882a593Smuzhiyun 798*4882a593Smuzhiyun lis r3,(L1CSR0_CPE|L1CSR0_DCE)@h 799*4882a593Smuzhiyun ori r3,r3,(L1CSR0_CPE|L1CSR0_DCE)@l 800*4882a593Smuzhiyun mtspr SPRN_L1CSR0,r3 801*4882a593Smuzhiyun isync 802*4882a593Smuzhiyun2: 803*4882a593Smuzhiyun mfspr r3,SPRN_L1CSR0 804*4882a593Smuzhiyun andi. r1,r3,L1CSR0_DCE@l 805*4882a593Smuzhiyun beq 2b 806*4882a593Smuzhiyun#ifdef CONFIG_SYS_FSL_ERRATUM_A004510 807*4882a593Smuzhiyun#define DCSR_LAWBARH0 (CONFIG_SYS_CCSRBAR + 0x1000) 808*4882a593Smuzhiyun#define LAW_SIZE_1M 0x13 809*4882a593Smuzhiyun#define DCSRBAR_LAWAR (LAW_EN | (0x1d << 20) | LAW_SIZE_1M) 810*4882a593Smuzhiyun 811*4882a593Smuzhiyun cmpwi r27,0 812*4882a593Smuzhiyun beq 9f 813*4882a593Smuzhiyun 814*4882a593Smuzhiyun /* 815*4882a593Smuzhiyun * Create a TLB entry for CCSR 816*4882a593Smuzhiyun * 817*4882a593Smuzhiyun * We're executing out of TLB1 entry in r14, and that's the only 818*4882a593Smuzhiyun * TLB entry that exists. To allocate some TLB entries for our 819*4882a593Smuzhiyun * own use, flip a bit high enough that we won't flip it again 820*4882a593Smuzhiyun * via incrementing. 821*4882a593Smuzhiyun */ 822*4882a593Smuzhiyun 823*4882a593Smuzhiyun xori r8, r14, 32 824*4882a593Smuzhiyun lis r0, MAS0_TLBSEL(1)@h 825*4882a593Smuzhiyun rlwimi r0, r8, 16, MAS0_ESEL_MSK 826*4882a593Smuzhiyun lis r1, FSL_BOOKE_MAS1(1, 1, 0, 0, BOOKE_PAGESZ_16M)@h 827*4882a593Smuzhiyun ori r1, r1, FSL_BOOKE_MAS1(1, 1, 0, 0, BOOKE_PAGESZ_16M)@l 828*4882a593Smuzhiyun lis r7, CONFIG_SYS_CCSRBAR@h 829*4882a593Smuzhiyun ori r7, r7, CONFIG_SYS_CCSRBAR@l 830*4882a593Smuzhiyun ori r2, r7, MAS2_I|MAS2_G 831*4882a593Smuzhiyun lis r3, FSL_BOOKE_MAS3(CONFIG_SYS_CCSRBAR_PHYS_LOW, 0, (MAS3_SW|MAS3_SR))@h 832*4882a593Smuzhiyun ori r3, r3, FSL_BOOKE_MAS3(CONFIG_SYS_CCSRBAR_PHYS_LOW, 0, (MAS3_SW|MAS3_SR))@l 833*4882a593Smuzhiyun lis r4, CONFIG_SYS_CCSRBAR_PHYS_HIGH@h 834*4882a593Smuzhiyun ori r4, r4, CONFIG_SYS_CCSRBAR_PHYS_HIGH@l 835*4882a593Smuzhiyun mtspr MAS0, r0 836*4882a593Smuzhiyun mtspr MAS1, r1 837*4882a593Smuzhiyun mtspr MAS2, r2 838*4882a593Smuzhiyun mtspr MAS3, r3 839*4882a593Smuzhiyun mtspr MAS7, r4 840*4882a593Smuzhiyun isync 841*4882a593Smuzhiyun tlbwe 842*4882a593Smuzhiyun isync 843*4882a593Smuzhiyun msync 844*4882a593Smuzhiyun 845*4882a593Smuzhiyun /* Map DCSR temporarily to physical address zero */ 846*4882a593Smuzhiyun li r0, 0 847*4882a593Smuzhiyun lis r3, DCSRBAR_LAWAR@h 848*4882a593Smuzhiyun ori r3, r3, DCSRBAR_LAWAR@l 849*4882a593Smuzhiyun 850*4882a593Smuzhiyun stw r0, 0xc00(r7) /* LAWBARH0 */ 851*4882a593Smuzhiyun stw r0, 0xc04(r7) /* LAWBARL0 */ 852*4882a593Smuzhiyun sync 853*4882a593Smuzhiyun stw r3, 0xc08(r7) /* LAWAR0 */ 854*4882a593Smuzhiyun 855*4882a593Smuzhiyun /* Read back from LAWAR to ensure the update is complete. */ 856*4882a593Smuzhiyun lwz r3, 0xc08(r7) /* LAWAR0 */ 857*4882a593Smuzhiyun isync 858*4882a593Smuzhiyun 859*4882a593Smuzhiyun /* Create a TLB entry for DCSR at zero */ 860*4882a593Smuzhiyun 861*4882a593Smuzhiyun addi r9, r8, 1 862*4882a593Smuzhiyun lis r0, MAS0_TLBSEL(1)@h 863*4882a593Smuzhiyun rlwimi r0, r9, 16, MAS0_ESEL_MSK 864*4882a593Smuzhiyun lis r1, FSL_BOOKE_MAS1(1, 1, 0, 0, BOOKE_PAGESZ_1M)@h 865*4882a593Smuzhiyun ori r1, r1, FSL_BOOKE_MAS1(1, 1, 0, 0, BOOKE_PAGESZ_1M)@l 866*4882a593Smuzhiyun li r6, 0 /* DCSR effective address */ 867*4882a593Smuzhiyun ori r2, r6, MAS2_I|MAS2_G 868*4882a593Smuzhiyun li r3, MAS3_SW|MAS3_SR 869*4882a593Smuzhiyun li r4, 0 870*4882a593Smuzhiyun mtspr MAS0, r0 871*4882a593Smuzhiyun mtspr MAS1, r1 872*4882a593Smuzhiyun mtspr MAS2, r2 873*4882a593Smuzhiyun mtspr MAS3, r3 874*4882a593Smuzhiyun mtspr MAS7, r4 875*4882a593Smuzhiyun isync 876*4882a593Smuzhiyun tlbwe 877*4882a593Smuzhiyun isync 878*4882a593Smuzhiyun msync 879*4882a593Smuzhiyun 880*4882a593Smuzhiyun /* enable the timebase */ 881*4882a593Smuzhiyun#define CTBENR 0xe2084 882*4882a593Smuzhiyun li r3, 1 883*4882a593Smuzhiyun addis r4, r7, CTBENR@ha 884*4882a593Smuzhiyun stw r3, CTBENR@l(r4) 885*4882a593Smuzhiyun lwz r3, CTBENR@l(r4) 886*4882a593Smuzhiyun twi 0,r3,0 887*4882a593Smuzhiyun isync 888*4882a593Smuzhiyun 889*4882a593Smuzhiyun .macro erratum_set_ccsr offset value 890*4882a593Smuzhiyun addis r3, r7, \offset@ha 891*4882a593Smuzhiyun lis r4, \value@h 892*4882a593Smuzhiyun addi r3, r3, \offset@l 893*4882a593Smuzhiyun ori r4, r4, \value@l 894*4882a593Smuzhiyun bl erratum_set_value 895*4882a593Smuzhiyun .endm 896*4882a593Smuzhiyun 897*4882a593Smuzhiyun .macro erratum_set_dcsr offset value 898*4882a593Smuzhiyun addis r3, r6, \offset@ha 899*4882a593Smuzhiyun lis r4, \value@h 900*4882a593Smuzhiyun addi r3, r3, \offset@l 901*4882a593Smuzhiyun ori r4, r4, \value@l 902*4882a593Smuzhiyun bl erratum_set_value 903*4882a593Smuzhiyun .endm 904*4882a593Smuzhiyun 905*4882a593Smuzhiyun erratum_set_dcsr 0xb0e08 0xe0201800 906*4882a593Smuzhiyun erratum_set_dcsr 0xb0e18 0xe0201800 907*4882a593Smuzhiyun erratum_set_dcsr 0xb0e38 0xe0400000 908*4882a593Smuzhiyun erratum_set_dcsr 0xb0008 0x00900000 909*4882a593Smuzhiyun erratum_set_dcsr 0xb0e40 0xe00a0000 910*4882a593Smuzhiyun erratum_set_ccsr 0x18600 CONFIG_SYS_FSL_CORENET_SNOOPVEC_COREONLY 911*4882a593Smuzhiyun#ifdef CONFIG_RAMBOOT_PBL 912*4882a593Smuzhiyun erratum_set_ccsr 0x10f00 0x495e5000 913*4882a593Smuzhiyun#else 914*4882a593Smuzhiyun erratum_set_ccsr 0x10f00 0x415e5000 915*4882a593Smuzhiyun#endif 916*4882a593Smuzhiyun erratum_set_ccsr 0x11f00 0x415e5000 917*4882a593Smuzhiyun 918*4882a593Smuzhiyun /* Make temp mapping uncacheable again, if it was initially */ 919*4882a593Smuzhiyun bl 2f 920*4882a593Smuzhiyun2: mflr r3 921*4882a593Smuzhiyun tlbsx 0, r3 922*4882a593Smuzhiyun mfspr r4, MAS2 923*4882a593Smuzhiyun rlwimi r4, r15, 0, MAS2_I 924*4882a593Smuzhiyun rlwimi r4, r15, 0, MAS2_G 925*4882a593Smuzhiyun mtspr MAS2, r4 926*4882a593Smuzhiyun isync 927*4882a593Smuzhiyun tlbwe 928*4882a593Smuzhiyun isync 929*4882a593Smuzhiyun msync 930*4882a593Smuzhiyun 931*4882a593Smuzhiyun /* Clear the cache */ 932*4882a593Smuzhiyun lis r3,(L1CSR1_ICFI|L1CSR1_ICLFR)@h 933*4882a593Smuzhiyun ori r3,r3,(L1CSR1_ICFI|L1CSR1_ICLFR)@l 934*4882a593Smuzhiyun sync 935*4882a593Smuzhiyun isync 936*4882a593Smuzhiyun mtspr SPRN_L1CSR1,r3 937*4882a593Smuzhiyun isync 938*4882a593Smuzhiyun2: sync 939*4882a593Smuzhiyun mfspr r4,SPRN_L1CSR1 940*4882a593Smuzhiyun and. r4,r4,r3 941*4882a593Smuzhiyun bne 2b 942*4882a593Smuzhiyun 943*4882a593Smuzhiyun lis r3,(L1CSR1_CPE|L1CSR1_ICE)@h 944*4882a593Smuzhiyun ori r3,r3,(L1CSR1_CPE|L1CSR1_ICE)@l 945*4882a593Smuzhiyun sync 946*4882a593Smuzhiyun isync 947*4882a593Smuzhiyun mtspr SPRN_L1CSR1,r3 948*4882a593Smuzhiyun isync 949*4882a593Smuzhiyun2: sync 950*4882a593Smuzhiyun mfspr r4,SPRN_L1CSR1 951*4882a593Smuzhiyun and. r4,r4,r3 952*4882a593Smuzhiyun beq 2b 953*4882a593Smuzhiyun 954*4882a593Smuzhiyun /* Remove temporary mappings */ 955*4882a593Smuzhiyun lis r0, MAS0_TLBSEL(1)@h 956*4882a593Smuzhiyun rlwimi r0, r9, 16, MAS0_ESEL_MSK 957*4882a593Smuzhiyun li r3, 0 958*4882a593Smuzhiyun mtspr MAS0, r0 959*4882a593Smuzhiyun mtspr MAS1, r3 960*4882a593Smuzhiyun isync 961*4882a593Smuzhiyun tlbwe 962*4882a593Smuzhiyun isync 963*4882a593Smuzhiyun msync 964*4882a593Smuzhiyun 965*4882a593Smuzhiyun li r3, 0 966*4882a593Smuzhiyun stw r3, 0xc08(r7) /* LAWAR0 */ 967*4882a593Smuzhiyun lwz r3, 0xc08(r7) 968*4882a593Smuzhiyun isync 969*4882a593Smuzhiyun 970*4882a593Smuzhiyun lis r0, MAS0_TLBSEL(1)@h 971*4882a593Smuzhiyun rlwimi r0, r8, 16, MAS0_ESEL_MSK 972*4882a593Smuzhiyun li r3, 0 973*4882a593Smuzhiyun mtspr MAS0, r0 974*4882a593Smuzhiyun mtspr MAS1, r3 975*4882a593Smuzhiyun isync 976*4882a593Smuzhiyun tlbwe 977*4882a593Smuzhiyun isync 978*4882a593Smuzhiyun msync 979*4882a593Smuzhiyun 980*4882a593Smuzhiyun b 9f 981*4882a593Smuzhiyun 982*4882a593Smuzhiyun /* r3 = addr, r4 = value, clobbers r5, r11, r12 */ 983*4882a593Smuzhiyunerratum_set_value: 984*4882a593Smuzhiyun /* Lock two cache lines into I-Cache */ 985*4882a593Smuzhiyun sync 986*4882a593Smuzhiyun mfspr r11, SPRN_L1CSR1 987*4882a593Smuzhiyun rlwinm r11, r11, 0, ~L1CSR1_ICUL 988*4882a593Smuzhiyun sync 989*4882a593Smuzhiyun isync 990*4882a593Smuzhiyun mtspr SPRN_L1CSR1, r11 991*4882a593Smuzhiyun isync 992*4882a593Smuzhiyun 993*4882a593Smuzhiyun mflr r12 994*4882a593Smuzhiyun bl 5f 995*4882a593Smuzhiyun5: mflr r5 996*4882a593Smuzhiyun addi r5, r5, 2f - 5b 997*4882a593Smuzhiyun icbtls 0, 0, r5 998*4882a593Smuzhiyun addi r5, r5, 64 999*4882a593Smuzhiyun 1000*4882a593Smuzhiyun sync 1001*4882a593Smuzhiyun mfspr r11, SPRN_L1CSR1 1002*4882a593Smuzhiyun3: andi. r11, r11, L1CSR1_ICUL 1003*4882a593Smuzhiyun bne 3b 1004*4882a593Smuzhiyun 1005*4882a593Smuzhiyun icbtls 0, 0, r5 1006*4882a593Smuzhiyun addi r5, r5, 64 1007*4882a593Smuzhiyun 1008*4882a593Smuzhiyun sync 1009*4882a593Smuzhiyun mfspr r11, SPRN_L1CSR1 1010*4882a593Smuzhiyun3: andi. r11, r11, L1CSR1_ICUL 1011*4882a593Smuzhiyun bne 3b 1012*4882a593Smuzhiyun 1013*4882a593Smuzhiyun b 2f 1014*4882a593Smuzhiyun .align 6 1015*4882a593Smuzhiyun /* Inside a locked cacheline, wait a while, write, then wait a while */ 1016*4882a593Smuzhiyun2: sync 1017*4882a593Smuzhiyun 1018*4882a593Smuzhiyun mfspr r5, SPRN_TBRL 1019*4882a593Smuzhiyun addis r11, r5, 0x10000@h /* wait 65536 timebase ticks */ 1020*4882a593Smuzhiyun4: mfspr r5, SPRN_TBRL 1021*4882a593Smuzhiyun subf. r5, r5, r11 1022*4882a593Smuzhiyun bgt 4b 1023*4882a593Smuzhiyun 1024*4882a593Smuzhiyun stw r4, 0(r3) 1025*4882a593Smuzhiyun 1026*4882a593Smuzhiyun mfspr r5, SPRN_TBRL 1027*4882a593Smuzhiyun addis r11, r5, 0x10000@h /* wait 65536 timebase ticks */ 1028*4882a593Smuzhiyun4: mfspr r5, SPRN_TBRL 1029*4882a593Smuzhiyun subf. r5, r5, r11 1030*4882a593Smuzhiyun bgt 4b 1031*4882a593Smuzhiyun 1032*4882a593Smuzhiyun sync 1033*4882a593Smuzhiyun 1034*4882a593Smuzhiyun /* 1035*4882a593Smuzhiyun * Fill out the rest of this cache line and the next with nops, 1036*4882a593Smuzhiyun * to ensure that nothing outside the locked area will be 1037*4882a593Smuzhiyun * fetched due to a branch. 1038*4882a593Smuzhiyun */ 1039*4882a593Smuzhiyun .rept 19 1040*4882a593Smuzhiyun nop 1041*4882a593Smuzhiyun .endr 1042*4882a593Smuzhiyun 1043*4882a593Smuzhiyun sync 1044*4882a593Smuzhiyun mfspr r11, SPRN_L1CSR1 1045*4882a593Smuzhiyun rlwinm r11, r11, 0, ~L1CSR1_ICUL 1046*4882a593Smuzhiyun sync 1047*4882a593Smuzhiyun isync 1048*4882a593Smuzhiyun mtspr SPRN_L1CSR1, r11 1049*4882a593Smuzhiyun isync 1050*4882a593Smuzhiyun 1051*4882a593Smuzhiyun mtlr r12 1052*4882a593Smuzhiyun blr 1053*4882a593Smuzhiyun 1054*4882a593Smuzhiyun9: 1055*4882a593Smuzhiyun#endif 1056*4882a593Smuzhiyun 1057*4882a593Smuzhiyuncreate_init_ram_area: 1058*4882a593Smuzhiyun lis r6,FSL_BOOKE_MAS0(1, 15, 0)@h 1059*4882a593Smuzhiyun ori r6,r6,FSL_BOOKE_MAS0(1, 15, 0)@l 1060*4882a593Smuzhiyun 1061*4882a593Smuzhiyun#ifdef NOR_BOOT 1062*4882a593Smuzhiyun /* create a temp mapping in AS=1 to the 4M boot window */ 1063*4882a593Smuzhiyun create_tlb1_entry 15, \ 1064*4882a593Smuzhiyun 1, BOOKE_PAGESZ_4M, \ 1065*4882a593Smuzhiyun CONFIG_SYS_MONITOR_BASE & 0xffc00000, MAS2_I|MAS2_G, \ 1066*4882a593Smuzhiyun 0xffc00000, MAS3_SX|MAS3_SW|MAS3_SR, \ 1067*4882a593Smuzhiyun 0, r6 1068*4882a593Smuzhiyun 1069*4882a593Smuzhiyun#elif !defined(CONFIG_SYS_RAMBOOT) && defined(CONFIG_SECURE_BOOT) 1070*4882a593Smuzhiyun /* create a temp mapping in AS = 1 for Flash mapping 1071*4882a593Smuzhiyun * created by PBL for ISBC code 1072*4882a593Smuzhiyun */ 1073*4882a593Smuzhiyun create_tlb1_entry 15, \ 1074*4882a593Smuzhiyun 1, BOOKE_PAGESZ_1M, \ 1075*4882a593Smuzhiyun CONFIG_SYS_MONITOR_BASE & 0xfff00000, MAS2_I|MAS2_G, \ 1076*4882a593Smuzhiyun CONFIG_SYS_PBI_FLASH_WINDOW & 0xfff00000, MAS3_SX|MAS3_SW|MAS3_SR, \ 1077*4882a593Smuzhiyun 0, r6 1078*4882a593Smuzhiyun 1079*4882a593Smuzhiyun/* 1080*4882a593Smuzhiyun * For Targets without CONFIG_SPL like P3, P5 1081*4882a593Smuzhiyun * and for targets with CONFIG_SPL like T1, T2, T4, only for 1082*4882a593Smuzhiyun * u-boot-spl i.e. CONFIG_SPL_BUILD 1083*4882a593Smuzhiyun */ 1084*4882a593Smuzhiyun#elif defined(CONFIG_RAMBOOT_PBL) && defined(CONFIG_SECURE_BOOT) && \ 1085*4882a593Smuzhiyun (!defined(CONFIG_SPL) || defined(CONFIG_SPL_BUILD)) 1086*4882a593Smuzhiyun /* create a temp mapping in AS = 1 for mapping CONFIG_SYS_MONITOR_BASE 1087*4882a593Smuzhiyun * to L3 Address configured by PBL for ISBC code 1088*4882a593Smuzhiyun */ 1089*4882a593Smuzhiyun create_tlb1_entry 15, \ 1090*4882a593Smuzhiyun 1, BOOKE_PAGESZ_1M, \ 1091*4882a593Smuzhiyun CONFIG_SYS_MONITOR_BASE & 0xfff00000, MAS2_I|MAS2_G, \ 1092*4882a593Smuzhiyun CONFIG_SYS_INIT_L3_ADDR & 0xfff00000, MAS3_SX|MAS3_SW|MAS3_SR, \ 1093*4882a593Smuzhiyun 0, r6 1094*4882a593Smuzhiyun 1095*4882a593Smuzhiyun#else 1096*4882a593Smuzhiyun /* 1097*4882a593Smuzhiyun * create a temp mapping in AS=1 to the 1M CONFIG_SYS_MONITOR_BASE space, the main 1098*4882a593Smuzhiyun * image has been relocated to CONFIG_SYS_MONITOR_BASE on the second stage. 1099*4882a593Smuzhiyun */ 1100*4882a593Smuzhiyun create_tlb1_entry 15, \ 1101*4882a593Smuzhiyun 1, BOOKE_PAGESZ_1M, \ 1102*4882a593Smuzhiyun CONFIG_SYS_MONITOR_BASE & 0xfff00000, MAS2_I|MAS2_G, \ 1103*4882a593Smuzhiyun CONFIG_SYS_MONITOR_BASE & 0xfff00000, MAS3_SX|MAS3_SW|MAS3_SR, \ 1104*4882a593Smuzhiyun 0, r6 1105*4882a593Smuzhiyun#endif 1106*4882a593Smuzhiyun 1107*4882a593Smuzhiyun /* create a temp mapping in AS=1 to the stack */ 1108*4882a593Smuzhiyun#if defined(CONFIG_SYS_INIT_RAM_ADDR_PHYS_LOW) && \ 1109*4882a593Smuzhiyun defined(CONFIG_SYS_INIT_RAM_ADDR_PHYS_HIGH) 1110*4882a593Smuzhiyun create_tlb1_entry 14, \ 1111*4882a593Smuzhiyun 1, BOOKE_PAGESZ_16K, \ 1112*4882a593Smuzhiyun CONFIG_SYS_INIT_RAM_ADDR, 0, \ 1113*4882a593Smuzhiyun CONFIG_SYS_INIT_RAM_ADDR_PHYS_LOW, MAS3_SX|MAS3_SW|MAS3_SR, \ 1114*4882a593Smuzhiyun CONFIG_SYS_INIT_RAM_ADDR_PHYS_HIGH, r6 1115*4882a593Smuzhiyun 1116*4882a593Smuzhiyun#else 1117*4882a593Smuzhiyun create_tlb1_entry 14, \ 1118*4882a593Smuzhiyun 1, BOOKE_PAGESZ_16K, \ 1119*4882a593Smuzhiyun CONFIG_SYS_INIT_RAM_ADDR, 0, \ 1120*4882a593Smuzhiyun CONFIG_SYS_INIT_RAM_ADDR, MAS3_SX|MAS3_SW|MAS3_SR, \ 1121*4882a593Smuzhiyun 0, r6 1122*4882a593Smuzhiyun#endif 1123*4882a593Smuzhiyun 1124*4882a593Smuzhiyun lis r6,MSR_IS|MSR_DS|MSR_DE@h 1125*4882a593Smuzhiyun ori r6,r6,MSR_IS|MSR_DS|MSR_DE@l 1126*4882a593Smuzhiyun lis r7,switch_as@h 1127*4882a593Smuzhiyun ori r7,r7,switch_as@l 1128*4882a593Smuzhiyun 1129*4882a593Smuzhiyun mtspr SPRN_SRR0,r7 1130*4882a593Smuzhiyun mtspr SPRN_SRR1,r6 1131*4882a593Smuzhiyun rfi 1132*4882a593Smuzhiyun 1133*4882a593Smuzhiyunswitch_as: 1134*4882a593Smuzhiyun/* L1 DCache is used for initial RAM */ 1135*4882a593Smuzhiyun 1136*4882a593Smuzhiyun /* Allocate Initial RAM in data cache. 1137*4882a593Smuzhiyun */ 1138*4882a593Smuzhiyun lis r3,CONFIG_SYS_INIT_RAM_ADDR@h 1139*4882a593Smuzhiyun ori r3,r3,CONFIG_SYS_INIT_RAM_ADDR@l 1140*4882a593Smuzhiyun mfspr r2, L1CFG0 1141*4882a593Smuzhiyun andi. r2, r2, 0x1ff 1142*4882a593Smuzhiyun /* cache size * 1024 / (2 * L1 line size) */ 1143*4882a593Smuzhiyun slwi r2, r2, (10 - 1 - L1_CACHE_SHIFT) 1144*4882a593Smuzhiyun mtctr r2 1145*4882a593Smuzhiyun li r0,0 1146*4882a593Smuzhiyun1: 1147*4882a593Smuzhiyun dcbz r0,r3 1148*4882a593Smuzhiyun#ifdef CONFIG_E6500 /* Lock/unlock L2 cache long with L1 */ 1149*4882a593Smuzhiyun dcbtls 2, r0, r3 1150*4882a593Smuzhiyun dcbtls 0, r0, r3 1151*4882a593Smuzhiyun#else 1152*4882a593Smuzhiyun dcbtls 0, r0, r3 1153*4882a593Smuzhiyun#endif 1154*4882a593Smuzhiyun addi r3,r3,CONFIG_SYS_CACHELINE_SIZE 1155*4882a593Smuzhiyun bdnz 1b 1156*4882a593Smuzhiyun 1157*4882a593Smuzhiyun /* Jump out the last 4K page and continue to 'normal' start */ 1158*4882a593Smuzhiyun#if defined(CONFIG_SYS_RAMBOOT) || defined(CONFIG_SPL) 1159*4882a593Smuzhiyun /* We assume that we're already running at the address we're linked at */ 1160*4882a593Smuzhiyun b _start_cont 1161*4882a593Smuzhiyun#else 1162*4882a593Smuzhiyun /* Calculate absolute address in FLASH and jump there */ 1163*4882a593Smuzhiyun /*--------------------------------------------------------------*/ 1164*4882a593Smuzhiyun lis r3,CONFIG_SYS_MONITOR_BASE@h 1165*4882a593Smuzhiyun ori r3,r3,CONFIG_SYS_MONITOR_BASE@l 1166*4882a593Smuzhiyun addi r3,r3,_start_cont - _start 1167*4882a593Smuzhiyun mtlr r3 1168*4882a593Smuzhiyun blr 1169*4882a593Smuzhiyun#endif 1170*4882a593Smuzhiyun 1171*4882a593Smuzhiyun .text 1172*4882a593Smuzhiyun .globl _start 1173*4882a593Smuzhiyun_start: 1174*4882a593Smuzhiyun .long 0x27051956 /* U-BOOT Magic Number */ 1175*4882a593Smuzhiyun .globl version_string 1176*4882a593Smuzhiyunversion_string: 1177*4882a593Smuzhiyun .ascii U_BOOT_VERSION_STRING, "\0" 1178*4882a593Smuzhiyun 1179*4882a593Smuzhiyun .align 4 1180*4882a593Smuzhiyun .globl _start_cont 1181*4882a593Smuzhiyun_start_cont: 1182*4882a593Smuzhiyun /* Setup the stack in initial RAM,could be L2-as-SRAM or L1 dcache*/ 1183*4882a593Smuzhiyun lis r3,(CONFIG_SYS_INIT_RAM_ADDR)@h 1184*4882a593Smuzhiyun ori r3,r3,((CONFIG_SYS_INIT_SP_OFFSET-16)&~0xf)@l /* Align to 16 */ 1185*4882a593Smuzhiyun 1186*4882a593Smuzhiyun#if CONFIG_VAL(SYS_MALLOC_F_LEN) 1187*4882a593Smuzhiyun#if CONFIG_VAL(SYS_MALLOC_F_LEN) + GENERATED_GBL_DATA_SIZE > CONFIG_SYS_INIT_RAM_SIZE 1188*4882a593Smuzhiyun#error "SYS_MALLOC_F_LEN too large to fit into initial RAM." 1189*4882a593Smuzhiyun#endif 1190*4882a593Smuzhiyun 1191*4882a593Smuzhiyun /* Leave 16+ byte for back chain termination and NULL return address */ 1192*4882a593Smuzhiyun subi r3,r3,((CONFIG_VAL(SYS_MALLOC_F_LEN)+16+15)&~0xf) 1193*4882a593Smuzhiyun#endif 1194*4882a593Smuzhiyun 1195*4882a593Smuzhiyun /* End of RAM */ 1196*4882a593Smuzhiyun lis r4,(CONFIG_SYS_INIT_RAM_ADDR)@h 1197*4882a593Smuzhiyun ori r4,r4,(CONFIG_SYS_INIT_RAM_SIZE)@l 1198*4882a593Smuzhiyun 1199*4882a593Smuzhiyun li r0,0 1200*4882a593Smuzhiyun 1201*4882a593Smuzhiyun1: subi r4,r4,4 1202*4882a593Smuzhiyun stw r0,0(r4) 1203*4882a593Smuzhiyun cmplw r4,r3 1204*4882a593Smuzhiyun bne 1b 1205*4882a593Smuzhiyun 1206*4882a593Smuzhiyun#if CONFIG_VAL(SYS_MALLOC_F_LEN) 1207*4882a593Smuzhiyun lis r4,(CONFIG_SYS_INIT_RAM_ADDR)@h 1208*4882a593Smuzhiyun ori r4,r4,(CONFIG_SYS_GBL_DATA_OFFSET)@l 1209*4882a593Smuzhiyun 1210*4882a593Smuzhiyun addi r3,r3,16 /* Pre-relocation malloc area */ 1211*4882a593Smuzhiyun stw r3,GD_MALLOC_BASE(r4) 1212*4882a593Smuzhiyun subi r3,r3,16 1213*4882a593Smuzhiyun#endif 1214*4882a593Smuzhiyun li r0,0 1215*4882a593Smuzhiyun stw r0,0(r3) /* Terminate Back Chain */ 1216*4882a593Smuzhiyun stw r0,+4(r3) /* NULL return address. */ 1217*4882a593Smuzhiyun mr r1,r3 /* Transfer to SP(r1) */ 1218*4882a593Smuzhiyun 1219*4882a593Smuzhiyun GET_GOT 1220*4882a593Smuzhiyun 1221*4882a593Smuzhiyun /* Pass our potential ePAPR device tree pointer to cpu_init_early_f */ 1222*4882a593Smuzhiyun mr r3, r24 1223*4882a593Smuzhiyun 1224*4882a593Smuzhiyun bl cpu_init_early_f 1225*4882a593Smuzhiyun 1226*4882a593Smuzhiyun /* switch back to AS = 0 */ 1227*4882a593Smuzhiyun lis r3,(MSR_CE|MSR_ME|MSR_DE)@h 1228*4882a593Smuzhiyun ori r3,r3,(MSR_CE|MSR_ME|MSR_DE)@l 1229*4882a593Smuzhiyun mtmsr r3 1230*4882a593Smuzhiyun isync 1231*4882a593Smuzhiyun 1232*4882a593Smuzhiyun bl cpu_init_f /* return boot_flag for calling board_init_f */ 1233*4882a593Smuzhiyun bl board_init_f 1234*4882a593Smuzhiyun isync 1235*4882a593Smuzhiyun 1236*4882a593Smuzhiyun /* NOTREACHED - board_init_f() does not return */ 1237*4882a593Smuzhiyun 1238*4882a593Smuzhiyun#ifndef MINIMAL_SPL 1239*4882a593Smuzhiyun .globl _start_of_vectors 1240*4882a593Smuzhiyun_start_of_vectors: 1241*4882a593Smuzhiyun 1242*4882a593Smuzhiyun/* Critical input. */ 1243*4882a593Smuzhiyun CRIT_EXCEPTION(0x0100, CriticalInput, CritcalInputException) 1244*4882a593Smuzhiyun 1245*4882a593Smuzhiyun/* Machine check */ 1246*4882a593Smuzhiyun MCK_EXCEPTION(0x200, MachineCheck, MachineCheckException) 1247*4882a593Smuzhiyun 1248*4882a593Smuzhiyun/* Data Storage exception. */ 1249*4882a593Smuzhiyun STD_EXCEPTION(0x0300, DataStorage, UnknownException) 1250*4882a593Smuzhiyun 1251*4882a593Smuzhiyun/* Instruction Storage exception. */ 1252*4882a593Smuzhiyun STD_EXCEPTION(0x0400, InstStorage, UnknownException) 1253*4882a593Smuzhiyun 1254*4882a593Smuzhiyun/* External Interrupt exception. */ 1255*4882a593Smuzhiyun STD_EXCEPTION(0x0500, ExtInterrupt, ExtIntException) 1256*4882a593Smuzhiyun 1257*4882a593Smuzhiyun/* Alignment exception. */ 1258*4882a593SmuzhiyunAlignment: 1259*4882a593Smuzhiyun EXCEPTION_PROLOG(SRR0, SRR1) 1260*4882a593Smuzhiyun mfspr r4,DAR 1261*4882a593Smuzhiyun stw r4,_DAR(r21) 1262*4882a593Smuzhiyun mfspr r5,DSISR 1263*4882a593Smuzhiyun stw r5,_DSISR(r21) 1264*4882a593Smuzhiyun addi r3,r1,STACK_FRAME_OVERHEAD 1265*4882a593Smuzhiyun EXC_XFER_TEMPLATE(0x600, Alignment, AlignmentException, 1266*4882a593Smuzhiyun MSR_KERNEL, COPY_EE) 1267*4882a593Smuzhiyun 1268*4882a593Smuzhiyun/* Program check exception */ 1269*4882a593SmuzhiyunProgramCheck: 1270*4882a593Smuzhiyun EXCEPTION_PROLOG(SRR0, SRR1) 1271*4882a593Smuzhiyun addi r3,r1,STACK_FRAME_OVERHEAD 1272*4882a593Smuzhiyun EXC_XFER_TEMPLATE(0x700, ProgramCheck, ProgramCheckException, 1273*4882a593Smuzhiyun MSR_KERNEL, COPY_EE) 1274*4882a593Smuzhiyun 1275*4882a593Smuzhiyun /* No FPU on MPC85xx. This exception is not supposed to happen. 1276*4882a593Smuzhiyun */ 1277*4882a593Smuzhiyun STD_EXCEPTION(0x0800, FPUnavailable, UnknownException) 1278*4882a593Smuzhiyun STD_EXCEPTION(0x0900, SystemCall, UnknownException) 1279*4882a593Smuzhiyun STD_EXCEPTION(0x0a00, Decrementer, timer_interrupt) 1280*4882a593Smuzhiyun STD_EXCEPTION(0x0b00, IntervalTimer, UnknownException) 1281*4882a593Smuzhiyun STD_EXCEPTION(0x0c00, WatchdogTimer, UnknownException) 1282*4882a593Smuzhiyun 1283*4882a593Smuzhiyun STD_EXCEPTION(0x0d00, DataTLBError, UnknownException) 1284*4882a593Smuzhiyun STD_EXCEPTION(0x0e00, InstructionTLBError, UnknownException) 1285*4882a593Smuzhiyun 1286*4882a593Smuzhiyun CRIT_EXCEPTION(0x0f00, DebugBreakpoint, DebugException ) 1287*4882a593Smuzhiyun 1288*4882a593Smuzhiyun .globl _end_of_vectors 1289*4882a593Smuzhiyun_end_of_vectors: 1290*4882a593Smuzhiyun 1291*4882a593Smuzhiyun 1292*4882a593Smuzhiyun . = . + (0x100 - ( . & 0xff )) /* align for debug */ 1293*4882a593Smuzhiyun 1294*4882a593Smuzhiyun/* 1295*4882a593Smuzhiyun * This code finishes saving the registers to the exception frame 1296*4882a593Smuzhiyun * and jumps to the appropriate handler for the exception. 1297*4882a593Smuzhiyun * Register r21 is pointer into trap frame, r1 has new stack pointer. 1298*4882a593Smuzhiyun * r23 is the address of the handler. 1299*4882a593Smuzhiyun */ 1300*4882a593Smuzhiyun .globl transfer_to_handler 1301*4882a593Smuzhiyuntransfer_to_handler: 1302*4882a593Smuzhiyun SAVE_GPR(7, r21) 1303*4882a593Smuzhiyun SAVE_4GPRS(8, r21) 1304*4882a593Smuzhiyun SAVE_8GPRS(12, r21) 1305*4882a593Smuzhiyun SAVE_8GPRS(24, r21) 1306*4882a593Smuzhiyun 1307*4882a593Smuzhiyun li r22,0 1308*4882a593Smuzhiyun stw r22,RESULT(r21) 1309*4882a593Smuzhiyun mtspr SPRG2,r22 /* r1 is now kernel sp */ 1310*4882a593Smuzhiyun 1311*4882a593Smuzhiyun mtctr r23 /* virtual address of handler */ 1312*4882a593Smuzhiyun mtmsr r20 1313*4882a593Smuzhiyun bctrl 1314*4882a593Smuzhiyun 1315*4882a593Smuzhiyunint_return: 1316*4882a593Smuzhiyun mfmsr r28 /* Disable interrupts */ 1317*4882a593Smuzhiyun li r4,0 1318*4882a593Smuzhiyun ori r4,r4,MSR_EE 1319*4882a593Smuzhiyun andc r28,r28,r4 1320*4882a593Smuzhiyun SYNC /* Some chip revs need this... */ 1321*4882a593Smuzhiyun mtmsr r28 1322*4882a593Smuzhiyun SYNC 1323*4882a593Smuzhiyun lwz r2,_CTR(r1) 1324*4882a593Smuzhiyun lwz r0,_LINK(r1) 1325*4882a593Smuzhiyun mtctr r2 1326*4882a593Smuzhiyun mtlr r0 1327*4882a593Smuzhiyun lwz r2,_XER(r1) 1328*4882a593Smuzhiyun lwz r0,_CCR(r1) 1329*4882a593Smuzhiyun mtspr XER,r2 1330*4882a593Smuzhiyun mtcrf 0xFF,r0 1331*4882a593Smuzhiyun REST_10GPRS(3, r1) 1332*4882a593Smuzhiyun REST_10GPRS(13, r1) 1333*4882a593Smuzhiyun REST_8GPRS(23, r1) 1334*4882a593Smuzhiyun REST_GPR(31, r1) 1335*4882a593Smuzhiyun lwz r2,_NIP(r1) /* Restore environment */ 1336*4882a593Smuzhiyun lwz r0,_MSR(r1) 1337*4882a593Smuzhiyun mtspr SRR0,r2 1338*4882a593Smuzhiyun mtspr SRR1,r0 1339*4882a593Smuzhiyun lwz r0,GPR0(r1) 1340*4882a593Smuzhiyun lwz r2,GPR2(r1) 1341*4882a593Smuzhiyun lwz r1,GPR1(r1) 1342*4882a593Smuzhiyun SYNC 1343*4882a593Smuzhiyun rfi 1344*4882a593Smuzhiyun 1345*4882a593Smuzhiyun/* Cache functions. 1346*4882a593Smuzhiyun*/ 1347*4882a593Smuzhiyun.globl flush_icache 1348*4882a593Smuzhiyunflush_icache: 1349*4882a593Smuzhiyun.globl invalidate_icache 1350*4882a593Smuzhiyuninvalidate_icache: 1351*4882a593Smuzhiyun mfspr r0,L1CSR1 1352*4882a593Smuzhiyun ori r0,r0,L1CSR1_ICFI 1353*4882a593Smuzhiyun msync 1354*4882a593Smuzhiyun isync 1355*4882a593Smuzhiyun mtspr L1CSR1,r0 1356*4882a593Smuzhiyun isync 1357*4882a593Smuzhiyun blr /* entire I cache */ 1358*4882a593Smuzhiyun 1359*4882a593Smuzhiyun.globl invalidate_dcache 1360*4882a593Smuzhiyuninvalidate_dcache: 1361*4882a593Smuzhiyun mfspr r0,L1CSR0 1362*4882a593Smuzhiyun ori r0,r0,L1CSR0_DCFI 1363*4882a593Smuzhiyun msync 1364*4882a593Smuzhiyun isync 1365*4882a593Smuzhiyun mtspr L1CSR0,r0 1366*4882a593Smuzhiyun isync 1367*4882a593Smuzhiyun blr 1368*4882a593Smuzhiyun 1369*4882a593Smuzhiyun .globl icache_enable 1370*4882a593Smuzhiyunicache_enable: 1371*4882a593Smuzhiyun mflr r8 1372*4882a593Smuzhiyun bl invalidate_icache 1373*4882a593Smuzhiyun mtlr r8 1374*4882a593Smuzhiyun isync 1375*4882a593Smuzhiyun mfspr r4,L1CSR1 1376*4882a593Smuzhiyun ori r4,r4,(L1CSR1_CPE | L1CSR1_ICE)@l 1377*4882a593Smuzhiyun oris r4,r4,(L1CSR1_CPE | L1CSR1_ICE)@h 1378*4882a593Smuzhiyun mtspr L1CSR1,r4 1379*4882a593Smuzhiyun isync 1380*4882a593Smuzhiyun blr 1381*4882a593Smuzhiyun 1382*4882a593Smuzhiyun .globl icache_disable 1383*4882a593Smuzhiyunicache_disable: 1384*4882a593Smuzhiyun mfspr r0,L1CSR1 1385*4882a593Smuzhiyun lis r3,0 1386*4882a593Smuzhiyun ori r3,r3,L1CSR1_ICE 1387*4882a593Smuzhiyun andc r0,r0,r3 1388*4882a593Smuzhiyun mtspr L1CSR1,r0 1389*4882a593Smuzhiyun isync 1390*4882a593Smuzhiyun blr 1391*4882a593Smuzhiyun 1392*4882a593Smuzhiyun .globl icache_status 1393*4882a593Smuzhiyunicache_status: 1394*4882a593Smuzhiyun mfspr r3,L1CSR1 1395*4882a593Smuzhiyun andi. r3,r3,L1CSR1_ICE 1396*4882a593Smuzhiyun blr 1397*4882a593Smuzhiyun 1398*4882a593Smuzhiyun .globl dcache_enable 1399*4882a593Smuzhiyundcache_enable: 1400*4882a593Smuzhiyun mflr r8 1401*4882a593Smuzhiyun bl invalidate_dcache 1402*4882a593Smuzhiyun mtlr r8 1403*4882a593Smuzhiyun isync 1404*4882a593Smuzhiyun mfspr r0,L1CSR0 1405*4882a593Smuzhiyun ori r0,r0,(L1CSR0_CPE | L1CSR0_DCE)@l 1406*4882a593Smuzhiyun oris r0,r0,(L1CSR0_CPE | L1CSR0_DCE)@h 1407*4882a593Smuzhiyun msync 1408*4882a593Smuzhiyun isync 1409*4882a593Smuzhiyun mtspr L1CSR0,r0 1410*4882a593Smuzhiyun isync 1411*4882a593Smuzhiyun blr 1412*4882a593Smuzhiyun 1413*4882a593Smuzhiyun .globl dcache_disable 1414*4882a593Smuzhiyundcache_disable: 1415*4882a593Smuzhiyun mfspr r3,L1CSR0 1416*4882a593Smuzhiyun lis r4,0 1417*4882a593Smuzhiyun ori r4,r4,L1CSR0_DCE 1418*4882a593Smuzhiyun andc r3,r3,r4 1419*4882a593Smuzhiyun mtspr L1CSR0,r3 1420*4882a593Smuzhiyun isync 1421*4882a593Smuzhiyun blr 1422*4882a593Smuzhiyun 1423*4882a593Smuzhiyun .globl dcache_status 1424*4882a593Smuzhiyundcache_status: 1425*4882a593Smuzhiyun mfspr r3,L1CSR0 1426*4882a593Smuzhiyun andi. r3,r3,L1CSR0_DCE 1427*4882a593Smuzhiyun blr 1428*4882a593Smuzhiyun 1429*4882a593Smuzhiyun/*------------------------------------------------------------------------------- */ 1430*4882a593Smuzhiyun/* Function: in8 */ 1431*4882a593Smuzhiyun/* Description: Input 8 bits */ 1432*4882a593Smuzhiyun/*------------------------------------------------------------------------------- */ 1433*4882a593Smuzhiyun .globl in8 1434*4882a593Smuzhiyunin8: 1435*4882a593Smuzhiyun lbz r3,0x0000(r3) 1436*4882a593Smuzhiyun blr 1437*4882a593Smuzhiyun 1438*4882a593Smuzhiyun/*------------------------------------------------------------------------------- */ 1439*4882a593Smuzhiyun/* Function: out8 */ 1440*4882a593Smuzhiyun/* Description: Output 8 bits */ 1441*4882a593Smuzhiyun/*------------------------------------------------------------------------------- */ 1442*4882a593Smuzhiyun .globl out8 1443*4882a593Smuzhiyunout8: 1444*4882a593Smuzhiyun stb r4,0x0000(r3) 1445*4882a593Smuzhiyun sync 1446*4882a593Smuzhiyun blr 1447*4882a593Smuzhiyun 1448*4882a593Smuzhiyun/*------------------------------------------------------------------------------- */ 1449*4882a593Smuzhiyun/* Function: out16 */ 1450*4882a593Smuzhiyun/* Description: Output 16 bits */ 1451*4882a593Smuzhiyun/*------------------------------------------------------------------------------- */ 1452*4882a593Smuzhiyun .globl out16 1453*4882a593Smuzhiyunout16: 1454*4882a593Smuzhiyun sth r4,0x0000(r3) 1455*4882a593Smuzhiyun sync 1456*4882a593Smuzhiyun blr 1457*4882a593Smuzhiyun 1458*4882a593Smuzhiyun/*------------------------------------------------------------------------------- */ 1459*4882a593Smuzhiyun/* Function: out16r */ 1460*4882a593Smuzhiyun/* Description: Byte reverse and output 16 bits */ 1461*4882a593Smuzhiyun/*------------------------------------------------------------------------------- */ 1462*4882a593Smuzhiyun .globl out16r 1463*4882a593Smuzhiyunout16r: 1464*4882a593Smuzhiyun sthbrx r4,r0,r3 1465*4882a593Smuzhiyun sync 1466*4882a593Smuzhiyun blr 1467*4882a593Smuzhiyun 1468*4882a593Smuzhiyun/*------------------------------------------------------------------------------- */ 1469*4882a593Smuzhiyun/* Function: out32 */ 1470*4882a593Smuzhiyun/* Description: Output 32 bits */ 1471*4882a593Smuzhiyun/*------------------------------------------------------------------------------- */ 1472*4882a593Smuzhiyun .globl out32 1473*4882a593Smuzhiyunout32: 1474*4882a593Smuzhiyun stw r4,0x0000(r3) 1475*4882a593Smuzhiyun sync 1476*4882a593Smuzhiyun blr 1477*4882a593Smuzhiyun 1478*4882a593Smuzhiyun/*------------------------------------------------------------------------------- */ 1479*4882a593Smuzhiyun/* Function: out32r */ 1480*4882a593Smuzhiyun/* Description: Byte reverse and output 32 bits */ 1481*4882a593Smuzhiyun/*------------------------------------------------------------------------------- */ 1482*4882a593Smuzhiyun .globl out32r 1483*4882a593Smuzhiyunout32r: 1484*4882a593Smuzhiyun stwbrx r4,r0,r3 1485*4882a593Smuzhiyun sync 1486*4882a593Smuzhiyun blr 1487*4882a593Smuzhiyun 1488*4882a593Smuzhiyun/*------------------------------------------------------------------------------- */ 1489*4882a593Smuzhiyun/* Function: in16 */ 1490*4882a593Smuzhiyun/* Description: Input 16 bits */ 1491*4882a593Smuzhiyun/*------------------------------------------------------------------------------- */ 1492*4882a593Smuzhiyun .globl in16 1493*4882a593Smuzhiyunin16: 1494*4882a593Smuzhiyun lhz r3,0x0000(r3) 1495*4882a593Smuzhiyun blr 1496*4882a593Smuzhiyun 1497*4882a593Smuzhiyun/*------------------------------------------------------------------------------- */ 1498*4882a593Smuzhiyun/* Function: in16r */ 1499*4882a593Smuzhiyun/* Description: Input 16 bits and byte reverse */ 1500*4882a593Smuzhiyun/*------------------------------------------------------------------------------- */ 1501*4882a593Smuzhiyun .globl in16r 1502*4882a593Smuzhiyunin16r: 1503*4882a593Smuzhiyun lhbrx r3,r0,r3 1504*4882a593Smuzhiyun blr 1505*4882a593Smuzhiyun 1506*4882a593Smuzhiyun/*------------------------------------------------------------------------------- */ 1507*4882a593Smuzhiyun/* Function: in32 */ 1508*4882a593Smuzhiyun/* Description: Input 32 bits */ 1509*4882a593Smuzhiyun/*------------------------------------------------------------------------------- */ 1510*4882a593Smuzhiyun .globl in32 1511*4882a593Smuzhiyunin32: 1512*4882a593Smuzhiyun lwz 3,0x0000(3) 1513*4882a593Smuzhiyun blr 1514*4882a593Smuzhiyun 1515*4882a593Smuzhiyun/*------------------------------------------------------------------------------- */ 1516*4882a593Smuzhiyun/* Function: in32r */ 1517*4882a593Smuzhiyun/* Description: Input 32 bits and byte reverse */ 1518*4882a593Smuzhiyun/*------------------------------------------------------------------------------- */ 1519*4882a593Smuzhiyun .globl in32r 1520*4882a593Smuzhiyunin32r: 1521*4882a593Smuzhiyun lwbrx r3,r0,r3 1522*4882a593Smuzhiyun blr 1523*4882a593Smuzhiyun#endif /* !MINIMAL_SPL */ 1524*4882a593Smuzhiyun 1525*4882a593Smuzhiyun/*------------------------------------------------------------------------------*/ 1526*4882a593Smuzhiyun 1527*4882a593Smuzhiyun/* 1528*4882a593Smuzhiyun * void write_tlb(mas0, mas1, mas2, mas3, mas7) 1529*4882a593Smuzhiyun */ 1530*4882a593Smuzhiyun .globl write_tlb 1531*4882a593Smuzhiyunwrite_tlb: 1532*4882a593Smuzhiyun mtspr MAS0,r3 1533*4882a593Smuzhiyun mtspr MAS1,r4 1534*4882a593Smuzhiyun mtspr MAS2,r5 1535*4882a593Smuzhiyun mtspr MAS3,r6 1536*4882a593Smuzhiyun#ifdef CONFIG_ENABLE_36BIT_PHYS 1537*4882a593Smuzhiyun mtspr MAS7,r7 1538*4882a593Smuzhiyun#endif 1539*4882a593Smuzhiyun li r3,0 1540*4882a593Smuzhiyun#ifdef CONFIG_SYS_BOOK3E_HV 1541*4882a593Smuzhiyun mtspr MAS8,r3 1542*4882a593Smuzhiyun#endif 1543*4882a593Smuzhiyun isync 1544*4882a593Smuzhiyun tlbwe 1545*4882a593Smuzhiyun msync 1546*4882a593Smuzhiyun isync 1547*4882a593Smuzhiyun blr 1548*4882a593Smuzhiyun 1549*4882a593Smuzhiyun/* 1550*4882a593Smuzhiyun * void relocate_code (addr_sp, gd, addr_moni) 1551*4882a593Smuzhiyun * 1552*4882a593Smuzhiyun * This "function" does not return, instead it continues in RAM 1553*4882a593Smuzhiyun * after relocating the monitor code. 1554*4882a593Smuzhiyun * 1555*4882a593Smuzhiyun * r3 = dest 1556*4882a593Smuzhiyun * r4 = src 1557*4882a593Smuzhiyun * r5 = length in bytes 1558*4882a593Smuzhiyun * r6 = cachelinesize 1559*4882a593Smuzhiyun */ 1560*4882a593Smuzhiyun .globl relocate_code 1561*4882a593Smuzhiyunrelocate_code: 1562*4882a593Smuzhiyun mr r1,r3 /* Set new stack pointer */ 1563*4882a593Smuzhiyun mr r9,r4 /* Save copy of Init Data pointer */ 1564*4882a593Smuzhiyun mr r10,r5 /* Save copy of Destination Address */ 1565*4882a593Smuzhiyun 1566*4882a593Smuzhiyun GET_GOT 1567*4882a593Smuzhiyun#ifndef CONFIG_SPL_SKIP_RELOCATE 1568*4882a593Smuzhiyun mr r3,r5 /* Destination Address */ 1569*4882a593Smuzhiyun lis r4,CONFIG_SYS_MONITOR_BASE@h /* Source Address */ 1570*4882a593Smuzhiyun ori r4,r4,CONFIG_SYS_MONITOR_BASE@l 1571*4882a593Smuzhiyun lwz r5,GOT(__init_end) 1572*4882a593Smuzhiyun sub r5,r5,r4 1573*4882a593Smuzhiyun li r6,CONFIG_SYS_CACHELINE_SIZE /* Cache Line Size */ 1574*4882a593Smuzhiyun 1575*4882a593Smuzhiyun /* 1576*4882a593Smuzhiyun * Fix GOT pointer: 1577*4882a593Smuzhiyun * 1578*4882a593Smuzhiyun * New GOT-PTR = (old GOT-PTR - CONFIG_SYS_MONITOR_BASE) + Destination Address 1579*4882a593Smuzhiyun * 1580*4882a593Smuzhiyun * Offset: 1581*4882a593Smuzhiyun */ 1582*4882a593Smuzhiyun sub r15,r10,r4 1583*4882a593Smuzhiyun 1584*4882a593Smuzhiyun /* First our own GOT */ 1585*4882a593Smuzhiyun add r12,r12,r15 1586*4882a593Smuzhiyun /* the the one used by the C code */ 1587*4882a593Smuzhiyun add r30,r30,r15 1588*4882a593Smuzhiyun 1589*4882a593Smuzhiyun /* 1590*4882a593Smuzhiyun * Now relocate code 1591*4882a593Smuzhiyun */ 1592*4882a593Smuzhiyun 1593*4882a593Smuzhiyun cmplw cr1,r3,r4 1594*4882a593Smuzhiyun addi r0,r5,3 1595*4882a593Smuzhiyun srwi. r0,r0,2 1596*4882a593Smuzhiyun beq cr1,4f /* In place copy is not necessary */ 1597*4882a593Smuzhiyun beq 7f /* Protect against 0 count */ 1598*4882a593Smuzhiyun mtctr r0 1599*4882a593Smuzhiyun bge cr1,2f 1600*4882a593Smuzhiyun 1601*4882a593Smuzhiyun la r8,-4(r4) 1602*4882a593Smuzhiyun la r7,-4(r3) 1603*4882a593Smuzhiyun1: lwzu r0,4(r8) 1604*4882a593Smuzhiyun stwu r0,4(r7) 1605*4882a593Smuzhiyun bdnz 1b 1606*4882a593Smuzhiyun b 4f 1607*4882a593Smuzhiyun 1608*4882a593Smuzhiyun2: slwi r0,r0,2 1609*4882a593Smuzhiyun add r8,r4,r0 1610*4882a593Smuzhiyun add r7,r3,r0 1611*4882a593Smuzhiyun3: lwzu r0,-4(r8) 1612*4882a593Smuzhiyun stwu r0,-4(r7) 1613*4882a593Smuzhiyun bdnz 3b 1614*4882a593Smuzhiyun 1615*4882a593Smuzhiyun/* 1616*4882a593Smuzhiyun * Now flush the cache: note that we must start from a cache aligned 1617*4882a593Smuzhiyun * address. Otherwise we might miss one cache line. 1618*4882a593Smuzhiyun */ 1619*4882a593Smuzhiyun4: cmpwi r6,0 1620*4882a593Smuzhiyun add r5,r3,r5 1621*4882a593Smuzhiyun beq 7f /* Always flush prefetch queue in any case */ 1622*4882a593Smuzhiyun subi r0,r6,1 1623*4882a593Smuzhiyun andc r3,r3,r0 1624*4882a593Smuzhiyun mr r4,r3 1625*4882a593Smuzhiyun5: dcbst 0,r4 1626*4882a593Smuzhiyun add r4,r4,r6 1627*4882a593Smuzhiyun cmplw r4,r5 1628*4882a593Smuzhiyun blt 5b 1629*4882a593Smuzhiyun sync /* Wait for all dcbst to complete on bus */ 1630*4882a593Smuzhiyun mr r4,r3 1631*4882a593Smuzhiyun6: icbi 0,r4 1632*4882a593Smuzhiyun add r4,r4,r6 1633*4882a593Smuzhiyun cmplw r4,r5 1634*4882a593Smuzhiyun blt 6b 1635*4882a593Smuzhiyun7: sync /* Wait for all icbi to complete on bus */ 1636*4882a593Smuzhiyun isync 1637*4882a593Smuzhiyun 1638*4882a593Smuzhiyun/* 1639*4882a593Smuzhiyun * We are done. Do not return, instead branch to second part of board 1640*4882a593Smuzhiyun * initialization, now running from RAM. 1641*4882a593Smuzhiyun */ 1642*4882a593Smuzhiyun 1643*4882a593Smuzhiyun addi r0,r10,in_ram - _start 1644*4882a593Smuzhiyun 1645*4882a593Smuzhiyun /* 1646*4882a593Smuzhiyun * As IVPR is going to point RAM address, 1647*4882a593Smuzhiyun * Make sure IVOR15 has valid opcode to support debugger 1648*4882a593Smuzhiyun */ 1649*4882a593Smuzhiyun mtspr IVOR15,r0 1650*4882a593Smuzhiyun 1651*4882a593Smuzhiyun /* 1652*4882a593Smuzhiyun * Re-point the IVPR at RAM 1653*4882a593Smuzhiyun */ 1654*4882a593Smuzhiyun mtspr IVPR,r10 1655*4882a593Smuzhiyun 1656*4882a593Smuzhiyun mtlr r0 1657*4882a593Smuzhiyun blr /* NEVER RETURNS! */ 1658*4882a593Smuzhiyun#endif 1659*4882a593Smuzhiyun .globl in_ram 1660*4882a593Smuzhiyunin_ram: 1661*4882a593Smuzhiyun 1662*4882a593Smuzhiyun /* 1663*4882a593Smuzhiyun * Relocation Function, r12 point to got2+0x8000 1664*4882a593Smuzhiyun * 1665*4882a593Smuzhiyun * Adjust got2 pointers, no need to check for 0, this code 1666*4882a593Smuzhiyun * already puts a few entries in the table. 1667*4882a593Smuzhiyun */ 1668*4882a593Smuzhiyun li r0,__got2_entries@sectoff@l 1669*4882a593Smuzhiyun la r3,GOT(_GOT2_TABLE_) 1670*4882a593Smuzhiyun lwz r11,GOT(_GOT2_TABLE_) 1671*4882a593Smuzhiyun mtctr r0 1672*4882a593Smuzhiyun sub r11,r3,r11 1673*4882a593Smuzhiyun addi r3,r3,-4 1674*4882a593Smuzhiyun1: lwzu r0,4(r3) 1675*4882a593Smuzhiyun cmpwi r0,0 1676*4882a593Smuzhiyun beq- 2f 1677*4882a593Smuzhiyun add r0,r0,r11 1678*4882a593Smuzhiyun stw r0,0(r3) 1679*4882a593Smuzhiyun2: bdnz 1b 1680*4882a593Smuzhiyun 1681*4882a593Smuzhiyun /* 1682*4882a593Smuzhiyun * Now adjust the fixups and the pointers to the fixups 1683*4882a593Smuzhiyun * in case we need to move ourselves again. 1684*4882a593Smuzhiyun */ 1685*4882a593Smuzhiyun li r0,__fixup_entries@sectoff@l 1686*4882a593Smuzhiyun lwz r3,GOT(_FIXUP_TABLE_) 1687*4882a593Smuzhiyun cmpwi r0,0 1688*4882a593Smuzhiyun mtctr r0 1689*4882a593Smuzhiyun addi r3,r3,-4 1690*4882a593Smuzhiyun beq 4f 1691*4882a593Smuzhiyun3: lwzu r4,4(r3) 1692*4882a593Smuzhiyun lwzux r0,r4,r11 1693*4882a593Smuzhiyun cmpwi r0,0 1694*4882a593Smuzhiyun add r0,r0,r11 1695*4882a593Smuzhiyun stw r4,0(r3) 1696*4882a593Smuzhiyun beq- 5f 1697*4882a593Smuzhiyun stw r0,0(r4) 1698*4882a593Smuzhiyun5: bdnz 3b 1699*4882a593Smuzhiyun4: 1700*4882a593Smuzhiyunclear_bss: 1701*4882a593Smuzhiyun /* 1702*4882a593Smuzhiyun * Now clear BSS segment 1703*4882a593Smuzhiyun */ 1704*4882a593Smuzhiyun lwz r3,GOT(__bss_start) 1705*4882a593Smuzhiyun lwz r4,GOT(__bss_end) 1706*4882a593Smuzhiyun 1707*4882a593Smuzhiyun cmplw 0,r3,r4 1708*4882a593Smuzhiyun beq 6f 1709*4882a593Smuzhiyun 1710*4882a593Smuzhiyun li r0,0 1711*4882a593Smuzhiyun5: 1712*4882a593Smuzhiyun stw r0,0(r3) 1713*4882a593Smuzhiyun addi r3,r3,4 1714*4882a593Smuzhiyun cmplw 0,r3,r4 1715*4882a593Smuzhiyun blt 5b 1716*4882a593Smuzhiyun6: 1717*4882a593Smuzhiyun 1718*4882a593Smuzhiyun mr r3,r9 /* Init Data pointer */ 1719*4882a593Smuzhiyun mr r4,r10 /* Destination Address */ 1720*4882a593Smuzhiyun bl board_init_r 1721*4882a593Smuzhiyun 1722*4882a593Smuzhiyun#ifndef MINIMAL_SPL 1723*4882a593Smuzhiyun /* 1724*4882a593Smuzhiyun * Copy exception vector code to low memory 1725*4882a593Smuzhiyun * 1726*4882a593Smuzhiyun * r3: dest_addr 1727*4882a593Smuzhiyun * r7: source address, r8: end address, r9: target address 1728*4882a593Smuzhiyun */ 1729*4882a593Smuzhiyun .globl trap_init 1730*4882a593Smuzhiyuntrap_init: 1731*4882a593Smuzhiyun mflr r11 1732*4882a593Smuzhiyun bl _GLOBAL_OFFSET_TABLE_-4 1733*4882a593Smuzhiyun mflr r12 1734*4882a593Smuzhiyun 1735*4882a593Smuzhiyun /* Update IVORs as per relocation */ 1736*4882a593Smuzhiyun mtspr IVPR,r3 1737*4882a593Smuzhiyun 1738*4882a593Smuzhiyun lwz r4,CriticalInput@got(r12) 1739*4882a593Smuzhiyun mtspr IVOR0,r4 /* 0: Critical input */ 1740*4882a593Smuzhiyun lwz r4,MachineCheck@got(r12) 1741*4882a593Smuzhiyun mtspr IVOR1,r4 /* 1: Machine check */ 1742*4882a593Smuzhiyun lwz r4,DataStorage@got(r12) 1743*4882a593Smuzhiyun mtspr IVOR2,r4 /* 2: Data storage */ 1744*4882a593Smuzhiyun lwz r4,InstStorage@got(r12) 1745*4882a593Smuzhiyun mtspr IVOR3,r4 /* 3: Instruction storage */ 1746*4882a593Smuzhiyun lwz r4,ExtInterrupt@got(r12) 1747*4882a593Smuzhiyun mtspr IVOR4,r4 /* 4: External interrupt */ 1748*4882a593Smuzhiyun lwz r4,Alignment@got(r12) 1749*4882a593Smuzhiyun mtspr IVOR5,r4 /* 5: Alignment */ 1750*4882a593Smuzhiyun lwz r4,ProgramCheck@got(r12) 1751*4882a593Smuzhiyun mtspr IVOR6,r4 /* 6: Program check */ 1752*4882a593Smuzhiyun lwz r4,FPUnavailable@got(r12) 1753*4882a593Smuzhiyun mtspr IVOR7,r4 /* 7: floating point unavailable */ 1754*4882a593Smuzhiyun lwz r4,SystemCall@got(r12) 1755*4882a593Smuzhiyun mtspr IVOR8,r4 /* 8: System call */ 1756*4882a593Smuzhiyun /* 9: Auxiliary processor unavailable(unsupported) */ 1757*4882a593Smuzhiyun lwz r4,Decrementer@got(r12) 1758*4882a593Smuzhiyun mtspr IVOR10,r4 /* 10: Decrementer */ 1759*4882a593Smuzhiyun lwz r4,IntervalTimer@got(r12) 1760*4882a593Smuzhiyun mtspr IVOR11,r4 /* 11: Interval timer */ 1761*4882a593Smuzhiyun lwz r4,WatchdogTimer@got(r12) 1762*4882a593Smuzhiyun mtspr IVOR12,r4 /* 12: Watchdog timer */ 1763*4882a593Smuzhiyun lwz r4,DataTLBError@got(r12) 1764*4882a593Smuzhiyun mtspr IVOR13,r4 /* 13: Data TLB error */ 1765*4882a593Smuzhiyun lwz r4,InstructionTLBError@got(r12) 1766*4882a593Smuzhiyun mtspr IVOR14,r4 /* 14: Instruction TLB error */ 1767*4882a593Smuzhiyun lwz r4,DebugBreakpoint@got(r12) 1768*4882a593Smuzhiyun mtspr IVOR15,r4 /* 15: Debug */ 1769*4882a593Smuzhiyun 1770*4882a593Smuzhiyun mtlr r11 1771*4882a593Smuzhiyun blr 1772*4882a593Smuzhiyun 1773*4882a593Smuzhiyun.globl unlock_ram_in_cache 1774*4882a593Smuzhiyununlock_ram_in_cache: 1775*4882a593Smuzhiyun /* invalidate the INIT_RAM section */ 1776*4882a593Smuzhiyun lis r3,(CONFIG_SYS_INIT_RAM_ADDR & ~(CONFIG_SYS_CACHELINE_SIZE-1))@h 1777*4882a593Smuzhiyun ori r3,r3,(CONFIG_SYS_INIT_RAM_ADDR & ~(CONFIG_SYS_CACHELINE_SIZE-1))@l 1778*4882a593Smuzhiyun mfspr r4,L1CFG0 1779*4882a593Smuzhiyun andi. r4,r4,0x1ff 1780*4882a593Smuzhiyun slwi r4,r4,(10 - 1 - L1_CACHE_SHIFT) 1781*4882a593Smuzhiyun mtctr r4 1782*4882a593Smuzhiyun1: dcbi r0,r3 1783*4882a593Smuzhiyun#ifdef CONFIG_E6500 /* lock/unlock L2 cache long with L1 */ 1784*4882a593Smuzhiyun dcblc 2, r0, r3 1785*4882a593Smuzhiyun dcblc 0, r0, r3 1786*4882a593Smuzhiyun#else 1787*4882a593Smuzhiyun dcblc r0,r3 1788*4882a593Smuzhiyun#endif 1789*4882a593Smuzhiyun addi r3,r3,CONFIG_SYS_CACHELINE_SIZE 1790*4882a593Smuzhiyun bdnz 1b 1791*4882a593Smuzhiyun sync 1792*4882a593Smuzhiyun 1793*4882a593Smuzhiyun /* Invalidate the TLB entries for the cache */ 1794*4882a593Smuzhiyun lis r3,CONFIG_SYS_INIT_RAM_ADDR@h 1795*4882a593Smuzhiyun ori r3,r3,CONFIG_SYS_INIT_RAM_ADDR@l 1796*4882a593Smuzhiyun tlbivax 0,r3 1797*4882a593Smuzhiyun addi r3,r3,0x1000 1798*4882a593Smuzhiyun tlbivax 0,r3 1799*4882a593Smuzhiyun addi r3,r3,0x1000 1800*4882a593Smuzhiyun tlbivax 0,r3 1801*4882a593Smuzhiyun addi r3,r3,0x1000 1802*4882a593Smuzhiyun tlbivax 0,r3 1803*4882a593Smuzhiyun isync 1804*4882a593Smuzhiyun blr 1805*4882a593Smuzhiyun 1806*4882a593Smuzhiyun.globl flush_dcache 1807*4882a593Smuzhiyunflush_dcache: 1808*4882a593Smuzhiyun mfspr r3,SPRN_L1CFG0 1809*4882a593Smuzhiyun 1810*4882a593Smuzhiyun rlwinm r5,r3,9,3 /* Extract cache block size */ 1811*4882a593Smuzhiyun twlgti r5,1 /* Only 32 and 64 byte cache blocks 1812*4882a593Smuzhiyun * are currently defined. 1813*4882a593Smuzhiyun */ 1814*4882a593Smuzhiyun li r4,32 1815*4882a593Smuzhiyun subfic r6,r5,2 /* r6 = log2(1KiB / cache block size) - 1816*4882a593Smuzhiyun * log2(number of ways) 1817*4882a593Smuzhiyun */ 1818*4882a593Smuzhiyun slw r5,r4,r5 /* r5 = cache block size */ 1819*4882a593Smuzhiyun 1820*4882a593Smuzhiyun rlwinm r7,r3,0,0xff /* Extract number of KiB in the cache */ 1821*4882a593Smuzhiyun mulli r7,r7,13 /* An 8-way cache will require 13 1822*4882a593Smuzhiyun * loads per set. 1823*4882a593Smuzhiyun */ 1824*4882a593Smuzhiyun slw r7,r7,r6 1825*4882a593Smuzhiyun 1826*4882a593Smuzhiyun /* save off HID0 and set DCFA */ 1827*4882a593Smuzhiyun mfspr r8,SPRN_HID0 1828*4882a593Smuzhiyun ori r9,r8,HID0_DCFA@l 1829*4882a593Smuzhiyun mtspr SPRN_HID0,r9 1830*4882a593Smuzhiyun isync 1831*4882a593Smuzhiyun 1832*4882a593Smuzhiyun lis r4,0 1833*4882a593Smuzhiyun mtctr r7 1834*4882a593Smuzhiyun 1835*4882a593Smuzhiyun1: lwz r3,0(r4) /* Load... */ 1836*4882a593Smuzhiyun add r4,r4,r5 1837*4882a593Smuzhiyun bdnz 1b 1838*4882a593Smuzhiyun 1839*4882a593Smuzhiyun msync 1840*4882a593Smuzhiyun lis r4,0 1841*4882a593Smuzhiyun mtctr r7 1842*4882a593Smuzhiyun 1843*4882a593Smuzhiyun1: dcbf 0,r4 /* ...and flush. */ 1844*4882a593Smuzhiyun add r4,r4,r5 1845*4882a593Smuzhiyun bdnz 1b 1846*4882a593Smuzhiyun 1847*4882a593Smuzhiyun /* restore HID0 */ 1848*4882a593Smuzhiyun mtspr SPRN_HID0,r8 1849*4882a593Smuzhiyun isync 1850*4882a593Smuzhiyun 1851*4882a593Smuzhiyun blr 1852*4882a593Smuzhiyun#endif /* !MINIMAL_SPL */ 1853