/*
 * Copyright 2004, 2007, 2008 Freescale Semiconductor.
 * Srikanth Srinivasan <srikanth.srinivasan@freescale.com>
 *
 * SPDX-License-Identifier: GPL-2.0+
 */
#include <config.h>
#include <mpc86xx.h>

#include <ppc_asm.tmpl>
#include <ppc_defs.h>

#include <asm/cache.h>
#include <asm/mmu.h>

/* If this is a multi-cpu system then we need to handle the
 * 2nd cpu.  The assumption is that the 2nd cpu is being
 * held in boot holdoff mode until the 1st cpu unlocks it
 * from Linux.  We'll do some basic cpu init and then pass
 * it to the Linux Reset Vector.
 * Sri: Much of this initialization is not required.  Linux
 * rewrites the BATs and SPRs and also enables the L1 cache.
 *
 * Core 0 must copy this page to a 1M aligned region and set BPTR
 * to point to it (a sketch of that sequence appears at the end of
 * this file).
 */
	.align 12
.globl __secondary_start_page
__secondary_start_page:
	.space 0x100	/* space over to reset vector loc */

	/* Derive this core's ID from MSSCR0[ID] and store it in PIR */
	mfspr	r0, MSSCR0
	andi.	r0, r0, 0x0020
	rlwinm	r0, r0, 27, 31, 31
	mtspr	PIR, r0

	/* Invalidate BATs */
	li	r0, 0
	mtspr	IBAT0U, r0
	mtspr	IBAT1U, r0
	mtspr	IBAT2U, r0
	mtspr	IBAT3U, r0
	mtspr	IBAT4U, r0
	mtspr	IBAT5U, r0
	mtspr	IBAT6U, r0
	mtspr	IBAT7U, r0
	isync
	mtspr	DBAT0U, r0
	mtspr	DBAT1U, r0
	mtspr	DBAT2U, r0
	mtspr	DBAT3U, r0
	mtspr	DBAT4U, r0
	mtspr	DBAT5U, r0
	mtspr	DBAT6U, r0
	mtspr	DBAT7U, r0
	isync
	sync

	/* Enable extended addressing and the high BATs */
	mfspr	r0, HID0
	lis	r0, (HID0_HIGH_BAT_EN | HID0_XBSEN | HID0_XAEN)@h
	ori	r0, r0, (HID0_HIGH_BAT_EN | HID0_XBSEN | HID0_XAEN)@l
	mtspr	HID0, r0
	sync
	isync

#ifdef CONFIG_SYS_L2
	/* init the L2 cache */
	addis	r3, r0, L2_INIT@h
	ori	r3, r3, L2_INIT@l
	sync
	mtspr	l2cr, r3
#ifdef CONFIG_ALTIVEC
	dssall
#endif
	/* invalidate the L2 cache */
	mfspr	r3, l2cr
	rlwinm.	r3, r3, 0, 0, 0
	beq	1f

	mfspr	r3, l2cr
	rlwinm	r3, r3, 0, 1, 31

#ifdef CONFIG_ALTIVEC
	dssall
#endif
	sync
	mtspr	l2cr, r3
	sync
1:	mfspr	r3, l2cr
	oris	r3, r3, L2CR_L2I@h
	mtspr	l2cr, r3

invl2:
	mfspr	r3, l2cr
	andis.	r3, r3, L2CR_L2I@h
	bne	invl2
	sync
#endif

	/* enable and invalidate the data cache */
	mfspr	r3, HID0
	li	r5, HID0_DCFI|HID0_DLOCK
	andc	r3, r3, r5
	mtspr	HID0, r3		/* no invalidate, unlock */
	ori	r3, r3, HID0_DCE
	ori	r5, r3, HID0_DCFI
	mtspr	HID0, r5		/* enable + invalidate */
	mtspr	HID0, r3		/* enable */
	sync
#ifdef CONFIG_SYS_L2
	sync
	lis	r3, L2_ENABLE@h
	ori	r3, r3, L2_ENABLE@l
	mtspr	l2cr, r3
	isync
	sync
#endif

	/* enable and invalidate the instruction cache */
	mfspr	r3, HID0
	li	r5, HID0_ICFI|HID0_ILOCK
	andc	r3, r3, r5
	ori	r3, r3, HID0_ICE
	ori	r5, r3, HID0_ICFI
	mtspr	HID0, r5
	mtspr	HID0, r3
	isync
	sync

	/* TBEN in HID0 */
	mfspr	r4, HID0
	oris	r4, r4, 0x0400
	mtspr	HID0, r4
	sync
	isync

	/* MCP|SYNCBE|ABE in HID1 */
	mfspr	r4, HID1
	oris	r4, r4, 0x8000
	ori	r4, r4, 0x0C00
	mtspr	HID1, r4
	sync
	isync

	lis	r3, CONFIG_LINUX_RESET_VEC@h
	ori	r3, r3, CONFIG_LINUX_RESET_VEC@l
	mtlr	r3
	blr

	/* Never Returns, Running in Linux Now */
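
/*
 * For reference, a minimal C sketch of the core 0 side of this handoff:
 * copy this page to a suitably aligned boot region (the comment above
 * asks for 1M alignment), flush it to memory, and point BPTR at it.
 * The helper names, the 4 KiB copy size, and the BPTR location at
 * CCSRBAR + 0x20 are assumptions for illustration only; this file does
 * not define them.
 *
 *	extern u32 __secondary_start_page;
 *	u32 bootpg = determine_mp_bootpg(NULL);    -- chosen boot region
 *
 *	memcpy((void *)bootpg, &__secondary_start_page, 4096);
 *	flush_cache(bootpg, 4096);
 *
 *	-- BPTR: enable bit plus the boot page physical address >> 12
 *	out_be32((u32 *)(CONFIG_SYS_CCSRBAR + 0x20),
 *		 0x80000000 | (bootpg >> 12));
 *
 * Core 1 then leaves holdoff at the copied reset-vector offset and
 * falls through the init sequence above into the Linux reset vector.
 */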