/*
 * Copyright (c) 2013-2014, ARM Limited and Contributors. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * Neither the name of ARM nor the names of its contributors may be used
 * to endorse or promote products derived from this software without specific
 * prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include <bl_common.h>
#include <arch.h>
#include <tsp.h>
#include <asm_macros.S>


	.globl	tsp_entrypoint
	.globl	tsp_cpu_on_entry
	.globl	tsp_cpu_off_entry
	.globl	tsp_cpu_suspend_entry
	.globl	tsp_cpu_resume_entry
	.globl	tsp_fast_smc_entry

	/* ---------------------------------------------
	 * Populate the params in x0-x7 from the pointer
	 * to the smc args structure in x0.
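	 * The macro is invoked straight after each entry
	 * handler returns, so x0 holds the handler's
	 * return value i.e. the pointer to the args
	 * structure. TSP_ARG0 is loaded into x0 last so
	 * that the pointer is not clobbered before the
	 * remaining arguments have been read.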
	 * ---------------------------------------------
	 */
	.macro restore_args_call_smc
	ldp	x6, x7, [x0, #TSP_ARG6]
	ldp	x4, x5, [x0, #TSP_ARG4]
	ldp	x2, x3, [x0, #TSP_ARG2]
	ldp	x0, x1, [x0, #TSP_ARG0]
	smc	#0
	.endm


func tsp_entrypoint
	/* ---------------------------------------------
	 * Store the extents of the tzram available to
	 * BL32 for future use.
	 * TODO: We are assuming that x9-x10 will not be
	 * corrupted by any function before platform
	 * setup.
	 * ---------------------------------------------
	 */
	mov	x9, x0
	mov	x10, x1

	/* ---------------------------------------------
	 * The entrypoint is expected to be executed
	 * only by the primary cpu (at least for now).
	 * So, make sure no secondary has lost its way.
	 * ---------------------------------------------
	 */
	mrs	x0, mpidr_el1
	bl	platform_is_primary_cpu
	cbz	x0, tsp_entrypoint_panic

	/* ---------------------------------------------
	 * Set the exception vector to something sane.
	 * ---------------------------------------------
	 */
	adr	x0, early_exceptions
	msr	vbar_el1, x0

	/* ---------------------------------------------
	 * Enable the instruction cache.
	 * ---------------------------------------------
	 */
	mrs	x0, sctlr_el1
	orr	x0, x0, #SCTLR_I_BIT
	msr	sctlr_el1, x0
	isb

	/* ---------------------------------------------
	 * Zero out NOBITS sections. There are 2 of them:
	 *   - the .bss section;
	 *   - the coherent memory section.
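	 * These sections occupy no space in the loaded
	 * image, so they must be zero-initialised at
	 * runtime before use.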
	 * ---------------------------------------------
	 */
	ldr	x0, =__BSS_START__
	ldr	x1, =__BSS_SIZE__
	bl	zeromem16

	ldr	x0, =__COHERENT_RAM_START__
	ldr	x1, =__COHERENT_RAM_UNALIGNED_SIZE__
	bl	zeromem16

	/* --------------------------------------------
	 * Give ourselves a small coherent stack to
	 * ease the pain of initializing the MMU
	 * --------------------------------------------
	 */
	mrs	x0, mpidr_el1
	bl	platform_set_coherent_stack

	/* ---------------------------------------------
	 * Perform early platform setup & platform
	 * specific early arch. setup e.g. mmu setup
	 * ---------------------------------------------
	 */
	mov	x0, x9
	mov	x1, x10
	bl	bl32_early_platform_setup
	bl	bl32_plat_arch_setup

	/* ---------------------------------------------
	 * Give ourselves a stack allocated in Normal
	 * -IS-WBWA memory
	 * ---------------------------------------------
	 */
	mrs	x0, mpidr_el1
	bl	platform_set_stack

	/* ---------------------------------------------
	 * Jump to main function.
	 * ---------------------------------------------
	 */
	bl	tsp_main

	/* ---------------------------------------------
	 * Tell TSPD that we are done initialising
	 * ---------------------------------------------
	 */
	mov	x1, x0
	mov	x0, #TSP_ENTRY_DONE
	smc	#0

tsp_entrypoint_panic:
	b	tsp_entrypoint_panic

	/* ---------------------------------------------
	 * This entrypoint is used by the TSPD when this
	 * cpu is to be turned off through a CPU_OFF
	 * psci call to ask the TSP to perform any
	 * bookkeeping necessary. In the current
	 * implementation, the TSPD expects the TSP to
	 * re-initialise its state so nothing is done
	 * here except for acknowledging the request.
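	 * The arguments returned by tsp_cpu_off_main
	 * are handed back to the TSPD through
	 * restore_args_call_smc.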
	 * ---------------------------------------------
	 */
func tsp_cpu_off_entry
	bl	tsp_cpu_off_main
	restore_args_call_smc

	/* ---------------------------------------------
	 * This entrypoint is used by the TSPD when this
	 * cpu is turned on using a CPU_ON psci call to
	 * ask the TSP to initialise itself i.e. setup
	 * the mmu, stacks etc. Minimal architectural
	 * state will be initialised by the TSPD when
	 * this function is entered i.e. Caches and MMU
	 * will be turned off, the execution state
	 * will be aarch64 and exceptions masked.
	 * ---------------------------------------------
	 */
func tsp_cpu_on_entry
	/* ---------------------------------------------
	 * Set the exception vector to something sane.
	 * ---------------------------------------------
	 */
	adr	x0, early_exceptions
	msr	vbar_el1, x0

	/* ---------------------------------------------
	 * Enable the instruction cache.
	 * ---------------------------------------------
	 */
	mrs	x0, sctlr_el1
	orr	x0, x0, #SCTLR_I_BIT
	msr	sctlr_el1, x0
	isb

	/* --------------------------------------------
	 * Give ourselves a small coherent stack to
	 * ease the pain of initializing the MMU
	 * --------------------------------------------
	 */
	mrs	x0, mpidr_el1
	bl	platform_set_coherent_stack

	/* ---------------------------------------------
	 * Initialise the MMU
	 * ---------------------------------------------
	 */
	bl	enable_mmu

	/* ---------------------------------------------
	 * Give ourselves a stack allocated in Normal
	 * -IS-WBWA memory
	 * ---------------------------------------------
	 */
	mrs	x0, mpidr_el1
	bl	platform_set_stack

	/* ---------------------------------------------
	 * Enter C runtime to perform any remaining
	 * bookkeeping
	 * ---------------------------------------------
	 */
	bl	tsp_cpu_on_main
	restore_args_call_smc

	/* Should never reach here */
tsp_cpu_on_entry_panic:
	b	tsp_cpu_on_entry_panic

	/* ---------------------------------------------
	 * This entrypoint is used by the TSPD when this
	 * cpu is to be suspended through a CPU_SUSPEND
	 * psci call to ask the TSP to perform any
	 * bookkeeping necessary. In the current
	 * implementation, the TSPD saves and restores
	 * the EL1 state.
	 * ---------------------------------------------
	 */
func tsp_cpu_suspend_entry
	bl	tsp_cpu_suspend_main
	restore_args_call_smc

	/* ---------------------------------------------
	 * This entrypoint is used by the TSPD when this
	 * cpu resumes execution after an earlier
	 * CPU_SUSPEND psci call to ask the TSP to
	 * restore its saved context. In the current
	 * implementation, the TSPD saves and restores
	 * EL1 state so nothing is done here apart from
	 * acknowledging the request.
	 * ---------------------------------------------
	 */
func tsp_cpu_resume_entry
	bl	tsp_cpu_resume_main
	restore_args_call_smc
tsp_cpu_resume_panic:
	b	tsp_cpu_resume_panic

	/* ---------------------------------------------
	 * This entrypoint is used by the TSPD to ask
	 * the TSP to service a fast smc request.
	 * ---------------------------------------------
	 */
func tsp_fast_smc_entry
	bl	tsp_fast_smc_handler
	restore_args_call_smc
tsp_fast_smc_entry_panic:
	b	tsp_fast_smc_entry_panic