/*
 * Copyright (c) 2025-2026, Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

/*
 * CPU-specific library support for the Arm "Canyon" core, plugged into the
 * TF-A CPU operations framework via declare_cpu_ops at the bottom of this
 * file. The cpu_* and sysreg_* macros used below come from cpu_macros.S /
 * asm_macros.S (outside this file).
 */

#include <arch.h>
#include <asm_macros.S>
#include <common/bl_common.h>
#include <canyon.h>
#include <cpu_macros.S>
#include <plat_macros.S>

/* Hardware handled coherency */
#if HW_ASSISTED_COHERENCY == 0
#error "Canyon must be compiled with HW_ASSISTED_COHERENCY enabled"
#endif

/* 64-bit only core */
#if CTX_INCLUDE_AARCH32_REGS == 1
#error "Canyon supports only AArch64. Compile with CTX_INCLUDE_AARCH32_REGS=0"
#endif

cpu_reset_prologue canyon

/*
 * Reset handler, run once per core on cold/warm boot before normal
 * operation. Only action needed here: clear SSBS.
 */
cpu_reset_func_start canyon
	/* ----------------------------------------------------
	 * Disable speculative loads
	 * (SSBS = 0: do not allow speculative store bypass)
	 * ----------------------------------------------------
	 */
	msr	SSBS, xzr
cpu_reset_func_end canyon

/*
 * Core power-down preparation.
 * Clobbers: x0, flags. No stack use.
 */
func canyon_core_pwr_dwn
	/*
	 * When software running at lower ELs requests power down without first
	 * disabling SME, the CME connected to it will reject its power down
	 * request. Skip setting the PWRDN_EN bit, downgrading the powerdown
	 * request to a simple WFI wait, to get a minimal amount of power saving
	 * rather than an instant pabandon.
	 */
	/* SVCR != 0 means SME streaming mode and/or ZA storage is live */
	mrs	x0, SVCR
	cbnz	x0, canyon_skip_pwr_dwn

	/* ---------------------------------------------------
	 * Flip CPU power down bit in power control register.
	 * It will be set on powerdown and cleared on wakeup
	 * ---------------------------------------------------
	 */
	sysreg_bit_toggle CANYON_IMP_CPUPWRCTLR_EL1, \
		CANYON_IMP_CPUPWRCTLR_EL1_CORE_PWRDN_EN_BIT
	isb		/* ensure the CPUPWRCTLR write takes effect before WFI */
canyon_skip_pwr_dwn:
	/*
	 * NOTE(review): signal_pabandon_handled is a cpu_macros.S macro —
	 * presumably it marks the powerdown-abandon path as supported/handled
	 * for this core; confirm against the framework definition.
	 */
	signal_pabandon_handled
	ret
endfunc canyon_core_pwr_dwn

/*
 * Register dump support for crash reporting. The framework convention is:
 * x6 = pointer to a NUL-"" terminated list of register names,
 * x8 onward = the corresponding register values.
 */
	.section .rodata.canyon_regs, "aS"
canyon_regs:	/* The ASCII list of register names to be reported */
	.asciz	"cpuectlr_el1", ""

func canyon_cpu_reg_dump
	adr	x6, canyon_regs			/* x6 = name list */
	mrs	x8, CANYON_IMP_CPUECTLR_EL1	/* x8 = cpuectlr_el1 value */
	ret
endfunc canyon_cpu_reg_dump

/*
 * Register this core with the CPU ops framework. canyon_reset_func is the
 * symbol generated by cpu_reset_func_start/_end above.
 */
declare_cpu_ops canyon, CANYON_MIDR, \
	canyon_reset_func, \
	canyon_core_pwr_dwn