/*
 * Copyright (c) 2016, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <platform_def.h>
#include <pmu_regs.h>

	.globl	clst_warmboot_data

	/* Emit \_name into the .sram.text section (code that must run from SRAM) */
	.macro sram_func _name
	.cfi_sections .debug_frame
	.section .sram.text, "ax"
	.type \_name, %function
	.cfi_startproc
	\_name:
	.endm

#define CRU_CLKSEL_CON6		0x118

#define DDRCTL0_C_SYSREQ_CFG	0x0100
#define DDRCTL1_C_SYSREQ_CFG	0x1000

#define DDRC0_SREF_DONE_EXT	0x01
#define DDRC1_SREF_DONE_EXT	0x04

#define PLL_MODE_SHIFT		(0x8)
#define PLL_NORMAL_MODE		((0x3 << (PLL_MODE_SHIFT + 16)) | \
				 (0x1 << PLL_MODE_SHIFT))
#define MPIDR_CLST_L_BITS	0x0
	/*
	 * To speed up the warm boot, some SoCs need a few registers
	 * configured here. If the SCU was suspended, the related clocks
	 * must first be switched back from slow (24 MHz) mode to normal
	 * mode.
	 * X0: MPIDR_EL1 & MPIDR_CLUSTER_MASK
	 */
.macro	func_rockchip_clst_warmboot
	adr	x4, clst_warmboot_data
	/* cluster id (MPIDR Aff1) -> per-cluster word offset */
	lsr	x5, x0, #6
	ldr	w3, [x4, x5]
	str	wzr, [x4, x5]
	cmp	w3, #PMU_CLST_RET
	b.ne	clst_warmboot_end
	ldr	w6, =(PLL_NORMAL_MODE)
	/*
	 * core_l offset is CRU_BASE + 0xc,
	 * core_b offset is CRU_BASE + 0x2c
	 */
	ldr	x7, =(CRU_BASE + 0xc)
	/* the big cluster (Aff1 = 1, bit 8) adds 0x20 to reach core_b */
	lsr	x2, x0, #3
	str	w6, [x7, x2]
clst_warmboot_end:
.endm

.macro rockchip_clst_warmboot_data
clst_warmboot_data:
	.rept	PLATFORM_CLUSTER_COUNT
	.word	0
	.endr
.endm

	/* -----------------------------------------------
	 * void sram_func_set_ddrctl_pll(uint32_t pll_src)
	 * Function to switch the PLL source for ddrctrl
	 * In: x0 - the PLL used as the clk_ddrc clock source
	 * Out: None
	 * Clobber list: x0 - x3, x5, x8 - x10
	 * -----------------------------------------------
	 */

	.globl	sram_func_set_ddrctl_pll

sram_func sram_func_set_ddrctl_pll
	/* back up the parameter */
	mov	x8, x0

	/* disable the MMU at EL3 */
	mrs	x9, sctlr_el3
	bic	x10, x9, #(SCTLR_M_BIT)
	msr	sctlr_el3, x10
	isb
	dsb	sy

	/* enable ddrctl0_1 idle request */
	mov	x5, PMU_BASE
	ldr	w0, [x5, #PMU_SFT_CON]
	orr	w0, w0, #DDRCTL0_C_SYSREQ_CFG
	orr	w0, w0, #DDRCTL1_C_SYSREQ_CFG
	str	w0, [x5, #PMU_SFT_CON]

check_ddrc0_1_sref_enter:
	ldr	w1, [x5, #PMU_DDR_SREF_ST]
	and	w2, w1, #DDRC0_SREF_DONE_EXT
	and	w3, w1, #DDRC1_SREF_DONE_EXT
	orr	w2, w2, w3
	cmp	w2, #(DDRC0_SREF_DONE_EXT | DDRC1_SREF_DONE_EXT)
	b.eq	check_ddrc0_1_sref_enter

	/*
	 * select a PLL for ddrctrl:
	 * x0 = 0: ALPLL
	 * x0 = 1: ABPLL
	 * x0 = 2: DPLL
	 * x0 = 3: GPLL
	 */
	mov	x5, CRU_BASE
	/* bits [5:4] select the clk_ddrc PLL; the high halfword is the write mask */
	lsl	w0, w8, #4
	orr	w0, w0, #0x00300000
	str	w0, [x5, #CRU_CLKSEL_CON6]

	/* disable ddrctl0_1 idle request */
	mov	x5, PMU_BASE
	ldr	w0, [x5, #PMU_SFT_CON]
	bic	w0, w0, #DDRCTL0_C_SYSREQ_CFG
	bic	w0, w0, #DDRCTL1_C_SYSREQ_CFG
	str	w0, [x5, #PMU_SFT_CON]

check_ddrc0_1_sref_exit:
	ldr	w1, [x5, #PMU_DDR_SREF_ST]
	and	w2, w1, #DDRC0_SREF_DONE_EXT
	and	w3, w1, #DDRC1_SREF_DONE_EXT
	orr	w2, w2, w3
	cmp	w2, #0x0
	b.eq	check_ddrc0_1_sref_exit

	/* re-enable the MMU at EL3 */
	msr	sctlr_el3, x9
	isb
	dsb	sy

	ret
endfunc sram_func_set_ddrctl_pll