xref: /OK3568_Linux_fs/u-boot/arch/arm/cpu/armv8/sleep.S (revision 4882a59341e53eb6f0b4789bf948001014eff981)
1*4882a593Smuzhiyun/*
2*4882a593Smuzhiyun * (C) Copyright 2017 Rockchip Electronics Co., Ltd.
3*4882a593Smuzhiyun *
4*4882a593Smuzhiyun * SPDX-License-Identifier:	GPL-2.0+
5*4882a593Smuzhiyun */
6*4882a593Smuzhiyun
7*4882a593Smuzhiyun#include <asm/macro.h>
8*4882a593Smuzhiyun#include <asm-offsets.h>
9*4882a593Smuzhiyun#include <asm/psci.h>
10*4882a593Smuzhiyun#include <config.h>
11*4882a593Smuzhiyun#include <linux/linkage.h>
12*4882a593Smuzhiyun
13*4882a593Smuzhiyun	.globl cpu_suspend
14*4882a593Smuzhiyun	.globl cpu_do_suspend
15*4882a593Smuzhiyun	.globl cpu_suspend_save
16*4882a593Smuzhiyun	.globl cpu_resume
17*4882a593Smuzhiyun	.globl cpu_do_resume
18*4882a593Smuzhiyun
19*4882a593Smuzhiyun/*
20*4882a593Smuzhiyun * int cpu_suspend(unsigned long arg, int (*fn)(unsigned long))
21*4882a593Smuzhiyun * @arg will be passed to fn as argument
22*4882a593Smuzhiyun * return value: 0 - cpu resumed from suspended state.
23*4882a593Smuzhiyun *		 -1 - cpu not suspended.
24*4882a593Smuzhiyun */
ENTRY(cpu_suspend)
	/*
	 * Build a 192-byte stack frame and save x8~x30 (x29 is the frame
	 * pointer, lr is x30).  23 registers plus one reserved slot keep
	 * the frame 16-byte aligned: (23 + 1) * 8 = 192.
	 */
	stp	x29, lr, [sp, #-192]!
	/* Reserve 8-byte after x8, just for offset with 16-byte aligned */
	str	x8, [sp, #16]
	stp	x9, x10, [sp, #32]
	stp	x11, x12, [sp, #48]
	stp	x13, x14, [sp, #64]
	stp	x15, x16, [sp, #80]
	stp	x17, x18, [sp, #96]
	stp	x19, x20, [sp,#112]
	stp	x21, x22, [sp,#128]
	stp	x23, x24, [sp,#144]
	stp	x25, x26, [sp,#160]
	stp	x27, x28, [sp,#176]

	/*
	 * x19: sp of the frame above (suspend_return restores from it)
	 * x20: @arg for fn
	 * x21: fn, the arch specific suspend entry
	 */
	mov	x19, sp
	mov	x20, x0
	mov	x21, x1

	/* Carve out the pm_ctx area, then save fn/arg just below it */
	sub	sp, sp, #PM_CTX_SIZE
	stp	x0, x1, [sp, #-16]!

	/* x18 is gd, save it to _suspend_gd !! */
	adr	x0, _suspend_gd
	str	x18, [x0]

	/* x0: pm_ctx;	x1: sp where restore x8~x30 from */
	add	x0, sp, #16
	mov	x1, x19
	bl	cpu_suspend_save

	/*
	 * Hijack lr so that if fn ever returns it lands in 'aborted'
	 * below, i.e. the suspend request failed.  On success the core
	 * powers down and comes back through cpu_resume/cpu_do_resume,
	 * which re-enters suspend_return with x0 = 0.
	 */
	adr	lr, aborted
	/* Jump to arch specific suspend */
	mov	x0, x20
	br	x21

	/* Should never reach here, otherwise failed */
aborted:
	/* cpu not suspended: drop the fn/arg pair and the pm_ctx area */
	add	sp, sp, #(16 + PM_CTX_SIZE)
	/* Return -1 to the caller */
	mov	x0, #(-1)

	/*
	 * Common exit, reached with x0 = -1 (suspend aborted, sp already
	 * unwound above) or x0 = 0 (resumed; cpu_resume restored sp from
	 * pm_ctx before branching here via cpu_do_resume).
	 */
suspend_return:
	ldr	x8, [sp, #16]
	ldp	x9, x10, [sp, #32]
	ldp	x11, x12, [sp, #48]
	ldp	x13, x14, [sp, #64]
	ldp	x15, x16, [sp, #80]
	ldp	x17, x18, [sp, #96]
	ldp	x19, x20, [sp,#112]
	ldp	x21, x22, [sp,#128]
	ldp	x23, x24, [sp,#144]
	ldp	x25, x26, [sp,#160]
	ldp	x27, x28, [sp,#176]
	ldp	x29, lr, [sp], #192
	ret
ENDPROC(cpu_suspend)
87*4882a593Smuzhiyun
/*
 * void cpu_do_suspend(void *ptr)
 * In:  x0 = address where the EL2 system registers are stored
 *      (presumably suspend_regs[] of struct pm_ctx, defined in C -
 *      confirm against the header that declares PM_CTX_SIZE).
 * Out: 9 sysregs + daif written at [x0, #0..#64]; x0 preserved.
 */
ENTRY(cpu_do_suspend)
	/*
	 * Save temporary x2~x12, total: 11*8=88, maybe you need not so many
	 * registers now, but I save them for future extendion.
	 */
	stp	x2,  x3, [sp, #-88]!
	stp	x4,  x5, [sp, #16]
	stp	x6,  x7, [sp, #32]
	stp	x8,  x9, [sp, #48]
	stp	x10, x11, [sp,#64]
	str	x12, [sp, #80]

	/*
	 * Save core registers.
	 *
	 * Note: If you want to add/sub the register here,
	 *	 remember update suspend_regs[] of struct pm_ctx,
	 *	 and keep the order in sync with cpu_do_resume.
	 */
	mrs	x2, vbar_el2
	mrs	x3, cptr_el2
	mrs	x4, ttbr0_el2
	mrs	x5, tcr_el2
	mrs	x6, mair_el2
	mrs	x7, cntvoff_el2
	mrs	x8, sctlr_el2
	mrs	x9, hcr_el2
	mrs	x10, daif

	stp	x2,  x3, [x0, #0]
	stp	x4,  x5, [x0, #16]
	stp	x6,  x7, [x0, #32]
	stp	x8,  x9, [x0, #48]
	str	x10, [x0, #64]

	/* Restore temporary x2~x12 (x2/x3 popped last with the frame) */
	ldp	x4,  x5, [sp, #16]
	ldp	x6,  x7, [sp, #32]
	ldp	x8,  x9, [sp, #48]
	ldp	x10, x11, [sp,#64]
	ldr	x12, [sp, #80]
	ldp	x2,  x3, [sp], #88
	ret
ENDPROC(cpu_do_suspend)
131*4882a593Smuzhiyun
/*
 * Entry point after wake-up.  Runs with x18 (gd) not yet valid, so gd
 * is reloaded from _suspend_gd, which cpu_suspend stored before sleep.
 */
ENTRY(cpu_resume)
	/* Disable interrupt */
	msr       daifset, #0x03

	/* Load gd !! */
	adr x1, _suspend_gd
	ldr x2, [x1]

	/* Get pm_ctx: gd + PM_CTX_PHYS holds the physical ctx pointer */
	add x2, x2, #PM_CTX_PHYS
	ldr x0, [x2]

	/*
	 * First pair of pm_ctx is {saved sp, resume entry} - presumably
	 * laid out by cpu_suspend_save (defined in C, confirm there).
	 * The post-index leaves x0 = pm_ctx + 16, i.e. pointing at the
	 * saved sysregs area that cpu_do_resume expects in x0.
	 */
	ldp	x1, lr, [x0], #16
	mov	sp, x1
	ret
ENDPROC(cpu_resume)
149*4882a593Smuzhiyun
/*
 * void cpu_do_resume(paddr suspend_regs) __noreturn;
 * Restore the registers stored by cpu_do_suspend.
 * x0 points to the physical base address of the suspend_regs
 * field of struct pm_ctx.  Exits via suspend_return with x0 = 0,
 * so cpu_suspend's caller sees a 0 return ("resumed").
 */
ENTRY(cpu_do_resume)
	/*
	 * Invalidate local tlb entries before turning on MMU !!!
	 */
	tlbi	alle2
	dsb	sy
	isb

	/* Same offsets/order as the stores in cpu_do_suspend */
	ldp	x2, x3, [x0]
	ldp	x4, x5, [x0, #16]
	ldp	x6, x7, [x0, #32]
	ldp	x8, x9, [x0, #48]
	/* x11/x12 are currently unused padding, loaded for future use */
	ldp	x10, x11, [x0, #64]
	ldr	x12, [x0, #80]

	/* Restore core registers (everything except sctlr_el2 first) */
	msr	vbar_el2, x2
	msr	cptr_el2, x3
	msr	ttbr0_el2, x4
	msr	tcr_el2, x5
	msr	mair_el2, x6
	msr	cntvoff_el2, x7
	msr	hcr_el2, x9

	/* Enable MMU here: sctlr_el2 last, once ttbr/tcr/mair are valid */
	msr	sctlr_el2, x8
	dsb	sy
	isb

	/* resume interrupt: restore the DAIF mask saved at suspend */
	msr	daif, x10

	/* Report success and unwind through cpu_suspend's common exit */
	mov	x0, #0
	b	suspend_return
ENDPROC(cpu_do_resume)
191*4882a593Smuzhiyun
.align 3
/*
 * Saved gd pointer (x18), stored by cpu_suspend and reloaded by
 * cpu_resume.  Must be a 64-bit slot: it is written with
 * "str x18" and read with a 64-bit ldr.  The previous ".long 0x0"
 * reserved only 4 bytes, so the 8-byte store overran into whatever
 * the assembler placed next.
 */
_suspend_gd:
	.quad	0x0
195*4882a593Smuzhiyun
196