xref: /OK3568_Linux_fs/kernel/arch/arm64/kvm/hyp/hyp-entry.S (revision 4882a59341e53eb6f0b4789bf948001014eff981)
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2015-2018 - ARM Ltd
 * Author: Marc Zyngier <marc.zyngier@arm.com>
 */

#include <linux/arm-smccc.h>
#include <linux/linkage.h>

#include <asm/alternative.h>
#include <asm/assembler.h>
#include <asm/cpufeature.h>
#include <asm/kvm_arm.h>
#include <asm/kvm_asm.h>
#include <asm/mmu.h>
#include <asm/spectre.h>

.macro save_caller_saved_regs_vect
	/* x0 and x1 were saved in the vector entry */
	stp	x2, x3,   [sp, #-16]!
	stp	x4, x5,   [sp, #-16]!
	stp	x6, x7,   [sp, #-16]!
	stp	x8, x9,   [sp, #-16]!
	stp	x10, x11, [sp, #-16]!
	stp	x12, x13, [sp, #-16]!
	stp	x14, x15, [sp, #-16]!
	stp	x16, x17, [sp, #-16]!
.endm

.macro restore_caller_saved_regs_vect
	ldp	x16, x17, [sp], #16
	ldp	x14, x15, [sp], #16
	ldp	x12, x13, [sp], #16
	ldp	x10, x11, [sp], #16
	ldp	x8, x9,   [sp], #16
	ldp	x6, x7,   [sp], #16
	ldp	x4, x5,   [sp], #16
	ldp	x2, x3,   [sp], #16
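	/* x0 and x1 below were pushed by the vector entry preamble */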
	ldp	x0, x1,   [sp], #16
.endm

	.text

el1_sync:				// Guest trapped into EL2

	mrs	x0, esr_el2
	ubfx	x0, x0, #ESR_ELx_EC_SHIFT, #ESR_ELx_EC_WIDTH
	cmp	x0, #ESR_ELx_EC_HVC64
	ccmp	x0, #ESR_ELx_EC_HVC32, #4, ne
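	/*
	 * Only HVC64/HVC32 take the SMCCC fast path below; the ccmp
	 * keeps the "equal" result when the first cmp already matched.
	 */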
	b.ne	el1_trap

	/*
	 * Fastest possible path for ARM_SMCCC_ARCH_WORKAROUND_1.
	 * The workaround has already been applied on the host,
	 * so let's quickly get back to the guest. We don't bother
	 * restoring x1, as it can be clobbered anyway.
	 */
	ldr	x1, [sp]				// Guest's x0
	eor	w1, w1, #ARM_SMCCC_ARCH_WORKAROUND_1
	cbz	w1, wa_epilogue

	/* ARM_SMCCC_ARCH_WORKAROUND_2 handling */
	eor	w1, w1, #(ARM_SMCCC_ARCH_WORKAROUND_1 ^ \
			  ARM_SMCCC_ARCH_WORKAROUND_2)
	cbz	w1, wa_epilogue

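	/* ARM_SMCCC_ARCH_WORKAROUND_3 handling */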
	eor	w1, w1, #(ARM_SMCCC_ARCH_WORKAROUND_2 ^ \
			  ARM_SMCCC_ARCH_WORKAROUND_3)
	cbnz	w1, el1_trap

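/*
 * Return SMCCC_RET_SUCCESS (0) to the guest, drop the x0/x1 pair that
 * the vector preamble pushed, and go straight back to the guest.
 */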
wa_epilogue:
	mov	x0, xzr
	add	sp, sp, #16
	eret
	sb

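/* Everything else: hand over to __guest_exit with the vcpu pointer in x1 and the exit reason in x0 */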
el1_trap:
	get_vcpu_ptr	x1, x0
	mov	x0, #ARM_EXCEPTION_TRAP
	b	__guest_exit

el1_irq:
	get_vcpu_ptr	x1, x0
	mov	x0, #ARM_EXCEPTION_IRQ
	b	__guest_exit

el1_error:
	get_vcpu_ptr	x1, x0
	mov	x0, #ARM_EXCEPTION_EL1_SERROR
	b	__guest_exit

el2_sync:
	/* Check for illegal exception return */
	mrs	x0, spsr_el2
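	/* SPSR_EL2.IL (bit 20) flags an illegal exception return */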
	tbnz	x0, #20, 1f

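	/*
	 * Not an illegal return: an unexpected exception was taken from
	 * EL2 itself. kvm_unexpected_el2_exception() applies a fixup from
	 * the EL2 exception table (or panics) before we resume.
	 */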
	save_caller_saved_regs_vect
	stp     x29, x30, [sp, #-16]!
	bl	kvm_unexpected_el2_exception
	ldp     x29, x30, [sp], #16
	restore_caller_saved_regs_vect

	eret
	sb

1:
	/* Let's attempt a recovery from the illegal exception return */
	get_vcpu_ptr	x1, x0
	mov	x0, #ARM_EXCEPTION_IL
	b	__guest_exit


el2_error:
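	/* SErrors taken from EL2 get the same unexpected-exception treatment as el2_sync */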
	save_caller_saved_regs_vect
	stp     x29, x30, [sp, #-16]!

	bl	kvm_unexpected_el2_exception

	ldp     x29, x30, [sp], #16
	restore_caller_saved_regs_vect

	eret
	sb

.macro invalid_vector	label, target = __guest_exit_panic
	.align	2
SYM_CODE_START_LOCAL(\label)
	b \target
SYM_CODE_END(\label)
.endm

	/* None of these should ever happen */
	invalid_vector	el2t_sync_invalid
	invalid_vector	el2t_irq_invalid
	invalid_vector	el2t_fiq_invalid
	invalid_vector	el2t_error_invalid
	invalid_vector	el2h_irq_invalid
	invalid_vector	el2h_fiq_invalid
	invalid_vector	el1_fiq_invalid

	.ltorg

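	/* VBAR_EL2 requires 2KB (2^11) alignment for the vector table that follows */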
	.align 11

.macro check_preamble_length start, end
/* kvm_patch_vector_branch() generates code that jumps over the preamble. */
.if ((\end-\start) != KVM_VECTOR_PREAMBLE)
	.error "KVM vector preamble length mismatch"
.endif
.endm

.macro valid_vect target
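	/* The esb + stp pair forms the KVM_VECTOR_PREAMBLE that the hardened vectors branch past */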
	.align 7
661:
	esb
	stp	x0, x1, [sp, #-16]!
662:
	b	\target

check_preamble_length 661b, 662b
.endm

.macro invalid_vect target
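	/* A nop in place of the esb keeps the preamble the same length as valid_vect */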
	.align 7
661:
	nop
	stp	x0, x1, [sp, #-16]!
662:
	b	\target

check_preamble_length 661b, 662b
.endm

SYM_CODE_START(__kvm_hyp_vector)
	invalid_vect	el2t_sync_invalid	// Synchronous EL2t
	invalid_vect	el2t_irq_invalid	// IRQ EL2t
	invalid_vect	el2t_fiq_invalid	// FIQ EL2t
	invalid_vect	el2t_error_invalid	// Error EL2t

	valid_vect	el2_sync		// Synchronous EL2h
	invalid_vect	el2h_irq_invalid	// IRQ EL2h
	invalid_vect	el2h_fiq_invalid	// FIQ EL2h
	valid_vect	el2_error		// Error EL2h

	valid_vect	el1_sync		// Synchronous 64-bit EL1
	valid_vect	el1_irq			// IRQ 64-bit EL1
	invalid_vect	el1_fiq_invalid		// FIQ 64-bit EL1
	valid_vect	el1_error		// Error 64-bit EL1

	valid_vect	el1_sync		// Synchronous 32-bit EL1
	valid_vect	el1_irq			// IRQ 32-bit EL1
	invalid_vect	el1_fiq_invalid		// FIQ 32-bit EL1
	valid_vect	el1_error		// Error 32-bit EL1
SYM_CODE_END(__kvm_hyp_vector)

.macro spectrev2_smccc_wa1_smc
	sub	sp, sp, #(8 * 4)
	stp	x2, x3, [sp, #(8 * 0)]
	stp	x0, x1, [sp, #(8 * 2)]
	alternative_cb spectre_bhb_patch_wa3
	/* Patched to mov WA3 when supported */
	mov	w0, #ARM_SMCCC_ARCH_WORKAROUND_1
	alternative_cb_end
	smc	#0
	ldp	x2, x3, [sp, #(8 * 0)]
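	/* Leave the guest's x0/x1 on the stack, as the normal vector preamble would have done */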
	add	sp, sp, #(8 * 2)
.endm

.macro hyp_ventry	indirect, spectrev2
	.align	7
1:	esb
	.if \spectrev2 != 0
	spectrev2_smccc_wa1_smc
	.else
	stp	x0, x1, [sp, #-16]!
	mitigate_spectre_bhb_loop	x0
	mitigate_spectre_bhb_clear_insn
	.endif
	.if \indirect != 0
	alternative_cb  kvm_patch_vector_branch
	/*
	 * For ARM64_SPECTRE_V3A configurations, these NOPs get replaced with:
	 *
	 * movz	x0, #(addr & 0xffff)
	 * movk	x0, #((addr >> 16) & 0xffff), lsl #16
	 * movk	x0, #((addr >> 32) & 0xffff), lsl #32
	 * br	x0
	 *
	 * Where:
	 * addr = kern_hyp_va(__kvm_hyp_vector) + vector-offset + KVM_VECTOR_PREAMBLE.
	 * See kvm_patch_vector_branch for details.
	 */
	nop
	nop
	nop
	nop
	alternative_cb_end
	.endif
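	/* Jump to the matching slot of __kvm_hyp_vector, past its preamble (x0/x1 are already saved) */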
	b	__kvm_hyp_vector + (1b - 0b + KVM_VECTOR_PREAMBLE)
.endm

.macro generate_vectors	indirect, spectrev2
0:
	.rept 16
	hyp_ventry	\indirect, \spectrev2
	.endr
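	/* 16 slots x 128 bytes = 2KB; the .org below catches any overflow at build time */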
	.org 0b + SZ_2K		// Safety measure
.endm

	.align	11
SYM_CODE_START(__bp_harden_hyp_vecs)
	generate_vectors indirect = 0, spectrev2 = 1 // HYP_VECTOR_SPECTRE_DIRECT
	generate_vectors indirect = 1, spectrev2 = 0 // HYP_VECTOR_INDIRECT
	generate_vectors indirect = 1, spectrev2 = 1 // HYP_VECTOR_SPECTRE_INDIRECT
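	/* Fail the build unless the three vector sets fill exactly __BP_HARDEN_HYP_VECS_SZ */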
1:	.org __bp_harden_hyp_vecs + __BP_HARDEN_HYP_VECS_SZ
	.org 1b
SYM_CODE_END(__bp_harden_hyp_vecs)