xref: /OK3568_Linux_fs/kernel/arch/powerpc/kvm/book3s_32_sr.S (revision 4882a59341e53eb6f0b4789bf948001014eff981)
1*4882a593Smuzhiyun/* SPDX-License-Identifier: GPL-2.0-only */
2*4882a593Smuzhiyun/*
3*4882a593Smuzhiyun *
4*4882a593Smuzhiyun * Copyright SUSE Linux Products GmbH 2009
5*4882a593Smuzhiyun *
6*4882a593Smuzhiyun * Authors: Alexander Graf <agraf@suse.de>
7*4882a593Smuzhiyun */
8*4882a593Smuzhiyun
9*4882a593Smuzhiyun/******************************************************************************
10*4882a593Smuzhiyun *                                                                            *
11*4882a593Smuzhiyun *                               Entry code                                   *
12*4882a593Smuzhiyun *                                                                            *
13*4882a593Smuzhiyun *****************************************************************************/
14*4882a593Smuzhiyun
15*4882a593Smuzhiyun.macro LOAD_GUEST_SEGMENTS
16*4882a593Smuzhiyun
17*4882a593Smuzhiyun	/* Required state:
18*4882a593Smuzhiyun	 *
19*4882a593Smuzhiyun	 * MSR = ~IR|DR
20*4882a593Smuzhiyun	 * R1 = host R1
21*4882a593Smuzhiyun	 * R2 = host R2
22*4882a593Smuzhiyun	 * R3 = shadow vcpu
23*4882a593Smuzhiyun	 * all other volatile GPRS = free except R4, R6
24*4882a593Smuzhiyun	 * SVCPU[CR]  = guest CR
25*4882a593Smuzhiyun	 * SVCPU[XER] = guest XER
26*4882a593Smuzhiyun	 * SVCPU[CTR] = guest CTR
27*4882a593Smuzhiyun	 * SVCPU[LR]  = guest LR
28*4882a593Smuzhiyun	 */
29*4882a593Smuzhiyun
	/* Load one guest segment register: fetch the 32-bit word for SR n
	 * from the shadow vcpu's SR save array (base offset SVCPU_SR, one
	 * word per SR) and install it with mtsr.  r9 is scratch; r3 still
	 * points at the shadow vcpu, as required above.
	 */
30*4882a593Smuzhiyun#define XCHG_SR(n)	lwz	r9, (SVCPU_SR+(n*4))(r3);  \
31*4882a593Smuzhiyun			mtsr	n, r9
32*4882a593Smuzhiyun
	/* Install all 16 guest segment registers (SR0-SR15). */
33*4882a593Smuzhiyun	XCHG_SR(0)
34*4882a593Smuzhiyun	XCHG_SR(1)
35*4882a593Smuzhiyun	XCHG_SR(2)
36*4882a593Smuzhiyun	XCHG_SR(3)
37*4882a593Smuzhiyun	XCHG_SR(4)
38*4882a593Smuzhiyun	XCHG_SR(5)
39*4882a593Smuzhiyun	XCHG_SR(6)
40*4882a593Smuzhiyun	XCHG_SR(7)
41*4882a593Smuzhiyun	XCHG_SR(8)
42*4882a593Smuzhiyun	XCHG_SR(9)
43*4882a593Smuzhiyun	XCHG_SR(10)
44*4882a593Smuzhiyun	XCHG_SR(11)
45*4882a593Smuzhiyun	XCHG_SR(12)
46*4882a593Smuzhiyun	XCHG_SR(13)
47*4882a593Smuzhiyun	XCHG_SR(14)
48*4882a593Smuzhiyun	XCHG_SR(15)
49*4882a593Smuzhiyun
50*4882a593Smuzhiyun	/* Clear BATs. */
51*4882a593Smuzhiyun
	/* Write \reg (zero, loaded below) to both the upper and lower word
	 * of one IBAT/DBAT pair.  Zeroing the upper word clears the Vs/Vp
	 * valid bits, so no host BAT mapping remains active while the
	 * guest runs.  Only BAT pairs 0-3 are cleared here; high BATs
	 * (4-7), on cores that have them, are not touched — presumably
	 * not in use in this configuration (TODO: confirm).
	 */
52*4882a593Smuzhiyun#define KVM_KILL_BAT(n, reg)		\
53*4882a593Smuzhiyun        mtspr   SPRN_IBAT##n##U,reg;	\
54*4882a593Smuzhiyun        mtspr   SPRN_IBAT##n##L,reg;	\
55*4882a593Smuzhiyun        mtspr   SPRN_DBAT##n##U,reg;	\
56*4882a593Smuzhiyun        mtspr   SPRN_DBAT##n##L,reg;	\
57*4882a593Smuzhiyun
58*4882a593Smuzhiyun        li	r9, 0
59*4882a593Smuzhiyun	KVM_KILL_BAT(0, r9)
60*4882a593Smuzhiyun	KVM_KILL_BAT(1, r9)
61*4882a593Smuzhiyun	KVM_KILL_BAT(2, r9)
62*4882a593Smuzhiyun	KVM_KILL_BAT(3, r9)
63*4882a593Smuzhiyun
64*4882a593Smuzhiyun.endm
65*4882a593Smuzhiyun
66*4882a593Smuzhiyun/******************************************************************************
67*4882a593Smuzhiyun *                                                                            *
68*4882a593Smuzhiyun *                               Exit code                                    *
69*4882a593Smuzhiyun *                                                                            *
70*4882a593Smuzhiyun *****************************************************************************/
71*4882a593Smuzhiyun
72*4882a593Smuzhiyun.macro LOAD_HOST_SEGMENTS
73*4882a593Smuzhiyun
74*4882a593Smuzhiyun	/* Register usage at this point:
75*4882a593Smuzhiyun	 *
76*4882a593Smuzhiyun	 * R1         = host R1
77*4882a593Smuzhiyun	 * R2         = host R2
78*4882a593Smuzhiyun	 * R12        = exit handler id
79*4882a593Smuzhiyun	 * R13        = shadow vcpu - SHADOW_VCPU_OFF
80*4882a593Smuzhiyun	 * SVCPU.*    = guest *
81*4882a593Smuzhiyun	 * SVCPU[CR]  = guest CR
82*4882a593Smuzhiyun	 * SVCPU[XER] = guest XER
83*4882a593Smuzhiyun	 * SVCPU[CTR] = guest CTR
84*4882a593Smuzhiyun	 * SVCPU[LR]  = guest LR
85*4882a593Smuzhiyun	 *
86*4882a593Smuzhiyun	 */
87*4882a593Smuzhiyun
88*4882a593Smuzhiyun	/* Restore BATs */
89*4882a593Smuzhiyun
90*4882a593Smuzhiyun	/* Reload the host BAT values that LOAD_GUEST_SEGMENTS zeroed:
91*4882a593Smuzhiyun	   both the upper and lower word of every IBAT/DBAT pair 0-3. */
	/* Reload one IBAT/DBAT pair from the host save area at 'reg':
	 * each BAT pair occupies 16 bytes (IBATU, IBATL, DBATU, DBATL,
	 * one word each).  RA and RB are scratch registers.
	 */
92*4882a593Smuzhiyun#define KVM_LOAD_BAT(n, reg, RA, RB)	\
93*4882a593Smuzhiyun	lwz	RA,(n*16)+0(reg);	\
94*4882a593Smuzhiyun	lwz	RB,(n*16)+4(reg);	\
95*4882a593Smuzhiyun	mtspr	SPRN_IBAT##n##U,RA;	\
96*4882a593Smuzhiyun	mtspr	SPRN_IBAT##n##L,RB;	\
97*4882a593Smuzhiyun	lwz	RA,(n*16)+8(reg);	\
98*4882a593Smuzhiyun	lwz	RB,(n*16)+12(reg);	\
99*4882a593Smuzhiyun	mtspr	SPRN_DBAT##n##U,RA;	\
100*4882a593Smuzhiyun	mtspr	SPRN_DBAT##n##L,RB;	\
101*4882a593Smuzhiyun
	/* r9 = physical address of BATS (the kernel's saved host BAT
	 * table, defined outside this file); tophys() is needed because
	 * we still run with translation off (MSR[IR/DR] = 0).
	 */
102*4882a593Smuzhiyun	lis     r9, BATS@ha
103*4882a593Smuzhiyun	addi    r9, r9, BATS@l
104*4882a593Smuzhiyun	tophys(r9, r9)
105*4882a593Smuzhiyun	KVM_LOAD_BAT(0, r9, r10, r11)
106*4882a593Smuzhiyun	KVM_LOAD_BAT(1, r9, r10, r11)
107*4882a593Smuzhiyun	KVM_LOAD_BAT(2, r9, r10, r11)
108*4882a593Smuzhiyun	KVM_LOAD_BAT(3, r9, r10, r11)
109*4882a593Smuzhiyun
110*4882a593Smuzhiyun	/* Restore Segment Registers */
111*4882a593Smuzhiyun
112*4882a593Smuzhiyun	/* 0xc - 0xf */
113*4882a593Smuzhiyun
	/* Rebuild the four kernel segments by hand: loop 4 times with
	 * r4 = effective address of the segment (0xc0000000, stepping by
	 * 0x10000000) and r3 = SR value (VSID pattern 0x111 * sr_index,
	 * incremented by 0x111 per segment).  The ORed-in 0x20000000
	 * presumably sets the Kp protection-key bit to match the host's
	 * normal kernel-segment setup — TODO confirm against the host SR
	 * initialization.  Note r3 is clobbered from here on.
	 */
114*4882a593Smuzhiyun        li      r0, 4
115*4882a593Smuzhiyun        mtctr   r0
116*4882a593Smuzhiyun	LOAD_REG_IMMEDIATE(r3, 0x20000000 | (0x111 * 0xc))
117*4882a593Smuzhiyun        lis     r4, 0xc000
118*4882a593Smuzhiyun3:      mtsrin  r3, r4
119*4882a593Smuzhiyun        addi    r3, r3, 0x111     /* increment VSID */
120*4882a593Smuzhiyun        addis   r4, r4, 0x1000    /* address of next segment */
121*4882a593Smuzhiyun        bdnz    3b
122*4882a593Smuzhiyun
123*4882a593Smuzhiyun	/* 0x0 - 0xb */
124*4882a593Smuzhiyun
125*4882a593Smuzhiyun	/* 'current->mm' needs to be in r4 */
	/* r2 = host R2 (see header above); its MM field yields
	 * current->mm, converted to a physical address for the
	 * real-mode call below.
	 */
126*4882a593Smuzhiyun	tophys(r4, r2)
127*4882a593Smuzhiyun	lwz	r4, MM(r4)
128*4882a593Smuzhiyun	tophys(r4, r4)
129*4882a593Smuzhiyun	/* This only clobbers r0, r3, r4 and r5 */
	/* Let the host MMU code reinstall user segments 0x0-0xb by
	 * switching back to current->mm (passed in r4).
	 */
130*4882a593Smuzhiyun	bl	switch_mmu_context
131*4882a593Smuzhiyun
132*4882a593Smuzhiyun.endm
133