xref: /OK3568_Linux_fs/kernel/arch/mips/kvm/fpu.S (revision 4882a59341e53eb6f0b4789bf948001014eff981)
1*4882a593Smuzhiyun/*
2*4882a593Smuzhiyun * This file is subject to the terms and conditions of the GNU General Public
3*4882a593Smuzhiyun * License.  See the file "COPYING" in the main directory of this archive
4*4882a593Smuzhiyun * for more details.
5*4882a593Smuzhiyun *
6*4882a593Smuzhiyun * FPU context handling code for KVM.
7*4882a593Smuzhiyun *
8*4882a593Smuzhiyun * Copyright (C) 2015 Imagination Technologies Ltd.
9*4882a593Smuzhiyun */
10*4882a593Smuzhiyun
11*4882a593Smuzhiyun#include <asm/asm.h>
12*4882a593Smuzhiyun#include <asm/asm-offsets.h>
13*4882a593Smuzhiyun#include <asm/fpregdef.h>
14*4882a593Smuzhiyun#include <asm/mipsregs.h>
15*4882a593Smuzhiyun#include <asm/regdef.h>
16*4882a593Smuzhiyun
17*4882a593Smuzhiyun/* preprocessor replaces the fp in ".set fp=64" with $30 otherwise */
18*4882a593Smuzhiyun#undef fp
19*4882a593Smuzhiyun
20*4882a593Smuzhiyun	.set	noreorder
21*4882a593Smuzhiyun	.set	noat
22*4882a593Smuzhiyun
23*4882a593SmuzhiyunLEAF(__kvm_save_fpu)
	/*
	 * void __kvm_save_fpu(a0)
	 *
	 * Save the guest FPU register file into the VCPU_FPRn(a0) save
	 * slots.  a0 is the base of the vcpu FPU context (the VCPU_FPR*
	 * offsets from asm-offsets are relative to it — presumably
	 * struct kvm_vcpu_arch; confirm against the C callers).
	 * Clobbers t0 only.  The final store rides in the jr delay slot.
	 */
 24*4882a593Smuzhiyun	.set	push
 25*4882a593Smuzhiyun	SET_HARDFLOAT
	/* fp=64 so the assembler accepts references to odd-numbered doubles */
 26*4882a593Smuzhiyun	.set	fp=64
	/* Status.FR is CP0_STATUS bit 26; sll by 5 moves it into the sign
	 * bit so bgez can test it directly. */
 27*4882a593Smuzhiyun	mfc0	t0, CP0_STATUS
 28*4882a593Smuzhiyun	sll     t0, t0, 5			# is Status.FR set?
 29*4882a593Smuzhiyun	bgez    t0, 1f				# no: skip odd doubles
 30*4882a593Smuzhiyun	 nop				# branch delay slot
	/* Status.FR=1: the odd-numbered doubles are distinct registers and
	 * must be saved as well. */
 31*4882a593Smuzhiyun	sdc1	$f1,  VCPU_FPR1(a0)
 32*4882a593Smuzhiyun	sdc1	$f3,  VCPU_FPR3(a0)
 33*4882a593Smuzhiyun	sdc1	$f5,  VCPU_FPR5(a0)
 34*4882a593Smuzhiyun	sdc1	$f7,  VCPU_FPR7(a0)
 35*4882a593Smuzhiyun	sdc1	$f9,  VCPU_FPR9(a0)
 36*4882a593Smuzhiyun	sdc1	$f11, VCPU_FPR11(a0)
 37*4882a593Smuzhiyun	sdc1	$f13, VCPU_FPR13(a0)
 38*4882a593Smuzhiyun	sdc1	$f15, VCPU_FPR15(a0)
 39*4882a593Smuzhiyun	sdc1	$f17, VCPU_FPR17(a0)
 40*4882a593Smuzhiyun	sdc1	$f19, VCPU_FPR19(a0)
 41*4882a593Smuzhiyun	sdc1	$f21, VCPU_FPR21(a0)
 42*4882a593Smuzhiyun	sdc1	$f23, VCPU_FPR23(a0)
 43*4882a593Smuzhiyun	sdc1	$f25, VCPU_FPR25(a0)
 44*4882a593Smuzhiyun	sdc1	$f27, VCPU_FPR27(a0)
 45*4882a593Smuzhiyun	sdc1	$f29, VCPU_FPR29(a0)
 46*4882a593Smuzhiyun	sdc1	$f31, VCPU_FPR31(a0)
	/* Even-numbered doubles exist in both FR modes: always save them. */
 47*4882a593Smuzhiyun1:	sdc1	$f0,  VCPU_FPR0(a0)
 48*4882a593Smuzhiyun	sdc1	$f2,  VCPU_FPR2(a0)
 49*4882a593Smuzhiyun	sdc1	$f4,  VCPU_FPR4(a0)
 50*4882a593Smuzhiyun	sdc1	$f6,  VCPU_FPR6(a0)
 51*4882a593Smuzhiyun	sdc1	$f8,  VCPU_FPR8(a0)
 52*4882a593Smuzhiyun	sdc1	$f10, VCPU_FPR10(a0)
 53*4882a593Smuzhiyun	sdc1	$f12, VCPU_FPR12(a0)
 54*4882a593Smuzhiyun	sdc1	$f14, VCPU_FPR14(a0)
 55*4882a593Smuzhiyun	sdc1	$f16, VCPU_FPR16(a0)
 56*4882a593Smuzhiyun	sdc1	$f18, VCPU_FPR18(a0)
 57*4882a593Smuzhiyun	sdc1	$f20, VCPU_FPR20(a0)
 58*4882a593Smuzhiyun	sdc1	$f22, VCPU_FPR22(a0)
 59*4882a593Smuzhiyun	sdc1	$f24, VCPU_FPR24(a0)
 60*4882a593Smuzhiyun	sdc1	$f26, VCPU_FPR26(a0)
 61*4882a593Smuzhiyun	sdc1	$f28, VCPU_FPR28(a0)
 62*4882a593Smuzhiyun	jr	ra
 63*4882a593Smuzhiyun	 sdc1	$f30, VCPU_FPR30(a0)	# last store in jr delay slot
 64*4882a593Smuzhiyun	.set	pop
 65*4882a593Smuzhiyun	END(__kvm_save_fpu)
66*4882a593Smuzhiyun
67*4882a593SmuzhiyunLEAF(__kvm_restore_fpu)
	/*
	 * void __kvm_restore_fpu(a0)
	 *
	 * Load the guest FPU register file from the VCPU_FPRn(a0) save
	 * slots (mirror of __kvm_save_fpu).  a0 is the base of the vcpu
	 * FPU context; the VCPU_FPR* offsets are relative to it.
	 * Clobbers t0 only.  The final load rides in the jr delay slot.
	 */
 68*4882a593Smuzhiyun	.set	push
 69*4882a593Smuzhiyun	SET_HARDFLOAT
	/* fp=64 so the assembler accepts references to odd-numbered doubles */
 70*4882a593Smuzhiyun	.set	fp=64
	/* Status.FR is CP0_STATUS bit 26; sll by 5 moves it into the sign
	 * bit so bgez can test it directly. */
 71*4882a593Smuzhiyun	mfc0	t0, CP0_STATUS
 72*4882a593Smuzhiyun	sll     t0, t0, 5			# is Status.FR set?
 73*4882a593Smuzhiyun	bgez    t0, 1f				# no: skip odd doubles
 74*4882a593Smuzhiyun	 nop				# branch delay slot
	/* Status.FR=1: the odd-numbered doubles are distinct registers and
	 * must be restored as well. */
 75*4882a593Smuzhiyun	ldc1	$f1,  VCPU_FPR1(a0)
 76*4882a593Smuzhiyun	ldc1	$f3,  VCPU_FPR3(a0)
 77*4882a593Smuzhiyun	ldc1	$f5,  VCPU_FPR5(a0)
 78*4882a593Smuzhiyun	ldc1	$f7,  VCPU_FPR7(a0)
 79*4882a593Smuzhiyun	ldc1	$f9,  VCPU_FPR9(a0)
 80*4882a593Smuzhiyun	ldc1	$f11, VCPU_FPR11(a0)
 81*4882a593Smuzhiyun	ldc1	$f13, VCPU_FPR13(a0)
 82*4882a593Smuzhiyun	ldc1	$f15, VCPU_FPR15(a0)
 83*4882a593Smuzhiyun	ldc1	$f17, VCPU_FPR17(a0)
 84*4882a593Smuzhiyun	ldc1	$f19, VCPU_FPR19(a0)
 85*4882a593Smuzhiyun	ldc1	$f21, VCPU_FPR21(a0)
 86*4882a593Smuzhiyun	ldc1	$f23, VCPU_FPR23(a0)
 87*4882a593Smuzhiyun	ldc1	$f25, VCPU_FPR25(a0)
 88*4882a593Smuzhiyun	ldc1	$f27, VCPU_FPR27(a0)
 89*4882a593Smuzhiyun	ldc1	$f29, VCPU_FPR29(a0)
 90*4882a593Smuzhiyun	ldc1	$f31, VCPU_FPR31(a0)
	/* Even-numbered doubles exist in both FR modes: always restore them. */
 91*4882a593Smuzhiyun1:	ldc1	$f0,  VCPU_FPR0(a0)
 92*4882a593Smuzhiyun	ldc1	$f2,  VCPU_FPR2(a0)
 93*4882a593Smuzhiyun	ldc1	$f4,  VCPU_FPR4(a0)
 94*4882a593Smuzhiyun	ldc1	$f6,  VCPU_FPR6(a0)
 95*4882a593Smuzhiyun	ldc1	$f8,  VCPU_FPR8(a0)
 96*4882a593Smuzhiyun	ldc1	$f10, VCPU_FPR10(a0)
 97*4882a593Smuzhiyun	ldc1	$f12, VCPU_FPR12(a0)
 98*4882a593Smuzhiyun	ldc1	$f14, VCPU_FPR14(a0)
 99*4882a593Smuzhiyun	ldc1	$f16, VCPU_FPR16(a0)
100*4882a593Smuzhiyun	ldc1	$f18, VCPU_FPR18(a0)
101*4882a593Smuzhiyun	ldc1	$f20, VCPU_FPR20(a0)
102*4882a593Smuzhiyun	ldc1	$f22, VCPU_FPR22(a0)
103*4882a593Smuzhiyun	ldc1	$f24, VCPU_FPR24(a0)
104*4882a593Smuzhiyun	ldc1	$f26, VCPU_FPR26(a0)
105*4882a593Smuzhiyun	ldc1	$f28, VCPU_FPR28(a0)
106*4882a593Smuzhiyun	jr	ra
107*4882a593Smuzhiyun	 ldc1	$f30, VCPU_FPR30(a0)	# last load in jr delay slot
108*4882a593Smuzhiyun	.set	pop
109*4882a593Smuzhiyun	END(__kvm_restore_fpu)
110*4882a593Smuzhiyun
111*4882a593SmuzhiyunLEAF(__kvm_restore_fcsr)
	/*
	 * void __kvm_restore_fcsr(a0)
	 *
	 * Load the guest's saved FP control/status word from
	 * VCPU_FCR31(a0) into the hardware FCSR.  Clobbers t0 only.
	 * NOTE: the instruction layout below is offset-sensitive — see the
	 * comment ahead of the ctc1.  Do not insert or reorder instructions
	 * in this function.  (Comments emit no code, so they are safe.)
	 */
112*4882a593Smuzhiyun	.set	push
113*4882a593Smuzhiyun	SET_HARDFLOAT
114*4882a593Smuzhiyun	lw	t0, VCPU_FCR31(a0)
115*4882a593Smuzhiyun	/*
116*4882a593Smuzhiyun	 * The ctc1 must stay at this offset in __kvm_restore_fcsr.
117*4882a593Smuzhiyun	 * See kvm_mips_csr_die_notify() which handles t0 containing a value
118*4882a593Smuzhiyun	 * which triggers an FP Exception, which must be stepped over and
119*4882a593Smuzhiyun	 * ignored since the set cause bits must remain there for the guest.
120*4882a593Smuzhiyun	 */
121*4882a593Smuzhiyun	ctc1	t0, fcr31
122*4882a593Smuzhiyun	jr	ra
123*4882a593Smuzhiyun	 nop				# jr delay slot
124*4882a593Smuzhiyun	.set	pop
125*4882a593Smuzhiyun	END(__kvm_restore_fcsr)
126