1*4882a593Smuzhiyun // SPDX-License-Identifier: GPL-2.0
2*4882a593Smuzhiyun /*
3*4882a593Smuzhiyun * Hyp portion of the (not much of an) Emulation layer for 32bit guests.
4*4882a593Smuzhiyun *
5*4882a593Smuzhiyun * Copyright (C) 2012,2013 - ARM Ltd
6*4882a593Smuzhiyun * Author: Marc Zyngier <marc.zyngier@arm.com>
7*4882a593Smuzhiyun *
8*4882a593Smuzhiyun * based on arch/arm/kvm/emulate.c
9*4882a593Smuzhiyun * Copyright (C) 2012 - Virtual Open Systems and Columbia University
10*4882a593Smuzhiyun * Author: Christoffer Dall <c.dall@virtualopensystems.com>
11*4882a593Smuzhiyun */
12*4882a593Smuzhiyun
13*4882a593Smuzhiyun #include <linux/kvm_host.h>
14*4882a593Smuzhiyun #include <asm/kvm_emulate.h>
15*4882a593Smuzhiyun #include <asm/kvm_hyp.h>
16*4882a593Smuzhiyun
/*
 * stolen from arch/arm/kernel/opcodes.c
 *
 * condition code lookup table
 * index into the table is test code: EQ, NE, ... LT, GT, AL, NV
 *
 * bit position in short is condition code: NZCV
 *
 * Each entry is a 16-bit truth table: bit N of cc_map[cond] is set iff
 * the condition passes when the current NZCV flags (CPSR[31:28]) equal N.
 * Evaluating a condition is therefore a single table lookup and shift.
 */
static const unsigned short cc_map[16] = {
	0xF0F0,			/* EQ == Z set            */
	0x0F0F,			/* NE                     */
	0xCCCC,			/* CS == C set            */
	0x3333,			/* CC                     */
	0xFF00,			/* MI == N set            */
	0x00FF,			/* PL                     */
	0xAAAA,			/* VS == V set            */
	0x5555,			/* VC                     */
	0x0C0C,			/* HI == C set && Z clear */
	0xF3F3,			/* LS == C clear || Z set */
	0xAA55,			/* GE == (N==V)           */
	0x55AA,			/* LT == (N!=V)           */
	0x0A05,			/* GT == (!Z && (N==V))   */
	0xF5FA,			/* LE == (Z || (N!=V))    */
	0xFFFF,			/* AL always              */
	0			/* NV                     */
};
43*4882a593Smuzhiyun
44*4882a593Smuzhiyun /*
45*4882a593Smuzhiyun * Check if a trapped instruction should have been executed or not.
46*4882a593Smuzhiyun */
kvm_condition_valid32(const struct kvm_vcpu * vcpu)47*4882a593Smuzhiyun bool kvm_condition_valid32(const struct kvm_vcpu *vcpu)
48*4882a593Smuzhiyun {
49*4882a593Smuzhiyun unsigned long cpsr;
50*4882a593Smuzhiyun u32 cpsr_cond;
51*4882a593Smuzhiyun int cond;
52*4882a593Smuzhiyun
53*4882a593Smuzhiyun /* Top two bits non-zero? Unconditional. */
54*4882a593Smuzhiyun if (kvm_vcpu_get_esr(vcpu) >> 30)
55*4882a593Smuzhiyun return true;
56*4882a593Smuzhiyun
57*4882a593Smuzhiyun /* Is condition field valid? */
58*4882a593Smuzhiyun cond = kvm_vcpu_get_condition(vcpu);
59*4882a593Smuzhiyun if (cond == 0xE)
60*4882a593Smuzhiyun return true;
61*4882a593Smuzhiyun
62*4882a593Smuzhiyun cpsr = *vcpu_cpsr(vcpu);
63*4882a593Smuzhiyun
64*4882a593Smuzhiyun if (cond < 0) {
65*4882a593Smuzhiyun /* This can happen in Thumb mode: examine IT state. */
66*4882a593Smuzhiyun unsigned long it;
67*4882a593Smuzhiyun
68*4882a593Smuzhiyun it = ((cpsr >> 8) & 0xFC) | ((cpsr >> 25) & 0x3);
69*4882a593Smuzhiyun
70*4882a593Smuzhiyun /* it == 0 => unconditional. */
71*4882a593Smuzhiyun if (it == 0)
72*4882a593Smuzhiyun return true;
73*4882a593Smuzhiyun
74*4882a593Smuzhiyun /* The cond for this insn works out as the top 4 bits. */
75*4882a593Smuzhiyun cond = (it >> 4);
76*4882a593Smuzhiyun }
77*4882a593Smuzhiyun
78*4882a593Smuzhiyun cpsr_cond = cpsr >> 28;
79*4882a593Smuzhiyun
80*4882a593Smuzhiyun if (!((cc_map[cond] >> cpsr_cond) & 1))
81*4882a593Smuzhiyun return false;
82*4882a593Smuzhiyun
83*4882a593Smuzhiyun return true;
84*4882a593Smuzhiyun }
85*4882a593Smuzhiyun
86*4882a593Smuzhiyun /**
87*4882a593Smuzhiyun * adjust_itstate - adjust ITSTATE when emulating instructions in IT-block
88*4882a593Smuzhiyun * @vcpu: The VCPU pointer
89*4882a593Smuzhiyun *
90*4882a593Smuzhiyun * When exceptions occur while instructions are executed in Thumb IF-THEN
91*4882a593Smuzhiyun * blocks, the ITSTATE field of the CPSR is not advanced (updated), so we have
92*4882a593Smuzhiyun * to do this little bit of work manually. The fields map like this:
93*4882a593Smuzhiyun *
94*4882a593Smuzhiyun * IT[7:0] -> CPSR[26:25],CPSR[15:10]
95*4882a593Smuzhiyun */
kvm_adjust_itstate(struct kvm_vcpu * vcpu)96*4882a593Smuzhiyun static void kvm_adjust_itstate(struct kvm_vcpu *vcpu)
97*4882a593Smuzhiyun {
98*4882a593Smuzhiyun unsigned long itbits, cond;
99*4882a593Smuzhiyun unsigned long cpsr = *vcpu_cpsr(vcpu);
100*4882a593Smuzhiyun bool is_arm = !(cpsr & PSR_AA32_T_BIT);
101*4882a593Smuzhiyun
102*4882a593Smuzhiyun if (is_arm || !(cpsr & PSR_AA32_IT_MASK))
103*4882a593Smuzhiyun return;
104*4882a593Smuzhiyun
105*4882a593Smuzhiyun cond = (cpsr & 0xe000) >> 13;
106*4882a593Smuzhiyun itbits = (cpsr & 0x1c00) >> (10 - 2);
107*4882a593Smuzhiyun itbits |= (cpsr & (0x3 << 25)) >> 25;
108*4882a593Smuzhiyun
109*4882a593Smuzhiyun /* Perform ITAdvance (see page A2-52 in ARM DDI 0406C) */
110*4882a593Smuzhiyun if ((itbits & 0x7) == 0)
111*4882a593Smuzhiyun itbits = cond = 0;
112*4882a593Smuzhiyun else
113*4882a593Smuzhiyun itbits = (itbits << 1) & 0x1f;
114*4882a593Smuzhiyun
115*4882a593Smuzhiyun cpsr &= ~PSR_AA32_IT_MASK;
116*4882a593Smuzhiyun cpsr |= cond << 13;
117*4882a593Smuzhiyun cpsr |= (itbits & 0x1c) << (10 - 2);
118*4882a593Smuzhiyun cpsr |= (itbits & 0x3) << 25;
119*4882a593Smuzhiyun *vcpu_cpsr(vcpu) = cpsr;
120*4882a593Smuzhiyun }
121*4882a593Smuzhiyun
122*4882a593Smuzhiyun /**
123*4882a593Smuzhiyun * kvm_skip_instr - skip a trapped instruction and proceed to the next
124*4882a593Smuzhiyun * @vcpu: The vcpu pointer
125*4882a593Smuzhiyun */
kvm_skip_instr32(struct kvm_vcpu * vcpu)126*4882a593Smuzhiyun void kvm_skip_instr32(struct kvm_vcpu *vcpu)
127*4882a593Smuzhiyun {
128*4882a593Smuzhiyun u32 pc = *vcpu_pc(vcpu);
129*4882a593Smuzhiyun bool is_thumb;
130*4882a593Smuzhiyun
131*4882a593Smuzhiyun is_thumb = !!(*vcpu_cpsr(vcpu) & PSR_AA32_T_BIT);
132*4882a593Smuzhiyun if (is_thumb && !kvm_vcpu_trap_il_is32bit(vcpu))
133*4882a593Smuzhiyun pc += 2;
134*4882a593Smuzhiyun else
135*4882a593Smuzhiyun pc += 4;
136*4882a593Smuzhiyun
137*4882a593Smuzhiyun *vcpu_pc(vcpu) = pc;
138*4882a593Smuzhiyun
139*4882a593Smuzhiyun kvm_adjust_itstate(vcpu);
140*4882a593Smuzhiyun }
141