/* SPDX-License-Identifier: GPL-2.0-only */
/*
 *
 *	verify_cpu.S - Code for cpu long mode and SSE verification. This
 *	code has been borrowed from boot/setup.S and was introduced by
 *	Andi Kleen.
 *
 *	Copyright (c) 2007  Andi Kleen (ak@suse.de)
 *	Copyright (c) 2007  Eric Biederman (ebiederm@xmission.com)
 *	Copyright (c) 2007  Vivek Goyal (vgoyal@in.ibm.com)
 *	Copyright (c) 2010  Kees Cook (kees.cook@canonical.com)
 *
 *	This is common code for verifying whether the CPU supports long
 *	mode and SSE. It is not called directly; instead, this file is
 *	included at various places and compiled in that context.
 *	This file is expected to run in 32-bit code.  Current users:
 *
 *	arch/x86/boot/compressed/head_64.S: Boot cpu verification
 *	arch/x86/kernel/trampoline_64.S: secondary processor verification
 *	arch/x86/kernel/head_32.S: processor startup
 *
 *	verify_cpu returns the status of long mode and SSE in register %eax:
 *		0: Success    1: Failure
 *
 *	On Intel, the XD_DISABLE flag will be cleared as a side effect.
 *
 *	The caller needs to check the error code and act accordingly,
 *	e.g. display a message or halt.
 */
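
/*
 *	Illustrative caller sketch only (not part of the real call sites;
 *	the error label below is hypothetical). After including this file,
 *	the users listed above do roughly:
 *
 *		call	verify_cpu
 *		testl	%eax, %eax		# 0 = success, 1 = failure
 *		jnz	.Lno_longmode		# hypothetical error path
 */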

#include <asm/cpufeatures.h>
#include <asm/msr-index.h>

SYM_FUNC_START_LOCAL(verify_cpu)
	pushf				# Save caller passed flags
	push	$0			# Kill any dangerous flags
	popf

#ifndef __x86_64__
	# 64-bit kernels can assume CPUID exists; only 32-bit code needs
	# the classic EFLAGS.ID toggle test.
	pushfl				# standard way to check for cpuid
	popl	%eax			# get current EFLAGS
	movl	%eax,%ebx		# save original EFLAGS value
	xorl	$0x200000,%eax		# toggle the ID bit (bit 21)
	pushl	%eax
	popfl				# try to write the toggled value back
	pushfl
	popl	%eax			# ... and read EFLAGS again
	cmpl	%eax,%ebx		# unchanged means the ID bit is not writable
	jz	.Lverify_cpu_no_longmode	# cpu has no cpuid
#endif

	movl	$0x0,%eax		# See if cpuid 1 is implemented
	cpuid
	cmpl	$0x1,%eax		# maximum supported standard leaf
	jb	.Lverify_cpu_no_longmode	# no cpuid 1

	xor	%di,%di			# %di != 0 will mean the CPU is from AMD
	cmpl	$0x68747541,%ebx	# AuthenticAMD? ("Auth" in %ebx,
	jnz	.Lverify_cpu_noamd
	cmpl	$0x69746e65,%edx	#  "enti" in %edx,
	jnz	.Lverify_cpu_noamd
	cmpl	$0x444d4163,%ecx	#  "cAMD" in %ecx)
	jnz	.Lverify_cpu_noamd
	mov	$1,%di			# cpu is from AMD
	jmp	.Lverify_cpu_check

.Lverify_cpu_noamd:
	cmpl	$0x756e6547,%ebx	# GenuineIntel? ("Genu" in %ebx,
	jnz	.Lverify_cpu_check
	cmpl	$0x49656e69,%edx	#  "ineI" in %edx,
	jnz	.Lverify_cpu_check
	cmpl	$0x6c65746e,%ecx	#  "ntel" in %ecx)
	jnz	.Lverify_cpu_check

	# only touch IA32_MISC_ENABLE when:
	# family > 6 || (family == 6 && model >= 0xd)
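	#
	# CPUID.1:EAX layout: stepping [3:0], model [7:4], family [11:8],
	# extended model [19:16], extended family [27:20]. As an assumed,
	# illustrative example, EAX = 0x000306a9 (family 6, extended model
	# 3, model 0xa) makes the family test below see 6 and the model
	# test see a value >= 0xd (the non-zero extended model lands in
	# bits 15:12 after the shift), so the XD_DISABLE path is taken.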
	movl	$0x1, %eax		# check CPU family and model
	cpuid
	movl	%eax, %ecx		# keep a copy for the model check

	andl	$0x0ff00f00, %eax	# mask family and extended family
	shrl	$8, %eax
	cmpl	$6, %eax
	ja	.Lverify_cpu_clear_xd	# family > 6, ok
	jb	.Lverify_cpu_check	# family < 6, skip

	andl	$0x000f00f0, %ecx	# mask model and extended model
	shrl	$4, %ecx
	cmpl	$0xd, %ecx
	jb	.Lverify_cpu_check	# family == 6, model < 0xd, skip

.Lverify_cpu_clear_xd:
	movl	$MSR_IA32_MISC_ENABLE, %ecx
	rdmsr
	btrl	$2, %edx		# clear MSR_IA32_MISC_ENABLE_XD_DISABLE (bit 34)
	jnc	.Lverify_cpu_check	# only write MSR if bit was changed
	wrmsr

.Lverify_cpu_check:
	movl    $0x1,%eax		# Does the cpu have what it takes
	cpuid
	andl	$REQUIRED_MASK0,%edx	# keep only the required feature bits
	xorl	$REQUIRED_MASK0,%edx	# non-zero iff a required bit is missing
	jnz	.Lverify_cpu_no_longmode

	movl    $0x80000000,%eax	# See if extended cpuid is implemented
	cpuid
	cmpl    $0x80000001,%eax	# maximum supported extended leaf
	jb      .Lverify_cpu_no_longmode	# no extended cpuid

	movl    $0x80000001,%eax	# Does the cpu have what it takes
	cpuid
	andl    $REQUIRED_MASK1,%edx	# keep only the required feature bits
	xorl    $REQUIRED_MASK1,%edx	# non-zero iff a required bit is missing
	jnz     .Lverify_cpu_no_longmode

.Lverify_cpu_sse_test:
	movl	$1,%eax
	cpuid
	andl	$SSE_MASK,%edx
	cmpl	$SSE_MASK,%edx		# are all required SSE bits present?
	je	.Lverify_cpu_sse_ok
	test	%di,%di
	jz	.Lverify_cpu_no_longmode	# only try to force SSE on AMD
	movl	$MSR_K7_HWCR,%ecx
	rdmsr
	btr	$15,%eax		# clear the SSE-disable bit to enable SSE
	wrmsr
	xor	%di,%di			# don't loop
	jmp	.Lverify_cpu_sse_test	# try again

.Lverify_cpu_no_longmode:
	popf				# Restore caller passed flags
	movl $1,%eax			# return 1: failure
	RET
.Lverify_cpu_sse_ok:
	popf				# Restore caller passed flags
	xorl %eax, %eax			# return 0: success
	RET
SYM_FUNC_END(verify_cpu)