xref: /OK3568_Linux_fs/kernel/drivers/gpu/arm/bifrost/mali_kbase_smc.c (revision 4882a59341e53eb6f0b4789bf948001014eff981)
// SPDX-License-Identifier: GPL-2.0 WITH Linux-syscall-note
/*
 *
 * (C) COPYRIGHT 2015, 2018, 2020-2021 ARM Limited. All rights reserved.
 *
 * This program is free software and is provided to you under the terms of the
 * GNU General Public License version 2 as published by the Free Software
 * Foundation, and any use by you of this program is subject to the terms
 * of such GNU license.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, you can access it online at
 * http://www.gnu.org/licenses/gpl-2.0.html.
 *
 */

#if IS_ENABLED(CONFIG_ARM64)

#include <mali_kbase.h>
#include <mali_kbase_smc.h>

#include <linux/compiler.h>

/* __asmeq is not available on Kernel versions >= 4.20 */
#ifndef __asmeq
/*
 * This is used to ensure the compiler did actually allocate the register we
 * asked it for some inline assembly sequences.  Apparently we can't trust the
 * compiler from one version to another so a bit of paranoia won't hurt.  This
 * string is meant to be concatenated with the inline asm string and will
 * cause compilation to stop on mismatch.  (for details, see gcc PR 15089)
 */
#define __asmeq(x, y)  ".ifnc " x "," y " ; .err ; .endif\n\t"
#endif

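/**
 * invoke_smc_fid - Perform a secure monitor call with a raw function ID
 * @function_id: SMC function identifier, passed in x0 (the result is also
 *               returned in x0)
 * @arg0: first argument, passed in x1
 * @arg1: second argument, passed in x2
 * @arg2: third argument, passed in x3
 *
 * The __asmeq() checks below verify at build time that the compiler really
 * placed each operand in the register asked for. As a sketch of how this
 * works: with operand %0 tied to x0 by the register variable, __asmeq("%0",
 * "x0") expands to the directive ".ifnc x0,x0 ; .err ; .endif", which
 * assembles cleanly; had the compiler chosen any other register, the .ifnc
 * comparison would fail and .err would abort the build.
 *
 * Return: the value left in x0 by the secure world.
 */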
static noinline u64 invoke_smc_fid(u64 function_id,
		u64 arg0, u64 arg1, u64 arg2)
{
	register u64 x0 asm("x0") = function_id;
	register u64 x1 asm("x1") = arg0;
	register u64 x2 asm("x2") = arg1;
	register u64 x3 asm("x3") = arg2;

	asm volatile(
			__asmeq("%0", "x0")
			__asmeq("%1", "x1")
			__asmeq("%2", "x2")
			__asmeq("%3", "x3")
			"smc    #0\n"
			: "+r" (x0)
			: "r" (x1), "r" (x2), "r" (x3));

	return x0;
}

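/**
 * kbase_invoke_smc_fid - Issue an SMC Fast Call using a pre-built function ID
 * @fid: complete SMC function identifier; must have the Fast Call bit
 *       (SMC_FAST_CALL, bit 31) set and bits 23:16 clear
 * @arg0: first argument, passed in x1
 * @arg1: second argument, passed in x2
 * @arg2: third argument, passed in x3
 *
 * Return: the value returned by the secure world in x0.
 */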
u64 kbase_invoke_smc_fid(u32 fid, u64 arg0, u64 arg1, u64 arg2)
{
	/* Must be a Fast Call (bit 31 set) */
	KBASE_DEBUG_ASSERT(fid & SMC_FAST_CALL);
	/* Bits 23:16 must be zero for Fast Calls */
	KBASE_DEBUG_ASSERT((fid & (0xFF << 16)) == 0);

	return invoke_smc_fid(fid, arg0, arg1, arg2);
}

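/**
 * kbase_invoke_smc - Build an SMC Fast Call function ID and invoke it
 * @oen: Owning Entity Number, already shifted into bits 29:24; only bits
 *       covered by SMC_OEN_MASK may be set
 * @function_number: function number, placed in bits 15:0
 * @smc64: true to issue an SMC64 call, false for SMC32
 * @arg0: first argument, passed in x1
 * @arg1: second argument, passed in x2
 * @arg2: third argument, passed in x3
 *
 * For illustration only (the OEN and function number below are hypothetical
 * values, not ones defined by this driver):
 *
 *	u64 ret = kbase_invoke_smc(2u << 24, 0x100, true, 0, 0, 0);
 *
 * builds the fast SMC64 function ID 0xC2000100 and returns whatever the
 * secure world leaves in x0.
 *
 * Return: the value returned by the secure world in x0.
 */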
u64 kbase_invoke_smc(u32 oen, u16 function_number, bool smc64,
		u64 arg0, u64 arg1, u64 arg2)
{
	u32 fid = 0;

	/* Only the six OEN bits (29:24) may be set. */
	KBASE_DEBUG_ASSERT((oen & ~SMC_OEN_MASK) == 0);

	fid |= SMC_FAST_CALL; /* Bit 31: Fast Call */
	if (smc64)
		fid |= SMC_64; /* Bit 30: 1=SMC64, 0=SMC32 */
	fid |= oen; /* Bits 29:24: OEN */
	/* Bits 23:16: must be zero for Fast Calls */
	fid |= (function_number); /* Bits 15:0: function number */

	return kbase_invoke_smc_fid(fid, arg0, arg1, arg2);
}

#endif /* CONFIG_ARM64 */