/*
 *
 * (C) COPYRIGHT 2015 ARM Limited. All rights reserved.
 *
 * This program is free software and is provided to you under the terms of the
 * GNU General Public License version 2 as published by the Free Software
 * Foundation, and any use by you of this program is subject to the terms
 * of such GNU licence.
 *
 * A copy of the licence is included with the program, and can also be obtained
 * from Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 *
 */

#ifdef CONFIG_ARM64

#include <mali_kbase.h>
#include <mali_kbase_smc.h>

#include <linux/compiler.h>
/* __asmeq is not available on kernel versions >= 4.20 */
#ifndef __asmeq
/*
 * This is used to ensure the compiler did actually allocate the register we
 * asked it for some inline assembly sequences. Apparently we can't trust the
 * compiler from one version to another so a bit of paranoia won't hurt. This
 * string is meant to be concatenated with the inline asm string and will
 * cause compilation to stop on mismatch. (For details, see gcc PR 15089.)
 */
#define __asmeq(x, y) ".ifnc " x "," y " ; .err ; .endif\n\t"
#endif

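/**
 * invoke_smc_fid - issue the SMC instruction for an already encoded call
 * @function_id: encoded SMC function identifier, placed in x0
 * @arg0: first argument, placed in x1
 * @arg1: second argument, placed in x2
 * @arg2: third argument, placed in x3
 *
 * Per the Arm SMC Calling Convention the function identifier goes in x0,
 * the arguments in x1 upwards, and the primary result comes back in x0.
 *
 * Return: the value left in x0 by the secure side.
 */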
static noinline u64 invoke_smc_fid(u64 function_id,
		u64 arg0, u64 arg1, u64 arg2)
{
	register u64 x0 asm("x0") = function_id;
	register u64 x1 asm("x1") = arg0;
	register u64 x2 asm("x2") = arg1;
	register u64 x3 asm("x3") = arg2;

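	/*
	 * "+r" (x0) makes x0 both an input (the function ID) and an output
	 * (the return value), matching the SMC calling convention. The
	 * __asmeq() strings make the assembler error out if the compiler
	 * did not place the operands in the expected registers.
	 */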
	asm volatile(
			__asmeq("%0", "x0")
			__asmeq("%1", "x1")
			__asmeq("%2", "x2")
			__asmeq("%3", "x3")
			"smc #0\n"
			: "+r" (x0)
			: "r" (x1), "r" (x2), "r" (x3));

	return x0;
}

u64 kbase_invoke_smc_fid(u32 fid, u64 arg0, u64 arg1, u64 arg2)
{
	/* Must be a fast call (bit 31 set) */
	KBASE_DEBUG_ASSERT(fid & SMC_FAST_CALL);
	/* Bits 23:16 must be zero for fast calls */
	KBASE_DEBUG_ASSERT((fid & (0xFF << 16)) == 0);

	return invoke_smc_fid(fid, arg0, arg1, arg2);
}

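/*
 * Example encoding, assuming the SMC_* definitions from mali_kbase_smc.h
 * (SMC_FAST_CALL = bit 31, SMC_64 = bit 30, OEN constants pre-shifted to
 * bits 29:24):
 *
 *	kbase_invoke_smc(SMC_OEN_SIP, 0x1, true, a0, a1, a2)
 *
 * yields fid = 0x80000000 | 0x40000000 | 0x02000000 | 0x1 = 0xC2000001,
 * i.e. a fast SMC64 call to function 1 of the SiP (OEN 2) service.
 */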
u64 kbase_invoke_smc(u32 oen, u16 function_number, bool smc64,
		u64 arg0, u64 arg1, u64 arg2)
{
	u32 fid = 0;

	/* Only the six OEN bits may be set. */
	KBASE_DEBUG_ASSERT((oen & ~SMC_OEN_MASK) == 0);

	fid |= SMC_FAST_CALL; /* Bit 31: Fast call */
	if (smc64)
		fid |= SMC_64; /* Bit 30: 1=SMC64, 0=SMC32 */
	fid |= oen; /* Bits 29:24: OEN, passed in already shifted */
	/* Bits 23:16: must be zero for fast calls */
	fid |= (function_number); /* Bits 15:0: function number */

	return kbase_invoke_smc_fid(fid, arg0, arg1, arg2);
}

#endif /* CONFIG_ARM64 */