/* SPDX-License-Identifier: BSD-2-Clause */
/*
 * Copyright (c) 2021, EPAM Systems
 */

#include <asm.S>
#include <arm.h>
#include <arm64_macros.S>
#include <generated/asm-defines.h>
#include <kernel/cache_helpers.h>

/* uint32_t __plat_romapi_wrapper(paddr_t func, uint64_t arg1, uint64_t arg2,
 *				  uint64_t arg3)
 * Call the MaskROM function func(arg1, arg2, arg3).
 * The MMU must be disabled before calling any MaskROM API function, so this
 * wrapper temporarily turns it off around the call.
 */
FUNC __plat_romapi_wrapper , : , .identity_map

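	/* Save LR, FP and the callee-saved registers used below */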
	push	fp,  lr
	push	x19, x20
	push	x21, x22
	push	x23, x24

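	/* Stash the arguments in callee-saved registers across the calls below */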
	mov	x19, x0
	mov	x20, x1
	mov	x21, x2
	mov	x22, x3

	/* Get the PA of the stack pointer: we still need a stack with the MMU off */
	mov	x0, sp
#ifdef CFG_CORE_ASLR
	/*
	 * We are running from the identity mapping, so we can't use bl here:
	 * the assembler would generate an address relative to this code, and
	 * virt_to_phys() is not identity mapped. Add the ASLR map offset to
	 * reach its runtime virtual address instead.
	 */
	adr_l   x9, virt_to_phys
	ldr     x10, boot_mmu_config + CORE_MMU_CONFIG_MAP_OFFSET
	add     x9, x9, x10
	blr	x9
#else
	bl	virt_to_phys
#endif
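	/* Keep the physical stack address in x23 for use while the MMU is off */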
	mov	x23, x0

	/* We are about to disable the MMU. Make sure all writes have reached memory */
	mov     x0, #DCACHE_OP_CLEAN
#ifdef CFG_CORE_ASLR
	/* See the comment above */
	adr_l   x9, dcache_op_all
	ldr     x10, boot_mmu_config + CORE_MMU_CONFIG_MAP_OFFSET
	add     x9, x9, x10
	blr	x9
#else
	bl	dcache_op_all
#endif

	/* Disable MMU */
	mrs	x9, sctlr_el1
	bic	x9, x9, #SCTLR_M
	bic	x9, x9, #SCTLR_C
	msr	sctlr_el1, x9
	isb
	/* Invalidate instruction cache and branch predictor */
	ic	ialluis
	dsb	ish	/* ensure that maintenance operations are seen */
	isb

	/* Save the old virtual SP in x24 and switch to the physical stack address */
	mov	x24, sp
	mov	sp, x23

	/* Call the MaskROM function */
	mov	x0, x20		/* x20: uint64_t arg1 */
	mov	x1, x21		/* x21: uint64_t arg2 */
	mov	x2, x22		/* x22: uint64_t arg3 */
	blr	x19		/* x19: paddr_t func */
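	/* The MaskROM return value is left in x0 and returned to our caller */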

	/* Restore the virtual stack pointer */
	mov	sp, x24

	/* Enable MMU */
	mrs	x9, sctlr_el1
	orr	x9, x9, #SCTLR_M
	orr	x9, x9, #SCTLR_C
	msr	sctlr_el1, x9
	isb

	/* Invalidate instruction cache and branch predictor */
	ic	iallu
	isb

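	/* Restore callee-saved registers, x0 still holds the return value */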
	pop	x23, x24
	pop	x21, x22
	pop	x19, x20
	pop	fp, lr
	ret
END_FUNC __plat_romapi_wrapper
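
/*
 * A minimal usage sketch (hypothetical, not part of this file), assuming a C
 * caller that knows the physical address of the MaskROM entry point:
 *
 *	uint32_t __plat_romapi_wrapper(paddr_t func, uint64_t arg1,
 *				       uint64_t arg2, uint64_t arg3);
 *
 *	res = __plat_romapi_wrapper(romapi_func_pa, arg1, arg2, arg3);
 *
 * romapi_func_pa is a placeholder name for the MaskROM function's physical
 * address; the actual callers live in the platform's C code.
 */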