/*
 * Copyright (c) 2024, Arm Limited and Contributors. All rights reserved.
 * Copyright (c) 2022, Google LLC. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#ifndef SIMD_CTX_H
#define SIMD_CTX_H

#include <lib/utils_def.h>

/*******************************************************************************
 * Constants that allow assembler code to access members of the 'simd_context'
 * structure at their correct offsets.
 ******************************************************************************/

#if CTX_INCLUDE_FPREGS

#define SIMD_VECTOR_LEN_BYTES	U(16) /* 128-bit fixed vector length for FPU */

#define CTX_SIMD_VECTORS	U(0)
/* There are 32 vector registers, each of size SIMD_VECTOR_LEN_BYTES. */
#define CTX_SIMD_FPSR		(CTX_SIMD_VECTORS + (32 * SIMD_VECTOR_LEN_BYTES))
#define CTX_SIMD_FPCR		(CTX_SIMD_FPSR + 8)

#if CTX_INCLUDE_AARCH32_REGS
#define CTX_SIMD_FPEXC32	(CTX_SIMD_FPCR + 8)
#endif /* CTX_INCLUDE_AARCH32_REGS */
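/*
 * Illustrative (hypothetical) assembler usage of the offsets above. This
 * sketch is not taken from this codebase; it assumes x0 holds a pointer to
 * the SIMD context:
 *
 *	ldp	q0, q1, [x0, #CTX_SIMD_VECTORS]	// load first two vector regs
 *	ldr	x1, [x0, #CTX_SIMD_FPSR]	// load saved FPSR value
 *	msr	fpsr, x1			// restore FPSR
 */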
#ifndef __ASSEMBLER__

#include <stdbool.h>
#include <stdint.h>

#include <lib/cassert.h>

/*
 * Please don't change the order of fields in this struct, as that may violate
 * alignment requirements and affect how assembly code accesses members of
 * this struct.
 */
typedef struct {
	uint8_t vectors[32][SIMD_VECTOR_LEN_BYTES];
	uint8_t fpsr[8];
	uint8_t fpcr[8];
#if CTX_INCLUDE_FPREGS && CTX_INCLUDE_AARCH32_REGS
	/* 16 bytes to align to the next 16-byte boundary */
	uint8_t fpexc32_el2[16];
#endif
} simd_regs_t __attribute__((aligned(16)));

CASSERT(CTX_SIMD_VECTORS == __builtin_offsetof(simd_regs_t, vectors),
	assert_vectors_mismatch);

CASSERT(CTX_SIMD_FPSR == __builtin_offsetof(simd_regs_t, fpsr),
	assert_fpsr_mismatch);

CASSERT(CTX_SIMD_FPCR == __builtin_offsetof(simd_regs_t, fpcr),
	assert_fpcr_mismatch);

#if CTX_INCLUDE_FPREGS && CTX_INCLUDE_AARCH32_REGS
CASSERT(CTX_SIMD_FPEXC32 == __builtin_offsetof(simd_regs_t, fpexc32_el2),
	assert_fpexc32_mismatch);
#endif

/*
 * Save/restore the SIMD register context of the given security state.
 * 'hint_sve' hints that the caller has no live SVE state, allowing an
 * optimized save path.
 */
void simd_ctx_save(uint32_t security_state, bool hint_sve);
void simd_ctx_restore(uint32_t security_state);

#endif /* __ASSEMBLER__ */

#endif /* CTX_INCLUDE_FPREGS */

#endif /* SIMD_CTX_H */
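/*
 * Illustrative (hypothetical) call sequence from an EL3 world-switch path.
 * The SECURE/NON_SECURE values are assumed to come from the context
 * management code and are not defined in this header:
 *
 *	simd_ctx_save(NON_SECURE, false);	// save outgoing world's SIMD state
 *	...switch worlds...
 *	simd_ctx_restore(SECURE);		// restore incoming world's SIMD state
 */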