/*
 * Copyright (c) 2017, Linaro Limited
 * All rights reserved.
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#include <crypto/internal_aes-gcm.h>
#include <io.h>
#include <string_ext.h>
#include <string.h>
#include <tee_api_types.h>
#include <types_ext.h>
#include <utee_defines.h>
#include <util.h>

static void xor_buf(uint8_t *dst, const uint8_t *src, size_t len)
{
	size_t n;

	for (n = 0; n < len; n++)
		dst[n] ^= src[n];
}

static bool ptr_is_block_aligned(const void *p)
{
	return !((vaddr_t)p & (TEE_AES_BLOCK_SIZE - 1));
}

/* GHASH all full blocks of @data and zero pad a trailing partial block */
static void ghash_update_pad_zero(struct internal_aes_gcm_ctx *ctx,
				  const uint8_t *data, size_t len)
{
	size_t n = len / TEE_AES_BLOCK_SIZE;
	uint64_t block[2];

	if (n) {
		if (ptr_is_block_aligned(data)) {
			internal_aes_gcm_ghash_update(ctx, NULL, data, n);
		} else {
			size_t m;

			for (m = 0; m < n; m++) {
				memcpy(block, data + m * sizeof(block),
				       sizeof(block));
				internal_aes_gcm_ghash_update(ctx, NULL,
							      (void *)block, 1);
			}
		}
	}

	if (len - n * TEE_AES_BLOCK_SIZE) {
		memset(block, 0, sizeof(block));
		memcpy(block, data + n * TEE_AES_BLOCK_SIZE,
		       len - n * TEE_AES_BLOCK_SIZE);
		internal_aes_gcm_ghash_update(ctx, block, NULL, 0);
	}
}

/* GHASH a length block built from @l1 and @l2 (bytes, encoded as bits) */
static void ghash_update_lengths(struct internal_aes_gcm_ctx *ctx, uint32_t l1,
				 uint32_t l2)
{
	uint64_t len_fields[2] = {
		TEE_U64_TO_BIG_ENDIAN(l1 * 8),
		TEE_U64_TO_BIG_ENDIAN(l2 * 8)
	};

	COMPILE_TIME_ASSERT(sizeof(len_fields) == TEE_AES_BLOCK_SIZE);
	internal_aes_gcm_ghash_update(ctx, (uint8_t *)len_fields, NULL, 0);
}

TEE_Result internal_aes_gcm_init(struct internal_aes_gcm_ctx *ctx,
				 TEE_OperationMode mode, const void *key,
				 size_t key_len, const void *nonce,
				 size_t nonce_len, size_t tag_len)
{
	TEE_Result res;

	COMPILE_TIME_ASSERT(sizeof(ctx->ctr) == TEE_AES_BLOCK_SIZE);

	if (tag_len > sizeof(ctx->buf_tag))
		return TEE_ERROR_BAD_PARAMETERS;

	memset(ctx, 0, sizeof(*ctx));

	ctx->tag_len = tag_len;
	res = internal_aes_gcm_set_key(ctx, key, key_len);
	if (res)
		return res;

	/*
	 * Set up the initial counter block as specified in NIST SP 800-38D:
	 * a 96-bit nonce is used directly with a block counter appended,
	 * any other nonce length is compressed with GHASH first.
	 */
	if (nonce_len == (96 / 8)) {
		memcpy(ctx->ctr, nonce, nonce_len);
		internal_aes_gcm_inc_ctr(ctx);
	} else {
		ghash_update_pad_zero(ctx, nonce, nonce_len);
		ghash_update_lengths(ctx, 0, nonce_len);

		memcpy(ctx->ctr, ctx->hash_state, sizeof(ctx->ctr));
		memset(ctx->hash_state, 0, sizeof(ctx->hash_state));
	}

	internal_aes_gcm_encrypt_block(ctx, ctx->ctr, ctx->buf_tag);
	internal_aes_gcm_inc_ctr(ctx);
	if (mode == TEE_MODE_ENCRYPT) {
		/*
		 * Encryption uses the pre-encrypted xor-buffer to encrypt
		 * while decryption encrypts the xor-buffer when needed
		 * instead.
		 *
		 * The reason for this is that the combined encryption and
		 * ghash implementation interleaves both operations. In
		 * the decrypt case the xor-buffer is needed at the end of
		 * processing each block, while the encrypt case needs the
		 * xor-buffer before processing each block.
		 *
		 * In a pure software implementation we wouldn't have any
		 * use for this kind of optimization, but since this
		 * AES-GCM implementation is aimed at being combined with
		 * accelerated routines it's more convenient to always have
		 * this optimization activated.
		 */
		internal_aes_gcm_encrypt_block(ctx, ctx->ctr, ctx->buf_cryp);
		internal_aes_gcm_inc_ctr(ctx);
	}

	return TEE_SUCCESS;
}

TEE_Result internal_aes_gcm_update_aad(struct internal_aes_gcm_ctx *ctx,
				       const void *data, size_t len)
{
	const uint8_t *d = data;
	size_t l = len;
	const uint8_t *head = NULL;
	size_t n;

	/* All AAD has to be supplied before any payload */
	if (ctx->payload_bytes)
		return TEE_ERROR_BAD_PARAMETERS;

	ctx->aad_bytes += len;

	/*
	 * Buffer unaligned or partial input in buf_hash and pass block
	 * aligned data directly to GHASH.
	 */
	while (l) {
		if (ctx->buf_pos || !ptr_is_block_aligned(d) ||
		    l < TEE_AES_BLOCK_SIZE) {
			n = MIN(TEE_AES_BLOCK_SIZE - ctx->buf_pos, l);
			memcpy(ctx->buf_hash + ctx->buf_pos, d, n);
			ctx->buf_pos += n;

			if (ctx->buf_pos != TEE_AES_BLOCK_SIZE)
				return TEE_SUCCESS;

			ctx->buf_pos = 0;
			head = ctx->buf_hash;
			d += n;
			l -= n;
		}

		if (ptr_is_block_aligned(d))
			n = l / TEE_AES_BLOCK_SIZE;
		else
			n = 0;

		internal_aes_gcm_ghash_update(ctx, head, d, n);
		l -= n * TEE_AES_BLOCK_SIZE;
		d += n * TEE_AES_BLOCK_SIZE;
	}

	return TEE_SUCCESS;
}

TEE_Result internal_aes_gcm_update_payload(struct internal_aes_gcm_ctx *ctx,
					   TEE_OperationMode mode,
					   const void *src, size_t len,
					   void *dst)
{
	size_t n;
	const uint8_t *s = src;
	uint8_t *d = dst;
	size_t l = len;

	if (!ctx->payload_bytes && ctx->buf_pos) {
		/* AAD part done, finish up the last bits. */
		memset(ctx->buf_hash + ctx->buf_pos, 0,
		       TEE_AES_BLOCK_SIZE - ctx->buf_pos);
		internal_aes_gcm_ghash_update(ctx, ctx->buf_hash, NULL, 0);
		ctx->buf_pos = 0;
	}

	ctx->payload_bytes += len;

	while (l) {
		if (ctx->buf_pos || !ptr_is_block_aligned(s) ||
		    !ptr_is_block_aligned(d) || l < TEE_AES_BLOCK_SIZE) {
			n = MIN(TEE_AES_BLOCK_SIZE - ctx->buf_pos, l);

			/*
			 * When starting on a new block in the decrypt
			 * case, encrypt the counter block now (see the
			 * comment in internal_aes_gcm_init()).
			 */
			if (!ctx->buf_pos && mode == TEE_MODE_DECRYPT) {
				internal_aes_gcm_encrypt_block(ctx, ctx->ctr,
							       ctx->buf_cryp);
			}

			xor_buf(ctx->buf_cryp + ctx->buf_pos, s, n);
			memcpy(d, ctx->buf_cryp + ctx->buf_pos, n);
			if (mode == TEE_MODE_ENCRYPT)
				memcpy(ctx->buf_hash + ctx->buf_pos,
				       ctx->buf_cryp + ctx->buf_pos, n);
			else
				memcpy(ctx->buf_hash + ctx->buf_pos, s, n);

			ctx->buf_pos += n;

			if (ctx->buf_pos != TEE_AES_BLOCK_SIZE)
				return TEE_SUCCESS;

			internal_aes_gcm_ghash_update(ctx, ctx->buf_hash,
						      NULL, 0);
			ctx->buf_pos = 0;
			d += n;
			s += n;
			l -= n;

			if (mode == TEE_MODE_ENCRYPT)
				internal_aes_gcm_encrypt_block(ctx, ctx->ctr,
							       ctx->buf_cryp);
			internal_aes_gcm_inc_ctr(ctx);
		} else {
			/* Source, destination and length are block aligned */
			n = l / TEE_AES_BLOCK_SIZE;
			internal_aes_gcm_update_payload_block_aligned(ctx, mode,
								      s, n, d);
			s += n * TEE_AES_BLOCK_SIZE;
			d += n * TEE_AES_BLOCK_SIZE;
			l -= n * TEE_AES_BLOCK_SIZE;
		}
	}

	return TEE_SUCCESS;
}

static TEE_Result operation_final(struct internal_aes_gcm_ctx *ctx,
				  TEE_OperationMode m, const uint8_t *src,
				  size_t len, uint8_t *dst)
{
	TEE_Result res;

	res = internal_aes_gcm_update_payload(ctx, m, src, len, dst);
	if (res)
		return res;

	/* Pad and GHASH a final partial block, if any */
	if (ctx->buf_pos) {
		memset(ctx->buf_hash + ctx->buf_pos, 0,
		       sizeof(ctx->buf_hash) - ctx->buf_pos);
		internal_aes_gcm_ghash_update(ctx, ctx->buf_hash, NULL, 0);
	}

	ghash_update_lengths(ctx, ctx->aad_bytes, ctx->payload_bytes);
	/*
	 * buf_tag was filled in with the encrypted initial counter block
	 * in internal_aes_gcm_init().
	 */
	xor_buf(ctx->buf_tag, ctx->hash_state, ctx->tag_len);

	return TEE_SUCCESS;
}

TEE_Result internal_aes_gcm_enc_final(struct internal_aes_gcm_ctx *ctx,
				      const void *src, size_t len, void *dst,
				      void *tag, size_t *tag_len)
{
	TEE_Result res;

	if (*tag_len < ctx->tag_len)
		return TEE_ERROR_SHORT_BUFFER;

	res = operation_final(ctx, TEE_MODE_ENCRYPT, src, len, dst);
	if (res)
		return res;

	memcpy(tag, ctx->buf_tag, ctx->tag_len);
	*tag_len = ctx->tag_len;

	return TEE_SUCCESS;
}

TEE_Result internal_aes_gcm_dec_final(struct internal_aes_gcm_ctx *ctx,
				      const void *src, size_t len, void *dst,
				      const void *tag, size_t tag_len)
{
	TEE_Result res;

	if (tag_len != ctx->tag_len)
		return TEE_ERROR_MAC_INVALID;

	res = operation_final(ctx, TEE_MODE_DECRYPT, src, len, dst);
	if (res)
		return res;

	/* Compare the tags in constant time */
	if (buf_compare_ct(ctx->buf_tag, tag, tag_len))
		return TEE_ERROR_MAC_INVALID;

	return TEE_SUCCESS;
}

/* Increment the counter block as a 128-bit big-endian integer */
void internal_aes_gcm_inc_ctr(struct internal_aes_gcm_ctx *ctx)
{
	uint64_t c;

	c = TEE_U64_FROM_BIG_ENDIAN(ctx->ctr[1]) + 1;
	ctx->ctr[1] = TEE_U64_TO_BIG_ENDIAN(c);
	if (!c) {
		c = TEE_U64_FROM_BIG_ENDIAN(ctx->ctr[0]) + 1;
		ctx->ctr[0] = TEE_U64_TO_BIG_ENDIAN(c);
	}
}

#ifndef CFG_CRYPTO_AES_GCM_FROM_CRYPTOLIB
#include <crypto/aes-gcm.h>

/*
 * Provide the crypto_aes_gcm_*() API with this implementation unless an
 * external crypto library supplies AES-GCM.
 */
size_t crypto_aes_gcm_get_ctx_size(void)
{
	return sizeof(struct internal_aes_gcm_ctx);
}

TEE_Result crypto_aes_gcm_init(void *c, TEE_OperationMode mode,
			       const uint8_t *key, size_t key_len,
			       const uint8_t *nonce, size_t nonce_len,
			       size_t tag_len)
{
	return internal_aes_gcm_init(c, mode, key, key_len, nonce, nonce_len,
				     tag_len);
}

TEE_Result crypto_aes_gcm_update_aad(void *c, const uint8_t *data, size_t len)
{
	return internal_aes_gcm_update_aad(c, data, len);
}

TEE_Result crypto_aes_gcm_update_payload(void *c, TEE_OperationMode m,
					 const uint8_t *src, size_t len,
					 uint8_t *dst)
{
	return internal_aes_gcm_update_payload(c, m, src, len, dst);
}

TEE_Result crypto_aes_gcm_enc_final(void *c, const uint8_t *src, size_t len,
				    uint8_t *dst, uint8_t *tag, size_t *tag_len)
{
	return internal_aes_gcm_enc_final(c, src, len, dst, tag, tag_len);
}

TEE_Result crypto_aes_gcm_dec_final(void *c, const uint8_t *src, size_t len,
				    uint8_t *dst, const uint8_t *tag,
				    size_t tag_len)
{
	return internal_aes_gcm_dec_final(c, src, len, dst, tag, tag_len);
}

void crypto_aes_gcm_final(void *c __unused)
{
}
#endif /*!CFG_CRYPTO_AES_GCM_FROM_CRYPTOLIB*/
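
/*
 * Usage sketch: a one-shot AES-GCM encryption through the
 * crypto_aes_gcm_*() wrappers above. This only illustrates the expected
 * call sequence (init, AAD, payload/final); the key, nonce, buffers and
 * the helper name are hypothetical and sized arbitrarily.
 *
 *	static TEE_Result example_encrypt(void)
 *	{
 *		struct internal_aes_gcm_ctx ctx;
 *		uint8_t key[32] = { 0 };
 *		uint8_t nonce[12] = { 0 };
 *		uint8_t aad[16] = { 0 };
 *		uint8_t plain[64] = { 0 };
 *		uint8_t ciph[64] = { 0 };
 *		uint8_t tag[16] = { 0 };
 *		size_t tag_len = sizeof(tag);
 *		TEE_Result res;
 *
 *		res = crypto_aes_gcm_init(&ctx, TEE_MODE_ENCRYPT, key,
 *					  sizeof(key), nonce, sizeof(nonce),
 *					  sizeof(tag));
 *		if (res)
 *			return res;
 *		res = crypto_aes_gcm_update_aad(&ctx, aad, sizeof(aad));
 *		if (res)
 *			return res;
 *		res = crypto_aes_gcm_enc_final(&ctx, plain, sizeof(plain),
 *					       ciph, tag, &tag_len);
 *		crypto_aes_gcm_final(&ctx);
 *		return res;
 *	}
 */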