// SPDX-License-Identifier: BSD-2-Clause
/*
 * Copyright (c) 2017, Linaro Limited
 */

#include <assert.h>
#include <crypto/internal_aes-gcm.h>
#include <io.h>
#include <string_ext.h>
#include <string.h>
#include <tee_api_types.h>
#include <types_ext.h>
#include <utee_defines.h>
#include <util.h>

#include "aes-gcm-private.h"

static void xor_buf(uint8_t *dst, const uint8_t *src, size_t len)
{
	size_t n;

	for (n = 0; n < len; n++)
		dst[n] ^= src[n];
}

static void ghash_update_pad_zero(struct internal_aes_gcm_state *state,
				  const uint8_t *data, size_t len)
{
	size_t n = len / TEE_AES_BLOCK_SIZE;
	uint64_t block[2];

	if (n) {
		if (internal_aes_gcm_ptr_is_block_aligned(data)) {
			internal_aes_gcm_ghash_update(state, NULL, data, n);
		} else {
			size_t m;

			for (m = 0; m < n; m++) {
				memcpy(block, data + m * sizeof(block),
				       sizeof(block));
				internal_aes_gcm_ghash_update(state, NULL,
							      (void *)block, 1);
			}
		}
	}

	if (len - n * TEE_AES_BLOCK_SIZE) {
		memset(block, 0, sizeof(block));
		memcpy(block, data + n * TEE_AES_BLOCK_SIZE,
		       len - n * TEE_AES_BLOCK_SIZE);
		internal_aes_gcm_ghash_update(state, block, NULL, 0);
	}
}

static void ghash_update_lengths(struct internal_aes_gcm_state *state,
				 uint32_t l1, uint32_t l2)
{
	uint64_t len_fields[2] = {
		TEE_U64_TO_BIG_ENDIAN(l1 * 8),
		TEE_U64_TO_BIG_ENDIAN(l2 * 8)
	};

	COMPILE_TIME_ASSERT(sizeof(len_fields) == TEE_AES_BLOCK_SIZE);
	internal_aes_gcm_ghash_update(state, (uint8_t *)len_fields, NULL, 0);
}

static TEE_Result __gcm_init(struct internal_aes_gcm_state *state,
			     const struct internal_aes_gcm_key *ek,
			     TEE_OperationMode mode, const void *nonce,
			     size_t nonce_len, size_t tag_len)
{
	COMPILE_TIME_ASSERT(sizeof(state->ctr) == TEE_AES_BLOCK_SIZE);

	if (tag_len > sizeof(state->buf_tag))
		return TEE_ERROR_BAD_PARAMETERS;

	memset(state, 0, sizeof(*state));

	state->tag_len = tag_len;
	internal_aes_gcm_set_key(state, ek);

	if (nonce_len == (96 / 8)) {
		/*
		 * A 96-bit nonce is used directly as the initial counter
		 * block, any other nonce length is hashed with GHASH to
		 * derive it.
		 */
		memcpy(state->ctr, nonce, nonce_len);
		internal_aes_gcm_inc_ctr(state);
	} else {
		ghash_update_pad_zero(state, nonce, nonce_len);
		ghash_update_lengths(state, 0, nonce_len);

		memcpy(state->ctr, state->hash_state, sizeof(state->ctr));
		memset(state->hash_state, 0, sizeof(state->hash_state));
	}

	internal_aes_gcm_encrypt_block(ek, state->ctr, state->buf_tag);
	internal_aes_gcm_inc_ctr(state);
	if (mode == TEE_MODE_ENCRYPT) {
		/*
		 * Encryption uses the pre-encrypted xor-buffer to encrypt
		 * while decryption encrypts the xor-buffer when needed
		 * instead.
		 *
		 * The reason for this is that the combined encryption and
		 * ghash implementation does both operations intertwined.
		 * In the decrypt case the xor-buffer is needed at the end
		 * of processing each block, while the encryption case
		 * needs the xor-buffer before processing each block.
		 *
		 * In a pure software implementation we wouldn't have any
		 * use for this kind of optimization, but since this
		 * AES-GCM implementation is aimed at being combined with
		 * accelerated routines it's more convenient to always have
		 * this optimization activated.
		 */
		internal_aes_gcm_encrypt_block(ek, state->ctr, state->buf_cryp);
		internal_aes_gcm_inc_ctr(state);
	}

	return TEE_SUCCESS;
}

TEE_Result internal_aes_gcm_init(struct internal_aes_gcm_ctx *ctx,
				 TEE_OperationMode mode, const void *key,
				 size_t key_len, const void *nonce,
				 size_t nonce_len, size_t tag_len)
{
	TEE_Result res = internal_aes_gcm_expand_enc_key(key, key_len,
							 &ctx->key);
	if (res)
		return res;

	return __gcm_init(&ctx->state, &ctx->key, mode, nonce, nonce_len,
			  tag_len);
}

static TEE_Result __gcm_update_aad(struct internal_aes_gcm_state *state,
				   const void *data, size_t len)
{
	const uint8_t *d = data;
	size_t l = len;
	const uint8_t *head = NULL;
	size_t n;

	if (state->payload_bytes)
		return TEE_ERROR_BAD_PARAMETERS;

	state->aad_bytes += len;

	while (l) {
		if (state->buf_pos ||
		    !internal_aes_gcm_ptr_is_block_aligned(d) ||
		    l < TEE_AES_BLOCK_SIZE) {
			n = MIN(TEE_AES_BLOCK_SIZE - state->buf_pos, l);
			memcpy(state->buf_hash + state->buf_pos, d, n);
			state->buf_pos += n;

			if (state->buf_pos != TEE_AES_BLOCK_SIZE)
				return TEE_SUCCESS;

			state->buf_pos = 0;
			head = state->buf_hash;
			d += n;
			l -= n;
		}

		if (internal_aes_gcm_ptr_is_block_aligned(d))
			n = l / TEE_AES_BLOCK_SIZE;
		else
			n = 0;

		internal_aes_gcm_ghash_update(state, head, d, n);
		l -= n * TEE_AES_BLOCK_SIZE;
		d += n * TEE_AES_BLOCK_SIZE;
	}

	return TEE_SUCCESS;
}

TEE_Result internal_aes_gcm_update_aad(struct internal_aes_gcm_ctx *ctx,
				       const void *data, size_t len)
{
	return __gcm_update_aad(&ctx->state, data, len);
}

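/*
 * Payload data is processed one block at a time. Partial or unaligned
 * input is staged in buf_cryp/buf_hash until a full block is available,
 * while block-aligned runs are handed to the (possibly accelerated)
 * internal_aes_gcm_update_payload_block_aligned() in a single call.
 */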
static TEE_Result
__gcm_update_payload(struct internal_aes_gcm_state *state,
		     const struct internal_aes_gcm_key *ek,
		     TEE_OperationMode mode, const void *src,
		     size_t len, void *dst)
{
	size_t n;
	const uint8_t *s = src;
	uint8_t *d = dst;
	size_t l = len;

	if (!state->payload_bytes && state->buf_pos) {
		/* AAD part done, finish up the last bits. */
		memset(state->buf_hash + state->buf_pos, 0,
		       TEE_AES_BLOCK_SIZE - state->buf_pos);
		internal_aes_gcm_ghash_update(state, state->buf_hash, NULL, 0);
		state->buf_pos = 0;
	}

	state->payload_bytes += len;

	while (l) {
		if (state->buf_pos ||
		    !internal_aes_gcm_ptr_is_block_aligned(s) ||
		    !internal_aes_gcm_ptr_is_block_aligned(d) ||
		    l < TEE_AES_BLOCK_SIZE) {
			n = MIN(TEE_AES_BLOCK_SIZE - state->buf_pos, l);

			if (!state->buf_pos && mode == TEE_MODE_DECRYPT) {
				internal_aes_gcm_encrypt_block(ek, state->ctr,
							       state->buf_cryp);
			}

			xor_buf(state->buf_cryp + state->buf_pos, s, n);
			memcpy(d, state->buf_cryp + state->buf_pos, n);
			if (mode == TEE_MODE_ENCRYPT)
				memcpy(state->buf_hash + state->buf_pos,
				       state->buf_cryp + state->buf_pos, n);
			else
				memcpy(state->buf_hash + state->buf_pos, s, n);

			state->buf_pos += n;

			if (state->buf_pos != TEE_AES_BLOCK_SIZE)
				return TEE_SUCCESS;

			internal_aes_gcm_ghash_update(state, state->buf_hash,
						      NULL, 0);
			state->buf_pos = 0;
			d += n;
			s += n;
			l -= n;

			if (mode == TEE_MODE_ENCRYPT)
				internal_aes_gcm_encrypt_block(ek, state->ctr,
							       state->buf_cryp);
			internal_aes_gcm_inc_ctr(state);
		} else {
			n = l / TEE_AES_BLOCK_SIZE;
			internal_aes_gcm_update_payload_block_aligned(state, ek,
								      mode,
								      s, n, d);
			s += n * TEE_AES_BLOCK_SIZE;
			d += n * TEE_AES_BLOCK_SIZE;
			l -= n * TEE_AES_BLOCK_SIZE;
		}
	}

	return TEE_SUCCESS;
}

TEE_Result internal_aes_gcm_update_payload(struct internal_aes_gcm_ctx *ctx,
					   TEE_OperationMode mode,
					   const void *src, size_t len,
					   void *dst)
{
	return __gcm_update_payload(&ctx->state, &ctx->key, mode, src, len,
				    dst);
}

static TEE_Result operation_final(struct internal_aes_gcm_state *state,
				  const struct internal_aes_gcm_key *enc_key,
				  TEE_OperationMode m, const uint8_t *src,
				  size_t len, uint8_t *dst)
{
	TEE_Result res;

	res = __gcm_update_payload(state, enc_key, m, src, len, dst);
	if (res)
		return res;

	if (state->buf_pos) {
		memset(state->buf_hash + state->buf_pos, 0,
		       sizeof(state->buf_hash) - state->buf_pos);
		internal_aes_gcm_ghash_update(state, state->buf_hash, NULL, 0);
	}

	ghash_update_lengths(state, state->aad_bytes, state->payload_bytes);
	/*
	 * buf_tag was filled in with the encrypted initial counter block
	 * in __gcm_init().
	 */
	xor_buf(state->buf_tag, state->hash_state, state->tag_len);

	return TEE_SUCCESS;
}

static TEE_Result __gcm_enc_final(struct internal_aes_gcm_state *state,
				  const struct internal_aes_gcm_key *enc_key,
				  const void *src, size_t len, void *dst,
				  void *tag, size_t *tag_len)
{
	TEE_Result res;

	if (*tag_len < state->tag_len)
		return TEE_ERROR_SHORT_BUFFER;

	res = operation_final(state, enc_key, TEE_MODE_ENCRYPT, src, len, dst);
	if (res)
		return res;

	memcpy(tag, state->buf_tag, state->tag_len);
	*tag_len = state->tag_len;

	return TEE_SUCCESS;
}

TEE_Result internal_aes_gcm_enc_final(struct internal_aes_gcm_ctx *ctx,
				      const void *src, size_t len, void *dst,
				      void *tag, size_t *tag_len)
{
	return __gcm_enc_final(&ctx->state, &ctx->key, src, len, dst, tag,
			       tag_len);
}

static TEE_Result __gcm_dec_final(struct internal_aes_gcm_state *state,
				  const struct internal_aes_gcm_key *enc_key,
				  const void *src, size_t len, void *dst,
				  const void *tag, size_t tag_len)
{
	TEE_Result res;

	if (tag_len != state->tag_len)
		return TEE_ERROR_MAC_INVALID;

	res = operation_final(state, enc_key, TEE_MODE_DECRYPT, src, len, dst);
	if (res)
		return res;

	if (buf_compare_ct(state->buf_tag, tag, tag_len))
		return TEE_ERROR_MAC_INVALID;

	return TEE_SUCCESS;
}

TEE_Result internal_aes_gcm_dec_final(struct internal_aes_gcm_ctx *ctx,
				      const void *src, size_t len, void *dst,
				      const void *tag, size_t tag_len)
{
	return __gcm_dec_final(&ctx->state, &ctx->key, src, len, dst, tag,
			       tag_len);
}

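/*
 * The counter block is stored as two big-endian 64-bit words. The
 * increment below treats it as a single 128-bit big-endian integer: the
 * low word is incremented and any carry propagates into the high word.
 */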
void internal_aes_gcm_inc_ctr(struct internal_aes_gcm_state *state)
{
	uint64_t c;

	c = TEE_U64_FROM_BIG_ENDIAN(state->ctr[1]) + 1;
	state->ctr[1] = TEE_U64_TO_BIG_ENDIAN(c);
	if (!c) {
		c = TEE_U64_FROM_BIG_ENDIAN(state->ctr[0]) + 1;
		state->ctr[0] = TEE_U64_TO_BIG_ENDIAN(c);
	}
}

TEE_Result internal_aes_gcm_enc(const struct internal_aes_gcm_key *enc_key,
				const void *nonce, size_t nonce_len,
				const void *aad, size_t aad_len,
				const void *src, size_t len, void *dst,
				void *tag, size_t *tag_len)
{
	TEE_Result res;
	struct internal_aes_gcm_state state;

	res = __gcm_init(&state, enc_key, TEE_MODE_ENCRYPT, nonce, nonce_len,
			 *tag_len);
	if (res)
		return res;

	if (aad) {
		res = __gcm_update_aad(&state, aad, aad_len);
		if (res)
			return res;
	}

	return __gcm_enc_final(&state, enc_key, src, len, dst, tag, tag_len);
}

TEE_Result internal_aes_gcm_dec(const struct internal_aes_gcm_key *enc_key,
				const void *nonce, size_t nonce_len,
				const void *aad, size_t aad_len,
				const void *src, size_t len, void *dst,
				const void *tag, size_t tag_len)
{
	TEE_Result res;
	struct internal_aes_gcm_state state;

	res = __gcm_init(&state, enc_key, TEE_MODE_DECRYPT, nonce, nonce_len,
			 tag_len);
	if (res)
		return res;

	if (aad) {
		res = __gcm_update_aad(&state, aad, aad_len);
		if (res)
			return res;
	}

	return __gcm_dec_final(&state, enc_key, src, len, dst, tag, tag_len);
}

#ifndef CFG_CRYPTO_AES_GCM_FROM_CRYPTOLIB
#include <crypto/aes-gcm.h>

size_t crypto_aes_gcm_get_ctx_size(void)
{
	return sizeof(struct internal_aes_gcm_ctx);
}

TEE_Result crypto_aes_gcm_init(void *c, TEE_OperationMode mode,
			       const uint8_t *key, size_t key_len,
			       const uint8_t *nonce, size_t nonce_len,
			       size_t tag_len)
{
	return internal_aes_gcm_init(c, mode, key, key_len, nonce, nonce_len,
				     tag_len);
}

TEE_Result crypto_aes_gcm_update_aad(void *c, const uint8_t *data, size_t len)
{
	return internal_aes_gcm_update_aad(c, data, len);
}

TEE_Result crypto_aes_gcm_update_payload(void *c, TEE_OperationMode m,
					 const uint8_t *src, size_t len,
					 uint8_t *dst)
{
	return internal_aes_gcm_update_payload(c, m, src, len, dst);
}

TEE_Result crypto_aes_gcm_enc_final(void *c, const uint8_t *src, size_t len,
				    uint8_t *dst, uint8_t *tag, size_t *tag_len)
{
	return internal_aes_gcm_enc_final(c, src, len, dst, tag, tag_len);
}

TEE_Result crypto_aes_gcm_dec_final(void *c, const uint8_t *src, size_t len,
				    uint8_t *dst, const uint8_t *tag,
				    size_t tag_len)
{
	return internal_aes_gcm_dec_final(c, src, len, dst, tag, tag_len);
}

void crypto_aes_gcm_final(void *c __unused)
{
}
#endif /*!CFG_CRYPTO_AES_GCM_FROM_CRYPTOLIB*/
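
/*
 * Usage sketch (illustrative only, not compiled): one-shot encryption with
 * the internal API defined above. The buffer sizes, the all-zero key and
 * the 96-bit nonce below are placeholders for this sketch; real callers
 * must supply their own key material and a nonce that is unique per key.
 *
 *	const uint8_t key[16] = { };
 *	uint8_t nonce[12] = { };
 *	uint8_t aad[16] = { };
 *	uint8_t plain[32] = { };
 *	uint8_t cipher[sizeof(plain)] = { };
 *	uint8_t tag[16] = { };
 *	size_t tag_len = sizeof(tag);
 *	struct internal_aes_gcm_key ek = { };
 *	TEE_Result res;
 *
 *	res = internal_aes_gcm_expand_enc_key(key, sizeof(key), &ek);
 *	if (!res)
 *		res = internal_aes_gcm_enc(&ek, nonce, sizeof(nonce),
 *					   aad, sizeof(aad), plain,
 *					   sizeof(plain), cipher,
 *					   tag, &tag_len);
 *
 * Decryption mirrors this with internal_aes_gcm_dec(), which returns
 * TEE_ERROR_MAC_INVALID if the supplied tag does not authenticate.
 */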