xref: /optee_os/core/crypto/aes-gcm.c (revision 5a913ee74d3c71af2a2860ce8a4e7aeab2916f9b)
1 // SPDX-License-Identifier: BSD-2-Clause
2 /*
3  * Copyright (c) 2017, Linaro Limited
4  */
5 
6 #include <assert.h>
7 #include <crypto/internal_aes-gcm.h>
8 #include <crypto/crypto_impl.h>
9 #include <io.h>
10 #include <string_ext.h>
11 #include <string.h>
12 #include <tee_api_types.h>
13 #include <types_ext.h>
14 #include <utee_defines.h>
15 #include <util.h>
16 
17 #include "aes-gcm-private.h"
18 
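/*
 * XOR @len bytes of @src into @dst. Used both for the buffered (partial
 * block) payload path and for combining the GHASH result with the
 * encrypted initial counter block when producing the tag.
 */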
19 static void xor_buf(uint8_t *dst, const uint8_t *src, size_t len)
20 {
21 	size_t n;
22 
23 	for (n = 0; n < len; n++)
24 		dst[n] ^= src[n];
25 }
26 
27 
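/*
 * Feed @len bytes of @data into GHASH. Full blocks are passed directly
 * when @data is block aligned, otherwise they are bounced via a stack
 * buffer. A trailing partial block is zero padded before it is absorbed,
 * as needed when deriving the counter block from a nonce that isn't
 * 96 bits.
 */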
28 static void ghash_update_pad_zero(struct internal_aes_gcm_state *state,
29 				  const uint8_t *data, size_t len)
30 {
31 	size_t n = len / TEE_AES_BLOCK_SIZE;
32 	uint64_t block[2];
33 
34 	if (n) {
35 		if (internal_aes_gcm_ptr_is_block_aligned(data)) {
36 			internal_aes_gcm_ghash_update(state, NULL, data, n);
37 		} else {
38 			size_t m;
39 
40 			for (m = 0; m < n; m++) {
41 
42 				memcpy(block, data + m * sizeof(block),
43 				       sizeof(block));
44 				internal_aes_gcm_ghash_update(state, NULL,
45 							      (void *)block, 1);
46 			}
47 		}
48 	}
49 
50 	if (len - n * TEE_AES_BLOCK_SIZE) {
51 		memset(block, 0, sizeof(block));
52 		memcpy(block, data + n * TEE_AES_BLOCK_SIZE,
53 		       len - n * TEE_AES_BLOCK_SIZE);
54 		internal_aes_gcm_ghash_update(state, block, NULL, 0);
55 	}
56 }
57 
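/*
 * Absorb a GCM length block into GHASH: the two lengths are encoded as
 * 64-bit big endian bit counts, [len(AAD) || len(C)] for the final block
 * and [0 || len(nonce)] when deriving the counter block from a long nonce.
 */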
58 static void ghash_update_lengths(struct internal_aes_gcm_state *state,
59 				 uint32_t l1, uint32_t l2)
60 {
61 	uint64_t len_fields[2] = {
62 		TEE_U64_TO_BIG_ENDIAN(l1 * 8),
63 		TEE_U64_TO_BIG_ENDIAN(l2 * 8)
64 	};
65 
66 	COMPILE_TIME_ASSERT(sizeof(len_fields) == TEE_AES_BLOCK_SIZE);
67 	internal_aes_gcm_ghash_update(state, (uint8_t *)len_fields, NULL, 0);
68 }
69 
70 static TEE_Result __gcm_init(struct internal_aes_gcm_state *state,
71 			     const struct internal_aes_gcm_key *ek,
72 			     TEE_OperationMode mode, const void *nonce,
73 			     size_t nonce_len, size_t tag_len)
74 {
75 	COMPILE_TIME_ASSERT(sizeof(state->ctr) == TEE_AES_BLOCK_SIZE);
76 
77 	if (tag_len > sizeof(state->buf_tag))
78 		return TEE_ERROR_BAD_PARAMETERS;
79 
80 	memset(state, 0, sizeof(*state));
81 
82 	state->tag_len = tag_len;
83 	internal_aes_gcm_set_key(state, ek);
84 
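	/*
	 * Derive the initial counter block (J0 in NIST SP 800-38D terms):
	 * a 96-bit nonce is used directly with the final 32-bit word set
	 * to 1 (state->ctr is all zeros here, so the increment does just
	 * that), any other nonce length is run through GHASH together
	 * with its bit length and the result becomes the counter block.
	 */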
85 	if (nonce_len == (96 / 8)) {
86 		memcpy(state->ctr, nonce, nonce_len);
87 		internal_aes_gcm_inc_ctr(state);
88 	} else {
89 		ghash_update_pad_zero(state, nonce, nonce_len);
90 		ghash_update_lengths(state, 0, nonce_len);
91 
92 		memcpy(state->ctr, state->hash_state, sizeof(state->ctr));
93 		memset(state->hash_state, 0, sizeof(state->hash_state));
94 	}
95 
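	/*
	 * E(K, J0) is kept in buf_tag and XORed with the final GHASH
	 * value to produce the tag. The counter is then advanced so that
	 * payload processing starts with the next counter block.
	 */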
96 	internal_aes_gcm_encrypt_block(ek, state->ctr, state->buf_tag);
97 	internal_aes_gcm_inc_ctr(state);
98 	if (mode == TEE_MODE_ENCRYPT) {
99 		/*
100 		 * Encryption uses the pre-encrypted xor-buffer to encrypt,
101 		 * while decryption encrypts the xor-buffer on demand
102 		 * instead.
103 		 *
104 		 * The reason is that the combined encryption and GHASH
105 		 * implementation interleaves the two operations. In the
106 		 * decrypt case the xor-buffer is needed at the end of
107 		 * processing each block, while in the encrypt case it is
108 		 * needed before processing each block.
109 		 *
110 		 * In a pure software implementation we wouldn't have any
111 		 * use for this kind of optimization, but since this
112 		 * AES-GCM implementation is aimed at being combined with
113 		 * accelerated routines it's more convenient to always have
114 		 * this optimization activated.
115 		 */
116 		internal_aes_gcm_encrypt_block(ek, state->ctr, state->buf_cryp);
117 		internal_aes_gcm_inc_ctr(state);
118 	}
119 
120 	return TEE_SUCCESS;
121 }
122 
123 TEE_Result internal_aes_gcm_init(struct internal_aes_gcm_ctx *ctx,
124 				 TEE_OperationMode mode, const void *key,
125 				 size_t key_len, const void *nonce,
126 				 size_t nonce_len, size_t tag_len)
127 {
128 	TEE_Result res = internal_aes_gcm_expand_enc_key(key, key_len,
129 							 &ctx->key);
130 	if (res)
131 		return res;
132 
133 	return __gcm_init(&ctx->state, &ctx->key, mode, nonce, nonce_len,
134 			  tag_len);
135 }
136 
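/*
 * Absorb additional authenticated data into GHASH. Partial blocks are
 * collected in buf_hash, block-aligned runs are hashed directly from the
 * source buffer. All AAD must be supplied before any payload.
 */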
137 static TEE_Result __gcm_update_aad(struct internal_aes_gcm_state *state,
138 				   const void *data, size_t len)
139 {
140 	const uint8_t *d = data;
141 	size_t l = len;
142 	const uint8_t *head = NULL;
143 	size_t n;
144 
145 	if (state->payload_bytes)
146 		return TEE_ERROR_BAD_PARAMETERS;
147 
148 	state->aad_bytes += len;
149 
150 	while (l) {
151 		if (state->buf_pos ||
152 		    !internal_aes_gcm_ptr_is_block_aligned(d) ||
153 		    l < TEE_AES_BLOCK_SIZE) {
154 			n = MIN(TEE_AES_BLOCK_SIZE - state->buf_pos, l);
155 			memcpy(state->buf_hash + state->buf_pos, d, n);
156 			state->buf_pos += n;
157 
158 			if (state->buf_pos != TEE_AES_BLOCK_SIZE)
159 				return TEE_SUCCESS;
160 
161 			state->buf_pos = 0;
162 			head = state->buf_hash;
163 			d += n;
164 			l -= n;
165 		}
166 
167 		if (internal_aes_gcm_ptr_is_block_aligned(d))
168 			n = l / TEE_AES_BLOCK_SIZE;
169 		else
170 			n = 0;
171 
172 		internal_aes_gcm_ghash_update(state, head, d, n);
173 		l -= n * TEE_AES_BLOCK_SIZE;
174 		d += n * TEE_AES_BLOCK_SIZE;
175 	}
176 
177 	return TEE_SUCCESS;
178 }
179 
180 TEE_Result internal_aes_gcm_update_aad(struct internal_aes_gcm_ctx *ctx,
181 				       const void *data, size_t len)
182 {
183 	return __gcm_update_aad(&ctx->state, data, len);
184 }
185 
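/*
 * Encrypt or decrypt payload in CTR mode while feeding the ciphertext to
 * GHASH. buf_cryp holds the encrypted counter block (the keystream) that
 * is XORed with the input, buf_hash collects the data to authenticate,
 * that is, the output when encrypting and the input when decrypting.
 * Block-aligned runs are handed to the (possibly accelerated)
 * internal_aes_gcm_update_payload_block_aligned().
 */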
186 static TEE_Result
187 __gcm_update_payload(struct internal_aes_gcm_state *state,
188 		     const struct internal_aes_gcm_key *ek,
189 		     TEE_OperationMode mode, const void *src,
190 		     size_t len, void *dst)
191 {
192 	size_t n;
193 	const uint8_t *s = src;
194 	uint8_t *d = dst;
195 	size_t l = len;
196 
197 	if (!state->payload_bytes && state->buf_pos) {
198 		/* AAD part done, finish up the last bits. */
199 		memset(state->buf_hash + state->buf_pos, 0,
200 		       TEE_AES_BLOCK_SIZE - state->buf_pos);
201 		internal_aes_gcm_ghash_update(state, state->buf_hash, NULL, 0);
202 		state->buf_pos = 0;
203 	}
204 
205 	state->payload_bytes += len;
206 
207 	while (l) {
208 		if (state->buf_pos ||
209 		    !internal_aes_gcm_ptr_is_block_aligned(s) ||
210 		    !internal_aes_gcm_ptr_is_block_aligned(d) ||
211 		    l < TEE_AES_BLOCK_SIZE) {
212 			n = MIN(TEE_AES_BLOCK_SIZE - state->buf_pos, l);
213 
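			/*
			 * Decryption generates the keystream block
			 * lazily, see the comment in __gcm_init().
			 */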
214 			if (!state->buf_pos && mode == TEE_MODE_DECRYPT) {
215 				internal_aes_gcm_encrypt_block(ek, state->ctr,
216 							       state->buf_cryp);
217 			}
218 
219 			xor_buf(state->buf_cryp + state->buf_pos, s, n);
220 			memcpy(d, state->buf_cryp + state->buf_pos, n);
221 			if (mode == TEE_MODE_ENCRYPT)
222 				memcpy(state->buf_hash + state->buf_pos,
223 				       state->buf_cryp + state->buf_pos, n);
224 			else
225 				memcpy(state->buf_hash + state->buf_pos, s, n);
226 
227 			state->buf_pos += n;
228 
229 			if (state->buf_pos != TEE_AES_BLOCK_SIZE)
230 				return TEE_SUCCESS;
231 
232 			internal_aes_gcm_ghash_update(state, state->buf_hash,
233 						      NULL, 0);
234 			state->buf_pos = 0;
235 			d += n;
236 			s += n;
237 			l -= n;
238 
239 			if (mode == TEE_MODE_ENCRYPT)
240 				internal_aes_gcm_encrypt_block(ek, state->ctr,
241 							       state->buf_cryp);
242 			internal_aes_gcm_inc_ctr(state);
243 		} else {
244 			n = l / TEE_AES_BLOCK_SIZE;
245 			internal_aes_gcm_update_payload_block_aligned(state, ek,
246 								      mode,
247 								      s, n, d);
248 			s += n * TEE_AES_BLOCK_SIZE;
249 			d += n * TEE_AES_BLOCK_SIZE;
250 			l -= n * TEE_AES_BLOCK_SIZE;
251 		}
252 	}
253 
254 	return TEE_SUCCESS;
255 }
256 
257 TEE_Result internal_aes_gcm_update_payload(struct internal_aes_gcm_ctx *ctx,
258 					   TEE_OperationMode mode,
259 					   const void *src, size_t len,
260 					   void *dst)
261 {
262 	return __gcm_update_payload(&ctx->state, &ctx->key, mode, src, len,
263 				    dst);
264 }
265 
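/*
 * Process the last part of the payload, pad and flush any buffered
 * partial block into GHASH, absorb the length block and compute the tag
 * as buf_tag = E(K, J0) ^ GHASH(AAD, ciphertext, lengths), truncated to
 * tag_len bytes.
 */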
266 static TEE_Result operation_final(struct internal_aes_gcm_state *state,
267 				  const struct internal_aes_gcm_key *enc_key,
268 				  TEE_OperationMode m, const uint8_t *src,
269 				  size_t len, uint8_t *dst)
270 {
271 	TEE_Result res;
272 
273 	res = __gcm_update_payload(state, enc_key, m, src, len, dst);
274 	if (res)
275 		return res;
276 
277 	if (state->buf_pos) {
278 		memset(state->buf_hash + state->buf_pos, 0,
279 		       sizeof(state->buf_hash) - state->buf_pos);
280 		internal_aes_gcm_ghash_update(state, state->buf_hash, NULL, 0);
281 	}
282 
283 	ghash_update_lengths(state, state->aad_bytes, state->payload_bytes);
284 	/* buf_tag was filled in with the first counter block in __gcm_init() */
285 	xor_buf(state->buf_tag, state->hash_state, state->tag_len);
286 
287 	return TEE_SUCCESS;
288 }
289 
290 static TEE_Result __gcm_enc_final(struct internal_aes_gcm_state *state,
291 				  const struct internal_aes_gcm_key *enc_key,
292 				  const void *src, size_t len, void *dst,
293 				  void *tag, size_t *tag_len)
294 {
295 	TEE_Result res;
296 
297 	if (*tag_len < state->tag_len)
298 		return TEE_ERROR_SHORT_BUFFER;
299 
300 	res = operation_final(state, enc_key, TEE_MODE_ENCRYPT, src, len, dst);
301 	if (res)
302 		return res;
303 
304 	memcpy(tag, state->buf_tag, state->tag_len);
305 	*tag_len = state->tag_len;
306 
307 	return TEE_SUCCESS;
308 }
309 
310 TEE_Result internal_aes_gcm_enc_final(struct internal_aes_gcm_ctx *ctx,
311 				      const void *src, size_t len, void *dst,
312 				      void *tag, size_t *tag_len)
313 {
314 	return __gcm_enc_final(&ctx->state, &ctx->key, src, len, dst, tag,
315 			       tag_len);
316 }
317 
318 static TEE_Result __gcm_dec_final(struct internal_aes_gcm_state *state,
319 				  const struct internal_aes_gcm_key *enc_key,
320 				  const void *src, size_t len, void *dst,
321 				  const void *tag, size_t tag_len)
322 {
323 	TEE_Result res;
324 
325 	if (tag_len != state->tag_len)
326 		return TEE_ERROR_MAC_INVALID;
327 
328 	res = operation_final(state, enc_key, TEE_MODE_DECRYPT, src, len, dst);
329 	if (res)
330 		return res;
331 
332 	if (consttime_memcmp(state->buf_tag, tag, tag_len))
333 		return TEE_ERROR_MAC_INVALID;
334 
335 	return TEE_SUCCESS;
336 }
337 
338 TEE_Result internal_aes_gcm_dec_final(struct internal_aes_gcm_ctx *ctx,
339 				      const void *src, size_t len, void *dst,
340 				      const void *tag, size_t tag_len)
341 {
342 	return __gcm_dec_final(&ctx->state, &ctx->key, src, len, dst, tag,
343 			       tag_len);
344 }
345 
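/*
 * Advance the counter block: state->ctr is treated as a 128-bit big
 * endian integer stored as two 64-bit words and is incremented by one,
 * with carry from the low word into the high word.
 */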
346 void internal_aes_gcm_inc_ctr(struct internal_aes_gcm_state *state)
347 {
348 	uint64_t c;
349 
350 	c = TEE_U64_FROM_BIG_ENDIAN(state->ctr[1]) + 1;
351 	state->ctr[1] = TEE_U64_TO_BIG_ENDIAN(c);
352 	if (!c) {
353 		c = TEE_U64_FROM_BIG_ENDIAN(state->ctr[0]) + 1;
354 		state->ctr[0] = TEE_U64_TO_BIG_ENDIAN(c);
355 	}
356 }
357 
358 TEE_Result internal_aes_gcm_enc(const struct internal_aes_gcm_key *enc_key,
359 				const void *nonce, size_t nonce_len,
360 				const void *aad, size_t aad_len,
361 				const void *src, size_t len, void *dst,
362 				void *tag, size_t *tag_len)
363 {
364 	TEE_Result res;
365 	struct internal_aes_gcm_state state;
366 
367 	res = __gcm_init(&state, enc_key, TEE_MODE_ENCRYPT, nonce, nonce_len,
368 			 *tag_len);
369 	if (res)
370 		return res;
371 
372 	if (aad) {
373 		res = __gcm_update_aad(&state, aad, aad_len);
374 		if (res)
375 			return res;
376 	}
377 
378 	return __gcm_enc_final(&state, enc_key, src, len, dst, tag, tag_len);
379 }
380 
381 TEE_Result internal_aes_gcm_dec(const struct internal_aes_gcm_key *enc_key,
382 				const void *nonce, size_t nonce_len,
383 				const void *aad, size_t aad_len,
384 				const void *src, size_t len, void *dst,
385 				const void *tag, size_t tag_len)
386 {
387 	TEE_Result res;
388 	struct internal_aes_gcm_state state;
389 
390 	res = __gcm_init(&state, enc_key, TEE_MODE_DECRYPT, nonce, nonce_len,
391 			 tag_len);
392 	if (res)
393 		return res;
394 
395 	if (aad) {
396 		res = __gcm_update_aad(&state, aad, aad_len);
397 		if (res)
398 			return res;
399 	}
400 
401 	return __gcm_dec_final(&state, enc_key, src, len, dst, tag, tag_len);
402 }
403 
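/*
 * Illustrative only: a one-shot encryption with the internal API could
 * look roughly like below. The buffer names, sizes and error handling
 * are assumptions made for the example, not part of this file.
 *
 *	struct internal_aes_gcm_key ek = { };
 *	uint8_t tag[16] = { };
 *	size_t tag_len = sizeof(tag);
 *
 *	if (internal_aes_gcm_expand_enc_key(key, key_len, &ek))
 *		return TEE_ERROR_BAD_PARAMETERS;
 *	return internal_aes_gcm_enc(&ek, nonce, 12, aad, aad_len,
 *				    plain, plain_len, ciph, tag, &tag_len);
 */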
404 
405 #ifndef CFG_CRYPTO_AES_GCM_FROM_CRYPTOLIB
406 #include <stdlib.h>
407 #include <crypto/crypto.h>
408 
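/*
 * Glue layer: unless the crypto library's own GCM is used
 * (CFG_CRYPTO_AES_GCM_FROM_CRYPTOLIB), register this implementation
 * behind the generic crypto_authenc_ops interface so the generic
 * authenticated encryption API dispatches to the functions below.
 */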
409 struct aes_gcm_ctx {
410 	struct crypto_authenc_ctx aec;
411 	struct internal_aes_gcm_ctx ctx;
412 };
413 
414 static const struct crypto_authenc_ops aes_gcm_ops;
415 
416 static struct aes_gcm_ctx *
417 to_aes_gcm_ctx(struct crypto_authenc_ctx *aec)
418 {
419 	assert(aec->ops == &aes_gcm_ops);
420 
421 	return container_of(aec, struct aes_gcm_ctx, aec);
422 }
423 
424 TEE_Result crypto_aes_gcm_alloc_ctx(struct crypto_authenc_ctx **ctx_ret)
425 {
426 	struct aes_gcm_ctx *ctx = calloc(1, sizeof(*ctx));
427 
428 	if (!ctx)
429 		return TEE_ERROR_OUT_OF_MEMORY;
430 	ctx->aec.ops = &aes_gcm_ops;
431 
432 	*ctx_ret = &ctx->aec;
433 
434 	return TEE_SUCCESS;
435 }
436 
437 static void aes_gcm_free_ctx(struct crypto_authenc_ctx *aec)
438 {
439 	free(to_aes_gcm_ctx(aec));
440 }
441 
442 static void aes_gcm_copy_state(struct crypto_authenc_ctx *dst_ctx,
443 			       struct crypto_authenc_ctx *src_ctx)
444 {
445 	to_aes_gcm_ctx(dst_ctx)->ctx = to_aes_gcm_ctx(src_ctx)->ctx;
446 }
447 
448 static TEE_Result aes_gcm_init(struct crypto_authenc_ctx *aec,
449 			       TEE_OperationMode mode,
450 			       const uint8_t *key, size_t key_len,
451 			       const uint8_t *nonce, size_t nonce_len,
452 			       size_t tag_len, size_t aad_len __unused,
453 			       size_t payload_len __unused)
454 {
455 	return internal_aes_gcm_init(&to_aes_gcm_ctx(aec)->ctx, mode, key,
456 				     key_len, nonce, nonce_len, tag_len);
457 }
458 
459 static TEE_Result aes_gcm_update_aad(struct crypto_authenc_ctx *aec,
460 				     const uint8_t *data, size_t len)
461 {
462 	return internal_aes_gcm_update_aad(&to_aes_gcm_ctx(aec)->ctx, data,
463 					   len);
464 }
465 
466 static TEE_Result aes_gcm_update_payload(struct crypto_authenc_ctx *aec,
467 					 TEE_OperationMode m,
468 					 const uint8_t *src, size_t len,
469 					 uint8_t *dst)
470 {
471 	return internal_aes_gcm_update_payload(&to_aes_gcm_ctx(aec)->ctx,
472 					       m, src, len, dst);
473 }
474 
475 static TEE_Result aes_gcm_enc_final(struct crypto_authenc_ctx *aec,
476 				    const uint8_t *src, size_t len,
477 				    uint8_t *dst, uint8_t *tag, size_t *tag_len)
478 {
479 	return internal_aes_gcm_enc_final(&to_aes_gcm_ctx(aec)->ctx, src, len,
480 					  dst, tag, tag_len);
481 }
482 
483 static TEE_Result aes_gcm_dec_final(struct crypto_authenc_ctx *aec,
484 				    const uint8_t *src, size_t len,
485 				    uint8_t *dst, const uint8_t *tag,
486 				    size_t tag_len)
487 {
488 	return internal_aes_gcm_dec_final(&to_aes_gcm_ctx(aec)->ctx, src, len,
489 					  dst, tag, tag_len);
490 }
491 
492 static void aes_gcm_final(struct crypto_authenc_ctx *aec __unused)
493 {
494 }
495 
496 static const struct crypto_authenc_ops aes_gcm_ops = {
497 	.init = aes_gcm_init,
498 	.update_aad = aes_gcm_update_aad,
499 	.update_payload = aes_gcm_update_payload,
500 	.enc_final = aes_gcm_enc_final,
501 	.dec_final = aes_gcm_dec_final,
502 	.final = aes_gcm_final,
503 	.free_ctx = aes_gcm_free_ctx,
504 	.copy_state = aes_gcm_copy_state,
505 };
506 #endif /*!CFG_CRYPTO_AES_GCM_FROM_CRYPTOLIB*/
507