xref: /optee_os/core/crypto/aes-gcm.c (revision b97e9666f646ca681890b1f5c61b8d62f0160d34)
1 /*
2  * Copyright (c) 2017, Linaro Limited
3  * All rights reserved.
4  *
5  * SPDX-License-Identifier: BSD-2-Clause
6  */
7 
8 #include <assert.h>
9 #include <crypto/internal_aes-gcm.h>
10 #include <io.h>
11 #include <string_ext.h>
12 #include <string.h>
13 #include <tee_api_types.h>
14 #include <types_ext.h>
15 #include <utee_defines.h>
16 #include <util.h>
17 
18 #include "aes-gcm-private.h"
19 
/* XOR @len bytes of @src into @dst in place. */
static void xor_buf(uint8_t *dst, const uint8_t *src, size_t len)
{
	size_t i = 0;

	while (i < len) {
		dst[i] ^= src[i];
		i++;
	}
}
27 
28 
/*
 * Feed @len bytes at @data into the GHASH state, zero padding the last
 * partial block if @len isn't a multiple of the AES block size.
 * Used by __gcm_init() to pre-hash a nonce that isn't 96 bits.
 */
static void ghash_update_pad_zero(struct internal_aes_gcm_state *state,
				  const uint8_t *data, size_t len)
{
	size_t n = len / TEE_AES_BLOCK_SIZE;
	uint64_t block[2];

	if (n) {
		if (internal_aes_gcm_ptr_is_block_aligned(data)) {
			internal_aes_gcm_ghash_update(state, NULL, data, n);
		} else {
			size_t m;

			/*
			 * Unaligned input: bounce each block through the
			 * aligned local buffer, one at a time.
			 */
			for (m = 0; m < n; m++) {

				memcpy(block, data + m * sizeof(block),
				       sizeof(block));
				internal_aes_gcm_ghash_update(state, NULL,
							      (void *)block, 1);
			}
		}
	}

	if (len - n * TEE_AES_BLOCK_SIZE) {
		/* Trailing partial block, zero padded up to block size */
		memset(block, 0, sizeof(block));
		memcpy(block, data + n * TEE_AES_BLOCK_SIZE,
		       len - n * TEE_AES_BLOCK_SIZE);
		internal_aes_gcm_ghash_update(state, block, NULL, 0);
	}
}
58 
59 static void ghash_update_lengths(struct internal_aes_gcm_state *state,
60 				 uint32_t l1, uint32_t l2)
61 {
62 	uint64_t len_fields[2] = {
63 		TEE_U64_TO_BIG_ENDIAN(l1 * 8),
64 		TEE_U64_TO_BIG_ENDIAN(l2 * 8)
65 	};
66 
67 	COMPILE_TIME_ASSERT(sizeof(len_fields) == TEE_AES_BLOCK_SIZE);
68 	internal_aes_gcm_ghash_update(state, (uint8_t *)len_fields, NULL, 0);
69 }
70 
/*
 * Initialize @state for one AES-GCM operation with the pre-expanded
 * key @ek (NIST SP 800-38D).
 *
 * The initial counter block Y0 is derived from the nonce: a 96-bit
 * nonce is used verbatim with the counter part set to 1, any other
 * length is pre-hashed with GHASH together with its bit length.
 * E(K, Y0) is stored in buf_tag; operation_final() xors the final
 * GHASH value into it to form the authentication tag.
 *
 * Returns TEE_ERROR_BAD_PARAMETERS if @tag_len exceeds the tag buffer.
 */
static TEE_Result __gcm_init(struct internal_aes_gcm_state *state,
			     const struct internal_aes_gcm_key *ek,
			     TEE_OperationMode mode, const void *nonce,
			     size_t nonce_len, size_t tag_len)
{
	COMPILE_TIME_ASSERT(sizeof(state->ctr) == TEE_AES_BLOCK_SIZE);

	if (tag_len > sizeof(state->buf_tag))
		return TEE_ERROR_BAD_PARAMETERS;

	memset(state, 0, sizeof(*state));

	state->tag_len = tag_len;
	internal_aes_gcm_set_key(state, ek);

	if (nonce_len == (96 / 8)) {
		/* Y0 = nonce || 0^31 || 1 */
		memcpy(state->ctr, nonce, nonce_len);
		internal_aes_gcm_inc_ctr(state);
	} else {
		/* Y0 = GHASH(nonce zero-padded to blocks || bit length) */
		ghash_update_pad_zero(state, nonce, nonce_len);
		ghash_update_lengths(state, 0, nonce_len);

		memcpy(state->ctr, state->hash_state, sizeof(state->ctr));
		/* Reset the hash state for the AAD/payload GHASH */
		memset(state->hash_state, 0, sizeof(state->hash_state));
	}

	internal_aes_gcm_encrypt_block(ek, state->ctr, state->buf_tag);
	internal_aes_gcm_inc_ctr(state);
	if (mode == TEE_MODE_ENCRYPT) {
		/*
		 * Encryption uses the pre-encrypted xor-buffer to encrypt
		 * while decryption encrypts the xor-buffer when needed
		 * instead.
		 *
		 * The reason for this is that the combined encryption and
		 * ghash implementation does both operations intertwined.
		 * In the decrypt case the xor-buffer is needed at the end
		 * of processing each block, while the encryption case
		 * needs xor-buffer before processing each block.
		 *
		 * In a pure software implementation we wouldn't have any
		 * use for this kind of optimization, but since this
		 * AES-GCM implementation is aimed at being combined with
		 * accelerated routines it's more convenient to always have
		 * this optimization activated.
		 */
		internal_aes_gcm_encrypt_block(ek, state->ctr, state->buf_cryp);
		internal_aes_gcm_inc_ctr(state);
	}

	return TEE_SUCCESS;
}
123 
124 TEE_Result internal_aes_gcm_init(struct internal_aes_gcm_ctx *ctx,
125 				 TEE_OperationMode mode, const void *key,
126 				 size_t key_len, const void *nonce,
127 				 size_t nonce_len, size_t tag_len)
128 {
129 	TEE_Result res = internal_aes_gcm_expand_enc_key(key, key_len,
130 							 &ctx->key);
131 	if (res)
132 		return res;
133 
134 	return __gcm_init(&ctx->state, &ctx->key, mode, nonce, nonce_len,
135 			  tag_len);
136 }
137 
/*
 * Absorb @len bytes of additional authenticated data into the GHASH
 * state. Partial blocks are collected in buf_hash until a full AES
 * block is available; block aligned spans of @data are hashed in a
 * single call to benefit from accelerated implementations.
 *
 * All AAD must be supplied before any payload; returns
 * TEE_ERROR_BAD_PARAMETERS otherwise.
 */
static TEE_Result __gcm_update_aad(struct internal_aes_gcm_state *state,
				   const void *data, size_t len)
{
	const uint8_t *d = data;
	size_t l = len;
	const uint8_t *head = NULL;
	size_t n;

	/* In GCM, AAD cannot follow payload data */
	if (state->payload_bytes)
		return TEE_ERROR_BAD_PARAMETERS;

	state->aad_bytes += len;

	while (l) {
		if (state->buf_pos ||
		    !internal_aes_gcm_ptr_is_block_aligned(d) ||
		    l < TEE_AES_BLOCK_SIZE) {
			/* Top up the partial block buffer */
			n = MIN(TEE_AES_BLOCK_SIZE - state->buf_pos, l);
			memcpy(state->buf_hash + state->buf_pos, d, n);
			state->buf_pos += n;

			/* Still not a full block, wait for more data */
			if (state->buf_pos != TEE_AES_BLOCK_SIZE)
				return TEE_SUCCESS;

			/* Full buffered block: hash it below via @head */
			state->buf_pos = 0;
			head = state->buf_hash;
			d += n;
			l -= n;
		}

		if (internal_aes_gcm_ptr_is_block_aligned(d))
			n = l / TEE_AES_BLOCK_SIZE;
		else
			n = 0;

		/* Hash the buffered block (if any) plus n aligned blocks */
		internal_aes_gcm_ghash_update(state, head, d, n);
		l -= n * TEE_AES_BLOCK_SIZE;
		d += n * TEE_AES_BLOCK_SIZE;
	}

	return TEE_SUCCESS;
}
180 
181 TEE_Result internal_aes_gcm_update_aad(struct internal_aes_gcm_ctx *ctx,
182 				       const void *data, size_t len)
183 {
184 	return __gcm_update_aad(&ctx->state, data, len);
185 }
186 
/*
 * Encrypt or decrypt @len payload bytes from @src into @dst and feed
 * the resulting ciphertext into GHASH.
 *
 * buf_cryp holds the current encrypted counter block (keystream) and
 * buf_hash collects ciphertext until a full block can be hashed. For
 * encryption buf_cryp is pre-encrypted (see __gcm_init()), for
 * decryption it is encrypted lazily when a new block is started.
 */
static TEE_Result
__gcm_update_payload(struct internal_aes_gcm_state *state,
		     const struct internal_aes_gcm_key *ek,
		     TEE_OperationMode mode, const void *src,
		     size_t len, void *dst)
{
	size_t n;
	const uint8_t *s = src;
	uint8_t *d = dst;
	size_t l = len;

	if (!state->payload_bytes && state->buf_pos) {
		/* AAD part done, finish up the last bits. */
		memset(state->buf_hash + state->buf_pos, 0,
		       TEE_AES_BLOCK_SIZE - state->buf_pos);
		internal_aes_gcm_ghash_update(state, state->buf_hash, NULL, 0);
		state->buf_pos = 0;
	}

	state->payload_bytes += len;

	while (l) {
		if (state->buf_pos ||
		    !internal_aes_gcm_ptr_is_block_aligned(s) ||
		    !internal_aes_gcm_ptr_is_block_aligned(d) ||
		    l < TEE_AES_BLOCK_SIZE) {
			/* Unaligned or partial data: go byte-wise */
			n = MIN(TEE_AES_BLOCK_SIZE - state->buf_pos, l);

			/* Starting a new block while decrypting: make
			 * the keystream now. */
			if (!state->buf_pos && mode == TEE_MODE_DECRYPT) {
				internal_aes_gcm_encrypt_block(ek, state->ctr,
							       state->buf_cryp);
			}

			/* keystream ^= input gives the output in place */
			xor_buf(state->buf_cryp + state->buf_pos, s, n);
			memcpy(d, state->buf_cryp + state->buf_pos, n);
			/* GHASH always runs over the ciphertext */
			if (mode == TEE_MODE_ENCRYPT)
				memcpy(state->buf_hash + state->buf_pos,
				       state->buf_cryp + state->buf_pos, n);
			else
				memcpy(state->buf_hash + state->buf_pos, s, n);

			state->buf_pos += n;

			if (state->buf_pos != TEE_AES_BLOCK_SIZE)
				return TEE_SUCCESS;

			/* Block complete: hash it and advance the counter */
			internal_aes_gcm_ghash_update(state, state->buf_hash,
						      NULL, 0);
			state->buf_pos = 0;
			d += n;
			s += n;
			l -= n;

			/* Pre-encrypt the next keystream block when
			 * encrypting, see comment in __gcm_init(). */
			if (mode == TEE_MODE_ENCRYPT)
				internal_aes_gcm_encrypt_block(ek, state->ctr,
							       state->buf_cryp);
			internal_aes_gcm_inc_ctr(state);
		} else {
			/* Both pointers block aligned: use the fast path */
			n = l / TEE_AES_BLOCK_SIZE;
			internal_aes_gcm_update_payload_block_aligned(state, ek,
								      mode,
								      s, n, d);
			s += n * TEE_AES_BLOCK_SIZE;
			d += n * TEE_AES_BLOCK_SIZE;
			l -= n * TEE_AES_BLOCK_SIZE;
		}
	}

	return TEE_SUCCESS;
}
257 
258 TEE_Result internal_aes_gcm_update_payload(struct internal_aes_gcm_ctx *ctx,
259 					   TEE_OperationMode mode,
260 					   const void *src, size_t len,
261 					   void *dst)
262 {
263 	return __gcm_update_payload(&ctx->state, &ctx->key, mode, src, len,
264 				    dst);
265 }
266 
267 static TEE_Result operation_final(struct internal_aes_gcm_state *state,
268 				  const struct internal_aes_gcm_key *enc_key,
269 				  TEE_OperationMode m, const uint8_t *src,
270 				  size_t len, uint8_t *dst)
271 {
272 	TEE_Result res;
273 
274 	res = __gcm_update_payload(state, enc_key, m, src, len, dst);
275 	if (res)
276 		return res;
277 
278 	if (state->buf_pos) {
279 		memset(state->buf_hash + state->buf_pos, 0,
280 		       sizeof(state->buf_hash) - state->buf_pos);
281 		internal_aes_gcm_ghash_update(state, state->buf_hash, NULL, 0);
282 	}
283 
284 	ghash_update_lengths(state, state->aad_bytes, state->payload_bytes);
285 	/* buf_tag was filled in with the first counter block aes_gcm_init() */
286 	xor_buf(state->buf_tag, state->hash_state, state->tag_len);
287 
288 	return TEE_SUCCESS;
289 }
290 
291 static TEE_Result __gcm_enc_final(struct internal_aes_gcm_state *state,
292 				  const struct internal_aes_gcm_key *enc_key,
293 				  const void *src, size_t len, void *dst,
294 				  void *tag, size_t *tag_len)
295 {
296 	TEE_Result res;
297 
298 	if (*tag_len < state->tag_len)
299 		return TEE_ERROR_SHORT_BUFFER;
300 
301 	res = operation_final(state, enc_key, TEE_MODE_ENCRYPT, src, len, dst);
302 	if (res)
303 		return res;
304 
305 	memcpy(tag, state->buf_tag, state->tag_len);
306 	*tag_len = state->tag_len;
307 
308 	return TEE_SUCCESS;
309 }
310 
311 TEE_Result internal_aes_gcm_enc_final(struct internal_aes_gcm_ctx *ctx,
312 				      const void *src, size_t len, void *dst,
313 				      void *tag, size_t *tag_len)
314 {
315 	return __gcm_enc_final(&ctx->state, &ctx->key, src, len, dst, tag,
316 			       tag_len);
317 }
318 
319 static TEE_Result __gcm_dec_final(struct internal_aes_gcm_state *state,
320 				  const struct internal_aes_gcm_key *enc_key,
321 				  const void *src, size_t len, void *dst,
322 				  const void *tag, size_t tag_len)
323 {
324 	TEE_Result res;
325 
326 	if (tag_len != state->tag_len)
327 		return TEE_ERROR_MAC_INVALID;
328 
329 	res = operation_final(state, enc_key, TEE_MODE_DECRYPT, src, len, dst);
330 	if (res)
331 		return res;
332 
333 	if (buf_compare_ct(state->buf_tag, tag, tag_len))
334 		return TEE_ERROR_MAC_INVALID;
335 
336 	return TEE_SUCCESS;
337 }
338 
339 TEE_Result internal_aes_gcm_dec_final(struct internal_aes_gcm_ctx *ctx,
340 				      const void *src, size_t len, void *dst,
341 				      const void *tag, size_t tag_len)
342 {
343 	return __gcm_dec_final(&ctx->state, &ctx->key, src, len, dst, tag,
344 			       tag_len);
345 }
346 
347 void internal_aes_gcm_inc_ctr(struct internal_aes_gcm_state *state)
348 {
349 	uint64_t c;
350 
351 	c = TEE_U64_FROM_BIG_ENDIAN(state->ctr[1]) + 1;
352 	state->ctr[1] = TEE_U64_TO_BIG_ENDIAN(c);
353 	if (!c) {
354 		c = TEE_U64_FROM_BIG_ENDIAN(state->ctr[0]) + 1;
355 		state->ctr[0] = TEE_U64_TO_BIG_ENDIAN(c);
356 	}
357 }
358 
359 TEE_Result internal_aes_gcm_enc(const struct internal_aes_gcm_key *enc_key,
360 				const void *nonce, size_t nonce_len,
361 				const void *aad, size_t aad_len,
362 				const void *src, size_t len, void *dst,
363 				void *tag, size_t *tag_len)
364 {
365 	TEE_Result res;
366 	struct internal_aes_gcm_state state;
367 
368 	res = __gcm_init(&state, enc_key, TEE_MODE_ENCRYPT, nonce, nonce_len,
369 			 *tag_len);
370 	if (res)
371 		return res;
372 
373 	if (aad) {
374 		res = __gcm_update_aad(&state, aad, aad_len);
375 		if (res)
376 			return res;
377 	}
378 
379 	return __gcm_enc_final(&state, enc_key, src, len, dst, tag, tag_len);
380 }
381 
382 TEE_Result internal_aes_gcm_dec(const struct internal_aes_gcm_key *enc_key,
383 				const void *nonce, size_t nonce_len,
384 				const void *aad, size_t aad_len,
385 				const void *src, size_t len, void *dst,
386 				const void *tag, size_t tag_len)
387 {
388 	TEE_Result res;
389 	struct internal_aes_gcm_state state;
390 
391 	res = __gcm_init(&state, enc_key, TEE_MODE_DECRYPT, nonce, nonce_len,
392 			 tag_len);
393 	if (res)
394 		return res;
395 
396 	if (aad) {
397 		res = __gcm_update_aad(&state, aad, aad_len);
398 		if (res)
399 			return res;
400 	}
401 
402 	return __gcm_dec_final(&state, enc_key, src, len, dst, tag, tag_len);
403 }
404 
405 
406 #ifndef CFG_CRYPTO_AES_GCM_FROM_CRYPTOLIB
407 #include <crypto/aes-gcm.h>
408 
409 size_t crypto_aes_gcm_get_ctx_size(void)
410 {
411 	return sizeof(struct internal_aes_gcm_ctx);
412 }
413 
414 TEE_Result crypto_aes_gcm_init(void *c, TEE_OperationMode mode,
415 			       const uint8_t *key, size_t key_len,
416 			       const uint8_t *nonce, size_t nonce_len,
417 			       size_t tag_len)
418 {
419 	return internal_aes_gcm_init(c, mode, key, key_len, nonce, nonce_len,
420 				     tag_len);
421 }
422 
423 TEE_Result crypto_aes_gcm_update_aad(void *c, const uint8_t *data, size_t len)
424 {
425 	return internal_aes_gcm_update_aad(c, data, len);
426 }
427 
428 TEE_Result crypto_aes_gcm_update_payload(void *c, TEE_OperationMode m,
429 					 const uint8_t *src, size_t len,
430 					 uint8_t *dst)
431 {
432 	return internal_aes_gcm_update_payload(c, m, src, len, dst);
433 }
434 
435 TEE_Result crypto_aes_gcm_enc_final(void *c, const uint8_t *src, size_t len,
436 				    uint8_t *dst, uint8_t *tag, size_t *tag_len)
437 {
438 	return internal_aes_gcm_enc_final(c, src, len, dst, tag, tag_len);
439 }
440 
441 TEE_Result crypto_aes_gcm_dec_final(void *c, const uint8_t *src, size_t len,
442 				    uint8_t *dst, const uint8_t *tag,
443 				    size_t tag_len)
444 {
445 	return internal_aes_gcm_dec_final(c, src, len, dst, tag, tag_len);
446 }
447 
/* Nothing to release: the context holds no dynamically allocated state. */
void crypto_aes_gcm_final(void *c __unused)
{
}
451 #endif /*!CFG_CRYPTO_AES_GCM_FROM_CRYPTOLIB*/
452