xref: /optee_os/core/crypto/aes-gcm.c (revision 54af8d679d9af478930edd5a8caef1b778dfe74d)
1 /*
2  * Copyright (c) 2017, Linaro Limited
3  * All rights reserved.
4  *
5  * SPDX-License-Identifier: BSD-2-Clause
6  */
7 
8 #include <assert.h>
9 #include <crypto/internal_aes-gcm.h>
10 #include <io.h>
11 #include <string_ext.h>
12 #include <string.h>
13 #include <tee_api_types.h>
14 #include <types_ext.h>
15 #include <utee_defines.h>
16 #include <util.h>
17 
18 #include "aes-gcm-private.h"
19 
/*
 * XOR @len bytes of @src into @dst in place. Used to apply the CTR key
 * stream and to fold the GHASH result into the tag buffer.
 */
static void xor_buf(uint8_t *dst, const uint8_t *src, size_t len)
{
	const uint8_t *s = src;
	uint8_t *d = dst;

	while (len--)
		*d++ ^= *s++;
}
27 
28 
/*
 * Feed @len bytes of @data into the running GHASH, zero-padding the
 * trailing partial block as required by the GCM spec.
 *
 * The uint64_t block[2] bounce buffer provides the alignment expected by
 * internal_aes_gcm_ghash_update() when it is handed block-aligned data
 * pointers, and serves as scratch for the zero-padded last block.
 */
static void ghash_update_pad_zero(struct internal_aes_gcm_state *state,
				  const uint8_t *data, size_t len)
{
	size_t n = len / TEE_AES_BLOCK_SIZE;
	uint64_t block[2];

	if (n) {
		if (internal_aes_gcm_ptr_is_block_aligned(data)) {
			/* Aligned input: hash all full blocks in one go. */
			internal_aes_gcm_ghash_update(state, NULL, data, n);
		} else {
			size_t m;

			/*
			 * Unaligned input: copy one block at a time into
			 * the aligned bounce buffer before hashing.
			 */
			for (m = 0; m < n; m++) {

				memcpy(block, data + m * sizeof(block),
				       sizeof(block));
				internal_aes_gcm_ghash_update(state, NULL,
							      (void *)block, 1);
			}
		}
	}

	/* Trailing partial block, if any: zero-pad to a full block. */
	if (len - n * TEE_AES_BLOCK_SIZE) {
		memset(block, 0, sizeof(block));
		memcpy(block, data + n * TEE_AES_BLOCK_SIZE,
		       len - n * TEE_AES_BLOCK_SIZE);
		internal_aes_gcm_ghash_update(state, block, NULL, 0);
	}
}
58 
59 static void ghash_update_lengths(struct internal_aes_gcm_state *state,
60 				 uint32_t l1, uint32_t l2)
61 {
62 	uint64_t len_fields[2] = {
63 		TEE_U64_TO_BIG_ENDIAN(l1 * 8),
64 		TEE_U64_TO_BIG_ENDIAN(l2 * 8)
65 	};
66 
67 	COMPILE_TIME_ASSERT(sizeof(len_fields) == TEE_AES_BLOCK_SIZE);
68 	internal_aes_gcm_ghash_update(state, (uint8_t *)len_fields, NULL, 0);
69 }
70 
/*
 * Common GCM state initialization: derive the initial counter block (Y0)
 * from the nonce, precompute E(K, Y0) into buf_tag (XORed with the GHASH
 * result at finalization) and, when encrypting, pre-generate the first
 * key-stream block into buf_cryp.
 *
 * Returns TEE_ERROR_BAD_PARAMETERS if @tag_len exceeds the tag buffer.
 */
static TEE_Result __gcm_init(struct internal_aes_gcm_state *state,
			     const struct internal_aes_gcm_key *ek,
			     TEE_OperationMode mode, const void *nonce,
			     size_t nonce_len, size_t tag_len)
{
	COMPILE_TIME_ASSERT(sizeof(state->ctr) == TEE_AES_BLOCK_SIZE);

	if (tag_len > sizeof(state->buf_tag))
		return TEE_ERROR_BAD_PARAMETERS;

	memset(state, 0, sizeof(*state));

	state->tag_len = tag_len;
	internal_aes_gcm_set_key(state, ek);

	if (nonce_len == (96 / 8)) {
		/*
		 * 96-bit nonce: Y0 = nonce || 0^31 || 1 (the state was
		 * zeroed above; the increment below sets the trailing 1).
		 */
		memcpy(state->ctr, nonce, nonce_len);
		internal_aes_gcm_inc_ctr(state);
	} else {
		/*
		 * Other nonce lengths: Y0 = GHASH(zero-padded nonce ||
		 * lengths), then reset the hash state for AAD/payload.
		 */
		ghash_update_pad_zero(state, nonce, nonce_len);
		ghash_update_lengths(state, 0, nonce_len);

		memcpy(state->ctr, state->hash_state, sizeof(state->ctr));
		memset(state->hash_state, 0, sizeof(state->hash_state));
	}

	/* buf_tag = E(K, Y0), consumed at finalization. */
	internal_aes_gcm_encrypt_block(ek, state->ctr, state->buf_tag);
	internal_aes_gcm_inc_ctr(state);
	if (mode == TEE_MODE_ENCRYPT) {
		/*
		 * Encryption uses the pre-encrypted xor-buffer to encrypt
		 * while decryption encrypts the xor-buffer when needed
		 * instead.
		 *
		 * The reason for this is that the combined encryption and
		 * ghash implementation does both operations intertwined.
		 * In the decrypt case the xor-buffer is needed at the end
		 * of processing each block, while the encryption case
		 * needs xor-buffer before processing each block.
		 *
		 * In a pure software implementation we wouldn't have any
		 * use for this kind of optimization, but since this
		 * AES-GCM implementation is aimed at being combined with
		 * accelerated routines it's more convenient to always have
		 * this optimization activated.
		 */
		internal_aes_gcm_encrypt_block(ek, state->ctr, state->buf_cryp);
		internal_aes_gcm_inc_ctr(state);
	}

	return TEE_SUCCESS;
}
123 
124 TEE_Result internal_aes_gcm_init(struct internal_aes_gcm_ctx *ctx,
125 				 TEE_OperationMode mode, const void *key,
126 				 size_t key_len, const void *nonce,
127 				 size_t nonce_len, size_t tag_len)
128 {
129 	TEE_Result res = internal_aes_gcm_expand_enc_key(key, key_len,
130 							 &ctx->key);
131 	if (res)
132 		return res;
133 
134 	return __gcm_init(&ctx->state, &ctx->key, mode, nonce, nonce_len,
135 			  tag_len);
136 }
137 
/*
 * Absorb additional authenticated data into GHASH. Must be called before
 * any payload has been processed (AAD strictly precedes payload in GCM),
 * otherwise TEE_ERROR_BAD_PARAMETERS is returned.
 *
 * Partial or unaligned input is staged in state->buf_hash until a full
 * block is available; block-aligned bulk input is hashed directly.
 */
TEE_Result internal_aes_gcm_update_aad(struct internal_aes_gcm_ctx *ctx,
				       const void *data, size_t len)
{
	struct internal_aes_gcm_state *state = &ctx->state;
	const uint8_t *d = data;
	size_t l = len;
	const uint8_t *head = NULL;
	size_t n;

	/* AAD may not be added once payload processing has started. */
	if (state->payload_bytes)
		return TEE_ERROR_BAD_PARAMETERS;

	state->aad_bytes += len;

	while (l) {
		/* Buffered path: mid-block, unaligned, or short input. */
		if (state->buf_pos ||
		    !internal_aes_gcm_ptr_is_block_aligned(d) ||
		    l < TEE_AES_BLOCK_SIZE) {
			n = MIN(TEE_AES_BLOCK_SIZE - state->buf_pos, l);
			memcpy(state->buf_hash + state->buf_pos, d, n);
			state->buf_pos += n;

			/* Still a partial block: wait for more data. */
			if (state->buf_pos != TEE_AES_BLOCK_SIZE)
				return TEE_SUCCESS;

			/* Completed buffered block, hashed as "head" below. */
			state->buf_pos = 0;
			head = state->buf_hash;
			d += n;
			l -= n;
		}

		/* Hash as many aligned full blocks as possible from @d. */
		if (internal_aes_gcm_ptr_is_block_aligned(d))
			n = l / TEE_AES_BLOCK_SIZE;
		else
			n = 0;

		internal_aes_gcm_ghash_update(state, head, d, n);
		l -= n * TEE_AES_BLOCK_SIZE;
		d += n * TEE_AES_BLOCK_SIZE;
	}

	return TEE_SUCCESS;
}
181 
/*
 * En-/decrypt @len payload bytes from @src into @dst, folding the
 * ciphertext into GHASH. Bulk data with both pointers block aligned is
 * handed to the (possibly accelerated) block-aligned routine; everything
 * else goes through the internal staging buffers one block at a time.
 */
TEE_Result internal_aes_gcm_update_payload(struct internal_aes_gcm_ctx *ctx,
					   TEE_OperationMode mode,
					   const void *src, size_t len,
					   void *dst)
{
	struct internal_aes_gcm_state *state = &ctx->state;
	struct internal_aes_gcm_key *ek = &ctx->key;
	size_t n;
	const uint8_t *s = src;
	uint8_t *d = dst;
	size_t l = len;

	if (!state->payload_bytes && state->buf_pos) {
		/* AAD part done, finish up the last bits. */
		memset(state->buf_hash + state->buf_pos, 0,
		       TEE_AES_BLOCK_SIZE - state->buf_pos);
		internal_aes_gcm_ghash_update(state, state->buf_hash, NULL, 0);
		state->buf_pos = 0;
	}

	state->payload_bytes += len;

	while (l) {
		/* Buffered path: mid-block, unaligned, or short input. */
		if (state->buf_pos ||
		    !internal_aes_gcm_ptr_is_block_aligned(s) ||
		    !internal_aes_gcm_ptr_is_block_aligned(d) ||
		    l < TEE_AES_BLOCK_SIZE) {
			n = MIN(TEE_AES_BLOCK_SIZE - state->buf_pos, l);

			/*
			 * Decryption generates its key-stream block lazily
			 * here; encryption pre-generated it (in __gcm_init()
			 * or at the end of this loop body).
			 */
			if (!state->buf_pos && mode == TEE_MODE_DECRYPT) {
				internal_aes_gcm_encrypt_block(ek, state->ctr,
							       state->buf_cryp);
			}

			/* CTR mode: output = key stream XOR input. */
			xor_buf(state->buf_cryp + state->buf_pos, s, n);
			memcpy(d, state->buf_cryp + state->buf_pos, n);
			/* GHASH always runs over the ciphertext. */
			if (mode == TEE_MODE_ENCRYPT)
				memcpy(state->buf_hash + state->buf_pos,
				       state->buf_cryp + state->buf_pos, n);
			else
				memcpy(state->buf_hash + state->buf_pos, s, n);

			state->buf_pos += n;

			/* Partial block: wait for more payload. */
			if (state->buf_pos != TEE_AES_BLOCK_SIZE)
				return TEE_SUCCESS;

			internal_aes_gcm_ghash_update(state, state->buf_hash,
						      NULL, 0);
			state->buf_pos = 0;
			d += n;
			s += n;
			l -= n;

			/* Pre-generate key stream for the next block. */
			if (mode == TEE_MODE_ENCRYPT)
				internal_aes_gcm_encrypt_block(ek, state->ctr,
							       state->buf_cryp);
			internal_aes_gcm_inc_ctr(state);
		} else {
			/* Bulk path: both pointers block aligned. */
			n = l / TEE_AES_BLOCK_SIZE;
			internal_aes_gcm_update_payload_block_aligned(state, ek,
								      mode,
								      s, n, d);
			s += n * TEE_AES_BLOCK_SIZE;
			d += n * TEE_AES_BLOCK_SIZE;
			l -= n * TEE_AES_BLOCK_SIZE;
		}
	}

	return TEE_SUCCESS;
}
253 
254 static TEE_Result operation_final(struct internal_aes_gcm_ctx *ctx,
255 				  TEE_OperationMode m, const uint8_t *src,
256 				  size_t len, uint8_t *dst)
257 {
258 	struct internal_aes_gcm_state *state = &ctx->state;
259 	TEE_Result res;
260 
261 	res = internal_aes_gcm_update_payload(ctx, m, src, len, dst);
262 	if (res)
263 		return res;
264 
265 	if (state->buf_pos) {
266 		memset(state->buf_hash + state->buf_pos, 0,
267 		       sizeof(state->buf_hash) - state->buf_pos);
268 		internal_aes_gcm_ghash_update(state, state->buf_hash, NULL, 0);
269 	}
270 
271 	ghash_update_lengths(state, state->aad_bytes, state->payload_bytes);
272 	/* buf_tag was filled in with the first counter block aes_gcm_init() */
273 	xor_buf(state->buf_tag, state->hash_state, state->tag_len);
274 
275 	return TEE_SUCCESS;
276 }
277 
278 TEE_Result internal_aes_gcm_enc_final(struct internal_aes_gcm_ctx *ctx,
279 				      const void *src, size_t len, void *dst,
280 				      void *tag, size_t *tag_len)
281 {
282 	struct internal_aes_gcm_state *state = &ctx->state;
283 	TEE_Result res;
284 
285 	if (*tag_len < state->tag_len)
286 		return TEE_ERROR_SHORT_BUFFER;
287 
288 	res = operation_final(ctx, TEE_MODE_ENCRYPT, src, len, dst);
289 	if (res)
290 		return res;
291 
292 	memcpy(tag, state->buf_tag, state->tag_len);
293 	*tag_len = state->tag_len;
294 
295 	return TEE_SUCCESS;
296 }
297 
298 TEE_Result internal_aes_gcm_dec_final(struct internal_aes_gcm_ctx *ctx,
299 				      const void *src, size_t len, void *dst,
300 				      const void *tag, size_t tag_len)
301 {
302 	struct internal_aes_gcm_state *state = &ctx->state;
303 	TEE_Result res;
304 
305 	if (tag_len != state->tag_len)
306 		return TEE_ERROR_MAC_INVALID;
307 
308 	res = operation_final(ctx, TEE_MODE_DECRYPT, src, len, dst);
309 	if (res)
310 		return res;
311 
312 	if (buf_compare_ct(state->buf_tag, tag, tag_len))
313 		return TEE_ERROR_MAC_INVALID;
314 
315 	return TEE_SUCCESS;
316 }
317 
318 void internal_aes_gcm_inc_ctr(struct internal_aes_gcm_state *state)
319 {
320 	uint64_t c;
321 
322 	c = TEE_U64_FROM_BIG_ENDIAN(state->ctr[1]) + 1;
323 	state->ctr[1] = TEE_U64_TO_BIG_ENDIAN(c);
324 	if (!c) {
325 		c = TEE_U64_FROM_BIG_ENDIAN(state->ctr[0]) + 1;
326 		state->ctr[0] = TEE_U64_TO_BIG_ENDIAN(c);
327 	}
328 }
329 
330 #ifndef CFG_CRYPTO_AES_GCM_FROM_CRYPTOLIB
331 #include <crypto/aes-gcm.h>
332 
/* Number of bytes callers must allocate for an opaque AES-GCM context. */
size_t crypto_aes_gcm_get_ctx_size(void)
{
	return sizeof(struct internal_aes_gcm_ctx);
}
337 
338 TEE_Result crypto_aes_gcm_init(void *c, TEE_OperationMode mode,
339 			       const uint8_t *key, size_t key_len,
340 			       const uint8_t *nonce, size_t nonce_len,
341 			       size_t tag_len)
342 {
343 	return internal_aes_gcm_init(c, mode, key, key_len, nonce, nonce_len,
344 				     tag_len);
345 }
346 
347 TEE_Result crypto_aes_gcm_update_aad(void *c, const uint8_t *data, size_t len)
348 {
349 	return internal_aes_gcm_update_aad(c, data, len);
350 }
351 
352 TEE_Result crypto_aes_gcm_update_payload(void *c, TEE_OperationMode m,
353 					 const uint8_t *src, size_t len,
354 					 uint8_t *dst)
355 {
356 	return internal_aes_gcm_update_payload(c, m, src, len, dst);
357 }
358 
359 TEE_Result crypto_aes_gcm_enc_final(void *c, const uint8_t *src, size_t len,
360 				    uint8_t *dst, uint8_t *tag, size_t *tag_len)
361 {
362 	return internal_aes_gcm_enc_final(c, src, len, dst, tag, tag_len);
363 }
364 
365 TEE_Result crypto_aes_gcm_dec_final(void *c, const uint8_t *src, size_t len,
366 				    uint8_t *dst, const uint8_t *tag,
367 				    size_t tag_len)
368 {
369 	return internal_aes_gcm_dec_final(c, src, len, dst, tag, tag_len);
370 }
371 
/*
 * Finalization hook for the crypto API; intentionally a no-op here.
 *
 * NOTE(review): the expanded key and GCM state remain in the caller's
 * context buffer after this call — consider wiping them here unless the
 * caller is responsible for that; verify against the crypto API contract.
 */
void crypto_aes_gcm_final(void *c __unused)
{
}
375 #endif /*!CFG_CRYPTO_AES_GCM_FROM_CRYPTOLIB*/
376