xref: /optee_os/core/crypto/aes-gcm.c (revision 424cb3863ab707e652dc5c2f54c1d78686c45fa2)
1 /*
2  * Copyright (c) 2017, Linaro Limited
3  * All rights reserved.
4  *
5  * SPDX-License-Identifier: BSD-2-Clause
6  */
7 
8 #include <assert.h>
9 #include <crypto/internal_aes-gcm.h>
10 #include <io.h>
11 #include <string_ext.h>
12 #include <string.h>
13 #include <tee_api_types.h>
14 #include <types_ext.h>
15 #include <utee_defines.h>
16 #include <util.h>
17 
18 #include "aes-gcm-private.h"
19 
/* XOR @len bytes of @src into @dst in place. */
static void xor_buf(uint8_t *dst, const uint8_t *src, size_t len)
{
	size_t i = 0;

	while (i < len) {
		dst[i] ^= src[i];
		i++;
	}
}
27 
28 
29 static void ghash_update_pad_zero(struct internal_aes_gcm_ctx *ctx,
30 				  const uint8_t *data, size_t len)
31 {
32 	size_t n = len / TEE_AES_BLOCK_SIZE;
33 	uint64_t block[2];
34 
35 	if (n) {
36 		if (internal_aes_gcm_ptr_is_block_aligned(data)) {
37 			internal_aes_gcm_ghash_update(ctx, NULL, data, n);
38 		} else {
39 			size_t m;
40 
41 			for (m = 0; m < n; m++) {
42 
43 				memcpy(block, data + m * sizeof(block),
44 				       sizeof(block));
45 				internal_aes_gcm_ghash_update(ctx, NULL,
46 							      (void *)block, 1);
47 			}
48 		}
49 	}
50 
51 	if (len - n * TEE_AES_BLOCK_SIZE) {
52 		memset(block, 0, sizeof(block));
53 		memcpy(block, data + n * TEE_AES_BLOCK_SIZE,
54 		       len - n * TEE_AES_BLOCK_SIZE);
55 		internal_aes_gcm_ghash_update(ctx, block, NULL, 0);
56 	}
57 }
58 
59 static void ghash_update_lengths(struct internal_aes_gcm_ctx *ctx, uint32_t l1,
60 				 uint32_t l2)
61 {
62 	uint64_t len_fields[2] = {
63 		TEE_U64_TO_BIG_ENDIAN(l1 * 8),
64 		TEE_U64_TO_BIG_ENDIAN(l2 * 8)
65 	};
66 
67 	COMPILE_TIME_ASSERT(sizeof(len_fields) == TEE_AES_BLOCK_SIZE);
68 	internal_aes_gcm_ghash_update(ctx, (uint8_t *)len_fields, NULL, 0);
69 }
70 
/*
 * Initialize an AES-GCM operation.
 *
 * @ctx is fully reset. @mode is TEE_MODE_ENCRYPT or TEE_MODE_DECRYPT,
 * @key/@key_len the AES key, @nonce/@nonce_len the IV and @tag_len the
 * number of tag bytes to produce/check (must fit in ctx->buf_tag).
 *
 * Returns TEE_ERROR_BAD_PARAMETERS for a too large @tag_len, an error
 * from internal_aes_gcm_set_key(), or TEE_SUCCESS.
 */
TEE_Result internal_aes_gcm_init(struct internal_aes_gcm_ctx *ctx,
				 TEE_OperationMode mode, const void *key,
				 size_t key_len, const void *nonce,
				 size_t nonce_len, size_t tag_len)
{
	TEE_Result res;

	COMPILE_TIME_ASSERT(sizeof(ctx->ctr) == TEE_AES_BLOCK_SIZE);

	if (tag_len > sizeof(ctx->buf_tag))
		return TEE_ERROR_BAD_PARAMETERS;

	memset(ctx, 0, sizeof(*ctx));

	ctx->tag_len = tag_len;
	res = internal_aes_gcm_set_key(ctx, key, key_len);
	if (res)
		return res;

	if (nonce_len == (96 / 8)) {
		/*
		 * 96-bit IV: initial counter block is IV || 0^31 || 1,
		 * the trailing 1 comes from the increment below (ctx was
		 * just zeroed).
		 */
		memcpy(ctx->ctr, nonce, nonce_len);
		internal_aes_gcm_inc_ctr(ctx);
	} else {
		/*
		 * Other IV lengths: derive the initial counter block as
		 * GHASH(IV zero-padded || len64(IV)), then clear the hash
		 * state so AAD/payload hashing starts fresh.
		 */
		ghash_update_pad_zero(ctx, nonce, nonce_len);
		ghash_update_lengths(ctx, 0, nonce_len);

		memcpy(ctx->ctr, ctx->hash_state, sizeof(ctx->ctr));
		memset(ctx->hash_state, 0, sizeof(ctx->hash_state));
	}

	/*
	 * buf_tag holds the encrypted initial counter block; it's xored
	 * with the final GHASH value when the tag is produced.
	 */
	internal_aes_gcm_encrypt_block(ctx, ctx->ctr, ctx->buf_tag);
	internal_aes_gcm_inc_ctr(ctx);
	if (mode == TEE_MODE_ENCRYPT) {
		/*
		 * Encryption uses the pre-encrypted xor-buffer to encrypt
		 * while decryption encrypts the xor-buffer when needed
		 * instead.
		 *
		 * The reason for this is that the combined encryption and
		 * ghash implementation does both operations intertwined.
		 * In the decrypt case the xor-buffer is needed at the end
		 * of processing each block, while the encryption case
		 * needs xor-buffer before processing each block.
		 *
		 * In a pure software implementation we wouldn't have any
		 * use for this kind of optimization, but since this
		 * AES-GCM implementation is aimed at being combined with
		 * accelerated routines it's more convenient to always have
		 * this optimization activated.
		 */
		internal_aes_gcm_encrypt_block(ctx, ctx->ctr, ctx->buf_cryp);
		internal_aes_gcm_inc_ctr(ctx);
	}

	return TEE_SUCCESS;
}
127 
/*
 * Feed @len bytes of additional authenticated data into GHASH.
 *
 * Data is buffered in ctx->buf_hash until a full, aligned block is
 * available. Must be called before any payload has been processed;
 * returns TEE_ERROR_BAD_PARAMETERS once payload_bytes is non-zero.
 */
TEE_Result internal_aes_gcm_update_aad(struct internal_aes_gcm_ctx *ctx,
				       const void *data, size_t len)
{
	const uint8_t *d = data;
	size_t l = len;
	const uint8_t *head = NULL;
	size_t n;

	if (ctx->payload_bytes)
		return TEE_ERROR_BAD_PARAMETERS;

	ctx->aad_bytes += len;

	while (l) {
		/*
		 * Buffer when we have a partial block pending, the input
		 * is unaligned, or less than a block remains.
		 */
		if (ctx->buf_pos || !internal_aes_gcm_ptr_is_block_aligned(d) ||
		    l < TEE_AES_BLOCK_SIZE) {
			n = MIN(TEE_AES_BLOCK_SIZE - ctx->buf_pos, l);
			memcpy(ctx->buf_hash + ctx->buf_pos, d, n);
			ctx->buf_pos += n;

			/* Partial block stays buffered until more AAD */
			if (ctx->buf_pos != TEE_AES_BLOCK_SIZE)
				return TEE_SUCCESS;

			/* Full buffered block, hash it below via @head */
			ctx->buf_pos = 0;
			head = ctx->buf_hash;
			d += n;
			l -= n;
		}

		/* Bulk-hash whole blocks directly when aligned */
		if (internal_aes_gcm_ptr_is_block_aligned(d))
			n = l / TEE_AES_BLOCK_SIZE;
		else
			n = 0;

		internal_aes_gcm_ghash_update(ctx, head, d, n);
		l -= n * TEE_AES_BLOCK_SIZE;
		d += n * TEE_AES_BLOCK_SIZE;
	}

	return TEE_SUCCESS;
}
169 
/*
 * Encrypt or decrypt @len payload bytes from @src into @dst while
 * updating GHASH.
 *
 * CTR keystream blocks are kept in ctx->buf_cryp; partial blocks are
 * accumulated in ctx->buf_cryp/buf_hash via ctx->buf_pos. Whole aligned
 * blocks go through the (possibly accelerated) block-aligned routine.
 */
TEE_Result internal_aes_gcm_update_payload(struct internal_aes_gcm_ctx *ctx,
					   TEE_OperationMode mode,
					   const void *src, size_t len,
					   void *dst)
{
	size_t n;
	const uint8_t *s = src;
	uint8_t *d = dst;
	size_t l = len;

	if (!ctx->payload_bytes && ctx->buf_pos) {
		/* AAD part done, finish up the last bits. */
		memset(ctx->buf_hash + ctx->buf_pos, 0,
		       TEE_AES_BLOCK_SIZE - ctx->buf_pos);
		internal_aes_gcm_ghash_update(ctx, ctx->buf_hash, NULL, 0);
		ctx->buf_pos = 0;
	}

	ctx->payload_bytes += len;

	while (l) {
		/*
		 * Take the byte-at-a-time path when a partial block is
		 * pending, either pointer is unaligned, or less than a
		 * block remains.
		 */
		if (ctx->buf_pos || !internal_aes_gcm_ptr_is_block_aligned(s) ||
		    !internal_aes_gcm_ptr_is_block_aligned(d) ||
		    l < TEE_AES_BLOCK_SIZE) {
			n = MIN(TEE_AES_BLOCK_SIZE - ctx->buf_pos, l);

			/*
			 * Decrypt generates the keystream lazily at the
			 * start of each block; encrypt pre-generated it
			 * (see internal_aes_gcm_init()).
			 */
			if (!ctx->buf_pos && mode == TEE_MODE_DECRYPT) {
				internal_aes_gcm_encrypt_block(ctx, ctx->ctr,
							       ctx->buf_cryp);
			}

			/* buf_cryp = keystream ^ input = output bytes */
			xor_buf(ctx->buf_cryp + ctx->buf_pos, s, n);
			memcpy(d, ctx->buf_cryp + ctx->buf_pos, n);
			/* GHASH always runs over the ciphertext */
			if (mode == TEE_MODE_ENCRYPT)
				memcpy(ctx->buf_hash + ctx->buf_pos,
				       ctx->buf_cryp + ctx->buf_pos, n);
			else
				memcpy(ctx->buf_hash + ctx->buf_pos, s, n);

			ctx->buf_pos += n;

			if (ctx->buf_pos != TEE_AES_BLOCK_SIZE)
				return TEE_SUCCESS;

			/* Completed a block: hash it and advance the ctr */
			internal_aes_gcm_ghash_update(ctx, ctx->buf_hash,
						      NULL, 0);
			ctx->buf_pos = 0;
			d += n;
			s += n;
			l -= n;

			if (mode == TEE_MODE_ENCRYPT)
				internal_aes_gcm_encrypt_block(ctx, ctx->ctr,
							       ctx->buf_cryp);
			internal_aes_gcm_inc_ctr(ctx);
		} else {
			/* Fast path: whole aligned blocks in one go */
			n = l / TEE_AES_BLOCK_SIZE;
			internal_aes_gcm_update_payload_block_aligned(ctx, mode,
								      s, n, d);
			s += n * TEE_AES_BLOCK_SIZE;
			d += n * TEE_AES_BLOCK_SIZE;
			l -= n * TEE_AES_BLOCK_SIZE;
		}
	}

	return TEE_SUCCESS;
}
237 
238 static TEE_Result operation_final(struct internal_aes_gcm_ctx *ctx,
239 				  TEE_OperationMode m, const uint8_t *src,
240 				  size_t len, uint8_t *dst)
241 {
242 	TEE_Result res;
243 
244 	res = internal_aes_gcm_update_payload(ctx, m, src, len, dst);
245 	if (res)
246 		return res;
247 
248 	if (ctx->buf_pos) {
249 		memset(ctx->buf_hash + ctx->buf_pos, 0,
250 		       sizeof(ctx->buf_hash) - ctx->buf_pos);
251 		internal_aes_gcm_ghash_update(ctx, ctx->buf_hash, NULL, 0);
252 	}
253 
254 	ghash_update_lengths(ctx, ctx->aad_bytes, ctx->payload_bytes);
255 	/* buf_tag was filled in with the first counter block aes_gcm_init() */
256 	xor_buf(ctx->buf_tag, ctx->hash_state, ctx->tag_len);
257 
258 	return TEE_SUCCESS;
259 }
260 
261 TEE_Result internal_aes_gcm_enc_final(struct internal_aes_gcm_ctx *ctx,
262 				      const void *src, size_t len, void *dst,
263 				      void *tag, size_t *tag_len)
264 {
265 	TEE_Result res;
266 
267 	if (*tag_len < ctx->tag_len)
268 		return TEE_ERROR_SHORT_BUFFER;
269 
270 	res = operation_final(ctx, TEE_MODE_ENCRYPT, src, len, dst);
271 	if (res)
272 		return res;
273 
274 	memcpy(tag, ctx->buf_tag, ctx->tag_len);
275 	*tag_len = ctx->tag_len;
276 
277 	return TEE_SUCCESS;
278 }
279 
280 TEE_Result internal_aes_gcm_dec_final(struct internal_aes_gcm_ctx *ctx,
281 				      const void *src, size_t len, void *dst,
282 				      const void *tag, size_t tag_len)
283 {
284 	TEE_Result res;
285 
286 	if (tag_len != ctx->tag_len)
287 		return TEE_ERROR_MAC_INVALID;
288 
289 	res = operation_final(ctx, TEE_MODE_DECRYPT, src, len, dst);
290 	if (res)
291 		return res;
292 
293 	if (buf_compare_ct(ctx->buf_tag, tag, tag_len))
294 		return TEE_ERROR_MAC_INVALID;
295 
296 	return TEE_SUCCESS;
297 }
298 
299 void internal_aes_gcm_inc_ctr(struct internal_aes_gcm_ctx *ctx)
300 {
301 	uint64_t c;
302 
303 	c = TEE_U64_FROM_BIG_ENDIAN(ctx->ctr[1]) + 1;
304 	ctx->ctr[1] = TEE_U64_TO_BIG_ENDIAN(c);
305 	if (!c) {
306 		c = TEE_U64_FROM_BIG_ENDIAN(ctx->ctr[0]) + 1;
307 		ctx->ctr[0] = TEE_U64_TO_BIG_ENDIAN(c);
308 	}
309 }
310 
311 #ifndef CFG_CRYPTO_AES_GCM_FROM_CRYPTOLIB
312 #include <crypto/aes-gcm.h>
313 
/* Size callers must allocate for an opaque AES-GCM context. */
size_t crypto_aes_gcm_get_ctx_size(void)
{
	return sizeof(struct internal_aes_gcm_ctx);
}
318 
/* Thin crypto-API wrapper around internal_aes_gcm_init(). */
TEE_Result crypto_aes_gcm_init(void *c, TEE_OperationMode mode,
			       const uint8_t *key, size_t key_len,
			       const uint8_t *nonce, size_t nonce_len,
			       size_t tag_len)
{
	return internal_aes_gcm_init(c, mode, key, key_len, nonce, nonce_len,
				     tag_len);
}
327 
/* Thin crypto-API wrapper around internal_aes_gcm_update_aad(). */
TEE_Result crypto_aes_gcm_update_aad(void *c, const uint8_t *data, size_t len)
{
	return internal_aes_gcm_update_aad(c, data, len);
}
332 
/* Thin crypto-API wrapper around internal_aes_gcm_update_payload(). */
TEE_Result crypto_aes_gcm_update_payload(void *c, TEE_OperationMode m,
					 const uint8_t *src, size_t len,
					 uint8_t *dst)
{
	return internal_aes_gcm_update_payload(c, m, src, len, dst);
}
339 
/* Thin crypto-API wrapper around internal_aes_gcm_enc_final(). */
TEE_Result crypto_aes_gcm_enc_final(void *c, const uint8_t *src, size_t len,
				    uint8_t *dst, uint8_t *tag, size_t *tag_len)
{
	return internal_aes_gcm_enc_final(c, src, len, dst, tag, tag_len);
}
345 
/* Thin crypto-API wrapper around internal_aes_gcm_dec_final(). */
TEE_Result crypto_aes_gcm_dec_final(void *c, const uint8_t *src, size_t len,
				    uint8_t *dst, const uint8_t *tag,
				    size_t tag_len)
{
	return internal_aes_gcm_dec_final(c, src, len, dst, tag, tag_len);
}
352 
/* No per-operation cleanup needed; required by the crypto API. */
void crypto_aes_gcm_final(void *c __unused)
{
}
356 #endif /*!CFG_CRYPTO_AES_GCM_FROM_CRYPTOLIB*/
357