/*
 * Copyright (c) 2017, Linaro Limited
 * All rights reserved.
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#include <crypto/internal_aes-gcm.h>
#include <io.h>
#include <string_ext.h>
#include <string.h>
#include <tee_api_types.h>
#include <types_ext.h>
#include <utee_defines.h>
#include <util.h>

#include "aes-gcm-private.h"

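/* XOR @len bytes of @src into @dst */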
static void xor_buf(uint8_t *dst, const uint8_t *src, size_t len)
{
	size_t n;

	for (n = 0; n < len; n++)
		dst[n] ^= src[n];
}

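/*
 * Feed @len bytes at @data into GHASH, zero-padding the trailing
 * partial block. Whole blocks are passed directly when @data is block
 * aligned, otherwise they are staged through a local buffer one block
 * at a time.
 */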
static void ghash_update_pad_zero(struct internal_aes_gcm_ctx *ctx,
				  const uint8_t *data, size_t len)
{
	size_t n = len / TEE_AES_BLOCK_SIZE;
	uint64_t block[2];

	if (n) {
		if (internal_aes_gcm_ptr_is_block_aligned(data)) {
			internal_aes_gcm_ghash_update(ctx, NULL, data, n);
		} else {
			size_t m;

			for (m = 0; m < n; m++) {
				memcpy(block, data + m * sizeof(block),
				       sizeof(block));
				internal_aes_gcm_ghash_update(ctx, NULL,
							      (void *)block, 1);
			}
		}
	}

	if (len - n * TEE_AES_BLOCK_SIZE) {
		memset(block, 0, sizeof(block));
		memcpy(block, data + n * TEE_AES_BLOCK_SIZE,
		       len - n * TEE_AES_BLOCK_SIZE);
		internal_aes_gcm_ghash_update(ctx, (void *)block, NULL, 0);
	}
}

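/*
 * Feed the GCM length block into GHASH: two big-endian 64-bit fields
 * holding the bit lengths of the two input parts (AAD and payload, or
 * zero and the nonce when deriving the initial counter).
 */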
static void ghash_update_lengths(struct internal_aes_gcm_ctx *ctx, uint32_t l1,
				 uint32_t l2)
{
	/* Widen before multiplying to avoid 32-bit overflow for large sizes */
	uint64_t len_fields[2] = {
		TEE_U64_TO_BIG_ENDIAN((uint64_t)l1 * 8),
		TEE_U64_TO_BIG_ENDIAN((uint64_t)l2 * 8)
	};

	COMPILE_TIME_ASSERT(sizeof(len_fields) == TEE_AES_BLOCK_SIZE);
	internal_aes_gcm_ghash_update(ctx, (uint8_t *)len_fields, NULL, 0);
}

TEE_Result internal_aes_gcm_init(struct internal_aes_gcm_ctx *ctx,
				 TEE_OperationMode mode, const void *key,
				 size_t key_len, const void *nonce,
				 size_t nonce_len, size_t tag_len)
{
	TEE_Result res;

	COMPILE_TIME_ASSERT(sizeof(ctx->ctr) == TEE_AES_BLOCK_SIZE);

	if (tag_len > sizeof(ctx->buf_tag))
		return TEE_ERROR_BAD_PARAMETERS;

	memset(ctx, 0, sizeof(*ctx));

	ctx->tag_len = tag_len;
	res = internal_aes_gcm_set_key(ctx, key, key_len);
	if (res)
		return res;

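	/*
	 * Derive the initial counter block J0 as in NIST SP 800-38D: a
	 * 96-bit nonce is used as-is with the 32-bit counter part set to
	 * one (ctx was zero-filled above and the increment below sets the
	 * low bits), any other nonce length is zero-padded and fed through
	 * GHASH together with its bit length.
	 */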
	if (nonce_len == (96 / 8)) {
		memcpy(ctx->ctr, nonce, nonce_len);
		internal_aes_gcm_inc_ctr(ctx);
	} else {
		ghash_update_pad_zero(ctx, nonce, nonce_len);
		ghash_update_lengths(ctx, 0, nonce_len);

		memcpy(ctx->ctr, ctx->hash_state, sizeof(ctx->ctr));
		memset(ctx->hash_state, 0, sizeof(ctx->hash_state));
	}

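	/*
	 * Precompute E(K, J0) into buf_tag; operation_final() XORs the
	 * final GHASH value into it to produce the authentication tag.
	 */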
	internal_aes_gcm_encrypt_block(ctx, ctx->ctr, ctx->buf_tag);
	internal_aes_gcm_inc_ctr(ctx);
	if (mode == TEE_MODE_ENCRYPT) {
		/*
		 * Encryption uses the pre-encrypted xor-buffer to encrypt,
		 * while decryption instead encrypts the xor-buffer on
		 * demand.
		 *
		 * The reason for this is that the combined encryption and
		 * ghash implementation does both operations intertwined.
		 * In the decrypt case the xor-buffer is needed at the end
		 * of processing each block, while the encrypt case needs
		 * the xor-buffer before processing each block.
		 *
		 * In a pure software implementation we wouldn't have any
		 * use for this kind of optimization, but since this
		 * AES-GCM implementation is aimed at being combined with
		 * accelerated routines it's more convenient to always have
		 * this optimization activated.
		 */
		internal_aes_gcm_encrypt_block(ctx, ctx->ctr, ctx->buf_cryp);
		internal_aes_gcm_inc_ctr(ctx);
	}

	return TEE_SUCCESS;
}

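/*
 * A minimal sketch of the expected call sequence for a one-shot
 * encryption (error handling elided, buffers assumed to be set up by
 * the caller):
 *
 *	struct internal_aes_gcm_ctx ctx;
 *
 *	internal_aes_gcm_init(&ctx, TEE_MODE_ENCRYPT, key, key_len,
 *			      nonce, nonce_len, tag_len);
 *	internal_aes_gcm_update_aad(&ctx, aad, aad_len);
 *	internal_aes_gcm_enc_final(&ctx, src, len, dst, tag, &tag_len);
 *
 * All AAD must be supplied before any payload, see the payload_bytes
 * check in internal_aes_gcm_update_aad() below.
 */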
TEE_Result internal_aes_gcm_update_aad(struct internal_aes_gcm_ctx *ctx,
				       const void *data, size_t len)
{
	const uint8_t *d = data;
	size_t l = len;
	const uint8_t *head = NULL;
	size_t n;

	if (ctx->payload_bytes)
		return TEE_ERROR_BAD_PARAMETERS;

	ctx->aad_bytes += len;

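	/*
	 * Stage unaligned or partial input in buf_hash until a full block
	 * is available; pass block-aligned whole blocks straight to GHASH.
	 */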
	while (l) {
		if (ctx->buf_pos || !internal_aes_gcm_ptr_is_block_aligned(d) ||
		    l < TEE_AES_BLOCK_SIZE) {
			n = MIN(TEE_AES_BLOCK_SIZE - ctx->buf_pos, l);
			memcpy(ctx->buf_hash + ctx->buf_pos, d, n);
			ctx->buf_pos += n;

			if (ctx->buf_pos != TEE_AES_BLOCK_SIZE)
				return TEE_SUCCESS;

			ctx->buf_pos = 0;
			head = ctx->buf_hash;
			d += n;
			l -= n;
		}

		if (internal_aes_gcm_ptr_is_block_aligned(d))
			n = l / TEE_AES_BLOCK_SIZE;
		else
			n = 0;

		internal_aes_gcm_ghash_update(ctx, head, d, n);
		l -= n * TEE_AES_BLOCK_SIZE;
		d += n * TEE_AES_BLOCK_SIZE;
	}

	return TEE_SUCCESS;
}

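/*
 * Encrypt or decrypt @len payload bytes from @src into @dst, updating
 * GHASH with the ciphertext as it goes. Unaligned or partial blocks go
 * through the staging buffers, block-aligned runs take the fast path.
 */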
TEE_Result internal_aes_gcm_update_payload(struct internal_aes_gcm_ctx *ctx,
					   TEE_OperationMode mode,
					   const void *src, size_t len,
					   void *dst)
{
	size_t n;
	const uint8_t *s = src;
	uint8_t *d = dst;
	size_t l = len;

	if (!ctx->payload_bytes && ctx->buf_pos) {
		/* AAD part done, finish up the last bits. */
		memset(ctx->buf_hash + ctx->buf_pos, 0,
		       TEE_AES_BLOCK_SIZE - ctx->buf_pos);
		internal_aes_gcm_ghash_update(ctx, ctx->buf_hash, NULL, 0);
		ctx->buf_pos = 0;
	}

	ctx->payload_bytes += len;

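	/*
	 * For decryption, the keystream block is generated on demand when
	 * the first byte of a new block arrives; for encryption it was
	 * pre-generated (see the comment in internal_aes_gcm_init()).
	 */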
	while (l) {
		if (ctx->buf_pos || !internal_aes_gcm_ptr_is_block_aligned(s) ||
		    !internal_aes_gcm_ptr_is_block_aligned(d) ||
		    l < TEE_AES_BLOCK_SIZE) {
			n = MIN(TEE_AES_BLOCK_SIZE - ctx->buf_pos, l);

			if (!ctx->buf_pos && mode == TEE_MODE_DECRYPT) {
				internal_aes_gcm_encrypt_block(ctx, ctx->ctr,
							       ctx->buf_cryp);
			}

			xor_buf(ctx->buf_cryp + ctx->buf_pos, s, n);
			memcpy(d, ctx->buf_cryp + ctx->buf_pos, n);
			if (mode == TEE_MODE_ENCRYPT)
				memcpy(ctx->buf_hash + ctx->buf_pos,
				       ctx->buf_cryp + ctx->buf_pos, n);
			else
				memcpy(ctx->buf_hash + ctx->buf_pos, s, n);

			ctx->buf_pos += n;

			if (ctx->buf_pos != TEE_AES_BLOCK_SIZE)
				return TEE_SUCCESS;

			internal_aes_gcm_ghash_update(ctx, ctx->buf_hash,
						      NULL, 0);
			ctx->buf_pos = 0;
			d += n;
			s += n;
			l -= n;

			if (mode == TEE_MODE_ENCRYPT)
				internal_aes_gcm_encrypt_block(ctx, ctx->ctr,
							       ctx->buf_cryp);
			internal_aes_gcm_inc_ctr(ctx);
		} else {
			n = l / TEE_AES_BLOCK_SIZE;
			internal_aes_gcm_update_payload_block_aligned(ctx, mode,
								      s, n, d);
			s += n * TEE_AES_BLOCK_SIZE;
			d += n * TEE_AES_BLOCK_SIZE;
			l -= n * TEE_AES_BLOCK_SIZE;
		}
	}

	return TEE_SUCCESS;
}

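/*
 * Flush any buffered partial block into GHASH, hash the AAD and payload
 * bit lengths, then XOR the final hash into buf_tag to form the tag.
 */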
static TEE_Result operation_final(struct internal_aes_gcm_ctx *ctx,
				  TEE_OperationMode m, const uint8_t *src,
				  size_t len, uint8_t *dst)
{
	TEE_Result res;

	res = internal_aes_gcm_update_payload(ctx, m, src, len, dst);
	if (res)
		return res;

	if (ctx->buf_pos) {
		memset(ctx->buf_hash + ctx->buf_pos, 0,
		       sizeof(ctx->buf_hash) - ctx->buf_pos);
		internal_aes_gcm_ghash_update(ctx, ctx->buf_hash, NULL, 0);
	}

	ghash_update_lengths(ctx, ctx->aad_bytes, ctx->payload_bytes);
	/*
	 * buf_tag was filled with the encrypted initial counter block in
	 * internal_aes_gcm_init()
	 */
	xor_buf(ctx->buf_tag, ctx->hash_state, ctx->tag_len);

	return TEE_SUCCESS;
}

TEE_Result internal_aes_gcm_enc_final(struct internal_aes_gcm_ctx *ctx,
				      const void *src, size_t len, void *dst,
				      void *tag, size_t *tag_len)
{
	TEE_Result res;

	if (*tag_len < ctx->tag_len)
		return TEE_ERROR_SHORT_BUFFER;

	res = operation_final(ctx, TEE_MODE_ENCRYPT, src, len, dst);
	if (res)
		return res;

	memcpy(tag, ctx->buf_tag, ctx->tag_len);
	*tag_len = ctx->tag_len;

	return TEE_SUCCESS;
}

TEE_Result internal_aes_gcm_dec_final(struct internal_aes_gcm_ctx *ctx,
				      const void *src, size_t len, void *dst,
				      const void *tag, size_t tag_len)
{
	TEE_Result res;

	if (tag_len != ctx->tag_len)
		return TEE_ERROR_MAC_INVALID;

	res = operation_final(ctx, TEE_MODE_DECRYPT, src, len, dst);
	if (res)
		return res;

	if (buf_compare_ct(ctx->buf_tag, tag, tag_len))
		return TEE_ERROR_MAC_INVALID;

	return TEE_SUCCESS;
}

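/*
 * Increment the counter block as two big-endian 64-bit words with a
 * carry between them. Note that this carries across all 128 bits,
 * unlike the inc32() function of SP 800-38D which wraps the low 32
 * bits; the two only diverge for counter values that valid message
 * lengths never reach with a 96-bit nonce.
 */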
void internal_aes_gcm_inc_ctr(struct internal_aes_gcm_ctx *ctx)
{
	uint64_t c;

	c = TEE_U64_FROM_BIG_ENDIAN(ctx->ctr[1]) + 1;
	ctx->ctr[1] = TEE_U64_TO_BIG_ENDIAN(c);
	if (!c) {
		c = TEE_U64_FROM_BIG_ENDIAN(ctx->ctr[0]) + 1;
		ctx->ctr[0] = TEE_U64_TO_BIG_ENDIAN(c);
	}
}

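/*
 * Unless a crypto library supplies its own AES-GCM implementation,
 * expose this one through the generic crypto_aes_gcm_*() API as thin
 * wrappers around the internal functions.
 */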
#ifndef CFG_CRYPTO_AES_GCM_FROM_CRYPTOLIB
#include <crypto/aes-gcm.h>

size_t crypto_aes_gcm_get_ctx_size(void)
{
	return sizeof(struct internal_aes_gcm_ctx);
}

TEE_Result crypto_aes_gcm_init(void *c, TEE_OperationMode mode,
			       const uint8_t *key, size_t key_len,
			       const uint8_t *nonce, size_t nonce_len,
			       size_t tag_len)
{
	return internal_aes_gcm_init(c, mode, key, key_len, nonce, nonce_len,
				     tag_len);
}

TEE_Result crypto_aes_gcm_update_aad(void *c, const uint8_t *data, size_t len)
{
	return internal_aes_gcm_update_aad(c, data, len);
}

TEE_Result crypto_aes_gcm_update_payload(void *c, TEE_OperationMode m,
					 const uint8_t *src, size_t len,
					 uint8_t *dst)
{
	return internal_aes_gcm_update_payload(c, m, src, len, dst);
}

TEE_Result crypto_aes_gcm_enc_final(void *c, const uint8_t *src, size_t len,
				    uint8_t *dst, uint8_t *tag, size_t *tag_len)
{
	return internal_aes_gcm_enc_final(c, src, len, dst, tag, tag_len);
}

TEE_Result crypto_aes_gcm_dec_final(void *c, const uint8_t *src, size_t len,
				    uint8_t *dst, const uint8_t *tag,
				    size_t tag_len)
{
	return internal_aes_gcm_dec_final(c, src, len, dst, tag, tag_len);
}

void crypto_aes_gcm_final(void *c __unused)
{
}
#endif /*!CFG_CRYPTO_AES_GCM_FROM_CRYPTOLIB*/