/*
 * Copyright (c) 2017, Linaro Limited
 * All rights reserved.
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#include <crypto/internal_aes-gcm.h>
#include <io.h>
#include <string.h>
#include <string_ext.h>
#include <tee_api_types.h>
#include <types_ext.h>
#include <utee_defines.h>
#include <util.h>

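/* XOR @len bytes from @src into @dst in place. */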
static void xor_buf(uint8_t *dst, const uint8_t *src, size_t len)
{
	size_t n;

	for (n = 0; n < len; n++)
		dst[n] ^= src[n];
}

static bool ptr_is_block_aligned(const void *p)
{
	return !((vaddr_t)p & (TEE_AES_BLOCK_SIZE - 1));
}

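/*
 * Feed @len bytes of @data into GHASH, zero-padding the final partial
 * block as required by the GCM specification. Unaligned full blocks are
 * bounced through a stack buffer so the GHASH routine only ever sees
 * block-aligned data.
 */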
static void ghash_update_pad_zero(struct internal_aes_gcm_ctx *ctx,
				  const uint8_t *data, size_t len)
{
	size_t n = len / TEE_AES_BLOCK_SIZE;
	uint64_t block[2];

	if (n) {
		if (ptr_is_block_aligned(data)) {
			internal_aes_gcm_ghash_update(ctx, NULL, data, n);
		} else {
			size_t m;

			for (m = 0; m < n; m++) {
				memcpy(block, data + m * sizeof(block),
				       sizeof(block));
				internal_aes_gcm_ghash_update(ctx, NULL,
							      (void *)block, 1);
			}
		}
	}

	if (len - n * TEE_AES_BLOCK_SIZE) {
		memset(block, 0, sizeof(block));
		memcpy(block, data + n * TEE_AES_BLOCK_SIZE,
		       len - n * TEE_AES_BLOCK_SIZE);
		internal_aes_gcm_ghash_update(ctx, block, NULL, 0);
	}
}

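/*
 * Feed GHASH with a length block: two 64-bit big-endian fields holding
 * @l1 and @l2 converted from bytes to bits. GCM uses such a block both
 * to close the GHASH computation (len(A) || len(C)) and to derive the
 * initial counter from a nonce that isn't 96 bits.
 */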
static void ghash_update_lengths(struct internal_aes_gcm_ctx *ctx, uint32_t l1,
				 uint32_t l2)
{
	uint64_t len_fields[2] = {
		/* Widen before multiplying to avoid 32-bit overflow */
		TEE_U64_TO_BIG_ENDIAN((uint64_t)l1 * 8),
		TEE_U64_TO_BIG_ENDIAN((uint64_t)l2 * 8)
	};

	COMPILE_TIME_ASSERT(sizeof(len_fields) == TEE_AES_BLOCK_SIZE);
	internal_aes_gcm_ghash_update(ctx, (uint8_t *)len_fields, NULL, 0);
}

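/*
 * Initialize the context: expand the key, derive the initial counter
 * block J0 (nonce || 0^31 || 1 for a 96-bit nonce, otherwise a GHASH
 * over the zero-padded nonce and its length) and precompute the
 * encrypted J0 into buf_tag for the final tag.
 */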
TEE_Result internal_aes_gcm_init(struct internal_aes_gcm_ctx *ctx,
				 TEE_OperationMode mode, const void *key,
				 size_t key_len, const void *nonce,
				 size_t nonce_len, size_t tag_len)
{
	TEE_Result res;

	COMPILE_TIME_ASSERT(sizeof(ctx->ctr) == TEE_AES_BLOCK_SIZE);

	if (tag_len > sizeof(ctx->buf_tag))
		return TEE_ERROR_BAD_PARAMETERS;

	memset(ctx, 0, sizeof(*ctx));

	ctx->tag_len = tag_len;
	res = internal_aes_gcm_set_key(ctx, key, key_len);
	if (res)
		return res;

	if (nonce_len == (96 / 8)) {
		memcpy(ctx->ctr, nonce, nonce_len);
		internal_aes_gcm_inc_ctr(ctx);
	} else {
		ghash_update_pad_zero(ctx, nonce, nonce_len);
		ghash_update_lengths(ctx, 0, nonce_len);

		memcpy(ctx->ctr, ctx->hash_state, sizeof(ctx->ctr));
		memset(ctx->hash_state, 0, sizeof(ctx->hash_state));
	}

	internal_aes_gcm_encrypt_block(ctx, ctx->ctr, ctx->buf_tag);
	internal_aes_gcm_inc_ctr(ctx);
	if (mode == TEE_MODE_ENCRYPT) {
		/*
		 * Encryption uses the pre-encrypted xor-buffer to encrypt
		 * while decryption encrypts the xor-buffer when needed
		 * instead.
		 *
		 * The reason for this is that the combined encryption and
		 * ghash implementation does both operations intertwined.
		 * In the decrypt case the xor-buffer is needed at the end
		 * of processing each block, while in the encrypt case it
		 * is needed before processing each block.
		 *
		 * In a pure software implementation we wouldn't have any
		 * use for this kind of optimization, but since this
		 * AES-GCM implementation is aimed at being combined with
		 * accelerated routines it's more convenient to always
		 * keep this optimization activated.
		 */
		internal_aes_gcm_encrypt_block(ctx, ctx->ctr, ctx->buf_cryp);
		internal_aes_gcm_inc_ctr(ctx);
	}

	return TEE_SUCCESS;
}

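/*
 * Absorb additional authenticated data into GHASH. Unaligned or partial
 * input is staged in buf_hash until a full block is available; aligned
 * runs of whole blocks are passed through directly. Must be called
 * before any payload has been processed.
 */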
TEE_Result internal_aes_gcm_update_aad(struct internal_aes_gcm_ctx *ctx,
				       const void *data, size_t len)
{
	const uint8_t *d = data;
	size_t l = len;
	const uint8_t *head = NULL;
	size_t n;

	if (ctx->payload_bytes)
		return TEE_ERROR_BAD_PARAMETERS;

	ctx->aad_bytes += len;

	while (l) {
		if (ctx->buf_pos || !ptr_is_block_aligned(d) ||
		    l < TEE_AES_BLOCK_SIZE) {
			n = MIN(TEE_AES_BLOCK_SIZE - ctx->buf_pos, l);
			memcpy(ctx->buf_hash + ctx->buf_pos, d, n);
			ctx->buf_pos += n;

			if (ctx->buf_pos != TEE_AES_BLOCK_SIZE)
				return TEE_SUCCESS;

			ctx->buf_pos = 0;
			head = ctx->buf_hash;
			d += n;
			l -= n;
		}

		if (ptr_is_block_aligned(d))
			n = l / TEE_AES_BLOCK_SIZE;
		else
			n = 0;

		internal_aes_gcm_ghash_update(ctx, head, d, n);
		l -= n * TEE_AES_BLOCK_SIZE;
		d += n * TEE_AES_BLOCK_SIZE;
	}

	return TEE_SUCCESS;
}

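/*
 * Encrypt or decrypt payload data in CTR mode while accumulating the
 * GHASH over the ciphertext. A pending partial AAD block is flushed
 * first. Unaligned or partial input goes through the one-block buffers;
 * aligned runs of whole blocks take the fast path in
 * internal_aes_gcm_update_payload_block_aligned().
 */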
TEE_Result internal_aes_gcm_update_payload(struct internal_aes_gcm_ctx *ctx,
					   TEE_OperationMode mode,
					   const void *src, size_t len,
					   void *dst)
{
	size_t n;
	const uint8_t *s = src;
	uint8_t *d = dst;
	size_t l = len;

	if (!ctx->payload_bytes && ctx->buf_pos) {
		/* AAD part done, finish up the last bits. */
		memset(ctx->buf_hash + ctx->buf_pos, 0,
		       TEE_AES_BLOCK_SIZE - ctx->buf_pos);
		internal_aes_gcm_ghash_update(ctx, ctx->buf_hash, NULL, 0);
		ctx->buf_pos = 0;
	}

	ctx->payload_bytes += len;

	while (l) {
		if (ctx->buf_pos || !ptr_is_block_aligned(s) ||
		    !ptr_is_block_aligned(d) || l < TEE_AES_BLOCK_SIZE) {
			n = MIN(TEE_AES_BLOCK_SIZE - ctx->buf_pos, l);

			if (!ctx->buf_pos && mode == TEE_MODE_DECRYPT) {
				internal_aes_gcm_encrypt_block(ctx, ctx->ctr,
							       ctx->buf_cryp);
			}

			xor_buf(ctx->buf_cryp + ctx->buf_pos, s, n);
			memcpy(d, ctx->buf_cryp + ctx->buf_pos, n);
			if (mode == TEE_MODE_ENCRYPT)
				memcpy(ctx->buf_hash + ctx->buf_pos,
				       ctx->buf_cryp + ctx->buf_pos, n);
			else
				memcpy(ctx->buf_hash + ctx->buf_pos, s, n);

			ctx->buf_pos += n;

			if (ctx->buf_pos != TEE_AES_BLOCK_SIZE)
				return TEE_SUCCESS;

			internal_aes_gcm_ghash_update(ctx, ctx->buf_hash,
						      NULL, 0);
			ctx->buf_pos = 0;
			d += n;
			s += n;
			l -= n;

			if (mode == TEE_MODE_ENCRYPT)
				internal_aes_gcm_encrypt_block(ctx, ctx->ctr,
							       ctx->buf_cryp);
			internal_aes_gcm_inc_ctr(ctx);
		} else {
			n = l / TEE_AES_BLOCK_SIZE;
			internal_aes_gcm_update_payload_block_aligned(ctx, mode,
								      s, n, d);
			s += n * TEE_AES_BLOCK_SIZE;
			d += n * TEE_AES_BLOCK_SIZE;
			l -= n * TEE_AES_BLOCK_SIZE;
		}
	}

	return TEE_SUCCESS;
}

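/*
 * Process the last payload bytes, close the GHASH with the AAD and
 * payload bit lengths, and fold the result into buf_tag, which already
 * holds the encrypted initial counter block.
 */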
static TEE_Result operation_final(struct internal_aes_gcm_ctx *ctx,
				  TEE_OperationMode m, const uint8_t *src,
				  size_t len, uint8_t *dst)
{
	TEE_Result res;

	res = internal_aes_gcm_update_payload(ctx, m, src, len, dst);
	if (res)
		return res;

	if (ctx->buf_pos) {
		memset(ctx->buf_hash + ctx->buf_pos, 0,
		       sizeof(ctx->buf_hash) - ctx->buf_pos);
		internal_aes_gcm_ghash_update(ctx, ctx->buf_hash, NULL, 0);
	}

	ghash_update_lengths(ctx, ctx->aad_bytes, ctx->payload_bytes);
	/*
	 * buf_tag was filled in with the encrypted first counter block
	 * in internal_aes_gcm_init().
	 */
	xor_buf(ctx->buf_tag, ctx->hash_state, ctx->tag_len);

	return TEE_SUCCESS;
}

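/*
 * Finish an encryption: emit the remaining ciphertext and copy out the
 * authentication tag. @tag_len is updated with the actual tag size.
 */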
TEE_Result internal_aes_gcm_enc_final(struct internal_aes_gcm_ctx *ctx,
				      const void *src, size_t len, void *dst,
				      void *tag, size_t *tag_len)
{
	TEE_Result res;

	if (*tag_len < ctx->tag_len)
		return TEE_ERROR_SHORT_BUFFER;

	res = operation_final(ctx, TEE_MODE_ENCRYPT, src, len, dst);
	if (res)
		return res;

	memcpy(tag, ctx->buf_tag, ctx->tag_len);
	*tag_len = ctx->tag_len;

	return TEE_SUCCESS;
}

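/*
 * Finish a decryption: emit the remaining plaintext and verify the
 * supplied tag against the computed one in constant time.
 */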
TEE_Result internal_aes_gcm_dec_final(struct internal_aes_gcm_ctx *ctx,
				      const void *src, size_t len, void *dst,
				      const void *tag, size_t tag_len)
{
	TEE_Result res;

	if (tag_len != ctx->tag_len)
		return TEE_ERROR_MAC_INVALID;

	res = operation_final(ctx, TEE_MODE_DECRYPT, src, len, dst);
	if (res)
		return res;

	if (buf_compare_ct(ctx->buf_tag, tag, tag_len))
		return TEE_ERROR_MAC_INVALID;

	return TEE_SUCCESS;
}

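/*
 * Increment the counter block, treated as a 128-bit big-endian integer
 * stored as two 64-bit halves with carry propagation.
 */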
void internal_aes_gcm_inc_ctr(struct internal_aes_gcm_ctx *ctx)
{
	uint64_t c;

	c = TEE_U64_FROM_BIG_ENDIAN(ctx->ctr[1]) + 1;
	ctx->ctr[1] = TEE_U64_TO_BIG_ENDIAN(c);
	if (!c) {
		c = TEE_U64_FROM_BIG_ENDIAN(ctx->ctr[0]) + 1;
		ctx->ctr[0] = TEE_U64_TO_BIG_ENDIAN(c);
	}
}

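/*
 * When AES-GCM isn't provided by an external crypto library, expose
 * this implementation through the generic crypto_aes_gcm_*() interface
 * via the thin wrappers below.
 */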
#ifndef CFG_CRYPTO_AES_GCM_FROM_CRYPTOLIB
#include <crypto/aes-gcm.h>

size_t crypto_aes_gcm_get_ctx_size(void)
{
	return sizeof(struct internal_aes_gcm_ctx);
}

TEE_Result crypto_aes_gcm_init(void *c, TEE_OperationMode mode,
			       const uint8_t *key, size_t key_len,
			       const uint8_t *nonce, size_t nonce_len,
			       size_t tag_len)
{
	return internal_aes_gcm_init(c, mode, key, key_len, nonce, nonce_len,
				     tag_len);
}

TEE_Result crypto_aes_gcm_update_aad(void *c, const uint8_t *data, size_t len)
{
	return internal_aes_gcm_update_aad(c, data, len);
}

TEE_Result crypto_aes_gcm_update_payload(void *c, TEE_OperationMode m,
					 const uint8_t *src, size_t len,
					 uint8_t *dst)
{
	return internal_aes_gcm_update_payload(c, m, src, len, dst);
}

TEE_Result crypto_aes_gcm_enc_final(void *c, const uint8_t *src, size_t len,
				    uint8_t *dst, uint8_t *tag, size_t *tag_len)
{
	return internal_aes_gcm_enc_final(c, src, len, dst, tag, tag_len);
}

TEE_Result crypto_aes_gcm_dec_final(void *c, const uint8_t *src, size_t len,
				    uint8_t *dst, const uint8_t *tag,
				    size_t tag_len)
{
	return internal_aes_gcm_dec_final(c, src, len, dst, tag, tag_len);
}

void crypto_aes_gcm_final(void *c __unused)
{
}
#endif /*!CFG_CRYPTO_AES_GCM_FROM_CRYPTOLIB*/
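
/*
 * Usage sketch (illustrative only, not part of the API contract):
 * one-shot authenticated encryption with this interface, assuming a
 * 128-bit key, a 96-bit nonce and a 16-byte tag. The buffer names
 * (key, nonce, aad, plaintext, ciphertext) are hypothetical.
 *
 *	struct internal_aes_gcm_ctx ctx;
 *	uint8_t tag[16];
 *	size_t tag_len = sizeof(tag);
 *	TEE_Result res;
 *
 *	res = internal_aes_gcm_init(&ctx, TEE_MODE_ENCRYPT, key, 16,
 *				    nonce, 12, sizeof(tag));
 *	if (!res)
 *		res = internal_aes_gcm_update_aad(&ctx, aad, aad_len);
 *	if (!res)
 *		res = internal_aes_gcm_enc_final(&ctx, plaintext, plain_len,
 *						 ciphertext, tag, &tag_len);
 */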