xref: /optee_os/core/crypto/aes-gcm.c (revision 8a15c688baf21524392f092611e54614a032affe)
1 // SPDX-License-Identifier: BSD-2-Clause
2 /*
3  * Copyright (c) 2017-2020, Linaro Limited
4  */
5 
6 #include <assert.h>
7 #include <crypto/crypto.h>
8 #include <crypto/crypto_impl.h>
9 #include <crypto/internal_aes-gcm.h>
10 #include <io.h>
11 #include <string_ext.h>
12 #include <string.h>
13 #include <tee_api_types.h>
14 #include <types_ext.h>
15 #include <utee_defines.h>
16 #include <util.h>
17 
/* XOR each of the @len bytes of @src into @dst in place. */
static void xor_buf(uint8_t *dst, const uint8_t *src, size_t len)
{
	size_t i = 0;

	for (i = 0; i < len; i++)
		dst[i] ^= src[i];
}
25 
26 
/*
 * Feed @len bytes of @data into the GHASH state, zero-padding the
 * trailing partial block if @len is not a multiple of
 * TEE_AES_BLOCK_SIZE. Used by __gcm_init() for non-96-bit nonces.
 */
static void ghash_update_pad_zero(struct internal_aes_gcm_state *state,
				  const uint8_t *data, size_t len)
{
	size_t n = len / TEE_AES_BLOCK_SIZE;
	uint64_t block[2];

	if (n) {
		if (internal_aes_gcm_ptr_is_block_aligned(data)) {
			internal_aes_gcm_ghash_update(state, NULL, data, n);
		} else {
			size_t m;

			/*
			 * Unaligned source: bounce each block through a
			 * 64-bit aligned local buffer.
			 */
			for (m = 0; m < n; m++) {

				memcpy(block, data + m * sizeof(block),
				       sizeof(block));
				internal_aes_gcm_ghash_update(state, NULL,
							      (void *)block, 1);
			}
		}
	}

	/* Zero-pad and hash the final partial block, if any. */
	if (len - n * TEE_AES_BLOCK_SIZE) {
		memset(block, 0, sizeof(block));
		memcpy(block, data + n * TEE_AES_BLOCK_SIZE,
		       len - n * TEE_AES_BLOCK_SIZE);
		internal_aes_gcm_ghash_update(state, block, NULL, 0);
	}
}
56 
57 static void ghash_update_lengths(struct internal_aes_gcm_state *state,
58 				 uint32_t l1, uint32_t l2)
59 {
60 	uint64_t len_fields[2] = {
61 		TEE_U64_TO_BIG_ENDIAN(l1 * 8),
62 		TEE_U64_TO_BIG_ENDIAN(l2 * 8)
63 	};
64 
65 	COMPILE_TIME_ASSERT(sizeof(len_fields) == TEE_AES_BLOCK_SIZE);
66 	internal_aes_gcm_ghash_update(state, (uint8_t *)len_fields, NULL, 0);
67 }
68 
/*
 * Reset @state and derive the initial counter block for a new GCM
 * operation with the expanded key @ek.
 *
 * A 96-bit nonce is used directly as the counter prefix (J0 =
 * nonce || 0^31 || 1, the trailing 1 coming from the inc_ctr() call);
 * any other nonce length is compressed with GHASH into J0 as per
 * NIST SP 800-38D. @tag_len must fit in state->buf_tag or
 * TEE_ERROR_BAD_PARAMETERS is returned.
 */
static TEE_Result __gcm_init(struct internal_aes_gcm_state *state,
			     const struct internal_aes_gcm_key *ek,
			     TEE_OperationMode mode, const void *nonce,
			     size_t nonce_len, size_t tag_len)
{
	COMPILE_TIME_ASSERT(sizeof(state->ctr) == TEE_AES_BLOCK_SIZE);

	if (tag_len > sizeof(state->buf_tag))
		return TEE_ERROR_BAD_PARAMETERS;

	memset(state, 0, sizeof(*state));

	state->tag_len = tag_len;
	internal_aes_gcm_set_key(state, ek);

	if (nonce_len == (96 / 8)) {
		/* Standard 96-bit nonce: copy it in, rest of ctr is zero. */
		memcpy(state->ctr, nonce, nonce_len);
		internal_aes_gcm_inc_ctr(state);
	} else {
		/* J0 = GHASH(nonce zero-padded || 64-bit nonce bit length) */
		ghash_update_pad_zero(state, nonce, nonce_len);
		ghash_update_lengths(state, 0, nonce_len);

		memcpy(state->ctr, state->hash_state, sizeof(state->ctr));
		/* Restart the hash for the AAD/payload that follow. */
		memset(state->hash_state, 0, sizeof(state->hash_state));
	}

	/* E(K, J0): xor-ed with the final GHASH value to form the tag. */
	crypto_aes_enc_block(ek->data, sizeof(ek->data), ek->rounds,
			     state->ctr, state->buf_tag);
	internal_aes_gcm_inc_ctr(state);
	if (mode == TEE_MODE_ENCRYPT) {
		/*
		 * Encryption uses the pre-encrypted xor-buffer to encrypt
		 * while decryption encrypts the xor-buffer when needed
		 * instead.
		 *
		 * The reason for this is that the combined encryption and
		 * ghash implementation does both operations intertwined.
		 * In the decrypt case the xor-buffer is needed at the end
		 * of processing each block, while the encryption case
		 * needs xor-buffer before processing each block.
		 *
		 * In a pure software implementation we wouldn't have any
		 * use for this kind of optimization, but since this
		 * AES-GCM implementation is aimed at being combined with
		 * accelerated routines it's more convenient to always have
		 * this optimization activated.
		 */
		crypto_aes_enc_block(ek->data, sizeof(ek->data), ek->rounds,
				     state->ctr, state->buf_cryp);
		internal_aes_gcm_inc_ctr(state);
	}

	return TEE_SUCCESS;
}
123 
124 TEE_Result internal_aes_gcm_init(struct internal_aes_gcm_ctx *ctx,
125 				 TEE_OperationMode mode, const void *key,
126 				 size_t key_len, const void *nonce,
127 				 size_t nonce_len, size_t tag_len)
128 {
129 	TEE_Result res = internal_aes_gcm_expand_enc_key(key, key_len,
130 							 &ctx->key);
131 	if (res)
132 		return res;
133 
134 	return __gcm_init(&ctx->state, &ctx->key, mode, nonce, nonce_len,
135 			  tag_len);
136 }
137 
/*
 * Absorb @len bytes of additional authenticated data into GHASH.
 *
 * Partial blocks are collected in state->buf_hash; whole, block-aligned
 * input is hashed straight from @data. All AAD must be supplied before
 * any payload (enforced via state->payload_bytes).
 */
static TEE_Result __gcm_update_aad(struct internal_aes_gcm_state *state,
				   const void *data, size_t len)
{
	const uint8_t *d = data;
	size_t l = len;
	const uint8_t *head = NULL;
	size_t n;

	/* GCM requires the AAD to precede the payload. */
	if (state->payload_bytes)
		return TEE_ERROR_BAD_PARAMETERS;

	state->aad_bytes += len;

	while (l) {
		if (state->buf_pos ||
		    !internal_aes_gcm_ptr_is_block_aligned(d) ||
		    l < TEE_AES_BLOCK_SIZE) {
			/* Top up the partial-block buffer. */
			n = MIN(TEE_AES_BLOCK_SIZE - state->buf_pos, l);
			memcpy(state->buf_hash + state->buf_pos, d, n);
			state->buf_pos += n;

			/* Not a full block yet: wait for more data. */
			if (state->buf_pos != TEE_AES_BLOCK_SIZE)
				return TEE_SUCCESS;

			/* Buffer full: hash it together with what follows. */
			state->buf_pos = 0;
			head = state->buf_hash;
			d += n;
			l -= n;
		}

		if (internal_aes_gcm_ptr_is_block_aligned(d))
			n = l / TEE_AES_BLOCK_SIZE;
		else
			n = 0;

		/* Hash the buffered block (if any) plus n blocks of @d. */
		internal_aes_gcm_ghash_update(state, head, d, n);
		l -= n * TEE_AES_BLOCK_SIZE;
		d += n * TEE_AES_BLOCK_SIZE;
	}

	return TEE_SUCCESS;
}
180 
181 TEE_Result internal_aes_gcm_update_aad(struct internal_aes_gcm_ctx *ctx,
182 				       const void *data, size_t len)
183 {
184 	return __gcm_update_aad(&ctx->state, data, len);
185 }
186 
/*
 * Encrypt or decrypt @len payload bytes from @src into @dst, feeding
 * the ciphertext into GHASH as GCM requires.
 *
 * Partial blocks are staged in state->buf_cryp (keystream/ciphertext)
 * and state->buf_hash (data for GHASH); runs of whole blocks are
 * handed to internal_aes_gcm_update_payload_blocks(), which is __weak
 * and may be overridden by an accelerated implementation.
 */
static TEE_Result
__gcm_update_payload(struct internal_aes_gcm_state *state,
		     const struct internal_aes_gcm_key *ek,
		     TEE_OperationMode mode, const void *src,
		     size_t len, void *dst)
{
	size_t n;
	const uint8_t *s = src;
	uint8_t *d = dst;
	size_t l = len;

	if (!state->payload_bytes && state->buf_pos) {
		/* AAD part done, finish up the last bits. */
		memset(state->buf_hash + state->buf_pos, 0,
		       TEE_AES_BLOCK_SIZE - state->buf_pos);
		internal_aes_gcm_ghash_update(state, state->buf_hash, NULL, 0);
		state->buf_pos = 0;
	}

	state->payload_bytes += len;

	while (l) {
		if (state->buf_pos || l < TEE_AES_BLOCK_SIZE) {
			n = MIN(TEE_AES_BLOCK_SIZE - state->buf_pos, l);

			/*
			 * Decryption encrypts the counter block lazily at
			 * the start of each block; encryption has it
			 * pre-computed (see comment in __gcm_init()).
			 */
			if (!state->buf_pos && mode == TEE_MODE_DECRYPT)
				crypto_aes_enc_block(ek->data, sizeof(ek->data),
						     ek->rounds, state->ctr,
						     state->buf_cryp);

			xor_buf(state->buf_cryp + state->buf_pos, s, n);
			memcpy(d, state->buf_cryp + state->buf_pos, n);
			/* GHASH always consumes the ciphertext side. */
			if (mode == TEE_MODE_ENCRYPT)
				memcpy(state->buf_hash + state->buf_pos,
				       state->buf_cryp + state->buf_pos, n);
			else
				memcpy(state->buf_hash + state->buf_pos, s, n);

			state->buf_pos += n;

			if (state->buf_pos != TEE_AES_BLOCK_SIZE)
				return TEE_SUCCESS;

			/* A full block has been assembled: hash it. */
			internal_aes_gcm_ghash_update(state, state->buf_hash,
						      NULL, 0);
			state->buf_pos = 0;
			d += n;
			s += n;
			l -= n;

			/* Pre-encrypt the next counter block for encrypt. */
			if (mode == TEE_MODE_ENCRYPT)
				crypto_aes_enc_block(ek->data, sizeof(ek->data),
						     ek->rounds, state->ctr,
						     state->buf_cryp);
			internal_aes_gcm_inc_ctr(state);
		} else {
			/* Bulk path: whole blocks, possibly accelerated. */
			n = l / TEE_AES_BLOCK_SIZE;
			internal_aes_gcm_update_payload_blocks(state, ek, mode,
							       s, n, d);
			s += n * TEE_AES_BLOCK_SIZE;
			d += n * TEE_AES_BLOCK_SIZE;
			l -= n * TEE_AES_BLOCK_SIZE;
		}
	}

	return TEE_SUCCESS;
}
254 
255 TEE_Result internal_aes_gcm_update_payload(struct internal_aes_gcm_ctx *ctx,
256 					   TEE_OperationMode mode,
257 					   const void *src, size_t len,
258 					   void *dst)
259 {
260 	return __gcm_update_payload(&ctx->state, &ctx->key, mode, src, len,
261 				    dst);
262 }
263 
/*
 * Process the final chunk of payload and compute the authentication
 * tag into state->buf_tag (tag = E(K, J0) xor GHASH result).
 */
static TEE_Result operation_final(struct internal_aes_gcm_state *state,
				  const struct internal_aes_gcm_key *enc_key,
				  TEE_OperationMode m, const uint8_t *src,
				  size_t len, uint8_t *dst)
{
	TEE_Result res;

	res = __gcm_update_payload(state, enc_key, m, src, len, dst);
	if (res)
		return res;

	/* Zero-pad and hash a trailing partial payload block, if any. */
	if (state->buf_pos) {
		memset(state->buf_hash + state->buf_pos, 0,
		       sizeof(state->buf_hash) - state->buf_pos);
		internal_aes_gcm_ghash_update(state, state->buf_hash, NULL, 0);
	}

	ghash_update_lengths(state, state->aad_bytes, state->payload_bytes);
	/* buf_tag was filled in with the first counter block in __gcm_init() */
	xor_buf(state->buf_tag, state->hash_state, state->tag_len);

	return TEE_SUCCESS;
}
287 
288 static TEE_Result __gcm_enc_final(struct internal_aes_gcm_state *state,
289 				  const struct internal_aes_gcm_key *enc_key,
290 				  const void *src, size_t len, void *dst,
291 				  void *tag, size_t *tag_len)
292 {
293 	TEE_Result res;
294 
295 	if (*tag_len < state->tag_len)
296 		return TEE_ERROR_SHORT_BUFFER;
297 
298 	res = operation_final(state, enc_key, TEE_MODE_ENCRYPT, src, len, dst);
299 	if (res)
300 		return res;
301 
302 	memcpy(tag, state->buf_tag, state->tag_len);
303 	*tag_len = state->tag_len;
304 
305 	return TEE_SUCCESS;
306 }
307 
308 TEE_Result internal_aes_gcm_enc_final(struct internal_aes_gcm_ctx *ctx,
309 				      const void *src, size_t len, void *dst,
310 				      void *tag, size_t *tag_len)
311 {
312 	return __gcm_enc_final(&ctx->state, &ctx->key, src, len, dst, tag,
313 			       tag_len);
314 }
315 
316 static TEE_Result __gcm_dec_final(struct internal_aes_gcm_state *state,
317 				  const struct internal_aes_gcm_key *enc_key,
318 				  const void *src, size_t len, void *dst,
319 				  const void *tag, size_t tag_len)
320 {
321 	TEE_Result res;
322 
323 	if (tag_len != state->tag_len)
324 		return TEE_ERROR_MAC_INVALID;
325 
326 	res = operation_final(state, enc_key, TEE_MODE_DECRYPT, src, len, dst);
327 	if (res)
328 		return res;
329 
330 	if (consttime_memcmp(state->buf_tag, tag, tag_len))
331 		return TEE_ERROR_MAC_INVALID;
332 
333 	return TEE_SUCCESS;
334 }
335 
336 TEE_Result internal_aes_gcm_dec_final(struct internal_aes_gcm_ctx *ctx,
337 				      const void *src, size_t len, void *dst,
338 				      const void *tag, size_t tag_len)
339 {
340 	return __gcm_dec_final(&ctx->state, &ctx->key, src, len, dst, tag,
341 			       tag_len);
342 }
343 
344 void internal_aes_gcm_inc_ctr(struct internal_aes_gcm_state *state)
345 {
346 	uint64_t c = 0;
347 
348 	c = TEE_U64_FROM_BIG_ENDIAN(state->ctr[1]) + 1;
349 	state->ctr[1] = TEE_U64_TO_BIG_ENDIAN(c);
350 	if (!c) {
351 		c = TEE_U64_FROM_BIG_ENDIAN(state->ctr[0]) + 1;
352 		state->ctr[0] = TEE_U64_TO_BIG_ENDIAN(c);
353 	}
354 }
355 
356 void internal_aes_gcm_dec_ctr(struct internal_aes_gcm_state *state)
357 {
358 	uint64_t c = 0;
359 
360 	c = TEE_U64_FROM_BIG_ENDIAN(state->ctr[1]) - 1;
361 	state->ctr[1] = TEE_U64_TO_BIG_ENDIAN(c);
362 	if (c == UINT64_MAX) {
363 		c = TEE_U64_FROM_BIG_ENDIAN(state->ctr[0]) - 1;
364 		state->ctr[0] = TEE_U64_TO_BIG_ENDIAN(c);
365 	}
366 }
367 
368 TEE_Result internal_aes_gcm_enc(const struct internal_aes_gcm_key *enc_key,
369 				const void *nonce, size_t nonce_len,
370 				const void *aad, size_t aad_len,
371 				const void *src, size_t len, void *dst,
372 				void *tag, size_t *tag_len)
373 {
374 	TEE_Result res;
375 	struct internal_aes_gcm_state state;
376 
377 	res = __gcm_init(&state, enc_key, TEE_MODE_ENCRYPT, nonce, nonce_len,
378 			 *tag_len);
379 	if (res)
380 		return res;
381 
382 	if (aad) {
383 		res = __gcm_update_aad(&state, aad, aad_len);
384 		if (res)
385 			return res;
386 	}
387 
388 	return __gcm_enc_final(&state, enc_key, src, len, dst, tag, tag_len);
389 }
390 
391 TEE_Result internal_aes_gcm_dec(const struct internal_aes_gcm_key *enc_key,
392 				const void *nonce, size_t nonce_len,
393 				const void *aad, size_t aad_len,
394 				const void *src, size_t len, void *dst,
395 				const void *tag, size_t tag_len)
396 {
397 	TEE_Result res;
398 	struct internal_aes_gcm_state state;
399 
400 	res = __gcm_init(&state, enc_key, TEE_MODE_DECRYPT, nonce, nonce_len,
401 			 tag_len);
402 	if (res)
403 		return res;
404 
405 	if (aad) {
406 		res = __gcm_update_aad(&state, aad, aad_len);
407 		if (res)
408 			return res;
409 	}
410 
411 	return __gcm_dec_final(&state, enc_key, src, len, dst, tag, tag_len);
412 }
413 
414 
415 #ifndef CFG_CRYPTO_AES_GCM_FROM_CRYPTOLIB
416 #include <stdlib.h>
417 #include <crypto/crypto.h>
418 
/* AES-GCM context exposed through the generic authenc interface. */
struct aes_gcm_ctx {
	struct crypto_authenc_ctx aec;	/* generic handle given to callers */
	struct internal_aes_gcm_ctx ctx;	/* expanded key + GCM state */
};

/* Defined below; needed by to_aes_gcm_ctx() for its ops sanity check. */
static const struct crypto_authenc_ops aes_gcm_ops;
425 
426 static struct aes_gcm_ctx *
427 to_aes_gcm_ctx(struct crypto_authenc_ctx *aec)
428 {
429 	assert(aec->ops == &aes_gcm_ops);
430 
431 	return container_of(aec, struct aes_gcm_ctx, aec);
432 }
433 
434 TEE_Result crypto_aes_gcm_alloc_ctx(struct crypto_authenc_ctx **ctx_ret)
435 {
436 	struct aes_gcm_ctx *ctx = calloc(1, sizeof(*ctx));
437 
438 	if (!ctx)
439 		return TEE_ERROR_OUT_OF_MEMORY;
440 	ctx->aec.ops = &aes_gcm_ops;
441 
442 	*ctx_ret = &ctx->aec;
443 
444 	return TEE_SUCCESS;
445 }
446 
/* Release a context previously allocated by crypto_aes_gcm_alloc_ctx(). */
static void aes_gcm_free_ctx(struct crypto_authenc_ctx *aec)
{
	struct aes_gcm_ctx *ctx = to_aes_gcm_ctx(aec);

	free(ctx);
}
451 
452 static void aes_gcm_copy_state(struct crypto_authenc_ctx *dst_ctx,
453 			       struct crypto_authenc_ctx *src_ctx)
454 {
455 	to_aes_gcm_ctx(dst_ctx)->ctx = to_aes_gcm_ctx(src_ctx)->ctx;
456 }
457 
458 static TEE_Result aes_gcm_init(struct crypto_authenc_ctx *aec,
459 			       TEE_OperationMode mode,
460 			       const uint8_t *key, size_t key_len,
461 			       const uint8_t *nonce, size_t nonce_len,
462 			       size_t tag_len, size_t aad_len __unused,
463 			       size_t payload_len __unused)
464 {
465 	return internal_aes_gcm_init(&to_aes_gcm_ctx(aec)->ctx, mode, key,
466 				     key_len, nonce, nonce_len, tag_len);
467 }
468 
469 static TEE_Result aes_gcm_update_aad(struct crypto_authenc_ctx *aec,
470 				     const uint8_t *data, size_t len)
471 {
472 	return internal_aes_gcm_update_aad(&to_aes_gcm_ctx(aec)->ctx, data,
473 					   len);
474 }
475 
476 static TEE_Result aes_gcm_update_payload(struct crypto_authenc_ctx *aec,
477 					 TEE_OperationMode m,
478 					 const uint8_t *src, size_t len,
479 					 uint8_t *dst)
480 {
481 	return internal_aes_gcm_update_payload(&to_aes_gcm_ctx(aec)->ctx,
482 					       m, src, len, dst);
483 }
484 
485 static TEE_Result aes_gcm_enc_final(struct crypto_authenc_ctx *aec,
486 				    const uint8_t *src, size_t len,
487 				    uint8_t *dst, uint8_t *tag, size_t *tag_len)
488 {
489 	return internal_aes_gcm_enc_final(&to_aes_gcm_ctx(aec)->ctx, src, len,
490 					  dst, tag, tag_len);
491 }
492 
493 static TEE_Result aes_gcm_dec_final(struct crypto_authenc_ctx *aec,
494 				    const uint8_t *src, size_t len,
495 				    uint8_t *dst, const uint8_t *tag,
496 				    size_t tag_len)
497 {
498 	return internal_aes_gcm_dec_final(&to_aes_gcm_ctx(aec)->ctx, src, len,
499 					  dst, tag, tag_len);
500 }
501 
/* authenc .final hook: intentionally a no-op, nothing to tear down. */
static void aes_gcm_final(struct crypto_authenc_ctx *aec __unused)
{
}
505 
/* Hook table installed in each context by crypto_aes_gcm_alloc_ctx(). */
static const struct crypto_authenc_ops aes_gcm_ops = {
	.init = aes_gcm_init,
	.update_aad = aes_gcm_update_aad,
	.update_payload = aes_gcm_update_payload,
	.enc_final = aes_gcm_enc_final,
	.dec_final = aes_gcm_dec_final,
	.final = aes_gcm_final,
	.free_ctx = aes_gcm_free_ctx,
	.copy_state = aes_gcm_copy_state,
};
516 
/*
 * internal_aes_gcm_gfmul() is based on ghash_gfmul() from
 * https://github.com/openbsd/src/blob/master/sys/crypto/gmac.c
 */
/*
 * Multiply @X by @Y in GF(2^128) as used by GHASH and store the result
 * in @product; all three are in the big-endian representation of
 * NIST SP 800-38D.
 *
 * Bit-serial schoolbook method: scan the 128 bits of X most
 * significant first; for each set bit, xor the current Y into the
 * accumulator Z, then shift Y and reduce with R = 0xe1 << 120 when a
 * bit falls off.
 *
 * NOTE(review): the branch on x[n >> 3] depends on input data bits, so
 * this software fallback is not constant time — confirm that is
 * acceptable wherever it is used.
 */
void internal_aes_gcm_gfmul(const uint64_t X[2], const uint64_t Y[2],
			    uint64_t product[2])
{
	uint64_t y[2] = { 0 };
	uint64_t z[2] = { 0 };	/* accumulator Z */
	const uint8_t *x = (const uint8_t *)X;
	uint32_t mul = 0;
	size_t n = 0;

	y[0] = TEE_U64_FROM_BIG_ENDIAN(Y[0]);
	y[1] = TEE_U64_FROM_BIG_ENDIAN(Y[1]);

	for (n = 0; n < TEE_AES_BLOCK_SIZE * 8; n++) {
		/* update Z: add Y when bit n of X (MSB first) is set */
		if (x[n >> 3] & (1 << (~n & 7)))
			internal_aes_gcm_xor_block(z, y);

		/* update Y: shift right, fold the dropped bit back via R */
		mul = y[1] & 1;
		y[1] = (y[0] << 63) | (y[1] >> 1);
		y[0] = (y[0] >> 1) ^ (0xe100000000000000 * mul);
	}

	product[0] = TEE_U64_TO_BIG_ENDIAN(z[0]);
	product[1] = TEE_U64_TO_BIG_ENDIAN(z[1]);
}
547 
/*
 * Combined CTR-encrypt + GHASH for one 16-byte block.
 *
 * state->buf_cryp holds the pre-encrypted counter block (see the
 * comment in __gcm_init()): xor-ing @src into it yields the
 * ciphertext, which is hashed and copied to @dst. The next counter
 * block is then pre-encrypted for the following call.
 */
void internal_aes_gcm_encrypt_block(struct internal_aes_gcm_state *state,
				    const struct internal_aes_gcm_key *enc_key,
				    const uint64_t src[2], uint64_t dst[2])
{
	void *buf_cryp = state->buf_cryp;
	void *ctr = state->ctr;

	internal_aes_gcm_xor_block(buf_cryp, src);
	internal_aes_gcm_ghash_update(state, buf_cryp, NULL, 0);
	memcpy(dst, buf_cryp, sizeof(state->buf_cryp));

	/* Prepare the keystream for the next block. */
	crypto_aes_enc_block(enc_key->data, sizeof(enc_key->data),
			     enc_key->rounds, ctr, buf_cryp);
	internal_aes_gcm_inc_ctr(state);
}
563 
564 static void encrypt_pl(struct internal_aes_gcm_state *state,
565 		       const struct internal_aes_gcm_key *ek,
566 		       const uint8_t *src, size_t num_blocks, uint8_t *dst)
567 {
568 	size_t n = 0;
569 
570 	if (ALIGNMENT_IS_OK(src, uint64_t)) {
571 		for (n = 0; n < num_blocks; n++) {
572 			const void *s = src + n * TEE_AES_BLOCK_SIZE;
573 			void *d = dst + n * TEE_AES_BLOCK_SIZE;
574 
575 			internal_aes_gcm_encrypt_block(state, ek, s, d);
576 		}
577 	} else {
578 		for (n = 0; n < num_blocks; n++) {
579 			uint64_t tmp[2] = { 0 };
580 			void *d = dst + n * TEE_AES_BLOCK_SIZE;
581 
582 			memcpy(tmp, src + n * TEE_AES_BLOCK_SIZE, sizeof(tmp));
583 			internal_aes_gcm_encrypt_block(state, ek, tmp, d);
584 		}
585 	}
586 }
587 
/*
 * Combined CTR-decrypt + GHASH for one 16-byte block.
 *
 * Unlike encryption, the counter block is encrypted here at the start
 * (see the comment in __gcm_init()); the ciphertext @src is fed to
 * GHASH and xor-ed with the keystream to produce the plaintext in
 * @dst.
 */
void internal_aes_gcm_decrypt_block(struct internal_aes_gcm_state *state,
				    const struct internal_aes_gcm_key *enc_key,
				    const uint64_t src[2], uint64_t dst[2])
{
	void *buf_cryp = state->buf_cryp;
	void *ctr = state->ctr;

	crypto_aes_enc_block(enc_key->data, sizeof(enc_key->data),
			     enc_key->rounds, ctr, buf_cryp);
	internal_aes_gcm_inc_ctr(state);

	internal_aes_gcm_xor_block(buf_cryp, src);
	/* GHASH consumes the ciphertext, i.e. @src. */
	internal_aes_gcm_ghash_update(state, src, NULL, 0);
	memcpy(dst, buf_cryp, sizeof(state->buf_cryp));
}
603 
604 static void decrypt_pl(struct internal_aes_gcm_state *state,
605 		       const struct internal_aes_gcm_key *ek,
606 		       const uint8_t *src, size_t num_blocks, uint8_t *dst)
607 {
608 	size_t n = 0;
609 
610 	if (ALIGNMENT_IS_OK(src, uint64_t)) {
611 		for (n = 0; n < num_blocks; n++) {
612 			const void *s = src + n * TEE_AES_BLOCK_SIZE;
613 			void *d = dst + n * TEE_AES_BLOCK_SIZE;
614 
615 			internal_aes_gcm_decrypt_block(state, ek, s, d);
616 		}
617 	} else {
618 		for (n = 0; n < num_blocks; n++) {
619 			uint64_t tmp[2] = { 0 };
620 			void *d = dst + n * TEE_AES_BLOCK_SIZE;
621 
622 			memcpy(tmp, src + n * TEE_AES_BLOCK_SIZE, sizeof(tmp));
623 			internal_aes_gcm_decrypt_block(state, ek, tmp, d);
624 		}
625 	}
626 }
627 
628 void __weak
629 internal_aes_gcm_update_payload_blocks(struct internal_aes_gcm_state *state,
630 				       const struct internal_aes_gcm_key *ek,
631 				       TEE_OperationMode m, const void *src,
632 				       size_t num_blocks, void *dst)
633 {
634 	assert(!state->buf_pos && num_blocks);
635 
636 	if (m == TEE_MODE_ENCRYPT)
637 		encrypt_pl(state, ek, src, num_blocks, dst);
638 	else
639 		decrypt_pl(state, ek, src, num_blocks, dst);
640 }
641 #endif /*!CFG_CRYPTO_AES_GCM_FROM_CRYPTOLIB*/
642