xref: /optee_os/core/crypto/aes-gcm.c (revision d7fd8f8765899d4fd1336702aa34cd0e72e2806c)
// SPDX-License-Identifier: BSD-2-Clause
/*
 * Copyright (c) 2017-2020, Linaro Limited
 */

#include <assert.h>
#include <crypto/internal_aes-gcm.h>
#include <crypto/crypto_impl.h>
#include <io.h>
#include <string_ext.h>
#include <string.h>
#include <tee_api_types.h>
#include <types_ext.h>
#include <utee_defines.h>
#include <util.h>

static void xor_buf(uint8_t *dst, const uint8_t *src, size_t len)
{
	size_t n;

	for (n = 0; n < len; n++)
		dst[n] ^= src[n];
}

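/*
 * Feeds @data into the GHASH state. Complete blocks are hashed directly
 * (via a bounce buffer if @data is not block aligned); a trailing partial
 * block is zero padded before it is hashed.
 */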
static void ghash_update_pad_zero(struct internal_aes_gcm_state *state,
				  const uint8_t *data, size_t len)
{
	size_t n = len / TEE_AES_BLOCK_SIZE;
	uint64_t block[2];

	if (n) {
		if (internal_aes_gcm_ptr_is_block_aligned(data)) {
			internal_aes_gcm_ghash_update(state, NULL, data, n);
		} else {
			size_t m;

			for (m = 0; m < n; m++) {
				memcpy(block, data + m * sizeof(block),
				       sizeof(block));
				internal_aes_gcm_ghash_update(state, NULL,
							      (void *)block, 1);
			}
		}
	}

	if (len - n * TEE_AES_BLOCK_SIZE) {
		memset(block, 0, sizeof(block));
		memcpy(block, data + n * TEE_AES_BLOCK_SIZE,
		       len - n * TEE_AES_BLOCK_SIZE);
		internal_aes_gcm_ghash_update(state, block, NULL, 0);
	}
}

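/*
 * GHASHes a length block where @l1 and @l2 are byte counts converted to
 * big-endian bit counts. Used both when deriving the counter from a
 * non-96-bit nonce and when finalizing the tag over the AAD and payload
 * lengths.
 */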
static void ghash_update_lengths(struct internal_aes_gcm_state *state,
				 uint32_t l1, uint32_t l2)
{
	uint64_t len_fields[2] = {
		TEE_U64_TO_BIG_ENDIAN((uint64_t)l1 * 8),
		TEE_U64_TO_BIG_ENDIAN((uint64_t)l2 * 8)
	};

	COMPILE_TIME_ASSERT(sizeof(len_fields) == TEE_AES_BLOCK_SIZE);
	internal_aes_gcm_ghash_update(state, (uint8_t *)len_fields, NULL, 0);
}

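/*
 * Sets up the key material, the initial counter block and the
 * pre-encrypted counter block used for the tag. With a 96-bit nonce the
 * counter is nonce || 0^31 || 1, otherwise it is derived by GHASHing the
 * zero-padded nonce followed by its bit length.
 */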
static TEE_Result __gcm_init(struct internal_aes_gcm_state *state,
			     const struct internal_aes_gcm_key *ek,
			     TEE_OperationMode mode, const void *nonce,
			     size_t nonce_len, size_t tag_len)
{
	COMPILE_TIME_ASSERT(sizeof(state->ctr) == TEE_AES_BLOCK_SIZE);

	if (tag_len > sizeof(state->buf_tag))
		return TEE_ERROR_BAD_PARAMETERS;

	memset(state, 0, sizeof(*state));

	state->tag_len = tag_len;
	internal_aes_gcm_set_key(state, ek);

	if (nonce_len == (96 / 8)) {
		memcpy(state->ctr, nonce, nonce_len);
		internal_aes_gcm_inc_ctr(state);
	} else {
		ghash_update_pad_zero(state, nonce, nonce_len);
		ghash_update_lengths(state, 0, nonce_len);

		memcpy(state->ctr, state->hash_state, sizeof(state->ctr));
		memset(state->hash_state, 0, sizeof(state->hash_state));
	}

	internal_aes_gcm_encrypt_block(ek, state->ctr, state->buf_tag);
	internal_aes_gcm_inc_ctr(state);
	if (mode == TEE_MODE_ENCRYPT) {
		/*
		 * Encryption uses the pre-encrypted xor-buffer to encrypt,
		 * while decryption encrypts the xor-buffer only when
		 * needed.
		 *
		 * The reason for this is that the combined encryption and
		 * ghash implementation does both operations intertwined.
		 * In the decrypt case the xor-buffer is needed at the end
		 * of processing each block, while the encryption case
		 * needs the xor-buffer before processing each block.
		 *
		 * In a pure software implementation we wouldn't have any
		 * use for this kind of optimization, but since this
		 * AES-GCM implementation is aimed at being combined with
		 * accelerated routines it's more convenient to always have
		 * this optimization activated.
		 */
		internal_aes_gcm_encrypt_block(ek, state->ctr, state->buf_cryp);
		internal_aes_gcm_inc_ctr(state);
	}

	return TEE_SUCCESS;
}

TEE_Result internal_aes_gcm_init(struct internal_aes_gcm_ctx *ctx,
				 TEE_OperationMode mode, const void *key,
				 size_t key_len, const void *nonce,
				 size_t nonce_len, size_t tag_len)
{
	TEE_Result res = internal_aes_gcm_expand_enc_key(key, key_len,
							 &ctx->key);
	if (res)
		return res;

	return __gcm_init(&ctx->state, &ctx->key, mode, nonce, nonce_len,
			  tag_len);
}

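/*
 * Adds AAD to the GHASH state. Partial and unaligned input is staged in
 * buf_hash until a full block is available; block-aligned runs are hashed
 * directly. Must not be called once payload has been processed.
 */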
static TEE_Result __gcm_update_aad(struct internal_aes_gcm_state *state,
				   const void *data, size_t len)
{
	const uint8_t *d = data;
	size_t l = len;
	const uint8_t *head = NULL;
	size_t n;

	if (state->payload_bytes)
		return TEE_ERROR_BAD_PARAMETERS;

	state->aad_bytes += len;

	while (l) {
		if (state->buf_pos ||
		    !internal_aes_gcm_ptr_is_block_aligned(d) ||
		    l < TEE_AES_BLOCK_SIZE) {
			n = MIN(TEE_AES_BLOCK_SIZE - state->buf_pos, l);
			memcpy(state->buf_hash + state->buf_pos, d, n);
			state->buf_pos += n;

			if (state->buf_pos != TEE_AES_BLOCK_SIZE)
				return TEE_SUCCESS;

			state->buf_pos = 0;
			head = state->buf_hash;
			d += n;
			l -= n;
		}

		if (internal_aes_gcm_ptr_is_block_aligned(d))
			n = l / TEE_AES_BLOCK_SIZE;
		else
			n = 0;

		internal_aes_gcm_ghash_update(state, head, d, n);
		l -= n * TEE_AES_BLOCK_SIZE;
		d += n * TEE_AES_BLOCK_SIZE;
	}

	return TEE_SUCCESS;
}

TEE_Result internal_aes_gcm_update_aad(struct internal_aes_gcm_ctx *ctx,
				       const void *data, size_t len)
{
	return __gcm_update_aad(&ctx->state, data, len);
}

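/*
 * Encrypts or decrypts payload in counter mode while GHASHing the
 * ciphertext. A pending partial AAD block is flushed first. Partial
 * blocks are handled through buf_cryp/buf_hash; runs of full blocks are
 * passed to internal_aes_gcm_update_payload_blocks().
 */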
static TEE_Result
__gcm_update_payload(struct internal_aes_gcm_state *state,
		     const struct internal_aes_gcm_key *ek,
		     TEE_OperationMode mode, const void *src,
		     size_t len, void *dst)
{
	size_t n;
	const uint8_t *s = src;
	uint8_t *d = dst;
	size_t l = len;

	if (!state->payload_bytes && state->buf_pos) {
		/* AAD part done, finish up the last bits. */
		memset(state->buf_hash + state->buf_pos, 0,
		       TEE_AES_BLOCK_SIZE - state->buf_pos);
		internal_aes_gcm_ghash_update(state, state->buf_hash, NULL, 0);
		state->buf_pos = 0;
	}

	state->payload_bytes += len;

	while (l) {
		if (state->buf_pos || l < TEE_AES_BLOCK_SIZE) {
			n = MIN(TEE_AES_BLOCK_SIZE - state->buf_pos, l);

			if (!state->buf_pos && mode == TEE_MODE_DECRYPT) {
				internal_aes_gcm_encrypt_block(ek, state->ctr,
							       state->buf_cryp);
			}

			xor_buf(state->buf_cryp + state->buf_pos, s, n);
			memcpy(d, state->buf_cryp + state->buf_pos, n);
			if (mode == TEE_MODE_ENCRYPT)
				memcpy(state->buf_hash + state->buf_pos,
				       state->buf_cryp + state->buf_pos, n);
			else
				memcpy(state->buf_hash + state->buf_pos, s, n);

			state->buf_pos += n;

			if (state->buf_pos != TEE_AES_BLOCK_SIZE)
				return TEE_SUCCESS;

			internal_aes_gcm_ghash_update(state, state->buf_hash,
						      NULL, 0);
			state->buf_pos = 0;
			d += n;
			s += n;
			l -= n;

			if (mode == TEE_MODE_ENCRYPT)
				internal_aes_gcm_encrypt_block(ek, state->ctr,
							       state->buf_cryp);
			internal_aes_gcm_inc_ctr(state);
		} else {
			n = l / TEE_AES_BLOCK_SIZE;
			internal_aes_gcm_update_payload_blocks(state, ek, mode,
							       s, n, d);
			s += n * TEE_AES_BLOCK_SIZE;
			d += n * TEE_AES_BLOCK_SIZE;
			l -= n * TEE_AES_BLOCK_SIZE;
		}
	}

	return TEE_SUCCESS;
}

TEE_Result internal_aes_gcm_update_payload(struct internal_aes_gcm_ctx *ctx,
					   TEE_OperationMode mode,
					   const void *src, size_t len,
					   void *dst)
{
	return __gcm_update_payload(&ctx->state, &ctx->key, mode, src, len,
				    dst);
}

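/*
 * Processes the last chunk of payload, GHASHes any remaining partial
 * block and the AAD/payload bit lengths, then computes the tag into
 * buf_tag as E(key, initial counter block) xor the GHASH result.
 */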
static TEE_Result operation_final(struct internal_aes_gcm_state *state,
				  const struct internal_aes_gcm_key *enc_key,
				  TEE_OperationMode m, const uint8_t *src,
				  size_t len, uint8_t *dst)
{
	TEE_Result res;

	res = __gcm_update_payload(state, enc_key, m, src, len, dst);
	if (res)
		return res;

	if (state->buf_pos) {
		memset(state->buf_hash + state->buf_pos, 0,
		       sizeof(state->buf_hash) - state->buf_pos);
		internal_aes_gcm_ghash_update(state, state->buf_hash, NULL, 0);
	}

	ghash_update_lengths(state, state->aad_bytes, state->payload_bytes);
	/*
	 * buf_tag was filled with the encrypted initial counter block in
	 * __gcm_init().
	 */
	xor_buf(state->buf_tag, state->hash_state, state->tag_len);

	return TEE_SUCCESS;
}

static TEE_Result __gcm_enc_final(struct internal_aes_gcm_state *state,
				  const struct internal_aes_gcm_key *enc_key,
				  const void *src, size_t len, void *dst,
				  void *tag, size_t *tag_len)
{
	TEE_Result res;

	if (*tag_len < state->tag_len)
		return TEE_ERROR_SHORT_BUFFER;

	res = operation_final(state, enc_key, TEE_MODE_ENCRYPT, src, len, dst);
	if (res)
		return res;

	memcpy(tag, state->buf_tag, state->tag_len);
	*tag_len = state->tag_len;

	return TEE_SUCCESS;
}

TEE_Result internal_aes_gcm_enc_final(struct internal_aes_gcm_ctx *ctx,
				      const void *src, size_t len, void *dst,
				      void *tag, size_t *tag_len)
{
	return __gcm_enc_final(&ctx->state, &ctx->key, src, len, dst, tag,
			       tag_len);
}

static TEE_Result __gcm_dec_final(struct internal_aes_gcm_state *state,
				  const struct internal_aes_gcm_key *enc_key,
				  const void *src, size_t len, void *dst,
				  const void *tag, size_t tag_len)
{
	TEE_Result res;

	if (tag_len != state->tag_len)
		return TEE_ERROR_MAC_INVALID;

	res = operation_final(state, enc_key, TEE_MODE_DECRYPT, src, len, dst);
	if (res)
		return res;

	if (consttime_memcmp(state->buf_tag, tag, tag_len))
		return TEE_ERROR_MAC_INVALID;

	return TEE_SUCCESS;
}

TEE_Result internal_aes_gcm_dec_final(struct internal_aes_gcm_ctx *ctx,
				      const void *src, size_t len, void *dst,
				      const void *tag, size_t tag_len)
{
	return __gcm_dec_final(&ctx->state, &ctx->key, src, len, dst, tag,
			       tag_len);
}

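/*
 * Increments the full 128-bit counter, stored as two big-endian 64-bit
 * words, with carry from the low word into the high word.
 */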
void internal_aes_gcm_inc_ctr(struct internal_aes_gcm_state *state)
{
	uint64_t c;

	c = TEE_U64_FROM_BIG_ENDIAN(state->ctr[1]) + 1;
	state->ctr[1] = TEE_U64_TO_BIG_ENDIAN(c);
	if (!c) {
		c = TEE_U64_FROM_BIG_ENDIAN(state->ctr[0]) + 1;
		state->ctr[0] = TEE_U64_TO_BIG_ENDIAN(c);
	}
}

TEE_Result internal_aes_gcm_enc(const struct internal_aes_gcm_key *enc_key,
				const void *nonce, size_t nonce_len,
				const void *aad, size_t aad_len,
				const void *src, size_t len, void *dst,
				void *tag, size_t *tag_len)
{
	TEE_Result res;
	struct internal_aes_gcm_state state;

	res = __gcm_init(&state, enc_key, TEE_MODE_ENCRYPT, nonce, nonce_len,
			 *tag_len);
	if (res)
		return res;

	if (aad) {
		res = __gcm_update_aad(&state, aad, aad_len);
		if (res)
			return res;
	}

	return __gcm_enc_final(&state, enc_key, src, len, dst, tag, tag_len);
}
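
/*
 * Minimal one-shot usage sketch for the helper above. The key, nonce,
 * aad, plain and cipher buffers and their lengths are illustrative names
 * only, not identifiers from this file:
 *
 *	struct internal_aes_gcm_key ek = { };
 *	uint8_t tag[16] = { };
 *	size_t tag_len = sizeof(tag);
 *
 *	if (internal_aes_gcm_expand_enc_key(key, key_len, &ek))
 *		return TEE_ERROR_GENERIC;
 *	if (internal_aes_gcm_enc(&ek, nonce, 12, aad, aad_len,
 *				 plain, plain_len, cipher, tag, &tag_len))
 *		return TEE_ERROR_GENERIC;
 */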

TEE_Result internal_aes_gcm_dec(const struct internal_aes_gcm_key *enc_key,
				const void *nonce, size_t nonce_len,
				const void *aad, size_t aad_len,
				const void *src, size_t len, void *dst,
				const void *tag, size_t tag_len)
{
	TEE_Result res;
	struct internal_aes_gcm_state state;

	res = __gcm_init(&state, enc_key, TEE_MODE_DECRYPT, nonce, nonce_len,
			 tag_len);
	if (res)
		return res;

	if (aad) {
		res = __gcm_update_aad(&state, aad, aad_len);
		if (res)
			return res;
	}

	return __gcm_dec_final(&state, enc_key, src, len, dst, tag, tag_len);
}

#ifndef CFG_CRYPTO_AES_GCM_FROM_CRYPTOLIB
#include <stdlib.h>
#include <crypto/crypto.h>

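/*
 * Glue exposing this implementation through the generic crypto_authenc
 * interface when no crypto library provides AES-GCM.
 */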
struct aes_gcm_ctx {
	struct crypto_authenc_ctx aec;
	struct internal_aes_gcm_ctx ctx;
};

static const struct crypto_authenc_ops aes_gcm_ops;

static struct aes_gcm_ctx *
to_aes_gcm_ctx(struct crypto_authenc_ctx *aec)
{
	assert(aec->ops == &aes_gcm_ops);

	return container_of(aec, struct aes_gcm_ctx, aec);
}

TEE_Result crypto_aes_gcm_alloc_ctx(struct crypto_authenc_ctx **ctx_ret)
{
	struct aes_gcm_ctx *ctx = calloc(1, sizeof(*ctx));

	if (!ctx)
		return TEE_ERROR_OUT_OF_MEMORY;
	ctx->aec.ops = &aes_gcm_ops;

	*ctx_ret = &ctx->aec;

	return TEE_SUCCESS;
}

static void aes_gcm_free_ctx(struct crypto_authenc_ctx *aec)
{
	free(to_aes_gcm_ctx(aec));
}

static void aes_gcm_copy_state(struct crypto_authenc_ctx *dst_ctx,
			       struct crypto_authenc_ctx *src_ctx)
{
	to_aes_gcm_ctx(dst_ctx)->ctx = to_aes_gcm_ctx(src_ctx)->ctx;
}

static TEE_Result aes_gcm_init(struct crypto_authenc_ctx *aec,
			       TEE_OperationMode mode,
			       const uint8_t *key, size_t key_len,
			       const uint8_t *nonce, size_t nonce_len,
			       size_t tag_len, size_t aad_len __unused,
			       size_t payload_len __unused)
{
	return internal_aes_gcm_init(&to_aes_gcm_ctx(aec)->ctx, mode, key,
				     key_len, nonce, nonce_len, tag_len);
}

static TEE_Result aes_gcm_update_aad(struct crypto_authenc_ctx *aec,
				     const uint8_t *data, size_t len)
{
	return internal_aes_gcm_update_aad(&to_aes_gcm_ctx(aec)->ctx, data,
					   len);
}

static TEE_Result aes_gcm_update_payload(struct crypto_authenc_ctx *aec,
					 TEE_OperationMode m,
					 const uint8_t *src, size_t len,
					 uint8_t *dst)
{
	return internal_aes_gcm_update_payload(&to_aes_gcm_ctx(aec)->ctx,
					       m, src, len, dst);
}

static TEE_Result aes_gcm_enc_final(struct crypto_authenc_ctx *aec,
				    const uint8_t *src, size_t len,
				    uint8_t *dst, uint8_t *tag, size_t *tag_len)
{
	return internal_aes_gcm_enc_final(&to_aes_gcm_ctx(aec)->ctx, src, len,
					  dst, tag, tag_len);
}

static TEE_Result aes_gcm_dec_final(struct crypto_authenc_ctx *aec,
				    const uint8_t *src, size_t len,
				    uint8_t *dst, const uint8_t *tag,
				    size_t tag_len)
{
	return internal_aes_gcm_dec_final(&to_aes_gcm_ctx(aec)->ctx, src, len,
					  dst, tag, tag_len);
}

static void aes_gcm_final(struct crypto_authenc_ctx *aec __unused)
{
}

static const struct crypto_authenc_ops aes_gcm_ops = {
	.init = aes_gcm_init,
	.update_aad = aes_gcm_update_aad,
	.update_payload = aes_gcm_update_payload,
	.enc_final = aes_gcm_enc_final,
	.dec_final = aes_gcm_dec_final,
	.final = aes_gcm_final,
	.free_ctx = aes_gcm_free_ctx,
	.copy_state = aes_gcm_copy_state,
};

/*
 * internal_aes_gcm_gfmul() is based on ghash_gfmul() from
 * https://github.com/openbsd/src/blob/master/sys/crypto/gmac.c
 */
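/*
 * Bit-serial multiplication in GF(2^128): Z accumulates Y for each set
 * bit of X, and Y is shifted right one bit per round with the constant
 * 0xe100000000000000 (the GCM reduction polynomial R) folded into the
 * high word whenever a bit falls off the low end.
 */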
void internal_aes_gcm_gfmul(const uint64_t X[2], const uint64_t Y[2],
			    uint64_t product[2])
{
	uint64_t y[2] = { 0 };
	uint64_t z[2] = { 0 };
	const uint8_t *x = (const uint8_t *)X;
	uint32_t mul = 0;
	size_t n = 0;

	y[0] = TEE_U64_FROM_BIG_ENDIAN(Y[0]);
	y[1] = TEE_U64_FROM_BIG_ENDIAN(Y[1]);

	for (n = 0; n < TEE_AES_BLOCK_SIZE * 8; n++) {
		/* update Z */
		if (x[n >> 3] & (1 << (~n & 7)))
			internal_aes_gcm_xor_block(z, y);

		/* update Y */
		mul = y[1] & 1;
		y[1] = (y[0] << 63) | (y[1] >> 1);
		y[0] = (y[0] >> 1) ^ (0xe100000000000000 * mul);
	}

	product[0] = TEE_U64_TO_BIG_ENDIAN(z[0]);
	product[1] = TEE_U64_TO_BIG_ENDIAN(z[1]);
}

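/*
 * Software fallback for full payload blocks when encrypting: buf_cryp
 * already holds the next keystream block, so each round xors in the
 * plaintext, GHASHes the resulting ciphertext and then prepares the
 * keystream block for the following round.
 */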
static void encrypt_block(struct internal_aes_gcm_state *state,
			  const struct internal_aes_gcm_key *ek,
			  const void *src, void *dst)
{
	void *buf_cryp = state->buf_cryp;
	void *ctr = state->ctr;

	internal_aes_gcm_xor_block(buf_cryp, src);
	internal_aes_gcm_ghash_update(state, buf_cryp, NULL, 0);
	memcpy(dst, buf_cryp, sizeof(state->buf_cryp));

	internal_aes_gcm_encrypt_block(ek, ctr, buf_cryp);
	internal_aes_gcm_inc_ctr(state);
}

static void encrypt_pl(struct internal_aes_gcm_state *state,
		       const struct internal_aes_gcm_key *ek,
		       const uint8_t *src, size_t num_blocks, uint8_t *dst)
{
	size_t n = 0;

	if (ALIGNMENT_IS_OK(src, uint64_t)) {
		for (n = 0; n < num_blocks; n++)
			encrypt_block(state, ek, src + n * TEE_AES_BLOCK_SIZE,
				      dst + n * TEE_AES_BLOCK_SIZE);
	} else {
		for (n = 0; n < num_blocks; n++) {
			uint64_t tmp[2] = { 0 };

			memcpy(tmp, src + n * TEE_AES_BLOCK_SIZE, sizeof(tmp));
			encrypt_block(state, ek, tmp,
				      dst + n * TEE_AES_BLOCK_SIZE);
		}
	}
}

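/*
 * Software fallback for full payload blocks when decrypting: the
 * keystream block is generated first, then the ciphertext is GHASHed and
 * xored with the keystream to recover the plaintext.
 */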
static void decrypt_block(struct internal_aes_gcm_state *state,
			  const struct internal_aes_gcm_key *ek,
			  const void *src, void *dst)
{
	void *buf_cryp = state->buf_cryp;
	void *ctr = state->ctr;

	internal_aes_gcm_encrypt_block(ek, ctr, buf_cryp);
	internal_aes_gcm_inc_ctr(state);

	internal_aes_gcm_xor_block(buf_cryp, src);
	internal_aes_gcm_ghash_update(state, src, NULL, 0);
	memcpy(dst, buf_cryp, sizeof(state->buf_cryp));
}

static void decrypt_pl(struct internal_aes_gcm_state *state,
		       const struct internal_aes_gcm_key *ek,
		       const uint8_t *src, size_t num_blocks, uint8_t *dst)
{
	size_t n = 0;

	if (ALIGNMENT_IS_OK(src, uint64_t)) {
		for (n = 0; n < num_blocks; n++)
			decrypt_block(state, ek, src + n * TEE_AES_BLOCK_SIZE,
				      dst + n * TEE_AES_BLOCK_SIZE);
	} else {
		for (n = 0; n < num_blocks; n++) {
			uint64_t tmp[2] = { 0 };

			memcpy(tmp, src + n * TEE_AES_BLOCK_SIZE, sizeof(tmp));
			decrypt_block(state, ek, tmp,
				      dst + n * TEE_AES_BLOCK_SIZE);
		}
	}
}

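/*
 * Weak default so that architecture-specific, accelerated implementations
 * can override the bulk block processing while reusing the rest of this
 * file.
 */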
void __weak
internal_aes_gcm_update_payload_blocks(struct internal_aes_gcm_state *state,
				       const struct internal_aes_gcm_key *ek,
				       TEE_OperationMode m, const void *src,
				       size_t num_blocks, void *dst)
{
	assert(!state->buf_pos && num_blocks);

	if (m == TEE_MODE_ENCRYPT)
		encrypt_pl(state, ek, src, num_blocks, dst);
	else
		decrypt_pl(state, ek, src, num_blocks, dst);
}
#endif /*!CFG_CRYPTO_AES_GCM_FROM_CRYPTOLIB*/