// SPDX-License-Identifier: BSD-2-Clause
/*
 * Copyright (c) 2021-2023, STMicroelectronics - All Rights Reserved
 */
#include <assert.h>
#include <config.h>
#include <drivers/clk.h>
#include <drivers/clk_dt.h>
#include <drivers/rstctrl.h>
#include <io.h>
#include <kernel/boot.h>
#include <kernel/delay.h>
#include <kernel/dt.h>
#include <kernel/huk_subkey.h>
#include <kernel/mutex.h>
#include <kernel/pm.h>
#include <libfdt.h>
#include <mm/core_memprot.h>
#include <stdint.h>
#include <stm32_util.h>
#include <string_ext.h>
#include <utee_defines.h>
#include <util.h>

#include "common.h"
#include "stm32_saes.h"

/* SAES control register */
#define _SAES_CR			U(0x0)
/* SAES status register */
#define _SAES_SR			U(0x04)
/* SAES data input register */
#define _SAES_DINR			U(0x08)
/* SAES data output register */
#define _SAES_DOUTR			U(0x0c)
/* SAES key registers [0-3] */
#define _SAES_KEYR0			U(0x10)
#define _SAES_KEYR1			U(0x14)
#define _SAES_KEYR2			U(0x18)
#define _SAES_KEYR3			U(0x1c)
/* SAES initialization vector registers [0-3] */
#define _SAES_IVR0			U(0x20)
#define _SAES_IVR1			U(0x24)
#define _SAES_IVR2			U(0x28)
#define _SAES_IVR3			U(0x2c)
/* SAES key registers [4-7] */
#define _SAES_KEYR4			U(0x30)
#define _SAES_KEYR5			U(0x34)
#define _SAES_KEYR6			U(0x38)
#define _SAES_KEYR7			U(0x3c)
/* SAES suspend registers [0-7] */
#define _SAES_SUSPR0			U(0x40)
#define _SAES_SUSPR1			U(0x44)
#define _SAES_SUSPR2			U(0x48)
#define _SAES_SUSPR3			U(0x4c)
#define _SAES_SUSPR4			U(0x50)
#define _SAES_SUSPR5			U(0x54)
#define _SAES_SUSPR6			U(0x58)
#define _SAES_SUSPR7			U(0x5c)
/* SAES interrupt enable register */
#define _SAES_IER			U(0x300)
/* SAES interrupt status register */
#define _SAES_ISR			U(0x304)
/* SAES interrupt clear register */
#define _SAES_ICR			U(0x308)

/* SAES control register fields */
#define _SAES_CR_RESET_VALUE		U(0x0)
#define _SAES_CR_IPRST			BIT(31)
#define _SAES_CR_KEYSEL_MASK		GENMASK_32(30, 28)
#define _SAES_CR_KEYSEL_SHIFT		U(28)
#define _SAES_CR_KEYSEL_SOFT		U(0x0)
#define _SAES_CR_KEYSEL_DHUK		U(0x1)
#define _SAES_CR_KEYSEL_BHK		U(0x2)
#define _SAES_CR_KEYSEL_BHU_XOR_BH_K	U(0x4)
#define _SAES_CR_KEYSEL_TEST		U(0x7)
#define _SAES_CR_KSHAREID_MASK		GENMASK_32(27, 26)
#define _SAES_CR_KSHAREID_SHIFT		U(26)
#define _SAES_CR_KSHAREID_CRYP		U(0x0)
#define _SAES_CR_KEYMOD_MASK		GENMASK_32(25, 24)
#define _SAES_CR_KEYMOD_SHIFT		U(24)
#define _SAES_CR_KEYMOD_NORMAL		U(0x0)
#define _SAES_CR_KEYMOD_WRAPPED		U(0x1)
#define _SAES_CR_KEYMOD_SHARED		U(0x2)
#define _SAES_CR_NPBLB_MASK		GENMASK_32(23, 20)
#define _SAES_CR_NPBLB_SHIFT		U(20)
#define _SAES_CR_KEYPROT		BIT(19)
#define _SAES_CR_KEYSIZE		BIT(18)
#define _SAES_CR_GCMPH_MASK		GENMASK_32(14, 13)
#define _SAES_CR_GCMPH_SHIFT		U(13)
#define _SAES_CR_GCMPH_INIT		U(0)
#define _SAES_CR_GCMPH_HEADER		U(1)
#define _SAES_CR_GCMPH_PAYLOAD		U(2)
#define _SAES_CR_GCMPH_FINAL		U(3)
#define _SAES_CR_DMAOUTEN		BIT(12)
#define _SAES_CR_DMAINEN		BIT(11)
#define _SAES_CR_CHMOD_MASK		(BIT(16) | GENMASK_32(6, 5))
#define _SAES_CR_CHMOD_SHIFT		U(5)
#define _SAES_CR_CHMOD_ECB		U(0x0)
#define _SAES_CR_CHMOD_CBC		U(0x1)
#define _SAES_CR_CHMOD_CTR		U(0x2)
#define _SAES_CR_CHMOD_GCM		U(0x3)
#define _SAES_CR_CHMOD_GMAC		U(0x3)
#define _SAES_CR_CHMOD_CCM		U(0x800)
#define _SAES_CR_MODE_MASK		GENMASK_32(4, 3)
#define _SAES_CR_MODE_SHIFT		U(3)
#define _SAES_CR_MODE_ENC		U(0)
#define _SAES_CR_MODE_KEYPREP		U(1)
#define _SAES_CR_MODE_DEC		U(2)
#define _SAES_CR_DATATYPE_MASK		GENMASK_32(2, 1)
#define _SAES_CR_DATATYPE_SHIFT		U(1)
#define _SAES_CR_DATATYPE_NONE		U(0)
#define _SAES_CR_DATATYPE_HALF_WORD	U(1)
#define _SAES_CR_DATATYPE_BYTE		U(2)
#define _SAES_CR_DATATYPE_BIT		U(3)
#define _SAES_CR_EN			BIT(0)

/* SAES status register fields */
#define _SAES_SR_KEYVALID		BIT(7)
#define _SAES_SR_BUSY			BIT(3)
#define _SAES_SR_WRERR			BIT(2)
#define _SAES_SR_RDERR			BIT(1)
#define _SAES_SR_CCF			BIT(0)

/* SAES interrupt registers fields */
#define _SAES_I_RNG_ERR			BIT(3)
#define _SAES_I_KEY_ERR			BIT(2)
#define _SAES_I_RW_ERR			BIT(1)
#define _SAES_I_CC			BIT(0)

#define SAES_TIMEOUT_US			U(100000)
#define TIMEOUT_US_1MS			U(1000)
#define SAES_RESET_DELAY		U(2)

#define IS_CHAINING_MODE(mode, cr) \
	(((cr) & _SAES_CR_CHMOD_MASK) == (_SAES_CR_CHMOD_##mode << \
					  _SAES_CR_CHMOD_SHIFT))

#define SET_CHAINING_MODE(mode, cr) \
	set_field_u32(cr, _SAES_CR_CHMOD_MASK, _SAES_CR_CHMOD_##mode)
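
/*
 * For example, IS_CHAINING_MODE(GCM, cr) expands to
 * ((cr) & _SAES_CR_CHMOD_MASK) ==
 * (_SAES_CR_CHMOD_GCM << _SAES_CR_CHMOD_SHIFT)
 * i.e. it checks that the CHMOD field of @cr selects GCM.
 */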

static struct mutex saes_lock = MUTEX_INITIALIZER;
static struct stm32_saes_platdata {
	vaddr_t base;
	struct clk *clk;
	struct rstctrl *reset;
} saes_pdata;

static bool does_chaining_mode_need_iv(uint32_t cr)
{
	return !IS_CHAINING_MODE(ECB, cr);
}

static bool is_encrypt(uint32_t cr)
{
	return (cr & _SAES_CR_MODE_MASK) ==
	       SHIFT_U32(_SAES_CR_MODE_ENC, _SAES_CR_MODE_SHIFT);
}

static bool is_decrypt(uint32_t cr)
{
	return (cr & _SAES_CR_MODE_MASK) ==
	       SHIFT_U32(_SAES_CR_MODE_DEC, _SAES_CR_MODE_SHIFT);
}

static bool does_need_npblb(uint32_t cr)
{
	return (IS_CHAINING_MODE(GCM, cr) && is_encrypt(cr)) ||
	       (IS_CHAINING_MODE(CCM, cr) && is_decrypt(cr));
}

static bool can_suspend(uint32_t cr)
{
	return !IS_CHAINING_MODE(GCM, cr);
}

static void write_aligned_block(vaddr_t base, uint32_t *data)
{
	unsigned int i = 0;

	/* SAES is configured to swap bytes as expected */
	for (i = 0; i < AES_BLOCK_NB_U32; i++)
		io_write32(base + _SAES_DINR, data[i]);
}

static void write_block(vaddr_t base, uint8_t *data)
{
	if (IS_ALIGNED_WITH_TYPE(data, uint32_t)) {
		write_aligned_block(base, (void *)data);
	} else {
		uint32_t data_u32[AES_BLOCK_NB_U32] = { };

		memcpy(data_u32, data, sizeof(data_u32));
		write_aligned_block(base, data_u32);
	}
}

static void read_aligned_block(vaddr_t base, uint32_t *data)
{
	unsigned int i = 0;

	/* SAES is configured to swap bytes as expected */
	for (i = 0; i < AES_BLOCK_NB_U32; i++)
		data[i] = io_read32(base + _SAES_DOUTR);
}

static void read_block(vaddr_t base, uint8_t *data)
{
	if (IS_ALIGNED_WITH_TYPE(data, uint32_t)) {
		read_aligned_block(base, (void *)data);
	} else {
		uint32_t data_u32[AES_BLOCK_NB_U32] = { };

		read_aligned_block(base, data_u32);

		memcpy(data, data_u32, sizeof(data_u32));
	}
}

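/*
 * Each block fed through _SAES_DINR completes with the peripheral raising
 * the computation complete flag (CCF) in _SAES_SR. The flag must then be
 * cleared through _SAES_ICR before the next block is processed.
 */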
static TEE_Result wait_computation_completed(vaddr_t base)
{
	uint64_t timeout_ref = timeout_init_us(SAES_TIMEOUT_US);

	while ((io_read32(base + _SAES_SR) & _SAES_SR_CCF) != _SAES_SR_CCF)
		if (timeout_elapsed(timeout_ref))
			break;

	if ((io_read32(base + _SAES_SR) & _SAES_SR_CCF) != _SAES_SR_CCF) {
		DMSG("CCF timeout");
		return TEE_ERROR_GENERIC;
	}

	return TEE_SUCCESS;
}

static void clear_computation_completed(vaddr_t base)
{
	io_setbits32(base + _SAES_ICR, _SAES_I_CC);
}

static TEE_Result wait_key_valid(vaddr_t base)
{
	uint64_t timeout_ref = timeout_init_us(SAES_TIMEOUT_US);

	while (!(io_read32(base + _SAES_SR) & _SAES_SR_KEYVALID))
		if (timeout_elapsed(timeout_ref))
			break;

	if (!(io_read32(base + _SAES_SR) & _SAES_SR_KEYVALID)) {
		DMSG("KEYVALID timeout");
		return TEE_ERROR_GENERIC;
	}

	return TEE_SUCCESS;
}

static TEE_Result saes_start(struct stm32_saes_context *ctx)
{
	uint64_t timeout_ref = 0;

	/* Reset SAES */
	io_setbits32(ctx->base + _SAES_CR, _SAES_CR_IPRST);
	io_clrbits32(ctx->base + _SAES_CR, _SAES_CR_IPRST);

	timeout_ref = timeout_init_us(SAES_TIMEOUT_US);
	while (io_read32(ctx->base + _SAES_SR) & _SAES_SR_BUSY)
		if (timeout_elapsed(timeout_ref))
			break;

	if (io_read32(ctx->base + _SAES_SR) & _SAES_SR_BUSY) {
		DMSG("busy timeout");
		return TEE_ERROR_GENERIC;
	}

	return TEE_SUCCESS;
}

static void saes_end(struct stm32_saes_context *ctx, int prev_error)
{
	if (prev_error) {
		/* Reset SAES */
		io_setbits32(ctx->base + _SAES_CR, _SAES_CR_IPRST);
		io_clrbits32(ctx->base + _SAES_CR, _SAES_CR_IPRST);
	}

	/* Disable the SAES peripheral */
	io_clrbits32(ctx->base + _SAES_CR, _SAES_CR_EN);
}

static void saes_write_iv(struct stm32_saes_context *ctx)
{
	/* Restore the IV if the chaining mode needs one */
	if (does_chaining_mode_need_iv(ctx->cr)) {
		unsigned int i = 0;

		for (i = 0; i < AES_IVSIZE / sizeof(uint32_t); i++) {
			io_write32(ctx->base + _SAES_IVR0 + i *
				   sizeof(uint32_t), ctx->iv[i]);
		}
	}
}

static void saes_save_suspend(struct stm32_saes_context *ctx)
{
	size_t i = 0;

	for (i = 0; i < 8; i++)
		ctx->susp[i] = io_read32(ctx->base + _SAES_SUSPR0 +
					 i * sizeof(uint32_t));
}

static void saes_restore_suspend(struct stm32_saes_context *ctx)
{
	size_t i = 0;

	for (i = 0; i < 8; i++)
		io_write32(ctx->base + _SAES_SUSPR0 + i * sizeof(uint32_t),
			   ctx->susp[i]);
}

static void saes_write_key(struct stm32_saes_context *ctx)
{
	/* Restore the _SAES_KEYRx registers if a software key is used */
	if ((ctx->cr & _SAES_CR_KEYSEL_MASK) ==
	    SHIFT_U32(_SAES_CR_KEYSEL_SOFT, _SAES_CR_KEYSEL_SHIFT)) {
		size_t i = 0;

		for (i = 0; i < AES_KEYSIZE_128 / sizeof(uint32_t); i++)
			io_write32(ctx->base + _SAES_KEYR0 + i *
				   sizeof(uint32_t),
				   ctx->key[i]);

		if ((ctx->cr & _SAES_CR_KEYSIZE) == _SAES_CR_KEYSIZE) {
			for (i = 0;
			     i < (AES_KEYSIZE_256 / 2) / sizeof(uint32_t);
			     i++) {
				io_write32(ctx->base + _SAES_KEYR4 + i *
					   sizeof(uint32_t),
					   ctx->key[i + 4]);
			}
		}
	}
}

static TEE_Result saes_prepare_key(struct stm32_saes_context *ctx)
{
	TEE_Result res = TEE_ERROR_GENERIC;

	/* Disable the SAES peripheral */
	io_clrbits32(ctx->base + _SAES_CR, _SAES_CR_EN);

	/* Set key size */
	if ((ctx->cr & _SAES_CR_KEYSIZE))
		io_setbits32(ctx->base + _SAES_CR, _SAES_CR_KEYSIZE);
	else
		io_clrbits32(ctx->base + _SAES_CR, _SAES_CR_KEYSIZE);

	saes_write_key(ctx);

	res = wait_key_valid(ctx->base);
	if (res)
		return res;

	/*
	 * For ECB/CBC decryption, key preparation mode must be selected
	 * to populate the key.
	 */
	if ((IS_CHAINING_MODE(ECB, ctx->cr) ||
	     IS_CHAINING_MODE(CBC, ctx->cr)) && is_decrypt(ctx->cr)) {
		/* Select Mode 2 */
		io_clrsetbits32(ctx->base + _SAES_CR, _SAES_CR_MODE_MASK,
				SHIFT_U32(_SAES_CR_MODE_KEYPREP,
					  _SAES_CR_MODE_SHIFT));

		/* Enable SAES */
		io_setbits32(ctx->base + _SAES_CR, _SAES_CR_EN);

		res = wait_computation_completed(ctx->base);
		if (res)
			return res;

		clear_computation_completed(ctx->base);

		/* Set Mode 3 */
		io_clrsetbits32(ctx->base + _SAES_CR, _SAES_CR_MODE_MASK,
				SHIFT_U32(_SAES_CR_MODE_DEC,
					  _SAES_CR_MODE_SHIFT));
	}

	return TEE_SUCCESS;
}

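/*
 * The SAES peripheral is shared between contexts: between two update calls
 * the hardware state (CR, IV and suspend registers) is saved in the context
 * with save_context() and written back with restore_context().
 */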
static TEE_Result save_context(struct stm32_saes_context *ctx)
{
	if ((io_read32(ctx->base + _SAES_SR) & _SAES_SR_CCF)) {
		/* Device should not be in a processing phase */
		return TEE_ERROR_BAD_STATE;
	}

	/* Save CR */
	ctx->cr = io_read32(ctx->base + _SAES_CR);

	if (!can_suspend(ctx->cr))
		return TEE_SUCCESS;

	saes_save_suspend(ctx);

	/* Save the current IV if the chaining mode needs one */
	if (does_chaining_mode_need_iv(ctx->cr)) {
		uint8_t i = 0;

		/* Save IV */
		for (i = 0; i < AES_IVSIZE / sizeof(uint32_t); i++) {
			ctx->iv[i] = io_read32(ctx->base + _SAES_IVR0 + i *
					       sizeof(uint32_t));
		}
	}

	/* Disable the SAES peripheral */
	io_clrbits32(ctx->base + _SAES_CR, _SAES_CR_EN);

	return TEE_SUCCESS;
}

/* To resume the processing of a message */
static TEE_Result restore_context(struct stm32_saes_context *ctx)
{
	TEE_Result res = TEE_SUCCESS;

	/* SAES shall be disabled */
	if ((io_read32(ctx->base + _SAES_CR) & _SAES_CR_EN)) {
		DMSG("Device is still enabled");
		return TEE_ERROR_BAD_STATE;
	}

	/* Reset internal state */
	io_setbits32(ctx->base + _SAES_CR, _SAES_CR_IPRST);

	/* Restore configuration register */
	io_write32(ctx->base + _SAES_CR, ctx->cr);

	/* Write key and, in case of CBC or ECB decrypt, prepare it */
	res = saes_prepare_key(ctx);
	if (res)
		return res;

	saes_restore_suspend(ctx);

	saes_write_iv(ctx);

	/* Enable the SAES peripheral */
	io_setbits32(ctx->base + _SAES_CR, _SAES_CR_EN);

	return TEE_SUCCESS;
}

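/*
 * GCM/CCM processing goes through the GCMPH phases INIT, HEADER (associated
 * data), PAYLOAD and FINAL (tag). The helpers below move the peripheral
 * from the current phase to the requested one.
 */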
static TEE_Result do_from_init_to_phase(struct stm32_saes_context *ctx,
					uint32_t new_phase)
{
	TEE_Result res = TEE_SUCCESS;

	/* We didn't run the init phase yet */
	res = restore_context(ctx);
	if (res)
		return res;

	res = wait_computation_completed(ctx->base);
	if (res)
		return res;

	clear_computation_completed(ctx->base);

	/* Move to 'new_phase' */
	io_clrsetbits32(ctx->base + _SAES_CR, _SAES_CR_GCMPH_MASK,
			SHIFT_U32(new_phase, _SAES_CR_GCMPH_SHIFT));

	/* Enable the SAES peripheral (init disabled it) */
	io_setbits32(ctx->base + _SAES_CR, _SAES_CR_EN);

	return TEE_SUCCESS;
}

static TEE_Result do_from_header_to_phase(struct stm32_saes_context *ctx,
					  uint32_t new_phase)
{
	TEE_Result res = TEE_SUCCESS;

	if (can_suspend(ctx->cr)) {
		res = restore_context(ctx);
		if (res)
			return res;
	}

	if (ctx->extra_size) {
		/* Manage unaligned header data before moving to next phase */
		memset((uint8_t *)ctx->extra + ctx->extra_size, 0,
		       AES_BLOCK_SIZE - ctx->extra_size);

		write_aligned_block(ctx->base, ctx->extra);

		res = wait_computation_completed(ctx->base);
		if (res)
			return res;

		clear_computation_completed(ctx->base);

		ctx->assoc_len += ctx->extra_size * INT8_BIT;
		ctx->extra_size = U(0);
	}

	/* Move to 'new_phase' */
	io_clrsetbits32(ctx->base + _SAES_CR, _SAES_CR_GCMPH_MASK,
			SHIFT_U32(new_phase, _SAES_CR_GCMPH_SHIFT));

	return TEE_SUCCESS;
}

/**
 * @brief Start an AES computation.
 * @param ctx: SAES process context
 * @param is_dec: true if decryption, false if encryption
 * @param ch_mode: defines the chaining mode
 * @param key_select: defines where the key comes from
 * @param key: pointer to the key (used only if key_select is KEY_SOFT)
 * @param key_size: key size
 * @param iv: pointer to initialization vector (unused if ch_mode is ECB)
 * @param iv_size: iv size
 * @note This function mainly stores the configuration in @ctx; it only
 * touches the hardware to reset the peripheral.
 *
 * @retval TEE_SUCCESS if OK or a TEE_Result compliant code.
 */
TEE_Result stm32_saes_init(struct stm32_saes_context *ctx, bool is_dec,
			   enum stm32_saes_chaining_mode ch_mode,
			   enum stm32_saes_key_selection key_select,
			   const void *key, size_t key_size, const void *iv,
			   size_t iv_size)
{
	const uint32_t *key_u32 = NULL;
	const uint32_t *iv_u32 = NULL;
	uint32_t local_key[8] = { };
	uint32_t local_iv[4] = { };
	unsigned int i = 0;

	if (!ctx)
		return TEE_ERROR_BAD_PARAMETERS;

	*ctx = (struct stm32_saes_context){
		.lock = &saes_lock,
		.base = saes_pdata.base,
		.cr = _SAES_CR_RESET_VALUE
	};

	/* We want buffers to be u32 aligned */
	if (IS_ALIGNED_WITH_TYPE(key, uint32_t)) {
		key_u32 = key;
	} else {
		memcpy(local_key, key, key_size);
		key_u32 = local_key;
	}

	if (IS_ALIGNED_WITH_TYPE(iv, uint32_t)) {
		iv_u32 = iv;
	} else {
		memcpy(local_iv, iv, iv_size);
		iv_u32 = local_iv;
	}

	if (is_dec)
		ctx->cr = set_field_u32(ctx->cr, _SAES_CR_MODE_MASK,
					_SAES_CR_MODE_DEC);
	else
		ctx->cr = set_field_u32(ctx->cr, _SAES_CR_MODE_MASK,
					_SAES_CR_MODE_ENC);

	/* Save chaining mode */
	switch (ch_mode) {
	case STM32_SAES_MODE_ECB:
		ctx->cr = SET_CHAINING_MODE(ECB, ctx->cr);
		break;
	case STM32_SAES_MODE_CBC:
		ctx->cr = SET_CHAINING_MODE(CBC, ctx->cr);
		break;
	case STM32_SAES_MODE_CTR:
		ctx->cr = SET_CHAINING_MODE(CTR, ctx->cr);
		break;
	case STM32_SAES_MODE_GCM:
		ctx->cr = SET_CHAINING_MODE(GCM, ctx->cr);
		break;
	case STM32_SAES_MODE_CCM:
		ctx->cr = SET_CHAINING_MODE(CCM, ctx->cr);
		break;
	default:
		return TEE_ERROR_BAD_PARAMETERS;
	}

	/*
	 * Use the HW byte swap (_SAES_CR_DATATYPE_BYTE) for data, so there
	 * is no need for TEE_U32_TO_BIG_ENDIAN() before writing to DINR nor
	 * TEE_U32_FROM_BIG_ENDIAN() after reading from DOUTR.
	 *
	 * Note that key wrapping only accepts _SAES_CR_DATATYPE_NONE.
	 */
	ctx->cr = set_field_u32(ctx->cr, _SAES_CR_DATATYPE_MASK,
				_SAES_CR_DATATYPE_BYTE);

	/* Configure key size */
	switch (key_size) {
	case AES_KEYSIZE_128:
		ctx->cr &= ~_SAES_CR_KEYSIZE;
		break;
	case AES_KEYSIZE_256:
		ctx->cr |= _SAES_CR_KEYSIZE;
		break;
	default:
		return TEE_ERROR_BAD_PARAMETERS;
	}

	/* Configure key */
	switch (key_select) {
	case STM32_SAES_KEY_SOFT:
		ctx->cr = set_field_u32(ctx->cr, _SAES_CR_KEYSEL_MASK,
					SHIFT_U32(_SAES_CR_KEYSEL_SOFT,
						  _SAES_CR_KEYSEL_SHIFT));
		/* Save key */
		switch (key_size) {
		case AES_KEYSIZE_128:
			/* First 16 bytes == 4 u32 */
			for (i = 0; i < AES_KEYSIZE_128 / sizeof(uint32_t);
			     i++) {
				ctx->key[i] =
					TEE_U32_TO_BIG_ENDIAN(key_u32[3 - i]);
				/*
				 * /!\ The key is saved in HW byte order
				 * and word order: key[i] is for _SAES_KEYRi.
				 */
			}
			break;
		case AES_KEYSIZE_256:
			for (i = 0; i < AES_KEYSIZE_256 / sizeof(uint32_t);
			     i++) {
				ctx->key[i] =
					TEE_U32_TO_BIG_ENDIAN(key_u32[7 - i]);
				/*
				 * /!\ The key is saved in HW byte order
				 * and word order: key[i] is for _SAES_KEYRi.
				 */
			}
			break;
		default:
			return TEE_ERROR_BAD_PARAMETERS;
		}
		break;
	case STM32_SAES_KEY_DHU:
		ctx->cr = set_field_u32(ctx->cr, _SAES_CR_KEYSEL_MASK,
					SHIFT_U32(_SAES_CR_KEYSEL_DHUK,
						  _SAES_CR_KEYSEL_SHIFT));
		break;
	case STM32_SAES_KEY_BH:
		ctx->cr = set_field_u32(ctx->cr, _SAES_CR_KEYSEL_MASK,
					SHIFT_U32(_SAES_CR_KEYSEL_BHK,
						  _SAES_CR_KEYSEL_SHIFT));
		break;
	case STM32_SAES_KEY_BHU_XOR_BH:
		ctx->cr = set_field_u32(ctx->cr, _SAES_CR_KEYSEL_MASK,
					SHIFT_U32(_SAES_CR_KEYSEL_BHU_XOR_BH_K,
						  _SAES_CR_KEYSEL_SHIFT));
		break;
	case STM32_SAES_KEY_WRAPPED:
		ctx->cr = set_field_u32(ctx->cr, _SAES_CR_KEYSEL_MASK,
					SHIFT_U32(_SAES_CR_KEYSEL_SOFT,
						  _SAES_CR_KEYSEL_SHIFT));
		break;
	default:
		return TEE_ERROR_BAD_PARAMETERS;
	}

	/* Save IV */
	if (ch_mode != STM32_SAES_MODE_ECB) {
		if (!iv || iv_size != AES_IVSIZE)
			return TEE_ERROR_BAD_PARAMETERS;

		for (i = 0; i < AES_IVSIZE / sizeof(uint32_t); i++)
			ctx->iv[i] = TEE_U32_TO_BIG_ENDIAN(iv_u32[3 - i]);
	}

	/* Reset suspend registers */
	memset(ctx->susp, 0, sizeof(ctx->susp));

	return saes_start(ctx);
}
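
/*
 * Illustrative AES-GCM call flow for this driver (sketch only, error
 * checking omitted; key, iv, aad, in, out and tag are caller buffers and
 * the IV buffer is AES_IVSIZE bytes):
 *
 *	struct stm32_saes_context ctx = { };
 *
 *	stm32_saes_init(&ctx, false, STM32_SAES_MODE_GCM,
 *			STM32_SAES_KEY_SOFT, key, AES_KEYSIZE_128,
 *			iv, AES_IVSIZE);
 *	stm32_saes_update_assodata(&ctx, aad, aad_size);
 *	stm32_saes_update_load(&ctx, true, in, out, in_size);
 *	stm32_saes_final(&ctx, tag, sizeof(tag));
 */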

/**
 * @brief Update (or start) the AES authentication of associated data
 *        (CCM or GCM).
 * @param ctx: SAES process context
 * @param data: pointer to associated data
 * @param data_size: data size
 *
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_saes_update_assodata(struct stm32_saes_context *ctx,
				      uint8_t *data, size_t data_size)
{
	TEE_Result res = TEE_SUCCESS;
	unsigned int i = 0;
	uint32_t previous_phase = 0;

	if (!ctx)
		return TEE_ERROR_BAD_PARAMETERS;

	/* If no associated data, nothing to do */
	if (!data || !data_size)
		return TEE_SUCCESS;

	mutex_lock(ctx->lock);

	previous_phase = (ctx->cr & _SAES_CR_GCMPH_MASK) >>
			 _SAES_CR_GCMPH_SHIFT;

	switch (previous_phase) {
	case _SAES_CR_GCMPH_INIT:
		res = do_from_init_to_phase(ctx, _SAES_CR_GCMPH_HEADER);
		break;
	case _SAES_CR_GCMPH_HEADER:
		/*
		 * Function update_assodata() was already called.
		 * We only need to restore the context.
		 */
		if (can_suspend(ctx->cr))
			res = restore_context(ctx);

		break;
	default:
		DMSG("out of order call");
		res = TEE_ERROR_BAD_STATE;
	}

	if (res)
		goto out;

	/* Manage remaining data from a previous update_assodata() call */
	if (ctx->extra_size &&
	    ((ctx->extra_size + data_size) >= AES_BLOCK_SIZE)) {
		uint32_t block[AES_BLOCK_NB_U32] = { };

		memcpy(block, ctx->extra, ctx->extra_size);
		memcpy((uint8_t *)block + ctx->extra_size, data,
		       AES_BLOCK_SIZE - ctx->extra_size);

		write_aligned_block(ctx->base, block);

		res = wait_computation_completed(ctx->base);
		if (res)
			goto out;

		clear_computation_completed(ctx->base);

		i += AES_BLOCK_SIZE - ctx->extra_size;
		ctx->extra_size = 0;
		ctx->assoc_len += AES_BLOCK_SIZE_BIT;
	}

	while (data_size - i >= AES_BLOCK_SIZE) {
		write_block(ctx->base, data + i);

		res = wait_computation_completed(ctx->base);
		if (res)
			goto out;

		clear_computation_completed(ctx->base);

		/* Process next block */
		i += AES_BLOCK_SIZE;
		ctx->assoc_len += AES_BLOCK_SIZE_BIT;
	}

	/*
	 * Handle the last block if it is not a block size multiple:
	 * save the remaining data to process it later (potentially with new
	 * associated data).
	 */
	if (i < data_size) {
		memcpy((uint8_t *)ctx->extra + ctx->extra_size, data + i,
		       data_size - i);
		ctx->extra_size += data_size - i;
	}

	res = save_context(ctx);
out:
	if (res)
		saes_end(ctx, res);

	mutex_unlock(ctx->lock);

	return res;
}

/**
 * @brief Update (or start) AES authentication and de/encryption of
 *        payload data (CCM or GCM).
 * @param ctx: SAES process context
 * @param last_block: true if last payload data block
 * @param data_in: pointer to payload
 * @param data_out: pointer where to save de/encrypted payload
 * @param data_size: payload size
 *
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_saes_update_load(struct stm32_saes_context *ctx,
				  bool last_block, uint8_t *data_in,
				  uint8_t *data_out, size_t data_size)
{
	TEE_Result res = TEE_SUCCESS;
	unsigned int i = 0;
	uint32_t previous_phase = 0;

	if (!ctx)
		return TEE_ERROR_BAD_PARAMETERS;

	/* If there is no data, nothing to do */
	if (!data_in || !data_size)
		return TEE_SUCCESS;

	mutex_lock(ctx->lock);

	previous_phase = ((ctx->cr & _SAES_CR_GCMPH_MASK) >>
			  _SAES_CR_GCMPH_SHIFT);

	switch (previous_phase) {
	case _SAES_CR_GCMPH_INIT:
		res = do_from_init_to_phase(ctx, _SAES_CR_GCMPH_PAYLOAD);
		break;
	case _SAES_CR_GCMPH_HEADER:
		res = do_from_header_to_phase(ctx, _SAES_CR_GCMPH_PAYLOAD);
		break;
	case _SAES_CR_GCMPH_PAYLOAD:
		/* New update_load() call: only restore the context */
		if (can_suspend(ctx->cr))
			res = restore_context(ctx);

		break;
	default:
		DMSG("out of order call");
		res = TEE_ERROR_BAD_STATE;
	}

	if (res)
		goto out;

	while (i < ROUNDDOWN(data_size, AES_BLOCK_SIZE)) {
		write_block(ctx->base, data_in + i);

		res = wait_computation_completed(ctx->base);
		if (res)
			goto out;

		read_block(ctx->base, data_out + i);

		clear_computation_completed(ctx->base);

		/* Process next block */
		i += AES_BLOCK_SIZE;
		ctx->load_len += AES_BLOCK_SIZE_BIT;
	}

	/* Manage last block if not a block size multiple */
	if (last_block && i < data_size) {
		uint32_t block_in[AES_BLOCK_NB_U32] = { };
		uint32_t block_out[AES_BLOCK_NB_U32] = { };

		memcpy(block_in, data_in + i, data_size - i);

		if (does_need_npblb(ctx->cr)) {
			uint32_t npblb = AES_BLOCK_SIZE - (data_size - i);

			io_clrsetbits32(ctx->base + _SAES_CR,
					_SAES_CR_NPBLB_MASK,
					SHIFT_U32(npblb, _SAES_CR_NPBLB_SHIFT));
		}

		write_aligned_block(ctx->base, block_in);

		res = wait_computation_completed(ctx->base);
		if (res)
			goto out;

		read_aligned_block(ctx->base, block_out);

		clear_computation_completed(ctx->base);

		memcpy(data_out + i, block_out, data_size - i);

		ctx->load_len += (data_size - i) * INT8_BIT;
	}

	res = save_context(ctx);
out:
	if (res)
		saes_end(ctx, res);

	mutex_unlock(ctx->lock);

	return res;
}

/**
 * @brief Get authentication tag for AES authenticated algorithms (CCM or GCM).
 * @param ctx: SAES process context
 * @param tag: pointer where to save the tag
 * @param tag_size: tag size
 *
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_saes_final(struct stm32_saes_context *ctx, uint8_t *tag,
			    size_t tag_size)
{
	TEE_Result res = TEE_SUCCESS;
	uint32_t tag_u32[4] = { };
	uint32_t previous_phase = 0;

	if (!ctx)
		return TEE_ERROR_BAD_PARAMETERS;

	mutex_lock(ctx->lock);

	previous_phase = (ctx->cr & _SAES_CR_GCMPH_MASK) >>
			 _SAES_CR_GCMPH_SHIFT;

	switch (previous_phase) {
	case _SAES_CR_GCMPH_INIT:
		res = do_from_init_to_phase(ctx, _SAES_CR_GCMPH_FINAL);
		break;
	case _SAES_CR_GCMPH_HEADER:
		res = do_from_header_to_phase(ctx, _SAES_CR_GCMPH_FINAL);
		break;
	case _SAES_CR_GCMPH_PAYLOAD:
		if (can_suspend(ctx->cr))
			res = restore_context(ctx);

		/* Move to final phase */
		io_clrsetbits32(ctx->base + _SAES_CR, _SAES_CR_GCMPH_MASK,
				SHIFT_U32(_SAES_CR_GCMPH_FINAL,
					  _SAES_CR_GCMPH_SHIFT));
		break;
	default:
		DMSG("out of order call");
		res = TEE_ERROR_BAD_STATE;
	}
	if (res)
		goto out;

	if (IS_CHAINING_MODE(GCM, ctx->cr)) {
		/* SAES is configured to swap bytes as expected */
		io_write32(ctx->base + _SAES_DINR, 0);
		io_write32(ctx->base + _SAES_DINR, ctx->assoc_len);
		io_write32(ctx->base + _SAES_DINR, 0);
		io_write32(ctx->base + _SAES_DINR, ctx->load_len);
	}

	res = wait_computation_completed(ctx->base);
	if (res)
		goto out;

	read_aligned_block(ctx->base, tag_u32);

	clear_computation_completed(ctx->base);

	memcpy(tag, tag_u32, MIN(sizeof(tag_u32), tag_size));

out:
	saes_end(ctx, res);
	mutex_unlock(ctx->lock);

	return res;
}

/**
 * @brief Update (or start) an AES de/encrypt process (ECB, CBC or CTR).
 * @param ctx: SAES process context
 * @param last_block: true if last payload data block
 * @param data_in: pointer to payload
 * @param data_out: pointer where to save de/encrypted payload
 * @param data_size: payload size
 *
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_saes_update(struct stm32_saes_context *ctx, bool last_block,
			     uint8_t *data_in, uint8_t *data_out,
			     size_t data_size)
{
	TEE_Result res = TEE_SUCCESS;
	unsigned int i = U(0);

	if (!ctx)
		return TEE_ERROR_BAD_PARAMETERS;

	mutex_lock(ctx->lock);

	/*
	 * CBC encryption requires the last two blocks to be aligned with
	 * the AES block size.
	 */
	if (last_block && IS_CHAINING_MODE(CBC, ctx->cr) &&
	    is_encrypt(ctx->cr) &&
	    (ROUNDDOWN(data_size, AES_BLOCK_SIZE) != data_size)) {
		if (data_size < AES_BLOCK_SIZE * 2) {
			/*
			 * In CBC mode, the size of the last part must be
			 * at least 2 * AES_BLOCK_SIZE.
			 */
			EMSG("Unexpected last block size");
			res = TEE_ERROR_BAD_STATE;
			goto out;
		}
		/*
		 * Padding is not supported when the total size is not a
		 * multiple of the block size.
		 */
		res = TEE_ERROR_NOT_IMPLEMENTED;
		goto out;
	}

	/* Consume any CTR mask remaining from a previous update call */
	if (IS_CHAINING_MODE(CTR, ctx->cr) && ctx->extra_size) {
		unsigned int j = 0;
		uint8_t *mask = (uint8_t *)ctx->extra;

		for (i = 0, j = 0; j < ctx->extra_size && i < data_size;
		     j++, i++)
			data_out[i] = data_in[i] ^ mask[j];

		if (j != ctx->extra_size) {
			/*
			 * The saved mask was not fully consumed but there
			 * is no more input data.
			 */

			/* Save the remaining mask and its new size */
			memmove(ctx->extra, mask + j, ctx->extra_size - j);
			ctx->extra_size -= j;

			/*
			 * No need to save the HW context: the HW state was
			 * not modified.
			 */
			res = TEE_SUCCESS;
			goto out;
		}
		/* All extra mask consumed */
		ctx->extra_size = 0;
	}

	res = restore_context(ctx);
	if (res)
		goto out;

	while (data_size - i >= AES_BLOCK_SIZE) {
		write_block(ctx->base, data_in + i);

		res = wait_computation_completed(ctx->base);
		if (res)
			goto out;

		read_block(ctx->base, data_out + i);

		clear_computation_completed(ctx->base);

		/* Process next block */
		i += AES_BLOCK_SIZE;
	}

	/* Manage last block if not a block size multiple */
	if (i < data_size) {
		if (IS_CHAINING_MODE(CTR, ctx->cr)) {
			/*
			 * For CTR we save the generated mask to use it at next
			 * update call.
			 */
			uint32_t block_in[AES_BLOCK_NB_U32] = { };
			uint32_t block_out[AES_BLOCK_NB_U32] = { };

			memcpy(block_in, data_in + i, data_size - i);

			write_aligned_block(ctx->base, block_in);

			res = wait_computation_completed(ctx->base);
			if (res)
				goto out;

			read_aligned_block(ctx->base, block_out);

			clear_computation_completed(ctx->base);

			memcpy(data_out + i, block_out, data_size - i);

			/* Save mask for possibly next call */
			ctx->extra_size = AES_BLOCK_SIZE - (data_size - i);
			memcpy(ctx->extra, (uint8_t *)block_out + data_size - i,
			       ctx->extra_size);
		} else {
			/* CBC and ECB only handle multiples of block size */
			res = TEE_ERROR_BAD_PARAMETERS;
			goto out;
		}
	}

	if (!last_block)
		res = save_context(ctx);

out:
	/* If last block or error, end of SAES process */
	if (last_block || res)
		saes_end(ctx, res);

	mutex_unlock(ctx->lock);

	return res;
}
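
/*
 * Illustrative one-shot AES-CBC encryption with this driver (sketch only,
 * error checking omitted; in/out sizes are a multiple of AES_BLOCK_SIZE):
 *
 *	struct stm32_saes_context ctx = { };
 *
 *	stm32_saes_init(&ctx, false, STM32_SAES_MODE_CBC,
 *			STM32_SAES_KEY_SOFT, key, AES_KEYSIZE_256,
 *			iv, AES_IVSIZE);
 *	stm32_saes_update(&ctx, true, in, out, size);
 */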

static void xor_block(uint8_t *b1, uint8_t *b2, size_t size)
{
	size_t i = 0;

	for (i = 0; i < size; i++)
		b1[i] ^= b2[i];
}

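/*
 * AES-CMAC pseudo-random function, as in RFC 4493/NIST SP 800-38B: subkeys
 * K1 and K2 are derived by doubling L = AES(K, 0^128) in GF(2^128) (left
 * shift, conditionally XORing 0x87 into the last byte), then the last block
 * is XORed with K1 (complete block) or with K2 and a 0b100... padding
 * (incomplete block) before the final CBC pass.
 */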
static TEE_Result stm32_saes_cmac_prf_128(struct stm32_saes_context *ctx,
					  enum stm32_saes_key_selection key_sel,
					  const void *key, size_t key_size,
					  uint8_t *data, size_t data_size,
					  uint8_t *out)
{
	TEE_Result res = TEE_ERROR_GENERIC;
	uint8_t block[AES_BLOCK_SIZE] = { };
	uint8_t k1[AES_BLOCK_SIZE] = { };
	uint8_t k2[AES_BLOCK_SIZE] = { };
	uint8_t l[AES_BLOCK_SIZE] = { };
	size_t processed = 0;
	uint8_t bit = 0;
	int i = 0;

	if (!ctx)
		return TEE_ERROR_BAD_PARAMETERS;

	/* Get K1 and K2 */
	res = stm32_saes_init(ctx, false, STM32_SAES_MODE_ECB, key_sel,
			      key, key_size, NULL, 0);
	if (res)
		return res;

	res = stm32_saes_update(ctx, true, l, l, sizeof(l));
	if (res)
		return res;

	/* MSB(L) == 0 => K1 = L << 1 */
	bit = 0;
	for (i = sizeof(l) - 1; i >= 0; i--) {
		k1[i] = (l[i] << 1) | bit;
		bit = (l[i] & 0x80) >> 7;
	}
	/* MSB(L) == 1 => K1 = (L << 1) XOR const_Rb */
	if ((l[0] & 0x80))
		k1[sizeof(k1) - 1] = k1[sizeof(k1) - 1] ^ 0x87;

	/* MSB(K1) == 0 => K2 = K1 << 1 */
	bit = 0;
	for (i = sizeof(k1) - 1; i >= 0; i--) {
		k2[i] = (k1[i] << 1) | bit;
		bit = (k1[i] & 0x80) >> 7;
	}

	/* MSB(K1) == 1 => K2 = (K1 << 1) XOR const_Rb */
	if ((k1[0] & 0x80))
		k2[sizeof(k2) - 1] = k2[sizeof(k2) - 1] ^ 0x87;

	if (data_size > AES_BLOCK_SIZE) {
		uint8_t *data_out = NULL;

		/* All blocks but the last, in CBC mode */
		res = stm32_saes_init(ctx, false, STM32_SAES_MODE_CBC,
				      key_sel, key, key_size, block,
				      sizeof(block));
		if (res)
			return res;

		processed = ROUNDDOWN(data_size - 1, AES_BLOCK_SIZE);
		data_out = malloc(processed);
		if (!data_out)
			return TEE_ERROR_OUT_OF_MEMORY;

		res = stm32_saes_update(ctx, true, data, data_out, processed);
		if (!res) {
			/* Copy last out block or keep block as { 0 } */
			memcpy(block, data_out + processed - AES_BLOCK_SIZE,
			       AES_BLOCK_SIZE);
		}

		free(data_out);

		if (res)
			return res;
	}

	/* Manage last block */
	xor_block(block, data + processed, data_size - processed);
	if (data_size - processed == AES_BLOCK_SIZE) {
		xor_block(block, k1, AES_BLOCK_SIZE);
	} else {
		/* XOR with padding = 0b100... */
		block[data_size - processed] ^= 0x80;
		xor_block(block, k2, AES_BLOCK_SIZE);
	}

	/*
	 * AES of the last block.
	 * The same chaining mode must be used to keep the same key when
	 * DHUK is selected, so reuse l as a zero-initialized IV.
	 */
	memset(l, 0, sizeof(l));
	res = stm32_saes_init(ctx, false, STM32_SAES_MODE_CBC, key_sel, key,
			      key_size, l, sizeof(l));
	if (res)
		return res;

	return stm32_saes_update(ctx, true, block, out, AES_BLOCK_SIZE);
}

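/*
 * Key derivation: each subkey block below is
 * K(i) = CMAC-PRF(key, [i]_be32 || input) with a big-endian 32-bit counter
 * starting at 1, in the spirit of the NIST SP 800-108 KDF in counter mode.
 */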
TEE_Result stm32_saes_kdf(struct stm32_saes_context *ctx,
			  enum stm32_saes_key_selection key_sel,
			  const void *key, size_t key_size,
			  const void *input, size_t input_size,
			  uint8_t *subkey, size_t subkey_size)
{
	TEE_Result res = TEE_SUCCESS;
	uint32_t index = 0;
	uint32_t index_be = 0;
	uint8_t *data = NULL;
	size_t data_index = 0;
	size_t subkey_index = 0;
	size_t data_size = input_size + sizeof(index_be);
	uint8_t cmac[AES_BLOCK_SIZE] = { };

	if (!ctx || !input || !input_size)
		return TEE_ERROR_BAD_PARAMETERS;

	/* An index is prepended to the input for each K(i) */
	data = malloc(data_size);
	if (!data)
		return TEE_ERROR_OUT_OF_MEMORY;

	data_index = 0;
	index_be = TEE_U32_TO_BIG_ENDIAN(index);
	memcpy(data + data_index, &index_be, sizeof(index_be));
	data_index += sizeof(index_be);
	memcpy(data + data_index, input, input_size);
	data_index += input_size;

	/* K(i) computation */
	index = 0;
	while (subkey_index < subkey_size) {
		index++;
		index_be = TEE_U32_TO_BIG_ENDIAN(index);
		memcpy(data, &index_be, sizeof(index_be));

		res = stm32_saes_cmac_prf_128(ctx, key_sel, key, key_size,
					      data, data_size, cmac);
		if (res)
			goto out;

		memcpy(subkey + subkey_index, cmac,
		       MIN(subkey_size - subkey_index, sizeof(cmac)));
		subkey_index += sizeof(cmac);
	}

out:
	free(data);
	if (res)
		memzero_explicit(subkey, subkey_size);

	return res;
}

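/*
 * The derivation input built below is, sketched:
 * const_data || 0x00 (when const_data is provided) || usage ||
 * [8 * subkey_len]_be32 || K(0)
 * where appending K(0) binds the derivation to the DHUK and mitigates key
 * control attacks.
 */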
/* Implement hardware HUK derivation using SAES resources */
TEE_Result huk_subkey_derive(enum huk_subkey_usage usage,
			     const void *const_data, size_t const_data_len,
			     uint8_t *subkey, size_t subkey_len)
{
	TEE_Result res = TEE_ERROR_GENERIC;
	uint8_t *input = NULL;
	size_t input_index = 0;
	size_t subkey_bitlen = 0;
	struct stm32_saes_context ctx = { };
	uint8_t separator = 0;

	/* Check if the driver is probed */
	if (!saes_pdata.base) {
		return __huk_subkey_derive(usage, const_data, const_data_len,
					   subkey, subkey_len);
	}

	input = malloc(const_data_len + sizeof(separator) + sizeof(usage) +
		       sizeof(subkey_bitlen) + AES_BLOCK_SIZE);
	if (!input)
		return TEE_ERROR_OUT_OF_MEMORY;

	input_index = 0;
	if (const_data) {
		memcpy(input + input_index, const_data, const_data_len);
		input_index += const_data_len;

		memcpy(input + input_index, &separator, sizeof(separator));
		input_index += sizeof(separator);
	}

	memcpy(input + input_index, &usage, sizeof(usage));
	input_index += sizeof(usage);

	/*
	 * Append subkey_len, in bits, at the end of the input, encoded as
	 * an MSB-first 32-bit value.
	 */
	subkey_bitlen = TEE_U32_TO_BIG_ENDIAN(subkey_len * INT8_BIT);
	memcpy(input + input_index, &subkey_bitlen, sizeof(subkey_bitlen));
	input_index += sizeof(subkey_bitlen);

	/*
	 * Compute K(0) to prevent some key control attacks and store it at
	 * the end of the input.
	 */
	res = stm32_saes_cmac_prf_128(&ctx, STM32_SAES_KEY_DHU, NULL,
				      AES_KEYSIZE_128,
				      input, input_index,
				      input + input_index);
	if (res)
		goto out;

	/* K(0) was just appended to the input */
	input_index += AES_BLOCK_SIZE;

	res = stm32_saes_kdf(&ctx, STM32_SAES_KEY_DHU, NULL, AES_KEYSIZE_128,
			     input, input_index, subkey, subkey_len);

out:
	free(input);
	return res;
}

static TEE_Result stm32_saes_parse_fdt(struct stm32_saes_platdata *pdata,
				       const void *fdt, int node)
{
	struct dt_node_info dt_saes = { };
	TEE_Result res = TEE_ERROR_GENERIC;

	dt_saes.reg = fdt_reg_base_address(fdt, node);
	dt_saes.reg_size = fdt_reg_size(fdt, node);

	if (dt_saes.reg == DT_INFO_INVALID_REG ||
	    dt_saes.reg_size == DT_INFO_INVALID_REG_SIZE)
		return TEE_ERROR_BAD_PARAMETERS;

	res = clk_dt_get_by_index(fdt, node, 0, &pdata->clk);
	if (res != TEE_SUCCESS)
		return res;

	res = rstctrl_dt_get_by_index(fdt, node, 0, &pdata->reset);
	if (res != TEE_SUCCESS && res != TEE_ERROR_ITEM_NOT_FOUND)
		return res;

	pdata->base = (vaddr_t)phys_to_virt(dt_saes.reg, MEM_AREA_IO_SEC,
					    dt_saes.reg_size);
	if (!pdata->base)
		panic();

	return TEE_SUCCESS;
}

static void stm32_saes_reset(void)
{
	if (saes_pdata.reset) {
		/* External reset of SAES */
		if (rstctrl_assert_to(saes_pdata.reset, TIMEOUT_US_1MS))
			panic();

		udelay(SAES_RESET_DELAY);

		if (rstctrl_deassert_to(saes_pdata.reset, TIMEOUT_US_1MS))
			panic();
	} else {
		/* Internal reset of SAES */
		io_setbits32(saes_pdata.base + _SAES_CR, _SAES_CR_IPRST);
		udelay(SAES_RESET_DELAY);
		io_clrbits32(saes_pdata.base + _SAES_CR, _SAES_CR_IPRST);
	}
}

static TEE_Result stm32_saes_pm(enum pm_op op, uint32_t pm_hint,
				const struct pm_callback_handle *hdl __unused)
{
	switch (op) {
	case PM_OP_SUSPEND:
		clk_disable(saes_pdata.clk);
		return TEE_SUCCESS;

	case PM_OP_RESUME:
		if (clk_enable(saes_pdata.clk))
			panic();

		if (PM_HINT_IS_STATE(pm_hint, CONTEXT))
			stm32_saes_reset();

		return TEE_SUCCESS;
	default:
		break;
	}

	return TEE_ERROR_NOT_IMPLEMENTED;
}

static TEE_Result stm32_saes_probe(const void *fdt, int node,
				   const void *compat_data __unused)
{
	TEE_Result res = TEE_SUCCESS;

	assert(!saes_pdata.base);

	res = stm32_saes_parse_fdt(&saes_pdata, fdt, node);
	if (res)
		return res;

	if (clk_enable(saes_pdata.clk))
		panic();

	stm32_saes_reset();

	if (IS_ENABLED(CFG_CRYPTO_DRV_CIPHER)) {
		res = stm32_register_cipher(SAES_IP);
		if (res) {
			EMSG("Failed to register to cipher: %#"PRIx32, res);
			panic();
		}
	}

	register_pm_core_service_cb(stm32_saes_pm, NULL, "stm32-saes");

	return TEE_SUCCESS;
}

static const struct dt_device_match saes_match_table[] = {
	{ .compatible = "st,stm32mp13-saes" },
	{ }
};

DEFINE_DT_DRIVER(stm32_saes_dt_driver) = {
	.name = "stm32-saes",
	.match_table = saes_match_table,
	.probe = stm32_saes_probe,
};