// SPDX-License-Identifier: BSD-2-Clause
/*
 * Copyright (c) 2021-2023, STMicroelectronics - All Rights Reserved
 */
#include <assert.h>
#include <config.h>
#include <drivers/clk.h>
#include <drivers/clk_dt.h>
#include <drivers/rstctrl.h>
#include <io.h>
#include <kernel/boot.h>
#include <kernel/delay.h>
#include <kernel/dt.h>
#include <kernel/huk_subkey.h>
#include <kernel/mutex.h>
#include <kernel/pm.h>
#include <libfdt.h>
#include <mm/core_memprot.h>
#include <stdint.h>
#include <stm32_util.h>
#include <string_ext.h>
#include <utee_defines.h>
#include <util.h>

#include "common.h"
#include "stm32_saes.h"

/* SAES control register */
#define _SAES_CR			U(0x0)
/* SAES status register */
#define _SAES_SR			U(0x04)
/* SAES data input register */
#define _SAES_DINR			U(0x08)
/* SAES data output register */
#define _SAES_DOUTR			U(0x0c)
/* SAES key registers [0-3] */
#define _SAES_KEYR0			U(0x10)
#define _SAES_KEYR1			U(0x14)
#define _SAES_KEYR2			U(0x18)
#define _SAES_KEYR3			U(0x1c)
/* SAES initialization vector registers [0-3] */
#define _SAES_IVR0			U(0x20)
#define _SAES_IVR1			U(0x24)
#define _SAES_IVR2			U(0x28)
#define _SAES_IVR3			U(0x2c)
/* SAES key registers [4-7] */
#define _SAES_KEYR4			U(0x30)
#define _SAES_KEYR5			U(0x34)
#define _SAES_KEYR6			U(0x38)
#define _SAES_KEYR7			U(0x3c)
/* SAES suspend registers [0-7] */
#define _SAES_SUSPR0			U(0x40)
#define _SAES_SUSPR1			U(0x44)
#define _SAES_SUSPR2			U(0x48)
#define _SAES_SUSPR3			U(0x4c)
#define _SAES_SUSPR4			U(0x50)
#define _SAES_SUSPR5			U(0x54)
#define _SAES_SUSPR6			U(0x58)
#define _SAES_SUSPR7			U(0x5c)
/* SAES interrupt enable register */
#define _SAES_IER			U(0x300)
/* SAES interrupt status register */
#define _SAES_ISR			U(0x304)
/* SAES interrupt clear register */
#define _SAES_ICR			U(0x308)

/* SAES control register fields */
#define _SAES_CR_RESET_VALUE		U(0x0)
#define _SAES_CR_IPRST			BIT(31)
#define _SAES_CR_KEYSEL_MASK		GENMASK_32(30, 28)
#define _SAES_CR_KEYSEL_SHIFT		U(28)
#define _SAES_CR_KEYSEL_SOFT		U(0x0)
#define _SAES_CR_KEYSEL_DHUK		U(0x1)
#define _SAES_CR_KEYSEL_BHK		U(0x2)
#define _SAES_CR_KEYSEL_BHU_XOR_BH_K	U(0x4)
#define _SAES_CR_KEYSEL_TEST		U(0x7)
#define _SAES_CR_KSHAREID_MASK		GENMASK_32(27, 26)
#define _SAES_CR_KSHAREID_SHIFT		U(26)
#define _SAES_CR_KSHAREID_CRYP		U(0x0)
#define _SAES_CR_KEYMOD_MASK		GENMASK_32(25, 24)
#define _SAES_CR_KEYMOD_SHIFT		U(24)
#define _SAES_CR_KEYMOD_NORMAL		U(0x0)
#define _SAES_CR_KEYMOD_WRAPPED		U(0x1)
#define _SAES_CR_KEYMOD_SHARED		U(0x2)
#define _SAES_CR_NPBLB_MASK		GENMASK_32(23, 20)
#define _SAES_CR_NPBLB_SHIFT		U(20)
#define _SAES_CR_KEYPROT		BIT(19)
#define _SAES_CR_KEYSIZE		BIT(18)
#define _SAES_CR_GCMPH_MASK		GENMASK_32(14, 13)
#define _SAES_CR_GCMPH_SHIFT		U(13)
#define _SAES_CR_GCMPH_INIT		U(0)
#define _SAES_CR_GCMPH_HEADER		U(1)
#define _SAES_CR_GCMPH_PAYLOAD		U(2)
#define _SAES_CR_GCMPH_FINAL		U(3)
#define _SAES_CR_DMAOUTEN		BIT(12)
#define _SAES_CR_DMAINEN		BIT(11)
#define _SAES_CR_CHMOD_MASK		(BIT(16) | GENMASK_32(6, 5))
#define _SAES_CR_CHMOD_SHIFT		U(5)
#define _SAES_CR_CHMOD_ECB		U(0x0)
#define _SAES_CR_CHMOD_CBC		U(0x1)
#define _SAES_CR_CHMOD_CTR		U(0x2)
#define _SAES_CR_CHMOD_GCM		U(0x3)
#define _SAES_CR_CHMOD_GMAC		U(0x3)
#define _SAES_CR_CHMOD_CCM		U(0x800)
#define _SAES_CR_MODE_MASK		GENMASK_32(4, 3)
#define _SAES_CR_MODE_SHIFT		U(3)
#define _SAES_CR_MODE_ENC		U(0)
#define _SAES_CR_MODE_KEYPREP		U(1)
#define _SAES_CR_MODE_DEC		U(2)
#define _SAES_CR_DATATYPE_MASK		GENMASK_32(2, 1)
#define _SAES_CR_DATATYPE_SHIFT		U(1)
#define _SAES_CR_DATATYPE_NONE		U(0)
#define _SAES_CR_DATATYPE_HALF_WORD	U(1)
#define _SAES_CR_DATATYPE_BYTE		U(2)
#define _SAES_CR_DATATYPE_BIT		U(3)
#define _SAES_CR_EN			BIT(0)

/* SAES status register fields */
#define _SAES_SR_KEYVALID		BIT(7)
#define _SAES_SR_BUSY			BIT(3)
#define _SAES_SR_WRERR			BIT(2)
#define _SAES_SR_RDERR			BIT(1)
#define _SAES_SR_CCF			BIT(0)

/* SAES interrupt register fields */
#define _SAES_I_RNG_ERR			BIT(3)
#define _SAES_I_KEY_ERR			BIT(2)
#define _SAES_I_RW_ERR			BIT(1)
#define _SAES_I_CC			BIT(0)

#define SAES_TIMEOUT_US			U(100000)
#define TIMEOUT_US_1MS			U(1000)
#define SAES_RESET_DELAY		U(2)

#define IS_CHAINING_MODE(mode, cr) \
	(((cr) & _SAES_CR_CHMOD_MASK) == (_SAES_CR_CHMOD_##mode << \
					  _SAES_CR_CHMOD_SHIFT))

#define SET_CHAINING_MODE(mode, cr) \
	set_field_u32(cr, _SAES_CR_CHMOD_MASK, _SAES_CR_CHMOD_##mode)

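/*
 * Note on the CHMOD encoding: the field is not contiguous in _SAES_CR, it
 * spans BIT(16) and bits [6:5]. The ECB/CBC/CTR/GCM values land in bits
 * [6:5] once shifted by _SAES_CR_CHMOD_SHIFT, while _SAES_CR_CHMOD_CCM is
 * U(0x800) so that the same shift lands on BIT(16), i.e.
 * U(0x800) << 5 == BIT(16), which is exactly what IS_CHAINING_MODE()
 * computes.
 */
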
static struct mutex saes_lock = MUTEX_INITIALIZER;
static struct stm32_saes_platdata {
	vaddr_t base;
	struct clk *clk;
	struct clk *clk_rng;
	struct rstctrl *reset;
} saes_pdata;

static bool does_chaining_mode_need_iv(uint32_t cr)
{
	return !IS_CHAINING_MODE(ECB, cr);
}

static bool is_encrypt(uint32_t cr)
{
	return (cr & _SAES_CR_MODE_MASK) ==
	       SHIFT_U32(_SAES_CR_MODE_ENC, _SAES_CR_MODE_SHIFT);
}

static bool is_decrypt(uint32_t cr)
{
	return (cr & _SAES_CR_MODE_MASK) ==
	       SHIFT_U32(_SAES_CR_MODE_DEC, _SAES_CR_MODE_SHIFT);
}

static bool does_need_npblb(uint32_t cr)
{
	return (IS_CHAINING_MODE(GCM, cr) && is_encrypt(cr)) ||
	       (IS_CHAINING_MODE(CCM, cr) && is_decrypt(cr));
}

static bool can_suspend(uint32_t cr)
{
	return !IS_CHAINING_MODE(GCM, cr);
}

static void write_aligned_block(vaddr_t base, uint32_t *data)
{
	unsigned int i = 0;

	/* SAES is configured to swap bytes as expected */
	for (i = 0; i < AES_BLOCK_NB_U32; i++)
		io_write32(base + _SAES_DINR, data[i]);
}

static void write_block(vaddr_t base, uint8_t *data)
{
	if (IS_ALIGNED_WITH_TYPE(data, uint32_t)) {
		write_aligned_block(base, (void *)data);
	} else {
		uint32_t data_u32[AES_BLOCK_NB_U32] = { };

		memcpy(data_u32, data, sizeof(data_u32));
		write_aligned_block(base, data_u32);
	}
}

static void read_aligned_block(vaddr_t base, uint32_t *data)
{
	unsigned int i = 0;

	/* SAES is configured to swap bytes as expected */
	for (i = 0; i < AES_BLOCK_NB_U32; i++)
		data[i] = io_read32(base + _SAES_DOUTR);
}

static void read_block(vaddr_t base, uint8_t *data)
{
	if (IS_ALIGNED_WITH_TYPE(data, uint32_t)) {
		read_aligned_block(base, (void *)data);
	} else {
		uint32_t data_u32[AES_BLOCK_NB_U32] = { };

		read_aligned_block(base, data_u32);

		memcpy(data, data_u32, sizeof(data_u32));
	}
}

static TEE_Result wait_computation_completed(vaddr_t base)
{
	uint64_t timeout_ref = timeout_init_us(SAES_TIMEOUT_US);

	while ((io_read32(base + _SAES_SR) & _SAES_SR_CCF) != _SAES_SR_CCF)
		if (timeout_elapsed(timeout_ref))
			break;

	if ((io_read32(base + _SAES_SR) & _SAES_SR_CCF) != _SAES_SR_CCF) {
		DMSG("CCF timeout");
		return TEE_ERROR_GENERIC;
	}

	return TEE_SUCCESS;
}

static void clear_computation_completed(uintptr_t base)
{
	io_setbits32(base + _SAES_ICR, _SAES_I_CC);
}

static TEE_Result wait_key_valid(vaddr_t base)
{
	uint64_t timeout_ref = timeout_init_us(SAES_TIMEOUT_US);

	while (!(io_read32(base + _SAES_SR) & _SAES_SR_KEYVALID))
		if (timeout_elapsed(timeout_ref))
			break;

	if (!(io_read32(base + _SAES_SR) & _SAES_SR_KEYVALID)) {
		DMSG("KEYVALID timeout");
		return TEE_ERROR_GENERIC;
	}

	return TEE_SUCCESS;
}

static TEE_Result saes_start(struct stm32_saes_context *ctx)
{
	uint64_t timeout_ref = 0;

	/* Reset SAES */
	io_setbits32(ctx->base + _SAES_CR, _SAES_CR_IPRST);
	io_clrbits32(ctx->base + _SAES_CR, _SAES_CR_IPRST);

	timeout_ref = timeout_init_us(SAES_TIMEOUT_US);
	while (io_read32(ctx->base + _SAES_SR) & _SAES_SR_BUSY)
		if (timeout_elapsed(timeout_ref))
			break;

	if (io_read32(ctx->base + _SAES_SR) & _SAES_SR_BUSY) {
		DMSG("busy timeout");
		return TEE_ERROR_GENERIC;
	}

	return TEE_SUCCESS;
}

static void saes_end(struct stm32_saes_context *ctx, int prev_error)
{
	if (prev_error) {
		/* Reset SAES */
		io_setbits32(ctx->base + _SAES_CR, _SAES_CR_IPRST);
		io_clrbits32(ctx->base + _SAES_CR, _SAES_CR_IPRST);
	}

	/* Disable the SAES peripheral */
	io_clrbits32(ctx->base + _SAES_CR, _SAES_CR_EN);
}

static void saes_write_iv(struct stm32_saes_context *ctx)
{
	/* Restore the IV if the chaining mode requires one */
	if (does_chaining_mode_need_iv(ctx->cr)) {
		unsigned int i = 0;

		for (i = 0; i < AES_IVSIZE / sizeof(uint32_t); i++) {
			io_write32(ctx->base + _SAES_IVR0 + i *
				   sizeof(uint32_t), ctx->iv[i]);
		}
	}
}

static void saes_save_suspend(struct stm32_saes_context *ctx)
{
	size_t i = 0;

	for (i = 0; i < 8; i++)
		ctx->susp[i] = io_read32(ctx->base + _SAES_SUSPR0 +
					 i * sizeof(uint32_t));
}

static void saes_restore_suspend(struct stm32_saes_context *ctx)
{
	size_t i = 0;

	for (i = 0; i < 8; i++)
		io_write32(ctx->base + _SAES_SUSPR0 + i * sizeof(uint32_t),
			   ctx->susp[i]);
}

static void saes_write_key(struct stm32_saes_context *ctx)
{
	/* Restore the _SAES_KEYRx registers when a software key is used */
	if ((ctx->cr & _SAES_CR_KEYSEL_MASK) ==
	    SHIFT_U32(_SAES_CR_KEYSEL_SOFT, _SAES_CR_KEYSEL_SHIFT)) {
		size_t i = 0;

		for (i = 0; i < AES_KEYSIZE_128 / sizeof(uint32_t); i++)
			io_write32(ctx->base + _SAES_KEYR0 + i *
				   sizeof(uint32_t),
				   ctx->key[i]);

		if ((ctx->cr & _SAES_CR_KEYSIZE) == _SAES_CR_KEYSIZE) {
			for (i = 0;
			     i < (AES_KEYSIZE_256 / 2) / sizeof(uint32_t);
			     i++) {
				io_write32(ctx->base + _SAES_KEYR4 + i *
					   sizeof(uint32_t),
					   ctx->key[i + 4]);
			}
		}
	}
}

static TEE_Result saes_prepare_key(struct stm32_saes_context *ctx)
{
	TEE_Result res = TEE_ERROR_GENERIC;

	/* Disable the SAES peripheral */
	io_clrbits32(ctx->base + _SAES_CR, _SAES_CR_EN);

	/* Set key size */
	if ((ctx->cr & _SAES_CR_KEYSIZE))
		io_setbits32(ctx->base + _SAES_CR, _SAES_CR_KEYSIZE);
	else
		io_clrbits32(ctx->base + _SAES_CR, _SAES_CR_KEYSIZE);

	saes_write_key(ctx);

	res = wait_key_valid(ctx->base);
	if (res)
		return res;

	/*
	 * For ECB/CBC decryption, key preparation mode must be selected
	 * to populate the key.
	 */
	if ((IS_CHAINING_MODE(ECB, ctx->cr) ||
	     IS_CHAINING_MODE(CBC, ctx->cr)) && is_decrypt(ctx->cr)) {
		/* Select mode 2 (key preparation) */
		io_clrsetbits32(ctx->base + _SAES_CR, _SAES_CR_MODE_MASK,
				SHIFT_U32(_SAES_CR_MODE_KEYPREP,
					  _SAES_CR_MODE_SHIFT));

		/* Enable SAES */
		io_setbits32(ctx->base + _SAES_CR, _SAES_CR_EN);

		res = wait_computation_completed(ctx->base);
		if (res)
			return res;

		clear_computation_completed(ctx->base);

		/* Set mode 3 (decryption) */
		io_clrsetbits32(ctx->base + _SAES_CR, _SAES_CR_MODE_MASK,
				SHIFT_U32(_SAES_CR_MODE_DEC,
					  _SAES_CR_MODE_SHIFT));
	}

	return TEE_SUCCESS;
}

static TEE_Result save_context(struct stm32_saes_context *ctx)
{
	if ((io_read32(ctx->base + _SAES_SR) & _SAES_SR_CCF)) {
		/* The device should not be in a processing phase */
		return TEE_ERROR_BAD_STATE;
	}

	/* Save CR */
	ctx->cr = io_read32(ctx->base + _SAES_CR);

	if (!can_suspend(ctx->cr))
		return TEE_SUCCESS;

	saes_save_suspend(ctx);

	/* Save the current IV if the chaining mode requires one */
	if (does_chaining_mode_need_iv(ctx->cr)) {
		uint8_t i = 0;

		for (i = 0; i < AES_IVSIZE / sizeof(uint32_t); i++) {
			ctx->iv[i] = io_read32(ctx->base + _SAES_IVR0 + i *
					       sizeof(uint32_t));
		}
	}

	/* Disable the SAES peripheral */
	io_clrbits32(ctx->base + _SAES_CR, _SAES_CR_EN);

	return TEE_SUCCESS;
}

/* To resume the processing of a message */
static TEE_Result restore_context(struct stm32_saes_context *ctx)
{
	TEE_Result res = TEE_SUCCESS;

	/* SAES shall be disabled */
	if ((io_read32(ctx->base + _SAES_CR) & _SAES_CR_EN)) {
		DMSG("Device is still enabled");
		return TEE_ERROR_BAD_STATE;
	}

	/* Reset internal state */
	io_setbits32(ctx->base + _SAES_CR, _SAES_CR_IPRST);

	/* Restore configuration register */
	io_write32(ctx->base + _SAES_CR, ctx->cr);

	/* Write the key and, for CBC or ECB decryption, prepare it */
	res = saes_prepare_key(ctx);
	if (res)
		return res;

	saes_restore_suspend(ctx);

	saes_write_iv(ctx);

	/* Enable the SAES peripheral */
	io_setbits32(ctx->base + _SAES_CR, _SAES_CR_EN);

	return TEE_SUCCESS;
}

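/*
 * save_context() and restore_context() implement the suspend/resume
 * protocol: save_context() snapshots CR, the suspend registers and, when
 * relevant, the IV, then disables the peripheral; restore_context()
 * resets the IP, reprograms CR, rewrites (and possibly re-prepares) the
 * key, restores the suspend registers and IV, then re-enables the
 * peripheral. Together with the mutex, this lets several contexts share
 * the single SAES instance between update calls.
 */
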
static TEE_Result do_from_init_to_phase(struct stm32_saes_context *ctx,
					uint32_t new_phase)
{
	TEE_Result res = TEE_SUCCESS;

	/* The init phase has not run yet */
	res = restore_context(ctx);
	if (res)
		return res;

	res = wait_computation_completed(ctx->base);
	if (res)
		return res;

	clear_computation_completed(ctx->base);

	/* Move to 'new_phase' */
	io_clrsetbits32(ctx->base + _SAES_CR, _SAES_CR_GCMPH_MASK,
			SHIFT_U32(new_phase, _SAES_CR_GCMPH_SHIFT));

	/* Enable the SAES peripheral (init disabled it) */
	io_setbits32(ctx->base + _SAES_CR, _SAES_CR_EN);

	return TEE_SUCCESS;
}

static TEE_Result do_from_header_to_phase(struct stm32_saes_context *ctx,
					  uint32_t new_phase)
{
	TEE_Result res = TEE_SUCCESS;

	if (can_suspend(ctx->cr)) {
		res = restore_context(ctx);
		if (res)
			return res;
	}

	if (ctx->extra_size) {
		/* Manage unaligned header data before moving to next phase */
		memset((uint8_t *)ctx->extra + ctx->extra_size, 0,
		       AES_BLOCK_SIZE - ctx->extra_size);

		write_aligned_block(ctx->base, ctx->extra);

		res = wait_computation_completed(ctx->base);
		if (res)
			return res;

		clear_computation_completed(ctx->base);

		ctx->assoc_len += ctx->extra_size * INT8_BIT;
		ctx->extra_size = U(0);
	}

	/* Move to 'new_phase' */
	io_clrsetbits32(ctx->base + _SAES_CR, _SAES_CR_GCMPH_MASK,
			SHIFT_U32(new_phase, _SAES_CR_GCMPH_SHIFT));

	return TEE_SUCCESS;
}

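/*
 * GCM/CCM processing goes through the hardware phases INIT -> HEADER ->
 * PAYLOAD -> FINAL (_SAES_CR_GCMPH_*). The do_from_*_to_phase() helpers
 * above replay the saved context up to the current phase and then program
 * the next one, so each public entry point below can resume a message
 * whatever the previous call was.
 */
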
/**
 * @brief Start an AES computation.
 * @param ctx: SAES process context
 * @param is_dec: true if decryption, false if encryption
 * @param ch_mode: define the chaining mode
 * @param key_select: define where the key comes from
 * @param key: pointer to the key (used only when key_select is KEY_SOFT)
 * @param key_size: key size in bytes
 * @param iv: pointer to the initialization vector (unused if ch_mode is ECB)
 * @param iv_size: IV size in bytes
 * @note this function mainly saves the configuration in ctx; its only
 *       hardware access is the SAES reset performed by saes_start()
 *
 * @retval TEE_SUCCESS if OK or a TEE_Result compliant code.
 */
TEE_Result stm32_saes_init(struct stm32_saes_context *ctx, bool is_dec,
			   enum stm32_saes_chaining_mode ch_mode,
			   enum stm32_saes_key_selection key_select,
			   const void *key, size_t key_size, const void *iv,
			   size_t iv_size)
{
	const uint32_t *key_u32 = NULL;
	const uint32_t *iv_u32 = NULL;
	uint32_t local_key[8] = { };
	uint32_t local_iv[4] = { };
	unsigned int i = 0;

	if (!ctx)
		return TEE_ERROR_BAD_PARAMETERS;

	*ctx = (struct stm32_saes_context){
		.lock = &saes_lock,
		.base = saes_pdata.base,
		.cr = _SAES_CR_RESET_VALUE
	};

	/* We want buffers to be 32-bit aligned */
	if (IS_ALIGNED_WITH_TYPE(key, uint32_t)) {
		key_u32 = key;
	} else {
		memcpy(local_key, key, key_size);
		key_u32 = local_key;
	}

	if (IS_ALIGNED_WITH_TYPE(iv, uint32_t)) {
		iv_u32 = iv;
	} else {
		memcpy(local_iv, iv, iv_size);
		iv_u32 = local_iv;
	}

	if (is_dec)
		ctx->cr |= set_field_u32(ctx->cr, _SAES_CR_MODE_MASK,
					 _SAES_CR_MODE_DEC);
	else
		ctx->cr |= set_field_u32(ctx->cr, _SAES_CR_MODE_MASK,
					 _SAES_CR_MODE_ENC);

	/* Save chaining mode */
	switch (ch_mode) {
	case STM32_SAES_MODE_ECB:
		ctx->cr |= SET_CHAINING_MODE(ECB, ctx->cr);
		break;
	case STM32_SAES_MODE_CBC:
		ctx->cr |= SET_CHAINING_MODE(CBC, ctx->cr);
		break;
	case STM32_SAES_MODE_CTR:
		ctx->cr |= SET_CHAINING_MODE(CTR, ctx->cr);
		break;
	case STM32_SAES_MODE_GCM:
		ctx->cr |= SET_CHAINING_MODE(GCM, ctx->cr);
		break;
	case STM32_SAES_MODE_CCM:
		ctx->cr |= SET_CHAINING_MODE(CCM, ctx->cr);
		break;
	default:
		return TEE_ERROR_BAD_PARAMETERS;
	}

	/*
	 * Use the hardware byte swap (_SAES_CR_DATATYPE_BYTE) for data, so
	 * there is no need for TEE_U32_TO_BIG_ENDIAN() before writing to
	 * DINR nor TEE_U32_FROM_BIG_ENDIAN() after reading from DOUTR.
	 *
	 * Note that key wrapping only accepts _SAES_CR_DATATYPE_NONE.
	 */
	ctx->cr |= set_field_u32(ctx->cr, _SAES_CR_DATATYPE_MASK,
				 _SAES_CR_DATATYPE_BYTE);

	/* Configure the key size */
	switch (key_size) {
	case AES_KEYSIZE_128:
		ctx->cr &= ~_SAES_CR_KEYSIZE;
		break;
	case AES_KEYSIZE_256:
		ctx->cr |= _SAES_CR_KEYSIZE;
		break;
	default:
		return TEE_ERROR_BAD_PARAMETERS;
	}

	/* Configure the key source */
	switch (key_select) {
	case STM32_SAES_KEY_SOFT:
		ctx->cr |= set_field_u32(ctx->cr, _SAES_CR_KEYSEL_MASK,
					 SHIFT_U32(_SAES_CR_KEYSEL_SOFT,
						   _SAES_CR_KEYSEL_SHIFT));
		/* Save key */
		switch (key_size) {
		case AES_KEYSIZE_128:
			/* First 16 bytes == 4 u32 */
			for (i = 0; i < AES_KEYSIZE_128 / sizeof(uint32_t);
			     i++) {
				ctx->key[i] =
					TEE_U32_TO_BIG_ENDIAN(key_u32[3 - i]);
				/*
				 * /!\ the key is saved in HW byte order
				 * and word order: key[i] is for _SAES_KEYRi.
				 */
			}
			break;
		case AES_KEYSIZE_256:
			for (i = 0; i < AES_KEYSIZE_256 / sizeof(uint32_t);
			     i++) {
				ctx->key[i] =
					TEE_U32_TO_BIG_ENDIAN(key_u32[7 - i]);
				/*
				 * /!\ the key is saved in HW byte order
				 * and word order: key[i] is for _SAES_KEYRi.
				 */
			}
			break;
		default:
			return TEE_ERROR_BAD_PARAMETERS;
		}
		break;
	case STM32_SAES_KEY_DHU:
		ctx->cr |= set_field_u32(ctx->cr, _SAES_CR_KEYSEL_MASK,
					 SHIFT_U32(_SAES_CR_KEYSEL_DHUK,
						   _SAES_CR_KEYSEL_SHIFT));
		break;
	case STM32_SAES_KEY_BH:
		ctx->cr |= set_field_u32(ctx->cr, _SAES_CR_KEYSEL_MASK,
					 SHIFT_U32(_SAES_CR_KEYSEL_BHK,
						   _SAES_CR_KEYSEL_SHIFT));
		break;
	case STM32_SAES_KEY_BHU_XOR_BH:
		ctx->cr |= set_field_u32(ctx->cr, _SAES_CR_KEYSEL_MASK,
					 SHIFT_U32(_SAES_CR_KEYSEL_BHU_XOR_BH_K,
						   _SAES_CR_KEYSEL_SHIFT));
		break;
	case STM32_SAES_KEY_WRAPPED:
		ctx->cr |= set_field_u32(ctx->cr, _SAES_CR_KEYSEL_MASK,
					 SHIFT_U32(_SAES_CR_KEYSEL_SOFT,
						   _SAES_CR_KEYSEL_SHIFT));
		break;
	default:
		return TEE_ERROR_BAD_PARAMETERS;
	}

	/* Save IV */
	if (ch_mode != STM32_SAES_MODE_ECB) {
		if (!iv || iv_size != AES_IVSIZE)
			return TEE_ERROR_BAD_PARAMETERS;

		for (i = 0; i < AES_IVSIZE / sizeof(uint32_t); i++)
			ctx->iv[i] = TEE_U32_TO_BIG_ENDIAN(iv_u32[3 - i]);
	}

	/* Reset suspend registers */
	memset(ctx->susp, 0, sizeof(ctx->susp));

	return saes_start(ctx);
}

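/*
 * Illustrative usage sketch for the plain cipher path (not compiled in;
 * 'key', 'iv', 'in', 'out' and 'in_size' are caller-provided, and in_size
 * must be a multiple of AES_BLOCK_SIZE for ECB/CBC):
 *
 *	struct stm32_saes_context ctx = { };
 *	TEE_Result res = TEE_ERROR_GENERIC;
 *
 *	res = stm32_saes_init(&ctx, false, STM32_SAES_MODE_CBC,
 *			      STM32_SAES_KEY_SOFT, key, AES_KEYSIZE_256,
 *			      iv, AES_IVSIZE);
 *	if (!res)
 *		res = stm32_saes_update(&ctx, true, in, out, in_size);
 */
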
/**
 * @brief Update (or start) an AES authentication process for
 *        associated data (CCM or GCM).
 * @param ctx: SAES process context
 * @param data: pointer to associated data
 * @param data_size: data size
 *
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_saes_update_assodata(struct stm32_saes_context *ctx,
				      uint8_t *data, size_t data_size)
{
	TEE_Result res = TEE_SUCCESS;
	unsigned int i = 0;
	uint32_t previous_phase = 0;

	if (!ctx)
		return TEE_ERROR_BAD_PARAMETERS;

	/* If no associated data, nothing to do */
	if (!data || !data_size)
		return TEE_SUCCESS;

	mutex_lock(ctx->lock);

	previous_phase = (ctx->cr & _SAES_CR_GCMPH_MASK) >>
			 _SAES_CR_GCMPH_SHIFT;

	switch (previous_phase) {
	case _SAES_CR_GCMPH_INIT:
		res = do_from_init_to_phase(ctx, _SAES_CR_GCMPH_HEADER);
		break;
	case _SAES_CR_GCMPH_HEADER:
		/*
		 * Function update_assodata() was already called.
		 * We only need to restore the context.
		 */
		if (can_suspend(ctx->cr))
			res = restore_context(ctx);

		break;
	default:
		DMSG("out of order call");
		res = TEE_ERROR_BAD_STATE;
	}

	if (res)
		goto out;

	/* Manage remaining data from a previous update_assodata() call */
	if (ctx->extra_size &&
	    ((ctx->extra_size + data_size) >= AES_BLOCK_SIZE)) {
		uint32_t block[AES_BLOCK_NB_U32] = { };

		memcpy(block, ctx->extra, ctx->extra_size);
		memcpy((uint8_t *)block + ctx->extra_size, data,
		       AES_BLOCK_SIZE - ctx->extra_size);

		write_aligned_block(ctx->base, block);

		res = wait_computation_completed(ctx->base);
		if (res)
			goto out;

		clear_computation_completed(ctx->base);

		i += AES_BLOCK_SIZE - ctx->extra_size;
		ctx->extra_size = 0;
		ctx->assoc_len += AES_BLOCK_SIZE_BIT;
	}

	while (data_size - i >= AES_BLOCK_SIZE) {
		write_block(ctx->base, data + i);

		res = wait_computation_completed(ctx->base);
		if (res)
			goto out;

		clear_computation_completed(ctx->base);

		/* Process next block */
		i += AES_BLOCK_SIZE;
		ctx->assoc_len += AES_BLOCK_SIZE_BIT;
	}

	/*
	 * Manage the last block if data size is not a multiple of the
	 * block size: save the remaining bytes to process them later
	 * (potentially with new associated data).
	 */
	if (i < data_size) {
		memcpy((uint8_t *)ctx->extra + ctx->extra_size, data + i,
		       data_size - i);
		ctx->extra_size += data_size - i;
	}

	res = save_context(ctx);
out:
	if (res)
		saes_end(ctx, res);

	mutex_unlock(ctx->lock);

	return res;
}

/**
 * @brief Update (or start) AES authentication and de/encryption of
 *        payload data (CCM or GCM).
 * @param ctx: SAES process context
 * @param last_block: true if last payload data block
 * @param data_in: pointer to payload
 * @param data_out: pointer where to save de/encrypted payload
 * @param data_size: payload size
 *
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_saes_update_load(struct stm32_saes_context *ctx,
				  bool last_block, uint8_t *data_in,
				  uint8_t *data_out, size_t data_size)
{
	TEE_Result res = TEE_SUCCESS;
	unsigned int i = 0;
	uint32_t previous_phase = 0;

	if (!ctx)
		return TEE_ERROR_BAD_PARAMETERS;

	/* If there is no data, nothing to do */
	if (!data_in || !data_size)
		return TEE_SUCCESS;

	mutex_lock(ctx->lock);

	previous_phase = ((ctx->cr & _SAES_CR_GCMPH_MASK) >>
			  _SAES_CR_GCMPH_SHIFT);

	switch (previous_phase) {
	case _SAES_CR_GCMPH_INIT:
		res = do_from_init_to_phase(ctx, _SAES_CR_GCMPH_PAYLOAD);
		break;
	case _SAES_CR_GCMPH_HEADER:
		res = do_from_header_to_phase(ctx, _SAES_CR_GCMPH_PAYLOAD);
		break;
	case _SAES_CR_GCMPH_PAYLOAD:
		/* New update_load() call, only the context must be restored */
		if (can_suspend(ctx->cr))
			res = restore_context(ctx);

		break;
	default:
		DMSG("out of order call");
		res = TEE_ERROR_BAD_STATE;
	}

	if (res)
		goto out;

	while (i < ROUNDDOWN(data_size, AES_BLOCK_SIZE)) {
		write_block(ctx->base, data_in + i);

		res = wait_computation_completed(ctx->base);
		if (res)
			goto out;

		read_block(ctx->base, data_out + i);

		clear_computation_completed(ctx->base);

		/* Process next block */
		i += AES_BLOCK_SIZE;
		ctx->load_len += AES_BLOCK_SIZE_BIT;
	}

	/* Manage last block if not a block size multiple */
	if (last_block && i < data_size) {
		uint32_t block_in[AES_BLOCK_NB_U32] = { };
		uint32_t block_out[AES_BLOCK_NB_U32] = { };

		memcpy(block_in, data_in + i, data_size - i);

		if (does_need_npblb(ctx->cr)) {
			uint32_t npblb = AES_BLOCK_SIZE - (data_size - i);

			io_clrsetbits32(ctx->base + _SAES_CR,
					_SAES_CR_NPBLB_MASK,
					SHIFT_U32(npblb, _SAES_CR_NPBLB_SHIFT));
		}

		write_aligned_block(ctx->base, block_in);

		res = wait_computation_completed(ctx->base);
		if (res)
			goto out;

		read_aligned_block(ctx->base, block_out);

		clear_computation_completed(ctx->base);

		memcpy(data_out + i, block_out, data_size - i);

		ctx->load_len += (data_size - i) * INT8_BIT;
	}

	res = save_context(ctx);
out:
	if (res)
		saes_end(ctx, res);

	mutex_unlock(ctx->lock);

	return res;
}

/**
 * @brief Get authentication tag for AES authenticated algorithms (CCM or GCM).
 * @param ctx: SAES process context
 * @param tag: pointer where to save the tag
 * @param tag_size: tag size
 *
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_saes_final(struct stm32_saes_context *ctx, uint8_t *tag,
			    size_t tag_size)
{
	TEE_Result res = TEE_SUCCESS;
	uint32_t tag_u32[4] = { };
	uint32_t previous_phase = 0;

	if (!ctx)
		return TEE_ERROR_BAD_PARAMETERS;

	mutex_lock(ctx->lock);

	previous_phase = (ctx->cr & _SAES_CR_GCMPH_MASK) >>
			 _SAES_CR_GCMPH_SHIFT;

	switch (previous_phase) {
	case _SAES_CR_GCMPH_INIT:
		res = do_from_init_to_phase(ctx, _SAES_CR_GCMPH_FINAL);
		break;
	case _SAES_CR_GCMPH_HEADER:
		res = do_from_header_to_phase(ctx, _SAES_CR_GCMPH_FINAL);
		break;
	case _SAES_CR_GCMPH_PAYLOAD:
		if (can_suspend(ctx->cr))
			res = restore_context(ctx);

		/* Move to final phase */
		io_clrsetbits32(ctx->base + _SAES_CR, _SAES_CR_GCMPH_MASK,
				SHIFT_U32(_SAES_CR_GCMPH_FINAL,
					  _SAES_CR_GCMPH_SHIFT));
		break;
	default:
		DMSG("out of order call");
		res = TEE_ERROR_BAD_STATE;
	}
	if (res)
		goto out;

	if (IS_CHAINING_MODE(GCM, ctx->cr)) {
		/* SAES is configured to swap bytes as expected */
		io_write32(ctx->base + _SAES_DINR, 0);
		io_write32(ctx->base + _SAES_DINR, ctx->assoc_len);
		io_write32(ctx->base + _SAES_DINR, 0);
		io_write32(ctx->base + _SAES_DINR, ctx->load_len);
	}

	res = wait_computation_completed(ctx->base);
	if (res)
		goto out;

	read_aligned_block(ctx->base, tag_u32);

	clear_computation_completed(ctx->base);

	memcpy(tag, tag_u32, MIN(sizeof(tag_u32), tag_size));

out:
	saes_end(ctx, res);
	mutex_unlock(ctx->lock);

	return res;
}

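/*
 * Illustrative AEAD usage sketch (not compiled in; 'key', 'iv', 'aad',
 * 'in', 'out', 'tag' and the sizes are caller-provided):
 *
 *	struct stm32_saes_context ctx = { };
 *	TEE_Result res = TEE_ERROR_GENERIC;
 *
 *	res = stm32_saes_init(&ctx, false, STM32_SAES_MODE_GCM,
 *			      STM32_SAES_KEY_SOFT, key, AES_KEYSIZE_128,
 *			      iv, AES_IVSIZE);
 *	if (!res)
 *		res = stm32_saes_update_assodata(&ctx, aad, aad_size);
 *	if (!res)
 *		res = stm32_saes_update_load(&ctx, true, in, out, in_size);
 *	if (!res)
 *		res = stm32_saes_final(&ctx, tag, 16);
 */
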
/**
 * @brief Update (or start) an AES de/encryption process (ECB, CBC or CTR).
 * @param ctx: SAES process context
 * @param last_block: true if last payload data block
 * @param data_in: pointer to payload
 * @param data_out: pointer where to save de/encrypted payload
 * @param data_size: payload size
 *
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_saes_update(struct stm32_saes_context *ctx, bool last_block,
			     uint8_t *data_in, uint8_t *data_out,
			     size_t data_size)
{
	TEE_Result res = TEE_SUCCESS;
	unsigned int i = U(0);

	if (!ctx)
		return TEE_ERROR_BAD_PARAMETERS;

	mutex_lock(ctx->lock);

	/*
	 * CBC encryption requires the processed data to be aligned on the
	 * AES block size: neither padding nor ciphertext stealing is
	 * implemented.
	 */
	if (last_block && IS_CHAINING_MODE(CBC, ctx->cr) &&
	    is_encrypt(ctx->cr) &&
	    (ROUNDDOWN(data_size, AES_BLOCK_SIZE) != data_size)) {
		if (data_size < AES_BLOCK_SIZE * 2) {
			/*
			 * For CBC, the size of the last part should be at
			 * least 2 * AES_BLOCK_SIZE
			 */
			EMSG("Unexpected last block size");
			res = TEE_ERROR_BAD_STATE;
			goto out;
		}
		/*
		 * Padding is not supported when the total size is not
		 * aligned on the size of a block.
		 */
		res = TEE_ERROR_NOT_IMPLEMENTED;
		goto out;
	}

	/* Manage a remaining CTR mask from a previous update call */
	if (IS_CHAINING_MODE(CTR, ctx->cr) && ctx->extra_size) {
		unsigned int j = 0;
		uint8_t *mask = (uint8_t *)ctx->extra;

		for (i = 0, j = 0; j < ctx->extra_size && i < data_size;
		     j++, i++)
			data_out[i] = data_in[i] ^ mask[j];

		if (j != ctx->extra_size) {
			/*
			 * The saved mask was not fully consumed but there
			 * is no more data: save the remaining mask and its
			 * new size.
			 */
			memmove(ctx->extra, ctx->extra + j,
				ctx->extra_size - j);
			ctx->extra_size -= j;

			/*
			 * No need to save the HW context since the HW
			 * state was not modified.
			 */
			res = TEE_SUCCESS;
			goto out;
		}
		/* All the extra mask was consumed */
		ctx->extra_size = 0;
	}

	res = restore_context(ctx);
	if (res)
		goto out;

	while (data_size - i >= AES_BLOCK_SIZE) {
		write_block(ctx->base, data_in + i);

		res = wait_computation_completed(ctx->base);
		if (res)
			goto out;

		read_block(ctx->base, data_out + i);

		clear_computation_completed(ctx->base);

		/* Process next block */
		i += AES_BLOCK_SIZE;
	}

	/* Manage last block if not a block size multiple */
	if (i < data_size) {
		if (IS_CHAINING_MODE(CTR, ctx->cr)) {
			/*
			 * For CTR, save the generated keystream mask to
			 * use it in the next update call.
			 */
			uint32_t block_in[AES_BLOCK_NB_U32] = { };
			uint32_t block_out[AES_BLOCK_NB_U32] = { };

			memcpy(block_in, data_in + i, data_size - i);

			write_aligned_block(ctx->base, block_in);

			res = wait_computation_completed(ctx->base);
			if (res)
				goto out;

			read_aligned_block(ctx->base, block_out);

			clear_computation_completed(ctx->base);

			memcpy(data_out + i, block_out, data_size - i);

			/* Save the mask for a possible next call */
			ctx->extra_size = AES_BLOCK_SIZE - (data_size - i);
			memcpy(ctx->extra, (uint8_t *)block_out + data_size - i,
			       ctx->extra_size);
		} else {
			/* CBC and ECB only handle multiples of the block size */
			res = TEE_ERROR_BAD_PARAMETERS;
			goto out;
		}
	}

	if (!last_block)
		res = save_context(ctx);

out:
	/* If last block or error, end the SAES processing */
	if (last_block || res)
		saes_end(ctx, res);

	mutex_unlock(ctx->lock);

	return res;
}

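/*
 * Note: for CTR, ctx->extra keeps the unused part of the last keystream
 * block, so successive stm32_saes_update() calls do not need to be
 * aligned on AES_BLOCK_SIZE: e.g. two calls of 10 then 22 bytes produce
 * the same output as a single 32-byte call.
 */
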
static void xor_block(uint8_t *b1, uint8_t *b2, size_t size)
{
	size_t i = 0;

	for (i = 0; i < size; i++)
		b1[i] ^= b2[i];
}

static TEE_Result stm32_saes_cmac_prf_128(struct stm32_saes_context *ctx,
					  enum stm32_saes_key_selection key_sel,
					  const void *key, size_t key_size,
					  uint8_t *data, size_t data_size,
					  uint8_t *out)
{
	TEE_Result res = TEE_ERROR_GENERIC;
	uint8_t block[AES_BLOCK_SIZE] = { };
	uint8_t k1[AES_BLOCK_SIZE] = { };
	uint8_t k2[AES_BLOCK_SIZE] = { };
	uint8_t l[AES_BLOCK_SIZE] = { };
	size_t processed = 0;
	uint8_t bit = 0;
	int i = 0;

	if (!ctx)
		return TEE_ERROR_BAD_PARAMETERS;

	/* Get K1 and K2 */
	res = stm32_saes_init(ctx, false, STM32_SAES_MODE_ECB, key_sel,
			      key, key_size, NULL, 0);
	if (res)
		return res;

	res = stm32_saes_update(ctx, true, l, l, sizeof(l));
	if (res)
		return res;

	/* MSB(L) == 0 => K1 = L << 1 */
	bit = 0;
	for (i = sizeof(l) - 1; i >= 0; i--) {
		k1[i] = (l[i] << 1) | bit;
		bit = (l[i] & 0x80) >> 7;
	}
	/* MSB(L) == 1 => K1 = (L << 1) XOR const_Rb */
	if ((l[0] & 0x80))
		k1[sizeof(k1) - 1] = k1[sizeof(k1) - 1] ^ 0x87;

	/* MSB(K1) == 0 => K2 = K1 << 1 */
	bit = 0;
	for (i = sizeof(k1) - 1; i >= 0; i--) {
		k2[i] = (k1[i] << 1) | bit;
		bit = (k1[i] & 0x80) >> 7;
	}

	/* MSB(K1) == 1 => K2 = (K1 << 1) XOR const_Rb */
	if ((k1[0] & 0x80))
		k2[sizeof(k2) - 1] = k2[sizeof(k2) - 1] ^ 0x87;

	if (data_size > AES_BLOCK_SIZE) {
		uint8_t *data_out = NULL;

		/* All blocks but the last one, in CBC mode */
		res = stm32_saes_init(ctx, false, STM32_SAES_MODE_CBC,
				      key_sel, key, key_size, block,
				      sizeof(block));
		if (res)
			return res;

		processed = ROUNDDOWN(data_size - 1, AES_BLOCK_SIZE);
		data_out = malloc(processed);
		if (!data_out)
			return TEE_ERROR_OUT_OF_MEMORY;

		res = stm32_saes_update(ctx, true, data, data_out, processed);
		if (!res) {
			/* Copy last out block or keep block as { 0 } */
			memcpy(block, data_out + processed - AES_BLOCK_SIZE,
			       AES_BLOCK_SIZE);
		}

		free(data_out);

		if (res)
			return res;
	}

	/* Manage the last block */
	xor_block(block, data + processed, data_size - processed);
	if (data_size - processed == AES_BLOCK_SIZE) {
		xor_block(block, k1, AES_BLOCK_SIZE);
	} else {
		/* XOR with the padding 0b100... */
		block[data_size - processed] ^= 0x80;
		xor_block(block, k2, AES_BLOCK_SIZE);
	}

	/*
	 * AES of the last block.
	 * The same chaining mode must be used to keep the same key when
	 * DHUK is selected, so reuse l as a zero initialized IV.
	 */
	memset(l, 0, sizeof(l));
	res = stm32_saes_init(ctx, false, STM32_SAES_MODE_CBC, key_sel, key,
			      key_size, l, sizeof(l));
	if (res)
		return res;

	return stm32_saes_update(ctx, true, block, out, AES_BLOCK_SIZE);
}

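/*
 * stm32_saes_cmac_prf_128() follows the AES-CMAC construction of
 * NIST SP 800-38B / RFC 4493: K1 and K2 are derived from L = E(K, 0^128)
 * by doubling in GF(2^128) (left shift, conditionally XORing const_Rb
 * 0x87), the message is CBC-MACed, and the last block is XORed with K1
 * when complete, or 10*-padded and XORed with K2 otherwise.
 */
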
TEE_Result stm32_saes_kdf(struct stm32_saes_context *ctx,
			  enum stm32_saes_key_selection key_sel,
			  const void *key, size_t key_size,
			  const void *input, size_t input_size,
			  uint8_t *subkey, size_t subkey_size)
{
	TEE_Result res = TEE_SUCCESS;
	uint32_t index = 0;
	uint32_t index_be = 0;
	uint8_t *data = NULL;
	size_t data_index = 0;
	size_t subkey_index = 0;
	size_t data_size = input_size + sizeof(index_be);
	uint8_t cmac[AES_BLOCK_SIZE] = { };

	if (!ctx || !input || !input_size)
		return TEE_ERROR_BAD_PARAMETERS;

	/* For each K(i), an index is prepended to the input */
	data = malloc(data_size);
	if (!data)
		return TEE_ERROR_OUT_OF_MEMORY;

	data_index = 0;
	index_be = TEE_U32_TO_BIG_ENDIAN(index);
	memcpy(data + data_index, &index_be, sizeof(index_be));
	data_index += sizeof(index_be);
	memcpy(data + data_index, input, input_size);
	data_index += input_size;

	/* K(i) computation */
	index = 0;
	while (subkey_index < subkey_size) {
		index++;
		index_be = TEE_U32_TO_BIG_ENDIAN(index);
		memcpy(data, &index_be, sizeof(index_be));

		res = stm32_saes_cmac_prf_128(ctx, key_sel, key, key_size,
					      data, data_size, cmac);
		if (res)
			goto out;

		memcpy(subkey + subkey_index, cmac,
		       MIN(subkey_size - subkey_index, sizeof(cmac)));
		subkey_index += sizeof(cmac);
	}

out:
	free(data);
	if (res)
		memzero_explicit(subkey, subkey_size);

	return res;
}

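/*
 * stm32_saes_kdf() is a KDF in counter mode in the style of
 * NIST SP 800-108, with AES-CMAC as the PRF:
 * K(i) = CMAC(key, [i]_32be || input), and the subkey is the
 * concatenation K(1) || K(2) || ... truncated to subkey_size bytes.
 */
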
/* Implement hardware HUK derivation using SAES resources */
TEE_Result huk_subkey_derive(enum huk_subkey_usage usage,
			     const void *const_data, size_t const_data_len,
			     uint8_t *subkey, size_t subkey_len)
{
	TEE_Result res = TEE_ERROR_GENERIC;
	uint8_t *input = NULL;
	size_t input_index = 0;
	size_t subkey_bitlen = 0;
	struct stm32_saes_context ctx = { };
	uint8_t separator = 0;

	/* Fall back to the default derivation if the driver is not probed */
	if (!saes_pdata.base) {
		return __huk_subkey_derive(usage, const_data, const_data_len,
					   subkey, subkey_len);
	}

	input = malloc(const_data_len + sizeof(separator) + sizeof(usage) +
		       sizeof(subkey_bitlen) + AES_BLOCK_SIZE);
	if (!input)
		return TEE_ERROR_OUT_OF_MEMORY;

	input_index = 0;
	if (const_data) {
		memcpy(input + input_index, const_data, const_data_len);
		input_index += const_data_len;

		memcpy(input + input_index, &separator, sizeof(separator));
		input_index += sizeof(separator);
	}

	memcpy(input + input_index, &usage, sizeof(usage));
	input_index += sizeof(usage);

	/*
	 * Append the subkey length, expressed in bits, at the end of the
	 * input, as an MSB-first 32-bit value.
	 */
	subkey_bitlen = TEE_U32_TO_BIG_ENDIAN(subkey_len * INT8_BIT);
	memcpy(input + input_index, &subkey_bitlen, sizeof(subkey_bitlen));
	input_index += sizeof(subkey_bitlen);

	/*
	 * Compute K(0), to mitigate key control attacks, and store it at
	 * the end of the input.
	 */
	res = stm32_saes_cmac_prf_128(&ctx, STM32_SAES_KEY_DHU, NULL,
				      AES_KEYSIZE_128,
				      input, input_index,
				      input + input_index);
	if (res)
		goto out;

	/* K(0) was just appended to the input */
	input_index += AES_BLOCK_SIZE;

	res = stm32_saes_kdf(&ctx, STM32_SAES_KEY_DHU, NULL, AES_KEYSIZE_128,
			     input, input_index, subkey, subkey_len);

out:
	free(input);
	return res;
}

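/*
 * The derivation input built above is laid out as:
 * [const_data || 0x00] || usage || bitlen(subkey) || K(0)
 * where K(0) = CMAC(DHUK, input without K(0)). The whole buffer is then
 * fed to the CMAC-based KDF keyed with the DHUK.
 */
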
static TEE_Result stm32_saes_parse_fdt(struct stm32_saes_platdata *pdata,
				       const void *fdt, int node)
{
	struct dt_node_info dt_saes = { };
	TEE_Result res = TEE_ERROR_GENERIC;

	dt_saes.reg = fdt_reg_base_address(fdt, node);
	dt_saes.reg_size = fdt_reg_size(fdt, node);

	if (dt_saes.reg == DT_INFO_INVALID_REG ||
	    dt_saes.reg_size == DT_INFO_INVALID_REG_SIZE)
		return TEE_ERROR_BAD_PARAMETERS;

	res = clk_dt_get_by_name(fdt, node, "bus", &pdata->clk);
	if (res != TEE_SUCCESS)
		return res;

	res = clk_dt_get_by_name(fdt, node, "rng", &pdata->clk_rng);
	if (res != TEE_SUCCESS)
		return res;

	res = rstctrl_dt_get_by_index(fdt, node, 0, &pdata->reset);
	if (res != TEE_SUCCESS && res != TEE_ERROR_ITEM_NOT_FOUND)
		return res;

	pdata->base = (vaddr_t)phys_to_virt(dt_saes.reg, MEM_AREA_IO_SEC,
					    dt_saes.reg_size);
	if (!pdata->base)
		panic();

	return TEE_SUCCESS;
}

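/*
 * Expected device tree node shape (illustrative sketch; the unit address
 * and phandles are placeholders, only the names below come from the
 * parsing code above and the match table):
 *
 *	saes@... {
 *		compatible = "st,stm32mp13-saes";
 *		reg = <... ...>;
 *		clocks = <...>, <...>;
 *		clock-names = "bus", "rng";
 *		resets = <...>;		(optional)
 *	};
 */
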
static void stm32_saes_reset(void)
{
	if (saes_pdata.reset) {
		/* External reset of SAES */
		if (rstctrl_assert_to(saes_pdata.reset, TIMEOUT_US_1MS))
			panic();

		udelay(SAES_RESET_DELAY);

		if (rstctrl_deassert_to(saes_pdata.reset, TIMEOUT_US_1MS))
			panic();
	} else {
		/* Internal reset of SAES */
		io_setbits32(saes_pdata.base + _SAES_CR, _SAES_CR_IPRST);
		udelay(SAES_RESET_DELAY);
		io_clrbits32(saes_pdata.base + _SAES_CR, _SAES_CR_IPRST);
	}
}

static TEE_Result stm32_saes_pm(enum pm_op op, uint32_t pm_hint,
				const struct pm_callback_handle *hdl __unused)
{
	switch (op) {
	case PM_OP_SUSPEND:
		clk_disable(saes_pdata.clk);
		clk_disable(saes_pdata.clk_rng);
		return TEE_SUCCESS;

	case PM_OP_RESUME:
		if (clk_enable(saes_pdata.clk) ||
		    clk_enable(saes_pdata.clk_rng))
			panic();

		if (PM_HINT_IS_STATE(pm_hint, CONTEXT))
			stm32_saes_reset();

		return TEE_SUCCESS;
	default:
		break;
	}

	return TEE_ERROR_NOT_IMPLEMENTED;
}

static TEE_Result stm32_saes_probe(const void *fdt, int node,
				   const void *compat_data __unused)
{
	TEE_Result res = TEE_SUCCESS;

	assert(!saes_pdata.base);

	res = stm32_saes_parse_fdt(&saes_pdata, fdt, node);
	if (res)
		return res;

	if (clk_enable(saes_pdata.clk) || clk_enable(saes_pdata.clk_rng))
		panic();

	stm32_saes_reset();

	if (IS_ENABLED(CFG_CRYPTO_DRV_CIPHER)) {
		res = stm32_register_cipher(SAES_IP);
		if (res) {
			EMSG("Failed to register cipher driver: %#"PRIx32, res);
			panic();
		}
	}

	register_pm_core_service_cb(stm32_saes_pm, NULL, "stm32-saes");

	return TEE_SUCCESS;
}

static const struct dt_device_match saes_match_table[] = {
	{ .compatible = "st,stm32mp13-saes" },
	{ }
};

DEFINE_DT_DRIVER(stm32_saes_dt_driver) = {
	.name = "stm32-saes",
	.match_table = saes_match_table,
	.probe = stm32_saes_probe,
};
1471