// SPDX-License-Identifier: BSD-2-Clause
/*
 * Copyright (c) 2021-2023, STMicroelectronics - All Rights Reserved
 */
#include <assert.h>
#include <config.h>
#include <drivers/clk.h>
#include <drivers/clk_dt.h>
#include <drivers/rstctrl.h>
#include <io.h>
#include <kernel/boot.h>
#include <kernel/delay.h>
#include <kernel/dt.h>
#include <kernel/mutex.h>
#include <libfdt.h>
#include <mm/core_memprot.h>
#include <stdint.h>
#include <stm32_util.h>
#include <utee_defines.h>
#include <util.h>

#include "common.h"
#include "stm32_saes.h"

/* SAES control register */
#define _SAES_CR			U(0x0)
/* SAES status register */
#define _SAES_SR			U(0x04)
/* SAES data input register */
#define _SAES_DINR			U(0x08)
/* SAES data output register */
#define _SAES_DOUTR			U(0x0c)
/* SAES key registers [0-3] */
#define _SAES_KEYR0			U(0x10)
#define _SAES_KEYR1			U(0x14)
#define _SAES_KEYR2			U(0x18)
#define _SAES_KEYR3			U(0x1c)
/* SAES initialization vector registers [0-3] */
#define _SAES_IVR0			U(0x20)
#define _SAES_IVR1			U(0x24)
#define _SAES_IVR2			U(0x28)
#define _SAES_IVR3			U(0x2c)
/* SAES key registers [4-7] */
#define _SAES_KEYR4			U(0x30)
#define _SAES_KEYR5			U(0x34)
#define _SAES_KEYR6			U(0x38)
#define _SAES_KEYR7			U(0x3c)
/* SAES suspend registers [0-7] */
#define _SAES_SUSPR0			U(0x40)
#define _SAES_SUSPR1			U(0x44)
#define _SAES_SUSPR2			U(0x48)
#define _SAES_SUSPR3			U(0x4c)
#define _SAES_SUSPR4			U(0x50)
#define _SAES_SUSPR5			U(0x54)
#define _SAES_SUSPR6			U(0x58)
#define _SAES_SUSPR7			U(0x5c)
/* SAES Interrupt Enable Register */
#define _SAES_IER			U(0x300)
/* SAES Interrupt Status Register */
#define _SAES_ISR			U(0x304)
/* SAES Interrupt Clear Register */
#define _SAES_ICR			U(0x308)

/* SAES control register fields */
#define _SAES_CR_RESET_VALUE		U(0x0)
#define _SAES_CR_IPRST			BIT(31)
#define _SAES_CR_KEYSEL_MASK		GENMASK_32(30, 28)
#define _SAES_CR_KEYSEL_SHIFT		U(28)
#define _SAES_CR_KEYSEL_SOFT		U(0x0)
#define _SAES_CR_KEYSEL_DHUK		U(0x1)
#define _SAES_CR_KEYSEL_BHK		U(0x2)
#define _SAES_CR_KEYSEL_BHU_XOR_BH_K	U(0x4)
#define _SAES_CR_KEYSEL_TEST		U(0x7)
#define _SAES_CR_KSHAREID_MASK		GENMASK_32(27, 26)
#define _SAES_CR_KSHAREID_SHIFT		U(26)
#define _SAES_CR_KSHAREID_CRYP		U(0x0)
#define _SAES_CR_KEYMOD_MASK		GENMASK_32(25, 24)
#define _SAES_CR_KEYMOD_SHIFT		U(24)
#define _SAES_CR_KEYMOD_NORMAL		U(0x0)
#define _SAES_CR_KEYMOD_WRAPPED		U(0x1)
#define _SAES_CR_KEYMOD_SHARED		U(0x2)
#define _SAES_CR_NPBLB_MASK		GENMASK_32(23, 20)
#define _SAES_CR_NPBLB_SHIFT		U(20)
#define _SAES_CR_KEYPROT		BIT(19)
#define _SAES_CR_KEYSIZE		BIT(18)
#define _SAES_CR_GCMPH_MASK		GENMASK_32(14, 13)
#define _SAES_CR_GCMPH_SHIFT		U(13)
#define _SAES_CR_GCMPH_INIT		U(0)
#define _SAES_CR_GCMPH_HEADER		U(1)
#define _SAES_CR_GCMPH_PAYLOAD		U(2)
#define _SAES_CR_GCMPH_FINAL		U(3)
#define _SAES_CR_DMAOUTEN		BIT(12)
#define _SAES_CR_DMAINEN		BIT(11)
#define _SAES_CR_CHMOD_MASK		(BIT(16) | GENMASK_32(6, 5))
#define _SAES_CR_CHMOD_SHIFT		U(5)
#define _SAES_CR_CHMOD_ECB		U(0x0)
#define _SAES_CR_CHMOD_CBC		U(0x1)
#define _SAES_CR_CHMOD_CTR		U(0x2)
#define _SAES_CR_CHMOD_GCM		U(0x3)
#define _SAES_CR_CHMOD_GMAC		U(0x3)
#define _SAES_CR_CHMOD_CCM		U(0x800)
#define _SAES_CR_MODE_MASK		GENMASK_32(4, 3)
#define _SAES_CR_MODE_SHIFT		U(3)
#define _SAES_CR_MODE_ENC		U(0)
#define _SAES_CR_MODE_KEYPREP		U(1)
#define _SAES_CR_MODE_DEC		U(2)
#define _SAES_CR_DATATYPE_MASK		GENMASK_32(2, 1)
#define _SAES_CR_DATATYPE_SHIFT		U(1)
#define _SAES_CR_DATATYPE_NONE		U(0)
#define _SAES_CR_DATATYPE_HALF_WORD	U(1)
#define _SAES_CR_DATATYPE_BYTE		U(2)
#define _SAES_CR_DATATYPE_BIT		U(3)
#define _SAES_CR_EN			BIT(0)

/* SAES status register fields */
#define _SAES_SR_KEYVALID		BIT(7)
#define _SAES_SR_BUSY			BIT(3)
#define _SAES_SR_WRERR			BIT(2)
#define _SAES_SR_RDERR			BIT(1)
#define _SAES_SR_CCF			BIT(0)

/* SAES interrupt registers fields */
#define _SAES_I_RNG_ERR			BIT(3)
#define _SAES_I_KEY_ERR			BIT(2)
#define _SAES_I_RW_ERR			BIT(1)
#define _SAES_I_CC			BIT(0)

#define SAES_TIMEOUT_US			U(100000)
#define TIMEOUT_US_1MS			U(1000)
#define SAES_RESET_DELAY		U(2)

#define IS_CHAINING_MODE(mode, cr) \
	(((cr) & _SAES_CR_CHMOD_MASK) == (_SAES_CR_CHMOD_##mode << \
					  _SAES_CR_CHMOD_SHIFT))

#define SET_CHAINING_MODE(mode, cr) \
	set_field_u32(cr, _SAES_CR_CHMOD_MASK, _SAES_CR_CHMOD_##mode)

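/*
 * Note on the CHMOD encoding (illustration derived from the definitions
 * above): the chaining mode field is split in CR, which is why
 * _SAES_CR_CHMOD_MASK covers BIT(16) and bits [6:5]. The ECB..GCM values
 * (0x0..0x3) land in bits [6:5] once shifted, while the CCM value U(0x800)
 * shifted by _SAES_CR_CHMOD_SHIFT gives 0x800 << 5 == BIT(16). For
 * instance:
 *
 *   IS_CHAINING_MODE(CCM, cr)
 *     == ((cr & (BIT(16) | GENMASK_32(6, 5))) == BIT(16))
 */
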
static struct mutex saes_lock = MUTEX_INITIALIZER;
static struct stm32_saes_platdata {
	vaddr_t base;
	struct clk *clk;
	struct rstctrl *reset;
} saes_pdata;

static bool does_chaining_mode_need_iv(uint32_t cr)
{
	return !IS_CHAINING_MODE(ECB, cr);
}

static bool is_encrypt(uint32_t cr)
{
	return (cr & _SAES_CR_MODE_MASK) ==
	       SHIFT_U32(_SAES_CR_MODE_ENC, _SAES_CR_MODE_SHIFT);
}

static bool is_decrypt(uint32_t cr)
{
	return (cr & _SAES_CR_MODE_MASK) ==
	       SHIFT_U32(_SAES_CR_MODE_DEC, _SAES_CR_MODE_SHIFT);
}

static bool does_need_npblb(uint32_t cr)
{
	return (IS_CHAINING_MODE(GCM, cr) && is_encrypt(cr)) ||
	       (IS_CHAINING_MODE(CCM, cr) && is_decrypt(cr));
}

static bool can_suspend(uint32_t cr)
{
	return !IS_CHAINING_MODE(GCM, cr);
}

static void write_aligned_block(vaddr_t base, uint32_t *data)
{
	unsigned int i = 0;

	/* SAES is configured to swap bytes as expected */
	for (i = 0; i < AES_BLOCK_NB_U32; i++)
		io_write32(base + _SAES_DINR, data[i]);
}

static void write_block(vaddr_t base, uint8_t *data)
{
	if (IS_ALIGNED_WITH_TYPE(data, uint32_t)) {
		write_aligned_block(base, (void *)data);
	} else {
		uint32_t data_u32[AES_BLOCK_NB_U32] = { };

		memcpy(data_u32, data, sizeof(data_u32));
		write_aligned_block(base, data_u32);
	}
}

static void read_aligned_block(vaddr_t base, uint32_t *data)
{
	unsigned int i = 0;

	/* SAES is configured to swap bytes as expected */
	for (i = 0; i < AES_BLOCK_NB_U32; i++)
		data[i] = io_read32(base + _SAES_DOUTR);
}

static void read_block(vaddr_t base, uint8_t *data)
{
	if (IS_ALIGNED_WITH_TYPE(data, uint32_t)) {
		read_aligned_block(base, (void *)data);
	} else {
		uint32_t data_u32[AES_BLOCK_NB_U32] = { };

		read_aligned_block(base, data_u32);

		memcpy(data, data_u32, sizeof(data_u32));
	}
}

static TEE_Result wait_computation_completed(vaddr_t base)
{
	uint64_t timeout_ref = timeout_init_us(SAES_TIMEOUT_US);

	while ((io_read32(base + _SAES_SR) & _SAES_SR_CCF) != _SAES_SR_CCF)
		if (timeout_elapsed(timeout_ref))
			break;

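	/*
	 * Re-check CCF after the deadline: if the thread was preempted
	 * during the polling loop, the flag may have been set even though
	 * the timeout elapsed, so only this final read decides.
	 */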
	if ((io_read32(base + _SAES_SR) & _SAES_SR_CCF) != _SAES_SR_CCF) {
		DMSG("CCF timeout");
		return TEE_ERROR_GENERIC;
	}

	return TEE_SUCCESS;
}

static void clear_computation_completed(uintptr_t base)
{
	io_setbits32(base + _SAES_ICR, _SAES_I_CC);
}

static TEE_Result saes_start(struct stm32_saes_context *ctx)
{
	uint64_t timeout_ref = 0;

	/* Reset SAES */
	io_setbits32(ctx->base + _SAES_CR, _SAES_CR_IPRST);
	io_clrbits32(ctx->base + _SAES_CR, _SAES_CR_IPRST);

	timeout_ref = timeout_init_us(SAES_TIMEOUT_US);
	while (io_read32(ctx->base + _SAES_SR) & _SAES_SR_BUSY)
		if (timeout_elapsed(timeout_ref))
			break;

	if (io_read32(ctx->base + _SAES_SR) & _SAES_SR_BUSY) {
		DMSG("busy timeout");
		return TEE_ERROR_GENERIC;
	}

	return TEE_SUCCESS;
}

static void saes_end(struct stm32_saes_context *ctx, int prev_error)
{
	if (prev_error) {
		/* Reset SAES */
		io_setbits32(ctx->base + _SAES_CR, _SAES_CR_IPRST);
		io_clrbits32(ctx->base + _SAES_CR, _SAES_CR_IPRST);
	}

	/* Disable the SAES peripheral */
	io_clrbits32(ctx->base + _SAES_CR, _SAES_CR_EN);
}

static void saes_write_iv(struct stm32_saes_context *ctx)
{
	/* Restore the IV if the chaining mode uses one */
	if (does_chaining_mode_need_iv(ctx->cr)) {
		unsigned int i = 0;

		for (i = 0; i < AES_IVSIZE / sizeof(uint32_t); i++) {
			io_write32(ctx->base + _SAES_IVR0 + i *
				   sizeof(uint32_t), ctx->iv[i]);
		}
	}
}

static void saes_save_suspend(struct stm32_saes_context *ctx)
{
	size_t i = 0;

	for (i = 0; i < 8; i++)
		ctx->susp[i] = io_read32(ctx->base + _SAES_SUSPR0 +
					 i * sizeof(uint32_t));
}

static void saes_restore_suspend(struct stm32_saes_context *ctx)
{
	size_t i = 0;

	for (i = 0; i < 8; i++)
		io_write32(ctx->base + _SAES_SUSPR0 + i * sizeof(uint32_t),
			   ctx->susp[i]);
}

static void saes_write_key(struct stm32_saes_context *ctx)
{
	/* Restore the _SAES_KEYRx registers when a software key is used */
	if ((ctx->cr & _SAES_CR_KEYSEL_MASK) ==
	    SHIFT_U32(_SAES_CR_KEYSEL_SOFT, _SAES_CR_KEYSEL_SHIFT)) {
		size_t i = 0;

		for (i = 0; i < AES_KEYSIZE_128 / sizeof(uint32_t); i++)
			io_write32(ctx->base + _SAES_KEYR0 + i *
				   sizeof(uint32_t),
				   ctx->key[i]);

		if ((ctx->cr & _SAES_CR_KEYSIZE) == _SAES_CR_KEYSIZE) {
			for (i = 0;
			     i < (AES_KEYSIZE_256 / 2) / sizeof(uint32_t);
			     i++) {
				io_write32(ctx->base + _SAES_KEYR4 + i *
					   sizeof(uint32_t),
					   ctx->key[i + 4]);
			}
		}
	}
}

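/*
 * Key word layout, worked example derived from stm32_saes_init() below:
 * for a 128-bit key with bytes k0..k15, ctx->key[i] is written to
 * _SAES_KEYRi, and ctx->key[i] = TEE_U32_TO_BIG_ENDIAN() of word (3 - i)
 * of the caller's key. So _SAES_KEYR3 receives k0..k3 (the leading key
 * word) and _SAES_KEYR0 receives k12..k15, which corresponds to KEYR0
 * holding the least significant key word.
 */
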
static TEE_Result saes_prepare_key(struct stm32_saes_context *ctx)
{
	/* Disable the SAES peripheral */
	io_clrbits32(ctx->base + _SAES_CR, _SAES_CR_EN);

	/* Set key size */
	if ((ctx->cr & _SAES_CR_KEYSIZE))
		io_setbits32(ctx->base + _SAES_CR, _SAES_CR_KEYSIZE);
	else
		io_clrbits32(ctx->base + _SAES_CR, _SAES_CR_KEYSIZE);

	saes_write_key(ctx);

	/*
	 * For ECB/CBC decryption, the key preparation mode must first be
	 * run so the hardware derives the decryption key schedule.
	 */
	if ((IS_CHAINING_MODE(ECB, ctx->cr) ||
	     IS_CHAINING_MODE(CBC, ctx->cr)) && is_decrypt(ctx->cr)) {
		TEE_Result res = TEE_SUCCESS;

		/* Select Mode 2 (key preparation) */
		io_clrsetbits32(ctx->base + _SAES_CR, _SAES_CR_MODE_MASK,
				SHIFT_U32(_SAES_CR_MODE_KEYPREP,
					  _SAES_CR_MODE_SHIFT));

		/* Enable SAES */
		io_setbits32(ctx->base + _SAES_CR, _SAES_CR_EN);

		res = wait_computation_completed(ctx->base);
		if (res)
			return res;

		clear_computation_completed(ctx->base);

		/* Set Mode 3 (decryption) */
		io_clrsetbits32(ctx->base + _SAES_CR, _SAES_CR_MODE_MASK,
				SHIFT_U32(_SAES_CR_MODE_DEC,
					  _SAES_CR_MODE_SHIFT));
	}

	return TEE_SUCCESS;
}

static TEE_Result save_context(struct stm32_saes_context *ctx)
{
	if ((io_read32(ctx->base + _SAES_SR) & _SAES_SR_CCF)) {
		/* Device should not be in a processing phase */
		return TEE_ERROR_BAD_STATE;
	}

	/* Save CR */
	ctx->cr = io_read32(ctx->base + _SAES_CR);

	if (!can_suspend(ctx->cr))
		return TEE_SUCCESS;

	saes_save_suspend(ctx);

	/* Save the current IV if the chaining mode uses one */
	if (does_chaining_mode_need_iv(ctx->cr)) {
		uint8_t i = 0;

		/* Save IV */
		for (i = 0; i < AES_IVSIZE / sizeof(uint32_t); i++) {
			ctx->iv[i] = io_read32(ctx->base + _SAES_IVR0 + i *
					       sizeof(uint32_t));
		}
	}

	/* Disable the SAES peripheral */
	io_clrbits32(ctx->base + _SAES_CR, _SAES_CR_EN);

	return TEE_SUCCESS;
}

/* To resume the processing of a message */
static TEE_Result restore_context(struct stm32_saes_context *ctx)
{
	TEE_Result res = TEE_SUCCESS;

	/* SAES shall be disabled */
	if ((io_read32(ctx->base + _SAES_CR) & _SAES_CR_EN)) {
		DMSG("Device is still enabled");
		return TEE_ERROR_BAD_STATE;
	}

	/* Reset internal state */
	io_setbits32(ctx->base + _SAES_CR, _SAES_CR_IPRST);

	/* Restore configuration register */
	io_write32(ctx->base + _SAES_CR, ctx->cr);

	/* Write key and, in case of CBC or ECB decrypt, prepare it */
	res = saes_prepare_key(ctx);
	if (res)
		return res;

	saes_restore_suspend(ctx);

	saes_write_iv(ctx);

	/* Enable the SAES peripheral */
	io_setbits32(ctx->base + _SAES_CR, _SAES_CR_EN);

	return TEE_SUCCESS;
}

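/*
 * save_context()/restore_context() snapshot CR, the suspend registers and
 * the IV, so several cipher contexts can share the single SAES instance.
 * A minimal sketch of the interleaving this enables (hypothetical caller
 * code, assuming ctx_a and ctx_b were both set up with stm32_saes_init()):
 *
 *   stm32_saes_update(ctx_a, false, in_a, out_a, 16); // saves ctx_a state
 *   stm32_saes_update(ctx_b, false, in_b, out_b, 16); // restores/saves ctx_b
 *   stm32_saes_update(ctx_a, false, in_a + 16, out_a + 16, 16); // resumes
 *
 * GCM is the exception: can_suspend() rejects it, so a GCM operation must
 * run to completion before another context takes over the hardware.
 */
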
static TEE_Result do_from_init_to_phase(struct stm32_saes_context *ctx,
					uint32_t new_phase)
{
	TEE_Result res = TEE_SUCCESS;

	/* We didn't run the init phase yet */
	res = restore_context(ctx);
	if (res)
		return res;

	res = wait_computation_completed(ctx->base);
	if (res)
		return res;

	clear_computation_completed(ctx->base);

	/* Move to 'new_phase' */
	io_clrsetbits32(ctx->base + _SAES_CR, _SAES_CR_GCMPH_MASK,
			SHIFT_U32(new_phase, _SAES_CR_GCMPH_SHIFT));

	/* Enable the SAES peripheral (init disabled it) */
	io_setbits32(ctx->base + _SAES_CR, _SAES_CR_EN);

	return TEE_SUCCESS;
}

static TEE_Result do_from_header_to_phase(struct stm32_saes_context *ctx,
					  uint32_t new_phase)
{
	TEE_Result res = TEE_SUCCESS;

	if (can_suspend(ctx->cr)) {
		res = restore_context(ctx);
		if (res)
			return res;
	}

	if (ctx->extra_size) {
		/* Pad and process pending header data before the new phase */
		memset((uint8_t *)ctx->extra + ctx->extra_size, 0,
		       AES_BLOCK_SIZE - ctx->extra_size);

		write_aligned_block(ctx->base, ctx->extra);

		res = wait_computation_completed(ctx->base);
		if (res)
			return res;

		clear_computation_completed(ctx->base);

		ctx->assoc_len += ctx->extra_size * INT8_BIT;
		ctx->extra_size = U(0);
	}

	/* Move to 'new_phase' */
	io_clrsetbits32(ctx->base + _SAES_CR, _SAES_CR_GCMPH_MASK,
			SHIFT_U32(new_phase, _SAES_CR_GCMPH_SHIFT));

	return TEE_SUCCESS;
}

/**
 * @brief Start an AES computation.
 * @param ctx: SAES process context
 * @param is_dec: true if decryption, false if encryption
 * @param ch_mode: define the chaining mode
 * @param key_select: define where the key comes from
 * @param key: pointer to the key (used only if key_select is KEY_SOFT)
 * @param key_size: key size
 * @param iv: pointer to the initialization vector (unused if ch_mode is ECB)
 * @param iv_size: iv size
 * @note apart from the SAES reset done by saes_start(), this function does
 * not access the hardware: it stores the configuration in ctx
 *
 * @retval TEE_SUCCESS if OK or a TEE_Result compliant code.
 */
TEE_Result stm32_saes_init(struct stm32_saes_context *ctx, bool is_dec,
			   enum stm32_saes_chaining_mode ch_mode,
			   enum stm32_saes_key_selection key_select,
			   const void *key, size_t key_size, const void *iv,
			   size_t iv_size)
{
	const uint32_t *key_u32 = NULL;
	const uint32_t *iv_u32 = NULL;
	uint32_t local_key[8] = { };
	uint32_t local_iv[4] = { };
	unsigned int i = 0;

	if (!ctx)
		return TEE_ERROR_BAD_PARAMETERS;

	*ctx = (struct stm32_saes_context){
		.lock = &saes_lock,
		.base = saes_pdata.base,
		.cr = _SAES_CR_RESET_VALUE
	};

	/* We want buffers to be u32 aligned */
	if (IS_ALIGNED_WITH_TYPE(key, uint32_t)) {
		key_u32 = key;
	} else {
		memcpy(local_key, key, key_size);
		key_u32 = local_key;
	}

	if (IS_ALIGNED_WITH_TYPE(iv, uint32_t)) {
		iv_u32 = iv;
	} else {
		memcpy(local_iv, iv, iv_size);
		iv_u32 = local_iv;
	}

	if (is_dec)
		ctx->cr |= set_field_u32(ctx->cr, _SAES_CR_MODE_MASK,
					 _SAES_CR_MODE_DEC);
	else
		ctx->cr |= set_field_u32(ctx->cr, _SAES_CR_MODE_MASK,
					 _SAES_CR_MODE_ENC);

	/* Save chaining mode */
	switch (ch_mode) {
	case STM32_SAES_MODE_ECB:
		ctx->cr |= SET_CHAINING_MODE(ECB, ctx->cr);
		break;
	case STM32_SAES_MODE_CBC:
		ctx->cr |= SET_CHAINING_MODE(CBC, ctx->cr);
		break;
	case STM32_SAES_MODE_CTR:
		ctx->cr |= SET_CHAINING_MODE(CTR, ctx->cr);
		break;
	case STM32_SAES_MODE_GCM:
		ctx->cr |= SET_CHAINING_MODE(GCM, ctx->cr);
		break;
	case STM32_SAES_MODE_CCM:
		ctx->cr |= SET_CHAINING_MODE(CCM, ctx->cr);
		break;
	default:
		return TEE_ERROR_BAD_PARAMETERS;
	}

	/*
	 * We use the HW byte swap (_SAES_CR_DATATYPE_BYTE) for data, so
	 * there is no need for TEE_U32_TO_BIG_ENDIAN() before writing to
	 * DINR, nor for TEE_U32_FROM_BIG_ENDIAN() after reading from DOUTR.
	 *
	 * Note that key wrapping only accepts _SAES_CR_DATATYPE_NONE.
	 */
	ctx->cr |= set_field_u32(ctx->cr, _SAES_CR_DATATYPE_MASK,
				 _SAES_CR_DATATYPE_BYTE);

	/* Configure keysize */
	switch (key_size) {
	case AES_KEYSIZE_128:
		ctx->cr &= ~_SAES_CR_KEYSIZE;
		break;
	case AES_KEYSIZE_256:
		ctx->cr |= _SAES_CR_KEYSIZE;
		break;
	default:
		return TEE_ERROR_BAD_PARAMETERS;
	}

	/* Configure key */
	switch (key_select) {
	case STM32_SAES_KEY_SOFT:
		ctx->cr |= set_field_u32(ctx->cr, _SAES_CR_KEYSEL_MASK,
					 _SAES_CR_KEYSEL_SOFT);
		/* Save key */
		switch (key_size) {
		case AES_KEYSIZE_128:
			/* First 16 bytes == 4 u32 */
			for (i = 0; i < AES_KEYSIZE_128 / sizeof(uint32_t);
			     i++) {
				ctx->key[i] =
					TEE_U32_TO_BIG_ENDIAN(key_u32[3 - i]);
				/*
				 * /!\ The key is saved in HW byte order and
				 * word order: key[i] goes to _SAES_KEYRi.
				 */
			}
			break;
		case AES_KEYSIZE_256:
			for (i = 0; i < AES_KEYSIZE_256 / sizeof(uint32_t);
			     i++) {
				ctx->key[i] =
					TEE_U32_TO_BIG_ENDIAN(key_u32[7 - i]);
				/*
				 * /!\ The key is saved in HW byte order and
				 * word order: key[i] goes to _SAES_KEYRi.
				 */
			}
			break;
		default:
			return TEE_ERROR_BAD_PARAMETERS;
		}
		break;
	case STM32_SAES_KEY_DHU:
		ctx->cr |= set_field_u32(ctx->cr, _SAES_CR_KEYSEL_MASK,
					 _SAES_CR_KEYSEL_DHUK);
		break;
	case STM32_SAES_KEY_BH:
		ctx->cr |= set_field_u32(ctx->cr, _SAES_CR_KEYSEL_MASK,
					 _SAES_CR_KEYSEL_BHK);
		break;
	case STM32_SAES_KEY_BHU_XOR_BH:
		ctx->cr |= set_field_u32(ctx->cr, _SAES_CR_KEYSEL_MASK,
					 _SAES_CR_KEYSEL_BHU_XOR_BH_K);
		break;
	case STM32_SAES_KEY_WRAPPED:
		ctx->cr |= set_field_u32(ctx->cr, _SAES_CR_KEYSEL_MASK,
					 _SAES_CR_KEYSEL_SOFT);
		break;
	default:
		return TEE_ERROR_BAD_PARAMETERS;
	}

	/* Save IV */
	if (ch_mode != STM32_SAES_MODE_ECB) {
		if (!iv || iv_size != AES_IVSIZE)
			return TEE_ERROR_BAD_PARAMETERS;

		for (i = 0; i < AES_IVSIZE / sizeof(uint32_t); i++)
			ctx->iv[i] = TEE_U32_TO_BIG_ENDIAN(iv_u32[3 - i]);
	}

	/* Reset suspend registers */
	memset(ctx->susp, 0, sizeof(ctx->susp));

	return saes_start(ctx);
}

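/*
 * Minimal usage sketch for the plain cipher path (hypothetical caller
 * code: the key, iv and buf names below are illustrative, only the
 * stm32_saes_* functions and types come from this driver):
 *
 *   struct stm32_saes_context ctx = { };
 *   uint8_t key[AES_KEYSIZE_128] = { };  // filled by the caller
 *   uint8_t iv[AES_IVSIZE] = { };        // filled by the caller
 *   uint8_t buf[32] = { };               // block size multiple for CBC
 *   TEE_Result res = TEE_ERROR_GENERIC;
 *
 *   res = stm32_saes_init(&ctx, false, STM32_SAES_MODE_CBC,
 *                         STM32_SAES_KEY_SOFT, key, sizeof(key),
 *                         iv, sizeof(iv));
 *   if (!res)
 *           res = stm32_saes_update(&ctx, true, buf, buf, sizeof(buf));
 */
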
/**
 * @brief Update (or start) an AES authentication process for
 *        associated data (CCM or GCM).
 * @param ctx: SAES process context
 * @param data: pointer to associated data
 * @param data_size: data size
 *
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_saes_update_assodata(struct stm32_saes_context *ctx,
				      uint8_t *data, size_t data_size)
{
	TEE_Result res = TEE_SUCCESS;
	unsigned int i = 0;
	uint32_t previous_phase = 0;

	if (!ctx)
		return TEE_ERROR_BAD_PARAMETERS;

	/* If no associated data, nothing to do */
	if (!data || !data_size)
		return TEE_SUCCESS;

	mutex_lock(ctx->lock);

	previous_phase = (ctx->cr & _SAES_CR_GCMPH_MASK) >>
			 _SAES_CR_GCMPH_SHIFT;

	switch (previous_phase) {
	case _SAES_CR_GCMPH_INIT:
		res = do_from_init_to_phase(ctx, _SAES_CR_GCMPH_HEADER);
		break;
	case _SAES_CR_GCMPH_HEADER:
		/*
		 * Function update_assodata() was already called.
		 * We only need to restore the context.
		 */
		if (can_suspend(ctx->cr))
			res = restore_context(ctx);

		break;
	default:
		DMSG("out of order call");
		res = TEE_ERROR_BAD_STATE;
	}

	if (res)
		goto out;

	/* Handle data remaining from a previous update_assodata() call */
	if (ctx->extra_size &&
	    ((ctx->extra_size + data_size) >= AES_BLOCK_SIZE)) {
		uint32_t block[AES_BLOCK_NB_U32] = { };

		memcpy(block, ctx->extra, ctx->extra_size);
		memcpy((uint8_t *)block + ctx->extra_size, data,
		       AES_BLOCK_SIZE - ctx->extra_size);

		write_aligned_block(ctx->base, block);

		res = wait_computation_completed(ctx->base);
		if (res)
			goto out;

		clear_computation_completed(ctx->base);

		i += AES_BLOCK_SIZE - ctx->extra_size;
		ctx->extra_size = 0;
		ctx->assoc_len += AES_BLOCK_SIZE_BIT;
	}

	while (data_size - i >= AES_BLOCK_SIZE) {
		write_block(ctx->base, data + i);

		res = wait_computation_completed(ctx->base);
		if (res)
			goto out;

		clear_computation_completed(ctx->base);

		/* Process next block */
		i += AES_BLOCK_SIZE;
		ctx->assoc_len += AES_BLOCK_SIZE_BIT;
	}

	/*
	 * If the data is not a block size multiple, save the remaining
	 * bytes to process them later (potentially with more associated
	 * data).
	 */
	if (i < data_size) {
		memcpy((uint8_t *)ctx->extra + ctx->extra_size, data + i,
		       data_size - i);
		ctx->extra_size += data_size - i;
	}

	res = save_context(ctx);
out:
	if (res)
		saes_end(ctx, res);

	mutex_unlock(ctx->lock);

	return res;
}

/**
 * @brief Update (or start) the payload phase of an AES authenticated
 *        de/encryption (CCM or GCM).
 * @param ctx: SAES process context
 * @param last_block: true if last payload data block
 * @param data_in: pointer to payload
 * @param data_out: pointer where to save de/encrypted payload
 * @param data_size: payload size
 *
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_saes_update_load(struct stm32_saes_context *ctx,
				  bool last_block, uint8_t *data_in,
				  uint8_t *data_out, size_t data_size)
{
	TEE_Result res = TEE_SUCCESS;
	unsigned int i = 0;
	uint32_t previous_phase = 0;

	if (!ctx)
		return TEE_ERROR_BAD_PARAMETERS;

	/* If there is no data, nothing to do */
	if (!data_in || !data_size)
		return TEE_SUCCESS;

	mutex_lock(ctx->lock);

	previous_phase = ((ctx->cr & _SAES_CR_GCMPH_MASK) >>
			  _SAES_CR_GCMPH_SHIFT);

	switch (previous_phase) {
	case _SAES_CR_GCMPH_INIT:
		res = do_from_init_to_phase(ctx, _SAES_CR_GCMPH_PAYLOAD);
		break;
	case _SAES_CR_GCMPH_HEADER:
		res = do_from_header_to_phase(ctx, _SAES_CR_GCMPH_PAYLOAD);
		break;
	case _SAES_CR_GCMPH_PAYLOAD:
		/* Subsequent update_load() call: only restore the context */
		if (can_suspend(ctx->cr))
			res = restore_context(ctx);

		break;
	default:
		DMSG("out of order call");
		res = TEE_ERROR_BAD_STATE;
	}

	if (res)
		goto out;

	while (i < ROUNDDOWN(data_size, AES_BLOCK_SIZE)) {
		write_block(ctx->base, data_in + i);

		res = wait_computation_completed(ctx->base);
		if (res)
			goto out;

		read_block(ctx->base, data_out + i);

		clear_computation_completed(ctx->base);

		/* Process next block */
		i += AES_BLOCK_SIZE;
		ctx->load_len += AES_BLOCK_SIZE_BIT;
	}

	/* Handle the last block if data_size is not a block size multiple */
	if (last_block && i < data_size) {
		uint32_t block_in[AES_BLOCK_NB_U32] = { };
		uint32_t block_out[AES_BLOCK_NB_U32] = { };

		memcpy(block_in, data_in + i, data_size - i);

		if (does_need_npblb(ctx->cr)) {
			uint32_t npblb = AES_BLOCK_SIZE - (data_size - i);

			io_clrsetbits32(ctx->base + _SAES_CR,
					_SAES_CR_NPBLB_MASK,
					SHIFT_U32(npblb, _SAES_CR_NPBLB_SHIFT));
		}

		write_aligned_block(ctx->base, block_in);

		res = wait_computation_completed(ctx->base);
		if (res)
			goto out;

		read_aligned_block(ctx->base, block_out);

		clear_computation_completed(ctx->base);

		memcpy(data_out + i, block_out, data_size - i);

		ctx->load_len += (data_size - i) * INT8_BIT;
	}

	res = save_context(ctx);
out:
	if (res)
		saes_end(ctx, res);

	mutex_unlock(ctx->lock);

	return res;
}

/**
 * @brief Get authentication tag for AES authenticated algorithms (CCM or GCM).
 * @param ctx: SAES process context
 * @param tag: pointer where to save the tag
 * @param tag_size: tag size
 *
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_saes_final(struct stm32_saes_context *ctx, uint8_t *tag,
			    size_t tag_size)
{
	TEE_Result res = TEE_SUCCESS;
	uint32_t tag_u32[4] = { };
	uint32_t previous_phase = 0;

	if (!ctx)
		return TEE_ERROR_BAD_PARAMETERS;

	mutex_lock(ctx->lock);

	previous_phase = (ctx->cr & _SAES_CR_GCMPH_MASK) >>
			 _SAES_CR_GCMPH_SHIFT;

	switch (previous_phase) {
	case _SAES_CR_GCMPH_INIT:
		res = do_from_init_to_phase(ctx, _SAES_CR_GCMPH_FINAL);
		break;
	case _SAES_CR_GCMPH_HEADER:
		res = do_from_header_to_phase(ctx, _SAES_CR_GCMPH_FINAL);
		break;
	case _SAES_CR_GCMPH_PAYLOAD:
		if (can_suspend(ctx->cr))
			res = restore_context(ctx);

		/* Move to final phase */
		io_clrsetbits32(ctx->base + _SAES_CR, _SAES_CR_GCMPH_MASK,
				SHIFT_U32(_SAES_CR_GCMPH_FINAL,
					  _SAES_CR_GCMPH_SHIFT));
		break;
	default:
		DMSG("out of order call");
		res = TEE_ERROR_BAD_STATE;
	}
	if (res)
		goto out;

	if (IS_CHAINING_MODE(GCM, ctx->cr)) {
		/* SAES is configured to swap bytes as expected */
		io_write32(ctx->base + _SAES_DINR, 0);
		io_write32(ctx->base + _SAES_DINR, ctx->assoc_len);
		io_write32(ctx->base + _SAES_DINR, 0);
		io_write32(ctx->base + _SAES_DINR, ctx->load_len);
	}

	res = wait_computation_completed(ctx->base);
	if (res)
		goto out;

	read_aligned_block(ctx->base, tag_u32);

	clear_computation_completed(ctx->base);

	memcpy(tag, tag_u32, MIN(sizeof(tag_u32), tag_size));

out:
	saes_end(ctx, res);
	mutex_unlock(ctx->lock);

	return res;
}

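/*
 * Minimal AEAD flow sketch (hypothetical caller code with error checks
 * elided; aad, pt, ct and tag are illustrative names, the functions are
 * the ones defined in this file). The GCMPH phase tracking in CR is what
 * enforces this ordering:
 *
 *   res = stm32_saes_init(&ctx, false, STM32_SAES_MODE_GCM,
 *                         STM32_SAES_KEY_SOFT, key, sizeof(key),
 *                         iv, sizeof(iv));
 *   res = stm32_saes_update_assodata(&ctx, aad, aad_size);
 *   res = stm32_saes_update_load(&ctx, true, pt, ct, pt_size);
 *   res = stm32_saes_final(&ctx, tag, sizeof(tag));
 *
 * Calling these out of order trips the "out of order call" checks above
 * and returns TEE_ERROR_BAD_STATE.
 */
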
/**
 * @brief Update (or start) an AES de/encrypt process (ECB, CBC or CTR).
 * @param ctx: SAES process context
 * @param last_block: true if last payload data block
 * @param data_in: pointer to payload
 * @param data_out: pointer where to save de/encrypted payload
 * @param data_size: payload size
 *
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_saes_update(struct stm32_saes_context *ctx, bool last_block,
			     uint8_t *data_in, uint8_t *data_out,
			     size_t data_size)
{
	TEE_Result res = TEE_SUCCESS;
	unsigned int i = U(0);

	if (!ctx)
		return TEE_ERROR_BAD_PARAMETERS;

	mutex_lock(ctx->lock);

	/*
	 * CBC encryption requires the last two blocks to be aligned with
	 * the AES block size.
	 */
	if (last_block && IS_CHAINING_MODE(CBC, ctx->cr) &&
	    is_encrypt(ctx->cr) &&
	    (ROUNDDOWN(data_size, AES_BLOCK_SIZE) != data_size)) {
		if (data_size < AES_BLOCK_SIZE * 2) {
			/*
			 * With CBC, the size of the last part should be at
			 * least 2 * AES_BLOCK_SIZE
			 */
			EMSG("Unexpected last block size");
			res = TEE_ERROR_BAD_STATE;
			goto out;
		}
		/*
		 * Padding is not supported when the total size is not a
		 * multiple of the block size.
		 */
		res = TEE_ERROR_NOT_IMPLEMENTED;
		goto out;
	}

	/* Consume any CTR keystream mask left from a previous update call */
	if (IS_CHAINING_MODE(CTR, ctx->cr) && ctx->extra_size) {
		unsigned int j = 0;
		uint8_t *mask = (uint8_t *)ctx->extra;

		for (i = 0, j = 0; j < ctx->extra_size && i < data_size;
		     j++, i++)
			data_out[i] = data_in[i] ^ mask[j];

		if (j != ctx->extra_size) {
			/*
			 * We did not consume all of the saved mask, but
			 * there is no more data: save the remaining mask
			 * and its new size.
			 */
			memmove(ctx->extra, ctx->extra + j,
				ctx->extra_size - j);
			ctx->extra_size -= j;

			/*
			 * No need to save the HW context: we did not
			 * modify the HW state.
			 */
			res = TEE_SUCCESS;
			goto out;
		}
		/* All of the extra mask was consumed */
		ctx->extra_size = 0;
	}

	res = restore_context(ctx);
	if (res)
		goto out;

	while (data_size - i >= AES_BLOCK_SIZE) {
		write_block(ctx->base, data_in + i);

		res = wait_computation_completed(ctx->base);
		if (res)
			goto out;

		read_block(ctx->base, data_out + i);

		clear_computation_completed(ctx->base);

		/* Process next block */
		i += AES_BLOCK_SIZE;
	}

	/* Handle the last block if data_size is not a block size multiple */
	if (i < data_size) {
		if (IS_CHAINING_MODE(CTR, ctx->cr)) {
			/*
			 * For CTR, save the generated keystream mask to use
			 * it at the next update call.
			 */
			uint32_t block_in[AES_BLOCK_NB_U32] = { };
			uint32_t block_out[AES_BLOCK_NB_U32] = { };

			memcpy(block_in, data_in + i, data_size - i);

			write_aligned_block(ctx->base, block_in);

			res = wait_computation_completed(ctx->base);
			if (res)
				goto out;

			read_aligned_block(ctx->base, block_out);

			clear_computation_completed(ctx->base);

			memcpy(data_out + i, block_out, data_size - i);

			/* Save the mask for a possible next call */
			ctx->extra_size = AES_BLOCK_SIZE - (data_size - i);
			memcpy(ctx->extra, (uint8_t *)block_out + data_size - i,
			       ctx->extra_size);
		} else {
			/* CBC and ECB only handle block size multiples */
			res = TEE_ERROR_BAD_PARAMETERS;
			goto out;
		}
	}

	if (!last_block)
		res = save_context(ctx);

out:
	/* On last block or error, end the SAES processing */
	if (last_block || res)
		saes_end(ctx, res);

	mutex_unlock(ctx->lock);

	return res;
}

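/*
 * Worked CTR example of the mask handling above (hypothetical sizes, the
 * behaviour follows directly from the code): a first
 * stm32_saes_update(ctx, false, in, out, 20) call processes one full
 * 16-byte block, encrypts the 4 trailing bytes against a fresh keystream
 * block, and saves the 12 unused keystream bytes in ctx->extra. A second
 * call with fewer than 12 bytes is then served entirely from the saved
 * mask without touching the hardware; with more data, the mask is
 * consumed first and the remainder goes through the block loop.
 */
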
static TEE_Result stm32_saes_parse_fdt(struct stm32_saes_platdata *pdata,
				       const void *fdt, int node)
{
	struct dt_node_info dt_saes = { };
	TEE_Result res = TEE_ERROR_GENERIC;

	dt_saes.reg = fdt_reg_base_address(fdt, node);
	dt_saes.reg_size = fdt_reg_size(fdt, node);

	if (dt_saes.reg == DT_INFO_INVALID_REG ||
	    dt_saes.reg_size == DT_INFO_INVALID_REG_SIZE)
		return TEE_ERROR_BAD_PARAMETERS;

	res = clk_dt_get_by_index(fdt, node, 0, &pdata->clk);
	if (res != TEE_SUCCESS)
		return res;

	res = rstctrl_dt_get_by_index(fdt, node, 0, &pdata->reset);
	if (res != TEE_SUCCESS && res != TEE_ERROR_ITEM_NOT_FOUND)
		return res;

	pdata->base = (vaddr_t)phys_to_virt(dt_saes.reg, MEM_AREA_IO_SEC,
					    dt_saes.reg_size);
	if (!pdata->base)
		panic();

	return TEE_SUCCESS;
}

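/*
 * Sketch of the device-tree node this parser expects (hypothetical
 * values: the base address, size and clock/reset phandles below are
 * placeholders; only the "st,stm32mp13-saes" compatible and the use of
 * reg, clocks and resets at index 0 are implied by the code):
 *
 *   saes: saes@<base> {
 *           compatible = "st,stm32mp13-saes";
 *           reg = <BASE SIZE>;
 *           clocks = <&rcc SAES_CLK>;
 *           resets = <&rcc SAES_RST>;
 *   };
 *
 * The resets property is optional: rstctrl_dt_get_by_index() may return
 * TEE_ERROR_ITEM_NOT_FOUND, in which case probe skips the external reset.
 */
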
static TEE_Result stm32_saes_probe(const void *fdt, int node,
				   const void *compat_data __unused)
{
	TEE_Result res = TEE_SUCCESS;

	assert(!saes_pdata.base);

	res = stm32_saes_parse_fdt(&saes_pdata, fdt, node);
	if (res)
		return res;

	if (clk_enable(saes_pdata.clk))
		panic();

	/* External reset of SAES */
	if (saes_pdata.reset) {
		if (rstctrl_assert_to(saes_pdata.reset, TIMEOUT_US_1MS))
			panic();

		udelay(SAES_RESET_DELAY);

		if (rstctrl_deassert_to(saes_pdata.reset, TIMEOUT_US_1MS))
			panic();
	}

	/* Internal reset of SAES */
	io_setbits32(saes_pdata.base + _SAES_CR, _SAES_CR_IPRST);
	udelay(SAES_RESET_DELAY);
	io_clrbits32(saes_pdata.base + _SAES_CR, _SAES_CR_IPRST);

	if (IS_ENABLED(CFG_CRYPTO_DRV_CIPHER)) {
		res = stm32_register_cipher(SAES_IP);
		if (res) {
			EMSG("Failed to register to cipher: %#"PRIx32, res);
			panic();
		}
	}

	return TEE_SUCCESS;
}

static const struct dt_device_match saes_match_table[] = {
	{ .compatible = "st,stm32mp13-saes" },
	{ }
};

DEFINE_DT_DRIVER(stm32_saes_dt_driver) = {
	.name = "stm32-saes",
	.match_table = saes_match_table,
	.probe = &stm32_saes_probe,
};