xref: /rk3399_ARM-atf/drivers/st/crypto/stm32_saes.c (revision 8cef63d6c7184fe1eebc354716e4b3910d385f9b)
1 /*
2  * Copyright (c) 2022-2025, STMicroelectronics - All Rights Reserved
3  *
4  * SPDX-License-Identifier: BSD-3-Clause
5  */
#include <assert.h>
#include <endian.h>
#include <errno.h>
#include <stdbool.h>
#include <stdint.h>
#include <string.h>
10 
11 #include <drivers/clk.h>
12 #include <drivers/delay_timer.h>
13 #include <drivers/st/stm32_saes.h>
14 #include <drivers/st/stm32mp_reset.h>
15 #include <lib/mmio.h>
16 #include <lib/utils_def.h>
17 #include <libfdt.h>
18 
19 #include <platform_def.h>
20 
21 #define UINT8_BIT			8U
22 #define AES_BLOCK_SIZE_BIT		128U
23 #define AES_BLOCK_SIZE			(AES_BLOCK_SIZE_BIT / UINT8_BIT)
24 
25 #define AES_KEYSIZE_128			16U
26 #define AES_KEYSIZE_256			32U
27 #define AES_IVSIZE			16U
28 
29 /* SAES control register */
30 #define _SAES_CR			0x0U
31 /* SAES status register */
32 #define _SAES_SR			0x04U
33 /* SAES data input register */
34 #define _SAES_DINR			0x08U
35 /* SAES data output register */
36 #define _SAES_DOUTR			0x0CU
37 /* SAES key registers [0-3] */
38 #define _SAES_KEYR0			0x10U
39 #define _SAES_KEYR1			0x14U
40 #define _SAES_KEYR2			0x18U
41 #define _SAES_KEYR3			0x1CU
42 /* SAES initialization vector registers [0-3] */
43 #define _SAES_IVR0			0x20U
44 #define _SAES_IVR1			0x24U
45 #define _SAES_IVR2			0x28U
46 #define _SAES_IVR3			0x2CU
47 /* SAES key registers [4-7] */
48 #define _SAES_KEYR4			0x30U
49 #define _SAES_KEYR5			0x34U
50 #define _SAES_KEYR6			0x38U
51 #define _SAES_KEYR7			0x3CU
52 /* SAES suspend registers [0-7] */
53 #define _SAES_SUSPR0			0x40U
54 #define _SAES_SUSPR1			0x44U
55 #define _SAES_SUSPR2			0x48U
56 #define _SAES_SUSPR3			0x4CU
57 #define _SAES_SUSPR4			0x50U
58 #define _SAES_SUSPR5			0x54U
59 #define _SAES_SUSPR6			0x58U
60 #define _SAES_SUSPR7			0x5CU
61 /* SAES Interrupt Enable Register */
62 #define _SAES_IER			0x300U
63 /* SAES Interrupt Status Register */
64 #define _SAES_ISR			0x304U
65 /* SAES Interrupt Clear Register */
66 #define _SAES_ICR			0x308U
67 
68 /* SAES control register fields */
69 #define _SAES_CR_RESET_VALUE		0x0U
70 #define _SAES_CR_IPRST			BIT(31)
71 #define _SAES_CR_KEYSEL_MASK		GENMASK(30, 28)
72 #define _SAES_CR_KEYSEL_SHIFT		28U
73 #define _SAES_CR_KEYSEL_SOFT		0x0U
74 #define _SAES_CR_KEYSEL_DHUK		0x1U
75 #define _SAES_CR_KEYSEL_BHK		0x2U
76 #define _SAES_CR_KEYSEL_BHU_XOR_BH_K	0x4U
77 #define _SAES_CR_KEYSEL_TEST		0x7U
78 #define _SAES_CR_KSHAREID_MASK		GENMASK(27, 26)
79 #define _SAES_CR_KSHAREID_SHIFT		26U
80 #define _SAES_CR_KSHAREID_CRYP		0x0U
81 #define _SAES_CR_KEYMOD_MASK		GENMASK(25, 24)
82 #define _SAES_CR_KEYMOD_SHIFT		24U
83 #define _SAES_CR_KEYMOD_NORMAL		0x0U
84 #define _SAES_CR_KEYMOD_WRAPPED		0x1U
85 #define _SAES_CR_KEYMOD_SHARED		0x2U
86 #define _SAES_CR_NPBLB_MASK		GENMASK(23, 20)
87 #define _SAES_CR_NPBLB_SHIFT		20U
88 #define _SAES_CR_KEYPROT		BIT(19)
89 #define _SAES_CR_KEYSIZE		BIT(18)
90 #define _SAES_CR_GCMPH_MASK		GENMASK(14, 13)
91 #define _SAES_CR_GCMPH_SHIFT		13U
92 #define _SAES_CR_GCMPH_INIT		0U
93 #define _SAES_CR_GCMPH_HEADER		1U
94 #define _SAES_CR_GCMPH_PAYLOAD		2U
95 #define _SAES_CR_GCMPH_FINAL		3U
96 #define _SAES_CR_DMAOUTEN		BIT(12)
97 #define _SAES_CR_DMAINEN		BIT(11)
98 #define _SAES_CR_CHMOD_MASK		(BIT(16) | GENMASK(6, 5))
99 #define _SAES_CR_CHMOD_SHIFT		5U
100 #define _SAES_CR_CHMOD_ECB		0x0U
101 #define _SAES_CR_CHMOD_CBC		0x1U
102 #define _SAES_CR_CHMOD_CTR		0x2U
103 #define _SAES_CR_CHMOD_GCM		0x3U
104 #define _SAES_CR_CHMOD_GMAC		0x3U
105 #define _SAES_CR_CHMOD_CCM		0x800U
106 #define _SAES_CR_MODE_MASK		GENMASK(4, 3)
107 #define _SAES_CR_MODE_SHIFT		3U
108 #define _SAES_CR_MODE_ENC		0U
109 #define _SAES_CR_MODE_KEYPREP		1U
110 #define _SAES_CR_MODE_DEC		2U
111 #define _SAES_CR_DATATYPE_MASK		GENMASK(2, 1)
112 #define _SAES_CR_DATATYPE_SHIFT		1U
113 #define _SAES_CR_DATATYPE_NONE		0U
114 #define _SAES_CR_DATATYPE_HALF_WORD	1U
115 #define _SAES_CR_DATATYPE_BYTE		2U
116 #define _SAES_CR_DATATYPE_BIT		3U
117 #define _SAES_CR_EN			BIT(0)
118 
119 /* SAES status register fields */
120 #define _SAES_SR_KEYVALID		BIT(7)
121 #define _SAES_SR_BUSY			BIT(3)
122 #define _SAES_SR_WRERR			BIT(2)
123 #define _SAES_SR_RDERR			BIT(1)
124 #define _SAES_SR_CCF			BIT(0)
125 
126 /* SAES interrupt registers fields */
127 #define _SAES_I_RNG_ERR			BIT(3)
128 #define _SAES_I_KEY_ERR			BIT(2)
129 #define _SAES_I_RW_ERR			BIT(1)
130 #define _SAES_I_CC			BIT(0)
131 
132 #define SAES_TIMEOUT_US			100000U
133 #define TIMEOUT_US_1MS			1000U
134 #define SAES_RESET_DELAY		20U
135 
136 #define IS_CHAINING_MODE(mod, cr) \
137 	(((cr) & _SAES_CR_CHMOD_MASK) == (_SAES_CR_CHMOD_##mod << _SAES_CR_CHMOD_SHIFT))
138 
139 #define SET_CHAINING_MODE(mod, cr) \
140 	mmio_clrsetbits_32((cr), _SAES_CR_CHMOD_MASK, _SAES_CR_CHMOD_##mod << _SAES_CR_CHMOD_SHIFT)
141 
142 static struct stm32_saes_platdata saes_pdata;
143 
144 static int stm32_saes_parse_fdt(struct stm32_saes_platdata *pdata)
145 {
146 	int node;
147 	struct dt_node_info info;
148 	void *fdt;
149 
150 	if (fdt_get_address(&fdt) == 0) {
151 		return -FDT_ERR_NOTFOUND;
152 	}
153 
154 	node = dt_get_node(&info, -1, DT_SAES_COMPAT);
155 	if (node < 0) {
156 		ERROR("No SAES entry in DT\n");
157 		return -FDT_ERR_NOTFOUND;
158 	}
159 
160 	if (info.status == DT_DISABLED) {
161 		return -FDT_ERR_NOTFOUND;
162 	}
163 
164 	if ((info.base == 0U) || (info.clock < 0) || (info.reset < 0)) {
165 		return -FDT_ERR_BADVALUE;
166 	}
167 
168 	pdata->base = (uintptr_t)info.base;
169 	pdata->clock_id = (unsigned long)info.clock;
170 	pdata->reset_id = (unsigned int)info.reset;
171 
172 	return 0;
173 }
174 
175 static bool does_chaining_mode_need_iv(uint32_t cr)
176 {
177 	return !(IS_CHAINING_MODE(ECB, cr));
178 }
179 
180 static bool is_encrypt(uint32_t cr)
181 {
182 	return (cr & _SAES_CR_MODE_MASK) == (_SAES_CR_MODE_ENC << _SAES_CR_MODE_SHIFT);
183 }
184 
185 static bool is_decrypt(uint32_t cr)
186 {
187 	return (cr & _SAES_CR_MODE_MASK) == (_SAES_CR_MODE_DEC << _SAES_CR_MODE_SHIFT);
188 }
189 
190 static int wait_computation_completed(uintptr_t base)
191 {
192 	uint64_t timeout = timeout_init_us(SAES_TIMEOUT_US);
193 
194 	while ((mmio_read_32(base + _SAES_SR) & _SAES_SR_CCF) != _SAES_SR_CCF) {
195 		if (timeout_elapsed(timeout)) {
196 			WARN("%s: timeout\n", __func__);
197 			return -ETIMEDOUT;
198 		}
199 	}
200 
201 	return 0;
202 }
203 
/* Acknowledge the computation complete event by writing the CC bit in ICR. */
static void clear_computation_completed(uintptr_t base)
{
	mmio_setbits_32(base + _SAES_ICR, _SAES_I_CC);
}
208 
209 static int saes_start(struct stm32_saes_context *ctx)
210 {
211 	uint64_t timeout;
212 
213 	/* Reset IP */
214 	if ((mmio_read_32(ctx->base + _SAES_SR) & _SAES_SR_BUSY) != _SAES_SR_BUSY) {
215 		mmio_setbits_32(ctx->base + _SAES_CR, _SAES_CR_IPRST);
216 		udelay(SAES_RESET_DELAY);
217 		mmio_clrbits_32(ctx->base + _SAES_CR, _SAES_CR_IPRST);
218 	}
219 
220 	timeout = timeout_init_us(SAES_TIMEOUT_US);
221 	while ((mmio_read_32(ctx->base + _SAES_SR) & _SAES_SR_BUSY) == _SAES_SR_BUSY) {
222 		if (timeout_elapsed(timeout)) {
223 			WARN("%s: timeout\n", __func__);
224 			return -ETIMEDOUT;
225 		}
226 	}
227 
228 	return 0;
229 }
230 
/*
 * Terminate a SAES processing session.
 *
 * @prev_error: result of the preceding operation; when non-zero the IP is
 * soft-reset to flush any partial internal state before being disabled.
 */
static void saes_end(struct stm32_saes_context *ctx, int prev_error)
{
	if (prev_error != 0) {
		/* Reset IP */
		mmio_setbits_32(ctx->base + _SAES_CR, _SAES_CR_IPRST);
		udelay(SAES_RESET_DELAY);
		mmio_clrbits_32(ctx->base + _SAES_CR, _SAES_CR_IPRST);
	}

	/* Disable the SAES peripheral */
	mmio_clrbits_32(ctx->base + _SAES_CR, _SAES_CR_EN);
}
243 
244 static void saes_write_iv(struct stm32_saes_context *ctx)
245 {
246 	/* If chaining mode need to restore IV */
247 	if (does_chaining_mode_need_iv(ctx->cr)) {
248 		uint8_t i;
249 
250 		/* Restore the _SAES_IVRx */
251 		for (i = 0U; i < AES_IVSIZE / sizeof(uint32_t); i++) {
252 			mmio_write_32(ctx->base + _SAES_IVR0 + i * sizeof(uint32_t), ctx->iv[i]);
253 		}
254 	}
255 
256 }
257 
258 static void saes_write_key(struct stm32_saes_context *ctx)
259 {
260 	/* Restore the _SAES_KEYRx if SOFTWARE key */
261 	if ((ctx->cr & _SAES_CR_KEYSEL_MASK) == (_SAES_CR_KEYSEL_SOFT << _SAES_CR_KEYSEL_SHIFT)) {
262 		uint8_t i;
263 
264 		for (i = 0U; i < AES_KEYSIZE_128 / sizeof(uint32_t); i++) {
265 			mmio_write_32(ctx->base + _SAES_KEYR0 + i * sizeof(uint32_t), ctx->key[i]);
266 		}
267 
268 		if ((ctx->cr & _SAES_CR_KEYSIZE) == _SAES_CR_KEYSIZE) {
269 			for (i = 0U; i < (AES_KEYSIZE_256 / 2U) / sizeof(uint32_t); i++) {
270 				mmio_write_32(ctx->base + _SAES_KEYR4 + i * sizeof(uint32_t),
271 					      ctx->key[i + 4U]);
272 			}
273 		}
274 	}
275 }
276 
/*
 * Load the key into the peripheral and, for ECB/CBC decryption, run the
 * hardware key-preparation step (Mode 2) that derives the decryption key
 * schedule from the encryption key.
 *
 * Return 0 on success, a negative errno value on timeout.
 */
static int saes_prepare_key(struct stm32_saes_context *ctx)
{
	/* Disable the SAES peripheral */
	mmio_clrbits_32(ctx->base + _SAES_CR, _SAES_CR_EN);

	/* Set key size */
	if ((ctx->cr & _SAES_CR_KEYSIZE) != 0U) {
		mmio_setbits_32(ctx->base + _SAES_CR, _SAES_CR_KEYSIZE);
	} else {
		mmio_clrbits_32(ctx->base + _SAES_CR, _SAES_CR_KEYSIZE);
	}

	saes_write_key(ctx);

	/* For ECB/CBC decryption, key preparation mode must be selected to populate the key */
	if ((IS_CHAINING_MODE(ECB, ctx->cr) || IS_CHAINING_MODE(CBC, ctx->cr)) &&
	    is_decrypt(ctx->cr)) {
		int ret;

		/* Select Mode 2 (key preparation) */
		mmio_clrsetbits_32(ctx->base + _SAES_CR, _SAES_CR_MODE_MASK,
				   _SAES_CR_MODE_KEYPREP << _SAES_CR_MODE_SHIFT);

		/* Enable SAES: the key schedule derivation starts immediately */
		mmio_setbits_32(ctx->base + _SAES_CR, _SAES_CR_EN);

		/* Wait Computation completed */
		ret = wait_computation_completed(ctx->base);
		if (ret != 0) {
			return ret;
		}

		clear_computation_completed(ctx->base);

		/* Set Mode 3 (decryption) back for the actual processing */
		mmio_clrsetbits_32(ctx->base + _SAES_CR, _SAES_CR_MODE_MASK,
				   _SAES_CR_MODE_DEC << _SAES_CR_MODE_SHIFT);
	}

	return 0;
}
318 
319 static int save_context(struct stm32_saes_context *ctx)
320 {
321 	if ((mmio_read_32(ctx->base + _SAES_SR) & _SAES_SR_CCF) != 0U) {
322 		/* Device should not be in a processing phase */
323 		return -EINVAL;
324 	}
325 
326 	/* Save CR */
327 	ctx->cr = mmio_read_32(ctx->base + _SAES_CR);
328 
329 	/* If chaining mode need to save current IV */
330 	if (does_chaining_mode_need_iv(ctx->cr)) {
331 		uint8_t i;
332 
333 		/* Save IV */
334 		for (i = 0U; i < AES_IVSIZE / sizeof(uint32_t); i++) {
335 			ctx->iv[i] = mmio_read_32(ctx->base + _SAES_IVR0 + i * sizeof(uint32_t));
336 		}
337 	}
338 
339 	/* Disable the SAES peripheral */
340 	mmio_clrbits_32(ctx->base + _SAES_CR, _SAES_CR_EN);
341 
342 	return 0;
343 }
344 
345 /* To resume the processing of a message */
/*
 * To resume the processing of a message: reset the IP, restore CR, the key
 * (with decrypt key preparation when needed) and the IV saved by
 * save_context(), then re-enable the peripheral.
 *
 * Return 0 on success, a negative errno value otherwise.
 */
static int restore_context(struct stm32_saes_context *ctx)
{
	int ret;

	/* IP should be disabled */
	if ((mmio_read_32(ctx->base + _SAES_CR) & _SAES_CR_EN) != 0U) {
		VERBOSE("%s: Device is still enabled\n", __func__);
		return -EINVAL;
	}

	/* Reset internal state */
	mmio_setbits_32(ctx->base + _SAES_CR, _SAES_CR_IPRST);

	/* Restore the _SAES_CR (this write also clears IPRST) */
	mmio_write_32(ctx->base + _SAES_CR, ctx->cr);

	/* Preparation decrypt key */
	ret = saes_prepare_key(ctx);
	if (ret != 0) {
		return ret;
	}

	saes_write_iv(ctx);

	/* Enable the SAES peripheral */
	mmio_setbits_32(ctx->base + _SAES_CR, _SAES_CR_EN);

	return 0;
}
375 
376 /**
377  * @brief Initialize SAES driver.
378  * @param None.
379  * @retval 0 if OK; negative value else.
380  */
381 int stm32_saes_driver_init(void)
382 {
383 	int err;
384 
385 	err = stm32_saes_parse_fdt(&saes_pdata);
386 	if (err != 0) {
387 		return err;
388 	}
389 
390 	clk_enable(saes_pdata.clock_id);
391 	if (stm32mp_reset_assert(saes_pdata.reset_id, TIMEOUT_US_1MS) != 0) {
392 		panic();
393 	}
394 
395 	udelay(SAES_RESET_DELAY);
396 	if (stm32mp_reset_deassert(saes_pdata.reset_id, TIMEOUT_US_1MS) != 0) {
397 		panic();
398 	}
399 
400 	return 0;
401 }
402 
403 /**
404  * @brief Start a AES computation.
405  * @param ctx: SAES process context
406  * @param is_dec: true if decryption, false if encryption
407  * @param ch_mode: define the chaining mode
408  * @param key_select: define where the key comes from.
409  * @param key: pointer to key (if key_select is KEY_SOFT, else unused)
410  * @param key_size: key size
411  * @param iv: pointer to initialization vectore (unsed if ch_mode is ECB)
412  * @param iv_size: iv size
413  * @note this function doesn't access to hardware but store in ctx the values
414  *
415  * @retval 0 if OK; negative value else.
416  */
int stm32_saes_init(struct stm32_saes_context *ctx, bool is_dec,
		    enum stm32_saes_chaining_mode ch_mode, enum stm32_saes_key_selection key_select,
		    const void *key, size_t key_size, const void *iv, size_t iv_size)
{
	unsigned int i;
	const uint32_t *iv_u32;
	const uint32_t *key_u32;

	/* Fresh session: no associated data nor payload processed yet */
	ctx->assoc_len = 0U;
	ctx->load_len = 0U;

	ctx->base = saes_pdata.base;
	ctx->cr = _SAES_CR_RESET_VALUE;

	/* We want buffer to be u32 aligned */
	assert((uintptr_t)key % __alignof__(uint32_t) == 0);
	assert((uintptr_t)iv % __alignof__(uint32_t) == 0);

	iv_u32 = iv;
	key_u32 = key;

	/*
	 * NOTE(review): throughout this function the mmio_* accessors operate
	 * on the in-memory shadow register ctx->cr, not on the peripheral;
	 * the hardware CR is only written later by restore_context().
	 */
	if (is_dec) {
		/* Save Mode 3 = decrypt */
		mmio_clrsetbits_32((uintptr_t)&(ctx->cr), _SAES_CR_MODE_MASK,
				   _SAES_CR_MODE_DEC << _SAES_CR_MODE_SHIFT);
	} else {
		/* Save Mode 1 = crypt */
		mmio_clrsetbits_32((uintptr_t)&(ctx->cr), _SAES_CR_MODE_MASK,
				   _SAES_CR_MODE_ENC << _SAES_CR_MODE_SHIFT);
	}

	/* Save chaining mode */
	switch (ch_mode) {
	case STM32_SAES_MODE_ECB:
		SET_CHAINING_MODE(ECB, (uintptr_t)&(ctx->cr));
		break;
	case STM32_SAES_MODE_CBC:
		SET_CHAINING_MODE(CBC, (uintptr_t)&(ctx->cr));
		break;
	case STM32_SAES_MODE_CTR:
		SET_CHAINING_MODE(CTR, (uintptr_t)&(ctx->cr));
		break;
	case STM32_SAES_MODE_GCM:
		SET_CHAINING_MODE(GCM, (uintptr_t)&(ctx->cr));
		break;
	case STM32_SAES_MODE_CCM:
		SET_CHAINING_MODE(CCM, (uintptr_t)&(ctx->cr));
		break;
	default:
		return -EINVAL;
	}

	/* We will use HW Byte swap (_SAES_CR_DATATYPE_BYTE) for data.
	 * so we won't need to
	 * htobe32(data) before write to DINR
	 * nor
	 * be32toh after reading from DOUTR
	 *
	 * But note that wrap key only accept _SAES_CR_DATATYPE_NONE
	 */
	mmio_clrsetbits_32((uintptr_t)&(ctx->cr), _SAES_CR_DATATYPE_MASK,
			   _SAES_CR_DATATYPE_BYTE << _SAES_CR_DATATYPE_SHIFT);

	/* Configure keysize: only AES-128 and AES-256 are supported */
	switch (key_size) {
	case AES_KEYSIZE_128:
		mmio_clrbits_32((uintptr_t)&(ctx->cr), _SAES_CR_KEYSIZE);
		break;
	case AES_KEYSIZE_256:
		mmio_setbits_32((uintptr_t)&(ctx->cr), _SAES_CR_KEYSIZE);
		break;
	default:
		return -EINVAL;
	}

	/* Configure key selection; only a SOFT key is copied into ctx->key */
	switch (key_select) {
	case STM32_SAES_KEY_SOFT:
		mmio_clrsetbits_32((uintptr_t)&(ctx->cr), _SAES_CR_KEYSEL_MASK,
				   _SAES_CR_KEYSEL_SOFT << _SAES_CR_KEYSEL_SHIFT);
		/* Save key */
		switch (key_size) {
		case AES_KEYSIZE_128:
			/* First 16 bytes == 4 u32 */
			for (i = 0U; i < AES_KEYSIZE_128 / sizeof(uint32_t); i++) {
				mmio_write_32((uintptr_t)(ctx->key + i), htobe32(key_u32[3 - i]));
				/* /!\ we save the key in HW byte order
				 * and word order : key[i] is for _SAES_KEYRi
				 */
			}
			break;
		case AES_KEYSIZE_256:
			for (i = 0U; i < AES_KEYSIZE_256 / sizeof(uint32_t); i++) {
				mmio_write_32((uintptr_t)(ctx->key + i), htobe32(key_u32[7 - i]));
				/* /!\ we save the key in HW byte order
				 * and word order : key[i] is for _SAES_KEYRi
				 */
			}
			break;
		default:
			return -EINVAL;
		}

		break;
	case STM32_SAES_KEY_DHU:
		mmio_clrsetbits_32((uintptr_t)&(ctx->cr), _SAES_CR_KEYSEL_MASK,
				   _SAES_CR_KEYSEL_DHUK << _SAES_CR_KEYSEL_SHIFT);
		break;
	case STM32_SAES_KEY_BH:
		mmio_clrsetbits_32((uintptr_t)&(ctx->cr), _SAES_CR_KEYSEL_MASK,
				   _SAES_CR_KEYSEL_BHK << _SAES_CR_KEYSEL_SHIFT);
		break;
	case STM32_SAES_KEY_BHU_XOR_BH:
		mmio_clrsetbits_32((uintptr_t)&(ctx->cr), _SAES_CR_KEYSEL_MASK,
				   _SAES_CR_KEYSEL_BHU_XOR_BH_K << _SAES_CR_KEYSEL_SHIFT);
		break;
	case STM32_SAES_KEY_WRAPPED:
		/* NOTE(review): wrapped key also selects KEYSEL_SOFT here;
		 * KEYMOD is not changed — confirm against reference manual.
		 */
		mmio_clrsetbits_32((uintptr_t)&(ctx->cr), _SAES_CR_KEYSEL_MASK,
				   _SAES_CR_KEYSEL_SOFT << _SAES_CR_KEYSEL_SHIFT);
		break;

	default:
		return -EINVAL;
	}

	/* Save IV */
	if (ch_mode != STM32_SAES_MODE_ECB) {
		if ((iv == NULL) || (iv_size != AES_IVSIZE)) {
			return -EINVAL;
		}

		for (i = 0U; i < AES_IVSIZE / sizeof(uint32_t); i++) {
			mmio_write_32((uintptr_t)(ctx->iv + i), htobe32(iv_u32[3 - i]));
			/* /!\ We save the iv in HW byte order */
		}
	}

	return saes_start(ctx);
}
556 
557 /**
 * @brief Update (or start) an AES authentication process of associated data (CCM or GCM).
559  * @param ctx: SAES process context
560  * @param last_block: true if last assoc data block
561  * @param data: pointer to associated data
562  * @param data_size: data size
563  *
564  * @retval 0 if OK; negative value else.
565  */
566 int stm32_saes_update_assodata(struct stm32_saes_context *ctx, bool last_block,
567 			       uint8_t *data, size_t data_size)
568 {
569 	int ret;
570 	uint32_t *data_u32;
571 	unsigned int i = 0U;
572 
573 	/* We want buffers to be u32 aligned */
574 	assert((uintptr_t)data % __alignof__(uint32_t) == 0);
575 	data_u32 = (uint32_t *)data;
576 
577 	/* Init phase */
578 	ret = restore_context(ctx);
579 	if (ret != 0) {
580 		goto out;
581 	}
582 
583 	ret = wait_computation_completed(ctx->base);
584 	if (ret != 0) {
585 		return ret;
586 	}
587 
588 	clear_computation_completed(ctx->base);
589 
590 	if ((data == NULL) || (data_size == 0U)) {
591 		/* No associated data */
592 		/* ret already = 0 */
593 		goto out;
594 	}
595 
596 	/* There is an header/associated data phase */
597 	mmio_clrsetbits_32(ctx->base + _SAES_CR, _SAES_CR_GCMPH_MASK,
598 			   _SAES_CR_GCMPH_HEADER << _SAES_CR_GCMPH_SHIFT);
599 
600 	/* Enable the SAES peripheral */
601 	mmio_setbits_32(ctx->base + _SAES_CR, _SAES_CR_EN);
602 
603 	while (i < round_down(data_size, AES_BLOCK_SIZE)) {
604 		unsigned int w; /* Word index */
605 
606 		w = i / sizeof(uint32_t);
607 		/* No need to htobe() as we configure the HW to swap bytes */
608 		mmio_write_32(ctx->base + _SAES_DINR, data_u32[w + 0U]);
609 		mmio_write_32(ctx->base + _SAES_DINR, data_u32[w + 1U]);
610 		mmio_write_32(ctx->base + _SAES_DINR, data_u32[w + 2U]);
611 		mmio_write_32(ctx->base + _SAES_DINR, data_u32[w + 3U]);
612 
613 		ret = wait_computation_completed(ctx->base);
614 		if (ret != 0) {
615 			goto out;
616 		}
617 
618 		clear_computation_completed(ctx->base);
619 
620 		/* Process next block */
621 		i += AES_BLOCK_SIZE;
622 		ctx->assoc_len += AES_BLOCK_SIZE_BIT;
623 	}
624 
625 	/* Manage last block if not a block size multiple */
626 	if ((last_block) && (i < data_size)) {
627 		/* We don't manage unaligned last block yet */
628 		ret = -ENODEV;
629 		goto out;
630 	}
631 
632 out:
633 	if (ret != 0) {
634 		saes_end(ctx, ret);
635 	}
636 
637 	return ret;
638 }
639 
640 /**
 * @brief Update (or start) an AES authenticate and de/encrypt with payload data (CCM or GCM).
642  * @param ctx: SAES process context
643  * @param last_block: true if last payload data block
644  * @param data_in: pointer to payload
645  * @param data_out: pointer where to save de/encrypted payload
646  * @param data_size: payload size
647  *
648  * @retval 0 if OK; negative value else.
649  */
int stm32_saes_update_load(struct stm32_saes_context *ctx, bool last_block,
			   uint8_t *data_in, uint8_t *data_out, size_t data_size)
{
	int ret = 0;
	uint32_t *data_in_u32;
	uint32_t *data_out_u32;
	unsigned int i = 0U;
	uint32_t prev_cr;

	/* We want buffers to be u32 aligned */
	assert((uintptr_t)data_in % __alignof__(uint32_t) == 0);
	assert((uintptr_t)data_out % __alignof__(uint32_t) == 0);
	data_in_u32 = (uint32_t *)data_in;
	data_out_u32 = (uint32_t *)data_out;

	/* Sample CR before switching phase, to detect a skipped header phase */
	prev_cr = mmio_read_32(ctx->base + _SAES_CR);

	if ((data_in == NULL) || (data_size == 0U)) {
		/* there is no data */
		goto out;
	}

	/* There is a load phase */
	mmio_clrsetbits_32(ctx->base + _SAES_CR, _SAES_CR_GCMPH_MASK,
			   _SAES_CR_GCMPH_PAYLOAD << _SAES_CR_GCMPH_SHIFT);

	if ((prev_cr & _SAES_CR_GCMPH_MASK) ==
	    (_SAES_CR_GCMPH_INIT << _SAES_CR_GCMPH_SHIFT)) {
		/* Still in initialization phase, no header
		 * We need to enable the SAES peripheral
		 */
		mmio_setbits_32(ctx->base + _SAES_CR, _SAES_CR_EN);
	}

	/* Process all complete 128-bit blocks: 4 words in, 4 words out */
	while (i < round_down(data_size, AES_BLOCK_SIZE)) {
		unsigned int w; /* Word index */

		w = i / sizeof(uint32_t);
		/* No need to htobe() as we configure the HW to swap bytes */
		mmio_write_32(ctx->base + _SAES_DINR, data_in_u32[w + 0U]);
		mmio_write_32(ctx->base + _SAES_DINR, data_in_u32[w + 1U]);
		mmio_write_32(ctx->base + _SAES_DINR, data_in_u32[w + 2U]);
		mmio_write_32(ctx->base + _SAES_DINR, data_in_u32[w + 3U]);

		ret = wait_computation_completed(ctx->base);
		if (ret != 0) {
			goto out;
		}

		/* No need to htobe() as we configure the HW to swap bytes */
		data_out_u32[w + 0U] = mmio_read_32(ctx->base + _SAES_DOUTR);
		data_out_u32[w + 1U] = mmio_read_32(ctx->base + _SAES_DOUTR);
		data_out_u32[w + 2U] = mmio_read_32(ctx->base + _SAES_DOUTR);
		data_out_u32[w + 3U] = mmio_read_32(ctx->base + _SAES_DOUTR);

		clear_computation_completed(ctx->base);

		/* Process next block */
		i += AES_BLOCK_SIZE;
		ctx->load_len += AES_BLOCK_SIZE_BIT;
	}
	/* Manage last block if not a block size multiple:
	 * zero-pad the input to a full block, then copy back only the
	 * meaningful bytes of the output.
	 */
	if ((last_block) && (i < data_size)) {
		uint32_t block_in[AES_BLOCK_SIZE / sizeof(uint32_t)] = {0};
		uint32_t block_out[AES_BLOCK_SIZE / sizeof(uint32_t)] = {0};

		memcpy(block_in, data_in + i, data_size - i);

		/* No need to htobe() as we configure the HW to swap bytes */
		mmio_write_32(ctx->base + _SAES_DINR, block_in[0U]);
		mmio_write_32(ctx->base + _SAES_DINR, block_in[1U]);
		mmio_write_32(ctx->base + _SAES_DINR, block_in[2U]);
		mmio_write_32(ctx->base + _SAES_DINR, block_in[3U]);

		ret = wait_computation_completed(ctx->base);
		if (ret != 0) {
			VERBOSE("%s %d\n", __func__, __LINE__);
			goto out;
		}

		/* No need to htobe() as we configure the HW to swap bytes */
		block_out[0U] = mmio_read_32(ctx->base + _SAES_DOUTR);
		block_out[1U] = mmio_read_32(ctx->base + _SAES_DOUTR);
		block_out[2U] = mmio_read_32(ctx->base + _SAES_DOUTR);
		block_out[3U] = mmio_read_32(ctx->base + _SAES_DOUTR);

		clear_computation_completed(ctx->base);

		memcpy(data_out + i, block_out, data_size - i);

		/* load_len is accounted in bits, including the partial block */
		ctx->load_len += (data_size - i) * UINT8_BIT;
	}

out:
	if (ret != 0) {
		saes_end(ctx, ret);
	}

	return ret;
}
750 
751 /**
752  * @brief Get authentication tag for AES authenticated algorithms (CCM or GCM).
753  * @param ctx: SAES process context
754  * @param tag: pointer where to save the tag
 * @param tag_size: tag size
756  *
757  * @retval 0 if OK; negative value else.
758  */
int stm32_saes_final(struct stm32_saes_context *ctx, uint8_t *tag,
		     size_t tag_size)
{
	int ret;
	uint32_t tag_u32[4];
	uint32_t prev_cr;

	/* Sample CR before switching phase, to detect a skipped header phase */
	prev_cr = mmio_read_32(ctx->base + _SAES_CR);

	mmio_clrsetbits_32(ctx->base + _SAES_CR, _SAES_CR_GCMPH_MASK,
			   _SAES_CR_GCMPH_FINAL << _SAES_CR_GCMPH_SHIFT);

	if ((prev_cr & _SAES_CR_GCMPH_MASK) == (_SAES_CR_GCMPH_INIT << _SAES_CR_GCMPH_SHIFT)) {
		/* Still in initialization phase, no header
		 * We need to enable the SAES peripheral
		 */
		mmio_setbits_32(ctx->base + _SAES_CR, _SAES_CR_EN);
	}

	/* Feed the lengths block: 64-bit assoc length then 64-bit payload
	 * length (both in bits; upper words are zero as lengths are 32-bit).
	 * No need to htobe() as we configure the HW to swap bytes.
	 */
	mmio_write_32(ctx->base + _SAES_DINR, 0);
	mmio_write_32(ctx->base + _SAES_DINR, ctx->assoc_len);
	mmio_write_32(ctx->base + _SAES_DINR, 0);
	mmio_write_32(ctx->base + _SAES_DINR, ctx->load_len);

	ret = wait_computation_completed(ctx->base);
	if (ret != 0) {
		goto out;
	}

	/* No need to htobe() as we configure the HW to swap bytes */
	tag_u32[0] = mmio_read_32(ctx->base + _SAES_DOUTR);
	tag_u32[1] = mmio_read_32(ctx->base + _SAES_DOUTR);
	tag_u32[2] = mmio_read_32(ctx->base + _SAES_DOUTR);
	tag_u32[3] = mmio_read_32(ctx->base + _SAES_DOUTR);

	clear_computation_completed(ctx->base);

	/* Copy at most 16 bytes of tag; a shorter tag_size truncates it */
	memcpy(tag, tag_u32, MIN(sizeof(tag_u32), tag_size));

out:
	/* Always terminate the session, with reset on error */
	saes_end(ctx, ret);

	return ret;
}
804 
805 /**
806  * @brief Update (or start) a AES de/encrypt process (ECB, CBC or CTR).
807  * @param ctx: SAES process context
808  * @param last_block: true if last payload data block
809  * @param data_in: pointer to payload
810  * @param data_out: pointer where to save de/encrypted payload
811  * @param data_size: payload size
812  *
813  * @retval 0 if OK; negative value else.
814  */
int stm32_saes_update(struct stm32_saes_context *ctx, bool last_block,
		      uint8_t *data_in, uint8_t *data_out, size_t data_size)
{
	int ret;
	uint32_t *data_in_u32;
	uint32_t *data_out_u32;
	unsigned int i = 0U;

	/* We want buffers to be u32 aligned */
	assert((uintptr_t)data_in % __alignof__(uint32_t) == 0);
	assert((uintptr_t)data_out % __alignof__(uint32_t) == 0);
	data_in_u32 = (uint32_t *)data_in;
	data_out_u32 = (uint32_t *)data_out;

	/* Intermediate updates must be block-size aligned */
	if ((!last_block) &&
	    (round_down(data_size, AES_BLOCK_SIZE) != data_size)) {
		ERROR("%s: non last block must be multiple of 128 bits\n",
		      __func__);
		ret = -EINVAL;
		goto out;
	}

	/* In CBC encryption we need to manage specifically last 2 128bits
	 * blocks if total size is not a block size aligned
	 * work TODO. Currently return ENODEV.
	 * Moreover as we need to know last 2 blocks, if unaligned and
	 * called with less than two blocks, return -EINVAL.
	 */
	if (last_block && IS_CHAINING_MODE(CBC, ctx->cr) && is_encrypt(ctx->cr) &&
	    (round_down(data_size, AES_BLOCK_SIZE) != data_size)) {
		if (data_size < AES_BLOCK_SIZE * 2U) {
			ERROR("if CBC, last part size should be at least 2 * AES_BLOCK_SIZE\n");
			ret = -EINVAL;
			goto out;
		}
		/* Moreover the CBC specific padding for encrypt is not yet implemented */
		ret = -ENODEV;
		goto out;
	}

	/* Reprogram CR/key/IV from the context saved by the previous call */
	ret = restore_context(ctx);
	if (ret != 0) {
		goto out;
	}

	/* Process all complete 128-bit blocks: 4 words in, 4 words out */
	while (i < round_down(data_size, AES_BLOCK_SIZE)) {
		unsigned int w; /* Word index */

		w = i / sizeof(uint32_t);
		/* No need to htobe() as we configure the HW to swap bytes */
		mmio_write_32(ctx->base + _SAES_DINR, data_in_u32[w + 0U]);
		mmio_write_32(ctx->base + _SAES_DINR, data_in_u32[w + 1U]);
		mmio_write_32(ctx->base + _SAES_DINR, data_in_u32[w + 2U]);
		mmio_write_32(ctx->base + _SAES_DINR, data_in_u32[w + 3U]);

		ret = wait_computation_completed(ctx->base);
		if (ret != 0) {
			goto out;
		}

		/* No need to htobe() as we configure the HW to swap bytes */
		data_out_u32[w + 0U] = mmio_read_32(ctx->base + _SAES_DOUTR);
		data_out_u32[w + 1U] = mmio_read_32(ctx->base + _SAES_DOUTR);
		data_out_u32[w + 2U] = mmio_read_32(ctx->base + _SAES_DOUTR);
		data_out_u32[w + 3U] = mmio_read_32(ctx->base + _SAES_DOUTR);

		clear_computation_completed(ctx->base);

		/* Process next block */
		i += AES_BLOCK_SIZE;
	}
	/* Manage last block if not a block size multiple */

	if ((last_block) && (i < data_size)) {
		/* In and out buffer have same size so should be AES_BLOCK_SIZE multiple */
		ret = -ENODEV;
		goto out;
	}

	/* Save IV and CR so the next update call can resume the stream */
	if (!last_block) {
		ret = save_context(ctx);
	}

out:
	/* If last block or error, end of SAES process */
	if (last_block || (ret != 0)) {
		saes_end(ctx, ret);
	}

	return ret;
}
906