xref: /rk3399_ARM-atf/drivers/st/crypto/stm32_saes.c (revision f4b8470feee4437fb3984baeee8c61ed91f63f51)
1 /*
2  * Copyright (c) 2022, STMicroelectronics - All Rights Reserved
3  *
4  * SPDX-License-Identifier: BSD-3-Clause
5  */
#include <assert.h>
#include <endian.h>
#include <errno.h>
#include <stdint.h>
#include <string.h>
10 
11 #include <drivers/clk.h>
12 #include <drivers/delay_timer.h>
13 #include <drivers/st/stm32_saes.h>
14 #include <drivers/st/stm32mp_reset.h>
15 #include <lib/mmio.h>
16 #include <lib/utils_def.h>
17 #include <libfdt.h>
18 
19 #include <platform_def.h>
20 
21 #define UINT8_BIT			8U
22 #define AES_BLOCK_SIZE_BIT		128U
23 #define AES_BLOCK_SIZE			(AES_BLOCK_SIZE_BIT / UINT8_BIT)
24 
25 #define AES_KEYSIZE_128			16U
26 #define AES_KEYSIZE_256			32U
27 #define AES_IVSIZE			16U
28 
29 /* SAES control register */
30 #define _SAES_CR			0x0U
31 /* SAES status register */
32 #define _SAES_SR			0x04U
33 /* SAES data input register */
34 #define _SAES_DINR			0x08U
35 /* SAES data output register */
36 #define _SAES_DOUTR			0x0CU
37 /* SAES key registers [0-3] */
38 #define _SAES_KEYR0			0x10U
39 #define _SAES_KEYR1			0x14U
40 #define _SAES_KEYR2			0x18U
41 #define _SAES_KEYR3			0x1CU
42 /* SAES initialization vector registers [0-3] */
43 #define _SAES_IVR0			0x20U
44 #define _SAES_IVR1			0x24U
45 #define _SAES_IVR2			0x28U
46 #define _SAES_IVR3			0x2CU
47 /* SAES key registers [4-7] */
48 #define _SAES_KEYR4			0x30U
49 #define _SAES_KEYR5			0x34U
50 #define _SAES_KEYR6			0x38U
51 #define _SAES_KEYR7			0x3CU
52 /* SAES suspend registers [0-7] */
53 #define _SAES_SUSPR0			0x40U
54 #define _SAES_SUSPR1			0x44U
55 #define _SAES_SUSPR2			0x48U
56 #define _SAES_SUSPR3			0x4CU
57 #define _SAES_SUSPR4			0x50U
58 #define _SAES_SUSPR5			0x54U
59 #define _SAES_SUSPR6			0x58U
60 #define _SAES_SUSPR7			0x5CU
61 /* SAES Interrupt Enable Register */
62 #define _SAES_IER			0x300U
63 /* SAES Interrupt Status Register */
64 #define _SAES_ISR			0x304U
65 /* SAES Interrupt Clear Register */
66 #define _SAES_ICR			0x308U
67 
68 /* SAES control register fields */
69 #define _SAES_CR_RESET_VALUE		0x0U
70 #define _SAES_CR_IPRST			BIT(31)
71 #define _SAES_CR_KEYSEL_MASK		GENMASK(30, 28)
72 #define _SAES_CR_KEYSEL_SHIFT		28U
73 #define _SAES_CR_KEYSEL_SOFT		0x0U
74 #define _SAES_CR_KEYSEL_DHUK		0x1U
75 #define _SAES_CR_KEYSEL_BHK		0x2U
76 #define _SAES_CR_KEYSEL_BHU_XOR_BH_K	0x4U
77 #define _SAES_CR_KEYSEL_TEST		0x7U
78 #define _SAES_CR_KSHAREID_MASK		GENMASK(27, 26)
79 #define _SAES_CR_KSHAREID_SHIFT		26U
80 #define _SAES_CR_KSHAREID_CRYP		0x0U
81 #define _SAES_CR_KEYMOD_MASK		GENMASK(25, 24)
82 #define _SAES_CR_KEYMOD_SHIFT		24U
83 #define _SAES_CR_KEYMOD_NORMAL		0x0U
84 #define _SAES_CR_KEYMOD_WRAPPED		0x1U
85 #define _SAES_CR_KEYMOD_SHARED		0x2U
86 #define _SAES_CR_NPBLB_MASK		GENMASK(23, 20)
87 #define _SAES_CR_NPBLB_SHIFT		20U
88 #define _SAES_CR_KEYPROT		BIT(19)
89 #define _SAES_CR_KEYSIZE		BIT(18)
90 #define _SAES_CR_GCMPH_MASK		GENMASK(14, 13)
91 #define _SAES_CR_GCMPH_SHIFT		13U
92 #define _SAES_CR_GCMPH_INIT		0U
93 #define _SAES_CR_GCMPH_HEADER		1U
94 #define _SAES_CR_GCMPH_PAYLOAD		2U
95 #define _SAES_CR_GCMPH_FINAL		3U
96 #define _SAES_CR_DMAOUTEN		BIT(12)
97 #define _SAES_CR_DMAINEN		BIT(11)
98 #define _SAES_CR_CHMOD_MASK		(BIT(16) | GENMASK(6, 5))
99 #define _SAES_CR_CHMOD_SHIFT		5U
100 #define _SAES_CR_CHMOD_ECB		0x0U
101 #define _SAES_CR_CHMOD_CBC		0x1U
102 #define _SAES_CR_CHMOD_CTR		0x2U
103 #define _SAES_CR_CHMOD_GCM		0x3U
104 #define _SAES_CR_CHMOD_GMAC		0x3U
105 #define _SAES_CR_CHMOD_CCM		0x800U
106 #define _SAES_CR_MODE_MASK		GENMASK(4, 3)
107 #define _SAES_CR_MODE_SHIFT		3U
108 #define _SAES_CR_MODE_ENC		0U
109 #define _SAES_CR_MODE_KEYPREP		1U
110 #define _SAES_CR_MODE_DEC		2U
111 #define _SAES_CR_DATATYPE_MASK		GENMASK(2, 1)
112 #define _SAES_CR_DATATYPE_SHIFT		1U
113 #define _SAES_CR_DATATYPE_NONE		0U
114 #define _SAES_CR_DATATYPE_HALF_WORD	1U
115 #define _SAES_CR_DATATYPE_BYTE		2U
116 #define _SAES_CR_DATATYPE_BIT		3U
117 #define _SAES_CR_EN			BIT(0)
118 
119 /* SAES status register fields */
120 #define _SAES_SR_KEYVALID		BIT(7)
121 #define _SAES_SR_BUSY			BIT(3)
122 #define _SAES_SR_WRERR			BIT(2)
123 #define _SAES_SR_RDERR			BIT(1)
124 #define _SAES_SR_CCF			BIT(0)
125 
126 /* SAES interrupt registers fields */
127 #define _SAES_I_RNG_ERR			BIT(3)
128 #define _SAES_I_KEY_ERR			BIT(2)
129 #define _SAES_I_RW_ERR			BIT(1)
130 #define _SAES_I_CC			BIT(0)
131 
132 #define SAES_TIMEOUT_US			100000U
133 #define TIMEOUT_US_1MS			1000U
134 #define SAES_RESET_DELAY		20U
135 
136 #define IS_CHAINING_MODE(mod, cr) \
137 	(((cr) & _SAES_CR_CHMOD_MASK) == (_SAES_CR_CHMOD_##mod << _SAES_CR_CHMOD_SHIFT))
138 
139 #define SET_CHAINING_MODE(mod, cr) \
140 	mmio_clrsetbits_32((cr), _SAES_CR_CHMOD_MASK, _SAES_CR_CHMOD_##mod << _SAES_CR_CHMOD_SHIFT)
141 
142 #define pragma weak stm32_saes_get_platdata
143 
144 static struct stm32_saes_platdata saes_pdata;
145 
146 int stm32_saes_get_platdata(struct stm32_saes_platdata *pdata)
147 {
148 	return -ENODEV;
149 }
150 
151 static int stm32_saes_parse_fdt(struct stm32_saes_platdata *pdata)
152 {
153 	int node;
154 	struct dt_node_info info;
155 	void *fdt;
156 
157 	if (fdt_get_address(&fdt) == 0) {
158 		return -FDT_ERR_NOTFOUND;
159 	}
160 
161 	node = dt_get_node(&info, -1, DT_SAES_COMPAT);
162 	if (node < 0) {
163 		ERROR("No SAES entry in DT\n");
164 		return -FDT_ERR_NOTFOUND;
165 	}
166 
167 	if (info.status == DT_DISABLED) {
168 		return -FDT_ERR_NOTFOUND;
169 	}
170 
171 	if ((info.base == 0U) || (info.clock < 0) || (info.reset < 0)) {
172 		return -FDT_ERR_BADVALUE;
173 	}
174 
175 	pdata->base = (uintptr_t)info.base;
176 	pdata->clock_id = (unsigned long)info.clock;
177 	pdata->reset_id = (unsigned int)info.reset;
178 
179 	return 0;
180 }
181 
182 static bool does_chaining_mode_need_iv(uint32_t cr)
183 {
184 	return !(IS_CHAINING_MODE(ECB, cr));
185 }
186 
187 static bool is_encrypt(uint32_t cr)
188 {
189 	return (cr & _SAES_CR_MODE_MASK) == (_SAES_CR_MODE_ENC << _SAES_CR_MODE_SHIFT);
190 }
191 
192 static bool is_decrypt(uint32_t cr)
193 {
194 	return (cr & _SAES_CR_MODE_MASK) == (_SAES_CR_MODE_DEC << _SAES_CR_MODE_SHIFT);
195 }
196 
197 static int wait_computation_completed(uintptr_t base)
198 {
199 	uint64_t timeout = timeout_init_us(SAES_TIMEOUT_US);
200 
201 	while ((mmio_read_32(base + _SAES_SR) & _SAES_SR_CCF) != _SAES_SR_CCF) {
202 		if (timeout_elapsed(timeout)) {
203 			WARN("%s: timeout\n", __func__);
204 			return -ETIMEDOUT;
205 		}
206 	}
207 
208 	return 0;
209 }
210 
/* Acknowledge the computation complete event: CCF is cleared by writing 1
 * to the corresponding bit in the interrupt clear register.
 */
static void clear_computation_completed(uintptr_t base)
{
	mmio_setbits_32(base + _SAES_ICR, _SAES_I_CC);
}
215 
216 static int saes_start(struct stm32_saes_context *ctx)
217 {
218 	uint64_t timeout;
219 
220 	/* Reset IP */
221 	mmio_setbits_32(ctx->base + _SAES_CR, _SAES_CR_IPRST);
222 	udelay(SAES_RESET_DELAY);
223 	mmio_clrbits_32(ctx->base + _SAES_CR, _SAES_CR_IPRST);
224 
225 	timeout = timeout_init_us(SAES_TIMEOUT_US);
226 	while ((mmio_read_32(ctx->base + _SAES_SR) & _SAES_SR_BUSY) == _SAES_SR_BUSY) {
227 		if (timeout_elapsed(timeout)) {
228 			WARN("%s: timeout\n", __func__);
229 			return -ETIMEDOUT;
230 		}
231 	}
232 
233 	return 0;
234 }
235 
236 static void saes_end(struct stm32_saes_context *ctx, int prev_error)
237 {
238 	if (prev_error != 0) {
239 		/* Reset IP */
240 		mmio_setbits_32(ctx->base + _SAES_CR, _SAES_CR_IPRST);
241 		udelay(SAES_RESET_DELAY);
242 		mmio_clrbits_32(ctx->base + _SAES_CR, _SAES_CR_IPRST);
243 	}
244 
245 	/* Disable the SAES peripheral */
246 	mmio_clrbits_32(ctx->base + _SAES_CR, _SAES_CR_EN);
247 }
248 
249 static void saes_write_iv(struct stm32_saes_context *ctx)
250 {
251 	/* If chaining mode need to restore IV */
252 	if (does_chaining_mode_need_iv(ctx->cr)) {
253 		uint8_t i;
254 
255 		/* Restore the _SAES_IVRx */
256 		for (i = 0U; i < AES_IVSIZE / sizeof(uint32_t); i++) {
257 			mmio_write_32(ctx->base + _SAES_IVR0 + i * sizeof(uint32_t), ctx->iv[i]);
258 		}
259 	}
260 
261 }
262 
/* Load the software key from the context into the hardware key registers.
 * Only applies when KEYSEL selects a software-provided key; hardware-derived
 * keys (DHUK, BHK, ...) are loaded by the IP itself.
 * ctx->key[] is already stored in HW byte/word order (see stm32_saes_init),
 * so words are written as-is: key[i] goes to _SAES_KEYRi.
 */
static void saes_write_key(struct stm32_saes_context *ctx)
{
	/* Restore the _SAES_KEYRx if SOFTWARE key */
	if ((ctx->cr & _SAES_CR_KEYSEL_MASK) == (_SAES_CR_KEYSEL_SOFT << _SAES_CR_KEYSEL_SHIFT)) {
		uint8_t i;

		/* First 128 bits: _SAES_KEYR0..3 */
		for (i = 0U; i < AES_KEYSIZE_128 / sizeof(uint32_t); i++) {
			mmio_write_32(ctx->base + _SAES_KEYR0 + i * sizeof(uint32_t), ctx->key[i]);
		}

		/* For a 256-bit key, the upper 128 bits go to _SAES_KEYR4..7 */
		if ((ctx->cr & _SAES_CR_KEYSIZE) == _SAES_CR_KEYSIZE) {
			for (i = 0U; i < (AES_KEYSIZE_256 / 2U) / sizeof(uint32_t); i++) {
				mmio_write_32(ctx->base + _SAES_KEYR4 + i * sizeof(uint32_t),
					      ctx->key[i + 4U]);
			}
		}
	}
}
281 
/* Program the key into the peripheral and, for ECB/CBC decryption, run the
 * key preparation (Mode 2) pass that derives the decryption round keys.
 * Returns 0 on success, a negative value on timeout.
 */
static int saes_prepare_key(struct stm32_saes_context *ctx)
{
	/* Disable the SAES peripheral: key/keysize may only change while off */
	mmio_clrbits_32(ctx->base + _SAES_CR, _SAES_CR_EN);

	/* Set key size */
	if ((ctx->cr & _SAES_CR_KEYSIZE) != 0U) {
		mmio_setbits_32(ctx->base + _SAES_CR, _SAES_CR_KEYSIZE);
	} else {
		mmio_clrbits_32(ctx->base + _SAES_CR, _SAES_CR_KEYSIZE);
	}

	saes_write_key(ctx);

	/* For ECB/CBC decryption, key preparation mode must be selected to populate the key */
	if ((IS_CHAINING_MODE(ECB, ctx->cr) || IS_CHAINING_MODE(CBC, ctx->cr)) &&
	    is_decrypt(ctx->cr)) {
		int ret;

		/* Select Mode 2 (key preparation) */
		mmio_clrsetbits_32(ctx->base + _SAES_CR, _SAES_CR_MODE_MASK,
				   _SAES_CR_MODE_KEYPREP << _SAES_CR_MODE_SHIFT);

		/* Enable SAES: enabling triggers the key preparation run */
		mmio_setbits_32(ctx->base + _SAES_CR, _SAES_CR_EN);

		/* Wait Computation completed */
		ret = wait_computation_completed(ctx->base);
		if (ret != 0) {
			return ret;
		}

		clear_computation_completed(ctx->base);

		/* Set Mode 3 (decryption) for the actual data processing */
		mmio_clrsetbits_32(ctx->base + _SAES_CR, _SAES_CR_MODE_MASK,
				   _SAES_CR_MODE_DEC << _SAES_CR_MODE_SHIFT);
	}

	return 0;
}
323 
324 static int save_context(struct stm32_saes_context *ctx)
325 {
326 	if ((mmio_read_32(ctx->base + _SAES_SR) & _SAES_SR_CCF) != 0U) {
327 		/* Device should not be in a processing phase */
328 		return -EINVAL;
329 	}
330 
331 	/* Save CR */
332 	ctx->cr = mmio_read_32(ctx->base + _SAES_CR);
333 
334 	/* If chaining mode need to save current IV */
335 	if (does_chaining_mode_need_iv(ctx->cr)) {
336 		uint8_t i;
337 
338 		/* Save IV */
339 		for (i = 0U; i < AES_IVSIZE / sizeof(uint32_t); i++) {
340 			ctx->iv[i] = mmio_read_32(ctx->base + _SAES_IVR0 + i * sizeof(uint32_t));
341 		}
342 	}
343 
344 	/* Disable the SAES peripheral */
345 	mmio_clrbits_32(ctx->base + _SAES_CR, _SAES_CR_EN);
346 
347 	return 0;
348 }
349 
350 /* To resume the processing of a message */
/* To resume the processing of a message: reset the IP internal state, then
 * restore CR, re-derive the key if needed, restore the IV, and re-enable
 * the peripheral. The device must be disabled on entry (as left by
 * save_context() or saes_end()).
 * Returns 0 on success, a negative value otherwise.
 */
static int restore_context(struct stm32_saes_context *ctx)
{
	int ret;

	/* IP should be disabled */
	if ((mmio_read_32(ctx->base + _SAES_CR) & _SAES_CR_EN) != 0U) {
		VERBOSE("%s: Device is still enabled\n", __func__);
		return -EINVAL;
	}

	/* Reset internal state */
	mmio_setbits_32(ctx->base + _SAES_CR, _SAES_CR_IPRST);

	/* Restore the _SAES_CR (this write also clears IPRST) */
	mmio_write_32(ctx->base + _SAES_CR, ctx->cr);

	/* Preparation decrypt key (no-op unless ECB/CBC decryption) */
	ret = saes_prepare_key(ctx);
	if (ret != 0) {
		return ret;
	}

	saes_write_iv(ctx);

	/* Enable the SAES peripheral */
	mmio_setbits_32(ctx->base + _SAES_CR, _SAES_CR_EN);

	return 0;
}
380 
381 /**
382  * @brief Initialize SAES driver.
383  * @param None.
384  * @retval 0 if OK; negative value else.
385  */
int stm32_saes_driver_init(void)
{
	int err;

	/* Prefer device tree configuration; fall back to the (weak)
	 * platform-provided data when the DT has no usable SAES node.
	 */
	err = stm32_saes_parse_fdt(&saes_pdata);
	if (err != 0) {
		err = stm32_saes_get_platdata(&saes_pdata);
		if (err != 0) {
			return err;
		}
	}

	/* Clock the peripheral, then pulse its reset line.
	 * A reset failure is unrecoverable at this stage, hence panic().
	 */
	clk_enable(saes_pdata.clock_id);
	if (stm32mp_reset_assert(saes_pdata.reset_id, TIMEOUT_US_1MS) != 0) {
		panic();
	}

	udelay(SAES_RESET_DELAY);
	if (stm32mp_reset_deassert(saes_pdata.reset_id, TIMEOUT_US_1MS) != 0) {
		panic();
	}

	return 0;
}
410 
411 /**
412  * @brief Start a AES computation.
413  * @param ctx: SAES process context
414  * @param is_dec: true if decryption, false if encryption
415  * @param ch_mode: define the chaining mode
416  * @param key_select: define where the key comes from.
417  * @param key: pointer to key (if key_select is KEY_SOFT, else unused)
418  * @param key_size: key size
 * @param iv: pointer to initialization vector (unused if ch_mode is ECB)
420  * @param iv_size: iv size
421  * @note this function doesn't access to hardware but store in ctx the values
422  *
423  * @retval 0 if OK; negative value else.
424  */
int stm32_saes_init(struct stm32_saes_context *ctx, bool is_dec,
		    enum stm32_saes_chaining_mode ch_mode, enum stm32_saes_key_selection key_select,
		    const void *key, size_t key_size, const void *iv, size_t iv_size)
{
	unsigned int i;
	const uint32_t *iv_u32;
	const uint32_t *key_u32;

	/* Reset the GCM/CCM running lengths for a fresh message */
	ctx->assoc_len = 0U;
	ctx->load_len = 0U;

	ctx->base = saes_pdata.base;
	ctx->cr = _SAES_CR_RESET_VALUE;

	/* We want buffer to be u32 aligned */
	assert((uintptr_t)key % __alignof__(uint32_t) == 0);
	assert((uintptr_t)iv % __alignof__(uint32_t) == 0);

	iv_u32 = iv;
	key_u32 = key;

	/* NOTE: the mmio_* accessors below operate on &ctx->cr, i.e. on the
	 * in-memory shadow of the control register, not on the device. The
	 * shadow is written to hardware later by restore_context().
	 */
	if (is_dec) {
		/* Save Mode 3 = decrypt */
		mmio_clrsetbits_32((uintptr_t)&(ctx->cr), _SAES_CR_MODE_MASK,
				   _SAES_CR_MODE_DEC << _SAES_CR_MODE_SHIFT);
	} else {
		/* Save Mode 1 = crypt */
		mmio_clrsetbits_32((uintptr_t)&(ctx->cr), _SAES_CR_MODE_MASK,
				   _SAES_CR_MODE_ENC << _SAES_CR_MODE_SHIFT);
	}

	/* Save chaining mode */
	switch (ch_mode) {
	case STM32_SAES_MODE_ECB:
		SET_CHAINING_MODE(ECB, (uintptr_t)&(ctx->cr));
		break;
	case STM32_SAES_MODE_CBC:
		SET_CHAINING_MODE(CBC, (uintptr_t)&(ctx->cr));
		break;
	case STM32_SAES_MODE_CTR:
		SET_CHAINING_MODE(CTR, (uintptr_t)&(ctx->cr));
		break;
	case STM32_SAES_MODE_GCM:
		SET_CHAINING_MODE(GCM, (uintptr_t)&(ctx->cr));
		break;
	case STM32_SAES_MODE_CCM:
		SET_CHAINING_MODE(CCM, (uintptr_t)&(ctx->cr));
		break;
	default:
		return -EINVAL;
	}

	/* We will use HW Byte swap (_SAES_CR_DATATYPE_BYTE) for data.
	 * so we won't need to
	 * htobe32(data) before write to DINR
	 * nor
	 * be32toh after reading from DOUTR
	 *
	 * But note that wrap key only accept _SAES_CR_DATATYPE_NONE
	 */
	mmio_clrsetbits_32((uintptr_t)&(ctx->cr), _SAES_CR_DATATYPE_MASK,
			   _SAES_CR_DATATYPE_BYTE << _SAES_CR_DATATYPE_SHIFT);

	/* Configure keysize */
	switch (key_size) {
	case AES_KEYSIZE_128:
		mmio_clrbits_32((uintptr_t)&(ctx->cr), _SAES_CR_KEYSIZE);
		break;
	case AES_KEYSIZE_256:
		mmio_setbits_32((uintptr_t)&(ctx->cr), _SAES_CR_KEYSIZE);
		break;
	default:
		return -EINVAL;
	}

	/* Configure key */
	switch (key_select) {
	case STM32_SAES_KEY_SOFT:
		mmio_clrsetbits_32((uintptr_t)&(ctx->cr), _SAES_CR_KEYSEL_MASK,
				   _SAES_CR_KEYSEL_SOFT << _SAES_CR_KEYSEL_SHIFT);
		/* Save key */
		switch (key_size) {
		case AES_KEYSIZE_128:
			/* First 16 bytes == 4 u32 */
			for (i = 0U; i < AES_KEYSIZE_128 / sizeof(uint32_t); i++) {
				mmio_write_32((uintptr_t)(ctx->key + i), htobe32(key_u32[3 - i]));
				/* /!\ we save the key in HW byte order
				 * and word order : key[i] is for _SAES_KEYRi
				 */
			}
			break;
		case AES_KEYSIZE_256:
			for (i = 0U; i < AES_KEYSIZE_256 / sizeof(uint32_t); i++) {
				mmio_write_32((uintptr_t)(ctx->key + i), htobe32(key_u32[7 - i]));
				/* /!\ we save the key in HW byte order
				 * and word order : key[i] is for _SAES_KEYRi
				 */
			}
			break;
		default:
			return -EINVAL;
		}

		break;
	case STM32_SAES_KEY_DHU:
		mmio_clrsetbits_32((uintptr_t)&(ctx->cr), _SAES_CR_KEYSEL_MASK,
				   _SAES_CR_KEYSEL_DHUK << _SAES_CR_KEYSEL_SHIFT);
		break;
	case STM32_SAES_KEY_BH:
		mmio_clrsetbits_32((uintptr_t)&(ctx->cr), _SAES_CR_KEYSEL_MASK,
				   _SAES_CR_KEYSEL_BHK << _SAES_CR_KEYSEL_SHIFT);
		break;
	case STM32_SAES_KEY_BHU_XOR_BH:
		mmio_clrsetbits_32((uintptr_t)&(ctx->cr), _SAES_CR_KEYSEL_MASK,
				   _SAES_CR_KEYSEL_BHU_XOR_BH_K << _SAES_CR_KEYSEL_SHIFT);
		break;
	case STM32_SAES_KEY_WRAPPED:
		mmio_clrsetbits_32((uintptr_t)&(ctx->cr), _SAES_CR_KEYSEL_MASK,
				   _SAES_CR_KEYSEL_SOFT << _SAES_CR_KEYSEL_SHIFT);
		break;

	default:
		return -EINVAL;
	}

	/* Save IV */
	if (ch_mode != STM32_SAES_MODE_ECB) {
		if ((iv == NULL) || (iv_size != AES_IVSIZE)) {
			return -EINVAL;
		}

		for (i = 0U; i < AES_IVSIZE / sizeof(uint32_t); i++) {
			mmio_write_32((uintptr_t)(ctx->iv + i), htobe32(iv_u32[3 - i]));
			/* /!\ We save the iv in HW byte order */
		}
	}

	/* Reset the peripheral and wait for idle before first use */
	return saes_start(ctx);
}
564 
565 /**
 * @brief Update (or start) an AES authentication process of associated data (CCM or GCM).
567  * @param ctx: SAES process context
568  * @param last_block: true if last assoc data block
569  * @param data: pointer to associated data
570  * @param data_size: data size
571  *
572  * @retval 0 if OK; negative value else.
573  */
574 int stm32_saes_update_assodata(struct stm32_saes_context *ctx, bool last_block,
575 			       uint8_t *data, size_t data_size)
576 {
577 	int ret;
578 	uint32_t *data_u32;
579 	unsigned int i = 0U;
580 
581 	/* We want buffers to be u32 aligned */
582 	assert((uintptr_t)data % __alignof__(uint32_t) == 0);
583 	data_u32 = (uint32_t *)data;
584 
585 	/* Init phase */
586 	ret = restore_context(ctx);
587 	if (ret != 0) {
588 		goto out;
589 	}
590 
591 	ret = wait_computation_completed(ctx->base);
592 	if (ret != 0) {
593 		return ret;
594 	}
595 
596 	clear_computation_completed(ctx->base);
597 
598 	if ((data == NULL) || (data_size == 0U)) {
599 		/* No associated data */
600 		/* ret already = 0 */
601 		goto out;
602 	}
603 
604 	/* There is an header/associated data phase */
605 	mmio_clrsetbits_32(ctx->base + _SAES_CR, _SAES_CR_GCMPH_MASK,
606 			   _SAES_CR_GCMPH_HEADER << _SAES_CR_GCMPH_SHIFT);
607 
608 	/* Enable the SAES peripheral */
609 	mmio_setbits_32(ctx->base + _SAES_CR, _SAES_CR_EN);
610 
611 	while (i < round_down(data_size, AES_BLOCK_SIZE)) {
612 		unsigned int w; /* Word index */
613 
614 		w = i / sizeof(uint32_t);
615 		/* No need to htobe() as we configure the HW to swap bytes */
616 		mmio_write_32(ctx->base + _SAES_DINR, data_u32[w + 0U]);
617 		mmio_write_32(ctx->base + _SAES_DINR, data_u32[w + 1U]);
618 		mmio_write_32(ctx->base + _SAES_DINR, data_u32[w + 2U]);
619 		mmio_write_32(ctx->base + _SAES_DINR, data_u32[w + 3U]);
620 
621 		ret = wait_computation_completed(ctx->base);
622 		if (ret != 0) {
623 			goto out;
624 		}
625 
626 		clear_computation_completed(ctx->base);
627 
628 		/* Process next block */
629 		i += AES_BLOCK_SIZE;
630 		ctx->assoc_len += AES_BLOCK_SIZE_BIT;
631 	}
632 
633 	/* Manage last block if not a block size multiple */
634 	if ((last_block) && (i < data_size)) {
635 		/* We don't manage unaligned last block yet */
636 		ret = -ENODEV;
637 		goto out;
638 	}
639 
640 out:
641 	if (ret != 0) {
642 		saes_end(ctx, ret);
643 	}
644 
645 	return ret;
646 }
647 
648 /**
649  * @brief Update (or start) a AES authenticate and de/encrypt with payload data (CCM or GCM).
650  * @param ctx: SAES process context
651  * @param last_block: true if last payload data block
652  * @param data_in: pointer to payload
653  * @param data_out: pointer where to save de/encrypted payload
654  * @param data_size: payload size
655  *
656  * @retval 0 if OK; negative value else.
657  */
int stm32_saes_update_load(struct stm32_saes_context *ctx, bool last_block,
			   uint8_t *data_in, uint8_t *data_out, size_t data_size)
{
	int ret = 0;
	uint32_t *data_in_u32;
	uint32_t *data_out_u32;
	unsigned int i = 0U;
	uint32_t prev_cr;

	/* We want buffers to be u32 aligned */
	assert((uintptr_t)data_in % __alignof__(uint32_t) == 0);
	assert((uintptr_t)data_out % __alignof__(uint32_t) == 0);
	data_in_u32 = (uint32_t *)data_in;
	data_out_u32 = (uint32_t *)data_out;

	/* Sample CR before changing the phase, to detect whether the header
	 * phase was skipped (GCMPH still in INIT).
	 */
	prev_cr = mmio_read_32(ctx->base + _SAES_CR);

	if ((data_in == NULL) || (data_size == 0U)) {
		/* there is no data */
		goto out;
	}

	/* There is a load phase */
	mmio_clrsetbits_32(ctx->base + _SAES_CR, _SAES_CR_GCMPH_MASK,
			   _SAES_CR_GCMPH_PAYLOAD << _SAES_CR_GCMPH_SHIFT);

	if ((prev_cr & _SAES_CR_GCMPH_MASK) ==
	    (_SAES_CR_GCMPH_INIT << _SAES_CR_GCMPH_SHIFT)) {
		/* Still in initialization phase, no header
		 * We need to enable the SAES peripheral
		 */
		mmio_setbits_32(ctx->base + _SAES_CR, _SAES_CR_EN);
	}

	/* Process all full 128-bit blocks: 4 words in, 4 words out */
	while (i < round_down(data_size, AES_BLOCK_SIZE)) {
		unsigned int w; /* Word index */

		w = i / sizeof(uint32_t);
		/* No need to htobe() as we configure the HW to swap bytes */
		mmio_write_32(ctx->base + _SAES_DINR, data_in_u32[w + 0U]);
		mmio_write_32(ctx->base + _SAES_DINR, data_in_u32[w + 1U]);
		mmio_write_32(ctx->base + _SAES_DINR, data_in_u32[w + 2U]);
		mmio_write_32(ctx->base + _SAES_DINR, data_in_u32[w + 3U]);

		ret = wait_computation_completed(ctx->base);
		if (ret != 0) {
			goto out;
		}

		/* No need to htobe() as we configure the HW to swap bytes */
		data_out_u32[w + 0U] = mmio_read_32(ctx->base + _SAES_DOUTR);
		data_out_u32[w + 1U] = mmio_read_32(ctx->base + _SAES_DOUTR);
		data_out_u32[w + 2U] = mmio_read_32(ctx->base + _SAES_DOUTR);
		data_out_u32[w + 3U] = mmio_read_32(ctx->base + _SAES_DOUTR);

		clear_computation_completed(ctx->base);

		/* Process next block */
		i += AES_BLOCK_SIZE;
		ctx->load_len += AES_BLOCK_SIZE_BIT;
	}
	/* Manage last block if not a block size multiple:
	 * zero-pad the remainder into a stack block, process it, then copy
	 * back only the meaningful bytes.
	 */
	if ((last_block) && (i < data_size)) {
		uint32_t block_in[AES_BLOCK_SIZE / sizeof(uint32_t)] = {0};
		uint32_t block_out[AES_BLOCK_SIZE / sizeof(uint32_t)] = {0};

		memcpy(block_in, data_in + i, data_size - i);

		/* No need to htobe() as we configure the HW to swap bytes */
		mmio_write_32(ctx->base + _SAES_DINR, block_in[0U]);
		mmio_write_32(ctx->base + _SAES_DINR, block_in[1U]);
		mmio_write_32(ctx->base + _SAES_DINR, block_in[2U]);
		mmio_write_32(ctx->base + _SAES_DINR, block_in[3U]);

		ret = wait_computation_completed(ctx->base);
		if (ret != 0) {
			VERBOSE("%s %d\n", __func__, __LINE__);
			goto out;
		}

		/* No need to htobe() as we configure the HW to swap bytes */
		block_out[0U] = mmio_read_32(ctx->base + _SAES_DOUTR);
		block_out[1U] = mmio_read_32(ctx->base + _SAES_DOUTR);
		block_out[2U] = mmio_read_32(ctx->base + _SAES_DOUTR);
		block_out[3U] = mmio_read_32(ctx->base + _SAES_DOUTR);

		clear_computation_completed(ctx->base);

		memcpy(data_out + i, block_out, data_size - i);

		/* Track the exact payload length in bits for the final tag */
		ctx->load_len += (data_size - i) * UINT8_BIT;
	}

out:
	if (ret != 0) {
		saes_end(ctx, ret);
	}

	return ret;
}
758 
759 /**
760  * @brief Get authentication tag for AES authenticated algorithms (CCM or GCM).
761  * @param ctx: SAES process context
 * @param tag: pointer where to save the tag
 * @param tag_size: tag size
764  *
765  * @retval 0 if OK; negative value else.
766  */
int stm32_saes_final(struct stm32_saes_context *ctx, uint8_t *tag,
		     size_t tag_size)
{
	int ret;
	uint32_t tag_u32[4];
	uint32_t prev_cr;

	/* Sample CR before switching phase, to detect whether no header or
	 * payload phase ran (GCMPH still in INIT).
	 */
	prev_cr = mmio_read_32(ctx->base + _SAES_CR);

	mmio_clrsetbits_32(ctx->base + _SAES_CR, _SAES_CR_GCMPH_MASK,
			   _SAES_CR_GCMPH_FINAL << _SAES_CR_GCMPH_SHIFT);

	if ((prev_cr & _SAES_CR_GCMPH_MASK) == (_SAES_CR_GCMPH_INIT << _SAES_CR_GCMPH_SHIFT)) {
		/* Still in initialization phase, no header
		 * We need to enable the SAES peripheral
		 */
		mmio_setbits_32(ctx->base + _SAES_CR, _SAES_CR_EN);
	}

	/* Final block: 64-bit AAD length then 64-bit payload length, in bits.
	 * Both counters fit 32 bits here, hence the leading zero words.
	 */
	/* No need to htobe() as we configure the HW to swap bytes */
	mmio_write_32(ctx->base + _SAES_DINR, 0);
	mmio_write_32(ctx->base + _SAES_DINR, ctx->assoc_len);
	mmio_write_32(ctx->base + _SAES_DINR, 0);
	mmio_write_32(ctx->base + _SAES_DINR, ctx->load_len);

	ret = wait_computation_completed(ctx->base);
	if (ret != 0) {
		goto out;
	}

	/* No need to htobe() as we configure the HW to swap bytes */
	tag_u32[0] = mmio_read_32(ctx->base + _SAES_DOUTR);
	tag_u32[1] = mmio_read_32(ctx->base + _SAES_DOUTR);
	tag_u32[2] = mmio_read_32(ctx->base + _SAES_DOUTR);
	tag_u32[3] = mmio_read_32(ctx->base + _SAES_DOUTR);

	clear_computation_completed(ctx->base);

	/* Copy at most 16 bytes: a shorter tag_size truncates the tag */
	memcpy(tag, tag_u32, MIN(sizeof(tag_u32), tag_size));

out:
	/* End of message: always disable (and reset on error) the peripheral */
	saes_end(ctx, ret);

	return ret;
}
812 
813 /**
814  * @brief Update (or start) a AES de/encrypt process (ECB, CBC or CTR).
815  * @param ctx: SAES process context
816  * @param last_block: true if last payload data block
817  * @param data_in: pointer to payload
818  * @param data_out: pointer where to save de/encrypted payload
819  * @param data_size: payload size
820  *
821  * @retval 0 if OK; negative value else.
822  */
int stm32_saes_update(struct stm32_saes_context *ctx, bool last_block,
		      uint8_t *data_in, uint8_t *data_out, size_t data_size)
{
	int ret;
	uint32_t *data_in_u32;
	uint32_t *data_out_u32;
	unsigned int i = 0U;

	/* We want buffers to be u32 aligned */
	assert((uintptr_t)data_in % __alignof__(uint32_t) == 0);
	assert((uintptr_t)data_out % __alignof__(uint32_t) == 0);
	data_in_u32 = (uint32_t *)data_in;
	data_out_u32 = (uint32_t *)data_out;

	/* Only the final chunk of a message may be unaligned */
	if ((!last_block) &&
	    (round_down(data_size, AES_BLOCK_SIZE) != data_size)) {
		ERROR("%s: non last block must be multiple of 128 bits\n",
		      __func__);
		ret = -EINVAL;
		goto out;
	}

	/* In CBC encryption we need to manage specifically the last two
	 * 128-bit blocks if the total size is not block-size aligned
	 * (ciphertext stealing); work TODO. Currently return -ENODEV.
	 * Moreover as we need to know the last two blocks, if unaligned and
	 * called with less than two blocks, return -EINVAL.
	 */
	if (last_block && IS_CHAINING_MODE(CBC, ctx->cr) && is_encrypt(ctx->cr) &&
	    (round_down(data_size, AES_BLOCK_SIZE) != data_size)) {
		if (data_size < AES_BLOCK_SIZE * 2U) {
			ERROR("if CBC, last part size should be at least 2 * AES_BLOCK_SIZE\n");
			ret = -EINVAL;
			goto out;
		}
		/* Moreover the CBC specific padding for encrypt is not yet implemented */
		ret = -ENODEV;
		goto out;
	}

	/* Re-arm the peripheral with the saved CR/key/IV */
	ret = restore_context(ctx);
	if (ret != 0) {
		goto out;
	}

	/* Process all full 128-bit blocks: 4 words in, 4 words out */
	while (i < round_down(data_size, AES_BLOCK_SIZE)) {
		unsigned int w; /* Word index */

		w = i / sizeof(uint32_t);
		/* No need to htobe() as we configure the HW to swap bytes */
		mmio_write_32(ctx->base + _SAES_DINR, data_in_u32[w + 0U]);
		mmio_write_32(ctx->base + _SAES_DINR, data_in_u32[w + 1U]);
		mmio_write_32(ctx->base + _SAES_DINR, data_in_u32[w + 2U]);
		mmio_write_32(ctx->base + _SAES_DINR, data_in_u32[w + 3U]);

		ret = wait_computation_completed(ctx->base);
		if (ret != 0) {
			goto out;
		}

		/* No need to htobe() as we configure the HW to swap bytes */
		data_out_u32[w + 0U] = mmio_read_32(ctx->base + _SAES_DOUTR);
		data_out_u32[w + 1U] = mmio_read_32(ctx->base + _SAES_DOUTR);
		data_out_u32[w + 2U] = mmio_read_32(ctx->base + _SAES_DOUTR);
		data_out_u32[w + 3U] = mmio_read_32(ctx->base + _SAES_DOUTR);

		clear_computation_completed(ctx->base);

		/* Process next block */
		i += AES_BLOCK_SIZE;
	}
	/* Manage last block if not a block size multiple */

	if ((last_block) && (i < data_size)) {
		/* In and out buffer have same size so should be AES_BLOCK_SIZE multiple */
		ret = -ENODEV;
		goto out;
	}

	/* Mid-message: snapshot CR/IV so the next call can resume */
	if (!last_block) {
		ret = save_context(ctx);
	}

out:
	/* If last block or error, end of SAES process */
	if (last_block || (ret != 0)) {
		saes_end(ctx, ret);
	}

	return ret;
}
914