xref: /optee_os/core/drivers/crypto/stm32/stm32_cryp.c (revision 14d68630950adab8daf7b31beb0ba8f417ff91b5)
1 // SPDX-License-Identifier: BSD-2-Clause
2 /*
3  * Copyright (c) 2021, STMicroelectronics - All Rights Reserved
4  */
5 #include <assert.h>
6 #include <config.h>
7 #include <drivers/clk.h>
8 #include <drivers/clk_dt.h>
9 #include <drivers/rstctrl.h>
10 #include <initcall.h>
11 #include <io.h>
12 #include <kernel/boot.h>
13 #include <kernel/delay.h>
14 #include <kernel/dt.h>
15 #include <kernel/dt_driver.h>
16 #include <kernel/mutex.h>
17 #include <libfdt.h>
18 #include <mm/core_memprot.h>
19 #include <stdint.h>
20 #include <stm32_util.h>
21 #include <string.h>
22 #include <utee_defines.h>
23 #include <util.h>
24 
25 #include "stm32_cryp.h"
26 #include "common.h"
27 
28 /* CRYP control register */
29 #define _CRYP_CR			0x0U
30 /* CRYP status register */
31 #define _CRYP_SR			0x04U
32 /* CRYP data input register */
33 #define _CRYP_DIN			0x08U
34 /* CRYP data output register */
35 #define _CRYP_DOUT			0x0CU
36 /* CRYP DMA control register */
37 #define _CRYP_DMACR			0x10U
38 /* CRYP interrupt mask set/clear register */
39 #define _CRYP_IMSCR			0x14U
40 /* CRYP raw interrupt status register */
41 #define _CRYP_RISR			0x18U
42 /* CRYP masked interrupt status register */
43 #define _CRYP_MISR			0x1CU
44 /* CRYP key registers */
45 #define _CRYP_K0LR			0x20U
46 #define _CRYP_K0RR			0x24U
47 #define _CRYP_K1LR			0x28U
48 #define _CRYP_K1RR			0x2CU
49 #define _CRYP_K2LR			0x30U
50 #define _CRYP_K2RR			0x34U
51 #define _CRYP_K3LR			0x38U
52 #define _CRYP_K3RR			0x3CU
53 /* CRYP initialization vector registers */
54 #define _CRYP_IV0LR			0x40U
55 #define _CRYP_IV0RR			0x44U
56 #define _CRYP_IV1LR			0x48U
57 #define _CRYP_IV1RR			0x4CU
58 /* CRYP context swap GCM-CCM registers */
59 #define _CRYP_CSGCMCCM0R		0x50U
60 #define _CRYP_CSGCMCCM1R		0x54U
61 #define _CRYP_CSGCMCCM2R		0x58U
62 #define _CRYP_CSGCMCCM3R		0x5CU
63 #define _CRYP_CSGCMCCM4R		0x60U
64 #define _CRYP_CSGCMCCM5R		0x64U
65 #define _CRYP_CSGCMCCM6R		0x68U
66 #define _CRYP_CSGCMCCM7R		0x6CU
67 /* CRYP context swap GCM registers */
68 #define _CRYP_CSGCM0R			0x70U
69 #define _CRYP_CSGCM1R			0x74U
70 #define _CRYP_CSGCM2R			0x78U
71 #define _CRYP_CSGCM3R			0x7CU
72 #define _CRYP_CSGCM4R			0x80U
73 #define _CRYP_CSGCM5R			0x84U
74 #define _CRYP_CSGCM6R			0x88U
75 #define _CRYP_CSGCM7R			0x8CU
76 /* CRYP hardware configuration register */
77 #define _CRYP_HWCFGR			0x3F0U
78 /* CRYP HW version register */
79 #define _CRYP_VERR			0x3F4U
80 /* CRYP identification */
81 #define _CRYP_IPIDR			0x3F8U
82 /* CRYP HW magic ID */
83 #define _CRYP_MID			0x3FCU
84 
85 #define CRYP_TIMEOUT_US			1000000U
86 #define TIMEOUT_US_1MS			1000U
87 #define CRYP_RESET_DELAY_US		U(2)
88 
89 /* CRYP control register fields */
90 #define _CRYP_CR_RESET_VALUE		0x0U
91 #define _CRYP_CR_NPBLB_MSK		GENMASK_32(23, 20)
92 #define _CRYP_CR_NPBLB_OFF		20U
93 #define _CRYP_CR_GCM_CCMPH_MSK		GENMASK_32(17, 16)
94 #define _CRYP_CR_GCM_CCMPH_OFF		16U
95 #define _CRYP_CR_GCM_CCMPH_INIT		0U
96 #define _CRYP_CR_GCM_CCMPH_HEADER	1U
97 #define _CRYP_CR_GCM_CCMPH_PAYLOAD	2U
98 #define _CRYP_CR_GCM_CCMPH_FINAL	3U
99 #define _CRYP_CR_CRYPEN			BIT(15)
100 #define _CRYP_CR_FFLUSH			BIT(14)
101 #define _CRYP_CR_KEYSIZE_MSK		GENMASK_32(9, 8)
102 #define _CRYP_CR_KEYSIZE_OFF		8U
103 #define _CRYP_CR_KSIZE_128		0U
104 #define _CRYP_CR_KSIZE_192		1U
105 #define _CRYP_CR_KSIZE_256		2U
106 #define _CRYP_CR_DATATYPE_MSK		GENMASK_32(7, 6)
107 #define _CRYP_CR_DATATYPE_OFF		6U
108 #define _CRYP_CR_DATATYPE_NONE		0U
109 #define _CRYP_CR_DATATYPE_HALF_WORD	1U
110 #define _CRYP_CR_DATATYPE_BYTE		2U
111 #define _CRYP_CR_DATATYPE_BIT		3U
112 #define _CRYP_CR_ALGOMODE_MSK		(BIT(19) | GENMASK_32(5, 3))
113 #define _CRYP_CR_ALGOMODE_OFF		3U
114 #define _CRYP_CR_ALGOMODE_TDES_ECB	0x0U
115 #define _CRYP_CR_ALGOMODE_TDES_CBC	0x1U
116 #define _CRYP_CR_ALGOMODE_DES_ECB	0x2U
117 #define _CRYP_CR_ALGOMODE_DES_CBC	0x3U
118 #define _CRYP_CR_ALGOMODE_AES_ECB	0x4U
119 #define _CRYP_CR_ALGOMODE_AES_CBC	0x5U
120 #define _CRYP_CR_ALGOMODE_AES_CTR	0x6U
121 #define _CRYP_CR_ALGOMODE_AES		0x7U
122 #define _CRYP_CR_ALGOMODE_AES_GCM	BIT(16)
123 #define _CRYP_CR_ALGOMODE_AES_CCM	(BIT(16) | BIT(0))
124 #define _CRYP_CR_ALGODIR		BIT(2)
125 #define _CRYP_CR_ALGODIR_ENC		0U
126 #define _CRYP_CR_ALGODIR_DEC		BIT(2)
127 
128 /* CRYP status register fields */
129 #define _CRYP_SR_BUSY			BIT(4)
130 #define _CRYP_SR_OFFU			BIT(3)
131 #define _CRYP_SR_OFNE			BIT(2)
132 #define _CRYP_SR_IFNF			BIT(1)
133 #define _CRYP_SR_IFEM			BIT(0)
134 
135 /* CRYP DMA control register fields */
136 #define _CRYP_DMACR_DOEN		BIT(1)
137 #define _CRYP_DMACR_DIEN		BIT(0)
138 
139 /* CRYP interrupt fields */
140 #define _CRYP_I_OUT			BIT(1)
141 #define _CRYP_I_IN			BIT(0)
142 
143 /* CRYP hardware configuration register fields */
144 #define _CRYP_HWCFGR_CFG1_MSK		GENMASK_32(3, 0)
145 #define _CRYP_HWCFGR_CFG1_OFF		0U
146 #define _CRYP_HWCFGR_CFG2_MSK		GENMASK_32(7, 4)
147 #define _CRYP_HWCFGR_CFG2_OFF		4U
148 #define _CRYP_HWCFGR_CFG3_MSK		GENMASK_32(11, 8)
149 #define _CRYP_HWCFGR_CFG3_OFF		8U
150 #define _CRYP_HWCFGR_CFG4_MSK		GENMASK_32(15, 12)
151 #define _CRYP_HWCFGR_CFG4_OFF		12U
152 
153 /* CRYP HW version register */
154 #define _CRYP_VERR_MSK			GENMASK_32(7, 0)
155 #define _CRYP_VERR_OFF			0U
156 
157 /*
158  * Macros to manage bit manipulation when working on a local variable
159  * before writing only once to the hardware register.
160  */
161 #define CLRBITS(v, bits)		((v) &= ~(bits))
162 #define SETBITS(v, bits)		((v) |= (bits))
163 
164 #define IS_ALGOMODE(cr, mod) \
165 	(((cr) & _CRYP_CR_ALGOMODE_MSK) == (_CRYP_CR_ALGOMODE_##mod << \
166 					  _CRYP_CR_ALGOMODE_OFF))
167 
168 #define SET_ALGOMODE(mod, cr) \
169 	clrsetbits(&(cr), _CRYP_CR_ALGOMODE_MSK, (_CRYP_CR_ALGOMODE_##mod << \
170 						  _CRYP_CR_ALGOMODE_OFF))
171 
172 #define GET_ALGOMODE(cr) \
173 	(((cr) & _CRYP_CR_ALGOMODE_MSK) >> _CRYP_CR_ALGOMODE_OFF)
174 
175 static struct stm32_cryp_platdata cryp_pdata;
176 static struct mutex cryp_lock = MUTEX_INITIALIZER;
177 
178 static void clrsetbits(uint32_t *v, uint32_t mask, uint32_t bits)
179 {
180 	*v = (*v & ~mask) | bits;
181 }
182 
183 static bool algo_mode_needs_iv(uint32_t cr)
184 {
185 	return !IS_ALGOMODE(cr, TDES_ECB) && !IS_ALGOMODE(cr, DES_ECB) &&
186 	       !IS_ALGOMODE(cr, AES_ECB);
187 }
188 
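/* ECB and CBC algomode values (TDES, DES, AES) are all numerically below AES_CTR */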
189 static bool algo_mode_is_ecb_cbc(uint32_t cr)
190 {
191 	return GET_ALGOMODE(cr) < _CRYP_CR_ALGOMODE_AES_CTR;
192 }
193 
194 static bool algo_mode_is_aes(uint32_t cr)
195 {
196 	return ((cr & _CRYP_CR_ALGOMODE_MSK) >> _CRYP_CR_ALGOMODE_OFF) >=
197 	       _CRYP_CR_ALGOMODE_AES_ECB;
198 }
199 
200 static bool is_decrypt(uint32_t cr)
201 {
202 	return (cr & _CRYP_CR_ALGODIR) == _CRYP_CR_ALGODIR_DEC;
203 }
204 
205 static bool is_encrypt(uint32_t cr)
206 {
207 	return !is_decrypt(cr);
208 }
209 
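/*
 * The number of padding bytes in the last block (NPBLB) must be provided to
 * the peripheral for GCM encryption and CCM decryption.
 */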
210 static bool does_need_npblb(uint32_t cr)
211 {
212 	return (IS_ALGOMODE(cr, AES_GCM) && is_encrypt(cr)) ||
213 	       (IS_ALGOMODE(cr, AES_CCM) && is_decrypt(cr));
214 }
215 
216 static TEE_Result wait_sr_bits(vaddr_t base, uint32_t bits)
217 {
218 	uint64_t timeout_ref = timeout_init_us(CRYP_TIMEOUT_US);
219 
220 	while ((io_read32(base + _CRYP_SR) & bits) != bits)
221 		if (timeout_elapsed(timeout_ref))
222 			break;
223 
224 	if ((io_read32(base + _CRYP_SR) & bits) != bits)
225 		return TEE_ERROR_BUSY;
226 
227 	return TEE_SUCCESS;
228 }
229 
230 static TEE_Result wait_end_busy(vaddr_t base)
231 {
232 	uint64_t timeout_ref = timeout_init_us(CRYP_TIMEOUT_US);
233 
234 	while (io_read32(base + _CRYP_SR) & _CRYP_SR_BUSY)
235 		if (timeout_elapsed(timeout_ref))
236 			break;
237 
238 	if (io_read32(base + _CRYP_SR) & _CRYP_SR_BUSY)
239 		return TEE_ERROR_BUSY;
240 
241 	return TEE_SUCCESS;
242 }
243 
244 static TEE_Result wait_end_enable(vaddr_t base)
245 {
246 	uint64_t timeout_ref = timeout_init_us(CRYP_TIMEOUT_US);
247 
248 	while (io_read32(base + _CRYP_CR) & _CRYP_CR_CRYPEN)
249 		if (timeout_elapsed(timeout_ref))
250 			break;
251 
252 	if (io_read32(base + _CRYP_CR) & _CRYP_CR_CRYPEN)
253 		return TEE_ERROR_BUSY;
254 
255 	return TEE_SUCCESS;
256 }
257 
258 static TEE_Result __must_check write_align_block(struct stm32_cryp_context *ctx,
259 						 uint32_t *data)
260 {
261 	TEE_Result res = TEE_SUCCESS;
262 	unsigned int i = 0;
263 
264 	res = wait_sr_bits(ctx->base, _CRYP_SR_IFNF);
265 	if (res)
266 		return res;
267 
268 	for (i = 0; i < ctx->block_u32; i++) {
269 		/* No need to htobe() as we configure the HW to swap bytes */
270 		io_write32(ctx->base + _CRYP_DIN, data[i]);
271 	}
272 
273 	return TEE_SUCCESS;
274 }
275 
276 static TEE_Result __must_check write_block(struct stm32_cryp_context *ctx,
277 					   uint8_t *data)
278 {
279 	if (!IS_ALIGNED_WITH_TYPE(data, uint32_t)) {
280 		uint32_t data_u32[MAX_BLOCK_NB_U32] = { 0 };
281 
282 		memcpy(data_u32, data, ctx->block_u32 * sizeof(uint32_t));
283 		return write_align_block(ctx, data_u32);
284 	}
285 
286 	return write_align_block(ctx, (void *)data);
287 }
288 
289 static TEE_Result __must_check read_align_block(struct stm32_cryp_context *ctx,
290 						uint32_t *data)
291 {
292 	TEE_Result res = TEE_SUCCESS;
293 	unsigned int i = 0;
294 
295 	res = wait_sr_bits(ctx->base, _CRYP_SR_OFNE);
296 	if (res)
297 		return res;
298 
299 	for (i = 0; i < ctx->block_u32; i++) {
300 		/* No need to htobe() as we configure the HW to swap bytes */
301 		data[i] = io_read32(ctx->base + _CRYP_DOUT);
302 	}
303 
304 	return TEE_SUCCESS;
305 }
306 
307 static TEE_Result __must_check read_block(struct stm32_cryp_context *ctx,
308 					  uint8_t *data)
309 {
310 	if (!IS_ALIGNED_WITH_TYPE(data, uint32_t)) {
311 		TEE_Result res = TEE_SUCCESS;
312 		uint32_t data_u32[MAX_BLOCK_NB_U32] = { 0 };
313 
314 		res = read_align_block(ctx, data_u32);
315 		if (res)
316 			return res;
317 
318 		memcpy(data, data_u32, ctx->block_u32 * sizeof(uint32_t));
319 
320 		return TEE_SUCCESS;
321 	}
322 
323 	return read_align_block(ctx, (void *)data);
324 }
325 
326 static TEE_Result stm32_cryp_reset(void)
327 {
328 	TEE_Result res = TEE_ERROR_GENERIC;
329 
330 	if (!cryp_pdata.reset)
331 		return TEE_SUCCESS;
332 
333 	res = rstctrl_assert_to(cryp_pdata.reset, TIMEOUT_US_1MS);
334 	if (res)
335 		return res;
336 
337 	udelay(CRYP_RESET_DELAY_US);
338 
339 	return rstctrl_deassert_to(cryp_pdata.reset, TIMEOUT_US_1MS);
340 }
341 
342 static void cryp_end(struct stm32_cryp_context *ctx, TEE_Result prev_error)
343 {
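	/* On error, reset the peripheral to bring it back to a known state */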
344 	if (prev_error && stm32_cryp_reset())
345 		panic();
346 
347 	/* Disable the CRYP peripheral */
348 	io_clrbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);
349 }
350 
351 static void cryp_write_iv(struct stm32_cryp_context *ctx)
352 {
353 	if (algo_mode_needs_iv(ctx->cr)) {
354 		unsigned int i = 0;
355 
356 		/* Restore the _CRYP_IVRx */
357 		for (i = 0; i < ctx->block_u32; i++)
358 			io_write32(ctx->base + _CRYP_IV0LR + i *
359 				   sizeof(uint32_t), ctx->iv[i]);
360 	}
361 }
362 
363 static void cryp_save_suspend(struct stm32_cryp_context *ctx)
364 {
365 	unsigned int i = 0;
366 
367 	if (IS_ALGOMODE(ctx->cr, AES_GCM) || IS_ALGOMODE(ctx->cr, AES_CCM))
368 		for (i = 0; i < ARRAY_SIZE(ctx->pm_gcmccm); i++)
369 			ctx->pm_gcmccm[i] = io_read32(ctx->base +
370 						      _CRYP_CSGCMCCM0R +
371 						      i * sizeof(uint32_t));
372 
373 	if (IS_ALGOMODE(ctx->cr, AES_GCM))
374 		for (i = 0; i < ARRAY_SIZE(ctx->pm_gcm); i++)
375 			ctx->pm_gcm[i] = io_read32(ctx->base + _CRYP_CSGCM0R +
376 						   i * sizeof(uint32_t));
377 }
378 
379 static void cryp_restore_suspend(struct stm32_cryp_context *ctx)
380 {
381 	unsigned int i = 0;
382 
383 	if (IS_ALGOMODE(ctx->cr, AES_GCM) || IS_ALGOMODE(ctx->cr, AES_CCM))
384 		for (i = 0; i < ARRAY_SIZE(ctx->pm_gcmccm); i++)
385 			io_write32(ctx->base + _CRYP_CSGCMCCM0R +
386 				   i * sizeof(uint32_t), ctx->pm_gcmccm[i]);
387 
388 	if (IS_ALGOMODE(ctx->cr, AES_GCM))
389 		for (i = 0; i < ARRAY_SIZE(ctx->pm_gcm); i++)
390 			io_write32(ctx->base + _CRYP_CSGCM0R +
391 				   i * sizeof(uint32_t), ctx->pm_gcm[i]);
392 }
393 
394 static void cryp_write_key(struct stm32_cryp_context *ctx)
395 {
396 	vaddr_t reg = 0;
397 	int i = 0;
398 	uint32_t algo = GET_ALGOMODE(ctx->cr);
399 
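	/*
	 * The key is right-aligned in the key registers: the last key word is
	 * written to K1RR (DES) or K3RR (TDES/AES) and the previous words to
	 * the registers just below it.
	 */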
400 	if (algo == _CRYP_CR_ALGOMODE_DES_ECB ||
401 	    algo == _CRYP_CR_ALGOMODE_DES_CBC)
402 		reg = ctx->base + _CRYP_K1RR;
403 	else
404 		reg = ctx->base + _CRYP_K3RR;
405 
406 	for (i = ctx->key_size / sizeof(uint32_t) - 1;
407 	     i >= 0;
408 	     i--, reg -= sizeof(uint32_t))
409 		io_write32(reg, ctx->key[i]);
410 }
411 
412 static TEE_Result cryp_prepare_key(struct stm32_cryp_context *ctx)
413 {
414 	TEE_Result res = TEE_SUCCESS;
415 
416 	/*
417 	 * For AES ECB/CBC decryption, the key preparation mode must be used
418 	 * first to derive the decryption key schedule.
419 	 */
420 	if (is_decrypt(ctx->cr) && (IS_ALGOMODE(ctx->cr, AES_ECB) ||
421 				    IS_ALGOMODE(ctx->cr, AES_CBC))) {
422 		/* Select Algomode "prepare key" */
423 		io_clrsetbits32(ctx->base + _CRYP_CR, _CRYP_CR_ALGOMODE_MSK,
424 				_CRYP_CR_ALGOMODE_AES << _CRYP_CR_ALGOMODE_OFF);
425 
426 		cryp_write_key(ctx);
427 
428 		/* Enable CRYP */
429 		io_setbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);
430 
431 		res = wait_end_busy(ctx->base);
432 		if (res)
433 			return res;
434 
435 		/* Restore the 'real' algomode */
436 		io_clrsetbits32(ctx->base + _CRYP_CR, _CRYP_CR_ALGOMODE_MSK,
437 				ctx->cr & _CRYP_CR_ALGOMODE_MSK);
438 	} else {
439 		cryp_write_key(ctx);
440 	}
441 
442 	return TEE_SUCCESS;
443 }
444 
445 static TEE_Result save_context(struct stm32_cryp_context *ctx)
446 {
447 	/* Device should not be in a processing phase */
448 	if (io_read32(ctx->base + _CRYP_SR) & _CRYP_SR_BUSY)
449 		return TEE_ERROR_BAD_STATE;
450 
451 	/* Disable the CRYP peripheral */
452 	io_clrbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);
453 
454 	/* Save CR */
455 	ctx->cr = io_read32(ctx->base + _CRYP_CR);
456 
457 	cryp_save_suspend(ctx);
458 
459 	/* If the algo mode uses an IV, save its current value */
460 	if (algo_mode_needs_iv(ctx->cr)) {
461 		unsigned int i = 0;
462 
463 		/* Save IV */
464 		for (i = 0; i < ctx->block_u32; i++)
465 			ctx->iv[i] = io_read32(ctx->base + _CRYP_IV0LR + i *
466 					       sizeof(uint32_t));
467 	}
468 
469 	return TEE_SUCCESS;
470 }
471 
472 /* To resume the processing of a message */
473 static TEE_Result restore_context(struct stm32_cryp_context *ctx)
474 {
475 	TEE_Result res = TEE_SUCCESS;
476 
477 	/* IP should be disabled */
478 	if (io_read32(ctx->base + _CRYP_CR) & _CRYP_CR_CRYPEN) {
479 		DMSG("Device is still enabled");
480 		return TEE_ERROR_BAD_STATE;
481 	}
482 
483 	/* Restore the _CRYP_CR */
484 	io_write32(ctx->base + _CRYP_CR, ctx->cr);
485 
486 	/* Write key and, in case of AES_CBC or AES_ECB decrypt, prepare it */
487 	res = cryp_prepare_key(ctx);
488 	if (res)
489 		return res;
490 
491 	cryp_restore_suspend(ctx);
492 
493 	cryp_write_iv(ctx);
494 
495 	/* Flush internal fifo */
496 	io_setbits32(ctx->base + _CRYP_CR, _CRYP_CR_FFLUSH);
497 
498 	/* Enable the CRYP peripheral */
499 	io_setbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);
500 
501 	return TEE_SUCCESS;
502 }
503 
504 /*
505  * Translate a byte index in an array of BE uint32_t into the index of the
506  * same byte in the corresponding LE uint32_t array.
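 * For example, byte index 0 maps to index 3 and byte index 5 maps to
 * index 6.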
507  */
508 static size_t be_index(size_t index)
509 {
510 	return (index & ~0x3) + 3 - (index & 0x3);
511 }
512 
513 static TEE_Result ccm_first_context(struct stm32_cryp_context *ctx)
514 {
515 	TEE_Result res = TEE_SUCCESS;
516 	uint32_t b0[AES_BLOCK_NB_U32] = { 0 };
517 	uint8_t *iv = (uint8_t *)ctx->iv;
518 	size_t l = 0;
519 	size_t i = 15;
520 
521 	/* IP should be disabled */
522 	if (io_read32(ctx->base + _CRYP_CR) & _CRYP_CR_CRYPEN)
523 		return TEE_ERROR_BAD_STATE;
524 
525 	/* Write the _CRYP_CR */
526 	io_write32(ctx->base + _CRYP_CR, ctx->cr);
527 
528 	/* Write key */
529 	res = cryp_prepare_key(ctx);
530 	if (res)
531 		return res;
532 
533 	/* Save full IV that will be b0 */
534 	memcpy(b0, iv, sizeof(b0));
535 
536 	/*
537 	 * Update IV to become CTR0/1 before setting it.
538 	 * IV is saved as LE uint32_t[4] as expected by hardware,
539 	 * but CCM RFC defines bytes to update in a BE array.
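	 * CTR0 (the counter block with its counter field set to 0) is kept
	 * aside: per the CCM construction, the final phase encrypts the
	 * CBC-MAC with it to produce the tag.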
540 	 */
541 	/* Set flag bits to 0 (5 higher bits), keep 3 low bits */
542 	iv[be_index(0)] &= 0x7;
543 	/* Get size of length field (can be from 2 to 8) */
544 	l = iv[be_index(0)] + 1;
545 	/* Set Q to 0 */
546 	for (i = 15; i >= 15 - l + 1; i--)
547 		iv[be_index(i)] = 0;
548 	/* Save CTR0 */
549 	memcpy(ctx->ctr0_ccm, iv, sizeof(b0));
550 	/* Increment Q */
551 	iv[be_index(15)] |= 0x1;
552 
553 	cryp_write_iv(ctx);
554 
555 	/* Enable the CRYP peripheral */
556 	io_setbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);
557 
558 	res = write_align_block(ctx, b0);
559 
560 	return res;
561 }
562 
563 static TEE_Result do_from_init_to_phase(struct stm32_cryp_context *ctx,
564 					uint32_t new_phase)
565 {
566 	TEE_Result res = TEE_SUCCESS;
567 
568 	/*
569 	 * We didn't run the init phase yet.
570 	 * CCM needs a specific restore_context sequence for the init phase.
571 	 */
572 	if (IS_ALGOMODE(ctx->cr, AES_CCM))
573 		res = ccm_first_context(ctx);
574 	else
575 		res = restore_context(ctx);
576 
577 	if (res)
578 		return res;
579 
580 	res = wait_end_enable(ctx->base);
581 	if (res)
582 		return res;
583 
584 	/* Move to 'new_phase' */
585 	io_clrsetbits32(ctx->base + _CRYP_CR, _CRYP_CR_GCM_CCMPH_MSK,
586 			new_phase << _CRYP_CR_GCM_CCMPH_OFF);
587 
588 	/* Enable the CRYP peripheral (init disabled it) */
589 	io_setbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);
590 
591 	return TEE_SUCCESS;
592 }
593 
594 static TEE_Result do_from_header_to_phase(struct stm32_cryp_context *ctx,
595 					  uint32_t new_phase)
596 {
597 	TEE_Result res = TEE_SUCCESS;
598 
599 	res = restore_context(ctx);
600 	if (res)
601 		return res;
602 
603 	if (ctx->extra_size) {
604 		/* Manage unaligned header data before moving to next phase */
605 		memset((uint8_t *)ctx->extra + ctx->extra_size, 0,
606 		       ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size);
607 
608 		res = write_align_block(ctx, ctx->extra);
609 		if (res)
610 			return res;
611 
612 		ctx->assoc_len += (ctx->extra_size) * INT8_BIT;
613 		ctx->extra_size = 0;
614 	}
615 
616 	/* Move to 'new_phase' */
617 	io_clrsetbits32(ctx->base + _CRYP_CR, _CRYP_CR_GCM_CCMPH_MSK,
618 			new_phase << _CRYP_CR_GCM_CCMPH_OFF);
619 
620 	return TEE_SUCCESS;
621 }
622 
623 /**
624  * @brief Start an AES or (T)DES computation.
625  * @param ctx: CRYP process context
626  * @param is_dec: true if decryption, false if encryption
627  * @param algo: algorithm mode to use
628  * @param key: pointer to key
629  * @param key_size: key size
630  * @param iv: pointer to initialization vector (unused if algo is ECB)
631  * @param iv_size: iv size
632  * @note This function does not access the hardware, it only stores the
633  *       configuration values in ctx.
634  * @retval TEE_SUCCESS if OK.
635  */
636 TEE_Result stm32_cryp_init(struct stm32_cryp_context *ctx, bool is_dec,
637 			   enum stm32_cryp_algo_mode algo,
638 			   const void *key, size_t key_size, const void *iv,
639 			   size_t iv_size)
640 {
641 	unsigned int i = 0;
642 	const uint32_t *iv_u32 = NULL;
643 	uint32_t local_iv[4] = { 0 };
644 	const uint32_t *key_u32 = NULL;
645 	uint32_t local_key[8] = { 0 };
646 
647 	ctx->assoc_len = 0;
648 	ctx->load_len = 0;
649 	ctx->extra_size = 0;
650 	ctx->lock = &cryp_lock;
651 
652 	ctx->base = io_pa_or_va(&cryp_pdata.base, 1);
653 	ctx->cr = _CRYP_CR_RESET_VALUE;
654 
655 	/* We want buffers to be 32-bit aligned */
656 	if (IS_ALIGNED_WITH_TYPE(key, uint32_t)) {
657 		key_u32 = key;
658 	} else {
659 		memcpy(local_key, key, key_size);
660 		key_u32 = local_key;
661 	}
662 
663 	if (IS_ALIGNED_WITH_TYPE(iv, uint32_t)) {
664 		iv_u32 = iv;
665 	} else {
666 		memcpy(local_iv, iv, iv_size);
667 		iv_u32 = local_iv;
668 	}
669 
670 	if (is_dec)
671 		SETBITS(ctx->cr, _CRYP_CR_ALGODIR);
672 	else
673 		CLRBITS(ctx->cr, _CRYP_CR_ALGODIR);
674 
675 	/* Save algo mode */
676 	switch (algo) {
677 	case STM32_CRYP_MODE_TDES_ECB:
678 		SET_ALGOMODE(TDES_ECB, ctx->cr);
679 		break;
680 	case STM32_CRYP_MODE_TDES_CBC:
681 		SET_ALGOMODE(TDES_CBC, ctx->cr);
682 		break;
683 	case STM32_CRYP_MODE_DES_ECB:
684 		SET_ALGOMODE(DES_ECB, ctx->cr);
685 		break;
686 	case STM32_CRYP_MODE_DES_CBC:
687 		SET_ALGOMODE(DES_CBC, ctx->cr);
688 		break;
689 	case STM32_CRYP_MODE_AES_ECB:
690 		SET_ALGOMODE(AES_ECB, ctx->cr);
691 		break;
692 	case STM32_CRYP_MODE_AES_CBC:
693 		SET_ALGOMODE(AES_CBC, ctx->cr);
694 		break;
695 	case STM32_CRYP_MODE_AES_CTR:
696 		SET_ALGOMODE(AES_CTR, ctx->cr);
697 		break;
698 	case STM32_CRYP_MODE_AES_GCM:
699 		SET_ALGOMODE(AES_GCM, ctx->cr);
700 		break;
701 	case STM32_CRYP_MODE_AES_CCM:
702 		SET_ALGOMODE(AES_CCM, ctx->cr);
703 		break;
704 	default:
705 		return TEE_ERROR_BAD_PARAMETERS;
706 	}
707 
708 	/*
709 	 * We use the HW byte swap (_CRYP_CR_DATATYPE_BYTE) for data,
710 	 * so we don't need
711 	 * TEE_U32_TO_BIG_ENDIAN() before writing to the DIN register
712 	 * nor
713 	 * TEE_U32_FROM_BIG_ENDIAN() after reading from the DOUT register.
714 	 */
715 	clrsetbits(&ctx->cr, _CRYP_CR_DATATYPE_MSK,
716 		   _CRYP_CR_DATATYPE_BYTE << _CRYP_CR_DATATYPE_OFF);
717 
718 	/*
719 	 * Configure the key size for AES algorithms
720 	 * and save the block size.
721 	 */
722 	if (algo_mode_is_aes(ctx->cr)) {
723 		switch (key_size) {
724 		case AES_KEYSIZE_128:
725 			clrsetbits(&ctx->cr, _CRYP_CR_KEYSIZE_MSK,
726 				   _CRYP_CR_KSIZE_128 << _CRYP_CR_KEYSIZE_OFF);
727 			break;
728 		case AES_KEYSIZE_192:
729 			clrsetbits(&ctx->cr, _CRYP_CR_KEYSIZE_MSK,
730 				   _CRYP_CR_KSIZE_192 << _CRYP_CR_KEYSIZE_OFF);
731 			break;
732 		case AES_KEYSIZE_256:
733 			clrsetbits(&ctx->cr, _CRYP_CR_KEYSIZE_MSK,
734 				   _CRYP_CR_KSIZE_256 << _CRYP_CR_KEYSIZE_OFF);
735 			break;
736 		default:
737 			return TEE_ERROR_BAD_PARAMETERS;
738 		}
739 
740 		/* And set block size */
741 		ctx->block_u32 = AES_BLOCK_NB_U32;
742 	} else {
743 		/* And set DES/TDES block size */
744 		ctx->block_u32 = DES_BLOCK_NB_U32;
745 	}
746 
747 	/* Save key in HW order */
748 	ctx->key_size = key_size;
749 	for (i = 0; i < key_size / sizeof(uint32_t); i++)
750 		ctx->key[i] = TEE_U32_TO_BIG_ENDIAN(key_u32[i]);
751 
752 	/* Save IV */
753 	if (algo_mode_needs_iv(ctx->cr)) {
754 		if (!iv || iv_size != ctx->block_u32 * sizeof(uint32_t))
755 			return TEE_ERROR_BAD_PARAMETERS;
756 
757 		/*
758 		 * We save IV in the byte order expected by the
759 		 * IV registers
760 		 */
761 		for (i = 0; i < ctx->block_u32; i++)
762 			ctx->iv[i] = TEE_U32_TO_BIG_ENDIAN(iv_u32[i]);
763 	}
764 
765 	/* Reset suspend registers */
766 	memset(ctx->pm_gcmccm, 0, sizeof(ctx->pm_gcmccm));
767 	memset(ctx->pm_gcm, 0, sizeof(ctx->pm_gcm));
768 
769 	return TEE_SUCCESS;
770 }
771 
772 /**
773  * @brief Update (or start) an AES authentication process with
774  *        associated data (CCM or GCM).
775  * @param ctx: CRYP process context
776  * @param data: pointer to associated data
777  * @param data_size: data size
778  * @retval TEE_SUCCESS if OK.
779  */
780 TEE_Result stm32_cryp_update_assodata(struct stm32_cryp_context *ctx,
781 				      uint8_t *data, size_t data_size)
782 {
783 	TEE_Result res = TEE_SUCCESS;
784 	unsigned int i = 0;
785 	uint32_t previous_phase = 0;
786 
787 	/* If no associated data, nothing to do */
788 	if (!data || !data_size)
789 		return TEE_SUCCESS;
790 
791 	mutex_lock(ctx->lock);
792 
793 	previous_phase = (ctx->cr & _CRYP_CR_GCM_CCMPH_MSK) >>
794 			 _CRYP_CR_GCM_CCMPH_OFF;
795 
796 	switch (previous_phase) {
797 	case _CRYP_CR_GCM_CCMPH_INIT:
798 		res = do_from_init_to_phase(ctx, _CRYP_CR_GCM_CCMPH_HEADER);
799 		break;
800 	case _CRYP_CR_GCM_CCMPH_HEADER:
801 		/*
802 		 * Function update_assodata was already called.
803 		 * We only need to restore the context.
804 		 */
805 		res = restore_context(ctx);
806 		break;
807 	default:
808 		assert(0);
809 		res = TEE_ERROR_BAD_STATE;
810 	}
811 
812 	if (res)
813 		goto out;
814 
815 	/* Manage remaining data from a previous update_assodata() call */
816 	if (ctx->extra_size &&
817 	    (ctx->extra_size + data_size >=
818 	     ctx->block_u32 * sizeof(uint32_t))) {
819 		uint32_t block[MAX_BLOCK_NB_U32] = { 0 };
820 
821 		memcpy(block, ctx->extra, ctx->extra_size);
822 		memcpy((uint8_t *)block + ctx->extra_size, data,
823 		       ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size);
824 
825 		res = write_align_block(ctx, block);
826 		if (res)
827 			goto out;
828 
829 		i += ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size;
830 		ctx->extra_size = 0;
831 		ctx->assoc_len += ctx->block_u32 * sizeof(uint32_t) * INT8_BIT;
832 	}
833 
834 	while (data_size - i >= ctx->block_u32 * sizeof(uint32_t)) {
835 		res = write_block(ctx, data + i);
836 		if (res)
837 			goto out;
838 
839 		/* Process next block */
840 		i += ctx->block_u32 * sizeof(uint32_t);
841 		ctx->assoc_len += ctx->block_u32 * sizeof(uint32_t) * INT8_BIT;
842 	}
843 
844 	/*
845 	 * Manage last block if not a block size multiple:
846 	 * Save remaining data to manage them later (potentially with new
847 	 * associated data).
848 	 */
849 	if (i < data_size) {
850 		memcpy((uint8_t *)ctx->extra + ctx->extra_size, data + i,
851 		       data_size - i);
852 		ctx->extra_size += data_size - i;
853 	}
854 
855 	res = save_context(ctx);
856 out:
857 	if (res)
858 		cryp_end(ctx, res);
859 
860 	mutex_unlock(ctx->lock);
861 
862 	return res;
863 }
864 
865 /**
866  * @brief Update (or start) an AES authenticated de/encryption of
867  *        payload data (CCM or GCM).
868  * @param ctx: CRYP process context
869  * @param data_in: pointer to payload
870  * @param data_out: pointer where to save de/encrypted payload
871  * @param data_size: payload size
872  *
873  * @retval TEE_SUCCESS if OK.
874  */
875 TEE_Result stm32_cryp_update_load(struct stm32_cryp_context *ctx,
876 				  uint8_t *data_in, uint8_t *data_out,
877 				  size_t data_size)
878 {
879 	TEE_Result res = TEE_SUCCESS;
880 	unsigned int i = 0;
881 	uint32_t previous_phase = 0;
882 
883 	if (!data_in || !data_size)
884 		return TEE_SUCCESS;
885 
886 	mutex_lock(ctx->lock);
887 
888 	previous_phase = (ctx->cr & _CRYP_CR_GCM_CCMPH_MSK) >>
889 			 _CRYP_CR_GCM_CCMPH_OFF;
890 
891 	switch (previous_phase) {
892 	case _CRYP_CR_GCM_CCMPH_INIT:
893 		res = do_from_init_to_phase(ctx, _CRYP_CR_GCM_CCMPH_PAYLOAD);
894 		break;
895 	case _CRYP_CR_GCM_CCMPH_HEADER:
896 		res = do_from_header_to_phase(ctx, _CRYP_CR_GCM_CCMPH_PAYLOAD);
897 		break;
898 	case _CRYP_CR_GCM_CCMPH_PAYLOAD:
899 		/* new update_load call, we only need to restore context */
900 		res = restore_context(ctx);
901 		break;
902 	default:
903 		assert(0);
904 		res = TEE_ERROR_BAD_STATE;
905 	}
906 
907 	if (res)
908 		goto out;
909 
910 	/* Manage an incomplete block from a previous update_load() call */
911 	if (ctx->extra_size &&
912 	    (ctx->extra_size + data_size >=
913 	     ctx->block_u32 * sizeof(uint32_t))) {
914 		uint32_t block_out[MAX_BLOCK_NB_U32] = { 0 };
915 
916 		memcpy((uint8_t *)ctx->extra + ctx->extra_size, data_in + i,
917 		       ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size);
918 
919 		res = write_align_block(ctx, ctx->extra);
920 		if (res)
921 			goto out;
922 
923 		res = read_align_block(ctx, block_out);
924 		if (res)
925 			goto out;
926 
927 		memcpy(data_out + i, (uint8_t *)block_out + ctx->extra_size,
928 		       ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size);
929 
930 		i += ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size;
931 		ctx->extra_size = 0;
932 
933 		ctx->load_len += ctx->block_u32 * sizeof(uint32_t) * INT8_BIT;
934 	}
935 
936 	while (data_size - i >= ctx->block_u32 * sizeof(uint32_t)) {
937 		res = write_block(ctx, data_in + i);
938 		if (res)
939 			goto out;
940 
941 		res = read_block(ctx, data_out + i);
942 		if (res)
943 			goto out;
944 
945 		/* Process next block */
946 		i += ctx->block_u32 * sizeof(uint32_t);
947 		ctx->load_len += ctx->block_u32 * sizeof(uint32_t) * INT8_BIT;
948 	}
949 
950 	res = save_context(ctx);
951 	if (res)
952 		goto out;
953 
954 	/*
955 	 * Manage the last block if it is not a block size multiple.
956 	 * The context was already saved above.
957 	 * Complete the block with 0 and send it to CRYP to get the
958 	 * {en,de}crypted data, and store the data to resend it as the last
959 	 * block in final() or to complete the next update_load() for a correct tag.
960 	 */
961 	if (i < data_size) {
962 		uint32_t block_out[MAX_BLOCK_NB_U32] = { 0 };
963 		size_t prev_extra_size = ctx->extra_size;
964 
965 		/* Re-enable the CRYP peripheral */
966 		io_setbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);
967 
968 		memcpy((uint8_t *)ctx->extra + ctx->extra_size, data_in + i,
969 		       data_size - i);
970 		ctx->extra_size += data_size - i;
971 		memset((uint8_t *)ctx->extra + ctx->extra_size, 0,
972 		       ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size);
973 
974 		res = write_align_block(ctx, ctx->extra);
975 		if (res)
976 			goto out;
977 
978 		res = read_align_block(ctx, block_out);
979 		if (res)
980 			goto out;
981 
982 		memcpy(data_out + i, (uint8_t *)block_out + prev_extra_size,
983 		       data_size - i);
984 
985 		/* Disable the CRYP peripheral */
986 		io_clrbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);
987 	}
988 
989 out:
990 	if (res)
991 		cryp_end(ctx, res);
992 
993 	mutex_unlock(ctx->lock);
994 
995 	return res;
996 }
997 
998 /**
999  * @brief Get authentication tag for AES authenticated algorithms (CCM or GCM).
1000  * @param ctx: CRYP process context
1001  * @param tag: pointer where to save the tag
1002  * @param tag_size: tag size
1003  *
1004  * @retval TEE_SUCCESS if OK.
1005  */
1006 TEE_Result stm32_cryp_final(struct stm32_cryp_context *ctx, uint8_t *tag,
1007 			    size_t tag_size)
1008 {
1009 	TEE_Result res = TEE_SUCCESS;
1010 	uint32_t tag_u32[4] = { 0 };
1011 	uint32_t previous_phase = 0;
1012 
1013 	mutex_lock(ctx->lock);
1014 
1015 	previous_phase = (ctx->cr & _CRYP_CR_GCM_CCMPH_MSK) >>
1016 			 _CRYP_CR_GCM_CCMPH_OFF;
1017 
1018 	switch (previous_phase) {
1019 	case _CRYP_CR_GCM_CCMPH_INIT:
1020 		res = do_from_init_to_phase(ctx, _CRYP_CR_GCM_CCMPH_FINAL);
1021 		break;
1022 	case _CRYP_CR_GCM_CCMPH_HEADER:
1023 		res = do_from_header_to_phase(ctx, _CRYP_CR_GCM_CCMPH_FINAL);
1024 		break;
1025 	case _CRYP_CR_GCM_CCMPH_PAYLOAD:
1026 		res = restore_context(ctx);
1027 		if (res)
1028 			break;
1029 
1030 		/* Manage an incomplete block from a previous update_load() */
1031 		if (ctx->extra_size) {
1032 			uint32_t block_out[MAX_BLOCK_NB_U32] = { 0 };
1033 			size_t sz = ctx->block_u32 * sizeof(uint32_t) -
1034 				    ctx->extra_size;
1035 
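			/* Specify the number of padding bytes in the last block */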
1036 			if (does_need_npblb(ctx->cr)) {
1037 				io_clrsetbits32(ctx->base + _CRYP_CR,
1038 						_CRYP_CR_NPBLB_MSK,
1039 						sz << _CRYP_CR_NPBLB_OFF);
1040 			}
1041 
1042 			memset((uint8_t *)ctx->extra + ctx->extra_size, 0, sz);
1043 
1044 			res = write_align_block(ctx, ctx->extra);
1045 			if (res)
1046 				break;
1047 
1048 			/* Discard the {en,de}crypted data, it was already saved */
1049 			res = read_align_block(ctx, block_out);
1050 			if (res)
1051 				break;
1052 
1053 			ctx->load_len += (ctx->extra_size * INT8_BIT);
1054 			ctx->extra_size = 0;
1055 		}
1056 
1057 		/* Move to final phase */
1058 		io_clrsetbits32(ctx->base + _CRYP_CR, _CRYP_CR_GCM_CCMPH_MSK,
1059 				_CRYP_CR_GCM_CCMPH_FINAL <<
1060 				_CRYP_CR_GCM_CCMPH_OFF);
1061 		break;
1062 	default:
1063 		assert(0);
1064 		res = TEE_ERROR_BAD_STATE;
1065 	}
1066 
1067 	if (res)
1068 		goto out;
1069 
1070 	if (IS_ALGOMODE(ctx->cr, AES_GCM)) {
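		/*
		 * The GCM final phase expects len(AAD) || len(payload), each
		 * as a 64-bit bit count; the upper 32 bits are written as 0.
		 */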
1071 		/* No need to htobe() as we configure the HW to swap bytes */
1072 		io_write32(ctx->base + _CRYP_DIN, 0U);
1073 		io_write32(ctx->base + _CRYP_DIN, ctx->assoc_len);
1074 		io_write32(ctx->base + _CRYP_DIN, 0U);
1075 		io_write32(ctx->base + _CRYP_DIN, ctx->load_len);
1076 	} else if (IS_ALGOMODE(ctx->cr, AES_CCM)) {
1077 		/* No need to htobe() in this phase */
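		/* Sending CTR0 lets the peripheral encrypt the CBC-MAC into the tag */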
1078 		res = write_align_block(ctx, ctx->ctr0_ccm);
1079 		if (res)
1080 			goto out;
1081 	}
1082 
1083 	res = read_align_block(ctx, tag_u32);
1084 	if (res)
1085 		goto out;
1086 
1087 	memcpy(tag, tag_u32, MIN(sizeof(tag_u32), tag_size));
1088 
1089 out:
1090 	cryp_end(ctx, res);
1091 	mutex_unlock(ctx->lock);
1092 
1093 	return res;
1094 }
1095 
1096 /**
1097  * @brief Update (or start) a de/encrypt process.
1098  * @param ctx: CRYP process context
1099  * @param last_block: true if last payload data block
1100  * @param data_in: pointer to payload
1101  * @param data_out: pointer where to save de/encrypted payload
1102  * @param data_size: payload size
1103  *
1104  * @retval TEE_SUCCESS if OK.
1105  */
1106 TEE_Result stm32_cryp_update(struct stm32_cryp_context *ctx, bool last_block,
1107 			     uint8_t *data_in, uint8_t *data_out,
1108 			     size_t data_size)
1109 {
1110 	TEE_Result res = TEE_SUCCESS;
1111 	unsigned int i = 0;
1112 
1113 	mutex_lock(ctx->lock);
1114 
1115 	/*
1116 	 * In CBC and ECB encryption the last 2 blocks need specific handling
1117 	 * when the total size is not aligned to the block size. This is
1118 	 * currently not implemented: return TEE_ERROR_NOT_IMPLEMENTED.
1119 	 * Moreover, as the last 2 blocks must be known, return
1120 	 * TEE_ERROR_BAD_STATE if unaligned and called with less than 2 blocks.
1121 	 */
1122 	if (last_block && algo_mode_is_ecb_cbc(ctx->cr) &&
1123 	    is_encrypt(ctx->cr) &&
1124 	    (ROUNDDOWN(data_size, ctx->block_u32 * sizeof(uint32_t)) !=
1125 	     data_size)) {
1126 		if (data_size < ctx->block_u32 * sizeof(uint32_t) * 2) {
1127 			/*
1128 			 * If CBC, size of the last part should be at
1129 			 * least 2*BLOCK_SIZE
1130 			 */
1131 			EMSG("Unexpected last block size");
1132 			res = TEE_ERROR_BAD_STATE;
1133 			goto out;
1134 		}
1135 		/*
1136 		 * Moreover the ECB/CBC specific padding for encrypt is not
1137 		 * yet implemented, and not used in OP-TEE.
1138 		 */
1139 		res = TEE_ERROR_NOT_IMPLEMENTED;
1140 		goto out;
1141 	}
1142 
1143 	/* Manage remaining CTR mask from previous update call */
1144 	if (IS_ALGOMODE(ctx->cr, AES_CTR) && ctx->extra_size) {
1145 		unsigned int j = 0;
1146 		uint8_t *mask = (uint8_t *)ctx->extra;
1147 
1148 		for (j = 0; j < ctx->extra_size && i < data_size; j++, i++)
1149 			data_out[i] = data_in[i] ^ mask[j];
1150 
1151 		if (j != ctx->extra_size) {
1152 			 * We didn't consume all of the saved mask,
1153 			 * but there is no more data to process.
1154 			 * but no more data.
1155 			 */
1156 
1157 			/* We save remaining mask and its new size */
1158 			memmove(ctx->extra, ctx->extra + j,
1159 				ctx->extra_size - j);
1160 			ctx->extra_size -= j;
1161 
1162 			/*
1163 			 * We don't need to save the HW context as we
1164 			 * didn't modify the HW state.
1165 			 */
1166 			res = TEE_SUCCESS;
1167 			goto out;
1168 		}
1169 
1170 		/* All extra mask consumed */
1171 		ctx->extra_size = 0;
1172 	}
1173 
1174 	res = restore_context(ctx);
1175 	if (res)
1176 		goto out;
1177 
1178 	while (data_size - i >= ctx->block_u32 * sizeof(uint32_t)) {
1179 		/*
1180 		 * We only write/read one block at a time
1181 		 * but the CRYP uses input (and output) FIFOs of 8 * uint32_t.
1182 		 */
1183 		res = write_block(ctx, data_in + i);
1184 		if (res)
1185 			goto out;
1186 
1187 		res = read_block(ctx, data_out + i);
1188 		if (res)
1189 			goto out;
1190 
1191 		/* Process next block */
1192 		i += ctx->block_u32 * sizeof(uint32_t);
1193 	}
1194 
1195 	/* Manage last block if not a block size multiple */
1196 	if (i < data_size) {
1197 		uint32_t block_in[MAX_BLOCK_NB_U32] = { 0 };
1198 		uint32_t block_out[MAX_BLOCK_NB_U32] = { 0 };
1199 
1200 		if (!IS_ALGOMODE(ctx->cr, AES_CTR)) {
1201 			/*
1202 			 * Algorithms other than CTR can only manage
1203 			 * multiples of block_size.
1204 			 */
1205 			res = TEE_ERROR_BAD_PARAMETERS;
1206 			goto out;
1207 		}
1208 
1209 		/*
1210 		 * For CTR we save the generated mask to use it at the next
1211 		 * update call.
1212 		 */
1213 		memcpy(block_in, data_in + i, data_size - i);
1214 
1215 		res = write_align_block(ctx, block_in);
1216 		if (res)
1217 			goto out;
1218 
1219 		res = read_align_block(ctx, block_out);
1220 		if (res)
1221 			goto out;
1222 
1223 		memcpy(data_out + i, block_out, data_size - i);
1224 
1225 		/* Save the mask for a possible next call */
1226 		ctx->extra_size = ctx->block_u32 * sizeof(uint32_t) -
1227 			(data_size - i);
1228 		memcpy(ctx->extra, (uint8_t *)block_out + data_size - i,
1229 		       ctx->extra_size);
1230 	}
1231 
1232 	if (!last_block)
1233 		res = save_context(ctx);
1234 
1235 out:
1236 	/* If last block or error, end of CRYP process */
1237 	if (last_block || res)
1238 		cryp_end(ctx, res);
1239 
1240 	mutex_unlock(ctx->lock);
1241 
1242 	return res;
1243 }
1244 
1245 static TEE_Result stm32_cryp_probe(const void *fdt, int node,
1246 				   const void *compt_data __unused)
1247 {
1248 	TEE_Result res = TEE_SUCCESS;
1249 	struct dt_node_info dt_cryp = { };
1250 	struct rstctrl *rstctrl = NULL;
1251 	struct clk *clk = NULL;
1252 
1253 	fdt_fill_device_info(fdt, &dt_cryp, node);
1254 
1255 	if (dt_cryp.reg == DT_INFO_INVALID_REG ||
1256 	    dt_cryp.reg_size == DT_INFO_INVALID_REG_SIZE)
1257 		panic();
1258 
1259 	res = clk_dt_get_by_index(fdt, node, 0, &clk);
1260 	if (res)
1261 		return res;
1262 
1263 	res = rstctrl_dt_get_by_index(fdt, node, 0, &rstctrl);
1264 	if (res != TEE_SUCCESS && res != TEE_ERROR_ITEM_NOT_FOUND)
1265 		return res;
1266 
1267 	cryp_pdata.clock = clk;
1268 	cryp_pdata.reset = rstctrl;
1269 	cryp_pdata.base.pa = dt_cryp.reg;
1270 
1271 	io_pa_or_va_secure(&cryp_pdata.base, dt_cryp.reg_size);
1272 	if (!cryp_pdata.base.va)
1273 		panic();
1274 
1275 	stm32mp_register_secure_periph_iomem(cryp_pdata.base.pa);
1276 
1277 	if (clk_enable(cryp_pdata.clock))
1278 		panic();
1279 
1280 	if (stm32_cryp_reset())
1281 		panic();
1282 
1283 	if (IS_ENABLED(CFG_CRYPTO_DRV_AUTHENC)) {
1284 		res = stm32_register_authenc();
1285 		if (res) {
1286 			EMSG("Failed to register to authenc: %#"PRIx32, res);
1287 			panic();
1288 		}
1289 	}
1290 
1291 	if (IS_ENABLED(CFG_CRYPTO_DRV_CIPHER)) {
1292 		res = stm32_register_cipher(CRYP_IP);
1293 		if (res) {
1294 			EMSG("Failed to register to cipher: %#"PRIx32, res);
1295 			panic();
1296 		}
1297 	}
1298 
1299 	return TEE_SUCCESS;
1300 }
1301 
1302 static const struct dt_device_match stm32_cryp_match_table[] = {
1303 	{ .compatible = "st,stm32mp1-cryp" },
1304 	{ }
1305 };
1306 
1307 DEFINE_DT_DRIVER(stm32_cryp_dt_driver) = {
1308 	.name = "stm32-cryp",
1309 	.match_table = stm32_cryp_match_table,
1310 	.probe = stm32_cryp_probe,
1311 };
1312