xref: /optee_os/core/drivers/crypto/stm32/stm32_cryp.c (revision 4edd96e6d7a7228e907cf498b23e5b5fbdaf39a0)
1 // SPDX-License-Identifier: BSD-2-Clause
2 /*
3  * Copyright (c) 2021, STMicroelectronics - All Rights Reserved
4  */
5 #include <assert.h>
6 #include <config.h>
7 #include <drivers/clk.h>
8 #include <drivers/clk_dt.h>
9 #include <drivers/rstctrl.h>
10 #include <initcall.h>
11 #include <io.h>
12 #include <kernel/boot.h>
13 #include <kernel/delay.h>
14 #include <kernel/dt.h>
15 #include <kernel/dt_driver.h>
16 #include <kernel/mutex.h>
17 #include <libfdt.h>
18 #include <mm/core_memprot.h>
19 #include <stdint.h>
20 #include <stm32_util.h>
21 #include <string.h>
22 #include <utee_defines.h>
23 #include <util.h>
24 
25 #include "stm32_cryp.h"
26 #include "common.h"
27 
28 /* CRYP control register */
29 #define _CRYP_CR			0x0U
30 /* CRYP status register */
31 #define _CRYP_SR			0x04U
32 /* CRYP data input register */
33 #define _CRYP_DIN			0x08U
34 /* CRYP data output register */
35 #define _CRYP_DOUT			0x0CU
36 /* CRYP DMA control register */
37 #define _CRYP_DMACR			0x10U
38 /* CRYP interrupt mask set/clear register */
39 #define _CRYP_IMSCR			0x14U
40 /* CRYP raw interrupt status register */
41 #define _CRYP_RISR			0x18U
42 /* CRYP masked interrupt status register */
43 #define _CRYP_MISR			0x1CU
44 /* CRYP key registers */
45 #define _CRYP_K0LR			0x20U
46 #define _CRYP_K0RR			0x24U
47 #define _CRYP_K1LR			0x28U
48 #define _CRYP_K1RR			0x2CU
49 #define _CRYP_K2LR			0x30U
50 #define _CRYP_K2RR			0x34U
51 #define _CRYP_K3LR			0x38U
52 #define _CRYP_K3RR			0x3CU
53 /* CRYP initialization vector registers */
54 #define _CRYP_IV0LR			0x40U
55 #define _CRYP_IV0RR			0x44U
56 #define _CRYP_IV1LR			0x48U
57 #define _CRYP_IV1RR			0x4CU
58 /* CRYP context swap GCM-CCM registers */
59 #define _CRYP_CSGCMCCM0R		0x50U
60 #define _CRYP_CSGCMCCM1R		0x54U
61 #define _CRYP_CSGCMCCM2R		0x58U
62 #define _CRYP_CSGCMCCM3R		0x5CU
63 #define _CRYP_CSGCMCCM4R		0x60U
64 #define _CRYP_CSGCMCCM5R		0x64U
65 #define _CRYP_CSGCMCCM6R		0x68U
66 #define _CRYP_CSGCMCCM7R		0x6CU
67 /* CRYP context swap GCM registers */
68 #define _CRYP_CSGCM0R			0x70U
69 #define _CRYP_CSGCM1R			0x74U
70 #define _CRYP_CSGCM2R			0x78U
71 #define _CRYP_CSGCM3R			0x7CU
72 #define _CRYP_CSGCM4R			0x80U
73 #define _CRYP_CSGCM5R			0x84U
74 #define _CRYP_CSGCM6R			0x88U
75 #define _CRYP_CSGCM7R			0x8CU
76 /* CRYP hardware configuration register */
77 #define _CRYP_HWCFGR			0x3F0U
78 /* CRYP HW version register */
79 #define _CRYP_VERR			0x3F4U
80 /* CRYP identification */
81 #define _CRYP_IPIDR			0x3F8U
82 /* CRYP HW magic ID */
83 #define _CRYP_MID			0x3FCU
84 
85 #define CRYP_TIMEOUT_US			1000000U
86 #define TIMEOUT_US_1MS			1000U
87 
88 /* CRYP control register fields */
89 #define _CRYP_CR_RESET_VALUE		0x0U
90 #define _CRYP_CR_NPBLB_MSK		GENMASK_32(23, 20)
91 #define _CRYP_CR_NPBLB_OFF		20U
92 #define _CRYP_CR_GCM_CCMPH_MSK		GENMASK_32(17, 16)
93 #define _CRYP_CR_GCM_CCMPH_OFF		16U
94 #define _CRYP_CR_GCM_CCMPH_INIT		0U
95 #define _CRYP_CR_GCM_CCMPH_HEADER	1U
96 #define _CRYP_CR_GCM_CCMPH_PAYLOAD	2U
97 #define _CRYP_CR_GCM_CCMPH_FINAL	3U
98 #define _CRYP_CR_CRYPEN			BIT(15)
99 #define _CRYP_CR_FFLUSH			BIT(14)
100 #define _CRYP_CR_KEYSIZE_MSK		GENMASK_32(9, 8)
101 #define _CRYP_CR_KEYSIZE_OFF		8U
102 #define _CRYP_CR_KSIZE_128		0U
103 #define _CRYP_CR_KSIZE_192		1U
104 #define _CRYP_CR_KSIZE_256		2U
105 #define _CRYP_CR_DATATYPE_MSK		GENMASK_32(7, 6)
106 #define _CRYP_CR_DATATYPE_OFF		6U
107 #define _CRYP_CR_DATATYPE_NONE		0U
108 #define _CRYP_CR_DATATYPE_HALF_WORD	1U
109 #define _CRYP_CR_DATATYPE_BYTE		2U
110 #define _CRYP_CR_DATATYPE_BIT		3U
111 #define _CRYP_CR_ALGOMODE_MSK		(BIT(19) | GENMASK_32(5, 3))
112 #define _CRYP_CR_ALGOMODE_OFF		3U
113 #define _CRYP_CR_ALGOMODE_TDES_ECB	0x0U
114 #define _CRYP_CR_ALGOMODE_TDES_CBC	0x1U
115 #define _CRYP_CR_ALGOMODE_DES_ECB	0x2U
116 #define _CRYP_CR_ALGOMODE_DES_CBC	0x3U
117 #define _CRYP_CR_ALGOMODE_AES_ECB	0x4U
118 #define _CRYP_CR_ALGOMODE_AES_CBC	0x5U
119 #define _CRYP_CR_ALGOMODE_AES_CTR	0x6U
120 #define _CRYP_CR_ALGOMODE_AES		0x7U
121 #define _CRYP_CR_ALGOMODE_AES_GCM	BIT(16)
122 #define _CRYP_CR_ALGOMODE_AES_CCM	(BIT(16) | BIT(0))
123 #define _CRYP_CR_ALGODIR		BIT(2)
124 #define _CRYP_CR_ALGODIR_ENC		0U
125 #define _CRYP_CR_ALGODIR_DEC		BIT(2)
126 
127 /* CRYP status register fields */
128 #define _CRYP_SR_BUSY			BIT(4)
129 #define _CRYP_SR_OFFU			BIT(3)
130 #define _CRYP_SR_OFNE			BIT(2)
131 #define _CRYP_SR_IFNF			BIT(1)
132 #define _CRYP_SR_IFEM			BIT(0)
133 
134 /* CRYP DMA control register fields */
135 #define _CRYP_DMACR_DOEN		BIT(1)
136 #define _CRYP_DMACR_DIEN		BIT(0)
137 
138 /* CRYP interrupt fields */
139 #define _CRYP_I_OUT			BIT(1)
140 #define _CRYP_I_IN			BIT(0)
141 
142 /* CRYP hardware configuration register fields */
143 #define _CRYP_HWCFGR_CFG1_MSK		GENMASK_32(3, 0)
144 #define _CRYP_HWCFGR_CFG1_OFF		0U
145 #define _CRYP_HWCFGR_CFG2_MSK		GENMASK_32(7, 4)
146 #define _CRYP_HWCFGR_CFG2_OFF		4U
147 #define _CRYP_HWCFGR_CFG3_MSK		GENMASK_32(11, 8)
148 #define _CRYP_HWCFGR_CFG3_OFF		8U
149 #define _CRYP_HWCFGR_CFG4_MSK		GENMASK_32(15, 12)
150 #define _CRYP_HWCFGR_CFG4_OFF		12U
151 
152 /* CRYP HW version register */
153 #define _CRYP_VERR_MSK			GENMASK_32(7, 0)
154 #define _CRYP_VERR_OFF			0U
155 
156 /*
157  * Macros to manipulate bits in a local variable before writing it to the
158  * hardware register in a single access.
159  */
160 #define CLRBITS(v, bits)		((v) &= ~(bits))
161 #define SETBITS(v, bits)		((v) |= (bits))
162 
163 #define IS_ALGOMODE(cr, mod) \
164 	(((cr) & _CRYP_CR_ALGOMODE_MSK) == (_CRYP_CR_ALGOMODE_##mod << \
165 					  _CRYP_CR_ALGOMODE_OFF))
166 
167 #define SET_ALGOMODE(mod, cr) \
168 	clrsetbits(&(cr), _CRYP_CR_ALGOMODE_MSK, (_CRYP_CR_ALGOMODE_##mod << \
169 						  _CRYP_CR_ALGOMODE_OFF))
170 
171 #define GET_ALGOMODE(cr) \
172 	(((cr) & _CRYP_CR_ALGOMODE_MSK) >> _CRYP_CR_ALGOMODE_OFF)
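
/*
 * As an illustration (added note): SET_ALGOMODE(AES_GCM, cr) expands to
 * clrsetbits(&(cr), _CRYP_CR_ALGOMODE_MSK,
 *	      (_CRYP_CR_ALGOMODE_AES_GCM << _CRYP_CR_ALGOMODE_OFF))
 * and IS_ALGOMODE(cr, AES_GCM) tests the same field against that value.
 */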
173 
174 static struct stm32_cryp_platdata cryp_pdata;
175 static struct mutex cryp_lock = MUTEX_INITIALIZER;
176 
177 static void clrsetbits(uint32_t *v, uint32_t mask, uint32_t bits)
178 {
179 	*v = (*v & ~mask) | bits;
180 }
181 
182 static bool algo_mode_needs_iv(uint32_t cr)
183 {
184 	return !IS_ALGOMODE(cr, TDES_ECB) && !IS_ALGOMODE(cr, DES_ECB) &&
185 	       !IS_ALGOMODE(cr, AES_ECB);
186 }
187 
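/*
 * Relies on the ALGOMODE encoding: all ECB/CBC modes (TDES, DES, AES) use
 * values strictly below _CRYP_CR_ALGOMODE_AES_CTR.
 */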
188 static bool algo_mode_is_ecb_cbc(uint32_t cr)
189 {
190 	return GET_ALGOMODE(cr) < _CRYP_CR_ALGOMODE_AES_CTR;
191 }
192 
193 static bool algo_mode_is_aes(uint32_t cr)
194 {
195 	return ((cr & _CRYP_CR_ALGOMODE_MSK) >> _CRYP_CR_ALGOMODE_OFF) >=
196 	       _CRYP_CR_ALGOMODE_AES_ECB;
197 }
198 
199 static bool is_decrypt(uint32_t cr)
200 {
201 	return (cr & _CRYP_CR_ALGODIR) == _CRYP_CR_ALGODIR_DEC;
202 }
203 
204 static bool is_encrypt(uint32_t cr)
205 {
206 	return !is_decrypt(cr);
207 }
208 
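/*
 * NPBLB (number of padding bytes in the last block of payload) only needs
 * to be programmed for an incomplete last payload block in GCM encryption
 * or CCM decryption, which is what this helper checks (see its use in
 * stm32_cryp_final()).
 */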
209 static bool does_need_npblb(uint32_t cr)
210 {
211 	return (IS_ALGOMODE(cr, AES_GCM) && is_encrypt(cr)) ||
212 	       (IS_ALGOMODE(cr, AES_CCM) && is_decrypt(cr));
213 }
214 
215 static TEE_Result wait_sr_bits(vaddr_t base, uint32_t bits)
216 {
217 	uint64_t timeout_ref = timeout_init_us(CRYP_TIMEOUT_US);
218 
219 	while ((io_read32(base + _CRYP_SR) & bits) != bits)
220 		if (timeout_elapsed(timeout_ref))
221 			break;
222 
223 	if ((io_read32(base + _CRYP_SR) & bits) != bits)
224 		return TEE_ERROR_BUSY;
225 
226 	return TEE_SUCCESS;
227 }
228 
229 static TEE_Result wait_end_busy(vaddr_t base)
230 {
231 	uint64_t timeout_ref = timeout_init_us(CRYP_TIMEOUT_US);
232 
233 	while (io_read32(base + _CRYP_SR) & _CRYP_SR_BUSY)
234 		if (timeout_elapsed(timeout_ref))
235 			break;
236 
237 	if (io_read32(base + _CRYP_SR) & _CRYP_SR_BUSY)
238 		return TEE_ERROR_BUSY;
239 
240 	return TEE_SUCCESS;
241 }
242 
243 static TEE_Result wait_end_enable(vaddr_t base)
244 {
245 	uint64_t timeout_ref = timeout_init_us(CRYP_TIMEOUT_US);
246 
247 	while (io_read32(base + _CRYP_CR) & _CRYP_CR_CRYPEN)
248 		if (timeout_elapsed(timeout_ref))
249 			break;
250 
251 	if (io_read32(base + _CRYP_CR) & _CRYP_CR_CRYPEN)
252 		return TEE_ERROR_BUSY;
253 
254 	return TEE_SUCCESS;
255 }
256 
257 static TEE_Result __must_check write_align_block(struct stm32_cryp_context *ctx,
258 						 uint32_t *data)
259 {
260 	TEE_Result res = TEE_SUCCESS;
261 	unsigned int i = 0;
262 
263 	res = wait_sr_bits(ctx->base, _CRYP_SR_IFNF);
264 	if (res)
265 		return res;
266 
267 	for (i = 0; i < ctx->block_u32; i++) {
268 		/* No need to htobe() as we configure the HW to swap bytes */
269 		io_write32(ctx->base + _CRYP_DIN, data[i]);
270 	}
271 
272 	return TEE_SUCCESS;
273 }
274 
275 static TEE_Result __must_check write_block(struct stm32_cryp_context *ctx,
276 					   uint8_t *data)
277 {
278 	if (!IS_ALIGNED_WITH_TYPE(data, uint32_t)) {
279 		uint32_t data_u32[MAX_BLOCK_NB_U32] = { 0 };
280 
281 		memcpy(data_u32, data, ctx->block_u32 * sizeof(uint32_t));
282 		return write_align_block(ctx, data_u32);
283 	}
284 
285 	return write_align_block(ctx, (void *)data);
286 }
287 
288 static TEE_Result __must_check read_align_block(struct stm32_cryp_context *ctx,
289 						uint32_t *data)
290 {
291 	TEE_Result res = TEE_SUCCESS;
292 	unsigned int i = 0;
293 
294 	res = wait_sr_bits(ctx->base, _CRYP_SR_OFNE);
295 	if (res)
296 		return res;
297 
298 	for (i = 0; i < ctx->block_u32; i++) {
299 		/* No need to htobe() as we configure the HW to swap bytes */
300 		data[i] = io_read32(ctx->base + _CRYP_DOUT);
301 	}
302 
303 	return TEE_SUCCESS;
304 }
305 
306 static TEE_Result __must_check read_block(struct stm32_cryp_context *ctx,
307 					  uint8_t *data)
308 {
309 	if (!IS_ALIGNED_WITH_TYPE(data, uint32_t)) {
310 		TEE_Result res = TEE_SUCCESS;
311 		uint32_t data_u32[MAX_BLOCK_NB_U32] = { 0 };
312 
313 		res = read_align_block(ctx, data_u32);
314 		if (res)
315 			return res;
316 
317 		memcpy(data, data_u32, ctx->block_u32 * sizeof(uint32_t));
318 
319 		return TEE_SUCCESS;
320 	}
321 
322 	return read_align_block(ctx, (void *)data);
323 }
324 
325 static void cryp_end(struct stm32_cryp_context *ctx, TEE_Result prev_error)
326 {
327 	if (prev_error) {
328 		if (rstctrl_assert_to(cryp_pdata.reset, TIMEOUT_US_1MS))
329 			panic();
330 		if (rstctrl_deassert_to(cryp_pdata.reset, TIMEOUT_US_1MS))
331 			panic();
332 	}
333 
334 	/* Disable the CRYP peripheral */
335 	io_clrbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);
336 }
337 
338 static void cryp_write_iv(struct stm32_cryp_context *ctx)
339 {
340 	if (algo_mode_needs_iv(ctx->cr)) {
341 		unsigned int i = 0;
342 
343 		/* Restore the _CRYP_IVRx */
344 		for (i = 0; i < ctx->block_u32; i++)
345 			io_write32(ctx->base + _CRYP_IV0LR + i *
346 				   sizeof(uint32_t), ctx->iv[i]);
347 	}
348 }
349 
350 static void cryp_save_suspend(struct stm32_cryp_context *ctx)
351 {
352 	unsigned int i = 0;
353 
354 	if (IS_ALGOMODE(ctx->cr, AES_GCM) || IS_ALGOMODE(ctx->cr, AES_CCM))
355 		for (i = 0; i < ARRAY_SIZE(ctx->pm_gcmccm); i++)
356 			ctx->pm_gcmccm[i] = io_read32(ctx->base +
357 						      _CRYP_CSGCMCCM0R +
358 						      i * sizeof(uint32_t));
359 
360 	if (IS_ALGOMODE(ctx->cr, AES_GCM))
361 		for (i = 0; i < ARRAY_SIZE(ctx->pm_gcm); i++)
362 			ctx->pm_gcm[i] = io_read32(ctx->base + _CRYP_CSGCM0R +
363 						   i * sizeof(uint32_t));
364 }
365 
366 static void cryp_restore_suspend(struct stm32_cryp_context *ctx)
367 {
368 	unsigned int i = 0;
369 
370 	if (IS_ALGOMODE(ctx->cr, AES_GCM) || IS_ALGOMODE(ctx->cr, AES_CCM))
371 		for (i = 0; i < ARRAY_SIZE(ctx->pm_gcmccm); i++)
372 			io_write32(ctx->base + _CRYP_CSGCMCCM0R +
373 				   i * sizeof(uint32_t), ctx->pm_gcmccm[i]);
374 
375 	if (IS_ALGOMODE(ctx->cr, AES_GCM))
376 		for (i = 0; i < ARRAY_SIZE(ctx->pm_gcm); i++)
377 			io_write32(ctx->base + _CRYP_CSGCM0R +
378 				   i * sizeof(uint32_t), ctx->pm_gcm[i]);
379 }
380 
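/*
 * The key is written right-aligned: the last 32-bit word of the key goes
 * into K1RR for DES and into K3RR otherwise (TDES/AES), and the preceding
 * words fill the registers just below it.
 */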
381 static void cryp_write_key(struct stm32_cryp_context *ctx)
382 {
383 	vaddr_t reg = 0;
384 	int i = 0;
385 	uint32_t algo = GET_ALGOMODE(ctx->cr);
386 
387 	if (algo == _CRYP_CR_ALGOMODE_DES_ECB ||
388 	    algo == _CRYP_CR_ALGOMODE_DES_CBC)
389 		reg = ctx->base + _CRYP_K1RR;
390 	else
391 		reg = ctx->base + _CRYP_K3RR;
392 
393 	for (i = ctx->key_size / sizeof(uint32_t) - 1;
394 	     i >= 0;
395 	     i--, reg -= sizeof(uint32_t))
396 		io_write32(reg, ctx->key[i]);
397 }
398 
399 static TEE_Result cryp_prepare_key(struct stm32_cryp_context *ctx)
400 {
401 	TEE_Result res = TEE_SUCCESS;
402 
403 	/*
404 	 * For AES ECB/CBC decryption, key preparation mode must be selected
405 	 * to populate the key.
406 	 */
407 	if (is_decrypt(ctx->cr) && (IS_ALGOMODE(ctx->cr, AES_ECB) ||
408 				    IS_ALGOMODE(ctx->cr, AES_CBC))) {
409 		/* Select Algomode "prepare key" */
410 		io_clrsetbits32(ctx->base + _CRYP_CR, _CRYP_CR_ALGOMODE_MSK,
411 				_CRYP_CR_ALGOMODE_AES << _CRYP_CR_ALGOMODE_OFF);
412 
413 		cryp_write_key(ctx);
414 
415 		/* Enable CRYP */
416 		io_setbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);
417 
418 		res = wait_end_busy(ctx->base);
419 		if (res)
420 			return res;
421 
422 		/* Reset 'real' algomode */
423 		io_clrsetbits32(ctx->base + _CRYP_CR, _CRYP_CR_ALGOMODE_MSK,
424 				ctx->cr & _CRYP_CR_ALGOMODE_MSK);
425 	} else {
426 		cryp_write_key(ctx);
427 	}
428 
429 	return TEE_SUCCESS;
430 }
431 
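/*
 * The single CRYP instance is shared between contexts: each entry point
 * restores the caller's context into the hardware under cryp_lock, processes
 * data, then saves the context back so another context can run in between.
 */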
432 static TEE_Result save_context(struct stm32_cryp_context *ctx)
433 {
434 	/* Device should not be in a processing phase */
435 	if (io_read32(ctx->base + _CRYP_SR) & _CRYP_SR_BUSY)
436 		return TEE_ERROR_BAD_STATE;
437 
438 	/* Disable the CRYP peripheral */
439 	io_clrbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);
440 
441 	/* Save CR */
442 	ctx->cr = io_read32(ctx->base + _CRYP_CR);
443 
444 	cryp_save_suspend(ctx);
445 
446 	/* Save the current IV if the algo mode uses one */
447 	if (algo_mode_needs_iv(ctx->cr)) {
448 		unsigned int i = 0;
449 
450 		/* Save IV */
451 		for (i = 0; i < ctx->block_u32; i++)
452 			ctx->iv[i] = io_read32(ctx->base + _CRYP_IV0LR + i *
453 					       sizeof(uint32_t));
454 	}
455 
456 	return TEE_SUCCESS;
457 }
458 
459 /* To resume the processing of a message */
460 static TEE_Result restore_context(struct stm32_cryp_context *ctx)
461 {
462 	TEE_Result res = TEE_SUCCESS;
463 
464 	/* IP should be disabled */
465 	if (io_read32(ctx->base + _CRYP_CR) & _CRYP_CR_CRYPEN) {
466 		DMSG("Device is still enabled");
467 		return TEE_ERROR_BAD_STATE;
468 	}
469 
470 	/* Restore the _CRYP_CR */
471 	io_write32(ctx->base + _CRYP_CR, ctx->cr);
472 
473 	/* Write key and, in case of AES_CBC or AES_ECB decrypt, prepare it */
474 	res = cryp_prepare_key(ctx);
475 	if (res)
476 		return res;
477 
478 	cryp_restore_suspend(ctx);
479 
480 	cryp_write_iv(ctx);
481 
482 	/* Flush internal fifo */
483 	io_setbits32(ctx->base + _CRYP_CR, _CRYP_CR_FFLUSH);
484 
485 	/* Enable the CRYP peripheral */
486 	io_setbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);
487 
488 	return TEE_SUCCESS;
489 }
490 
491 /*
492  * Translate a byte index in an array of BE uint32_t into the index of the
493  * same byte in the corresponding LE uint32_t array.
494  */
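/* For instance: be_index(0) == 3, be_index(3) == 0 and be_index(4) == 7. */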
495 static size_t be_index(size_t index)
496 {
497 	return (index & ~0x3) + 3 - (index & 0x3);
498 }
499 
500 static TEE_Result ccm_first_context(struct stm32_cryp_context *ctx)
501 {
502 	TEE_Result res = TEE_SUCCESS;
503 	uint32_t b0[AES_BLOCK_NB_U32] = { 0 };
504 	uint8_t *iv = (uint8_t *)ctx->iv;
505 	size_t l = 0;
506 	size_t i = 15;
507 
508 	/* IP should be disabled */
509 	if (io_read32(ctx->base + _CRYP_CR) & _CRYP_CR_CRYPEN)
510 		return TEE_ERROR_BAD_STATE;
511 
512 	/* Write the _CRYP_CR */
513 	io_write32(ctx->base + _CRYP_CR, ctx->cr);
514 
515 	/* Write key */
516 	res = cryp_prepare_key(ctx);
517 	if (res)
518 		return res;
519 
520 	/* Save full IV that will be b0 */
521 	memcpy(b0, iv, sizeof(b0));
522 
523 	/*
524 	 * Update the IV to become CTR0/CTR1 before setting it.
525 	 * The IV is saved as LE uint32_t[4] as expected by the hardware,
526 	 * but the CCM RFC defines the bytes to update with BE indexing.
527 	 */
528 	/* Set flag bits to 0 (5 higher bits), keep 3 low bits */
529 	iv[be_index(0)] &= 0x7;
530 	/* Get size of length field (can be from 2 to 8) */
531 	l = iv[be_index(0)] + 1;
532 	/* Set Q to 0 */
533 	for (i = 15; i >= 15 - l + 1; i--)
534 		iv[be_index(i)] = 0;
535 	/* Save CTR0 */
536 	memcpy(ctx->ctr0_ccm, iv, sizeof(b0));
537 	/* Increment Q */
538 	iv[be_index(15)] |= 0x1;
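	/*
	 * Illustration (added note): with a 13-byte nonce the flags byte
	 * carries L' = 1, so l = 2: bytes 14 and 15 are cleared to form CTR0,
	 * then byte 15 is set to 1 to form CTR1.
	 */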
539 
540 	cryp_write_iv(ctx);
541 
542 	/* Enable the CRYP peripheral */
543 	io_setbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);
544 
545 	res = write_align_block(ctx, b0);
546 
547 	return res;
548 }
549 
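/*
 * GCM/CCM processing walks through the _CRYP_CR_GCM_CCMPH phases INIT,
 * HEADER (associated data), PAYLOAD and FINAL (tag). The two helpers below
 * move the peripheral from its current phase to the requested one, flushing
 * any pending header bytes on the way.
 */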
550 static TEE_Result do_from_init_to_phase(struct stm32_cryp_context *ctx,
551 					uint32_t new_phase)
552 {
553 	TEE_Result res = TEE_SUCCESS;
554 
555 	/*
556 	 * The init phase has not run yet.
557 	 * CCM needs a specific context restore sequence for the init phase.
558 	 */
559 	if (IS_ALGOMODE(ctx->cr, AES_CCM))
560 		res = ccm_first_context(ctx);
561 	else
562 		res = restore_context(ctx);
563 
564 	if (res)
565 		return res;
566 
567 	res = wait_end_enable(ctx->base);
568 	if (res)
569 		return res;
570 
571 	/* Move to 'new_phase' */
572 	io_clrsetbits32(ctx->base + _CRYP_CR, _CRYP_CR_GCM_CCMPH_MSK,
573 			new_phase << _CRYP_CR_GCM_CCMPH_OFF);
574 
575 	/* Enable the CRYP peripheral (init disabled it) */
576 	io_setbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);
577 
578 	return TEE_SUCCESS;
579 }
580 
581 static TEE_Result do_from_header_to_phase(struct stm32_cryp_context *ctx,
582 					  uint32_t new_phase)
583 {
584 	TEE_Result res = TEE_SUCCESS;
585 
586 	res = restore_context(ctx);
587 	if (res)
588 		return res;
589 
590 	if (ctx->extra_size) {
591 		/* Pad and push the pending partial header block before moving on */
592 		memset((uint8_t *)ctx->extra + ctx->extra_size, 0,
593 		       ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size);
594 
595 		res = write_align_block(ctx, ctx->extra);
596 		if (res)
597 			return res;
598 
599 		ctx->assoc_len += (ctx->extra_size) * INT8_BIT;
600 		ctx->extra_size = 0;
601 	}
602 
603 	/* Move to 'new_phase' */
604 	io_clrsetbits32(ctx->base + _CRYP_CR, _CRYP_CR_GCM_CCMPH_MSK,
605 			new_phase << _CRYP_CR_GCM_CCMPH_OFF);
606 
607 	return TEE_SUCCESS;
608 }
609 
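/*
 * Typical AEAD (GCM/CCM) caller sequence, as implied by the phase handling
 * below (a sketch, not an additional API contract):
 *
 *	struct stm32_cryp_context ctx = { };
 *
 *	stm32_cryp_init(&ctx, is_dec, STM32_CRYP_MODE_AES_GCM, key, key_size,
 *			iv, iv_size);
 *	stm32_cryp_update_assodata(&ctx, aad, aad_size);
 *	stm32_cryp_update_load(&ctx, in, out, size);
 *	stm32_cryp_final(&ctx, tag, tag_size);
 */
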
610 /**
611  * @brief Start an AES or (T)DES computation.
612  * @param ctx: CRYP process context
613  * @param is_dec: true if decryption, false if encryption
614  * @param algo: algorithm mode to use
615  * @param key: pointer to key
616  * @param key_size: key size
617  * @param iv: pointer to initialization vector (unused if algo is ECB)
618  * @param iv_size: iv size
619  * @note This function does not access the hardware, it only stores the configuration in ctx
620  *
621  * @retval TEE_SUCCESS if OK.
622  */
623 TEE_Result stm32_cryp_init(struct stm32_cryp_context *ctx, bool is_dec,
624 			   enum stm32_cryp_algo_mode algo,
625 			   const void *key, size_t key_size, const void *iv,
626 			   size_t iv_size)
627 {
628 	unsigned int i = 0;
629 	const uint32_t *iv_u32 = NULL;
630 	uint32_t local_iv[4] = { 0 };
631 	const uint32_t *key_u32 = NULL;
632 	uint32_t local_key[8] = { 0 };
633 
634 	ctx->assoc_len = 0;
635 	ctx->load_len = 0;
636 	ctx->extra_size = 0;
637 	ctx->lock = &cryp_lock;
638 
639 	ctx->base = io_pa_or_va(&cryp_pdata.base, 1);
640 	ctx->cr = _CRYP_CR_RESET_VALUE;
641 
642 	/* We want buffer to be u32 aligned */
643 	if (IS_ALIGNED_WITH_TYPE(key, uint32_t)) {
644 		key_u32 = key;
645 	} else {
646 		memcpy(local_key, key, key_size);
647 		key_u32 = local_key;
648 	}
649 
650 	if (IS_ALIGNED_WITH_TYPE(iv, uint32_t)) {
651 		iv_u32 = iv;
652 	} else {
653 		memcpy(local_iv, iv, iv_size);
654 		iv_u32 = local_iv;
655 	}
656 
657 	if (is_dec)
658 		SETBITS(ctx->cr, _CRYP_CR_ALGODIR);
659 	else
660 		CLRBITS(ctx->cr, _CRYP_CR_ALGODIR);
661 
662 	/* Save algo mode */
663 	switch (algo) {
664 	case STM32_CRYP_MODE_TDES_ECB:
665 		SET_ALGOMODE(TDES_ECB, ctx->cr);
666 		break;
667 	case STM32_CRYP_MODE_TDES_CBC:
668 		SET_ALGOMODE(TDES_CBC, ctx->cr);
669 		break;
670 	case STM32_CRYP_MODE_DES_ECB:
671 		SET_ALGOMODE(DES_ECB, ctx->cr);
672 		break;
673 	case STM32_CRYP_MODE_DES_CBC:
674 		SET_ALGOMODE(DES_CBC, ctx->cr);
675 		break;
676 	case STM32_CRYP_MODE_AES_ECB:
677 		SET_ALGOMODE(AES_ECB, ctx->cr);
678 		break;
679 	case STM32_CRYP_MODE_AES_CBC:
680 		SET_ALGOMODE(AES_CBC, ctx->cr);
681 		break;
682 	case STM32_CRYP_MODE_AES_CTR:
683 		SET_ALGOMODE(AES_CTR, ctx->cr);
684 		break;
685 	case STM32_CRYP_MODE_AES_GCM:
686 		SET_ALGOMODE(AES_GCM, ctx->cr);
687 		break;
688 	case STM32_CRYP_MODE_AES_CCM:
689 		SET_ALGOMODE(AES_CCM, ctx->cr);
690 		break;
691 	default:
692 		return TEE_ERROR_BAD_PARAMETERS;
693 	}
694 
695 	/*
696 	 * We use the HW byte swap (_CRYP_CR_DATATYPE_BYTE) for data, so
697 	 * there is no need to call
698 	 * TEE_U32_TO_BIG_ENDIAN(data) before writing to the DIN register,
699 	 * nor
700 	 * TEE_U32_FROM_BIG_ENDIAN() after reading from the DOUT register.
701 	 */
702 	clrsetbits(&ctx->cr, _CRYP_CR_DATATYPE_MSK,
703 		   _CRYP_CR_DATATYPE_BYTE << _CRYP_CR_DATATYPE_OFF);
704 
705 	/*
706 	 * Configure the key size for AES algorithms
707 	 * and save the block size.
708 	 */
709 	if (algo_mode_is_aes(ctx->cr)) {
710 		switch (key_size) {
711 		case AES_KEYSIZE_128:
712 			clrsetbits(&ctx->cr, _CRYP_CR_KEYSIZE_MSK,
713 				   _CRYP_CR_KSIZE_128 << _CRYP_CR_KEYSIZE_OFF);
714 			break;
715 		case AES_KEYSIZE_192:
716 			clrsetbits(&ctx->cr, _CRYP_CR_KEYSIZE_MSK,
717 				   _CRYP_CR_KSIZE_192 << _CRYP_CR_KEYSIZE_OFF);
718 			break;
719 		case AES_KEYSIZE_256:
720 			clrsetbits(&ctx->cr, _CRYP_CR_KEYSIZE_MSK,
721 				   _CRYP_CR_KSIZE_256 << _CRYP_CR_KEYSIZE_OFF);
722 			break;
723 		default:
724 			return TEE_ERROR_BAD_PARAMETERS;
725 		}
726 
727 		/* And set block size */
728 		ctx->block_u32 = AES_BLOCK_NB_U32;
729 	} else {
730 		/* And set DES/TDES block size */
731 		ctx->block_u32 = DES_BLOCK_NB_U32;
732 	}
733 
734 	/* Save key in HW order */
735 	ctx->key_size = key_size;
736 	for (i = 0; i < key_size / sizeof(uint32_t); i++)
737 		ctx->key[i] = TEE_U32_TO_BIG_ENDIAN(key_u32[i]);
738 
739 	/* Save IV */
740 	if (algo_mode_needs_iv(ctx->cr)) {
741 		if (!iv || iv_size != ctx->block_u32 * sizeof(uint32_t))
742 			return TEE_ERROR_BAD_PARAMETERS;
743 
744 		/*
745 		 * We save IV in the byte order expected by the
746 		 * IV registers
747 		 */
748 		for (i = 0; i < ctx->block_u32; i++)
749 			ctx->iv[i] = TEE_U32_TO_BIG_ENDIAN(iv_u32[i]);
750 	}
751 
752 	/* Reset suspend registers */
753 	memset(ctx->pm_gcmccm, 0, sizeof(ctx->pm_gcmccm));
754 	memset(ctx->pm_gcm, 0, sizeof(ctx->pm_gcm));
755 
756 	return TEE_SUCCESS;
757 }
758 
759 /**
760  * @brief Update (or start) an AES authentication process of
761  *        associated data (CCM or GCM).
762  * @param ctx: CRYP process context
763  * @param data: pointer to associated data
764  * @param data_size: data size
765  * @retval TEE_SUCCESS if OK.
766  */
767 TEE_Result stm32_cryp_update_assodata(struct stm32_cryp_context *ctx,
768 				      uint8_t *data, size_t data_size)
769 {
770 	TEE_Result res = TEE_SUCCESS;
771 	unsigned int i = 0;
772 	uint32_t previous_phase = 0;
773 
774 	/* If no associated data, nothing to do */
775 	if (!data || !data_size)
776 		return TEE_SUCCESS;
777 
778 	mutex_lock(ctx->lock);
779 
780 	previous_phase = (ctx->cr & _CRYP_CR_GCM_CCMPH_MSK) >>
781 			 _CRYP_CR_GCM_CCMPH_OFF;
782 
783 	switch (previous_phase) {
784 	case _CRYP_CR_GCM_CCMPH_INIT:
785 		res = do_from_init_to_phase(ctx, _CRYP_CR_GCM_CCMPH_HEADER);
786 		break;
787 	case _CRYP_CR_GCM_CCMPH_HEADER:
788 		/*
789 		 * Function update_assodata was already called.
790 		 * We only need to restore the context.
791 		 */
792 		res = restore_context(ctx);
793 		break;
794 	default:
795 		assert(0);
796 		res = TEE_ERROR_BAD_STATE;
797 	}
798 
799 	if (res)
800 		goto out;
801 
802 	/* Handle data remaining from a previous update_assodata() call, if any */
803 	if (ctx->extra_size &&
804 	    (ctx->extra_size + data_size >=
805 	     ctx->block_u32 * sizeof(uint32_t))) {
806 		uint32_t block[MAX_BLOCK_NB_U32] = { 0 };
807 
808 		memcpy(block, ctx->extra, ctx->extra_size);
809 		memcpy((uint8_t *)block + ctx->extra_size, data,
810 		       ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size);
811 
812 		res = write_align_block(ctx, block);
813 		if (res)
814 			goto out;
815 
816 		i += ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size;
817 		ctx->extra_size = 0;
818 		ctx->assoc_len += ctx->block_u32 * sizeof(uint32_t) * INT8_BIT;
819 	}
820 
821 	while (data_size - i >= ctx->block_u32 * sizeof(uint32_t)) {
822 		res = write_block(ctx, data + i);
823 		if (res)
824 			goto out;
825 
826 		/* Process next block */
827 		i += ctx->block_u32 * sizeof(uint32_t);
828 		ctx->assoc_len += ctx->block_u32 * sizeof(uint32_t) * INT8_BIT;
829 	}
830 
831 	/*
832 	 * Handle the last block if the size is not a block size multiple:
833 	 * save the remaining data to process it later (potentially with new
834 	 * associated data).
835 	 */
836 	if (i < data_size) {
837 		memcpy((uint8_t *)ctx->extra + ctx->extra_size, data + i,
838 		       data_size - i);
839 		ctx->extra_size += data_size - i;
840 	}
841 
842 	res = save_context(ctx);
843 out:
844 	if (res)
845 		cryp_end(ctx, res);
846 
847 	mutex_unlock(ctx->lock);
848 
849 	return res;
850 }
851 
852 /**
853  * @brief Update (or start) an AES authentication and de/encryption with
854  *        payload data (CCM or GCM).
855  * @param ctx: CRYP process context
856  * @param data_in: pointer to payload
857  * @param data_out: pointer where to save de/encrypted payload
858  * @param data_size: payload size
859  *
860  * @retval TEE_SUCCESS if OK.
861  */
862 TEE_Result stm32_cryp_update_load(struct stm32_cryp_context *ctx,
863 				  uint8_t *data_in, uint8_t *data_out,
864 				  size_t data_size)
865 {
866 	TEE_Result res = TEE_SUCCESS;
867 	unsigned int i = 0;
868 	uint32_t previous_phase = 0;
869 
870 	if (!data_in || !data_size)
871 		return TEE_SUCCESS;
872 
873 	mutex_lock(ctx->lock);
874 
875 	previous_phase = (ctx->cr & _CRYP_CR_GCM_CCMPH_MSK) >>
876 			 _CRYP_CR_GCM_CCMPH_OFF;
877 
878 	switch (previous_phase) {
879 	case _CRYP_CR_GCM_CCMPH_INIT:
880 		res = do_from_init_to_phase(ctx, _CRYP_CR_GCM_CCMPH_PAYLOAD);
881 		break;
882 	case _CRYP_CR_GCM_CCMPH_HEADER:
883 		res = do_from_header_to_phase(ctx, _CRYP_CR_GCM_CCMPH_PAYLOAD);
884 		break;
885 	case _CRYP_CR_GCM_CCMPH_PAYLOAD:
886 		/* new update_load call, we only need to restore context */
887 		/* New update_load() call: only the context needs restoring */
888 		break;
889 	default:
890 		assert(0);
891 		res = TEE_ERROR_BAD_STATE;
892 	}
893 
894 	if (res)
895 		goto out;
896 
897 	/* Handle an incomplete block left from a previous update_load() call */
898 	if (ctx->extra_size &&
899 	    (ctx->extra_size + data_size >=
900 	     ctx->block_u32 * sizeof(uint32_t))) {
901 		uint32_t block_out[MAX_BLOCK_NB_U32] = { 0 };
902 
903 		memcpy((uint8_t *)ctx->extra + ctx->extra_size, data_in + i,
904 		       ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size);
905 
906 		res = write_align_block(ctx, ctx->extra);
907 		if (res)
908 			goto out;
909 
910 		res = read_align_block(ctx, block_out);
911 		if (res)
912 			goto out;
913 
914 		memcpy(data_out + i, (uint8_t *)block_out + ctx->extra_size,
915 		       ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size);
916 
917 		i += ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size;
918 		ctx->extra_size = 0;
919 
920 		ctx->load_len += ctx->block_u32 * sizeof(uint32_t) * INT8_BIT;
921 	}
922 
923 	while (data_size - i >= ctx->block_u32 * sizeof(uint32_t)) {
924 		res = write_block(ctx, data_in + i);
925 		if (res)
926 			goto out;
927 
928 		res = read_block(ctx, data_out + i);
929 		if (res)
930 			goto out;
931 
932 		/* Process next block */
933 		i += ctx->block_u32 * sizeof(uint32_t);
934 		ctx->load_len += ctx->block_u32 * sizeof(uint32_t) * INT8_BIT;
935 	}
936 
937 	res = save_context(ctx);
938 	if (res)
939 		goto out;
940 
941 	/*
942 	 * Handle the last block if the size is not a block size multiple.
943 	 * The context is already saved: pad the block with zeros, send it to
944 	 * CRYP to get the {en,de}crypted data, and store the input data so it
945 	 * can be resent as the last block in final() or completed by the next
946 	 * update_load() call, so that the tag is correct.
947 	 */
948 	if (i < data_size) {
949 		uint32_t block_out[MAX_BLOCK_NB_U32] = { 0 };
950 		size_t prev_extra_size = ctx->extra_size;
951 
952 		/* Re-enable the CRYP peripheral */
953 		io_setbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);
954 
955 		memcpy((uint8_t *)ctx->extra + ctx->extra_size, data_in + i,
956 		       data_size - i);
957 		ctx->extra_size += data_size - i;
958 		memset((uint8_t *)ctx->extra + ctx->extra_size, 0,
959 		       ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size);
960 
961 		res = write_align_block(ctx, ctx->extra);
962 		if (res)
963 			goto out;
964 
965 		res = read_align_block(ctx, block_out);
966 		if (res)
967 			goto out;
968 
969 		memcpy(data_out + i, (uint8_t *)block_out + prev_extra_size,
970 		       data_size - i);
971 
972 		/* Disable the CRYP peripheral */
973 		io_clrbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);
974 	}
975 
976 out:
977 	if (res)
978 		cryp_end(ctx, res);
979 
980 	mutex_unlock(ctx->lock);
981 
982 	return res;
983 }
984 
985 /**
986  * @brief Get authentication tag for AES authenticated algorithms (CCM or GCM).
987  * @param ctx: CRYP process context
988  * @param tag: pointer where to save the tag
989  * @param tag_size: tag size
990  *
991  * @retval TEE_SUCCESS if OK.
992  */
993 TEE_Result stm32_cryp_final(struct stm32_cryp_context *ctx, uint8_t *tag,
994 			    size_t tag_size)
995 {
996 	TEE_Result res = TEE_SUCCESS;
997 	uint32_t tag_u32[4] = { 0 };
998 	uint32_t previous_phase = 0;
999 
1000 	mutex_lock(ctx->lock);
1001 
1002 	previous_phase = (ctx->cr & _CRYP_CR_GCM_CCMPH_MSK) >>
1003 			 _CRYP_CR_GCM_CCMPH_OFF;
1004 
1005 	switch (previous_phase) {
1006 	case _CRYP_CR_GCM_CCMPH_INIT:
1007 		res = do_from_init_to_phase(ctx, _CRYP_CR_GCM_CCMPH_FINAL);
1008 		break;
1009 	case _CRYP_CR_GCM_CCMPH_HEADER:
1010 		res = do_from_header_to_phase(ctx, _CRYP_CR_GCM_CCMPH_FINAL);
1011 		break;
1012 	case _CRYP_CR_GCM_CCMPH_PAYLOAD:
1013 		res = restore_context(ctx);
1014 		if (res)
1015 			break;
1016 
1017 		/* Handle an incomplete block left from a previous update_load() */
1018 		if (ctx->extra_size) {
1019 			uint32_t block_out[MAX_BLOCK_NB_U32] = { 0 };
1020 			size_t sz = ctx->block_u32 * sizeof(uint32_t) -
1021 				    ctx->extra_size;
1022 
1023 			if (does_need_npblb(ctx->cr)) {
1024 				io_clrsetbits32(ctx->base + _CRYP_CR,
1025 						_CRYP_CR_NPBLB_MSK,
1026 						sz << _CRYP_CR_NPBLB_OFF);
1027 			}
1028 
1029 			memset((uint8_t *)ctx->extra + ctx->extra_size, 0, sz);
1030 
1031 			res = write_align_block(ctx, ctx->extra);
1032 			if (res)
1033 				break;
1034 
1035 			/* Discard the {en,de}crypted data: already output by update_load() */
1036 			res = read_align_block(ctx, block_out);
1037 			if (res)
1038 				break;
1039 
1040 			ctx->load_len += (ctx->extra_size * INT8_BIT);
1041 			ctx->extra_size = 0;
1042 		}
1043 
1044 		/* Move to final phase */
1045 		io_clrsetbits32(ctx->base + _CRYP_CR, _CRYP_CR_GCM_CCMPH_MSK,
1046 				_CRYP_CR_GCM_CCMPH_FINAL <<
1047 				_CRYP_CR_GCM_CCMPH_OFF);
1048 		break;
1049 	default:
1050 		assert(0);
1051 		res = TEE_ERROR_BAD_STATE;
1052 	}
1053 
1054 	if (res)
1055 		goto out;
1056 
1057 	if (IS_ALGOMODE(ctx->cr, AES_GCM)) {
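		/*
		 * GCM final phase: push the block made of the bit lengths of
		 * the associated data and of the payload, each as a 64-bit
		 * big-endian value.
		 */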
1058 		/* No need to htobe() as we configure the HW to swap bytes */
1059 		io_write32(ctx->base + _CRYP_DIN, 0U);
1060 		io_write32(ctx->base + _CRYP_DIN, ctx->assoc_len);
1061 		io_write32(ctx->base + _CRYP_DIN, 0U);
1062 		io_write32(ctx->base + _CRYP_DIN, ctx->load_len);
1063 	} else if (IS_ALGOMODE(ctx->cr, AES_CCM)) {
1064 		/* No need to htobe() in this phase */
1065 		res = write_align_block(ctx, ctx->ctr0_ccm);
1066 		if (res)
1067 			goto out;
1068 	}
1069 
1070 	res = read_align_block(ctx, tag_u32);
1071 	if (res)
1072 		goto out;
1073 
1074 	memcpy(tag, tag_u32, MIN(sizeof(tag_u32), tag_size));
1075 
1076 out:
1077 	cryp_end(ctx, res);
1078 	mutex_unlock(ctx->lock);
1079 
1080 	return res;
1081 }
1082 
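/*
 * Typical cipher (ECB/CBC/CTR) caller sequence, as implied by the code below
 * (a sketch, not an additional API contract):
 *
 *	stm32_cryp_init(&ctx, is_dec, STM32_CRYP_MODE_AES_CTR, key, key_size,
 *			iv, iv_size);
 *	stm32_cryp_update(&ctx, false, in1, out1, size1);
 *	stm32_cryp_update(&ctx, true, in2, out2, size2);
 */
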
1083 /**
1084  * @brief Update (or start) a de/encryption process.
1085  * @param ctx: CRYP process context
1086  * @param last_block: true if last payload data block
1087  * @param data_in: pointer to payload
1088  * @param data_out: pointer where to save de/encrypted payload
1089  * @param data_size: payload size
1090  *
1091  * @retval TEE_SUCCESS if OK.
1092  */
1093 TEE_Result stm32_cryp_update(struct stm32_cryp_context *ctx, bool last_block,
1094 			     uint8_t *data_in, uint8_t *data_out,
1095 			     size_t data_size)
1096 {
1097 	TEE_Result res = TEE_SUCCESS;
1098 	unsigned int i = 0;
1099 
1100 	mutex_lock(ctx->lock);
1101 
1102 	/*
1103 	 * In CBC and ECB encryption, the last 2 blocks need specific handling
1104 	 * when the total size is not aligned to the block size. This is not
1105 	 * implemented yet, so return TEE_ERROR_NOT_IMPLEMENTED. Moreover, as
1106 	 * the last 2 blocks must be known, return TEE_ERROR_BAD_STATE if such
1107 	 * an unaligned call provides less than two blocks.
1108 	 */
1109 	if (last_block && algo_mode_is_ecb_cbc(ctx->cr) &&
1110 	    is_encrypt(ctx->cr) &&
1111 	    (ROUNDDOWN(data_size, ctx->block_u32 * sizeof(uint32_t)) !=
1112 	     data_size)) {
1113 		if (data_size < ctx->block_u32 * sizeof(uint32_t) * 2) {
1114 			/*
1115 			 * For CBC, the size of the last part should be at
1116 			 * least 2 * BLOCK_SIZE
1117 			 */
1118 			EMSG("Unexpected last block size");
1119 			res = TEE_ERROR_BAD_STATE;
1120 			goto out;
1121 		}
1122 		/*
1123 		 * Moreover, the ECB/CBC specific padding for encryption is not
1124 		 * yet implemented, and is not used in OP-TEE.
1125 		 */
1126 		res = TEE_ERROR_NOT_IMPLEMENTED;
1127 		goto out;
1128 	}
1129 
1130 	/* Manage remaining CTR mask from previous update call */
1131 	if (IS_ALGOMODE(ctx->cr, AES_CTR) && ctx->extra_size) {
1132 		unsigned int j = 0;
1133 		uint8_t *mask = (uint8_t *)ctx->extra;
1134 
1135 		for (j = 0; j < ctx->extra_size && i < data_size; j++, i++)
1136 			data_out[i] = data_in[i] ^ mask[j];
1137 
1138 		if (j != ctx->extra_size) {
1139 			/*
1140 			 * We did not consume all of the saved mask,
1141 			 * but there is no more input data.
1142 			 */
1143 
1144 			/* Save the remaining mask and its new size */
1145 			memmove(ctx->extra, ctx->extra + j,
1146 				ctx->extra_size - j);
1147 			ctx->extra_size -= j;
1148 
1149 			/*
1150 			 * No need to save the HW context: we did not
1151 			 * modify the HW state.
1152 			 */
1153 			res = TEE_SUCCESS;
1154 			goto out;
1155 		}
1156 
1157 		/* All extra mask consumed */
1158 		ctx->extra_size = 0;
1159 	}
1160 
1161 	res = restore_context(ctx);
1162 	if (res)
1163 		goto out;
1164 
1165 	while (data_size - i >= ctx->block_u32 * sizeof(uint32_t)) {
1166 		/*
1167 		 * We only write/read one block at a time, even though CRYP
1168 		 * uses input (and output) FIFOs of 8 * uint32_t.
1169 		 */
1170 		res = write_block(ctx, data_in + i);
1171 		if (res)
1172 			goto out;
1173 
1174 		res = read_block(ctx, data_out + i);
1175 		if (res)
1176 			goto out;
1177 
1178 		/* Process next block */
1179 		i += ctx->block_u32 * sizeof(uint32_t);
1180 	}
1181 
1182 	/* Manage last block if not a block size multiple */
1183 	if (i < data_size) {
1184 		uint32_t block_in[MAX_BLOCK_NB_U32] = { 0 };
1185 		uint32_t block_out[MAX_BLOCK_NB_U32] = { 0 };
1186 
1187 		if (!IS_ALGOMODE(ctx->cr, AES_CTR)) {
1188 			/*
1189 			 * Algorithms other than CTR can only process sizes
1190 			 * that are multiples of the block size.
1191 			 */
1192 			res = TEE_ERROR_BAD_PARAMETERS;
1193 			goto out;
1194 		}
1195 
1196 		/*
1197 		 * For CTR, save the generated keystream mask so it can be
1198 		 * used at the next update call.
1199 		 */
1200 		memcpy(block_in, data_in + i, data_size - i);
1201 
1202 		res = write_align_block(ctx, block_in);
1203 		if (res)
1204 			goto out;
1205 
1206 		res = read_align_block(ctx, block_out);
1207 		if (res)
1208 			goto out;
1209 
1210 		memcpy(data_out + i, block_out, data_size - i);
1211 
1212 		/* Save the mask for a possible next call */
1213 		ctx->extra_size = ctx->block_u32 * sizeof(uint32_t) -
1214 			(data_size - i);
1215 		memcpy(ctx->extra, (uint8_t *)block_out + data_size - i,
1216 		       ctx->extra_size);
1217 	}
1218 
1219 	if (!last_block)
1220 		res = save_context(ctx);
1221 
1222 out:
1223 	/* If last block or error, end of CRYP process */
1224 	if (last_block || res)
1225 		cryp_end(ctx, res);
1226 
1227 	mutex_unlock(ctx->lock);
1228 
1229 	return res;
1230 }
1231 
1232 static TEE_Result stm32_cryp_probe(const void *fdt, int node,
1233 				   const void *compt_data __unused)
1234 {
1235 	TEE_Result res = TEE_SUCCESS;
1236 	struct dt_node_info dt_cryp = { };
1237 	struct rstctrl *rstctrl = NULL;
1238 	struct clk *clk = NULL;
1239 
1240 	fdt_fill_device_info(fdt, &dt_cryp, node);
1241 
1242 	if (dt_cryp.reg == DT_INFO_INVALID_REG ||
1243 	    dt_cryp.reg_size == DT_INFO_INVALID_REG_SIZE)
1244 		panic();
1245 
1246 	res = clk_dt_get_by_index(fdt, node, 0, &clk);
1247 	if (res)
1248 		return res;
1249 
1250 	res = rstctrl_dt_get_by_index(fdt, node, 0, &rstctrl);
1251 	if (res)
1252 		return res;
1253 
1254 	cryp_pdata.clock = clk;
1255 	cryp_pdata.reset = rstctrl;
1256 	cryp_pdata.base.pa = dt_cryp.reg;
1257 
1258 	io_pa_or_va_secure(&cryp_pdata.base, dt_cryp.reg_size);
1259 	if (!cryp_pdata.base.va)
1260 		panic();
1261 
1262 	stm32mp_register_secure_periph_iomem(cryp_pdata.base.pa);
1263 
1264 	if (clk_enable(cryp_pdata.clock))
1265 		panic();
1266 
1267 	if (rstctrl_assert_to(cryp_pdata.reset, TIMEOUT_US_1MS))
1268 		panic();
1269 
1270 	if (rstctrl_deassert_to(cryp_pdata.reset, TIMEOUT_US_1MS))
1271 		panic();
1272 
1273 	if (IS_ENABLED(CFG_CRYPTO_DRV_AUTHENC)) {
1274 		res = stm32_register_authenc();
1275 		if (res) {
1276 			EMSG("Failed to register to authenc: %#"PRIx32, res);
1277 			panic();
1278 		}
1279 	}
1280 
1281 	if (IS_ENABLED(CFG_CRYPTO_DRV_CIPHER)) {
1282 		res = stm32_register_cipher(CRYP_IP);
1283 		if (res) {
1284 			EMSG("Failed to register to cipher: %#"PRIx32, res);
1285 			panic();
1286 		}
1287 	}
1288 
1289 	return TEE_SUCCESS;
1290 }
1291 
1292 static const struct dt_device_match stm32_cryp_match_table[] = {
1293 	{ .compatible = "st,stm32mp1-cryp" },
1294 	{ }
1295 };
1296 
1297 DEFINE_DT_DRIVER(stm32_cryp_dt_driver) = {
1298 	.name = "stm32-cryp",
1299 	.match_table = stm32_cryp_match_table,
1300 	.probe = stm32_cryp_probe,
1301 };
1302