// SPDX-License-Identifier: BSD-2-Clause
/*
 * Copyright (c) 2021, STMicroelectronics - All Rights Reserved
 */
#include <assert.h>
#include <config.h>
#include <drivers/clk.h>
#include <drivers/clk_dt.h>
#include <drivers/rstctrl.h>
#include <initcall.h>
#include <io.h>
#include <kernel/boot.h>
#include <kernel/delay.h>
#include <kernel/dt.h>
#include <kernel/dt_driver.h>
#include <kernel/mutex.h>
#include <libfdt.h>
#include <mm/core_memprot.h>
#include <stdint.h>
#include <stm32_util.h>
#include <string.h>
#include <utee_defines.h>
#include <util.h>

#include "stm32_cryp.h"
#include "common.h"

#define INT8_BIT			8U
#define AES_BLOCK_SIZE_BIT		128U
#define AES_BLOCK_SIZE			(AES_BLOCK_SIZE_BIT / INT8_BIT)
#define AES_BLOCK_NB_U32		(AES_BLOCK_SIZE / sizeof(uint32_t))
#define DES_BLOCK_SIZE_BIT		64U
#define DES_BLOCK_SIZE			(DES_BLOCK_SIZE_BIT / INT8_BIT)
#define DES_BLOCK_NB_U32		(DES_BLOCK_SIZE / sizeof(uint32_t))
#define MAX_BLOCK_SIZE_BIT		AES_BLOCK_SIZE_BIT
#define MAX_BLOCK_SIZE			AES_BLOCK_SIZE
#define MAX_BLOCK_NB_U32		AES_BLOCK_NB_U32
#define AES_KEYSIZE_128			16U
#define AES_KEYSIZE_192			24U
#define AES_KEYSIZE_256			32U

/* CRYP control register */
#define _CRYP_CR			0x0U
/* CRYP status register */
#define _CRYP_SR			0x04U
/* CRYP data input register */
#define _CRYP_DIN			0x08U
/* CRYP data output register */
#define _CRYP_DOUT			0x0CU
/* CRYP DMA control register */
#define _CRYP_DMACR			0x10U
/* CRYP interrupt mask set/clear register */
#define _CRYP_IMSCR			0x14U
/* CRYP raw interrupt status register */
#define _CRYP_RISR			0x18U
/* CRYP masked interrupt status register */
#define _CRYP_MISR			0x1CU
/* CRYP key registers */
#define _CRYP_K0LR			0x20U
#define _CRYP_K0RR			0x24U
#define _CRYP_K1LR			0x28U
#define _CRYP_K1RR			0x2CU
#define _CRYP_K2LR			0x30U
#define _CRYP_K2RR			0x34U
#define _CRYP_K3LR			0x38U
#define _CRYP_K3RR			0x3CU
/* CRYP initialization vector registers */
#define _CRYP_IV0LR			0x40U
#define _CRYP_IV0RR			0x44U
#define _CRYP_IV1LR			0x48U
#define _CRYP_IV1RR			0x4CU
/* CRYP context swap GCM-CCM registers */
#define _CRYP_CSGCMCCM0R		0x50U
#define _CRYP_CSGCMCCM1R		0x54U
#define _CRYP_CSGCMCCM2R		0x58U
#define _CRYP_CSGCMCCM3R		0x5CU
#define _CRYP_CSGCMCCM4R		0x60U
#define _CRYP_CSGCMCCM5R		0x64U
#define _CRYP_CSGCMCCM6R		0x68U
#define _CRYP_CSGCMCCM7R		0x6CU
/* CRYP context swap GCM registers */
#define _CRYP_CSGCM0R			0x70U
#define _CRYP_CSGCM1R			0x74U
#define _CRYP_CSGCM2R			0x78U
#define _CRYP_CSGCM3R			0x7CU
#define _CRYP_CSGCM4R			0x80U
#define _CRYP_CSGCM5R			0x84U
#define _CRYP_CSGCM6R			0x88U
#define _CRYP_CSGCM7R			0x8CU
/* CRYP hardware configuration register */
#define _CRYP_HWCFGR			0x3F0U
/* CRYP HW version register */
#define _CRYP_VERR			0x3F4U
/* CRYP identification */
#define _CRYP_IPIDR			0x3F8U
/* CRYP HW magic ID */
#define _CRYP_MID			0x3FCU

#define CRYP_TIMEOUT_US			1000000U
#define TIMEOUT_US_1MS			1000U

/* CRYP control register fields */
#define _CRYP_CR_RESET_VALUE		0x0U
#define _CRYP_CR_NPBLB_MSK		GENMASK_32(23, 20)
#define _CRYP_CR_NPBLB_OFF		20U
#define _CRYP_CR_GCM_CCMPH_MSK		GENMASK_32(17, 16)
#define _CRYP_CR_GCM_CCMPH_OFF		16U
#define _CRYP_CR_GCM_CCMPH_INIT		0U
#define _CRYP_CR_GCM_CCMPH_HEADER	1U
#define _CRYP_CR_GCM_CCMPH_PAYLOAD	2U
#define _CRYP_CR_GCM_CCMPH_FINAL	3U
#define _CRYP_CR_CRYPEN			BIT(15)
#define _CRYP_CR_FFLUSH			BIT(14)
#define _CRYP_CR_KEYSIZE_MSK		GENMASK_32(9, 8)
#define _CRYP_CR_KEYSIZE_OFF		8U
#define _CRYP_CR_KSIZE_128		0U
#define _CRYP_CR_KSIZE_192		1U
#define _CRYP_CR_KSIZE_256		2U
#define _CRYP_CR_DATATYPE_MSK		GENMASK_32(7, 6)
#define _CRYP_CR_DATATYPE_OFF		6U
#define _CRYP_CR_DATATYPE_NONE		0U
#define _CRYP_CR_DATATYPE_HALF_WORD	1U
#define _CRYP_CR_DATATYPE_BYTE		2U
#define _CRYP_CR_DATATYPE_BIT		3U
#define _CRYP_CR_ALGOMODE_MSK		(BIT(19) | GENMASK_32(5, 3))
#define _CRYP_CR_ALGOMODE_OFF		3U
#define _CRYP_CR_ALGOMODE_TDES_ECB	0x0U
#define _CRYP_CR_ALGOMODE_TDES_CBC	0x1U
#define _CRYP_CR_ALGOMODE_DES_ECB	0x2U
#define _CRYP_CR_ALGOMODE_DES_CBC	0x3U
#define _CRYP_CR_ALGOMODE_AES_ECB	0x4U
#define _CRYP_CR_ALGOMODE_AES_CBC	0x5U
#define _CRYP_CR_ALGOMODE_AES_CTR	0x6U
#define _CRYP_CR_ALGOMODE_AES		0x7U
#define _CRYP_CR_ALGOMODE_AES_GCM	BIT(16)
#define _CRYP_CR_ALGOMODE_AES_CCM	(BIT(16) | BIT(0))
#define _CRYP_CR_ALGODIR		BIT(2)
#define _CRYP_CR_ALGODIR_ENC		0U
#define _CRYP_CR_ALGODIR_DEC		BIT(2)

/* CRYP status register fields */
#define _CRYP_SR_BUSY			BIT(4)
#define _CRYP_SR_OFFU			BIT(3)
#define _CRYP_SR_OFNE			BIT(2)
#define _CRYP_SR_IFNF			BIT(1)
#define _CRYP_SR_IFEM			BIT(0)

/* CRYP DMA control register fields */
#define _CRYP_DMACR_DOEN		BIT(1)
#define _CRYP_DMACR_DIEN		BIT(0)

/* CRYP interrupt fields */
#define _CRYP_I_OUT			BIT(1)
#define _CRYP_I_IN			BIT(0)

/* CRYP hardware configuration register fields */
#define _CRYP_HWCFGR_CFG1_MSK		GENMASK_32(3, 0)
#define _CRYP_HWCFGR_CFG1_OFF		0U
#define _CRYP_HWCFGR_CFG2_MSK		GENMASK_32(7, 4)
#define _CRYP_HWCFGR_CFG2_OFF		4U
#define _CRYP_HWCFGR_CFG3_MSK		GENMASK_32(11, 8)
#define _CRYP_HWCFGR_CFG3_OFF		8U
#define _CRYP_HWCFGR_CFG4_MSK		GENMASK_32(15, 12)
#define _CRYP_HWCFGR_CFG4_OFF		12U

/* CRYP HW version register */
#define _CRYP_VERR_MSK			GENMASK_32(7, 0)
#define _CRYP_VERR_OFF			0U

/*
 * Macros to manage bit manipulation when working on a local variable
 * before writing it only once to the actual register.
 */
#define CLRBITS(v, bits)		((v) &= ~(bits))
#define SETBITS(v, bits)		((v) |= (bits))

#define IS_ALGOMODE(cr, mod) \
	(((cr) & _CRYP_CR_ALGOMODE_MSK) == (_CRYP_CR_ALGOMODE_##mod << \
					  _CRYP_CR_ALGOMODE_OFF))

#define SET_ALGOMODE(mod, cr) \
	clrsetbits(&(cr), _CRYP_CR_ALGOMODE_MSK, (_CRYP_CR_ALGOMODE_##mod << \
						  _CRYP_CR_ALGOMODE_OFF))

#define GET_ALGOMODE(cr) \
	(((cr) & _CRYP_CR_ALGOMODE_MSK) >> _CRYP_CR_ALGOMODE_OFF)

#define TOBE32(x)			TEE_U32_BSWAP(x)
#define FROMBE32(x)			TEE_U32_BSWAP(x)

static struct stm32_cryp_platdata cryp_pdata;
static struct mutex cryp_lock = MUTEX_INITIALIZER;

static void clrsetbits(uint32_t *v, uint32_t mask, uint32_t bits)
{
	*v = (*v & ~mask) | bits;
}

static bool algo_mode_needs_iv(uint32_t cr)
{
	return !IS_ALGOMODE(cr, TDES_ECB) && !IS_ALGOMODE(cr, DES_ECB) &&
	       !IS_ALGOMODE(cr, AES_ECB);
}

static bool algo_mode_is_ecb_cbc(uint32_t cr)
{
	return GET_ALGOMODE(cr) < _CRYP_CR_ALGOMODE_AES_CTR;
}

static bool algo_mode_is_aes(uint32_t cr)
{
	return ((cr & _CRYP_CR_ALGOMODE_MSK) >> _CRYP_CR_ALGOMODE_OFF) >=
	       _CRYP_CR_ALGOMODE_AES_ECB;
}

static bool is_decrypt(uint32_t cr)
{
	return (cr & _CRYP_CR_ALGODIR) == _CRYP_CR_ALGODIR_DEC;
}

static bool is_encrypt(uint32_t cr)
{
	return !is_decrypt(cr);
}

static bool does_need_npblb(uint32_t cr)
{
	return (IS_ALGOMODE(cr, AES_GCM) && is_encrypt(cr)) ||
	       (IS_ALGOMODE(cr, AES_CCM) && is_decrypt(cr));
}
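
/*
 * Note (from the STM32 reference manual, paraphrased): the NPBLB field
 * holds the number of zero padding bytes appended to the last payload
 * block so the peripheral can exclude them from the tag computation.
 * It is only required for GCM encryption and CCM decryption, which is
 * exactly what does_need_npblb() tests; the field is written in
 * stm32_cryp_final().
 */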

static TEE_Result wait_sr_bits(vaddr_t base, uint32_t bits)
{
	uint64_t timeout_ref = timeout_init_us(CRYP_TIMEOUT_US);

	while ((io_read32(base + _CRYP_SR) & bits) != bits)
		if (timeout_elapsed(timeout_ref))
			break;

	if ((io_read32(base + _CRYP_SR) & bits) != bits)
		return TEE_ERROR_BUSY;

	return TEE_SUCCESS;
}

static TEE_Result wait_end_busy(vaddr_t base)
{
	uint64_t timeout_ref = timeout_init_us(CRYP_TIMEOUT_US);

	while (io_read32(base + _CRYP_SR) & _CRYP_SR_BUSY)
		if (timeout_elapsed(timeout_ref))
			break;

	if (io_read32(base + _CRYP_SR) & _CRYP_SR_BUSY)
		return TEE_ERROR_BUSY;

	return TEE_SUCCESS;
}

static TEE_Result wait_end_enable(vaddr_t base)
{
	uint64_t timeout_ref = timeout_init_us(CRYP_TIMEOUT_US);

	while (io_read32(base + _CRYP_CR) & _CRYP_CR_CRYPEN)
		if (timeout_elapsed(timeout_ref))
			break;

	if (io_read32(base + _CRYP_CR) & _CRYP_CR_CRYPEN)
		return TEE_ERROR_BUSY;

	return TEE_SUCCESS;
}

static TEE_Result __must_check write_align_block(struct stm32_cryp_context *ctx,
						 uint32_t *data)
{
	TEE_Result res = TEE_SUCCESS;
	unsigned int i = 0;

	res = wait_sr_bits(ctx->base, _CRYP_SR_IFNF);
	if (res)
		return res;

	for (i = 0; i < ctx->block_u32; i++) {
		/* No need to htobe() as we configure the HW to swap bytes */
		io_write32(ctx->base + _CRYP_DIN, data[i]);
	}

	return TEE_SUCCESS;
}

static TEE_Result __must_check write_block(struct stm32_cryp_context *ctx,
					   uint8_t *data)
{
	if (!IS_ALIGNED_WITH_TYPE(data, uint32_t)) {
		uint32_t data_u32[MAX_BLOCK_NB_U32] = { 0 };

		memcpy(data_u32, data, ctx->block_u32 * sizeof(uint32_t));
		return write_align_block(ctx, data_u32);
	}

	return write_align_block(ctx, (void *)data);
}

static TEE_Result __must_check read_align_block(struct stm32_cryp_context *ctx,
						uint32_t *data)
{
	TEE_Result res = TEE_SUCCESS;
	unsigned int i = 0;

	res = wait_sr_bits(ctx->base, _CRYP_SR_OFNE);
	if (res)
		return res;

	for (i = 0; i < ctx->block_u32; i++) {
		/* No need to htobe() as we configure the HW to swap bytes */
		data[i] = io_read32(ctx->base + _CRYP_DOUT);
	}

	return TEE_SUCCESS;
}

static TEE_Result __must_check read_block(struct stm32_cryp_context *ctx,
					  uint8_t *data)
{
	if (!IS_ALIGNED_WITH_TYPE(data, uint32_t)) {
		TEE_Result res = TEE_SUCCESS;
		uint32_t data_u32[MAX_BLOCK_NB_U32] = { 0 };

		res = read_align_block(ctx, data_u32);
		if (res)
			return res;

		memcpy(data, data_u32, ctx->block_u32 * sizeof(uint32_t));

		return TEE_SUCCESS;
	}

	return read_align_block(ctx, (void *)data);
}

static void cryp_end(struct stm32_cryp_context *ctx, TEE_Result prev_error)
{
	if (prev_error) {
		if (rstctrl_assert_to(cryp_pdata.reset, TIMEOUT_US_1MS))
			panic();
		if (rstctrl_deassert_to(cryp_pdata.reset, TIMEOUT_US_1MS))
			panic();
	}

	/* Disable the CRYP peripheral */
	io_clrbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);
}

static void cryp_write_iv(struct stm32_cryp_context *ctx)
{
	if (algo_mode_needs_iv(ctx->cr)) {
		unsigned int i = 0;

		/* Restore the _CRYP_IVRx */
		for (i = 0; i < ctx->block_u32; i++)
			io_write32(ctx->base + _CRYP_IV0LR + i *
				   sizeof(uint32_t), ctx->iv[i]);
	}
}

static void cryp_save_suspend(struct stm32_cryp_context *ctx)
{
	unsigned int i = 0;

	if (IS_ALGOMODE(ctx->cr, AES_GCM) || IS_ALGOMODE(ctx->cr, AES_CCM))
		for (i = 0; i < ARRAY_SIZE(ctx->pm_gcmccm); i++)
			ctx->pm_gcmccm[i] = io_read32(ctx->base +
						      _CRYP_CSGCMCCM0R +
						      i * sizeof(uint32_t));

	if (IS_ALGOMODE(ctx->cr, AES_GCM))
		for (i = 0; i < ARRAY_SIZE(ctx->pm_gcm); i++)
			ctx->pm_gcm[i] = io_read32(ctx->base + _CRYP_CSGCM0R +
						   i * sizeof(uint32_t));
}

static void cryp_restore_suspend(struct stm32_cryp_context *ctx)
{
	unsigned int i = 0;

	if (IS_ALGOMODE(ctx->cr, AES_GCM) || IS_ALGOMODE(ctx->cr, AES_CCM))
		for (i = 0; i < ARRAY_SIZE(ctx->pm_gcmccm); i++)
			io_write32(ctx->base + _CRYP_CSGCMCCM0R +
				   i * sizeof(uint32_t), ctx->pm_gcmccm[i]);

	if (IS_ALGOMODE(ctx->cr, AES_GCM))
		for (i = 0; i < ARRAY_SIZE(ctx->pm_gcm); i++)
			io_write32(ctx->base + _CRYP_CSGCM0R +
				   i * sizeof(uint32_t), ctx->pm_gcm[i]);
}

static void cryp_write_key(struct stm32_cryp_context *ctx)
{
	vaddr_t reg = 0;
	int i = 0;
	uint32_t algo = GET_ALGOMODE(ctx->cr);

	if (algo == _CRYP_CR_ALGOMODE_DES_ECB ||
	    algo == _CRYP_CR_ALGOMODE_DES_CBC)
		reg = ctx->base + _CRYP_K1RR;
	else
		reg = ctx->base + _CRYP_K3RR;

	for (i = ctx->key_size / sizeof(uint32_t) - 1;
	     i >= 0;
	     i--, reg -= sizeof(uint32_t))
		io_write32(reg, ctx->key[i]);
}

static TEE_Result cryp_prepare_key(struct stm32_cryp_context *ctx)
{
	TEE_Result res = TEE_SUCCESS;

	/*
	 * For AES ECB/CBC decryption, key preparation mode must be selected
	 * to populate the key.
	 */
	if (is_decrypt(ctx->cr) && (IS_ALGOMODE(ctx->cr, AES_ECB) ||
				    IS_ALGOMODE(ctx->cr, AES_CBC))) {
		/* Select Algomode "prepare key" */
		io_clrsetbits32(ctx->base + _CRYP_CR, _CRYP_CR_ALGOMODE_MSK,
				_CRYP_CR_ALGOMODE_AES << _CRYP_CR_ALGOMODE_OFF);

		cryp_write_key(ctx);

		/* Enable CRYP */
		io_setbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);

		res = wait_end_busy(ctx->base);
		if (res)
			return res;

		/* Reset 'real' algomode */
		io_clrsetbits32(ctx->base + _CRYP_CR, _CRYP_CR_ALGOMODE_MSK,
				ctx->cr & _CRYP_CR_ALGOMODE_MSK);
	} else {
		cryp_write_key(ctx);
	}

	return TEE_SUCCESS;
}

static TEE_Result save_context(struct stm32_cryp_context *ctx)
{
	/* Device should not be in a processing phase */
	if (io_read32(ctx->base + _CRYP_SR) & _CRYP_SR_BUSY)
		return TEE_ERROR_BAD_STATE;

	/* Disable the CRYP peripheral */
	io_clrbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);

	/* Save CR */
	ctx->cr = io_read32(ctx->base + _CRYP_CR);

	cryp_save_suspend(ctx);

	/* If the algo mode uses an IV, save the current one */
	if (algo_mode_needs_iv(ctx->cr)) {
		unsigned int i = 0;

		/* Save IV */
		for (i = 0; i < ctx->block_u32; i++)
			ctx->iv[i] = io_read32(ctx->base + _CRYP_IV0LR + i *
					       sizeof(uint32_t));
	}

	return TEE_SUCCESS;
}

/* To resume the processing of a message */
static TEE_Result restore_context(struct stm32_cryp_context *ctx)
{
	TEE_Result res = TEE_SUCCESS;

	/* IP should be disabled */
	if (io_read32(ctx->base + _CRYP_CR) & _CRYP_CR_CRYPEN) {
		DMSG("Device is still enabled");
		return TEE_ERROR_BAD_STATE;
	}

	/* Restore the _CRYP_CR */
	io_write32(ctx->base + _CRYP_CR, ctx->cr);

	/* Write key and, in case of AES_CBC or AES_ECB decrypt, prepare it */
	res = cryp_prepare_key(ctx);
	if (res)
		return res;

	cryp_restore_suspend(ctx);

	cryp_write_iv(ctx);

	/* Flush internal FIFOs */
	io_setbits32(ctx->base + _CRYP_CR, _CRYP_CR_FFLUSH);

	/* Enable the CRYP peripheral */
	io_setbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);

	return TEE_SUCCESS;
}

/*
 * Translate a byte index in an array of BE uint32_t into the index of the
 * same byte in the corresponding LE uint32_t array.
 */
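/*
 * For example, with a 16-byte block seen as uint32_t[4]: be_index(0) == 3,
 * be_index(1) == 2, be_index(3) == 0 and be_index(15) == 12.
 */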
static size_t be_index(size_t index)
{
	return (index & ~0x3) + 3 - (index & 0x3);
}

static TEE_Result ccm_first_context(struct stm32_cryp_context *ctx)
{
	TEE_Result res = TEE_SUCCESS;
	uint32_t b0[AES_BLOCK_NB_U32] = { 0 };
	uint8_t *iv = (uint8_t *)ctx->iv;
	size_t l = 0;
	size_t i = 15;

	/* IP should be disabled */
	if (io_read32(ctx->base + _CRYP_CR) & _CRYP_CR_CRYPEN)
		return TEE_ERROR_BAD_STATE;

	/* Write the _CRYP_CR */
	io_write32(ctx->base + _CRYP_CR, ctx->cr);

	/* Write key */
	res = cryp_prepare_key(ctx);
	if (res)
		return res;

	/* Save full IV that will be b0 */
	memcpy(b0, iv, sizeof(b0));

	/*
	 * Update the IV to become CTR0/1 before setting it.
	 * The IV is saved as LE uint32_t[4] as expected by the hardware,
	 * but the CCM RFC defines the bytes to update in a BE array.
	 */
	/* Set the flag bits to 0 (5 upper bits), keep the 3 low bits */
	iv[be_index(0)] &= 0x7;
	/* Get the size of the length field (can be from 2 to 8) */
	l = iv[be_index(0)] + 1;
	/* Set Q to 0 */
	for (i = 15; i >= 15 - l + 1; i--)
		iv[be_index(i)] = 0;
	/* Save CTR0 */
	memcpy(ctx->ctr0_ccm, iv, sizeof(b0));
	/* Increment Q */
	iv[be_index(15)] |= 0x1;
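	/*
	 * Worked example (illustrative): with a 13-byte nonce N, B0 is
	 * flags || N || Q(2 bytes), and the 3 low flag bits hold L - 1 = 1,
	 * so l = 2. The transform above then yields
	 * CTR0 = 0x01 || N || 0x0000 and CTR1 = 0x01 || N || 0x0001,
	 * matching the counter block layout of RFC 3610.
	 */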

	cryp_write_iv(ctx);

	/* Enable the CRYP peripheral */
	io_setbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);

	res = write_align_block(ctx, b0);

	return res;
}

static TEE_Result do_from_init_to_phase(struct stm32_cryp_context *ctx,
					uint32_t new_phase)
{
	TEE_Result res = TEE_SUCCESS;

	/*
	 * The init phase has not run yet.
	 * CCM needs a specific restore_context sequence for the init phase.
	 */
	if (IS_ALGOMODE(ctx->cr, AES_CCM))
		res = ccm_first_context(ctx);
	else
		res = restore_context(ctx);

	if (res)
		return res;

	res = wait_end_enable(ctx->base);
	if (res)
		return res;

	/* Move to 'new_phase' */
	io_clrsetbits32(ctx->base + _CRYP_CR, _CRYP_CR_GCM_CCMPH_MSK,
			new_phase << _CRYP_CR_GCM_CCMPH_OFF);

	/* Enable the CRYP peripheral (init disabled it) */
	io_setbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);

	return TEE_SUCCESS;
}

static TEE_Result do_from_header_to_phase(struct stm32_cryp_context *ctx,
					  uint32_t new_phase)
{
	TEE_Result res = TEE_SUCCESS;

	res = restore_context(ctx);
	if (res)
		return res;

	if (ctx->extra_size) {
		/* Flush pending header data before moving to the next phase */
		memset((uint8_t *)ctx->extra + ctx->extra_size, 0,
		       ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size);

		res = write_align_block(ctx, ctx->extra);
		if (res)
			return res;

		ctx->assoc_len += (ctx->extra_size) * INT8_BIT;
		ctx->extra_size = 0;
	}

	/* Move to 'new_phase' */
	io_clrsetbits32(ctx->base + _CRYP_CR, _CRYP_CR_GCM_CCMPH_MSK,
			new_phase << _CRYP_CR_GCM_CCMPH_OFF);

	return TEE_SUCCESS;
}

/**
 * @brief Start an AES or DES/TDES computation.
 * @param ctx: CRYP process context
 * @param is_dec: true if decryption, false if encryption
 * @param algo: the algorithm mode to use
 * @param key: pointer to the key
 * @param key_size: key size
 * @param iv: pointer to the initialization vector (unused if algo is ECB)
 * @param iv_size: IV size
 * @note This function does not access the hardware; it only stores the
 * configuration in ctx.
 *
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_cryp_init(struct stm32_cryp_context *ctx, bool is_dec,
			   enum stm32_cryp_algo_mode algo,
			   const void *key, size_t key_size, const void *iv,
			   size_t iv_size)
{
	unsigned int i = 0;
	const uint32_t *iv_u32 = NULL;
	uint32_t local_iv[4] = { 0 };
	const uint32_t *key_u32 = NULL;
	uint32_t local_key[8] = { 0 };

	ctx->assoc_len = 0;
	ctx->load_len = 0;
	ctx->extra_size = 0;
	ctx->lock = &cryp_lock;

	ctx->base = io_pa_or_va(&cryp_pdata.base, 1);
	ctx->cr = _CRYP_CR_RESET_VALUE;

	/* We want the buffers to be uint32_t aligned */
	if (IS_ALIGNED_WITH_TYPE(key, uint32_t)) {
		key_u32 = key;
	} else {
		memcpy(local_key, key, key_size);
		key_u32 = local_key;
	}

	if (IS_ALIGNED_WITH_TYPE(iv, uint32_t)) {
		iv_u32 = iv;
	} else {
		memcpy(local_iv, iv, iv_size);
		iv_u32 = local_iv;
	}

	if (is_dec)
		SETBITS(ctx->cr, _CRYP_CR_ALGODIR);
	else
		CLRBITS(ctx->cr, _CRYP_CR_ALGODIR);

	/* Save algo mode */
	switch (algo) {
	case STM32_CRYP_MODE_TDES_ECB:
		SET_ALGOMODE(TDES_ECB, ctx->cr);
		break;
	case STM32_CRYP_MODE_TDES_CBC:
		SET_ALGOMODE(TDES_CBC, ctx->cr);
		break;
	case STM32_CRYP_MODE_DES_ECB:
		SET_ALGOMODE(DES_ECB, ctx->cr);
		break;
	case STM32_CRYP_MODE_DES_CBC:
		SET_ALGOMODE(DES_CBC, ctx->cr);
		break;
	case STM32_CRYP_MODE_AES_ECB:
		SET_ALGOMODE(AES_ECB, ctx->cr);
		break;
	case STM32_CRYP_MODE_AES_CBC:
		SET_ALGOMODE(AES_CBC, ctx->cr);
		break;
	case STM32_CRYP_MODE_AES_CTR:
		SET_ALGOMODE(AES_CTR, ctx->cr);
		break;
	case STM32_CRYP_MODE_AES_GCM:
		SET_ALGOMODE(AES_GCM, ctx->cr);
		break;
	case STM32_CRYP_MODE_AES_CCM:
		SET_ALGOMODE(AES_CCM, ctx->cr);
		break;
	default:
		return TEE_ERROR_BAD_PARAMETERS;
	}

	/*
	 * We use the HW byte swap (_CRYP_CR_DATATYPE_BYTE) for the data,
	 * so we do not need TOBE32(data) before writing to DIN, nor
	 * FROMBE32() after reading from DOUT.
	 */
	clrsetbits(&ctx->cr, _CRYP_CR_DATATYPE_MSK,
		   _CRYP_CR_DATATYPE_BYTE << _CRYP_CR_DATATYPE_OFF);

	/*
	 * Configure the key size for AES algorithms
	 * and save the block size.
	 */
	if (algo_mode_is_aes(ctx->cr)) {
		switch (key_size) {
		case AES_KEYSIZE_128:
			clrsetbits(&ctx->cr, _CRYP_CR_KEYSIZE_MSK,
				   _CRYP_CR_KSIZE_128 << _CRYP_CR_KEYSIZE_OFF);
			break;
		case AES_KEYSIZE_192:
			clrsetbits(&ctx->cr, _CRYP_CR_KEYSIZE_MSK,
				   _CRYP_CR_KSIZE_192 << _CRYP_CR_KEYSIZE_OFF);
			break;
		case AES_KEYSIZE_256:
			clrsetbits(&ctx->cr, _CRYP_CR_KEYSIZE_MSK,
				   _CRYP_CR_KSIZE_256 << _CRYP_CR_KEYSIZE_OFF);
			break;
		default:
			return TEE_ERROR_BAD_PARAMETERS;
		}

		/* And set block size */
		ctx->block_u32 = AES_BLOCK_NB_U32;
	} else {
		/* And set DES/TDES block size */
		ctx->block_u32 = DES_BLOCK_NB_U32;
	}

	/* Save key in HW order */
	ctx->key_size = key_size;
	for (i = 0; i < key_size / sizeof(uint32_t); i++)
		ctx->key[i] = TOBE32(key_u32[i]);

	/* Save IV */
	if (algo_mode_needs_iv(ctx->cr)) {
		if (!iv || iv_size != ctx->block_u32 * sizeof(uint32_t))
			return TEE_ERROR_BAD_PARAMETERS;

		/*
		 * We save IV in the byte order expected by the
		 * IV registers
		 */
		for (i = 0; i < ctx->block_u32; i++)
			ctx->iv[i] = TOBE32(iv_u32[i]);
	}

	/* Reset suspend registers */
	memset(ctx->pm_gcmccm, 0, sizeof(ctx->pm_gcmccm));
	memset(ctx->pm_gcm, 0, sizeof(ctx->pm_gcm));

	return TEE_SUCCESS;
}
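
/*
 * Illustrative call sequence (a sketch; error handling elided and the
 * buffer names key/iv/in/out/size are assumptions): one-shot AES-256-CBC
 * encryption of a block-aligned buffer.
 *
 *	struct stm32_cryp_context ctx = { };
 *	TEE_Result res = TEE_SUCCESS;
 *
 *	res = stm32_cryp_init(&ctx, false, STM32_CRYP_MODE_AES_CBC,
 *			      key, AES_KEYSIZE_256, iv, AES_BLOCK_SIZE);
 *	if (!res)
 *		res = stm32_cryp_update(&ctx, true, in, out, size);
 */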

/**
 * @brief Update (or start) an AES authentication process with
 *        associated data (CCM or GCM).
 * @param ctx: CRYP process context
 * @param data: pointer to the associated data
 * @param data_size: data size
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_cryp_update_assodata(struct stm32_cryp_context *ctx,
				      uint8_t *data, size_t data_size)
{
	TEE_Result res = TEE_SUCCESS;
	unsigned int i = 0;
	uint32_t previous_phase = 0;

	/* If no associated data, nothing to do */
	if (!data || !data_size)
		return TEE_SUCCESS;

	mutex_lock(ctx->lock);

	previous_phase = (ctx->cr & _CRYP_CR_GCM_CCMPH_MSK) >>
			 _CRYP_CR_GCM_CCMPH_OFF;

	switch (previous_phase) {
	case _CRYP_CR_GCM_CCMPH_INIT:
		res = do_from_init_to_phase(ctx, _CRYP_CR_GCM_CCMPH_HEADER);
		break;
	case _CRYP_CR_GCM_CCMPH_HEADER:
		/*
		 * Function update_assodata was already called.
		 * We only need to restore the context.
		 */
		res = restore_context(ctx);
		break;
	default:
		assert(0);
		res = TEE_ERROR_BAD_STATE;
	}

	if (res)
		goto out;

	/* Handle data remaining from a previous update_assodata() call */
	if (ctx->extra_size &&
	    (ctx->extra_size + data_size >=
	     ctx->block_u32 * sizeof(uint32_t))) {
		uint32_t block[MAX_BLOCK_NB_U32] = { 0 };

		memcpy(block, ctx->extra, ctx->extra_size);
		memcpy((uint8_t *)block + ctx->extra_size, data,
		       ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size);

		res = write_align_block(ctx, block);
		if (res)
			goto out;

		i += ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size;
		ctx->extra_size = 0;
		ctx->assoc_len += ctx->block_u32 * sizeof(uint32_t) * INT8_BIT;
	}

	while (data_size - i >= ctx->block_u32 * sizeof(uint32_t)) {
		res = write_block(ctx, data + i);
		if (res)
			goto out;

		/* Process next block */
		i += ctx->block_u32 * sizeof(uint32_t);
		ctx->assoc_len += ctx->block_u32 * sizeof(uint32_t) * INT8_BIT;
	}

	/*
	 * Handle the last block when data_size is not a block size multiple:
	 * save the remaining data to process it later (potentially with new
	 * associated data).
	 */
	if (i < data_size) {
		memcpy((uint8_t *)ctx->extra + ctx->extra_size, data + i,
		       data_size - i);
		ctx->extra_size += data_size - i;
	}

	res = save_context(ctx);
out:
	if (res)
		cryp_end(ctx, res);

	mutex_unlock(ctx->lock);

	return res;
}

/**
 * @brief Update (or start) an AES authenticated de/encryption process
 *        with payload data (CCM or GCM).
 * @param ctx: CRYP process context
 * @param data_in: pointer to the payload
 * @param data_out: pointer where to save the de/encrypted payload
 * @param data_size: payload size
 *
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_cryp_update_load(struct stm32_cryp_context *ctx,
				  uint8_t *data_in, uint8_t *data_out,
				  size_t data_size)
{
	TEE_Result res = TEE_SUCCESS;
	unsigned int i = 0;
	uint32_t previous_phase = 0;

	if (!data_in || !data_size)
		return TEE_SUCCESS;

	mutex_lock(ctx->lock);

	previous_phase = (ctx->cr & _CRYP_CR_GCM_CCMPH_MSK) >>
			 _CRYP_CR_GCM_CCMPH_OFF;

	switch (previous_phase) {
	case _CRYP_CR_GCM_CCMPH_INIT:
		res = do_from_init_to_phase(ctx, _CRYP_CR_GCM_CCMPH_PAYLOAD);
		break;
	case _CRYP_CR_GCM_CCMPH_HEADER:
		res = do_from_header_to_phase(ctx, _CRYP_CR_GCM_CCMPH_PAYLOAD);
		break;
	case _CRYP_CR_GCM_CCMPH_PAYLOAD:
		/* new update_load call, we only need to restore context */
		res = restore_context(ctx);
		break;
	default:
		assert(0);
		res = TEE_ERROR_BAD_STATE;
	}

	if (res)
		goto out;

	/* Handle an incomplete block from a previous update_load() call */
	if (ctx->extra_size &&
	    (ctx->extra_size + data_size >=
	     ctx->block_u32 * sizeof(uint32_t))) {
		uint32_t block_out[MAX_BLOCK_NB_U32] = { 0 };

		memcpy((uint8_t *)ctx->extra + ctx->extra_size, data_in + i,
		       ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size);

		res = write_align_block(ctx, ctx->extra);
		if (res)
			goto out;

		res = read_align_block(ctx, block_out);
		if (res)
			goto out;

		memcpy(data_out + i, (uint8_t *)block_out + ctx->extra_size,
		       ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size);

		i += ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size;
		ctx->extra_size = 0;

		ctx->load_len += ctx->block_u32 * sizeof(uint32_t) * INT8_BIT;
	}

	while (data_size - i >= ctx->block_u32 * sizeof(uint32_t)) {
		res = write_block(ctx, data_in + i);
		if (res)
			goto out;

		res = read_block(ctx, data_out + i);
		if (res)
			goto out;

		/* Process next block */
		i += ctx->block_u32 * sizeof(uint32_t);
		ctx->load_len += ctx->block_u32 * sizeof(uint32_t) * INT8_BIT;
	}

	res = save_context(ctx);
	if (res)
		goto out;

	/*
	 * Handle the last block when data_size is not a block size multiple.
	 * The context is already saved: pad the block with zeros and send it
	 * to CRYP to get the {en,de}crypted data, then store the input so it
	 * can be resent as the last block in final() or completed by the
	 * next update_load() to get a correct tag.
	 */
	if (i < data_size) {
		uint32_t block_out[MAX_BLOCK_NB_U32] = { 0 };
		size_t prev_extra_size = ctx->extra_size;

		/* Re-enable the CRYP peripheral */
		io_setbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);

		memcpy((uint8_t *)ctx->extra + ctx->extra_size, data_in + i,
		       data_size - i);
		ctx->extra_size += data_size - i;
		memset((uint8_t *)ctx->extra + ctx->extra_size, 0,
		       ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size);

		res = write_align_block(ctx, ctx->extra);
		if (res)
			goto out;

		res = read_align_block(ctx, block_out);
		if (res)
			goto out;

		memcpy(data_out + i, (uint8_t *)block_out + prev_extra_size,
		       data_size - i);

		/* Disable the CRYP peripheral */
		io_clrbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);
	}

out:
	if (res)
		cryp_end(ctx, res);

	mutex_unlock(ctx->lock);

	return res;
}

/**
 * @brief Get the authentication tag for AES authenticated algorithms
 *        (CCM or GCM).
 * @param ctx: CRYP process context
 * @param tag: pointer where to save the tag
 * @param tag_size: tag size
 *
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_cryp_final(struct stm32_cryp_context *ctx, uint8_t *tag,
			    size_t tag_size)
{
	TEE_Result res = TEE_SUCCESS;
	uint32_t tag_u32[4] = { 0 };
	uint32_t previous_phase = 0;

	mutex_lock(ctx->lock);

	previous_phase = (ctx->cr & _CRYP_CR_GCM_CCMPH_MSK) >>
			 _CRYP_CR_GCM_CCMPH_OFF;

	switch (previous_phase) {
	case _CRYP_CR_GCM_CCMPH_INIT:
		res = do_from_init_to_phase(ctx, _CRYP_CR_GCM_CCMPH_FINAL);
		break;
	case _CRYP_CR_GCM_CCMPH_HEADER:
		res = do_from_header_to_phase(ctx, _CRYP_CR_GCM_CCMPH_FINAL);
		break;
	case _CRYP_CR_GCM_CCMPH_PAYLOAD:
		res = restore_context(ctx);
		if (res)
			break;

		/* Handle an incomplete block from a previous update_load() */
		if (ctx->extra_size) {
			uint32_t block_out[MAX_BLOCK_NB_U32] = { 0 };
			size_t sz = ctx->block_u32 * sizeof(uint32_t) -
				    ctx->extra_size;

			if (does_need_npblb(ctx->cr)) {
				io_clrsetbits32(ctx->base + _CRYP_CR,
						_CRYP_CR_NPBLB_MSK,
						sz << _CRYP_CR_NPBLB_OFF);
			}

			memset((uint8_t *)ctx->extra + ctx->extra_size, 0, sz);

			res = write_align_block(ctx, ctx->extra);
			if (res)
				break;

			/* Discard the {en,de}crypted data, already returned */
			res = read_align_block(ctx, block_out);
			if (res)
				break;

			ctx->load_len += (ctx->extra_size * INT8_BIT);
			ctx->extra_size = 0;
		}

		/* Move to final phase */
		io_clrsetbits32(ctx->base + _CRYP_CR, _CRYP_CR_GCM_CCMPH_MSK,
				_CRYP_CR_GCM_CCMPH_FINAL <<
				_CRYP_CR_GCM_CCMPH_OFF);
		break;
	default:
		assert(0);
		res = TEE_ERROR_BAD_STATE;
	}

	if (res)
		goto out;

	if (IS_ALGOMODE(ctx->cr, AES_GCM)) {
		/* No need to htobe() as we configure the HW to swap bytes */
		io_write32(ctx->base + _CRYP_DIN, 0U);
		io_write32(ctx->base + _CRYP_DIN, ctx->assoc_len);
		io_write32(ctx->base + _CRYP_DIN, 0U);
		io_write32(ctx->base + _CRYP_DIN, ctx->load_len);
	} else if (IS_ALGOMODE(ctx->cr, AES_CCM)) {
		/* No need to htobe() in this phase */
		res = write_align_block(ctx, ctx->ctr0_ccm);
		if (res)
			goto out;
	}

	res = read_align_block(ctx, tag_u32);
	if (res)
		goto out;

	memcpy(tag, tag_u32, MIN(sizeof(tag_u32), tag_size));

out:
	cryp_end(ctx, res);
	mutex_unlock(ctx->lock);

	return res;
}
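
/*
 * Illustrative AES-GCM sequence (a sketch; error handling elided and the
 * buffer names key/iv/aad/in/out are assumptions). Note that
 * stm32_cryp_init() expects a full 16-byte IV, i.e. the nonce already
 * padded with the initial counter value.
 *
 *	struct stm32_cryp_context ctx = { };
 *	uint8_t tag[16] = { };
 *	TEE_Result res = TEE_SUCCESS;
 *
 *	res = stm32_cryp_init(&ctx, false, STM32_CRYP_MODE_AES_GCM,
 *			      key, AES_KEYSIZE_128, iv, AES_BLOCK_SIZE);
 *	res = stm32_cryp_update_assodata(&ctx, aad, aad_size);
 *	res = stm32_cryp_update_load(&ctx, in, out, size);
 *	res = stm32_cryp_final(&ctx, tag, sizeof(tag));
 */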

/**
 * @brief Update (or start) a de/encryption process.
 * @param ctx: CRYP process context
 * @param last_block: true if last payload data block
 * @param data_in: pointer to the payload
 * @param data_out: pointer where to save the de/encrypted payload
 * @param data_size: payload size
 *
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_cryp_update(struct stm32_cryp_context *ctx, bool last_block,
			     uint8_t *data_in, uint8_t *data_out,
			     size_t data_size)
{
	TEE_Result res = TEE_SUCCESS;
	unsigned int i = 0;

	mutex_lock(ctx->lock);

	/*
	 * In CBC and ECB encryption we would need specific handling of the
	 * last 2 blocks when the total size is not aligned to the block
	 * size. This currently returns TEE_ERROR_NOT_IMPLEMENTED. Moreover,
	 * as we need to know the last 2 blocks, if the size is unaligned
	 * and we are called with less than two blocks, return
	 * TEE_ERROR_BAD_STATE.
	 */
	if (last_block && algo_mode_is_ecb_cbc(ctx->cr) &&
	    is_encrypt(ctx->cr) &&
	    (ROUNDDOWN(data_size, ctx->block_u32 * sizeof(uint32_t)) !=
	     data_size)) {
		if (data_size < ctx->block_u32 * sizeof(uint32_t) * 2) {
			/*
			 * If CBC, size of the last part should be at
			 * least 2*BLOCK_SIZE
			 */
			EMSG("Unexpected last block size");
			res = TEE_ERROR_BAD_STATE;
			goto out;
		}
		/*
		 * Moreover, the ECB/CBC specific padding for encryption is
		 * not yet implemented, and not used in OP-TEE.
		 */
		res = TEE_ERROR_NOT_IMPLEMENTED;
		goto out;
	}

	/* Manage remaining CTR mask from previous update call */
	if (IS_ALGOMODE(ctx->cr, AES_CTR) && ctx->extra_size) {
		unsigned int j = 0;
		uint8_t *mask = (uint8_t *)ctx->extra;

		for (j = 0; j < ctx->extra_size && i < data_size; j++, i++)
			data_out[i] = data_in[i] ^ mask[j];

		if (j != ctx->extra_size) {
			/*
			 * We did not consume all of the saved mask,
			 * but there is no more data left.
			 */

			/* Save the remaining mask and its new size */
			memmove(ctx->extra, ctx->extra + j,
				ctx->extra_size - j);
			ctx->extra_size -= j;

			/*
			 * No need to save the HW context: we did not
			 * modify the HW state.
			 */
			res = TEE_SUCCESS;
			goto out;
		}

		/* All of the extra mask was consumed */
		ctx->extra_size = 0;
	}

	res = restore_context(ctx);
	if (res)
		goto out;

	while (data_size - i >= ctx->block_u32 * sizeof(uint32_t)) {
		/*
		 * We only write/read one block at a time, even though the
		 * CRYP input (and output) FIFO can hold 8 * uint32_t.
		 */
		res = write_block(ctx, data_in + i);
		if (res)
			goto out;

		res = read_block(ctx, data_out + i);
		if (res)
			goto out;

		/* Process next block */
		i += ctx->block_u32 * sizeof(uint32_t);
	}

	/* Manage last block if not a block size multiple */
	if (i < data_size) {
		uint32_t block_in[MAX_BLOCK_NB_U32] = { 0 };
		uint32_t block_out[MAX_BLOCK_NB_U32] = { 0 };

		if (!IS_ALGOMODE(ctx->cr, AES_CTR)) {
			/*
			 * Algorithms other than CTR can only handle a
			 * multiple of the block size.
			 */
			res = TEE_ERROR_BAD_PARAMETERS;
			goto out;
		}

		/*
		 * For CTR, we save the generated mask to use it in the next
		 * update call.
		 */
		memcpy(block_in, data_in + i, data_size - i);

		res = write_align_block(ctx, block_in);
		if (res)
			goto out;

		res = read_align_block(ctx, block_out);
		if (res)
			goto out;

		memcpy(data_out + i, block_out, data_size - i);

		/* Save the mask for a possible next call */
		ctx->extra_size = ctx->block_u32 * sizeof(uint32_t) -
			(data_size - i);
		memcpy(ctx->extra, (uint8_t *)block_out + data_size - i,
		       ctx->extra_size);
	}

	if (!last_block)
		res = save_context(ctx);

out:
	/* If last block or error, end of CRYP process */
	if (last_block || res)
		cryp_end(ctx, res);

	mutex_unlock(ctx->lock);

	return res;
}
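
/*
 * Illustrative CTR streaming (a sketch; buffer names are assumptions):
 * sizes do not need to be block aligned, as the leftover keystream mask
 * is kept in ctx->extra between calls.
 *
 *	res = stm32_cryp_update(&ctx, false, in, out, 5);
 *	res = stm32_cryp_update(&ctx, true, in + 5, out + 5, 11);
 */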

static TEE_Result stm32_cryp_probe(const void *fdt, int node,
				   const void *compt_data __unused)
{
	TEE_Result res = TEE_SUCCESS;
	struct dt_node_info dt_cryp = { };
	struct rstctrl *rstctrl = NULL;
	struct clk *clk = NULL;

	fdt_fill_device_info(fdt, &dt_cryp, node);

	if (dt_cryp.reg == DT_INFO_INVALID_REG ||
	    dt_cryp.reg_size == DT_INFO_INVALID_REG_SIZE)
		panic();

	res = clk_dt_get_by_index(fdt, node, 0, &clk);
	if (res)
		return res;

	res = rstctrl_dt_get_by_index(fdt, node, 0, &rstctrl);
	if (res)
		return res;

	cryp_pdata.clock = clk;
	cryp_pdata.reset = rstctrl;
	cryp_pdata.base.pa = dt_cryp.reg;

	io_pa_or_va_secure(&cryp_pdata.base, dt_cryp.reg_size);
	if (!cryp_pdata.base.va)
		panic();

	stm32mp_register_secure_periph_iomem(cryp_pdata.base.pa);

	if (clk_enable(cryp_pdata.clock))
		panic();

	if (rstctrl_assert_to(cryp_pdata.reset, TIMEOUT_US_1MS))
		panic();

	if (rstctrl_deassert_to(cryp_pdata.reset, TIMEOUT_US_1MS))
		panic();

	if (IS_ENABLED(CFG_CRYPTO_DRV_AUTHENC)) {
		res = stm32_register_authenc();
		if (res) {
			EMSG("Failed to register to authenc: %#"PRIx32, res);
			panic();
		}
	}

	if (IS_ENABLED(CFG_CRYPTO_DRV_CIPHER)) {
		res = stm32_register_cipher();
		if (res) {
			EMSG("Failed to register to cipher: %#"PRIx32, res);
			panic();
		}
	}

	return TEE_SUCCESS;
}

static const struct dt_device_match stm32_cryp_match_table[] = {
	{ .compatible = "st,stm32mp1-cryp" },
	{ }
};
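
/*
 * Illustrative device-tree node matched by this driver (a sketch; the
 * base address, clock and reset identifiers are assumptions, check the
 * SoC dtsi and bindings). The probe above requires clocks and resets at
 * index 0:
 *
 *	cryp1: cryp@54001000 {
 *		compatible = "st,stm32mp1-cryp";
 *		reg = <0x54001000 0x400>;
 *		clocks = <&rcc CRYP1>;
 *		resets = <&rcc CRYP1_R>;
 *	};
 */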

DEFINE_DT_DRIVER(stm32_cryp_dt_driver) = {
	.name = "stm32-cryp",
	.match_table = stm32_cryp_match_table,
	.probe = stm32_cryp_probe,
};