xref: /optee_os/core/drivers/crypto/stm32/stm32_cryp.c (revision 71785645fa6ce42db40dbf5a54e0eaedc4f61591)
// SPDX-License-Identifier: BSD-2-Clause
/*
 * Copyright (c) 2021, STMicroelectronics - All Rights Reserved
 */
#include <assert.h>
#include <config.h>
#include <drivers/clk.h>
#include <drivers/clk_dt.h>
#include <drivers/rstctrl.h>
#include <initcall.h>
#include <io.h>
#include <kernel/boot.h>
#include <kernel/delay.h>
#include <kernel/dt.h>
#include <kernel/dt_driver.h>
#include <kernel/mutex.h>
#include <kernel/pm.h>
#include <libfdt.h>
#include <mm/core_memprot.h>
#include <stdint.h>
#include <stm32_util.h>
#include <string.h>
#include <utee_defines.h>
#include <util.h>

#include "stm32_cryp.h"
#include "common.h"

/* CRYP control register */
#define _CRYP_CR			0x0U
/* CRYP status register */
#define _CRYP_SR			0x04U
/* CRYP data input register */
#define _CRYP_DIN			0x08U
/* CRYP data output register */
#define _CRYP_DOUT			0x0CU
/* CRYP DMA control register */
#define _CRYP_DMACR			0x10U
/* CRYP interrupt mask set/clear register */
#define _CRYP_IMSCR			0x14U
/* CRYP raw interrupt status register */
#define _CRYP_RISR			0x18U
/* CRYP masked interrupt status register */
#define _CRYP_MISR			0x1CU
/* CRYP key registers */
#define _CRYP_K0LR			0x20U
#define _CRYP_K0RR			0x24U
#define _CRYP_K1LR			0x28U
#define _CRYP_K1RR			0x2CU
#define _CRYP_K2LR			0x30U
#define _CRYP_K2RR			0x34U
#define _CRYP_K3LR			0x38U
#define _CRYP_K3RR			0x3CU
/* CRYP initialization vector registers */
#define _CRYP_IV0LR			0x40U
#define _CRYP_IV0RR			0x44U
#define _CRYP_IV1LR			0x48U
#define _CRYP_IV1RR			0x4CU
/* CRYP context swap GCM-CCM registers */
#define _CRYP_CSGCMCCM0R		0x50U
#define _CRYP_CSGCMCCM1R		0x54U
#define _CRYP_CSGCMCCM2R		0x58U
#define _CRYP_CSGCMCCM3R		0x5CU
#define _CRYP_CSGCMCCM4R		0x60U
#define _CRYP_CSGCMCCM5R		0x64U
#define _CRYP_CSGCMCCM6R		0x68U
#define _CRYP_CSGCMCCM7R		0x6CU
/* CRYP context swap GCM registers */
#define _CRYP_CSGCM0R			0x70U
#define _CRYP_CSGCM1R			0x74U
#define _CRYP_CSGCM2R			0x78U
#define _CRYP_CSGCM3R			0x7CU
#define _CRYP_CSGCM4R			0x80U
#define _CRYP_CSGCM5R			0x84U
#define _CRYP_CSGCM6R			0x88U
#define _CRYP_CSGCM7R			0x8CU
/* CRYP hardware configuration register */
#define _CRYP_HWCFGR			0x3F0U
/* CRYP HW version register */
#define _CRYP_VERR			0x3F4U
/* CRYP identification */
#define _CRYP_IPIDR			0x3F8U
/* CRYP HW magic ID */
#define _CRYP_MID			0x3FCU

#define CRYP_TIMEOUT_US			1000000U
#define TIMEOUT_US_1MS			1000U
#define CRYP_RESET_DELAY_US		U(2)

/* CRYP control register fields */
#define _CRYP_CR_RESET_VALUE		0x0U
#define _CRYP_CR_NPBLB_MSK		GENMASK_32(23, 20)
#define _CRYP_CR_NPBLB_OFF		20U
#define _CRYP_CR_GCM_CCMPH_MSK		GENMASK_32(17, 16)
#define _CRYP_CR_GCM_CCMPH_OFF		16U
#define _CRYP_CR_GCM_CCMPH_INIT		0U
#define _CRYP_CR_GCM_CCMPH_HEADER	1U
#define _CRYP_CR_GCM_CCMPH_PAYLOAD	2U
#define _CRYP_CR_GCM_CCMPH_FINAL	3U
#define _CRYP_CR_CRYPEN			BIT(15)
#define _CRYP_CR_FFLUSH			BIT(14)
#define _CRYP_CR_KEYSIZE_MSK		GENMASK_32(9, 8)
#define _CRYP_CR_KEYSIZE_OFF		8U
#define _CRYP_CR_KSIZE_128		0U
#define _CRYP_CR_KSIZE_192		1U
#define _CRYP_CR_KSIZE_256		2U
#define _CRYP_CR_DATATYPE_MSK		GENMASK_32(7, 6)
#define _CRYP_CR_DATATYPE_OFF		6U
#define _CRYP_CR_DATATYPE_NONE		0U
#define _CRYP_CR_DATATYPE_HALF_WORD	1U
#define _CRYP_CR_DATATYPE_BYTE		2U
#define _CRYP_CR_DATATYPE_BIT		3U
#define _CRYP_CR_ALGOMODE_MSK		(BIT(19) | GENMASK_32(5, 3))
#define _CRYP_CR_ALGOMODE_OFF		3U
#define _CRYP_CR_ALGOMODE_TDES_ECB	0x0U
#define _CRYP_CR_ALGOMODE_TDES_CBC	0x1U
#define _CRYP_CR_ALGOMODE_DES_ECB	0x2U
#define _CRYP_CR_ALGOMODE_DES_CBC	0x3U
#define _CRYP_CR_ALGOMODE_AES_ECB	0x4U
#define _CRYP_CR_ALGOMODE_AES_CBC	0x5U
#define _CRYP_CR_ALGOMODE_AES_CTR	0x6U
#define _CRYP_CR_ALGOMODE_AES		0x7U
#define _CRYP_CR_ALGOMODE_AES_GCM	BIT(16)
#define _CRYP_CR_ALGOMODE_AES_CCM	(BIT(16) | BIT(0))
#define _CRYP_CR_ALGODIR		BIT(2)
#define _CRYP_CR_ALGODIR_ENC		0U
#define _CRYP_CR_ALGODIR_DEC		BIT(2)

/* CRYP status register fields */
#define _CRYP_SR_BUSY			BIT(4)
#define _CRYP_SR_OFFU			BIT(3)
#define _CRYP_SR_OFNE			BIT(2)
#define _CRYP_SR_IFNF			BIT(1)
#define _CRYP_SR_IFEM			BIT(0)

/* CRYP DMA control register fields */
#define _CRYP_DMACR_DOEN		BIT(1)
#define _CRYP_DMACR_DIEN		BIT(0)

/* CRYP interrupt fields */
#define _CRYP_I_OUT			BIT(1)
#define _CRYP_I_IN			BIT(0)

/* CRYP hardware configuration register fields */
#define _CRYP_HWCFGR_CFG1_MSK		GENMASK_32(3, 0)
#define _CRYP_HWCFGR_CFG1_OFF		0U
#define _CRYP_HWCFGR_CFG2_MSK		GENMASK_32(7, 4)
#define _CRYP_HWCFGR_CFG2_OFF		4U
#define _CRYP_HWCFGR_CFG3_MSK		GENMASK_32(11, 8)
#define _CRYP_HWCFGR_CFG3_OFF		8U
#define _CRYP_HWCFGR_CFG4_MSK		GENMASK_32(15, 12)
#define _CRYP_HWCFGR_CFG4_OFF		12U

/* CRYP HW version register */
#define _CRYP_VERR_MSK			GENMASK_32(7, 0)
#define _CRYP_VERR_OFF			0U

/*
 * Macros to manipulate bits in a local variable before writing it once to
 * the hardware register.
 */
#define CLRBITS(v, bits)		((v) &= ~(bits))
#define SETBITS(v, bits)		((v) |= (bits))

#define IS_ALGOMODE(cr, mod) \
	(((cr) & _CRYP_CR_ALGOMODE_MSK) == (_CRYP_CR_ALGOMODE_##mod << \
					  _CRYP_CR_ALGOMODE_OFF))

#define SET_ALGOMODE(mod, cr) \
	clrsetbits(&(cr), _CRYP_CR_ALGOMODE_MSK, (_CRYP_CR_ALGOMODE_##mod << \
						  _CRYP_CR_ALGOMODE_OFF))

#define GET_ALGOMODE(cr) \
	(((cr) & _CRYP_CR_ALGOMODE_MSK) >> _CRYP_CR_ALGOMODE_OFF)
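
/*
 * Note: the ALGOMODE field is split in the control register, as the
 * _CRYP_CR_ALGOMODE_MSK definition shows: bits [5:3] plus bit 19.
 * Shifting the _CRYP_CR_ALGOMODE_* values by _CRYP_CR_ALGOMODE_OFF maps
 * them onto both parts at once: e.g. SET_ALGOMODE(AES_GCM, cr) turns
 * BIT(16) into BIT(19), and SET_ALGOMODE(AES_CCM, cr) turns
 * BIT(16) | BIT(0) into BIT(19) | BIT(3).
 */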

static struct stm32_cryp_platdata cryp_pdata;
static struct mutex cryp_lock = MUTEX_INITIALIZER;

static void clrsetbits(uint32_t *v, uint32_t mask, uint32_t bits)
{
	*v = (*v & ~mask) | bits;
}

static bool algo_mode_needs_iv(uint32_t cr)
{
	return !IS_ALGOMODE(cr, TDES_ECB) && !IS_ALGOMODE(cr, DES_ECB) &&
	       !IS_ALGOMODE(cr, AES_ECB);
}

static bool algo_mode_is_ecb_cbc(uint32_t cr)
{
	return GET_ALGOMODE(cr) < _CRYP_CR_ALGOMODE_AES_CTR;
}

static bool algo_mode_is_aes(uint32_t cr)
{
	return ((cr & _CRYP_CR_ALGOMODE_MSK) >> _CRYP_CR_ALGOMODE_OFF) >=
	       _CRYP_CR_ALGOMODE_AES_ECB;
}

static bool is_decrypt(uint32_t cr)
{
	return (cr & _CRYP_CR_ALGODIR) == _CRYP_CR_ALGODIR_DEC;
}

static bool is_encrypt(uint32_t cr)
{
	return !is_decrypt(cr);
}

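/*
 * The NPBLB field (number of padding bytes in the last block) must be
 * programmed so that padding is not authenticated as payload. In this
 * driver it is only needed for GCM encryption and CCM decryption; see
 * its single use in stm32_cryp_final().
 */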
static bool does_need_npblb(uint32_t cr)
{
	return (IS_ALGOMODE(cr, AES_GCM) && is_encrypt(cr)) ||
	       (IS_ALGOMODE(cr, AES_CCM) && is_decrypt(cr));
}

static TEE_Result wait_sr_bits(vaddr_t base, uint32_t bits)
{
	uint64_t timeout_ref = timeout_init_us(CRYP_TIMEOUT_US);

	while ((io_read32(base + _CRYP_SR) & bits) != bits)
		if (timeout_elapsed(timeout_ref))
			break;

	if ((io_read32(base + _CRYP_SR) & bits) != bits)
		return TEE_ERROR_BUSY;

	return TEE_SUCCESS;
}

static TEE_Result wait_end_busy(vaddr_t base)
{
	uint64_t timeout_ref = timeout_init_us(CRYP_TIMEOUT_US);

	while (io_read32(base + _CRYP_SR) & _CRYP_SR_BUSY)
		if (timeout_elapsed(timeout_ref))
			break;

	if (io_read32(base + _CRYP_SR) & _CRYP_SR_BUSY)
		return TEE_ERROR_BUSY;

	return TEE_SUCCESS;
}

static TEE_Result wait_end_enable(vaddr_t base)
{
	uint64_t timeout_ref = timeout_init_us(CRYP_TIMEOUT_US);

	while (io_read32(base + _CRYP_CR) & _CRYP_CR_CRYPEN)
		if (timeout_elapsed(timeout_ref))
			break;

	if (io_read32(base + _CRYP_CR) & _CRYP_CR_CRYPEN)
		return TEE_ERROR_BUSY;

	return TEE_SUCCESS;
}

static TEE_Result __must_check write_align_block(struct stm32_cryp_context *ctx,
						 uint32_t *data)
{
	TEE_Result res = TEE_SUCCESS;
	unsigned int i = 0;

	res = wait_sr_bits(ctx->base, _CRYP_SR_IFNF);
	if (res)
		return res;

	for (i = 0; i < ctx->block_u32; i++) {
		/* No need to htobe() as we configure the HW to swap bytes */
		io_write32(ctx->base + _CRYP_DIN, data[i]);
	}

	return TEE_SUCCESS;
}

static TEE_Result __must_check write_block(struct stm32_cryp_context *ctx,
					   uint8_t *data)
{
	if (!IS_ALIGNED_WITH_TYPE(data, uint32_t)) {
		uint32_t data_u32[MAX_BLOCK_NB_U32] = { 0 };

		memcpy(data_u32, data, ctx->block_u32 * sizeof(uint32_t));
		return write_align_block(ctx, data_u32);
	}

	return write_align_block(ctx, (void *)data);
}

static TEE_Result __must_check read_align_block(struct stm32_cryp_context *ctx,
						uint32_t *data)
{
	TEE_Result res = TEE_SUCCESS;
	unsigned int i = 0;

	res = wait_sr_bits(ctx->base, _CRYP_SR_OFNE);
	if (res)
		return res;

	for (i = 0; i < ctx->block_u32; i++) {
		/* No need to htobe() as we configure the HW to swap bytes */
		data[i] = io_read32(ctx->base + _CRYP_DOUT);
	}

	return TEE_SUCCESS;
}

static TEE_Result __must_check read_block(struct stm32_cryp_context *ctx,
					  uint8_t *data)
{
	if (!IS_ALIGNED_WITH_TYPE(data, uint32_t)) {
		TEE_Result res = TEE_SUCCESS;
		uint32_t data_u32[MAX_BLOCK_NB_U32] = { 0 };

		res = read_align_block(ctx, data_u32);
		if (res)
			return res;

		memcpy(data, data_u32, ctx->block_u32 * sizeof(uint32_t));

		return TEE_SUCCESS;
	}

	return read_align_block(ctx, (void *)data);
}

static TEE_Result stm32_cryp_reset(void)
{
	TEE_Result res = TEE_ERROR_GENERIC;

	if (!cryp_pdata.reset)
		return TEE_SUCCESS;

	res = rstctrl_assert_to(cryp_pdata.reset, TIMEOUT_US_1MS);
	if (res)
		return res;

	udelay(CRYP_RESET_DELAY_US);

	return rstctrl_deassert_to(cryp_pdata.reset, TIMEOUT_US_1MS);
}

static void cryp_end(struct stm32_cryp_context *ctx, TEE_Result prev_error)
{
	if (prev_error && stm32_cryp_reset())
		panic();

	/* Disable the CRYP peripheral */
	io_clrbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);
}

static void cryp_write_iv(struct stm32_cryp_context *ctx)
{
	if (algo_mode_needs_iv(ctx->cr)) {
		unsigned int i = 0;

		/* Restore the _CRYP_IVRx */
		for (i = 0; i < ctx->block_u32; i++)
			io_write32(ctx->base + _CRYP_IV0LR + i *
				   sizeof(uint32_t), ctx->iv[i]);
	}
}

static void cryp_save_suspend(struct stm32_cryp_context *ctx)
{
	unsigned int i = 0;

	if (IS_ALGOMODE(ctx->cr, AES_GCM) || IS_ALGOMODE(ctx->cr, AES_CCM))
		for (i = 0; i < ARRAY_SIZE(ctx->pm_gcmccm); i++)
			ctx->pm_gcmccm[i] = io_read32(ctx->base +
						      _CRYP_CSGCMCCM0R +
						      i * sizeof(uint32_t));

	if (IS_ALGOMODE(ctx->cr, AES_GCM))
		for (i = 0; i < ARRAY_SIZE(ctx->pm_gcm); i++)
			ctx->pm_gcm[i] = io_read32(ctx->base + _CRYP_CSGCM0R +
						   i * sizeof(uint32_t));
}

static void cryp_restore_suspend(struct stm32_cryp_context *ctx)
{
	unsigned int i = 0;

	if (IS_ALGOMODE(ctx->cr, AES_GCM) || IS_ALGOMODE(ctx->cr, AES_CCM))
		for (i = 0; i < ARRAY_SIZE(ctx->pm_gcmccm); i++)
			io_write32(ctx->base + _CRYP_CSGCMCCM0R +
				   i * sizeof(uint32_t), ctx->pm_gcmccm[i]);

	if (IS_ALGOMODE(ctx->cr, AES_GCM))
		for (i = 0; i < ARRAY_SIZE(ctx->pm_gcm); i++)
			io_write32(ctx->base + _CRYP_CSGCM0R +
				   i * sizeof(uint32_t), ctx->pm_gcm[i]);
}

static void cryp_write_key(struct stm32_cryp_context *ctx)
{
	vaddr_t reg = 0;
	int i = 0;
	uint32_t algo = GET_ALGOMODE(ctx->cr);

	if (algo == _CRYP_CR_ALGOMODE_DES_ECB ||
	    algo == _CRYP_CR_ALGOMODE_DES_CBC)
		reg = ctx->base + _CRYP_K1RR;
	else
		reg = ctx->base + _CRYP_K3RR;

	for (i = ctx->key_size / sizeof(uint32_t) - 1;
	     i >= 0;
	     i--, reg -= sizeof(uint32_t))
		io_write32(reg, ctx->key[i]);
}

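/*
 * AES ECB/CBC decryption needs the decryption key schedule. The helper
 * below derives it by running the "key preparation" algo mode
 * (_CRYP_CR_ALGOMODE_AES) with the key loaded, waiting for completion,
 * then restoring the caller's algo mode.
 */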
static TEE_Result cryp_prepare_key(struct stm32_cryp_context *ctx)
{
	TEE_Result res = TEE_SUCCESS;

	/*
	 * For AES ECB/CBC decryption, key preparation mode must be selected
	 * to populate the key.
	 */
	if (is_decrypt(ctx->cr) && (IS_ALGOMODE(ctx->cr, AES_ECB) ||
				    IS_ALGOMODE(ctx->cr, AES_CBC))) {
		/* Select Algomode "prepare key" */
		io_clrsetbits32(ctx->base + _CRYP_CR, _CRYP_CR_ALGOMODE_MSK,
				_CRYP_CR_ALGOMODE_AES << _CRYP_CR_ALGOMODE_OFF);

		cryp_write_key(ctx);

		/* Enable CRYP */
		io_setbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);

		res = wait_end_busy(ctx->base);
		if (res)
			return res;

		/* Reset 'real' algomode */
		io_clrsetbits32(ctx->base + _CRYP_CR, _CRYP_CR_ALGOMODE_MSK,
				ctx->cr & _CRYP_CR_ALGOMODE_MSK);
	} else {
		cryp_write_key(ctx);
	}

	return TEE_SUCCESS;
}

static TEE_Result save_context(struct stm32_cryp_context *ctx)
{
	/* Device should not be in a processing phase */
	if (io_read32(ctx->base + _CRYP_SR) & _CRYP_SR_BUSY)
		return TEE_ERROR_BAD_STATE;

	/* Disable the CRYP peripheral */
	io_clrbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);

	/* Save CR */
	ctx->cr = io_read32(ctx->base + _CRYP_CR);

	cryp_save_suspend(ctx);

	/* If algo mode needs to save current IV */
	if (algo_mode_needs_iv(ctx->cr)) {
		unsigned int i = 0;

		/* Save IV */
		for (i = 0; i < ctx->block_u32; i++)
			ctx->iv[i] = io_read32(ctx->base + _CRYP_IV0LR + i *
					       sizeof(uint32_t));
	}

	return TEE_SUCCESS;
}

/* To resume the processing of a message */
static TEE_Result restore_context(struct stm32_cryp_context *ctx)
{
	TEE_Result res = TEE_SUCCESS;

	/* IP should be disabled */
	if (io_read32(ctx->base + _CRYP_CR) & _CRYP_CR_CRYPEN) {
		DMSG("Device is still enabled");
		return TEE_ERROR_BAD_STATE;
	}

	/* Restore the _CRYP_CR */
	io_write32(ctx->base + _CRYP_CR, ctx->cr);

	/* Write key and, in case of AES_CBC or AES_ECB decrypt, prepare it */
	res = cryp_prepare_key(ctx);
	if (res)
		return res;

	cryp_restore_suspend(ctx);

	cryp_write_iv(ctx);

	/* Flush internal fifo */
	io_setbits32(ctx->base + _CRYP_CR, _CRYP_CR_FFLUSH);

	/* Enable the CRYP peripheral */
	io_setbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);

	return TEE_SUCCESS;
}

/*
 * Translate a byte index in an array of BE uint32_t into the index of the
 * same byte in the corresponding LE uint32_t array.
 */
static size_t be_index(size_t index)
{
	return (index & ~0x3) + 3 - (index & 0x3);
}
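
/*
 * Example: be_index() maps byte indices 0,1,2,3 to 3,2,1,0 and 4,5,6,7
 * to 7,6,5,4. ccm_first_context() below relies on this to patch single
 * bytes of the CCM B0/CTR0 blocks while ctx->iv holds them as LE
 * uint32_t words in the layout expected by the IV registers.
 */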

static TEE_Result ccm_first_context(struct stm32_cryp_context *ctx)
{
	TEE_Result res = TEE_SUCCESS;
	uint32_t b0[AES_BLOCK_NB_U32] = { 0 };
	uint8_t *iv = (uint8_t *)ctx->iv;
	size_t l = 0;
	size_t i = 15;

	/* IP should be disabled */
	if (io_read32(ctx->base + _CRYP_CR) & _CRYP_CR_CRYPEN)
		return TEE_ERROR_BAD_STATE;

	/* Write the _CRYP_CR */
	io_write32(ctx->base + _CRYP_CR, ctx->cr);

	/* Write key */
	res = cryp_prepare_key(ctx);
	if (res)
		return res;

	/* Save the full IV, which is B0 */
	memcpy(b0, iv, sizeof(b0));

	/*
	 * Update IV to become CTR0/1 before setting it.
	 * IV is saved as LE uint32_t[4] as expected by hardware,
	 * but CCM RFC defines bytes to update in a BE array.
	 */
	/* Set flag bits to 0 (5 upper bits), keep the 3 low bits */
	iv[be_index(0)] &= 0x7;
	/* Get size of length field (can be from 2 to 8) */
	l = iv[be_index(0)] + 1;
	/* Set Q to 0 */
	for (i = 15; i >= 15 - l + 1; i--)
		iv[be_index(i)] = 0;
	/* Save CTR0 */
	memcpy(ctx->ctr0_ccm, iv, sizeof(b0));
	/* Increment Q */
	iv[be_index(15)] |= 0x1;

	cryp_write_iv(ctx);

	/* Enable the CRYP peripheral */
	io_setbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);

	res = write_align_block(ctx, b0);

	return res;
}

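/*
 * GCM/CCM processing goes through the four hardware phases encoded in
 * the GCM_CCMPH field: INIT (IV/B0 setup), HEADER (associated data),
 * PAYLOAD (de/encrypted data) and FINAL (tag). The two helpers below
 * move the peripheral from the phase recorded in ctx->cr to the
 * requested one; when leaving the header phase, any partial block kept
 * in ctx->extra is flushed first.
 */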
static TEE_Result do_from_init_to_phase(struct stm32_cryp_context *ctx,
					uint32_t new_phase)
{
	TEE_Result res = TEE_SUCCESS;

	/*
	 * The init phase has not run yet.
	 * CCM needs a specific context-restore sequence for the init phase.
	 */
	if (IS_ALGOMODE(ctx->cr, AES_CCM))
		res = ccm_first_context(ctx);
	else
		res = restore_context(ctx);

	if (res)
		return res;

	res = wait_end_enable(ctx->base);
	if (res)
		return res;

	/* Move to 'new_phase' */
	io_clrsetbits32(ctx->base + _CRYP_CR, _CRYP_CR_GCM_CCMPH_MSK,
			new_phase << _CRYP_CR_GCM_CCMPH_OFF);

	/* Enable the CRYP peripheral (init disabled it) */
	io_setbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);

	return TEE_SUCCESS;
}

static TEE_Result do_from_header_to_phase(struct stm32_cryp_context *ctx,
					  uint32_t new_phase)
{
	TEE_Result res = TEE_SUCCESS;

	res = restore_context(ctx);
	if (res)
		return res;

	if (ctx->extra_size) {
		/* Manage unaligned header data before moving to next phase */
		memset((uint8_t *)ctx->extra + ctx->extra_size, 0,
		       ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size);

		res = write_align_block(ctx, ctx->extra);
		if (res)
			return res;

		ctx->assoc_len += (ctx->extra_size) * INT8_BIT;
		ctx->extra_size = 0;
	}

	/* Move to 'new_phase' */
	io_clrsetbits32(ctx->base + _CRYP_CR, _CRYP_CR_GCM_CCMPH_MSK,
			new_phase << _CRYP_CR_GCM_CCMPH_OFF);

	return TEE_SUCCESS;
}

/**
 * @brief Start an AES or DES/TDES computation.
 * @param ctx: CRYP process context
 * @param is_dec: true if decryption, false if encryption
 * @param algo: define the algo mode
 * @param key: pointer to key
 * @param key_size: key size
 * @param iv: pointer to initialization vector (unused if algo is ECB)
 * @param iv_size: iv size
 * @note this function does not access the hardware, it only stores the
 * configuration in ctx
 *
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_cryp_init(struct stm32_cryp_context *ctx, bool is_dec,
			   enum stm32_cryp_algo_mode algo,
			   const void *key, size_t key_size, const void *iv,
			   size_t iv_size)
{
	unsigned int i = 0;
	const uint32_t *iv_u32 = NULL;
	uint32_t local_iv[4] = { 0 };
	const uint32_t *key_u32 = NULL;
	uint32_t local_key[8] = { 0 };

	ctx->assoc_len = 0;
	ctx->load_len = 0;
	ctx->extra_size = 0;
	ctx->lock = &cryp_lock;

	ctx->base = io_pa_or_va(&cryp_pdata.base, 1);
	ctx->cr = _CRYP_CR_RESET_VALUE;

	/* We want buffer to be u32 aligned */
	if (IS_ALIGNED_WITH_TYPE(key, uint32_t)) {
		key_u32 = key;
	} else {
		memcpy(local_key, key, key_size);
		key_u32 = local_key;
	}

	if (IS_ALIGNED_WITH_TYPE(iv, uint32_t)) {
		iv_u32 = iv;
	} else {
		memcpy(local_iv, iv, iv_size);
		iv_u32 = local_iv;
	}

	if (is_dec)
		SETBITS(ctx->cr, _CRYP_CR_ALGODIR);
	else
		CLRBITS(ctx->cr, _CRYP_CR_ALGODIR);

	/* Save algo mode */
	switch (algo) {
	case STM32_CRYP_MODE_TDES_ECB:
		SET_ALGOMODE(TDES_ECB, ctx->cr);
		break;
	case STM32_CRYP_MODE_TDES_CBC:
		SET_ALGOMODE(TDES_CBC, ctx->cr);
		break;
	case STM32_CRYP_MODE_DES_ECB:
		SET_ALGOMODE(DES_ECB, ctx->cr);
		break;
	case STM32_CRYP_MODE_DES_CBC:
		SET_ALGOMODE(DES_CBC, ctx->cr);
		break;
	case STM32_CRYP_MODE_AES_ECB:
		SET_ALGOMODE(AES_ECB, ctx->cr);
		break;
	case STM32_CRYP_MODE_AES_CBC:
		SET_ALGOMODE(AES_CBC, ctx->cr);
		break;
	case STM32_CRYP_MODE_AES_CTR:
		SET_ALGOMODE(AES_CTR, ctx->cr);
		break;
	case STM32_CRYP_MODE_AES_GCM:
		SET_ALGOMODE(AES_GCM, ctx->cr);
		break;
	case STM32_CRYP_MODE_AES_CCM:
		SET_ALGOMODE(AES_CCM, ctx->cr);
		break;
	default:
		return TEE_ERROR_BAD_PARAMETERS;
	}

	/*
	 * We use the HW byte swap (_CRYP_CR_DATATYPE_BYTE) for data, so
	 * there is no need for TEE_U32_TO_BIG_ENDIAN() before writing to
	 * the DIN register, nor for TEE_U32_FROM_BIG_ENDIAN() after
	 * reading from the DOUT register.
	 */
	clrsetbits(&ctx->cr, _CRYP_CR_DATATYPE_MSK,
		   _CRYP_CR_DATATYPE_BYTE << _CRYP_CR_DATATYPE_OFF);

	/*
	 * Configure the key size for AES algorithms and save the block
	 * size.
	 */
	if (algo_mode_is_aes(ctx->cr)) {
		switch (key_size) {
		case AES_KEYSIZE_128:
			clrsetbits(&ctx->cr, _CRYP_CR_KEYSIZE_MSK,
				   _CRYP_CR_KSIZE_128 << _CRYP_CR_KEYSIZE_OFF);
			break;
		case AES_KEYSIZE_192:
			clrsetbits(&ctx->cr, _CRYP_CR_KEYSIZE_MSK,
				   _CRYP_CR_KSIZE_192 << _CRYP_CR_KEYSIZE_OFF);
			break;
		case AES_KEYSIZE_256:
			clrsetbits(&ctx->cr, _CRYP_CR_KEYSIZE_MSK,
				   _CRYP_CR_KSIZE_256 << _CRYP_CR_KEYSIZE_OFF);
			break;
		default:
			return TEE_ERROR_BAD_PARAMETERS;
		}

		/* And set block size */
		ctx->block_u32 = AES_BLOCK_NB_U32;
	} else {
		/* And set DES/TDES block size */
		ctx->block_u32 = DES_BLOCK_NB_U32;
	}

	/* Save key in HW order */
	ctx->key_size = key_size;
	for (i = 0; i < key_size / sizeof(uint32_t); i++)
		ctx->key[i] = TEE_U32_TO_BIG_ENDIAN(key_u32[i]);

	/* Save IV */
	if (algo_mode_needs_iv(ctx->cr)) {
		if (!iv || iv_size != ctx->block_u32 * sizeof(uint32_t))
			return TEE_ERROR_BAD_PARAMETERS;

		/*
		 * We save IV in the byte order expected by the
		 * IV registers
		 */
		for (i = 0; i < ctx->block_u32; i++)
			ctx->iv[i] = TEE_U32_TO_BIG_ENDIAN(iv_u32[i]);
	}

	/* Reset suspend registers */
	memset(ctx->pm_gcmccm, 0, sizeof(ctx->pm_gcmccm));
	memset(ctx->pm_gcm, 0, sizeof(ctx->pm_gcm));

	return TEE_SUCCESS;
}
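
/*
 * Typical AES-GCM call sequence (illustrative sketch only: error handling
 * omitted, "key", "iv", "aad", "plain", "cipher" and "tag" are
 * caller-provided buffers):
 *
 *	struct stm32_cryp_context ctx = { };
 *
 *	stm32_cryp_init(&ctx, false, STM32_CRYP_MODE_AES_GCM,
 *			key, key_size, iv, iv_size);
 *	stm32_cryp_update_assodata(&ctx, aad, aad_size);
 *	stm32_cryp_update_load(&ctx, plain, cipher, data_size);
 *	stm32_cryp_final(&ctx, tag, tag_size);
 */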

/**
 * @brief Update (or start) an AES authentication process with
 *        associated data (CCM or GCM).
 * @param ctx: CRYP process context
 * @param data: pointer to associated data
 * @param data_size: data size
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_cryp_update_assodata(struct stm32_cryp_context *ctx,
				      uint8_t *data, size_t data_size)
{
	TEE_Result res = TEE_SUCCESS;
	unsigned int i = 0;
	uint32_t previous_phase = 0;

	/* If no associated data, nothing to do */
	if (!data || !data_size)
		return TEE_SUCCESS;

	mutex_lock(ctx->lock);

	previous_phase = (ctx->cr & _CRYP_CR_GCM_CCMPH_MSK) >>
			 _CRYP_CR_GCM_CCMPH_OFF;

	switch (previous_phase) {
	case _CRYP_CR_GCM_CCMPH_INIT:
		res = do_from_init_to_phase(ctx, _CRYP_CR_GCM_CCMPH_HEADER);
		break;
	case _CRYP_CR_GCM_CCMPH_HEADER:
		/*
		 * Function update_assodata was already called.
		 * We only need to restore the context.
		 */
		res = restore_context(ctx);
		break;
	default:
		assert(0);
		res = TEE_ERROR_BAD_STATE;
	}

	if (res)
		goto out;

	/* Handle remaining data from a previous update_assodata() call */
	if (ctx->extra_size &&
	    (ctx->extra_size + data_size >=
	     ctx->block_u32 * sizeof(uint32_t))) {
		uint32_t block[MAX_BLOCK_NB_U32] = { 0 };

		memcpy(block, ctx->extra, ctx->extra_size);
		memcpy((uint8_t *)block + ctx->extra_size, data,
		       ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size);

		res = write_align_block(ctx, block);
		if (res)
			goto out;

		i += ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size;
		ctx->extra_size = 0;
		ctx->assoc_len += ctx->block_u32 * sizeof(uint32_t) * INT8_BIT;
	}

	while (data_size - i >= ctx->block_u32 * sizeof(uint32_t)) {
		res = write_block(ctx, data + i);
		if (res)
			goto out;

		/* Process next block */
		i += ctx->block_u32 * sizeof(uint32_t);
		ctx->assoc_len += ctx->block_u32 * sizeof(uint32_t) * INT8_BIT;
	}

	/*
	 * Handle the last block if the size is not a multiple of the block
	 * size: save the remaining data to process it later (potentially
	 * with new associated data).
	 */
	if (i < data_size) {
		memcpy((uint8_t *)ctx->extra + ctx->extra_size, data + i,
		       data_size - i);
		ctx->extra_size += data_size - i;
	}

	res = save_context(ctx);
out:
	if (res)
		cryp_end(ctx, res);

	mutex_unlock(ctx->lock);

	return res;
}

/**
 * @brief Update (or start) an AES authenticated de/encryption of
 *        payload data (CCM or GCM).
 * @param ctx: CRYP process context
 * @param data_in: pointer to payload
 * @param data_out: pointer where to save de/encrypted payload
 * @param data_size: payload size
 *
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_cryp_update_load(struct stm32_cryp_context *ctx,
				  uint8_t *data_in, uint8_t *data_out,
				  size_t data_size)
{
	TEE_Result res = TEE_SUCCESS;
	unsigned int i = 0;
	uint32_t previous_phase = 0;

	if (!data_in || !data_size)
		return TEE_SUCCESS;

	mutex_lock(ctx->lock);

	previous_phase = (ctx->cr & _CRYP_CR_GCM_CCMPH_MSK) >>
			 _CRYP_CR_GCM_CCMPH_OFF;

	switch (previous_phase) {
	case _CRYP_CR_GCM_CCMPH_INIT:
		res = do_from_init_to_phase(ctx, _CRYP_CR_GCM_CCMPH_PAYLOAD);
		break;
	case _CRYP_CR_GCM_CCMPH_HEADER:
		res = do_from_header_to_phase(ctx, _CRYP_CR_GCM_CCMPH_PAYLOAD);
		break;
	case _CRYP_CR_GCM_CCMPH_PAYLOAD:
		/* New update_load() call, we only need to restore context */
		res = restore_context(ctx);
		break;
	default:
		assert(0);
		res = TEE_ERROR_BAD_STATE;
	}

	if (res)
		goto out;

	/* Handle an incomplete block from a previous update_load() call */
	if (ctx->extra_size &&
	    (ctx->extra_size + data_size >=
	     ctx->block_u32 * sizeof(uint32_t))) {
		uint32_t block_out[MAX_BLOCK_NB_U32] = { 0 };

		memcpy((uint8_t *)ctx->extra + ctx->extra_size, data_in + i,
		       ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size);

		res = write_align_block(ctx, ctx->extra);
		if (res)
			goto out;

		res = read_align_block(ctx, block_out);
		if (res)
			goto out;

		memcpy(data_out + i, (uint8_t *)block_out + ctx->extra_size,
		       ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size);

		i += ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size;
		ctx->extra_size = 0;

		ctx->load_len += ctx->block_u32 * sizeof(uint32_t) * INT8_BIT;
	}

	while (data_size - i >= ctx->block_u32 * sizeof(uint32_t)) {
		res = write_block(ctx, data_in + i);
		if (res)
			goto out;

		res = read_block(ctx, data_out + i);
		if (res)
			goto out;

		/* Process next block */
		i += ctx->block_u32 * sizeof(uint32_t);
		ctx->load_len += ctx->block_u32 * sizeof(uint32_t) * INT8_BIT;
	}

	res = save_context(ctx);
	if (res)
		goto out;

	/*
	 * Handle the last block if the size is not a multiple of the block
	 * size. The context is already saved: pad the block with zeros and
	 * send it to the CRYP to get the {en,de}crypted data, and store the
	 * data so it can be resent as the last block in final() or used to
	 * complete the next update_load() and still get a correct tag.
	 */
	if (i < data_size) {
		uint32_t block_out[MAX_BLOCK_NB_U32] = { 0 };
		size_t prev_extra_size = ctx->extra_size;

		/* Re-enable the CRYP peripheral */
		io_setbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);

		memcpy((uint8_t *)ctx->extra + ctx->extra_size, data_in + i,
		       data_size - i);
		ctx->extra_size += data_size - i;
		memset((uint8_t *)ctx->extra + ctx->extra_size, 0,
		       ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size);

		res = write_align_block(ctx, ctx->extra);
		if (res)
			goto out;

		res = read_align_block(ctx, block_out);
		if (res)
			goto out;

		memcpy(data_out + i, (uint8_t *)block_out + prev_extra_size,
		       data_size - i);

		/* Disable the CRYP peripheral */
		io_clrbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);
	}

out:
	if (res)
		cryp_end(ctx, res);

	mutex_unlock(ctx->lock);

	return res;
}

/**
 * @brief Get authentication tag for AES authenticated algorithms (CCM or GCM).
 * @param ctx: CRYP process context
 * @param tag: pointer where to save the tag
 * @param tag_size: tag size
 *
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_cryp_final(struct stm32_cryp_context *ctx, uint8_t *tag,
			    size_t tag_size)
{
	TEE_Result res = TEE_SUCCESS;
	uint32_t tag_u32[4] = { 0 };
	uint32_t previous_phase = 0;

	mutex_lock(ctx->lock);

	previous_phase = (ctx->cr & _CRYP_CR_GCM_CCMPH_MSK) >>
			 _CRYP_CR_GCM_CCMPH_OFF;

	switch (previous_phase) {
	case _CRYP_CR_GCM_CCMPH_INIT:
		res = do_from_init_to_phase(ctx, _CRYP_CR_GCM_CCMPH_FINAL);
		break;
	case _CRYP_CR_GCM_CCMPH_HEADER:
		res = do_from_header_to_phase(ctx, _CRYP_CR_GCM_CCMPH_FINAL);
		break;
	case _CRYP_CR_GCM_CCMPH_PAYLOAD:
		res = restore_context(ctx);
		if (res)
			break;

		/* Handle an incomplete block from a previous update_load() */
		if (ctx->extra_size) {
			uint32_t block_out[MAX_BLOCK_NB_U32] = { 0 };
			size_t sz = ctx->block_u32 * sizeof(uint32_t) -
				    ctx->extra_size;

			if (does_need_npblb(ctx->cr)) {
				io_clrsetbits32(ctx->base + _CRYP_CR,
						_CRYP_CR_NPBLB_MSK,
						sz << _CRYP_CR_NPBLB_OFF);
			}

			memset((uint8_t *)ctx->extra + ctx->extra_size, 0, sz);

			res = write_align_block(ctx, ctx->extra);
			if (res)
				break;

			/*
			 * Discard the {en,de}crypted data, it was already
			 * output by update_load()
			 */
			res = read_align_block(ctx, block_out);
			if (res)
				break;

			ctx->load_len += (ctx->extra_size * INT8_BIT);
			ctx->extra_size = 0;
		}

		/* Move to final phase */
		io_clrsetbits32(ctx->base + _CRYP_CR, _CRYP_CR_GCM_CCMPH_MSK,
				_CRYP_CR_GCM_CCMPH_FINAL <<
				_CRYP_CR_GCM_CCMPH_OFF);
		break;
	default:
		assert(0);
		res = TEE_ERROR_BAD_STATE;
	}

	if (res)
		goto out;

	if (IS_ALGOMODE(ctx->cr, AES_GCM)) {
		/* No need to htobe() as we configure the HW to swap bytes */
		io_write32(ctx->base + _CRYP_DIN, 0U);
		io_write32(ctx->base + _CRYP_DIN, ctx->assoc_len);
		io_write32(ctx->base + _CRYP_DIN, 0U);
		io_write32(ctx->base + _CRYP_DIN, ctx->load_len);
	} else if (IS_ALGOMODE(ctx->cr, AES_CCM)) {
		/* No need to htobe() in this phase */
		res = write_align_block(ctx, ctx->ctr0_ccm);
		if (res)
			goto out;
	}

	res = read_align_block(ctx, tag_u32);
	if (res)
		goto out;

	memcpy(tag, tag_u32, MIN(sizeof(tag_u32), tag_size));

out:
	cryp_end(ctx, res);
	mutex_unlock(ctx->lock);

	return res;
}

/**
 * @brief Update (or start) a de/encrypt process.
 * @param ctx: CRYP process context
 * @param last_block: true if last payload data block
 * @param data_in: pointer to payload
 * @param data_out: pointer where to save de/encrypted payload
 * @param data_size: payload size
 *
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_cryp_update(struct stm32_cryp_context *ctx, bool last_block,
			     uint8_t *data_in, uint8_t *data_out,
			     size_t data_size)
{
	TEE_Result res = TEE_SUCCESS;
	unsigned int i = 0;

	mutex_lock(ctx->lock);

	/*
	 * In CBC and ECB encryption we would need specific handling of the
	 * last 2 blocks when the total size is not aligned to the block
	 * size. This is currently not implemented, so return
	 * TEE_ERROR_NOT_IMPLEMENTED. Moreover, as we need to know the last
	 * 2 blocks, if the size is unaligned and the call provides fewer
	 * than two blocks, return TEE_ERROR_BAD_STATE.
	 */
	if (last_block && algo_mode_is_ecb_cbc(ctx->cr) &&
	    is_encrypt(ctx->cr) &&
	    (ROUNDDOWN2(data_size, ctx->block_u32 * sizeof(uint32_t)) !=
	     data_size)) {
		if (data_size < ctx->block_u32 * sizeof(uint32_t) * 2) {
			/*
			 * If CBC, size of the last part should be at
			 * least 2*BLOCK_SIZE
			 */
			EMSG("Unexpected last block size");
			res = TEE_ERROR_BAD_STATE;
			goto out;
		}
		/*
		 * Moreover the ECB/CBC specific padding for encryption is
		 * not yet implemented, and not used in OP-TEE
		 */
		res = TEE_ERROR_NOT_IMPLEMENTED;
		goto out;
	}

	/* Manage remaining CTR mask from previous update call */
	if (IS_ALGOMODE(ctx->cr, AES_CTR) && ctx->extra_size) {
		unsigned int j = 0;
		uint8_t *mask = (uint8_t *)ctx->extra;

		for (j = 0; j < ctx->extra_size && i < data_size; j++, i++)
			data_out[i] = data_in[i] ^ mask[j];

		if (j != ctx->extra_size) {
			/*
			 * We did not consume all of the saved mask, but
			 * there is no more data.
			 */

			/* We save the remaining mask and its new size */
			memmove(ctx->extra, (uint8_t *)ctx->extra + j,
				ctx->extra_size - j);
			ctx->extra_size -= j;

			/*
			 * No need to save the HW context: we did not
			 * modify the HW state.
			 */
			res = TEE_SUCCESS;
			goto out;
		}

		/* All extra mask consumed */
		ctx->extra_size = 0;
	}

	res = restore_context(ctx);
	if (res)
		goto out;

	while (data_size - i >= ctx->block_u32 * sizeof(uint32_t)) {
		/*
		 * We only write/read one block at a time, although the
		 * CRYP input (and output) FIFO holds 8 * uint32_t.
		 */
		res = write_block(ctx, data_in + i);
		if (res)
			goto out;

		res = read_block(ctx, data_out + i);
		if (res)
			goto out;

		/* Process next block */
		i += ctx->block_u32 * sizeof(uint32_t);
	}

	/* Manage last block if not a block size multiple */
	if (i < data_size) {
		uint32_t block_in[MAX_BLOCK_NB_U32] = { 0 };
		uint32_t block_out[MAX_BLOCK_NB_U32] = { 0 };

		if (!IS_ALGOMODE(ctx->cr, AES_CTR)) {
			/*
			 * Algorithms other than CTR can only handle sizes
			 * that are a multiple of the block size.
			 */
			res = TEE_ERROR_BAD_PARAMETERS;
			goto out;
		}

		/*
		 * For CTR we save the generated mask to use it in the next
		 * update call.
		 */
		memcpy(block_in, data_in + i, data_size - i);

		res = write_align_block(ctx, block_in);
		if (res)
			goto out;

		res = read_align_block(ctx, block_out);
		if (res)
			goto out;

		memcpy(data_out + i, block_out, data_size - i);

		/* Save mask for possibly next call */
		ctx->extra_size = ctx->block_u32 * sizeof(uint32_t) -
			(data_size - i);
		memcpy(ctx->extra, (uint8_t *)block_out + data_size - i,
		       ctx->extra_size);
	}

	if (!last_block)
		res = save_context(ctx);

out:
	/* If last block or error, end of CRYP process */
	if (last_block || res)
		cryp_end(ctx, res);

	mutex_unlock(ctx->lock);

	return res;
}
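
/*
 * CTR updates need not be block aligned: the unused part of the last
 * keystream block is kept in ctx->extra and consumed first by the next
 * call. For instance (illustrative only), two successive updates of 10
 * and 22 bytes produce the same output as a single 32-byte update.
 */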

static TEE_Result stm32_cryp_pm(enum pm_op op, uint32_t pm_hint,
				const struct pm_callback_handle *hdl __unused)
{
	switch (op) {
	case PM_OP_SUSPEND:
		clk_disable(cryp_pdata.clock);
		return TEE_SUCCESS;
	case PM_OP_RESUME:
		if (clk_enable(cryp_pdata.clock))
			panic();

		if (PM_HINT_IS_STATE(pm_hint, CONTEXT) && stm32_cryp_reset())
			panic();

		return TEE_SUCCESS;
	default:
		/* Unexpected PM operation */
		assert(0);
		return TEE_ERROR_NOT_IMPLEMENTED;
	}
}
DECLARE_KEEP_PAGER(stm32_cryp_pm);

static TEE_Result stm32_cryp_probe(const void *fdt, int node,
				   const void *compt_data __unused)
{
	TEE_Result res = TEE_SUCCESS;
	struct dt_node_info dt_cryp = { };
	struct rstctrl *rstctrl = NULL;
	struct clk *clk = NULL;

	fdt_fill_device_info(fdt, &dt_cryp, node);

	if (dt_cryp.reg == DT_INFO_INVALID_REG ||
	    dt_cryp.reg_size == DT_INFO_INVALID_REG_SIZE)
		panic();

	res = clk_dt_get_by_index(fdt, node, 0, &clk);
	if (res)
		return res;

	res = rstctrl_dt_get_by_index(fdt, node, 0, &rstctrl);
	if (res != TEE_SUCCESS && res != TEE_ERROR_ITEM_NOT_FOUND)
		return res;

	cryp_pdata.clock = clk;
	cryp_pdata.reset = rstctrl;
	cryp_pdata.base.pa = dt_cryp.reg;

	io_pa_or_va_secure(&cryp_pdata.base, dt_cryp.reg_size);
	if (!cryp_pdata.base.va)
		panic();

	if (clk_enable(cryp_pdata.clock))
		panic();

	if (stm32_cryp_reset())
		panic();

	if (IS_ENABLED(CFG_CRYPTO_DRV_AUTHENC)) {
		res = stm32_register_authenc();
		if (res) {
			EMSG("Failed to register to authenc: %#"PRIx32, res);
			panic();
		}
	}

	if (IS_ENABLED(CFG_CRYPTO_DRV_CIPHER)) {
		res = stm32_register_cipher(CRYP_IP);
		if (res) {
			EMSG("Failed to register to cipher: %#"PRIx32, res);
			panic();
		}
	}

	register_pm_core_service_cb(stm32_cryp_pm, NULL, "stm32-cryp");

	return TEE_SUCCESS;
}

static const struct dt_device_match stm32_cryp_match_table[] = {
	{ .compatible = "st,stm32mp1-cryp" },
	{ }
};

DEFINE_DT_DRIVER(stm32_cryp_dt_driver) = {
	.name = "stm32-cryp",
	.match_table = stm32_cryp_match_table,
	.probe = stm32_cryp_probe,
};