// SPDX-License-Identifier: BSD-2-Clause
/*
 * Copyright (c) 2021-2023, STMicroelectronics - All Rights Reserved
 */
#include <assert.h>
#include <config.h>
#include <drivers/clk.h>
#include <drivers/clk_dt.h>
#include <drivers/rstctrl.h>
#include <io.h>
#include <kernel/boot.h>
#include <kernel/delay.h>
#include <kernel/dt.h>
#include <kernel/huk_subkey.h>
#include <kernel/mutex.h>
#include <kernel/pm.h>
#include <libfdt.h>
#include <mm/core_memprot.h>
#include <stdint.h>
#include <stm32_util.h>
#include <string_ext.h>
#include <utee_defines.h>
#include <util.h>

#include "common.h"
#include "stm32_saes.h"

/* SAES control register */
#define _SAES_CR U(0x0)
/* SAES status register */
#define _SAES_SR U(0x04)
/* SAES data input register */
#define _SAES_DINR U(0x08)
/* SAES data output register */
#define _SAES_DOUTR U(0x0c)
/* SAES key registers [0-3] */
#define _SAES_KEYR0 U(0x10)
#define _SAES_KEYR1 U(0x14)
#define _SAES_KEYR2 U(0x18)
#define _SAES_KEYR3 U(0x1c)
/* SAES initialization vector registers [0-3] */
#define _SAES_IVR0 U(0x20)
#define _SAES_IVR1 U(0x24)
#define _SAES_IVR2 U(0x28)
#define _SAES_IVR3 U(0x2c)
/* SAES key registers [4-7] */
#define _SAES_KEYR4 U(0x30)
#define _SAES_KEYR5 U(0x34)
#define _SAES_KEYR6 U(0x38)
#define _SAES_KEYR7 U(0x3c)
/* SAES suspend registers [0-7] */
#define _SAES_SUSPR0 U(0x40)
#define _SAES_SUSPR1 U(0x44)
#define _SAES_SUSPR2 U(0x48)
#define _SAES_SUSPR3 U(0x4c)
#define _SAES_SUSPR4 U(0x50)
#define _SAES_SUSPR5 U(0x54)
#define _SAES_SUSPR6 U(0x58)
#define _SAES_SUSPR7 U(0x5c)
/* SAES Interrupt Enable Register */
#define _SAES_IER U(0x300)
/* SAES Interrupt Status Register */
#define _SAES_ISR U(0x304)
/* SAES Interrupt Clear Register */
#define _SAES_ICR U(0x308)

/* SAES control register fields */
#define _SAES_CR_RESET_VALUE U(0x0)
#define _SAES_CR_IPRST BIT(31)
#define _SAES_CR_KEYSEL_MASK GENMASK_32(30, 28)
#define _SAES_CR_KEYSEL_SHIFT U(28)
#define _SAES_CR_KEYSEL_SOFT U(0x0)
#define _SAES_CR_KEYSEL_DHUK U(0x1)
#define _SAES_CR_KEYSEL_BHK U(0x2)
#define _SAES_CR_KEYSEL_BHU_XOR_BH_K U(0x4)
#define _SAES_CR_KEYSEL_TEST U(0x7)
#define _SAES_CR_KSHAREID_MASK GENMASK_32(27, 26)
#define _SAES_CR_KSHAREID_SHIFT U(26)
#define _SAES_CR_KSHAREID_CRYP U(0x0)
#define _SAES_CR_KEYMOD_MASK GENMASK_32(25, 24)
#define _SAES_CR_KEYMOD_SHIFT U(24)
#define _SAES_CR_KEYMOD_NORMAL U(0x0)
#define _SAES_CR_KEYMOD_WRAPPED U(0x1)
#define _SAES_CR_KEYMOD_SHARED U(0x2)
#define _SAES_CR_NPBLB_MASK GENMASK_32(23, 20)
#define _SAES_CR_NPBLB_SHIFT U(20)
#define _SAES_CR_KEYPROT BIT(19)
#define _SAES_CR_KEYSIZE BIT(18)
#define _SAES_CR_GCMPH_MASK GENMASK_32(14, 13)
#define _SAES_CR_GCMPH_SHIFT U(13)
#define _SAES_CR_GCMPH_INIT U(0)
#define _SAES_CR_GCMPH_HEADER U(1)
#define _SAES_CR_GCMPH_PAYLOAD U(2)
#define _SAES_CR_GCMPH_FINAL U(3)
#define _SAES_CR_DMAOUTEN BIT(12)
#define _SAES_CR_DMAINEN BIT(11)
#define _SAES_CR_CHMOD_MASK (BIT(16) | GENMASK_32(6, 5))
#define _SAES_CR_CHMOD_SHIFT U(5)
#define _SAES_CR_CHMOD_ECB U(0x0)
#define _SAES_CR_CHMOD_CBC U(0x1)
#define _SAES_CR_CHMOD_CTR U(0x2)
#define _SAES_CR_CHMOD_GCM U(0x3)
#define _SAES_CR_CHMOD_GMAC U(0x3)
#define _SAES_CR_CHMOD_CCM U(0x800)
#define _SAES_CR_MODE_MASK GENMASK_32(4, 3)
#define _SAES_CR_MODE_SHIFT U(3)
#define _SAES_CR_MODE_ENC U(0)
#define _SAES_CR_MODE_KEYPREP U(1)
#define _SAES_CR_MODE_DEC U(2)
#define _SAES_CR_DATATYPE_MASK GENMASK_32(2, 1)
#define _SAES_CR_DATATYPE_SHIFT U(1)
#define _SAES_CR_DATATYPE_NONE U(0)
#define _SAES_CR_DATATYPE_HALF_WORD U(1)
#define _SAES_CR_DATATYPE_BYTE U(2)
#define _SAES_CR_DATATYPE_BIT U(3)
#define _SAES_CR_EN BIT(0)

/* SAES status register fields */
#define _SAES_SR_KEYVALID BIT(7)
#define _SAES_SR_BUSY BIT(3)
#define _SAES_SR_WRERR BIT(2)
#define _SAES_SR_RDERR BIT(1)
#define _SAES_SR_CCF BIT(0)

/* SAES interrupt registers fields */
#define _SAES_I_RNG_ERR BIT(3)
#define _SAES_I_KEY_ERR BIT(2)
#define _SAES_I_RW_ERR BIT(1)
#define _SAES_I_CC BIT(0)

#define SAES_TIMEOUT_US U(100000)
#define TIMEOUT_US_1MS U(1000)
#define SAES_RESET_DELAY U(2)

#define IS_CHAINING_MODE(mode, cr) \
	(((cr) & _SAES_CR_CHMOD_MASK) == (_SAES_CR_CHMOD_##mode << \
					  _SAES_CR_CHMOD_SHIFT))

#define SET_CHAINING_MODE(mode, cr) \
	set_field_u32(cr, _SAES_CR_CHMOD_MASK, _SAES_CR_CHMOD_##mode)
static struct mutex saes_lock = MUTEX_INITIALIZER;
static struct stm32_saes_platdata {
	vaddr_t base;
	struct clk *clk;
	struct clk *clk_rng;
	struct rstctrl *reset;
} saes_pdata;

static bool does_chaining_mode_need_iv(uint32_t cr)
{
	return !IS_CHAINING_MODE(ECB, cr);
}

static bool is_encrypt(uint32_t cr)
{
	return (cr & _SAES_CR_MODE_MASK) ==
	       SHIFT_U32(_SAES_CR_MODE_ENC, _SAES_CR_MODE_SHIFT);
}

static bool is_decrypt(uint32_t cr)
{
	return (cr & _SAES_CR_MODE_MASK) ==
	       SHIFT_U32(_SAES_CR_MODE_DEC, _SAES_CR_MODE_SHIFT);
}

static bool does_need_npblb(uint32_t cr)
{
	return (IS_CHAINING_MODE(GCM, cr) && is_encrypt(cr)) ||
	       (IS_CHAINING_MODE(CCM, cr) && is_decrypt(cr));
}

static bool can_suspend(uint32_t cr)
{
	return !IS_CHAINING_MODE(GCM, cr);
}

static void write_aligned_block(vaddr_t base, uint32_t *data)
{
	unsigned int i = 0;

	/* SAES is configured to swap bytes as expected */
	for (i = 0; i < AES_BLOCK_NB_U32; i++)
		io_write32(base + _SAES_DINR, data[i]);
}

static void write_block(vaddr_t base, uint8_t *data)
{
	if (IS_ALIGNED_WITH_TYPE(data, uint32_t)) {
		write_aligned_block(base, (void *)data);
	} else {
		uint32_t data_u32[AES_BLOCK_NB_U32] = { };

		memcpy(data_u32, data, sizeof(data_u32));
		write_aligned_block(base, data_u32);
	}
}

static void read_aligned_block(vaddr_t base, uint32_t *data)
{
	unsigned int i = 0;

	/* SAES is configured to swap bytes as expected */
	for (i = 0; i < AES_BLOCK_NB_U32; i++)
		data[i] = io_read32(base + _SAES_DOUTR);
}

static void read_block(vaddr_t base, uint8_t *data)
{
	if (IS_ALIGNED_WITH_TYPE(data, uint32_t)) {
		read_aligned_block(base, (void *)data);
	} else {
		uint32_t data_u32[AES_BLOCK_NB_U32] = { };

		read_aligned_block(base, data_u32);

		memcpy(data, data_u32, sizeof(data_u32));
	}
}

static TEE_Result wait_computation_completed(vaddr_t base)
{
	uint64_t timeout_ref = timeout_init_us(SAES_TIMEOUT_US);

	while ((io_read32(base + _SAES_SR) & _SAES_SR_CCF) != _SAES_SR_CCF)
		if (timeout_elapsed(timeout_ref))
			break;

	if ((io_read32(base + _SAES_SR) & _SAES_SR_CCF) != _SAES_SR_CCF) {
		DMSG("CCF timeout");
		return TEE_ERROR_GENERIC;
	}

	return TEE_SUCCESS;
}

static void clear_computation_completed(uintptr_t base)
{
	io_setbits32(base + _SAES_ICR, _SAES_I_CC);
}

static TEE_Result wait_key_valid(vaddr_t base)
{
	uint64_t timeout_ref = timeout_init_us(SAES_TIMEOUT_US);

	while (!(io_read32(base + _SAES_SR) & _SAES_SR_KEYVALID))
		if (timeout_elapsed(timeout_ref))
			break;

	if (!(io_read32(base + _SAES_SR) & _SAES_SR_KEYVALID)) {
		DMSG("KEYVALID timeout");
		return TEE_ERROR_GENERIC;
	}

	return TEE_SUCCESS;
}

static TEE_Result saes_start(struct stm32_saes_context *ctx)
{
	uint64_t timeout_ref = 0;

	/* Reset SAES */
	io_setbits32(ctx->base + _SAES_CR, _SAES_CR_IPRST);
	io_clrbits32(ctx->base + _SAES_CR, _SAES_CR_IPRST);

	timeout_ref = timeout_init_us(SAES_TIMEOUT_US);
	while (io_read32(ctx->base + _SAES_SR) & _SAES_SR_BUSY)
		if (timeout_elapsed(timeout_ref))
			break;

	if (io_read32(ctx->base + _SAES_SR) & _SAES_SR_BUSY) {
		DMSG("busy timeout");
		return TEE_ERROR_GENERIC;
	}

	return TEE_SUCCESS;
}

static void saes_end(struct stm32_saes_context *ctx, int prev_error)
{
	if (prev_error) {
		/* Reset SAES */
		io_setbits32(ctx->base + _SAES_CR, _SAES_CR_IPRST);
		io_clrbits32(ctx->base + _SAES_CR, _SAES_CR_IPRST);
	}

	/* Disable the SAES peripheral */
	io_clrbits32(ctx->base + _SAES_CR, _SAES_CR_EN);
}

static void saes_write_iv(struct stm32_saes_context *ctx)
{
	/* Restore the IV if the chaining mode requires one */
	if (does_chaining_mode_need_iv(ctx->cr)) {
		unsigned int i = 0;

		for (i = 0; i < AES_IVSIZE / sizeof(uint32_t); i++)
			io_write32(ctx->base + _SAES_IVR0 +
				   i * sizeof(uint32_t), ctx->iv[i]);
	}
}

static void saes_save_suspend(struct stm32_saes_context *ctx)
{
	size_t i = 0;

	for (i = 0; i < 8; i++)
		ctx->susp[i] = io_read32(ctx->base + _SAES_SUSPR0 +
					 i * sizeof(uint32_t));
}

static void saes_restore_suspend(struct stm32_saes_context *ctx)
{
	size_t i = 0;

	for (i = 0; i < 8; i++)
		io_write32(ctx->base + _SAES_SUSPR0 + i * sizeof(uint32_t),
			   ctx->susp[i]);
}

static void saes_write_key(struct stm32_saes_context *ctx)
{
	/* Restore the _SAES_KEYRx registers if the key is a SOFTWARE key */
	if ((ctx->cr & _SAES_CR_KEYSEL_MASK) ==
	    SHIFT_U32(_SAES_CR_KEYSEL_SOFT, _SAES_CR_KEYSEL_SHIFT)) {
		size_t i = 0;

		for (i = 0; i < AES_KEYSIZE_128 / sizeof(uint32_t); i++)
			io_write32(ctx->base + _SAES_KEYR0 +
				   i * sizeof(uint32_t), ctx->key[i]);

		if ((ctx->cr & _SAES_CR_KEYSIZE) == _SAES_CR_KEYSIZE) {
			for (i = 0;
			     i < (AES_KEYSIZE_256 / 2) / sizeof(uint32_t);
			     i++)
				io_write32(ctx->base + _SAES_KEYR4 +
					   i * sizeof(uint32_t),
					   ctx->key[i + 4]);
		}
	}
}

static TEE_Result saes_prepare_key(struct stm32_saes_context *ctx)
{
	TEE_Result res = TEE_ERROR_GENERIC;

	/* Disable the SAES peripheral */
	io_clrbits32(ctx->base + _SAES_CR, _SAES_CR_EN);

	/* Set key size */
	if ((ctx->cr & _SAES_CR_KEYSIZE))
		io_setbits32(ctx->base + _SAES_CR, _SAES_CR_KEYSIZE);
	else
		io_clrbits32(ctx->base + _SAES_CR, _SAES_CR_KEYSIZE);

	saes_write_key(ctx);

	res = wait_key_valid(ctx->base);
	if (res)
		return res;

	/*
	 * For ECB/CBC decryption, key preparation mode must be selected
	 * to populate the key.
	 */
	if ((IS_CHAINING_MODE(ECB, ctx->cr) ||
	     IS_CHAINING_MODE(CBC, ctx->cr)) && is_decrypt(ctx->cr)) {
		/* Select Mode 2 (key preparation) */
		io_clrsetbits32(ctx->base + _SAES_CR, _SAES_CR_MODE_MASK,
				SHIFT_U32(_SAES_CR_MODE_KEYPREP,
					  _SAES_CR_MODE_SHIFT));

		/* Enable SAES */
		io_setbits32(ctx->base + _SAES_CR, _SAES_CR_EN);

		res = wait_computation_completed(ctx->base);
		if (res)
			return res;

		clear_computation_completed(ctx->base);

		/* Set Mode 3 (decryption) */
		io_clrsetbits32(ctx->base + _SAES_CR, _SAES_CR_MODE_MASK,
				SHIFT_U32(_SAES_CR_MODE_DEC,
					  _SAES_CR_MODE_SHIFT));
	}

	return TEE_SUCCESS;
}

static TEE_Result save_context(struct stm32_saes_context *ctx)
{
	if ((io_read32(ctx->base + _SAES_SR) & _SAES_SR_CCF)) {
		/* The device should not be in a processing phase */
		return TEE_ERROR_BAD_STATE;
	}

	/* Save CR */
	ctx->cr = io_read32(ctx->base + _SAES_CR);

	if (!can_suspend(ctx->cr))
		return TEE_SUCCESS;

	saes_save_suspend(ctx);

	/* Save the current IV if the chaining mode requires one */
	if (does_chaining_mode_need_iv(ctx->cr)) {
		uint8_t i = 0;

		for (i = 0; i < AES_IVSIZE / sizeof(uint32_t); i++)
			ctx->iv[i] = io_read32(ctx->base + _SAES_IVR0 +
					       i * sizeof(uint32_t));
	}

	/* Disable the SAES peripheral */
	io_clrbits32(ctx->base + _SAES_CR, _SAES_CR_EN);

	return TEE_SUCCESS;
}

/* To resume the processing of a message */
static TEE_Result restore_context(struct stm32_saes_context *ctx)
{
	TEE_Result res = TEE_SUCCESS;

	/* SAES shall be disabled */
	if ((io_read32(ctx->base + _SAES_CR) & _SAES_CR_EN)) {
		DMSG("Device is still enabled");
		return TEE_ERROR_BAD_STATE;
	}

	/* Reset internal state */
	io_setbits32(ctx->base + _SAES_CR, _SAES_CR_IPRST);

	/* Restore configuration register */
	io_write32(ctx->base + _SAES_CR, ctx->cr);

	/* Write the key and, for CBC or ECB decryption, prepare it */
	res = saes_prepare_key(ctx);
	if (res)
		return res;

	saes_restore_suspend(ctx);

	saes_write_iv(ctx);

	/* Enable the SAES peripheral */
	io_setbits32(ctx->base + _SAES_CR, _SAES_CR_EN);

	return TEE_SUCCESS;
}

static TEE_Result do_from_init_to_phase(struct stm32_saes_context *ctx,
					uint32_t new_phase)
{
	TEE_Result res = TEE_SUCCESS;

	/* We didn't run the init phase yet */
	res = restore_context(ctx);
	if (res)
		return res;

	res = wait_computation_completed(ctx->base);
	if (res)
		return res;

	clear_computation_completed(ctx->base);

	/* Move to 'new_phase' */
	io_clrsetbits32(ctx->base + _SAES_CR, _SAES_CR_GCMPH_MASK,
			SHIFT_U32(new_phase, _SAES_CR_GCMPH_SHIFT));

	/* Enable the SAES peripheral (init disabled it) */
	io_setbits32(ctx->base + _SAES_CR, _SAES_CR_EN);

	return TEE_SUCCESS;
}

static TEE_Result do_from_header_to_phase(struct stm32_saes_context *ctx,
					  uint32_t new_phase)
{
	TEE_Result res = TEE_SUCCESS;

	if (can_suspend(ctx->cr)) {
		res = restore_context(ctx);
		if (res)
			return res;
	}

	if (ctx->extra_size) {
		/* Manage unaligned header data before moving to next phase */
		memset((uint8_t *)ctx->extra + ctx->extra_size, 0,
		       AES_BLOCK_SIZE - ctx->extra_size);

		write_aligned_block(ctx->base, ctx->extra);

		res = wait_computation_completed(ctx->base);
		if (res)
			return res;

		clear_computation_completed(ctx->base);

		ctx->assoc_len += ctx->extra_size * INT8_BIT;
		ctx->extra_size = U(0);
	}

	/* Move to 'new_phase' */
	io_clrsetbits32(ctx->base + _SAES_CR, _SAES_CR_GCMPH_MASK,
			SHIFT_U32(new_phase, _SAES_CR_GCMPH_SHIFT));

	return TEE_SUCCESS;
}

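/*
 * AEAD processing (CCM/GCM) walks the _SAES_CR_GCMPH phases in order:
 * INIT -> HEADER (associated data) -> PAYLOAD -> FINAL (tag).
 * do_from_init_to_phase() and do_from_header_to_phase() implement the
 * transitions; the entry points below select the transition based on
 * the phase currently recorded in ctx->cr.
 */
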
/**
 * @brief Start an AES computation.
 * @param ctx: SAES process context
 * @param is_dec: true for decryption, false for encryption
 * @param ch_mode: defines the chaining mode
 * @param key_select: defines where the key comes from
 * @param key: pointer to the key (used only if key_select is KEY_SOFT)
 * @param key_size: key size
 * @param iv: pointer to the initialization vector (unused if ch_mode is ECB)
 * @param iv_size: IV size
 * @note This function does not access the hardware; it only stores the
 * configuration in @ctx.
 *
 * @retval TEE_SUCCESS if OK or a TEE_Result compliant code.
 */
TEE_Result stm32_saes_init(struct stm32_saes_context *ctx, bool is_dec,
			   enum stm32_saes_chaining_mode ch_mode,
			   enum stm32_saes_key_selection key_select,
			   const void *key, size_t key_size, const void *iv,
			   size_t iv_size)
{
	const uint32_t *key_u32 = NULL;
	const uint32_t *iv_u32 = NULL;
	uint32_t local_key[8] = { };
	uint32_t local_iv[4] = { };
	unsigned int i = 0;

	if (!ctx)
		return TEE_ERROR_BAD_PARAMETERS;

	*ctx = (struct stm32_saes_context){
		.lock = &saes_lock,
		.base = saes_pdata.base,
		.cr = _SAES_CR_RESET_VALUE
	};

	/* We want the buffers to be 32-bit aligned */
	if (IS_ALIGNED_WITH_TYPE(key, uint32_t)) {
		key_u32 = key;
	} else {
		memcpy(local_key, key, key_size);
		key_u32 = local_key;
	}

	if (IS_ALIGNED_WITH_TYPE(iv, uint32_t)) {
		iv_u32 = iv;
	} else {
		memcpy(local_iv, iv, iv_size);
		iv_u32 = local_iv;
	}

	if (is_dec)
		ctx->cr = set_field_u32(ctx->cr, _SAES_CR_MODE_MASK,
					_SAES_CR_MODE_DEC);
	else
		ctx->cr = set_field_u32(ctx->cr, _SAES_CR_MODE_MASK,
					_SAES_CR_MODE_ENC);

	/* Save the chaining mode */
	switch (ch_mode) {
	case STM32_SAES_MODE_ECB:
		ctx->cr = SET_CHAINING_MODE(ECB, ctx->cr);
		break;
	case STM32_SAES_MODE_CBC:
		ctx->cr = SET_CHAINING_MODE(CBC, ctx->cr);
		break;
	case STM32_SAES_MODE_CTR:
		ctx->cr = SET_CHAINING_MODE(CTR, ctx->cr);
		break;
	case STM32_SAES_MODE_GCM:
		ctx->cr = SET_CHAINING_MODE(GCM, ctx->cr);
		break;
	case STM32_SAES_MODE_CCM:
		ctx->cr = SET_CHAINING_MODE(CCM, ctx->cr);
		break;
	default:
		return TEE_ERROR_BAD_PARAMETERS;
	}

	/*
	 * We use the HW byte swap (_SAES_CR_DATATYPE_BYTE) for data, so
	 * there is no need for TEE_U32_TO_BIG_ENDIAN() before writing to
	 * DINR, nor for TEE_U32_FROM_BIG_ENDIAN() after reading from DOUTR.
	 *
	 * Note, however, that key wrapping only accepts
	 * _SAES_CR_DATATYPE_NONE.
	 */
	ctx->cr = set_field_u32(ctx->cr, _SAES_CR_DATATYPE_MASK,
				_SAES_CR_DATATYPE_BYTE);

	/* Configure the key size */
	switch (key_size) {
	case AES_KEYSIZE_128:
		ctx->cr &= ~_SAES_CR_KEYSIZE;
		break;
	case AES_KEYSIZE_256:
		ctx->cr |= _SAES_CR_KEYSIZE;
		break;
	default:
		return TEE_ERROR_BAD_PARAMETERS;
	}

	/* Configure the key */
	switch (key_select) {
	case STM32_SAES_KEY_SOFT:
		ctx->cr = set_field_u32(ctx->cr, _SAES_CR_KEYSEL_MASK,
					_SAES_CR_KEYSEL_SOFT);
		/*
		 * Save the key. Note that we save it in HW byte order and
		 * word order: key[i] is for _SAES_KEYRi.
		 */
		switch (key_size) {
		case AES_KEYSIZE_128:
			/* First 16 bytes == 4 u32 */
			for (i = 0; i < AES_KEYSIZE_128 / sizeof(uint32_t);
			     i++)
				ctx->key[i] =
					TEE_U32_TO_BIG_ENDIAN(key_u32[3 - i]);
			break;
		case AES_KEYSIZE_256:
			for (i = 0; i < AES_KEYSIZE_256 / sizeof(uint32_t);
			     i++)
				ctx->key[i] =
					TEE_U32_TO_BIG_ENDIAN(key_u32[7 - i]);
			break;
		default:
			return TEE_ERROR_BAD_PARAMETERS;
		}
		break;
	case STM32_SAES_KEY_DHU:
		ctx->cr = set_field_u32(ctx->cr, _SAES_CR_KEYSEL_MASK,
					_SAES_CR_KEYSEL_DHUK);
		break;
	case STM32_SAES_KEY_BH:
		ctx->cr = set_field_u32(ctx->cr, _SAES_CR_KEYSEL_MASK,
					_SAES_CR_KEYSEL_BHK);
		break;
	case STM32_SAES_KEY_BHU_XOR_BH:
		ctx->cr = set_field_u32(ctx->cr, _SAES_CR_KEYSEL_MASK,
					_SAES_CR_KEYSEL_BHU_XOR_BH_K);
		break;
	case STM32_SAES_KEY_WRAPPED:
		ctx->cr = set_field_u32(ctx->cr, _SAES_CR_KEYSEL_MASK,
					_SAES_CR_KEYSEL_SOFT);
		break;
	default:
		return TEE_ERROR_BAD_PARAMETERS;
	}

	/* Save the IV */
	if (ch_mode != STM32_SAES_MODE_ECB) {
		if (!iv || iv_size != AES_IVSIZE)
			return TEE_ERROR_BAD_PARAMETERS;

		for (i = 0; i < AES_IVSIZE / sizeof(uint32_t); i++)
			ctx->iv[i] = TEE_U32_TO_BIG_ENDIAN(iv_u32[3 - i]);
	}

	/* Reset suspend registers */
	memset(ctx->susp, 0, sizeof(ctx->susp));

	return saes_start(ctx);
}

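/*
 * Illustrative usage sketch, not part of the driver: one-shot AES-256-CBC
 * encryption with a software key. Buffer contents are hypothetical and the
 * input size is assumed to be a multiple of AES_BLOCK_SIZE.
 *
 *	struct stm32_saes_context ctx = { };
 *	uint8_t key[AES_KEYSIZE_256] = { };
 *	uint8_t iv[AES_IVSIZE] = { };
 *	uint8_t buf[4 * AES_BLOCK_SIZE] = { };
 *	TEE_Result res = TEE_ERROR_GENERIC;
 *
 *	res = stm32_saes_init(&ctx, false, STM32_SAES_MODE_CBC,
 *			      STM32_SAES_KEY_SOFT, key, sizeof(key),
 *			      iv, sizeof(iv));
 *	if (!res)
 *		res = stm32_saes_update(&ctx, true, buf, buf, sizeof(buf));
 */
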
/**
 * @brief Update (or start) AES authentication of associated data
 *        (CCM or GCM).
 * @param ctx: SAES process context
 * @param data: pointer to the associated data
 * @param data_size: data size
 *
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_saes_update_assodata(struct stm32_saes_context *ctx,
				      uint8_t *data, size_t data_size)
{
	TEE_Result res = TEE_SUCCESS;
	unsigned int i = 0;
	uint32_t previous_phase = 0;

	if (!ctx)
		return TEE_ERROR_BAD_PARAMETERS;

	/* If there is no associated data, nothing to do */
	if (!data || !data_size)
		return TEE_SUCCESS;

	mutex_lock(ctx->lock);

	previous_phase = (ctx->cr & _SAES_CR_GCMPH_MASK) >>
			 _SAES_CR_GCMPH_SHIFT;

	switch (previous_phase) {
	case _SAES_CR_GCMPH_INIT:
		res = do_from_init_to_phase(ctx, _SAES_CR_GCMPH_HEADER);
		break;
	case _SAES_CR_GCMPH_HEADER:
		/*
		 * Function update_assodata() was already called.
		 * We only need to restore the context.
		 */
		if (can_suspend(ctx->cr))
			res = restore_context(ctx);

		break;
	default:
		DMSG("out of order call");
		res = TEE_ERROR_BAD_STATE;
	}

	if (res)
		goto out;

	/* Manage data remaining from a previous update_assodata() call */
	if (ctx->extra_size &&
	    ((ctx->extra_size + data_size) >= AES_BLOCK_SIZE)) {
		uint32_t block[AES_BLOCK_NB_U32] = { };

		memcpy(block, ctx->extra, ctx->extra_size);
		memcpy((uint8_t *)block + ctx->extra_size, data,
		       AES_BLOCK_SIZE - ctx->extra_size);

		write_aligned_block(ctx->base, block);

		res = wait_computation_completed(ctx->base);
		if (res)
			goto out;

		clear_computation_completed(ctx->base);

		i += AES_BLOCK_SIZE - ctx->extra_size;
		ctx->extra_size = 0;
		ctx->assoc_len += AES_BLOCK_SIZE_BIT;
	}

	while (data_size - i >= AES_BLOCK_SIZE) {
		write_block(ctx->base, data + i);

		res = wait_computation_completed(ctx->base);
		if (res)
			goto out;

		clear_computation_completed(ctx->base);

		/* Process next block */
		i += AES_BLOCK_SIZE;
		ctx->assoc_len += AES_BLOCK_SIZE_BIT;
	}

	/*
	 * Manage the last block if it is not a block size multiple:
	 * save the remaining data to manage it later (potentially with
	 * new associated data).
	 */
	if (i < data_size) {
		memcpy((uint8_t *)ctx->extra + ctx->extra_size, data + i,
		       data_size - i);
		ctx->extra_size += data_size - i;
	}

	res = save_context(ctx);
out:
	if (res)
		saes_end(ctx, res);

	mutex_unlock(ctx->lock);

	return res;
}

/**
 * @brief Update (or start) AES authentication and de/encryption of
 *        payload data (CCM or GCM).
 * @param ctx: SAES process context
 * @param last_block: true if this is the last payload data block
 * @param data_in: pointer to the payload
 * @param data_out: pointer where to save the de/encrypted payload
 * @param data_size: payload size
 *
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_saes_update_load(struct stm32_saes_context *ctx,
				  bool last_block, uint8_t *data_in,
				  uint8_t *data_out, size_t data_size)
{
	TEE_Result res = TEE_SUCCESS;
	unsigned int i = 0;
	uint32_t previous_phase = 0;

	if (!ctx)
		return TEE_ERROR_BAD_PARAMETERS;

	/* If there is no data, nothing to do */
	if (!data_in || !data_size)
		return TEE_SUCCESS;

	mutex_lock(ctx->lock);

	previous_phase = (ctx->cr & _SAES_CR_GCMPH_MASK) >>
			 _SAES_CR_GCMPH_SHIFT;

	switch (previous_phase) {
	case _SAES_CR_GCMPH_INIT:
		res = do_from_init_to_phase(ctx, _SAES_CR_GCMPH_PAYLOAD);
		break;
	case _SAES_CR_GCMPH_HEADER:
		res = do_from_header_to_phase(ctx, _SAES_CR_GCMPH_PAYLOAD);
		break;
	case _SAES_CR_GCMPH_PAYLOAD:
		/* A new update_load() call, we only need to restore context */
		if (can_suspend(ctx->cr))
			res = restore_context(ctx);

		break;
	default:
		DMSG("out of order call");
		res = TEE_ERROR_BAD_STATE;
	}

	if (res)
		goto out;

	while (i < ROUNDDOWN(data_size, AES_BLOCK_SIZE)) {
		write_block(ctx->base, data_in + i);

		res = wait_computation_completed(ctx->base);
		if (res)
			goto out;

		read_block(ctx->base, data_out + i);

		clear_computation_completed(ctx->base);

		/* Process next block */
		i += AES_BLOCK_SIZE;
		ctx->load_len += AES_BLOCK_SIZE_BIT;
	}

	/* Manage the last block if it is not a block size multiple */
	if (last_block && i < data_size) {
		uint32_t block_in[AES_BLOCK_NB_U32] = { };
		uint32_t block_out[AES_BLOCK_NB_U32] = { };

		memcpy(block_in, data_in + i, data_size - i);

		if (does_need_npblb(ctx->cr)) {
			uint32_t npblb = AES_BLOCK_SIZE - (data_size - i);

			io_clrsetbits32(ctx->base + _SAES_CR,
					_SAES_CR_NPBLB_MASK,
					SHIFT_U32(npblb, _SAES_CR_NPBLB_SHIFT));
		}

		write_aligned_block(ctx->base, block_in);

		res = wait_computation_completed(ctx->base);
		if (res)
			goto out;

		read_aligned_block(ctx->base, block_out);

		clear_computation_completed(ctx->base);

		memcpy(data_out + i, block_out, data_size - i);

		ctx->load_len += (data_size - i) * INT8_BIT;
	}

	res = save_context(ctx);
out:
	if (res)
		saes_end(ctx, res);

	mutex_unlock(ctx->lock);

	return res;
}

/**
 * @brief Get the authentication tag for AES authenticated algorithms
 *        (CCM or GCM).
 * @param ctx: SAES process context
 * @param tag: pointer where to save the tag
 * @param tag_size: tag size
 *
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_saes_final(struct stm32_saes_context *ctx, uint8_t *tag,
			    size_t tag_size)
{
	TEE_Result res = TEE_SUCCESS;
	uint32_t tag_u32[4] = { };
	uint32_t previous_phase = 0;

	if (!ctx)
		return TEE_ERROR_BAD_PARAMETERS;

	mutex_lock(ctx->lock);

	previous_phase = (ctx->cr & _SAES_CR_GCMPH_MASK) >>
			 _SAES_CR_GCMPH_SHIFT;

	switch (previous_phase) {
	case _SAES_CR_GCMPH_INIT:
		res = do_from_init_to_phase(ctx, _SAES_CR_GCMPH_FINAL);
		break;
	case _SAES_CR_GCMPH_HEADER:
		res = do_from_header_to_phase(ctx, _SAES_CR_GCMPH_FINAL);
		break;
	case _SAES_CR_GCMPH_PAYLOAD:
		if (can_suspend(ctx->cr))
			res = restore_context(ctx);

		/* Move to the final phase */
		io_clrsetbits32(ctx->base + _SAES_CR, _SAES_CR_GCMPH_MASK,
				SHIFT_U32(_SAES_CR_GCMPH_FINAL,
					  _SAES_CR_GCMPH_SHIFT));
		break;
	default:
		DMSG("out of order call");
		res = TEE_ERROR_BAD_STATE;
	}
	if (res)
		goto out;

	if (IS_CHAINING_MODE(GCM, ctx->cr)) {
		/* SAES is configured to swap bytes as expected */
		io_write32(ctx->base + _SAES_DINR, 0);
		io_write32(ctx->base + _SAES_DINR, ctx->assoc_len);
		io_write32(ctx->base + _SAES_DINR, 0);
		io_write32(ctx->base + _SAES_DINR, ctx->load_len);
	}

	res = wait_computation_completed(ctx->base);
	if (res)
		goto out;

	read_aligned_block(ctx->base, tag_u32);

	clear_computation_completed(ctx->base);

	memcpy(tag, tag_u32, MIN(sizeof(tag_u32), tag_size));

out:
	saes_end(ctx, res);
	mutex_unlock(ctx->lock);

	return res;
}

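/*
 * Illustrative usage sketch, not part of the driver: AES-128-GCM encryption
 * of a payload with associated data, followed by tag generation. Buffer
 * names and sizes are hypothetical and error checks are elided.
 *
 *	struct stm32_saes_context ctx = { };
 *	uint8_t key[AES_KEYSIZE_128] = { };
 *	uint8_t iv[AES_IVSIZE] = { };
 *	uint8_t aad[2 * AES_BLOCK_SIZE] = { };
 *	uint8_t payload[4 * AES_BLOCK_SIZE] = { };
 *	uint8_t tag[16] = { };
 *
 *	stm32_saes_init(&ctx, false, STM32_SAES_MODE_GCM,
 *			STM32_SAES_KEY_SOFT, key, sizeof(key),
 *			iv, sizeof(iv));
 *	stm32_saes_update_assodata(&ctx, aad, sizeof(aad));
 *	stm32_saes_update_load(&ctx, true, payload, payload,
 *			       sizeof(payload));
 *	stm32_saes_final(&ctx, tag, sizeof(tag));
 */
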
/**
 * @brief Update (or start) an AES de/encryption process (ECB, CBC or CTR).
 * @param ctx: SAES process context
 * @param last_block: true if this is the last payload data block
 * @param data_in: pointer to the payload
 * @param data_out: pointer where to save the de/encrypted payload
 * @param data_size: payload size
 *
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_saes_update(struct stm32_saes_context *ctx, bool last_block,
			     uint8_t *data_in, uint8_t *data_out,
			     size_t data_size)
{
	TEE_Result res = TEE_SUCCESS;
	unsigned int i = U(0);

	if (!ctx)
		return TEE_ERROR_BAD_PARAMETERS;

	mutex_lock(ctx->lock);

	/*
	 * CBC encryption requires the last two blocks to be aligned with
	 * the AES block size.
	 */
	if (last_block && IS_CHAINING_MODE(CBC, ctx->cr) &&
	    is_encrypt(ctx->cr) &&
	    (ROUNDDOWN(data_size, AES_BLOCK_SIZE) != data_size)) {
		if (data_size < AES_BLOCK_SIZE * 2) {
			/*
			 * For CBC, the size of the last part should be at
			 * least 2 * AES_BLOCK_SIZE.
			 */
			EMSG("Unexpected last block size");
			res = TEE_ERROR_BAD_STATE;
			goto out;
		}
		/*
		 * Padding is not supported if the total size is not
		 * aligned with the size of a block.
		 */
		res = TEE_ERROR_NOT_IMPLEMENTED;
		goto out;
	}

	/* Manage the CTR mask remaining from a previous update call */
	if (IS_CHAINING_MODE(CTR, ctx->cr) && ctx->extra_size) {
		unsigned int j = 0;
		uint8_t *mask = (uint8_t *)ctx->extra;

		for (i = 0, j = 0; j < ctx->extra_size && i < data_size;
		     j++, i++)
			data_out[i] = data_in[i] ^ mask[j];

		if (j != ctx->extra_size) {
			/*
			 * We did not consume all of the saved mask but ran
			 * out of data: save the remaining mask and its new
			 * size.
			 */
			memmove(ctx->extra, (uint8_t *)ctx->extra + j,
				ctx->extra_size - j);
			ctx->extra_size -= j;

			/*
			 * No need to save the HW context as we did not
			 * modify the HW state.
			 */
			res = TEE_SUCCESS;
			goto out;
		}
		/* All of the extra mask was consumed */
		ctx->extra_size = 0;
	}

	res = restore_context(ctx);
	if (res)
		goto out;

	while (data_size - i >= AES_BLOCK_SIZE) {
		write_block(ctx->base, data_in + i);

		res = wait_computation_completed(ctx->base);
		if (res)
			goto out;

		read_block(ctx->base, data_out + i);

		clear_computation_completed(ctx->base);

		/* Process next block */
		i += AES_BLOCK_SIZE;
	}

	/* Manage the last block if it is not a block size multiple */
	if (i < data_size) {
		if (IS_CHAINING_MODE(CTR, ctx->cr)) {
			/*
			 * For CTR, we save the generated mask to use it in
			 * the next update call.
			 */
			uint32_t block_in[AES_BLOCK_NB_U32] = { };
			uint32_t block_out[AES_BLOCK_NB_U32] = { };

			memcpy(block_in, data_in + i, data_size - i);

			write_aligned_block(ctx->base, block_in);

			res = wait_computation_completed(ctx->base);
			if (res)
				goto out;

			read_aligned_block(ctx->base, block_out);

			clear_computation_completed(ctx->base);

			memcpy(data_out + i, block_out, data_size - i);

			/* Save the mask for a possible next call */
			ctx->extra_size = AES_BLOCK_SIZE - (data_size - i);
			memcpy(ctx->extra,
			       (uint8_t *)block_out + data_size - i,
			       ctx->extra_size);
		} else {
			/* CBC and ECB can only manage block size multiples */
			res = TEE_ERROR_BAD_PARAMETERS;
			goto out;
		}
	}

	if (!last_block)
		res = save_context(ctx);

out:
	/* If last block or error, end the SAES processing */
	if (last_block || res)
		saes_end(ctx, res);

	mutex_unlock(ctx->lock);

	return res;
}

static void xor_block(uint8_t *b1, uint8_t *b2, size_t size)
{
	size_t i = 0;

	for (i = 0; i < size; i++)
		b1[i] ^= b2[i];
}

static TEE_Result stm32_saes_cmac_prf_128(struct stm32_saes_context *ctx,
					  enum stm32_saes_key_selection key_sel,
					  const void *key, size_t key_size,
					  uint8_t *data, size_t data_size,
					  uint8_t *out)
{
	TEE_Result res = TEE_ERROR_GENERIC;
	uint8_t block[AES_BLOCK_SIZE] = { };
	uint8_t k1[AES_BLOCK_SIZE] = { };
	uint8_t k2[AES_BLOCK_SIZE] = { };
	uint8_t l[AES_BLOCK_SIZE] = { };
	size_t processed = 0;
	uint8_t bit = 0;
	int i = 0;

	if (!ctx)
		return TEE_ERROR_BAD_PARAMETERS;

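	/*
	 * Subkey generation as per NIST SP 800-38B / RFC 4493:
	 * L = CIPH_K(0^128), K1 = L << 1 (XORed with const_Rb if MSB(L) is
	 * set), K2 = K1 << 1 (likewise). const_Rb is 0x87 in the last byte,
	 * the constant of the doubling operation in GF(2^128).
	 */
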
	/* Get K1 and K2 */
	res = stm32_saes_init(ctx, false, STM32_SAES_MODE_ECB, key_sel,
			      key, key_size, NULL, 0);
	if (res)
		return res;

	res = stm32_saes_update(ctx, true, l, l, sizeof(l));
	if (res)
		return res;

	/* MSB(L) == 0 => K1 = L << 1 */
	bit = 0;
	for (i = sizeof(l) - 1; i >= 0; i--) {
		k1[i] = (l[i] << 1) | bit;
		bit = (l[i] & 0x80) >> 7;
	}
	/* MSB(L) == 1 => K1 = (L << 1) XOR const_Rb */
	if ((l[0] & 0x80))
		k1[sizeof(k1) - 1] = k1[sizeof(k1) - 1] ^ 0x87;

	/* MSB(K1) == 0 => K2 = K1 << 1 */
	bit = 0;
	for (i = sizeof(k1) - 1; i >= 0; i--) {
		k2[i] = (k1[i] << 1) | bit;
		bit = (k1[i] & 0x80) >> 7;
	}

	/* MSB(K1) == 1 => K2 = (K1 << 1) XOR const_Rb */
	if ((k1[0] & 0x80))
		k2[sizeof(k2) - 1] = k2[sizeof(k2) - 1] ^ 0x87;

	if (data_size > AES_BLOCK_SIZE) {
		uint8_t *data_out = NULL;

		/* All blocks but the last one, in CBC mode */
		res = stm32_saes_init(ctx, false, STM32_SAES_MODE_CBC,
				      key_sel, key, key_size, block,
				      sizeof(block));
		if (res)
			return res;

		processed = ROUNDDOWN(data_size - 1, AES_BLOCK_SIZE);
		data_out = malloc(processed);
		if (!data_out)
			return TEE_ERROR_OUT_OF_MEMORY;

		res = stm32_saes_update(ctx, true, data, data_out, processed);
		if (!res) {
			/* Copy the last output block or keep block as { 0 } */
			memcpy(block, data_out + processed - AES_BLOCK_SIZE,
			       AES_BLOCK_SIZE);
		}

		free(data_out);

		if (res)
			return res;
	}

	/* Manage the last block */
	xor_block(block, data + processed, data_size - processed);
	if (data_size - processed == AES_BLOCK_SIZE) {
		xor_block(block, k1, AES_BLOCK_SIZE);
	} else {
		/* XOR with padding = 0b100... */
		block[data_size - processed] ^= 0x80;
		xor_block(block, k2, AES_BLOCK_SIZE);
	}

	/*
	 * AES of the last block.
	 * We need to use the same chaining mode to keep the same key when
	 * DHUK is selected, so we reuse l as a zero-initialized IV.
	 */
	memset(l, 0, sizeof(l));
	res = stm32_saes_init(ctx, false, STM32_SAES_MODE_CBC, key_sel, key,
			      key_size, l, sizeof(l));
	if (res)
		return res;

	return stm32_saes_update(ctx, true, block, out, AES_BLOCK_SIZE);
}

TEE_Result stm32_saes_kdf(struct stm32_saes_context *ctx,
			  enum stm32_saes_key_selection key_sel,
			  const void *key, size_t key_size,
			  const void *input, size_t input_size,
			  uint8_t *subkey, size_t subkey_size)
{
	TEE_Result res = TEE_SUCCESS;
	uint32_t index = 0;
	uint32_t index_be = 0;
	uint8_t *data = NULL;
	size_t data_index = 0;
	size_t subkey_index = 0;
	size_t data_size = input_size + sizeof(index_be);
	uint8_t cmac[AES_BLOCK_SIZE] = { };

	if (!ctx || !input || !input_size)
		return TEE_ERROR_BAD_PARAMETERS;

	/* Each K(i) is computed over an index prepended to the input */
	data = malloc(data_size);
	if (!data)
		return TEE_ERROR_OUT_OF_MEMORY;

	data_index = 0;
	index_be = TEE_U32_TO_BIG_ENDIAN(index);
	memcpy(data + data_index, &index_be, sizeof(index_be));
	data_index += sizeof(index_be);
	memcpy(data + data_index, input, input_size);
	data_index += input_size;

	/* K(i) computation */
	index = 0;
	while (subkey_index < subkey_size) {
		index++;
		index_be = TEE_U32_TO_BIG_ENDIAN(index);
		memcpy(data, &index_be, sizeof(index_be));

		res = stm32_saes_cmac_prf_128(ctx, key_sel, key, key_size,
					      data, data_size, cmac);
		if (res)
			goto out;

		memcpy(subkey + subkey_index, cmac,
		       MIN(subkey_size - subkey_index, sizeof(cmac)));
		subkey_index += sizeof(cmac);
	}

out:
	free(data);
	if (res)
		memzero_explicit(subkey, subkey_size);

	return res;
}

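/*
 * Illustrative usage sketch, not part of the driver: derive a 32-byte
 * subkey from the DHUK over a caller-chosen label. The label content is
 * hypothetical.
 *
 *	struct stm32_saes_context ctx = { };
 *	static const char label[] = "example-label";
 *	uint8_t subkey[32] = { };
 *
 *	stm32_saes_kdf(&ctx, STM32_SAES_KEY_DHU, NULL, AES_KEYSIZE_128,
 *		       label, sizeof(label), subkey, sizeof(subkey));
 */
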
/* Implement hardware HUK derivation using SAES resources */
TEE_Result huk_subkey_derive(enum huk_subkey_usage usage,
			     const void *const_data, size_t const_data_len,
			     uint8_t *subkey, size_t subkey_len)
{
	TEE_Result res = TEE_ERROR_GENERIC;
	uint8_t *input = NULL;
	size_t input_index = 0;
	size_t subkey_bitlen = 0;
	struct stm32_saes_context ctx = { };
	uint8_t separator = 0;

	/* Check whether the driver is probed */
	if (!saes_pdata.base) {
		return __huk_subkey_derive(usage, const_data, const_data_len,
					   subkey, subkey_len);
	}

	input = malloc(const_data_len + sizeof(separator) + sizeof(usage) +
		       sizeof(subkey_bitlen) + AES_BLOCK_SIZE);
	if (!input)
		return TEE_ERROR_OUT_OF_MEMORY;

	input_index = 0;
	if (const_data) {
		memcpy(input + input_index, const_data, const_data_len);
		input_index += const_data_len;

		memcpy(input + input_index, &separator, sizeof(separator));
		input_index += sizeof(separator);
	}

	memcpy(input + input_index, &usage, sizeof(usage));
	input_index += sizeof(usage);

	/*
	 * Append the subkey length in bits at the end of the input, as a
	 * big-endian (MSB first) uint32_t.
	 */
	subkey_bitlen = TEE_U32_TO_BIG_ENDIAN(subkey_len * INT8_BIT);
	memcpy(input + input_index, &subkey_bitlen, sizeof(subkey_bitlen));
	input_index += sizeof(subkey_bitlen);
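
	/*
	 * At this point the derivation input is laid out as:
	 * [const_data | 0x00 separator] (if const_data is provided) |
	 * usage | subkey length in bits (big-endian).
	 * K(0), computed below, is appended right after it.
	 */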

	/*
	 * Compute K(0) to prevent key control attacks and store it at the
	 * end of the input.
	 */
	res = stm32_saes_cmac_prf_128(&ctx, STM32_SAES_KEY_DHU, NULL,
				      AES_KEYSIZE_128,
				      input, input_index,
				      input + input_index);
	if (res)
		goto out;

	/* We just appended K(0) to the input */
	input_index += AES_BLOCK_SIZE;

	res = stm32_saes_kdf(&ctx, STM32_SAES_KEY_DHU, NULL, AES_KEYSIZE_128,
			     input, input_index, subkey, subkey_len);

out:
	free(input);
	return res;
}

static TEE_Result stm32_saes_parse_fdt(struct stm32_saes_platdata *pdata,
				       const void *fdt, int node)
{
	TEE_Result res = TEE_ERROR_GENERIC;
	size_t reg_size = 0;
	paddr_t reg = 0;

	res = clk_dt_get_by_name(fdt, node, "bus", &pdata->clk);
	if (res != TEE_SUCCESS)
		return res;

	res = clk_dt_get_by_name(fdt, node, "rng", &pdata->clk_rng);
	if (res != TEE_SUCCESS)
		return res;

	res = rstctrl_dt_get_by_index(fdt, node, 0, &pdata->reset);
	if (res != TEE_SUCCESS && res != TEE_ERROR_ITEM_NOT_FOUND)
		return res;

	if (fdt_reg_info(fdt, node, &reg, &reg_size))
		return TEE_ERROR_BAD_PARAMETERS;

	pdata->base = (vaddr_t)phys_to_virt(reg, MEM_AREA_IO_SEC, reg_size);
	if (!pdata->base)
		panic();

	return TEE_SUCCESS;
}

static void stm32_saes_reset(void)
{
	if (saes_pdata.reset) {
		/* External reset of SAES */
		if (rstctrl_assert_to(saes_pdata.reset, TIMEOUT_US_1MS))
			panic();

		udelay(SAES_RESET_DELAY);

		if (rstctrl_deassert_to(saes_pdata.reset, TIMEOUT_US_1MS))
			panic();
	} else {
		/* Internal reset of SAES */
		io_setbits32(saes_pdata.base + _SAES_CR, _SAES_CR_IPRST);
		udelay(SAES_RESET_DELAY);
		io_clrbits32(saes_pdata.base + _SAES_CR, _SAES_CR_IPRST);
	}
}

static TEE_Result stm32_saes_pm(enum pm_op op, uint32_t pm_hint,
				const struct pm_callback_handle *hdl __unused)
{
	switch (op) {
	case PM_OP_SUSPEND:
		clk_disable(saes_pdata.clk);
		clk_disable(saes_pdata.clk_rng);
		return TEE_SUCCESS;

	case PM_OP_RESUME:
		if (clk_enable(saes_pdata.clk) ||
		    clk_enable(saes_pdata.clk_rng))
			panic();

		if (PM_HINT_IS_STATE(pm_hint, CONTEXT))
			stm32_saes_reset();

		return TEE_SUCCESS;
	default:
		break;
	}

	return TEE_ERROR_NOT_IMPLEMENTED;
}

static TEE_Result stm32_saes_probe(const void *fdt, int node,
				   const void *compat_data __unused)
{
	TEE_Result res = TEE_SUCCESS;

	assert(!saes_pdata.base);

	res = stm32_saes_parse_fdt(&saes_pdata, fdt, node);
	if (res)
		return res;

	if (clk_enable(saes_pdata.clk) || clk_enable(saes_pdata.clk_rng))
		panic();

	stm32_saes_reset();

	if (IS_ENABLED(CFG_CRYPTO_DRV_CIPHER)) {
		res = stm32_register_cipher(SAES_IP);
		if (res) {
			EMSG("Failed to register to cipher: %#"PRIx32, res);
			panic();
		}
	}

	register_pm_core_service_cb(stm32_saes_pm, NULL, "stm32-saes");

	return TEE_SUCCESS;
}

static const struct dt_device_match saes_match_table[] = {
	{ .compatible = "st,stm32mp13-saes" },
	{ }
};

DEFINE_DT_DRIVER(stm32_saes_dt_driver) = {
	.name = "stm32-saes",
	.match_table = saes_match_table,
	.probe = stm32_saes_probe,
};