// SPDX-License-Identifier: BSD-2-Clause
/*
 * Copyright (c) 2021-2023, STMicroelectronics - All Rights Reserved
 */
#include <assert.h>
#include <config.h>
#include <drivers/clk.h>
#include <drivers/clk_dt.h>
#include <drivers/rstctrl.h>
#include <io.h>
#include <kernel/boot.h>
#include <kernel/delay.h>
#include <kernel/dt.h>
#include <kernel/huk_subkey.h>
#include <kernel/mutex.h>
#include <libfdt.h>
#include <mm/core_memprot.h>
#include <stdint.h>
#include <stdlib.h>
#include <stm32_util.h>
#include <string.h>
#include <string_ext.h>
#include <utee_defines.h>
#include <util.h>

#include "common.h"
#include "stm32_saes.h"

/* SAES control register */
#define _SAES_CR			U(0x0)
/* SAES status register */
#define _SAES_SR			U(0x04)
/* SAES data input register */
#define _SAES_DINR			U(0x08)
/* SAES data output register */
#define _SAES_DOUTR			U(0x0c)
/* SAES key registers [0-3] */
#define _SAES_KEYR0			U(0x10)
#define _SAES_KEYR1			U(0x14)
#define _SAES_KEYR2			U(0x18)
#define _SAES_KEYR3			U(0x1c)
/* SAES initialization vector registers [0-3] */
#define _SAES_IVR0			U(0x20)
#define _SAES_IVR1			U(0x24)
#define _SAES_IVR2			U(0x28)
#define _SAES_IVR3			U(0x2c)
/* SAES key registers [4-7] */
#define _SAES_KEYR4			U(0x30)
#define _SAES_KEYR5			U(0x34)
#define _SAES_KEYR6			U(0x38)
#define _SAES_KEYR7			U(0x3c)
/* SAES suspend registers [0-7] */
#define _SAES_SUSPR0			U(0x40)
#define _SAES_SUSPR1			U(0x44)
#define _SAES_SUSPR2			U(0x48)
#define _SAES_SUSPR3			U(0x4c)
#define _SAES_SUSPR4			U(0x50)
#define _SAES_SUSPR5			U(0x54)
#define _SAES_SUSPR6			U(0x58)
#define _SAES_SUSPR7			U(0x5c)
/* SAES Interrupt Enable Register */
#define _SAES_IER			U(0x300)
/* SAES Interrupt Status Register */
#define _SAES_ISR			U(0x304)
/* SAES Interrupt Clear Register */
#define _SAES_ICR			U(0x308)

/* SAES control register fields */
#define _SAES_CR_RESET_VALUE		U(0x0)
#define _SAES_CR_IPRST			BIT(31)
#define _SAES_CR_KEYSEL_MASK		GENMASK_32(30, 28)
#define _SAES_CR_KEYSEL_SHIFT		U(28)
#define _SAES_CR_KEYSEL_SOFT		U(0x0)
#define _SAES_CR_KEYSEL_DHUK		U(0x1)
#define _SAES_CR_KEYSEL_BHK		U(0x2)
#define _SAES_CR_KEYSEL_BHU_XOR_BH_K	U(0x4)
#define _SAES_CR_KEYSEL_TEST		U(0x7)
#define _SAES_CR_KSHAREID_MASK		GENMASK_32(27, 26)
#define _SAES_CR_KSHAREID_SHIFT		U(26)
#define _SAES_CR_KSHAREID_CRYP		U(0x0)
#define _SAES_CR_KEYMOD_MASK		GENMASK_32(25, 24)
#define _SAES_CR_KEYMOD_SHIFT		U(24)
#define _SAES_CR_KEYMOD_NORMAL		U(0x0)
#define _SAES_CR_KEYMOD_WRAPPED		U(0x1)
#define _SAES_CR_KEYMOD_SHARED		U(0x2)
#define _SAES_CR_NPBLB_MASK		GENMASK_32(23, 20)
#define _SAES_CR_NPBLB_SHIFT		U(20)
#define _SAES_CR_KEYPROT		BIT(19)
#define _SAES_CR_KEYSIZE		BIT(18)
#define _SAES_CR_GCMPH_MASK		GENMASK_32(14, 13)
#define _SAES_CR_GCMPH_SHIFT		U(13)
#define _SAES_CR_GCMPH_INIT		U(0)
#define _SAES_CR_GCMPH_HEADER		U(1)
#define _SAES_CR_GCMPH_PAYLOAD		U(2)
#define _SAES_CR_GCMPH_FINAL		U(3)
#define _SAES_CR_DMAOUTEN		BIT(12)
#define _SAES_CR_DMAINEN		BIT(11)
#define _SAES_CR_CHMOD_MASK		(BIT(16) | GENMASK_32(6, 5))
#define _SAES_CR_CHMOD_SHIFT		U(5)
#define _SAES_CR_CHMOD_ECB		U(0x0)
#define _SAES_CR_CHMOD_CBC		U(0x1)
#define _SAES_CR_CHMOD_CTR		U(0x2)
#define _SAES_CR_CHMOD_GCM		U(0x3)
#define _SAES_CR_CHMOD_GMAC		U(0x3)
#define _SAES_CR_CHMOD_CCM		U(0x800)
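/*
 * Note: the CHMOD field is split between CR bit 16 and CR bits [6:5].
 * The mode values above are given before _SAES_CR_CHMOD_SHIFT is
 * applied: once shifted, ECB/CBC/CTR/GCM land in bits [6:5] while the
 * CCM value (0x800 << 5) sets BIT(16).
 */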
#define _SAES_CR_MODE_MASK		GENMASK_32(4, 3)
#define _SAES_CR_MODE_SHIFT		U(3)
#define _SAES_CR_MODE_ENC		U(0)
#define _SAES_CR_MODE_KEYPREP		U(1)
#define _SAES_CR_MODE_DEC		U(2)
#define _SAES_CR_DATATYPE_MASK		GENMASK_32(2, 1)
#define _SAES_CR_DATATYPE_SHIFT		U(1)
#define _SAES_CR_DATATYPE_NONE		U(0)
#define _SAES_CR_DATATYPE_HALF_WORD	U(1)
#define _SAES_CR_DATATYPE_BYTE		U(2)
#define _SAES_CR_DATATYPE_BIT		U(3)
#define _SAES_CR_EN			BIT(0)

/* SAES status register fields */
#define _SAES_SR_KEYVALID		BIT(7)
#define _SAES_SR_BUSY			BIT(3)
#define _SAES_SR_WRERR			BIT(2)
#define _SAES_SR_RDERR			BIT(1)
#define _SAES_SR_CCF			BIT(0)

/* SAES interrupt registers fields */
#define _SAES_I_RNG_ERR			BIT(3)
#define _SAES_I_KEY_ERR			BIT(2)
#define _SAES_I_RW_ERR			BIT(1)
#define _SAES_I_CC			BIT(0)

#define SAES_TIMEOUT_US			U(100000)
#define TIMEOUT_US_1MS			U(1000)
#define SAES_RESET_DELAY		U(2)

#define IS_CHAINING_MODE(mode, cr) \
	(((cr) & _SAES_CR_CHMOD_MASK) == (_SAES_CR_CHMOD_##mode << \
					  _SAES_CR_CHMOD_SHIFT))

#define SET_CHAINING_MODE(mode, cr) \
	set_field_u32(cr, _SAES_CR_CHMOD_MASK, _SAES_CR_CHMOD_##mode)

static struct mutex saes_lock = MUTEX_INITIALIZER;
static struct stm32_saes_platdata {
	vaddr_t base;
	struct clk *clk;
	struct rstctrl *reset;
} saes_pdata;

static bool does_chaining_mode_need_iv(uint32_t cr)
{
	return !IS_CHAINING_MODE(ECB, cr);
}

static bool is_encrypt(uint32_t cr)
{
	return (cr & _SAES_CR_MODE_MASK) ==
	       SHIFT_U32(_SAES_CR_MODE_ENC, _SAES_CR_MODE_SHIFT);
}

static bool is_decrypt(uint32_t cr)
{
	return (cr & _SAES_CR_MODE_MASK) ==
	       SHIFT_U32(_SAES_CR_MODE_DEC, _SAES_CR_MODE_SHIFT);
}
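/*
 * In GCM encryption and in CCM decryption the last block of payload may
 * be partial. The NPBLB field is then expected to hold the number of
 * padding bytes completing this last block, so that the peripheral can
 * discard them from the tag computation.
 */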
static bool does_need_npblb(uint32_t cr)
{
	return (IS_CHAINING_MODE(GCM, cr) && is_encrypt(cr)) ||
	       (IS_CHAINING_MODE(CCM, cr) && is_decrypt(cr));
}

static bool can_suspend(uint32_t cr)
{
	return !IS_CHAINING_MODE(GCM, cr);
}

static void write_aligned_block(vaddr_t base, uint32_t *data)
{
	unsigned int i = 0;

	/* SAES is configured to swap bytes as expected */
	for (i = 0; i < AES_BLOCK_NB_U32; i++)
		io_write32(base + _SAES_DINR, data[i]);
}

static void write_block(vaddr_t base, uint8_t *data)
{
	if (IS_ALIGNED_WITH_TYPE(data, uint32_t)) {
		write_aligned_block(base, (void *)data);
	} else {
		uint32_t data_u32[AES_BLOCK_NB_U32] = { };

		memcpy(data_u32, data, sizeof(data_u32));
		write_aligned_block(base, data_u32);
	}
}

static void read_aligned_block(vaddr_t base, uint32_t *data)
{
	unsigned int i = 0;

	/* SAES is configured to swap bytes as expected */
	for (i = 0; i < AES_BLOCK_NB_U32; i++)
		data[i] = io_read32(base + _SAES_DOUTR);
}

static void read_block(vaddr_t base, uint8_t *data)
{
	if (IS_ALIGNED_WITH_TYPE(data, uint32_t)) {
		read_aligned_block(base, (void *)data);
	} else {
		uint32_t data_u32[AES_BLOCK_NB_U32] = { };

		read_aligned_block(base, data_u32);

		memcpy(data, data_u32, sizeof(data_u32));
	}
}

static TEE_Result wait_computation_completed(vaddr_t base)
{
	uint64_t timeout_ref = timeout_init_us(SAES_TIMEOUT_US);

	while ((io_read32(base + _SAES_SR) & _SAES_SR_CCF) != _SAES_SR_CCF)
		if (timeout_elapsed(timeout_ref))
			break;

	if ((io_read32(base + _SAES_SR) & _SAES_SR_CCF) != _SAES_SR_CCF) {
		DMSG("CCF timeout");
		return TEE_ERROR_GENERIC;
	}

	return TEE_SUCCESS;
}

static void clear_computation_completed(uintptr_t base)
{
	io_setbits32(base + _SAES_ICR, _SAES_I_CC);
}

static TEE_Result wait_key_valid(vaddr_t base)
{
	uint64_t timeout_ref = timeout_init_us(SAES_TIMEOUT_US);

	while (!(io_read32(base + _SAES_SR) & _SAES_SR_KEYVALID))
		if (timeout_elapsed(timeout_ref))
			break;

	if (!(io_read32(base + _SAES_SR) & _SAES_SR_KEYVALID)) {
		DMSG("KEYVALID timeout");
		return TEE_ERROR_GENERIC;
	}

	return TEE_SUCCESS;
}

static TEE_Result saes_start(struct stm32_saes_context *ctx)
{
	uint64_t timeout_ref = 0;

	/* Reset SAES */
	io_setbits32(ctx->base + _SAES_CR, _SAES_CR_IPRST);
	io_clrbits32(ctx->base + _SAES_CR, _SAES_CR_IPRST);

	timeout_ref = timeout_init_us(SAES_TIMEOUT_US);
	while (io_read32(ctx->base + _SAES_SR) & _SAES_SR_BUSY)
		if (timeout_elapsed(timeout_ref))
			break;

	if (io_read32(ctx->base + _SAES_SR) & _SAES_SR_BUSY) {
		DMSG("busy timeout");
		return TEE_ERROR_GENERIC;
	}

	return TEE_SUCCESS;
}

static void saes_end(struct stm32_saes_context *ctx, int prev_error)
{
	if (prev_error) {
		/* Reset SAES */
		io_setbits32(ctx->base + _SAES_CR, _SAES_CR_IPRST);
		io_clrbits32(ctx->base + _SAES_CR, _SAES_CR_IPRST);
	}

	/* Disable the SAES peripheral */
	io_clrbits32(ctx->base + _SAES_CR, _SAES_CR_EN);
}

static void saes_write_iv(struct stm32_saes_context *ctx)
{
	/* Restore the IV if the chaining mode needs one */
	if (does_chaining_mode_need_iv(ctx->cr)) {
		unsigned int i = 0;

		for (i = 0; i < AES_IVSIZE / sizeof(uint32_t); i++) {
			io_write32(ctx->base + _SAES_IVR0 + i *
				   sizeof(uint32_t), ctx->iv[i]);
		}
	}
}

static void saes_save_suspend(struct stm32_saes_context *ctx)
{
	size_t i = 0;

	for (i = 0; i < 8; i++)
		ctx->susp[i] = io_read32(ctx->base + _SAES_SUSPR0 +
					 i * sizeof(uint32_t));
}

static void saes_restore_suspend(struct stm32_saes_context *ctx)
{
	size_t i = 0;

	for (i = 0; i < 8; i++)
		io_write32(ctx->base + _SAES_SUSPR0 + i * sizeof(uint32_t),
			   ctx->susp[i]);
}

static void saes_write_key(struct stm32_saes_context *ctx)
{
	/* Restore the _SAES_KEYRx registers if a software key is used */
	if ((ctx->cr & _SAES_CR_KEYSEL_MASK) ==
	    SHIFT_U32(_SAES_CR_KEYSEL_SOFT, _SAES_CR_KEYSEL_SHIFT)) {
		size_t i = 0;

		for (i = 0; i < AES_KEYSIZE_128 / sizeof(uint32_t); i++)
			io_write32(ctx->base + _SAES_KEYR0 + i *
				   sizeof(uint32_t), ctx->key[i]);

		if ((ctx->cr & _SAES_CR_KEYSIZE) == _SAES_CR_KEYSIZE) {
			for (i = 0;
			     i < (AES_KEYSIZE_256 / 2) / sizeof(uint32_t);
			     i++) {
				io_write32(ctx->base + _SAES_KEYR4 + i *
					   sizeof(uint32_t),
					   ctx->key[i + 4]);
			}
		}
	}
}
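/*
 * AES decryption in ECB/CBC runs the key schedule backwards, starting
 * from the last round key. saes_prepare_key() therefore first runs the
 * key preparation mode (Mode 2) when decrypting in ECB or CBC; CTR, GCM
 * and CCM rely on the forward cipher and need no such step.
 */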
static TEE_Result saes_prepare_key(struct stm32_saes_context *ctx)
{
	TEE_Result res = TEE_ERROR_GENERIC;

	/* Disable the SAES peripheral */
	io_clrbits32(ctx->base + _SAES_CR, _SAES_CR_EN);

	/* Set key size */
	if ((ctx->cr & _SAES_CR_KEYSIZE))
		io_setbits32(ctx->base + _SAES_CR, _SAES_CR_KEYSIZE);
	else
		io_clrbits32(ctx->base + _SAES_CR, _SAES_CR_KEYSIZE);

	saes_write_key(ctx);

	res = wait_key_valid(ctx->base);
	if (res)
		return res;

	/*
	 * For ECB/CBC decryption, key preparation mode must be selected
	 * to populate the key.
	 */
	if ((IS_CHAINING_MODE(ECB, ctx->cr) ||
	     IS_CHAINING_MODE(CBC, ctx->cr)) && is_decrypt(ctx->cr)) {
		/* Select Mode 2 */
		io_clrsetbits32(ctx->base + _SAES_CR, _SAES_CR_MODE_MASK,
				SHIFT_U32(_SAES_CR_MODE_KEYPREP,
					  _SAES_CR_MODE_SHIFT));

		/* Enable SAES */
		io_setbits32(ctx->base + _SAES_CR, _SAES_CR_EN);

		res = wait_computation_completed(ctx->base);
		if (res)
			return res;

		clear_computation_completed(ctx->base);

		/* Set Mode 3 */
		io_clrsetbits32(ctx->base + _SAES_CR, _SAES_CR_MODE_MASK,
				SHIFT_U32(_SAES_CR_MODE_DEC,
					  _SAES_CR_MODE_SHIFT));
	}

	return TEE_SUCCESS;
}
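/*
 * The SAES hardware is released between two update calls of a
 * multi-part operation: save_context() and restore_context() snapshot
 * and reload CR, the suspend registers and the IV so that processing
 * can resume later, possibly after another context used the peripheral.
 */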
static TEE_Result save_context(struct stm32_saes_context *ctx)
{
	if ((io_read32(ctx->base + _SAES_SR) & _SAES_SR_CCF)) {
		/* Device should not be in a processing phase */
		return TEE_ERROR_BAD_STATE;
	}

	/* Save CR */
	ctx->cr = io_read32(ctx->base + _SAES_CR);

	if (!can_suspend(ctx->cr))
		return TEE_SUCCESS;

	saes_save_suspend(ctx);

	/* Save the current IV if the chaining mode needs one */
	if (does_chaining_mode_need_iv(ctx->cr)) {
		uint8_t i = 0;

		for (i = 0; i < AES_IVSIZE / sizeof(uint32_t); i++) {
			ctx->iv[i] = io_read32(ctx->base + _SAES_IVR0 + i *
					       sizeof(uint32_t));
		}
	}

	/* Disable the SAES peripheral */
	io_clrbits32(ctx->base + _SAES_CR, _SAES_CR_EN);

	return TEE_SUCCESS;
}

/* To resume the processing of a message */
static TEE_Result restore_context(struct stm32_saes_context *ctx)
{
	TEE_Result res = TEE_SUCCESS;

	/* SAES shall be disabled */
	if ((io_read32(ctx->base + _SAES_CR) & _SAES_CR_EN)) {
		DMSG("Device is still enabled");
		return TEE_ERROR_BAD_STATE;
	}

	/* Reset internal state */
	io_setbits32(ctx->base + _SAES_CR, _SAES_CR_IPRST);

	/* Restore configuration register */
	io_write32(ctx->base + _SAES_CR, ctx->cr);

	/* Write the key and, for ECB or CBC decryption, prepare it */
	res = saes_prepare_key(ctx);
	if (res)
		return res;

	saes_restore_suspend(ctx);

	saes_write_iv(ctx);

	/* Enable the SAES peripheral */
	io_setbits32(ctx->base + _SAES_CR, _SAES_CR_EN);

	return TEE_SUCCESS;
}
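/*
 * GCM/CCM processing goes through the hardware phases INIT -> HEADER
 * (associated data) -> PAYLOAD -> FINAL, as encoded in the GCMPH field.
 * The helpers below leave the INIT and HEADER phases: phases may be
 * skipped (e.g. no associated data) but never entered backwards.
 */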
static TEE_Result do_from_init_to_phase(struct stm32_saes_context *ctx,
					uint32_t new_phase)
{
	TEE_Result res = TEE_SUCCESS;

	/* We didn't run the init phase yet */
	res = restore_context(ctx);
	if (res)
		return res;

	res = wait_computation_completed(ctx->base);
	if (res)
		return res;

	clear_computation_completed(ctx->base);

	/* Move to 'new_phase' */
	io_clrsetbits32(ctx->base + _SAES_CR, _SAES_CR_GCMPH_MASK,
			SHIFT_U32(new_phase, _SAES_CR_GCMPH_SHIFT));

	/* Enable the SAES peripheral (init disabled it) */
	io_setbits32(ctx->base + _SAES_CR, _SAES_CR_EN);

	return TEE_SUCCESS;
}

static TEE_Result do_from_header_to_phase(struct stm32_saes_context *ctx,
					  uint32_t new_phase)
{
	TEE_Result res = TEE_SUCCESS;

	if (can_suspend(ctx->cr)) {
		res = restore_context(ctx);
		if (res)
			return res;
	}

	if (ctx->extra_size) {
		/* Manage unaligned header data before moving to next phase */
		memset((uint8_t *)ctx->extra + ctx->extra_size, 0,
		       AES_BLOCK_SIZE - ctx->extra_size);

		write_aligned_block(ctx->base, ctx->extra);

		res = wait_computation_completed(ctx->base);
		if (res)
			return res;

		clear_computation_completed(ctx->base);

		ctx->assoc_len += ctx->extra_size * INT8_BIT;
		ctx->extra_size = U(0);
	}

	/* Move to 'new_phase' */
	io_clrsetbits32(ctx->base + _SAES_CR, _SAES_CR_GCMPH_MASK,
			SHIFT_U32(new_phase, _SAES_CR_GCMPH_SHIFT));

	return TEE_SUCCESS;
}
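/*
 * Illustrative call sequences (sketch only: error handling omitted and
 * the key/iv/data buffer names are examples, not part of the driver):
 *
 * ECB/CBC/CTR:
 *	stm32_saes_init(&ctx, false, STM32_SAES_MODE_CBC,
 *			STM32_SAES_KEY_SOFT, key, AES_KEYSIZE_128,
 *			iv, AES_IVSIZE);
 *	stm32_saes_update(&ctx, true, in, out, size);
 *
 * GCM/CCM:
 *	stm32_saes_init(&ctx, false, STM32_SAES_MODE_GCM,
 *			STM32_SAES_KEY_SOFT, key, AES_KEYSIZE_128,
 *			iv, AES_IVSIZE);
 *	stm32_saes_update_assodata(&ctx, aad, aad_size);
 *	stm32_saes_update_load(&ctx, true, in, out, size);
 *	stm32_saes_final(&ctx, tag, sizeof(tag));
 */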
/**
 * @brief Start an AES computation.
 * @param ctx: SAES process context
 * @param is_dec: true if decryption, false if encryption
 * @param ch_mode: define the chaining mode
 * @param key_select: define where the key comes from
 * @param key: pointer to the key (used only if key_select is KEY_SOFT)
 * @param key_size: key size
 * @param iv: pointer to initialization vector (unused if ch_mode is ECB)
 * @param iv_size: iv size
 * @note This function mainly stores the configuration in @ctx; its only
 * hardware access is the peripheral reset performed by saes_start().
 *
 * @retval TEE_SUCCESS if OK or a TEE_Result compliant code.
 */
TEE_Result stm32_saes_init(struct stm32_saes_context *ctx, bool is_dec,
			   enum stm32_saes_chaining_mode ch_mode,
			   enum stm32_saes_key_selection key_select,
			   const void *key, size_t key_size, const void *iv,
			   size_t iv_size)
{
	const uint32_t *key_u32 = NULL;
	const uint32_t *iv_u32 = NULL;
	uint32_t local_key[8] = { };
	uint32_t local_iv[4] = { };
	unsigned int i = 0;

	if (!ctx)
		return TEE_ERROR_BAD_PARAMETERS;

	*ctx = (struct stm32_saes_context){
		.lock = &saes_lock,
		.base = saes_pdata.base,
		.cr = _SAES_CR_RESET_VALUE
	};

	/* We want the buffers to be 32-bit aligned */
	if (IS_ALIGNED_WITH_TYPE(key, uint32_t)) {
		key_u32 = key;
	} else {
		memcpy(local_key, key, key_size);
		key_u32 = local_key;
	}

	if (IS_ALIGNED_WITH_TYPE(iv, uint32_t)) {
		iv_u32 = iv;
	} else {
		memcpy(local_iv, iv, iv_size);
		iv_u32 = local_iv;
	}

	if (is_dec)
		ctx->cr |= set_field_u32(ctx->cr, _SAES_CR_MODE_MASK,
					 _SAES_CR_MODE_DEC);
	else
		ctx->cr |= set_field_u32(ctx->cr, _SAES_CR_MODE_MASK,
					 _SAES_CR_MODE_ENC);

	/* Save chaining mode */
	switch (ch_mode) {
	case STM32_SAES_MODE_ECB:
		ctx->cr |= SET_CHAINING_MODE(ECB, ctx->cr);
		break;
	case STM32_SAES_MODE_CBC:
		ctx->cr |= SET_CHAINING_MODE(CBC, ctx->cr);
		break;
	case STM32_SAES_MODE_CTR:
		ctx->cr |= SET_CHAINING_MODE(CTR, ctx->cr);
		break;
	case STM32_SAES_MODE_GCM:
		ctx->cr |= SET_CHAINING_MODE(GCM, ctx->cr);
		break;
	case STM32_SAES_MODE_CCM:
		ctx->cr |= SET_CHAINING_MODE(CCM, ctx->cr);
		break;
	default:
		return TEE_ERROR_BAD_PARAMETERS;
	}

	/*
	 * We use the HW byte swap (_SAES_CR_DATATYPE_BYTE) for data, so
	 * there is no need for TEE_U32_TO_BIG_ENDIAN() before writing to
	 * DINR, nor for TEE_U32_FROM_BIG_ENDIAN() after reading from DOUTR.
	 *
	 * Note that key wrapping only accepts _SAES_CR_DATATYPE_NONE.
	 */
	ctx->cr |= set_field_u32(ctx->cr, _SAES_CR_DATATYPE_MASK,
				 _SAES_CR_DATATYPE_BYTE);

	/* Configure the key size */
	switch (key_size) {
	case AES_KEYSIZE_128:
		ctx->cr &= ~_SAES_CR_KEYSIZE;
		break;
	case AES_KEYSIZE_256:
		ctx->cr |= _SAES_CR_KEYSIZE;
		break;
	default:
		return TEE_ERROR_BAD_PARAMETERS;
	}

	/* Configure the key */
	switch (key_select) {
	case STM32_SAES_KEY_SOFT:
		ctx->cr |= set_field_u32(ctx->cr, _SAES_CR_KEYSEL_MASK,
					 SHIFT_U32(_SAES_CR_KEYSEL_SOFT,
						   _SAES_CR_KEYSEL_SHIFT));
		/*
		 * /!\ Save the key in HW byte order and word order:
		 * key[i] is for _SAES_KEYRi.
		 */
		switch (key_size) {
		case AES_KEYSIZE_128:
			/* First 16 bytes == 4 u32 */
			for (i = 0; i < AES_KEYSIZE_128 / sizeof(uint32_t);
			     i++)
				ctx->key[i] =
					TEE_U32_TO_BIG_ENDIAN(key_u32[3 - i]);
			break;
		case AES_KEYSIZE_256:
			for (i = 0; i < AES_KEYSIZE_256 / sizeof(uint32_t);
			     i++)
				ctx->key[i] =
					TEE_U32_TO_BIG_ENDIAN(key_u32[7 - i]);
			break;
		default:
			return TEE_ERROR_BAD_PARAMETERS;
		}
		break;
	case STM32_SAES_KEY_DHU:
		ctx->cr |= set_field_u32(ctx->cr, _SAES_CR_KEYSEL_MASK,
					 SHIFT_U32(_SAES_CR_KEYSEL_DHUK,
						   _SAES_CR_KEYSEL_SHIFT));
		break;
	case STM32_SAES_KEY_BH:
		ctx->cr |= set_field_u32(ctx->cr, _SAES_CR_KEYSEL_MASK,
					 SHIFT_U32(_SAES_CR_KEYSEL_BHK,
						   _SAES_CR_KEYSEL_SHIFT));
		break;
	case STM32_SAES_KEY_BHU_XOR_BH:
		ctx->cr |= set_field_u32(ctx->cr, _SAES_CR_KEYSEL_MASK,
					 SHIFT_U32(_SAES_CR_KEYSEL_BHU_XOR_BH_K,
						   _SAES_CR_KEYSEL_SHIFT));
		break;
	case STM32_SAES_KEY_WRAPPED:
		ctx->cr |= set_field_u32(ctx->cr, _SAES_CR_KEYSEL_MASK,
					 SHIFT_U32(_SAES_CR_KEYSEL_SOFT,
						   _SAES_CR_KEYSEL_SHIFT));
		break;
	default:
		return TEE_ERROR_BAD_PARAMETERS;
	}

	/* Save the IV */
	if (ch_mode != STM32_SAES_MODE_ECB) {
		if (!iv || iv_size != AES_IVSIZE)
			return TEE_ERROR_BAD_PARAMETERS;

		for (i = 0; i < AES_IVSIZE / sizeof(uint32_t); i++)
			ctx->iv[i] = TEE_U32_TO_BIG_ENDIAN(iv_u32[3 - i]);
	}

	/* Reset suspend registers */
	memset(ctx->susp, 0, sizeof(ctx->susp));

	return saes_start(ctx);
}
/**
 * @brief Update (or start) an AES authentication process for the
 *	associated data (CCM or GCM).
 * @param ctx: SAES process context
 * @param data: pointer to the associated data
 * @param data_size: data size
 *
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_saes_update_assodata(struct stm32_saes_context *ctx,
				      uint8_t *data, size_t data_size)
{
	TEE_Result res = TEE_SUCCESS;
	unsigned int i = 0;
	uint32_t previous_phase = 0;

	if (!ctx)
		return TEE_ERROR_BAD_PARAMETERS;

	/* If no associated data, nothing to do */
	if (!data || !data_size)
		return TEE_SUCCESS;

	mutex_lock(ctx->lock);

	previous_phase = (ctx->cr & _SAES_CR_GCMPH_MASK) >>
			 _SAES_CR_GCMPH_SHIFT;

	switch (previous_phase) {
	case _SAES_CR_GCMPH_INIT:
		res = do_from_init_to_phase(ctx, _SAES_CR_GCMPH_HEADER);
		break;
	case _SAES_CR_GCMPH_HEADER:
		/*
		 * Function update_assodata() was already called.
		 * We only need to restore the context.
		 */
		if (can_suspend(ctx->cr))
			res = restore_context(ctx);

		break;
	default:
		DMSG("out of order call");
		res = TEE_ERROR_BAD_STATE;
	}

	if (res)
		goto out;

	/* Manage data remaining from a previous update_assodata() call */
	if (ctx->extra_size &&
	    ((ctx->extra_size + data_size) >= AES_BLOCK_SIZE)) {
		uint32_t block[AES_BLOCK_NB_U32] = { };

		memcpy(block, ctx->extra, ctx->extra_size);
		memcpy((uint8_t *)block + ctx->extra_size, data,
		       AES_BLOCK_SIZE - ctx->extra_size);

		write_aligned_block(ctx->base, block);

		res = wait_computation_completed(ctx->base);
		if (res)
			goto out;

		clear_computation_completed(ctx->base);

		i += AES_BLOCK_SIZE - ctx->extra_size;
		ctx->extra_size = 0;
		ctx->assoc_len += AES_BLOCK_SIZE_BIT;
	}

	while (data_size - i >= AES_BLOCK_SIZE) {
		write_block(ctx->base, data + i);

		res = wait_computation_completed(ctx->base);
		if (res)
			goto out;

		clear_computation_completed(ctx->base);

		/* Process next block */
		i += AES_BLOCK_SIZE;
		ctx->assoc_len += AES_BLOCK_SIZE_BIT;
	}

	/*
	 * Manage the last block if it is not a block size multiple:
	 * save the remaining data to manage it later (potentially with
	 * new associated data).
	 */
	if (i < data_size) {
		memcpy((uint8_t *)ctx->extra + ctx->extra_size, data + i,
		       data_size - i);
		ctx->extra_size += data_size - i;
	}

	res = save_context(ctx);

out:
	if (res)
		saes_end(ctx, res);

	mutex_unlock(ctx->lock);

	return res;
}
/**
 * @brief Update (or start) an AES authenticated de/encryption of
 *	payload data (CCM or GCM).
 * @param ctx: SAES process context
 * @param last_block: true if last payload data block
 * @param data_in: pointer to payload
 * @param data_out: pointer where to save de/encrypted payload
 * @param data_size: payload size
 *
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_saes_update_load(struct stm32_saes_context *ctx,
				  bool last_block, uint8_t *data_in,
				  uint8_t *data_out, size_t data_size)
{
	TEE_Result res = TEE_SUCCESS;
	unsigned int i = 0;
	uint32_t previous_phase = 0;

	if (!ctx)
		return TEE_ERROR_BAD_PARAMETERS;

	/* If there is no data, nothing to do */
	if (!data_in || !data_size)
		return TEE_SUCCESS;

	mutex_lock(ctx->lock);

	previous_phase = (ctx->cr & _SAES_CR_GCMPH_MASK) >>
			 _SAES_CR_GCMPH_SHIFT;

	switch (previous_phase) {
	case _SAES_CR_GCMPH_INIT:
		res = do_from_init_to_phase(ctx, _SAES_CR_GCMPH_PAYLOAD);
		break;
	case _SAES_CR_GCMPH_HEADER:
		res = do_from_header_to_phase(ctx, _SAES_CR_GCMPH_PAYLOAD);
		break;
	case _SAES_CR_GCMPH_PAYLOAD:
		/* New update_load() call, we only need to restore context */
		if (can_suspend(ctx->cr))
			res = restore_context(ctx);

		break;
	default:
		DMSG("out of order call");
		res = TEE_ERROR_BAD_STATE;
	}

	if (res)
		goto out;

	while (i < ROUNDDOWN(data_size, AES_BLOCK_SIZE)) {
		write_block(ctx->base, data_in + i);

		res = wait_computation_completed(ctx->base);
		if (res)
			goto out;

		read_block(ctx->base, data_out + i);

		clear_computation_completed(ctx->base);

		/* Process next block */
		i += AES_BLOCK_SIZE;
		ctx->load_len += AES_BLOCK_SIZE_BIT;
	}

	/* Manage the last block if it is not a block size multiple */
	if (last_block && i < data_size) {
		uint32_t block_in[AES_BLOCK_NB_U32] = { };
		uint32_t block_out[AES_BLOCK_NB_U32] = { };

		memcpy(block_in, data_in + i, data_size - i);

		if (does_need_npblb(ctx->cr)) {
			uint32_t npblb = AES_BLOCK_SIZE - (data_size - i);

			io_clrsetbits32(ctx->base + _SAES_CR,
					_SAES_CR_NPBLB_MASK,
					SHIFT_U32(npblb, _SAES_CR_NPBLB_SHIFT));
		}

		write_aligned_block(ctx->base, block_in);

		res = wait_computation_completed(ctx->base);
		if (res)
			goto out;

		read_aligned_block(ctx->base, block_out);

		clear_computation_completed(ctx->base);

		memcpy(data_out + i, block_out, data_size - i);

		ctx->load_len += (data_size - i) * INT8_BIT;
	}

	res = save_context(ctx);

out:
	if (res)
		saes_end(ctx, res);

	mutex_unlock(ctx->lock);

	return res;
}
/**
 * @brief Get authentication tag for AES authenticated algorithms (CCM or GCM).
 * @param ctx: SAES process context
 * @param tag: pointer where to save the tag
 * @param tag_size: tag size
 *
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_saes_final(struct stm32_saes_context *ctx, uint8_t *tag,
			    size_t tag_size)
{
	TEE_Result res = TEE_SUCCESS;
	uint32_t tag_u32[4] = { };
	uint32_t previous_phase = 0;

	if (!ctx)
		return TEE_ERROR_BAD_PARAMETERS;

	mutex_lock(ctx->lock);

	previous_phase = (ctx->cr & _SAES_CR_GCMPH_MASK) >>
			 _SAES_CR_GCMPH_SHIFT;

	switch (previous_phase) {
	case _SAES_CR_GCMPH_INIT:
		res = do_from_init_to_phase(ctx, _SAES_CR_GCMPH_FINAL);
		break;
	case _SAES_CR_GCMPH_HEADER:
		res = do_from_header_to_phase(ctx, _SAES_CR_GCMPH_FINAL);
		break;
	case _SAES_CR_GCMPH_PAYLOAD:
		if (can_suspend(ctx->cr))
			res = restore_context(ctx);

		/* Move to final phase */
		io_clrsetbits32(ctx->base + _SAES_CR, _SAES_CR_GCMPH_MASK,
				SHIFT_U32(_SAES_CR_GCMPH_FINAL,
					  _SAES_CR_GCMPH_SHIFT));
		break;
	default:
		DMSG("out of order call");
		res = TEE_ERROR_BAD_STATE;
	}
	if (res)
		goto out;

	if (IS_CHAINING_MODE(GCM, ctx->cr)) {
		/*
		 * Write the GCM lengths block: the bit lengths of the
		 * associated data and of the payload, as two 64-bit
		 * big-endian values. SAES is configured to swap bytes
		 * as expected.
		 */
		io_write32(ctx->base + _SAES_DINR, 0);
		io_write32(ctx->base + _SAES_DINR, ctx->assoc_len);
		io_write32(ctx->base + _SAES_DINR, 0);
		io_write32(ctx->base + _SAES_DINR, ctx->load_len);
	}

	res = wait_computation_completed(ctx->base);
	if (res)
		goto out;

	read_aligned_block(ctx->base, tag_u32);

	clear_computation_completed(ctx->base);

	memcpy(tag, tag_u32, MIN(sizeof(tag_u32), tag_size));

out:
	saes_end(ctx, res);
	mutex_unlock(ctx->lock);

	return res;
}

/**
 * @brief Update (or start) an AES de/encrypt process (ECB, CBC or CTR).
 * @param ctx: SAES process context
 * @param last_block: true if last payload data block
 * @param data_in: pointer to payload
 * @param data_out: pointer where to save de/encrypted payload
 * @param data_size: payload size
 *
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_saes_update(struct stm32_saes_context *ctx, bool last_block,
			     uint8_t *data_in, uint8_t *data_out,
			     size_t data_size)
{
	TEE_Result res = TEE_SUCCESS;
	unsigned int i = U(0);

	if (!ctx)
		return TEE_ERROR_BAD_PARAMETERS;

	mutex_lock(ctx->lock);

	/*
	 * CBC encryption requires the last two blocks to be aligned with
	 * the AES block size.
	 */
	if (last_block && IS_CHAINING_MODE(CBC, ctx->cr) &&
	    is_encrypt(ctx->cr) &&
	    ROUNDDOWN(data_size, AES_BLOCK_SIZE) != data_size) {
		if (data_size < AES_BLOCK_SIZE * 2) {
			/*
			 * For CBC, the size of the last part should be
			 * at least 2 * AES_BLOCK_SIZE.
			 */
			EMSG("Unexpected last block size");
			res = TEE_ERROR_BAD_STATE;
			goto out;
		}
		/*
		 * Padding is not supported when the total size is not
		 * aligned with the size of a block.
		 */
		res = TEE_ERROR_NOT_IMPLEMENTED;
		goto out;
	}

	/* Manage a remaining CTR mask from a previous update call */
	if (IS_CHAINING_MODE(CTR, ctx->cr) && ctx->extra_size) {
		unsigned int j = 0;
		uint8_t *mask = (uint8_t *)ctx->extra;

		for (i = 0, j = 0; j < ctx->extra_size && i < data_size;
		     j++, i++)
			data_out[i] = data_in[i] ^ mask[j];

		if (j != ctx->extra_size) {
			/*
			 * We did not consume the whole saved mask, but
			 * there is no more data: save the remaining mask
			 * and its new size.
			 */
			memmove(ctx->extra, ctx->extra + j,
				ctx->extra_size - j);
			ctx->extra_size -= j;

			/*
			 * No need to save the HW context: we did not
			 * modify the HW state.
			 */
			res = TEE_SUCCESS;
			goto out;
		}
		/* All the extra mask was consumed */
		ctx->extra_size = 0;
	}

	res = restore_context(ctx);
	if (res)
		goto out;

	while (data_size - i >= AES_BLOCK_SIZE) {
		write_block(ctx->base, data_in + i);

		res = wait_computation_completed(ctx->base);
		if (res)
			goto out;

		read_block(ctx->base, data_out + i);

		clear_computation_completed(ctx->base);

		/* Process next block */
		i += AES_BLOCK_SIZE;
	}

	/* Manage the last block if it is not a block size multiple */
	if (i < data_size) {
		if (IS_CHAINING_MODE(CTR, ctx->cr)) {
			/*
			 * For CTR, we save the generated mask to use it
			 * in the next update call.
			 */
			uint32_t block_in[AES_BLOCK_NB_U32] = { };
			uint32_t block_out[AES_BLOCK_NB_U32] = { };

			memcpy(block_in, data_in + i, data_size - i);

			write_aligned_block(ctx->base, block_in);

			res = wait_computation_completed(ctx->base);
			if (res)
				goto out;

			read_aligned_block(ctx->base, block_out);

			clear_computation_completed(ctx->base);

			memcpy(data_out + i, block_out, data_size - i);

			/* Save the mask for a possible next call */
			ctx->extra_size = AES_BLOCK_SIZE - (data_size - i);
			memcpy(ctx->extra,
			       (uint8_t *)block_out + data_size - i,
			       ctx->extra_size);
		} else {
			/* CBC and ECB only manage multiples of block size */
			res = TEE_ERROR_BAD_PARAMETERS;
			goto out;
		}
	}

	if (!last_block)
		res = save_context(ctx);

out:
	/* If last block or error, end the SAES processing */
	if (last_block || res)
		saes_end(ctx, res);

	mutex_unlock(ctx->lock);

	return res;
}

static void xor_block(uint8_t *b1, uint8_t *b2, size_t size)
{
	size_t i = 0;

	for (i = 0; i < size; i++)
		b1[i] ^= b2[i];
}
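/*
 * AES-CMAC with a 128-bit output (NIST SP 800-38B / RFC 4493), used as
 * the PRF of the key derivation below. With L = AES_K(0^128):
 *	K1 = L << 1, XORed with const_Rb (0x87) when MSB(L) is set
 *	K2 = K1 << 1, XORed with const_Rb when MSB(K1) is set
 * The message is then CBC-MACed, the last block being XORed with K1
 * when complete, or padded with 0b100..0 and XORed with K2 otherwise.
 */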
static TEE_Result stm32_saes_cmac_prf_128(struct stm32_saes_context *ctx,
					  enum stm32_saes_key_selection key_sel,
					  const void *key, size_t key_size,
					  uint8_t *data, size_t data_size,
					  uint8_t *out)
{
	TEE_Result res = TEE_ERROR_GENERIC;
	uint8_t block[AES_BLOCK_SIZE] = { };
	uint8_t k1[AES_BLOCK_SIZE] = { };
	uint8_t k2[AES_BLOCK_SIZE] = { };
	uint8_t l[AES_BLOCK_SIZE] = { };
	size_t processed = 0;
	uint8_t bit = 0;
	int i = 0;

	if (!ctx)
		return TEE_ERROR_BAD_PARAMETERS;

	/* Get K1 and K2 */
	res = stm32_saes_init(ctx, false, STM32_SAES_MODE_ECB, key_sel,
			      key, key_size, NULL, 0);
	if (res)
		return res;

	res = stm32_saes_update(ctx, true, l, l, sizeof(l));
	if (res)
		return res;

	/* MSB(L) == 0 => K1 = L << 1 */
	bit = 0;
	for (i = sizeof(l) - 1; i >= 0; i--) {
		k1[i] = (l[i] << 1) | bit;
		bit = (l[i] & 0x80) >> 7;
	}
	/* MSB(L) == 1 => K1 = (L << 1) XOR const_Rb */
	if ((l[0] & 0x80))
		k1[sizeof(k1) - 1] ^= 0x87;

	/* MSB(K1) == 0 => K2 = K1 << 1 */
	bit = 0;
	for (i = sizeof(k1) - 1; i >= 0; i--) {
		k2[i] = (k1[i] << 1) | bit;
		bit = (k1[i] & 0x80) >> 7;
	}
	/* MSB(K1) == 1 => K2 = (K1 << 1) XOR const_Rb */
	if ((k1[0] & 0x80))
		k2[sizeof(k2) - 1] ^= 0x87;

	if (data_size > AES_BLOCK_SIZE) {
		uint8_t *data_out = NULL;

		/* All blocks but the last, in CBC mode */
		res = stm32_saes_init(ctx, false, STM32_SAES_MODE_CBC,
				      key_sel, key, key_size, block,
				      sizeof(block));
		if (res)
			return res;

		processed = ROUNDDOWN(data_size - 1, AES_BLOCK_SIZE);
		data_out = malloc(processed);
		if (!data_out)
			return TEE_ERROR_OUT_OF_MEMORY;

		res = stm32_saes_update(ctx, true, data, data_out, processed);
		if (!res) {
			/* Copy the last output block or keep block as { 0 } */
			memcpy(block, data_out + processed - AES_BLOCK_SIZE,
			       AES_BLOCK_SIZE);
		}

		free(data_out);

		if (res)
			return res;
	}

	/* Manage the last block */
	xor_block(block, data + processed, data_size - processed);
	if (data_size - processed == AES_BLOCK_SIZE) {
		xor_block(block, k1, AES_BLOCK_SIZE);
	} else {
		/* XOR with the padding 0b100..0 */
		block[data_size - processed] ^= 0x80;
		xor_block(block, k2, AES_BLOCK_SIZE);
	}

	/*
	 * AES the last block.
	 * We must use the same chaining mode so that the key stays the
	 * same when DHUK is selected, so we reuse @l as a zeroed IV.
	 */
	memset(l, 0, sizeof(l));
	res = stm32_saes_init(ctx, false, STM32_SAES_MODE_CBC, key_sel, key,
			      key_size, l, sizeof(l));
	if (res)
		return res;

	return stm32_saes_update(ctx, true, block, out, AES_BLOCK_SIZE);
}
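/*
 * Key derivation function in counter mode (in the style of
 * NIST SP 800-108), built on the CMAC PRF above:
 *	K(i) = CMAC(key, [i]_be32 || input), i = 1, 2, ...
 *	subkey = leftmost @subkey_size bytes of K(1) || K(2) || ...
 */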
TEE_Result stm32_saes_kdf(struct stm32_saes_context *ctx,
			  enum stm32_saes_key_selection key_sel,
			  const void *key, size_t key_size,
			  const void *input, size_t input_size,
			  uint8_t *subkey, size_t subkey_size)
{
	TEE_Result res = TEE_SUCCESS;
	uint32_t index = 0;
	uint32_t index_be = 0;
	uint8_t *data = NULL;
	size_t data_index = 0;
	size_t subkey_index = 0;
	size_t data_size = input_size + sizeof(index_be);
	uint8_t cmac[AES_BLOCK_SIZE] = { };

	if (!ctx || !input || !input_size)
		return TEE_ERROR_BAD_PARAMETERS;

	/* Each K(i) computation prepends a 32-bit index to the input */
	data = malloc(data_size);
	if (!data)
		return TEE_ERROR_OUT_OF_MEMORY;

	data_index = 0;
	index_be = TEE_U32_TO_BIG_ENDIAN(index);
	memcpy(data + data_index, &index_be, sizeof(index_be));
	data_index += sizeof(index_be);
	memcpy(data + data_index, input, input_size);
	data_index += input_size;

	/* K(i) computation */
	index = 0;
	while (subkey_index < subkey_size) {
		index++;
		index_be = TEE_U32_TO_BIG_ENDIAN(index);
		memcpy(data, &index_be, sizeof(index_be));

		res = stm32_saes_cmac_prf_128(ctx, key_sel, key, key_size,
					      data, data_size, cmac);
		if (res)
			goto out;

		memcpy(subkey + subkey_index, cmac,
		       MIN(subkey_size - subkey_index, sizeof(cmac)));
		subkey_index += sizeof(cmac);
	}

out:
	free(data);
	if (res)
		memzero_explicit(subkey, subkey_size);

	return res;
}
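/*
 * The KDF input built below is:
 *	const_data || 0x00 || usage || [subkey_len in bits]_be32 || K(0)
 * (const_data and its 0x00 separator are only present when const_data
 * is provided). K(0) is the CMAC of the preceding bytes under the DHUK,
 * which binds the derivation to the device key before the indexed K(i)
 * blocks are generated.
 */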
/* Implement hardware HUK derivation using SAES resources */
TEE_Result huk_subkey_derive(enum huk_subkey_usage usage,
			     const void *const_data, size_t const_data_len,
			     uint8_t *subkey, size_t subkey_len)
{
	TEE_Result res = TEE_ERROR_GENERIC;
	uint8_t *input = NULL;
	size_t input_index = 0;
	uint32_t subkey_bitlen = 0;
	struct stm32_saes_context ctx = { };
	uint8_t separator = 0;

	/* Check if the driver is probed */
	if (!saes_pdata.base) {
		return __huk_subkey_derive(usage, const_data, const_data_len,
					   subkey, subkey_len);
	}

	input = malloc(const_data_len + sizeof(separator) + sizeof(usage) +
		       sizeof(subkey_bitlen) + AES_BLOCK_SIZE);
	if (!input)
		return TEE_ERROR_OUT_OF_MEMORY;

	input_index = 0;
	if (const_data) {
		memcpy(input + input_index, const_data, const_data_len);
		input_index += const_data_len;

		memcpy(input + input_index, &separator, sizeof(separator));
		input_index += sizeof(separator);
	}

	memcpy(input + input_index, &usage, sizeof(usage));
	input_index += sizeof(usage);

	/*
	 * Append the subkey length in bits at the end of the input,
	 * as a big-endian (MSB first) uint32_t.
	 */
	subkey_bitlen = TEE_U32_TO_BIG_ENDIAN(subkey_len * INT8_BIT);
	memcpy(input + input_index, &subkey_bitlen, sizeof(subkey_bitlen));
	input_index += sizeof(subkey_bitlen);

	/*
	 * Compute K(0) to mitigate key control attacks and store it at
	 * the end of the input.
	 */
	res = stm32_saes_cmac_prf_128(&ctx, STM32_SAES_KEY_DHU, NULL,
				      AES_KEYSIZE_128, input, input_index,
				      input + input_index);
	if (res)
		goto out;

	/* We just appended K(0) to the input */
	input_index += AES_BLOCK_SIZE;

	res = stm32_saes_kdf(&ctx, STM32_SAES_KEY_DHU, NULL, AES_KEYSIZE_128,
			     input, input_index, subkey, subkey_len);

out:
	free(input);
	return res;
}

static TEE_Result stm32_saes_parse_fdt(struct stm32_saes_platdata *pdata,
				       const void *fdt, int node)
{
	struct dt_node_info dt_saes = { };
	TEE_Result res = TEE_ERROR_GENERIC;

	dt_saes.reg = fdt_reg_base_address(fdt, node);
	dt_saes.reg_size = fdt_reg_size(fdt, node);

	if (dt_saes.reg == DT_INFO_INVALID_REG ||
	    dt_saes.reg_size == DT_INFO_INVALID_REG_SIZE)
		return TEE_ERROR_BAD_PARAMETERS;

	res = clk_dt_get_by_index(fdt, node, 0, &pdata->clk);
	if (res != TEE_SUCCESS)
		return res;

	res = rstctrl_dt_get_by_index(fdt, node, 0, &pdata->reset);
	if (res != TEE_SUCCESS && res != TEE_ERROR_ITEM_NOT_FOUND)
		return res;

	pdata->base = (vaddr_t)phys_to_virt(dt_saes.reg, MEM_AREA_IO_SEC,
					    dt_saes.reg_size);
	if (!pdata->base)
		panic();

	return TEE_SUCCESS;
}

static TEE_Result stm32_saes_probe(const void *fdt, int node,
				   const void *compat_data __unused)
{
	TEE_Result res = TEE_SUCCESS;

	assert(!saes_pdata.base);

	res = stm32_saes_parse_fdt(&saes_pdata, fdt, node);
	if (res)
		return res;

	if (clk_enable(saes_pdata.clk))
		panic();

	if (saes_pdata.reset) {
		/* External reset of SAES */
		if (rstctrl_assert_to(saes_pdata.reset, TIMEOUT_US_1MS))
			panic();

		udelay(SAES_RESET_DELAY);

		if (rstctrl_deassert_to(saes_pdata.reset, TIMEOUT_US_1MS))
			panic();
	} else {
		/* Internal reset of SAES */
		io_setbits32(saes_pdata.base + _SAES_CR, _SAES_CR_IPRST);
		udelay(SAES_RESET_DELAY);
		io_clrbits32(saes_pdata.base + _SAES_CR, _SAES_CR_IPRST);
	}

	if (IS_ENABLED(CFG_CRYPTO_DRV_CIPHER)) {
		res = stm32_register_cipher(SAES_IP);
		if (res) {
			EMSG("Failed to register to cipher: %#"PRIx32, res);
			panic();
		}
	}

	return TEE_SUCCESS;
}

static const struct dt_device_match saes_match_table[] = {
	{ .compatible = "st,stm32mp13-saes" },
	{ }
};

DEFINE_DT_DRIVER(stm32_saes_dt_driver) = {
	.name = "stm32-saes",
	.match_table = saes_match_table,
	.probe = stm32_saes_probe,
};
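/*
 * Illustrative device tree node consumed by this driver. The address,
 * size and RCC phandle/identifiers below are examples only, not taken
 * from a real SoC device tree:
 *
 *	saes: saes@54005000 {
 *		compatible = "st,stm32mp13-saes";
 *		reg = <0x54005000 0x400>;
 *		clocks = <&rcc SAES_K>;
 *		resets = <&rcc SAES_R>;
 *	};
 */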