1 // SPDX-License-Identifier: BSD-2-Clause 2 /* 3 * Copyright (c) 2021-2023, STMicroelectronics - All Rights Reserved 4 */ 5 #include <assert.h> 6 #include <config.h> 7 #include <drivers/clk.h> 8 #include <drivers/clk_dt.h> 9 #include <drivers/rstctrl.h> 10 #include <io.h> 11 #include <kernel/boot.h> 12 #include <kernel/delay.h> 13 #include <kernel/dt.h> 14 #include <kernel/huk_subkey.h> 15 #include <kernel/mutex.h> 16 #include <libfdt.h> 17 #include <mm/core_memprot.h> 18 #include <stdint.h> 19 #include <stm32_util.h> 20 #include <string_ext.h> 21 #include <utee_defines.h> 22 #include <util.h> 23 24 #include "common.h" 25 #include "stm32_saes.h" 26 27 /* SAES control register */ 28 #define _SAES_CR U(0x0) 29 /* SAES status register */ 30 #define _SAES_SR U(0x04) 31 /* SAES data input register */ 32 #define _SAES_DINR U(0x08) 33 /* SAES data output register */ 34 #define _SAES_DOUTR U(0x0c) 35 /* SAES key registers [0-3] */ 36 #define _SAES_KEYR0 U(0x10) 37 #define _SAES_KEYR1 U(0x14) 38 #define _SAES_KEYR2 U(0x18) 39 #define _SAES_KEYR3 U(0x1c) 40 /* SAES initialization vector registers [0-3] */ 41 #define _SAES_IVR0 U(0x20) 42 #define _SAES_IVR1 U(0x24) 43 #define _SAES_IVR2 U(0x28) 44 #define _SAES_IVR3 U(0x2c) 45 /* SAES key registers [4-7] */ 46 #define _SAES_KEYR4 U(0x30) 47 #define _SAES_KEYR5 U(0x34) 48 #define _SAES_KEYR6 U(0x38) 49 #define _SAES_KEYR7 U(0x3c) 50 /* SAES suspend registers [0-7] */ 51 #define _SAES_SUSPR0 U(0x40) 52 #define _SAES_SUSPR1 U(0x44) 53 #define _SAES_SUSPR2 U(0x48) 54 #define _SAES_SUSPR3 U(0x4c) 55 #define _SAES_SUSPR4 U(0x50) 56 #define _SAES_SUSPR5 U(0x54) 57 #define _SAES_SUSPR6 U(0x58) 58 #define _SAES_SUSPR7 U(0x5c) 59 /* SAES Interrupt Enable Register */ 60 #define _SAES_IER U(0x300) 61 /* SAES Interrupt Status Register */ 62 #define _SAES_ISR U(0x304) 63 /* SAES Interrupt Clear Register */ 64 #define _SAES_ICR U(0x308) 65 66 /* SAES control register fields */ 67 #define _SAES_CR_RESET_VALUE U(0x0) 68 #define _SAES_CR_IPRST BIT(31) 69 #define _SAES_CR_KEYSEL_MASK GENMASK_32(30, 28) 70 #define _SAES_CR_KEYSEL_SHIFT U(28) 71 #define _SAES_CR_KEYSEL_SOFT U(0x0) 72 #define _SAES_CR_KEYSEL_DHUK U(0x1) 73 #define _SAES_CR_KEYSEL_BHK U(0x2) 74 #define _SAES_CR_KEYSEL_BHU_XOR_BH_K U(0x4) 75 #define _SAES_CR_KEYSEL_TEST U(0x7) 76 #define _SAES_CR_KSHAREID_MASK GENMASK_32(27, 26) 77 #define _SAES_CR_KSHAREID_SHIFT U(26) 78 #define _SAES_CR_KSHAREID_CRYP U(0x0) 79 #define _SAES_CR_KEYMOD_MASK GENMASK_32(25, 24) 80 #define _SAES_CR_KEYMOD_SHIFT U(24) 81 #define _SAES_CR_KEYMOD_NORMAL U(0x0) 82 #define _SAES_CR_KEYMOD_WRAPPED U(0x1) 83 #define _SAES_CR_KEYMOD_SHARED U(0x2) 84 #define _SAES_CR_NPBLB_MASK GENMASK_32(23, 20) 85 #define _SAES_CR_NPBLB_SHIFT U(20) 86 #define _SAES_CR_KEYPROT BIT(19) 87 #define _SAES_CR_KEYSIZE BIT(18) 88 #define _SAES_CR_GCMPH_MASK GENMASK_32(14, 13) 89 #define _SAES_CR_GCMPH_SHIFT U(13) 90 #define _SAES_CR_GCMPH_INIT U(0) 91 #define _SAES_CR_GCMPH_HEADER U(1) 92 #define _SAES_CR_GCMPH_PAYLOAD U(2) 93 #define _SAES_CR_GCMPH_FINAL U(3) 94 #define _SAES_CR_DMAOUTEN BIT(12) 95 #define _SAES_CR_DMAINEN BIT(11) 96 #define _SAES_CR_CHMOD_MASK (BIT(16) | GENMASK_32(6, 5)) 97 #define _SAES_CR_CHMOD_SHIFT U(5) 98 #define _SAES_CR_CHMOD_ECB U(0x0) 99 #define _SAES_CR_CHMOD_CBC U(0x1) 100 #define _SAES_CR_CHMOD_CTR U(0x2) 101 #define _SAES_CR_CHMOD_GCM U(0x3) 102 #define _SAES_CR_CHMOD_GMAC U(0x3) 103 #define _SAES_CR_CHMOD_CCM U(0x800) 104 #define _SAES_CR_MODE_MASK GENMASK_32(4, 3) 105 #define _SAES_CR_MODE_SHIFT U(3) 106 #define _SAES_CR_MODE_ENC U(0) 107 #define _SAES_CR_MODE_KEYPREP U(1) 108 #define _SAES_CR_MODE_DEC U(2) 109 #define _SAES_CR_DATATYPE_MASK GENMASK_32(2, 1) 110 #define _SAES_CR_DATATYPE_SHIFT U(1) 111 #define _SAES_CR_DATATYPE_NONE U(0) 112 #define _SAES_CR_DATATYPE_HALF_WORD U(1) 113 #define _SAES_CR_DATATYPE_BYTE U(2) 114 #define _SAES_CR_DATATYPE_BIT U(3) 115 #define _SAES_CR_EN BIT(0) 116 117 /* SAES status register fields */ 118 #define _SAES_SR_KEYVALID BIT(7) 119 #define _SAES_SR_BUSY BIT(3) 120 #define _SAES_SR_WRERR BIT(2) 121 #define _SAES_SR_RDERR BIT(1) 122 #define _SAES_SR_CCF BIT(0) 123 124 /* SAES interrupt registers fields */ 125 #define _SAES_I_RNG_ERR BIT(3) 126 #define _SAES_I_KEY_ERR BIT(2) 127 #define _SAES_I_RW_ERR BIT(1) 128 #define _SAES_I_CC BIT(0) 129 130 #define SAES_TIMEOUT_US U(100000) 131 #define TIMEOUT_US_1MS U(1000) 132 #define SAES_RESET_DELAY U(2) 133 134 #define IS_CHAINING_MODE(mode, cr) \ 135 (((cr) & _SAES_CR_CHMOD_MASK) == (_SAES_CR_CHMOD_##mode << \ 136 _SAES_CR_CHMOD_SHIFT)) 137 138 #define SET_CHAINING_MODE(mode, cr) \ 139 set_field_u32(cr, _SAES_CR_CHMOD_MASK, _SAES_CR_CHMOD_##mode) 140 141 static struct mutex saes_lock = MUTEX_INITIALIZER; 142 static struct stm32_saes_platdata { 143 vaddr_t base; 144 struct clk *clk; 145 struct rstctrl *reset; 146 } saes_pdata; 147 148 static bool does_chaining_mode_need_iv(uint32_t cr) 149 { 150 return !IS_CHAINING_MODE(ECB, cr); 151 } 152 153 static bool is_encrypt(uint32_t cr) 154 { 155 return (cr & _SAES_CR_MODE_MASK) == 156 SHIFT_U32(_SAES_CR_MODE_ENC, _SAES_CR_MODE_SHIFT); 157 } 158 159 static bool is_decrypt(uint32_t cr) 160 { 161 return (cr & _SAES_CR_MODE_MASK) == 162 SHIFT_U32(_SAES_CR_MODE_DEC, _SAES_CR_MODE_SHIFT); 163 } 164 165 static bool does_need_npblb(uint32_t cr) 166 { 167 return (IS_CHAINING_MODE(GCM, cr) && is_encrypt(cr)) || 168 (IS_CHAINING_MODE(CCM, cr) && is_decrypt(cr)); 169 } 170 171 static bool can_suspend(uint32_t cr) 172 { 173 return !IS_CHAINING_MODE(GCM, cr); 174 } 175 176 static void write_aligned_block(vaddr_t base, uint32_t *data) 177 { 178 unsigned int i = 0; 179 180 /* SAES is configured to swap bytes as expected */ 181 for (i = 0; i < AES_BLOCK_NB_U32; i++) 182 io_write32(base + _SAES_DINR, data[i]); 183 } 184 185 static void write_block(vaddr_t base, uint8_t *data) 186 { 187 if (IS_ALIGNED_WITH_TYPE(data, uint32_t)) { 188 write_aligned_block(base, (void *)data); 189 } else { 190 uint32_t data_u32[AES_BLOCK_NB_U32] = { }; 191 192 memcpy(data_u32, data, sizeof(data_u32)); 193 write_aligned_block(base, data_u32); 194 } 195 } 196 197 static void read_aligned_block(vaddr_t base, uint32_t *data) 198 { 199 unsigned int i = 0; 200 201 /* SAES is configured to swap bytes as expected */ 202 for (i = 0; i < AES_BLOCK_NB_U32; i++) 203 data[i] = io_read32(base + _SAES_DOUTR); 204 } 205 206 static void read_block(vaddr_t base, uint8_t *data) 207 { 208 if (IS_ALIGNED_WITH_TYPE(data, uint32_t)) { 209 read_aligned_block(base, (void *)data); 210 } else { 211 uint32_t data_u32[AES_BLOCK_NB_U32] = { }; 212 213 read_aligned_block(base, data_u32); 214 215 memcpy(data, data_u32, sizeof(data_u32)); 216 } 217 } 218 219 static TEE_Result wait_computation_completed(vaddr_t base) 220 { 221 uint64_t timeout_ref = timeout_init_us(SAES_TIMEOUT_US); 222 223 while ((io_read32(base + _SAES_SR) & _SAES_SR_CCF) != _SAES_SR_CCF) 224 if (timeout_elapsed(timeout_ref)) 225 break; 226 227 if ((io_read32(base + _SAES_SR) & _SAES_SR_CCF) != _SAES_SR_CCF) { 228 DMSG("CCF timeout"); 229 return TEE_ERROR_GENERIC; 230 } 231 232 return TEE_SUCCESS; 233 } 234 235 static void clear_computation_completed(uintptr_t base) 236 { 237 io_setbits32(base + _SAES_ICR, _SAES_I_CC); 238 } 239 240 static TEE_Result wait_key_valid(vaddr_t base) 241 { 242 uint64_t timeout_ref = timeout_init_us(SAES_TIMEOUT_US); 243 244 while (!(io_read32(base + _SAES_SR) & _SAES_SR_KEYVALID)) 245 if (timeout_elapsed(timeout_ref)) 246 break; 247 248 if (!(io_read32(base + _SAES_SR) & _SAES_SR_KEYVALID)) { 249 DMSG("CCF timeout"); 250 return TEE_ERROR_GENERIC; 251 } 252 253 return TEE_SUCCESS; 254 } 255 256 static TEE_Result saes_start(struct stm32_saes_context *ctx) 257 { 258 uint64_t timeout_ref = 0; 259 260 /* Reset SAES */ 261 io_setbits32(ctx->base + _SAES_CR, _SAES_CR_IPRST); 262 io_clrbits32(ctx->base + _SAES_CR, _SAES_CR_IPRST); 263 264 timeout_ref = timeout_init_us(SAES_TIMEOUT_US); 265 while (io_read32(ctx->base + _SAES_SR) & _SAES_SR_BUSY) 266 if (timeout_elapsed(timeout_ref)) 267 break; 268 269 if (io_read32(ctx->base + _SAES_SR) & _SAES_SR_BUSY) { 270 DMSG("busy timeout"); 271 return TEE_ERROR_GENERIC; 272 } 273 274 return TEE_SUCCESS; 275 } 276 277 static void saes_end(struct stm32_saes_context *ctx, int prev_error) 278 { 279 if (prev_error) { 280 /* Reset SAES */ 281 io_setbits32(ctx->base + _SAES_CR, _SAES_CR_IPRST); 282 io_clrbits32(ctx->base + _SAES_CR, _SAES_CR_IPRST); 283 } 284 285 /* Disable the SAES peripheral */ 286 io_clrbits32(ctx->base + _SAES_CR, _SAES_CR_EN); 287 } 288 289 static void saes_write_iv(struct stm32_saes_context *ctx) 290 { 291 /* If chaining mode need to restore IV */ 292 if (does_chaining_mode_need_iv(ctx->cr)) { 293 unsigned int i = 0; 294 295 for (i = 0; i < AES_IVSIZE / sizeof(uint32_t); i++) { 296 io_write32(ctx->base + _SAES_IVR0 + i * 297 sizeof(uint32_t), ctx->iv[i]); 298 } 299 } 300 } 301 302 static void saes_save_suspend(struct stm32_saes_context *ctx) 303 { 304 size_t i = 0; 305 306 for (i = 0; i < 8; i++) 307 ctx->susp[i] = io_read32(ctx->base + _SAES_SUSPR0 + 308 i * sizeof(uint32_t)); 309 } 310 311 static void saes_restore_suspend(struct stm32_saes_context *ctx) 312 { 313 size_t i = 0; 314 315 for (i = 0; i < 8; i++) 316 io_write32(ctx->base + _SAES_SUSPR0 + i * sizeof(uint32_t), 317 ctx->susp[i]); 318 } 319 320 static void saes_write_key(struct stm32_saes_context *ctx) 321 { 322 /* Restore the _SAES_KEYRx if SOFTWARE key */ 323 if ((ctx->cr & _SAES_CR_KEYSEL_MASK) == 324 SHIFT_U32(_SAES_CR_KEYSEL_SOFT, _SAES_CR_KEYSEL_SHIFT)) { 325 size_t i = 0; 326 327 for (i = 0; i < AES_KEYSIZE_128 / sizeof(uint32_t); i++) 328 io_write32(ctx->base + _SAES_KEYR0 + i * 329 sizeof(uint32_t), 330 ctx->key[i]); 331 332 if ((ctx->cr & _SAES_CR_KEYSIZE) == _SAES_CR_KEYSIZE) { 333 for (i = 0; 334 i < (AES_KEYSIZE_256 / 2) / sizeof(uint32_t); 335 i++) { 336 io_write32(ctx->base + _SAES_KEYR4 + i * 337 sizeof(uint32_t), 338 ctx->key[i + 4]); 339 } 340 } 341 } 342 } 343 344 static TEE_Result saes_prepare_key(struct stm32_saes_context *ctx) 345 { 346 TEE_Result res = TEE_ERROR_GENERIC; 347 348 /* Disable the SAES peripheral */ 349 io_clrbits32(ctx->base + _SAES_CR, _SAES_CR_EN); 350 351 /* Set key size */ 352 if ((ctx->cr & _SAES_CR_KEYSIZE)) 353 io_setbits32(ctx->base + _SAES_CR, _SAES_CR_KEYSIZE); 354 else 355 io_clrbits32(ctx->base + _SAES_CR, _SAES_CR_KEYSIZE); 356 357 saes_write_key(ctx); 358 359 res = wait_key_valid(ctx->base); 360 if (res) 361 return res; 362 363 /* 364 * For ECB/CBC decryption, key preparation mode must be selected 365 * to populate the key. 366 */ 367 if ((IS_CHAINING_MODE(ECB, ctx->cr) || 368 IS_CHAINING_MODE(CBC, ctx->cr)) && is_decrypt(ctx->cr)) { 369 /* Select Mode 2 */ 370 io_clrsetbits32(ctx->base + _SAES_CR, _SAES_CR_MODE_MASK, 371 SHIFT_U32(_SAES_CR_MODE_KEYPREP, 372 _SAES_CR_MODE_SHIFT)); 373 374 /* Enable SAES */ 375 io_setbits32(ctx->base + _SAES_CR, _SAES_CR_EN); 376 377 res = wait_computation_completed(ctx->base); 378 if (res) 379 return res; 380 381 clear_computation_completed(ctx->base); 382 383 /* Set Mode 3 */ 384 io_clrsetbits32(ctx->base + _SAES_CR, _SAES_CR_MODE_MASK, 385 SHIFT_U32(_SAES_CR_MODE_DEC, 386 _SAES_CR_MODE_SHIFT)); 387 } 388 389 return TEE_SUCCESS; 390 } 391 392 static TEE_Result save_context(struct stm32_saes_context *ctx) 393 { 394 if ((io_read32(ctx->base + _SAES_SR) & _SAES_SR_CCF)) { 395 /* Device should not be in a processing phase */ 396 return TEE_ERROR_BAD_STATE; 397 } 398 399 /* Save CR */ 400 ctx->cr = io_read32(ctx->base + _SAES_CR); 401 402 if (!can_suspend(ctx->cr)) 403 return TEE_SUCCESS; 404 405 saes_save_suspend(ctx); 406 407 /* If chaining mode need to save current IV */ 408 if (does_chaining_mode_need_iv(ctx->cr)) { 409 uint8_t i = 0; 410 411 /* Save IV */ 412 for (i = 0; i < AES_IVSIZE / sizeof(uint32_t); i++) { 413 ctx->iv[i] = io_read32(ctx->base + _SAES_IVR0 + i * 414 sizeof(uint32_t)); 415 } 416 } 417 418 /* Disable the SAES peripheral */ 419 io_clrbits32(ctx->base + _SAES_CR, _SAES_CR_EN); 420 421 return TEE_SUCCESS; 422 } 423 424 /* To resume the processing of a message */ 425 static TEE_Result restore_context(struct stm32_saes_context *ctx) 426 { 427 TEE_Result res = TEE_SUCCESS; 428 429 /* SAES shall be disabled */ 430 if ((io_read32(ctx->base + _SAES_CR) & _SAES_CR_EN)) { 431 DMSG("Device is still enabled"); 432 return TEE_ERROR_BAD_STATE; 433 } 434 435 /* Reset internal state */ 436 io_setbits32(ctx->base + _SAES_CR, _SAES_CR_IPRST); 437 438 /* Restore configuration register */ 439 io_write32(ctx->base + _SAES_CR, ctx->cr); 440 441 /* Write key and, in case of CBC or ECB decrypt, prepare it */ 442 res = saes_prepare_key(ctx); 443 if (res) 444 return res; 445 446 saes_restore_suspend(ctx); 447 448 saes_write_iv(ctx); 449 450 /* Enable the SAES peripheral */ 451 io_setbits32(ctx->base + _SAES_CR, _SAES_CR_EN); 452 453 return TEE_SUCCESS; 454 } 455 456 static TEE_Result do_from_init_to_phase(struct stm32_saes_context *ctx, 457 uint32_t new_phase) 458 { 459 TEE_Result res = TEE_SUCCESS; 460 461 /* We didn't run the init phase yet */ 462 res = restore_context(ctx); 463 if (res) 464 return res; 465 466 res = wait_computation_completed(ctx->base); 467 if (res) 468 return res; 469 470 clear_computation_completed(ctx->base); 471 472 /* Move to 'new_phase' */ 473 io_clrsetbits32(ctx->base + _SAES_CR, _SAES_CR_GCMPH_MASK, 474 SHIFT_U32(new_phase, _SAES_CR_GCMPH_SHIFT)); 475 476 /* Enable the SAES peripheral (init disabled it) */ 477 io_setbits32(ctx->base + _SAES_CR, _SAES_CR_EN); 478 479 return TEE_SUCCESS; 480 } 481 482 static TEE_Result do_from_header_to_phase(struct stm32_saes_context *ctx, 483 uint32_t new_phase) 484 { 485 TEE_Result res = TEE_SUCCESS; 486 487 if (can_suspend(ctx->cr)) { 488 res = restore_context(ctx); 489 if (res) 490 return res; 491 } 492 493 if (ctx->extra_size) { 494 /* Manage unaligned header data before moving to next phase */ 495 memset((uint8_t *)ctx->extra + ctx->extra_size, 0, 496 AES_BLOCK_SIZE - ctx->extra_size); 497 498 write_aligned_block(ctx->base, ctx->extra); 499 500 res = wait_computation_completed(ctx->base); 501 if (res) 502 return res; 503 504 clear_computation_completed(ctx->base); 505 506 ctx->assoc_len += ctx->extra_size * INT8_BIT; 507 ctx->extra_size = U(0); 508 } 509 510 /* Move to 'new_phase' */ 511 io_clrsetbits32(ctx->base + _SAES_CR, _SAES_CR_GCMPH_MASK, 512 SHIFT_U32(new_phase, _SAES_CR_GCMPH_SHIFT)); 513 514 return TEE_SUCCESS; 515 } 516 517 /** 518 * @brief Start an AES computation. 519 * @param ctx: SAES process context 520 * @param is_dec: true if decryption, false if encryption 521 * @param ch_mode: define the chaining mode 522 * @param key_select: define where the key comes from 523 * @param key: pointer to key (if key_select is KEY_SOFT, else unused) 524 * @param key_size: key size 525 * @param iv: pointer to initialization vector (unused if ch_mode is ECB) 526 * @param iv_size: iv size 527 * @note this function doesn't access to hardware but stores in ctx the values 528 * 529 * @retval TEE_SUCCESS if OK or a TEE_Result compliant code. 530 */ 531 TEE_Result stm32_saes_init(struct stm32_saes_context *ctx, bool is_dec, 532 enum stm32_saes_chaining_mode ch_mode, 533 enum stm32_saes_key_selection key_select, 534 const void *key, size_t key_size, const void *iv, 535 size_t iv_size) 536 { 537 const uint32_t *key_u32 = NULL; 538 const uint32_t *iv_u32 = NULL; 539 uint32_t local_key[8] = { }; 540 uint32_t local_iv[4] = { }; 541 unsigned int i = 0; 542 543 if (!ctx) 544 return TEE_ERROR_BAD_PARAMETERS; 545 546 *ctx = (struct stm32_saes_context){ 547 .lock = &saes_lock, 548 .base = saes_pdata.base, 549 .cr = _SAES_CR_RESET_VALUE 550 }; 551 552 /* We want buffer to be u32 aligned */ 553 if (IS_ALIGNED_WITH_TYPE(key, uint32_t)) { 554 key_u32 = key; 555 } else { 556 memcpy(local_key, key, key_size); 557 key_u32 = local_key; 558 } 559 560 if (IS_ALIGNED_WITH_TYPE(iv, uint32_t)) { 561 iv_u32 = iv; 562 } else { 563 memcpy(local_iv, iv, iv_size); 564 iv_u32 = local_iv; 565 } 566 567 if (is_dec) 568 ctx->cr |= set_field_u32(ctx->cr, _SAES_CR_MODE_MASK, 569 _SAES_CR_MODE_DEC); 570 else 571 ctx->cr |= set_field_u32(ctx->cr, _SAES_CR_MODE_MASK, 572 _SAES_CR_MODE_ENC); 573 574 /* Save chaining mode */ 575 switch (ch_mode) { 576 case STM32_SAES_MODE_ECB: 577 ctx->cr |= SET_CHAINING_MODE(ECB, ctx->cr); 578 break; 579 case STM32_SAES_MODE_CBC: 580 ctx->cr |= SET_CHAINING_MODE(CBC, ctx->cr); 581 break; 582 case STM32_SAES_MODE_CTR: 583 ctx->cr |= SET_CHAINING_MODE(CTR, ctx->cr); 584 break; 585 case STM32_SAES_MODE_GCM: 586 ctx->cr |= SET_CHAINING_MODE(GCM, ctx->cr); 587 break; 588 case STM32_SAES_MODE_CCM: 589 ctx->cr |= SET_CHAINING_MODE(CCM, ctx->cr); 590 break; 591 default: 592 return TEE_ERROR_BAD_PARAMETERS; 593 } 594 595 /* 596 * We will use HW Byte swap (_SAES_CR_DATATYPE_BYTE) for data. 597 * So we won't need to 598 * TEE_U32_TO_BIG_ENDIAN(data) before write to DINR 599 * nor 600 * TEE_U32_FROM_BIG_ENDIAN after reading from DOUTR. 601 * 602 * But note that wrap key only accept _SAES_CR_DATATYPE_NONE. 603 */ 604 ctx->cr |= set_field_u32(ctx->cr, _SAES_CR_DATATYPE_MASK, 605 _SAES_CR_DATATYPE_BYTE); 606 607 /* Configure keysize */ 608 switch (key_size) { 609 case AES_KEYSIZE_128: 610 ctx->cr &= ~_SAES_CR_KEYSIZE; 611 break; 612 case AES_KEYSIZE_256: 613 ctx->cr |= _SAES_CR_KEYSIZE; 614 break; 615 default: 616 return TEE_ERROR_BAD_PARAMETERS; 617 } 618 619 /* Configure key */ 620 switch (key_select) { 621 case STM32_SAES_KEY_SOFT: 622 ctx->cr |= set_field_u32(ctx->cr, _SAES_CR_KEYSEL_MASK, 623 _SAES_CR_KEYSEL_SOFT); 624 /* Save key */ 625 switch (key_size) { 626 case AES_KEYSIZE_128: 627 /* First 16 bytes == 4 u32 */ 628 for (i = 0; i < AES_KEYSIZE_128 / sizeof(uint32_t); 629 i++) { 630 ctx->key[i] = 631 TEE_U32_TO_BIG_ENDIAN(key_u32[3 - i]); 632 /* 633 * /!\ we save the key in HW byte order 634 * and word order: key[i] is for _SAES_KEYRi. 635 */ 636 } 637 break; 638 case AES_KEYSIZE_256: 639 for (i = 0; i < AES_KEYSIZE_256 / sizeof(uint32_t); 640 i++) { 641 ctx->key[i] = 642 TEE_U32_TO_BIG_ENDIAN(key_u32[7 - i]); 643 /* 644 * /!\ we save the key in HW byte order 645 * and word order: key[i] is for _SAES_KEYRi. 646 */ 647 } 648 break; 649 default: 650 return TEE_ERROR_BAD_PARAMETERS; 651 } 652 break; 653 case STM32_SAES_KEY_DHU: 654 ctx->cr |= set_field_u32(ctx->cr, _SAES_CR_KEYSEL_MASK, 655 _SAES_CR_KEYSEL_DHUK); 656 break; 657 case STM32_SAES_KEY_BH: 658 ctx->cr |= set_field_u32(ctx->cr, _SAES_CR_KEYSEL_MASK, 659 _SAES_CR_KEYSEL_BHK); 660 break; 661 case STM32_SAES_KEY_BHU_XOR_BH: 662 ctx->cr |= set_field_u32(ctx->cr, _SAES_CR_KEYSEL_MASK, 663 _SAES_CR_KEYSEL_BHU_XOR_BH_K); 664 break; 665 case STM32_SAES_KEY_WRAPPED: 666 ctx->cr |= set_field_u32(ctx->cr, _SAES_CR_KEYSEL_MASK, 667 _SAES_CR_KEYSEL_SOFT); 668 break; 669 670 default: 671 return TEE_ERROR_BAD_PARAMETERS; 672 } 673 674 /* Save IV */ 675 if (ch_mode != STM32_SAES_MODE_ECB) { 676 if (!iv || iv_size != AES_IVSIZE) 677 return TEE_ERROR_BAD_PARAMETERS; 678 679 for (i = 0; i < AES_IVSIZE / sizeof(uint32_t); i++) 680 ctx->iv[i] = TEE_U32_TO_BIG_ENDIAN(iv_u32[3 - i]); 681 } 682 683 /* Reset suspend registers */ 684 memset(ctx->susp, 0, sizeof(ctx->susp)); 685 686 return saes_start(ctx); 687 } 688 689 /** 690 * @brief Update (or start) an AES authentificate process of 691 * associated data (CCM or GCM). 692 * @param ctx: SAES process context 693 * @param data: pointer to associated data 694 * @param data_size: data size 695 * 696 * @retval 0 if OK. 697 */ 698 TEE_Result stm32_saes_update_assodata(struct stm32_saes_context *ctx, 699 uint8_t *data, size_t data_size) 700 { 701 TEE_Result res = TEE_SUCCESS; 702 unsigned int i = 0; 703 uint32_t previous_phase = 0; 704 705 if (!ctx) 706 return TEE_ERROR_BAD_PARAMETERS; 707 708 /* If no associated data, nothing to do */ 709 if (!data || !data_size) 710 return TEE_SUCCESS; 711 712 mutex_lock(ctx->lock); 713 714 previous_phase = (ctx->cr & _SAES_CR_GCMPH_MASK) >> 715 _SAES_CR_GCMPH_SHIFT; 716 717 switch (previous_phase) { 718 case _SAES_CR_GCMPH_INIT: 719 res = do_from_init_to_phase(ctx, _SAES_CR_GCMPH_HEADER); 720 break; 721 case _SAES_CR_GCMPH_HEADER: 722 /* 723 * Function update_assodata() was already called. 724 * We only need to restore the context. 725 */ 726 if (can_suspend(ctx->cr)) 727 res = restore_context(ctx); 728 729 break; 730 default: 731 DMSG("out of order call"); 732 res = TEE_ERROR_BAD_STATE; 733 } 734 735 if (res) 736 goto out; 737 738 /* Manage if remaining data from a previous update_assodata() call */ 739 if (ctx->extra_size && 740 ((ctx->extra_size + data_size) >= AES_BLOCK_SIZE)) { 741 uint32_t block[AES_BLOCK_NB_U32] = { }; 742 743 memcpy(block, ctx->extra, ctx->extra_size); 744 memcpy((uint8_t *)block + ctx->extra_size, data, 745 AES_BLOCK_SIZE - ctx->extra_size); 746 747 write_aligned_block(ctx->base, block); 748 749 res = wait_computation_completed(ctx->base); 750 if (res) 751 goto out; 752 753 clear_computation_completed(ctx->base); 754 755 i += AES_BLOCK_SIZE - ctx->extra_size; 756 ctx->extra_size = 0; 757 ctx->assoc_len += AES_BLOCK_SIZE_BIT; 758 } 759 760 while (data_size - i >= AES_BLOCK_SIZE) { 761 write_block(ctx->base, data + i); 762 763 res = wait_computation_completed(ctx->base); 764 if (res) 765 goto out; 766 767 clear_computation_completed(ctx->base); 768 769 /* Process next block */ 770 i += AES_BLOCK_SIZE; 771 ctx->assoc_len += AES_BLOCK_SIZE_BIT; 772 } 773 774 /* 775 * Manage last block if not a block size multiple: 776 * Save remaining data to manage them later (potentially with new 777 * associated data). 778 */ 779 if (i < data_size) { 780 memcpy((uint8_t *)ctx->extra + ctx->extra_size, data + i, 781 data_size - i); 782 ctx->extra_size += data_size - i; 783 } 784 785 res = save_context(ctx); 786 out: 787 if (res) 788 saes_end(ctx, res); 789 790 mutex_unlock(ctx->lock); 791 792 return res; 793 } 794 795 /** 796 * @brief Update (or start) an AES authenticate and de/encrypt with 797 * payload data (CCM or GCM). 798 * @param ctx: SAES process context 799 * @param last_block: true if last payload data block 800 * @param data_in: pointer to payload 801 * @param data_out: pointer where to save de/encrypted payload 802 * @param data_size: payload size 803 * 804 * @retval TEE_SUCCESS if OK. 805 */ 806 TEE_Result stm32_saes_update_load(struct stm32_saes_context *ctx, 807 bool last_block, uint8_t *data_in, 808 uint8_t *data_out, size_t data_size) 809 { 810 TEE_Result res = TEE_SUCCESS; 811 unsigned int i = 0; 812 uint32_t previous_phase = 0; 813 814 if (!ctx) 815 return TEE_ERROR_BAD_PARAMETERS; 816 817 /* If there is no data, nothing to do */ 818 if (!data_in || !data_size) 819 return TEE_SUCCESS; 820 821 mutex_lock(ctx->lock); 822 823 previous_phase = ((ctx->cr & _SAES_CR_GCMPH_MASK) >> 824 _SAES_CR_GCMPH_SHIFT); 825 826 switch (previous_phase) { 827 case _SAES_CR_GCMPH_INIT: 828 res = do_from_init_to_phase(ctx, _SAES_CR_GCMPH_PAYLOAD); 829 break; 830 case _SAES_CR_GCMPH_HEADER: 831 res = do_from_header_to_phase(ctx, _SAES_CR_GCMPH_PAYLOAD); 832 break; 833 case _SAES_CR_GCMPH_PAYLOAD: 834 /* new update_load call, we only need to restore context */ 835 if (can_suspend(ctx->cr)) 836 res = restore_context(ctx); 837 838 break; 839 default: 840 DMSG("out of order call"); 841 res = TEE_ERROR_BAD_STATE; 842 } 843 844 if (res) 845 goto out; 846 847 while (i < ROUNDDOWN(data_size, AES_BLOCK_SIZE)) { 848 write_block(ctx->base, data_in + i); 849 850 res = wait_computation_completed(ctx->base); 851 if (res) 852 goto out; 853 854 read_block(ctx->base, data_out + i); 855 856 clear_computation_completed(ctx->base); 857 858 /* Process next block */ 859 i += AES_BLOCK_SIZE; 860 ctx->load_len += AES_BLOCK_SIZE_BIT; 861 } 862 863 /* Manage last block if not a block size multiple */ 864 if (last_block && i < data_size) { 865 uint32_t block_in[AES_BLOCK_NB_U32] = { }; 866 uint32_t block_out[AES_BLOCK_NB_U32] = { }; 867 868 memcpy(block_in, data_in + i, data_size - i); 869 870 if (does_need_npblb(ctx->cr)) { 871 uint32_t npblb = AES_BLOCK_SIZE - (data_size - i); 872 873 io_clrsetbits32(ctx->base + _SAES_CR, 874 _SAES_CR_NPBLB_MASK, 875 SHIFT_U32(npblb, _SAES_CR_NPBLB_SHIFT)); 876 } 877 878 write_aligned_block(ctx->base, block_in); 879 880 res = wait_computation_completed(ctx->base); 881 if (res) 882 goto out; 883 884 read_aligned_block(ctx->base, block_out); 885 886 clear_computation_completed(ctx->base); 887 888 memcpy(data_out + i, block_out, data_size - i); 889 890 ctx->load_len += (data_size - i) * INT8_BIT; 891 } 892 893 res = save_context(ctx); 894 out: 895 if (res) 896 saes_end(ctx, res); 897 898 mutex_unlock(ctx->lock); 899 900 return res; 901 } 902 903 /** 904 * @brief Get authentication tag for AES authenticated algorithms (CCM or GCM). 905 * @param ctx: SAES process context 906 * @param tag: pointer where to save the tag 907 * @param data_size: tag size 908 * 909 * @retval TEE_SUCCESS if OK. 910 */ 911 TEE_Result stm32_saes_final(struct stm32_saes_context *ctx, uint8_t *tag, 912 size_t tag_size) 913 { 914 TEE_Result res = TEE_SUCCESS; 915 uint32_t tag_u32[4] = { }; 916 uint32_t previous_phase = 0; 917 918 if (!ctx) 919 return TEE_ERROR_BAD_PARAMETERS; 920 921 mutex_lock(ctx->lock); 922 923 previous_phase = (ctx->cr & _SAES_CR_GCMPH_MASK) >> 924 _SAES_CR_GCMPH_SHIFT; 925 926 switch (previous_phase) { 927 case _SAES_CR_GCMPH_INIT: 928 res = do_from_init_to_phase(ctx, _SAES_CR_GCMPH_FINAL); 929 break; 930 case _SAES_CR_GCMPH_HEADER: 931 res = do_from_header_to_phase(ctx, _SAES_CR_GCMPH_FINAL); 932 break; 933 case _SAES_CR_GCMPH_PAYLOAD: 934 if (can_suspend(ctx->cr)) 935 res = restore_context(ctx); 936 937 /* Move to final phase */ 938 io_clrsetbits32(ctx->base + _SAES_CR, _SAES_CR_GCMPH_MASK, 939 SHIFT_U32(_SAES_CR_GCMPH_FINAL, 940 _SAES_CR_GCMPH_SHIFT)); 941 break; 942 default: 943 DMSG("out of order call"); 944 res = TEE_ERROR_BAD_STATE; 945 } 946 if (res) 947 goto out; 948 949 if (IS_CHAINING_MODE(GCM, ctx->cr)) { 950 /* SAES is configured to swap bytes as expected */ 951 io_write32(ctx->base + _SAES_DINR, 0); 952 io_write32(ctx->base + _SAES_DINR, ctx->assoc_len); 953 io_write32(ctx->base + _SAES_DINR, 0); 954 io_write32(ctx->base + _SAES_DINR, ctx->load_len); 955 } 956 957 res = wait_computation_completed(ctx->base); 958 if (res) 959 goto out; 960 961 read_aligned_block(ctx->base, tag_u32); 962 963 clear_computation_completed(ctx->base); 964 965 memcpy(tag, tag_u32, MIN(sizeof(tag_u32), tag_size)); 966 967 out: 968 saes_end(ctx, res); 969 mutex_unlock(ctx->lock); 970 971 return res; 972 } 973 974 /** 975 * @brief Update (or start) an AES de/encrypt process (ECB, CBC or CTR). 976 * @param ctx: SAES process context 977 * @param last_block: true if last payload data block 978 * @param data_in: pointer to payload 979 * @param data_out: pointer where to save de/encrypted payload 980 * @param data_size: payload size 981 * 982 * @retval TEE_SUCCESS if OK. 983 */ 984 TEE_Result stm32_saes_update(struct stm32_saes_context *ctx, bool last_block, 985 uint8_t *data_in, uint8_t *data_out, 986 size_t data_size) 987 { 988 TEE_Result res = TEE_SUCCESS; 989 unsigned int i = U(0); 990 991 if (!ctx) 992 return TEE_ERROR_BAD_PARAMETERS; 993 994 mutex_lock(ctx->lock); 995 996 /* 997 * CBC encryption requires the 2 last blocks to be aligned with AES 998 * block size. 999 */ 1000 if (last_block && IS_CHAINING_MODE(CBC, ctx->cr) && 1001 is_encrypt(ctx->cr) && 1002 (ROUNDDOWN(data_size, AES_BLOCK_SIZE) != data_size)) { 1003 if (data_size < AES_BLOCK_SIZE * 2) { 1004 /* 1005 * If CBC, size of the last part should be at 1006 * least 2*AES_BLOCK_SIZE 1007 */ 1008 EMSG("Unexpected last block size"); 1009 res = TEE_ERROR_BAD_STATE; 1010 goto out; 1011 } 1012 /* 1013 * Do not support padding if the total size is not aligned with 1014 * the size of a block. 1015 */ 1016 res = TEE_ERROR_NOT_IMPLEMENTED; 1017 goto out; 1018 } 1019 1020 /* Manage remaining CTR mask from previous update call */ 1021 if (IS_CHAINING_MODE(CTR, ctx->cr) && ctx->extra_size) { 1022 unsigned int j = 0; 1023 uint8_t *mask = (uint8_t *)ctx->extra; 1024 1025 for (i = 0, j = 0; j < ctx->extra_size && i < data_size; 1026 j++, i++) 1027 data_out[i] = data_in[i] ^ mask[j]; 1028 1029 if (j != ctx->extra_size) { 1030 /* 1031 * We didn't consume all saved mask, 1032 * but no more data. 1033 */ 1034 1035 /* We save remaining mask and its new size */ 1036 memmove(ctx->extra, ctx->extra + j, 1037 ctx->extra_size - j); 1038 ctx->extra_size -= j; 1039 1040 /* 1041 * We don't need to save HW context we didn't 1042 * modify HW state. 1043 */ 1044 res = TEE_SUCCESS; 1045 goto out; 1046 } 1047 /* All extra mask consumed */ 1048 ctx->extra_size = 0; 1049 } 1050 1051 res = restore_context(ctx); 1052 if (res) 1053 goto out; 1054 1055 while (data_size - i >= AES_BLOCK_SIZE) { 1056 write_block(ctx->base, data_in + i); 1057 1058 res = wait_computation_completed(ctx->base); 1059 if (res) 1060 goto out; 1061 1062 read_block(ctx->base, data_out + i); 1063 1064 clear_computation_completed(ctx->base); 1065 1066 /* Process next block */ 1067 i += AES_BLOCK_SIZE; 1068 } 1069 1070 /* Manage last block if not a block size multiple */ 1071 if (i < data_size) { 1072 if (IS_CHAINING_MODE(CTR, ctx->cr)) { 1073 /* 1074 * For CTR we save the generated mask to use it at next 1075 * update call. 1076 */ 1077 uint32_t block_in[AES_BLOCK_NB_U32] = { }; 1078 uint32_t block_out[AES_BLOCK_NB_U32] = { }; 1079 1080 memcpy(block_in, data_in + i, data_size - i); 1081 1082 write_aligned_block(ctx->base, block_in); 1083 1084 res = wait_computation_completed(ctx->base); 1085 if (res) 1086 goto out; 1087 1088 read_aligned_block(ctx->base, block_out); 1089 1090 clear_computation_completed(ctx->base); 1091 1092 memcpy(data_out + i, block_out, data_size - i); 1093 1094 /* Save mask for possibly next call */ 1095 ctx->extra_size = AES_BLOCK_SIZE - (data_size - i); 1096 memcpy(ctx->extra, (uint8_t *)block_out + data_size - i, 1097 ctx->extra_size); 1098 } else { 1099 /* CBC and ECB can manage only multiple of block_size */ 1100 res = TEE_ERROR_BAD_PARAMETERS; 1101 goto out; 1102 } 1103 } 1104 1105 if (!last_block) 1106 res = save_context(ctx); 1107 1108 out: 1109 /* If last block or error, end of SAES process */ 1110 if (last_block || res) 1111 saes_end(ctx, res); 1112 1113 mutex_unlock(ctx->lock); 1114 1115 return res; 1116 } 1117 1118 static void xor_block(uint8_t *b1, uint8_t *b2, size_t size) 1119 { 1120 size_t i = 0; 1121 1122 for (i = 0; i < size; i++) 1123 b1[i] ^= b2[i]; 1124 } 1125 1126 static TEE_Result stm32_saes_cmac_prf_128(struct stm32_saes_context *ctx, 1127 enum stm32_saes_key_selection key_sel, 1128 const void *key, size_t key_size, 1129 uint8_t *data, size_t data_size, 1130 uint8_t *out) 1131 { 1132 TEE_Result res = TEE_ERROR_GENERIC; 1133 uint8_t block[AES_BLOCK_SIZE] = { }; 1134 uint8_t k1[AES_BLOCK_SIZE] = { }; 1135 uint8_t k2[AES_BLOCK_SIZE] = { }; 1136 uint8_t l[AES_BLOCK_SIZE] = { }; 1137 size_t processed = 0; 1138 uint8_t bit = 0; 1139 int i = 0; 1140 1141 if (!ctx) 1142 return TEE_ERROR_BAD_PARAMETERS; 1143 1144 /* Get K1 and K2 */ 1145 res = stm32_saes_init(ctx, false, STM32_SAES_MODE_ECB, key_sel, 1146 key, key_size, NULL, 0); 1147 if (res) 1148 return res; 1149 1150 res = stm32_saes_update(ctx, true, l, l, sizeof(l)); 1151 if (res) 1152 return res; 1153 1154 /* MSB(L) == 0 => K1 = L << 1 */ 1155 bit = 0; 1156 for (i = sizeof(l) - 1; i >= 0; i--) { 1157 k1[i] = (l[i] << 1) | bit; 1158 bit = (l[i] & 0x80) >> 7; 1159 } 1160 /* MSB(L) == 1 => K1 = (L << 1) XOR const_Rb */ 1161 if ((l[0] & 0x80)) 1162 k1[sizeof(k1) - 1] = k1[sizeof(k1) - 1] ^ 0x87; 1163 1164 /* MSB(K1) == 0 => K2 = K1 << 1 */ 1165 bit = 0; 1166 for (i = sizeof(k1) - 1; i >= 0; i--) { 1167 k2[i] = (k1[i] << 1) | bit; 1168 bit = (k1[i] & 0x80) >> 7; 1169 } 1170 1171 /* MSB(K1) == 1 => K2 = (K1 << 1) XOR const_Rb */ 1172 if ((k1[0] & 0x80)) 1173 k2[sizeof(k2) - 1] = k2[sizeof(k2) - 1] ^ 0x87; 1174 1175 if (data_size > AES_BLOCK_SIZE) { 1176 uint8_t *data_out = NULL; 1177 1178 /* All block but last in CBC mode */ 1179 res = stm32_saes_init(ctx, false, STM32_SAES_MODE_CBC, 1180 key_sel, key, key_size, block, 1181 sizeof(block)); 1182 if (res) 1183 return res; 1184 1185 processed = ROUNDDOWN(data_size - 1, AES_BLOCK_SIZE); 1186 data_out = malloc(processed); 1187 if (!data_out) 1188 return TEE_ERROR_OUT_OF_MEMORY; 1189 1190 res = stm32_saes_update(ctx, true, data, data_out, processed); 1191 if (!res) { 1192 /* Copy last out block or keep block as { 0 } */ 1193 memcpy(block, data_out + processed - AES_BLOCK_SIZE, 1194 AES_BLOCK_SIZE); 1195 } 1196 1197 free(data_out); 1198 1199 if (res) 1200 return res; 1201 } 1202 1203 /* Manage last block */ 1204 xor_block(block, data + processed, data_size - processed); 1205 if (data_size - processed == AES_BLOCK_SIZE) { 1206 xor_block(block, k1, AES_BLOCK_SIZE); 1207 } else { 1208 /* xor with padding = 0b100... */ 1209 block[data_size - processed] ^= 0x80; 1210 xor_block(block, k2, AES_BLOCK_SIZE); 1211 } 1212 1213 /* 1214 * AES last block. 1215 * We need to use same chaining mode to keep same key if DHUK is 1216 * selected so we reuse l as a zero initialized IV. 1217 */ 1218 memset(l, 0, sizeof(l)); 1219 res = stm32_saes_init(ctx, false, STM32_SAES_MODE_CBC, key_sel, key, 1220 key_size, l, sizeof(l)); 1221 if (res) 1222 return res; 1223 1224 return stm32_saes_update(ctx, true, block, out, AES_BLOCK_SIZE); 1225 } 1226 1227 TEE_Result stm32_saes_kdf(struct stm32_saes_context *ctx, 1228 enum stm32_saes_key_selection key_sel, 1229 const void *key, size_t key_size, 1230 const void *input, size_t input_size, 1231 uint8_t *subkey, size_t subkey_size) 1232 1233 { 1234 TEE_Result res = TEE_SUCCESS; 1235 uint32_t index = 0; 1236 uint32_t index_be = 0; 1237 uint8_t *data = NULL; 1238 size_t data_index = 0; 1239 size_t subkey_index = 0; 1240 size_t data_size = input_size + sizeof(index_be); 1241 uint8_t cmac[AES_BLOCK_SIZE] = { }; 1242 1243 if (!ctx || !input || !input_size) 1244 return TEE_ERROR_BAD_PARAMETERS; 1245 1246 /* For each K(i) we will add an index */ 1247 data = malloc(data_size); 1248 if (!data) 1249 return TEE_ERROR_OUT_OF_MEMORY; 1250 1251 data_index = 0; 1252 index_be = TEE_U32_TO_BIG_ENDIAN(index); 1253 memcpy(data + data_index, &index_be, sizeof(index_be)); 1254 data_index += sizeof(index_be); 1255 memcpy(data + data_index, input, input_size); 1256 data_index += input_size; 1257 1258 /* K(i) computation. */ 1259 index = 0; 1260 while (subkey_index < subkey_size) { 1261 index++; 1262 index_be = TEE_U32_TO_BIG_ENDIAN(index); 1263 memcpy(data, &index_be, sizeof(index_be)); 1264 1265 res = stm32_saes_cmac_prf_128(ctx, key_sel, key, key_size, 1266 data, data_size, cmac); 1267 if (res) 1268 goto out; 1269 1270 memcpy(subkey + subkey_index, cmac, 1271 MIN(subkey_size - subkey_index, sizeof(cmac))); 1272 subkey_index += sizeof(cmac); 1273 } 1274 1275 out: 1276 free(data); 1277 if (res) 1278 memzero_explicit(subkey, subkey_size); 1279 1280 return res; 1281 } 1282 1283 /* Implement hardware HUK derivation using SAES resources */ 1284 TEE_Result huk_subkey_derive(enum huk_subkey_usage usage, 1285 const void *const_data, size_t const_data_len, 1286 uint8_t *subkey, size_t subkey_len) 1287 { 1288 TEE_Result res = TEE_ERROR_GENERIC; 1289 uint8_t *input = NULL; 1290 size_t input_index = 0; 1291 size_t subkey_bitlen = 0; 1292 struct stm32_saes_context ctx = { }; 1293 uint8_t separator = 0; 1294 1295 /* Check if driver is probed */ 1296 if (!saes_pdata.base) { 1297 return __huk_subkey_derive(usage, const_data, const_data_len, 1298 subkey, subkey_len); 1299 } 1300 1301 input = malloc(const_data_len + sizeof(separator) + sizeof(usage) + 1302 sizeof(subkey_bitlen) + AES_BLOCK_SIZE); 1303 if (!input) 1304 return TEE_ERROR_OUT_OF_MEMORY; 1305 1306 input_index = 0; 1307 if (const_data) { 1308 memcpy(input + input_index, const_data, const_data_len); 1309 input_index += const_data_len; 1310 1311 memcpy(input + input_index, &separator, sizeof(separator)); 1312 input_index += sizeof(separator); 1313 } 1314 1315 memcpy(input + input_index, &usage, sizeof(usage)); 1316 input_index += sizeof(usage); 1317 1318 /* 1319 * We should add the subkey_len in bits at end of input. 1320 * And we choose to put in a MSB first uint32_t. 1321 */ 1322 subkey_bitlen = TEE_U32_TO_BIG_ENDIAN(subkey_len * INT8_BIT); 1323 memcpy(input + input_index, &subkey_bitlen, sizeof(subkey_bitlen)); 1324 input_index += sizeof(subkey_bitlen); 1325 1326 /* 1327 * We get K(0) to avoid some key control attack 1328 * and store it at end of input. 1329 */ 1330 res = stm32_saes_cmac_prf_128(&ctx, STM32_SAES_KEY_DHU, NULL, 1331 AES_KEYSIZE_128, 1332 input, input_index, 1333 input + input_index); 1334 if (res) 1335 goto out; 1336 1337 /* We just added K(0) to input */ 1338 input_index += AES_BLOCK_SIZE; 1339 1340 res = stm32_saes_kdf(&ctx, STM32_SAES_KEY_DHU, NULL, AES_KEYSIZE_128, 1341 input, input_index, subkey, subkey_len); 1342 1343 out: 1344 free(input); 1345 return res; 1346 } 1347 1348 static TEE_Result stm32_saes_parse_fdt(struct stm32_saes_platdata *pdata, 1349 const void *fdt, int node) 1350 { 1351 struct dt_node_info dt_saes = { }; 1352 TEE_Result res = TEE_ERROR_GENERIC; 1353 1354 dt_saes.reg = fdt_reg_base_address(fdt, node); 1355 dt_saes.reg_size = fdt_reg_size(fdt, node); 1356 1357 if (dt_saes.reg == DT_INFO_INVALID_REG || 1358 dt_saes.reg_size == DT_INFO_INVALID_REG_SIZE) 1359 return TEE_ERROR_BAD_PARAMETERS; 1360 1361 res = clk_dt_get_by_index(fdt, node, 0, &pdata->clk); 1362 if (res != TEE_SUCCESS) 1363 return res; 1364 1365 res = rstctrl_dt_get_by_index(fdt, node, 0, &pdata->reset); 1366 if (res != TEE_SUCCESS && res != TEE_ERROR_ITEM_NOT_FOUND) 1367 return res; 1368 1369 pdata->base = (vaddr_t)phys_to_virt(dt_saes.reg, MEM_AREA_IO_SEC, 1370 dt_saes.reg_size); 1371 if (!pdata->base) 1372 panic(); 1373 1374 return TEE_SUCCESS; 1375 } 1376 1377 static TEE_Result stm32_saes_probe(const void *fdt, int node, 1378 const void *compat_data __unused) 1379 { 1380 TEE_Result res = TEE_SUCCESS; 1381 1382 assert(!saes_pdata.base); 1383 1384 res = stm32_saes_parse_fdt(&saes_pdata, fdt, node); 1385 if (res) 1386 return res; 1387 1388 if (clk_enable(saes_pdata.clk)) 1389 panic(); 1390 1391 /* External reset of SAES */ 1392 if (saes_pdata.reset) { 1393 if (rstctrl_assert_to(saes_pdata.reset, TIMEOUT_US_1MS)) 1394 panic(); 1395 1396 udelay(SAES_RESET_DELAY); 1397 1398 if (rstctrl_deassert_to(saes_pdata.reset, TIMEOUT_US_1MS)) 1399 panic(); 1400 } 1401 1402 /* Internal reset of SAES */ 1403 io_setbits32(saes_pdata.base + _SAES_CR, _SAES_CR_IPRST); 1404 udelay(SAES_RESET_DELAY); 1405 io_clrbits32(saes_pdata.base + _SAES_CR, _SAES_CR_IPRST); 1406 1407 if (IS_ENABLED(CFG_CRYPTO_DRV_CIPHER)) { 1408 res = stm32_register_cipher(SAES_IP); 1409 if (res) { 1410 EMSG("Failed to register to cipher: %#"PRIx32, res); 1411 panic(); 1412 } 1413 } 1414 1415 return TEE_SUCCESS; 1416 } 1417 1418 static const struct dt_device_match saes_match_table[] = { 1419 { .compatible = "st,stm32mp13-saes" }, 1420 { } 1421 }; 1422 1423 DEFINE_DT_DRIVER(stm32_saes_dt_driver) = { 1424 .name = "stm32-saes", 1425 .match_table = saes_match_table, 1426 .probe = &stm32_saes_probe, 1427 }; 1428