// SPDX-License-Identifier: BSD-2-Clause
/*
 * Copyright (c) 2021, STMicroelectronics - All Rights Reserved
 */
#include <assert.h>
#include <config.h>
#include <drivers/rstctrl.h>
#include <initcall.h>
#include <io.h>
#include <kernel/boot.h>
#include <kernel/delay.h>
#include <kernel/dt.h>
#include <kernel/mutex.h>
#include <libfdt.h>
#include <mm/core_memprot.h>
#include <stdint.h>
#include <stm32_util.h>
#include <string.h>
#include <utee_defines.h>
#include <util.h>

#include "stm32_cryp.h"
#include "common.h"

#define INT8_BIT			8U
#define AES_BLOCK_SIZE_BIT		128U
#define AES_BLOCK_SIZE			(AES_BLOCK_SIZE_BIT / INT8_BIT)
#define AES_BLOCK_NB_U32		(AES_BLOCK_SIZE / sizeof(uint32_t))
#define DES_BLOCK_SIZE_BIT		64U
#define DES_BLOCK_SIZE			(DES_BLOCK_SIZE_BIT / INT8_BIT)
#define DES_BLOCK_NB_U32		(DES_BLOCK_SIZE / sizeof(uint32_t))
#define MAX_BLOCK_SIZE_BIT		AES_BLOCK_SIZE_BIT
#define MAX_BLOCK_SIZE			AES_BLOCK_SIZE
#define MAX_BLOCK_NB_U32		AES_BLOCK_NB_U32
#define AES_KEYSIZE_128			16U
#define AES_KEYSIZE_192			24U
#define AES_KEYSIZE_256			32U

/* CRYP control register */
#define _CRYP_CR			0x0U
/* CRYP status register */
#define _CRYP_SR			0x04U
/* CRYP data input register */
#define _CRYP_DIN			0x08U
/* CRYP data output register */
#define _CRYP_DOUT			0x0CU
/* CRYP DMA control register */
#define _CRYP_DMACR			0x10U
/* CRYP interrupt mask set/clear register */
#define _CRYP_IMSCR			0x14U
/* CRYP raw interrupt status register */
#define _CRYP_RISR			0x18U
/* CRYP masked interrupt status register */
#define _CRYP_MISR			0x1CU
/* CRYP key registers */
#define _CRYP_K0LR			0x20U
#define _CRYP_K0RR			0x24U
#define _CRYP_K1LR			0x28U
#define _CRYP_K1RR			0x2CU
#define _CRYP_K2LR			0x30U
#define _CRYP_K2RR			0x34U
#define _CRYP_K3LR			0x38U
#define _CRYP_K3RR			0x3CU
/* CRYP initialization vector registers */
#define _CRYP_IV0LR			0x40U
#define _CRYP_IV0RR			0x44U
#define _CRYP_IV1LR			0x48U
#define _CRYP_IV1RR			0x4CU
/* CRYP context swap GCM-CCM registers */
#define _CRYP_CSGCMCCM0R		0x50U
#define _CRYP_CSGCMCCM1R		0x54U
#define _CRYP_CSGCMCCM2R		0x58U
#define _CRYP_CSGCMCCM3R		0x5CU
#define _CRYP_CSGCMCCM4R		0x60U
#define _CRYP_CSGCMCCM5R		0x64U
#define _CRYP_CSGCMCCM6R		0x68U
#define _CRYP_CSGCMCCM7R		0x6CU
/* CRYP context swap GCM registers */
#define _CRYP_CSGCM0R			0x70U
#define _CRYP_CSGCM1R			0x74U
#define _CRYP_CSGCM2R			0x78U
#define _CRYP_CSGCM3R			0x7CU
#define _CRYP_CSGCM4R			0x80U
#define _CRYP_CSGCM5R			0x84U
#define _CRYP_CSGCM6R			0x88U
#define _CRYP_CSGCM7R			0x8CU
/* CRYP hardware configuration register */
#define _CRYP_HWCFGR			0x3F0U
/* CRYP HW version register */
#define _CRYP_VERR			0x3F4U
/* CRYP identification */
#define _CRYP_IPIDR			0x3F8U
/* CRYP HW magic ID */
#define _CRYP_MID			0x3FCU

#define CRYP_TIMEOUT_US			1000000U
#define TIMEOUT_US_1MS			1000U

/* CRYP control register fields */
#define _CRYP_CR_RESET_VALUE		0x0U
#define _CRYP_CR_NPBLB_MSK		GENMASK_32(23, 20)
#define _CRYP_CR_NPBLB_OFF		20U
#define _CRYP_CR_GCM_CCMPH_MSK		GENMASK_32(17, 16)
#define _CRYP_CR_GCM_CCMPH_OFF		16U
#define _CRYP_CR_GCM_CCMPH_INIT		0U
#define _CRYP_CR_GCM_CCMPH_HEADER	1U
#define _CRYP_CR_GCM_CCMPH_PAYLOAD	2U
#define _CRYP_CR_GCM_CCMPH_FINAL	3U
#define _CRYP_CR_CRYPEN			BIT(15)
#define _CRYP_CR_FFLUSH			BIT(14)
#define _CRYP_CR_KEYSIZE_MSK		GENMASK_32(9, 8)
#define _CRYP_CR_KEYSIZE_OFF		8U
#define _CRYP_CR_KSIZE_128		0U
#define _CRYP_CR_KSIZE_192		1U
#define _CRYP_CR_KSIZE_256		2U
#define _CRYP_CR_DATATYPE_MSK		GENMASK_32(7, 6)
#define _CRYP_CR_DATATYPE_OFF		6U
#define _CRYP_CR_DATATYPE_NONE		0U
#define _CRYP_CR_DATATYPE_HALF_WORD	1U
#define _CRYP_CR_DATATYPE_BYTE		2U
#define _CRYP_CR_DATATYPE_BIT		3U
#define _CRYP_CR_ALGOMODE_MSK		(BIT(19) | GENMASK_32(5, 3))
#define _CRYP_CR_ALGOMODE_OFF		3U
#define _CRYP_CR_ALGOMODE_TDES_ECB	0x0U
#define _CRYP_CR_ALGOMODE_TDES_CBC	0x1U
#define _CRYP_CR_ALGOMODE_DES_ECB	0x2U
#define _CRYP_CR_ALGOMODE_DES_CBC	0x3U
#define _CRYP_CR_ALGOMODE_AES_ECB	0x4U
#define _CRYP_CR_ALGOMODE_AES_CBC	0x5U
#define _CRYP_CR_ALGOMODE_AES_CTR	0x6U
#define _CRYP_CR_ALGOMODE_AES		0x7U
#define _CRYP_CR_ALGOMODE_AES_GCM	BIT(16)
#define _CRYP_CR_ALGOMODE_AES_CCM	(BIT(16) | BIT(0))
#define _CRYP_CR_ALGODIR		BIT(2)
#define _CRYP_CR_ALGODIR_ENC		0U
#define _CRYP_CR_ALGODIR_DEC		BIT(2)

/* CRYP status register fields */
#define _CRYP_SR_BUSY			BIT(4)
#define _CRYP_SR_OFFU			BIT(3)
#define _CRYP_SR_OFNE			BIT(2)
#define _CRYP_SR_IFNF			BIT(1)
#define _CRYP_SR_IFEM			BIT(0)

/* CRYP DMA control register fields */
#define _CRYP_DMACR_DOEN		BIT(1)
#define _CRYP_DMACR_DIEN		BIT(0)

/* CRYP interrupt fields */
#define _CRYP_I_OUT			BIT(1)
#define _CRYP_I_IN			BIT(0)

/* CRYP hardware configuration register fields */
#define _CRYP_HWCFGR_CFG1_MSK		GENMASK_32(3, 0)
#define _CRYP_HWCFGR_CFG1_OFF		0U
#define _CRYP_HWCFGR_CFG2_MSK		GENMASK_32(7, 4)
#define _CRYP_HWCFGR_CFG2_OFF		4U
#define _CRYP_HWCFGR_CFG3_MSK		GENMASK_32(11, 8)
#define _CRYP_HWCFGR_CFG3_OFF		8U
#define _CRYP_HWCFGR_CFG4_MSK		GENMASK_32(15, 12)
#define _CRYP_HWCFGR_CFG4_OFF		12U

/* CRYP HW version register */
#define _CRYP_VERR_MSK			GENMASK_32(7, 0)
#define _CRYP_VERR_OFF			0U
/*
 * Macros to manage bit manipulation on a local variable before writing
 * the result only once to the actual register.
 */
#define CLRBITS(v, bits)	((v) &= ~(bits))
#define SETBITS(v, bits)	((v) |= (bits))

#define IS_ALGOMODE(cr, mod) \
	(((cr) & _CRYP_CR_ALGOMODE_MSK) == (_CRYP_CR_ALGOMODE_##mod << \
					    _CRYP_CR_ALGOMODE_OFF))

#define SET_ALGOMODE(mod, cr) \
	clrsetbits(&(cr), _CRYP_CR_ALGOMODE_MSK, (_CRYP_CR_ALGOMODE_##mod << \
						  _CRYP_CR_ALGOMODE_OFF))

#define GET_ALGOMODE(cr) \
	(((cr) & _CRYP_CR_ALGOMODE_MSK) >> _CRYP_CR_ALGOMODE_OFF)

#define TOBE32(x)		TEE_U32_BSWAP(x)
#define FROMBE32(x)		TEE_U32_BSWAP(x)

static struct stm32_cryp_platdata cryp_pdata;
static struct mutex cryp_lock = MUTEX_INITIALIZER;

static void clrsetbits(uint32_t *v, uint32_t mask, uint32_t bits)
{
	*v = (*v & ~mask) | bits;
}
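
/*
 * Illustrative sketch (not part of the driver): the macros above let a
 * caller stage a full control-register value in a local uint32_t and
 * commit it with a single MMIO write, e.g.:
 *
 *	uint32_t cr = _CRYP_CR_RESET_VALUE;
 *
 *	SETBITS(cr, _CRYP_CR_ALGODIR);		// decryption direction
 *	clrsetbits(&cr, _CRYP_CR_DATATYPE_MSK,
 *		   _CRYP_CR_DATATYPE_BYTE << _CRYP_CR_DATATYPE_OFF);
 *	io_write32(base + _CRYP_CR, cr);	// single register write
 *
 * stm32_cryp_init() stages ctx->cr this way; restore_context() commits it.
 */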

static bool algo_mode_needs_iv(uint32_t cr)
{
	return !IS_ALGOMODE(cr, TDES_ECB) && !IS_ALGOMODE(cr, DES_ECB) &&
	       !IS_ALGOMODE(cr, AES_ECB);
}

static bool algo_mode_is_ecb_cbc(uint32_t cr)
{
	return GET_ALGOMODE(cr) < _CRYP_CR_ALGOMODE_AES_CTR;
}

static bool algo_mode_is_aes(uint32_t cr)
{
	return ((cr & _CRYP_CR_ALGOMODE_MSK) >> _CRYP_CR_ALGOMODE_OFF) >=
	       _CRYP_CR_ALGOMODE_AES_ECB;
}

static bool is_decrypt(uint32_t cr)
{
	return (cr & _CRYP_CR_ALGODIR) == _CRYP_CR_ALGODIR_DEC;
}

static bool is_encrypt(uint32_t cr)
{
	return !is_decrypt(cr);
}

static bool does_need_npblb(uint32_t cr)
{
	return (IS_ALGOMODE(cr, AES_GCM) && is_encrypt(cr)) ||
	       (IS_ALGOMODE(cr, AES_CCM) && is_decrypt(cr));
}

static TEE_Result wait_sr_bits(vaddr_t base, uint32_t bits)
{
	uint64_t timeout_ref = timeout_init_us(CRYP_TIMEOUT_US);

	while ((io_read32(base + _CRYP_SR) & bits) != bits)
		if (timeout_elapsed(timeout_ref))
			break;

	if ((io_read32(base + _CRYP_SR) & bits) != bits)
		return TEE_ERROR_BUSY;

	return TEE_SUCCESS;
}

static TEE_Result wait_end_busy(vaddr_t base)
{
	uint64_t timeout_ref = timeout_init_us(CRYP_TIMEOUT_US);

	while (io_read32(base + _CRYP_SR) & _CRYP_SR_BUSY)
		if (timeout_elapsed(timeout_ref))
			break;

	if (io_read32(base + _CRYP_SR) & _CRYP_SR_BUSY)
		return TEE_ERROR_BUSY;

	return TEE_SUCCESS;
}

static TEE_Result wait_end_enable(vaddr_t base)
{
	uint64_t timeout_ref = timeout_init_us(CRYP_TIMEOUT_US);

	while (io_read32(base + _CRYP_CR) & _CRYP_CR_CRYPEN)
		if (timeout_elapsed(timeout_ref))
			break;

	if (io_read32(base + _CRYP_CR) & _CRYP_CR_CRYPEN)
		return TEE_ERROR_BUSY;

	return TEE_SUCCESS;
}

static TEE_Result __must_check write_align_block(struct stm32_cryp_context *ctx,
						 uint32_t *data)
{
	TEE_Result res = TEE_SUCCESS;
	unsigned int i = 0;

	res = wait_sr_bits(ctx->base, _CRYP_SR_IFNF);
	if (res)
		return res;

	for (i = 0; i < ctx->block_u32; i++) {
		/* No need to htobe() as we configure the HW to swap bytes */
		io_write32(ctx->base + _CRYP_DIN, data[i]);
	}

	return TEE_SUCCESS;
}

static TEE_Result __must_check write_block(struct stm32_cryp_context *ctx,
					   uint8_t *data)
{
	if (!IS_ALIGNED_WITH_TYPE(data, uint32_t)) {
		uint32_t data_u32[MAX_BLOCK_NB_U32] = { 0 };

		memcpy(data_u32, data, ctx->block_u32 * sizeof(uint32_t));
		return write_align_block(ctx, data_u32);
	}

	return write_align_block(ctx, (void *)data);
}

static TEE_Result __must_check read_align_block(struct stm32_cryp_context *ctx,
						uint32_t *data)
{
	TEE_Result res = TEE_SUCCESS;
	unsigned int i = 0;

	res = wait_sr_bits(ctx->base, _CRYP_SR_OFNE);
	if (res)
		return res;

	for (i = 0; i < ctx->block_u32; i++) {
		/* No need to htobe() as we configure the HW to swap bytes */
		data[i] = io_read32(ctx->base + _CRYP_DOUT);
	}

	return TEE_SUCCESS;
}

static TEE_Result __must_check read_block(struct stm32_cryp_context *ctx,
					  uint8_t *data)
{
	if (!IS_ALIGNED_WITH_TYPE(data, uint32_t)) {
		TEE_Result res = TEE_SUCCESS;
		uint32_t data_u32[MAX_BLOCK_NB_U32] = { 0 };

		res = read_align_block(ctx, data_u32);
		if (res)
			return res;

		memcpy(data, data_u32, ctx->block_u32 * sizeof(uint32_t));

		return TEE_SUCCESS;
	}

	return read_align_block(ctx, (void *)data);
}
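
/*
 * Illustrative sketch (not part of the driver): the helpers above are
 * used in write/read pairs, one block per iteration. write_*() waits for
 * input-FIFO-not-full (IFNF) and read_*() for output-FIFO-not-empty
 * (OFNE), so a caller can simply do:
 *
 *	res = write_block(ctx, in);	// in: uint8_t *, any alignment
 *	if (!res)
 *		res = read_block(ctx, out);
 *
 * Unaligned byte pointers are bounced through an aligned stack copy.
 */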

static void cryp_end(struct stm32_cryp_context *ctx, TEE_Result prev_error)
{
	if (prev_error) {
		if (rstctrl_assert_to(cryp_pdata.reset, TIMEOUT_US_1MS))
			panic();
		if (rstctrl_deassert_to(cryp_pdata.reset, TIMEOUT_US_1MS))
			panic();
	}

	/* Disable the CRYP peripheral */
	io_clrbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);
}

static void cryp_write_iv(struct stm32_cryp_context *ctx)
{
	if (algo_mode_needs_iv(ctx->cr)) {
		unsigned int i = 0;

		/* Restore the _CRYP_IVRx */
		for (i = 0; i < ctx->block_u32; i++)
			io_write32(ctx->base + _CRYP_IV0LR + i *
				   sizeof(uint32_t), ctx->iv[i]);
	}
}

static void cryp_save_suspend(struct stm32_cryp_context *ctx)
{
	unsigned int i = 0;

	if (IS_ALGOMODE(ctx->cr, AES_GCM) || IS_ALGOMODE(ctx->cr, AES_CCM))
		for (i = 0; i < ARRAY_SIZE(ctx->pm_gcmccm); i++)
			ctx->pm_gcmccm[i] = io_read32(ctx->base +
						      _CRYP_CSGCMCCM0R +
						      i * sizeof(uint32_t));

	if (IS_ALGOMODE(ctx->cr, AES_GCM))
		for (i = 0; i < ARRAY_SIZE(ctx->pm_gcm); i++)
			ctx->pm_gcm[i] = io_read32(ctx->base + _CRYP_CSGCM0R +
						   i * sizeof(uint32_t));
}

static void cryp_restore_suspend(struct stm32_cryp_context *ctx)
{
	unsigned int i = 0;

	if (IS_ALGOMODE(ctx->cr, AES_GCM) || IS_ALGOMODE(ctx->cr, AES_CCM))
		for (i = 0; i < ARRAY_SIZE(ctx->pm_gcmccm); i++)
			io_write32(ctx->base + _CRYP_CSGCMCCM0R +
				   i * sizeof(uint32_t), ctx->pm_gcmccm[i]);

	if (IS_ALGOMODE(ctx->cr, AES_GCM))
		for (i = 0; i < ARRAY_SIZE(ctx->pm_gcm); i++)
			io_write32(ctx->base + _CRYP_CSGCM0R +
				   i * sizeof(uint32_t), ctx->pm_gcm[i]);
}

static void cryp_write_key(struct stm32_cryp_context *ctx)
{
	vaddr_t reg = 0;
	int i = 0;
	uint32_t algo = GET_ALGOMODE(ctx->cr);

	if (algo == _CRYP_CR_ALGOMODE_DES_ECB ||
	    algo == _CRYP_CR_ALGOMODE_DES_CBC)
		reg = ctx->base + _CRYP_K1RR;
	else
		reg = ctx->base + _CRYP_K3RR;

	for (i = ctx->key_size / sizeof(uint32_t) - 1;
	     i >= 0;
	     i--, reg -= sizeof(uint32_t))
		io_write32(reg, ctx->key[i]);
}

static TEE_Result cryp_prepare_key(struct stm32_cryp_context *ctx)
{
	TEE_Result res = TEE_SUCCESS;

	/*
	 * For AES ECB/CBC decryption, key preparation mode must be selected
	 * to populate the key.
	 */
	if (is_decrypt(ctx->cr) && (IS_ALGOMODE(ctx->cr, AES_ECB) ||
				    IS_ALGOMODE(ctx->cr, AES_CBC))) {
		/* Select algomode "prepare key" */
		io_clrsetbits32(ctx->base + _CRYP_CR, _CRYP_CR_ALGOMODE_MSK,
				_CRYP_CR_ALGOMODE_AES << _CRYP_CR_ALGOMODE_OFF);

		cryp_write_key(ctx);

		/* Enable CRYP */
		io_setbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);

		res = wait_end_busy(ctx->base);
		if (res)
			return res;

		/* Reset the 'real' algomode */
		io_clrsetbits32(ctx->base + _CRYP_CR, _CRYP_CR_ALGOMODE_MSK,
				ctx->cr & _CRYP_CR_ALGOMODE_MSK);
	} else {
		cryp_write_key(ctx);
	}

	return TEE_SUCCESS;
}

static TEE_Result save_context(struct stm32_cryp_context *ctx)
{
	/* The device should not be in a processing phase */
	if (io_read32(ctx->base + _CRYP_SR) & _CRYP_SR_BUSY)
		return TEE_ERROR_BAD_STATE;

	/* Disable the CRYP peripheral */
	io_clrbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);

	/* Save CR */
	ctx->cr = io_read32(ctx->base + _CRYP_CR);

	cryp_save_suspend(ctx);

	/* Save the current IV if the algo mode uses one */
	if (algo_mode_needs_iv(ctx->cr)) {
		unsigned int i = 0;

		/* Save IV */
		for (i = 0; i < ctx->block_u32; i++)
			ctx->iv[i] = io_read32(ctx->base + _CRYP_IV0LR + i *
					       sizeof(uint32_t));
	}

	return TEE_SUCCESS;
}

/* To resume the processing of a message */
static TEE_Result restore_context(struct stm32_cryp_context *ctx)
{
	TEE_Result res = TEE_SUCCESS;

	/* The IP should be disabled */
	if (io_read32(ctx->base + _CRYP_CR) & _CRYP_CR_CRYPEN) {
		DMSG("Device is still enabled");
		return TEE_ERROR_BAD_STATE;
	}

	/* Restore the _CRYP_CR */
	io_write32(ctx->base + _CRYP_CR, ctx->cr);

	/* Write the key and, for AES ECB/CBC decryption, prepare it */
	res = cryp_prepare_key(ctx);
	if (res)
		return res;

	cryp_restore_suspend(ctx);

	cryp_write_iv(ctx);

	/* Flush the internal FIFOs */
	io_setbits32(ctx->base + _CRYP_CR, _CRYP_CR_FFLUSH);

	/* Enable the CRYP peripheral */
	io_setbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);

	return TEE_SUCCESS;
}

/*
 * Translate a byte index in an array of BE uint32_t into the index of the
 * same byte in the corresponding LE uint32_t array.
 */
static size_t be_index(size_t index)
{
	return (index & ~0x3) + 3 - (index & 0x3);
}
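
/*
 * Worked example (illustration only): for a 16-byte block stored as four
 * LE uint32_t words, byte 0 of the BE view lives at byte 3 of word 0,
 * byte 5 at byte 2 of word 1, and byte 15 at byte 0 of word 3:
 *
 *	be_index(0)  == 3
 *	be_index(5)  == 6
 *	be_index(15) == 12
 *
 * ccm_first_context() below relies on this to patch the CCM B0/CTR0 flag
 * and length bytes in place.
 */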

static TEE_Result ccm_first_context(struct stm32_cryp_context *ctx)
{
	TEE_Result res = TEE_SUCCESS;
	uint32_t b0[AES_BLOCK_NB_U32] = { 0 };
	uint8_t *iv = (uint8_t *)ctx->iv;
	size_t l = 0;
	size_t i = 15;

	/* The IP should be disabled */
	if (io_read32(ctx->base + _CRYP_CR) & _CRYP_CR_CRYPEN)
		return TEE_ERROR_BAD_STATE;

	/* Write the _CRYP_CR */
	io_write32(ctx->base + _CRYP_CR, ctx->cr);

	/* Write the key */
	res = cryp_prepare_key(ctx);
	if (res)
		return res;

	/* Save the full IV, it will be B0 */
	memcpy(b0, iv, sizeof(b0));

	/*
	 * Update the IV to become CTR0/1 before setting it.
	 * The IV is saved as LE uint32_t[4] as expected by the hardware,
	 * but the CCM RFC defines the bytes to update in a BE array.
	 */
	/* Set the flag bits to 0 (5 upper bits), keep the 3 low bits */
	iv[be_index(0)] &= 0x7;
	/* Get the size of the length field (can be from 2 to 8) */
	l = iv[be_index(0)] + 1;
	/* Set Q to 0 */
	for (i = 15; i >= 15 - l + 1; i--)
		iv[be_index(i)] = 0;
	/* Save CTR0 */
	memcpy(ctx->ctr0_ccm, iv, sizeof(b0));
	/* Increment Q */
	iv[be_index(15)] |= 0x1;

	cryp_write_iv(ctx);

	/* Enable the CRYP peripheral */
	io_setbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);

	res = write_align_block(ctx, b0);

	return res;
}

static TEE_Result do_from_init_to_phase(struct stm32_cryp_context *ctx,
					uint32_t new_phase)
{
	TEE_Result res = TEE_SUCCESS;

	/*
	 * The init phase has not run yet.
	 * CCM needs a specific restore_context sequence for the init phase.
	 */
	if (IS_ALGOMODE(ctx->cr, AES_CCM))
		res = ccm_first_context(ctx);
	else
		res = restore_context(ctx);

	if (res)
		return res;

	res = wait_end_enable(ctx->base);
	if (res)
		return res;

	/* Move to 'new_phase' */
	io_clrsetbits32(ctx->base + _CRYP_CR, _CRYP_CR_GCM_CCMPH_MSK,
			new_phase << _CRYP_CR_GCM_CCMPH_OFF);

	/* Enable the CRYP peripheral (the init phase disabled it) */
	io_setbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);

	return TEE_SUCCESS;
}

static TEE_Result do_from_header_to_phase(struct stm32_cryp_context *ctx,
					  uint32_t new_phase)
{
	TEE_Result res = TEE_SUCCESS;

	res = restore_context(ctx);
	if (res)
		return res;

	if (ctx->extra_size) {
		/* Manage unaligned header data before moving to next phase */
		memset((uint8_t *)ctx->extra + ctx->extra_size, 0,
		       ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size);

		res = write_align_block(ctx, ctx->extra);
		if (res)
			return res;

		ctx->assoc_len += ctx->extra_size * INT8_BIT;
		ctx->extra_size = 0;
	}

	/* Move to 'new_phase' */
	io_clrsetbits32(ctx->base + _CRYP_CR, _CRYP_CR_GCM_CCMPH_MSK,
			new_phase << _CRYP_CR_GCM_CCMPH_OFF);

	return TEE_SUCCESS;
}
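
/*
 * Phase sequencing implemented by the helpers above: the GCM/CCM engine
 * moves through
 *
 *	INIT -> HEADER (AAD) -> PAYLOAD -> FINAL (tag)
 *
 * do_from_init_to_phase() leaves INIT (for CCM this first absorbs B0 via
 * ccm_first_context()), and do_from_header_to_phase() leaves HEADER after
 * flushing any zero-padded partial AAD block. Phases may be skipped: with
 * no AAD, stm32_cryp_update_load() goes straight from INIT to PAYLOAD.
 */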

/**
 * @brief Start an AES or DES/TDES computation.
 * @param ctx: CRYP process context
 * @param is_dec: true if decryption, false if encryption
 * @param algo: defines the algo mode
 * @param key: pointer to the key
 * @param key_size: key size
 * @param iv: pointer to the initialization vector (unused if algo is ECB)
 * @param iv_size: iv size
 * @note this function does not access the hardware, it only stores the
 * configuration in ctx
 *
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_cryp_init(struct stm32_cryp_context *ctx, bool is_dec,
			   enum stm32_cryp_algo_mode algo,
			   const void *key, size_t key_size, const void *iv,
			   size_t iv_size)
{
	unsigned int i = 0;
	const uint32_t *iv_u32 = NULL;
	uint32_t local_iv[4] = { 0 };
	const uint32_t *key_u32 = NULL;
	uint32_t local_key[8] = { 0 };

	ctx->assoc_len = 0;
	ctx->load_len = 0;
	ctx->extra_size = 0;
	ctx->lock = &cryp_lock;

	ctx->base = io_pa_or_va(&cryp_pdata.base, 1);
	ctx->cr = _CRYP_CR_RESET_VALUE;

	/* We want the buffers to be u32 aligned */
	if (IS_ALIGNED_WITH_TYPE(key, uint32_t)) {
		key_u32 = key;
	} else {
		memcpy(local_key, key, key_size);
		key_u32 = local_key;
	}

	if (IS_ALIGNED_WITH_TYPE(iv, uint32_t)) {
		iv_u32 = iv;
	} else {
		memcpy(local_iv, iv, iv_size);
		iv_u32 = local_iv;
	}

	if (is_dec)
		SETBITS(ctx->cr, _CRYP_CR_ALGODIR);
	else
		CLRBITS(ctx->cr, _CRYP_CR_ALGODIR);

	/* Save the algo mode */
	switch (algo) {
	case STM32_CRYP_MODE_TDES_ECB:
		SET_ALGOMODE(TDES_ECB, ctx->cr);
		break;
	case STM32_CRYP_MODE_TDES_CBC:
		SET_ALGOMODE(TDES_CBC, ctx->cr);
		break;
	case STM32_CRYP_MODE_DES_ECB:
		SET_ALGOMODE(DES_ECB, ctx->cr);
		break;
	case STM32_CRYP_MODE_DES_CBC:
		SET_ALGOMODE(DES_CBC, ctx->cr);
		break;
	case STM32_CRYP_MODE_AES_ECB:
		SET_ALGOMODE(AES_ECB, ctx->cr);
		break;
	case STM32_CRYP_MODE_AES_CBC:
		SET_ALGOMODE(AES_CBC, ctx->cr);
		break;
	case STM32_CRYP_MODE_AES_CTR:
		SET_ALGOMODE(AES_CTR, ctx->cr);
		break;
	case STM32_CRYP_MODE_AES_GCM:
		SET_ALGOMODE(AES_GCM, ctx->cr);
		break;
	case STM32_CRYP_MODE_AES_CCM:
		SET_ALGOMODE(AES_CCM, ctx->cr);
		break;
	default:
		return TEE_ERROR_BAD_PARAMETERS;
	}

	/*
	 * We use the HW byte swap (_CRYP_CR_DATATYPE_BYTE) for the data,
	 * so we need neither TOBE32(data) before writing to DIN nor
	 * FROMBE32(data) after reading from DOUT.
	 */
	clrsetbits(&ctx->cr, _CRYP_CR_DATATYPE_MSK,
		   _CRYP_CR_DATATYPE_BYTE << _CRYP_CR_DATATYPE_OFF);

	/*
	 * Configure the key size for AES algorithms
	 * and save the block size.
	 */
	if (algo_mode_is_aes(ctx->cr)) {
		switch (key_size) {
		case AES_KEYSIZE_128:
			clrsetbits(&ctx->cr, _CRYP_CR_KEYSIZE_MSK,
				   _CRYP_CR_KSIZE_128 << _CRYP_CR_KEYSIZE_OFF);
			break;
		case AES_KEYSIZE_192:
			clrsetbits(&ctx->cr, _CRYP_CR_KEYSIZE_MSK,
				   _CRYP_CR_KSIZE_192 << _CRYP_CR_KEYSIZE_OFF);
			break;
		case AES_KEYSIZE_256:
			clrsetbits(&ctx->cr, _CRYP_CR_KEYSIZE_MSK,
				   _CRYP_CR_KSIZE_256 << _CRYP_CR_KEYSIZE_OFF);
			break;
		default:
			return TEE_ERROR_BAD_PARAMETERS;
		}

		/* And set the AES block size */
		ctx->block_u32 = AES_BLOCK_NB_U32;
	} else {
		/* And set the DES/TDES block size */
		ctx->block_u32 = DES_BLOCK_NB_U32;
	}

	/* Save the key in HW order */
	ctx->key_size = key_size;
	for (i = 0; i < key_size / sizeof(uint32_t); i++)
		ctx->key[i] = TOBE32(key_u32[i]);

	/* Save the IV */
	if (algo_mode_needs_iv(ctx->cr)) {
		if (!iv || iv_size != ctx->block_u32 * sizeof(uint32_t))
			return TEE_ERROR_BAD_PARAMETERS;

		/*
		 * We save the IV in the byte order expected by the
		 * IV registers.
		 */
		for (i = 0; i < ctx->block_u32; i++)
			ctx->iv[i] = TOBE32(iv_u32[i]);
	}

	/* Reset the suspend registers */
	memset(ctx->pm_gcmccm, 0, sizeof(ctx->pm_gcmccm));
	memset(ctx->pm_gcm, 0, sizeof(ctx->pm_gcm));

	return TEE_SUCCESS;
}
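
/*
 * Illustrative sketch (not part of the driver): a minimal AES-CBC
 * encryption of a single 16-byte block through the exported API. The
 * buffer contents are hypothetical and error handling is elided:
 *
 *	struct stm32_cryp_context ctx = { };
 *	TEE_Result res = TEE_SUCCESS;
 *	uint8_t key[AES_KEYSIZE_128] = { ... };
 *	uint8_t iv[AES_BLOCK_SIZE] = { ... };
 *	uint8_t in[AES_BLOCK_SIZE] = { ... };
 *	uint8_t out[AES_BLOCK_SIZE] = { };
 *
 *	res = stm32_cryp_init(&ctx, false, STM32_CRYP_MODE_AES_CBC,
 *			      key, sizeof(key), iv, sizeof(iv));
 *	if (!res)
 *		res = stm32_cryp_update(&ctx, true, in, out, sizeof(in));
 */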

/**
 * @brief Update (or start) an AES authentication process with
 * associated data (CCM or GCM).
 * @param ctx: CRYP process context
 * @param data: pointer to the associated data
 * @param data_size: data size
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_cryp_update_assodata(struct stm32_cryp_context *ctx,
				      uint8_t *data, size_t data_size)
{
	TEE_Result res = TEE_SUCCESS;
	unsigned int i = 0;
	uint32_t previous_phase = 0;

	/* If there is no associated data, nothing to do */
	if (!data || !data_size)
		return TEE_SUCCESS;

	mutex_lock(ctx->lock);

	previous_phase = (ctx->cr & _CRYP_CR_GCM_CCMPH_MSK) >>
			 _CRYP_CR_GCM_CCMPH_OFF;

	switch (previous_phase) {
	case _CRYP_CR_GCM_CCMPH_INIT:
		res = do_from_init_to_phase(ctx, _CRYP_CR_GCM_CCMPH_HEADER);
		break;
	case _CRYP_CR_GCM_CCMPH_HEADER:
		/*
		 * update_assodata() was already called.
		 * We only need to restore the context.
		 */
		res = restore_context(ctx);
		break;
	default:
		assert(0);
		res = TEE_ERROR_BAD_STATE;
	}

	if (res)
		goto out;

	/* Manage remaining data from a previous update_assodata() call */
	if (ctx->extra_size &&
	    (ctx->extra_size + data_size >=
	     ctx->block_u32 * sizeof(uint32_t))) {
		uint32_t block[MAX_BLOCK_NB_U32] = { 0 };

		memcpy(block, ctx->extra, ctx->extra_size);
		memcpy((uint8_t *)block + ctx->extra_size, data,
		       ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size);

		res = write_align_block(ctx, block);
		if (res)
			goto out;

		i += ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size;
		ctx->extra_size = 0;
		ctx->assoc_len += ctx->block_u32 * sizeof(uint32_t) * INT8_BIT;
	}

	while (data_size - i >= ctx->block_u32 * sizeof(uint32_t)) {
		res = write_block(ctx, data + i);
		if (res)
			goto out;

		/* Process the next block */
		i += ctx->block_u32 * sizeof(uint32_t);
		ctx->assoc_len += ctx->block_u32 * sizeof(uint32_t) * INT8_BIT;
	}

	/*
	 * Manage the last block if it is not a multiple of the block size:
	 * save the remaining data to manage it later (potentially with new
	 * associated data).
	 */
	if (i < data_size) {
		memcpy((uint8_t *)ctx->extra + ctx->extra_size, data + i,
		       data_size - i);
		ctx->extra_size += data_size - i;
	}

	res = save_context(ctx);
out:
	if (res)
		cryp_end(ctx, res);

	mutex_unlock(ctx->lock);

	return res;
}

/**
 * @brief Update (or start) an AES authenticated de/encryption of
 * payload data (CCM or GCM).
 * @param ctx: CRYP process context
 * @param data_in: pointer to the payload
 * @param data_out: pointer where to save the de/encrypted payload
 * @param data_size: payload size
 *
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_cryp_update_load(struct stm32_cryp_context *ctx,
				  uint8_t *data_in, uint8_t *data_out,
				  size_t data_size)
{
	TEE_Result res = TEE_SUCCESS;
	unsigned int i = 0;
	uint32_t previous_phase = 0;

	if (!data_in || !data_size)
		return TEE_SUCCESS;

	mutex_lock(ctx->lock);

	previous_phase = (ctx->cr & _CRYP_CR_GCM_CCMPH_MSK) >>
			 _CRYP_CR_GCM_CCMPH_OFF;

	switch (previous_phase) {
	case _CRYP_CR_GCM_CCMPH_INIT:
		res = do_from_init_to_phase(ctx, _CRYP_CR_GCM_CCMPH_PAYLOAD);
		break;
	case _CRYP_CR_GCM_CCMPH_HEADER:
		res = do_from_header_to_phase(ctx, _CRYP_CR_GCM_CCMPH_PAYLOAD);
		break;
	case _CRYP_CR_GCM_CCMPH_PAYLOAD:
		/* New update_load() call, we only need to restore context */
		res = restore_context(ctx);
		break;
	default:
		assert(0);
		res = TEE_ERROR_BAD_STATE;
	}

	if (res)
		goto out;

	/* Manage an incomplete block from a previous update_load() call */
	if (ctx->extra_size &&
	    (ctx->extra_size + data_size >=
	     ctx->block_u32 * sizeof(uint32_t))) {
		uint32_t block_out[MAX_BLOCK_NB_U32] = { 0 };

		memcpy((uint8_t *)ctx->extra + ctx->extra_size, data_in + i,
		       ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size);

		res = write_align_block(ctx, ctx->extra);
		if (res)
			goto out;

		res = read_align_block(ctx, block_out);
		if (res)
			goto out;

		memcpy(data_out + i, (uint8_t *)block_out + ctx->extra_size,
		       ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size);

		i += ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size;
		ctx->extra_size = 0;

		ctx->load_len += ctx->block_u32 * sizeof(uint32_t) * INT8_BIT;
	}

	while (data_size - i >= ctx->block_u32 * sizeof(uint32_t)) {
		res = write_block(ctx, data_in + i);
		if (res)
			goto out;

		res = read_block(ctx, data_out + i);
		if (res)
			goto out;

		/* Process the next block */
		i += ctx->block_u32 * sizeof(uint32_t);
		ctx->load_len += ctx->block_u32 * sizeof(uint32_t) * INT8_BIT;
	}

	res = save_context(ctx);
	if (res)
		goto out;

	/*
	 * Manage the last block if it is not a multiple of the block size.
	 * We saved the context above, so:
	 * complete the block with zeros and send it to the CRYP to get the
	 * {en,de}crypted data, then store the input so it can be resent as
	 * the last block in final(), or used to complete the next
	 * update_load(), to get a correct tag.
	 */
	if (i < data_size) {
		uint32_t block_out[MAX_BLOCK_NB_U32] = { 0 };
		size_t prev_extra_size = ctx->extra_size;

		/* Re-enable the CRYP peripheral */
		io_setbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);

		memcpy((uint8_t *)ctx->extra + ctx->extra_size, data_in + i,
		       data_size - i);
		ctx->extra_size += data_size - i;
		memset((uint8_t *)ctx->extra + ctx->extra_size, 0,
		       ctx->block_u32 * sizeof(uint32_t) - ctx->extra_size);

		res = write_align_block(ctx, ctx->extra);
		if (res)
			goto out;

		res = read_align_block(ctx, block_out);
		if (res)
			goto out;

		memcpy(data_out + i, (uint8_t *)block_out + prev_extra_size,
		       data_size - i);

		/* Disable the CRYP peripheral */
		io_clrbits32(ctx->base + _CRYP_CR, _CRYP_CR_CRYPEN);
	}

out:
	if (res)
		cryp_end(ctx, res);

	mutex_unlock(ctx->lock);

	return res;
}

/**
 * @brief Get the authentication tag for AES authenticated algorithms
 * (CCM or GCM).
 * @param ctx: CRYP process context
 * @param tag: pointer where to save the tag
 * @param tag_size: tag size
 *
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_cryp_final(struct stm32_cryp_context *ctx, uint8_t *tag,
			    size_t tag_size)
{
	TEE_Result res = TEE_SUCCESS;
	uint32_t tag_u32[4] = { 0 };
	uint32_t previous_phase = 0;

	mutex_lock(ctx->lock);

	previous_phase = (ctx->cr & _CRYP_CR_GCM_CCMPH_MSK) >>
			 _CRYP_CR_GCM_CCMPH_OFF;

	switch (previous_phase) {
	case _CRYP_CR_GCM_CCMPH_INIT:
		res = do_from_init_to_phase(ctx, _CRYP_CR_GCM_CCMPH_FINAL);
		break;
	case _CRYP_CR_GCM_CCMPH_HEADER:
		res = do_from_header_to_phase(ctx, _CRYP_CR_GCM_CCMPH_FINAL);
		break;
	case _CRYP_CR_GCM_CCMPH_PAYLOAD:
		res = restore_context(ctx);
		if (res)
			break;

		/* Manage an incomplete block from a previous update_load() */
		if (ctx->extra_size) {
			uint32_t block_out[MAX_BLOCK_NB_U32] = { 0 };
			size_t sz = ctx->block_u32 * sizeof(uint32_t) -
				    ctx->extra_size;

			if (does_need_npblb(ctx->cr)) {
				io_clrsetbits32(ctx->base + _CRYP_CR,
						_CRYP_CR_NPBLB_MSK,
						sz << _CRYP_CR_NPBLB_OFF);
			}

			memset((uint8_t *)ctx->extra + ctx->extra_size, 0, sz);

			res = write_align_block(ctx, ctx->extra);
			if (res)
				break;

			/*
			 * The {en,de}crypted data is discarded, it was
			 * already saved by update_load().
			 */
			res = read_align_block(ctx, block_out);
			if (res)
				break;

			ctx->load_len += ctx->extra_size * INT8_BIT;
			ctx->extra_size = 0;
		}

		/* Move to the final phase */
		io_clrsetbits32(ctx->base + _CRYP_CR, _CRYP_CR_GCM_CCMPH_MSK,
				_CRYP_CR_GCM_CCMPH_FINAL <<
				_CRYP_CR_GCM_CCMPH_OFF);
		break;
	default:
		assert(0);
		res = TEE_ERROR_BAD_STATE;
	}

	if (res)
		goto out;

	if (IS_ALGOMODE(ctx->cr, AES_GCM)) {
		/* No need to htobe() as we configure the HW to swap bytes */
		io_write32(ctx->base + _CRYP_DIN, 0U);
		io_write32(ctx->base + _CRYP_DIN, ctx->assoc_len);
		io_write32(ctx->base + _CRYP_DIN, 0U);
		io_write32(ctx->base + _CRYP_DIN, ctx->load_len);
	} else if (IS_ALGOMODE(ctx->cr, AES_CCM)) {
		/* No need to htobe() in this phase */
		res = write_align_block(ctx, ctx->ctr0_ccm);
		if (res)
			goto out;
	}

	res = read_align_block(ctx, tag_u32);
	if (res)
		goto out;

	memcpy(tag, tag_u32, MIN(sizeof(tag_u32), tag_size));

out:
	cryp_end(ctx, res);
	mutex_unlock(ctx->lock);

	return res;
}
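
/*
 * Illustrative sketch (not part of the driver): a complete AES-GCM
 * encryption pass over the exported API. Buffers and sizes are
 * hypothetical, error handling is elided:
 *
 *	struct stm32_cryp_context ctx = { };
 *	uint8_t tag[16] = { };
 *
 *	res = stm32_cryp_init(&ctx, false, STM32_CRYP_MODE_AES_GCM,
 *			      key, sizeof(key), iv, sizeof(iv));
 *	res = stm32_cryp_update_assodata(&ctx, aad, aad_size);
 *	res = stm32_cryp_update_load(&ctx, plain, cipher, plain_size);
 *	res = stm32_cryp_final(&ctx, tag, sizeof(tag));
 *
 * update_assodata() and update_load() may each be called several times;
 * partial blocks are buffered in ctx->extra between calls.
 */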

/**
 * @brief Update (or start) a de/encryption process.
 * @param ctx: CRYP process context
 * @param last_block: true if last payload data block
 * @param data_in: pointer to the payload
 * @param data_out: pointer where to save the de/encrypted payload
 * @param data_size: payload size
 *
 * @retval TEE_SUCCESS if OK.
 */
TEE_Result stm32_cryp_update(struct stm32_cryp_context *ctx, bool last_block,
			     uint8_t *data_in, uint8_t *data_out,
			     size_t data_size)
{
	TEE_Result res = TEE_SUCCESS;
	unsigned int i = 0;

	mutex_lock(ctx->lock);

	/*
	 * In CBC and ECB encryption we would need to specifically manage
	 * the last 2 blocks if the total size is not aligned to the block
	 * size. This is currently not implemented, so we return
	 * TEE_ERROR_NOT_IMPLEMENTED. Moreover, as we would need to know the
	 * last 2 blocks, if the size is unaligned and we are called with
	 * less than two blocks, return TEE_ERROR_BAD_STATE.
	 */
	if (last_block && algo_mode_is_ecb_cbc(ctx->cr) &&
	    is_encrypt(ctx->cr) &&
	    (ROUNDDOWN(data_size, ctx->block_u32 * sizeof(uint32_t)) !=
	     data_size)) {
		if (data_size < ctx->block_u32 * sizeof(uint32_t) * 2) {
			/*
			 * For CBC, the size of the last part should be at
			 * least 2*BLOCK_SIZE.
			 */
			EMSG("Unexpected last block size");
			res = TEE_ERROR_BAD_STATE;
			goto out;
		}
		/*
		 * Moreover, the ECB/CBC specific padding for encryption is
		 * not yet implemented, and not used in OP-TEE.
		 */
		res = TEE_ERROR_NOT_IMPLEMENTED;
		goto out;
	}

	/* Manage a remaining CTR mask from a previous update call */
	if (IS_ALGOMODE(ctx->cr, AES_CTR) && ctx->extra_size) {
		unsigned int j = 0;
		uint8_t *mask = (uint8_t *)ctx->extra;

		for (j = 0; j < ctx->extra_size && i < data_size; j++, i++)
			data_out[i] = data_in[i] ^ mask[j];

		if (j != ctx->extra_size) {
			/*
			 * We didn't consume the whole saved mask, but we
			 * are out of data.
			 */

			/* Save the remaining mask and its new size */
			memmove(ctx->extra, ctx->extra + j,
				ctx->extra_size - j);
			ctx->extra_size -= j;

			/*
			 * We don't need to save the HW context as we did
			 * not modify the HW state.
			 */
			res = TEE_SUCCESS;
			goto out;
		}

		/* All the extra mask was consumed */
		ctx->extra_size = 0;
	}

	res = restore_context(ctx);
	if (res)
		goto out;

	while (data_size - i >= ctx->block_u32 * sizeof(uint32_t)) {
		/*
		 * We only write/read one block at a time even though the
		 * CRYP input (and output) FIFO is 8 * uint32_t deep.
		 */
		res = write_block(ctx, data_in + i);
		if (res)
			goto out;

		res = read_block(ctx, data_out + i);
		if (res)
			goto out;

		/* Process the next block */
		i += ctx->block_u32 * sizeof(uint32_t);
	}

	/* Manage the last block if it is not a multiple of the block size */
	if (i < data_size) {
		uint32_t block_in[MAX_BLOCK_NB_U32] = { 0 };
		uint32_t block_out[MAX_BLOCK_NB_U32] = { 0 };

		if (!IS_ALGOMODE(ctx->cr, AES_CTR)) {
			/*
			 * Algorithms other than CTR can only manage
			 * multiples of the block size.
			 */
			res = TEE_ERROR_BAD_PARAMETERS;
			goto out;
		}

		/*
		 * For CTR we save the generated mask to use it in the next
		 * update call.
		 */
		memcpy(block_in, data_in + i, data_size - i);

		res = write_align_block(ctx, block_in);
		if (res)
			goto out;

		res = read_align_block(ctx, block_out);
		if (res)
			goto out;

		memcpy(data_out + i, block_out, data_size - i);

		/* Save the mask for a possible next call */
		ctx->extra_size = ctx->block_u32 * sizeof(uint32_t) -
				  (data_size - i);
		memcpy(ctx->extra, (uint8_t *)block_out + data_size - i,
		       ctx->extra_size);
	}

	if (!last_block)
		res = save_context(ctx);

out:
	/* If last block or error, end the CRYP process */
	if (last_block || res)
		cryp_end(ctx, res);

	mutex_unlock(ctx->lock);

	return res;
}
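
/*
 * Illustrative sketch (not part of the driver): CTR mode accepts sizes
 * that are not block aligned; the unused keystream bytes are kept in
 * ctx->extra and consumed by the next call, so the two sequences below
 * produce the same output (hypothetical buffers, error handling elided):
 *
 *	stm32_cryp_update(&ctx, true, in, out, 24);
 * and
 *	stm32_cryp_update(&ctx, false, in, out, 20);
 *	stm32_cryp_update(&ctx, true, in + 20, out + 20, 4);
 */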

static int fdt_stm32_cryp(struct stm32_cryp_platdata *pdata)
{
	int node = -1;
	struct dt_node_info dt_cryp = { };
	void *fdt = NULL;

	fdt = get_embedded_dt();
	if (!fdt)
		return -FDT_ERR_NOTFOUND;

	node = fdt_node_offset_by_compatible(fdt, node, "st,stm32mp1-cryp");
	if (node < 0) {
		EMSG("No CRYP entry in DT");
		return -FDT_ERR_NOTFOUND;
	}

	_fdt_fill_device_info(fdt, &dt_cryp, node);

	if (dt_cryp.status == DT_STATUS_DISABLED)
		return -FDT_ERR_NOTFOUND;

	if (dt_cryp.clock == DT_INFO_INVALID_CLOCK ||
	    dt_cryp.reg == DT_INFO_INVALID_REG)
		return -FDT_ERR_BADVALUE;

	pdata->base.pa = dt_cryp.reg;
	io_pa_or_va_secure(&pdata->base, 1);

	pdata->clock_id = (unsigned long)dt_cryp.clock;

	if (rstctrl_dt_get_by_index(fdt, node, 0, &pdata->reset) != TEE_SUCCESS)
		panic();

	return 0;
}

static TEE_Result stm32_cryp_driver_init(void)
{
	TEE_Result res = TEE_SUCCESS;

	switch (fdt_stm32_cryp(&cryp_pdata)) {
	case 0:
		break;
	case -FDT_ERR_NOTFOUND:
		return TEE_SUCCESS;
	default:
		panic();
	}

	stm32mp_register_secure_periph_iomem(cryp_pdata.base.pa);

	stm32_clock_enable(cryp_pdata.clock_id);

	if (rstctrl_assert_to(cryp_pdata.reset, TIMEOUT_US_1MS))
		panic();

	if (rstctrl_deassert_to(cryp_pdata.reset, TIMEOUT_US_1MS))
		panic();

	if (IS_ENABLED(CFG_CRYPTO_DRV_AUTHENC)) {
		res = stm32_register_authenc();
		if (res) {
			EMSG("Failed to register to authenc: %#"PRIx32, res);
			panic();
		}
	}

	if (IS_ENABLED(CFG_CRYPTO_DRV_CIPHER)) {
		res = stm32_register_cipher();
		if (res) {
			EMSG("Failed to register to cipher: %#"PRIx32, res);
			panic();
		}
	}

	return TEE_SUCCESS;
}

driver_init(stm32_cryp_driver_init);
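
/*
 * Illustrative sketch (not part of the driver): the kind of device-tree
 * node fdt_stm32_cryp() expects. The exact address, clock and reset
 * identifiers below are hypothetical and depend on the SoC dtsi:
 *
 *	cryp1: cryp@54001000 {
 *		compatible = "st,stm32mp1-cryp";
 *		reg = <0x54001000 0x400>;
 *		clocks = <&rcc CRYP1>;
 *		resets = <&rcc CRYP1_R>;
 *	};
 *
 * A missing node, or one with status = "disabled", makes the probe
 * return early without registering to the authenc/cipher layers.
 */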