// SPDX-License-Identifier: BSD-2-Clause
/*
 * Copyright (c) 2017, Linaro Limited
 * Copyright 2020 NXP
 * Copyright 2021, SumUp Service GmbH
 */

#include <assert.h>
#include <compiler.h>
#include <crypto/crypto.h>
#include <crypto/crypto_impl.h>
#include <kernel/panic.h>
#include <stdlib.h>
#include <string.h>
#include <utee_defines.h>

TEE_Result crypto_hash_alloc_ctx(void **ctx, uint32_t algo)
{
	TEE_Result res = TEE_ERROR_NOT_IMPLEMENTED;
	struct crypto_hash_ctx *c = NULL;

	/*
	 * Use default cryptographic implementation if no matching
	 * drvcrypt device.
	 */
	res = drvcrypt_hash_alloc_ctx(&c, algo);

	if (res == TEE_ERROR_NOT_IMPLEMENTED) {
		switch (algo) {
		case TEE_ALG_MD5:
			res = crypto_md5_alloc_ctx(&c);
			break;
		case TEE_ALG_SHA1:
			res = crypto_sha1_alloc_ctx(&c);
			break;
		case TEE_ALG_SHA224:
			res = crypto_sha224_alloc_ctx(&c);
			break;
		case TEE_ALG_SHA256:
			res = crypto_sha256_alloc_ctx(&c);
			break;
		case TEE_ALG_SHA384:
			res = crypto_sha384_alloc_ctx(&c);
			break;
		case TEE_ALG_SHA512:
			res = crypto_sha512_alloc_ctx(&c);
			break;
		case TEE_ALG_SM3:
			res = crypto_sm3_alloc_ctx(&c);
			break;
		default:
			break;
		}
	}

	if (!res)
		*ctx = c;

	return res;
}

static const struct crypto_hash_ops *hash_ops(void *ctx)
{
	struct crypto_hash_ctx *c = ctx;

	assert(c && c->ops);

	return c->ops;
}

void crypto_hash_free_ctx(void *ctx)
{
	if (ctx)
		hash_ops(ctx)->free_ctx(ctx);
}

void crypto_hash_copy_state(void *dst_ctx, void *src_ctx)
{
	hash_ops(dst_ctx)->copy_state(dst_ctx, src_ctx);
}

TEE_Result crypto_hash_init(void *ctx)
{
	return hash_ops(ctx)->init(ctx);
}

TEE_Result crypto_hash_update(void *ctx, const uint8_t *data, size_t len)
{
	return hash_ops(ctx)->update(ctx, data, len);
}

TEE_Result crypto_hash_final(void *ctx, uint8_t *digest, size_t len)
{
	return hash_ops(ctx)->final(ctx, digest, len);
}

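/*
 * Example (illustrative sketch only, not part of this file's API): a caller
 * is expected to drive the hash hooks above roughly as below. "data" and
 * "data_len" stand for a caller-supplied buffer; error handling is trimmed.
 *
 *	void *ctx = NULL;
 *	uint8_t digest[TEE_SHA256_HASH_SIZE] = { 0 };
 *
 *	if (crypto_hash_alloc_ctx(&ctx, TEE_ALG_SHA256))
 *		return;
 *	if (!crypto_hash_init(ctx) &&
 *	    !crypto_hash_update(ctx, data, data_len))
 *		crypto_hash_final(ctx, digest, sizeof(digest));
 *	crypto_hash_free_ctx(ctx);
 */
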
TEE_Result crypto_cipher_alloc_ctx(void **ctx, uint32_t algo)
{
	TEE_Result res = TEE_ERROR_NOT_IMPLEMENTED;
	struct crypto_cipher_ctx *c = NULL;

	/*
	 * Use default cryptographic implementation if no matching
	 * drvcrypt device.
	 */
	res = drvcrypt_cipher_alloc_ctx(&c, algo);

	if (res == TEE_ERROR_NOT_IMPLEMENTED) {
		switch (algo) {
		case TEE_ALG_AES_ECB_NOPAD:
			res = crypto_aes_ecb_alloc_ctx(&c);
			break;
		case TEE_ALG_AES_CBC_NOPAD:
			res = crypto_aes_cbc_alloc_ctx(&c);
			break;
		case TEE_ALG_AES_CTR:
			res = crypto_aes_ctr_alloc_ctx(&c);
			break;
		case TEE_ALG_AES_CTS:
			res = crypto_aes_cts_alloc_ctx(&c);
			break;
		case TEE_ALG_AES_XTS:
			res = crypto_aes_xts_alloc_ctx(&c);
			break;
		case TEE_ALG_DES_ECB_NOPAD:
			res = crypto_des_ecb_alloc_ctx(&c);
			break;
		case TEE_ALG_DES3_ECB_NOPAD:
			res = crypto_des3_ecb_alloc_ctx(&c);
			break;
		case TEE_ALG_DES_CBC_NOPAD:
			res = crypto_des_cbc_alloc_ctx(&c);
			break;
		case TEE_ALG_DES3_CBC_NOPAD:
			res = crypto_des3_cbc_alloc_ctx(&c);
			break;
		case TEE_ALG_SM4_ECB_NOPAD:
			res = crypto_sm4_ecb_alloc_ctx(&c);
			break;
		case TEE_ALG_SM4_CBC_NOPAD:
			res = crypto_sm4_cbc_alloc_ctx(&c);
			break;
		case TEE_ALG_SM4_CTR:
			res = crypto_sm4_ctr_alloc_ctx(&c);
			break;
		default:
			return TEE_ERROR_NOT_IMPLEMENTED;
		}
	}

	if (!res)
		*ctx = c;

	return res;
}

static const struct crypto_cipher_ops *cipher_ops(void *ctx)
{
	struct crypto_cipher_ctx *c = ctx;

	assert(c && c->ops);

	return c->ops;
}

void crypto_cipher_free_ctx(void *ctx)
{
	if (ctx)
		cipher_ops(ctx)->free_ctx(ctx);
}

void crypto_cipher_copy_state(void *dst_ctx, void *src_ctx)
{
	cipher_ops(dst_ctx)->copy_state(dst_ctx, src_ctx);
}

TEE_Result crypto_cipher_init(void *ctx, TEE_OperationMode mode,
			      const uint8_t *key1, size_t key1_len,
			      const uint8_t *key2, size_t key2_len,
			      const uint8_t *iv, size_t iv_len)
{
	if (mode != TEE_MODE_DECRYPT && mode != TEE_MODE_ENCRYPT)
		return TEE_ERROR_BAD_PARAMETERS;

	return cipher_ops(ctx)->init(ctx, mode, key1, key1_len, key2, key2_len,
				     iv, iv_len);
}

TEE_Result crypto_cipher_update(void *ctx, TEE_OperationMode mode __unused,
				bool last_block, const uint8_t *data,
				size_t len, uint8_t *dst)
{
	return cipher_ops(ctx)->update(ctx, last_block, data, len, dst);
}

void crypto_cipher_final(void *ctx)
{
	cipher_ops(ctx)->final(ctx);
}

TEE_Result crypto_cipher_get_block_size(uint32_t algo, size_t *size)
{
	uint32_t class = TEE_ALG_GET_CLASS(algo);

	if (class != TEE_OPERATION_CIPHER && class != TEE_OPERATION_MAC &&
	    class != TEE_OPERATION_AE)
		return TEE_ERROR_BAD_PARAMETERS;

	switch (TEE_ALG_GET_MAIN_ALG(algo)) {
	case TEE_MAIN_ALGO_AES:
		*size = TEE_AES_BLOCK_SIZE;
		return TEE_SUCCESS;
	case TEE_MAIN_ALGO_DES:
	case TEE_MAIN_ALGO_DES3:
		*size = TEE_DES_BLOCK_SIZE;
		return TEE_SUCCESS;
	case TEE_MAIN_ALGO_SM4:
		*size = TEE_SM4_BLOCK_SIZE;
		return TEE_SUCCESS;
	default:
		return TEE_ERROR_NOT_SUPPORTED;
	}
}

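/*
 * Example (illustrative sketch only): one-shot AES-CTR encryption with the
 * cipher hooks above. "key", "key_len", "iv", "src", "len" and "dst" are
 * assumed to be caller-provided; error handling is trimmed.
 *
 *	void *ctx = NULL;
 *
 *	if (crypto_cipher_alloc_ctx(&ctx, TEE_ALG_AES_CTR))
 *		return;
 *	if (!crypto_cipher_init(ctx, TEE_MODE_ENCRYPT, key, key_len,
 *				NULL, 0, iv, TEE_AES_BLOCK_SIZE) &&
 *	    !crypto_cipher_update(ctx, TEE_MODE_ENCRYPT, true, src, len, dst))
 *		crypto_cipher_final(ctx);
 *	crypto_cipher_free_ctx(ctx);
 */
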
TEE_Result crypto_mac_alloc_ctx(void **ctx, uint32_t algo)
{
	TEE_Result res = TEE_SUCCESS;
	struct crypto_mac_ctx *c = NULL;

	/*
	 * Use default cryptographic implementation if no matching
	 * drvcrypt device.
	 */
	res = drvcrypt_mac_alloc_ctx(&c, algo);

	if (res == TEE_ERROR_NOT_IMPLEMENTED) {
		switch (algo) {
		case TEE_ALG_HMAC_MD5:
			res = crypto_hmac_md5_alloc_ctx(&c);
			break;
		case TEE_ALG_HMAC_SHA1:
			res = crypto_hmac_sha1_alloc_ctx(&c);
			break;
		case TEE_ALG_HMAC_SHA224:
			res = crypto_hmac_sha224_alloc_ctx(&c);
			break;
		case TEE_ALG_HMAC_SHA256:
			res = crypto_hmac_sha256_alloc_ctx(&c);
			break;
		case TEE_ALG_HMAC_SHA384:
			res = crypto_hmac_sha384_alloc_ctx(&c);
			break;
		case TEE_ALG_HMAC_SHA512:
			res = crypto_hmac_sha512_alloc_ctx(&c);
			break;
		case TEE_ALG_HMAC_SM3:
			res = crypto_hmac_sm3_alloc_ctx(&c);
			break;
		case TEE_ALG_AES_CBC_MAC_NOPAD:
			res = crypto_aes_cbc_mac_nopad_alloc_ctx(&c);
			break;
		case TEE_ALG_AES_CBC_MAC_PKCS5:
			res = crypto_aes_cbc_mac_pkcs5_alloc_ctx(&c);
			break;
		case TEE_ALG_DES_CBC_MAC_NOPAD:
			res = crypto_des_cbc_mac_nopad_alloc_ctx(&c);
			break;
		case TEE_ALG_DES_CBC_MAC_PKCS5:
			res = crypto_des_cbc_mac_pkcs5_alloc_ctx(&c);
			break;
		case TEE_ALG_DES3_CBC_MAC_NOPAD:
			res = crypto_des3_cbc_mac_nopad_alloc_ctx(&c);
			break;
		case TEE_ALG_DES3_CBC_MAC_PKCS5:
			res = crypto_des3_cbc_mac_pkcs5_alloc_ctx(&c);
			break;
		case TEE_ALG_DES3_CMAC:
			res = crypto_des3_cmac_alloc_ctx(&c);
			break;
		case TEE_ALG_AES_CMAC:
			res = crypto_aes_cmac_alloc_ctx(&c);
			break;
		default:
			return TEE_ERROR_NOT_SUPPORTED;
		}
	}

	if (!res)
		*ctx = c;

	return res;
}

static const struct crypto_mac_ops *mac_ops(void *ctx)
{
	struct crypto_mac_ctx *c = ctx;

	assert(c && c->ops);

	return c->ops;
}

void crypto_mac_free_ctx(void *ctx)
{
	if (ctx)
		mac_ops(ctx)->free_ctx(ctx);
}

void crypto_mac_copy_state(void *dst_ctx, void *src_ctx)
{
	mac_ops(dst_ctx)->copy_state(dst_ctx, src_ctx);
}

TEE_Result crypto_mac_init(void *ctx, const uint8_t *key, size_t len)
{
	return mac_ops(ctx)->init(ctx, key, len);
}

TEE_Result crypto_mac_update(void *ctx, const uint8_t *data, size_t len)
{
	if (!len)
		return TEE_SUCCESS;

	return mac_ops(ctx)->update(ctx, data, len);
}

TEE_Result crypto_mac_final(void *ctx, uint8_t *digest, size_t digest_len)
{
	return mac_ops(ctx)->final(ctx, digest, digest_len);
}

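/*
 * Example (illustrative sketch only): computing an HMAC-SHA256 over a single
 * buffer with the MAC hooks above. "key", "key_len", "data" and "data_len"
 * are caller-supplied; error handling is trimmed.
 *
 *	void *ctx = NULL;
 *	uint8_t mac[TEE_SHA256_HASH_SIZE] = { 0 };
 *
 *	if (crypto_mac_alloc_ctx(&ctx, TEE_ALG_HMAC_SHA256))
 *		return;
 *	if (!crypto_mac_init(ctx, key, key_len) &&
 *	    !crypto_mac_update(ctx, data, data_len))
 *		crypto_mac_final(ctx, mac, sizeof(mac));
 *	crypto_mac_free_ctx(ctx);
 */
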
TEE_Result crypto_authenc_alloc_ctx(void **ctx, uint32_t algo)
{
	TEE_Result res = TEE_SUCCESS;
	struct crypto_authenc_ctx *c = NULL;

	switch (algo) {
#if defined(CFG_CRYPTO_CCM)
	case TEE_ALG_AES_CCM:
		res = crypto_aes_ccm_alloc_ctx(&c);
		break;
#endif
#if defined(CFG_CRYPTO_GCM)
	case TEE_ALG_AES_GCM:
		res = crypto_aes_gcm_alloc_ctx(&c);
		break;
#endif
	default:
		return TEE_ERROR_NOT_IMPLEMENTED;
	}

	if (!res)
		*ctx = c;

	return res;
}

static const struct crypto_authenc_ops *ae_ops(void *ctx)
{
	struct crypto_authenc_ctx *c = ctx;

	assert(c && c->ops);

	return c->ops;
}

TEE_Result crypto_authenc_init(void *ctx, TEE_OperationMode mode,
			       const uint8_t *key, size_t key_len,
			       const uint8_t *nonce, size_t nonce_len,
			       size_t tag_len, size_t aad_len,
			       size_t payload_len)
{
	return ae_ops(ctx)->init(ctx, mode, key, key_len, nonce, nonce_len,
				 tag_len, aad_len, payload_len);
}

TEE_Result crypto_authenc_update_aad(void *ctx,
				     TEE_OperationMode mode __unused,
				     const uint8_t *data, size_t len)
{
	return ae_ops(ctx)->update_aad(ctx, data, len);
}

TEE_Result crypto_authenc_update_payload(void *ctx, TEE_OperationMode mode,
					 const uint8_t *src_data,
					 size_t src_len, uint8_t *dst_data,
					 size_t *dst_len)
{
	if (*dst_len < src_len)
		return TEE_ERROR_SHORT_BUFFER;
	*dst_len = src_len;

	return ae_ops(ctx)->update_payload(ctx, mode, src_data, src_len,
					   dst_data);
}

TEE_Result crypto_authenc_enc_final(void *ctx, const uint8_t *src_data,
				    size_t src_len, uint8_t *dst_data,
				    size_t *dst_len, uint8_t *dst_tag,
				    size_t *dst_tag_len)
{
	if (*dst_len < src_len)
		return TEE_ERROR_SHORT_BUFFER;
	*dst_len = src_len;

	return ae_ops(ctx)->enc_final(ctx, src_data, src_len, dst_data,
				      dst_tag, dst_tag_len);
}

TEE_Result crypto_authenc_dec_final(void *ctx, const uint8_t *src_data,
				    size_t src_len, uint8_t *dst_data,
				    size_t *dst_len, const uint8_t *tag,
				    size_t tag_len)
{
	if (*dst_len < src_len)
		return TEE_ERROR_SHORT_BUFFER;
	*dst_len = src_len;

	return ae_ops(ctx)->dec_final(ctx, src_data, src_len, dst_data, tag,
				      tag_len);
}

void crypto_authenc_final(void *ctx)
{
	ae_ops(ctx)->final(ctx);
}

void crypto_authenc_free_ctx(void *ctx)
{
	if (ctx)
		ae_ops(ctx)->free_ctx(ctx);
}

void crypto_authenc_copy_state(void *dst_ctx, void *src_ctx)
{
	ae_ops(dst_ctx)->copy_state(dst_ctx, src_ctx);
}

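/*
 * Example (illustrative sketch only): one-shot AES-GCM encryption with the
 * authenc hooks above. Note the in/out convention checked above: *dst_len
 * must be primed with the destination capacity and is rewritten to the
 * number of bytes produced. "key", "nonce", "aad", "src" and "dst" are
 * caller-supplied; error handling is trimmed.
 *
 *	void *ctx = NULL;
 *	uint8_t tag[16] = { 0 };
 *	size_t tag_len = sizeof(tag);
 *	size_t dst_len = src_len;
 *
 *	if (crypto_authenc_alloc_ctx(&ctx, TEE_ALG_AES_GCM))
 *		return;
 *	if (!crypto_authenc_init(ctx, TEE_MODE_ENCRYPT, key, key_len, nonce,
 *				 nonce_len, tag_len, aad_len, src_len) &&
 *	    !crypto_authenc_update_aad(ctx, TEE_MODE_ENCRYPT, aad, aad_len))
 *		crypto_authenc_enc_final(ctx, src, src_len, dst, &dst_len,
 *					 tag, &tag_len);
 *	crypto_authenc_final(ctx);
 *	crypto_authenc_free_ctx(ctx);
 */
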
#if !defined(CFG_CRYPTO_RSA) && !defined(CFG_CRYPTO_DSA) && \
	!defined(CFG_CRYPTO_DH) && !defined(CFG_CRYPTO_ECC)
struct bignum *crypto_bignum_allocate(size_t size_bits __unused)
{
	return NULL;
}

TEE_Result crypto_bignum_bin2bn(const uint8_t *from __unused,
				size_t fromsize __unused,
				struct bignum *to __unused)
{
	return TEE_ERROR_NOT_IMPLEMENTED;
}

size_t crypto_bignum_num_bytes(struct bignum *a __unused)
{
	return 0;
}

size_t crypto_bignum_num_bits(struct bignum *a __unused)
{
	return 0;
}

/*
 * crypto_bignum_allocate() and crypto_bignum_bin2bn() failing should be
 * enough to guarantee that the functions calling this function aren't
 * called, but just in case add a panic() here to avoid unexpected
 * behavior.
 */
static void bignum_cant_happen(void)
{
	volatile bool b = true;

	/* Avoid warning about function does not return */
	if (b)
		panic();
}

void crypto_bignum_bn2bin(const struct bignum *from __unused,
			  uint8_t *to __unused)
{
	bignum_cant_happen();
}

void crypto_bignum_copy(struct bignum *to __unused,
			const struct bignum *from __unused)
{
	bignum_cant_happen();
}

void crypto_bignum_free(struct bignum *a)
{
	if (a)
		panic();
}

void crypto_bignum_clear(struct bignum *a __unused)
{
	bignum_cant_happen();
}

/* return -1 if a<b, 0 if a==b, +1 if a>b */
int32_t crypto_bignum_compare(struct bignum *a __unused,
			      struct bignum *b __unused)
{
	bignum_cant_happen();
	return -1;
}
#endif

#if !defined(CFG_CRYPTO_RSA)
TEE_Result crypto_acipher_alloc_rsa_keypair(struct rsa_keypair *s __unused,
					    size_t key_size_bits __unused)
{
	return TEE_ERROR_NOT_IMPLEMENTED;
}

TEE_Result
crypto_acipher_alloc_rsa_public_key(struct rsa_public_key *s __unused,
				    size_t key_size_bits __unused)
{
	return TEE_ERROR_NOT_IMPLEMENTED;
}

void crypto_acipher_free_rsa_public_key(struct rsa_public_key *s __unused)
{
}

void crypto_acipher_free_rsa_keypair(struct rsa_keypair *s __unused)
{
}

TEE_Result crypto_acipher_gen_rsa_key(struct rsa_keypair *key __unused,
				      size_t key_size __unused)
{
	return TEE_ERROR_NOT_IMPLEMENTED;
}

TEE_Result crypto_acipher_rsanopad_decrypt(struct rsa_keypair *key __unused,
					   const uint8_t *src __unused,
					   size_t src_len __unused,
					   uint8_t *dst __unused,
					   size_t *dst_len __unused)
{
	return TEE_ERROR_NOT_IMPLEMENTED;
}

TEE_Result crypto_acipher_rsanopad_encrypt(struct rsa_public_key *key __unused,
					   const uint8_t *src __unused,
					   size_t src_len __unused,
					   uint8_t *dst __unused,
					   size_t *dst_len __unused)
{
	return TEE_ERROR_NOT_IMPLEMENTED;
}

TEE_Result crypto_acipher_rsaes_decrypt(uint32_t algo __unused,
					struct rsa_keypair *key __unused,
					const uint8_t *label __unused,
					size_t label_len __unused,
					const uint8_t *src __unused,
					size_t src_len __unused,
					uint8_t *dst __unused,
					size_t *dst_len __unused)
{
	return TEE_ERROR_NOT_IMPLEMENTED;
}

TEE_Result crypto_acipher_rsaes_encrypt(uint32_t algo __unused,
					struct rsa_public_key *key __unused,
					const uint8_t *label __unused,
					size_t label_len __unused,
					const uint8_t *src __unused,
					size_t src_len __unused,
					uint8_t *dst __unused,
					size_t *dst_len __unused)
{
	return TEE_ERROR_NOT_IMPLEMENTED;
}

TEE_Result crypto_acipher_rsassa_sign(uint32_t algo __unused,
				      struct rsa_keypair *key __unused,
				      int salt_len __unused,
				      const uint8_t *msg __unused,
				      size_t msg_len __unused,
				      uint8_t *sig __unused,
				      size_t *sig_len __unused)
{
	return TEE_ERROR_NOT_IMPLEMENTED;
}

TEE_Result crypto_acipher_rsassa_verify(uint32_t algo __unused,
					struct rsa_public_key *key __unused,
					int salt_len __unused,
					const uint8_t *msg __unused,
					size_t msg_len __unused,
					const uint8_t *sig __unused,
					size_t sig_len __unused)
{
	return TEE_ERROR_NOT_IMPLEMENTED;
}
#endif /*!CFG_CRYPTO_RSA*/

#if !defined(CFG_CRYPTO_DSA)
TEE_Result crypto_acipher_alloc_dsa_keypair(struct dsa_keypair *s __unused,
					    size_t key_size_bits __unused)
{
	return TEE_ERROR_NOT_IMPLEMENTED;
}

TEE_Result
crypto_acipher_alloc_dsa_public_key(struct dsa_public_key *s __unused,
				    size_t key_size_bits __unused)
{
	return TEE_ERROR_NOT_IMPLEMENTED;
}

TEE_Result crypto_acipher_gen_dsa_key(struct dsa_keypair *key __unused,
				      size_t key_size __unused)
{
	return TEE_ERROR_NOT_IMPLEMENTED;
}

TEE_Result crypto_acipher_dsa_sign(uint32_t algo __unused,
				   struct dsa_keypair *key __unused,
				   const uint8_t *msg __unused,
				   size_t msg_len __unused,
				   uint8_t *sig __unused,
				   size_t *sig_len __unused)
{
	return TEE_ERROR_NOT_IMPLEMENTED;
}

TEE_Result crypto_acipher_dsa_verify(uint32_t algo __unused,
				     struct dsa_public_key *key __unused,
				     const uint8_t *msg __unused,
				     size_t msg_len __unused,
				     const uint8_t *sig __unused,
				     size_t sig_len __unused)
{
	return TEE_ERROR_NOT_IMPLEMENTED;
}
#endif /*!CFG_CRYPTO_DSA*/

#if !defined(CFG_CRYPTO_DH)
TEE_Result crypto_acipher_alloc_dh_keypair(struct dh_keypair *s __unused,
					   size_t key_size_bits __unused)
{
	return TEE_ERROR_NOT_IMPLEMENTED;
}

TEE_Result crypto_acipher_gen_dh_key(struct dh_keypair *key __unused,
				     struct bignum *q __unused,
				     size_t xbits __unused,
				     size_t key_size __unused)
{
	return TEE_ERROR_NOT_IMPLEMENTED;
}

TEE_Result
crypto_acipher_dh_shared_secret(struct dh_keypair *private_key __unused,
				struct bignum *public_key __unused,
				struct bignum *secret __unused)
{
	return TEE_ERROR_NOT_IMPLEMENTED;
}
#endif /*!CFG_CRYPTO_DH*/

TEE_Result crypto_acipher_alloc_ecc_public_key(struct ecc_public_key *key,
					       uint32_t key_type,
					       size_t key_size_bits)
{
	TEE_Result res = TEE_ERROR_NOT_IMPLEMENTED;

	/*
	 * Use default cryptographic implementation if no matching
	 * drvcrypt device.
	 */
	res = drvcrypt_asym_alloc_ecc_public_key(key, key_type, key_size_bits);
	if (res == TEE_ERROR_NOT_IMPLEMENTED)
		res = crypto_asym_alloc_ecc_public_key(key, key_type,
						       key_size_bits);

	return res;
}

TEE_Result crypto_acipher_alloc_ecc_keypair(struct ecc_keypair *key,
					    uint32_t key_type,
					    size_t key_size_bits)
{
	TEE_Result res = TEE_ERROR_NOT_IMPLEMENTED;

	/*
	 * Use default cryptographic implementation if no matching
	 * drvcrypt device.
	 */
	res = drvcrypt_asym_alloc_ecc_keypair(key, key_type, key_size_bits);
	if (res == TEE_ERROR_NOT_IMPLEMENTED)
		res = crypto_asym_alloc_ecc_keypair(key, key_type,
						    key_size_bits);

	return res;
}

void crypto_acipher_free_ecc_public_key(struct ecc_public_key *key)
{
	assert(key->ops && key->ops->free);

	key->ops->free(key);
}

TEE_Result crypto_acipher_gen_ecc_key(struct ecc_keypair *key,
				      size_t key_size_bits)
{
	assert(key->ops && key->ops->generate);

	return key->ops->generate(key, key_size_bits);
}

TEE_Result crypto_acipher_ecc_sign(uint32_t algo, struct ecc_keypair *key,
				   const uint8_t *msg, size_t msg_len,
				   uint8_t *sig, size_t *sig_len)
{
	assert(key->ops);

	if (!key->ops->sign)
		return TEE_ERROR_NOT_IMPLEMENTED;

	return key->ops->sign(algo, key, msg, msg_len, sig, sig_len);
}

TEE_Result crypto_acipher_ecc_verify(uint32_t algo, struct ecc_public_key *key,
				     const uint8_t *msg, size_t msg_len,
				     const uint8_t *sig, size_t sig_len)
{
	assert(key->ops);

	if (!key->ops->verify)
		return TEE_ERROR_NOT_IMPLEMENTED;

	return key->ops->verify(algo, key, msg, msg_len, sig, sig_len);
}

TEE_Result crypto_acipher_ecc_shared_secret(struct ecc_keypair *private_key,
					    struct ecc_public_key *public_key,
					    void *secret,
					    unsigned long *secret_len)
{
	assert(private_key->ops);

	if (!private_key->ops->shared_secret)
		return TEE_ERROR_NOT_IMPLEMENTED;

	return private_key->ops->shared_secret(private_key, public_key, secret,
					       secret_len);
}

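/*
 * Example (illustrative sketch only): generating a P-256 key pair and
 * signing a message digest through the ECC hooks above. The key-type and
 * algorithm identifiers are the GlobalPlatform API ones and should be
 * adjusted to the curve actually in use; "dgst" and "dgst_len" are
 * caller-supplied and error handling is trimmed.
 *
 *	struct ecc_keypair key = { };
 *	uint8_t sig[64] = { 0 };
 *	size_t sig_len = sizeof(sig);
 *
 *	if (crypto_acipher_alloc_ecc_keypair(&key, TEE_TYPE_ECDSA_KEYPAIR,
 *					     256))
 *		return;
 *	if (!crypto_acipher_gen_ecc_key(&key, 256))
 *		crypto_acipher_ecc_sign(TEE_ALG_ECDSA_P256, &key, dgst,
 *					dgst_len, sig, &sig_len);
 */
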
TEE_Result crypto_acipher_sm2_pke_decrypt(struct ecc_keypair *key,
					  const uint8_t *src, size_t src_len,
					  uint8_t *dst, size_t *dst_len)
{
	assert(key->ops);

	if (!key->ops->decrypt)
		return TEE_ERROR_NOT_IMPLEMENTED;

	return key->ops->decrypt(key, src, src_len, dst, dst_len);
}

TEE_Result crypto_acipher_sm2_pke_encrypt(struct ecc_public_key *key,
					  const uint8_t *src, size_t src_len,
					  uint8_t *dst, size_t *dst_len)
{
	assert(key->ops);

	if (!key->ops->encrypt)
		return TEE_ERROR_NOT_IMPLEMENTED;

	return key->ops->encrypt(key, src, src_len, dst, dst_len);
}

#if !defined(CFG_CRYPTO_SM2_KEP)
TEE_Result crypto_acipher_sm2_kep_derive(struct ecc_keypair *my_key __unused,
					 struct ecc_keypair *my_eph_key
						 __unused,
					 struct ecc_public_key *peer_key
						 __unused,
					 struct ecc_public_key *peer_eph_key
						 __unused,
					 struct sm2_kep_parms *p __unused)
{
	return TEE_ERROR_NOT_IMPLEMENTED;
}
#endif

__weak void crypto_storage_obj_del(uint8_t *data __unused, size_t len __unused)
{
}