// SPDX-License-Identifier: BSD-2-Clause
/*
 * Copyright 2022-2024 HiSilicon Limited.
 * Kunpeng hardware accelerator sec hash algorithm implementation.
 */

#include <drvcrypt_hash.h>
#include <initcall.h>

#include "sec_hash.h"
#include "sec_main.h"

static enum hisi_drv_status sec_digest_set_hmac_key(struct hashctx *ctx,
						    struct hisi_sec_sqe *sqe)
{
	if (ctx->key_len > SEC_DIGEST_MAX_KEY_SIZE || !ctx->key_len) {
		EMSG("Invalid digest key len(%ld)", ctx->key_len);
		return HISI_QM_DRVCRYPT_IN_EPARA;
	}

	/* Express the key length in words, rounding up if not word-aligned */
	sqe->type2.mac_key_alg |= SHIFT_U64(DIV_ROUND_UP(ctx->key_len,
							 SEC_ENCODE_BYTES),
					    SEC_AKEY_OFFSET);
	sqe->type2.a_key_addr = ctx->key_dma;

	return HISI_QM_DRVCRYPT_NO_ERR;
}

/*
 * Configure stream (long data) chaining for BD type 2: the first BD lets the
 * engine generate the inner IV, middle and last BDs reuse the previous MAC
 * as the inner IV, and only the last BD enables hash padding.
 */
static void sec_digest_fill_long_bd2(struct hashctx *ctx,
				     struct hisi_sec_sqe *sqe)
{
	uint64_t total_bits = 0;

	if (ctx->has_next && !ctx->iv_len) {
		/* LONG BD FIRST */
		sqe->ai_apd_cs |= AI_GEN_INNER;
		sqe->ai_apd_cs |= SHIFT_U32(AUTHPAD_NOPAD, SEC_APAD_OFFSET);
		ctx->iv_len = ctx->mac_len;
	} else if (ctx->has_next && ctx->iv_len) {
		/* LONG BD MIDDLE */
		sqe->ai_apd_cs |= AI_GEN_IVIN_ADDR;
		sqe->ai_apd_cs |= SHIFT_U32(AUTHPAD_NOPAD, SEC_APAD_OFFSET);
		sqe->type2.a_ivin_addr = sqe->type2.mac_addr;
		ctx->iv_len = ctx->mac_len;
	} else if (!ctx->has_next && ctx->iv_len) {
		/* LONG BD END */
		sqe->ai_apd_cs |= AI_GEN_IVIN_ADDR;
		sqe->ai_apd_cs |= SHIFT_U32(AUTHPAD_PAD, SEC_APAD_OFFSET);
		sqe->type2.a_ivin_addr = sqe->type2.mac_addr;
		total_bits = ctx->long_data_len * BYTE_BITS;
		sqe->type2.long_a_data_len = total_bits;
		ctx->iv_len = 0;
	} else {
		/* SHORT BD */
		ctx->iv_len = 0;
	}
}

static struct crypto_hash *to_hash_ctx(struct crypto_hash_ctx *ctx)
{
	return container_of(ctx, struct crypto_hash, hash_ctx);
}

static uint32_t sec_digest_get_alg_type(uint32_t algo)
{
	switch (algo) {
	case TEE_ALG_MD5:
		return A_ALG_MD5;
	case TEE_ALG_HMAC_MD5:
		return A_ALG_HMAC_MD5;
	case TEE_ALG_SHA1:
		return A_ALG_SHA1;
	case TEE_ALG_HMAC_SHA1:
		return A_ALG_HMAC_SHA1;
	case TEE_ALG_SHA224:
		return A_ALG_SHA224;
	case TEE_ALG_HMAC_SHA224:
		return A_ALG_HMAC_SHA224;
	case TEE_ALG_SM3:
		return A_ALG_SM3;
	case TEE_ALG_HMAC_SM3:
		return A_ALG_HMAC_SM3;
	case TEE_ALG_SHA256:
		return A_ALG_SHA256;
	case TEE_ALG_HMAC_SHA256:
		return A_ALG_HMAC_SHA256;
	case TEE_ALG_SHA384:
		return A_ALG_SHA384;
	case TEE_ALG_HMAC_SHA384:
		return A_ALG_HMAC_SHA384;
	case TEE_ALG_SHA512:
		return A_ALG_SHA512;
	case TEE_ALG_HMAC_SHA512:
		return A_ALG_HMAC_SHA512;
	default:
		return A_ALG_MAX;
	}
}

static enum hisi_drv_status sec_digest_fill_sqe(void *bd, void *msg)
{
	enum hisi_drv_status ret = HISI_QM_DRVCRYPT_NO_ERR;
	struct hisi_sec_sqe *sqe = bd;
	struct hashctx *ctx = msg;
	uint32_t alg_type = 0;

	if (!ctx->in_len) {
		EMSG("Digest bd2 does not support zero-length packets");
		return HISI_QM_DRVCRYPT_IN_EPARA;
	}

	sqe->type_auth_cipher = BD_TYPE2;
	sqe->sds_sa_type = SHIFT_U32(SCENE_NOTHING, SEC_SCENE_OFFSET);
	sqe->type_auth_cipher |= SHIFT_U32(AUTH_MAC_CALCULATE, SEC_AUTH_OFFSET);
	sqe->type2.alen_ivllen = ctx->in_len;

	sqe->type2.data_src_addr = ctx->in_dma;
	sqe->type2.mac_addr = ctx->out_dma;
	sqe->type2.mac_key_alg |= ctx->mac_len / SEC_ENCODE_BYTES;

	if (ctx->mode == WCRYPTO_DIGEST_HMAC) {
		ret = sec_digest_set_hmac_key(ctx, sqe);
		if (ret)
			return ret;
	}

	alg_type = sec_digest_get_alg_type(ctx->algo);
	if (alg_type >= A_ALG_MAX) {
		EMSG("Fail to get digest alg type");
		return HISI_QM_DRVCRYPT_IN_EPARA;
	}
	sqe->type2.mac_key_alg |= SHIFT_U32(alg_type, SEC_AEAD_ALG_OFFSET);

	sec_digest_fill_long_bd2(ctx, sqe);

	return ret;
}

static enum hisi_drv_status
sec_digest_set_hmac_bd3_key(struct hashctx *ctx, struct hisi_sec_bd3_sqe *sqe)
{
	if (ctx->key_len > SEC_DIGEST_MAX_KEY_SIZE || !ctx->key_len) {
		EMSG("Invalid digest key len(%ld)", ctx->key_len);
		return HISI_QM_DRVCRYPT_IN_EPARA;
	}

	/* Express the key length in words, rounding up if not word-aligned */
	sqe->auth_mac_key |= SHIFT_U64(DIV_ROUND_UP(ctx->key_len,
						    SEC_ENCODE_BYTES),
				       SEC_AKEY_OFFSET_V3);
	sqe->a_key_addr = ctx->key_dma;

	return HISI_QM_DRVCRYPT_NO_ERR;
}

/* Same long-data chaining logic as sec_digest_fill_long_bd2(), for BD type 3 */
static void sec_digest_fill_long_bd3(struct hashctx *ctx,
				     struct hisi_sec_bd3_sqe *sqe)
{
	uint64_t total_bits = 0;

	if (ctx->has_next && !ctx->iv_len) {
		/* LONG BD FIRST */
		sqe->auth_mac_key |= SHIFT_U32(AI_GEN_INNER,
					       SEC_AI_GEN_OFFSET_V3);
		sqe->stream_scene.auth_pad = AUTHPAD_NOPAD;
		ctx->iv_len = ctx->mac_len;
	} else if (ctx->has_next && ctx->iv_len) {
		/* LONG BD MIDDLE */
		sqe->auth_mac_key |= SHIFT_U32(AI_GEN_IVIN_ADDR,
					       SEC_AI_GEN_OFFSET_V3);
		sqe->stream_scene.auth_pad = AUTHPAD_NOPAD;
		sqe->a_ivin_addr = sqe->mac_addr;
		ctx->iv_len = ctx->mac_len;
	} else if (!ctx->has_next && ctx->iv_len) {
		/* LONG BD END */
		sqe->auth_mac_key |= SHIFT_U32(AI_GEN_IVIN_ADDR,
					       SEC_AI_GEN_OFFSET_V3);
		sqe->stream_scene.auth_pad = AUTHPAD_PAD;
		sqe->a_ivin_addr = sqe->mac_addr;
		total_bits = ctx->long_data_len * BYTE_BITS;
		sqe->stream_scene.long_a_data_len = total_bits;
		ctx->iv_len = 0;
	} else {
		/* SHORT BD */
		ctx->iv_len = 0;
	}
}

static enum hisi_drv_status sec_digest_fill_bd3_sqe(void *bd, void *msg)
{
	enum hisi_drv_status ret = HISI_QM_DRVCRYPT_NO_ERR;
	struct hisi_sec_bd3_sqe *sqe = bd;
	struct hashctx *ctx = msg;
	uint32_t alg_type = 0;

	sqe->bd_param = BD_TYPE3 | SHIFT_U32(ctx->scene, SEC_SCENE_OFFSET_V3);
	sqe->a_len_key = ctx->in_len;
	sqe->auth_mac_key = AUTH_MAC_CALCULATE;
	sqe->data_src_addr = ctx->in_dma;
	sqe->mac_addr = ctx->out_dma;

	if (ctx->mode == WCRYPTO_DIGEST_HMAC) {
		ret = sec_digest_set_hmac_bd3_key(ctx, sqe);
		if (ret)
			return HISI_QM_DRVCRYPT_IN_EPARA;
	}

	sqe->auth_mac_key |= SHIFT_U64(ctx->mac_len / SEC_ENCODE_BYTES,
				       SEC_MAC_OFFSET_V3);
	alg_type = sec_digest_get_alg_type(ctx->algo);
	if (alg_type >= A_ALG_MAX) {
		EMSG("Fail to get digest bd3 alg");
		return HISI_QM_DRVCRYPT_IN_EPARA;
	}
	sqe->auth_mac_key |= SHIFT_U32(alg_type, SEC_AUTH_ALG_OFFSET_V3);
	sec_digest_fill_long_bd3(ctx, sqe);

	return HISI_QM_DRVCRYPT_NO_ERR;
}

static TEE_Result sec_digest_do_task(struct hisi_qp *qp, void *msg)
{
	TEE_Result ret = TEE_SUCCESS;

	ret = hisi_qp_send(qp, msg);
	if (ret) {
		EMSG("Fail to send task, ret=%d", ret);
		return TEE_ERROR_BAD_STATE;
	}

	ret = hisi_qp_recv_sync(qp, msg);
	if (ret) {
		EMSG("Recv task error, ret=%d", ret);
		return TEE_ERROR_BAD_STATE;
	}

	return TEE_SUCCESS;
}

static enum hisi_drv_status sec_parse_digest_sqe(void *bd, void *msg __unused)
{
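	/* Check the completion flag and the error type reported back in the BD */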
	struct hisi_sec_sqe *sqe = bd;
	uint16_t done = 0;

	done = SEC_GET_FIELD(sqe->type2.done_flag, SEC_DONE_MASK, 0);
	if (done != SEC_HW_TASK_DONE || sqe->type2.error_type) {
		EMSG("SEC BD2 fail! done=%#"PRIx16", etype=%#"PRIx8,
		     done, sqe->type2.error_type);
		return HISI_QM_DRVCRYPT_IN_EPARA;
	}

	return HISI_QM_DRVCRYPT_NO_ERR;
}

static enum hisi_drv_status sec_parse_digest_bd3_sqe(void *bd,
						     void *msg __unused)
{
	struct hisi_sec_bd3_sqe *sqe = bd;
	uint16_t done = 0;

	done = SEC_GET_FIELD(sqe->done_flag, SEC_DONE_MASK, 0);
	if (done != SEC_HW_TASK_DONE || sqe->error_type) {
		EMSG("SEC BD3 fail! done=%#"PRIx16", etype=%#"PRIx8,
		     done, sqe->error_type);
		return HISI_QM_DRVCRYPT_IN_EPARA;
	}

	return HISI_QM_DRVCRYPT_NO_ERR;
}

TEE_Result hisi_sec_digest_ctx_init(struct hashctx *hash_ctx,
				    const uint8_t *key, size_t len)
{
	if (!hash_ctx) {
		EMSG("Input hash_ctx is NULL");
		return TEE_ERROR_BAD_PARAMETERS;
	}

	hash_ctx->in_len = 0;
	hash_ctx->iv_len = 0;
	hash_ctx->has_next = false;
	hash_ctx->long_data_len = 0;
	hash_ctx->scene = SCENE_NOTHING;

	/*
	 * sec_hash_initialize() is also called when a context is reset.
	 * Free any previously allocated input buffer so that the context
	 * always starts with a NULL input buffer.
	 */
	free(hash_ctx->in);
	hash_ctx->in = NULL;

	if (len) {
		hash_ctx->key_len = len;
		memcpy(hash_ctx->key, key, len);
	}

	return TEE_SUCCESS;
}

static TEE_Result sec_hash_initialize(struct crypto_hash_ctx *ctx)
{
	struct crypto_hash *hash = NULL;
	struct hashctx *hash_ctx = NULL;

	if (!ctx) {
		EMSG("Input ctx is NULL");
		return TEE_ERROR_BAD_PARAMETERS;
	}

	hash = to_hash_ctx(ctx);
	hash_ctx = hash->ctx;

	return hisi_sec_digest_ctx_init(hash_ctx, NULL, 0);
}

TEE_Result hisi_sec_digest_do_update(struct hashctx *hash_ctx,
				     const uint8_t *data, size_t len)
{
	TEE_Result ret = TEE_SUCCESS;
	size_t left_size = 0;

	hash_ctx->long_data_len += len;

	if (!hash_ctx->in) {
		if (len <= SMALL_BUF_SIZE)
			hash_ctx->buf_len = SMALL_BUF_SIZE;
		else if (len <= MAX_AUTH_LENGTH)
			hash_ctx->buf_len = ROUNDUP(len, HISI_QM_ALIGN128);
		else
			hash_ctx->buf_len = MAX_AUTH_LENGTH;

		hash_ctx->in_len = 0;
		hash_ctx->in = malloc(hash_ctx->buf_len);
		if (!hash_ctx->in) {
			EMSG("Fail to alloc in data buf");
			return TEE_ERROR_STORAGE_NO_SPACE;
		}
		hash_ctx->in_dma = virt_to_phys(hash_ctx->in);
		if (!hash_ctx->in_dma) {
			free(hash_ctx->in);
			hash_ctx->in = NULL;
			EMSG("Fail to get in_dma");
			return TEE_ERROR_STORAGE_NO_SPACE;
		}
	}

	while (len > 0) {
		if (hash_ctx->in_len + len <= hash_ctx->buf_len) {
			memcpy(hash_ctx->in + hash_ctx->in_len, data, len);
			hash_ctx->in_len += len;
			len = 0;
		} else {
			/*
			 * The staging buffer is full: submit it to the engine
			 * as one BD of a stream (long data) task, then keep
			 * buffering the remaining input.
			 */
			left_size = hash_ctx->buf_len - hash_ctx->in_len;
			memcpy(hash_ctx->in + hash_ctx->in_len, data,
			       left_size);
			hash_ctx->in_len = hash_ctx->buf_len;
			hash_ctx->scene = SCENE_STREAM;
			hash_ctx->has_next = true;
			data += left_size;
			len -= left_size;
			ret = sec_digest_do_task(hash_ctx->qp, hash_ctx);
			if (ret) {
				EMSG("Fail to do digest task! ret = %#"PRIx32,
				     ret);
				return ret;
			}
			hash_ctx->iv_len = hash_ctx->mac_len;
			hash_ctx->in_len = 0;
		}
	}
	return TEE_SUCCESS;
}

static TEE_Result sec_hash_do_update(struct crypto_hash_ctx *ctx,
				     const uint8_t *data, size_t len)
{
	struct crypto_hash *hash = NULL;
	struct hashctx *hash_ctx = NULL;

	if (!len) {
		IMSG("This is 0 len task, skip");
		return TEE_SUCCESS;
	}

	if (!ctx || (!data && len)) {
		EMSG("Invalid input parameters");
		return TEE_ERROR_BAD_PARAMETERS;
	}

	hash = to_hash_ctx(ctx);
	hash_ctx = hash->ctx;

	return hisi_sec_digest_do_update(hash_ctx, data, len);
}

TEE_Result hisi_sec_digest_do_final(struct hashctx *hash_ctx, uint8_t *digest,
				    size_t len)
{
	TEE_Result ret = TEE_SUCCESS;

	if (!digest || len == 0) {
		EMSG("Invalid input parameters");
		return TEE_ERROR_BAD_PARAMETERS;
	}

	if (hash_ctx->mac_len & WORD_ALIGNMENT_MASK) {
		EMSG("Invalid digest out_bytes");
		return TEE_ERROR_BAD_PARAMETERS;
	}

	hash_ctx->has_next = false;
	ret = sec_digest_do_task(hash_ctx->qp, hash_ctx);
	if (ret) {
		EMSG("Fail to do digest task! ret = %#"PRIx32, ret);
		return ret;
	}

	memcpy(digest, hash_ctx->out, MIN(hash_ctx->mac_len, len));

	return TEE_SUCCESS;
}

static TEE_Result sec_hash_do_final(struct crypto_hash_ctx *ctx,
				    uint8_t *digest, size_t len)
{
	struct crypto_hash *hash = to_hash_ctx(ctx);
	struct hashctx *hash_ctx = hash->ctx;

	return hisi_sec_digest_do_final(hash_ctx, digest, len);
}

void hisi_sec_digest_ctx_free(struct hashctx *hash_ctx)
{
	hisi_qm_release_qp(hash_ctx->qp);

	free(hash_ctx->in);
	hash_ctx->in = NULL;

	memzero_explicit(hash_ctx->key, SEC_DIGEST_MAX_KEY_SIZE);

	free(hash_ctx);
}

static void sec_hash_ctx_free(struct crypto_hash_ctx *ctx)
{
	struct crypto_hash *hash = NULL;
	struct hashctx *hash_ctx = NULL;

	if (!ctx)
		return;

	hash = to_hash_ctx(ctx);
	hash_ctx = hash->ctx;
	if (!hash_ctx)
		return;
	hisi_sec_digest_ctx_free(hash_ctx);

	hash->ctx = NULL;

	free(hash);
}

void hisi_sec_digest_copy_state(struct hashctx *out_hash_ctx,
				struct hashctx *in_hash_ctx)
{
	out_hash_ctx->iv_len = in_hash_ctx->iv_len;
	out_hash_ctx->buf_len = in_hash_ctx->buf_len;
	out_hash_ctx->key_len = in_hash_ctx->key_len;
	out_hash_ctx->has_next = in_hash_ctx->has_next;
	out_hash_ctx->long_data_len = in_hash_ctx->long_data_len;

	if (in_hash_ctx->in) {
		out_hash_ctx->in = malloc(out_hash_ctx->buf_len);
		if (!out_hash_ctx->in) {
			EMSG("Fail to alloc in buf");
			return;
		}
		out_hash_ctx->in_dma = virt_to_phys(out_hash_ctx->in);
		if (!out_hash_ctx->in_dma) {
			free(out_hash_ctx->in);
			out_hash_ctx->in = NULL;
			EMSG("Fail to get in_dma");
			return;
		}
		out_hash_ctx->in_len = in_hash_ctx->in_len;
		memcpy(out_hash_ctx->in, in_hash_ctx->in,
		       out_hash_ctx->buf_len);
	}

	memcpy(out_hash_ctx->iv, in_hash_ctx->iv, out_hash_ctx->iv_len);
	memcpy(out_hash_ctx->key, in_hash_ctx->key, out_hash_ctx->key_len);
}

static void sec_hash_copy_state(struct crypto_hash_ctx *out_ctx,
				struct crypto_hash_ctx *in_ctx)
{
	struct crypto_hash *out_hash = NULL;
	struct crypto_hash *in_hash = NULL;
	struct hashctx *out_hash_ctx = NULL;
	struct hashctx *in_hash_ctx = NULL;

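	/* Both contexts must be valid before the stream state is duplicated */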
	if (!out_ctx || !in_ctx) {
		EMSG("Invalid input parameters");
		return;
	}

	out_hash = to_hash_ctx(out_ctx);
	in_hash = to_hash_ctx(in_ctx);

	out_hash_ctx = out_hash->ctx;
	in_hash_ctx = in_hash->ctx;

	hisi_sec_digest_copy_state(out_hash_ctx, in_hash_ctx);
}

static struct crypto_hash_ops hash_ops = {
	.init = sec_hash_initialize,
	.update = sec_hash_do_update,
	.final = sec_hash_do_final,
	.free_ctx = sec_hash_ctx_free,
	.copy_state = sec_hash_copy_state,
};

static size_t sec_hash_get_mac_len(uint32_t type)
{
	switch (type) {
	case TEE_ALG_MD5:
	case TEE_ALG_HMAC_MD5:
		return HASH_MAC_LEN128;
	case TEE_ALG_SHA1:
	case TEE_ALG_HMAC_SHA1:
		return HASH_MAC_LEN160;
	case TEE_ALG_SHA224:
	case TEE_ALG_HMAC_SHA224:
		return HASH_MAC_LEN224;
	case TEE_ALG_SM3:
	case TEE_ALG_HMAC_SM3:
	case TEE_ALG_SHA256:
	case TEE_ALG_HMAC_SHA256:
		return HASH_MAC_LEN256;
	case TEE_ALG_SHA384:
	case TEE_ALG_HMAC_SHA384:
		return HASH_MAC_LEN384;
	case TEE_ALG_SHA512:
	case TEE_ALG_HMAC_SHA512:
		return HASH_MAC_LEN512;
	default:
		return 0;
	}
}

static TEE_Result sec_hash_get_dma(struct hashctx *hash_ctx)
{
	hash_ctx->key_dma = virt_to_phys(hash_ctx->key);
	if (!hash_ctx->key_dma) {
		EMSG("Fail to get key_dma");
		return TEE_ERROR_STORAGE_NO_SPACE;
	}

	hash_ctx->iv_dma = virt_to_phys(hash_ctx->iv);
	if (!hash_ctx->iv_dma) {
		EMSG("Fail to get iv_dma");
		return TEE_ERROR_STORAGE_NO_SPACE;
	}

	hash_ctx->out_dma = virt_to_phys(hash_ctx->out);
	if (!hash_ctx->out_dma) {
		EMSG("Fail to get out_dma");
		return TEE_ERROR_STORAGE_NO_SPACE;
	}

	return TEE_SUCCESS;
}

TEE_Result hisi_sec_hash_ctx_init(struct hashctx *hash_ctx, uint32_t algo)
{
	TEE_Result ret = TEE_SUCCESS;

	hash_ctx->mac_len = sec_hash_get_mac_len(algo);
	if (!hash_ctx->mac_len) {
		EMSG("Invalid algo type %#"PRIx32, algo);
		return TEE_ERROR_NOT_IMPLEMENTED;
	}

	hash_ctx->algo = algo;
	hash_ctx->mode = algo >> HASH_MODE_OFFSET;

	ret = sec_hash_get_dma(hash_ctx);
	if (ret)
		return ret;

	hash_ctx->qp = sec_create_qp(HISI_QM_CHANNEL_TYPE0);
	if (!hash_ctx->qp) {
		EMSG("Fail to create hash qp");
		return TEE_ERROR_BUSY;
	}

	if (hash_ctx->qp->qm->version == HISI_QM_HW_V2) {
		hash_ctx->qp->fill_sqe = sec_digest_fill_sqe;
		hash_ctx->qp->parse_sqe = sec_parse_digest_sqe;
	} else {
		hash_ctx->qp->fill_sqe = sec_digest_fill_bd3_sqe;
		hash_ctx->qp->parse_sqe = sec_parse_digest_bd3_sqe;
	}

	return TEE_SUCCESS;
}

static TEE_Result sec_hash_ctx_allocate(struct crypto_hash_ctx **ctx,
					uint32_t algo)
{
	struct crypto_hash *hash = NULL;
	struct hashctx *hash_ctx = NULL;
	TEE_Result ret = TEE_SUCCESS;

	if (!ctx) {
		EMSG("Ctx is NULL");
		return TEE_ERROR_BAD_PARAMETERS;
	}

	hash = calloc(1, sizeof(*hash));
	if (!hash) {
		EMSG("Fail to alloc hash");
		return TEE_ERROR_STORAGE_NO_SPACE;
	}

	hash_ctx = calloc(1, sizeof(*hash_ctx));
	if (!hash_ctx) {
		EMSG("Fail to alloc hash_ctx");
		ret = TEE_ERROR_STORAGE_NO_SPACE;
		goto free_hash;
	}

	ret = hisi_sec_hash_ctx_init(hash_ctx, algo);
	if (ret)
		goto free_ctx;

	hash->hash_ctx.ops = &hash_ops;
	hash->ctx = hash_ctx;
	*ctx = &hash->hash_ctx;

	return TEE_SUCCESS;

free_ctx:
	free(hash_ctx);
free_hash:
	free(hash);

	return ret;
}

static TEE_Result sec_hash_init(void)
{
	TEE_Result ret = TEE_SUCCESS;

	ret = drvcrypt_register_hash(&sec_hash_ctx_allocate);
	if (ret)
		EMSG("Fail to register sec hash to crypto framework");

	return ret;
}
driver_init(sec_hash_init);
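
/*
 * Illustrative sketch only (not part of the driver): once
 * drvcrypt_register_hash() has installed sec_hash_ctx_allocate(), a caller
 * inside OP-TEE exercises a context roughly as below through the hash_ops
 * table defined above. The data/data_len names are hypothetical.
 *
 *	struct crypto_hash_ctx *ctx = NULL;
 *	uint8_t digest[HASH_MAC_LEN256] = { };
 *
 *	if (sec_hash_ctx_allocate(&ctx, TEE_ALG_SHA256))
 *		return;
 *	ctx->ops->init(ctx);
 *	ctx->ops->update(ctx, data, data_len);
 *	ctx->ops->final(ctx, digest, sizeof(digest));
 *	ctx->ops->free_ctx(ctx);
 */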