1 // SPDX-License-Identifier: BSD-2-Clause 2 /* 3 * Copyright 2022-2024 HiSilicon Limited. 4 * Kunpeng hardware accelerator sec authenc algorithm implementation. 5 */ 6 7 #include <crypto/crypto_impl.h> 8 #include <drvcrypt.h> 9 #include <drvcrypt_authenc.h> 10 #include <initcall.h> 11 #include <tee_api_types.h> 12 #include <trace.h> 13 #include <utee_defines.h> 14 15 #include "sec_authenc.h" 16 #include "sec_cipher.h" 17 #include "sec_hash.h" 18 #include "sec_main.h" 19 20 static enum hisi_drv_status sec_aead_bd_fill(void *bd, void *msg) 21 { 22 struct authenc_ctx *ae_ctx = msg; 23 struct hisi_sec_sqe *sqe = bd; 24 uint8_t scene = 0; 25 uint8_t de = 0; 26 27 sqe->type_auth_cipher = BD_TYPE2 | SHIFT_U32(NO_AUTH, SEC_AUTH_OFFSET); 28 scene = SHIFT_U32(SCENE_NOTHING, SEC_SCENE_OFFSET); 29 de = SHIFT_U32(DATA_DST_ADDR_ENABLE, SEC_DE_OFFSET); 30 sqe->sds_sa_type = de | scene; 31 sqe->type2.cipher_src_offset = ae_ctx->aad.length; 32 sqe->type2.icvw_kmode = SHIFT_U32(ae_ctx->c_key_len, SEC_CKEY_OFFSET) | 33 SHIFT_U32(ae_ctx->mode, SEC_CMODE_OFFSET) | 34 ae_ctx->tag_len; 35 sqe->type2.clen_ivhlen = ae_ctx->payload_len; 36 sqe->type2.alen_ivllen = ae_ctx->aad.length; 37 sqe->type2.c_alg = ae_ctx->algo; 38 39 if (ae_ctx->encrypt) { 40 sqe->type_auth_cipher |= SHIFT_U32(CIPHER_ENCRYPT, 41 SEC_CIPHER_OFFSET); 42 sqe->sds_sa_type |= SEC_CIPHER_THEN_DIGEST; 43 } else { 44 sqe->type_auth_cipher |= SHIFT_U32(CIPHER_DECRYPT, 45 SEC_CIPHER_OFFSET); 46 sqe->sds_sa_type |= SEC_DIGEST_THEN_CIPHER; 47 } 48 49 sqe->type2.data_dst_addr = ae_ctx->dst_dma; 50 sqe->type2.data_src_addr = ae_ctx->src_dma; 51 sqe->type2.c_ivin_addr = ae_ctx->civ_dma; 52 sqe->type2.c_key_addr = ae_ctx->key_dma; 53 sqe->type2.mac_addr = ae_ctx->tag_dma; 54 sqe->type2.a_ivin_addr = ae_ctx->aiv_dma; 55 56 return HISI_QM_DRVCRYPT_NO_ERR; 57 } 58 59 static enum hisi_drv_status sec_aead_bd_parse(void *bd, void *msg) 60 { 61 struct hisi_sec_sqe *sqe = bd; 62 struct authenc_ctx *ctx = msg; 63 uint16_t done = 0; 64 65 ctx->result = SEC_GET_FIELD(sqe->type2.done_flag, SEC_ICV_MASK, 1); 66 done = SEC_GET_FIELD(sqe->type2.done_flag, SEC_DONE_MASK, 0); 67 if (done != SEC_HW_TASK_DONE || sqe->type2.error_type) { 68 EMSG("SEC BD2 fail! done=%#"PRIx16", etype=%#"PRIx8, 69 done, sqe->type2.error_type); 70 return HISI_QM_DRVCRYPT_IN_EPARA; 71 } 72 73 return HISI_QM_DRVCRYPT_NO_ERR; 74 } 75 76 static enum hisi_drv_status sec_aead_bd3_fill(void *bd, void *msg) 77 { 78 struct authenc_ctx *ae_ctx = msg; 79 struct hisi_sec_bd3_sqe *sqe = bd; 80 81 sqe->bd_param = BD_TYPE3 | SHIFT_U32(SCENE_NOTHING, 82 SEC_SCENE_OFFSET_V3) | 83 SHIFT_U32(DATA_DST_ADDR_ENABLE, SEC_DE_OFFSET_V3); 84 sqe->auth_mac_key = NO_AUTH; 85 sqe->cipher_src_offset = ae_ctx->aad.length; 86 sqe->c_icv_key = SHIFT_U32(ae_ctx->c_key_len, SEC_CKEY_OFFSET_V3) | 87 SHIFT_U32(ae_ctx->tag_len, SEC_ICV_LEN_OFFSET_V3); 88 sqe->c_len_ivin = ae_ctx->payload_len; 89 90 sqe->a_len_key = ae_ctx->aad.length; 91 sqe->c_mode_alg = ae_ctx->mode | 92 SHIFT_U32(ae_ctx->algo, SEC_CALG_OFFSET_V3); 93 94 if (ae_ctx->encrypt) { 95 sqe->c_icv_key |= CIPHER_ENCRYPT; 96 sqe->huk_iv_seq = SHIFT_U32(SEC_CIPHER_THEN_DIGEST, 97 SEC_SEQ_OFFSET_V3); 98 } else { 99 sqe->c_icv_key |= CIPHER_DECRYPT; 100 sqe->huk_iv_seq = SHIFT_U32(SEC_DIGEST_THEN_CIPHER, 101 SEC_SEQ_OFFSET_V3); 102 } 103 104 sqe->no_scene.c_ivin_addr = ae_ctx->civ_dma; 105 sqe->data_dst_addr = ae_ctx->dst_dma; 106 sqe->data_src_addr = ae_ctx->src_dma; 107 sqe->c_key_addr = ae_ctx->key_dma; 108 sqe->mac_addr = ae_ctx->tag_dma; 109 sqe->a_ivin_addr = ae_ctx->aiv_dma; 110 111 return HISI_QM_DRVCRYPT_NO_ERR; 112 } 113 114 static enum hisi_drv_status sec_aead_bd3_parse(void *bd, void *msg) 115 { 116 struct hisi_sec_bd3_sqe *sqe = bd; 117 struct authenc_ctx *ctx = msg; 118 uint16_t done = 0; 119 120 ctx->result = SEC_GET_FIELD(sqe->done_flag, SEC_ICV_MASK, 1); 121 done = SEC_GET_FIELD(sqe->done_flag, SEC_DONE_MASK, 0); 122 if (done != SEC_HW_TASK_DONE || sqe->error_type) { 123 EMSG("SEC BD3 fail! done=%#"PRIx16", etype=%#"PRIx8, 124 done, sqe->error_type); 125 return HISI_QM_DRVCRYPT_IN_EPARA; 126 } 127 128 return HISI_QM_DRVCRYPT_NO_ERR; 129 } 130 131 static TEE_Result sec_do_aead_task(struct hisi_qp *qp, void *msg) 132 { 133 enum hisi_drv_status ret = HISI_QM_DRVCRYPT_NO_ERR; 134 135 ret = hisi_qp_send(qp, msg); 136 if (ret) { 137 EMSG("Fail to send task, ret=%d", ret); 138 return TEE_ERROR_BAD_STATE; 139 } 140 141 ret = hisi_qp_recv_sync(qp, msg); 142 if (ret) { 143 EMSG("Recv task error, ret=%d", ret); 144 return TEE_ERROR_BAD_STATE; 145 } 146 147 return TEE_SUCCESS; 148 } 149 150 static TEE_Result authenc_algo_check(uint32_t algo) 151 { 152 switch (algo) { 153 case TEE_ALG_AES_GCM: 154 case TEE_ALG_AES_CCM: 155 return TEE_SUCCESS; 156 default: 157 return TEE_ERROR_NOT_IMPLEMENTED; 158 } 159 } 160 161 static uint8_t crypto_set_alg(uint32_t alg) 162 { 163 switch (alg) { 164 case TEE_MAIN_ALGO_AES: 165 return C_ALG_AES; 166 default: 167 return 0; 168 } 169 } 170 171 static uint8_t crypto_set_mode(uint32_t mode) 172 { 173 switch (mode) { 174 case TEE_CHAIN_MODE_CCM: 175 return C_MODE_CCM; 176 case TEE_CHAIN_MODE_GCM: 177 return C_MODE_GCM; 178 default: 179 return 0; 180 } 181 } 182 183 static TEE_Result sec_authenc_ctx_allocate(void **ctx, uint32_t algo) 184 { 185 struct crypto_authenc_ctx *ae_soft_ctx = NULL; 186 struct authenc_ctx *ae_drv_ctx = NULL; 187 TEE_Result ret = TEE_SUCCESS; 188 189 if (!ctx) { 190 EMSG("ctx is NULL"); 191 return TEE_ERROR_BAD_PARAMETERS; 192 } 193 194 ret = authenc_algo_check(algo); 195 if (ret) 196 return ret; 197 198 ae_drv_ctx = calloc(1, sizeof(struct authenc_ctx)); 199 if (!ae_drv_ctx) { 200 EMSG("Fail to calloc ae_drv_ctx"); 201 return TEE_ERROR_OUT_OF_MEMORY; 202 } 203 204 ae_drv_ctx->algo = crypto_set_alg(TEE_ALG_GET_MAIN_ALG(algo)); 205 ae_drv_ctx->mode = crypto_set_mode(TEE_ALG_GET_CHAIN_MODE(algo)); 206 207 ret = crypto_aes_gcm_alloc_ctx(&ae_soft_ctx); 208 if (ret) { 209 EMSG("soft ctx is NULL"); 210 goto free_ctx; 211 } 212 213 ae_drv_ctx->ae_soft_ctx = ae_soft_ctx; 214 ae_drv_ctx->is_hw_supported = true; 215 216 ae_drv_ctx->qp = sec_create_qp(HISI_QM_CHANNEL_TYPE0); 217 if (!ae_drv_ctx->qp) { 218 ret = TEE_ERROR_BUSY; 219 goto free_soft_ctx; 220 } 221 222 if (ae_drv_ctx->qp->qm->version == HISI_QM_HW_V2) { 223 ae_drv_ctx->qp->fill_sqe = sec_aead_bd_fill; 224 ae_drv_ctx->qp->parse_sqe = sec_aead_bd_parse; 225 } else { 226 ae_drv_ctx->qp->fill_sqe = sec_aead_bd3_fill; 227 ae_drv_ctx->qp->parse_sqe = sec_aead_bd3_parse; 228 } 229 230 *ctx = ae_drv_ctx; 231 232 return TEE_SUCCESS; 233 234 free_soft_ctx: 235 ae_soft_ctx->ops->free_ctx(ae_soft_ctx); 236 free_ctx: 237 free(ae_drv_ctx); 238 return ret; 239 } 240 241 static void sec_authenc_ctx_free(void *ctx) 242 { 243 struct authenc_ctx *ae_drv_ctx = ctx; 244 245 if (!ae_drv_ctx) 246 return; 247 248 ae_drv_ctx->ae_soft_ctx->ops->free_ctx(ae_drv_ctx->ae_soft_ctx); 249 250 hisi_qm_release_qp(ae_drv_ctx->qp); 251 memzero_explicit(ae_drv_ctx->key, ae_drv_ctx->key_len); 252 253 if (ae_drv_ctx->src.data) { 254 free(ae_drv_ctx->src.data); 255 ae_drv_ctx->src.data = NULL; 256 } 257 258 if (ae_drv_ctx->dst.data) { 259 free(ae_drv_ctx->dst.data); 260 ae_drv_ctx->dst.data = NULL; 261 } 262 263 free(ae_drv_ctx); 264 } 265 266 static TEE_Result authenc_init_params_check(struct drvcrypt_authenc_init *dinit) 267 { 268 if (!dinit) { 269 EMSG("dinit is NULL"); 270 return TEE_ERROR_BAD_PARAMETERS; 271 } 272 273 if (!dinit->ctx) { 274 EMSG("ctx is NULL"); 275 return TEE_ERROR_BAD_PARAMETERS; 276 } 277 278 if (!dinit->key.length || !dinit->key.data) { 279 EMSG("key is NULL"); 280 return TEE_ERROR_BAD_PARAMETERS; 281 } 282 283 if (!dinit->nonce.data || !dinit->nonce.length) { 284 EMSG("iv is NULL"); 285 return TEE_ERROR_BAD_PARAMETERS; 286 } 287 288 return TEE_SUCCESS; 289 } 290 291 static bool is_hw_supported(struct drvcrypt_authenc_init *dinit) 292 { 293 struct authenc_ctx *ae_drv_ctx = dinit->ctx; 294 295 if (ae_drv_ctx->mode == C_MODE_GCM) { 296 if (dinit->nonce.length != GCM_IV_SIZE) 297 return false; 298 299 if (dinit->aad_len > MAX_GCM_AAD_SIZE) { 300 EMSG("Invalid aad len"); 301 return false; 302 } 303 304 if (dinit->tag_len < SEC_MIN_GCM_TAG_LEN || 305 dinit->tag_len > SEC_MAX_TAG_LEN) { 306 EMSG("Invalid tag len"); 307 return false; 308 } 309 } else { 310 if (dinit->nonce.length < MIN_CCM_NONCE_SIZE || 311 dinit->nonce.length > MAX_CCM_NONCE_SIZE) { 312 EMSG("Invalid nonce len"); 313 return false; 314 } 315 316 if (dinit->aad_len > MAX_CCM_AAD_SIZE) { 317 EMSG("Invalid aad len"); 318 return false; 319 } 320 321 if (dinit->tag_len < SEC_MIN_CCM_TAG_LEN || 322 dinit->tag_len > SEC_MAX_TAG_LEN || 323 dinit->tag_len % TAG_ALIGN) { 324 EMSG("Invalid tag len"); 325 return false; 326 } 327 } 328 329 if (dinit->payload_len + dinit->aad_len > SEC_MAX_AEAD_LENGTH || 330 (ae_drv_ctx->qp->qm->version == HISI_QM_HW_V2 && 331 dinit->payload_len == 0)) { 332 EMSG("Invalid src len"); 333 return false; 334 } 335 336 return true; 337 } 338 339 static TEE_Result sec_aead_set_key(struct drvcrypt_authenc_init *dinit) 340 { 341 struct authenc_ctx *ae_drv_ctx = dinit->ctx; 342 343 ae_drv_ctx->key_len = dinit->key.length; 344 345 switch (ae_drv_ctx->key_len) { 346 case AES_KEYSIZE_128: 347 ae_drv_ctx->c_key_len = CKEY_LEN_128_BIT; 348 break; 349 case AES_KEYSIZE_192: 350 ae_drv_ctx->c_key_len = CKEY_LEN_192_BIT; 351 break; 352 case AES_KEYSIZE_256: 353 ae_drv_ctx->c_key_len = CKEY_LEN_256_BIT; 354 break; 355 default: 356 EMSG("Invalid AES key size"); 357 return TEE_ERROR_BAD_PARAMETERS; 358 } 359 360 memcpy(ae_drv_ctx->key, dinit->key.data, dinit->key.length); 361 362 return TEE_SUCCESS; 363 } 364 365 static TEE_Result sec_aead_set_iv(struct drvcrypt_authenc_init *dinit) 366 { 367 struct authenc_ctx *ae_drv_ctx = dinit->ctx; 368 uint32_t data_size = dinit->payload_len; 369 uint8_t adata = 0; 370 uint8_t flags = 0; 371 uint8_t cm = 0; 372 uint8_t cl = 0; 373 374 ae_drv_ctx->civ_len = MAX_IV_SIZE; 375 if (ae_drv_ctx->mode == C_MODE_GCM) { 376 ae_drv_ctx->civ_len = dinit->nonce.length; 377 memcpy(ae_drv_ctx->civ, dinit->nonce.data, dinit->nonce.length); 378 return TEE_SUCCESS; 379 } 380 381 if (dinit->aad_len) 382 adata = AAD_NOT_NULL; 383 384 cm = ((dinit->tag_len - IV_CM_CAL_NUM) / IV_CM_CAL_NUM) & IV_CL_MASK; 385 cl = IV_CL_CAL_NUM - dinit->nonce.length; 386 flags = cl | SHIFT_U32(cm, IV_CM_OFFSET) | 387 SHIFT_U32(adata, IV_FLAGS_OFFSET); 388 389 memcpy(ae_drv_ctx->civ + NONCE_OFFSET, dinit->nonce.data, 390 dinit->nonce.length); 391 memcpy(ae_drv_ctx->aiv + NONCE_OFFSET, dinit->nonce.data, 392 dinit->nonce.length); 393 394 ae_drv_ctx->aiv[0] = flags; 395 ae_drv_ctx->aiv[IV_LAST_BYTE1] = data_size & IV_LAST_BYTE_MASK; 396 data_size >>= IV_BYTE_OFFSET; 397 ae_drv_ctx->aiv[IV_LAST_BYTE2] = data_size & IV_LAST_BYTE_MASK; 398 data_size >>= IV_BYTE_OFFSET; 399 ae_drv_ctx->aiv[IV_LAST_BYTE3] = data_size & IV_LAST_BYTE_MASK; 400 401 ae_drv_ctx->civ[0] = cl; 402 ae_drv_ctx->civ[MAX_IV_SIZE - 1] = IV_CTR_INIT; 403 404 return TEE_SUCCESS; 405 } 406 407 static TEE_Result sec_aead_get_dma(struct authenc_ctx *ae_drv_ctx) 408 { 409 ae_drv_ctx->key_dma = virt_to_phys(ae_drv_ctx->key); 410 ae_drv_ctx->civ_dma = virt_to_phys(ae_drv_ctx->civ); 411 ae_drv_ctx->tag_dma = virt_to_phys(ae_drv_ctx->tag); 412 ae_drv_ctx->src_dma = virt_to_phys(ae_drv_ctx->src.data); 413 ae_drv_ctx->dst_dma = virt_to_phys(ae_drv_ctx->dst.data); 414 415 if (ae_drv_ctx->mode == C_MODE_GCM) 416 return TEE_SUCCESS; 417 ae_drv_ctx->aiv_dma = virt_to_phys(ae_drv_ctx->aiv); 418 419 return TEE_SUCCESS; 420 } 421 422 static TEE_Result sec_aead_data_alloc(struct authenc_ctx *ae_drv_ctx) 423 { 424 ae_drv_ctx->src.length = ae_drv_ctx->payload_len + 425 ae_drv_ctx->aad.length; 426 ae_drv_ctx->src.data = malloc(ae_drv_ctx->src.length); 427 if (!ae_drv_ctx->src.data) { 428 EMSG("Fail to malloc src"); 429 return TEE_ERROR_OUT_OF_MEMORY; 430 } 431 432 ae_drv_ctx->dst.length = ae_drv_ctx->src.length; 433 ae_drv_ctx->dst.data = malloc(ae_drv_ctx->dst.length); 434 if (!ae_drv_ctx->dst.data) { 435 EMSG("Fail to malloc dst"); 436 free(ae_drv_ctx->src.data); 437 ae_drv_ctx->src.data = NULL; 438 return TEE_ERROR_OUT_OF_MEMORY; 439 } 440 441 return TEE_SUCCESS; 442 } 443 444 static TEE_Result ae_soft_calc_init(struct authenc_ctx *ae_drv_ctx, 445 struct drvcrypt_authenc_init *dinit) 446 { 447 struct crypto_authenc_ctx *ae_soft_ctx = NULL; 448 TEE_Result ret = TEE_SUCCESS; 449 TEE_OperationMode mode = 0; 450 451 if (ae_drv_ctx->algo == C_ALG_AES && ae_drv_ctx->mode == C_MODE_GCM) { 452 ae_drv_ctx->is_hw_supported = false; 453 ae_soft_ctx = ae_drv_ctx->ae_soft_ctx; 454 if (!dinit->encrypt) 455 mode = 1; 456 ret = ae_soft_ctx->ops->init(ae_soft_ctx, mode, 457 dinit->key.data, dinit->key.length, 458 dinit->nonce.data, dinit->nonce.length, 459 dinit->tag_len, dinit->aad_len, 460 dinit->payload_len); 461 if (ret) 462 EMSG("Fail to init by soft ctx"); 463 464 return ret; 465 } 466 467 return TEE_ERROR_NOT_IMPLEMENTED; 468 } 469 470 static TEE_Result sec_authenc_initialize(struct drvcrypt_authenc_init *dinit) 471 { 472 struct authenc_ctx *ae_drv_ctx = NULL; 473 TEE_Result ret = TEE_SUCCESS; 474 bool hw_support = false; 475 476 ret = authenc_init_params_check(dinit); 477 if (ret) 478 return ret; 479 480 ae_drv_ctx = dinit->ctx; 481 hw_support = is_hw_supported(dinit); 482 if (!hw_support) 483 return ae_soft_calc_init(ae_drv_ctx, dinit); 484 485 ae_drv_ctx->encrypt = dinit->encrypt; 486 ae_drv_ctx->payload_len = dinit->payload_len; 487 ae_drv_ctx->aad.length = dinit->aad_len; 488 ae_drv_ctx->tag_len = dinit->tag_len; 489 490 ret = sec_aead_set_key(dinit); 491 if (ret) 492 return ret; 493 494 ret = sec_aead_set_iv(dinit); 495 if (ret) 496 goto clean_key; 497 498 ret = sec_aead_data_alloc(ae_drv_ctx); 499 if (ret) 500 goto clean_key; 501 502 ret = sec_aead_get_dma(ae_drv_ctx); 503 if (ret) 504 goto free_data; 505 506 return TEE_SUCCESS; 507 free_data: 508 if (ae_drv_ctx->src.data) { 509 free(ae_drv_ctx->src.data); 510 ae_drv_ctx->src.data = NULL; 511 } 512 if (ae_drv_ctx->dst.data) { 513 free(ae_drv_ctx->dst.data); 514 ae_drv_ctx->src.data = NULL; 515 } 516 clean_key: 517 memzero_explicit(ae_drv_ctx->key, sizeof(ae_drv_ctx->key)); 518 return ret; 519 } 520 521 static TEE_Result 522 sec_authenc_update_aad(struct drvcrypt_authenc_update_aad *dupdate) 523 { 524 struct crypto_authenc_ctx *ae_soft_ctx = NULL; 525 struct authenc_ctx *ae_drv_ctx = NULL; 526 TEE_Result ret = TEE_SUCCESS; 527 528 if (!dupdate || !dupdate->ctx) { 529 EMSG("Invalid input parameters"); 530 return TEE_ERROR_BAD_PARAMETERS; 531 } 532 533 ae_drv_ctx = dupdate->ctx; 534 if (!ae_drv_ctx->is_hw_supported) { 535 ae_soft_ctx = ae_drv_ctx->ae_soft_ctx; 536 ret = ae_soft_ctx->ops->update_aad(ae_soft_ctx, 537 dupdate->aad.data, dupdate->aad.length); 538 if (ret) 539 EMSG("Fail to update aad by soft ctx"); 540 541 return ret; 542 } 543 544 if (dupdate->aad.length + ae_drv_ctx->src_offset > 545 ae_drv_ctx->src.length) { 546 EMSG("Invalid aad length"); 547 return TEE_ERROR_BAD_PARAMETERS; 548 } 549 /* 550 * Both aad and ptx need to be filled in the src field. 551 * Here, aad is placed in the header of the src field. 552 */ 553 memcpy(ae_drv_ctx->src.data + ae_drv_ctx->src_offset, 554 dupdate->aad.data, dupdate->aad.length); 555 ae_drv_ctx->src_offset += dupdate->aad.length; 556 557 return TEE_SUCCESS; 558 } 559 560 static TEE_Result update_params_check(struct drvcrypt_authenc_update_payload *d) 561 { 562 struct authenc_ctx *ae_drv_ctx = NULL; 563 564 ae_drv_ctx = d->ctx; 565 if (!ae_drv_ctx->src.data || !ae_drv_ctx->dst.data) { 566 EMSG("Invalid input/output data"); 567 return TEE_ERROR_BAD_PARAMETERS; 568 } 569 570 if (d->src.length + ae_drv_ctx->src_offset > ae_drv_ctx->src.length) { 571 EMSG("Invalid update src length"); 572 return TEE_ERROR_BAD_PARAMETERS; 573 } 574 575 if (d->dst.length + ae_drv_ctx->src_offset > ae_drv_ctx->dst.length) { 576 EMSG("Invalid update dst length"); 577 return TEE_ERROR_BAD_PARAMETERS; 578 } 579 580 return TEE_SUCCESS; 581 } 582 583 static TEE_Result 584 sec_authenc_update_payload(struct drvcrypt_authenc_update_payload *d) 585 { 586 struct crypto_authenc_ctx *ae_soft_ctx = NULL; 587 struct authenc_ctx *ae_drv_ctx = NULL; 588 TEE_Result ret = TEE_SUCCESS; 589 590 if (!d || !d->ctx) { 591 EMSG("Invalid input parameters"); 592 return TEE_ERROR_BAD_PARAMETERS; 593 } 594 595 ae_drv_ctx = d->ctx; 596 if (!ae_drv_ctx->is_hw_supported) { 597 ae_soft_ctx = ae_drv_ctx->ae_soft_ctx; 598 ret = ae_soft_ctx->ops->update_payload(ae_soft_ctx, 599 (TEE_OperationMode)(d->encrypt == 0), 600 d->src.data, d->src.length, d->dst.data); 601 if (ret) 602 EMSG("Fail to update payload by soft ctx"); 603 604 return ret; 605 } 606 607 ret = update_params_check(d); 608 if (ret) 609 return ret; 610 611 memcpy(ae_drv_ctx->src.data + ae_drv_ctx->src_offset, 612 d->src.data, d->src.length); 613 614 ret = sec_do_aead_task(ae_drv_ctx->qp, ae_drv_ctx); 615 if (ret) 616 return ret; 617 618 memcpy(d->dst.data, ae_drv_ctx->dst.data + ae_drv_ctx->src_offset, 619 d->dst.length); 620 ae_drv_ctx->src_offset += d->src.length; 621 622 return TEE_SUCCESS; 623 } 624 625 static TEE_Result final_params_check(struct drvcrypt_authenc_final *dfinal) 626 { 627 struct authenc_ctx *ae_drv_ctx = dfinal->ctx; 628 629 if (!ae_drv_ctx->src.data || !ae_drv_ctx->dst.data) { 630 EMSG("Invalid input/output data"); 631 return TEE_ERROR_BAD_PARAMETERS; 632 } 633 634 if (dfinal->src.length + ae_drv_ctx->src_offset > 635 ae_drv_ctx->src.length) { 636 EMSG("Invalid dfinal src length"); 637 return TEE_ERROR_BAD_PARAMETERS; 638 } 639 640 if (dfinal->dst.length + ae_drv_ctx->src_offset > 641 ae_drv_ctx->dst.length) { 642 EMSG("Invalid dfinal dst length"); 643 return TEE_ERROR_BAD_PARAMETERS; 644 } 645 646 if (dfinal->tag.length > SEC_MAX_TAG_LEN) { 647 EMSG("Invalid dfinal tag length"); 648 return TEE_ERROR_BAD_PARAMETERS; 649 } 650 651 return TEE_SUCCESS; 652 } 653 654 static TEE_Result sec_authenc_enc_final(struct drvcrypt_authenc_final *dfinal) 655 { 656 struct crypto_authenc_ctx *ae_soft_ctx = NULL; 657 struct authenc_ctx *ae_drv_ctx = NULL; 658 TEE_Result ret = TEE_SUCCESS; 659 660 if (!dfinal || !dfinal->ctx) { 661 EMSG("Invalid input parameters"); 662 return TEE_ERROR_BAD_PARAMETERS; 663 } 664 665 ae_drv_ctx = dfinal->ctx; 666 if (!ae_drv_ctx->is_hw_supported) { 667 ae_soft_ctx = ae_drv_ctx->ae_soft_ctx; 668 ret = ae_soft_ctx->ops->enc_final(ae_soft_ctx, 669 dfinal->src.data, dfinal->src.length, 670 dfinal->dst.data, dfinal->tag.data, 671 &dfinal->tag.length); 672 if (ret) 673 EMSG("Fail to do enc final by soft ctx"); 674 675 return ret; 676 } 677 678 ret = final_params_check(dfinal); 679 if (ret) 680 return ret; 681 memcpy(ae_drv_ctx->src.data + ae_drv_ctx->src_offset, dfinal->src.data, 682 dfinal->src.length); 683 684 ret = sec_do_aead_task(ae_drv_ctx->qp, ae_drv_ctx); 685 686 memcpy(dfinal->tag.data, ae_drv_ctx->tag, dfinal->tag.length); 687 memcpy(dfinal->dst.data, ae_drv_ctx->dst.data + ae_drv_ctx->src_offset, 688 dfinal->dst.length); 689 690 return ret; 691 } 692 693 static TEE_Result sec_authenc_dec_final(struct drvcrypt_authenc_final *dfinal) 694 { 695 struct crypto_authenc_ctx *ae_soft_ctx = NULL; 696 struct authenc_ctx *ae_drv_ctx = NULL; 697 TEE_Result ret = TEE_SUCCESS; 698 699 if (!dfinal || !dfinal->ctx) { 700 EMSG("Invalid input parameters"); 701 return TEE_ERROR_BAD_PARAMETERS; 702 } 703 704 ae_drv_ctx = dfinal->ctx; 705 if (!ae_drv_ctx->is_hw_supported) { 706 ae_soft_ctx = ae_drv_ctx->ae_soft_ctx; 707 ret = ae_soft_ctx->ops->dec_final(ae_soft_ctx, 708 dfinal->src.data, dfinal->src.length, 709 dfinal->dst.data, dfinal->tag.data, 710 dfinal->tag.length); 711 if (ret) 712 EMSG("Fail to do dec final by soft ctx"); 713 714 return ret; 715 } 716 717 ret = final_params_check(dfinal); 718 if (ret) 719 return ret; 720 memcpy(ae_drv_ctx->src.data + ae_drv_ctx->src_offset, dfinal->src.data, 721 dfinal->src.length); 722 memcpy(ae_drv_ctx->tag, dfinal->tag.data, dfinal->tag.length); 723 724 ret = sec_do_aead_task(ae_drv_ctx->qp, ae_drv_ctx); 725 if (ret) 726 return ret; 727 728 memcpy(dfinal->dst.data, ae_drv_ctx->dst.data + ae_drv_ctx->src_offset, 729 dfinal->dst.length); 730 if (ae_drv_ctx->result == SEC_TAG_ERR) { 731 EMSG("Integrity check failed"); 732 return TEE_ERROR_BAD_STATE; 733 } 734 735 return TEE_SUCCESS; 736 } 737 738 static void sec_authenc_do_final(void *ctx __unused) 739 { 740 } 741 742 static void sec_authenc_copy_state(void *dst_ctx, void *src_ctx) 743 { 744 struct authenc_ctx *dst = dst_ctx; 745 struct authenc_ctx *src = src_ctx; 746 TEE_Result ret = TEE_SUCCESS; 747 748 if (!src->is_hw_supported) { 749 dst->is_hw_supported = false; 750 src->ae_soft_ctx->ops->copy_state(dst->ae_soft_ctx, 751 src->ae_soft_ctx); 752 return; 753 } 754 755 dst->algo = src->algo; 756 dst->mode = src->mode; 757 dst->encrypt = src->encrypt; 758 dst->key_len = src->key_len; 759 dst->tag_len = src->tag_len; 760 dst->c_key_len = src->c_key_len; 761 dst->aad.length = src->aad.length; 762 dst->src_offset = src->src_offset; 763 dst->payload_len = src->payload_len; 764 dst->is_hw_supported = src->is_hw_supported; 765 memcpy(dst->key, src->key, src->key_len); 766 memcpy(dst->civ, src->civ, src->civ_len); 767 /* The len of aiv is always MAX_IV_SIZE */ 768 memcpy(dst->aiv, src->aiv, MAX_IV_SIZE); 769 ret = sec_aead_data_alloc(dst); 770 if (ret) 771 return; 772 memcpy(dst->src.data, src->src.data, 773 src->aad.length + src->payload_len); 774 memcpy(dst->dst.data, src->dst.data, 775 src->aad.length + src->payload_len); 776 777 ret = sec_aead_get_dma(dst); 778 if (ret) { 779 memzero_explicit(dst->key, dst->key_len); 780 free(dst->src.data); 781 dst->src.data = NULL; 782 free(dst->dst.data); 783 dst->dst.data = NULL; 784 } 785 } 786 787 static struct drvcrypt_authenc driver_authenc = { 788 .alloc_ctx = sec_authenc_ctx_allocate, 789 .free_ctx = sec_authenc_ctx_free, 790 .init = sec_authenc_initialize, 791 .update_aad = sec_authenc_update_aad, 792 .update_payload = sec_authenc_update_payload, 793 .enc_final = sec_authenc_enc_final, 794 .dec_final = sec_authenc_dec_final, 795 .final = sec_authenc_do_final, 796 .copy_state = sec_authenc_copy_state, 797 }; 798 799 static TEE_Result sec_authenc_init(void) 800 { 801 TEE_Result ret = TEE_SUCCESS; 802 803 ret = drvcrypt_register_authenc(&driver_authenc); 804 if (ret) 805 EMSG("Sec authenc register to crypto fail ret=%#"PRIx32, ret); 806 807 return ret; 808 } 809 driver_init(sec_authenc_init); 810