// SPDX-License-Identifier: BSD-2-Clause
/*
 * Copyright 2022-2024 HiSilicon Limited.
 * Kunpeng hardware accelerator sec hash algorithm implementation.
 */

#include <drvcrypt_hash.h>
#include <initcall.h>

#include "sec_hash.h"
#include "sec_main.h"

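/*
 * Program the HMAC key into a BD2 SQE: the key length is written to the
 * mac_key_alg field in words (rounded up) and the key buffer is referenced
 * by its DMA address.
 */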
static enum hisi_drv_status sec_digest_set_hmac_key(struct hashctx *ctx,
						    struct hisi_sec_sqe *sqe)
{
	if (ctx->key_len > SEC_DIGEST_MAX_KEY_SIZE || !ctx->key_len) {
		EMSG("Invalid digest key len(%ld)", ctx->key_len);
		return HISI_QM_DRVCRYPT_IN_EPARA;
	}

	/* Key length is programmed in words, rounded up if not word-aligned */
	sqe->type2.mac_key_alg |= SHIFT_U64(DIV_ROUND_UP(ctx->key_len,
							 SEC_ENCODE_BYTES),
					    SEC_AKEY_OFFSET);
	sqe->type2.a_key_addr = ctx->key_dma;

	return HISI_QM_DRVCRYPT_NO_ERR;
}

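/*
 * Select the streaming (long BD) phase for a BD2 SQE from the context state:
 *  - FIRST:  more data follows and no IV yet; the auth IV is generated
 *            internally (AI_GEN_INNER) and no padding is applied.
 *  - MIDDLE: more data follows and an IV exists; the previous MAC is fed
 *            back as the IV, still without padding.
 *  - END:    last chunk of a stream; the previous MAC is fed back, padding
 *            is applied and the total stream length in bits is set.
 *  - SHORT:  single-shot operation, no streaming state is kept.
 * ctx->iv_len doubles as the "stream in progress" flag.
 */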
static void sec_digest_fill_long_bd2(struct hashctx *ctx,
				     struct hisi_sec_sqe *sqe)
{
	uint64_t total_bits = 0;

	if (ctx->has_next && !ctx->iv_len) {
		/* LONG BD FIRST */
		sqe->ai_apd_cs |= AI_GEN_INNER;
		sqe->ai_apd_cs |= SHIFT_U32(AUTHPAD_NOPAD, SEC_APAD_OFFSET);
		ctx->iv_len = ctx->mac_len;
	} else if (ctx->has_next && ctx->iv_len) {
		/* LONG BD MIDDLE */
		sqe->ai_apd_cs |= AI_GEN_IVIN_ADDR;
		sqe->ai_apd_cs |= SHIFT_U32(AUTHPAD_NOPAD, SEC_APAD_OFFSET);
		sqe->type2.a_ivin_addr = sqe->type2.mac_addr;
		ctx->iv_len = ctx->mac_len;
	} else if (!ctx->has_next && ctx->iv_len) {
		/* LONG BD END */
		sqe->ai_apd_cs |= AI_GEN_IVIN_ADDR;
		sqe->ai_apd_cs |= SHIFT_U32(AUTHPAD_PAD, SEC_APAD_OFFSET);
		sqe->type2.a_ivin_addr = sqe->type2.mac_addr;
		total_bits = ctx->long_data_len * BYTE_BITS;
		sqe->type2.long_a_data_len = total_bits;
		ctx->iv_len = 0;
	} else {
		/* SHORT BD */
		ctx->iv_len = 0;
	}
}

static struct crypto_hash *to_hash_ctx(struct crypto_hash_ctx *ctx)
{
	return container_of(ctx, struct crypto_hash, hash_ctx);
}

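/*
 * Map a TEE_ALG_* identifier to the SEC auth algorithm code.
 * Returns A_ALG_MAX for unsupported algorithms.
 */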
static uint32_t sec_digest_get_alg_type(uint32_t algo)
{
	switch (algo) {
	case TEE_ALG_MD5:
		return A_ALG_MD5;
	case TEE_ALG_HMAC_MD5:
		return A_ALG_HMAC_MD5;
	case TEE_ALG_SHA1:
		return A_ALG_SHA1;
	case TEE_ALG_HMAC_SHA1:
		return A_ALG_HMAC_SHA1;
	case TEE_ALG_SHA224:
		return A_ALG_SHA224;
	case TEE_ALG_HMAC_SHA224:
		return A_ALG_HMAC_SHA224;
	case TEE_ALG_SM3:
		return A_ALG_SM3;
	case TEE_ALG_HMAC_SM3:
		return A_ALG_HMAC_SM3;
	case TEE_ALG_SHA256:
		return A_ALG_SHA256;
	case TEE_ALG_HMAC_SHA256:
		return A_ALG_HMAC_SHA256;
	case TEE_ALG_SHA384:
		return A_ALG_SHA384;
	case TEE_ALG_HMAC_SHA384:
		return A_ALG_HMAC_SHA384;
	case TEE_ALG_SHA512:
		return A_ALG_SHA512;
	case TEE_ALG_HMAC_SHA512:
		return A_ALG_HMAC_SHA512;
	default:
		return A_ALG_MAX;
	}
}

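/*
 * Build a BD2 (HW V2) descriptor for one digest operation: MAC calculation
 * scene, source and MAC DMA addresses, MAC length in words, the optional
 * HMAC key and the algorithm code, then the long-BD stream fields.
 */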
static enum hisi_drv_status sec_digest_fill_sqe(void *bd, void *msg)
{
	enum hisi_drv_status ret = HISI_QM_DRVCRYPT_NO_ERR;
	struct hisi_sec_sqe *sqe = bd;
	struct hashctx *ctx = msg;
	uint32_t alg_type = 0;

	if (!ctx->in_len) {
		EMSG("Digest BD2 does not support a zero-length packet");
		return HISI_QM_DRVCRYPT_IN_EPARA;
	}

	sqe->type_auth_cipher = BD_TYPE2;
	sqe->sds_sa_type = SHIFT_U32(SCENE_NOTHING, SEC_SCENE_OFFSET);
	sqe->type_auth_cipher |= SHIFT_U32(AUTH_MAC_CALCULATE, SEC_AUTH_OFFSET);
	sqe->type2.alen_ivllen = ctx->in_len;

	sqe->type2.data_src_addr = ctx->in_dma;
	sqe->type2.mac_addr = ctx->out_dma;
	sqe->type2.mac_key_alg |= ctx->mac_len / SEC_ENCODE_BYTES;

	if (ctx->mode == WCRYPTO_DIGEST_HMAC) {
		ret = sec_digest_set_hmac_key(ctx, sqe);
		if (ret)
			return ret;
	}

	alg_type = sec_digest_get_alg_type(ctx->algo);
	if (alg_type >= A_ALG_MAX) {
		EMSG("Fail to get digest alg type");
		return HISI_QM_DRVCRYPT_IN_EPARA;
	}
	sqe->type2.mac_key_alg |= SHIFT_U32(alg_type, SEC_AEAD_ALG_OFFSET);

	sec_digest_fill_long_bd2(ctx, sqe);

	return ret;
}

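/* BD3 (HW V3) counterpart of sec_digest_set_hmac_key() */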
static enum hisi_drv_status
sec_digest_set_hmac_bd3_key(struct hashctx *ctx, struct hisi_sec_bd3_sqe *sqe)
{
	if (ctx->key_len > SEC_DIGEST_MAX_KEY_SIZE || !ctx->key_len) {
		EMSG("Invalid digest key len(%ld)", ctx->key_len);
		return HISI_QM_DRVCRYPT_IN_EPARA;
	}

	/* Key length is programmed in words, rounded up if not word-aligned */
	sqe->auth_mac_key |= SHIFT_U64(DIV_ROUND_UP(ctx->key_len,
						    SEC_ENCODE_BYTES),
				       SEC_AKEY_OFFSET_V3);
	sqe->a_key_addr = ctx->key_dma;

	return HISI_QM_DRVCRYPT_NO_ERR;
}

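/*
 * BD3 (HW V3) counterpart of sec_digest_fill_long_bd2(): same
 * FIRST/MIDDLE/END/SHORT selection, expressed through auth_mac_key and
 * the stream_scene fields.
 */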
static void sec_digest_fill_long_bd3(struct hashctx *ctx,
				     struct hisi_sec_bd3_sqe *sqe)
{
	uint64_t total_bits = 0;

	if (ctx->has_next && !ctx->iv_len) {
		/* LONG BD FIRST */
		sqe->auth_mac_key |= SHIFT_U32(AI_GEN_INNER,
					       SEC_AI_GEN_OFFSET_V3);
		sqe->stream_scene.auth_pad = AUTHPAD_NOPAD;
		ctx->iv_len = ctx->mac_len;
	} else if (ctx->has_next && ctx->iv_len) {
		/* LONG BD MIDDLE */
		sqe->auth_mac_key |= SHIFT_U32(AI_GEN_IVIN_ADDR,
					       SEC_AI_GEN_OFFSET_V3);
		sqe->stream_scene.auth_pad = AUTHPAD_NOPAD;
		sqe->a_ivin_addr = sqe->mac_addr;
		ctx->iv_len = ctx->mac_len;
	} else if (!ctx->has_next && ctx->iv_len) {
		/* LONG BD END */
		sqe->auth_mac_key |= SHIFT_U32(AI_GEN_IVIN_ADDR,
					       SEC_AI_GEN_OFFSET_V3);
		sqe->stream_scene.auth_pad = AUTHPAD_PAD;
		sqe->a_ivin_addr = sqe->mac_addr;
		total_bits = ctx->long_data_len * BYTE_BITS;
		sqe->stream_scene.long_a_data_len = total_bits;
		ctx->iv_len = 0;
	} else {
		/* SHORT BD */
		ctx->iv_len = 0;
	}
}

static enum hisi_drv_status sec_digest_fill_bd3_sqe(void *bd, void *msg)
{
	enum hisi_drv_status ret = HISI_QM_DRVCRYPT_NO_ERR;
	struct hisi_sec_bd3_sqe *sqe = bd;
	struct hashctx *ctx = msg;
	uint32_t alg_type = 0;

	sqe->bd_param = BD_TYPE3 | SHIFT_U32(ctx->scene, SEC_SCENE_OFFSET_V3);
	sqe->a_len_key = ctx->in_len;
	sqe->auth_mac_key = AUTH_MAC_CALCULATE;
	sqe->data_src_addr = ctx->in_dma;
	sqe->mac_addr = ctx->out_dma;

	if (ctx->mode == WCRYPTO_DIGEST_HMAC) {
		ret = sec_digest_set_hmac_bd3_key(ctx, sqe);
		if (ret)
			return HISI_QM_DRVCRYPT_IN_EPARA;
	}

	sqe->auth_mac_key |= SHIFT_U64(ctx->mac_len / SEC_ENCODE_BYTES,
				       SEC_MAC_OFFSET_V3);
	alg_type = sec_digest_get_alg_type(ctx->algo);
	if (alg_type >= A_ALG_MAX) {
		EMSG("Fail to get digest bd3 alg");
		return HISI_QM_DRVCRYPT_IN_EPARA;
	}
	sqe->auth_mac_key |= SHIFT_U32(alg_type, SEC_AUTH_ALG_OFFSET_V3);
	sec_digest_fill_long_bd3(ctx, sqe);

	return HISI_QM_DRVCRYPT_NO_ERR;
}

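/*
 * Submit one descriptor to the queue pair and synchronously wait for its
 * completion.
 */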
static TEE_Result sec_digest_do_task(struct hisi_qp *qp, void *msg)
{
	TEE_Result ret = TEE_SUCCESS;

	ret = hisi_qp_send(qp, msg);
	if (ret) {
		EMSG("Fail to send task, ret=%d", ret);
		return TEE_ERROR_BAD_STATE;
	}

	ret = hisi_qp_recv_sync(qp, msg);
	if (ret) {
		EMSG("Recv task error, ret=%d", ret);
		return TEE_ERROR_BAD_STATE;
	}

	return TEE_SUCCESS;
}

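/*
 * Check a completed BD2 descriptor: the done flag must be set and
 * error_type must be zero. The BD3 variant below does the same for HW V3.
 */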
static enum hisi_drv_status sec_parse_digest_sqe(void *bd, void *msg __unused)
{
	struct hisi_sec_sqe *sqe = bd;
	uint16_t done = 0;

	done = SEC_GET_FIELD(sqe->type2.done_flag, SEC_DONE_MASK, 0);
	if (done != SEC_HW_TASK_DONE || sqe->type2.error_type) {
		EMSG("SEC BD2 fail! done=%#"PRIx16", etype=%#"PRIx8,
		     done, sqe->type2.error_type);
		return HISI_QM_DRVCRYPT_IN_EPARA;
	}

	return HISI_QM_DRVCRYPT_NO_ERR;
}

static enum hisi_drv_status sec_parse_digest_bd3_sqe(void *bd,
						     void *msg __unused)
{
	struct hisi_sec_bd3_sqe *sqe = bd;
	uint16_t done = 0;

	done = SEC_GET_FIELD(sqe->done_flag, SEC_DONE_MASK, 0);
	if (done != SEC_HW_TASK_DONE || sqe->error_type) {
		EMSG("SEC BD3 fail! done=%#"PRIx16", etype=%#"PRIx8,
		     done, sqe->error_type);
		return HISI_QM_DRVCRYPT_IN_EPARA;
	}

	return HISI_QM_DRVCRYPT_NO_ERR;
}

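/*
 * Reset the streaming state of a digest context and optionally install an
 * HMAC key. Any input buffer left over from a previous operation is freed.
 */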
TEE_Result hisi_sec_digest_ctx_init(struct hashctx *hash_ctx,
				    const uint8_t *key, size_t len)
{
	if (!hash_ctx) {
		EMSG("Input hash_ctx is NULL");
		return TEE_ERROR_BAD_PARAMETERS;
	}

	hash_ctx->in_len = 0;
	hash_ctx->iv_len = 0;
	hash_ctx->has_next = false;
	hash_ctx->long_data_len = 0;
	hash_ctx->scene = SCENE_NOTHING;

	/*
	 * sec_hash_initialize() is also called when an existing context is
	 * reset. Free any previously allocated input buffer so that the in
	 * pointer is guaranteed to be NULL afterwards.
	 */
	free(hash_ctx->in);
	hash_ctx->in = NULL;

	if (len) {
		hash_ctx->key_len = len;
		memcpy(hash_ctx->key, key, len);
	}

	return TEE_SUCCESS;
}

static TEE_Result sec_hash_initialize(struct crypto_hash_ctx *ctx)
{
	struct crypto_hash *hash = NULL;
	struct hashctx *hash_ctx = NULL;

	if (!ctx) {
		EMSG("Input ctx is NULL");
		return TEE_ERROR_BAD_PARAMETERS;
	}

	hash = to_hash_ctx(ctx);
	hash_ctx = hash->ctx;

	return hisi_sec_digest_ctx_init(hash_ctx, NULL, 0);
}

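/*
 * Buffer input data for the current digest stream. The first update
 * allocates a bounce buffer (SMALL_BUF_SIZE, or the input size rounded up
 * to a HISI_QM_ALIGN128 multiple, capped at MAX_AUTH_LENGTH). Whenever the
 * buffer fills up, an intermediate stream-scene task is submitted to the
 * engine and the buffer is reused for the remaining data.
 */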
TEE_Result hisi_sec_digest_do_update(struct hashctx *hash_ctx,
				     const uint8_t *data, size_t len)
{
	TEE_Result ret = TEE_SUCCESS;
	size_t left_size = 0;

	hash_ctx->long_data_len += len;

	if (!hash_ctx->in) {
		if (len <= SMALL_BUF_SIZE)
			hash_ctx->buf_len = SMALL_BUF_SIZE;
		else if (len <= MAX_AUTH_LENGTH)
			hash_ctx->buf_len = ROUNDUP(len, HISI_QM_ALIGN128);
		else
			hash_ctx->buf_len = MAX_AUTH_LENGTH;

		hash_ctx->in_len = 0;
		hash_ctx->in = malloc(hash_ctx->buf_len);
		if (!hash_ctx->in) {
			EMSG("Fail to alloc in data buf");
			return TEE_ERROR_OUT_OF_MEMORY;
		}
		hash_ctx->in_dma = virt_to_phys(hash_ctx->in);
	}

	while (len > 0) {
		if (hash_ctx->in_len + len <= hash_ctx->buf_len) {
			memcpy(hash_ctx->in + hash_ctx->in_len, data, len);
			hash_ctx->in_len += len;
			len = 0;
		} else {
			left_size = hash_ctx->buf_len - hash_ctx->in_len;
			memcpy(hash_ctx->in + hash_ctx->in_len, data,
			       left_size);
			hash_ctx->in_len = hash_ctx->buf_len;
			hash_ctx->scene = SCENE_STREAM;
			hash_ctx->has_next = true;
			data += left_size;
			len -= left_size;
			ret = sec_digest_do_task(hash_ctx->qp, hash_ctx);
			if (ret) {
				EMSG("Fail to do digest task! ret = %#"PRIx32,
				     ret);
				return ret;
			}
			hash_ctx->iv_len = hash_ctx->mac_len;
			hash_ctx->in_len = 0;
		}
	}
	return TEE_SUCCESS;
}

static TEE_Result sec_hash_do_update(struct crypto_hash_ctx *ctx,
				     const uint8_t *data, size_t len)
{
	struct crypto_hash *hash = NULL;
	struct hashctx *hash_ctx = NULL;

	if (!len) {
		IMSG("Zero-length update, skip");
		return TEE_SUCCESS;
	}

	if (!ctx || (!data && len)) {
		EMSG("Invalid input parameters");
		return TEE_ERROR_BAD_PARAMETERS;
	}

	hash = to_hash_ctx(ctx);
	hash_ctx = hash->ctx;

	return hisi_sec_digest_do_update(hash_ctx, data, len);
}

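/*
 * Submit the last descriptor of the stream with has_next cleared so the
 * engine applies the final padding, then copy MIN(mac_len, len) bytes of
 * the result to the caller's buffer.
 */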
TEE_Result hisi_sec_digest_do_final(struct hashctx *hash_ctx, uint8_t *digest,
				    size_t len)
{
	TEE_Result ret = TEE_SUCCESS;

	if (!digest || len == 0) {
		EMSG("Invalid input parameters");
		return TEE_ERROR_BAD_PARAMETERS;
	}

	if (hash_ctx->mac_len & WORD_ALIGNMENT_MASK) {
		EMSG("Invalid digest out_bytes");
		return TEE_ERROR_BAD_PARAMETERS;
	}

	hash_ctx->has_next = false;
	ret = sec_digest_do_task(hash_ctx->qp, hash_ctx);
	if (ret) {
		EMSG("Fail to do digest task! ret = %#"PRIx32, ret);
		return ret;
	}

	memcpy(digest, hash_ctx->out, MIN(hash_ctx->mac_len, len));

	return TEE_SUCCESS;
}

static TEE_Result sec_hash_do_final(struct crypto_hash_ctx *ctx,
				    uint8_t *digest, size_t len)
{
	struct crypto_hash *hash = to_hash_ctx(ctx);
	struct hashctx *hash_ctx = hash->ctx;

	return hisi_sec_digest_do_final(hash_ctx, digest, len);
}

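/*
 * Release the queue pair, free the input bounce buffer, wipe the HMAC key
 * and free the context itself.
 */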
void hisi_sec_digest_ctx_free(struct hashctx *hash_ctx)
{
	hisi_qm_release_qp(hash_ctx->qp);

	free(hash_ctx->in);
	hash_ctx->in = NULL;

	memzero_explicit(hash_ctx->key, SEC_DIGEST_MAX_KEY_SIZE);

	free(hash_ctx);
}

static void sec_hash_ctx_free(struct crypto_hash_ctx *ctx)
{
	struct crypto_hash *hash = NULL;
	struct hashctx *hash_ctx = NULL;

	if (!ctx)
		return;

	hash = to_hash_ctx(ctx);
	hash_ctx = hash->ctx;
	if (!hash_ctx)
		return;
	hisi_sec_digest_ctx_free(hash_ctx);

	hash->ctx = NULL;

	free(hash);
}

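/*
 * Duplicate the streaming state of a digest context for copy_state: scalar
 * state is copied, the input bounce buffer is re-allocated for the new
 * context, and the IV and key material are copied. The destination context
 * is assumed to have been set up for the same algorithm beforehand.
 */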
void hisi_sec_digest_copy_state(struct hashctx *out_hash_ctx,
				struct hashctx *in_hash_ctx)
{
	out_hash_ctx->iv_len = in_hash_ctx->iv_len;
	out_hash_ctx->buf_len = in_hash_ctx->buf_len;
	out_hash_ctx->key_len = in_hash_ctx->key_len;
	out_hash_ctx->has_next = in_hash_ctx->has_next;
	out_hash_ctx->long_data_len = in_hash_ctx->long_data_len;

	if (in_hash_ctx->in) {
		out_hash_ctx->in = malloc(out_hash_ctx->buf_len);
		if (!out_hash_ctx->in) {
			EMSG("Fail to alloc in buf");
			return;
		}
		out_hash_ctx->in_dma = virt_to_phys(out_hash_ctx->in);
		out_hash_ctx->in_len = in_hash_ctx->in_len;
		memcpy(out_hash_ctx->in, in_hash_ctx->in,
		       out_hash_ctx->buf_len);
	}

	memcpy(out_hash_ctx->iv, in_hash_ctx->iv, out_hash_ctx->iv_len);
	memcpy(out_hash_ctx->key, in_hash_ctx->key, out_hash_ctx->key_len);
}

static void sec_hash_copy_state(struct crypto_hash_ctx *out_ctx,
				struct crypto_hash_ctx *in_ctx)
{
	struct crypto_hash *out_hash = NULL;
	struct crypto_hash *in_hash = NULL;
	struct hashctx *out_hash_ctx = NULL;
	struct hashctx *in_hash_ctx = NULL;

	if (!out_ctx || !in_ctx) {
		EMSG("Invalid input parameters");
		return;
	}

	out_hash = to_hash_ctx(out_ctx);
	in_hash = to_hash_ctx(in_ctx);

	out_hash_ctx = out_hash->ctx;
	in_hash_ctx = in_hash->ctx;

	hisi_sec_digest_copy_state(out_hash_ctx, in_hash_ctx);
}

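/*
 * Hash operations exposed to the drvcrypt framework. A rough sketch of the
 * call sequence seen by this driver (illustrative variable names only; the
 * exact glue lives in the drvcrypt hash layer, not here):
 *
 *	struct crypto_hash_ctx *ctx = NULL;
 *
 *	sec_hash_ctx_allocate(&ctx, TEE_ALG_SHA256);
 *	ctx->ops->init(ctx);
 *	ctx->ops->update(ctx, data, data_len);
 *	ctx->ops->final(ctx, digest, digest_len);
 *	ctx->ops->free_ctx(ctx);
 */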
static struct crypto_hash_ops hash_ops = {
	.init = sec_hash_initialize,
	.update = sec_hash_do_update,
	.final = sec_hash_do_final,
	.free_ctx = sec_hash_ctx_free,
	.copy_state = sec_hash_copy_state,
};

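/* Return the MAC (digest) length used for @type, or 0 if unsupported */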
static size_t sec_hash_get_mac_len(uint32_t type)
{
	switch (type) {
	case TEE_ALG_MD5:
	case TEE_ALG_HMAC_MD5:
		return HASH_MAC_LEN128;
	case TEE_ALG_SHA1:
	case TEE_ALG_HMAC_SHA1:
		return HASH_MAC_LEN160;
	case TEE_ALG_SHA224:
	case TEE_ALG_HMAC_SHA224:
		return HASH_MAC_LEN224;
	case TEE_ALG_SM3:
	case TEE_ALG_HMAC_SM3:
	case TEE_ALG_SHA256:
	case TEE_ALG_HMAC_SHA256:
		return HASH_MAC_LEN256;
	case TEE_ALG_SHA384:
	case TEE_ALG_HMAC_SHA384:
		return HASH_MAC_LEN384;
	case TEE_ALG_SHA512:
	case TEE_ALG_HMAC_SHA512:
		return HASH_MAC_LEN512;
	default:
		return 0;
	}
}

static TEE_Result sec_hash_get_dma(struct hashctx *hash_ctx)
{
	hash_ctx->key_dma = virt_to_phys(hash_ctx->key);
	if (!hash_ctx->key_dma) {
		EMSG("Fail to get key_dma");
		return TEE_ERROR_GENERIC;
	}

	hash_ctx->iv_dma = virt_to_phys(hash_ctx->iv);
	if (!hash_ctx->iv_dma) {
		EMSG("Fail to get iv_dma");
		return TEE_ERROR_GENERIC;
	}

	hash_ctx->out_dma = virt_to_phys(hash_ctx->out);
	if (!hash_ctx->out_dma) {
		EMSG("Fail to get out_dma");
		return TEE_ERROR_GENERIC;
	}

	return TEE_SUCCESS;
}

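/*
 * One-time setup of a digest context: resolve the MAC length and digest
 * mode from the algorithm, resolve the DMA addresses of the static buffers,
 * create a queue pair and hook up the BD2 or BD3 fill/parse callbacks
 * depending on the QM hardware version.
 */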
TEE_Result hisi_sec_hash_ctx_init(struct hashctx *hash_ctx, uint32_t algo)
{
	TEE_Result ret = TEE_SUCCESS;

	hash_ctx->mac_len = sec_hash_get_mac_len(algo);
	if (!hash_ctx->mac_len) {
		EMSG("Invalid algo type %#"PRIx32, algo);
		return TEE_ERROR_NOT_IMPLEMENTED;
	}

	hash_ctx->algo = algo;
	hash_ctx->mode = algo >> HASH_MODE_OFFSET;

	ret = sec_hash_get_dma(hash_ctx);
	if (ret)
		return ret;

	hash_ctx->qp = sec_create_qp(HISI_QM_CHANNEL_TYPE0);
	if (!hash_ctx->qp) {
		EMSG("Fail to create hash qp");
		return TEE_ERROR_BUSY;
	}

	if (hash_ctx->qp->qm->version == HISI_QM_HW_V2) {
		hash_ctx->qp->fill_sqe = sec_digest_fill_sqe;
		hash_ctx->qp->parse_sqe = sec_parse_digest_sqe;
	} else {
		hash_ctx->qp->fill_sqe = sec_digest_fill_bd3_sqe;
		hash_ctx->qp->parse_sqe = sec_parse_digest_bd3_sqe;
	}

	return TEE_SUCCESS;
}

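/*
 * drvcrypt allocator entry point: allocate the crypto_hash wrapper and the
 * hardware context, initialize the latter for @algo and expose hash_ops.
 */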
static TEE_Result sec_hash_ctx_allocate(struct crypto_hash_ctx **ctx,
					uint32_t algo)
{
	struct crypto_hash *hash = NULL;
	struct hashctx *hash_ctx = NULL;
	TEE_Result ret = TEE_SUCCESS;

	if (!ctx) {
		EMSG("Ctx is NULL");
		return TEE_ERROR_BAD_PARAMETERS;
	}

	hash = calloc(1, sizeof(*hash));
	if (!hash) {
		EMSG("Fail to alloc hash");
		return TEE_ERROR_OUT_OF_MEMORY;
	}

	hash_ctx = calloc(1, sizeof(*hash_ctx));
	if (!hash_ctx) {
		EMSG("Fail to alloc hash_ctx");
		ret = TEE_ERROR_OUT_OF_MEMORY;
		goto free_hash;
	}

	ret = hisi_sec_hash_ctx_init(hash_ctx, algo);
	if (ret)
		goto free_ctx;

	hash->hash_ctx.ops = &hash_ops;
	hash->ctx = hash_ctx;
	*ctx = &hash->hash_ctx;

	return TEE_SUCCESS;

free_ctx:
	free(hash_ctx);
free_hash:
	free(hash);

	return ret;
}

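/* Register the SEC hash allocator with drvcrypt at driver init time */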
static TEE_Result sec_hash_init(void)
{
	TEE_Result ret = TEE_SUCCESS;

	ret = drvcrypt_register_hash(&sec_hash_ctx_allocate);
	if (ret)
		EMSG("Sec hash register to crypto fail ret=%#"PRIx32, ret);

	return ret;
}
driver_init(sec_hash_init);