xref: /optee_os/core/drivers/crypto/asu_driver/asu_hash.c (revision 06c4e95e469c9c89e9ba4a6915d1be7bb8ea6fbc)
1 // SPDX-License-Identifier: BSD-2-Clause
2 /*
3  * Copyright (c) 2025-2026, Advanced Micro Devices, Inc. All rights reserved.
4  *
5  */
6 
7 #include <assert.h>
8 #include <drivers/amd/asu_client.h>
9 #include <drvcrypt_hash.h>
10 #include <initcall.h>
11 #include <io.h>
12 #include <kernel/mutex.h>
13 #include <kernel/panic.h>
14 #include <kernel/unwind.h>
15 #include <mm/core_memprot.h>
16 #include <stdbool.h>
17 #include <stdio.h>
18 #include <string.h>
19 #include <tee/cache.h>
20 #include <trace.h>
21 #include <util.h>
22 
23 #define ASU_SHA_OPERATION_CMD_ID		0U
24 /* SHA modes */
25 #define ASU_SHA_MODE_SHA256			0U
26 #define ASU_SHA_MODE_SHA384			1U
27 #define ASU_SHA_MODE_SHA512			2U
28 #define ASU_SHA_MODE_SHAKE256			4U
29 
30 /* SHA operation mode */
31 #define ASU_SHA_START				0x1U
32 #define ASU_SHA_UPDATE				0x2U
33 #define ASU_SHA_FINISH				0x4U
34 
35 /* SHA hash lengths */
36 #define ASU_SHA_256_HASH_LEN			32U
37 #define ASU_SHA_384_HASH_LEN			48U
38 #define ASU_SHA_512_HASH_LEN			64U
39 #define ASU_SHAKE_256_HASH_LEN			32U
40 #define ASU_SHAKE_256_MAX_HASH_LEN		136U
41 #define ASU_DATA_CHUNK_LEN			4096U
42 
43 #define ASU_DMA_ALIGNMENT			64U
44 
/*
 * Driver-wide state tracking which hash engines are free to claim.
 * A single instance is allocated in asu_hash_init().
 */
struct asu_shadev {
	bool sha2_available;	/* SHA2 engine is free to be claimed */
	bool sha3_available;	/* SHA3 engine is free to be claimed */
	/* Control access to engine availability flags above */
	struct mutex engine_lock;
};
51 
/*
 * Command payload sent to the ASU firmware for one SHA request.
 * NOTE(review): this layout is part of the firmware ABI (it is copied
 * verbatim into the IPI queue buffer) - do not reorder or repack.
 */
struct asu_sha_op_cmd {
	uint64_t dataaddr;	/* Physical address of the input data chunk */
	uint64_t hashaddr;	/* Physical address of the digest buffer */
	uint32_t datasize;	/* Input chunk size in bytes */
	uint32_t hashbufsize;	/* Digest buffer size in bytes */
	uint8_t shamode;	/* One of ASU_SHA_MODE_* */
	uint8_t islast;		/* Non-zero on the final request */
	uint8_t opflags;	/* ASU_SHA_START/UPDATE/FINISH flags */
	uint8_t shakereserved;	/* Unused here; presumably SHAKE-specific */
};
62 
/*
 * Private per-operation hash context wrapping the generic
 * crypto_hash_ctx handed out to the drvcrypt layer.
 */
struct asu_hash_ctx {
	struct crypto_hash_ctx hash_ctx; /* Crypto Hash API context */
	struct asu_client_params cparam; /* IPI client request parameters */
	uint32_t shamode;	/* ASU_SHA_MODE_* for this context */
	uint32_t shastart;	/* ASU_SHA_START until the first chunk is sent */
	uint8_t uniqueid;	/* ID from asu_alloc_unique_id() */
	uint8_t module;		/* ASU_MODULE_SHA2_ID or ASU_MODULE_SHA3_ID */
};
71 
72 static const struct crypto_hash_ops asu_hash_ops;
73 static struct asu_shadev *asu_shadev;
74 static struct asu_hash_ctx *to_hash_ctx(struct crypto_hash_ctx *ctx);
75 
76 /**
77  * asu_hash_get_alg() - Get fw engine module ID and Hash mode.
78  * @algo:	TEE algo type.
79  * @module:	Engine module ID
80  * @mode:	Hash operation mode
81  * Map TEE algo type to fw module ID amd mode.
82  *
83  * Return: TEE_SUCCESS or TEE_ERROR_NOT_IMPLEMENTED
84  */
85 
asu_hash_get_alg(uint32_t algo,uint32_t * module,uint32_t * mode)86 static TEE_Result asu_hash_get_alg(uint32_t algo,
87 				   uint32_t *module,
88 				   uint32_t *mode)
89 {
90 	TEE_Result ret = TEE_SUCCESS;
91 
92 	switch (algo) {
93 	case TEE_ALG_SHA256:
94 		*module = ASU_MODULE_SHA2_ID;
95 		*mode = ASU_SHA_MODE_SHA256;
96 		break;
97 	case TEE_ALG_SHA384:
98 		*module = ASU_MODULE_SHA2_ID;
99 		*mode = ASU_SHA_MODE_SHA384;
100 		break;
101 	case TEE_ALG_SHA512:
102 		*module = ASU_MODULE_SHA2_ID;
103 		*mode = ASU_SHA_MODE_SHA512;
104 		break;
105 	case TEE_ALG_SHA3_256:
106 		*module = ASU_MODULE_SHA3_ID;
107 		*mode = ASU_SHA_MODE_SHA256;
108 		break;
109 	case TEE_ALG_SHA3_384:
110 		*module = ASU_MODULE_SHA3_ID;
111 		*mode = ASU_SHA_MODE_SHA384;
112 		break;
113 	case TEE_ALG_SHA3_512:
114 		*module = ASU_MODULE_SHA3_ID;
115 		*mode = ASU_SHA_MODE_SHA512;
116 		break;
117 	default:
118 		ret = TEE_ERROR_NOT_IMPLEMENTED;
119 		break;
120 	}
121 
122 	return ret;
123 }
124 
125 /**
126  * asu_hash_initialize() - Initialize private asu_hash_ctx for hash operation.
127  * @ctx: crypto context used by the crypto_hash_*() functions
128  * Initialize hash operation request
129  *
130  * Return: TEE_SUCCESS or TEE_ERROR_BAD_PARAMETERS
131  */
132 
asu_hash_initialize(struct crypto_hash_ctx * ctx)133 static TEE_Result asu_hash_initialize(struct crypto_hash_ctx *ctx)
134 {
135 	to_hash_ctx(ctx)->shastart = ASU_SHA_START;
136 
137 	return TEE_SUCCESS;
138 }
139 
140 /**
141  * asu_sha_op() - Perform hash operation.
142  * @asu_hashctx:Request private hash context
143  * @op:		asu_sha_op_cmd parameters for fw engine
144  * @module:	Engine module ID
145  * @data:	Output digest received from engine
146  * Create request header, send and wait for result
147  * from engine.
148  *
149  * Return: TEE_SUCCESS or TEE_ERROR_GENERIC
150  */
151 
asu_sha_op(struct asu_hash_ctx * asu_hashctx,struct asu_sha_op_cmd * op,uint8_t module)152 static TEE_Result asu_sha_op(struct asu_hash_ctx *asu_hashctx,
153 			     struct asu_sha_op_cmd *op,
154 			     uint8_t module)
155 {
156 	TEE_Result ret = TEE_SUCCESS;
157 	uint32_t header = 0;
158 	uint32_t status = 0;
159 
160 	header = asu_create_header(ASU_SHA_OPERATION_CMD_ID,
161 				   asu_hashctx->uniqueid, module,
162 				   sizeof(*op) / sizeof(uint32_t));
163 	ret = asu_update_queue_buffer_n_send_ipi(&asu_hashctx->cparam, op,
164 						 sizeof(*op), header,
165 						 &status);
166 	if (status) {
167 		EMSG("FW error 0x%x\n", status);
168 		ret = TEE_ERROR_GENERIC;
169 	}
170 
171 	return ret;
172 }
173 
174 /**
175  * asu_hash_update() - Send update request to engine.
176  * @asu_hashctx:Request private hash context
177  * @data:	Input data buffer
178  * @len:	Size of data buffer
179  * Send update request to engine
180  * from engine.
181  *
182  * Return: TEE_SUCCESS or TEE_ERROR_GENERIC
183  */
184 
asu_hash_update(struct asu_hash_ctx * asu_hashctx,uint8_t * data,size_t len)185 static TEE_Result asu_hash_update(struct asu_hash_ctx *asu_hashctx,
186 				  uint8_t *data, size_t len)
187 {
188 	TEE_Result ret = TEE_SUCCESS;
189 	struct asu_sha_op_cmd op = {};
190 	struct asu_client_params *cparam = NULL;
191 	uint32_t remaining = 0;
192 
193 	/* Inputs of client request */
194 	cparam = &asu_hashctx->cparam;
195 	cparam->priority = ASU_PRIORITY_HIGH;
196 	cparam->cbhandler = NULL;
197 
198 	/* Inputs of SHA request */
199 	cache_operation(TEE_CACHEFLUSH, data, len);
200 	op.hashaddr = 0;
201 	op.hashbufsize = 0;
202 	op.shamode = asu_hashctx->shamode;
203 	op.islast = 0;
204 	remaining = len;
205 	while (remaining) {
206 		op.datasize = MIN(remaining, ASU_DATA_CHUNK_LEN);
207 		op.opflags = ASU_SHA_UPDATE | asu_hashctx->shastart;
208 		op.dataaddr = virt_to_phys(data);
209 		remaining -= op.datasize;
210 		data += op.datasize;
211 		ret = asu_sha_op(asu_hashctx, &op, asu_hashctx->module);
212 		if (ret)
213 			break;
214 		asu_hashctx->shastart = 0;
215 	}
216 
217 	return ret;
218 }
219 
asu_hash_do_update(struct crypto_hash_ctx * ctx,const uint8_t * data,size_t len)220 static TEE_Result asu_hash_do_update(struct crypto_hash_ctx *ctx,
221 				     const uint8_t *data, size_t len)
222 {
223 	struct asu_hash_ctx *asu_hashctx = NULL;
224 
225 	if (!len) {
226 		DMSG("This is 0 len task, skip");
227 		return TEE_SUCCESS;
228 	}
229 
230 	if (!data && len) {
231 		EMSG("Invalid input parameters");
232 		return TEE_ERROR_BAD_PARAMETERS;
233 	}
234 
235 	asu_hashctx = to_hash_ctx(ctx);
236 	if (asu_hashctx->uniqueid == ASU_UNIQUE_ID_MAX)
237 		return TEE_ERROR_BAD_PARAMETERS;
238 
239 	return asu_hash_update(asu_hashctx, (uint8_t *)data, len);
240 }
241 
242 /**
243  * asu_hash_final() - Send final request to engine.
244  * @asu_hashctx:Request private hash context
245  * @digest:	Output digest buffer
246  * @len:	Size of digest buffer
247  *
248  * Send final request to engine and populate digest result
249  *
250  * Return: TEE_SUCCESS, TEE_ERROR_BAD_PARAMETERS or TEE_ERROR_GENERIC
251  */
252 
asu_hash_final(struct asu_hash_ctx * asu_hashctx,uint8_t * digest,size_t len)253 static TEE_Result asu_hash_final(struct asu_hash_ctx *asu_hashctx,
254 				 uint8_t *digest, size_t len)
255 {
256 	TEE_Result ret = TEE_SUCCESS;
257 	struct asu_sha_op_cmd op = {};
258 	struct asu_client_params *cparam = NULL;
259 	uint8_t *dma_digest = NULL;
260 
261 	if (!digest || len == 0)
262 		return TEE_ERROR_BAD_PARAMETERS;
263 
264 	cparam = &asu_hashctx->cparam;
265 	cparam->priority = ASU_PRIORITY_HIGH;
266 	cparam->cbhandler = NULL;
267 
268 	/* Inputs of SHA request */
269 	op.dataaddr = 0;
270 	op.datasize = 0;
271 	op.hashbufsize = len;
272 	if (asu_hashctx->shamode == ASU_SHA_MODE_SHA256)
273 		op.hashbufsize = ASU_SHA_256_HASH_LEN;
274 	else if (asu_hashctx->shamode == ASU_SHA_MODE_SHA384)
275 		op.hashbufsize = ASU_SHA_384_HASH_LEN;
276 	else if (asu_hashctx->shamode == ASU_SHA_MODE_SHA512)
277 		op.hashbufsize = ASU_SHA_512_HASH_LEN;
278 
279 	dma_digest = memalign(ASU_DMA_ALIGNMENT, op.hashbufsize);
280 	if (!dma_digest) {
281 		EMSG("Failed to allocate DMA buffer for hash digest");
282 		return TEE_ERROR_OUT_OF_MEMORY;
283 	}
284 
285 	op.shamode = asu_hashctx->shamode;
286 	op.islast = 1;
287 	op.opflags = ASU_SHA_FINISH | asu_hashctx->shastart;
288 	op.hashaddr = virt_to_phys(dma_digest);
289 	cache_operation(TEE_CACHEFLUSH, dma_digest, op.hashbufsize);
290 	ret = asu_sha_op(asu_hashctx, &op, asu_hashctx->module);
291 	if (ret) {
292 		EMSG("SHA final operation failed");
293 		goto out;
294 	}
295 	cache_operation(TEE_CACHEINVALIDATE, dma_digest, op.hashbufsize);
296 	memcpy(digest, dma_digest, op.hashbufsize);
297 
298 out:
299 	free(dma_digest);
300 	return ret;
301 }
302 
asu_hash_do_final(struct crypto_hash_ctx * ctx,uint8_t * digest,size_t len)303 static TEE_Result asu_hash_do_final(struct crypto_hash_ctx *ctx,
304 				    uint8_t *digest, size_t len)
305 {
306 	struct asu_hash_ctx *asu_hashctx = NULL;
307 
308 	asu_hashctx = to_hash_ctx(ctx);
309 
310 	return asu_hash_final(asu_hashctx, digest, len);
311 }
312 
313 /**
314  * asu_hash_ctx_free() - Free Private context.
315  * @crypto_hash_ctx: crypto context used by the crypto_hash_*() functions
316  * Release crypto engine and free private context memory.
317  *
318  * Return: void
319  */
320 
asu_hash_ctx_free(struct crypto_hash_ctx * ctx)321 static void asu_hash_ctx_free(struct crypto_hash_ctx *ctx)
322 {
323 	struct asu_hash_ctx *asu_hashctx = NULL;
324 
325 	asu_hashctx = to_hash_ctx(ctx);
326 	asu_free_unique_id(asu_hashctx->uniqueid);
327 	asu_hashctx->uniqueid = ASU_UNIQUE_ID_MAX;
328 	mutex_lock(&asu_shadev->engine_lock);
329 	if (asu_hashctx->module == ASU_MODULE_SHA2_ID) {
330 		assert(!asu_shadev->sha2_available);
331 		asu_shadev->sha2_available = true;
332 	} else if (asu_hashctx->module == ASU_MODULE_SHA3_ID) {
333 		assert(!asu_shadev->sha3_available);
334 		asu_shadev->sha3_available = true;
335 	}
336 	mutex_unlock(&asu_shadev->engine_lock);
337 	free(asu_hashctx);
338 }
339 
/* Hash hooks registered with the drvcrypt layer for every context */
static const struct crypto_hash_ops asu_hash_ops = {
	.init = asu_hash_initialize,
	.update = asu_hash_do_update,
	.final = asu_hash_do_final,
	.free_ctx = asu_hash_ctx_free,
	/*
	 * Current engine does not support partial state copy operation.
	 */
	.copy_state = NULL,
};
350 
351 /*
352  * Returns the reference to the driver context
353  *
354  * @ctx  API Context
355  */
to_hash_ctx(struct crypto_hash_ctx * ctx)356 static struct asu_hash_ctx *to_hash_ctx(struct crypto_hash_ctx *ctx)
357 {
358 	assert(ctx && ctx->ops == &asu_hash_ops);
359 
360 	return container_of(ctx, struct asu_hash_ctx, hash_ctx);
361 }
362 
363 /**
364  * asu_hash_ctx_allocate() - Allocate Private context.
365  * @crypto_hash_ctx: crypto context used by the crypto_hash_*() functions
366  * @algo:	TEE algo type.
367  * Grab crypto engine and free private context memory.
368  *
369  * Return: TEE_SUCCESS, TEE_ERROR_BAD_PARAMETERS or TEE_ERROR_OUT_OF_MEMORY
370  */
371 
asu_hash_ctx_allocate(struct crypto_hash_ctx ** ctx,uint32_t algo)372 static TEE_Result asu_hash_ctx_allocate(struct crypto_hash_ctx **ctx,
373 					uint32_t algo)
374 {
375 	struct asu_hash_ctx *asu_hashctx = NULL;
376 	uint32_t module = 0;
377 	uint32_t shamode = 0;
378 	TEE_Result ret = TEE_SUCCESS;
379 
380 	ret = asu_hash_get_alg(algo, &module, &shamode);
381 	if (ret)
382 		return ret;
383 	mutex_lock(&asu_shadev->engine_lock);
384 	if (module == ASU_MODULE_SHA2_ID && asu_shadev->sha2_available) {
385 		asu_shadev->sha2_available = false;
386 	} else if (module == ASU_MODULE_SHA3_ID && asu_shadev->sha3_available) {
387 		asu_shadev->sha3_available = false;
388 	} else {
389 		mutex_unlock(&asu_shadev->engine_lock);
390 		return TEE_ERROR_NOT_IMPLEMENTED;
391 	}
392 	mutex_unlock(&asu_shadev->engine_lock);
393 
394 	asu_hashctx = calloc(1, sizeof(*asu_hashctx));
395 	if (!asu_hashctx) {
396 		EMSG("Fail to alloc hash");
397 		ret = TEE_ERROR_OUT_OF_MEMORY;
398 		goto free_dev_mem;
399 	}
400 
401 	asu_hashctx->module = module;
402 	asu_hashctx->shamode = shamode;
403 	asu_hashctx->uniqueid = asu_alloc_unique_id();
404 
405 	if (asu_hashctx->uniqueid == ASU_UNIQUE_ID_MAX) {
406 		EMSG("Fail to get unique ID");
407 		ret = TEE_ERROR_BAD_PARAMETERS;
408 		goto free_dev_mem;
409 	}
410 	asu_hashctx->hash_ctx.ops = &asu_hash_ops;
411 	*ctx = &asu_hashctx->hash_ctx;
412 
413 	return ret;
414 
415 free_dev_mem:
416 	mutex_lock(&asu_shadev->engine_lock);
417 	if (asu_hashctx->module == ASU_MODULE_SHA2_ID &&
418 	    !asu_shadev->sha2_available)
419 		asu_shadev->sha2_available = true;
420 	else if (asu_hashctx->module == ASU_MODULE_SHA3_ID &&
421 		 !asu_shadev->sha3_available)
422 		asu_shadev->sha3_available = true;
423 	mutex_unlock(&asu_shadev->engine_lock);
424 
425 	if (asu_hashctx)
426 		free(asu_hashctx);
427 
428 	return ret;
429 }
430 
asu_hash_init(void)431 static TEE_Result asu_hash_init(void)
432 {
433 	TEE_Result ret = TEE_SUCCESS;
434 
435 	asu_shadev = calloc(1, sizeof(*asu_shadev));
436 	mutex_init(&asu_shadev->engine_lock);
437 	asu_shadev->sha2_available = true;
438 	asu_shadev->sha3_available = true;
439 	ret = drvcrypt_register_hash(&asu_hash_ctx_allocate);
440 	if (ret)
441 		EMSG("ASU hash register to crypto fail ret=%#"PRIx32, ret);
442 
443 	return ret;
444 }
445 driver_init(asu_hash_init);
446