xref: /optee_os/core/drivers/crypto/aspeed/hace_ast2600.c (revision e231582fca25178ed521995577f537580ed47a41)
// SPDX-License-Identifier: BSD-2-Clause
/*
 * Copyright (c) 2022, Aspeed Technology Inc.
 */
#include <drvcrypt_hash.h>
#include <io.h>
#include <kernel/delay.h>
#include <kernel/mutex.h>
#include <mm/core_memprot.h>
#include <mm/core_mmu.h>
#include <stddef.h>
#include <stdint.h>
#include <stdlib.h>
#include <string.h>
#include <tee/cache.h>
#include <trace.h>
#include <util.h>

#include "hace_ast2600.h"

#define HACE_BASE	0x1e6d0000

/* register offsets and bit fields */
#define HACE_STS		0x1C
#define   HACE_STS_HASH_INT		BIT(9)
#define   HACE_STS_HASH_BUSY		BIT(0)
#define HACE_HASH_DATA		0x20
#define HACE_HASH_DIGEST	0x24
#define HACE_HASH_HMAC_KEY	0x28
#define HACE_HASH_DATA_LEN	0x2C
#define HACE_HASH_CMD		0x30
#define   HACE_HASH_CMD_ACCUM		BIT(8)
#define   HACE_HASH_CMD_ALG_SHA1	BIT(5)
#define   HACE_HASH_CMD_ALG_SHA256	(BIT(6) | BIT(4))
#define   HACE_HASH_CMD_ALG_SHA384	(BIT(10) | BIT(6) | BIT(5))
#define   HACE_HASH_CMD_ALG_SHA512	(BIT(6) | BIT(5))
#define   HACE_HASH_CMD_SHA_BE		BIT(3)

/* buffer sizes based on SHA-512 needs */
#define HASH_BLK_BUFSZ	128
#define HASH_DGT_BUFSZ	64

register_phys_mem(MEM_AREA_IO_SEC, HACE_BASE, SMALL_PAGE_SIZE);

struct ast2600_hace_ctx {
	struct crypto_hash_ctx hash_ctx;
	uint32_t cmd;
	uint32_t algo;
	uint32_t dgt_size;
	uint32_t blk_size;
	uint32_t pad_size;
	uint64_t total[2];

	/* DMA memory to interact with HACE */
	uint8_t buf[HASH_BLK_BUFSZ];
	uint8_t digest[HASH_DGT_BUFSZ] __aligned(8);
};

static vaddr_t hace_virt;
static struct mutex hace_mtx = MUTEX_INITIALIZER;

static const uint32_t iv_sha1[8] = {
	0x01234567, 0x89abcdef, 0xfedcba98, 0x76543210,
	0xf0e1d2c3, 0, 0, 0
};

static const uint32_t iv_sha256[8] = {
	0x67e6096a, 0x85ae67bb, 0x72f36e3c, 0x3af54fa5,
	0x7f520e51, 0x8c68059b, 0xabd9831f, 0x19cde05b
};

static const uint32_t iv_sha384[16] = {
	0x5d9dbbcb, 0xd89e05c1, 0x2a299a62, 0x07d57c36,
	0x5a015991, 0x17dd7030, 0xd8ec2f15, 0x39590ef7,
	0x67263367, 0x310bc0ff, 0x874ab48e, 0x11155868,
	0x0d2e0cdb, 0xa78ff964, 0x1d48b547, 0xa44ffabe
};

static const uint32_t iv_sha512[16] = {
	0x67e6096a, 0x08c9bcf3, 0x85ae67bb, 0x3ba7ca84,
	0x72f36e3c, 0x2bf894fe, 0x3af54fa5, 0xf1361d5f,
	0x7f520e51, 0xd182e6ad, 0x8c68059b, 0x1f6c3e2b,
	0xabd9831f, 0x6bbd41fb, 0x19cde05b, 0x79217e13
};

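/*
 * Push one physically contiguous buffer through the hash engine and poll
 * the status register for completion. The engine runs in accumulative
 * mode, so the intermediate digest state lives in hctx->digest between
 * calls.
 */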
static TEE_Result ast2600_hace_process(struct crypto_hash_ctx *ctx,
				       const uint8_t *data, size_t len)
{
	TEE_Result rc = TEE_ERROR_GENERIC;
	uint32_t sts = 0;
	uint64_t tref = 0;
	paddr_t data_phys = 0;
	paddr_t digest_phys = 0;
	struct ast2600_hace_ctx *hctx = NULL;

	hctx = container_of(ctx, struct ast2600_hace_ctx, hash_ctx);

	sts = io_read32(hace_virt + HACE_STS);
	if (sts & HACE_STS_HASH_BUSY)
		return TEE_ERROR_BUSY;

	mutex_lock(&hace_mtx);

	cache_operation(TEE_CACHEFLUSH, (void *)data, len);

	/* clear any pending hash interrupt status before starting */
	io_write32(hace_virt + HACE_STS, HACE_STS_HASH_INT);

	data_phys = virt_to_phys((void *)data);
	digest_phys = virt_to_phys(hctx->digest);

	io_write32(hace_virt + HACE_HASH_DATA, (uint32_t)data_phys);
	io_write32(hace_virt + HACE_HASH_DIGEST, (uint32_t)digest_phys);
	io_write32(hace_virt + HACE_HASH_HMAC_KEY, (uint32_t)digest_phys);

	io_write32(hace_virt + HACE_HASH_DATA_LEN, len);
	io_write32(hace_virt + HACE_HASH_CMD, hctx->cmd);

	/* poll for completion */
	tref = timeout_init_us(1000 + (len >> 3));

	do {
		sts = io_read32(hace_virt + HACE_STS);
		if (timeout_elapsed(tref)) {
			rc = TEE_ERROR_TARGET_DEAD;
			goto out;
		}
	} while (!(sts & HACE_STS_HASH_INT));

	rc = TEE_SUCCESS;

out:
	mutex_unlock(&hace_mtx);

	return rc;
}

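/*
 * Reset the hash state: load the standard initial hash values for the
 * selected algorithm into the DMA digest buffer and clear the running
 * byte count.
 */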
static TEE_Result ast2600_hace_init(struct crypto_hash_ctx *ctx)
{
	struct ast2600_hace_ctx *hctx = NULL;

	hctx = container_of(ctx, struct ast2600_hace_ctx, hash_ctx);

	switch (hctx->algo) {
	case TEE_ALG_SHA1:
		memcpy(hctx->digest, iv_sha1, sizeof(iv_sha1));
		break;
	case TEE_ALG_SHA256:
		memcpy(hctx->digest, iv_sha256, sizeof(iv_sha256));
		break;
	case TEE_ALG_SHA384:
		memcpy(hctx->digest, iv_sha384, sizeof(iv_sha384));
		break;
	case TEE_ALG_SHA512:
		memcpy(hctx->digest, iv_sha512, sizeof(iv_sha512));
		break;
	default:
		return TEE_ERROR_NOT_SUPPORTED;
	}

	hctx->total[0] = 0;
	hctx->total[1] = 0;

	cache_operation(TEE_CACHEFLUSH, hctx->digest, sizeof(hctx->digest));

	return TEE_SUCCESS;
}

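/*
 * Accumulate input data. Partial blocks are buffered in hctx->buf;
 * complete blocks are handed to the engine one block at a time.
 */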
static TEE_Result ast2600_hace_update(struct crypto_hash_ctx *ctx,
				      const uint8_t *data, size_t len)
{
	TEE_Result rc = TEE_ERROR_GENERIC;
	uint32_t left = 0;
	uint32_t fill = 0;
	size_t blk_size = 0;
	struct ast2600_hace_ctx *hctx = NULL;

	hctx = container_of(ctx, struct ast2600_hace_ctx, hash_ctx);

	blk_size = hctx->blk_size;

	left = hctx->total[0] & (blk_size - 1);
	fill = blk_size - left;

	hctx->total[0] += len;
	if (hctx->total[0] < len)
		hctx->total[1]++;

	if (left && len >= fill) {
		memcpy(hctx->buf + left, data, fill);
		rc = ast2600_hace_process(ctx, hctx->buf, blk_size);
		if (rc)
			return rc;

		data += fill;
		len -= fill;
		left = 0;
	}

	while (len >= blk_size) {
		rc = ast2600_hace_process(ctx, data, blk_size);
		if (rc)
			return rc;

		data += blk_size;
		len -= blk_size;
	}

	if (len)
		memcpy(hctx->buf + left, data, len);

	return TEE_SUCCESS;
}

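/*
 * Append the standard SHA padding (0x80, zero fill, then the message
 * length in bits) through the regular update path, then copy the digest
 * out of the DMA buffer.
 */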
static TEE_Result ast2600_hace_final(struct crypto_hash_ctx *ctx,
				     uint8_t *digest, size_t len)
{
	TEE_Result rc = TEE_ERROR_GENERIC;
	uint32_t last = 0;
	uint32_t padn = 0;
	uint8_t pad[HASH_BLK_BUFSZ * 2] = { };
	uint64_t dbits[2] = { };
	uint64_t dbits_be[2] = { };
	struct ast2600_hace_ctx *hctx = NULL;

	hctx = container_of(ctx, struct ast2600_hace_ctx, hash_ctx);

	if (len < hctx->dgt_size)
		return TEE_ERROR_BAD_PARAMETERS;

	memset(pad, 0, sizeof(pad));
	pad[0] = 0x80;

	dbits[0] = (hctx->total[0] << 3);
	dbits_be[0] = get_be64(&dbits[0]);

	dbits[1] = (hctx->total[0] >> 61) | (hctx->total[1] << 3);
	dbits_be[1] = get_be64(&dbits[1]);

	last = hctx->total[0] & (hctx->blk_size - 1);

	switch (hctx->algo) {
	case TEE_ALG_SHA1:
	case TEE_ALG_SHA256:
		if (last < 56)
			padn = 56 - last;
		else
			padn = 120 - last;

		rc = ast2600_hace_update(ctx, pad, padn);
		if (rc)
			return rc;

		rc = ast2600_hace_update(ctx, (uint8_t *)&dbits_be[0],
					 sizeof(dbits_be[0]));
		if (rc)
			return rc;
		break;
	case TEE_ALG_SHA384:
	case TEE_ALG_SHA512:
		if (last < 112)
			padn = 112 - last;
		else
			padn = 240 - last;

		rc = ast2600_hace_update(ctx, pad, padn);
		if (rc)
			return rc;

		rc = ast2600_hace_update(ctx, (uint8_t *)&dbits_be[1],
					 sizeof(dbits_be[1]));
		if (rc)
			return rc;

		rc = ast2600_hace_update(ctx, (uint8_t *)&dbits_be[0],
					 sizeof(dbits_be[0]));
		if (rc)
			return rc;
		break;
	default:
		return TEE_ERROR_NOT_SUPPORTED;
	}

	cache_operation(TEE_CACHEINVALIDATE, hctx->digest,
			sizeof(hctx->digest));

	memcpy(digest, hctx->digest, hctx->dgt_size);

	return TEE_SUCCESS;
}

static void ast2600_hace_free(struct crypto_hash_ctx *ctx)
{
	struct ast2600_hace_ctx *hctx = NULL;

	hctx = container_of(ctx, struct ast2600_hace_ctx, hash_ctx);

	free(hctx);
}

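/*
 * Duplicate a hash context. The source context is invalidated from the
 * cache first so the copy observes what the engine last wrote, and the
 * copy is flushed so the engine can consume it later.
 */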
static void ast2600_hace_copy_state(struct crypto_hash_ctx *dst_ctx,
				    struct crypto_hash_ctx *src_ctx)
{
	struct ast2600_hace_ctx *src_hctx = NULL;
	struct ast2600_hace_ctx *dst_hctx = NULL;

	src_hctx = container_of(src_ctx, struct ast2600_hace_ctx, hash_ctx);
	dst_hctx = container_of(dst_ctx, struct ast2600_hace_ctx, hash_ctx);

	cache_operation(TEE_CACHEINVALIDATE, src_hctx, sizeof(*src_hctx));

	memcpy(dst_hctx, src_hctx, sizeof(*dst_hctx));

	cache_operation(TEE_CACHEFLUSH, dst_hctx, sizeof(*dst_hctx));
}

static const struct crypto_hash_ops ast2600_hace_ops = {
	.init = ast2600_hace_init,
	.update = ast2600_hace_update,
	.final = ast2600_hace_final,
	.free_ctx = ast2600_hace_free,
	.copy_state = ast2600_hace_copy_state,
};

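/*
 * Allocate a context and fill in the per-algorithm digest, block and
 * padding sizes plus the engine command word.
 */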
static TEE_Result ast2600_hace_alloc(struct crypto_hash_ctx **pctx,
				     uint32_t algo)
{
	struct ast2600_hace_ctx *hctx = calloc(1, sizeof(*hctx));

	if (!hctx)
		return TEE_ERROR_OUT_OF_MEMORY;

	hctx->hash_ctx.ops = &ast2600_hace_ops;
	hctx->algo = algo;
	hctx->cmd = HACE_HASH_CMD_ACCUM | HACE_HASH_CMD_SHA_BE;

	switch (algo) {
	case TEE_ALG_SHA1:
		hctx->dgt_size = 20;
		hctx->blk_size = 64;
		hctx->pad_size = 8;
		hctx->cmd |= HACE_HASH_CMD_ALG_SHA1;
		break;
	case TEE_ALG_SHA256:
		hctx->dgt_size = 32;
		hctx->blk_size = 64;
		hctx->pad_size = 8;
		hctx->cmd |= HACE_HASH_CMD_ALG_SHA256;
		break;
	case TEE_ALG_SHA384:
		hctx->dgt_size = 48;
		hctx->blk_size = 128;
		hctx->pad_size = 16;
		hctx->cmd |= HACE_HASH_CMD_ALG_SHA384;
		break;
	case TEE_ALG_SHA512:
		hctx->dgt_size = 64;
		hctx->blk_size = 128;
		hctx->pad_size = 16;
		hctx->cmd |= HACE_HASH_CMD_ALG_SHA512;
		break;
	default:
		free(hctx);
		return TEE_ERROR_NOT_IMPLEMENTED;
	}

	*pctx = &hctx->hash_ctx;

	return TEE_SUCCESS;
}

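/*
 * Map the HACE register block and register this driver's hash allocator
 * with the drvcrypt framework.
 */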
TEE_Result ast2600_drvcrypt_register_hash(void)
{
	hace_virt = core_mmu_get_va(HACE_BASE, MEM_AREA_IO_SEC,
				    SMALL_PAGE_SIZE);
	if (!hace_virt) {
		EMSG("cannot get HACE virtual address");
		return TEE_ERROR_GENERIC;
	}

	return drvcrypt_register_hash(ast2600_hace_alloc);
}