xref: /rk3399_rockchip-uboot/drivers/crypto/rockchip/crypto_v2.c (revision d9332f1c9555e2b415ae536413394adfaebc4308)
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * Copyright (c) 2019 Fuzhou Rockchip Electronics Co., Ltd
4  */
5 
6 #include <common.h>
7 #include <clk.h>
8 #include <crypto.h>
9 #include <dm.h>
10 #include <asm/io.h>
11 #include <asm/arch/hardware.h>
12 #include <asm/arch/clock.h>
13 #include <rockchip/crypto_hash_cache.h>
14 #include <rockchip/crypto_v2.h>
15 #include <rockchip/crypto_v2_pka.h>
16 
17 #define	RK_HASH_CTX_MAGIC		0x1A1A1A1A
18 
19 #ifdef DEBUG
20 #define IMSG(format, ...) printf("[%s, %05d]-trace: " format "\n", \
21 				 __func__, __LINE__, ##__VA_ARGS__)
22 #else
23 #define IMSG(format, ...)
24 #endif
25 
/*
 * Hardware DMA link-list item (LLI) for the crypto v2 DMA engine.
 *
 * The layout is fixed by hardware.  Descriptors are allocated with
 * LLI_ADDR_ALIGN_SIZE (8-byte) alignment and must be flushed to memory
 * (crypto_flush_cacheline) before the DMA is started.
 */
struct crypto_lli_desc {
	u32 src_addr;		/* physical address of source data */
	u32 src_len;		/* source length in bytes */
	u32 dst_addr;		/* physical address of destination buffer */
	u32 dst_len;		/* destination length in bytes */
	u32 user_define;	/* LLI_USER_* flags (string start/last, key channel) */
	u32 reserve;
	u32 dma_ctrl;		/* LLI_DMA_CTRL_* flags (last/pause/interrupt select) */
	u32 next_addr;		/* physical address of next LLI, 0 if none */
};
36 
/* Software context of one in-progress hash/HMAC operation. */
struct rk_hash_ctx {
	struct crypto_lli_desc		data_lli;	/* lli desc used by rk_hash_direct_calc() */
	struct crypto_hash_cache	*hash_cache;	/* aligns/chunks input before DMA */
	u32				magic;		/* to check ctx: RK_HASH_CTX_MAGIC when initialized */
	u32				algo;		/* hash algo (CRYPTO_MD5/SHA*/SM3 ...) */
	u8				digest_size;	/* hash out length in bytes */
	u8				reserved[3];	/* padding, keeps the struct 4-byte aligned */
};
45 
/* Per-SoC driver data: bitmask of CRYPTO_* features the silicon implements. */
struct rk_crypto_soc_data {
	u32 capability;
};
49 
/* Driver-private state, one per crypto device instance. */
struct rockchip_crypto_priv {
	fdt_addr_t			reg;		/* MMIO base (also mirrored in crypto_base) */
	struct clk			clk;		/* clock handle from rockchip_get_clk() */
	u32				frequency;
	char				*clocks;	/* raw "clocks" property, nclocks u32 cells */
	u32				*frequencies;	/* raw "clock-frequency" property */
	u32				nclocks;	/* number of u32 cells in the arrays above */
	u32				length;		/* bytes hashed so far, checked in sha_final */
	struct rk_hash_ctx		*hw_ctx;	/* single shared hash context */
	struct rk_crypto_soc_data	*soc_data;	/* from driver match data */
};
61 
62 #define LLI_ADDR_ALIGN_SIZE	8
63 #define DATA_ADDR_ALIGN_SIZE	8
64 #define DATA_LEN_ALIGN_SIZE	64
65 
66 /* crypto timeout 500ms, must support more than 32M data per times*/
67 #define HASH_UPDATE_LIMIT	(32 * 1024 * 1024)
68 #define RK_CRYPTO_TIMEOUT	500000
69 
/*
 * Busy-poll until @condition becomes false, sleeping 1us per iteration,
 * for at most @timeout iterations.  Evaluates to 0 on success or
 * -ETIMEDOUT if the condition still held when the budget ran out.
 */
#define RK_POLL_TIMEOUT(condition, timeout) \
({ \
	int time_out = timeout; \
	while (condition) { \
		if (--time_out <= 0) { \
			debug("[%s] %d: time out!\n", __func__,\
				__LINE__); \
			break; \
		} \
		udelay(1); \
	} \
	(time_out <= 0) ? -ETIMEDOUT : 0; \
})

/*
 * Wait for the tag-valid flag of key @channel, then acknowledge it by
 * writing the flag back.  Evaluates to 0 or -ETIMEDOUT.
 */
#define WAIT_TAG_VALID(channel, timeout) ({ \
	u32 tag_mask = CRYPTO_CH0_TAG_VALID << (channel);\
	int ret;\
	ret = RK_POLL_TIMEOUT(!(crypto_read(CRYPTO_TAG_VALID) & tag_mask),\
			      timeout);\
	crypto_write(crypto_read(CRYPTO_TAG_VALID) & tag_mask, CRYPTO_TAG_VALID);\
	ret;\
})
92 
/*
 * Thin compatibility shims: U-Boot runs with an identity (or 1:1 32-bit)
 * mapping here, so virt<->phys conversion is just truncation/cast.
 */
#define virt_to_phys(addr)		(((unsigned long)addr) & 0xffffffff)
#define phys_to_virt(addr, area)	((unsigned long)addr)

#define align_malloc(bytes, alignment)	memalign(alignment, bytes)
#define align_free(addr)		free(addr)

#define ROUNDUP(size, alignment)	round_up(size, alignment)
/* type (clean/invalidate) is ignored; crypto_flush_cacheline does both */
#define cache_op_inner(type, addr, size) \
					crypto_flush_cacheline((ulong)addr, size)

/* ECB and the MAC modes are the only ones that take no IV */
#define IS_NEED_IV(rk_mode) ((rk_mode) != RK_MODE_ECB && \
			     (rk_mode) != RK_MODE_CMAC && \
			     (rk_mode) != RK_MODE_CBC_MAC)

#define IS_NEED_TAG(rk_mode) ((rk_mode) == RK_MODE_CMAC || \
			      (rk_mode) == RK_MODE_CBC_MAC)

#define IS_MAC_MODE(rk_mode) ((rk_mode) == RK_MODE_CMAC || \
			      (rk_mode) == RK_MODE_CBC_MAC)
112 
113 fdt_addr_t crypto_base;
114 
115 static inline void word2byte_be(u32 word, u8 *ch)
116 {
117 	ch[0] = (word >> 24) & 0xff;
118 	ch[1] = (word >> 16) & 0xff;
119 	ch[2] = (word >> 8) & 0xff;
120 	ch[3] = (word >> 0) & 0xff;
121 }
122 
123 static inline u32 byte2word_be(const u8 *ch)
124 {
125 	return (*ch << 24) + (*(ch + 1) << 16) + (*(ch + 2) << 8) + *(ch + 3);
126 }
127 
128 static inline void clear_regs(u32 base, u32 words)
129 {
130 	int i;
131 
132 	/*clear out register*/
133 	for (i = 0; i < words; i++)
134 		crypto_write(0, base + 4 * i);
135 }
136 
/* Zero all 16 hash output registers (enough for a SHA-512 digest). */
static inline void clear_hash_out_reg(void)
{
	clear_regs(CRYPTO_HASH_DOUT_0, 16);
}
141 
/* Zero every key register of every key channel (4 words per channel). */
static inline void clear_key_regs(void)
{
	clear_regs(CRYPTO_CH0_KEY_0, CRYPTO_KEY_CHANNEL_NUM * 4);
}
146 
147 static inline void write_regs(u32 base, const u8 *data, u32 data_len)
148 {
149 	u8 tmp_buf[4];
150 	u32 i;
151 
152 	for (i = 0; i < data_len / 4; i++, base += 4)
153 		crypto_write(byte2word_be(data + i * 4), base);
154 
155 	if (data_len % 4) {
156 		memset(tmp_buf, 0x00, sizeof(tmp_buf));
157 		memcpy((u8 *)tmp_buf, data + i * 4, data_len % 4);
158 		crypto_write(byte2word_be(tmp_buf), base);
159 	}
160 }
161 
/* Load a key into key channel @chn; channels are 0x10 bytes apart. */
static inline void write_key_reg(u32 chn, const u8 *key, u32 key_len)
{
	write_regs(CRYPTO_CH0_KEY_0 + chn * 0x10, key, key_len);
}
166 
167 static inline void set_iv_reg(u32 chn, const u8 *iv, u32 iv_len)
168 {
169 	u32 base_iv;
170 
171 	base_iv = CRYPTO_CH0_IV_0 + chn * 0x10;
172 
173 	/* clear iv */
174 	clear_regs(base_iv, 4);
175 
176 	if (!iv || iv_len == 0)
177 		return;
178 
179 	write_regs(base_iv, iv, iv_len);
180 
181 	crypto_write(iv_len, CRYPTO_CH0_IV_LEN_0 + 4 * chn);
182 }
183 
184 static inline void get_tag_from_reg(u32 chn, u8 *tag, u32 tag_len)
185 {
186 	u32 i;
187 	u32 chn_base = CRYPTO_CH0_TAG_0 + 0x10 * chn;
188 
189 	for (i = 0; i < tag_len / 4; i++, chn_base += 4)
190 		word2byte_be(crypto_read(chn_base), tag + 4 * i);
191 }
192 
193 static int hw_crypto_reset(void)
194 {
195 	u32 val = 0, mask = 0;
196 	int ret;
197 
198 	val = CRYPTO_SW_PKA_RESET | CRYPTO_SW_CC_RESET;
199 	mask = val << CRYPTO_WRITE_MASK_SHIFT;
200 
201 	/* reset pka and crypto modules*/
202 	crypto_write(val | mask, CRYPTO_RST_CTL);
203 
204 	/* wait reset compelete */
205 	ret = RK_POLL_TIMEOUT(crypto_read(CRYPTO_RST_CTL), RK_CRYPTO_TIMEOUT);
206 
207 	return ret;
208 }
209 
210 static void hw_hash_clean_ctx(struct rk_hash_ctx *ctx)
211 {
212 	/* clear hash status */
213 	crypto_write(CRYPTO_WRITE_MASK_ALL | 0, CRYPTO_HASH_CTL);
214 
215 	assert(ctx);
216 	assert(ctx->magic == RK_HASH_CTX_MAGIC);
217 
218 	crypto_hash_cache_free(ctx->hash_cache);
219 
220 	memset(ctx, 0x00, sizeof(*ctx));
221 }
222 
223 static int rk_hash_init(void *hw_ctx, u32 algo)
224 {
225 	struct rk_hash_ctx *tmp_ctx = (struct rk_hash_ctx *)hw_ctx;
226 	u32 reg_ctrl = 0;
227 	int ret;
228 
229 	if (!tmp_ctx)
230 		return -EINVAL;
231 
232 	reg_ctrl = CRYPTO_SW_CC_RESET;
233 	crypto_write(reg_ctrl | (reg_ctrl << CRYPTO_WRITE_MASK_SHIFT),
234 		     CRYPTO_RST_CTL);
235 
236 	/* wait reset compelete */
237 	ret = RK_POLL_TIMEOUT(crypto_read(CRYPTO_RST_CTL),
238 			      RK_CRYPTO_TIMEOUT);
239 
240 	reg_ctrl = 0;
241 	tmp_ctx->algo = algo;
242 	switch (algo) {
243 	case CRYPTO_MD5:
244 	case CRYPTO_HMAC_MD5:
245 		reg_ctrl |= CRYPTO_MODE_MD5;
246 		tmp_ctx->digest_size = 16;
247 		break;
248 	case CRYPTO_SHA1:
249 	case CRYPTO_HMAC_SHA1:
250 		reg_ctrl |= CRYPTO_MODE_SHA1;
251 		tmp_ctx->digest_size = 20;
252 		break;
253 	case CRYPTO_SHA256:
254 	case CRYPTO_HMAC_SHA256:
255 		reg_ctrl |= CRYPTO_MODE_SHA256;
256 		tmp_ctx->digest_size = 32;
257 		break;
258 	case CRYPTO_SHA512:
259 	case CRYPTO_HMAC_SHA512:
260 		reg_ctrl |= CRYPTO_MODE_SHA512;
261 		tmp_ctx->digest_size = 64;
262 		break;
263 	case CRYPTO_SM3:
264 	case CRYPTO_HMAC_SM3:
265 		reg_ctrl |= CRYPTO_MODE_SM3;
266 		tmp_ctx->digest_size = 32;
267 		break;
268 	default:
269 		ret = -EINVAL;
270 		goto exit;
271 	}
272 
273 	clear_hash_out_reg();
274 
275 	/* enable hardware padding */
276 	reg_ctrl |= CRYPTO_HW_PAD_ENABLE;
277 	crypto_write(reg_ctrl | CRYPTO_WRITE_MASK_ALL, CRYPTO_HASH_CTL);
278 
279 	/* FIFO input and output data byte swap */
280 	/* such as B0, B1, B2, B3 -> B3, B2, B1, B0 */
281 	reg_ctrl = CRYPTO_DOUT_BYTESWAP | CRYPTO_DOIN_BYTESWAP;
282 	crypto_write(reg_ctrl | CRYPTO_WRITE_MASK_ALL, CRYPTO_FIFO_CTL);
283 
284 	/* enable src_item_done interrupt */
285 	crypto_write(CRYPTO_SRC_ITEM_INT_EN, CRYPTO_DMA_INT_EN);
286 
287 	tmp_ctx->magic = RK_HASH_CTX_MAGIC;
288 
289 	return 0;
290 exit:
291 	/* clear hash setting if init failed */
292 	crypto_write(CRYPTO_WRITE_MASK_ALL | 0, CRYPTO_HASH_CTL);
293 
294 	return ret;
295 }
296 
297 static int rk_hash_direct_calc(void *hw_data, const u8 *data,
298 			       u32 data_len, u8 *started_flag, u8 is_last)
299 {
300 	struct rockchip_crypto_priv *priv = hw_data;
301 	struct rk_hash_ctx *hash_ctx = priv->hw_ctx;
302 	struct crypto_lli_desc *lli = &hash_ctx->data_lli;
303 	int ret = -EINVAL;
304 	u32 tmp = 0, mask = 0;
305 
306 	assert(IS_ALIGNED((ulong)data, DATA_ADDR_ALIGN_SIZE));
307 	assert(is_last || IS_ALIGNED(data_len, DATA_LEN_ALIGN_SIZE));
308 
309 	debug("%s: data = %p, len = %u, s = %x, l = %x\n",
310 	      __func__, data, data_len, *started_flag, is_last);
311 
312 	memset(lli, 0x00, sizeof(*lli));
313 	lli->src_addr = (u32)virt_to_phys(data);
314 	lli->src_len = data_len;
315 	lli->dma_ctrl = LLI_DMA_CTRL_SRC_DONE;
316 
317 	if (is_last) {
318 		lli->user_define |= LLI_USER_STRING_LAST;
319 		lli->dma_ctrl |= LLI_DMA_CTRL_LAST;
320 	} else {
321 		lli->next_addr = (u32)virt_to_phys(lli);
322 		lli->dma_ctrl |= LLI_DMA_CTRL_PAUSE;
323 	}
324 
325 	if (!(*started_flag)) {
326 		lli->user_define |=
327 			(LLI_USER_STRING_START | LLI_USER_CPIHER_START);
328 		crypto_write((u32)virt_to_phys(lli), CRYPTO_DMA_LLI_ADDR);
329 		crypto_write((CRYPTO_HASH_ENABLE << CRYPTO_WRITE_MASK_SHIFT) |
330 			     CRYPTO_HASH_ENABLE, CRYPTO_HASH_CTL);
331 		tmp = CRYPTO_DMA_START;
332 		*started_flag = 1;
333 	} else {
334 		tmp = CRYPTO_DMA_RESTART;
335 	}
336 
337 	/* flush cache */
338 	crypto_flush_cacheline((ulong)lli, sizeof(*lli));
339 	crypto_flush_cacheline((ulong)data, data_len);
340 
341 	/* start calculate */
342 	crypto_write(tmp << CRYPTO_WRITE_MASK_SHIFT | tmp,
343 		     CRYPTO_DMA_CTL);
344 
345 	/* mask CRYPTO_SYNC_LOCKSTEP_INT_ST flag */
346 	mask = ~(mask | CRYPTO_SYNC_LOCKSTEP_INT_ST);
347 
348 	/* wait calc ok */
349 	ret = RK_POLL_TIMEOUT(!(crypto_read(CRYPTO_DMA_INT_ST) & mask),
350 			      RK_CRYPTO_TIMEOUT);
351 
352 	/* clear interrupt status */
353 	tmp = crypto_read(CRYPTO_DMA_INT_ST);
354 	crypto_write(tmp, CRYPTO_DMA_INT_ST);
355 
356 	if (tmp != CRYPTO_SRC_ITEM_DONE_INT_ST &&
357 	    tmp != CRYPTO_ZERO_LEN_INT_ST) {
358 		debug("[%s] %d: CRYPTO_DMA_INT_ST = 0x%x\n",
359 		      __func__, __LINE__, tmp);
360 		goto exit;
361 	}
362 
363 	priv->length += data_len;
364 exit:
365 	return ret;
366 }
367 
368 int rk_hash_update(void *ctx, const u8 *data, u32 data_len)
369 {
370 	struct rk_hash_ctx *tmp_ctx = (struct rk_hash_ctx *)ctx;
371 	int ret = -EINVAL;
372 
373 	debug("\n");
374 	if (!tmp_ctx || !data)
375 		goto exit;
376 
377 	if (tmp_ctx->digest_size == 0 || tmp_ctx->magic != RK_HASH_CTX_MAGIC)
378 		goto exit;
379 
380 	ret = crypto_hash_update_with_cache(tmp_ctx->hash_cache,
381 					    data, data_len);
382 
383 exit:
384 	/* free lli list */
385 	if (ret)
386 		hw_hash_clean_ctx(tmp_ctx);
387 
388 	return ret;
389 }
390 
391 int rk_hash_final(void *ctx, u8 *digest, size_t len)
392 {
393 	struct rk_hash_ctx *tmp_ctx = (struct rk_hash_ctx *)ctx;
394 	int ret = -EINVAL;
395 	u32 i;
396 
397 	if (!digest)
398 		goto exit;
399 
400 	if (!tmp_ctx ||
401 	    tmp_ctx->digest_size == 0 ||
402 	    len > tmp_ctx->digest_size ||
403 	    tmp_ctx->magic != RK_HASH_CTX_MAGIC) {
404 		goto exit;
405 	}
406 
407 	/* wait hash value ok */
408 	ret = RK_POLL_TIMEOUT(!crypto_read(CRYPTO_HASH_VALID),
409 			      RK_CRYPTO_TIMEOUT);
410 
411 	for (i = 0; i < len / 4; i++)
412 		word2byte_be(crypto_read(CRYPTO_HASH_DOUT_0 + i * 4),
413 			     digest + i * 4);
414 
415 	if (len % 4) {
416 		u8 tmp_buf[4];
417 
418 		word2byte_be(crypto_read(CRYPTO_HASH_DOUT_0 + i * 4), tmp_buf);
419 		memcpy(digest + i * 4, tmp_buf, len % 4);
420 	}
421 
422 	/* clear hash status */
423 	crypto_write(CRYPTO_HASH_IS_VALID, CRYPTO_HASH_VALID);
424 	crypto_write(CRYPTO_WRITE_MASK_ALL | 0, CRYPTO_HASH_CTL);
425 
426 exit:
427 
428 	return ret;
429 }
430 
431 static u32 rockchip_crypto_capability(struct udevice *dev)
432 {
433 	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
434 	u32 capability, mask = 0;
435 
436 	capability = priv->soc_data->capability;
437 
438 #if !(CONFIG_IS_ENABLED(ROCKCHIP_CIPHER))
439 	mask |= (CRYPTO_DES | CRYPTO_AES | CRYPTO_SM4);
440 #endif
441 
442 #if !(CONFIG_IS_ENABLED(ROCKCHIP_HMAC))
443 	mask |= (CRYPTO_HMAC_MD5 | CRYPTO_HMAC_SHA1 | CRYPTO_HMAC_SHA256 |
444 			 CRYPTO_HMAC_SHA512 | CRYPTO_HMAC_SM3);
445 #endif
446 
447 #if !(CONFIG_IS_ENABLED(ROCKCHIP_RSA))
448 	mask |= (CRYPTO_RSA512 | CRYPTO_RSA1024 | CRYPTO_RSA2048 |
449 			 CRYPTO_RSA3072 | CRYPTO_RSA4096);
450 #endif
451 
452 	return capability & (~mask);
453 }
454 
455 static int rockchip_crypto_sha_init(struct udevice *dev, sha_context *ctx)
456 {
457 	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
458 	struct rk_hash_ctx *hash_ctx = priv->hw_ctx;
459 
460 	if (!ctx)
461 		return -EINVAL;
462 
463 	memset(hash_ctx, 0x00, sizeof(*hash_ctx));
464 
465 	priv->length = 0;
466 
467 	hash_ctx->hash_cache = crypto_hash_cache_alloc(rk_hash_direct_calc,
468 						       priv, ctx->length,
469 						       DATA_ADDR_ALIGN_SIZE,
470 						       DATA_LEN_ALIGN_SIZE);
471 	if (!hash_ctx->hash_cache)
472 		return -EFAULT;
473 
474 	return rk_hash_init(hash_ctx, ctx->algo);
475 }
476 
477 static int rockchip_crypto_sha_update(struct udevice *dev,
478 				      u32 *input, u32 len)
479 {
480 	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
481 	int ret, i;
482 	u8 *p;
483 
484 	if (!len)
485 		return -EINVAL;
486 
487 	p = (u8 *)input;
488 
489 	for (i = 0; i < len / HASH_UPDATE_LIMIT; i++, p += HASH_UPDATE_LIMIT) {
490 		ret = rk_hash_update(priv->hw_ctx, p, HASH_UPDATE_LIMIT);
491 		if (ret)
492 			goto exit;
493 	}
494 
495 	if (len % HASH_UPDATE_LIMIT)
496 		ret = rk_hash_update(priv->hw_ctx, p, len % HASH_UPDATE_LIMIT);
497 
498 exit:
499 	return ret;
500 }
501 
502 static int rockchip_crypto_sha_final(struct udevice *dev,
503 				     sha_context *ctx, u8 *output)
504 {
505 	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
506 	u32 nbits;
507 	int ret;
508 
509 	nbits = crypto_algo_nbits(ctx->algo);
510 
511 	if (priv->length != ctx->length) {
512 		printf("total length(0x%08x) != init length(0x%08x)!\n",
513 		       priv->length, ctx->length);
514 		ret = -EIO;
515 		goto exit;
516 	}
517 
518 	ret = rk_hash_final(priv->hw_ctx, (u8 *)output, BITS2BYTE(nbits));
519 
520 exit:
521 	hw_hash_clean_ctx(priv->hw_ctx);
522 	return ret;
523 }
524 
525 #if CONFIG_IS_ENABLED(ROCKCHIP_HMAC)
526 int rk_hmac_init(void *hw_ctx, u32 algo, u8 *key, u32 key_len)
527 {
528 	u32 reg_ctrl = 0;
529 	int ret;
530 
531 	if (!key || !key_len || key_len > 64)
532 		return -EINVAL;
533 
534 	clear_key_regs();
535 
536 	write_key_reg(0, key, key_len);
537 
538 	ret = rk_hash_init(hw_ctx, algo);
539 	if (ret)
540 		return ret;
541 
542 	reg_ctrl = crypto_read(CRYPTO_HASH_CTL) | CRYPTO_HMAC_ENABLE;
543 	crypto_write(reg_ctrl | CRYPTO_WRITE_MASK_ALL, CRYPTO_HASH_CTL);
544 
545 	return ret;
546 }
547 
548 static int rockchip_crypto_hmac_init(struct udevice *dev,
549 				     sha_context *ctx, u8 *key, u32 key_len)
550 {
551 	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
552 	struct rk_hash_ctx *hash_ctx = priv->hw_ctx;
553 
554 	if (!ctx)
555 		return -EINVAL;
556 
557 	memset(hash_ctx, 0x00, sizeof(*hash_ctx));
558 
559 	priv->length = 0;
560 
561 	hash_ctx->hash_cache = crypto_hash_cache_alloc(rk_hash_direct_calc,
562 						       priv, ctx->length,
563 						       DATA_ADDR_ALIGN_SIZE,
564 						       DATA_LEN_ALIGN_SIZE);
565 	if (!hash_ctx->hash_cache)
566 		return -EFAULT;
567 
568 	return rk_hmac_init(priv->hw_ctx, ctx->algo, key, key_len);
569 }
570 
/* HMAC data path is identical to plain hashing; reuse the SHA update. */
static int rockchip_crypto_hmac_update(struct udevice *dev,
				       u32 *input, u32 len)
{
	return rockchip_crypto_sha_update(dev, input, len);
}
576 
/* HMAC finalization is identical to plain hashing; reuse the SHA final. */
static int rockchip_crypto_hmac_final(struct udevice *dev,
				      sha_context *ctx, u8 *output)
{
	return rockchip_crypto_sha_final(dev, ctx, output);
}
582 
583 #endif
584 
585 #if CONFIG_IS_ENABLED(ROCKCHIP_CIPHER)
/* Key channel used for all block-cipher operations (never reassigned). */
static u8 g_key_chn;

/* Map RK_MODE_* software mode ids to CRYPTO_BC_* hardware control bits. */
static const u32 rk_mode2bc_mode[RK_MODE_MAX] = {
	[RK_MODE_ECB] = CRYPTO_BC_ECB,
	[RK_MODE_CBC] = CRYPTO_BC_CBC,
	[RK_MODE_CTS] = CRYPTO_BC_CTS,
	[RK_MODE_CTR] = CRYPTO_BC_CTR,
	[RK_MODE_CFB] = CRYPTO_BC_CFB,
	[RK_MODE_OFB] = CRYPTO_BC_OFB,
	[RK_MODE_XTS] = CRYPTO_BC_XTS,
	[RK_MODE_CMAC] = CRYPTO_BC_CMAC,
	[RK_MODE_CBC_MAC] = CRYPTO_BC_CBC_MAC,
};
599 
600 static inline bool is_des_mode(u32 rk_mode)
601 {
602 	return (rk_mode == RK_MODE_ECB ||
603 		rk_mode == RK_MODE_CBC ||
604 		rk_mode == RK_MODE_CFB ||
605 		rk_mode == RK_MODE_OFB);
606 }
607 
608 static void dump_crypto_state(struct crypto_lli_desc *desc, int ret)
609 {
610 	IMSG("%s\n", ret == -ETIME ? "timeout" : "dismatch");
611 
612 	IMSG("CRYPTO_DMA_INT_ST = %08x, expect_int = %08x\n",
613 	     tmp, expt_int);
614 	IMSG("data desc		= %p\n", desc);
615 	IMSG("\taddr_in		= [%08x <=> %08x]\n",
616 	     desc->src_addr, (u32)virt_to_phys(in));
617 	IMSG("\taddr_out	= [%08x <=> %08x]\n",
618 	     desc->dst_addr, (u32)virt_to_phys(out));
619 	IMSG("\tsrc_len		= [%08x <=> %08x]\n",
620 	     desc->src_len, (u32)len);
621 	IMSG("\tdst_len		= %08x\n", desc->dst_len);
622 	IMSG("\tdma_ctl		= %08x\n", desc->dma_ctrl);
623 	IMSG("\tuser_define	= %08x\n", desc->user_define);
624 
625 	IMSG("\n\nDMA CRYPTO_DMA_LLI_ADDR status = %08x\n",
626 	     crypto_read(CRYPTO_DMA_LLI_ADDR));
627 	IMSG("DMA CRYPTO_DMA_ST status = %08x\n",
628 	     crypto_read(CRYPTO_DMA_ST));
629 	IMSG("DMA CRYPTO_DMA_STATE status = %08x\n",
630 	     crypto_read(CRYPTO_DMA_STATE));
631 	IMSG("DMA CRYPTO_DMA_LLI_RADDR status = %08x\n",
632 	     crypto_read(CRYPTO_DMA_LLI_RADDR));
633 	IMSG("DMA CRYPTO_DMA_SRC_RADDR status = %08x\n",
634 	     crypto_read(CRYPTO_DMA_SRC_RADDR));
635 	IMSG("DMA CRYPTO_DMA_DST_RADDR status = %08x\n",
636 	     crypto_read(CRYPTO_DMA_DST_RADDR));
637 	IMSG("DMA CRYPTO_CIPHER_ST status = %08x\n",
638 	     crypto_read(CRYPTO_CIPHER_ST));
639 	IMSG("DMA CRYPTO_CIPHER_STATE status = %08x\n",
640 	     crypto_read(CRYPTO_CIPHER_STATE));
641 	IMSG("DMA CRYPTO_TAG_VALID status = %08x\n",
642 	     crypto_read(CRYPTO_TAG_VALID));
643 	IMSG("LOCKSTEP status = %08x\n\n",
644 	     crypto_read(0x618));
645 
646 	IMSG("dst %dbyte not transferred\n",
647 	     desc->dst_addr + desc->dst_len -
648 	     crypto_read(CRYPTO_DMA_DST_RADDR));
649 }
650 
651 static int hw_cipher_init(u32 chn, const u8 *key, const u8 *twk_key,
652 			  u32 key_len, const u8 *iv, u32 iv_len,
653 			  u32 algo, u32 mode, bool enc)
654 {
655 	u32 rk_mode = RK_GET_RK_MODE(mode);
656 	u32 key_chn_sel = chn;
657 	u32 reg_ctrl = 0;
658 
659 	IMSG("%s: key addr is %p, key_len is %d, iv addr is %p",
660 	     __func__, key, key_len, iv);
661 	if (rk_mode >= RK_MODE_MAX)
662 		return -EINVAL;
663 
664 	switch (algo) {
665 	case CRYPTO_DES:
666 		if (key_len > DES_BLOCK_SIZE)
667 			reg_ctrl |= CRYPTO_BC_TDES;
668 		else
669 			reg_ctrl |= CRYPTO_BC_DES;
670 		break;
671 	case CRYPTO_AES:
672 		reg_ctrl |= CRYPTO_BC_AES;
673 		break;
674 	case CRYPTO_SM4:
675 		reg_ctrl |= CRYPTO_BC_SM4;
676 		break;
677 	default:
678 		return -EINVAL;
679 	}
680 
681 	if (algo == CRYPTO_AES || algo == CRYPTO_SM4) {
682 		switch (key_len) {
683 		case AES_KEYSIZE_128:
684 			reg_ctrl |= CRYPTO_BC_128_bit_key;
685 			break;
686 		case AES_KEYSIZE_192:
687 			reg_ctrl |= CRYPTO_BC_192_bit_key;
688 			break;
689 		case AES_KEYSIZE_256:
690 			reg_ctrl |= CRYPTO_BC_256_bit_key;
691 			break;
692 		default:
693 			return -EINVAL;
694 		}
695 	}
696 
697 	reg_ctrl |= rk_mode2bc_mode[rk_mode];
698 	if (!enc)
699 		reg_ctrl |= CRYPTO_BC_DECRYPT;
700 
701 	/* write key data to reg */
702 	write_key_reg(key_chn_sel, key, key_len);
703 
704 	/* write twk key for xts mode */
705 	if (rk_mode == RK_MODE_XTS)
706 		write_key_reg(key_chn_sel + 4, twk_key, key_len);
707 
708 	/* set iv reg */
709 	set_iv_reg(chn, iv, iv_len);
710 
711 	/* din_swap set 1, dout_swap set 1, default 1. */
712 	crypto_write(0x00030003, CRYPTO_FIFO_CTL);
713 	crypto_write(CRYPTO_LIST_DONE_INT_EN | CRYPTO_DST_ITEM_DONE_INT_EN,
714 		     CRYPTO_DMA_INT_EN);
715 
716 	crypto_write(reg_ctrl | CRYPTO_WRITE_MASK_ALL, CRYPTO_BC_CTL);
717 
718 	return 0;
719 }
720 
721 static int hw_cipher_crypt(const u8 *in, u8 *out, u64 len,
722 			   const u8 *aad, u64 aad_len, u8 *tag, u32 tag_len,
723 			   u32 mode)
724 {
725 	struct crypto_lli_desc *data_desc = NULL;
726 	u8 *dma_in = NULL, *dma_out = NULL;
727 	u32 rk_mode = RK_GET_RK_MODE(mode);
728 	u32 reg_ctrl = 0, tmp_len = 0;
729 	u32 expt_int = 0, mask = 0;
730 	u32 key_chn = g_key_chn;
731 	u32 tmp, dst_len = 0;
732 	int ret = -1;
733 
734 	if (rk_mode == RK_MODE_CTS && len <= AES_BLOCK_SIZE) {
735 		printf("CTS mode length %u < 16Byte\n", (u32)len);
736 		return -EINVAL;
737 	}
738 
739 	tmp_len = (rk_mode == RK_MODE_CTR) ? ROUNDUP(len, AES_BLOCK_SIZE) : len;
740 
741 	data_desc = align_malloc(sizeof(*data_desc), LLI_ADDR_ALIGN_SIZE);
742 	if (!data_desc)
743 		goto exit;
744 
745 	if (IS_ALIGNED((ulong)in, DATA_ADDR_ALIGN_SIZE) && tmp_len == len)
746 		dma_in = (void *)in;
747 	else
748 		dma_in = align_malloc(tmp_len, DATA_ADDR_ALIGN_SIZE);
749 	if (!dma_in)
750 		goto exit;
751 
752 	if (out) {
753 		if (IS_ALIGNED((ulong)out, DATA_ADDR_ALIGN_SIZE) &&
754 		    tmp_len == len)
755 			dma_out = out;
756 		else
757 			dma_out = align_malloc(tmp_len, DATA_ADDR_ALIGN_SIZE);
758 		if (!dma_out)
759 			goto exit;
760 		dst_len = tmp_len;
761 	}
762 
763 	memset(data_desc, 0x00, sizeof(*data_desc));
764 	if (dma_in != in)
765 		memcpy(dma_in, in, len);
766 
767 	data_desc->src_addr    = (u32)virt_to_phys(dma_in);
768 	data_desc->src_len     = tmp_len;
769 	data_desc->dst_addr    = (u32)virt_to_phys(dma_out);
770 	data_desc->dst_len     = dst_len;
771 	data_desc->dma_ctrl    = LLI_DMA_CTRL_LAST;
772 	data_desc->user_define = LLI_USER_CPIHER_START |
773 				 LLI_USER_STRING_START |
774 				 LLI_USER_STRING_LAST |
775 				 (key_chn << 4);
776 
777 	if (IS_MAC_MODE(rk_mode)) {
778 		expt_int = CRYPTO_LIST_DONE_INT_ST;
779 		data_desc->dma_ctrl |= LLI_DMA_CTRL_LIST_DONE;
780 	} else {
781 		expt_int = CRYPTO_DST_ITEM_DONE_INT_ST;
782 		data_desc->dma_ctrl |= LLI_DMA_CTRL_DST_DONE;
783 	}
784 
785 	crypto_write((u32)virt_to_phys(data_desc), CRYPTO_DMA_LLI_ADDR);
786 
787 	cache_op_inner(DCACHE_AREA_CLEAN, data_desc, sizeof(*data_desc));
788 	cache_op_inner(DCACHE_AREA_CLEAN, (void *)aad, aad_len);
789 	cache_op_inner(DCACHE_AREA_CLEAN, dma_in, tmp_len);
790 	cache_op_inner(DCACHE_AREA_INVALIDATE, dma_out, tmp_len);
791 
792 	/* din_swap set 1, dout_swap set 1, default 1. */
793 	crypto_write(0x00030003, CRYPTO_FIFO_CTL);
794 	crypto_write(CRYPTO_DST_ITEM_DONE_INT_EN | CRYPTO_LIST_DONE_INT_EN,
795 		     CRYPTO_DMA_INT_EN);
796 
797 	reg_ctrl = crypto_read(CRYPTO_BC_CTL) | CRYPTO_BC_ENABLE;
798 	crypto_write(reg_ctrl | CRYPTO_WRITE_MASK_ALL, CRYPTO_BC_CTL);
799 	crypto_write(0x00010001, CRYPTO_DMA_CTL);//start
800 
801 	mask = ~(mask | CRYPTO_SYNC_LOCKSTEP_INT_ST);
802 
803 	/* wait calc ok */
804 	ret = RK_POLL_TIMEOUT(!(crypto_read(CRYPTO_DMA_INT_ST) & mask),
805 			      RK_CRYPTO_TIMEOUT);
806 	tmp = crypto_read(CRYPTO_DMA_INT_ST);
807 	crypto_write(tmp, CRYPTO_DMA_INT_ST);
808 
809 	if ((tmp & mask) == expt_int) {
810 		if (out && out != dma_out)
811 			memcpy(out, dma_out, len);
812 
813 		if (IS_NEED_TAG(rk_mode)) {
814 			ret = WAIT_TAG_VALID(key_chn, RK_CRYPTO_TIMEOUT);
815 			get_tag_from_reg(key_chn, tag, AES_BLOCK_SIZE);
816 		}
817 	} else {
818 		dump_crypto_state(data_desc, ret);
819 		ret = -1;
820 	}
821 
822 exit:
823 	crypto_write(0xffff0000, CRYPTO_BC_CTL);//bc_ctl disable
824 	align_free(data_desc);
825 	if (dma_in && dma_in != in)
826 		align_free(dma_in);
827 	if (dma_out && dma_out != out)
828 		align_free(dma_out);
829 
830 	return ret;
831 }
832 
833 static int hw_aes_init(u32 chn, const u8 *key, const u8 *twk_key, u32 key_len,
834 		       const u8 *iv, u32 iv_len, u32 mode, bool enc)
835 {
836 	u32 rk_mode = RK_GET_RK_MODE(mode);
837 
838 	if (rk_mode > RK_MODE_XTS)
839 		return -EINVAL;
840 
841 	if (iv_len > AES_BLOCK_SIZE)
842 		return -EINVAL;
843 
844 	if (IS_NEED_IV(rk_mode)) {
845 		if (!iv || iv_len != AES_BLOCK_SIZE)
846 			return -EINVAL;
847 	} else {
848 		iv_len = 0;
849 	}
850 
851 	if (rk_mode == RK_MODE_XTS) {
852 		if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_256)
853 			return -EINVAL;
854 
855 		if (!key || !twk_key)
856 			return -EINVAL;
857 	} else {
858 		if (key_len != AES_KEYSIZE_128 &&
859 		    key_len != AES_KEYSIZE_192 &&
860 		    key_len != AES_KEYSIZE_256)
861 			return -EINVAL;
862 	}
863 
864 	return hw_cipher_init(chn, key, twk_key, key_len, iv, iv_len,
865 			      CRYPTO_AES, mode, enc);
866 }
867 
868 static int hw_sm4_init(u32  chn, const u8 *key, const u8 *twk_key, u32 key_len,
869 		       const u8 *iv, u32 iv_len, u32 mode, bool enc)
870 {
871 	u32 rk_mode = RK_GET_RK_MODE(mode);
872 
873 	if (rk_mode > RK_MODE_XTS)
874 		return -EINVAL;
875 
876 	if (iv_len > SM4_BLOCK_SIZE || key_len != SM4_KEYSIZE)
877 		return -EINVAL;
878 
879 	if (IS_NEED_IV(rk_mode)) {
880 		if (!iv || iv_len != SM4_BLOCK_SIZE)
881 			return -EINVAL;
882 	} else {
883 		iv_len = 0;
884 	}
885 
886 	if (rk_mode == RK_MODE_XTS) {
887 		if (!key || !twk_key)
888 			return -EINVAL;
889 	}
890 
891 	return hw_cipher_init(chn, key, twk_key, key_len, iv, iv_len,
892 			      CRYPTO_SM4, mode, enc);
893 }
894 
895 int rk_crypto_des(struct udevice *dev, u32 mode, const u8 *key, u32 key_len,
896 		  const u8 *iv, const u8 *in, u8 *out, u32 len, bool enc)
897 {
898 	u32 rk_mode = RK_GET_RK_MODE(mode);
899 	u8 tmp_key[24];
900 	int ret;
901 
902 	if (!is_des_mode(rk_mode))
903 		return -EINVAL;
904 
905 	if (key_len == DES_BLOCK_SIZE || key_len == 3 * DES_BLOCK_SIZE) {
906 		memcpy(tmp_key, key, key_len);
907 	} else if (key_len == 2 * DES_BLOCK_SIZE) {
908 		memcpy(tmp_key, key, 16);
909 		memcpy(tmp_key + 16, key, 8);
910 		key_len = 3 * DES_BLOCK_SIZE;
911 	} else {
912 		return -EINVAL;
913 	}
914 
915 	ret = hw_cipher_init(0, tmp_key, NULL, key_len, iv, DES_BLOCK_SIZE,
916 			     CRYPTO_DES, mode, enc);
917 	if (ret)
918 		goto exit;
919 
920 	ret = hw_cipher_crypt(in, out, len, NULL, 0,
921 			      NULL, 0, mode);
922 
923 exit:
924 	return ret;
925 }
926 
927 int rk_crypto_aes(struct udevice *dev, u32 mode,
928 		  const u8 *key, const u8 *twk_key, u32 key_len,
929 		  const u8 *iv, u32 iv_len,
930 		  const u8 *in, u8 *out, u32 len, bool enc)
931 {
932 	int ret;
933 
934 	/* RV1126/RV1109 do not support aes-192 */
935 #if defined(CONFIG_ROCKCHIP_RV1126)
936 	if (key_len == AES_KEYSIZE_192)
937 		return -EINVAL;
938 #endif
939 
940 	ret = hw_aes_init(0, key, twk_key, key_len, iv, iv_len, mode, enc);
941 	if (ret)
942 		return ret;
943 
944 	return hw_cipher_crypt(in, out, len, NULL, 0,
945 			       NULL, 0, mode);
946 }
947 
948 int rk_crypto_sm4(struct udevice *dev, u32 mode,
949 		  const u8 *key, const u8 *twk_key, u32 key_len,
950 		  const u8 *iv, u32 iv_len,
951 		  const u8 *in, u8 *out, u32 len, bool enc)
952 {
953 	int ret;
954 
955 	ret = hw_sm4_init(0, key, twk_key, key_len, iv, iv_len, mode, enc);
956 	if (ret)
957 		return ret;
958 
959 	return hw_cipher_crypt(in, out, len, NULL, 0, NULL, 0, mode);
960 }
961 
962 int rockchip_crypto_cipher(struct udevice *dev, cipher_context *ctx,
963 			   const u8 *in, u8 *out, u32 len, bool enc)
964 {
965 	switch (ctx->algo) {
966 	case CRYPTO_DES:
967 		return rk_crypto_des(dev, ctx->mode, ctx->key, ctx->key_len,
968 				     ctx->iv, in, out, len, enc);
969 	case CRYPTO_AES:
970 		return rk_crypto_aes(dev, ctx->mode,
971 				     ctx->key, ctx->twk_key, ctx->key_len,
972 				     ctx->iv, ctx->iv_len, in, out, len, enc);
973 	case CRYPTO_SM4:
974 		return rk_crypto_sm4(dev, ctx->mode,
975 				     ctx->key, ctx->twk_key, ctx->key_len,
976 				     ctx->iv, ctx->iv_len, in, out, len, enc);
977 	default:
978 		return -EINVAL;
979 	}
980 }
981 
982 int rk_crypto_mac(struct udevice *dev, u32 algo, u32 mode,
983 		  const u8 *key, u32 key_len,
984 		  const u8 *in, u32 len, u8 *tag)
985 {
986 	u32 rk_mode = RK_GET_RK_MODE(mode);
987 	int ret;
988 
989 	if (!IS_MAC_MODE(rk_mode))
990 		return -EINVAL;
991 
992 	if (algo != CRYPTO_AES && algo != CRYPTO_SM4)
993 		return -EINVAL;
994 
995 	/* RV1126/RV1109 do not support aes-192 */
996 #if defined(CONFIG_ROCKCHIP_RV1126)
997 	if (algo == CRYPTO_AES && key_len == AES_KEYSIZE_192)
998 		return -EINVAL;
999 #endif
1000 
1001 	ret = hw_cipher_init(g_key_chn, key, NULL, key_len, NULL, 0,
1002 			     algo, mode, true);
1003 	if (ret)
1004 		return ret;
1005 
1006 	return hw_cipher_crypt(in, NULL, len, NULL, 0,
1007 			       tag, AES_BLOCK_SIZE, mode);
1008 }
1009 
/* dm_crypto_ops adapter: unpack the cipher_context for rk_crypto_mac(). */
int rockchip_crypto_mac(struct udevice *dev, cipher_context *ctx,
			const u8 *in, u32 len, u8 *tag)
{
	return rk_crypto_mac(dev, ctx->algo, ctx->mode,
			     ctx->key, ctx->key_len, in, len, tag);
}
1016 
1017 #endif
1018 
1019 #if CONFIG_IS_ENABLED(ROCKCHIP_RSA)
1020 static int rockchip_crypto_rsa_verify(struct udevice *dev, rsa_key *ctx,
1021 				      u8 *sign, u8 *output)
1022 {
1023 	struct mpa_num *mpa_m = NULL, *mpa_e = NULL, *mpa_n = NULL;
1024 	struct mpa_num *mpa_c = NULL, *mpa_result = NULL;
1025 	u32 n_bits, n_words;
1026 	u32 *rsa_result;
1027 	int ret;
1028 
1029 	if (!ctx)
1030 		return -EINVAL;
1031 
1032 	if (ctx->algo != CRYPTO_RSA512 &&
1033 	    ctx->algo != CRYPTO_RSA1024 &&
1034 	    ctx->algo != CRYPTO_RSA2048 &&
1035 	    ctx->algo != CRYPTO_RSA3072 &&
1036 	    ctx->algo != CRYPTO_RSA4096)
1037 		return -EINVAL;
1038 
1039 	n_bits = crypto_algo_nbits(ctx->algo);
1040 	n_words = BITS2WORD(n_bits);
1041 
1042 	rsa_result = malloc(BITS2BYTE(n_bits));
1043 	if (!rsa_result)
1044 		return -ENOMEM;
1045 
1046 	memset(rsa_result, 0x00, BITS2BYTE(n_bits));
1047 
1048 	ret = rk_mpa_alloc(&mpa_m);
1049 	ret |= rk_mpa_alloc(&mpa_e);
1050 	ret |= rk_mpa_alloc(&mpa_n);
1051 	ret |= rk_mpa_alloc(&mpa_c);
1052 	ret |= rk_mpa_alloc(&mpa_result);
1053 	if (ret)
1054 		goto exit;
1055 
1056 	mpa_m->d = (void *)sign;
1057 	mpa_e->d = (void *)ctx->e;
1058 	mpa_n->d = (void *)ctx->n;
1059 	mpa_c->d = (void *)ctx->c;
1060 	mpa_result->d = (void *)rsa_result;
1061 
1062 	mpa_m->size = n_words;
1063 	mpa_e->size = n_words;
1064 	mpa_n->size = n_words;
1065 	mpa_c->size = n_words;
1066 	mpa_result->size = n_words;
1067 
1068 	ret = rk_exptmod_np(mpa_m, mpa_e, mpa_n, mpa_c, mpa_result);
1069 	if (!ret)
1070 		memcpy(output, rsa_result, BITS2BYTE(n_bits));
1071 
1072 exit:
1073 	free(rsa_result);
1074 	rk_mpa_free(&mpa_m);
1075 	rk_mpa_free(&mpa_e);
1076 	rk_mpa_free(&mpa_n);
1077 	rk_mpa_free(&mpa_c);
1078 	rk_mpa_free(&mpa_result);
1079 
1080 	return ret;
1081 }
1082 #endif
1083 
/* UCLASS_CRYPTO operations; optional entries are compiled in per feature. */
static const struct dm_crypto_ops rockchip_crypto_ops = {
	.capability   = rockchip_crypto_capability,
	.sha_init     = rockchip_crypto_sha_init,
	.sha_update   = rockchip_crypto_sha_update,
	.sha_final    = rockchip_crypto_sha_final,
#if CONFIG_IS_ENABLED(ROCKCHIP_RSA)
	.rsa_verify   = rockchip_crypto_rsa_verify,
#endif
#if CONFIG_IS_ENABLED(ROCKCHIP_HMAC)
	.hmac_init    = rockchip_crypto_hmac_init,
	.hmac_update  = rockchip_crypto_hmac_update,
	.hmac_final   = rockchip_crypto_hmac_final,
#endif
#if CONFIG_IS_ENABLED(ROCKCHIP_CIPHER)
	.cipher_crypt = rockchip_crypto_cipher,
	.cipher_mac = rockchip_crypto_mac,
#endif
};
1102 
1103 /*
1104  * Only use "clocks" to parse crypto clock id and use rockchip_get_clk().
1105  * Because we always add crypto node in U-Boot dts, when kernel dtb enabled :
1106  *
1107  *   1. There is cru phandle mismatch between U-Boot and kernel dtb;
1108  *   2. CONFIG_OF_SPL_REMOVE_PROPS removes clock property;
1109  */
1110 static int rockchip_crypto_ofdata_to_platdata(struct udevice *dev)
1111 {
1112 	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
1113 	int len, ret = -EINVAL;
1114 
1115 	if (!dev_read_prop(dev, "clocks", &len)) {
1116 		printf("Can't find \"clocks\" property\n");
1117 		return -EINVAL;
1118 	}
1119 
1120 	memset(priv, 0x00, sizeof(*priv));
1121 	priv->clocks = malloc(len);
1122 	if (!priv->clocks)
1123 		return -ENOMEM;
1124 
1125 	priv->nclocks = len / sizeof(u32);
1126 	if (dev_read_u32_array(dev, "clocks", (u32 *)priv->clocks,
1127 			       priv->nclocks)) {
1128 		printf("Can't read \"clocks\" property\n");
1129 		ret = -EINVAL;
1130 		goto exit;
1131 	}
1132 
1133 	if (!dev_read_prop(dev, "clock-frequency", &len)) {
1134 		printf("Can't find \"clock-frequency\" property\n");
1135 		ret = -EINVAL;
1136 		goto exit;
1137 	}
1138 
1139 	priv->frequencies = malloc(len);
1140 	if (!priv->frequencies) {
1141 		ret = -ENOMEM;
1142 		goto exit;
1143 	}
1144 
1145 	priv->nclocks = len / sizeof(u32);
1146 	if (dev_read_u32_array(dev, "clock-frequency", priv->frequencies,
1147 			       priv->nclocks)) {
1148 		printf("Can't read \"clock-frequency\" property\n");
1149 		ret = -EINVAL;
1150 		goto exit;
1151 	}
1152 
1153 	priv->reg = (fdt_addr_t)dev_read_addr_ptr(dev);
1154 
1155 	crypto_base = priv->reg;
1156 
1157 	return 0;
1158 exit:
1159 	if (priv->clocks)
1160 		free(priv->clocks);
1161 
1162 	if (priv->frequencies)
1163 		free(priv->frequencies);
1164 
1165 	return ret;
1166 }
1167 
1168 static int rockchip_crypto_probe(struct udevice *dev)
1169 {
1170 	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
1171 	struct rk_crypto_soc_data *sdata;
1172 	int i, ret = 0;
1173 	u32* clocks;
1174 
1175 	sdata = (struct rk_crypto_soc_data *)dev_get_driver_data(dev);
1176 	priv->soc_data = sdata;
1177 
1178 	priv->hw_ctx = memalign(LLI_ADDR_ALIGN_SIZE,
1179 				sizeof(struct rk_hash_ctx));
1180 	if (!priv->hw_ctx)
1181 		return -ENOMEM;
1182 
1183 	ret = rockchip_get_clk(&priv->clk.dev);
1184 	if (ret) {
1185 		printf("Failed to get clk device, ret=%d\n", ret);
1186 		return ret;
1187 	}
1188 
1189 	clocks = (u32 *)priv->clocks;
1190 	for (i = 0; i < priv->nclocks; i++) {
1191 		priv->clk.id = clocks[i * 2 + 1];
1192 		ret = clk_set_rate(&priv->clk, priv->frequencies[i]);
1193 		if (ret < 0) {
1194 			printf("%s: Failed to set clk(%ld): ret=%d\n",
1195 			       __func__, priv->clk.id, ret);
1196 			return ret;
1197 		}
1198 	}
1199 
1200 	hw_crypto_reset();
1201 
1202 	return 0;
1203 }
1204 
/*
 * Baseline capability set (matched by px30 and rk3308 below):
 * full hash/HMAC, RSA up to 4096 bits, DES and AES — no SM algorithms.
 */
static const struct rk_crypto_soc_data soc_data_base = {
	.capability = CRYPTO_MD5 |
		      CRYPTO_SHA1 |
		      CRYPTO_SHA256 |
		      CRYPTO_SHA512 |
		      CRYPTO_HMAC_MD5 |
		      CRYPTO_HMAC_SHA1 |
		      CRYPTO_HMAC_SHA256 |
		      CRYPTO_HMAC_SHA512 |
		      CRYPTO_RSA512 |
		      CRYPTO_RSA1024 |
		      CRYPTO_RSA2048 |
		      CRYPTO_RSA3072 |
		      CRYPTO_RSA4096 |
		      CRYPTO_DES |
		      CRYPTO_AES,
};
1222 
/*
 * Baseline capabilities plus the Chinese SM suite (SM3 hash/HMAC, SM4
 * cipher); matched by rv1126 and rk3568 below.
 */
static const struct rk_crypto_soc_data soc_data_base_sm = {
	.capability = CRYPTO_MD5 |
		      CRYPTO_SHA1 |
		      CRYPTO_SHA256 |
		      CRYPTO_SHA512 |
		      CRYPTO_SM3 |
		      CRYPTO_HMAC_MD5 |
		      CRYPTO_HMAC_SHA1 |
		      CRYPTO_HMAC_SHA256 |
		      CRYPTO_HMAC_SHA512 |
		      CRYPTO_HMAC_SM3 |
		      CRYPTO_RSA512 |
		      CRYPTO_RSA1024 |
		      CRYPTO_RSA2048 |
		      CRYPTO_RSA3072 |
		      CRYPTO_RSA4096 |
		      CRYPTO_DES |
		      CRYPTO_AES |
		      CRYPTO_SM4,
};
1243 
/*
 * rk1808 reduced capability set: hash/HMAC without SHA512 and RSA only —
 * no symmetric ciphers (DES/AES) and no SM algorithms.
 */
static const struct rk_crypto_soc_data soc_data_rk1808 = {
	.capability = CRYPTO_MD5 |
		      CRYPTO_SHA1 |
		      CRYPTO_SHA256 |
		      CRYPTO_HMAC_MD5 |
		      CRYPTO_HMAC_SHA1 |
		      CRYPTO_HMAC_SHA256 |
		      CRYPTO_RSA512 |
		      CRYPTO_RSA1024 |
		      CRYPTO_RSA2048 |
		      CRYPTO_RSA3072 |
		      CRYPTO_RSA4096,
};
1257 
/*
 * Device-tree compatible strings; .data points at the per-SoC capability
 * table retrieved in probe via dev_get_driver_data().
 */
static const struct udevice_id rockchip_crypto_ids[] = {
	{
		.compatible = "rockchip,px30-crypto",
		.data = (ulong)&soc_data_base
	},
	{
		.compatible = "rockchip,rk1808-crypto",
		.data = (ulong)&soc_data_rk1808
	},
	{
		.compatible = "rockchip,rk3308-crypto",
		.data = (ulong)&soc_data_base
	},
	{
		.compatible = "rockchip,rv1126-crypto",
		.data = (ulong)&soc_data_base_sm
	},
	{
		.compatible = "rockchip,rk3568-crypto",
		.data = (ulong)&soc_data_base_sm
	},
	{ }	/* sentinel */
};
1281 
/* Driver-model registration for the Rockchip crypto v2 engine. */
U_BOOT_DRIVER(rockchip_crypto_v2) = {
	.name		= "rockchip_crypto_v2",
	.id		= UCLASS_CRYPTO,
	.of_match	= rockchip_crypto_ids,
	.ops		= &rockchip_crypto_ops,
	.probe		= rockchip_crypto_probe,
	.ofdata_to_platdata = rockchip_crypto_ofdata_to_platdata,
	/* priv is allocated/zeroed by DM before ofdata_to_platdata runs */
	.priv_auto_alloc_size = sizeof(struct rockchip_crypto_priv),
};
1291