xref: /rk3399_rockchip-uboot/drivers/crypto/rockchip/crypto_v2.c (revision f947fca4e63be90f2fbc2fa6ac2e99fcec95078a)
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * Copyright (c) 2019 Fuzhou Rockchip Electronics Co., Ltd
4  */
5 
6 #include <common.h>
7 #include <clk.h>
8 #include <crypto.h>
9 #include <dm.h>
10 #include <asm/io.h>
11 #include <clk-uclass.h>
12 #include <asm/arch/hardware.h>
13 #include <asm/arch/clock.h>
14 #include <rockchip/crypto_ecc.h>
15 #include <rockchip/crypto_hash_cache.h>
16 #include <rockchip/crypto_v2.h>
17 #include <rockchip/crypto_v2_pka.h>
18 
19 #define	RK_HASH_CTX_MAGIC		0x1A1A1A1A
20 
21 #define CRYPTO_MAJOR_VER(ver)		((ver) & 0x0f000000)
22 
23 #define CRYPTO_MAJOR_VER_3		0x03000000
24 #define CRYPTO_MAJOR_VER_4		0x04000000
25 
26 #ifdef DEBUG
27 #define IMSG(format, ...) printf("[%s, %05d]-trace: " format "\n", \
28 				 __func__, __LINE__, ##__VA_ARGS__)
29 #else
30 #define IMSG(format, ...)
31 #endif
32 
33 struct crypto_lli_desc {
34 	u32 src_addr;
35 	u32 src_len;
36 	u32 dst_addr;
37 	u32 dst_len;
38 	u32 user_define;
39 	u32 reserve;
40 	u32 dma_ctrl;
41 	u32 next_addr;
42 };
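
/*
 * The DMA engine walks a chain of these linked-list items (LLIs).  A
 * minimal single-item chain, as built by rk_hash_direct_calc() below,
 * looks roughly like this (illustrative sketch; "buf" and "len" are
 * placeholders):
 *
 *	struct crypto_lli_desc lli;
 *
 *	memset(&lli, 0x00, sizeof(lli));
 *	lli.src_addr    = (u32)virt_to_phys(buf);
 *	lli.src_len     = len;
 *	lli.user_define = LLI_USER_CIPHER_START | LLI_USER_STRING_START |
 *			  LLI_USER_STRING_LAST;
 *	lli.dma_ctrl    = LLI_DMA_CTRL_SRC_DONE | LLI_DMA_CTRL_LAST;
 *	crypto_write((u32)virt_to_phys(&lli), CRYPTO_DMA_LLI_ADDR);
 */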
43 
44 struct rk_hash_ctx {
45 	struct crypto_lli_desc		data_lli;	/* lli desc */
46 	struct crypto_hash_cache	*hash_cache;
47 	u32				magic;		/* to check ctx */
48 	u32				algo;		/* hash algo */
49 	u8				digest_size;	/* hash out length */
50 	u8				reserved[3];
51 };
52 
53 struct rk_crypto_soc_data {
54 	u32 capability;
55 	u32 (*dynamic_cap)(void);
56 };
57 
58 struct rockchip_crypto_priv {
59 	fdt_addr_t			reg;
60 	u32				frequency;
61 	char				*clocks;
62 	u32				*frequencies;
63 	u32				nclocks;
64 	u32				freq_nclocks;
65 	u32				length;
66 	struct rk_hash_ctx		*hw_ctx;
67 	struct rk_crypto_soc_data	*soc_data;
68 };
69 
70 #define LLI_ADDR_ALIGN_SIZE	8
71 #define DATA_ADDR_ALIGN_SIZE	8
72 #define DATA_LEN_ALIGN_SIZE	64
73 
74 /* crypto timeout is 500ms; the engine must handle up to 32M bytes per update */
75 #define HASH_UPDATE_LIMIT	(32 * 1024 * 1024)
76 #define RK_CRYPTO_TIMEOUT	500000
77 
78 #define RK_POLL_TIMEOUT(condition, timeout) \
79 ({ \
80 	int time_out = timeout; \
81 	while (condition) { \
82 		if (--time_out <= 0) { \
83 			debug("[%s] %d: time out!\n", __func__,\
84 				__LINE__); \
85 			break; \
86 		} \
87 		udelay(1); \
88 	} \
89 	(time_out <= 0) ? -ETIMEDOUT : 0; \
90 })
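
/*
 * RK_POLL_TIMEOUT() is a statement expression: it busy-waits in 1 us steps
 * while "condition" holds and evaluates to -ETIMEDOUT if the condition never
 * cleared within roughly "timeout" microseconds, otherwise to 0.  Typical
 * use, as in hw_crypto_reset() below:
 *
 *	ret = RK_POLL_TIMEOUT(crypto_read(CRYPTO_RST_CTL), RK_CRYPTO_TIMEOUT);
 */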
91 
92 #define WAIT_TAG_VALID(channel, timeout) ({ \
93 	u32 tag_mask = CRYPTO_CH0_TAG_VALID << (channel);\
94 	int ret = 0;\
95 	if (is_check_tag_valid()) { \
96 		ret = RK_POLL_TIMEOUT(!(crypto_read(CRYPTO_TAG_VALID) & tag_mask),\
97 				      timeout);\
98 	} \
99 	crypto_write(crypto_read(CRYPTO_TAG_VALID) & tag_mask, CRYPTO_TAG_VALID);\
100 	ret;\
101 })
102 
103 #define virt_to_phys(addr)		(((unsigned long)(addr)) & 0xffffffff)
104 #define phys_to_virt(addr, area)	((unsigned long)(addr))
105 
106 #define align_malloc(bytes, alignment)	memalign(alignment, bytes)
107 #define align_free(addr)		do { if (addr) free(addr); } while (0)
108 
109 #define ROUNDUP(size, alignment)	round_up(size, alignment)
110 #define cache_op_inner(type, addr, size) \
111 					crypto_flush_cacheline((ulong)(addr), size)
112 
113 #define IS_NEED_IV(rk_mode) ((rk_mode) != RK_MODE_ECB && \
114 			     (rk_mode) != RK_MODE_CMAC && \
115 			     (rk_mode) != RK_MODE_CBC_MAC)
116 
117 #define IS_NEED_TAG(rk_mode) ((rk_mode) == RK_MODE_CMAC || \
118 			      (rk_mode) == RK_MODE_CBC_MAC || \
119 			      (rk_mode) == RK_MODE_CCM || \
120 			      (rk_mode) == RK_MODE_GCM)
121 
122 #define IS_MAC_MODE(rk_mode) ((rk_mode) == RK_MODE_CMAC || \
123 			      (rk_mode) == RK_MODE_CBC_MAC)
124 
125 #define IS_AE_MODE(rk_mode) ((rk_mode) == RK_MODE_CCM || \
126 			     (rk_mode) == RK_MODE_GCM)
127 
128 fdt_addr_t crypto_base;
129 static uint32_t g_crypto_version;
130 
131 static inline bool is_check_hash_valid(void)
132 {
133 	/* crypto < v4 needs to check hash valid */
134 	return CRYPTO_MAJOR_VER(g_crypto_version) < CRYPTO_MAJOR_VER_4;
135 }
136 
137 static inline bool is_check_tag_valid(void)
138 {
139 	/* crypto < v4 needs to check tag valid */
140 	return CRYPTO_MAJOR_VER(g_crypto_version) < CRYPTO_MAJOR_VER_4;
141 }
142 
143 static inline void word2byte_be(u32 word, u8 *ch)
144 {
145 	ch[0] = (word >> 24) & 0xff;
146 	ch[1] = (word >> 16) & 0xff;
147 	ch[2] = (word >> 8) & 0xff;
148 	ch[3] = (word >> 0) & 0xff;
149 }
150 
151 static inline u32 byte2word_be(const u8 *ch)
152 {
153 	return (*ch << 24) + (*(ch + 1) << 16) + (*(ch + 2) << 8) + *(ch + 3);
154 }
155 
156 static inline void clear_regs(u32 base, u32 words)
157 {
158 	int i;
159 
160 	/* clear the registers */
161 	for (i = 0; i < words; i++)
162 		crypto_write(0, base + 4 * i);
163 }
164 
165 static inline void clear_key_regs(void)
166 {
167 	clear_regs(CRYPTO_CH0_KEY_0, CRYPTO_KEY_CHANNEL_NUM * 4);
168 }
169 
170 static inline void read_regs(u32 base, u8 *data, u32 data_len)
171 {
172 	u8 tmp_buf[4];
173 	u32 i;
174 
175 	for (i = 0; i < data_len / 4; i++)
176 		word2byte_be(crypto_read(base + i * 4),
177 			     data + i * 4);
178 
179 	if (data_len % 4) {
180 		word2byte_be(crypto_read(base + i * 4), tmp_buf);
181 		memcpy(data + i * 4, tmp_buf, data_len % 4);
182 	}
183 }
184 
185 static inline void write_regs(u32 base, const u8 *data, u32 data_len)
186 {
187 	u8 tmp_buf[4];
188 	u32 i;
189 
190 	for (i = 0; i < data_len / 4; i++, base += 4)
191 		crypto_write(byte2word_be(data + i * 4), base);
192 
193 	if (data_len % 4) {
194 		memset(tmp_buf, 0x00, sizeof(tmp_buf));
195 		memcpy((u8 *)tmp_buf, data + i * 4, data_len % 4);
196 		crypto_write(byte2word_be(tmp_buf), base);
197 	}
198 }
199 
200 static inline void write_key_reg(u32 chn, const u8 *key, u32 key_len)
201 {
202 	write_regs(CRYPTO_CH0_KEY_0 + chn * 0x10, key, key_len);
203 }
204 
205 static inline void set_iv_reg(u32 chn, const u8 *iv, u32 iv_len)
206 {
207 	u32 base_iv;
208 
209 	base_iv = CRYPTO_CH0_IV_0 + chn * 0x10;
210 
211 	/* clear iv */
212 	clear_regs(base_iv, 4);
213 
214 	if (!iv || iv_len == 0)
215 		return;
216 
217 	write_regs(base_iv, iv, iv_len);
218 
219 	crypto_write(iv_len, CRYPTO_CH0_IV_LEN_0 + 4 * chn);
220 }
221 
222 static inline void get_iv_reg(u32 chn, u8 *iv, u32 iv_len)
223 {
224 	u32 base_iv;
225 
226 	base_iv = CRYPTO_CH0_IV_0 + chn * 0x10;
227 
228 	read_regs(base_iv, iv, iv_len);
229 }
230 
231 static inline void get_tag_from_reg(u32 chn, u8 *tag, u32 tag_len)
232 {
233 	u32 i;
234 	u32 chn_base = CRYPTO_CH0_TAG_0 + 0x10 * chn;
235 
236 	for (i = 0; i < tag_len / 4; i++, chn_base += 4)
237 		word2byte_be(crypto_read(chn_base), tag + 4 * i);
238 }
239 
240 static int rk_crypto_do_enable_clk(struct udevice *dev, int enable)
241 {
242 	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
243 	struct clk clk;
244 	int i, ret;
245 
246 	for (i = 0; i < priv->nclocks; i++) {
247 		ret = clk_get_by_index(dev, i, &clk);
248 		if (ret < 0) {
249 			printf("Failed to get clk index %d, ret=%d\n", i, ret);
250 			return ret;
251 		}
252 
253 		if (enable)
254 			ret = clk_enable(&clk);
255 		else
256 			ret = clk_disable(&clk);
257 		if (ret < 0 && ret != -ENOSYS) {
258 			printf("Failed to enable(%d) clk(%ld): ret=%d\n",
259 			       enable, clk.id, ret);
260 			return ret;
261 		}
262 	}
263 
264 	return 0;
265 }
266 
267 static int rk_crypto_enable_clk(struct udevice *dev)
268 {
269 	return rk_crypto_do_enable_clk(dev, 1);
270 }
271 
272 static int rk_crypto_disable_clk(struct udevice *dev)
273 {
274 	return rk_crypto_do_enable_clk(dev, 0);
275 }
276 
277 static u32 crypto_v3_dynamic_cap(void)
278 {
279 	u32 capability = 0;
280 	u32 ver_reg, i;
281 	struct cap_map {
282 		u32 ver_offset;
283 		u32 mask;
284 		u32 cap_bit;
285 	};
286 	const struct cap_map cap_tbl[] = {
287 	{CRYPTO_HASH_VERSION, CRYPTO_HASH_MD5_FLAG,    CRYPTO_MD5},
288 	{CRYPTO_HASH_VERSION, CRYPTO_HASH_SHA1_FLAG,   CRYPTO_SHA1},
289 	{CRYPTO_HASH_VERSION, CRYPTO_HASH_SHA256_FLAG, CRYPTO_SHA256},
290 	{CRYPTO_HASH_VERSION, CRYPTO_HASH_SHA512_FLAG, CRYPTO_SHA512},
291 	{CRYPTO_HASH_VERSION, CRYPTO_HASH_SM3_FLAG,    CRYPTO_SM3},
292 
293 	{CRYPTO_HMAC_VERSION, CRYPTO_HMAC_MD5_FLAG,    CRYPTO_HMAC_MD5},
294 	{CRYPTO_HMAC_VERSION, CRYPTO_HMAC_SHA1_FLAG,   CRYPTO_HMAC_SHA1},
295 	{CRYPTO_HMAC_VERSION, CRYPTO_HMAC_SHA256_FLAG, CRYPTO_HMAC_SHA256},
296 	{CRYPTO_HMAC_VERSION, CRYPTO_HMAC_SHA512_FLAG, CRYPTO_HMAC_SHA512},
297 	{CRYPTO_HMAC_VERSION, CRYPTO_HMAC_SM3_FLAG,    CRYPTO_HMAC_SM3},
298 
299 	{CRYPTO_AES_VERSION,  CRYPTO_AES256_FLAG,      CRYPTO_AES},
300 	{CRYPTO_DES_VERSION,  CRYPTO_TDES_FLAG,        CRYPTO_DES},
301 	{CRYPTO_SM4_VERSION,  CRYPTO_ECB_FLAG,         CRYPTO_SM4},
302 	};
303 
304 	/* rsa */
305 	capability = CRYPTO_RSA512 |
306 		     CRYPTO_RSA1024 |
307 		     CRYPTO_RSA2048 |
308 		     CRYPTO_RSA3072 |
309 		     CRYPTO_RSA4096;
310 
311 #if CONFIG_IS_ENABLED(ROCKCHIP_EC)
312 	capability |= (CRYPTO_SM2 |
313 		       CRYPTO_ECC_192R1 |
314 		       CRYPTO_ECC_224R1 |
315 		       CRYPTO_ECC_256R1);
316 #endif
317 
318 	for (i = 0; i < ARRAY_SIZE(cap_tbl); i++) {
319 		ver_reg = crypto_read(cap_tbl[i].ver_offset);
320 
321 		if ((ver_reg & cap_tbl[i].mask) == cap_tbl[i].mask)
322 			capability |= cap_tbl[i].cap_bit;
323 	}
324 
325 	return capability;
326 }
327 
328 static int hw_crypto_reset(void)
329 {
330 	u32 val = 0, mask = 0;
331 	int ret;
332 
333 	val = CRYPTO_SW_PKA_RESET | CRYPTO_SW_CC_RESET;
334 	mask = val << CRYPTO_WRITE_MASK_SHIFT;
335 
336 	/* reset pka and crypto modules*/
337 	crypto_write(val | mask, CRYPTO_RST_CTL);
338 
339 	/* wait for reset to complete */
340 	ret = RK_POLL_TIMEOUT(crypto_read(CRYPTO_RST_CTL), RK_CRYPTO_TIMEOUT);
341 
342 	g_crypto_version = crypto_read(CRYPTO_CRYPTO_VERSION_NEW);
343 
344 	return ret;
345 }
346 
347 static void hw_hash_clean_ctx(struct rk_hash_ctx *ctx)
348 {
349 	/* clear hash status */
350 	crypto_write(CRYPTO_WRITE_MASK_ALL | 0, CRYPTO_HASH_CTL);
351 
352 	assert(ctx);
353 	assert(ctx->magic == RK_HASH_CTX_MAGIC);
354 
355 	crypto_hash_cache_free(ctx->hash_cache);
356 
357 	memset(ctx, 0x00, sizeof(*ctx));
358 }
359 
360 static int rk_hash_init(void *hw_ctx, u32 algo)
361 {
362 	struct rk_hash_ctx *tmp_ctx = (struct rk_hash_ctx *)hw_ctx;
363 	u32 reg_ctrl = 0;
364 	int ret;
365 
366 	if (!tmp_ctx)
367 		return -EINVAL;
368 
369 	reg_ctrl = CRYPTO_SW_CC_RESET;
370 	crypto_write(reg_ctrl | (reg_ctrl << CRYPTO_WRITE_MASK_SHIFT),
371 		     CRYPTO_RST_CTL);
372 
373 	/* wait for reset to complete */
374 	ret = RK_POLL_TIMEOUT(crypto_read(CRYPTO_RST_CTL),
375 			      RK_CRYPTO_TIMEOUT);
376 
377 	reg_ctrl = 0;
378 	tmp_ctx->algo = algo;
379 	switch (algo) {
380 	case CRYPTO_MD5:
381 	case CRYPTO_HMAC_MD5:
382 		reg_ctrl |= CRYPTO_MODE_MD5;
383 		tmp_ctx->digest_size = 16;
384 		break;
385 	case CRYPTO_SHA1:
386 	case CRYPTO_HMAC_SHA1:
387 		reg_ctrl |= CRYPTO_MODE_SHA1;
388 		tmp_ctx->digest_size = 20;
389 		break;
390 	case CRYPTO_SHA256:
391 	case CRYPTO_HMAC_SHA256:
392 		reg_ctrl |= CRYPTO_MODE_SHA256;
393 		tmp_ctx->digest_size = 32;
394 		break;
395 	case CRYPTO_SHA512:
396 	case CRYPTO_HMAC_SHA512:
397 		reg_ctrl |= CRYPTO_MODE_SHA512;
398 		tmp_ctx->digest_size = 64;
399 		break;
400 	case CRYPTO_SM3:
401 	case CRYPTO_HMAC_SM3:
402 		reg_ctrl |= CRYPTO_MODE_SM3;
403 		tmp_ctx->digest_size = 32;
404 		break;
405 	default:
406 		ret = -EINVAL;
407 		goto exit;
408 	}
409 
410 	/* enable hardware padding */
411 	reg_ctrl |= CRYPTO_HW_PAD_ENABLE;
412 	crypto_write(reg_ctrl | CRYPTO_WRITE_MASK_ALL, CRYPTO_HASH_CTL);
413 
414 	/* FIFO input and output data byte swap */
415 	/* such as B0, B1, B2, B3 -> B3, B2, B1, B0 */
416 	reg_ctrl = CRYPTO_DOUT_BYTESWAP | CRYPTO_DOIN_BYTESWAP;
417 	crypto_write(reg_ctrl | CRYPTO_WRITE_MASK_ALL, CRYPTO_FIFO_CTL);
418 
419 	/* mask all DMA interrupts; completion is polled via CRYPTO_DMA_INT_ST */
420 	crypto_write(0, CRYPTO_DMA_INT_EN);
421 
422 	tmp_ctx->magic = RK_HASH_CTX_MAGIC;
423 
424 	return 0;
425 exit:
426 	/* clear hash setting if init failed */
427 	crypto_write(CRYPTO_WRITE_MASK_ALL | 0, CRYPTO_HASH_CTL);
428 
429 	return ret;
430 }
431 
432 static int rk_hash_direct_calc(void *hw_data, const u8 *data,
433 			       u32 data_len, u8 *started_flag, u8 is_last)
434 {
435 	struct rockchip_crypto_priv *priv = hw_data;
436 	struct rk_hash_ctx *hash_ctx = priv->hw_ctx;
437 	struct crypto_lli_desc *lli = &hash_ctx->data_lli;
438 	int ret = -EINVAL;
439 	u32 tmp = 0, mask = 0;
440 
441 	assert(IS_ALIGNED((ulong)data, DATA_ADDR_ALIGN_SIZE));
442 	assert(is_last || IS_ALIGNED(data_len, DATA_LEN_ALIGN_SIZE));
443 
444 	debug("%s: data = %p, len = %u, s = %x, l = %x\n",
445 	      __func__, data, data_len, *started_flag, is_last);
446 
447 	memset(lli, 0x00, sizeof(*lli));
448 	lli->src_addr = (u32)virt_to_phys(data);
449 	lli->src_len = data_len;
450 	lli->dma_ctrl = LLI_DMA_CTRL_SRC_DONE;
451 
452 	if (is_last) {
453 		lli->user_define |= LLI_USER_STRING_LAST;
454 		lli->dma_ctrl |= LLI_DMA_CTRL_LAST;
455 	} else {
456 		lli->next_addr = (u32)virt_to_phys(lli);
457 		lli->dma_ctrl |= LLI_DMA_CTRL_PAUSE;
458 	}
459 
460 	if (!(*started_flag)) {
461 		lli->user_define |=
462 			(LLI_USER_STRING_START | LLI_USER_CIPHER_START);
463 		crypto_write((u32)virt_to_phys(lli), CRYPTO_DMA_LLI_ADDR);
464 		crypto_write((CRYPTO_HASH_ENABLE << CRYPTO_WRITE_MASK_SHIFT) |
465 			     CRYPTO_HASH_ENABLE, CRYPTO_HASH_CTL);
466 		tmp = CRYPTO_DMA_START;
467 		*started_flag = 1;
468 	} else {
469 		tmp = CRYPTO_DMA_RESTART;
470 	}
471 
472 	/* flush cache */
473 	crypto_flush_cacheline((ulong)lli, sizeof(*lli));
474 	crypto_flush_cacheline((ulong)data, data_len);
475 
476 	/* start calculate */
477 	crypto_write(tmp << CRYPTO_WRITE_MASK_SHIFT | tmp,
478 		     CRYPTO_DMA_CTL);
479 
480 	/* mask CRYPTO_SYNC_LOCKSTEP_INT_ST flag */
481 	mask = ~(mask | CRYPTO_SYNC_LOCKSTEP_INT_ST);
482 
483 	/* wait calc ok */
484 	ret = RK_POLL_TIMEOUT(!(crypto_read(CRYPTO_DMA_INT_ST) & mask),
485 			      RK_CRYPTO_TIMEOUT);
486 
487 	/* clear interrupt status */
488 	tmp = crypto_read(CRYPTO_DMA_INT_ST);
489 	crypto_write(tmp, CRYPTO_DMA_INT_ST);
490 
491 	if ((tmp & mask) != CRYPTO_SRC_ITEM_DONE_INT_ST &&
492 	    (tmp & mask) != CRYPTO_ZERO_LEN_INT_ST) {
493 		ret = -EFAULT;
494 		debug("[%s] %d: CRYPTO_DMA_INT_ST = 0x%x\n",
495 		      __func__, __LINE__, tmp);
496 		goto exit;
497 	}
498 
499 	priv->length += data_len;
500 exit:
501 	return ret;
502 }
503 
504 int rk_hash_update(void *ctx, const u8 *data, u32 data_len)
505 {
506 	struct rk_hash_ctx *tmp_ctx = (struct rk_hash_ctx *)ctx;
507 	int ret = -EINVAL;
508 
509 	debug("\n");
510 	if (!tmp_ctx || !data)
511 		goto exit;
512 
513 	if (tmp_ctx->digest_size == 0 || tmp_ctx->magic != RK_HASH_CTX_MAGIC)
514 		goto exit;
515 
516 	ret = crypto_hash_update_with_cache(tmp_ctx->hash_cache,
517 					    data, data_len);
518 
519 exit:
520 	/* on error, release the hash context (guard against a NULL ctx) */
521 	if (ret && tmp_ctx)
522 		hw_hash_clean_ctx(tmp_ctx);
523 
524 	return ret;
525 }
526 
527 int rk_hash_final(void *ctx, u8 *digest, size_t len)
528 {
529 	struct rk_hash_ctx *tmp_ctx = (struct rk_hash_ctx *)ctx;
530 	int ret = 0;
531 
532 	if (!digest)
533 		return -EINVAL;
534 
535 	if (!tmp_ctx ||
536 	    tmp_ctx->digest_size == 0 ||
537 	    len > tmp_ctx->digest_size ||
538 	    tmp_ctx->magic != RK_HASH_CTX_MAGIC)
539 		return -EINVAL;
541 
542 	if (is_check_hash_valid()) {
543 		/* wait hash value ok */
544 		ret = RK_POLL_TIMEOUT(!crypto_read(CRYPTO_HASH_VALID),
545 				      RK_CRYPTO_TIMEOUT);
546 	}
547 
548 	read_regs(CRYPTO_HASH_DOUT_0, digest, len);
549 
550 	/* clear hash status */
551 	crypto_write(CRYPTO_HASH_IS_VALID, CRYPTO_HASH_VALID);
552 	crypto_write(CRYPTO_WRITE_MASK_ALL | 0, CRYPTO_HASH_CTL);
553 
556 	return ret;
557 }
558 
559 static u32 rockchip_crypto_capability(struct udevice *dev)
560 {
561 	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
562 	u32 capability, mask = 0;
563 
564 	capability = priv->soc_data->capability;
565 
566 #if !(CONFIG_IS_ENABLED(ROCKCHIP_CIPHER))
567 	mask |= (CRYPTO_DES | CRYPTO_AES | CRYPTO_SM4);
568 #endif
569 
570 #if !(CONFIG_IS_ENABLED(ROCKCHIP_HMAC))
571 	mask |= (CRYPTO_HMAC_MD5 | CRYPTO_HMAC_SHA1 | CRYPTO_HMAC_SHA256 |
572 			 CRYPTO_HMAC_SHA512 | CRYPTO_HMAC_SM3);
573 #endif
574 
575 #if !(CONFIG_IS_ENABLED(ROCKCHIP_RSA))
576 	mask |= (CRYPTO_RSA512 | CRYPTO_RSA1024 | CRYPTO_RSA2048 |
577 			 CRYPTO_RSA3072 | CRYPTO_RSA4096);
578 #endif
579 
580 	return capability & (~mask);
581 }
582 
583 static int rockchip_crypto_sha_init(struct udevice *dev, sha_context *ctx)
584 {
585 	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
586 	struct rk_hash_ctx *hash_ctx = priv->hw_ctx;
587 	int ret = 0;
588 
589 	if (!ctx)
590 		return -EINVAL;
591 
592 	memset(hash_ctx, 0x00, sizeof(*hash_ctx));
593 
594 	priv->length = 0;
595 
596 	hash_ctx->hash_cache = crypto_hash_cache_alloc(rk_hash_direct_calc,
597 						       priv, ctx->length,
598 						       DATA_ADDR_ALIGN_SIZE,
599 						       DATA_LEN_ALIGN_SIZE);
600 	if (!hash_ctx->hash_cache)
601 		return -EFAULT;
602 
603 	rk_crypto_enable_clk(dev);
604 	ret = rk_hash_init(hash_ctx, ctx->algo);
605 	if (ret) {
		/* a failed init must release the cache allocated above */
		crypto_hash_cache_free(hash_ctx->hash_cache);
		hash_ctx->hash_cache = NULL;
606 		rk_crypto_disable_clk(dev);
	}
607 
608 	return ret;
609 }
610 
611 static int rockchip_crypto_sha_update(struct udevice *dev,
612 				      u32 *input, u32 len)
613 {
614 	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
615 	int ret, i;
616 	u8 *p;
617 
618 	if (!len) {
619 		ret = -EINVAL;
620 		goto exit;
621 	}
622 
623 	p = (u8 *)input;
624 
625 	for (i = 0; i < len / HASH_UPDATE_LIMIT; i++, p += HASH_UPDATE_LIMIT) {
626 		ret = rk_hash_update(priv->hw_ctx, p, HASH_UPDATE_LIMIT);
627 		if (ret)
628 			goto exit;
629 	}
630 
631 	if (len % HASH_UPDATE_LIMIT)
632 		ret = rk_hash_update(priv->hw_ctx, p, len % HASH_UPDATE_LIMIT);
633 
634 exit:
635 	if (ret)
636 		rk_crypto_disable_clk(dev);
637 
638 	return ret;
639 }
640 
641 static int rockchip_crypto_sha_final(struct udevice *dev,
642 				     sha_context *ctx, u8 *output)
643 {
644 	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
645 	u32 nbits;
646 	int ret;
647 
648 	nbits = crypto_algo_nbits(ctx->algo);
649 
650 	if (priv->length != ctx->length) {
651 		printf("total length(0x%08x) != init length(0x%08x)!\n",
652 		       priv->length, ctx->length);
653 		ret = -EIO;
654 		goto exit;
655 	}
656 
657 	ret = rk_hash_final(priv->hw_ctx, (u8 *)output, BITS2BYTE(nbits));
658 
659 exit:
660 	hw_hash_clean_ctx(priv->hw_ctx);
661 	rk_crypto_disable_clk(dev);
662 
663 	return ret;
664 }
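
/*
 * Hash usage sketch (illustrative; "dev" is the probed crypto udevice and
 * "buf"/"total_len" are placeholders).  The total input length must be
 * declared up front in ctx->length, because sha_final() compares it with
 * the number of bytes actually hashed:
 *
 *	sha_context ctx = { .algo = CRYPTO_SHA256, .length = total_len };
 *	u8 digest[32];
 *
 *	rockchip_crypto_sha_init(dev, &ctx);
 *	rockchip_crypto_sha_update(dev, (u32 *)buf, total_len);
 *	rockchip_crypto_sha_final(dev, &ctx, digest);
 */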
665 
666 #if CONFIG_IS_ENABLED(ROCKCHIP_HMAC)
667 int rk_hmac_init(void *hw_ctx, u32 algo, u8 *key, u32 key_len)
668 {
669 	u32 reg_ctrl = 0;
670 	int ret;
671 
672 	if (!key || !key_len || key_len > 64)
673 		return -EINVAL;
674 
675 	clear_key_regs();
676 
677 	write_key_reg(0, key, key_len);
678 
679 	ret = rk_hash_init(hw_ctx, algo);
680 	if (ret)
681 		return ret;
682 
683 	reg_ctrl = crypto_read(CRYPTO_HASH_CTL) | CRYPTO_HMAC_ENABLE;
684 	crypto_write(reg_ctrl | CRYPTO_WRITE_MASK_ALL, CRYPTO_HASH_CTL);
685 
686 	return ret;
687 }
688 
689 static int rockchip_crypto_hmac_init(struct udevice *dev,
690 				     sha_context *ctx, u8 *key, u32 key_len)
691 {
692 	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
693 	struct rk_hash_ctx *hash_ctx = priv->hw_ctx;
694 	int ret = 0;
695 
696 	if (!ctx)
697 		return -EINVAL;
698 
699 	memset(hash_ctx, 0x00, sizeof(*hash_ctx));
700 
701 	priv->length = 0;
702 
703 	hash_ctx->hash_cache = crypto_hash_cache_alloc(rk_hash_direct_calc,
704 						       priv, ctx->length,
705 						       DATA_ADDR_ALIGN_SIZE,
706 						       DATA_LEN_ALIGN_SIZE);
707 	if (!hash_ctx->hash_cache)
708 		return -EFAULT;
709 
710 	rk_crypto_enable_clk(dev);
711 	ret = rk_hmac_init(priv->hw_ctx, ctx->algo, key, key_len);
712 	if (ret) {
		/* a failed init must release the cache allocated above */
		crypto_hash_cache_free(hash_ctx->hash_cache);
		hash_ctx->hash_cache = NULL;
713 		rk_crypto_disable_clk(dev);
	}
714 
715 	return ret;
716 }
717 
718 static int rockchip_crypto_hmac_update(struct udevice *dev,
719 				       u32 *input, u32 len)
720 {
721 	return rockchip_crypto_sha_update(dev, input, len);
722 }
723 
724 static int rockchip_crypto_hmac_final(struct udevice *dev,
725 				      sha_context *ctx, u8 *output)
726 {
727 	return rockchip_crypto_sha_final(dev, ctx, output);
728 }
729 
730 #endif
731 
732 #if CONFIG_IS_ENABLED(ROCKCHIP_CIPHER)
733 static u8 g_key_chn;
734 
735 static const u32 rk_mode2bc_mode[RK_MODE_MAX] = {
736 	[RK_MODE_ECB] = CRYPTO_BC_ECB,
737 	[RK_MODE_CBC] = CRYPTO_BC_CBC,
738 	[RK_MODE_CTS] = CRYPTO_BC_CTS,
739 	[RK_MODE_CTR] = CRYPTO_BC_CTR,
740 	[RK_MODE_CFB] = CRYPTO_BC_CFB,
741 	[RK_MODE_OFB] = CRYPTO_BC_OFB,
742 	[RK_MODE_XTS] = CRYPTO_BC_XTS,
743 	[RK_MODE_CCM] = CRYPTO_BC_CCM,
744 	[RK_MODE_GCM] = CRYPTO_BC_GCM,
745 	[RK_MODE_CMAC] = CRYPTO_BC_CMAC,
746 	[RK_MODE_CBC_MAC] = CRYPTO_BC_CBC_MAC,
747 };
748 
749 static inline void set_pc_len_reg(u32 chn, u64 pc_len)
750 {
751 	u32 chn_base = CRYPTO_CH0_PC_LEN_0 + chn * 0x08;
752 
753 	crypto_write(pc_len & 0xffffffff, chn_base);
754 	crypto_write(pc_len >> 32, chn_base + 4);
755 }
756 
757 static inline void set_aad_len_reg(u32 chn, u64 pc_len)
758 {
759 	u32 chn_base = CRYPTO_CH0_AAD_LEN_0 + chn * 0x08;
760 
761 	crypto_write(pc_len & 0xffffffff, chn_base);
762 	crypto_write(pc_len >> 32, chn_base + 4);
763 }
764 
765 static inline bool is_des_mode(u32 rk_mode)
766 {
767 	return (rk_mode == RK_MODE_ECB ||
768 		rk_mode == RK_MODE_CBC ||
769 		rk_mode == RK_MODE_CFB ||
770 		rk_mode == RK_MODE_OFB);
771 }
772 
773 static void dump_crypto_state(struct crypto_lli_desc *desc,
774 			      u32 tmp, u32 expt_int,
775 			      const u8 *in, const u8 *out,
776 			      u32 len, int ret)
777 {
778 	IMSG("%s", ret == -ETIMEDOUT ? "timeout" : "mismatch");
779 
780 	IMSG("CRYPTO_DMA_INT_ST = %08x, expect_int = %08x\n",
781 	     tmp, expt_int);
782 	IMSG("data desc		= %p\n", desc);
783 	IMSG("\taddr_in		= [%08x <=> %08x]\n",
784 	     desc->src_addr, (u32)virt_to_phys(in));
785 	IMSG("\taddr_out	= [%08x <=> %08x]\n",
786 	     desc->dst_addr, (u32)virt_to_phys(out));
787 	IMSG("\tsrc_len		= [%08x <=> %08x]\n",
788 	     desc->src_len, (u32)len);
789 	IMSG("\tdst_len		= %08x\n", desc->dst_len);
790 	IMSG("\tdma_ctl		= %08x\n", desc->dma_ctrl);
791 	IMSG("\tuser_define	= %08x\n", desc->user_define);
792 
793 	IMSG("\n\nDMA CRYPTO_DMA_LLI_ADDR status = %08x\n",
794 	     crypto_read(CRYPTO_DMA_LLI_ADDR));
795 	IMSG("DMA CRYPTO_DMA_ST status = %08x\n",
796 	     crypto_read(CRYPTO_DMA_ST));
797 	IMSG("DMA CRYPTO_DMA_STATE status = %08x\n",
798 	     crypto_read(CRYPTO_DMA_STATE));
799 	IMSG("DMA CRYPTO_DMA_LLI_RADDR status = %08x\n",
800 	     crypto_read(CRYPTO_DMA_LLI_RADDR));
801 	IMSG("DMA CRYPTO_DMA_SRC_RADDR status = %08x\n",
802 	     crypto_read(CRYPTO_DMA_SRC_RADDR));
803 	IMSG("DMA CRYPTO_DMA_DST_RADDR status = %08x\n",
804 	     crypto_read(CRYPTO_DMA_DST_RADDR));
805 	IMSG("DMA CRYPTO_CIPHER_ST status = %08x\n",
806 	     crypto_read(CRYPTO_CIPHER_ST));
807 	IMSG("DMA CRYPTO_CIPHER_STATE status = %08x\n",
808 	     crypto_read(CRYPTO_CIPHER_STATE));
809 	IMSG("DMA CRYPTO_TAG_VALID status = %08x\n",
810 	     crypto_read(CRYPTO_TAG_VALID));
811 	IMSG("LOCKSTEP status = %08x\n\n",
812 	     crypto_read(0x618));
813 
814 	IMSG("dst %d bytes not transferred",
815 	     desc->dst_addr + desc->dst_len -
816 	     crypto_read(CRYPTO_DMA_DST_RADDR));
817 }
818 
819 static int ccm128_set_iv_reg(u32 chn, const u8 *nonce, u32 nlen)
820 {
821 	u8 iv_buf[AES_BLOCK_SIZE];
822 	u32 L;
823 
824 	memset(iv_buf, 0x00, sizeof(iv_buf));
825 
826 	L = 15 - nlen;
827 	iv_buf[0] = ((u8)(L - 1) & 7);
828 
829 	/* the L parameter */
830 	L = iv_buf[0] & 7;
831 
832 	/* nonce is too short */
833 	if (nlen < (14 - L))
834 		return -EINVAL;
835 
836 	/* clear aad flag */
837 	iv_buf[0] &= ~0x40;
838 	memcpy(&iv_buf[1], nonce, 14 - L);
839 
840 	set_iv_reg(chn, iv_buf, AES_BLOCK_SIZE);
841 
842 	return 0;
843 }
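
/*
 * Worked example for the counter block written above: with a 12-byte nonce,
 * L = 15 - 12 = 3, so the flag byte is (3 - 1) & 7 = 0x02 and the 16-byte
 * block loaded into the IV registers is
 *
 *	02 || nonce[0..11] || 00 00 00
 *
 * i.e. the RFC 3610 A0 block with the counter field starting at zero.
 */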
844 
845 static void ccm_aad_padding(u32 aad_len, u8 *padding, u32 *padding_size)
846 {
847 	u32 i;
848 
849 	if (aad_len == 0) {
850 		*padding_size = 0;
851 		return;
852 	}
853 
854 	i = aad_len < (0x10000 - 0x100) ? 2 : 6;
855 
856 	if (i == 2) {
857 		padding[0] = (u8)(aad_len >> 8);
858 		padding[1] = (u8)aad_len;
859 	} else {
860 		padding[0] = 0xFF;
861 		padding[1] = 0xFE;
862 		padding[2] = (u8)(aad_len >> 24);
863 		padding[3] = (u8)(aad_len >> 16);
864 		padding[4] = (u8)(aad_len >> 8);
		padding[5] = (u8)aad_len;	/* least-significant length byte */
865 	}
866 
867 	*padding_size = i;
868 }
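
/*
 * The bytes produced above form the RFC 3610 AAD length prefix of block B1:
 * for example, aad_len = 32 encodes as the two bytes 00 20, while
 * aad_len = 0x12345 (>= 0xff00) encodes as the six bytes ff fe 00 01 23 45.
 */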
869 
870 static int ccm_compose_aad_iv(u8 *aad_iv, u32 data_len, u32 aad_len, u32 tag_size)
871 {
872 	aad_iv[0] |= ((u8)(((tag_size - 2) / 2) & 7) << 3);
873 
874 	aad_iv[12] = (u8)(data_len >> 24);
875 	aad_iv[13] = (u8)(data_len >> 16);
876 	aad_iv[14] = (u8)(data_len >> 8);
877 	aad_iv[15] = (u8)data_len;
878 
879 	if (aad_len)
880 		aad_iv[0] |= 0x40;	//set aad flag
881 
882 	return 0;
883 }
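
/*
 * Example B0 flag byte: a 16-byte tag stores M' = ((16 - 2) / 2) & 7 = 7 in
 * bits 5..3, bit 6 (0x40) is set whenever AAD is present, and the message
 * length is stored big-endian in bytes 12..15.
 */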
884 
885 static int hw_cipher_init(u32 chn, const u8 *key, const u8 *twk_key,
886 			  u32 key_len, const u8 *iv, u32 iv_len,
887 			  u32 algo, u32 mode, bool enc)
888 {
889 	u32 rk_mode = RK_GET_RK_MODE(mode);
890 	u32 key_chn_sel = chn;
891 	u32 reg_ctrl = 0;
892 	bool use_otpkey = false;
893 
894 	if (!key && key_len)
895 		use_otpkey = true;
896 
897 	IMSG("%s: key addr is %p, key_len is %d, iv addr is %p",
898 	     __func__, key, key_len, iv);
899 	if (rk_mode >= RK_MODE_MAX)
900 		return -EINVAL;
901 
902 	switch (algo) {
903 	case CRYPTO_DES:
904 		if (key_len > DES_BLOCK_SIZE)
905 			reg_ctrl |= CRYPTO_BC_TDES;
906 		else
907 			reg_ctrl |= CRYPTO_BC_DES;
908 		break;
909 	case CRYPTO_AES:
910 		reg_ctrl |= CRYPTO_BC_AES;
911 		break;
912 	case CRYPTO_SM4:
913 		reg_ctrl |= CRYPTO_BC_SM4;
914 		break;
915 	default:
916 		return -EINVAL;
917 	}
918 
919 	if (algo == CRYPTO_AES || algo == CRYPTO_SM4) {
920 		switch (key_len) {
921 		case AES_KEYSIZE_128:
922 			reg_ctrl |= CRYPTO_BC_128_bit_key;
923 			break;
924 		case AES_KEYSIZE_192:
925 			reg_ctrl |= CRYPTO_BC_192_bit_key;
926 			break;
927 		case AES_KEYSIZE_256:
928 			reg_ctrl |= CRYPTO_BC_256_bit_key;
929 			break;
930 		default:
931 			return -EINVAL;
932 		}
933 	}
934 
935 	reg_ctrl |= rk_mode2bc_mode[rk_mode];
936 	if (!enc)
937 		reg_ctrl |= CRYPTO_BC_DECRYPT;
938 
939 	/* write key data to reg */
940 	if (!use_otpkey) {
941 		write_key_reg(key_chn_sel, key, key_len);
942 		crypto_write(CRYPTO_SEL_USER, CRYPTO_KEY_SEL);
943 	} else {
944 		crypto_write(CRYPTO_SEL_KEYTABLE, CRYPTO_KEY_SEL);
945 	}
946 
947 	/* write twk key for xts mode */
948 	if (rk_mode == RK_MODE_XTS)
949 		write_key_reg(key_chn_sel + 4, twk_key, key_len);
950 
951 	/* set iv reg */
952 	if (rk_mode == RK_MODE_CCM)
953 		ccm128_set_iv_reg(chn, iv, iv_len);
954 	else
955 		set_iv_reg(chn, iv, iv_len);
956 
957 	/* din_swap set 1, dout_swap set 1, default 1. */
958 	crypto_write(0x00030003, CRYPTO_FIFO_CTL);
959 	crypto_write(0, CRYPTO_DMA_INT_EN);
960 
961 	crypto_write(reg_ctrl | CRYPTO_WRITE_MASK_ALL, CRYPTO_BC_CTL);
962 
963 	return 0;
964 }
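
/*
 * Example CRYPTO_BC_CTL composition (illustrative): AES-256-CBC decryption
 * ends up writing
 *
 *	reg_ctrl = CRYPTO_BC_AES | CRYPTO_BC_256_bit_key |
 *		   CRYPTO_BC_CBC | CRYPTO_BC_DECRYPT;
 *	crypto_write(reg_ctrl | CRYPTO_WRITE_MASK_ALL, CRYPTO_BC_CTL);
 *
 * with the write-enable mask carried in the upper half-word.
 */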
965 
966 static int hw_cipher_crypt(const u8 *in, u8 *out, u64 len,
967 			   const u8 *aad, u32 aad_len,
968 			   u8 *tag, u32 tag_len, u32 mode)
969 {
970 	struct crypto_lli_desc *data_desc = NULL, *aad_desc = NULL;
971 	u8 *dma_in = NULL, *dma_out = NULL, *aad_tmp = NULL;
972 	u32 rk_mode = RK_GET_RK_MODE(mode);
973 	u32 reg_ctrl = 0, tmp_len = 0;
974 	u32 expt_int = 0, mask = 0;
975 	u32 key_chn = g_key_chn;
976 	u32 tmp, dst_len = 0;
977 	int ret = -1;
978 
979 	if (rk_mode == RK_MODE_CTS && len <= AES_BLOCK_SIZE) {
980 		printf("CTS mode requires more than 16 bytes, got %u\n", (u32)len);
981 		return -EINVAL;
982 	}
983 
984 	tmp_len = (rk_mode == RK_MODE_CTR) ? ROUNDUP(len, AES_BLOCK_SIZE) : len;
985 
986 	data_desc = align_malloc(sizeof(*data_desc), LLI_ADDR_ALIGN_SIZE);
987 	if (!data_desc)
988 		goto exit;
989 
990 	if (IS_ALIGNED((ulong)in, DATA_ADDR_ALIGN_SIZE) && tmp_len == len)
991 		dma_in = (void *)in;
992 	else
993 		dma_in = align_malloc(tmp_len, DATA_ADDR_ALIGN_SIZE);
994 	if (!dma_in)
995 		goto exit;
996 
997 	if (out) {
998 		if (IS_ALIGNED((ulong)out, DATA_ADDR_ALIGN_SIZE) &&
999 		    tmp_len == len)
1000 			dma_out = out;
1001 		else
1002 			dma_out = align_malloc(tmp_len, DATA_ADDR_ALIGN_SIZE);
1003 		if (!dma_out)
1004 			goto exit;
1005 		dst_len = tmp_len;
1006 	}
1007 
1008 	memset(data_desc, 0x00, sizeof(*data_desc));
1009 	if (dma_in != in)
1010 		memcpy(dma_in, in, len);
1011 
1012 	data_desc->src_addr    = (u32)virt_to_phys(dma_in);
1013 	data_desc->src_len     = tmp_len;
1014 	data_desc->dst_addr    = (u32)virt_to_phys(dma_out);
1015 	data_desc->dst_len     = dst_len;
1016 	data_desc->dma_ctrl    = LLI_DMA_CTRL_LAST;
1017 
1018 	if (IS_MAC_MODE(rk_mode)) {
1019 		expt_int = CRYPTO_LIST_DONE_INT_ST;
1020 		data_desc->dma_ctrl |= LLI_DMA_CTRL_LIST_DONE;
1021 	} else {
1022 		expt_int = CRYPTO_DST_ITEM_DONE_INT_ST;
1023 		data_desc->dma_ctrl |= LLI_DMA_CTRL_DST_DONE;
1024 	}
1025 
1026 	data_desc->user_define = LLI_USER_CIPHER_START |
1027 				 LLI_USER_STRING_START |
1028 				 LLI_USER_STRING_LAST |
1029 				 (key_chn << 4);
1030 	crypto_write((u32)virt_to_phys(data_desc), CRYPTO_DMA_LLI_ADDR);
1031 
1032 	if (rk_mode == RK_MODE_CCM || rk_mode == RK_MODE_GCM) {
1033 		u32 aad_tmp_len = 0;
1034 
1035 		aad_desc = align_malloc(sizeof(*aad_desc), LLI_ADDR_ALIGN_SIZE);
1036 		if (!aad_desc)
1037 			goto exit;
1038 
1039 		memset(aad_desc, 0x00, sizeof(*aad_desc));
1040 		aad_desc->next_addr = (u32)virt_to_phys(data_desc);
1041 		aad_desc->user_define = LLI_USER_CIPHER_START |
1042 					 LLI_USER_STRING_START |
1043 					 LLI_USER_STRING_LAST |
1044 					 LLI_USER_STRING_AAD |
1045 					 (key_chn << 4);
1046 
1047 		if (rk_mode == RK_MODE_CCM) {
1048 			u8 padding[AES_BLOCK_SIZE];
1049 			u32 padding_size = 0;
1050 
1051 			memset(padding, 0x00, sizeof(padding));
1052 			ccm_aad_padding(aad_len, padding, &padding_size);
1053 
1054 			aad_tmp_len = aad_len + AES_BLOCK_SIZE + padding_size;
1055 			aad_tmp_len = ROUNDUP(aad_tmp_len, AES_BLOCK_SIZE);
1056 			aad_tmp = align_malloc(aad_tmp_len,
1057 					       DATA_ADDR_ALIGN_SIZE);
1058 			if (!aad_tmp)
1059 				goto exit;
1060 
1061 			/* clear last block */
1062 			memset(aad_tmp + aad_tmp_len - AES_BLOCK_SIZE,
1063 			       0x00, AES_BLOCK_SIZE);
1064 
1065 			/* read iv data from reg */
1066 			get_iv_reg(key_chn, aad_tmp, AES_BLOCK_SIZE);
1067 			ccm_compose_aad_iv(aad_tmp, tmp_len, aad_len, tag_len);
1068 			memcpy(aad_tmp + AES_BLOCK_SIZE, padding, padding_size);
1069 
1070 			memcpy(aad_tmp + AES_BLOCK_SIZE + padding_size,
1071 			       aad, aad_len);
1072 		} else {
1073 			aad_tmp_len = aad_len;
1074 			if (IS_ALIGNED((ulong)aad, DATA_ADDR_ALIGN_SIZE)) {
1075 				aad_tmp = (void *)aad;
1076 			} else {
1077 				aad_tmp = align_malloc(aad_tmp_len,
1078 						       DATA_ADDR_ALIGN_SIZE);
1079 				if (!aad_tmp)
1080 					goto exit;
1081 
1082 				memcpy(aad_tmp, aad, aad_tmp_len);
1083 			}
1084 
1085 			set_aad_len_reg(key_chn, aad_tmp_len);
1086 			set_pc_len_reg(key_chn, tmp_len);
1087 		}
1088 
1089 		aad_desc->src_addr = (u32)virt_to_phys(aad_tmp);
1090 		aad_desc->src_len  = aad_tmp_len;
1091 
1092 		if (aad_tmp_len) {
1093 			data_desc->user_define = LLI_USER_STRING_START |
1094 						 LLI_USER_STRING_LAST |
1095 						 (key_chn << 4);
1096 			crypto_write((u32)virt_to_phys(aad_desc), CRYPTO_DMA_LLI_ADDR);
1097 			cache_op_inner(DCACHE_AREA_CLEAN, aad_tmp, aad_tmp_len);
1098 			cache_op_inner(DCACHE_AREA_CLEAN, aad_desc, sizeof(*aad_desc));
1099 		}
1100 	}
1101 
1102 	cache_op_inner(DCACHE_AREA_CLEAN, data_desc, sizeof(*data_desc));
1103 	cache_op_inner(DCACHE_AREA_CLEAN, dma_in, tmp_len);
1104 	if (dma_out)	/* MAC modes have no output buffer */
		cache_op_inner(DCACHE_AREA_INVALIDATE, dma_out, tmp_len);
1105 
1106 	/* din_swap set 1, dout_swap set 1, default 1. */
1107 	crypto_write(0x00030003, CRYPTO_FIFO_CTL);
1108 	crypto_write(0, CRYPTO_DMA_INT_EN);
1109 
1110 	reg_ctrl = crypto_read(CRYPTO_BC_CTL) | CRYPTO_BC_ENABLE;
1111 	crypto_write(reg_ctrl | CRYPTO_WRITE_MASK_ALL, CRYPTO_BC_CTL);
1112 	crypto_write(0x00010001, CRYPTO_DMA_CTL);//start
1113 
1114 	mask = ~(mask | CRYPTO_SYNC_LOCKSTEP_INT_ST);
1115 
1116 	/* wait calc ok */
1117 	ret = RK_POLL_TIMEOUT(!(crypto_read(CRYPTO_DMA_INT_ST) & mask),
1118 			      RK_CRYPTO_TIMEOUT);
1119 	tmp = crypto_read(CRYPTO_DMA_INT_ST);
1120 	crypto_write(tmp, CRYPTO_DMA_INT_ST);
1121 
1122 	if ((tmp & mask) == expt_int) {
1123 		if (out && out != dma_out)
1124 			memcpy(out, dma_out, len);
1125 
1126 		if (IS_NEED_TAG(rk_mode)) {
1127 			ret = WAIT_TAG_VALID(key_chn, RK_CRYPTO_TIMEOUT);
1128 			get_tag_from_reg(key_chn, tag, AES_BLOCK_SIZE);
1129 		}
1130 	} else {
1131 		dump_crypto_state(data_desc, tmp, expt_int, in, out, len, ret);
1132 		ret = -1;
1133 	}
1134 
1135 exit:
1136 	crypto_write(0xffff0000, CRYPTO_BC_CTL);//bc_ctl disable
1137 	align_free(data_desc);
1138 	align_free(aad_desc);
1139 	if (dma_in != in)
1140 		align_free(dma_in);
1141 	if (out && dma_out != out)
1142 		align_free(dma_out);
1143 	if (aad && aad != aad_tmp)
1144 		align_free(aad_tmp);
1145 
1146 	return ret;
1147 }
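
/*
 * Descriptor chains built by hw_cipher_crypt() (illustrative):
 *
 *	plain cipher / MAC:	CRYPTO_DMA_LLI_ADDR -> data_desc
 *	CCM / GCM with AAD:	CRYPTO_DMA_LLI_ADDR -> aad_desc -> data_desc
 *
 * The aad_desc carries LLI_USER_STRING_AAD plus LLI_USER_CIPHER_START, so
 * when it is present the data_desc keeps only the STRING_START/STRING_LAST
 * markers.
 */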
1148 
1149 static int hw_aes_init(u32 chn, const u8 *key, const u8 *twk_key, u32 key_len,
1150 		       const u8 *iv, u32 iv_len, u32 mode, bool enc)
1151 {
1152 	u32 rk_mode = RK_GET_RK_MODE(mode);
1153 
1154 	if (rk_mode > RK_MODE_XTS)
1155 		return -EINVAL;
1156 
1157 	if (iv_len > AES_BLOCK_SIZE)
1158 		return -EINVAL;
1159 
1160 	if (IS_NEED_IV(rk_mode)) {
1161 		if (!iv || iv_len != AES_BLOCK_SIZE)
1162 			return -EINVAL;
1163 	} else {
1164 		iv_len = 0;
1165 	}
1166 
1167 	if (rk_mode == RK_MODE_XTS) {
1168 		if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_256)
1169 			return -EINVAL;
1170 
1171 		if (!key || !twk_key)
1172 			return -EINVAL;
1173 	} else {
1174 		if (key_len != AES_KEYSIZE_128 &&
1175 		    key_len != AES_KEYSIZE_192 &&
1176 		    key_len != AES_KEYSIZE_256)
1177 			return -EINVAL;
1178 	}
1179 
1180 	return hw_cipher_init(chn, key, twk_key, key_len, iv, iv_len,
1181 			      CRYPTO_AES, mode, enc);
1182 }
1183 
1184 static int hw_sm4_init(u32  chn, const u8 *key, const u8 *twk_key, u32 key_len,
1185 		       const u8 *iv, u32 iv_len, u32 mode, bool enc)
1186 {
1187 	u32 rk_mode = RK_GET_RK_MODE(mode);
1188 
1189 	if (rk_mode > RK_MODE_XTS)
1190 		return -EINVAL;
1191 
1192 	if (iv_len > SM4_BLOCK_SIZE || key_len != SM4_KEYSIZE)
1193 		return -EINVAL;
1194 
1195 	if (IS_NEED_IV(rk_mode)) {
1196 		if (!iv || iv_len != SM4_BLOCK_SIZE)
1197 			return -EINVAL;
1198 	} else {
1199 		iv_len = 0;
1200 	}
1201 
1202 	if (rk_mode == RK_MODE_XTS) {
1203 		if (!key || !twk_key)
1204 			return -EINVAL;
1205 	}
1206 
1207 	return hw_cipher_init(chn, key, twk_key, key_len, iv, iv_len,
1208 			      CRYPTO_SM4, mode, enc);
1209 }
1210 
1211 int rk_crypto_des(struct udevice *dev, u32 mode, const u8 *key, u32 key_len,
1212 		  const u8 *iv, const u8 *in, u8 *out, u32 len, bool enc)
1213 {
1214 	u32 rk_mode = RK_GET_RK_MODE(mode);
1215 	u8 tmp_key[24];
1216 	int ret;
1217 
1218 	if (!is_des_mode(rk_mode))
1219 		return -EINVAL;
1220 
1221 	if (key_len == DES_BLOCK_SIZE || key_len == 3 * DES_BLOCK_SIZE) {
1222 		memcpy(tmp_key, key, key_len);
1223 	} else if (key_len == 2 * DES_BLOCK_SIZE) {
1224 		memcpy(tmp_key, key, 16);
1225 		memcpy(tmp_key + 16, key, 8);
1226 		key_len = 3 * DES_BLOCK_SIZE;
1227 	} else {
1228 		return -EINVAL;
1229 	}
1230 
1231 	ret = hw_cipher_init(0, tmp_key, NULL, key_len, iv, DES_BLOCK_SIZE,
1232 			     CRYPTO_DES, mode, enc);
1233 	if (ret)
1234 		goto exit;
1235 
1236 	ret = hw_cipher_crypt(in, out, len, NULL, 0,
1237 			      NULL, 0, mode);
1238 
1239 exit:
1240 	return ret;
1241 }
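
/*
 * Two-key 3DES example: a 16-byte key K1 || K2 is expanded above into the
 * 24-byte EDE form K1 || K2 || K1 before being programmed, so the engine
 * only ever sees a single-DES or a full three-key schedule.
 */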
1242 
1243 int rk_crypto_aes(struct udevice *dev, u32 mode,
1244 		  const u8 *key, const u8 *twk_key, u32 key_len,
1245 		  const u8 *iv, u32 iv_len,
1246 		  const u8 *in, u8 *out, u32 len, bool enc)
1247 {
1248 	int ret;
1249 
1250 	/* RV1126/RV1109 do not support aes-192 */
1251 #if defined(CONFIG_ROCKCHIP_RV1126)
1252 	if (key_len == AES_KEYSIZE_192)
1253 		return -EINVAL;
1254 #endif
1255 
1256 	ret = hw_aes_init(0, key, twk_key, key_len, iv, iv_len, mode, enc);
1257 	if (ret)
1258 		return ret;
1259 
1260 	return hw_cipher_crypt(in, out, len, NULL, 0,
1261 			       NULL, 0, mode);
1262 }
1263 
1264 int rk_crypto_sm4(struct udevice *dev, u32 mode,
1265 		  const u8 *key, const u8 *twk_key, u32 key_len,
1266 		  const u8 *iv, u32 iv_len,
1267 		  const u8 *in, u8 *out, u32 len, bool enc)
1268 {
1269 	int ret;
1270 
1271 	ret = hw_sm4_init(0, key, twk_key, key_len, iv, iv_len, mode, enc);
1272 	if (ret)
1273 		return ret;
1274 
1275 	return hw_cipher_crypt(in, out, len, NULL, 0, NULL, 0, mode);
1276 }
1277 
1278 int rockchip_crypto_cipher(struct udevice *dev, cipher_context *ctx,
1279 			   const u8 *in, u8 *out, u32 len, bool enc)
1280 {
1281 	int ret;
1282 
1283 	rk_crypto_enable_clk(dev);
1284 
1285 	switch (ctx->algo) {
1286 	case CRYPTO_DES:
1287 		ret = rk_crypto_des(dev, ctx->mode, ctx->key, ctx->key_len,
1288 				    ctx->iv, in, out, len, enc);
1289 		break;
1290 	case CRYPTO_AES:
1291 		ret = rk_crypto_aes(dev, ctx->mode,
1292 				    ctx->key, ctx->twk_key, ctx->key_len,
1293 				    ctx->iv, ctx->iv_len, in, out, len, enc);
1294 		break;
1295 	case CRYPTO_SM4:
1296 		ret = rk_crypto_sm4(dev, ctx->mode,
1297 				    ctx->key, ctx->twk_key, ctx->key_len,
1298 				    ctx->iv, ctx->iv_len, in, out, len, enc);
1299 		break;
1300 	default:
1301 		ret = -EINVAL;
1302 		break;
1303 	}
1304 
1305 	rk_crypto_disable_clk(dev);
1306 
1307 	return ret;
1308 }
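
/*
 * Cipher usage sketch (illustrative; "dev", "key", "iv" and the data
 * buffers are placeholders, and the mode value is assumed to carry the
 * RK_MODE_* encoding expected by RK_GET_RK_MODE()):
 *
 *	cipher_context ctx = {
 *		.algo    = CRYPTO_AES,
 *		.mode    = RK_MODE_CBC,
 *		.key     = key,
 *		.key_len = AES_KEYSIZE_128,
 *		.iv      = iv,
 *		.iv_len  = AES_BLOCK_SIZE,
 *	};
 *
 *	ret = rockchip_crypto_cipher(dev, &ctx, in, out, len, true);
 */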
1309 
1310 int rockchip_crypto_fw_cipher(struct udevice *dev, cipher_fw_context *ctx,
1311 			      const u8 *in, u8 *out, u32 len, bool enc)
1312 {
1313 	int ret;
1314 
1315 	rk_crypto_enable_clk(dev);
1316 
1317 	switch (ctx->algo) {
1318 	case CRYPTO_DES:
1319 		ret = rk_crypto_des(dev, ctx->mode, NULL, ctx->key_len,
1320 				    ctx->iv, in, out, len, enc);
1321 		break;
1322 	case CRYPTO_AES:
1323 		ret = rk_crypto_aes(dev, ctx->mode, NULL, NULL, ctx->key_len,
1324 				    ctx->iv, ctx->iv_len, in, out, len, enc);
1325 		break;
1326 	case CRYPTO_SM4:
1327 		ret = rk_crypto_sm4(dev, ctx->mode, NULL, NULL, ctx->key_len,
1328 				    ctx->iv, ctx->iv_len, in, out, len, enc);
1329 		break;
1330 	default:
1331 		ret = -EINVAL;
1332 		break;
1333 	}
1334 
1335 	rk_crypto_disable_clk(dev);
1336 
1337 	return ret;
1338 }
1339 
1340 int rk_crypto_mac(struct udevice *dev, u32 algo, u32 mode,
1341 		  const u8 *key, u32 key_len,
1342 		  const u8 *in, u32 len, u8 *tag)
1343 {
1344 	u32 rk_mode = RK_GET_RK_MODE(mode);
1345 	int ret;
1346 
1347 	if (!IS_MAC_MODE(rk_mode))
1348 		return -EINVAL;
1349 
1350 	if (algo != CRYPTO_AES && algo != CRYPTO_SM4)
1351 		return -EINVAL;
1352 
1353 	/* RV1126/RV1109 do not support aes-192 */
1354 #if defined(CONFIG_ROCKCHIP_RV1126)
1355 	if (algo == CRYPTO_AES && key_len == AES_KEYSIZE_192)
1356 		return -EINVAL;
1357 #endif
1358 
1359 	ret = hw_cipher_init(g_key_chn, key, NULL, key_len, NULL, 0,
1360 			     algo, mode, true);
1361 	if (ret)
1362 		return ret;
1363 
1364 	return hw_cipher_crypt(in, NULL, len, NULL, 0,
1365 			       tag, AES_BLOCK_SIZE, mode);
1366 }
1367 
1368 int rockchip_crypto_mac(struct udevice *dev, cipher_context *ctx,
1369 			const u8 *in, u32 len, u8 *tag)
1370 {
1371 	int ret = 0;
1372 
1373 	rk_crypto_enable_clk(dev);
1374 
1375 	ret = rk_crypto_mac(dev, ctx->algo, ctx->mode,
1376 			    ctx->key, ctx->key_len, in, len, tag);
1377 
1378 	rk_crypto_disable_clk(dev);
1379 
1380 	return ret;
1381 }
1382 
1383 int rk_crypto_ae(struct udevice *dev, u32 algo, u32 mode,
1384 		 const u8 *key, u32 key_len, const u8 *nonce, u32 nonce_len,
1385 		 const u8 *in, u32 len, const u8 *aad, u32 aad_len,
1386 		 u8 *out, u8 *tag)
1387 {
1388 	u32 rk_mode = RK_GET_RK_MODE(mode);
1389 	int ret;
1390 
1391 	if (!IS_AE_MODE(rk_mode))
1392 		return -EINVAL;
1393 
1394 	if (len == 0)
1395 		return -EINVAL;
1396 
1397 	if (algo != CRYPTO_AES && algo != CRYPTO_SM4)
1398 		return -EINVAL;
1399 
1400 	/* RV1126/RV1109 do not support aes-192 */
1401 #if defined(CONFIG_ROCKCHIP_RV1126)
1402 	if (algo == CRYPTO_AES && key_len == AES_KEYSIZE_192)
1403 		return -EINVAL;
1404 #endif
1405 
1406 	ret = hw_cipher_init(g_key_chn, key, NULL, key_len, nonce, nonce_len,
1407 			     algo, mode, true);
1408 	if (ret)
1409 		return ret;
1410 
1411 	return hw_cipher_crypt(in, out, len, aad, aad_len,
1412 			       tag, AES_BLOCK_SIZE, mode);
1413 }
1414 
1415 int rockchip_crypto_ae(struct udevice *dev, cipher_context *ctx,
1416 		       const u8 *in, u32 len, const u8 *aad, u32 aad_len,
1417 		       u8 *out, u8 *tag)
1419 {
1420 	int ret = 0;
1421 
1422 	rk_crypto_enable_clk(dev);
1423 
1424 	ret = rk_crypto_ae(dev, ctx->algo, ctx->mode, ctx->key, ctx->key_len,
1425 			   ctx->iv, ctx->iv_len, in, len,
1426 			   aad, aad_len, out, tag);
1427 
1428 	rk_crypto_disable_clk(dev);
1429 
1430 	return ret;
1431 }
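
/*
 * For the AE path the cipher_context iv/iv_len fields carry the CCM/GCM
 * nonce, and the 16-byte authentication tag is read back through "tag" once
 * WAIT_TAG_VALID() reports completion.
 */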
1432 
1433 static ulong rockchip_crypto_keytable_addr(struct udevice *dev)
1434 {
1435 	return CRYPTO_S_BY_KEYLAD_BASE + CRYPTO_CH0_KEY_0;
1436 }
1437 
1438 #endif
1439 
1440 #if CONFIG_IS_ENABLED(ROCKCHIP_RSA)
1441 static int rockchip_crypto_rsa_verify(struct udevice *dev, rsa_key *ctx,
1442 				      u8 *sign, u8 *output)
1443 {
1444 	struct mpa_num *mpa_m = NULL, *mpa_e = NULL, *mpa_n = NULL;
1445 	struct mpa_num *mpa_c = NULL, *mpa_result = NULL;
1446 	u32 n_bits, n_words;
1447 	int ret;
1448 
1449 	if (!ctx)
1450 		return -EINVAL;
1451 
1452 	if (ctx->algo != CRYPTO_RSA512 &&
1453 	    ctx->algo != CRYPTO_RSA1024 &&
1454 	    ctx->algo != CRYPTO_RSA2048 &&
1455 	    ctx->algo != CRYPTO_RSA3072 &&
1456 	    ctx->algo != CRYPTO_RSA4096)
1457 		return -EINVAL;
1458 
1459 	n_bits = crypto_algo_nbits(ctx->algo);
1460 	n_words = BITS2WORD(n_bits);
1461 
1462 	ret = rk_mpa_alloc(&mpa_m, sign, n_words);
1463 	if (ret)
1464 		goto exit;
1465 
1466 	ret = rk_mpa_alloc(&mpa_e, ctx->e, n_words);
1467 	if (ret)
1468 		goto exit;
1469 
1470 	ret = rk_mpa_alloc(&mpa_n, ctx->n, n_words);
1471 	if (ret)
1472 		goto exit;
1473 
1474 	if (ctx->c) {
1475 		ret = rk_mpa_alloc(&mpa_c, ctx->c, n_words);
1476 		if (ret)
1477 			goto exit;
1478 	}
1479 
1480 	ret = rk_mpa_alloc(&mpa_result, NULL, n_words);
1481 	if (ret)
1482 		goto exit;
1483 
1484 	rk_crypto_enable_clk(dev);
1485 	ret = rk_exptmod_np(mpa_m, mpa_e, mpa_n, mpa_c, mpa_result);
1486 	if (!ret)
1487 		memcpy(output, mpa_result->d, BITS2BYTE(n_bits));
1488 	rk_crypto_disable_clk(dev);
1489 
1490 exit:
1491 	rk_mpa_free(&mpa_m);
1492 	rk_mpa_free(&mpa_e);
1493 	rk_mpa_free(&mpa_n);
1494 	rk_mpa_free(&mpa_c);
1495 	rk_mpa_free(&mpa_result);
1496 
1497 	return ret;
1498 }
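
/*
 * The PKA computes output = sign^e mod n (optionally helped by the
 * pre-computed Montgomery factor c).  For CRYPTO_RSA2048, for instance,
 * n_bits = 2048 and, assuming 32-bit words, n_words = 64; the caller then
 * compares the recovered padded digest against the expected value.
 */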
1499 #endif
1500 
1501 #if CONFIG_IS_ENABLED(ROCKCHIP_EC)
1502 static int rockchip_crypto_ec_verify(struct udevice *dev, ec_key *ctx,
1503 				     u8 *hash, u32 hash_len, u8 *sign)
1504 {
1505 	struct mpa_num *bn_sign = NULL;
1506 	struct rk_ecp_point point_P, point_sign;
1507 	u32 n_bits, n_words;
1508 	int ret;
1509 
1510 	if (!ctx)
1511 		return -EINVAL;
1512 
1513 	if (ctx->algo != CRYPTO_SM2 &&
1514 	    ctx->algo != CRYPTO_ECC_192R1 &&
1515 	    ctx->algo != CRYPTO_ECC_224R1 &&
1516 	    ctx->algo != CRYPTO_ECC_256R1)
1517 		return -EINVAL;
1518 
1519 	n_bits = crypto_algo_nbits(ctx->algo);
1520 	n_words = BITS2WORD(n_bits);
1521 
1522 	ret = rk_mpa_alloc(&bn_sign, sign, n_words);
1523 	if (ret)
1524 		goto exit;
1525 
1526 	ret = rk_mpa_alloc(&point_P.x, ctx->x, n_words);
1527 	ret |= rk_mpa_alloc(&point_P.y, ctx->y, n_words);
1528 	if (ret)
1529 		goto exit;
1530 
1531 	ret = rk_mpa_alloc(&point_sign.x, sign, n_words);
1532 	ret |= rk_mpa_alloc(&point_sign.y, sign + WORD2BYTE(n_words), n_words);
1533 	if (ret)
1534 		goto exit;
1535 
1536 	rk_crypto_enable_clk(dev);
1537 	ret = rockchip_ecc_verify(ctx->algo, hash, hash_len, &point_P, &point_sign);
1538 	rk_crypto_disable_clk(dev);
1539 exit:
1540 	rk_mpa_free(&bn_sign);
1541 	rk_mpa_free(&point_P.x);
1542 	rk_mpa_free(&point_P.y);
1543 	rk_mpa_free(&point_sign.x);
1544 	rk_mpa_free(&point_sign.y);
1545 
1546 	return ret;
1547 }
1548 #endif
1549 
1550 static const struct dm_crypto_ops rockchip_crypto_ops = {
1551 	.capability   = rockchip_crypto_capability,
1552 	.sha_init     = rockchip_crypto_sha_init,
1553 	.sha_update   = rockchip_crypto_sha_update,
1554 	.sha_final    = rockchip_crypto_sha_final,
1555 #if CONFIG_IS_ENABLED(ROCKCHIP_RSA)
1556 	.rsa_verify   = rockchip_crypto_rsa_verify,
1557 #endif
1558 #if CONFIG_IS_ENABLED(ROCKCHIP_EC)
1559 	.ec_verify    = rockchip_crypto_ec_verify,
1560 #endif
1561 #if CONFIG_IS_ENABLED(ROCKCHIP_HMAC)
1562 	.hmac_init    = rockchip_crypto_hmac_init,
1563 	.hmac_update  = rockchip_crypto_hmac_update,
1564 	.hmac_final   = rockchip_crypto_hmac_final,
1565 #endif
1566 #if CONFIG_IS_ENABLED(ROCKCHIP_CIPHER)
1567 	.cipher_crypt    = rockchip_crypto_cipher,
1568 	.cipher_mac      = rockchip_crypto_mac,
1569 	.cipher_ae       = rockchip_crypto_ae,
1570 	.cipher_fw_crypt = rockchip_crypto_fw_cipher,
1571 	.keytable_addr   = rockchip_crypto_keytable_addr,
1572 #endif
1573 };
1574 
1575 /*
1576  * Only use "clocks" to parse the crypto clock ids and use rockchip_get_clk(),
1577  * because we always add a crypto node in the U-Boot dts. When the kernel dtb
1578  * is enabled:
1579  *   1. the cru phandle mismatches between the U-Boot and the kernel dtb;
1580  *   2. CONFIG_OF_SPL_REMOVE_PROPS removes the clock property.
1581  */
1582 static int rockchip_crypto_ofdata_to_platdata(struct udevice *dev)
1583 {
1584 	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
1585 	int len, ret = -EINVAL;
1586 
1587 	memset(priv, 0x00, sizeof(*priv));
1588 
1589 	priv->reg = (fdt_addr_t)dev_read_addr_ptr(dev);
1590 	if (priv->reg == FDT_ADDR_T_NONE)
1591 		return -EINVAL;
1592 
1593 	crypto_base = priv->reg;
1594 
1595 	/* if there is no clocks in dts, just skip it */
1596 	if (!dev_read_prop(dev, "clocks", &len)) {
1597 		printf("Can't find \"clocks\" property\n");
1598 		return 0;
1599 	}
1600 
1602 	priv->clocks = malloc(len);
1603 	if (!priv->clocks)
1604 		return -ENOMEM;
1605 
1606 	priv->nclocks = len / (2 * sizeof(u32)); /* <phandle, id> u32 pairs */
1607 	if (dev_read_u32_array(dev, "clocks", (u32 *)priv->clocks,
1608 			       priv->nclocks)) {
1609 		printf("Can't read \"clocks\" property\n");
1610 		ret = -EINVAL;
1611 		goto exit;
1612 	}
1613 
1614 	if (dev_read_prop(dev, "clock-frequency", &len)) {
1615 		priv->frequencies = malloc(len);
1616 		if (!priv->frequencies) {
1617 			ret = -ENOMEM;
1618 			goto exit;
1619 		}
1620 		priv->freq_nclocks = len / sizeof(u32);
1621 		if (dev_read_u32_array(dev, "clock-frequency", priv->frequencies,
1622 				       priv->freq_nclocks)) {
1623 			printf("Can't read \"clock-frequency\" property\n");
1624 			ret = -EINVAL;
1625 			goto exit;
1626 		}
1627 	}
1628 
1629 	return 0;
1630 exit:
1631 	if (priv->clocks)
1632 		free(priv->clocks);
1633 
1634 	if (priv->frequencies)
1635 		free(priv->frequencies);
1636 
1637 	return ret;
1638 }
1639 
1640 static int rk_crypto_set_clk(struct udevice *dev)
1641 {
1642 	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
1643 	struct clk clk;
1644 	int i, ret;
1645 
1646 	/* use standard "assigned-clock-rates" props */
1647 	if (dev_read_size(dev, "assigned-clock-rates") > 0)
1648 		return clk_set_defaults(dev);
1649 
1650 	/* use "clock-frequency" props */
1651 	if (priv->freq_nclocks == 0)
1652 		return 0;
1653 
1654 	for (i = 0; i < priv->freq_nclocks; i++) {
1655 		ret = clk_get_by_index(dev, i, &clk);
1656 		if (ret < 0) {
1657 			printf("Failed to get clk index %d, ret=%d\n", i, ret);
1658 			return ret;
1659 		}
1660 		ret = clk_set_rate(&clk, priv->frequencies[i]);
1661 		if (ret < 0) {
1662 			printf("%s: Failed to set clk(%ld): ret=%d\n",
1663 			       __func__, clk.id, ret);
1664 			return ret;
1665 		}
1666 	}
1667 
1668 	return 0;
1669 }
1670 
1671 static int rockchip_crypto_probe(struct udevice *dev)
1672 {
1673 	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
1674 	struct rk_crypto_soc_data *sdata;
1675 	int ret = 0;
1676 
1677 	sdata = (struct rk_crypto_soc_data *)dev_get_driver_data(dev);
1678 
1679 	if (sdata->dynamic_cap)
1680 		sdata->capability = sdata->dynamic_cap();
1681 
1682 	priv->soc_data = sdata;
1683 
1684 	priv->hw_ctx = memalign(LLI_ADDR_ALIGN_SIZE,
1685 				sizeof(struct rk_hash_ctx));
1686 	if (!priv->hw_ctx)
1687 		return -ENOMEM;
1688 
1689 	ret = rk_crypto_set_clk(dev);
1690 	if (ret)
1691 		return ret;
1692 
1693 	rk_crypto_enable_clk(dev);
1694 
1695 	hw_crypto_reset();
1696 
1697 	rk_crypto_disable_clk(dev);
1698 
1699 	return 0;
1700 }
1701 
1702 static const struct rk_crypto_soc_data soc_data_base = {
1703 	.capability = CRYPTO_MD5 |
1704 		      CRYPTO_SHA1 |
1705 		      CRYPTO_SHA256 |
1706 		      CRYPTO_SHA512 |
1707 		      CRYPTO_HMAC_MD5 |
1708 		      CRYPTO_HMAC_SHA1 |
1709 		      CRYPTO_HMAC_SHA256 |
1710 		      CRYPTO_HMAC_SHA512 |
1711 		      CRYPTO_RSA512 |
1712 		      CRYPTO_RSA1024 |
1713 		      CRYPTO_RSA2048 |
1714 		      CRYPTO_RSA3072 |
1715 		      CRYPTO_RSA4096 |
1716 		      CRYPTO_DES |
1717 		      CRYPTO_AES,
1718 };
1719 
1720 static const struct rk_crypto_soc_data soc_data_base_sm = {
1721 	.capability = CRYPTO_MD5 |
1722 		      CRYPTO_SHA1 |
1723 		      CRYPTO_SHA256 |
1724 		      CRYPTO_SHA512 |
1725 		      CRYPTO_SM3 |
1726 		      CRYPTO_HMAC_MD5 |
1727 		      CRYPTO_HMAC_SHA1 |
1728 		      CRYPTO_HMAC_SHA256 |
1729 		      CRYPTO_HMAC_SHA512 |
1730 		      CRYPTO_HMAC_SM3 |
1731 		      CRYPTO_RSA512 |
1732 		      CRYPTO_RSA1024 |
1733 		      CRYPTO_RSA2048 |
1734 		      CRYPTO_RSA3072 |
1735 		      CRYPTO_RSA4096 |
1736 		      CRYPTO_DES |
1737 		      CRYPTO_AES |
1738 		      CRYPTO_SM4,
1739 };
1740 
1741 static const struct rk_crypto_soc_data soc_data_rk1808 = {
1742 	.capability = CRYPTO_MD5 |
1743 		      CRYPTO_SHA1 |
1744 		      CRYPTO_SHA256 |
1745 		      CRYPTO_HMAC_MD5 |
1746 		      CRYPTO_HMAC_SHA1 |
1747 		      CRYPTO_HMAC_SHA256 |
1748 		      CRYPTO_RSA512 |
1749 		      CRYPTO_RSA1024 |
1750 		      CRYPTO_RSA2048 |
1751 		      CRYPTO_RSA3072 |
1752 		      CRYPTO_RSA4096,
1753 };
1754 
1755 static const struct rk_crypto_soc_data soc_data_cryptov3 = {
1756 	.capability  = 0,
1757 	.dynamic_cap = crypto_v3_dynamic_cap,
1758 };
1759 
1760 static const struct udevice_id rockchip_crypto_ids[] = {
1761 	{
1762 		.compatible = "rockchip,px30-crypto",
1763 		.data = (ulong)&soc_data_base
1764 	},
1765 	{
1766 		.compatible = "rockchip,rk1808-crypto",
1767 		.data = (ulong)&soc_data_rk1808
1768 	},
1769 	{
1770 		.compatible = "rockchip,rk3308-crypto",
1771 		.data = (ulong)&soc_data_base
1772 	},
1773 	{
1774 		.compatible = "rockchip,rv1126-crypto",
1775 		.data = (ulong)&soc_data_base_sm
1776 	},
1777 	{
1778 		.compatible = "rockchip,rk3568-crypto",
1779 		.data = (ulong)&soc_data_base_sm
1780 	},
1781 	{
1782 		.compatible = "rockchip,rk3588-crypto",
1783 		.data = (ulong)&soc_data_base_sm
1784 	},
1785 	{
1786 		.compatible = "rockchip,crypto-v3",
1787 		.data = (ulong)&soc_data_cryptov3
1788 	},
1789 	{
1790 		.compatible = "rockchip,crypto-v4",
1791 		.data = (ulong)&soc_data_cryptov3 /* reuse crypto v3 config */
1792 	},
1793 	{ }
1794 };
1795 
1796 U_BOOT_DRIVER(rockchip_crypto_v2) = {
1797 	.name		= "rockchip_crypto_v2",
1798 	.id		= UCLASS_CRYPTO,
1799 	.of_match	= rockchip_crypto_ids,
1800 	.ops		= &rockchip_crypto_ops,
1801 	.probe		= rockchip_crypto_probe,
1802 	.ofdata_to_platdata = rockchip_crypto_ofdata_to_platdata,
1803 	.priv_auto_alloc_size = sizeof(struct rockchip_crypto_priv),
1804 };
1805