xref: /rk3399_rockchip-uboot/drivers/crypto/rockchip/crypto_v2.c (revision b1a0aa40469ad613e30369ad2c82c3ca44e32eba)
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * Copyright (c) 2019 Fuzhou Rockchip Electronics Co., Ltd
4  */
5 
6 #include <common.h>
7 #include <clk.h>
8 #include <crypto.h>
9 #include <dm.h>
10 #include <asm/io.h>
11 #include <clk-uclass.h>
12 #include <asm/arch/hardware.h>
13 #include <asm/arch/clock.h>
14 #include <rockchip/crypto_ecc.h>
15 #include <rockchip/crypto_hash_cache.h>
16 #include <rockchip/crypto_v2.h>
17 #include <rockchip/crypto_v2_pka.h>
18 
19 #define	RK_HASH_CTX_MAGIC		0x1A1A1A1A
20 
21 #define CRYPTO_MAJOR_VER(ver)		((ver) & 0x0f000000)
22 
23 #define CRYPTO_MAJOR_VER_3		0x03000000
24 #define CRYPTO_MAJOR_VER_4		0x04000000
25 
26 #ifdef DEBUG
27 #define IMSG(format, ...) printf("[%s, %05d]-trace: " format "\n", \
28 				 __func__, __LINE__, ##__VA_ARGS__)
29 #else
30 #define IMSG(format, ...)
31 #endif
32 
/*
 * Hardware DMA link-list item (LLI) descriptor consumed by the crypto
 * DMA engine.  Eight 32-bit words; instances are allocated with
 * LLI_ADDR_ALIGN_SIZE (8-byte) alignment and handed to the engine by
 * physical address via CRYPTO_DMA_LLI_ADDR.
 */
struct crypto_lli_desc {
	u32 src_addr;		/* physical address of source data */
	u32 src_len;		/* source length in bytes */
	u32 dst_addr;		/* physical address of destination buffer */
	u32 dst_len;		/* destination length in bytes */
	u32 user_define;	/* LLI_USER_* flags (string start/last, key channel) */
	u32 reserve;		/* unused, keep zero */
	u32 dma_ctrl;		/* LLI_DMA_CTRL_* flags (pause/last/interrupt select) */
	u32 next_addr;		/* physical address of next LLI, 0 when last */
};
43 
/*
 * Software state for one in-flight hash/HMAC operation.  A single
 * instance lives in rockchip_crypto_priv::hw_ctx; 'magic' guards
 * against use of an uninitialized or already-cleaned context.
 */
struct rk_hash_ctx {
	struct crypto_lli_desc		data_lli;	/* lli desc */
	struct crypto_hash_cache	*hash_cache;	/* input staging cache (crypto_hash_cache_alloc) */
	u32				magic;		/* to check ctx */
	u32				algo;		/* hash algo */
	u8				digest_size;	/* hash out length */
	u8				reserved[3];	/* pad to 4-byte boundary */
};
52 
/*
 * Per-SoC capability description: a static capability bitmask plus an
 * optional callback that probes the hardware version registers at
 * runtime (see crypto_v3_dynamic_cap()).
 */
struct rk_crypto_soc_data {
	u32 capability;
	u32 (*dynamic_cap)(void);
};
57 
/* Driver-private state, one instance per crypto device. */
struct rockchip_crypto_priv {
	fdt_addr_t			reg;		/* register base address */
	u32				frequency;	/* clock rate (presumably from DT — confirm) */
	char				*clocks;	/* clock name list (presumably from DT — confirm) */
	u32				*frequencies;	/* per-clock rates matching 'clocks' */
	u32				nclocks;	/* number of clocks */
	u32				freq_nclocks;	/* number of entries in 'frequencies' */
	u32				length;		/* bytes hashed so far in the current operation */
	struct rk_hash_ctx		*hw_ctx;	/* active hash/HMAC context */
	struct rk_crypto_soc_data	*soc_data;	/* per-SoC capabilities */
	u16				enable;		/* non-zero when the device is usable */
	u16				secure;		/* secure-world flag (unused in this chunk) */
};
71 
72 #define LLI_ADDR_ALIGN_SIZE	8
73 #define DATA_ADDR_ALIGN_SIZE	8
74 #define DATA_LEN_ALIGN_SIZE	64
75 
76 /* crypto timeout 500ms, must support more than 32M data per times*/
77 #define HASH_UPDATE_LIMIT	(32 * 1024 * 1024)
78 #define RK_CRYPTO_TIMEOUT	500000
79 
/*
 * Busy-poll until 'condition' becomes false or 'timeout' iterations
 * (1us delay each) elapse.  Evaluates to 0 on success or -ETIMEDOUT
 * when the budget was exhausted.
 */
#define RK_POLL_TIMEOUT(condition, timeout) \
({ \
	int time_out = timeout; \
	while (condition) { \
		if (--time_out <= 0) { \
			debug("[%s] %d: time out!\n", __func__,\
				__LINE__); \
			break; \
		} \
		udelay(1); \
	} \
	(time_out <= 0) ? -ETIMEDOUT : 0; \
})
93 
/*
 * Wait for the TAG_VALID bit of 'channel' (only polled on pre-v4 IP,
 * see is_check_tag_valid()), then acknowledge it by writing the bit
 * back.  Evaluates to 0 on success or -ETIMEDOUT.
 */
#define WAIT_TAG_VALID(channel, timeout) ({ \
	u32 tag_mask = CRYPTO_CH0_TAG_VALID << (channel);\
	int ret = 0;\
	if (is_check_tag_valid()) { \
		ret = RK_POLL_TIMEOUT(!(crypto_read(CRYPTO_TAG_VALID) & tag_mask),\
				      timeout);\
	} \
	crypto_write(crypto_read(CRYPTO_TAG_VALID) & tag_mask, CRYPTO_TAG_VALID);\
	ret;\
})
104 
105 #define virt_to_phys(addr)		(((unsigned long)addr) & 0xffffffff)
106 #define phys_to_virt(addr, area)	((unsigned long)addr)
107 
108 #define align_malloc(bytes, alignment)	memalign(alignment, bytes)
109 #define align_free(addr)		do {if (addr) free(addr);} while (0)
110 
111 #define ROUNDUP(size, alignment)	round_up(size, alignment)
112 #define cache_op_inner(type, addr, size) \
113 					crypto_flush_cacheline((ulong)addr, size)
114 
115 #define IS_NEED_IV(rk_mode) ((rk_mode) != RK_MODE_ECB && \
116 			     (rk_mode) != RK_MODE_CMAC && \
117 			     (rk_mode) != RK_MODE_CBC_MAC)
118 
119 #define IS_NEED_TAG(rk_mode) ((rk_mode) == RK_MODE_CMAC || \
120 			      (rk_mode) == RK_MODE_CBC_MAC || \
121 			      (rk_mode) == RK_MODE_CCM || \
122 			      (rk_mode) == RK_MODE_GCM)
123 
124 #define IS_MAC_MODE(rk_mode) ((rk_mode) == RK_MODE_CMAC || \
125 			      (rk_mode) == RK_MODE_CBC_MAC)
126 
127 #define IS_AE_MODE(rk_mode) ((rk_mode) == RK_MODE_CCM || \
128 			     (rk_mode) == RK_MODE_GCM)
129 
130 fdt_addr_t crypto_base;
131 static uint32_t g_crypto_version;
132 
/* Whether HASH_VALID polling is required (pre-v4 crypto IP only). */
static inline bool is_check_hash_valid(void)
{
	/* crypto < v4 need to check hash valid */
	return CRYPTO_MAJOR_VER(g_crypto_version) < CRYPTO_MAJOR_VER_4;
}
138 
/* Whether TAG_VALID polling is required (pre-v4 crypto IP only). */
static inline bool is_check_tag_valid(void)
{
	/* crypto < v4 need to check tag valid */
	return CRYPTO_MAJOR_VER(g_crypto_version) < CRYPTO_MAJOR_VER_4;
}
144 
/* Serialize a 32-bit word into four bytes, most-significant byte first. */
static inline void word2byte_be(u32 word, u8 *ch)
{
	int i;

	for (i = 0; i < 4; i++)
		ch[i] = (u8)(word >> (8 * (3 - i)));
}
152 
/* Deserialize four bytes (most-significant first) into a 32-bit word. */
static inline u32 byte2word_be(const u8 *ch)
{
	u32 word = 0;
	int i;

	for (i = 0; i < 4; i++)
		word = (word << 8) | ch[i];

	return word;
}
157 
158 static inline void clear_regs(u32 base, u32 words)
159 {
160 	int i;
161 
162 	/*clear out register*/
163 	for (i = 0; i < words; i++)
164 		crypto_write(0, base + 4 * i);
165 }
166 
/* Wipe every key channel (CRYPTO_KEY_CHANNEL_NUM channels, 4 words each). */
static inline void clear_key_regs(void)
{
	clear_regs(CRYPTO_CH0_KEY_0, CRYPTO_KEY_CHANNEL_NUM * 4);
}
171 
172 static inline void read_regs(u32 base, u8 *data, u32 data_len)
173 {
174 	u8 tmp_buf[4];
175 	u32 i;
176 
177 	for (i = 0; i < data_len / 4; i++)
178 		word2byte_be(crypto_read(base + i * 4),
179 			     data + i * 4);
180 
181 	if (data_len % 4) {
182 		word2byte_be(crypto_read(base + i * 4), tmp_buf);
183 		memcpy(data + i * 4, tmp_buf, data_len % 4);
184 	}
185 }
186 
187 static inline void write_regs(u32 base, const u8 *data, u32 data_len)
188 {
189 	u8 tmp_buf[4];
190 	u32 i;
191 
192 	for (i = 0; i < data_len / 4; i++, base += 4)
193 		crypto_write(byte2word_be(data + i * 4), base);
194 
195 	if (data_len % 4) {
196 		memset(tmp_buf, 0x00, sizeof(tmp_buf));
197 		memcpy((u8 *)tmp_buf, data + i * 4, data_len % 4);
198 		crypto_write(byte2word_be(tmp_buf), base);
199 	}
200 }
201 
/* Load 'key' into key channel 'chn'; channels are 0x10 bytes apart. */
static inline void write_key_reg(u32 chn, const u8 *key, u32 key_len)
{
	write_regs(CRYPTO_CH0_KEY_0 + chn * 0x10, key, key_len);
}
206 
/*
 * Program the IV for channel 'chn'.  The four IV words are always
 * cleared first; with iv == NULL or iv_len == 0 they are left zeroed
 * and the IV length register is not touched.
 */
static inline void set_iv_reg(u32 chn, const u8 *iv, u32 iv_len)
{
	u32 base_iv;

	base_iv = CRYPTO_CH0_IV_0 + chn * 0x10;

	/* clear iv */
	clear_regs(base_iv, 4);

	if (!iv || iv_len == 0)
		return;

	write_regs(base_iv, iv, iv_len);

	crypto_write(iv_len, CRYPTO_CH0_IV_LEN_0 + 4 * chn);
}
223 
/* Read back 'iv_len' bytes of IV from channel 'chn'. */
static inline void get_iv_reg(u32 chn, u8 *iv, u32 iv_len)
{
	u32 base_iv;

	base_iv = CRYPTO_CH0_IV_0 + chn * 0x10;

	read_regs(base_iv, iv, iv_len);
}
232 
/*
 * Read the authentication tag of channel 'chn' into 'tag'.
 * Note: only whole 32-bit words are copied (tag_len / 4 of them);
 * a non-multiple-of-4 tail is silently dropped.
 */
static inline void get_tag_from_reg(u32 chn, u8 *tag, u32 tag_len)
{
	u32 i;
	u32 chn_base = CRYPTO_CH0_TAG_0 + 0x10 * chn;

	for (i = 0; i < tag_len / 4; i++, chn_base += 4)
		word2byte_be(crypto_read(chn_base), tag + 4 * i);
}
241 
242 static int rk_crypto_do_enable_clk(struct udevice *dev, int enable)
243 {
244 	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
245 	struct clk clk;
246 	int i, ret;
247 
248 	for (i = 0; i < priv->nclocks; i++) {
249 		ret = clk_get_by_index(dev, i, &clk);
250 		if (ret < 0) {
251 			printf("Failed to get clk index %d, ret=%d\n", i, ret);
252 			return ret;
253 		}
254 
255 		if (enable)
256 			ret = clk_enable(&clk);
257 		else
258 			ret = clk_disable(&clk);
259 		if (ret < 0 && ret != -ENOSYS) {
260 			printf("Failed to enable(%d) clk(%ld): ret=%d\n",
261 			       enable, clk.id, ret);
262 			return ret;
263 		}
264 	}
265 
266 	return 0;
267 }
268 
/* Enable all crypto clocks; pairs with rk_crypto_disable_clk(). */
static int rk_crypto_enable_clk(struct udevice *dev)
{
	return rk_crypto_do_enable_clk(dev, 1);
}
273 
/* Disable all crypto clocks; pairs with rk_crypto_enable_clk(). */
static int rk_crypto_disable_clk(struct udevice *dev)
{
	return rk_crypto_do_enable_clk(dev, 0);
}
278 
/*
 * Build the capability bitmask for v3 crypto IP by probing the
 * hardware version registers: a capability bit is reported only when
 * all of its flag bits read back set.  RSA (and, when ROCKCHIP_EC is
 * enabled, SM2/ECC) capabilities are advertised unconditionally.
 */
static u32 crypto_v3_dynamic_cap(void)
{
	u32 capability = 0;
	u32 ver_reg, i;
	struct cap_map {
		u32 ver_offset;	/* version register to probe */
		u32 mask;	/* flag bits that must all be set */
		u32 cap_bit;	/* capability bit to report */
	};
	const struct cap_map cap_tbl[] = {
	{CRYPTO_HASH_VERSION, CRYPTO_HASH_MD5_FLAG,    CRYPTO_MD5},
	{CRYPTO_HASH_VERSION, CRYPTO_HASH_SHA1_FLAG,   CRYPTO_SHA1},
	{CRYPTO_HASH_VERSION, CRYPTO_HASH_SHA256_FLAG, CRYPTO_SHA256},
	{CRYPTO_HASH_VERSION, CRYPTO_HASH_SHA512_FLAG, CRYPTO_SHA512},
	{CRYPTO_HASH_VERSION, CRYPTO_HASH_SM3_FLAG,    CRYPTO_SM3},

	{CRYPTO_HMAC_VERSION, CRYPTO_HMAC_MD5_FLAG,    CRYPTO_HMAC_MD5},
	{CRYPTO_HMAC_VERSION, CRYPTO_HMAC_SHA1_FLAG,   CRYPTO_HMAC_SHA1},
	{CRYPTO_HMAC_VERSION, CRYPTO_HMAC_SHA256_FLAG, CRYPTO_HMAC_SHA256},
	{CRYPTO_HMAC_VERSION, CRYPTO_HMAC_SHA512_FLAG, CRYPTO_HMAC_SHA512},
	{CRYPTO_HMAC_VERSION, CRYPTO_HMAC_SM3_FLAG,    CRYPTO_HMAC_SM3},

	{CRYPTO_AES_VERSION,  CRYPTO_AES256_FLAG,      CRYPTO_AES},
	{CRYPTO_DES_VERSION,  CRYPTO_TDES_FLAG,        CRYPTO_DES},
	{CRYPTO_SM4_VERSION,  CRYPTO_ECB_FLAG,         CRYPTO_SM4},
	};

	/* rsa */
	capability = CRYPTO_RSA512 |
		     CRYPTO_RSA1024 |
		     CRYPTO_RSA2048 |
		     CRYPTO_RSA3072 |
		     CRYPTO_RSA4096;

#if CONFIG_IS_ENABLED(ROCKCHIP_EC)
	capability |= (CRYPTO_SM2 |
		       CRYPTO_ECC_192R1 |
		       CRYPTO_ECC_224R1 |
		       CRYPTO_ECC_256R1);
#endif

	/* report a feature only when every flag bit of its mask is set */
	for (i = 0; i < ARRAY_SIZE(cap_tbl); i++) {
		ver_reg = crypto_read(cap_tbl[i].ver_offset);

		if ((ver_reg & cap_tbl[i].mask) == cap_tbl[i].mask)
			capability |= cap_tbl[i].cap_bit;
	}

	return capability;
}
329 
/*
 * Soft-reset the PKA and cipher/hash blocks and cache the IP version
 * for later is_check_*_valid() decisions.  Returns 0 on success or
 * -ETIMEDOUT if the reset bits do not self-clear in time.
 */
static int hw_crypto_reset(void)
{
	u32 val = 0, mask = 0;
	int ret;

	val = CRYPTO_SW_PKA_RESET | CRYPTO_SW_CC_RESET;
	mask = val << CRYPTO_WRITE_MASK_SHIFT;

	/* reset pka and crypto modules*/
	crypto_write(val | mask, CRYPTO_RST_CTL);

	/* wait reset complete */
	ret = RK_POLL_TIMEOUT(crypto_read(CRYPTO_RST_CTL), RK_CRYPTO_TIMEOUT);

	g_crypto_version = crypto_read(CRYPTO_CRYPTO_VERSION_NEW);

	return ret;
}
348 
/*
 * Tear down a finished or aborted hash operation: disable the hash
 * block, free the input cache and wipe the context (clears 'magic' so
 * reuse without re-init is caught).  The caller must pass a valid,
 * initialized context — 'ctx' is dereferenced unconditionally.
 */
static void hw_hash_clean_ctx(struct rk_hash_ctx *ctx)
{
	/* clear hash status */
	crypto_write(CRYPTO_WRITE_MASK_ALL | 0, CRYPTO_HASH_CTL);

	assert(ctx);
	assert(ctx->magic == RK_HASH_CTX_MAGIC);

	crypto_hash_cache_free(ctx->hash_cache);

	memset(ctx, 0x00, sizeof(*ctx));
}
361 
/*
 * Reset the cipher/hash block and configure the hash engine for 'algo'
 * (MD5/SHA1/SHA256/SHA512/SM3, plain or HMAC variant) with hardware
 * padding and byte-swapped FIFO I/O.  On success the context magic is
 * set and digest_size records the digest length in bytes.
 * Returns 0, or -EINVAL for an unsupported algorithm (in which case
 * the hash control register is cleared again).
 */
static int rk_hash_init(void *hw_ctx, u32 algo)
{
	struct rk_hash_ctx *tmp_ctx = (struct rk_hash_ctx *)hw_ctx;
	u32 reg_ctrl = 0;
	int ret;

	if (!tmp_ctx)
		return -EINVAL;

	reg_ctrl = CRYPTO_SW_CC_RESET;
	crypto_write(reg_ctrl | (reg_ctrl << CRYPTO_WRITE_MASK_SHIFT),
		     CRYPTO_RST_CTL);

	/*
	 * wait reset complete
	 * NOTE(review): a timeout result here is overwritten below and
	 * never reported — confirm whether that is intentional
	 */
	ret = RK_POLL_TIMEOUT(crypto_read(CRYPTO_RST_CTL),
			      RK_CRYPTO_TIMEOUT);

	reg_ctrl = 0;
	tmp_ctx->algo = algo;
	switch (algo) {
	case CRYPTO_MD5:
	case CRYPTO_HMAC_MD5:
		reg_ctrl |= CRYPTO_MODE_MD5;
		tmp_ctx->digest_size = 16;
		break;
	case CRYPTO_SHA1:
	case CRYPTO_HMAC_SHA1:
		reg_ctrl |= CRYPTO_MODE_SHA1;
		tmp_ctx->digest_size = 20;
		break;
	case CRYPTO_SHA256:
	case CRYPTO_HMAC_SHA256:
		reg_ctrl |= CRYPTO_MODE_SHA256;
		tmp_ctx->digest_size = 32;
		break;
	case CRYPTO_SHA512:
	case CRYPTO_HMAC_SHA512:
		reg_ctrl |= CRYPTO_MODE_SHA512;
		tmp_ctx->digest_size = 64;
		break;
	case CRYPTO_SM3:
	case CRYPTO_HMAC_SM3:
		reg_ctrl |= CRYPTO_MODE_SM3;
		tmp_ctx->digest_size = 32;
		break;
	default:
		ret = -EINVAL;
		goto exit;
	}

	/* enable hardware padding */
	reg_ctrl |= CRYPTO_HW_PAD_ENABLE;
	crypto_write(reg_ctrl | CRYPTO_WRITE_MASK_ALL, CRYPTO_HASH_CTL);

	/* FIFO input and output data byte swap */
	/* such as B0, B1, B2, B3 -> B3, B2, B1, B0 */
	reg_ctrl = CRYPTO_DOUT_BYTESWAP | CRYPTO_DOIN_BYTESWAP;
	crypto_write(reg_ctrl | CRYPTO_WRITE_MASK_ALL, CRYPTO_FIFO_CTL);

	/*
	 * NOTE(review): this writes 0, i.e. masks all DMA interrupts —
	 * completion is detected by polling CRYPTO_DMA_INT_ST in
	 * rk_hash_direct_calc() (the original comment claimed an
	 * interrupt was being enabled here)
	 */
	crypto_write(0, CRYPTO_DMA_INT_EN);

	tmp_ctx->magic = RK_HASH_CTX_MAGIC;

	return 0;
exit:
	/* clear hash setting if init failed */
	crypto_write(CRYPTO_WRITE_MASK_ALL | 0, CRYPTO_HASH_CTL);

	return ret;
}
433 
/*
 * Push one chunk of data through the hash DMA engine (callback used by
 * the hash cache, see crypto_hash_cache_alloc() in sha_init).
 *
 * 'data' must be DATA_ADDR_ALIGN_SIZE-aligned and, except for the last
 * chunk, 'data_len' must be a multiple of DATA_LEN_ALIGN_SIZE (both
 * asserted).  The first call programs the descriptor address and
 * enables the hash block (DMA start); subsequent calls restart the
 * paused DMA which re-reads the same descriptor.  Completion is polled
 * on CRYPTO_DMA_INT_ST.  Returns 0 on success, -ETIMEDOUT/-EFAULT on
 * failure.
 */
static int rk_hash_direct_calc(void *hw_data, const u8 *data,
			       u32 data_len, u8 *started_flag, u8 is_last)
{
	struct rockchip_crypto_priv *priv = hw_data;
	struct rk_hash_ctx *hash_ctx = priv->hw_ctx;
	struct crypto_lli_desc *lli = &hash_ctx->data_lli;
	int ret = -EINVAL;
	u32 tmp = 0, mask = 0;

	assert(IS_ALIGNED((ulong)data, DATA_ADDR_ALIGN_SIZE));
	assert(is_last || IS_ALIGNED(data_len, DATA_LEN_ALIGN_SIZE));

	debug("%s: data = %p, len = %u, s = %x, l = %x\n",
	      __func__, data, data_len, *started_flag, is_last);

	/* build a single-entry descriptor for this chunk */
	memset(lli, 0x00, sizeof(*lli));
	lli->src_addr = (u32)virt_to_phys(data);
	lli->src_len = data_len;
	lli->dma_ctrl = LLI_DMA_CTRL_SRC_DONE;

	if (is_last) {
		lli->user_define |= LLI_USER_STRING_LAST;
		lli->dma_ctrl |= LLI_DMA_CTRL_LAST;
	} else {
		/* point at itself so a DMA restart reuses this descriptor */
		lli->next_addr = (u32)virt_to_phys(lli);
		lli->dma_ctrl |= LLI_DMA_CTRL_PAUSE;
	}

	if (!(*started_flag)) {
		lli->user_define |=
			(LLI_USER_STRING_START | LLI_USER_CIPHER_START);
		crypto_write((u32)virt_to_phys(lli), CRYPTO_DMA_LLI_ADDR);
		crypto_write((CRYPTO_HASH_ENABLE << CRYPTO_WRITE_MASK_SHIFT) |
			     CRYPTO_HASH_ENABLE, CRYPTO_HASH_CTL);
		tmp = CRYPTO_DMA_START;
		*started_flag = 1;
	} else {
		tmp = CRYPTO_DMA_RESTART;
	}

	/* flush cache */
	crypto_flush_cacheline((ulong)lli, sizeof(*lli));
	crypto_flush_cacheline((ulong)data, data_len);

	/* start calculate */
	crypto_write(tmp << CRYPTO_WRITE_MASK_SHIFT | tmp,
		     CRYPTO_DMA_CTL);

	/* mask CRYPTO_SYNC_LOCKSTEP_INT_ST flag */
	mask = ~(mask | CRYPTO_SYNC_LOCKSTEP_INT_ST);

	/* wait calc ok */
	ret = RK_POLL_TIMEOUT(!(crypto_read(CRYPTO_DMA_INT_ST) & mask),
			      RK_CRYPTO_TIMEOUT);

	/* clear interrupt status */
	tmp = crypto_read(CRYPTO_DMA_INT_ST);
	crypto_write(tmp, CRYPTO_DMA_INT_ST);

	/* anything other than src-done/zero-length completion is an error */
	if ((tmp & mask) != CRYPTO_SRC_ITEM_DONE_INT_ST &&
	    (tmp & mask) != CRYPTO_ZERO_LEN_INT_ST) {
		ret = -EFAULT;
		debug("[%s] %d: CRYPTO_DMA_INT_ST = 0x%x\n",
		      __func__, __LINE__, tmp);
		goto exit;
	}

	priv->length += data_len;
exit:
	return ret;
}
505 
506 int rk_hash_update(void *ctx, const u8 *data, u32 data_len)
507 {
508 	struct rk_hash_ctx *tmp_ctx = (struct rk_hash_ctx *)ctx;
509 	int ret = -EINVAL;
510 
511 	debug("\n");
512 	if (!tmp_ctx || !data)
513 		goto exit;
514 
515 	if (tmp_ctx->digest_size == 0 || tmp_ctx->magic != RK_HASH_CTX_MAGIC)
516 		goto exit;
517 
518 	ret = crypto_hash_update_with_cache(tmp_ctx->hash_cache,
519 					    data, data_len);
520 
521 exit:
522 	/* free lli list */
523 	if (ret)
524 		hw_hash_clean_ctx(tmp_ctx);
525 
526 	return ret;
527 }
528 
529 int rk_hash_final(void *ctx, u8 *digest, size_t len)
530 {
531 	struct rk_hash_ctx *tmp_ctx = (struct rk_hash_ctx *)ctx;
532 	int ret = 0;
533 
534 	if (!digest)
535 		goto exit;
536 
537 	if (!tmp_ctx ||
538 	    tmp_ctx->digest_size == 0 ||
539 	    len > tmp_ctx->digest_size ||
540 	    tmp_ctx->magic != RK_HASH_CTX_MAGIC) {
541 		goto exit;
542 	}
543 
544 	if(is_check_hash_valid()) {
545 		/* wait hash value ok */
546 		ret = RK_POLL_TIMEOUT(!crypto_read(CRYPTO_HASH_VALID),
547 				      RK_CRYPTO_TIMEOUT);
548 	}
549 
550 	read_regs(CRYPTO_HASH_DOUT_0, digest, len);
551 
552 	/* clear hash status */
553 	crypto_write(CRYPTO_HASH_IS_VALID, CRYPTO_HASH_VALID);
554 	crypto_write(CRYPTO_WRITE_MASK_ALL | 0, CRYPTO_HASH_CTL);
555 
556 exit:
557 
558 	return ret;
559 }
560 
/*
 * Report this device's capability bitmask: the SoC's static
 * capabilities minus whatever is compiled out (cipher/HMAC/RSA
 * feature gates).  Returns 0 when the device is disabled.
 */
static u32 rockchip_crypto_capability(struct udevice *dev)
{
	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
	u32 capability, mask = 0;

	if (!priv->enable)
		return 0;

	capability = priv->soc_data->capability;

#if !(CONFIG_IS_ENABLED(ROCKCHIP_CIPHER))
	mask |= (CRYPTO_DES | CRYPTO_AES | CRYPTO_SM4);
#endif

#if !(CONFIG_IS_ENABLED(ROCKCHIP_HMAC))
	mask |= (CRYPTO_HMAC_MD5 | CRYPTO_HMAC_SHA1 | CRYPTO_HMAC_SHA256 |
			 CRYPTO_HMAC_SHA512 | CRYPTO_HMAC_SM3);
#endif

#if !(CONFIG_IS_ENABLED(ROCKCHIP_RSA))
	mask |= (CRYPTO_RSA512 | CRYPTO_RSA1024 | CRYPTO_RSA2048 |
			 CRYPTO_RSA3072 | CRYPTO_RSA4096);
#endif

	return capability & (~mask);
}
587 
/*
 * Begin a hash operation: reset the context, allocate the input
 * staging cache sized from ctx->length and configure the engine for
 * ctx->algo.  Clocks stay enabled until sha_final (or are released
 * here on failure).  Returns 0, -EINVAL or -EFAULT.
 */
static int rockchip_crypto_sha_init(struct udevice *dev, sha_context *ctx)
{
	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
	struct rk_hash_ctx *hash_ctx = priv->hw_ctx;
	int ret = 0;

	if (!ctx)
		return -EINVAL;

	memset(hash_ctx, 0x00, sizeof(*hash_ctx));

	/* running byte count, checked against ctx->length in sha_final */
	priv->length = 0;

	hash_ctx->hash_cache = crypto_hash_cache_alloc(rk_hash_direct_calc,
						       priv, ctx->length,
						       DATA_ADDR_ALIGN_SIZE,
						       DATA_LEN_ALIGN_SIZE);
	if (!hash_ctx->hash_cache)
		return -EFAULT;

	rk_crypto_enable_clk(dev);
	ret = rk_hash_init(hash_ctx, ctx->algo);
	if (ret)
		rk_crypto_disable_clk(dev);

	return ret;
}
615 
616 static int rockchip_crypto_sha_update(struct udevice *dev,
617 				      u32 *input, u32 len)
618 {
619 	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
620 	int ret, i;
621 	u8 *p;
622 
623 	if (!len) {
624 		ret = -EINVAL;
625 		goto exit;
626 	}
627 
628 	p = (u8 *)input;
629 
630 	for (i = 0; i < len / HASH_UPDATE_LIMIT; i++, p += HASH_UPDATE_LIMIT) {
631 		ret = rk_hash_update(priv->hw_ctx, p, HASH_UPDATE_LIMIT);
632 		if (ret)
633 			goto exit;
634 	}
635 
636 	if (len % HASH_UPDATE_LIMIT)
637 		ret = rk_hash_update(priv->hw_ctx, p, len % HASH_UPDATE_LIMIT);
638 
639 exit:
640 	if (ret)
641 		rk_crypto_disable_clk(dev);
642 
643 	return ret;
644 }
645 
/*
 * Finish the hash: verify that the total number of bytes fed matches
 * what was announced at init, read out the digest sized from the
 * algorithm's bit width, then always clean the context and release
 * the clocks.  Returns 0 or negative.
 */
static int rockchip_crypto_sha_final(struct udevice *dev,
				     sha_context *ctx, u8 *output)
{
	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
	u32 nbits;
	int ret;

	nbits = crypto_algo_nbits(ctx->algo);

	if (priv->length != ctx->length) {
		printf("total length(0x%08x) != init length(0x%08x)!\n",
		       priv->length, ctx->length);
		ret = -EIO;
		goto exit;
	}

	ret = rk_hash_final(priv->hw_ctx, (u8 *)output, BITS2BYTE(nbits));

exit:
	/* always tear down, success or not */
	hw_hash_clean_ctx(priv->hw_ctx);
	rk_crypto_disable_clk(dev);

	return ret;
}
670 
671 #if CONFIG_IS_ENABLED(ROCKCHIP_HMAC)
/*
 * Configure the hash engine for HMAC: load 'key' (1..64 bytes) into
 * key channel 0, run the normal hash init for 'algo', then turn on
 * the HMAC enable bit.  Returns 0 or a negative error code.
 */
int rk_hmac_init(void *hw_ctx, u32 algo, u8 *key, u32 key_len)
{
	u32 reg_ctrl = 0;
	int ret;

	if (!key || !key_len || key_len > 64)
		return -EINVAL;

	clear_key_regs();

	write_key_reg(0, key, key_len);

	ret = rk_hash_init(hw_ctx, algo);
	if (ret)
		return ret;

	/* switch the configured hash into HMAC mode */
	reg_ctrl = crypto_read(CRYPTO_HASH_CTL) | CRYPTO_HMAC_ENABLE;
	crypto_write(reg_ctrl | CRYPTO_WRITE_MASK_ALL, CRYPTO_HASH_CTL);

	return ret;
}
693 
/*
 * Begin an HMAC operation — same flow as rockchip_crypto_sha_init()
 * but with the key loaded via rk_hmac_init().  Clocks stay enabled
 * until the final call (or are released here on failure).
 */
static int rockchip_crypto_hmac_init(struct udevice *dev,
				     sha_context *ctx, u8 *key, u32 key_len)
{
	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
	struct rk_hash_ctx *hash_ctx = priv->hw_ctx;
	int ret = 0;

	if (!ctx)
		return -EINVAL;

	memset(hash_ctx, 0x00, sizeof(*hash_ctx));

	/* running byte count, checked against ctx->length at final */
	priv->length = 0;

	hash_ctx->hash_cache = crypto_hash_cache_alloc(rk_hash_direct_calc,
						       priv, ctx->length,
						       DATA_ADDR_ALIGN_SIZE,
						       DATA_LEN_ALIGN_SIZE);
	if (!hash_ctx->hash_cache)
		return -EFAULT;

	rk_crypto_enable_clk(dev);
	ret = rk_hmac_init(priv->hw_ctx, ctx->algo, key, key_len);
	if (ret)
		rk_crypto_disable_clk(dev);

	return ret;
}
722 
/* HMAC data path is identical to the plain hash update. */
static int rockchip_crypto_hmac_update(struct udevice *dev,
				       u32 *input, u32 len)
{
	return rockchip_crypto_sha_update(dev, input, len);
}
728 
/* HMAC finalization is identical to the plain hash final. */
static int rockchip_crypto_hmac_final(struct udevice *dev,
				      sha_context *ctx, u8 *output)
{
	return rockchip_crypto_sha_final(dev, ctx, output);
}
734 
735 #endif
736 
737 #if CONFIG_IS_ENABLED(ROCKCHIP_CIPHER)
738 static u8 g_key_chn;
739 
/* Map an RK_MODE_* cipher mode to the CRYPTO_BC_* field of BC_CTL. */
static const u32 rk_mode2bc_mode[RK_MODE_MAX] = {
	[RK_MODE_ECB] = CRYPTO_BC_ECB,
	[RK_MODE_CBC] = CRYPTO_BC_CBC,
	[RK_MODE_CTS] = CRYPTO_BC_CTS,
	[RK_MODE_CTR] = CRYPTO_BC_CTR,
	[RK_MODE_CFB] = CRYPTO_BC_CFB,
	[RK_MODE_OFB] = CRYPTO_BC_OFB,
	[RK_MODE_XTS] = CRYPTO_BC_XTS,
	[RK_MODE_CCM] = CRYPTO_BC_CCM,
	[RK_MODE_GCM] = CRYPTO_BC_GCM,
	[RK_MODE_CMAC] = CRYPTO_BC_CMAC,
	[RK_MODE_CBC_MAC] = CRYPTO_BC_CBC_MAC,
};
753 
/*
 * Program the 64-bit plaintext/ciphertext length for channel 'chn',
 * low word first; the per-channel register pairs are 8 bytes apart.
 */
static inline void set_pc_len_reg(u32 chn, u64 pc_len)
{
	u32 chn_base = CRYPTO_CH0_PC_LEN_0 + chn * 0x08;

	crypto_write(pc_len & 0xffffffff, chn_base);
	crypto_write(pc_len >> 32, chn_base + 4);
}
761 
762 static inline void set_aad_len_reg(u32 chn, u64 pc_len)
763 {
764 	u32 chn_base = CRYPTO_CH0_AAD_LEN_0 + chn * 0x08;
765 
766 	crypto_write(pc_len & 0xffffffff, chn_base);
767 	crypto_write(pc_len >> 32, chn_base + 4);
768 }
769 
770 static inline bool is_des_mode(u32 rk_mode)
771 {
772 	return (rk_mode == RK_MODE_ECB ||
773 		rk_mode == RK_MODE_CBC ||
774 		rk_mode == RK_MODE_CFB ||
775 		rk_mode == RK_MODE_OFB);
776 }
777 
/*
 * Debug dump after a failed/timed-out cipher DMA transaction: prints
 * the data descriptor next to the caller's expectations plus the DMA
 * and cipher status registers.  Output only appears when DEBUG is
 * defined (IMSG compiles away otherwise).
 */
static void dump_crypto_state(struct crypto_lli_desc *desc,
			      u32 tmp, u32 expt_int,
			      const u8 *in, const u8 *out,
			      u32 len, int ret)
{
	IMSG("%s\n", ret == -ETIME ? "timeout" : "dismatch");

	IMSG("CRYPTO_DMA_INT_ST = %08x, expect_int = %08x\n",
	     tmp, expt_int);
	IMSG("data desc		= %p\n", desc);
	IMSG("\taddr_in		= [%08x <=> %08x]\n",
	     desc->src_addr, (u32)virt_to_phys(in));
	IMSG("\taddr_out	= [%08x <=> %08x]\n",
	     desc->dst_addr, (u32)virt_to_phys(out));
	IMSG("\tsrc_len		= [%08x <=> %08x]\n",
	     desc->src_len, (u32)len);
	IMSG("\tdst_len		= %08x\n", desc->dst_len);
	IMSG("\tdma_ctl		= %08x\n", desc->dma_ctrl);
	IMSG("\tuser_define	= %08x\n", desc->user_define);

	IMSG("\n\nDMA CRYPTO_DMA_LLI_ADDR status = %08x\n",
	     crypto_read(CRYPTO_DMA_LLI_ADDR));
	IMSG("DMA CRYPTO_DMA_ST status = %08x\n",
	     crypto_read(CRYPTO_DMA_ST));
	IMSG("DMA CRYPTO_DMA_STATE status = %08x\n",
	     crypto_read(CRYPTO_DMA_STATE));
	IMSG("DMA CRYPTO_DMA_LLI_RADDR status = %08x\n",
	     crypto_read(CRYPTO_DMA_LLI_RADDR));
	IMSG("DMA CRYPTO_DMA_SRC_RADDR status = %08x\n",
	     crypto_read(CRYPTO_DMA_SRC_RADDR));
	IMSG("DMA CRYPTO_DMA_DST_RADDR status = %08x\n",
	     crypto_read(CRYPTO_DMA_DST_RADDR));
	IMSG("DMA CRYPTO_CIPHER_ST status = %08x\n",
	     crypto_read(CRYPTO_CIPHER_ST));
	IMSG("DMA CRYPTO_CIPHER_STATE status = %08x\n",
	     crypto_read(CRYPTO_CIPHER_STATE));
	IMSG("DMA CRYPTO_TAG_VALID status = %08x\n",
	     crypto_read(CRYPTO_TAG_VALID));
	/* 0x618: undocumented lockstep status register — no named macro */
	IMSG("LOCKSTEP status = %08x\n\n",
	     crypto_read(0x618));

	IMSG("dst %dbyte not transferred\n",
	     desc->dst_addr + desc->dst_len -
	     crypto_read(CRYPTO_DMA_DST_RADDR));
}
823 
/*
 * Build the CCM initial counter/flag block from 'nonce' (nlen bytes)
 * and load it as channel 'chn''s IV.  The first byte encodes the
 * length-field size (L' = L - 1 for L = 15 - nlen, per RFC 3610);
 * the nonce follows from byte 1.  The tag-size and AAD flag bits are
 * filled in later by ccm_compose_aad_iv().
 * NOTE(review): assumes 7 <= nlen <= 13 as in standard AES-CCM —
 * confirm with callers; returns -EINVAL only for clearly short nonces.
 */
static int ccm128_set_iv_reg(u32 chn, const u8 *nonce, u32 nlen)
{
	u8 iv_buf[AES_BLOCK_SIZE];
	u32 L;

	memset(iv_buf, 0x00, sizeof(iv_buf));

	L = 15 - nlen;
	iv_buf[0] = ((u8)(L - 1) & 7);

	/* the L parameter */
	L = iv_buf[0] & 7;

	/* nonce is too short */
	if (nlen < (14 - L))
		return -EINVAL;

	/* clear aad flag */
	iv_buf[0] &= ~0x40;
	memcpy(&iv_buf[1], nonce, 14 - L);

	set_iv_reg(chn, iv_buf, AES_BLOCK_SIZE);

	return 0;
}
849 
/*
 * Encode the CCM associated-data length header (RFC 3610 / NIST
 * SP 800-38C): 0 < aad_len < 2^16 - 2^8 uses a 2-byte big-endian
 * encoding, larger values use 0xFF 0xFE followed by the 32-bit
 * big-endian length.  aad_len == 0 produces no header.  'padding'
 * must hold at least 6 bytes.
 *
 * Fix: the 6-byte branch never wrote padding[5], dropping the least
 * significant byte of aad_len (the caller's zeroed buffer masked it
 * whenever aad_len was a multiple of 256 only by luck).
 */
static void ccm_aad_padding(u32 aad_len, u8 *padding, u32 *padding_size)
{
	u32 i;

	if (aad_len == 0) {
		*padding_size = 0;
		return;
	}

	i = aad_len < (0x10000 - 0x100) ? 2 : 6;

	if (i == 2) {
		padding[0] = (u8)(aad_len >> 8);
		padding[1] = (u8)aad_len;
	} else {
		padding[0] = 0xFF;
		padding[1] = 0xFE;
		padding[2] = (u8)(aad_len >> 24);
		padding[3] = (u8)(aad_len >> 16);
		padding[4] = (u8)(aad_len >> 8);
		padding[5] = (u8)aad_len;
	}

	*padding_size = i;
}
874 
/*
 * Complete a CCM B0 block: fold the encoded tag size (and, when AAD is
 * present, the AAD flag bit) into byte 0 and store the payload length
 * big-endian in the last four bytes.  Always returns 0.
 */
static int ccm_compose_aad_iv(u8 *aad_iv, u32 data_len, u32 aad_len, u32 tag_size)
{
	u32 flag_bits = (((tag_size - 2) / 2) & 7) << 3;
	int i;

	if (aad_len)
		flag_bits |= 0x40;	/* aad present flag */

	aad_iv[0] |= (u8)flag_bits;

	/* big-endian payload length in bytes 12..15 */
	for (i = 0; i < 4; i++)
		aad_iv[12 + i] = (u8)(data_len >> (8 * (3 - i)));

	return 0;
}
889 
/*
 * Program the block-cipher engine: algorithm (DES/TDES/AES/SM4), key
 * size, chaining mode, direction, and the key/IV material for channel
 * 'chn'.  A NULL 'key' with non-zero 'key_len' selects the hardware
 * key table (OTP) instead of user-supplied key registers.  For XTS the
 * tweak key goes to channel chn + 4; for CCM the IV is composed from
 * the nonce.  Returns 0 or -EINVAL for unsupported parameters.
 */
static int hw_cipher_init(u32 chn, const u8 *key, const u8 *twk_key,
			  u32 key_len, const u8 *iv, u32 iv_len,
			  u32 algo, u32 mode, bool enc)
{
	u32 rk_mode = RK_GET_RK_MODE(mode);
	u32 key_chn_sel = chn;
	u32 reg_ctrl = 0;
	bool use_otpkey = false;

	/* NULL key + non-zero length means "use the OTP key table" */
	if (!key && key_len)
		use_otpkey = true;

	IMSG("%s: key addr is %p, key_len is %d, iv addr is %p",
	     __func__, key, key_len, iv);
	if (rk_mode >= RK_MODE_MAX)
		return -EINVAL;

	switch (algo) {
	case CRYPTO_DES:
		/* longer-than-one-block DES key selects triple-DES */
		if (key_len > DES_BLOCK_SIZE)
			reg_ctrl |= CRYPTO_BC_TDES;
		else
			reg_ctrl |= CRYPTO_BC_DES;
		break;
	case CRYPTO_AES:
		reg_ctrl |= CRYPTO_BC_AES;
		break;
	case CRYPTO_SM4:
		reg_ctrl |= CRYPTO_BC_SM4;
		break;
	default:
		return -EINVAL;
	}

	if (algo == CRYPTO_AES || algo == CRYPTO_SM4) {
		switch (key_len) {
		case AES_KEYSIZE_128:
			reg_ctrl |= CRYPTO_BC_128_bit_key;
			break;
		case AES_KEYSIZE_192:
			reg_ctrl |= CRYPTO_BC_192_bit_key;
			break;
		case AES_KEYSIZE_256:
			reg_ctrl |= CRYPTO_BC_256_bit_key;
			break;
		default:
			return -EINVAL;
		}
	}

	reg_ctrl |= rk_mode2bc_mode[rk_mode];
	if (!enc)
		reg_ctrl |= CRYPTO_BC_DECRYPT;

	/* write key data to reg */
	if (!use_otpkey) {
		write_key_reg(key_chn_sel, key, key_len);
		crypto_write(CRYPTO_SEL_USER, CRYPTO_KEY_SEL);
	} else {
		crypto_write(CRYPTO_SEL_KEYTABLE, CRYPTO_KEY_SEL);
	}

	/* write twk key for xts mode */
	if (rk_mode == RK_MODE_XTS)
		write_key_reg(key_chn_sel + 4, twk_key, key_len);

	/* set iv reg */
	if (rk_mode == RK_MODE_CCM)
		ccm128_set_iv_reg(chn, iv, iv_len);
	else
		set_iv_reg(chn, iv, iv_len);

	/* din_swap set 1, dout_swap set 1, default 1. */
	crypto_write(0x00030003, CRYPTO_FIFO_CTL);
	crypto_write(0, CRYPTO_DMA_INT_EN);

	crypto_write(reg_ctrl | CRYPTO_WRITE_MASK_ALL, CRYPTO_BC_CTL);

	return 0;
}
970 
/*
 * Run one complete cipher/AEAD/MAC DMA transaction previously set up
 * by hw_cipher_init().
 *
 * Builds an aligned LLI descriptor for the payload and, for CCM/GCM, a
 * second descriptor for the AAD (CCM gets a composed B0 block plus the
 * encoded AAD-length header prepended).  Unaligned in/out/aad buffers
 * are bounce-buffered into aligned allocations.  Starts the DMA, polls
 * CRYPTO_DMA_INT_ST for the expected completion status, and for tagged
 * modes reads the tag once TAG_VALID is signalled.  'out' may be NULL
 * for MAC-only modes.  Returns 0 on success, negative on
 * parameter/allocation/timeout errors.
 */
static int hw_cipher_crypt(const u8 *in, u8 *out, u64 len,
			   const u8 *aad, u32 aad_len,
			   u8 *tag, u32 tag_len, u32 mode)
{
	struct crypto_lli_desc *data_desc = NULL, *aad_desc = NULL;
	u8 *dma_in = NULL, *dma_out = NULL, *aad_tmp = NULL;
	u32 rk_mode = RK_GET_RK_MODE(mode);
	u32 reg_ctrl = 0, tmp_len = 0;
	u32 expt_int = 0, mask = 0;
	u32 key_chn = g_key_chn;
	u32 tmp, dst_len = 0;
	int ret = -1;

	if (rk_mode == RK_MODE_CTS && len <= AES_BLOCK_SIZE) {
		printf("CTS mode length %u < 16Byte\n", (u32)len);
		return -EINVAL;
	}

	/* CTR is processed in whole blocks by the engine */
	tmp_len = (rk_mode == RK_MODE_CTR) ? ROUNDUP(len, AES_BLOCK_SIZE) : len;

	data_desc = align_malloc(sizeof(*data_desc), LLI_ADDR_ALIGN_SIZE);
	if (!data_desc)
		goto exit;

	/* bounce-buffer the input unless it is aligned and unpadded */
	if (IS_ALIGNED((ulong)in, DATA_ADDR_ALIGN_SIZE) && tmp_len == len)
		dma_in = (void *)in;
	else
		dma_in = align_malloc(tmp_len, DATA_ADDR_ALIGN_SIZE);
	if (!dma_in)
		goto exit;

	if (out) {
		if (IS_ALIGNED((ulong)out, DATA_ADDR_ALIGN_SIZE) &&
		    tmp_len == len)
			dma_out = out;
		else
			dma_out = align_malloc(tmp_len, DATA_ADDR_ALIGN_SIZE);
		if (!dma_out)
			goto exit;
		dst_len = tmp_len;
	}

	memset(data_desc, 0x00, sizeof(*data_desc));
	if (dma_in != in)
		memcpy(dma_in, in, len);

	data_desc->src_addr    = (u32)virt_to_phys(dma_in);
	data_desc->src_len     = tmp_len;
	data_desc->dst_addr    = (u32)virt_to_phys(dma_out);
	data_desc->dst_len     = dst_len;
	data_desc->dma_ctrl    = LLI_DMA_CTRL_LAST;

	/* MAC modes produce no output stream, so wait for list-done
	 * instead of destination-done */
	if (IS_MAC_MODE(rk_mode)) {
		expt_int = CRYPTO_LIST_DONE_INT_ST;
		data_desc->dma_ctrl |= LLI_DMA_CTRL_LIST_DONE;
	} else {
		expt_int = CRYPTO_DST_ITEM_DONE_INT_ST;
		data_desc->dma_ctrl |= LLI_DMA_CTRL_DST_DONE;
	}

	data_desc->user_define = LLI_USER_CIPHER_START |
				 LLI_USER_STRING_START |
				 LLI_USER_STRING_LAST |
				 (key_chn << 4);
	crypto_write((u32)virt_to_phys(data_desc), CRYPTO_DMA_LLI_ADDR);

	if (rk_mode == RK_MODE_CCM || rk_mode == RK_MODE_GCM) {
		u32 aad_tmp_len = 0;

		aad_desc = align_malloc(sizeof(*aad_desc), LLI_ADDR_ALIGN_SIZE);
		if (!aad_desc)
			goto exit;

		/* AAD descriptor chains into the payload descriptor */
		memset(aad_desc, 0x00, sizeof(*aad_desc));
		aad_desc->next_addr = (u32)virt_to_phys(data_desc);
		aad_desc->user_define = LLI_USER_CIPHER_START |
					 LLI_USER_STRING_START |
					 LLI_USER_STRING_LAST |
					 LLI_USER_STRING_AAD |
					 (key_chn << 4);

		if (rk_mode == RK_MODE_CCM) {
			u8 padding[AES_BLOCK_SIZE];
			u32 padding_size = 0;

			/* CCM: prepend B0 (from the IV registers) and the
			 * encoded AAD length, rounded up to a full block */
			memset(padding, 0x00, sizeof(padding));
			ccm_aad_padding(aad_len, padding, &padding_size);

			aad_tmp_len = aad_len + AES_BLOCK_SIZE + padding_size;
			aad_tmp_len = ROUNDUP(aad_tmp_len, AES_BLOCK_SIZE);
			aad_tmp = align_malloc(aad_tmp_len,
					       DATA_ADDR_ALIGN_SIZE);
			if (!aad_tmp)
				goto exit;

			/* clear last block */
			memset(aad_tmp + aad_tmp_len - AES_BLOCK_SIZE,
			       0x00, AES_BLOCK_SIZE);

			/* read iv data from reg */
			get_iv_reg(key_chn, aad_tmp, AES_BLOCK_SIZE);
			ccm_compose_aad_iv(aad_tmp, tmp_len, aad_len, tag_len);
			memcpy(aad_tmp + AES_BLOCK_SIZE, padding, padding_size);

			memcpy(aad_tmp + AES_BLOCK_SIZE + padding_size,
			       aad, aad_len);
		} else {
			/* GCM: feed the AAD as-is; lengths go to registers */
			aad_tmp_len = aad_len;
			if (IS_ALIGNED((ulong)aad, DATA_ADDR_ALIGN_SIZE)) {
				aad_tmp = (void *)aad;
			} else {
				aad_tmp = align_malloc(aad_tmp_len,
						       DATA_ADDR_ALIGN_SIZE);
				if (!aad_tmp)
					goto exit;

				memcpy(aad_tmp, aad, aad_tmp_len);
			}

			set_aad_len_reg(key_chn, aad_tmp_len);
			set_pc_len_reg(key_chn, tmp_len);
		}

		aad_desc->src_addr = (u32)virt_to_phys(aad_tmp);
		aad_desc->src_len  = aad_tmp_len;

		/* with AAD present, the chain starts at the AAD descriptor
		 * and the payload descriptor loses its start flags */
		if (aad_tmp_len) {
			data_desc->user_define = LLI_USER_STRING_START |
						 LLI_USER_STRING_LAST |
						 (key_chn << 4);
			crypto_write((u32)virt_to_phys(aad_desc), CRYPTO_DMA_LLI_ADDR);
			cache_op_inner(DCACHE_AREA_CLEAN, aad_tmp, aad_tmp_len);
			cache_op_inner(DCACHE_AREA_CLEAN, aad_desc, sizeof(*aad_desc));
		}
	}

	cache_op_inner(DCACHE_AREA_CLEAN, data_desc, sizeof(*data_desc));
	cache_op_inner(DCACHE_AREA_CLEAN, dma_in, tmp_len);
	cache_op_inner(DCACHE_AREA_INVALIDATE, dma_out, tmp_len);

	/* din_swap set 1, dout_swap set 1, default 1. */
	crypto_write(0x00030003, CRYPTO_FIFO_CTL);
	crypto_write(0, CRYPTO_DMA_INT_EN);

	reg_ctrl = crypto_read(CRYPTO_BC_CTL) | CRYPTO_BC_ENABLE;
	crypto_write(reg_ctrl | CRYPTO_WRITE_MASK_ALL, CRYPTO_BC_CTL);
	crypto_write(0x00010001, CRYPTO_DMA_CTL);//start

	/* ignore the lockstep status bit when checking completion */
	mask = ~(mask | CRYPTO_SYNC_LOCKSTEP_INT_ST);

	/* wait calc ok */
	ret = RK_POLL_TIMEOUT(!(crypto_read(CRYPTO_DMA_INT_ST) & mask),
			      RK_CRYPTO_TIMEOUT);
	tmp = crypto_read(CRYPTO_DMA_INT_ST);
	crypto_write(tmp, CRYPTO_DMA_INT_ST);

	if ((tmp & mask) == expt_int) {
		if (out && out != dma_out)
			memcpy(out, dma_out, len);

		if (IS_NEED_TAG(rk_mode)) {
			ret = WAIT_TAG_VALID(key_chn, RK_CRYPTO_TIMEOUT);
			get_tag_from_reg(key_chn, tag, AES_BLOCK_SIZE);
		}
	} else {
		dump_crypto_state(data_desc, tmp, expt_int, in, out, len, ret);
		ret = -1;
	}

exit:
	crypto_write(0xffff0000, CRYPTO_BC_CTL);//bc_ctl disable
	align_free(data_desc);
	align_free(aad_desc);
	if (dma_in != in)
		align_free(dma_in);
	if (out && dma_out != out)
		align_free(dma_out);
	if (aad && aad != aad_tmp)
		align_free(aad_tmp);

	return ret;
}
1153 
1154 static int hw_aes_init(u32 chn, const u8 *key, const u8 *twk_key, u32 key_len,
1155 		       const u8 *iv, u32 iv_len, u32 mode, bool enc)
1156 {
1157 	u32 rk_mode = RK_GET_RK_MODE(mode);
1158 
1159 	if (rk_mode > RK_MODE_XTS)
1160 		return -EINVAL;
1161 
1162 	if (iv_len > AES_BLOCK_SIZE)
1163 		return -EINVAL;
1164 
1165 	if (IS_NEED_IV(rk_mode)) {
1166 		if (!iv || iv_len != AES_BLOCK_SIZE)
1167 			return -EINVAL;
1168 	} else {
1169 		iv_len = 0;
1170 	}
1171 
1172 	if (rk_mode == RK_MODE_XTS) {
1173 		if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_256)
1174 			return -EINVAL;
1175 
1176 		if (!key || !twk_key)
1177 			return -EINVAL;
1178 	} else {
1179 		if (key_len != AES_KEYSIZE_128 &&
1180 		    key_len != AES_KEYSIZE_192 &&
1181 		    key_len != AES_KEYSIZE_256)
1182 			return -EINVAL;
1183 	}
1184 
1185 	return hw_cipher_init(chn, key, twk_key, key_len, iv, iv_len,
1186 			      CRYPTO_AES, mode, enc);
1187 }
1188 
1189 static int hw_sm4_init(u32  chn, const u8 *key, const u8 *twk_key, u32 key_len,
1190 		       const u8 *iv, u32 iv_len, u32 mode, bool enc)
1191 {
1192 	u32 rk_mode = RK_GET_RK_MODE(mode);
1193 
1194 	if (rk_mode > RK_MODE_XTS)
1195 		return -EINVAL;
1196 
1197 	if (iv_len > SM4_BLOCK_SIZE || key_len != SM4_KEYSIZE)
1198 		return -EINVAL;
1199 
1200 	if (IS_NEED_IV(rk_mode)) {
1201 		if (!iv || iv_len != SM4_BLOCK_SIZE)
1202 			return -EINVAL;
1203 	} else {
1204 		iv_len = 0;
1205 	}
1206 
1207 	if (rk_mode == RK_MODE_XTS) {
1208 		if (!key || !twk_key)
1209 			return -EINVAL;
1210 	}
1211 
1212 	return hw_cipher_init(chn, key, twk_key, key_len, iv, iv_len,
1213 			      CRYPTO_SM4, mode, enc);
1214 }
1215 
1216 int rk_crypto_des(struct udevice *dev, u32 mode, const u8 *key, u32 key_len,
1217 		  const u8 *iv, const u8 *in, u8 *out, u32 len, bool enc)
1218 {
1219 	u32 rk_mode = RK_GET_RK_MODE(mode);
1220 	u8 tmp_key[24];
1221 	int ret;
1222 
1223 	if (!is_des_mode(rk_mode))
1224 		return -EINVAL;
1225 
1226 	if (key_len == DES_BLOCK_SIZE || key_len == 3 * DES_BLOCK_SIZE) {
1227 		memcpy(tmp_key, key, key_len);
1228 	} else if (key_len == 2 * DES_BLOCK_SIZE) {
1229 		memcpy(tmp_key, key, 16);
1230 		memcpy(tmp_key + 16, key, 8);
1231 		key_len = 3 * DES_BLOCK_SIZE;
1232 	} else {
1233 		return -EINVAL;
1234 	}
1235 
1236 	ret = hw_cipher_init(0, tmp_key, NULL, key_len, iv, DES_BLOCK_SIZE,
1237 			     CRYPTO_DES, mode, enc);
1238 	if (ret)
1239 		goto exit;
1240 
1241 	ret = hw_cipher_crypt(in, out, len, NULL, 0,
1242 			      NULL, 0, mode);
1243 
1244 exit:
1245 	return ret;
1246 }
1247 
1248 int rk_crypto_aes(struct udevice *dev, u32 mode,
1249 		  const u8 *key, const u8 *twk_key, u32 key_len,
1250 		  const u8 *iv, u32 iv_len,
1251 		  const u8 *in, u8 *out, u32 len, bool enc)
1252 {
1253 	int ret;
1254 
1255 	/* RV1126/RV1109 do not support aes-192 */
1256 #if defined(CONFIG_ROCKCHIP_RV1126)
1257 	if (key_len == AES_KEYSIZE_192)
1258 		return -EINVAL;
1259 #endif
1260 
1261 	ret = hw_aes_init(0, key, twk_key, key_len, iv, iv_len, mode, enc);
1262 	if (ret)
1263 		return ret;
1264 
1265 	return hw_cipher_crypt(in, out, len, NULL, 0,
1266 			       NULL, 0, mode);
1267 }
1268 
1269 int rk_crypto_sm4(struct udevice *dev, u32 mode,
1270 		  const u8 *key, const u8 *twk_key, u32 key_len,
1271 		  const u8 *iv, u32 iv_len,
1272 		  const u8 *in, u8 *out, u32 len, bool enc)
1273 {
1274 	int ret;
1275 
1276 	ret = hw_sm4_init(0, key, twk_key, key_len, iv, iv_len, mode, enc);
1277 	if (ret)
1278 		return ret;
1279 
1280 	return hw_cipher_crypt(in, out, len, NULL, 0, NULL, 0, mode);
1281 }
1282 
1283 int rockchip_crypto_cipher(struct udevice *dev, cipher_context *ctx,
1284 			   const u8 *in, u8 *out, u32 len, bool enc)
1285 {
1286 	int ret;
1287 
1288 	rk_crypto_enable_clk(dev);
1289 
1290 	switch (ctx->algo) {
1291 	case CRYPTO_DES:
1292 		ret = rk_crypto_des(dev, ctx->mode, ctx->key, ctx->key_len,
1293 				    ctx->iv, in, out, len, enc);
1294 		break;
1295 	case CRYPTO_AES:
1296 		ret = rk_crypto_aes(dev, ctx->mode,
1297 				    ctx->key, ctx->twk_key, ctx->key_len,
1298 				    ctx->iv, ctx->iv_len, in, out, len, enc);
1299 		break;
1300 	case CRYPTO_SM4:
1301 		ret = rk_crypto_sm4(dev, ctx->mode,
1302 				    ctx->key, ctx->twk_key, ctx->key_len,
1303 				    ctx->iv, ctx->iv_len, in, out, len, enc);
1304 		break;
1305 	default:
1306 		ret = -EINVAL;
1307 		break;
1308 	}
1309 
1310 	rk_crypto_disable_clk(dev);
1311 
1312 	return ret;
1313 }
1314 
1315 int rockchip_crypto_fw_cipher(struct udevice *dev, cipher_fw_context *ctx,
1316 			      const u8 *in, u8 *out, u32 len, bool enc)
1317 {
1318 	int ret;
1319 
1320 	rk_crypto_enable_clk(dev);
1321 
1322 	switch (ctx->algo) {
1323 	case CRYPTO_DES:
1324 		ret = rk_crypto_des(dev, ctx->mode, NULL, ctx->key_len,
1325 				    ctx->iv, in, out, len, enc);
1326 		break;
1327 	case CRYPTO_AES:
1328 		ret = rk_crypto_aes(dev, ctx->mode, NULL, NULL, ctx->key_len,
1329 				    ctx->iv, ctx->iv_len, in, out, len, enc);
1330 		break;
1331 	case CRYPTO_SM4:
1332 		ret = rk_crypto_sm4(dev, ctx->mode, NULL, NULL, ctx->key_len,
1333 				    ctx->iv, ctx->iv_len, in, out, len, enc);
1334 		break;
1335 	default:
1336 		ret = -EINVAL;
1337 		break;
1338 	}
1339 
1340 	rk_crypto_disable_clk(dev);
1341 
1342 	return ret;
1343 }
1344 
1345 int rk_crypto_mac(struct udevice *dev, u32 algo, u32 mode,
1346 		  const u8 *key, u32 key_len,
1347 		  const u8 *in, u32 len, u8 *tag)
1348 {
1349 	u32 rk_mode = RK_GET_RK_MODE(mode);
1350 	int ret;
1351 
1352 	if (!IS_MAC_MODE(rk_mode))
1353 		return -EINVAL;
1354 
1355 	if (algo != CRYPTO_AES && algo != CRYPTO_SM4)
1356 		return -EINVAL;
1357 
1358 	/* RV1126/RV1109 do not support aes-192 */
1359 #if defined(CONFIG_ROCKCHIP_RV1126)
1360 	if (algo == CRYPTO_AES && key_len == AES_KEYSIZE_192)
1361 		return -EINVAL;
1362 #endif
1363 
1364 	ret = hw_cipher_init(g_key_chn, key, NULL, key_len, NULL, 0,
1365 			     algo, mode, true);
1366 	if (ret)
1367 		return ret;
1368 
1369 	return hw_cipher_crypt(in, NULL, len, NULL, 0,
1370 			       tag, AES_BLOCK_SIZE, mode);
1371 }
1372 
1373 int rockchip_crypto_mac(struct udevice *dev, cipher_context *ctx,
1374 			const u8 *in, u32 len, u8 *tag)
1375 {
1376 	int ret = 0;
1377 
1378 	rk_crypto_enable_clk(dev);
1379 
1380 	ret = rk_crypto_mac(dev, ctx->algo, ctx->mode,
1381 			    ctx->key, ctx->key_len, in, len, tag);
1382 
1383 	rk_crypto_disable_clk(dev);
1384 
1385 	return ret;
1386 }
1387 
1388 int rk_crypto_ae(struct udevice *dev, u32 algo, u32 mode,
1389 		 const u8 *key, u32 key_len, const u8 *nonce, u32 nonce_len,
1390 		 const u8 *in, u32 len, const u8 *aad, u32 aad_len,
1391 		 u8 *out, u8 *tag)
1392 {
1393 	u32 rk_mode = RK_GET_RK_MODE(mode);
1394 	int ret;
1395 
1396 	if (!IS_AE_MODE(rk_mode))
1397 		return -EINVAL;
1398 
1399 	if (len == 0)
1400 		return -EINVAL;
1401 
1402 	if (algo != CRYPTO_AES && algo != CRYPTO_SM4)
1403 		return -EINVAL;
1404 
1405 	/* RV1126/RV1109 do not support aes-192 */
1406 #if defined(CONFIG_ROCKCHIP_RV1126)
1407 	if (algo == CRYPTO_AES && key_len == AES_KEYSIZE_192)
1408 		return -EINVAL;
1409 #endif
1410 
1411 	ret = hw_cipher_init(g_key_chn, key, NULL, key_len, nonce, nonce_len,
1412 			     algo, mode, true);
1413 	if (ret)
1414 		return ret;
1415 
1416 	return hw_cipher_crypt(in, out, len, aad, aad_len,
1417 			       tag, AES_BLOCK_SIZE, mode);
1418 }
1419 
1420 int rockchip_crypto_ae(struct udevice *dev, cipher_context *ctx,
1421 		       const u8 *in, u32 len, const u8 *aad, u32 aad_len,
1422 		       u8 *out, u8 *tag)
1423 {
1424 	int ret = 0;
1425 
1426 	rk_crypto_enable_clk(dev);
1427 
1428 	ret = rk_crypto_ae(dev, ctx->algo, ctx->mode, ctx->key, ctx->key_len,
1429 			   ctx->iv, ctx->iv_len, in, len,
1430 			   aad, aad_len, out, tag);
1431 
1432 	rk_crypto_disable_clk(dev);
1433 
1434 	return ret;
1435 }
1436 
/*
 * Return the address of channel 0's first key register in the
 * keylad-accessed secure key table (base + CH0 key word 0 offset).
 */
static ulong rockchip_crypto_keytable_addr(struct udevice *dev)
{
	return CRYPTO_S_BY_KEYLAD_BASE + CRYPTO_CH0_KEY_0;
}
1441 
1442 #endif
1443 
1444 #if CONFIG_IS_ENABLED(ROCKCHIP_RSA)
1445 static int rockchip_crypto_rsa_verify(struct udevice *dev, rsa_key *ctx,
1446 				      u8 *sign, u8 *output)
1447 {
1448 	struct mpa_num *mpa_m = NULL, *mpa_e = NULL, *mpa_n = NULL;
1449 	struct mpa_num *mpa_c = NULL, *mpa_result = NULL;
1450 	u32 n_bits, n_words;
1451 	int ret;
1452 
1453 	if (!ctx)
1454 		return -EINVAL;
1455 
1456 	if (ctx->algo != CRYPTO_RSA512 &&
1457 	    ctx->algo != CRYPTO_RSA1024 &&
1458 	    ctx->algo != CRYPTO_RSA2048 &&
1459 	    ctx->algo != CRYPTO_RSA3072 &&
1460 	    ctx->algo != CRYPTO_RSA4096)
1461 		return -EINVAL;
1462 
1463 	n_bits = crypto_algo_nbits(ctx->algo);
1464 	n_words = BITS2WORD(n_bits);
1465 
1466 	ret = rk_mpa_alloc(&mpa_m, sign, n_words);
1467 	if (ret)
1468 		goto exit;
1469 
1470 	ret = rk_mpa_alloc(&mpa_e, ctx->e, n_words);
1471 	if (ret)
1472 		goto exit;
1473 
1474 	ret = rk_mpa_alloc(&mpa_n, ctx->n, n_words);
1475 	if (ret)
1476 		goto exit;
1477 
1478 	if (ctx->c) {
1479 		ret = rk_mpa_alloc(&mpa_c, ctx->c, n_words);
1480 		if (ret)
1481 			goto exit;
1482 	}
1483 
1484 	ret = rk_mpa_alloc(&mpa_result, NULL, n_words);
1485 	if (ret)
1486 		goto exit;
1487 
1488 	rk_crypto_enable_clk(dev);
1489 	ret = rk_exptmod_np(mpa_m, mpa_e, mpa_n, mpa_c, mpa_result);
1490 	if (!ret)
1491 		memcpy(output, mpa_result->d, BITS2BYTE(n_bits));
1492 	rk_crypto_disable_clk(dev);
1493 
1494 exit:
1495 	rk_mpa_free(&mpa_m);
1496 	rk_mpa_free(&mpa_e);
1497 	rk_mpa_free(&mpa_n);
1498 	rk_mpa_free(&mpa_c);
1499 	rk_mpa_free(&mpa_result);
1500 
1501 	return ret;
1502 }
1503 #endif
1504 
1505 #if CONFIG_IS_ENABLED(ROCKCHIP_EC)
1506 static int rockchip_crypto_ec_verify(struct udevice *dev, ec_key *ctx,
1507 				     u8 *hash, u32 hash_len, u8 *sign)
1508 {
1509 	struct mpa_num *bn_sign = NULL;
1510 	struct rk_ecp_point point_P, point_sign;
1511 	u32 n_bits, n_words;
1512 	int ret;
1513 
1514 	if (!ctx)
1515 		return -EINVAL;
1516 
1517 	if (ctx->algo != CRYPTO_SM2 &&
1518 	    ctx->algo != CRYPTO_ECC_192R1 &&
1519 	    ctx->algo != CRYPTO_ECC_224R1 &&
1520 	    ctx->algo != CRYPTO_ECC_256R1)
1521 		return -EINVAL;
1522 
1523 	n_bits = crypto_algo_nbits(ctx->algo);
1524 	n_words = BITS2WORD(n_bits);
1525 
1526 	ret = rk_mpa_alloc(&bn_sign, sign, n_words);
1527 	if (ret)
1528 		goto exit;
1529 
1530 	ret = rk_mpa_alloc(&point_P.x, ctx->x, n_words);
1531 	ret |= rk_mpa_alloc(&point_P.y, ctx->y, n_words);
1532 	if (ret)
1533 		goto exit;
1534 
1535 	ret = rk_mpa_alloc(&point_sign.x, sign, n_words);
1536 	ret |= rk_mpa_alloc(&point_sign.y, sign + WORD2BYTE(n_words), n_words);
1537 	if (ret)
1538 		goto exit;
1539 
1540 	rk_crypto_enable_clk(dev);
1541 	ret = rockchip_ecc_verify(ctx->algo, hash, hash_len, &point_P, &point_sign);
1542 	rk_crypto_disable_clk(dev);
1543 exit:
1544 	rk_mpa_free(&bn_sign);
1545 	rk_mpa_free(&point_P.x);
1546 	rk_mpa_free(&point_P.y);
1547 	rk_mpa_free(&point_sign.x);
1548 	rk_mpa_free(&point_sign.y);
1549 
1550 	return ret;
1551 }
1552 #endif
1553 
/*
 * UCLASS_CRYPTO operations table. Hash ops are always present;
 * RSA/EC/HMAC/cipher ops are compiled in per Kconfig option.
 */
static const struct dm_crypto_ops rockchip_crypto_ops = {
	.capability   = rockchip_crypto_capability,
	.sha_init     = rockchip_crypto_sha_init,
	.sha_update   = rockchip_crypto_sha_update,
	.sha_final    = rockchip_crypto_sha_final,
#if CONFIG_IS_ENABLED(ROCKCHIP_RSA)
	.rsa_verify   = rockchip_crypto_rsa_verify,
#endif
#if CONFIG_IS_ENABLED(ROCKCHIP_EC)
	.ec_verify    = rockchip_crypto_ec_verify,
#endif
#if CONFIG_IS_ENABLED(ROCKCHIP_HMAC)
	.hmac_init    = rockchip_crypto_hmac_init,
	.hmac_update  = rockchip_crypto_hmac_update,
	.hmac_final   = rockchip_crypto_hmac_final,
#endif
#if CONFIG_IS_ENABLED(ROCKCHIP_CIPHER)
	.cipher_crypt    = rockchip_crypto_cipher,
	.cipher_mac      = rockchip_crypto_mac,
	.cipher_ae       = rockchip_crypto_ae,
	.cipher_fw_crypt = rockchip_crypto_fw_cipher,
	.keytable_addr   = rockchip_crypto_keytable_addr,
#endif
};
1578 
1579 /*
1580  * Only use "clocks" to parse crypto clock id and use rockchip_get_clk().
1581  * Because we always add crypto node in U-Boot dts, when kernel dtb enabled :
1582  *
1583  *   1. There is cru phandle mismatch between U-Boot and kernel dtb;
1584  *   2. CONFIG_OF_SPL_REMOVE_PROPS removes clock property;
1585  */
1586 static int rockchip_crypto_ofdata_to_platdata(struct udevice *dev)
1587 {
1588 	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
1589 	int len, ret = -EINVAL;
1590 	bool secure_flag;
1591 
1592 	memset(priv, 0x00, sizeof(*priv));
1593 
1594 	priv->reg = (fdt_addr_t)dev_read_addr_ptr(dev);
1595 	if (priv->reg == FDT_ADDR_T_NONE)
1596 		return -EINVAL;
1597 
1598 	secure_flag = dev_read_bool(dev, "security");
1599 
1600 	priv->secure = secure_flag;
1601 
1602 #if CONFIG_SPL_BUILD
1603 	if (secure_flag == 1)
1604 #else
1605 	if (secure_flag == 0)
1606 #endif
1607 		priv->enable = 1;
1608 
1609 	if (!priv->enable)
1610 		return 0;
1611 
1612 	crypto_base = priv->reg;
1613 
1614 	/* if there is no clocks in dts, just skip it */
1615 	if (!dev_read_prop(dev, "clocks", &len)) {
1616 		printf("Can't find \"clocks\" property\n");
1617 		return 0;
1618 	}
1619 
1620 	priv->clocks = malloc(len);
1621 	if (!priv->clocks)
1622 		return -ENOMEM;
1623 
1624 	priv->nclocks = len / (2 * sizeof(u32));
1625 	if (dev_read_u32_array(dev, "clocks", (u32 *)priv->clocks,
1626 			       priv->nclocks)) {
1627 		printf("Can't read \"clocks\" property\n");
1628 		ret = -EINVAL;
1629 		goto exit;
1630 	}
1631 
1632 	if (dev_read_prop(dev, "clock-frequency", &len)) {
1633 		priv->frequencies = malloc(len);
1634 		if (!priv->frequencies) {
1635 			ret = -ENOMEM;
1636 			goto exit;
1637 		}
1638 		priv->freq_nclocks = len / sizeof(u32);
1639 		if (dev_read_u32_array(dev, "clock-frequency", priv->frequencies,
1640 				       priv->freq_nclocks)) {
1641 			printf("Can't read \"clock-frequency\" property\n");
1642 			ret = -EINVAL;
1643 			goto exit;
1644 		}
1645 	}
1646 
1647 	return 0;
1648 exit:
1649 	if (priv->clocks)
1650 		free(priv->clocks);
1651 
1652 	if (priv->frequencies)
1653 		free(priv->frequencies);
1654 
1655 	return ret;
1656 }
1657 
1658 static int rk_crypto_set_clk(struct udevice *dev)
1659 {
1660 	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
1661 	struct clk clk;
1662 	int i, ret;
1663 
1664 	/* use standard "assigned-clock-rates" props */
1665 	if (dev_read_size(dev, "assigned-clock-rates") > 0)
1666 		return clk_set_defaults(dev);
1667 
1668 	/* use "clock-frequency" props */
1669 	if (priv->freq_nclocks == 0)
1670 		return 0;
1671 
1672 	for (i = 0; i < priv->freq_nclocks; i++) {
1673 		ret = clk_get_by_index(dev, i, &clk);
1674 		if (ret < 0) {
1675 			printf("Failed to get clk index %d, ret=%d\n", i, ret);
1676 			return ret;
1677 		}
1678 		ret = clk_set_rate(&clk, priv->frequencies[i]);
1679 		if (ret < 0) {
1680 			printf("%s: Failed to set clk(%ld): ret=%d\n",
1681 			       __func__, clk.id, ret);
1682 			return ret;
1683 		}
1684 	}
1685 
1686 	return 0;
1687 }
1688 
1689 static int rockchip_crypto_probe(struct udevice *dev)
1690 {
1691 	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
1692 	struct rk_crypto_soc_data *sdata;
1693 	int ret = 0;
1694 
1695 	if (!priv->enable) {
1696 		printf("crypto %s 0x%08x skip probe!!!!!!\n",
1697 		       priv->secure ? "secure" : "non_secure",
1698 		       (u32)priv->reg);
1699 		return 0;
1700 	}
1701 
1702 	sdata = (struct rk_crypto_soc_data *)dev_get_driver_data(dev);
1703 
1704 	if (sdata->dynamic_cap)
1705 		sdata->capability = sdata->dynamic_cap();
1706 
1707 	priv->soc_data = sdata;
1708 
1709 	priv->hw_ctx = memalign(LLI_ADDR_ALIGN_SIZE,
1710 				sizeof(struct rk_hash_ctx));
1711 	if (!priv->hw_ctx)
1712 		return -ENOMEM;
1713 
1714 	ret = rk_crypto_set_clk(dev);
1715 	if (ret)
1716 		return ret;
1717 
1718 	rk_crypto_enable_clk(dev);
1719 
1720 	hw_crypto_reset();
1721 
1722 	rk_crypto_disable_clk(dev);
1723 
1724 	return 0;
1725 }
1726 
/* Baseline capability set: hashes, HMAC, RSA, DES and AES (no SM algos). */
static const struct rk_crypto_soc_data soc_data_base = {
	.capability = CRYPTO_MD5 |
		      CRYPTO_SHA1 |
		      CRYPTO_SHA256 |
		      CRYPTO_SHA512 |
		      CRYPTO_HMAC_MD5 |
		      CRYPTO_HMAC_SHA1 |
		      CRYPTO_HMAC_SHA256 |
		      CRYPTO_HMAC_SHA512 |
		      CRYPTO_RSA512 |
		      CRYPTO_RSA1024 |
		      CRYPTO_RSA2048 |
		      CRYPTO_RSA3072 |
		      CRYPTO_RSA4096 |
		      CRYPTO_DES |
		      CRYPTO_AES,
};

/* Baseline plus the SM2/SM3/SM4-capable engines (e.g. rv1126, rk356x). */
static const struct rk_crypto_soc_data soc_data_base_sm = {
	.capability = CRYPTO_MD5 |
		      CRYPTO_SHA1 |
		      CRYPTO_SHA256 |
		      CRYPTO_SHA512 |
		      CRYPTO_SM3 |
		      CRYPTO_HMAC_MD5 |
		      CRYPTO_HMAC_SHA1 |
		      CRYPTO_HMAC_SHA256 |
		      CRYPTO_HMAC_SHA512 |
		      CRYPTO_HMAC_SM3 |
		      CRYPTO_RSA512 |
		      CRYPTO_RSA1024 |
		      CRYPTO_RSA2048 |
		      CRYPTO_RSA3072 |
		      CRYPTO_RSA4096 |
		      CRYPTO_DES |
		      CRYPTO_AES |
		      CRYPTO_SM4,
};

/* rk1808: reduced set — no SHA-512, no ciphers, RSA and basic hashes only. */
static const struct rk_crypto_soc_data soc_data_rk1808 = {
	.capability = CRYPTO_MD5 |
		      CRYPTO_SHA1 |
		      CRYPTO_SHA256 |
		      CRYPTO_HMAC_MD5 |
		      CRYPTO_HMAC_SHA1 |
		      CRYPTO_HMAC_SHA256 |
		      CRYPTO_RSA512 |
		      CRYPTO_RSA1024 |
		      CRYPTO_RSA2048 |
		      CRYPTO_RSA3072 |
		      CRYPTO_RSA4096,
};

/* crypto v3/v4: capability is queried from the hardware at probe time. */
static const struct rk_crypto_soc_data soc_data_cryptov3 = {
	.capability  = 0,
	.dynamic_cap = crypto_v3_dynamic_cap,
};
1784 
/* Compatible strings mapped to their per-SoC capability data. */
static const struct udevice_id rockchip_crypto_ids[] = {
	{
		.compatible = "rockchip,px30-crypto",
		.data = (ulong)&soc_data_base
	},
	{
		.compatible = "rockchip,rk1808-crypto",
		.data = (ulong)&soc_data_rk1808
	},
	{
		.compatible = "rockchip,rk3308-crypto",
		.data = (ulong)&soc_data_base
	},
	{
		.compatible = "rockchip,rv1126-crypto",
		.data = (ulong)&soc_data_base_sm
	},
	{
		.compatible = "rockchip,rk3568-crypto",
		.data = (ulong)&soc_data_base_sm
	},
	{
		.compatible = "rockchip,rk3588-crypto",
		.data = (ulong)&soc_data_base_sm
	},
	{
		.compatible = "rockchip,crypto-v3",
		.data = (ulong)&soc_data_cryptov3
	},
	{
		.compatible = "rockchip,crypto-v4",
		.data = (ulong)&soc_data_cryptov3 /* reuse crypto v3 config */
	},
	{ }
};
1820 
/* UCLASS_CRYPTO driver registration for the v2+ Rockchip crypto engine. */
U_BOOT_DRIVER(rockchip_crypto_v2) = {
	.name		= "rockchip_crypto_v2",
	.id		= UCLASS_CRYPTO,
	.of_match	= rockchip_crypto_ids,
	.ops		= &rockchip_crypto_ops,
	.probe		= rockchip_crypto_probe,
	.ofdata_to_platdata = rockchip_crypto_ofdata_to_platdata,
	.priv_auto_alloc_size = sizeof(struct rockchip_crypto_priv),
};
1830