xref: /rk3399_rockchip-uboot/drivers/crypto/rockchip/crypto_v2.c (revision 4a251cba6b2a4eac96fc47a81edaf3838e352aee)
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * Copyright (c) 2019 Fuzhou Rockchip Electronics Co., Ltd
4  */
5 
6 #include <common.h>
7 #include <clk.h>
8 #include <crypto.h>
9 #include <dm.h>
10 #include <asm/io.h>
11 #include <clk-uclass.h>
12 #include <asm/arch/hardware.h>
13 #include <asm/arch/clock.h>
14 #include <rockchip/crypto_ecc.h>
15 #include <rockchip/crypto_hash_cache.h>
16 #include <rockchip/crypto_v2.h>
17 #include <rockchip/crypto_v2_pka.h>
18 
/* Magic value stored in rk_hash_ctx.magic to validate a live hash context */
#define	RK_HASH_CTX_MAGIC		0x1A1A1A1A

/* Extract the major-version field from a CRYPTO_*_VERSION register value */
#define CRYPTO_MAJOR_VER(ver)		((ver) & 0x0f000000)

#define CRYPTO_MAJOR_VER_3		0x03000000
#define CRYPTO_MAJOR_VER_4		0x04000000
#ifdef CONFIG_ROCKCHIP_RK3562
/* NOTE(review): secure crypto base used by keyladder on RK3562 — confirm vs TRM */
#define CRYPTO_S_BY_KEYLAD_BASE  	0xFF8A8000
#endif

/* Trace helper: prints function/line in DEBUG builds, compiles out otherwise */
#ifdef DEBUG
#define IMSG(format, ...) printf("[%s, %05d]-trace: " format "\n", \
				 __func__, __LINE__, ##__VA_ARGS__)
#else
#define IMSG(format, ...)
#endif
35 
/* DMA link-list-item descriptor; field layout is fixed by the crypto IP */
struct crypto_lli_desc {
	u32 src_addr;		/* physical source address */
	u32 src_len;		/* source length in bytes */
	u32 dst_addr;		/* physical destination address */
	u32 dst_len;		/* destination length in bytes */
	u32 user_define;	/* LLI_USER_* flags (string start/last, key chn) */
	u32 reserve;
	u32 dma_ctrl;		/* LLI_DMA_CTRL_* flags (pause/last/done) */
	u32 next_addr;		/* physical address of the next descriptor */
};
46 
/* Software state of one in-flight hash/HMAC computation */
struct rk_hash_ctx {
	struct crypto_lli_desc		data_lli;	/* lli desc */
	struct crypto_hash_cache	*hash_cache;	/* chunk aligner/cache */
	u32				magic;		/* to check ctx */
	u32				algo;		/* hash algo */
	u8				digest_size;	/* hash out length */
	u8				reserved[3];
};
55 
/* Per-SoC data: static capability mask plus optional runtime probe hook */
struct rk_crypto_soc_data {
	u32 capability;			/* static capability bits */
	u32 (*dynamic_cap)(void);	/* optional runtime capability probe */
};
60 
/* Driver-private state attached to the crypto udevice */
struct rockchip_crypto_priv {
	fdt_addr_t			reg;		/* MMIO base address */
	u32				frequency;	/* clock rate — presumably from DT; confirm at probe */
	char				*clocks;	/* clock description blob — TODO confirm format */
	u32				*frequencies;	/* per-clock rates matching 'clocks' */
	u32				nclocks;	/* number of device clocks */
	u32				freq_nclocks;	/* entries in frequencies[] */
	u32				length;		/* bytes hashed so far in current stream */
	struct rk_hash_ctx		*hw_ctx;	/* current hash context */
	struct rk_crypto_soc_data	*soc_data;
};
72 
#define LLI_ADDR_ALIGN_SIZE	8
#define DATA_ADDR_ALIGN_SIZE	8
#define DATA_LEN_ALIGN_SIZE	64

/* crypto timeout 500ms, must support more than 32M data per times*/
#define HASH_UPDATE_LIMIT	(32 * 1024 * 1024)
#define RK_CRYPTO_TIMEOUT	500000

/*
 * Busy-poll 'condition' in 1us steps until it goes false or 'timeout'
 * iterations elapse; evaluates to 0 on success, -ETIMEDOUT on timeout.
 */
#define RK_POLL_TIMEOUT(condition, timeout) \
({ \
	int time_out = timeout; \
	while (condition) { \
		if (--time_out <= 0) { \
			debug("[%s] %d: time out!\n", __func__,\
				__LINE__); \
			break; \
		} \
		udelay(1); \
	} \
	(time_out <= 0) ? -ETIMEDOUT : 0; \
})

/*
 * Wait for the tag-valid flag of 'channel' (only on IP versions that
 * require it), then clear the flag; evaluates to 0 or -ETIMEDOUT.
 */
#define WAIT_TAG_VALID(channel, timeout) ({ \
	u32 tag_mask = CRYPTO_CH0_TAG_VALID << (channel);\
	int ret = 0;\
	if (is_check_tag_valid()) { \
		ret = RK_POLL_TIMEOUT(!(crypto_read(CRYPTO_TAG_VALID) & tag_mask),\
				      timeout);\
	} \
	crypto_write(crypto_read(CRYPTO_TAG_VALID) & tag_mask, CRYPTO_TAG_VALID);\
	ret;\
})

/* Addresses are identity-mapped here: phys == virt (truncated to 32 bit) */
#define virt_to_phys(addr)		(((unsigned long)addr) & 0xffffffff)
#define phys_to_virt(addr, area)	((unsigned long)addr)

#define align_malloc(bytes, alignment)	memalign(alignment, bytes)
#define align_free(addr)		do {if (addr) free(addr);} while (0)

#define ROUNDUP(size, alignment)	round_up(size, alignment)
#define cache_op_inner(type, addr, size) \
					crypto_flush_cacheline((ulong)addr, size)

/* Modes consuming an IV: everything except ECB and the pure MAC modes */
#define IS_NEED_IV(rk_mode) ((rk_mode) != RK_MODE_ECB && \
			     (rk_mode) != RK_MODE_CMAC && \
			     (rk_mode) != RK_MODE_CBC_MAC)

/* Modes producing an authentication tag */
#define IS_NEED_TAG(rk_mode) ((rk_mode) == RK_MODE_CMAC || \
			      (rk_mode) == RK_MODE_CBC_MAC || \
			      (rk_mode) == RK_MODE_CCM || \
			      (rk_mode) == RK_MODE_GCM)

/* Pure MAC modes: tag output only, no ciphertext */
#define IS_MAC_MODE(rk_mode) ((rk_mode) == RK_MODE_CMAC || \
			      (rk_mode) == RK_MODE_CBC_MAC)

/* Authenticated-encryption modes */
#define IS_AE_MODE(rk_mode) ((rk_mode) == RK_MODE_CCM || \
			     (rk_mode) == RK_MODE_GCM)
130 
/* MMIO base of the crypto block (used by the crypto_read/write accessors) */
fdt_addr_t crypto_base;
/* Cached CRYPTO_CRYPTO_VERSION_NEW value, refreshed in hw_crypto_reset() */
static uint32_t g_crypto_version;

static inline bool is_check_hash_valid(void)
{
	/* crypto < v4 need to check hash valid */
	return CRYPTO_MAJOR_VER(g_crypto_version) < CRYPTO_MAJOR_VER_4;
}

static inline bool is_check_tag_valid(void)
{
	/* crypto < v4 need to check tag valid */
	return CRYPTO_MAJOR_VER(g_crypto_version) < CRYPTO_MAJOR_VER_4;
}
145 
/*
 * Split a 32-bit word into four bytes, most-significant byte first
 * (big-endian byte order).
 */
static inline void word2byte_be(u32 word, u8 *ch)
{
	int i;

	for (i = 0; i < 4; i++)
		ch[i] = (u8)(word >> (24 - 8 * i));
}
153 
/* Assemble four bytes (big-endian order) into one 32-bit word. */
static inline u32 byte2word_be(const u8 *ch)
{
	u32 word = 0;
	int i;

	for (i = 0; i < 4; i++)
		word = (word << 8) | ch[i];

	return word;
}
158 
159 static inline void clear_regs(u32 base, u32 words)
160 {
161 	int i;
162 
163 	/*clear out register*/
164 	for (i = 0; i < words; i++)
165 		crypto_write(0, base + 4 * i);
166 }
167 
/* Clear all hardware key registers across every key channel. */
static inline void clear_key_regs(void)
{
	clear_regs(CRYPTO_CH0_KEY_0, CRYPTO_KEY_CHANNEL_NUM * 4);
}
172 
/*
 * Read 'data_len' bytes from consecutive 32-bit registers at 'base'
 * into 'data', unpacking each register word big-endian.  A trailing
 * partial word is read in full and only the needed bytes are copied.
 */
static inline void read_regs(u32 base, u8 *data, u32 data_len)
{
	u8 tmp_buf[4];
	u32 i;

	for (i = 0; i < data_len / 4; i++)
		word2byte_be(crypto_read(base + i * 4),
			     data + i * 4);

	if (data_len % 4) {
		word2byte_be(crypto_read(base + i * 4), tmp_buf);
		memcpy(data + i * 4, tmp_buf, data_len % 4);
	}
}
187 
/*
 * Write 'data_len' bytes into consecutive 32-bit registers at 'base',
 * packing each 4 bytes big-endian.  A trailing partial word is
 * zero-padded before being written.
 */
static inline void write_regs(u32 base, const u8 *data, u32 data_len)
{
	u8 tmp_buf[4];
	u32 i;

	for (i = 0; i < data_len / 4; i++, base += 4)
		crypto_write(byte2word_be(data + i * 4), base);

	if (data_len % 4) {
		memset(tmp_buf, 0x00, sizeof(tmp_buf));
		memcpy((u8 *)tmp_buf, data + i * 4, data_len % 4);
		crypto_write(byte2word_be(tmp_buf), base);
	}
}
202 
/* Load 'key_len' bytes of key material into key channel 'chn' (0x10 stride). */
static inline void write_key_reg(u32 chn, const u8 *key, u32 key_len)
{
	write_regs(CRYPTO_CH0_KEY_0 + chn * 0x10, key, key_len);
}
207 
/*
 * Load an IV into cipher channel 'chn' and program its length register.
 * The IV registers are cleared first; a NULL or empty IV leaves them
 * zeroed and does not touch the length register.
 */
static inline void set_iv_reg(u32 chn, const u8 *iv, u32 iv_len)
{
	u32 base_iv;

	base_iv = CRYPTO_CH0_IV_0 + chn * 0x10;

	/* clear iv */
	clear_regs(base_iv, 4);

	if (!iv || iv_len == 0)
		return;

	write_regs(base_iv, iv, iv_len);

	crypto_write(iv_len, CRYPTO_CH0_IV_LEN_0 + 4 * chn);
}
224 
225 static inline void get_iv_reg(u32 chn, u8 *iv, u32 iv_len)
226 {
227 	u32 base_iv;
228 
229 	base_iv = CRYPTO_CH0_IV_0 + chn * 0x10;
230 
231 	read_regs(base_iv, iv, iv_len);
232 }
233 
234 static inline void get_tag_from_reg(u32 chn, u8 *tag, u32 tag_len)
235 {
236 	u32 i;
237 	u32 chn_base = CRYPTO_CH0_TAG_0 + 0x10 * chn;
238 
239 	for (i = 0; i < tag_len / 4; i++, chn_base += 4)
240 		word2byte_be(crypto_read(chn_base), tag + 4 * i);
241 }
242 
/*
 * Enable or disable every clock listed for the crypto device.
 *
 * @dev:    crypto udevice
 * @enable: non-zero to enable, zero to disable
 *
 * Returns 0 on success or the first clk framework error; -ENOSYS from a
 * clock that does not implement enable/disable is tolerated.
 */
static int rk_crypto_do_enable_clk(struct udevice *dev, int enable)
{
	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
	struct clk clk;
	int i, ret;

	for (i = 0; i < priv->nclocks; i++) {
		ret = clk_get_by_index(dev, i, &clk);
		if (ret < 0) {
			printf("Failed to get clk index %d, ret=%d\n", i, ret);
			return ret;
		}

		if (enable)
			ret = clk_enable(&clk);
		else
			ret = clk_disable(&clk);
		if (ret < 0 && ret != -ENOSYS) {
			printf("Failed to enable(%d) clk(%ld): ret=%d\n",
			       enable, clk.id, ret);
			return ret;
		}
	}

	return 0;
}
269 
/* Convenience wrapper: gate on all crypto clocks. */
static int rk_crypto_enable_clk(struct udevice *dev)
{
	return rk_crypto_do_enable_clk(dev, 1);
}

/* Convenience wrapper: gate off all crypto clocks. */
static int rk_crypto_disable_clk(struct udevice *dev)
{
	return rk_crypto_do_enable_clk(dev, 0);
}
279 
/*
 * Probe the v3 IP's per-submodule version registers and build the
 * runtime capability bitmask.  RSA (and optionally EC) support is
 * advertised unconditionally; each hash/HMAC/cipher algorithm is added
 * only when its feature flag is present in the hardware.
 */
static u32 crypto_v3_dynamic_cap(void)
{
	u32 capability = 0;
	u32 ver_reg, i;
	struct cap_map {
		u32 ver_offset;	/* version register to probe */
		u32 mask;	/* feature flag(s) within that register */
		u32 cap_bit;	/* capability bit to advertise */
	};
	const struct cap_map cap_tbl[] = {
	{CRYPTO_HASH_VERSION, CRYPTO_HASH_MD5_FLAG,    CRYPTO_MD5},
	{CRYPTO_HASH_VERSION, CRYPTO_HASH_SHA1_FLAG,   CRYPTO_SHA1},
	{CRYPTO_HASH_VERSION, CRYPTO_HASH_SHA256_FLAG, CRYPTO_SHA256},
	{CRYPTO_HASH_VERSION, CRYPTO_HASH_SHA512_FLAG, CRYPTO_SHA512},
	{CRYPTO_HASH_VERSION, CRYPTO_HASH_SM3_FLAG,    CRYPTO_SM3},

	{CRYPTO_HMAC_VERSION, CRYPTO_HMAC_MD5_FLAG,    CRYPTO_HMAC_MD5},
	{CRYPTO_HMAC_VERSION, CRYPTO_HMAC_SHA1_FLAG,   CRYPTO_HMAC_SHA1},
	{CRYPTO_HMAC_VERSION, CRYPTO_HMAC_SHA256_FLAG, CRYPTO_HMAC_SHA256},
	{CRYPTO_HMAC_VERSION, CRYPTO_HMAC_SHA512_FLAG, CRYPTO_HMAC_SHA512},
	{CRYPTO_HMAC_VERSION, CRYPTO_HMAC_SM3_FLAG,    CRYPTO_HMAC_SM3},

	{CRYPTO_AES_VERSION,  CRYPTO_AES256_FLAG,      CRYPTO_AES},
	{CRYPTO_DES_VERSION,  CRYPTO_TDES_FLAG,        CRYPTO_DES},
	{CRYPTO_SM4_VERSION,  CRYPTO_ECB_FLAG,         CRYPTO_SM4},
	};

	/* rsa */
	capability = CRYPTO_RSA512 |
		     CRYPTO_RSA1024 |
		     CRYPTO_RSA2048 |
		     CRYPTO_RSA3072 |
		     CRYPTO_RSA4096;

#if CONFIG_IS_ENABLED(ROCKCHIP_EC)
	capability |= (CRYPTO_SM2 |
		       CRYPTO_ECC_192R1 |
		       CRYPTO_ECC_224R1 |
		       CRYPTO_ECC_256R1);
#endif

	/* advertise an algorithm only when all of its flag bits are set */
	for (i = 0; i < ARRAY_SIZE(cap_tbl); i++) {
		ver_reg = crypto_read(cap_tbl[i].ver_offset);

		if ((ver_reg & cap_tbl[i].mask) == cap_tbl[i].mask)
			capability |= cap_tbl[i].cap_bit;
	}

	return capability;
}
330 
/*
 * Soft-reset the PKA and symmetric-crypto engines, then cache the IP
 * version register for later version-dependent checks.
 *
 * Returns 0 on success or -ETIMEDOUT if the reset bits never self-clear.
 */
static int hw_crypto_reset(void)
{
	u32 val = 0, mask = 0;
	int ret;

	val = CRYPTO_SW_PKA_RESET | CRYPTO_SW_CC_RESET;
	/* upper halfword is the write-enable mask for the lower halfword */
	mask = val << CRYPTO_WRITE_MASK_SHIFT;

	/* reset pka and crypto modules*/
	crypto_write(val | mask, CRYPTO_RST_CTL);

	/* wait for the reset to complete (register self-clears) */
	ret = RK_POLL_TIMEOUT(crypto_read(CRYPTO_RST_CTL), RK_CRYPTO_TIMEOUT);

	g_crypto_version = crypto_read(CRYPTO_CRYPTO_VERSION_NEW);

	return ret;
}
349 
350 static void hw_hash_clean_ctx(struct rk_hash_ctx *ctx)
351 {
352 	/* clear hash status */
353 	crypto_write(CRYPTO_WRITE_MASK_ALL | 0, CRYPTO_HASH_CTL);
354 
355 	assert(ctx);
356 	assert(ctx->magic == RK_HASH_CTX_MAGIC);
357 
358 	crypto_hash_cache_free(ctx->hash_cache);
359 
360 	memset(ctx, 0x00, sizeof(*ctx));
361 }
362 
363 static int rk_hash_init(void *hw_ctx, u32 algo)
364 {
365 	struct rk_hash_ctx *tmp_ctx = (struct rk_hash_ctx *)hw_ctx;
366 	u32 reg_ctrl = 0;
367 	int ret;
368 
369 	if (!tmp_ctx)
370 		return -EINVAL;
371 
372 	reg_ctrl = CRYPTO_SW_CC_RESET;
373 	crypto_write(reg_ctrl | (reg_ctrl << CRYPTO_WRITE_MASK_SHIFT),
374 		     CRYPTO_RST_CTL);
375 
376 	/* wait reset compelete */
377 	ret = RK_POLL_TIMEOUT(crypto_read(CRYPTO_RST_CTL),
378 			      RK_CRYPTO_TIMEOUT);
379 
380 	reg_ctrl = 0;
381 	tmp_ctx->algo = algo;
382 	switch (algo) {
383 	case CRYPTO_MD5:
384 	case CRYPTO_HMAC_MD5:
385 		reg_ctrl |= CRYPTO_MODE_MD5;
386 		tmp_ctx->digest_size = 16;
387 		break;
388 	case CRYPTO_SHA1:
389 	case CRYPTO_HMAC_SHA1:
390 		reg_ctrl |= CRYPTO_MODE_SHA1;
391 		tmp_ctx->digest_size = 20;
392 		break;
393 	case CRYPTO_SHA256:
394 	case CRYPTO_HMAC_SHA256:
395 		reg_ctrl |= CRYPTO_MODE_SHA256;
396 		tmp_ctx->digest_size = 32;
397 		break;
398 	case CRYPTO_SHA512:
399 	case CRYPTO_HMAC_SHA512:
400 		reg_ctrl |= CRYPTO_MODE_SHA512;
401 		tmp_ctx->digest_size = 64;
402 		break;
403 	case CRYPTO_SM3:
404 	case CRYPTO_HMAC_SM3:
405 		reg_ctrl |= CRYPTO_MODE_SM3;
406 		tmp_ctx->digest_size = 32;
407 		break;
408 	default:
409 		ret = -EINVAL;
410 		goto exit;
411 	}
412 
413 	/* enable hardware padding */
414 	reg_ctrl |= CRYPTO_HW_PAD_ENABLE;
415 	crypto_write(reg_ctrl | CRYPTO_WRITE_MASK_ALL, CRYPTO_HASH_CTL);
416 
417 	/* FIFO input and output data byte swap */
418 	/* such as B0, B1, B2, B3 -> B3, B2, B1, B0 */
419 	reg_ctrl = CRYPTO_DOUT_BYTESWAP | CRYPTO_DOIN_BYTESWAP;
420 	crypto_write(reg_ctrl | CRYPTO_WRITE_MASK_ALL, CRYPTO_FIFO_CTL);
421 
422 	/* enable src_item_done interrupt */
423 	crypto_write(0, CRYPTO_DMA_INT_EN);
424 
425 	tmp_ctx->magic = RK_HASH_CTX_MAGIC;
426 
427 	return 0;
428 exit:
429 	/* clear hash setting if init failed */
430 	crypto_write(CRYPTO_WRITE_MASK_ALL | 0, CRYPTO_HASH_CTL);
431 
432 	return ret;
433 }
434 
/*
 * Push one chunk of data through the hash engine using a single-entry
 * DMA link-list descriptor and poll for completion.
 *
 * @hw_data:      struct rockchip_crypto_priv * (callback cookie)
 * @data:         chunk to hash; must be DATA_ADDR_ALIGN_SIZE aligned
 * @data_len:     chunk size; multiple of DATA_LEN_ALIGN_SIZE unless last
 * @started_flag: in/out: 0 on the first chunk, set to 1 by this call
 * @is_last:      non-zero for the final chunk of the message
 *
 * Returns 0 on success, -ETIMEDOUT or -EFAULT on DMA failure.
 */
static int rk_hash_direct_calc(void *hw_data, const u8 *data,
			       u32 data_len, u8 *started_flag, u8 is_last)
{
	struct rockchip_crypto_priv *priv = hw_data;
	struct rk_hash_ctx *hash_ctx = priv->hw_ctx;
	struct crypto_lli_desc *lli = &hash_ctx->data_lli;
	int ret = -EINVAL;
	u32 tmp = 0, mask = 0;

	assert(IS_ALIGNED((ulong)data, DATA_ADDR_ALIGN_SIZE));
	assert(is_last || IS_ALIGNED(data_len, DATA_LEN_ALIGN_SIZE));

	debug("%s: data = %p, len = %u, s = %x, l = %x\n",
	      __func__, data, data_len, *started_flag, is_last);

	memset(lli, 0x00, sizeof(*lli));
	lli->src_addr = (u32)virt_to_phys(data);
	lli->src_len = data_len;
	lli->dma_ctrl = LLI_DMA_CTRL_SRC_DONE;

	if (is_last) {
		/* final chunk: close the string and end the DMA chain */
		lli->user_define |= LLI_USER_STRING_LAST;
		lli->dma_ctrl |= LLI_DMA_CTRL_LAST;
	} else {
		/* intermediate chunk: loop back to this descriptor and pause */
		lli->next_addr = (u32)virt_to_phys(lli);
		lli->dma_ctrl |= LLI_DMA_CTRL_PAUSE;
	}

	if (!(*started_flag)) {
		/* first chunk: point DMA at the descriptor and enable hashing */
		lli->user_define |=
			(LLI_USER_STRING_START | LLI_USER_CIPHER_START);
		crypto_write((u32)virt_to_phys(lli), CRYPTO_DMA_LLI_ADDR);
		crypto_write((CRYPTO_HASH_ENABLE << CRYPTO_WRITE_MASK_SHIFT) |
			     CRYPTO_HASH_ENABLE, CRYPTO_HASH_CTL);
		tmp = CRYPTO_DMA_START;
		*started_flag = 1;
	} else {
		/* resume a paused DMA chain */
		tmp = CRYPTO_DMA_RESTART;
	}

	/* flush cache */
	crypto_flush_cacheline((ulong)lli, sizeof(*lli));
	crypto_flush_cacheline((ulong)data, data_len);

	/* start calculate */
	crypto_write(tmp << CRYPTO_WRITE_MASK_SHIFT | tmp,
		     CRYPTO_DMA_CTL);

	/* mask CRYPTO_SYNC_LOCKSTEP_INT_ST flag */
	mask = ~(mask | CRYPTO_SYNC_LOCKSTEP_INT_ST);

	/* wait calc ok */
	ret = RK_POLL_TIMEOUT(!(crypto_read(CRYPTO_DMA_INT_ST) & mask),
			      RK_CRYPTO_TIMEOUT);

	/* clear interrupt status */
	tmp = crypto_read(CRYPTO_DMA_INT_ST);
	crypto_write(tmp, CRYPTO_DMA_INT_ST);

	/* anything but src-done / zero-length completion is an error */
	if ((tmp & mask) != CRYPTO_SRC_ITEM_DONE_INT_ST &&
	    (tmp & mask) != CRYPTO_ZERO_LEN_INT_ST) {
		ret = -EFAULT;
		debug("[%s] %d: CRYPTO_DMA_INT_ST = 0x%x\n",
		      __func__, __LINE__, tmp);
		goto exit;
	}

	/* track total bytes fed to hardware; checked in sha_final */
	priv->length += data_len;
exit:
	return ret;
}
506 
507 int rk_hash_update(void *ctx, const u8 *data, u32 data_len)
508 {
509 	struct rk_hash_ctx *tmp_ctx = (struct rk_hash_ctx *)ctx;
510 	int ret = -EINVAL;
511 
512 	debug("\n");
513 	if (!tmp_ctx || !data)
514 		goto exit;
515 
516 	if (tmp_ctx->digest_size == 0 || tmp_ctx->magic != RK_HASH_CTX_MAGIC)
517 		goto exit;
518 
519 	ret = crypto_hash_update_with_cache(tmp_ctx->hash_cache,
520 					    data, data_len);
521 
522 exit:
523 	/* free lli list */
524 	if (ret)
525 		hw_hash_clean_ctx(tmp_ctx);
526 
527 	return ret;
528 }
529 
530 int rk_hash_final(void *ctx, u8 *digest, size_t len)
531 {
532 	struct rk_hash_ctx *tmp_ctx = (struct rk_hash_ctx *)ctx;
533 	int ret = 0;
534 
535 	if (!digest)
536 		goto exit;
537 
538 	if (!tmp_ctx ||
539 	    tmp_ctx->digest_size == 0 ||
540 	    len > tmp_ctx->digest_size ||
541 	    tmp_ctx->magic != RK_HASH_CTX_MAGIC) {
542 		goto exit;
543 	}
544 
545 	if(is_check_hash_valid()) {
546 		/* wait hash value ok */
547 		ret = RK_POLL_TIMEOUT(!crypto_read(CRYPTO_HASH_VALID),
548 				      RK_CRYPTO_TIMEOUT);
549 	}
550 
551 	read_regs(CRYPTO_HASH_DOUT_0, digest, len);
552 
553 	/* clear hash status */
554 	crypto_write(CRYPTO_HASH_IS_VALID, CRYPTO_HASH_VALID);
555 	crypto_write(CRYPTO_WRITE_MASK_ALL | 0, CRYPTO_HASH_CTL);
556 
557 exit:
558 
559 	return ret;
560 }
561 
/*
 * crypto_ops.capability: start from the SoC capability word and mask
 * out the algorithm families whose driver support is compiled out.
 */
static u32 rockchip_crypto_capability(struct udevice *dev)
{
	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
	u32 capability, mask = 0;

	capability = priv->soc_data->capability;

#if !(CONFIG_IS_ENABLED(ROCKCHIP_CIPHER))
	mask |= (CRYPTO_DES | CRYPTO_AES | CRYPTO_SM4);
#endif

#if !(CONFIG_IS_ENABLED(ROCKCHIP_HMAC))
	mask |= (CRYPTO_HMAC_MD5 | CRYPTO_HMAC_SHA1 | CRYPTO_HMAC_SHA256 |
			 CRYPTO_HMAC_SHA512 | CRYPTO_HMAC_SM3);
#endif

#if !(CONFIG_IS_ENABLED(ROCKCHIP_RSA))
	mask |= (CRYPTO_RSA512 | CRYPTO_RSA1024 | CRYPTO_RSA2048 |
			 CRYPTO_RSA3072 | CRYPTO_RSA4096);
#endif

	return capability & (~mask);
}
585 
/*
 * crypto_ops.sha_init: allocate the chunking/alignment cache sized for
 * ctx->length bytes, enable the clocks and program the hash engine.
 *
 * Clocks stay enabled on success; sha_final (or a failing sha_update)
 * releases them.  On init failure the clocks are released here.
 */
static int rockchip_crypto_sha_init(struct udevice *dev, sha_context *ctx)
{
	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
	struct rk_hash_ctx *hash_ctx = priv->hw_ctx;
	int ret = 0;

	if (!ctx)
		return -EINVAL;

	memset(hash_ctx, 0x00, sizeof(*hash_ctx));

	/* running byte counter, verified against ctx->length in sha_final */
	priv->length = 0;

	hash_ctx->hash_cache = crypto_hash_cache_alloc(rk_hash_direct_calc,
						       priv, ctx->length,
						       DATA_ADDR_ALIGN_SIZE,
						       DATA_LEN_ALIGN_SIZE);
	if (!hash_ctx->hash_cache)
		return -EFAULT;

	rk_crypto_enable_clk(dev);
	ret = rk_hash_init(hash_ctx, ctx->algo);
	if (ret)
		rk_crypto_disable_clk(dev);

	return ret;
}
613 
614 static int rockchip_crypto_sha_update(struct udevice *dev,
615 				      u32 *input, u32 len)
616 {
617 	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
618 	int ret, i;
619 	u8 *p;
620 
621 	if (!len) {
622 		ret = -EINVAL;
623 		goto exit;
624 	}
625 
626 	p = (u8 *)input;
627 
628 	for (i = 0; i < len / HASH_UPDATE_LIMIT; i++, p += HASH_UPDATE_LIMIT) {
629 		ret = rk_hash_update(priv->hw_ctx, p, HASH_UPDATE_LIMIT);
630 		if (ret)
631 			goto exit;
632 	}
633 
634 	if (len % HASH_UPDATE_LIMIT)
635 		ret = rk_hash_update(priv->hw_ctx, p, len % HASH_UPDATE_LIMIT);
636 
637 exit:
638 	if (ret)
639 		rk_crypto_disable_clk(dev);
640 
641 	return ret;
642 }
643 
/*
 * crypto_ops.sha_final: verify the byte count fed to hardware matches
 * the length declared at init, read the digest out, then unconditionally
 * tear down the context and release the clocks.
 */
static int rockchip_crypto_sha_final(struct udevice *dev,
				     sha_context *ctx, u8 *output)
{
	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
	u32 nbits;
	int ret;

	nbits = crypto_algo_nbits(ctx->algo);

	if (priv->length != ctx->length) {
		printf("total length(0x%08x) != init length(0x%08x)!\n",
		       priv->length, ctx->length);
		ret = -EIO;
		goto exit;
	}

	ret = rk_hash_final(priv->hw_ctx, (u8 *)output, BITS2BYTE(nbits));

exit:
	hw_hash_clean_ctx(priv->hw_ctx);
	rk_crypto_disable_clk(dev);

	return ret;
}
668 
669 #if CONFIG_IS_ENABLED(ROCKCHIP_HMAC)
/*
 * Load the HMAC key and configure the hash engine for an HMAC run.
 *
 * @hw_ctx:  rk_hash_ctx to initialize
 * @algo:    CRYPTO_HMAC_* algorithm id
 * @key:     HMAC key
 * @key_len: key length in bytes (1..64)
 *
 * NOTE(review): the 64-byte cap matches the MD5/SHA1/SHA256/SM3 block
 * size; SHA512 uses a 128-byte block — confirm the hardware key register
 * depth before relying on HMAC-SHA512 with long keys.
 *
 * Returns 0 on success, negative errno otherwise.
 */
int rk_hmac_init(void *hw_ctx, u32 algo, u8 *key, u32 key_len)
{
	u32 reg_ctrl = 0;
	int ret;

	if (!key || !key_len || key_len > 64)
		return -EINVAL;

	clear_key_regs();

	/* the HMAC key is always loaded into key channel 0 */
	write_key_reg(0, key, key_len);

	ret = rk_hash_init(hw_ctx, algo);
	if (ret)
		return ret;

	/* switch the freshly configured hash engine into HMAC mode */
	reg_ctrl = crypto_read(CRYPTO_HASH_CTL) | CRYPTO_HMAC_ENABLE;
	crypto_write(reg_ctrl | CRYPTO_WRITE_MASK_ALL, CRYPTO_HASH_CTL);

	return ret;
}
691 
/*
 * crypto_ops.hmac_init: same setup as sha_init, plus key loading and
 * HMAC enable via rk_hmac_init().  Clocks stay enabled on success and
 * are released here on failure.
 */
static int rockchip_crypto_hmac_init(struct udevice *dev,
				     sha_context *ctx, u8 *key, u32 key_len)
{
	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
	struct rk_hash_ctx *hash_ctx = priv->hw_ctx;
	int ret = 0;

	if (!ctx)
		return -EINVAL;

	memset(hash_ctx, 0x00, sizeof(*hash_ctx));

	/* running byte counter, verified against ctx->length in final */
	priv->length = 0;

	hash_ctx->hash_cache = crypto_hash_cache_alloc(rk_hash_direct_calc,
						       priv, ctx->length,
						       DATA_ADDR_ALIGN_SIZE,
						       DATA_LEN_ALIGN_SIZE);
	if (!hash_ctx->hash_cache)
		return -EFAULT;

	rk_crypto_enable_clk(dev);
	ret = rk_hmac_init(priv->hw_ctx, ctx->algo, key, key_len);
	if (ret)
		rk_crypto_disable_clk(dev);

	return ret;
}
720 
/* HMAC update is identical to plain hash update at this layer. */
static int rockchip_crypto_hmac_update(struct udevice *dev,
				       u32 *input, u32 len)
{
	return rockchip_crypto_sha_update(dev, input, len);
}

/* HMAC final is identical to plain hash final at this layer. */
static int rockchip_crypto_hmac_final(struct udevice *dev,
				      sha_context *ctx, u8 *output)
{
	return rockchip_crypto_sha_final(dev, ctx, output);
}
732 
733 #endif
734 
735 #if CONFIG_IS_ENABLED(ROCKCHIP_CIPHER)
/* Key/IV channel used for cipher operations (channel 0 by default) */
static u8 g_key_chn;

/* Map RK_MODE_* cipher modes to the CRYPTO_BC_* bits of CRYPTO_BC_CTL */
static const u32 rk_mode2bc_mode[RK_MODE_MAX] = {
	[RK_MODE_ECB] = CRYPTO_BC_ECB,
	[RK_MODE_CBC] = CRYPTO_BC_CBC,
	[RK_MODE_CTS] = CRYPTO_BC_CTS,
	[RK_MODE_CTR] = CRYPTO_BC_CTR,
	[RK_MODE_CFB] = CRYPTO_BC_CFB,
	[RK_MODE_OFB] = CRYPTO_BC_OFB,
	[RK_MODE_XTS] = CRYPTO_BC_XTS,
	[RK_MODE_CCM] = CRYPTO_BC_CCM,
	[RK_MODE_GCM] = CRYPTO_BC_GCM,
	[RK_MODE_CMAC] = CRYPTO_BC_CMAC,
	[RK_MODE_CBC_MAC] = CRYPTO_BC_CBC_MAC,
};
751 
/* Program the 64-bit plaintext/ciphertext length register of channel 'chn'. */
static inline void set_pc_len_reg(u32 chn, u64 pc_len)
{
	u32 chn_base = CRYPTO_CH0_PC_LEN_0 + chn * 0x08;

	crypto_write(pc_len & 0xffffffff, chn_base);
	crypto_write(pc_len >> 32, chn_base + 4);
}

/* Program the 64-bit AAD length register of channel 'chn'. */
static inline void set_aad_len_reg(u32 chn, u64 pc_len)
{
	u32 chn_base = CRYPTO_CH0_AAD_LEN_0 + chn * 0x08;

	crypto_write(pc_len & 0xffffffff, chn_base);
	crypto_write(pc_len >> 32, chn_base + 4);
}
767 
768 static inline bool is_des_mode(u32 rk_mode)
769 {
770 	return (rk_mode == RK_MODE_ECB ||
771 		rk_mode == RK_MODE_CBC ||
772 		rk_mode == RK_MODE_CFB ||
773 		rk_mode == RK_MODE_OFB);
774 }
775 
/*
 * Dump DMA/cipher engine state after a failed transfer, comparing the
 * descriptor contents against what was requested.  Output only appears
 * in DEBUG builds (IMSG compiles out otherwise).
 */
static void dump_crypto_state(struct crypto_lli_desc *desc,
			      u32 tmp, u32 expt_int,
			      const u8 *in, const u8 *out,
			      u32 len, int ret)
{
	IMSG("%s\n", ret == -ETIME ? "timeout" : "dismatch");

	IMSG("CRYPTO_DMA_INT_ST = %08x, expect_int = %08x\n",
	     tmp, expt_int);
	IMSG("data desc		= %p\n", desc);
	IMSG("\taddr_in		= [%08x <=> %08x]\n",
	     desc->src_addr, (u32)virt_to_phys(in));
	IMSG("\taddr_out	= [%08x <=> %08x]\n",
	     desc->dst_addr, (u32)virt_to_phys(out));
	IMSG("\tsrc_len		= [%08x <=> %08x]\n",
	     desc->src_len, (u32)len);
	IMSG("\tdst_len		= %08x\n", desc->dst_len);
	IMSG("\tdma_ctl		= %08x\n", desc->dma_ctrl);
	IMSG("\tuser_define	= %08x\n", desc->user_define);

	IMSG("\n\nDMA CRYPTO_DMA_LLI_ADDR status = %08x\n",
	     crypto_read(CRYPTO_DMA_LLI_ADDR));
	IMSG("DMA CRYPTO_DMA_ST status = %08x\n",
	     crypto_read(CRYPTO_DMA_ST));
	IMSG("DMA CRYPTO_DMA_STATE status = %08x\n",
	     crypto_read(CRYPTO_DMA_STATE));
	IMSG("DMA CRYPTO_DMA_LLI_RADDR status = %08x\n",
	     crypto_read(CRYPTO_DMA_LLI_RADDR));
	IMSG("DMA CRYPTO_DMA_SRC_RADDR status = %08x\n",
	     crypto_read(CRYPTO_DMA_SRC_RADDR));
	IMSG("DMA CRYPTO_DMA_DST_RADDR status = %08x\n",
	     crypto_read(CRYPTO_DMA_DST_RADDR));
	IMSG("DMA CRYPTO_CIPHER_ST status = %08x\n",
	     crypto_read(CRYPTO_CIPHER_ST));
	IMSG("DMA CRYPTO_CIPHER_STATE status = %08x\n",
	     crypto_read(CRYPTO_CIPHER_STATE));
	IMSG("DMA CRYPTO_TAG_VALID status = %08x\n",
	     crypto_read(CRYPTO_TAG_VALID));
	/* 0x618: lockstep status — no named macro for this offset; confirm vs TRM */
	IMSG("LOCKSTEP status = %08x\n\n",
	     crypto_read(0x618));

	IMSG("dst %dbyte not transferred\n",
	     desc->dst_addr + desc->dst_len -
	     crypto_read(CRYPTO_DMA_DST_RADDR));
}
821 
/*
 * Build the CCM A0/counter block from the nonce and load it as the IV.
 *
 * Mirrors OpenSSL's CRYPTO_ccm128_setiv(): the flags byte carries q-1
 * (q = 15 - nlen) in its low 3 bits, the nonce fills bytes 1..nlen and
 * the trailing counter bytes stay zero.
 *
 * Returns 0 on success, -EINVAL for an over-short nonce.
 */
static int ccm128_set_iv_reg(u32 chn, const u8 *nonce, u32 nlen)
{
	u8 iv_buf[AES_BLOCK_SIZE];
	u32 L;

	memset(iv_buf, 0x00, sizeof(iv_buf));

	/* q - 1, where q = 15 - nlen is the width of the length field */
	L = 15 - nlen;
	iv_buf[0] = ((u8)(L - 1) & 7);

	/* the L parameter */
	L = iv_buf[0] & 7;

	/* nonce is too short */
	if (nlen < (14 - L))
		return -EINVAL;

	/* clear aad flag */
	iv_buf[0] &= ~0x40;
	memcpy(&iv_buf[1], nonce, 14 - L);

	set_iv_reg(chn, iv_buf, AES_BLOCK_SIZE);

	return 0;
}
847 
/*
 * Encode the CCM B1 associated-data length prefix (RFC 3610): lengths
 * below 0xFF00 are two big-endian bytes; larger lengths are encoded as
 * 0xFF 0xFE followed by the 32-bit big-endian length (6 bytes total).
 *
 * @aad_len:      associated data length in bytes
 * @padding:      out: encoded length field (caller provides >= 6 bytes)
 * @padding_size: out: number of bytes written (0, 2 or 6)
 *
 * Fix: in the 6-byte encoding the least-significant length byte
 * (padding[5]) was never written, corrupting the MAC for any AAD whose
 * length has a non-zero low byte.
 */
static void ccm_aad_padding(u32 aad_len, u8 *padding, u32 *padding_size)
{
	u32 i;

	if (aad_len == 0) {
		*padding_size = 0;
		return;
	}

	i = aad_len < (0x10000 - 0x100) ? 2 : 6;

	if (i == 2) {
		padding[0] = (u8)(aad_len >> 8);
		padding[1] = (u8)aad_len;
	} else {
		padding[0] = 0xFF;
		padding[1] = 0xFE;
		padding[2] = (u8)(aad_len >> 24);
		padding[3] = (u8)(aad_len >> 16);
		padding[4] = (u8)(aad_len >> 8);
		padding[5] = (u8)aad_len;
	}

	*padding_size = i;
}
872 
/*
 * Finish the CCM B0 block in 'aad_iv': fold (tag_size-2)/2 into flag
 * bits 3..5, store the 32-bit message length big-endian in bytes
 * 12..15, and set the AAD-present flag (0x40) when there is associated
 * data.  Always returns 0.
 */
static int ccm_compose_aad_iv(u8 *aad_iv, u32 data_len, u32 aad_len, u32 tag_size)
{
	u32 i;

	aad_iv[0] |= (u8)((((tag_size - 2) / 2) & 7) << 3);

	if (aad_len)
		aad_iv[0] |= 0x40;	/* associated data present */

	/* message length, big-endian, in the last four bytes */
	for (i = 0; i < 4; i++)
		aad_iv[12 + i] = (u8)(data_len >> (24 - 8 * i));

	return 0;
}
887 
/*
 * Program the block-cipher control, key and IV registers for one
 * operation; the transfer itself is started later by hw_cipher_crypt()
 * setting CRYPTO_BC_ENABLE.
 *
 * @chn:     key/IV channel to use
 * @key:     cipher key, or NULL with key_len != 0 to select the
 *           hardware key table (OTP/keyladder) instead of a user key
 * @twk_key: XTS tweak key (loaded into channel chn + 4)
 * @key_len: key length in bytes (> DES_BLOCK_SIZE selects TDES)
 * @iv:      IV/nonce, or NULL
 * @iv_len:  IV length in bytes
 * @algo:    CRYPTO_DES, CRYPTO_AES or CRYPTO_SM4
 * @mode:    mode word; rk mode extracted with RK_GET_RK_MODE()
 * @enc:     true to encrypt, false to decrypt
 *
 * Returns 0 on success, -EINVAL for unsupported algo/mode/key size.
 */
static int hw_cipher_init(u32 chn, const u8 *key, const u8 *twk_key,
			  u32 key_len, const u8 *iv, u32 iv_len,
			  u32 algo, u32 mode, bool enc)
{
	u32 rk_mode = RK_GET_RK_MODE(mode);
	u32 key_chn_sel = chn;
	u32 reg_ctrl = 0;
	bool use_otpkey = false;

	/* NULL key with a non-zero length selects the hardware key table */
	if (!key && key_len)
		use_otpkey = true;

	IMSG("%s: key addr is %p, key_len is %d, iv addr is %p",
	     __func__, key, key_len, iv);
	if (rk_mode >= RK_MODE_MAX)
		return -EINVAL;

	switch (algo) {
	case CRYPTO_DES:
		/* anything longer than one DES block implies 2/3-key TDES */
		if (key_len > DES_BLOCK_SIZE)
			reg_ctrl |= CRYPTO_BC_TDES;
		else
			reg_ctrl |= CRYPTO_BC_DES;
		break;
	case CRYPTO_AES:
		reg_ctrl |= CRYPTO_BC_AES;
		break;
	case CRYPTO_SM4:
		reg_ctrl |= CRYPTO_BC_SM4;
		break;
	default:
		return -EINVAL;
	}

	if (algo == CRYPTO_AES || algo == CRYPTO_SM4) {
		switch (key_len) {
		case AES_KEYSIZE_128:
			reg_ctrl |= CRYPTO_BC_128_bit_key;
			break;
		case AES_KEYSIZE_192:
			reg_ctrl |= CRYPTO_BC_192_bit_key;
			break;
		case AES_KEYSIZE_256:
			reg_ctrl |= CRYPTO_BC_256_bit_key;
			break;
		default:
			return -EINVAL;
		}
	}

	reg_ctrl |= rk_mode2bc_mode[rk_mode];
	if (!enc)
		reg_ctrl |= CRYPTO_BC_DECRYPT;

	/* write key data to reg */
	if (!use_otpkey) {
		write_key_reg(key_chn_sel, key, key_len);
		crypto_write(CRYPTO_SEL_USER, CRYPTO_KEY_SEL);
	} else {
		crypto_write(CRYPTO_SEL_KEYTABLE, CRYPTO_KEY_SEL);
	}

	/* write twk key for xts mode */
	if (rk_mode == RK_MODE_XTS)
		write_key_reg(key_chn_sel + 4, twk_key, key_len);

	/* set iv reg (CCM synthesizes its counter block from the nonce) */
	if (rk_mode == RK_MODE_CCM)
		ccm128_set_iv_reg(chn, iv, iv_len);
	else
		set_iv_reg(chn, iv, iv_len);

	/* din_swap set 1, dout_swap set 1, default 1. */
	crypto_write(0x00030003, CRYPTO_FIFO_CTL);
	crypto_write(0, CRYPTO_DMA_INT_EN);

	crypto_write(reg_ctrl | CRYPTO_WRITE_MASK_ALL, CRYPTO_BC_CTL);

	return 0;
}
968 
/*
 * Run one complete cipher/MAC/AEAD operation through the DMA engine.
 *
 * @in:      input data (plaintext or ciphertext)
 * @out:     output buffer, or NULL for MAC-only modes
 * @len:     input length in bytes
 * @aad:     associated data (CCM/GCM only)
 * @aad_len: associated data length
 * @tag:     out: authentication tag for modes that produce one
 * @tag_len: tag length in bytes
 * @mode:    mode word; rk mode extracted with RK_GET_RK_MODE()
 *
 * Buffers that violate the DMA alignment constraint (8 bytes) are
 * bounced through memalign'd copies.  For CCM the B0/B1 header blocks
 * are synthesized in front of the AAD in a separate descriptor that
 * chains into the data descriptor.
 *
 * Returns 0 on success, negative value on allocation/timeout/DMA error.
 */
static int hw_cipher_crypt(const u8 *in, u8 *out, u64 len,
			   const u8 *aad, u32 aad_len,
			   u8 *tag, u32 tag_len, u32 mode)
{
	struct crypto_lli_desc *data_desc = NULL, *aad_desc = NULL;
	u8 *dma_in = NULL, *dma_out = NULL, *aad_tmp = NULL;
	u32 rk_mode = RK_GET_RK_MODE(mode);
	u32 reg_ctrl = 0, tmp_len = 0;
	u32 expt_int = 0, mask = 0;
	u32 key_chn = g_key_chn;
	u32 tmp, dst_len = 0;
	int ret = -1;

	/* CTS needs more than one block by definition */
	if (rk_mode == RK_MODE_CTS && len <= AES_BLOCK_SIZE) {
		printf("CTS mode length %u < 16Byte\n", (u32)len);
		return -EINVAL;
	}

	/* CTR operates on whole blocks in hardware; round the DMA size up */
	tmp_len = (rk_mode == RK_MODE_CTR) ? ROUNDUP(len, AES_BLOCK_SIZE) : len;

	data_desc = align_malloc(sizeof(*data_desc), LLI_ADDR_ALIGN_SIZE);
	if (!data_desc)
		goto exit;

	/* bounce unaligned (or CTR-padded) input through an aligned copy */
	if (IS_ALIGNED((ulong)in, DATA_ADDR_ALIGN_SIZE) && tmp_len == len)
		dma_in = (void *)in;
	else
		dma_in = align_malloc(tmp_len, DATA_ADDR_ALIGN_SIZE);
	if (!dma_in)
		goto exit;

	if (out) {
		if (IS_ALIGNED((ulong)out, DATA_ADDR_ALIGN_SIZE) &&
		    tmp_len == len)
			dma_out = out;
		else
			dma_out = align_malloc(tmp_len, DATA_ADDR_ALIGN_SIZE);
		if (!dma_out)
			goto exit;
		dst_len = tmp_len;
	}

	memset(data_desc, 0x00, sizeof(*data_desc));
	if (dma_in != in)
		memcpy(dma_in, in, len);

	data_desc->src_addr    = (u32)virt_to_phys(dma_in);
	data_desc->src_len     = tmp_len;
	data_desc->dst_addr    = (u32)virt_to_phys(dma_out);
	data_desc->dst_len     = dst_len;
	data_desc->dma_ctrl    = LLI_DMA_CTRL_LAST;

	/* MAC modes produce no dst data, so wait for list-done instead */
	if (IS_MAC_MODE(rk_mode)) {
		expt_int = CRYPTO_LIST_DONE_INT_ST;
		data_desc->dma_ctrl |= LLI_DMA_CTRL_LIST_DONE;
	} else {
		expt_int = CRYPTO_DST_ITEM_DONE_INT_ST;
		data_desc->dma_ctrl |= LLI_DMA_CTRL_DST_DONE;
	}

	data_desc->user_define = LLI_USER_CIPHER_START |
				 LLI_USER_STRING_START |
				 LLI_USER_STRING_LAST |
				 (key_chn << 4);
	crypto_write((u32)virt_to_phys(data_desc), CRYPTO_DMA_LLI_ADDR);

	if (rk_mode == RK_MODE_CCM || rk_mode == RK_MODE_GCM) {
		u32 aad_tmp_len = 0;

		/* AAD goes through its own descriptor chained before data */
		aad_desc = align_malloc(sizeof(*aad_desc), LLI_ADDR_ALIGN_SIZE);
		if (!aad_desc)
			goto exit;

		memset(aad_desc, 0x00, sizeof(*aad_desc));
		aad_desc->next_addr = (u32)virt_to_phys(data_desc);
		aad_desc->user_define = LLI_USER_CIPHER_START |
					 LLI_USER_STRING_START |
					 LLI_USER_STRING_LAST |
					 LLI_USER_STRING_AAD |
					 (key_chn << 4);

		if (rk_mode == RK_MODE_CCM) {
			u8 padding[AES_BLOCK_SIZE];
			u32 padding_size = 0;

			/* B1 length prefix per RFC 3610 */
			memset(padding, 0x00, sizeof(padding));
			ccm_aad_padding(aad_len, padding, &padding_size);

			/* B0 block + length prefix + AAD, block padded */
			aad_tmp_len = aad_len + AES_BLOCK_SIZE + padding_size;
			aad_tmp_len = ROUNDUP(aad_tmp_len, AES_BLOCK_SIZE);
			aad_tmp = align_malloc(aad_tmp_len,
					       DATA_ADDR_ALIGN_SIZE);
			if (!aad_tmp)
				goto exit;

			/* clear last block */
			memset(aad_tmp + aad_tmp_len - AES_BLOCK_SIZE,
			       0x00, AES_BLOCK_SIZE);

			/* read iv data from reg */
			get_iv_reg(key_chn, aad_tmp, AES_BLOCK_SIZE);
			ccm_compose_aad_iv(aad_tmp, tmp_len, aad_len, tag_len);
			memcpy(aad_tmp + AES_BLOCK_SIZE, padding, padding_size);

			memcpy(aad_tmp + AES_BLOCK_SIZE + padding_size,
			       aad, aad_len);
		} else {
			/* GCM: AAD is passed through as-is, lengths via regs */
			aad_tmp_len = aad_len;
			if (IS_ALIGNED((ulong)aad, DATA_ADDR_ALIGN_SIZE)) {
				aad_tmp = (void *)aad;
			} else {
				aad_tmp = align_malloc(aad_tmp_len,
						       DATA_ADDR_ALIGN_SIZE);
				if (!aad_tmp)
					goto exit;

				memcpy(aad_tmp, aad, aad_tmp_len);
			}

			set_aad_len_reg(key_chn, aad_tmp_len);
			set_pc_len_reg(key_chn, tmp_len);
		}

		aad_desc->src_addr = (u32)virt_to_phys(aad_tmp);
		aad_desc->src_len  = aad_tmp_len;

		if (aad_tmp_len) {
			/* the chain starts at the AAD descriptor instead */
			data_desc->user_define = LLI_USER_STRING_START |
						 LLI_USER_STRING_LAST |
						 (key_chn << 4);
			crypto_write((u32)virt_to_phys(aad_desc), CRYPTO_DMA_LLI_ADDR);
			cache_op_inner(DCACHE_AREA_CLEAN, aad_tmp, aad_tmp_len);
			cache_op_inner(DCACHE_AREA_CLEAN, aad_desc, sizeof(*aad_desc));
		}
	}

	cache_op_inner(DCACHE_AREA_CLEAN, data_desc, sizeof(*data_desc));
	cache_op_inner(DCACHE_AREA_CLEAN, dma_in, tmp_len);
	cache_op_inner(DCACHE_AREA_INVALIDATE, dma_out, tmp_len);

	/* din_swap set 1, dout_swap set 1, default 1. */
	crypto_write(0x00030003, CRYPTO_FIFO_CTL);
	crypto_write(0, CRYPTO_DMA_INT_EN);

	reg_ctrl = crypto_read(CRYPTO_BC_CTL) | CRYPTO_BC_ENABLE;
	crypto_write(reg_ctrl | CRYPTO_WRITE_MASK_ALL, CRYPTO_BC_CTL);
	crypto_write(0x00010001, CRYPTO_DMA_CTL);//start

	/* ignore the lockstep status bit while polling */
	mask = ~(mask | CRYPTO_SYNC_LOCKSTEP_INT_ST);

	/* wait calc ok */
	ret = RK_POLL_TIMEOUT(!(crypto_read(CRYPTO_DMA_INT_ST) & mask),
			      RK_CRYPTO_TIMEOUT);
	tmp = crypto_read(CRYPTO_DMA_INT_ST);
	crypto_write(tmp, CRYPTO_DMA_INT_ST);

	if ((tmp & mask) == expt_int) {
		if (out && out != dma_out)
			memcpy(out, dma_out, len);

		if (IS_NEED_TAG(rk_mode)) {
			ret = WAIT_TAG_VALID(key_chn, RK_CRYPTO_TIMEOUT);
			get_tag_from_reg(key_chn, tag, AES_BLOCK_SIZE);
		}
	} else {
		dump_crypto_state(data_desc, tmp, expt_int, in, out, len, ret);
		ret = -1;
	}

exit:
	crypto_write(0xffff0000, CRYPTO_BC_CTL);//bc_ctl disable
	align_free(data_desc);
	align_free(aad_desc);
	if (dma_in != in)
		align_free(dma_in);
	if (out && dma_out != out)
		align_free(dma_out);
	if (aad && aad != aad_tmp)
		align_free(aad_tmp);

	return ret;
}
1151 
1152 static int hw_aes_init(u32 chn, const u8 *key, const u8 *twk_key, u32 key_len,
1153 		       const u8 *iv, u32 iv_len, u32 mode, bool enc)
1154 {
1155 	u32 rk_mode = RK_GET_RK_MODE(mode);
1156 
1157 	if (rk_mode > RK_MODE_XTS)
1158 		return -EINVAL;
1159 
1160 	if (iv_len > AES_BLOCK_SIZE)
1161 		return -EINVAL;
1162 
1163 	if (IS_NEED_IV(rk_mode)) {
1164 		if (!iv || iv_len != AES_BLOCK_SIZE)
1165 			return -EINVAL;
1166 	} else {
1167 		iv_len = 0;
1168 	}
1169 
1170 	if (rk_mode == RK_MODE_XTS) {
1171 		if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_256)
1172 			return -EINVAL;
1173 
1174 		if (!key || !twk_key)
1175 			return -EINVAL;
1176 	} else {
1177 		if (key_len != AES_KEYSIZE_128 &&
1178 		    key_len != AES_KEYSIZE_192 &&
1179 		    key_len != AES_KEYSIZE_256)
1180 			return -EINVAL;
1181 	}
1182 
1183 	return hw_cipher_init(chn, key, twk_key, key_len, iv, iv_len,
1184 			      CRYPTO_AES, mode, enc);
1185 }
1186 
1187 static int hw_sm4_init(u32  chn, const u8 *key, const u8 *twk_key, u32 key_len,
1188 		       const u8 *iv, u32 iv_len, u32 mode, bool enc)
1189 {
1190 	u32 rk_mode = RK_GET_RK_MODE(mode);
1191 
1192 	if (rk_mode > RK_MODE_XTS)
1193 		return -EINVAL;
1194 
1195 	if (iv_len > SM4_BLOCK_SIZE || key_len != SM4_KEYSIZE)
1196 		return -EINVAL;
1197 
1198 	if (IS_NEED_IV(rk_mode)) {
1199 		if (!iv || iv_len != SM4_BLOCK_SIZE)
1200 			return -EINVAL;
1201 	} else {
1202 		iv_len = 0;
1203 	}
1204 
1205 	if (rk_mode == RK_MODE_XTS) {
1206 		if (!key || !twk_key)
1207 			return -EINVAL;
1208 	}
1209 
1210 	return hw_cipher_init(chn, key, twk_key, key_len, iv, iv_len,
1211 			      CRYPTO_SM4, mode, enc);
1212 }
1213 
1214 int rk_crypto_des(struct udevice *dev, u32 mode, const u8 *key, u32 key_len,
1215 		  const u8 *iv, const u8 *in, u8 *out, u32 len, bool enc)
1216 {
1217 	u32 rk_mode = RK_GET_RK_MODE(mode);
1218 	u8 tmp_key[24];
1219 	int ret;
1220 
1221 	if (!is_des_mode(rk_mode))
1222 		return -EINVAL;
1223 
1224 	if (key_len == DES_BLOCK_SIZE || key_len == 3 * DES_BLOCK_SIZE) {
1225 		memcpy(tmp_key, key, key_len);
1226 	} else if (key_len == 2 * DES_BLOCK_SIZE) {
1227 		memcpy(tmp_key, key, 16);
1228 		memcpy(tmp_key + 16, key, 8);
1229 		key_len = 3 * DES_BLOCK_SIZE;
1230 	} else {
1231 		return -EINVAL;
1232 	}
1233 
1234 	ret = hw_cipher_init(0, tmp_key, NULL, key_len, iv, DES_BLOCK_SIZE,
1235 			     CRYPTO_DES, mode, enc);
1236 	if (ret)
1237 		goto exit;
1238 
1239 	ret = hw_cipher_crypt(in, out, len, NULL, 0,
1240 			      NULL, 0, mode);
1241 
1242 exit:
1243 	return ret;
1244 }
1245 
1246 int rk_crypto_aes(struct udevice *dev, u32 mode,
1247 		  const u8 *key, const u8 *twk_key, u32 key_len,
1248 		  const u8 *iv, u32 iv_len,
1249 		  const u8 *in, u8 *out, u32 len, bool enc)
1250 {
1251 	int ret;
1252 
1253 	/* RV1126/RV1109 do not support aes-192 */
1254 #if defined(CONFIG_ROCKCHIP_RV1126)
1255 	if (key_len == AES_KEYSIZE_192)
1256 		return -EINVAL;
1257 #endif
1258 
1259 	ret = hw_aes_init(0, key, twk_key, key_len, iv, iv_len, mode, enc);
1260 	if (ret)
1261 		return ret;
1262 
1263 	return hw_cipher_crypt(in, out, len, NULL, 0,
1264 			       NULL, 0, mode);
1265 }
1266 
1267 int rk_crypto_sm4(struct udevice *dev, u32 mode,
1268 		  const u8 *key, const u8 *twk_key, u32 key_len,
1269 		  const u8 *iv, u32 iv_len,
1270 		  const u8 *in, u8 *out, u32 len, bool enc)
1271 {
1272 	int ret;
1273 
1274 	ret = hw_sm4_init(0, key, twk_key, key_len, iv, iv_len, mode, enc);
1275 	if (ret)
1276 		return ret;
1277 
1278 	return hw_cipher_crypt(in, out, len, NULL, 0, NULL, 0, mode);
1279 }
1280 
1281 int rockchip_crypto_cipher(struct udevice *dev, cipher_context *ctx,
1282 			   const u8 *in, u8 *out, u32 len, bool enc)
1283 {
1284 	int ret;
1285 
1286 	rk_crypto_enable_clk(dev);
1287 
1288 	switch (ctx->algo) {
1289 	case CRYPTO_DES:
1290 		ret = rk_crypto_des(dev, ctx->mode, ctx->key, ctx->key_len,
1291 				    ctx->iv, in, out, len, enc);
1292 		break;
1293 	case CRYPTO_AES:
1294 		ret = rk_crypto_aes(dev, ctx->mode,
1295 				    ctx->key, ctx->twk_key, ctx->key_len,
1296 				    ctx->iv, ctx->iv_len, in, out, len, enc);
1297 		break;
1298 	case CRYPTO_SM4:
1299 		ret = rk_crypto_sm4(dev, ctx->mode,
1300 				    ctx->key, ctx->twk_key, ctx->key_len,
1301 				    ctx->iv, ctx->iv_len, in, out, len, enc);
1302 		break;
1303 	default:
1304 		ret = -EINVAL;
1305 		break;
1306 	}
1307 
1308 	rk_crypto_disable_clk(dev);
1309 
1310 	return ret;
1311 }
1312 
1313 int rk_crypto_mac(struct udevice *dev, u32 algo, u32 mode,
1314 		  const u8 *key, u32 key_len,
1315 		  const u8 *in, u32 len, u8 *tag)
1316 {
1317 	u32 rk_mode = RK_GET_RK_MODE(mode);
1318 	int ret;
1319 
1320 	if (!IS_MAC_MODE(rk_mode))
1321 		return -EINVAL;
1322 
1323 	if (algo != CRYPTO_AES && algo != CRYPTO_SM4)
1324 		return -EINVAL;
1325 
1326 	/* RV1126/RV1109 do not support aes-192 */
1327 #if defined(CONFIG_ROCKCHIP_RV1126)
1328 	if (algo == CRYPTO_AES && key_len == AES_KEYSIZE_192)
1329 		return -EINVAL;
1330 #endif
1331 
1332 	ret = hw_cipher_init(g_key_chn, key, NULL, key_len, NULL, 0,
1333 			     algo, mode, true);
1334 	if (ret)
1335 		return ret;
1336 
1337 	return hw_cipher_crypt(in, NULL, len, NULL, 0,
1338 			       tag, AES_BLOCK_SIZE, mode);
1339 }
1340 
1341 int rockchip_crypto_mac(struct udevice *dev, cipher_context *ctx,
1342 			const u8 *in, u32 len, u8 *tag)
1343 {
1344 	int ret = 0;
1345 
1346 	rk_crypto_enable_clk(dev);
1347 
1348 	ret = rk_crypto_mac(dev, ctx->algo, ctx->mode,
1349 			    ctx->key, ctx->key_len, in, len, tag);
1350 
1351 	rk_crypto_disable_clk(dev);
1352 
1353 	return ret;
1354 }
1355 
1356 int rk_crypto_ae(struct udevice *dev, u32 algo, u32 mode,
1357 		 const u8 *key, u32 key_len, const u8 *nonce, u32 nonce_len,
1358 		 const u8 *in, u32 len, const u8 *aad, u32 aad_len,
1359 		 u8 *out, u8 *tag)
1360 {
1361 	u32 rk_mode = RK_GET_RK_MODE(mode);
1362 	int ret;
1363 
1364 	if (!IS_AE_MODE(rk_mode))
1365 		return -EINVAL;
1366 
1367 	if (len == 0)
1368 		return -EINVAL;
1369 
1370 	if (algo != CRYPTO_AES && algo != CRYPTO_SM4)
1371 		return -EINVAL;
1372 
1373 	/* RV1126/RV1109 do not support aes-192 */
1374 #if defined(CONFIG_ROCKCHIP_RV1126)
1375 	if (algo == CRYPTO_AES && key_len == AES_KEYSIZE_192)
1376 		return -EINVAL;
1377 #endif
1378 
1379 	ret = hw_cipher_init(g_key_chn, key, NULL, key_len, nonce, nonce_len,
1380 			     algo, mode, true);
1381 	if (ret)
1382 		return ret;
1383 
1384 	return hw_cipher_crypt(in, out, len, aad, aad_len,
1385 			       tag, AES_BLOCK_SIZE, mode);
1386 }
1387 
1388 int rockchip_crypto_ae(struct udevice *dev, cipher_context *ctx,
1389 		       const u8 *in, u32 len, const u8 *aad, u32 aad_len,
1390 		       u8 *out, u8 *tag)
1391 
1392 {
1393 	int ret = 0;
1394 
1395 	rk_crypto_enable_clk(dev);
1396 
1397 	ret = rk_crypto_ae(dev, ctx->algo, ctx->mode, ctx->key, ctx->key_len,
1398 			   ctx->iv, ctx->iv_len, in, len,
1399 			   aad, aad_len, out, tag);
1400 
1401 	rk_crypto_disable_clk(dev);
1402 
1403 	return ret;
1404 }
1405 
1406 #if CONFIG_IS_ENABLED(DM_KEYLAD)
1407 int rockchip_crypto_fw_cipher(struct udevice *dev, cipher_fw_context *ctx,
1408 			      const u8 *in, u8 *out, u32 len, bool enc)
1409 {
1410 	int ret;
1411 
1412 	rk_crypto_enable_clk(dev);
1413 
1414 	switch (ctx->algo) {
1415 	case CRYPTO_DES:
1416 		ret = rk_crypto_des(dev, ctx->mode, NULL, ctx->key_len,
1417 				    ctx->iv, in, out, len, enc);
1418 		break;
1419 	case CRYPTO_AES:
1420 		ret = rk_crypto_aes(dev, ctx->mode, NULL, NULL, ctx->key_len,
1421 				    ctx->iv, ctx->iv_len, in, out, len, enc);
1422 		break;
1423 	case CRYPTO_SM4:
1424 		ret = rk_crypto_sm4(dev, ctx->mode, NULL, NULL, ctx->key_len,
1425 				    ctx->iv, ctx->iv_len, in, out, len, enc);
1426 		break;
1427 	default:
1428 		ret = -EINVAL;
1429 		break;
1430 	}
1431 
1432 	rk_crypto_disable_clk(dev);
1433 
1434 	return ret;
1435 }
1436 
/*
 * Return the bus address of key-table slot 0 in the secure crypto block
 * (used by the key ladder to write keys directly).
 *
 * NOTE(review): CRYPTO_S_BY_KEYLAD_BASE is only defined when
 * CONFIG_ROCKCHIP_RK3562 is set; enabling DM_KEYLAD on another SoC would
 * fail to compile here -- confirm intended platform coverage.
 */
static ulong rockchip_crypto_keytable_addr(struct udevice *dev)
{
	return CRYPTO_S_BY_KEYLAD_BASE + CRYPTO_CH0_KEY_0;
}
1441 #endif
1442 #endif
1443 
1444 #if CONFIG_IS_ENABLED(ROCKCHIP_RSA)
/*
 * RSA public-key operation for signature verification.
 *
 * Computes output = sign^e mod n via the PKA unit (rk_exptmod_np), using
 * the optional Montgomery helper value ctx->c when provided. The caller
 * compares the recovered padding/digest against the expected value.
 *
 * @ctx:    RSA key (n, e, optional c); algo selects the modulus size
 * @sign:   raw signature, BITS2BYTE(n_bits) long
 * @output: receives the decrypted signature block, same length
 *
 * Return: 0 on success, -EINVAL on bad key, or a PKA/allocation error.
 * All mpa pointers start NULL so the exit path can free unconditionally.
 */
static int rockchip_crypto_rsa_verify(struct udevice *dev, rsa_key *ctx,
				      u8 *sign, u8 *output)
{
	struct mpa_num *mpa_m = NULL, *mpa_e = NULL, *mpa_n = NULL;
	struct mpa_num *mpa_c = NULL, *mpa_result = NULL;
	u32 n_bits, n_words;
	int ret;

	if (!ctx)
		return -EINVAL;

	/* only the listed modulus sizes are supported by the PKA */
	if (ctx->algo != CRYPTO_RSA512 &&
	    ctx->algo != CRYPTO_RSA1024 &&
	    ctx->algo != CRYPTO_RSA2048 &&
	    ctx->algo != CRYPTO_RSA3072 &&
	    ctx->algo != CRYPTO_RSA4096)
		return -EINVAL;

	n_bits = crypto_algo_nbits(ctx->algo);
	n_words = BITS2WORD(n_bits);

	ret = rk_mpa_alloc(&mpa_m, sign, n_words);
	if (ret)
		goto exit;

	ret = rk_mpa_alloc(&mpa_e, ctx->e, n_words);
	if (ret)
		goto exit;

	ret = rk_mpa_alloc(&mpa_n, ctx->n, n_words);
	if (ret)
		goto exit;

	/* ctx->c (precomputed constant) is optional; skip when absent */
	if (ctx->c) {
		ret = rk_mpa_alloc(&mpa_c, ctx->c, n_words);
		if (ret)
			goto exit;
	}

	ret = rk_mpa_alloc(&mpa_result, NULL, n_words);
	if (ret)
		goto exit;

	rk_crypto_enable_clk(dev);
	ret = rk_exptmod_np(mpa_m, mpa_e, mpa_n, mpa_c, mpa_result);
	if (!ret)
		memcpy(output, mpa_result->d, BITS2BYTE(n_bits));
	rk_crypto_disable_clk(dev);

exit:
	rk_mpa_free(&mpa_m);
	rk_mpa_free(&mpa_e);
	rk_mpa_free(&mpa_n);
	rk_mpa_free(&mpa_c);
	rk_mpa_free(&mpa_result);

	return ret;
}
1503 #endif
1504 
1505 #if CONFIG_IS_ENABLED(ROCKCHIP_EC)
1506 static int rockchip_crypto_ec_verify(struct udevice *dev, ec_key *ctx,
1507 				     u8 *hash, u32 hash_len, u8 *sign)
1508 {
1509 	struct mpa_num *bn_sign = NULL;
1510 	struct rk_ecp_point point_P, point_sign;
1511 	u32 n_bits, n_words;
1512 	int ret;
1513 
1514 	if (!ctx)
1515 		return -EINVAL;
1516 
1517 	if (ctx->algo != CRYPTO_SM2 &&
1518 	    ctx->algo != CRYPTO_ECC_192R1 &&
1519 	    ctx->algo != CRYPTO_ECC_224R1 &&
1520 	    ctx->algo != CRYPTO_ECC_256R1)
1521 		return -EINVAL;
1522 
1523 	n_bits = crypto_algo_nbits(ctx->algo);
1524 	n_words = BITS2WORD(n_bits);
1525 
1526 	ret = rk_mpa_alloc(&bn_sign, sign, n_words);
1527 	if (ret)
1528 		goto exit;
1529 
1530 	ret = rk_mpa_alloc(&point_P.x, ctx->x, n_words);
1531 	ret |= rk_mpa_alloc(&point_P.y, ctx->y, n_words);
1532 	if (ret)
1533 		goto exit;
1534 
1535 	ret = rk_mpa_alloc(&point_sign.x, sign, n_words);
1536 	ret |= rk_mpa_alloc(&point_sign.y, sign + WORD2BYTE(n_words), n_words);
1537 	if (ret)
1538 		goto exit;
1539 
1540 	rk_crypto_enable_clk(dev);
1541 	ret = rockchip_ecc_verify(ctx->algo, hash, hash_len, &point_P, &point_sign);
1542 	rk_crypto_disable_clk(dev);
1543 exit:
1544 	rk_mpa_free(&bn_sign);
1545 	rk_mpa_free(&point_P.x);
1546 	rk_mpa_free(&point_P.y);
1547 	rk_mpa_free(&point_sign.x);
1548 	rk_mpa_free(&point_sign.y);
1549 
1550 	return ret;
1551 }
1552 #endif
1553 
/* Crypto uclass operations; optional features are compiled in per config. */
static const struct dm_crypto_ops rockchip_crypto_ops = {
	.capability   = rockchip_crypto_capability,
	/* hash (always available) */
	.sha_init     = rockchip_crypto_sha_init,
	.sha_update   = rockchip_crypto_sha_update,
	.sha_final    = rockchip_crypto_sha_final,
#if CONFIG_IS_ENABLED(ROCKCHIP_RSA)
	.rsa_verify   = rockchip_crypto_rsa_verify,
#endif
#if CONFIG_IS_ENABLED(ROCKCHIP_EC)
	.ec_verify    = rockchip_crypto_ec_verify,
#endif
#if CONFIG_IS_ENABLED(ROCKCHIP_HMAC)
	.hmac_init    = rockchip_crypto_hmac_init,
	.hmac_update  = rockchip_crypto_hmac_update,
	.hmac_final   = rockchip_crypto_hmac_final,
#endif
#if CONFIG_IS_ENABLED(ROCKCHIP_CIPHER)
	.cipher_crypt    = rockchip_crypto_cipher,
	.cipher_mac      = rockchip_crypto_mac,
	.cipher_ae       = rockchip_crypto_ae,
#if CONFIG_IS_ENABLED(DM_KEYLAD)
	/* key-ladder path: keys provisioned by hardware, not the caller */
	.cipher_fw_crypt = rockchip_crypto_fw_cipher,
	.keytable_addr   = rockchip_crypto_keytable_addr,
#endif
#endif
};
1580 
1581 /*
1582  * Only use "clocks" to parse crypto clock id and use rockchip_get_clk().
1583  * Because we always add crypto node in U-Boot dts, when kernel dtb enabled :
1584  *
1585  *   1. There is cru phandle mismatch between U-Boot and kernel dtb;
1586  *   2. CONFIG_OF_SPL_REMOVE_PROPS removes clock property;
1587  */
1588 static int rockchip_crypto_ofdata_to_platdata(struct udevice *dev)
1589 {
1590 	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
1591 	int len, ret = -EINVAL;
1592 
1593 	memset(priv, 0x00, sizeof(*priv));
1594 
1595 	priv->reg = (fdt_addr_t)dev_read_addr_ptr(dev);
1596 	if (priv->reg == FDT_ADDR_T_NONE)
1597 		return -EINVAL;
1598 
1599 	crypto_base = priv->reg;
1600 
1601 	/* if there is no clocks in dts, just skip it */
1602 	if (!dev_read_prop(dev, "clocks", &len)) {
1603 		printf("Can't find \"clocks\" property\n");
1604 		return 0;
1605 	}
1606 
1607 	memset(priv, 0x00, sizeof(*priv));
1608 	priv->clocks = malloc(len);
1609 	if (!priv->clocks)
1610 		return -ENOMEM;
1611 
1612 	priv->nclocks = len / (2 * sizeof(u32));
1613 	if (dev_read_u32_array(dev, "clocks", (u32 *)priv->clocks,
1614 			       priv->nclocks)) {
1615 		printf("Can't read \"clocks\" property\n");
1616 		ret = -EINVAL;
1617 		goto exit;
1618 	}
1619 
1620 	if (dev_read_prop(dev, "clock-frequency", &len)) {
1621 		priv->frequencies = malloc(len);
1622 		if (!priv->frequencies) {
1623 			ret = -ENOMEM;
1624 			goto exit;
1625 		}
1626 		priv->freq_nclocks = len / sizeof(u32);
1627 		if (dev_read_u32_array(dev, "clock-frequency", priv->frequencies,
1628 				       priv->freq_nclocks)) {
1629 			printf("Can't read \"clock-frequency\" property\n");
1630 			ret = -EINVAL;
1631 			goto exit;
1632 		}
1633 	}
1634 
1635 	return 0;
1636 exit:
1637 	if (priv->clocks)
1638 		free(priv->clocks);
1639 
1640 	if (priv->frequencies)
1641 		free(priv->frequencies);
1642 
1643 	return ret;
1644 }
1645 
1646 static int rk_crypto_set_clk(struct udevice *dev)
1647 {
1648 	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
1649 	struct clk clk;
1650 	int i, ret;
1651 
1652 	/* use standard "assigned-clock-rates" props */
1653 	if (dev_read_size(dev, "assigned-clock-rates") > 0)
1654 		return clk_set_defaults(dev);
1655 
1656 	/* use "clock-frequency" props */
1657 	if (priv->freq_nclocks == 0)
1658 		return 0;
1659 
1660 	for (i = 0; i < priv->freq_nclocks; i++) {
1661 		ret = clk_get_by_index(dev, i, &clk);
1662 		if (ret < 0) {
1663 			printf("Failed to get clk index %d, ret=%d\n", i, ret);
1664 			return ret;
1665 		}
1666 		ret = clk_set_rate(&clk, priv->frequencies[i]);
1667 		if (ret < 0) {
1668 			printf("%s: Failed to set clk(%ld): ret=%d\n",
1669 			       __func__, clk.id, ret);
1670 			return ret;
1671 		}
1672 	}
1673 
1674 	return 0;
1675 }
1676 
1677 static int rockchip_crypto_probe(struct udevice *dev)
1678 {
1679 	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
1680 	struct rk_crypto_soc_data *sdata;
1681 	int ret = 0;
1682 
1683 	sdata = (struct rk_crypto_soc_data *)dev_get_driver_data(dev);
1684 
1685 	if (sdata->dynamic_cap)
1686 		sdata->capability = sdata->dynamic_cap();
1687 
1688 	priv->soc_data = sdata;
1689 
1690 	priv->hw_ctx = memalign(LLI_ADDR_ALIGN_SIZE,
1691 				sizeof(struct rk_hash_ctx));
1692 	if (!priv->hw_ctx)
1693 		return -ENOMEM;
1694 
1695 	ret = rk_crypto_set_clk(dev);
1696 	if (ret)
1697 		return ret;
1698 
1699 	rk_crypto_enable_clk(dev);
1700 
1701 	hw_crypto_reset();
1702 
1703 	rk_crypto_disable_clk(dev);
1704 
1705 	return 0;
1706 }
1707 
/* Baseline capability set: px30/rk3308-class SoCs (no SM algorithms). */
static const struct rk_crypto_soc_data soc_data_base = {
	.capability = CRYPTO_MD5 |
		      CRYPTO_SHA1 |
		      CRYPTO_SHA256 |
		      CRYPTO_SHA512 |
		      CRYPTO_HMAC_MD5 |
		      CRYPTO_HMAC_SHA1 |
		      CRYPTO_HMAC_SHA256 |
		      CRYPTO_HMAC_SHA512 |
		      CRYPTO_RSA512 |
		      CRYPTO_RSA1024 |
		      CRYPTO_RSA2048 |
		      CRYPTO_RSA3072 |
		      CRYPTO_RSA4096 |
		      CRYPTO_DES |
		      CRYPTO_AES,
};

/* Baseline plus Chinese SM2/SM3/SM4 support: rv1126/rk3568/rk3588. */
static const struct rk_crypto_soc_data soc_data_base_sm = {
	.capability = CRYPTO_MD5 |
		      CRYPTO_SHA1 |
		      CRYPTO_SHA256 |
		      CRYPTO_SHA512 |
		      CRYPTO_SM3 |
		      CRYPTO_HMAC_MD5 |
		      CRYPTO_HMAC_SHA1 |
		      CRYPTO_HMAC_SHA256 |
		      CRYPTO_HMAC_SHA512 |
		      CRYPTO_HMAC_SM3 |
		      CRYPTO_RSA512 |
		      CRYPTO_RSA1024 |
		      CRYPTO_RSA2048 |
		      CRYPTO_RSA3072 |
		      CRYPTO_RSA4096 |
		      CRYPTO_DES |
		      CRYPTO_AES |
		      CRYPTO_SM4,
};

/* rk1808: hash/HMAC/RSA only (no symmetric ciphers exposed here). */
static const struct rk_crypto_soc_data soc_data_rk1808 = {
	.capability = CRYPTO_MD5 |
		      CRYPTO_SHA1 |
		      CRYPTO_SHA256 |
		      CRYPTO_HMAC_MD5 |
		      CRYPTO_HMAC_SHA1 |
		      CRYPTO_HMAC_SHA256 |
		      CRYPTO_RSA512 |
		      CRYPTO_RSA1024 |
		      CRYPTO_RSA2048 |
		      CRYPTO_RSA3072 |
		      CRYPTO_RSA4096,
};

/* crypto v3/v4 IP: capabilities discovered at probe via dynamic_cap(). */
static const struct rk_crypto_soc_data soc_data_cryptov3 = {
	.capability  = 0,
	.dynamic_cap = crypto_v3_dynamic_cap,
};
1765 
/* DT compatibles mapped to the per-SoC capability tables above. */
static const struct udevice_id rockchip_crypto_ids[] = {
	{
		.compatible = "rockchip,px30-crypto",
		.data = (ulong)&soc_data_base
	},
	{
		.compatible = "rockchip,rk1808-crypto",
		.data = (ulong)&soc_data_rk1808
	},
	{
		.compatible = "rockchip,rk3308-crypto",
		.data = (ulong)&soc_data_base
	},
	{
		.compatible = "rockchip,rv1126-crypto",
		.data = (ulong)&soc_data_base_sm
	},
	{
		.compatible = "rockchip,rk3568-crypto",
		.data = (ulong)&soc_data_base_sm
	},
	{
		.compatible = "rockchip,rk3588-crypto",
		.data = (ulong)&soc_data_base_sm
	},
	{
		.compatible = "rockchip,crypto-v3",
		.data = (ulong)&soc_data_cryptov3
	},
	{
		.compatible = "rockchip,crypto-v4",
		.data = (ulong)&soc_data_cryptov3 /* reuse crypto v3 config */
	},
	{ }
};
1801 
/* Driver-model registration for the Rockchip crypto v2/v3/v4 block. */
U_BOOT_DRIVER(rockchip_crypto_v2) = {
	.name		= "rockchip_crypto_v2",
	.id		= UCLASS_CRYPTO,
	.of_match	= rockchip_crypto_ids,
	.ops		= &rockchip_crypto_ops,
	.probe		= rockchip_crypto_probe,
	.ofdata_to_platdata = rockchip_crypto_ofdata_to_platdata,
	.priv_auto_alloc_size = sizeof(struct rockchip_crypto_priv),
};
1811