xref: /rk3399_rockchip-uboot/drivers/crypto/rockchip/crypto_v2.c (revision 25fbb41f6739e72525dd4ae75a35dd90ede3426f)
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * Copyright (c) 2019 Fuzhou Rockchip Electronics Co., Ltd
4  */
5 
6 #include <common.h>
7 #include <clk.h>
8 #include <crypto.h>
9 #include <dm.h>
10 #include <asm/io.h>
11 #include <asm/arch/hardware.h>
12 #include <asm/arch/clock.h>
13 #include <rockchip/crypto_hash_cache.h>
14 #include <rockchip/crypto_v2.h>
15 #include <rockchip/crypto_v2_pka.h>
16 
17 #define	RK_HASH_CTX_MAGIC		0x1A1A1A1A
18 
19 #ifdef DEBUG
20 #define IMSG(format, ...) printf("[%s, %05d]-trace: " format "\n", \
21 				 __func__, __LINE__, ##__VA_ARGS__)
22 #else
23 #define IMSG(format, ...)
24 #endif
25 
/*
 * Hardware DMA link-list-item (LLI) descriptor consumed by the crypto
 * DMA engine. Field order and widths are fixed by hardware; all
 * addresses are 32-bit physical addresses.
 */
struct crypto_lli_desc {
	u32 src_addr;		/* physical address of source data */
	u32 src_len;		/* source length in bytes */
	u32 dst_addr;		/* physical address of destination buffer */
	u32 dst_len;		/* destination length in bytes */
	u32 user_define;	/* LLI_USER_* flags (string start/last, key chn) */
	u32 reserve;		/* reserved, keep zero */
	u32 dma_ctrl;		/* LLI_DMA_CTRL_* flags (src/dst done, pause, last) */
	u32 next_addr;		/* physical address of next descriptor, or 0 */
};
36 
/* Software context for one in-flight hash/HMAC operation. */
struct rk_hash_ctx {
	struct crypto_lli_desc		data_lli;	/* lli desc */
	struct crypto_hash_cache	*hash_cache;	/* alignment/staging cache */
	u32				magic;		/* to check ctx */
	u32				algo;		/* hash algo */
	u8				digest_size;	/* hash out length */
	u8				reserved[3];	/* pad to 4-byte boundary */
};
45 
/* Per-SoC data: bitmask of CRYPTO_* algorithms this silicon supports. */
struct rk_crypto_soc_data {
	u32 capability;
};
49 
/* Driver-private state, allocated by the driver model per device. */
struct rockchip_crypto_priv {
	fdt_addr_t			reg;		/* register base from DT */
	struct clk			clk;		/* clk handle, id set per loop in probe */
	u32				frequency;	/* unused here */
	char				*clocks;	/* raw "clocks" DT cells */
	u32				*frequencies;	/* "clock-frequency" DT cells */
	u32				nclocks;	/* see ofdata_to_platdata: final count */
	u32				length;		/* bytes hashed so far */
	struct rk_hash_ctx		*hw_ctx;	/* aligned hash context */
	struct rk_crypto_soc_data	*soc_data;	/* per-SoC capability data */
};
61 
/* Alignment requirements imposed by the DMA engine */
#define LLI_ADDR_ALIGN_SIZE	8
#define DATA_ADDR_ALIGN_SIZE	8
#define DATA_LEN_ALIGN_SIZE	64

/* crypto timeout 500ms, must support more than 32M data per times*/
#define HASH_UPDATE_LIMIT	(32 * 1024 * 1024)
#define RK_CRYPTO_TIMEOUT	500000

/*
 * Busy-poll (1us steps) until @condition becomes false or @timeout
 * iterations elapse; evaluates to -ETIMEDOUT on timeout, 0 otherwise.
 */
#define RK_POLL_TIMEOUT(condition, timeout) \
({ \
	int time_out = timeout; \
	while (condition) { \
		if (--time_out <= 0) { \
			debug("[%s] %d: time out!\n", __func__,\
				__LINE__); \
			break; \
		} \
		udelay(1); \
	} \
	(time_out <= 0) ? -ETIMEDOUT : 0; \
})

/* Flat 1:1 mapping: physical == virtual, truncated to 32 bits for DMA */
#define virt_to_phys(addr)		(((unsigned long)addr) & 0xffffffff)
#define phys_to_virt(addr, area)	((unsigned long)addr)

#define align_malloc(bytes, alignment)	memalign(alignment, bytes)
#define align_free(addr)		free(addr)

#define ROUNDUP(size, alignment)	round_up(size, alignment)
/* "type" (clean/invalidate) is ignored; both map to a flush */
#define cache_op_inner(type, addr, size) \
					crypto_flush_cacheline((ulong)addr, size)
93 
94 fdt_addr_t crypto_base;
95 
96 static inline void word2byte_be(u32 word, u8 *ch)
97 {
98 	ch[0] = (word >> 24) & 0xff;
99 	ch[1] = (word >> 16) & 0xff;
100 	ch[2] = (word >> 8) & 0xff;
101 	ch[3] = (word >> 0) & 0xff;
102 }
103 
104 static inline u32 byte2word_be(const u8 *ch)
105 {
106 	return (*ch << 24) + (*(ch + 1) << 16) + (*(ch + 2) << 8) + *(ch + 3);
107 }
108 
109 static inline void clear_regs(u32 base, u32 words)
110 {
111 	int i;
112 
113 	/*clear out register*/
114 	for (i = 0; i < words; i++)
115 		crypto_write(0, base + 4 * i);
116 }
117 
/* Clear all 16 hash output registers (enough for a SHA-512 digest). */
static inline void clear_hash_out_reg(void)
{
	clear_regs(CRYPTO_HASH_DOUT_0, 16);
}
122 
/* Clear the key registers of every key channel (4 words per channel). */
static inline void clear_key_regs(void)
{
	clear_regs(CRYPTO_CH0_KEY_0, CRYPTO_KEY_CHANNEL_NUM * 4);
}
127 
128 static inline void write_regs(u32 base, const u8 *data, u32 data_len)
129 {
130 	u8 tmp_buf[4];
131 	u32 i;
132 
133 	for (i = 0; i < data_len / 4; i++, base += 4)
134 		crypto_write(byte2word_be(data + i * 4), base);
135 
136 	if (data_len % 4) {
137 		memset(tmp_buf, 0x00, sizeof(tmp_buf));
138 		memcpy((u8 *)tmp_buf, data + i * 4, data_len % 4);
139 		crypto_write(byte2word_be(tmp_buf), base);
140 	}
141 }
142 
/* Load @key into key channel @chn; channels are 0x10 bytes apart. */
static inline void write_key_reg(u32 chn, const u8 *key, u32 key_len)
{
	write_regs(CRYPTO_CH0_KEY_0 + chn * 0x10, key, key_len);
}
147 
148 static inline void set_iv_reg(u32 chn, const u8 *iv, u32 iv_len)
149 {
150 	u32 base_iv;
151 
152 	base_iv = CRYPTO_CH0_IV_0 + chn * 0x10;
153 
154 	/* clear iv */
155 	clear_regs(base_iv, 4);
156 
157 	if (!iv || iv_len == 0)
158 		return;
159 
160 	write_regs(base_iv, iv, iv_len);
161 
162 	crypto_write(iv_len, CRYPTO_CH0_IV_LEN_0 + 4 * chn);
163 }
164 
165 static int hw_crypto_reset(void)
166 {
167 	u32 val = 0, mask = 0;
168 	int ret;
169 
170 	val = CRYPTO_SW_PKA_RESET | CRYPTO_SW_CC_RESET;
171 	mask = val << CRYPTO_WRITE_MASK_SHIFT;
172 
173 	/* reset pka and crypto modules*/
174 	crypto_write(val | mask, CRYPTO_RST_CTL);
175 
176 	/* wait reset compelete */
177 	ret = RK_POLL_TIMEOUT(crypto_read(CRYPTO_RST_CTL), RK_CRYPTO_TIMEOUT);
178 
179 	return ret;
180 }
181 
/*
 * Disable the hash block and scrub @ctx: frees the staging cache and
 * zeroes the context (including the magic, so reuse is detected).
 */
static void hw_hash_clean_ctx(struct rk_hash_ctx *ctx)
{
	/* clear hash status */
	crypto_write(CRYPTO_WRITE_MASK_ALL | 0, CRYPTO_HASH_CTL);

	assert(ctx);
	assert(ctx->magic == RK_HASH_CTX_MAGIC);

	/* NOTE(review): assumes crypto_hash_cache_free() tolerates a
	 * NULL cache pointer -- confirm against its implementation.
	 */
	crypto_hash_cache_free(ctx->hash_cache);

	memset(ctx, 0x00, sizeof(*ctx));
}
194 
195 static int rk_hash_init(void *hw_ctx, u32 algo)
196 {
197 	struct rk_hash_ctx *tmp_ctx = (struct rk_hash_ctx *)hw_ctx;
198 	u32 reg_ctrl = 0;
199 	int ret;
200 
201 	if (!tmp_ctx)
202 		return -EINVAL;
203 
204 	reg_ctrl = CRYPTO_SW_CC_RESET;
205 	crypto_write(reg_ctrl | (reg_ctrl << CRYPTO_WRITE_MASK_SHIFT),
206 		     CRYPTO_RST_CTL);
207 
208 	/* wait reset compelete */
209 	ret = RK_POLL_TIMEOUT(crypto_read(CRYPTO_RST_CTL),
210 			      RK_CRYPTO_TIMEOUT);
211 
212 	reg_ctrl = 0;
213 	tmp_ctx->algo = algo;
214 	switch (algo) {
215 	case CRYPTO_MD5:
216 	case CRYPTO_HMAC_MD5:
217 		reg_ctrl |= CRYPTO_MODE_MD5;
218 		tmp_ctx->digest_size = 16;
219 		break;
220 	case CRYPTO_SHA1:
221 	case CRYPTO_HMAC_SHA1:
222 		reg_ctrl |= CRYPTO_MODE_SHA1;
223 		tmp_ctx->digest_size = 20;
224 		break;
225 	case CRYPTO_SHA256:
226 	case CRYPTO_HMAC_SHA256:
227 		reg_ctrl |= CRYPTO_MODE_SHA256;
228 		tmp_ctx->digest_size = 32;
229 		break;
230 	case CRYPTO_SHA512:
231 	case CRYPTO_HMAC_SHA512:
232 		reg_ctrl |= CRYPTO_MODE_SHA512;
233 		tmp_ctx->digest_size = 64;
234 		break;
235 	case CRYPTO_SM3:
236 	case CRYPTO_HMAC_SM3:
237 		reg_ctrl |= CRYPTO_MODE_SM3;
238 		tmp_ctx->digest_size = 32;
239 		break;
240 	default:
241 		ret = -EINVAL;
242 		goto exit;
243 	}
244 
245 	clear_hash_out_reg();
246 
247 	/* enable hardware padding */
248 	reg_ctrl |= CRYPTO_HW_PAD_ENABLE;
249 	crypto_write(reg_ctrl | CRYPTO_WRITE_MASK_ALL, CRYPTO_HASH_CTL);
250 
251 	/* FIFO input and output data byte swap */
252 	/* such as B0, B1, B2, B3 -> B3, B2, B1, B0 */
253 	reg_ctrl = CRYPTO_DOUT_BYTESWAP | CRYPTO_DOIN_BYTESWAP;
254 	crypto_write(reg_ctrl | CRYPTO_WRITE_MASK_ALL, CRYPTO_FIFO_CTL);
255 
256 	/* enable src_item_done interrupt */
257 	crypto_write(CRYPTO_SRC_ITEM_INT_EN, CRYPTO_DMA_INT_EN);
258 
259 	tmp_ctx->magic = RK_HASH_CTX_MAGIC;
260 
261 	return 0;
262 exit:
263 	/* clear hash setting if init failed */
264 	crypto_write(CRYPTO_WRITE_MASK_ALL | 0, CRYPTO_HASH_CTL);
265 
266 	return ret;
267 }
268 
/*
 * Push one aligned chunk of data through the hash DMA engine.
 * Called by the hash cache with @data already satisfying the address
 * alignment, and length alignment except for the final chunk.
 *
 * @hw_data:      struct rockchip_crypto_priv * (opaque callback arg)
 * @data:         chunk to hash (DATA_ADDR_ALIGN_SIZE aligned)
 * @data_len:     chunk length; multiple of DATA_LEN_ALIGN_SIZE unless last
 * @started_flag: in/out; 0 on the first chunk, set to 1 here
 * @is_last:      nonzero for the final chunk of the message
 *
 * Return: 0 on success, negative error code on timeout/unexpected irq.
 */
static int rk_hash_direct_calc(void *hw_data, const u8 *data,
			       u32 data_len, u8 *started_flag, u8 is_last)
{
	struct rockchip_crypto_priv *priv = hw_data;
	struct rk_hash_ctx *hash_ctx = priv->hw_ctx;
	struct crypto_lli_desc *lli = &hash_ctx->data_lli;
	int ret = -EINVAL;
	u32 tmp = 0, mask = 0;

	assert(IS_ALIGNED((ulong)data, DATA_ADDR_ALIGN_SIZE));
	assert(is_last || IS_ALIGNED(data_len, DATA_LEN_ALIGN_SIZE));

	debug("%s: data = %p, len = %u, s = %x, l = %x\n",
	      __func__, data, data_len, *started_flag, is_last);

	/* rebuild the single LLI descriptor for this chunk */
	memset(lli, 0x00, sizeof(*lli));
	lli->src_addr = (u32)virt_to_phys(data);
	lli->src_len = data_len;
	lli->dma_ctrl = LLI_DMA_CTRL_SRC_DONE;

	if (is_last) {
		lli->user_define |= LLI_USER_STRING_LAST;
		lli->dma_ctrl |= LLI_DMA_CTRL_LAST;
	} else {
		/* pause on self so the next chunk can RESTART in place */
		lli->next_addr = (u32)virt_to_phys(lli);
		lli->dma_ctrl |= LLI_DMA_CTRL_PAUSE;
	}

	if (!(*started_flag)) {
		/* first chunk: program the list head and enable hashing */
		lli->user_define |=
			(LLI_USER_STRING_START | LLI_USER_CPIHER_START);
		crypto_write((u32)virt_to_phys(lli), CRYPTO_DMA_LLI_ADDR);
		crypto_write((CRYPTO_HASH_ENABLE << CRYPTO_WRITE_MASK_SHIFT) |
			     CRYPTO_HASH_ENABLE, CRYPTO_HASH_CTL);
		tmp = CRYPTO_DMA_START;
		*started_flag = 1;
	} else {
		tmp = CRYPTO_DMA_RESTART;
	}

	/* flush cache */
	crypto_flush_cacheline((ulong)lli, sizeof(*lli));
	crypto_flush_cacheline((ulong)data, data_len);

	/* start calculate */
	crypto_write(tmp << CRYPTO_WRITE_MASK_SHIFT | tmp,
		     CRYPTO_DMA_CTL);

	/* mask CRYPTO_SYNC_LOCKSTEP_INT_ST flag */
	mask = ~(mask | CRYPTO_SYNC_LOCKSTEP_INT_ST);

	/* wait calc ok */
	ret = RK_POLL_TIMEOUT(!(crypto_read(CRYPTO_DMA_INT_ST) & mask),
			      RK_CRYPTO_TIMEOUT);

	/* clear interrupt status */
	tmp = crypto_read(CRYPTO_DMA_INT_ST);
	crypto_write(tmp, CRYPTO_DMA_INT_ST);

	/* anything other than src-done / zero-len is an error condition */
	if (tmp != CRYPTO_SRC_ITEM_DONE_INT_ST &&
	    tmp != CRYPTO_ZERO_LEN_INT_ST) {
		debug("[%s] %d: CRYPTO_DMA_INT_ST = 0x%x\n",
		      __func__, __LINE__, tmp);
		goto exit;
	}

	priv->length += data_len;
exit:
	return ret;
}
339 
340 int rk_hash_update(void *ctx, const u8 *data, u32 data_len)
341 {
342 	struct rk_hash_ctx *tmp_ctx = (struct rk_hash_ctx *)ctx;
343 	int ret = -EINVAL;
344 
345 	debug("\n");
346 	if (!tmp_ctx || !data)
347 		goto exit;
348 
349 	if (tmp_ctx->digest_size == 0 || tmp_ctx->magic != RK_HASH_CTX_MAGIC)
350 		goto exit;
351 
352 	ret = crypto_hash_update_with_cache(tmp_ctx->hash_cache,
353 					    data, data_len);
354 
355 exit:
356 	/* free lli list */
357 	if (ret)
358 		hw_hash_clean_ctx(tmp_ctx);
359 
360 	return ret;
361 }
362 
/*
 * Read back the final digest once hardware padding has completed.
 *
 * @ctx:    hash context, validated by magic and digest size
 * @digest: output buffer
 * @len:    digest bytes wanted; must not exceed the algorithm's size
 *
 * Return: 0 on success, -EINVAL on bad arguments, -ETIMEDOUT if the
 *         digest-valid flag never asserts (the output registers are
 *         still read in that case; the caller sees the error code).
 */
int rk_hash_final(void *ctx, u8 *digest, size_t len)
{
	struct rk_hash_ctx *tmp_ctx = (struct rk_hash_ctx *)ctx;
	int ret = -EINVAL;
	u32 i;

	if (!digest)
		goto exit;

	if (!tmp_ctx ||
	    tmp_ctx->digest_size == 0 ||
	    len > tmp_ctx->digest_size ||
	    tmp_ctx->magic != RK_HASH_CTX_MAGIC) {
		goto exit;
	}

	/* wait hash value ok */
	ret = RK_POLL_TIMEOUT(!crypto_read(CRYPTO_HASH_VALID),
			      RK_CRYPTO_TIMEOUT);

	/* copy whole 32-bit words of the digest, big-endian byte order */
	for (i = 0; i < len / 4; i++)
		word2byte_be(crypto_read(CRYPTO_HASH_DOUT_0 + i * 4),
			     digest + i * 4);

	/* trailing partial word, if the caller asked for an odd length */
	if (len % 4) {
		u8 tmp_buf[4];

		word2byte_be(crypto_read(CRYPTO_HASH_DOUT_0 + i * 4), tmp_buf);
		memcpy(digest + i * 4, tmp_buf, len % 4);
	}

	/* clear hash status */
	crypto_write(CRYPTO_HASH_IS_VALID, CRYPTO_HASH_VALID);
	crypto_write(CRYPTO_WRITE_MASK_ALL | 0, CRYPTO_HASH_CTL);

exit:

	return ret;
}
402 
403 static u32 rockchip_crypto_capability(struct udevice *dev)
404 {
405 	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
406 	u32 capability, mask = 0;
407 
408 	capability = priv->soc_data->capability;
409 
410 #if !(CONFIG_IS_ENABLED(ROCKCHIP_CIPHER))
411 	mask |= (CRYPTO_DES | CRYPTO_AES | CRYPTO_SM4);
412 #endif
413 
414 #if !(CONFIG_IS_ENABLED(ROCKCHIP_HMAC))
415 	mask |= (CRYPTO_HMAC_MD5 | CRYPTO_HMAC_SHA1 | CRYPTO_HMAC_SHA256 |
416 			 CRYPTO_HMAC_SHA512 | CRYPTO_HMAC_SM3);
417 #endif
418 
419 #if !(CONFIG_IS_ENABLED(ROCKCHIP_RSA))
420 	mask |= (CRYPTO_RSA512 | CRYPTO_RSA1024 | CRYPTO_RSA2048 |
421 			 CRYPTO_RSA3072 | CRYPTO_RSA4096);
422 #endif
423 
424 	return capability & (~mask);
425 }
426 
427 static int rockchip_crypto_sha_init(struct udevice *dev, sha_context *ctx)
428 {
429 	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
430 	struct rk_hash_ctx *hash_ctx = priv->hw_ctx;
431 
432 	if (!ctx)
433 		return -EINVAL;
434 
435 	memset(hash_ctx, 0x00, sizeof(*hash_ctx));
436 
437 	priv->length = 0;
438 
439 	hash_ctx->hash_cache = crypto_hash_cache_alloc(rk_hash_direct_calc,
440 						       priv, ctx->length,
441 						       DATA_ADDR_ALIGN_SIZE,
442 						       DATA_LEN_ALIGN_SIZE);
443 	if (!hash_ctx->hash_cache)
444 		return -EFAULT;
445 
446 	return rk_hash_init(hash_ctx, ctx->algo);
447 }
448 
449 static int rockchip_crypto_sha_update(struct udevice *dev,
450 				      u32 *input, u32 len)
451 {
452 	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
453 	int ret, i;
454 	u8 *p;
455 
456 	if (!len)
457 		return -EINVAL;
458 
459 	p = (u8 *)input;
460 
461 	for (i = 0; i < len / HASH_UPDATE_LIMIT; i++, p += HASH_UPDATE_LIMIT) {
462 		ret = rk_hash_update(priv->hw_ctx, p, HASH_UPDATE_LIMIT);
463 		if (ret)
464 			goto exit;
465 	}
466 
467 	if (len % HASH_UPDATE_LIMIT)
468 		ret = rk_hash_update(priv->hw_ctx, p, len % HASH_UPDATE_LIMIT);
469 
470 exit:
471 	return ret;
472 }
473 
474 static int rockchip_crypto_sha_final(struct udevice *dev,
475 				     sha_context *ctx, u8 *output)
476 {
477 	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
478 	u32 nbits;
479 	int ret;
480 
481 	nbits = crypto_algo_nbits(ctx->algo);
482 
483 	if (priv->length != ctx->length) {
484 		printf("total length(0x%08x) != init length(0x%08x)!\n",
485 		       priv->length, ctx->length);
486 		ret = -EIO;
487 		goto exit;
488 	}
489 
490 	ret = rk_hash_final(priv->hw_ctx, (u8 *)output, BITS2BYTE(nbits));
491 
492 exit:
493 	hw_hash_clean_ctx(priv->hw_ctx);
494 	return ret;
495 }
496 
497 #if CONFIG_IS_ENABLED(ROCKCHIP_HMAC)
498 int rk_hmac_init(void *hw_ctx, u32 algo, u8 *key, u32 key_len)
499 {
500 	u32 reg_ctrl = 0;
501 	int ret;
502 
503 	if (!key || !key_len || key_len > 64)
504 		return -EINVAL;
505 
506 	clear_key_regs();
507 
508 	write_key_reg(0, key, key_len);
509 
510 	ret = rk_hash_init(hw_ctx, algo);
511 	if (ret)
512 		return ret;
513 
514 	reg_ctrl = crypto_read(CRYPTO_HASH_CTL) | CRYPTO_HMAC_ENABLE;
515 	crypto_write(reg_ctrl | CRYPTO_WRITE_MASK_ALL, CRYPTO_HASH_CTL);
516 
517 	return ret;
518 }
519 
520 static int rockchip_crypto_hmac_init(struct udevice *dev,
521 				     sha_context *ctx, u8 *key, u32 key_len)
522 {
523 	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
524 	struct rk_hash_ctx *hash_ctx = priv->hw_ctx;
525 
526 	if (!ctx)
527 		return -EINVAL;
528 
529 	memset(hash_ctx, 0x00, sizeof(*hash_ctx));
530 
531 	priv->length = 0;
532 
533 	hash_ctx->hash_cache = crypto_hash_cache_alloc(rk_hash_direct_calc,
534 						       priv, ctx->length,
535 						       DATA_ADDR_ALIGN_SIZE,
536 						       DATA_LEN_ALIGN_SIZE);
537 	if (!hash_ctx->hash_cache)
538 		return -EFAULT;
539 
540 	return rk_hmac_init(priv->hw_ctx, ctx->algo, key, key_len);
541 }
542 
/* HMAC data path is identical to the plain hash data path. */
static int rockchip_crypto_hmac_update(struct udevice *dev,
				       u32 *input, u32 len)
{
	return rockchip_crypto_sha_update(dev, input, len);
}
548 
/* HMAC finalization is identical to the plain hash finalization. */
static int rockchip_crypto_hmac_final(struct udevice *dev,
				      sha_context *ctx, u8 *output)
{
	return rockchip_crypto_sha_final(dev, ctx, output);
}
554 
555 #endif
556 
557 #if CONFIG_IS_ENABLED(ROCKCHIP_CIPHER)
/* Key channel used for all cipher ops; never written in this file,
 * so it stays at its zero-initialized value (channel 0).
 */
static u8 g_key_chn;

/* Map driver RK_MODE_* values to CRYPTO_BC_* block-cipher mode bits. */
static const u32 rk_mode2bc_mode[RK_MODE_MAX] = {
	[RK_MODE_ECB] = CRYPTO_BC_ECB,
	[RK_MODE_CBC] = CRYPTO_BC_CBC,
	[RK_MODE_CTS] = CRYPTO_BC_CTS,
	[RK_MODE_CTR] = CRYPTO_BC_CTR,
	[RK_MODE_CFB] = CRYPTO_BC_CFB,
	[RK_MODE_OFB] = CRYPTO_BC_OFB,
	[RK_MODE_XTS] = CRYPTO_BC_XTS,
};
569 
570 static inline bool is_des_mode(u32 rk_mode)
571 {
572 	return (rk_mode == RK_MODE_ECB ||
573 		rk_mode == RK_MODE_CBC ||
574 		rk_mode == RK_MODE_CFB ||
575 		rk_mode == RK_MODE_OFB);
576 }
577 
578 static void dump_crypto_state(struct crypto_lli_desc *desc, int ret)
579 {
580 	IMSG("%s\n", ret == -ETIME ? "timeout" : "dismatch");
581 
582 	IMSG("CRYPTO_DMA_INT_ST = %08x, expect_int = %08x\n",
583 	     tmp, expt_int);
584 	IMSG("data desc		= %p\n", desc);
585 	IMSG("\taddr_in		= [%08x <=> %08x]\n",
586 	     desc->src_addr, (u32)virt_to_phys(in));
587 	IMSG("\taddr_out	= [%08x <=> %08x]\n",
588 	     desc->dst_addr, (u32)virt_to_phys(out));
589 	IMSG("\tsrc_len		= [%08x <=> %08x]\n",
590 	     desc->src_len, (u32)len);
591 	IMSG("\tdst_len		= %08x\n", desc->dst_len);
592 	IMSG("\tdma_ctl		= %08x\n", desc->dma_ctrl);
593 	IMSG("\tuser_define	= %08x\n", desc->user_define);
594 
595 	IMSG("\n\nDMA CRYPTO_DMA_LLI_ADDR status = %08x\n",
596 	     crypto_read(CRYPTO_DMA_LLI_ADDR));
597 	IMSG("DMA CRYPTO_DMA_ST status = %08x\n",
598 	     crypto_read(CRYPTO_DMA_ST));
599 	IMSG("DMA CRYPTO_DMA_STATE status = %08x\n",
600 	     crypto_read(CRYPTO_DMA_STATE));
601 	IMSG("DMA CRYPTO_DMA_LLI_RADDR status = %08x\n",
602 	     crypto_read(CRYPTO_DMA_LLI_RADDR));
603 	IMSG("DMA CRYPTO_DMA_SRC_RADDR status = %08x\n",
604 	     crypto_read(CRYPTO_DMA_SRC_RADDR));
605 	IMSG("DMA CRYPTO_DMA_DST_RADDR status = %08x\n",
606 	     crypto_read(CRYPTO_DMA_DST_RADDR));
607 	IMSG("DMA CRYPTO_CIPHER_ST status = %08x\n",
608 	     crypto_read(CRYPTO_CIPHER_ST));
609 	IMSG("DMA CRYPTO_CIPHER_STATE status = %08x\n",
610 	     crypto_read(CRYPTO_CIPHER_STATE));
611 	IMSG("DMA CRYPTO_TAG_VALID status = %08x\n",
612 	     crypto_read(CRYPTO_TAG_VALID));
613 	IMSG("LOCKSTEP status = %08x\n\n",
614 	     crypto_read(0x618));
615 
616 	IMSG("dst %dbyte not transferred\n",
617 	     desc->dst_addr + desc->dst_len -
618 	     crypto_read(CRYPTO_DMA_DST_RADDR));
619 }
620 
/*
 * Program the block-cipher engine: algorithm, key size, chaining
 * mode, direction, keys and IV. Does not start a transfer; that is
 * hw_cipher_crypt()'s job.
 *
 * @chn:     key channel (XTS tweak key goes into @chn + 4)
 * @key:     cipher key
 * @twk_key: XTS tweak key (only used when mode is XTS)
 * @key_len: key length in bytes; selects DES vs 3DES and AES key size
 * @iv:      IV, may be NULL (IV registers are then left cleared)
 * @iv_len:  IV length in bytes
 * @algo:    CRYPTO_DES / CRYPTO_AES / CRYPTO_SM4
 * @mode:    driver mode word; RK_GET_RK_MODE() extracts RK_MODE_*
 * @enc:     true to encrypt, false to decrypt
 *
 * Return: 0 on success, -EINVAL on bad mode/algo/key size.
 */
static int hw_cipher_init(u32 chn, const u8 *key, const u8 *twk_key,
			  u32 key_len, const u8 *iv, u32 iv_len,
			  u32 algo, u32 mode, bool enc)
{
	u32 rk_mode = RK_GET_RK_MODE(mode);
	u32 key_chn_sel = chn;
	u32 reg_ctrl = 0;

	IMSG("%s: key addr is %p, key_len is %d, iv addr is %p",
	     __func__, key, key_len, iv);
	if (rk_mode >= RK_MODE_MAX)
		return -EINVAL;

	switch (algo) {
	case CRYPTO_DES:
		/* anything longer than one DES block means 3DES */
		if (key_len > DES_BLOCK_SIZE)
			reg_ctrl |= CRYPTO_BC_TDES;
		else
			reg_ctrl |= CRYPTO_BC_DES;
		break;
	case CRYPTO_AES:
		reg_ctrl |= CRYPTO_BC_AES;
		break;
	case CRYPTO_SM4:
		reg_ctrl |= CRYPTO_BC_SM4;
		break;
	default:
		return -EINVAL;
	}

	if (algo == CRYPTO_AES || algo == CRYPTO_SM4) {
		switch (key_len) {
		case AES_KEYSIZE_128:
			reg_ctrl |= CRYPTO_BC_128_bit_key;
			break;
		case AES_KEYSIZE_192:
			reg_ctrl |= CRYPTO_BC_192_bit_key;
			break;
		case AES_KEYSIZE_256:
			reg_ctrl |= CRYPTO_BC_256_bit_key;
			break;
		default:
			return -EINVAL;
		}
	}

	reg_ctrl |= rk_mode2bc_mode[rk_mode];
	if (!enc)
		reg_ctrl |= CRYPTO_BC_DECRYPT;

	/* write key data to reg */
	write_key_reg(key_chn_sel, key, key_len);

	/* write twk key for xts mode */
	if (rk_mode == RK_MODE_XTS)
		write_key_reg(key_chn_sel + 4, twk_key, key_len);

	/* set iv reg */
	set_iv_reg(chn, iv, iv_len);

	/* din_swap set 1, dout_swap set 1, default 1. */
	crypto_write(0x00030003, CRYPTO_FIFO_CTL);
	crypto_write(CRYPTO_LIST_DONE_INT_EN | CRYPTO_DST_ITEM_DONE_INT_EN,
		     CRYPTO_DMA_INT_EN);

	crypto_write(reg_ctrl | CRYPTO_WRITE_MASK_ALL, CRYPTO_BC_CTL);

	return 0;
}
690 
/*
 * Run one cipher DMA transfer through the engine configured by
 * hw_cipher_init(). Bounce buffers are allocated when @in/@out are
 * not DMA-aligned or when CTR mode needs the length rounded up to a
 * block multiple.
 *
 * @in:      input data
 * @out:     output buffer, may be NULL (no destination programmed)
 * @len:     payload length in bytes
 * @aad/@aad_len/@tag/@tag_len: AEAD parameters; unused by current
 *           callers (always NULL/0), only @aad is cache-flushed
 * @mode:    driver mode word
 *
 * Return: 0 on success, -EINVAL on bad CTS length, -1 on alloc
 *         failure or DMA error/timeout.
 */
static int hw_cipher_crypt(const u8 *in, u8 *out, u64 len,
			   const u8 *aad, u64 aad_len, u8 *tag, u32 tag_len,
			   u32 mode)
{
	struct crypto_lli_desc *data_desc = NULL;
	u8 *dma_in = NULL, *dma_out = NULL;
	u32 rk_mode = RK_GET_RK_MODE(mode);
	u32 reg_ctrl = 0, tmp_len = 0;
	u32 expt_int = 0, mask = 0;
	u32 key_chn = g_key_chn;
	u32 tmp, dst_len = 0;
	int ret = -1;

	/* CTS needs strictly more than one block to steal from */
	if (rk_mode == RK_MODE_CTS && len <= AES_BLOCK_SIZE) {
		printf("CTS mode length %u < 16Byte\n", (u32)len);
		return -EINVAL;
	}

	/* CTR transfers must be a whole number of blocks */
	tmp_len = (rk_mode == RK_MODE_CTR) ? ROUNDUP(len, AES_BLOCK_SIZE) : len;

	data_desc = align_malloc(sizeof(*data_desc), LLI_ADDR_ALIGN_SIZE);
	if (!data_desc)
		goto exit;

	/* use @in directly when aligned and no CTR padding is needed */
	if (IS_ALIGNED((ulong)in, DATA_ADDR_ALIGN_SIZE) && tmp_len == len)
		dma_in = (void *)in;
	else
		dma_in = align_malloc(tmp_len, DATA_ADDR_ALIGN_SIZE);
	if (!dma_in)
		goto exit;

	if (out) {
		if (IS_ALIGNED((ulong)out, DATA_ADDR_ALIGN_SIZE) &&
		    tmp_len == len)
			dma_out = out;
		else
			dma_out = align_malloc(tmp_len, DATA_ADDR_ALIGN_SIZE);
		if (!dma_out)
			goto exit;
		dst_len = tmp_len;
	}

	memset(data_desc, 0x00, sizeof(*data_desc));
	/* NOTE(review): only @len bytes are copied; when CTR rounds
	 * tmp_len up, the bounce buffer's pad bytes are uninitialized.
	 */
	if (dma_in != in)
		memcpy(dma_in, in, len);

	/* single self-contained descriptor: start and last in one item */
	data_desc->src_addr    = (u32)virt_to_phys(dma_in);
	data_desc->src_len     = tmp_len;
	data_desc->dst_addr    = (u32)virt_to_phys(dma_out);
	data_desc->dst_len     = dst_len;
	data_desc->dma_ctrl    = LLI_DMA_CTRL_DST_DONE |
				 LLI_DMA_CTRL_LAST;
	data_desc->user_define = LLI_USER_CPIHER_START |
				 LLI_USER_STRING_START |
				 LLI_USER_STRING_LAST |
				 (key_chn << 4);
	expt_int = CRYPTO_DST_ITEM_DONE_INT_ST;

	crypto_write((u32)virt_to_phys(data_desc), CRYPTO_DMA_LLI_ADDR);

	/* make descriptor and buffers visible to the DMA engine */
	cache_op_inner(DCACHE_AREA_CLEAN, data_desc, sizeof(*data_desc));
	cache_op_inner(DCACHE_AREA_CLEAN, (void *)aad, aad_len);
	cache_op_inner(DCACHE_AREA_CLEAN, dma_in, tmp_len);
	cache_op_inner(DCACHE_AREA_INVALIDATE, dma_out, tmp_len);

	/* din_swap set 1, dout_swap set 1, default 1. */
	crypto_write(0x00030003, CRYPTO_FIFO_CTL);
	crypto_write(CRYPTO_DST_ITEM_DONE_INT_EN | CRYPTO_LIST_DONE_INT_EN,
		     CRYPTO_DMA_INT_EN);

	reg_ctrl = crypto_read(CRYPTO_BC_CTL) | CRYPTO_BC_ENABLE;
	crypto_write(reg_ctrl | CRYPTO_WRITE_MASK_ALL, CRYPTO_BC_CTL);
	crypto_write(0x00010001, CRYPTO_DMA_CTL);//start

	/* ignore the lockstep status bit while polling */
	mask = ~(mask | CRYPTO_SYNC_LOCKSTEP_INT_ST);

	/* wait calc ok */
	ret = RK_POLL_TIMEOUT(!(crypto_read(CRYPTO_DMA_INT_ST) & mask),
			      RK_CRYPTO_TIMEOUT);
	tmp = crypto_read(CRYPTO_DMA_INT_ST);
	crypto_write(tmp, CRYPTO_DMA_INT_ST);

	if ((tmp & mask) == expt_int) {
		/* copy back only the requested @len, not the CTR padding */
		if (out && out != dma_out)
			memcpy(out, dma_out, len);
	} else {
		dump_crypto_state(data_desc, ret);
		ret = -1;
	}

exit:
	crypto_write(0xffff0000, CRYPTO_BC_CTL);//bc_ctl disable
	align_free(data_desc);
	if (dma_in && dma_in != in)
		align_free(dma_in);
	if (dma_out && dma_out != out)
		align_free(dma_out);

	return ret;
}
791 
792 static int hw_aes_init(u32 chn, const u8 *key, const u8 *twk_key, u32 key_len,
793 		       const u8 *iv, u32 iv_len, u32 mode, bool enc)
794 {
795 	u32 rk_mode = RK_GET_RK_MODE(mode);
796 
797 	if (iv_len > AES_BLOCK_SIZE)
798 		return -EINVAL;
799 
800 	if (rk_mode != RK_MODE_ECB) {
801 		if (!iv || iv_len != AES_BLOCK_SIZE)
802 			return -EINVAL;
803 	} else {
804 		iv_len = 0;
805 	}
806 
807 	if (rk_mode == RK_MODE_XTS) {
808 		if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_256)
809 			return -EINVAL;
810 
811 		if (!key || !twk_key)
812 			return -EINVAL;
813 	} else {
814 		if (key_len != AES_KEYSIZE_128 &&
815 		    key_len != AES_KEYSIZE_192 &&
816 		    key_len != AES_KEYSIZE_256)
817 			return -EINVAL;
818 	}
819 
820 	return hw_cipher_init(chn, key, twk_key, key_len, iv, iv_len,
821 			      CRYPTO_AES, mode, enc);
822 }
823 
824 static int hw_sm4_init(u32  chn, const u8 *key, const u8 *twk_key, u32 key_len,
825 		       const u8 *iv, u32 iv_len, u32 mode, bool enc)
826 {
827 	u32 rk_mode = RK_GET_RK_MODE(mode);
828 
829 	if (iv_len > SM4_BLOCK_SIZE || key_len != SM4_KEYSIZE)
830 		return -EINVAL;
831 
832 	if (rk_mode != RK_MODE_ECB) {
833 		if (!iv || iv_len != SM4_BLOCK_SIZE)
834 			return -EINVAL;
835 	} else {
836 		iv_len = 0;
837 	}
838 
839 	if (rk_mode == RK_MODE_XTS) {
840 		if (!key || !twk_key)
841 			return -EINVAL;
842 	}
843 
844 	return hw_cipher_init(chn, key, twk_key, key_len, iv, iv_len,
845 			      CRYPTO_SM4, mode, enc);
846 }
847 
848 int rk_crypto_des(struct udevice *dev, u32 mode, const u8 *key, u32 key_len,
849 		  const u8 *iv, const u8 *in, u8 *out, u32 len, bool enc)
850 {
851 	u32 rk_mode = RK_GET_RK_MODE(mode);
852 	u8 tmp_key[24];
853 	int ret;
854 
855 	if (!is_des_mode(rk_mode))
856 		return -EINVAL;
857 
858 	if (key_len == DES_BLOCK_SIZE || key_len == 3 * DES_BLOCK_SIZE) {
859 		memcpy(tmp_key, key, key_len);
860 	} else if (key_len == 2 * DES_BLOCK_SIZE) {
861 		memcpy(tmp_key, key, 16);
862 		memcpy(tmp_key + 16, key, 8);
863 		key_len = 3 * DES_BLOCK_SIZE;
864 	} else {
865 		return -EINVAL;
866 	}
867 
868 	ret = hw_cipher_init(0, tmp_key, NULL, key_len, iv, DES_BLOCK_SIZE,
869 			     CRYPTO_DES, mode, enc);
870 	if (ret)
871 		goto exit;
872 
873 	ret = hw_cipher_crypt(in, out, len, NULL, 0,
874 			      NULL, 0, mode);
875 
876 exit:
877 	return ret;
878 }
879 
880 int rk_crypto_aes(struct udevice *dev, u32 mode,
881 		  const u8 *key, const u8 *twk_key, u32 key_len,
882 		  const u8 *iv, u32 iv_len,
883 		  const u8 *in, u8 *out, u32 len, bool enc)
884 {
885 	int ret;
886 
887 	/* RV1126/RV1109 do not support aes-192 */
888 #if defined(CONFIG_ROCKCHIP_RV1126)
889 	if (key_len == AES_KEYSIZE_192)
890 		return -EINVAL;
891 #endif
892 
893 	ret = hw_aes_init(0, key, twk_key, key_len, iv, iv_len, mode, enc);
894 	if (ret)
895 		return ret;
896 
897 	return hw_cipher_crypt(in, out, len, NULL, 0,
898 			       NULL, 0, mode);
899 }
900 
901 int rk_crypto_sm4(struct udevice *dev, u32 mode,
902 		  const u8 *key, const u8 *twk_key, u32 key_len,
903 		  const u8 *iv, u32 iv_len,
904 		  const u8 *in, u8 *out, u32 len, bool enc)
905 {
906 	int ret;
907 
908 	ret = hw_sm4_init(0, key, twk_key, key_len, iv, iv_len, mode, enc);
909 	if (ret)
910 		return ret;
911 
912 	return hw_cipher_crypt(in, out, len, NULL, 0, NULL, 0, mode);
913 }
914 
915 int rockchip_crypto_cipher(struct udevice *dev, cipher_context *ctx,
916 			   const u8 *in, u8 *out, u32 len, bool enc)
917 {
918 	switch (ctx->algo) {
919 	case CRYPTO_DES:
920 		return rk_crypto_des(dev, ctx->mode, ctx->key, ctx->key_len,
921 				     ctx->iv, in, out, len, enc);
922 	case CRYPTO_AES:
923 		return rk_crypto_aes(dev, ctx->mode,
924 				     ctx->key, ctx->twk_key, ctx->key_len,
925 				     ctx->iv, ctx->iv_len, in, out, len, enc);
926 	case CRYPTO_SM4:
927 		return rk_crypto_sm4(dev, ctx->mode,
928 				     ctx->key, ctx->twk_key, ctx->key_len,
929 				     ctx->iv, ctx->iv_len, in, out, len, enc);
930 	default:
931 		return -EINVAL;
932 	}
933 }
934 #endif
935 
936 #if CONFIG_IS_ENABLED(ROCKCHIP_RSA)
/*
 * RSA public-key operation via the PKA: output = sign^e mod n.
 *
 * @ctx:    RSA key (n, e, and c; presumably c is a precomputed
 *          montgomery-style constant for rk_exptmod_np -- TODO confirm)
 * @sign:   signature, n_words little-endian words
 * @output: receives the decrypted signature block on success
 *
 * Return: 0 on success, -EINVAL on bad key size, -ENOMEM on
 *         allocation failure, or an error from the PKA layer.
 */
static int rockchip_crypto_rsa_verify(struct udevice *dev, rsa_key *ctx,
				      u8 *sign, u8 *output)
{
	struct mpa_num *mpa_m = NULL, *mpa_e = NULL, *mpa_n = NULL;
	struct mpa_num *mpa_c = NULL, *mpa_result = NULL;
	u32 n_bits, n_words;
	u32 *rsa_result;
	int ret;

	if (!ctx)
		return -EINVAL;

	/* only the advertised RSA key sizes are accepted */
	if (ctx->algo != CRYPTO_RSA512 &&
	    ctx->algo != CRYPTO_RSA1024 &&
	    ctx->algo != CRYPTO_RSA2048 &&
	    ctx->algo != CRYPTO_RSA3072 &&
	    ctx->algo != CRYPTO_RSA4096)
		return -EINVAL;

	n_bits = crypto_algo_nbits(ctx->algo);
	n_words = BITS2WORD(n_bits);

	rsa_result = malloc(BITS2BYTE(n_bits));
	if (!rsa_result)
		return -ENOMEM;

	memset(rsa_result, 0x00, BITS2BYTE(n_bits));

	/* allocate all the big-number wrappers; any failure aborts */
	ret = rk_mpa_alloc(&mpa_m);
	ret |= rk_mpa_alloc(&mpa_e);
	ret |= rk_mpa_alloc(&mpa_n);
	ret |= rk_mpa_alloc(&mpa_c);
	ret |= rk_mpa_alloc(&mpa_result);
	if (ret)
		goto exit;

	/* wrappers borrow the caller's buffers; no copies are made */
	mpa_m->d = (void *)sign;
	mpa_e->d = (void *)ctx->e;
	mpa_n->d = (void *)ctx->n;
	mpa_c->d = (void *)ctx->c;
	mpa_result->d = (void *)rsa_result;

	mpa_m->size = n_words;
	mpa_e->size = n_words;
	mpa_n->size = n_words;
	mpa_c->size = n_words;
	mpa_result->size = n_words;

	ret = rk_exptmod_np(mpa_m, mpa_e, mpa_n, mpa_c, mpa_result);
	if (!ret)
		memcpy(output, rsa_result, BITS2BYTE(n_bits));

exit:
	free(rsa_result);
	rk_mpa_free(&mpa_m);
	rk_mpa_free(&mpa_e);
	rk_mpa_free(&mpa_n);
	rk_mpa_free(&mpa_c);
	rk_mpa_free(&mpa_result);

	return ret;
}
999 #endif
1000 
/* UCLASS_CRYPTO operations; optional hooks compiled in per Kconfig. */
static const struct dm_crypto_ops rockchip_crypto_ops = {
	.capability   = rockchip_crypto_capability,
	.sha_init     = rockchip_crypto_sha_init,
	.sha_update   = rockchip_crypto_sha_update,
	.sha_final    = rockchip_crypto_sha_final,
#if CONFIG_IS_ENABLED(ROCKCHIP_RSA)
	.rsa_verify   = rockchip_crypto_rsa_verify,
#endif
#if CONFIG_IS_ENABLED(ROCKCHIP_HMAC)
	.hmac_init    = rockchip_crypto_hmac_init,
	.hmac_update  = rockchip_crypto_hmac_update,
	.hmac_final   = rockchip_crypto_hmac_final,
#endif
#if CONFIG_IS_ENABLED(ROCKCHIP_CIPHER)
	.cipher_crypt = rockchip_crypto_cipher,
#endif
};
1018 
1019 /*
1020  * Only use "clocks" to parse crypto clock id and use rockchip_get_clk().
1021  * Because we always add crypto node in U-Boot dts, when kernel dtb enabled :
1022  *
1023  *   1. There is cru phandle mismatch between U-Boot and kernel dtb;
1024  *   2. CONFIG_OF_SPL_REMOVE_PROPS removes clock property;
1025  */
/*
 * Read the "clocks", "clock-frequency" and "reg" properties into
 * priv and publish the register base in the global crypto_base.
 *
 * NOTE(review): nclocks is first set from the "clocks" cell count,
 * then overwritten with the "clock-frequency" cell count. probe()
 * then indexes the raw clocks cells as (phandle, id) pairs using the
 * second count -- this looks intentional (one frequency per pair)
 * but verify against the device-tree layout.
 *
 * Return: 0 on success, -EINVAL/-ENOMEM on failure (buffers freed).
 */
static int rockchip_crypto_ofdata_to_platdata(struct udevice *dev)
{
	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
	int len, ret = -EINVAL;

	if (!dev_read_prop(dev, "clocks", &len)) {
		printf("Can't find \"clocks\" property\n");
		return -EINVAL;
	}

	memset(priv, 0x00, sizeof(*priv));
	priv->clocks = malloc(len);
	if (!priv->clocks)
		return -ENOMEM;

	priv->nclocks = len / sizeof(u32);
	if (dev_read_u32_array(dev, "clocks", (u32 *)priv->clocks,
			       priv->nclocks)) {
		printf("Can't read \"clocks\" property\n");
		ret = -EINVAL;
		goto exit;
	}

	if (!dev_read_prop(dev, "clock-frequency", &len)) {
		printf("Can't find \"clock-frequency\" property\n");
		ret = -EINVAL;
		goto exit;
	}

	priv->frequencies = malloc(len);
	if (!priv->frequencies) {
		ret = -ENOMEM;
		goto exit;
	}

	/* nclocks now counts frequency entries; see NOTE above */
	priv->nclocks = len / sizeof(u32);
	if (dev_read_u32_array(dev, "clock-frequency", priv->frequencies,
			       priv->nclocks)) {
		printf("Can't read \"clock-frequency\" property\n");
		ret = -EINVAL;
		goto exit;
	}

	priv->reg = (fdt_addr_t)dev_read_addr_ptr(dev);

	/* register base consumed by the crypto_read/crypto_write helpers */
	crypto_base = priv->reg;

	return 0;
exit:
	if (priv->clocks)
		free(priv->clocks);

	if (priv->frequencies)
		free(priv->frequencies);

	return ret;
}
1083 
1084 static int rockchip_crypto_probe(struct udevice *dev)
1085 {
1086 	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
1087 	struct rk_crypto_soc_data *sdata;
1088 	int i, ret = 0;
1089 	u32* clocks;
1090 
1091 	sdata = (struct rk_crypto_soc_data *)dev_get_driver_data(dev);
1092 	priv->soc_data = sdata;
1093 
1094 	priv->hw_ctx = memalign(LLI_ADDR_ALIGN_SIZE,
1095 				sizeof(struct rk_hash_ctx));
1096 	if (!priv->hw_ctx)
1097 		return -ENOMEM;
1098 
1099 	ret = rockchip_get_clk(&priv->clk.dev);
1100 	if (ret) {
1101 		printf("Failed to get clk device, ret=%d\n", ret);
1102 		return ret;
1103 	}
1104 
1105 	clocks = (u32 *)priv->clocks;
1106 	for (i = 0; i < priv->nclocks; i++) {
1107 		priv->clk.id = clocks[i * 2 + 1];
1108 		ret = clk_set_rate(&priv->clk, priv->frequencies[i]);
1109 		if (ret < 0) {
1110 			printf("%s: Failed to set clk(%ld): ret=%d\n",
1111 			       __func__, priv->clk.id, ret);
1112 			return ret;
1113 		}
1114 	}
1115 
1116 	hw_crypto_reset();
1117 
1118 	return 0;
1119 }
1120 
/* px30 / rk3308: hashes, HMAC, RSA, DES and AES (no SM3/SM4). */
static const struct rk_crypto_soc_data soc_data_base = {
	.capability = CRYPTO_MD5 |
		      CRYPTO_SHA1 |
		      CRYPTO_SHA256 |
		      CRYPTO_SHA512 |
		      CRYPTO_HMAC_MD5 |
		      CRYPTO_HMAC_SHA1 |
		      CRYPTO_HMAC_SHA256 |
		      CRYPTO_HMAC_SHA512 |
		      CRYPTO_RSA512 |
		      CRYPTO_RSA1024 |
		      CRYPTO_RSA2048 |
		      CRYPTO_RSA3072 |
		      CRYPTO_RSA4096 |
		      CRYPTO_DES |
		      CRYPTO_AES,
};
1138 
/* rv1126 / rk3568: full base set plus the SM3/SM4 national ciphers. */
static const struct rk_crypto_soc_data soc_data_base_sm = {
	.capability = CRYPTO_MD5 |
		      CRYPTO_SHA1 |
		      CRYPTO_SHA256 |
		      CRYPTO_SHA512 |
		      CRYPTO_SM3 |
		      CRYPTO_HMAC_MD5 |
		      CRYPTO_HMAC_SHA1 |
		      CRYPTO_HMAC_SHA256 |
		      CRYPTO_HMAC_SHA512 |
		      CRYPTO_HMAC_SM3 |
		      CRYPTO_RSA512 |
		      CRYPTO_RSA1024 |
		      CRYPTO_RSA2048 |
		      CRYPTO_RSA3072 |
		      CRYPTO_RSA4096 |
		      CRYPTO_DES |
		      CRYPTO_AES |
		      CRYPTO_SM4,
};
1159 
/* rk1808: hash/HMAC (no SHA-512/SM3) and RSA only -- no ciphers. */
static const struct rk_crypto_soc_data soc_data_rk1808 = {
	.capability = CRYPTO_MD5 |
		      CRYPTO_SHA1 |
		      CRYPTO_SHA256 |
		      CRYPTO_HMAC_MD5 |
		      CRYPTO_HMAC_SHA1 |
		      CRYPTO_HMAC_SHA256 |
		      CRYPTO_RSA512 |
		      CRYPTO_RSA1024 |
		      CRYPTO_RSA2048 |
		      CRYPTO_RSA3072 |
		      CRYPTO_RSA4096,
};
1173 
/* Compatible strings, each bound to its SoC capability table. */
static const struct udevice_id rockchip_crypto_ids[] = {
	{
		.compatible = "rockchip,px30-crypto",
		.data = (ulong)&soc_data_base
	},
	{
		.compatible = "rockchip,rk1808-crypto",
		.data = (ulong)&soc_data_rk1808
	},
	{
		.compatible = "rockchip,rk3308-crypto",
		.data = (ulong)&soc_data_base
	},
	{
		.compatible = "rockchip,rv1126-crypto",
		.data = (ulong)&soc_data_base_sm
	},
	{
		.compatible = "rockchip,rk3568-crypto",
		.data = (ulong)&soc_data_base_sm
	},
	{ }
};
1197 
/* Driver-model registration for the Rockchip crypto v2 engine. */
U_BOOT_DRIVER(rockchip_crypto_v2) = {
	.name		= "rockchip_crypto_v2",
	.id		= UCLASS_CRYPTO,
	.of_match	= rockchip_crypto_ids,
	.ops		= &rockchip_crypto_ops,
	.probe		= rockchip_crypto_probe,
	.ofdata_to_platdata = rockchip_crypto_ofdata_to_platdata,
	.priv_auto_alloc_size = sizeof(struct rockchip_crypto_priv),
};
1207