// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (c) 2019 Fuzhou Rockchip Electronics Co., Ltd
 */

#include <common.h>
#include <clk.h>
#include <crypto.h>
#include <dm.h>
#include <asm/io.h>
#include <clk-uclass.h>
#include <asm/arch/hardware.h>
#include <asm/arch/clock.h>
#include <rockchip/crypto_hash_cache.h>
#include <rockchip/crypto_v2.h>
#include <rockchip/crypto_v2_pka.h>

#define RK_HASH_CTX_MAGIC		0x1A1A1A1A

#ifdef DEBUG
#define IMSG(format, ...) printf("[%s, %05d]-trace: " format "\n", \
				 __func__, __LINE__, ##__VA_ARGS__)
#else
#define IMSG(format, ...)
#endif

struct crypto_lli_desc {
	u32 src_addr;
	u32 src_len;
	u32 dst_addr;
	u32 dst_len;
	u32 user_define;
	u32 reserve;
	u32 dma_ctrl;
	u32 next_addr;
};
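
/*
 * Illustrative sketch, not driver code: the DMA engine consumes these
 * 32-byte descriptors as a linked list. hw_cipher_crypt() below chains an
 * AAD descriptor in front of the payload descriptor for CCM/GCM, roughly:
 *
 *	aad_desc->next_addr  = (u32)virt_to_phys(data_desc);
 *	data_desc->dma_ctrl |= LLI_DMA_CTRL_LAST;
 *
 * All addresses are physical, and both descriptors and data buffers are
 * flushed to DRAM (crypto_flush_cacheline) before CRYPTO_DMA_CTL is kicked,
 * since the engine reads memory behind the CPU cache.
 */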

struct rk_hash_ctx {
	struct crypto_lli_desc		data_lli;	/* lli desc */
	struct crypto_hash_cache	*hash_cache;
	u32				magic;		/* to check ctx */
	u32				algo;		/* hash algo */
	u8				digest_size;	/* hash out length */
	u8				reserved[3];
};

struct rk_crypto_soc_data {
	u32 capability;
	u32 (*dynamic_cap)(void);
};

struct rockchip_crypto_priv {
	fdt_addr_t			reg;
	u32				frequency;
	char				*clocks;
	u32				*frequencies;
	u32				nclocks;
	u32				freq_nclocks;
	u32				length;
	struct rk_hash_ctx		*hw_ctx;
	struct rk_crypto_soc_data	*soc_data;
};

#define LLI_ADDR_ALIGN_SIZE	8
#define DATA_ADDR_ALIGN_SIZE	8
#define DATA_LEN_ALIGN_SIZE	64

/* Crypto timeout is 500 ms; updates are split so at most 32 MiB is hashed per pass. */
#define HASH_UPDATE_LIMIT	(32 * 1024 * 1024)
#define RK_CRYPTO_TIMEOUT	500000

#define RK_POLL_TIMEOUT(condition, timeout) \
({ \
	int time_out = timeout; \
	while (condition) { \
		if (--time_out <= 0) { \
			debug("[%s] %d: time out!\n", __func__,\
				__LINE__); \
			break; \
		} \
		udelay(1); \
	} \
	(time_out <= 0) ? -ETIMEDOUT : 0; \
})

#define WAIT_TAG_VALID(channel, timeout) ({ \
	u32 tag_mask = CRYPTO_CH0_TAG_VALID << (channel);\
	int ret;\
	ret = RK_POLL_TIMEOUT(!(crypto_read(CRYPTO_TAG_VALID) & tag_mask),\
			      timeout);\
	crypto_write(crypto_read(CRYPTO_TAG_VALID) & tag_mask, CRYPTO_TAG_VALID);\
	ret;\
})
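
/*
 * Illustrative use (a sketch, not driver code): both helpers above are GNU
 * statement expressions, so they evaluate to a value and can be assigned
 * directly, mirroring hw_crypto_reset() below:
 *
 *	ret = RK_POLL_TIMEOUT(crypto_read(CRYPTO_RST_CTL), RK_CRYPTO_TIMEOUT);
 *	if (ret == -ETIMEDOUT)
 *		return ret;	// condition still true after `timeout` 1 us polls
 */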

#define virt_to_phys(addr)		((unsigned long)(addr) & 0xffffffff)
#define phys_to_virt(addr, area)	((unsigned long)(addr))

#define align_malloc(bytes, alignment)	memalign(alignment, bytes)
#define align_free(addr)		do { if (addr) free(addr); } while (0)

#define ROUNDUP(size, alignment)	round_up(size, alignment)
#define cache_op_inner(type, addr, size) \
					crypto_flush_cacheline((ulong)(addr), size)

#define IS_NEED_IV(rk_mode) ((rk_mode) != RK_MODE_ECB && \
			     (rk_mode) != RK_MODE_CMAC && \
			     (rk_mode) != RK_MODE_CBC_MAC)

#define IS_NEED_TAG(rk_mode) ((rk_mode) == RK_MODE_CMAC || \
			      (rk_mode) == RK_MODE_CBC_MAC || \
			      (rk_mode) == RK_MODE_CCM || \
			      (rk_mode) == RK_MODE_GCM)

#define IS_MAC_MODE(rk_mode) ((rk_mode) == RK_MODE_CMAC || \
			      (rk_mode) == RK_MODE_CBC_MAC)

#define IS_AE_MODE(rk_mode) ((rk_mode) == RK_MODE_CCM || \
			     (rk_mode) == RK_MODE_GCM)

fdt_addr_t crypto_base;

static inline void word2byte_be(u32 word, u8 *ch)
{
	ch[0] = (word >> 24) & 0xff;
	ch[1] = (word >> 16) & 0xff;
	ch[2] = (word >> 8) & 0xff;
	ch[3] = (word >> 0) & 0xff;
}

static inline u32 byte2word_be(const u8 *ch)
{
	return (*ch << 24) + (*(ch + 1) << 16) + (*(ch + 2) << 8) + *(ch + 3);
}

static inline void clear_regs(u32 base, u32 words)
{
	int i;

	/* clear output registers */
	for (i = 0; i < words; i++)
		crypto_write(0, base + 4 * i);
}

static inline void clear_hash_out_reg(void)
{
	clear_regs(CRYPTO_HASH_DOUT_0, 16);
}

static inline void clear_key_regs(void)
{
	clear_regs(CRYPTO_CH0_KEY_0, CRYPTO_KEY_CHANNEL_NUM * 4);
}

static inline void read_regs(u32 base, u8 *data, u32 data_len)
{
	u8 tmp_buf[4];
	u32 i;

	for (i = 0; i < data_len / 4; i++)
		word2byte_be(crypto_read(base + i * 4),
			     data + i * 4);

	if (data_len % 4) {
		word2byte_be(crypto_read(base + i * 4), tmp_buf);
		memcpy(data + i * 4, tmp_buf, data_len % 4);
	}
}

static inline void write_regs(u32 base, const u8 *data, u32 data_len)
{
	u8 tmp_buf[4];
	u32 i;

	for (i = 0; i < data_len / 4; i++, base += 4)
		crypto_write(byte2word_be(data + i * 4), base);

	if (data_len % 4) {
		memset(tmp_buf, 0x00, sizeof(tmp_buf));
		memcpy((u8 *)tmp_buf, data + i * 4, data_len % 4);
		crypto_write(byte2word_be(tmp_buf), base);
	}
}

static inline void write_key_reg(u32 chn, const u8 *key, u32 key_len)
{
	write_regs(CRYPTO_CH0_KEY_0 + chn * 0x10, key, key_len);
}

static inline void set_iv_reg(u32 chn, const u8 *iv, u32 iv_len)
{
	u32 base_iv;

	base_iv = CRYPTO_CH0_IV_0 + chn * 0x10;

	/* clear iv */
	clear_regs(base_iv, 4);

	if (!iv || iv_len == 0)
		return;

	write_regs(base_iv, iv, iv_len);

	crypto_write(iv_len, CRYPTO_CH0_IV_LEN_0 + 4 * chn);
}

static inline void get_iv_reg(u32 chn, u8 *iv, u32 iv_len)
{
	u32 base_iv;

	base_iv = CRYPTO_CH0_IV_0 + chn * 0x10;

	read_regs(base_iv, iv, iv_len);
}

static inline void get_tag_from_reg(u32 chn, u8 *tag, u32 tag_len)
{
	u32 i;
	u32 chn_base = CRYPTO_CH0_TAG_0 + 0x10 * chn;

	for (i = 0; i < tag_len / 4; i++, chn_base += 4)
		word2byte_be(crypto_read(chn_base), tag + 4 * i);
}

static int rk_crypto_do_enable_clk(struct udevice *dev, int enable)
{
	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
	struct clk clk;
	int i, ret;

	for (i = 0; i < priv->nclocks; i++) {
		ret = clk_get_by_index(dev, i, &clk);
		if (ret < 0) {
			printf("Failed to get clk index %d, ret=%d\n", i, ret);
			return ret;
		}

		if (enable)
			ret = clk_enable(&clk);
		else
			ret = clk_disable(&clk);
		if (ret < 0 && ret != -ENOSYS) {
			printf("Failed to enable(%d) clk(%ld): ret=%d\n",
			       enable, clk.id, ret);
			return ret;
		}
	}

	return 0;
}

static int rk_crypto_enable_clk(struct udevice *dev)
{
	return rk_crypto_do_enable_clk(dev, 1);
}

static int rk_crypto_disable_clk(struct udevice *dev)
{
	return rk_crypto_do_enable_clk(dev, 0);
}

static u32 crypto_v3_dynamic_cap(void)
{
	u32 capability = 0;
	u32 ver_reg, i;
	struct cap_map {
		u32 ver_offset;
		u32 mask;
		u32 cap_bit;
	};
	const struct cap_map cap_tbl[] = {
	{CRYPTO_HASH_VERSION, CRYPTO_HASH_MD5_FLAG,    CRYPTO_MD5},
	{CRYPTO_HASH_VERSION, CRYPTO_HASH_SHA1_FLAG,   CRYPTO_SHA1},
	{CRYPTO_HASH_VERSION, CRYPTO_HASH_SHA256_FLAG, CRYPTO_SHA256},
	{CRYPTO_HASH_VERSION, CRYPTO_HASH_SHA512_FLAG, CRYPTO_SHA512},
	{CRYPTO_HASH_VERSION, CRYPTO_HASH_SM3_FLAG,    CRYPTO_SM3},

	{CRYPTO_HMAC_VERSION, CRYPTO_HMAC_MD5_FLAG,    CRYPTO_HMAC_MD5},
	{CRYPTO_HMAC_VERSION, CRYPTO_HMAC_SHA1_FLAG,   CRYPTO_HMAC_SHA1},
	{CRYPTO_HMAC_VERSION, CRYPTO_HMAC_SHA256_FLAG, CRYPTO_HMAC_SHA256},
	{CRYPTO_HMAC_VERSION, CRYPTO_HMAC_SHA512_FLAG, CRYPTO_HMAC_SHA512},
	{CRYPTO_HMAC_VERSION, CRYPTO_HMAC_SM3_FLAG,    CRYPTO_HMAC_SM3},

	{CRYPTO_AES_VERSION,  CRYPTO_AES256_FLAG,      CRYPTO_AES},
	{CRYPTO_DES_VERSION,  CRYPTO_TDES_FLAG,        CRYPTO_DES},
	{CRYPTO_SM4_VERSION,  CRYPTO_ECB_FLAG,         CRYPTO_SM4},
	};

	/* rsa */
	capability = CRYPTO_RSA512 |
		     CRYPTO_RSA1024 |
		     CRYPTO_RSA2048 |
		     CRYPTO_RSA3072 |
		     CRYPTO_RSA4096;

	for (i = 0; i < ARRAY_SIZE(cap_tbl); i++) {
		ver_reg = crypto_read(cap_tbl[i].ver_offset);

		if ((ver_reg & cap_tbl[i].mask) == cap_tbl[i].mask)
			capability |= cap_tbl[i].cap_bit;
	}

	return capability;
}

static int hw_crypto_reset(void)
{
	u32 val = 0, mask = 0;
	int ret;

	val = CRYPTO_SW_PKA_RESET | CRYPTO_SW_CC_RESET;
	mask = val << CRYPTO_WRITE_MASK_SHIFT;

	/* reset pka and crypto modules */
	crypto_write(val | mask, CRYPTO_RST_CTL);

	/* wait for the reset to complete */
	ret = RK_POLL_TIMEOUT(crypto_read(CRYPTO_RST_CTL), RK_CRYPTO_TIMEOUT);

	return ret;
}

static void hw_hash_clean_ctx(struct rk_hash_ctx *ctx)
{
	/* clear hash status */
	crypto_write(CRYPTO_WRITE_MASK_ALL | 0, CRYPTO_HASH_CTL);

	assert(ctx);
	assert(ctx->magic == RK_HASH_CTX_MAGIC);

	crypto_hash_cache_free(ctx->hash_cache);

	memset(ctx, 0x00, sizeof(*ctx));
}

static int rk_hash_init(void *hw_ctx, u32 algo)
{
	struct rk_hash_ctx *tmp_ctx = (struct rk_hash_ctx *)hw_ctx;
	u32 reg_ctrl = 0;
	int ret;

	if (!tmp_ctx)
		return -EINVAL;

	reg_ctrl = CRYPTO_SW_CC_RESET;
	crypto_write(reg_ctrl | (reg_ctrl << CRYPTO_WRITE_MASK_SHIFT),
		     CRYPTO_RST_CTL);

	/* wait for the reset to complete */
	ret = RK_POLL_TIMEOUT(crypto_read(CRYPTO_RST_CTL),
			      RK_CRYPTO_TIMEOUT);

	reg_ctrl = 0;
	tmp_ctx->algo = algo;
	switch (algo) {
	case CRYPTO_MD5:
	case CRYPTO_HMAC_MD5:
		reg_ctrl |= CRYPTO_MODE_MD5;
		tmp_ctx->digest_size = 16;
		break;
	case CRYPTO_SHA1:
	case CRYPTO_HMAC_SHA1:
		reg_ctrl |= CRYPTO_MODE_SHA1;
		tmp_ctx->digest_size = 20;
		break;
	case CRYPTO_SHA256:
	case CRYPTO_HMAC_SHA256:
		reg_ctrl |= CRYPTO_MODE_SHA256;
		tmp_ctx->digest_size = 32;
		break;
	case CRYPTO_SHA512:
	case CRYPTO_HMAC_SHA512:
		reg_ctrl |= CRYPTO_MODE_SHA512;
		tmp_ctx->digest_size = 64;
		break;
	case CRYPTO_SM3:
	case CRYPTO_HMAC_SM3:
		reg_ctrl |= CRYPTO_MODE_SM3;
		tmp_ctx->digest_size = 32;
		break;
	default:
		ret = -EINVAL;
		goto exit;
	}

	clear_hash_out_reg();

	/* enable hardware padding */
	reg_ctrl |= CRYPTO_HW_PAD_ENABLE;
	crypto_write(reg_ctrl | CRYPTO_WRITE_MASK_ALL, CRYPTO_HASH_CTL);

	/* FIFO input and output data byte swap */
	/* such as B0, B1, B2, B3 -> B3, B2, B1, B0 */
	reg_ctrl = CRYPTO_DOUT_BYTESWAP | CRYPTO_DOIN_BYTESWAP;
	crypto_write(reg_ctrl | CRYPTO_WRITE_MASK_ALL, CRYPTO_FIFO_CTL);

	/* mask all DMA interrupts; completion is polled via CRYPTO_DMA_INT_ST */
	crypto_write(0, CRYPTO_DMA_INT_EN);

	tmp_ctx->magic = RK_HASH_CTX_MAGIC;

	return 0;
exit:
	/* clear hash setting if init failed */
	crypto_write(CRYPTO_WRITE_MASK_ALL | 0, CRYPTO_HASH_CTL);

	return ret;
}

static int rk_hash_direct_calc(void *hw_data, const u8 *data,
			       u32 data_len, u8 *started_flag, u8 is_last)
{
	struct rockchip_crypto_priv *priv = hw_data;
	struct rk_hash_ctx *hash_ctx = priv->hw_ctx;
	struct crypto_lli_desc *lli = &hash_ctx->data_lli;
	int ret = -EINVAL;
	u32 tmp = 0, mask = 0;

	assert(IS_ALIGNED((ulong)data, DATA_ADDR_ALIGN_SIZE));
	assert(is_last || IS_ALIGNED(data_len, DATA_LEN_ALIGN_SIZE));

	debug("%s: data = %p, len = %u, s = %x, l = %x\n",
	      __func__, data, data_len, *started_flag, is_last);

	memset(lli, 0x00, sizeof(*lli));
	lli->src_addr = (u32)virt_to_phys(data);
	lli->src_len = data_len;
	lli->dma_ctrl = LLI_DMA_CTRL_SRC_DONE;

	if (is_last) {
		lli->user_define |= LLI_USER_STRING_LAST;
		lli->dma_ctrl |= LLI_DMA_CTRL_LAST;
	} else {
		lli->next_addr = (u32)virt_to_phys(lli);
		lli->dma_ctrl |= LLI_DMA_CTRL_PAUSE;
	}

	if (!(*started_flag)) {
		lli->user_define |=
			(LLI_USER_STRING_START | LLI_USER_CIPHER_START);
		crypto_write((u32)virt_to_phys(lli), CRYPTO_DMA_LLI_ADDR);
		crypto_write((CRYPTO_HASH_ENABLE << CRYPTO_WRITE_MASK_SHIFT) |
			     CRYPTO_HASH_ENABLE, CRYPTO_HASH_CTL);
		tmp = CRYPTO_DMA_START;
		*started_flag = 1;
	} else {
		tmp = CRYPTO_DMA_RESTART;
	}

	/* flush cache */
	crypto_flush_cacheline((ulong)lli, sizeof(*lli));
	crypto_flush_cacheline((ulong)data, data_len);

	/* start the calculation */
	crypto_write(tmp << CRYPTO_WRITE_MASK_SHIFT | tmp,
		     CRYPTO_DMA_CTL);

	/* mask CRYPTO_SYNC_LOCKSTEP_INT_ST flag */
	mask = ~(mask | CRYPTO_SYNC_LOCKSTEP_INT_ST);

	/* wait for the calculation to finish */
	ret = RK_POLL_TIMEOUT(!(crypto_read(CRYPTO_DMA_INT_ST) & mask),
			      RK_CRYPTO_TIMEOUT);

	/* clear interrupt status */
	tmp = crypto_read(CRYPTO_DMA_INT_ST);
	crypto_write(tmp, CRYPTO_DMA_INT_ST);

	if ((tmp & mask) != CRYPTO_SRC_ITEM_DONE_INT_ST &&
	    (tmp & mask) != CRYPTO_ZERO_LEN_INT_ST) {
		ret = -EFAULT;
		debug("[%s] %d: CRYPTO_DMA_INT_ST = 0x%x\n",
		      __func__, __LINE__, tmp);
		goto exit;
	}

	priv->length += data_len;
exit:
	return ret;
}

int rk_hash_update(void *ctx, const u8 *data, u32 data_len)
{
	struct rk_hash_ctx *tmp_ctx = (struct rk_hash_ctx *)ctx;
	int ret = -EINVAL;

	debug("\n");
	if (!tmp_ctx || !data)
		goto exit;

	if (tmp_ctx->digest_size == 0 || tmp_ctx->magic != RK_HASH_CTX_MAGIC)
		goto exit;

	ret = crypto_hash_update_with_cache(tmp_ctx->hash_cache,
					    data, data_len);

exit:
	/* release the context on failure (skip if we never had one) */
	if (ret && tmp_ctx)
		hw_hash_clean_ctx(tmp_ctx);

	return ret;
}

int rk_hash_final(void *ctx, u8 *digest, size_t len)
{
	struct rk_hash_ctx *tmp_ctx = (struct rk_hash_ctx *)ctx;
	int ret = -EINVAL;

	if (!digest)
		goto exit;

	if (!tmp_ctx ||
	    tmp_ctx->digest_size == 0 ||
	    len > tmp_ctx->digest_size ||
	    tmp_ctx->magic != RK_HASH_CTX_MAGIC) {
		goto exit;
	}

	/* wait for the hash value to become valid */
	ret = RK_POLL_TIMEOUT(!crypto_read(CRYPTO_HASH_VALID),
			      RK_CRYPTO_TIMEOUT);

	read_regs(CRYPTO_HASH_DOUT_0, digest, len);

	/* clear hash status */
	crypto_write(CRYPTO_HASH_IS_VALID, CRYPTO_HASH_VALID);
	crypto_write(CRYPTO_WRITE_MASK_ALL | 0, CRYPTO_HASH_CTL);

exit:

	return ret;
}

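/*
 * Illustrative one-shot hash flow (a sketch, not driver code; callers
 * normally go through the sha_init/sha_update/sha_final uclass ops below,
 * and the hash_cache member must be set up via crypto_hash_cache_alloc()
 * before rk_hash_update() is called):
 *
 *	struct rk_hash_ctx ctx;		// hypothetical caller-owned context
 *
 *	if (!rk_hash_init(&ctx, CRYPTO_SHA256) &&
 *	    !rk_hash_update(&ctx, data, data_len))
 *		rk_hash_final(&ctx, digest, 32);	// SHA-256: 32-byte digest
 */
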
static u32 rockchip_crypto_capability(struct udevice *dev)
{
	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
	u32 capability, mask = 0;

	capability = priv->soc_data->capability;

#if !(CONFIG_IS_ENABLED(ROCKCHIP_CIPHER))
	mask |= (CRYPTO_DES | CRYPTO_AES | CRYPTO_SM4);
#endif

#if !(CONFIG_IS_ENABLED(ROCKCHIP_HMAC))
	mask |= (CRYPTO_HMAC_MD5 | CRYPTO_HMAC_SHA1 | CRYPTO_HMAC_SHA256 |
		 CRYPTO_HMAC_SHA512 | CRYPTO_HMAC_SM3);
#endif

#if !(CONFIG_IS_ENABLED(ROCKCHIP_RSA))
	mask |= (CRYPTO_RSA512 | CRYPTO_RSA1024 | CRYPTO_RSA2048 |
		 CRYPTO_RSA3072 | CRYPTO_RSA4096);
#endif

	return capability & (~mask);
}

static int rockchip_crypto_sha_init(struct udevice *dev, sha_context *ctx)
{
	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
	struct rk_hash_ctx *hash_ctx = priv->hw_ctx;
	int ret = 0;

	if (!ctx)
		return -EINVAL;

	memset(hash_ctx, 0x00, sizeof(*hash_ctx));

	priv->length = 0;

	hash_ctx->hash_cache = crypto_hash_cache_alloc(rk_hash_direct_calc,
						       priv, ctx->length,
						       DATA_ADDR_ALIGN_SIZE,
						       DATA_LEN_ALIGN_SIZE);
	if (!hash_ctx->hash_cache)
		return -EFAULT;

	rk_crypto_enable_clk(dev);
	ret = rk_hash_init(hash_ctx, ctx->algo);
	if (ret)
		rk_crypto_disable_clk(dev);

	return ret;
}

static int rockchip_crypto_sha_update(struct udevice *dev,
				      u32 *input, u32 len)
{
	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
	int ret, i;
	u8 *p;

	if (!len) {
		ret = -EINVAL;
		goto exit;
	}

	p = (u8 *)input;

	for (i = 0; i < len / HASH_UPDATE_LIMIT; i++, p += HASH_UPDATE_LIMIT) {
		ret = rk_hash_update(priv->hw_ctx, p, HASH_UPDATE_LIMIT);
		if (ret)
			goto exit;
	}

	if (len % HASH_UPDATE_LIMIT)
		ret = rk_hash_update(priv->hw_ctx, p, len % HASH_UPDATE_LIMIT);

exit:
	if (ret)
		rk_crypto_disable_clk(dev);

	return ret;
}

static int rockchip_crypto_sha_final(struct udevice *dev,
				     sha_context *ctx, u8 *output)
{
	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
	u32 nbits;
	int ret;

	nbits = crypto_algo_nbits(ctx->algo);

	if (priv->length != ctx->length) {
		printf("total length(0x%08x) != init length(0x%08x)!\n",
		       priv->length, ctx->length);
		ret = -EIO;
		goto exit;
	}

	ret = rk_hash_final(priv->hw_ctx, (u8 *)output, BITS2BYTE(nbits));

exit:
	hw_hash_clean_ctx(priv->hw_ctx);
	rk_crypto_disable_clk(dev);

	return ret;
}

#if CONFIG_IS_ENABLED(ROCKCHIP_HMAC)
int rk_hmac_init(void *hw_ctx, u32 algo, u8 *key, u32 key_len)
{
	u32 reg_ctrl = 0;
	int ret;

	if (!key || !key_len || key_len > 64)
		return -EINVAL;

	clear_key_regs();

	write_key_reg(0, key, key_len);

	ret = rk_hash_init(hw_ctx, algo);
	if (ret)
		return ret;

	reg_ctrl = crypto_read(CRYPTO_HASH_CTL) | CRYPTO_HMAC_ENABLE;
	crypto_write(reg_ctrl | CRYPTO_WRITE_MASK_ALL, CRYPTO_HASH_CTL);

	return ret;
}

static int rockchip_crypto_hmac_init(struct udevice *dev,
				     sha_context *ctx, u8 *key, u32 key_len)
{
	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
	struct rk_hash_ctx *hash_ctx = priv->hw_ctx;
	int ret = 0;

	if (!ctx)
		return -EINVAL;

	memset(hash_ctx, 0x00, sizeof(*hash_ctx));

	priv->length = 0;

	hash_ctx->hash_cache = crypto_hash_cache_alloc(rk_hash_direct_calc,
						       priv, ctx->length,
						       DATA_ADDR_ALIGN_SIZE,
						       DATA_LEN_ALIGN_SIZE);
	if (!hash_ctx->hash_cache)
		return -EFAULT;

	rk_crypto_enable_clk(dev);
	ret = rk_hmac_init(priv->hw_ctx, ctx->algo, key, key_len);
	if (ret)
		rk_crypto_disable_clk(dev);

	return ret;
}

static int rockchip_crypto_hmac_update(struct udevice *dev,
				       u32 *input, u32 len)
{
	return rockchip_crypto_sha_update(dev, input, len);
}

static int rockchip_crypto_hmac_final(struct udevice *dev,
				      sha_context *ctx, u8 *output)
{
	return rockchip_crypto_sha_final(dev, ctx, output);
}

#endif

#if CONFIG_IS_ENABLED(ROCKCHIP_CIPHER)
static u8 g_key_chn;

static const u32 rk_mode2bc_mode[RK_MODE_MAX] = {
	[RK_MODE_ECB] = CRYPTO_BC_ECB,
	[RK_MODE_CBC] = CRYPTO_BC_CBC,
	[RK_MODE_CTS] = CRYPTO_BC_CTS,
	[RK_MODE_CTR] = CRYPTO_BC_CTR,
	[RK_MODE_CFB] = CRYPTO_BC_CFB,
	[RK_MODE_OFB] = CRYPTO_BC_OFB,
	[RK_MODE_XTS] = CRYPTO_BC_XTS,
	[RK_MODE_CCM] = CRYPTO_BC_CCM,
	[RK_MODE_GCM] = CRYPTO_BC_GCM,
	[RK_MODE_CMAC] = CRYPTO_BC_CMAC,
	[RK_MODE_CBC_MAC] = CRYPTO_BC_CBC_MAC,
};

static inline void set_pc_len_reg(u32 chn, u64 pc_len)
{
	u32 chn_base = CRYPTO_CH0_PC_LEN_0 + chn * 0x08;

	crypto_write(pc_len & 0xffffffff, chn_base);
	crypto_write(pc_len >> 32, chn_base + 4);
}

static inline void set_aad_len_reg(u32 chn, u64 aad_len)
{
	u32 chn_base = CRYPTO_CH0_AAD_LEN_0 + chn * 0x08;

	crypto_write(aad_len & 0xffffffff, chn_base);
	crypto_write(aad_len >> 32, chn_base + 4);
}

static inline bool is_des_mode(u32 rk_mode)
{
	return (rk_mode == RK_MODE_ECB ||
		rk_mode == RK_MODE_CBC ||
		rk_mode == RK_MODE_CFB ||
		rk_mode == RK_MODE_OFB);
}

static void dump_crypto_state(struct crypto_lli_desc *desc,
			      u32 tmp, u32 expt_int,
			      const u8 *in, const u8 *out,
			      u32 len, int ret)
{
	IMSG("%s\n", ret == -ETIMEDOUT ? "timeout" : "mismatch");

	IMSG("CRYPTO_DMA_INT_ST = %08x, expect_int = %08x\n",
	     tmp, expt_int);
	IMSG("data desc		= %p\n", desc);
	IMSG("\taddr_in		= [%08x <=> %08x]\n",
	     desc->src_addr, (u32)virt_to_phys(in));
	IMSG("\taddr_out	= [%08x <=> %08x]\n",
	     desc->dst_addr, (u32)virt_to_phys(out));
	IMSG("\tsrc_len		= [%08x <=> %08x]\n",
	     desc->src_len, (u32)len);
	IMSG("\tdst_len		= %08x\n", desc->dst_len);
	IMSG("\tdma_ctl		= %08x\n", desc->dma_ctrl);
	IMSG("\tuser_define	= %08x\n", desc->user_define);

	IMSG("\n\nDMA CRYPTO_DMA_LLI_ADDR status = %08x\n",
	     crypto_read(CRYPTO_DMA_LLI_ADDR));
	IMSG("DMA CRYPTO_DMA_ST status = %08x\n",
	     crypto_read(CRYPTO_DMA_ST));
	IMSG("DMA CRYPTO_DMA_STATE status = %08x\n",
	     crypto_read(CRYPTO_DMA_STATE));
	IMSG("DMA CRYPTO_DMA_LLI_RADDR status = %08x\n",
	     crypto_read(CRYPTO_DMA_LLI_RADDR));
	IMSG("DMA CRYPTO_DMA_SRC_RADDR status = %08x\n",
	     crypto_read(CRYPTO_DMA_SRC_RADDR));
	IMSG("DMA CRYPTO_DMA_DST_RADDR status = %08x\n",
	     crypto_read(CRYPTO_DMA_DST_RADDR));
	IMSG("DMA CRYPTO_CIPHER_ST status = %08x\n",
	     crypto_read(CRYPTO_CIPHER_ST));
	IMSG("DMA CRYPTO_CIPHER_STATE status = %08x\n",
	     crypto_read(CRYPTO_CIPHER_STATE));
	IMSG("DMA CRYPTO_TAG_VALID status = %08x\n",
	     crypto_read(CRYPTO_TAG_VALID));
	IMSG("LOCKSTEP status = %08x\n\n",
	     crypto_read(0x618));

	IMSG("dst %d bytes not transferred\n",
	     desc->dst_addr + desc->dst_len -
	     crypto_read(CRYPTO_DMA_DST_RADDR));
}

static int ccm128_set_iv_reg(u32 chn, const u8 *nonce, u32 nlen)
{
	u8 iv_buf[AES_BLOCK_SIZE];
	u32 L;

	memset(iv_buf, 0x00, sizeof(iv_buf));

	L = 15 - nlen;
	iv_buf[0] = ((u8)(L - 1) & 7);

	/* the L parameter */
	L = iv_buf[0] & 7;

	/* nonce is too short */
	if (nlen < (14 - L))
		return -EINVAL;

	/* clear aad flag */
	iv_buf[0] &= ~0x40;
	memcpy(&iv_buf[1], nonce, 14 - L);

	set_iv_reg(chn, iv_buf, AES_BLOCK_SIZE);

	return 0;
}

static void ccm_aad_padding(u32 aad_len, u8 *padding, u32 *padding_size)
{
	u32 i;

	if (aad_len == 0) {
		*padding_size = 0;
		return;
	}

	i = aad_len < (0x10000 - 0x100) ? 2 : 6;

	if (i == 2) {
		padding[0] = (u8)(aad_len >> 8);
		padding[1] = (u8)aad_len;
	} else {
		padding[0] = 0xFF;
		padding[1] = 0xFE;
		padding[2] = (u8)(aad_len >> 24);
		padding[3] = (u8)(aad_len >> 16);
		padding[4] = (u8)(aad_len >> 8);
		padding[5] = (u8)aad_len;	/* low byte of the 4-byte length */
	}

	*padding_size = i;
}

static int ccm_compose_aad_iv(u8 *aad_iv, u32 data_len, u32 aad_len, u32 tag_size)
{
	aad_iv[0] |= ((u8)(((tag_size - 2) / 2) & 7) << 3);

	aad_iv[12] = (u8)(data_len >> 24);
	aad_iv[13] = (u8)(data_len >> 16);
	aad_iv[14] = (u8)(data_len >> 8);
	aad_iv[15] = (u8)data_len;

	if (aad_len)
		aad_iv[0] |= 0x40;	/* set aad flag */

	return 0;
}

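/*
 * Layout sketch of the 16-byte CCM B0/IV block the helpers above build
 * (assuming the usual RFC 3610 encoding):
 *
 *	byte 0       : (aad_len ? 0x40 : 0) | (((tag_size - 2) / 2) << 3) | (L - 1)
 *	bytes 1..14-L: nonce (copied in by ccm128_set_iv_reg)
 *	bytes 12..15 : payload length, big endian (set by ccm_compose_aad_iv)
 *
 * ccm_aad_padding() additionally produces the 2- or 6-byte AAD length
 * prefix, and hw_cipher_crypt() rounds the whole header up to a full block.
 */
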
static int hw_cipher_init(u32 chn, const u8 *key, const u8 *twk_key,
			  u32 key_len, const u8 *iv, u32 iv_len,
			  u32 algo, u32 mode, bool enc)
{
	u32 rk_mode = RK_GET_RK_MODE(mode);
	u32 key_chn_sel = chn;
	u32 reg_ctrl = 0;

	IMSG("%s: key addr is %p, key_len is %d, iv addr is %p",
	     __func__, key, key_len, iv);
	if (rk_mode >= RK_MODE_MAX)
		return -EINVAL;

	switch (algo) {
	case CRYPTO_DES:
		if (key_len > DES_BLOCK_SIZE)
			reg_ctrl |= CRYPTO_BC_TDES;
		else
			reg_ctrl |= CRYPTO_BC_DES;
		break;
	case CRYPTO_AES:
		reg_ctrl |= CRYPTO_BC_AES;
		break;
	case CRYPTO_SM4:
		reg_ctrl |= CRYPTO_BC_SM4;
		break;
	default:
		return -EINVAL;
	}

	if (algo == CRYPTO_AES || algo == CRYPTO_SM4) {
		switch (key_len) {
		case AES_KEYSIZE_128:
			reg_ctrl |= CRYPTO_BC_128_bit_key;
			break;
		case AES_KEYSIZE_192:
			reg_ctrl |= CRYPTO_BC_192_bit_key;
			break;
		case AES_KEYSIZE_256:
			reg_ctrl |= CRYPTO_BC_256_bit_key;
			break;
		default:
			return -EINVAL;
		}
	}

	reg_ctrl |= rk_mode2bc_mode[rk_mode];
	if (!enc)
		reg_ctrl |= CRYPTO_BC_DECRYPT;

	/* write key data to reg */
	write_key_reg(key_chn_sel, key, key_len);

	/* write twk key for xts mode */
	if (rk_mode == RK_MODE_XTS)
		write_key_reg(key_chn_sel + 4, twk_key, key_len);

	/* set iv reg */
	if (rk_mode == RK_MODE_CCM)
		ccm128_set_iv_reg(chn, iv, iv_len);
	else
		set_iv_reg(chn, iv, iv_len);

	/* din_swap set 1, dout_swap set 1, default 1. */
	crypto_write(0x00030003, CRYPTO_FIFO_CTL);
	crypto_write(0, CRYPTO_DMA_INT_EN);

	crypto_write(reg_ctrl | CRYPTO_WRITE_MASK_ALL, CRYPTO_BC_CTL);

	return 0;
}

static int hw_cipher_crypt(const u8 *in, u8 *out, u64 len,
			   const u8 *aad, u32 aad_len,
			   u8 *tag, u32 tag_len, u32 mode)
{
	struct crypto_lli_desc *data_desc = NULL, *aad_desc = NULL;
	u8 *dma_in = NULL, *dma_out = NULL, *aad_tmp = NULL;
	u32 rk_mode = RK_GET_RK_MODE(mode);
	u32 reg_ctrl = 0, tmp_len = 0;
	u32 expt_int = 0, mask = 0;
	u32 key_chn = g_key_chn;
	u32 tmp, dst_len = 0;
	int ret = -1;

	if (rk_mode == RK_MODE_CTS && len <= AES_BLOCK_SIZE) {
		printf("CTS mode length %u must exceed 16 bytes\n", (u32)len);
		return -EINVAL;
	}

	tmp_len = (rk_mode == RK_MODE_CTR) ? ROUNDUP(len, AES_BLOCK_SIZE) : len;

	data_desc = align_malloc(sizeof(*data_desc), LLI_ADDR_ALIGN_SIZE);
	if (!data_desc)
		goto exit;

	if (IS_ALIGNED((ulong)in, DATA_ADDR_ALIGN_SIZE) && tmp_len == len)
		dma_in = (void *)in;
	else
		dma_in = align_malloc(tmp_len, DATA_ADDR_ALIGN_SIZE);
	if (!dma_in)
		goto exit;

	if (out) {
		if (IS_ALIGNED((ulong)out, DATA_ADDR_ALIGN_SIZE) &&
		    tmp_len == len)
			dma_out = out;
		else
			dma_out = align_malloc(tmp_len, DATA_ADDR_ALIGN_SIZE);
		if (!dma_out)
			goto exit;
		dst_len = tmp_len;
	}

	memset(data_desc, 0x00, sizeof(*data_desc));
	if (dma_in != in)
		memcpy(dma_in, in, len);

	data_desc->src_addr    = (u32)virt_to_phys(dma_in);
	data_desc->src_len     = tmp_len;
	data_desc->dst_addr    = (u32)virt_to_phys(dma_out);
	data_desc->dst_len     = dst_len;
	data_desc->dma_ctrl    = LLI_DMA_CTRL_LAST;

	if (IS_MAC_MODE(rk_mode)) {
		expt_int = CRYPTO_LIST_DONE_INT_ST;
		data_desc->dma_ctrl |= LLI_DMA_CTRL_LIST_DONE;
	} else {
		expt_int = CRYPTO_DST_ITEM_DONE_INT_ST;
		data_desc->dma_ctrl |= LLI_DMA_CTRL_DST_DONE;
	}

	data_desc->user_define = LLI_USER_CIPHER_START |
				 LLI_USER_STRING_START |
				 LLI_USER_STRING_LAST |
				 (key_chn << 4);
	crypto_write((u32)virt_to_phys(data_desc), CRYPTO_DMA_LLI_ADDR);

	if (rk_mode == RK_MODE_CCM || rk_mode == RK_MODE_GCM) {
		u32 aad_tmp_len = 0;

		aad_desc = align_malloc(sizeof(*aad_desc), LLI_ADDR_ALIGN_SIZE);
		if (!aad_desc)
			goto exit;

		memset(aad_desc, 0x00, sizeof(*aad_desc));
		aad_desc->next_addr = (u32)virt_to_phys(data_desc);
		aad_desc->user_define = LLI_USER_CIPHER_START |
					 LLI_USER_STRING_START |
					 LLI_USER_STRING_LAST |
					 LLI_USER_STRING_AAD |
					 (key_chn << 4);

		if (rk_mode == RK_MODE_CCM) {
			u8 padding[AES_BLOCK_SIZE];
			u32 padding_size = 0;

			memset(padding, 0x00, sizeof(padding));
			ccm_aad_padding(aad_len, padding, &padding_size);

			aad_tmp_len = aad_len + AES_BLOCK_SIZE + padding_size;
			aad_tmp_len = ROUNDUP(aad_tmp_len, AES_BLOCK_SIZE);
			aad_tmp = align_malloc(aad_tmp_len,
					       DATA_ADDR_ALIGN_SIZE);
			if (!aad_tmp)
				goto exit;

			/* clear last block */
			memset(aad_tmp + aad_tmp_len - AES_BLOCK_SIZE,
			       0x00, AES_BLOCK_SIZE);

			/* read iv data from reg */
			get_iv_reg(key_chn, aad_tmp, AES_BLOCK_SIZE);
			ccm_compose_aad_iv(aad_tmp, tmp_len, aad_len, tag_len);
			memcpy(aad_tmp + AES_BLOCK_SIZE, padding, padding_size);

			memcpy(aad_tmp + AES_BLOCK_SIZE + padding_size,
			       aad, aad_len);
		} else {
			aad_tmp_len = aad_len;
			if (IS_ALIGNED((ulong)aad, DATA_ADDR_ALIGN_SIZE)) {
				aad_tmp = (void *)aad;
			} else {
				aad_tmp = align_malloc(aad_tmp_len,
						       DATA_ADDR_ALIGN_SIZE);
				if (!aad_tmp)
					goto exit;

				memcpy(aad_tmp, aad, aad_tmp_len);
			}

			set_aad_len_reg(key_chn, aad_tmp_len);
			set_pc_len_reg(key_chn, tmp_len);
		}

		aad_desc->src_addr = (u32)virt_to_phys(aad_tmp);
		aad_desc->src_len  = aad_tmp_len;

		if (aad_tmp_len) {
			data_desc->user_define = LLI_USER_STRING_START |
						 LLI_USER_STRING_LAST |
						 (key_chn << 4);
			crypto_write((u32)virt_to_phys(aad_desc), CRYPTO_DMA_LLI_ADDR);
			cache_op_inner(DCACHE_AREA_CLEAN, aad_tmp, aad_tmp_len);
			cache_op_inner(DCACHE_AREA_CLEAN, aad_desc, sizeof(*aad_desc));
		}
	}

	cache_op_inner(DCACHE_AREA_CLEAN, data_desc, sizeof(*data_desc));
	cache_op_inner(DCACHE_AREA_CLEAN, dma_in, tmp_len);
	/* MAC modes have no destination buffer to invalidate */
	if (dma_out)
		cache_op_inner(DCACHE_AREA_INVALIDATE, dma_out, tmp_len);

	/* din_swap set 1, dout_swap set 1, default 1. */
	crypto_write(0x00030003, CRYPTO_FIFO_CTL);
	crypto_write(0, CRYPTO_DMA_INT_EN);

	reg_ctrl = crypto_read(CRYPTO_BC_CTL) | CRYPTO_BC_ENABLE;
	crypto_write(reg_ctrl | CRYPTO_WRITE_MASK_ALL, CRYPTO_BC_CTL);
	crypto_write(0x00010001, CRYPTO_DMA_CTL);	/* start DMA */

	mask = ~(mask | CRYPTO_SYNC_LOCKSTEP_INT_ST);

	/* wait for the transfer to finish */
	ret = RK_POLL_TIMEOUT(!(crypto_read(CRYPTO_DMA_INT_ST) & mask),
			      RK_CRYPTO_TIMEOUT);
	tmp = crypto_read(CRYPTO_DMA_INT_ST);
	crypto_write(tmp, CRYPTO_DMA_INT_ST);

	if ((tmp & mask) == expt_int) {
		if (out && out != dma_out)
			memcpy(out, dma_out, len);

		if (IS_NEED_TAG(rk_mode)) {
			ret = WAIT_TAG_VALID(key_chn, RK_CRYPTO_TIMEOUT);
			get_tag_from_reg(key_chn, tag, AES_BLOCK_SIZE);
		}
	} else {
		dump_crypto_state(data_desc, tmp, expt_int, in, out, len, ret);
		ret = -1;
	}

exit:
	crypto_write(0xffff0000, CRYPTO_BC_CTL);	/* disable block cipher */
	align_free(data_desc);
	align_free(aad_desc);
	if (dma_in != in)
		align_free(dma_in);
	if (out && dma_out != out)
		align_free(dma_out);
	if (aad && aad != aad_tmp)
		align_free(aad_tmp);

	return ret;
}

static int hw_aes_init(u32 chn, const u8 *key, const u8 *twk_key, u32 key_len,
		       const u8 *iv, u32 iv_len, u32 mode, bool enc)
{
	u32 rk_mode = RK_GET_RK_MODE(mode);

	if (rk_mode > RK_MODE_XTS)
		return -EINVAL;

	if (iv_len > AES_BLOCK_SIZE)
		return -EINVAL;

	if (IS_NEED_IV(rk_mode)) {
		if (!iv || iv_len != AES_BLOCK_SIZE)
			return -EINVAL;
	} else {
		iv_len = 0;
	}

	if (rk_mode == RK_MODE_XTS) {
		if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_256)
			return -EINVAL;

		if (!key || !twk_key)
			return -EINVAL;
	} else {
		if (key_len != AES_KEYSIZE_128 &&
		    key_len != AES_KEYSIZE_192 &&
		    key_len != AES_KEYSIZE_256)
			return -EINVAL;
	}

	return hw_cipher_init(chn, key, twk_key, key_len, iv, iv_len,
			      CRYPTO_AES, mode, enc);
}

static int hw_sm4_init(u32 chn, const u8 *key, const u8 *twk_key, u32 key_len,
		       const u8 *iv, u32 iv_len, u32 mode, bool enc)
{
	u32 rk_mode = RK_GET_RK_MODE(mode);

	if (rk_mode > RK_MODE_XTS)
		return -EINVAL;

	if (iv_len > SM4_BLOCK_SIZE || key_len != SM4_KEYSIZE)
		return -EINVAL;

	if (IS_NEED_IV(rk_mode)) {
		if (!iv || iv_len != SM4_BLOCK_SIZE)
			return -EINVAL;
	} else {
		iv_len = 0;
	}

	if (rk_mode == RK_MODE_XTS) {
		if (!key || !twk_key)
			return -EINVAL;
	}

	return hw_cipher_init(chn, key, twk_key, key_len, iv, iv_len,
			      CRYPTO_SM4, mode, enc);
}

int rk_crypto_des(struct udevice *dev, u32 mode, const u8 *key, u32 key_len,
		  const u8 *iv, const u8 *in, u8 *out, u32 len, bool enc)
{
	u32 rk_mode = RK_GET_RK_MODE(mode);
	u8 tmp_key[24];
	int ret;

	if (!is_des_mode(rk_mode))
		return -EINVAL;

	if (key_len == DES_BLOCK_SIZE || key_len == 3 * DES_BLOCK_SIZE) {
		memcpy(tmp_key, key, key_len);
	} else if (key_len == 2 * DES_BLOCK_SIZE) {
		/* expand 2-key TDES (K1, K2) to the 3-key form (K1, K2, K1) */
		memcpy(tmp_key, key, 16);
		memcpy(tmp_key + 16, key, 8);
		key_len = 3 * DES_BLOCK_SIZE;
	} else {
		return -EINVAL;
	}

	ret = hw_cipher_init(0, tmp_key, NULL, key_len, iv, DES_BLOCK_SIZE,
			     CRYPTO_DES, mode, enc);
	if (ret)
		goto exit;

	ret = hw_cipher_crypt(in, out, len, NULL, 0,
			      NULL, 0, mode);

exit:
	return ret;
}

int rk_crypto_aes(struct udevice *dev, u32 mode,
		  const u8 *key, const u8 *twk_key, u32 key_len,
		  const u8 *iv, u32 iv_len,
		  const u8 *in, u8 *out, u32 len, bool enc)
{
	int ret;

	/* RV1126/RV1109 do not support aes-192 */
#if defined(CONFIG_ROCKCHIP_RV1126)
	if (key_len == AES_KEYSIZE_192)
		return -EINVAL;
#endif

	ret = hw_aes_init(0, key, twk_key, key_len, iv, iv_len, mode, enc);
	if (ret)
		return ret;

	return hw_cipher_crypt(in, out, len, NULL, 0,
			       NULL, 0, mode);
}

int rk_crypto_sm4(struct udevice *dev, u32 mode,
		  const u8 *key, const u8 *twk_key, u32 key_len,
		  const u8 *iv, u32 iv_len,
		  const u8 *in, u8 *out, u32 len, bool enc)
{
	int ret;

	ret = hw_sm4_init(0, key, twk_key, key_len, iv, iv_len, mode, enc);
	if (ret)
		return ret;

	return hw_cipher_crypt(in, out, len, NULL, 0, NULL, 0, mode);
}

int rockchip_crypto_cipher(struct udevice *dev, cipher_context *ctx,
			   const u8 *in, u8 *out, u32 len, bool enc)
{
	int ret;

	rk_crypto_enable_clk(dev);

	switch (ctx->algo) {
	case CRYPTO_DES:
		ret = rk_crypto_des(dev, ctx->mode, ctx->key, ctx->key_len,
				    ctx->iv, in, out, len, enc);
		break;
	case CRYPTO_AES:
		ret = rk_crypto_aes(dev, ctx->mode,
				    ctx->key, ctx->twk_key, ctx->key_len,
				    ctx->iv, ctx->iv_len, in, out, len, enc);
		break;
	case CRYPTO_SM4:
		ret = rk_crypto_sm4(dev, ctx->mode,
				    ctx->key, ctx->twk_key, ctx->key_len,
				    ctx->iv, ctx->iv_len, in, out, len, enc);
		break;
	default:
		ret = -EINVAL;
	}

	rk_crypto_disable_clk(dev);

	return ret;
}

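/*
 * Illustrative AES-128-CBC call through the op above (a sketch; the field
 * names follow how cipher_context is used in this file, and the exact
 * ctx->mode encoding is assumed to carry RK_MODE_CBC):
 *
 *	cipher_context ctx = {
 *		.algo    = CRYPTO_AES,
 *		.mode    = RK_MODE_CBC,
 *		.key     = key, .key_len = AES_KEYSIZE_128,
 *		.iv      = iv,  .iv_len  = AES_BLOCK_SIZE,
 *	};
 *	ret = rockchip_crypto_cipher(dev, &ctx, plain, cipher, len, true);
 */
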
int rk_crypto_mac(struct udevice *dev, u32 algo, u32 mode,
		  const u8 *key, u32 key_len,
		  const u8 *in, u32 len, u8 *tag)
{
	u32 rk_mode = RK_GET_RK_MODE(mode);
	int ret;

	if (!IS_MAC_MODE(rk_mode))
		return -EINVAL;

	if (algo != CRYPTO_AES && algo != CRYPTO_SM4)
		return -EINVAL;

	/* RV1126/RV1109 do not support aes-192 */
#if defined(CONFIG_ROCKCHIP_RV1126)
	if (algo == CRYPTO_AES && key_len == AES_KEYSIZE_192)
		return -EINVAL;
#endif

	ret = hw_cipher_init(g_key_chn, key, NULL, key_len, NULL, 0,
			     algo, mode, true);
	if (ret)
		return ret;

	return hw_cipher_crypt(in, NULL, len, NULL, 0,
			       tag, AES_BLOCK_SIZE, mode);
}

int rockchip_crypto_mac(struct udevice *dev, cipher_context *ctx,
			const u8 *in, u32 len, u8 *tag)
{
	int ret = 0;

	rk_crypto_enable_clk(dev);

	ret = rk_crypto_mac(dev, ctx->algo, ctx->mode,
			    ctx->key, ctx->key_len, in, len, tag);

	rk_crypto_disable_clk(dev);

	return ret;
}

int rk_crypto_ae(struct udevice *dev, u32 algo, u32 mode,
		 const u8 *key, u32 key_len, const u8 *nonce, u32 nonce_len,
		 const u8 *in, u32 len, const u8 *aad, u32 aad_len,
		 u8 *out, u8 *tag)
{
	u32 rk_mode = RK_GET_RK_MODE(mode);
	int ret;

	if (!IS_AE_MODE(rk_mode))
		return -EINVAL;

	if (len == 0)
		return -EINVAL;

	if (algo != CRYPTO_AES && algo != CRYPTO_SM4)
		return -EINVAL;

	/* RV1126/RV1109 do not support aes-192 */
#if defined(CONFIG_ROCKCHIP_RV1126)
	if (algo == CRYPTO_AES && key_len == AES_KEYSIZE_192)
		return -EINVAL;
#endif

	ret = hw_cipher_init(g_key_chn, key, NULL, key_len, nonce, nonce_len,
			     algo, mode, true);
	if (ret)
		return ret;

	return hw_cipher_crypt(in, out, len, aad, aad_len,
			       tag, AES_BLOCK_SIZE, mode);
}

int rockchip_crypto_ae(struct udevice *dev, cipher_context *ctx,
		       const u8 *in, u32 len, const u8 *aad, u32 aad_len,
		       u8 *out, u8 *tag)
{
	int ret = 0;

	rk_crypto_enable_clk(dev);

	ret = rk_crypto_ae(dev, ctx->algo, ctx->mode, ctx->key, ctx->key_len,
			   ctx->iv, ctx->iv_len, in, len,
			   aad, aad_len, out, tag);

	rk_crypto_disable_clk(dev);

	return ret;
}

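/*
 * Illustrative AES-GCM call (a sketch; RK_GET_RK_MODE(ctx->mode) must yield
 * RK_MODE_GCM, the exact mode encoding is assumed):
 *
 *	u8 tag[AES_BLOCK_SIZE];
 *
 *	ret = rockchip_crypto_ae(dev, &ctx, in, len, aad, aad_len, out, tag);
 *
 * On success the 16-byte tag is read back from the channel tag registers
 * via WAIT_TAG_VALID()/get_tag_from_reg() inside hw_cipher_crypt().
 */
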
#endif

#if CONFIG_IS_ENABLED(ROCKCHIP_RSA)
static int rockchip_crypto_rsa_verify(struct udevice *dev, rsa_key *ctx,
				      u8 *sign, u8 *output)
{
	struct mpa_num *mpa_m = NULL, *mpa_e = NULL, *mpa_n = NULL;
	struct mpa_num *mpa_c = NULL, *mpa_result = NULL;
	u32 n_bits, n_words;
	int ret;

	if (!ctx)
		return -EINVAL;

	if (ctx->algo != CRYPTO_RSA512 &&
	    ctx->algo != CRYPTO_RSA1024 &&
	    ctx->algo != CRYPTO_RSA2048 &&
	    ctx->algo != CRYPTO_RSA3072 &&
	    ctx->algo != CRYPTO_RSA4096)
		return -EINVAL;

	n_bits = crypto_algo_nbits(ctx->algo);
	n_words = BITS2WORD(n_bits);

	ret = rk_mpa_alloc(&mpa_m, sign, n_words);
	if (ret)
		goto exit;

	ret = rk_mpa_alloc(&mpa_e, ctx->e, n_words);
	if (ret)
		goto exit;

	ret = rk_mpa_alloc(&mpa_n, ctx->n, n_words);
	if (ret)
		goto exit;

	if (ctx->c) {
		ret = rk_mpa_alloc(&mpa_c, ctx->c, n_words);
		if (ret)
			goto exit;
	}

	ret = rk_mpa_alloc(&mpa_result, NULL, n_words);
	if (ret)
		goto exit;

	rk_crypto_enable_clk(dev);
	ret = rk_exptmod_np(mpa_m, mpa_e, mpa_n, mpa_c, mpa_result);
	if (!ret)
		memcpy(output, mpa_result->d, BITS2BYTE(n_bits));
	rk_crypto_disable_clk(dev);

exit:
	rk_mpa_free(&mpa_m);
	rk_mpa_free(&mpa_e);
	rk_mpa_free(&mpa_n);
	rk_mpa_free(&mpa_c);
	rk_mpa_free(&mpa_result);

	return ret;
}
#endif

static const struct dm_crypto_ops rockchip_crypto_ops = {
	.capability   = rockchip_crypto_capability,
	.sha_init     = rockchip_crypto_sha_init,
	.sha_update   = rockchip_crypto_sha_update,
	.sha_final    = rockchip_crypto_sha_final,
#if CONFIG_IS_ENABLED(ROCKCHIP_RSA)
	.rsa_verify   = rockchip_crypto_rsa_verify,
#endif
#if CONFIG_IS_ENABLED(ROCKCHIP_HMAC)
	.hmac_init    = rockchip_crypto_hmac_init,
	.hmac_update  = rockchip_crypto_hmac_update,
	.hmac_final   = rockchip_crypto_hmac_final,
#endif
#if CONFIG_IS_ENABLED(ROCKCHIP_CIPHER)
	.cipher_crypt = rockchip_crypto_cipher,
	.cipher_mac   = rockchip_crypto_mac,
	.cipher_ae    = rockchip_crypto_ae,
#endif
};

/*
 * Only use "clocks" to parse the crypto clock ids, and use
 * rockchip_get_clk(), because we always add a crypto node in the U-Boot
 * dts. When the kernel dtb is enabled:
 *
 *   1. there is a cru phandle mismatch between the U-Boot and kernel dtb;
 *   2. CONFIG_OF_SPL_REMOVE_PROPS removes the clock property.
 */
static int rockchip_crypto_ofdata_to_platdata(struct udevice *dev)
{
	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
	int len, ret = -EINVAL;

	memset(priv, 0x00, sizeof(*priv));

	priv->reg = (fdt_addr_t)dev_read_addr_ptr(dev);
	if (priv->reg == FDT_ADDR_T_NONE)
		return -EINVAL;

	crypto_base = priv->reg;

	/* if there are no clocks in the dts, just skip them */
	if (!dev_read_prop(dev, "clocks", &len)) {
		printf("Can't find \"clocks\" property\n");
		return 0;
	}

	priv->clocks = malloc(len);
	if (!priv->clocks)
		return -ENOMEM;

	priv->nclocks = len / (2 * sizeof(u32));
	if (dev_read_u32_array(dev, "clocks", (u32 *)priv->clocks,
			       priv->nclocks)) {
		printf("Can't read \"clocks\" property\n");
		ret = -EINVAL;
		goto exit;
	}

	if (dev_read_prop(dev, "clock-frequency", &len)) {
		priv->frequencies = malloc(len);
		if (!priv->frequencies) {
			ret = -ENOMEM;
			goto exit;
		}
		priv->freq_nclocks = len / sizeof(u32);
		if (dev_read_u32_array(dev, "clock-frequency", priv->frequencies,
				       priv->freq_nclocks)) {
			printf("Can't read \"clock-frequency\" property\n");
			ret = -EINVAL;
			goto exit;
		}
	}

	return 0;
exit:
	if (priv->clocks)
		free(priv->clocks);

	if (priv->frequencies)
		free(priv->frequencies);

	return ret;
}

static int rk_crypto_set_clk(struct udevice *dev)
{
	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
	struct clk clk;
	int i, ret;

	/* use the standard "assigned-clock-rates" props */
	if (dev_read_size(dev, "assigned-clock-rates") > 0)
		return clk_set_defaults(dev);

	/* use the "clock-frequency" props */
	if (priv->freq_nclocks == 0)
		return 0;

	for (i = 0; i < priv->freq_nclocks; i++) {
		ret = clk_get_by_index(dev, i, &clk);
		if (ret < 0) {
			printf("Failed to get clk index %d, ret=%d\n", i, ret);
			return ret;
		}
		ret = clk_set_rate(&clk, priv->frequencies[i]);
		if (ret < 0) {
			printf("%s: Failed to set clk(%ld): ret=%d\n",
			       __func__, clk.id, ret);
			return ret;
		}
	}

	return 0;
}

static int rockchip_crypto_probe(struct udevice *dev)
{
	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
	struct rk_crypto_soc_data *sdata;
	int ret = 0;

	sdata = (struct rk_crypto_soc_data *)dev_get_driver_data(dev);

	if (sdata->dynamic_cap)
		sdata->capability = sdata->dynamic_cap();

	priv->soc_data = sdata;

	priv->hw_ctx = memalign(LLI_ADDR_ALIGN_SIZE,
				sizeof(struct rk_hash_ctx));
	if (!priv->hw_ctx)
		return -ENOMEM;

	ret = rk_crypto_set_clk(dev);
	if (ret)
		return ret;

	rk_crypto_enable_clk(dev);

	hw_crypto_reset();

	rk_crypto_disable_clk(dev);

	return 0;
}

static const struct rk_crypto_soc_data soc_data_base = {
	.capability = CRYPTO_MD5 |
		      CRYPTO_SHA1 |
		      CRYPTO_SHA256 |
		      CRYPTO_SHA512 |
		      CRYPTO_HMAC_MD5 |
		      CRYPTO_HMAC_SHA1 |
		      CRYPTO_HMAC_SHA256 |
		      CRYPTO_HMAC_SHA512 |
		      CRYPTO_RSA512 |
		      CRYPTO_RSA1024 |
		      CRYPTO_RSA2048 |
		      CRYPTO_RSA3072 |
		      CRYPTO_RSA4096 |
		      CRYPTO_DES |
		      CRYPTO_AES,
};

static const struct rk_crypto_soc_data soc_data_base_sm = {
	.capability = CRYPTO_MD5 |
		      CRYPTO_SHA1 |
		      CRYPTO_SHA256 |
		      CRYPTO_SHA512 |
		      CRYPTO_SM3 |
		      CRYPTO_HMAC_MD5 |
		      CRYPTO_HMAC_SHA1 |
		      CRYPTO_HMAC_SHA256 |
		      CRYPTO_HMAC_SHA512 |
		      CRYPTO_HMAC_SM3 |
		      CRYPTO_RSA512 |
		      CRYPTO_RSA1024 |
		      CRYPTO_RSA2048 |
		      CRYPTO_RSA3072 |
		      CRYPTO_RSA4096 |
		      CRYPTO_DES |
		      CRYPTO_AES |
		      CRYPTO_SM4,
};

static const struct rk_crypto_soc_data soc_data_rk1808 = {
	.capability = CRYPTO_MD5 |
		      CRYPTO_SHA1 |
		      CRYPTO_SHA256 |
		      CRYPTO_HMAC_MD5 |
		      CRYPTO_HMAC_SHA1 |
		      CRYPTO_HMAC_SHA256 |
		      CRYPTO_RSA512 |
		      CRYPTO_RSA1024 |
		      CRYPTO_RSA2048 |
		      CRYPTO_RSA3072 |
		      CRYPTO_RSA4096,
};

static const struct rk_crypto_soc_data soc_data_cryptov3 = {
	.capability  = 0,
	.dynamic_cap = crypto_v3_dynamic_cap,
};

static const struct udevice_id rockchip_crypto_ids[] = {
	{
		.compatible = "rockchip,px30-crypto",
		.data = (ulong)&soc_data_base
	},
	{
		.compatible = "rockchip,rk1808-crypto",
		.data = (ulong)&soc_data_rk1808
	},
	{
		.compatible = "rockchip,rk3308-crypto",
		.data = (ulong)&soc_data_base
	},
	{
		.compatible = "rockchip,rv1126-crypto",
		.data = (ulong)&soc_data_base_sm
	},
	{
		.compatible = "rockchip,rk3568-crypto",
		.data = (ulong)&soc_data_base_sm
	},
	{
		.compatible = "rockchip,rk3588-crypto",
		.data = (ulong)&soc_data_base_sm
	},
	{
		.compatible = "rockchip,crypto-v3",
		.data = (ulong)&soc_data_cryptov3
	},
	{
		.compatible = "rockchip,crypto-v4",
		.data = (ulong)&soc_data_cryptov3 /* reuse crypto v3 config */
	},
	{ }
};

U_BOOT_DRIVER(rockchip_crypto_v2) = {
	.name		= "rockchip_crypto_v2",
	.id		= UCLASS_CRYPTO,
	.of_match	= rockchip_crypto_ids,
	.ops		= &rockchip_crypto_ops,
	.probe		= rockchip_crypto_probe,
	.ofdata_to_platdata = rockchip_crypto_ofdata_to_platdata,
	.priv_auto_alloc_size = sizeof(struct rockchip_crypto_priv),
};