xref: /rk3399_rockchip-uboot/drivers/crypto/rockchip/crypto_v2.c (revision 5c51263aff3d2d2520aa041c679a16ff0548e7e2)
1 // SPDX-License-Identifier: GPL-2.0
2 /*
3  * Copyright (c) 2019 Fuzhou Rockchip Electronics Co., Ltd
4  */
5 
6 #include <common.h>
7 #include <clk.h>
8 #include <crypto.h>
9 #include <dm.h>
10 #include <asm/io.h>
11 #include <asm/arch/hardware.h>
12 #include <asm/arch/clock.h>
13 #include <rockchip/crypto_hash_cache.h>
14 #include <rockchip/crypto_v2.h>
15 #include <rockchip/crypto_v2_pka.h>
16 
/* Sentinel stored in rk_hash_ctx.magic to detect an initialized context */
#define	RK_HASH_CTX_MAGIC		0x1A1A1A1A

/* Trace helper: expands to a printf with function/line prefix only when
 * DEBUG is defined; otherwise compiles to nothing.
 */
#ifdef DEBUG
#define IMSG(format, ...) printf("[%s, %05d]-trace: " format "\n", \
				 __func__, __LINE__, ##__VA_ARGS__)
#else
#define IMSG(format, ...)
#endif
25 
/*
 * Hardware DMA link-list item (LLI) descriptor, laid out exactly as the
 * crypto DMA engine expects it: eight consecutive 32-bit words.
 */
struct crypto_lli_desc {
	u32 src_addr;		/* physical address of source data */
	u32 src_len;		/* source length in bytes */
	u32 dst_addr;		/* physical address of destination buffer */
	u32 dst_len;		/* destination length in bytes */
	u32 user_define;	/* LLI_USER_* flags + key channel select */
	u32 reserve;
	u32 dma_ctrl;		/* LLI_DMA_CTRL_* flags */
	u32 next_addr;		/* physical address of next descriptor, or 0 */
};

/* Software state for one in-progress hash/HMAC operation */
struct rk_hash_ctx {
	struct crypto_lli_desc		data_lli;	/* lli desc */
	struct crypto_hash_cache	*hash_cache;	/* batches/aligns input */
	u32				magic;		/* to check ctx */
	u32				algo;		/* hash algo */
	u8				digest_size;	/* hash out length */
	u8				reserved[3];
};

/* Per-SoC feature description (bitmask of CRYPTO_* capabilities) */
struct rk_crypto_soc_data {
	u32 capability;
};

/* Driver-private state attached to the udevice */
struct rockchip_crypto_priv {
	fdt_addr_t			reg;		/* MMIO base address */
	struct clk			clk;
	u32				frequency;
	char				*clocks;	/* clock name list from DT */
	u32				*frequencies;
	u32				nclocks;
	u32				length;		/* bytes hashed so far */
	struct rk_hash_ctx		*hw_ctx;
	struct rk_crypto_soc_data	*soc_data;
};
61 
/* Alignment requirements of the crypto DMA engine */
#define LLI_ADDR_ALIGN_SIZE	8
#define DATA_ADDR_ALIGN_SIZE	8
#define DATA_LEN_ALIGN_SIZE	64

/* crypto timeout 500ms, must support more than 32M data per times*/
#define HASH_UPDATE_LIMIT	(32 * 1024 * 1024)
#define RK_CRYPTO_TIMEOUT	500000

/*
 * Busy-poll (1us steps) while `condition` holds; evaluates to -ETIMEDOUT
 * when the counter expires, 0 otherwise.
 */
#define RK_POLL_TIMEOUT(condition, timeout) \
({ \
	int time_out = timeout; \
	while (condition) { \
		if (--time_out <= 0) { \
			debug("[%s] %d: time out!\n", __func__,\
				__LINE__); \
			break; \
		} \
		udelay(1); \
	} \
	(time_out <= 0) ? -ETIMEDOUT : 0; \
})

/* Wait for the tag-valid bit of `channel`, then write-1-to-clear it */
#define WAIT_TAG_VALID(channel, timeout) ({ \
	u32 tag_mask = CRYPTO_CH0_TAG_VALID << (channel);\
	int ret;\
	ret = RK_POLL_TIMEOUT(!(crypto_read(CRYPTO_TAG_VALID) & tag_mask),\
			      timeout);\
	crypto_write(crypto_read(CRYPTO_TAG_VALID) & tag_mask, CRYPTO_TAG_VALID);\
	ret;\
})

/* Flat 1:1 mapping: DMA uses the low 32 bits of the virtual address */
#define virt_to_phys(addr)		(((unsigned long)addr) & 0xffffffff)
#define phys_to_virt(addr, area)	((unsigned long)addr)

#define align_malloc(bytes, alignment)	memalign(alignment, bytes)
#define align_free(addr)		do {if (addr) free(addr);} while (0)

#define ROUNDUP(size, alignment)	round_up(size, alignment)
#define cache_op_inner(type, addr, size) \
					crypto_flush_cacheline((ulong)addr, size)

/* Mode predicates: which block-cipher modes need an IV / produce a tag */
#define IS_NEED_IV(rk_mode) ((rk_mode) != RK_MODE_ECB && \
			     (rk_mode) != RK_MODE_CMAC && \
			     (rk_mode) != RK_MODE_CBC_MAC)

#define IS_NEED_TAG(rk_mode) ((rk_mode) == RK_MODE_CMAC || \
			      (rk_mode) == RK_MODE_CBC_MAC || \
			      (rk_mode) == RK_MODE_CCM || \
			      (rk_mode) == RK_MODE_GCM)

#define IS_MAC_MODE(rk_mode) ((rk_mode) == RK_MODE_CMAC || \
			      (rk_mode) == RK_MODE_CBC_MAC)

#define IS_AE_MODE(rk_mode) ((rk_mode) == RK_MODE_CCM || \
			     (rk_mode) == RK_MODE_GCM)

/* MMIO base used by the crypto_read()/crypto_write() accessors */
fdt_addr_t crypto_base;
119 
120 static inline void word2byte_be(u32 word, u8 *ch)
121 {
122 	ch[0] = (word >> 24) & 0xff;
123 	ch[1] = (word >> 16) & 0xff;
124 	ch[2] = (word >> 8) & 0xff;
125 	ch[3] = (word >> 0) & 0xff;
126 }
127 
128 static inline u32 byte2word_be(const u8 *ch)
129 {
130 	return (*ch << 24) + (*(ch + 1) << 16) + (*(ch + 2) << 8) + *(ch + 3);
131 }
132 
133 static inline void clear_regs(u32 base, u32 words)
134 {
135 	int i;
136 
137 	/*clear out register*/
138 	for (i = 0; i < words; i++)
139 		crypto_write(0, base + 4 * i);
140 }
141 
/* Zero all 16 hash output words (up to 512-bit digest) */
static inline void clear_hash_out_reg(void)
{
	clear_regs(CRYPTO_HASH_DOUT_0, 16);
}

/* Zero every key register of every key channel */
static inline void clear_key_regs(void)
{
	clear_regs(CRYPTO_CH0_KEY_0, CRYPTO_KEY_CHANNEL_NUM * 4);
}
151 
/*
 * Read data_len bytes from consecutive 32-bit registers at `base` into
 * `data`, converting each word to big-endian byte order. A trailing
 * partial word (data_len % 4) is read whole and copied partially.
 */
static inline void read_regs(u32 base, u8 *data, u32 data_len)
{
	u8 tmp_buf[4];
	u32 i;

	for (i = 0; i < data_len / 4; i++)
		word2byte_be(crypto_read(base + i * 4),
			     data + i * 4);

	/* i == data_len / 4 here: the index of the trailing partial word */
	if (data_len % 4) {
		word2byte_be(crypto_read(base + i * 4), tmp_buf);
		memcpy(data + i * 4, tmp_buf, data_len % 4);
	}
}
166 
/*
 * Write data_len bytes from `data` into consecutive 32-bit registers at
 * `base`, treating the bytes as big-endian words. A trailing partial
 * word is zero-padded before being written.
 */
static inline void write_regs(u32 base, const u8 *data, u32 data_len)
{
	u8 tmp_buf[4];
	u32 i;

	/* base advances with the loop; the tail write below relies on it */
	for (i = 0; i < data_len / 4; i++, base += 4)
		crypto_write(byte2word_be(data + i * 4), base);

	if (data_len % 4) {
		memset(tmp_buf, 0x00, sizeof(tmp_buf));
		memcpy((u8 *)tmp_buf, data + i * 4, data_len % 4);
		crypto_write(byte2word_be(tmp_buf), base);
	}
}
181 
/* Load `key` into the key registers of channel `chn` (0x10 bytes apart) */
static inline void write_key_reg(u32 chn, const u8 *key, u32 key_len)
{
	write_regs(CRYPTO_CH0_KEY_0 + chn * 0x10, key, key_len);
}

/*
 * Program the IV registers of channel `chn`. Always clears the 16-byte
 * IV area first; with a NULL/empty IV the channel is left with a zero IV
 * and the IV length register untouched.
 */
static inline void set_iv_reg(u32 chn, const u8 *iv, u32 iv_len)
{
	u32 base_iv;

	base_iv = CRYPTO_CH0_IV_0 + chn * 0x10;

	/* clear iv */
	clear_regs(base_iv, 4);

	if (!iv || iv_len == 0)
		return;

	write_regs(base_iv, iv, iv_len);

	crypto_write(iv_len, CRYPTO_CH0_IV_LEN_0 + 4 * chn);
}

/* Read back iv_len bytes of the current IV of channel `chn` */
static inline void get_iv_reg(u32 chn, u8 *iv, u32 iv_len)
{
	u32 base_iv;

	base_iv = CRYPTO_CH0_IV_0 + chn * 0x10;

	read_regs(base_iv, iv, iv_len);
}

/* Copy the computed authentication tag of channel `chn` into `tag`;
 * tag_len is expected to be a multiple of 4 (whole words only). */
static inline void get_tag_from_reg(u32 chn, u8 *tag, u32 tag_len)
{
	u32 i;
	u32 chn_base = CRYPTO_CH0_TAG_0 + 0x10 * chn;

	for (i = 0; i < tag_len / 4; i++, chn_base += 4)
		word2byte_be(crypto_read(chn_base), tag + 4 * i);
}
221 
/*
 * Soft-reset both the PKA and symmetric-crypto blocks.
 * Returns 0 on success, -ETIMEDOUT if the reset bits do not self-clear.
 */
static int hw_crypto_reset(void)
{
	u32 val = 0, mask = 0;
	int ret;

	val = CRYPTO_SW_PKA_RESET | CRYPTO_SW_CC_RESET;
	/* upper halfword is the write-enable mask for the lower halfword */
	mask = val << CRYPTO_WRITE_MASK_SHIFT;

	/* reset pka and crypto modules*/
	crypto_write(val | mask, CRYPTO_RST_CTL);

	/* wait reset compelete */
	ret = RK_POLL_TIMEOUT(crypto_read(CRYPTO_RST_CTL), RK_CRYPTO_TIMEOUT);

	return ret;
}
238 
239 static void hw_hash_clean_ctx(struct rk_hash_ctx *ctx)
240 {
241 	/* clear hash status */
242 	crypto_write(CRYPTO_WRITE_MASK_ALL | 0, CRYPTO_HASH_CTL);
243 
244 	assert(ctx);
245 	assert(ctx->magic == RK_HASH_CTX_MAGIC);
246 
247 	crypto_hash_cache_free(ctx->hash_cache);
248 
249 	memset(ctx, 0x00, sizeof(*ctx));
250 }
251 
252 static int rk_hash_init(void *hw_ctx, u32 algo)
253 {
254 	struct rk_hash_ctx *tmp_ctx = (struct rk_hash_ctx *)hw_ctx;
255 	u32 reg_ctrl = 0;
256 	int ret;
257 
258 	if (!tmp_ctx)
259 		return -EINVAL;
260 
261 	reg_ctrl = CRYPTO_SW_CC_RESET;
262 	crypto_write(reg_ctrl | (reg_ctrl << CRYPTO_WRITE_MASK_SHIFT),
263 		     CRYPTO_RST_CTL);
264 
265 	/* wait reset compelete */
266 	ret = RK_POLL_TIMEOUT(crypto_read(CRYPTO_RST_CTL),
267 			      RK_CRYPTO_TIMEOUT);
268 
269 	reg_ctrl = 0;
270 	tmp_ctx->algo = algo;
271 	switch (algo) {
272 	case CRYPTO_MD5:
273 	case CRYPTO_HMAC_MD5:
274 		reg_ctrl |= CRYPTO_MODE_MD5;
275 		tmp_ctx->digest_size = 16;
276 		break;
277 	case CRYPTO_SHA1:
278 	case CRYPTO_HMAC_SHA1:
279 		reg_ctrl |= CRYPTO_MODE_SHA1;
280 		tmp_ctx->digest_size = 20;
281 		break;
282 	case CRYPTO_SHA256:
283 	case CRYPTO_HMAC_SHA256:
284 		reg_ctrl |= CRYPTO_MODE_SHA256;
285 		tmp_ctx->digest_size = 32;
286 		break;
287 	case CRYPTO_SHA512:
288 	case CRYPTO_HMAC_SHA512:
289 		reg_ctrl |= CRYPTO_MODE_SHA512;
290 		tmp_ctx->digest_size = 64;
291 		break;
292 	case CRYPTO_SM3:
293 	case CRYPTO_HMAC_SM3:
294 		reg_ctrl |= CRYPTO_MODE_SM3;
295 		tmp_ctx->digest_size = 32;
296 		break;
297 	default:
298 		ret = -EINVAL;
299 		goto exit;
300 	}
301 
302 	clear_hash_out_reg();
303 
304 	/* enable hardware padding */
305 	reg_ctrl |= CRYPTO_HW_PAD_ENABLE;
306 	crypto_write(reg_ctrl | CRYPTO_WRITE_MASK_ALL, CRYPTO_HASH_CTL);
307 
308 	/* FIFO input and output data byte swap */
309 	/* such as B0, B1, B2, B3 -> B3, B2, B1, B0 */
310 	reg_ctrl = CRYPTO_DOUT_BYTESWAP | CRYPTO_DOIN_BYTESWAP;
311 	crypto_write(reg_ctrl | CRYPTO_WRITE_MASK_ALL, CRYPTO_FIFO_CTL);
312 
313 	/* enable src_item_done interrupt */
314 	crypto_write(0, CRYPTO_DMA_INT_EN);
315 
316 	tmp_ctx->magic = RK_HASH_CTX_MAGIC;
317 
318 	return 0;
319 exit:
320 	/* clear hash setting if init failed */
321 	crypto_write(CRYPTO_WRITE_MASK_ALL | 0, CRYPTO_HASH_CTL);
322 
323 	return ret;
324 }
325 
/*
 * Push one aligned chunk of data through the hash engine via DMA.
 * Called by the hash cache layer with DATA_ADDR_ALIGN_SIZE-aligned data;
 * every chunk except the last must be a multiple of DATA_LEN_ALIGN_SIZE.
 * *started_flag tracks whether the DMA string was already started, so
 * follow-up chunks use DMA_RESTART instead of DMA_START.
 * On success priv->length is advanced by data_len.
 */
static int rk_hash_direct_calc(void *hw_data, const u8 *data,
			       u32 data_len, u8 *started_flag, u8 is_last)
{
	struct rockchip_crypto_priv *priv = hw_data;
	struct rk_hash_ctx *hash_ctx = priv->hw_ctx;
	struct crypto_lli_desc *lli = &hash_ctx->data_lli;
	int ret = -EINVAL;
	u32 tmp = 0, mask = 0;

	assert(IS_ALIGNED((ulong)data, DATA_ADDR_ALIGN_SIZE));
	assert(is_last || IS_ALIGNED(data_len, DATA_LEN_ALIGN_SIZE));

	debug("%s: data = %p, len = %u, s = %x, l = %x\n",
	      __func__, data, data_len, *started_flag, is_last);

	memset(lli, 0x00, sizeof(*lli));
	lli->src_addr = (u32)virt_to_phys(data);
	lli->src_len = data_len;
	lli->dma_ctrl = LLI_DMA_CTRL_SRC_DONE;

	if (is_last) {
		lli->user_define |= LLI_USER_STRING_LAST;
		lli->dma_ctrl |= LLI_DMA_CTRL_LAST;
	} else {
		/* point the descriptor at itself and pause: the next chunk
		 * rewrites it in place and issues DMA_RESTART */
		lli->next_addr = (u32)virt_to_phys(lli);
		lli->dma_ctrl |= LLI_DMA_CTRL_PAUSE;
	}

	if (!(*started_flag)) {
		lli->user_define |=
			(LLI_USER_STRING_START | LLI_USER_CPIHER_START);
		crypto_write((u32)virt_to_phys(lli), CRYPTO_DMA_LLI_ADDR);
		crypto_write((CRYPTO_HASH_ENABLE << CRYPTO_WRITE_MASK_SHIFT) |
			     CRYPTO_HASH_ENABLE, CRYPTO_HASH_CTL);
		tmp = CRYPTO_DMA_START;
		*started_flag = 1;
	} else {
		tmp = CRYPTO_DMA_RESTART;
	}

	/* flush cache */
	crypto_flush_cacheline((ulong)lli, sizeof(*lli));
	crypto_flush_cacheline((ulong)data, data_len);

	/* start calculate */
	crypto_write(tmp << CRYPTO_WRITE_MASK_SHIFT | tmp,
		     CRYPTO_DMA_CTL);

	/* mask CRYPTO_SYNC_LOCKSTEP_INT_ST flag */
	mask = ~(mask | CRYPTO_SYNC_LOCKSTEP_INT_ST);

	/* wait calc ok */
	ret = RK_POLL_TIMEOUT(!(crypto_read(CRYPTO_DMA_INT_ST) & mask),
			      RK_CRYPTO_TIMEOUT);

	/* clear interrupt status (write-1-to-clear) */
	tmp = crypto_read(CRYPTO_DMA_INT_ST);
	crypto_write(tmp, CRYPTO_DMA_INT_ST);

	/* anything other than src-done / zero-length is an error status */
	if (tmp != CRYPTO_SRC_ITEM_DONE_INT_ST &&
	    tmp != CRYPTO_ZERO_LEN_INT_ST) {
		debug("[%s] %d: CRYPTO_DMA_INT_ST = 0x%x\n",
		      __func__, __LINE__, tmp);
		goto exit;
	}

	priv->length += data_len;
exit:
	return ret;
}
396 
397 int rk_hash_update(void *ctx, const u8 *data, u32 data_len)
398 {
399 	struct rk_hash_ctx *tmp_ctx = (struct rk_hash_ctx *)ctx;
400 	int ret = -EINVAL;
401 
402 	debug("\n");
403 	if (!tmp_ctx || !data)
404 		goto exit;
405 
406 	if (tmp_ctx->digest_size == 0 || tmp_ctx->magic != RK_HASH_CTX_MAGIC)
407 		goto exit;
408 
409 	ret = crypto_hash_update_with_cache(tmp_ctx->hash_cache,
410 					    data, data_len);
411 
412 exit:
413 	/* free lli list */
414 	if (ret)
415 		hw_hash_clean_ctx(tmp_ctx);
416 
417 	return ret;
418 }
419 
/*
 * Wait for the final digest and copy `len` bytes of it into `digest`.
 * `len` may be shorter than the full digest (truncated read).
 * Returns 0 on success, -EINVAL on bad arguments, -ETIMEDOUT if the
 * digest-valid bit never rises.
 * NOTE(review): on timeout the digest registers are still read and the
 * status cleared — callers must check the return code, the output buffer
 * contents are not guaranteed. Confirm this is the intended behavior.
 */
int rk_hash_final(void *ctx, u8 *digest, size_t len)
{
	struct rk_hash_ctx *tmp_ctx = (struct rk_hash_ctx *)ctx;
	int ret = -EINVAL;

	if (!digest)
		goto exit;

	if (!tmp_ctx ||
	    tmp_ctx->digest_size == 0 ||
	    len > tmp_ctx->digest_size ||
	    tmp_ctx->magic != RK_HASH_CTX_MAGIC) {
		goto exit;
	}

	/* wait hash value ok */
	ret = RK_POLL_TIMEOUT(!crypto_read(CRYPTO_HASH_VALID),
			      RK_CRYPTO_TIMEOUT);

	read_regs(CRYPTO_HASH_DOUT_0, digest, len);

	/* clear hash status */
	crypto_write(CRYPTO_HASH_IS_VALID, CRYPTO_HASH_VALID);
	crypto_write(CRYPTO_WRITE_MASK_ALL | 0, CRYPTO_HASH_CTL);

exit:

	return ret;
}
449 
450 static u32 rockchip_crypto_capability(struct udevice *dev)
451 {
452 	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
453 	u32 capability, mask = 0;
454 
455 	capability = priv->soc_data->capability;
456 
457 #if !(CONFIG_IS_ENABLED(ROCKCHIP_CIPHER))
458 	mask |= (CRYPTO_DES | CRYPTO_AES | CRYPTO_SM4);
459 #endif
460 
461 #if !(CONFIG_IS_ENABLED(ROCKCHIP_HMAC))
462 	mask |= (CRYPTO_HMAC_MD5 | CRYPTO_HMAC_SHA1 | CRYPTO_HMAC_SHA256 |
463 			 CRYPTO_HMAC_SHA512 | CRYPTO_HMAC_SM3);
464 #endif
465 
466 #if !(CONFIG_IS_ENABLED(ROCKCHIP_RSA))
467 	mask |= (CRYPTO_RSA512 | CRYPTO_RSA1024 | CRYPTO_RSA2048 |
468 			 CRYPTO_RSA3072 | CRYPTO_RSA4096);
469 #endif
470 
471 	return capability & (~mask);
472 }
473 
/*
 * Begin a SHA/MD5/SM3 operation: reset the software context, remember
 * the expected total length (checked again in sha_final), allocate the
 * alignment cache and program the hardware for ctx->algo.
 */
static int rockchip_crypto_sha_init(struct udevice *dev, sha_context *ctx)
{
	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
	struct rk_hash_ctx *hash_ctx = priv->hw_ctx;

	if (!ctx)
		return -EINVAL;

	memset(hash_ctx, 0x00, sizeof(*hash_ctx));

	priv->length = 0;

	hash_ctx->hash_cache = crypto_hash_cache_alloc(rk_hash_direct_calc,
						       priv, ctx->length,
						       DATA_ADDR_ALIGN_SIZE,
						       DATA_LEN_ALIGN_SIZE);
	if (!hash_ctx->hash_cache)
		return -EFAULT;

	return rk_hash_init(hash_ctx, ctx->algo);
}
495 
496 static int rockchip_crypto_sha_update(struct udevice *dev,
497 				      u32 *input, u32 len)
498 {
499 	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
500 	int ret, i;
501 	u8 *p;
502 
503 	if (!len)
504 		return -EINVAL;
505 
506 	p = (u8 *)input;
507 
508 	for (i = 0; i < len / HASH_UPDATE_LIMIT; i++, p += HASH_UPDATE_LIMIT) {
509 		ret = rk_hash_update(priv->hw_ctx, p, HASH_UPDATE_LIMIT);
510 		if (ret)
511 			goto exit;
512 	}
513 
514 	if (len % HASH_UPDATE_LIMIT)
515 		ret = rk_hash_update(priv->hw_ctx, p, len % HASH_UPDATE_LIMIT);
516 
517 exit:
518 	return ret;
519 }
520 
521 static int rockchip_crypto_sha_final(struct udevice *dev,
522 				     sha_context *ctx, u8 *output)
523 {
524 	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
525 	u32 nbits;
526 	int ret;
527 
528 	nbits = crypto_algo_nbits(ctx->algo);
529 
530 	if (priv->length != ctx->length) {
531 		printf("total length(0x%08x) != init length(0x%08x)!\n",
532 		       priv->length, ctx->length);
533 		ret = -EIO;
534 		goto exit;
535 	}
536 
537 	ret = rk_hash_final(priv->hw_ctx, (u8 *)output, BITS2BYTE(nbits));
538 
539 exit:
540 	hw_hash_clean_ctx(priv->hw_ctx);
541 	return ret;
542 }
543 
544 #if CONFIG_IS_ENABLED(ROCKCHIP_HMAC)
/*
 * Configure the engine for an HMAC operation: load the key into key
 * channel 0, run the normal hash init, then turn on the HMAC bit.
 * NOTE(review): keys are limited to 64 bytes here; for HMAC-SHA512 the
 * block size is 128 bytes, so longer keys are rejected rather than
 * hashed down — confirm against the hardware's key handling.
 */
int rk_hmac_init(void *hw_ctx, u32 algo, u8 *key, u32 key_len)
{
	u32 reg_ctrl = 0;
	int ret;

	if (!key || !key_len || key_len > 64)
		return -EINVAL;

	clear_key_regs();

	write_key_reg(0, key, key_len);

	ret = rk_hash_init(hw_ctx, algo);
	if (ret)
		return ret;

	/* set the HMAC enable bit on top of the hash configuration */
	reg_ctrl = crypto_read(CRYPTO_HASH_CTL) | CRYPTO_HMAC_ENABLE;
	crypto_write(reg_ctrl | CRYPTO_WRITE_MASK_ALL, CRYPTO_HASH_CTL);

	return ret;
}
566 
/*
 * Begin an HMAC operation. Mirrors rockchip_crypto_sha_init() but routes
 * through rk_hmac_init() so the key is loaded before hashing starts.
 */
static int rockchip_crypto_hmac_init(struct udevice *dev,
				     sha_context *ctx, u8 *key, u32 key_len)
{
	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
	struct rk_hash_ctx *hash_ctx = priv->hw_ctx;

	if (!ctx)
		return -EINVAL;

	memset(hash_ctx, 0x00, sizeof(*hash_ctx));

	priv->length = 0;

	hash_ctx->hash_cache = crypto_hash_cache_alloc(rk_hash_direct_calc,
						       priv, ctx->length,
						       DATA_ADDR_ALIGN_SIZE,
						       DATA_LEN_ALIGN_SIZE);
	if (!hash_ctx->hash_cache)
		return -EFAULT;

	return rk_hmac_init(priv->hw_ctx, ctx->algo, key, key_len);
}
589 
/* HMAC update/final share the plain-hash data path once the key is set */
static int rockchip_crypto_hmac_update(struct udevice *dev,
				       u32 *input, u32 len)
{
	return rockchip_crypto_sha_update(dev, input, len);
}

static int rockchip_crypto_hmac_final(struct udevice *dev,
				      sha_context *ctx, u8 *output)
{
	return rockchip_crypto_sha_final(dev, ctx, output);
}
601 
602 #endif
603 
604 #if CONFIG_IS_ENABLED(ROCKCHIP_CIPHER)
/* Key channel used for all cipher operations (always 0 in this driver) */
static u8 g_key_chn;

/* Map driver RK_MODE_* values to CRYPTO_BC_CTL mode field encodings */
static const u32 rk_mode2bc_mode[RK_MODE_MAX] = {
	[RK_MODE_ECB] = CRYPTO_BC_ECB,
	[RK_MODE_CBC] = CRYPTO_BC_CBC,
	[RK_MODE_CTS] = CRYPTO_BC_CTS,
	[RK_MODE_CTR] = CRYPTO_BC_CTR,
	[RK_MODE_CFB] = CRYPTO_BC_CFB,
	[RK_MODE_OFB] = CRYPTO_BC_OFB,
	[RK_MODE_XTS] = CRYPTO_BC_XTS,
	[RK_MODE_CCM] = CRYPTO_BC_CCM,
	[RK_MODE_GCM] = CRYPTO_BC_GCM,
	[RK_MODE_CMAC] = CRYPTO_BC_CMAC,
	[RK_MODE_CBC_MAC] = CRYPTO_BC_CBC_MAC,
};
620 
/* Program the 64-bit plaintext/ciphertext length of channel `chn`
 * (low word first; register pairs are 8 bytes apart). */
static inline void set_pc_len_reg(u32 chn, u64 pc_len)
{
	u32 chn_base = CRYPTO_CH0_PC_LEN_0 + chn * 0x08;

	crypto_write(pc_len & 0xffffffff, chn_base);
	crypto_write(pc_len >> 32, chn_base + 4);
}

/* Program the 64-bit AAD length of channel `chn` for GCM */
static inline void set_aad_len_reg(u32 chn, u64 pc_len)
{
	u32 chn_base = CRYPTO_CH0_AAD_LEN_0 + chn * 0x08;

	crypto_write(pc_len & 0xffffffff, chn_base);
	crypto_write(pc_len >> 32, chn_base + 4);
}
636 
637 static inline bool is_des_mode(u32 rk_mode)
638 {
639 	return (rk_mode == RK_MODE_ECB ||
640 		rk_mode == RK_MODE_CBC ||
641 		rk_mode == RK_MODE_CFB ||
642 		rk_mode == RK_MODE_OFB);
643 }
644 
/*
 * Debug-only dump of the DMA descriptor and engine status registers
 * after a failed or mismatched cipher operation. Compiles to nothing
 * unless DEBUG is defined (IMSG).
 */
static void dump_crypto_state(struct crypto_lli_desc *desc,
			      u32 tmp, u32 expt_int,
			      const u8 *in, const u8 *out,
			      u32 len, int ret)
{
	IMSG("%s\n", ret == -ETIME ? "timeout" : "dismatch");

	IMSG("CRYPTO_DMA_INT_ST = %08x, expect_int = %08x\n",
	     tmp, expt_int);
	IMSG("data desc		= %p\n", desc);
	IMSG("\taddr_in		= [%08x <=> %08x]\n",
	     desc->src_addr, (u32)virt_to_phys(in));
	IMSG("\taddr_out	= [%08x <=> %08x]\n",
	     desc->dst_addr, (u32)virt_to_phys(out));
	IMSG("\tsrc_len		= [%08x <=> %08x]\n",
	     desc->src_len, (u32)len);
	IMSG("\tdst_len		= %08x\n", desc->dst_len);
	IMSG("\tdma_ctl		= %08x\n", desc->dma_ctrl);
	IMSG("\tuser_define	= %08x\n", desc->user_define);

	IMSG("\n\nDMA CRYPTO_DMA_LLI_ADDR status = %08x\n",
	     crypto_read(CRYPTO_DMA_LLI_ADDR));
	IMSG("DMA CRYPTO_DMA_ST status = %08x\n",
	     crypto_read(CRYPTO_DMA_ST));
	IMSG("DMA CRYPTO_DMA_STATE status = %08x\n",
	     crypto_read(CRYPTO_DMA_STATE));
	IMSG("DMA CRYPTO_DMA_LLI_RADDR status = %08x\n",
	     crypto_read(CRYPTO_DMA_LLI_RADDR));
	IMSG("DMA CRYPTO_DMA_SRC_RADDR status = %08x\n",
	     crypto_read(CRYPTO_DMA_SRC_RADDR));
	IMSG("DMA CRYPTO_DMA_DST_RADDR status = %08x\n",
	     crypto_read(CRYPTO_DMA_DST_RADDR));
	IMSG("DMA CRYPTO_CIPHER_ST status = %08x\n",
	     crypto_read(CRYPTO_CIPHER_ST));
	IMSG("DMA CRYPTO_CIPHER_STATE status = %08x\n",
	     crypto_read(CRYPTO_CIPHER_STATE));
	IMSG("DMA CRYPTO_TAG_VALID status = %08x\n",
	     crypto_read(CRYPTO_TAG_VALID));
	/* 0x618 has no named #define in this header set — lockstep status */
	IMSG("LOCKSTEP status = %08x\n\n",
	     crypto_read(0x618));

	IMSG("dst %dbyte not transferred\n",
	     desc->dst_addr + desc->dst_len -
	     crypto_read(CRYPTO_DMA_DST_RADDR));
}
690 
/*
 * Build the CCM counter block A0 from the nonce and load it as the IV:
 * flags byte = (q - 1) where q = 15 - nlen, nonce follows, remaining
 * bytes zero (counter starts at 0).
 * NOTE(review): only 14 - L nonce bytes are copied, not the usual
 * 15 - q of SP 800-38C — presumably matched to what the engine expects
 * together with ccm_compose_aad_iv(); verify against the TRM.
 */
static int ccm128_set_iv_reg(u32 chn, const u8 *nonce, u32 nlen)
{
	u8 iv_buf[AES_BLOCK_SIZE];
	u32 L;

	memset(iv_buf, 0x00, sizeof(iv_buf));

	L = 15 - nlen;
	iv_buf[0] = ((u8)(L - 1) & 7);

	/* the L parameter */
	L = iv_buf[0] & 7;

	/* nonce is too short */
	if (nlen < (14 - L))
		return -EINVAL;

	/* clear aad flag */
	iv_buf[0] &= ~0x40;
	memcpy(&iv_buf[1], nonce, 14 - L);

	set_iv_reg(chn, iv_buf, AES_BLOCK_SIZE);

	return 0;
}
716 
717 static void ccm_aad_padding(u32 aad_len, u8 *padding, u32 *padding_size)
718 {
719 	u32 i;
720 
721 	i = aad_len < (0x10000 - 0x100) ? 2 : 6;
722 
723 	if (i == 2) {
724 		padding[0] = (u8)(aad_len >> 8);
725 		padding[1] = (u8)aad_len;
726 	} else {
727 		padding[0] = 0xFF;
728 		padding[1] = 0xFE;
729 		padding[2] = (u8)(aad_len >> 24);
730 		padding[3] = (u8)(aad_len >> 16);
731 		padding[4] = (u8)(aad_len >> 8);
732 	}
733 
734 	*padding_size = i;
735 }
736 
/*
 * Turn the counter block read back from the IV registers into the CCM
 * B0 block: fold the tag size into the flags byte, store the payload
 * length big-endian in the last four bytes, and set the AAD flag.
 * NOTE(review): the length always occupies bytes 12..15; with nonce
 * lengths > 11 those bytes overlap the nonce area — confirm the
 * supported nonce range against the hardware documentation.
 */
static int ccm_compose_aad_iv(u8 *aad_iv, u32 data_len, u32 tag_size)
{
	aad_iv[0] |= ((u8)(((tag_size - 2) / 2) & 7) << 3);

	aad_iv[12] = (u8)(data_len >> 24);
	aad_iv[13] = (u8)(data_len >> 16);
	aad_iv[14] = (u8)(data_len >> 8);
	aad_iv[15] = (u8)data_len;

	aad_iv[0] |= 0x40;	//set aad flag

	return 0;
}
750 
/*
 * Program the block-cipher engine for one operation: select algorithm,
 * key size, mode and direction in CRYPTO_BC_CTL, and load key(s) and IV
 * into channel `chn`. twk_key is the XTS tweak key (channel chn + 4).
 * Does not start any DMA; hw_cipher_crypt() does the transfer.
 */
static int hw_cipher_init(u32 chn, const u8 *key, const u8 *twk_key,
			  u32 key_len, const u8 *iv, u32 iv_len,
			  u32 algo, u32 mode, bool enc)
{
	u32 rk_mode = RK_GET_RK_MODE(mode);
	u32 key_chn_sel = chn;
	u32 reg_ctrl = 0;

	IMSG("%s: key addr is %p, key_len is %d, iv addr is %p",
	     __func__, key, key_len, iv);
	if (rk_mode >= RK_MODE_MAX)
		return -EINVAL;

	switch (algo) {
	case CRYPTO_DES:
		/* key longer than one DES block selects 3DES */
		if (key_len > DES_BLOCK_SIZE)
			reg_ctrl |= CRYPTO_BC_TDES;
		else
			reg_ctrl |= CRYPTO_BC_DES;
		break;
	case CRYPTO_AES:
		reg_ctrl |= CRYPTO_BC_AES;
		break;
	case CRYPTO_SM4:
		reg_ctrl |= CRYPTO_BC_SM4;
		break;
	default:
		return -EINVAL;
	}

	if (algo == CRYPTO_AES || algo == CRYPTO_SM4) {
		switch (key_len) {
		case AES_KEYSIZE_128:
			reg_ctrl |= CRYPTO_BC_128_bit_key;
			break;
		case AES_KEYSIZE_192:
			reg_ctrl |= CRYPTO_BC_192_bit_key;
			break;
		case AES_KEYSIZE_256:
			reg_ctrl |= CRYPTO_BC_256_bit_key;
			break;
		default:
			return -EINVAL;
		}
	}

	reg_ctrl |= rk_mode2bc_mode[rk_mode];
	if (!enc)
		reg_ctrl |= CRYPTO_BC_DECRYPT;

	/* write key data to reg */
	write_key_reg(key_chn_sel, key, key_len);

	/* write twk key for xts mode */
	if (rk_mode == RK_MODE_XTS)
		write_key_reg(key_chn_sel + 4, twk_key, key_len);

	/* set iv reg */
	if (rk_mode == RK_MODE_CCM)
		ccm128_set_iv_reg(chn, iv, iv_len);
	else
		set_iv_reg(chn, iv, iv_len);

	/* din_swap set 1, dout_swap set 1, default 1. */
	crypto_write(0x00030003, CRYPTO_FIFO_CTL);
	crypto_write(0, CRYPTO_DMA_INT_EN);

	crypto_write(reg_ctrl | CRYPTO_WRITE_MASK_ALL, CRYPTO_BC_CTL);

	return 0;
}
822 
/*
 * Run one DMA cipher transfer through the already-configured engine.
 * Handles buffer alignment (bounce buffers when in/out/aad are not
 * 8-byte aligned or CTR needs block-rounded length), builds the LLI
 * chain (an extra AAD descriptor for CCM/GCM), polls for completion and
 * reads back the tag for tag-producing modes. `out` may be NULL for MAC
 * modes. Returns 0 on success, negative on error.
 *
 * NOTE(review): in the CCM path aad_tmp is always allocated; if aad is
 * NULL the final `if (aad && ...)` guard skips freeing it — leak.
 * NOTE(review): the dst invalidate below runs with dma_out == NULL in
 * MAC modes — presumably cache_op_inner tolerates that; confirm.
 */
static int hw_cipher_crypt(const u8 *in, u8 *out, u64 len,
			   const u8 *aad, u32 aad_len,
			   u8 *tag, u32 tag_len, u32 mode)
{
	struct crypto_lli_desc *data_desc = NULL, *aad_desc = NULL;
	u8 *dma_in = NULL, *dma_out = NULL, *aad_tmp = NULL;
	u32 rk_mode = RK_GET_RK_MODE(mode);
	u32 reg_ctrl = 0, tmp_len = 0;
	u32 expt_int = 0, mask = 0;
	u32 key_chn = g_key_chn;
	u32 tmp, dst_len = 0;
	int ret = -1;

	/* CTS needs more than one block to steal from */
	if (rk_mode == RK_MODE_CTS && len <= AES_BLOCK_SIZE) {
		printf("CTS mode length %u < 16Byte\n", (u32)len);
		return -EINVAL;
	}

	/* CTR transfers whole blocks; other modes use the exact length */
	tmp_len = (rk_mode == RK_MODE_CTR) ? ROUNDUP(len, AES_BLOCK_SIZE) : len;

	data_desc = align_malloc(sizeof(*data_desc), LLI_ADDR_ALIGN_SIZE);
	if (!data_desc)
		goto exit;

	/* use the caller's buffer directly only if aligned and unpadded */
	if (IS_ALIGNED((ulong)in, DATA_ADDR_ALIGN_SIZE) && tmp_len == len)
		dma_in = (void *)in;
	else
		dma_in = align_malloc(tmp_len, DATA_ADDR_ALIGN_SIZE);
	if (!dma_in)
		goto exit;

	if (out) {
		if (IS_ALIGNED((ulong)out, DATA_ADDR_ALIGN_SIZE) &&
		    tmp_len == len)
			dma_out = out;
		else
			dma_out = align_malloc(tmp_len, DATA_ADDR_ALIGN_SIZE);
		if (!dma_out)
			goto exit;
		dst_len = tmp_len;
	}

	memset(data_desc, 0x00, sizeof(*data_desc));
	if (dma_in != in)
		memcpy(dma_in, in, len);

	data_desc->src_addr    = (u32)virt_to_phys(dma_in);
	data_desc->src_len     = tmp_len;
	data_desc->dst_addr    = (u32)virt_to_phys(dma_out);
	data_desc->dst_len     = dst_len;
	data_desc->dma_ctrl    = LLI_DMA_CTRL_LAST;

	/* MAC modes produce no dst data: wait for list-done instead */
	if (IS_MAC_MODE(rk_mode)) {
		expt_int = CRYPTO_LIST_DONE_INT_ST;
		data_desc->dma_ctrl |= LLI_DMA_CTRL_LIST_DONE;
	} else {
		expt_int = CRYPTO_DST_ITEM_DONE_INT_ST;
		data_desc->dma_ctrl |= LLI_DMA_CTRL_DST_DONE;
	}

	if (rk_mode == RK_MODE_CCM || rk_mode == RK_MODE_GCM) {
		u32 aad_tmp_len = 0;

		data_desc->user_define = LLI_USER_STRING_START |
					 LLI_USER_STRING_LAST |
					 (key_chn << 4);

		/* AAD goes in a leading descriptor chained to the data */
		aad_desc = align_malloc(sizeof(*aad_desc), LLI_ADDR_ALIGN_SIZE);
		if (!aad_desc)
			goto exit;

		memset(aad_desc, 0x00, sizeof(*aad_desc));
		aad_desc->next_addr = (u32)virt_to_phys(data_desc);
		aad_desc->user_define = LLI_USER_CPIHER_START |
					 LLI_USER_STRING_START |
					 LLI_USER_STRING_LAST |
					 LLI_USER_STRING_AAD |
					 (key_chn << 4);

		if (rk_mode == RK_MODE_CCM) {
			u8 padding[AES_BLOCK_SIZE];
			u32 padding_size = 0;

			/* CCM AAD stream = B0 block + length prefix + aad,
			 * zero-padded to a block multiple */
			memset(padding, 0x00, sizeof(padding));
			ccm_aad_padding(aad_len, padding, &padding_size);

			aad_tmp_len = aad_len + AES_BLOCK_SIZE + padding_size;
			aad_tmp_len = ROUNDUP(aad_tmp_len, AES_BLOCK_SIZE);
			aad_tmp = align_malloc(aad_tmp_len,
					       DATA_ADDR_ALIGN_SIZE);
			if (!aad_tmp)
				goto exit;

			/* read iv data from reg */
			get_iv_reg(key_chn, aad_tmp, AES_BLOCK_SIZE);
			ccm_compose_aad_iv(aad_tmp, tmp_len, tag_len);
			memcpy(aad_tmp + AES_BLOCK_SIZE, padding, padding_size);
			memset(aad_tmp + aad_tmp_len - AES_BLOCK_SIZE,
			       0x00, AES_BLOCK_SIZE);
			memcpy(aad_tmp + AES_BLOCK_SIZE + padding_size,
			       aad, aad_len);
		} else {
			aad_tmp_len = aad_len;
			if (IS_ALIGNED((ulong)aad, DATA_ADDR_ALIGN_SIZE)) {
				aad_tmp = (void *)aad;
			} else {
				aad_tmp = align_malloc(aad_tmp_len,
						       DATA_ADDR_ALIGN_SIZE);
				if (!aad_tmp)
					goto exit;

				memcpy(aad_tmp, aad, aad_tmp_len);
			}

			set_aad_len_reg(key_chn, aad_tmp_len);
			set_pc_len_reg(key_chn, tmp_len);
		}

		aad_desc->src_addr = (u32)virt_to_phys(aad_tmp);
		aad_desc->src_len  = aad_tmp_len;
		crypto_write((u32)virt_to_phys(aad_desc), CRYPTO_DMA_LLI_ADDR);
		cache_op_inner(DCACHE_AREA_CLEAN, aad_tmp, aad_tmp_len);
		cache_op_inner(DCACHE_AREA_CLEAN, aad_desc, sizeof(*aad_desc));
	} else {
		data_desc->user_define = LLI_USER_CPIHER_START |
					 LLI_USER_STRING_START |
					 LLI_USER_STRING_LAST |
					 (key_chn << 4);
		crypto_write((u32)virt_to_phys(data_desc), CRYPTO_DMA_LLI_ADDR);
	}

	cache_op_inner(DCACHE_AREA_CLEAN, data_desc, sizeof(*data_desc));
	cache_op_inner(DCACHE_AREA_CLEAN, dma_in, tmp_len);
	cache_op_inner(DCACHE_AREA_INVALIDATE, dma_out, tmp_len);

	/* din_swap set 1, dout_swap set 1, default 1. */
	crypto_write(0x00030003, CRYPTO_FIFO_CTL);
	crypto_write(0, CRYPTO_DMA_INT_EN);

	reg_ctrl = crypto_read(CRYPTO_BC_CTL) | CRYPTO_BC_ENABLE;
	crypto_write(reg_ctrl | CRYPTO_WRITE_MASK_ALL, CRYPTO_BC_CTL);
	crypto_write(0x00010001, CRYPTO_DMA_CTL);//start

	/* ignore the lockstep status bit while polling */
	mask = ~(mask | CRYPTO_SYNC_LOCKSTEP_INT_ST);

	/* wait calc ok */
	ret = RK_POLL_TIMEOUT(!(crypto_read(CRYPTO_DMA_INT_ST) & mask),
			      RK_CRYPTO_TIMEOUT);
	tmp = crypto_read(CRYPTO_DMA_INT_ST);
	crypto_write(tmp, CRYPTO_DMA_INT_ST);

	if ((tmp & mask) == expt_int) {
		if (out && out != dma_out)
			memcpy(out, dma_out, len);

		if (IS_NEED_TAG(rk_mode)) {
			ret = WAIT_TAG_VALID(key_chn, RK_CRYPTO_TIMEOUT);
			get_tag_from_reg(key_chn, tag, AES_BLOCK_SIZE);
		}
	} else {
		dump_crypto_state(data_desc, tmp, expt_int, in, out, len, ret);
		ret = -1;
	}

exit:
	crypto_write(0xffff0000, CRYPTO_BC_CTL);//bc_ctl disable
	align_free(data_desc);
	align_free(aad_desc);
	if (dma_in != in)
		align_free(dma_in);
	if (out && dma_out != out)
		align_free(dma_out);
	if (aad && aad != aad_tmp)
		align_free(aad_tmp);

	return ret;
}
1000 
1001 static int hw_aes_init(u32 chn, const u8 *key, const u8 *twk_key, u32 key_len,
1002 		       const u8 *iv, u32 iv_len, u32 mode, bool enc)
1003 {
1004 	u32 rk_mode = RK_GET_RK_MODE(mode);
1005 
1006 	if (rk_mode > RK_MODE_XTS)
1007 		return -EINVAL;
1008 
1009 	if (iv_len > AES_BLOCK_SIZE)
1010 		return -EINVAL;
1011 
1012 	if (IS_NEED_IV(rk_mode)) {
1013 		if (!iv || iv_len != AES_BLOCK_SIZE)
1014 			return -EINVAL;
1015 	} else {
1016 		iv_len = 0;
1017 	}
1018 
1019 	if (rk_mode == RK_MODE_XTS) {
1020 		if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_256)
1021 			return -EINVAL;
1022 
1023 		if (!key || !twk_key)
1024 			return -EINVAL;
1025 	} else {
1026 		if (key_len != AES_KEYSIZE_128 &&
1027 		    key_len != AES_KEYSIZE_192 &&
1028 		    key_len != AES_KEYSIZE_256)
1029 			return -EINVAL;
1030 	}
1031 
1032 	return hw_cipher_init(chn, key, twk_key, key_len, iv, iv_len,
1033 			      CRYPTO_AES, mode, enc);
1034 }
1035 
/*
 * Validate SM4 parameters and hand off to hw_cipher_init().
 * SM4 has a single fixed key size; XTS additionally needs a tweak key.
 */
static int hw_sm4_init(u32  chn, const u8 *key, const u8 *twk_key, u32 key_len,
		       const u8 *iv, u32 iv_len, u32 mode, bool enc)
{
	u32 rk_mode = RK_GET_RK_MODE(mode);

	if (rk_mode > RK_MODE_XTS)
		return -EINVAL;

	if (iv_len > SM4_BLOCK_SIZE || key_len != SM4_KEYSIZE)
		return -EINVAL;

	if (IS_NEED_IV(rk_mode)) {
		if (!iv || iv_len != SM4_BLOCK_SIZE)
			return -EINVAL;
	} else {
		iv_len = 0;
	}

	if (rk_mode == RK_MODE_XTS) {
		if (!key || !twk_key)
			return -EINVAL;
	}

	return hw_cipher_init(chn, key, twk_key, key_len, iv, iv_len,
			      CRYPTO_SM4, mode, enc);
}
1062 
/*
 * DES/3DES one-shot operation. A two-key 3DES key (K1K2) is expanded to
 * the three-key form K1K2K1 that the engine expects.
 * NOTE(review): `key` is memcpy'd without a NULL check — callers must
 * pass a valid key buffer.
 */
int rk_crypto_des(struct udevice *dev, u32 mode, const u8 *key, u32 key_len,
		  const u8 *iv, const u8 *in, u8 *out, u32 len, bool enc)
{
	u32 rk_mode = RK_GET_RK_MODE(mode);
	u8 tmp_key[24];
	int ret;

	if (!is_des_mode(rk_mode))
		return -EINVAL;

	if (key_len == DES_BLOCK_SIZE || key_len == 3 * DES_BLOCK_SIZE) {
		memcpy(tmp_key, key, key_len);
	} else if (key_len == 2 * DES_BLOCK_SIZE) {
		/* 2-key 3DES: reuse K1 as K3 */
		memcpy(tmp_key, key, 16);
		memcpy(tmp_key + 16, key, 8);
		key_len = 3 * DES_BLOCK_SIZE;
	} else {
		return -EINVAL;
	}

	ret = hw_cipher_init(0, tmp_key, NULL, key_len, iv, DES_BLOCK_SIZE,
			     CRYPTO_DES, mode, enc);
	if (ret)
		goto exit;

	ret = hw_cipher_crypt(in, out, len, NULL, 0,
			      NULL, 0, mode);

exit:
	return ret;
}
1094 
/*
 * AES one-shot operation: configure the engine then run the transfer.
 * twk_key is only used for XTS mode.
 */
int rk_crypto_aes(struct udevice *dev, u32 mode,
		  const u8 *key, const u8 *twk_key, u32 key_len,
		  const u8 *iv, u32 iv_len,
		  const u8 *in, u8 *out, u32 len, bool enc)
{
	int ret;

	/* RV1126/RV1109 do not support aes-192 */
#if defined(CONFIG_ROCKCHIP_RV1126)
	if (key_len == AES_KEYSIZE_192)
		return -EINVAL;
#endif

	ret = hw_aes_init(0, key, twk_key, key_len, iv, iv_len, mode, enc);
	if (ret)
		return ret;

	return hw_cipher_crypt(in, out, len, NULL, 0,
			       NULL, 0, mode);
}
1115 
1116 int rk_crypto_sm4(struct udevice *dev, u32 mode,
1117 		  const u8 *key, const u8 *twk_key, u32 key_len,
1118 		  const u8 *iv, u32 iv_len,
1119 		  const u8 *in, u8 *out, u32 len, bool enc)
1120 {
1121 	int ret;
1122 
1123 	ret = hw_sm4_init(0, key, twk_key, key_len, iv, iv_len, mode, enc);
1124 	if (ret)
1125 		return ret;
1126 
1127 	return hw_cipher_crypt(in, out, len, NULL, 0, NULL, 0, mode);
1128 }
1129 
1130 int rockchip_crypto_cipher(struct udevice *dev, cipher_context *ctx,
1131 			   const u8 *in, u8 *out, u32 len, bool enc)
1132 {
1133 	switch (ctx->algo) {
1134 	case CRYPTO_DES:
1135 		return rk_crypto_des(dev, ctx->mode, ctx->key, ctx->key_len,
1136 				     ctx->iv, in, out, len, enc);
1137 	case CRYPTO_AES:
1138 		return rk_crypto_aes(dev, ctx->mode,
1139 				     ctx->key, ctx->twk_key, ctx->key_len,
1140 				     ctx->iv, ctx->iv_len, in, out, len, enc);
1141 	case CRYPTO_SM4:
1142 		return rk_crypto_sm4(dev, ctx->mode,
1143 				     ctx->key, ctx->twk_key, ctx->key_len,
1144 				     ctx->iv, ctx->iv_len, in, out, len, enc);
1145 	default:
1146 		return -EINVAL;
1147 	}
1148 }
1149 
1150 int rk_crypto_mac(struct udevice *dev, u32 algo, u32 mode,
1151 		  const u8 *key, u32 key_len,
1152 		  const u8 *in, u32 len, u8 *tag)
1153 {
1154 	u32 rk_mode = RK_GET_RK_MODE(mode);
1155 	int ret;
1156 
1157 	if (!IS_MAC_MODE(rk_mode))
1158 		return -EINVAL;
1159 
1160 	if (algo != CRYPTO_AES && algo != CRYPTO_SM4)
1161 		return -EINVAL;
1162 
1163 	/* RV1126/RV1109 do not support aes-192 */
1164 #if defined(CONFIG_ROCKCHIP_RV1126)
1165 	if (algo == CRYPTO_AES && key_len == AES_KEYSIZE_192)
1166 		return -EINVAL;
1167 #endif
1168 
1169 	ret = hw_cipher_init(g_key_chn, key, NULL, key_len, NULL, 0,
1170 			     algo, mode, true);
1171 	if (ret)
1172 		return ret;
1173 
1174 	return hw_cipher_crypt(in, NULL, len, NULL, 0,
1175 			       tag, AES_BLOCK_SIZE, mode);
1176 }
1177 
1178 int rockchip_crypto_mac(struct udevice *dev, cipher_context *ctx,
1179 			const u8 *in, u32 len, u8 *tag)
1180 {
1181 	return rk_crypto_mac(dev, ctx->algo, ctx->mode,
1182 			     ctx->key, ctx->key_len, in, len, tag);
1183 }
1184 
1185 int rk_crypto_ae(struct udevice *dev, u32 algo, u32 mode,
1186 		 const u8 *key, u32 key_len, const u8 *nonce, u32 nonce_len,
1187 		 const u8 *in, u32 len, const u8 *aad, u32 aad_len,
1188 		 u8 *out, u8 *tag)
1189 {
1190 	u32 rk_mode = RK_GET_RK_MODE(mode);
1191 	int ret;
1192 
1193 	if (!IS_AE_MODE(rk_mode))
1194 		return -EINVAL;
1195 
1196 	if (algo != CRYPTO_AES && algo != CRYPTO_SM4)
1197 		return -EINVAL;
1198 
1199 	/* RV1126/RV1109 do not support aes-192 */
1200 #if defined(CONFIG_ROCKCHIP_RV1126)
1201 	if (algo == CRYPTO_AES && key_len == AES_KEYSIZE_192)
1202 		return -EINVAL;
1203 #endif
1204 
1205 	ret = hw_cipher_init(g_key_chn, key, NULL, key_len, nonce, nonce_len,
1206 			     algo, mode, true);
1207 	if (ret)
1208 		return ret;
1209 
1210 	return hw_cipher_crypt(in, out, len, aad, aad_len,
1211 			       tag, AES_BLOCK_SIZE, mode);
1212 }
1213 
1214 int rockchip_crypto_ae(struct udevice *dev, cipher_context *ctx,
1215 		       const u8 *in, u32 len, const u8 *aad, u32 aad_len,
1216 		       u8 *out, u8 *tag)
1217 
1218 {
1219 	return rk_crypto_ae(dev, ctx->algo, ctx->mode, ctx->key, ctx->key_len,
1220 			    ctx->iv, ctx->iv_len, in, len,
1221 			    aad, aad_len, out, tag);
1222 }
1223 
1224 #endif
1225 
1226 #if CONFIG_IS_ENABLED(ROCKCHIP_RSA)
/*
 * rockchip_crypto_rsa_verify() - raw RSA public-key operation via the PKA.
 *
 * Computes sign^e mod n with the hardware exponentiation engine and copies
 * the key-sized result into @output on success.  Padding and digest
 * comparison are the caller's job.
 *
 * @dev:    crypto udevice (unused here)
 * @ctx:    RSA key; e, n and c are word arrays of the key size.
 *          NOTE(review): ctx->c looks like a precomputed constant consumed
 *          by rk_exptmod_np() — confirm against the PKA implementation.
 * @sign:   signature, key-size bytes
 * @output: result buffer, at least key-size bytes
 *
 * Return: 0 on success, negative error code otherwise.
 */
static int rockchip_crypto_rsa_verify(struct udevice *dev, rsa_key *ctx,
				      u8 *sign, u8 *output)
{
	struct mpa_num *mpa_m = NULL, *mpa_e = NULL, *mpa_n = NULL;
	struct mpa_num *mpa_c = NULL, *mpa_result = NULL;
	u32 n_bits, n_words;
	u32 *rsa_result;
	int ret;

	if (!ctx)
		return -EINVAL;

	/* only the RSA key sizes supported by the engine */
	if (ctx->algo != CRYPTO_RSA512 &&
	    ctx->algo != CRYPTO_RSA1024 &&
	    ctx->algo != CRYPTO_RSA2048 &&
	    ctx->algo != CRYPTO_RSA3072 &&
	    ctx->algo != CRYPTO_RSA4096)
		return -EINVAL;

	n_bits = crypto_algo_nbits(ctx->algo);
	n_words = BITS2WORD(n_bits);

	rsa_result = malloc(BITS2BYTE(n_bits));
	if (!rsa_result)
		return -ENOMEM;

	memset(rsa_result, 0x00, BITS2BYTE(n_bits));

	/* OR the results together; any allocation failure aborts below */
	ret = rk_mpa_alloc(&mpa_m);
	ret |= rk_mpa_alloc(&mpa_e);
	ret |= rk_mpa_alloc(&mpa_n);
	ret |= rk_mpa_alloc(&mpa_c);
	ret |= rk_mpa_alloc(&mpa_result);
	if (ret)
		goto exit;

	/*
	 * The ->d fields borrow caller-owned buffers (and rsa_result).
	 * NOTE(review): this assumes rk_mpa_free() does not free ->d —
	 * confirm against crypto_v2_pka.
	 */
	mpa_m->d = (void *)sign;
	mpa_e->d = (void *)ctx->e;
	mpa_n->d = (void *)ctx->n;
	mpa_c->d = (void *)ctx->c;
	mpa_result->d = (void *)rsa_result;

	mpa_m->size = n_words;
	mpa_e->size = n_words;
	mpa_n->size = n_words;
	mpa_c->size = n_words;
	mpa_result->size = n_words;

	ret = rk_exptmod_np(mpa_m, mpa_e, mpa_n, mpa_c, mpa_result);
	if (!ret)
		memcpy(output, rsa_result, BITS2BYTE(n_bits));

exit:
	free(rsa_result);
	rk_mpa_free(&mpa_m);
	rk_mpa_free(&mpa_e);
	rk_mpa_free(&mpa_n);
	rk_mpa_free(&mpa_c);
	rk_mpa_free(&mpa_result);

	return ret;
}
1289 #endif
1290 
/*
 * UCLASS_CRYPTO operations.  Hash/SHA support is always built; RSA,
 * HMAC and cipher/MAC/AE hooks are compiled in per Kconfig option.
 */
static const struct dm_crypto_ops rockchip_crypto_ops = {
	.capability   = rockchip_crypto_capability,
	.sha_init     = rockchip_crypto_sha_init,
	.sha_update   = rockchip_crypto_sha_update,
	.sha_final    = rockchip_crypto_sha_final,
#if CONFIG_IS_ENABLED(ROCKCHIP_RSA)
	.rsa_verify   = rockchip_crypto_rsa_verify,
#endif
#if CONFIG_IS_ENABLED(ROCKCHIP_HMAC)
	.hmac_init    = rockchip_crypto_hmac_init,
	.hmac_update  = rockchip_crypto_hmac_update,
	.hmac_final   = rockchip_crypto_hmac_final,
#endif
#if CONFIG_IS_ENABLED(ROCKCHIP_CIPHER)
	.cipher_crypt = rockchip_crypto_cipher,
	.cipher_mac = rockchip_crypto_mac,
	.cipher_ae  = rockchip_crypto_ae,
#endif
};
1310 
1311 /*
1312  * Only use "clocks" to parse crypto clock id and use rockchip_get_clk().
1313  * Because we always add crypto node in U-Boot dts, when kernel dtb enabled :
1314  *
1315  *   1. There is cru phandle mismatch between U-Boot and kernel dtb;
1316  *   2. CONFIG_OF_SPL_REMOVE_PROPS removes clock property;
1317  */
1318 static int rockchip_crypto_ofdata_to_platdata(struct udevice *dev)
1319 {
1320 	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
1321 	int len, ret = -EINVAL;
1322 
1323 	memset(priv, 0x00, sizeof(*priv));
1324 
1325 	priv->reg = (fdt_addr_t)dev_read_addr_ptr(dev);
1326 	if (priv->reg == FDT_ADDR_T_NONE)
1327 		return -EINVAL;
1328 
1329 	crypto_base = priv->reg;
1330 
1331 	/* if there is no clocks in dts, just skip it */
1332 	if (!dev_read_prop(dev, "clocks", &len)) {
1333 		printf("Can't find \"clocks\" property\n");
1334 		return 0;
1335 	}
1336 
1337 	memset(priv, 0x00, sizeof(*priv));
1338 	priv->clocks = malloc(len);
1339 	if (!priv->clocks)
1340 		return -ENOMEM;
1341 
1342 	priv->nclocks = len / sizeof(u32);
1343 	if (dev_read_u32_array(dev, "clocks", (u32 *)priv->clocks,
1344 			       priv->nclocks)) {
1345 		printf("Can't read \"clocks\" property\n");
1346 		ret = -EINVAL;
1347 		goto exit;
1348 	}
1349 
1350 	if (!dev_read_prop(dev, "clock-frequency", &len)) {
1351 		printf("Can't find \"clock-frequency\" property\n");
1352 		ret = -EINVAL;
1353 		goto exit;
1354 	}
1355 
1356 	priv->frequencies = malloc(len);
1357 	if (!priv->frequencies) {
1358 		ret = -ENOMEM;
1359 		goto exit;
1360 	}
1361 
1362 	priv->nclocks = len / sizeof(u32);
1363 	if (dev_read_u32_array(dev, "clock-frequency", priv->frequencies,
1364 			       priv->nclocks)) {
1365 		printf("Can't read \"clock-frequency\" property\n");
1366 		ret = -EINVAL;
1367 		goto exit;
1368 	}
1369 
1370 	return 0;
1371 exit:
1372 	if (priv->clocks)
1373 		free(priv->clocks);
1374 
1375 	if (priv->frequencies)
1376 		free(priv->frequencies);
1377 
1378 	return ret;
1379 }
1380 
1381 static int rk_crypto_set_clk(struct rockchip_crypto_priv *priv)
1382 {
1383 	int i, ret;
1384 	u32* clocks;
1385 
1386 	if (!priv->clocks && priv->nclocks == 0)
1387 		return 0;
1388 
1389 #if CONFIG_IS_ENABLED(CLK_SCMI)
1390 	ret = rockchip_get_scmi_clk(&priv->clk.dev);
1391 #else
1392 	ret = rockchip_get_clk(&priv->clk.dev);
1393 #endif
1394 	if (priv->nclocks && ret) {
1395 		printf("Failed to get clk device, ret=%d\n", ret);
1396 		return ret;
1397 	}
1398 
1399 	clocks = (u32 *)priv->clocks;
1400 	for (i = 0; i < priv->nclocks; i++) {
1401 		priv->clk.id = clocks[i * 2 + 1];
1402 		ret = clk_set_rate(&priv->clk, priv->frequencies[i]);
1403 		if (ret < 0) {
1404 			printf("%s: Failed to set clk(%ld): ret=%d\n",
1405 			       __func__, priv->clk.id, ret);
1406 			return ret;
1407 		}
1408 	}
1409 
1410 	return 0;
1411 }
1412 
1413 static int rockchip_crypto_probe(struct udevice *dev)
1414 {
1415 	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
1416 	struct rk_crypto_soc_data *sdata;
1417 	int ret = 0;
1418 
1419 	sdata = (struct rk_crypto_soc_data *)dev_get_driver_data(dev);
1420 	priv->soc_data = sdata;
1421 
1422 	priv->hw_ctx = memalign(LLI_ADDR_ALIGN_SIZE,
1423 				sizeof(struct rk_hash_ctx));
1424 	if (!priv->hw_ctx)
1425 		return -ENOMEM;
1426 
1427 	ret = rk_crypto_set_clk(priv);
1428 	if (ret)
1429 		return ret;
1430 
1431 	hw_crypto_reset();
1432 
1433 	return 0;
1434 }
1435 
/* Baseline feature set (px30/rk3308 class): hashes, HMAC, RSA, DES, AES. */
static const struct rk_crypto_soc_data soc_data_base = {
	.capability = CRYPTO_MD5 |
		      CRYPTO_SHA1 |
		      CRYPTO_SHA256 |
		      CRYPTO_SHA512 |
		      CRYPTO_HMAC_MD5 |
		      CRYPTO_HMAC_SHA1 |
		      CRYPTO_HMAC_SHA256 |
		      CRYPTO_HMAC_SHA512 |
		      CRYPTO_RSA512 |
		      CRYPTO_RSA1024 |
		      CRYPTO_RSA2048 |
		      CRYPTO_RSA3072 |
		      CRYPTO_RSA4096 |
		      CRYPTO_DES |
		      CRYPTO_AES,
};
1453 
/* Baseline feature set plus the Chinese SM algorithms (SM3/SM4). */
static const struct rk_crypto_soc_data soc_data_base_sm = {
	.capability = CRYPTO_MD5 |
		      CRYPTO_SHA1 |
		      CRYPTO_SHA256 |
		      CRYPTO_SHA512 |
		      CRYPTO_SM3 |
		      CRYPTO_HMAC_MD5 |
		      CRYPTO_HMAC_SHA1 |
		      CRYPTO_HMAC_SHA256 |
		      CRYPTO_HMAC_SHA512 |
		      CRYPTO_HMAC_SM3 |
		      CRYPTO_RSA512 |
		      CRYPTO_RSA1024 |
		      CRYPTO_RSA2048 |
		      CRYPTO_RSA3072 |
		      CRYPTO_RSA4096 |
		      CRYPTO_DES |
		      CRYPTO_AES |
		      CRYPTO_SM4,
};
1474 
/* RK1808: hash/HMAC/RSA only — no SHA-512, no symmetric ciphers. */
static const struct rk_crypto_soc_data soc_data_rk1808 = {
	.capability = CRYPTO_MD5 |
		      CRYPTO_SHA1 |
		      CRYPTO_SHA256 |
		      CRYPTO_HMAC_MD5 |
		      CRYPTO_HMAC_SHA1 |
		      CRYPTO_HMAC_SHA256 |
		      CRYPTO_RSA512 |
		      CRYPTO_RSA1024 |
		      CRYPTO_RSA2048 |
		      CRYPTO_RSA3072 |
		      CRYPTO_RSA4096,
};
1488 
/* Match table: each compatible carries its SoC's capability mask. */
static const struct udevice_id rockchip_crypto_ids[] = {
	{
		.compatible = "rockchip,px30-crypto",
		.data = (ulong)&soc_data_base
	},
	{
		.compatible = "rockchip,rk1808-crypto",
		.data = (ulong)&soc_data_rk1808
	},
	{
		.compatible = "rockchip,rk3308-crypto",
		.data = (ulong)&soc_data_base
	},
	{
		.compatible = "rockchip,rv1126-crypto",
		.data = (ulong)&soc_data_base_sm
	},
	{
		.compatible = "rockchip,rk3568-crypto",
		.data = (ulong)&soc_data_base_sm
	},
	{
		.compatible = "rockchip,rk3588-crypto",
		.data = (ulong)&soc_data_base_sm
	},
	{ }
};
1516 
/*
 * Driver binding: ofdata_to_platdata parses reg/clock properties,
 * probe allocates the DMA-aligned hash context and resets the engine.
 */
U_BOOT_DRIVER(rockchip_crypto_v2) = {
	.name		= "rockchip_crypto_v2",
	.id		= UCLASS_CRYPTO,
	.of_match	= rockchip_crypto_ids,
	.ops		= &rockchip_crypto_ops,
	.probe		= rockchip_crypto_probe,
	.ofdata_to_platdata = rockchip_crypto_ofdata_to_platdata,
	.priv_auto_alloc_size = sizeof(struct rockchip_crypto_priv),
};
1526