// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (c) 2019 Fuzhou Rockchip Electronics Co., Ltd
 */

#include <common.h>
#include <clk.h>
#include <crypto.h>
#include <dm.h>
#include <asm/io.h>
#include <clk-uclass.h>
#include <asm/arch/hardware.h>
#include <asm/arch/clock.h>
#include <rockchip/crypto_ecc.h>
#include <rockchip/crypto_hash_cache.h>
#include <rockchip/crypto_v2.h>
#include <rockchip/crypto_v2_pka.h>

#define RK_HASH_CTX_MAGIC		0x1A1A1A1A

#define CRYPTO_MAJOR_VER(ver)		((ver) & 0x0f000000)

#define CRYPTO_MAJOR_VER_3		0x03000000
#define CRYPTO_MAJOR_VER_4		0x04000000
#ifdef CONFIG_ROCKCHIP_RK3562
#define CRYPTO_S_BY_KEYLAD_BASE		0xFF8A8000
#endif

#ifdef DEBUG
#define IMSG(format, ...) printf("[%s, %05d]-trace: " format "\n", \
				 __func__, __LINE__, ##__VA_ARGS__)
#else
#define IMSG(format, ...)
#endif

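/*
 * One entry of the DMA link list consumed by the crypto DMA engine.
 * The layout mirrors the hardware descriptor format, so entries must be
 * LLI_ADDR_ALIGN_SIZE (8 byte) aligned and flushed to memory before the
 * engine is started; next_addr chains to the next physical descriptor.
 */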
struct crypto_lli_desc {
	u32 src_addr;
	u32 src_len;
	u32 dst_addr;
	u32 dst_len;
	u32 user_define;
	u32 reserve;
	u32 dma_ctrl;
	u32 next_addr;
};

struct rk_hash_ctx {
	struct crypto_lli_desc		data_lli;	/* lli desc */
	struct crypto_hash_cache	*hash_cache;
	u32				magic;		/* to check ctx */
	u32				algo;		/* hash algo */
	u8				digest_size;	/* hash out length */
	u8				reserved[3];
};

struct rk_crypto_soc_data {
	u32 capability;
	u32 (*dynamic_cap)(void);
};

struct rockchip_crypto_priv {
	fdt_addr_t			reg;
	u32				frequency;
	char				*clocks;
	u32				*frequencies;
	u32				nclocks;
	u32				freq_nclocks;
	u32				length;
	struct rk_hash_ctx		*hw_ctx;
	struct rk_crypto_soc_data	*soc_data;

	u16				secure;
	u16				enabled;
};

#define LLI_ADDR_ALIGN_SIZE	8
#define DATA_ADDR_ALIGN_SIZE	8
#define DATA_LEN_ALIGN_SIZE	64

/* crypto timeout is 500 ms, so each hash update is capped at 32 MiB */
#define HASH_UPDATE_LIMIT	(32 * 1024 * 1024)
#define RK_CRYPTO_TIMEOUT	500000

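/*
 * Busy-poll `condition` in 1 us steps for at most `timeout` iterations;
 * the statement expression evaluates to -ETIMEDOUT on expiry, 0 otherwise.
 */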
#define RK_POLL_TIMEOUT(condition, timeout) \
({ \
	int time_out = timeout; \
	while (condition) { \
		if (--time_out <= 0) { \
			debug("[%s] %d: time out!\n", __func__,\
				__LINE__); \
			break; \
		} \
		udelay(1); \
	} \
	(time_out <= 0) ? -ETIMEDOUT : 0; \
})

#define WAIT_TAG_VALID(channel, timeout) ({ \
	u32 tag_mask = CRYPTO_CH0_TAG_VALID << (channel);\
	int ret = 0;\
	if (is_check_tag_valid()) { \
		ret = RK_POLL_TIMEOUT(!(crypto_read(CRYPTO_TAG_VALID) & tag_mask),\
				      timeout);\
	} \
	crypto_write(crypto_read(CRYPTO_TAG_VALID) & tag_mask, CRYPTO_TAG_VALID);\
	ret;\
})

#define virt_to_phys(addr)		(((unsigned long)addr) & 0xffffffff)
#define phys_to_virt(addr, area)	((unsigned long)addr)

#define align_malloc(bytes, alignment)	memalign(alignment, bytes)
#define align_free(addr)		do { if (addr) free(addr); } while (0)

#define ROUNDUP(size, alignment)	round_up(size, alignment)
#define cache_op_inner(type, addr, size) \
					crypto_flush_cacheline((ulong)addr, size)

#define IS_NEED_IV(rk_mode) ((rk_mode) != RK_MODE_ECB && \
			     (rk_mode) != RK_MODE_CMAC && \
			     (rk_mode) != RK_MODE_CBC_MAC)

#define IS_NEED_TAG(rk_mode) ((rk_mode) == RK_MODE_CMAC || \
			      (rk_mode) == RK_MODE_CBC_MAC || \
			      (rk_mode) == RK_MODE_CCM || \
			      (rk_mode) == RK_MODE_GCM)

#define IS_MAC_MODE(rk_mode) ((rk_mode) == RK_MODE_CMAC || \
			      (rk_mode) == RK_MODE_CBC_MAC)

#define IS_AE_MODE(rk_mode) ((rk_mode) == RK_MODE_CCM || \
			     (rk_mode) == RK_MODE_GCM)

fdt_addr_t crypto_base;
static uint32_t g_crypto_version;

static inline bool is_check_hash_valid(void)
{
	/* crypto < v4 needs the hash-valid check */
	return CRYPTO_MAJOR_VER(g_crypto_version) < CRYPTO_MAJOR_VER_4;
}

static inline bool is_check_tag_valid(void)
{
	/* crypto < v4 needs the tag-valid check */
	return CRYPTO_MAJOR_VER(g_crypto_version) < CRYPTO_MAJOR_VER_4;
}

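/* helpers that move data between byte streams and big-endian register words */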
static inline void word2byte_be(u32 word, u8 *ch)
{
	ch[0] = (word >> 24) & 0xff;
	ch[1] = (word >> 16) & 0xff;
	ch[2] = (word >> 8) & 0xff;
	ch[3] = (word >> 0) & 0xff;
}

static inline u32 byte2word_be(const u8 *ch)
{
	return (*ch << 24) + (*(ch + 1) << 16) + (*(ch + 2) << 8) + *(ch + 3);
}

static inline void clear_regs(u32 base, u32 words)
{
	int i;

	/* clear out registers */
	for (i = 0; i < words; i++)
		crypto_write(0, base + 4 * i);
}

static inline void clear_key_regs(void)
{
	clear_regs(CRYPTO_CH0_KEY_0, CRYPTO_KEY_CHANNEL_NUM * 4);
}

static inline void read_regs(u32 base, u8 *data, u32 data_len)
{
	u8 tmp_buf[4];
	u32 i;

	for (i = 0; i < data_len / 4; i++)
		word2byte_be(crypto_read(base + i * 4),
			     data + i * 4);

	if (data_len % 4) {
		word2byte_be(crypto_read(base + i * 4), tmp_buf);
		memcpy(data + i * 4, tmp_buf, data_len % 4);
	}
}

static inline void write_regs(u32 base, const u8 *data, u32 data_len)
{
	u8 tmp_buf[4];
	u32 i;

	for (i = 0; i < data_len / 4; i++, base += 4)
		crypto_write(byte2word_be(data + i * 4), base);

	if (data_len % 4) {
		memset(tmp_buf, 0x00, sizeof(tmp_buf));
		memcpy((u8 *)tmp_buf, data + i * 4, data_len % 4);
		crypto_write(byte2word_be(tmp_buf), base);
	}
}

static inline void write_key_reg(u32 chn, const u8 *key, u32 key_len)
{
	write_regs(CRYPTO_CH0_KEY_0 + chn * 0x10, key, key_len);
}

static inline void set_iv_reg(u32 chn, const u8 *iv, u32 iv_len)
{
	u32 base_iv;

	base_iv = CRYPTO_CH0_IV_0 + chn * 0x10;

	/* clear iv */
	clear_regs(base_iv, 4);

	if (!iv || iv_len == 0)
		return;

	write_regs(base_iv, iv, iv_len);

	crypto_write(iv_len, CRYPTO_CH0_IV_LEN_0 + 4 * chn);
}

static inline void get_iv_reg(u32 chn, u8 *iv, u32 iv_len)
{
	u32 base_iv;

	base_iv = CRYPTO_CH0_IV_0 + chn * 0x10;

	read_regs(base_iv, iv, iv_len);
}

static inline void get_tag_from_reg(u32 chn, u8 *tag, u32 tag_len)
{
	u32 i;
	u32 chn_base = CRYPTO_CH0_TAG_0 + 0x10 * chn;

	for (i = 0; i < tag_len / 4; i++, chn_base += 4)
		word2byte_be(crypto_read(chn_base), tag + 4 * i);
}

static int rk_crypto_do_enable_clk(struct udevice *dev, int enable)
{
	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
	struct clk clk;
	int i, ret;

	for (i = 0; i < priv->nclocks; i++) {
		ret = clk_get_by_index(dev, i, &clk);
		if (ret < 0) {
			printf("Failed to get clk index %d, ret=%d\n", i, ret);
			return ret;
		}

		if (enable)
			ret = clk_enable(&clk);
		else
			ret = clk_disable(&clk);
		if (ret < 0 && ret != -ENOSYS) {
			debug("Failed to enable(%d) clk(%ld): ret=%d\n",
			      enable, clk.id, ret);
			return ret;
		}
	}

	return 0;
}

static int rk_crypto_enable_clk(struct udevice *dev)
{
	struct rockchip_crypto_priv *priv = dev_get_priv(dev);

	crypto_base = priv->reg;

	return rk_crypto_do_enable_clk(dev, 1);
}

static int rk_crypto_disable_clk(struct udevice *dev)
{
	crypto_base = 0;

	return rk_crypto_do_enable_clk(dev, 0);
}

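/*
 * Crypto v3/v4 exposes per-algorithm version registers; probe them and
 * translate the feature flags into CRYPTO_* capability bits at runtime.
 */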
static u32 crypto_v3_dynamic_cap(void)
{
	u32 capability = 0;
	u32 ver_reg, i;
	struct cap_map {
		u32 ver_offset;
		u32 mask;
		u32 cap_bit;
	};
	const struct cap_map cap_tbl[] = {
	{CRYPTO_HASH_VERSION, CRYPTO_HASH_MD5_FLAG,    CRYPTO_MD5},
	{CRYPTO_HASH_VERSION, CRYPTO_HASH_SHA1_FLAG,   CRYPTO_SHA1},
	{CRYPTO_HASH_VERSION, CRYPTO_HASH_SHA256_FLAG, CRYPTO_SHA256},
	{CRYPTO_HASH_VERSION, CRYPTO_HASH_SHA512_FLAG, CRYPTO_SHA512},
	{CRYPTO_HASH_VERSION, CRYPTO_HASH_SM3_FLAG,    CRYPTO_SM3},

	{CRYPTO_HMAC_VERSION, CRYPTO_HMAC_MD5_FLAG,    CRYPTO_HMAC_MD5},
	{CRYPTO_HMAC_VERSION, CRYPTO_HMAC_SHA1_FLAG,   CRYPTO_HMAC_SHA1},
	{CRYPTO_HMAC_VERSION, CRYPTO_HMAC_SHA256_FLAG, CRYPTO_HMAC_SHA256},
	{CRYPTO_HMAC_VERSION, CRYPTO_HMAC_SHA512_FLAG, CRYPTO_HMAC_SHA512},
	{CRYPTO_HMAC_VERSION, CRYPTO_HMAC_SM3_FLAG,    CRYPTO_HMAC_SM3},

	{CRYPTO_AES_VERSION,  CRYPTO_AES256_FLAG,      CRYPTO_AES},
	{CRYPTO_DES_VERSION,  CRYPTO_TDES_FLAG,        CRYPTO_DES},
	{CRYPTO_SM4_VERSION,  CRYPTO_ECB_FLAG,         CRYPTO_SM4},
	};

	/* rsa */
	capability = CRYPTO_RSA512 |
		     CRYPTO_RSA1024 |
		     CRYPTO_RSA2048 |
		     CRYPTO_RSA3072 |
		     CRYPTO_RSA4096;

#if CONFIG_IS_ENABLED(ROCKCHIP_EC)
	capability |= (CRYPTO_SM2 |
		       CRYPTO_ECC_192R1 |
		       CRYPTO_ECC_224R1 |
		       CRYPTO_ECC_256R1);
#endif

	for (i = 0; i < ARRAY_SIZE(cap_tbl); i++) {
		ver_reg = crypto_read(cap_tbl[i].ver_offset);

		if ((ver_reg & cap_tbl[i].mask) == cap_tbl[i].mask)
			capability |= cap_tbl[i].cap_bit;
	}

	return capability;
}

static int hw_crypto_reset(void)
{
	u32 val = 0, mask = 0;
	int ret;

	val = CRYPTO_SW_PKA_RESET | CRYPTO_SW_CC_RESET;
	mask = val << CRYPTO_WRITE_MASK_SHIFT;

	/* reset pka and crypto modules */
	crypto_write(val | mask, CRYPTO_RST_CTL);

	/* wait for the reset to complete */
	ret = RK_POLL_TIMEOUT(crypto_read(CRYPTO_RST_CTL), RK_CRYPTO_TIMEOUT);

	g_crypto_version = crypto_read(CRYPTO_CRYPTO_VERSION_NEW);

	return ret;
}

static void hw_hash_clean_ctx(struct rk_hash_ctx *ctx)
{
	/* clear hash status */
	crypto_write(CRYPTO_WRITE_MASK_ALL | 0, CRYPTO_HASH_CTL);

	assert(ctx);
	assert(ctx->magic == RK_HASH_CTX_MAGIC);

	crypto_hash_cache_free(ctx->hash_cache);

	memset(ctx, 0x00, sizeof(*ctx));
}

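/*
 * Prepare the hash engine for a new message. The expected call sequence
 * (mirrored by the sha_init/sha_update/sha_final ops below) is:
 *
 *	rk_hash_init(ctx, CRYPTO_SHA256);
 *	rk_hash_update(ctx, data, data_len);	(repeated as needed)
 *	rk_hash_final(ctx, digest, 32);
 *
 * Resets the crypto core, selects the hash mode and digest size, enables
 * hardware padding and FIFO byte swapping, then marks the context with a
 * magic value for later sanity checks.
 */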
static int rk_hash_init(void *hw_ctx, u32 algo)
{
	struct rk_hash_ctx *tmp_ctx = (struct rk_hash_ctx *)hw_ctx;
	u32 reg_ctrl = 0;
	int ret;

	if (!tmp_ctx)
		return -EINVAL;

	reg_ctrl = CRYPTO_SW_CC_RESET;
	crypto_write(reg_ctrl | (reg_ctrl << CRYPTO_WRITE_MASK_SHIFT),
		     CRYPTO_RST_CTL);

	/* wait for the reset to complete */
	ret = RK_POLL_TIMEOUT(crypto_read(CRYPTO_RST_CTL),
			      RK_CRYPTO_TIMEOUT);

	reg_ctrl = 0;
	tmp_ctx->algo = algo;
	switch (algo) {
	case CRYPTO_MD5:
	case CRYPTO_HMAC_MD5:
		reg_ctrl |= CRYPTO_MODE_MD5;
		tmp_ctx->digest_size = 16;
		break;
	case CRYPTO_SHA1:
	case CRYPTO_HMAC_SHA1:
		reg_ctrl |= CRYPTO_MODE_SHA1;
		tmp_ctx->digest_size = 20;
		break;
	case CRYPTO_SHA256:
	case CRYPTO_HMAC_SHA256:
		reg_ctrl |= CRYPTO_MODE_SHA256;
		tmp_ctx->digest_size = 32;
		break;
	case CRYPTO_SHA512:
	case CRYPTO_HMAC_SHA512:
		reg_ctrl |= CRYPTO_MODE_SHA512;
		tmp_ctx->digest_size = 64;
		break;
	case CRYPTO_SM3:
	case CRYPTO_HMAC_SM3:
		reg_ctrl |= CRYPTO_MODE_SM3;
		tmp_ctx->digest_size = 32;
		break;
	default:
		ret = -EINVAL;
		goto exit;
	}

	/* enable hardware padding */
	reg_ctrl |= CRYPTO_HW_PAD_ENABLE;
	crypto_write(reg_ctrl | CRYPTO_WRITE_MASK_ALL, CRYPTO_HASH_CTL);

	/* FIFO input and output data byte swap */
	/* such as B0, B1, B2, B3 -> B3, B2, B1, B0 */
	reg_ctrl = CRYPTO_DOUT_BYTESWAP | CRYPTO_DOIN_BYTESWAP;
	crypto_write(reg_ctrl | CRYPTO_WRITE_MASK_ALL, CRYPTO_FIFO_CTL);

	/* mask DMA interrupts; completion is polled via CRYPTO_DMA_INT_ST */
	crypto_write(0, CRYPTO_DMA_INT_EN);

	tmp_ctx->magic = RK_HASH_CTX_MAGIC;

	return 0;
exit:
	/* clear hash setting if init failed */
	crypto_write(CRYPTO_WRITE_MASK_ALL | 0, CRYPTO_HASH_CTL);

	return ret;
}

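/*
 * Feed one chunk to the hash engine: build a single self-chained lli
 * entry for `data`, flush it, kick (or restart) the DMA and poll
 * CRYPTO_DMA_INT_ST until the chunk is consumed. Called back from the
 * hash cache with suitably aligned buffers.
 */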
static int rk_hash_direct_calc(void *hw_data, const u8 *data,
			       u32 data_len, u8 *started_flag, u8 is_last)
{
	struct rockchip_crypto_priv *priv = hw_data;
	struct rk_hash_ctx *hash_ctx = priv->hw_ctx;
	struct crypto_lli_desc *lli = &hash_ctx->data_lli;
	int ret = -EINVAL;
	u32 tmp = 0, mask = 0;

	assert(IS_ALIGNED((ulong)data, DATA_ADDR_ALIGN_SIZE));
	assert(is_last || IS_ALIGNED(data_len, DATA_LEN_ALIGN_SIZE));

	debug("%s: data = %p, len = %u, s = %x, l = %x\n",
	      __func__, data, data_len, *started_flag, is_last);

	memset(lli, 0x00, sizeof(*lli));
	lli->src_addr = (u32)virt_to_phys(data);
	lli->src_len = data_len;
	lli->dma_ctrl = LLI_DMA_CTRL_SRC_DONE;

	if (is_last) {
		lli->user_define |= LLI_USER_STRING_LAST;
		lli->dma_ctrl |= LLI_DMA_CTRL_LAST;
	} else {
		lli->next_addr = (u32)virt_to_phys(lli);
		lli->dma_ctrl |= LLI_DMA_CTRL_PAUSE;
	}

	if (!(*started_flag)) {
		lli->user_define |=
			(LLI_USER_STRING_START | LLI_USER_CIPHER_START);
		crypto_write((u32)virt_to_phys(lli), CRYPTO_DMA_LLI_ADDR);
		crypto_write((CRYPTO_HASH_ENABLE << CRYPTO_WRITE_MASK_SHIFT) |
			     CRYPTO_HASH_ENABLE, CRYPTO_HASH_CTL);
		tmp = CRYPTO_DMA_START;
		*started_flag = 1;
	} else {
		tmp = CRYPTO_DMA_RESTART;
	}

	/* flush cache */
	crypto_flush_cacheline((ulong)lli, sizeof(*lli));
	crypto_flush_cacheline((ulong)data, data_len);

	/* start calculate */
	crypto_write(tmp << CRYPTO_WRITE_MASK_SHIFT | tmp,
		     CRYPTO_DMA_CTL);

	/* mask CRYPTO_SYNC_LOCKSTEP_INT_ST flag */
	mask = ~(mask | CRYPTO_SYNC_LOCKSTEP_INT_ST);

	/* wait calc ok */
	ret = RK_POLL_TIMEOUT(!(crypto_read(CRYPTO_DMA_INT_ST) & mask),
			      RK_CRYPTO_TIMEOUT);

	/* clear interrupt status */
	tmp = crypto_read(CRYPTO_DMA_INT_ST);
	crypto_write(tmp, CRYPTO_DMA_INT_ST);

	if ((tmp & mask) != CRYPTO_SRC_ITEM_DONE_INT_ST &&
	    (tmp & mask) != CRYPTO_ZERO_LEN_INT_ST) {
		ret = -EFAULT;
		debug("[%s] %d: CRYPTO_DMA_INT_ST = 0x%x\n",
		      __func__, __LINE__, tmp);
		goto exit;
	}

	priv->length += data_len;
exit:
	return ret;
}

int rk_hash_update(void *ctx, const u8 *data, u32 data_len)
{
	struct rk_hash_ctx *tmp_ctx = (struct rk_hash_ctx *)ctx;
	int ret = -EINVAL;

	debug("\n");
	if (!tmp_ctx || !data)
		goto exit;

	if (tmp_ctx->digest_size == 0 || tmp_ctx->magic != RK_HASH_CTX_MAGIC)
		goto exit;

	ret = crypto_hash_update_with_cache(tmp_ctx->hash_cache,
					    data, data_len);

exit:
	/* free the cached lli list on error (guard against a NULL ctx) */
	if (ret && tmp_ctx)
		hw_hash_clean_ctx(tmp_ctx);

	return ret;
}

int rk_hash_final(void *ctx, u8 *digest, size_t len)
{
	struct rk_hash_ctx *tmp_ctx = (struct rk_hash_ctx *)ctx;
	int ret = -EINVAL;

	if (!digest)
		goto exit;

	if (!tmp_ctx ||
	    tmp_ctx->digest_size == 0 ||
	    len > tmp_ctx->digest_size ||
	    tmp_ctx->magic != RK_HASH_CTX_MAGIC) {
		goto exit;
	}

	ret = 0;
	if (is_check_hash_valid()) {
		/* wait hash value ok */
		ret = RK_POLL_TIMEOUT(!crypto_read(CRYPTO_HASH_VALID),
				      RK_CRYPTO_TIMEOUT);
	}

	read_regs(CRYPTO_HASH_DOUT_0, digest, len);

	/* clear hash status */
	crypto_write(CRYPTO_HASH_IS_VALID, CRYPTO_HASH_VALID);
	crypto_write(CRYPTO_WRITE_MASK_ALL | 0, CRYPTO_HASH_CTL);

exit:
	return ret;
}

static u32 rockchip_crypto_capability(struct udevice *dev)
{
	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
	u32 capability, mask = 0;

	if (!priv->enabled)
		return 0;

	capability = priv->soc_data->capability;

#if !(CONFIG_IS_ENABLED(ROCKCHIP_CIPHER))
	mask |= (CRYPTO_DES | CRYPTO_AES | CRYPTO_SM4);
#endif

#if !(CONFIG_IS_ENABLED(ROCKCHIP_HMAC))
	mask |= (CRYPTO_HMAC_MD5 | CRYPTO_HMAC_SHA1 | CRYPTO_HMAC_SHA256 |
		 CRYPTO_HMAC_SHA512 | CRYPTO_HMAC_SM3);
#endif

#if !(CONFIG_IS_ENABLED(ROCKCHIP_RSA))
	mask |= (CRYPTO_RSA512 | CRYPTO_RSA1024 | CRYPTO_RSA2048 |
		 CRYPTO_RSA3072 | CRYPTO_RSA4096);
#endif

	return capability & (~mask);
}

static int rockchip_crypto_sha_init(struct udevice *dev, sha_context *ctx)
{
	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
	struct rk_hash_ctx *hash_ctx = priv->hw_ctx;
	int ret = 0;

	if (!ctx)
		return -EINVAL;

	memset(hash_ctx, 0x00, sizeof(*hash_ctx));

	priv->length = 0;

	hash_ctx->hash_cache = crypto_hash_cache_alloc(rk_hash_direct_calc,
						       priv, ctx->length,
						       DATA_ADDR_ALIGN_SIZE,
						       DATA_LEN_ALIGN_SIZE);
	if (!hash_ctx->hash_cache)
		return -EFAULT;

	rk_crypto_enable_clk(dev);
	ret = rk_hash_init(hash_ctx, ctx->algo);
	if (ret)
		rk_crypto_disable_clk(dev);

	return ret;
}

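/*
 * Split large inputs into HASH_UPDATE_LIMIT (32 MiB) chunks so each
 * polled transfer finishes well within RK_CRYPTO_TIMEOUT.
 */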
static int rockchip_crypto_sha_update(struct udevice *dev,
				      u32 *input, u32 len)
{
	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
	int ret, i;
	u8 *p;

	if (!len) {
		ret = -EINVAL;
		goto exit;
	}

	p = (u8 *)input;

	for (i = 0; i < len / HASH_UPDATE_LIMIT; i++, p += HASH_UPDATE_LIMIT) {
		ret = rk_hash_update(priv->hw_ctx, p, HASH_UPDATE_LIMIT);
		if (ret)
			goto exit;
	}

	if (len % HASH_UPDATE_LIMIT)
		ret = rk_hash_update(priv->hw_ctx, p, len % HASH_UPDATE_LIMIT);

exit:
	if (ret)
		rk_crypto_disable_clk(dev);

	return ret;
}

static int rockchip_crypto_sha_final(struct udevice *dev,
				     sha_context *ctx, u8 *output)
{
	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
	u32 nbits;
	int ret;

	nbits = crypto_algo_nbits(ctx->algo);

	if (priv->length != ctx->length) {
		printf("total length(0x%08x) != init length(0x%08x)!\n",
		       priv->length, ctx->length);
		ret = -EIO;
		goto exit;
	}

	ret = rk_hash_final(priv->hw_ctx, (u8 *)output, BITS2BYTE(nbits));

exit:
	hw_hash_clean_ctx(priv->hw_ctx);
	rk_crypto_disable_clk(dev);

	return ret;
}

#if CONFIG_IS_ENABLED(ROCKCHIP_HMAC)
int rk_hmac_init(void *hw_ctx, u32 algo, u8 *key, u32 key_len)
{
	u32 reg_ctrl = 0;
	int ret;

	if (!key || !key_len || key_len > 64)
		return -EINVAL;

	clear_key_regs();

	write_key_reg(0, key, key_len);

	ret = rk_hash_init(hw_ctx, algo);
	if (ret)
		return ret;

	reg_ctrl = crypto_read(CRYPTO_HASH_CTL) | CRYPTO_HMAC_ENABLE;
	crypto_write(reg_ctrl | CRYPTO_WRITE_MASK_ALL, CRYPTO_HASH_CTL);

	return ret;
}

static int rockchip_crypto_hmac_init(struct udevice *dev,
				     sha_context *ctx, u8 *key, u32 key_len)
{
	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
	struct rk_hash_ctx *hash_ctx = priv->hw_ctx;
	int ret = 0;

	if (!ctx)
		return -EINVAL;

	memset(hash_ctx, 0x00, sizeof(*hash_ctx));

	priv->length = 0;

	hash_ctx->hash_cache = crypto_hash_cache_alloc(rk_hash_direct_calc,
						       priv, ctx->length,
						       DATA_ADDR_ALIGN_SIZE,
						       DATA_LEN_ALIGN_SIZE);
	if (!hash_ctx->hash_cache)
		return -EFAULT;

	rk_crypto_enable_clk(dev);
	ret = rk_hmac_init(priv->hw_ctx, ctx->algo, key, key_len);
	if (ret)
		rk_crypto_disable_clk(dev);

	return ret;
}

static int rockchip_crypto_hmac_update(struct udevice *dev,
				       u32 *input, u32 len)
{
	return rockchip_crypto_sha_update(dev, input, len);
}

static int rockchip_crypto_hmac_final(struct udevice *dev,
				      sha_context *ctx, u8 *output)
{
	return rockchip_crypto_sha_final(dev, ctx, output);
}

#endif

#if CONFIG_IS_ENABLED(ROCKCHIP_CIPHER)
static u8 g_key_chn;

static const u32 rk_mode2bc_mode[RK_MODE_MAX] = {
	[RK_MODE_ECB] = CRYPTO_BC_ECB,
	[RK_MODE_CBC] = CRYPTO_BC_CBC,
	[RK_MODE_CTS] = CRYPTO_BC_CTS,
	[RK_MODE_CTR] = CRYPTO_BC_CTR,
	[RK_MODE_CFB] = CRYPTO_BC_CFB,
	[RK_MODE_OFB] = CRYPTO_BC_OFB,
	[RK_MODE_XTS] = CRYPTO_BC_XTS,
	[RK_MODE_CCM] = CRYPTO_BC_CCM,
	[RK_MODE_GCM] = CRYPTO_BC_GCM,
	[RK_MODE_CMAC] = CRYPTO_BC_CMAC,
	[RK_MODE_CBC_MAC] = CRYPTO_BC_CBC_MAC,
};

static inline void set_pc_len_reg(u32 chn, u64 pc_len)
{
	u32 chn_base = CRYPTO_CH0_PC_LEN_0 + chn * 0x08;

	crypto_write(pc_len & 0xffffffff, chn_base);
	crypto_write(pc_len >> 32, chn_base + 4);
}

static inline void set_aad_len_reg(u32 chn, u64 aad_len)
{
	u32 chn_base = CRYPTO_CH0_AAD_LEN_0 + chn * 0x08;

	crypto_write(aad_len & 0xffffffff, chn_base);
	crypto_write(aad_len >> 32, chn_base + 4);
}

static inline bool is_des_mode(u32 rk_mode)
{
	return (rk_mode == RK_MODE_ECB ||
		rk_mode == RK_MODE_CBC ||
		rk_mode == RK_MODE_CFB ||
		rk_mode == RK_MODE_OFB);
}

static void dump_crypto_state(struct crypto_lli_desc *desc,
			      u32 tmp, u32 expt_int,
			      const u8 *in, const u8 *out,
			      u32 len, int ret)
{
	IMSG("%s\n", ret == -ETIMEDOUT ? "timeout" : "mismatch");

	IMSG("CRYPTO_DMA_INT_ST = %08x, expect_int = %08x\n",
	     tmp, expt_int);
	IMSG("data desc		= %p\n", desc);
	IMSG("\taddr_in		= [%08x <=> %08x]\n",
	     desc->src_addr, (u32)virt_to_phys(in));
	IMSG("\taddr_out	= [%08x <=> %08x]\n",
	     desc->dst_addr, (u32)virt_to_phys(out));
	IMSG("\tsrc_len		= [%08x <=> %08x]\n",
	     desc->src_len, (u32)len);
	IMSG("\tdst_len		= %08x\n", desc->dst_len);
	IMSG("\tdma_ctl		= %08x\n", desc->dma_ctrl);
	IMSG("\tuser_define	= %08x\n", desc->user_define);

	IMSG("\n\nDMA CRYPTO_DMA_LLI_ADDR status = %08x\n",
	     crypto_read(CRYPTO_DMA_LLI_ADDR));
	IMSG("DMA CRYPTO_DMA_ST status = %08x\n",
	     crypto_read(CRYPTO_DMA_ST));
	IMSG("DMA CRYPTO_DMA_STATE status = %08x\n",
	     crypto_read(CRYPTO_DMA_STATE));
	IMSG("DMA CRYPTO_DMA_LLI_RADDR status = %08x\n",
	     crypto_read(CRYPTO_DMA_LLI_RADDR));
	IMSG("DMA CRYPTO_DMA_SRC_RADDR status = %08x\n",
	     crypto_read(CRYPTO_DMA_SRC_RADDR));
	IMSG("DMA CRYPTO_DMA_DST_RADDR status = %08x\n",
	     crypto_read(CRYPTO_DMA_DST_RADDR));
	IMSG("DMA CRYPTO_CIPHER_ST status = %08x\n",
	     crypto_read(CRYPTO_CIPHER_ST));
	IMSG("DMA CRYPTO_CIPHER_STATE status = %08x\n",
	     crypto_read(CRYPTO_CIPHER_STATE));
	IMSG("DMA CRYPTO_TAG_VALID status = %08x\n",
	     crypto_read(CRYPTO_TAG_VALID));
	IMSG("LOCKSTEP status = %08x\n\n",
	     crypto_read(0x618));

	IMSG("dst %d bytes not transferred\n",
	     desc->dst_addr + desc->dst_len -
	     crypto_read(CRYPTO_DMA_DST_RADDR));
}

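/*
 * Build the initial CCM counter block (flags byte encoding L, then the
 * nonce) and load it into the IV registers; the flow follows the B_0/A_0
 * block formatting of CCM (see RFC 3610).
 */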
static int ccm128_set_iv_reg(u32 chn, const u8 *nonce, u32 nlen)
{
	u8 iv_buf[AES_BLOCK_SIZE];
	u32 L;

	memset(iv_buf, 0x00, sizeof(iv_buf));

	L = 15 - nlen;
	iv_buf[0] = ((u8)(L - 1) & 7);

	/* the L parameter */
	L = iv_buf[0] & 7;

	/* nonce is too short */
	if (nlen < (14 - L))
		return -EINVAL;

	/* clear aad flag */
	iv_buf[0] &= ~0x40;
	memcpy(&iv_buf[1], nonce, 14 - L);

	set_iv_reg(chn, iv_buf, AES_BLOCK_SIZE);

	return 0;
}

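/*
 * Encode the CCM AAD length prefix: two length bytes for short AAD,
 * or the 0xff 0xfe marker plus a 32-bit length for AAD >= 0xff00 bytes.
 */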
static void ccm_aad_padding(u32 aad_len, u8 *padding, u32 *padding_size)
{
	u32 i;

	if (aad_len == 0) {
		*padding_size = 0;
		return;
	}

	i = aad_len < (0x10000 - 0x100) ? 2 : 6;

	if (i == 2) {
		padding[0] = (u8)(aad_len >> 8);
		padding[1] = (u8)aad_len;
	} else {
		padding[0] = 0xFF;
		padding[1] = 0xFE;
		padding[2] = (u8)(aad_len >> 24);
		padding[3] = (u8)(aad_len >> 16);
		padding[4] = (u8)(aad_len >> 8);
		padding[5] = (u8)aad_len;
	}

	*padding_size = i;
}

static int ccm_compose_aad_iv(u8 *aad_iv, u32 data_len, u32 aad_len, u32 tag_size)
{
	aad_iv[0] |= ((u8)(((tag_size - 2) / 2) & 7) << 3);

	aad_iv[12] = (u8)(data_len >> 24);
	aad_iv[13] = (u8)(data_len >> 16);
	aad_iv[14] = (u8)(data_len >> 8);
	aad_iv[15] = (u8)data_len;

	if (aad_len)
		aad_iv[0] |= 0x40;	/* set aad flag */

	return 0;
}

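/*
 * Program one cipher channel: algorithm, key size, block mode and
 * direction go into CRYPTO_BC_CTL, keys and IV into the channel
 * registers. A NULL `key` with a non-zero `key_len` selects the
 * hardware key table (OTP key) instead of a user key.
 */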
static int hw_cipher_init(u32 chn, const u8 *key, const u8 *twk_key,
			  u32 key_len, const u8 *iv, u32 iv_len,
			  u32 algo, u32 mode, bool enc)
{
	u32 rk_mode = RK_GET_RK_MODE(mode);
	u32 key_chn_sel = chn;
	u32 reg_ctrl = 0;
	bool use_otpkey = false;

	if (!key && key_len)
		use_otpkey = true;

	IMSG("%s: key addr is %p, key_len is %d, iv addr is %p",
	     __func__, key, key_len, iv);
	if (rk_mode >= RK_MODE_MAX)
		return -EINVAL;

	switch (algo) {
	case CRYPTO_DES:
		if (key_len > DES_BLOCK_SIZE)
			reg_ctrl |= CRYPTO_BC_TDES;
		else
			reg_ctrl |= CRYPTO_BC_DES;
		break;
	case CRYPTO_AES:
		reg_ctrl |= CRYPTO_BC_AES;
		break;
	case CRYPTO_SM4:
		reg_ctrl |= CRYPTO_BC_SM4;
		break;
	default:
		return -EINVAL;
	}

	if (algo == CRYPTO_AES || algo == CRYPTO_SM4) {
		switch (key_len) {
		case AES_KEYSIZE_128:
			reg_ctrl |= CRYPTO_BC_128_bit_key;
			break;
		case AES_KEYSIZE_192:
			reg_ctrl |= CRYPTO_BC_192_bit_key;
			break;
		case AES_KEYSIZE_256:
			reg_ctrl |= CRYPTO_BC_256_bit_key;
			break;
		default:
			return -EINVAL;
		}
	}

	reg_ctrl |= rk_mode2bc_mode[rk_mode];
	if (!enc)
		reg_ctrl |= CRYPTO_BC_DECRYPT;

	/* write key data to reg */
	if (!use_otpkey) {
		write_key_reg(key_chn_sel, key, key_len);
		crypto_write(CRYPTO_SEL_USER, CRYPTO_KEY_SEL);
	} else {
		crypto_write(CRYPTO_SEL_KEYTABLE, CRYPTO_KEY_SEL);
	}

	/* write twk key for xts mode */
	if (rk_mode == RK_MODE_XTS)
		write_key_reg(key_chn_sel + 4, twk_key, key_len);

	/* set iv reg */
	if (rk_mode == RK_MODE_CCM)
		ccm128_set_iv_reg(chn, iv, iv_len);
	else
		set_iv_reg(chn, iv, iv_len);

	/* din_swap set 1, dout_swap set 1, default 1. */
	crypto_write(0x00030003, CRYPTO_FIFO_CTL);
	crypto_write(0, CRYPTO_DMA_INT_EN);

	crypto_write(reg_ctrl | CRYPTO_WRITE_MASK_ALL, CRYPTO_BC_CTL);

	return 0;
}

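/*
 * Run one DMA cipher transfer. Unaligned or CTR-padded buffers are
 * bounce-buffered, an optional AAD descriptor is chained in front of
 * the data descriptor for CCM/GCM, and completion is polled on
 * CRYPTO_DMA_INT_ST; MAC/AE modes additionally read back the tag.
 */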
static int hw_cipher_crypt(const u8 *in, u8 *out, u64 len,
			   const u8 *aad, u32 aad_len,
			   u8 *tag, u32 tag_len, u32 mode)
{
	struct crypto_lli_desc *data_desc = NULL, *aad_desc = NULL;
	u8 *dma_in = NULL, *dma_out = NULL, *aad_tmp = NULL;
	u32 rk_mode = RK_GET_RK_MODE(mode);
	u32 reg_ctrl = 0, tmp_len = 0;
	u32 expt_int = 0, mask = 0;
	u32 key_chn = g_key_chn;
	u32 tmp, dst_len = 0;
	int ret = -1;

	if (rk_mode == RK_MODE_CTS && len <= AES_BLOCK_SIZE) {
		printf("CTS mode length %u must be larger than 16 bytes\n",
		       (u32)len);
		return -EINVAL;
	}

	tmp_len = (rk_mode == RK_MODE_CTR) ? ROUNDUP(len, AES_BLOCK_SIZE) : len;

	data_desc = align_malloc(sizeof(*data_desc), LLI_ADDR_ALIGN_SIZE);
	if (!data_desc)
		goto exit;

	if (IS_ALIGNED((ulong)in, DATA_ADDR_ALIGN_SIZE) && tmp_len == len)
		dma_in = (void *)in;
	else
		dma_in = align_malloc(tmp_len, DATA_ADDR_ALIGN_SIZE);
	if (!dma_in)
		goto exit;

	if (out) {
		if (IS_ALIGNED((ulong)out, DATA_ADDR_ALIGN_SIZE) &&
		    tmp_len == len)
			dma_out = out;
		else
			dma_out = align_malloc(tmp_len, DATA_ADDR_ALIGN_SIZE);
		if (!dma_out)
			goto exit;
		dst_len = tmp_len;
	}

	memset(data_desc, 0x00, sizeof(*data_desc));
	if (dma_in != in)
		memcpy(dma_in, in, len);

	data_desc->src_addr    = (u32)virt_to_phys(dma_in);
	data_desc->src_len     = tmp_len;
	data_desc->dst_addr    = (u32)virt_to_phys(dma_out);
	data_desc->dst_len     = dst_len;
	data_desc->dma_ctrl    = LLI_DMA_CTRL_LAST;

	if (IS_MAC_MODE(rk_mode)) {
		expt_int = CRYPTO_LIST_DONE_INT_ST;
		data_desc->dma_ctrl |= LLI_DMA_CTRL_LIST_DONE;
	} else {
		expt_int = CRYPTO_DST_ITEM_DONE_INT_ST;
		data_desc->dma_ctrl |= LLI_DMA_CTRL_DST_DONE;
	}

	data_desc->user_define = LLI_USER_CIPHER_START |
				 LLI_USER_STRING_START |
				 LLI_USER_STRING_LAST |
				 (key_chn << 4);
	crypto_write((u32)virt_to_phys(data_desc), CRYPTO_DMA_LLI_ADDR);

	if (rk_mode == RK_MODE_CCM || rk_mode == RK_MODE_GCM) {
		u32 aad_tmp_len = 0;

		aad_desc = align_malloc(sizeof(*aad_desc), LLI_ADDR_ALIGN_SIZE);
		if (!aad_desc)
			goto exit;

		memset(aad_desc, 0x00, sizeof(*aad_desc));
		aad_desc->next_addr = (u32)virt_to_phys(data_desc);
		aad_desc->user_define = LLI_USER_CIPHER_START |
					LLI_USER_STRING_START |
					LLI_USER_STRING_LAST |
					LLI_USER_STRING_AAD |
					(key_chn << 4);

		if (rk_mode == RK_MODE_CCM) {
			u8 padding[AES_BLOCK_SIZE];
			u32 padding_size = 0;

			memset(padding, 0x00, sizeof(padding));
			ccm_aad_padding(aad_len, padding, &padding_size);

			aad_tmp_len = aad_len + AES_BLOCK_SIZE + padding_size;
			aad_tmp_len = ROUNDUP(aad_tmp_len, AES_BLOCK_SIZE);
			aad_tmp = align_malloc(aad_tmp_len,
					       DATA_ADDR_ALIGN_SIZE);
			if (!aad_tmp)
				goto exit;

			/* clear last block */
			memset(aad_tmp + aad_tmp_len - AES_BLOCK_SIZE,
			       0x00, AES_BLOCK_SIZE);

			/* read iv data from reg */
			get_iv_reg(key_chn, aad_tmp, AES_BLOCK_SIZE);
			ccm_compose_aad_iv(aad_tmp, tmp_len, aad_len, tag_len);
			memcpy(aad_tmp + AES_BLOCK_SIZE, padding, padding_size);

			memcpy(aad_tmp + AES_BLOCK_SIZE + padding_size,
			       aad, aad_len);
		} else {
			aad_tmp_len = aad_len;
			if (IS_ALIGNED((ulong)aad, DATA_ADDR_ALIGN_SIZE)) {
				aad_tmp = (void *)aad;
			} else {
				aad_tmp = align_malloc(aad_tmp_len,
						       DATA_ADDR_ALIGN_SIZE);
				if (!aad_tmp)
					goto exit;

				memcpy(aad_tmp, aad, aad_tmp_len);
			}

			set_aad_len_reg(key_chn, aad_tmp_len);
			set_pc_len_reg(key_chn, tmp_len);
		}

		aad_desc->src_addr = (u32)virt_to_phys(aad_tmp);
		aad_desc->src_len  = aad_tmp_len;

		if (aad_tmp_len) {
			data_desc->user_define = LLI_USER_STRING_START |
						 LLI_USER_STRING_LAST |
						 (key_chn << 4);
			crypto_write((u32)virt_to_phys(aad_desc), CRYPTO_DMA_LLI_ADDR);
			cache_op_inner(DCACHE_AREA_CLEAN, aad_tmp, aad_tmp_len);
			cache_op_inner(DCACHE_AREA_CLEAN, aad_desc, sizeof(*aad_desc));
		}
	}

	cache_op_inner(DCACHE_AREA_CLEAN, data_desc, sizeof(*data_desc));
	cache_op_inner(DCACHE_AREA_CLEAN, dma_in, tmp_len);
	if (dma_out)
		cache_op_inner(DCACHE_AREA_INVALIDATE, dma_out, tmp_len);

	/* din_swap set 1, dout_swap set 1, default 1. */
	crypto_write(0x00030003, CRYPTO_FIFO_CTL);
	crypto_write(0, CRYPTO_DMA_INT_EN);

	reg_ctrl = crypto_read(CRYPTO_BC_CTL) | CRYPTO_BC_ENABLE;
	crypto_write(reg_ctrl | CRYPTO_WRITE_MASK_ALL, CRYPTO_BC_CTL);
	crypto_write(0x00010001, CRYPTO_DMA_CTL); /* start DMA */

	mask = ~(mask | CRYPTO_SYNC_LOCKSTEP_INT_ST);

	/* wait calc ok */
	ret = RK_POLL_TIMEOUT(!(crypto_read(CRYPTO_DMA_INT_ST) & mask),
			      RK_CRYPTO_TIMEOUT);
	tmp = crypto_read(CRYPTO_DMA_INT_ST);
	crypto_write(tmp, CRYPTO_DMA_INT_ST);

	if ((tmp & mask) == expt_int) {
		if (out && out != dma_out)
			memcpy(out, dma_out, len);

		if (IS_NEED_TAG(rk_mode)) {
			ret = WAIT_TAG_VALID(key_chn, RK_CRYPTO_TIMEOUT);
			get_tag_from_reg(key_chn, tag, AES_BLOCK_SIZE);
		}
	} else {
		dump_crypto_state(data_desc, tmp, expt_int, in, out, len, ret);
		ret = -1;
	}

exit:
	crypto_write(0xffff0000, CRYPTO_BC_CTL); /* disable block cipher */
	align_free(data_desc);
	align_free(aad_desc);
	if (dma_in != in)
		align_free(dma_in);
	if (out && dma_out != out)
		align_free(dma_out);
	if (aad && aad != aad_tmp)
		align_free(aad_tmp);

	return ret;
}

static int hw_aes_init(u32 chn, const u8 *key, const u8 *twk_key, u32 key_len,
		       const u8 *iv, u32 iv_len, u32 mode, bool enc)
{
	u32 rk_mode = RK_GET_RK_MODE(mode);

	if (rk_mode > RK_MODE_XTS)
		return -EINVAL;

	if (iv_len > AES_BLOCK_SIZE)
		return -EINVAL;

	if (IS_NEED_IV(rk_mode)) {
		if (!iv || iv_len != AES_BLOCK_SIZE)
			return -EINVAL;
	} else {
		iv_len = 0;
	}

	if (rk_mode == RK_MODE_XTS) {
		if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_256)
			return -EINVAL;

		if (!key || !twk_key)
			return -EINVAL;
	} else {
		if (key_len != AES_KEYSIZE_128 &&
		    key_len != AES_KEYSIZE_192 &&
		    key_len != AES_KEYSIZE_256)
			return -EINVAL;
	}

	return hw_cipher_init(chn, key, twk_key, key_len, iv, iv_len,
			      CRYPTO_AES, mode, enc);
}

static int hw_sm4_init(u32 chn, const u8 *key, const u8 *twk_key, u32 key_len,
		       const u8 *iv, u32 iv_len, u32 mode, bool enc)
{
	u32 rk_mode = RK_GET_RK_MODE(mode);

	if (rk_mode > RK_MODE_XTS)
		return -EINVAL;

	if (iv_len > SM4_BLOCK_SIZE || key_len != SM4_KEYSIZE)
		return -EINVAL;

	if (IS_NEED_IV(rk_mode)) {
		if (!iv || iv_len != SM4_BLOCK_SIZE)
			return -EINVAL;
	} else {
		iv_len = 0;
	}

	if (rk_mode == RK_MODE_XTS) {
		if (!key || !twk_key)
			return -EINVAL;
	}

	return hw_cipher_init(chn, key, twk_key, key_len, iv, iv_len,
			      CRYPTO_SM4, mode, enc);
}

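/*
 * DES/3DES wrapper: a 2-key TDES key (K1,K2) is expanded to the 3-key
 * form (K1,K2,K1) expected by the engine before programming it.
 */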
int rk_crypto_des(struct udevice *dev, u32 mode, const u8 *key, u32 key_len,
		  const u8 *iv, const u8 *in, u8 *out, u32 len, bool enc)
{
	u32 rk_mode = RK_GET_RK_MODE(mode);
	const u8 *key_ptr = NULL;
	u8 tmp_key[24];
	int ret;

	if (!is_des_mode(rk_mode))
		return -EINVAL;

	if (!key) {
		/* NULL key selects the hardware key table in hw_cipher_init() */
	} else if (key_len == DES_BLOCK_SIZE || key_len == 3 * DES_BLOCK_SIZE) {
		memcpy(tmp_key, key, key_len);
		key_ptr = tmp_key;
	} else if (key_len == 2 * DES_BLOCK_SIZE) {
		memcpy(tmp_key, key, 16);
		memcpy(tmp_key + 16, key, 8);
		key_len = 3 * DES_BLOCK_SIZE;
		key_ptr = tmp_key;
	} else {
		return -EINVAL;
	}

	ret = hw_cipher_init(0, key_ptr, NULL, key_len, iv, DES_BLOCK_SIZE,
			     CRYPTO_DES, mode, enc);
	if (ret)
		goto exit;

	ret = hw_cipher_crypt(in, out, len, NULL, 0,
			      NULL, 0, mode);

exit:
	return ret;
}

int rk_crypto_aes(struct udevice *dev, u32 mode,
		  const u8 *key, const u8 *twk_key, u32 key_len,
		  const u8 *iv, u32 iv_len,
		  const u8 *in, u8 *out, u32 len, bool enc)
{
	int ret;

	/* RV1126/RV1109 do not support aes-192 */
#if defined(CONFIG_ROCKCHIP_RV1126)
	if (key_len == AES_KEYSIZE_192)
		return -EINVAL;
#endif

	ret = hw_aes_init(0, key, twk_key, key_len, iv, iv_len, mode, enc);
	if (ret)
		return ret;

	return hw_cipher_crypt(in, out, len, NULL, 0,
			       NULL, 0, mode);
}

int rk_crypto_sm4(struct udevice *dev, u32 mode,
		  const u8 *key, const u8 *twk_key, u32 key_len,
		  const u8 *iv, u32 iv_len,
		  const u8 *in, u8 *out, u32 len, bool enc)
{
	int ret;

	ret = hw_sm4_init(0, key, twk_key, key_len, iv, iv_len, mode, enc);
	if (ret)
		return ret;

	return hw_cipher_crypt(in, out, len, NULL, 0, NULL, 0, mode);
}

int rockchip_crypto_cipher(struct udevice *dev, cipher_context *ctx,
			   const u8 *in, u8 *out, u32 len, bool enc)
{
	int ret;

	rk_crypto_enable_clk(dev);

	switch (ctx->algo) {
	case CRYPTO_DES:
		ret = rk_crypto_des(dev, ctx->mode, ctx->key, ctx->key_len,
				    ctx->iv, in, out, len, enc);
		break;
	case CRYPTO_AES:
		ret = rk_crypto_aes(dev, ctx->mode,
				    ctx->key, ctx->twk_key, ctx->key_len,
				    ctx->iv, ctx->iv_len, in, out, len, enc);
		break;
	case CRYPTO_SM4:
		ret = rk_crypto_sm4(dev, ctx->mode,
				    ctx->key, ctx->twk_key, ctx->key_len,
				    ctx->iv, ctx->iv_len, in, out, len, enc);
		break;
	default:
		ret = -EINVAL;
		break;
	}

	rk_crypto_disable_clk(dev);

	return ret;
}

int rk_crypto_mac(struct udevice *dev, u32 algo, u32 mode,
		  const u8 *key, u32 key_len,
		  const u8 *in, u32 len, u8 *tag)
{
	u32 rk_mode = RK_GET_RK_MODE(mode);
	int ret;

	if (!IS_MAC_MODE(rk_mode))
		return -EINVAL;

	if (algo != CRYPTO_AES && algo != CRYPTO_SM4)
		return -EINVAL;

	/* RV1126/RV1109 do not support aes-192 */
#if defined(CONFIG_ROCKCHIP_RV1126)
	if (algo == CRYPTO_AES && key_len == AES_KEYSIZE_192)
		return -EINVAL;
#endif

	ret = hw_cipher_init(g_key_chn, key, NULL, key_len, NULL, 0,
			     algo, mode, true);
	if (ret)
		return ret;

	return hw_cipher_crypt(in, NULL, len, NULL, 0,
			       tag, AES_BLOCK_SIZE, mode);
}

int rockchip_crypto_mac(struct udevice *dev, cipher_context *ctx,
			const u8 *in, u32 len, u8 *tag)
{
	int ret = 0;

	rk_crypto_enable_clk(dev);

	ret = rk_crypto_mac(dev, ctx->algo, ctx->mode,
			    ctx->key, ctx->key_len, in, len, tag);

	rk_crypto_disable_clk(dev);

	return ret;
}

int rk_crypto_ae(struct udevice *dev, u32 algo, u32 mode,
		 const u8 *key, u32 key_len, const u8 *nonce, u32 nonce_len,
		 const u8 *in, u32 len, const u8 *aad, u32 aad_len,
		 u8 *out, u8 *tag)
{
	u32 rk_mode = RK_GET_RK_MODE(mode);
	int ret;

	if (!IS_AE_MODE(rk_mode))
		return -EINVAL;

	if (len == 0)
		return -EINVAL;

	if (algo != CRYPTO_AES && algo != CRYPTO_SM4)
		return -EINVAL;

	/* RV1126/RV1109 do not support aes-192 */
#if defined(CONFIG_ROCKCHIP_RV1126)
	if (algo == CRYPTO_AES && key_len == AES_KEYSIZE_192)
		return -EINVAL;
#endif

	ret = hw_cipher_init(g_key_chn, key, NULL, key_len, nonce, nonce_len,
			     algo, mode, true);
	if (ret)
		return ret;

	return hw_cipher_crypt(in, out, len, aad, aad_len,
			       tag, AES_BLOCK_SIZE, mode);
}

int rockchip_crypto_ae(struct udevice *dev, cipher_context *ctx,
		       const u8 *in, u32 len, const u8 *aad, u32 aad_len,
		       u8 *out, u8 *tag)
{
	int ret = 0;

	rk_crypto_enable_clk(dev);

	ret = rk_crypto_ae(dev, ctx->algo, ctx->mode, ctx->key, ctx->key_len,
			   ctx->iv, ctx->iv_len, in, len,
			   aad, aad_len, out, tag);

	rk_crypto_disable_clk(dev);

	return ret;
}

#if CONFIG_IS_ENABLED(DM_KEYLAD)
int rockchip_crypto_fw_cipher(struct udevice *dev, cipher_fw_context *ctx,
			      const u8 *in, u8 *out, u32 len, bool enc)
{
	int ret;

	rk_crypto_enable_clk(dev);

	switch (ctx->algo) {
	case CRYPTO_DES:
		ret = rk_crypto_des(dev, ctx->mode, NULL, ctx->key_len,
				    ctx->iv, in, out, len, enc);
		break;
	case CRYPTO_AES:
		ret = rk_crypto_aes(dev, ctx->mode, NULL, NULL, ctx->key_len,
				    ctx->iv, ctx->iv_len, in, out, len, enc);
		break;
	case CRYPTO_SM4:
		ret = rk_crypto_sm4(dev, ctx->mode, NULL, NULL, ctx->key_len,
				    ctx->iv, ctx->iv_len, in, out, len, enc);
		break;
	default:
		ret = -EINVAL;
		break;
	}

	rk_crypto_disable_clk(dev);

	return ret;
}

static ulong rockchip_crypto_keytable_addr(struct udevice *dev)
{
	return CRYPTO_S_BY_KEYLAD_BASE + CRYPTO_CH0_KEY_0;
}
#endif
#endif

#if CONFIG_IS_ENABLED(ROCKCHIP_RSA)
static int rockchip_crypto_rsa_verify(struct udevice *dev, rsa_key *ctx,
				      u8 *sign, u8 *output)
{
	struct mpa_num *mpa_m = NULL, *mpa_e = NULL, *mpa_n = NULL;
	struct mpa_num *mpa_c = NULL, *mpa_result = NULL;
	u32 n_bits, n_words;
	int ret;

	if (!ctx)
		return -EINVAL;

	if (ctx->algo != CRYPTO_RSA512 &&
	    ctx->algo != CRYPTO_RSA1024 &&
	    ctx->algo != CRYPTO_RSA2048 &&
	    ctx->algo != CRYPTO_RSA3072 &&
	    ctx->algo != CRYPTO_RSA4096)
		return -EINVAL;

	n_bits = crypto_algo_nbits(ctx->algo);
	n_words = BITS2WORD(n_bits);

	ret = rk_mpa_alloc(&mpa_m, sign, n_words);
	if (ret)
		goto exit;

	ret = rk_mpa_alloc(&mpa_e, ctx->e, n_words);
	if (ret)
		goto exit;

	ret = rk_mpa_alloc(&mpa_n, ctx->n, n_words);
	if (ret)
		goto exit;

	if (ctx->c) {
		ret = rk_mpa_alloc(&mpa_c, ctx->c, n_words);
		if (ret)
			goto exit;
	}

	ret = rk_mpa_alloc(&mpa_result, NULL, n_words);
	if (ret)
		goto exit;

	rk_crypto_enable_clk(dev);
	ret = rk_exptmod_np(mpa_m, mpa_e, mpa_n, mpa_c, mpa_result);
	if (!ret)
		memcpy(output, mpa_result->d, BITS2BYTE(n_bits));
	rk_crypto_disable_clk(dev);

exit:
	rk_mpa_free(&mpa_m);
	rk_mpa_free(&mpa_e);
	rk_mpa_free(&mpa_n);
	rk_mpa_free(&mpa_c);
	rk_mpa_free(&mpa_result);

	return ret;
}
#endif

#if CONFIG_IS_ENABLED(ROCKCHIP_EC)
static int rockchip_crypto_ec_verify(struct udevice *dev, ec_key *ctx,
				     u8 *hash, u32 hash_len, u8 *sign)
{
	struct mpa_num *bn_sign = NULL;
	/* zero-init so rk_mpa_free() is safe on the early-exit paths */
	struct rk_ecp_point point_P = {0}, point_sign = {0};
	u32 n_bits, n_words;
	int ret;

	if (!ctx)
		return -EINVAL;

	if (ctx->algo != CRYPTO_SM2 &&
	    ctx->algo != CRYPTO_ECC_192R1 &&
	    ctx->algo != CRYPTO_ECC_224R1 &&
	    ctx->algo != CRYPTO_ECC_256R1)
		return -EINVAL;

	n_bits = crypto_algo_nbits(ctx->algo);
	n_words = BITS2WORD(n_bits);

	ret = rk_mpa_alloc(&bn_sign, sign, n_words);
	if (ret)
		goto exit;

	ret = rk_mpa_alloc(&point_P.x, ctx->x, n_words);
	ret |= rk_mpa_alloc(&point_P.y, ctx->y, n_words);
	if (ret)
		goto exit;

	ret = rk_mpa_alloc(&point_sign.x, sign, n_words);
	ret |= rk_mpa_alloc(&point_sign.y, sign + WORD2BYTE(n_words), n_words);
	if (ret)
		goto exit;

	rk_crypto_enable_clk(dev);
	ret = rockchip_ecc_verify(ctx->algo, hash, hash_len, &point_P, &point_sign);
	rk_crypto_disable_clk(dev);
exit:
	rk_mpa_free(&bn_sign);
	rk_mpa_free(&point_P.x);
	rk_mpa_free(&point_P.y);
	rk_mpa_free(&point_sign.x);
	rk_mpa_free(&point_sign.y);

	return ret;
}
#endif

static bool rockchip_crypto_is_secure(struct udevice *dev)
{
	struct rockchip_crypto_priv *priv = dev_get_priv(dev);

	return priv->secure;
}

static const struct dm_crypto_ops rockchip_crypto_ops = {
	.capability   = rockchip_crypto_capability,
	.sha_init     = rockchip_crypto_sha_init,
	.sha_update   = rockchip_crypto_sha_update,
	.sha_final    = rockchip_crypto_sha_final,
#if CONFIG_IS_ENABLED(ROCKCHIP_RSA)
	.rsa_verify   = rockchip_crypto_rsa_verify,
#endif
#if CONFIG_IS_ENABLED(ROCKCHIP_EC)
	.ec_verify    = rockchip_crypto_ec_verify,
#endif
#if CONFIG_IS_ENABLED(ROCKCHIP_HMAC)
	.hmac_init    = rockchip_crypto_hmac_init,
	.hmac_update  = rockchip_crypto_hmac_update,
	.hmac_final   = rockchip_crypto_hmac_final,
#endif
#if CONFIG_IS_ENABLED(ROCKCHIP_CIPHER)
	.cipher_crypt    = rockchip_crypto_cipher,
	.cipher_mac      = rockchip_crypto_mac,
	.cipher_ae       = rockchip_crypto_ae,
#if CONFIG_IS_ENABLED(DM_KEYLAD)
	.cipher_fw_crypt = rockchip_crypto_fw_cipher,
	.keytable_addr   = rockchip_crypto_keytable_addr,
#endif
#endif
	.is_secure       = rockchip_crypto_is_secure,
};

/*
 * Only use "clocks" to parse the crypto clock ids and use rockchip_get_clk().
 * Because we always add the crypto node in the U-Boot dts, two problems
 * appear when a kernel dtb is enabled:
 *
 *   1. There is a cru phandle mismatch between the U-Boot and kernel dtb;
 *   2. CONFIG_OF_SPL_REMOVE_PROPS removes the clock property.
 */
static int rockchip_crypto_ofdata_to_platdata(struct udevice *dev)
{
	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
	int len, ret = -EINVAL;

	memset(priv, 0x00, sizeof(*priv));

	priv->reg = (fdt_addr_t)dev_read_addr_ptr(dev);
	if (priv->reg == FDT_ADDR_T_NONE)
		return -EINVAL;

	crypto_base = priv->reg;

	priv->secure = dev_read_bool(dev, "secure");
	priv->enabled = true;

#if !defined(CONFIG_SPL_BUILD)
	/* U-Boot proper must not touch a secure-only crypto instance */
	priv->enabled = !priv->secure;
#endif
	if (!priv->enabled)
		return 0;

	/* if there is no "clocks" property in the dts, just skip it */
	if (!dev_read_prop(dev, "clocks", &len)) {
		printf("Can't find \"clocks\" property\n");
		return 0;
	}

	priv->clocks = malloc(len);
	if (!priv->clocks)
		return -ENOMEM;

	priv->nclocks = len / (2 * sizeof(u32));
	if (dev_read_u32_array(dev, "clocks", (u32 *)priv->clocks,
			       priv->nclocks)) {
		printf("Can't read \"clocks\" property\n");
		ret = -EINVAL;
		goto exit;
	}

	if (dev_read_prop(dev, "clock-frequency", &len)) {
		priv->frequencies = malloc(len);
		if (!priv->frequencies) {
			ret = -ENOMEM;
			goto exit;
		}
		priv->freq_nclocks = len / sizeof(u32);
		if (dev_read_u32_array(dev, "clock-frequency", priv->frequencies,
				       priv->freq_nclocks)) {
			printf("Can't read \"clock-frequency\" property\n");
			ret = -EINVAL;
			goto exit;
		}
	}

	return 0;
exit:
	if (priv->clocks)
		free(priv->clocks);

	if (priv->frequencies)
		free(priv->frequencies);

	return ret;
}

static int rk_crypto_set_clk(struct udevice *dev)
{
	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
	struct clk clk;
	int i, ret;

	/* use standard "assigned-clock-rates" props */
	if (dev_read_size(dev, "assigned-clock-rates") > 0)
		return clk_set_defaults(dev);

	/* use "clock-frequency" props */
	if (priv->freq_nclocks == 0)
		return 0;

	for (i = 0; i < priv->freq_nclocks; i++) {
		ret = clk_get_by_index(dev, i, &clk);
		if (ret < 0) {
			printf("Failed to get clk index %d, ret=%d\n", i, ret);
			return ret;
		}
		ret = clk_set_rate(&clk, priv->frequencies[i]);
		if (ret < 0) {
			printf("%s: Failed to set clk(%ld): ret=%d\n",
			       __func__, clk.id, ret);
			return ret;
		}
	}

	return 0;
}

static int rockchip_crypto_probe(struct udevice *dev)
{
	struct rockchip_crypto_priv *priv = dev_get_priv(dev);
	struct rk_crypto_soc_data *sdata;
	int ret = 0;

	sdata = (struct rk_crypto_soc_data *)dev_get_driver_data(dev);

	if (!priv->enabled)
		return 0;

	priv->hw_ctx = memalign(LLI_ADDR_ALIGN_SIZE,
				sizeof(struct rk_hash_ctx));
	if (!priv->hw_ctx)
		return -ENOMEM;

	ret = rk_crypto_set_clk(dev);
	if (ret)
		return ret;

	rk_crypto_enable_clk(dev);

	hw_crypto_reset();

	if (sdata->dynamic_cap)
		sdata->capability = sdata->dynamic_cap();

	priv->soc_data = sdata;

	rk_crypto_disable_clk(dev);

	return 0;
}

static const struct rk_crypto_soc_data soc_data_base = {
	.capability = CRYPTO_MD5 |
		      CRYPTO_SHA1 |
		      CRYPTO_SHA256 |
		      CRYPTO_SHA512 |
		      CRYPTO_HMAC_MD5 |
		      CRYPTO_HMAC_SHA1 |
		      CRYPTO_HMAC_SHA256 |
		      CRYPTO_HMAC_SHA512 |
		      CRYPTO_RSA512 |
		      CRYPTO_RSA1024 |
		      CRYPTO_RSA2048 |
		      CRYPTO_RSA3072 |
		      CRYPTO_RSA4096 |
		      CRYPTO_DES |
		      CRYPTO_AES,
};

static const struct rk_crypto_soc_data soc_data_base_sm = {
	.capability = CRYPTO_MD5 |
		      CRYPTO_SHA1 |
		      CRYPTO_SHA256 |
		      CRYPTO_SHA512 |
		      CRYPTO_SM3 |
		      CRYPTO_HMAC_MD5 |
		      CRYPTO_HMAC_SHA1 |
		      CRYPTO_HMAC_SHA256 |
		      CRYPTO_HMAC_SHA512 |
		      CRYPTO_HMAC_SM3 |
		      CRYPTO_RSA512 |
		      CRYPTO_RSA1024 |
		      CRYPTO_RSA2048 |
		      CRYPTO_RSA3072 |
		      CRYPTO_RSA4096 |
		      CRYPTO_DES |
		      CRYPTO_AES |
		      CRYPTO_SM4,
};

static const struct rk_crypto_soc_data soc_data_rk1808 = {
	.capability = CRYPTO_MD5 |
		      CRYPTO_SHA1 |
		      CRYPTO_SHA256 |
		      CRYPTO_HMAC_MD5 |
		      CRYPTO_HMAC_SHA1 |
		      CRYPTO_HMAC_SHA256 |
		      CRYPTO_RSA512 |
		      CRYPTO_RSA1024 |
		      CRYPTO_RSA2048 |
		      CRYPTO_RSA3072 |
		      CRYPTO_RSA4096,
};

static const struct rk_crypto_soc_data soc_data_cryptov3 = {
	.capability  = 0,
	.dynamic_cap = crypto_v3_dynamic_cap,
};

static const struct udevice_id rockchip_crypto_ids[] = {
	{
		.compatible = "rockchip,px30-crypto",
		.data = (ulong)&soc_data_base
	},
	{
		.compatible = "rockchip,rk1808-crypto",
		.data = (ulong)&soc_data_rk1808
	},
	{
		.compatible = "rockchip,rk3308-crypto",
		.data = (ulong)&soc_data_base
	},
	{
		.compatible = "rockchip,rv1126-crypto",
		.data = (ulong)&soc_data_base_sm
	},
	{
		.compatible = "rockchip,rk3568-crypto",
		.data = (ulong)&soc_data_base_sm
	},
	{
		.compatible = "rockchip,rk3588-crypto",
		.data = (ulong)&soc_data_base_sm
	},
	{
		.compatible = "rockchip,crypto-v3",
		.data = (ulong)&soc_data_cryptov3
	},
	{
		.compatible = "rockchip,crypto-v4",
		.data = (ulong)&soc_data_cryptov3 /* reuse crypto v3 config */
	},
	{ }
};

U_BOOT_DRIVER(rockchip_crypto_v2) = {
	.name		= "rockchip_crypto_v2",
	.id		= UCLASS_CRYPTO,
	.of_match	= rockchip_crypto_ids,
	.ops		= &rockchip_crypto_ops,
	.probe		= rockchip_crypto_probe,
	.ofdata_to_platdata = rockchip_crypto_ofdata_to_platdata,
	.priv_auto_alloc_size = sizeof(struct rockchip_crypto_priv),
};