// SPDX-License-Identifier: GPL-2.0
/*
 * Hash acceleration support for Rockchip Crypto v2
 *
 * Copyright (c) 2020, Rockchip Electronics Co., Ltd
 *
 * Author: Lin Jinhan <troy.lin@rock-chips.com>
 *
 * Some ideas are taken from the marvell/cesa.c and s5p-sss.c drivers.
 */

#include <linux/slab.h>
#include <linux/iopoll.h>

#include "rk_crypto_core.h"
#include "rk_crypto_v2.h"
#include "rk_crypto_v2_reg.h"
#include "rk_crypto_ahash_utils.h"
#include "rk_crypto_utils.h"

#define RK_HASH_CTX_MAGIC	0x1A1A1A1A
#define RK_POLL_PERIOD_US	100
#define RK_POLL_TIMEOUT_US	50000

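/*
 * State blob handed out by rk_ahash_export() and consumed by
 * rk_ahash_import(): a snapshot of the transform context plus, presumably,
 * the trailing not-yet-hashed bytes (lastc). Note the import/export paths
 * below are still TODO stubs, so this layout currently only reserves the
 * space advertised via halg.statesize in rk_cra_hash_init().
 */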
struct rk_ahash_expt_ctx {
	struct rk_ahash_ctx	ctx;
	u8			lastc[RK_DMA_ALIGNMENT];
};

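/* Map generic hash_algo identifiers to the HASH_CTL algorithm field. */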
static const u32 hash_algo2bc[] = {
	[HASH_ALGO_MD5]    = CRYPTO_MD5,
	[HASH_ALGO_SHA1]   = CRYPTO_SHA1,
	[HASH_ALGO_SHA224] = CRYPTO_SHA224,
	[HASH_ALGO_SHA256] = CRYPTO_SHA256,
	[HASH_ALGO_SHA384] = CRYPTO_SHA384,
	[HASH_ALGO_SHA512] = CRYPTO_SHA512,
	[HASH_ALGO_SM3]    = CRYPTO_SM3,
};

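/*
 * Soft-reset the crypto core and force the hash block into a known state.
 * The control registers appear to use a write-enable scheme: the high 16
 * bits act as a write mask for the low 16 bits, hence the recurring
 * "value | value << CRYPTO_WRITE_MASK_SHIFT" pattern and the final
 * 0xffff0000 write, which clears every HASH_CTL bit.
 */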
static void rk_hash_reset(struct rk_crypto_dev *rk_dev)
{
	int ret;
	u32 tmp = 0, tmp_mask = 0;
	unsigned int poll_timeout_us = 1000;

	CRYPTO_WRITE(rk_dev, CRYPTO_DMA_INT_EN, 0x00);

	tmp = CRYPTO_SW_CC_RESET;
	tmp_mask = tmp << CRYPTO_WRITE_MASK_SHIFT;

	CRYPTO_WRITE(rk_dev, CRYPTO_RST_CTL, tmp | tmp_mask);

	/* This is usually done in 20 clock cycles */
	ret = read_poll_timeout_atomic(CRYPTO_READ, tmp, !tmp, 0, poll_timeout_us,
				       false, rk_dev, CRYPTO_RST_CTL);
	if (ret)
		dev_err(rk_dev->dev, "cipher reset poll timeout %uus.\n",
			poll_timeout_us);

	CRYPTO_WRITE(rk_dev, CRYPTO_HASH_CTL, 0xffff0000);
}

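/*
 * Interrupt handler glue: acknowledge all pending DMA interrupt sources,
 * then treat anything other than a plain "source item done" status as an
 * error and dump the DMA/LLI state for debugging.
 */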
static int rk_crypto_irq_handle(int irq, void *dev_id)
{
	struct rk_crypto_dev *rk_dev  = platform_get_drvdata(dev_id);
	u32 interrupt_status;
	struct rk_hw_crypto_v2_info *hw_info =
			(struct rk_hw_crypto_v2_info *)rk_dev->hw_info;
	struct rk_alg_ctx *alg_ctx = rk_ahash_alg_ctx(rk_dev);

	/* disable crypto irq */
	CRYPTO_WRITE(rk_dev, CRYPTO_DMA_INT_EN, 0);

	interrupt_status = CRYPTO_READ(rk_dev, CRYPTO_DMA_INT_ST);
	CRYPTO_WRITE(rk_dev, CRYPTO_DMA_INT_ST, interrupt_status);

	interrupt_status &= CRYPTO_LOCKSTEP_MASK;

	if (interrupt_status != CRYPTO_SRC_ITEM_DONE_INT_ST) {
		dev_err(rk_dev->dev, "DMA desc = %p\n", hw_info->hw_desc.lli_head);
		dev_err(rk_dev->dev, "DMA addr_in = %08x\n",
			(u32)alg_ctx->addr_in);
		dev_err(rk_dev->dev, "DMA addr_out = %08x\n",
			(u32)alg_ctx->addr_out);
		dev_err(rk_dev->dev, "DMA count = %08x\n", alg_ctx->count);
		dev_err(rk_dev->dev, "DMA desc_dma = %08x\n",
			(u32)hw_info->hw_desc.lli_head_dma);
		dev_err(rk_dev->dev, "DMA Error status = %08x\n",
			interrupt_status);
		dev_err(rk_dev->dev, "DMA CRYPTO_DMA_LLI_ADDR status = %08x\n",
			CRYPTO_READ(rk_dev, CRYPTO_DMA_LLI_ADDR));
		dev_err(rk_dev->dev, "DMA CRYPTO_DMA_ST status = %08x\n",
			CRYPTO_READ(rk_dev, CRYPTO_DMA_ST));
		dev_err(rk_dev->dev, "DMA CRYPTO_DMA_STATE status = %08x\n",
			CRYPTO_READ(rk_dev, CRYPTO_DMA_STATE));
		dev_err(rk_dev->dev, "DMA CRYPTO_DMA_LLI_RADDR status = %08x\n",
			CRYPTO_READ(rk_dev, CRYPTO_DMA_LLI_RADDR));
		dev_err(rk_dev->dev, "DMA CRYPTO_DMA_SRC_RADDR status = %08x\n",
			CRYPTO_READ(rk_dev, CRYPTO_DMA_SRC_RADDR));
		dev_err(rk_dev->dev, "DMA CRYPTO_DMA_DST_RADDR status = %08x\n",
			CRYPTO_READ(rk_dev, CRYPTO_DMA_DST_RADDR));
		rk_dev->err = -EFAULT;
	}

	return 0;
}

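/*
 * Completion callback invoked when a hash request finishes. On error the
 * hash block is reset and the head LLI descriptor is dumped before the
 * original crypto request is completed.
 */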
static void rk_ahash_crypto_complete(struct crypto_async_request *base, int err)
{
	struct ahash_request *req = ahash_request_cast(base);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct rk_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct rk_alg_ctx *alg_ctx = rk_ahash_alg_ctx(ctx->rk_dev);

	struct rk_hw_crypto_v2_info *hw_info = ctx->rk_dev->hw_info;
	struct crypto_lli_desc *lli_desc = hw_info->hw_desc.lli_head;

	if (err) {
		rk_hash_reset(ctx->rk_dev);
		pr_err("aligned = %u, align_size = %u\n",
		       alg_ctx->aligned, alg_ctx->align_size);
		pr_err("total = %u, left = %u, count = %u\n",
		       alg_ctx->total, alg_ctx->left_bytes, alg_ctx->count);
		pr_err("lli->src     = %08x\n", lli_desc->src_addr);
		pr_err("lli->src_len = %08x\n", lli_desc->src_len);
		pr_err("lli->dst     = %08x\n", lli_desc->dst_addr);
		pr_err("lli->dst_len = %08x\n", lli_desc->dst_len);
		pr_err("lli->dma_ctl = %08x\n", lli_desc->dma_ctrl);
		pr_err("lli->usr_def = %08x\n", lli_desc->user_define);
		pr_err("lli->next    = %08x\n\n\n", lli_desc->next_addr);
	}

	if (base->complete)
		base->complete(base, err);
}

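/* Clear all 16 digest output words (enough for a 512-bit digest). */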
static inline void clear_hash_out_reg(struct rk_crypto_dev *rk_dev)
{
	rk_crypto_clear_regs(rk_dev, CRYPTO_HASH_DOUT_0, 16);
}

static int write_key_reg(struct rk_crypto_dev *rk_dev, const u8 *key,
			 u32 key_len)
{
	rk_crypto_write_regs(rk_dev, CRYPTO_CH0_KEY_0, key, key_len);

	return 0;
}

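/*
 * Program the hash engine for one algorithm: reset the block, clear stale
 * digest output, select the algorithm with hardware padding enabled and,
 * for HMAC transforms, also enable the HMAC engine. The 0x00030003 FIFO
 * write presumably sets the input/output FIFO thresholds; the magic value
 * is kept as-is.
 */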
static int rk_hw_hash_init(struct rk_crypto_dev *rk_dev, u32 algo, u32 type)
{
	u32 reg_ctrl = 0;

	if (algo >= ARRAY_SIZE(hash_algo2bc))
		goto exit;

	rk_hash_reset(rk_dev);

	clear_hash_out_reg(rk_dev);

	reg_ctrl = hash_algo2bc[algo] | CRYPTO_HW_PAD_ENABLE;

	if (IS_TYPE_HMAC(type)) {
		CRYPTO_TRACE("this is hmac");
		reg_ctrl |= CRYPTO_HMAC_ENABLE;
	}

	CRYPTO_WRITE(rk_dev, CRYPTO_HASH_CTL, reg_ctrl | CRYPTO_WRITE_MASK_ALL);
	CRYPTO_WRITE(rk_dev, CRYPTO_FIFO_CTL, 0x00030003);

	return 0;
exit:
	CRYPTO_WRITE(rk_dev, CRYPTO_HASH_CTL, 0 | CRYPTO_WRITE_MASK_ALL);

	return -EINVAL;
}

static void clean_hash_setting(struct rk_crypto_dev *rk_dev)
{
	CRYPTO_WRITE(rk_dev, CRYPTO_DMA_INT_EN, 0);
	CRYPTO_WRITE(rk_dev, CRYPTO_HASH_CTL, 0 | CRYPTO_WRITE_MASK_ALL);
}

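/*
 * Partial-state import/export. Both are still stubs (see the TODOs):
 * export hands back a zeroed rk_ahash_expt_ctx and import ignores it, so
 * in-flight requests cannot yet be migrated between transforms.
 */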
static int rk_ahash_import(struct ahash_request *req, const void *in)
{
	struct rk_ahash_expt_ctx state;

	/* 'in' may not be aligned so memcpy to local variable */
	memcpy(&state, in, sizeof(state));

	/* TODO: deal with import */

	return 0;
}

static int rk_ahash_export(struct ahash_request *req, void *out)
{
	struct rk_ahash_expt_ctx state;

	/* Don't let anything leak to 'out' */
	memset(&state, 0, sizeof(state));

	/* TODO: deal with export */

	memcpy(out, &state, sizeof(state));

	return 0;
}

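/*
 * Queue the current scatterlist chunk to the DMA engine. The LLI chain is
 * (re)built for each chunk: the tail descriptor either pauses the DMA
 * (more data to come) or marks the last block, and the first chunk of a
 * message additionally carries the CIPHER_START/STRING_START flags and
 * kicks the engine with CRYPTO_DMA_START instead of CRYPTO_DMA_RESTART.
 * Intermediate chunks must be a multiple of RK_DMA_ALIGNMENT bytes.
 */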
static int rk_ahash_dma_start(struct rk_crypto_dev *rk_dev, uint32_t flag)
{
	struct rk_hw_crypto_v2_info *hw_info =
			(struct rk_hw_crypto_v2_info *)rk_dev->hw_info;
	struct rk_alg_ctx *alg_ctx = rk_ahash_alg_ctx(rk_dev);
	struct rk_ahash_ctx *ctx = rk_ahash_ctx_cast(rk_dev);
	struct crypto_lli_desc *lli_head, *lli_tail;
	u32 dma_ctl = CRYPTO_DMA_RESTART;
	bool is_final = flag & RK_FLAG_FINAL;
	int ret;

	CRYPTO_TRACE("ctx->calc_cnt = %u, count = %u bytes, is_final = %d",
		     ctx->calc_cnt, alg_ctx->count, is_final);

	if (alg_ctx->count % RK_DMA_ALIGNMENT && !is_final) {
		dev_err(rk_dev->dev, "count = %u is not aligned to %u\n",
			alg_ctx->count, RK_DMA_ALIGNMENT);
		return -EINVAL;
	}

	if (alg_ctx->count == 0) {
		/* do nothing */
		CRYPTO_TRACE("empty calc");
		return 0;
	}

	if (alg_ctx->aligned)
		ret = rk_crypto_hw_desc_init(&hw_info->hw_desc,
					     alg_ctx->sg_src, NULL, alg_ctx->count);
	else
		ret = rk_crypto_hw_desc_init(&hw_info->hw_desc,
					     &alg_ctx->sg_tmp, NULL, alg_ctx->count);
	if (ret)
		return ret;

	lli_head = hw_info->hw_desc.lli_head;
	lli_tail = hw_info->hw_desc.lli_tail;

	lli_tail->dma_ctrl  = is_final ? LLI_DMA_CTRL_LAST : LLI_DMA_CTRL_PAUSE;
	lli_tail->dma_ctrl |= LLI_DMA_CTRL_SRC_DONE;
	lli_tail->next_addr = hw_info->hw_desc.lli_head_dma;

	if (ctx->calc_cnt == 0) {
		dma_ctl = CRYPTO_DMA_START;

		lli_head->user_define |= LLI_USER_CIPHER_START;
		lli_head->user_define |= LLI_USER_STRING_START;

		CRYPTO_WRITE(rk_dev, CRYPTO_DMA_LLI_ADDR, hw_info->hw_desc.lli_head_dma);
		CRYPTO_WRITE(rk_dev, CRYPTO_HASH_CTL,
			     (CRYPTO_HASH_ENABLE << CRYPTO_WRITE_MASK_SHIFT) |
			     CRYPTO_HASH_ENABLE);
	}

	if (is_final && alg_ctx->left_bytes == 0)
		lli_tail->user_define |= LLI_USER_STRING_LAST;

	CRYPTO_TRACE("dma_ctrl = %08x, user_define = %08x, len = %u",
		     lli_head->dma_ctrl, lli_head->user_define, alg_ctx->count);

	rk_crypto_dump_hw_desc(&hw_info->hw_desc);

	dma_wmb();

	/* enable crypto irq */
	CRYPTO_WRITE(rk_dev, CRYPTO_DMA_INT_EN, 0x7f);

	CRYPTO_WRITE(rk_dev, CRYPTO_DMA_CTL, dma_ctl | dma_ctl << CRYPTO_WRITE_MASK_SHIFT);

	return 0;
}

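/*
 * Busy-wait (up to RK_POLL_TIMEOUT_US) for the digest-valid flag, copy the
 * digest out of the DOUT registers, acknowledge the valid flag and finally
 * disable the hash block again.
 */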
static int rk_ahash_get_result(struct rk_crypto_dev *rk_dev,
			       uint8_t *data, uint32_t data_len)
{
	int ret = 0;
	u32 reg_ctrl = 0;

	ret = read_poll_timeout_atomic(CRYPTO_READ, reg_ctrl,
				       reg_ctrl & CRYPTO_HASH_IS_VALID,
				       RK_POLL_PERIOD_US,
				       RK_POLL_TIMEOUT_US, false,
				       rk_dev, CRYPTO_HASH_VALID);
	if (ret)
		goto exit;

	rk_crypto_read_regs(rk_dev, CRYPTO_HASH_DOUT_0, data, data_len);

	CRYPTO_WRITE(rk_dev, CRYPTO_HASH_VALID, CRYPTO_HASH_IS_VALID);

exit:
	clean_hash_setting(rk_dev);

	return ret;
}

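/*
 * Per-transform setup: wire the generic rk_alg_ctx ops to the v2 hash
 * implementations above, allocate a zeroed DMA32 page (hash_tmp,
 * apparently used by the ahash utils as a bounce buffer for unaligned
 * data), and advertise the export/import state size.
 */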
static int rk_cra_hash_init(struct crypto_tfm *tfm)
{
	struct rk_crypto_algt *algt =
		rk_ahash_get_algt(__crypto_ahash_cast(tfm));
	const char *alg_name = crypto_tfm_alg_name(tfm);
	struct rk_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct rk_crypto_dev *rk_dev = algt->rk_dev;
	struct rk_alg_ctx *alg_ctx = &ctx->algs_ctx;

	CRYPTO_TRACE();

	memset(ctx, 0x00, sizeof(*ctx));

	if (!rk_dev->request_crypto)
		return -EFAULT;

	alg_ctx->align_size     = RK_DMA_ALIGNMENT;

	alg_ctx->ops.start      = rk_ahash_start;
	alg_ctx->ops.update     = rk_ahash_crypto_rx;
	alg_ctx->ops.complete   = rk_ahash_crypto_complete;
	alg_ctx->ops.irq_handle = rk_crypto_irq_handle;

	alg_ctx->ops.hw_write_key  = write_key_reg;
	alg_ctx->ops.hw_init       = rk_hw_hash_init;
	alg_ctx->ops.hw_dma_start  = rk_ahash_dma_start;
	alg_ctx->ops.hw_get_result = rk_ahash_get_result;

	ctx->rk_dev   = rk_dev;
	ctx->hash_tmp = (u8 *)get_zeroed_page(GFP_KERNEL | GFP_DMA32);
	if (!ctx->hash_tmp) {
		dev_err(rk_dev->dev, "Can't get zeroed page for hash tmp.\n");
		return -ENOMEM;
	}

	rk_dev->request_crypto(rk_dev, alg_name);

	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm), sizeof(struct rk_ahash_rctx));

	algt->alg.hash.halg.statesize = sizeof(struct rk_ahash_expt_ctx);

	return 0;
}

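/* Undo rk_cra_hash_init(): release the scratch page and the crypto dev. */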
static void rk_cra_hash_exit(struct crypto_tfm *tfm)
{
	struct rk_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

	CRYPTO_TRACE();

	if (ctx->hash_tmp)
		free_page((unsigned long)ctx->hash_tmp);

	ctx->rk_dev->release_crypto(ctx->rk_dev, crypto_tfm_alg_name(tfm));
}

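/*
 * Algorithm descriptors exported to the core driver; the RK_HASH_ALGO_INIT
 * and RK_HMAC_ALGO_INIT macros fill in the common ahash boilerplate.
 */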
struct rk_crypto_algt rk_v2_ahash_md5    = RK_HASH_ALGO_INIT(MD5, md5);
struct rk_crypto_algt rk_v2_ahash_sha1   = RK_HASH_ALGO_INIT(SHA1, sha1);
struct rk_crypto_algt rk_v2_ahash_sha224 = RK_HASH_ALGO_INIT(SHA224, sha224);
struct rk_crypto_algt rk_v2_ahash_sha256 = RK_HASH_ALGO_INIT(SHA256, sha256);
struct rk_crypto_algt rk_v2_ahash_sha384 = RK_HASH_ALGO_INIT(SHA384, sha384);
struct rk_crypto_algt rk_v2_ahash_sha512 = RK_HASH_ALGO_INIT(SHA512, sha512);
struct rk_crypto_algt rk_v2_ahash_sm3    = RK_HASH_ALGO_INIT(SM3, sm3);

struct rk_crypto_algt rk_v2_hmac_md5     = RK_HMAC_ALGO_INIT(MD5, md5);
struct rk_crypto_algt rk_v2_hmac_sha1    = RK_HMAC_ALGO_INIT(SHA1, sha1);
struct rk_crypto_algt rk_v2_hmac_sha256  = RK_HMAC_ALGO_INIT(SHA256, sha256);
struct rk_crypto_algt rk_v2_hmac_sha512  = RK_HMAC_ALGO_INIT(SHA512, sha512);
struct rk_crypto_algt rk_v2_hmac_sm3     = RK_HMAC_ALGO_INIT(SM3, sm3);