xref: /OK3568_Linux_fs/kernel/drivers/crypto/rockchip/rk_crypto_ahash_utils.c (revision 4882a59341e53eb6f0b4789bf948001014eff981)
1*4882a593Smuzhiyun // SPDX-License-Identifier: GPL-2.0
2*4882a593Smuzhiyun /*
3*4882a593Smuzhiyun  * Rockchip crypto hash uitls
4*4882a593Smuzhiyun  *
5*4882a593Smuzhiyun  * Copyright (c) 2022, Rockchip Electronics Co., Ltd
6*4882a593Smuzhiyun  *
7*4882a593Smuzhiyun  * Author: Lin Jinhan <troy.lin@rock-chips.com>
8*4882a593Smuzhiyun  *
9*4882a593Smuzhiyun  */
10*4882a593Smuzhiyun 
11*4882a593Smuzhiyun #include "rk_crypto_core.h"
12*4882a593Smuzhiyun #include "rk_crypto_ahash_utils.h"
13*4882a593Smuzhiyun 
/*
 * Map enum hash_algo indices to crypto API algorithm names.
 * Used to instantiate software fallback tfms (see rk_ahash_fallback_digest).
 * Indices not listed here are implicitly NULL.
 */
static const char * const hash_algo2name[] = {
	[HASH_ALGO_MD5]    = "md5",
	[HASH_ALGO_SHA1]   = "sha1",
	[HASH_ALGO_SHA224] = "sha224",
	[HASH_ALGO_SHA256] = "sha256",
	[HASH_ALGO_SHA384] = "sha384",
	[HASH_ALGO_SHA512] = "sha512",
	[HASH_ALGO_SM3]    = "sm3",
};
23*4882a593Smuzhiyun 
rk_alg_ctx_clear(struct rk_alg_ctx * alg_ctx)24*4882a593Smuzhiyun static void rk_alg_ctx_clear(struct rk_alg_ctx *alg_ctx)
25*4882a593Smuzhiyun {
26*4882a593Smuzhiyun 	alg_ctx->total	    = 0;
27*4882a593Smuzhiyun 	alg_ctx->left_bytes = 0;
28*4882a593Smuzhiyun 	alg_ctx->count      = 0;
29*4882a593Smuzhiyun 	alg_ctx->sg_src     = 0;
30*4882a593Smuzhiyun 	alg_ctx->req_src    = 0;
31*4882a593Smuzhiyun 	alg_ctx->src_nents  = 0;
32*4882a593Smuzhiyun }
33*4882a593Smuzhiyun 
rk_ahash_ctx_clear(struct rk_ahash_ctx * ctx)34*4882a593Smuzhiyun static void rk_ahash_ctx_clear(struct rk_ahash_ctx *ctx)
35*4882a593Smuzhiyun {
36*4882a593Smuzhiyun 	rk_alg_ctx_clear(&ctx->algs_ctx);
37*4882a593Smuzhiyun 
38*4882a593Smuzhiyun 	memset(ctx->hash_tmp, 0x00, RK_DMA_ALIGNMENT);
39*4882a593Smuzhiyun 	memset(ctx->lastc, 0x00, sizeof(ctx->lastc));
40*4882a593Smuzhiyun 
41*4882a593Smuzhiyun 	ctx->hash_tmp_len = 0;
42*4882a593Smuzhiyun 	ctx->calc_cnt     = 0;
43*4882a593Smuzhiyun 	ctx->lastc_len    = 0;
44*4882a593Smuzhiyun }
45*4882a593Smuzhiyun 
rk_ahash_ctx_cast(struct rk_crypto_dev * rk_dev)46*4882a593Smuzhiyun struct rk_ahash_ctx *rk_ahash_ctx_cast(struct rk_crypto_dev *rk_dev)
47*4882a593Smuzhiyun {
48*4882a593Smuzhiyun 	struct ahash_request *req = ahash_request_cast(rk_dev->async_req);
49*4882a593Smuzhiyun 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
50*4882a593Smuzhiyun 
51*4882a593Smuzhiyun 	return crypto_ahash_ctx(tfm);
52*4882a593Smuzhiyun }
53*4882a593Smuzhiyun 
rk_ahash_alg_ctx(struct rk_crypto_dev * rk_dev)54*4882a593Smuzhiyun struct rk_alg_ctx *rk_ahash_alg_ctx(struct rk_crypto_dev *rk_dev)
55*4882a593Smuzhiyun {
56*4882a593Smuzhiyun 	return &(rk_ahash_ctx_cast(rk_dev))->algs_ctx;
57*4882a593Smuzhiyun }
58*4882a593Smuzhiyun 
rk_ahash_get_algt(struct crypto_ahash * tfm)59*4882a593Smuzhiyun struct rk_crypto_algt *rk_ahash_get_algt(struct crypto_ahash *tfm)
60*4882a593Smuzhiyun {
61*4882a593Smuzhiyun 	struct ahash_alg *alg = __crypto_ahash_alg(tfm->base.__crt_alg);
62*4882a593Smuzhiyun 
63*4882a593Smuzhiyun 	return container_of(alg, struct rk_crypto_algt, alg.hash);
64*4882a593Smuzhiyun }
65*4882a593Smuzhiyun 
/*
 * rk_ahash_set_data_start - load the next chunk of source data and,
 * on success, start the hardware DMA for it.
 *
 * Returns 0 on success or the error from load_data/hw_dma_start.
 */
static int rk_ahash_set_data_start(struct rk_crypto_dev *rk_dev, uint32_t flag)
{
	struct rk_alg_ctx *alg_ctx = rk_ahash_alg_ctx(rk_dev);
	int ret;

	CRYPTO_TRACE();

	ret = rk_dev->load_data(rk_dev, alg_ctx->sg_src, alg_ctx->sg_dst);
	if (ret)
		return ret;

	return alg_ctx->ops.hw_dma_start(rk_dev, flag);
}
79*4882a593Smuzhiyun 
/*
 * rk_calc_lastc_new_len - how many trailing bytes of this update must
 * be held back so the amount fed to hardware stays a non-zero multiple
 * of RK_DMA_ALIGNMENT.
 *
 * @nbytes:  bytes arriving in this update
 * @old_len: bytes already buffered from earlier updates
 */
static u32 rk_calc_lastc_new_len(u32 nbytes, u32 old_len)
{
	u32 total = old_len + nbytes;
	u32 rem;

	/* everything still fits in one alignment unit: keep it all */
	if (total <= RK_DMA_ALIGNMENT)
		return nbytes;

	rem = total % RK_DMA_ALIGNMENT;

	/* on an exact boundary keep one full unit back for the final step */
	return rem ? rem : RK_DMA_ALIGNMENT;
}
92*4882a593Smuzhiyun 
/*
 * rk_ahash_fallback_digest - compute a digest with a software fallback tfm.
 *
 * @alg_name: crypto API name of the hash ("sha256", "hmac(sha256)", ...)
 * @is_hmac:  when true, @key/@key_len are loaded via setkey first
 * @key:      HMAC key (ignored unless @is_hmac)
 * @key_len:  HMAC key length in bytes
 * @msg:      message to hash (may be NULL when @msg_len is 0)
 * @msg_len:  message length in bytes
 * @digest:   output buffer, crypto_ahash_digestsize() bytes
 *
 * Runs synchronously via crypto_wait_req(). Returns 0 on success or a
 * negative errno.
 *
 * Fixes vs. original: the return value of crypto_ahash_setkey() is now
 * checked (a failed key load previously produced a silently wrong
 * digest), crypto_init_wait() replaces open-coded completion init, and
 * a redundant goto was dropped.
 */
static int rk_ahash_fallback_digest(const char *alg_name, bool is_hmac,
				    const u8 *key, u32 key_len,
				    const u8 *msg, u32 msg_len,
				    u8 *digest)
{
	struct crypto_ahash *ahash_tfm;
	struct ahash_request *req;
	struct crypto_wait wait;
	struct scatterlist sg;
	int ret;

	CRYPTO_TRACE("%s, is_hmac = %d, key_len = %u, msg_len = %u",
		     alg_name, is_hmac, key_len, msg_len);

	ahash_tfm = crypto_alloc_ahash(alg_name, 0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ahash_tfm))
		return PTR_ERR(ahash_tfm);

	req = ahash_request_alloc(ahash_tfm, GFP_KERNEL);
	if (!req) {
		crypto_free_ahash(ahash_tfm);
		return -ENOMEM;
	}

	crypto_init_wait(&wait);

	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   crypto_req_done, &wait);

	crypto_ahash_clear_flags(ahash_tfm, ~0);

	sg_init_one(&sg, msg, msg_len);
	ahash_request_set_crypt(req, &sg, digest, msg_len);

	if (is_hmac) {
		ret = crypto_ahash_setkey(ahash_tfm, key, key_len);
		if (ret) {
			CRYPTO_MSG("setkey failed, ret = %d", ret);
			goto exit;
		}
	}

	ret = crypto_wait_req(crypto_ahash_digest(req), &wait);
	if (ret)
		CRYPTO_MSG("digest failed, ret = %d", ret);

exit:
	ahash_request_free(req);
	crypto_free_ahash(ahash_tfm);

	return ret;
}
142*4882a593Smuzhiyun 
rk_ahash_get_zero_result(struct ahash_request * req)143*4882a593Smuzhiyun static int rk_ahash_get_zero_result(struct ahash_request *req)
144*4882a593Smuzhiyun {
145*4882a593Smuzhiyun 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
146*4882a593Smuzhiyun 	struct rk_crypto_algt *algt = rk_ahash_get_algt(tfm);
147*4882a593Smuzhiyun 	struct rk_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
148*4882a593Smuzhiyun 
149*4882a593Smuzhiyun 	return rk_ahash_fallback_digest(crypto_ahash_alg_name(tfm),
150*4882a593Smuzhiyun 					algt->type == ALG_TYPE_HMAC,
151*4882a593Smuzhiyun 					ctx->authkey, ctx->authkey_len,
152*4882a593Smuzhiyun 					NULL, 0, req->result);
153*4882a593Smuzhiyun }
154*4882a593Smuzhiyun 
rk_ahash_hmac_setkey(struct crypto_ahash * tfm,const u8 * key,unsigned int keylen)155*4882a593Smuzhiyun int rk_ahash_hmac_setkey(struct crypto_ahash *tfm, const u8 *key, unsigned int keylen)
156*4882a593Smuzhiyun {
157*4882a593Smuzhiyun 	unsigned int blocksize = crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
158*4882a593Smuzhiyun 	struct rk_crypto_algt *algt = rk_ahash_get_algt(tfm);
159*4882a593Smuzhiyun 	struct rk_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
160*4882a593Smuzhiyun 	const char *alg_name;
161*4882a593Smuzhiyun 	int ret = 0;
162*4882a593Smuzhiyun 
163*4882a593Smuzhiyun 	CRYPTO_MSG();
164*4882a593Smuzhiyun 
165*4882a593Smuzhiyun 	if (algt->algo >= ARRAY_SIZE(hash_algo2name)) {
166*4882a593Smuzhiyun 		CRYPTO_MSG("hash algo %d invalid\n", algt->algo);
167*4882a593Smuzhiyun 		return -EINVAL;
168*4882a593Smuzhiyun 	}
169*4882a593Smuzhiyun 
170*4882a593Smuzhiyun 	memset(ctx->authkey, 0, sizeof(ctx->authkey));
171*4882a593Smuzhiyun 
172*4882a593Smuzhiyun 	if (keylen <= blocksize) {
173*4882a593Smuzhiyun 		memcpy(ctx->authkey, key, keylen);
174*4882a593Smuzhiyun 		ctx->authkey_len = keylen;
175*4882a593Smuzhiyun 		goto exit;
176*4882a593Smuzhiyun 	}
177*4882a593Smuzhiyun 
178*4882a593Smuzhiyun 	alg_name = hash_algo2name[algt->algo];
179*4882a593Smuzhiyun 
180*4882a593Smuzhiyun 	CRYPTO_TRACE("calc key digest %s", alg_name);
181*4882a593Smuzhiyun 
182*4882a593Smuzhiyun 	ret = rk_ahash_fallback_digest(alg_name, false, NULL, 0, key, keylen,
183*4882a593Smuzhiyun 				       ctx->authkey);
184*4882a593Smuzhiyun 	if (ret) {
185*4882a593Smuzhiyun 		CRYPTO_MSG("rk_ahash_fallback_digest error ret = %d\n", ret);
186*4882a593Smuzhiyun 		goto exit;
187*4882a593Smuzhiyun 	}
188*4882a593Smuzhiyun 
189*4882a593Smuzhiyun 	ctx->authkey_len = crypto_ahash_digestsize(tfm);
190*4882a593Smuzhiyun exit:
191*4882a593Smuzhiyun 	return ret;
192*4882a593Smuzhiyun }
193*4882a593Smuzhiyun 
rk_ahash_init(struct ahash_request * req)194*4882a593Smuzhiyun int rk_ahash_init(struct ahash_request *req)
195*4882a593Smuzhiyun {
196*4882a593Smuzhiyun 	struct rk_ahash_rctx *rctx = ahash_request_ctx(req);
197*4882a593Smuzhiyun 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
198*4882a593Smuzhiyun 	struct rk_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
199*4882a593Smuzhiyun 
200*4882a593Smuzhiyun 	CRYPTO_TRACE();
201*4882a593Smuzhiyun 
202*4882a593Smuzhiyun 	memset(rctx, 0x00, sizeof(*rctx));
203*4882a593Smuzhiyun 	rk_ahash_ctx_clear(ctx);
204*4882a593Smuzhiyun 
205*4882a593Smuzhiyun 	return 0;
206*4882a593Smuzhiyun }
207*4882a593Smuzhiyun 
rk_ahash_update(struct ahash_request * req)208*4882a593Smuzhiyun int rk_ahash_update(struct ahash_request *req)
209*4882a593Smuzhiyun {
210*4882a593Smuzhiyun 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
211*4882a593Smuzhiyun 	struct rk_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
212*4882a593Smuzhiyun 	struct rk_ahash_rctx *rctx = ahash_request_ctx(req);
213*4882a593Smuzhiyun 	struct rk_crypto_dev *rk_dev = ctx->rk_dev;
214*4882a593Smuzhiyun 
215*4882a593Smuzhiyun 	CRYPTO_TRACE("nbytes = %u", req->nbytes);
216*4882a593Smuzhiyun 
217*4882a593Smuzhiyun 	memset(rctx, 0x00, sizeof(*rctx));
218*4882a593Smuzhiyun 
219*4882a593Smuzhiyun 	rctx->flag = RK_FLAG_UPDATE;
220*4882a593Smuzhiyun 
221*4882a593Smuzhiyun 	return rk_dev->enqueue(rk_dev, &req->base);
222*4882a593Smuzhiyun }
223*4882a593Smuzhiyun 
rk_ahash_final(struct ahash_request * req)224*4882a593Smuzhiyun int rk_ahash_final(struct ahash_request *req)
225*4882a593Smuzhiyun {
226*4882a593Smuzhiyun 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
227*4882a593Smuzhiyun 	struct rk_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
228*4882a593Smuzhiyun 	struct rk_ahash_rctx *rctx = ahash_request_ctx(req);
229*4882a593Smuzhiyun 	struct rk_crypto_dev *rk_dev = ctx->rk_dev;
230*4882a593Smuzhiyun 
231*4882a593Smuzhiyun 	CRYPTO_TRACE();
232*4882a593Smuzhiyun 
233*4882a593Smuzhiyun 	memset(rctx, 0x00, sizeof(*rctx));
234*4882a593Smuzhiyun 
235*4882a593Smuzhiyun 	rctx->flag = RK_FLAG_FINAL;
236*4882a593Smuzhiyun 
237*4882a593Smuzhiyun 	/* use fallback hash */
238*4882a593Smuzhiyun 	if (ctx->calc_cnt == 0 &&
239*4882a593Smuzhiyun 	    ctx->hash_tmp_len == 0 &&
240*4882a593Smuzhiyun 	    ctx->lastc_len == 0) {
241*4882a593Smuzhiyun 		CRYPTO_TRACE("use fallback hash");
242*4882a593Smuzhiyun 		return rk_ahash_get_zero_result(req);
243*4882a593Smuzhiyun 	}
244*4882a593Smuzhiyun 
245*4882a593Smuzhiyun 	return rk_dev->enqueue(rk_dev, &req->base);
246*4882a593Smuzhiyun }
247*4882a593Smuzhiyun 
rk_ahash_finup(struct ahash_request * req)248*4882a593Smuzhiyun int rk_ahash_finup(struct ahash_request *req)
249*4882a593Smuzhiyun {
250*4882a593Smuzhiyun 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
251*4882a593Smuzhiyun 	struct rk_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
252*4882a593Smuzhiyun 	struct rk_ahash_rctx *rctx = ahash_request_ctx(req);
253*4882a593Smuzhiyun 	struct rk_crypto_dev *rk_dev = ctx->rk_dev;
254*4882a593Smuzhiyun 
255*4882a593Smuzhiyun 	CRYPTO_TRACE("nbytes = %u", req->nbytes);
256*4882a593Smuzhiyun 
257*4882a593Smuzhiyun 	memset(rctx, 0x00, sizeof(*rctx));
258*4882a593Smuzhiyun 
259*4882a593Smuzhiyun 	rctx->flag = RK_FLAG_UPDATE | RK_FLAG_FINAL;
260*4882a593Smuzhiyun 
261*4882a593Smuzhiyun 	/* use fallback hash */
262*4882a593Smuzhiyun 	if (req->nbytes == 0 &&
263*4882a593Smuzhiyun 	    ctx->calc_cnt == 0 &&
264*4882a593Smuzhiyun 	    ctx->hash_tmp_len == 0 &&
265*4882a593Smuzhiyun 	    ctx->lastc_len == 0) {
266*4882a593Smuzhiyun 		CRYPTO_TRACE("use fallback hash");
267*4882a593Smuzhiyun 		return rk_ahash_get_zero_result(req);
268*4882a593Smuzhiyun 	}
269*4882a593Smuzhiyun 
270*4882a593Smuzhiyun 	return rk_dev->enqueue(rk_dev, &req->base);
271*4882a593Smuzhiyun }
272*4882a593Smuzhiyun 
rk_ahash_digest(struct ahash_request * req)273*4882a593Smuzhiyun int rk_ahash_digest(struct ahash_request *req)
274*4882a593Smuzhiyun {
275*4882a593Smuzhiyun 	CRYPTO_TRACE("calc data %u bytes.", req->nbytes);
276*4882a593Smuzhiyun 
277*4882a593Smuzhiyun 	return rk_ahash_init(req) ?: rk_ahash_finup(req);
278*4882a593Smuzhiyun }
279*4882a593Smuzhiyun 
/*
 * rk_ahash_start - prepare the queued request's data and kick off the
 * hardware hash.
 *
 * Data flow: bytes held back from the previous round (lastc) are folded
 * into hash_tmp, then the tail of the new data that would break
 * RK_DMA_ALIGNMENT is carved off into lastc again (update-only path).
 * If hash_tmp holds carried-over bytes, they are chained in front of
 * req->src via a local scatterlist so hardware sees one stream.
 *
 * Returns 0 on success (including the "nothing to do" paths) or a
 * negative errno.
 */
int rk_ahash_start(struct rk_crypto_dev *rk_dev)
{
	struct ahash_request *req = ahash_request_cast(rk_dev->async_req);
	struct rk_alg_ctx *alg_ctx = rk_ahash_alg_ctx(rk_dev);
	struct rk_ahash_ctx *ctx = rk_ahash_ctx_cast(rk_dev);
	struct rk_ahash_rctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct rk_crypto_algt *algt = rk_ahash_get_algt(tfm);
	struct scatterlist *src_sg;
	unsigned int nbytes;
	int ret = 0;

	CRYPTO_TRACE("origin: old_len = %u, new_len = %u, nbytes = %u, flag = %d",
		     ctx->hash_tmp_len, ctx->lastc_len, req->nbytes, rctx->flag);

	/* update 0Byte do nothing */
	if (req->nbytes == 0 && !(rctx->flag & RK_FLAG_FINAL))
		goto no_calc;

	if (ctx->lastc_len) {
		/* move lastc saved last time to the head of this calculation */
		memcpy(ctx->hash_tmp + ctx->hash_tmp_len, ctx->lastc, ctx->lastc_len);
		ctx->hash_tmp_len = ctx->hash_tmp_len + ctx->lastc_len;
		ctx->lastc_len = 0;
	}

	CRYPTO_TRACE("hash_tmp_len = %u", ctx->hash_tmp_len);

	/* final request no need to save lastc_new */
	if ((rctx->flag & RK_FLAG_UPDATE) && (rctx->flag & RK_FLAG_FINAL)) {
		/* finup: everything (carry-over + new data) goes to hardware */
		nbytes = req->nbytes + ctx->hash_tmp_len;

		CRYPTO_TRACE("finup %u bytes", nbytes);
	} else if (rctx->flag & RK_FLAG_UPDATE) {
		/* update: hold back the unaligned tail for the next round */
		ctx->lastc_len = rk_calc_lastc_new_len(req->nbytes, ctx->hash_tmp_len);

		CRYPTO_TRACE("nents = %u, ctx->lastc_len = %u, offset = %u",
			sg_nents_for_len(req->src, req->nbytes), ctx->lastc_len,
			req->nbytes - ctx->lastc_len);

		/* copy the held-back tail out of the scatterlist into lastc */
		if (!sg_pcopy_to_buffer(req->src, sg_nents_for_len(req->src, req->nbytes),
			  ctx->lastc, ctx->lastc_len, req->nbytes - ctx->lastc_len)) {
			ret = -EINVAL;
			goto exit;
		}

		nbytes = ctx->hash_tmp_len + req->nbytes - ctx->lastc_len;

		/* not enough data */
		if (nbytes < RK_DMA_ALIGNMENT) {
			CRYPTO_TRACE("nbytes = %u, not enough data", nbytes);
			/* stash everything in hash_tmp and skip hardware */
			memcpy(ctx->hash_tmp + ctx->hash_tmp_len,
			       ctx->lastc, ctx->lastc_len);
			ctx->hash_tmp_len = ctx->hash_tmp_len + ctx->lastc_len;
			ctx->lastc_len = 0;
			goto no_calc;
		}

		CRYPTO_TRACE("update nbytes = %u", nbytes);
	} else {
		/* final just calc lastc_old */
		nbytes = ctx->hash_tmp_len;

		CRYPTO_TRACE("final nbytes = %u", nbytes);
	}

	if (ctx->hash_tmp_len) {
		/* Concatenate old data to the header */
		sg_init_table(ctx->hash_sg, ARRAY_SIZE(ctx->hash_sg));
		sg_set_buf(ctx->hash_sg, ctx->hash_tmp, ctx->hash_tmp_len);

		/* dma-buf fd sources need an explicit mapping of hash_sg */
		if (rk_crypto_check_dmafd(req->src, sg_nents_for_len(req->src, req->nbytes))) {
			CRYPTO_TRACE("is hash dmafd");
			if (!dma_map_sg(rk_dev->dev, &ctx->hash_sg[0], 1, DMA_TO_DEVICE)) {
				dev_err(rk_dev->dev, "[%s:%d] dma_map_sg(hash_sg)  error\n",
					__func__, __LINE__);
				ret = -ENOMEM;
				goto exit;
			}
			/* remembered so rk_ahash_crypto_rx() can unmap it */
			ctx->hash_tmp_mapped = true;
		}

		sg_chain(ctx->hash_sg, ARRAY_SIZE(ctx->hash_sg), req->src);

		src_sg = &ctx->hash_sg[0];
		ctx->hash_tmp_len = 0;
	} else {
		src_sg = req->src;
	}

	alg_ctx->total      = nbytes;
	alg_ctx->left_bytes = nbytes;
	alg_ctx->sg_src     = src_sg;
	alg_ctx->req_src    = src_sg;
	alg_ctx->src_nents  = sg_nents_for_len(src_sg, nbytes);

	CRYPTO_TRACE("adjust: old_len = %u, new_len = %u, nbytes = %u",
		     ctx->hash_tmp_len, ctx->lastc_len, nbytes);

	if (nbytes) {
		/* first chunk of this computation: program algo/mode */
		if (ctx->calc_cnt == 0)
			alg_ctx->ops.hw_init(rk_dev, algt->algo, algt->type);

		/* flush all 64byte key buffer for hmac */
		alg_ctx->ops.hw_write_key(ctx->rk_dev, ctx->authkey, sizeof(ctx->authkey));
		ret = rk_ahash_set_data_start(rk_dev, rctx->flag);
	}
exit:
	return ret;
no_calc:
	CRYPTO_TRACE("no calc");
	rk_alg_ctx_clear(alg_ctx);

	return 0;
}
395*4882a593Smuzhiyun 
/*
 * rk_ahash_crypto_rx - completion handler for one DMA chunk.
 *
 * Unloads the finished transfer, accounts the processed bytes, then
 * either starts the next chunk (data remaining) or — on a FINAL
 * request — reads the digest out of the hardware into req->result.
 *
 * Returns 0 on success or a negative errno.
 */
int rk_ahash_crypto_rx(struct rk_crypto_dev *rk_dev)
{
	int err = 0;
	struct ahash_request *req = ahash_request_cast(rk_dev->async_req);
	struct rk_alg_ctx *alg_ctx = rk_ahash_alg_ctx(rk_dev);
	struct rk_ahash_rctx *rctx = ahash_request_ctx(req);
	struct rk_ahash_ctx *ctx = rk_ahash_ctx_cast(rk_dev);

	CRYPTO_TRACE("left bytes = %u, flag = %d", alg_ctx->left_bytes, rctx->flag);

	err = rk_dev->unload_data(rk_dev);
	if (err)
		goto out_rx;

	/* track total bytes the hardware has consumed this computation */
	ctx->calc_cnt += alg_ctx->count;

	if (alg_ctx->left_bytes) {
		/* more data pending: advance the scatterlist and restart DMA */
		if (alg_ctx->aligned) {
			if (sg_is_last(alg_ctx->sg_src)) {
				/* scatterlist exhausted while bytes remain */
				dev_warn(rk_dev->dev, "[%s:%d], Lack of data\n",
					 __func__, __LINE__);
				err = -ENOMEM;
				goto out_rx;
			}
			alg_ctx->sg_src = sg_next(alg_ctx->sg_src);
		}
		err = rk_ahash_set_data_start(rk_dev, rctx->flag);
	} else {
		/*
		 * it will take some time to process data after last dma
		 * transmission.
		 */
		struct crypto_ahash *tfm;

		/* undo the mapping rk_ahash_start() made for dma-buf sources */
		if (ctx->hash_tmp_mapped)
			dma_unmap_sg(rk_dev->dev, &ctx->hash_sg[0], 1, DMA_TO_DEVICE);

		/* only final will get result */
		if (!(rctx->flag & RK_FLAG_FINAL))
			goto out_rx;

		if (!req->result) {
			err = -EINVAL;
			goto out_rx;
		}

		tfm = crypto_ahash_reqtfm(req);

		err = alg_ctx->ops.hw_get_result(rk_dev, req->result,
						 crypto_ahash_digestsize(tfm));
	}

out_rx:
	return err;
}
451