xref: /OK3568_Linux_fs/kernel/drivers/crypto/allwinner/sun8i-ss/sun8i-ss-hash.c (revision 4882a59341e53eb6f0b4789bf948001014eff981)
1*4882a593Smuzhiyun // SPDX-License-Identifier: GPL-2.0
2*4882a593Smuzhiyun /*
3*4882a593Smuzhiyun  * sun8i-ss-hash.c - hardware cryptographic offloader for
4*4882a593Smuzhiyun  * Allwinner A80/A83T SoC
5*4882a593Smuzhiyun  *
6*4882a593Smuzhiyun  * Copyright (C) 2015-2020 Corentin Labbe <clabbe@baylibre.com>
7*4882a593Smuzhiyun  *
8*4882a593Smuzhiyun  * This file add support for MD5 and SHA1/SHA224/SHA256.
9*4882a593Smuzhiyun  *
10*4882a593Smuzhiyun  * You could find the datasheet in Documentation/arm/sunxi.rst
11*4882a593Smuzhiyun  */
12*4882a593Smuzhiyun #include <linux/bottom_half.h>
13*4882a593Smuzhiyun #include <linux/dma-mapping.h>
14*4882a593Smuzhiyun #include <linux/pm_runtime.h>
15*4882a593Smuzhiyun #include <linux/scatterlist.h>
16*4882a593Smuzhiyun #include <crypto/internal/hash.h>
17*4882a593Smuzhiyun #include <crypto/sha.h>
18*4882a593Smuzhiyun #include <crypto/md5.h>
19*4882a593Smuzhiyun #include "sun8i-ss.h"
20*4882a593Smuzhiyun 
sun8i_ss_hash_crainit(struct crypto_tfm * tfm)21*4882a593Smuzhiyun int sun8i_ss_hash_crainit(struct crypto_tfm *tfm)
22*4882a593Smuzhiyun {
23*4882a593Smuzhiyun 	struct sun8i_ss_hash_tfm_ctx *op = crypto_tfm_ctx(tfm);
24*4882a593Smuzhiyun 	struct ahash_alg *alg = __crypto_ahash_alg(tfm->__crt_alg);
25*4882a593Smuzhiyun 	struct sun8i_ss_alg_template *algt;
26*4882a593Smuzhiyun 	int err;
27*4882a593Smuzhiyun 
28*4882a593Smuzhiyun 	memset(op, 0, sizeof(struct sun8i_ss_hash_tfm_ctx));
29*4882a593Smuzhiyun 
30*4882a593Smuzhiyun 	algt = container_of(alg, struct sun8i_ss_alg_template, alg.hash);
31*4882a593Smuzhiyun 	op->ss = algt->ss;
32*4882a593Smuzhiyun 
33*4882a593Smuzhiyun 	op->enginectx.op.do_one_request = sun8i_ss_hash_run;
34*4882a593Smuzhiyun 	op->enginectx.op.prepare_request = NULL;
35*4882a593Smuzhiyun 	op->enginectx.op.unprepare_request = NULL;
36*4882a593Smuzhiyun 
37*4882a593Smuzhiyun 	/* FALLBACK */
38*4882a593Smuzhiyun 	op->fallback_tfm = crypto_alloc_ahash(crypto_tfm_alg_name(tfm), 0,
39*4882a593Smuzhiyun 					      CRYPTO_ALG_NEED_FALLBACK);
40*4882a593Smuzhiyun 	if (IS_ERR(op->fallback_tfm)) {
41*4882a593Smuzhiyun 		dev_err(algt->ss->dev, "Fallback driver could no be loaded\n");
42*4882a593Smuzhiyun 		return PTR_ERR(op->fallback_tfm);
43*4882a593Smuzhiyun 	}
44*4882a593Smuzhiyun 
45*4882a593Smuzhiyun 	if (algt->alg.hash.halg.statesize < crypto_ahash_statesize(op->fallback_tfm))
46*4882a593Smuzhiyun 		algt->alg.hash.halg.statesize = crypto_ahash_statesize(op->fallback_tfm);
47*4882a593Smuzhiyun 
48*4882a593Smuzhiyun 	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
49*4882a593Smuzhiyun 				 sizeof(struct sun8i_ss_hash_reqctx) +
50*4882a593Smuzhiyun 				 crypto_ahash_reqsize(op->fallback_tfm));
51*4882a593Smuzhiyun 
52*4882a593Smuzhiyun 	dev_info(op->ss->dev, "Fallback for %s is %s\n",
53*4882a593Smuzhiyun 		 crypto_tfm_alg_driver_name(tfm),
54*4882a593Smuzhiyun 		 crypto_tfm_alg_driver_name(&op->fallback_tfm->base));
55*4882a593Smuzhiyun 	err = pm_runtime_get_sync(op->ss->dev);
56*4882a593Smuzhiyun 	if (err < 0)
57*4882a593Smuzhiyun 		goto error_pm;
58*4882a593Smuzhiyun 	return 0;
59*4882a593Smuzhiyun error_pm:
60*4882a593Smuzhiyun 	pm_runtime_put_noidle(op->ss->dev);
61*4882a593Smuzhiyun 	crypto_free_ahash(op->fallback_tfm);
62*4882a593Smuzhiyun 	return err;
63*4882a593Smuzhiyun }
64*4882a593Smuzhiyun 
sun8i_ss_hash_craexit(struct crypto_tfm * tfm)65*4882a593Smuzhiyun void sun8i_ss_hash_craexit(struct crypto_tfm *tfm)
66*4882a593Smuzhiyun {
67*4882a593Smuzhiyun 	struct sun8i_ss_hash_tfm_ctx *tfmctx = crypto_tfm_ctx(tfm);
68*4882a593Smuzhiyun 
69*4882a593Smuzhiyun 	crypto_free_ahash(tfmctx->fallback_tfm);
70*4882a593Smuzhiyun 	pm_runtime_put_sync_suspend(tfmctx->ss->dev);
71*4882a593Smuzhiyun }
72*4882a593Smuzhiyun 
sun8i_ss_hash_init(struct ahash_request * areq)73*4882a593Smuzhiyun int sun8i_ss_hash_init(struct ahash_request *areq)
74*4882a593Smuzhiyun {
75*4882a593Smuzhiyun 	struct sun8i_ss_hash_reqctx *rctx = ahash_request_ctx(areq);
76*4882a593Smuzhiyun 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
77*4882a593Smuzhiyun 	struct sun8i_ss_hash_tfm_ctx *tfmctx = crypto_ahash_ctx(tfm);
78*4882a593Smuzhiyun 
79*4882a593Smuzhiyun 	memset(rctx, 0, sizeof(struct sun8i_ss_hash_reqctx));
80*4882a593Smuzhiyun 
81*4882a593Smuzhiyun 	ahash_request_set_tfm(&rctx->fallback_req, tfmctx->fallback_tfm);
82*4882a593Smuzhiyun 	rctx->fallback_req.base.flags = areq->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
83*4882a593Smuzhiyun 
84*4882a593Smuzhiyun 	return crypto_ahash_init(&rctx->fallback_req);
85*4882a593Smuzhiyun }
86*4882a593Smuzhiyun 
sun8i_ss_hash_export(struct ahash_request * areq,void * out)87*4882a593Smuzhiyun int sun8i_ss_hash_export(struct ahash_request *areq, void *out)
88*4882a593Smuzhiyun {
89*4882a593Smuzhiyun 	struct sun8i_ss_hash_reqctx *rctx = ahash_request_ctx(areq);
90*4882a593Smuzhiyun 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
91*4882a593Smuzhiyun 	struct sun8i_ss_hash_tfm_ctx *tfmctx = crypto_ahash_ctx(tfm);
92*4882a593Smuzhiyun 
93*4882a593Smuzhiyun 	ahash_request_set_tfm(&rctx->fallback_req, tfmctx->fallback_tfm);
94*4882a593Smuzhiyun 	rctx->fallback_req.base.flags = areq->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
95*4882a593Smuzhiyun 
96*4882a593Smuzhiyun 	return crypto_ahash_export(&rctx->fallback_req, out);
97*4882a593Smuzhiyun }
98*4882a593Smuzhiyun 
sun8i_ss_hash_import(struct ahash_request * areq,const void * in)99*4882a593Smuzhiyun int sun8i_ss_hash_import(struct ahash_request *areq, const void *in)
100*4882a593Smuzhiyun {
101*4882a593Smuzhiyun 	struct sun8i_ss_hash_reqctx *rctx = ahash_request_ctx(areq);
102*4882a593Smuzhiyun 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
103*4882a593Smuzhiyun 	struct sun8i_ss_hash_tfm_ctx *tfmctx = crypto_ahash_ctx(tfm);
104*4882a593Smuzhiyun 
105*4882a593Smuzhiyun 	ahash_request_set_tfm(&rctx->fallback_req, tfmctx->fallback_tfm);
106*4882a593Smuzhiyun 	rctx->fallback_req.base.flags = areq->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
107*4882a593Smuzhiyun 
108*4882a593Smuzhiyun 	return crypto_ahash_import(&rctx->fallback_req, in);
109*4882a593Smuzhiyun }
110*4882a593Smuzhiyun 
sun8i_ss_hash_final(struct ahash_request * areq)111*4882a593Smuzhiyun int sun8i_ss_hash_final(struct ahash_request *areq)
112*4882a593Smuzhiyun {
113*4882a593Smuzhiyun 	struct sun8i_ss_hash_reqctx *rctx = ahash_request_ctx(areq);
114*4882a593Smuzhiyun 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
115*4882a593Smuzhiyun 	struct sun8i_ss_hash_tfm_ctx *tfmctx = crypto_ahash_ctx(tfm);
116*4882a593Smuzhiyun #ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
117*4882a593Smuzhiyun 	struct ahash_alg *alg = __crypto_ahash_alg(tfm->base.__crt_alg);
118*4882a593Smuzhiyun 	struct sun8i_ss_alg_template *algt;
119*4882a593Smuzhiyun #endif
120*4882a593Smuzhiyun 
121*4882a593Smuzhiyun 	ahash_request_set_tfm(&rctx->fallback_req, tfmctx->fallback_tfm);
122*4882a593Smuzhiyun 	rctx->fallback_req.base.flags = areq->base.flags &
123*4882a593Smuzhiyun 					CRYPTO_TFM_REQ_MAY_SLEEP;
124*4882a593Smuzhiyun 	rctx->fallback_req.result = areq->result;
125*4882a593Smuzhiyun 
126*4882a593Smuzhiyun #ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
127*4882a593Smuzhiyun 	algt = container_of(alg, struct sun8i_ss_alg_template, alg.hash);
128*4882a593Smuzhiyun 	algt->stat_fb++;
129*4882a593Smuzhiyun #endif
130*4882a593Smuzhiyun 
131*4882a593Smuzhiyun 	return crypto_ahash_final(&rctx->fallback_req);
132*4882a593Smuzhiyun }
133*4882a593Smuzhiyun 
sun8i_ss_hash_update(struct ahash_request * areq)134*4882a593Smuzhiyun int sun8i_ss_hash_update(struct ahash_request *areq)
135*4882a593Smuzhiyun {
136*4882a593Smuzhiyun 	struct sun8i_ss_hash_reqctx *rctx = ahash_request_ctx(areq);
137*4882a593Smuzhiyun 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
138*4882a593Smuzhiyun 	struct sun8i_ss_hash_tfm_ctx *tfmctx = crypto_ahash_ctx(tfm);
139*4882a593Smuzhiyun 
140*4882a593Smuzhiyun 	ahash_request_set_tfm(&rctx->fallback_req, tfmctx->fallback_tfm);
141*4882a593Smuzhiyun 	rctx->fallback_req.base.flags = areq->base.flags &
142*4882a593Smuzhiyun 					CRYPTO_TFM_REQ_MAY_SLEEP;
143*4882a593Smuzhiyun 	rctx->fallback_req.nbytes = areq->nbytes;
144*4882a593Smuzhiyun 	rctx->fallback_req.src = areq->src;
145*4882a593Smuzhiyun 
146*4882a593Smuzhiyun 	return crypto_ahash_update(&rctx->fallback_req);
147*4882a593Smuzhiyun }
148*4882a593Smuzhiyun 
sun8i_ss_hash_finup(struct ahash_request * areq)149*4882a593Smuzhiyun int sun8i_ss_hash_finup(struct ahash_request *areq)
150*4882a593Smuzhiyun {
151*4882a593Smuzhiyun 	struct sun8i_ss_hash_reqctx *rctx = ahash_request_ctx(areq);
152*4882a593Smuzhiyun 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
153*4882a593Smuzhiyun 	struct sun8i_ss_hash_tfm_ctx *tfmctx = crypto_ahash_ctx(tfm);
154*4882a593Smuzhiyun #ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
155*4882a593Smuzhiyun 	struct ahash_alg *alg = __crypto_ahash_alg(tfm->base.__crt_alg);
156*4882a593Smuzhiyun 	struct sun8i_ss_alg_template *algt;
157*4882a593Smuzhiyun #endif
158*4882a593Smuzhiyun 
159*4882a593Smuzhiyun 	ahash_request_set_tfm(&rctx->fallback_req, tfmctx->fallback_tfm);
160*4882a593Smuzhiyun 	rctx->fallback_req.base.flags = areq->base.flags &
161*4882a593Smuzhiyun 					CRYPTO_TFM_REQ_MAY_SLEEP;
162*4882a593Smuzhiyun 
163*4882a593Smuzhiyun 	rctx->fallback_req.nbytes = areq->nbytes;
164*4882a593Smuzhiyun 	rctx->fallback_req.src = areq->src;
165*4882a593Smuzhiyun 	rctx->fallback_req.result = areq->result;
166*4882a593Smuzhiyun #ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
167*4882a593Smuzhiyun 	algt = container_of(alg, struct sun8i_ss_alg_template, alg.hash);
168*4882a593Smuzhiyun 	algt->stat_fb++;
169*4882a593Smuzhiyun #endif
170*4882a593Smuzhiyun 
171*4882a593Smuzhiyun 	return crypto_ahash_finup(&rctx->fallback_req);
172*4882a593Smuzhiyun }
173*4882a593Smuzhiyun 
sun8i_ss_hash_digest_fb(struct ahash_request * areq)174*4882a593Smuzhiyun static int sun8i_ss_hash_digest_fb(struct ahash_request *areq)
175*4882a593Smuzhiyun {
176*4882a593Smuzhiyun 	struct sun8i_ss_hash_reqctx *rctx = ahash_request_ctx(areq);
177*4882a593Smuzhiyun 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
178*4882a593Smuzhiyun 	struct sun8i_ss_hash_tfm_ctx *tfmctx = crypto_ahash_ctx(tfm);
179*4882a593Smuzhiyun #ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
180*4882a593Smuzhiyun 	struct ahash_alg *alg = __crypto_ahash_alg(tfm->base.__crt_alg);
181*4882a593Smuzhiyun 	struct sun8i_ss_alg_template *algt;
182*4882a593Smuzhiyun #endif
183*4882a593Smuzhiyun 
184*4882a593Smuzhiyun 	ahash_request_set_tfm(&rctx->fallback_req, tfmctx->fallback_tfm);
185*4882a593Smuzhiyun 	rctx->fallback_req.base.flags = areq->base.flags &
186*4882a593Smuzhiyun 					CRYPTO_TFM_REQ_MAY_SLEEP;
187*4882a593Smuzhiyun 
188*4882a593Smuzhiyun 	rctx->fallback_req.nbytes = areq->nbytes;
189*4882a593Smuzhiyun 	rctx->fallback_req.src = areq->src;
190*4882a593Smuzhiyun 	rctx->fallback_req.result = areq->result;
191*4882a593Smuzhiyun #ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
192*4882a593Smuzhiyun 	algt = container_of(alg, struct sun8i_ss_alg_template, alg.hash);
193*4882a593Smuzhiyun 	algt->stat_fb++;
194*4882a593Smuzhiyun #endif
195*4882a593Smuzhiyun 
196*4882a593Smuzhiyun 	return crypto_ahash_digest(&rctx->fallback_req);
197*4882a593Smuzhiyun }
198*4882a593Smuzhiyun 
/*
 * sun8i_ss_run_hash_task() - program the SS hardware for each prepared SG
 * @ss:   the SS device state
 * @rctx: request context carrying the prepared t_src/t_dst DMA descriptors
 * @name: algorithm name, used only in diagnostics
 *
 * Submits one hardware operation per valid t_dst entry (the list is
 * terminated by a zero address). Each operation is serialized against
 * other flows with ss->mlock while the registers are written, then waits
 * on the flow's completion, which the interrupt handler is expected to
 * signal by setting flows[flow].status (NOTE(review): handler not visible
 * in this file — confirm).
 *
 * Return: 0 on success, -EFAULT if an operation did not complete in time.
 */
static int sun8i_ss_run_hash_task(struct sun8i_ss_dev *ss,
				  struct sun8i_ss_hash_reqctx *rctx,
				  const char *name)
{
	int flow = rctx->flow;
	u32 v = SS_START;
	int i;

#ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
	ss->flows[flow].stat_req++;
#endif

	/* choose between stream0/stream1 */
	if (flow)
		v |= SS_FLOW1;
	else
		v |= SS_FLOW0;

	v |= rctx->method;

	for (i = 0; i < MAX_SG; i++) {
		/* a zero destination address terminates the descriptor list */
		if (!rctx->t_dst[i].addr)
			break;

		mutex_lock(&ss->mlock);
		if (i > 0) {
			/*
			 * For chained SGs, feed the previous partial digest
			 * back through the KEY/IV registers; BIT(17)
			 * presumably tells the engine to resume from that
			 * state — TODO confirm against the datasheet.
			 */
			v |= BIT(17);
			writel(rctx->t_dst[i - 1].addr, ss->base + SS_KEY_ADR_REG);
			writel(rctx->t_dst[i - 1].addr, ss->base + SS_IV_ADR_REG);
		}

		dev_dbg(ss->dev,
			"Processing SG %d on flow %d %s ctl=%x %d to %d method=%x src=%x dst=%x\n",
			i, flow, name, v,
			rctx->t_src[i].len, rctx->t_dst[i].len,
			rctx->method, rctx->t_src[i].addr, rctx->t_dst[i].addr);

		/* program source, destination, length and unmask interrupts */
		writel(rctx->t_src[i].addr, ss->base + SS_SRC_ADR_REG);
		writel(rctx->t_dst[i].addr, ss->base + SS_DST_ADR_REG);
		writel(rctx->t_src[i].len, ss->base + SS_LEN_ADR_REG);
		writel(BIT(0) | BIT(1), ss->base + SS_INT_CTL_REG);

		reinit_completion(&ss->flows[flow].complete);
		ss->flows[flow].status = 0;
		/* ensure status/completion reset is visible before starting */
		wmb();

		writel(v, ss->base + SS_CTL_REG);
		mutex_unlock(&ss->mlock);
		/* status, not the completion return value, decides success */
		wait_for_completion_interruptible_timeout(&ss->flows[flow].complete,
							  msecs_to_jiffies(2000));
		if (ss->flows[flow].status == 0) {
			dev_err(ss->dev, "DMA timeout for %s\n", name);
			return -EFAULT;
		}
	}

	return 0;
}
257*4882a593Smuzhiyun 
sun8i_ss_hash_need_fallback(struct ahash_request * areq)258*4882a593Smuzhiyun static bool sun8i_ss_hash_need_fallback(struct ahash_request *areq)
259*4882a593Smuzhiyun {
260*4882a593Smuzhiyun 	struct scatterlist *sg;
261*4882a593Smuzhiyun 
262*4882a593Smuzhiyun 	if (areq->nbytes == 0)
263*4882a593Smuzhiyun 		return true;
264*4882a593Smuzhiyun 	/* we need to reserve one SG for the padding one */
265*4882a593Smuzhiyun 	if (sg_nents(areq->src) > MAX_SG - 1)
266*4882a593Smuzhiyun 		return true;
267*4882a593Smuzhiyun 	sg = areq->src;
268*4882a593Smuzhiyun 	while (sg) {
269*4882a593Smuzhiyun 		/* SS can operate hash only on full block size
270*4882a593Smuzhiyun 		 * since SS support only MD5,sha1,sha224 and sha256, blocksize
271*4882a593Smuzhiyun 		 * is always 64
272*4882a593Smuzhiyun 		 * TODO: handle request if last SG is not len%64
273*4882a593Smuzhiyun 		 * but this will need to copy data on a new SG of size=64
274*4882a593Smuzhiyun 		 */
275*4882a593Smuzhiyun 		if (sg->length % 64 || !IS_ALIGNED(sg->offset, sizeof(u32)))
276*4882a593Smuzhiyun 			return true;
277*4882a593Smuzhiyun 		sg = sg_next(sg);
278*4882a593Smuzhiyun 	}
279*4882a593Smuzhiyun 	return false;
280*4882a593Smuzhiyun }
281*4882a593Smuzhiyun 
sun8i_ss_hash_digest(struct ahash_request * areq)282*4882a593Smuzhiyun int sun8i_ss_hash_digest(struct ahash_request *areq)
283*4882a593Smuzhiyun {
284*4882a593Smuzhiyun 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
285*4882a593Smuzhiyun 	struct ahash_alg *alg = __crypto_ahash_alg(tfm->base.__crt_alg);
286*4882a593Smuzhiyun 	struct sun8i_ss_hash_reqctx *rctx = ahash_request_ctx(areq);
287*4882a593Smuzhiyun 	struct sun8i_ss_alg_template *algt;
288*4882a593Smuzhiyun 	struct sun8i_ss_dev *ss;
289*4882a593Smuzhiyun 	struct crypto_engine *engine;
290*4882a593Smuzhiyun 	struct scatterlist *sg;
291*4882a593Smuzhiyun 	int nr_sgs, e, i;
292*4882a593Smuzhiyun 
293*4882a593Smuzhiyun 	if (sun8i_ss_hash_need_fallback(areq))
294*4882a593Smuzhiyun 		return sun8i_ss_hash_digest_fb(areq);
295*4882a593Smuzhiyun 
296*4882a593Smuzhiyun 	nr_sgs = sg_nents(areq->src);
297*4882a593Smuzhiyun 	if (nr_sgs > MAX_SG - 1)
298*4882a593Smuzhiyun 		return sun8i_ss_hash_digest_fb(areq);
299*4882a593Smuzhiyun 
300*4882a593Smuzhiyun 	for_each_sg(areq->src, sg, nr_sgs, i) {
301*4882a593Smuzhiyun 		if (sg->length % 4 || !IS_ALIGNED(sg->offset, sizeof(u32)))
302*4882a593Smuzhiyun 			return sun8i_ss_hash_digest_fb(areq);
303*4882a593Smuzhiyun 	}
304*4882a593Smuzhiyun 
305*4882a593Smuzhiyun 	algt = container_of(alg, struct sun8i_ss_alg_template, alg.hash);
306*4882a593Smuzhiyun 	ss = algt->ss;
307*4882a593Smuzhiyun 
308*4882a593Smuzhiyun 	e = sun8i_ss_get_engine_number(ss);
309*4882a593Smuzhiyun 	rctx->flow = e;
310*4882a593Smuzhiyun 	engine = ss->flows[e].engine;
311*4882a593Smuzhiyun 
312*4882a593Smuzhiyun 	return crypto_transfer_hash_request_to_engine(engine, areq);
313*4882a593Smuzhiyun }
314*4882a593Smuzhiyun 
315*4882a593Smuzhiyun /* sun8i_ss_hash_run - run an ahash request
316*4882a593Smuzhiyun  * Send the data of the request to the SS along with an extra SG with padding
317*4882a593Smuzhiyun  */
sun8i_ss_hash_run(struct crypto_engine * engine,void * breq)318*4882a593Smuzhiyun int sun8i_ss_hash_run(struct crypto_engine *engine, void *breq)
319*4882a593Smuzhiyun {
320*4882a593Smuzhiyun 	struct ahash_request *areq = container_of(breq, struct ahash_request, base);
321*4882a593Smuzhiyun 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
322*4882a593Smuzhiyun 	struct ahash_alg *alg = __crypto_ahash_alg(tfm->base.__crt_alg);
323*4882a593Smuzhiyun 	struct sun8i_ss_hash_reqctx *rctx = ahash_request_ctx(areq);
324*4882a593Smuzhiyun 	struct sun8i_ss_alg_template *algt;
325*4882a593Smuzhiyun 	struct sun8i_ss_dev *ss;
326*4882a593Smuzhiyun 	struct scatterlist *sg;
327*4882a593Smuzhiyun 	int nr_sgs, err, digestsize;
328*4882a593Smuzhiyun 	unsigned int len;
329*4882a593Smuzhiyun 	u64 fill, min_fill, byte_count;
330*4882a593Smuzhiyun 	void *pad, *result;
331*4882a593Smuzhiyun 	int j, i, todo;
332*4882a593Smuzhiyun 	__be64 *bebits;
333*4882a593Smuzhiyun 	__le64 *lebits;
334*4882a593Smuzhiyun 	dma_addr_t addr_res, addr_pad;
335*4882a593Smuzhiyun 	__le32 *bf;
336*4882a593Smuzhiyun 
337*4882a593Smuzhiyun 	algt = container_of(alg, struct sun8i_ss_alg_template, alg.hash);
338*4882a593Smuzhiyun 	ss = algt->ss;
339*4882a593Smuzhiyun 
340*4882a593Smuzhiyun 	digestsize = algt->alg.hash.halg.digestsize;
341*4882a593Smuzhiyun 	if (digestsize == SHA224_DIGEST_SIZE)
342*4882a593Smuzhiyun 		digestsize = SHA256_DIGEST_SIZE;
343*4882a593Smuzhiyun 
344*4882a593Smuzhiyun 	result = ss->flows[rctx->flow].result;
345*4882a593Smuzhiyun 	pad = ss->flows[rctx->flow].pad;
346*4882a593Smuzhiyun 	memset(pad, 0, algt->alg.hash.halg.base.cra_blocksize * 2);
347*4882a593Smuzhiyun 	bf = (__le32 *)pad;
348*4882a593Smuzhiyun 
349*4882a593Smuzhiyun 	for (i = 0; i < MAX_SG; i++) {
350*4882a593Smuzhiyun 		rctx->t_dst[i].addr = 0;
351*4882a593Smuzhiyun 		rctx->t_dst[i].len = 0;
352*4882a593Smuzhiyun 	}
353*4882a593Smuzhiyun 
354*4882a593Smuzhiyun #ifdef CONFIG_CRYPTO_DEV_SUN8I_SS_DEBUG
355*4882a593Smuzhiyun 	algt->stat_req++;
356*4882a593Smuzhiyun #endif
357*4882a593Smuzhiyun 
358*4882a593Smuzhiyun 	rctx->method = ss->variant->alg_hash[algt->ss_algo_id];
359*4882a593Smuzhiyun 
360*4882a593Smuzhiyun 	nr_sgs = dma_map_sg(ss->dev, areq->src, sg_nents(areq->src), DMA_TO_DEVICE);
361*4882a593Smuzhiyun 	if (nr_sgs <= 0 || nr_sgs > MAX_SG) {
362*4882a593Smuzhiyun 		dev_err(ss->dev, "Invalid sg number %d\n", nr_sgs);
363*4882a593Smuzhiyun 		err = -EINVAL;
364*4882a593Smuzhiyun 		goto theend;
365*4882a593Smuzhiyun 	}
366*4882a593Smuzhiyun 
367*4882a593Smuzhiyun 	addr_res = dma_map_single(ss->dev, result, digestsize, DMA_FROM_DEVICE);
368*4882a593Smuzhiyun 	if (dma_mapping_error(ss->dev, addr_res)) {
369*4882a593Smuzhiyun 		dev_err(ss->dev, "DMA map dest\n");
370*4882a593Smuzhiyun 		err = -EINVAL;
371*4882a593Smuzhiyun 		goto theend;
372*4882a593Smuzhiyun 	}
373*4882a593Smuzhiyun 
374*4882a593Smuzhiyun 	len = areq->nbytes;
375*4882a593Smuzhiyun 	sg = areq->src;
376*4882a593Smuzhiyun 	i = 0;
377*4882a593Smuzhiyun 	while (len > 0 && sg) {
378*4882a593Smuzhiyun 		if (sg_dma_len(sg) == 0) {
379*4882a593Smuzhiyun 			sg = sg_next(sg);
380*4882a593Smuzhiyun 			continue;
381*4882a593Smuzhiyun 		}
382*4882a593Smuzhiyun 		rctx->t_src[i].addr = sg_dma_address(sg);
383*4882a593Smuzhiyun 		todo = min(len, sg_dma_len(sg));
384*4882a593Smuzhiyun 		rctx->t_src[i].len = todo / 4;
385*4882a593Smuzhiyun 		len -= todo;
386*4882a593Smuzhiyun 		rctx->t_dst[i].addr = addr_res;
387*4882a593Smuzhiyun 		rctx->t_dst[i].len = digestsize / 4;
388*4882a593Smuzhiyun 		sg = sg_next(sg);
389*4882a593Smuzhiyun 		i++;
390*4882a593Smuzhiyun 	}
391*4882a593Smuzhiyun 	if (len > 0) {
392*4882a593Smuzhiyun 		dev_err(ss->dev, "remaining len %d\n", len);
393*4882a593Smuzhiyun 		err = -EINVAL;
394*4882a593Smuzhiyun 		goto theend;
395*4882a593Smuzhiyun 	}
396*4882a593Smuzhiyun 
397*4882a593Smuzhiyun 	byte_count = areq->nbytes;
398*4882a593Smuzhiyun 	j = 0;
399*4882a593Smuzhiyun 	bf[j++] = cpu_to_le32(0x80);
400*4882a593Smuzhiyun 
401*4882a593Smuzhiyun 	fill = 64 - (byte_count % 64);
402*4882a593Smuzhiyun 	min_fill = 3 * sizeof(u32);
403*4882a593Smuzhiyun 
404*4882a593Smuzhiyun 	if (fill < min_fill)
405*4882a593Smuzhiyun 		fill += 64;
406*4882a593Smuzhiyun 
407*4882a593Smuzhiyun 	j += (fill - min_fill) / sizeof(u32);
408*4882a593Smuzhiyun 
409*4882a593Smuzhiyun 	switch (algt->ss_algo_id) {
410*4882a593Smuzhiyun 	case SS_ID_HASH_MD5:
411*4882a593Smuzhiyun 		lebits = (__le64 *)&bf[j];
412*4882a593Smuzhiyun 		*lebits = cpu_to_le64(byte_count << 3);
413*4882a593Smuzhiyun 		j += 2;
414*4882a593Smuzhiyun 		break;
415*4882a593Smuzhiyun 	case SS_ID_HASH_SHA1:
416*4882a593Smuzhiyun 	case SS_ID_HASH_SHA224:
417*4882a593Smuzhiyun 	case SS_ID_HASH_SHA256:
418*4882a593Smuzhiyun 		bebits = (__be64 *)&bf[j];
419*4882a593Smuzhiyun 		*bebits = cpu_to_be64(byte_count << 3);
420*4882a593Smuzhiyun 		j += 2;
421*4882a593Smuzhiyun 		break;
422*4882a593Smuzhiyun 	}
423*4882a593Smuzhiyun 
424*4882a593Smuzhiyun 	addr_pad = dma_map_single(ss->dev, pad, j * 4, DMA_TO_DEVICE);
425*4882a593Smuzhiyun 	rctx->t_src[i].addr = addr_pad;
426*4882a593Smuzhiyun 	rctx->t_src[i].len = j;
427*4882a593Smuzhiyun 	rctx->t_dst[i].addr = addr_res;
428*4882a593Smuzhiyun 	rctx->t_dst[i].len = digestsize / 4;
429*4882a593Smuzhiyun 	if (dma_mapping_error(ss->dev, addr_pad)) {
430*4882a593Smuzhiyun 		dev_err(ss->dev, "DMA error on padding SG\n");
431*4882a593Smuzhiyun 		err = -EINVAL;
432*4882a593Smuzhiyun 		goto theend;
433*4882a593Smuzhiyun 	}
434*4882a593Smuzhiyun 
435*4882a593Smuzhiyun 	err = sun8i_ss_run_hash_task(ss, rctx, crypto_tfm_alg_name(areq->base.tfm));
436*4882a593Smuzhiyun 
437*4882a593Smuzhiyun 	dma_unmap_single(ss->dev, addr_pad, j * 4, DMA_TO_DEVICE);
438*4882a593Smuzhiyun 	dma_unmap_sg(ss->dev, areq->src, nr_sgs, DMA_TO_DEVICE);
439*4882a593Smuzhiyun 	dma_unmap_single(ss->dev, addr_res, digestsize, DMA_FROM_DEVICE);
440*4882a593Smuzhiyun 
441*4882a593Smuzhiyun 	memcpy(areq->result, result, algt->alg.hash.halg.digestsize);
442*4882a593Smuzhiyun theend:
443*4882a593Smuzhiyun 	local_bh_disable();
444*4882a593Smuzhiyun 	crypto_finalize_hash_request(engine, breq, err);
445*4882a593Smuzhiyun 	local_bh_enable();
446*4882a593Smuzhiyun 	return 0;
447*4882a593Smuzhiyun }
448