// SPDX-License-Identifier: GPL-2.0-only
/*
 * Crypto acceleration support for Rockchip RK3288
 *
 * Copyright (c) 2015, Fuzhou Rockchip Electronics Co., Ltd
 *
 * Author: Zain Wang <zain.wang@rock-chips.com>
 *
 * Some ideas are from the marvell/cesa.c and s5p-sss.c drivers.
 */
#include <linux/device.h>
#include "rk3288_crypto.h"

/*
 * The hardware cannot hash a zero-length message, so return the
 * precomputed hash of an empty message in that case.
 */

static int zero_message_process(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	int rk_digest_size = crypto_ahash_digestsize(tfm);

	switch (rk_digest_size) {
	case SHA1_DIGEST_SIZE:
		memcpy(req->result, sha1_zero_message_hash, rk_digest_size);
		break;
	case SHA256_DIGEST_SIZE:
		memcpy(req->result, sha256_zero_message_hash, rk_digest_size);
		break;
	case MD5_DIGEST_SIZE:
		memcpy(req->result, md5_zero_message_hash, rk_digest_size);
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

static void rk_ahash_crypto_complete(struct crypto_async_request *base, int err)
{
	if (base->complete)
		base->complete(base, err);
}

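/*
 * Program the hash unit for a new request: flush the hash block, clear
 * the output registers, enable the hash DMA interrupts, configure FIFO
 * byte swapping, and set the hash mode and total message length.
 */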
static void rk_ahash_reg_init(struct rk_crypto_info *dev)
{
	struct ahash_request *req = ahash_request_cast(dev->async_req);
	struct rk_ahash_rctx *rctx = ahash_request_ctx(req);
	int reg_status;

	reg_status = CRYPTO_READ(dev, RK_CRYPTO_CTRL) |
		     RK_CRYPTO_HASH_FLUSH | _SBF(0xffff, 16);
	CRYPTO_WRITE(dev, RK_CRYPTO_CTRL, reg_status);

	reg_status = CRYPTO_READ(dev, RK_CRYPTO_CTRL);
	reg_status &= ~RK_CRYPTO_HASH_FLUSH;
	reg_status |= _SBF(0xffff, 16);
	CRYPTO_WRITE(dev, RK_CRYPTO_CTRL, reg_status);

	memset_io(dev->reg + RK_CRYPTO_HASH_DOUT_0, 0, 32);

	CRYPTO_WRITE(dev, RK_CRYPTO_INTENA, RK_CRYPTO_HRDMA_ERR_ENA |
					    RK_CRYPTO_HRDMA_DONE_ENA);

	CRYPTO_WRITE(dev, RK_CRYPTO_INTSTS, RK_CRYPTO_HRDMA_ERR_INT |
					    RK_CRYPTO_HRDMA_DONE_INT);

	CRYPTO_WRITE(dev, RK_CRYPTO_HASH_CTRL, rctx->mode |
					       RK_CRYPTO_HASH_SWAP_DO);

	CRYPTO_WRITE(dev, RK_CRYPTO_CONF, RK_CRYPTO_BYTESWAP_HRFIFO |
					  RK_CRYPTO_BYTESWAP_BRFIFO |
					  RK_CRYPTO_BYTESWAP_BTFIFO);

	CRYPTO_WRITE(dev, RK_CRYPTO_HASH_MSG_LEN, dev->total);
}

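/*
 * The hardware provides no way to export or import a partial hash
 * state, so init/update/final/finup/export/import are all forwarded to
 * the software fallback tfm; only digest() uses the accelerator.
 */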
static int rk_ahash_init(struct ahash_request *req)
{
	struct rk_ahash_rctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct rk_ahash_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;

	return crypto_ahash_init(&rctx->fallback_req);
}

static int rk_ahash_update(struct ahash_request *req)
{
	struct rk_ahash_rctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct rk_ahash_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;
	rctx->fallback_req.nbytes = req->nbytes;
	rctx->fallback_req.src = req->src;

	return crypto_ahash_update(&rctx->fallback_req);
}

static int rk_ahash_final(struct ahash_request *req)
{
	struct rk_ahash_rctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct rk_ahash_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;
	rctx->fallback_req.result = req->result;

	return crypto_ahash_final(&rctx->fallback_req);
}

static int rk_ahash_finup(struct ahash_request *req)
{
	struct rk_ahash_rctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct rk_ahash_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;

	rctx->fallback_req.nbytes = req->nbytes;
	rctx->fallback_req.src = req->src;
	rctx->fallback_req.result = req->result;

	return crypto_ahash_finup(&rctx->fallback_req);
}

static int rk_ahash_import(struct ahash_request *req, const void *in)
{
	struct rk_ahash_rctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct rk_ahash_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;

	return crypto_ahash_import(&rctx->fallback_req, in);
}

static int rk_ahash_export(struct ahash_request *req, void *out)
{
	struct rk_ahash_rctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct rk_ahash_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;

	return crypto_ahash_export(&rctx->fallback_req, out);
}

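/*
 * digest() is the only hardware path: a zero-length message gets the
 * precomputed hash, anything else is queued for DMA processing.
 */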
static int rk_ahash_digest(struct ahash_request *req)
{
	struct rk_ahash_ctx *tctx = crypto_tfm_ctx(req->base.tfm);
	struct rk_crypto_info *dev = tctx->dev;

	if (!req->nbytes)
		return zero_message_process(req);

	return dev->enqueue(dev, &req->base);
}

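/*
 * Program one DMA transfer: source address, length in 32-bit words
 * (rounded up), and the hash-start bit together with its write-enable
 * mask in the upper half of the control register.
 */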
static void crypto_ahash_dma_start(struct rk_crypto_info *dev)
{
	CRYPTO_WRITE(dev, RK_CRYPTO_HRDMAS, dev->addr_in);
	CRYPTO_WRITE(dev, RK_CRYPTO_HRDMAL, (dev->count + 3) / 4);
	CRYPTO_WRITE(dev, RK_CRYPTO_CTRL, RK_CRYPTO_HASH_START |
					  (RK_CRYPTO_HASH_START << 16));
}

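/* Map the current source scatterlist for DMA and start the transfer. */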
static int rk_ahash_set_data_start(struct rk_crypto_info *dev)
{
	int err;

	err = dev->load_data(dev, dev->sg_src, NULL);
	if (!err)
		crypto_ahash_dma_start(dev);
	return err;
}

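/*
 * Begin a new hash request: capture the request parameters in the
 * device state, pick the hash mode from the digest size, then program
 * the registers and kick off the first DMA transfer.
 */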
static int rk_ahash_start(struct rk_crypto_info *dev)
{
	struct ahash_request *req = ahash_request_cast(dev->async_req);
	struct crypto_ahash *tfm;
	struct rk_ahash_rctx *rctx;

	dev->total = req->nbytes;
	dev->left_bytes = req->nbytes;
	dev->aligned = 0;
	dev->align_size = 4;
	dev->sg_dst = NULL;
	dev->sg_src = req->src;
	dev->first = req->src;
	dev->src_nents = sg_nents(req->src);
	rctx = ahash_request_ctx(req);
	rctx->mode = 0;

	tfm = crypto_ahash_reqtfm(req);
	switch (crypto_ahash_digestsize(tfm)) {
	case SHA1_DIGEST_SIZE:
		rctx->mode = RK_CRYPTO_HASH_SHA1;
		break;
	case SHA256_DIGEST_SIZE:
		rctx->mode = RK_CRYPTO_HASH_SHA256;
		break;
	case MD5_DIGEST_SIZE:
		rctx->mode = RK_CRYPTO_HASH_MD5;
		break;
	default:
		return -EINVAL;
	}

	rk_ahash_reg_init(dev);
	return rk_ahash_set_data_start(dev);
}

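/*
 * DMA completion handler: unmap the chunk just processed, then either
 * feed the next chunk to the engine or, once all data has been sent,
 * wait for the digest to become valid and copy it out.
 */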
static int rk_ahash_crypto_rx(struct rk_crypto_info *dev)
{
	int err = 0;
	struct ahash_request *req = ahash_request_cast(dev->async_req);
	struct crypto_ahash *tfm;

	dev->unload_data(dev);
	if (dev->left_bytes) {
		if (dev->aligned) {
			if (sg_is_last(dev->sg_src)) {
				dev_warn(dev->dev, "[%s:%d], Lack of data\n",
					 __func__, __LINE__);
				err = -ENOMEM;
				goto out_rx;
			}
			dev->sg_src = sg_next(dev->sg_src);
		}
		err = rk_ahash_set_data_start(dev);
	} else {
		/*
		 * The engine needs some time to process the data after
		 * the last DMA transfer, and that time depends on the
		 * length of the final chunk, so a fixed delay will not
		 * do. Polling every 10us keeps the loop from hammering
		 * the register while still reacting quickly once the
		 * hash is ready.
		 */
		while (!CRYPTO_READ(dev, RK_CRYPTO_HASH_STS))
			udelay(10);

		tfm = crypto_ahash_reqtfm(req);
		memcpy_fromio(req->result, dev->reg + RK_CRYPTO_HASH_DOUT_0,
			      crypto_ahash_digestsize(tfm));
		dev->complete(dev->async_req, 0);
		tasklet_schedule(&dev->queue_task);
	}

out_rx:
	return err;
}

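/*
 * Per-tfm setup: bind the device, allocate the page used to bounce
 * unaligned data, set up the software fallback, and enable the clocks.
 */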
static int rk_cra_hash_init(struct crypto_tfm *tfm)
{
	struct rk_ahash_ctx *tctx = crypto_tfm_ctx(tfm);
	struct ahash_alg *alg = __crypto_ahash_alg(tfm->__crt_alg);
	const char *alg_name = crypto_tfm_alg_name(tfm);
	struct rk_crypto_tmp *algt;

	algt = container_of(alg, struct rk_crypto_tmp, alg.hash);

	tctx->dev = algt->dev;
	tctx->dev->addr_vir = (void *)__get_free_page(GFP_KERNEL);
	if (!tctx->dev->addr_vir) {
		dev_err(tctx->dev->dev, "failed to allocate a page for addr_vir\n");
		return -ENOMEM;
	}
	tctx->dev->start = rk_ahash_start;
	tctx->dev->update = rk_ahash_crypto_rx;
	tctx->dev->complete = rk_ahash_crypto_complete;

	/* for fallback */
	tctx->fallback_tfm = crypto_alloc_ahash(alg_name, 0,
						CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(tctx->fallback_tfm)) {
		dev_err(tctx->dev->dev, "Could not load fallback driver.\n");
		free_page((unsigned long)tctx->dev->addr_vir);
		return PTR_ERR(tctx->fallback_tfm);
	}
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct rk_ahash_rctx) +
				 crypto_ahash_reqsize(tctx->fallback_tfm));

	return tctx->dev->enable_clk(tctx->dev);
}

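/*
 * Undo rk_cra_hash_init(): release the fallback tfm and the bounce
 * page, and disable the clocks.
 */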
static void rk_cra_hash_exit(struct crypto_tfm *tfm)
{
	struct rk_ahash_ctx *tctx = crypto_tfm_ctx(tfm);

	crypto_free_ahash(tctx->fallback_tfm);
	free_page((unsigned long)tctx->dev->addr_vir);
	tctx->dev->disable_clk(tctx->dev);
}

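/*
 * Algorithm templates below are picked up and registered with the
 * kernel crypto API by the core driver.
 */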
struct rk_crypto_tmp rk_ahash_sha1 = {
	.type = ALG_TYPE_HASH,
	.alg.hash = {
		.init = rk_ahash_init,
		.update = rk_ahash_update,
		.final = rk_ahash_final,
		.finup = rk_ahash_finup,
		.export = rk_ahash_export,
		.import = rk_ahash_import,
		.digest = rk_ahash_digest,
		.halg = {
			.digestsize = SHA1_DIGEST_SIZE,
			.statesize = sizeof(struct sha1_state),
			.base = {
				.cra_name = "sha1",
				.cra_driver_name = "rk-sha1",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct rk_ahash_ctx),
				.cra_alignmask = 3,
				.cra_init = rk_cra_hash_init,
				.cra_exit = rk_cra_hash_exit,
				.cra_module = THIS_MODULE,
			}
		}
	}
};

struct rk_crypto_tmp rk_ahash_sha256 = {
	.type = ALG_TYPE_HASH,
	.alg.hash = {
		.init = rk_ahash_init,
		.update = rk_ahash_update,
		.final = rk_ahash_final,
		.finup = rk_ahash_finup,
		.export = rk_ahash_export,
		.import = rk_ahash_import,
		.digest = rk_ahash_digest,
		.halg = {
			.digestsize = SHA256_DIGEST_SIZE,
			.statesize = sizeof(struct sha256_state),
			.base = {
				.cra_name = "sha256",
				.cra_driver_name = "rk-sha256",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct rk_ahash_ctx),
				.cra_alignmask = 3,
				.cra_init = rk_cra_hash_init,
				.cra_exit = rk_cra_hash_exit,
				.cra_module = THIS_MODULE,
			}
		}
	}
};

struct rk_crypto_tmp rk_ahash_md5 = {
	.type = ALG_TYPE_HASH,
	.alg.hash = {
		.init = rk_ahash_init,
		.update = rk_ahash_update,
		.final = rk_ahash_final,
		.finup = rk_ahash_finup,
		.export = rk_ahash_export,
		.import = rk_ahash_import,
		.digest = rk_ahash_digest,
		.halg = {
			.digestsize = MD5_DIGEST_SIZE,
			.statesize = sizeof(struct md5_state),
			.base = {
				.cra_name = "md5",
				.cra_driver_name = "rk-md5",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_NEED_FALLBACK,
				/* MD5 shares the 64-byte block size of SHA-1 */
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct rk_ahash_ctx),
				.cra_alignmask = 3,
				.cra_init = rk_cra_hash_init,
				.cra_exit = rk_cra_hash_exit,
				.cra_module = THIS_MODULE,
			}
		}
	}
};