// SPDX-License-Identifier: GPL-2.0
/*
 * Crypto acceleration support for Rockchip RK3288
 *
 * Copyright (c) 2015, Fuzhou Rockchip Electronics Co., Ltd
 *
 * Author: Zain Wang <zain.wang@rock-chips.com>
 *
 * Some ideas are from the marvell/cesa.c and s5p-sss.c drivers.
 */
#include "rk_crypto_core.h"
#include "rk_crypto_v1.h"
#include "rk_crypto_v1_reg.h"

/*
 * The hardware cannot hash a zero-length message, so the precomputed
 * hash of the empty message is returned for that case instead.
 */

static struct rk_alg_ctx *rk_alg_ctx_cast(struct rk_crypto_dev *rk_dev)
{
	struct ahash_request *req =
		ahash_request_cast(rk_dev->async_req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct rk_ahash_ctx *ctx = crypto_ahash_ctx(tfm);

	return &ctx->algs_ctx;
}

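/*
 * Interrupt handler hook: acknowledge every pending interrupt by
 * writing the status word back, and latch a DMA failure for the
 * dispatcher. The 0x0a mask appears to cover the receive-DMA error
 * bits (RK_CRYPTO_HRDMA_ERR_INT | RK_CRYPTO_BCDMA_ERR_INT).
 */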
static int rk_crypto_irq_handle(int irq, void *dev_id)
{
	struct rk_crypto_dev *rk_dev = platform_get_drvdata(dev_id);
	u32 interrupt_status;

	interrupt_status = CRYPTO_READ(rk_dev, RK_CRYPTO_INTSTS);
	CRYPTO_WRITE(rk_dev, RK_CRYPTO_INTSTS, interrupt_status);

	if (interrupt_status & 0x0a) {
		dev_warn(rk_dev->dev, "DMA Error\n");
		rk_dev->err = -EFAULT;
	}

	return 0;
}

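/*
 * Return the well-known digest of the empty message for the algorithm
 * selected by the tfm's digest size.
 */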
static int zero_message_process(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	int rk_digest_size = crypto_ahash_digestsize(tfm);

	static const u8 sha256_zero_msg_hash[SHA256_DIGEST_SIZE] = {
		0xe3, 0xb0, 0xc4, 0x42, 0x98, 0xfc, 0x1c, 0x14,
		0x9a, 0xfb, 0xf4, 0xc8, 0x99, 0x6f, 0xb9, 0x24,
		0x27, 0xae, 0x41, 0xe4, 0x64, 0x9b, 0x93, 0x4c,
		0xa4, 0x95, 0x99, 0x1b, 0x78, 0x52, 0xb8, 0x55
	};

	static const u8 sha1_zero_msg_hash[SHA1_DIGEST_SIZE] = {
		0xda, 0x39, 0xa3, 0xee, 0x5e, 0x6b, 0x4b, 0x0d,
		0x32, 0x55, 0xbf, 0xef, 0x95, 0x60, 0x18, 0x90,
		0xaf, 0xd8, 0x07, 0x09
	};

	static const u8 md5_zero_msg_hash[MD5_DIGEST_SIZE] = {
		0xd4, 0x1d, 0x8c, 0xd9, 0x8f, 0x00, 0xb2, 0x04,
		0xe9, 0x80, 0x09, 0x98, 0xec, 0xf8, 0x42, 0x7e,
	};

	switch (rk_digest_size) {
	case SHA1_DIGEST_SIZE:
		memcpy(req->result, sha1_zero_msg_hash, rk_digest_size);
		break;
	case SHA256_DIGEST_SIZE:
		memcpy(req->result, sha256_zero_msg_hash, rk_digest_size);
		break;
	case MD5_DIGEST_SIZE:
		memcpy(req->result, md5_zero_msg_hash, rk_digest_size);
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

static void rk_ahash_crypto_complete(struct crypto_async_request *base, int err)
{
	if (base->complete)
		base->complete(base, err);
}

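/*
 * Prepare the engine for a new hash request: flush the hash path,
 * clear the previous digest output, enable and acknowledge the
 * hash-receive DMA interrupts, then program the hash mode, byte
 * swapping and total message length. The upper halfword of
 * RK_CRYPTO_CTRL is used as a write-enable mask for the lower one,
 * hence the _SBF(0xffff, 16) in both writes.
 */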
static void rk_ahash_reg_init(struct rk_crypto_dev *rk_dev)
{
	struct ahash_request *req = ahash_request_cast(rk_dev->async_req);
	struct rk_ahash_rctx *rctx = ahash_request_ctx(req);
	struct rk_alg_ctx *alg_ctx = rk_alg_ctx_cast(rk_dev);
	int reg_status = 0;

	reg_status = CRYPTO_READ(rk_dev, RK_CRYPTO_CTRL) |
		     RK_CRYPTO_HASH_FLUSH | _SBF(0xffff, 16);
	CRYPTO_WRITE(rk_dev, RK_CRYPTO_CTRL, reg_status);

	reg_status = CRYPTO_READ(rk_dev, RK_CRYPTO_CTRL);
	reg_status &= (~RK_CRYPTO_HASH_FLUSH);
	reg_status |= _SBF(0xffff, 16);
	CRYPTO_WRITE(rk_dev, RK_CRYPTO_CTRL, reg_status);

	memset_io(rk_dev->reg + RK_CRYPTO_HASH_DOUT_0, 0, 32);

	CRYPTO_WRITE(rk_dev, RK_CRYPTO_INTENA, RK_CRYPTO_HRDMA_ERR_ENA |
					       RK_CRYPTO_HRDMA_DONE_ENA);

	CRYPTO_WRITE(rk_dev, RK_CRYPTO_INTSTS, RK_CRYPTO_HRDMA_ERR_INT |
					       RK_CRYPTO_HRDMA_DONE_INT);

	CRYPTO_WRITE(rk_dev, RK_CRYPTO_HASH_CTRL, rctx->mode |
						  RK_CRYPTO_HASH_SWAP_DO);

	CRYPTO_WRITE(rk_dev, RK_CRYPTO_CONF, RK_CRYPTO_BYTESWAP_HRFIFO |
					     RK_CRYPTO_BYTESWAP_BRFIFO |
					     RK_CRYPTO_BYTESWAP_BTFIFO);

	CRYPTO_WRITE(rk_dev, RK_CRYPTO_HASH_MSG_LEN, alg_ctx->total);
}

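/*
 * The incremental ahash entry points below (init/update/final/finup,
 * import/export) do not touch the hardware at all: they mirror the
 * caller's MAY_SLEEP flag onto a software fallback request and forward
 * to it. Only one-shot digest() requests take the hardware path.
 */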
static int rk_ahash_init(struct ahash_request *req)
{
	struct rk_ahash_rctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct rk_ahash_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;

	return crypto_ahash_init(&rctx->fallback_req);
}

static int rk_ahash_update(struct ahash_request *req)
{
	struct rk_ahash_rctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct rk_ahash_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;
	rctx->fallback_req.nbytes = req->nbytes;
	rctx->fallback_req.src = req->src;

	return crypto_ahash_update(&rctx->fallback_req);
}

static int rk_ahash_final(struct ahash_request *req)
{
	struct rk_ahash_rctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct rk_ahash_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;
	rctx->fallback_req.result = req->result;

	return crypto_ahash_final(&rctx->fallback_req);
}

static int rk_ahash_finup(struct ahash_request *req)
{
	struct rk_ahash_rctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct rk_ahash_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;

	rctx->fallback_req.nbytes = req->nbytes;
	rctx->fallback_req.src = req->src;
	rctx->fallback_req.result = req->result;

	return crypto_ahash_finup(&rctx->fallback_req);
}

static int rk_ahash_import(struct ahash_request *req, const void *in)
{
	struct rk_ahash_rctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct rk_ahash_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;

	return crypto_ahash_import(&rctx->fallback_req, in);
}

static int rk_ahash_export(struct ahash_request *req, void *out)
{
	struct rk_ahash_rctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct rk_ahash_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;

	return crypto_ahash_export(&rctx->fallback_req, out);
}

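/*
 * Entry point for one-shot digest requests: zero-length messages are
 * answered immediately with the precomputed empty-message digest (the
 * hardware cannot process them); everything else is queued to the
 * crypto engine via the device's enqueue hook.
 */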
static int rk_ahash_digest(struct ahash_request *req)
{
	struct rk_ahash_ctx *tctx = crypto_tfm_ctx(req->base.tfm);
	struct rk_crypto_dev *rk_dev = tctx->rk_dev;

	if (!req->nbytes)
		return zero_message_process(req);
	else
		return rk_dev->enqueue(rk_dev, &req->base);
}

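/*
 * Start the hash-receive DMA for the current chunk: program the source
 * address and the transfer length in 32-bit words (rounded up), then
 * set HASH_START, with the same bit in the upper halfword acting as
 * the write-enable mask.
 */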
static void crypto_ahash_dma_start(struct rk_crypto_dev *rk_dev)
{
	struct rk_alg_ctx *alg_ctx = rk_alg_ctx_cast(rk_dev);

	CRYPTO_WRITE(rk_dev, RK_CRYPTO_HRDMAS, alg_ctx->addr_in);
	CRYPTO_WRITE(rk_dev, RK_CRYPTO_HRDMAL, (alg_ctx->count + 3) / 4);
	CRYPTO_WRITE(rk_dev, RK_CRYPTO_CTRL, RK_CRYPTO_HASH_START |
					     (RK_CRYPTO_HASH_START << 16));
}

static int rk_ahash_set_data_start(struct rk_crypto_dev *rk_dev)
{
	int err;
	struct rk_alg_ctx *alg_ctx = rk_alg_ctx_cast(rk_dev);

	err = rk_dev->load_data(rk_dev, alg_ctx->sg_src, NULL);
	if (!err)
		crypto_ahash_dma_start(rk_dev);
	return err;
}

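/*
 * First step of a dequeued request: record the scatterlist bookkeeping
 * in the algorithm context, derive the hardware hash mode from the
 * tfm's digest size (SHA-1, SHA-256 or MD5), program the registers and
 * kick off the first DMA transfer.
 */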
static int rk_ahash_start(struct rk_crypto_dev *rk_dev)
{
	struct ahash_request *req = ahash_request_cast(rk_dev->async_req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct rk_ahash_rctx *rctx = ahash_request_ctx(req);
	struct rk_alg_ctx *alg_ctx = rk_alg_ctx_cast(rk_dev);

	alg_ctx->total = req->nbytes;
	alg_ctx->left_bytes = req->nbytes;
	alg_ctx->sg_src = req->src;
	alg_ctx->req_src = req->src;
	alg_ctx->src_nents = sg_nents_for_len(req->src, req->nbytes);

	rctx->mode = 0;

	switch (crypto_ahash_digestsize(tfm)) {
	case SHA1_DIGEST_SIZE:
		rctx->mode = RK_CRYPTO_HASH_SHA1;
		break;
	case SHA256_DIGEST_SIZE:
		rctx->mode = RK_CRYPTO_HASH_SHA256;
		break;
	case MD5_DIGEST_SIZE:
		rctx->mode = RK_CRYPTO_HASH_MD5;
		break;
	default:
		return -EINVAL;
	}

	rk_ahash_reg_init(rk_dev);
	return rk_ahash_set_data_start(rk_dev);
}

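/*
 * Completion step, called after each DMA chunk: unload the finished
 * data, then either advance to the next scatterlist entry and restart
 * the DMA, or, once no bytes are left, wait for the engine to finish
 * and copy the digest out of the DOUT registers.
 */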
static int rk_ahash_crypto_rx(struct rk_crypto_dev *rk_dev)
{
	int err = 0;
	struct ahash_request *req = ahash_request_cast(rk_dev->async_req);
	struct rk_alg_ctx *alg_ctx = rk_alg_ctx_cast(rk_dev);
	struct crypto_ahash *tfm;

	CRYPTO_TRACE("left_bytes = %u\n", alg_ctx->left_bytes);

	err = rk_dev->unload_data(rk_dev);
	if (err)
		goto out_rx;

	if (alg_ctx->left_bytes) {
		if (alg_ctx->aligned) {
			if (sg_is_last(alg_ctx->sg_src)) {
				dev_warn(rk_dev->dev, "[%s:%d], Lack of data\n",
					 __func__, __LINE__);
				err = -ENOMEM;
				goto out_rx;
			}
			alg_ctx->sg_src = sg_next(alg_ctx->sg_src);
		}
		err = rk_ahash_set_data_start(rk_dev);
	} else {
		/*
		 * The engine needs some time to process the data after the
		 * last DMA transfer completes, and that time depends on the
		 * length of the final chunk, so a fixed delay cannot be used
		 * here. Polling in 10us steps keeps this loop from being
		 * entered too frequently and wasting cycles, while still
		 * responding quickly once the hash is ready.
		 */
		while (!CRYPTO_READ(rk_dev, RK_CRYPTO_HASH_STS))
			udelay(10);

		tfm = crypto_ahash_reqtfm(req);
		memcpy_fromio(req->result, rk_dev->reg + RK_CRYPTO_HASH_DOUT_0,
			      crypto_ahash_digestsize(tfm));
	}

out_rx:
	return err;
}

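/*
 * Per-tfm setup: wire the dispatcher ops, claim the crypto device and
 * allocate a software fallback with the same algorithm name. The
 * request size and state size are inherited from the fallback so that
 * import()/export(), which are serviced by it, stay compatible.
 */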
static int rk_cra_hash_init(struct crypto_tfm *tfm)
{
	struct rk_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct rk_crypto_algt *algt;
	struct ahash_alg *alg = __crypto_ahash_alg(tfm->__crt_alg);
	const char *alg_name = crypto_tfm_alg_name(tfm);
	struct rk_alg_ctx *alg_ctx = &ctx->algs_ctx;
	struct rk_crypto_dev *rk_dev;

	algt = container_of(alg, struct rk_crypto_algt, alg.hash);
	rk_dev = algt->rk_dev;

	memset(ctx, 0x00, sizeof(*ctx));

	if (!rk_dev->request_crypto)
		return -EFAULT;

	rk_dev->request_crypto(rk_dev, alg_name);

	alg_ctx->align_size = 4;

	alg_ctx->ops.start = rk_ahash_start;
	alg_ctx->ops.update = rk_ahash_crypto_rx;
	alg_ctx->ops.complete = rk_ahash_crypto_complete;
	alg_ctx->ops.irq_handle = rk_crypto_irq_handle;

	ctx->rk_dev = rk_dev;

	/* for fallback */
	ctx->fallback_tfm = crypto_alloc_ahash(alg_name, 0,
					       CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->fallback_tfm)) {
		dev_err(rk_dev->dev, "Could not load fallback driver.\n");
		return PTR_ERR(ctx->fallback_tfm);
	}

	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct rk_ahash_rctx) +
				 crypto_ahash_reqsize(ctx->fallback_tfm));

	algt->alg.hash.halg.statesize = crypto_ahash_statesize(ctx->fallback_tfm);

	return 0;
}

static void rk_cra_hash_exit(struct crypto_tfm *tfm)
{
	struct rk_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

	if (ctx->fallback_tfm)
		crypto_free_ahash(ctx->fallback_tfm);

	ctx->rk_dev->release_crypto(ctx->rk_dev, crypto_tfm_alg_name(tfm));
}

struct rk_crypto_algt rk_v1_ahash_sha1 = RK_HASH_ALGO_INIT(SHA1, sha1);
struct rk_crypto_algt rk_v1_ahash_sha256 = RK_HASH_ALGO_INIT(SHA256, sha256);
struct rk_crypto_algt rk_v1_ahash_md5 = RK_HASH_ALGO_INIT(MD5, md5);