1*4882a593Smuzhiyun // SPDX-License-Identifier: GPL-2.0-only
2*4882a593Smuzhiyun /*
3*4882a593Smuzhiyun * Copyright (c) 2010-2014, The Linux Foundation. All rights reserved.
4*4882a593Smuzhiyun */
5*4882a593Smuzhiyun
6*4882a593Smuzhiyun #include <linux/device.h>
7*4882a593Smuzhiyun #include <linux/dma-mapping.h>
8*4882a593Smuzhiyun #include <linux/interrupt.h>
9*4882a593Smuzhiyun #include <crypto/internal/hash.h>
10*4882a593Smuzhiyun
11*4882a593Smuzhiyun #include "common.h"
12*4882a593Smuzhiyun #include "core.h"
13*4882a593Smuzhiyun #include "sha.h"
14*4882a593Smuzhiyun
15*4882a593Smuzhiyun /* crypto hw padding constant for first operation */
16*4882a593Smuzhiyun #define SHA_PADDING 64
17*4882a593Smuzhiyun #define SHA_PADDING_MASK (SHA_PADDING - 1)
18*4882a593Smuzhiyun
19*4882a593Smuzhiyun static LIST_HEAD(ahash_algs);
20*4882a593Smuzhiyun
21*4882a593Smuzhiyun static const u32 std_iv_sha1[SHA256_DIGEST_SIZE / sizeof(u32)] = {
22*4882a593Smuzhiyun SHA1_H0, SHA1_H1, SHA1_H2, SHA1_H3, SHA1_H4, 0, 0, 0
23*4882a593Smuzhiyun };
24*4882a593Smuzhiyun
25*4882a593Smuzhiyun static const u32 std_iv_sha256[SHA256_DIGEST_SIZE / sizeof(u32)] = {
26*4882a593Smuzhiyun SHA256_H0, SHA256_H1, SHA256_H2, SHA256_H3,
27*4882a593Smuzhiyun SHA256_H4, SHA256_H5, SHA256_H6, SHA256_H7
28*4882a593Smuzhiyun };
29*4882a593Smuzhiyun
/*
 * DMA completion callback for one hash transaction: tears down the DMA
 * mappings, copies the intermediate digest and byte counters out of the
 * shared result dump, restores the request fields that were rewritten
 * when the request was queued, and signals completion to the core.
 */
static void qce_ahash_done(void *data)
{
	struct crypto_async_request *async_req = data;
	struct ahash_request *req = ahash_request_cast(async_req);
	struct crypto_ahash *ahash = crypto_ahash_reqtfm(req);
	struct qce_sha_reqctx *rctx = ahash_request_ctx(req);
	struct qce_alg_template *tmpl = to_ahash_tmpl(async_req->tfm);
	struct qce_device *qce = tmpl->qce;
	struct qce_result_dump *result = qce->dma.result_buf;
	unsigned int digestsize = crypto_ahash_digestsize(ahash);
	int error;
	u32 status;

	error = qce_dma_terminate_all(&qce->dma);
	if (error)
		dev_dbg(qce->dev, "ahash dma termination error (%d)\n", error);

	dma_unmap_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE);
	dma_unmap_sg(qce->dev, &rctx->result_sg, 1, DMA_FROM_DEVICE);

	/*
	 * Keep the running digest in the request context (it seeds the next
	 * update round) and, when the caller provided a result buffer, hand
	 * the digest out as well.
	 */
	memcpy(rctx->digest, result->auth_iv, digestsize);
	if (req->result)
		memcpy(req->result, result->auth_iv, digestsize);

	/* hardware reports the processed byte count as a 32-bit pair */
	rctx->byte_count[0] = cpu_to_be32(result->auth_byte_count[0]);
	rctx->byte_count[1] = cpu_to_be32(result->auth_byte_count[1]);

	error = qce_check_status(qce, &status);
	if (error < 0)
		dev_dbg(qce->dev, "ahash operation error (%x)\n", status);

	/* undo the src/nbytes rewrite done by update/final before enqueue */
	req->src = rctx->src_orig;
	req->nbytes = rctx->nbytes_orig;
	rctx->last_blk = false;
	rctx->first_blk = false;

	qce->async_req_done(tmpl->qce, error);
}
68*4882a593Smuzhiyun
qce_ahash_async_req_handle(struct crypto_async_request * async_req)69*4882a593Smuzhiyun static int qce_ahash_async_req_handle(struct crypto_async_request *async_req)
70*4882a593Smuzhiyun {
71*4882a593Smuzhiyun struct ahash_request *req = ahash_request_cast(async_req);
72*4882a593Smuzhiyun struct qce_sha_reqctx *rctx = ahash_request_ctx(req);
73*4882a593Smuzhiyun struct qce_sha_ctx *ctx = crypto_tfm_ctx(async_req->tfm);
74*4882a593Smuzhiyun struct qce_alg_template *tmpl = to_ahash_tmpl(async_req->tfm);
75*4882a593Smuzhiyun struct qce_device *qce = tmpl->qce;
76*4882a593Smuzhiyun unsigned long flags = rctx->flags;
77*4882a593Smuzhiyun int ret;
78*4882a593Smuzhiyun
79*4882a593Smuzhiyun if (IS_SHA_HMAC(flags)) {
80*4882a593Smuzhiyun rctx->authkey = ctx->authkey;
81*4882a593Smuzhiyun rctx->authklen = QCE_SHA_HMAC_KEY_SIZE;
82*4882a593Smuzhiyun } else if (IS_CMAC(flags)) {
83*4882a593Smuzhiyun rctx->authkey = ctx->authkey;
84*4882a593Smuzhiyun rctx->authklen = AES_KEYSIZE_128;
85*4882a593Smuzhiyun }
86*4882a593Smuzhiyun
87*4882a593Smuzhiyun rctx->src_nents = sg_nents_for_len(req->src, req->nbytes);
88*4882a593Smuzhiyun if (rctx->src_nents < 0) {
89*4882a593Smuzhiyun dev_err(qce->dev, "Invalid numbers of src SG.\n");
90*4882a593Smuzhiyun return rctx->src_nents;
91*4882a593Smuzhiyun }
92*4882a593Smuzhiyun
93*4882a593Smuzhiyun ret = dma_map_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE);
94*4882a593Smuzhiyun if (ret < 0)
95*4882a593Smuzhiyun return ret;
96*4882a593Smuzhiyun
97*4882a593Smuzhiyun sg_init_one(&rctx->result_sg, qce->dma.result_buf, QCE_RESULT_BUF_SZ);
98*4882a593Smuzhiyun
99*4882a593Smuzhiyun ret = dma_map_sg(qce->dev, &rctx->result_sg, 1, DMA_FROM_DEVICE);
100*4882a593Smuzhiyun if (ret < 0)
101*4882a593Smuzhiyun goto error_unmap_src;
102*4882a593Smuzhiyun
103*4882a593Smuzhiyun ret = qce_dma_prep_sgs(&qce->dma, req->src, rctx->src_nents,
104*4882a593Smuzhiyun &rctx->result_sg, 1, qce_ahash_done, async_req);
105*4882a593Smuzhiyun if (ret)
106*4882a593Smuzhiyun goto error_unmap_dst;
107*4882a593Smuzhiyun
108*4882a593Smuzhiyun qce_dma_issue_pending(&qce->dma);
109*4882a593Smuzhiyun
110*4882a593Smuzhiyun ret = qce_start(async_req, tmpl->crypto_alg_type, 0, 0);
111*4882a593Smuzhiyun if (ret)
112*4882a593Smuzhiyun goto error_terminate;
113*4882a593Smuzhiyun
114*4882a593Smuzhiyun return 0;
115*4882a593Smuzhiyun
116*4882a593Smuzhiyun error_terminate:
117*4882a593Smuzhiyun qce_dma_terminate_all(&qce->dma);
118*4882a593Smuzhiyun error_unmap_dst:
119*4882a593Smuzhiyun dma_unmap_sg(qce->dev, &rctx->result_sg, 1, DMA_FROM_DEVICE);
120*4882a593Smuzhiyun error_unmap_src:
121*4882a593Smuzhiyun dma_unmap_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE);
122*4882a593Smuzhiyun return ret;
123*4882a593Smuzhiyun }
124*4882a593Smuzhiyun
qce_ahash_init(struct ahash_request * req)125*4882a593Smuzhiyun static int qce_ahash_init(struct ahash_request *req)
126*4882a593Smuzhiyun {
127*4882a593Smuzhiyun struct qce_sha_reqctx *rctx = ahash_request_ctx(req);
128*4882a593Smuzhiyun struct qce_alg_template *tmpl = to_ahash_tmpl(req->base.tfm);
129*4882a593Smuzhiyun const u32 *std_iv = tmpl->std_iv;
130*4882a593Smuzhiyun
131*4882a593Smuzhiyun memset(rctx, 0, sizeof(*rctx));
132*4882a593Smuzhiyun rctx->first_blk = true;
133*4882a593Smuzhiyun rctx->last_blk = false;
134*4882a593Smuzhiyun rctx->flags = tmpl->alg_flags;
135*4882a593Smuzhiyun memcpy(rctx->digest, std_iv, sizeof(rctx->digest));
136*4882a593Smuzhiyun
137*4882a593Smuzhiyun return 0;
138*4882a593Smuzhiyun }
139*4882a593Smuzhiyun
/*
 * Export the partial hash state (byte count, intermediate digest words
 * and the buffered not-yet-hashed tail) into the generic sha1/sha256
 * state layout so it can later be resumed via qce_ahash_import().
 *
 * Returns -EINVAL for algorithm flags with no matching state layout.
 */
static int qce_ahash_export(struct ahash_request *req, void *out)
{
	struct crypto_ahash *ahash = crypto_ahash_reqtfm(req);
	struct qce_sha_reqctx *rctx = ahash_request_ctx(req);
	unsigned long flags = rctx->flags;
	unsigned int digestsize = crypto_ahash_digestsize(ahash);
	unsigned int blocksize =
			crypto_tfm_alg_blocksize(crypto_ahash_tfm(ahash));

	if (IS_SHA1(flags) || IS_SHA1_HMAC(flags)) {
		struct sha1_state *out_state = out;

		out_state->count = rctx->count;
		/* convert digest words to the __be32 layout of state[] */
		qce_cpu_to_be32p_array((__be32 *)out_state->state,
				       rctx->digest, digestsize);
		memcpy(out_state->buffer, rctx->buf, blocksize);
	} else if (IS_SHA256(flags) || IS_SHA256_HMAC(flags)) {
		struct sha256_state *out_state = out;

		out_state->count = rctx->count;
		qce_cpu_to_be32p_array((__be32 *)out_state->state,
				       rctx->digest, digestsize);
		memcpy(out_state->buf, rctx->buf, blocksize);
	} else {
		return -EINVAL;
	}

	return 0;
}
169*4882a593Smuzhiyun
/*
 * Common helper for qce_ahash_import(): seed the request context from a
 * generic sha1/sha256 exported state (running count, digest words and
 * the buffered tail that has not yet been hashed).
 */
static int qce_import_common(struct ahash_request *req, u64 in_count,
			     const u32 *state, const u8 *buffer, bool hmac)
{
	struct crypto_ahash *ahash = crypto_ahash_reqtfm(req);
	struct qce_sha_reqctx *rctx = ahash_request_ctx(req);
	unsigned int digestsize = crypto_ahash_digestsize(ahash);
	unsigned int blocksize;
	u64 count = in_count;

	blocksize = crypto_tfm_alg_blocksize(crypto_ahash_tfm(ahash));
	rctx->count = in_count;
	memcpy(rctx->buf, buffer, blocksize);

	if (in_count <= blocksize) {
		rctx->first_blk = 1;
	} else {
		rctx->first_blk = 0;
		/*
		 * For HMAC, there is a hardware padding done when first block
		 * is set. Therefore the byte_count must be incremented by 64
		 * after the first block operation.
		 */
		if (hmac)
			count += SHA_PADDING;
	}

	/*
	 * byte_count is a 32-bit pair: the low word rounded down to a
	 * block multiple, the high word carrying the upper 32 bits.
	 */
	rctx->byte_count[0] = (__force __be32)(count & ~SHA_PADDING_MASK);
	rctx->byte_count[1] = (__force __be32)(count >> 32);
	qce_cpu_to_be32p_array((__be32 *)rctx->digest, (const u8 *)state,
			       digestsize);
	/* bytes that did not fill a whole block and still sit in rctx->buf */
	rctx->buflen = (unsigned int)(in_count & (blocksize - 1));

	return 0;
}
204*4882a593Smuzhiyun
qce_ahash_import(struct ahash_request * req,const void * in)205*4882a593Smuzhiyun static int qce_ahash_import(struct ahash_request *req, const void *in)
206*4882a593Smuzhiyun {
207*4882a593Smuzhiyun struct qce_sha_reqctx *rctx;
208*4882a593Smuzhiyun unsigned long flags;
209*4882a593Smuzhiyun bool hmac;
210*4882a593Smuzhiyun int ret;
211*4882a593Smuzhiyun
212*4882a593Smuzhiyun ret = qce_ahash_init(req);
213*4882a593Smuzhiyun if (ret)
214*4882a593Smuzhiyun return ret;
215*4882a593Smuzhiyun
216*4882a593Smuzhiyun rctx = ahash_request_ctx(req);
217*4882a593Smuzhiyun flags = rctx->flags;
218*4882a593Smuzhiyun hmac = IS_SHA_HMAC(flags);
219*4882a593Smuzhiyun
220*4882a593Smuzhiyun if (IS_SHA1(flags) || IS_SHA1_HMAC(flags)) {
221*4882a593Smuzhiyun const struct sha1_state *state = in;
222*4882a593Smuzhiyun
223*4882a593Smuzhiyun ret = qce_import_common(req, state->count, state->state,
224*4882a593Smuzhiyun state->buffer, hmac);
225*4882a593Smuzhiyun } else if (IS_SHA256(flags) || IS_SHA256_HMAC(flags)) {
226*4882a593Smuzhiyun const struct sha256_state *state = in;
227*4882a593Smuzhiyun
228*4882a593Smuzhiyun ret = qce_import_common(req, state->count, state->state,
229*4882a593Smuzhiyun state->buf, hmac);
230*4882a593Smuzhiyun }
231*4882a593Smuzhiyun
232*4882a593Smuzhiyun return ret;
233*4882a593Smuzhiyun }
234*4882a593Smuzhiyun
/*
 * Accumulate data until more than one block is available, then submit
 * all whole blocks to the hardware and keep the remainder buffered in
 * rctx->buf for the next update/final call.
 */
static int qce_ahash_update(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct qce_sha_reqctx *rctx = ahash_request_ctx(req);
	struct qce_alg_template *tmpl = to_ahash_tmpl(req->base.tfm);
	struct qce_device *qce = tmpl->qce;
	struct scatterlist *sg_last, *sg;
	unsigned int total, len;
	unsigned int hash_later;
	unsigned int nbytes;
	unsigned int blocksize;

	blocksize = crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
	rctx->count += req->nbytes;

	/* check for buffer from previous updates and append it */
	total = req->nbytes + rctx->buflen;

	/* everything still fits into the block buffer: just accumulate */
	if (total <= blocksize) {
		scatterwalk_map_and_copy(rctx->buf + rctx->buflen, req->src,
					 0, req->nbytes, 0);
		rctx->buflen += req->nbytes;
		return 0;
	}

	/* save the original req structure fields (restored on completion) */
	rctx->src_orig = req->src;
	rctx->nbytes_orig = req->nbytes;

	/*
	 * if we have data from previous update copy them on buffer. The old
	 * data will be combined with current request bytes.
	 */
	if (rctx->buflen)
		memcpy(rctx->tmpbuf, rctx->buf, rctx->buflen);

	/* calculate how many bytes will be hashed later */
	hash_later = total % blocksize;
	if (hash_later) {
		unsigned int src_offset = req->nbytes - hash_later;
		scatterwalk_map_and_copy(rctx->buf, req->src, src_offset,
					 hash_later, 0);
	}

	/* here nbytes is multiple of blocksize */
	nbytes = total - hash_later;

	/*
	 * Walk the source list to find the last entry that still fits
	 * entirely within nbytes; entries past it hold the tail saved above.
	 * NOTE(review): sg_dma_len() is read here before the list is DMA
	 * mapped -- presumably it mirrors the CPU length at this point;
	 * confirm against the DMA-API contract.
	 */
	len = rctx->buflen;
	sg = sg_last = req->src;

	while (len < nbytes && sg) {
		if (len + sg_dma_len(sg) > nbytes)
			break;
		len += sg_dma_len(sg);
		sg_last = sg;
		sg = sg_next(sg);
	}

	if (!sg_last)
		return -EINVAL;

	/* prepend the previously buffered tail via a two-entry chained sg */
	if (rctx->buflen) {
		sg_init_table(rctx->sg, 2);
		sg_set_buf(rctx->sg, rctx->tmpbuf, rctx->buflen);
		sg_chain(rctx->sg, 2, req->src);
		req->src = rctx->sg;
	}

	req->nbytes = nbytes;
	rctx->buflen = hash_later;

	return qce->async_req_enqueue(tmpl->qce, &req->base);
}
308*4882a593Smuzhiyun
qce_ahash_final(struct ahash_request * req)309*4882a593Smuzhiyun static int qce_ahash_final(struct ahash_request *req)
310*4882a593Smuzhiyun {
311*4882a593Smuzhiyun struct qce_sha_reqctx *rctx = ahash_request_ctx(req);
312*4882a593Smuzhiyun struct qce_alg_template *tmpl = to_ahash_tmpl(req->base.tfm);
313*4882a593Smuzhiyun struct qce_device *qce = tmpl->qce;
314*4882a593Smuzhiyun
315*4882a593Smuzhiyun if (!rctx->buflen) {
316*4882a593Smuzhiyun if (tmpl->hash_zero)
317*4882a593Smuzhiyun memcpy(req->result, tmpl->hash_zero,
318*4882a593Smuzhiyun tmpl->alg.ahash.halg.digestsize);
319*4882a593Smuzhiyun return 0;
320*4882a593Smuzhiyun }
321*4882a593Smuzhiyun
322*4882a593Smuzhiyun rctx->last_blk = true;
323*4882a593Smuzhiyun
324*4882a593Smuzhiyun rctx->src_orig = req->src;
325*4882a593Smuzhiyun rctx->nbytes_orig = req->nbytes;
326*4882a593Smuzhiyun
327*4882a593Smuzhiyun memcpy(rctx->tmpbuf, rctx->buf, rctx->buflen);
328*4882a593Smuzhiyun sg_init_one(rctx->sg, rctx->tmpbuf, rctx->buflen);
329*4882a593Smuzhiyun
330*4882a593Smuzhiyun req->src = rctx->sg;
331*4882a593Smuzhiyun req->nbytes = rctx->buflen;
332*4882a593Smuzhiyun
333*4882a593Smuzhiyun return qce->async_req_enqueue(tmpl->qce, &req->base);
334*4882a593Smuzhiyun }
335*4882a593Smuzhiyun
qce_ahash_digest(struct ahash_request * req)336*4882a593Smuzhiyun static int qce_ahash_digest(struct ahash_request *req)
337*4882a593Smuzhiyun {
338*4882a593Smuzhiyun struct qce_sha_reqctx *rctx = ahash_request_ctx(req);
339*4882a593Smuzhiyun struct qce_alg_template *tmpl = to_ahash_tmpl(req->base.tfm);
340*4882a593Smuzhiyun struct qce_device *qce = tmpl->qce;
341*4882a593Smuzhiyun int ret;
342*4882a593Smuzhiyun
343*4882a593Smuzhiyun ret = qce_ahash_init(req);
344*4882a593Smuzhiyun if (ret)
345*4882a593Smuzhiyun return ret;
346*4882a593Smuzhiyun
347*4882a593Smuzhiyun rctx->src_orig = req->src;
348*4882a593Smuzhiyun rctx->nbytes_orig = req->nbytes;
349*4882a593Smuzhiyun rctx->first_blk = true;
350*4882a593Smuzhiyun rctx->last_blk = true;
351*4882a593Smuzhiyun
352*4882a593Smuzhiyun if (!rctx->nbytes_orig) {
353*4882a593Smuzhiyun if (tmpl->hash_zero)
354*4882a593Smuzhiyun memcpy(req->result, tmpl->hash_zero,
355*4882a593Smuzhiyun tmpl->alg.ahash.halg.digestsize);
356*4882a593Smuzhiyun return 0;
357*4882a593Smuzhiyun }
358*4882a593Smuzhiyun
359*4882a593Smuzhiyun return qce->async_req_enqueue(tmpl->qce, &req->base);
360*4882a593Smuzhiyun }
361*4882a593Smuzhiyun
/*
 * HMAC setkey: keys up to one block are stored directly (zero padded);
 * longer keys are first digested down to digestsize using this driver's
 * own hash, per the HMAC construction.
 *
 * Returns 0 on success or a negative errno.
 */
static int qce_ahash_hmac_setkey(struct crypto_ahash *tfm, const u8 *key,
				 unsigned int keylen)
{
	unsigned int digestsize = crypto_ahash_digestsize(tfm);
	struct qce_sha_ctx *ctx = crypto_tfm_ctx(&tfm->base);
	struct crypto_wait wait;
	struct ahash_request *req;
	struct scatterlist sg;
	unsigned int blocksize;
	struct crypto_ahash *ahash_tfm;
	u8 *buf;
	int ret;
	const char *alg_name;

	blocksize = crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
	memset(ctx->authkey, 0, sizeof(ctx->authkey));

	/* short key: use as-is, remaining authkey bytes stay zero */
	if (keylen <= blocksize) {
		memcpy(ctx->authkey, key, keylen);
		return 0;
	}

	/* long key: select the matching qce hash to condense it */
	if (digestsize == SHA1_DIGEST_SIZE)
		alg_name = "sha1-qce";
	else if (digestsize == SHA256_DIGEST_SIZE)
		alg_name = "sha256-qce";
	else
		return -EINVAL;

	ahash_tfm = crypto_alloc_ahash(alg_name, 0, 0);
	if (IS_ERR(ahash_tfm))
		return PTR_ERR(ahash_tfm);

	req = ahash_request_alloc(ahash_tfm, GFP_KERNEL);
	if (!req) {
		ret = -ENOMEM;
		goto err_free_ahash;
	}

	/* run the digest synchronously via the crypto wait helper */
	crypto_init_wait(&wait);
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   crypto_req_done, &wait);
	crypto_ahash_clear_flags(ahash_tfm, ~0);

	/* copy the key into a kmalloc'd buffer suitable for DMA */
	buf = kzalloc(keylen + QCE_MAX_ALIGN_SIZE, GFP_KERNEL);
	if (!buf) {
		ret = -ENOMEM;
		goto err_free_req;
	}

	memcpy(buf, key, keylen);
	sg_init_one(&sg, buf, keylen);
	/* digest straight into authkey; tail stays zero from memset above */
	ahash_request_set_crypt(req, &sg, ctx->authkey, keylen);

	ret = crypto_wait_req(crypto_ahash_digest(req), &wait);

	kfree(buf);
err_free_req:
	ahash_request_free(req);
err_free_ahash:
	crypto_free_ahash(ahash_tfm);
	return ret;
}
425*4882a593Smuzhiyun
qce_ahash_cra_init(struct crypto_tfm * tfm)426*4882a593Smuzhiyun static int qce_ahash_cra_init(struct crypto_tfm *tfm)
427*4882a593Smuzhiyun {
428*4882a593Smuzhiyun struct crypto_ahash *ahash = __crypto_ahash_cast(tfm);
429*4882a593Smuzhiyun struct qce_sha_ctx *ctx = crypto_tfm_ctx(tfm);
430*4882a593Smuzhiyun
431*4882a593Smuzhiyun crypto_ahash_set_reqsize(ahash, sizeof(struct qce_sha_reqctx));
432*4882a593Smuzhiyun memset(ctx, 0, sizeof(*ctx));
433*4882a593Smuzhiyun return 0;
434*4882a593Smuzhiyun }
435*4882a593Smuzhiyun
/* Static description of one hash algorithm exposed by this driver. */
struct qce_ahash_def {
	unsigned long flags;		/* QCE_HASH_* algorithm/mode flags */
	const char *name;		/* generic crypto API name, e.g. "sha1" */
	const char *drv_name;		/* driver-unique name, e.g. "sha1-qce" */
	unsigned int digestsize;	/* digest length in bytes */
	unsigned int blocksize;		/* hash block size in bytes */
	unsigned int statesize;		/* export/import state size */
	const u32 *std_iv;		/* standard initial hash values */
};
445*4882a593Smuzhiyun
/* Algorithms registered by this driver: SHA-1/SHA-256, plain and HMAC. */
static const struct qce_ahash_def ahash_def[] = {
	{
		.flags		= QCE_HASH_SHA1,
		.name		= "sha1",
		.drv_name	= "sha1-qce",
		.digestsize	= SHA1_DIGEST_SIZE,
		.blocksize	= SHA1_BLOCK_SIZE,
		.statesize	= sizeof(struct sha1_state),
		.std_iv		= std_iv_sha1,
	},
	{
		.flags		= QCE_HASH_SHA256,
		.name		= "sha256",
		.drv_name	= "sha256-qce",
		.digestsize	= SHA256_DIGEST_SIZE,
		.blocksize	= SHA256_BLOCK_SIZE,
		.statesize	= sizeof(struct sha256_state),
		.std_iv		= std_iv_sha256,
	},
	{
		.flags		= QCE_HASH_SHA1_HMAC,
		.name		= "hmac(sha1)",
		.drv_name	= "hmac-sha1-qce",
		.digestsize	= SHA1_DIGEST_SIZE,
		.blocksize	= SHA1_BLOCK_SIZE,
		.statesize	= sizeof(struct sha1_state),
		.std_iv		= std_iv_sha1,
	},
	{
		.flags		= QCE_HASH_SHA256_HMAC,
		.name		= "hmac(sha256)",
		.drv_name	= "hmac-sha256-qce",
		.digestsize	= SHA256_DIGEST_SIZE,
		.blocksize	= SHA256_BLOCK_SIZE,
		.statesize	= sizeof(struct sha256_state),
		.std_iv		= std_iv_sha256,
	},
};
484*4882a593Smuzhiyun
/*
 * Build and register one ahash algorithm from its static definition.
 * On success the template is tracked on ahash_algs for later teardown;
 * on failure the template is freed and the error returned.
 */
static int qce_ahash_register_one(const struct qce_ahash_def *def,
				  struct qce_device *qce)
{
	struct qce_alg_template *tmpl;
	struct ahash_alg *alg;
	struct crypto_alg *base;
	int ret;

	tmpl = kzalloc(sizeof(*tmpl), GFP_KERNEL);
	if (!tmpl)
		return -ENOMEM;

	tmpl->std_iv = def->std_iv;

	/* hook up the ahash entry points implemented above */
	alg = &tmpl->alg.ahash;
	alg->init = qce_ahash_init;
	alg->update = qce_ahash_update;
	alg->final = qce_ahash_final;
	alg->digest = qce_ahash_digest;
	alg->export = qce_ahash_export;
	alg->import = qce_ahash_import;
	/* only HMAC variants take a key */
	if (IS_SHA_HMAC(def->flags))
		alg->setkey = qce_ahash_hmac_setkey;
	alg->halg.digestsize = def->digestsize;
	alg->halg.statesize = def->statesize;

	/* precomputed digests for the empty message (final/digest shortcut) */
	if (IS_SHA1(def->flags))
		tmpl->hash_zero = sha1_zero_message_hash;
	else if (IS_SHA256(def->flags))
		tmpl->hash_zero = sha256_zero_message_hash;

	base = &alg->halg.base;
	base->cra_blocksize = def->blocksize;
	base->cra_priority = 300;
	base->cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
	base->cra_ctxsize = sizeof(struct qce_sha_ctx);
	base->cra_alignmask = 0;
	base->cra_module = THIS_MODULE;
	base->cra_init = qce_ahash_cra_init;

	snprintf(base->cra_name, CRYPTO_MAX_ALG_NAME, "%s", def->name);
	snprintf(base->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
		 def->drv_name);

	INIT_LIST_HEAD(&tmpl->entry);
	tmpl->crypto_alg_type = CRYPTO_ALG_TYPE_AHASH;
	tmpl->alg_flags = def->flags;
	tmpl->qce = qce;

	ret = crypto_register_ahash(alg);
	if (ret) {
		dev_err(qce->dev, "%s registration failed\n", base->cra_name);
		kfree(tmpl);
		return ret;
	}

	list_add_tail(&tmpl->entry, &ahash_algs);
	dev_dbg(qce->dev, "%s is registered\n", base->cra_name);
	return 0;
}
545*4882a593Smuzhiyun
qce_ahash_unregister(struct qce_device * qce)546*4882a593Smuzhiyun static void qce_ahash_unregister(struct qce_device *qce)
547*4882a593Smuzhiyun {
548*4882a593Smuzhiyun struct qce_alg_template *tmpl, *n;
549*4882a593Smuzhiyun
550*4882a593Smuzhiyun list_for_each_entry_safe(tmpl, n, &ahash_algs, entry) {
551*4882a593Smuzhiyun crypto_unregister_ahash(&tmpl->alg.ahash);
552*4882a593Smuzhiyun list_del(&tmpl->entry);
553*4882a593Smuzhiyun kfree(tmpl);
554*4882a593Smuzhiyun }
555*4882a593Smuzhiyun }
556*4882a593Smuzhiyun
qce_ahash_register(struct qce_device * qce)557*4882a593Smuzhiyun static int qce_ahash_register(struct qce_device *qce)
558*4882a593Smuzhiyun {
559*4882a593Smuzhiyun int ret, i;
560*4882a593Smuzhiyun
561*4882a593Smuzhiyun for (i = 0; i < ARRAY_SIZE(ahash_def); i++) {
562*4882a593Smuzhiyun ret = qce_ahash_register_one(&ahash_def[i], qce);
563*4882a593Smuzhiyun if (ret)
564*4882a593Smuzhiyun goto err;
565*4882a593Smuzhiyun }
566*4882a593Smuzhiyun
567*4882a593Smuzhiyun return 0;
568*4882a593Smuzhiyun err:
569*4882a593Smuzhiyun qce_ahash_unregister(qce);
570*4882a593Smuzhiyun return ret;
571*4882a593Smuzhiyun }
572*4882a593Smuzhiyun
/* Hook table the qce core uses to drive this file's ahash support. */
const struct qce_algo_ops ahash_ops = {
	.type = CRYPTO_ALG_TYPE_AHASH,
	.register_algs = qce_ahash_register,
	.unregister_algs = qce_ahash_unregister,
	.async_req_handle = qce_ahash_async_req_handle,
};
579