// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/cryptouser.h>
#include <net/netlink.h>
#include <linux/compiler.h>

#include "internal.h"

static const struct crypto_type crypto_shash_type;

static int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
			   unsigned int keylen)
{
	return -ENOSYS;
}

/*
 * Check whether an shash algorithm has a setkey function.
 *
 * For CFI compatibility, this must not be an inline function. This is because
 * when CFI is enabled, modules won't get the same address for shash_no_setkey
 * (if it were exported, which inlining would require) as the core kernel will.
 */
bool crypto_shash_alg_has_setkey(struct shash_alg *alg)
{
	return alg->setkey != shash_no_setkey;
}
EXPORT_SYMBOL_GPL(crypto_shash_alg_has_setkey);

static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned long absize;
	u8 *buffer, *alignbuffer;
	int err;

	absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
	buffer = kmalloc(absize, GFP_ATOMIC);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	err = shash->setkey(tfm, alignbuffer, keylen);
	kfree_sensitive(buffer);
	return err;
}

static void shash_set_needkey(struct crypto_shash *tfm, struct shash_alg *alg)
{
	if (crypto_shash_alg_needs_key(alg))
		crypto_shash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
}

int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	int err;

	if ((unsigned long)key & alignmask)
		err = shash_setkey_unaligned(tfm, key, keylen);
	else
		err = shash->setkey(tfm, key, keylen);

	if (unlikely(err)) {
		shash_set_needkey(tfm, shash);
		return err;
	}

	crypto_shash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);
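
/*
 * Minimal caller-side sketch (illustrative only, not part of this file's
 * implementation): a keyed hash such as "hmac(sha256)" must have its key
 * installed before any digest is computed.  The algorithm name and the
 * key/keylen variables below are assumptions made for the example.
 *
 *	struct crypto_shash *tfm;
 *	int err;
 *
 *	tfm = crypto_alloc_shash("hmac(sha256)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_shash_setkey(tfm, key, keylen);
 *	if (err)
 *		goto out_free;
 *	...
 * out_free:
 *	crypto_free_shash(tfm);
 */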

static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned int unaligned_len = alignmask + 1 -
				     ((unsigned long)data & alignmask);
	/*
	 * We cannot count on __aligned() working for large values:
	 * https://patchwork.kernel.org/patch/9507697/
	 */
	u8 ubuf[MAX_ALGAPI_ALIGNMASK * 2];
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (WARN_ON(buf + unaligned_len > ubuf + sizeof(ubuf)))
		return -EINVAL;

	if (unaligned_len > len)
		unaligned_len = len;

	memcpy(buf, data, unaligned_len);
	err = shash->update(desc, buf, unaligned_len);
	memset(buf, 0, unaligned_len);

	return err ?:
	       shash->update(desc, data + unaligned_len, len - unaligned_len);
}

int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)data & alignmask)
		return shash_update_unaligned(desc, data, len);

	return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);

static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned int ds = crypto_shash_digestsize(tfm);
	/*
	 * We cannot count on __aligned() working for large values:
	 * https://patchwork.kernel.org/patch/9507697/
	 */
	u8 ubuf[MAX_ALGAPI_ALIGNMASK + HASH_MAX_DIGESTSIZE];
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (WARN_ON(buf + ds > ubuf + sizeof(ubuf)))
		return -EINVAL;

	err = shash->final(desc, buf);
	if (err)
		goto out;

	memcpy(out, buf, ds);

out:
	memset(buf, 0, ds);
	return err;
}

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)out & alignmask)
		return shash_final_unaligned(desc, out);

	return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);
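
/*
 * Minimal caller-side sketch of the incremental interface (illustrative
 * only, not part of this file's implementation): hash two buffers with one
 * descriptor.  The tfm, buffer/length variables and digest buffer are
 * assumptions made for the example.
 *
 *	SHASH_DESC_ON_STACK(desc, tfm);
 *	int err;
 *
 *	desc->tfm = tfm;
 *	err = crypto_shash_init(desc) ?:
 *	      crypto_shash_update(desc, buf1, len1) ?:
 *	      crypto_shash_update(desc, buf2, len2) ?:
 *	      crypto_shash_final(desc, digest);
 *	shash_desc_zero(desc);
 */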

static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
				 unsigned int len, u8 *out)
{
	return crypto_shash_update(desc, data, len) ?:
	       crypto_shash_final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_finup_unaligned(desc, data, len, out);

	return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len, u8 *out)
{
	return crypto_shash_init(desc) ?:
	       crypto_shash_finup(desc, data, len, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_digest_unaligned(desc, data, len, out);

	return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);

int crypto_shash_tfm_digest(struct crypto_shash *tfm, const u8 *data,
			    unsigned int len, u8 *out)
{
	SHASH_DESC_ON_STACK(desc, tfm);
	int err;

	desc->tfm = tfm;

	err = crypto_shash_digest(desc, data, len, out);

	shash_desc_zero(desc);

	return err;
}
EXPORT_SYMBOL_GPL(crypto_shash_tfm_digest);
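
/*
 * Minimal caller-side sketch of the one-shot interface (illustrative only,
 * not part of this file's implementation): allocate a transform, digest a
 * single buffer and free it again.  The "sha256" name and the data/len/out
 * variables are assumptions made for the example.
 *
 *	struct crypto_shash *tfm;
 *	int err;
 *
 *	tfm = crypto_alloc_shash("sha256", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	err = crypto_shash_tfm_digest(tfm, data, len, out);
 *	crypto_free_shash(tfm);
 */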

static int shash_default_export(struct shash_desc *desc, void *out)
{
	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_default_import(struct shash_desc *desc, const void *in)
{
	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

	return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return crypto_shash_init(desc);
}

int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);

static int shash_async_update(struct ahash_request *req)
{
	return shash_ahash_update(req, ahash_request_ctx(req));
}

static int shash_async_final(struct ahash_request *req)
{
	return crypto_shash_final(ahash_request_ctx(req), req->result);
}

int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		return crypto_shash_final(desc, req->result);

	do {
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

static int shash_async_finup(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return shash_ahash_finup(req, desc);
}

int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	unsigned int nbytes = req->nbytes;
	struct scatterlist *sg;
	unsigned int offset;
	int err;

	if (nbytes &&
	    (sg = req->src, offset = sg->offset,
	     nbytes <= min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
		void *data;

		data = kmap_atomic(sg_page(sg));
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		kunmap_atomic(data);
	} else
		err = crypto_shash_init(desc) ?:
		      shash_ahash_finup(req, desc);

	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

static int shash_async_digest(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return shash_ahash_digest(req, desc);
}

static int shash_async_export(struct ahash_request *req, void *out)
{
	return crypto_shash_export(ahash_request_ctx(req), out);
}

static int shash_async_import(struct ahash_request *req, const void *in)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return crypto_shash_import(desc, in);
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}

int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	*ctx = shash;
	tfm->exit = crypto_exit_shash_ops_async;

	crt->init = shash_async_init;
	crt->update = shash_async_update;
	crt->final = shash_async_final;
	crt->finup = shash_async_finup;
	crt->digest = shash_async_digest;
	if (crypto_shash_alg_has_setkey(alg))
		crt->setkey = shash_async_setkey;

	crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
				    CRYPTO_TFM_NEED_KEY);

	crt->export = shash_async_export;
	crt->import = shash_async_import;

	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}

static void crypto_shash_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);
	struct shash_alg *alg = crypto_shash_alg(hash);

	alg->exit_tfm(hash);
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);
	struct shash_alg *alg = crypto_shash_alg(hash);
	int err;

	hash->descsize = alg->descsize;

	shash_set_needkey(hash, alg);

	if (alg->exit_tfm)
		tfm->exit = crypto_shash_exit_tfm;

	if (!alg->init_tfm)
		return 0;

	err = alg->init_tfm(hash);
	if (err)
		return err;

	/* ->init_tfm() may have increased the descsize. */
	if (WARN_ON_ONCE(hash->descsize > HASH_MAX_DESCSIZE)) {
		if (alg->exit_tfm)
			alg->exit_tfm(hash);
		return -EINVAL;
	}

	return 0;
}

static void crypto_shash_free_instance(struct crypto_instance *inst)
{
	struct shash_instance *shash = shash_instance(inst);

	shash->free(shash);
}

#ifdef CONFIG_NET
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;
	struct shash_alg *salg = __crypto_shash_alg(alg);

	memset(&rhash, 0, sizeof(rhash));

	strscpy(rhash.type, "shash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = salg->digestsize;

	return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
}
#else
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	return -ENOSYS;
}
#endif

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}

static const struct crypto_type crypto_shash_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_shash_init_tfm,
	.free = crypto_shash_free_instance,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
	.report = crypto_shash_report,
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};

int crypto_grab_shash(struct crypto_shash_spawn *spawn,
		      struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	spawn->base.frontend = &crypto_shash_type;
	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_shash);

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);

static int shash_prepare_alg(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > HASH_MAX_DIGESTSIZE ||
	    alg->descsize > HASH_MAX_DESCSIZE ||
	    alg->statesize > HASH_MAX_STATESIZE)
		return -EINVAL;

	if ((alg->export && !alg->import) || (alg->import && !alg->export))
		return -EINVAL;

	base->cra_type = &crypto_shash_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	if (!alg->finup)
		alg->finup = shash_finup_unaligned;
	if (!alg->digest)
		alg->digest = shash_digest_unaligned;
	if (!alg->export) {
		alg->export = shash_default_export;
		alg->import = shash_default_import;
		alg->statesize = alg->descsize;
	}
	if (!alg->setkey)
		alg->setkey = shash_no_setkey;

	return 0;
}

int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = shash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);

void crypto_unregister_shash(struct shash_alg *alg)
{
	crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

int crypto_register_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_shash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_shashes);

void crypto_unregister_shashes(struct shash_alg *algs, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shashes);

int shash_register_instance(struct crypto_template *tmpl,
			    struct shash_instance *inst)
{
	int err;

	if (WARN_ON(!inst->free))
		return -EINVAL;

	err = shash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_singlespawn_instance(struct shash_instance *inst)
{
	crypto_drop_spawn(shash_instance_ctx(inst));
	kfree(inst);
}
EXPORT_SYMBOL_GPL(shash_free_singlespawn_instance);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");