xref: /OK3568_Linux_fs/kernel/crypto/algapi.c (revision 4882a59341e53eb6f0b4789bf948001014eff981)
1*4882a593Smuzhiyun // SPDX-License-Identifier: GPL-2.0-or-later
2*4882a593Smuzhiyun /*
3*4882a593Smuzhiyun  * Cryptographic API for algorithms (i.e., low-level API).
4*4882a593Smuzhiyun  *
5*4882a593Smuzhiyun  * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
6*4882a593Smuzhiyun  */
7*4882a593Smuzhiyun 
8*4882a593Smuzhiyun #include <crypto/algapi.h>
9*4882a593Smuzhiyun #include <linux/err.h>
10*4882a593Smuzhiyun #include <linux/errno.h>
11*4882a593Smuzhiyun #include <linux/fips.h>
12*4882a593Smuzhiyun #include <linux/init.h>
13*4882a593Smuzhiyun #include <linux/kernel.h>
14*4882a593Smuzhiyun #include <linux/list.h>
15*4882a593Smuzhiyun #include <linux/module.h>
16*4882a593Smuzhiyun #include <linux/rtnetlink.h>
17*4882a593Smuzhiyun #include <linux/slab.h>
18*4882a593Smuzhiyun #include <linux/string.h>
19*4882a593Smuzhiyun 
20*4882a593Smuzhiyun #include "internal.h"
21*4882a593Smuzhiyun 
22*4882a593Smuzhiyun static LIST_HEAD(crypto_template_list);
23*4882a593Smuzhiyun 
24*4882a593Smuzhiyun static inline void crypto_check_module_sig(struct module *mod)
25*4882a593Smuzhiyun {
26*4882a593Smuzhiyun 	if (fips_enabled && mod && !module_sig_ok(mod))
27*4882a593Smuzhiyun 		panic("Module %s signature verification failed in FIPS mode\n",
28*4882a593Smuzhiyun 		      module_name(mod));
29*4882a593Smuzhiyun }
30*4882a593Smuzhiyun 
31*4882a593Smuzhiyun static int crypto_check_alg(struct crypto_alg *alg)
32*4882a593Smuzhiyun {
33*4882a593Smuzhiyun 	crypto_check_module_sig(alg->cra_module);
34*4882a593Smuzhiyun 
35*4882a593Smuzhiyun 	if (!alg->cra_name[0] || !alg->cra_driver_name[0])
36*4882a593Smuzhiyun 		return -EINVAL;
37*4882a593Smuzhiyun 
38*4882a593Smuzhiyun 	if (alg->cra_alignmask & (alg->cra_alignmask + 1))
39*4882a593Smuzhiyun 		return -EINVAL;
40*4882a593Smuzhiyun 
41*4882a593Smuzhiyun 	/* General maximums for all algs. */
42*4882a593Smuzhiyun 	if (alg->cra_alignmask > MAX_ALGAPI_ALIGNMASK)
43*4882a593Smuzhiyun 		return -EINVAL;
44*4882a593Smuzhiyun 
45*4882a593Smuzhiyun 	if (alg->cra_blocksize > MAX_ALGAPI_BLOCKSIZE)
46*4882a593Smuzhiyun 		return -EINVAL;
47*4882a593Smuzhiyun 
48*4882a593Smuzhiyun 	/* Lower maximums for specific alg types. */
49*4882a593Smuzhiyun 	if (!alg->cra_type && (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
50*4882a593Smuzhiyun 			       CRYPTO_ALG_TYPE_CIPHER) {
51*4882a593Smuzhiyun 		if (alg->cra_alignmask > MAX_CIPHER_ALIGNMASK)
52*4882a593Smuzhiyun 			return -EINVAL;
53*4882a593Smuzhiyun 
54*4882a593Smuzhiyun 		if (alg->cra_blocksize > MAX_CIPHER_BLOCKSIZE)
55*4882a593Smuzhiyun 			return -EINVAL;
56*4882a593Smuzhiyun 	}
57*4882a593Smuzhiyun 
58*4882a593Smuzhiyun 	if (alg->cra_priority < 0)
59*4882a593Smuzhiyun 		return -EINVAL;
60*4882a593Smuzhiyun 
61*4882a593Smuzhiyun 	refcount_set(&alg->cra_refcnt, 1);
62*4882a593Smuzhiyun 
63*4882a593Smuzhiyun 	return 0;
64*4882a593Smuzhiyun }
65*4882a593Smuzhiyun 
66*4882a593Smuzhiyun static void crypto_free_instance(struct crypto_instance *inst)
67*4882a593Smuzhiyun {
68*4882a593Smuzhiyun 	inst->alg.cra_type->free(inst);
69*4882a593Smuzhiyun }
70*4882a593Smuzhiyun 
71*4882a593Smuzhiyun static void crypto_destroy_instance(struct crypto_alg *alg)
72*4882a593Smuzhiyun {
73*4882a593Smuzhiyun 	struct crypto_instance *inst = (void *)alg;
74*4882a593Smuzhiyun 	struct crypto_template *tmpl = inst->tmpl;
75*4882a593Smuzhiyun 
76*4882a593Smuzhiyun 	crypto_free_instance(inst);
77*4882a593Smuzhiyun 	crypto_tmpl_put(tmpl);
78*4882a593Smuzhiyun }
79*4882a593Smuzhiyun 
80*4882a593Smuzhiyun /*
81*4882a593Smuzhiyun  * This function adds a spawn to the list secondary_spawns which
82*4882a593Smuzhiyun  * will be used at the end of crypto_remove_spawns to unregister
83*4882a593Smuzhiyun  * instances, unless the spawn happens to be one that is depended
84*4882a593Smuzhiyun  * on by the new algorithm (nalg in crypto_remove_spawns).
85*4882a593Smuzhiyun  *
86*4882a593Smuzhiyun  * This function is also responsible for resurrecting any algorithms
87*4882a593Smuzhiyun  * in the dependency chain of nalg by unsetting n->dead.
88*4882a593Smuzhiyun  */
89*4882a593Smuzhiyun static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
90*4882a593Smuzhiyun 					    struct list_head *stack,
91*4882a593Smuzhiyun 					    struct list_head *top,
92*4882a593Smuzhiyun 					    struct list_head *secondary_spawns)
93*4882a593Smuzhiyun {
94*4882a593Smuzhiyun 	struct crypto_spawn *spawn, *n;
95*4882a593Smuzhiyun 
96*4882a593Smuzhiyun 	spawn = list_first_entry_or_null(stack, struct crypto_spawn, list);
97*4882a593Smuzhiyun 	if (!spawn)
98*4882a593Smuzhiyun 		return NULL;
99*4882a593Smuzhiyun 
100*4882a593Smuzhiyun 	n = list_prev_entry(spawn, list);
101*4882a593Smuzhiyun 	list_move(&spawn->list, secondary_spawns);
102*4882a593Smuzhiyun 
103*4882a593Smuzhiyun 	if (list_is_last(&n->list, stack))
104*4882a593Smuzhiyun 		return top;
105*4882a593Smuzhiyun 
106*4882a593Smuzhiyun 	n = list_next_entry(n, list);
107*4882a593Smuzhiyun 	if (!spawn->dead)
108*4882a593Smuzhiyun 		n->dead = false;
109*4882a593Smuzhiyun 
110*4882a593Smuzhiyun 	return &n->inst->alg.cra_users;
111*4882a593Smuzhiyun }
112*4882a593Smuzhiyun 
113*4882a593Smuzhiyun static void crypto_remove_instance(struct crypto_instance *inst,
114*4882a593Smuzhiyun 				   struct list_head *list)
115*4882a593Smuzhiyun {
116*4882a593Smuzhiyun 	struct crypto_template *tmpl = inst->tmpl;
117*4882a593Smuzhiyun 
118*4882a593Smuzhiyun 	if (crypto_is_dead(&inst->alg))
119*4882a593Smuzhiyun 		return;
120*4882a593Smuzhiyun 
121*4882a593Smuzhiyun 	inst->alg.cra_flags |= CRYPTO_ALG_DEAD;
122*4882a593Smuzhiyun 
123*4882a593Smuzhiyun 	if (!tmpl || !crypto_tmpl_get(tmpl))
124*4882a593Smuzhiyun 		return;
125*4882a593Smuzhiyun 
126*4882a593Smuzhiyun 	list_move(&inst->alg.cra_list, list);
127*4882a593Smuzhiyun 	hlist_del(&inst->list);
128*4882a593Smuzhiyun 	inst->alg.cra_destroy = crypto_destroy_instance;
129*4882a593Smuzhiyun 
130*4882a593Smuzhiyun 	BUG_ON(!list_empty(&inst->alg.cra_users));
131*4882a593Smuzhiyun }
132*4882a593Smuzhiyun 
133*4882a593Smuzhiyun /*
134*4882a593Smuzhiyun  * Given an algorithm alg, remove all algorithms that depend on it
135*4882a593Smuzhiyun  * through spawns.  If nalg is not null, then exempt any algorithms
136*4882a593Smuzhiyun  * that are depended on by nalg.  This is useful when nalg itself
137*4882a593Smuzhiyun  * depends on alg.
138*4882a593Smuzhiyun  */
139*4882a593Smuzhiyun void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
140*4882a593Smuzhiyun 			  struct crypto_alg *nalg)
141*4882a593Smuzhiyun {
142*4882a593Smuzhiyun 	u32 new_type = (nalg ?: alg)->cra_flags;
143*4882a593Smuzhiyun 	struct crypto_spawn *spawn, *n;
144*4882a593Smuzhiyun 	LIST_HEAD(secondary_spawns);
145*4882a593Smuzhiyun 	struct list_head *spawns;
146*4882a593Smuzhiyun 	LIST_HEAD(stack);
147*4882a593Smuzhiyun 	LIST_HEAD(top);
148*4882a593Smuzhiyun 
149*4882a593Smuzhiyun 	spawns = &alg->cra_users;
150*4882a593Smuzhiyun 	list_for_each_entry_safe(spawn, n, spawns, list) {
151*4882a593Smuzhiyun 		if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
152*4882a593Smuzhiyun 			continue;
153*4882a593Smuzhiyun 
154*4882a593Smuzhiyun 		list_move(&spawn->list, &top);
155*4882a593Smuzhiyun 	}
156*4882a593Smuzhiyun 
157*4882a593Smuzhiyun 	/*
158*4882a593Smuzhiyun 	 * Perform a depth-first walk starting from alg through
159*4882a593Smuzhiyun 	 * the cra_users tree.  The list stack records the path
160*4882a593Smuzhiyun 	 * from alg to the current spawn.
161*4882a593Smuzhiyun 	 */
162*4882a593Smuzhiyun 	spawns = &top;
163*4882a593Smuzhiyun 	do {
164*4882a593Smuzhiyun 		while (!list_empty(spawns)) {
165*4882a593Smuzhiyun 			struct crypto_instance *inst;
166*4882a593Smuzhiyun 
167*4882a593Smuzhiyun 			spawn = list_first_entry(spawns, struct crypto_spawn,
168*4882a593Smuzhiyun 						 list);
169*4882a593Smuzhiyun 			inst = spawn->inst;
170*4882a593Smuzhiyun 
171*4882a593Smuzhiyun 			list_move(&spawn->list, &stack);
172*4882a593Smuzhiyun 			spawn->dead = !spawn->registered || &inst->alg != nalg;
173*4882a593Smuzhiyun 
174*4882a593Smuzhiyun 			if (!spawn->registered)
175*4882a593Smuzhiyun 				break;
176*4882a593Smuzhiyun 
177*4882a593Smuzhiyun 			BUG_ON(&inst->alg == alg);
178*4882a593Smuzhiyun 
179*4882a593Smuzhiyun 			if (&inst->alg == nalg)
180*4882a593Smuzhiyun 				break;
181*4882a593Smuzhiyun 
182*4882a593Smuzhiyun 			spawns = &inst->alg.cra_users;
183*4882a593Smuzhiyun 
184*4882a593Smuzhiyun 			/*
185*4882a593Smuzhiyun 			 * Even if spawn->registered is true, the
186*4882a593Smuzhiyun 			 * instance itself may still be unregistered.
187*4882a593Smuzhiyun 			 * This is because it may have failed during
188*4882a593Smuzhiyun 			 * registration.  Therefore we still need to
189*4882a593Smuzhiyun 			 * make the following test.
190*4882a593Smuzhiyun 			 *
191*4882a593Smuzhiyun 			 * We may encounter an unregistered instance here, since
192*4882a593Smuzhiyun 			 * an instance's spawns are set up prior to the instance
193*4882a593Smuzhiyun 			 * being registered.  An unregistered instance will have
194*4882a593Smuzhiyun 			 * NULL ->cra_users.next, since ->cra_users isn't
195*4882a593Smuzhiyun 			 * properly initialized until registration.  But an
196*4882a593Smuzhiyun 			 * unregistered instance cannot have any users, so treat
197*4882a593Smuzhiyun 			 * it the same as ->cra_users being empty.
198*4882a593Smuzhiyun 			 */
199*4882a593Smuzhiyun 			if (spawns->next == NULL)
200*4882a593Smuzhiyun 				break;
201*4882a593Smuzhiyun 		}
202*4882a593Smuzhiyun 	} while ((spawns = crypto_more_spawns(alg, &stack, &top,
203*4882a593Smuzhiyun 					      &secondary_spawns)));
204*4882a593Smuzhiyun 
205*4882a593Smuzhiyun 	/*
206*4882a593Smuzhiyun 	 * Remove all instances that are marked as dead.  Also
207*4882a593Smuzhiyun 	 * complete the resurrection of the others by moving them
208*4882a593Smuzhiyun 	 * back to the cra_users list.
209*4882a593Smuzhiyun 	 */
210*4882a593Smuzhiyun 	list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
211*4882a593Smuzhiyun 		if (!spawn->dead)
212*4882a593Smuzhiyun 			list_move(&spawn->list, &spawn->alg->cra_users);
213*4882a593Smuzhiyun 		else if (spawn->registered)
214*4882a593Smuzhiyun 			crypto_remove_instance(spawn->inst, list);
215*4882a593Smuzhiyun 	}
216*4882a593Smuzhiyun }
217*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_remove_spawns);
218*4882a593Smuzhiyun 
219*4882a593Smuzhiyun static struct crypto_larval *__crypto_register_alg(struct crypto_alg *alg)
220*4882a593Smuzhiyun {
221*4882a593Smuzhiyun 	struct crypto_alg *q;
222*4882a593Smuzhiyun 	struct crypto_larval *larval;
223*4882a593Smuzhiyun 	int ret = -EAGAIN;
224*4882a593Smuzhiyun 
225*4882a593Smuzhiyun 	if (crypto_is_dead(alg))
226*4882a593Smuzhiyun 		goto err;
227*4882a593Smuzhiyun 
228*4882a593Smuzhiyun 	INIT_LIST_HEAD(&alg->cra_users);
229*4882a593Smuzhiyun 
230*4882a593Smuzhiyun 	/* No cheating! */
231*4882a593Smuzhiyun 	alg->cra_flags &= ~CRYPTO_ALG_TESTED;
232*4882a593Smuzhiyun 
233*4882a593Smuzhiyun 	ret = -EEXIST;
234*4882a593Smuzhiyun 
235*4882a593Smuzhiyun 	list_for_each_entry(q, &crypto_alg_list, cra_list) {
236*4882a593Smuzhiyun 		if (q == alg)
237*4882a593Smuzhiyun 			goto err;
238*4882a593Smuzhiyun 
239*4882a593Smuzhiyun 		if (crypto_is_moribund(q))
240*4882a593Smuzhiyun 			continue;
241*4882a593Smuzhiyun 
242*4882a593Smuzhiyun 		if (crypto_is_larval(q)) {
243*4882a593Smuzhiyun 			if (!strcmp(alg->cra_driver_name, q->cra_driver_name))
244*4882a593Smuzhiyun 				goto err;
245*4882a593Smuzhiyun 			continue;
246*4882a593Smuzhiyun 		}
247*4882a593Smuzhiyun 
248*4882a593Smuzhiyun 		if (!strcmp(q->cra_driver_name, alg->cra_name) ||
249*4882a593Smuzhiyun 		    !strcmp(q->cra_name, alg->cra_driver_name))
250*4882a593Smuzhiyun 			goto err;
251*4882a593Smuzhiyun 	}
252*4882a593Smuzhiyun 
253*4882a593Smuzhiyun 	larval = crypto_larval_alloc(alg->cra_name,
254*4882a593Smuzhiyun 				     alg->cra_flags | CRYPTO_ALG_TESTED, 0);
255*4882a593Smuzhiyun 	if (IS_ERR(larval))
256*4882a593Smuzhiyun 		goto out;
257*4882a593Smuzhiyun 
258*4882a593Smuzhiyun 	ret = -ENOENT;
259*4882a593Smuzhiyun 	larval->adult = crypto_mod_get(alg);
260*4882a593Smuzhiyun 	if (!larval->adult)
261*4882a593Smuzhiyun 		goto free_larval;
262*4882a593Smuzhiyun 
263*4882a593Smuzhiyun 	refcount_set(&larval->alg.cra_refcnt, 1);
264*4882a593Smuzhiyun 	memcpy(larval->alg.cra_driver_name, alg->cra_driver_name,
265*4882a593Smuzhiyun 	       CRYPTO_MAX_ALG_NAME);
266*4882a593Smuzhiyun 	larval->alg.cra_priority = alg->cra_priority;
267*4882a593Smuzhiyun 
268*4882a593Smuzhiyun 	list_add(&alg->cra_list, &crypto_alg_list);
269*4882a593Smuzhiyun 	list_add(&larval->alg.cra_list, &crypto_alg_list);
270*4882a593Smuzhiyun 
271*4882a593Smuzhiyun 	crypto_stats_init(alg);
272*4882a593Smuzhiyun 
273*4882a593Smuzhiyun out:
274*4882a593Smuzhiyun 	return larval;
275*4882a593Smuzhiyun 
276*4882a593Smuzhiyun free_larval:
277*4882a593Smuzhiyun 	kfree(larval);
278*4882a593Smuzhiyun err:
279*4882a593Smuzhiyun 	larval = ERR_PTR(ret);
280*4882a593Smuzhiyun 	goto out;
281*4882a593Smuzhiyun }
282*4882a593Smuzhiyun 
283*4882a593Smuzhiyun void crypto_alg_tested(const char *name, int err)
284*4882a593Smuzhiyun {
285*4882a593Smuzhiyun 	struct crypto_larval *test;
286*4882a593Smuzhiyun 	struct crypto_alg *alg;
287*4882a593Smuzhiyun 	struct crypto_alg *q;
288*4882a593Smuzhiyun 	LIST_HEAD(list);
289*4882a593Smuzhiyun 	bool best;
290*4882a593Smuzhiyun 
291*4882a593Smuzhiyun 	down_write(&crypto_alg_sem);
292*4882a593Smuzhiyun 	list_for_each_entry(q, &crypto_alg_list, cra_list) {
293*4882a593Smuzhiyun 		if (crypto_is_moribund(q) || !crypto_is_larval(q))
294*4882a593Smuzhiyun 			continue;
295*4882a593Smuzhiyun 
296*4882a593Smuzhiyun 		test = (struct crypto_larval *)q;
297*4882a593Smuzhiyun 
298*4882a593Smuzhiyun 		if (!strcmp(q->cra_driver_name, name))
299*4882a593Smuzhiyun 			goto found;
300*4882a593Smuzhiyun 	}
301*4882a593Smuzhiyun 
302*4882a593Smuzhiyun 	pr_err("alg: Unexpected test result for %s: %d\n", name, err);
303*4882a593Smuzhiyun 	goto unlock;
304*4882a593Smuzhiyun 
305*4882a593Smuzhiyun found:
306*4882a593Smuzhiyun 	q->cra_flags |= CRYPTO_ALG_DEAD;
307*4882a593Smuzhiyun 	alg = test->adult;
308*4882a593Smuzhiyun 	if (err || list_empty(&alg->cra_list))
309*4882a593Smuzhiyun 		goto complete;
310*4882a593Smuzhiyun 
311*4882a593Smuzhiyun 	alg->cra_flags |= CRYPTO_ALG_TESTED;
312*4882a593Smuzhiyun 
313*4882a593Smuzhiyun 	/* Only satisfy larval waiters if we are the best. */
314*4882a593Smuzhiyun 	best = true;
315*4882a593Smuzhiyun 	list_for_each_entry(q, &crypto_alg_list, cra_list) {
316*4882a593Smuzhiyun 		if (crypto_is_moribund(q) || !crypto_is_larval(q))
317*4882a593Smuzhiyun 			continue;
318*4882a593Smuzhiyun 
319*4882a593Smuzhiyun 		if (strcmp(alg->cra_name, q->cra_name))
320*4882a593Smuzhiyun 			continue;
321*4882a593Smuzhiyun 
322*4882a593Smuzhiyun 		if (q->cra_priority > alg->cra_priority) {
323*4882a593Smuzhiyun 			best = false;
324*4882a593Smuzhiyun 			break;
325*4882a593Smuzhiyun 		}
326*4882a593Smuzhiyun 	}
327*4882a593Smuzhiyun 
328*4882a593Smuzhiyun 	list_for_each_entry(q, &crypto_alg_list, cra_list) {
329*4882a593Smuzhiyun 		if (q == alg)
330*4882a593Smuzhiyun 			continue;
331*4882a593Smuzhiyun 
332*4882a593Smuzhiyun 		if (crypto_is_moribund(q))
333*4882a593Smuzhiyun 			continue;
334*4882a593Smuzhiyun 
335*4882a593Smuzhiyun 		if (crypto_is_larval(q)) {
336*4882a593Smuzhiyun 			struct crypto_larval *larval = (void *)q;
337*4882a593Smuzhiyun 
338*4882a593Smuzhiyun 			/*
339*4882a593Smuzhiyun 			 * Check to see if either our generic name or
340*4882a593Smuzhiyun 			 * specific name can satisfy the name requested
341*4882a593Smuzhiyun 			 * by the larval entry q.
342*4882a593Smuzhiyun 			 */
343*4882a593Smuzhiyun 			if (strcmp(alg->cra_name, q->cra_name) &&
344*4882a593Smuzhiyun 			    strcmp(alg->cra_driver_name, q->cra_name))
345*4882a593Smuzhiyun 				continue;
346*4882a593Smuzhiyun 
347*4882a593Smuzhiyun 			if (larval->adult)
348*4882a593Smuzhiyun 				continue;
349*4882a593Smuzhiyun 			if ((q->cra_flags ^ alg->cra_flags) & larval->mask)
350*4882a593Smuzhiyun 				continue;
351*4882a593Smuzhiyun 
352*4882a593Smuzhiyun 			if (best && crypto_mod_get(alg))
353*4882a593Smuzhiyun 				larval->adult = alg;
354*4882a593Smuzhiyun 			else
355*4882a593Smuzhiyun 				larval->adult = ERR_PTR(-EAGAIN);
356*4882a593Smuzhiyun 
357*4882a593Smuzhiyun 			continue;
358*4882a593Smuzhiyun 		}
359*4882a593Smuzhiyun 
360*4882a593Smuzhiyun 		if (strcmp(alg->cra_name, q->cra_name))
361*4882a593Smuzhiyun 			continue;
362*4882a593Smuzhiyun 
363*4882a593Smuzhiyun 		if (strcmp(alg->cra_driver_name, q->cra_driver_name) &&
364*4882a593Smuzhiyun 		    q->cra_priority > alg->cra_priority)
365*4882a593Smuzhiyun 			continue;
366*4882a593Smuzhiyun 
367*4882a593Smuzhiyun 		crypto_remove_spawns(q, &list, alg);
368*4882a593Smuzhiyun 	}
369*4882a593Smuzhiyun 
370*4882a593Smuzhiyun complete:
371*4882a593Smuzhiyun 	complete_all(&test->completion);
372*4882a593Smuzhiyun 
373*4882a593Smuzhiyun unlock:
374*4882a593Smuzhiyun 	up_write(&crypto_alg_sem);
375*4882a593Smuzhiyun 
376*4882a593Smuzhiyun 	crypto_remove_final(&list);
377*4882a593Smuzhiyun }
378*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_alg_tested);
379*4882a593Smuzhiyun 
380*4882a593Smuzhiyun void crypto_remove_final(struct list_head *list)
381*4882a593Smuzhiyun {
382*4882a593Smuzhiyun 	struct crypto_alg *alg;
383*4882a593Smuzhiyun 	struct crypto_alg *n;
384*4882a593Smuzhiyun 
385*4882a593Smuzhiyun 	list_for_each_entry_safe(alg, n, list, cra_list) {
386*4882a593Smuzhiyun 		list_del_init(&alg->cra_list);
387*4882a593Smuzhiyun 		crypto_alg_put(alg);
388*4882a593Smuzhiyun 	}
389*4882a593Smuzhiyun }
390*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_remove_final);
391*4882a593Smuzhiyun 
392*4882a593Smuzhiyun static void crypto_wait_for_test(struct crypto_larval *larval)
393*4882a593Smuzhiyun {
394*4882a593Smuzhiyun 	int err;
395*4882a593Smuzhiyun 
396*4882a593Smuzhiyun 	err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult);
397*4882a593Smuzhiyun 	if (err != NOTIFY_STOP) {
398*4882a593Smuzhiyun 		if (WARN_ON(err != NOTIFY_DONE))
399*4882a593Smuzhiyun 			goto out;
400*4882a593Smuzhiyun 		crypto_alg_tested(larval->alg.cra_driver_name, 0);
401*4882a593Smuzhiyun 	}
402*4882a593Smuzhiyun 
403*4882a593Smuzhiyun 	err = wait_for_completion_killable(&larval->completion);
404*4882a593Smuzhiyun 	WARN_ON(err);
405*4882a593Smuzhiyun 	if (!err)
406*4882a593Smuzhiyun 		crypto_notify(CRYPTO_MSG_ALG_LOADED, larval);
407*4882a593Smuzhiyun 
408*4882a593Smuzhiyun out:
409*4882a593Smuzhiyun 	crypto_larval_kill(&larval->alg);
410*4882a593Smuzhiyun }
411*4882a593Smuzhiyun 
412*4882a593Smuzhiyun int crypto_register_alg(struct crypto_alg *alg)
413*4882a593Smuzhiyun {
414*4882a593Smuzhiyun 	struct crypto_larval *larval;
415*4882a593Smuzhiyun 	int err;
416*4882a593Smuzhiyun 
417*4882a593Smuzhiyun 	alg->cra_flags &= ~CRYPTO_ALG_DEAD;
418*4882a593Smuzhiyun 	err = crypto_check_alg(alg);
419*4882a593Smuzhiyun 	if (err)
420*4882a593Smuzhiyun 		return err;
421*4882a593Smuzhiyun 
422*4882a593Smuzhiyun 	down_write(&crypto_alg_sem);
423*4882a593Smuzhiyun 	larval = __crypto_register_alg(alg);
424*4882a593Smuzhiyun 	up_write(&crypto_alg_sem);
425*4882a593Smuzhiyun 
426*4882a593Smuzhiyun 	if (IS_ERR(larval))
427*4882a593Smuzhiyun 		return PTR_ERR(larval);
428*4882a593Smuzhiyun 
429*4882a593Smuzhiyun 	crypto_wait_for_test(larval);
430*4882a593Smuzhiyun 	return 0;
431*4882a593Smuzhiyun }
432*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_register_alg);
433*4882a593Smuzhiyun 
434*4882a593Smuzhiyun static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
435*4882a593Smuzhiyun {
436*4882a593Smuzhiyun 	if (unlikely(list_empty(&alg->cra_list)))
437*4882a593Smuzhiyun 		return -ENOENT;
438*4882a593Smuzhiyun 
439*4882a593Smuzhiyun 	alg->cra_flags |= CRYPTO_ALG_DEAD;
440*4882a593Smuzhiyun 
441*4882a593Smuzhiyun 	list_del_init(&alg->cra_list);
442*4882a593Smuzhiyun 	crypto_remove_spawns(alg, list, NULL);
443*4882a593Smuzhiyun 
444*4882a593Smuzhiyun 	return 0;
445*4882a593Smuzhiyun }
446*4882a593Smuzhiyun 
447*4882a593Smuzhiyun void crypto_unregister_alg(struct crypto_alg *alg)
448*4882a593Smuzhiyun {
449*4882a593Smuzhiyun 	int ret;
450*4882a593Smuzhiyun 	LIST_HEAD(list);
451*4882a593Smuzhiyun 
452*4882a593Smuzhiyun 	down_write(&crypto_alg_sem);
453*4882a593Smuzhiyun 	ret = crypto_remove_alg(alg, &list);
454*4882a593Smuzhiyun 	up_write(&crypto_alg_sem);
455*4882a593Smuzhiyun 
456*4882a593Smuzhiyun 	if (WARN(ret, "Algorithm %s is not registered", alg->cra_driver_name))
457*4882a593Smuzhiyun 		return;
458*4882a593Smuzhiyun 
459*4882a593Smuzhiyun 	BUG_ON(refcount_read(&alg->cra_refcnt) != 1);
460*4882a593Smuzhiyun 	if (alg->cra_destroy)
461*4882a593Smuzhiyun 		alg->cra_destroy(alg);
462*4882a593Smuzhiyun 
463*4882a593Smuzhiyun 	crypto_remove_final(&list);
464*4882a593Smuzhiyun }
465*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_unregister_alg);
466*4882a593Smuzhiyun 
467*4882a593Smuzhiyun int crypto_register_algs(struct crypto_alg *algs, int count)
468*4882a593Smuzhiyun {
469*4882a593Smuzhiyun 	int i, ret;
470*4882a593Smuzhiyun 
471*4882a593Smuzhiyun 	for (i = 0; i < count; i++) {
472*4882a593Smuzhiyun 		ret = crypto_register_alg(&algs[i]);
473*4882a593Smuzhiyun 		if (ret)
474*4882a593Smuzhiyun 			goto err;
475*4882a593Smuzhiyun 	}
476*4882a593Smuzhiyun 
477*4882a593Smuzhiyun 	return 0;
478*4882a593Smuzhiyun 
479*4882a593Smuzhiyun err:
480*4882a593Smuzhiyun 	for (--i; i >= 0; --i)
481*4882a593Smuzhiyun 		crypto_unregister_alg(&algs[i]);
482*4882a593Smuzhiyun 
483*4882a593Smuzhiyun 	return ret;
484*4882a593Smuzhiyun }
485*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_register_algs);
486*4882a593Smuzhiyun 
487*4882a593Smuzhiyun void crypto_unregister_algs(struct crypto_alg *algs, int count)
488*4882a593Smuzhiyun {
489*4882a593Smuzhiyun 	int i;
490*4882a593Smuzhiyun 
491*4882a593Smuzhiyun 	for (i = 0; i < count; i++)
492*4882a593Smuzhiyun 		crypto_unregister_alg(&algs[i]);
493*4882a593Smuzhiyun }
494*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_unregister_algs);
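/*
 * Illustrative usage sketch (editor's note, not part of the original file):
 * a driver typically registers an array of algorithms from its module init
 * path via crypto_register_algs() and drops them again on exit.  The
 * "example_algs" array and the example_init/example_exit hooks below are
 * hypothetical names.
 *
 *	static struct crypto_alg example_algs[2];
 *
 *	static int __init example_init(void)
 *	{
 *		return crypto_register_algs(example_algs,
 *					    ARRAY_SIZE(example_algs));
 *	}
 *
 *	static void __exit example_exit(void)
 *	{
 *		crypto_unregister_algs(example_algs, ARRAY_SIZE(example_algs));
 *	}
 */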
495*4882a593Smuzhiyun 
496*4882a593Smuzhiyun int crypto_register_template(struct crypto_template *tmpl)
497*4882a593Smuzhiyun {
498*4882a593Smuzhiyun 	struct crypto_template *q;
499*4882a593Smuzhiyun 	int err = -EEXIST;
500*4882a593Smuzhiyun 
501*4882a593Smuzhiyun 	down_write(&crypto_alg_sem);
502*4882a593Smuzhiyun 
503*4882a593Smuzhiyun 	crypto_check_module_sig(tmpl->module);
504*4882a593Smuzhiyun 
505*4882a593Smuzhiyun 	list_for_each_entry(q, &crypto_template_list, list) {
506*4882a593Smuzhiyun 		if (q == tmpl)
507*4882a593Smuzhiyun 			goto out;
508*4882a593Smuzhiyun 	}
509*4882a593Smuzhiyun 
510*4882a593Smuzhiyun 	list_add(&tmpl->list, &crypto_template_list);
511*4882a593Smuzhiyun 	err = 0;
512*4882a593Smuzhiyun out:
513*4882a593Smuzhiyun 	up_write(&crypto_alg_sem);
514*4882a593Smuzhiyun 	return err;
515*4882a593Smuzhiyun }
516*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_register_template);
517*4882a593Smuzhiyun 
518*4882a593Smuzhiyun int crypto_register_templates(struct crypto_template *tmpls, int count)
519*4882a593Smuzhiyun {
520*4882a593Smuzhiyun 	int i, err;
521*4882a593Smuzhiyun 
522*4882a593Smuzhiyun 	for (i = 0; i < count; i++) {
523*4882a593Smuzhiyun 		err = crypto_register_template(&tmpls[i]);
524*4882a593Smuzhiyun 		if (err)
525*4882a593Smuzhiyun 			goto out;
526*4882a593Smuzhiyun 	}
527*4882a593Smuzhiyun 	return 0;
528*4882a593Smuzhiyun 
529*4882a593Smuzhiyun out:
530*4882a593Smuzhiyun 	for (--i; i >= 0; --i)
531*4882a593Smuzhiyun 		crypto_unregister_template(&tmpls[i]);
532*4882a593Smuzhiyun 	return err;
533*4882a593Smuzhiyun }
534*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_register_templates);
535*4882a593Smuzhiyun 
536*4882a593Smuzhiyun void crypto_unregister_template(struct crypto_template *tmpl)
537*4882a593Smuzhiyun {
538*4882a593Smuzhiyun 	struct crypto_instance *inst;
539*4882a593Smuzhiyun 	struct hlist_node *n;
540*4882a593Smuzhiyun 	struct hlist_head *list;
541*4882a593Smuzhiyun 	LIST_HEAD(users);
542*4882a593Smuzhiyun 
543*4882a593Smuzhiyun 	down_write(&crypto_alg_sem);
544*4882a593Smuzhiyun 
545*4882a593Smuzhiyun 	BUG_ON(list_empty(&tmpl->list));
546*4882a593Smuzhiyun 	list_del_init(&tmpl->list);
547*4882a593Smuzhiyun 
548*4882a593Smuzhiyun 	list = &tmpl->instances;
549*4882a593Smuzhiyun 	hlist_for_each_entry(inst, list, list) {
550*4882a593Smuzhiyun 		int err = crypto_remove_alg(&inst->alg, &users);
551*4882a593Smuzhiyun 
552*4882a593Smuzhiyun 		BUG_ON(err);
553*4882a593Smuzhiyun 	}
554*4882a593Smuzhiyun 
555*4882a593Smuzhiyun 	up_write(&crypto_alg_sem);
556*4882a593Smuzhiyun 
557*4882a593Smuzhiyun 	hlist_for_each_entry_safe(inst, n, list, list) {
558*4882a593Smuzhiyun 		BUG_ON(refcount_read(&inst->alg.cra_refcnt) != 1);
559*4882a593Smuzhiyun 		crypto_free_instance(inst);
560*4882a593Smuzhiyun 	}
561*4882a593Smuzhiyun 	crypto_remove_final(&users);
562*4882a593Smuzhiyun }
563*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_unregister_template);
564*4882a593Smuzhiyun 
565*4882a593Smuzhiyun void crypto_unregister_templates(struct crypto_template *tmpls, int count)
566*4882a593Smuzhiyun {
567*4882a593Smuzhiyun 	int i;
568*4882a593Smuzhiyun 
569*4882a593Smuzhiyun 	for (i = count - 1; i >= 0; --i)
570*4882a593Smuzhiyun 		crypto_unregister_template(&tmpls[i]);
571*4882a593Smuzhiyun }
572*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_unregister_templates);
573*4882a593Smuzhiyun 
574*4882a593Smuzhiyun static struct crypto_template *__crypto_lookup_template(const char *name)
575*4882a593Smuzhiyun {
576*4882a593Smuzhiyun 	struct crypto_template *q, *tmpl = NULL;
577*4882a593Smuzhiyun 
578*4882a593Smuzhiyun 	down_read(&crypto_alg_sem);
579*4882a593Smuzhiyun 	list_for_each_entry(q, &crypto_template_list, list) {
580*4882a593Smuzhiyun 		if (strcmp(q->name, name))
581*4882a593Smuzhiyun 			continue;
582*4882a593Smuzhiyun 		if (unlikely(!crypto_tmpl_get(q)))
583*4882a593Smuzhiyun 			continue;
584*4882a593Smuzhiyun 
585*4882a593Smuzhiyun 		tmpl = q;
586*4882a593Smuzhiyun 		break;
587*4882a593Smuzhiyun 	}
588*4882a593Smuzhiyun 	up_read(&crypto_alg_sem);
589*4882a593Smuzhiyun 
590*4882a593Smuzhiyun 	return tmpl;
591*4882a593Smuzhiyun }
592*4882a593Smuzhiyun 
593*4882a593Smuzhiyun struct crypto_template *crypto_lookup_template(const char *name)
594*4882a593Smuzhiyun {
595*4882a593Smuzhiyun 	return try_then_request_module(__crypto_lookup_template(name),
596*4882a593Smuzhiyun 				       "crypto-%s", name);
597*4882a593Smuzhiyun }
598*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_lookup_template);
599*4882a593Smuzhiyun 
600*4882a593Smuzhiyun int crypto_register_instance(struct crypto_template *tmpl,
601*4882a593Smuzhiyun 			     struct crypto_instance *inst)
602*4882a593Smuzhiyun {
603*4882a593Smuzhiyun 	struct crypto_larval *larval;
604*4882a593Smuzhiyun 	struct crypto_spawn *spawn;
605*4882a593Smuzhiyun 	int err;
606*4882a593Smuzhiyun 
607*4882a593Smuzhiyun 	err = crypto_check_alg(&inst->alg);
608*4882a593Smuzhiyun 	if (err)
609*4882a593Smuzhiyun 		return err;
610*4882a593Smuzhiyun 
611*4882a593Smuzhiyun 	inst->alg.cra_module = tmpl->module;
612*4882a593Smuzhiyun 	inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE;
613*4882a593Smuzhiyun 
614*4882a593Smuzhiyun 	down_write(&crypto_alg_sem);
615*4882a593Smuzhiyun 
616*4882a593Smuzhiyun 	larval = ERR_PTR(-EAGAIN);
617*4882a593Smuzhiyun 	for (spawn = inst->spawns; spawn;) {
618*4882a593Smuzhiyun 		struct crypto_spawn *next;
619*4882a593Smuzhiyun 
620*4882a593Smuzhiyun 		if (spawn->dead)
621*4882a593Smuzhiyun 			goto unlock;
622*4882a593Smuzhiyun 
623*4882a593Smuzhiyun 		next = spawn->next;
624*4882a593Smuzhiyun 		spawn->inst = inst;
625*4882a593Smuzhiyun 		spawn->registered = true;
626*4882a593Smuzhiyun 
627*4882a593Smuzhiyun 		crypto_mod_put(spawn->alg);
628*4882a593Smuzhiyun 
629*4882a593Smuzhiyun 		spawn = next;
630*4882a593Smuzhiyun 	}
631*4882a593Smuzhiyun 
632*4882a593Smuzhiyun 	larval = __crypto_register_alg(&inst->alg);
633*4882a593Smuzhiyun 	if (IS_ERR(larval))
634*4882a593Smuzhiyun 		goto unlock;
635*4882a593Smuzhiyun 
636*4882a593Smuzhiyun 	hlist_add_head(&inst->list, &tmpl->instances);
637*4882a593Smuzhiyun 	inst->tmpl = tmpl;
638*4882a593Smuzhiyun 
639*4882a593Smuzhiyun unlock:
640*4882a593Smuzhiyun 	up_write(&crypto_alg_sem);
641*4882a593Smuzhiyun 
642*4882a593Smuzhiyun 	err = PTR_ERR(larval);
643*4882a593Smuzhiyun 	if (IS_ERR(larval))
644*4882a593Smuzhiyun 		goto err;
645*4882a593Smuzhiyun 
646*4882a593Smuzhiyun 	crypto_wait_for_test(larval);
647*4882a593Smuzhiyun 	err = 0;
648*4882a593Smuzhiyun 
649*4882a593Smuzhiyun err:
650*4882a593Smuzhiyun 	return err;
651*4882a593Smuzhiyun }
652*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_register_instance);
653*4882a593Smuzhiyun 
654*4882a593Smuzhiyun void crypto_unregister_instance(struct crypto_instance *inst)
655*4882a593Smuzhiyun {
656*4882a593Smuzhiyun 	LIST_HEAD(list);
657*4882a593Smuzhiyun 
658*4882a593Smuzhiyun 	down_write(&crypto_alg_sem);
659*4882a593Smuzhiyun 
660*4882a593Smuzhiyun 	crypto_remove_spawns(&inst->alg, &list, NULL);
661*4882a593Smuzhiyun 	crypto_remove_instance(inst, &list);
662*4882a593Smuzhiyun 
663*4882a593Smuzhiyun 	up_write(&crypto_alg_sem);
664*4882a593Smuzhiyun 
665*4882a593Smuzhiyun 	crypto_remove_final(&list);
666*4882a593Smuzhiyun }
667*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_unregister_instance);
668*4882a593Smuzhiyun 
669*4882a593Smuzhiyun int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst,
670*4882a593Smuzhiyun 		      const char *name, u32 type, u32 mask)
671*4882a593Smuzhiyun {
672*4882a593Smuzhiyun 	struct crypto_alg *alg;
673*4882a593Smuzhiyun 	int err = -EAGAIN;
674*4882a593Smuzhiyun 
675*4882a593Smuzhiyun 	if (WARN_ON_ONCE(inst == NULL))
676*4882a593Smuzhiyun 		return -EINVAL;
677*4882a593Smuzhiyun 
678*4882a593Smuzhiyun 	/* Allow the result of crypto_attr_alg_name() to be passed directly */
679*4882a593Smuzhiyun 	if (IS_ERR(name))
680*4882a593Smuzhiyun 		return PTR_ERR(name);
681*4882a593Smuzhiyun 
682*4882a593Smuzhiyun 	alg = crypto_find_alg(name, spawn->frontend, type, mask);
683*4882a593Smuzhiyun 	if (IS_ERR(alg))
684*4882a593Smuzhiyun 		return PTR_ERR(alg);
685*4882a593Smuzhiyun 
686*4882a593Smuzhiyun 	down_write(&crypto_alg_sem);
687*4882a593Smuzhiyun 	if (!crypto_is_moribund(alg)) {
688*4882a593Smuzhiyun 		list_add(&spawn->list, &alg->cra_users);
689*4882a593Smuzhiyun 		spawn->alg = alg;
690*4882a593Smuzhiyun 		spawn->mask = mask;
691*4882a593Smuzhiyun 		spawn->next = inst->spawns;
692*4882a593Smuzhiyun 		inst->spawns = spawn;
693*4882a593Smuzhiyun 		inst->alg.cra_flags |=
694*4882a593Smuzhiyun 			(alg->cra_flags & CRYPTO_ALG_INHERITED_FLAGS);
695*4882a593Smuzhiyun 		err = 0;
696*4882a593Smuzhiyun 	}
697*4882a593Smuzhiyun 	up_write(&crypto_alg_sem);
698*4882a593Smuzhiyun 	if (err)
699*4882a593Smuzhiyun 		crypto_mod_put(alg);
700*4882a593Smuzhiyun 	return err;
701*4882a593Smuzhiyun }
702*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_grab_spawn);
703*4882a593Smuzhiyun 
704*4882a593Smuzhiyun void crypto_drop_spawn(struct crypto_spawn *spawn)
705*4882a593Smuzhiyun {
706*4882a593Smuzhiyun 	if (!spawn->alg) /* not yet initialized? */
707*4882a593Smuzhiyun 		return;
708*4882a593Smuzhiyun 
709*4882a593Smuzhiyun 	down_write(&crypto_alg_sem);
710*4882a593Smuzhiyun 	if (!spawn->dead)
711*4882a593Smuzhiyun 		list_del(&spawn->list);
712*4882a593Smuzhiyun 	up_write(&crypto_alg_sem);
713*4882a593Smuzhiyun 
714*4882a593Smuzhiyun 	if (!spawn->registered)
715*4882a593Smuzhiyun 		crypto_mod_put(spawn->alg);
716*4882a593Smuzhiyun }
717*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_drop_spawn);
718*4882a593Smuzhiyun 
719*4882a593Smuzhiyun static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
720*4882a593Smuzhiyun {
721*4882a593Smuzhiyun 	struct crypto_alg *alg = ERR_PTR(-EAGAIN);
722*4882a593Smuzhiyun 	struct crypto_alg *target;
723*4882a593Smuzhiyun 	bool shoot = false;
724*4882a593Smuzhiyun 
725*4882a593Smuzhiyun 	down_read(&crypto_alg_sem);
726*4882a593Smuzhiyun 	if (!spawn->dead) {
727*4882a593Smuzhiyun 		alg = spawn->alg;
728*4882a593Smuzhiyun 		if (!crypto_mod_get(alg)) {
729*4882a593Smuzhiyun 			target = crypto_alg_get(alg);
730*4882a593Smuzhiyun 			shoot = true;
731*4882a593Smuzhiyun 			alg = ERR_PTR(-EAGAIN);
732*4882a593Smuzhiyun 		}
733*4882a593Smuzhiyun 	}
734*4882a593Smuzhiyun 	up_read(&crypto_alg_sem);
735*4882a593Smuzhiyun 
736*4882a593Smuzhiyun 	if (shoot) {
737*4882a593Smuzhiyun 		crypto_shoot_alg(target);
738*4882a593Smuzhiyun 		crypto_alg_put(target);
739*4882a593Smuzhiyun 	}
740*4882a593Smuzhiyun 
741*4882a593Smuzhiyun 	return alg;
742*4882a593Smuzhiyun }
743*4882a593Smuzhiyun 
744*4882a593Smuzhiyun struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
745*4882a593Smuzhiyun 				    u32 mask)
746*4882a593Smuzhiyun {
747*4882a593Smuzhiyun 	struct crypto_alg *alg;
748*4882a593Smuzhiyun 	struct crypto_tfm *tfm;
749*4882a593Smuzhiyun 
750*4882a593Smuzhiyun 	alg = crypto_spawn_alg(spawn);
751*4882a593Smuzhiyun 	if (IS_ERR(alg))
752*4882a593Smuzhiyun 		return ERR_CAST(alg);
753*4882a593Smuzhiyun 
754*4882a593Smuzhiyun 	tfm = ERR_PTR(-EINVAL);
755*4882a593Smuzhiyun 	if (unlikely((alg->cra_flags ^ type) & mask))
756*4882a593Smuzhiyun 		goto out_put_alg;
757*4882a593Smuzhiyun 
758*4882a593Smuzhiyun 	tfm = __crypto_alloc_tfm(alg, type, mask);
759*4882a593Smuzhiyun 	if (IS_ERR(tfm))
760*4882a593Smuzhiyun 		goto out_put_alg;
761*4882a593Smuzhiyun 
762*4882a593Smuzhiyun 	return tfm;
763*4882a593Smuzhiyun 
764*4882a593Smuzhiyun out_put_alg:
765*4882a593Smuzhiyun 	crypto_mod_put(alg);
766*4882a593Smuzhiyun 	return tfm;
767*4882a593Smuzhiyun }
768*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_spawn_tfm);
769*4882a593Smuzhiyun 
770*4882a593Smuzhiyun void *crypto_spawn_tfm2(struct crypto_spawn *spawn)
771*4882a593Smuzhiyun {
772*4882a593Smuzhiyun 	struct crypto_alg *alg;
773*4882a593Smuzhiyun 	struct crypto_tfm *tfm;
774*4882a593Smuzhiyun 
775*4882a593Smuzhiyun 	alg = crypto_spawn_alg(spawn);
776*4882a593Smuzhiyun 	if (IS_ERR(alg))
777*4882a593Smuzhiyun 		return ERR_CAST(alg);
778*4882a593Smuzhiyun 
779*4882a593Smuzhiyun 	tfm = crypto_create_tfm(alg, spawn->frontend);
780*4882a593Smuzhiyun 	if (IS_ERR(tfm))
781*4882a593Smuzhiyun 		goto out_put_alg;
782*4882a593Smuzhiyun 
783*4882a593Smuzhiyun 	return tfm;
784*4882a593Smuzhiyun 
785*4882a593Smuzhiyun out_put_alg:
786*4882a593Smuzhiyun 	crypto_mod_put(alg);
787*4882a593Smuzhiyun 	return tfm;
788*4882a593Smuzhiyun }
789*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_spawn_tfm2);
790*4882a593Smuzhiyun 
791*4882a593Smuzhiyun int crypto_register_notifier(struct notifier_block *nb)
792*4882a593Smuzhiyun {
793*4882a593Smuzhiyun 	return blocking_notifier_chain_register(&crypto_chain, nb);
794*4882a593Smuzhiyun }
795*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_register_notifier);
796*4882a593Smuzhiyun 
797*4882a593Smuzhiyun int crypto_unregister_notifier(struct notifier_block *nb)
798*4882a593Smuzhiyun {
799*4882a593Smuzhiyun 	return blocking_notifier_chain_unregister(&crypto_chain, nb);
800*4882a593Smuzhiyun }
801*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_unregister_notifier);
802*4882a593Smuzhiyun 
803*4882a593Smuzhiyun struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb)
804*4882a593Smuzhiyun {
805*4882a593Smuzhiyun 	struct rtattr *rta = tb[0];
806*4882a593Smuzhiyun 	struct crypto_attr_type *algt;
807*4882a593Smuzhiyun 
808*4882a593Smuzhiyun 	if (!rta)
809*4882a593Smuzhiyun 		return ERR_PTR(-ENOENT);
810*4882a593Smuzhiyun 	if (RTA_PAYLOAD(rta) < sizeof(*algt))
811*4882a593Smuzhiyun 		return ERR_PTR(-EINVAL);
812*4882a593Smuzhiyun 	if (rta->rta_type != CRYPTOA_TYPE)
813*4882a593Smuzhiyun 		return ERR_PTR(-EINVAL);
814*4882a593Smuzhiyun 
815*4882a593Smuzhiyun 	algt = RTA_DATA(rta);
816*4882a593Smuzhiyun 
817*4882a593Smuzhiyun 	return algt;
818*4882a593Smuzhiyun }
819*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_get_attr_type);
820*4882a593Smuzhiyun 
821*4882a593Smuzhiyun /**
822*4882a593Smuzhiyun  * crypto_check_attr_type() - check algorithm type and compute inherited mask
823*4882a593Smuzhiyun  * @tb: the template parameters
824*4882a593Smuzhiyun  * @type: the algorithm type the template would be instantiated as
825*4882a593Smuzhiyun  * @mask_ret: (output) the mask that should be passed to crypto_grab_*()
826*4882a593Smuzhiyun  *	      to restrict the flags of any inner algorithms
827*4882a593Smuzhiyun  *
828*4882a593Smuzhiyun  * Validate that the algorithm type the user requested is compatible with the
829*4882a593Smuzhiyun  * one the template would actually be instantiated as.  E.g., if the user is
830*4882a593Smuzhiyun  * doing crypto_alloc_shash("cbc(aes)", ...), this would return an error because
831*4882a593Smuzhiyun  * the "cbc" template creates an "skcipher" algorithm, not an "shash" algorithm.
832*4882a593Smuzhiyun  *
833*4882a593Smuzhiyun  * Also compute the mask to use to restrict the flags of any inner algorithms.
834*4882a593Smuzhiyun  *
835*4882a593Smuzhiyun  * Return: 0 on success; -errno on failure
836*4882a593Smuzhiyun  */
837*4882a593Smuzhiyun int crypto_check_attr_type(struct rtattr **tb, u32 type, u32 *mask_ret)
838*4882a593Smuzhiyun {
839*4882a593Smuzhiyun 	struct crypto_attr_type *algt;
840*4882a593Smuzhiyun 
841*4882a593Smuzhiyun 	algt = crypto_get_attr_type(tb);
842*4882a593Smuzhiyun 	if (IS_ERR(algt))
843*4882a593Smuzhiyun 		return PTR_ERR(algt);
844*4882a593Smuzhiyun 
845*4882a593Smuzhiyun 	if ((algt->type ^ type) & algt->mask)
846*4882a593Smuzhiyun 		return -EINVAL;
847*4882a593Smuzhiyun 
848*4882a593Smuzhiyun 	*mask_ret = crypto_algt_inherited_mask(algt);
849*4882a593Smuzhiyun 	return 0;
850*4882a593Smuzhiyun }
851*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_check_attr_type);
852*4882a593Smuzhiyun 
853*4882a593Smuzhiyun const char *crypto_attr_alg_name(struct rtattr *rta)
854*4882a593Smuzhiyun {
855*4882a593Smuzhiyun 	struct crypto_attr_alg *alga;
856*4882a593Smuzhiyun 
857*4882a593Smuzhiyun 	if (!rta)
858*4882a593Smuzhiyun 		return ERR_PTR(-ENOENT);
859*4882a593Smuzhiyun 	if (RTA_PAYLOAD(rta) < sizeof(*alga))
860*4882a593Smuzhiyun 		return ERR_PTR(-EINVAL);
861*4882a593Smuzhiyun 	if (rta->rta_type != CRYPTOA_ALG)
862*4882a593Smuzhiyun 		return ERR_PTR(-EINVAL);
863*4882a593Smuzhiyun 
864*4882a593Smuzhiyun 	alga = RTA_DATA(rta);
865*4882a593Smuzhiyun 	alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0;
866*4882a593Smuzhiyun 
867*4882a593Smuzhiyun 	return alga->name;
868*4882a593Smuzhiyun }
869*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_attr_alg_name);
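/*
 * Illustrative sketch (editor's note, not from the original file): a
 * template's ->create() handler would typically validate the requested
 * type and compute the inherited mask with crypto_check_attr_type(), then
 * pull the inner algorithm name out of the attributes with
 * crypto_attr_alg_name().  "example_create" is a hypothetical name.
 *
 *	static int example_create(struct crypto_template *tmpl,
 *				  struct rtattr **tb)
 *	{
 *		const char *cipher_name;
 *		u32 mask;
 *		int err;
 *
 *		err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER,
 *					     &mask);
 *		if (err)
 *			return err;
 *
 *		cipher_name = crypto_attr_alg_name(tb[1]);
 *		if (IS_ERR(cipher_name))
 *			return PTR_ERR(cipher_name);
 *		...
 *	}
 */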
870*4882a593Smuzhiyun 
871*4882a593Smuzhiyun int crypto_attr_u32(struct rtattr *rta, u32 *num)
872*4882a593Smuzhiyun {
873*4882a593Smuzhiyun 	struct crypto_attr_u32 *nu32;
874*4882a593Smuzhiyun 
875*4882a593Smuzhiyun 	if (!rta)
876*4882a593Smuzhiyun 		return -ENOENT;
877*4882a593Smuzhiyun 	if (RTA_PAYLOAD(rta) < sizeof(*nu32))
878*4882a593Smuzhiyun 		return -EINVAL;
879*4882a593Smuzhiyun 	if (rta->rta_type != CRYPTOA_U32)
880*4882a593Smuzhiyun 		return -EINVAL;
881*4882a593Smuzhiyun 
882*4882a593Smuzhiyun 	nu32 = RTA_DATA(rta);
883*4882a593Smuzhiyun 	*num = nu32->num;
884*4882a593Smuzhiyun 
885*4882a593Smuzhiyun 	return 0;
886*4882a593Smuzhiyun }
887*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_attr_u32);
888*4882a593Smuzhiyun 
889*4882a593Smuzhiyun int crypto_inst_setname(struct crypto_instance *inst, const char *name,
890*4882a593Smuzhiyun 			struct crypto_alg *alg)
891*4882a593Smuzhiyun {
892*4882a593Smuzhiyun 	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
893*4882a593Smuzhiyun 		     alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
894*4882a593Smuzhiyun 		return -ENAMETOOLONG;
895*4882a593Smuzhiyun 
896*4882a593Smuzhiyun 	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)",
897*4882a593Smuzhiyun 		     name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
898*4882a593Smuzhiyun 		return -ENAMETOOLONG;
899*4882a593Smuzhiyun 
900*4882a593Smuzhiyun 	return 0;
901*4882a593Smuzhiyun }
902*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_inst_setname);
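/*
 * Worked example (editor's note): with name "cbc" and an inner algorithm
 * whose cra_name is "aes" and cra_driver_name is "aes-generic",
 *
 *	err = crypto_inst_setname(inst, "cbc", alg);
 *
 * sets the instance's cra_name to "cbc(aes)" and cra_driver_name to
 * "cbc(aes-generic)", or returns -ENAMETOOLONG if either string would not
 * fit in CRYPTO_MAX_ALG_NAME bytes.
 */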
903*4882a593Smuzhiyun 
904*4882a593Smuzhiyun void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen)
905*4882a593Smuzhiyun {
906*4882a593Smuzhiyun 	INIT_LIST_HEAD(&queue->list);
907*4882a593Smuzhiyun 	queue->backlog = &queue->list;
908*4882a593Smuzhiyun 	queue->qlen = 0;
909*4882a593Smuzhiyun 	queue->max_qlen = max_qlen;
910*4882a593Smuzhiyun }
911*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_init_queue);
912*4882a593Smuzhiyun 
913*4882a593Smuzhiyun int crypto_enqueue_request(struct crypto_queue *queue,
914*4882a593Smuzhiyun 			   struct crypto_async_request *request)
915*4882a593Smuzhiyun {
916*4882a593Smuzhiyun 	int err = -EINPROGRESS;
917*4882a593Smuzhiyun 
918*4882a593Smuzhiyun 	if (unlikely(queue->qlen >= queue->max_qlen)) {
919*4882a593Smuzhiyun 		if (!(request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG)) {
920*4882a593Smuzhiyun 			err = -ENOSPC;
921*4882a593Smuzhiyun 			goto out;
922*4882a593Smuzhiyun 		}
923*4882a593Smuzhiyun 		err = -EBUSY;
924*4882a593Smuzhiyun 		if (queue->backlog == &queue->list)
925*4882a593Smuzhiyun 			queue->backlog = &request->list;
926*4882a593Smuzhiyun 	}
927*4882a593Smuzhiyun 
928*4882a593Smuzhiyun 	queue->qlen++;
929*4882a593Smuzhiyun 	list_add_tail(&request->list, &queue->list);
930*4882a593Smuzhiyun 
931*4882a593Smuzhiyun out:
932*4882a593Smuzhiyun 	return err;
933*4882a593Smuzhiyun }
934*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_enqueue_request);
935*4882a593Smuzhiyun 
936*4882a593Smuzhiyun void crypto_enqueue_request_head(struct crypto_queue *queue,
937*4882a593Smuzhiyun 				 struct crypto_async_request *request)
938*4882a593Smuzhiyun {
939*4882a593Smuzhiyun 	queue->qlen++;
940*4882a593Smuzhiyun 	list_add(&request->list, &queue->list);
941*4882a593Smuzhiyun }
942*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_enqueue_request_head);
943*4882a593Smuzhiyun 
944*4882a593Smuzhiyun struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
945*4882a593Smuzhiyun {
946*4882a593Smuzhiyun 	struct list_head *request;
947*4882a593Smuzhiyun 
948*4882a593Smuzhiyun 	if (unlikely(!queue->qlen))
949*4882a593Smuzhiyun 		return NULL;
950*4882a593Smuzhiyun 
951*4882a593Smuzhiyun 	queue->qlen--;
952*4882a593Smuzhiyun 
953*4882a593Smuzhiyun 	if (queue->backlog != &queue->list)
954*4882a593Smuzhiyun 		queue->backlog = queue->backlog->next;
955*4882a593Smuzhiyun 
956*4882a593Smuzhiyun 	request = queue->list.next;
957*4882a593Smuzhiyun 	list_del(request);
958*4882a593Smuzhiyun 
959*4882a593Smuzhiyun 	return list_entry(request, struct crypto_async_request, list);
960*4882a593Smuzhiyun }
961*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_dequeue_request);
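/*
 * Usage sketch (editor's note, hypothetical driver code): a hardware
 * driver commonly feeds requests into a crypto_queue from its request
 * path and drains it from a worker, with both sides serialized by the
 * driver's own lock ("drv" below is a made-up driver context).
 *
 *	crypto_init_queue(&drv->queue, 128);
 *
 *	// request path:
 *	spin_lock_bh(&drv->lock);
 *	err = crypto_enqueue_request(&drv->queue, &req->base);
 *	spin_unlock_bh(&drv->lock);
 *
 *	// worker:
 *	spin_lock_bh(&drv->lock);
 *	async_req = crypto_dequeue_request(&drv->queue);
 *	spin_unlock_bh(&drv->lock);
 *
 * -EINPROGRESS means the request was queued; -EBUSY means it was queued
 * as backlog (only when CRYPTO_TFM_REQ_MAY_BACKLOG is set); -ENOSPC means
 * the queue was full and the request was rejected.
 */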
962*4882a593Smuzhiyun 
963*4882a593Smuzhiyun static inline void crypto_inc_byte(u8 *a, unsigned int size)
964*4882a593Smuzhiyun {
965*4882a593Smuzhiyun 	u8 *b = (a + size);
966*4882a593Smuzhiyun 	u8 c;
967*4882a593Smuzhiyun 
968*4882a593Smuzhiyun 	for (; size; size--) {
969*4882a593Smuzhiyun 		c = *--b + 1;
970*4882a593Smuzhiyun 		*b = c;
971*4882a593Smuzhiyun 		if (c)
972*4882a593Smuzhiyun 			break;
973*4882a593Smuzhiyun 	}
974*4882a593Smuzhiyun }
975*4882a593Smuzhiyun 
976*4882a593Smuzhiyun void crypto_inc(u8 *a, unsigned int size)
977*4882a593Smuzhiyun {
978*4882a593Smuzhiyun 	__be32 *b = (__be32 *)(a + size);
979*4882a593Smuzhiyun 	u32 c;
980*4882a593Smuzhiyun 
981*4882a593Smuzhiyun 	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
982*4882a593Smuzhiyun 	    IS_ALIGNED((unsigned long)b, __alignof__(*b)))
983*4882a593Smuzhiyun 		for (; size >= 4; size -= 4) {
984*4882a593Smuzhiyun 			c = be32_to_cpu(*--b) + 1;
985*4882a593Smuzhiyun 			*b = cpu_to_be32(c);
986*4882a593Smuzhiyun 			if (likely(c))
987*4882a593Smuzhiyun 				return;
988*4882a593Smuzhiyun 		}
989*4882a593Smuzhiyun 
990*4882a593Smuzhiyun 	crypto_inc_byte(a, size);
991*4882a593Smuzhiyun }
992*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_inc);
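/*
 * Behaviour sketch (editor's note): crypto_inc() treats the buffer as one
 * big-endian integer, as used for CTR-mode counter blocks, e.g.:
 *
 *	u8 ctr[16] = { 0 };
 *
 *	crypto_inc(ctr, sizeof(ctr));	// last byte becomes 0x01
 */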
993*4882a593Smuzhiyun 
994*4882a593Smuzhiyun void __crypto_xor(u8 *dst, const u8 *src1, const u8 *src2, unsigned int len)
995*4882a593Smuzhiyun {
996*4882a593Smuzhiyun 	int relalign = 0;
997*4882a593Smuzhiyun 
998*4882a593Smuzhiyun 	if (!IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)) {
999*4882a593Smuzhiyun 		int size = sizeof(unsigned long);
1000*4882a593Smuzhiyun 		int d = (((unsigned long)dst ^ (unsigned long)src1) |
1001*4882a593Smuzhiyun 			 ((unsigned long)dst ^ (unsigned long)src2)) &
1002*4882a593Smuzhiyun 			(size - 1);
1003*4882a593Smuzhiyun 
1004*4882a593Smuzhiyun 		relalign = d ? 1 << __ffs(d) : size;
1005*4882a593Smuzhiyun 
1006*4882a593Smuzhiyun 		/*
1007*4882a593Smuzhiyun 		 * If we care about alignment, process as many bytes as
1008*4882a593Smuzhiyun 		 * needed to advance dst and src to values whose alignments
1009*4882a593Smuzhiyun 		 * equal their relative alignment. This will allow us to
1010*4882a593Smuzhiyun 		 * process the remainder of the input using optimal strides.
1011*4882a593Smuzhiyun 		 */
1012*4882a593Smuzhiyun 		while (((unsigned long)dst & (relalign - 1)) && len > 0) {
1013*4882a593Smuzhiyun 			*dst++ = *src1++ ^ *src2++;
1014*4882a593Smuzhiyun 			len--;
1015*4882a593Smuzhiyun 		}
1016*4882a593Smuzhiyun 	}
1017*4882a593Smuzhiyun 
1018*4882a593Smuzhiyun 	while (IS_ENABLED(CONFIG_64BIT) && len >= 8 && !(relalign & 7)) {
1019*4882a593Smuzhiyun 		*(u64 *)dst = *(u64 *)src1 ^  *(u64 *)src2;
1020*4882a593Smuzhiyun 		dst += 8;
1021*4882a593Smuzhiyun 		src1 += 8;
1022*4882a593Smuzhiyun 		src2 += 8;
1023*4882a593Smuzhiyun 		len -= 8;
1024*4882a593Smuzhiyun 	}
1025*4882a593Smuzhiyun 
1026*4882a593Smuzhiyun 	while (len >= 4 && !(relalign & 3)) {
1027*4882a593Smuzhiyun 		*(u32 *)dst = *(u32 *)src1 ^ *(u32 *)src2;
1028*4882a593Smuzhiyun 		dst += 4;
1029*4882a593Smuzhiyun 		src1 += 4;
1030*4882a593Smuzhiyun 		src2 += 4;
1031*4882a593Smuzhiyun 		len -= 4;
1032*4882a593Smuzhiyun 	}
1033*4882a593Smuzhiyun 
1034*4882a593Smuzhiyun 	while (len >= 2 && !(relalign & 1)) {
1035*4882a593Smuzhiyun 		*(u16 *)dst = *(u16 *)src1 ^ *(u16 *)src2;
1036*4882a593Smuzhiyun 		dst += 2;
1037*4882a593Smuzhiyun 		src1 += 2;
1038*4882a593Smuzhiyun 		src2 += 2;
1039*4882a593Smuzhiyun 		len -= 2;
1040*4882a593Smuzhiyun 	}
1041*4882a593Smuzhiyun 
1042*4882a593Smuzhiyun 	while (len--)
1043*4882a593Smuzhiyun 		*dst++ = *src1++ ^ *src2++;
1044*4882a593Smuzhiyun }
1045*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(__crypto_xor);
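/*
 * Usage sketch (editor's note): callers normally use the crypto_xor()
 * wrapper from <crypto/algapi.h> rather than __crypto_xor() directly,
 * e.g. to fold a keystream block into a data block in place:
 *
 *	crypto_xor(data, keystream, AES_BLOCK_SIZE);
 */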
1046*4882a593Smuzhiyun 
1047*4882a593Smuzhiyun unsigned int crypto_alg_extsize(struct crypto_alg *alg)
1048*4882a593Smuzhiyun {
1049*4882a593Smuzhiyun 	return alg->cra_ctxsize +
1050*4882a593Smuzhiyun 	       (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1));
1051*4882a593Smuzhiyun }
1052*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_alg_extsize);
1053*4882a593Smuzhiyun 
1054*4882a593Smuzhiyun int crypto_type_has_alg(const char *name, const struct crypto_type *frontend,
1055*4882a593Smuzhiyun 			u32 type, u32 mask)
1056*4882a593Smuzhiyun {
1057*4882a593Smuzhiyun 	int ret = 0;
1058*4882a593Smuzhiyun 	struct crypto_alg *alg = crypto_find_alg(name, frontend, type, mask);
1059*4882a593Smuzhiyun 
1060*4882a593Smuzhiyun 	if (!IS_ERR(alg)) {
1061*4882a593Smuzhiyun 		crypto_mod_put(alg);
1062*4882a593Smuzhiyun 		ret = 1;
1063*4882a593Smuzhiyun 	}
1064*4882a593Smuzhiyun 
1065*4882a593Smuzhiyun 	return ret;
1066*4882a593Smuzhiyun }
1067*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_type_has_alg);
1068*4882a593Smuzhiyun 
1069*4882a593Smuzhiyun #ifdef CONFIG_CRYPTO_STATS
1070*4882a593Smuzhiyun void crypto_stats_init(struct crypto_alg *alg)
1071*4882a593Smuzhiyun {
1072*4882a593Smuzhiyun 	memset(&alg->stats, 0, sizeof(alg->stats));
1073*4882a593Smuzhiyun }
1074*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_stats_init);
1075*4882a593Smuzhiyun 
1076*4882a593Smuzhiyun void crypto_stats_get(struct crypto_alg *alg)
1077*4882a593Smuzhiyun {
1078*4882a593Smuzhiyun 	crypto_alg_get(alg);
1079*4882a593Smuzhiyun }
1080*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_stats_get);
1081*4882a593Smuzhiyun 
1082*4882a593Smuzhiyun void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg,
1083*4882a593Smuzhiyun 			       int ret)
1084*4882a593Smuzhiyun {
1085*4882a593Smuzhiyun 	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1086*4882a593Smuzhiyun 		atomic64_inc(&alg->stats.aead.err_cnt);
1087*4882a593Smuzhiyun 	} else {
1088*4882a593Smuzhiyun 		atomic64_inc(&alg->stats.aead.encrypt_cnt);
1089*4882a593Smuzhiyun 		atomic64_add(cryptlen, &alg->stats.aead.encrypt_tlen);
1090*4882a593Smuzhiyun 	}
1091*4882a593Smuzhiyun 	crypto_alg_put(alg);
1092*4882a593Smuzhiyun }
1093*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_stats_aead_encrypt);
1094*4882a593Smuzhiyun 
1095*4882a593Smuzhiyun void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg,
1096*4882a593Smuzhiyun 			       int ret)
1097*4882a593Smuzhiyun {
1098*4882a593Smuzhiyun 	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1099*4882a593Smuzhiyun 		atomic64_inc(&alg->stats.aead.err_cnt);
1100*4882a593Smuzhiyun 	} else {
1101*4882a593Smuzhiyun 		atomic64_inc(&alg->stats.aead.decrypt_cnt);
1102*4882a593Smuzhiyun 		atomic64_add(cryptlen, &alg->stats.aead.decrypt_tlen);
1103*4882a593Smuzhiyun 	}
1104*4882a593Smuzhiyun 	crypto_alg_put(alg);
1105*4882a593Smuzhiyun }
1106*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_stats_aead_decrypt);
1107*4882a593Smuzhiyun 
1108*4882a593Smuzhiyun void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret,
1109*4882a593Smuzhiyun 				   struct crypto_alg *alg)
1110*4882a593Smuzhiyun {
1111*4882a593Smuzhiyun 	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1112*4882a593Smuzhiyun 		atomic64_inc(&alg->stats.akcipher.err_cnt);
1113*4882a593Smuzhiyun 	} else {
1114*4882a593Smuzhiyun 		atomic64_inc(&alg->stats.akcipher.encrypt_cnt);
1115*4882a593Smuzhiyun 		atomic64_add(src_len, &alg->stats.akcipher.encrypt_tlen);
1116*4882a593Smuzhiyun 	}
1117*4882a593Smuzhiyun 	crypto_alg_put(alg);
1118*4882a593Smuzhiyun }
1119*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_stats_akcipher_encrypt);
1120*4882a593Smuzhiyun 
1121*4882a593Smuzhiyun void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret,
1122*4882a593Smuzhiyun 				   struct crypto_alg *alg)
1123*4882a593Smuzhiyun {
1124*4882a593Smuzhiyun 	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1125*4882a593Smuzhiyun 		atomic64_inc(&alg->stats.akcipher.err_cnt);
1126*4882a593Smuzhiyun 	} else {
1127*4882a593Smuzhiyun 		atomic64_inc(&alg->stats.akcipher.decrypt_cnt);
1128*4882a593Smuzhiyun 		atomic64_add(src_len, &alg->stats.akcipher.decrypt_tlen);
1129*4882a593Smuzhiyun 	}
1130*4882a593Smuzhiyun 	crypto_alg_put(alg);
1131*4882a593Smuzhiyun }
1132*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_stats_akcipher_decrypt);
1133*4882a593Smuzhiyun 
1134*4882a593Smuzhiyun void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg)
1135*4882a593Smuzhiyun {
1136*4882a593Smuzhiyun 	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
1137*4882a593Smuzhiyun 		atomic64_inc(&alg->stats.akcipher.err_cnt);
1138*4882a593Smuzhiyun 	else
1139*4882a593Smuzhiyun 		atomic64_inc(&alg->stats.akcipher.sign_cnt);
1140*4882a593Smuzhiyun 	crypto_alg_put(alg);
1141*4882a593Smuzhiyun }
1142*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_stats_akcipher_sign);
1143*4882a593Smuzhiyun 
1144*4882a593Smuzhiyun void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg)
1145*4882a593Smuzhiyun {
1146*4882a593Smuzhiyun 	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
1147*4882a593Smuzhiyun 		atomic64_inc(&alg->stats.akcipher.err_cnt);
1148*4882a593Smuzhiyun 	else
1149*4882a593Smuzhiyun 		atomic64_inc(&alg->stats.akcipher.verify_cnt);
1150*4882a593Smuzhiyun 	crypto_alg_put(alg);
1151*4882a593Smuzhiyun }
1152*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_stats_akcipher_verify);
1153*4882a593Smuzhiyun 
1154*4882a593Smuzhiyun void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg)
1155*4882a593Smuzhiyun {
1156*4882a593Smuzhiyun 	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1157*4882a593Smuzhiyun 		atomic64_inc(&alg->stats.compress.err_cnt);
1158*4882a593Smuzhiyun 	} else {
1159*4882a593Smuzhiyun 		atomic64_inc(&alg->stats.compress.compress_cnt);
1160*4882a593Smuzhiyun 		atomic64_add(slen, &alg->stats.compress.compress_tlen);
1161*4882a593Smuzhiyun 	}
1162*4882a593Smuzhiyun 	crypto_alg_put(alg);
1163*4882a593Smuzhiyun }
1164*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_stats_compress);
1165*4882a593Smuzhiyun 
1166*4882a593Smuzhiyun void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg)
1167*4882a593Smuzhiyun {
1168*4882a593Smuzhiyun 	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1169*4882a593Smuzhiyun 		atomic64_inc(&alg->stats.compress.err_cnt);
1170*4882a593Smuzhiyun 	} else {
1171*4882a593Smuzhiyun 		atomic64_inc(&alg->stats.compress.decompress_cnt);
1172*4882a593Smuzhiyun 		atomic64_add(slen, &alg->stats.compress.decompress_tlen);
1173*4882a593Smuzhiyun 	}
1174*4882a593Smuzhiyun 	crypto_alg_put(alg);
1175*4882a593Smuzhiyun }
1176*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_stats_decompress);
1177*4882a593Smuzhiyun 
1178*4882a593Smuzhiyun void crypto_stats_ahash_update(unsigned int nbytes, int ret,
1179*4882a593Smuzhiyun 			       struct crypto_alg *alg)
1180*4882a593Smuzhiyun {
1181*4882a593Smuzhiyun 	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
1182*4882a593Smuzhiyun 		atomic64_inc(&alg->stats.hash.err_cnt);
1183*4882a593Smuzhiyun 	else
1184*4882a593Smuzhiyun 		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
1185*4882a593Smuzhiyun 	crypto_alg_put(alg);
1186*4882a593Smuzhiyun }
1187*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_stats_ahash_update);
1188*4882a593Smuzhiyun 
1189*4882a593Smuzhiyun void crypto_stats_ahash_final(unsigned int nbytes, int ret,
1190*4882a593Smuzhiyun 			      struct crypto_alg *alg)
1191*4882a593Smuzhiyun {
1192*4882a593Smuzhiyun 	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1193*4882a593Smuzhiyun 		atomic64_inc(&alg->stats.hash.err_cnt);
1194*4882a593Smuzhiyun 	} else {
1195*4882a593Smuzhiyun 		atomic64_inc(&alg->stats.hash.hash_cnt);
1196*4882a593Smuzhiyun 		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
1197*4882a593Smuzhiyun 	}
1198*4882a593Smuzhiyun 	crypto_alg_put(alg);
1199*4882a593Smuzhiyun }
1200*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_stats_ahash_final);
1201*4882a593Smuzhiyun 
1202*4882a593Smuzhiyun void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret)
1203*4882a593Smuzhiyun {
1204*4882a593Smuzhiyun 	if (ret)
1205*4882a593Smuzhiyun 		atomic64_inc(&alg->stats.kpp.err_cnt);
1206*4882a593Smuzhiyun 	else
1207*4882a593Smuzhiyun 		atomic64_inc(&alg->stats.kpp.setsecret_cnt);
1208*4882a593Smuzhiyun 	crypto_alg_put(alg);
1209*4882a593Smuzhiyun }
1210*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_stats_kpp_set_secret);
1211*4882a593Smuzhiyun 
1212*4882a593Smuzhiyun void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret)
1213*4882a593Smuzhiyun {
1214*4882a593Smuzhiyun 	if (ret)
1215*4882a593Smuzhiyun 		atomic64_inc(&alg->stats.kpp.err_cnt);
1216*4882a593Smuzhiyun 	else
1217*4882a593Smuzhiyun 		atomic64_inc(&alg->stats.kpp.generate_public_key_cnt);
1218*4882a593Smuzhiyun 	crypto_alg_put(alg);
1219*4882a593Smuzhiyun }
1220*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_stats_kpp_generate_public_key);
1221*4882a593Smuzhiyun 
1222*4882a593Smuzhiyun void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret)
1223*4882a593Smuzhiyun {
1224*4882a593Smuzhiyun 	if (ret)
1225*4882a593Smuzhiyun 		atomic64_inc(&alg->stats.kpp.err_cnt);
1226*4882a593Smuzhiyun 	else
1227*4882a593Smuzhiyun 		atomic64_inc(&alg->stats.kpp.compute_shared_secret_cnt);
1228*4882a593Smuzhiyun 	crypto_alg_put(alg);
1229*4882a593Smuzhiyun }
1230*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_stats_kpp_compute_shared_secret);
1231*4882a593Smuzhiyun 
1232*4882a593Smuzhiyun void crypto_stats_rng_seed(struct crypto_alg *alg, int ret)
1233*4882a593Smuzhiyun {
1234*4882a593Smuzhiyun 	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
1235*4882a593Smuzhiyun 		atomic64_inc(&alg->stats.rng.err_cnt);
1236*4882a593Smuzhiyun 	else
1237*4882a593Smuzhiyun 		atomic64_inc(&alg->stats.rng.seed_cnt);
1238*4882a593Smuzhiyun 	crypto_alg_put(alg);
1239*4882a593Smuzhiyun }
1240*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_stats_rng_seed);
1241*4882a593Smuzhiyun 
1242*4882a593Smuzhiyun void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen,
1243*4882a593Smuzhiyun 			       int ret)
1244*4882a593Smuzhiyun {
1245*4882a593Smuzhiyun 	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1246*4882a593Smuzhiyun 		atomic64_inc(&alg->stats.rng.err_cnt);
1247*4882a593Smuzhiyun 	} else {
1248*4882a593Smuzhiyun 		atomic64_inc(&alg->stats.rng.generate_cnt);
1249*4882a593Smuzhiyun 		atomic64_add(dlen, &alg->stats.rng.generate_tlen);
1250*4882a593Smuzhiyun 	}
1251*4882a593Smuzhiyun 	crypto_alg_put(alg);
1252*4882a593Smuzhiyun }
1253*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_stats_rng_generate);
1254*4882a593Smuzhiyun 
1255*4882a593Smuzhiyun void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret,
1256*4882a593Smuzhiyun 				   struct crypto_alg *alg)
1257*4882a593Smuzhiyun {
1258*4882a593Smuzhiyun 	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1259*4882a593Smuzhiyun 		atomic64_inc(&alg->stats.cipher.err_cnt);
1260*4882a593Smuzhiyun 	} else {
1261*4882a593Smuzhiyun 		atomic64_inc(&alg->stats.cipher.encrypt_cnt);
1262*4882a593Smuzhiyun 		atomic64_add(cryptlen, &alg->stats.cipher.encrypt_tlen);
1263*4882a593Smuzhiyun 	}
1264*4882a593Smuzhiyun 	crypto_alg_put(alg);
1265*4882a593Smuzhiyun }
1266*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_stats_skcipher_encrypt);
1267*4882a593Smuzhiyun 
1268*4882a593Smuzhiyun void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret,
1269*4882a593Smuzhiyun 				   struct crypto_alg *alg)
1270*4882a593Smuzhiyun {
1271*4882a593Smuzhiyun 	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1272*4882a593Smuzhiyun 		atomic64_inc(&alg->stats.cipher.err_cnt);
1273*4882a593Smuzhiyun 	} else {
1274*4882a593Smuzhiyun 		atomic64_inc(&alg->stats.cipher.decrypt_cnt);
1275*4882a593Smuzhiyun 		atomic64_add(cryptlen, &alg->stats.cipher.decrypt_tlen);
1276*4882a593Smuzhiyun 	}
1277*4882a593Smuzhiyun 	crypto_alg_put(alg);
1278*4882a593Smuzhiyun }
1279*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(crypto_stats_skcipher_decrypt);
1280*4882a593Smuzhiyun #endif
1281*4882a593Smuzhiyun 
1282*4882a593Smuzhiyun static int __init crypto_algapi_init(void)
1283*4882a593Smuzhiyun {
1284*4882a593Smuzhiyun 	crypto_init_proc();
1285*4882a593Smuzhiyun 	return 0;
1286*4882a593Smuzhiyun }
1287*4882a593Smuzhiyun 
1288*4882a593Smuzhiyun static void __exit crypto_algapi_exit(void)
1289*4882a593Smuzhiyun {
1290*4882a593Smuzhiyun 	crypto_exit_proc();
1291*4882a593Smuzhiyun }
1292*4882a593Smuzhiyun 
1293*4882a593Smuzhiyun module_init(crypto_algapi_init);
1294*4882a593Smuzhiyun module_exit(crypto_algapi_exit);
1295*4882a593Smuzhiyun 
1296*4882a593Smuzhiyun MODULE_LICENSE("GPL");
1297*4882a593Smuzhiyun MODULE_DESCRIPTION("Cryptographic algorithms API");
1298*4882a593Smuzhiyun MODULE_SOFTDEP("pre: cryptomgr");
1299