xref: /OK3568_Linux_fs/kernel/crypto/fips140-alg-registration.c (revision 4882a59341e53eb6f0b4789bf948001014eff981)
1*4882a593Smuzhiyun // SPDX-License-Identifier: GPL-2.0-only
2*4882a593Smuzhiyun /*
3*4882a593Smuzhiyun  * Block crypto operations until tests complete
4*4882a593Smuzhiyun  *
5*4882a593Smuzhiyun  * Copyright 2021 Google LLC
6*4882a593Smuzhiyun  *
7*4882a593Smuzhiyun  * This file defines the fips140_crypto_register_*() functions, to which all
8*4882a593Smuzhiyun  * calls to crypto_register_*() in the module are redirected.  These functions
9*4882a593Smuzhiyun  * override the tfm initialization function of each algorithm to insert a wait
10*4882a593Smuzhiyun  * for the module having completed its self-tests and integrity check.
11*4882a593Smuzhiyun  *
12*4882a593Smuzhiyun  * The exact field that we override depends on the algorithm type.  For
13*4882a593Smuzhiyun  * algorithm types that have a strongly-typed initialization function pointer
14*4882a593Smuzhiyun  * (e.g. skcipher), we must override that, since cra_init isn't guaranteed to be
15*4882a593Smuzhiyun  * called for those despite the field being present in the base struct.  For the
16*4882a593Smuzhiyun  * other algorithm types (e.g. "cipher") we must override cra_init.
17*4882a593Smuzhiyun  *
18*4882a593Smuzhiyun  * All of this applies to both normal algorithms and template instances.
19*4882a593Smuzhiyun  *
20*4882a593Smuzhiyun  * The purpose of all of this is to meet a FIPS requirement where the module
21*4882a593Smuzhiyun  * must not produce any output from cryptographic algorithms until it completes
22*4882a593Smuzhiyun  * its tests.  Technically this is impossible, but this solution meets the
23*4882a593Smuzhiyun  * intent of the requirement, assuming the user makes a supported sequence of
24*4882a593Smuzhiyun  * API calls.  Note that we can't simply run the tests before registering the
25*4882a593Smuzhiyun  * algorithms, as the algorithms must be registered in order to run the tests.
26*4882a593Smuzhiyun  *
27*4882a593Smuzhiyun  * It would be much easier to handle this in the kernel's crypto API framework.
28*4882a593Smuzhiyun  * Unfortunately, that was deemed insufficient because the module itself is
29*4882a593Smuzhiyun  * required to do the enforcement.  What is *actually* required is still very
30*4882a593Smuzhiyun  * vague, but the approach implemented here should meet the requirement.
31*4882a593Smuzhiyun  */
32*4882a593Smuzhiyun 
33*4882a593Smuzhiyun /*
34*4882a593Smuzhiyun  * This file is the one place in fips140.ko that needs to call the kernel's real
35*4882a593Smuzhiyun  * algorithm registration functions, so #undef all the macros from
36*4882a593Smuzhiyun  * fips140-defs.h so that the "fips140_" prefix doesn't automatically get added.
37*4882a593Smuzhiyun  */
38*4882a593Smuzhiyun #undef aead_register_instance
39*4882a593Smuzhiyun #undef ahash_register_instance
40*4882a593Smuzhiyun #undef crypto_register_aead
41*4882a593Smuzhiyun #undef crypto_register_aeads
42*4882a593Smuzhiyun #undef crypto_register_ahash
43*4882a593Smuzhiyun #undef crypto_register_ahashes
44*4882a593Smuzhiyun #undef crypto_register_alg
45*4882a593Smuzhiyun #undef crypto_register_algs
46*4882a593Smuzhiyun #undef crypto_register_rng
47*4882a593Smuzhiyun #undef crypto_register_rngs
48*4882a593Smuzhiyun #undef crypto_register_shash
49*4882a593Smuzhiyun #undef crypto_register_shashes
50*4882a593Smuzhiyun #undef crypto_register_skcipher
51*4882a593Smuzhiyun #undef crypto_register_skciphers
52*4882a593Smuzhiyun #undef shash_register_instance
53*4882a593Smuzhiyun #undef skcipher_register_instance
54*4882a593Smuzhiyun 
55*4882a593Smuzhiyun #include <crypto/algapi.h>
56*4882a593Smuzhiyun #include <crypto/internal/aead.h>
57*4882a593Smuzhiyun #include <crypto/internal/hash.h>
58*4882a593Smuzhiyun #include <crypto/internal/rng.h>
59*4882a593Smuzhiyun #include <crypto/internal/skcipher.h>
60*4882a593Smuzhiyun #include <linux/xarray.h>
61*4882a593Smuzhiyun 
62*4882a593Smuzhiyun #include "fips140-module.h"
63*4882a593Smuzhiyun 
64*4882a593Smuzhiyun /* Indicates whether the self-tests and integrity check have completed */
65*4882a593Smuzhiyun DECLARE_COMPLETION(fips140_tests_done);
66*4882a593Smuzhiyun 
67*4882a593Smuzhiyun /* The thread running the self-tests and integrity check */
68*4882a593Smuzhiyun struct task_struct *fips140_init_thread;
69*4882a593Smuzhiyun 
70*4882a593Smuzhiyun /*
71*4882a593Smuzhiyun  * Map from crypto_alg to original initialization function (possibly NULL)
72*4882a593Smuzhiyun  *
73*4882a593Smuzhiyun  * Note: unregistering an algorithm will leak its map entry, as we don't bother
74*4882a593Smuzhiyun  * to remove it.  This should be fine since fips140.ko can't be unloaded.  The
75*4882a593Smuzhiyun  * proper solution would be to store the original function pointer in a new
76*4882a593Smuzhiyun  * field in 'struct crypto_alg', but that would require kernel support.
77*4882a593Smuzhiyun  */
78*4882a593Smuzhiyun static DEFINE_XARRAY(fips140_init_func_map);
79*4882a593Smuzhiyun 
fips140_ready(void)80*4882a593Smuzhiyun static bool fips140_ready(void)
81*4882a593Smuzhiyun {
82*4882a593Smuzhiyun 	return completion_done(&fips140_tests_done);
83*4882a593Smuzhiyun }
84*4882a593Smuzhiyun 
85*4882a593Smuzhiyun /*
86*4882a593Smuzhiyun  * Wait until crypto operations are allowed to proceed.  Return true if the
87*4882a593Smuzhiyun  * tests are done, or false if the caller is the thread running the tests so it
88*4882a593Smuzhiyun  * is allowed to proceed anyway.
89*4882a593Smuzhiyun  */
fips140_wait_until_ready(struct crypto_alg * alg)90*4882a593Smuzhiyun static bool fips140_wait_until_ready(struct crypto_alg *alg)
91*4882a593Smuzhiyun {
92*4882a593Smuzhiyun 	if (fips140_ready())
93*4882a593Smuzhiyun 		return true;
94*4882a593Smuzhiyun 	/*
95*4882a593Smuzhiyun 	 * The thread running the tests must not wait.  Since tfms can only be
96*4882a593Smuzhiyun 	 * allocated in task context, we can reliably determine whether the
97*4882a593Smuzhiyun 	 * invocation is from that thread or not by checking 'current'.
98*4882a593Smuzhiyun 	 */
99*4882a593Smuzhiyun 	if (current == fips140_init_thread)
100*4882a593Smuzhiyun 		return false;
101*4882a593Smuzhiyun 
102*4882a593Smuzhiyun 	pr_info("blocking user of %s until tests complete\n",
103*4882a593Smuzhiyun 		alg->cra_driver_name);
104*4882a593Smuzhiyun 	wait_for_completion(&fips140_tests_done);
105*4882a593Smuzhiyun 	pr_info("tests done, allowing %s to proceed\n", alg->cra_driver_name);
106*4882a593Smuzhiyun 	return true;
107*4882a593Smuzhiyun }
108*4882a593Smuzhiyun 
fips140_store_init_function(struct crypto_alg * alg,void * func)109*4882a593Smuzhiyun static int fips140_store_init_function(struct crypto_alg *alg, void *func)
110*4882a593Smuzhiyun {
111*4882a593Smuzhiyun 	void *ret;
112*4882a593Smuzhiyun 
113*4882a593Smuzhiyun 	/*
114*4882a593Smuzhiyun 	 * The XArray API requires 4-byte aligned values.  Although function
115*4882a593Smuzhiyun 	 * pointers in general aren't guaranteed to be 4-byte aligned, it should
116*4882a593Smuzhiyun 	 * be the case for the platforms this module is used on.
117*4882a593Smuzhiyun 	 */
118*4882a593Smuzhiyun 	if (WARN_ON((unsigned long)func & 3))
119*4882a593Smuzhiyun 		return -EINVAL;
120*4882a593Smuzhiyun 
121*4882a593Smuzhiyun 	ret = xa_store(&fips140_init_func_map, (unsigned long)alg, func,
122*4882a593Smuzhiyun 		       GFP_KERNEL);
123*4882a593Smuzhiyun 	return xa_err(ret);
124*4882a593Smuzhiyun }
125*4882a593Smuzhiyun 
126*4882a593Smuzhiyun /* Get the algorithm's original initialization function (possibly NULL) */
fips140_load_init_function(struct crypto_alg * alg)127*4882a593Smuzhiyun static void *fips140_load_init_function(struct crypto_alg *alg)
128*4882a593Smuzhiyun {
129*4882a593Smuzhiyun 	return xa_load(&fips140_init_func_map, (unsigned long)alg);
130*4882a593Smuzhiyun }
131*4882a593Smuzhiyun 
132*4882a593Smuzhiyun /* tfm initialization function overrides */
133*4882a593Smuzhiyun 
/*
 * Replacement for ->cra_init.  Waits until the self-tests and integrity check
 * are done (unless the caller is the test thread itself), then restores and
 * calls the algorithm's original ->cra_init, which may be NULL.
 */
static int fips140_alg_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	int (*cra_init)(struct crypto_tfm *tfm) =
		fips140_load_init_function(alg);

	/*
	 * Once the tests are done, undo the override so that future tfm
	 * allocations no longer go through this wrapper.  WRITE_ONCE is
	 * presumably used because other tasks may read ->cra_init
	 * concurrently without locking -- NOTE(review): confirm.
	 */
	if (fips140_wait_until_ready(alg))
		WRITE_ONCE(alg->cra_init, cra_init);
	return cra_init ? cra_init(tfm) : 0;
}
144*4882a593Smuzhiyun 
/*
 * Replacement for aead_alg ->init.  Waits for the tests to complete, restores
 * the original ->init (possibly NULL), then calls it if set.
 */
static int fips140_aead_init_tfm(struct crypto_aead *tfm)
{
	struct aead_alg *alg = crypto_aead_alg(tfm);
	int (*init)(struct crypto_aead *tfm) =
		fips140_load_init_function(&alg->base);

	/* Undo the override once the tests are done; see fips140_alg_init_tfm() */
	if (fips140_wait_until_ready(&alg->base))
		WRITE_ONCE(alg->init, init);
	return init ? init(tfm) : 0;
}
155*4882a593Smuzhiyun 
/*
 * Replacement for ahash_alg ->init_tfm.  Waits for the tests to complete,
 * restores the original ->init_tfm (possibly NULL), then calls it if set.
 */
static int fips140_ahash_init_tfm(struct crypto_ahash *tfm)
{
	struct hash_alg_common *halg = crypto_hash_alg_common(tfm);
	struct ahash_alg *alg = container_of(halg, struct ahash_alg, halg);
	int (*init_tfm)(struct crypto_ahash *tfm) =
		fips140_load_init_function(&halg->base);

	/* Undo the override once the tests are done; see fips140_alg_init_tfm() */
	if (fips140_wait_until_ready(&halg->base))
		WRITE_ONCE(alg->init_tfm, init_tfm);
	return init_tfm ? init_tfm(tfm) : 0;
}
167*4882a593Smuzhiyun 
/*
 * Replacement for shash_alg ->init_tfm.  Waits for the tests to complete,
 * restores the original ->init_tfm (possibly NULL), then calls it if set.
 */
static int fips140_shash_init_tfm(struct crypto_shash *tfm)
{
	struct shash_alg *alg = crypto_shash_alg(tfm);
	int (*init_tfm)(struct crypto_shash *tfm) =
		fips140_load_init_function(&alg->base);

	/* Undo the override once the tests are done; see fips140_alg_init_tfm() */
	if (fips140_wait_until_ready(&alg->base))
		WRITE_ONCE(alg->init_tfm, init_tfm);
	return init_tfm ? init_tfm(tfm) : 0;
}
178*4882a593Smuzhiyun 
/*
 * Replacement for skcipher_alg ->init.  Waits for the tests to complete,
 * restores the original ->init (possibly NULL), then calls it if set.
 */
static int fips140_skcipher_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	int (*init)(struct crypto_skcipher *tfm) =
		fips140_load_init_function(&alg->base);

	/* Undo the override once the tests are done; see fips140_alg_init_tfm() */
	if (fips140_wait_until_ready(&alg->base))
		WRITE_ONCE(alg->init, init);
	return init ? init(tfm) : 0;
}
189*4882a593Smuzhiyun 
190*4882a593Smuzhiyun /* Single algorithm registration */
191*4882a593Smuzhiyun 
/*
 * Hook an algorithm's initialization function: save the original value of
 * 'field' (possibly NULL) in the map keyed by 'base_alg', then point 'field'
 * at 'wrapper_func'.  Does nothing once the tests are done, or if the field
 * is already hooked.  This is a statement-expression macro (rather than a
 * function) because 'field' is a struct member name, not a value.  Evaluates
 * to 0 or a -errno value.
 */
#define prepare_alg(alg, base_alg, field, wrapper_func)			\
({									\
	int err = 0;							\
									\
	if (!fips140_ready() && alg->field != wrapper_func) {		\
		err = fips140_store_init_function(base_alg, alg->field);\
		if (err == 0)						\
			alg->field = wrapper_func;			\
	}								\
	err;								\
})
203*4882a593Smuzhiyun 
/* Hook the initialization function of a "generic" algorithm. */
static int fips140_prepare_alg(struct crypto_alg *alg)
{
	/*
	 * Override cra_init.  This is only for algorithm types like cipher and
	 * rng that don't have a strongly-typed initialization function.
	 */
	return prepare_alg(alg, alg, cra_init, fips140_alg_init_tfm);
}
212*4882a593Smuzhiyun 
/* Hook the strongly-typed ->init of an AEAD algorithm. */
static int fips140_prepare_aead_alg(struct aead_alg *alg)
{
	return prepare_alg(alg, &alg->base, init, fips140_aead_init_tfm);
}
217*4882a593Smuzhiyun 
/* Hook the strongly-typed ->init_tfm of an ahash algorithm. */
static int fips140_prepare_ahash_alg(struct ahash_alg *alg)
{
	return prepare_alg(alg, &alg->halg.base, init_tfm,
			   fips140_ahash_init_tfm);
}
223*4882a593Smuzhiyun 
/* Hook the initialization function of an RNG algorithm. */
static int fips140_prepare_rng_alg(struct rng_alg *alg)
{
	/*
	 * rng doesn't have a strongly-typed initialization function, so we must
	 * treat rng algorithms as "generic" algorithms.
	 */
	return fips140_prepare_alg(&alg->base);
}
232*4882a593Smuzhiyun 
/* Hook the strongly-typed ->init_tfm of a shash algorithm. */
static int fips140_prepare_shash_alg(struct shash_alg *alg)
{
	return prepare_alg(alg, &alg->base, init_tfm, fips140_shash_init_tfm);
}
237*4882a593Smuzhiyun 
/* Hook the strongly-typed ->init of an skcipher algorithm. */
static int fips140_prepare_skcipher_alg(struct skcipher_alg *alg)
{
	return prepare_alg(alg, &alg->base, init, fips140_skcipher_init_tfm);
}
242*4882a593Smuzhiyun 
/* Hook the tfm init function, then do the real registration. */
int fips140_crypto_register_alg(struct crypto_alg *alg)
{
	int err;

	err = fips140_prepare_alg(alg);
	if (err)
		return err;
	return crypto_register_alg(alg);
}
247*4882a593Smuzhiyun 
/* Hook the tfm init function, then do the real registration. */
int fips140_crypto_register_aead(struct aead_alg *alg)
{
	int err;

	err = fips140_prepare_aead_alg(alg);
	if (err)
		return err;
	return crypto_register_aead(alg);
}
252*4882a593Smuzhiyun 
/* Hook the tfm init function, then do the real registration. */
int fips140_crypto_register_ahash(struct ahash_alg *alg)
{
	int err;

	err = fips140_prepare_ahash_alg(alg);
	if (err)
		return err;
	return crypto_register_ahash(alg);
}
257*4882a593Smuzhiyun 
/* Hook the tfm init function, then do the real registration. */
int fips140_crypto_register_rng(struct rng_alg *alg)
{
	int err;

	err = fips140_prepare_rng_alg(alg);
	if (err)
		return err;
	return crypto_register_rng(alg);
}
262*4882a593Smuzhiyun 
/* Hook the tfm init function, then do the real registration. */
int fips140_crypto_register_shash(struct shash_alg *alg)
{
	int err;

	err = fips140_prepare_shash_alg(alg);
	if (err)
		return err;
	return crypto_register_shash(alg);
}
267*4882a593Smuzhiyun 
/* Hook the tfm init function, then do the real registration. */
int fips140_crypto_register_skcipher(struct skcipher_alg *alg)
{
	int err;

	err = fips140_prepare_skcipher_alg(alg);
	if (err)
		return err;
	return crypto_register_skcipher(alg);
}
273*4882a593Smuzhiyun 
274*4882a593Smuzhiyun /* Instance registration */
275*4882a593Smuzhiyun 
fips140_aead_register_instance(struct crypto_template * tmpl,struct aead_instance * inst)276*4882a593Smuzhiyun int fips140_aead_register_instance(struct crypto_template *tmpl,
277*4882a593Smuzhiyun 				   struct aead_instance *inst)
278*4882a593Smuzhiyun {
279*4882a593Smuzhiyun 	return fips140_prepare_aead_alg(&inst->alg) ?:
280*4882a593Smuzhiyun 		aead_register_instance(tmpl, inst);
281*4882a593Smuzhiyun }
282*4882a593Smuzhiyun 
fips140_ahash_register_instance(struct crypto_template * tmpl,struct ahash_instance * inst)283*4882a593Smuzhiyun int fips140_ahash_register_instance(struct crypto_template *tmpl,
284*4882a593Smuzhiyun 				    struct ahash_instance *inst)
285*4882a593Smuzhiyun {
286*4882a593Smuzhiyun 	return fips140_prepare_ahash_alg(&inst->alg) ?:
287*4882a593Smuzhiyun 		ahash_register_instance(tmpl, inst);
288*4882a593Smuzhiyun }
289*4882a593Smuzhiyun 
fips140_shash_register_instance(struct crypto_template * tmpl,struct shash_instance * inst)290*4882a593Smuzhiyun int fips140_shash_register_instance(struct crypto_template *tmpl,
291*4882a593Smuzhiyun 				    struct shash_instance *inst)
292*4882a593Smuzhiyun {
293*4882a593Smuzhiyun 	return fips140_prepare_shash_alg(&inst->alg) ?:
294*4882a593Smuzhiyun 		shash_register_instance(tmpl, inst);
295*4882a593Smuzhiyun }
296*4882a593Smuzhiyun 
fips140_skcipher_register_instance(struct crypto_template * tmpl,struct skcipher_instance * inst)297*4882a593Smuzhiyun int fips140_skcipher_register_instance(struct crypto_template *tmpl,
298*4882a593Smuzhiyun 				       struct skcipher_instance *inst)
299*4882a593Smuzhiyun {
300*4882a593Smuzhiyun 	return fips140_prepare_skcipher_alg(&inst->alg) ?:
301*4882a593Smuzhiyun 		skcipher_register_instance(tmpl, inst);
302*4882a593Smuzhiyun }
303*4882a593Smuzhiyun 
304*4882a593Smuzhiyun /* Bulk algorithm registration */
305*4882a593Smuzhiyun 
fips140_crypto_register_algs(struct crypto_alg * algs,int count)306*4882a593Smuzhiyun int fips140_crypto_register_algs(struct crypto_alg *algs, int count)
307*4882a593Smuzhiyun {
308*4882a593Smuzhiyun 	int i;
309*4882a593Smuzhiyun 	int err;
310*4882a593Smuzhiyun 
311*4882a593Smuzhiyun 	for (i = 0; i < count; i++) {
312*4882a593Smuzhiyun 		err = fips140_prepare_alg(&algs[i]);
313*4882a593Smuzhiyun 		if (err)
314*4882a593Smuzhiyun 			return err;
315*4882a593Smuzhiyun 	}
316*4882a593Smuzhiyun 
317*4882a593Smuzhiyun 	return crypto_register_algs(algs, count);
318*4882a593Smuzhiyun }
319*4882a593Smuzhiyun 
fips140_crypto_register_aeads(struct aead_alg * algs,int count)320*4882a593Smuzhiyun int fips140_crypto_register_aeads(struct aead_alg *algs, int count)
321*4882a593Smuzhiyun {
322*4882a593Smuzhiyun 	int i;
323*4882a593Smuzhiyun 	int err;
324*4882a593Smuzhiyun 
325*4882a593Smuzhiyun 	for (i = 0; i < count; i++) {
326*4882a593Smuzhiyun 		err = fips140_prepare_aead_alg(&algs[i]);
327*4882a593Smuzhiyun 		if (err)
328*4882a593Smuzhiyun 			return err;
329*4882a593Smuzhiyun 	}
330*4882a593Smuzhiyun 
331*4882a593Smuzhiyun 	return crypto_register_aeads(algs, count);
332*4882a593Smuzhiyun }
333*4882a593Smuzhiyun 
fips140_crypto_register_ahashes(struct ahash_alg * algs,int count)334*4882a593Smuzhiyun int fips140_crypto_register_ahashes(struct ahash_alg *algs, int count)
335*4882a593Smuzhiyun {
336*4882a593Smuzhiyun 	int i;
337*4882a593Smuzhiyun 	int err;
338*4882a593Smuzhiyun 
339*4882a593Smuzhiyun 	for (i = 0; i < count; i++) {
340*4882a593Smuzhiyun 		err = fips140_prepare_ahash_alg(&algs[i]);
341*4882a593Smuzhiyun 		if (err)
342*4882a593Smuzhiyun 			return err;
343*4882a593Smuzhiyun 	}
344*4882a593Smuzhiyun 
345*4882a593Smuzhiyun 	return crypto_register_ahashes(algs, count);
346*4882a593Smuzhiyun }
347*4882a593Smuzhiyun 
fips140_crypto_register_rngs(struct rng_alg * algs,int count)348*4882a593Smuzhiyun int fips140_crypto_register_rngs(struct rng_alg *algs, int count)
349*4882a593Smuzhiyun {
350*4882a593Smuzhiyun 	int i;
351*4882a593Smuzhiyun 	int err;
352*4882a593Smuzhiyun 
353*4882a593Smuzhiyun 	for (i = 0; i < count; i++) {
354*4882a593Smuzhiyun 		err = fips140_prepare_rng_alg(&algs[i]);
355*4882a593Smuzhiyun 		if (err)
356*4882a593Smuzhiyun 			return err;
357*4882a593Smuzhiyun 	}
358*4882a593Smuzhiyun 
359*4882a593Smuzhiyun 	return crypto_register_rngs(algs, count);
360*4882a593Smuzhiyun }
361*4882a593Smuzhiyun 
fips140_crypto_register_shashes(struct shash_alg * algs,int count)362*4882a593Smuzhiyun int fips140_crypto_register_shashes(struct shash_alg *algs, int count)
363*4882a593Smuzhiyun {
364*4882a593Smuzhiyun 	int i;
365*4882a593Smuzhiyun 	int err;
366*4882a593Smuzhiyun 
367*4882a593Smuzhiyun 	for (i = 0; i < count; i++) {
368*4882a593Smuzhiyun 		err = fips140_prepare_shash_alg(&algs[i]);
369*4882a593Smuzhiyun 		if (err)
370*4882a593Smuzhiyun 			return err;
371*4882a593Smuzhiyun 	}
372*4882a593Smuzhiyun 
373*4882a593Smuzhiyun 	return crypto_register_shashes(algs, count);
374*4882a593Smuzhiyun }
375*4882a593Smuzhiyun 
fips140_crypto_register_skciphers(struct skcipher_alg * algs,int count)376*4882a593Smuzhiyun int fips140_crypto_register_skciphers(struct skcipher_alg *algs, int count)
377*4882a593Smuzhiyun {
378*4882a593Smuzhiyun 	int i;
379*4882a593Smuzhiyun 	int err;
380*4882a593Smuzhiyun 
381*4882a593Smuzhiyun 	for (i = 0; i < count; i++) {
382*4882a593Smuzhiyun 		err = fips140_prepare_skcipher_alg(&algs[i]);
383*4882a593Smuzhiyun 		if (err)
384*4882a593Smuzhiyun 			return err;
385*4882a593Smuzhiyun 	}
386*4882a593Smuzhiyun 
387*4882a593Smuzhiyun 	return crypto_register_skciphers(algs, count);
388*4882a593Smuzhiyun }
389