// SPDX-License-Identifier: GPL-2.0
/*
 * ARM NEON accelerated ChaCha and XChaCha stream ciphers,
 * including ChaCha20 (RFC7539)
 *
 * Copyright (C) 2016-2019 Linaro, Ltd. <ard.biesheuvel@linaro.org>
 * Copyright (C) 2015 Martin Willi
 */

#include <crypto/algapi.h>
#include <crypto/internal/chacha.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/jump_label.h>
#include <linux/kernel.h>
#include <linux/module.h>

#include <asm/cputype.h>
#include <asm/hwcap.h>
#include <asm/neon.h>
#include <asm/simd.h>

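/*
 * Entry points into the accompanying assembly: the NEON 1-block and
 * 4-block ChaCha routines, scalar and NEON HChaCha block functions for
 * XChaCha subkey derivation, and the scalar ChaCha implementation.
 */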
asmlinkage void chacha_block_xor_neon(const u32 *state, u8 *dst, const u8 *src,
				      int nrounds);
asmlinkage void chacha_4block_xor_neon(const u32 *state, u8 *dst, const u8 *src,
				       int nrounds);
asmlinkage void hchacha_block_arm(const u32 *state, u32 *out, int nrounds);
asmlinkage void hchacha_block_neon(const u32 *state, u32 *out, int nrounds);

asmlinkage void chacha_doarm(u8 *dst, const u8 *src, unsigned int bytes,
			     const u32 *state, int nrounds);

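/*
 * Runtime switch: enabled once at module init when the CPU has NEON and
 * is not one of the cores where the scalar code is the better choice.
 */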
static __ro_after_init DEFINE_STATIC_KEY_FALSE(use_neon);

static inline bool neon_usable(void)
{
	return static_branch_likely(&use_neon) && crypto_simd_usable();
}

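/*
 * NEON bulk path: consume as many 4-block stretches as possible, then
 * whole single blocks, then any partial tail.  Word 12 of the state is
 * the block counter and is advanced by the number of blocks consumed.
 */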
static void chacha_doneon(u32 *state, u8 *dst, const u8 *src,
			  unsigned int bytes, int nrounds)
{
	u8 buf[CHACHA_BLOCK_SIZE];

	while (bytes >= CHACHA_BLOCK_SIZE * 4) {
		chacha_4block_xor_neon(state, dst, src, nrounds);
		bytes -= CHACHA_BLOCK_SIZE * 4;
		src += CHACHA_BLOCK_SIZE * 4;
		dst += CHACHA_BLOCK_SIZE * 4;
		state[12] += 4;
	}
	while (bytes >= CHACHA_BLOCK_SIZE) {
		chacha_block_xor_neon(state, dst, src, nrounds);
		bytes -= CHACHA_BLOCK_SIZE;
		src += CHACHA_BLOCK_SIZE;
		dst += CHACHA_BLOCK_SIZE;
		state[12]++;
	}
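	/*
	 * Partial final block: the NEON routine operates on whole blocks,
	 * so bounce the tail through a full-size on-stack buffer.
	 */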
	if (bytes) {
		memcpy(buf, src, bytes);
		chacha_block_xor_neon(state, buf, buf, nrounds);
		memcpy(dst, buf, bytes);
	}
}

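/*
 * Library interface: one HChaCha block, used by XChaCha to derive a
 * subkey from the key and the first 16 bytes of the nonce.
 */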
void hchacha_block_arch(const u32 *state, u32 *stream, int nrounds)
{
	if (!IS_ENABLED(CONFIG_KERNEL_MODE_NEON) || !neon_usable()) {
		hchacha_block_arm(state, stream, nrounds);
	} else {
		kernel_neon_begin();
		hchacha_block_neon(state, stream, nrounds);
		kernel_neon_end();
	}
}
EXPORT_SYMBOL(hchacha_block_arch);

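/* No ARM-specific state setup is needed; defer to the generic helper. */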
void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv)
{
	chacha_init_generic(state, key, iv);
}
EXPORT_SYMBOL(chacha_init_arch);

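/*
 * Library interface for bulk en/decryption.  Inputs of at most one
 * block, or calls made where NEON cannot be used, take the scalar path;
 * state[12] is advanced by the number of blocks processed.
 */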
void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src, unsigned int bytes,
		       int nrounds)
{
	if (!IS_ENABLED(CONFIG_KERNEL_MODE_NEON) || !neon_usable() ||
	    bytes <= CHACHA_BLOCK_SIZE) {
		chacha_doarm(dst, src, bytes, state, nrounds);
		state[12] += DIV_ROUND_UP(bytes, CHACHA_BLOCK_SIZE);
		return;
	}

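	/*
	 * Process at most 4 KiB per kernel_neon_begin()/end() section to
	 * keep the non-preemptible NEON regions short.
	 */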
	do {
		unsigned int todo = min_t(unsigned int, bytes, SZ_4K);

		kernel_neon_begin();
		chacha_doneon(state, dst, src, todo, nrounds);
		kernel_neon_end();

		bytes -= todo;
		src += todo;
		dst += todo;
	} while (bytes);
}
EXPORT_SYMBOL(chacha_crypt_arch);

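/*
 * skcipher path: walk the scatterlists and en/decrypt each chunk with
 * either the scalar or the NEON implementation.
 */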
static int chacha_stream_xor(struct skcipher_request *req,
			     const struct chacha_ctx *ctx, const u8 *iv,
			     bool neon)
{
	struct skcipher_walk walk;
	u32 state[16];
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	chacha_init_generic(state, ctx->key, iv);

	while (walk.nbytes > 0) {
		unsigned int nbytes = walk.nbytes;

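		/*
		 * All chunks except the last must be a whole number of
		 * walk.stride bytes so the block counter stays in step.
		 */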
		if (nbytes < walk.total)
			nbytes = round_down(nbytes, walk.stride);

		if (!IS_ENABLED(CONFIG_KERNEL_MODE_NEON) || !neon) {
			chacha_doarm(walk.dst.virt.addr, walk.src.virt.addr,
				     nbytes, state, ctx->nrounds);
			state[12] += DIV_ROUND_UP(nbytes, CHACHA_BLOCK_SIZE);
		} else {
			kernel_neon_begin();
			chacha_doneon(state, walk.dst.virt.addr,
				      walk.src.virt.addr, nbytes, ctx->nrounds);
			kernel_neon_end();
		}
		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
	}

	return err;
}

static int do_chacha(struct skcipher_request *req, bool neon)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);

	return chacha_stream_xor(req, ctx, req->iv, neon);
}

static int chacha_arm(struct skcipher_request *req)
{
	return do_chacha(req, false);
}

static int chacha_neon(struct skcipher_request *req)
{
	return do_chacha(req, neon_usable());
}

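/*
 * XChaCha: run HChaCha on the key and the first 16 bytes of the IV to
 * derive a one-off subkey, then run plain ChaCha with that subkey.
 */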
static int do_xchacha(struct skcipher_request *req, bool neon)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct chacha_ctx subctx;
	u32 state[16];
	u8 real_iv[16];

	chacha_init_generic(state, ctx->key, req->iv);

	if (!IS_ENABLED(CONFIG_KERNEL_MODE_NEON) || !neon) {
		hchacha_block_arm(state, subctx.key, ctx->nrounds);
	} else {
		kernel_neon_begin();
		hchacha_block_neon(state, subctx.key, ctx->nrounds);
		kernel_neon_end();
	}
	subctx.nrounds = ctx->nrounds;

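	/*
	 * Build the 16-byte ChaCha IV: the 64-bit counter from bytes
	 * 24-31 of the XChaCha IV, followed by the remaining 64 nonce
	 * bits from bytes 16-23.
	 */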
	memcpy(&real_iv[0], req->iv + 24, 8);
	memcpy(&real_iv[8], req->iv + 16, 8);
	return chacha_stream_xor(req, &subctx, real_iv, neon);
}

static int xchacha_arm(struct skcipher_request *req)
{
	return do_xchacha(req, false);
}

static int xchacha_neon(struct skcipher_request *req)
{
	return do_xchacha(req, neon_usable());
}

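/*
 * Scalar implementations, always registered.  Priority 200 beats the
 * generic C versions but loses to the NEON variants below.
 */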
static struct skcipher_alg arm_algs[] = {
	{
		.base.cra_name		= "chacha20",
		.base.cra_driver_name	= "chacha20-arm",
		.base.cra_priority	= 200,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct chacha_ctx),
		.base.cra_module	= THIS_MODULE,

		.min_keysize		= CHACHA_KEY_SIZE,
		.max_keysize		= CHACHA_KEY_SIZE,
		.ivsize			= CHACHA_IV_SIZE,
		.chunksize		= CHACHA_BLOCK_SIZE,
		.setkey			= chacha20_setkey,
		.encrypt		= chacha_arm,
		.decrypt		= chacha_arm,
	}, {
		.base.cra_name		= "xchacha20",
		.base.cra_driver_name	= "xchacha20-arm",
		.base.cra_priority	= 200,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct chacha_ctx),
		.base.cra_module	= THIS_MODULE,

		.min_keysize		= CHACHA_KEY_SIZE,
		.max_keysize		= CHACHA_KEY_SIZE,
		.ivsize			= XCHACHA_IV_SIZE,
		.chunksize		= CHACHA_BLOCK_SIZE,
		.setkey			= chacha20_setkey,
		.encrypt		= xchacha_arm,
		.decrypt		= xchacha_arm,
	}, {
		.base.cra_name		= "xchacha12",
		.base.cra_driver_name	= "xchacha12-arm",
		.base.cra_priority	= 200,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct chacha_ctx),
		.base.cra_module	= THIS_MODULE,

		.min_keysize		= CHACHA_KEY_SIZE,
		.max_keysize		= CHACHA_KEY_SIZE,
		.ivsize			= XCHACHA_IV_SIZE,
		.chunksize		= CHACHA_BLOCK_SIZE,
		.setkey			= chacha12_setkey,
		.encrypt		= xchacha_arm,
		.decrypt		= xchacha_arm,
	},
};

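/*
 * NEON implementations, registered only when the CPU has NEON.  The
 * walksize of four blocks matches the 4-way interleaved NEON code.
 */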
static struct skcipher_alg neon_algs[] = {
	{
		.base.cra_name		= "chacha20",
		.base.cra_driver_name	= "chacha20-neon",
		.base.cra_priority	= 300,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct chacha_ctx),
		.base.cra_module	= THIS_MODULE,

		.min_keysize		= CHACHA_KEY_SIZE,
		.max_keysize		= CHACHA_KEY_SIZE,
		.ivsize			= CHACHA_IV_SIZE,
		.chunksize		= CHACHA_BLOCK_SIZE,
		.walksize		= 4 * CHACHA_BLOCK_SIZE,
		.setkey			= chacha20_setkey,
		.encrypt		= chacha_neon,
		.decrypt		= chacha_neon,
	}, {
		.base.cra_name		= "xchacha20",
		.base.cra_driver_name	= "xchacha20-neon",
		.base.cra_priority	= 300,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct chacha_ctx),
		.base.cra_module	= THIS_MODULE,

		.min_keysize		= CHACHA_KEY_SIZE,
		.max_keysize		= CHACHA_KEY_SIZE,
		.ivsize			= XCHACHA_IV_SIZE,
		.chunksize		= CHACHA_BLOCK_SIZE,
		.walksize		= 4 * CHACHA_BLOCK_SIZE,
		.setkey			= chacha20_setkey,
		.encrypt		= xchacha_neon,
		.decrypt		= xchacha_neon,
	}, {
		.base.cra_name		= "xchacha12",
		.base.cra_driver_name	= "xchacha12-neon",
		.base.cra_priority	= 300,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct chacha_ctx),
		.base.cra_module	= THIS_MODULE,

		.min_keysize		= CHACHA_KEY_SIZE,
		.max_keysize		= CHACHA_KEY_SIZE,
		.ivsize			= XCHACHA_IV_SIZE,
		.chunksize		= CHACHA_BLOCK_SIZE,
		.walksize		= 4 * CHACHA_BLOCK_SIZE,
		.setkey			= chacha12_setkey,
		.encrypt		= xchacha_neon,
		.decrypt		= xchacha_neon,
	}
};

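/*
 * Register the scalar algorithms unconditionally.  The NEON variants are
 * registered only when NEON is present, and the use_neon static key is
 * left disabled on cores where the scalar code performs better.
 */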
static int __init chacha_simd_mod_init(void)
{
	int err = 0;

	if (IS_REACHABLE(CONFIG_CRYPTO_SKCIPHER)) {
		err = crypto_register_skciphers(arm_algs, ARRAY_SIZE(arm_algs));
		if (err)
			return err;
	}

	if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) && (elf_hwcap & HWCAP_NEON)) {
		int i;

		switch (read_cpuid_part()) {
		case ARM_CPU_PART_CORTEX_A7:
		case ARM_CPU_PART_CORTEX_A5:
			/*
			 * The Cortex-A7 and Cortex-A5 do not perform well with
			 * the NEON implementation but do incredibly well with
			 * the scalar one, and use less power.
			 */
			for (i = 0; i < ARRAY_SIZE(neon_algs); i++)
				neon_algs[i].base.cra_priority = 0;
			break;
		default:
			static_branch_enable(&use_neon);
		}

		if (IS_REACHABLE(CONFIG_CRYPTO_SKCIPHER)) {
			err = crypto_register_skciphers(neon_algs, ARRAY_SIZE(neon_algs));
			if (err)
				crypto_unregister_skciphers(arm_algs, ARRAY_SIZE(arm_algs));
		}
	}
	return err;
}

static void __exit chacha_simd_mod_fini(void)
{
	if (IS_REACHABLE(CONFIG_CRYPTO_SKCIPHER)) {
		crypto_unregister_skciphers(arm_algs, ARRAY_SIZE(arm_algs));
		if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) && (elf_hwcap & HWCAP_NEON))
			crypto_unregister_skciphers(neon_algs, ARRAY_SIZE(neon_algs));
	}
}

module_init(chacha_simd_mod_init);
module_exit(chacha_simd_mod_fini);

MODULE_DESCRIPTION("ChaCha and XChaCha stream ciphers (scalar and NEON accelerated)");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("chacha20");
MODULE_ALIAS_CRYPTO("chacha20-arm");
MODULE_ALIAS_CRYPTO("xchacha20");
MODULE_ALIAS_CRYPTO("xchacha20-arm");
MODULE_ALIAS_CRYPTO("xchacha12");
MODULE_ALIAS_CRYPTO("xchacha12-arm");
#ifdef CONFIG_KERNEL_MODE_NEON
MODULE_ALIAS_CRYPTO("chacha20-neon");
MODULE_ALIAS_CRYPTO("xchacha20-neon");
MODULE_ALIAS_CRYPTO("xchacha12-neon");
#endif
357*4882a593Smuzhiyun #endif
358