// SPDX-License-Identifier: GPL-2.0-only
/*
 * Bit sliced AES using NEON instructions
 *
 * Copyright (C) 2016 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <asm/neon.h>
#include <asm/simd.h>
#include <crypto/aes.h>
#include <crypto/ctr.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <crypto/xts.h>
#include <linux/module.h>

MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");

MODULE_ALIAS_CRYPTO("ecb(aes)");
MODULE_ALIAS_CRYPTO("cbc(aes)");
MODULE_ALIAS_CRYPTO("ctr(aes)");
MODULE_ALIAS_CRYPTO("xts(aes)");

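/*
 * Low-level bit-sliced routines implemented in the accompanying NEON
 * assembly; these operate on the round keys produced by
 * aesbs_convert_key() below.
 */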
asmlinkage void aesbs_convert_key(u8 out[], u32 const rk[], int rounds);

asmlinkage void aesbs_ecb_encrypt(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks);
asmlinkage void aesbs_ecb_decrypt(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks);

asmlinkage void aesbs_cbc_decrypt(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks, u8 iv[]);

asmlinkage void aesbs_ctr_encrypt(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks, u8 iv[], u8 final[]);

asmlinkage void aesbs_xts_encrypt(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks, u8 iv[]);
asmlinkage void aesbs_xts_decrypt(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks, u8 iv[]);

/* borrowed from aes-neon-blk.ko */
asmlinkage void neon_aes_ecb_encrypt(u8 out[], u8 const in[], u32 const rk[],
				     int rounds, int blocks);
asmlinkage void neon_aes_cbc_encrypt(u8 out[], u8 const in[], u32 const rk[],
				     int rounds, int blocks, u8 iv[]);
asmlinkage void neon_aes_xts_encrypt(u8 out[], u8 const in[],
				     u32 const rk1[], int rounds, int bytes,
				     u32 const rk2[], u8 iv[], int first);
asmlinkage void neon_aes_xts_decrypt(u8 out[], u8 const in[],
				     u32 const rk1[], int rounds, int bytes,
				     u32 const rk2[], u8 iv[], int first);

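/*
 * Round keys in the bit-sliced format produced by aesbs_convert_key();
 * the buffer is sized for the AES-256 worst case, with the key material
 * interleaved across the eight parallel 128-bit lanes.
 */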
struct aesbs_ctx {
	u8	rk[13 * (8 * AES_BLOCK_SIZE) + 32];
	int	rounds;
} __aligned(AES_BLOCK_SIZE);

struct aesbs_cbc_ctx {
	struct aesbs_ctx	key;
	u32			enc[AES_MAX_KEYLENGTH_U32];
};

struct aesbs_xts_ctx {
	struct aesbs_ctx	key;
	u32			twkey[AES_MAX_KEYLENGTH_U32];
	struct crypto_aes_ctx	cts;
};

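/*
 * Expand the user-supplied AES key and convert it to the bit-sliced
 * format expected by the NEON routines.
 */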
static int aesbs_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
			unsigned int key_len)
{
	struct aesbs_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_aes_ctx rk;
	int err;

	err = aes_expandkey(&rk, in_key, key_len);
	if (err)
		return err;

	ctx->rounds = 6 + key_len / 4;

	kernel_neon_begin();
	aesbs_convert_key(ctx->rk, rk.key_enc, ctx->rounds);
	kernel_neon_end();

	return 0;
}

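/*
 * Process the request in multiples of the 8-block bit-slicing width,
 * rounding each walk step down to the stride except for the final one.
 */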
static int __ecb_crypt(struct skcipher_request *req,
		       void (*fn)(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks))
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesbs_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes >= AES_BLOCK_SIZE) {
		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;

		if (walk.nbytes < walk.total)
			blocks = round_down(blocks,
					    walk.stride / AES_BLOCK_SIZE);

		kernel_neon_begin();
		fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->rk,
		   ctx->rounds, blocks);
		kernel_neon_end();
		err = skcipher_walk_done(&walk,
					 walk.nbytes - blocks * AES_BLOCK_SIZE);
	}

	return err;
}

static int ecb_encrypt(struct skcipher_request *req)
{
	return __ecb_crypt(req, aesbs_ecb_encrypt);
}

static int ecb_decrypt(struct skcipher_request *req)
{
	return __ecb_crypt(req, aesbs_ecb_decrypt);
}

static int aesbs_cbc_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	struct aesbs_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_aes_ctx rk;
	int err;

	err = aes_expandkey(&rk, in_key, key_len);
	if (err)
		return err;

	ctx->key.rounds = 6 + key_len / 4;

	memcpy(ctx->enc, rk.key_enc, sizeof(ctx->enc));

	kernel_neon_begin();
	aesbs_convert_key(ctx->key.rk, rk.key_enc, ctx->key.rounds);
	kernel_neon_end();
	memzero_explicit(&rk, sizeof(rk));

	return 0;
}

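/*
 * CBC encryption is inherently sequential (each block chains into the
 * next), so the 8-way bit-sliced code cannot be used here.
 */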
static int cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesbs_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes >= AES_BLOCK_SIZE) {
		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;

		/* fall back to the non-bitsliced NEON implementation */
		kernel_neon_begin();
		neon_aes_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				     ctx->enc, ctx->key.rounds, blocks,
				     walk.iv);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

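/*
 * CBC decryption has no such dependency between blocks, so it can use
 * the bit-sliced implementation.
 */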
static int cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesbs_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes >= AES_BLOCK_SIZE) {
		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;

		if (walk.nbytes < walk.total)
			blocks = round_down(blocks,
					    walk.stride / AES_BLOCK_SIZE);

		kernel_neon_begin();
		aesbs_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				  ctx->key.rk, ctx->key.rounds, blocks,
				  walk.iv);
		kernel_neon_end();
		err = skcipher_walk_done(&walk,
					 walk.nbytes - blocks * AES_BLOCK_SIZE);
	}

	return err;
}

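/*
 * For requests that are not a whole multiple of the block size, the
 * assembly routine writes the keystream for the final partial block to
 * 'buf', and the tail is XORed into place here.
 */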
static int ctr_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesbs_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	u8 buf[AES_BLOCK_SIZE];
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes > 0) {
		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;
		u8 *final = (walk.total % AES_BLOCK_SIZE) ? buf : NULL;

		if (walk.nbytes < walk.total) {
			blocks = round_down(blocks,
					    walk.stride / AES_BLOCK_SIZE);
			final = NULL;
		}

		kernel_neon_begin();
		aesbs_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				  ctx->rk, ctx->rounds, blocks, walk.iv, final);
		kernel_neon_end();

		if (final) {
			u8 *dst = walk.dst.virt.addr + blocks * AES_BLOCK_SIZE;
			u8 *src = walk.src.virt.addr + blocks * AES_BLOCK_SIZE;

			crypto_xor_cpy(dst, src, final,
				       walk.total % AES_BLOCK_SIZE);

			err = skcipher_walk_done(&walk, 0);
			break;
		}
		err = skcipher_walk_done(&walk,
					 walk.nbytes - blocks * AES_BLOCK_SIZE);
	}
	return err;
}

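/*
 * XTS takes a double-length key: the first half encrypts the data and is
 * converted to bit-sliced form, while the second half is kept in regular
 * form for encrypting the tweak. A plain expansion of the first half is
 * also retained for the ciphertext stealing tail.
 */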
static int aesbs_xts_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	struct aesbs_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_aes_ctx rk;
	int err;

	err = xts_verify_key(tfm, in_key, key_len);
	if (err)
		return err;

	key_len /= 2;
	err = aes_expandkey(&ctx->cts, in_key, key_len);
	if (err)
		return err;

	err = aes_expandkey(&rk, in_key + key_len, key_len);
	if (err)
		return err;

	memcpy(ctx->twkey, rk.key_enc, sizeof(ctx->twkey));

	return aesbs_setkey(tfm, in_key, key_len);
}

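/*
 * Bulk XTS processing is done by the bit-sliced code in 8-block strides;
 * short inputs and the ciphertext stealing tail are handed off to the
 * plain NEON helpers, which also encrypt the initial tweak.
 */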
static int __xts_crypt(struct skcipher_request *req, bool encrypt,
		       void (*fn)(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks, u8 iv[]))
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesbs_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int tail = req->cryptlen % (8 * AES_BLOCK_SIZE);
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct scatterlist *src, *dst;
	struct skcipher_walk walk;
	int nbytes, err;
	int first = 1;
	u8 *out, *in;

	if (req->cryptlen < AES_BLOCK_SIZE)
		return -EINVAL;

	/* ensure that the cts tail is covered by a single step */
	if (unlikely(tail > 0 && tail < AES_BLOCK_SIZE)) {
		int xts_blocks = DIV_ROUND_UP(req->cryptlen,
					      AES_BLOCK_SIZE) - 2;

		skcipher_request_set_tfm(&subreq, tfm);
		skcipher_request_set_callback(&subreq,
					      skcipher_request_flags(req),
					      NULL, NULL);
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   xts_blocks * AES_BLOCK_SIZE,
					   req->iv);
		req = &subreq;
	} else {
		tail = 0;
	}

	err = skcipher_walk_virt(&walk, req, false);
	if (err)
		return err;

	while (walk.nbytes >= AES_BLOCK_SIZE) {
		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;

		if (walk.nbytes < walk.total || walk.nbytes % AES_BLOCK_SIZE)
			blocks = round_down(blocks,
					    walk.stride / AES_BLOCK_SIZE);

		out = walk.dst.virt.addr;
		in = walk.src.virt.addr;
		nbytes = walk.nbytes;

		kernel_neon_begin();
		if (likely(blocks > 6)) { /* plain NEON is faster otherwise */
			if (first)
				neon_aes_ecb_encrypt(walk.iv, walk.iv,
						     ctx->twkey,
						     ctx->key.rounds, 1);
			first = 0;

			fn(out, in, ctx->key.rk, ctx->key.rounds, blocks,
			   walk.iv);

			out += blocks * AES_BLOCK_SIZE;
			in += blocks * AES_BLOCK_SIZE;
			nbytes -= blocks * AES_BLOCK_SIZE;
		}

		if (walk.nbytes == walk.total && nbytes > 0)
			goto xts_tail;

		kernel_neon_end();
		err = skcipher_walk_done(&walk, nbytes);
	}

	if (err || likely(!tail))
		return err;

	/* handle ciphertext stealing */
	dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
	if (req->dst != req->src)
		dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);

	skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
				   req->iv);

	err = skcipher_walk_virt(&walk, req, false);
	if (err)
		return err;

	out = walk.dst.virt.addr;
	in = walk.src.virt.addr;
	nbytes = walk.nbytes;

	kernel_neon_begin();
xts_tail:
	if (encrypt)
		neon_aes_xts_encrypt(out, in, ctx->cts.key_enc, ctx->key.rounds,
				     nbytes, ctx->twkey, walk.iv, first ?: 2);
	else
		neon_aes_xts_decrypt(out, in, ctx->cts.key_dec, ctx->key.rounds,
				     nbytes, ctx->twkey, walk.iv, first ?: 2);
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}

static int xts_encrypt(struct skcipher_request *req)
{
	return __xts_crypt(req, true, aesbs_xts_encrypt);
}

static int xts_decrypt(struct skcipher_request *req)
{
	return __xts_crypt(req, false, aesbs_xts_decrypt);
}

static struct skcipher_alg aes_algs[] = { {
	.base.cra_name		= "ecb(aes)",
	.base.cra_driver_name	= "ecb-aes-neonbs",
	.base.cra_priority	= 250,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct aesbs_ctx),
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.walksize		= 8 * AES_BLOCK_SIZE,
	.setkey			= aesbs_setkey,
	.encrypt		= ecb_encrypt,
	.decrypt		= ecb_decrypt,
}, {
	.base.cra_name		= "cbc(aes)",
	.base.cra_driver_name	= "cbc-aes-neonbs",
	.base.cra_priority	= 250,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct aesbs_cbc_ctx),
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.walksize		= 8 * AES_BLOCK_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.setkey			= aesbs_cbc_setkey,
	.encrypt		= cbc_encrypt,
	.decrypt		= cbc_decrypt,
}, {
	.base.cra_name		= "ctr(aes)",
	.base.cra_driver_name	= "ctr-aes-neonbs",
	.base.cra_priority	= 250,
	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct aesbs_ctx),
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.chunksize		= AES_BLOCK_SIZE,
	.walksize		= 8 * AES_BLOCK_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.setkey			= aesbs_setkey,
	.encrypt		= ctr_encrypt,
	.decrypt		= ctr_encrypt,
}, {
	.base.cra_name		= "xts(aes)",
	.base.cra_driver_name	= "xts-aes-neonbs",
	.base.cra_priority	= 250,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct aesbs_xts_ctx),
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= 2 * AES_MIN_KEY_SIZE,
	.max_keysize		= 2 * AES_MAX_KEY_SIZE,
	.walksize		= 8 * AES_BLOCK_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.setkey			= aesbs_xts_setkey,
	.encrypt		= xts_encrypt,
	.decrypt		= xts_decrypt,
} };
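
/*
 * Usage sketch (not part of this driver): once registered, these
 * implementations are reachable through the generic skcipher API. The
 * sequence below is illustrative only; error handling is abbreviated and
 * the key/IV values are placeholders.
 *
 *	struct crypto_skcipher *tfm;
 *	struct skcipher_request *req;
 *	struct scatterlist sg;
 *	u8 key[16] = { 0 }, iv[16] = { 0 }, buf[64] = { 0 };
 *
 *	tfm = crypto_alloc_skcipher("ctr(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	crypto_skcipher_setkey(tfm, key, sizeof(key));
 *
 *	req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *	sg_init_one(&sg, buf, sizeof(buf));
 *	skcipher_request_set_crypt(req, &sg, &sg, sizeof(buf), iv);
 *	crypto_skcipher_encrypt(req);	(may return -EINPROGRESS if async)
 *
 *	skcipher_request_free(req);
 *	crypto_free_skcipher(tfm);
 */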

static void aes_exit(void)
{
	crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
}

static int __init aes_init(void)
{
	if (!cpu_have_named_feature(ASIMD))
		return -ENODEV;

	return crypto_register_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
}

module_init(aes_init);
module_exit(aes_exit);