// SPDX-License-Identifier: GPL-2.0-only
/*
 * Accelerated GHASH implementation with ARMv8 PMULL instructions.
 *
 * Copyright (C) 2014 - 2018 Linaro Ltd. <ard.biesheuvel@linaro.org>
 */

#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>
#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <crypto/b128ops.h>
#include <crypto/gf128mul.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/cpufeature.h>
#include <linux/crypto.h>
#include <linux/module.h>

MODULE_DESCRIPTION("GHASH and AES-GCM using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("ghash");

#define GHASH_BLOCK_SIZE	16
#define GHASH_DIGEST_SIZE	16
#define GCM_IV_SIZE		12
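
/*
 * The raw key k is kept for the scalar fallback; h[] holds the precomputed,
 * reflected powers of H consumed by the PMULL routines: one entry for the
 * plain GHASH shash, four (H^1..H^4) for the AES-GCM code (see the
 * cra_ctxsize of the respective algorithms below).
 */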
struct ghash_key {
	be128			k;
	u64			h[][2];
};

struct ghash_desc_ctx {
	u64 digest[GHASH_DIGEST_SIZE/sizeof(u64)];
	u8 buf[GHASH_BLOCK_SIZE];
	u32 count;
};

struct gcm_aes_ctx {
	struct crypto_aes_ctx	aes_key;
	struct ghash_key	ghash_key;
};
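
/* NEON/PMULL primitives, implemented in the accompanying assembly code */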
asmlinkage void pmull_ghash_update_p64(int blocks, u64 dg[], const char *src,
				       u64 const h[][2], const char *head);

asmlinkage void pmull_ghash_update_p8(int blocks, u64 dg[], const char *src,
				      u64 const h[][2], const char *head);

asmlinkage void pmull_gcm_encrypt(int bytes, u8 dst[], const u8 src[],
				  u64 const h[][2], u64 dg[], u8 ctr[],
				  u32 const rk[], int rounds, u8 tag[]);
asmlinkage int pmull_gcm_decrypt(int bytes, u8 dst[], const u8 src[],
				 u64 const h[][2], u64 dg[], u8 ctr[],
				 u32 const rk[], int rounds, const u8 l[],
				 const u8 tag[], u64 authsize);

static int ghash_init(struct shash_desc *desc)
{
	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);

	*ctx = (struct ghash_desc_ctx){};
	return 0;
}
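
/*
 * Scalar fallback, usable when the NEON unit may not be touched: folds
 * each 16-byte block into the digest using the generic GF(2^128) multiply.
 */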
static void ghash_do_update(int blocks, u64 dg[], const char *src,
			    struct ghash_key *key, const char *head)
{
	be128 dst = { cpu_to_be64(dg[1]), cpu_to_be64(dg[0]) };

	do {
		const u8 *in = src;

		if (head) {
			in = head;
			blocks++;
			head = NULL;
		} else {
			src += GHASH_BLOCK_SIZE;
		}

		crypto_xor((u8 *)&dst, in, GHASH_BLOCK_SIZE);
		gf128mul_lle(&dst, &key->k);
	} while (--blocks);

	dg[0] = be64_to_cpu(dst.b);
	dg[1] = be64_to_cpu(dst.a);
}

static __always_inline
void ghash_do_simd_update(int blocks, u64 dg[], const char *src,
			  struct ghash_key *key, const char *head,
			  void (*simd_update)(int blocks, u64 dg[],
					      const char *src,
					      u64 const h[][2],
					      const char *head))
{
	if (likely(crypto_simd_usable())) {
		kernel_neon_begin();
		simd_update(blocks, dg, src, key->h, head);
		kernel_neon_end();
	} else {
		ghash_do_update(blocks, dg, src, key, head);
	}
}

/*
 * Cap the number of blocks processed per SIMD call so we don't hog the CPU
 * for too long with preemption disabled between kernel_neon_begin()/end().
 */
#define MAX_BLOCKS	(SZ_64K / GHASH_BLOCK_SIZE)

static int ghash_update(struct shash_desc *desc, const u8 *src,
			unsigned int len)
{
	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
	unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;

	ctx->count += len;

	if ((partial + len) >= GHASH_BLOCK_SIZE) {
		struct ghash_key *key = crypto_shash_ctx(desc->tfm);
		int blocks;

		if (partial) {
			int p = GHASH_BLOCK_SIZE - partial;

			memcpy(ctx->buf + partial, src, p);
			src += p;
			len -= p;
		}

		blocks = len / GHASH_BLOCK_SIZE;
		len %= GHASH_BLOCK_SIZE;

		do {
			int chunk = min(blocks, MAX_BLOCKS);

			ghash_do_simd_update(chunk, ctx->digest, src, key,
					     partial ? ctx->buf : NULL,
					     pmull_ghash_update_p8);

			blocks -= chunk;
			src += chunk * GHASH_BLOCK_SIZE;
			partial = 0;
		} while (unlikely(blocks > 0));
	}
	if (len)
		memcpy(ctx->buf + partial, src, len);
	return 0;
}

static int ghash_final(struct shash_desc *desc, u8 *dst)
{
	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
	unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;

	if (partial) {
		struct ghash_key *key = crypto_shash_ctx(desc->tfm);

		memset(ctx->buf + partial, 0, GHASH_BLOCK_SIZE - partial);

		ghash_do_simd_update(1, ctx->digest, ctx->buf, key, NULL,
				     pmull_ghash_update_p8);
	}
	put_unaligned_be64(ctx->digest[1], dst);
	put_unaligned_be64(ctx->digest[0], dst + 8);

	*ctx = (struct ghash_desc_ctx){};
	return 0;
}
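
/*
 * Convert a hash key into the representation expected by the PMULL code:
 * shift left by one bit (i.e. multiply by x) and, if that carries out of
 * the top bit, fold the carry back in using the GHASH reduction constant
 * 0xc2 << 56.
 */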
static void ghash_reflect(u64 h[], const be128 *k)
{
	u64 carry = be64_to_cpu(k->a) & BIT(63) ? 1 : 0;

	h[0] = (be64_to_cpu(k->b) << 1) | carry;
	h[1] = (be64_to_cpu(k->a) << 1) | (be64_to_cpu(k->b) >> 63);

	if (carry)
		h[1] ^= 0xc200000000000000UL;
}

static int ghash_setkey(struct crypto_shash *tfm,
			const u8 *inkey, unsigned int keylen)
{
	struct ghash_key *key = crypto_shash_ctx(tfm);

	if (keylen != GHASH_BLOCK_SIZE)
		return -EINVAL;

	/* needed for the fallback */
	memcpy(&key->k, inkey, GHASH_BLOCK_SIZE);

	ghash_reflect(key->h[0], &key->k);
	return 0;
}

static struct shash_alg ghash_alg = {
	.base.cra_name		= "ghash",
	.base.cra_driver_name	= "ghash-neon",
	.base.cra_priority	= 150,
	.base.cra_blocksize	= GHASH_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct ghash_key) + sizeof(u64[2]),
	.base.cra_module	= THIS_MODULE,

	.digestsize		= GHASH_DIGEST_SIZE,
	.init			= ghash_init,
	.update			= ghash_update,
	.final			= ghash_final,
	.setkey			= ghash_setkey,
	.descsize		= sizeof(struct ghash_desc_ctx),
};

static int num_rounds(struct crypto_aes_ctx *ctx)
{
	/*
	 * # of rounds specified by AES:
	 * 128 bit key		10 rounds
	 * 192 bit key		12 rounds
	 * 256 bit key		14 rounds
	 * => n byte key	=> 6 + (n/4) rounds
	 */
	return 6 + ctx->key_length / 4;
}
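
/*
 * Derive the hash key H by encrypting an all-zero block, then precompute
 * the reflected powers H^1..H^4 used by the PMULL GCM routines.
 */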
static int gcm_setkey(struct crypto_aead *tfm, const u8 *inkey,
		      unsigned int keylen)
{
	struct gcm_aes_ctx *ctx = crypto_aead_ctx(tfm);
	u8 key[GHASH_BLOCK_SIZE];
	be128 h;
	int ret;

	ret = aes_expandkey(&ctx->aes_key, inkey, keylen);
	if (ret)
		return -EINVAL;

	aes_encrypt(&ctx->aes_key, key, (u8[AES_BLOCK_SIZE]){});

	/* needed for the fallback */
	memcpy(&ctx->ghash_key.k, key, GHASH_BLOCK_SIZE);

	ghash_reflect(ctx->ghash_key.h[0], &ctx->ghash_key.k);

	h = ctx->ghash_key.k;
	gf128mul_lle(&h, &ctx->ghash_key.k);
	ghash_reflect(ctx->ghash_key.h[1], &h);

	gf128mul_lle(&h, &ctx->ghash_key.k);
	ghash_reflect(ctx->ghash_key.h[2], &h);

	gf128mul_lle(&h, &ctx->ghash_key.k);
	ghash_reflect(ctx->ghash_key.h[3], &h);

	return 0;
}

static int gcm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 8:
	case 12 ... 16:
		break;
	default:
		return -EINVAL;
	}
	return 0;
}
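
/*
 * Fold associated data into the MAC, buffering partial blocks so that the
 * GHASH update routines only ever see whole 16-byte blocks.
 */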
static void gcm_update_mac(u64 dg[], const u8 *src, int count, u8 buf[],
			   int *buf_count, struct gcm_aes_ctx *ctx)
{
	if (*buf_count > 0) {
		int buf_added = min(count, GHASH_BLOCK_SIZE - *buf_count);

		memcpy(&buf[*buf_count], src, buf_added);

		*buf_count += buf_added;
		src += buf_added;
		count -= buf_added;
	}

	if (count >= GHASH_BLOCK_SIZE || *buf_count == GHASH_BLOCK_SIZE) {
		int blocks = count / GHASH_BLOCK_SIZE;

		ghash_do_simd_update(blocks, dg, src, &ctx->ghash_key,
				     *buf_count ? buf : NULL,
				     pmull_ghash_update_p64);

		src += blocks * GHASH_BLOCK_SIZE;
		count %= GHASH_BLOCK_SIZE;
		*buf_count = 0;
	}

	if (count > 0) {
		memcpy(buf, src, count);
		*buf_count = count;
	}
}

static void gcm_calculate_auth_mac(struct aead_request *req, u64 dg[])
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct gcm_aes_ctx *ctx = crypto_aead_ctx(aead);
	u8 buf[GHASH_BLOCK_SIZE];
	struct scatter_walk walk;
	u32 len = req->assoclen;
	int buf_count = 0;

	scatterwalk_start(&walk, req->src);

	do {
		u32 n = scatterwalk_clamp(&walk, len);
		u8 *p;

		if (!n) {
			scatterwalk_start(&walk, sg_next(walk.sg));
			n = scatterwalk_clamp(&walk, len);
		}
		p = scatterwalk_map(&walk);

		gcm_update_mac(dg, p, n, buf, &buf_count, ctx);
		len -= n;

		scatterwalk_unmap(p);
		scatterwalk_advance(&walk, n);
		scatterwalk_done(&walk, 0, len);
	} while (len);

	if (buf_count) {
		memset(&buf[buf_count], 0, GHASH_BLOCK_SIZE - buf_count);
		ghash_do_simd_update(1, dg, buf, &ctx->ghash_key, NULL,
				     pmull_ghash_update_p64);
	}
}
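
/*
 * GCM reserves counter value 1 for computing the final tag (E(K, Y0)), so
 * bulk processing of the payload starts with the counter set to 2.
 */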
static int gcm_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct gcm_aes_ctx *ctx = crypto_aead_ctx(aead);
	int nrounds = num_rounds(&ctx->aes_key);
	struct skcipher_walk walk;
	u8 buf[AES_BLOCK_SIZE];
	u8 iv[AES_BLOCK_SIZE];
	u64 dg[2] = {};
	be128 lengths;
	u8 *tag;
	int err;

	lengths.a = cpu_to_be64(req->assoclen * 8);
	lengths.b = cpu_to_be64(req->cryptlen * 8);

	if (req->assoclen)
		gcm_calculate_auth_mac(req, dg);

	memcpy(iv, req->iv, GCM_IV_SIZE);
	put_unaligned_be32(2, iv + GCM_IV_SIZE);

	err = skcipher_walk_aead_encrypt(&walk, req, false);

	if (likely(crypto_simd_usable())) {
		do {
			const u8 *src = walk.src.virt.addr;
			u8 *dst = walk.dst.virt.addr;
			int nbytes = walk.nbytes;

			tag = (u8 *)&lengths;

			if (unlikely(nbytes > 0 && nbytes < AES_BLOCK_SIZE)) {
				src = dst = memcpy(buf + sizeof(buf) - nbytes,
						   src, nbytes);
			} else if (nbytes < walk.total) {
				nbytes &= ~(AES_BLOCK_SIZE - 1);
				tag = NULL;
			}

			kernel_neon_begin();
			pmull_gcm_encrypt(nbytes, dst, src, ctx->ghash_key.h,
					  dg, iv, ctx->aes_key.key_enc, nrounds,
					  tag);
			kernel_neon_end();

			if (unlikely(!nbytes))
				break;

			if (unlikely(nbytes > 0 && nbytes < AES_BLOCK_SIZE))
				memcpy(walk.dst.virt.addr,
				       buf + sizeof(buf) - nbytes, nbytes);

			err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
		} while (walk.nbytes);
	} else {
		while (walk.nbytes >= AES_BLOCK_SIZE) {
			int blocks = walk.nbytes / AES_BLOCK_SIZE;
			const u8 *src = walk.src.virt.addr;
			u8 *dst = walk.dst.virt.addr;
			int remaining = blocks;

			do {
				aes_encrypt(&ctx->aes_key, buf, iv);
				crypto_xor_cpy(dst, src, buf, AES_BLOCK_SIZE);
				crypto_inc(iv, AES_BLOCK_SIZE);

				dst += AES_BLOCK_SIZE;
				src += AES_BLOCK_SIZE;
			} while (--remaining > 0);

			ghash_do_update(blocks, dg, walk.dst.virt.addr,
					&ctx->ghash_key, NULL);

			err = skcipher_walk_done(&walk,
						 walk.nbytes % AES_BLOCK_SIZE);
		}

		/* handle the tail */
		if (walk.nbytes) {
			aes_encrypt(&ctx->aes_key, buf, iv);

			crypto_xor_cpy(walk.dst.virt.addr, walk.src.virt.addr,
				       buf, walk.nbytes);

			memcpy(buf, walk.dst.virt.addr, walk.nbytes);
			memset(buf + walk.nbytes, 0, sizeof(buf) - walk.nbytes);
		}

		tag = (u8 *)&lengths;
		ghash_do_update(1, dg, tag, &ctx->ghash_key,
				walk.nbytes ? buf : NULL);

		if (walk.nbytes)
			err = skcipher_walk_done(&walk, 0);

		put_unaligned_be64(dg[1], tag);
		put_unaligned_be64(dg[0], tag + 8);
		put_unaligned_be32(1, iv + GCM_IV_SIZE);
		aes_encrypt(&ctx->aes_key, iv, iv);
		crypto_xor(tag, iv, AES_BLOCK_SIZE);
	}

	if (err)
		return err;

	/* copy authtag to end of dst */
	scatterwalk_map_and_copy(tag, req->dst, req->assoclen + req->cryptlen,
				 crypto_aead_authsize(aead), 1);

	return 0;
}
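
/*
 * Decryption mirrors encryption, except that the expected tag is read from
 * the end of the ciphertext up front; the NEON path verifies it inside
 * pmull_gcm_decrypt(), while the fallback compares the computed tag using
 * crypto_memneq(). Either way, a mismatch yields -EBADMSG.
 */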
static int gcm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct gcm_aes_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int authsize = crypto_aead_authsize(aead);
	int nrounds = num_rounds(&ctx->aes_key);
	struct skcipher_walk walk;
	u8 otag[AES_BLOCK_SIZE];
	u8 buf[AES_BLOCK_SIZE];
	u8 iv[AES_BLOCK_SIZE];
	u64 dg[2] = {};
	be128 lengths;
	u8 *tag;
	int err;

	lengths.a = cpu_to_be64(req->assoclen * 8);
	lengths.b = cpu_to_be64((req->cryptlen - authsize) * 8);

	if (req->assoclen)
		gcm_calculate_auth_mac(req, dg);

	memcpy(iv, req->iv, GCM_IV_SIZE);
	put_unaligned_be32(2, iv + GCM_IV_SIZE);

	scatterwalk_map_and_copy(otag, req->src,
				 req->assoclen + req->cryptlen - authsize,
				 authsize, 0);

	err = skcipher_walk_aead_decrypt(&walk, req, false);

	if (likely(crypto_simd_usable())) {
		int ret;

		do {
			const u8 *src = walk.src.virt.addr;
			u8 *dst = walk.dst.virt.addr;
			int nbytes = walk.nbytes;

			tag = (u8 *)&lengths;

			if (unlikely(nbytes > 0 && nbytes < AES_BLOCK_SIZE)) {
				src = dst = memcpy(buf + sizeof(buf) - nbytes,
						   src, nbytes);
			} else if (nbytes < walk.total) {
				nbytes &= ~(AES_BLOCK_SIZE - 1);
				tag = NULL;
			}

			kernel_neon_begin();
			ret = pmull_gcm_decrypt(nbytes, dst, src,
						ctx->ghash_key.h,
						dg, iv, ctx->aes_key.key_enc,
						nrounds, tag, otag, authsize);
			kernel_neon_end();

			if (unlikely(!nbytes))
				break;

			if (unlikely(nbytes > 0 && nbytes < AES_BLOCK_SIZE))
				memcpy(walk.dst.virt.addr,
				       buf + sizeof(buf) - nbytes, nbytes);

			err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
		} while (walk.nbytes);

		if (err)
			return err;
		if (ret)
			return -EBADMSG;
	} else {
		while (walk.nbytes >= AES_BLOCK_SIZE) {
			int blocks = walk.nbytes / AES_BLOCK_SIZE;
			const u8 *src = walk.src.virt.addr;
			u8 *dst = walk.dst.virt.addr;

			ghash_do_update(blocks, dg, walk.src.virt.addr,
					&ctx->ghash_key, NULL);

			do {
				aes_encrypt(&ctx->aes_key, buf, iv);
				crypto_xor_cpy(dst, src, buf, AES_BLOCK_SIZE);
				crypto_inc(iv, AES_BLOCK_SIZE);

				dst += AES_BLOCK_SIZE;
				src += AES_BLOCK_SIZE;
			} while (--blocks > 0);

			err = skcipher_walk_done(&walk,
						 walk.nbytes % AES_BLOCK_SIZE);
		}

		/* handle the tail */
		if (walk.nbytes) {
			memcpy(buf, walk.src.virt.addr, walk.nbytes);
			memset(buf + walk.nbytes, 0, sizeof(buf) - walk.nbytes);
		}

		tag = (u8 *)&lengths;
		ghash_do_update(1, dg, tag, &ctx->ghash_key,
				walk.nbytes ? buf : NULL);

		if (walk.nbytes) {
			aes_encrypt(&ctx->aes_key, buf, iv);

			crypto_xor_cpy(walk.dst.virt.addr, walk.src.virt.addr,
				       buf, walk.nbytes);

			err = skcipher_walk_done(&walk, 0);
		}

		if (err)
			return err;

		put_unaligned_be64(dg[1], tag);
		put_unaligned_be64(dg[0], tag + 8);
		put_unaligned_be32(1, iv + GCM_IV_SIZE);
		aes_encrypt(&ctx->aes_key, iv, iv);
		crypto_xor(tag, iv, AES_BLOCK_SIZE);

		if (crypto_memneq(tag, otag, authsize)) {
			memzero_explicit(tag, AES_BLOCK_SIZE);
			return -EBADMSG;
		}
	}
	return 0;
}

static struct aead_alg gcm_aes_alg = {
	.ivsize			= GCM_IV_SIZE,
	.chunksize		= AES_BLOCK_SIZE,
	.maxauthsize		= AES_BLOCK_SIZE,
	.setkey			= gcm_setkey,
	.setauthsize		= gcm_setauthsize,
	.encrypt		= gcm_encrypt,
	.decrypt		= gcm_decrypt,

	.base.cra_name		= "gcm(aes)",
	.base.cra_driver_name	= "gcm-aes-ce",
	.base.cra_priority	= 300,
	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct gcm_aes_ctx) +
				  4 * sizeof(u64[2]),
	.base.cra_module	= THIS_MODULE,
};
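
/*
 * With PMULL, register the full AES-GCM AEAD implementation; without it,
 * only the plain GHASH shash, which builds on the slower 8-bit polynomial
 * multiply, is provided.
 */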
static int __init ghash_ce_mod_init(void)
{
	if (!cpu_have_named_feature(ASIMD))
		return -ENODEV;

	if (cpu_have_named_feature(PMULL))
		return crypto_register_aead(&gcm_aes_alg);

	return crypto_register_shash(&ghash_alg);
}

static void __exit ghash_ce_mod_exit(void)
{
	if (cpu_have_named_feature(PMULL))
		crypto_unregister_aead(&gcm_aes_alg);
	else
		crypto_unregister_shash(&ghash_alg);
}

static const struct cpu_feature ghash_cpu_feature[] = {
	{ cpu_feature(PMULL) }, { }
};
MODULE_DEVICE_TABLE(cpu, ghash_cpu_feature);

module_init(ghash_ce_mod_init);
module_exit(ghash_ce_mod_exit);