// SPDX-License-Identifier: GPL-2.0-only
/*
 * aes-ce-cipher.c - core AES cipher using ARMv8 Crypto Extensions
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>
#include <crypto/aes.h>
#include <crypto/internal/simd.h>
#include <linux/cpufeature.h>
#include <linux/crypto.h>
#include <linux/module.h>

#include "aes-ce-setkey.h"

MODULE_DESCRIPTION("Synchronous AES cipher using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");

struct aes_block {
	u8 b[AES_BLOCK_SIZE];
};

asmlinkage void __aes_ce_encrypt(u32 *rk, u8 *out, const u8 *in, int rounds);
asmlinkage void __aes_ce_decrypt(u32 *rk, u8 *out, const u8 *in, int rounds);

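/*
 * Key schedule helpers implemented in the accompanying assembly:
 * __aes_ce_sub() applies the AES SubWord substitution to a 32-bit word,
 * and __aes_ce_invert() applies the Inverse MixColumns transformation
 * to one round key block.
 */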
asmlinkage u32 __aes_ce_sub(u32 l);
asmlinkage void __aes_ce_invert(struct aes_block *out,
				const struct aes_block *in);

static int num_rounds(struct crypto_aes_ctx *ctx)
{
	/*
	 * # of rounds specified by AES:
	 * 128 bit key		10 rounds
	 * 192 bit key		12 rounds
	 * 256 bit key		14 rounds
	 * => n byte key	=> 6 + (n/4) rounds
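	 * e.g. a 32 byte (256 bit) key => 6 + 32/4 = 14 rounds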
	 */
	return 6 + ctx->key_length / 4;
}

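/*
 * The cipher entry points below use the AES instructions only when the
 * NEON unit may be used in the current context (crypto_simd_usable());
 * otherwise they fall back to the generic AES library routines
 * aes_encrypt()/aes_decrypt(). kernel_neon_begin()/kernel_neon_end()
 * bracket all use of the SIMD register file.
 */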
static void aes_cipher_encrypt(struct crypto_tfm *tfm, u8 dst[], u8 const src[])
{
	struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	if (!crypto_simd_usable()) {
		aes_encrypt(ctx, dst, src);
		return;
	}

	kernel_neon_begin();
	__aes_ce_encrypt(ctx->key_enc, dst, src, num_rounds(ctx));
	kernel_neon_end();
}

static void aes_cipher_decrypt(struct crypto_tfm *tfm, u8 dst[], u8 const src[])
{
	struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	if (!crypto_simd_usable()) {
		aes_decrypt(ctx, dst, src);
		return;
	}

	kernel_neon_begin();
	__aes_ce_decrypt(ctx->key_dec, dst, src, num_rounds(ctx));
	kernel_neon_end();
}

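/*
 * Expand the key schedule using the Crypto Extensions helpers. This is
 * exported so that the other ARMv8 CE based AES drivers can share it.
 */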
int ce_aes_expandkey(struct crypto_aes_ctx *ctx, const u8 *in_key,
		     unsigned int key_len)
{
	/*
	 * The AES key schedule round constants
	 */
	static u8 const rcon[] = {
		0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36,
	};

	u32 kwords = key_len / sizeof(u32);
	struct aes_block *key_enc, *key_dec;
	int i, j;

	if (key_len != AES_KEYSIZE_128 &&
	    key_len != AES_KEYSIZE_192 &&
	    key_len != AES_KEYSIZE_256)
		return -EINVAL;

	ctx->key_length = key_len;
	for (i = 0; i < kwords; i++)
		ctx->key_enc[i] = get_unaligned_le32(in_key + i * sizeof(u32));

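	/*
	 * Generate the remaining round key words as per FIPS-197: each
	 * iteration derives one key-length-sized block of the schedule,
	 * using __aes_ce_sub() for the SubWord step. 192 and 256 bit keys
	 * produce wider blocks per iteration and therefore leave the loop
	 * early, once enough round keys have been generated.
	 */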
	kernel_neon_begin();
	for (i = 0; i < sizeof(rcon); i++) {
		u32 *rki = ctx->key_enc + (i * kwords);
		u32 *rko = rki + kwords;

		rko[0] = ror32(__aes_ce_sub(rki[kwords - 1]), 8) ^ rcon[i] ^ rki[0];
		rko[1] = rko[0] ^ rki[1];
		rko[2] = rko[1] ^ rki[2];
		rko[3] = rko[2] ^ rki[3];

		if (key_len == AES_KEYSIZE_192) {
			if (i >= 7)
				break;
			rko[4] = rko[3] ^ rki[4];
			rko[5] = rko[4] ^ rki[5];
		} else if (key_len == AES_KEYSIZE_256) {
			if (i >= 6)
				break;
			rko[4] = __aes_ce_sub(rko[3]) ^ rki[4];
			rko[5] = rko[4] ^ rki[5];
			rko[6] = rko[5] ^ rki[6];
			rko[7] = rko[6] ^ rki[7];
		}
	}

	/*
	 * Generate the decryption keys for the Equivalent Inverse Cipher.
	 * This involves reversing the order of the round keys, and applying
	 * the Inverse Mix Columns transformation on all but the first and
	 * the last one.
	 */
	key_enc = (struct aes_block *)ctx->key_enc;
	key_dec = (struct aes_block *)ctx->key_dec;
	j = num_rounds(ctx);

	key_dec[0] = key_enc[j];
	for (i = 1, j--; j > 0; i++, j--)
		__aes_ce_invert(key_dec + i, key_enc + j);
	key_dec[i] = key_enc[0];

	kernel_neon_end();
	return 0;
}
EXPORT_SYMBOL(ce_aes_expandkey);

int ce_aes_setkey(struct crypto_tfm *tfm, const u8 *in_key,
		  unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	return ce_aes_expandkey(ctx, in_key, key_len);
}
EXPORT_SYMBOL(ce_aes_setkey);

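/*
 * Register the single-block cipher with a priority above the generic C
 * implementation, so that this driver is preferred whenever the ARMv8
 * Crypto Extensions are available.
 */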
static struct crypto_alg aes_alg = {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-ce",
	.cra_priority		= 250,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
	.cra_module		= THIS_MODULE,
	.cra_cipher = {
		.cia_min_keysize	= AES_MIN_KEY_SIZE,
		.cia_max_keysize	= AES_MAX_KEY_SIZE,
		.cia_setkey		= ce_aes_setkey,
		.cia_encrypt		= aes_cipher_encrypt,
		.cia_decrypt		= aes_cipher_decrypt
	}
};

static int __init aes_mod_init(void)
{
	return crypto_register_alg(&aes_alg);
}

static void __exit aes_mod_exit(void)
{
	crypto_unregister_alg(&aes_alg);
}

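/*
 * Only initialise (and allow autoloading of) this module on CPUs that
 * advertise the AES feature, i.e. those implementing the ARMv8 Crypto
 * Extensions AES instructions.
 */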
module_cpu_feature_match(AES, aes_mod_init);
module_exit(aes_mod_exit);