// SPDX-License-Identifier: GPL-2.0-only
/* Glue code for CAMELLIA encryption optimized for sparc64 crypto opcodes.
 *
 * Copyright (C) 2012 David S. Miller <davem@davemloft.net>
 */

#define pr_fmt(fmt)	KBUILD_MODNAME ": " fmt

#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/types.h>
#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>

#include <asm/fpumacro.h>
#include <asm/pstate.h>
#include <asm/elf.h>

#include "opcodes.h"

#define CAMELLIA_MIN_KEY_SIZE        16
#define CAMELLIA_MAX_KEY_SIZE        32
#define CAMELLIA_BLOCK_SIZE          16
#define CAMELLIA_TABLE_BYTE_LEN     272

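/*
 * Per-tfm context: both key schedules are expanded once at setkey time.
 * 272 bytes leaves room for the 34 64-bit subkeys that the largest
 * (256-bit) Camellia key schedule requires.
 */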
struct camellia_sparc64_ctx {
	u64 encrypt_key[CAMELLIA_TABLE_BYTE_LEN / sizeof(u64)];
	u64 decrypt_key[CAMELLIA_TABLE_BYTE_LEN / sizeof(u64)];
	int key_len;
};

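/*
 * The camellia_sparc64_* routines are implemented in sparc64 assembly
 * (camellia_asm.S in this directory) using the CAMELLIA crypto opcodes;
 * key expansion produces both the encryption and decryption schedules.
 */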
extern void camellia_sparc64_key_expand(const u32 *in_key, u64 *encrypt_key,
					unsigned int key_len, u64 *decrypt_key);

static int camellia_set_key(struct crypto_tfm *tfm, const u8 *_in_key,
			    unsigned int key_len)
{
	struct camellia_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);
	const u32 *in_key = (const u32 *) _in_key;

	if (key_len != 16 && key_len != 24 && key_len != 32)
		return -EINVAL;

	ctx->key_len = key_len;

	camellia_sparc64_key_expand(in_key, &ctx->encrypt_key[0],
				    key_len, &ctx->decrypt_key[0]);
	return 0;
}

static int camellia_set_key_skcipher(struct crypto_skcipher *tfm,
				     const u8 *in_key, unsigned int key_len)
{
	return camellia_set_key(crypto_skcipher_tfm(tfm), in_key, key_len);
}

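/*
 * Single-block encrypt/decrypt; the direction is selected simply by
 * which of the two precomputed key schedules is passed in.
 */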
extern void camellia_sparc64_crypt(const u64 *key, const u32 *input,
				   u32 *output, unsigned int key_len);

static void camellia_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct camellia_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);

	camellia_sparc64_crypt(&ctx->encrypt_key[0],
			       (const u32 *) src,
			       (u32 *) dst, ctx->key_len);
}

static void camellia_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct camellia_sparc64_ctx *ctx = crypto_tfm_ctx(tfm);

	camellia_sparc64_crypt(&ctx->decrypt_key[0],
			       (const u32 *) src,
			       (u32 *) dst, ctx->key_len);
}

extern void camellia_sparc64_load_keys(const u64 *key, unsigned int key_len);

typedef void ecb_crypt_op(const u64 *input, u64 *output, unsigned int len,
			  const u64 *key);

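/*
 * Camellia-128 runs 18 rounds (three "grand rounds" of six), while
 * Camellia-192/256 run 24 rounds (four grand rounds), hence the two
 * assembly variants selected on key length below.
 */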
extern ecb_crypt_op camellia_sparc64_ecb_crypt_3_grand_rounds;
extern ecb_crypt_op camellia_sparc64_ecb_crypt_4_grand_rounds;

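/*
 * Bulk ECB helper: the key schedule is loaded into the FPU registers once
 * per request, the walk is set up with atomic == true so nothing sleeps
 * while those registers hold key material, and fprs_write(0) resets %fprs
 * afterwards so the FPU state dirtied by the crypto opcodes is not
 * needlessly preserved.
 */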
static int __ecb_crypt(struct skcipher_request *req, bool encrypt)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct camellia_sparc64_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	ecb_crypt_op *op;
	const u64 *key;
	unsigned int nbytes;
	int err;

	op = camellia_sparc64_ecb_crypt_3_grand_rounds;
	if (ctx->key_len != 16)
		op = camellia_sparc64_ecb_crypt_4_grand_rounds;

	err = skcipher_walk_virt(&walk, req, true);
	if (err)
		return err;

	if (encrypt)
		key = &ctx->encrypt_key[0];
	else
		key = &ctx->decrypt_key[0];
	camellia_sparc64_load_keys(key, ctx->key_len);
	while ((nbytes = walk.nbytes) != 0) {
		op(walk.src.virt.addr, walk.dst.virt.addr,
		   round_down(nbytes, CAMELLIA_BLOCK_SIZE), key);
		err = skcipher_walk_done(&walk, nbytes % CAMELLIA_BLOCK_SIZE);
	}
	fprs_write(0);
	return err;
}

static int ecb_encrypt(struct skcipher_request *req)
{
	return __ecb_crypt(req, true);
}

static int ecb_decrypt(struct skcipher_request *req)
{
	return __ecb_crypt(req, false);
}

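/*
 * The CBC variants additionally take the IV, which the assembly updates
 * in place as it chains blocks, so walk.iv stays current across walk
 * iterations.
 */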
typedef void cbc_crypt_op(const u64 *input, u64 *output, unsigned int len,
			  const u64 *key, u64 *iv);

extern cbc_crypt_op camellia_sparc64_cbc_encrypt_3_grand_rounds;
extern cbc_crypt_op camellia_sparc64_cbc_encrypt_4_grand_rounds;
extern cbc_crypt_op camellia_sparc64_cbc_decrypt_3_grand_rounds;
extern cbc_crypt_op camellia_sparc64_cbc_decrypt_4_grand_rounds;

static int cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct camellia_sparc64_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	cbc_crypt_op *op;
	const u64 *key;
	unsigned int nbytes;
	int err;

	op = camellia_sparc64_cbc_encrypt_3_grand_rounds;
	if (ctx->key_len != 16)
		op = camellia_sparc64_cbc_encrypt_4_grand_rounds;

	err = skcipher_walk_virt(&walk, req, true);
	if (err)
		return err;

	key = &ctx->encrypt_key[0];
	camellia_sparc64_load_keys(key, ctx->key_len);
	while ((nbytes = walk.nbytes) != 0) {
		op(walk.src.virt.addr, walk.dst.virt.addr,
		   round_down(nbytes, CAMELLIA_BLOCK_SIZE), key, walk.iv);
		err = skcipher_walk_done(&walk, nbytes % CAMELLIA_BLOCK_SIZE);
	}
	fprs_write(0);
	return err;
}

static int cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct camellia_sparc64_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	cbc_crypt_op *op;
	const u64 *key;
	unsigned int nbytes;
	int err;

	op = camellia_sparc64_cbc_decrypt_3_grand_rounds;
	if (ctx->key_len != 16)
		op = camellia_sparc64_cbc_decrypt_4_grand_rounds;

	err = skcipher_walk_virt(&walk, req, true);
	if (err)
		return err;

	key = &ctx->decrypt_key[0];
	camellia_sparc64_load_keys(key, ctx->key_len);
	while ((nbytes = walk.nbytes) != 0) {
		op(walk.src.virt.addr, walk.dst.virt.addr,
		   round_down(nbytes, CAMELLIA_BLOCK_SIZE), key, walk.iv);
		err = skcipher_walk_done(&walk, nbytes % CAMELLIA_BLOCK_SIZE);
	}
	fprs_write(0);
	return err;
}

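/*
 * SPARC_CR_OPCODE_PRIORITY raises the priority above the generic C
 * implementation so these drivers are preferred whenever the opcodes
 * are present.
 */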
static struct crypto_alg cipher_alg = {
	.cra_name		= "camellia",
	.cra_driver_name	= "camellia-sparc64",
	.cra_priority		= SPARC_CR_OPCODE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= CAMELLIA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct camellia_sparc64_ctx),
	.cra_alignmask		= 3,
	.cra_module		= THIS_MODULE,
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= CAMELLIA_MIN_KEY_SIZE,
			.cia_max_keysize	= CAMELLIA_MAX_KEY_SIZE,
			.cia_setkey		= camellia_set_key,
			.cia_encrypt		= camellia_encrypt,
			.cia_decrypt		= camellia_decrypt
		}
	}
};

static struct skcipher_alg skcipher_algs[] = {
	{
		.base.cra_name		= "ecb(camellia)",
		.base.cra_driver_name	= "ecb-camellia-sparc64",
		.base.cra_priority	= SPARC_CR_OPCODE_PRIORITY,
		.base.cra_blocksize	= CAMELLIA_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct camellia_sparc64_ctx),
		.base.cra_alignmask	= 7,
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= CAMELLIA_MIN_KEY_SIZE,
		.max_keysize		= CAMELLIA_MAX_KEY_SIZE,
		.setkey			= camellia_set_key_skcipher,
		.encrypt		= ecb_encrypt,
		.decrypt		= ecb_decrypt,
	}, {
		.base.cra_name		= "cbc(camellia)",
		.base.cra_driver_name	= "cbc-camellia-sparc64",
		.base.cra_priority	= SPARC_CR_OPCODE_PRIORITY,
		.base.cra_blocksize	= CAMELLIA_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct camellia_sparc64_ctx),
		.base.cra_alignmask	= 7,
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= CAMELLIA_MIN_KEY_SIZE,
		.max_keysize		= CAMELLIA_MAX_KEY_SIZE,
		.ivsize			= CAMELLIA_BLOCK_SIZE,
		.setkey			= camellia_set_key_skcipher,
		.encrypt		= cbc_encrypt,
		.decrypt		= cbc_decrypt,
	}
};

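/*
 * The crypto opcodes are advertised through the ELF hwcap and,
 * per-algorithm, through bits in the Crypto Function Register read via
 * %asr26; check both before registering.
 */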
static bool __init sparc64_has_camellia_opcode(void)
{
	unsigned long cfr;

	if (!(sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO))
		return false;

	__asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr));
	if (!(cfr & CFR_CAMELLIA))
		return false;

	return true;
}

static int __init camellia_sparc64_mod_init(void)
{
	int err;

	if (!sparc64_has_camellia_opcode()) {
		pr_info("sparc64 camellia opcodes not available.\n");
		return -ENODEV;
	}
	pr_info("Using sparc64 camellia opcodes optimized CAMELLIA implementation\n");
	err = crypto_register_alg(&cipher_alg);
	if (err)
		return err;
	err = crypto_register_skciphers(skcipher_algs,
					ARRAY_SIZE(skcipher_algs));
	if (err)
		crypto_unregister_alg(&cipher_alg);
	return err;
}

static void __exit camellia_sparc64_mod_fini(void)
{
	crypto_unregister_alg(&cipher_alg);
	crypto_unregister_skciphers(skcipher_algs, ARRAY_SIZE(skcipher_algs));
}

module_init(camellia_sparc64_mod_init);
module_exit(camellia_sparc64_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Camellia Cipher Algorithm, sparc64 camellia opcode accelerated");

MODULE_ALIAS_CRYPTO("camellia");

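/* Device-ID matching shared by the sparc64 crypto opcode glue modules. */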
#include "crop_devid.c"