// SPDX-License-Identifier: GPL-2.0-only
/* Glue code for AES encryption optimized for sparc64 crypto opcodes.
 *
 * This is based largely upon arch/x86/crypto/aesni-intel_glue.c
 *
 * Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <ying.huang@intel.com>
 *
 * Added RFC4106 AES-GCM support for 128-bit keys under the AEAD
 * interface for 64-bit kernels.
 *    Authors: Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */

#define pr_fmt(fmt)	KBUILD_MODNAME ": " fmt

#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <linux/types.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>

#include <asm/fpumacro.h>
#include <asm/pstate.h>
#include <asm/elf.h>

#include "opcodes.h"

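/*
 * Per-key-size dispatch table: aes_set_key() selects one of the
 * aes128_ops/aes192_ops/aes256_ops instances below, so the mode handlers
 * never have to re-check the key length on the fast path.
 */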
struct aes_ops {
        void (*encrypt)(const u64 *key, const u32 *input, u32 *output);
        void (*decrypt)(const u64 *key, const u32 *input, u32 *output);
        void (*load_encrypt_keys)(const u64 *key);
        void (*load_decrypt_keys)(const u64 *key);
        void (*ecb_encrypt)(const u64 *key, const u64 *input, u64 *output,
                            unsigned int len);
        void (*ecb_decrypt)(const u64 *key, const u64 *input, u64 *output,
                            unsigned int len);
        void (*cbc_encrypt)(const u64 *key, const u64 *input, u64 *output,
                            unsigned int len, u64 *iv);
        void (*cbc_decrypt)(const u64 *key, const u64 *input, u64 *output,
                            unsigned int len, u64 *iv);
        void (*ctr_crypt)(const u64 *key, const u64 *input, u64 *output,
                          unsigned int len, u64 *iv);
};

struct crypto_sparc64_aes_ctx {
        struct aes_ops *ops;
        u64 key[AES_MAX_KEYLENGTH / sizeof(u64)];
        u32 key_length;
        u32 expanded_key_length;
};

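/*
 * The routines below are implemented in sparc64 assembly using the AES
 * crypto opcodes.  The load_*_keys() helpers are expected to stage the
 * expanded key schedule in the FPU register file so that the bulk
 * ecb/cbc/ctr loops can run over many blocks without reloading it (hence
 * the fprs_write(0) cleanup in the callers).
 */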
extern void aes_sparc64_encrypt_128(const u64 *key, const u32 *input,
                                    u32 *output);
extern void aes_sparc64_encrypt_192(const u64 *key, const u32 *input,
                                    u32 *output);
extern void aes_sparc64_encrypt_256(const u64 *key, const u32 *input,
                                    u32 *output);

extern void aes_sparc64_decrypt_128(const u64 *key, const u32 *input,
                                    u32 *output);
extern void aes_sparc64_decrypt_192(const u64 *key, const u32 *input,
                                    u32 *output);
extern void aes_sparc64_decrypt_256(const u64 *key, const u32 *input,
                                    u32 *output);

extern void aes_sparc64_load_encrypt_keys_128(const u64 *key);
extern void aes_sparc64_load_encrypt_keys_192(const u64 *key);
extern void aes_sparc64_load_encrypt_keys_256(const u64 *key);

extern void aes_sparc64_load_decrypt_keys_128(const u64 *key);
extern void aes_sparc64_load_decrypt_keys_192(const u64 *key);
extern void aes_sparc64_load_decrypt_keys_256(const u64 *key);

extern void aes_sparc64_ecb_encrypt_128(const u64 *key, const u64 *input,
                                        u64 *output, unsigned int len);
extern void aes_sparc64_ecb_encrypt_192(const u64 *key, const u64 *input,
                                        u64 *output, unsigned int len);
extern void aes_sparc64_ecb_encrypt_256(const u64 *key, const u64 *input,
                                        u64 *output, unsigned int len);

extern void aes_sparc64_ecb_decrypt_128(const u64 *key, const u64 *input,
                                        u64 *output, unsigned int len);
extern void aes_sparc64_ecb_decrypt_192(const u64 *key, const u64 *input,
                                        u64 *output, unsigned int len);
extern void aes_sparc64_ecb_decrypt_256(const u64 *key, const u64 *input,
                                        u64 *output, unsigned int len);

extern void aes_sparc64_cbc_encrypt_128(const u64 *key, const u64 *input,
                                        u64 *output, unsigned int len,
                                        u64 *iv);

extern void aes_sparc64_cbc_encrypt_192(const u64 *key, const u64 *input,
                                        u64 *output, unsigned int len,
                                        u64 *iv);

extern void aes_sparc64_cbc_encrypt_256(const u64 *key, const u64 *input,
                                        u64 *output, unsigned int len,
                                        u64 *iv);

extern void aes_sparc64_cbc_decrypt_128(const u64 *key, const u64 *input,
                                        u64 *output, unsigned int len,
                                        u64 *iv);

extern void aes_sparc64_cbc_decrypt_192(const u64 *key, const u64 *input,
                                        u64 *output, unsigned int len,
                                        u64 *iv);

extern void aes_sparc64_cbc_decrypt_256(const u64 *key, const u64 *input,
                                        u64 *output, unsigned int len,
                                        u64 *iv);

extern void aes_sparc64_ctr_crypt_128(const u64 *key, const u64 *input,
                                      u64 *output, unsigned int len,
                                      u64 *iv);
extern void aes_sparc64_ctr_crypt_192(const u64 *key, const u64 *input,
                                      u64 *output, unsigned int len,
                                      u64 *iv);
extern void aes_sparc64_ctr_crypt_256(const u64 *key, const u64 *input,
                                      u64 *output, unsigned int len,
                                      u64 *iv);

static struct aes_ops aes128_ops = {
        .encrypt                = aes_sparc64_encrypt_128,
        .decrypt                = aes_sparc64_decrypt_128,
        .load_encrypt_keys      = aes_sparc64_load_encrypt_keys_128,
        .load_decrypt_keys      = aes_sparc64_load_decrypt_keys_128,
        .ecb_encrypt            = aes_sparc64_ecb_encrypt_128,
        .ecb_decrypt            = aes_sparc64_ecb_decrypt_128,
        .cbc_encrypt            = aes_sparc64_cbc_encrypt_128,
        .cbc_decrypt            = aes_sparc64_cbc_decrypt_128,
        .ctr_crypt              = aes_sparc64_ctr_crypt_128,
};

static struct aes_ops aes192_ops = {
        .encrypt                = aes_sparc64_encrypt_192,
        .decrypt                = aes_sparc64_decrypt_192,
        .load_encrypt_keys      = aes_sparc64_load_encrypt_keys_192,
        .load_decrypt_keys      = aes_sparc64_load_decrypt_keys_192,
        .ecb_encrypt            = aes_sparc64_ecb_encrypt_192,
        .ecb_decrypt            = aes_sparc64_ecb_decrypt_192,
        .cbc_encrypt            = aes_sparc64_cbc_encrypt_192,
        .cbc_decrypt            = aes_sparc64_cbc_decrypt_192,
        .ctr_crypt              = aes_sparc64_ctr_crypt_192,
};

static struct aes_ops aes256_ops = {
        .encrypt                = aes_sparc64_encrypt_256,
        .decrypt                = aes_sparc64_decrypt_256,
        .load_encrypt_keys      = aes_sparc64_load_encrypt_keys_256,
        .load_decrypt_keys      = aes_sparc64_load_decrypt_keys_256,
        .ecb_encrypt            = aes_sparc64_ecb_encrypt_256,
        .ecb_decrypt            = aes_sparc64_ecb_decrypt_256,
        .cbc_encrypt            = aes_sparc64_cbc_encrypt_256,
        .cbc_decrypt            = aes_sparc64_cbc_decrypt_256,
        .ctr_crypt              = aes_sparc64_ctr_crypt_256,
};

extern void aes_sparc64_key_expand(const u32 *in_key, u64 *output_key,
                                   unsigned int key_len);

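/*
 * The expanded_key_length values below are the size in bytes of the full
 * AES key schedule, (rounds + 1) * 16: 0xb0 = 176 for AES-128,
 * 0xd0 = 208 for AES-192 and 0xf0 = 240 for AES-256.
 */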
static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                       unsigned int key_len)
{
        struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);

        switch (key_len) {
        case AES_KEYSIZE_128:
                ctx->expanded_key_length = 0xb0;
                ctx->ops = &aes128_ops;
                break;

        case AES_KEYSIZE_192:
                ctx->expanded_key_length = 0xd0;
                ctx->ops = &aes192_ops;
                break;

        case AES_KEYSIZE_256:
                ctx->expanded_key_length = 0xf0;
                ctx->ops = &aes256_ops;
                break;

        default:
                return -EINVAL;
        }

        aes_sparc64_key_expand((const u32 *)in_key, &ctx->key[0], key_len);
        ctx->key_length = key_len;

        return 0;
}

static int aes_set_key_skcipher(struct crypto_skcipher *tfm, const u8 *in_key,
                                unsigned int key_len)
{
        return aes_set_key(crypto_skcipher_tfm(tfm), in_key, key_len);
}

static void crypto_aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);

        ctx->ops->encrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst);
}

static void crypto_aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct crypto_sparc64_aes_ctx *ctx = crypto_tfm_ctx(tfm);

        ctx->ops->decrypt(&ctx->key[0], (const u32 *) src, (u32 *) dst);
}

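/*
 * ECB encrypt: process each walk chunk in whole AES blocks; any tail that
 * is not a block multiple is handed back to the walk for the next
 * iteration.  FPRS is cleared afterwards since the crypto opcodes run on
 * the FPU register file.
 */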
static int ecb_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, true);
        if (err)
                return err;

        ctx->ops->load_encrypt_keys(&ctx->key[0]);
        while ((nbytes = walk.nbytes) != 0) {
                ctx->ops->ecb_encrypt(&ctx->key[0], walk.src.virt.addr,
                                      walk.dst.virt.addr,
                                      round_down(nbytes, AES_BLOCK_SIZE));
                err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
        }
        fprs_write(0);
        return err;
}

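/*
 * Note: the decrypt routines are handed key_end, a pointer just past the
 * expanded key schedule, rather than its start; the assembly presumably
 * indexes the round keys from the end for decryption.
 */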
static int ecb_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
        const u64 *key_end;
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, true);
        if (err)
                return err;

        ctx->ops->load_decrypt_keys(&ctx->key[0]);
        key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
        while ((nbytes = walk.nbytes) != 0) {
                ctx->ops->ecb_decrypt(key_end, walk.src.virt.addr,
                                      walk.dst.virt.addr,
                                      round_down(nbytes, AES_BLOCK_SIZE));
                err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
        }
        fprs_write(0);

        return err;
}

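/*
 * CBC: the chaining value lives in walk.iv and is passed to the assembly
 * for every chunk, and is expected to be updated there so that chaining
 * carries across walk iterations.
 */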
static int cbc_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, true);
        if (err)
                return err;

        ctx->ops->load_encrypt_keys(&ctx->key[0]);
        while ((nbytes = walk.nbytes) != 0) {
                ctx->ops->cbc_encrypt(&ctx->key[0], walk.src.virt.addr,
                                      walk.dst.virt.addr,
                                      round_down(nbytes, AES_BLOCK_SIZE),
                                      walk.iv);
                err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
        }
        fprs_write(0);
        return err;
}

static int cbc_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
        const u64 *key_end;
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, true);
        if (err)
                return err;

        ctx->ops->load_decrypt_keys(&ctx->key[0]);
        key_end = &ctx->key[ctx->expanded_key_length / sizeof(u64)];
        while ((nbytes = walk.nbytes) != 0) {
                ctx->ops->cbc_decrypt(key_end, walk.src.virt.addr,
                                      walk.dst.virt.addr,
                                      round_down(nbytes, AES_BLOCK_SIZE),
                                      walk.iv);
                err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
        }
        fprs_write(0);

        return err;
}

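/*
 * CTR tail handling: for a final fragment shorter than one block, encrypt
 * the counter block itself (via the ECB helper) to produce one block of
 * keystream, XOR only the remaining bytes into the destination, and bump
 * the counter.
 */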
static void ctr_crypt_final(const struct crypto_sparc64_aes_ctx *ctx,
                            struct skcipher_walk *walk)
{
        u8 *ctrblk = walk->iv;
        u64 keystream[AES_BLOCK_SIZE / sizeof(u64)];
        u8 *src = walk->src.virt.addr;
        u8 *dst = walk->dst.virt.addr;
        unsigned int nbytes = walk->nbytes;

        ctx->ops->ecb_encrypt(&ctx->key[0], (const u64 *)ctrblk,
                              keystream, AES_BLOCK_SIZE);
        crypto_xor_cpy(dst, (u8 *) keystream, src, nbytes);
        crypto_inc(ctrblk, AES_BLOCK_SIZE);
}

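/*
 * The main loop below only consumes whole blocks; anything shorter than
 * AES_BLOCK_SIZE left at the end goes through ctr_crypt_final() above.
 */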
static int ctr_crypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        const struct crypto_sparc64_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        unsigned int nbytes;
        int err;

        err = skcipher_walk_virt(&walk, req, true);
        if (err)
                return err;

        ctx->ops->load_encrypt_keys(&ctx->key[0]);
        while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
                ctx->ops->ctr_crypt(&ctx->key[0], walk.src.virt.addr,
                                    walk.dst.virt.addr,
                                    round_down(nbytes, AES_BLOCK_SIZE),
                                    walk.iv);
                err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
        }
        if (walk.nbytes) {
                ctr_crypt_final(ctx, &walk);
                err = skcipher_walk_done(&walk, 0);
        }
        fprs_write(0);
        return err;
}

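/*
 * Single-block "aes" cipher.  Note the weaker alignment requirement
 * (alignmask 3, i.e. 32-bit) compared to the skcipher modes below, which
 * demand 64-bit alignment for their u64-based bulk routines.
 */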
static struct crypto_alg cipher_alg = {
        .cra_name               = "aes",
        .cra_driver_name        = "aes-sparc64",
        .cra_priority           = SPARC_CR_OPCODE_PRIORITY,
        .cra_flags              = CRYPTO_ALG_TYPE_CIPHER,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct crypto_sparc64_aes_ctx),
        .cra_alignmask          = 3,
        .cra_module             = THIS_MODULE,
        .cra_u  = {
                .cipher = {
                        .cia_min_keysize        = AES_MIN_KEY_SIZE,
                        .cia_max_keysize        = AES_MAX_KEY_SIZE,
                        .cia_setkey             = aes_set_key,
                        .cia_encrypt            = crypto_aes_encrypt,
                        .cia_decrypt            = crypto_aes_decrypt
                }
        }
};

static struct skcipher_alg skcipher_algs[] = {
        {
                .base.cra_name          = "ecb(aes)",
                .base.cra_driver_name   = "ecb-aes-sparc64",
                .base.cra_priority      = SPARC_CR_OPCODE_PRIORITY,
                .base.cra_blocksize     = AES_BLOCK_SIZE,
                .base.cra_ctxsize       = sizeof(struct crypto_sparc64_aes_ctx),
                .base.cra_alignmask     = 7,
                .base.cra_module        = THIS_MODULE,
                .min_keysize            = AES_MIN_KEY_SIZE,
                .max_keysize            = AES_MAX_KEY_SIZE,
                .setkey                 = aes_set_key_skcipher,
                .encrypt                = ecb_encrypt,
                .decrypt                = ecb_decrypt,
        }, {
                .base.cra_name          = "cbc(aes)",
                .base.cra_driver_name   = "cbc-aes-sparc64",
                .base.cra_priority      = SPARC_CR_OPCODE_PRIORITY,
                .base.cra_blocksize     = AES_BLOCK_SIZE,
                .base.cra_ctxsize       = sizeof(struct crypto_sparc64_aes_ctx),
                .base.cra_alignmask     = 7,
                .base.cra_module        = THIS_MODULE,
                .min_keysize            = AES_MIN_KEY_SIZE,
                .max_keysize            = AES_MAX_KEY_SIZE,
                .ivsize                 = AES_BLOCK_SIZE,
                .setkey                 = aes_set_key_skcipher,
                .encrypt                = cbc_encrypt,
                .decrypt                = cbc_decrypt,
        }, {
                .base.cra_name          = "ctr(aes)",
                .base.cra_driver_name   = "ctr-aes-sparc64",
                .base.cra_priority      = SPARC_CR_OPCODE_PRIORITY,
                .base.cra_blocksize     = 1,
                .base.cra_ctxsize       = sizeof(struct crypto_sparc64_aes_ctx),
                .base.cra_alignmask     = 7,
                .base.cra_module        = THIS_MODULE,
                .min_keysize            = AES_MIN_KEY_SIZE,
                .max_keysize            = AES_MAX_KEY_SIZE,
                .ivsize                 = AES_BLOCK_SIZE,
                .setkey                 = aes_set_key_skcipher,
                .encrypt                = ctr_crypt,
                .decrypt                = ctr_crypt,
                .chunksize              = AES_BLOCK_SIZE,
        }
};

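/*
 * AES opcode availability is advertised both in the ELF hwcaps and in the
 * per-cpu CFR register (%asr26); check both before registering anything.
 */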
static bool __init sparc64_has_aes_opcode(void)
{
        unsigned long cfr;

        if (!(sparc64_elf_hwcap & HWCAP_SPARC_CRYPTO))
                return false;

        __asm__ __volatile__("rd %%asr26, %0" : "=r" (cfr));
        if (!(cfr & CFR_AES))
                return false;

        return true;
}

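/*
 * Register the single-block cipher first, then the skcipher modes, and
 * roll back the cipher registration if the latter fails.
 */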
static int __init aes_sparc64_mod_init(void)
{
        int err;

        if (!sparc64_has_aes_opcode()) {
                pr_info("sparc64 aes opcodes not available.\n");
                return -ENODEV;
        }
        pr_info("Using sparc64 aes opcodes optimized AES implementation\n");
        err = crypto_register_alg(&cipher_alg);
        if (err)
                return err;
        err = crypto_register_skciphers(skcipher_algs,
                                        ARRAY_SIZE(skcipher_algs));
        if (err)
                crypto_unregister_alg(&cipher_alg);
        return err;
}

static void __exit aes_sparc64_mod_fini(void)
{
        crypto_unregister_alg(&cipher_alg);
        crypto_unregister_skciphers(skcipher_algs, ARRAY_SIZE(skcipher_algs));
}

module_init(aes_sparc64_mod_init);
module_exit(aes_sparc64_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, sparc64 aes opcode accelerated");

MODULE_ALIAS_CRYPTO("aes");

#include "crop_devid.c"