// SPDX-License-Identifier: GPL-2.0+
/*
 * Cryptographic API.
 *
 * s390 implementation of the AES Cipher Algorithm.
 *
 * s390 Version:
 *   Copyright IBM Corp. 2005, 2017
 *   Author(s): Jan Glauber (jang@de.ibm.com)
 *		Sebastian Siewior <sebastian@breakpoint.cc> SW-Fallback
 *		Patrick Steuer <patrick.steuer@de.ibm.com>
 *		Harald Freudenberger <freude@de.ibm.com>
 *
 * Derived from "crypto/aes_generic.c"
 */

#define KMSG_COMPONENT "aes_s390"
#define pr_fmt(fmt) KMSG_COMPONENT ": " fmt

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <crypto/ghash.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/cipher.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <linux/init.h>
#include <linux/mutex.h>
#include <linux/fips.h>
#include <linux/string.h>
#include <crypto/xts.h>
#include <asm/cpacf.h>

static u8 *ctrblk;
static DEFINE_MUTEX(ctrblk_lock);

static cpacf_mask_t km_functions, kmc_functions, kmctr_functions,
		    kma_functions;
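/*
 * These masks are filled once by cpacf_query() in aes_s390_init() and are
 * then consulted via cpacf_test_func() in the setkey paths: a cipher mode
 * is only handled in hardware if the matching CPACF function code is
 * available, otherwise the software fallback tfm is used.
 */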

struct s390_aes_ctx {
	u8 key[AES_MAX_KEY_SIZE];
	int key_len;
	unsigned long fc;
	union {
		struct crypto_skcipher *skcipher;
		struct crypto_cipher *cip;
	} fallback;
};

struct s390_xts_ctx {
	u8 key[32];
	u8 pcc_key[32];
	int key_len;
	unsigned long fc;
	struct crypto_skcipher *fallback;
};

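/*
 * State for walking the AAD and plain-/ciphertext scatterlists in GCM:
 * "walk" tracks the current scatterlist position, "buf" collects partial
 * blocks until at least AES_BLOCK_SIZE bytes (or the remainder of the
 * request) are available, and ptr/nbytes describe the chunk that is handed
 * to the CPACF instruction next.
 */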
struct gcm_sg_walk {
	struct scatter_walk walk;
	unsigned int walk_bytes;
	u8 *walk_ptr;
	unsigned int walk_bytes_remain;
	u8 buf[AES_BLOCK_SIZE];
	unsigned int buf_bytes;
	u8 *ptr;
	unsigned int nbytes;
};

static int setkey_fallback_cip(struct crypto_tfm *tfm, const u8 *in_key,
			       unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	sctx->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	sctx->fallback.cip->base.crt_flags |= (tfm->crt_flags &
					       CRYPTO_TFM_REQ_MASK);

	return crypto_cipher_setkey(sctx->fallback.cip, in_key, key_len);
}

static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	unsigned long fc;

	/* Pick the correct function code based on the key length */
	fc = (key_len == 16) ? CPACF_KM_AES_128 :
	     (key_len == 24) ? CPACF_KM_AES_192 :
	     (key_len == 32) ? CPACF_KM_AES_256 : 0;

	/* Check if the function code is available */
	sctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;
	if (!sctx->fc)
		return setkey_fallback_cip(tfm, in_key, key_len);

	sctx->key_len = key_len;
	memcpy(sctx->key, in_key, key_len);
	return 0;
}

static void crypto_aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	if (unlikely(!sctx->fc)) {
		crypto_cipher_encrypt_one(sctx->fallback.cip, out, in);
		return;
	}
	cpacf_km(sctx->fc, &sctx->key, out, in, AES_BLOCK_SIZE);
}

static void crypto_aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	if (unlikely(!sctx->fc)) {
		crypto_cipher_decrypt_one(sctx->fallback.cip, out, in);
		return;
	}
	cpacf_km(sctx->fc | CPACF_DECRYPT,
		 &sctx->key, out, in, AES_BLOCK_SIZE);
}

static int fallback_init_cip(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	sctx->fallback.cip = crypto_alloc_cipher(name, 0,
						 CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(sctx->fallback.cip)) {
		pr_err("Allocating AES fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(sctx->fallback.cip);
	}

	return 0;
}

static void fallback_exit_cip(struct crypto_tfm *tfm)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(sctx->fallback.cip);
	sctx->fallback.cip = NULL;
}

static struct crypto_alg aes_alg = {
	.cra_name		=	"aes",
	.cra_driver_name	=	"aes-s390",
	.cra_priority		=	300,
	.cra_flags		=	CRYPTO_ALG_TYPE_CIPHER |
					CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		=	AES_BLOCK_SIZE,
	.cra_ctxsize		=	sizeof(struct s390_aes_ctx),
	.cra_module		=	THIS_MODULE,
	.cra_init		=	fallback_init_cip,
	.cra_exit		=	fallback_exit_cip,
	.cra_u			=	{
		.cipher = {
			.cia_min_keysize	=	AES_MIN_KEY_SIZE,
			.cia_max_keysize	=	AES_MAX_KEY_SIZE,
			.cia_setkey		=	aes_set_key,
			.cia_encrypt		=	crypto_aes_encrypt,
			.cia_decrypt		=	crypto_aes_decrypt,
		}
	}
};

static int setkey_fallback_skcipher(struct crypto_skcipher *tfm, const u8 *key,
				    unsigned int len)
{
	struct s390_aes_ctx *sctx = crypto_skcipher_ctx(tfm);

	crypto_skcipher_clear_flags(sctx->fallback.skcipher,
				    CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(sctx->fallback.skcipher,
				  crypto_skcipher_get_flags(tfm) &
				  CRYPTO_TFM_REQ_MASK);
	return crypto_skcipher_setkey(sctx->fallback.skcipher, key, len);
}

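/*
 * Route a request to the software fallback tfm. The storage for the
 * sub-request was reserved via crypto_skcipher_set_reqsize() in
 * fallback_init_skcipher(), so the original request can simply be copied
 * and re-targeted at the fallback transform.
 */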
static int fallback_skcipher_crypt(struct s390_aes_ctx *sctx,
				   struct skcipher_request *req,
				   unsigned long modifier)
{
	struct skcipher_request *subreq = skcipher_request_ctx(req);

	*subreq = *req;
	skcipher_request_set_tfm(subreq, sctx->fallback.skcipher);
	return (modifier & CPACF_DECRYPT) ?
		crypto_skcipher_decrypt(subreq) :
		crypto_skcipher_encrypt(subreq);
}

static int ecb_aes_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_skcipher_ctx(tfm);
	unsigned long fc;

	/* Pick the correct function code based on the key length */
	fc = (key_len == 16) ? CPACF_KM_AES_128 :
	     (key_len == 24) ? CPACF_KM_AES_192 :
	     (key_len == 32) ? CPACF_KM_AES_256 : 0;

	/* Check if the function code is available */
	sctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;
	if (!sctx->fc)
		return setkey_fallback_skcipher(tfm, in_key, key_len);

	sctx->key_len = key_len;
	memcpy(sctx->key, in_key, key_len);
	return 0;
}

static int ecb_aes_crypt(struct skcipher_request *req, unsigned long modifier)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct s390_aes_ctx *sctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes, n;
	int ret;

	if (unlikely(!sctx->fc))
		return fallback_skcipher_crypt(sctx, req, modifier);

	ret = skcipher_walk_virt(&walk, req, false);
	while ((nbytes = walk.nbytes) != 0) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		cpacf_km(sctx->fc | modifier, sctx->key,
			 walk.dst.virt.addr, walk.src.virt.addr, n);
		ret = skcipher_walk_done(&walk, nbytes - n);
	}
	return ret;
}

static int ecb_aes_encrypt(struct skcipher_request *req)
{
	return ecb_aes_crypt(req, 0);
}

static int ecb_aes_decrypt(struct skcipher_request *req)
{
	return ecb_aes_crypt(req, CPACF_DECRYPT);
}

static int fallback_init_skcipher(struct crypto_skcipher *tfm)
{
	const char *name = crypto_tfm_alg_name(&tfm->base);
	struct s390_aes_ctx *sctx = crypto_skcipher_ctx(tfm);

	sctx->fallback.skcipher = crypto_alloc_skcipher(name, 0,
				CRYPTO_ALG_NEED_FALLBACK | CRYPTO_ALG_ASYNC);

	if (IS_ERR(sctx->fallback.skcipher)) {
		pr_err("Allocating AES fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(sctx->fallback.skcipher);
	}

	crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
				    crypto_skcipher_reqsize(sctx->fallback.skcipher));
	return 0;
}

static void fallback_exit_skcipher(struct crypto_skcipher *tfm)
{
	struct s390_aes_ctx *sctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(sctx->fallback.skcipher);
}

static struct skcipher_alg ecb_aes_alg = {
	.base.cra_name		=	"ecb(aes)",
	.base.cra_driver_name	=	"ecb-aes-s390",
	.base.cra_priority	=	401,	/* combo: aes + ecb + 1 */
	.base.cra_flags		=	CRYPTO_ALG_NEED_FALLBACK,
	.base.cra_blocksize	=	AES_BLOCK_SIZE,
	.base.cra_ctxsize	=	sizeof(struct s390_aes_ctx),
	.base.cra_module	=	THIS_MODULE,
	.init			=	fallback_init_skcipher,
	.exit			=	fallback_exit_skcipher,
	.min_keysize		=	AES_MIN_KEY_SIZE,
	.max_keysize		=	AES_MAX_KEY_SIZE,
	.setkey			=	ecb_aes_set_key,
	.encrypt		=	ecb_aes_encrypt,
	.decrypt		=	ecb_aes_decrypt,
};

static int cbc_aes_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_skcipher_ctx(tfm);
	unsigned long fc;

	/* Pick the correct function code based on the key length */
	fc = (key_len == 16) ? CPACF_KMC_AES_128 :
	     (key_len == 24) ? CPACF_KMC_AES_192 :
	     (key_len == 32) ? CPACF_KMC_AES_256 : 0;

	/* Check if the function code is available */
	sctx->fc = (fc && cpacf_test_func(&kmc_functions, fc)) ? fc : 0;
	if (!sctx->fc)
		return setkey_fallback_skcipher(tfm, in_key, key_len);

	sctx->key_len = key_len;
	memcpy(sctx->key, in_key, key_len);
	return 0;
}

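/*
 * The KMC instruction takes a single parameter block containing the
 * chaining value (IV) followed by the key. KMC updates the chaining value
 * in place, so the IV is copied back to walk.iv after each chunk and the
 * whole parameter block is wiped before returning.
 */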
static int cbc_aes_crypt(struct skcipher_request *req, unsigned long modifier)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct s390_aes_ctx *sctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes, n;
	int ret;
	struct {
		u8 iv[AES_BLOCK_SIZE];
		u8 key[AES_MAX_KEY_SIZE];
	} param;

	if (unlikely(!sctx->fc))
		return fallback_skcipher_crypt(sctx, req, modifier);

	ret = skcipher_walk_virt(&walk, req, false);
	if (ret)
		return ret;
	memcpy(param.iv, walk.iv, AES_BLOCK_SIZE);
	memcpy(param.key, sctx->key, sctx->key_len);
	while ((nbytes = walk.nbytes) != 0) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		cpacf_kmc(sctx->fc | modifier, &param,
			  walk.dst.virt.addr, walk.src.virt.addr, n);
		memcpy(walk.iv, param.iv, AES_BLOCK_SIZE);
		ret = skcipher_walk_done(&walk, nbytes - n);
	}
	memzero_explicit(&param, sizeof(param));
	return ret;
}

static int cbc_aes_encrypt(struct skcipher_request *req)
{
	return cbc_aes_crypt(req, 0);
}

static int cbc_aes_decrypt(struct skcipher_request *req)
{
	return cbc_aes_crypt(req, CPACF_DECRYPT);
}

static struct skcipher_alg cbc_aes_alg = {
	.base.cra_name		=	"cbc(aes)",
	.base.cra_driver_name	=	"cbc-aes-s390",
	.base.cra_priority	=	402,	/* ecb-aes-s390 + 1 */
	.base.cra_flags		=	CRYPTO_ALG_NEED_FALLBACK,
	.base.cra_blocksize	=	AES_BLOCK_SIZE,
	.base.cra_ctxsize	=	sizeof(struct s390_aes_ctx),
	.base.cra_module	=	THIS_MODULE,
	.init			=	fallback_init_skcipher,
	.exit			=	fallback_exit_skcipher,
	.min_keysize		=	AES_MIN_KEY_SIZE,
	.max_keysize		=	AES_MAX_KEY_SIZE,
	.ivsize			=	AES_BLOCK_SIZE,
	.setkey			=	cbc_aes_set_key,
	.encrypt		=	cbc_aes_encrypt,
	.decrypt		=	cbc_aes_decrypt,
};

static int xts_fallback_setkey(struct crypto_skcipher *tfm, const u8 *key,
			       unsigned int len)
{
	struct s390_xts_ctx *xts_ctx = crypto_skcipher_ctx(tfm);

	crypto_skcipher_clear_flags(xts_ctx->fallback, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(xts_ctx->fallback,
				  crypto_skcipher_get_flags(tfm) &
				  CRYPTO_TFM_REQ_MASK);
	return crypto_skcipher_setkey(xts_ctx->fallback, key, len);
}

static int xts_aes_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_xts_ctx *xts_ctx = crypto_skcipher_ctx(tfm);
	unsigned long fc;
	int err;

	err = xts_fallback_setkey(tfm, in_key, key_len);
	if (err)
		return err;

	/* In fips mode only 128 bit or 256 bit keys are valid */
	if (fips_enabled && key_len != 32 && key_len != 64)
		return -EINVAL;

	/* Pick the correct function code based on the key length */
	fc = (key_len == 32) ? CPACF_KM_XTS_128 :
	     (key_len == 64) ? CPACF_KM_XTS_256 : 0;

	/* Check if the function code is available */
	xts_ctx->fc = (fc && cpacf_test_func(&km_functions, fc)) ? fc : 0;
	if (!xts_ctx->fc)
		return 0;

	/* Split the XTS key into the two subkeys */
	key_len = key_len / 2;
	xts_ctx->key_len = key_len;
	memcpy(xts_ctx->key, in_key, key_len);
	memcpy(xts_ctx->pcc_key, in_key + key_len, key_len);
	return 0;
}

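/*
 * The PCC instruction computes the initial XTS tweak (the "xts" field of
 * pcc_param) from the tweak subkey and the IV; the result then seeds the
 * "init" field of the KM parameter block. Both parameter blocks are laid
 * out for the largest (256-bit) key; for XTS-128 the pointer passed to
 * PCC/KM is advanced by 16 bytes (offset = key_len & 0x10), shrinking the
 * key field while the remaining fields keep their positions.
 */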
static int xts_aes_crypt(struct skcipher_request *req, unsigned long modifier)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct s390_xts_ctx *xts_ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int offset, nbytes, n;
	int ret;
	struct {
		u8 key[32];
		u8 tweak[16];
		u8 block[16];
		u8 bit[16];
		u8 xts[16];
	} pcc_param;
	struct {
		u8 key[32];
		u8 init[16];
	} xts_param;

	if (req->cryptlen < AES_BLOCK_SIZE)
		return -EINVAL;

	if (unlikely(!xts_ctx->fc || (req->cryptlen % AES_BLOCK_SIZE) != 0)) {
		struct skcipher_request *subreq = skcipher_request_ctx(req);

		*subreq = *req;
		skcipher_request_set_tfm(subreq, xts_ctx->fallback);
		return (modifier & CPACF_DECRYPT) ?
			crypto_skcipher_decrypt(subreq) :
			crypto_skcipher_encrypt(subreq);
	}

	ret = skcipher_walk_virt(&walk, req, false);
	if (ret)
		return ret;
	offset = xts_ctx->key_len & 0x10;
	memset(pcc_param.block, 0, sizeof(pcc_param.block));
	memset(pcc_param.bit, 0, sizeof(pcc_param.bit));
	memset(pcc_param.xts, 0, sizeof(pcc_param.xts));
	memcpy(pcc_param.tweak, walk.iv, sizeof(pcc_param.tweak));
	memcpy(pcc_param.key + offset, xts_ctx->pcc_key, xts_ctx->key_len);
	cpacf_pcc(xts_ctx->fc, pcc_param.key + offset);

	memcpy(xts_param.key + offset, xts_ctx->key, xts_ctx->key_len);
	memcpy(xts_param.init, pcc_param.xts, 16);

	while ((nbytes = walk.nbytes) != 0) {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		cpacf_km(xts_ctx->fc | modifier, xts_param.key + offset,
			 walk.dst.virt.addr, walk.src.virt.addr, n);
		ret = skcipher_walk_done(&walk, nbytes - n);
	}
	memzero_explicit(&pcc_param, sizeof(pcc_param));
	memzero_explicit(&xts_param, sizeof(xts_param));
	return ret;
}

static int xts_aes_encrypt(struct skcipher_request *req)
{
	return xts_aes_crypt(req, 0);
}

static int xts_aes_decrypt(struct skcipher_request *req)
{
	return xts_aes_crypt(req, CPACF_DECRYPT);
}

static int xts_fallback_init(struct crypto_skcipher *tfm)
{
	const char *name = crypto_tfm_alg_name(&tfm->base);
	struct s390_xts_ctx *xts_ctx = crypto_skcipher_ctx(tfm);

	xts_ctx->fallback = crypto_alloc_skcipher(name, 0,
				CRYPTO_ALG_NEED_FALLBACK | CRYPTO_ALG_ASYNC);

	if (IS_ERR(xts_ctx->fallback)) {
		pr_err("Allocating XTS fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(xts_ctx->fallback);
	}
	crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
				    crypto_skcipher_reqsize(xts_ctx->fallback));
	return 0;
}

static void xts_fallback_exit(struct crypto_skcipher *tfm)
{
	struct s390_xts_ctx *xts_ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(xts_ctx->fallback);
}

static struct skcipher_alg xts_aes_alg = {
	.base.cra_name		=	"xts(aes)",
	.base.cra_driver_name	=	"xts-aes-s390",
	.base.cra_priority	=	402,	/* ecb-aes-s390 + 1 */
	.base.cra_flags		=	CRYPTO_ALG_NEED_FALLBACK,
	.base.cra_blocksize	=	AES_BLOCK_SIZE,
	.base.cra_ctxsize	=	sizeof(struct s390_xts_ctx),
	.base.cra_module	=	THIS_MODULE,
	.init			=	xts_fallback_init,
	.exit			=	xts_fallback_exit,
	.min_keysize		=	2 * AES_MIN_KEY_SIZE,
	.max_keysize		=	2 * AES_MAX_KEY_SIZE,
	.ivsize			=	AES_BLOCK_SIZE,
	.setkey			=	xts_aes_set_key,
	.encrypt		=	xts_aes_encrypt,
	.decrypt		=	xts_aes_decrypt,
};

static int ctr_aes_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_skcipher_ctx(tfm);
	unsigned long fc;

	/* Pick the correct function code based on the key length */
	fc = (key_len == 16) ? CPACF_KMCTR_AES_128 :
	     (key_len == 24) ? CPACF_KMCTR_AES_192 :
	     (key_len == 32) ? CPACF_KMCTR_AES_256 : 0;

	/* Check if the function code is available */
	sctx->fc = (fc && cpacf_test_func(&kmctr_functions, fc)) ? fc : 0;
	if (!sctx->fc)
		return setkey_fallback_skcipher(tfm, in_key, key_len);

	sctx->key_len = key_len;
	memcpy(sctx->key, in_key, key_len);
	return 0;
}

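/*
 * Fill the ctrblk page with consecutive counter values so that a single
 * KMCTR invocation can process many blocks at once. Returns the number of
 * bytes (a multiple of AES_BLOCK_SIZE, at most PAGE_SIZE) that the
 * prepared counter sequence covers.
 */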
static unsigned int __ctrblk_init(u8 *ctrptr, u8 *iv, unsigned int nbytes)
{
	unsigned int i, n;

	/* only use complete blocks, max. PAGE_SIZE */
	memcpy(ctrptr, iv, AES_BLOCK_SIZE);
	n = (nbytes > PAGE_SIZE) ? PAGE_SIZE : nbytes & ~(AES_BLOCK_SIZE - 1);
	for (i = (n / AES_BLOCK_SIZE) - 1; i > 0; i--) {
		memcpy(ctrptr + AES_BLOCK_SIZE, ctrptr, AES_BLOCK_SIZE);
		crypto_inc(ctrptr + AES_BLOCK_SIZE, AES_BLOCK_SIZE);
		ctrptr += AES_BLOCK_SIZE;
	}
	return n;
}

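/*
 * CTR mode uses the shared ctrblk page when the mutex can be taken;
 * otherwise (or for requests shorter than two blocks) the function below
 * falls back to processing one block at a time with walk.iv as the
 * counter.
 */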
static int ctr_aes_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct s390_aes_ctx *sctx = crypto_skcipher_ctx(tfm);
	u8 buf[AES_BLOCK_SIZE], *ctrptr;
	struct skcipher_walk walk;
	unsigned int n, nbytes;
	int ret, locked;

	if (unlikely(!sctx->fc))
		return fallback_skcipher_crypt(sctx, req, 0);

	locked = mutex_trylock(&ctrblk_lock);

	ret = skcipher_walk_virt(&walk, req, false);
	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
		n = AES_BLOCK_SIZE;

		if (nbytes >= 2*AES_BLOCK_SIZE && locked)
			n = __ctrblk_init(ctrblk, walk.iv, nbytes);
		ctrptr = (n > AES_BLOCK_SIZE) ? ctrblk : walk.iv;
		cpacf_kmctr(sctx->fc, sctx->key, walk.dst.virt.addr,
			    walk.src.virt.addr, n, ctrptr);
		if (ctrptr == ctrblk)
			memcpy(walk.iv, ctrptr + n - AES_BLOCK_SIZE,
			       AES_BLOCK_SIZE);
		crypto_inc(walk.iv, AES_BLOCK_SIZE);
		ret = skcipher_walk_done(&walk, nbytes - n);
	}
	if (locked)
		mutex_unlock(&ctrblk_lock);
	/*
	 * final block may be < AES_BLOCK_SIZE, copy only nbytes
	 */
	if (nbytes) {
		cpacf_kmctr(sctx->fc, sctx->key, buf, walk.src.virt.addr,
			    AES_BLOCK_SIZE, walk.iv);
		memcpy(walk.dst.virt.addr, buf, nbytes);
		crypto_inc(walk.iv, AES_BLOCK_SIZE);
		ret = skcipher_walk_done(&walk, 0);
	}

	return ret;
}

static struct skcipher_alg ctr_aes_alg = {
	.base.cra_name		=	"ctr(aes)",
	.base.cra_driver_name	=	"ctr-aes-s390",
	.base.cra_priority	=	402,	/* ecb-aes-s390 + 1 */
	.base.cra_flags		=	CRYPTO_ALG_NEED_FALLBACK,
	.base.cra_blocksize	=	1,
	.base.cra_ctxsize	=	sizeof(struct s390_aes_ctx),
	.base.cra_module	=	THIS_MODULE,
	.init			=	fallback_init_skcipher,
	.exit			=	fallback_exit_skcipher,
	.min_keysize		=	AES_MIN_KEY_SIZE,
	.max_keysize		=	AES_MAX_KEY_SIZE,
	.ivsize			=	AES_BLOCK_SIZE,
	.setkey			=	ctr_aes_set_key,
	.encrypt		=	ctr_aes_crypt,
	.decrypt		=	ctr_aes_crypt,
	.chunksize		=	AES_BLOCK_SIZE,
};

static int gcm_aes_setkey(struct crypto_aead *tfm, const u8 *key,
			  unsigned int keylen)
{
	struct s390_aes_ctx *ctx = crypto_aead_ctx(tfm);

	switch (keylen) {
	case AES_KEYSIZE_128:
		ctx->fc = CPACF_KMA_GCM_AES_128;
		break;
	case AES_KEYSIZE_192:
		ctx->fc = CPACF_KMA_GCM_AES_192;
		break;
	case AES_KEYSIZE_256:
		ctx->fc = CPACF_KMA_GCM_AES_256;
		break;
	default:
		return -EINVAL;
	}

	memcpy(ctx->key, key, keylen);
	ctx->key_len = keylen;
	return 0;
}

static int gcm_aes_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 8:
	case 12:
	case 13:
	case 14:
	case 15:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

static void gcm_walk_start(struct gcm_sg_walk *gw, struct scatterlist *sg,
			   unsigned int len)
{
	memset(gw, 0, sizeof(*gw));
	gw->walk_bytes_remain = len;
	scatterwalk_start(&gw->walk, sg);
}

static inline unsigned int _gcm_sg_clamp_and_map(struct gcm_sg_walk *gw)
{
	struct scatterlist *nextsg;

	gw->walk_bytes = scatterwalk_clamp(&gw->walk, gw->walk_bytes_remain);
	while (!gw->walk_bytes) {
		nextsg = sg_next(gw->walk.sg);
		if (!nextsg)
			return 0;
		scatterwalk_start(&gw->walk, nextsg);
		gw->walk_bytes = scatterwalk_clamp(&gw->walk,
						   gw->walk_bytes_remain);
	}
	gw->walk_ptr = scatterwalk_map(&gw->walk);
	return gw->walk_bytes;
}

static inline void _gcm_sg_unmap_and_advance(struct gcm_sg_walk *gw,
					     unsigned int nbytes)
{
	gw->walk_bytes_remain -= nbytes;
	scatterwalk_unmap(gw->walk_ptr);
	scatterwalk_advance(&gw->walk, nbytes);
	scatterwalk_done(&gw->walk, 0, gw->walk_bytes_remain);
	gw->walk_ptr = NULL;
}

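/*
 * Provide the next input chunk of at least minbytesneeded bytes (or the
 * remainder of the request). Data is handed out directly from the mapped
 * scatterlist page when possible; otherwise it is accumulated in gw->buf
 * across scatterlist entries until enough bytes are available.
 */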
static int gcm_in_walk_go(struct gcm_sg_walk *gw, unsigned int minbytesneeded)
{
	int n;

	if (gw->buf_bytes && gw->buf_bytes >= minbytesneeded) {
		gw->ptr = gw->buf;
		gw->nbytes = gw->buf_bytes;
		goto out;
	}

	if (gw->walk_bytes_remain == 0) {
		gw->ptr = NULL;
		gw->nbytes = 0;
		goto out;
	}

	if (!_gcm_sg_clamp_and_map(gw)) {
		gw->ptr = NULL;
		gw->nbytes = 0;
		goto out;
	}

	if (!gw->buf_bytes && gw->walk_bytes >= minbytesneeded) {
		gw->ptr = gw->walk_ptr;
		gw->nbytes = gw->walk_bytes;
		goto out;
	}

	while (1) {
		n = min(gw->walk_bytes, AES_BLOCK_SIZE - gw->buf_bytes);
		memcpy(gw->buf + gw->buf_bytes, gw->walk_ptr, n);
		gw->buf_bytes += n;
		_gcm_sg_unmap_and_advance(gw, n);
		if (gw->buf_bytes >= minbytesneeded) {
			gw->ptr = gw->buf;
			gw->nbytes = gw->buf_bytes;
			goto out;
		}
		if (!_gcm_sg_clamp_and_map(gw)) {
			gw->ptr = NULL;
			gw->nbytes = 0;
			goto out;
		}
	}

out:
	return gw->nbytes;
}

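/*
 * Same for the output side: if the current scatterlist entry cannot hold
 * minbytesneeded contiguous bytes, the chunk is produced in gw->buf and
 * copied back to the scatterlist in gcm_out_walk_done().
 */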
static int gcm_out_walk_go(struct gcm_sg_walk *gw, unsigned int minbytesneeded)
{
	if (gw->walk_bytes_remain == 0) {
		gw->ptr = NULL;
		gw->nbytes = 0;
		goto out;
	}

	if (!_gcm_sg_clamp_and_map(gw)) {
		gw->ptr = NULL;
		gw->nbytes = 0;
		goto out;
	}

	if (gw->walk_bytes >= minbytesneeded) {
		gw->ptr = gw->walk_ptr;
		gw->nbytes = gw->walk_bytes;
		goto out;
	}

	scatterwalk_unmap(gw->walk_ptr);
	gw->walk_ptr = NULL;

	gw->ptr = gw->buf;
	gw->nbytes = sizeof(gw->buf);

out:
	return gw->nbytes;
}

static int gcm_in_walk_done(struct gcm_sg_walk *gw, unsigned int bytesdone)
{
	if (gw->ptr == NULL)
		return 0;

	if (gw->ptr == gw->buf) {
		int n = gw->buf_bytes - bytesdone;
		if (n > 0) {
			memmove(gw->buf, gw->buf + bytesdone, n);
			gw->buf_bytes = n;
		} else
			gw->buf_bytes = 0;
	} else
		_gcm_sg_unmap_and_advance(gw, bytesdone);

	return bytesdone;
}

static int gcm_out_walk_done(struct gcm_sg_walk *gw, unsigned int bytesdone)
{
	int i, n;

	if (gw->ptr == NULL)
		return 0;

	if (gw->ptr == gw->buf) {
		for (i = 0; i < bytesdone; i += n) {
			if (!_gcm_sg_clamp_and_map(gw))
				return i;
			n = min(gw->walk_bytes, bytesdone - i);
			memcpy(gw->walk_ptr, gw->buf + i, n);
			_gcm_sg_unmap_and_advance(gw, n);
		}
	} else
		_gcm_sg_unmap_and_advance(gw, bytesdone);

	return bytesdone;
}

static int gcm_aes_crypt(struct aead_request *req, unsigned int flags)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct s390_aes_ctx *ctx = crypto_aead_ctx(tfm);
	unsigned int ivsize = crypto_aead_ivsize(tfm);
	unsigned int taglen = crypto_aead_authsize(tfm);
	unsigned int aadlen = req->assoclen;
	unsigned int pclen = req->cryptlen;
	int ret = 0;

	unsigned int n, len, in_bytes, out_bytes,
		     min_bytes, bytes, aad_bytes, pc_bytes;
	struct gcm_sg_walk gw_in, gw_out;
	u8 tag[GHASH_DIGEST_SIZE];

	struct {
		u32 _[3];		/* reserved */
		u32 cv;			/* Counter Value */
		u8 t[GHASH_DIGEST_SIZE];/* Tag */
		u8 h[AES_BLOCK_SIZE];	/* Hash-subkey */
		u64 taadl;		/* Total AAD Length */
		u64 tpcl;		/* Total Plain-/Cipher-text Length */
		u8 j0[GHASH_BLOCK_SIZE];/* initial counter value */
		u8 k[AES_MAX_KEY_SIZE];	/* Key */
	} param;

	/*
	 * encrypt
	 *   req->src: aad||plaintext
	 *   req->dst: aad||ciphertext||tag
	 * decrypt
	 *   req->src: aad||ciphertext||tag
	 *   req->dst: aad||plaintext, return 0 or -EBADMSG
	 * aad, plaintext and ciphertext may be empty.
	 */
	if (flags & CPACF_DECRYPT)
		pclen -= taglen;
	len = aadlen + pclen;

	memset(&param, 0, sizeof(param));
	param.cv = 1;
	param.taadl = aadlen * 8;
	param.tpcl = pclen * 8;
	memcpy(param.j0, req->iv, ivsize);
	*(u32 *)(param.j0 + ivsize) = 1;
	memcpy(param.k, ctx->key, ctx->key_len);

	gcm_walk_start(&gw_in, req->src, len);
	gcm_walk_start(&gw_out, req->dst, len);

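	/*
	 * Process the AAD first, then the plain-/ciphertext. The KMA flags
	 * CPACF_KMA_LAAD and CPACF_KMA_LPC mark the last AAD and last text
	 * chunk respectively; until then only multiples of AES_BLOCK_SIZE
	 * are passed to the instruction.
	 */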
	do {
		min_bytes = min_t(unsigned int,
				  aadlen > 0 ? aadlen : pclen, AES_BLOCK_SIZE);
		in_bytes = gcm_in_walk_go(&gw_in, min_bytes);
		out_bytes = gcm_out_walk_go(&gw_out, min_bytes);
		bytes = min(in_bytes, out_bytes);

		if (aadlen + pclen <= bytes) {
			aad_bytes = aadlen;
			pc_bytes = pclen;
			flags |= CPACF_KMA_LAAD | CPACF_KMA_LPC;
		} else {
			if (aadlen <= bytes) {
				aad_bytes = aadlen;
				pc_bytes = (bytes - aadlen) &
					   ~(AES_BLOCK_SIZE - 1);
				flags |= CPACF_KMA_LAAD;
			} else {
				aad_bytes = bytes & ~(AES_BLOCK_SIZE - 1);
				pc_bytes = 0;
			}
		}

		if (aad_bytes > 0)
			memcpy(gw_out.ptr, gw_in.ptr, aad_bytes);

		cpacf_kma(ctx->fc | flags, &param,
			  gw_out.ptr + aad_bytes,
			  gw_in.ptr + aad_bytes, pc_bytes,
			  gw_in.ptr, aad_bytes);

		n = aad_bytes + pc_bytes;
		if (gcm_in_walk_done(&gw_in, n) != n)
			return -ENOMEM;
		if (gcm_out_walk_done(&gw_out, n) != n)
			return -ENOMEM;
		aadlen -= aad_bytes;
		pclen -= pc_bytes;
	} while (aadlen + pclen > 0);

	if (flags & CPACF_DECRYPT) {
		scatterwalk_map_and_copy(tag, req->src, len, taglen, 0);
		if (crypto_memneq(tag, param.t, taglen))
			ret = -EBADMSG;
	} else
		scatterwalk_map_and_copy(param.t, req->dst, len, taglen, 1);

	memzero_explicit(&param, sizeof(param));
	return ret;
}

static int gcm_aes_encrypt(struct aead_request *req)
{
	return gcm_aes_crypt(req, CPACF_ENCRYPT);
}

static int gcm_aes_decrypt(struct aead_request *req)
{
	return gcm_aes_crypt(req, CPACF_DECRYPT);
}

static struct aead_alg gcm_aes_aead = {
	.setkey			= gcm_aes_setkey,
	.setauthsize		= gcm_aes_setauthsize,
	.encrypt		= gcm_aes_encrypt,
	.decrypt		= gcm_aes_decrypt,

	.ivsize			= GHASH_BLOCK_SIZE - sizeof(u32),
	.maxauthsize		= GHASH_DIGEST_SIZE,
	.chunksize		= AES_BLOCK_SIZE,

	.base = {
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct s390_aes_ctx),
		.cra_priority		= 900,
		.cra_name		= "gcm(aes)",
		.cra_driver_name	= "gcm-aes-s390",
		.cra_module		= THIS_MODULE,
	},
};

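/*
 * A minimal sketch of how an in-kernel user would reach this driver
 * (hypothetical caller code, not part of this file): the crypto core
 * selects gcm-aes-s390 by priority when "gcm(aes)" is requested, e.g.
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
 *
 *	if (!IS_ERR(tfm)) {
 *		crypto_aead_setkey(tfm, key, AES_KEYSIZE_128);
 *		crypto_aead_setauthsize(tfm, 16);
 *		...
 *		crypto_free_aead(tfm);
 *	}
 */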
static struct crypto_alg *aes_s390_alg;
static struct skcipher_alg *aes_s390_skcipher_algs[4];
static int aes_s390_skciphers_num;
static struct aead_alg *aes_s390_aead_alg;

static int aes_s390_register_skcipher(struct skcipher_alg *alg)
{
	int ret;

	ret = crypto_register_skcipher(alg);
	if (!ret)
		aes_s390_skcipher_algs[aes_s390_skciphers_num++] = alg;
	return ret;
}

static void aes_s390_fini(void)
{
	if (aes_s390_alg)
		crypto_unregister_alg(aes_s390_alg);
	while (aes_s390_skciphers_num--)
		crypto_unregister_skcipher(aes_s390_skcipher_algs[aes_s390_skciphers_num]);
	if (ctrblk)
		free_page((unsigned long) ctrblk);

	if (aes_s390_aead_alg)
		crypto_unregister_aead(aes_s390_aead_alg);
}

static int __init aes_s390_init(void)
{
	int ret;

	/* Query available functions for KM, KMC, KMCTR and KMA */
	cpacf_query(CPACF_KM, &km_functions);
	cpacf_query(CPACF_KMC, &kmc_functions);
	cpacf_query(CPACF_KMCTR, &kmctr_functions);
	cpacf_query(CPACF_KMA, &kma_functions);

	if (cpacf_test_func(&km_functions, CPACF_KM_AES_128) ||
	    cpacf_test_func(&km_functions, CPACF_KM_AES_192) ||
	    cpacf_test_func(&km_functions, CPACF_KM_AES_256)) {
		ret = crypto_register_alg(&aes_alg);
		if (ret)
			goto out_err;
		aes_s390_alg = &aes_alg;
		ret = aes_s390_register_skcipher(&ecb_aes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&kmc_functions, CPACF_KMC_AES_128) ||
	    cpacf_test_func(&kmc_functions, CPACF_KMC_AES_192) ||
	    cpacf_test_func(&kmc_functions, CPACF_KMC_AES_256)) {
		ret = aes_s390_register_skcipher(&cbc_aes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&km_functions, CPACF_KM_XTS_128) ||
	    cpacf_test_func(&km_functions, CPACF_KM_XTS_256)) {
		ret = aes_s390_register_skcipher(&xts_aes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&kmctr_functions, CPACF_KMCTR_AES_128) ||
	    cpacf_test_func(&kmctr_functions, CPACF_KMCTR_AES_192) ||
	    cpacf_test_func(&kmctr_functions, CPACF_KMCTR_AES_256)) {
		ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
		if (!ctrblk) {
			ret = -ENOMEM;
			goto out_err;
		}
		ret = aes_s390_register_skcipher(&ctr_aes_alg);
		if (ret)
			goto out_err;
	}

	if (cpacf_test_func(&kma_functions, CPACF_KMA_GCM_AES_128) ||
	    cpacf_test_func(&kma_functions, CPACF_KMA_GCM_AES_192) ||
	    cpacf_test_func(&kma_functions, CPACF_KMA_GCM_AES_256)) {
		ret = crypto_register_aead(&gcm_aes_aead);
		if (ret)
			goto out_err;
		aes_s390_aead_alg = &gcm_aes_aead;
	}

	return 0;
out_err:
	aes_s390_fini();
	return ret;
}

module_cpu_feature_match(MSA, aes_s390_init);
module_exit(aes_s390_fini);

MODULE_ALIAS_CRYPTO("aes-all");

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
MODULE_LICENSE("GPL");
MODULE_IMPORT_NS(CRYPTO_INTERNAL);