// SPDX-License-Identifier: GPL-2.0-only
/*
 * AES CTR routines supporting VMX instructions on the Power 8
 *
 * Copyright (C) 2015 International Business Machines Inc.
 *
 * Author: Marcelo Henrique Cerri <mhcerri@br.ibm.com>
 */

#include <asm/simd.h>
#include <asm/switch_to.h>
#include <crypto/aes.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>

#include "aesp8-ppc.h"

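/*
 * Per-tfm context: the expanded AES encryption key for the P8 assembly
 * routines, plus a software ctr(aes) fallback used whenever SIMD/VSX is
 * not usable (e.g. in interrupt context).
 */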
struct p8_aes_ctr_ctx {
	struct crypto_skcipher *fallback;
	struct aes_key enc_key;
};

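/*
 * Allocate the software fallback at tfm init time and size the request
 * context so a fallback subrequest can be embedded in our own request.
 */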
static int p8_aes_ctr_init(struct crypto_skcipher *tfm)
{
	struct p8_aes_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *fallback;

	fallback = crypto_alloc_skcipher("ctr(aes)", 0,
					 CRYPTO_ALG_NEED_FALLBACK |
					 CRYPTO_ALG_ASYNC);
	if (IS_ERR(fallback)) {
		pr_err("Failed to allocate ctr(aes) fallback: %ld\n",
		       PTR_ERR(fallback));
		return PTR_ERR(fallback);
	}

	crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
				    crypto_skcipher_reqsize(fallback));
	ctx->fallback = fallback;
	return 0;
}

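/* Free the fallback tfm allocated in p8_aes_ctr_init(). */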
static void p8_aes_ctr_exit(struct crypto_skcipher *tfm)
{
	struct p8_aes_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->fallback);
}

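/*
 * Expand the key for the P8 assembly routines and also set it on the
 * fallback tfm, so both code paths always hold the same key. Any use of
 * VSX must be bracketed by enable_kernel_vsx()/disable_kernel_vsx()
 * with preemption and page faults disabled.
 */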
static int p8_aes_ctr_setkey(struct crypto_skcipher *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct p8_aes_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
	int ret;

	preempt_disable();
	pagefault_disable();
	enable_kernel_vsx();
	ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
	disable_kernel_vsx();
	pagefault_enable();
	preempt_enable();

	ret |= crypto_skcipher_setkey(ctx->fallback, key, keylen);

	return ret ? -EINVAL : 0;
}

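/*
 * Handle the trailing partial block: encrypt the counter to produce one
 * keystream block, XOR just the remaining bytes into the output, and
 * bump the counter.
 */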
static void p8_aes_ctr_final(const struct p8_aes_ctr_ctx *ctx,
			     struct skcipher_walk *walk)
{
	u8 *ctrblk = walk->iv;
	u8 keystream[AES_BLOCK_SIZE];
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	preempt_disable();
	pagefault_disable();
	enable_kernel_vsx();
	aes_p8_encrypt(ctrblk, keystream, &ctx->enc_key);
	disable_kernel_vsx();
	pagefault_enable();
	preempt_enable();

	crypto_xor_cpy(dst, keystream, src, nbytes);
	crypto_inc(ctrblk, AES_BLOCK_SIZE);
}

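/*
 * CTR encryption and decryption are the same operation. When SIMD is not
 * usable, hand the whole request to the fallback; otherwise walk the
 * scatterlists and process full blocks with the P8 ctr32 routine. As its
 * name suggests, the assembly advances only a 32-bit counter internally,
 * so walk.iv is re-incremented manually, one full 128-bit crypto_inc()
 * per block, keeping the counter carry-correct for the next chunk.
 */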
static int p8_aes_ctr_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct p8_aes_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int ret;

	if (!crypto_simd_usable()) {
		struct skcipher_request *subreq = skcipher_request_ctx(req);

		*subreq = *req;
		skcipher_request_set_tfm(subreq, ctx->fallback);
		return crypto_skcipher_encrypt(subreq);
	}

	ret = skcipher_walk_virt(&walk, req, false);
	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
		preempt_disable();
		pagefault_disable();
		enable_kernel_vsx();
		aes_p8_ctr32_encrypt_blocks(walk.src.virt.addr,
					    walk.dst.virt.addr,
					    nbytes / AES_BLOCK_SIZE,
					    &ctx->enc_key, walk.iv);
		disable_kernel_vsx();
		pagefault_enable();
		preempt_enable();

		/* Advance the full 128-bit counter once per block processed. */
		do {
			crypto_inc(walk.iv, AES_BLOCK_SIZE);
		} while ((nbytes -= AES_BLOCK_SIZE) >= AES_BLOCK_SIZE);

		ret = skcipher_walk_done(&walk, nbytes);
	}
	if (nbytes) {
		p8_aes_ctr_final(ctx, &walk);
		ret = skcipher_walk_done(&walk, 0);
	}
	return ret;
}

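/*
 * CTR mode turns AES into a stream cipher, hence cra_blocksize = 1 with
 * chunksize = AES_BLOCK_SIZE advertising the underlying granularity. The
 * priority of 2000 makes this driver preferred over generic software
 * implementations when the hardware supports it.
 */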
struct skcipher_alg p8_aes_ctr_alg = {
	.base.cra_name = "ctr(aes)",
	.base.cra_driver_name = "p8_aes_ctr",
	.base.cra_module = THIS_MODULE,
	.base.cra_priority = 2000,
	.base.cra_flags = CRYPTO_ALG_NEED_FALLBACK,
	.base.cra_blocksize = 1,
	.base.cra_ctxsize = sizeof(struct p8_aes_ctr_ctx),
	.setkey = p8_aes_ctr_setkey,
	.encrypt = p8_aes_ctr_crypt,
	.decrypt = p8_aes_ctr_crypt,
	.init = p8_aes_ctr_init,
	.exit = p8_aes_ctr_exit,
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.chunksize = AES_BLOCK_SIZE,
};
150