// SPDX-License-Identifier: GPL-2.0-or-later
/* XTS: as defined in IEEE1619/D16
 *	http://grouper.ieee.org/groups/1619/email/pdf00086.pdf
 *
 * Copyright (c) 2007 Rik Snel <rsnel@cube.dyndns.org>
 *
 * Based on ecb.c
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */
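/*
 * Usage, as a hedged sketch only (error handling elided; src_sg/dst_sg
 * are caller-provided scatterlists, and the buffer sizes are
 * illustrative):
 *
 *	struct crypto_skcipher *tfm;
 *	struct skcipher_request *req;
 *	u8 key[64];	// Key1 || Key2, e.g. two AES-256 keys
 *	u8 iv[16];	// per-data-unit tweak, e.g. a sector number
 *
 *	tfm = crypto_alloc_skcipher("xts(aes)", 0, 0);
 *	crypto_skcipher_setkey(tfm, key, sizeof(key));
 *	req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *	skcipher_request_set_crypt(req, src_sg, dst_sg, nbytes, iv);
 *	crypto_skcipher_encrypt(req);	// or crypto_skcipher_decrypt()
 *	skcipher_request_free(req);
 *	crypto_free_skcipher(tfm);
 */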
#include <crypto/internal/cipher.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

#include <crypto/xts.h>
#include <crypto/b128ops.h>
#include <crypto/gf128mul.h>

struct xts_tfm_ctx {
	struct crypto_skcipher *child;
	struct crypto_cipher *tweak;
};

struct xts_instance_ctx {
	struct crypto_skcipher_spawn spawn;
	char name[CRYPTO_MAX_ALG_NAME];
};

struct xts_request_ctx {
	le128 t;
	struct scatterlist *tail;
	struct scatterlist sg[2];
	struct skcipher_request subreq;
};

static int xts_setkey(struct crypto_skcipher *parent, const u8 *key,
		      unsigned int keylen)
{
	struct xts_tfm_ctx *ctx = crypto_skcipher_ctx(parent);
	struct crypto_skcipher *child;
	struct crypto_cipher *tweak;
	int err;

	err = xts_verify_key(parent, key, keylen);
	if (err)
		return err;

	keylen /= 2;

	/* we need two cipher instances: one to compute the initial 'tweak'
	 * by encrypting the IV (usually the 'plain' iv) and the other
	 * one to encrypt and decrypt the data */

	/* tweak cipher, uses Key2 i.e. the second half of *key */
	tweak = ctx->tweak;
	crypto_cipher_clear_flags(tweak, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(tweak, crypto_skcipher_get_flags(parent) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_cipher_setkey(tweak, key + keylen, keylen);
	if (err)
		return err;

	/* data cipher, uses Key1 i.e. the first half of *key */
	child = ctx->child;
	crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(parent) &
					 CRYPTO_TFM_REQ_MASK);
	return crypto_skcipher_setkey(child, key, keylen);
}
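/*
 * Key layout accepted above, sketched for a 64-byte "xts(aes)" key
 * (xts_verify_key() enforces the even split and, in FIPS mode, that the
 * two halves differ):
 *
 *	key[0..31]  -> Key1, fed to the child skcipher for the data
 *	key[32..63] -> Key2, fed to the bare cipher that encrypts the IV
 */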

/*
 * We compute the tweak masks twice (both before and after the ECB encryption
 * or decryption) to avoid having to allocate a temporary buffer and/or make
 * multiple calls to the 'ecb(..)' instance, which usually would be slower
 * than just doing the gf128mul_x_ble() calls again.
 */
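/*
 * For reference, the XTS composition implemented here (IEEE 1619):
 *
 *	T_0     = E_Key2(IV)
 *	T_(j+1) = T_j * x		(gf128mul_x_ble() in GF(2^128))
 *	C_j     = E_Key1(P_j ^ T_j) ^ T_j
 *
 * One xts_xor_tweak() pass applies the "^ T_j" terms on one side of the
 * single ecb(..) request.  The IV is typically a data-unit number; for
 * instance, dm-crypt's "plain64" IV is the 64-bit little-endian sector
 * number zero-padded to 16 bytes.
 */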
static int xts_xor_tweak(struct skcipher_request *req, bool second_pass,
			 bool enc)
{
	struct xts_request_ctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const bool cts = (req->cryptlen % XTS_BLOCK_SIZE);
	const int bs = XTS_BLOCK_SIZE;
	struct skcipher_walk w;
	le128 t = rctx->t;
	int err;

	if (second_pass) {
		req = &rctx->subreq;
		/* set to our TFM to enforce correct alignment: */
		skcipher_request_set_tfm(req, tfm);
	}
	err = skcipher_walk_virt(&w, req, false);

	while (w.nbytes) {
		unsigned int avail = w.nbytes;
		le128 *wsrc;
		le128 *wdst;

		wsrc = w.src.virt.addr;
		wdst = w.dst.virt.addr;

		do {
			if (unlikely(cts) &&
			    w.total - w.nbytes + avail < 2 * XTS_BLOCK_SIZE) {
				if (!enc) {
					if (second_pass)
						rctx->t = t;
					gf128mul_x_ble(&t, &t);
				}
				le128_xor(wdst, &t, wsrc);
				if (enc && second_pass)
					gf128mul_x_ble(&rctx->t, &t);
				skcipher_walk_done(&w, avail - bs);
				return 0;
			}

			le128_xor(wdst++, &t, wsrc++);
			gf128mul_x_ble(&t, &t);
		} while ((avail -= bs) >= bs);

		err = skcipher_walk_done(&w, avail);
	}

	return err;
}
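/*
 * A worked example of the tweak update (a sketch of what gf128mul_x_ble()
 * computes, ignoring the little-endian block layout the 'ble' helpers
 * use): multiplying by x in GF(2^128) shifts the 128-bit value left by
 * one bit, and a carry out of bit 127 is reduced with the XTS polynomial
 * x^128 + x^7 + x^2 + x + 1, i.e. XORed back into the low byte as 0x87:
 *
 *	t     = 0x80000000_00000000_00000000_00000001
 *	t * x = 0x00000000_00000000_00000000_00000002 ^ 0x87
 *	      = 0x00000000_00000000_00000000_00000085
 */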

static int xts_xor_tweak_pre(struct skcipher_request *req, bool enc)
{
	return xts_xor_tweak(req, false, enc);
}

static int xts_xor_tweak_post(struct skcipher_request *req, bool enc)
{
	return xts_xor_tweak(req, true, enc);
}

static void xts_cts_done(struct crypto_async_request *areq, int err)
{
	struct skcipher_request *req = areq->data;
	le128 b;

	if (!err) {
		struct xts_request_ctx *rctx = skcipher_request_ctx(req);

		scatterwalk_map_and_copy(&b, rctx->tail, 0, XTS_BLOCK_SIZE, 0);
		le128_xor(&b, &rctx->t, &b);
		scatterwalk_map_and_copy(&b, rctx->tail, 0, XTS_BLOCK_SIZE, 1);
	}

	skcipher_request_complete(req, err);
}

static int xts_cts_final(struct skcipher_request *req,
			 int (*crypt)(struct skcipher_request *req))
{
	const struct xts_tfm_ctx *ctx =
		crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	int offset = req->cryptlen & ~(XTS_BLOCK_SIZE - 1);
	struct xts_request_ctx *rctx = skcipher_request_ctx(req);
	struct skcipher_request *subreq = &rctx->subreq;
	int tail = req->cryptlen % XTS_BLOCK_SIZE;
	le128 b[2];
	int err;

	rctx->tail = scatterwalk_ffwd(rctx->sg, req->dst,
				      offset - XTS_BLOCK_SIZE);

	scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE, 0);
	b[1] = b[0];
	scatterwalk_map_and_copy(b, req->src, offset, tail, 0);

	le128_xor(b, &rctx->t, b);

	scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE + tail, 1);

	skcipher_request_set_tfm(subreq, ctx->child);
	skcipher_request_set_callback(subreq, req->base.flags, xts_cts_done,
				      req);
	skcipher_request_set_crypt(subreq, rctx->tail, rctx->tail,
				   XTS_BLOCK_SIZE, NULL);

	err = crypt(subreq);
	if (err)
		return err;

	scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE, 0);
	le128_xor(b, &rctx->t, b);
	scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE, 1);

	return 0;
}
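/*
 * Ciphertext-stealing layout handled above, sketched for encryption of m
 * full blocks plus a `tail`-byte remainder (decryption mirrors this, with
 * the tweak order adjusted in xts_xor_tweak()):
 *
 *	CC      = E(T_{m-1}, P_{m-1})	(already done by the bulk pass)
 *	C_m     = CC[0 .. tail-1]	(the stolen partial block)
 *	C_{m-1} = E(T_m, P_m || CC[tail .. 15])	(the subreq issued here)
 */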

static void xts_encrypt_done(struct crypto_async_request *areq, int err)
{
	struct skcipher_request *req = areq->data;

	if (!err) {
		struct xts_request_ctx *rctx = skcipher_request_ctx(req);

		rctx->subreq.base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
		err = xts_xor_tweak_post(req, true);

		if (!err && unlikely(req->cryptlen % XTS_BLOCK_SIZE)) {
			err = xts_cts_final(req, crypto_skcipher_encrypt);
			if (err == -EINPROGRESS)
				return;
		}
	}

	skcipher_request_complete(req, err);
}

static void xts_decrypt_done(struct crypto_async_request *areq, int err)
{
	struct skcipher_request *req = areq->data;

	if (!err) {
		struct xts_request_ctx *rctx = skcipher_request_ctx(req);

		rctx->subreq.base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
		err = xts_xor_tweak_post(req, false);

		if (!err && unlikely(req->cryptlen % XTS_BLOCK_SIZE)) {
			err = xts_cts_final(req, crypto_skcipher_decrypt);
			if (err == -EINPROGRESS)
				return;
		}
	}

	skcipher_request_complete(req, err);
}

static int xts_init_crypt(struct skcipher_request *req,
			  crypto_completion_t compl)
{
	const struct xts_tfm_ctx *ctx =
		crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	struct xts_request_ctx *rctx = skcipher_request_ctx(req);
	struct skcipher_request *subreq = &rctx->subreq;

	if (req->cryptlen < XTS_BLOCK_SIZE)
		return -EINVAL;

	skcipher_request_set_tfm(subreq, ctx->child);
	skcipher_request_set_callback(subreq, req->base.flags, compl, req);
	skcipher_request_set_crypt(subreq, req->dst, req->dst,
				   req->cryptlen & ~(XTS_BLOCK_SIZE - 1), NULL);

	/* calculate first value of T */
	crypto_cipher_encrypt_one(ctx->tweak, (u8 *)&rctx->t, req->iv);

	return 0;
}

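/*
 * xts_encrypt()/xts_decrypt() below chain their steps with the GNU
 * "a ?: b" extension: every step returns 0 on success, so the first
 * nonzero value (an error, or -EINPROGRESS from an async child)
 * short-circuits the rest and the completion callback takes over.  In
 * the synchronous case the order is: set up T and the subrequest, first
 * tweak-XOR pass, one ecb(..) pass over the full blocks, second
 * tweak-XOR pass, then ciphertext stealing for any partial tail.
 */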
static int xts_encrypt(struct skcipher_request *req)
{
	struct xts_request_ctx *rctx = skcipher_request_ctx(req);
	struct skcipher_request *subreq = &rctx->subreq;
	int err;

	err = xts_init_crypt(req, xts_encrypt_done) ?:
	      xts_xor_tweak_pre(req, true) ?:
	      crypto_skcipher_encrypt(subreq) ?:
	      xts_xor_tweak_post(req, true);

	if (err || likely((req->cryptlen % XTS_BLOCK_SIZE) == 0))
		return err;

	return xts_cts_final(req, crypto_skcipher_encrypt);
}

static int xts_decrypt(struct skcipher_request *req)
{
	struct xts_request_ctx *rctx = skcipher_request_ctx(req);
	struct skcipher_request *subreq = &rctx->subreq;
	int err;

	err = xts_init_crypt(req, xts_decrypt_done) ?:
	      xts_xor_tweak_pre(req, false) ?:
	      crypto_skcipher_decrypt(subreq) ?:
	      xts_xor_tweak_post(req, false);

	if (err || likely((req->cryptlen % XTS_BLOCK_SIZE) == 0))
		return err;

	return xts_cts_final(req, crypto_skcipher_decrypt);
}

static int xts_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct xts_instance_ctx *ictx = skcipher_instance_ctx(inst);
	struct xts_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *child;
	struct crypto_cipher *tweak;

	child = crypto_spawn_skcipher(&ictx->spawn);
	if (IS_ERR(child))
		return PTR_ERR(child);

	ctx->child = child;

	tweak = crypto_alloc_cipher(ictx->name, 0, 0);
	if (IS_ERR(tweak)) {
		crypto_free_skcipher(ctx->child);
		return PTR_ERR(tweak);
	}

	ctx->tweak = tweak;

	crypto_skcipher_set_reqsize(tfm, crypto_skcipher_reqsize(child) +
					 sizeof(struct xts_request_ctx));

	return 0;
}

static void xts_exit_tfm(struct crypto_skcipher *tfm)
{
	struct xts_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->child);
	crypto_free_cipher(ctx->tweak);
}

static void xts_free_instance(struct skcipher_instance *inst)
{
	struct xts_instance_ctx *ictx = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(&ictx->spawn);
	kfree(inst);
}

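/*
 * Instantiation, sketched: a request for "xts(foo)" first tries to grab
 * "foo" itself as an skcipher and, on -ENOENT, retries with "ecb(foo)".
 * The spawn must have a 16-byte block size and no IV of its own, and the
 * resulting instance advertises doubled key sizes since an XTS key is
 * Key1 || Key2.
 */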
static int xts_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	struct xts_instance_ctx *ctx;
	struct skcipher_alg *alg;
	const char *cipher_name;
	u32 mask;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask);
	if (err)
		return err;

	cipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(cipher_name))
		return PTR_ERR(cipher_name);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = skcipher_instance_ctx(inst);

	err = crypto_grab_skcipher(&ctx->spawn, skcipher_crypto_instance(inst),
				   cipher_name, 0, mask);
	if (err == -ENOENT) {
		err = -ENAMETOOLONG;
		if (snprintf(ctx->name, CRYPTO_MAX_ALG_NAME, "ecb(%s)",
			     cipher_name) >= CRYPTO_MAX_ALG_NAME)
			goto err_free_inst;

		err = crypto_grab_skcipher(&ctx->spawn,
					   skcipher_crypto_instance(inst),
					   ctx->name, 0, mask);
	}

	if (err)
		goto err_free_inst;

	alg = crypto_skcipher_spawn_alg(&ctx->spawn);

	err = -EINVAL;
	if (alg->base.cra_blocksize != XTS_BLOCK_SIZE)
		goto err_free_inst;

	if (crypto_skcipher_alg_ivsize(alg))
		goto err_free_inst;

	err = crypto_inst_setname(skcipher_crypto_instance(inst), "xts",
				  &alg->base);
	if (err)
		goto err_free_inst;

	err = -EINVAL;
	cipher_name = alg->base.cra_name;

	/* Alas we screwed up the naming so we have to mangle the
	 * cipher name.
	 */
	if (!strncmp(cipher_name, "ecb(", 4)) {
		unsigned len;

		len = strlcpy(ctx->name, cipher_name + 4, sizeof(ctx->name));
		if (len < 2 || len >= sizeof(ctx->name))
			goto err_free_inst;

		if (ctx->name[len - 1] != ')')
			goto err_free_inst;

		ctx->name[len - 1] = 0;

		if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
			     "xts(%s)", ctx->name) >= CRYPTO_MAX_ALG_NAME) {
			err = -ENAMETOOLONG;
			goto err_free_inst;
		}
	} else
		goto err_free_inst;

	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = XTS_BLOCK_SIZE;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask |
				       (__alignof__(u64) - 1);

	inst->alg.ivsize = XTS_BLOCK_SIZE;
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg) * 2;
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg) * 2;

	inst->alg.base.cra_ctxsize = sizeof(struct xts_tfm_ctx);

	inst->alg.init = xts_init_tfm;
	inst->alg.exit = xts_exit_tfm;

	inst->alg.setkey = xts_setkey;
	inst->alg.encrypt = xts_encrypt;
	inst->alg.decrypt = xts_decrypt;

	inst->free = xts_free_instance;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		xts_free_instance(inst);
	}
	return err;
}

static struct crypto_template xts_tmpl = {
	.name = "xts",
	.create = xts_create,
	.module = THIS_MODULE,
};

static int __init xts_module_init(void)
{
	return crypto_register_template(&xts_tmpl);
}

static void __exit xts_module_exit(void)
{
	crypto_unregister_template(&xts_tmpl);
}

subsys_initcall(xts_module_init);
module_exit(xts_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("XTS block cipher mode");
MODULE_ALIAS_CRYPTO("xts");
MODULE_IMPORT_NS(CRYPTO_INTERNAL);