// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Shared glue code for 128bit block ciphers
 *
 * Copyright © 2012-2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
 *
 * CBC & ECB parts based on code (crypto/cbc.c,ecb.c) by:
 *   Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 * CTR part based on code (crypto/ctr.c) by:
 *   (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 */

#include <linux/module.h>
#include <crypto/b128ops.h>
#include <crypto/gf128mul.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <crypto/xts.h>
#include <asm/crypto/glue_helper.h>

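/*
 * ECB: walk the request and hand each contiguous chunk to the widest batch
 * routine that still fits; the loop below assumes gctx->funcs[] is ordered
 * from the largest num_blocks down to a single block. The FPU is enabled
 * lazily via glue_fpu_begin() once a chunk clears gctx->fpu_blocks_limit.
 */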
int glue_ecb_req_128bit(const struct common_glue_ctx *gctx,
			struct skcipher_request *req)
{
	void *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	const unsigned int bsize = 128 / 8;
	struct skcipher_walk walk;
	bool fpu_enabled = false;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes)) {
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;
		unsigned int func_bytes;
		unsigned int i;

		fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
					     &walk, fpu_enabled, nbytes);
		for (i = 0; i < gctx->num_funcs; i++) {
			func_bytes = bsize * gctx->funcs[i].num_blocks;

			if (nbytes < func_bytes)
				continue;

			/* Process multi-block batch */
			do {
				gctx->funcs[i].fn_u.ecb(ctx, dst, src);
				src += func_bytes;
				dst += func_bytes;
				nbytes -= func_bytes;
			} while (nbytes >= func_bytes);

			if (nbytes < bsize)
				break;
		}
		err = skcipher_walk_done(&walk, nbytes);
	}

	glue_fpu_end(fpu_enabled);
	return err;
}
EXPORT_SYMBOL_GPL(glue_ecb_req_128bit);

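/*
 * CBC encryption is inherently sequential (each block chains the previous
 * ciphertext), so only a single-block cipher @fn is used: XOR the plaintext
 * with the running IV, encrypt in place, then carry the last ciphertext
 * block back into walk.iv for the next walk step.
 */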
int glue_cbc_encrypt_req_128bit(const common_glue_func_t fn,
				struct skcipher_request *req)
{
	void *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	const unsigned int bsize = 128 / 8;
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes)) {
		const u128 *src = (u128 *)walk.src.virt.addr;
		u128 *dst = (u128 *)walk.dst.virt.addr;
		u128 *iv = (u128 *)walk.iv;

		do {
			u128_xor(dst, src, iv);
			fn(ctx, (u8 *)dst, (u8 *)dst);
			iv = dst;
			src++;
			dst++;
			nbytes -= bsize;
		} while (nbytes >= bsize);

		*(u128 *)walk.iv = *iv;
		err = skcipher_walk_done(&walk, nbytes);
	}
	return err;
}
EXPORT_SYMBOL_GPL(glue_cbc_encrypt_req_128bit);

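/*
 * CBC decryption parallelises: each walk chunk is processed from its last
 * block backwards so the multi-block batch routines can decrypt in place,
 * after which every block is XORed with the preceding ciphertext block
 * (the original IV for the very first one). The chunk's last ciphertext
 * block is saved up front so it can become the IV for the next chunk.
 */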
int glue_cbc_decrypt_req_128bit(const struct common_glue_ctx *gctx,
				struct skcipher_request *req)
{
	void *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	const unsigned int bsize = 128 / 8;
	struct skcipher_walk walk;
	bool fpu_enabled = false;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes)) {
		const u128 *src = walk.src.virt.addr;
		u128 *dst = walk.dst.virt.addr;
		unsigned int func_bytes, num_blocks;
		unsigned int i;
		u128 last_iv;

		fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
					     &walk, fpu_enabled, nbytes);
		/* Start of the last block. */
		src += nbytes / bsize - 1;
		dst += nbytes / bsize - 1;

		last_iv = *src;

		for (i = 0; i < gctx->num_funcs; i++) {
			num_blocks = gctx->funcs[i].num_blocks;
			func_bytes = bsize * num_blocks;

			if (nbytes < func_bytes)
				continue;

			/* Process multi-block batch */
			do {
				src -= num_blocks - 1;
				dst -= num_blocks - 1;

				gctx->funcs[i].fn_u.cbc(ctx, (u8 *)dst,
							(const u8 *)src);

				nbytes -= func_bytes;
				if (nbytes < bsize)
					goto done;

				u128_xor(dst, dst, --src);
				dst--;
			} while (nbytes >= func_bytes);
		}
done:
		u128_xor(dst, dst, (u128 *)walk.iv);
		*(u128 *)walk.iv = last_iv;
		err = skcipher_walk_done(&walk, nbytes);
	}

	glue_fpu_end(fpu_enabled);
	return err;
}
EXPORT_SYMBOL_GPL(glue_cbc_decrypt_req_128bit);

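/*
 * CTR: the big-endian counter in walk.iv is converted to a le128 once per
 * walk chunk, advanced by the batch routines as they generate keystream,
 * and written back. A trailing partial block is handled after the main
 * loop by running the last funcs[] entry (expected to handle a single
 * block) on a stack buffer.
 */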
int glue_ctr_req_128bit(const struct common_glue_ctx *gctx,
			struct skcipher_request *req)
{
	void *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
	const unsigned int bsize = 128 / 8;
	struct skcipher_walk walk;
	bool fpu_enabled = false;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes) >= bsize) {
		const u128 *src = walk.src.virt.addr;
		u128 *dst = walk.dst.virt.addr;
		unsigned int func_bytes, num_blocks;
		unsigned int i;
		le128 ctrblk;

		fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
					     &walk, fpu_enabled, nbytes);

		be128_to_le128(&ctrblk, (be128 *)walk.iv);

		for (i = 0; i < gctx->num_funcs; i++) {
			num_blocks = gctx->funcs[i].num_blocks;
			func_bytes = bsize * num_blocks;

			if (nbytes < func_bytes)
				continue;

			/* Process multi-block batch */
			do {
				gctx->funcs[i].fn_u.ctr(ctx, (u8 *)dst,
							(const u8 *)src,
							&ctrblk);
				src += num_blocks;
				dst += num_blocks;
				nbytes -= func_bytes;
			} while (nbytes >= func_bytes);

			if (nbytes < bsize)
				break;
		}

		le128_to_be128((be128 *)walk.iv, &ctrblk);
		err = skcipher_walk_done(&walk, nbytes);
	}

	glue_fpu_end(fpu_enabled);

	if (nbytes) {
		le128 ctrblk;
		u128 tmp;

		be128_to_le128(&ctrblk, (be128 *)walk.iv);
		memcpy(&tmp, walk.src.virt.addr, nbytes);
		gctx->funcs[gctx->num_funcs - 1].fn_u.ctr(ctx, (u8 *)&tmp,
							  (const u8 *)&tmp,
							  &ctrblk);
		memcpy(walk.dst.virt.addr, &tmp, nbytes);
		le128_to_be128((be128 *)walk.iv, &ctrblk);

		err = skcipher_walk_done(&walk, 0);
	}

	return err;
}
EXPORT_SYMBOL_GPL(glue_ctr_req_128bit);

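/*
 * One XTS walk chunk: feed as much as possible to each batch routine in
 * turn (widest first); the fn_u.xts callbacks advance the tweak held in
 * walk->iv themselves. Returns the bytes left over (less than one block).
 */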
static unsigned int __glue_xts_req_128bit(const struct common_glue_ctx *gctx,
					  void *ctx,
					  struct skcipher_walk *walk)
{
	const unsigned int bsize = 128 / 8;
	unsigned int nbytes = walk->nbytes;
	u128 *src = walk->src.virt.addr;
	u128 *dst = walk->dst.virt.addr;
	unsigned int num_blocks, func_bytes;
	unsigned int i;

	/* Process multi-block batch */
	for (i = 0; i < gctx->num_funcs; i++) {
		num_blocks = gctx->funcs[i].num_blocks;
		func_bytes = bsize * num_blocks;

		if (nbytes >= func_bytes) {
			do {
				gctx->funcs[i].fn_u.xts(ctx, (u8 *)dst,
							(const u8 *)src,
							walk->iv);

				src += num_blocks;
				dst += num_blocks;
				nbytes -= func_bytes;
			} while (nbytes >= func_bytes);

			if (nbytes < bsize)
				goto done;
		}
	}

done:
	return nbytes;
}

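/*
 * Full XTS request: encrypt the IV with @tweak_fn/@tweak_ctx to get the
 * first tweak, then process the data one walk chunk at a time. When the
 * request length is not a multiple of XTS_BLOCK_SIZE, ciphertext stealing
 * is used: the bulk pass is shrunk to the aligned prefix via subreq, and
 * the final partial block borrows ciphertext from the preceding full
 * block, with the order of the last two tweaks swapped for decryption.
 */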
int glue_xts_req_128bit(const struct common_glue_ctx *gctx,
			struct skcipher_request *req,
			common_glue_func_t tweak_fn, void *tweak_ctx,
			void *crypt_ctx, bool decrypt)
{
	const bool cts = (req->cryptlen % XTS_BLOCK_SIZE);
	const unsigned int bsize = 128 / 8;
	struct skcipher_request subreq;
	struct skcipher_walk walk;
	bool fpu_enabled = false;
	unsigned int nbytes, tail;
	int err;

	if (req->cryptlen < XTS_BLOCK_SIZE)
		return -EINVAL;

	if (unlikely(cts)) {
		struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);

		tail = req->cryptlen % XTS_BLOCK_SIZE + XTS_BLOCK_SIZE;

		skcipher_request_set_tfm(&subreq, tfm);
		skcipher_request_set_callback(&subreq,
					      crypto_skcipher_get_flags(tfm),
					      NULL, NULL);
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   req->cryptlen - tail, req->iv);
		req = &subreq;
	}

	err = skcipher_walk_virt(&walk, req, false);
	nbytes = walk.nbytes;
	if (err)
		return err;

	/* set minimum length to bsize, for tweak_fn */
	fpu_enabled = glue_fpu_begin(bsize, gctx->fpu_blocks_limit,
				     &walk, fpu_enabled,
				     nbytes < bsize ? bsize : nbytes);

	/* calculate first value of T */
	tweak_fn(tweak_ctx, walk.iv, walk.iv);

	while (nbytes) {
		nbytes = __glue_xts_req_128bit(gctx, crypt_ctx, &walk);

		err = skcipher_walk_done(&walk, nbytes);
		nbytes = walk.nbytes;
	}

	if (unlikely(cts)) {
		u8 *next_tweak, *final_tweak = req->iv;
		struct scatterlist *src, *dst;
		struct scatterlist s[2], d[2];
		le128 b[2];

		dst = src = scatterwalk_ffwd(s, req->src, req->cryptlen);
		if (req->dst != req->src)
			dst = scatterwalk_ffwd(d, req->dst, req->cryptlen);

		if (decrypt) {
			next_tweak = memcpy(b, req->iv, XTS_BLOCK_SIZE);
			gf128mul_x_ble(b, b);
		} else {
			next_tweak = req->iv;
		}

		skcipher_request_set_crypt(&subreq, src, dst, XTS_BLOCK_SIZE,
					   next_tweak);

		err = skcipher_walk_virt(&walk, req, false) ?:
		      skcipher_walk_done(&walk,
				__glue_xts_req_128bit(gctx, crypt_ctx, &walk));
		if (err)
			goto out;

		scatterwalk_map_and_copy(b, dst, 0, XTS_BLOCK_SIZE, 0);
		memcpy(b + 1, b, tail - XTS_BLOCK_SIZE);
		scatterwalk_map_and_copy(b, src, XTS_BLOCK_SIZE,
					 tail - XTS_BLOCK_SIZE, 0);
		scatterwalk_map_and_copy(b, dst, 0, tail, 1);

		skcipher_request_set_crypt(&subreq, dst, dst, XTS_BLOCK_SIZE,
					   final_tweak);

		err = skcipher_walk_virt(&walk, req, false) ?:
		      skcipher_walk_done(&walk,
				__glue_xts_req_128bit(gctx, crypt_ctx, &walk));
	}

out:
	glue_fpu_end(fpu_enabled);

	return err;
}
EXPORT_SYMBOL_GPL(glue_xts_req_128bit);

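/*
 * One XTS block: save the current tweak, advance *iv by a multiplication
 * by x in GF(2^128) (gf128mul_x_ble()) for the caller's next block, then
 * XOR @src with the saved tweak, run @fn in place, and XOR again.
 */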
void glue_xts_crypt_128bit_one(const void *ctx, u8 *dst, const u8 *src,
			       le128 *iv, common_glue_func_t fn)
{
	le128 ivblk = *iv;

	/* generate next IV */
	gf128mul_x_ble(iv, &ivblk);

	/* CC <- T xor C */
	u128_xor((u128 *)dst, (const u128 *)src, (u128 *)&ivblk);

	/* PP <- D(Key2,CC) */
	fn(ctx, dst, dst);

	/* P <- T xor PP */
	u128_xor((u128 *)dst, (u128 *)dst, (u128 *)&ivblk);
}
EXPORT_SYMBOL_GPL(glue_xts_crypt_128bit_one);

MODULE_LICENSE("GPL");