// SPDX-License-Identifier: GPL-2.0
/*
 * Adiantum length-preserving encryption mode
 *
 * Copyright 2018 Google LLC
 */

/*
 * Adiantum is a tweakable, length-preserving encryption mode designed for fast
 * and secure disk encryption, especially on CPUs without dedicated crypto
 * instructions. Adiantum encrypts each sector using the XChaCha12 stream
 * cipher, two passes of an ε-almost-∆-universal (ε-∆U) hash function based on
 * NH and Poly1305, and an invocation of the AES-256 block cipher on a single
 * 16-byte block. See the paper for details:
 *
 *	Adiantum: length-preserving encryption for entry-level processors
 *	(https://eprint.iacr.org/2018/720.pdf)
 *
 * For flexibility, this implementation also allows other ciphers:
 *
 *	- Stream cipher: XChaCha12 or XChaCha20
 *	- Block cipher: any with a 128-bit block size and 256-bit key
 *
 * This implementation doesn't currently allow other ε-∆U hash functions, i.e.
 * HPolyC is not supported. This is because Adiantum is ~20% faster than HPolyC
 * but still provably as secure, and also the ε-∆U hash function of HBSH is
 * formally defined to take two inputs (tweak, message) which makes it
 * difficult to wrap with the crypto_shash API. Rather, some details need to be
 * handled here. Nevertheless, if needed in the future, support for other ε-∆U
 * hash functions could be added here.
 */

#include <crypto/b128ops.h>
#include <crypto/chacha.h>
#include <crypto/internal/cipher.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/poly1305.h>
#include <crypto/internal/skcipher.h>
#include <crypto/nhpoly1305.h>
#include <crypto/scatterwalk.h>
#include <linux/module.h>

/*
 * Size of the right-hand part of the input data, in bytes; this is also the
 * block cipher's block size and the size of the hash function's output.
 */
#define BLOCKCIPHER_BLOCK_SIZE		16

/* Size of the block cipher key (K_E) in bytes */
#define BLOCKCIPHER_KEY_SIZE		32

/* Size of the hash key (K_H) in bytes */
#define HASH_KEY_SIZE		(POLY1305_BLOCK_SIZE + NHPOLY1305_KEY_SIZE)

/*
 * The specification allows variable-length tweaks, but Linux's crypto API
 * currently only allows algorithms to support a single length. The "natural"
 * tweak length for Adiantum is 16, since that fits into one Poly1305 block for
 * the best performance. But longer tweaks are useful for fscrypt, to avoid
 * needing to derive per-file keys. So instead we use two blocks, or 32 bytes.
 */
#define TWEAK_SIZE		32

struct adiantum_instance_ctx {
	struct crypto_skcipher_spawn streamcipher_spawn;
	struct crypto_cipher_spawn blockcipher_spawn;
	struct crypto_shash_spawn hash_spawn;
};

struct adiantum_tfm_ctx {
	struct crypto_skcipher *streamcipher;
	struct crypto_cipher *blockcipher;
	struct crypto_shash *hash;
	struct poly1305_core_key header_hash_key;
};

struct adiantum_request_ctx {

	/*
	 * Buffer for the right-hand part of the data, i.e.
	 *
	 *    P_R => P_M => C_M => C_R when encrypting, or
	 *    C_R => C_M => P_M => P_R when decrypting.
	 *
	 * Also used to build the IV for the stream cipher.
	 */
	union {
		u8 bytes[XCHACHA_IV_SIZE];
		__le32 words[XCHACHA_IV_SIZE / sizeof(__le32)];
		le128 bignum;	/* interpret as element of Z/(2^{128}Z) */
	} rbuf;

	bool enc; /* true if encrypting, false if decrypting */

	/*
	 * The result of the Poly1305 ε-∆U hash function applied to
	 * (bulk length, tweak)
	 */
	le128 header_hash;

	/* Sub-requests, must be last */
	union {
		struct shash_desc hash_desc;
		struct skcipher_request streamcipher_req;
	} u;
};

/*
 * Given the XChaCha stream key K_S, derive the block cipher key K_E and the
 * hash key K_H as follows:
 *
 *     K_E || K_H || ... = XChaCha(key=K_S, nonce=1||0^191)
 *
 * Note that this denotes using bits from the XChaCha keystream, which here we
 * get indirectly by encrypting a buffer containing all 0's.
 */
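/*
 * For reference, the setkey code below consumes the keystream bytes in this
 * order:
 *
 *	K_E: first BLOCKCIPHER_KEY_SIZE bytes	(block cipher key)
 *	K_T: next POLY1305_BLOCK_SIZE bytes	(Poly1305 header hash key)
 *	     next NHPOLY1305_KEY_SIZE bytes	(NHPoly1305 key)
 *
 * K_T and the NHPoly1305 key together form K_H (HASH_KEY_SIZE bytes).
 */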
static int adiantum_setkey(struct crypto_skcipher *tfm, const u8 *key,
			   unsigned int keylen)
{
	struct adiantum_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct {
		u8 iv[XCHACHA_IV_SIZE];
		u8 derived_keys[BLOCKCIPHER_KEY_SIZE + HASH_KEY_SIZE];
		struct scatterlist sg;
		struct crypto_wait wait;
		struct skcipher_request req; /* must be last */
	} *data;
	u8 *keyp;
	int err;

	/* Set the stream cipher key (K_S) */
	crypto_skcipher_clear_flags(tctx->streamcipher, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(tctx->streamcipher,
				  crypto_skcipher_get_flags(tfm) &
				  CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(tctx->streamcipher, key, keylen);
	if (err)
		return err;

	/* Derive the subkeys */
	data = kzalloc(sizeof(*data) +
		       crypto_skcipher_reqsize(tctx->streamcipher), GFP_KERNEL);
	if (!data)
		return -ENOMEM;
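	/* Nonce = 1||0^191: set its first byte; kzalloc() zeroed the rest */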
	data->iv[0] = 1;
	sg_init_one(&data->sg, data->derived_keys, sizeof(data->derived_keys));
	crypto_init_wait(&data->wait);
	skcipher_request_set_tfm(&data->req, tctx->streamcipher);
	skcipher_request_set_callback(&data->req, CRYPTO_TFM_REQ_MAY_SLEEP |
						  CRYPTO_TFM_REQ_MAY_BACKLOG,
				      crypto_req_done, &data->wait);
	skcipher_request_set_crypt(&data->req, &data->sg, &data->sg,
				   sizeof(data->derived_keys), data->iv);
	err = crypto_wait_req(crypto_skcipher_encrypt(&data->req), &data->wait);
	if (err)
		goto out;
	keyp = data->derived_keys;

	/* Set the block cipher key (K_E) */
	crypto_cipher_clear_flags(tctx->blockcipher, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(tctx->blockcipher,
				crypto_skcipher_get_flags(tfm) &
				CRYPTO_TFM_REQ_MASK);
	err = crypto_cipher_setkey(tctx->blockcipher, keyp,
				   BLOCKCIPHER_KEY_SIZE);
	if (err)
		goto out;
	keyp += BLOCKCIPHER_KEY_SIZE;

	/* Set the hash key (K_H) */
	poly1305_core_setkey(&tctx->header_hash_key, keyp);
	keyp += POLY1305_BLOCK_SIZE;

	crypto_shash_clear_flags(tctx->hash, CRYPTO_TFM_REQ_MASK);
	crypto_shash_set_flags(tctx->hash, crypto_skcipher_get_flags(tfm) &
					   CRYPTO_TFM_REQ_MASK);
	err = crypto_shash_setkey(tctx->hash, keyp, NHPOLY1305_KEY_SIZE);
	keyp += NHPOLY1305_KEY_SIZE;
	WARN_ON(keyp != &data->derived_keys[ARRAY_SIZE(data->derived_keys)]);
out:
	kfree_sensitive(data);
	return err;
}

/* Addition in Z/(2^{128}Z) */
static inline void le128_add(le128 *r, const le128 *v1, const le128 *v2)
{
	u64 x = le64_to_cpu(v1->b);
	u64 y = le64_to_cpu(v2->b);

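	/* Add the low halves first; (x + y < x) is the carry into the high halves */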
	r->b = cpu_to_le64(x + y);
	r->a = cpu_to_le64(le64_to_cpu(v1->a) + le64_to_cpu(v2->a) +
			   (x + y < x));
}

/* Subtraction in Z/(2^{128}Z) */
static inline void le128_sub(le128 *r, const le128 *v1, const le128 *v2)
{
	u64 x = le64_to_cpu(v1->b);
	u64 y = le64_to_cpu(v2->b);

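	/* Subtract the low halves first; (x - y > x) is the borrow from the high halves */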
	r->b = cpu_to_le64(x - y);
	r->a = cpu_to_le64(le64_to_cpu(v1->a) - le64_to_cpu(v2->a) -
			   (x - y > x));
}

/*
 * Apply the Poly1305 ε-∆U hash function to (bulk length, tweak) and save the
 * result to rctx->header_hash. This is the calculation
 *
 *	H_T ← Poly1305_{K_T}(bin_{128}(|L|) || T)
 *
 * from the procedure in section 6.4 of the Adiantum paper. The resulting value
 * is reused in both the first and second hash steps. Specifically, it's added
 * to the result of an independently keyed ε-∆U hash function (for equal length
 * inputs only) taken over the left-hand part (the "bulk") of the message, to
 * give the overall Adiantum hash of the (tweak, left-hand part) pair.
 */
static void adiantum_hash_header(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct adiantum_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct adiantum_request_ctx *rctx = skcipher_request_ctx(req);
	const unsigned int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE;
	struct {
		__le64 message_bits;
		__le64 padding;
	} header = {
		.message_bits = cpu_to_le64((u64)bulk_len * 8)
	};
	struct poly1305_state state;

	poly1305_core_init(&state);

	BUILD_BUG_ON(sizeof(header) % POLY1305_BLOCK_SIZE != 0);
	poly1305_core_blocks(&state, &tctx->header_hash_key,
			     &header, sizeof(header) / POLY1305_BLOCK_SIZE, 1);

	BUILD_BUG_ON(TWEAK_SIZE % POLY1305_BLOCK_SIZE != 0);
	poly1305_core_blocks(&state, &tctx->header_hash_key, req->iv,
			     TWEAK_SIZE / POLY1305_BLOCK_SIZE, 1);

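	/* NULL nonce: emit the raw accumulator, omitting Poly1305's final nonce addition */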
	poly1305_core_emit(&state, NULL, &rctx->header_hash);
}

/* Hash the left-hand part (the "bulk") of the message using NHPoly1305 */
static int adiantum_hash_message(struct skcipher_request *req,
				 struct scatterlist *sgl, le128 *digest)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct adiantum_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct adiantum_request_ctx *rctx = skcipher_request_ctx(req);
	const unsigned int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE;
	struct shash_desc *hash_desc = &rctx->u.hash_desc;
	struct sg_mapping_iter miter;
	unsigned int i, n;
	int err;

	hash_desc->tfm = tctx->hash;

	err = crypto_shash_init(hash_desc);
	if (err)
		return err;

	sg_miter_start(&miter, sgl, sg_nents(sgl),
		       SG_MITER_FROM_SG | SG_MITER_ATOMIC);
	for (i = 0; i < bulk_len; i += n) {
		sg_miter_next(&miter);
		n = min_t(unsigned int, miter.length, bulk_len - i);
		err = crypto_shash_update(hash_desc, miter.addr, n);
		if (err)
			break;
	}
	sg_miter_stop(&miter);
	if (err)
		return err;

	return crypto_shash_final(hash_desc, (u8 *)digest);
}

/* Continue Adiantum encryption/decryption after the stream cipher step */
static int adiantum_finish(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct adiantum_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct adiantum_request_ctx *rctx = skcipher_request_ctx(req);
	const unsigned int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE;
	le128 digest;
	int err;

	/* If decrypting, decrypt C_M with the block cipher to get P_M */
	if (!rctx->enc)
		crypto_cipher_decrypt_one(tctx->blockcipher, rctx->rbuf.bytes,
					  rctx->rbuf.bytes);

	/*
	 * Second hash step
	 *	enc: C_R = C_M - H_{K_H}(T, C_L)
	 *	dec: P_R = P_M - H_{K_H}(T, P_L)
	 */
	err = adiantum_hash_message(req, req->dst, &digest);
	if (err)
		return err;
	le128_add(&digest, &digest, &rctx->header_hash);
	le128_sub(&rctx->rbuf.bignum, &rctx->rbuf.bignum, &digest);
	scatterwalk_map_and_copy(&rctx->rbuf.bignum, req->dst,
				 bulk_len, BLOCKCIPHER_BLOCK_SIZE, 1);
	return 0;
}

static void adiantum_streamcipher_done(struct crypto_async_request *areq,
				       int err)
{
	struct skcipher_request *req = areq->data;

	if (!err)
		err = adiantum_finish(req);

	skcipher_request_complete(req, err);
}

static int adiantum_crypt(struct skcipher_request *req, bool enc)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct adiantum_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct adiantum_request_ctx *rctx = skcipher_request_ctx(req);
	const unsigned int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE;
	unsigned int stream_len;
	le128 digest;
	int err;

	if (req->cryptlen < BLOCKCIPHER_BLOCK_SIZE)
		return -EINVAL;

	rctx->enc = enc;

	/*
	 * First hash step
	 *	enc: P_M = P_R + H_{K_H}(T, P_L)
	 *	dec: C_M = C_R + H_{K_H}(T, C_L)
	 */
	adiantum_hash_header(req);
	err = adiantum_hash_message(req, req->src, &digest);
	if (err)
		return err;
	le128_add(&digest, &digest, &rctx->header_hash);
	scatterwalk_map_and_copy(&rctx->rbuf.bignum, req->src,
				 bulk_len, BLOCKCIPHER_BLOCK_SIZE, 0);
	le128_add(&rctx->rbuf.bignum, &rctx->rbuf.bignum, &digest);

	/* If encrypting, encrypt P_M with the block cipher to get C_M */
	if (enc)
		crypto_cipher_encrypt_one(tctx->blockcipher, rctx->rbuf.bytes,
					  rctx->rbuf.bytes);

	/* Initialize the rest of the XChaCha IV (first part is C_M) */
	BUILD_BUG_ON(BLOCKCIPHER_BLOCK_SIZE != 16);
	BUILD_BUG_ON(XCHACHA_IV_SIZE != 32);	/* nonce || stream position */
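	/*
	 * rbuf.bytes[0..15] already hold C_M. words[4] and words[5] complete
	 * the 24-byte XChaCha nonce as C_M || 1 || 0^63; words[6] and words[7]
	 * are the 64-bit stream position, which starts at 0.
	 */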
	rctx->rbuf.words[4] = cpu_to_le32(1);
	rctx->rbuf.words[5] = 0;
	rctx->rbuf.words[6] = 0;
	rctx->rbuf.words[7] = 0;

	/*
	 * XChaCha needs to be done on all the data except the last 16 bytes;
	 * for disk encryption that usually means 4080 or 496 bytes. But ChaCha
	 * implementations tend to be most efficient when passed a whole number
	 * of 64-byte ChaCha blocks, or sometimes even a multiple of 256 bytes.
	 * And here it doesn't matter whether the last 16 bytes are written to,
	 * as the second hash step will overwrite them. Thus, round the XChaCha
	 * length up to the next 64-byte boundary if possible.
	 */
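	/*
	 * E.g., for a 4096-byte sector, bulk_len = 4080, which rounds up to
	 * 4096 = req->cryptlen, so the whole sector is passed to XChaCha.
	 */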
	stream_len = bulk_len;
	if (round_up(stream_len, CHACHA_BLOCK_SIZE) <= req->cryptlen)
		stream_len = round_up(stream_len, CHACHA_BLOCK_SIZE);

	skcipher_request_set_tfm(&rctx->u.streamcipher_req, tctx->streamcipher);
	skcipher_request_set_crypt(&rctx->u.streamcipher_req, req->src,
				   req->dst, stream_len, &rctx->rbuf);
	skcipher_request_set_callback(&rctx->u.streamcipher_req,
				      req->base.flags,
				      adiantum_streamcipher_done, req);
	return crypto_skcipher_encrypt(&rctx->u.streamcipher_req) ?:
	       adiantum_finish(req);
}

static int adiantum_encrypt(struct skcipher_request *req)
{
	return adiantum_crypt(req, true);
}

static int adiantum_decrypt(struct skcipher_request *req)
{
	return adiantum_crypt(req, false);
}

static int adiantum_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct adiantum_instance_ctx *ictx = skcipher_instance_ctx(inst);
	struct adiantum_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *streamcipher;
	struct crypto_cipher *blockcipher;
	struct crypto_shash *hash;
	unsigned int subreq_size;
	int err;

	streamcipher = crypto_spawn_skcipher(&ictx->streamcipher_spawn);
	if (IS_ERR(streamcipher))
		return PTR_ERR(streamcipher);

	blockcipher = crypto_spawn_cipher(&ictx->blockcipher_spawn);
	if (IS_ERR(blockcipher)) {
		err = PTR_ERR(blockcipher);
		goto err_free_streamcipher;
	}

	hash = crypto_spawn_shash(&ictx->hash_spawn);
	if (IS_ERR(hash)) {
		err = PTR_ERR(hash);
		goto err_free_blockcipher;
	}

	tctx->streamcipher = streamcipher;
	tctx->blockcipher = blockcipher;
	tctx->hash = hash;

	BUILD_BUG_ON(offsetofend(struct adiantum_request_ctx, u) !=
		     sizeof(struct adiantum_request_ctx));
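	/*
	 * The two sub-requests share a union, since the hash step and the
	 * stream cipher step are never in flight at the same time.
	 */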
	subreq_size = max(sizeof_field(struct adiantum_request_ctx,
				       u.hash_desc) +
			  crypto_shash_descsize(hash),
			  sizeof_field(struct adiantum_request_ctx,
				       u.streamcipher_req) +
			  crypto_skcipher_reqsize(streamcipher));

	crypto_skcipher_set_reqsize(tfm,
				    offsetof(struct adiantum_request_ctx, u) +
				    subreq_size);
	return 0;

err_free_blockcipher:
	crypto_free_cipher(blockcipher);
err_free_streamcipher:
	crypto_free_skcipher(streamcipher);
	return err;
}

static void adiantum_exit_tfm(struct crypto_skcipher *tfm)
{
	struct adiantum_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(tctx->streamcipher);
	crypto_free_cipher(tctx->blockcipher);
	crypto_free_shash(tctx->hash);
}

static void adiantum_free_instance(struct skcipher_instance *inst)
{
	struct adiantum_instance_ctx *ictx = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(&ictx->streamcipher_spawn);
	crypto_drop_cipher(&ictx->blockcipher_spawn);
	crypto_drop_shash(&ictx->hash_spawn);
	kfree(inst);
}

/*
 * Check for a supported set of inner algorithms.
 * See the comment at the beginning of this file.
 */
static bool adiantum_supported_algorithms(struct skcipher_alg *streamcipher_alg,
					  struct crypto_alg *blockcipher_alg,
					  struct shash_alg *hash_alg)
{
	if (strcmp(streamcipher_alg->base.cra_name, "xchacha12") != 0 &&
	    strcmp(streamcipher_alg->base.cra_name, "xchacha20") != 0)
		return false;

	if (blockcipher_alg->cra_cipher.cia_min_keysize > BLOCKCIPHER_KEY_SIZE ||
	    blockcipher_alg->cra_cipher.cia_max_keysize < BLOCKCIPHER_KEY_SIZE)
		return false;
	if (blockcipher_alg->cra_blocksize != BLOCKCIPHER_BLOCK_SIZE)
		return false;

	if (strcmp(hash_alg->base.cra_name, "nhpoly1305") != 0)
		return false;

	return true;
}

static int adiantum_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	u32 mask;
	const char *nhpoly1305_name;
	struct skcipher_instance *inst;
	struct adiantum_instance_ctx *ictx;
	struct skcipher_alg *streamcipher_alg;
	struct crypto_alg *blockcipher_alg;
	struct shash_alg *hash_alg;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask);
	if (err)
		return err;

	inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;
	ictx = skcipher_instance_ctx(inst);

	/* Stream cipher, e.g. "xchacha12" */
	err = crypto_grab_skcipher(&ictx->streamcipher_spawn,
				   skcipher_crypto_instance(inst),
				   crypto_attr_alg_name(tb[1]), 0, mask);
	if (err)
		goto err_free_inst;
	streamcipher_alg = crypto_spawn_skcipher_alg(&ictx->streamcipher_spawn);

	/* Block cipher, e.g. "aes" */
	err = crypto_grab_cipher(&ictx->blockcipher_spawn,
				 skcipher_crypto_instance(inst),
				 crypto_attr_alg_name(tb[2]), 0, mask);
	if (err)
		goto err_free_inst;
	blockcipher_alg = crypto_spawn_cipher_alg(&ictx->blockcipher_spawn);

	/* NHPoly1305 ε-∆U hash function */
	nhpoly1305_name = crypto_attr_alg_name(tb[3]);
	if (nhpoly1305_name == ERR_PTR(-ENOENT))
		nhpoly1305_name = "nhpoly1305";
	err = crypto_grab_shash(&ictx->hash_spawn,
				skcipher_crypto_instance(inst),
				nhpoly1305_name, 0, mask);
	if (err)
		goto err_free_inst;
	hash_alg = crypto_spawn_shash_alg(&ictx->hash_spawn);

	/* Check the set of algorithms */
	if (!adiantum_supported_algorithms(streamcipher_alg, blockcipher_alg,
					   hash_alg)) {
		pr_warn("Unsupported Adiantum instantiation: (%s,%s,%s)\n",
			streamcipher_alg->base.cra_name,
			blockcipher_alg->cra_name, hash_alg->base.cra_name);
		err = -EINVAL;
		goto err_free_inst;
	}

	/* Instance fields */

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "adiantum(%s,%s)", streamcipher_alg->base.cra_name,
		     blockcipher_alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;
	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "adiantum(%s,%s,%s)",
		     streamcipher_alg->base.cra_driver_name,
		     blockcipher_alg->cra_driver_name,
		     hash_alg->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;

	inst->alg.base.cra_blocksize = BLOCKCIPHER_BLOCK_SIZE;
	inst->alg.base.cra_ctxsize = sizeof(struct adiantum_tfm_ctx);
	inst->alg.base.cra_alignmask = streamcipher_alg->base.cra_alignmask |
				       hash_alg->base.cra_alignmask;
	/*
	 * The block cipher is only invoked once per message, so for long
	 * messages (e.g. sectors for disk encryption) its performance doesn't
	 * matter as much as that of the stream cipher and hash function. Thus,
	 * weigh the block cipher's ->cra_priority less.
	 */
	inst->alg.base.cra_priority = (4 * streamcipher_alg->base.cra_priority +
				       2 * hash_alg->base.cra_priority +
				       blockcipher_alg->cra_priority) / 7;

	inst->alg.setkey = adiantum_setkey;
	inst->alg.encrypt = adiantum_encrypt;
	inst->alg.decrypt = adiantum_decrypt;
	inst->alg.init = adiantum_init_tfm;
	inst->alg.exit = adiantum_exit_tfm;
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(streamcipher_alg);
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(streamcipher_alg);
	inst->alg.ivsize = TWEAK_SIZE;

	inst->free = adiantum_free_instance;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		adiantum_free_instance(inst);
	}
	return err;
}

/* adiantum(streamcipher_name, blockcipher_name [, nhpoly1305_name]) */
static struct crypto_template adiantum_tmpl = {
	.name			= "adiantum",
	.create			= adiantum_create,
	.module			= THIS_MODULE,
};
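
/*
 * Example usage (a sketch; error handling and freeing omitted). The
 * "adiantum(xchacha12,aes)" instantiation is the one recommended by the paper
 * and used e.g. by fscrypt:
 *
 *	struct crypto_skcipher *tfm =
 *		crypto_alloc_skcipher("adiantum(xchacha12,aes)", 0, 0);
 *	u8 key[32];	// K_S, the single 256-bit Adiantum key
 *
 *	get_random_bytes(key, sizeof(key));
 *	crypto_skcipher_setkey(tfm, key, sizeof(key));
 *
 * Requests then use a TWEAK_SIZE-byte IV (the tweak) and a cryptlen of at
 * least BLOCKCIPHER_BLOCK_SIZE, e.g. one request per 4096-byte sector.
 */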

static int __init adiantum_module_init(void)
{
	return crypto_register_template(&adiantum_tmpl);
}

static void __exit adiantum_module_exit(void)
{
	crypto_unregister_template(&adiantum_tmpl);
}

subsys_initcall(adiantum_module_init);
module_exit(adiantum_module_exit);

MODULE_DESCRIPTION("Adiantum length-preserving encryption mode");
MODULE_LICENSE("GPL v2");
MODULE_AUTHOR("Eric Biggers <ebiggers@google.com>");
MODULE_ALIAS_CRYPTO("adiantum");
MODULE_IMPORT_NS(CRYPTO_INTERNAL);