// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API.
 *
 * TEA, XTEA, and XETA crypto algorithms
 *
 * The TEA and Xtended TEA algorithms were developed by David Wheeler
 * and Roger Needham at the Computer Laboratory of Cambridge University.
 *
 * Due to the order of evaluation in XTEA, many people have implemented
 * it incorrectly. XETA (XTEA in the wrong order) exists for
 * compatibility with these implementations.
 *
 * Copyright (c) 2004 Aaron Grothe ajgrothe@yahoo.com
 */

#include <linux/init.h>
#include <linux/module.h>
#include <linux/mm.h>
#include <asm/byteorder.h>
#include <linux/crypto.h>
#include <linux/types.h>

#define TEA_KEY_SIZE		16
#define TEA_BLOCK_SIZE		8
#define TEA_ROUNDS		32
#define TEA_DELTA		0x9e3779b9

#define XTEA_KEY_SIZE		16
#define XTEA_BLOCK_SIZE		8
#define XTEA_ROUNDS		32
#define XTEA_DELTA		0x9e3779b9

struct tea_ctx {
	u32 KEY[4];
};

struct xtea_ctx {
	u32 KEY[4];
};

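/*
 * Load the fixed 128-bit key as four little-endian 32-bit words.
 * key_len is constrained to TEA_KEY_SIZE by cia_min_keysize and
 * cia_max_keysize below, so it is not checked again here.
 */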
static int tea_setkey(struct crypto_tfm *tfm, const u8 *in_key,
		      unsigned int key_len)
{
	struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *key = (const __le32 *)in_key;

	ctx->KEY[0] = le32_to_cpu(key[0]);
	ctx->KEY[1] = le32_to_cpu(key[1]);
	ctx->KEY[2] = le32_to_cpu(key[2]);
	ctx->KEY[3] = le32_to_cpu(key[3]);

	return 0;
}

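/*
 * TEA encryption: 32 rounds of the classic mixing function, with the
 * round constant TEA_DELTA accumulated into 'sum' before each round.
 */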
static void tea_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	u32 y, z, n, sum = 0;
	u32 k0, k1, k2, k3;
	struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *in = (const __le32 *)src;
	__le32 *out = (__le32 *)dst;

	y = le32_to_cpu(in[0]);
	z = le32_to_cpu(in[1]);

	k0 = ctx->KEY[0];
	k1 = ctx->KEY[1];
	k2 = ctx->KEY[2];
	k3 = ctx->KEY[3];

	n = TEA_ROUNDS;

	while (n-- > 0) {
		sum += TEA_DELTA;
		y += ((z << 4) + k0) ^ (z + sum) ^ ((z >> 5) + k1);
		z += ((y << 4) + k2) ^ (y + sum) ^ ((y >> 5) + k3);
	}

	out[0] = cpu_to_le32(y);
	out[1] = cpu_to_le32(z);
}

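/*
 * TEA decryption runs the rounds in reverse.  'sum' starts at
 * TEA_DELTA << 5, i.e. TEA_DELTA * TEA_ROUNDS, the value it reaches
 * after 32 encryption rounds.
 */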
static void tea_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	u32 y, z, n, sum;
	u32 k0, k1, k2, k3;
	struct tea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *in = (const __le32 *)src;
	__le32 *out = (__le32 *)dst;

	y = le32_to_cpu(in[0]);
	z = le32_to_cpu(in[1]);

	k0 = ctx->KEY[0];
	k1 = ctx->KEY[1];
	k2 = ctx->KEY[2];
	k3 = ctx->KEY[3];

	sum = TEA_DELTA << 5;

	n = TEA_ROUNDS;

	while (n-- > 0) {
		z -= ((y << 4) + k2) ^ (y + sum) ^ ((y >> 5) + k3);
		y -= ((z << 4) + k0) ^ (z + sum) ^ ((z >> 5) + k1);
		sum -= TEA_DELTA;
	}

	out[0] = cpu_to_le32(y);
	out[1] = cpu_to_le32(z);
}

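/* XTEA and XETA share the same 128-bit key layout as TEA. */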
static int xtea_setkey(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *key = (const __le32 *)in_key;

	ctx->KEY[0] = le32_to_cpu(key[0]);
	ctx->KEY[1] = le32_to_cpu(key[1]);
	ctx->KEY[2] = le32_to_cpu(key[2]);
	ctx->KEY[3] = le32_to_cpu(key[3]);

	return 0;
}

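/*
 * XTEA encryption: the key word for each half-round is selected by
 * sum & 3 (before the delta is added) and (sum >> 11) & 3 (after),
 * as in the reference implementation.
 */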
static void xtea_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	u32 y, z, sum = 0;
	u32 limit = XTEA_DELTA * XTEA_ROUNDS;
	struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *in = (const __le32 *)src;
	__le32 *out = (__le32 *)dst;

	y = le32_to_cpu(in[0]);
	z = le32_to_cpu(in[1]);

	while (sum != limit) {
		y += ((z << 4 ^ z >> 5) + z) ^ (sum + ctx->KEY[sum & 3]);
		sum += XTEA_DELTA;
		z += ((y << 4 ^ y >> 5) + y) ^ (sum + ctx->KEY[sum >> 11 & 3]);
	}

	out[0] = cpu_to_le32(y);
	out[1] = cpu_to_le32(z);
}

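/* XTEA decryption: the same rounds in reverse, stepping 'sum' back to zero. */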
static void xtea_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	u32 y, z, sum;
	struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *in = (const __le32 *)src;
	__le32 *out = (__le32 *)dst;

	y = le32_to_cpu(in[0]);
	z = le32_to_cpu(in[1]);

	sum = XTEA_DELTA * XTEA_ROUNDS;

	while (sum) {
		z -= ((y << 4 ^ y >> 5) + y) ^ (sum + ctx->KEY[sum >> 11 & 3]);
		sum -= XTEA_DELTA;
		y -= ((z << 4 ^ z >> 5) + z) ^ (sum + ctx->KEY[sum & 3]);
	}

	out[0] = cpu_to_le32(y);
	out[1] = cpu_to_le32(z);
}

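/*
 * XETA is the widely circulated mis-implementation of XTEA: each
 * half-round computes (mix) + (z ^ sum) + key instead of XTEA's
 * ((mix) + z) ^ (sum + key).  It is provided only for interoperability
 * with those implementations.
 */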
static void xeta_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	u32 y, z, sum = 0;
	u32 limit = XTEA_DELTA * XTEA_ROUNDS;
	struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *in = (const __le32 *)src;
	__le32 *out = (__le32 *)dst;

	y = le32_to_cpu(in[0]);
	z = le32_to_cpu(in[1]);

	while (sum != limit) {
		y += (z << 4 ^ z >> 5) + (z ^ sum) + ctx->KEY[sum & 3];
		sum += XTEA_DELTA;
		z += (y << 4 ^ y >> 5) + (y ^ sum) + ctx->KEY[sum >> 11 & 3];
	}

	out[0] = cpu_to_le32(y);
	out[1] = cpu_to_le32(z);
}

static void xeta_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	u32 y, z, sum;
	struct xtea_ctx *ctx = crypto_tfm_ctx(tfm);
	const __le32 *in = (const __le32 *)src;
	__le32 *out = (__le32 *)dst;

	y = le32_to_cpu(in[0]);
	z = le32_to_cpu(in[1]);

	sum = XTEA_DELTA * XTEA_ROUNDS;

	while (sum) {
		z -= (y << 4 ^ y >> 5) + (y ^ sum) + ctx->KEY[sum >> 11 & 3];
		sum -= XTEA_DELTA;
		y -= (z << 4 ^ z >> 5) + (z ^ sum) + ctx->KEY[sum & 3];
	}

	out[0] = cpu_to_le32(y);
	out[1] = cpu_to_le32(z);
}

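/*
 * All three ciphers are registered as single-block ("cipher") algorithms.
 * cra_alignmask = 3 asks the crypto API for 32-bit aligned buffers, so
 * the (__le32 *) casts above are safe.
 */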
static struct crypto_alg tea_algs[3] = { {
	.cra_name		= "tea",
	.cra_driver_name	= "tea-generic",
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= TEA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct tea_ctx),
	.cra_alignmask		= 3,
	.cra_module		= THIS_MODULE,
	.cra_u			= { .cipher = {
		.cia_min_keysize	= TEA_KEY_SIZE,
		.cia_max_keysize	= TEA_KEY_SIZE,
		.cia_setkey		= tea_setkey,
		.cia_encrypt		= tea_encrypt,
		.cia_decrypt		= tea_decrypt } }
}, {
	.cra_name		= "xtea",
	.cra_driver_name	= "xtea-generic",
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= XTEA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct xtea_ctx),
	.cra_alignmask		= 3,
	.cra_module		= THIS_MODULE,
	.cra_u			= { .cipher = {
		.cia_min_keysize	= XTEA_KEY_SIZE,
		.cia_max_keysize	= XTEA_KEY_SIZE,
		.cia_setkey		= xtea_setkey,
		.cia_encrypt		= xtea_encrypt,
		.cia_decrypt		= xtea_decrypt } }
}, {
	.cra_name		= "xeta",
	.cra_driver_name	= "xeta-generic",
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= XTEA_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct xtea_ctx),
	.cra_alignmask		= 3,
	.cra_module		= THIS_MODULE,
	.cra_u			= { .cipher = {
		.cia_min_keysize	= XTEA_KEY_SIZE,
		.cia_max_keysize	= XTEA_KEY_SIZE,
		.cia_setkey		= xtea_setkey,
		.cia_encrypt		= xeta_encrypt,
		.cia_decrypt		= xeta_decrypt } }
} };

static int __init tea_mod_init(void)
{
	return crypto_register_algs(tea_algs, ARRAY_SIZE(tea_algs));
}

static void __exit tea_mod_fini(void)
{
	crypto_unregister_algs(tea_algs, ARRAY_SIZE(tea_algs));
}

MODULE_ALIAS_CRYPTO("tea");
MODULE_ALIAS_CRYPTO("xtea");
MODULE_ALIAS_CRYPTO("xeta");

subsys_initcall(tea_mod_init);
module_exit(tea_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("TEA, XTEA & XETA Cryptographic Algorithms");