xref: /OK3568_Linux_fs/external/security/librkcrypto/test/c_mode/sm4_xts.c (revision 4882a59341e53eb6f0b4789bf948001014eff981)
1*4882a593Smuzhiyun #include <stdio.h>
2*4882a593Smuzhiyun #include <stdlib.h>
3*4882a593Smuzhiyun #include <string.h>
4*4882a593Smuzhiyun #include "sm4_core.h"
5*4882a593Smuzhiyun 
6*4882a593Smuzhiyun #define DEBUG(format,...) printf("[%s]:%d: "format"\n", __func__,__LINE__, ##__VA_ARGS__)
7*4882a593Smuzhiyun 
aes_xts128_encrypt(void * ctx1,void * ctx2,const unsigned char iv[16],const unsigned char * inp,unsigned char * out,unsigned long len,int enc)8*4882a593Smuzhiyun static int aes_xts128_encrypt(void *ctx1, void *ctx2,
9*4882a593Smuzhiyun 			const unsigned char iv[16], const unsigned char *inp,
10*4882a593Smuzhiyun 			unsigned char *out, unsigned long len, int enc)
11*4882a593Smuzhiyun {
12*4882a593Smuzhiyun 	const union { long one; char little; } is_endian = {1};
13*4882a593Smuzhiyun 	union { u64 u[2]; u32 d[4]; u8 c[16]; } tweak, scratch;
14*4882a593Smuzhiyun 	unsigned int i;
15*4882a593Smuzhiyun 
16*4882a593Smuzhiyun 	if (len < 16)
17*4882a593Smuzhiyun 		return -1;
18*4882a593Smuzhiyun 
19*4882a593Smuzhiyun 	memcpy(tweak.c, iv, 16);
20*4882a593Smuzhiyun 
21*4882a593Smuzhiyun 	rk_sm4_crypt_ecb(ctx2, (const unsigned char*)tweak.c, tweak.c);
22*4882a593Smuzhiyun 
23*4882a593Smuzhiyun 	if (!enc && (len % 16))
24*4882a593Smuzhiyun 		len -= 16;
25*4882a593Smuzhiyun 
26*4882a593Smuzhiyun 	while (len >= 16) {
27*4882a593Smuzhiyun 		memcpy(scratch.c, inp, 16);
28*4882a593Smuzhiyun 		scratch.u[0] ^= tweak.u[0];
29*4882a593Smuzhiyun 		scratch.u[1] ^= tweak.u[1];
30*4882a593Smuzhiyun 
31*4882a593Smuzhiyun 		if (enc)
32*4882a593Smuzhiyun 			rk_sm4_crypt_ecb(ctx1, (const unsigned char*)scratch.c, scratch.c);
33*4882a593Smuzhiyun 		else
34*4882a593Smuzhiyun 			rk_sm4_crypt_ecb(ctx1, (const unsigned char*)scratch.c, scratch.c);
35*4882a593Smuzhiyun 
36*4882a593Smuzhiyun 		scratch.u[0] ^= tweak.u[0];
37*4882a593Smuzhiyun 		scratch.u[1] ^= tweak.u[1];
38*4882a593Smuzhiyun 		memcpy(out, scratch.c, 16);
39*4882a593Smuzhiyun 
40*4882a593Smuzhiyun 		inp += 16;
41*4882a593Smuzhiyun 		out += 16;
42*4882a593Smuzhiyun 		len -= 16;
43*4882a593Smuzhiyun 
44*4882a593Smuzhiyun 		if (len == 0)
45*4882a593Smuzhiyun 			return 0;
46*4882a593Smuzhiyun 
47*4882a593Smuzhiyun 		if (is_endian.little) {
48*4882a593Smuzhiyun 			unsigned int carry,res;
49*4882a593Smuzhiyun 
50*4882a593Smuzhiyun 			res = 0x87 & (((int)tweak.d[3]) >> 31);
51*4882a593Smuzhiyun 			carry = (unsigned int)(tweak.u[0] >> 63);
52*4882a593Smuzhiyun 			tweak.u[0] = (tweak.u[0] << 1) ^ res;
53*4882a593Smuzhiyun 			tweak.u[1] = (tweak.u[1] << 1) | carry;
54*4882a593Smuzhiyun 		}
55*4882a593Smuzhiyun 		else {
56*4882a593Smuzhiyun 			size_t c;
57*4882a593Smuzhiyun 
58*4882a593Smuzhiyun 			for (c = 0, i = 0; i < 16; ++i) {
59*4882a593Smuzhiyun 				/*+ substitutes for |, because c is 1 bit */
60*4882a593Smuzhiyun 				c += ((size_t)tweak.c[i]) << 1;
61*4882a593Smuzhiyun 				tweak.c[i] = (u8)c;
62*4882a593Smuzhiyun 				c = c >> 8;
63*4882a593Smuzhiyun 			}
64*4882a593Smuzhiyun 			tweak.c[0] ^= (u8)(0x87 & (0-c));
65*4882a593Smuzhiyun 		}
66*4882a593Smuzhiyun 	}
67*4882a593Smuzhiyun 	if (enc) {
68*4882a593Smuzhiyun 		for (i = 0;i < len; ++i) {
69*4882a593Smuzhiyun 			u8 c = inp[i];
70*4882a593Smuzhiyun 			out[i] = scratch.c[i];
71*4882a593Smuzhiyun 			scratch.c[i] = c;
72*4882a593Smuzhiyun 		}
73*4882a593Smuzhiyun 		scratch.u[0] ^= tweak.u[0];
74*4882a593Smuzhiyun 		scratch.u[1] ^= tweak.u[1];
75*4882a593Smuzhiyun 		rk_sm4_crypt_ecb(ctx1, (const unsigned char*)scratch.c, scratch.c);
76*4882a593Smuzhiyun 		scratch.u[0] ^= tweak.u[0];
77*4882a593Smuzhiyun 		scratch.u[1] ^= tweak.u[1];
78*4882a593Smuzhiyun 		memcpy(out-16, scratch.c, 16);
79*4882a593Smuzhiyun 	}
80*4882a593Smuzhiyun 	else {
81*4882a593Smuzhiyun 		union { u64 u[2]; u8 c[16]; } tweak1;
82*4882a593Smuzhiyun 
83*4882a593Smuzhiyun 		if (is_endian.little) {
84*4882a593Smuzhiyun 			unsigned int carry,res;
85*4882a593Smuzhiyun 
86*4882a593Smuzhiyun 			res = 0x87 & (((int)tweak.d[3]) >> 31);
87*4882a593Smuzhiyun 			carry = (unsigned int)(tweak.u[0] >> 63);
88*4882a593Smuzhiyun 			tweak1.u[0] = (tweak.u[0] << 1) ^ res;
89*4882a593Smuzhiyun 			tweak1.u[1] = (tweak.u[1] << 1) | carry;
90*4882a593Smuzhiyun 		}
91*4882a593Smuzhiyun 		else {
92*4882a593Smuzhiyun 			size_t c;
93*4882a593Smuzhiyun 
94*4882a593Smuzhiyun 			for (c = 0, i = 0;i < 16;++i) {
95*4882a593Smuzhiyun 				/*+ substitutes for |, because c is 1 bit */
96*4882a593Smuzhiyun 				c += ((size_t)tweak.c[i]) << 1;
97*4882a593Smuzhiyun 				tweak1.c[i] = (u8)c;
98*4882a593Smuzhiyun 				c = c >> 8;
99*4882a593Smuzhiyun 			}
100*4882a593Smuzhiyun 			tweak1.c[0] ^= (u8)(0x87 & (0-c));
101*4882a593Smuzhiyun 		}
102*4882a593Smuzhiyun 
103*4882a593Smuzhiyun 		memcpy(scratch.c, inp, 16);
104*4882a593Smuzhiyun 		scratch.u[0] ^= tweak1.u[0];
105*4882a593Smuzhiyun 		scratch.u[1] ^= tweak1.u[1];
106*4882a593Smuzhiyun 
107*4882a593Smuzhiyun 		rk_sm4_crypt_ecb(ctx1, (const unsigned char*)scratch.c, scratch.c);
108*4882a593Smuzhiyun 		scratch.u[0] ^= tweak1.u[0];
109*4882a593Smuzhiyun 		scratch.u[1] ^= tweak1.u[1];
110*4882a593Smuzhiyun 
111*4882a593Smuzhiyun 		for (i = 0;i < len;++i) {
112*4882a593Smuzhiyun 			u8 c = inp[16+i];
113*4882a593Smuzhiyun 			out[16+i] = scratch.c[i];
114*4882a593Smuzhiyun 			scratch.c[i] = c;
115*4882a593Smuzhiyun 		}
116*4882a593Smuzhiyun 
117*4882a593Smuzhiyun 		scratch.u[0] ^= tweak.u[0];
118*4882a593Smuzhiyun 		scratch.u[1] ^= tweak.u[1];
119*4882a593Smuzhiyun 		rk_sm4_crypt_ecb(ctx1, (const unsigned char*)scratch.c, scratch.c);
120*4882a593Smuzhiyun 
121*4882a593Smuzhiyun 		scratch.u[0] ^= tweak.u[0];
122*4882a593Smuzhiyun 		scratch.u[1] ^= tweak.u[1];
123*4882a593Smuzhiyun 		memcpy (out, scratch.c, 16);
124*4882a593Smuzhiyun 	}
125*4882a593Smuzhiyun 
126*4882a593Smuzhiyun 	return 0;
127*4882a593Smuzhiyun }
128*4882a593Smuzhiyun 
/* XTS makes use of two independent block-cipher keys, obtained by
 * splitting the supplied key in half: the first half keys the data
 * blocks, the second half keys the tweak. Since SM4 uses a fixed
 * 128-bit key, callers must supply a 256-bit (32-byte) key here.
 */
rk_sm4_xts_encrypt(const unsigned char * in,unsigned char * out,unsigned int length,const unsigned char * key,const int key_len,unsigned char * ivec,const int enc)134*4882a593Smuzhiyun int rk_sm4_xts_encrypt(const unsigned char *in, unsigned char *out,
135*4882a593Smuzhiyun         unsigned int length, const unsigned char *key, const int key_len,
136*4882a593Smuzhiyun         unsigned char *ivec, const int enc)
137*4882a593Smuzhiyun {
138*4882a593Smuzhiyun     sm4_context  ctx1, ctx2;
139*4882a593Smuzhiyun 
140*4882a593Smuzhiyun 	if(key_len != 32)
141*4882a593Smuzhiyun 		return -1;
142*4882a593Smuzhiyun 
143*4882a593Smuzhiyun     if (enc) {
144*4882a593Smuzhiyun        rk_sm4_setkey_enc(&ctx1, key);
145*4882a593Smuzhiyun     } else {
146*4882a593Smuzhiyun        rk_sm4_setkey_dec(&ctx1, key);
147*4882a593Smuzhiyun     }
148*4882a593Smuzhiyun 
149*4882a593Smuzhiyun 	rk_sm4_setkey_enc(&ctx2, key + key_len / 2);
150*4882a593Smuzhiyun 
151*4882a593Smuzhiyun     return aes_xts128_encrypt(&ctx1, &ctx2, ivec, in, out, length, enc);
152*4882a593Smuzhiyun }
153*4882a593Smuzhiyun 
154*4882a593Smuzhiyun 
155