/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Common values and helper functions for the ChaCha and XChaCha stream ciphers.
 *
 * XChaCha extends ChaCha's nonce to 192 bits, while provably retaining ChaCha's
 * security. Here they share the same key size, tfm context, and setkey
 * function; only their IV size and encrypt/decrypt function differ.
 *
 * The ChaCha paper specifies 20, 12, and 8-round variants. In general, it is
 * recommended to use the 20-round variant ChaCha20. However, the other
 * variants can be needed in some performance-sensitive scenarios. The generic
 * ChaCha code currently allows only the 20 and 12-round variants.
 */

#ifndef _CRYPTO_CHACHA_H
#define _CRYPTO_CHACHA_H

#include <asm/unaligned.h>
#include <linux/types.h>

/* 32-bit stream position, then 96-bit nonce (RFC7539 convention) */
#define CHACHA_IV_SIZE 16

/* All variants use a 256-bit key and produce 64-byte keystream blocks. */
#define CHACHA_KEY_SIZE 32
#define CHACHA_BLOCK_SIZE 64
/* IV size used by the rfc7539 ChaCha20-Poly1305 AEAD (96-bit nonce only). */
#define CHACHAPOLY_IV_SIZE 12

/* Number of 32-bit words in the ChaCha state matrix (64 bytes / 4 = 16). */
#define CHACHA_STATE_WORDS (CHACHA_BLOCK_SIZE / sizeof(u32))

/* 192-bit nonce, then 64-bit stream position */
#define XCHACHA_IV_SIZE 32
void chacha_block_generic(u32 *state, u8 *stream, int nrounds);

/*
 * Generate one 64-byte keystream block into @stream using the 20-round
 * variant. Thin wrapper that fixes nrounds to 20 for chacha_block_generic().
 * NOTE(review): the generic implementation presumably also advances the block
 * counter in @state — confirm against lib/crypto/chacha.c.
 */
static inline void chacha20_block(u32 *state, u8 *stream)
{
	chacha_block_generic(state, stream, 20);
}
38*4882a593Smuzhiyun
void hchacha_block_arch(const u32 *state, u32 *out, int nrounds);
void hchacha_block_generic(const u32 *state, u32 *out, int nrounds);

/*
 * HChaCha block function: derives a 256-bit subkey from @state into @out,
 * as used by XChaCha to absorb the first 128 bits of the extended nonce.
 *
 * Dispatches to the architecture-optimized implementation when the arch
 * provides one (CONFIG_CRYPTO_ARCH_HAVE_LIB_CHACHA). IS_ENABLED() is a
 * compile-time constant, so the compiler eliminates the untaken branch and
 * the unused symbol is never referenced.
 */
static inline void hchacha_block(const u32 *state, u32 *out, int nrounds)
{
	if (IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_CHACHA))
		hchacha_block_arch(state, out, nrounds);
	else
		hchacha_block_generic(state, out, nrounds);
}
49*4882a593Smuzhiyun
/*
 * The four fixed words of the ChaCha state matrix: the ASCII string
 * "expand 32-byte k" read as little-endian 32-bit words
 * ("expa", "nd 3", "2-by", "te k").
 */
enum chacha_constants { /* expand 32-byte k */
	CHACHA_CONSTANT_EXPA = 0x61707865U,
	CHACHA_CONSTANT_ND_3 = 0x3320646eU,
	CHACHA_CONSTANT_2_BY = 0x79622d32U,
	CHACHA_CONSTANT_TE_K = 0x6b206574U
};
56*4882a593Smuzhiyun
/*
 * Load the four fixed "expand 32-byte k" constant words into state
 * words 0-3. The remaining words (key, counter, nonce) are filled in
 * by the chacha_init*() helpers.
 */
static inline void chacha_init_consts(u32 *state)
{
	static const u32 sigma[4] = {
		CHACHA_CONSTANT_EXPA,
		CHACHA_CONSTANT_ND_3,
		CHACHA_CONSTANT_2_BY,
		CHACHA_CONSTANT_TE_K,
	};
	int i;

	for (i = 0; i < 4; i++)
		state[i] = sigma[i];
}
64*4882a593Smuzhiyun
void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv);

/*
 * Portable ChaCha state setup: constants in words 0-3, the 256-bit @key
 * in words 4-11, and the 16-byte @iv (block counter followed by nonce)
 * loaded little-endian into words 12-15. The IV may be unaligned; the
 * key is taken as host-order words the caller has already converted.
 */
static inline void chacha_init_generic(u32 *state, const u32 *key, const u8 *iv)
{
	int i;

	chacha_init_consts(state);
	for (i = 0; i < 8; i++)
		state[4 + i] = key[i];
	for (i = 0; i < 4; i++)
		state[12 + i] = get_unaligned_le32(iv + 4 * i);
}
82*4882a593Smuzhiyun
/*
 * Initialize the full 16-word ChaCha state from @key and @iv, using the
 * architecture-optimized setup when available. IS_ENABLED() folds to a
 * constant, so only one of the two callees is ever referenced in a given
 * build configuration.
 */
static inline void chacha_init(u32 *state, const u32 *key, const u8 *iv)
{
	if (IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_CHACHA))
		chacha_init_arch(state, key, iv);
	else
		chacha_init_generic(state, key, iv);
}
90*4882a593Smuzhiyun
void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src,
		       unsigned int bytes, int nrounds);
void chacha_crypt_generic(u32 *state, u8 *dst, const u8 *src,
			  unsigned int bytes, int nrounds);

/*
 * En/decrypt @bytes bytes from @src into @dst with the keystream generated
 * from @state (ChaCha is symmetric, so the same call does both directions).
 * Dispatches to the arch-optimized implementation when one is built in;
 * IS_ENABLED() makes the selection at compile time with no runtime cost.
 */
static inline void chacha_crypt(u32 *state, u8 *dst, const u8 *src,
				unsigned int bytes, int nrounds)
{
	if (IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_CHACHA))
		chacha_crypt_arch(state, dst, src, bytes, nrounds);
	else
		chacha_crypt_generic(state, dst, src, bytes, nrounds);
}
104*4882a593Smuzhiyun
/*
 * Convenience wrapper for the recommended 20-round variant: en/decrypt
 * @bytes bytes from @src into @dst using @state.
 */
static inline void chacha20_crypt(u32 *state, u8 *dst, const u8 *src,
				  unsigned int bytes)
{
	chacha_crypt(state, dst, src, bytes, 20);
}
110*4882a593Smuzhiyun
#endif /* _CRYPTO_CHACHA_H */