xref: /optee_os/core/arch/arm/crypto/aes-gcm-ce.c (revision b97e9666f646ca681890b1f5c61b8d62f0160d34)
1 /*
2  * Copyright (c) 2017, Linaro Limited
3  * All rights reserved.
4  *
5  * SPDX-License-Identifier: BSD-2-Clause
6  */
7 
8 #include <crypto/internal_aes-gcm.h>
9 #include <crypto/ghash-ce-core.h>
10 #include <io.h>
11 #include <kernel/panic.h>
12 #include <kernel/thread.h>
13 #include <tomcrypt.h>
14 #include <types_ext.h>
15 
/*
 * Load a 16-byte big-endian block into two 64-bit words with the halves
 * swapped: the first 8 bytes of @src land in word 1 and the last 8 bytes
 * in word 0.  This is the layout the PMULL assembler helpers operate on.
 */
static void get_be_block(void *dst, const void *src)
{
	const uint8_t *s = src;
	uint64_t *d = dst;

	d[1] = get_be64(s);
	d[0] = get_be64(s + 8);
}
23 
/*
 * Store the swapped two-word representation produced by get_be_block()
 * back out as a 16-byte big-endian block: word 1 becomes the first 8
 * bytes of @dst, word 0 the last 8.
 */
static void put_be_block(void *dst, const void *src)
{
	uint8_t *d = dst;
	const uint64_t *s = src;

	put_be64(d, s[1]);
	put_be64(d + 8, s[0]);
}
31 
/*
 * Derive the GHASH hash subkey for this GCM state: encrypt the current
 * counter block with @enc_key, then pre-multiply the result by 'x' in
 * GF(2^128) so the PMULL assembler routines can work on the shifted
 * representation directly.  The subkey is written back into
 * state->hash_subkey in little-endian word order.
 */
void internal_aes_gcm_set_key(struct internal_aes_gcm_state *state,
			      const struct internal_aes_gcm_key *enc_key)
{
	uint64_t k[2];
	uint64_t a;
	uint64_t b;

	/* H = E(K, counter block): raw hash subkey, big-endian in memory */
	internal_aes_gcm_encrypt_block(enc_key, state->ctr, state->hash_subkey);

	/* Store hash key in little endian and multiply by 'x' */
	b = get_be64(state->hash_subkey);
	a = get_be64(state->hash_subkey + 8);
	/* 128-bit left shift by one across the two words */
	k[0] = (a << 1) | (b >> 63);
	k[1] = (b << 1) | (a >> 63);
	/*
	 * If a 1 bit was shifted out of the top, reduce modulo the GCM
	 * field polynomial; 0xc2... is its bit-reflected representation.
	 */
	if (b >> 63)
		k[1] ^= 0xc200000000000000UL;

	memcpy(state->hash_subkey, k, TEE_AES_BLOCK_SIZE);
}
51 
/*
 * Fold @num_blocks 16-byte blocks at @data (plus @head, an extra block
 * handled first by the assembler — presumably optional/NULL-able; the
 * contract lives in ghash-ce-core) into the running GHASH digest in
 * state->hash_state, using the pre-shifted subkey set up by
 * internal_aes_gcm_set_key().
 */
void internal_aes_gcm_ghash_update(struct internal_aes_gcm_state *state,
				   const void *head, const void *data,
				   size_t num_blocks)
{
	uint32_t vfp_state;
	uint64_t dg[2];
	uint64_t *k;

	/* Load the digest in the swapped word layout the asm expects */
	get_be_block(dg, state->hash_state);

	k = (void *)state->hash_subkey;

	/* The pmull_* helpers use SIMD registers: enable VFP around them */
	vfp_state = thread_kernel_enable_vfp();

#ifdef CFG_HWSUPP_PMULL
	/* Full 64x64 polynomial multiply (PMULL on doublewords) */
	pmull_ghash_update_p64(num_blocks, dg, data, k, head);
#else
	/* Fallback composed from 8-bit polynomial multiplies */
	pmull_ghash_update_p8(num_blocks, dg, data, k, head);
#endif
	thread_kernel_disable_vfp(vfp_state);

	put_be_block(state->hash_state, dg);
}
75 
76 #ifdef ARM64
/*
 * Rotate a 32-bit word right by @shift bits.
 *
 * Both shift counts are masked to 0..31 so the expression stays well
 * defined for shift == 0 (or any multiple of 32): the original
 * "word << (32 - shift)" form shifts by the full width of the type in
 * that case, which is undefined behavior in C.  For in-range shifts the
 * result is unchanged.
 */
static uint32_t ror32(uint32_t word, unsigned int shift)
{
	return (word >> (shift & 31)) | (word << ((32 - shift) & 31));
}
81 
/*
 * Expand a raw AES key into the round-key schedule stored in
 * @enc_key->data, using the Crypto Extensions helper
 * pmull_gcm_aes_sub() for the SubWord step (hence the VFP enable).
 *
 * @key      raw key material
 * @key_len  must be 16, 24 or 32 bytes (AES-128/192/256)
 * @enc_key  receives the expanded schedule and round count
 *
 * Returns TEE_ERROR_BAD_PARAMETERS on an unsupported key length,
 * TEE_SUCCESS otherwise.
 */
TEE_Result internal_aes_gcm_expand_enc_key(const void *key, size_t key_len,
					   struct internal_aes_gcm_key *enc_key)
{
	/* The AES key schedule round constants */
	static uint8_t const rcon[] = {
		0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36,
	};
	uint32_t vfp_state;
	uint32_t kwords = key_len / sizeof(uint32_t);
	void *p = enc_key->data;
	uint32_t *k = p;
	unsigned int i;

	if (key_len != 16 && key_len != 24 && key_len != 32)
		return TEE_ERROR_BAD_PARAMETERS;

	/* Round 0 keys are the raw key itself */
	memcpy(k, key, key_len);
	/*
	 * # of rounds specified by AES:
	 * 128 bit key          10 rounds
	 * 192 bit key          12 rounds
	 * 256 bit key          14 rounds
	 * => n byte key        => 6 + (n/4) rounds
	 */
	enc_key->rounds = 6 + key_len / 4;

	vfp_state = thread_kernel_enable_vfp();
	for (i = 0; i < sizeof(rcon); i++) {
		/* rki: previous round's words; rko: words being produced */
		uint32_t *rki = k + (i * kwords);
		uint32_t *rko = rki + kwords;

		/*
		 * First word of the round: RotWord+SubWord of the last
		 * word of the previous round, XOR round constant, XOR
		 * first word of the previous round.
		 */
		rko[0] = ror32(pmull_gcm_aes_sub(rki[kwords - 1]), 8) ^
			 rcon[i] ^ rki[0];
		rko[1] = rko[0] ^ rki[1];
		rko[2] = rko[1] ^ rki[2];
		rko[3] = rko[2] ^ rki[3];

		if (key_len == 24) {
			/* AES-192: 6-word rounds, schedule done after 8 */
			if (i >= 7)
				break;
			rko[4] = rko[3] ^ rki[4];
			rko[5] = rko[4] ^ rki[5];
		} else if (key_len == 32) {
			/* AES-256: 8-word rounds with an extra SubWord */
			if (i >= 6)
				break;
			rko[4] = pmull_gcm_aes_sub(rko[3]) ^ rki[4];
			rko[5] = rko[4] ^ rki[5];
			rko[6] = rko[5] ^ rki[6];
			rko[7] = rko[6] ^ rki[7];
		}
	}

	thread_kernel_disable_vfp(vfp_state);
	return TEE_SUCCESS;
}
137 
138 void internal_aes_gcm_encrypt_block(const struct internal_aes_gcm_key *ek,
139 				    const void *src, void *dst)
140 {
141 	uint32_t vfp_state;
142 
143 	vfp_state = thread_kernel_enable_vfp();
144 
145 	pmull_gcm_load_round_keys(ek->data, ek->rounds);
146 	pmull_gcm_encrypt_block(dst, src, ek->rounds);
147 
148 	thread_kernel_disable_vfp(vfp_state);
149 }
150 
/*
 * Encrypt or decrypt @num_blocks block-aligned payload blocks from @src
 * to @dst in CTR mode while updating the GHASH digest, all in one pass
 * through the Crypto Extensions assembler.  The counter and digest are
 * round-tripped through the swapped two-word layout the asm expects and
 * written back to @state afterwards.
 */
void internal_aes_gcm_update_payload_block_aligned(
				struct internal_aes_gcm_state *state,
				const struct internal_aes_gcm_key *ek,
				TEE_OperationMode mode, const void *src,
				size_t num_blocks, void *dst)
{
	uint32_t vfp_state;
	uint64_t dg[2];
	uint64_t ctr[2];
	uint64_t *k;

	get_be_block(dg, state->hash_state);
	get_be_block(ctr, state->ctr);

	k = (void *)state->hash_subkey;

	vfp_state = thread_kernel_enable_vfp();

	pmull_gcm_load_round_keys(ek->data, ek->rounds);

	/*
	 * Encrypt additionally takes state->buf_cryp; the exact role of
	 * that buffer is defined by the assembler side (ghash-ce-core).
	 */
	if (mode == TEE_MODE_ENCRYPT)
		pmull_gcm_encrypt(num_blocks, dg, dst, src, k, ctr, ek->rounds,
				  state->buf_cryp);
	else
		pmull_gcm_decrypt(num_blocks, dg, dst, src, k, ctr, ek->rounds);

	thread_kernel_disable_vfp(vfp_state);

	/* Persist the advanced counter and updated digest */
	put_be_block(state->ctr, ctr);
	put_be_block(state->hash_state, dg);
}
182 #endif /*ARM64*/
183