xref: /optee_os/core/arch/arm/crypto/aes-gcm-ce.c (revision 9cd2e73b6a86b2946142306c135647c7f7a9ed85)
// SPDX-License-Identifier: BSD-2-Clause
/*
 * Copyright (c) 2017-2020, Linaro Limited
 */

#include <assert.h>
#include <crypto/crypto_accel.h>
#include <crypto/crypto.h>
#include <crypto/ghash-ce-core.h>
#include <crypto/internal_aes-gcm.h>
#include <io.h>
#include <kernel/panic.h>
#include <kernel/thread.h>
#include <string.h>
#include <types_ext.h>

#include "aes_armv8a_ce.h"

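/*
 * The GHASH state and hash key are stored in memory in big endian byte
 * order, while the assembler routines operate on a pair of native
 * uint64_t with the two 64-bit halves swapped. These helpers convert
 * between the two representations.
 */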
static void get_be_block(void *dst, const void *src)
{
	uint64_t *d = dst;

	d[1] = get_be64(src);
	d[0] = get_be64((const uint8_t *)src + 8);
}

static void put_be_block(void *dst, const void *src)
{
	const uint64_t *s = src;

	put_be64(dst, s[1]);
	put_be64((uint8_t *)dst + 8, s[0]);
}

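/*
 * Pre-shift a hash key into the form the PMULL routines expect: the key
 * is multiplied by x in GF(2^128) (shifted left one bit) and, when the
 * shifted-out bit was set, reduced with the reflected GHASH reduction
 * constant 0xc200000000000000.
 */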
static void ghash_reflect(uint64_t h[2], const uint64_t k[2])
{
	uint64_t b = get_be64(k);
	uint64_t a = get_be64(k + 1);

	h[0] = (a << 1) | (b >> 63);
	h[1] = (b << 1) | (a >> 63);
	if (b >> 63)
		h[1] ^= 0xc200000000000000UL;
}

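/*
 * Derive the GHASH key H together with the powers H^2, H^3 and H^4,
 * which let the assembler routines aggregate several blocks per
 * reduction. state->ctr is expected to still be the all-zero block
 * here, making k the standard GCM hash key H = E_K(0).
 */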
void internal_aes_gcm_set_key(struct internal_aes_gcm_state *state,
			      const struct internal_aes_gcm_key *enc_key)
{
	uint64_t k[2] = { 0 };
	uint64_t h[2] = { 0 };

	crypto_aes_enc_block(enc_key->data, sizeof(enc_key->data),
			     enc_key->rounds, state->ctr, k);

	ghash_reflect(state->ghash_key.h, k);

	internal_aes_gcm_gfmul(k, k, h);	/* h = H^2 */
	ghash_reflect(state->ghash_key.h2, h);

	internal_aes_gcm_gfmul(k, h, h);	/* h = H^3 */
	ghash_reflect(state->ghash_key.h3, h);

	internal_aes_gcm_gfmul(k, h, h);	/* h = H^4 */
	ghash_reflect(state->ghash_key.h4, h);
}

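/*
 * Run full blocks through GHASH, using the 64-bit polynomial multiply
 * instruction when available (CFG_HWSUPP_PMULT_64) or the fallback
 * based on 8-bit polynomial multiplies otherwise.
 */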
static void pmull_ghash_update(int num_blocks, uint64_t dg[2],
			       const uint8_t *src,
			       const struct internal_ghash_key *ghash_key,
			       const uint8_t *head)
{
#ifdef CFG_HWSUPP_PMULT_64
	pmull_ghash_update_p64(num_blocks, dg, src, ghash_key, head);
#else
	pmull_ghash_update_p8(num_blocks, dg, src, ghash_key, head);
#endif
}

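/*
 * The assembler routines use the SIMD register bank, so VFP has to be
 * enabled for the current thread around the call. The hash state is
 * converted to native representation on entry and back to big endian
 * on return.
 */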
void internal_aes_gcm_ghash_update(struct internal_aes_gcm_state *state,
				   const void *head, const void *data,
				   size_t num_blocks)
{
	uint32_t vfp_state;
	uint64_t dg[2];

	get_be_block(dg, state->hash_state);

	vfp_state = thread_kernel_enable_vfp();

	pmull_ghash_update(num_blocks, dg, data, &state->ghash_key, head);

	thread_kernel_disable_vfp(vfp_state);

	put_be_block(state->hash_state, dg);
}
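
/*
 * Illustrative only: with the interface above, feeding a block-aligned
 * buffer into GHASH could look like the hypothetical helper below,
 * passing NULL for head since there is no partial block in front of
 * the data.
 *
 * static void ghash_blocks(struct internal_aes_gcm_state *state,
 *			    const uint8_t *data, size_t len)
 * {
 *	assert(!(len % TEE_AES_BLOCK_SIZE));
 *	internal_aes_gcm_ghash_update(state, NULL, data,
 *				      len / TEE_AES_BLOCK_SIZE);
 * }
 */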

#ifdef ARM64
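/*
 * Encrypt or decrypt payload blocks, two at a time, using the combined
 * AES-CTR + GHASH assembler routines.
 */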
static void update_payload_2block(struct internal_aes_gcm_state *state,
				  const struct internal_aes_gcm_key *ek,
				  TEE_OperationMode mode, const void *src,
				  size_t num_blocks, void *dst)
{
	uint32_t vfp_state;
	uint64_t dg[2];

	assert(num_blocks && !(num_blocks % 2));

	get_be_block(dg, state->hash_state);

	vfp_state = thread_kernel_enable_vfp();

	if (mode == TEE_MODE_ENCRYPT) {
		uint8_t ks[sizeof(state->buf_cryp) * 2] = { 0 };

		/*
		 * ks holds the encrypted counters of the next two blocks.
		 * pmull_gcm_encrypt() uses this to encrypt the first two
		 * blocks. When pmull_gcm_encrypt() returns, ks has been
		 * updated with the encrypted counters of the next two
		 * blocks. As we only keep the first of those blocks we
		 * throw away block number two and consequently decrease
		 * the counter by one.
		 */
		memcpy(ks, state->buf_cryp, sizeof(state->buf_cryp));

		pmull_gcm_load_round_keys(ek->data, ek->rounds);
		pmull_gcm_encrypt_block(ks + sizeof(state->buf_cryp),
					(uint8_t *)state->ctr, ek->rounds);
		internal_aes_gcm_inc_ctr(state);
		pmull_gcm_encrypt(num_blocks, dg, dst, src, &state->ghash_key,
				  state->ctr, NULL, ek->rounds, ks);
		memcpy(state->buf_cryp, ks, TEE_AES_BLOCK_SIZE);
		internal_aes_gcm_dec_ctr(state);
	} else {
		pmull_gcm_decrypt(num_blocks, dg, dst, src, &state->ghash_key,
				  state->ctr, ek->data, ek->rounds);
	}

	thread_kernel_disable_vfp(vfp_state);

	put_be_block(state->hash_state, dg);
}
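
/*
 * Sketch of the keystream bookkeeping above (illustrative): with ctrN
 * denoting the counter value after the last processed block,
 *
 *   before pmull_gcm_encrypt():  ks = { E_K(ctr0), E_K(ctr1) }
 *   after pmull_gcm_encrypt():   ks = { E_K(ctrN), E_K(ctrN + 1) }
 *
 * Only E_K(ctrN) is saved in state->buf_cryp, so the counter is
 * decreased by one to keep buf_cryp and state->ctr in sync.
 */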

/* Overriding the __weak function */
void
internal_aes_gcm_update_payload_blocks(struct internal_aes_gcm_state *state,
				       const struct internal_aes_gcm_key *ek,
				       TEE_OperationMode mode, const void *src,
				       size_t num_blocks, void *dst)
{
	size_t nb = ROUNDDOWN(num_blocks, 2);

	/*
	 * pmull_gcm_encrypt() and pmull_gcm_decrypt() can only handle
	 * block counts that are multiples of two.
	 */
	if (nb)
		update_payload_2block(state, ek, mode, src, nb, dst);

	if (nb != num_blocks) {
		/* There's a final block */
		const void *s = (const uint8_t *)src + nb * TEE_AES_BLOCK_SIZE;
		void *d = (uint8_t *)dst + nb * TEE_AES_BLOCK_SIZE;
		uint64_t tmp[2] = { 0 };

		/* Bounce via an aligned buffer if the source is unaligned */
		if (!ALIGNMENT_IS_OK(s, uint64_t)) {
			memcpy(tmp, s, sizeof(tmp));
			s = tmp;
		}

		if (mode == TEE_MODE_ENCRYPT)
			internal_aes_gcm_encrypt_block(state, ek, s, d);
		else
			internal_aes_gcm_decrypt_block(state, ek, s, d);
	}
}
#endif /*ARM64*/

#ifdef ARM32
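/*
 * ARM32 lacks the combined AES-CTR + GHASH assembler routines used on
 * ARM64 above, so the two operations are interleaved block by block in
 * C. On entry state->buf_cryp holds the encrypted counter block for
 * the current position, keeping the keystream one block ahead of the
 * payload.
 */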
static void encrypt_pl(struct internal_aes_gcm_state *state,
		       const struct internal_aes_gcm_key *ek, uint64_t dg[2],
		       const uint8_t *src, size_t num_blocks, uint8_t *dst)
{
	void *buf_cryp = state->buf_cryp;

	while (num_blocks) {
		/* XOR the plaintext into the buffered keystream block */
		ce_aes_xor_block(buf_cryp, buf_cryp, src);

		/* GHASH the resulting ciphertext block and emit it */
		pmull_ghash_update(1, dg, buf_cryp, &state->ghash_key, NULL);
		memcpy(dst, buf_cryp, TEE_AES_BLOCK_SIZE);

		/* Prepare the keystream block for the next iteration */
		ce_aes_ecb_encrypt(buf_cryp, (const uint8_t *)state->ctr,
				   (const uint8_t *)ek->data, ek->rounds,
				   1, 1);
		internal_aes_gcm_inc_ctr(state);

		src += TEE_AES_BLOCK_SIZE;
		dst += TEE_AES_BLOCK_SIZE;
		num_blocks--;
	}
}

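/*
 * Decryption feeds the ciphertext (src) straight into GHASH, so no
 * keystream block needs to be carried between iterations; plain
 * AES-CTR produces the plaintext.
 */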
static void decrypt_pl(struct internal_aes_gcm_state *state,
		       const struct internal_aes_gcm_key *ek, uint64_t dg[2],
		       const uint8_t *src, size_t num_blocks, uint8_t *dst)
{
	while (num_blocks) {
		ce_aes_ctr_encrypt(dst, src, (const uint8_t *)ek->data,
				   ek->rounds, 1, (uint8_t *)state->ctr, 1);
		pmull_ghash_update(1, dg, src, &state->ghash_key, NULL);

		src += TEE_AES_BLOCK_SIZE;
		dst += TEE_AES_BLOCK_SIZE;
		num_blocks--;
	}
}

/* Overriding the __weak function */
void
internal_aes_gcm_update_payload_blocks(struct internal_aes_gcm_state *state,
				       const struct internal_aes_gcm_key *ek,
				       TEE_OperationMode mode, const void *src,
				       size_t num_blocks, void *dst)
{
	uint64_t dg[2] = { 0 };
	uint32_t vfp_state = 0;

	assert(!state->buf_pos && num_blocks);
	get_be_block(dg, state->hash_state);
	vfp_state = thread_kernel_enable_vfp();

	if (mode == TEE_MODE_ENCRYPT)
		encrypt_pl(state, ek, dg, src, num_blocks, dst);
	else
		decrypt_pl(state, ek, dg, src, num_blocks, dst);

	thread_kernel_disable_vfp(vfp_state);
	put_be_block(state->hash_state, dg);
}
#endif /*ARM32*/