// SPDX-License-Identifier: BSD-2-Clause
/*
 * Copyright (c) 2017-2020, Linaro Limited
 */

#include <assert.h>
#include <crypto/crypto_accel.h>
#include <crypto/crypto.h>
#include <crypto/ghash-ce-core.h>
#include <crypto/internal_aes-gcm.h>
#include <io.h>
#include <kernel/panic.h>
#include <kernel/thread.h>
#include <string.h>
#include <types_ext.h>

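/*
 * The 128-bit GHASH state is kept big-endian in memory. These helpers
 * convert it to and from the pair of native uint64_t words the PMULL
 * assembly operates on, with dg[0] holding the low and dg[1] the high
 * 64 bits.
 */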
static void get_be_block(void *dst, const void *src)
{
	uint64_t *d = dst;

	d[1] = get_be64(src);
	d[0] = get_be64((const uint8_t *)src + 8);
}

static void put_be_block(void *dst, const void *src)
{
	const uint64_t *s = src;

	put_be64(dst, s[1]);
	put_be64((uint8_t *)dst + 8, s[0]);
}

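/*
 * Convert the hash key into the pre-shifted format the PMULL routines
 * expect: multiply the key by 'x' in GF(2^128), folding a carry out of
 * the top bit with 0xc2...00, the reflected encoding of the GHASH
 * reduction polynomial x^128 + x^7 + x^2 + x + 1.
 */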
static void ghash_reflect(uint64_t h[2], const uint64_t k[2])
{
	uint64_t b = get_be64(k);
	uint64_t a = get_be64(k + 1);

	h[0] = (a << 1) | (b >> 63);
	h[1] = (b << 1) | (a >> 63);
	if (b >> 63)
		h[1] ^= 0xc200000000000000UL;
}

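/*
 * Precomputes the GHASH subkey H and its powers H^2, H^3 and H^4 in
 * reflected format; the 4-way PMULL GHASH routines consume all four.
 */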
void internal_aes_gcm_set_key(struct internal_aes_gcm_state *state,
			      const struct internal_aes_gcm_key *enc_key)
{
	uint64_t k[2] = { 0 };
	uint64_t h[2] = { 0 };

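	/* H = E(K, 0): state->ctr is expected to still be all-zero here */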
	crypto_aes_enc_block(enc_key->data, sizeof(enc_key->data),
			     enc_key->rounds, state->ctr, k);

	ghash_reflect(state->ghash_key.h, k);

	internal_aes_gcm_gfmul(k, k, h);
	ghash_reflect(state->ghash_key.h2, h);

	internal_aes_gcm_gfmul(k, h, h);
	ghash_reflect(state->ghash_key.h3, h);

	internal_aes_gcm_gfmul(k, h, h);
	ghash_reflect(state->ghash_key.h4, h);
}

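/*
 * Folds @num_blocks full blocks from @data, optionally preceded by the
 * single block at @head, into the running GHASH state. The PMULL
 * instructions use the SIMD register file, so VFP is enabled around
 * the assembly call.
 */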
void internal_aes_gcm_ghash_update(struct internal_aes_gcm_state *state,
				   const void *head, const void *data,
				   size_t num_blocks)
{
	uint32_t vfp_state;
	uint64_t dg[2];

	get_be_block(dg, state->hash_state);

	vfp_state = thread_kernel_enable_vfp();

#ifdef CFG_HWSUPP_PMULT_64
	pmull_ghash_update_p64(num_blocks, dg, data, &state->ghash_key, head);
#else
	pmull_ghash_update_p8(num_blocks, dg, data, &state->ghash_key, head);
#endif
	thread_kernel_disable_vfp(vfp_state);

	put_be_block(state->hash_state, dg);
}

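/*
 * Only the encryption key schedule is needed: GCM runs AES in counter
 * mode, so both encryption and decryption use the AES encryption
 * direction (hence NULL for the decryption keys below).
 */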
TEE_Result internal_aes_gcm_expand_enc_key(const void *key, size_t key_len,
					   struct internal_aes_gcm_key *enc_key)
{
	return crypto_accel_aes_expand_keys(key, key_len, enc_key->data, NULL,
					    sizeof(enc_key->data),
					    &enc_key->rounds);
}

#ifdef ARM64
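/*
 * En/decrypts and GHASHes an even number of payload blocks using the
 * interleaved 2-block PMULL routines.
 */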
static void update_payload_2block(struct internal_aes_gcm_state *state,
				  const struct internal_aes_gcm_key *ek,
				  TEE_OperationMode mode, const void *src,
				  size_t num_blocks, void *dst)
{
	uint32_t vfp_state;
	uint64_t dg[2];

	assert(num_blocks && !(num_blocks % 2));

	get_be_block(dg, state->hash_state);

	vfp_state = thread_kernel_enable_vfp();

	if (mode == TEE_MODE_ENCRYPT) {
		uint8_t ks[sizeof(state->buf_cryp) * 2] = { 0 };

		/*
		 * ks holds the encrypted counters of the next two blocks.
		 * pmull_gcm_encrypt() uses this to encrypt the first two
		 * blocks. When pmull_gcm_encrypt() returns, ks has been
		 * updated with the encrypted counters of the next two
		 * blocks. Since we only keep the first of those blocks,
		 * block number two is thrown away and the counter is
		 * decreased by one to compensate.
		 */
		memcpy(ks, state->buf_cryp, sizeof(state->buf_cryp));

		pmull_gcm_load_round_keys(ek->data, ek->rounds);
		pmull_gcm_encrypt_block(ks + sizeof(state->buf_cryp),
					(uint8_t *)state->ctr, ek->rounds);
		internal_aes_gcm_inc_ctr(state);
		pmull_gcm_encrypt(num_blocks, dg, dst, src, &state->ghash_key,
				  state->ctr, NULL, ek->rounds, ks);
		memcpy(state->buf_cryp, ks, TEE_AES_BLOCK_SIZE);
		internal_aes_gcm_dec_ctr(state);
	} else {
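		/*
		 * Decryption feeds the ciphertext, which is available up
		 * front, straight into GHASH, so no keystream has to be
		 * carried between calls.
		 */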
		pmull_gcm_decrypt(num_blocks, dg, dst, src, &state->ghash_key,
				  state->ctr, ek->data, ek->rounds);
	}

	thread_kernel_disable_vfp(vfp_state);

	put_be_block(state->hash_state, dg);
}

/* Overriding the __weak function */
void
internal_aes_gcm_update_payload_blocks(struct internal_aes_gcm_state *state,
				       const struct internal_aes_gcm_key *ek,
				       TEE_OperationMode mode, const void *src,
				       size_t num_blocks, void *dst)
{
	size_t nb = ROUNDDOWN(num_blocks, 2);

	/*
	 * pmull_gcm_encrypt() and pmull_gcm_decrypt() can only process
	 * an even number of blocks.
	 */
	if (nb)
		update_payload_2block(state, ek, mode, src, nb, dst);

	if (nb != num_blocks) {
		/* There's a final block */
		const void *s = (const uint8_t *)src + nb * TEE_AES_BLOCK_SIZE;
		void *d = (uint8_t *)dst + nb * TEE_AES_BLOCK_SIZE;
		uint64_t tmp[2] = { 0 };

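		/*
		 * The single-block helpers access the source as 64-bit
		 * words; copy an insufficiently aligned source to an
		 * aligned bounce buffer first.
		 */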
		if (!ALIGNMENT_IS_OK(s, uint64_t)) {
			memcpy(tmp, s, sizeof(tmp));
			s = tmp;
		}

		if (mode == TEE_MODE_ENCRYPT)
			internal_aes_gcm_encrypt_block(state, ek, s, d);
		else
			internal_aes_gcm_decrypt_block(state, ek, s, d);
	}
}
#endif /*ARM64*/