xref: /optee_os/core/arch/arm/crypto/aes-gcm-ce.c (revision 424cb3863ab707e652dc5c2f54c1d78686c45fa2)
1 /*
2  * Copyright (c) 2017, Linaro Limited
3  * All rights reserved.
4  *
5  * SPDX-License-Identifier: BSD-2-Clause
6  */
7 
8 #include <crypto/internal_aes-gcm.h>
9 #include <crypto/ghash-ce-core.h>
10 #include <io.h>
11 #include <kernel/panic.h>
12 #include <kernel/thread.h>
13 #include <tomcrypt.h>
14 #include <types_ext.h>
15 
/*
 * Load a 16-byte big-endian block from @src into two native uint64_t
 * limbs, with the halves swapped: dst[1] receives the first eight bytes
 * and dst[0] the last eight. This is the limb order consumed by the
 * pmull_* routines used elsewhere in this file.
 */
static void get_be_block(void *dst, const void *src)
{
	const uint8_t *s = src;
	uint64_t *d = dst;

	d[0] = get_be64(s + 8);
	d[1] = get_be64(s);
}
23 
/*
 * Store two uint64_t limbs from @src as a 16-byte big-endian block at
 * @dst, undoing the swapped limb order produced by get_be_block():
 * src[1] becomes the first eight bytes, src[0] the last eight.
 */
static void put_be_block(void *dst, const void *src)
{
	uint8_t *d = dst;
	const uint64_t *s = src;

	put_be64(d, s[1]);
	put_be64(d + 8, s[0]);
}
31 
/*
 * Initialize the AES-GCM key material for @ctx: expand the AES
 * encryption round keys into ctx->enc_key/ctx->rounds, then derive the
 * GHASH hash subkey into ctx->hash_subkey.
 *
 * Returns TEE_SUCCESS, or the error from the key expansion (e.g.
 * TEE_ERROR_BAD_PARAMETERS for an unsupported key length).
 */
TEE_Result internal_aes_gcm_set_key(struct internal_aes_gcm_ctx *ctx,
				    const void *key, size_t key_len)
{
	TEE_Result res;
	uint64_t k[2];
	uint64_t a;
	uint64_t b;

	res = internal_aes_gcm_expand_enc_key(key, key_len, ctx->enc_key,
					      &ctx->rounds);
	if (res)
		return res;

	/*
	 * The hash subkey is the encryption of ctx->ctr.
	 * NOTE(review): GCM defines H = E_K(0^128), so this presumably
	 * relies on ctx->ctr still being the all-zero block at this
	 * point — confirm callers zero the context before set_key.
	 */
	internal_aes_gcm_encrypt_block(ctx, ctx->ctr, ctx->hash_subkey);

	/* Store hash key in little endian and multiply by 'x' */
	b = get_be64(ctx->hash_subkey);
	a = get_be64(ctx->hash_subkey + 8);
	k[0] = (a << 1) | (b >> 63);
	k[1] = (b << 1) | (a >> 63);
	if (b >> 63)
		k[1] ^= 0xc200000000000000UL;	/* GF(2^128) reduction term */

	/* Overwrite H with the pre-multiplied form the pmull code expects */
	memcpy(ctx->hash_subkey, k, TEE_AES_BLOCK_SIZE);
	return TEE_SUCCESS;
}
58 
59 void internal_aes_gcm_ghash_update(struct internal_aes_gcm_ctx *ctx,
60 				   const void *head, const void *data,
61 				 size_t num_blocks)
62 {
63 	uint32_t vfp_state;
64 	uint64_t dg[2];
65 	uint64_t *k;
66 
67 	get_be_block(dg, ctx->hash_state);
68 
69 	k = (void *)ctx->hash_subkey;
70 
71 	vfp_state = thread_kernel_enable_vfp();
72 
73 #ifdef CFG_HWSUPP_PMULL
74 	pmull_ghash_update_p64(num_blocks, dg, data, k, head);
75 #else
76 	pmull_ghash_update_p8(num_blocks, dg, data, k, head);
77 #endif
78 	thread_kernel_disable_vfp(vfp_state);
79 
80 	put_be_block(ctx->hash_state, dg);
81 }
82 
83 #ifdef ARM64
/*
 * Rotate a 32-bit word right by @shift bits.
 *
 * Fix: the original computed word << (32 - shift), which is undefined
 * behaviour for shift == 0 (a shift by the full width of the type).
 * The shift amount is now reduced mod 32 and the zero case returns the
 * word unchanged; results for all previously-defined inputs (1..31)
 * are identical.
 */
static uint32_t ror32(uint32_t word, unsigned int shift)
{
	shift &= 31;
	if (!shift)
		return word;
	return (word >> shift) | (word << (32 - shift));
}
88 
89 TEE_Result internal_aes_gcm_expand_enc_key(const void *key, size_t key_len,
90 					   uint64_t *enc_key,
91 					   unsigned int *rounds)
92 {
93 	/* The AES key schedule round constants */
94 	static uint8_t const rcon[] = {
95 		0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36,
96 	};
97 	uint32_t vfp_state;
98 	uint32_t kwords = key_len / sizeof(uint32_t);
99 	uint32_t *ek = (uint32_t *)enc_key;
100 	unsigned int i;
101 
102 	if (key_len != 16 && keylen != 24 && keylen != 32)
103 		return TEE_ERROR_BAD_PARAMETERS;
104 
105 	memcpy(enc_key, key, key_len);
106 	/*
107 	 * # of rounds specified by AES:
108 	 * 128 bit key          10 rounds
109 	 * 192 bit key          12 rounds
110 	 * 256 bit key          14 rounds
111 	 * => n byte key        => 6 + (n/4) rounds
112 	 */
113 	*rounds = 6 + key_len / 4;
114 
115 	vfp_state = thread_kernel_enable_vfp();
116 	for (i = 0; i < sizeof(rcon); i++) {
117 		uint32_t *rki = ek + (i * kwords);
118 		uint32_t *rko = rki + kwords;
119 
120 		rko[0] = ror32(pmull_gcm_aes_sub(rki[kwords - 1]), 8) ^
121 			 rcon[i] ^ rki[0];
122 		rko[1] = rko[0] ^ rki[1];
123 		rko[2] = rko[1] ^ rki[2];
124 		rko[3] = rko[2] ^ rki[3];
125 
126 		if (key_len == 24) {
127 			if (i >= 7)
128 				break;
129 			rko[4] = rko[3] ^ rki[4];
130 			rko[5] = rko[4] ^ rki[5];
131 		} else if (key_len == 32) {
132 			if (i >= 6)
133 				break;
134 			rko[4] = pmull_gcm_aes_sub(rko[3]) ^ rki[4];
135 			rko[5] = rko[4] ^ rki[5];
136 			rko[6] = rko[5] ^ rki[6];
137 			rko[7] = rko[6] ^ rki[7];
138 		}
139 	}
140 
141 	thread_kernel_disable_vfp(vfp_state);
142 	return TEE_SUCCESS;
143 }
144 
145 void internal_aes_gcm_encrypt_block(struct internal_aes_gcm_ctx *ctx,
146 				    const void *src, void *dst)
147 {
148 	uint32_t vfp_state;
149 
150 	vfp_state = thread_kernel_enable_vfp();
151 
152 	pmull_gcm_load_round_keys(ctx->enc_key, ctx->rounds);
153 	pmull_gcm_encrypt_block(dst, src, ctx->rounds);
154 
155 	thread_kernel_disable_vfp(vfp_state);
156 }
157 
158 void
159 internal_aes_gcm_update_payload_block_aligned(struct internal_aes_gcm_ctx *ctx,
160 					      TEE_OperationMode m,
161 					      const void *src,
162 					      size_t num_blocks, void *dst)
163 {
164 	uint32_t vfp_state;
165 	uint64_t dg[2];
166 	uint64_t ctr[2];
167 	uint64_t *k;
168 
169 	get_be_block(dg, ctx->hash_state);
170 	get_be_block(ctr, ctx->ctr);
171 
172 	k = (void *)ctx->hash_subkey;
173 
174 	vfp_state = thread_kernel_enable_vfp();
175 
176 	pmull_gcm_load_round_keys(ctx->enc_key, ctx->rounds);
177 
178 	if (m == TEE_MODE_ENCRYPT)
179 		pmull_gcm_encrypt(num_blocks, dg, dst, src, k, ctr, ctx->rounds,
180 				  ctx->buf_cryp);
181 	else
182 		pmull_gcm_decrypt(num_blocks, dg, dst, src, k, ctr,
183 				  ctx->rounds);
184 
185 	thread_kernel_disable_vfp(vfp_state);
186 
187 	put_be_block(ctx->ctr, ctr);
188 	put_be_block(ctx->hash_state, dg);
189 }
190 #endif /*ARM64*/
191