// SPDX-License-Identifier: BSD-2-Clause
/*
 * Copyright (c) 2021, STMicroelectronics - All Rights Reserved
 */

#include <assert.h>
#include <crypto/crypto.h>
#include <crypto/crypto_impl.h>
#include <drvcrypt.h>
#include <drvcrypt_cipher.h>
#include <stdlib.h>
#include <string.h>
#include <tee_api_types.h>
#include <util.h>

#include "common.h"
#include "stm32_cryp.h"
#include "stm32_saes.h"

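/* Size in bytes of a triple-DES key: three 8-byte DES keys K1.K2.K3 */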
#define DES3_KEY_SIZE 24

struct cryp_ctx {
	struct stm32_cryp_context ctx;
	enum stm32_cryp_algo_mode algo;
};

struct saes_ctx {
	struct stm32_saes_context ctx;
	enum stm32_saes_chaining_mode algo;
	/* Fall back to a software implementation for 192-bit AES keys */
	bool use_fallback;
	struct crypto_cipher_ctx *fallback_ctx;
};

/*
 * Internal peripheral context
 * SAES and CRYP are registered under the same ID in the crypto framework.
 * Therefore, only one of them can be registered.
 */
union ip_ctx {
	struct saes_ctx saes;
	struct cryp_ctx cryp;
};

/* Internal peripheral cipher operations */
struct ip_cipher_ops {
	TEE_Result (*init)(union ip_ctx *ctx, bool is_decrypt,
			   const uint8_t *key, size_t key_len,
			   const uint8_t *iv, size_t iv_len);
	TEE_Result (*update)(union ip_ctx *ctx, bool last_block, uint8_t *src,
			     uint8_t *dst, size_t len);
	void (*final)(union ip_ctx *ctx);
	void (*copy_state)(union ip_ctx *dst_ctx, union ip_ctx *src_ctx);
};

struct stm32_cipher_ctx {
	struct crypto_cipher_ctx c_ctx;
	union ip_ctx ip_ctx;
	const struct ip_cipher_ops *ops;
};

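/*
 * Initialize a CRYP operation.
 * A 16-byte DES2 key (K1.K2) is expanded to the 24-byte triple-DES form
 * K1.K2.K1 before it is handed to the peripheral.
 */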
static TEE_Result cryp_init(union ip_ctx *ip_ctx, bool is_decrypt,
			    const uint8_t *key, size_t key_len,
			    const uint8_t *iv, size_t iv_len)
{
	uint8_t temp_key[DES3_KEY_SIZE] = { };

	if (!IS_ENABLED(CFG_STM32_CRYP))
		return TEE_ERROR_NOT_IMPLEMENTED;

	if (key_len == 16 &&
	    (ip_ctx->cryp.algo == STM32_CRYP_MODE_TDES_ECB ||
	     ip_ctx->cryp.algo == STM32_CRYP_MODE_TDES_CBC)) {
		/* Handle DES2 keys: build K = K1.K2.K1 */
		memcpy(temp_key, key, key_len);
		memcpy(temp_key + key_len, key, key_len / 2);
		key_len = DES3_KEY_SIZE;
		key = temp_key;
	}

	return stm32_cryp_init(&ip_ctx->cryp.ctx, is_decrypt, ip_ctx->cryp.algo,
			       key, key_len, iv, iv_len);
}

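/* Process one chunk of data with the CRYP peripheral */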
static TEE_Result cryp_update(union ip_ctx *ip_ctx, bool last_block,
			      uint8_t *src, uint8_t *dst, size_t len)
{
	if (!IS_ENABLED(CFG_STM32_CRYP))
		return TEE_ERROR_NOT_IMPLEMENTED;

	return stm32_cryp_update(&ip_ctx->cryp.ctx, last_block, src, dst, len);
}

static void cryp_copy_state(union ip_ctx *dst_ip_ctx, union ip_ctx *src_ip_ctx)
{
	memcpy(&dst_ip_ctx->cryp, &src_ip_ctx->cryp, sizeof(dst_ip_ctx->cryp));
}

static const struct ip_cipher_ops cryp_ops = {
	.init = cryp_init,
	.update = cryp_update,
	.copy_state = cryp_copy_state,
};

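/*
 * Initialize a SAES operation.
 * The SAES peripheral does not support 192-bit AES keys: when
 * CFG_STM32_SAES_SW_FALLBACK is enabled, such keys are routed to the
 * software cipher context allocated alongside this one.
 */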
static TEE_Result saes_init(union ip_ctx *ip_ctx, bool is_decrypt,
			    const uint8_t *key, size_t key_len,
			    const uint8_t *iv, size_t iv_len)
{
	enum stm32_saes_key_selection key_sel = STM32_SAES_KEY_SOFT;

	if (!IS_ENABLED(CFG_STM32_SAES))
		return TEE_ERROR_NOT_IMPLEMENTED;

	if (key_len == AES_KEYSIZE_192) {
		struct crypto_cipher_ctx *ctx = ip_ctx->saes.fallback_ctx;
		TEE_OperationMode mode = TEE_MODE_ILLEGAL_VALUE;
		TEE_Result res = TEE_ERROR_GENERIC;

		if (!IS_ENABLED(CFG_STM32_SAES_SW_FALLBACK)) {
			EMSG("STM32 SAES does not support 192-bit keys");

			return TEE_ERROR_NOT_IMPLEMENTED;
		}

		if (is_decrypt)
			mode = TEE_MODE_DECRYPT;
		else
			mode = TEE_MODE_ENCRYPT;

		res = ctx->ops->init(ctx, mode, key, key_len, NULL, 0, iv,
				     iv_len);
		if (res)
			return res;

		ip_ctx->saes.use_fallback = true;

		return TEE_SUCCESS;
	}

	ip_ctx->saes.use_fallback = false;

	return stm32_saes_init(&ip_ctx->saes.ctx, is_decrypt, ip_ctx->saes.algo,
			       key_sel, key, key_len, iv, iv_len);
}

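/* Process one chunk of data, either in hardware or through the fallback */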
static TEE_Result saes_update(union ip_ctx *ip_ctx, bool last_block,
			      uint8_t *src, uint8_t *dst, size_t len)
{
	if (!IS_ENABLED(CFG_STM32_SAES))
		return TEE_ERROR_NOT_IMPLEMENTED;

	if (ip_ctx->saes.use_fallback) {
		struct crypto_cipher_ctx *ctx = ip_ctx->saes.fallback_ctx;

		assert(IS_ENABLED(CFG_STM32_SAES_SW_FALLBACK));

		return ctx->ops->update(ctx, last_block, src, len, dst);
	}

	return stm32_saes_update(&ip_ctx->saes.ctx, last_block, src, dst, len);
}

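/* Finalize the operation: only the software fallback needs a final step */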
static void saes_final(union ip_ctx *ip_ctx)
{
	struct crypto_cipher_ctx *ctx = ip_ctx->saes.fallback_ctx;

	if (ip_ctx->saes.use_fallback) {
		assert(IS_ENABLED(CFG_STM32_SAES_SW_FALLBACK));
		ctx->ops->final(ctx);
	}
}

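/* Duplicate the SAES state, including the fallback context when in use */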
static void saes_copy_state(union ip_ctx *dst_ip_ctx, union ip_ctx *src_ip_ctx)
{
	struct saes_ctx *src_ctx = &src_ip_ctx->saes;
	struct crypto_cipher_ctx *fb_ctx = src_ctx->fallback_ctx;

	memcpy(&dst_ip_ctx->saes.ctx, &src_ctx->ctx, sizeof(src_ctx->ctx));

	dst_ip_ctx->saes.algo = src_ctx->algo;
	dst_ip_ctx->saes.use_fallback = src_ctx->use_fallback;

	if (src_ctx->use_fallback) {
		assert(IS_ENABLED(CFG_STM32_SAES_SW_FALLBACK));
		fb_ctx->ops->copy_state(dst_ip_ctx->saes.fallback_ctx, fb_ctx);
	}
}

static const struct ip_cipher_ops saes_ops = {
	.init = saes_init,
	.update = saes_update,
	.final = saes_final,
	.copy_state = saes_copy_state,
};

static struct stm32_cipher_ctx *
to_stm32_cipher_ctx(struct crypto_cipher_ctx *ctx)
{
	assert(ctx);

	return container_of(ctx, struct stm32_cipher_ctx, c_ctx);
}

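/*
 * drvcrypt hooks: unwrap the generic cipher context and dispatch to the
 * selected peripheral through its ip_cipher_ops table.
 */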
static TEE_Result stm32_cipher_initialize(struct drvcrypt_cipher_init *dinit)
{
	struct stm32_cipher_ctx *c = to_stm32_cipher_ctx(dinit->ctx);

	return c->ops->init(&c->ip_ctx, !dinit->encrypt, dinit->key1.data,
			    dinit->key1.length, dinit->iv.data,
			    dinit->iv.length);
}

static TEE_Result stm32_cipher_update(struct drvcrypt_cipher_update *dupdate)
{
	struct stm32_cipher_ctx *c = to_stm32_cipher_ctx(dupdate->ctx);
	size_t len = MIN(dupdate->src.length, dupdate->dst.length);

	return c->ops->update(&c->ip_ctx, dupdate->last, dupdate->src.data,
			      dupdate->dst.data, len);
}

static void stm32_cipher_final(void *ctx)
{
	struct stm32_cipher_ctx *c = to_stm32_cipher_ctx(ctx);

	if (c->ops->final)
		c->ops->final(&c->ip_ctx);
}

static void stm32_cipher_copy_state(void *dst_ctx, void *src_ctx)
{
	struct stm32_cipher_ctx *src_c = to_stm32_cipher_ctx(src_ctx);
	struct stm32_cipher_ctx *dst_c = to_stm32_cipher_ctx(dst_ctx);

	src_c->ops->copy_state(&dst_c->ip_ctx, &src_c->ip_ctx);
}

static TEE_Result alloc_cryp_ctx(void **ctx, enum stm32_cryp_algo_mode algo)
{
	struct stm32_cipher_ctx *c = calloc(1, sizeof(*c));

	if (!c)
		return TEE_ERROR_OUT_OF_MEMORY;

	FMSG("Using CRYP %d", algo);
	c->ip_ctx.cryp.algo = algo;
	c->ops = &cryp_ops;
	*ctx = &c->c_ctx;

	return TEE_SUCCESS;
}

static TEE_Result stm32_cryp_cipher_allocate(void **ctx, uint32_t algo)
{
	/* Convert the TEE_ALG_* identifier to the internal CRYP mode */
	switch (algo) {
	case TEE_ALG_DES_ECB_NOPAD:
		return alloc_cryp_ctx(ctx, STM32_CRYP_MODE_DES_ECB);
	case TEE_ALG_DES_CBC_NOPAD:
		return alloc_cryp_ctx(ctx, STM32_CRYP_MODE_DES_CBC);
	case TEE_ALG_DES3_ECB_NOPAD:
		return alloc_cryp_ctx(ctx, STM32_CRYP_MODE_TDES_ECB);
	case TEE_ALG_DES3_CBC_NOPAD:
		return alloc_cryp_ctx(ctx, STM32_CRYP_MODE_TDES_CBC);
	case TEE_ALG_AES_ECB_NOPAD:
		return alloc_cryp_ctx(ctx, STM32_CRYP_MODE_AES_ECB);
	case TEE_ALG_AES_CBC_NOPAD:
		return alloc_cryp_ctx(ctx, STM32_CRYP_MODE_AES_CBC);
	case TEE_ALG_AES_CTR:
		return alloc_cryp_ctx(ctx, STM32_CRYP_MODE_AES_CTR);
	default:
		return TEE_ERROR_NOT_IMPLEMENTED;
	}
}

static void stm32_cryp_cipher_free(void *ctx)
{
	struct stm32_cipher_ctx *c = to_stm32_cipher_ctx(ctx);

	free(c);
}

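/*
 * Allocate a SAES cipher context. When CFG_STM32_SAES_SW_FALLBACK is
 * enabled, a matching software context is allocated up front so that a
 * later init() with a 192-bit key cannot fail on allocation.
 */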
static TEE_Result stm32_saes_cipher_allocate(void **ctx, uint32_t algo)
{
	enum stm32_saes_chaining_mode saes_algo = STM32_SAES_MODE_ECB;
	struct crypto_cipher_ctx *fallback_ctx = NULL;
	struct stm32_cipher_ctx *saes_ctx = NULL;
	TEE_Result res = TEE_SUCCESS;

	switch (algo) {
	case TEE_ALG_AES_ECB_NOPAD:
		saes_algo = STM32_SAES_MODE_ECB;
		if (IS_ENABLED(CFG_STM32_SAES_SW_FALLBACK))
			res = crypto_aes_ecb_alloc_ctx(&fallback_ctx);
		break;
	case TEE_ALG_AES_CBC_NOPAD:
		saes_algo = STM32_SAES_MODE_CBC;
		if (IS_ENABLED(CFG_STM32_SAES_SW_FALLBACK))
			res = crypto_aes_cbc_alloc_ctx(&fallback_ctx);
		break;
	case TEE_ALG_AES_CTR:
		saes_algo = STM32_SAES_MODE_CTR;
		if (IS_ENABLED(CFG_STM32_SAES_SW_FALLBACK))
			res = crypto_aes_ctr_alloc_ctx(&fallback_ctx);
		break;
	default:
		return TEE_ERROR_NOT_IMPLEMENTED;
	}
	if (res)
		return res;

	saes_ctx = calloc(1, sizeof(*saes_ctx));
	if (!saes_ctx) {
		if (IS_ENABLED(CFG_STM32_SAES_SW_FALLBACK))
			fallback_ctx->ops->free_ctx(fallback_ctx);

		return TEE_ERROR_OUT_OF_MEMORY;
	}

	FMSG("Using SAES %d", saes_algo);
	saes_ctx->ip_ctx.saes.algo = saes_algo;
	saes_ctx->ops = &saes_ops;
	saes_ctx->ip_ctx.saes.fallback_ctx = fallback_ctx;
	*ctx = &saes_ctx->c_ctx;

	return TEE_SUCCESS;
}

static void stm32_saes_cipher_free(void *ctx)
{
	struct stm32_cipher_ctx *c = to_stm32_cipher_ctx(ctx);

	if (IS_ENABLED(CFG_STM32_SAES_SW_FALLBACK)) {
		struct crypto_cipher_ctx *fb_ctx = c->ip_ctx.saes.fallback_ctx;

		fb_ctx->ops->free_ctx(fb_ctx);
	}

	free(c);
}

static struct drvcrypt_cipher driver_cipher_cryp = {
	.alloc_ctx = stm32_cryp_cipher_allocate,
	.free_ctx = stm32_cryp_cipher_free,
	.init = stm32_cipher_initialize,
	.update = stm32_cipher_update,
	.final = stm32_cipher_final,
	.copy_state = stm32_cipher_copy_state,
};

static struct drvcrypt_cipher driver_cipher_saes = {
	.alloc_ctx = stm32_saes_cipher_allocate,
	.free_ctx = stm32_saes_cipher_free,
	.init = stm32_cipher_initialize,
	.update = stm32_cipher_update,
	.final = stm32_cipher_final,
	.copy_state = stm32_cipher_copy_state,
};

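/*
 * Register the given peripheral (SAES or CRYP) as the drvcrypt cipher
 * provider. Only one cipher driver may be registered at a time.
 */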
TEE_Result stm32_register_cipher(enum stm32_cipher_ip_id cipher_ip)
{
	void *op = drvcrypt_get_ops(CRYPTO_CIPHER);

	if (op) {
		EMSG("%s already registered for CRYPTO_CIPHER",
		     op == &driver_cipher_cryp ? "CRYP peripheral" :
		     op == &driver_cipher_saes ? "SAES peripheral" :
		     "Other cipher driver");
		return TEE_ERROR_GENERIC;
	}

	if (cipher_ip == SAES_IP)
		return drvcrypt_register_cipher(&driver_cipher_saes);
	else if (cipher_ip == CRYP_IP)
		return drvcrypt_register_cipher(&driver_cipher_cryp);
	else
		return TEE_ERROR_BAD_PARAMETERS;
}
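
/*
 * Illustrative call from platform initialization code (a sketch; the exact
 * probe hook and error handling are platform-specific assumptions):
 *
 *	if (stm32_register_cipher(SAES_IP))
 *		panic("Failed to register SAES cipher driver");
 */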