1 // SPDX-License-Identifier: GPL-2.0
2 /*
3 * Copyright (c) 2019 Fuzhou Rockchip Electronics Co., Ltd
4 */
5
6 #include <crypto.h>
7 #include <keylad.h>
8
/* SHA-1 digest of a zero-length message (used by crypto_sha_final()) */
static const u8 null_hash_sha1_value[] = {
	0xda, 0x39, 0xa3, 0xee, 0x5e, 0x6b, 0x4b, 0x0d,
	0x32, 0x55, 0xbf, 0xef, 0x95, 0x60, 0x18, 0x90,
	0xaf, 0xd8, 0x07, 0x09
};
14
/* MD5 digest of a zero-length message (used by crypto_sha_final()) */
static const u8 null_hash_md5_value[] = {
	0xd4, 0x1d, 0x8c, 0xd9, 0x8f, 0x00, 0xb2, 0x04,
	0xe9, 0x80, 0x09, 0x98, 0xec, 0xf8, 0x42, 0x7e
};
19
/* SHA-256 digest of a zero-length message (used by crypto_sha_final()) */
static const u8 null_hash_sha256_value[] = {
	0xe3, 0xb0, 0xc4, 0x42, 0x98, 0xfc, 0x1c, 0x14,
	0x9a, 0xfb, 0xf4, 0xc8, 0x99, 0x6f, 0xb9, 0x24,
	0x27, 0xae, 0x41, 0xe4, 0x64, 0x9b, 0x93, 0x4c,
	0xa4, 0x95, 0x99, 0x1b, 0x78, 0x52, 0xb8, 0x55
};
26
/* SHA-512 digest of a zero-length message (used by crypto_sha_final()) */
static const u8 null_hash_sha512_value[] = {
	0xcf, 0x83, 0xe1, 0x35, 0x7e, 0xef, 0xb8, 0xbd,
	0xf1, 0x54, 0x28, 0x50, 0xd6, 0x6d, 0x80, 0x07,
	0xd6, 0x20, 0xe4, 0x05, 0x0b, 0x57, 0x15, 0xdc,
	0x83, 0xf4, 0xa9, 0x21, 0xd3, 0x6c, 0xe9, 0xce,
	0x47, 0xd0, 0xd1, 0x3c, 0x5d, 0x85, 0xf2, 0xb0,
	0xff, 0x83, 0x18, 0xd2, 0x87, 0x7e, 0xec, 0x2f,
	0x63, 0xb9, 0x31, 0xbd, 0x47, 0x41, 0x7a, 0x81,
	0xa5, 0x38, 0x32, 0x7a, 0xf9, 0x27, 0xda, 0x3e
};
37
38 const static u8 null_hash_sm3_value[] = {
39 0x1a, 0xb2, 0x1d, 0x83, 0x55, 0xcf, 0xa1, 0x7f,
40 0x8e, 0x61, 0x19, 0x48, 0x31, 0xe8, 0x1a, 0x8f,
41 0x22, 0xbe, 0xc8, 0xc7, 0x28, 0xfe, 0xfb, 0x74,
42 0x7e, 0xd0, 0x35, 0xeb, 0x50, 0x82, 0xaa, 0x2b
43 };
44
/**
 * crypto_algo_nbits() - get the bit width of an algorithm's output/key
 *
 * @algo: one of the CRYPTO_* algorithm identifiers
 *
 * Return: digest size in bits for hash/HMAC algorithms, key size in
 * bits for RSA/SM2/ECC, or 0 (with a message) for an unknown @algo.
 */
u32 crypto_algo_nbits(u32 algo)
{
	/* Cases producing the same width are grouped together */
	switch (algo) {
	case CRYPTO_MD5:
	case CRYPTO_HMAC_MD5:
		return 128;
	case CRYPTO_SHA1:
	case CRYPTO_HMAC_SHA1:
		return 160;
	case CRYPTO_ECC_192R1:
		return 192;
	case CRYPTO_ECC_224R1:
		return 224;
	case CRYPTO_SHA256:
	case CRYPTO_HMAC_SHA256:
	case CRYPTO_SM3:
	case CRYPTO_HMAC_SM3:
	case CRYPTO_SM2:
	case CRYPTO_ECC_256R1:
		return 256;
	case CRYPTO_SHA512:
	case CRYPTO_HMAC_SHA512:
	case CRYPTO_RSA512:
		return 512;
	case CRYPTO_RSA1024:
		return 1024;
	case CRYPTO_RSA2048:
		return 2048;
	case CRYPTO_RSA3072:
		return 3072;
	case CRYPTO_RSA4096:
		return 4096;
	}

	printf("Unknown crypto algorithm: 0x%x\n", algo);

	return 0;
}
87
/**
 * crypto_get_device() - find a crypto device providing all given capabilities
 *
 * @capability: bitmask of required capabilities; a device matches only if
 *              it advertises every bit in the mask
 *
 * Walks the crypto uclass and returns a matching device, preferring a
 * secure device in SPL builds and a non-secure one otherwise. The first
 * match is kept as a fallback if no preference-matching device exists.
 *
 * Return: matching device, or NULL if none found.
 */
struct udevice *crypto_get_device(u32 capability)
{
	const struct dm_crypto_ops *ops;
	struct udevice *match = NULL;
	struct udevice *dev;
	struct uclass *uc;
	bool want_secure;
	bool secure;

#if defined(CONFIG_SPL_BUILD)
	want_secure = true;
#else
	want_secure = false;
#endif

	if (uclass_get(UCLASS_CRYPTO, &uc))
		return NULL;

	for (uclass_first_device(UCLASS_CRYPTO, &dev); dev;
	     uclass_next_device(&dev)) {
		ops = device_get_ops(dev);
		if (!ops || !ops->capability)
			continue;

		secure = ops->is_secure ? ops->is_secure(dev) : false;

		/* Require every requested capability bit */
		if ((ops->capability(dev) & capability) != capability)
			continue;

		/* Take the first match; upgrade when preference matches */
		if (!match || secure == want_secure)
			match = dev;
	}

	return match;
}
132
/**
 * crypto_sha_init() - start a hash operation
 *
 * @dev: crypto device
 * @ctx: hash context (algo and total length)
 *
 * Return: 0 on success (including the zero-length case, which is
 * satisfied later by crypto_sha_final()), -ENOSYS if unsupported.
 */
int crypto_sha_init(struct udevice *dev, sha_context *ctx)
{
	const struct dm_crypto_ops *ops;

	/* Zero-length input: crypto_sha_final() emits the null digest */
	if (ctx && ctx->length == 0)
		return 0;

	ops = device_get_ops(dev);
	if (!ops || !ops->sha_init)
		return -ENOSYS;

	return ops->sha_init(dev, ctx);
}
145
/**
 * crypto_sha_update() - feed data into an ongoing hash operation
 *
 * @dev:   crypto device
 * @input: data to hash
 * @len:   data length in bytes; 0 is a no-op
 *
 * Return: 0 on success, -ENOSYS if unsupported.
 */
int crypto_sha_update(struct udevice *dev, u32 *input, u32 len)
{
	const struct dm_crypto_ops *ops;

	if (len == 0)
		return 0;

	ops = device_get_ops(dev);
	if (!ops || !ops->sha_update)
		return -ENOSYS;

	return ops->sha_update(dev, input, len);
}
158
/**
 * crypto_sha_final() - finish a hash operation and read out the digest
 *
 * @dev:    crypto device
 * @ctx:    hash context (algo and total length)
 * @output: buffer receiving the digest
 *
 * A zero-length hash is answered from the precomputed null-digest
 * tables without touching the hardware.
 *
 * Return: 0 on success, -EINVAL for a zero-length hash of an unknown
 * algorithm, -ENOSYS if unsupported.
 */
int crypto_sha_final(struct udevice *dev, sha_context *ctx, u8 *output)
{
	const struct dm_crypto_ops *ops = device_get_ops(dev);

	if (ctx && ctx->length == 0 && output) {
		const u8 *digest;
		u32 digest_len;

		if (ctx->algo == CRYPTO_MD5) {
			digest = null_hash_md5_value;
			digest_len = sizeof(null_hash_md5_value);
		} else if (ctx->algo == CRYPTO_SHA1) {
			digest = null_hash_sha1_value;
			digest_len = sizeof(null_hash_sha1_value);
		} else if (ctx->algo == CRYPTO_SHA256) {
			digest = null_hash_sha256_value;
			digest_len = sizeof(null_hash_sha256_value);
		} else if (ctx->algo == CRYPTO_SHA512) {
			digest = null_hash_sha512_value;
			digest_len = sizeof(null_hash_sha512_value);
		} else if (ctx->algo == CRYPTO_SM3) {
			digest = null_hash_sm3_value;
			digest_len = sizeof(null_hash_sm3_value);
		} else {
			return -EINVAL;
		}

		memcpy(output, digest, digest_len);

		return 0;
	}

	if (!ops || !ops->sha_final)
		return -ENOSYS;

	return ops->sha_final(dev, ctx, output);
}
201
/**
 * crypto_hmac_init() - start an HMAC operation
 *
 * @dev:     crypto device
 * @ctx:     hash context (algo and total length)
 * @key:     HMAC key
 * @key_len: key length in bytes
 *
 * Return: 0 on success, -EINVAL for zero-length data (unlike plain
 * hashing, no null-HMAC shortcut exists), -ENOSYS if unsupported.
 */
int crypto_hmac_init(struct udevice *dev, sha_context *ctx,
		     u8 *key, u32 key_len)
{
	const struct dm_crypto_ops *ops;

	/* Zero-length HMAC is rejected rather than special-cased */
	if (ctx && ctx->length == 0)
		return -EINVAL;

	ops = device_get_ops(dev);
	if (!ops || !ops->hmac_init)
		return -ENOSYS;

	return ops->hmac_init(dev, ctx, key, key_len);
}
215
/**
 * crypto_hmac_update() - feed data into an ongoing HMAC operation
 *
 * @dev:   crypto device
 * @input: data to authenticate
 * @len:   data length in bytes; 0 is a no-op
 *
 * Return: 0 on success, -ENOSYS if unsupported.
 */
int crypto_hmac_update(struct udevice *dev, u32 *input, u32 len)
{
	const struct dm_crypto_ops *ops;

	if (len == 0)
		return 0;

	ops = device_get_ops(dev);
	if (!ops || !ops->hmac_update)
		return -ENOSYS;

	return ops->hmac_update(dev, input, len);
}
228
/**
 * crypto_hmac_final() - finish an HMAC operation and read out the tag
 *
 * @dev:    crypto device
 * @ctx:    hash context
 * @output: buffer receiving the HMAC value
 *
 * Return: 0 on success, -ENOSYS if unsupported.
 */
int crypto_hmac_final(struct udevice *dev, sha_context *ctx, u8 *output)
{
	const struct dm_crypto_ops *ops = device_get_ops(dev);

	return (ops && ops->hmac_final) ?
	       ops->hmac_final(dev, ctx, output) : -ENOSYS;
}
238
/**
 * crypto_sha_csum() - one-shot hash of a single contiguous buffer
 *
 * @dev:       crypto device
 * @ctx:       hash context (algo and total length)
 * @input:     data to hash
 * @input_len: data length in bytes
 * @output:    buffer receiving the digest
 *
 * Convenience wrapper: init, update, final.
 *
 * Return: 0 on success, negative error code from any stage.
 */
int crypto_sha_csum(struct udevice *dev, sha_context *ctx,
		    char *input, u32 input_len, u8 *output)
{
	int err;

	err = crypto_sha_init(dev, ctx);
	if (!err)
		err = crypto_sha_update(dev, (u32 *)input, input_len);
	if (!err)
		err = crypto_sha_final(dev, ctx, output);

	return err;
}
256
/**
 * crypto_sha_regions_csum() - hash several non-contiguous memory regions
 *
 * @dev:          crypto device
 * @ctx:          hash context; its length field is (re)computed here
 * @region:       array of regions to hash, in order
 * @region_count: number of entries in @region
 * @output:       buffer receiving the digest
 *
 * Return: 0 on success, negative error code from any stage.
 */
int crypto_sha_regions_csum(struct udevice *dev, sha_context *ctx,
			    const struct image_region region[],
			    int region_count, u8 *output)
{
	int idx, err;

	/* Total length must be known up front for init */
	ctx->length = 0;
	for (idx = 0; idx < region_count; idx++)
		ctx->length += region[idx].size;

	err = crypto_sha_init(dev, ctx);
	if (err)
		return err;

	for (idx = 0; idx < region_count; idx++) {
		err = crypto_sha_update(dev, (void *)region[idx].data,
					region[idx].size);
		if (err)
			return err;
	}

	return crypto_sha_final(dev, ctx, output);
}
280
/**
 * crypto_rsa_verify() - RSA public-key operation on a signature
 *
 * @dev:    crypto device
 * @ctx:    RSA key (modulus n and exponent e required)
 * @sign:   signature to process
 * @output: buffer receiving the result
 *
 * Return: 0 on success, -ENOSYS if unsupported, -EINVAL on bad args.
 */
int crypto_rsa_verify(struct udevice *dev, rsa_key *ctx, u8 *sign, u8 *output)
{
	const struct dm_crypto_ops *ops = device_get_ops(dev);
	bool args_ok;

	if (!ops || !ops->rsa_verify)
		return -ENOSYS;

	args_ok = ctx && ctx->n && ctx->e && sign && output;
	if (!args_ok)
		return -EINVAL;

	return ops->rsa_verify(dev, ctx, sign, output);
}
293
/**
 * crypto_ec_verify() - elliptic-curve signature verification
 *
 * @dev:      crypto device
 * @ctx:      EC public key (x and y coordinates required)
 * @hash:     message digest to verify against
 * @hash_len: digest length in bytes (must be non-zero)
 * @sign:     signature to verify
 *
 * Return: 0 on success, -ENOSYS if unsupported, -EINVAL on bad args.
 */
int crypto_ec_verify(struct udevice *dev, ec_key *ctx, u8 *hash, u32 hash_len, u8 *sign)
{
	const struct dm_crypto_ops *ops = device_get_ops(dev);

	if (!ops || !ops->ec_verify)
		return -ENOSYS;

	/* Fixed copy-paste slip: '!ctx->y' was checked twice */
	if (!ctx || !ctx->x || !ctx->y || !hash || hash_len == 0 || !sign)
		return -EINVAL;

	return ops->ec_verify(dev, ctx, hash, hash_len, sign);
}
306
/**
 * crypto_cipher() - symmetric encrypt/decrypt
 *
 * @dev: crypto device
 * @ctx: cipher context (key and key_len required)
 * @in:  input data
 * @out: output buffer
 * @len: data length in bytes
 * @enc: true to encrypt, false to decrypt
 *
 * Return: 0 on success, -ENOSYS if unsupported, -EINVAL on bad args.
 */
int crypto_cipher(struct udevice *dev, cipher_context *ctx,
		  const u8 *in, u8 *out, u32 len, bool enc)
{
	const struct dm_crypto_ops *ops = device_get_ops(dev);
	bool key_ok;

	if (!ops || !ops->cipher_crypt)
		return -ENOSYS;

	key_ok = ctx && ctx->key && ctx->key_len != 0;
	if (!key_ok)
		return -EINVAL;

	return ops->cipher_crypt(dev, ctx, in, out, len, enc);
}
320
/**
 * crypto_mac() - compute a cipher-based MAC (e.g. CMAC/CBC-MAC)
 *
 * @dev: crypto device
 * @ctx: cipher context (key and key_len required)
 * @in:  input data
 * @len: data length in bytes
 * @tag: buffer receiving the MAC
 *
 * Return: 0 on success, -ENOSYS if unsupported, -EINVAL on bad args.
 */
int crypto_mac(struct udevice *dev, cipher_context *ctx,
	       const u8 *in, u32 len, u8 *tag)
{
	const struct dm_crypto_ops *ops = device_get_ops(dev);
	bool key_ok;

	if (!ops || !ops->cipher_mac)
		return -ENOSYS;

	key_ok = ctx && ctx->key && ctx->key_len != 0;
	if (!key_ok)
		return -EINVAL;

	return ops->cipher_mac(dev, ctx, in, len, tag);
}
334
/**
 * crypto_ae() - authenticated encryption (e.g. GCM/CCM)
 *
 * @dev:     crypto device
 * @ctx:     cipher context (key and key_len required)
 * @in:      input data
 * @len:     data length in bytes
 * @aad:     additional authenticated data
 * @aad_len: AAD length in bytes
 * @out:     output buffer
 * @tag:     buffer receiving the authentication tag
 *
 * Return: 0 on success, -ENOSYS if unsupported, -EINVAL on bad args.
 */
int crypto_ae(struct udevice *dev, cipher_context *ctx,
	      const u8 *in, u32 len, const u8 *aad, u32 aad_len,
	      u8 *out, u8 *tag)
{
	const struct dm_crypto_ops *ops = device_get_ops(dev);
	bool key_ok;

	if (!ops || !ops->cipher_ae)
		return -ENOSYS;

	key_ok = ctx && ctx->key && ctx->key_len != 0;
	if (!key_ok)
		return -EINVAL;

	return ops->cipher_ae(dev, ctx, in, len, aad, aad_len, out, tag);
}
349
crypto_fw_cipher(struct udevice * dev,cipher_fw_context * ctx,const u8 * in,u8 * out,u32 len,bool enc)350 int crypto_fw_cipher(struct udevice *dev, cipher_fw_context *ctx,
351 const u8 *in, u8 *out, u32 len, bool enc)
352 {
353 #if CONFIG_IS_ENABLED(DM_KEYLAD)
354 const struct dm_crypto_ops *ops = device_get_ops(dev);
355 struct udevice *keylad_dev;
356
357 if (!ops || !ops->cipher_fw_crypt)
358 return -ENOSYS;
359
360 if (!ops->is_secure || !ops->is_secure(dev)) {
361 printf("Only secure crypto support fwkey cipher.\n");
362 return -ENOSYS;
363 }
364
365 keylad_dev = keylad_get_device();
366 if (!keylad_dev) {
367 printf("No keylad device found.\n");
368 return -ENOSYS;
369 }
370
371 if (keylad_transfer_fwkey(keylad_dev, crypto_keytable_addr(dev),
372 ctx->fw_keyid, ctx->key_len)) {
373 printf("Failed to transfer key from keylad.\n");
374 return -ENOSYS;
375 }
376
377 return ops->cipher_fw_crypt(dev, ctx, in, out, len, enc);
378 #else
379 return -ENOSYS;
380 #endif
381 }
382
crypto_keytable_addr(struct udevice * dev)383 ulong crypto_keytable_addr(struct udevice *dev)
384 {
385 const struct dm_crypto_ops *ops = device_get_ops(dev);
386
387 if (!ops || !ops->keytable_addr)
388 return 0;
389
390 return ops->keytable_addr(dev);
391 }
392
crypto_is_secure(struct udevice * dev)393 bool crypto_is_secure(struct udevice *dev)
394 {
395 const struct dm_crypto_ops *ops = device_get_ops(dev);
396
397 if (!ops || !ops->is_secure)
398 return false;
399
400 return ops->is_secure(dev);
401 }
402
/* Register the crypto uclass with the driver model */
UCLASS_DRIVER(crypto) = {
	.id	= UCLASS_CRYPTO,
	.name	= "crypto",
};
407