// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (c) 2019 Fuzhou Rockchip Electronics Co., Ltd
 */

#include <common.h>
#include <crypto.h>
#include <dm.h>

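/*
 * Precomputed digests of the empty (zero-length) message for each
 * supported hash algorithm; crypto_sha_final() returns these directly
 * instead of calling into the driver when there is no data to hash.
 */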
static const u8 null_hash_sha1_value[] = {
	0xda, 0x39, 0xa3, 0xee, 0x5e, 0x6b, 0x4b, 0x0d,
	0x32, 0x55, 0xbf, 0xef, 0x95, 0x60, 0x18, 0x90,
	0xaf, 0xd8, 0x07, 0x09
};

static const u8 null_hash_md5_value[] = {
	0xd4, 0x1d, 0x8c, 0xd9, 0x8f, 0x00, 0xb2, 0x04,
	0xe9, 0x80, 0x09, 0x98, 0xec, 0xf8, 0x42, 0x7e
};

static const u8 null_hash_sha256_value[] = {
	0xe3, 0xb0, 0xc4, 0x42, 0x98, 0xfc, 0x1c, 0x14,
	0x9a, 0xfb, 0xf4, 0xc8, 0x99, 0x6f, 0xb9, 0x24,
	0x27, 0xae, 0x41, 0xe4, 0x64, 0x9b, 0x93, 0x4c,
	0xa4, 0x95, 0x99, 0x1b, 0x78, 0x52, 0xb8, 0x55
};

static const u8 null_hash_sha512_value[] = {
	0xcf, 0x83, 0xe1, 0x35, 0x7e, 0xef, 0xb8, 0xbd,
	0xf1, 0x54, 0x28, 0x50, 0xd6, 0x6d, 0x80, 0x07,
	0xd6, 0x20, 0xe4, 0x05, 0x0b, 0x57, 0x15, 0xdc,
	0x83, 0xf4, 0xa9, 0x21, 0xd3, 0x6c, 0xe9, 0xce,
	0x47, 0xd0, 0xd1, 0x3c, 0x5d, 0x85, 0xf2, 0xb0,
	0xff, 0x83, 0x18, 0xd2, 0x87, 0x7e, 0xec, 0x2f,
	0x63, 0xb9, 0x31, 0xbd, 0x47, 0x41, 0x7a, 0x81,
	0xa5, 0x38, 0x32, 0x7a, 0xf9, 0x27, 0xda, 0x3e
};

static const u8 null_hash_sm3_value[] = {
	0x1a, 0xb2, 0x1d, 0x83, 0x55, 0xcf, 0xa1, 0x7f,
	0x8e, 0x61, 0x19, 0x48, 0x31, 0xe8, 0x1a, 0x8f,
	0x22, 0xbe, 0xc8, 0xc7, 0x28, 0xfe, 0xfb, 0x74,
	0x7e, 0xd0, 0x35, 0xeb, 0x50, 0x82, 0xaa, 0x2b
};

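/*
 * Return the width of @algo in bits (digest size for hash/HMAC
 * algorithms, key size for RSA), or 0 for an unknown algorithm.
 */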
u32 crypto_algo_nbits(u32 algo)
{
	switch (algo) {
	case CRYPTO_MD5:
	case CRYPTO_HMAC_MD5:
		return 128;
	case CRYPTO_SHA1:
	case CRYPTO_HMAC_SHA1:
		return 160;
	case CRYPTO_SHA256:
	case CRYPTO_HMAC_SHA256:
		return 256;
	case CRYPTO_SHA512:
	case CRYPTO_HMAC_SHA512:
		return 512;
	case CRYPTO_SM3:
	case CRYPTO_HMAC_SM3:
		return 256;
	case CRYPTO_RSA512:
		return 512;
	case CRYPTO_RSA1024:
		return 1024;
	case CRYPTO_RSA2048:
		return 2048;
	case CRYPTO_RSA3072:
		return 3072;
	case CRYPTO_RSA4096:
		return 4096;
	}

	printf("Unknown crypto algorithm: 0x%x\n", algo);

	return 0;
}

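/*
 * Find the first crypto device whose reported capability mask contains
 * every bit set in @capability; return NULL if no such device exists.
 */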
struct udevice *crypto_get_device(u32 capability)
{
	const struct dm_crypto_ops *ops;
	struct udevice *dev;
	struct uclass *uc;
	int ret;
	u32 cap;

	ret = uclass_get(UCLASS_CRYPTO, &uc);
	if (ret)
		return NULL;

	for (uclass_first_device(UCLASS_CRYPTO, &dev);
	     dev;
	     uclass_next_device(&dev)) {
		ops = device_get_ops(dev);
		if (!ops || !ops->capability)
			continue;

		cap = ops->capability(dev);
		if ((cap & capability) == capability)
			return dev;
	}

	return NULL;
}

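/*
 * Start a hash operation. A zero-length context is a no-op here; the
 * empty-message digest is handled in crypto_sha_final().
 */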
int crypto_sha_init(struct udevice *dev, sha_context *ctx)
{
	const struct dm_crypto_ops *ops = device_get_ops(dev);

	if (ctx && !ctx->length)
		return 0;

	if (!ops || !ops->sha_init)
		return -ENOSYS;

	return ops->sha_init(dev, ctx);
}

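/* Feed @len bytes of @input into an ongoing hash operation. */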
int crypto_sha_update(struct udevice *dev, u32 *input, u32 len)
{
	const struct dm_crypto_ops *ops = device_get_ops(dev);

	if (!len)
		return 0;

	if (!ops || !ops->sha_update)
		return -ENOSYS;

	return ops->sha_update(dev, input, len);
}

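/*
 * Finish a hash operation and write the digest to @output. For a
 * zero-length context the precomputed empty-message digest is copied
 * instead of calling into the driver.
 */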
int crypto_sha_final(struct udevice *dev, sha_context *ctx, u8 *output)
{
	const struct dm_crypto_ops *ops = device_get_ops(dev);
	const u8 *null_hash = NULL;
	u32 hash_size = 0;

	if (ctx && !ctx->length && output) {
		switch (ctx->algo) {
		case CRYPTO_MD5:
			null_hash = null_hash_md5_value;
			hash_size = sizeof(null_hash_md5_value);
			break;
		case CRYPTO_SHA1:
			null_hash = null_hash_sha1_value;
			hash_size = sizeof(null_hash_sha1_value);
			break;
		case CRYPTO_SHA256:
			null_hash = null_hash_sha256_value;
			hash_size = sizeof(null_hash_sha256_value);
			break;
		case CRYPTO_SHA512:
			null_hash = null_hash_sha512_value;
			hash_size = sizeof(null_hash_sha512_value);
			break;
		case CRYPTO_SM3:
			null_hash = null_hash_sm3_value;
			hash_size = sizeof(null_hash_sm3_value);
			break;
		default:
			return -EINVAL;
		}

		memcpy(output, null_hash, hash_size);

		return 0;
	}

	if (!ops || !ops->sha_final)
		return -ENOSYS;

	return ops->sha_final(dev, ctx, output);
}

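/* Start a keyed HMAC operation; a zero-length context is rejected. */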
int crypto_hmac_init(struct udevice *dev, sha_context *ctx,
		     u8 *key, u32 key_len)
{
	const struct dm_crypto_ops *ops = device_get_ops(dev);

	if (ctx && !ctx->length)
		return -EINVAL;

	if (!ops || !ops->hmac_init)
		return -ENOSYS;

	return ops->hmac_init(dev, ctx, key, key_len);
}

int crypto_hmac_update(struct udevice *dev, u32 *input, u32 len)
{
	const struct dm_crypto_ops *ops = device_get_ops(dev);

	if (!len)
		return 0;

	if (!ops || !ops->hmac_update)
		return -ENOSYS;

	return ops->hmac_update(dev, input, len);
}

int crypto_hmac_final(struct udevice *dev, sha_context *ctx, u8 *output)
{
	const struct dm_crypto_ops *ops = device_get_ops(dev);

	if (!ops || !ops->hmac_final)
		return -ENOSYS;

	return ops->hmac_final(dev, ctx, output);
}

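/*
 * Convenience wrapper: hash a single contiguous buffer by chaining
 * init, update and final in one call.
 */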
int crypto_sha_csum(struct udevice *dev, sha_context *ctx,
		    char *input, u32 input_len, u8 *output)
{
	int ret;

	ret = crypto_sha_init(dev, ctx);
	if (ret)
		return ret;

	ret = crypto_sha_update(dev, (u32 *)input, input_len);
	if (ret)
		return ret;

	ret = crypto_sha_final(dev, ctx, output);

	return ret;
}

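/*
 * Hash a set of image regions as one message: the total length is
 * accumulated into the context before init, then each region is fed
 * to the driver in order.
 */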
int crypto_sha_regions_csum(struct udevice *dev, sha_context *ctx,
			    const struct image_region region[],
			    int region_count, u8 *output)
{
	int i, ret;

	ctx->length = 0;
	for (i = 0; i < region_count; i++)
		ctx->length += region[i].size;

	ret = crypto_sha_init(dev, ctx);
	if (ret)
		return ret;

	for (i = 0; i < region_count; i++) {
		ret = crypto_sha_update(dev, (void *)region[i].data,
					region[i].size);
		if (ret)
			return ret;
	}

	return crypto_sha_final(dev, ctx, output);
}

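/*
 * RSA verification primitive: run the public-key operation on @sign
 * using key @ctx and write the result to @output.
 */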
int crypto_rsa_verify(struct udevice *dev, rsa_key *ctx, u8 *sign, u8 *output)
{
	const struct dm_crypto_ops *ops = device_get_ops(dev);

	if (!ops || !ops->rsa_verify)
		return -ENOSYS;

	if (!ctx || !ctx->n || !ctx->e || !sign || !output)
		return -EINVAL;

	return ops->rsa_verify(dev, ctx, sign, output);
}

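/*
 * Symmetric cipher operation: process @len bytes from @in to @out,
 * encrypting when @enc is true and decrypting otherwise.
 */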
int crypto_cipher(struct udevice *dev, cipher_context *ctx,
		  const u8 *in, u8 *out, u32 len, bool enc)
{
	const struct dm_crypto_ops *ops = device_get_ops(dev);

	if (!ops || !ops->cipher_crypt)
		return -ENOSYS;

	return ops->cipher_crypt(dev, ctx, in, out, len, enc);
}

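/*
 * Cipher-based MAC: authenticate @len bytes of @in and write the
 * resulting tag to @tag.
 */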
int crypto_mac(struct udevice *dev, cipher_context *ctx,
	       const u8 *in, u32 len, u8 *tag)
{
	const struct dm_crypto_ops *ops = device_get_ops(dev);

	if (!ops || !ops->cipher_mac)
		return -ENOSYS;

	return ops->cipher_mac(dev, ctx, in, len, tag);
}

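/*
 * Authenticated encryption: process @len bytes of @in together with
 * @aad_len bytes of additional authenticated data, writing the result
 * to @out and the authentication tag to @tag.
 */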
int crypto_ae(struct udevice *dev, cipher_context *ctx,
	      const u8 *in, u32 len, const u8 *aad, u32 aad_len,
	      u8 *out, u8 *tag)
{
	const struct dm_crypto_ops *ops = device_get_ops(dev);

	if (!ops || !ops->cipher_ae)
		return -ENOSYS;

	return ops->cipher_ae(dev, ctx, in, len, aad, aad_len, out, tag);
}

UCLASS_DRIVER(crypto) = {
	.id	= UCLASS_CRYPTO,
	.name	= "crypto",
};