xref: /OK3568_Linux_fs/kernel/drivers/crypto/rockchip/rk_crypto_core.h (revision 4882a59341e53eb6f0b4789bf948001014eff981)
/* SPDX-License-Identifier: GPL-2.0 */

/* Copyright (c) 2018 Rockchip Electronics Co. Ltd. */

#ifndef __RK_CRYPTO_CORE_H__
#define __RK_CRYPTO_CORE_H__

#include <crypto/aes.h>
#include <crypto/des.h>
#include <crypto/algapi.h>
#include <crypto/md5.h>
#include <crypto/sha.h>
#include <crypto/sm3.h>
#include <crypto/sm4.h>
#include <crypto/gcm.h>
#include <crypto/skcipher.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/akcipher.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/rsa.h>
#include <crypto/internal/des.h>
#include <crypto/internal/skcipher.h>

#include <linux/interrupt.h>
#include <linux/delay.h>
#include <linux/clk.h>
#include <linux/io.h>
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>
#include <linux/timer.h>

#include "rk_crypto_bignum.h"
/*
 * Register at the lowest priority so the hardware implementation is
 * never picked by default; it is only used when the user layer asks
 * for it explicitly (e.g. by driver name).
 */
#define RK_CRYPTO_PRIORITY		0

/* Increase the addr_vir buffer size from 1 to 8 pages */
#define RK_BUFFER_ORDER			3
#define RK_BUFFER_SIZE			(PAGE_SIZE << RK_BUFFER_ORDER)
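/*
 * Worked example, assuming the common 4 KiB PAGE_SIZE: RK_BUFFER_SIZE
 * comes out to 4096 << 3 = 32768 bytes. Kernels built with larger
 * pages get a proportionally larger bounce buffer.
 */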

#define RK_DMA_ALIGNMENT		128

/*
 * <crypto/sha.h> has no dedicated sha384/sha224 state structs: SHA-384
 * reuses struct sha512_state and SHA-224 reuses struct sha256_state.
 * Alias them so the statesize token pasting in RK_HASH_ALGO_INIT and
 * RK_HMAC_ALGO_INIT below resolves for every digest.
 */
#define sha384_state			sha512_state
#define sha224_state			sha256_state

/* request processing stage flags */
#define RK_FLAG_FINAL			BIT(0)
#define RK_FLAG_UPDATE			BIT(1)

/* statistics counters, exposed through the procfs entry below */
struct rk_crypto_stat {
	unsigned long long	busy_cnt;
	unsigned long long	equeue_cnt;
	unsigned long long	dequeue_cnt;
	unsigned long long	complete_cnt;
	unsigned long long	done_cnt;
	unsigned long long	fake_cnt;
	unsigned long long	irq_cnt;
	unsigned long long	timeout_cnt;
	unsigned long long	error_cnt;
	unsigned long long	ever_queue_max;
	int			last_error;
};

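/*
 * A sketch of how the hooks below are typically wired up (inferred
 * from the surrounding driver code, not a contract defined in this
 * header): enqueue() pushes an async request onto @queue, queue_task
 * pops it and calls load_data() to DMA-map the scatterlists, the
 * per-algorithm rk_alg_ops drive the engine, and done_task calls
 * unload_data() before completing the request. @timer guards against
 * a hung engine.
 */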
struct rk_crypto_dev {
	struct device			*dev;
	struct reset_control		*rst;
	void __iomem			*reg;
	void __iomem			*pka_reg;
	int				irq;
	struct crypto_queue		queue;
	struct tasklet_struct		queue_task;
	struct tasklet_struct		done_task;
	int				err;
	void				*hw_info;
	struct rk_crypto_soc_data	*soc_data;
	int				clks_num;
	struct clk_bulk_data		*clk_bulks;
	const char			*name;
	struct proc_dir_entry		*procfs;
	struct rk_crypto_stat		stat;

	/* device lock */
	spinlock_t			lock;

	/* per-request state shared with the algorithm implementations */
	struct crypto_async_request	*async_req;
	void				*addr_vir;
	u32				vir_max;
	void				*addr_aad;
	int				aad_max;
	struct scatterlist		src[2];
	struct scatterlist		dst[2];

	struct timer_list		timer;
	bool				busy;
	void (*request_crypto)(struct rk_crypto_dev *rk_dev, const char *name);
	void (*release_crypto)(struct rk_crypto_dev *rk_dev, const char *name);
	int (*load_data)(struct rk_crypto_dev *rk_dev,
			 struct scatterlist *sg_src,
			 struct scatterlist *sg_dst);
	int (*unload_data)(struct rk_crypto_dev *rk_dev);
	int (*enqueue)(struct rk_crypto_dev *rk_dev,
		       struct crypto_async_request *async_req);
};

struct rk_crypto_soc_data {
	const char			*crypto_ver;
	char				**valid_algs_name;
	int				valid_algs_num;
	unsigned int			hw_info_size;
	bool				use_soft_aes192;
	int				default_pka_offset;
	bool				use_lli_chain;

	int (*hw_init)(struct device *dev, void *hw_info);
	void (*hw_deinit)(struct device *dev, void *hw_info);
	const char * const *(*hw_get_rsts)(u32 *num);
	struct rk_crypto_algt **(*hw_get_algts)(u32 *num);
	bool (*hw_is_algo_valid)(struct rk_crypto_dev *rk_dev,
				 struct rk_crypto_algt *algt);
};

/*
 * Per-algorithm operations: the first four are generic request hooks,
 * the hw_* callbacks program the engine itself.
 */
struct rk_alg_ops {
	int (*start)(struct rk_crypto_dev *rk_dev);
	int (*update)(struct rk_crypto_dev *rk_dev);
	void (*complete)(struct crypto_async_request *base, int err);
	int (*irq_handle)(int irq, void *dev_id);

	int (*hw_write_key)(struct rk_crypto_dev *rk_dev, const u8 *key, u32 key_len);
	void (*hw_write_iv)(struct rk_crypto_dev *rk_dev, const u8 *iv, u32 iv_len);
	int (*hw_init)(struct rk_crypto_dev *rk_dev, u32 algo, u32 type);
	int (*hw_dma_start)(struct rk_crypto_dev *rk_dev, u32 flag);
	int (*hw_get_result)(struct rk_crypto_dev *rk_dev, u8 *data, u32 data_len);
};

struct rk_alg_ctx {
	struct rk_alg_ops		ops;
	struct scatterlist		*sg_src;
	struct scatterlist		*sg_dst;
	struct scatterlist		sg_tmp;
	struct scatterlist		sg_aad;
	struct scatterlist		*req_src;
	struct scatterlist		*req_dst;
	size_t				src_nents;
	size_t				dst_nents;
	size_t				map_nents;

	int				is_aead;
	unsigned int			total;
	unsigned int			assoclen;
	unsigned int			count;
	unsigned int			left_bytes;

	dma_addr_t			addr_in;
	dma_addr_t			addr_out;
	dma_addr_t			addr_aad_in;

	bool				aligned;
	bool				is_dma;
	int				align_size;
	int				chunk_size;
};

/* private hash transform context */
struct rk_ahash_ctx {
	struct rk_alg_ctx		algs_ctx;
	struct rk_crypto_dev		*rk_dev;
	u8				authkey[SHA512_BLOCK_SIZE];
	u32				authkey_len;
	struct scatterlist		hash_sg[2];
	u8				*hash_tmp;
	u32				hash_tmp_len;
	bool				hash_tmp_mapped;
	u32				calc_cnt;

	u8				lastc[RK_DMA_ALIGNMENT];
	u32				lastc_len;

	void				*priv;

	/* for fallback */
	struct crypto_ahash		*fallback_tfm;
};

/* private per-request hash context, used for the fallback path */
struct rk_ahash_rctx {
	struct ahash_request		fallback_req;
	u32				mode;
	u32				flag;
};

/* private cipher transform context */
struct rk_cipher_ctx {
	struct rk_alg_ctx		algs_ctx;
	struct rk_crypto_dev		*rk_dev;
	unsigned char			key[AES_MAX_KEY_SIZE * 2];	/* 2x for XTS */
	unsigned int			keylen;
	u32				mode;
	u8				iv[AES_BLOCK_SIZE];
	u32				iv_len;
	u8				lastc[AES_BLOCK_SIZE];
	bool				is_enc;
	void				*priv;

	/* for fallback */
	bool				fallback_key_inited;
	struct crypto_skcipher		*fallback_tfm;
	struct skcipher_request		fallback_req;	// keep at the end
	struct crypto_aead		*fallback_aead;
};

struct rk_rsa_ctx {
	struct rk_alg_ctx		algs_ctx;
	/* RSA modulus and public/private exponents */
	struct rk_bignum		*n;
	struct rk_bignum		*e;
	struct rk_bignum		*d;

	struct rk_crypto_dev		*rk_dev;
};

enum alg_type {
	ALG_TYPE_HASH,
	ALG_TYPE_HMAC,
	ALG_TYPE_CIPHER,
	ALG_TYPE_ASYM,
	ALG_TYPE_AEAD,
	ALG_TYPE_MAX,
};

struct rk_crypto_algt {
	struct rk_crypto_dev		*rk_dev;
	union {
		struct skcipher_alg	crypto;
		struct ahash_alg	hash;
		struct akcipher_alg	asym;
		struct aead_alg		aead;
	} alg;
	enum alg_type			type;
	u32				algo;
	u32				mode;
	char				*name;
	bool				use_soft_aes192;
	bool				valid_flag;
};

enum rk_hash_algo {
	HASH_ALGO_MD5,
	HASH_ALGO_SHA1,
	HASH_ALGO_SHA224,
	HASH_ALGO_SHA256,
	HASH_ALGO_SHA384,
	HASH_ALGO_SHA512,
	HASH_ALGO_SM3,
	HASH_ALGO_SHA512_224,
	HASH_ALGO_SHA512_256,
};

enum rk_cipher_algo {
	CIPHER_ALGO_DES,
	CIPHER_ALGO_DES3_EDE,
	CIPHER_ALGO_AES,
	CIPHER_ALGO_SM4,
};

enum rk_cipher_mode {
	CIPHER_MODE_ECB,
	CIPHER_MODE_CBC,
	CIPHER_MODE_CFB,
	CIPHER_MODE_OFB,
	CIPHER_MODE_CTR,
	CIPHER_MODE_XTS,
	CIPHER_MODE_CTS,
	CIPHER_MODE_CCM,
	CIPHER_MODE_GCM,
	CIPHER_MODE_CMAC,
	CIPHER_MODE_CBCMAC,
};

/*
 * DES, 3DES and SM4 take fixed-length keys, so <crypto/des.h> and
 * <crypto/sm4.h> only define *_KEY_SIZE. Provide MIN/MAX aliases so
 * the cipher_algo##_MIN_KEY_SIZE token pasting in the macros below
 * works for every cipher.
 */
#define DES_MIN_KEY_SIZE	DES_KEY_SIZE
#define DES_MAX_KEY_SIZE	DES_KEY_SIZE
#define DES3_EDE_MIN_KEY_SIZE	DES3_EDE_KEY_SIZE
#define DES3_EDE_MAX_KEY_SIZE	DES3_EDE_KEY_SIZE
#define SM4_MIN_KEY_SIZE	SM4_KEY_SIZE
#define SM4_MAX_KEY_SIZE	SM4_KEY_SIZE

/* <crypto/md5.h> defines no MD5_BLOCK_SIZE; MD5 shares SHA-1's 64-byte block */
#define MD5_BLOCK_SIZE		SHA1_BLOCK_SIZE

#define RK_AEAD_ALGO_INIT(cipher_algo, cipher_mode, algo_name, driver_name) {\
	.name = #algo_name,\
	.type = ALG_TYPE_AEAD,\
	.algo = CIPHER_ALGO_##cipher_algo,\
	.mode = CIPHER_MODE_##cipher_mode,\
	.alg.aead = {\
		.base.cra_name		= #algo_name,\
		.base.cra_driver_name	= #driver_name,\
		.base.cra_priority	= RK_CRYPTO_PRIORITY,\
		.base.cra_flags		= CRYPTO_ALG_TYPE_AEAD |\
					  CRYPTO_ALG_KERN_DRIVER_ONLY |\
					  CRYPTO_ALG_ASYNC |\
					  CRYPTO_ALG_NEED_FALLBACK,\
		.base.cra_blocksize	= 1,\
		.base.cra_ctxsize	= sizeof(struct rk_cipher_ctx),\
		.base.cra_alignmask	= 0x07,\
		.base.cra_module	= THIS_MODULE,\
		.init		= rk_aead_init_tfm,\
		.exit		= rk_aead_exit_tfm,\
		.ivsize		= GCM_AES_IV_SIZE,\
		.chunksize	= cipher_algo##_BLOCK_SIZE,\
		.maxauthsize	= AES_BLOCK_SIZE,\
		.setkey		= rk_aead_setkey,\
		.setauthsize	= rk_aead_gcm_setauthsize,\
		.encrypt	= rk_aead_encrypt,\
		.decrypt	= rk_aead_decrypt,\
	} \
}
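/*
 * Illustrative use (the instance name is hypothetical, but mirrors how
 * the per-SoC alg tables consume this macro):
 *
 *   static struct rk_crypto_algt rk_gcm_aes_algt =
 *	RK_AEAD_ALGO_INIT(AES, GCM, gcm(aes), gcm-aes-rk);
 *
 * The # stringizing yields cra_name "gcm(aes)" and cra_driver_name
 * "gcm-aes-rk"; the ## pasting resolves AES_BLOCK_SIZE for .chunksize.
 */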

#define RK_CIPHER_ALGO_INIT(cipher_algo, cipher_mode, algo_name, driver_name) {\
	.name = #algo_name,\
	.type = ALG_TYPE_CIPHER,\
	.algo = CIPHER_ALGO_##cipher_algo,\
	.mode = CIPHER_MODE_##cipher_mode,\
	.alg.crypto = {\
		.base.cra_name		= #algo_name,\
		.base.cra_driver_name	= #driver_name,\
		.base.cra_priority	= RK_CRYPTO_PRIORITY,\
		.base.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |\
					  CRYPTO_ALG_ASYNC |\
					  CRYPTO_ALG_NEED_FALLBACK,\
		.base.cra_blocksize	= cipher_algo##_BLOCK_SIZE,\
		.base.cra_ctxsize	= sizeof(struct rk_cipher_ctx),\
		.base.cra_alignmask	= 0x07,\
		.base.cra_module	= THIS_MODULE,\
		.init		= rk_ablk_init_tfm,\
		.exit		= rk_ablk_exit_tfm,\
		.min_keysize	= cipher_algo##_MIN_KEY_SIZE,\
		.max_keysize	= cipher_algo##_MAX_KEY_SIZE,\
		.ivsize		= cipher_algo##_BLOCK_SIZE,\
		.chunksize	= cipher_algo##_BLOCK_SIZE,\
		.setkey		= rk_cipher_setkey,\
		.encrypt	= rk_cipher_encrypt,\
		.decrypt	= rk_cipher_decrypt,\
	} \
}
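/*
 * Illustrative use (hypothetical instance name):
 *
 *   static struct rk_crypto_algt rk_cbc_aes_algt =
 *	RK_CIPHER_ALGO_INIT(AES, CBC, cbc(aes), cbc-aes-rk);
 *
 * which resolves AES_MIN_KEY_SIZE, AES_MAX_KEY_SIZE and AES_BLOCK_SIZE
 * via ## pasting and registers cra_name "cbc(aes)".
 */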

#define RK_CIPHER_ALGO_XTS_INIT(cipher_algo, algo_name, driver_name) {\
	.name = #algo_name,\
	.type = ALG_TYPE_CIPHER,\
	.algo = CIPHER_ALGO_##cipher_algo,\
	.mode = CIPHER_MODE_XTS,\
	.alg.crypto = {\
		.base.cra_name		= #algo_name,\
		.base.cra_driver_name	= #driver_name,\
		.base.cra_priority	= RK_CRYPTO_PRIORITY,\
		.base.cra_flags		= CRYPTO_ALG_KERN_DRIVER_ONLY |\
					  CRYPTO_ALG_ASYNC |\
					  CRYPTO_ALG_NEED_FALLBACK,\
		.base.cra_blocksize	= cipher_algo##_BLOCK_SIZE,\
		.base.cra_ctxsize	= sizeof(struct rk_cipher_ctx),\
		.base.cra_alignmask	= 0x07,\
		.base.cra_module	= THIS_MODULE,\
		.init		= rk_ablk_init_tfm,\
		.exit		= rk_ablk_exit_tfm,\
		.min_keysize	= cipher_algo##_MAX_KEY_SIZE,\
		.max_keysize	= cipher_algo##_MAX_KEY_SIZE * 2,\
		.ivsize		= cipher_algo##_BLOCK_SIZE,\
		.chunksize	= cipher_algo##_BLOCK_SIZE,\
		.setkey		= rk_cipher_setkey,\
		.encrypt	= rk_cipher_encrypt,\
		.decrypt	= rk_cipher_decrypt,\
	} \
}
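/*
 * XTS consumes two concatenated keys, so min_keysize/max_keysize above
 * are doubled relative to the base cipher (matching the 2x key[] in
 * struct rk_cipher_ctx). Illustrative use (hypothetical instance name):
 *
 *   static struct rk_crypto_algt rk_xts_aes_algt =
 *	RK_CIPHER_ALGO_XTS_INIT(AES, xts(aes), xts-aes-rk);
 */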

#define RK_HASH_ALGO_INIT(hash_algo, algo_name) {\
	.name = #algo_name,\
	.type = ALG_TYPE_HASH,\
	.algo = HASH_ALGO_##hash_algo,\
	.alg.hash = {\
		.init = rk_ahash_init,\
		.update = rk_ahash_update,\
		.final = rk_ahash_final,\
		.finup = rk_ahash_finup,\
		.export = rk_ahash_export,\
		.import = rk_ahash_import,\
		.digest = rk_ahash_digest,\
		.halg = {\
			.digestsize = hash_algo##_DIGEST_SIZE,\
			.statesize = sizeof(struct algo_name##_state),\
			.base = {\
				.cra_name = #algo_name,\
				.cra_driver_name = #algo_name "-rk",\
				.cra_priority = RK_CRYPTO_PRIORITY,\
				.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY |\
					     CRYPTO_ALG_ASYNC |\
					     CRYPTO_ALG_NEED_FALLBACK,\
				.cra_blocksize = hash_algo##_BLOCK_SIZE,\
				.cra_ctxsize = sizeof(struct rk_ahash_ctx),\
				.cra_alignmask = 0,\
				.cra_init = rk_cra_hash_init,\
				.cra_exit = rk_cra_hash_exit,\
				.cra_module = THIS_MODULE,\
			} \
		} \
	} \
}
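/*
 * Illustrative expansion: RK_HASH_ALGO_INIT(SHA256, sha256) registers
 * cra_name "sha256" with driver name "sha256-rk" and a statesize of
 * sizeof(struct sha256_state). The sha224_state/sha384_state aliases
 * near the top of this file keep the same pasting valid for SHA-224
 * and SHA-384.
 */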

#define RK_HMAC_ALGO_INIT(hash_algo, algo_name) {\
	.name = "hmac(" #algo_name ")",\
	.type = ALG_TYPE_HMAC,\
	.algo = HASH_ALGO_##hash_algo,\
	.alg.hash = {\
		.init = rk_ahash_init,\
		.update = rk_ahash_update,\
		.final = rk_ahash_final,\
		.finup = rk_ahash_finup,\
		.export = rk_ahash_export,\
		.import = rk_ahash_import,\
		.digest = rk_ahash_digest,\
		.setkey = rk_ahash_hmac_setkey,\
		.halg = {\
			.digestsize = hash_algo##_DIGEST_SIZE,\
			.statesize = sizeof(struct algo_name##_state),\
			.base = {\
				.cra_name = "hmac(" #algo_name ")",\
				.cra_driver_name = "hmac-" #algo_name "-rk",\
				.cra_priority = RK_CRYPTO_PRIORITY,\
				.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY |\
					     CRYPTO_ALG_ASYNC |\
					     CRYPTO_ALG_NEED_FALLBACK,\
				.cra_blocksize = hash_algo##_BLOCK_SIZE,\
				.cra_ctxsize = sizeof(struct rk_ahash_ctx),\
				.cra_alignmask = 0,\
				.cra_init = rk_cra_hash_init,\
				.cra_exit = rk_cra_hash_exit,\
				.cra_module = THIS_MODULE,\
			} \
		} \
	} \
}
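/*
 * Illustrative expansion: RK_HMAC_ALGO_INIT(SHA256, sha256) registers
 * cra_name "hmac(sha256)" with driver name "hmac-sha256-rk" and wires
 * in rk_ahash_hmac_setkey for the keyed variant.
 */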

#define IS_TYPE_HMAC(type) ((type) == ALG_TYPE_HMAC)

#define CRYPTO_READ(dev, offset)		  \
		readl_relaxed(((dev)->reg + (offset)))
#define CRYPTO_WRITE(dev, offset, val)	  \
		writel_relaxed((val), ((dev)->reg + (offset)))
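/*
 * Both helpers use the relaxed MMIO accessors, so they impose no
 * ordering against DMA; callers needing that must add barriers. A
 * minimal sketch (the register offset is hypothetical):
 *
 *   u32 val = CRYPTO_READ(rk_dev, 0x0008);
 *   CRYPTO_WRITE(rk_dev, 0x0008, val | BIT(0));
 */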

#ifdef DEBUG
#define CRYPTO_TRACE(format, ...) pr_err("[%s, %05d]-trace: " format "\n", \
					 __func__, __LINE__, ##__VA_ARGS__)
#define CRYPTO_MSG(format, ...) pr_err("[%s, %05d]-msg: " format "\n", \
				       __func__, __LINE__, ##__VA_ARGS__)
#define CRYPTO_DUMPHEX(var_name, data, len) print_hex_dump(KERN_CONT, (var_name), \
							   DUMP_PREFIX_OFFSET, \
							   16, 1, (data), (len), false)
#else
#define CRYPTO_TRACE(format, ...)
#define CRYPTO_MSG(format, ...)
#define CRYPTO_DUMPHEX(var_name, data, len)
#endif /* DEBUG */
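/*
 * The trace helpers above compile away unless DEBUG is defined before
 * this header is included. Hypothetical call site:
 *
 *   CRYPTO_TRACE("mapped %u bytes", alg_ctx->count);
 *   CRYPTO_DUMPHEX("iv: ", ctx->iv, ctx->iv_len);
 */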

#endif /* __RK_CRYPTO_CORE_H__ */