// SPDX-License-Identifier: GPL-2.0
/*
 * Crypto acceleration support for Rockchip Crypto V3
 *
 * Copyright (c) 2022, Rockchip Electronics Co., Ltd
 *
 * Author: Lin Jinhan <troy.lin@rock-chips.com>
 *
 */

#include "rk_crypto_core.h"
#include "rk_crypto_v3.h"
#include "rk_crypto_v3_reg.h"
#include "rk_crypto_utils.h"

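/*
 * Map cipher-mode and hash-algorithm enums to the capability bits reported
 * by the hardware version registers, so support can be probed at runtime.
 */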
static const u32 cipher_mode2bit_mask[] = {
	[CIPHER_MODE_ECB]      = CRYPTO_ECB_FLAG,
	[CIPHER_MODE_CBC]      = CRYPTO_CBC_FLAG,
	[CIPHER_MODE_CFB]      = CRYPTO_CFB_FLAG,
	[CIPHER_MODE_OFB]      = CRYPTO_OFB_FLAG,
	[CIPHER_MODE_CTR]      = CRYPTO_CTR_FLAG,
	[CIPHER_MODE_XTS]      = CRYPTO_XTS_FLAG,
	[CIPHER_MODE_CTS]      = CRYPTO_CTS_FLAG,
	[CIPHER_MODE_CCM]      = CRYPTO_CCM_FLAG,
	[CIPHER_MODE_GCM]      = CRYPTO_GCM_FLAG,
	[CIPHER_MODE_CMAC]     = CRYPTO_CMAC_FLAG,
	[CIPHER_MODE_CBCMAC]   = CRYPTO_CBCMAC_FLAG,
};

static const u32 hash_algo2bit_mask[] = {
	[HASH_ALGO_SHA1]       = CRYPTO_HASH_SHA1_FLAG,
	[HASH_ALGO_SHA224]     = CRYPTO_HASH_SHA224_FLAG,
	[HASH_ALGO_SHA256]     = CRYPTO_HASH_SHA256_FLAG,
	[HASH_ALGO_SHA384]     = CRYPTO_HASH_SHA384_FLAG,
	[HASH_ALGO_SHA512]     = CRYPTO_HASH_SHA512_FLAG,
	[HASH_ALGO_SHA512_224] = CRYPTO_HASH_SHA512_224_FLAG,
	[HASH_ALGO_SHA512_256] = CRYPTO_HASH_SHA512_256_FLAG,
	[HASH_ALGO_MD5]        = CRYPTO_HASH_MD5_FLAG,
	[HASH_ALGO_SM3]        = CRYPTO_HASH_SM3_FLAG,
};

static const u32 hmac_algo2bit_mask[] = {
	[HASH_ALGO_SHA1]       = CRYPTO_HMAC_SHA1_FLAG,
	[HASH_ALGO_SHA256]     = CRYPTO_HMAC_SHA256_FLAG,
	[HASH_ALGO_SHA512]     = CRYPTO_HMAC_SHA512_FLAG,
	[HASH_ALGO_MD5]        = CRYPTO_HMAC_MD5_FLAG,
	[HASH_ALGO_SM3]        = CRYPTO_HMAC_SM3_FLAG,
};

static const char * const crypto_v3_rsts[] = {
	"crypto-rst",
};

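/*
 * Algorithm templates exposed by the Crypto V3 driver. Each entry can be
 * checked against the hardware capability registers with
 * rk_hw_crypto_v3_algo_valid().
 */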
static struct rk_crypto_algt *crypto_v3_algs[] = {
	&rk_v3_ecb_sm4_alg,		/* ecb(sm4) */
	&rk_v3_cbc_sm4_alg,		/* cbc(sm4) */
	&rk_v3_xts_sm4_alg,		/* xts(sm4) */
	&rk_v3_cfb_sm4_alg,		/* cfb(sm4) */
	&rk_v3_ofb_sm4_alg,		/* ofb(sm4) */
	&rk_v3_ctr_sm4_alg,		/* ctr(sm4) */
	&rk_v3_gcm_sm4_alg,		/* gcm(sm4) */

	&rk_v3_ecb_aes_alg,		/* ecb(aes) */
	&rk_v3_cbc_aes_alg,		/* cbc(aes) */
	&rk_v3_xts_aes_alg,		/* xts(aes) */
	&rk_v3_cfb_aes_alg,		/* cfb(aes) */
	&rk_v3_ofb_aes_alg,		/* ofb(aes) */
	&rk_v3_ctr_aes_alg,		/* ctr(aes) */
	&rk_v3_gcm_aes_alg,		/* gcm(aes) */

	&rk_v3_ecb_des_alg,		/* ecb(des) */
	&rk_v3_cbc_des_alg,		/* cbc(des) */
	&rk_v3_cfb_des_alg,		/* cfb(des) */
	&rk_v3_ofb_des_alg,		/* ofb(des) */

	&rk_v3_ecb_des3_ede_alg,	/* ecb(des3_ede) */
	&rk_v3_cbc_des3_ede_alg,	/* cbc(des3_ede) */
	&rk_v3_cfb_des3_ede_alg,	/* cfb(des3_ede) */
	&rk_v3_ofb_des3_ede_alg,	/* ofb(des3_ede) */

	&rk_v3_ahash_sha1,		/* sha1 */
	&rk_v3_ahash_sha224,		/* sha224 */
	&rk_v3_ahash_sha256,		/* sha256 */
	&rk_v3_ahash_sha384,		/* sha384 */
	&rk_v3_ahash_sha512,		/* sha512 */
	&rk_v3_ahash_md5,		/* md5 */
	&rk_v3_ahash_sm3,		/* sm3 */

	&rk_v3_hmac_sha1,		/* hmac(sha1) */
	&rk_v3_hmac_sha256,		/* hmac(sha256) */
	&rk_v3_hmac_sha512,		/* hmac(sha512) */
	&rk_v3_hmac_md5,		/* hmac(md5) */
	&rk_v3_hmac_sm3,		/* hmac(sm3) */

	/* Shared with the V2 implementation */
	&rk_v2_asym_rsa,		/* rsa */
};

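/*
 * Check whether the given cipher algorithm, block mode and key length are
 * supported by the hardware, based on the DES/AES/SM4 version registers.
 * A key_len of 0 skips the key-length check.
 */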
static bool rk_is_cipher_support(struct rk_crypto_dev *rk_dev, u32 algo, u32 mode, u32 key_len)
{
	u32 version = 0;
	u32 mask = 0;
	bool key_len_valid = true;

	switch (algo) {
	case CIPHER_ALGO_DES:
	case CIPHER_ALGO_DES3_EDE:
		version = CRYPTO_READ(rk_dev, CRYPTO_DES_VERSION);

		if (key_len == 8)
			key_len_valid = true;
		else if (key_len == 16 || key_len == 24)
			key_len_valid = version & CRYPTO_TDES_FLAG;
		else
			key_len_valid = false;
		break;
	case CIPHER_ALGO_AES:
		version = CRYPTO_READ(rk_dev, CRYPTO_AES_VERSION);

		if (key_len == 16)
			key_len_valid = version & CRYPTO_AES128_FLAG;
		else if (key_len == 24)
			key_len_valid = version & CRYPTO_AES192_FLAG;
		else if (key_len == 32)
			key_len_valid = version & CRYPTO_AES256_FLAG;
		else
			key_len_valid = false;
		break;
	case CIPHER_ALGO_SM4:
		version = CRYPTO_READ(rk_dev, CRYPTO_SM4_VERSION);

		key_len_valid = (key_len == SM4_KEY_SIZE);
		break;
	default:
		return false;
	}

	mask = cipher_mode2bit_mask[mode];

	if (key_len == 0)
		key_len_valid = true;

	return (version & mask) && key_len_valid;
}

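/*
 * Check whether the given hash or HMAC algorithm is supported by the
 * hardware, based on the HASH/HMAC version registers.
 */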
static bool rk_is_hash_support(struct rk_crypto_dev *rk_dev, u32 algo, u32 type)
{
	u32 version = 0;
	u32 mask = 0;

	if (type == ALG_TYPE_HMAC) {
		version = CRYPTO_READ(rk_dev, CRYPTO_HMAC_VERSION);
		mask    = hmac_algo2bit_mask[algo];
	} else if (type == ALG_TYPE_HASH) {
		version = CRYPTO_READ(rk_dev, CRYPTO_HASH_VERSION);
		mask    = hash_algo2bit_mask[algo];
	} else {
		return false;
	}

	return version & mask;
}

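/* Initialize the V3 hardware info and allocate its hardware descriptors. */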
int rk_hw_crypto_v3_init(struct device *dev, void *hw_info)
{
	struct rk_hw_crypto_v3_info *info =
		(struct rk_hw_crypto_v3_info *)hw_info;

	if (!dev || !hw_info)
		return -EINVAL;

	memset(info, 0x00, sizeof(*info));

	return rk_crypto_hw_desc_alloc(dev, &info->hw_desc);
}

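/* Free the hardware descriptors allocated by rk_hw_crypto_v3_init(). */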
void rk_hw_crypto_v3_deinit(struct device *dev, void *hw_info)
{
	struct rk_hw_crypto_v3_info *info =
		(struct rk_hw_crypto_v3_info *)hw_info;

	if (!dev || !hw_info)
		return;

	rk_crypto_hw_desc_free(&info->hw_desc);
}

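/* Return the reset names used by the V3 crypto block. */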
const char * const *rk_hw_crypto_v3_get_rsts(uint32_t *num)
{
	*num = ARRAY_SIZE(crypto_v3_rsts);

	return crypto_v3_rsts;
}

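/* Return the table of algorithms implemented for the V3 crypto block. */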
struct rk_crypto_algt **rk_hw_crypto_v3_get_algts(uint32_t *num)
{
	*num = ARRAY_SIZE(crypto_v3_algs);

	return crypto_v3_algs;
}

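/*
 * Report whether an algorithm template is supported by this hardware
 * instance; asymmetric (RSA) entries are always accepted here.
 */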
bool rk_hw_crypto_v3_algo_valid(struct rk_crypto_dev *rk_dev, struct rk_crypto_algt *aglt)
{
	if (aglt->type == ALG_TYPE_CIPHER || aglt->type == ALG_TYPE_AEAD) {
		CRYPTO_TRACE("CIPHER");
		return rk_is_cipher_support(rk_dev, aglt->algo, aglt->mode, 0);
	} else if (aglt->type == ALG_TYPE_HASH || aglt->type == ALG_TYPE_HMAC) {
		CRYPTO_TRACE("HASH/HMAC");
		return rk_is_hash_support(rk_dev, aglt->algo, aglt->type);
	} else if (aglt->type == ALG_TYPE_ASYM) {
		CRYPTO_TRACE("RSA");
		return true;
	} else {
		return false;
	}
}