// SPDX-License-Identifier: GPL-2.0
/*
 * Rockchip crypto skcipher utils
 *
 * Copyright (c) 2022, Rockchip Electronics Co., Ltd
 *
 * Author: Lin Jinhan <troy.lin@rock-chips.com>
 *
 */

#include "rk_crypto_skcipher_utils.h"

struct rk_crypto_algt *rk_cipher_get_algt(struct crypto_skcipher *tfm)
{
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);

	return container_of(alg, struct rk_crypto_algt, alg.crypto);
}

struct rk_crypto_algt *rk_aead_get_algt(struct crypto_aead *tfm)
{
	struct aead_alg *alg = crypto_aead_alg(tfm);

	return container_of(alg, struct rk_crypto_algt, alg.aead);
}

struct rk_cipher_ctx *rk_cipher_ctx_cast(struct rk_crypto_dev *rk_dev)
{
	struct rk_cipher_ctx *ctx = crypto_tfm_ctx(rk_dev->async_req->tfm);

	return ctx;
}

struct rk_alg_ctx *rk_cipher_alg_ctx(struct rk_crypto_dev *rk_dev)
{
	return &(rk_cipher_ctx_cast(rk_dev)->algs_ctx);
}

/* Modes that accept lengths which are not a multiple of the block size. */
static bool is_no_multi_blocksize(uint32_t mode)
{
	return (mode == CIPHER_MODE_CFB ||
		mode == CIPHER_MODE_OFB ||
		mode == CIPHER_MODE_CTR ||
		mode == CIPHER_MODE_XTS ||
		mode == CIPHER_MODE_GCM);
}
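
/*
 * Illustrative example (not from the original source): a 20-byte request is
 * accepted for the stream-like modes above (e.g. CTR), but is rejected by
 * rk_skcipher_handle_req() for CBC/ECB, where cryptlen must be a multiple of
 * the chunk size (assumed here to be the cipher block size, 16 bytes for
 * AES/SM4).
 */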

int rk_cipher_fallback(struct skcipher_request *req, struct rk_cipher_ctx *ctx, bool encrypt)
{
	int ret;

	CRYPTO_MSG("use fallback tfm");

	if (!ctx->fallback_tfm) {
		ret = -ENODEV;
		CRYPTO_MSG("fallback_tfm is empty!\n");
		goto exit;
	}

	if (!ctx->fallback_key_inited) {
		ret = crypto_skcipher_setkey(ctx->fallback_tfm,
					     ctx->key, ctx->keylen);
		if (ret) {
			CRYPTO_MSG("fallback crypto_skcipher_setkey err = %d\n",
				   ret);
			goto exit;
		}

		ctx->fallback_key_inited = true;
	}

	skcipher_request_set_tfm(&ctx->fallback_req, ctx->fallback_tfm);
	skcipher_request_set_callback(&ctx->fallback_req,
				      req->base.flags,
				      req->base.complete,
				      req->base.data);

	skcipher_request_set_crypt(&ctx->fallback_req, req->src,
				   req->dst, req->cryptlen, req->iv);

	ret = encrypt ? crypto_skcipher_encrypt(&ctx->fallback_req) :
			crypto_skcipher_decrypt(&ctx->fallback_req);

exit:
	return ret;
}

/* Increment a big-endian 128-bit counter by 1 (with wrap-around). */
static void rk_ctr128_inc(uint8_t *counter)
{
	u32 n = 16;
	u8  c;

	do {
		--n;
		c = counter[n];
		++c;
		counter[n] = c;
		if (c)
			return;
	} while (n);
}

/* Advance the counter by the number of AES blocks covered by data_len. */
static void rk_ctr128_calc(uint8_t *counter, uint32_t data_len)
{
	u32 i;
	u32 chunksize = AES_BLOCK_SIZE;

	for (i = 0; i < DIV_ROUND_UP(data_len, chunksize); i++)
		rk_ctr128_inc(counter);
}
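
/*
 * Worked example (illustrative): after a 33-byte CTR chunk,
 * DIV_ROUND_UP(33, 16) = 3, so rk_ctr128_calc() increments the counter three
 * times - one increment per (possibly partial) AES block consumed.
 */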

static uint32_t rk_get_new_iv(struct rk_cipher_ctx *ctx, u32 mode, bool is_enc, uint8_t *iv)
{
	struct scatterlist *sg_dst;
	struct rk_alg_ctx *alg_ctx = &ctx->algs_ctx;
	uint32_t ivsize = alg_ctx->chunk_size;

	if (!iv)
		return 0;

	sg_dst = alg_ctx->aligned ? alg_ctx->sg_dst : &alg_ctx->sg_tmp;

	CRYPTO_TRACE("aligned = %u, count = %u, ivsize = %u, is_enc = %d\n",
		     alg_ctx->aligned, alg_ctx->count, ivsize, is_enc);

	switch (mode) {
	case CIPHER_MODE_CTR:
		rk_ctr128_calc(iv, alg_ctx->count);
		break;
	case CIPHER_MODE_CBC:
	case CIPHER_MODE_CFB:
		if (is_enc)
			sg_pcopy_to_buffer(sg_dst, alg_ctx->map_nents,
					   iv, ivsize, alg_ctx->count - ivsize);
		else
			memcpy(iv, ctx->lastc, ivsize);
		break;
	case CIPHER_MODE_OFB:
		sg_pcopy_to_buffer(sg_dst, alg_ctx->map_nents,
				   iv, ivsize, alg_ctx->count - ivsize);
		crypto_xor(iv, ctx->lastc, ivsize);
		break;
	default:
		return 0;
	}

	return ivsize;
}
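
/*
 * IV chaining summary (derived from rk_get_new_iv() above):
 *  - CTR:     next IV is the counter advanced by the number of blocks processed.
 *  - CBC/CFB: encryption takes the last ciphertext block of this chunk;
 *             decryption uses the last input block saved in ctx->lastc.
 *  - OFB:     last output block XOR last input block recovers the keystream
 *             block, which is the next IV.
 */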

static void rk_iv_copyback(struct rk_crypto_dev *rk_dev)
{
	uint32_t iv_size;
	struct skcipher_request *req = skcipher_request_cast(rk_dev->async_req);
	struct rk_cipher_ctx *ctx = rk_cipher_ctx_cast(rk_dev);
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct rk_crypto_algt *algt = rk_cipher_get_algt(cipher);

	iv_size = rk_get_new_iv(ctx, algt->mode, ctx->is_enc, ctx->iv);

	if (iv_size && req->iv)
		memcpy(req->iv, ctx->iv, iv_size);
}

static void rk_update_iv(struct rk_crypto_dev *rk_dev)
{
	uint32_t iv_size;
	struct rk_cipher_ctx *ctx = rk_cipher_ctx_cast(rk_dev);
	struct rk_alg_ctx *algs_ctx = &ctx->algs_ctx;
	struct skcipher_request *req = skcipher_request_cast(rk_dev->async_req);
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct rk_crypto_algt *algt = rk_cipher_get_algt(cipher);

	iv_size = rk_get_new_iv(ctx, algt->mode, ctx->is_enc, ctx->iv);

	if (iv_size)
		algs_ctx->ops.hw_write_iv(rk_dev, ctx->iv, iv_size);
}

static int rk_set_data_start(struct rk_crypto_dev *rk_dev)
{
	int err;
	struct rk_alg_ctx *alg_ctx = rk_cipher_alg_ctx(rk_dev);

	err = rk_dev->load_data(rk_dev, alg_ctx->sg_src, alg_ctx->sg_dst);
	if (!err) {
		u32 ivsize = alg_ctx->chunk_size;
		struct scatterlist *src_sg;
		struct rk_cipher_ctx *ctx = rk_cipher_ctx_cast(rk_dev);

		memset(ctx->lastc, 0x00, sizeof(ctx->lastc));

		src_sg = alg_ctx->aligned ? alg_ctx->sg_src : &alg_ctx->sg_tmp;

		ivsize = alg_ctx->count > ivsize ? ivsize : alg_ctx->count;

		/* Save the last ivsize bytes of the input so the next IV can
		 * be derived once this chunk completes (see rk_get_new_iv()).
		 */
		sg_pcopy_to_buffer(src_sg, alg_ctx->map_nents,
				   ctx->lastc, ivsize, alg_ctx->count - ivsize);

		alg_ctx->ops.hw_dma_start(rk_dev, true);
	}

	return err;
}
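
/*
 * Overall request flow (as implemented in this file): rk_ablk_start() or
 * rk_aead_start() programs the hardware and submits the first chunk through
 * rk_set_data_start(); rk_ablk_rx() runs after each completed chunk, chaining
 * the IV via rk_update_iv() while data remains, and on the final chunk either
 * copies the IV back to the request or handles the AEAD tag.
 */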

int rk_cipher_setkey(struct crypto_skcipher *cipher, const u8 *key, unsigned int keylen)
{
	struct rk_crypto_algt *algt = rk_cipher_get_algt(cipher);
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
	uint32_t key_factor;
	int ret = -EINVAL;

	CRYPTO_MSG("algo = %x, mode = %x, key_len = %d\n",
		   algt->algo, algt->mode, keylen);

	/* The key length of XTS is twice the normal length */
	key_factor = algt->mode == CIPHER_MODE_XTS ? 2 : 1;

	switch (algt->algo) {
	case CIPHER_ALGO_DES:
		ret = verify_skcipher_des_key(cipher, key);
		if (ret)
			goto exit;
		break;
	case CIPHER_ALGO_DES3_EDE:
		ret = verify_skcipher_des3_key(cipher, key);
		if (ret)
			goto exit;
		break;
	case CIPHER_ALGO_AES:
		if (keylen != (AES_KEYSIZE_128 * key_factor) &&
		    keylen != (AES_KEYSIZE_192 * key_factor) &&
		    keylen != (AES_KEYSIZE_256 * key_factor))
			goto exit;
		break;
	case CIPHER_ALGO_SM4:
		if (keylen != (SM4_KEY_SIZE * key_factor))
			goto exit;
		break;
	default:
		ret = -EINVAL;
		goto exit;
	}

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;
	ctx->fallback_key_inited = false;

	ret = 0;
exit:
	return ret;
}
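
/*
 * Example (illustrative): AES-256-XTS is keyed with a 64-byte blob (two
 * concatenated 32-byte AES keys), so key_factor = 2 lets the AES_KEYSIZE_256
 * check accept it; the same 64-byte key would be rejected for a non-XTS mode,
 * where key_factor = 1.
 */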

int rk_ablk_rx(struct rk_crypto_dev *rk_dev)
{
	int err = 0;
	struct rk_cipher_ctx *ctx = rk_cipher_ctx_cast(rk_dev);
	struct rk_alg_ctx *alg_ctx = rk_cipher_alg_ctx(rk_dev);

	CRYPTO_TRACE("left_bytes = %u\n", alg_ctx->left_bytes);

	err = rk_dev->unload_data(rk_dev);
	if (err)
		goto out_rx;

	if (alg_ctx->left_bytes) {
		rk_update_iv(rk_dev);
		if (alg_ctx->aligned) {
			if (sg_is_last(alg_ctx->sg_src)) {
				dev_err(rk_dev->dev, "[%s:%d] Lack of data\n",
					__func__, __LINE__);
				err = -ENOMEM;
				goto out_rx;
			}
			alg_ctx->sg_src = sg_next(alg_ctx->sg_src);
			alg_ctx->sg_dst = sg_next(alg_ctx->sg_dst);
		}
		err = rk_set_data_start(rk_dev);
	} else {
		if (alg_ctx->is_aead) {
			u8 hard_tag[RK_MAX_TAG_SIZE];
			u8 user_tag[RK_MAX_TAG_SIZE];
			struct aead_request *req =
				aead_request_cast(rk_dev->async_req);
			struct crypto_aead *tfm = crypto_aead_reqtfm(req);
			unsigned int authsize = crypto_aead_authsize(tfm);

			CRYPTO_TRACE("cryptlen = %u, assoclen = %u, aead authsize = %u",
				     alg_ctx->total, alg_ctx->assoclen, authsize);

			err = alg_ctx->ops.hw_get_result(rk_dev, hard_tag, authsize);
			if (err)
				goto out_rx;

			CRYPTO_DUMPHEX("hard_tag", hard_tag, authsize);
			if (!ctx->is_enc) {
				/* Decrypt: compare the hardware tag with the
				 * tag appended to the source buffer.
				 */
				if (!sg_pcopy_to_buffer(alg_ctx->req_src,
							sg_nents(alg_ctx->req_src),
							user_tag, authsize,
							alg_ctx->total +
							alg_ctx->assoclen)) {
					err = -EINVAL;
					goto out_rx;
				}

				CRYPTO_DUMPHEX("user_tag", user_tag, authsize);
				err = crypto_memneq(user_tag, hard_tag, authsize) ? -EBADMSG : 0;
			} else {
				/* Encrypt: append the hardware tag to the destination. */
				if (!sg_pcopy_from_buffer(alg_ctx->req_dst,
							  sg_nents(alg_ctx->req_dst),
							  hard_tag, authsize,
							  alg_ctx->total +
							  alg_ctx->assoclen)) {
					err = -EINVAL;
					goto out_rx;
				}
			}
		} else {
			rk_iv_copyback(rk_dev);
		}
	}
out_rx:
	return err;
}

int rk_ablk_start(struct rk_crypto_dev *rk_dev)
{
	struct skcipher_request *req =
		skcipher_request_cast(rk_dev->async_req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct rk_crypto_algt *algt = rk_cipher_get_algt(tfm);
	struct rk_alg_ctx *alg_ctx = rk_cipher_alg_ctx(rk_dev);
	int err = 0;

	alg_ctx->left_bytes = req->cryptlen;
	alg_ctx->total      = req->cryptlen;
	alg_ctx->sg_src     = req->src;
	alg_ctx->req_src    = req->src;
	alg_ctx->src_nents  = sg_nents_for_len(req->src, req->cryptlen);
	alg_ctx->sg_dst     = req->dst;
	alg_ctx->req_dst    = req->dst;
	alg_ctx->dst_nents  = sg_nents_for_len(req->dst, req->cryptlen);

	CRYPTO_TRACE("total = %u", alg_ctx->total);

	alg_ctx->ops.hw_init(rk_dev, algt->algo, algt->mode);
	err = rk_set_data_start(rk_dev);

	return err;
}

int rk_skcipher_handle_req(struct rk_crypto_dev *rk_dev, struct skcipher_request *req)
{
	struct rk_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct rk_crypto_algt *algt = rk_cipher_get_algt(cipher);

	if (!IS_ALIGNED(req->cryptlen, ctx->algs_ctx.chunk_size) &&
	    !is_no_multi_blocksize(algt->mode))
		return -EINVAL;

	return rk_dev->enqueue(rk_dev, &req->base);
}
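
/*
 * Note on fallbacks (assumption, not stated in this file): ctx->fallback_tfm
 * and ctx->fallback_aead are expected to be allocated elsewhere, typically at
 * tfm init time with CRYPTO_ALG_NEED_FALLBACK. They are keyed lazily in
 * rk_cipher_fallback()/rk_aead_fallback(), and ctx->fallback_key_inited is
 * cleared whenever a new key is installed by the setkey helpers in this file.
 */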

int rk_aead_fallback(struct aead_request *req, struct rk_cipher_ctx *ctx, bool encrypt)
{
	int ret;
	struct aead_request *subreq = aead_request_ctx(req);

	if (!ctx->fallback_aead) {
		CRYPTO_TRACE("fallback_aead is empty");
		return -EINVAL;
	}

	CRYPTO_MSG("use fallback tfm");

	if (!ctx->fallback_key_inited) {
		ret = crypto_aead_setkey(ctx->fallback_aead, ctx->key, ctx->keylen);
		if (ret) {
			CRYPTO_MSG("fallback crypto_aead_setkey err = %d\n", ret);
			goto exit;
		}

		ctx->fallback_key_inited = true;
	}

	aead_request_set_tfm(subreq, ctx->fallback_aead);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete, req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen, req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	ret = encrypt ? crypto_aead_encrypt(subreq) : crypto_aead_decrypt(subreq);

exit:
	return ret;
}

int rk_aead_setkey(struct crypto_aead *cipher, const u8 *key, unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(cipher);
	struct rk_crypto_algt *algt = rk_aead_get_algt(cipher);
	struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret = -EINVAL;

	CRYPTO_MSG("algo = %x, mode = %x, key_len = %d\n", algt->algo, algt->mode, keylen);

	switch (algt->algo) {
	case CIPHER_ALGO_AES:
		if (keylen != AES_KEYSIZE_128 &&
		    keylen != AES_KEYSIZE_192 &&
		    keylen != AES_KEYSIZE_256)
			goto error;

		break;
	case CIPHER_ALGO_SM4:
		if (keylen != SM4_KEY_SIZE)
			goto error;

		break;
	default:
		CRYPTO_TRACE();
		goto error;
	}

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;
	ctx->fallback_key_inited = false;

	return 0;

error:
	return ret;
}

int rk_aead_start(struct rk_crypto_dev *rk_dev)
{
	struct aead_request *req = aead_request_cast(rk_dev->async_req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_aead_ctx(tfm);
	struct rk_crypto_algt *algt = rk_aead_get_algt(tfm);
	struct rk_alg_ctx *alg_ctx = rk_cipher_alg_ctx(rk_dev);
	unsigned int total = 0, authsize;
	int err = 0;

	total = req->cryptlen + req->assoclen;

	/* On decryption, cryptlen includes the authentication tag. */
	authsize = ctx->is_enc ? 0 : crypto_aead_authsize(tfm);

	alg_ctx->total      = req->cryptlen - authsize;
	alg_ctx->assoclen   = req->assoclen;
	alg_ctx->sg_src     = req->src;
	alg_ctx->req_src    = req->src;
	alg_ctx->src_nents  = sg_nents_for_len(req->src, total);
	alg_ctx->sg_dst     = req->dst;
	alg_ctx->req_dst    = req->dst;
	alg_ctx->dst_nents  = sg_nents_for_len(req->dst, total - authsize);
	alg_ctx->left_bytes = alg_ctx->total;

	CRYPTO_TRACE("src_nents = %zu, dst_nents = %zu", alg_ctx->src_nents, alg_ctx->dst_nents);
	CRYPTO_TRACE("is_enc = %d, authsize = %u, cryptlen = %u, total = %u, assoclen = %u",
		     ctx->is_enc, authsize, req->cryptlen, alg_ctx->total, alg_ctx->assoclen);

	alg_ctx->ops.hw_init(rk_dev, algt->algo, algt->mode);
	err = rk_set_data_start(rk_dev);

	return err;
}

int rk_aead_gcm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	return crypto_gcm_check_authsize(authsize);
}
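
/*
 * Note: tag-length validation is delegated to crypto_gcm_check_authsize(),
 * which accepts the standard GCM tag sizes (4, 8 and 12..16 bytes) and
 * rejects everything else.
 */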

int rk_aead_handle_req(struct rk_crypto_dev *rk_dev, struct aead_request *req)
{
	return rk_dev->enqueue(rk_dev, &req->base);
}