xref: /OK3568_Linux_fs/kernel/drivers/crypto/qce/skcipher.c (revision 4882a59341e53eb6f0b4789bf948001014eff981)
1*4882a593Smuzhiyun // SPDX-License-Identifier: GPL-2.0-only
2*4882a593Smuzhiyun /*
3*4882a593Smuzhiyun  * Copyright (c) 2010-2014, The Linux Foundation. All rights reserved.
4*4882a593Smuzhiyun  */
5*4882a593Smuzhiyun 
6*4882a593Smuzhiyun #include <linux/device.h>
7*4882a593Smuzhiyun #include <linux/dma-mapping.h>
8*4882a593Smuzhiyun #include <linux/interrupt.h>
9*4882a593Smuzhiyun #include <linux/moduleparam.h>
10*4882a593Smuzhiyun #include <linux/types.h>
11*4882a593Smuzhiyun #include <crypto/aes.h>
12*4882a593Smuzhiyun #include <crypto/internal/des.h>
13*4882a593Smuzhiyun #include <crypto/internal/skcipher.h>
14*4882a593Smuzhiyun 
15*4882a593Smuzhiyun #include "cipher.h"
16*4882a593Smuzhiyun 
/* Requests of at most this many bytes are routed to the software fallback
 * instead of the QCE hardware; runtime-tunable via the module parameter. */
static unsigned int aes_sw_max_len = CONFIG_CRYPTO_DEV_QCE_SW_MAX_LEN;
module_param(aes_sw_max_len, uint, 0644);
MODULE_PARM_DESC(aes_sw_max_len,
		 "Only use hardware for AES requests larger than this "
		 "[0=always use hardware; anything <16 breaks AES-GCM; default="
		 __stringify(CONFIG_CRYPTO_DEV_QCE_SW_MAX_LEN)"]");

/* All skcipher templates registered by this driver, so they can be torn
 * down again in qce_skcipher_unregister(). */
static LIST_HEAD(skcipher_algs);
25*4882a593Smuzhiyun 
qce_skcipher_done(void * data)26*4882a593Smuzhiyun static void qce_skcipher_done(void *data)
27*4882a593Smuzhiyun {
28*4882a593Smuzhiyun 	struct crypto_async_request *async_req = data;
29*4882a593Smuzhiyun 	struct skcipher_request *req = skcipher_request_cast(async_req);
30*4882a593Smuzhiyun 	struct qce_cipher_reqctx *rctx = skcipher_request_ctx(req);
31*4882a593Smuzhiyun 	struct qce_alg_template *tmpl = to_cipher_tmpl(crypto_skcipher_reqtfm(req));
32*4882a593Smuzhiyun 	struct qce_device *qce = tmpl->qce;
33*4882a593Smuzhiyun 	struct qce_result_dump *result_buf = qce->dma.result_buf;
34*4882a593Smuzhiyun 	enum dma_data_direction dir_src, dir_dst;
35*4882a593Smuzhiyun 	u32 status;
36*4882a593Smuzhiyun 	int error;
37*4882a593Smuzhiyun 	bool diff_dst;
38*4882a593Smuzhiyun 
39*4882a593Smuzhiyun 	diff_dst = (req->src != req->dst) ? true : false;
40*4882a593Smuzhiyun 	dir_src = diff_dst ? DMA_TO_DEVICE : DMA_BIDIRECTIONAL;
41*4882a593Smuzhiyun 	dir_dst = diff_dst ? DMA_FROM_DEVICE : DMA_BIDIRECTIONAL;
42*4882a593Smuzhiyun 
43*4882a593Smuzhiyun 	error = qce_dma_terminate_all(&qce->dma);
44*4882a593Smuzhiyun 	if (error)
45*4882a593Smuzhiyun 		dev_dbg(qce->dev, "skcipher dma termination error (%d)\n",
46*4882a593Smuzhiyun 			error);
47*4882a593Smuzhiyun 
48*4882a593Smuzhiyun 	if (diff_dst)
49*4882a593Smuzhiyun 		dma_unmap_sg(qce->dev, rctx->src_sg, rctx->src_nents, dir_src);
50*4882a593Smuzhiyun 	dma_unmap_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst);
51*4882a593Smuzhiyun 
52*4882a593Smuzhiyun 	sg_free_table(&rctx->dst_tbl);
53*4882a593Smuzhiyun 
54*4882a593Smuzhiyun 	error = qce_check_status(qce, &status);
55*4882a593Smuzhiyun 	if (error < 0)
56*4882a593Smuzhiyun 		dev_dbg(qce->dev, "skcipher operation error (%x)\n", status);
57*4882a593Smuzhiyun 
58*4882a593Smuzhiyun 	memcpy(rctx->iv, result_buf->encr_cntr_iv, rctx->ivsize);
59*4882a593Smuzhiyun 	qce->async_req_done(tmpl->qce, error);
60*4882a593Smuzhiyun }
61*4882a593Smuzhiyun 
62*4882a593Smuzhiyun static int
qce_skcipher_async_req_handle(struct crypto_async_request * async_req)63*4882a593Smuzhiyun qce_skcipher_async_req_handle(struct crypto_async_request *async_req)
64*4882a593Smuzhiyun {
65*4882a593Smuzhiyun 	struct skcipher_request *req = skcipher_request_cast(async_req);
66*4882a593Smuzhiyun 	struct qce_cipher_reqctx *rctx = skcipher_request_ctx(req);
67*4882a593Smuzhiyun 	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
68*4882a593Smuzhiyun 	struct qce_alg_template *tmpl = to_cipher_tmpl(crypto_skcipher_reqtfm(req));
69*4882a593Smuzhiyun 	struct qce_device *qce = tmpl->qce;
70*4882a593Smuzhiyun 	enum dma_data_direction dir_src, dir_dst;
71*4882a593Smuzhiyun 	struct scatterlist *sg;
72*4882a593Smuzhiyun 	bool diff_dst;
73*4882a593Smuzhiyun 	gfp_t gfp;
74*4882a593Smuzhiyun 	int dst_nents, src_nents, ret;
75*4882a593Smuzhiyun 
76*4882a593Smuzhiyun 	rctx->iv = req->iv;
77*4882a593Smuzhiyun 	rctx->ivsize = crypto_skcipher_ivsize(skcipher);
78*4882a593Smuzhiyun 	rctx->cryptlen = req->cryptlen;
79*4882a593Smuzhiyun 
80*4882a593Smuzhiyun 	diff_dst = (req->src != req->dst) ? true : false;
81*4882a593Smuzhiyun 	dir_src = diff_dst ? DMA_TO_DEVICE : DMA_BIDIRECTIONAL;
82*4882a593Smuzhiyun 	dir_dst = diff_dst ? DMA_FROM_DEVICE : DMA_BIDIRECTIONAL;
83*4882a593Smuzhiyun 
84*4882a593Smuzhiyun 	rctx->src_nents = sg_nents_for_len(req->src, req->cryptlen);
85*4882a593Smuzhiyun 	if (diff_dst)
86*4882a593Smuzhiyun 		rctx->dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
87*4882a593Smuzhiyun 	else
88*4882a593Smuzhiyun 		rctx->dst_nents = rctx->src_nents;
89*4882a593Smuzhiyun 	if (rctx->src_nents < 0) {
90*4882a593Smuzhiyun 		dev_err(qce->dev, "Invalid numbers of src SG.\n");
91*4882a593Smuzhiyun 		return rctx->src_nents;
92*4882a593Smuzhiyun 	}
93*4882a593Smuzhiyun 	if (rctx->dst_nents < 0) {
94*4882a593Smuzhiyun 		dev_err(qce->dev, "Invalid numbers of dst SG.\n");
95*4882a593Smuzhiyun 		return -rctx->dst_nents;
96*4882a593Smuzhiyun 	}
97*4882a593Smuzhiyun 
98*4882a593Smuzhiyun 	rctx->dst_nents += 1;
99*4882a593Smuzhiyun 
100*4882a593Smuzhiyun 	gfp = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
101*4882a593Smuzhiyun 						GFP_KERNEL : GFP_ATOMIC;
102*4882a593Smuzhiyun 
103*4882a593Smuzhiyun 	ret = sg_alloc_table(&rctx->dst_tbl, rctx->dst_nents, gfp);
104*4882a593Smuzhiyun 	if (ret)
105*4882a593Smuzhiyun 		return ret;
106*4882a593Smuzhiyun 
107*4882a593Smuzhiyun 	sg_init_one(&rctx->result_sg, qce->dma.result_buf, QCE_RESULT_BUF_SZ);
108*4882a593Smuzhiyun 
109*4882a593Smuzhiyun 	sg = qce_sgtable_add(&rctx->dst_tbl, req->dst, req->cryptlen);
110*4882a593Smuzhiyun 	if (IS_ERR(sg)) {
111*4882a593Smuzhiyun 		ret = PTR_ERR(sg);
112*4882a593Smuzhiyun 		goto error_free;
113*4882a593Smuzhiyun 	}
114*4882a593Smuzhiyun 
115*4882a593Smuzhiyun 	sg = qce_sgtable_add(&rctx->dst_tbl, &rctx->result_sg,
116*4882a593Smuzhiyun 			     QCE_RESULT_BUF_SZ);
117*4882a593Smuzhiyun 	if (IS_ERR(sg)) {
118*4882a593Smuzhiyun 		ret = PTR_ERR(sg);
119*4882a593Smuzhiyun 		goto error_free;
120*4882a593Smuzhiyun 	}
121*4882a593Smuzhiyun 
122*4882a593Smuzhiyun 	sg_mark_end(sg);
123*4882a593Smuzhiyun 	rctx->dst_sg = rctx->dst_tbl.sgl;
124*4882a593Smuzhiyun 
125*4882a593Smuzhiyun 	dst_nents = dma_map_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst);
126*4882a593Smuzhiyun 	if (dst_nents < 0) {
127*4882a593Smuzhiyun 		ret = dst_nents;
128*4882a593Smuzhiyun 		goto error_free;
129*4882a593Smuzhiyun 	}
130*4882a593Smuzhiyun 
131*4882a593Smuzhiyun 	if (diff_dst) {
132*4882a593Smuzhiyun 		src_nents = dma_map_sg(qce->dev, req->src, rctx->src_nents, dir_src);
133*4882a593Smuzhiyun 		if (src_nents < 0) {
134*4882a593Smuzhiyun 			ret = src_nents;
135*4882a593Smuzhiyun 			goto error_unmap_dst;
136*4882a593Smuzhiyun 		}
137*4882a593Smuzhiyun 		rctx->src_sg = req->src;
138*4882a593Smuzhiyun 	} else {
139*4882a593Smuzhiyun 		rctx->src_sg = rctx->dst_sg;
140*4882a593Smuzhiyun 		src_nents = dst_nents - 1;
141*4882a593Smuzhiyun 	}
142*4882a593Smuzhiyun 
143*4882a593Smuzhiyun 	ret = qce_dma_prep_sgs(&qce->dma, rctx->src_sg, src_nents,
144*4882a593Smuzhiyun 			       rctx->dst_sg, dst_nents,
145*4882a593Smuzhiyun 			       qce_skcipher_done, async_req);
146*4882a593Smuzhiyun 	if (ret)
147*4882a593Smuzhiyun 		goto error_unmap_src;
148*4882a593Smuzhiyun 
149*4882a593Smuzhiyun 	qce_dma_issue_pending(&qce->dma);
150*4882a593Smuzhiyun 
151*4882a593Smuzhiyun 	ret = qce_start(async_req, tmpl->crypto_alg_type, req->cryptlen, 0);
152*4882a593Smuzhiyun 	if (ret)
153*4882a593Smuzhiyun 		goto error_terminate;
154*4882a593Smuzhiyun 
155*4882a593Smuzhiyun 	return 0;
156*4882a593Smuzhiyun 
157*4882a593Smuzhiyun error_terminate:
158*4882a593Smuzhiyun 	qce_dma_terminate_all(&qce->dma);
159*4882a593Smuzhiyun error_unmap_src:
160*4882a593Smuzhiyun 	if (diff_dst)
161*4882a593Smuzhiyun 		dma_unmap_sg(qce->dev, req->src, rctx->src_nents, dir_src);
162*4882a593Smuzhiyun error_unmap_dst:
163*4882a593Smuzhiyun 	dma_unmap_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst);
164*4882a593Smuzhiyun error_free:
165*4882a593Smuzhiyun 	sg_free_table(&rctx->dst_tbl);
166*4882a593Smuzhiyun 	return ret;
167*4882a593Smuzhiyun }
168*4882a593Smuzhiyun 
qce_skcipher_setkey(struct crypto_skcipher * ablk,const u8 * key,unsigned int keylen)169*4882a593Smuzhiyun static int qce_skcipher_setkey(struct crypto_skcipher *ablk, const u8 *key,
170*4882a593Smuzhiyun 				 unsigned int keylen)
171*4882a593Smuzhiyun {
172*4882a593Smuzhiyun 	struct crypto_tfm *tfm = crypto_skcipher_tfm(ablk);
173*4882a593Smuzhiyun 	struct qce_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
174*4882a593Smuzhiyun 	unsigned long flags = to_cipher_tmpl(ablk)->alg_flags;
175*4882a593Smuzhiyun 	int ret;
176*4882a593Smuzhiyun 
177*4882a593Smuzhiyun 	if (!key || !keylen)
178*4882a593Smuzhiyun 		return -EINVAL;
179*4882a593Smuzhiyun 
180*4882a593Smuzhiyun 	switch (IS_XTS(flags) ? keylen >> 1 : keylen) {
181*4882a593Smuzhiyun 	case AES_KEYSIZE_128:
182*4882a593Smuzhiyun 	case AES_KEYSIZE_256:
183*4882a593Smuzhiyun 		memcpy(ctx->enc_key, key, keylen);
184*4882a593Smuzhiyun 		break;
185*4882a593Smuzhiyun 	}
186*4882a593Smuzhiyun 
187*4882a593Smuzhiyun 	ret = crypto_skcipher_setkey(ctx->fallback, key, keylen);
188*4882a593Smuzhiyun 	if (!ret)
189*4882a593Smuzhiyun 		ctx->enc_keylen = keylen;
190*4882a593Smuzhiyun 	return ret;
191*4882a593Smuzhiyun }
192*4882a593Smuzhiyun 
qce_des_setkey(struct crypto_skcipher * ablk,const u8 * key,unsigned int keylen)193*4882a593Smuzhiyun static int qce_des_setkey(struct crypto_skcipher *ablk, const u8 *key,
194*4882a593Smuzhiyun 			  unsigned int keylen)
195*4882a593Smuzhiyun {
196*4882a593Smuzhiyun 	struct qce_cipher_ctx *ctx = crypto_skcipher_ctx(ablk);
197*4882a593Smuzhiyun 	int err;
198*4882a593Smuzhiyun 
199*4882a593Smuzhiyun 	err = verify_skcipher_des_key(ablk, key);
200*4882a593Smuzhiyun 	if (err)
201*4882a593Smuzhiyun 		return err;
202*4882a593Smuzhiyun 
203*4882a593Smuzhiyun 	ctx->enc_keylen = keylen;
204*4882a593Smuzhiyun 	memcpy(ctx->enc_key, key, keylen);
205*4882a593Smuzhiyun 	return 0;
206*4882a593Smuzhiyun }
207*4882a593Smuzhiyun 
qce_des3_setkey(struct crypto_skcipher * ablk,const u8 * key,unsigned int keylen)208*4882a593Smuzhiyun static int qce_des3_setkey(struct crypto_skcipher *ablk, const u8 *key,
209*4882a593Smuzhiyun 			   unsigned int keylen)
210*4882a593Smuzhiyun {
211*4882a593Smuzhiyun 	struct qce_cipher_ctx *ctx = crypto_skcipher_ctx(ablk);
212*4882a593Smuzhiyun 	int err;
213*4882a593Smuzhiyun 
214*4882a593Smuzhiyun 	err = verify_skcipher_des3_key(ablk, key);
215*4882a593Smuzhiyun 	if (err)
216*4882a593Smuzhiyun 		return err;
217*4882a593Smuzhiyun 
218*4882a593Smuzhiyun 	ctx->enc_keylen = keylen;
219*4882a593Smuzhiyun 	memcpy(ctx->enc_key, key, keylen);
220*4882a593Smuzhiyun 	return 0;
221*4882a593Smuzhiyun }
222*4882a593Smuzhiyun 
qce_skcipher_crypt(struct skcipher_request * req,int encrypt)223*4882a593Smuzhiyun static int qce_skcipher_crypt(struct skcipher_request *req, int encrypt)
224*4882a593Smuzhiyun {
225*4882a593Smuzhiyun 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
226*4882a593Smuzhiyun 	struct qce_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
227*4882a593Smuzhiyun 	struct qce_cipher_reqctx *rctx = skcipher_request_ctx(req);
228*4882a593Smuzhiyun 	struct qce_alg_template *tmpl = to_cipher_tmpl(tfm);
229*4882a593Smuzhiyun 	int keylen;
230*4882a593Smuzhiyun 	int ret;
231*4882a593Smuzhiyun 
232*4882a593Smuzhiyun 	rctx->flags = tmpl->alg_flags;
233*4882a593Smuzhiyun 	rctx->flags |= encrypt ? QCE_ENCRYPT : QCE_DECRYPT;
234*4882a593Smuzhiyun 	keylen = IS_XTS(rctx->flags) ? ctx->enc_keylen >> 1 : ctx->enc_keylen;
235*4882a593Smuzhiyun 
236*4882a593Smuzhiyun 	/* qce is hanging when AES-XTS request len > QCE_SECTOR_SIZE and
237*4882a593Smuzhiyun 	 * is not a multiple of it; pass such requests to the fallback
238*4882a593Smuzhiyun 	 */
239*4882a593Smuzhiyun 	if (IS_AES(rctx->flags) &&
240*4882a593Smuzhiyun 	    (((keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_256) ||
241*4882a593Smuzhiyun 	      req->cryptlen <= aes_sw_max_len) ||
242*4882a593Smuzhiyun 	     (IS_XTS(rctx->flags) && req->cryptlen > QCE_SECTOR_SIZE &&
243*4882a593Smuzhiyun 	      req->cryptlen % QCE_SECTOR_SIZE))) {
244*4882a593Smuzhiyun 		skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback);
245*4882a593Smuzhiyun 		skcipher_request_set_callback(&rctx->fallback_req,
246*4882a593Smuzhiyun 					      req->base.flags,
247*4882a593Smuzhiyun 					      req->base.complete,
248*4882a593Smuzhiyun 					      req->base.data);
249*4882a593Smuzhiyun 		skcipher_request_set_crypt(&rctx->fallback_req, req->src,
250*4882a593Smuzhiyun 					   req->dst, req->cryptlen, req->iv);
251*4882a593Smuzhiyun 		ret = encrypt ? crypto_skcipher_encrypt(&rctx->fallback_req) :
252*4882a593Smuzhiyun 				crypto_skcipher_decrypt(&rctx->fallback_req);
253*4882a593Smuzhiyun 		return ret;
254*4882a593Smuzhiyun 	}
255*4882a593Smuzhiyun 
256*4882a593Smuzhiyun 	return tmpl->qce->async_req_enqueue(tmpl->qce, &req->base);
257*4882a593Smuzhiyun }
258*4882a593Smuzhiyun 
/* skcipher .encrypt entry point: dispatch via the common crypt path. */
static int qce_skcipher_encrypt(struct skcipher_request *req)
{
	return qce_skcipher_crypt(req, 1);
}
263*4882a593Smuzhiyun 
/* skcipher .decrypt entry point: dispatch via the common crypt path. */
static int qce_skcipher_decrypt(struct skcipher_request *req)
{
	return qce_skcipher_crypt(req, 0);
}
268*4882a593Smuzhiyun 
/* .init for algorithms without a fallback (DES/3DES): only reserve the
 * request context, excluding the trailing fallback skcipher_request. */
static int qce_skcipher_init(struct crypto_skcipher *tfm)
{
	/* take the size without the fallback skcipher_request at the end */
	crypto_skcipher_set_reqsize(tfm, offsetof(struct qce_cipher_reqctx,
						  fallback_req));
	return 0;
}
276*4882a593Smuzhiyun 
qce_skcipher_init_fallback(struct crypto_skcipher * tfm)277*4882a593Smuzhiyun static int qce_skcipher_init_fallback(struct crypto_skcipher *tfm)
278*4882a593Smuzhiyun {
279*4882a593Smuzhiyun 	struct qce_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
280*4882a593Smuzhiyun 
281*4882a593Smuzhiyun 	ctx->fallback = crypto_alloc_skcipher(crypto_tfm_alg_name(&tfm->base),
282*4882a593Smuzhiyun 					      0, CRYPTO_ALG_NEED_FALLBACK);
283*4882a593Smuzhiyun 	if (IS_ERR(ctx->fallback))
284*4882a593Smuzhiyun 		return PTR_ERR(ctx->fallback);
285*4882a593Smuzhiyun 
286*4882a593Smuzhiyun 	crypto_skcipher_set_reqsize(tfm, sizeof(struct qce_cipher_reqctx) +
287*4882a593Smuzhiyun 					 crypto_skcipher_reqsize(ctx->fallback));
288*4882a593Smuzhiyun 	return 0;
289*4882a593Smuzhiyun }
290*4882a593Smuzhiyun 
/* .exit for AES algorithms: release the fallback cipher allocated in
 * qce_skcipher_init_fallback(). */
static void qce_skcipher_exit(struct crypto_skcipher *tfm)
{
	struct qce_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->fallback);
}
297*4882a593Smuzhiyun 
/* Static description of one skcipher algorithm this driver can offer. */
struct qce_skcipher_def {
	unsigned long flags;		/* QCE_ALG_* | QCE_MODE_* */
	const char *name;		/* crypto API algorithm name */
	const char *drv_name;		/* driver-specific algorithm name */
	unsigned int blocksize;		/* cipher block size in bytes */
	unsigned int chunksize;		/* stream-cipher chunk size (CTR) */
	unsigned int ivsize;		/* IV size in bytes (0 for ECB) */
	unsigned int min_keysize;	/* smallest accepted key length */
	unsigned int max_keysize;	/* largest accepted key length */
};
308*4882a593Smuzhiyun 
/* All algorithms registered with the crypto API by this driver:
 * AES (ECB/CBC/CTR/XTS), DES and 3DES (ECB/CBC). */
static const struct qce_skcipher_def skcipher_def[] = {
	{
		.flags		= QCE_ALG_AES | QCE_MODE_ECB,
		.name		= "ecb(aes)",
		.drv_name	= "ecb-aes-qce",
		.blocksize	= AES_BLOCK_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
	},
	{
		.flags		= QCE_ALG_AES | QCE_MODE_CBC,
		.name		= "cbc(aes)",
		.drv_name	= "cbc-aes-qce",
		.blocksize	= AES_BLOCK_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
	},
	{
		.flags		= QCE_ALG_AES | QCE_MODE_CTR,
		.name		= "ctr(aes)",
		.drv_name	= "ctr-aes-qce",
		.blocksize	= 1,	/* stream cipher */
		.chunksize	= AES_BLOCK_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
	},
	{
		.flags		= QCE_ALG_AES | QCE_MODE_XTS,
		.name		= "xts(aes)",
		.drv_name	= "xts-aes-qce",
		.blocksize	= AES_BLOCK_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		/* XTS takes two concatenated keys. */
		.min_keysize	= AES_MIN_KEY_SIZE * 2,
		.max_keysize	= AES_MAX_KEY_SIZE * 2,
	},
	{
		.flags		= QCE_ALG_DES | QCE_MODE_ECB,
		.name		= "ecb(des)",
		.drv_name	= "ecb-des-qce",
		.blocksize	= DES_BLOCK_SIZE,
		.ivsize		= 0,
		.min_keysize	= DES_KEY_SIZE,
		.max_keysize	= DES_KEY_SIZE,
	},
	{
		.flags		= QCE_ALG_DES | QCE_MODE_CBC,
		.name		= "cbc(des)",
		.drv_name	= "cbc-des-qce",
		.blocksize	= DES_BLOCK_SIZE,
		.ivsize		= DES_BLOCK_SIZE,
		.min_keysize	= DES_KEY_SIZE,
		.max_keysize	= DES_KEY_SIZE,
	},
	{
		.flags		= QCE_ALG_3DES | QCE_MODE_ECB,
		.name		= "ecb(des3_ede)",
		.drv_name	= "ecb-3des-qce",
		.blocksize	= DES3_EDE_BLOCK_SIZE,
		.ivsize		= 0,
		.min_keysize	= DES3_EDE_KEY_SIZE,
		.max_keysize	= DES3_EDE_KEY_SIZE,
	},
	{
		.flags		= QCE_ALG_3DES | QCE_MODE_CBC,
		.name		= "cbc(des3_ede)",
		.drv_name	= "cbc-3des-qce",
		.blocksize	= DES3_EDE_BLOCK_SIZE,
		.ivsize		= DES3_EDE_BLOCK_SIZE,
		.min_keysize	= DES3_EDE_KEY_SIZE,
		.max_keysize	= DES3_EDE_KEY_SIZE,
	},
};
384*4882a593Smuzhiyun 
qce_skcipher_register_one(const struct qce_skcipher_def * def,struct qce_device * qce)385*4882a593Smuzhiyun static int qce_skcipher_register_one(const struct qce_skcipher_def *def,
386*4882a593Smuzhiyun 				       struct qce_device *qce)
387*4882a593Smuzhiyun {
388*4882a593Smuzhiyun 	struct qce_alg_template *tmpl;
389*4882a593Smuzhiyun 	struct skcipher_alg *alg;
390*4882a593Smuzhiyun 	int ret;
391*4882a593Smuzhiyun 
392*4882a593Smuzhiyun 	tmpl = kzalloc(sizeof(*tmpl), GFP_KERNEL);
393*4882a593Smuzhiyun 	if (!tmpl)
394*4882a593Smuzhiyun 		return -ENOMEM;
395*4882a593Smuzhiyun 
396*4882a593Smuzhiyun 	alg = &tmpl->alg.skcipher;
397*4882a593Smuzhiyun 
398*4882a593Smuzhiyun 	snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", def->name);
399*4882a593Smuzhiyun 	snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
400*4882a593Smuzhiyun 		 def->drv_name);
401*4882a593Smuzhiyun 
402*4882a593Smuzhiyun 	alg->base.cra_blocksize		= def->blocksize;
403*4882a593Smuzhiyun 	alg->chunksize			= def->chunksize;
404*4882a593Smuzhiyun 	alg->ivsize			= def->ivsize;
405*4882a593Smuzhiyun 	alg->min_keysize		= def->min_keysize;
406*4882a593Smuzhiyun 	alg->max_keysize		= def->max_keysize;
407*4882a593Smuzhiyun 	alg->setkey			= IS_3DES(def->flags) ? qce_des3_setkey :
408*4882a593Smuzhiyun 					  IS_DES(def->flags) ? qce_des_setkey :
409*4882a593Smuzhiyun 					  qce_skcipher_setkey;
410*4882a593Smuzhiyun 	alg->encrypt			= qce_skcipher_encrypt;
411*4882a593Smuzhiyun 	alg->decrypt			= qce_skcipher_decrypt;
412*4882a593Smuzhiyun 
413*4882a593Smuzhiyun 	alg->base.cra_priority		= 300;
414*4882a593Smuzhiyun 	alg->base.cra_flags		= CRYPTO_ALG_ASYNC |
415*4882a593Smuzhiyun 					  CRYPTO_ALG_ALLOCATES_MEMORY |
416*4882a593Smuzhiyun 					  CRYPTO_ALG_KERN_DRIVER_ONLY;
417*4882a593Smuzhiyun 	alg->base.cra_ctxsize		= sizeof(struct qce_cipher_ctx);
418*4882a593Smuzhiyun 	alg->base.cra_alignmask		= 0;
419*4882a593Smuzhiyun 	alg->base.cra_module		= THIS_MODULE;
420*4882a593Smuzhiyun 
421*4882a593Smuzhiyun 	if (IS_AES(def->flags)) {
422*4882a593Smuzhiyun 		alg->base.cra_flags    |= CRYPTO_ALG_NEED_FALLBACK;
423*4882a593Smuzhiyun 		alg->init		= qce_skcipher_init_fallback;
424*4882a593Smuzhiyun 		alg->exit		= qce_skcipher_exit;
425*4882a593Smuzhiyun 	} else {
426*4882a593Smuzhiyun 		alg->init		= qce_skcipher_init;
427*4882a593Smuzhiyun 	}
428*4882a593Smuzhiyun 
429*4882a593Smuzhiyun 	INIT_LIST_HEAD(&tmpl->entry);
430*4882a593Smuzhiyun 	tmpl->crypto_alg_type = CRYPTO_ALG_TYPE_SKCIPHER;
431*4882a593Smuzhiyun 	tmpl->alg_flags = def->flags;
432*4882a593Smuzhiyun 	tmpl->qce = qce;
433*4882a593Smuzhiyun 
434*4882a593Smuzhiyun 	ret = crypto_register_skcipher(alg);
435*4882a593Smuzhiyun 	if (ret) {
436*4882a593Smuzhiyun 		dev_err(qce->dev, "%s registration failed\n", alg->base.cra_name);
437*4882a593Smuzhiyun 		kfree(tmpl);
438*4882a593Smuzhiyun 		return ret;
439*4882a593Smuzhiyun 	}
440*4882a593Smuzhiyun 
441*4882a593Smuzhiyun 	list_add_tail(&tmpl->entry, &skcipher_algs);
442*4882a593Smuzhiyun 	dev_dbg(qce->dev, "%s is registered\n", alg->base.cra_name);
443*4882a593Smuzhiyun 	return 0;
444*4882a593Smuzhiyun }
445*4882a593Smuzhiyun 
/* Unregister and free every algorithm template previously queued on
 * skcipher_algs by qce_skcipher_register_one(). */
static void qce_skcipher_unregister(struct qce_device *qce)
{
	struct qce_alg_template *tmpl, *n;

	/* _safe variant: each entry is deleted and freed inside the loop. */
	list_for_each_entry_safe(tmpl, n, &skcipher_algs, entry) {
		crypto_unregister_skcipher(&tmpl->alg.skcipher);
		list_del(&tmpl->entry);
		kfree(tmpl);
	}
}
456*4882a593Smuzhiyun 
qce_skcipher_register(struct qce_device * qce)457*4882a593Smuzhiyun static int qce_skcipher_register(struct qce_device *qce)
458*4882a593Smuzhiyun {
459*4882a593Smuzhiyun 	int ret, i;
460*4882a593Smuzhiyun 
461*4882a593Smuzhiyun 	for (i = 0; i < ARRAY_SIZE(skcipher_def); i++) {
462*4882a593Smuzhiyun 		ret = qce_skcipher_register_one(&skcipher_def[i], qce);
463*4882a593Smuzhiyun 		if (ret)
464*4882a593Smuzhiyun 			goto err;
465*4882a593Smuzhiyun 	}
466*4882a593Smuzhiyun 
467*4882a593Smuzhiyun 	return 0;
468*4882a593Smuzhiyun err:
469*4882a593Smuzhiyun 	qce_skcipher_unregister(qce);
470*4882a593Smuzhiyun 	return ret;
471*4882a593Smuzhiyun }
472*4882a593Smuzhiyun 
/* Hooks exported to the QCE core for the skcipher algorithm family. */
const struct qce_algo_ops skcipher_ops = {
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.register_algs = qce_skcipher_register,
	.unregister_algs = qce_skcipher_unregister,
	.async_req_handle = qce_skcipher_async_req_handle,
};
479