// SPDX-License-Identifier: GPL-2.0
/*
 * Crypto acceleration support for Rockchip RK3288
 *
 * Copyright (c) 2015, Fuzhou Rockchip Electronics Co., Ltd
 *
 * Author: Zain Wang <zain.wang@rock-chips.com>
 *
 * Some ideas are from marvell-cesa.c and s5p-sss.c driver.
 */
#include "rk_crypto_core.h"
#include "rk_crypto_v1.h"
#include "rk_crypto_v1_reg.h"

/* OR'd into the mode word by rk_cipher_decrypt() to select decryption. */
#define RK_CRYPTO_DEC			BIT(0)

static struct rk_alg_ctx *rk_alg_ctx_cast(
	struct rk_crypto_dev *rk_dev)
{
	struct skcipher_request *req =
		skcipher_request_cast(rk_dev->async_req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);

	return &ctx->algs_ctx;
}

static int rk_crypto_irq_handle(int irq, void *dev_id)
{
	struct rk_crypto_dev *rk_dev = platform_get_drvdata(dev_id);
	u32 interrupt_status;

	interrupt_status = CRYPTO_READ(rk_dev, RK_CRYPTO_INTSTS);
	CRYPTO_WRITE(rk_dev, RK_CRYPTO_INTSTS, interrupt_status);

	/* 0x0a masks the BCDMA/HRDMA error interrupt bits. */
	if (interrupt_status & 0x0a) {
		dev_warn(rk_dev->dev, "DMA Error\n");
		rk_dev->err = -EFAULT;
	}

	return 0;
}

static void rk_crypto_complete(struct crypto_async_request *base, int err)
{
	if (base->complete)
		base->complete(base, err);
}

static int rk_handle_req(struct rk_crypto_dev *rk_dev,
			 struct skcipher_request *req)
{
	struct rk_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);

	/* Only whole multiples of align_size are accepted; there is no
	 * software fallback for trailing partial blocks.
	 */
	if (!IS_ALIGNED(req->cryptlen, ctx->algs_ctx.align_size))
		return -EINVAL;

	return rk_dev->enqueue(rk_dev, &req->base);
}

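/*
 * Map an (algo, mode) pair onto the block-cipher control word the v1
 * hardware expects.  DES-ECB is the all-zero default; for example,
 * AES-CBC yields RK_CRYPTO_AES_CBC_MODE, DES3-EDE additionally carries
 * RK_CRYPTO_TDES_SELECT, and rk_cipher_decrypt() later ORs in
 * RK_CRYPTO_DEC.  Unsupported combinations return -EINVAL.
 */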
static int rk_get_bc(u32 algo, u32 mode, u32 *bc_val)
{
	/* default DES ECB mode */
	*bc_val = 0;

	switch (algo) {
	case CIPHER_ALGO_DES3_EDE:
		*bc_val |= RK_CRYPTO_TDES_SELECT;
		fallthrough;
	case CIPHER_ALGO_DES:
		/* ECB adds no bits; use |= so a DES3_EDE fallthrough keeps
		 * RK_CRYPTO_TDES_SELECT instead of clobbering it.
		 */
		if (mode == CIPHER_MODE_CBC)
			*bc_val |= RK_CRYPTO_TDES_CHAINMODE_CBC;
		else if (mode != CIPHER_MODE_ECB)
			goto error;
		break;
	case CIPHER_ALGO_AES:
		if (mode == CIPHER_MODE_ECB)
			*bc_val = RK_CRYPTO_AES_ECB_MODE;
		else if (mode == CIPHER_MODE_CBC)
			*bc_val = RK_CRYPTO_AES_CBC_MODE;
		else
			goto error;
		break;
	default:
		goto error;
	}

	return 0;
error:
	return -EINVAL;
}

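/*
 * Validate and cache the key.  DES/DES3 keys go through the generic
 * weak-key checks; AES accepts 128-, 192- and 256-bit keys.  The key is
 * only programmed into the hardware later, in rk_ablk_hw_init().
 */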
static int rk_cipher_setkey(struct crypto_skcipher *cipher,
			    const u8 *key, unsigned int keylen)
{
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct skcipher_alg *alg = crypto_skcipher_alg(cipher);
	struct rk_crypto_algt *algt;
	int err;

	algt = container_of(alg, struct rk_crypto_algt, alg.crypto);

	CRYPTO_MSG("algo = %x, mode = %x, key_len = %d\n",
		   algt->algo, algt->mode, keylen);

	switch (algt->algo) {
	case CIPHER_ALGO_DES:
		if (keylen != DES_KEY_SIZE)
			goto error;

		err = verify_skcipher_des_key(cipher, key);
		if (err)
			goto error;

		break;
	case CIPHER_ALGO_DES3_EDE:
		err = verify_skcipher_des3_key(cipher, key);
		if (err)
			goto error;
		break;
	case CIPHER_ALGO_AES:
		if (keylen != AES_KEYSIZE_128 &&
		    keylen != AES_KEYSIZE_192 &&
		    keylen != AES_KEYSIZE_256)
			goto error;
		break;
	default:
		goto error;
	}

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return 0;

error:
	return -EINVAL;
}

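/*
 * Encrypt and decrypt differ only in direction: both resolve the control
 * word through rk_get_bc(), and decrypt additionally sets RK_CRYPTO_DEC
 * before the request is queued via rk_handle_req().
 */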
static int rk_cipher_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct rk_crypto_dev *rk_dev = ctx->rk_dev;
	struct rk_crypto_algt *algt;
	int ret;

	algt = container_of(alg, struct rk_crypto_algt, alg.crypto);

	ret = rk_get_bc(algt->algo, algt->mode, &ctx->mode);
	if (ret)
		return ret;

	CRYPTO_MSG("ctx->mode = %x\n", ctx->mode);

	return rk_handle_req(rk_dev, req);
}

static int rk_cipher_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct rk_crypto_dev *rk_dev = ctx->rk_dev;
	struct rk_crypto_algt *algt;
	int ret;

	algt = container_of(alg, struct rk_crypto_algt, alg.crypto);

	ret = rk_get_bc(algt->algo, algt->mode, &ctx->mode);
	if (ret)
		return ret;

	ctx->mode |= RK_CRYPTO_DEC;

	CRYPTO_MSG("ctx->mode = %x\n", ctx->mode);

	return rk_handle_req(rk_dev, req);
}

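/*
 * Program key, IV, chaining mode and byte-swap settings for the queued
 * request.  DES and DES3 share the TDES register bank (RK_CRYPTO_TDES_SELECT
 * from rk_get_bc() picks 3DES); anything with a non-DES block size is
 * handled as AES, with the key length encoded in the control word.
 */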
static void rk_ablk_hw_init(struct rk_crypto_dev *rk_dev)
{
	struct skcipher_request *req =
		skcipher_request_cast(rk_dev->async_req);
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(cipher);
	u32 ivsize, block, conf_reg = 0;

	block = crypto_tfm_alg_blocksize(tfm);
	ivsize = crypto_skcipher_ivsize(cipher);

	if (block == DES_BLOCK_SIZE) {
		memcpy_toio(ctx->rk_dev->reg + RK_CRYPTO_TDES_KEY1_0,
			    ctx->key, ctx->keylen);
		ctx->mode |= RK_CRYPTO_TDES_FIFO_MODE |
			     RK_CRYPTO_TDES_BYTESWAP_KEY |
			     RK_CRYPTO_TDES_BYTESWAP_IV;
		CRYPTO_WRITE(rk_dev, RK_CRYPTO_TDES_CTRL, ctx->mode);
		memcpy_toio(rk_dev->reg + RK_CRYPTO_TDES_IV_0,
			    req->iv, ivsize);
		conf_reg = RK_CRYPTO_DESSEL;
	} else {
		memcpy_toio(ctx->rk_dev->reg + RK_CRYPTO_AES_KEY_0,
			    ctx->key, ctx->keylen);
		ctx->mode |= RK_CRYPTO_AES_FIFO_MODE |
			     RK_CRYPTO_AES_KEY_CHANGE |
			     RK_CRYPTO_AES_BYTESWAP_KEY |
			     RK_CRYPTO_AES_BYTESWAP_IV;
		if (ctx->keylen == AES_KEYSIZE_192)
			ctx->mode |= RK_CRYPTO_AES_192BIT_key;
		else if (ctx->keylen == AES_KEYSIZE_256)
			ctx->mode |= RK_CRYPTO_AES_256BIT_key;
		CRYPTO_WRITE(rk_dev, RK_CRYPTO_AES_CTRL, ctx->mode);
		memcpy_toio(rk_dev->reg + RK_CRYPTO_AES_IV_0,
			    req->iv, ivsize);
	}
	conf_reg |= RK_CRYPTO_BYTESWAP_BTFIFO |
		    RK_CRYPTO_BYTESWAP_BRFIFO;
	CRYPTO_WRITE(rk_dev, RK_CRYPTO_CONF, conf_reg);
	CRYPTO_WRITE(rk_dev, RK_CRYPTO_INTENA,
		     RK_CRYPTO_BCDMA_ERR_ENA | RK_CRYPTO_BCDMA_DONE_ENA);
}

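/*
 * Kick off one DMA pass.  RK_CRYPTO_BRDMAL takes the length in 32-bit
 * words, hence count / 4.  Mirroring RK_CRYPTO_BLOCK_START into the upper
 * half-word of RK_CRYPTO_CTRL is assumed to serve as the usual Rockchip
 * write-enable mask for that bit.
 */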
static void crypto_dma_start(struct rk_crypto_dev *rk_dev)
{
	struct rk_alg_ctx *alg_ctx = rk_alg_ctx_cast(rk_dev);

	CRYPTO_WRITE(rk_dev, RK_CRYPTO_BRDMAS, alg_ctx->addr_in);
	CRYPTO_WRITE(rk_dev, RK_CRYPTO_BRDMAL, alg_ctx->count / 4);
	CRYPTO_WRITE(rk_dev, RK_CRYPTO_BTDMAS, alg_ctx->addr_out);
	CRYPTO_WRITE(rk_dev, RK_CRYPTO_CTRL, RK_CRYPTO_BLOCK_START |
		     _SBF(RK_CRYPTO_BLOCK_START, 16));
}

static int rk_set_data_start(struct rk_crypto_dev *rk_dev)
{
	int err;
	struct skcipher_request *req =
		skcipher_request_cast(rk_dev->async_req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct rk_alg_ctx *alg_ctx = rk_alg_ctx_cast(rk_dev);
	u32 ivsize = crypto_skcipher_ivsize(tfm);
	u8 *src_last_blk = page_address(sg_page(alg_ctx->sg_src)) +
		alg_ctx->sg_src->offset + alg_ctx->sg_src->length - ivsize;

	/* Store the IV that needs to be updated in chain mode, and update
	 * the IV buffer to contain the next IV for decryption mode.
	 */
	if (ctx->mode & RK_CRYPTO_DEC) {
		memcpy(ctx->iv, src_last_blk, ivsize);
		sg_pcopy_to_buffer(alg_ctx->req_src, alg_ctx->src_nents,
				   req->iv, ivsize, alg_ctx->total - ivsize);
	}

	err = rk_dev->load_data(rk_dev, alg_ctx->sg_src, alg_ctx->sg_dst);
	if (!err)
		crypto_dma_start(rk_dev);
	return err;
}

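/*
 * Start processing rk_dev->async_req.  A request may be larger than one
 * DMA pass can cover: left_bytes tracks the remainder, and the ops.update
 * hook (rk_ablk_rx()) restarts the DMA until nothing is left.
 */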
static int rk_ablk_start(struct rk_crypto_dev *rk_dev)
{
	struct skcipher_request *req =
		skcipher_request_cast(rk_dev->async_req);
	struct rk_alg_ctx *alg_ctx = rk_alg_ctx_cast(rk_dev);
	int err = 0;

	alg_ctx->left_bytes = req->cryptlen;
	alg_ctx->total      = req->cryptlen;
	alg_ctx->sg_src     = req->src;
	alg_ctx->req_src    = req->src;
	alg_ctx->src_nents  = sg_nents_for_len(req->src, req->cryptlen);
	alg_ctx->sg_dst     = req->dst;
	alg_ctx->req_dst    = req->dst;
	alg_ctx->dst_nents  = sg_nents_for_len(req->dst, req->cryptlen);

	rk_ablk_hw_init(rk_dev);
	err = rk_set_data_start(rk_dev);

	return err;
}

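/*
 * CBC chaining across requests, encrypt side: the next IV is the last
 * ciphertext block of the output just produced, so copy it back into
 * req->iv when the request completes.
 */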
static void rk_iv_copyback(struct rk_crypto_dev *rk_dev)
{
	struct skcipher_request *req =
		skcipher_request_cast(rk_dev->async_req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct rk_alg_ctx *alg_ctx = rk_alg_ctx_cast(rk_dev);
	u32 ivsize = crypto_skcipher_ivsize(tfm);

	/* Update the IV buffer to contain the next IV for encryption mode. */
	if (!(ctx->mode & RK_CRYPTO_DEC) && req->iv) {
		if (alg_ctx->aligned) {
			memcpy(req->iv, sg_virt(alg_ctx->sg_dst) +
				alg_ctx->sg_dst->length - ivsize, ivsize);
		} else {
			memcpy(req->iv, rk_dev->addr_vir +
				alg_ctx->count - ivsize, ivsize);
		}
	}
}

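/*
 * CBC chaining between DMA passes of one request: for decryption the next
 * IV is the last ciphertext block of the chunk just consumed (saved into
 * ctx->iv by rk_set_data_start() before an in-place operation could
 * overwrite it); for encryption it is the last ciphertext block just
 * written.  Reload it into the IV registers before the next pass.
 */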
static void rk_update_iv(struct rk_crypto_dev *rk_dev)
{
	struct skcipher_request *req =
		skcipher_request_cast(rk_dev->async_req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct rk_alg_ctx *alg_ctx = rk_alg_ctx_cast(rk_dev);
	u32 ivsize = crypto_skcipher_ivsize(tfm);
	u8 *new_iv = NULL;

	if (ctx->mode & RK_CRYPTO_DEC) {
		new_iv = ctx->iv;
	} else {
		new_iv = page_address(sg_page(alg_ctx->sg_dst)) +
			 alg_ctx->sg_dst->offset +
			 alg_ctx->sg_dst->length - ivsize;
	}

	if (ivsize == DES_BLOCK_SIZE)
		memcpy_toio(rk_dev->reg + RK_CRYPTO_TDES_IV_0, new_iv, ivsize);
	else if (ivsize == AES_BLOCK_SIZE)
		memcpy_toio(rk_dev->reg + RK_CRYPTO_AES_IV_0, new_iv, ivsize);
}

/*
 * Return: 0 if there was no error and processing may continue,
 * a negative errno otherwise.
 */
static int rk_ablk_rx(struct rk_crypto_dev *rk_dev)
{
	int err = 0;
	struct rk_alg_ctx *alg_ctx = rk_alg_ctx_cast(rk_dev);

	CRYPTO_TRACE("left_bytes = %u\n", alg_ctx->left_bytes);

	err = rk_dev->unload_data(rk_dev);
	if (err)
		goto out_rx;

	if (alg_ctx->left_bytes) {
		rk_update_iv(rk_dev);
		if (alg_ctx->aligned) {
			if (sg_is_last(alg_ctx->sg_src)) {
				dev_err(rk_dev->dev, "[%s:%d] Lack of data\n",
					__func__, __LINE__);
				err = -ENOMEM;
				goto out_rx;
			}
			alg_ctx->sg_src = sg_next(alg_ctx->sg_src);
			alg_ctx->sg_dst = sg_next(alg_ctx->sg_dst);
		}
		err = rk_set_data_start(rk_dev);
	} else {
		rk_iv_copyback(rk_dev);
	}
out_rx:
	return err;
}

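/*
 * Per-tfm setup: claim the crypto engine and wire up the callbacks the
 * core invokes (start/update/complete/irq_handle).  The required request
 * alignment is derived from the tfm's alignmask.
 */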
static int rk_ablk_init_tfm(struct crypto_skcipher *tfm)
{
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct rk_alg_ctx *alg_ctx = &ctx->algs_ctx;
	struct rk_crypto_algt *algt;
	struct rk_crypto_dev *rk_dev;
	const char *alg_name = crypto_tfm_alg_name(crypto_skcipher_tfm(tfm));

	algt = container_of(alg, struct rk_crypto_algt, alg.crypto);
	rk_dev = algt->rk_dev;

	memset(ctx, 0x00, sizeof(*ctx));

	if (!rk_dev->request_crypto)
		return -EFAULT;

	rk_dev->request_crypto(rk_dev, alg_name);

	alg_ctx->align_size     = crypto_skcipher_alignmask(tfm) + 1;

	alg_ctx->ops.start      = rk_ablk_start;
	alg_ctx->ops.update     = rk_ablk_rx;
	alg_ctx->ops.complete   = rk_crypto_complete;
	alg_ctx->ops.irq_handle = rk_crypto_irq_handle;

	ctx->rk_dev = rk_dev;

	return 0;
}

static void rk_ablk_exit_tfm(struct crypto_skcipher *tfm)
{
	struct rk_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	const char *alg_name = crypto_tfm_alg_name(crypto_skcipher_tfm(tfm));

	ctx->rk_dev->release_crypto(ctx->rk_dev, alg_name);
}

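/*
 * Algorithm templates picked up by the core at probe time; each pairs a
 * generic name with this driver's instance, e.g. "cbc(aes)" is served by
 * "cbc-aes-rk".  A minimal in-kernel usage sketch (illustration only, not
 * part of this driver; assumes key/iv/sg are set up by the caller):
 *
 *	struct crypto_skcipher *tfm;
 *	struct skcipher_request *req;
 *	DECLARE_CRYPTO_WAIT(wait);
 *
 *	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	crypto_skcipher_setkey(tfm, key, AES_KEYSIZE_128);
 *	req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				      crypto_req_done, &wait);
 *	skcipher_request_set_crypt(req, sg, sg, len, iv);
 *	crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 *	skcipher_request_free(req);
 *	crypto_free_skcipher(tfm);
 */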
struct rk_crypto_algt rk_v1_ecb_aes_alg =
	RK_CIPHER_ALGO_INIT(AES, ECB, ecb(aes), ecb-aes-rk);

struct rk_crypto_algt rk_v1_cbc_aes_alg =
	RK_CIPHER_ALGO_INIT(AES, CBC, cbc(aes), cbc-aes-rk);

struct rk_crypto_algt rk_v1_ecb_des_alg =
	RK_CIPHER_ALGO_INIT(DES, ECB, ecb(des), ecb-des-rk);

struct rk_crypto_algt rk_v1_cbc_des_alg =
	RK_CIPHER_ALGO_INIT(DES, CBC, cbc(des), cbc-des-rk);

struct rk_crypto_algt rk_v1_ecb_des3_ede_alg =
	RK_CIPHER_ALGO_INIT(DES3_EDE, ECB, ecb(des3_ede), ecb-des3_ede-rk);

struct rk_crypto_algt rk_v1_cbc_des3_ede_alg =
	RK_CIPHER_ALGO_INIT(DES3_EDE, CBC, cbc(des3_ede), cbc-des3_ede-rk);