// SPDX-License-Identifier: GPL-2.0-only
/*
 * Cipher algorithms supported by the CESA: DES, 3DES and AES.
 *
 * Author: Boris Brezillon <boris.brezillon@free-electrons.com>
 * Author: Arnaud Ebalard <arno@natisbad.org>
 *
 * This work is based on an initial version written by
 * Sebastian Andrzej Siewior < sebastian at breakpoint dot cc >
 */

#include <crypto/aes.h>
#include <crypto/internal/des.h>
#include <linux/device.h>
#include <linux/dma-mapping.h>

#include "cesa.h"

struct mv_cesa_des_ctx {
	struct mv_cesa_ctx base;
	u8 key[DES_KEY_SIZE];
};

struct mv_cesa_des3_ctx {
	struct mv_cesa_ctx base;
	u8 key[DES3_EDE_KEY_SIZE];
};

struct mv_cesa_aes_ctx {
	struct mv_cesa_ctx base;
	struct crypto_aes_ctx aes;
};

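/*
 * Walks the input and output scatterlists in lockstep: the base iterator
 * splits the request into engine-sized chunks, while the src and dst
 * iterators track the matching positions in each scatterlist.
 */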
struct mv_cesa_skcipher_dma_iter {
	struct mv_cesa_dma_iter base;
	struct mv_cesa_sg_dma_iter src;
	struct mv_cesa_sg_dma_iter dst;
};

static inline void
mv_cesa_skcipher_req_iter_init(struct mv_cesa_skcipher_dma_iter *iter,
			       struct skcipher_request *req)
{
	mv_cesa_req_dma_iter_init(&iter->base, req->cryptlen);
	mv_cesa_sg_dma_iter_init(&iter->src, req->src, DMA_TO_DEVICE);
	mv_cesa_sg_dma_iter_init(&iter->dst, req->dst, DMA_FROM_DEVICE);
}

static inline bool
mv_cesa_skcipher_req_iter_next_op(struct mv_cesa_skcipher_dma_iter *iter)
{
	iter->src.op_offset = 0;
	iter->dst.op_offset = 0;

	return mv_cesa_req_dma_iter_next_op(&iter->base);
}

static inline void
mv_cesa_skcipher_dma_cleanup(struct skcipher_request *req)
{
	struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req);

	if (req->dst != req->src) {
		dma_unmap_sg(cesa_dev->dev, req->dst, creq->dst_nents,
			     DMA_FROM_DEVICE);
		dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents,
			     DMA_TO_DEVICE);
	} else {
		dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents,
			     DMA_BIDIRECTIONAL);
	}
	mv_cesa_dma_cleanup(&creq->base);
}

static inline void mv_cesa_skcipher_cleanup(struct skcipher_request *req)
{
	struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req);

	if (mv_cesa_req_get_type(&creq->base) == CESA_DMA_REQ)
		mv_cesa_skcipher_dma_cleanup(req);
}

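/*
 * Process one chunk of a CPU-driven ("standard") request: copy the
 * operation descriptor and up to CESA_SA_SRAM_PAYLOAD_SIZE bytes of input
 * into the engine SRAM, then start the accelerator. The full operation
 * context (key, IV, ...) is only written for the first chunk; later
 * chunks rewrite just the descriptor.
 */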
static void mv_cesa_skcipher_std_step(struct skcipher_request *req)
{
	struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req);
	struct mv_cesa_skcipher_std_req *sreq = &creq->std;
	struct mv_cesa_engine *engine = creq->base.engine;
	size_t len = min_t(size_t, req->cryptlen - sreq->offset,
			   CESA_SA_SRAM_PAYLOAD_SIZE);

	mv_cesa_adjust_op(engine, &sreq->op);
	memcpy_toio(engine->sram, &sreq->op, sizeof(sreq->op));

	len = sg_pcopy_to_buffer(req->src, creq->src_nents,
				 engine->sram + CESA_SA_DATA_SRAM_OFFSET,
				 len, sreq->offset);

	sreq->size = len;
	mv_cesa_set_crypt_op_len(&sreq->op, len);

	/* FIXME: only update enc_len field */
	if (!sreq->skip_ctx) {
		memcpy_toio(engine->sram, &sreq->op, sizeof(sreq->op));
		sreq->skip_ctx = true;
	} else {
		memcpy_toio(engine->sram, &sreq->op, sizeof(sreq->op.desc));
	}

	mv_cesa_set_int_mask(engine, CESA_SA_INT_ACCEL0_DONE);
	writel_relaxed(CESA_SA_CFG_PARA_DIS, engine->regs + CESA_SA_CFG);
	WARN_ON(readl(engine->regs + CESA_SA_CMD) &
		CESA_SA_CMD_EN_CESA_SA_ACCL0);
	writel(CESA_SA_CMD_EN_CESA_SA_ACCL0, engine->regs + CESA_SA_CMD);
}

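/*
 * Copy the processed chunk from the engine SRAM back to the destination
 * scatterlist. Returns -EINPROGRESS while chunks remain, 0 once the whole
 * request has been handled.
 */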
static int mv_cesa_skcipher_std_process(struct skcipher_request *req,
					u32 status)
{
	struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req);
	struct mv_cesa_skcipher_std_req *sreq = &creq->std;
	struct mv_cesa_engine *engine = creq->base.engine;
	size_t len;

	len = sg_pcopy_from_buffer(req->dst, creq->dst_nents,
				   engine->sram + CESA_SA_DATA_SRAM_OFFSET,
				   sreq->size, sreq->offset);

	sreq->offset += len;
	if (sreq->offset < req->cryptlen)
		return -EINPROGRESS;

	return 0;
}

static int mv_cesa_skcipher_process(struct crypto_async_request *req,
				    u32 status)
{
	struct skcipher_request *skreq = skcipher_request_cast(req);
	struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(skreq);
	struct mv_cesa_req *basereq = &creq->base;

	if (mv_cesa_req_get_type(basereq) == CESA_STD_REQ)
		return mv_cesa_skcipher_std_process(skreq, status);

	return mv_cesa_dma_process(basereq, status);
}

static void mv_cesa_skcipher_step(struct crypto_async_request *req)
{
	struct skcipher_request *skreq = skcipher_request_cast(req);
	struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(skreq);

	if (mv_cesa_req_get_type(&creq->base) == CESA_DMA_REQ)
		mv_cesa_dma_step(&creq->base);
	else
		mv_cesa_skcipher_std_step(skreq);
}

static inline void
mv_cesa_skcipher_dma_prepare(struct skcipher_request *req)
{
	struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req);
	struct mv_cesa_req *basereq = &creq->base;

	mv_cesa_dma_prepare(basereq, basereq->engine);
}

static inline void
mv_cesa_skcipher_std_prepare(struct skcipher_request *req)
{
	struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req);
	struct mv_cesa_skcipher_std_req *sreq = &creq->std;

	sreq->size = 0;
	sreq->offset = 0;
}

static inline void mv_cesa_skcipher_prepare(struct crypto_async_request *req,
					    struct mv_cesa_engine *engine)
{
	struct skcipher_request *skreq = skcipher_request_cast(req);
	struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(skreq);

	creq->base.engine = engine;

	if (mv_cesa_req_get_type(&creq->base) == CESA_DMA_REQ)
		mv_cesa_skcipher_dma_prepare(skreq);
	else
		mv_cesa_skcipher_std_prepare(skreq);
}

static inline void
mv_cesa_skcipher_req_cleanup(struct crypto_async_request *req)
{
	struct skcipher_request *skreq = skcipher_request_cast(req);

	mv_cesa_skcipher_cleanup(skreq);
}

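/*
 * On completion, drop the request's weight from the engine load counter
 * and copy the output IV back into req->iv (from the last TDMA op context
 * for DMA requests, from the engine SRAM otherwise), so that chained CBC
 * requests see the updated IV.
 */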
static void
mv_cesa_skcipher_complete(struct crypto_async_request *req)
{
	struct skcipher_request *skreq = skcipher_request_cast(req);
	struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(skreq);
	struct mv_cesa_engine *engine = creq->base.engine;
	unsigned int ivsize;

	atomic_sub(skreq->cryptlen, &engine->load);
	ivsize = crypto_skcipher_ivsize(crypto_skcipher_reqtfm(skreq));

	if (mv_cesa_req_get_type(&creq->base) == CESA_DMA_REQ) {
		struct mv_cesa_req *basereq;

		basereq = &creq->base;
		memcpy(skreq->iv, basereq->chain.last->op->ctx.skcipher.iv,
		       ivsize);
	} else {
		memcpy_fromio(skreq->iv,
			      engine->sram + CESA_SA_CRYPT_IV_SRAM_OFFSET,
			      ivsize);
	}
}

static const struct mv_cesa_req_ops mv_cesa_skcipher_req_ops = {
	.step = mv_cesa_skcipher_step,
	.process = mv_cesa_skcipher_process,
	.cleanup = mv_cesa_skcipher_req_cleanup,
	.complete = mv_cesa_skcipher_complete,
};

static void mv_cesa_skcipher_cra_exit(struct crypto_tfm *tfm)
{
	void *ctx = crypto_tfm_ctx(tfm);

	memzero_explicit(ctx, tfm->__crt_alg->cra_ctxsize);
}

static int mv_cesa_skcipher_cra_init(struct crypto_tfm *tfm)
{
	struct mv_cesa_ctx *ctx = crypto_tfm_ctx(tfm);

	ctx->ops = &mv_cesa_skcipher_req_ops;

	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
				    sizeof(struct mv_cesa_skcipher_req));

	return 0;
}

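/*
 * Expand the key with the generic helper, then patch the decryption
 * schedule: aes_expandkey() already places the last round key in
 * key_dec[0..3], and the loop below appends the preceding words of the
 * expanded encryption key that the engine needs for 192- and 256-bit
 * keys (it is a no-op for AES-128).
 */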
static int mv_cesa_aes_setkey(struct crypto_skcipher *cipher, const u8 *key,
			      unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(cipher);
	struct mv_cesa_aes_ctx *ctx = crypto_tfm_ctx(tfm);
	int remaining;
	int offset;
	int ret;
	int i;

	ret = aes_expandkey(&ctx->aes, key, len);
	if (ret)
		return ret;

	remaining = (ctx->aes.key_length - 16) / 4;
	offset = ctx->aes.key_length + 24 - remaining;
	for (i = 0; i < remaining; i++)
		ctx->aes.key_dec[4 + i] = ctx->aes.key_enc[offset + i];

	return 0;
}

static int mv_cesa_des_setkey(struct crypto_skcipher *cipher, const u8 *key,
			      unsigned int len)
{
	struct mv_cesa_des_ctx *ctx = crypto_skcipher_ctx(cipher);
	int err;

	err = verify_skcipher_des_key(cipher, key);
	if (err)
		return err;

	memcpy(ctx->key, key, DES_KEY_SIZE);

	return 0;
}

static int mv_cesa_des3_ede_setkey(struct crypto_skcipher *cipher,
				   const u8 *key, unsigned int len)
{
	struct mv_cesa_des3_ctx *ctx = crypto_skcipher_ctx(cipher);
	int err;

	err = verify_skcipher_des3_key(cipher, key);
	if (err)
		return err;

	memcpy(ctx->key, key, DES3_EDE_KEY_SIZE);

	return 0;
}

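/*
 * Build the TDMA descriptor chain for a request: map the scatterlists
 * for DMA, then, for each engine-sized chunk, emit the operation
 * descriptor (context omitted after the first chunk), the input
 * transfers, a dummy descriptor that launches the crypto operation, and
 * the output transfers. A final transfer fetches the output IV.
 */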
static int mv_cesa_skcipher_dma_req_init(struct skcipher_request *req,
					 const struct mv_cesa_op_ctx *op_templ)
{
	struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req);
	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
		      GFP_KERNEL : GFP_ATOMIC;
	struct mv_cesa_req *basereq = &creq->base;
	struct mv_cesa_skcipher_dma_iter iter;
	bool skip_ctx = false;
	int ret;

	basereq->chain.first = NULL;
	basereq->chain.last = NULL;

	if (req->src != req->dst) {
		ret = dma_map_sg(cesa_dev->dev, req->src, creq->src_nents,
				 DMA_TO_DEVICE);
		if (!ret)
			return -ENOMEM;

		ret = dma_map_sg(cesa_dev->dev, req->dst, creq->dst_nents,
				 DMA_FROM_DEVICE);
		if (!ret) {
			ret = -ENOMEM;
			goto err_unmap_src;
		}
	} else {
		ret = dma_map_sg(cesa_dev->dev, req->src, creq->src_nents,
				 DMA_BIDIRECTIONAL);
		if (!ret)
			return -ENOMEM;
	}

	mv_cesa_tdma_desc_iter_init(&basereq->chain);
	mv_cesa_skcipher_req_iter_init(&iter, req);

	do {
		struct mv_cesa_op_ctx *op;

		op = mv_cesa_dma_add_op(&basereq->chain, op_templ, skip_ctx,
					flags);
		if (IS_ERR(op)) {
			ret = PTR_ERR(op);
			goto err_free_tdma;
		}
		skip_ctx = true;

		mv_cesa_set_crypt_op_len(op, iter.base.op_len);

		/* Add input transfers */
		ret = mv_cesa_dma_add_op_transfers(&basereq->chain, &iter.base,
						   &iter.src, flags);
		if (ret)
			goto err_free_tdma;

		/* Add dummy desc to launch the crypto operation */
		ret = mv_cesa_dma_add_dummy_launch(&basereq->chain, flags);
		if (ret)
			goto err_free_tdma;

		/* Add output transfers */
		ret = mv_cesa_dma_add_op_transfers(&basereq->chain, &iter.base,
						   &iter.dst, flags);
		if (ret)
			goto err_free_tdma;

	} while (mv_cesa_skcipher_req_iter_next_op(&iter));

	/* Add output data for IV */
	ret = mv_cesa_dma_add_result_op(&basereq->chain,
					CESA_SA_CFG_SRAM_OFFSET,
					CESA_SA_DATA_SRAM_OFFSET,
					CESA_TDMA_SRC_IN_SRAM, flags);

	if (ret)
		goto err_free_tdma;

	basereq->chain.last->flags |= CESA_TDMA_END_OF_REQ;

	return 0;

err_free_tdma:
	mv_cesa_dma_cleanup(basereq);
	if (req->dst != req->src)
		dma_unmap_sg(cesa_dev->dev, req->dst, creq->dst_nents,
			     DMA_FROM_DEVICE);

err_unmap_src:
	dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents,
		     req->dst != req->src ? DMA_TO_DEVICE : DMA_BIDIRECTIONAL);

	return ret;
}

static inline int
mv_cesa_skcipher_std_req_init(struct skcipher_request *req,
			      const struct mv_cesa_op_ctx *op_templ)
{
	struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req);
	struct mv_cesa_skcipher_std_req *sreq = &creq->std;
	struct mv_cesa_req *basereq = &creq->base;

	sreq->op = *op_templ;
	sreq->skip_ctx = false;
	basereq->chain.first = NULL;
	basereq->chain.last = NULL;

	return 0;
}

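/*
 * Common request initialization: reject lengths that are not a multiple
 * of the cipher block size, count the scatterlist entries, force the
 * "crypt only" operation, and pick the TDMA or CPU-driven backend
 * depending on the engine capabilities.
 */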
static int mv_cesa_skcipher_req_init(struct skcipher_request *req,
				     struct mv_cesa_op_ctx *tmpl)
{
	struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	unsigned int blksize = crypto_skcipher_blocksize(tfm);
	int ret;

	if (!IS_ALIGNED(req->cryptlen, blksize))
		return -EINVAL;

	creq->src_nents = sg_nents_for_len(req->src, req->cryptlen);
	if (creq->src_nents < 0) {
		dev_err(cesa_dev->dev, "Invalid number of src SG");
		return creq->src_nents;
	}
	creq->dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
	if (creq->dst_nents < 0) {
		dev_err(cesa_dev->dev, "Invalid number of dst SG");
		return creq->dst_nents;
	}

	mv_cesa_update_op_cfg(tmpl, CESA_SA_DESC_CFG_OP_CRYPT_ONLY,
			      CESA_SA_DESC_CFG_OP_MSK);

	if (cesa_dev->caps->has_tdma)
		ret = mv_cesa_skcipher_dma_req_init(req, tmpl);
	else
		ret = mv_cesa_skcipher_std_req_init(req, tmpl);

	return ret;
}

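/*
 * Initialize the request, select the least-loaded engine for it and hand
 * it to the queue. On failure, release whatever resources were already
 * allocated.
 */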
static int mv_cesa_skcipher_queue_req(struct skcipher_request *req,
				      struct mv_cesa_op_ctx *tmpl)
{
	int ret;
	struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req);
	struct mv_cesa_engine *engine;

	ret = mv_cesa_skcipher_req_init(req, tmpl);
	if (ret)
		return ret;

	engine = mv_cesa_select_engine(req->cryptlen);
	mv_cesa_skcipher_prepare(&req->base, engine);

	ret = mv_cesa_queue_req(&req->base, &creq->base);

	if (mv_cesa_req_needs_cleanup(&req->base, ret))
		mv_cesa_skcipher_cleanup(req);

	return ret;
}

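/*
 * The per-algorithm entry points below all follow the same pattern:
 * build an operation template on the stack, set the cipher, mode and
 * direction config bits plus the key (and IV for CBC), then queue the
 * request.
 */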
static int mv_cesa_des_op(struct skcipher_request *req,
			  struct mv_cesa_op_ctx *tmpl)
{
	struct mv_cesa_des_ctx *ctx = crypto_tfm_ctx(req->base.tfm);

	mv_cesa_update_op_cfg(tmpl, CESA_SA_DESC_CFG_CRYPTM_DES,
			      CESA_SA_DESC_CFG_CRYPTM_MSK);

	memcpy(tmpl->ctx.skcipher.key, ctx->key, DES_KEY_SIZE);

	return mv_cesa_skcipher_queue_req(req, tmpl);
}

static int mv_cesa_ecb_des_encrypt(struct skcipher_request *req)
{
	struct mv_cesa_op_ctx tmpl;

	mv_cesa_set_op_cfg(&tmpl,
			   CESA_SA_DESC_CFG_CRYPTCM_ECB |
			   CESA_SA_DESC_CFG_DIR_ENC);

	return mv_cesa_des_op(req, &tmpl);
}

static int mv_cesa_ecb_des_decrypt(struct skcipher_request *req)
{
	struct mv_cesa_op_ctx tmpl;

	mv_cesa_set_op_cfg(&tmpl,
			   CESA_SA_DESC_CFG_CRYPTCM_ECB |
			   CESA_SA_DESC_CFG_DIR_DEC);

	return mv_cesa_des_op(req, &tmpl);
}

struct skcipher_alg mv_cesa_ecb_des_alg = {
	.setkey = mv_cesa_des_setkey,
	.encrypt = mv_cesa_ecb_des_encrypt,
	.decrypt = mv_cesa_ecb_des_decrypt,
	.min_keysize = DES_KEY_SIZE,
	.max_keysize = DES_KEY_SIZE,
	.base = {
		.cra_name = "ecb(des)",
		.cra_driver_name = "mv-ecb-des",
		.cra_priority = 300,
		.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY | CRYPTO_ALG_ASYNC |
			     CRYPTO_ALG_ALLOCATES_MEMORY,
		.cra_blocksize = DES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct mv_cesa_des_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
		.cra_init = mv_cesa_skcipher_cra_init,
		.cra_exit = mv_cesa_skcipher_cra_exit,
	},
};

static int mv_cesa_cbc_des_op(struct skcipher_request *req,
			      struct mv_cesa_op_ctx *tmpl)
{
	mv_cesa_update_op_cfg(tmpl, CESA_SA_DESC_CFG_CRYPTCM_CBC,
			      CESA_SA_DESC_CFG_CRYPTCM_MSK);

	memcpy(tmpl->ctx.skcipher.iv, req->iv, DES_BLOCK_SIZE);

	return mv_cesa_des_op(req, tmpl);
}

static int mv_cesa_cbc_des_encrypt(struct skcipher_request *req)
{
	struct mv_cesa_op_ctx tmpl;

	mv_cesa_set_op_cfg(&tmpl, CESA_SA_DESC_CFG_DIR_ENC);

	return mv_cesa_cbc_des_op(req, &tmpl);
}

static int mv_cesa_cbc_des_decrypt(struct skcipher_request *req)
{
	struct mv_cesa_op_ctx tmpl;

	mv_cesa_set_op_cfg(&tmpl, CESA_SA_DESC_CFG_DIR_DEC);

	return mv_cesa_cbc_des_op(req, &tmpl);
}

struct skcipher_alg mv_cesa_cbc_des_alg = {
	.setkey = mv_cesa_des_setkey,
	.encrypt = mv_cesa_cbc_des_encrypt,
	.decrypt = mv_cesa_cbc_des_decrypt,
	.min_keysize = DES_KEY_SIZE,
	.max_keysize = DES_KEY_SIZE,
	.ivsize = DES_BLOCK_SIZE,
	.base = {
		.cra_name = "cbc(des)",
		.cra_driver_name = "mv-cbc-des",
		.cra_priority = 300,
		.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY | CRYPTO_ALG_ASYNC |
			     CRYPTO_ALG_ALLOCATES_MEMORY,
		.cra_blocksize = DES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct mv_cesa_des_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
		.cra_init = mv_cesa_skcipher_cra_init,
		.cra_exit = mv_cesa_skcipher_cra_exit,
	},
};

static int mv_cesa_des3_op(struct skcipher_request *req,
			   struct mv_cesa_op_ctx *tmpl)
{
	struct mv_cesa_des3_ctx *ctx = crypto_tfm_ctx(req->base.tfm);

	mv_cesa_update_op_cfg(tmpl, CESA_SA_DESC_CFG_CRYPTM_3DES,
			      CESA_SA_DESC_CFG_CRYPTM_MSK);

	memcpy(tmpl->ctx.skcipher.key, ctx->key, DES3_EDE_KEY_SIZE);

	return mv_cesa_skcipher_queue_req(req, tmpl);
}

static int mv_cesa_ecb_des3_ede_encrypt(struct skcipher_request *req)
{
	struct mv_cesa_op_ctx tmpl;

	mv_cesa_set_op_cfg(&tmpl,
			   CESA_SA_DESC_CFG_CRYPTCM_ECB |
			   CESA_SA_DESC_CFG_3DES_EDE |
			   CESA_SA_DESC_CFG_DIR_ENC);

	return mv_cesa_des3_op(req, &tmpl);
}

static int mv_cesa_ecb_des3_ede_decrypt(struct skcipher_request *req)
{
	struct mv_cesa_op_ctx tmpl;

	mv_cesa_set_op_cfg(&tmpl,
			   CESA_SA_DESC_CFG_CRYPTCM_ECB |
			   CESA_SA_DESC_CFG_3DES_EDE |
			   CESA_SA_DESC_CFG_DIR_DEC);

	return mv_cesa_des3_op(req, &tmpl);
}

struct skcipher_alg mv_cesa_ecb_des3_ede_alg = {
	.setkey = mv_cesa_des3_ede_setkey,
	.encrypt = mv_cesa_ecb_des3_ede_encrypt,
	.decrypt = mv_cesa_ecb_des3_ede_decrypt,
	.min_keysize = DES3_EDE_KEY_SIZE,
	.max_keysize = DES3_EDE_KEY_SIZE,
	.base = {
		.cra_name = "ecb(des3_ede)",
		.cra_driver_name = "mv-ecb-des3-ede",
		.cra_priority = 300,
		.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY | CRYPTO_ALG_ASYNC |
			     CRYPTO_ALG_ALLOCATES_MEMORY,
		.cra_blocksize = DES3_EDE_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct mv_cesa_des3_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
		.cra_init = mv_cesa_skcipher_cra_init,
		.cra_exit = mv_cesa_skcipher_cra_exit,
	},
};

static int mv_cesa_cbc_des3_op(struct skcipher_request *req,
			       struct mv_cesa_op_ctx *tmpl)
{
	memcpy(tmpl->ctx.skcipher.iv, req->iv, DES3_EDE_BLOCK_SIZE);

	return mv_cesa_des3_op(req, tmpl);
}

static int mv_cesa_cbc_des3_ede_encrypt(struct skcipher_request *req)
{
	struct mv_cesa_op_ctx tmpl;

	mv_cesa_set_op_cfg(&tmpl,
			   CESA_SA_DESC_CFG_CRYPTCM_CBC |
			   CESA_SA_DESC_CFG_3DES_EDE |
			   CESA_SA_DESC_CFG_DIR_ENC);

	return mv_cesa_cbc_des3_op(req, &tmpl);
}

static int mv_cesa_cbc_des3_ede_decrypt(struct skcipher_request *req)
{
	struct mv_cesa_op_ctx tmpl;

	mv_cesa_set_op_cfg(&tmpl,
			   CESA_SA_DESC_CFG_CRYPTCM_CBC |
			   CESA_SA_DESC_CFG_3DES_EDE |
			   CESA_SA_DESC_CFG_DIR_DEC);

	return mv_cesa_cbc_des3_op(req, &tmpl);
}

struct skcipher_alg mv_cesa_cbc_des3_ede_alg = {
	.setkey = mv_cesa_des3_ede_setkey,
	.encrypt = mv_cesa_cbc_des3_ede_encrypt,
	.decrypt = mv_cesa_cbc_des3_ede_decrypt,
	.min_keysize = DES3_EDE_KEY_SIZE,
	.max_keysize = DES3_EDE_KEY_SIZE,
	.ivsize = DES3_EDE_BLOCK_SIZE,
	.base = {
		.cra_name = "cbc(des3_ede)",
		.cra_driver_name = "mv-cbc-des3-ede",
		.cra_priority = 300,
		.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY | CRYPTO_ALG_ASYNC |
			     CRYPTO_ALG_ALLOCATES_MEMORY,
		.cra_blocksize = DES3_EDE_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct mv_cesa_des3_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
		.cra_init = mv_cesa_skcipher_cra_init,
		.cra_exit = mv_cesa_skcipher_cra_exit,
	},
};

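/*
 * Select the encryption or decryption key schedule based on the
 * direction bit, convert each word to the little-endian layout used by
 * the engine, and encode the key size in the AES_LEN config bits.
 */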
static int mv_cesa_aes_op(struct skcipher_request *req,
			  struct mv_cesa_op_ctx *tmpl)
{
	struct mv_cesa_aes_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	int i;
	u32 *key;
	u32 cfg;

	cfg = CESA_SA_DESC_CFG_CRYPTM_AES;

	if (mv_cesa_get_op_cfg(tmpl) & CESA_SA_DESC_CFG_DIR_DEC)
		key = ctx->aes.key_dec;
	else
		key = ctx->aes.key_enc;

	for (i = 0; i < ctx->aes.key_length / sizeof(u32); i++)
		tmpl->ctx.skcipher.key[i] = cpu_to_le32(key[i]);

	if (ctx->aes.key_length == 24)
		cfg |= CESA_SA_DESC_CFG_AES_LEN_192;
	else if (ctx->aes.key_length == 32)
		cfg |= CESA_SA_DESC_CFG_AES_LEN_256;

	mv_cesa_update_op_cfg(tmpl, cfg,
			      CESA_SA_DESC_CFG_CRYPTM_MSK |
			      CESA_SA_DESC_CFG_AES_LEN_MSK);

	return mv_cesa_skcipher_queue_req(req, tmpl);
}

static int mv_cesa_ecb_aes_encrypt(struct skcipher_request *req)
{
	struct mv_cesa_op_ctx tmpl;

	mv_cesa_set_op_cfg(&tmpl,
			   CESA_SA_DESC_CFG_CRYPTCM_ECB |
			   CESA_SA_DESC_CFG_DIR_ENC);

	return mv_cesa_aes_op(req, &tmpl);
}

static int mv_cesa_ecb_aes_decrypt(struct skcipher_request *req)
{
	struct mv_cesa_op_ctx tmpl;

	mv_cesa_set_op_cfg(&tmpl,
			   CESA_SA_DESC_CFG_CRYPTCM_ECB |
			   CESA_SA_DESC_CFG_DIR_DEC);

	return mv_cesa_aes_op(req, &tmpl);
}

struct skcipher_alg mv_cesa_ecb_aes_alg = {
	.setkey = mv_cesa_aes_setkey,
	.encrypt = mv_cesa_ecb_aes_encrypt,
	.decrypt = mv_cesa_ecb_aes_decrypt,
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.base = {
		.cra_name = "ecb(aes)",
		.cra_driver_name = "mv-ecb-aes",
		.cra_priority = 300,
		.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY | CRYPTO_ALG_ASYNC |
			     CRYPTO_ALG_ALLOCATES_MEMORY,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct mv_cesa_aes_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
		.cra_init = mv_cesa_skcipher_cra_init,
		.cra_exit = mv_cesa_skcipher_cra_exit,
	},
};

static int mv_cesa_cbc_aes_op(struct skcipher_request *req,
			      struct mv_cesa_op_ctx *tmpl)
{
	mv_cesa_update_op_cfg(tmpl, CESA_SA_DESC_CFG_CRYPTCM_CBC,
			      CESA_SA_DESC_CFG_CRYPTCM_MSK);
	memcpy(tmpl->ctx.skcipher.iv, req->iv, AES_BLOCK_SIZE);

	return mv_cesa_aes_op(req, tmpl);
}

static int mv_cesa_cbc_aes_encrypt(struct skcipher_request *req)
{
	struct mv_cesa_op_ctx tmpl;

	mv_cesa_set_op_cfg(&tmpl, CESA_SA_DESC_CFG_DIR_ENC);

	return mv_cesa_cbc_aes_op(req, &tmpl);
}

static int mv_cesa_cbc_aes_decrypt(struct skcipher_request *req)
{
	struct mv_cesa_op_ctx tmpl;

	mv_cesa_set_op_cfg(&tmpl, CESA_SA_DESC_CFG_DIR_DEC);

	return mv_cesa_cbc_aes_op(req, &tmpl);
}

struct skcipher_alg mv_cesa_cbc_aes_alg = {
	.setkey = mv_cesa_aes_setkey,
	.encrypt = mv_cesa_cbc_aes_encrypt,
	.decrypt = mv_cesa_cbc_aes_decrypt,
	.min_keysize = AES_MIN_KEY_SIZE,
	.max_keysize = AES_MAX_KEY_SIZE,
	.ivsize = AES_BLOCK_SIZE,
	.base = {
		.cra_name = "cbc(aes)",
		.cra_driver_name = "mv-cbc-aes",
		.cra_priority = 300,
		.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY | CRYPTO_ALG_ASYNC |
			     CRYPTO_ALG_ALLOCATES_MEMORY,
		.cra_blocksize = AES_BLOCK_SIZE,
		.cra_ctxsize = sizeof(struct mv_cesa_aes_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
		.cra_init = mv_cesa_skcipher_cra_init,
		.cra_exit = mv_cesa_skcipher_cra_exit,
	},
};
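
/*
 * Usage sketch (illustrative only, not part of the driver): once
 * registered, these algorithms are reached through the generic skcipher
 * API like any other implementation; "cbc(aes)" may resolve to
 * "mv-cbc-aes" depending on priorities. key, buf and len are assumed to
 * be supplied by the caller, with len a multiple of AES_BLOCK_SIZE.
 *
 *	struct crypto_skcipher *tfm;
 *	struct skcipher_request *req;
 *	struct scatterlist sg;
 *	DECLARE_CRYPTO_WAIT(wait);
 *	u8 iv[AES_BLOCK_SIZE];
 *	int err;
 *
 *	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	err = crypto_skcipher_setkey(tfm, key, AES_KEYSIZE_128);
 *	if (err)
 *		goto out_free_tfm;
 *	req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *	if (!req) {
 *		err = -ENOMEM;
 *		goto out_free_tfm;
 *	}
 *	get_random_bytes(iv, sizeof(iv));
 *	sg_init_one(&sg, buf, len);
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				      crypto_req_done, &wait);
 *	skcipher_request_set_crypt(req, &sg, &sg, len, iv);
 *	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 *	skcipher_request_free(req);
 * out_free_tfm:
 *	crypto_free_skcipher(tfm);
 */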