// SPDX-License-Identifier: GPL-2.0
/* Copyright (c) 2019 HiSilicon Limited. */
#include <crypto/akcipher.h>
#include <crypto/dh.h>
#include <crypto/internal/akcipher.h>
#include <crypto/internal/kpp.h>
#include <crypto/internal/rsa.h>
#include <crypto/kpp.h>
#include <crypto/scatterwalk.h>
#include <linux/dma-mapping.h>
#include <linux/fips.h>
#include <linux/module.h>
#include <linux/time.h>
#include "hpre.h"

struct hpre_ctx;

#define HPRE_CRYPTO_ALG_PRI	1000
#define HPRE_ALIGN_SZ		64
#define HPRE_BITS_2_BYTES_SHIFT	3
#define HPRE_RSA_512BITS_KSZ	64
#define HPRE_RSA_1536BITS_KSZ	192
#define HPRE_CRT_PRMS		5
#define HPRE_CRT_Q		2
#define HPRE_CRT_P		3
#define HPRE_CRT_INV		4
#define HPRE_DH_G_FLAG		0x02
#define HPRE_TRY_SEND_TIMES	100
#define HPRE_INVLD_REQ_ID	(-1)
#define HPRE_DEV(ctx)		(&((ctx)->qp->qm->pdev->dev))

#define HPRE_SQE_ALG_BITS	5
#define HPRE_SQE_DONE_SHIFT	30
#define HPRE_DH_MAX_P_SZ	512

#define HPRE_DFX_SEC_TO_US	1000000
#define HPRE_DFX_US_TO_NS	1000

typedef void (*hpre_cb)(struct hpre_ctx *ctx, void *sqe);

struct hpre_rsa_ctx {
	/* low address: e--->n */
	char *pubkey;
	dma_addr_t dma_pubkey;

	/* low address: d--->n */
	char *prikey;
	dma_addr_t dma_prikey;

	/* low address: dq->dp->q->p->qinv */
	char *crt_prikey;
	dma_addr_t dma_crt_prikey;

	struct crypto_akcipher *soft_tfm;
};

struct hpre_dh_ctx {
	/*
	 * If base is g we compute the public key
	 *	ya = g^xa mod p; [RFC2631 sec 2.1.1]
	 * else if base is the counterpart public key we
	 * compute the shared secret
	 *	ZZ = yb^xa mod p; [RFC2631 sec 2.1.1]
	 */
	char *xa_p; /* low address: x--->p, please refer to Hisilicon HPRE UM */
	dma_addr_t dma_xa_p;

	char *g; /* m */
	dma_addr_t dma_g;
};

struct hpre_ctx {
	struct hisi_qp *qp;
	struct hpre_asym_request **req_list;
	struct hpre *hpre;
	spinlock_t req_lock;
	unsigned int key_sz;
	bool crt_g2_mode;
	struct idr req_idr;
	union {
		struct hpre_rsa_ctx rsa;
		struct hpre_dh_ctx dh;
	};
};

struct hpre_asym_request {
	char *src;
	char *dst;
	struct hpre_sqe req;
	struct hpre_ctx *ctx;
	union {
		struct akcipher_request *rsa;
		struct kpp_request *dh;
	} areq;
	int err;
	int req_id;
	hpre_cb cb;
	struct timespec64 req_time;
};

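/*
 * Request IDs come from a per-context IDR bounded by the queue depth;
 * the ID doubles as the SQE tag, so the completion path can look the
 * request back up in ctx->req_list.
 */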
static int hpre_alloc_req_id(struct hpre_ctx *ctx)
{
	unsigned long flags;
	int id;

	spin_lock_irqsave(&ctx->req_lock, flags);
	id = idr_alloc(&ctx->req_idr, NULL, 0, QM_Q_DEPTH, GFP_ATOMIC);
	spin_unlock_irqrestore(&ctx->req_lock, flags);

	return id;
}

static void hpre_free_req_id(struct hpre_ctx *ctx, int req_id)
{
	unsigned long flags;

	spin_lock_irqsave(&ctx->req_lock, flags);
	idr_remove(&ctx->req_idr, req_id);
	spin_unlock_irqrestore(&ctx->req_lock, flags);
}

static int hpre_add_req_to_ctx(struct hpre_asym_request *hpre_req)
{
	struct hpre_ctx *ctx;
	struct hpre_dfx *dfx;
	int id;

	ctx = hpre_req->ctx;
	id = hpre_alloc_req_id(ctx);
	if (unlikely(id < 0))
		return -EINVAL;

	ctx->req_list[id] = hpre_req;
	hpre_req->req_id = id;

	dfx = ctx->hpre->debug.dfx;
	if (atomic64_read(&dfx[HPRE_OVERTIME_THRHLD].value))
		ktime_get_ts64(&hpre_req->req_time);

	return id;
}

static void hpre_rm_req_from_ctx(struct hpre_asym_request *hpre_req)
{
	struct hpre_ctx *ctx = hpre_req->ctx;
	int id = hpre_req->req_id;

	if (hpre_req->req_id >= 0) {
		hpre_req->req_id = HPRE_INVLD_REQ_ID;
		ctx->req_list[id] = NULL;
		hpre_free_req_id(ctx, id);
	}
}

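/*
 * Grab a QP from the HPRE device and start it; tfm init fails with an
 * error pointer when no queue can be created or started.
 */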
static struct hisi_qp *hpre_get_qp_and_start(void)
{
	struct hisi_qp *qp;
	int ret;

	qp = hpre_create_qp();
	if (!qp) {
		pr_err("Can not create hpre qp!\n");
		return ERR_PTR(-ENODEV);
	}

	ret = hisi_qm_start_qp(qp, 0);
	if (ret < 0) {
		hisi_qm_free_qps(&qp, 1);
		pci_err(qp->qm->pdev, "Can not start qp!\n");
		return ERR_PTR(-EINVAL);
	}

	return qp;
}

static int hpre_get_data_dma_addr(struct hpre_asym_request *hpre_req,
				  struct scatterlist *data, unsigned int len,
				  int is_src, dma_addr_t *tmp)
{
	struct hpre_ctx *ctx = hpre_req->ctx;
	struct device *dev = HPRE_DEV(ctx);
	enum dma_data_direction dma_dir;

	if (is_src) {
		hpre_req->src = NULL;
		dma_dir = DMA_TO_DEVICE;
	} else {
		hpre_req->dst = NULL;
		dma_dir = DMA_FROM_DEVICE;
	}
	*tmp = dma_map_single(dev, sg_virt(data), len, dma_dir);
	if (unlikely(dma_mapping_error(dev, *tmp))) {
		dev_err(dev, "dma map data err!\n");
		return -ENOMEM;
	}

	return 0;
}

static int hpre_prepare_dma_buf(struct hpre_asym_request *hpre_req,
				struct scatterlist *data, unsigned int len,
				int is_src, dma_addr_t *tmp)
{
	struct hpre_ctx *ctx = hpre_req->ctx;
	struct device *dev = HPRE_DEV(ctx);
	void *ptr;
	int shift;

	shift = ctx->key_sz - len;
	if (unlikely(shift < 0))
		return -EINVAL;

	ptr = dma_alloc_coherent(dev, ctx->key_sz, tmp, GFP_ATOMIC);
	if (unlikely(!ptr))
		return -ENOMEM;

	if (is_src) {
		scatterwalk_map_and_copy(ptr + shift, data, 0, len, 0);
		hpre_req->src = ptr;
	} else {
		hpre_req->dst = ptr;
	}

	return 0;
}

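/*
 * Pick the data path for one buffer: a single, key-sized SG entry can be
 * DMA-mapped in place, while everything else (notably DH source data,
 * which must be left-padded to the key size) is staged through a
 * coherent bounce buffer by hpre_prepare_dma_buf().
 */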
static int hpre_hw_data_init(struct hpre_asym_request *hpre_req,
			     struct scatterlist *data, unsigned int len,
			     int is_src, int is_dh)
{
	struct hpre_sqe *msg = &hpre_req->req;
	struct hpre_ctx *ctx = hpre_req->ctx;
	dma_addr_t tmp = 0;
	int ret;

	/*
	 * DH source data must always be copied and formatted; other data
	 * maps directly when it is a single, key-sized SG entry.
	 */
	if ((sg_is_last(data) && len == ctx->key_sz) &&
	    ((is_dh && !is_src) || !is_dh))
		ret = hpre_get_data_dma_addr(hpre_req, data, len, is_src, &tmp);
	else
		ret = hpre_prepare_dma_buf(hpre_req, data, len, is_src, &tmp);

	if (unlikely(ret))
		return ret;

	if (is_src)
		msg->in = cpu_to_le64(tmp);
	else
		msg->out = cpu_to_le64(tmp);

	return 0;
}

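/*
 * Undo hpre_hw_data_init(): free or unmap the in/out buffers recorded in
 * the SQE, copying the engine's output back into the destination SG list
 * first when a bounce buffer was used.
 */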
static void hpre_hw_data_clr_all(struct hpre_ctx *ctx,
				 struct hpre_asym_request *req,
				 struct scatterlist *dst,
				 struct scatterlist *src)
{
	struct device *dev = HPRE_DEV(ctx);
	struct hpre_sqe *sqe = &req->req;
	dma_addr_t tmp;

	tmp = le64_to_cpu(sqe->in);
	if (unlikely(!tmp))
		return;

	if (src) {
		if (req->src)
			dma_free_coherent(dev, ctx->key_sz, req->src, tmp);
		else
			dma_unmap_single(dev, tmp, ctx->key_sz, DMA_TO_DEVICE);
	}

	tmp = le64_to_cpu(sqe->out);
	if (unlikely(!tmp))
		return;

	if (req->dst) {
		if (dst)
			scatterwalk_map_and_copy(req->dst, dst, 0,
						 ctx->key_sz, 1);
		dma_free_coherent(dev, ctx->key_sz, req->dst, tmp);
	} else {
		dma_unmap_single(dev, tmp, ctx->key_sz, DMA_FROM_DEVICE);
	}
}

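/*
 * Post-process one completed SQE: recover the request from the SQE tag,
 * drop it from the context, and fold the "done" and error fields of dw0
 * into 0 or -EINVAL.
 */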
static int hpre_alg_res_post_hf(struct hpre_ctx *ctx, struct hpre_sqe *sqe,
				void **kreq)
{
	struct hpre_asym_request *req;
	int err, id, done;

#define HPRE_NO_HW_ERR		0
#define HPRE_HW_TASK_DONE	3
#define HPRE_HW_ERR_MASK	0x7ff
#define HPRE_SQE_DONE_MASK	0x3
	id = (int)le16_to_cpu(sqe->tag);
	req = ctx->req_list[id];
	hpre_rm_req_from_ctx(req);
	*kreq = req;

	err = (le32_to_cpu(sqe->dw0) >> HPRE_SQE_ALG_BITS) &
		HPRE_HW_ERR_MASK;

	done = (le32_to_cpu(sqe->dw0) >> HPRE_SQE_DONE_SHIFT) &
		HPRE_SQE_DONE_MASK;

	if (likely(err == HPRE_NO_HW_ERR && done == HPRE_HW_TASK_DONE))
		return 0;

	return -EINVAL;
}

static int hpre_ctx_set(struct hpre_ctx *ctx, struct hisi_qp *qp, int qlen)
{
	struct hpre *hpre;

	if (!ctx || !qp || qlen < 0)
		return -EINVAL;

	spin_lock_init(&ctx->req_lock);
	ctx->qp = qp;

	hpre = container_of(ctx->qp->qm, struct hpre, qm);
	ctx->hpre = hpre;
	ctx->req_list = kcalloc(qlen, sizeof(void *), GFP_KERNEL);
	if (!ctx->req_list)
		return -ENOMEM;
	ctx->key_sz = 0;
	ctx->crt_g2_mode = false;
	idr_init(&ctx->req_idr);

	return 0;
}

static void hpre_ctx_clear(struct hpre_ctx *ctx, bool is_clear_all)
{
	if (is_clear_all) {
		idr_destroy(&ctx->req_idr);
		kfree(ctx->req_list);
		hisi_qm_free_qps(&ctx->qp, 1);
	}

	ctx->crt_g2_mode = false;
	ctx->key_sz = 0;
}

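/*
 * Compare a request's round-trip time, in microseconds, against the
 * configured DFX overtime threshold; used only for overtime accounting.
 */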
static bool hpre_is_bd_timeout(struct hpre_asym_request *req,
			       u64 overtime_thrhld)
{
	struct timespec64 reply_time;
	u64 time_use_us;

	ktime_get_ts64(&reply_time);
	time_use_us = (reply_time.tv_sec - req->req_time.tv_sec) *
		HPRE_DFX_SEC_TO_US +
		(reply_time.tv_nsec - req->req_time.tv_nsec) /
		HPRE_DFX_US_TO_NS;

	if (time_use_us <= overtime_thrhld)
		return false;

	return true;
}

static void hpre_dh_cb(struct hpre_ctx *ctx, void *resp)
{
	struct hpre_dfx *dfx = ctx->hpre->debug.dfx;
	struct hpre_asym_request *req;
	struct kpp_request *areq;
	u64 overtime_thrhld;
	int ret;

	ret = hpre_alg_res_post_hf(ctx, resp, (void **)&req);
	areq = req->areq.dh;
	areq->dst_len = ctx->key_sz;

	overtime_thrhld = atomic64_read(&dfx[HPRE_OVERTIME_THRHLD].value);
	if (overtime_thrhld && hpre_is_bd_timeout(req, overtime_thrhld))
		atomic64_inc(&dfx[HPRE_OVER_THRHLD_CNT].value);

	hpre_hw_data_clr_all(ctx, req, areq->dst, areq->src);
	kpp_request_complete(areq, ret);
	atomic64_inc(&dfx[HPRE_RECV_CNT].value);
}

static void hpre_rsa_cb(struct hpre_ctx *ctx, void *resp)
{
	struct hpre_dfx *dfx = ctx->hpre->debug.dfx;
	struct hpre_asym_request *req;
	struct akcipher_request *areq;
	u64 overtime_thrhld;
	int ret;

	ret = hpre_alg_res_post_hf(ctx, resp, (void **)&req);

	overtime_thrhld = atomic64_read(&dfx[HPRE_OVERTIME_THRHLD].value);
	if (overtime_thrhld && hpre_is_bd_timeout(req, overtime_thrhld))
		atomic64_inc(&dfx[HPRE_OVER_THRHLD_CNT].value);

	areq = req->areq.rsa;
	areq->dst_len = ctx->key_sz;
	hpre_hw_data_clr_all(ctx, req, areq->dst, areq->src);
	akcipher_request_complete(areq, ret);
	atomic64_inc(&dfx[HPRE_RECV_CNT].value);
}

static void hpre_alg_cb(struct hisi_qp *qp, void *resp)
{
	struct hpre_ctx *ctx = qp->qp_ctx;
	struct hpre_dfx *dfx = ctx->hpre->debug.dfx;
	struct hpre_sqe *sqe = resp;
	struct hpre_asym_request *req = ctx->req_list[le16_to_cpu(sqe->tag)];

	if (unlikely(!req)) {
		atomic64_inc(&dfx[HPRE_INVALID_REQ_CNT].value);
		return;
	}

	req->cb(ctx, resp);
}

static int hpre_ctx_init(struct hpre_ctx *ctx)
{
	struct hisi_qp *qp;

	qp = hpre_get_qp_and_start();
	if (IS_ERR(qp))
		return PTR_ERR(qp);

	qp->qp_ctx = ctx;
	qp->req_cb = hpre_alg_cb;

	return hpre_ctx_set(ctx, qp, QM_Q_DEPTH);
}

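/*
 * Common SQE setup for RSA and DH. The hpre_asym_request lives in the
 * crypto request context, aligned up to HPRE_ALIGN_SZ; a destination
 * shorter than the key size is bounced with -EOVERFLOW after the
 * required length is reported back in dst_len.
 */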
static int hpre_msg_request_set(struct hpre_ctx *ctx, void *req, bool is_rsa)
{
	struct hpre_asym_request *h_req;
	struct hpre_sqe *msg;
	int req_id;
	void *tmp;

	if (is_rsa) {
		struct akcipher_request *akreq = req;

		if (akreq->dst_len < ctx->key_sz) {
			akreq->dst_len = ctx->key_sz;
			return -EOVERFLOW;
		}

		tmp = akcipher_request_ctx(akreq);
		h_req = PTR_ALIGN(tmp, HPRE_ALIGN_SZ);
		h_req->cb = hpre_rsa_cb;
		h_req->areq.rsa = akreq;
		msg = &h_req->req;
		memset(msg, 0, sizeof(*msg));
	} else {
		struct kpp_request *kreq = req;

		if (kreq->dst_len < ctx->key_sz) {
			kreq->dst_len = ctx->key_sz;
			return -EOVERFLOW;
		}

		tmp = kpp_request_ctx(kreq);
		h_req = PTR_ALIGN(tmp, HPRE_ALIGN_SZ);
		h_req->cb = hpre_dh_cb;
		h_req->areq.dh = kreq;
		msg = &h_req->req;
		memset(msg, 0, sizeof(*msg));
		msg->key = cpu_to_le64(ctx->dh.dma_xa_p);
	}

	msg->dw0 |= cpu_to_le32(0x1 << HPRE_SQE_DONE_SHIFT);
	msg->task_len1 = (ctx->key_sz >> HPRE_BITS_2_BYTES_SHIFT) - 1;
	h_req->ctx = ctx;

	req_id = hpre_add_req_to_ctx(h_req);
	if (req_id < 0)
		return -EBUSY;

	msg->tag = cpu_to_le16((u16)req_id);

	return 0;
}

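/*
 * Queue one SQE, retrying up to HPRE_TRY_SEND_TIMES while the hardware
 * queue is busy, and keep the send/busy/fail DFX counters current.
 */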
static int hpre_send(struct hpre_ctx *ctx, struct hpre_sqe *msg)
{
	struct hpre_dfx *dfx = ctx->hpre->debug.dfx;
	int ctr = 0;
	int ret;

	do {
		atomic64_inc(&dfx[HPRE_SEND_CNT].value);
		ret = hisi_qp_send(ctx->qp, msg);
		if (ret != -EBUSY)
			break;
		atomic64_inc(&dfx[HPRE_SEND_BUSY_CNT].value);
	} while (ctr++ < HPRE_TRY_SEND_TIMES);

	if (likely(!ret))
		return ret;

	if (ret != -EBUSY)
		atomic64_inc(&dfx[HPRE_SEND_FAIL_CNT].value);

	return ret;
}

#ifdef CONFIG_CRYPTO_DH
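/*
 * One handler covers both KPP operations: with req->src set, the peer's
 * public key is the base and the result is the shared secret; without
 * it, the base is g (or the dedicated g == 2 path when crt_g2_mode is
 * set) and the result is our public key.
 */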
static int hpre_dh_compute_value(struct kpp_request *req)
{
	struct crypto_kpp *tfm = crypto_kpp_reqtfm(req);
	struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
	void *tmp = kpp_request_ctx(req);
	struct hpre_asym_request *hpre_req = PTR_ALIGN(tmp, HPRE_ALIGN_SZ);
	struct hpre_sqe *msg = &hpre_req->req;
	int ret;

	ret = hpre_msg_request_set(ctx, req, false);
	if (unlikely(ret))
		return ret;

	if (req->src) {
		ret = hpre_hw_data_init(hpre_req, req->src, req->src_len, 1, 1);
		if (unlikely(ret))
			goto clear_all;
	} else {
		msg->in = cpu_to_le64(ctx->dh.dma_g);
	}

	ret = hpre_hw_data_init(hpre_req, req->dst, req->dst_len, 0, 1);
	if (unlikely(ret))
		goto clear_all;

	if (ctx->crt_g2_mode && !req->src)
		msg->dw0 = cpu_to_le32(le32_to_cpu(msg->dw0) | HPRE_ALG_DH_G2);
	else
		msg->dw0 = cpu_to_le32(le32_to_cpu(msg->dw0) | HPRE_ALG_DH);

	/* success */
	ret = hpre_send(ctx, msg);
	if (likely(!ret))
		return -EINPROGRESS;

clear_all:
	hpre_rm_req_from_ctx(hpre_req);
	hpre_hw_data_clr_all(ctx, hpre_req, req->dst, req->src);

	return ret;
}

static int hpre_is_dh_params_length_valid(unsigned int key_sz)
{
#define _HPRE_DH_GRP1		768
#define _HPRE_DH_GRP2		1024
#define _HPRE_DH_GRP5		1536
#define _HPRE_DH_GRP14		2048
#define _HPRE_DH_GRP15		3072
#define _HPRE_DH_GRP16		4096
	switch (key_sz) {
	case _HPRE_DH_GRP1:
	case _HPRE_DH_GRP2:
	case _HPRE_DH_GRP5:
	case _HPRE_DH_GRP14:
	case _HPRE_DH_GRP15:
	case _HPRE_DH_GRP16:
		return 0;
	}

	return -EINVAL;
}

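/*
 * xa_p is one coherent buffer of 2 * key_sz bytes: the private value x
 * in the low half and the prime p in the high half, which is the x->p
 * layout the SQE key field points at.
 */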
static int hpre_dh_set_params(struct hpre_ctx *ctx, struct dh *params)
{
	struct device *dev = HPRE_DEV(ctx);
	unsigned int sz;

	if (params->p_size > HPRE_DH_MAX_P_SZ)
		return -EINVAL;

	if (hpre_is_dh_params_length_valid(params->p_size <<
					   HPRE_BITS_2_BYTES_SHIFT))
		return -EINVAL;

	sz = ctx->key_sz = params->p_size;
	ctx->dh.xa_p = dma_alloc_coherent(dev, sz << 1,
					  &ctx->dh.dma_xa_p, GFP_KERNEL);
	if (!ctx->dh.xa_p)
		return -ENOMEM;

	memcpy(ctx->dh.xa_p + sz, params->p, sz);

	/* If g equals 2 don't copy it */
	if (params->g_size == 1 && *(char *)params->g == HPRE_DH_G_FLAG) {
		ctx->crt_g2_mode = true;
		return 0;
	}

	ctx->dh.g = dma_alloc_coherent(dev, sz, &ctx->dh.dma_g, GFP_KERNEL);
	if (!ctx->dh.g) {
		dma_free_coherent(dev, sz << 1, ctx->dh.xa_p,
				  ctx->dh.dma_xa_p);
		ctx->dh.xa_p = NULL;
		return -ENOMEM;
	}

	memcpy(ctx->dh.g + (sz - params->g_size), params->g, params->g_size);

	return 0;
}

static void hpre_dh_clear_ctx(struct hpre_ctx *ctx, bool is_clear_all)
{
	struct device *dev = HPRE_DEV(ctx);
	unsigned int sz = ctx->key_sz;

	if (is_clear_all)
		hisi_qm_stop_qp(ctx->qp);

	if (ctx->dh.g) {
		dma_free_coherent(dev, sz, ctx->dh.g, ctx->dh.dma_g);
		ctx->dh.g = NULL;
	}

	if (ctx->dh.xa_p) {
		memzero_explicit(ctx->dh.xa_p, sz);
		dma_free_coherent(dev, sz << 1, ctx->dh.xa_p,
				  ctx->dh.dma_xa_p);
		ctx->dh.xa_p = NULL;
	}

	hpre_ctx_clear(ctx, is_clear_all);
}

static int hpre_dh_set_secret(struct crypto_kpp *tfm, const void *buf,
			      unsigned int len)
{
	struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);
	struct dh params;
	int ret;

	if (crypto_dh_decode_key(buf, len, &params) < 0)
		return -EINVAL;

	/* Free old secret if any */
	hpre_dh_clear_ctx(ctx, false);

	ret = hpre_dh_set_params(ctx, &params);
	if (ret < 0)
		goto err_clear_ctx;

	memcpy(ctx->dh.xa_p + (ctx->key_sz - params.key_size), params.key,
	       params.key_size);

	return 0;

err_clear_ctx:
	hpre_dh_clear_ctx(ctx, false);
	return ret;
}

static unsigned int hpre_dh_max_size(struct crypto_kpp *tfm)
{
	struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);

	return ctx->key_sz;
}

static int hpre_dh_init_tfm(struct crypto_kpp *tfm)
{
	struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);

	return hpre_ctx_init(ctx);
}

static void hpre_dh_exit_tfm(struct crypto_kpp *tfm)
{
	struct hpre_ctx *ctx = kpp_tfm_ctx(tfm);

	hpre_dh_clear_ctx(ctx, true);
}
#endif

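/*
 * Key material arrives as big-endian MPI values that may carry leading
 * zero bytes; strip them so key_sz reflects the real modulus width.
 */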
static void hpre_rsa_drop_leading_zeros(const char **ptr, size_t *len)
{
	while (!**ptr && *len) {
		(*ptr)++;
		(*len)--;
	}
}

static bool hpre_rsa_key_size_is_support(unsigned int len)
{
	unsigned int bits = len << HPRE_BITS_2_BYTES_SHIFT;

#define _RSA_1024BITS_KEY_WDTH	1024
#define _RSA_2048BITS_KEY_WDTH	2048
#define _RSA_3072BITS_KEY_WDTH	3072
#define _RSA_4096BITS_KEY_WDTH	4096

	switch (bits) {
	case _RSA_1024BITS_KEY_WDTH:
	case _RSA_2048BITS_KEY_WDTH:
	case _RSA_3072BITS_KEY_WDTH:
	case _RSA_4096BITS_KEY_WDTH:
		return true;
	default:
		return false;
	}
}

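/*
 * 512- and 1536-bit keys are punted to the software "rsa-generic" tfm,
 * presumably because the engine does not support those widths; only the
 * sizes accepted by hpre_rsa_key_size_is_support() run on hardware.
 */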
static int hpre_rsa_enc(struct akcipher_request *req)
{
	struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
	struct hpre_ctx *ctx = akcipher_tfm_ctx(tfm);
	void *tmp = akcipher_request_ctx(req);
	struct hpre_asym_request *hpre_req = PTR_ALIGN(tmp, HPRE_ALIGN_SZ);
	struct hpre_sqe *msg = &hpre_req->req;
	int ret;

	/* For 512 and 1536 bits key size, use soft tfm instead */
	if (ctx->key_sz == HPRE_RSA_512BITS_KSZ ||
	    ctx->key_sz == HPRE_RSA_1536BITS_KSZ) {
		akcipher_request_set_tfm(req, ctx->rsa.soft_tfm);
		ret = crypto_akcipher_encrypt(req);
		akcipher_request_set_tfm(req, tfm);
		return ret;
	}

	if (unlikely(!ctx->rsa.pubkey))
		return -EINVAL;

	ret = hpre_msg_request_set(ctx, req, true);
	if (unlikely(ret))
		return ret;

	msg->dw0 |= cpu_to_le32(HPRE_ALG_NC_NCRT);
	msg->key = cpu_to_le64(ctx->rsa.dma_pubkey);

	ret = hpre_hw_data_init(hpre_req, req->src, req->src_len, 1, 0);
	if (unlikely(ret))
		goto clear_all;

	ret = hpre_hw_data_init(hpre_req, req->dst, req->dst_len, 0, 0);
	if (unlikely(ret))
		goto clear_all;

	/* success */
	ret = hpre_send(ctx, msg);
	if (likely(!ret))
		return -EINPROGRESS;

clear_all:
	hpre_rm_req_from_ctx(hpre_req);
	hpre_hw_data_clr_all(ctx, hpre_req, req->dst, req->src);

	return ret;
}

static int hpre_rsa_dec(struct akcipher_request *req)
{
	struct crypto_akcipher *tfm = crypto_akcipher_reqtfm(req);
	struct hpre_ctx *ctx = akcipher_tfm_ctx(tfm);
	void *tmp = akcipher_request_ctx(req);
	struct hpre_asym_request *hpre_req = PTR_ALIGN(tmp, HPRE_ALIGN_SZ);
	struct hpre_sqe *msg = &hpre_req->req;
	int ret;

	/* For 512 and 1536 bits key size, use soft tfm instead */
	if (ctx->key_sz == HPRE_RSA_512BITS_KSZ ||
	    ctx->key_sz == HPRE_RSA_1536BITS_KSZ) {
		akcipher_request_set_tfm(req, ctx->rsa.soft_tfm);
		ret = crypto_akcipher_decrypt(req);
		akcipher_request_set_tfm(req, tfm);
		return ret;
	}

	if (unlikely(!ctx->rsa.prikey))
		return -EINVAL;

	ret = hpre_msg_request_set(ctx, req, true);
	if (unlikely(ret))
		return ret;

	if (ctx->crt_g2_mode) {
		msg->key = cpu_to_le64(ctx->rsa.dma_crt_prikey);
		msg->dw0 = cpu_to_le32(le32_to_cpu(msg->dw0) |
				       HPRE_ALG_NC_CRT);
	} else {
		msg->key = cpu_to_le64(ctx->rsa.dma_prikey);
		msg->dw0 = cpu_to_le32(le32_to_cpu(msg->dw0) |
				       HPRE_ALG_NC_NCRT);
	}

	ret = hpre_hw_data_init(hpre_req, req->src, req->src_len, 1, 0);
	if (unlikely(ret))
		goto clear_all;

	ret = hpre_hw_data_init(hpre_req, req->dst, req->dst_len, 0, 0);
	if (unlikely(ret))
		goto clear_all;

	/* success */
	ret = hpre_send(ctx, msg);
	if (likely(!ret))
		return -EINPROGRESS;

clear_all:
	hpre_rm_req_from_ctx(hpre_req);
	hpre_hw_data_clr_all(ctx, hpre_req, req->dst, req->src);

	return ret;
}

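/*
 * Set the modulus n. Returns 1 when the size is hardware-supported and
 * the key buffers were allocated (use HPRE), 0 to fall back to the
 * software tfm, or a negative errno on allocation failure.
 */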
static int hpre_rsa_set_n(struct hpre_ctx *ctx, const char *value,
			  size_t vlen, bool private)
{
	const char *ptr = value;

	hpre_rsa_drop_leading_zeros(&ptr, &vlen);

	ctx->key_sz = vlen;

	/* if invalid key size provided, we use software tfm */
	if (!hpre_rsa_key_size_is_support(ctx->key_sz))
		return 0;

	ctx->rsa.pubkey = dma_alloc_coherent(HPRE_DEV(ctx), vlen << 1,
					     &ctx->rsa.dma_pubkey,
					     GFP_KERNEL);
	if (!ctx->rsa.pubkey)
		return -ENOMEM;

	if (private) {
		ctx->rsa.prikey = dma_alloc_coherent(HPRE_DEV(ctx), vlen << 1,
						     &ctx->rsa.dma_prikey,
						     GFP_KERNEL);
		if (!ctx->rsa.prikey) {
			dma_free_coherent(HPRE_DEV(ctx), vlen << 1,
					  ctx->rsa.pubkey,
					  ctx->rsa.dma_pubkey);
			ctx->rsa.pubkey = NULL;
			return -ENOMEM;
		}
		memcpy(ctx->rsa.prikey + vlen, ptr, vlen);
	}
	memcpy(ctx->rsa.pubkey + vlen, ptr, vlen);

	/* Using hardware HPRE to do RSA */
	return 1;
}

static int hpre_rsa_set_e(struct hpre_ctx *ctx, const char *value,
			  size_t vlen)
{
	const char *ptr = value;

	hpre_rsa_drop_leading_zeros(&ptr, &vlen);

	if (!ctx->key_sz || !vlen || vlen > ctx->key_sz)
		return -EINVAL;

	memcpy(ctx->rsa.pubkey + ctx->key_sz - vlen, ptr, vlen);

	return 0;
}

static int hpre_rsa_set_d(struct hpre_ctx *ctx, const char *value,
			  size_t vlen)
{
	const char *ptr = value;

	hpre_rsa_drop_leading_zeros(&ptr, &vlen);

	if (!ctx->key_sz || !vlen || vlen > ctx->key_sz)
		return -EINVAL;

	memcpy(ctx->rsa.prikey + ctx->key_sz - vlen, ptr, vlen);

	return 0;
}

static int hpre_crt_para_get(char *para, size_t para_sz,
			     const char *raw, size_t raw_sz)
{
	const char *ptr = raw;
	size_t len = raw_sz;

	hpre_rsa_drop_leading_zeros(&ptr, &len);
	if (!len || len > para_sz)
		return -EINVAL;

	memcpy(para + para_sz - len, ptr, len);

	return 0;
}

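/*
 * CRT private key layout, low address first: dq | dp | q | p | qinv,
 * each field padded to half the key size; the HPRE_CRT_* constants are
 * the half-key-size offsets into this buffer.
 */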
static int hpre_rsa_setkey_crt(struct hpre_ctx *ctx, struct rsa_key *rsa_key)
{
	unsigned int hlf_ksz = ctx->key_sz >> 1;
	struct device *dev = HPRE_DEV(ctx);
	u64 offset;
	int ret;

	ctx->rsa.crt_prikey = dma_alloc_coherent(dev, hlf_ksz * HPRE_CRT_PRMS,
						 &ctx->rsa.dma_crt_prikey,
						 GFP_KERNEL);
	if (!ctx->rsa.crt_prikey)
		return -ENOMEM;

	ret = hpre_crt_para_get(ctx->rsa.crt_prikey, hlf_ksz,
				rsa_key->dq, rsa_key->dq_sz);
	if (ret)
		goto free_key;

	offset = hlf_ksz;
	ret = hpre_crt_para_get(ctx->rsa.crt_prikey + offset, hlf_ksz,
				rsa_key->dp, rsa_key->dp_sz);
	if (ret)
		goto free_key;

	offset = hlf_ksz * HPRE_CRT_Q;
	ret = hpre_crt_para_get(ctx->rsa.crt_prikey + offset, hlf_ksz,
				rsa_key->q, rsa_key->q_sz);
	if (ret)
		goto free_key;

	offset = hlf_ksz * HPRE_CRT_P;
	ret = hpre_crt_para_get(ctx->rsa.crt_prikey + offset, hlf_ksz,
				rsa_key->p, rsa_key->p_sz);
	if (ret)
		goto free_key;

	offset = hlf_ksz * HPRE_CRT_INV;
	ret = hpre_crt_para_get(ctx->rsa.crt_prikey + offset, hlf_ksz,
				rsa_key->qinv, rsa_key->qinv_sz);
	if (ret)
		goto free_key;

	ctx->crt_g2_mode = true;

	return 0;

free_key:
	offset = hlf_ksz * HPRE_CRT_PRMS;
	memzero_explicit(ctx->rsa.crt_prikey, offset);
	dma_free_coherent(dev, hlf_ksz * HPRE_CRT_PRMS, ctx->rsa.crt_prikey,
			  ctx->rsa.dma_crt_prikey);
	ctx->rsa.crt_prikey = NULL;
	ctx->crt_g2_mode = false;

	return ret;
}

/* If is_clear_all is set, the QP resources are released as well. */
static void hpre_rsa_clear_ctx(struct hpre_ctx *ctx, bool is_clear_all)
{
	unsigned int half_key_sz = ctx->key_sz >> 1;
	struct device *dev = HPRE_DEV(ctx);

	if (is_clear_all)
		hisi_qm_stop_qp(ctx->qp);

	if (ctx->rsa.pubkey) {
		dma_free_coherent(dev, ctx->key_sz << 1,
				  ctx->rsa.pubkey, ctx->rsa.dma_pubkey);
		ctx->rsa.pubkey = NULL;
	}

	if (ctx->rsa.crt_prikey) {
		memzero_explicit(ctx->rsa.crt_prikey,
				 half_key_sz * HPRE_CRT_PRMS);
		dma_free_coherent(dev, half_key_sz * HPRE_CRT_PRMS,
				  ctx->rsa.crt_prikey, ctx->rsa.dma_crt_prikey);
		ctx->rsa.crt_prikey = NULL;
	}

	if (ctx->rsa.prikey) {
		memzero_explicit(ctx->rsa.prikey, ctx->key_sz);
		dma_free_coherent(dev, ctx->key_sz << 1, ctx->rsa.prikey,
				  ctx->rsa.dma_prikey);
		ctx->rsa.prikey = NULL;
	}

	hpre_ctx_clear(ctx, is_clear_all);
}

/*
 * Judge whether the key is in CRT form:
 * CRT: return true, N-CRT: return false.
 */
static bool hpre_is_crt_key(struct rsa_key *key)
{
	u16 len = key->p_sz + key->q_sz + key->dp_sz + key->dq_sz +
		  key->qinv_sz;

#define LEN_OF_NCRT_PARA	5

	/* N-CRT: the five CRT parameters total less than 5 bytes */
	return len > LEN_OF_NCRT_PARA;
}

static int hpre_rsa_setkey(struct hpre_ctx *ctx, const void *key,
			   unsigned int keylen, bool private)
{
	struct rsa_key rsa_key;
	int ret;

	hpre_rsa_clear_ctx(ctx, false);

	if (private)
		ret = rsa_parse_priv_key(&rsa_key, key, keylen);
	else
		ret = rsa_parse_pub_key(&rsa_key, key, keylen);
	if (ret < 0)
		return ret;

	ret = hpre_rsa_set_n(ctx, rsa_key.n, rsa_key.n_sz, private);
	if (ret <= 0)
		return ret;

	if (private) {
		ret = hpre_rsa_set_d(ctx, rsa_key.d, rsa_key.d_sz);
		if (ret < 0)
			goto free;

		if (hpre_is_crt_key(&rsa_key)) {
			ret = hpre_rsa_setkey_crt(ctx, &rsa_key);
			if (ret < 0)
				goto free;
		}
	}

	ret = hpre_rsa_set_e(ctx, rsa_key.e, rsa_key.e_sz);
	if (ret < 0)
		goto free;

	if ((private && !ctx->rsa.prikey) || !ctx->rsa.pubkey) {
		ret = -EINVAL;
		goto free;
	}

	return 0;

free:
	hpre_rsa_clear_ctx(ctx, false);
	return ret;
}

static int hpre_rsa_setpubkey(struct crypto_akcipher *tfm, const void *key,
			      unsigned int keylen)
{
	struct hpre_ctx *ctx = akcipher_tfm_ctx(tfm);
	int ret;

	ret = crypto_akcipher_set_pub_key(ctx->rsa.soft_tfm, key, keylen);
	if (ret)
		return ret;

	return hpre_rsa_setkey(ctx, key, keylen, false);
}

static int hpre_rsa_setprivkey(struct crypto_akcipher *tfm, const void *key,
			       unsigned int keylen)
{
	struct hpre_ctx *ctx = akcipher_tfm_ctx(tfm);
	int ret;

	ret = crypto_akcipher_set_priv_key(ctx->rsa.soft_tfm, key, keylen);
	if (ret)
		return ret;

	return hpre_rsa_setkey(ctx, key, keylen, true);
}

static unsigned int hpre_rsa_max_size(struct crypto_akcipher *tfm)
{
	struct hpre_ctx *ctx = akcipher_tfm_ctx(tfm);

	/* For 512 and 1536 bits key size, use soft tfm instead */
	if (ctx->key_sz == HPRE_RSA_512BITS_KSZ ||
	    ctx->key_sz == HPRE_RSA_1536BITS_KSZ)
		return crypto_akcipher_maxsize(ctx->rsa.soft_tfm);

	return ctx->key_sz;
}

static int hpre_rsa_init_tfm(struct crypto_akcipher *tfm)
{
	struct hpre_ctx *ctx = akcipher_tfm_ctx(tfm);
	int ret;

	ctx->rsa.soft_tfm = crypto_alloc_akcipher("rsa-generic", 0, 0);
	if (IS_ERR(ctx->rsa.soft_tfm)) {
		pr_err("Can not alloc_akcipher!\n");
		return PTR_ERR(ctx->rsa.soft_tfm);
	}

	ret = hpre_ctx_init(ctx);
	if (ret)
		crypto_free_akcipher(ctx->rsa.soft_tfm);

	return ret;
}

static void hpre_rsa_exit_tfm(struct crypto_akcipher *tfm)
{
	struct hpre_ctx *ctx = akcipher_tfm_ctx(tfm);

	hpre_rsa_clear_ctx(ctx, true);
	crypto_free_akcipher(ctx->rsa.soft_tfm);
}

static struct akcipher_alg rsa = {
	.sign = hpre_rsa_dec,
	.verify = hpre_rsa_enc,
	.encrypt = hpre_rsa_enc,
	.decrypt = hpre_rsa_dec,
	.set_pub_key = hpre_rsa_setpubkey,
	.set_priv_key = hpre_rsa_setprivkey,
	.max_size = hpre_rsa_max_size,
	.init = hpre_rsa_init_tfm,
	.exit = hpre_rsa_exit_tfm,
	.reqsize = sizeof(struct hpre_asym_request) + HPRE_ALIGN_SZ,
	.base = {
		.cra_ctxsize = sizeof(struct hpre_ctx),
		.cra_priority = HPRE_CRYPTO_ALG_PRI,
		.cra_name = "rsa",
		.cra_driver_name = "hpre-rsa",
		.cra_module = THIS_MODULE,
	},
};

#ifdef CONFIG_CRYPTO_DH
static struct kpp_alg dh = {
	.set_secret = hpre_dh_set_secret,
	.generate_public_key = hpre_dh_compute_value,
	.compute_shared_secret = hpre_dh_compute_value,
	.max_size = hpre_dh_max_size,
	.init = hpre_dh_init_tfm,
	.exit = hpre_dh_exit_tfm,
	.reqsize = sizeof(struct hpre_asym_request) + HPRE_ALIGN_SZ,
	.base = {
		.cra_ctxsize = sizeof(struct hpre_ctx),
		.cra_priority = HPRE_CRYPTO_ALG_PRI,
		.cra_name = "dh",
		.cra_driver_name = "hpre-dh",
		.cra_module = THIS_MODULE,
	},
};
#endif

int hpre_algs_register(void)
{
	int ret;

	rsa.base.cra_flags = 0;
	ret = crypto_register_akcipher(&rsa);
	if (ret)
		return ret;
#ifdef CONFIG_CRYPTO_DH
	ret = crypto_register_kpp(&dh);
	if (ret)
		crypto_unregister_akcipher(&rsa);
#endif

	return ret;
}

void hpre_algs_unregister(void)
{
	crypto_unregister_akcipher(&rsa);
#ifdef CONFIG_CRYPTO_DH
	crypto_unregister_kpp(&dh);
#endif
}