xref: /OK3568_Linux_fs/kernel/crypto/cryptd.c (revision 4882a59341e53eb6f0b4789bf948001014eff981)
1*4882a593Smuzhiyun // SPDX-License-Identifier: GPL-2.0-or-later
2*4882a593Smuzhiyun /*
3*4882a593Smuzhiyun  * Software async crypto daemon.
4*4882a593Smuzhiyun  *
5*4882a593Smuzhiyun  * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
6*4882a593Smuzhiyun  *
7*4882a593Smuzhiyun  * Added AEAD support to cryptd.
8*4882a593Smuzhiyun  *    Authors: Tadeusz Struk (tadeusz.struk@intel.com)
9*4882a593Smuzhiyun  *             Adrian Hoban <adrian.hoban@intel.com>
10*4882a593Smuzhiyun  *             Gabriele Paoloni <gabriele.paoloni@intel.com>
11*4882a593Smuzhiyun  *             Aidan O'Mahony (aidan.o.mahony@intel.com)
12*4882a593Smuzhiyun  *    Copyright (c) 2010, Intel Corporation.
13*4882a593Smuzhiyun  */
14*4882a593Smuzhiyun 
15*4882a593Smuzhiyun #include <crypto/internal/hash.h>
16*4882a593Smuzhiyun #include <crypto/internal/aead.h>
17*4882a593Smuzhiyun #include <crypto/internal/skcipher.h>
18*4882a593Smuzhiyun #include <crypto/cryptd.h>
19*4882a593Smuzhiyun #include <linux/refcount.h>
20*4882a593Smuzhiyun #include <linux/err.h>
21*4882a593Smuzhiyun #include <linux/init.h>
22*4882a593Smuzhiyun #include <linux/kernel.h>
23*4882a593Smuzhiyun #include <linux/list.h>
24*4882a593Smuzhiyun #include <linux/module.h>
25*4882a593Smuzhiyun #include <linux/scatterlist.h>
26*4882a593Smuzhiyun #include <linux/sched.h>
27*4882a593Smuzhiyun #include <linux/slab.h>
28*4882a593Smuzhiyun #include <linux/workqueue.h>
29*4882a593Smuzhiyun 
/* Maximum depth of each per-CPU request queue; tunable at module load. */
static unsigned int cryptd_max_cpu_qlen = 1000;
module_param(cryptd_max_cpu_qlen, uint, 0);
MODULE_PARM_DESC(cryptd_max_cpu_qlen, "Set cryptd Max queue depth");

/* Workqueue on which all cryptd per-CPU work items run (allocated at init). */
static struct workqueue_struct *cryptd_wq;
35*4882a593Smuzhiyun 
/* One crypto request queue plus the work item that drains it, per CPU. */
struct cryptd_cpu_queue {
	struct crypto_queue queue;	/* pending async requests */
	struct work_struct work;	/* runs cryptd_queue_worker() */
};
40*4882a593Smuzhiyun 
/* Set of per-CPU queues shared by the cryptd instances of one template. */
struct cryptd_queue {
	/*
	 * Protected by disabling BH to allow enqueueing from softinterrupt and
	 * dequeuing from kworker (cryptd_queue_worker()).
	 */
	struct cryptd_cpu_queue __percpu *cpu_queue;
};
48*4882a593Smuzhiyun 
/* Instance context for cryptd instances built on a generic crypto_spawn. */
struct cryptd_instance_ctx {
	struct crypto_spawn spawn;	/* wrapped synchronous algorithm */
	struct cryptd_queue *queue;	/* shared per-CPU request queues */
};
53*4882a593Smuzhiyun 
/* Instance context for cryptd skcipher instances. */
struct skcipherd_instance_ctx {
	struct crypto_skcipher_spawn spawn;	/* wrapped sync skcipher */
	struct cryptd_queue *queue;		/* shared per-CPU request queues */
};
58*4882a593Smuzhiyun 
/* Instance context for cryptd ahash instances. */
struct hashd_instance_ctx {
	struct crypto_shash_spawn spawn;	/* wrapped synchronous shash */
	struct cryptd_queue *queue;		/* shared per-CPU request queues */
};
63*4882a593Smuzhiyun 
/* Instance context for cryptd AEAD instances. */
struct aead_instance_ctx {
	struct crypto_aead_spawn aead_spawn;	/* wrapped AEAD algorithm */
	struct cryptd_queue *queue;		/* shared per-CPU request queues */
};
68*4882a593Smuzhiyun 
/*
 * Transform context of a cryptd skcipher.  refcnt must remain the first
 * member: cryptd_enqueue_request() reads it straight from crypto_tfm_ctx().
 */
struct cryptd_skcipher_ctx {
	refcount_t refcnt;
	struct crypto_sync_skcipher *child;	/* underlying synchronous cipher */
};
73*4882a593Smuzhiyun 
/* Per-request state: the caller's completion callback, saved at enqueue. */
struct cryptd_skcipher_request_ctx {
	crypto_completion_t complete;
};
77*4882a593Smuzhiyun 
/*
 * Transform context of a cryptd ahash.  refcnt must remain the first
 * member: cryptd_enqueue_request() reads it straight from crypto_tfm_ctx().
 */
struct cryptd_hash_ctx {
	refcount_t refcnt;
	struct crypto_shash *child;	/* underlying synchronous hash */
};
82*4882a593Smuzhiyun 
/*
 * Per-request hash state.  desc is followed in the request context by the
 * child's descriptor data (see reqsize in cryptd_hash_init_tfm()).
 */
struct cryptd_hash_request_ctx {
	crypto_completion_t complete;	/* caller's completion callback */
	struct shash_desc desc;		/* child hash operation state */
};
87*4882a593Smuzhiyun 
/*
 * Transform context of a cryptd AEAD.  refcnt must remain the first
 * member: cryptd_enqueue_request() reads it straight from crypto_tfm_ctx().
 */
struct cryptd_aead_ctx {
	refcount_t refcnt;
	struct crypto_aead *child;	/* underlying AEAD transform */
};
92*4882a593Smuzhiyun 
/* Per-request state: the caller's completion callback, saved at enqueue. */
struct cryptd_aead_request_ctx {
	crypto_completion_t complete;
};
96*4882a593Smuzhiyun 
/* Forward declaration: INIT_WORK() in cryptd_init_queue() references it. */
static void cryptd_queue_worker(struct work_struct *work);
98*4882a593Smuzhiyun 
cryptd_init_queue(struct cryptd_queue * queue,unsigned int max_cpu_qlen)99*4882a593Smuzhiyun static int cryptd_init_queue(struct cryptd_queue *queue,
100*4882a593Smuzhiyun 			     unsigned int max_cpu_qlen)
101*4882a593Smuzhiyun {
102*4882a593Smuzhiyun 	int cpu;
103*4882a593Smuzhiyun 	struct cryptd_cpu_queue *cpu_queue;
104*4882a593Smuzhiyun 
105*4882a593Smuzhiyun 	queue->cpu_queue = alloc_percpu(struct cryptd_cpu_queue);
106*4882a593Smuzhiyun 	if (!queue->cpu_queue)
107*4882a593Smuzhiyun 		return -ENOMEM;
108*4882a593Smuzhiyun 	for_each_possible_cpu(cpu) {
109*4882a593Smuzhiyun 		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
110*4882a593Smuzhiyun 		crypto_init_queue(&cpu_queue->queue, max_cpu_qlen);
111*4882a593Smuzhiyun 		INIT_WORK(&cpu_queue->work, cryptd_queue_worker);
112*4882a593Smuzhiyun 	}
113*4882a593Smuzhiyun 	pr_info("cryptd: max_cpu_qlen set to %d\n", max_cpu_qlen);
114*4882a593Smuzhiyun 	return 0;
115*4882a593Smuzhiyun }
116*4882a593Smuzhiyun 
cryptd_fini_queue(struct cryptd_queue * queue)117*4882a593Smuzhiyun static void cryptd_fini_queue(struct cryptd_queue *queue)
118*4882a593Smuzhiyun {
119*4882a593Smuzhiyun 	int cpu;
120*4882a593Smuzhiyun 	struct cryptd_cpu_queue *cpu_queue;
121*4882a593Smuzhiyun 
122*4882a593Smuzhiyun 	for_each_possible_cpu(cpu) {
123*4882a593Smuzhiyun 		cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu);
124*4882a593Smuzhiyun 		BUG_ON(cpu_queue->queue.qlen);
125*4882a593Smuzhiyun 	}
126*4882a593Smuzhiyun 	free_percpu(queue->cpu_queue);
127*4882a593Smuzhiyun }
128*4882a593Smuzhiyun 
/*
 * Queue @request on the current CPU's cryptd queue and kick the worker.
 *
 * BH is disabled for the whole operation so the per-CPU queue cannot be
 * corrupted by a concurrent enqueue from softirq context (see the comment
 * on struct cryptd_queue).
 *
 * Returns the crypto_enqueue_request() result (-EINPROGRESS on success,
 * -EBUSY for a backlogged request, -ENOSPC when the queue is full).
 */
static int cryptd_enqueue_request(struct cryptd_queue *queue,
				  struct crypto_async_request *request)
{
	int err;
	struct cryptd_cpu_queue *cpu_queue;
	refcount_t *refcnt;

	local_bh_disable();
	cpu_queue = this_cpu_ptr(queue->cpu_queue);
	err = crypto_enqueue_request(&cpu_queue->queue, request);

	/*
	 * Every cryptd context type declares its refcount_t as the first
	 * member, so the raw tfm context doubles as the refcount pointer.
	 */
	refcnt = crypto_tfm_ctx(request->tfm);

	if (err == -ENOSPC)
		goto out;

	queue_work_on(smp_processor_id(), cryptd_wq, &cpu_queue->work);

	/*
	 * A zero refcount means refcount tracking is not active for this
	 * tfm (presumably not allocated through the cryptd_alloc_* API —
	 * the initialisation site is outside this file).
	 */
	if (!refcount_read(refcnt))
		goto out;

	/* Pin the tfm until the completion handler drops this reference. */
	refcount_inc(refcnt);

out:
	local_bh_enable();

	return err;
}
157*4882a593Smuzhiyun 
/* Called in workqueue context, do one real cryption work (via
 * req->complete) and reschedule itself if there are more work to
 * do. */
static void cryptd_queue_worker(struct work_struct *work)
{
	struct cryptd_cpu_queue *cpu_queue;
	struct crypto_async_request *req, *backlog;

	cpu_queue = container_of(work, struct cryptd_cpu_queue, work);
	/*
	 * Only handle one request at a time to avoid hogging crypto workqueue.
	 */
	local_bh_disable();	/* serialise against cryptd_enqueue_request() */
	backlog = crypto_get_backlog(&cpu_queue->queue);
	req = crypto_dequeue_request(&cpu_queue->queue);
	local_bh_enable();

	if (!req)
		return;

	/* Tell a backlogged submitter its request is now being processed. */
	if (backlog)
		backlog->complete(backlog, -EINPROGRESS);
	/* err == 0: the completion routine performs the actual crypto work. */
	req->complete(req, 0);

	/* More requests pending: requeue ourselves for another pass. */
	if (cpu_queue->queue.qlen)
		queue_work(cryptd_wq, &cpu_queue->work);
}
185*4882a593Smuzhiyun 
cryptd_get_queue(struct crypto_tfm * tfm)186*4882a593Smuzhiyun static inline struct cryptd_queue *cryptd_get_queue(struct crypto_tfm *tfm)
187*4882a593Smuzhiyun {
188*4882a593Smuzhiyun 	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
189*4882a593Smuzhiyun 	struct cryptd_instance_ctx *ictx = crypto_instance_ctx(inst);
190*4882a593Smuzhiyun 	return ictx->queue;
191*4882a593Smuzhiyun }
192*4882a593Smuzhiyun 
/*
 * Derive the type/mask pair used to look up the algorithm being wrapped.
 *
 * cryptd is allowed to wrap internal algorithms, but in that case the
 * resulting cryptd instance will be marked as internal as well.  Since
 * cryptd's whole purpose is to add asynchrony, wrapping an already-async
 * algorithm is pointless, so CRYPTO_ALG_ASYNC is always masked out.
 */
static void cryptd_type_and_mask(struct crypto_attr_type *algt,
				 u32 *type, u32 *mask)
{
	u32 t = algt->type & CRYPTO_ALG_INTERNAL;
	u32 m = algt->mask & CRYPTO_ALG_INTERNAL;

	m |= CRYPTO_ALG_ASYNC;
	m |= crypto_algt_inherited_mask(algt);

	*type = t;
	*mask = m;
}
208*4882a593Smuzhiyun 
cryptd_init_instance(struct crypto_instance * inst,struct crypto_alg * alg)209*4882a593Smuzhiyun static int cryptd_init_instance(struct crypto_instance *inst,
210*4882a593Smuzhiyun 				struct crypto_alg *alg)
211*4882a593Smuzhiyun {
212*4882a593Smuzhiyun 	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
213*4882a593Smuzhiyun 		     "cryptd(%s)",
214*4882a593Smuzhiyun 		     alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
215*4882a593Smuzhiyun 		return -ENAMETOOLONG;
216*4882a593Smuzhiyun 
217*4882a593Smuzhiyun 	memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);
218*4882a593Smuzhiyun 
219*4882a593Smuzhiyun 	inst->alg.cra_priority = alg->cra_priority + 50;
220*4882a593Smuzhiyun 	inst->alg.cra_blocksize = alg->cra_blocksize;
221*4882a593Smuzhiyun 	inst->alg.cra_alignmask = alg->cra_alignmask;
222*4882a593Smuzhiyun 
223*4882a593Smuzhiyun 	return 0;
224*4882a593Smuzhiyun }
225*4882a593Smuzhiyun 
cryptd_skcipher_setkey(struct crypto_skcipher * parent,const u8 * key,unsigned int keylen)226*4882a593Smuzhiyun static int cryptd_skcipher_setkey(struct crypto_skcipher *parent,
227*4882a593Smuzhiyun 				  const u8 *key, unsigned int keylen)
228*4882a593Smuzhiyun {
229*4882a593Smuzhiyun 	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(parent);
230*4882a593Smuzhiyun 	struct crypto_sync_skcipher *child = ctx->child;
231*4882a593Smuzhiyun 
232*4882a593Smuzhiyun 	crypto_sync_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
233*4882a593Smuzhiyun 	crypto_sync_skcipher_set_flags(child,
234*4882a593Smuzhiyun 				       crypto_skcipher_get_flags(parent) &
235*4882a593Smuzhiyun 					 CRYPTO_TFM_REQ_MASK);
236*4882a593Smuzhiyun 	return crypto_sync_skcipher_setkey(child, key, keylen);
237*4882a593Smuzhiyun }
238*4882a593Smuzhiyun 
/*
 * Invoke the caller's original completion callback with BH disabled and,
 * unless the request is merely signalling -EINPROGRESS, drop the tfm
 * reference taken in cryptd_enqueue_request(), freeing the tfm when it
 * was the last one.
 */
static void cryptd_skcipher_complete(struct skcipher_request *req, int err)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	/* Sample before the callback runs: ->complete() may free the request. */
	int refcnt = refcount_read(&ctx->refcnt);

	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();

	/* refcnt == 0 means refcount tracking is unused for this tfm. */
	if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt))
		crypto_free_skcipher(tfm);
}
253*4882a593Smuzhiyun 
cryptd_skcipher_encrypt(struct crypto_async_request * base,int err)254*4882a593Smuzhiyun static void cryptd_skcipher_encrypt(struct crypto_async_request *base,
255*4882a593Smuzhiyun 				    int err)
256*4882a593Smuzhiyun {
257*4882a593Smuzhiyun 	struct skcipher_request *req = skcipher_request_cast(base);
258*4882a593Smuzhiyun 	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
259*4882a593Smuzhiyun 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
260*4882a593Smuzhiyun 	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
261*4882a593Smuzhiyun 	struct crypto_sync_skcipher *child = ctx->child;
262*4882a593Smuzhiyun 	SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, child);
263*4882a593Smuzhiyun 
264*4882a593Smuzhiyun 	if (unlikely(err == -EINPROGRESS))
265*4882a593Smuzhiyun 		goto out;
266*4882a593Smuzhiyun 
267*4882a593Smuzhiyun 	skcipher_request_set_sync_tfm(subreq, child);
268*4882a593Smuzhiyun 	skcipher_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP,
269*4882a593Smuzhiyun 				      NULL, NULL);
270*4882a593Smuzhiyun 	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
271*4882a593Smuzhiyun 				   req->iv);
272*4882a593Smuzhiyun 
273*4882a593Smuzhiyun 	err = crypto_skcipher_encrypt(subreq);
274*4882a593Smuzhiyun 	skcipher_request_zero(subreq);
275*4882a593Smuzhiyun 
276*4882a593Smuzhiyun 	req->base.complete = rctx->complete;
277*4882a593Smuzhiyun 
278*4882a593Smuzhiyun out:
279*4882a593Smuzhiyun 	cryptd_skcipher_complete(req, err);
280*4882a593Smuzhiyun }
281*4882a593Smuzhiyun 
cryptd_skcipher_decrypt(struct crypto_async_request * base,int err)282*4882a593Smuzhiyun static void cryptd_skcipher_decrypt(struct crypto_async_request *base,
283*4882a593Smuzhiyun 				    int err)
284*4882a593Smuzhiyun {
285*4882a593Smuzhiyun 	struct skcipher_request *req = skcipher_request_cast(base);
286*4882a593Smuzhiyun 	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
287*4882a593Smuzhiyun 	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
288*4882a593Smuzhiyun 	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
289*4882a593Smuzhiyun 	struct crypto_sync_skcipher *child = ctx->child;
290*4882a593Smuzhiyun 	SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, child);
291*4882a593Smuzhiyun 
292*4882a593Smuzhiyun 	if (unlikely(err == -EINPROGRESS))
293*4882a593Smuzhiyun 		goto out;
294*4882a593Smuzhiyun 
295*4882a593Smuzhiyun 	skcipher_request_set_sync_tfm(subreq, child);
296*4882a593Smuzhiyun 	skcipher_request_set_callback(subreq, CRYPTO_TFM_REQ_MAY_SLEEP,
297*4882a593Smuzhiyun 				      NULL, NULL);
298*4882a593Smuzhiyun 	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
299*4882a593Smuzhiyun 				   req->iv);
300*4882a593Smuzhiyun 
301*4882a593Smuzhiyun 	err = crypto_skcipher_decrypt(subreq);
302*4882a593Smuzhiyun 	skcipher_request_zero(subreq);
303*4882a593Smuzhiyun 
304*4882a593Smuzhiyun 	req->base.complete = rctx->complete;
305*4882a593Smuzhiyun 
306*4882a593Smuzhiyun out:
307*4882a593Smuzhiyun 	cryptd_skcipher_complete(req, err);
308*4882a593Smuzhiyun }
309*4882a593Smuzhiyun 
/*
 * Stash the caller's completion callback, substitute the worker-side
 * handler @compl, and hand the request to the per-CPU queue.
 */
static int cryptd_skcipher_enqueue(struct skcipher_request *req,
				   crypto_completion_t compl)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
	struct cryptd_queue *queue = cryptd_get_queue(crypto_skcipher_tfm(tfm));

	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}
323*4882a593Smuzhiyun 
cryptd_skcipher_encrypt_enqueue(struct skcipher_request * req)324*4882a593Smuzhiyun static int cryptd_skcipher_encrypt_enqueue(struct skcipher_request *req)
325*4882a593Smuzhiyun {
326*4882a593Smuzhiyun 	return cryptd_skcipher_enqueue(req, cryptd_skcipher_encrypt);
327*4882a593Smuzhiyun }
328*4882a593Smuzhiyun 
cryptd_skcipher_decrypt_enqueue(struct skcipher_request * req)329*4882a593Smuzhiyun static int cryptd_skcipher_decrypt_enqueue(struct skcipher_request *req)
330*4882a593Smuzhiyun {
331*4882a593Smuzhiyun 	return cryptd_skcipher_enqueue(req, cryptd_skcipher_decrypt);
332*4882a593Smuzhiyun }
333*4882a593Smuzhiyun 
cryptd_skcipher_init_tfm(struct crypto_skcipher * tfm)334*4882a593Smuzhiyun static int cryptd_skcipher_init_tfm(struct crypto_skcipher *tfm)
335*4882a593Smuzhiyun {
336*4882a593Smuzhiyun 	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
337*4882a593Smuzhiyun 	struct skcipherd_instance_ctx *ictx = skcipher_instance_ctx(inst);
338*4882a593Smuzhiyun 	struct crypto_skcipher_spawn *spawn = &ictx->spawn;
339*4882a593Smuzhiyun 	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
340*4882a593Smuzhiyun 	struct crypto_skcipher *cipher;
341*4882a593Smuzhiyun 
342*4882a593Smuzhiyun 	cipher = crypto_spawn_skcipher(spawn);
343*4882a593Smuzhiyun 	if (IS_ERR(cipher))
344*4882a593Smuzhiyun 		return PTR_ERR(cipher);
345*4882a593Smuzhiyun 
346*4882a593Smuzhiyun 	ctx->child = (struct crypto_sync_skcipher *)cipher;
347*4882a593Smuzhiyun 	crypto_skcipher_set_reqsize(
348*4882a593Smuzhiyun 		tfm, sizeof(struct cryptd_skcipher_request_ctx));
349*4882a593Smuzhiyun 	return 0;
350*4882a593Smuzhiyun }
351*4882a593Smuzhiyun 
cryptd_skcipher_exit_tfm(struct crypto_skcipher * tfm)352*4882a593Smuzhiyun static void cryptd_skcipher_exit_tfm(struct crypto_skcipher *tfm)
353*4882a593Smuzhiyun {
354*4882a593Smuzhiyun 	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(tfm);
355*4882a593Smuzhiyun 
356*4882a593Smuzhiyun 	crypto_free_sync_skcipher(ctx->child);
357*4882a593Smuzhiyun }
358*4882a593Smuzhiyun 
cryptd_skcipher_free(struct skcipher_instance * inst)359*4882a593Smuzhiyun static void cryptd_skcipher_free(struct skcipher_instance *inst)
360*4882a593Smuzhiyun {
361*4882a593Smuzhiyun 	struct skcipherd_instance_ctx *ctx = skcipher_instance_ctx(inst);
362*4882a593Smuzhiyun 
363*4882a593Smuzhiyun 	crypto_drop_skcipher(&ctx->spawn);
364*4882a593Smuzhiyun 	kfree(inst);
365*4882a593Smuzhiyun }
366*4882a593Smuzhiyun 
/*
 * Template constructor for "cryptd(<skcipher>)" instances: grab the
 * underlying synchronous skcipher named by tb[1], copy its parameters
 * into a new async instance, and register it.
 *
 * Returns 0 on success or a negative errno; on any failure the partially
 * built instance (and its spawn, if grabbed) is released via
 * cryptd_skcipher_free().
 */
static int cryptd_create_skcipher(struct crypto_template *tmpl,
				  struct rtattr **tb,
				  struct crypto_attr_type *algt,
				  struct cryptd_queue *queue)
{
	struct skcipherd_instance_ctx *ctx;
	struct skcipher_instance *inst;
	struct skcipher_alg *alg;
	u32 type;
	u32 mask;
	int err;

	cryptd_type_and_mask(algt, &type, &mask);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = skcipher_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_grab_skcipher(&ctx->spawn, skcipher_crypto_instance(inst),
				   crypto_attr_alg_name(tb[1]), type, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_skcipher_alg(&ctx->spawn);
	err = cryptd_init_instance(skcipher_crypto_instance(inst), &alg->base);
	if (err)
		goto err_free_inst;

	/* Mark the wrapper async, and internal iff the wrapped alg is. */
	inst->alg.base.cra_flags |= CRYPTO_ALG_ASYNC |
		(alg->base.cra_flags & CRYPTO_ALG_INTERNAL);
	inst->alg.ivsize = crypto_skcipher_alg_ivsize(alg);
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg);
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg);
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg);

	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_skcipher_ctx);

	inst->alg.init = cryptd_skcipher_init_tfm;
	inst->alg.exit = cryptd_skcipher_exit_tfm;

	inst->alg.setkey = cryptd_skcipher_setkey;
	inst->alg.encrypt = cryptd_skcipher_encrypt_enqueue;
	inst->alg.decrypt = cryptd_skcipher_decrypt_enqueue;

	inst->free = cryptd_skcipher_free;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
		/* Label inside the if: earlier failures share this cleanup. */
err_free_inst:
		cryptd_skcipher_free(inst);
	}
	return err;
}
423*4882a593Smuzhiyun 
cryptd_hash_init_tfm(struct crypto_tfm * tfm)424*4882a593Smuzhiyun static int cryptd_hash_init_tfm(struct crypto_tfm *tfm)
425*4882a593Smuzhiyun {
426*4882a593Smuzhiyun 	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
427*4882a593Smuzhiyun 	struct hashd_instance_ctx *ictx = crypto_instance_ctx(inst);
428*4882a593Smuzhiyun 	struct crypto_shash_spawn *spawn = &ictx->spawn;
429*4882a593Smuzhiyun 	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
430*4882a593Smuzhiyun 	struct crypto_shash *hash;
431*4882a593Smuzhiyun 
432*4882a593Smuzhiyun 	hash = crypto_spawn_shash(spawn);
433*4882a593Smuzhiyun 	if (IS_ERR(hash))
434*4882a593Smuzhiyun 		return PTR_ERR(hash);
435*4882a593Smuzhiyun 
436*4882a593Smuzhiyun 	ctx->child = hash;
437*4882a593Smuzhiyun 	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
438*4882a593Smuzhiyun 				 sizeof(struct cryptd_hash_request_ctx) +
439*4882a593Smuzhiyun 				 crypto_shash_descsize(hash));
440*4882a593Smuzhiyun 	return 0;
441*4882a593Smuzhiyun }
442*4882a593Smuzhiyun 
cryptd_hash_exit_tfm(struct crypto_tfm * tfm)443*4882a593Smuzhiyun static void cryptd_hash_exit_tfm(struct crypto_tfm *tfm)
444*4882a593Smuzhiyun {
445*4882a593Smuzhiyun 	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(tfm);
446*4882a593Smuzhiyun 
447*4882a593Smuzhiyun 	crypto_free_shash(ctx->child);
448*4882a593Smuzhiyun }
449*4882a593Smuzhiyun 
cryptd_hash_setkey(struct crypto_ahash * parent,const u8 * key,unsigned int keylen)450*4882a593Smuzhiyun static int cryptd_hash_setkey(struct crypto_ahash *parent,
451*4882a593Smuzhiyun 				   const u8 *key, unsigned int keylen)
452*4882a593Smuzhiyun {
453*4882a593Smuzhiyun 	struct cryptd_hash_ctx *ctx   = crypto_ahash_ctx(parent);
454*4882a593Smuzhiyun 	struct crypto_shash *child = ctx->child;
455*4882a593Smuzhiyun 
456*4882a593Smuzhiyun 	crypto_shash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
457*4882a593Smuzhiyun 	crypto_shash_set_flags(child, crypto_ahash_get_flags(parent) &
458*4882a593Smuzhiyun 				      CRYPTO_TFM_REQ_MASK);
459*4882a593Smuzhiyun 	return crypto_shash_setkey(child, key, keylen);
460*4882a593Smuzhiyun }
461*4882a593Smuzhiyun 
/*
 * Stash the caller's completion callback, substitute the worker-side
 * handler @compl, and hand the request to the per-CPU queue.
 */
static int cryptd_hash_enqueue(struct ahash_request *req,
				crypto_completion_t compl)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_queue *queue = cryptd_get_queue(crypto_ahash_tfm(tfm));
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);

	rctx->complete = req->base.complete;
	req->base.complete = compl;

	return cryptd_enqueue_request(queue, &req->base);
}
475*4882a593Smuzhiyun 
/*
 * Invoke the caller's original completion callback with BH disabled and,
 * unless the request is merely signalling -EINPROGRESS, drop the tfm
 * reference taken in cryptd_enqueue_request(), freeing the tfm when it
 * was the last one.
 */
static void cryptd_hash_complete(struct ahash_request *req, int err)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
	/* Sample before the callback runs: ->complete() may free the request. */
	int refcnt = refcount_read(&ctx->refcnt);

	local_bh_disable();
	rctx->complete(&req->base, err);
	local_bh_enable();

	/* refcnt == 0 means refcount tracking is unused for this tfm. */
	if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt))
		crypto_free_ahash(tfm);
}
490*4882a593Smuzhiyun 
cryptd_hash_init(struct crypto_async_request * req_async,int err)491*4882a593Smuzhiyun static void cryptd_hash_init(struct crypto_async_request *req_async, int err)
492*4882a593Smuzhiyun {
493*4882a593Smuzhiyun 	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
494*4882a593Smuzhiyun 	struct crypto_shash *child = ctx->child;
495*4882a593Smuzhiyun 	struct ahash_request *req = ahash_request_cast(req_async);
496*4882a593Smuzhiyun 	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
497*4882a593Smuzhiyun 	struct shash_desc *desc = &rctx->desc;
498*4882a593Smuzhiyun 
499*4882a593Smuzhiyun 	if (unlikely(err == -EINPROGRESS))
500*4882a593Smuzhiyun 		goto out;
501*4882a593Smuzhiyun 
502*4882a593Smuzhiyun 	desc->tfm = child;
503*4882a593Smuzhiyun 
504*4882a593Smuzhiyun 	err = crypto_shash_init(desc);
505*4882a593Smuzhiyun 
506*4882a593Smuzhiyun 	req->base.complete = rctx->complete;
507*4882a593Smuzhiyun 
508*4882a593Smuzhiyun out:
509*4882a593Smuzhiyun 	cryptd_hash_complete(req, err);
510*4882a593Smuzhiyun }
511*4882a593Smuzhiyun 
cryptd_hash_init_enqueue(struct ahash_request * req)512*4882a593Smuzhiyun static int cryptd_hash_init_enqueue(struct ahash_request *req)
513*4882a593Smuzhiyun {
514*4882a593Smuzhiyun 	return cryptd_hash_enqueue(req, cryptd_hash_init);
515*4882a593Smuzhiyun }
516*4882a593Smuzhiyun 
cryptd_hash_update(struct crypto_async_request * req_async,int err)517*4882a593Smuzhiyun static void cryptd_hash_update(struct crypto_async_request *req_async, int err)
518*4882a593Smuzhiyun {
519*4882a593Smuzhiyun 	struct ahash_request *req = ahash_request_cast(req_async);
520*4882a593Smuzhiyun 	struct cryptd_hash_request_ctx *rctx;
521*4882a593Smuzhiyun 
522*4882a593Smuzhiyun 	rctx = ahash_request_ctx(req);
523*4882a593Smuzhiyun 
524*4882a593Smuzhiyun 	if (unlikely(err == -EINPROGRESS))
525*4882a593Smuzhiyun 		goto out;
526*4882a593Smuzhiyun 
527*4882a593Smuzhiyun 	err = shash_ahash_update(req, &rctx->desc);
528*4882a593Smuzhiyun 
529*4882a593Smuzhiyun 	req->base.complete = rctx->complete;
530*4882a593Smuzhiyun 
531*4882a593Smuzhiyun out:
532*4882a593Smuzhiyun 	cryptd_hash_complete(req, err);
533*4882a593Smuzhiyun }
534*4882a593Smuzhiyun 
cryptd_hash_update_enqueue(struct ahash_request * req)535*4882a593Smuzhiyun static int cryptd_hash_update_enqueue(struct ahash_request *req)
536*4882a593Smuzhiyun {
537*4882a593Smuzhiyun 	return cryptd_hash_enqueue(req, cryptd_hash_update);
538*4882a593Smuzhiyun }
539*4882a593Smuzhiyun 
cryptd_hash_final(struct crypto_async_request * req_async,int err)540*4882a593Smuzhiyun static void cryptd_hash_final(struct crypto_async_request *req_async, int err)
541*4882a593Smuzhiyun {
542*4882a593Smuzhiyun 	struct ahash_request *req = ahash_request_cast(req_async);
543*4882a593Smuzhiyun 	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
544*4882a593Smuzhiyun 
545*4882a593Smuzhiyun 	if (unlikely(err == -EINPROGRESS))
546*4882a593Smuzhiyun 		goto out;
547*4882a593Smuzhiyun 
548*4882a593Smuzhiyun 	err = crypto_shash_final(&rctx->desc, req->result);
549*4882a593Smuzhiyun 
550*4882a593Smuzhiyun 	req->base.complete = rctx->complete;
551*4882a593Smuzhiyun 
552*4882a593Smuzhiyun out:
553*4882a593Smuzhiyun 	cryptd_hash_complete(req, err);
554*4882a593Smuzhiyun }
555*4882a593Smuzhiyun 
cryptd_hash_final_enqueue(struct ahash_request * req)556*4882a593Smuzhiyun static int cryptd_hash_final_enqueue(struct ahash_request *req)
557*4882a593Smuzhiyun {
558*4882a593Smuzhiyun 	return cryptd_hash_enqueue(req, cryptd_hash_final);
559*4882a593Smuzhiyun }
560*4882a593Smuzhiyun 
cryptd_hash_finup(struct crypto_async_request * req_async,int err)561*4882a593Smuzhiyun static void cryptd_hash_finup(struct crypto_async_request *req_async, int err)
562*4882a593Smuzhiyun {
563*4882a593Smuzhiyun 	struct ahash_request *req = ahash_request_cast(req_async);
564*4882a593Smuzhiyun 	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
565*4882a593Smuzhiyun 
566*4882a593Smuzhiyun 	if (unlikely(err == -EINPROGRESS))
567*4882a593Smuzhiyun 		goto out;
568*4882a593Smuzhiyun 
569*4882a593Smuzhiyun 	err = shash_ahash_finup(req, &rctx->desc);
570*4882a593Smuzhiyun 
571*4882a593Smuzhiyun 	req->base.complete = rctx->complete;
572*4882a593Smuzhiyun 
573*4882a593Smuzhiyun out:
574*4882a593Smuzhiyun 	cryptd_hash_complete(req, err);
575*4882a593Smuzhiyun }
576*4882a593Smuzhiyun 
cryptd_hash_finup_enqueue(struct ahash_request * req)577*4882a593Smuzhiyun static int cryptd_hash_finup_enqueue(struct ahash_request *req)
578*4882a593Smuzhiyun {
579*4882a593Smuzhiyun 	return cryptd_hash_enqueue(req, cryptd_hash_finup);
580*4882a593Smuzhiyun }
581*4882a593Smuzhiyun 
cryptd_hash_digest(struct crypto_async_request * req_async,int err)582*4882a593Smuzhiyun static void cryptd_hash_digest(struct crypto_async_request *req_async, int err)
583*4882a593Smuzhiyun {
584*4882a593Smuzhiyun 	struct cryptd_hash_ctx *ctx = crypto_tfm_ctx(req_async->tfm);
585*4882a593Smuzhiyun 	struct crypto_shash *child = ctx->child;
586*4882a593Smuzhiyun 	struct ahash_request *req = ahash_request_cast(req_async);
587*4882a593Smuzhiyun 	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
588*4882a593Smuzhiyun 	struct shash_desc *desc = &rctx->desc;
589*4882a593Smuzhiyun 
590*4882a593Smuzhiyun 	if (unlikely(err == -EINPROGRESS))
591*4882a593Smuzhiyun 		goto out;
592*4882a593Smuzhiyun 
593*4882a593Smuzhiyun 	desc->tfm = child;
594*4882a593Smuzhiyun 
595*4882a593Smuzhiyun 	err = shash_ahash_digest(req, desc);
596*4882a593Smuzhiyun 
597*4882a593Smuzhiyun 	req->base.complete = rctx->complete;
598*4882a593Smuzhiyun 
599*4882a593Smuzhiyun out:
600*4882a593Smuzhiyun 	cryptd_hash_complete(req, err);
601*4882a593Smuzhiyun }
602*4882a593Smuzhiyun 
cryptd_hash_digest_enqueue(struct ahash_request * req)603*4882a593Smuzhiyun static int cryptd_hash_digest_enqueue(struct ahash_request *req)
604*4882a593Smuzhiyun {
605*4882a593Smuzhiyun 	return cryptd_hash_enqueue(req, cryptd_hash_digest);
606*4882a593Smuzhiyun }
607*4882a593Smuzhiyun 
/* Export the partial hash state held in the request's shash descriptor. */
static int cryptd_hash_export(struct ahash_request *req, void *out)
{
	return crypto_shash_export(cryptd_shash_desc(req), out);
}
614*4882a593Smuzhiyun 
cryptd_hash_import(struct ahash_request * req,const void * in)615*4882a593Smuzhiyun static int cryptd_hash_import(struct ahash_request *req, const void *in)
616*4882a593Smuzhiyun {
617*4882a593Smuzhiyun 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
618*4882a593Smuzhiyun 	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(tfm);
619*4882a593Smuzhiyun 	struct shash_desc *desc = cryptd_shash_desc(req);
620*4882a593Smuzhiyun 
621*4882a593Smuzhiyun 	desc->tfm = ctx->child;
622*4882a593Smuzhiyun 
623*4882a593Smuzhiyun 	return crypto_shash_import(desc, in);
624*4882a593Smuzhiyun }
625*4882a593Smuzhiyun 
cryptd_hash_free(struct ahash_instance * inst)626*4882a593Smuzhiyun static void cryptd_hash_free(struct ahash_instance *inst)
627*4882a593Smuzhiyun {
628*4882a593Smuzhiyun 	struct hashd_instance_ctx *ctx = ahash_instance_ctx(inst);
629*4882a593Smuzhiyun 
630*4882a593Smuzhiyun 	crypto_drop_shash(&ctx->spawn);
631*4882a593Smuzhiyun 	kfree(inst);
632*4882a593Smuzhiyun }
633*4882a593Smuzhiyun 
/*
 * Template constructor for "cryptd(<shash>)" instances.
 *
 * Grabs the named shash algorithm, wraps it in an async ahash instance
 * whose operations enqueue onto @queue, and registers the instance.
 * Returns 0 on success or a negative errno; on any failure the partially
 * built instance is torn down via cryptd_hash_free().
 */
static int cryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb,
			      struct crypto_attr_type *algt,
			      struct cryptd_queue *queue)
{
	struct hashd_instance_ctx *ctx;
	struct ahash_instance *inst;
	struct shash_alg *alg;
	u32 type;
	u32 mask;
	int err;

	cryptd_type_and_mask(algt, &type, &mask);

	/* ctx lives in the tail of the instance allocation. */
	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = ahash_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_grab_shash(&ctx->spawn, ahash_crypto_instance(inst),
				crypto_attr_alg_name(tb[1]), type, mask);
	if (err)
		goto err_free_inst;
	alg = crypto_spawn_shash_alg(&ctx->spawn);

	err = cryptd_init_instance(ahash_crypto_instance(inst), &alg->base);
	if (err)
		goto err_free_inst;

	/* Instance is async; propagate INTERNAL/OPTIONAL_KEY from the child. */
	inst->alg.halg.base.cra_flags |= CRYPTO_ALG_ASYNC |
		(alg->base.cra_flags & (CRYPTO_ALG_INTERNAL|
					CRYPTO_ALG_OPTIONAL_KEY));
	inst->alg.halg.digestsize = alg->digestsize;
	inst->alg.halg.statesize = alg->statesize;
	inst->alg.halg.base.cra_ctxsize = sizeof(struct cryptd_hash_ctx);

	inst->alg.halg.base.cra_init = cryptd_hash_init_tfm;
	inst->alg.halg.base.cra_exit = cryptd_hash_exit_tfm;

	inst->alg.init   = cryptd_hash_init_enqueue;
	inst->alg.update = cryptd_hash_update_enqueue;
	inst->alg.final  = cryptd_hash_final_enqueue;
	inst->alg.finup  = cryptd_hash_finup_enqueue;
	inst->alg.export = cryptd_hash_export;
	inst->alg.import = cryptd_hash_import;
	/* Only expose setkey when the child actually uses a key. */
	if (crypto_shash_alg_has_setkey(alg))
		inst->alg.setkey = cryptd_hash_setkey;
	inst->alg.digest = cryptd_hash_digest_enqueue;

	inst->free = cryptd_hash_free;

	err = ahash_register_instance(tmpl, inst);
	if (err) {
		/* Label inside the if keeps a single cleanup path. */
err_free_inst:
		cryptd_hash_free(inst);
	}
	return err;
}
693*4882a593Smuzhiyun 
cryptd_aead_setkey(struct crypto_aead * parent,const u8 * key,unsigned int keylen)694*4882a593Smuzhiyun static int cryptd_aead_setkey(struct crypto_aead *parent,
695*4882a593Smuzhiyun 			      const u8 *key, unsigned int keylen)
696*4882a593Smuzhiyun {
697*4882a593Smuzhiyun 	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
698*4882a593Smuzhiyun 	struct crypto_aead *child = ctx->child;
699*4882a593Smuzhiyun 
700*4882a593Smuzhiyun 	return crypto_aead_setkey(child, key, keylen);
701*4882a593Smuzhiyun }
702*4882a593Smuzhiyun 
cryptd_aead_setauthsize(struct crypto_aead * parent,unsigned int authsize)703*4882a593Smuzhiyun static int cryptd_aead_setauthsize(struct crypto_aead *parent,
704*4882a593Smuzhiyun 				   unsigned int authsize)
705*4882a593Smuzhiyun {
706*4882a593Smuzhiyun 	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(parent);
707*4882a593Smuzhiyun 	struct crypto_aead *child = ctx->child;
708*4882a593Smuzhiyun 
709*4882a593Smuzhiyun 	return crypto_aead_setauthsize(child, authsize);
710*4882a593Smuzhiyun }
711*4882a593Smuzhiyun 
/*
 * Common workqueue body for AEAD encrypt and decrypt.
 *
 * @req:   the queued request (still bound to the cryptd tfm on entry)
 * @child: underlying AEAD transform that performs the real work
 * @err:   status from the queue; -EINPROGRESS means this is only a
 *         backlog notification, so the crypt step is skipped
 * @crypt: child operation to invoke (encrypt or decrypt)
 *
 * Ordering is deliberate: the saved completion and the refcount snapshot
 * are read before compl() runs, because the completion may free the
 * request (NOTE(review): rctx must not be touched afterwards — confirm).
 */
static void cryptd_aead_crypt(struct aead_request *req,
			struct crypto_aead *child,
			int err,
			int (*crypt)(struct aead_request *req))
{
	struct cryptd_aead_request_ctx *rctx;
	struct cryptd_aead_ctx *ctx;
	crypto_completion_t compl;
	struct crypto_aead *tfm;
	int refcnt;

	rctx = aead_request_ctx(req);
	/* Caller's completion handler, saved by cryptd_aead_enqueue(). */
	compl = rctx->complete;

	tfm = crypto_aead_reqtfm(req);

	if (unlikely(err == -EINPROGRESS))
		goto out;
	/* Hand the request to the child and run it synchronously here. */
	aead_request_set_tfm(req, child);
	err = crypt( req );

out:
	ctx = crypto_aead_ctx(tfm);
	refcnt = refcount_read(&ctx->refcnt);

	local_bh_disable();
	compl(&req->base, err);
	local_bh_enable();

	/*
	 * Drop this request's reference; free the tfm if cryptd_free_aead()
	 * already dropped the allocation's own reference.  Skipped for the
	 * -EINPROGRESS notification since the request is not finished.
	 */
	if (err != -EINPROGRESS && refcnt && refcount_dec_and_test(&ctx->refcnt))
		crypto_free_aead(tfm);
}
744*4882a593Smuzhiyun 
cryptd_aead_encrypt(struct crypto_async_request * areq,int err)745*4882a593Smuzhiyun static void cryptd_aead_encrypt(struct crypto_async_request *areq, int err)
746*4882a593Smuzhiyun {
747*4882a593Smuzhiyun 	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
748*4882a593Smuzhiyun 	struct crypto_aead *child = ctx->child;
749*4882a593Smuzhiyun 	struct aead_request *req;
750*4882a593Smuzhiyun 
751*4882a593Smuzhiyun 	req = container_of(areq, struct aead_request, base);
752*4882a593Smuzhiyun 	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->encrypt);
753*4882a593Smuzhiyun }
754*4882a593Smuzhiyun 
cryptd_aead_decrypt(struct crypto_async_request * areq,int err)755*4882a593Smuzhiyun static void cryptd_aead_decrypt(struct crypto_async_request *areq, int err)
756*4882a593Smuzhiyun {
757*4882a593Smuzhiyun 	struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm);
758*4882a593Smuzhiyun 	struct crypto_aead *child = ctx->child;
759*4882a593Smuzhiyun 	struct aead_request *req;
760*4882a593Smuzhiyun 
761*4882a593Smuzhiyun 	req = container_of(areq, struct aead_request, base);
762*4882a593Smuzhiyun 	cryptd_aead_crypt(req, child, err, crypto_aead_alg(child)->decrypt);
763*4882a593Smuzhiyun }
764*4882a593Smuzhiyun 
/*
 * Save the caller's completion handler, substitute the worker callback
 * @compl, and push the request onto the per-CPU cryptd queue.
 */
static int cryptd_aead_enqueue(struct aead_request *req,
				    crypto_completion_t compl)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct cryptd_queue *queue = cryptd_get_queue(crypto_aead_tfm(tfm));
	struct cryptd_aead_request_ctx *rctx = aead_request_ctx(req);

	rctx->complete = req->base.complete;
	req->base.complete = compl;
	return cryptd_enqueue_request(queue, &req->base);
}
776*4882a593Smuzhiyun 
cryptd_aead_encrypt_enqueue(struct aead_request * req)777*4882a593Smuzhiyun static int cryptd_aead_encrypt_enqueue(struct aead_request *req)
778*4882a593Smuzhiyun {
779*4882a593Smuzhiyun 	return cryptd_aead_enqueue(req, cryptd_aead_encrypt );
780*4882a593Smuzhiyun }
781*4882a593Smuzhiyun 
cryptd_aead_decrypt_enqueue(struct aead_request * req)782*4882a593Smuzhiyun static int cryptd_aead_decrypt_enqueue(struct aead_request *req)
783*4882a593Smuzhiyun {
784*4882a593Smuzhiyun 	return cryptd_aead_enqueue(req, cryptd_aead_decrypt );
785*4882a593Smuzhiyun }
786*4882a593Smuzhiyun 
cryptd_aead_init_tfm(struct crypto_aead * tfm)787*4882a593Smuzhiyun static int cryptd_aead_init_tfm(struct crypto_aead *tfm)
788*4882a593Smuzhiyun {
789*4882a593Smuzhiyun 	struct aead_instance *inst = aead_alg_instance(tfm);
790*4882a593Smuzhiyun 	struct aead_instance_ctx *ictx = aead_instance_ctx(inst);
791*4882a593Smuzhiyun 	struct crypto_aead_spawn *spawn = &ictx->aead_spawn;
792*4882a593Smuzhiyun 	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);
793*4882a593Smuzhiyun 	struct crypto_aead *cipher;
794*4882a593Smuzhiyun 
795*4882a593Smuzhiyun 	cipher = crypto_spawn_aead(spawn);
796*4882a593Smuzhiyun 	if (IS_ERR(cipher))
797*4882a593Smuzhiyun 		return PTR_ERR(cipher);
798*4882a593Smuzhiyun 
799*4882a593Smuzhiyun 	ctx->child = cipher;
800*4882a593Smuzhiyun 	crypto_aead_set_reqsize(
801*4882a593Smuzhiyun 		tfm, max((unsigned)sizeof(struct cryptd_aead_request_ctx),
802*4882a593Smuzhiyun 			 crypto_aead_reqsize(cipher)));
803*4882a593Smuzhiyun 	return 0;
804*4882a593Smuzhiyun }
805*4882a593Smuzhiyun 
/* Release the child transform acquired in cryptd_aead_init_tfm(). */
static void cryptd_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(tfm);
	crypto_free_aead(ctx->child);
}
811*4882a593Smuzhiyun 
cryptd_aead_free(struct aead_instance * inst)812*4882a593Smuzhiyun static void cryptd_aead_free(struct aead_instance *inst)
813*4882a593Smuzhiyun {
814*4882a593Smuzhiyun 	struct aead_instance_ctx *ctx = aead_instance_ctx(inst);
815*4882a593Smuzhiyun 
816*4882a593Smuzhiyun 	crypto_drop_aead(&ctx->aead_spawn);
817*4882a593Smuzhiyun 	kfree(inst);
818*4882a593Smuzhiyun }
819*4882a593Smuzhiyun 
/*
 * Template constructor for "cryptd(<aead>)" instances.
 *
 * Grabs the named AEAD algorithm, builds an async wrapper instance whose
 * encrypt/decrypt enqueue onto @queue, and registers it.  Returns 0 or a
 * negative errno; any failure tears down via cryptd_aead_free().
 */
static int cryptd_create_aead(struct crypto_template *tmpl,
		              struct rtattr **tb,
			      struct crypto_attr_type *algt,
			      struct cryptd_queue *queue)
{
	struct aead_instance_ctx *ctx;
	struct aead_instance *inst;
	struct aead_alg *alg;
	u32 type;
	u32 mask;
	int err;

	cryptd_type_and_mask(algt, &type, &mask);

	/* ctx lives in the tail of the instance allocation. */
	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	ctx = aead_instance_ctx(inst);
	ctx->queue = queue;

	err = crypto_grab_aead(&ctx->aead_spawn, aead_crypto_instance(inst),
			       crypto_attr_alg_name(tb[1]), type, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_aead_alg(&ctx->aead_spawn);
	err = cryptd_init_instance(aead_crypto_instance(inst), &alg->base);
	if (err)
		goto err_free_inst;

	/* Instance is async; propagate only the INTERNAL flag from child. */
	inst->alg.base.cra_flags |= CRYPTO_ALG_ASYNC |
		(alg->base.cra_flags & CRYPTO_ALG_INTERNAL);
	inst->alg.base.cra_ctxsize = sizeof(struct cryptd_aead_ctx);

	inst->alg.ivsize = crypto_aead_alg_ivsize(alg);
	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);

	inst->alg.init = cryptd_aead_init_tfm;
	inst->alg.exit = cryptd_aead_exit_tfm;
	inst->alg.setkey = cryptd_aead_setkey;
	inst->alg.setauthsize = cryptd_aead_setauthsize;
	inst->alg.encrypt = cryptd_aead_encrypt_enqueue;
	inst->alg.decrypt = cryptd_aead_decrypt_enqueue;

	inst->free = cryptd_aead_free;

	err = aead_register_instance(tmpl, inst);
	if (err) {
		/* Label inside the if keeps a single cleanup path. */
err_free_inst:
		cryptd_aead_free(inst);
	}
	return err;
}
874*4882a593Smuzhiyun 
/* The single global cryptd queue shared by all template instances. */
static struct cryptd_queue queue;
876*4882a593Smuzhiyun 
/*
 * Template .create hook: dispatch on the requested algorithm type and
 * build the matching cryptd wrapper (skcipher, ahash or AEAD).
 */
static int cryptd_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_SKCIPHER:
		return cryptd_create_skcipher(tmpl, tb, algt, &queue);
	case CRYPTO_ALG_TYPE_HASH:
		return cryptd_create_hash(tmpl, tb, algt, &queue);
	case CRYPTO_ALG_TYPE_AEAD:
		return cryptd_create_aead(tmpl, tb, algt, &queue);
	}

	/* Unsupported algorithm type. */
	return -EINVAL;
}
896*4882a593Smuzhiyun 
/* The "cryptd" template registered with the crypto core. */
static struct crypto_template cryptd_tmpl = {
	.name = "cryptd",
	.create = cryptd_create,
	.module = THIS_MODULE,
};
902*4882a593Smuzhiyun 
/*
 * Allocate a cryptd-wrapped skcipher, "cryptd(<alg_name>)", for in-kernel
 * users.  The refcount starts at 1 and is dropped by cryptd_free_skcipher().
 * Returns an ERR_PTR on failure (name too long, allocation failure, or the
 * lookup resolved to a transform not provided by this module).
 */
struct cryptd_skcipher *cryptd_alloc_skcipher(const char *alg_name,
					      u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_skcipher_ctx *ctx;
	struct crypto_skcipher *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);

	tfm = crypto_alloc_skcipher(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);

	/* Reject lookups that matched an algorithm from another module. */
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_skcipher(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_skcipher_ctx(tfm);
	refcount_set(&ctx->refcnt, 1);

	return container_of(tfm, struct cryptd_skcipher, base);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_skcipher);
929*4882a593Smuzhiyun 
cryptd_skcipher_child(struct cryptd_skcipher * tfm)930*4882a593Smuzhiyun struct crypto_skcipher *cryptd_skcipher_child(struct cryptd_skcipher *tfm)
931*4882a593Smuzhiyun {
932*4882a593Smuzhiyun 	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);
933*4882a593Smuzhiyun 
934*4882a593Smuzhiyun 	return &ctx->child->base;
935*4882a593Smuzhiyun }
936*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(cryptd_skcipher_child);
937*4882a593Smuzhiyun 
cryptd_skcipher_queued(struct cryptd_skcipher * tfm)938*4882a593Smuzhiyun bool cryptd_skcipher_queued(struct cryptd_skcipher *tfm)
939*4882a593Smuzhiyun {
940*4882a593Smuzhiyun 	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);
941*4882a593Smuzhiyun 
942*4882a593Smuzhiyun 	return refcount_read(&ctx->refcnt) - 1;
943*4882a593Smuzhiyun }
944*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(cryptd_skcipher_queued);
945*4882a593Smuzhiyun 
/*
 * Drop the allocation reference taken by cryptd_alloc_skcipher().  The
 * transform is only freed once in-flight requests have also dropped theirs.
 */
void cryptd_free_skcipher(struct cryptd_skcipher *tfm)
{
	struct cryptd_skcipher_ctx *ctx = crypto_skcipher_ctx(&tfm->base);

	if (refcount_dec_and_test(&ctx->refcnt))
		crypto_free_skcipher(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_skcipher);
954*4882a593Smuzhiyun 
/*
 * Allocate a cryptd-wrapped ahash, "cryptd(<alg_name>)", for in-kernel
 * users.  The refcount starts at 1 and is dropped by cryptd_free_ahash().
 * Returns an ERR_PTR on failure (name too long, allocation failure, or the
 * lookup resolved to a transform not provided by this module).
 */
struct cryptd_ahash *cryptd_alloc_ahash(const char *alg_name,
					u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_hash_ctx *ctx;
	struct crypto_ahash *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_ahash(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	/* Reject lookups that matched an algorithm from another module. */
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_ahash(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_ahash_ctx(tfm);
	refcount_set(&ctx->refcnt, 1);

	return __cryptd_ahash_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_ahash);
979*4882a593Smuzhiyun 
cryptd_ahash_child(struct cryptd_ahash * tfm)980*4882a593Smuzhiyun struct crypto_shash *cryptd_ahash_child(struct cryptd_ahash *tfm)
981*4882a593Smuzhiyun {
982*4882a593Smuzhiyun 	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);
983*4882a593Smuzhiyun 
984*4882a593Smuzhiyun 	return ctx->child;
985*4882a593Smuzhiyun }
986*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(cryptd_ahash_child);
987*4882a593Smuzhiyun 
cryptd_shash_desc(struct ahash_request * req)988*4882a593Smuzhiyun struct shash_desc *cryptd_shash_desc(struct ahash_request *req)
989*4882a593Smuzhiyun {
990*4882a593Smuzhiyun 	struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
991*4882a593Smuzhiyun 	return &rctx->desc;
992*4882a593Smuzhiyun }
993*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(cryptd_shash_desc);
994*4882a593Smuzhiyun 
cryptd_ahash_queued(struct cryptd_ahash * tfm)995*4882a593Smuzhiyun bool cryptd_ahash_queued(struct cryptd_ahash *tfm)
996*4882a593Smuzhiyun {
997*4882a593Smuzhiyun 	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);
998*4882a593Smuzhiyun 
999*4882a593Smuzhiyun 	return refcount_read(&ctx->refcnt) - 1;
1000*4882a593Smuzhiyun }
1001*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(cryptd_ahash_queued);
1002*4882a593Smuzhiyun 
/*
 * Drop the allocation reference taken by cryptd_alloc_ahash().  The
 * transform is only freed once in-flight requests have also dropped theirs.
 */
void cryptd_free_ahash(struct cryptd_ahash *tfm)
{
	struct cryptd_hash_ctx *ctx = crypto_ahash_ctx(&tfm->base);

	if (refcount_dec_and_test(&ctx->refcnt))
		crypto_free_ahash(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_ahash);
1011*4882a593Smuzhiyun 
/*
 * Allocate a cryptd-wrapped AEAD, "cryptd(<alg_name>)", for in-kernel
 * users.  The refcount starts at 1 and is dropped by cryptd_free_aead().
 * Returns an ERR_PTR on failure (name too long, allocation failure, or the
 * lookup resolved to a transform not provided by this module).
 */
struct cryptd_aead *cryptd_alloc_aead(const char *alg_name,
						  u32 type, u32 mask)
{
	char cryptd_alg_name[CRYPTO_MAX_ALG_NAME];
	struct cryptd_aead_ctx *ctx;
	struct crypto_aead *tfm;

	if (snprintf(cryptd_alg_name, CRYPTO_MAX_ALG_NAME,
		     "cryptd(%s)", alg_name) >= CRYPTO_MAX_ALG_NAME)
		return ERR_PTR(-EINVAL);
	tfm = crypto_alloc_aead(cryptd_alg_name, type, mask);
	if (IS_ERR(tfm))
		return ERR_CAST(tfm);
	/* Reject lookups that matched an algorithm from another module. */
	if (tfm->base.__crt_alg->cra_module != THIS_MODULE) {
		crypto_free_aead(tfm);
		return ERR_PTR(-EINVAL);
	}

	ctx = crypto_aead_ctx(tfm);
	refcount_set(&ctx->refcnt, 1);

	return __cryptd_aead_cast(tfm);
}
EXPORT_SYMBOL_GPL(cryptd_alloc_aead);
1036*4882a593Smuzhiyun 
cryptd_aead_child(struct cryptd_aead * tfm)1037*4882a593Smuzhiyun struct crypto_aead *cryptd_aead_child(struct cryptd_aead *tfm)
1038*4882a593Smuzhiyun {
1039*4882a593Smuzhiyun 	struct cryptd_aead_ctx *ctx;
1040*4882a593Smuzhiyun 	ctx = crypto_aead_ctx(&tfm->base);
1041*4882a593Smuzhiyun 	return ctx->child;
1042*4882a593Smuzhiyun }
1043*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(cryptd_aead_child);
1044*4882a593Smuzhiyun 
cryptd_aead_queued(struct cryptd_aead * tfm)1045*4882a593Smuzhiyun bool cryptd_aead_queued(struct cryptd_aead *tfm)
1046*4882a593Smuzhiyun {
1047*4882a593Smuzhiyun 	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);
1048*4882a593Smuzhiyun 
1049*4882a593Smuzhiyun 	return refcount_read(&ctx->refcnt) - 1;
1050*4882a593Smuzhiyun }
1051*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(cryptd_aead_queued);
1052*4882a593Smuzhiyun 
/*
 * Drop the allocation reference taken by cryptd_alloc_aead().  The
 * transform is only freed once in-flight requests have also dropped theirs.
 */
void cryptd_free_aead(struct cryptd_aead *tfm)
{
	struct cryptd_aead_ctx *ctx = crypto_aead_ctx(&tfm->base);

	if (refcount_dec_and_test(&ctx->refcnt))
		crypto_free_aead(&tfm->base);
}
EXPORT_SYMBOL_GPL(cryptd_free_aead);
1061*4882a593Smuzhiyun 
/*
 * Module init: create the worker workqueue, the per-CPU request queues,
 * then register the "cryptd" template.  Cleanup on failure unwinds in
 * reverse order via the goto chain.
 */
static int __init cryptd_init(void)
{
	int err;

	/*
	 * NOTE(review): WQ_MEM_RECLAIM presumably because cryptd can sit on
	 * memory-reclaim I/O paths — confirm against users of this module.
	 */
	cryptd_wq = alloc_workqueue("cryptd", WQ_MEM_RECLAIM | WQ_CPU_INTENSIVE,
				    1);
	if (!cryptd_wq)
		return -ENOMEM;

	err = cryptd_init_queue(&queue, cryptd_max_cpu_qlen);
	if (err)
		goto err_destroy_wq;

	err = crypto_register_template(&cryptd_tmpl);
	if (err)
		goto err_fini_queue;

	return 0;

err_fini_queue:
	cryptd_fini_queue(&queue);
err_destroy_wq:
	destroy_workqueue(cryptd_wq);
	return err;
}
1087*4882a593Smuzhiyun 
/*
 * Module exit: destroy the workqueue first so no worker is still running,
 * then free the per-CPU queues, and finally unregister the template.
 */
static void __exit cryptd_exit(void)
{
	destroy_workqueue(cryptd_wq);
	cryptd_fini_queue(&queue);
	crypto_unregister_template(&cryptd_tmpl);
}
1094*4882a593Smuzhiyun 
/*
 * Registered at subsys_initcall level — NOTE(review): presumably so that
 * cryptd is available before dependent crypto modules init; confirm.
 */
subsys_initcall(cryptd_init);
module_exit(cryptd_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Software async crypto daemon");
MODULE_ALIAS_CRYPTO("cryptd");
1101