/*
 * Driver for /dev/crypto device (aka CryptoDev)
 *
 * Copyright (c) 2010,2011 Nikos Mavrogiannopoulos <nmav@gnutls.org>
 * Portions Copyright (c) 2010 Michael Weiser
 * Portions Copyright (c) 2010 Phil Sutter
 *
 * This file is part of linux cryptodev.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.
 */

#include <linux/mm.h>
#include <linux/highmem.h>
#include <linux/ioctl.h>
#include <linux/random.h>
#include <linux/scatterlist.h>
#include <linux/uaccess.h>
#include <crypto/algapi.h>
#include <crypto/hash.h>
#include <crypto/aead.h>
#include <linux/rtnetlink.h>
#include <crypto/authenc.h>
#include "cryptodev.h"
#include "cipherapi.h"

#if (LINUX_VERSION_CODE < KERNEL_VERSION(5, 0, 0))
extern const struct crypto_type crypto_givcipher_type;
#endif

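/*
 * Completion callback shared by all async requests in this file. The
 * crypto API may invoke the callback with err == -EINPROGRESS merely to
 * signal that a backlogged request has moved to the hardware queue, so
 * we wake the waiter only when the final result is in.
 */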
static void cryptodev_complete(struct crypto_async_request *req, int err)
{
	struct cryptodev_result *res = req->data;

	if (err == -EINPROGRESS)
		return;

	res->err = err;
	complete(&res->completion);
}

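/*
 * Illustrative example: for authenc(hmac(sha1),cbc(aes)) with a 16-byte
 * AES key and a 20-byte HMAC key, the composite key length computed
 * below is 16 + 20 + RTA_SPACE(sizeof(struct crypto_authenc_key_param))
 * = 16 + 20 + 8 = 44 bytes.
 */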
int cryptodev_get_cipher_keylen(unsigned int *keylen, struct session_op *sop,
		int aead)
{
	/*
	 * For block ciphers (e.g. AES-CBC) or non-composite AEAD ciphers
	 * (e.g. AES-GCM), the key length is simply the cipher keylen obtained
	 * from userspace. If the cipher is a composite AEAD, the keylen is the
	 * sum of the cipher keylen, the HMAC keylen and a key header length.
	 * This key format is the one used by the Linux kernel for composite
	 * AEAD ciphers (crypto/authenc.c).
	 */
	unsigned int klen = sop->keylen;

	if (unlikely(sop->keylen > CRYPTO_CIPHER_MAX_KEY_LEN))
		return -EINVAL;

	if (aead && sop->mackeylen) {
		if (unlikely(sop->mackeylen > CRYPTO_HMAC_MAX_KEY_LEN))
			return -EINVAL;
		klen += sop->mackeylen;
		klen += RTA_SPACE(sizeof(struct crypto_authenc_key_param));
	}

	*keylen = klen;
	return 0;
}

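/*
 * Illustrative layout of the buffer built below, assuming the same
 * authenc example as above (20-byte HMAC key, 16-byte AES key):
 *
 *   bytes  0..7   rtattr header plus big-endian enckeylen (16)
 *   bytes  8..27  HMAC key copied from sop->mackey
 *   bytes 28..43  AES key copied from sop->key
 */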
int cryptodev_get_cipher_key(uint8_t *key, struct session_op *sop, int aead)
{
	/*
	 * Get the cipher key from user-space. For block ciphers just copy it
	 * from user-space. For composite AEAD ciphers combine it with the HMAC
	 * key in the format used by the Linux kernel in crypto/authenc.c:
	 *
	 * [[AUTHENC_KEY_HEADER + CIPHER_KEYLEN] [AUTHENTICATION KEY] [CIPHER KEY]]
	 */
	struct crypto_authenc_key_param *param;
	struct rtattr *rta;
	int ret = 0;

	if (aead && sop->mackeylen) {
		/*
		 * Composite AEAD ciphers. The first four bytes are the header
		 * type and header length for AEAD keys.
		 */
		rta = (void *)key;
		rta->rta_type = CRYPTO_AUTHENC_KEYA_PARAM;
		rta->rta_len = RTA_LENGTH(sizeof(*param));

		/* The next four bytes hold the length of the encryption key. */
		param = RTA_DATA(rta);
		param->enckeylen = cpu_to_be32(sop->keylen);

		/* Advance the key pointer eight bytes and copy the hmac key. */
		key += RTA_SPACE(sizeof(*param));
		if (unlikely(copy_from_user(key, sop->mackey, sop->mackeylen))) {
			ret = -EFAULT;
			goto error;
		}
		/* Advance the key pointer past the hmac key. */
		key += sop->mackeylen;
	}
	/* Now copy the block cipher key. */
	if (unlikely(copy_from_user(key, sop->key, sop->keylen)))
		ret = -EFAULT;

error:
	return ret;
}

/* Was a correct key length supplied? */
static int check_key_size(size_t keylen, const char *alg_name,
			  unsigned int min_keysize, unsigned int max_keysize)
{
	if (max_keysize > 0 && unlikely((keylen < min_keysize) ||
					(keylen > max_keysize))) {
		ddebug(1, "Wrong keylen '%zu' for algorithm '%s'. Use %u to %u.",
		       keylen, alg_name, min_keysize, max_keysize);
		return -EINVAL;
	}

	return 0;
}

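/*
 * Note: the cryptodev_crypto_*blkcipher* calls below are compatibility
 * wrappers (see cipherapi.h); on kernels >= 4.8 they are expected to map
 * to the skcipher API (crypto_alloc_skcipher() and friends), and to the
 * legacy ablkcipher API on older kernels.
 */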
int cryptodev_cipher_init(struct cipher_data *out, const char *alg_name,
				uint8_t *keyp, size_t keylen, int stream, int aead)
{
	int ret;

	if (aead == 0) {
		unsigned int min_keysize, max_keysize;
#if (LINUX_VERSION_CODE >= KERNEL_VERSION(4, 8, 0))
		struct crypto_tfm *tfm;
#else
		struct ablkcipher_alg *alg;
#endif

		out->async.s = cryptodev_crypto_alloc_blkcipher(alg_name, 0, 0);
		if (unlikely(IS_ERR(out->async.s))) {
			ddebug(1, "Failed to load cipher %s", alg_name);
			return PTR_ERR(out->async.s);
		}

#if (LINUX_VERSION_CODE >= KERNEL_VERSION(4, 8, 0))
		tfm = crypto_skcipher_tfm(out->async.s);
#if (LINUX_VERSION_CODE <= KERNEL_VERSION(5, 4, 0))
		if ((tfm->__crt_alg->cra_type == &crypto_ablkcipher_type)
#if (LINUX_VERSION_CODE < KERNEL_VERSION(5, 0, 0))
		    || (tfm->__crt_alg->cra_type == &crypto_givcipher_type)
#endif
							) {
			struct ablkcipher_alg *alg;

			alg = &tfm->__crt_alg->cra_ablkcipher;
			min_keysize = alg->min_keysize;
			max_keysize = alg->max_keysize;
		} else
#endif
		{
			struct skcipher_alg *alg;

			alg = crypto_skcipher_alg(out->async.s);
			min_keysize = alg->min_keysize;
			max_keysize = alg->max_keysize;
		}
#else
		alg = crypto_ablkcipher_alg(out->async.s);
		min_keysize = alg->min_keysize;
		max_keysize = alg->max_keysize;
#endif
		ret = check_key_size(keylen, alg_name, min_keysize,
				     max_keysize);
		if (ret)
			goto error;

		out->blocksize = cryptodev_crypto_blkcipher_blocksize(out->async.s);
		out->ivsize = cryptodev_crypto_blkcipher_ivsize(out->async.s);
		out->alignmask = cryptodev_crypto_blkcipher_alignmask(out->async.s);

		ret = cryptodev_crypto_blkcipher_setkey(out->async.s, keyp, keylen);
	} else {
		out->async.as = crypto_alloc_aead(alg_name, 0, 0);
		if (unlikely(IS_ERR(out->async.as))) {
			ddebug(1, "Failed to load cipher %s", alg_name);
			return PTR_ERR(out->async.as);
		}

		out->blocksize = crypto_aead_blocksize(out->async.as);
		out->ivsize = crypto_aead_ivsize(out->async.as);
		out->alignmask = crypto_aead_alignmask(out->async.as);

		ret = crypto_aead_setkey(out->async.as, keyp, keylen);
	}

	if (unlikely(ret)) {
		ddebug(1, "Setting key failed for %s-%zu.", alg_name, keylen*8);
		ret = -EINVAL;
		goto error;
	}

	out->stream = stream;
	out->aead = aead;

	init_completion(&out->async.result.completion);

	if (aead == 0) {
		out->async.request = cryptodev_blkcipher_request_alloc(out->async.s, GFP_KERNEL);
		if (unlikely(!out->async.request)) {
			derr(1, "error allocating async crypto request");
			ret = -ENOMEM;
			goto error;
		}

		cryptodev_blkcipher_request_set_callback(out->async.request,
					CRYPTO_TFM_REQ_MAY_BACKLOG,
					cryptodev_complete, &out->async.result);
	} else {
		out->async.arequest = aead_request_alloc(out->async.as, GFP_KERNEL);
		if (unlikely(!out->async.arequest)) {
			derr(1, "error allocating async crypto request");
			ret = -ENOMEM;
			goto error;
		}

		aead_request_set_callback(out->async.arequest,
					CRYPTO_TFM_REQ_MAY_BACKLOG,
					cryptodev_complete, &out->async.result);
	}

	out->init = 1;
	return 0;
error:
	if (aead == 0) {
		cryptodev_blkcipher_request_free(out->async.request);
		cryptodev_crypto_free_blkcipher(out->async.s);
	} else {
		if (out->async.arequest)
			aead_request_free(out->async.arequest);
		if (out->async.as)
			crypto_free_aead(out->async.as);
	}

	return ret;
}

void cryptodev_cipher_deinit(struct cipher_data *cdata)
{
	if (cdata->init) {
		if (cdata->aead == 0) {
			cryptodev_blkcipher_request_free(cdata->async.request);
			cryptodev_crypto_free_blkcipher(cdata->async.s);
		} else {
			if (cdata->async.arequest)
				aead_request_free(cdata->async.arequest);
			if (cdata->async.as)
				crypto_free_aead(cdata->async.as);
		}

		cdata->init = 0;
	}
}

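/*
 * Map the return value of a crypto operation to its final status: 0 means
 * the request completed synchronously; -EINPROGRESS or -EBUSY mean it was
 * queued (possibly backlogged) and we must sleep until cryptodev_complete()
 * signals the completion; anything else is an immediate error.
 */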
static inline int waitfor(struct cryptodev_result *cr, ssize_t ret)
{
	switch (ret) {
	case 0:
		break;
	case -EINPROGRESS:
	case -EBUSY:
		wait_for_completion(&cr->completion);
		/* At this point we know for sure the request has finished,
		 * because wait_for_completion above was not interruptible.
		 * This is important because otherwise hardware or the driver
		 * might try to access memory which will be freed or reused for
		 * another request. */

		if (unlikely(cr->err)) {
			derr(0, "error from async request: %d", cr->err);
			return cr->err;
		}

		break;
	default:
		return ret;
	}

	return 0;
}

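/*
 * Minimal usage sketch (illustrative only; `buf` and `len` are
 * hypothetical, cdata is assumed to have been set up with
 * cryptodev_cipher_init() and cdata->async.iv loaded beforehand):
 *
 *	struct scatterlist sg;
 *
 *	sg_init_one(&sg, buf, len);
 *	ret = cryptodev_cipher_encrypt(cdata, &sg, &sg, len);
 */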
ssize_t cryptodev_cipher_encrypt(struct cipher_data *cdata,
		const struct scatterlist *src, struct scatterlist *dst,
		size_t len)
{
	int ret;

	reinit_completion(&cdata->async.result.completion);

	if (cdata->aead == 0) {
		cryptodev_blkcipher_request_set_crypt(cdata->async.request,
			(struct scatterlist *)src, dst,
			len, cdata->async.iv);
		ret = cryptodev_crypto_blkcipher_encrypt(cdata->async.request);
	} else {
		aead_request_set_crypt(cdata->async.arequest,
			(struct scatterlist *)src, dst,
			len, cdata->async.iv);
		ret = crypto_aead_encrypt(cdata->async.arequest);
	}

	return waitfor(&cdata->async.result, ret);
}

ssize_t cryptodev_cipher_decrypt(struct cipher_data *cdata,
		const struct scatterlist *src, struct scatterlist *dst,
		size_t len)
{
	int ret;

	reinit_completion(&cdata->async.result.completion);
	if (cdata->aead == 0) {
		cryptodev_blkcipher_request_set_crypt(cdata->async.request,
			(struct scatterlist *)src, dst,
			len, cdata->async.iv);
		ret = cryptodev_crypto_blkcipher_decrypt(cdata->async.request);
	} else {
		aead_request_set_crypt(cdata->async.arequest,
			(struct scatterlist *)src, dst,
			len, cdata->async.iv);
		ret = crypto_aead_decrypt(cdata->async.arequest);
	}

	return waitfor(&cdata->async.result, ret);
}

/* Hash functions */

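/*
 * The hash functions below follow the usual init/update/final pattern:
 * cryptodev_hash_init() once per session, cryptodev_hash_reset() to start
 * a digest, one or more cryptodev_hash_update() calls, and finally
 * cryptodev_hash_final() to retrieve the digest.
 */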
int cryptodev_hash_init(struct hash_data *hdata, const char *alg_name,
			int hmac_mode, void *mackey, size_t mackeylen)
{
	int ret;

	hdata->async.s = crypto_alloc_ahash(alg_name, 0, 0);
	if (unlikely(IS_ERR(hdata->async.s))) {
		ddebug(1, "Failed to load transform for %s", alg_name);
		return PTR_ERR(hdata->async.s);
	}

	/* In HMAC mode, set the (already copied-in) MAC key on the TFM. */
	if (hmac_mode != 0) {
		ret = crypto_ahash_setkey(hdata->async.s, mackey, mackeylen);
		if (unlikely(ret)) {
			ddebug(1, "Setting hmac key failed for %s-%zu.",
					alg_name, mackeylen*8);
			ret = -EINVAL;
			goto error;
		}
	}

	hdata->digestsize = crypto_ahash_digestsize(hdata->async.s);
	hdata->alignmask = crypto_ahash_alignmask(hdata->async.s);

	init_completion(&hdata->async.result.completion);

	hdata->async.request = ahash_request_alloc(hdata->async.s, GFP_KERNEL);
	if (unlikely(!hdata->async.request)) {
		derr(0, "error allocating async crypto request");
		ret = -ENOMEM;
		goto error;
	}

	ahash_request_set_callback(hdata->async.request,
			CRYPTO_TFM_REQ_MAY_BACKLOG,
			cryptodev_complete, &hdata->async.result);
	hdata->init = 1;
	return 0;

error:
	crypto_free_ahash(hdata->async.s);
	return ret;
}

void cryptodev_hash_deinit(struct hash_data *hdata)
{
	if (hdata->init) {
		ahash_request_free(hdata->async.request);
		crypto_free_ahash(hdata->async.s);
		hdata->init = 0;
	}
}

int cryptodev_hash_reset(struct hash_data *hdata)
{
	int ret;

	ret = crypto_ahash_init(hdata->async.request);
	if (unlikely(ret)) {
		derr(0, "error in crypto_ahash_init()");
		return ret;
	}

	return 0;
}

ssize_t cryptodev_hash_update(struct hash_data *hdata,
				struct scatterlist *sg, size_t len)
{
	int ret;

	reinit_completion(&hdata->async.result.completion);
	ahash_request_set_crypt(hdata->async.request, sg, NULL, len);

	ret = crypto_ahash_update(hdata->async.request);

	return waitfor(&hdata->async.result, ret);
}

int cryptodev_hash_final(struct hash_data *hdata, void *output)
{
	int ret;

	reinit_completion(&hdata->async.result.completion);
	ahash_request_set_crypt(hdata->async.request, NULL, output, 0);

	ret = crypto_ahash_final(hdata->async.request);

	return waitfor(&hdata->async.result, ret);
}

#ifdef CIOCCPHASH
/* Copy the current hash state of src to dst (export from src, import into dst). */
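/*
 * This relies on the underlying driver implementing the export/import
 * hooks; drivers that do not will return -ENOSYS, which is reported below.
 */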
int cryptodev_hash_copy(struct hash_data *dst, struct hash_data *src)
{
	int ret, statesize;
	void *statedata = NULL;
	struct crypto_tfm *tfm;

	if (unlikely(src == NULL || !src->init ||
		     dst == NULL || !dst->init)) {
		return -EINVAL;
	}

	reinit_completion(&src->async.result.completion);

	statesize = crypto_ahash_statesize(src->async.s);
	if (unlikely(statesize <= 0)) {
		return -EINVAL;
	}

	statedata = kzalloc(statesize, GFP_KERNEL);
	if (unlikely(statedata == NULL)) {
		return -ENOMEM;
	}

	ret = crypto_ahash_export(src->async.request, statedata);
	if (unlikely(ret < 0)) {
		if (unlikely(ret == -ENOSYS)) {
			tfm = crypto_ahash_tfm(src->async.s);
			derr(0, "cryptodev_hash_copy: crypto_ahash_export not implemented for "
				"alg='%s', driver='%s'", crypto_tfm_alg_name(tfm),
				crypto_tfm_alg_driver_name(tfm));
		}
		goto out;
	}

	ret = crypto_ahash_import(dst->async.request, statedata);
	if (unlikely(ret == -ENOSYS)) {
		tfm = crypto_ahash_tfm(dst->async.s);
		derr(0, "cryptodev_hash_copy: crypto_ahash_import not implemented for "
			"alg='%s', driver='%s'", crypto_tfm_alg_name(tfm),
			crypto_tfm_alg_driver_name(tfm));
	}
out:
	kfree(statedata);
	return ret;
}
#endif /* CIOCCPHASH */