// SPDX-License-Identifier: GPL-2.0-only
/*
 * Accelerated GHASH implementation with Intel PCLMULQDQ-NI
 * instructions. This file contains glue code.
 *
 * Copyright (c) 2009 Intel Corp.
 *   Author: Huang Ying <ying.huang@intel.com>
 */

#include <linux/err.h>
#include <linux/module.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/crypto.h>
#include <crypto/algapi.h>
#include <crypto/cryptd.h>
#include <crypto/gf128mul.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <asm/cpu_device_id.h>
#include <asm/simd.h>

#define GHASH_BLOCK_SIZE	16
#define GHASH_DIGEST_SIZE	16

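/*
 * Low-level GHASH primitives implemented in PCLMULQDQ assembly (in the
 * accompanying ghash-clmulni-intel_asm.S); both expect the pre-shifted key
 * produced by ghash_setkey() below.
 */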
void clmul_ghash_mul(char *dst, const u128 *shash);

void clmul_ghash_update(char *dst, const char *src, unsigned int srclen,
			const u128 *shash);

struct ghash_async_ctx {
	struct cryptd_ahash *cryptd_tfm;
};

struct ghash_ctx {
	u128 shash;
};

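/*
 * Per-request state: buffer holds the running GHASH value (with any partial
 * input block already XORed in); bytes is how many more input bytes are
 * needed before the buffered block can be multiplied by the key.
 */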
struct ghash_desc_ctx {
	u8 buffer[GHASH_BLOCK_SIZE];
	u32 bytes;
};

static int ghash_init(struct shash_desc *desc)
{
	struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);

	memset(dctx, 0, sizeof(*dctx));

	return 0;
}

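/*
 * Precompute the hash key for the assembly routines: the two big-endian
 * halves are converted to host order and the 128-bit value is multiplied by
 * x in GF(2^128), i.e. shifted left by one bit and, when the top bit was
 * set, reduced with the 0xc2 term of the bit-reflected GHASH polynomial.
 */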
static int ghash_setkey(struct crypto_shash *tfm,
			const u8 *key, unsigned int keylen)
{
	struct ghash_ctx *ctx = crypto_shash_ctx(tfm);
	be128 *x = (be128 *)key;
	u64 a, b;

	if (keylen != GHASH_BLOCK_SIZE)
		return -EINVAL;

	/* perform multiplication by 'x' in GF(2^128) */
	a = be64_to_cpu(x->a);
	b = be64_to_cpu(x->b);

	ctx->shash.a = (b << 1) | (a >> 63);
	ctx->shash.b = (a << 1) | (b >> 63);

	if (a >> 63)
		ctx->shash.b ^= ((u64)0xc2) << 56;

	return 0;
}

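/*
 * Absorb data under kernel_fpu_begin()/kernel_fpu_end(): first top up and
 * multiply in any partially filled block left from the previous call, then
 * let the assembly routine consume the full blocks, and finally XOR the
 * remaining tail into the buffer to be completed later.
 */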
static int ghash_update(struct shash_desc *desc,
			const u8 *src, unsigned int srclen)
{
	struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);
	struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm);
	u8 *dst = dctx->buffer;

	kernel_fpu_begin();
	if (dctx->bytes) {
		int n = min(srclen, dctx->bytes);
		u8 *pos = dst + (GHASH_BLOCK_SIZE - dctx->bytes);

		dctx->bytes -= n;
		srclen -= n;

		while (n--)
			*pos++ ^= *src++;

		if (!dctx->bytes)
			clmul_ghash_mul(dst, &ctx->shash);
	}

	clmul_ghash_update(dst, src, srclen, &ctx->shash);
	kernel_fpu_end();

	if (srclen & 0xf) {
		src += srclen - (srclen & 0xf);
		srclen &= 0xf;
		dctx->bytes = GHASH_BLOCK_SIZE - srclen;
		while (srclen--)
			*dst++ ^= *src++;
	}

	return 0;
}

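/*
 * Finish off any buffered partial block: the missing bytes are treated as
 * zero padding and the block is multiplied in, leaving the final digest in
 * dctx->buffer.
 */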
static void ghash_flush(struct ghash_ctx *ctx, struct ghash_desc_ctx *dctx)
{
	u8 *dst = dctx->buffer;

	if (dctx->bytes) {
		u8 *tmp = dst + (GHASH_BLOCK_SIZE - dctx->bytes);

		while (dctx->bytes--)
			*tmp++ ^= 0;

		kernel_fpu_begin();
		clmul_ghash_mul(dst, &ctx->shash);
		kernel_fpu_end();
	}

	dctx->bytes = 0;
}

static int ghash_final(struct shash_desc *desc, u8 *dst)
{
	struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);
	struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm);
	u8 *buf = dctx->buffer;

	ghash_flush(ctx, dctx);
	memcpy(dst, buf, GHASH_BLOCK_SIZE);

	return 0;
}

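/*
 * The synchronous "__ghash" implementation. It touches FPU/SIMD state, so it
 * is registered as CRYPTO_ALG_INTERNAL and only ever reached through the
 * async wrapper below (directly when the FPU is usable, via cryptd otherwise).
 */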
static struct shash_alg ghash_alg = {
	.digestsize	= GHASH_DIGEST_SIZE,
	.init		= ghash_init,
	.update		= ghash_update,
	.final		= ghash_final,
	.setkey		= ghash_setkey,
	.descsize	= sizeof(struct ghash_desc_ctx),
	.base		= {
		.cra_name		= "__ghash",
		.cra_driver_name	= "__ghash-pclmulqdqni",
		.cra_priority		= 0,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= GHASH_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct ghash_ctx),
		.cra_module		= THIS_MODULE,
	},
};

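/*
 * Async wrapper: when SIMD registers are usable and, in atomic context,
 * cryptd has no queued requests to stay behind, the internal shash is called
 * synchronously; otherwise the request is handed to cryptd, which runs the
 * same shash from process context.
 */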
static int ghash_async_init(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
	struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
	struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);

	desc->tfm = child;
	return crypto_shash_init(desc);
}

static int ghash_async_update(struct ahash_request *req)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_update(cryptd_req);
	} else {
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);

		return shash_ahash_update(req, desc);
	}
}

static int ghash_async_final(struct ahash_request *req)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_final(cryptd_req);
	} else {
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);

		return crypto_shash_final(desc, req->result);
	}
}

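/*
 * export/import copy the raw ghash_desc_ctx so a partially hashed request
 * can be saved and resumed; import re-runs ghash_async_init() to bind the
 * child tfm before restoring the saved state.
 */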
static int ghash_async_import(struct ahash_request *req, const void *in)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
	struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);

	ghash_async_init(req);
	memcpy(dctx, in, sizeof(*dctx));
	return 0;
}

static int ghash_async_export(struct ahash_request *req, void *out)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
	struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);

	memcpy(out, dctx, sizeof(*dctx));
	return 0;
}

static int ghash_async_digest(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_digest(cryptd_req);
	} else {
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
		struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);

		desc->tfm = child;
		return shash_ahash_digest(req, desc);
	}
}

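/* Keys and request flags are simply forwarded to the cryptd child tfm. */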
static int ghash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct crypto_ahash *child = &ctx->cryptd_tfm->base;

	crypto_ahash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_ahash_set_flags(child, crypto_ahash_get_flags(tfm)
			       & CRYPTO_TFM_REQ_MASK);
	return crypto_ahash_setkey(child, key, keylen);
}

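/*
 * Allocate the cryptd wrapper around the internal "__ghash-pclmulqdqni"
 * shash and size the request context so it can hold the child's
 * ahash_request.
 */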
static int ghash_async_init_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_ahash *cryptd_tfm;
	struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);

	cryptd_tfm = cryptd_alloc_ahash("__ghash-pclmulqdqni",
					CRYPTO_ALG_INTERNAL,
					CRYPTO_ALG_INTERNAL);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ctx->cryptd_tfm = cryptd_tfm;
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct ahash_request) +
				 crypto_ahash_reqsize(&cryptd_tfm->base));

	return 0;
}

static void ghash_async_exit_tfm(struct crypto_tfm *tfm)
{
	struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);

	cryptd_free_ahash(ctx->cryptd_tfm);
}

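/*
 * The public "ghash" algorithm. Priority 400 makes it preferred over the
 * generic software implementation on CPUs that support PCLMULQDQ.
 */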
static struct ahash_alg ghash_async_alg = {
	.init		= ghash_async_init,
	.update		= ghash_async_update,
	.final		= ghash_async_final,
	.setkey		= ghash_async_setkey,
	.digest		= ghash_async_digest,
	.export		= ghash_async_export,
	.import		= ghash_async_import,
	.halg = {
		.digestsize	= GHASH_DIGEST_SIZE,
		.statesize	= sizeof(struct ghash_desc_ctx),
		.base = {
			.cra_name		= "ghash",
			.cra_driver_name	= "ghash-clmulni",
			.cra_priority		= 400,
			.cra_ctxsize		= sizeof(struct ghash_async_ctx),
			.cra_flags		= CRYPTO_ALG_ASYNC,
			.cra_blocksize		= GHASH_BLOCK_SIZE,
			.cra_module		= THIS_MODULE,
			.cra_init		= ghash_async_init_tfm,
			.cra_exit		= ghash_async_exit_tfm,
		},
	},
};

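/*
 * Match any CPU advertising the PCLMULQDQ feature; the device table lets the
 * module autoload on such hardware.
 */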
static const struct x86_cpu_id pcmul_cpu_id[] = {
	X86_MATCH_FEATURE(X86_FEATURE_PCLMULQDQ, NULL), /* Pickle-Mickle-Duck */
	{}
};
MODULE_DEVICE_TABLE(x86cpu, pcmul_cpu_id);

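/*
 * Register the internal shash first and then the public ahash wrapper,
 * unregistering the former if the latter fails.
 */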
static int __init ghash_pclmulqdqni_mod_init(void)
{
	int err;

	if (!x86_match_cpu(pcmul_cpu_id))
		return -ENODEV;

	err = crypto_register_shash(&ghash_alg);
	if (err)
		goto err_out;
	err = crypto_register_ahash(&ghash_async_alg);
	if (err)
		goto err_shash;

	return 0;

err_shash:
	crypto_unregister_shash(&ghash_alg);
err_out:
	return err;
}

static void __exit ghash_pclmulqdqni_mod_exit(void)
{
	crypto_unregister_ahash(&ghash_async_alg);
	crypto_unregister_shash(&ghash_alg);
}

module_init(ghash_pclmulqdqni_mod_init);
module_exit(ghash_pclmulqdqni_mod_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("GHASH hash function, accelerated by PCLMULQDQ-NI");
MODULE_ALIAS_CRYPTO("ghash");