// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Synchronous Compression operations
 *
 * Copyright 2015 LG Electronics Inc.
 * Copyright (c) 2016, Intel Corporation
 * Author: Giovanni Cabiddu <giovanni.cabiddu@intel.com>
 */
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/seq_file.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/crypto.h>
#include <linux/compiler.h>
#include <linux/vmalloc.h>
#include <crypto/algapi.h>
#include <linux/cryptouser.h>
#include <net/netlink.h>
#include <linux/scatterlist.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/acompress.h>
#include <crypto/internal/scompress.h>
#include "internal.h"

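/*
 * Per-CPU scratch buffers used to linearize the caller's source and
 * destination scatterlists for the synchronous algorithm.  Each buffer is
 * SCOMP_SCRATCH_SIZE bytes, allocated when the first scomp transform is
 * instantiated and protected by the per-CPU spinlock below.
 */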
struct scomp_scratch {
	spinlock_t	lock;
	void		*src;
	void		*dst;
};

static DEFINE_PER_CPU(struct scomp_scratch, scomp_scratch) = {
	.lock = __SPIN_LOCK_UNLOCKED(scomp_scratch.lock),
};

static const struct crypto_type crypto_scomp_type;
static int scomp_scratch_users;
static DEFINE_MUTEX(scomp_lock);

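/*
 * Report this algorithm to userspace as type "scomp" over the crypto
 * netlink interface; returns -ENOSYS when CONFIG_NET is disabled.
 */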
#ifdef CONFIG_NET
static int crypto_scomp_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_comp rscomp;

	memset(&rscomp, 0, sizeof(rscomp));

	strscpy(rscomp.type, "scomp", sizeof(rscomp.type));

	return nla_put(skb, CRYPTOCFGA_REPORT_COMPRESS,
		       sizeof(rscomp), &rscomp);
}
#else
static int crypto_scomp_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	return -ENOSYS;
}
#endif

static void crypto_scomp_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;

static void crypto_scomp_show(struct seq_file *m, struct crypto_alg *alg)
{
	seq_puts(m, "type         : scomp\n");
}

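/*
 * Scratch buffer lifecycle: buffers for all possible CPUs are allocated
 * when the first scomp transform is initialized and freed again when the
 * last user goes away.  The user count and alloc/free paths are
 * serialized by scomp_lock (see crypto_scomp_init_tfm() and
 * crypto_exit_scomp_ops_async() below).
 */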
static void crypto_scomp_free_scratches(void)
{
	struct scomp_scratch *scratch;
	int i;

	for_each_possible_cpu(i) {
		scratch = per_cpu_ptr(&scomp_scratch, i);

		vfree(scratch->src);
		vfree(scratch->dst);
		scratch->src = NULL;
		scratch->dst = NULL;
	}
}

static int crypto_scomp_alloc_scratches(void)
{
	struct scomp_scratch *scratch;
	int i;

	for_each_possible_cpu(i) {
		void *mem;

		scratch = per_cpu_ptr(&scomp_scratch, i);

		mem = vmalloc_node(SCOMP_SCRATCH_SIZE, cpu_to_node(i));
		if (!mem)
			goto error;
		scratch->src = mem;
		mem = vmalloc_node(SCOMP_SCRATCH_SIZE, cpu_to_node(i));
		if (!mem)
			goto error;
		scratch->dst = mem;
	}
	return 0;
error:
	crypto_scomp_free_scratches();
	return -ENOMEM;
}

static int crypto_scomp_init_tfm(struct crypto_tfm *tfm)
{
	int ret = 0;

	mutex_lock(&scomp_lock);
	if (!scomp_scratch_users++)
		ret = crypto_scomp_alloc_scratches();
	mutex_unlock(&scomp_lock);

	return ret;
}

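/*
 * Common implementation behind the acomp compress()/decompress() entry
 * points: copy the source scatterlist into the per-CPU src scratch buffer,
 * run the synchronous algorithm into the dst scratch buffer, then copy the
 * result back out to the destination scatterlist, allocating one with
 * sgl_alloc() if the caller did not supply it.  dir != 0 compresses,
 * dir == 0 decompresses.  Input and output are both bounded by
 * SCOMP_SCRATCH_SIZE.
 */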
static int scomp_acomp_comp_decomp(struct acomp_req *req, int dir)
{
	struct crypto_acomp *tfm = crypto_acomp_reqtfm(req);
	void **tfm_ctx = acomp_tfm_ctx(tfm);
	struct crypto_scomp *scomp = *tfm_ctx;
	void **ctx = acomp_request_ctx(req);
	struct scomp_scratch *scratch;
	int ret;

	if (!req->src || !req->slen || req->slen > SCOMP_SCRATCH_SIZE)
		return -EINVAL;

	if (req->dst && !req->dlen)
		return -EINVAL;

	if (!req->dlen || req->dlen > SCOMP_SCRATCH_SIZE)
		req->dlen = SCOMP_SCRATCH_SIZE;

	scratch = raw_cpu_ptr(&scomp_scratch);
	spin_lock(&scratch->lock);

	scatterwalk_map_and_copy(scratch->src, req->src, 0, req->slen, 0);
	if (dir)
		ret = crypto_scomp_compress(scomp, scratch->src, req->slen,
					    scratch->dst, &req->dlen, *ctx);
	else
		ret = crypto_scomp_decompress(scomp, scratch->src, req->slen,
					      scratch->dst, &req->dlen, *ctx);
	if (!ret) {
		if (!req->dst) {
			req->dst = sgl_alloc(req->dlen, GFP_ATOMIC, NULL);
			if (!req->dst) {
				ret = -ENOMEM;
				goto out;
			}
		}
		scatterwalk_map_and_copy(scratch->dst, req->dst, 0, req->dlen,
					 1);
	}
out:
	spin_unlock(&scratch->lock);
	return ret;
}

static int scomp_acomp_compress(struct acomp_req *req)
{
	return scomp_acomp_comp_decomp(req, 1);
}

static int scomp_acomp_decompress(struct acomp_req *req)
{
	return scomp_acomp_comp_decomp(req, 0);
}

static void crypto_exit_scomp_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_scomp **ctx = crypto_tfm_ctx(tfm);

	crypto_free_scomp(*ctx);

	mutex_lock(&scomp_lock);
	if (!--scomp_scratch_users)
		crypto_scomp_free_scratches();
	mutex_unlock(&scomp_lock);
}

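/*
 * Wire up the acomp entry points for a synchronous (scomp) algorithm so
 * it can be driven through the asynchronous acomp API.  The acomp core
 * invokes this when the underlying algorithm is of type scomp.
 */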
int crypto_init_scomp_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct crypto_acomp *crt = __crypto_acomp_tfm(tfm);
	struct crypto_scomp **ctx = crypto_tfm_ctx(tfm);
	struct crypto_scomp *scomp;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	scomp = crypto_create_tfm(calg, &crypto_scomp_type);
	if (IS_ERR(scomp)) {
		crypto_mod_put(calg);
		return PTR_ERR(scomp);
	}

	*ctx = scomp;
	tfm->exit = crypto_exit_scomp_ops_async;

	crt->compress = scomp_acomp_compress;
	crt->decompress = scomp_acomp_decompress;
	crt->dst_free = sgl_free;
	crt->reqsize = sizeof(void *);

	return 0;
}

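/*
 * Allocate the algorithm-specific context for an scomp-backed acomp
 * request and stash it in the request's context area.  On failure the
 * request itself is freed and NULL is returned.
 */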
struct acomp_req *crypto_acomp_scomp_alloc_ctx(struct acomp_req *req)
{
	struct crypto_acomp *acomp = crypto_acomp_reqtfm(req);
	struct crypto_tfm *tfm = crypto_acomp_tfm(acomp);
	struct crypto_scomp **tfm_ctx = crypto_tfm_ctx(tfm);
	struct crypto_scomp *scomp = *tfm_ctx;
	void *ctx;

	ctx = crypto_scomp_alloc_ctx(scomp);
	if (IS_ERR(ctx)) {
		kfree(req);
		return NULL;
	}

	*req->__ctx = ctx;

	return req;
}

void crypto_acomp_scomp_free_ctx(struct acomp_req *req)
{
	struct crypto_acomp *acomp = crypto_acomp_reqtfm(req);
	struct crypto_tfm *tfm = crypto_acomp_tfm(acomp);
	struct crypto_scomp **tfm_ctx = crypto_tfm_ctx(tfm);
	struct crypto_scomp *scomp = *tfm_ctx;
	void *ctx = *req->__ctx;

	if (ctx)
		crypto_scomp_free_ctx(scomp, ctx);
}

static const struct crypto_type crypto_scomp_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_scomp_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_scomp_show,
#endif
	.report = crypto_scomp_report,
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SCOMPRESS,
	.tfmsize = offsetof(struct crypto_scomp, base),
};

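/*
 * Flag the algorithm as CRYPTO_ALG_TYPE_SCOMPRESS and register it with
 * the crypto core.
 */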
int crypto_register_scomp(struct scomp_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	base->cra_type = &crypto_scomp_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_SCOMPRESS;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_scomp);

void crypto_unregister_scomp(struct scomp_alg *alg)
{
	crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_scomp);

int crypto_register_scomps(struct scomp_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_scomp(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_scomp(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_scomps);

void crypto_unregister_scomps(struct scomp_alg *algs, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_scomp(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_scomps);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous compression type");