// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * 842 Software Decompression
 *
 * Copyright (C) 2015 Dan Streetman, IBM Corp
 *
 * See 842.h for details of the 842 compressed format.
 */
9*4882a593Smuzhiyun
10*4882a593Smuzhiyun #define pr_fmt(fmt) KBUILD_MODNAME ": " fmt
11*4882a593Smuzhiyun #define MODULE_NAME "842_decompress"
12*4882a593Smuzhiyun
13*4882a593Smuzhiyun #include "842.h"
14*4882a593Smuzhiyun #include "842_debugfs.h"
15*4882a593Smuzhiyun
16*4882a593Smuzhiyun /* rolling fifo sizes */
17*4882a593Smuzhiyun #define I2_FIFO_SIZE (2 * (1 << I2_BITS))
18*4882a593Smuzhiyun #define I4_FIFO_SIZE (4 * (1 << I4_BITS))
19*4882a593Smuzhiyun #define I8_FIFO_SIZE (8 * (1 << I8_BITS))
20*4882a593Smuzhiyun
21*4882a593Smuzhiyun static u8 decomp_ops[OPS_MAX][4] = {
22*4882a593Smuzhiyun { D8, N0, N0, N0 },
23*4882a593Smuzhiyun { D4, D2, I2, N0 },
24*4882a593Smuzhiyun { D4, I2, D2, N0 },
25*4882a593Smuzhiyun { D4, I2, I2, N0 },
26*4882a593Smuzhiyun { D4, I4, N0, N0 },
27*4882a593Smuzhiyun { D2, I2, D4, N0 },
28*4882a593Smuzhiyun { D2, I2, D2, I2 },
29*4882a593Smuzhiyun { D2, I2, I2, D2 },
30*4882a593Smuzhiyun { D2, I2, I2, I2 },
31*4882a593Smuzhiyun { D2, I2, I4, N0 },
32*4882a593Smuzhiyun { I2, D2, D4, N0 },
33*4882a593Smuzhiyun { I2, D4, I2, N0 },
34*4882a593Smuzhiyun { I2, D2, I2, D2 },
35*4882a593Smuzhiyun { I2, D2, I2, I2 },
36*4882a593Smuzhiyun { I2, D2, I4, N0 },
37*4882a593Smuzhiyun { I2, I2, D4, N0 },
38*4882a593Smuzhiyun { I2, I2, D2, I2 },
39*4882a593Smuzhiyun { I2, I2, I2, D2 },
40*4882a593Smuzhiyun { I2, I2, I2, I2 },
41*4882a593Smuzhiyun { I2, I2, I4, N0 },
42*4882a593Smuzhiyun { I4, D4, N0, N0 },
43*4882a593Smuzhiyun { I4, D2, I2, N0 },
44*4882a593Smuzhiyun { I4, I2, D2, N0 },
45*4882a593Smuzhiyun { I4, I2, I2, N0 },
46*4882a593Smuzhiyun { I4, I4, N0, N0 },
47*4882a593Smuzhiyun { I8, N0, N0, N0 }
48*4882a593Smuzhiyun };
49*4882a593Smuzhiyun
/*
 * Parser state for one decompression pass: a bit-granular read cursor
 * over the compressed input plus a byte-granular write cursor over the
 * output buffer.
 */
struct sw842_param {
	u8 *in;		/* current input byte */
	u8 bit;		/* bits of *in already consumed (0..7, MSB first) */
	u64 ilen;	/* input bytes remaining, counting from *in */
	u8 *out;	/* next output byte to write */
	u8 *ostart;	/* start of output buffer (base for fifo lookups) */
	u64 olen;	/* output bytes still available */
};
58*4882a593Smuzhiyun
/*
 * Load a 2-, 4-, or 8-byte big-endian value from possibly-unaligned
 * memory at d; evaluates to 0 for any other size s.  Used only for the
 * pr_debug() dump in __do_index().
 */
#define beN_to_cpu(d, s)					\
	((s) == 2 ? be16_to_cpu(get_unaligned((__be16 *)d)) :	\
	 (s) == 4 ? be32_to_cpu(get_unaligned((__be32 *)d)) :	\
	 (s) == 8 ? be64_to_cpu(get_unaligned((__be64 *)d)) :	\
	 0)

/* forward declaration: next_bits() and __split_next_bits() call each other */
static int next_bits(struct sw842_param *p, u64 *d, u8 n);
66*4882a593Smuzhiyun
/*
 * Read an n-bit field as two separate reads: the upper (n - s) bits
 * first, then the lower s bits, recombining into *d.  Used by
 * next_bits() when a single read would cross the limits it can handle.
 * Returns 0 on success or a negative errno.
 */
static int __split_next_bits(struct sw842_param *p, u64 *d, u8 n, u8 s)
{
	u64 hi = 0;
	int err;

	if (n <= s) {
		pr_debug("split_next_bits invalid n %u s %u\n", n, s);
		return -EINVAL;
	}

	err = next_bits(p, &hi, n - s);
	if (!err)
		err = next_bits(p, d, s);
	if (err)
		return err;

	*d |= hi << s;
	return 0;
}
86*4882a593Smuzhiyun
/*
 * Read the next n bits (n <= 64) from the input stream into *d and
 * advance the bit cursor.  Bits are consumed most-significant first
 * within each byte.  Returns 0 on success, -EINVAL for a bad n, or
 * -EOVERFLOW if the read would run past the end of the input.
 */
static int next_bits(struct sw842_param *p, u64 *d, u8 n)
{
	/* bits = total bits spanned from the start of the current byte */
	u8 *in = p->in, b = p->bit, bits = b + n;

	if (n > 64) {
		pr_debug("next_bits invalid n %u\n", n);
		return -EINVAL;
	}

	/* split this up if reading > 8 bytes, or if we're at the end of
	 * the input buffer and would read past the end
	 */
	if (bits > 64)
		return __split_next_bits(p, d, n, 32);
	else if (p->ilen < 8 && bits > 32 && bits <= 56)
		return __split_next_bits(p, d, n, 16);
	else if (p->ilen < 4 && bits > 16 && bits <= 24)
		return __split_next_bits(p, d, n, 8);

	/* bail out if even the split-adjusted read exceeds the input */
	if (DIV_ROUND_UP(bits, 8) > p->ilen)
		return -EOVERFLOW;

	/* load the smallest big-endian unit covering the span, then
	 * right-shift so the wanted field lands in the low bits
	 */
	if (bits <= 8)
		*d = *in >> (8 - bits);
	else if (bits <= 16)
		*d = be16_to_cpu(get_unaligned((__be16 *)in)) >> (16 - bits);
	else if (bits <= 32)
		*d = be32_to_cpu(get_unaligned((__be32 *)in)) >> (32 - bits);
	else
		*d = be64_to_cpu(get_unaligned((__be64 *)in)) >> (64 - bits);

	/* mask off any already-consumed high bits of the current byte */
	*d &= GENMASK_ULL(n - 1, 0);

	p->bit += n;

	/* normalize the cursor: carry whole bytes out of the bit count */
	if (p->bit > 7) {
		p->in += p->bit / 8;
		p->ilen -= p->bit / 8;
		p->bit %= 8;
	}

	return 0;
}
130*4882a593Smuzhiyun
/*
 * Decode an n-byte (2, 4, or 8) literal data field from the input
 * stream and store it big-endian at the output cursor.  Returns 0 on
 * success, -ENOSPC if the output buffer is full, -EINVAL for a bad n,
 * or an error from next_bits().
 */
static int do_data(struct sw842_param *p, u8 n)
{
	u64 v;
	int err;

	if (n > p->olen)
		return -ENOSPC;

	err = next_bits(p, &v, n * 8);
	if (err)
		return err;

	if (n == 2)
		put_unaligned(cpu_to_be16((u16)v), (__be16 *)p->out);
	else if (n == 4)
		put_unaligned(cpu_to_be32((u32)v), (__be32 *)p->out);
	else if (n == 8)
		put_unaligned(cpu_to_be64((u64)v), (__be64 *)p->out);
	else
		return -EINVAL;

	p->out += n;
	p->olen -= n;

	return 0;
}
162*4882a593Smuzhiyun
/*
 * Decode an index reference: read a bits-wide fifo index from the
 * input, translate it to an absolute offset in the already-produced
 * output, and copy size bytes from there to the output cursor.
 *
 * The fifo is conceptually a ring of fsize bytes sliding over the
 * output; index * size gives a position within the current fsize-sized
 * window, which must be mapped back to an absolute output offset.
 * Returns 0 on success, -EINVAL if the reference points at data that
 * has not been produced yet, or an error from next_bits().
 */
static int __do_index(struct sw842_param *p, u8 size, u8 bits, u64 fsize)
{
	/* total = bytes output so far, rounded down to 8-byte groups */
	u64 index, offset, total = round_down(p->out - p->ostart, 8);
	int ret;

	ret = next_bits(p, &index, bits);
	if (ret)
		return ret;

	offset = index * size;

	/* a ring buffer of fsize is used; correct the offset */
	if (total > fsize) {
		/* this is where the current fifo is */
		u64 section = round_down(total, fsize);
		/* the current pos in the fifo */
		u64 pos = total - section;

		/* if the offset is past/at the pos, we need to
		 * go back to the last fifo section
		 */
		if (offset >= pos)
			section -= fsize;

		offset += section;
	}

	/* the reference must lie entirely within already-written output */
	if (offset + size > total) {
		pr_debug("index%x %lx points past end %lx\n", size,
			 (unsigned long)offset, (unsigned long)total);
		return -EINVAL;
	}

	/* size is fixed per caller; anything else is an internal bug */
	if (size != 2 && size != 4 && size != 8)
		WARN(1, "__do_index invalid size %x\n", size);
	else
		pr_debug("index%x to %lx off %lx adjoff %lx tot %lx data %lx\n",
			 size, (unsigned long)index,
			 (unsigned long)(index * size), (unsigned long)offset,
			 (unsigned long)total,
			 (unsigned long)beN_to_cpu(&p->ostart[offset], size));

	memcpy(p->out, &p->ostart[offset], size);
	p->out += size;
	p->olen -= size;

	return 0;
}
211*4882a593Smuzhiyun
/*
 * Dispatch an index sub-op of width n (2, 4, or 8 bytes) to
 * __do_index() with the matching index width and fifo size.
 * Returns -EINVAL for any other n.
 */
static int do_index(struct sw842_param *p, u8 n)
{
	if (n == 2)
		return __do_index(p, 2, I2_BITS, I2_FIFO_SIZE);
	if (n == 4)
		return __do_index(p, 4, I4_BITS, I4_FIFO_SIZE);
	if (n == 8)
		return __do_index(p, 8, I8_BITS, I8_FIFO_SIZE);

	return -EINVAL;
}
225*4882a593Smuzhiyun
/*
 * Execute one decode template: run its four sub-operations from
 * decomp_ops[o] in order, stopping at the first failure.  Returns 0 on
 * success or the first sub-op's error code.
 */
static int do_op(struct sw842_param *p, u8 o)
{
	int i;

	if (o >= OPS_MAX)
		return -EINVAL;

	for (i = 0; i < 4; i++) {
		u8 op = decomp_ops[o][i];
		int err = 0;

		pr_debug("op is %x\n", op);

		switch (op & OP_ACTION) {
		case OP_ACTION_DATA:
			err = do_data(p, op & OP_AMOUNT);
			break;
		case OP_ACTION_INDEX:
			err = do_index(p, op & OP_AMOUNT);
			break;
		case OP_ACTION_NOOP:
			break;
		default:
			pr_err("Internal error, invalid op %x\n", op);
			return -EINVAL;
		}

		if (err)
			return err;
	}

	if (sw842_template_counts)
		atomic_inc(&template_count[o]);

	return 0;
}
261*4882a593Smuzhiyun
262*4882a593Smuzhiyun /**
263*4882a593Smuzhiyun * sw842_decompress
264*4882a593Smuzhiyun *
265*4882a593Smuzhiyun * Decompress the 842-compressed buffer of length @ilen at @in
266*4882a593Smuzhiyun * to the output buffer @out, using no more than @olen bytes.
267*4882a593Smuzhiyun *
268*4882a593Smuzhiyun * The compressed buffer must be only a single 842-compressed buffer,
269*4882a593Smuzhiyun * with the standard format described in the comments in 842.h
270*4882a593Smuzhiyun * Processing will stop when the 842 "END" template is detected,
271*4882a593Smuzhiyun * not the end of the buffer.
272*4882a593Smuzhiyun *
273*4882a593Smuzhiyun * Returns: 0 on success, error on failure. The @olen parameter
274*4882a593Smuzhiyun * will contain the number of output bytes written on success, or
275*4882a593Smuzhiyun * 0 on error.
276*4882a593Smuzhiyun */
sw842_decompress(const u8 * in,unsigned int ilen,u8 * out,unsigned int * olen)277*4882a593Smuzhiyun int sw842_decompress(const u8 *in, unsigned int ilen,
278*4882a593Smuzhiyun u8 *out, unsigned int *olen)
279*4882a593Smuzhiyun {
280*4882a593Smuzhiyun struct sw842_param p;
281*4882a593Smuzhiyun int ret;
282*4882a593Smuzhiyun u64 op, rep, tmp, bytes, total;
283*4882a593Smuzhiyun u64 crc;
284*4882a593Smuzhiyun
285*4882a593Smuzhiyun p.in = (u8 *)in;
286*4882a593Smuzhiyun p.bit = 0;
287*4882a593Smuzhiyun p.ilen = ilen;
288*4882a593Smuzhiyun p.out = out;
289*4882a593Smuzhiyun p.ostart = out;
290*4882a593Smuzhiyun p.olen = *olen;
291*4882a593Smuzhiyun
292*4882a593Smuzhiyun total = p.olen;
293*4882a593Smuzhiyun
294*4882a593Smuzhiyun *olen = 0;
295*4882a593Smuzhiyun
296*4882a593Smuzhiyun do {
297*4882a593Smuzhiyun ret = next_bits(&p, &op, OP_BITS);
298*4882a593Smuzhiyun if (ret)
299*4882a593Smuzhiyun return ret;
300*4882a593Smuzhiyun
301*4882a593Smuzhiyun pr_debug("template is %lx\n", (unsigned long)op);
302*4882a593Smuzhiyun
303*4882a593Smuzhiyun switch (op) {
304*4882a593Smuzhiyun case OP_REPEAT:
305*4882a593Smuzhiyun ret = next_bits(&p, &rep, REPEAT_BITS);
306*4882a593Smuzhiyun if (ret)
307*4882a593Smuzhiyun return ret;
308*4882a593Smuzhiyun
309*4882a593Smuzhiyun if (p.out == out) /* no previous bytes */
310*4882a593Smuzhiyun return -EINVAL;
311*4882a593Smuzhiyun
312*4882a593Smuzhiyun /* copy rep + 1 */
313*4882a593Smuzhiyun rep++;
314*4882a593Smuzhiyun
315*4882a593Smuzhiyun if (rep * 8 > p.olen)
316*4882a593Smuzhiyun return -ENOSPC;
317*4882a593Smuzhiyun
318*4882a593Smuzhiyun while (rep-- > 0) {
319*4882a593Smuzhiyun memcpy(p.out, p.out - 8, 8);
320*4882a593Smuzhiyun p.out += 8;
321*4882a593Smuzhiyun p.olen -= 8;
322*4882a593Smuzhiyun }
323*4882a593Smuzhiyun
324*4882a593Smuzhiyun if (sw842_template_counts)
325*4882a593Smuzhiyun atomic_inc(&template_repeat_count);
326*4882a593Smuzhiyun
327*4882a593Smuzhiyun break;
328*4882a593Smuzhiyun case OP_ZEROS:
329*4882a593Smuzhiyun if (8 > p.olen)
330*4882a593Smuzhiyun return -ENOSPC;
331*4882a593Smuzhiyun
332*4882a593Smuzhiyun memset(p.out, 0, 8);
333*4882a593Smuzhiyun p.out += 8;
334*4882a593Smuzhiyun p.olen -= 8;
335*4882a593Smuzhiyun
336*4882a593Smuzhiyun if (sw842_template_counts)
337*4882a593Smuzhiyun atomic_inc(&template_zeros_count);
338*4882a593Smuzhiyun
339*4882a593Smuzhiyun break;
340*4882a593Smuzhiyun case OP_SHORT_DATA:
341*4882a593Smuzhiyun ret = next_bits(&p, &bytes, SHORT_DATA_BITS);
342*4882a593Smuzhiyun if (ret)
343*4882a593Smuzhiyun return ret;
344*4882a593Smuzhiyun
345*4882a593Smuzhiyun if (!bytes || bytes > SHORT_DATA_BITS_MAX)
346*4882a593Smuzhiyun return -EINVAL;
347*4882a593Smuzhiyun
348*4882a593Smuzhiyun while (bytes-- > 0) {
349*4882a593Smuzhiyun ret = next_bits(&p, &tmp, 8);
350*4882a593Smuzhiyun if (ret)
351*4882a593Smuzhiyun return ret;
352*4882a593Smuzhiyun *p.out = (u8)tmp;
353*4882a593Smuzhiyun p.out++;
354*4882a593Smuzhiyun p.olen--;
355*4882a593Smuzhiyun }
356*4882a593Smuzhiyun
357*4882a593Smuzhiyun if (sw842_template_counts)
358*4882a593Smuzhiyun atomic_inc(&template_short_data_count);
359*4882a593Smuzhiyun
360*4882a593Smuzhiyun break;
361*4882a593Smuzhiyun case OP_END:
362*4882a593Smuzhiyun if (sw842_template_counts)
363*4882a593Smuzhiyun atomic_inc(&template_end_count);
364*4882a593Smuzhiyun
365*4882a593Smuzhiyun break;
366*4882a593Smuzhiyun default: /* use template */
367*4882a593Smuzhiyun ret = do_op(&p, op);
368*4882a593Smuzhiyun if (ret)
369*4882a593Smuzhiyun return ret;
370*4882a593Smuzhiyun break;
371*4882a593Smuzhiyun }
372*4882a593Smuzhiyun } while (op != OP_END);
373*4882a593Smuzhiyun
374*4882a593Smuzhiyun /*
375*4882a593Smuzhiyun * crc(0:31) is saved in compressed data starting with the
376*4882a593Smuzhiyun * next bit after End of stream template.
377*4882a593Smuzhiyun */
378*4882a593Smuzhiyun ret = next_bits(&p, &crc, CRC_BITS);
379*4882a593Smuzhiyun if (ret)
380*4882a593Smuzhiyun return ret;
381*4882a593Smuzhiyun
382*4882a593Smuzhiyun /*
383*4882a593Smuzhiyun * Validate CRC saved in compressed data.
384*4882a593Smuzhiyun */
385*4882a593Smuzhiyun if (crc != (u64)crc32_be(0, out, total - p.olen)) {
386*4882a593Smuzhiyun pr_debug("CRC mismatch for decompression\n");
387*4882a593Smuzhiyun return -EINVAL;
388*4882a593Smuzhiyun }
389*4882a593Smuzhiyun
390*4882a593Smuzhiyun if (unlikely((total - p.olen) > UINT_MAX))
391*4882a593Smuzhiyun return -ENOSPC;
392*4882a593Smuzhiyun
393*4882a593Smuzhiyun *olen = total - p.olen;
394*4882a593Smuzhiyun
395*4882a593Smuzhiyun return 0;
396*4882a593Smuzhiyun }
397*4882a593Smuzhiyun EXPORT_SYMBOL_GPL(sw842_decompress);
398*4882a593Smuzhiyun
/* Module init: create the debugfs template-count files when the
 * sw842_template_counts knob is set.  Never fails.
 */
static int __init sw842_init(void)
{
	if (sw842_template_counts)
		sw842_debugfs_create();

	return 0;
}
module_init(sw842_init);
407*4882a593Smuzhiyun
/* Module exit: tear down the debugfs files created by sw842_init(). */
static void __exit sw842_exit(void)
{
	if (sw842_template_counts)
		sw842_debugfs_remove();
}
module_exit(sw842_exit);
414*4882a593Smuzhiyun
415*4882a593Smuzhiyun MODULE_LICENSE("GPL");
416*4882a593Smuzhiyun MODULE_DESCRIPTION("Software 842 Decompressor");
417*4882a593Smuzhiyun MODULE_AUTHOR("Dan Streetman <ddstreet@ieee.org>");
418