// SPDX-License-Identifier: GPL-2.0-only
/*
 * OMAP Crypto driver common support routines.
 *
 * Copyright (c) 2017 Texas Instruments Incorporated
 * Tero Kristo <t-kristo@ti.com>
 */

#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/scatterlist.h>
#include <crypto/scatterwalk.h>

#include "omap-crypto.h"

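/*
 * Clone the source SG list into a new table, truncated to @total bytes.
 * Unless OMAP_CRYPTO_FORCE_SINGLE_ENTRY is set (in which case the caller
 * provides pre-initialized storage in @new_sg), the table is allocated
 * here and is later released by omap_crypto_cleanup().
 */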
static int omap_crypto_copy_sg_lists(int total, int bs,
				     struct scatterlist **sg,
				     struct scatterlist *new_sg, u16 flags)
{
	int n = sg_nents(*sg);
	struct scatterlist *tmp;

	if (!(flags & OMAP_CRYPTO_FORCE_SINGLE_ENTRY)) {
		/* One table entry per source segment */
		new_sg = kmalloc_array(n, sizeof(*new_sg), GFP_KERNEL);
		if (!new_sg)
			return -ENOMEM;

		sg_init_table(new_sg, n);
	}

	tmp = new_sg;

	while (*sg && total) {
		int len = (*sg)->length;

		if (total < len)
			len = total;

		if (len > 0) {
			total -= len;
			sg_set_page(tmp, sg_page(*sg), len, (*sg)->offset);
			if (total <= 0)
				sg_mark_end(tmp);
			tmp = sg_next(tmp);
		}

		*sg = sg_next(*sg);
	}

	*sg = new_sg;

	return 0;
}

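/*
 * Flatten the source SG list into a single freshly allocated buffer whose
 * length is @total rounded up to the block size @bs. Optionally copies the
 * existing data across (OMAP_CRYPTO_COPY_DATA) and zero-pads the tail
 * (OMAP_CRYPTO_ZERO_BUF). The buffer is freed by omap_crypto_cleanup().
 */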
static int omap_crypto_copy_sgs(int total, int bs, struct scatterlist **sg,
				struct scatterlist *new_sg, u16 flags)
{
	void *buf;
	int pages;
	int new_len;

	new_len = ALIGN(total, bs);
	pages = get_order(new_len);

	buf = (void *)__get_free_pages(GFP_ATOMIC, pages);
	if (!buf) {
		pr_err("%s: Couldn't allocate pages for unaligned cases.\n",
		       __func__);
		return -ENOMEM;
	}

	if (flags & OMAP_CRYPTO_COPY_DATA) {
		scatterwalk_map_and_copy(buf, *sg, 0, total, 0);
		if (flags & OMAP_CRYPTO_ZERO_BUF)
			memset(buf + total, 0, new_len - total);
	}

	if (!(flags & OMAP_CRYPTO_FORCE_SINGLE_ENTRY))
		sg_init_table(new_sg, 1);

	sg_set_buf(new_sg, buf, new_len);

	*sg = new_sg;

	return 0;
}

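/*
 * Check whether an SG list satisfies the hardware constraints: the total
 * length and every segment length must be block aligned, segment offsets
 * must be 32-bit aligned, and (when CONFIG_ZONE_DMA is enabled) the
 * backing pages must live in ZONE_DMA. Returns 0 when the list is usable
 * as-is, or an OMAP_CRYPTO_* code telling the caller which copy is needed.
 */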
static int omap_crypto_check_sg(struct scatterlist *sg, int total, int bs,
				u16 flags)
{
	int len = 0;
	int num_sg = 0;

	if (!IS_ALIGNED(total, bs))
		return OMAP_CRYPTO_NOT_ALIGNED;

	while (sg) {
		num_sg++;

		if (!IS_ALIGNED(sg->offset, 4))
			return OMAP_CRYPTO_NOT_ALIGNED;
		if (!IS_ALIGNED(sg->length, bs))
			return OMAP_CRYPTO_NOT_ALIGNED;
#ifdef CONFIG_ZONE_DMA
		if (page_zonenum(sg_page(sg)) != ZONE_DMA)
			return OMAP_CRYPTO_NOT_ALIGNED;
#endif

		len += sg->length;
		sg = sg_next(sg);

		if (len >= total)
			break;
	}

	if ((flags & OMAP_CRYPTO_FORCE_SINGLE_ENTRY) && num_sg > 1)
		return OMAP_CRYPTO_NOT_ALIGNED;

	if (len != total)
		return OMAP_CRYPTO_BAD_DATA_LENGTH;

	return 0;
}

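/**
 * omap_crypto_align_sg - make an SG list safe for OMAP crypto DMA
 * @sg:		in/out pointer to the SG list; may be redirected to @new_sg
 * @total:	number of bytes to be processed
 * @bs:		block size the data length must be aligned to
 * @new_sg:	caller-provided storage used when a copy becomes necessary
 * @flags:	OMAP_CRYPTO_* behavior flags (force copy, single entry, ...)
 * @flags_shift: bit offset of the copy-state flags within @dd_flags
 * @dd_flags:	driver state word where the copy-state flags are recorded
 *
 * Checks the list with omap_crypto_check_sg() and, when it violates the
 * alignment rules, transparently replaces it with an aligned copy. The
 * OMAP_CRYPTO_DATA_COPIED / OMAP_CRYPTO_SG_COPIED bits stored in @dd_flags
 * tell omap_crypto_cleanup() what to undo afterwards.
 *
 * Illustrative call sequence (a sketch only; the @dd field names and the
 * flags shift are hypothetical, not taken from a real driver):
 *
 *	err = omap_crypto_align_sg(&dd->in_sg, dd->total, AES_BLOCK_SIZE,
 *				   &dd->in_sgl, flags, DD_FLAGS_IN_SHIFT,
 *				   &dd->flags);
 *	...
 *	omap_crypto_cleanup(dd->in_sg, orig_in_sg, 0, dd->total,
 *			    DD_FLAGS_IN_SHIFT, dd->flags);
 *
 * Return: 0 on success, -ENOMEM if a bounce allocation fails.
 */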
int omap_crypto_align_sg(struct scatterlist **sg, int total, int bs,
			 struct scatterlist *new_sg, u16 flags,
			 u8 flags_shift, unsigned long *dd_flags)
{
	int ret;

	*dd_flags &= ~(OMAP_CRYPTO_COPY_MASK << flags_shift);

	if (flags & OMAP_CRYPTO_FORCE_COPY)
		ret = OMAP_CRYPTO_NOT_ALIGNED;
	else
		ret = omap_crypto_check_sg(*sg, total, bs, flags);

	if (ret == OMAP_CRYPTO_NOT_ALIGNED) {
		ret = omap_crypto_copy_sgs(total, bs, sg, new_sg, flags);
		if (ret)
			return ret;
		*dd_flags |= OMAP_CRYPTO_DATA_COPIED << flags_shift;
	} else if (ret == OMAP_CRYPTO_BAD_DATA_LENGTH) {
		ret = omap_crypto_copy_sg_lists(total, bs, sg, new_sg, flags);
		if (ret)
			return ret;
		if (!(flags & OMAP_CRYPTO_FORCE_SINGLE_ENTRY))
			*dd_flags |= OMAP_CRYPTO_SG_COPIED << flags_shift;
	} else if (flags & OMAP_CRYPTO_FORCE_SINGLE_ENTRY) {
		sg_set_buf(new_sg, sg_virt(*sg), (*sg)->length);
	}

	return 0;
}
EXPORT_SYMBOL_GPL(omap_crypto_align_sg);

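/*
 * Copy @len bytes from @src into @dst starting at byte @offset of @dst,
 * walking both SG lists segment by segment. Used on completion to
 * propagate results from a bounce buffer back to the caller's original
 * list.
 */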
static void omap_crypto_copy_data(struct scatterlist *src,
				  struct scatterlist *dst,
				  int offset, int len)
{
	int amt;
	void *srcb, *dstb;
	int srco = 0, dsto = offset;

	while (src && dst && len) {
		if (srco >= src->length) {
			srco -= src->length;
			src = sg_next(src);
			continue;
		}

		if (dsto >= dst->length) {
			dsto -= dst->length;
			dst = sg_next(dst);
			continue;
		}

		amt = min(src->length - srco, dst->length - dsto);
		amt = min(len, amt);
		srcb = kmap_atomic(sg_page(src)) + srco + src->offset;
		dstb = kmap_atomic(sg_page(dst)) + dsto + dst->offset;

		memcpy(dstb, srcb, amt);

		if (!PageSlab(sg_page(dst)))
			flush_kernel_dcache_page(sg_page(dst));

		/* Atomic kmaps must be released in reverse mapping order */
		kunmap_atomic(dstb);
		kunmap_atomic(srcb);

		srco += amt;
		dsto += amt;
		len -= amt;
	}
}

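/**
 * omap_crypto_cleanup - undo what omap_crypto_align_sg() set up
 * @sg:		the (possibly copied) SG list that was handed to the hardware
 * @orig:	the caller's original SG list, or NULL if nothing to copy back
 * @offset:	byte offset into @orig where the result data starts
 * @len:	number of result bytes to copy back
 * @flags_shift: same bit offset that was passed to omap_crypto_align_sg()
 * @flags:	driver state word holding the recorded copy-state flags
 *
 * If the data was bounced, copy it back into @orig and free the bounce
 * pages; if only the SG table was duplicated, free the duplicate table.
 */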
void omap_crypto_cleanup(struct scatterlist *sg, struct scatterlist *orig,
			 int offset, int len, u8 flags_shift,
			 unsigned long flags)
{
	void *buf;
	int pages;

	flags >>= flags_shift;
	flags &= OMAP_CRYPTO_COPY_MASK;

	if (!flags)
		return;

	buf = sg_virt(sg);
	pages = get_order(len);

	if (orig && (flags & OMAP_CRYPTO_COPY_MASK))
		omap_crypto_copy_data(sg, orig, offset, len);

	if (flags & OMAP_CRYPTO_DATA_COPIED)
		free_pages((unsigned long)buf, pages);
	else if (flags & OMAP_CRYPTO_SG_COPIED)
		kfree(sg);
}
EXPORT_SYMBOL_GPL(omap_crypto_cleanup);

MODULE_DESCRIPTION("OMAP crypto support library.");
MODULE_LICENSE("GPL v2");
MODULE_AUTHOR("Tero Kristo <t-kristo@ti.com>");