1*4882a593Smuzhiyun // SPDX-License-Identifier: GPL-2.0
2*4882a593Smuzhiyun /*
 * Rockchip crypto utils
4*4882a593Smuzhiyun *
5*4882a593Smuzhiyun * Copyright (c) 2022, Rockchip Electronics Co., Ltd
6*4882a593Smuzhiyun *
7*4882a593Smuzhiyun * Author: Lin Jinhan <troy.lin@rock-chips.com>
8*4882a593Smuzhiyun *
9*4882a593Smuzhiyun */
10*4882a593Smuzhiyun
11*4882a593Smuzhiyun #include <crypto/scatterwalk.h>
12*4882a593Smuzhiyun #include <linux/scatterlist.h>
13*4882a593Smuzhiyun
14*4882a593Smuzhiyun #include "rk_crypto_core.h"
15*4882a593Smuzhiyun #include "rk_crypto_utils.h"
16*4882a593Smuzhiyun
/* Store a 32-bit word into ch[0..3] in big-endian (network) byte order. */
static inline void word2byte_be(u32 word, u8 *ch)
{
	unsigned int i;

	for (i = 0; i < 4; i++)
		ch[i] = (word >> (24 - 8 * i)) & 0xff;
}
24*4882a593Smuzhiyun
byte2word_be(const u8 * ch)25*4882a593Smuzhiyun static inline u32 byte2word_be(const u8 *ch)
26*4882a593Smuzhiyun {
27*4882a593Smuzhiyun return (*ch << 24) + (*(ch + 1) << 16) +
28*4882a593Smuzhiyun (*(ch + 2) << 8) + *(ch + 3);
29*4882a593Smuzhiyun }
30*4882a593Smuzhiyun
/*
 * Write a byte buffer into consecutive 32-bit crypto registers, packing
 * bytes big-endian. A trailing partial word (bytes % 4) is zero-padded
 * before being written.
 */
void rk_crypto_write_regs(struct rk_crypto_dev *rk_dev, u32 base_addr, const u8 *data, u32 bytes)
{
	u32 whole_words = bytes / 4;
	u32 tail = bytes % 4;
	u32 i;

	for (i = 0; i < whole_words; i++, base_addr += 4, data += 4)
		CRYPTO_WRITE(rk_dev, base_addr, byte2word_be(data));

	if (tail) {
		u8 pad[4] = {0};

		memcpy(pad, data, tail);
		CRYPTO_WRITE(rk_dev, base_addr, byte2word_be(pad));
	}
}
45*4882a593Smuzhiyun
/* Zero a run of `words` consecutive 32-bit crypto registers. */
void rk_crypto_clear_regs(struct rk_crypto_dev *rk_dev, u32 base_addr, u32 words)
{
	while (words--) {
		CRYPTO_WRITE(rk_dev, base_addr, 0);
		base_addr += 4;
	}
}
53*4882a593Smuzhiyun
/*
 * Read consecutive 32-bit crypto registers into a byte buffer, unpacking
 * each word big-endian. For a trailing partial word only the first
 * (bytes % 4) bytes of the final register read are copied out.
 */
void rk_crypto_read_regs(struct rk_crypto_dev *rk_dev, u32 base_addr, u8 *data, u32 bytes)
{
	u32 whole_words = bytes / 4;
	u32 tail = bytes % 4;
	u32 i;

	for (i = 0; i < whole_words; i++, base_addr += 4)
		word2byte_be(CRYPTO_READ(rk_dev, base_addr), data + i * 4);

	if (tail) {
		u8 last[4];

		word2byte_be(CRYPTO_READ(rk_dev, base_addr), last);
		memcpy(data + whole_words * 4, last, tail);
	}
}
68*4882a593Smuzhiyun
check_scatter_align(struct scatterlist * sg_src,struct scatterlist * sg_dst,int align_mask)69*4882a593Smuzhiyun static int check_scatter_align(struct scatterlist *sg_src,
70*4882a593Smuzhiyun struct scatterlist *sg_dst,
71*4882a593Smuzhiyun int align_mask)
72*4882a593Smuzhiyun {
73*4882a593Smuzhiyun int in, out, align;
74*4882a593Smuzhiyun
75*4882a593Smuzhiyun in = IS_ALIGNED((u32)sg_src->offset, 4) &&
76*4882a593Smuzhiyun IS_ALIGNED((u32)sg_src->length, align_mask) &&
77*4882a593Smuzhiyun (sg_phys(sg_src) < SZ_4G);
78*4882a593Smuzhiyun if (!sg_dst)
79*4882a593Smuzhiyun return in;
80*4882a593Smuzhiyun
81*4882a593Smuzhiyun out = IS_ALIGNED((u32)sg_dst->offset, 4) &&
82*4882a593Smuzhiyun IS_ALIGNED((u32)sg_dst->length, align_mask) &&
83*4882a593Smuzhiyun (sg_phys(sg_dst) < SZ_4G);
84*4882a593Smuzhiyun align = in && out;
85*4882a593Smuzhiyun
86*4882a593Smuzhiyun return (align && (sg_src->length == sg_dst->length));
87*4882a593Smuzhiyun }
88*4882a593Smuzhiyun
/*
 * Walk src (and optionally dst) scatterlists and verify every entry
 * satisfies check_scatter_align(). When dst_sg is non-NULL the two lists
 * must have the same number of entries. Returns true only if all entries
 * pass.
 */
bool rk_crypto_check_align(struct scatterlist *src_sg, size_t src_nents,
			   struct scatterlist *dst_sg, size_t dst_nents,
			   int align_mask)
{
	struct scatterlist *s = src_sg;
	struct scatterlist *d = dst_sg;
	size_t i;

	if (dst_sg && src_nents != dst_nents)
		return false;

	for (i = 0; i < src_nents; i++) {
		if (!s || !check_scatter_align(s, d, align_mask))
			return false;

		s = sg_next(s);
		if (dst_sg)
			d = sg_next(d);
	}

	return true;
}
118*4882a593Smuzhiyun
rk_crypto_check_dmafd(struct scatterlist * sgl,size_t nents)119*4882a593Smuzhiyun bool rk_crypto_check_dmafd(struct scatterlist *sgl, size_t nents)
120*4882a593Smuzhiyun {
121*4882a593Smuzhiyun struct scatterlist *src_tmp = NULL;
122*4882a593Smuzhiyun unsigned int i;
123*4882a593Smuzhiyun
124*4882a593Smuzhiyun for_each_sg(sgl, src_tmp, nents, i) {
125*4882a593Smuzhiyun if (!src_tmp)
126*4882a593Smuzhiyun return false;
127*4882a593Smuzhiyun
128*4882a593Smuzhiyun if (src_tmp->length && !sg_dma_address(src_tmp))
129*4882a593Smuzhiyun return false;
130*4882a593Smuzhiyun }
131*4882a593Smuzhiyun
132*4882a593Smuzhiyun return true;
133*4882a593Smuzhiyun }
134*4882a593Smuzhiyun
rk_crypto_dump_hw_desc(struct rk_hw_desc * hw_desc)135*4882a593Smuzhiyun void rk_crypto_dump_hw_desc(struct rk_hw_desc *hw_desc)
136*4882a593Smuzhiyun {
137*4882a593Smuzhiyun struct crypto_lli_desc *cur_lli = NULL;
138*4882a593Smuzhiyun u32 i;
139*4882a593Smuzhiyun
140*4882a593Smuzhiyun cur_lli = hw_desc->lli_head;
141*4882a593Smuzhiyun
142*4882a593Smuzhiyun CRYPTO_TRACE("lli_head = %lx, lli_tail = %lx",
143*4882a593Smuzhiyun (unsigned long)hw_desc->lli_head, (unsigned long)hw_desc->lli_tail);
144*4882a593Smuzhiyun
145*4882a593Smuzhiyun for (i = 0; i < hw_desc->total; i++, cur_lli++) {
146*4882a593Smuzhiyun CRYPTO_TRACE("cur_lli = %lx", (unsigned long)cur_lli);
147*4882a593Smuzhiyun CRYPTO_TRACE("src_addr = %08x", cur_lli->src_addr);
148*4882a593Smuzhiyun CRYPTO_TRACE("src_len = %08x", cur_lli->src_len);
149*4882a593Smuzhiyun CRYPTO_TRACE("dst_addr = %08x", cur_lli->dst_addr);
150*4882a593Smuzhiyun CRYPTO_TRACE("dst_len = %08x", cur_lli->dst_len);
151*4882a593Smuzhiyun CRYPTO_TRACE("user_def = %08x", cur_lli->user_define);
152*4882a593Smuzhiyun CRYPTO_TRACE("dma_ctl = %08x", cur_lli->dma_ctrl);
153*4882a593Smuzhiyun CRYPTO_TRACE("next = %08x\n", cur_lli->next_addr);
154*4882a593Smuzhiyun
155*4882a593Smuzhiyun if (cur_lli == hw_desc->lli_tail)
156*4882a593Smuzhiyun break;
157*4882a593Smuzhiyun }
158*4882a593Smuzhiyun }
159*4882a593Smuzhiyun
/*
 * Compute how many bytes of @sg (up to @len) fit in one descriptor chain
 * of at most RK_DEFAULT_LLI_CNT entries.
 *
 * @sg:        scatterlist to walk (may be NULL; yields 0 nents / 0 bytes)
 * @len:       requested byte count
 * @max_nents: out — number of sg entries consumed
 *
 * Returns the byte count the chain can carry, clamped to @len.
 *
 * Fixes vs. original: the dead `if (!sg)` inside the loop (the loop
 * condition already guarantees sg != NULL) is removed; the entry counter
 * is u32 to match *max_nents; *max_nents is also set on the len == 0
 * early return instead of being left unwritten.
 */
u64 rk_crypto_hw_desc_maxlen(struct scatterlist *sg, u64 len, u32 *max_nents)
{
	u32 nents = 0;
	u64 total = 0;

	if (!len) {
		*max_nents = 0;
		return 0;
	}

	for (; sg; sg = sg_next(sg)) {
		nents++;
		total += sg->length;

		/* stop once the chain is full or the request is covered */
		if (nents >= RK_DEFAULT_LLI_CNT || total >= len)
			break;
	}

	*max_nents = nents;
	return total > len ? len : total;
}
183*4882a593Smuzhiyun
rk_crypto_hw_desc_alloc(struct device * dev,struct rk_hw_desc * hw_desc)184*4882a593Smuzhiyun int rk_crypto_hw_desc_alloc(struct device *dev, struct rk_hw_desc *hw_desc)
185*4882a593Smuzhiyun {
186*4882a593Smuzhiyun u32 lli_cnt = RK_DEFAULT_LLI_CNT;
187*4882a593Smuzhiyun u32 lli_len = lli_cnt * sizeof(struct crypto_lli_desc);
188*4882a593Smuzhiyun
189*4882a593Smuzhiyun if (!dev || !hw_desc)
190*4882a593Smuzhiyun return -EINVAL;
191*4882a593Smuzhiyun
192*4882a593Smuzhiyun memset(hw_desc, 0x00, sizeof(*hw_desc));
193*4882a593Smuzhiyun
194*4882a593Smuzhiyun hw_desc->lli_aad = dma_alloc_coherent(dev, sizeof(struct crypto_lli_desc),
195*4882a593Smuzhiyun &hw_desc->lli_aad_dma, GFP_KERNEL);
196*4882a593Smuzhiyun if (!hw_desc->lli_aad)
197*4882a593Smuzhiyun return -ENOMEM;
198*4882a593Smuzhiyun
199*4882a593Smuzhiyun ///TODO: cma
200*4882a593Smuzhiyun hw_desc->lli_head = dma_alloc_coherent(dev, lli_len, &hw_desc->lli_head_dma, GFP_KERNEL);
201*4882a593Smuzhiyun if (!hw_desc->lli_head) {
202*4882a593Smuzhiyun dma_free_coherent(dev, sizeof(struct crypto_lli_desc),
203*4882a593Smuzhiyun hw_desc->lli_aad, hw_desc->lli_aad_dma);
204*4882a593Smuzhiyun return -ENOMEM;
205*4882a593Smuzhiyun }
206*4882a593Smuzhiyun
207*4882a593Smuzhiyun hw_desc->lli_tail = hw_desc->lli_head;
208*4882a593Smuzhiyun hw_desc->total = lli_cnt;
209*4882a593Smuzhiyun hw_desc->dev = dev;
210*4882a593Smuzhiyun
211*4882a593Smuzhiyun memset(hw_desc->lli_head, 0x00, lli_len);
212*4882a593Smuzhiyun
213*4882a593Smuzhiyun CRYPTO_TRACE("dev = %lx, buffer_len = %u, lli_head = %lx, lli_head_dma = %lx",
214*4882a593Smuzhiyun (unsigned long)hw_desc->dev, lli_len,
215*4882a593Smuzhiyun (unsigned long)hw_desc->lli_head, (unsigned long)hw_desc->lli_head_dma);
216*4882a593Smuzhiyun
217*4882a593Smuzhiyun return 0;
218*4882a593Smuzhiyun }
219*4882a593Smuzhiyun
rk_crypto_hw_desc_free(struct rk_hw_desc * hw_desc)220*4882a593Smuzhiyun void rk_crypto_hw_desc_free(struct rk_hw_desc *hw_desc)
221*4882a593Smuzhiyun {
222*4882a593Smuzhiyun if (!hw_desc || !hw_desc->dev || !hw_desc->lli_head)
223*4882a593Smuzhiyun return;
224*4882a593Smuzhiyun
225*4882a593Smuzhiyun CRYPTO_TRACE("dev = %lx, buffer_len = %lu, lli_head = %lx, lli_head_dma = %lx",
226*4882a593Smuzhiyun (unsigned long)hw_desc->dev,
227*4882a593Smuzhiyun (unsigned long)hw_desc->total * sizeof(struct crypto_lli_desc),
228*4882a593Smuzhiyun (unsigned long)hw_desc->lli_head, (unsigned long)hw_desc->lli_head_dma);
229*4882a593Smuzhiyun
230*4882a593Smuzhiyun dma_free_coherent(hw_desc->dev, sizeof(struct crypto_lli_desc),
231*4882a593Smuzhiyun hw_desc->lli_aad, hw_desc->lli_aad_dma);
232*4882a593Smuzhiyun
233*4882a593Smuzhiyun dma_free_coherent(hw_desc->dev, hw_desc->total * sizeof(struct crypto_lli_desc),
234*4882a593Smuzhiyun hw_desc->lli_head, hw_desc->lli_head_dma);
235*4882a593Smuzhiyun
236*4882a593Smuzhiyun memset(hw_desc, 0x00, sizeof(*hw_desc));
237*4882a593Smuzhiyun }
238*4882a593Smuzhiyun
/*
 * Fill the pre-allocated lli descriptor chain for a transfer of @len bytes
 * described by @src_sg (and optionally @dst_sg, which must then cover the
 * same number of entries). The scatterlists must already be DMA-mapped.
 * The last descriptor's length is trimmed to the remaining bytes and its
 * next_addr is 0, terminating the chain; hw_desc->lli_tail points at it.
 *
 * Returns 0 on success, -EINVAL on bad arguments or mismatched nents,
 * -ENOMEM if the chain is too short, or the negative errno from
 * sg_nents_for_len().
 *
 * Fixes vs. original: sg_nents_for_len() returns a negative errno on
 * failure, which was previously stored straight into a u32 and so turned
 * into a huge entry count; the error is now checked and propagated. The
 * two near-identical fill loops (with/without dst) are merged.
 */
int rk_crypto_hw_desc_init(struct rk_hw_desc *hw_desc,
			   struct scatterlist *src_sg,
			   struct scatterlist *dst_sg,
			   u64 len)
{
	struct crypto_lli_desc *cur_lli = NULL;
	struct scatterlist *tmp_src, *tmp_dst;
	dma_addr_t tmp_next_dma;
	u32 src_nents, dst_nents;
	u32 i, data_cnt = 0;
	int ret;

	if (!hw_desc || !hw_desc->dev || !hw_desc->lli_head)
		return -EINVAL;

	if (!src_sg || len == 0)
		return -EINVAL;

	ret = sg_nents_for_len(src_sg, len);
	if (ret < 0)
		return ret;
	src_nents = ret;

	if (dst_sg) {
		ret = sg_nents_for_len(dst_sg, len);
		if (ret < 0)
			return ret;
		dst_nents = ret;
	} else {
		dst_nents = src_nents;
	}

	if (src_nents != dst_nents)
		return -EINVAL;

	CRYPTO_TRACE("src_nents = %u, total = %u, len = %llu", src_nents, hw_desc->total, len);

	if (src_nents > hw_desc->total) {
		pr_err("crypto: nents overflow, %u > %u", src_nents, hw_desc->total);
		return -ENOMEM;
	}

	memset(hw_desc->lli_head, 0x00, src_nents * sizeof(struct crypto_lli_desc));

	cur_lli = hw_desc->lli_head;
	tmp_src = src_sg;
	tmp_dst = dst_sg;
	tmp_next_dma = hw_desc->lli_head_dma + sizeof(*cur_lli);

	/* all but the last entry use the full mapped length and chain onward */
	for (i = 0; i < src_nents - 1; i++, cur_lli++, tmp_next_dma += sizeof(*cur_lli)) {
		cur_lli->src_addr = sg_dma_address(tmp_src);
		cur_lli->src_len = sg_dma_len(tmp_src);
		cur_lli->next_addr = tmp_next_dma;

		if (tmp_dst) {
			cur_lli->dst_addr = sg_dma_address(tmp_dst);
			cur_lli->dst_len = sg_dma_len(tmp_dst);
			tmp_dst = sg_next(tmp_dst);
		}

		data_cnt += sg_dma_len(tmp_src);
		tmp_src = sg_next(tmp_src);
	}

	/* last entry: carry the remainder only and terminate the chain */
	cur_lli->src_addr = sg_dma_address(tmp_src);
	cur_lli->src_len = len - data_cnt;
	cur_lli->next_addr = 0;

	if (dst_sg) {
		cur_lli->dst_addr = sg_dma_address(tmp_dst);
		cur_lli->dst_len = len - data_cnt;
	}

	hw_desc->lli_tail = cur_lli;

	return 0;
}
313*4882a593Smuzhiyun
314