// SPDX-License-Identifier: GPL-2.0
/*
 * Hash acceleration support for Rockchip Crypto v3
 *
 * Copyright (c) 2022, Rockchip Electronics Co., Ltd
 *
 * Author: Lin Jinhan <troy.lin@rock-chips.com>
 */

#include <linux/slab.h>
#include <linux/iopoll.h>

#include "rk_crypto_core.h"
#include "rk_crypto_v3.h"
#include "rk_crypto_v3_reg.h"
#include "rk_crypto_ahash_utils.h"
#include "rk_crypto_utils.h"

#define RK_HASH_CTX_MAGIC	0x1A1A1A1A
#define RK_POLL_PERIOD_US	100
#define RK_POLL_TIMEOUT_US	50000

struct rk_ahash_expt_ctx {
	struct rk_ahash_ctx ctx;
	u8 lastc[RK_DMA_ALIGNMENT];
};

struct rk_hash_mid_data {
	u32 valid_flag;
	u32 hash_ctl;
	u32 data[CRYPTO_HASH_MID_WORD_SIZE];
};

static const u32 hash_algo2bc[] = {
	[HASH_ALGO_MD5]    = CRYPTO_MD5,
	[HASH_ALGO_SHA1]   = CRYPTO_SHA1,
	[HASH_ALGO_SHA224] = CRYPTO_SHA224,
	[HASH_ALGO_SHA256] = CRYPTO_SHA256,
	[HASH_ALGO_SHA384] = CRYPTO_SHA384,
	[HASH_ALGO_SHA512] = CRYPTO_SHA512,
	[HASH_ALGO_SM3]    = CRYPTO_SM3,
};

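/*
 * Soft-reset the symmetric crypto core before (re)configuring the hash
 * engine. CRYPTO_SW_CC_RESET is expected to self-clear once the reset
 * completes, so poll it back to zero before clearing CRYPTO_HASH_CTL.
 */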
static void rk_hash_reset(struct rk_crypto_dev *rk_dev)
{
	int ret;
	u32 tmp = 0, tmp_mask = 0;
	unsigned int poll_timeout_us = 1000;

	CRYPTO_WRITE(rk_dev, CRYPTO_DMA_INT_EN, 0x00);

	tmp = CRYPTO_SW_CC_RESET;
	tmp_mask = tmp << CRYPTO_WRITE_MASK_SHIFT;

	CRYPTO_WRITE(rk_dev, CRYPTO_RST_CTL, tmp | tmp_mask);

	/* This is usually done in 20 clock cycles */
	ret = read_poll_timeout_atomic(CRYPTO_READ, tmp, !tmp, 0, poll_timeout_us,
				       false, rk_dev, CRYPTO_RST_CTL);
	if (ret)
		dev_err(rk_dev->dev, "cipher reset poll timeout %uus\n",
			poll_timeout_us);

	CRYPTO_WRITE(rk_dev, CRYPTO_HASH_CTL, 0xffff0000);
}

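/*
 * Store the intermediate hash state into @mid_data so the engine can be
 * released between update() calls: wait for the hardware to flag the mid
 * data as valid, read back CRYPTO_HASH_MID_DATA_n together with the
 * current CRYPTO_HASH_CTL setting, then shut the hash engine down.
 */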
static int rk_hash_mid_data_store(struct rk_crypto_dev *rk_dev, struct rk_hash_mid_data *mid_data)
{
	int ret;
	u32 reg_ctrl;

	CRYPTO_TRACE();

	ret = read_poll_timeout_atomic(CRYPTO_READ,
				       reg_ctrl,
				       reg_ctrl & CRYPTO_HASH_MID_IS_VALID,
				       0,
				       RK_POLL_TIMEOUT_US,
				       false, rk_dev, CRYPTO_MID_VALID);

	CRYPTO_WRITE(rk_dev, CRYPTO_MID_VALID_SWITCH,
		     CRYPTO_MID_VALID_ENABLE << CRYPTO_WRITE_MASK_SHIFT);
	if (ret) {
		CRYPTO_TRACE("CRYPTO_MID_VALID timeout.");
		goto exit;
	}

	CRYPTO_WRITE(rk_dev, CRYPTO_MID_VALID,
		     CRYPTO_HASH_MID_IS_VALID |
		     CRYPTO_HASH_MID_IS_VALID << CRYPTO_WRITE_MASK_SHIFT);

	rk_crypto_read_regs(rk_dev, CRYPTO_HASH_MID_DATA_0,
			    (u8 *)mid_data->data, sizeof(mid_data->data));

	mid_data->hash_ctl = CRYPTO_READ(rk_dev, CRYPTO_HASH_CTL);
	mid_data->valid_flag = 1;

	CRYPTO_WRITE(rk_dev, CRYPTO_HASH_CTL, 0 | CRYPTO_WRITE_MASK_ALL);

exit:
	return ret;
}

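/*
 * Restore a previously stored intermediate hash state. If @mid_data was
 * never populated (valid_flag == 0), the mid-data registers are simply
 * cleared so the next calculation starts from the initial hash state.
 */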
static int rk_hash_mid_data_restore(struct rk_crypto_dev *rk_dev, struct rk_hash_mid_data *mid_data)
{
	CRYPTO_TRACE();

	CRYPTO_WRITE(rk_dev, CRYPTO_MID_VALID_SWITCH,
		     CRYPTO_MID_VALID_ENABLE | CRYPTO_MID_VALID_ENABLE << CRYPTO_WRITE_MASK_SHIFT);

	CRYPTO_WRITE(rk_dev, CRYPTO_MID_VALID,
		     CRYPTO_HASH_MID_IS_VALID |
		     CRYPTO_HASH_MID_IS_VALID << CRYPTO_WRITE_MASK_SHIFT);

	if (!mid_data->valid_flag) {
		CRYPTO_TRACE("clear mid data");
		rk_crypto_clear_regs(rk_dev, CRYPTO_HASH_MID_DATA_0, ARRAY_SIZE(mid_data->data));
		return 0;
	}

	rk_crypto_write_regs(rk_dev, CRYPTO_HASH_MID_DATA_0,
			     (u8 *)mid_data->data, sizeof(mid_data->data));

	CRYPTO_WRITE(rk_dev, CRYPTO_HASH_CTL, mid_data->hash_ctl | CRYPTO_WRITE_MASK_ALL);

	return 0;
}

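/*
 * IRQ callback shared by all hash requests: acknowledge and mask the DMA
 * interrupts, and dump the descriptor and DMA register state whenever
 * anything other than a clean "source item done" status is reported.
 */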
static int rk_crypto_irq_handle(int irq, void *dev_id)
{
	struct rk_crypto_dev *rk_dev = platform_get_drvdata(dev_id);
	u32 interrupt_status;
	struct rk_hw_crypto_v3_info *hw_info =
		(struct rk_hw_crypto_v3_info *)rk_dev->hw_info;
	struct rk_alg_ctx *alg_ctx = rk_ahash_alg_ctx(rk_dev);

	/* disable crypto irq */
	CRYPTO_WRITE(rk_dev, CRYPTO_DMA_INT_EN, 0);

	interrupt_status = CRYPTO_READ(rk_dev, CRYPTO_DMA_INT_ST);
	CRYPTO_WRITE(rk_dev, CRYPTO_DMA_INT_ST, interrupt_status);

	interrupt_status &= CRYPTO_LOCKSTEP_MASK;

	if (interrupt_status != CRYPTO_SRC_ITEM_DONE_INT_ST) {
		dev_err(rk_dev->dev, "DMA desc = %p\n", hw_info->hw_desc.lli_head);
		dev_err(rk_dev->dev, "DMA addr_in = %08x\n",
			(u32)alg_ctx->addr_in);
		dev_err(rk_dev->dev, "DMA addr_out = %08x\n",
			(u32)alg_ctx->addr_out);
		dev_err(rk_dev->dev, "DMA count = %08x\n", alg_ctx->count);
		dev_err(rk_dev->dev, "DMA desc_dma = %08x\n",
			(u32)hw_info->hw_desc.lli_head_dma);
		dev_err(rk_dev->dev, "DMA Error status = %08x\n",
			interrupt_status);
		dev_err(rk_dev->dev, "DMA CRYPTO_DMA_LLI_ADDR status = %08x\n",
			CRYPTO_READ(rk_dev, CRYPTO_DMA_LLI_ADDR));
		dev_err(rk_dev->dev, "DMA CRYPTO_DMA_ST status = %08x\n",
			CRYPTO_READ(rk_dev, CRYPTO_DMA_ST));
		dev_err(rk_dev->dev, "DMA CRYPTO_DMA_STATE status = %08x\n",
			CRYPTO_READ(rk_dev, CRYPTO_DMA_STATE));
		dev_err(rk_dev->dev, "DMA CRYPTO_DMA_LLI_RADDR status = %08x\n",
			CRYPTO_READ(rk_dev, CRYPTO_DMA_LLI_RADDR));
		dev_err(rk_dev->dev, "DMA CRYPTO_DMA_SRC_RADDR status = %08x\n",
			CRYPTO_READ(rk_dev, CRYPTO_DMA_SRC_RADDR));
		dev_err(rk_dev->dev, "DMA CRYPTO_DMA_DST_RADDR status = %08x\n",
			CRYPTO_READ(rk_dev, CRYPTO_DMA_DST_RADDR));
		rk_dev->err = -EFAULT;
	}

	return 0;
}

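/*
 * Completion callback: on error, reset the engine and dump the head LLI
 * descriptor for debugging. If the request still has data in flight
 * (alg_ctx->total != 0), park the intermediate state in ctx->priv before
 * signalling completion to the crypto API.
 */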
static void rk_ahash_crypto_complete(struct crypto_async_request *base, int err)
{
	struct ahash_request *req = ahash_request_cast(base);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct rk_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
	struct rk_alg_ctx *alg_ctx = rk_ahash_alg_ctx(ctx->rk_dev);

	struct rk_hw_crypto_v3_info *hw_info = ctx->rk_dev->hw_info;
	struct crypto_lli_desc *lli_desc = hw_info->hw_desc.lli_head;

	if (err) {
		rk_hash_reset(ctx->rk_dev);
		pr_err("aligned = %u, align_size = %u\n",
		       alg_ctx->aligned, alg_ctx->align_size);
		pr_err("total = %u, left = %u, count = %u\n",
		       alg_ctx->total, alg_ctx->left_bytes, alg_ctx->count);
		pr_err("lli->src = %08x\n", lli_desc->src_addr);
		pr_err("lli->src_len = %08x\n", lli_desc->src_len);
		pr_err("lli->dst = %08x\n", lli_desc->dst_addr);
		pr_err("lli->dst_len = %08x\n", lli_desc->dst_len);
		pr_err("lli->dma_ctl = %08x\n", lli_desc->dma_ctrl);
		pr_err("lli->usr_def = %08x\n", lli_desc->user_define);
		pr_err("lli->next = %08x\n\n\n", lli_desc->next_addr);
	}

	if (alg_ctx->total)
		rk_hash_mid_data_store(ctx->rk_dev, (struct rk_hash_mid_data *)ctx->priv);

	if (base->complete)
		base->complete(base, err);
}

static inline void clear_hash_out_reg(struct rk_crypto_dev *rk_dev)
{
	rk_crypto_clear_regs(rk_dev, CRYPTO_HASH_DOUT_0, 16);
}

static int write_key_reg(struct rk_crypto_dev *rk_dev, const u8 *key,
			 u32 key_len)
{
	rk_crypto_write_regs(rk_dev, CRYPTO_CH0_KEY_0, key, key_len);

	return 0;
}

static int rk_hw_hash_init(struct rk_crypto_dev *rk_dev, u32 algo, u32 type)
{
	u32 reg_ctrl = 0;
	struct rk_ahash_ctx *ctx = rk_ahash_ctx_cast(rk_dev);
	struct rk_hash_mid_data *mid_data = (struct rk_hash_mid_data *)ctx->priv;

	if (algo >= ARRAY_SIZE(hash_algo2bc))
		goto exit;

	rk_hash_reset(rk_dev);

	clear_hash_out_reg(rk_dev);

	reg_ctrl = hash_algo2bc[algo] | CRYPTO_HW_PAD_ENABLE;

	if (IS_TYPE_HMAC(type)) {
		CRYPTO_TRACE("this is hmac");
		reg_ctrl |= CRYPTO_HMAC_ENABLE;
	}

	CRYPTO_WRITE(rk_dev, CRYPTO_HASH_CTL, reg_ctrl | CRYPTO_WRITE_MASK_ALL);
	CRYPTO_WRITE(rk_dev, CRYPTO_FIFO_CTL, 0x00030003);

	memset(mid_data, 0x00, sizeof(*mid_data));

	return 0;
exit:
	CRYPTO_WRITE(rk_dev, CRYPTO_HASH_CTL, 0 | CRYPTO_WRITE_MASK_ALL);

	return -EINVAL;
}

static void clean_hash_setting(struct rk_crypto_dev *rk_dev)
{
	CRYPTO_WRITE(rk_dev, CRYPTO_DMA_INT_EN, 0);
	CRYPTO_WRITE(rk_dev, CRYPTO_HASH_CTL, 0 | CRYPTO_WRITE_MASK_ALL);
}

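/*
 * NOTE: import()/export() below are still stubs: only a statesize-sized
 * copy through a zeroed local is performed, so partial hash state does not
 * actually survive an export/import cycle. A minimal sketch of a real
 * export, assuming the software context plus the mid-data held in
 * ctx->priv are sufficient to resume (field accesses beyond what this
 * file defines are hypothetical):
 *
 *	struct rk_ahash_expt_ctx *state = out;
 *
 *	memcpy(&state->ctx, ctx, sizeof(state->ctx));
 *	rk_hash_mid_data_store(ctx->rk_dev, ctx->priv);
 *
 * with the inverse (memcpy plus rk_hash_mid_data_restore()) in import().
 */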
static int rk_ahash_import(struct ahash_request *req, const void *in)
{
	struct rk_ahash_expt_ctx state;

	/* 'in' may not be aligned so memcpy to local variable */
	memcpy(&state, in, sizeof(state));

	///TODO: deal with import

	return 0;
}

static int rk_ahash_export(struct ahash_request *req, void *out)
{
	struct rk_ahash_expt_ctx state;

	/* Don't let anything leak to 'out' */
	memset(&state, 0, sizeof(state));

	///TODO: deal with export

	memcpy(out, &state, sizeof(state));

	return 0;
}

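/*
 * Build and kick the DMA descriptor chain for one update chunk. Non-final
 * chunks must be a multiple of RK_DMA_ALIGNMENT. The first chunk of a
 * request restores any saved mid-data and starts the hash engine; the
 * tail descriptor of the final chunk is tagged LLI_USER_STRING_LAST so
 * the hardware applies padding and finalizes the digest.
 */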
static int rk_ahash_dma_start(struct rk_crypto_dev *rk_dev, uint32_t flag)
{
	struct rk_hw_crypto_v3_info *hw_info =
		(struct rk_hw_crypto_v3_info *)rk_dev->hw_info;
	struct rk_alg_ctx *alg_ctx = rk_ahash_alg_ctx(rk_dev);
	struct rk_ahash_ctx *ctx = rk_ahash_ctx_cast(rk_dev);
	struct crypto_lli_desc *lli_head, *lli_tail;
	u32 dma_ctl = CRYPTO_DMA_RESTART;
	bool is_final = flag & RK_FLAG_FINAL;
	int ret;

	CRYPTO_TRACE("ctx->calc_cnt = %u, count %u Byte, is_final = %d",
		     ctx->calc_cnt, alg_ctx->count, is_final);

	if (alg_ctx->count % RK_DMA_ALIGNMENT && !is_final) {
		dev_err(rk_dev->dev, "count = %u is not aligned with [%u]\n",
			alg_ctx->count, RK_DMA_ALIGNMENT);
		return -EINVAL;
	}

	if (alg_ctx->count == 0) {
		/* do nothing */
		CRYPTO_TRACE("empty calc");
		return 0;
	}

	if (alg_ctx->total == alg_ctx->left_bytes + alg_ctx->count)
		rk_hash_mid_data_restore(rk_dev, (struct rk_hash_mid_data *)ctx->priv);

	if (alg_ctx->aligned)
		ret = rk_crypto_hw_desc_init(&hw_info->hw_desc,
					     alg_ctx->sg_src, NULL, alg_ctx->count);
	else
		ret = rk_crypto_hw_desc_init(&hw_info->hw_desc,
					     &alg_ctx->sg_tmp, NULL, alg_ctx->count);
	if (ret)
		return ret;

	lli_head = hw_info->hw_desc.lli_head;
	lli_tail = hw_info->hw_desc.lli_tail;

	lli_tail->dma_ctrl = is_final ? LLI_DMA_CTRL_LAST : LLI_DMA_CTRL_PAUSE;
	lli_tail->dma_ctrl |= LLI_DMA_CTRL_SRC_DONE;

	if (ctx->calc_cnt == 0) {
		dma_ctl = CRYPTO_DMA_START;

		lli_head->user_define |= LLI_USER_CIPHER_START;
		lli_head->user_define |= LLI_USER_STRING_START;

		CRYPTO_WRITE(rk_dev, CRYPTO_DMA_LLI_ADDR, hw_info->hw_desc.lli_head_dma);
		CRYPTO_WRITE(rk_dev, CRYPTO_HASH_CTL,
			     (CRYPTO_HASH_ENABLE << CRYPTO_WRITE_MASK_SHIFT) |
			     CRYPTO_HASH_ENABLE);
	}

	if (is_final && alg_ctx->left_bytes == 0)
		lli_tail->user_define |= LLI_USER_STRING_LAST;

	CRYPTO_TRACE("dma_ctrl = %08x, user_define = %08x, len = %u",
		     lli_head->dma_ctrl, lli_head->user_define, alg_ctx->count);

	rk_crypto_dump_hw_desc(&hw_info->hw_desc);

	dma_wmb();

	/* enable crypto irq */
	CRYPTO_WRITE(rk_dev, CRYPTO_DMA_INT_EN, 0x7f);

	CRYPTO_WRITE(rk_dev, CRYPTO_DMA_CTL, dma_ctl | dma_ctl << CRYPTO_WRITE_MASK_SHIFT);

	return 0;
}

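/*
 * Read the final digest: poll until CRYPTO_HASH_VALID reports completion,
 * copy CRYPTO_HASH_DOUT_n into @data, acknowledge the valid flag and tear
 * down the hash configuration. The mid-data in ctx->priv is invalidated
 * up front since the calculation is finished.
 */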
static int rk_ahash_get_result(struct rk_crypto_dev *rk_dev,
			       uint8_t *data, uint32_t data_len)
{
	int ret = 0;
	u32 reg_ctrl = 0;
	struct rk_ahash_ctx *ctx = rk_ahash_ctx_cast(rk_dev);

	memset(ctx->priv, 0x00, sizeof(struct rk_hash_mid_data));

	ret = read_poll_timeout_atomic(CRYPTO_READ, reg_ctrl,
				       reg_ctrl & CRYPTO_HASH_IS_VALID,
				       RK_POLL_PERIOD_US,
				       RK_POLL_TIMEOUT_US, false,
				       rk_dev, CRYPTO_HASH_VALID);
	if (ret)
		goto exit;

	rk_crypto_read_regs(rk_dev, CRYPTO_HASH_DOUT_0, data, data_len);

	CRYPTO_WRITE(rk_dev, CRYPTO_HASH_VALID, CRYPTO_HASH_IS_VALID);

exit:
	clean_hash_setting(rk_dev);

	return ret;
}

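/*
 * Per-tfm initialization: wire the generic ahash ops to the v3 hardware
 * callbacks, allocate a zeroed DMA32 scratch page (ctx->hash_tmp) and a
 * private buffer for the intermediate hash state (ctx->priv), then claim
 * the crypto engine for this algorithm.
 */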
static int rk_cra_hash_init(struct crypto_tfm *tfm)
{
	struct rk_crypto_algt *algt =
		rk_ahash_get_algt(__crypto_ahash_cast(tfm));
	const char *alg_name = crypto_tfm_alg_name(tfm);
	struct rk_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
	struct rk_crypto_dev *rk_dev = algt->rk_dev;
	struct rk_alg_ctx *alg_ctx = &ctx->algs_ctx;

	CRYPTO_TRACE();

	memset(ctx, 0x00, sizeof(*ctx));

	if (!rk_dev->request_crypto)
		return -EFAULT;

	alg_ctx->align_size = RK_DMA_ALIGNMENT;

	alg_ctx->ops.start      = rk_ahash_start;
	alg_ctx->ops.update     = rk_ahash_crypto_rx;
	alg_ctx->ops.complete   = rk_ahash_crypto_complete;
	alg_ctx->ops.irq_handle = rk_crypto_irq_handle;

	alg_ctx->ops.hw_write_key  = write_key_reg;
	alg_ctx->ops.hw_init       = rk_hw_hash_init;
	alg_ctx->ops.hw_dma_start  = rk_ahash_dma_start;
	alg_ctx->ops.hw_get_result = rk_ahash_get_result;

	ctx->rk_dev = rk_dev;
	ctx->hash_tmp = (u8 *)get_zeroed_page(GFP_KERNEL | GFP_DMA32);
	if (!ctx->hash_tmp) {
		dev_err(rk_dev->dev, "Can't get zeroed page for hash tmp.\n");
		return -ENOMEM;
	}

	ctx->priv = kmalloc(sizeof(struct rk_hash_mid_data), GFP_KERNEL);
	if (!ctx->priv) {
		free_page((unsigned long)ctx->hash_tmp);
		return -ENOMEM;
	}

	memset(ctx->priv, 0x00, sizeof(struct rk_hash_mid_data));

	rk_dev->request_crypto(rk_dev, alg_name);

	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm), sizeof(struct rk_ahash_rctx));

	algt->alg.hash.halg.statesize = sizeof(struct rk_ahash_expt_ctx);

	return 0;
}

static void rk_cra_hash_exit(struct crypto_tfm *tfm)
{
	struct rk_ahash_ctx *ctx = crypto_tfm_ctx(tfm);

	CRYPTO_TRACE();

	if (ctx->hash_tmp)
		free_page((unsigned long)ctx->hash_tmp);

	kfree(ctx->priv);

	ctx->rk_dev->release_crypto(ctx->rk_dev, crypto_tfm_alg_name(tfm));
}

struct rk_crypto_algt rk_v3_ahash_md5    = RK_HASH_ALGO_INIT(MD5, md5);
struct rk_crypto_algt rk_v3_ahash_sha1   = RK_HASH_ALGO_INIT(SHA1, sha1);
struct rk_crypto_algt rk_v3_ahash_sha224 = RK_HASH_ALGO_INIT(SHA224, sha224);
struct rk_crypto_algt rk_v3_ahash_sha256 = RK_HASH_ALGO_INIT(SHA256, sha256);
struct rk_crypto_algt rk_v3_ahash_sha384 = RK_HASH_ALGO_INIT(SHA384, sha384);
struct rk_crypto_algt rk_v3_ahash_sha512 = RK_HASH_ALGO_INIT(SHA512, sha512);
struct rk_crypto_algt rk_v3_ahash_sm3    = RK_HASH_ALGO_INIT(SM3, sm3);

struct rk_crypto_algt rk_v3_hmac_md5    = RK_HMAC_ALGO_INIT(MD5, md5);
struct rk_crypto_algt rk_v3_hmac_sha1   = RK_HMAC_ALGO_INIT(SHA1, sha1);
struct rk_crypto_algt rk_v3_hmac_sha256 = RK_HMAC_ALGO_INIT(SHA256, sha256);
struct rk_crypto_algt rk_v3_hmac_sha512 = RK_HMAC_ALGO_INIT(SHA512, sha512);
struct rk_crypto_algt rk_v3_hmac_sm3    = RK_HMAC_ALGO_INIT(SM3, sm3);