// SPDX-License-Identifier: GPL-2.0
/*
 * amlogic-gxl-core.c - hardware cryptographic offloader for Amlogic GXL SoC
 *
 * Copyright (C) 2018-2019 Corentin Labbe <clabbe@baylibre.com>
 *
 * Core file which registers crypto algorithms supported by the hardware.
 */
#include <linux/clk.h>
#include <linux/crypto.h>
#include <linux/io.h>
#include <linux/interrupt.h>
#include <linux/irq.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>
#include <crypto/internal/skcipher.h>
#include <linux/dma-mapping.h>

#include "amlogic-gxl.h"

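/*
 * Per-flow interrupt handler. Each flow has its own IRQ line; when one
 * fires, the handler reads that flow's status register, acknowledges it
 * (the 0xF write below is assumed to clear all pending status bits) and
 * completes the flow so the waiting request can finish.
 */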
static irqreturn_t meson_irq_handler(int irq, void *data)
{
	struct meson_dev *mc = (struct meson_dev *)data;
	int flow;
	u32 p;

	for (flow = 0; flow < MAXFLOW; flow++) {
		if (mc->irqs[flow] == irq) {
			p = readl(mc->base + ((0x4 + flow) << 2));
			if (p) {
				writel_relaxed(0xF, mc->base + ((0x4 + flow) << 2));
				mc->chanlist[flow].status = 1;
				complete(&mc->chanlist[flow].complete);
				return IRQ_HANDLED;
			}
			dev_err(mc->dev, "%s %d Got irq for flow %d but ctrl is empty\n",
				__func__, irq, flow);
		}
	}

	dev_err(mc->dev, "%s %d from unknown irq\n", __func__, irq);
	return IRQ_HANDLED;
}

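/*
 * Template list of algorithms offloaded to the hardware. Only AES in CBC
 * and ECB mode are handled here; requests the driver cannot process
 * directly are expected to go through a software fallback, hence
 * CRYPTO_ALG_NEED_FALLBACK.
 */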
static struct meson_alg_template mc_algs[] = {
{
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.blockmode = MESON_OPMODE_CBC,
	.alg.skcipher = {
		.base = {
			.cra_name = "cbc(aes)",
			.cra_driver_name = "cbc-aes-gxl",
			.cra_priority = 400,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
				CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
				CRYPTO_ALG_NEED_FALLBACK,
			.cra_ctxsize = sizeof(struct meson_cipher_tfm_ctx),
			.cra_module = THIS_MODULE,
			.cra_alignmask = 0xf,
			.cra_init = meson_cipher_init,
			.cra_exit = meson_cipher_exit,
		},
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= meson_aes_setkey,
		.encrypt	= meson_skencrypt,
		.decrypt	= meson_skdecrypt,
	}
},
{
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.blockmode = MESON_OPMODE_ECB,
	.alg.skcipher = {
		.base = {
			.cra_name = "ecb(aes)",
			.cra_driver_name = "ecb-aes-gxl",
			.cra_priority = 400,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER |
				CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
				CRYPTO_ALG_NEED_FALLBACK,
			.cra_ctxsize = sizeof(struct meson_cipher_tfm_ctx),
			.cra_module = THIS_MODULE,
			.cra_alignmask = 0xf,
			.cra_init = meson_cipher_init,
			.cra_exit = meson_cipher_exit,
		},
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.setkey		= meson_aes_setkey,
		.encrypt	= meson_skencrypt,
		.decrypt	= meson_skdecrypt,
	}
},
};

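/*
 * Optional debugfs support: expose a "stats" file with per-flow request
 * counts and, for each registered algorithm, the number of requests
 * handled by the hardware and by the fallback.
 */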
#ifdef CONFIG_CRYPTO_DEV_AMLOGIC_GXL_DEBUG
static int meson_debugfs_show(struct seq_file *seq, void *v)
{
	struct meson_dev *mc = seq->private;
	int i;

	for (i = 0; i < MAXFLOW; i++)
		seq_printf(seq, "Channel %d: nreq %lu\n", i, mc->chanlist[i].stat_req);

	for (i = 0; i < ARRAY_SIZE(mc_algs); i++) {
		switch (mc_algs[i].type) {
		case CRYPTO_ALG_TYPE_SKCIPHER:
			seq_printf(seq, "%s %s %lu %lu\n",
				   mc_algs[i].alg.skcipher.base.cra_driver_name,
				   mc_algs[i].alg.skcipher.base.cra_name,
				   mc_algs[i].stat_req, mc_algs[i].stat_fb);
			break;
		}
	}
	return 0;
}
DEFINE_SHOW_ATTRIBUTE(meson_debugfs);
#endif

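/*
 * Tear down flows i..0: stop the crypto engine of each flow and release
 * its DMA-coherent descriptor ring if it was allocated.
 */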
static void meson_free_chanlist(struct meson_dev *mc, int i)
{
	while (i >= 0) {
		crypto_engine_exit(mc->chanlist[i].engine);
		if (mc->chanlist[i].tl)
			dma_free_coherent(mc->dev, sizeof(struct meson_desc) * MAXDESC,
					  mc->chanlist[i].tl,
					  mc->chanlist[i].t_phy);
		i--;
	}
}

/*
 * Allocate the channel list: one crypto engine and one DMA-coherent ring
 * of MAXDESC descriptors per flow.
 */
static int meson_allocate_chanlist(struct meson_dev *mc)
{
	int i, err;

	mc->chanlist = devm_kcalloc(mc->dev, MAXFLOW,
				    sizeof(struct meson_flow), GFP_KERNEL);
	if (!mc->chanlist)
		return -ENOMEM;

	for (i = 0; i < MAXFLOW; i++) {
		init_completion(&mc->chanlist[i].complete);

		mc->chanlist[i].engine = crypto_engine_alloc_init(mc->dev, true);
		if (!mc->chanlist[i].engine) {
			dev_err(mc->dev, "Cannot allocate engine\n");
			i--;
			err = -ENOMEM;
			goto error_engine;
		}
		err = crypto_engine_start(mc->chanlist[i].engine);
		if (err) {
			dev_err(mc->dev, "Cannot start engine\n");
			goto error_engine;
		}
		mc->chanlist[i].tl = dma_alloc_coherent(mc->dev,
							sizeof(struct meson_desc) * MAXDESC,
							&mc->chanlist[i].t_phy,
							GFP_KERNEL);
		if (!mc->chanlist[i].tl) {
			err = -ENOMEM;
			goto error_engine;
		}
	}
	return 0;
error_engine:
	meson_free_chanlist(mc, i);
	return err;
}

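/*
 * Register every template of mc_algs with the crypto API. On failure the
 * corresponding ->mc pointer is reset so meson_unregister_algs() skips it.
 */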
static int meson_register_algs(struct meson_dev *mc)
{
	int err, i;

	for (i = 0; i < ARRAY_SIZE(mc_algs); i++) {
		mc_algs[i].mc = mc;
		switch (mc_algs[i].type) {
		case CRYPTO_ALG_TYPE_SKCIPHER:
			err = crypto_register_skcipher(&mc_algs[i].alg.skcipher);
			if (err) {
				dev_err(mc->dev, "Fail to register %s\n",
					mc_algs[i].alg.skcipher.base.cra_name);
				mc_algs[i].mc = NULL;
				return err;
			}
			break;
		}
	}

	return 0;
}

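/* Unregister every algorithm that was successfully registered (->mc set). */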
static void meson_unregister_algs(struct meson_dev *mc)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(mc_algs); i++) {
		if (!mc_algs[i].mc)
			continue;
		switch (mc_algs[i].type) {
		case CRYPTO_ALG_TYPE_SKCIPHER:
			crypto_unregister_skcipher(&mc_algs[i].alg.skcipher);
			break;
		}
	}
}

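/*
 * Probe: map the MMIO region, grab the "blkmv" clock, request one IRQ per
 * flow, enable the clock, allocate the per-flow engines and descriptor
 * rings, register the algorithms and (optionally) create the debugfs
 * entries.
 */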
static int meson_crypto_probe(struct platform_device *pdev)
{
	struct meson_dev *mc;
	int err, i;

	if (!pdev->dev.of_node)
		return -ENODEV;

	mc = devm_kzalloc(&pdev->dev, sizeof(*mc), GFP_KERNEL);
	if (!mc)
		return -ENOMEM;

	mc->dev = &pdev->dev;
	platform_set_drvdata(pdev, mc);

	mc->base = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(mc->base)) {
		err = PTR_ERR(mc->base);
		dev_err(&pdev->dev, "Cannot request MMIO err=%d\n", err);
		return err;
	}
	mc->busclk = devm_clk_get(&pdev->dev, "blkmv");
	if (IS_ERR(mc->busclk)) {
		err = PTR_ERR(mc->busclk);
		dev_err(&pdev->dev, "Cannot get core clock err=%d\n", err);
		return err;
	}

	mc->irqs = devm_kcalloc(mc->dev, MAXFLOW, sizeof(int), GFP_KERNEL);
	if (!mc->irqs)
		return -ENOMEM;
	for (i = 0; i < MAXFLOW; i++) {
		mc->irqs[i] = platform_get_irq(pdev, i);
		if (mc->irqs[i] < 0)
			return mc->irqs[i];

		err = devm_request_irq(&pdev->dev, mc->irqs[i], meson_irq_handler, 0,
				       "gxl-crypto", mc);
		if (err < 0) {
			dev_err(mc->dev, "Cannot request IRQ for flow %d\n", i);
			return err;
		}
	}

	err = clk_prepare_enable(mc->busclk);
	if (err != 0) {
		dev_err(&pdev->dev, "Cannot prepare_enable busclk\n");
		return err;
	}

	err = meson_allocate_chanlist(mc);
	if (err)
		goto error_flow;

	err = meson_register_algs(mc);
	if (err)
		goto error_alg;

#ifdef CONFIG_CRYPTO_DEV_AMLOGIC_GXL_DEBUG
	mc->dbgfs_dir = debugfs_create_dir("gxl-crypto", NULL);
	debugfs_create_file("stats", 0444, mc->dbgfs_dir, mc, &meson_debugfs_fops);
#endif

	return 0;
error_alg:
	meson_unregister_algs(mc);
error_flow:
	meson_free_chanlist(mc, MAXFLOW - 1);
	clk_disable_unprepare(mc->busclk);
	return err;
}

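/* Remove: undo probe in reverse order (debugfs, algorithms, flows, clock). */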
static int meson_crypto_remove(struct platform_device *pdev)
{
	struct meson_dev *mc = platform_get_drvdata(pdev);

#ifdef CONFIG_CRYPTO_DEV_AMLOGIC_GXL_DEBUG
	debugfs_remove_recursive(mc->dbgfs_dir);
#endif

	meson_unregister_algs(mc);

	meson_free_chanlist(mc, MAXFLOW - 1);

	clk_disable_unprepare(mc->busclk);
	return 0;
}

static const struct of_device_id meson_crypto_of_match_table[] = {
	{ .compatible = "amlogic,gxl-crypto", },
	{}
};
MODULE_DEVICE_TABLE(of, meson_crypto_of_match_table);

static struct platform_driver meson_crypto_driver = {
	.probe		 = meson_crypto_probe,
	.remove		 = meson_crypto_remove,
	.driver		 = {
		.name		   = "gxl-crypto",
		.of_match_table	= meson_crypto_of_match_table,
	},
};

module_platform_driver(meson_crypto_driver);

MODULE_DESCRIPTION("Amlogic GXL cryptographic offloader");
MODULE_LICENSE("GPL");
MODULE_AUTHOR("Corentin Labbe <clabbe@baylibre.com>");