// SPDX-License-Identifier: GPL-2.0-only
/*
 *   Driver for ARTPEC-6 crypto block using the kernel asynchronous crypto api.
 *
 *    Copyright (C) 2014-2017  Axis Communications AB
 */
#define pr_fmt(fmt)     KBUILD_MODNAME ": " fmt

#include <linux/bitfield.h>
#include <linux/crypto.h>
#include <linux/debugfs.h>
#include <linux/delay.h>
#include <linux/dma-mapping.h>
#include <linux/fault-inject.h>
#include <linux/init.h>
#include <linux/interrupt.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/platform_device.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

#include <crypto/aes.h>
#include <crypto/gcm.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <crypto/sha.h>
#include <crypto/xts.h>

/* Max length of a line in all cache levels for Artpec SoCs. */
#define ARTPEC_CACHE_LINE_MAX	32

#define PDMA_OUT_CFG		0x0000
#define PDMA_OUT_BUF_CFG	0x0004
#define PDMA_OUT_CMD		0x0008
#define PDMA_OUT_DESCRQ_PUSH	0x0010
#define PDMA_OUT_DESCRQ_STAT	0x0014

#define A6_PDMA_IN_CFG		0x0028
#define A6_PDMA_IN_BUF_CFG	0x002c
#define A6_PDMA_IN_CMD		0x0030
#define A6_PDMA_IN_STATQ_PUSH	0x0038
#define A6_PDMA_IN_DESCRQ_PUSH	0x0044
#define A6_PDMA_IN_DESCRQ_STAT	0x0048
#define A6_PDMA_INTR_MASK	0x0068
#define A6_PDMA_ACK_INTR	0x006c
#define A6_PDMA_MASKED_INTR	0x0074

#define A7_PDMA_IN_CFG		0x002c
#define A7_PDMA_IN_BUF_CFG	0x0030
#define A7_PDMA_IN_CMD		0x0034
#define A7_PDMA_IN_STATQ_PUSH	0x003c
#define A7_PDMA_IN_DESCRQ_PUSH	0x0048
#define A7_PDMA_IN_DESCRQ_STAT	0x004C
#define A7_PDMA_INTR_MASK	0x006c
#define A7_PDMA_ACK_INTR	0x0070
#define A7_PDMA_MASKED_INTR	0x0078

#define PDMA_OUT_CFG_EN				BIT(0)

#define PDMA_OUT_BUF_CFG_DATA_BUF_SIZE		GENMASK(4, 0)
#define PDMA_OUT_BUF_CFG_DESCR_BUF_SIZE		GENMASK(9, 5)

#define PDMA_OUT_CMD_START			BIT(0)
#define A6_PDMA_OUT_CMD_STOP			BIT(3)
#define A7_PDMA_OUT_CMD_STOP			BIT(2)

#define PDMA_OUT_DESCRQ_PUSH_LEN		GENMASK(5, 0)
#define PDMA_OUT_DESCRQ_PUSH_ADDR		GENMASK(31, 6)

#define PDMA_OUT_DESCRQ_STAT_LEVEL		GENMASK(3, 0)
#define PDMA_OUT_DESCRQ_STAT_SIZE		GENMASK(7, 4)

#define PDMA_IN_CFG_EN				BIT(0)

#define PDMA_IN_BUF_CFG_DATA_BUF_SIZE		GENMASK(4, 0)
#define PDMA_IN_BUF_CFG_DESCR_BUF_SIZE		GENMASK(9, 5)
#define PDMA_IN_BUF_CFG_STAT_BUF_SIZE		GENMASK(14, 10)

#define PDMA_IN_CMD_START			BIT(0)
#define A6_PDMA_IN_CMD_FLUSH_STAT		BIT(2)
#define A6_PDMA_IN_CMD_STOP			BIT(3)
#define A7_PDMA_IN_CMD_FLUSH_STAT		BIT(1)
#define A7_PDMA_IN_CMD_STOP			BIT(2)

#define PDMA_IN_STATQ_PUSH_LEN			GENMASK(5, 0)
#define PDMA_IN_STATQ_PUSH_ADDR			GENMASK(31, 6)

#define PDMA_IN_DESCRQ_PUSH_LEN			GENMASK(5, 0)
#define PDMA_IN_DESCRQ_PUSH_ADDR		GENMASK(31, 6)

#define PDMA_IN_DESCRQ_STAT_LEVEL		GENMASK(3, 0)
#define PDMA_IN_DESCRQ_STAT_SIZE		GENMASK(7, 4)

#define A6_PDMA_INTR_MASK_IN_DATA		BIT(2)
#define A6_PDMA_INTR_MASK_IN_EOP		BIT(3)
#define A6_PDMA_INTR_MASK_IN_EOP_FLUSH		BIT(4)

#define A7_PDMA_INTR_MASK_IN_DATA		BIT(3)
#define A7_PDMA_INTR_MASK_IN_EOP		BIT(4)
#define A7_PDMA_INTR_MASK_IN_EOP_FLUSH		BIT(5)

#define A6_CRY_MD_OPER		GENMASK(19, 16)

#define A6_CRY_MD_HASH_SEL_CTX	GENMASK(21, 20)
#define A6_CRY_MD_HASH_HMAC_FIN	BIT(23)

#define A6_CRY_MD_CIPHER_LEN	GENMASK(21, 20)
#define A6_CRY_MD_CIPHER_DECR	BIT(22)
#define A6_CRY_MD_CIPHER_TWEAK	BIT(23)
#define A6_CRY_MD_CIPHER_DSEQ	BIT(24)

#define A7_CRY_MD_OPER		GENMASK(11, 8)

#define A7_CRY_MD_HASH_SEL_CTX	GENMASK(13, 12)
#define A7_CRY_MD_HASH_HMAC_FIN	BIT(15)

#define A7_CRY_MD_CIPHER_LEN	GENMASK(13, 12)
#define A7_CRY_MD_CIPHER_DECR	BIT(14)
#define A7_CRY_MD_CIPHER_TWEAK	BIT(15)
#define A7_CRY_MD_CIPHER_DSEQ	BIT(16)

/* DMA metadata constants */
#define regk_crypto_aes_cbc     0x00000002
#define regk_crypto_aes_ctr     0x00000003
#define regk_crypto_aes_ecb     0x00000001
#define regk_crypto_aes_gcm     0x00000004
#define regk_crypto_aes_xts     0x00000005
#define regk_crypto_cache       0x00000002
#define a6_regk_crypto_dlkey    0x0000000a
#define a7_regk_crypto_dlkey    0x0000000e
#define regk_crypto_ext         0x00000001
#define regk_crypto_hmac_sha1   0x00000007
#define regk_crypto_hmac_sha256 0x00000009
#define regk_crypto_init        0x00000000
#define regk_crypto_key_128     0x00000000
#define regk_crypto_key_192     0x00000001
#define regk_crypto_key_256     0x00000002
#define regk_crypto_null        0x00000000
#define regk_crypto_sha1        0x00000006
#define regk_crypto_sha256      0x00000008

/* DMA descriptor structures */
struct pdma_descr_ctrl  {
	unsigned char short_descr : 1;
	unsigned char pad1        : 1;
	unsigned char eop         : 1;
	unsigned char intr        : 1;
	unsigned char short_len   : 3;
	unsigned char pad2        : 1;
} __packed;

struct pdma_data_descr {
	unsigned int len : 24;
	unsigned int buf : 32;
} __packed;

struct pdma_short_descr {
	unsigned char data[7];
} __packed;

struct pdma_descr {
	struct pdma_descr_ctrl ctrl;
	union {
		struct pdma_data_descr   data;
		struct pdma_short_descr  shrt;
	};
};

struct pdma_stat_descr {
	unsigned char pad1        : 1;
	unsigned char pad2        : 1;
	unsigned char eop         : 1;
	unsigned char pad3        : 5;
	unsigned int  len         : 24;
};

/* Each descriptor array can hold max 64 entries */
#define PDMA_DESCR_COUNT	64

#define MODULE_NAME   "Artpec-6 CA"

/* Hash modes (including HMAC variants) */
#define ARTPEC6_CRYPTO_HASH_SHA1	1
#define ARTPEC6_CRYPTO_HASH_SHA256	2

/* Crypto modes */
#define ARTPEC6_CRYPTO_CIPHER_AES_ECB	1
#define ARTPEC6_CRYPTO_CIPHER_AES_CBC	2
#define ARTPEC6_CRYPTO_CIPHER_AES_CTR	3
#define ARTPEC6_CRYPTO_CIPHER_AES_XTS	5

/* The PDMA is a DMA-engine tightly coupled with a ciphering engine.
 * It operates on a descriptor array with up to 64 descriptor entries.
 * The arrays must be 64-byte aligned in memory.
 *
 * The ciphering unit has no registers and is completely controlled by
 * a 4-byte metadata word that is inserted at the beginning of each DMA
 * packet.
 *
 * A DMA packet is a sequence of descriptors terminated by setting the .eop
 * field in the final descriptor of the packet.
 *
 * Multiple packets are used for providing context data, key data and
 * the plain/ciphertext.
 *
 *   PDMA Descriptors (Array)
 *  +------+------+------+~~+-------+------+----
 *  |  0   |  1   |  2   |~~| 11 EOP|  12  |  ....
 *  +--+---+--+---+----+-+~~+-------+----+-+----
 *     |      |        |       |         |
 *     |      |        |       |         |
 *   __|__  +-------++-------++-------+ +----+
 *  | MD  | |Payload||Payload||Payload| | MD |
 *  +-----+ +-------++-------++-------+ +----+
 */
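
/*
 * Illustrative sketch (not part of the driver): how a single DMA packet
 * could be composed with the descriptor helpers defined further down in
 * this file. The metadata word "md" and the payload names are made up for
 * the example, and error handling is omitted:
 *
 *	u32 md = FIELD_PREP(A6_CRY_MD_OPER, regk_crypto_aes_cbc);
 *
 *	artpec6_crypto_setup_out_descr(common, &md, sizeof(md), false, false);
 *	artpec6_crypto_setup_out_descr(common, payload, payload_len, false, false);
 *	artpec6_crypto_terminate_out_descrs(common); // sets .eop on the last descriptor
 *	artpec6_crypto_terminate_in_descrs(common);  // requests the completion interrupt
 */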

struct artpec6_crypto_bounce_buffer {
	struct list_head list;
	size_t length;
	struct scatterlist *sg;
	size_t offset;
	/* buf is aligned to ARTPEC_CACHE_LINE_MAX and
	 * holds up to ARTPEC_CACHE_LINE_MAX bytes of data.
	 */
	void *buf;
};

struct artpec6_crypto_dma_map {
	dma_addr_t dma_addr;
	size_t size;
	enum dma_data_direction dir;
};

struct artpec6_crypto_dma_descriptors {
	struct pdma_descr out[PDMA_DESCR_COUNT] __aligned(64);
	struct pdma_descr in[PDMA_DESCR_COUNT] __aligned(64);
	u32 stat[PDMA_DESCR_COUNT] __aligned(64);
	struct list_head bounce_buffers;
	/* Enough maps for all out/in buffers, and all three descr. arrays */
	struct artpec6_crypto_dma_map maps[PDMA_DESCR_COUNT * 2 + 2];
	dma_addr_t out_dma_addr;
	dma_addr_t in_dma_addr;
	dma_addr_t stat_dma_addr;
	size_t out_cnt;
	size_t in_cnt;
	size_t map_count;
};

enum artpec6_crypto_variant {
	ARTPEC6_CRYPTO,
	ARTPEC7_CRYPTO,
};

struct artpec6_crypto {
	void __iomem *base;
	spinlock_t queue_lock;
	struct list_head queue; /* waiting for pdma fifo space */
	struct list_head pending; /* submitted to pdma fifo */
	struct tasklet_struct task;
	struct kmem_cache *dma_cache;
	int pending_count;
	struct timer_list timer;
	enum artpec6_crypto_variant variant;
	void *pad_buffer; /* cache-aligned block padding buffer */
	void *zero_buffer;
};

enum artpec6_crypto_hash_flags {
	HASH_FLAG_INIT_CTX = 2,
	HASH_FLAG_UPDATE = 4,
	HASH_FLAG_FINALIZE = 8,
	HASH_FLAG_HMAC = 16,
	HASH_FLAG_UPDATE_KEY = 32,
};

struct artpec6_crypto_req_common {
	struct list_head list;
	struct list_head complete_in_progress;
	struct artpec6_crypto_dma_descriptors *dma;
	struct crypto_async_request *req;
	void (*complete)(struct crypto_async_request *req);
	gfp_t gfp_flags;
};

struct artpec6_hash_request_context {
	char partial_buffer[SHA256_BLOCK_SIZE];
	char partial_buffer_out[SHA256_BLOCK_SIZE];
	char key_buffer[SHA256_BLOCK_SIZE];
	char pad_buffer[SHA256_BLOCK_SIZE + 32];
	unsigned char digeststate[SHA256_DIGEST_SIZE];
	size_t partial_bytes;
	u64 digcnt;
	u32 key_md;
	u32 hash_md;
	enum artpec6_crypto_hash_flags hash_flags;
	struct artpec6_crypto_req_common common;
};

struct artpec6_hash_export_state {
	char partial_buffer[SHA256_BLOCK_SIZE];
	unsigned char digeststate[SHA256_DIGEST_SIZE];
	size_t partial_bytes;
	u64 digcnt;
	int oper;
	unsigned int hash_flags;
};

struct artpec6_hashalg_context {
	char hmac_key[SHA256_BLOCK_SIZE];
	size_t hmac_key_length;
	struct crypto_shash *child_hash;
};

struct artpec6_crypto_request_context {
	u32 cipher_md;
	bool decrypt;
	struct artpec6_crypto_req_common common;
};

struct artpec6_cryptotfm_context {
	unsigned char aes_key[2*AES_MAX_KEY_SIZE];
	size_t key_length;
	u32 key_md;
	int crypto_type;
	struct crypto_sync_skcipher *fallback;
};

struct artpec6_crypto_aead_hw_ctx {
	__be64	aad_length_bits;
	__be64  text_length_bits;
	__u8	J0[AES_BLOCK_SIZE];
};

struct artpec6_crypto_aead_req_ctx {
	struct artpec6_crypto_aead_hw_ctx hw_ctx;
	u32 cipher_md;
	bool decrypt;
	struct artpec6_crypto_req_common common;
	__u8 decryption_tag[AES_BLOCK_SIZE] ____cacheline_aligned;
};

/* The crypto framework makes it hard to avoid this global. */
static struct device *artpec6_crypto_dev;

#ifdef CONFIG_FAULT_INJECTION
static DECLARE_FAULT_ATTR(artpec6_crypto_fail_status_read);
static DECLARE_FAULT_ATTR(artpec6_crypto_fail_dma_array_full);
#endif

enum {
	ARTPEC6_CRYPTO_PREPARE_HASH_NO_START,
	ARTPEC6_CRYPTO_PREPARE_HASH_START,
};

static int artpec6_crypto_prepare_aead(struct aead_request *areq);
static int artpec6_crypto_prepare_crypto(struct skcipher_request *areq);
static int artpec6_crypto_prepare_hash(struct ahash_request *areq);

static void
artpec6_crypto_complete_crypto(struct crypto_async_request *req);
static void
artpec6_crypto_complete_cbc_encrypt(struct crypto_async_request *req);
static void
artpec6_crypto_complete_cbc_decrypt(struct crypto_async_request *req);
static void
artpec6_crypto_complete_aead(struct crypto_async_request *req);
static void
artpec6_crypto_complete_hash(struct crypto_async_request *req);

static int
artpec6_crypto_common_destroy(struct artpec6_crypto_req_common *common);

static void
artpec6_crypto_start_dma(struct artpec6_crypto_req_common *common);

struct artpec6_crypto_walk {
	struct scatterlist *sg;
	size_t offset;
};

static void artpec6_crypto_walk_init(struct artpec6_crypto_walk *awalk,
				     struct scatterlist *sg)
{
	awalk->sg = sg;
	awalk->offset = 0;
}

static size_t artpec6_crypto_walk_advance(struct artpec6_crypto_walk *awalk,
					  size_t nbytes)
{
	while (nbytes && awalk->sg) {
		size_t piece;

		WARN_ON(awalk->offset > awalk->sg->length);

		piece = min(nbytes, (size_t)awalk->sg->length - awalk->offset);
		nbytes -= piece;
		awalk->offset += piece;
		if (awalk->offset == awalk->sg->length) {
			awalk->sg = sg_next(awalk->sg);
			awalk->offset = 0;
		}
	}

	return nbytes;
}

static size_t
artpec6_crypto_walk_chunklen(const struct artpec6_crypto_walk *awalk)
{
	WARN_ON(awalk->sg->length == awalk->offset);

	return awalk->sg->length - awalk->offset;
}

static dma_addr_t
artpec6_crypto_walk_chunk_phys(const struct artpec6_crypto_walk *awalk)
{
	return sg_phys(awalk->sg) + awalk->offset;
}
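
/*
 * Illustrative sketch (assumed helper, not used by the driver): consume a
 * scatterlist with the walk helpers above. This mirrors the loops in
 * artpec6_crypto_setup_sg_descrs_in/out() further down.
 */
static inline void __maybe_unused
artpec6_crypto_walk_example(struct scatterlist *sg, size_t nbytes)
{
	struct artpec6_crypto_walk awalk;

	artpec6_crypto_walk_init(&awalk, sg);

	while (awalk.sg && nbytes) {
		size_t chunk = min(nbytes,
				   artpec6_crypto_walk_chunklen(&awalk));
		dma_addr_t addr = artpec6_crypto_walk_chunk_phys(&awalk);

		/* A real caller would build a descriptor for (addr, chunk). */
		pr_debug("chunk %pad:%zu\n", &addr, chunk);

		nbytes -= chunk;
		artpec6_crypto_walk_advance(&awalk, chunk);
	}
}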

static void
artpec6_crypto_copy_bounce_buffers(struct artpec6_crypto_req_common *common)
{
	struct artpec6_crypto_dma_descriptors *dma = common->dma;
	struct artpec6_crypto_bounce_buffer *b;
	struct artpec6_crypto_bounce_buffer *next;

	list_for_each_entry_safe(b, next, &dma->bounce_buffers, list) {
		pr_debug("bounce entry %p: %zu bytes @ %zu from %p\n",
			 b, b->length, b->offset, b->buf);
		sg_pcopy_from_buffer(b->sg,
				   1,
				   b->buf,
				   b->length,
				   b->offset);

		list_del(&b->list);
		kfree(b);
	}
}

static inline bool artpec6_crypto_busy(void)
{
	struct artpec6_crypto *ac = dev_get_drvdata(artpec6_crypto_dev);
	int fifo_count = ac->pending_count;

	return fifo_count > 6;
}

static int artpec6_crypto_submit(struct artpec6_crypto_req_common *req)
{
	struct artpec6_crypto *ac = dev_get_drvdata(artpec6_crypto_dev);
	int ret = -EBUSY;

	spin_lock_bh(&ac->queue_lock);

	if (!artpec6_crypto_busy()) {
		list_add_tail(&req->list, &ac->pending);
		artpec6_crypto_start_dma(req);
		ret = -EINPROGRESS;
	} else if (req->req->flags & CRYPTO_TFM_REQ_MAY_BACKLOG) {
		list_add_tail(&req->list, &ac->queue);
	} else {
		artpec6_crypto_common_destroy(req);
	}

	spin_unlock_bh(&ac->queue_lock);

	return ret;
}

static void artpec6_crypto_start_dma(struct artpec6_crypto_req_common *common)
{
	struct artpec6_crypto *ac = dev_get_drvdata(artpec6_crypto_dev);
	enum artpec6_crypto_variant variant = ac->variant;
	void __iomem *base = ac->base;
	struct artpec6_crypto_dma_descriptors *dma = common->dma;
	u32 ind, statd, outd;

	/* Make descriptor content visible to the DMA before starting it. */
	wmb();

	ind = FIELD_PREP(PDMA_IN_DESCRQ_PUSH_LEN, dma->in_cnt - 1) |
	      FIELD_PREP(PDMA_IN_DESCRQ_PUSH_ADDR, dma->in_dma_addr >> 6);

	statd = FIELD_PREP(PDMA_IN_STATQ_PUSH_LEN, dma->in_cnt - 1) |
		FIELD_PREP(PDMA_IN_STATQ_PUSH_ADDR, dma->stat_dma_addr >> 6);

	outd = FIELD_PREP(PDMA_OUT_DESCRQ_PUSH_LEN, dma->out_cnt - 1) |
	       FIELD_PREP(PDMA_OUT_DESCRQ_PUSH_ADDR, dma->out_dma_addr >> 6);

	if (variant == ARTPEC6_CRYPTO) {
		writel_relaxed(ind, base + A6_PDMA_IN_DESCRQ_PUSH);
		writel_relaxed(statd, base + A6_PDMA_IN_STATQ_PUSH);
		writel_relaxed(PDMA_IN_CMD_START, base + A6_PDMA_IN_CMD);
	} else {
		writel_relaxed(ind, base + A7_PDMA_IN_DESCRQ_PUSH);
		writel_relaxed(statd, base + A7_PDMA_IN_STATQ_PUSH);
		writel_relaxed(PDMA_IN_CMD_START, base + A7_PDMA_IN_CMD);
	}

	writel_relaxed(outd, base + PDMA_OUT_DESCRQ_PUSH);
	writel_relaxed(PDMA_OUT_CMD_START, base + PDMA_OUT_CMD);

	ac->pending_count++;
}

static void
artpec6_crypto_init_dma_operation(struct artpec6_crypto_req_common *common)
{
	struct artpec6_crypto_dma_descriptors *dma = common->dma;

	dma->out_cnt = 0;
	dma->in_cnt = 0;
	dma->map_count = 0;
	INIT_LIST_HEAD(&dma->bounce_buffers);
}

static bool fault_inject_dma_descr(void)
{
#ifdef CONFIG_FAULT_INJECTION
	return should_fail(&artpec6_crypto_fail_dma_array_full, 1);
#else
	return false;
#endif
}

/** artpec6_crypto_setup_out_descr_phys - Setup an out channel with a
 *                                        physical address
 *
 * @addr: The physical address of the data buffer
 * @len:  The length of the data buffer
 * @eop:  True if this is the last buffer in the packet
 *
 * @return 0 on success or -ENOSPC if there are no more descriptors available
 */
static int
artpec6_crypto_setup_out_descr_phys(struct artpec6_crypto_req_common *common,
				    dma_addr_t addr, size_t len, bool eop)
{
	struct artpec6_crypto_dma_descriptors *dma = common->dma;
	struct pdma_descr *d;

	if (dma->out_cnt >= PDMA_DESCR_COUNT ||
	    fault_inject_dma_descr()) {
		pr_err("No free OUT DMA descriptors available!\n");
		return -ENOSPC;
	}

	d = &dma->out[dma->out_cnt++];
	memset(d, 0, sizeof(*d));

	d->ctrl.short_descr = 0;
	d->ctrl.eop = eop;
	d->data.len = len;
	d->data.buf = addr;
	return 0;
}

/** artpec6_crypto_setup_out_descr_short - Setup a short out descriptor
 *
 * @dst: The virtual address of the data
 * @len: The length of the data, must be between 1 and 7 bytes
 * @eop: True if this is the last buffer in the packet
 *
 * @return 0 on success
 *	-ENOSPC if no more descriptors are available
 *	-EINVAL if the data length is not between 1 and 7 bytes
 */
static int
artpec6_crypto_setup_out_descr_short(struct artpec6_crypto_req_common *common,
				     void *dst, unsigned int len, bool eop)
{
	struct artpec6_crypto_dma_descriptors *dma = common->dma;
	struct pdma_descr *d;

	if (dma->out_cnt >= PDMA_DESCR_COUNT ||
	    fault_inject_dma_descr()) {
		pr_err("No free OUT DMA descriptors available!\n");
		return -ENOSPC;
	} else if (len > 7 || len < 1) {
		return -EINVAL;
	}
	d = &dma->out[dma->out_cnt++];
	memset(d, 0, sizeof(*d));

	d->ctrl.short_descr = 1;
	d->ctrl.short_len = len;
	d->ctrl.eop = eop;
	memcpy(d->shrt.data, dst, len);
	return 0;
}

static int artpec6_crypto_dma_map_page(struct artpec6_crypto_req_common *common,
				      struct page *page, size_t offset,
				      size_t size,
				      enum dma_data_direction dir,
				      dma_addr_t *dma_addr_out)
{
	struct artpec6_crypto_dma_descriptors *dma = common->dma;
	struct device *dev = artpec6_crypto_dev;
	struct artpec6_crypto_dma_map *map;
	dma_addr_t dma_addr;

	*dma_addr_out = 0;

	if (dma->map_count >= ARRAY_SIZE(dma->maps))
		return -ENOMEM;

	dma_addr = dma_map_page(dev, page, offset, size, dir);
	if (dma_mapping_error(dev, dma_addr))
		return -ENOMEM;

	map = &dma->maps[dma->map_count++];
	map->size = size;
	map->dma_addr = dma_addr;
	map->dir = dir;

	*dma_addr_out = dma_addr;

	return 0;
}

static int
artpec6_crypto_dma_map_single(struct artpec6_crypto_req_common *common,
			      void *ptr, size_t size,
			      enum dma_data_direction dir,
			      dma_addr_t *dma_addr_out)
{
	struct page *page = virt_to_page(ptr);
	size_t offset = (uintptr_t)ptr & ~PAGE_MASK;

	return artpec6_crypto_dma_map_page(common, page, offset, size, dir,
					  dma_addr_out);
}

static int
artpec6_crypto_dma_map_descs(struct artpec6_crypto_req_common *common)
{
	struct artpec6_crypto_dma_descriptors *dma = common->dma;
	int ret;

	ret = artpec6_crypto_dma_map_single(common, dma->in,
				sizeof(dma->in[0]) * dma->in_cnt,
				DMA_TO_DEVICE, &dma->in_dma_addr);
	if (ret)
		return ret;

	ret = artpec6_crypto_dma_map_single(common, dma->out,
				sizeof(dma->out[0]) * dma->out_cnt,
				DMA_TO_DEVICE, &dma->out_dma_addr);
	if (ret)
		return ret;

	/* We only read one stat descriptor */
	dma->stat[dma->in_cnt - 1] = 0;

	/*
	 * DMA_BIDIRECTIONAL since we need our zeroing of the stat descriptor
	 * to be written.
	 */
	return artpec6_crypto_dma_map_single(common,
				dma->stat,
				sizeof(dma->stat[0]) * dma->in_cnt,
				DMA_BIDIRECTIONAL,
				&dma->stat_dma_addr);
}

static void
artpec6_crypto_dma_unmap_all(struct artpec6_crypto_req_common *common)
{
	struct artpec6_crypto_dma_descriptors *dma = common->dma;
	struct device *dev = artpec6_crypto_dev;
	int i;

	for (i = 0; i < dma->map_count; i++) {
		struct artpec6_crypto_dma_map *map = &dma->maps[i];

		dma_unmap_page(dev, map->dma_addr, map->size, map->dir);
	}

	dma->map_count = 0;
}

/** artpec6_crypto_setup_out_descr - Setup an out descriptor
 *
 * @dst: The virtual address of the data
 * @len: The length of the data
 * @eop: True if this is the last buffer in the packet
 * @use_short: If this is true and the data length is less than 7 bytes then
 *	a short descriptor will be used
 *
 * @return 0 on success
 *	Any errors from artpec6_crypto_setup_out_descr_short() or
 *	artpec6_crypto_setup_out_descr_phys()
 */
static int
artpec6_crypto_setup_out_descr(struct artpec6_crypto_req_common *common,
			       void *dst, unsigned int len, bool eop,
			       bool use_short)
{
	if (use_short && len < 7) {
		return artpec6_crypto_setup_out_descr_short(common, dst, len,
							    eop);
	} else {
		int ret;
		dma_addr_t dma_addr;

		ret = artpec6_crypto_dma_map_single(common, dst, len,
						   DMA_TO_DEVICE,
						   &dma_addr);
		if (ret)
			return ret;

		return artpec6_crypto_setup_out_descr_phys(common, dma_addr,
							   len, eop);
	}
}

/** artpec6_crypto_setup_in_descr_phys - Setup an in channel with a
 *                                       physical address
 *
 * @addr: The physical address of the data buffer
 * @len:  The length of the data buffer
 * @intr: True if an interrupt should be fired after HW processing of this
 *	  descriptor
 *
 */
static int
artpec6_crypto_setup_in_descr_phys(struct artpec6_crypto_req_common *common,
			       dma_addr_t addr, unsigned int len, bool intr)
{
	struct artpec6_crypto_dma_descriptors *dma = common->dma;
	struct pdma_descr *d;

	if (dma->in_cnt >= PDMA_DESCR_COUNT ||
	    fault_inject_dma_descr()) {
		pr_err("No free IN DMA descriptors available!\n");
		return -ENOSPC;
	}
	d = &dma->in[dma->in_cnt++];
	memset(d, 0, sizeof(*d));

	d->ctrl.intr = intr;
	d->data.len = len;
	d->data.buf = addr;
	return 0;
}

/** artpec6_crypto_setup_in_descr - Setup an in channel descriptor
 *
 * @buffer: The virtual address of the data buffer
 * @len:    The length of the data buffer
 * @last:   If this is the last data buffer in the request (i.e. an interrupt
 *	    is needed)
 *
 * Short descriptors are not used for the in channel
 */
static int
artpec6_crypto_setup_in_descr(struct artpec6_crypto_req_common *common,
			  void *buffer, unsigned int len, bool last)
{
	dma_addr_t dma_addr;
	int ret;

	ret = artpec6_crypto_dma_map_single(common, buffer, len,
					   DMA_FROM_DEVICE, &dma_addr);
	if (ret)
		return ret;

	return artpec6_crypto_setup_in_descr_phys(common, dma_addr, len, last);
}

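/*
 * Note on the over-allocation in artpec6_crypto_alloc_bounce() below
 * (illustrative arithmetic): with ARTPEC_CACHE_LINE_MAX = 32, kzalloc()
 * returns the struct followed by 64 bytes of slack. PTR_ALIGN() then
 * advances bbuf->buf to the next 32-byte boundary, which consumes at
 * most 31 bytes of that slack, so at least 32 aligned bytes always
 * remain for the bounced data.
 */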
static struct artpec6_crypto_bounce_buffer *
artpec6_crypto_alloc_bounce(gfp_t flags)
{
	void *base;
	size_t alloc_size = sizeof(struct artpec6_crypto_bounce_buffer) +
			    2 * ARTPEC_CACHE_LINE_MAX;
	struct artpec6_crypto_bounce_buffer *bbuf = kzalloc(alloc_size, flags);

	if (!bbuf)
		return NULL;

	base = bbuf + 1;
	bbuf->buf = PTR_ALIGN(base, ARTPEC_CACHE_LINE_MAX);
	return bbuf;
}

static int setup_bounce_buffer_in(struct artpec6_crypto_req_common *common,
				  struct artpec6_crypto_walk *walk, size_t size)
{
	struct artpec6_crypto_bounce_buffer *bbuf;
	int ret;

	bbuf = artpec6_crypto_alloc_bounce(common->gfp_flags);
	if (!bbuf)
		return -ENOMEM;

	bbuf->length = size;
	bbuf->sg = walk->sg;
	bbuf->offset = walk->offset;

	ret = artpec6_crypto_setup_in_descr(common, bbuf->buf, size, false);
	if (ret) {
		kfree(bbuf);
		return ret;
	}

	pr_debug("BOUNCE %zu offset %zu\n", size, walk->offset);
	list_add_tail(&bbuf->list, &common->dma->bounce_buffers);
	return 0;
}

static int
artpec6_crypto_setup_sg_descrs_in(struct artpec6_crypto_req_common *common,
				  struct artpec6_crypto_walk *walk,
				  size_t count)
{
	size_t chunk;
	int ret;
	dma_addr_t addr;

	while (walk->sg && count) {
		chunk = min(count, artpec6_crypto_walk_chunklen(walk));
		addr = artpec6_crypto_walk_chunk_phys(walk);

		/* When destination buffers are not aligned to the cache line
		 * size we need bounce buffers. The DMA-API requires that the
		 * entire line is owned by the DMA buffer, and this also holds
		 * when coherent DMA is used.
		 */
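		/* Worked example (illustrative): for a destination chunk at
		 * physical address 0x1004 with 100 bytes, the 28 bytes up to
		 * the 0x1020 boundary are bounced, the aligned 64 bytes in
		 * the middle are DMA-mapped directly, and the trailing
		 * 8 bytes are bounced again.
		 */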
		if (!IS_ALIGNED(addr, ARTPEC_CACHE_LINE_MAX)) {
			chunk = min_t(dma_addr_t, chunk,
				      ALIGN(addr, ARTPEC_CACHE_LINE_MAX) -
				      addr);

			pr_debug("CHUNK-b %pad:%zu\n", &addr, chunk);
			ret = setup_bounce_buffer_in(common, walk, chunk);
		} else if (chunk < ARTPEC_CACHE_LINE_MAX) {
			pr_debug("CHUNK-b %pad:%zu\n", &addr, chunk);
			ret = setup_bounce_buffer_in(common, walk, chunk);
		} else {
			dma_addr_t dma_addr;

			chunk = chunk & ~(ARTPEC_CACHE_LINE_MAX-1);

			pr_debug("CHUNK %pad:%zu\n", &addr, chunk);

			ret = artpec6_crypto_dma_map_page(common,
							 sg_page(walk->sg),
							 walk->sg->offset +
							 walk->offset,
							 chunk,
							 DMA_FROM_DEVICE,
							 &dma_addr);
			if (ret)
				return ret;

			ret = artpec6_crypto_setup_in_descr_phys(common,
								 dma_addr,
								 chunk, false);
		}

		if (ret)
			return ret;

		count = count - chunk;
		artpec6_crypto_walk_advance(walk, chunk);
	}

	if (count)
		pr_err("EOL unexpected %zu bytes left\n", count);

	return count ? -EINVAL : 0;
}

static int
artpec6_crypto_setup_sg_descrs_out(struct artpec6_crypto_req_common *common,
				   struct artpec6_crypto_walk *walk,
				   size_t count)
{
	size_t chunk;
	int ret;
	dma_addr_t addr;

	while (walk->sg && count) {
		chunk = min(count, artpec6_crypto_walk_chunklen(walk));
		addr = artpec6_crypto_walk_chunk_phys(walk);

		pr_debug("OUT-CHUNK %pad:%zu\n", &addr, chunk);

		if (addr & 3) {
			char buf[3];

			chunk = min_t(size_t, chunk, (4-(addr&3)));

			sg_pcopy_to_buffer(walk->sg, 1, buf, chunk,
					   walk->offset);

			ret = artpec6_crypto_setup_out_descr_short(common, buf,
								   chunk,
								   false);
		} else {
			dma_addr_t dma_addr;

			ret = artpec6_crypto_dma_map_page(common,
							 sg_page(walk->sg),
							 walk->sg->offset +
							 walk->offset,
							 chunk,
							 DMA_TO_DEVICE,
							 &dma_addr);
			if (ret)
				return ret;

			ret = artpec6_crypto_setup_out_descr_phys(common,
								 dma_addr,
								 chunk, false);
		}

		if (ret)
			return ret;

		count = count - chunk;
		artpec6_crypto_walk_advance(walk, chunk);
	}

	if (count)
		pr_err("EOL unexpected %zu bytes left\n", count);

	return count ? -EINVAL : 0;
}

/** artpec6_crypto_terminate_out_descrs - Set the EOP on the last out descriptor
 *
 * If the out descriptor list is non-empty, then the eop flag on the
 * last used out descriptor will be set.
 *
 * @return  0 on success
 *	-EINVAL if the out descriptor list is empty or has overflowed
 */
static int
artpec6_crypto_terminate_out_descrs(struct artpec6_crypto_req_common *common)
{
	struct artpec6_crypto_dma_descriptors *dma = common->dma;
	struct pdma_descr *d;

	if (!dma->out_cnt || dma->out_cnt > PDMA_DESCR_COUNT) {
		pr_err("%s: OUT descriptor list is %s\n",
			MODULE_NAME, dma->out_cnt ? "full" : "empty");
		return -EINVAL;
	}

	d = &dma->out[dma->out_cnt-1];
	d->ctrl.eop = 1;

	return 0;
}

/** artpec6_crypto_terminate_in_descrs - Set the interrupt flag on the last
 *                                       in descriptor
 *
 * See artpec6_crypto_terminate_out_descrs() for return values
 */
static int
artpec6_crypto_terminate_in_descrs(struct artpec6_crypto_req_common *common)
{
	struct artpec6_crypto_dma_descriptors *dma = common->dma;
	struct pdma_descr *d;

	if (!dma->in_cnt || dma->in_cnt > PDMA_DESCR_COUNT) {
		pr_err("%s: IN descriptor list is %s\n",
			MODULE_NAME, dma->in_cnt ? "full" : "empty");
		return -EINVAL;
	}

	d = &dma->in[dma->in_cnt-1];
	d->ctrl.intr = 1;
	return 0;
}

/** create_hash_pad - Create a Secure Hash conformant pad
 *
 * @oper:     The hash operation, one of the regk_crypto_* hash values
 * @dst:      The destination buffer to write the pad. Must be at least 64 bytes
 * @dgstlen:  The total length of the hash digest in bytes
 * @bitcount: The total length of the digest in bits
 *
 * @return The total number of padding bytes written to @dst
 */
static size_t
create_hash_pad(int oper, unsigned char *dst, u64 dgstlen, u64 bitcount)
{
	unsigned int mod, target, diff, pad_bytes, size_bytes;
	__be64 bits = __cpu_to_be64(bitcount);

	switch (oper) {
	case regk_crypto_sha1:
	case regk_crypto_sha256:
	case regk_crypto_hmac_sha1:
	case regk_crypto_hmac_sha256:
		target = 448 / 8;
		mod = 512 / 8;
		size_bytes = 8;
		break;
	default:
		target = 896 / 8;
		mod = 1024 / 8;
		size_bytes = 16;
		break;
	}

	target -= 1;
	diff = dgstlen & (mod - 1);
	pad_bytes = diff > target ? target + mod - diff : target - diff;

	memset(dst + 1, 0, pad_bytes);
	dst[0] = 0x80;

	if (size_bytes == 16) {
		memset(dst + 1 + pad_bytes, 0, 8);
		memcpy(dst + 1 + pad_bytes + 8, &bits, 8);
	} else {
		memcpy(dst + 1 + pad_bytes, &bits, 8);
	}

	return pad_bytes + size_bytes + 1;
}
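
/*
 * Worked example for create_hash_pad() (illustrative, SHA-256): for a
 * 3-byte message, dgstlen = 3 and bitcount = 24, so mod = 64 and
 * target = 56 - 1 = 55. diff = 3 gives pad_bytes = 52, and the function
 * writes the 0x80 marker, 52 zero bytes and the 8-byte big-endian bit
 * count: 1 + 52 + 8 = 61 bytes, padding the message to exactly one
 * 64-byte SHA-256 block.
 */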
1037*4882a593Smuzhiyun 
artpec6_crypto_common_init(struct artpec6_crypto_req_common * common,struct crypto_async_request * parent,void (* complete)(struct crypto_async_request * req),struct scatterlist * dstsg,unsigned int nbytes)1038*4882a593Smuzhiyun static int artpec6_crypto_common_init(struct artpec6_crypto_req_common *common,
1039*4882a593Smuzhiyun 		struct crypto_async_request *parent,
1040*4882a593Smuzhiyun 		void (*complete)(struct crypto_async_request *req),
1041*4882a593Smuzhiyun 		struct scatterlist *dstsg, unsigned int nbytes)
1042*4882a593Smuzhiyun {
1043*4882a593Smuzhiyun 	gfp_t flags;
1044*4882a593Smuzhiyun 	struct artpec6_crypto *ac = dev_get_drvdata(artpec6_crypto_dev);
1045*4882a593Smuzhiyun 
1046*4882a593Smuzhiyun 	flags = (parent->flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
1047*4882a593Smuzhiyun 		 GFP_KERNEL : GFP_ATOMIC;
1048*4882a593Smuzhiyun 
1049*4882a593Smuzhiyun 	common->gfp_flags = flags;
1050*4882a593Smuzhiyun 	common->dma = kmem_cache_alloc(ac->dma_cache, flags);
1051*4882a593Smuzhiyun 	if (!common->dma)
1052*4882a593Smuzhiyun 		return -ENOMEM;
1053*4882a593Smuzhiyun 
1054*4882a593Smuzhiyun 	common->req = parent;
1055*4882a593Smuzhiyun 	common->complete = complete;
1056*4882a593Smuzhiyun 	return 0;
1057*4882a593Smuzhiyun }
1058*4882a593Smuzhiyun 
1059*4882a593Smuzhiyun static void
artpec6_crypto_bounce_destroy(struct artpec6_crypto_dma_descriptors * dma)1060*4882a593Smuzhiyun artpec6_crypto_bounce_destroy(struct artpec6_crypto_dma_descriptors *dma)
1061*4882a593Smuzhiyun {
1062*4882a593Smuzhiyun 	struct artpec6_crypto_bounce_buffer *b;
1063*4882a593Smuzhiyun 	struct artpec6_crypto_bounce_buffer *next;
1064*4882a593Smuzhiyun 
1065*4882a593Smuzhiyun 	list_for_each_entry_safe(b, next, &dma->bounce_buffers, list) {
1066*4882a593Smuzhiyun 		kfree(b);
1067*4882a593Smuzhiyun 	}
1068*4882a593Smuzhiyun }
1069*4882a593Smuzhiyun 
1070*4882a593Smuzhiyun static int
artpec6_crypto_common_destroy(struct artpec6_crypto_req_common * common)1071*4882a593Smuzhiyun artpec6_crypto_common_destroy(struct artpec6_crypto_req_common *common)
1072*4882a593Smuzhiyun {
1073*4882a593Smuzhiyun 	struct artpec6_crypto *ac = dev_get_drvdata(artpec6_crypto_dev);
1074*4882a593Smuzhiyun 
1075*4882a593Smuzhiyun 	artpec6_crypto_dma_unmap_all(common);
1076*4882a593Smuzhiyun 	artpec6_crypto_bounce_destroy(common->dma);
1077*4882a593Smuzhiyun 	kmem_cache_free(ac->dma_cache, common->dma);
1078*4882a593Smuzhiyun 	common->dma = NULL;
1079*4882a593Smuzhiyun 	return 0;
1080*4882a593Smuzhiyun }
1081*4882a593Smuzhiyun 
1082*4882a593Smuzhiyun /*
1083*4882a593Smuzhiyun  * Ciphering functions.
1084*4882a593Smuzhiyun  */
artpec6_crypto_encrypt(struct skcipher_request * req)1085*4882a593Smuzhiyun static int artpec6_crypto_encrypt(struct skcipher_request *req)
1086*4882a593Smuzhiyun {
1087*4882a593Smuzhiyun 	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
1088*4882a593Smuzhiyun 	struct artpec6_cryptotfm_context *ctx = crypto_skcipher_ctx(cipher);
1089*4882a593Smuzhiyun 	struct artpec6_crypto_request_context *req_ctx = NULL;
1090*4882a593Smuzhiyun 	void (*complete)(struct crypto_async_request *req);
1091*4882a593Smuzhiyun 	int ret;
1092*4882a593Smuzhiyun 
1093*4882a593Smuzhiyun 	req_ctx = skcipher_request_ctx(req);
1094*4882a593Smuzhiyun 
1095*4882a593Smuzhiyun 	switch (ctx->crypto_type) {
1096*4882a593Smuzhiyun 	case ARTPEC6_CRYPTO_CIPHER_AES_CBC:
1097*4882a593Smuzhiyun 	case ARTPEC6_CRYPTO_CIPHER_AES_ECB:
1098*4882a593Smuzhiyun 	case ARTPEC6_CRYPTO_CIPHER_AES_XTS:
1099*4882a593Smuzhiyun 		req_ctx->decrypt = 0;
1100*4882a593Smuzhiyun 		break;
1101*4882a593Smuzhiyun 	default:
1102*4882a593Smuzhiyun 		break;
1103*4882a593Smuzhiyun 	}
1104*4882a593Smuzhiyun 
1105*4882a593Smuzhiyun 	switch (ctx->crypto_type) {
1106*4882a593Smuzhiyun 	case ARTPEC6_CRYPTO_CIPHER_AES_CBC:
1107*4882a593Smuzhiyun 		complete = artpec6_crypto_complete_cbc_encrypt;
1108*4882a593Smuzhiyun 		break;
1109*4882a593Smuzhiyun 	default:
1110*4882a593Smuzhiyun 		complete = artpec6_crypto_complete_crypto;
1111*4882a593Smuzhiyun 		break;
1112*4882a593Smuzhiyun 	}
1113*4882a593Smuzhiyun 
1114*4882a593Smuzhiyun 	ret = artpec6_crypto_common_init(&req_ctx->common,
1115*4882a593Smuzhiyun 				  &req->base,
1116*4882a593Smuzhiyun 				  complete,
1117*4882a593Smuzhiyun 				  req->dst, req->cryptlen);
1118*4882a593Smuzhiyun 	if (ret)
1119*4882a593Smuzhiyun 		return ret;
1120*4882a593Smuzhiyun 
1121*4882a593Smuzhiyun 	ret = artpec6_crypto_prepare_crypto(req);
1122*4882a593Smuzhiyun 	if (ret) {
1123*4882a593Smuzhiyun 		artpec6_crypto_common_destroy(&req_ctx->common);
1124*4882a593Smuzhiyun 		return ret;
1125*4882a593Smuzhiyun 	}
1126*4882a593Smuzhiyun 
1127*4882a593Smuzhiyun 	return artpec6_crypto_submit(&req_ctx->common);
1128*4882a593Smuzhiyun }
1129*4882a593Smuzhiyun 
1130*4882a593Smuzhiyun static int artpec6_crypto_decrypt(struct skcipher_request *req)
1131*4882a593Smuzhiyun {
1132*4882a593Smuzhiyun 	int ret;
1133*4882a593Smuzhiyun 	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
1134*4882a593Smuzhiyun 	struct artpec6_cryptotfm_context *ctx = crypto_skcipher_ctx(cipher);
1135*4882a593Smuzhiyun 	struct artpec6_crypto_request_context *req_ctx = NULL;
1136*4882a593Smuzhiyun 	void (*complete)(struct crypto_async_request *req);
1137*4882a593Smuzhiyun 
1138*4882a593Smuzhiyun 	req_ctx = skcipher_request_ctx(req);
1139*4882a593Smuzhiyun 
1140*4882a593Smuzhiyun 	switch (ctx->crypto_type) {
1141*4882a593Smuzhiyun 	case ARTPEC6_CRYPTO_CIPHER_AES_CBC:
1142*4882a593Smuzhiyun 	case ARTPEC6_CRYPTO_CIPHER_AES_ECB:
1143*4882a593Smuzhiyun 	case ARTPEC6_CRYPTO_CIPHER_AES_XTS:
1144*4882a593Smuzhiyun 		req_ctx->decrypt = 1;
1145*4882a593Smuzhiyun 		break;
1146*4882a593Smuzhiyun 	default:
1147*4882a593Smuzhiyun 		break;
1148*4882a593Smuzhiyun 	}
1149*4882a593Smuzhiyun 
1150*4882a593Smuzhiyun 
1151*4882a593Smuzhiyun 	switch (ctx->crypto_type) {
1152*4882a593Smuzhiyun 	case ARTPEC6_CRYPTO_CIPHER_AES_CBC:
1153*4882a593Smuzhiyun 		complete = artpec6_crypto_complete_cbc_decrypt;
1154*4882a593Smuzhiyun 		break;
1155*4882a593Smuzhiyun 	default:
1156*4882a593Smuzhiyun 		complete = artpec6_crypto_complete_crypto;
1157*4882a593Smuzhiyun 		break;
1158*4882a593Smuzhiyun 	}
1159*4882a593Smuzhiyun 
1160*4882a593Smuzhiyun 	ret = artpec6_crypto_common_init(&req_ctx->common, &req->base,
1161*4882a593Smuzhiyun 				  complete,
1162*4882a593Smuzhiyun 				  req->dst, req->cryptlen);
1163*4882a593Smuzhiyun 	if (ret)
1164*4882a593Smuzhiyun 		return ret;
1165*4882a593Smuzhiyun 
1166*4882a593Smuzhiyun 	ret = artpec6_crypto_prepare_crypto(req);
1167*4882a593Smuzhiyun 	if (ret) {
1168*4882a593Smuzhiyun 		artpec6_crypto_common_destroy(&req_ctx->common);
1169*4882a593Smuzhiyun 		return ret;
1170*4882a593Smuzhiyun 	}
1171*4882a593Smuzhiyun 
1172*4882a593Smuzhiyun 	return artpec6_crypto_submit(&req_ctx->common);
1173*4882a593Smuzhiyun }
1174*4882a593Smuzhiyun 
1175*4882a593Smuzhiyun static int
1176*4882a593Smuzhiyun artpec6_crypto_ctr_crypt(struct skcipher_request *req, bool encrypt)
1177*4882a593Smuzhiyun {
1178*4882a593Smuzhiyun 	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
1179*4882a593Smuzhiyun 	struct artpec6_cryptotfm_context *ctx = crypto_skcipher_ctx(cipher);
1180*4882a593Smuzhiyun 	size_t iv_len = crypto_skcipher_ivsize(cipher);
1181*4882a593Smuzhiyun 	unsigned int counter = be32_to_cpup((__be32 *)
1182*4882a593Smuzhiyun 					    (req->iv + iv_len - 4));
1183*4882a593Smuzhiyun 	unsigned int nblks = ALIGN(req->cryptlen, AES_BLOCK_SIZE) /
1184*4882a593Smuzhiyun 			     AES_BLOCK_SIZE;
1185*4882a593Smuzhiyun 
1186*4882a593Smuzhiyun 	/*
1187*4882a593Smuzhiyun 	 * The hardware uses only the last 32-bits as the counter while the
1188*4882a593Smuzhiyun 	 * kernel tests (aes_ctr_enc_tv_template[4] for example) expect that
1189*4882a593Smuzhiyun 	 * the whole IV is a counter.  So fall back if the counter is going to
1190*4882a593Smuzhiyun 	 * overflow.
1191*4882a593Smuzhiyun 	 */
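	/*
	 * Editorial example (not in the original source): with
	 * counter = 0xffffffff and nblks = 2, the 32-bit sum wraps to 1,
	 * so (counter + nblks < counter) is true and the request is
	 * routed to the software fallback below.
	 */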
1192*4882a593Smuzhiyun 	if (counter + nblks < counter) {
1193*4882a593Smuzhiyun 		int ret;
1194*4882a593Smuzhiyun 
1195*4882a593Smuzhiyun 		pr_debug("counter %x will overflow (nblks %u), falling back\n",
1196*4882a593Smuzhiyun 			 counter, nblks);
1197*4882a593Smuzhiyun 
1198*4882a593Smuzhiyun 		ret = crypto_sync_skcipher_setkey(ctx->fallback, ctx->aes_key,
1199*4882a593Smuzhiyun 						  ctx->key_length);
1200*4882a593Smuzhiyun 		if (ret)
1201*4882a593Smuzhiyun 			return ret;
1202*4882a593Smuzhiyun 
1203*4882a593Smuzhiyun 		{
1204*4882a593Smuzhiyun 			SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, ctx->fallback);
1205*4882a593Smuzhiyun 
1206*4882a593Smuzhiyun 			skcipher_request_set_sync_tfm(subreq, ctx->fallback);
1207*4882a593Smuzhiyun 			skcipher_request_set_callback(subreq, req->base.flags,
1208*4882a593Smuzhiyun 						      NULL, NULL);
1209*4882a593Smuzhiyun 			skcipher_request_set_crypt(subreq, req->src, req->dst,
1210*4882a593Smuzhiyun 						   req->cryptlen, req->iv);
1211*4882a593Smuzhiyun 			ret = encrypt ? crypto_skcipher_encrypt(subreq)
1212*4882a593Smuzhiyun 				      : crypto_skcipher_decrypt(subreq);
1213*4882a593Smuzhiyun 			skcipher_request_zero(subreq);
1214*4882a593Smuzhiyun 		}
1215*4882a593Smuzhiyun 		return ret;
1216*4882a593Smuzhiyun 	}
1217*4882a593Smuzhiyun 
1218*4882a593Smuzhiyun 	return encrypt ? artpec6_crypto_encrypt(req)
1219*4882a593Smuzhiyun 		       : artpec6_crypto_decrypt(req);
1220*4882a593Smuzhiyun }
1221*4882a593Smuzhiyun 
1222*4882a593Smuzhiyun static int artpec6_crypto_ctr_encrypt(struct skcipher_request *req)
1223*4882a593Smuzhiyun {
1224*4882a593Smuzhiyun 	return artpec6_crypto_ctr_crypt(req, true);
1225*4882a593Smuzhiyun }
1226*4882a593Smuzhiyun 
1227*4882a593Smuzhiyun static int artpec6_crypto_ctr_decrypt(struct skcipher_request *req)
1228*4882a593Smuzhiyun {
1229*4882a593Smuzhiyun 	return artpec6_crypto_ctr_crypt(req, false);
1230*4882a593Smuzhiyun }
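/*
 * Editorial note (not in the original source): CTR mode applies the
 * same keystream XOR in both directions, so encrypt and decrypt share
 * artpec6_crypto_ctr_crypt() and artpec6_crypto_prepare_crypto() never
 * sets the hardware decrypt flag for CTR (cipher_decr stays false).
 */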
1231*4882a593Smuzhiyun 
1232*4882a593Smuzhiyun /*
1233*4882a593Smuzhiyun  * AEAD functions
1234*4882a593Smuzhiyun  */
1235*4882a593Smuzhiyun static int artpec6_crypto_aead_init(struct crypto_aead *tfm)
1236*4882a593Smuzhiyun {
1237*4882a593Smuzhiyun 	struct artpec6_cryptotfm_context *tfm_ctx = crypto_aead_ctx(tfm);
1238*4882a593Smuzhiyun 
1239*4882a593Smuzhiyun 	memset(tfm_ctx, 0, sizeof(*tfm_ctx));
1240*4882a593Smuzhiyun 
1241*4882a593Smuzhiyun 	crypto_aead_set_reqsize(tfm,
1242*4882a593Smuzhiyun 				sizeof(struct artpec6_crypto_aead_req_ctx));
1243*4882a593Smuzhiyun 
1244*4882a593Smuzhiyun 	return 0;
1245*4882a593Smuzhiyun }
1246*4882a593Smuzhiyun 
1247*4882a593Smuzhiyun static int artpec6_crypto_aead_set_key(struct crypto_aead *tfm, const u8 *key,
1248*4882a593Smuzhiyun 			       unsigned int len)
1249*4882a593Smuzhiyun {
1250*4882a593Smuzhiyun 	struct artpec6_cryptotfm_context *ctx = crypto_tfm_ctx(&tfm->base);
1251*4882a593Smuzhiyun 
1252*4882a593Smuzhiyun 	if (len != 16 && len != 24 && len != 32)
1253*4882a593Smuzhiyun 		return -EINVAL;
1254*4882a593Smuzhiyun 
1255*4882a593Smuzhiyun 	ctx->key_length = len;
1256*4882a593Smuzhiyun 
1257*4882a593Smuzhiyun 	memcpy(ctx->aes_key, key, len);
1258*4882a593Smuzhiyun 	return 0;
1259*4882a593Smuzhiyun }
1260*4882a593Smuzhiyun 
1261*4882a593Smuzhiyun static int artpec6_crypto_aead_encrypt(struct aead_request *req)
1262*4882a593Smuzhiyun {
1263*4882a593Smuzhiyun 	int ret;
1264*4882a593Smuzhiyun 	struct artpec6_crypto_aead_req_ctx *req_ctx = aead_request_ctx(req);
1265*4882a593Smuzhiyun 
1266*4882a593Smuzhiyun 	req_ctx->decrypt = false;
1267*4882a593Smuzhiyun 	ret = artpec6_crypto_common_init(&req_ctx->common, &req->base,
1268*4882a593Smuzhiyun 				  artpec6_crypto_complete_aead,
1269*4882a593Smuzhiyun 				  NULL, 0);
1270*4882a593Smuzhiyun 	if (ret)
1271*4882a593Smuzhiyun 		return ret;
1272*4882a593Smuzhiyun 
1273*4882a593Smuzhiyun 	ret = artpec6_crypto_prepare_aead(req);
1274*4882a593Smuzhiyun 	if (ret) {
1275*4882a593Smuzhiyun 		artpec6_crypto_common_destroy(&req_ctx->common);
1276*4882a593Smuzhiyun 		return ret;
1277*4882a593Smuzhiyun 	}
1278*4882a593Smuzhiyun 
1279*4882a593Smuzhiyun 	return artpec6_crypto_submit(&req_ctx->common);
1280*4882a593Smuzhiyun }
1281*4882a593Smuzhiyun 
1282*4882a593Smuzhiyun static int artpec6_crypto_aead_decrypt(struct aead_request *req)
1283*4882a593Smuzhiyun {
1284*4882a593Smuzhiyun 	int ret;
1285*4882a593Smuzhiyun 	struct artpec6_crypto_aead_req_ctx *req_ctx = aead_request_ctx(req);
1286*4882a593Smuzhiyun 
1287*4882a593Smuzhiyun 	req_ctx->decrypt = true;
1288*4882a593Smuzhiyun 	if (req->cryptlen < AES_BLOCK_SIZE)
1289*4882a593Smuzhiyun 		return -EINVAL;
1290*4882a593Smuzhiyun 
1291*4882a593Smuzhiyun 	ret = artpec6_crypto_common_init(&req_ctx->common,
1292*4882a593Smuzhiyun 				  &req->base,
1293*4882a593Smuzhiyun 				  artpec6_crypto_complete_aead,
1294*4882a593Smuzhiyun 				  NULL, 0);
1295*4882a593Smuzhiyun 	if (ret)
1296*4882a593Smuzhiyun 		return ret;
1297*4882a593Smuzhiyun 
1298*4882a593Smuzhiyun 	ret = artpec6_crypto_prepare_aead(req);
1299*4882a593Smuzhiyun 	if (ret) {
1300*4882a593Smuzhiyun 		artpec6_crypto_common_destroy(&req_ctx->common);
1301*4882a593Smuzhiyun 		return ret;
1302*4882a593Smuzhiyun 	}
1303*4882a593Smuzhiyun 
1304*4882a593Smuzhiyun 	return artpec6_crypto_submit(&req_ctx->common);
1305*4882a593Smuzhiyun }
1306*4882a593Smuzhiyun 
1307*4882a593Smuzhiyun static int artpec6_crypto_prepare_hash(struct ahash_request *areq)
1308*4882a593Smuzhiyun {
1309*4882a593Smuzhiyun 	struct artpec6_hashalg_context *ctx = crypto_tfm_ctx(areq->base.tfm);
1310*4882a593Smuzhiyun 	struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(areq);
1311*4882a593Smuzhiyun 	size_t digestsize = crypto_ahash_digestsize(crypto_ahash_reqtfm(areq));
1312*4882a593Smuzhiyun 	size_t contextsize = digestsize;
1313*4882a593Smuzhiyun 	size_t blocksize = crypto_tfm_alg_blocksize(
1314*4882a593Smuzhiyun 		crypto_ahash_tfm(crypto_ahash_reqtfm(areq)));
1315*4882a593Smuzhiyun 	struct artpec6_crypto_req_common *common = &req_ctx->common;
1316*4882a593Smuzhiyun 	struct artpec6_crypto *ac = dev_get_drvdata(artpec6_crypto_dev);
1317*4882a593Smuzhiyun 	enum artpec6_crypto_variant variant = ac->variant;
1318*4882a593Smuzhiyun 	u32 sel_ctx;
1319*4882a593Smuzhiyun 	bool ext_ctx = false;
1320*4882a593Smuzhiyun 	bool run_hw = false;
1321*4882a593Smuzhiyun 	int error = 0;
1322*4882a593Smuzhiyun 
1323*4882a593Smuzhiyun 	artpec6_crypto_init_dma_operation(common);
1324*4882a593Smuzhiyun 
1325*4882a593Smuzhiyun 	/* Upload HMAC key, must be the first packet */
1326*4882a593Smuzhiyun 	if (req_ctx->hash_flags & HASH_FLAG_HMAC) {
1327*4882a593Smuzhiyun 		if (variant == ARTPEC6_CRYPTO) {
1328*4882a593Smuzhiyun 			req_ctx->key_md = FIELD_PREP(A6_CRY_MD_OPER,
1329*4882a593Smuzhiyun 						     a6_regk_crypto_dlkey);
1330*4882a593Smuzhiyun 		} else {
1331*4882a593Smuzhiyun 			req_ctx->key_md = FIELD_PREP(A7_CRY_MD_OPER,
1332*4882a593Smuzhiyun 						     a7_regk_crypto_dlkey);
1333*4882a593Smuzhiyun 		}
1334*4882a593Smuzhiyun 
1335*4882a593Smuzhiyun 		/* Copy and pad up the key */
1336*4882a593Smuzhiyun 		memcpy(req_ctx->key_buffer, ctx->hmac_key,
1337*4882a593Smuzhiyun 		       ctx->hmac_key_length);
1338*4882a593Smuzhiyun 		memset(req_ctx->key_buffer + ctx->hmac_key_length, 0,
1339*4882a593Smuzhiyun 		       blocksize - ctx->hmac_key_length);
1340*4882a593Smuzhiyun 
1341*4882a593Smuzhiyun 		error = artpec6_crypto_setup_out_descr(common,
1342*4882a593Smuzhiyun 					(void *)&req_ctx->key_md,
1343*4882a593Smuzhiyun 					sizeof(req_ctx->key_md), false, false);
1344*4882a593Smuzhiyun 		if (error)
1345*4882a593Smuzhiyun 			return error;
1346*4882a593Smuzhiyun 
1347*4882a593Smuzhiyun 		error = artpec6_crypto_setup_out_descr(common,
1348*4882a593Smuzhiyun 					req_ctx->key_buffer, blocksize,
1349*4882a593Smuzhiyun 					true, false);
1350*4882a593Smuzhiyun 		if (error)
1351*4882a593Smuzhiyun 			return error;
1352*4882a593Smuzhiyun 	}
1353*4882a593Smuzhiyun 
1354*4882a593Smuzhiyun 	if (!(req_ctx->hash_flags & HASH_FLAG_INIT_CTX)) {
1355*4882a593Smuzhiyun 		/* Restore context */
1356*4882a593Smuzhiyun 		sel_ctx = regk_crypto_ext;
1357*4882a593Smuzhiyun 		ext_ctx = true;
1358*4882a593Smuzhiyun 	} else {
1359*4882a593Smuzhiyun 		sel_ctx = regk_crypto_init;
1360*4882a593Smuzhiyun 	}
1361*4882a593Smuzhiyun 
1362*4882a593Smuzhiyun 	if (variant == ARTPEC6_CRYPTO) {
1363*4882a593Smuzhiyun 		req_ctx->hash_md &= ~A6_CRY_MD_HASH_SEL_CTX;
1364*4882a593Smuzhiyun 		req_ctx->hash_md |= FIELD_PREP(A6_CRY_MD_HASH_SEL_CTX, sel_ctx);
1365*4882a593Smuzhiyun 
1366*4882a593Smuzhiyun 		/* If this is the final round, set the final flag */
1367*4882a593Smuzhiyun 		if (req_ctx->hash_flags & HASH_FLAG_FINALIZE)
1368*4882a593Smuzhiyun 			req_ctx->hash_md |= A6_CRY_MD_HASH_HMAC_FIN;
1369*4882a593Smuzhiyun 	} else {
1370*4882a593Smuzhiyun 		req_ctx->hash_md &= ~A7_CRY_MD_HASH_SEL_CTX;
1371*4882a593Smuzhiyun 		req_ctx->hash_md |= FIELD_PREP(A7_CRY_MD_HASH_SEL_CTX, sel_ctx);
1372*4882a593Smuzhiyun 
1373*4882a593Smuzhiyun 		/* If this is the final round, set the final flag */
1374*4882a593Smuzhiyun 		if (req_ctx->hash_flags & HASH_FLAG_FINALIZE)
1375*4882a593Smuzhiyun 			req_ctx->hash_md |= A7_CRY_MD_HASH_HMAC_FIN;
1376*4882a593Smuzhiyun 	}
1377*4882a593Smuzhiyun 
1378*4882a593Smuzhiyun 	/* Set up the metadata descriptors */
1379*4882a593Smuzhiyun 	error = artpec6_crypto_setup_out_descr(common,
1380*4882a593Smuzhiyun 				(void *)&req_ctx->hash_md,
1381*4882a593Smuzhiyun 				sizeof(req_ctx->hash_md), false, false);
1382*4882a593Smuzhiyun 	if (error)
1383*4882a593Smuzhiyun 		return error;
1384*4882a593Smuzhiyun 
1385*4882a593Smuzhiyun 	error = artpec6_crypto_setup_in_descr(common, ac->pad_buffer, 4, false);
1386*4882a593Smuzhiyun 	if (error)
1387*4882a593Smuzhiyun 		return error;
1388*4882a593Smuzhiyun 
1389*4882a593Smuzhiyun 	if (ext_ctx) {
1390*4882a593Smuzhiyun 		error = artpec6_crypto_setup_out_descr(common,
1391*4882a593Smuzhiyun 					req_ctx->digeststate,
1392*4882a593Smuzhiyun 					contextsize, false, false);
1393*4882a593Smuzhiyun 
1394*4882a593Smuzhiyun 		if (error)
1395*4882a593Smuzhiyun 			return error;
1396*4882a593Smuzhiyun 	}
1397*4882a593Smuzhiyun 
1398*4882a593Smuzhiyun 	if (req_ctx->hash_flags & HASH_FLAG_UPDATE) {
1399*4882a593Smuzhiyun 		size_t done_bytes = 0;
1400*4882a593Smuzhiyun 		size_t total_bytes = areq->nbytes + req_ctx->partial_bytes;
1401*4882a593Smuzhiyun 		size_t ready_bytes = round_down(total_bytes, blocksize);
1402*4882a593Smuzhiyun 		struct artpec6_crypto_walk walk;
1403*4882a593Smuzhiyun 
1404*4882a593Smuzhiyun 		run_hw = ready_bytes > 0;
1405*4882a593Smuzhiyun 		if (req_ctx->partial_bytes && ready_bytes) {
1406*4882a593Smuzhiyun 			/* We have a partial buffer and will send at least some
1407*4882a593Smuzhiyun 			 * bytes to the HW. Empty this partial buffer before
1408*4882a593Smuzhiyun 			 * tackling the SG lists.
1409*4882a593Smuzhiyun 			 */
1410*4882a593Smuzhiyun 			memcpy(req_ctx->partial_buffer_out,
1411*4882a593Smuzhiyun 				req_ctx->partial_buffer,
1412*4882a593Smuzhiyun 				req_ctx->partial_bytes);
1413*4882a593Smuzhiyun 
1414*4882a593Smuzhiyun 			error = artpec6_crypto_setup_out_descr(common,
1415*4882a593Smuzhiyun 						req_ctx->partial_buffer_out,
1416*4882a593Smuzhiyun 						req_ctx->partial_bytes,
1417*4882a593Smuzhiyun 						false, true);
1418*4882a593Smuzhiyun 			if (error)
1419*4882a593Smuzhiyun 				return error;
1420*4882a593Smuzhiyun 
1421*4882a593Smuzhiyun 			/* Reset partial buffer */
1422*4882a593Smuzhiyun 			done_bytes += req_ctx->partial_bytes;
1423*4882a593Smuzhiyun 			req_ctx->partial_bytes = 0;
1424*4882a593Smuzhiyun 		}
1425*4882a593Smuzhiyun 
1426*4882a593Smuzhiyun 		artpec6_crypto_walk_init(&walk, areq->src);
1427*4882a593Smuzhiyun 
1428*4882a593Smuzhiyun 		error = artpec6_crypto_setup_sg_descrs_out(common, &walk,
1429*4882a593Smuzhiyun 							   ready_bytes -
1430*4882a593Smuzhiyun 							   done_bytes);
1431*4882a593Smuzhiyun 		if (error)
1432*4882a593Smuzhiyun 			return error;
1433*4882a593Smuzhiyun 
1434*4882a593Smuzhiyun 		if (walk.sg) {
1435*4882a593Smuzhiyun 			size_t sg_skip = ready_bytes - done_bytes;
1436*4882a593Smuzhiyun 			size_t sg_rem = areq->nbytes - sg_skip;
1437*4882a593Smuzhiyun 
1438*4882a593Smuzhiyun 			sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
1439*4882a593Smuzhiyun 					   req_ctx->partial_buffer +
1440*4882a593Smuzhiyun 					   req_ctx->partial_bytes,
1441*4882a593Smuzhiyun 					   sg_rem, sg_skip);
1442*4882a593Smuzhiyun 
1443*4882a593Smuzhiyun 			req_ctx->partial_bytes += sg_rem;
1444*4882a593Smuzhiyun 		}
1445*4882a593Smuzhiyun 
1446*4882a593Smuzhiyun 		req_ctx->digcnt += ready_bytes;
1447*4882a593Smuzhiyun 		req_ctx->hash_flags &= ~(HASH_FLAG_UPDATE);
1448*4882a593Smuzhiyun 	}
1449*4882a593Smuzhiyun 
1450*4882a593Smuzhiyun 	/* Finalize */
1451*4882a593Smuzhiyun 	if (req_ctx->hash_flags & HASH_FLAG_FINALIZE) {
1452*4882a593Smuzhiyun 		size_t hash_pad_len;
1453*4882a593Smuzhiyun 		u64 digest_bits;
1454*4882a593Smuzhiyun 		u32 oper;
1455*4882a593Smuzhiyun 
1456*4882a593Smuzhiyun 		if (variant == ARTPEC6_CRYPTO)
1457*4882a593Smuzhiyun 			oper = FIELD_GET(A6_CRY_MD_OPER, req_ctx->hash_md);
1458*4882a593Smuzhiyun 		else
1459*4882a593Smuzhiyun 			oper = FIELD_GET(A7_CRY_MD_OPER, req_ctx->hash_md);
1460*4882a593Smuzhiyun 
1461*4882a593Smuzhiyun 		/* Write out the partial buffer if present */
1462*4882a593Smuzhiyun 		if (req_ctx->partial_bytes) {
1463*4882a593Smuzhiyun 			memcpy(req_ctx->partial_buffer_out,
1464*4882a593Smuzhiyun 			       req_ctx->partial_buffer,
1465*4882a593Smuzhiyun 			       req_ctx->partial_bytes);
1466*4882a593Smuzhiyun 			error = artpec6_crypto_setup_out_descr(common,
1467*4882a593Smuzhiyun 						req_ctx->partial_buffer_out,
1468*4882a593Smuzhiyun 						req_ctx->partial_bytes,
1469*4882a593Smuzhiyun 						false, true);
1470*4882a593Smuzhiyun 			if (error)
1471*4882a593Smuzhiyun 				return error;
1472*4882a593Smuzhiyun 
1473*4882a593Smuzhiyun 			req_ctx->digcnt += req_ctx->partial_bytes;
1474*4882a593Smuzhiyun 			req_ctx->partial_bytes = 0;
1475*4882a593Smuzhiyun 		}
1476*4882a593Smuzhiyun 
1477*4882a593Smuzhiyun 		if (req_ctx->hash_flags & HASH_FLAG_HMAC)
1478*4882a593Smuzhiyun 			digest_bits = 8 * (req_ctx->digcnt + blocksize);
1479*4882a593Smuzhiyun 		else
1480*4882a593Smuzhiyun 			digest_bits = 8 * req_ctx->digcnt;
1481*4882a593Smuzhiyun 
1482*4882a593Smuzhiyun 		/* Add the hash pad */
1483*4882a593Smuzhiyun 		hash_pad_len = create_hash_pad(oper, req_ctx->pad_buffer,
1484*4882a593Smuzhiyun 					       req_ctx->digcnt, digest_bits);
1485*4882a593Smuzhiyun 		error = artpec6_crypto_setup_out_descr(common,
1486*4882a593Smuzhiyun 						      req_ctx->pad_buffer,
1487*4882a593Smuzhiyun 						      hash_pad_len, false,
1488*4882a593Smuzhiyun 						      true);
1489*4882a593Smuzhiyun 		req_ctx->digcnt = 0;
1490*4882a593Smuzhiyun 
1491*4882a593Smuzhiyun 		if (error)
1492*4882a593Smuzhiyun 			return error;
1493*4882a593Smuzhiyun 
1494*4882a593Smuzhiyun 		/* Descriptor for the final result */
1495*4882a593Smuzhiyun 		error = artpec6_crypto_setup_in_descr(common, areq->result,
1496*4882a593Smuzhiyun 						      digestsize,
1497*4882a593Smuzhiyun 						      true);
1498*4882a593Smuzhiyun 		if (error)
1499*4882a593Smuzhiyun 			return error;
1500*4882a593Smuzhiyun 
1501*4882a593Smuzhiyun 	} else { /* This is not the final operation for this request */
1502*4882a593Smuzhiyun 		if (!run_hw)
1503*4882a593Smuzhiyun 			return ARTPEC6_CRYPTO_PREPARE_HASH_NO_START;
1504*4882a593Smuzhiyun 
1505*4882a593Smuzhiyun 		/* Save the result to the context */
1506*4882a593Smuzhiyun 		error = artpec6_crypto_setup_in_descr(common,
1507*4882a593Smuzhiyun 						      req_ctx->digeststate,
1508*4882a593Smuzhiyun 						      contextsize, false);
1509*4882a593Smuzhiyun 		if (error)
1510*4882a593Smuzhiyun 			return error;
1511*4882a593Smuzhiyun 		/* fall through */
1512*4882a593Smuzhiyun 	}
1513*4882a593Smuzhiyun 
1514*4882a593Smuzhiyun 	req_ctx->hash_flags &= ~(HASH_FLAG_INIT_CTX | HASH_FLAG_UPDATE |
1515*4882a593Smuzhiyun 				 HASH_FLAG_FINALIZE);
1516*4882a593Smuzhiyun 
1517*4882a593Smuzhiyun 	error = artpec6_crypto_terminate_in_descrs(common);
1518*4882a593Smuzhiyun 	if (error)
1519*4882a593Smuzhiyun 		return error;
1520*4882a593Smuzhiyun 
1521*4882a593Smuzhiyun 	error = artpec6_crypto_terminate_out_descrs(common);
1522*4882a593Smuzhiyun 	if (error)
1523*4882a593Smuzhiyun 		return error;
1524*4882a593Smuzhiyun 
1525*4882a593Smuzhiyun 	error = artpec6_crypto_dma_map_descs(common);
1526*4882a593Smuzhiyun 	if (error)
1527*4882a593Smuzhiyun 		return error;
1528*4882a593Smuzhiyun 
1529*4882a593Smuzhiyun 	return ARTPEC6_CRYPTO_PREPARE_HASH_START;
1530*4882a593Smuzhiyun }
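/*
 * Editorial summary (not in the original source) of the descriptor
 * lists built by artpec6_crypto_prepare_hash() above, in the style of
 * the artpec6_crypto_prepare_crypto() kernel-doc below.  Bracketed
 * entries are conditional on the request flags:
 *
 * OUT: [KEY_MD][HMAC_KEY]<HASH_MD>[RESTORED_CTX]<data_0>..<data_n>[HASH_PAD]
 * IN:  <DUMMY_4B>[digest result or saved context]
 */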
1531*4882a593Smuzhiyun 
1532*4882a593Smuzhiyun 
1533*4882a593Smuzhiyun static int artpec6_crypto_aes_ecb_init(struct crypto_skcipher *tfm)
1534*4882a593Smuzhiyun {
1535*4882a593Smuzhiyun 	struct artpec6_cryptotfm_context *ctx = crypto_skcipher_ctx(tfm);
1536*4882a593Smuzhiyun 
1537*4882a593Smuzhiyun 	tfm->reqsize = sizeof(struct artpec6_crypto_request_context);
1538*4882a593Smuzhiyun 	ctx->crypto_type = ARTPEC6_CRYPTO_CIPHER_AES_ECB;
1539*4882a593Smuzhiyun 
1540*4882a593Smuzhiyun 	return 0;
1541*4882a593Smuzhiyun }
1542*4882a593Smuzhiyun 
1543*4882a593Smuzhiyun static int artpec6_crypto_aes_ctr_init(struct crypto_skcipher *tfm)
1544*4882a593Smuzhiyun {
1545*4882a593Smuzhiyun 	struct artpec6_cryptotfm_context *ctx = crypto_skcipher_ctx(tfm);
1546*4882a593Smuzhiyun 
1547*4882a593Smuzhiyun 	ctx->fallback =
1548*4882a593Smuzhiyun 		crypto_alloc_sync_skcipher(crypto_tfm_alg_name(&tfm->base),
1549*4882a593Smuzhiyun 					   0, CRYPTO_ALG_NEED_FALLBACK);
1550*4882a593Smuzhiyun 	if (IS_ERR(ctx->fallback))
1551*4882a593Smuzhiyun 		return PTR_ERR(ctx->fallback);
1552*4882a593Smuzhiyun 
1553*4882a593Smuzhiyun 	tfm->reqsize = sizeof(struct artpec6_crypto_request_context);
1554*4882a593Smuzhiyun 	ctx->crypto_type = ARTPEC6_CRYPTO_CIPHER_AES_CTR;
1555*4882a593Smuzhiyun 
1556*4882a593Smuzhiyun 	return 0;
1557*4882a593Smuzhiyun }
1558*4882a593Smuzhiyun 
1559*4882a593Smuzhiyun static int artpec6_crypto_aes_cbc_init(struct crypto_skcipher *tfm)
1560*4882a593Smuzhiyun {
1561*4882a593Smuzhiyun 	struct artpec6_cryptotfm_context *ctx = crypto_skcipher_ctx(tfm);
1562*4882a593Smuzhiyun 
1563*4882a593Smuzhiyun 	tfm->reqsize = sizeof(struct artpec6_crypto_request_context);
1564*4882a593Smuzhiyun 	ctx->crypto_type = ARTPEC6_CRYPTO_CIPHER_AES_CBC;
1565*4882a593Smuzhiyun 
1566*4882a593Smuzhiyun 	return 0;
1567*4882a593Smuzhiyun }
1568*4882a593Smuzhiyun 
1569*4882a593Smuzhiyun static int artpec6_crypto_aes_xts_init(struct crypto_skcipher *tfm)
1570*4882a593Smuzhiyun {
1571*4882a593Smuzhiyun 	struct artpec6_cryptotfm_context *ctx = crypto_skcipher_ctx(tfm);
1572*4882a593Smuzhiyun 
1573*4882a593Smuzhiyun 	tfm->reqsize = sizeof(struct artpec6_crypto_request_context);
1574*4882a593Smuzhiyun 	ctx->crypto_type = ARTPEC6_CRYPTO_CIPHER_AES_XTS;
1575*4882a593Smuzhiyun 
1576*4882a593Smuzhiyun 	return 0;
1577*4882a593Smuzhiyun }
1578*4882a593Smuzhiyun 
1579*4882a593Smuzhiyun static void artpec6_crypto_aes_exit(struct crypto_skcipher *tfm)
1580*4882a593Smuzhiyun {
1581*4882a593Smuzhiyun 	struct artpec6_cryptotfm_context *ctx = crypto_skcipher_ctx(tfm);
1582*4882a593Smuzhiyun 
1583*4882a593Smuzhiyun 	memset(ctx, 0, sizeof(*ctx));
1584*4882a593Smuzhiyun }
1585*4882a593Smuzhiyun 
1586*4882a593Smuzhiyun static void artpec6_crypto_aes_ctr_exit(struct crypto_skcipher *tfm)
1587*4882a593Smuzhiyun {
1588*4882a593Smuzhiyun 	struct artpec6_cryptotfm_context *ctx = crypto_skcipher_ctx(tfm);
1589*4882a593Smuzhiyun 
1590*4882a593Smuzhiyun 	crypto_free_sync_skcipher(ctx->fallback);
1591*4882a593Smuzhiyun 	artpec6_crypto_aes_exit(tfm);
1592*4882a593Smuzhiyun }
1593*4882a593Smuzhiyun 
1594*4882a593Smuzhiyun static int
1595*4882a593Smuzhiyun artpec6_crypto_cipher_set_key(struct crypto_skcipher *cipher, const u8 *key,
1596*4882a593Smuzhiyun 			      unsigned int keylen)
1597*4882a593Smuzhiyun {
1598*4882a593Smuzhiyun 	struct artpec6_cryptotfm_context *ctx =
1599*4882a593Smuzhiyun 		crypto_skcipher_ctx(cipher);
1600*4882a593Smuzhiyun 
1601*4882a593Smuzhiyun 	switch (keylen) {
1602*4882a593Smuzhiyun 	case 16:
1603*4882a593Smuzhiyun 	case 24:
1604*4882a593Smuzhiyun 	case 32:
1605*4882a593Smuzhiyun 		break;
1606*4882a593Smuzhiyun 	default:
1607*4882a593Smuzhiyun 		return -EINVAL;
1608*4882a593Smuzhiyun 	}
1609*4882a593Smuzhiyun 
1610*4882a593Smuzhiyun 	memcpy(ctx->aes_key, key, keylen);
1611*4882a593Smuzhiyun 	ctx->key_length = keylen;
1612*4882a593Smuzhiyun 	return 0;
1613*4882a593Smuzhiyun }
1614*4882a593Smuzhiyun 
1615*4882a593Smuzhiyun static int
1616*4882a593Smuzhiyun artpec6_crypto_xts_set_key(struct crypto_skcipher *cipher, const u8 *key,
1617*4882a593Smuzhiyun 			      unsigned int keylen)
1618*4882a593Smuzhiyun {
1619*4882a593Smuzhiyun 	struct artpec6_cryptotfm_context *ctx =
1620*4882a593Smuzhiyun 		crypto_skcipher_ctx(cipher);
1621*4882a593Smuzhiyun 	int ret;
1622*4882a593Smuzhiyun 
1623*4882a593Smuzhiyun 	ret = xts_check_key(&cipher->base, key, keylen);
1624*4882a593Smuzhiyun 	if (ret)
1625*4882a593Smuzhiyun 		return ret;
1626*4882a593Smuzhiyun 
1627*4882a593Smuzhiyun 	switch (keylen) {
1628*4882a593Smuzhiyun 	case 32:
1629*4882a593Smuzhiyun 	case 48:
1630*4882a593Smuzhiyun 	case 64:
1631*4882a593Smuzhiyun 		break;
1632*4882a593Smuzhiyun 	default:
1633*4882a593Smuzhiyun 		return -EINVAL;
1634*4882a593Smuzhiyun 	}
1635*4882a593Smuzhiyun 
1636*4882a593Smuzhiyun 	memcpy(ctx->aes_key, key, keylen);
1637*4882a593Smuzhiyun 	ctx->key_length = keylen;
1638*4882a593Smuzhiyun 	return 0;
1639*4882a593Smuzhiyun }
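/*
 * Editorial note (not in the original source): XTS keys are two equal
 * size AES keys concatenated, which is why 32, 48 and 64 byte keys are
 * accepted here and why artpec6_crypto_prepare_crypto() derives the
 * metadata key length from ctx->key_length / 2.
 */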
1640*4882a593Smuzhiyun 
1641*4882a593Smuzhiyun /** artpec6_crypto_prepare_crypto - Prepare an async block cipher crypto request
1642*4882a593Smuzhiyun  *
1643*4882a593Smuzhiyun  * @req: The asynch request to process
1644*4882a593Smuzhiyun  *
1645*4882a593Smuzhiyun  * @return 0 if the dma job was successfully prepared
1646*4882a593Smuzhiyun  *	  <0 on error
1647*4882a593Smuzhiyun  *
1648*4882a593Smuzhiyun  * This function sets up the PDMA descriptors for a block cipher request.
1649*4882a593Smuzhiyun  *
1650*4882a593Smuzhiyun  * The required padding is added for AES-CTR using a statically defined
1651*4882a593Smuzhiyun  * buffer.
1652*4882a593Smuzhiyun  *
1653*4882a593Smuzhiyun  * The PDMA descriptor list will be as follows:
1654*4882a593Smuzhiyun  *
1655*4882a593Smuzhiyun  * OUT: [KEY_MD][KEY][EOP]<CIPHER_MD>[IV]<data_0>...[data_n][AES-CTR_pad]<eop>
1656*4882a593Smuzhiyun  * IN:  <CIPHER_MD><data_0>...[data_n]<intr>
1657*4882a593Smuzhiyun  *
1658*4882a593Smuzhiyun  */
1659*4882a593Smuzhiyun static int artpec6_crypto_prepare_crypto(struct skcipher_request *areq)
1660*4882a593Smuzhiyun {
1661*4882a593Smuzhiyun 	int ret;
1662*4882a593Smuzhiyun 	struct artpec6_crypto_walk walk;
1663*4882a593Smuzhiyun 	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
1664*4882a593Smuzhiyun 	struct artpec6_cryptotfm_context *ctx = crypto_skcipher_ctx(cipher);
1665*4882a593Smuzhiyun 	struct artpec6_crypto_request_context *req_ctx = NULL;
1666*4882a593Smuzhiyun 	size_t iv_len = crypto_skcipher_ivsize(cipher);
1667*4882a593Smuzhiyun 	struct artpec6_crypto *ac = dev_get_drvdata(artpec6_crypto_dev);
1668*4882a593Smuzhiyun 	enum artpec6_crypto_variant variant = ac->variant;
1669*4882a593Smuzhiyun 	struct artpec6_crypto_req_common *common;
1670*4882a593Smuzhiyun 	bool cipher_decr = false;
1671*4882a593Smuzhiyun 	size_t cipher_klen;
1672*4882a593Smuzhiyun 	u32 cipher_len = 0; /* Same as regk_crypto_key_128 for NULL crypto */
1673*4882a593Smuzhiyun 	u32 oper;
1674*4882a593Smuzhiyun 
1675*4882a593Smuzhiyun 	req_ctx = skcipher_request_ctx(areq);
1676*4882a593Smuzhiyun 	common = &req_ctx->common;
1677*4882a593Smuzhiyun 
1678*4882a593Smuzhiyun 	artpec6_crypto_init_dma_operation(common);
1679*4882a593Smuzhiyun 
1680*4882a593Smuzhiyun 	if (variant == ARTPEC6_CRYPTO)
1681*4882a593Smuzhiyun 		ctx->key_md = FIELD_PREP(A6_CRY_MD_OPER, a6_regk_crypto_dlkey);
1682*4882a593Smuzhiyun 	else
1683*4882a593Smuzhiyun 		ctx->key_md = FIELD_PREP(A7_CRY_MD_OPER, a7_regk_crypto_dlkey);
1684*4882a593Smuzhiyun 
1685*4882a593Smuzhiyun 	ret = artpec6_crypto_setup_out_descr(common, (void *)&ctx->key_md,
1686*4882a593Smuzhiyun 					     sizeof(ctx->key_md), false, false);
1687*4882a593Smuzhiyun 	if (ret)
1688*4882a593Smuzhiyun 		return ret;
1689*4882a593Smuzhiyun 
1690*4882a593Smuzhiyun 	ret = artpec6_crypto_setup_out_descr(common, ctx->aes_key,
1691*4882a593Smuzhiyun 					      ctx->key_length, true, false);
1692*4882a593Smuzhiyun 	if (ret)
1693*4882a593Smuzhiyun 		return ret;
1694*4882a593Smuzhiyun 
1695*4882a593Smuzhiyun 	req_ctx->cipher_md = 0;
1696*4882a593Smuzhiyun 
1697*4882a593Smuzhiyun 	if (ctx->crypto_type == ARTPEC6_CRYPTO_CIPHER_AES_XTS)
1698*4882a593Smuzhiyun 		cipher_klen = ctx->key_length/2;
1699*4882a593Smuzhiyun 	else
1700*4882a593Smuzhiyun 		cipher_klen =  ctx->key_length;
1701*4882a593Smuzhiyun 
1702*4882a593Smuzhiyun 	/* Metadata */
1703*4882a593Smuzhiyun 	switch (cipher_klen) {
1704*4882a593Smuzhiyun 	case 16:
1705*4882a593Smuzhiyun 		cipher_len = regk_crypto_key_128;
1706*4882a593Smuzhiyun 		break;
1707*4882a593Smuzhiyun 	case 24:
1708*4882a593Smuzhiyun 		cipher_len = regk_crypto_key_192;
1709*4882a593Smuzhiyun 		break;
1710*4882a593Smuzhiyun 	case 32:
1711*4882a593Smuzhiyun 		cipher_len = regk_crypto_key_256;
1712*4882a593Smuzhiyun 		break;
1713*4882a593Smuzhiyun 	default:
1714*4882a593Smuzhiyun 		pr_err("%s: Invalid key length %d!\n",
1715*4882a593Smuzhiyun 			MODULE_NAME, ctx->key_length);
1716*4882a593Smuzhiyun 		return -EINVAL;
1717*4882a593Smuzhiyun 	}
1718*4882a593Smuzhiyun 
1719*4882a593Smuzhiyun 	switch (ctx->crypto_type) {
1720*4882a593Smuzhiyun 	case ARTPEC6_CRYPTO_CIPHER_AES_ECB:
1721*4882a593Smuzhiyun 		oper = regk_crypto_aes_ecb;
1722*4882a593Smuzhiyun 		cipher_decr = req_ctx->decrypt;
1723*4882a593Smuzhiyun 		break;
1724*4882a593Smuzhiyun 
1725*4882a593Smuzhiyun 	case ARTPEC6_CRYPTO_CIPHER_AES_CBC:
1726*4882a593Smuzhiyun 		oper = regk_crypto_aes_cbc;
1727*4882a593Smuzhiyun 		cipher_decr = req_ctx->decrypt;
1728*4882a593Smuzhiyun 		break;
1729*4882a593Smuzhiyun 
1730*4882a593Smuzhiyun 	case ARTPEC6_CRYPTO_CIPHER_AES_CTR:
1731*4882a593Smuzhiyun 		oper = regk_crypto_aes_ctr;
1732*4882a593Smuzhiyun 		cipher_decr = false;
1733*4882a593Smuzhiyun 		break;
1734*4882a593Smuzhiyun 
1735*4882a593Smuzhiyun 	case ARTPEC6_CRYPTO_CIPHER_AES_XTS:
1736*4882a593Smuzhiyun 		oper = regk_crypto_aes_xts;
1737*4882a593Smuzhiyun 		cipher_decr = req_ctx->decrypt;
1738*4882a593Smuzhiyun 
1739*4882a593Smuzhiyun 		if (variant == ARTPEC6_CRYPTO)
1740*4882a593Smuzhiyun 			req_ctx->cipher_md |= A6_CRY_MD_CIPHER_DSEQ;
1741*4882a593Smuzhiyun 		else
1742*4882a593Smuzhiyun 			req_ctx->cipher_md |= A7_CRY_MD_CIPHER_DSEQ;
1743*4882a593Smuzhiyun 		break;
1744*4882a593Smuzhiyun 
1745*4882a593Smuzhiyun 	default:
1746*4882a593Smuzhiyun 		pr_err("%s: Invalid cipher mode %d!\n",
1747*4882a593Smuzhiyun 			MODULE_NAME, ctx->crypto_type);
1748*4882a593Smuzhiyun 		return -EINVAL;
1749*4882a593Smuzhiyun 	}
1750*4882a593Smuzhiyun 
1751*4882a593Smuzhiyun 	if (variant == ARTPEC6_CRYPTO) {
1752*4882a593Smuzhiyun 		req_ctx->cipher_md |= FIELD_PREP(A6_CRY_MD_OPER, oper);
1753*4882a593Smuzhiyun 		req_ctx->cipher_md |= FIELD_PREP(A6_CRY_MD_CIPHER_LEN,
1754*4882a593Smuzhiyun 						 cipher_len);
1755*4882a593Smuzhiyun 		if (cipher_decr)
1756*4882a593Smuzhiyun 			req_ctx->cipher_md |= A6_CRY_MD_CIPHER_DECR;
1757*4882a593Smuzhiyun 	} else {
1758*4882a593Smuzhiyun 		req_ctx->cipher_md |= FIELD_PREP(A7_CRY_MD_OPER, oper);
1759*4882a593Smuzhiyun 		req_ctx->cipher_md |= FIELD_PREP(A7_CRY_MD_CIPHER_LEN,
1760*4882a593Smuzhiyun 						 cipher_len);
1761*4882a593Smuzhiyun 		if (cipher_decr)
1762*4882a593Smuzhiyun 			req_ctx->cipher_md |= A7_CRY_MD_CIPHER_DECR;
1763*4882a593Smuzhiyun 	}
1764*4882a593Smuzhiyun 
1765*4882a593Smuzhiyun 	ret = artpec6_crypto_setup_out_descr(common,
1766*4882a593Smuzhiyun 					    &req_ctx->cipher_md,
1767*4882a593Smuzhiyun 					    sizeof(req_ctx->cipher_md),
1768*4882a593Smuzhiyun 					    false, false);
1769*4882a593Smuzhiyun 	if (ret)
1770*4882a593Smuzhiyun 		return ret;
1771*4882a593Smuzhiyun 
1772*4882a593Smuzhiyun 	ret = artpec6_crypto_setup_in_descr(common, ac->pad_buffer, 4, false);
1773*4882a593Smuzhiyun 	if (ret)
1774*4882a593Smuzhiyun 		return ret;
1775*4882a593Smuzhiyun 
1776*4882a593Smuzhiyun 	if (iv_len) {
1777*4882a593Smuzhiyun 		ret = artpec6_crypto_setup_out_descr(common, areq->iv, iv_len,
1778*4882a593Smuzhiyun 						     false, false);
1779*4882a593Smuzhiyun 		if (ret)
1780*4882a593Smuzhiyun 			return ret;
1781*4882a593Smuzhiyun 	}
1782*4882a593Smuzhiyun 	/* Data out */
1783*4882a593Smuzhiyun 	artpec6_crypto_walk_init(&walk, areq->src);
1784*4882a593Smuzhiyun 	ret = artpec6_crypto_setup_sg_descrs_out(common, &walk, areq->cryptlen);
1785*4882a593Smuzhiyun 	if (ret)
1786*4882a593Smuzhiyun 		return ret;
1787*4882a593Smuzhiyun 
1788*4882a593Smuzhiyun 	/* Data in */
1789*4882a593Smuzhiyun 	artpec6_crypto_walk_init(&walk, areq->dst);
1790*4882a593Smuzhiyun 	ret = artpec6_crypto_setup_sg_descrs_in(common, &walk, areq->cryptlen);
1791*4882a593Smuzhiyun 	if (ret)
1792*4882a593Smuzhiyun 		return ret;
1793*4882a593Smuzhiyun 
1794*4882a593Smuzhiyun 	/* CTR-mode padding required by the HW. */
1795*4882a593Smuzhiyun 	if (ctx->crypto_type == ARTPEC6_CRYPTO_CIPHER_AES_CTR ||
1796*4882a593Smuzhiyun 	    ctx->crypto_type == ARTPEC6_CRYPTO_CIPHER_AES_XTS) {
1797*4882a593Smuzhiyun 		size_t pad = ALIGN(areq->cryptlen, AES_BLOCK_SIZE) -
1798*4882a593Smuzhiyun 			     areq->cryptlen;
1799*4882a593Smuzhiyun 
1800*4882a593Smuzhiyun 		if (pad) {
1801*4882a593Smuzhiyun 			ret = artpec6_crypto_setup_out_descr(common,
1802*4882a593Smuzhiyun 							     ac->pad_buffer,
1803*4882a593Smuzhiyun 							     pad, false, false);
1804*4882a593Smuzhiyun 			if (ret)
1805*4882a593Smuzhiyun 				return ret;
1806*4882a593Smuzhiyun 
1807*4882a593Smuzhiyun 			ret = artpec6_crypto_setup_in_descr(common,
1808*4882a593Smuzhiyun 							    ac->pad_buffer, pad,
1809*4882a593Smuzhiyun 							    false);
1810*4882a593Smuzhiyun 			if (ret)
1811*4882a593Smuzhiyun 				return ret;
1812*4882a593Smuzhiyun 		}
1813*4882a593Smuzhiyun 	}
1814*4882a593Smuzhiyun 
1815*4882a593Smuzhiyun 	ret = artpec6_crypto_terminate_out_descrs(common);
1816*4882a593Smuzhiyun 	if (ret)
1817*4882a593Smuzhiyun 		return ret;
1818*4882a593Smuzhiyun 
1819*4882a593Smuzhiyun 	ret = artpec6_crypto_terminate_in_descrs(common);
1820*4882a593Smuzhiyun 	if (ret)
1821*4882a593Smuzhiyun 		return ret;
1822*4882a593Smuzhiyun 
1823*4882a593Smuzhiyun 	return artpec6_crypto_dma_map_descs(common);
1824*4882a593Smuzhiyun }
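/*
 * Editorial example (not in the original source) of the CTR/XTS tail
 * padding above: for areq->cryptlen = 20, ALIGN(20, AES_BLOCK_SIZE)
 * is 32, so pad = 12 bytes of the shared pad buffer are queued on both
 * the OUT and IN lists to keep the hardware fed whole 16-byte blocks.
 */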
1825*4882a593Smuzhiyun 
1826*4882a593Smuzhiyun static int artpec6_crypto_prepare_aead(struct aead_request *areq)
1827*4882a593Smuzhiyun {
1828*4882a593Smuzhiyun 	size_t count;
1829*4882a593Smuzhiyun 	int ret;
1830*4882a593Smuzhiyun 	size_t input_length;
1831*4882a593Smuzhiyun 	struct artpec6_cryptotfm_context *ctx = crypto_tfm_ctx(areq->base.tfm);
1832*4882a593Smuzhiyun 	struct artpec6_crypto_aead_req_ctx *req_ctx = aead_request_ctx(areq);
1833*4882a593Smuzhiyun 	struct crypto_aead *cipher = crypto_aead_reqtfm(areq);
1834*4882a593Smuzhiyun 	struct artpec6_crypto_req_common *common = &req_ctx->common;
1835*4882a593Smuzhiyun 	struct artpec6_crypto *ac = dev_get_drvdata(artpec6_crypto_dev);
1836*4882a593Smuzhiyun 	enum artpec6_crypto_variant variant = ac->variant;
1837*4882a593Smuzhiyun 	u32 md_cipher_len;
1838*4882a593Smuzhiyun 
1839*4882a593Smuzhiyun 	artpec6_crypto_init_dma_operation(common);
1840*4882a593Smuzhiyun 
1841*4882a593Smuzhiyun 	/* Key */
1842*4882a593Smuzhiyun 	if (variant == ARTPEC6_CRYPTO) {
1843*4882a593Smuzhiyun 		ctx->key_md = FIELD_PREP(A6_CRY_MD_OPER,
1844*4882a593Smuzhiyun 					 a6_regk_crypto_dlkey);
1845*4882a593Smuzhiyun 	} else {
1846*4882a593Smuzhiyun 		ctx->key_md = FIELD_PREP(A7_CRY_MD_OPER,
1847*4882a593Smuzhiyun 					 a7_regk_crypto_dlkey);
1848*4882a593Smuzhiyun 	}
1849*4882a593Smuzhiyun 	ret = artpec6_crypto_setup_out_descr(common, (void *)&ctx->key_md,
1850*4882a593Smuzhiyun 					     sizeof(ctx->key_md), false, false);
1851*4882a593Smuzhiyun 	if (ret)
1852*4882a593Smuzhiyun 		return ret;
1853*4882a593Smuzhiyun 
1854*4882a593Smuzhiyun 	ret = artpec6_crypto_setup_out_descr(common, ctx->aes_key,
1855*4882a593Smuzhiyun 					     ctx->key_length, true, false);
1856*4882a593Smuzhiyun 	if (ret)
1857*4882a593Smuzhiyun 		return ret;
1858*4882a593Smuzhiyun 
1859*4882a593Smuzhiyun 	req_ctx->cipher_md = 0;
1860*4882a593Smuzhiyun 
1861*4882a593Smuzhiyun 	switch (ctx->key_length) {
1862*4882a593Smuzhiyun 	case 16:
1863*4882a593Smuzhiyun 		md_cipher_len = regk_crypto_key_128;
1864*4882a593Smuzhiyun 		break;
1865*4882a593Smuzhiyun 	case 24:
1866*4882a593Smuzhiyun 		md_cipher_len = regk_crypto_key_192;
1867*4882a593Smuzhiyun 		break;
1868*4882a593Smuzhiyun 	case 32:
1869*4882a593Smuzhiyun 		md_cipher_len = regk_crypto_key_256;
1870*4882a593Smuzhiyun 		break;
1871*4882a593Smuzhiyun 	default:
1872*4882a593Smuzhiyun 		return -EINVAL;
1873*4882a593Smuzhiyun 	}
1874*4882a593Smuzhiyun 
1875*4882a593Smuzhiyun 	if (variant == ARTPEC6_CRYPTO) {
1876*4882a593Smuzhiyun 		req_ctx->cipher_md |= FIELD_PREP(A6_CRY_MD_OPER,
1877*4882a593Smuzhiyun 						 regk_crypto_aes_gcm);
1878*4882a593Smuzhiyun 		req_ctx->cipher_md |= FIELD_PREP(A6_CRY_MD_CIPHER_LEN,
1879*4882a593Smuzhiyun 						 md_cipher_len);
1880*4882a593Smuzhiyun 		if (req_ctx->decrypt)
1881*4882a593Smuzhiyun 			req_ctx->cipher_md |= A6_CRY_MD_CIPHER_DECR;
1882*4882a593Smuzhiyun 	} else {
1883*4882a593Smuzhiyun 		req_ctx->cipher_md |= FIELD_PREP(A7_CRY_MD_OPER,
1884*4882a593Smuzhiyun 						 regk_crypto_aes_gcm);
1885*4882a593Smuzhiyun 		req_ctx->cipher_md |= FIELD_PREP(A7_CRY_MD_CIPHER_LEN,
1886*4882a593Smuzhiyun 						 md_cipher_len);
1887*4882a593Smuzhiyun 		if (req_ctx->decrypt)
1888*4882a593Smuzhiyun 			req_ctx->cipher_md |= A7_CRY_MD_CIPHER_DECR;
1889*4882a593Smuzhiyun 	}
1890*4882a593Smuzhiyun 
1891*4882a593Smuzhiyun 	ret = artpec6_crypto_setup_out_descr(common,
1892*4882a593Smuzhiyun 					    (void *) &req_ctx->cipher_md,
1893*4882a593Smuzhiyun 					    sizeof(req_ctx->cipher_md), false,
1894*4882a593Smuzhiyun 					    false);
1895*4882a593Smuzhiyun 	if (ret)
1896*4882a593Smuzhiyun 		return ret;
1897*4882a593Smuzhiyun 
1898*4882a593Smuzhiyun 	ret = artpec6_crypto_setup_in_descr(common, ac->pad_buffer, 4, false);
1899*4882a593Smuzhiyun 	if (ret)
1900*4882a593Smuzhiyun 		return ret;
1901*4882a593Smuzhiyun 
1902*4882a593Smuzhiyun 	/* For the decryption, cryptlen includes the tag. */
1903*4882a593Smuzhiyun 	input_length = areq->cryptlen;
1904*4882a593Smuzhiyun 	if (req_ctx->decrypt)
1905*4882a593Smuzhiyun 		input_length -= crypto_aead_authsize(cipher);
1906*4882a593Smuzhiyun 
1907*4882a593Smuzhiyun 	/* Prepare the context buffer */
1908*4882a593Smuzhiyun 	req_ctx->hw_ctx.aad_length_bits =
1909*4882a593Smuzhiyun 		__cpu_to_be64(8*areq->assoclen);
1910*4882a593Smuzhiyun 
1911*4882a593Smuzhiyun 	req_ctx->hw_ctx.text_length_bits =
1912*4882a593Smuzhiyun 		__cpu_to_be64(8*input_length);
1913*4882a593Smuzhiyun 
1914*4882a593Smuzhiyun 	memcpy(req_ctx->hw_ctx.J0, areq->iv, crypto_aead_ivsize(cipher));
1915*4882a593Smuzhiyun 	// The HW omits the initial increment of the counter field.
1916*4882a593Smuzhiyun 	memcpy(req_ctx->hw_ctx.J0 + GCM_AES_IV_SIZE, "\x00\x00\x00\x01", 4);
1917*4882a593Smuzhiyun 
1918*4882a593Smuzhiyun 	ret = artpec6_crypto_setup_out_descr(common, &req_ctx->hw_ctx,
1919*4882a593Smuzhiyun 		sizeof(struct artpec6_crypto_aead_hw_ctx), false, false);
1920*4882a593Smuzhiyun 	if (ret)
1921*4882a593Smuzhiyun 		return ret;
1922*4882a593Smuzhiyun 
1923*4882a593Smuzhiyun 	{
1924*4882a593Smuzhiyun 		struct artpec6_crypto_walk walk;
1925*4882a593Smuzhiyun 
1926*4882a593Smuzhiyun 		artpec6_crypto_walk_init(&walk, areq->src);
1927*4882a593Smuzhiyun 
1928*4882a593Smuzhiyun 		/* Associated data */
1929*4882a593Smuzhiyun 		count = areq->assoclen;
1930*4882a593Smuzhiyun 		ret = artpec6_crypto_setup_sg_descrs_out(common, &walk, count);
1931*4882a593Smuzhiyun 		if (ret)
1932*4882a593Smuzhiyun 			return ret;
1933*4882a593Smuzhiyun 
1934*4882a593Smuzhiyun 		if (!IS_ALIGNED(areq->assoclen, 16)) {
1935*4882a593Smuzhiyun 			size_t assoc_pad = 16 - (areq->assoclen % 16);
1936*4882a593Smuzhiyun 			/* The HW mandates zero padding here */
1937*4882a593Smuzhiyun 			ret = artpec6_crypto_setup_out_descr(common,
1938*4882a593Smuzhiyun 							     ac->zero_buffer,
1939*4882a593Smuzhiyun 							     assoc_pad, false,
1940*4882a593Smuzhiyun 							     false);
1941*4882a593Smuzhiyun 			if (ret)
1942*4882a593Smuzhiyun 				return ret;
1943*4882a593Smuzhiyun 		}
1944*4882a593Smuzhiyun 
1945*4882a593Smuzhiyun 		/* Data to crypto */
1946*4882a593Smuzhiyun 		count = input_length;
1947*4882a593Smuzhiyun 		ret = artpec6_crypto_setup_sg_descrs_out(common, &walk, count);
1948*4882a593Smuzhiyun 		if (ret)
1949*4882a593Smuzhiyun 			return ret;
1950*4882a593Smuzhiyun 
1951*4882a593Smuzhiyun 		if (!IS_ALIGNED(input_length, 16)) {
1952*4882a593Smuzhiyun 			size_t crypto_pad = 16 - (input_length % 16);
1953*4882a593Smuzhiyun 			/* The HW mandates zero padding here */
1954*4882a593Smuzhiyun 			ret = artpec6_crypto_setup_out_descr(common,
1955*4882a593Smuzhiyun 							     ac->zero_buffer,
1956*4882a593Smuzhiyun 							     crypto_pad,
1957*4882a593Smuzhiyun 							     false,
1958*4882a593Smuzhiyun 							     false);
1959*4882a593Smuzhiyun 			if (ret)
1960*4882a593Smuzhiyun 				return ret;
1961*4882a593Smuzhiyun 		}
1962*4882a593Smuzhiyun 	}
1963*4882a593Smuzhiyun 
1964*4882a593Smuzhiyun 	/* Data from crypto */
1965*4882a593Smuzhiyun 	{
1966*4882a593Smuzhiyun 		struct artpec6_crypto_walk walk;
1967*4882a593Smuzhiyun 		size_t output_len = areq->cryptlen;
1968*4882a593Smuzhiyun 
1969*4882a593Smuzhiyun 		if (req_ctx->decrypt)
1970*4882a593Smuzhiyun 			output_len -= crypto_aead_authsize(cipher);
1971*4882a593Smuzhiyun 
1972*4882a593Smuzhiyun 		artpec6_crypto_walk_init(&walk, areq->dst);
1973*4882a593Smuzhiyun 
1974*4882a593Smuzhiyun 		/* skip associated data in the output */
1975*4882a593Smuzhiyun 		count = artpec6_crypto_walk_advance(&walk, areq->assoclen);
1976*4882a593Smuzhiyun 		if (count)
1977*4882a593Smuzhiyun 			return -EINVAL;
1978*4882a593Smuzhiyun 
1979*4882a593Smuzhiyun 		count = output_len;
1980*4882a593Smuzhiyun 		ret = artpec6_crypto_setup_sg_descrs_in(common, &walk, count);
1981*4882a593Smuzhiyun 		if (ret)
1982*4882a593Smuzhiyun 			return ret;
1983*4882a593Smuzhiyun 
1984*4882a593Smuzhiyun 		/* Put padding between the cryptotext and the auth tag */
1985*4882a593Smuzhiyun 		if (!IS_ALIGNED(output_len, 16)) {
1986*4882a593Smuzhiyun 			size_t crypto_pad = 16 - (output_len % 16);
1987*4882a593Smuzhiyun 
1988*4882a593Smuzhiyun 			ret = artpec6_crypto_setup_in_descr(common,
1989*4882a593Smuzhiyun 							    ac->pad_buffer,
1990*4882a593Smuzhiyun 							    crypto_pad, false);
1991*4882a593Smuzhiyun 			if (ret)
1992*4882a593Smuzhiyun 				return ret;
1993*4882a593Smuzhiyun 		}
1994*4882a593Smuzhiyun 
1995*4882a593Smuzhiyun 		/* The authentication tag shall follow immediately after
1996*4882a593Smuzhiyun 		 * the output ciphertext. For decryption it is put in a context
1997*4882a593Smuzhiyun 		 * buffer for later comparison against the input tag.
1998*4882a593Smuzhiyun 		 */
1999*4882a593Smuzhiyun 
2000*4882a593Smuzhiyun 		if (req_ctx->decrypt) {
2001*4882a593Smuzhiyun 			ret = artpec6_crypto_setup_in_descr(common,
2002*4882a593Smuzhiyun 				req_ctx->decryption_tag, AES_BLOCK_SIZE, false);
2003*4882a593Smuzhiyun 			if (ret)
2004*4882a593Smuzhiyun 				return ret;
2005*4882a593Smuzhiyun 
2006*4882a593Smuzhiyun 		} else {
2007*4882a593Smuzhiyun 			/* For encryption the requested tag size may be smaller
2008*4882a593Smuzhiyun 			 * than the hardware's generated tag.
2009*4882a593Smuzhiyun 			 */
2010*4882a593Smuzhiyun 			size_t authsize = crypto_aead_authsize(cipher);
2011*4882a593Smuzhiyun 
2012*4882a593Smuzhiyun 			ret = artpec6_crypto_setup_sg_descrs_in(common, &walk,
2013*4882a593Smuzhiyun 								authsize);
2014*4882a593Smuzhiyun 			if (ret)
2015*4882a593Smuzhiyun 				return ret;
2016*4882a593Smuzhiyun 
2017*4882a593Smuzhiyun 			if (authsize < AES_BLOCK_SIZE) {
2018*4882a593Smuzhiyun 				count = AES_BLOCK_SIZE - authsize;
2019*4882a593Smuzhiyun 				ret = artpec6_crypto_setup_in_descr(common,
2020*4882a593Smuzhiyun 					ac->pad_buffer,
2021*4882a593Smuzhiyun 					count, false);
2022*4882a593Smuzhiyun 				if (ret)
2023*4882a593Smuzhiyun 					return ret;
2024*4882a593Smuzhiyun 			}
2025*4882a593Smuzhiyun 		}
2026*4882a593Smuzhiyun 
2027*4882a593Smuzhiyun 	}
2028*4882a593Smuzhiyun 
2029*4882a593Smuzhiyun 	ret = artpec6_crypto_terminate_in_descrs(common);
2030*4882a593Smuzhiyun 	if (ret)
2031*4882a593Smuzhiyun 		return ret;
2032*4882a593Smuzhiyun 
2033*4882a593Smuzhiyun 	ret = artpec6_crypto_terminate_out_descrs(common);
2034*4882a593Smuzhiyun 	if (ret)
2035*4882a593Smuzhiyun 		return ret;
2036*4882a593Smuzhiyun 
2037*4882a593Smuzhiyun 	return artpec6_crypto_dma_map_descs(common);
2038*4882a593Smuzhiyun }
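/*
 * Editorial note (not in the original source): the hardware context
 * above mirrors the GCM specification for 96-bit IVs: J0 is the IV
 * with a 32-bit counter of 1 appended, and the AAD and text lengths
 * are passed as 64-bit big-endian bit counts, matching the final
 * GHASH length block.
 */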
2039*4882a593Smuzhiyun 
2040*4882a593Smuzhiyun static void artpec6_crypto_process_queue(struct artpec6_crypto *ac,
2041*4882a593Smuzhiyun 	    struct list_head *completions)
2042*4882a593Smuzhiyun {
2043*4882a593Smuzhiyun 	struct artpec6_crypto_req_common *req;
2044*4882a593Smuzhiyun 
2045*4882a593Smuzhiyun 	while (!list_empty(&ac->queue) && !artpec6_crypto_busy()) {
2046*4882a593Smuzhiyun 		req = list_first_entry(&ac->queue,
2047*4882a593Smuzhiyun 				       struct artpec6_crypto_req_common,
2048*4882a593Smuzhiyun 				       list);
2049*4882a593Smuzhiyun 		list_move_tail(&req->list, &ac->pending);
2050*4882a593Smuzhiyun 		artpec6_crypto_start_dma(req);
2051*4882a593Smuzhiyun 
2052*4882a593Smuzhiyun 		list_add_tail(&req->complete_in_progress, completions);
2053*4882a593Smuzhiyun 	}
2054*4882a593Smuzhiyun 
2055*4882a593Smuzhiyun 	/*
2056*4882a593Smuzhiyun 	 * In some cases, the hardware can raise an in_eop_flush interrupt
2057*4882a593Smuzhiyun 	 * before actually updating the status, so we have a timer that will
2058*4882a593Smuzhiyun 	 * recheck the status on timeout.  Since the cases are expected to be
2059*4882a593Smuzhiyun 	 * very rare, we use a relatively large timeout value.  There should be
2060*4882a593Smuzhiyun 	 * no noticeable negative effect if we timeout spuriously.
2061*4882a593Smuzhiyun 	 */
2062*4882a593Smuzhiyun 	if (ac->pending_count)
2063*4882a593Smuzhiyun 		mod_timer(&ac->timer, jiffies + msecs_to_jiffies(100));
2064*4882a593Smuzhiyun 	else
2065*4882a593Smuzhiyun 		del_timer(&ac->timer);
2066*4882a593Smuzhiyun }
2067*4882a593Smuzhiyun 
2068*4882a593Smuzhiyun static void artpec6_crypto_timeout(struct timer_list *t)
2069*4882a593Smuzhiyun {
2070*4882a593Smuzhiyun 	struct artpec6_crypto *ac = from_timer(ac, t, timer);
2071*4882a593Smuzhiyun 
2072*4882a593Smuzhiyun 	dev_info_ratelimited(artpec6_crypto_dev, "timeout\n");
2073*4882a593Smuzhiyun 
2074*4882a593Smuzhiyun 	tasklet_schedule(&ac->task);
2075*4882a593Smuzhiyun }
2076*4882a593Smuzhiyun 
2077*4882a593Smuzhiyun static void artpec6_crypto_task(unsigned long data)
2078*4882a593Smuzhiyun {
2079*4882a593Smuzhiyun 	struct artpec6_crypto *ac = (struct artpec6_crypto *)data;
2080*4882a593Smuzhiyun 	struct artpec6_crypto_req_common *req;
2081*4882a593Smuzhiyun 	struct artpec6_crypto_req_common *n;
2082*4882a593Smuzhiyun 	struct list_head complete_done;
2083*4882a593Smuzhiyun 	struct list_head complete_in_progress;
2084*4882a593Smuzhiyun 
2085*4882a593Smuzhiyun 	INIT_LIST_HEAD(&complete_done);
2086*4882a593Smuzhiyun 	INIT_LIST_HEAD(&complete_in_progress);
2087*4882a593Smuzhiyun 
2088*4882a593Smuzhiyun 	if (list_empty(&ac->pending)) {
2089*4882a593Smuzhiyun 		pr_debug("Spurious IRQ\n");
2090*4882a593Smuzhiyun 		return;
2091*4882a593Smuzhiyun 	}
2092*4882a593Smuzhiyun 
2093*4882a593Smuzhiyun 	spin_lock_bh(&ac->queue_lock);
2094*4882a593Smuzhiyun 
2095*4882a593Smuzhiyun 	list_for_each_entry_safe(req, n, &ac->pending, list) {
2096*4882a593Smuzhiyun 		struct artpec6_crypto_dma_descriptors *dma = req->dma;
2097*4882a593Smuzhiyun 		u32 stat;
2098*4882a593Smuzhiyun 		dma_addr_t stataddr;
2099*4882a593Smuzhiyun 
2100*4882a593Smuzhiyun 		stataddr = dma->stat_dma_addr + 4 * (req->dma->in_cnt - 1);
2101*4882a593Smuzhiyun 		dma_sync_single_for_cpu(artpec6_crypto_dev,
2102*4882a593Smuzhiyun 					stataddr,
2103*4882a593Smuzhiyun 					4,
2104*4882a593Smuzhiyun 					DMA_BIDIRECTIONAL);
2105*4882a593Smuzhiyun 
2106*4882a593Smuzhiyun 		stat = req->dma->stat[req->dma->in_cnt-1];
2107*4882a593Smuzhiyun 
2108*4882a593Smuzhiyun 		/* A non-zero final status descriptor indicates
2109*4882a593Smuzhiyun 		 * this job has finished.
2110*4882a593Smuzhiyun 		 */
2111*4882a593Smuzhiyun 		pr_debug("Request %p status is %X\n", req, stat);
2112*4882a593Smuzhiyun 		if (!stat)
2113*4882a593Smuzhiyun 			break;
2114*4882a593Smuzhiyun 
2115*4882a593Smuzhiyun 		/* Allow testing of timeout handling with fault injection */
2116*4882a593Smuzhiyun #ifdef CONFIG_FAULT_INJECTION
2117*4882a593Smuzhiyun 		if (should_fail(&artpec6_crypto_fail_status_read, 1))
2118*4882a593Smuzhiyun 			continue;
2119*4882a593Smuzhiyun #endif
2120*4882a593Smuzhiyun 
2121*4882a593Smuzhiyun 		pr_debug("Completing request %p\n", req);
2122*4882a593Smuzhiyun 
2123*4882a593Smuzhiyun 		list_move_tail(&req->list, &complete_done);
2124*4882a593Smuzhiyun 
2125*4882a593Smuzhiyun 		ac->pending_count--;
2126*4882a593Smuzhiyun 	}
2127*4882a593Smuzhiyun 
2128*4882a593Smuzhiyun 	artpec6_crypto_process_queue(ac, &complete_in_progress);
2129*4882a593Smuzhiyun 
2130*4882a593Smuzhiyun 	spin_unlock_bh(&ac->queue_lock);
2131*4882a593Smuzhiyun 
2132*4882a593Smuzhiyun 	/* Perform the completion callbacks without holding the queue lock
2133*4882a593Smuzhiyun 	 * to allow new request submissions from the callbacks.
2134*4882a593Smuzhiyun 	 */
2135*4882a593Smuzhiyun 	list_for_each_entry_safe(req, n, &complete_done, list) {
2136*4882a593Smuzhiyun 		artpec6_crypto_dma_unmap_all(req);
2137*4882a593Smuzhiyun 		artpec6_crypto_copy_bounce_buffers(req);
2138*4882a593Smuzhiyun 		artpec6_crypto_common_destroy(req);
2139*4882a593Smuzhiyun 
2140*4882a593Smuzhiyun 		req->complete(req->req);
2141*4882a593Smuzhiyun 	}
2142*4882a593Smuzhiyun 
2143*4882a593Smuzhiyun 	list_for_each_entry_safe(req, n, &complete_in_progress,
2144*4882a593Smuzhiyun 				 complete_in_progress) {
2145*4882a593Smuzhiyun 		req->req->complete(req->req, -EINPROGRESS);
2146*4882a593Smuzhiyun 	}
2147*4882a593Smuzhiyun }
2148*4882a593Smuzhiyun 
2149*4882a593Smuzhiyun static void artpec6_crypto_complete_crypto(struct crypto_async_request *req)
2150*4882a593Smuzhiyun {
2151*4882a593Smuzhiyun 	req->complete(req, 0);
2152*4882a593Smuzhiyun }
2153*4882a593Smuzhiyun 
2154*4882a593Smuzhiyun static void
2155*4882a593Smuzhiyun artpec6_crypto_complete_cbc_decrypt(struct crypto_async_request *req)
2156*4882a593Smuzhiyun {
2157*4882a593Smuzhiyun 	struct skcipher_request *cipher_req = container_of(req,
2158*4882a593Smuzhiyun 		struct skcipher_request, base);
2159*4882a593Smuzhiyun 
2160*4882a593Smuzhiyun 	scatterwalk_map_and_copy(cipher_req->iv, cipher_req->src,
2161*4882a593Smuzhiyun 				 cipher_req->cryptlen - AES_BLOCK_SIZE,
2162*4882a593Smuzhiyun 				 AES_BLOCK_SIZE, 0);
2163*4882a593Smuzhiyun 	req->complete(req, 0);
2164*4882a593Smuzhiyun }
2165*4882a593Smuzhiyun 
2166*4882a593Smuzhiyun static void
2167*4882a593Smuzhiyun artpec6_crypto_complete_cbc_encrypt(struct crypto_async_request *req)
2168*4882a593Smuzhiyun {
2169*4882a593Smuzhiyun 	struct skcipher_request *cipher_req = container_of(req,
2170*4882a593Smuzhiyun 		struct skcipher_request, base);
2171*4882a593Smuzhiyun 
2172*4882a593Smuzhiyun 	scatterwalk_map_and_copy(cipher_req->iv, cipher_req->dst,
2173*4882a593Smuzhiyun 				 cipher_req->cryptlen - AES_BLOCK_SIZE,
2174*4882a593Smuzhiyun 				 AES_BLOCK_SIZE, 0);
2175*4882a593Smuzhiyun 	req->complete(req, 0);
2176*4882a593Smuzhiyun }
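/*
 * Editorial note (not in the original source): the two CBC completions
 * differ only in where the next chaining IV is found: after encryption
 * the last ciphertext block is read from the destination, while after
 * decryption it must be taken from the still intact source buffer.
 */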
2177*4882a593Smuzhiyun 
2178*4882a593Smuzhiyun static void artpec6_crypto_complete_aead(struct crypto_async_request *req)
2179*4882a593Smuzhiyun {
2180*4882a593Smuzhiyun 	int result = 0;
2181*4882a593Smuzhiyun 
2182*4882a593Smuzhiyun 	/* Verify GCM hashtag. */
2183*4882a593Smuzhiyun 	struct aead_request *areq = container_of(req,
2184*4882a593Smuzhiyun 		struct aead_request, base);
2185*4882a593Smuzhiyun 	struct crypto_aead *aead = crypto_aead_reqtfm(areq);
2186*4882a593Smuzhiyun 	struct artpec6_crypto_aead_req_ctx *req_ctx = aead_request_ctx(areq);
2187*4882a593Smuzhiyun 
2188*4882a593Smuzhiyun 	if (req_ctx->decrypt) {
2189*4882a593Smuzhiyun 		u8 input_tag[AES_BLOCK_SIZE];
2190*4882a593Smuzhiyun 		unsigned int authsize = crypto_aead_authsize(aead);
2191*4882a593Smuzhiyun 
2192*4882a593Smuzhiyun 		sg_pcopy_to_buffer(areq->src,
2193*4882a593Smuzhiyun 				   sg_nents(areq->src),
2194*4882a593Smuzhiyun 				   input_tag,
2195*4882a593Smuzhiyun 				   authsize,
2196*4882a593Smuzhiyun 				   areq->assoclen + areq->cryptlen -
2197*4882a593Smuzhiyun 				   authsize);
2198*4882a593Smuzhiyun 
2199*4882a593Smuzhiyun 		if (crypto_memneq(req_ctx->decryption_tag,
2200*4882a593Smuzhiyun 				  input_tag,
2201*4882a593Smuzhiyun 				  authsize)) {
2202*4882a593Smuzhiyun 			pr_debug("***EBADMSG:\n");
2203*4882a593Smuzhiyun 			print_hex_dump_debug("ref:", DUMP_PREFIX_ADDRESS, 32, 1,
2204*4882a593Smuzhiyun 					     input_tag, authsize, true);
2205*4882a593Smuzhiyun 			print_hex_dump_debug("out:", DUMP_PREFIX_ADDRESS, 32, 1,
2206*4882a593Smuzhiyun 					     req_ctx->decryption_tag,
2207*4882a593Smuzhiyun 					     authsize, true);
2208*4882a593Smuzhiyun 
2209*4882a593Smuzhiyun 			result = -EBADMSG;
2210*4882a593Smuzhiyun 		}
2211*4882a593Smuzhiyun 	}
2212*4882a593Smuzhiyun 
2213*4882a593Smuzhiyun 	req->complete(req, result);
2214*4882a593Smuzhiyun }
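/*
 * Editorial note (not in the original source): crypto_memneq() is used
 * above instead of memcmp() so that the tag comparison runs in
 * constant time, avoiding a timing side channel on authentication
 * failure.
 */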
2215*4882a593Smuzhiyun 
2216*4882a593Smuzhiyun static void artpec6_crypto_complete_hash(struct crypto_async_request *req)
2217*4882a593Smuzhiyun {
2218*4882a593Smuzhiyun 	req->complete(req, 0);
2219*4882a593Smuzhiyun }
2220*4882a593Smuzhiyun 
2221*4882a593Smuzhiyun 
2222*4882a593Smuzhiyun /*------------------- Hash functions -----------------------------------------*/
2223*4882a593Smuzhiyun static int
2224*4882a593Smuzhiyun artpec6_crypto_hash_set_key(struct crypto_ahash *tfm,
2225*4882a593Smuzhiyun 		    const u8 *key, unsigned int keylen)
2226*4882a593Smuzhiyun {
2227*4882a593Smuzhiyun 	struct artpec6_hashalg_context *tfm_ctx = crypto_tfm_ctx(&tfm->base);
2228*4882a593Smuzhiyun 	size_t blocksize;
2229*4882a593Smuzhiyun 	int ret;
2230*4882a593Smuzhiyun 
2231*4882a593Smuzhiyun 	if (!keylen) {
2232*4882a593Smuzhiyun 		pr_err("Invalid length (%d) of HMAC key\n",
2233*4882a593Smuzhiyun 			keylen);
2234*4882a593Smuzhiyun 		return -EINVAL;
2235*4882a593Smuzhiyun 	}
2236*4882a593Smuzhiyun 
2237*4882a593Smuzhiyun 	memset(tfm_ctx->hmac_key, 0, sizeof(tfm_ctx->hmac_key));
2238*4882a593Smuzhiyun 
2239*4882a593Smuzhiyun 	blocksize = crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
2240*4882a593Smuzhiyun 
2241*4882a593Smuzhiyun 	if (keylen > blocksize) {
2242*4882a593Smuzhiyun 		tfm_ctx->hmac_key_length = blocksize;
2243*4882a593Smuzhiyun 
2244*4882a593Smuzhiyun 		ret = crypto_shash_tfm_digest(tfm_ctx->child_hash, key, keylen,
2245*4882a593Smuzhiyun 					      tfm_ctx->hmac_key);
2246*4882a593Smuzhiyun 		if (ret)
2247*4882a593Smuzhiyun 			return ret;
2248*4882a593Smuzhiyun 	} else {
2249*4882a593Smuzhiyun 		memcpy(tfm_ctx->hmac_key, key, keylen);
2250*4882a593Smuzhiyun 		tfm_ctx->hmac_key_length = keylen;
2251*4882a593Smuzhiyun 	}
2252*4882a593Smuzhiyun 
2253*4882a593Smuzhiyun 	return 0;
2254*4882a593Smuzhiyun }
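
/*
 * Editor's note: a minimal, hypothetical caller for the setkey path above,
 * illustrating the standard HMAC key rule it implements: keys longer than
 * the block size are first digested down with the child shash. The
 * function name and key value are made up for the example; assumes
 * process context.
 */
#if 0
static int example_hmac_setkey(void)
{
	static const u8 key[100] = { 0xaa };	/* > SHA-256 block size (64) */
	struct crypto_ahash *tfm;
	int ret;

	tfm = crypto_alloc_ahash("hmac(sha256)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* Reduced internally to a 32-byte digest before use. */
	ret = crypto_ahash_setkey(tfm, key, sizeof(key));

	crypto_free_ahash(tfm);
	return ret;
}
#endif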
2255*4882a593Smuzhiyun 
2256*4882a593Smuzhiyun static int
2257*4882a593Smuzhiyun artpec6_crypto_init_hash(struct ahash_request *req, u8 type, int hmac)
2258*4882a593Smuzhiyun {
2259*4882a593Smuzhiyun 	struct artpec6_crypto *ac = dev_get_drvdata(artpec6_crypto_dev);
2260*4882a593Smuzhiyun 	enum artpec6_crypto_variant variant = ac->variant;
2261*4882a593Smuzhiyun 	struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(req);
2262*4882a593Smuzhiyun 	u32 oper;
2263*4882a593Smuzhiyun 
2264*4882a593Smuzhiyun 	memset(req_ctx, 0, sizeof(*req_ctx));
2265*4882a593Smuzhiyun 
2266*4882a593Smuzhiyun 	req_ctx->hash_flags = HASH_FLAG_INIT_CTX;
2267*4882a593Smuzhiyun 	if (hmac)
2268*4882a593Smuzhiyun 		req_ctx->hash_flags |= (HASH_FLAG_HMAC | HASH_FLAG_UPDATE_KEY);
2269*4882a593Smuzhiyun 
2270*4882a593Smuzhiyun 	switch (type) {
2271*4882a593Smuzhiyun 	case ARTPEC6_CRYPTO_HASH_SHA1:
2272*4882a593Smuzhiyun 		oper = hmac ? regk_crypto_hmac_sha1 : regk_crypto_sha1;
2273*4882a593Smuzhiyun 		break;
2274*4882a593Smuzhiyun 	case ARTPEC6_CRYPTO_HASH_SHA256:
2275*4882a593Smuzhiyun 		oper = hmac ? regk_crypto_hmac_sha256 : regk_crypto_sha256;
2276*4882a593Smuzhiyun 		break;
2277*4882a593Smuzhiyun 	default:
2278*4882a593Smuzhiyun 		pr_err("%s: Unsupported hash type 0x%x\n", MODULE_NAME, type);
2279*4882a593Smuzhiyun 		return -EINVAL;
2280*4882a593Smuzhiyun 	}
2281*4882a593Smuzhiyun 
2282*4882a593Smuzhiyun 	if (variant == ARTPEC6_CRYPTO)
2283*4882a593Smuzhiyun 		req_ctx->hash_md = FIELD_PREP(A6_CRY_MD_OPER, oper);
2284*4882a593Smuzhiyun 	else
2285*4882a593Smuzhiyun 		req_ctx->hash_md = FIELD_PREP(A7_CRY_MD_OPER, oper);
2286*4882a593Smuzhiyun 
2287*4882a593Smuzhiyun 	return 0;
2288*4882a593Smuzhiyun }
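
/*
 * Editor's note: a small illustration (with assumed mask definitions) of
 * the FIELD_PREP() usage above. A6_CRY_MD_OPER/A7_CRY_MD_OPER are
 * GENMASK()-style field masks, so FIELD_PREP() shifts the operation code
 * into place in the metadata word and FIELD_GET() (used by the export
 * path below) recovers it:
 *
 *   md = FIELD_PREP(A6_CRY_MD_OPER, regk_crypto_sha1);
 *   op = FIELD_GET(A6_CRY_MD_OPER, md);	/* == regk_crypto_sha1 */
 */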
2289*4882a593Smuzhiyun 
2290*4882a593Smuzhiyun static int artpec6_crypto_prepare_submit_hash(struct ahash_request *req)
2291*4882a593Smuzhiyun {
2292*4882a593Smuzhiyun 	struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(req);
2293*4882a593Smuzhiyun 	int ret;
2294*4882a593Smuzhiyun 
2295*4882a593Smuzhiyun 	if (!req_ctx->common.dma) {
2296*4882a593Smuzhiyun 		ret = artpec6_crypto_common_init(&req_ctx->common,
2297*4882a593Smuzhiyun 					  &req->base,
2298*4882a593Smuzhiyun 					  artpec6_crypto_complete_hash,
2299*4882a593Smuzhiyun 					  NULL, 0);
2300*4882a593Smuzhiyun 
2301*4882a593Smuzhiyun 		if (ret)
2302*4882a593Smuzhiyun 			return ret;
2303*4882a593Smuzhiyun 	}
2304*4882a593Smuzhiyun 
2305*4882a593Smuzhiyun 	ret = artpec6_crypto_prepare_hash(req);
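	/*
	 * Editor's note: PREPARE_HASH_START means DMA descriptors were
	 * built and the job must be submitted to the hardware, while
	 * PREPARE_HASH_NO_START means the input was absorbed entirely
	 * into the partial buffer, so there is nothing to run yet. The
	 * NO_START and error paths both fall through to tear down the
	 * common DMA state; only a submitted job keeps it.
	 */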
2306*4882a593Smuzhiyun 	switch (ret) {
2307*4882a593Smuzhiyun 	case ARTPEC6_CRYPTO_PREPARE_HASH_START:
2308*4882a593Smuzhiyun 		ret = artpec6_crypto_submit(&req_ctx->common);
2309*4882a593Smuzhiyun 		break;
2310*4882a593Smuzhiyun 
2311*4882a593Smuzhiyun 	case ARTPEC6_CRYPTO_PREPARE_HASH_NO_START:
2312*4882a593Smuzhiyun 		ret = 0;
2313*4882a593Smuzhiyun 		fallthrough;
2314*4882a593Smuzhiyun 
2315*4882a593Smuzhiyun 	default:
2316*4882a593Smuzhiyun 		artpec6_crypto_common_destroy(&req_ctx->common);
2317*4882a593Smuzhiyun 		break;
2318*4882a593Smuzhiyun 	}
2319*4882a593Smuzhiyun 
2320*4882a593Smuzhiyun 	return ret;
2321*4882a593Smuzhiyun }
2322*4882a593Smuzhiyun 
2323*4882a593Smuzhiyun static int artpec6_crypto_hash_final(struct ahash_request *req)
2324*4882a593Smuzhiyun {
2325*4882a593Smuzhiyun 	struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(req);
2326*4882a593Smuzhiyun 
2327*4882a593Smuzhiyun 	req_ctx->hash_flags |= HASH_FLAG_FINALIZE;
2328*4882a593Smuzhiyun 
2329*4882a593Smuzhiyun 	return artpec6_crypto_prepare_submit_hash(req);
2330*4882a593Smuzhiyun }
2331*4882a593Smuzhiyun 
2332*4882a593Smuzhiyun static int artpec6_crypto_hash_update(struct ahash_request *req)
2333*4882a593Smuzhiyun {
2334*4882a593Smuzhiyun 	struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(req);
2335*4882a593Smuzhiyun 
2336*4882a593Smuzhiyun 	req_ctx->hash_flags |= HASH_FLAG_UPDATE;
2337*4882a593Smuzhiyun 
2338*4882a593Smuzhiyun 	return artpec6_crypto_prepare_submit_hash(req);
2339*4882a593Smuzhiyun }
2340*4882a593Smuzhiyun 
2341*4882a593Smuzhiyun static int artpec6_crypto_sha1_init(struct ahash_request *req)
2342*4882a593Smuzhiyun {
2343*4882a593Smuzhiyun 	return artpec6_crypto_init_hash(req, ARTPEC6_CRYPTO_HASH_SHA1, 0);
2344*4882a593Smuzhiyun }
2345*4882a593Smuzhiyun 
2346*4882a593Smuzhiyun static int artpec6_crypto_sha1_digest(struct ahash_request *req)
2347*4882a593Smuzhiyun {
2348*4882a593Smuzhiyun 	struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(req);
2349*4882a593Smuzhiyun 
2350*4882a593Smuzhiyun 	artpec6_crypto_init_hash(req, ARTPEC6_CRYPTO_HASH_SHA1, 0);
2351*4882a593Smuzhiyun 
2352*4882a593Smuzhiyun 	req_ctx->hash_flags |= HASH_FLAG_UPDATE | HASH_FLAG_FINALIZE;
2353*4882a593Smuzhiyun 
2354*4882a593Smuzhiyun 	return artpec6_crypto_prepare_submit_hash(req);
2355*4882a593Smuzhiyun }
2356*4882a593Smuzhiyun 
2357*4882a593Smuzhiyun static int artpec6_crypto_sha256_init(struct ahash_request *req)
2358*4882a593Smuzhiyun {
2359*4882a593Smuzhiyun 	return artpec6_crypto_init_hash(req, ARTPEC6_CRYPTO_HASH_SHA256, 0);
2360*4882a593Smuzhiyun }
2361*4882a593Smuzhiyun 
2362*4882a593Smuzhiyun static int artpec6_crypto_sha256_digest(struct ahash_request *req)
2363*4882a593Smuzhiyun {
2364*4882a593Smuzhiyun 	struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(req);
2365*4882a593Smuzhiyun 
2366*4882a593Smuzhiyun 	artpec6_crypto_init_hash(req, ARTPEC6_CRYPTO_HASH_SHA256, 0);
2367*4882a593Smuzhiyun 	req_ctx->hash_flags |= HASH_FLAG_UPDATE | HASH_FLAG_FINALIZE;
2368*4882a593Smuzhiyun 
2369*4882a593Smuzhiyun 	return artpec6_crypto_prepare_submit_hash(req);
2370*4882a593Smuzhiyun }
2371*4882a593Smuzhiyun 
2372*4882a593Smuzhiyun static int artpec6_crypto_hmac_sha256_init(struct ahash_request *req)
2373*4882a593Smuzhiyun {
2374*4882a593Smuzhiyun 	return artpec6_crypto_init_hash(req, ARTPEC6_CRYPTO_HASH_SHA256, 1);
2375*4882a593Smuzhiyun }
2376*4882a593Smuzhiyun 
2377*4882a593Smuzhiyun static int artpec6_crypto_hmac_sha256_digest(struct ahash_request *req)
2378*4882a593Smuzhiyun {
2379*4882a593Smuzhiyun 	struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(req);
2380*4882a593Smuzhiyun 
2381*4882a593Smuzhiyun 	artpec6_crypto_init_hash(req, ARTPEC6_CRYPTO_HASH_SHA256, 1);
2382*4882a593Smuzhiyun 	req_ctx->hash_flags |= HASH_FLAG_UPDATE | HASH_FLAG_FINALIZE;
2383*4882a593Smuzhiyun 
2384*4882a593Smuzhiyun 	return artpec6_crypto_prepare_submit_hash(req);
2385*4882a593Smuzhiyun }
2386*4882a593Smuzhiyun 
2387*4882a593Smuzhiyun static int artpec6_crypto_ahash_init_common(struct crypto_tfm *tfm,
2388*4882a593Smuzhiyun 				    const char *base_hash_name)
2389*4882a593Smuzhiyun {
2390*4882a593Smuzhiyun 	struct artpec6_hashalg_context *tfm_ctx = crypto_tfm_ctx(tfm);
2391*4882a593Smuzhiyun 
2392*4882a593Smuzhiyun 	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
2393*4882a593Smuzhiyun 				 sizeof(struct artpec6_hash_request_context));
2394*4882a593Smuzhiyun 	memset(tfm_ctx, 0, sizeof(*tfm_ctx));
2395*4882a593Smuzhiyun 
2396*4882a593Smuzhiyun 	if (base_hash_name) {
2397*4882a593Smuzhiyun 		struct crypto_shash *child;
2398*4882a593Smuzhiyun 
2399*4882a593Smuzhiyun 		child = crypto_alloc_shash(base_hash_name, 0,
2400*4882a593Smuzhiyun 					   CRYPTO_ALG_NEED_FALLBACK);
2401*4882a593Smuzhiyun 
2402*4882a593Smuzhiyun 		if (IS_ERR(child))
2403*4882a593Smuzhiyun 			return PTR_ERR(child);
2404*4882a593Smuzhiyun 
2405*4882a593Smuzhiyun 		tfm_ctx->child_hash = child;
2406*4882a593Smuzhiyun 	}
2407*4882a593Smuzhiyun 
2408*4882a593Smuzhiyun 	return 0;
2409*4882a593Smuzhiyun }
2410*4882a593Smuzhiyun 
2411*4882a593Smuzhiyun static int artpec6_crypto_ahash_init(struct crypto_tfm *tfm)
2412*4882a593Smuzhiyun {
2413*4882a593Smuzhiyun 	return artpec6_crypto_ahash_init_common(tfm, NULL);
2414*4882a593Smuzhiyun }
2415*4882a593Smuzhiyun 
2416*4882a593Smuzhiyun static int artpec6_crypto_ahash_init_hmac_sha256(struct crypto_tfm *tfm)
2417*4882a593Smuzhiyun {
2418*4882a593Smuzhiyun 	return artpec6_crypto_ahash_init_common(tfm, "sha256");
2419*4882a593Smuzhiyun }
2420*4882a593Smuzhiyun 
2421*4882a593Smuzhiyun static void artpec6_crypto_ahash_exit(struct crypto_tfm *tfm)
2422*4882a593Smuzhiyun {
2423*4882a593Smuzhiyun 	struct artpec6_hashalg_context *tfm_ctx = crypto_tfm_ctx(tfm);
2424*4882a593Smuzhiyun 
2425*4882a593Smuzhiyun 	if (tfm_ctx->child_hash)
2426*4882a593Smuzhiyun 		crypto_free_shash(tfm_ctx->child_hash);
2427*4882a593Smuzhiyun 
2428*4882a593Smuzhiyun 	memset(tfm_ctx->hmac_key, 0, sizeof(tfm_ctx->hmac_key));
2429*4882a593Smuzhiyun 	tfm_ctx->hmac_key_length = 0;
2430*4882a593Smuzhiyun }
2431*4882a593Smuzhiyun 
2432*4882a593Smuzhiyun static int artpec6_crypto_hash_export(struct ahash_request *req, void *out)
2433*4882a593Smuzhiyun {
2434*4882a593Smuzhiyun 	const struct artpec6_hash_request_context *ctx = ahash_request_ctx(req);
2435*4882a593Smuzhiyun 	struct artpec6_hash_export_state *state = out;
2436*4882a593Smuzhiyun 	struct artpec6_crypto *ac = dev_get_drvdata(artpec6_crypto_dev);
2437*4882a593Smuzhiyun 	enum artpec6_crypto_variant variant = ac->variant;
2438*4882a593Smuzhiyun 
2439*4882a593Smuzhiyun 	BUILD_BUG_ON(sizeof(state->partial_buffer) !=
2440*4882a593Smuzhiyun 		     sizeof(ctx->partial_buffer));
2441*4882a593Smuzhiyun 	BUILD_BUG_ON(sizeof(state->digeststate) != sizeof(ctx->digeststate));
2442*4882a593Smuzhiyun 
2443*4882a593Smuzhiyun 	state->digcnt = ctx->digcnt;
2444*4882a593Smuzhiyun 	state->partial_bytes = ctx->partial_bytes;
2445*4882a593Smuzhiyun 	state->hash_flags = ctx->hash_flags;
2446*4882a593Smuzhiyun 
2447*4882a593Smuzhiyun 	if (variant == ARTPEC6_CRYPTO)
2448*4882a593Smuzhiyun 		state->oper = FIELD_GET(A6_CRY_MD_OPER, ctx->hash_md);
2449*4882a593Smuzhiyun 	else
2450*4882a593Smuzhiyun 		state->oper = FIELD_GET(A7_CRY_MD_OPER, ctx->hash_md);
2451*4882a593Smuzhiyun 
2452*4882a593Smuzhiyun 	memcpy(state->partial_buffer, ctx->partial_buffer,
2453*4882a593Smuzhiyun 	       sizeof(state->partial_buffer));
2454*4882a593Smuzhiyun 	memcpy(state->digeststate, ctx->digeststate,
2455*4882a593Smuzhiyun 	       sizeof(state->digeststate));
2456*4882a593Smuzhiyun 
2457*4882a593Smuzhiyun 	return 0;
2458*4882a593Smuzhiyun }
2459*4882a593Smuzhiyun 
2460*4882a593Smuzhiyun static int artpec6_crypto_hash_import(struct ahash_request *req, const void *in)
2461*4882a593Smuzhiyun {
2462*4882a593Smuzhiyun 	struct artpec6_hash_request_context *ctx = ahash_request_ctx(req);
2463*4882a593Smuzhiyun 	const struct artpec6_hash_export_state *state = in;
2464*4882a593Smuzhiyun 	struct artpec6_crypto *ac = dev_get_drvdata(artpec6_crypto_dev);
2465*4882a593Smuzhiyun 	enum artpec6_crypto_variant variant = ac->variant;
2466*4882a593Smuzhiyun 
2467*4882a593Smuzhiyun 	memset(ctx, 0, sizeof(*ctx));
2468*4882a593Smuzhiyun 
2469*4882a593Smuzhiyun 	ctx->digcnt = state->digcnt;
2470*4882a593Smuzhiyun 	ctx->partial_bytes = state->partial_bytes;
2471*4882a593Smuzhiyun 	ctx->hash_flags = state->hash_flags;
2472*4882a593Smuzhiyun 
2473*4882a593Smuzhiyun 	if (variant == ARTPEC6_CRYPTO)
2474*4882a593Smuzhiyun 		ctx->hash_md = FIELD_PREP(A6_CRY_MD_OPER, state->oper);
2475*4882a593Smuzhiyun 	else
2476*4882a593Smuzhiyun 		ctx->hash_md = FIELD_PREP(A7_CRY_MD_OPER, state->oper);
2477*4882a593Smuzhiyun 
2478*4882a593Smuzhiyun 	memcpy(ctx->partial_buffer, state->partial_buffer,
2479*4882a593Smuzhiyun 	       sizeof(state->partial_buffer));
2480*4882a593Smuzhiyun 	memcpy(ctx->digeststate, state->digeststate,
2481*4882a593Smuzhiyun 	       sizeof(state->digeststate));
2482*4882a593Smuzhiyun 
2483*4882a593Smuzhiyun 	return 0;
2484*4882a593Smuzhiyun }
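
/*
 * Editor's note: a hypothetical sketch of the export/import round trip the
 * two helpers above enable, letting a partially computed hash migrate
 * between request objects. Error handling is elided for brevity.
 */
#if 0
static void example_hash_migrate(struct ahash_request *old_req,
				 struct ahash_request *new_req)
{
	struct artpec6_hash_export_state state;

	crypto_ahash_export(old_req, &state);	/* snapshot partial state */
	crypto_ahash_import(new_req, &state);	/* resume it elsewhere */
}
#endif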
2485*4882a593Smuzhiyun 
2486*4882a593Smuzhiyun static int init_crypto_hw(struct artpec6_crypto *ac)
2487*4882a593Smuzhiyun {
2488*4882a593Smuzhiyun 	enum artpec6_crypto_variant variant = ac->variant;
2489*4882a593Smuzhiyun 	void __iomem *base = ac->base;
2490*4882a593Smuzhiyun 	u32 out_descr_buf_size;
2491*4882a593Smuzhiyun 	u32 out_data_buf_size;
2492*4882a593Smuzhiyun 	u32 in_data_buf_size;
2493*4882a593Smuzhiyun 	u32 in_descr_buf_size;
2494*4882a593Smuzhiyun 	u32 in_stat_buf_size;
2495*4882a593Smuzhiyun 	u32 in, out;
2496*4882a593Smuzhiyun 
2497*4882a593Smuzhiyun 	/*
2498*4882a593Smuzhiyun 	 * The PDMA unit contains 1984 bytes of internal memory for the OUT
2499*4882a593Smuzhiyun 	 * channels and 1024 bytes for the IN channel. This is an elastic
2500*4882a593Smuzhiyun 	 * memory used to internally store the descriptors and data. The values
2501*4882a593Smuzhiyun 	 * are specified in 64 byte increments. TrustZone buffers are not
2502*4882a593Smuzhiyun 	 * used at this stage.
2503*4882a593Smuzhiyun 	 */
2504*4882a593Smuzhiyun 	out_data_buf_size = 16;  /* 1024 bytes for data */
2505*4882a593Smuzhiyun 	out_descr_buf_size = 15; /* 960 bytes for descriptors */
2506*4882a593Smuzhiyun 	in_data_buf_size = 8;    /* 512 bytes for data */
2507*4882a593Smuzhiyun 	in_descr_buf_size = 4;   /* 256 bytes for descriptors */
2508*4882a593Smuzhiyun 	in_stat_buf_size = 4;   /* 256 bytes for stat descrs */
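
	/*
	 * Editor's note: the budget works out as follows, in 64-byte units:
	 *   OUT: (16 + 15) * 64 = 1984 bytes, exactly the OUT memory.
	 *   IN:  (8 + 4 + 4) * 64 = 1024 bytes, exactly the IN memory.
	 * The BUILD_BUG_ON_MSG() checks below enforce this at compile time.
	 */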
2509*4882a593Smuzhiyun 
2510*4882a593Smuzhiyun 	BUILD_BUG_ON_MSG((out_data_buf_size
2511*4882a593Smuzhiyun 				+ out_descr_buf_size) * 64 > 1984,
2512*4882a593Smuzhiyun 			  "Invalid OUT configuration");
2513*4882a593Smuzhiyun 
2514*4882a593Smuzhiyun 	BUILD_BUG_ON_MSG((in_data_buf_size
2515*4882a593Smuzhiyun 				+ in_descr_buf_size
2516*4882a593Smuzhiyun 				+ in_stat_buf_size) * 64 > 1024,
2517*4882a593Smuzhiyun 			  "Invalid IN configuration");
2518*4882a593Smuzhiyun 
2519*4882a593Smuzhiyun 	in = FIELD_PREP(PDMA_IN_BUF_CFG_DATA_BUF_SIZE, in_data_buf_size) |
2520*4882a593Smuzhiyun 	     FIELD_PREP(PDMA_IN_BUF_CFG_DESCR_BUF_SIZE, in_descr_buf_size) |
2521*4882a593Smuzhiyun 	     FIELD_PREP(PDMA_IN_BUF_CFG_STAT_BUF_SIZE, in_stat_buf_size);
2522*4882a593Smuzhiyun 
2523*4882a593Smuzhiyun 	out = FIELD_PREP(PDMA_OUT_BUF_CFG_DATA_BUF_SIZE, out_data_buf_size) |
2524*4882a593Smuzhiyun 	      FIELD_PREP(PDMA_OUT_BUF_CFG_DESCR_BUF_SIZE, out_descr_buf_size);
2525*4882a593Smuzhiyun 
2526*4882a593Smuzhiyun 	writel_relaxed(out, base + PDMA_OUT_BUF_CFG);
2527*4882a593Smuzhiyun 	writel_relaxed(PDMA_OUT_CFG_EN, base + PDMA_OUT_CFG);
2528*4882a593Smuzhiyun 
2529*4882a593Smuzhiyun 	if (variant == ARTPEC6_CRYPTO) {
2530*4882a593Smuzhiyun 		writel_relaxed(in, base + A6_PDMA_IN_BUF_CFG);
2531*4882a593Smuzhiyun 		writel_relaxed(PDMA_IN_CFG_EN, base + A6_PDMA_IN_CFG);
2532*4882a593Smuzhiyun 		writel_relaxed(A6_PDMA_INTR_MASK_IN_DATA |
2533*4882a593Smuzhiyun 			       A6_PDMA_INTR_MASK_IN_EOP_FLUSH,
2534*4882a593Smuzhiyun 			       base + A6_PDMA_INTR_MASK);
2535*4882a593Smuzhiyun 	} else {
2536*4882a593Smuzhiyun 		writel_relaxed(in, base + A7_PDMA_IN_BUF_CFG);
2537*4882a593Smuzhiyun 		writel_relaxed(PDMA_IN_CFG_EN, base + A7_PDMA_IN_CFG);
2538*4882a593Smuzhiyun 		writel_relaxed(A7_PDMA_INTR_MASK_IN_DATA |
2539*4882a593Smuzhiyun 			       A7_PDMA_INTR_MASK_IN_EOP_FLUSH,
2540*4882a593Smuzhiyun 			       base + A7_PDMA_INTR_MASK);
2541*4882a593Smuzhiyun 	}
2542*4882a593Smuzhiyun 
2543*4882a593Smuzhiyun 	return 0;
2544*4882a593Smuzhiyun }
2545*4882a593Smuzhiyun 
2546*4882a593Smuzhiyun static void artpec6_crypto_disable_hw(struct artpec6_crypto *ac)
2547*4882a593Smuzhiyun {
2548*4882a593Smuzhiyun 	enum artpec6_crypto_variant variant = ac->variant;
2549*4882a593Smuzhiyun 	void __iomem *base = ac->base;
2550*4882a593Smuzhiyun 
2551*4882a593Smuzhiyun 	if (variant == ARTPEC6_CRYPTO) {
2552*4882a593Smuzhiyun 		writel_relaxed(A6_PDMA_IN_CMD_STOP, base + A6_PDMA_IN_CMD);
2553*4882a593Smuzhiyun 		writel_relaxed(0, base + A6_PDMA_IN_CFG);
2554*4882a593Smuzhiyun 		writel_relaxed(A6_PDMA_OUT_CMD_STOP, base + PDMA_OUT_CMD);
2555*4882a593Smuzhiyun 	} else {
2556*4882a593Smuzhiyun 		writel_relaxed(A7_PDMA_IN_CMD_STOP, base + A7_PDMA_IN_CMD);
2557*4882a593Smuzhiyun 		writel_relaxed(0, base + A7_PDMA_IN_CFG);
2558*4882a593Smuzhiyun 		writel_relaxed(A7_PDMA_OUT_CMD_STOP, base + PDMA_OUT_CMD);
2559*4882a593Smuzhiyun 	}
2560*4882a593Smuzhiyun 
2561*4882a593Smuzhiyun 	writel_relaxed(0, base + PDMA_OUT_CFG);
2563*4882a593Smuzhiyun }
2564*4882a593Smuzhiyun 
2565*4882a593Smuzhiyun static irqreturn_t artpec6_crypto_irq(int irq, void *dev_id)
2566*4882a593Smuzhiyun {
2567*4882a593Smuzhiyun 	struct artpec6_crypto *ac = dev_id;
2568*4882a593Smuzhiyun 	enum artpec6_crypto_variant variant = ac->variant;
2569*4882a593Smuzhiyun 	void __iomem *base = ac->base;
2570*4882a593Smuzhiyun 	u32 mask_in_data, mask_in_eop_flush;
2571*4882a593Smuzhiyun 	u32 in_cmd_flush_stat, in_cmd_reg;
2572*4882a593Smuzhiyun 	u32 ack_intr_reg;
2573*4882a593Smuzhiyun 	u32 ack = 0;
2574*4882a593Smuzhiyun 	u32 intr;
2575*4882a593Smuzhiyun 
2576*4882a593Smuzhiyun 	if (variant == ARTPEC6_CRYPTO) {
2577*4882a593Smuzhiyun 		intr = readl_relaxed(base + A6_PDMA_MASKED_INTR);
2578*4882a593Smuzhiyun 		mask_in_data = A6_PDMA_INTR_MASK_IN_DATA;
2579*4882a593Smuzhiyun 		mask_in_eop_flush = A6_PDMA_INTR_MASK_IN_EOP_FLUSH;
2580*4882a593Smuzhiyun 		in_cmd_flush_stat = A6_PDMA_IN_CMD_FLUSH_STAT;
2581*4882a593Smuzhiyun 		in_cmd_reg = A6_PDMA_IN_CMD;
2582*4882a593Smuzhiyun 		ack_intr_reg = A6_PDMA_ACK_INTR;
2583*4882a593Smuzhiyun 	} else {
2584*4882a593Smuzhiyun 		intr = readl_relaxed(base + A7_PDMA_MASKED_INTR);
2585*4882a593Smuzhiyun 		mask_in_data = A7_PDMA_INTR_MASK_IN_DATA;
2586*4882a593Smuzhiyun 		mask_in_eop_flush = A7_PDMA_INTR_MASK_IN_EOP_FLUSH;
2587*4882a593Smuzhiyun 		in_cmd_flush_stat = A7_PDMA_IN_CMD_FLUSH_STAT;
2588*4882a593Smuzhiyun 		in_cmd_reg = A7_PDMA_IN_CMD;
2589*4882a593Smuzhiyun 		ack_intr_reg = A7_PDMA_ACK_INTR;
2590*4882a593Smuzhiyun 	}
2591*4882a593Smuzhiyun 
2592*4882a593Smuzhiyun 	/* We get two interrupt notifications from each job.
2593*4882a593Smuzhiyun 	 * The in_data interrupt means all data was sent to memory, after
2594*4882a593Smuzhiyun 	 * which we issue a status flush command to write the per-job
2595*4882a593Smuzhiyun 	 * status to its status vector. This ensures that the tasklet
2596*4882a593Smuzhiyun 	 * can detect exactly how many submitted jobs have finished.
2598*4882a593Smuzhiyun 	 */
2599*4882a593Smuzhiyun 	if (intr & mask_in_data)
2600*4882a593Smuzhiyun 		ack |= mask_in_data;
2601*4882a593Smuzhiyun 
2602*4882a593Smuzhiyun 	if (intr & mask_in_eop_flush)
2603*4882a593Smuzhiyun 		ack |= mask_in_eop_flush;
2604*4882a593Smuzhiyun 	else
2605*4882a593Smuzhiyun 		writel_relaxed(in_cmd_flush_stat, base + in_cmd_reg);
2606*4882a593Smuzhiyun 
2607*4882a593Smuzhiyun 	writel_relaxed(ack, base + ack_intr_reg);
2608*4882a593Smuzhiyun 
2609*4882a593Smuzhiyun 	if (intr & mask_in_eop_flush)
2610*4882a593Smuzhiyun 		tasklet_schedule(&ac->task);
2611*4882a593Smuzhiyun 
2612*4882a593Smuzhiyun 	return IRQ_HANDLED;
2613*4882a593Smuzhiyun }
2614*4882a593Smuzhiyun 
2615*4882a593Smuzhiyun /*------------------- Algorithm definitions ----------------------------------*/
2616*4882a593Smuzhiyun 
2617*4882a593Smuzhiyun /* Hashes */
2618*4882a593Smuzhiyun static struct ahash_alg hash_algos[] = {
2619*4882a593Smuzhiyun 	/* SHA-1 */
2620*4882a593Smuzhiyun 	{
2621*4882a593Smuzhiyun 		.init = artpec6_crypto_sha1_init,
2622*4882a593Smuzhiyun 		.update = artpec6_crypto_hash_update,
2623*4882a593Smuzhiyun 		.final = artpec6_crypto_hash_final,
2624*4882a593Smuzhiyun 		.digest = artpec6_crypto_sha1_digest,
2625*4882a593Smuzhiyun 		.import = artpec6_crypto_hash_import,
2626*4882a593Smuzhiyun 		.export = artpec6_crypto_hash_export,
2627*4882a593Smuzhiyun 		.halg.digestsize = SHA1_DIGEST_SIZE,
2628*4882a593Smuzhiyun 		.halg.statesize = sizeof(struct artpec6_hash_export_state),
2629*4882a593Smuzhiyun 		.halg.base = {
2630*4882a593Smuzhiyun 			.cra_name = "sha1",
2631*4882a593Smuzhiyun 			.cra_driver_name = "artpec-sha1",
2632*4882a593Smuzhiyun 			.cra_priority = 300,
2633*4882a593Smuzhiyun 			.cra_flags = CRYPTO_ALG_ASYNC |
2634*4882a593Smuzhiyun 				     CRYPTO_ALG_ALLOCATES_MEMORY,
2635*4882a593Smuzhiyun 			.cra_blocksize = SHA1_BLOCK_SIZE,
2636*4882a593Smuzhiyun 			.cra_ctxsize = sizeof(struct artpec6_hashalg_context),
2637*4882a593Smuzhiyun 			.cra_alignmask = 3,
2638*4882a593Smuzhiyun 			.cra_module = THIS_MODULE,
2639*4882a593Smuzhiyun 			.cra_init = artpec6_crypto_ahash_init,
2640*4882a593Smuzhiyun 			.cra_exit = artpec6_crypto_ahash_exit,
2641*4882a593Smuzhiyun 		}
2642*4882a593Smuzhiyun 	},
2643*4882a593Smuzhiyun 	/* SHA-256 */
2644*4882a593Smuzhiyun 	{
2645*4882a593Smuzhiyun 		.init = artpec6_crypto_sha256_init,
2646*4882a593Smuzhiyun 		.update = artpec6_crypto_hash_update,
2647*4882a593Smuzhiyun 		.final = artpec6_crypto_hash_final,
2648*4882a593Smuzhiyun 		.digest = artpec6_crypto_sha256_digest,
2649*4882a593Smuzhiyun 		.import = artpec6_crypto_hash_import,
2650*4882a593Smuzhiyun 		.export = artpec6_crypto_hash_export,
2651*4882a593Smuzhiyun 		.halg.digestsize = SHA256_DIGEST_SIZE,
2652*4882a593Smuzhiyun 		.halg.statesize = sizeof(struct artpec6_hash_export_state),
2653*4882a593Smuzhiyun 		.halg.base = {
2654*4882a593Smuzhiyun 			.cra_name = "sha256",
2655*4882a593Smuzhiyun 			.cra_driver_name = "artpec-sha256",
2656*4882a593Smuzhiyun 			.cra_priority = 300,
2657*4882a593Smuzhiyun 			.cra_flags = CRYPTO_ALG_ASYNC |
2658*4882a593Smuzhiyun 				     CRYPTO_ALG_ALLOCATES_MEMORY,
2659*4882a593Smuzhiyun 			.cra_blocksize = SHA256_BLOCK_SIZE,
2660*4882a593Smuzhiyun 			.cra_ctxsize = sizeof(struct artpec6_hashalg_context),
2661*4882a593Smuzhiyun 			.cra_alignmask = 3,
2662*4882a593Smuzhiyun 			.cra_module = THIS_MODULE,
2663*4882a593Smuzhiyun 			.cra_init = artpec6_crypto_ahash_init,
2664*4882a593Smuzhiyun 			.cra_exit = artpec6_crypto_ahash_exit,
2665*4882a593Smuzhiyun 		}
2666*4882a593Smuzhiyun 	},
2667*4882a593Smuzhiyun 	/* HMAC SHA-256 */
2668*4882a593Smuzhiyun 	{
2669*4882a593Smuzhiyun 		.init = artpec6_crypto_hmac_sha256_init,
2670*4882a593Smuzhiyun 		.update = artpec6_crypto_hash_update,
2671*4882a593Smuzhiyun 		.final = artpec6_crypto_hash_final,
2672*4882a593Smuzhiyun 		.digest = artpec6_crypto_hmac_sha256_digest,
2673*4882a593Smuzhiyun 		.import = artpec6_crypto_hash_import,
2674*4882a593Smuzhiyun 		.export = artpec6_crypto_hash_export,
2675*4882a593Smuzhiyun 		.setkey = artpec6_crypto_hash_set_key,
2676*4882a593Smuzhiyun 		.halg.digestsize = SHA256_DIGEST_SIZE,
2677*4882a593Smuzhiyun 		.halg.statesize = sizeof(struct artpec6_hash_export_state),
2678*4882a593Smuzhiyun 		.halg.base = {
2679*4882a593Smuzhiyun 			.cra_name = "hmac(sha256)",
2680*4882a593Smuzhiyun 			.cra_driver_name = "artpec-hmac-sha256",
2681*4882a593Smuzhiyun 			.cra_priority = 300,
2682*4882a593Smuzhiyun 			.cra_flags = CRYPTO_ALG_ASYNC |
2683*4882a593Smuzhiyun 				     CRYPTO_ALG_ALLOCATES_MEMORY,
2684*4882a593Smuzhiyun 			.cra_blocksize = SHA256_BLOCK_SIZE,
2685*4882a593Smuzhiyun 			.cra_ctxsize = sizeof(struct artpec6_hashalg_context),
2686*4882a593Smuzhiyun 			.cra_alignmask = 3,
2687*4882a593Smuzhiyun 			.cra_module = THIS_MODULE,
2688*4882a593Smuzhiyun 			.cra_init = artpec6_crypto_ahash_init_hmac_sha256,
2689*4882a593Smuzhiyun 			.cra_exit = artpec6_crypto_ahash_exit,
2690*4882a593Smuzhiyun 		}
2691*4882a593Smuzhiyun 	},
2692*4882a593Smuzhiyun };
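
/*
 * Editor's note: a hypothetical synchronous caller for the ahash algorithms
 * registered above. With cra_priority 300, this driver is normally
 * preferred over the generic C implementations (typically priority 100)
 * when "sha256" is requested by name. Assumes process context; the
 * function name is made up for the example.
 */
#if 0
static int example_sha256_digest(const void *data, unsigned int len,
				 u8 out[SHA256_DIGEST_SIZE])
{
	struct crypto_ahash *tfm;
	struct ahash_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	int ret;

	tfm = crypto_alloc_ahash("sha256", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		crypto_free_ahash(tfm);
		return -ENOMEM;
	}

	sg_init_one(&sg, data, len);
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   crypto_req_done, &wait);
	ahash_request_set_crypt(req, &sg, out, len);

	/* crypto_wait_req() turns the async completion into a sync wait. */
	ret = crypto_wait_req(crypto_ahash_digest(req), &wait);

	ahash_request_free(req);
	crypto_free_ahash(tfm);
	return ret;
}
#endif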
2693*4882a593Smuzhiyun 
2694*4882a593Smuzhiyun /* Crypto */
2695*4882a593Smuzhiyun static struct skcipher_alg crypto_algos[] = {
2696*4882a593Smuzhiyun 	/* AES - ECB */
2697*4882a593Smuzhiyun 	{
2698*4882a593Smuzhiyun 		.base = {
2699*4882a593Smuzhiyun 			.cra_name = "ecb(aes)",
2700*4882a593Smuzhiyun 			.cra_driver_name = "artpec6-ecb-aes",
2701*4882a593Smuzhiyun 			.cra_priority = 300,
2702*4882a593Smuzhiyun 			.cra_flags = CRYPTO_ALG_ASYNC |
2703*4882a593Smuzhiyun 				     CRYPTO_ALG_ALLOCATES_MEMORY,
2704*4882a593Smuzhiyun 			.cra_blocksize = AES_BLOCK_SIZE,
2705*4882a593Smuzhiyun 			.cra_ctxsize = sizeof(struct artpec6_cryptotfm_context),
2706*4882a593Smuzhiyun 			.cra_alignmask = 3,
2707*4882a593Smuzhiyun 			.cra_module = THIS_MODULE,
2708*4882a593Smuzhiyun 		},
2709*4882a593Smuzhiyun 		.min_keysize = AES_MIN_KEY_SIZE,
2710*4882a593Smuzhiyun 		.max_keysize = AES_MAX_KEY_SIZE,
2711*4882a593Smuzhiyun 		.setkey = artpec6_crypto_cipher_set_key,
2712*4882a593Smuzhiyun 		.encrypt = artpec6_crypto_encrypt,
2713*4882a593Smuzhiyun 		.decrypt = artpec6_crypto_decrypt,
2714*4882a593Smuzhiyun 		.init = artpec6_crypto_aes_ecb_init,
2715*4882a593Smuzhiyun 		.exit = artpec6_crypto_aes_exit,
2716*4882a593Smuzhiyun 	},
2717*4882a593Smuzhiyun 	/* AES - CTR */
2718*4882a593Smuzhiyun 	{
2719*4882a593Smuzhiyun 		.base = {
2720*4882a593Smuzhiyun 			.cra_name = "ctr(aes)",
2721*4882a593Smuzhiyun 			.cra_driver_name = "artpec6-ctr-aes",
2722*4882a593Smuzhiyun 			.cra_priority = 300,
2723*4882a593Smuzhiyun 			.cra_flags = CRYPTO_ALG_ASYNC |
2724*4882a593Smuzhiyun 				     CRYPTO_ALG_ALLOCATES_MEMORY |
2725*4882a593Smuzhiyun 				     CRYPTO_ALG_NEED_FALLBACK,
2726*4882a593Smuzhiyun 			.cra_blocksize = 1,
2727*4882a593Smuzhiyun 			.cra_ctxsize = sizeof(struct artpec6_cryptotfm_context),
2728*4882a593Smuzhiyun 			.cra_alignmask = 3,
2729*4882a593Smuzhiyun 			.cra_module = THIS_MODULE,
2730*4882a593Smuzhiyun 		},
2731*4882a593Smuzhiyun 		.min_keysize = AES_MIN_KEY_SIZE,
2732*4882a593Smuzhiyun 		.max_keysize = AES_MAX_KEY_SIZE,
2733*4882a593Smuzhiyun 		.ivsize = AES_BLOCK_SIZE,
2734*4882a593Smuzhiyun 		.setkey = artpec6_crypto_cipher_set_key,
2735*4882a593Smuzhiyun 		.encrypt = artpec6_crypto_ctr_encrypt,
2736*4882a593Smuzhiyun 		.decrypt = artpec6_crypto_ctr_decrypt,
2737*4882a593Smuzhiyun 		.init = artpec6_crypto_aes_ctr_init,
2738*4882a593Smuzhiyun 		.exit = artpec6_crypto_aes_ctr_exit,
2739*4882a593Smuzhiyun 	},
2740*4882a593Smuzhiyun 	/* AES - CBC */
2741*4882a593Smuzhiyun 	{
2742*4882a593Smuzhiyun 		.base = {
2743*4882a593Smuzhiyun 			.cra_name = "cbc(aes)",
2744*4882a593Smuzhiyun 			.cra_driver_name = "artpec6-cbc-aes",
2745*4882a593Smuzhiyun 			.cra_priority = 300,
2746*4882a593Smuzhiyun 			.cra_flags = CRYPTO_ALG_ASYNC |
2747*4882a593Smuzhiyun 				     CRYPTO_ALG_ALLOCATES_MEMORY,
2748*4882a593Smuzhiyun 			.cra_blocksize = AES_BLOCK_SIZE,
2749*4882a593Smuzhiyun 			.cra_ctxsize = sizeof(struct artpec6_cryptotfm_context),
2750*4882a593Smuzhiyun 			.cra_alignmask = 3,
2751*4882a593Smuzhiyun 			.cra_module = THIS_MODULE,
2752*4882a593Smuzhiyun 		},
2753*4882a593Smuzhiyun 		.min_keysize = AES_MIN_KEY_SIZE,
2754*4882a593Smuzhiyun 		.max_keysize = AES_MAX_KEY_SIZE,
2755*4882a593Smuzhiyun 		.ivsize = AES_BLOCK_SIZE,
2756*4882a593Smuzhiyun 		.setkey = artpec6_crypto_cipher_set_key,
2757*4882a593Smuzhiyun 		.encrypt = artpec6_crypto_encrypt,
2758*4882a593Smuzhiyun 		.decrypt = artpec6_crypto_decrypt,
2759*4882a593Smuzhiyun 		.init = artpec6_crypto_aes_cbc_init,
2760*4882a593Smuzhiyun 		.exit = artpec6_crypto_aes_exit
2761*4882a593Smuzhiyun 	},
2762*4882a593Smuzhiyun 	/* AES - XTS */
2763*4882a593Smuzhiyun 	{
2764*4882a593Smuzhiyun 		.base = {
2765*4882a593Smuzhiyun 			.cra_name = "xts(aes)",
2766*4882a593Smuzhiyun 			.cra_driver_name = "artpec6-xts-aes",
2767*4882a593Smuzhiyun 			.cra_priority = 300,
2768*4882a593Smuzhiyun 			.cra_flags = CRYPTO_ALG_ASYNC |
2769*4882a593Smuzhiyun 				     CRYPTO_ALG_ALLOCATES_MEMORY,
2770*4882a593Smuzhiyun 			.cra_blocksize = 1,
2771*4882a593Smuzhiyun 			.cra_ctxsize = sizeof(struct artpec6_cryptotfm_context),
2772*4882a593Smuzhiyun 			.cra_alignmask = 3,
2773*4882a593Smuzhiyun 			.cra_module = THIS_MODULE,
2774*4882a593Smuzhiyun 		},
2775*4882a593Smuzhiyun 		.min_keysize = 2*AES_MIN_KEY_SIZE,
2776*4882a593Smuzhiyun 		.max_keysize = 2*AES_MAX_KEY_SIZE,
2777*4882a593Smuzhiyun 		.ivsize = 16,
2778*4882a593Smuzhiyun 		.setkey = artpec6_crypto_xts_set_key,
2779*4882a593Smuzhiyun 		.encrypt = artpec6_crypto_encrypt,
2780*4882a593Smuzhiyun 		.decrypt = artpec6_crypto_decrypt,
2781*4882a593Smuzhiyun 		.init = artpec6_crypto_aes_xts_init,
2782*4882a593Smuzhiyun 		.exit = artpec6_crypto_aes_exit,
2783*4882a593Smuzhiyun 	},
2784*4882a593Smuzhiyun };
2785*4882a593Smuzhiyun 
2786*4882a593Smuzhiyun static struct aead_alg aead_algos[] = {
2787*4882a593Smuzhiyun 	{
2788*4882a593Smuzhiyun 		.init   = artpec6_crypto_aead_init,
2789*4882a593Smuzhiyun 		.setkey = artpec6_crypto_aead_set_key,
2790*4882a593Smuzhiyun 		.encrypt = artpec6_crypto_aead_encrypt,
2791*4882a593Smuzhiyun 		.decrypt = artpec6_crypto_aead_decrypt,
2792*4882a593Smuzhiyun 		.ivsize = GCM_AES_IV_SIZE,
2793*4882a593Smuzhiyun 		.maxauthsize = AES_BLOCK_SIZE,
2794*4882a593Smuzhiyun 
2795*4882a593Smuzhiyun 		.base = {
2796*4882a593Smuzhiyun 			.cra_name = "gcm(aes)",
2797*4882a593Smuzhiyun 			.cra_driver_name = "artpec-gcm-aes",
2798*4882a593Smuzhiyun 			.cra_priority = 300,
2799*4882a593Smuzhiyun 			.cra_flags = CRYPTO_ALG_ASYNC |
2800*4882a593Smuzhiyun 				     CRYPTO_ALG_ALLOCATES_MEMORY |
2801*4882a593Smuzhiyun 				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2802*4882a593Smuzhiyun 			.cra_blocksize = 1,
2803*4882a593Smuzhiyun 			.cra_ctxsize = sizeof(struct artpec6_cryptotfm_context),
2804*4882a593Smuzhiyun 			.cra_alignmask = 3,
2805*4882a593Smuzhiyun 			.cra_module = THIS_MODULE,
2806*4882a593Smuzhiyun 		},
2807*4882a593Smuzhiyun 	}
2808*4882a593Smuzhiyun };
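
/*
 * Editor's note: a hypothetical sketch of driving the "gcm(aes)" AEAD
 * registered above for encryption. The dst scatterlist must cover the
 * associated data, the ciphertext and the appended authsize-byte tag.
 * Names and parameters are made up for the example.
 */
#if 0
static int example_gcm_encrypt(struct scatterlist *src,
			       struct scatterlist *dst,
			       unsigned int assoclen, unsigned int cryptlen,
			       const u8 *key, unsigned int keylen,
			       u8 iv[GCM_AES_IV_SIZE])
{
	struct crypto_aead *tfm;
	struct aead_request *req;
	DECLARE_CRYPTO_WAIT(wait);
	int ret;

	tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	ret = crypto_aead_setkey(tfm, key, keylen);
	if (!ret)
		ret = crypto_aead_setauthsize(tfm, 16);
	if (ret)
		goto out_free_tfm;

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		ret = -ENOMEM;
		goto out_free_tfm;
	}

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  crypto_req_done, &wait);
	aead_request_set_ad(req, assoclen);
	aead_request_set_crypt(req, src, dst, cryptlen, iv);

	ret = crypto_wait_req(crypto_aead_encrypt(req), &wait);
	aead_request_free(req);
out_free_tfm:
	crypto_free_aead(tfm);
	return ret;
}
#endif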
2809*4882a593Smuzhiyun 
2810*4882a593Smuzhiyun #ifdef CONFIG_DEBUG_FS
2811*4882a593Smuzhiyun 
2812*4882a593Smuzhiyun struct dbgfs_u32 {
2813*4882a593Smuzhiyun 	char *name;
2814*4882a593Smuzhiyun 	mode_t mode;
2815*4882a593Smuzhiyun 	u32 *flag;
2816*4882a593Smuzhiyun 	char *desc;
2817*4882a593Smuzhiyun };
2818*4882a593Smuzhiyun 
2819*4882a593Smuzhiyun static struct dentry *dbgfs_root;
2820*4882a593Smuzhiyun 
2821*4882a593Smuzhiyun static void artpec6_crypto_init_debugfs(void)
2822*4882a593Smuzhiyun {
2823*4882a593Smuzhiyun 	dbgfs_root = debugfs_create_dir("artpec6_crypto", NULL);
2824*4882a593Smuzhiyun 
2825*4882a593Smuzhiyun #ifdef CONFIG_FAULT_INJECTION
2826*4882a593Smuzhiyun 	fault_create_debugfs_attr("fail_status_read", dbgfs_root,
2827*4882a593Smuzhiyun 				  &artpec6_crypto_fail_status_read);
2828*4882a593Smuzhiyun 
2829*4882a593Smuzhiyun 	fault_create_debugfs_attr("fail_dma_array_full", dbgfs_root,
2830*4882a593Smuzhiyun 				  &artpec6_crypto_fail_dma_array_full);
2831*4882a593Smuzhiyun #endif
2832*4882a593Smuzhiyun }
2833*4882a593Smuzhiyun 
2834*4882a593Smuzhiyun static void artpec6_crypto_free_debugfs(void)
2835*4882a593Smuzhiyun {
2836*4882a593Smuzhiyun 	debugfs_remove_recursive(dbgfs_root);
2837*4882a593Smuzhiyun 	dbgfs_root = NULL;
2838*4882a593Smuzhiyun }
2839*4882a593Smuzhiyun #endif
2840*4882a593Smuzhiyun 
2841*4882a593Smuzhiyun static const struct of_device_id artpec6_crypto_of_match[] = {
2842*4882a593Smuzhiyun 	{ .compatible = "axis,artpec6-crypto", .data = (void *)ARTPEC6_CRYPTO },
2843*4882a593Smuzhiyun 	{ .compatible = "axis,artpec7-crypto", .data = (void *)ARTPEC7_CRYPTO },
2844*4882a593Smuzhiyun 	{}
2845*4882a593Smuzhiyun };
2846*4882a593Smuzhiyun MODULE_DEVICE_TABLE(of, artpec6_crypto_of_match);
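
/*
 * Editor's note: a sketch of the device tree node this match table binds
 * to, with placeholder reg and interrupt values (the real ones come from
 * the SoC dtsi):
 *
 *   crypto {
 *           compatible = "axis,artpec6-crypto";
 *           reg = <0x0 0x0>;
 *           interrupts = <0>;
 *   };
 */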
2847*4882a593Smuzhiyun 
2848*4882a593Smuzhiyun static int artpec6_crypto_probe(struct platform_device *pdev)
2849*4882a593Smuzhiyun {
2850*4882a593Smuzhiyun 	const struct of_device_id *match;
2851*4882a593Smuzhiyun 	enum artpec6_crypto_variant variant;
2852*4882a593Smuzhiyun 	struct artpec6_crypto *ac;
2853*4882a593Smuzhiyun 	struct device *dev = &pdev->dev;
2854*4882a593Smuzhiyun 	void __iomem *base;
2855*4882a593Smuzhiyun 	int irq;
2856*4882a593Smuzhiyun 	int err;
2857*4882a593Smuzhiyun 
2858*4882a593Smuzhiyun 	if (artpec6_crypto_dev)
2859*4882a593Smuzhiyun 		return -ENODEV;
2860*4882a593Smuzhiyun 
2861*4882a593Smuzhiyun 	match = of_match_node(artpec6_crypto_of_match, dev->of_node);
2862*4882a593Smuzhiyun 	if (!match)
2863*4882a593Smuzhiyun 		return -EINVAL;
2864*4882a593Smuzhiyun 
2865*4882a593Smuzhiyun 	variant = (enum artpec6_crypto_variant)match->data;
2866*4882a593Smuzhiyun 
2867*4882a593Smuzhiyun 	base = devm_platform_ioremap_resource(pdev, 0);
2868*4882a593Smuzhiyun 	if (IS_ERR(base))
2869*4882a593Smuzhiyun 		return PTR_ERR(base);
2870*4882a593Smuzhiyun 
2871*4882a593Smuzhiyun 	irq = platform_get_irq(pdev, 0);
2872*4882a593Smuzhiyun 	if (irq < 0)
2873*4882a593Smuzhiyun 		return -ENODEV;
2874*4882a593Smuzhiyun 
2875*4882a593Smuzhiyun 	ac = devm_kzalloc(&pdev->dev, sizeof(struct artpec6_crypto),
2876*4882a593Smuzhiyun 			  GFP_KERNEL);
2877*4882a593Smuzhiyun 	if (!ac)
2878*4882a593Smuzhiyun 		return -ENOMEM;
2879*4882a593Smuzhiyun 
2880*4882a593Smuzhiyun 	platform_set_drvdata(pdev, ac);
2881*4882a593Smuzhiyun 	ac->variant = variant;
2882*4882a593Smuzhiyun 
2883*4882a593Smuzhiyun 	spin_lock_init(&ac->queue_lock);
2884*4882a593Smuzhiyun 	INIT_LIST_HEAD(&ac->queue);
2885*4882a593Smuzhiyun 	INIT_LIST_HEAD(&ac->pending);
2886*4882a593Smuzhiyun 	timer_setup(&ac->timer, artpec6_crypto_timeout, 0);
2887*4882a593Smuzhiyun 
2888*4882a593Smuzhiyun 	ac->base = base;
2889*4882a593Smuzhiyun 
2890*4882a593Smuzhiyun 	ac->dma_cache = kmem_cache_create("artpec6_crypto_dma",
2891*4882a593Smuzhiyun 		sizeof(struct artpec6_crypto_dma_descriptors),
2892*4882a593Smuzhiyun 		64,
2893*4882a593Smuzhiyun 		0,
2894*4882a593Smuzhiyun 		NULL);
2895*4882a593Smuzhiyun 	if (!ac->dma_cache)
2896*4882a593Smuzhiyun 		return -ENOMEM;
2897*4882a593Smuzhiyun 
2898*4882a593Smuzhiyun #ifdef CONFIG_DEBUG_FS
2899*4882a593Smuzhiyun 	artpec6_crypto_init_debugfs();
2900*4882a593Smuzhiyun #endif
2901*4882a593Smuzhiyun 
2902*4882a593Smuzhiyun 	tasklet_init(&ac->task, artpec6_crypto_task,
2903*4882a593Smuzhiyun 		     (unsigned long)ac);
2904*4882a593Smuzhiyun 
2905*4882a593Smuzhiyun 	ac->pad_buffer = devm_kzalloc(&pdev->dev, 2 * ARTPEC_CACHE_LINE_MAX,
2906*4882a593Smuzhiyun 				      GFP_KERNEL);
2907*4882a593Smuzhiyun 	if (!ac->pad_buffer)
2908*4882a593Smuzhiyun 		return -ENOMEM;
2909*4882a593Smuzhiyun 	ac->pad_buffer = PTR_ALIGN(ac->pad_buffer, ARTPEC_CACHE_LINE_MAX);
2910*4882a593Smuzhiyun 
2911*4882a593Smuzhiyun 	ac->zero_buffer = devm_kzalloc(&pdev->dev, 2 * ARTPEC_CACHE_LINE_MAX,
2912*4882a593Smuzhiyun 				      GFP_KERNEL);
2913*4882a593Smuzhiyun 	if (!ac->zero_buffer)
2914*4882a593Smuzhiyun 		return -ENOMEM;
2915*4882a593Smuzhiyun 	ac->zero_buffer = PTR_ALIGN(ac->zero_buffer, ARTPEC_CACHE_LINE_MAX);
2916*4882a593Smuzhiyun 
2917*4882a593Smuzhiyun 	err = init_crypto_hw(ac);
2918*4882a593Smuzhiyun 	if (err)
2919*4882a593Smuzhiyun 		goto free_cache;
2920*4882a593Smuzhiyun 
2921*4882a593Smuzhiyun 	err = devm_request_irq(&pdev->dev, irq, artpec6_crypto_irq, 0,
2922*4882a593Smuzhiyun 			       "artpec6-crypto", ac);
2923*4882a593Smuzhiyun 	if (err)
2924*4882a593Smuzhiyun 		goto disable_hw;
2925*4882a593Smuzhiyun 
2926*4882a593Smuzhiyun 	artpec6_crypto_dev = &pdev->dev;
2927*4882a593Smuzhiyun 
2928*4882a593Smuzhiyun 	err = crypto_register_ahashes(hash_algos, ARRAY_SIZE(hash_algos));
2929*4882a593Smuzhiyun 	if (err) {
2930*4882a593Smuzhiyun 		dev_err(dev, "Failed to register ahashes\n");
2931*4882a593Smuzhiyun 		goto disable_hw;
2932*4882a593Smuzhiyun 	}
2933*4882a593Smuzhiyun 
2934*4882a593Smuzhiyun 	err = crypto_register_skciphers(crypto_algos, ARRAY_SIZE(crypto_algos));
2935*4882a593Smuzhiyun 	if (err) {
2936*4882a593Smuzhiyun 		dev_err(dev, "Failed to register ciphers\n");
2937*4882a593Smuzhiyun 		goto unregister_ahashes;
2938*4882a593Smuzhiyun 	}
2939*4882a593Smuzhiyun 
2940*4882a593Smuzhiyun 	err = crypto_register_aeads(aead_algos, ARRAY_SIZE(aead_algos));
2941*4882a593Smuzhiyun 	if (err) {
2942*4882a593Smuzhiyun 		dev_err(dev, "Failed to register aeads\n");
2943*4882a593Smuzhiyun 		goto unregister_algs;
2944*4882a593Smuzhiyun 	}
2945*4882a593Smuzhiyun 
2946*4882a593Smuzhiyun 	return 0;
2947*4882a593Smuzhiyun 
2948*4882a593Smuzhiyun unregister_algs:
2949*4882a593Smuzhiyun 	crypto_unregister_skciphers(crypto_algos, ARRAY_SIZE(crypto_algos));
2950*4882a593Smuzhiyun unregister_ahashes:
2951*4882a593Smuzhiyun 	crypto_unregister_ahashes(hash_algos, ARRAY_SIZE(hash_algos));
2952*4882a593Smuzhiyun disable_hw:
2953*4882a593Smuzhiyun 	artpec6_crypto_disable_hw(ac);
2954*4882a593Smuzhiyun free_cache:
2955*4882a593Smuzhiyun 	kmem_cache_destroy(ac->dma_cache);
2956*4882a593Smuzhiyun 	return err;
2957*4882a593Smuzhiyun }
2958*4882a593Smuzhiyun 
2959*4882a593Smuzhiyun static int artpec6_crypto_remove(struct platform_device *pdev)
2960*4882a593Smuzhiyun {
2961*4882a593Smuzhiyun 	struct artpec6_crypto *ac = platform_get_drvdata(pdev);
2962*4882a593Smuzhiyun 	int irq = platform_get_irq(pdev, 0);
2963*4882a593Smuzhiyun 
2964*4882a593Smuzhiyun 	crypto_unregister_ahashes(hash_algos, ARRAY_SIZE(hash_algos));
2965*4882a593Smuzhiyun 	crypto_unregister_skciphers(crypto_algos, ARRAY_SIZE(crypto_algos));
2966*4882a593Smuzhiyun 	crypto_unregister_aeads(aead_algos, ARRAY_SIZE(aead_algos));
2967*4882a593Smuzhiyun 
2968*4882a593Smuzhiyun 	tasklet_disable(&ac->task);
2969*4882a593Smuzhiyun 	devm_free_irq(&pdev->dev, irq, ac);
2970*4882a593Smuzhiyun 	tasklet_kill(&ac->task);
2971*4882a593Smuzhiyun 	del_timer_sync(&ac->timer);
2972*4882a593Smuzhiyun 
2973*4882a593Smuzhiyun 	artpec6_crypto_disable_hw(ac);
2974*4882a593Smuzhiyun 
2975*4882a593Smuzhiyun 	kmem_cache_destroy(ac->dma_cache);
2976*4882a593Smuzhiyun #ifdef CONFIG_DEBUG_FS
2977*4882a593Smuzhiyun 	artpec6_crypto_free_debugfs();
2978*4882a593Smuzhiyun #endif
2979*4882a593Smuzhiyun 	return 0;
2980*4882a593Smuzhiyun }
2981*4882a593Smuzhiyun 
2982*4882a593Smuzhiyun static struct platform_driver artpec6_crypto_driver = {
2983*4882a593Smuzhiyun 	.probe   = artpec6_crypto_probe,
2984*4882a593Smuzhiyun 	.remove  = artpec6_crypto_remove,
2985*4882a593Smuzhiyun 	.driver  = {
2986*4882a593Smuzhiyun 		.name  = "artpec6-crypto",
2987*4882a593Smuzhiyun 		.of_match_table = artpec6_crypto_of_match,
2988*4882a593Smuzhiyun 	},
2989*4882a593Smuzhiyun };
2990*4882a593Smuzhiyun 
2991*4882a593Smuzhiyun module_platform_driver(artpec6_crypto_driver);
2992*4882a593Smuzhiyun 
2993*4882a593Smuzhiyun MODULE_AUTHOR("Axis Communications AB");
2994*4882a593Smuzhiyun MODULE_DESCRIPTION("ARTPEC-6 Crypto driver");
2995*4882a593Smuzhiyun MODULE_LICENSE("GPL");