// SPDX-License-Identifier: GPL-2.0-only
/*
 * Intel IXP4xx NPE-C crypto driver
 *
 * Copyright (C) 2008 Christian Hohnstaedt <chohnstaedt@innominate.com>
 */

#include <linux/platform_device.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>
#include <linux/crypto.h>
#include <linux/kernel.h>
#include <linux/rtnetlink.h>
#include <linux/interrupt.h>
#include <linux/spinlock.h>
#include <linux/gfp.h>
#include <linux/module.h>

#include <crypto/ctr.h>
#include <crypto/internal/des.h>
#include <crypto/aes.h>
#include <crypto/hmac.h>
#include <crypto/sha.h>
#include <crypto/algapi.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <crypto/authenc.h>
#include <crypto/scatterwalk.h>

#include <linux/soc/ixp4xx/npe.h>
#include <linux/soc/ixp4xx/qmgr.h>
#define MAX_KEYLEN 32

/* hash: cfgword + 2 * digestlen; crypt: keylen + cfgword */
#define NPE_CTX_LEN 80
#define AES_BLOCK128 16

#define NPE_OP_HASH_VERIFY   0x01
#define NPE_OP_CCM_ENABLE    0x04
#define NPE_OP_CRYPT_ENABLE  0x08
#define NPE_OP_HASH_ENABLE   0x10
#define NPE_OP_NOT_IN_PLACE  0x20
#define NPE_OP_HMAC_DISABLE  0x40
#define NPE_OP_CRYPT_ENCRYPT 0x80

#define NPE_OP_CCM_GEN_MIC   0xcc
#define NPE_OP_HASH_GEN_ICV  0x50
#define NPE_OP_ENC_GEN_KEY   0xc9

#define MOD_ECB     0x0000
#define MOD_CTR     0x1000
#define MOD_CBC_ENC 0x2000
#define MOD_CBC_DEC 0x3000
#define MOD_CCM_ENC 0x4000
#define MOD_CCM_DEC 0x5000

#define KEYLEN_128  4
#define KEYLEN_192  6
#define KEYLEN_256  8

#define CIPH_DECR   0x0000
#define CIPH_ENCR   0x0400

#define MOD_DES     0x0000
#define MOD_TDEA2   0x0100
#define MOD_3DES    0x0200
#define MOD_AES     0x0800
#define MOD_AES128  (0x0800 | KEYLEN_128)
#define MOD_AES192  (0x0900 | KEYLEN_192)
#define MOD_AES256  (0x0a00 | KEYLEN_256)

#define MAX_IVLEN   16
#define NPE_ID      2  /* NPE C */
#define NPE_QLEN    16
/* Space for registering when the first
 * NPE_QLEN crypt_ctl are busy */
#define NPE_QLEN_TOTAL 64

#define SEND_QID    29
#define RECV_QID    30

#define CTL_FLAG_UNUSED		0x0000
#define CTL_FLAG_USED		0x1000
#define CTL_FLAG_PERFORM_ABLK	0x0001
#define CTL_FLAG_GEN_ICV	0x0002
#define CTL_FLAG_GEN_REVAES	0x0004
#define CTL_FLAG_PERFORM_AEAD	0x0008
#define CTL_FLAG_MASK		0x000f

#define HMAC_PAD_BLOCKLEN SHA1_BLOCK_SIZE

#define MD5_DIGEST_SIZE   16
struct buffer_desc {
	u32 phys_next;
#ifdef __ARMEB__
	u16 buf_len;
	u16 pkt_len;
#else
	u16 pkt_len;
	u16 buf_len;
#endif
	dma_addr_t phys_addr;
	u32 __reserved[4];
	struct buffer_desc *next;
	enum dma_data_direction dir;
};

struct crypt_ctl {
#ifdef __ARMEB__
	u8 mode;		/* NPE_OP_* operation mode */
	u8 init_len;
	u16 reserved;
#else
	u16 reserved;
	u8 init_len;
	u8 mode;		/* NPE_OP_* operation mode */
#endif
	u8 iv[MAX_IVLEN];	/* IV for CBC mode or CTR IV for CTR mode */
	dma_addr_t icv_rev_aes;	/* icv or rev aes */
	dma_addr_t src_buf;
	dma_addr_t dst_buf;
#ifdef __ARMEB__
	u16 auth_offs;		/* Authentication start offset */
	u16 auth_len;		/* Authentication data length */
	u16 crypt_offs;		/* Cryption start offset */
	u16 crypt_len;		/* Cryption data length */
#else
	u16 auth_len;		/* Authentication data length */
	u16 auth_offs;		/* Authentication start offset */
	u16 crypt_len;		/* Cryption data length */
	u16 crypt_offs;		/* Cryption start offset */
#endif
	u32 aadAddr;		/* Additional Auth Data Addr for CCM mode */
	u32 crypto_ctx;		/* NPE Crypto Param structure address */

	/* Used by Host: 4*4 bytes */
	unsigned ctl_flags;
	union {
		struct skcipher_request *ablk_req;
		struct aead_request *aead_req;
		struct crypto_tfm *tfm;
	} data;
	struct buffer_desc *regist_buf;
	u8 *regist_ptr;
};

struct ablk_ctx {
	struct buffer_desc *src;
	struct buffer_desc *dst;
	u8 iv[MAX_IVLEN];
	bool encrypt;
};

struct aead_ctx {
	struct buffer_desc *src;
	struct buffer_desc *dst;
	struct scatterlist ivlist;
	/* used when the hmac is not on one sg entry */
	u8 *hmac_virt;
	int encrypt;
};

struct ix_hash_algo {
	u32 cfgword;
	unsigned char *icv;
};

struct ix_sa_dir {
	unsigned char *npe_ctx;
	dma_addr_t npe_ctx_phys;
	int npe_ctx_idx;
	u8 npe_mode;
};

struct ixp_ctx {
	struct ix_sa_dir encrypt;
	struct ix_sa_dir decrypt;
	int authkey_len;
	u8 authkey[MAX_KEYLEN];
	int enckey_len;
	u8 enckey[MAX_KEYLEN];
	u8 salt[MAX_IVLEN];
	u8 nonce[CTR_RFC3686_NONCE_SIZE];
	unsigned salted;
	atomic_t configuring;
	struct completion completion;
};

struct ixp_alg {
	struct skcipher_alg crypto;
	const struct ix_hash_algo *hash;
	u32 cfg_enc;
	u32 cfg_dec;

	int registered;
};

struct ixp_aead_alg {
	struct aead_alg crypto;
	const struct ix_hash_algo *hash;
	u32 cfg_enc;
	u32 cfg_dec;

	int registered;
};

static const struct ix_hash_algo hash_alg_md5 = {
	.cfgword	= 0xAA010004,
	.icv		= "\x01\x23\x45\x67\x89\xAB\xCD\xEF"
			  "\xFE\xDC\xBA\x98\x76\x54\x32\x10",
};
static const struct ix_hash_algo hash_alg_sha1 = {
	.cfgword	= 0x00000005,
	.icv		= "\x67\x45\x23\x01\xEF\xCD\xAB\x89\x98\xBA"
			  "\xDC\xFE\x10\x32\x54\x76\xC3\xD2\xE1\xF0",
};

static struct npe *npe_c;
static struct dma_pool *buffer_pool;
static struct dma_pool *ctx_pool;

static struct crypt_ctl *crypt_virt;
static dma_addr_t crypt_phys;

static int support_aes = 1;

#define DRIVER_NAME "ixp4xx_crypto"

static struct platform_device *pdev;

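/*
 * Translate between the CPU and DMA views of an entry in the coherent
 * crypt_ctl descriptor pool; the NPE queues carry only physical addresses.
 */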
static inline dma_addr_t crypt_virt2phys(struct crypt_ctl *virt)
{
	return crypt_phys + (virt - crypt_virt) * sizeof(struct crypt_ctl);
}

static inline struct crypt_ctl *crypt_phys2virt(dma_addr_t phys)
{
	return crypt_virt + (phys - crypt_phys) / sizeof(struct crypt_ctl);
}

static inline u32 cipher_cfg_enc(struct crypto_tfm *tfm)
{
	return container_of(tfm->__crt_alg, struct ixp_alg, crypto.base)->cfg_enc;
}

static inline u32 cipher_cfg_dec(struct crypto_tfm *tfm)
{
	return container_of(tfm->__crt_alg, struct ixp_alg, crypto.base)->cfg_dec;
}

static inline const struct ix_hash_algo *ix_hash(struct crypto_tfm *tfm)
{
	return container_of(tfm->__crt_alg, struct ixp_alg, crypto.base)->hash;
}

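/*
 * Lazily allocate the pool of crypt_ctl descriptors as one coherent DMA
 * block. Called from get_crypt_desc() under a spinlock, hence GFP_ATOMIC.
 */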
static int setup_crypt_desc(void)
{
	struct device *dev = &pdev->dev;

	BUILD_BUG_ON(sizeof(struct crypt_ctl) != 64);
	/* Size the block for NPE_QLEN_TOTAL entries so the emergency
	 * descriptors (indices NPE_QLEN..NPE_QLEN_TOTAL-1) handed out by
	 * get_crypt_desc_emerg() are backed by allocated memory, too.
	 */
	crypt_virt = dma_alloc_coherent(dev,
					NPE_QLEN_TOTAL * sizeof(struct crypt_ctl),
					&crypt_phys, GFP_ATOMIC);
	if (!crypt_virt)
		return -ENOMEM;
	return 0;
}

static spinlock_t desc_lock;
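/*
 * Hand out the next free descriptor from the normal region [0, NPE_QLEN)
 * using a round-robin cursor; returns NULL if that slot is still in use.
 */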
static struct crypt_ctl *get_crypt_desc(void)
{
	int i;
	static int idx;
	unsigned long flags;

	spin_lock_irqsave(&desc_lock, flags);

	if (unlikely(!crypt_virt))
		setup_crypt_desc();
	if (unlikely(!crypt_virt)) {
		spin_unlock_irqrestore(&desc_lock, flags);
		return NULL;
	}
	i = idx;
	if (crypt_virt[i].ctl_flags == CTL_FLAG_UNUSED) {
		if (++idx >= NPE_QLEN)
			idx = 0;
		crypt_virt[i].ctl_flags = CTL_FLAG_USED;
		spin_unlock_irqrestore(&desc_lock, flags);
		return crypt_virt + i;
	} else {
		spin_unlock_irqrestore(&desc_lock, flags);
		return NULL;
	}
}

static spinlock_t emerg_lock;
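/*
 * Like get_crypt_desc(), but may also fall back to the emergency region
 * [NPE_QLEN, NPE_QLEN_TOTAL) reserved for setkey-time configuration ops.
 */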
static struct crypt_ctl *get_crypt_desc_emerg(void)
{
	int i;
	static int idx = NPE_QLEN;
	struct crypt_ctl *desc;
	unsigned long flags;

	desc = get_crypt_desc();
	if (desc)
		return desc;
	if (unlikely(!crypt_virt))
		return NULL;

	spin_lock_irqsave(&emerg_lock, flags);
	i = idx;
	if (crypt_virt[i].ctl_flags == CTL_FLAG_UNUSED) {
		if (++idx >= NPE_QLEN_TOTAL)
			idx = NPE_QLEN;
		crypt_virt[i].ctl_flags = CTL_FLAG_USED;
		spin_unlock_irqrestore(&emerg_lock, flags);
		return crypt_virt + i;
	} else {
		spin_unlock_irqrestore(&emerg_lock, flags);
		return NULL;
	}
}

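/*
 * Walk a buffer_desc chain, unmapping each buffer and returning the
 * descriptors to buffer_pool.
 */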
static void free_buf_chain(struct device *dev, struct buffer_desc *buf,
			   dma_addr_t phys)
{
	while (buf) {
		struct buffer_desc *buf1;
		u32 phys1;

		buf1 = buf->next;
		phys1 = buf->phys_next;
		dma_unmap_single(dev, buf->phys_addr, buf->buf_len, buf->dir);
		dma_pool_free(buffer_pool, buf, phys);
		buf = buf1;
		phys = phys1;
	}
}

static struct tasklet_struct crypto_done_tasklet;

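/*
 * On encryption, copy the ICV from the bounce buffer back into the
 * scattered destination; in both directions, free the bounce buffer.
 */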
static void finish_scattered_hmac(struct crypt_ctl *crypt)
{
	struct aead_request *req = crypt->data.aead_req;
	struct aead_ctx *req_ctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	int authsize = crypto_aead_authsize(tfm);
	int decryptlen = req->assoclen + req->cryptlen - authsize;

	if (req_ctx->encrypt) {
		scatterwalk_map_and_copy(req_ctx->hmac_virt,
			req->dst, decryptlen, authsize, 1);
	}
	dma_pool_free(buffer_pool, req_ctx->hmac_virt, crypt->icv_rev_aes);
}

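/*
 * Handle one completed descriptor from the NPE: bit 0 of the returned
 * physical address signals an authentication failure, the rest selects
 * the crypt_ctl entry, whose ctl_flags determine the cleanup path.
 */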
static void one_packet(dma_addr_t phys)
{
	struct device *dev = &pdev->dev;
	struct crypt_ctl *crypt;
	struct ixp_ctx *ctx;
	int failed;

	failed = phys & 0x1 ? -EBADMSG : 0;
	phys &= ~0x3;
	crypt = crypt_phys2virt(phys);

	switch (crypt->ctl_flags & CTL_FLAG_MASK) {
	case CTL_FLAG_PERFORM_AEAD: {
		struct aead_request *req = crypt->data.aead_req;
		struct aead_ctx *req_ctx = aead_request_ctx(req);

		free_buf_chain(dev, req_ctx->src, crypt->src_buf);
		free_buf_chain(dev, req_ctx->dst, crypt->dst_buf);
		if (req_ctx->hmac_virt)
			finish_scattered_hmac(crypt);

		req->base.complete(&req->base, failed);
		break;
	}
	case CTL_FLAG_PERFORM_ABLK: {
		struct skcipher_request *req = crypt->data.ablk_req;
		struct ablk_ctx *req_ctx = skcipher_request_ctx(req);
		struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
		unsigned int ivsize = crypto_skcipher_ivsize(tfm);
		unsigned int offset;

		if (ivsize > 0) {
			offset = req->cryptlen - ivsize;
			if (req_ctx->encrypt) {
				scatterwalk_map_and_copy(req->iv, req->dst,
							 offset, ivsize, 0);
			} else {
				memcpy(req->iv, req_ctx->iv, ivsize);
				memzero_explicit(req_ctx->iv, ivsize);
			}
		}

		if (req_ctx->dst)
			free_buf_chain(dev, req_ctx->dst, crypt->dst_buf);

		free_buf_chain(dev, req_ctx->src, crypt->src_buf);
		req->base.complete(&req->base, failed);
		break;
	}
	case CTL_FLAG_GEN_ICV:
		ctx = crypto_tfm_ctx(crypt->data.tfm);
		dma_pool_free(ctx_pool, crypt->regist_ptr,
				crypt->regist_buf->phys_addr);
		dma_pool_free(buffer_pool, crypt->regist_buf, crypt->src_buf);
		if (atomic_dec_and_test(&ctx->configuring))
			complete(&ctx->completion);
		break;
	case CTL_FLAG_GEN_REVAES:
		ctx = crypto_tfm_ctx(crypt->data.tfm);
		*(u32 *)ctx->decrypt.npe_ctx &= cpu_to_be32(~CIPH_ENCR);
		if (atomic_dec_and_test(&ctx->configuring))
			complete(&ctx->completion);
		break;
	default:
		BUG();
	}
	crypt->ctl_flags = CTL_FLAG_UNUSED;
}

static void irqhandler(void *_unused)
{
	tasklet_schedule(&crypto_done_tasklet);
}

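/*
 * Tasklet: drain up to four completions from the receive queue per run,
 * then reschedule itself so a busy queue cannot monopolize the CPU.
 */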
static void crypto_done_action(unsigned long arg)
{
	int i;

	for (i = 0; i < 4; i++) {
		dma_addr_t phys = qmgr_get_entry(RECV_QID);

		if (!phys)
			return;
		one_packet(phys);
	}
	tasklet_schedule(&crypto_done_tasklet);
}

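/*
 * Probe-time setup: check the hardware feature bits, load and query the
 * NPE-C firmware, create the DMA pools and claim the send/receive queues.
 */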
static int init_ixp_crypto(struct device *dev)
{
	int ret = -ENODEV;
	u32 msg[2] = { 0, 0 };

	if (!(~(*IXP4XX_EXP_CFG2) & (IXP4XX_FEATURE_HASH |
				     IXP4XX_FEATURE_AES | IXP4XX_FEATURE_DES))) {
		printk(KERN_ERR "ixp_crypto: No HW crypto available\n");
		return ret;
	}
	npe_c = npe_request(NPE_ID);
	if (!npe_c)
		return ret;

	if (!npe_running(npe_c)) {
		ret = npe_load_firmware(npe_c, npe_name(npe_c), dev);
		if (ret)
			goto npe_release;
		if (npe_recv_message(npe_c, msg, "STATUS_MSG"))
			goto npe_error;
	} else {
		if (npe_send_message(npe_c, msg, "STATUS_MSG"))
			goto npe_error;

		if (npe_recv_message(npe_c, msg, "STATUS_MSG"))
			goto npe_error;
	}

	switch ((msg[1] >> 16) & 0xff) {
	case 3:
		printk(KERN_WARNING "Firmware of %s lacks AES support\n",
				npe_name(npe_c));
		support_aes = 0;
		break;
	case 4:
	case 5:
		support_aes = 1;
		break;
	default:
		printk(KERN_ERR "Firmware of %s lacks crypto support\n",
			npe_name(npe_c));
		ret = -ENODEV;
		goto npe_release;
	}
	/* buffer_pool will also be used to sometimes store the hmac,
	 * so assure it is large enough
	 */
	BUILD_BUG_ON(SHA1_DIGEST_SIZE > sizeof(struct buffer_desc));
	buffer_pool = dma_pool_create("buffer", dev,
			sizeof(struct buffer_desc), 32, 0);
	ret = -ENOMEM;
	if (!buffer_pool)
		goto err;

	ctx_pool = dma_pool_create("context", dev,
			NPE_CTX_LEN, 16, 0);
	if (!ctx_pool)
		goto err;

	ret = qmgr_request_queue(SEND_QID, NPE_QLEN_TOTAL, 0, 0,
				 "ixp_crypto:out", NULL);
	if (ret)
		goto err;
	ret = qmgr_request_queue(RECV_QID, NPE_QLEN, 0, 0,
				 "ixp_crypto:in", NULL);
	if (ret) {
		qmgr_release_queue(SEND_QID);
		goto err;
	}
	qmgr_set_irq(RECV_QID, QUEUE_IRQ_SRC_NOT_EMPTY, irqhandler, NULL);
	tasklet_init(&crypto_done_tasklet, crypto_done_action, 0);

	qmgr_enable_irq(RECV_QID);
	return 0;

npe_error:
	printk(KERN_ERR "%s not responding\n", npe_name(npe_c));
	ret = -EIO;
err:
	dma_pool_destroy(ctx_pool);
	dma_pool_destroy(buffer_pool);
npe_release:
	npe_release(npe_c);
	return ret;
}

static void release_ixp_crypto(struct device *dev)
{
	qmgr_disable_irq(RECV_QID);
	tasklet_kill(&crypto_done_tasklet);

	qmgr_release_queue(SEND_QID);
	qmgr_release_queue(RECV_QID);

	dma_pool_destroy(ctx_pool);
	dma_pool_destroy(buffer_pool);

	npe_release(npe_c);

	if (crypt_virt) {
		/* must match the NPE_QLEN_TOTAL-sized allocation made in
		 * setup_crypt_desc()
		 */
		dma_free_coherent(dev,
			NPE_QLEN_TOTAL * sizeof(struct crypt_ctl),
			crypt_virt, crypt_phys);
	}
}

static void reset_sa_dir(struct ix_sa_dir *dir)
{
	memset(dir->npe_ctx, 0, NPE_CTX_LEN);
	dir->npe_ctx_idx = 0;
	dir->npe_mode = 0;
}

static int init_sa_dir(struct ix_sa_dir *dir)
{
	dir->npe_ctx = dma_pool_alloc(ctx_pool, GFP_KERNEL, &dir->npe_ctx_phys);
	if (!dir->npe_ctx)
		return -ENOMEM;

	reset_sa_dir(dir);
	return 0;
}

static void free_sa_dir(struct ix_sa_dir *dir)
{
	memset(dir->npe_ctx, 0, NPE_CTX_LEN);
	dma_pool_free(ctx_pool, dir->npe_ctx, dir->npe_ctx_phys);
}

static int init_tfm(struct crypto_tfm *tfm)
{
	struct ixp_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	atomic_set(&ctx->configuring, 0);
	ret = init_sa_dir(&ctx->encrypt);
	if (ret)
		return ret;
	ret = init_sa_dir(&ctx->decrypt);
	if (ret)
		free_sa_dir(&ctx->encrypt);

	return ret;
}

static int init_tfm_ablk(struct crypto_skcipher *tfm)
{
	crypto_skcipher_set_reqsize(tfm, sizeof(struct ablk_ctx));
	return init_tfm(crypto_skcipher_tfm(tfm));
}

static int init_tfm_aead(struct crypto_aead *tfm)
{
	crypto_aead_set_reqsize(tfm, sizeof(struct aead_ctx));
	return init_tfm(crypto_aead_tfm(tfm));
}

static void exit_tfm(struct crypto_tfm *tfm)
{
	struct ixp_ctx *ctx = crypto_tfm_ctx(tfm);

	free_sa_dir(&ctx->encrypt);
	free_sa_dir(&ctx->decrypt);
}

static void exit_tfm_ablk(struct crypto_skcipher *tfm)
{
	exit_tfm(crypto_skcipher_tfm(tfm));
}

static void exit_tfm_aead(struct crypto_aead *tfm)
{
	exit_tfm(crypto_aead_tfm(tfm));
}

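/*
 * Queue one HMAC pad hash to the NPE: XOR the key with the ipad/opad byte,
 * hash the padded block and store the intermediate digest at @target inside
 * the NPE context. Completion is counted through ctx->configuring.
 */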
static int register_chain_var(struct crypto_tfm *tfm, u8 xpad, u32 target,
		int init_len, u32 ctx_addr, const u8 *key, int key_len)
{
	struct ixp_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypt_ctl *crypt;
	struct buffer_desc *buf;
	int i;
	u8 *pad;
	dma_addr_t pad_phys, buf_phys;

	BUILD_BUG_ON(NPE_CTX_LEN < HMAC_PAD_BLOCKLEN);
	pad = dma_pool_alloc(ctx_pool, GFP_KERNEL, &pad_phys);
	if (!pad)
		return -ENOMEM;
	buf = dma_pool_alloc(buffer_pool, GFP_KERNEL, &buf_phys);
	if (!buf) {
		dma_pool_free(ctx_pool, pad, pad_phys);
		return -ENOMEM;
	}
	crypt = get_crypt_desc_emerg();
	if (!crypt) {
		dma_pool_free(ctx_pool, pad, pad_phys);
		dma_pool_free(buffer_pool, buf, buf_phys);
		return -EAGAIN;
	}

	memcpy(pad, key, key_len);
	memset(pad + key_len, 0, HMAC_PAD_BLOCKLEN - key_len);
	for (i = 0; i < HMAC_PAD_BLOCKLEN; i++)
		pad[i] ^= xpad;

	crypt->data.tfm = tfm;
	crypt->regist_ptr = pad;
	crypt->regist_buf = buf;

	crypt->auth_offs = 0;
	crypt->auth_len = HMAC_PAD_BLOCKLEN;
	crypt->crypto_ctx = ctx_addr;
	crypt->src_buf = buf_phys;
	crypt->icv_rev_aes = target;
	crypt->mode = NPE_OP_HASH_GEN_ICV;
	crypt->init_len = init_len;
	crypt->ctl_flags |= CTL_FLAG_GEN_ICV;

	buf->next = NULL;
	buf->buf_len = HMAC_PAD_BLOCKLEN;
	buf->pkt_len = 0;
	buf->phys_addr = pad_phys;

	atomic_inc(&ctx->configuring);
	qmgr_put_entry(SEND_QID, crypt_virt2phys(crypt));
	BUG_ON(qmgr_stat_overflow(SEND_QID));
	return 0;
}

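/*
 * Write the authentication part of the NPE context for one direction
 * (config word plus initial chaining values), then register the HMAC
 * inner and outer pad digests via register_chain_var().
 */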
static int setup_auth(struct crypto_tfm *tfm, int encrypt, unsigned authsize,
		const u8 *key, int key_len, unsigned digest_len)
{
	u32 itarget, otarget, npe_ctx_addr;
	unsigned char *cinfo;
	int init_len, ret = 0;
	u32 cfgword;
	struct ix_sa_dir *dir;
	struct ixp_ctx *ctx = crypto_tfm_ctx(tfm);
	const struct ix_hash_algo *algo;

	dir = encrypt ? &ctx->encrypt : &ctx->decrypt;
	cinfo = dir->npe_ctx + dir->npe_ctx_idx;
	algo = ix_hash(tfm);

	/* write cfg word to cryptinfo */
	cfgword = algo->cfgword | (authsize << 6); /* (authsize/4) << 8 */
#ifndef __ARMEB__
	cfgword ^= 0xAA000000; /* change the "byte swap" flags */
#endif
	*(u32 *)cinfo = cpu_to_be32(cfgword);
	cinfo += sizeof(cfgword);

	/* write ICV to cryptinfo */
	memcpy(cinfo, algo->icv, digest_len);
	cinfo += digest_len;

	itarget = dir->npe_ctx_phys + dir->npe_ctx_idx
				+ sizeof(algo->cfgword);
	otarget = itarget + digest_len;
	init_len = cinfo - (dir->npe_ctx + dir->npe_ctx_idx);
	npe_ctx_addr = dir->npe_ctx_phys + dir->npe_ctx_idx;

	dir->npe_ctx_idx += init_len;
	dir->npe_mode |= NPE_OP_HASH_ENABLE;

	if (!encrypt)
		dir->npe_mode |= NPE_OP_HASH_VERIFY;

	ret = register_chain_var(tfm, HMAC_OPAD_VALUE, otarget,
			init_len, npe_ctx_addr, key, key_len);
	if (ret)
		return ret;
	return register_chain_var(tfm, HMAC_IPAD_VALUE, itarget,
			init_len, npe_ctx_addr, key, key_len);
}

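/*
 * Have the NPE derive the AES decryption (reverse) key schedule: mark the
 * decrypt context as encrypting, run NPE_OP_ENC_GEN_KEY, and let the
 * CTL_FLAG_GEN_REVAES completion handler clear CIPH_ENCR again.
 */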
static int gen_rev_aes_key(struct crypto_tfm *tfm)
{
	struct crypt_ctl *crypt;
	struct ixp_ctx *ctx = crypto_tfm_ctx(tfm);
	struct ix_sa_dir *dir = &ctx->decrypt;

	crypt = get_crypt_desc_emerg();
	if (!crypt)
		return -EAGAIN;

	*(u32 *)dir->npe_ctx |= cpu_to_be32(CIPH_ENCR);

	crypt->data.tfm = tfm;
	crypt->crypt_offs = 0;
	crypt->crypt_len = AES_BLOCK128;
	crypt->src_buf = 0;
	crypt->crypto_ctx = dir->npe_ctx_phys;
	crypt->icv_rev_aes = dir->npe_ctx_phys + sizeof(u32);
	crypt->mode = NPE_OP_ENC_GEN_KEY;
	crypt->init_len = dir->npe_ctx_idx;
	crypt->ctl_flags |= CTL_FLAG_GEN_REVAES;

	atomic_inc(&ctx->configuring);
	qmgr_put_entry(SEND_QID, crypt_virt2phys(crypt));
	BUG_ON(qmgr_stat_overflow(SEND_QID));
	return 0;
}

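/*
 * Write the cipher part of the NPE context for one direction: the config
 * word (with the AES key-length bits) followed by the key, which is padded
 * to DES3_EDE_KEY_SIZE for single DES as the NPE expects.
 */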
static int setup_cipher(struct crypto_tfm *tfm, int encrypt,
		const u8 *key, int key_len)
{
	u8 *cinfo;
	u32 cipher_cfg;
	u32 keylen_cfg = 0;
	struct ix_sa_dir *dir;
	struct ixp_ctx *ctx = crypto_tfm_ctx(tfm);
	int err;

	dir = encrypt ? &ctx->encrypt : &ctx->decrypt;
	cinfo = dir->npe_ctx;

	if (encrypt) {
		cipher_cfg = cipher_cfg_enc(tfm);
		dir->npe_mode |= NPE_OP_CRYPT_ENCRYPT;
	} else {
		cipher_cfg = cipher_cfg_dec(tfm);
	}
	if (cipher_cfg & MOD_AES) {
		switch (key_len) {
		case 16:
			keylen_cfg = MOD_AES128;
			break;
		case 24:
			keylen_cfg = MOD_AES192;
			break;
		case 32:
			keylen_cfg = MOD_AES256;
			break;
		default:
			return -EINVAL;
		}
		cipher_cfg |= keylen_cfg;
	} else {
		err = crypto_des_verify_key(tfm, key);
		if (err)
			return err;
	}
	/* write cfg word to cryptinfo */
	*(u32 *)cinfo = cpu_to_be32(cipher_cfg);
	cinfo += sizeof(cipher_cfg);

	/* write cipher key to cryptinfo */
	memcpy(cinfo, key, key_len);
	/* NPE wants keylen set to DES3_EDE_KEY_SIZE even for single DES */
	if (key_len < DES3_EDE_KEY_SIZE && !(cipher_cfg & MOD_AES)) {
		memset(cinfo + key_len, 0, DES3_EDE_KEY_SIZE - key_len);
		key_len = DES3_EDE_KEY_SIZE;
	}
	dir->npe_ctx_idx = sizeof(cipher_cfg) + key_len;
	dir->npe_mode |= NPE_OP_CRYPT_ENABLE;
	if ((cipher_cfg & MOD_AES) && !encrypt)
		return gen_rev_aes_key(tfm);

	return 0;
}

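/*
 * DMA-map a scatterlist and mirror it as a chain of buffer_desc entries
 * for the NPE; @buf is the caller-provided head of the chain. Returns the
 * last descriptor, or NULL if a pool allocation failed.
 */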
static struct buffer_desc *chainup_buffers(struct device *dev,
		struct scatterlist *sg, unsigned nbytes,
		struct buffer_desc *buf, gfp_t flags,
		enum dma_data_direction dir)
{
	for (; nbytes > 0; sg = sg_next(sg)) {
		unsigned len = min(nbytes, sg->length);
		struct buffer_desc *next_buf;
		dma_addr_t next_buf_phys;
		void *ptr;

		nbytes -= len;
		ptr = sg_virt(sg);
		next_buf = dma_pool_alloc(buffer_pool, flags, &next_buf_phys);
		if (!next_buf) {
			/* Terminate the partial chain so the caller can free
			 * it with free_buf_chain(), and report the failure
			 * through the NULL return.
			 */
			buf->next = NULL;
			buf->phys_next = 0;
			return NULL;
		}
		sg_dma_address(sg) = dma_map_single(dev, ptr, len, dir);
		buf->next = next_buf;
		buf->phys_next = next_buf_phys;
		buf = next_buf;

		buf->phys_addr = sg_dma_address(sg);
		buf->buf_len = len;
		buf->dir = dir;
	}
	buf->next = NULL;
	buf->phys_next = 0;
	return buf;
}

static int ablk_setkey(struct crypto_skcipher *tfm, const u8 *key,
			unsigned int key_len)
{
	struct ixp_ctx *ctx = crypto_skcipher_ctx(tfm);
	int ret;

	init_completion(&ctx->completion);
	atomic_inc(&ctx->configuring);

	reset_sa_dir(&ctx->encrypt);
	reset_sa_dir(&ctx->decrypt);

	ctx->encrypt.npe_mode = NPE_OP_HMAC_DISABLE;
	ctx->decrypt.npe_mode = NPE_OP_HMAC_DISABLE;

	ret = setup_cipher(&tfm->base, 0, key, key_len);
	if (ret)
		goto out;
	ret = setup_cipher(&tfm->base, 1, key, key_len);
out:
	if (!atomic_dec_and_test(&ctx->configuring))
		wait_for_completion(&ctx->completion);
	return ret;
}

static int ablk_des3_setkey(struct crypto_skcipher *tfm, const u8 *key,
			    unsigned int key_len)
{
	return verify_skcipher_des3_key(tfm, key) ?:
	       ablk_setkey(tfm, key, key_len);
}

static int ablk_rfc3686_setkey(struct crypto_skcipher *tfm, const u8 *key,
		unsigned int key_len)
{
	struct ixp_ctx *ctx = crypto_skcipher_ctx(tfm);

	/* the nonce is stored in bytes at end of key */
	if (key_len < CTR_RFC3686_NONCE_SIZE)
		return -EINVAL;

	memcpy(ctx->nonce, key + (key_len - CTR_RFC3686_NONCE_SIZE),
			CTR_RFC3686_NONCE_SIZE);

	key_len -= CTR_RFC3686_NONCE_SIZE;
	return ablk_setkey(tfm, key, key_len);
}

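/*
 * Common skcipher path: copy the IV into the descriptor, chain the source
 * (and, when out of place, destination) scatterlists and queue a single
 * crypt_ctl entry to the NPE. Completion is reported from the tasklet.
 */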
static int ablk_perform(struct skcipher_request *req, int encrypt)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct ixp_ctx *ctx = crypto_skcipher_ctx(tfm);
	unsigned ivsize = crypto_skcipher_ivsize(tfm);
	struct ix_sa_dir *dir;
	struct crypt_ctl *crypt;
	unsigned int nbytes = req->cryptlen;
	enum dma_data_direction src_direction = DMA_BIDIRECTIONAL;
	struct ablk_ctx *req_ctx = skcipher_request_ctx(req);
	struct buffer_desc src_hook;
	struct device *dev = &pdev->dev;
	unsigned int offset;
	gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ?
				GFP_KERNEL : GFP_ATOMIC;

	if (qmgr_stat_full(SEND_QID))
		return -EAGAIN;
	if (atomic_read(&ctx->configuring))
		return -EAGAIN;

	dir = encrypt ? &ctx->encrypt : &ctx->decrypt;
	req_ctx->encrypt = encrypt;

	crypt = get_crypt_desc();
	if (!crypt)
		return -ENOMEM;

	crypt->data.ablk_req = req;
	crypt->crypto_ctx = dir->npe_ctx_phys;
	crypt->mode = dir->npe_mode;
	crypt->init_len = dir->npe_ctx_idx;

	crypt->crypt_offs = 0;
	crypt->crypt_len = nbytes;

	BUG_ON(ivsize && !req->iv);
	memcpy(crypt->iv, req->iv, ivsize);
	if (ivsize > 0 && !encrypt) {
		offset = req->cryptlen - ivsize;
		scatterwalk_map_and_copy(req_ctx->iv, req->src, offset, ivsize, 0);
	}
	if (req->src != req->dst) {
		struct buffer_desc dst_hook;

		crypt->mode |= NPE_OP_NOT_IN_PLACE;
		/* This was never tested by Intel
		 * for more than one dst buffer, I think. */
		req_ctx->dst = NULL;
		if (!chainup_buffers(dev, req->dst, nbytes, &dst_hook,
					flags, DMA_FROM_DEVICE))
			goto free_buf_dest;
		src_direction = DMA_TO_DEVICE;
		req_ctx->dst = dst_hook.next;
		crypt->dst_buf = dst_hook.phys_next;
	} else {
		req_ctx->dst = NULL;
	}
	req_ctx->src = NULL;
	if (!chainup_buffers(dev, req->src, nbytes, &src_hook,
				flags, src_direction))
		goto free_buf_src;

	req_ctx->src = src_hook.next;
	crypt->src_buf = src_hook.phys_next;
	crypt->ctl_flags |= CTL_FLAG_PERFORM_ABLK;
	qmgr_put_entry(SEND_QID, crypt_virt2phys(crypt));
	BUG_ON(qmgr_stat_overflow(SEND_QID));
	return -EINPROGRESS;

free_buf_src:
	free_buf_chain(dev, req_ctx->src, crypt->src_buf);
free_buf_dest:
	if (req->src != req->dst)
		free_buf_chain(dev, req_ctx->dst, crypt->dst_buf);

	crypt->ctl_flags = CTL_FLAG_UNUSED;
	return -ENOMEM;
}

static int ablk_encrypt(struct skcipher_request *req)
{
	return ablk_perform(req, 1);
}

static int ablk_decrypt(struct skcipher_request *req)
{
	return ablk_perform(req, 0);
}

static int ablk_rfc3686_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct ixp_ctx *ctx = crypto_skcipher_ctx(tfm);
	u8 iv[CTR_RFC3686_BLOCK_SIZE];
	u8 *info = req->iv;
	int ret;

	/* set up counter block */
	memcpy(iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE);
	memcpy(iv + CTR_RFC3686_NONCE_SIZE, info, CTR_RFC3686_IV_SIZE);

	/* initialize counter portion of counter block */
	*(__be32 *)(iv + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) =
		cpu_to_be32(1);

	req->iv = iv;
	ret = ablk_perform(req, 1);
	req->iv = info;
	return ret;
}

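/*
 * Common AEAD path: authentication covers assoclen + cryptlen while the
 * cipher covers [cryptoffset, cryptoffset + eff_cryptlen). If the ICV is
 * not contiguous in the final buffer, it is bounced through hmac_virt.
 */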
static int aead_perform(struct aead_request *req, int encrypt,
		int cryptoffset, int eff_cryptlen, u8 *iv)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct ixp_ctx *ctx = crypto_aead_ctx(tfm);
	unsigned ivsize = crypto_aead_ivsize(tfm);
	unsigned authsize = crypto_aead_authsize(tfm);
	struct ix_sa_dir *dir;
	struct crypt_ctl *crypt;
	unsigned int cryptlen;
	struct buffer_desc *buf, src_hook;
	struct aead_ctx *req_ctx = aead_request_ctx(req);
	struct device *dev = &pdev->dev;
	gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ?
				GFP_KERNEL : GFP_ATOMIC;
	enum dma_data_direction src_direction = DMA_BIDIRECTIONAL;
	unsigned int lastlen;

	if (qmgr_stat_full(SEND_QID))
		return -EAGAIN;
	if (atomic_read(&ctx->configuring))
		return -EAGAIN;

	if (encrypt) {
		dir = &ctx->encrypt;
		cryptlen = req->cryptlen;
	} else {
		dir = &ctx->decrypt;
		/* req->cryptlen includes the authsize when decrypting */
		cryptlen = req->cryptlen - authsize;
		eff_cryptlen -= authsize;
	}
	crypt = get_crypt_desc();
	if (!crypt)
		return -ENOMEM;

	crypt->data.aead_req = req;
	crypt->crypto_ctx = dir->npe_ctx_phys;
	crypt->mode = dir->npe_mode;
	crypt->init_len = dir->npe_ctx_idx;

	crypt->crypt_offs = cryptoffset;
	crypt->crypt_len = eff_cryptlen;

	crypt->auth_offs = 0;
	crypt->auth_len = req->assoclen + cryptlen;
	BUG_ON(ivsize && !req->iv);
	memcpy(crypt->iv, req->iv, ivsize);

	buf = chainup_buffers(dev, req->src, crypt->auth_len,
			      &src_hook, flags, src_direction);
	req_ctx->src = src_hook.next;
	crypt->src_buf = src_hook.phys_next;
	if (!buf)
		goto free_buf_src;

	lastlen = buf->buf_len;
	if (lastlen >= authsize)
		crypt->icv_rev_aes = buf->phys_addr +
				     buf->buf_len - authsize;

	req_ctx->dst = NULL;

	if (req->src != req->dst) {
		struct buffer_desc dst_hook;

		crypt->mode |= NPE_OP_NOT_IN_PLACE;
		src_direction = DMA_TO_DEVICE;

		buf = chainup_buffers(dev, req->dst, crypt->auth_len,
				      &dst_hook, flags, DMA_FROM_DEVICE);
		req_ctx->dst = dst_hook.next;
		crypt->dst_buf = dst_hook.phys_next;

		if (!buf)
			goto free_buf_dst;

		if (encrypt) {
			lastlen = buf->buf_len;
			if (lastlen >= authsize)
				crypt->icv_rev_aes = buf->phys_addr +
						     buf->buf_len - authsize;
		}
	}

	if (unlikely(lastlen < authsize)) {
		/* The 12 hmac bytes are scattered,
		 * we need to copy them into a safe buffer */
		req_ctx->hmac_virt = dma_pool_alloc(buffer_pool, flags,
				&crypt->icv_rev_aes);
		if (unlikely(!req_ctx->hmac_virt))
			goto free_buf_dst;
		if (!encrypt) {
			scatterwalk_map_and_copy(req_ctx->hmac_virt,
				req->src, cryptlen, authsize, 0);
		}
		req_ctx->encrypt = encrypt;
	} else {
		req_ctx->hmac_virt = NULL;
	}

	crypt->ctl_flags |= CTL_FLAG_PERFORM_AEAD;
	qmgr_put_entry(SEND_QID, crypt_virt2phys(crypt));
	BUG_ON(qmgr_stat_overflow(SEND_QID));
	return -EINPROGRESS;

free_buf_dst:
	free_buf_chain(dev, req_ctx->dst, crypt->dst_buf);
free_buf_src:
	free_buf_chain(dev, req_ctx->src, crypt->src_buf);
	crypt->ctl_flags = CTL_FLAG_UNUSED;
	return -ENOMEM;
}

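/*
 * (Re)build both directions of the NPE context from the cached cipher and
 * auth keys, then wait until the NPE has acknowledged every outstanding
 * configuration descriptor.
 */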
aead_setup(struct crypto_aead * tfm,unsigned int authsize)1108*4882a593Smuzhiyun static int aead_setup(struct crypto_aead *tfm, unsigned int authsize)
1109*4882a593Smuzhiyun {
1110*4882a593Smuzhiyun 	struct ixp_ctx *ctx = crypto_aead_ctx(tfm);
1111*4882a593Smuzhiyun 	unsigned digest_len = crypto_aead_maxauthsize(tfm);
1112*4882a593Smuzhiyun 	int ret;
1113*4882a593Smuzhiyun 
1114*4882a593Smuzhiyun 	if (!ctx->enckey_len && !ctx->authkey_len)
1115*4882a593Smuzhiyun 		return 0;
1116*4882a593Smuzhiyun 	init_completion(&ctx->completion);
1117*4882a593Smuzhiyun 	atomic_inc(&ctx->configuring);
1118*4882a593Smuzhiyun 
1119*4882a593Smuzhiyun 	reset_sa_dir(&ctx->encrypt);
1120*4882a593Smuzhiyun 	reset_sa_dir(&ctx->decrypt);
1121*4882a593Smuzhiyun 
1122*4882a593Smuzhiyun 	ret = setup_cipher(&tfm->base, 0, ctx->enckey, ctx->enckey_len);
1123*4882a593Smuzhiyun 	if (ret)
1124*4882a593Smuzhiyun 		goto out;
1125*4882a593Smuzhiyun 	ret = setup_cipher(&tfm->base, 1, ctx->enckey, ctx->enckey_len);
1126*4882a593Smuzhiyun 	if (ret)
1127*4882a593Smuzhiyun 		goto out;
1128*4882a593Smuzhiyun 	ret = setup_auth(&tfm->base, 0, authsize, ctx->authkey,
1129*4882a593Smuzhiyun 			ctx->authkey_len, digest_len);
1130*4882a593Smuzhiyun 	if (ret)
1131*4882a593Smuzhiyun 		goto out;
1132*4882a593Smuzhiyun 	ret = setup_auth(&tfm->base, 1, authsize,  ctx->authkey,
1133*4882a593Smuzhiyun 			ctx->authkey_len, digest_len);
1134*4882a593Smuzhiyun out:
1135*4882a593Smuzhiyun 	if (!atomic_dec_and_test(&ctx->configuring))
1136*4882a593Smuzhiyun 		wait_for_completion(&ctx->completion);
1137*4882a593Smuzhiyun 	return ret;
1138*4882a593Smuzhiyun }
1139*4882a593Smuzhiyun 
aead_setauthsize(struct crypto_aead * tfm,unsigned int authsize)1140*4882a593Smuzhiyun static int aead_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
1141*4882a593Smuzhiyun {
1142*4882a593Smuzhiyun 	int max = crypto_aead_maxauthsize(tfm) >> 2;
1143*4882a593Smuzhiyun 
1144*4882a593Smuzhiyun 	if ((authsize>>2) < 1 || (authsize>>2) > max || (authsize & 3))
1145*4882a593Smuzhiyun 		return -EINVAL;
1146*4882a593Smuzhiyun 	return aead_setup(tfm, authsize);
1147*4882a593Smuzhiyun }
1148*4882a593Smuzhiyun 
aead_setkey(struct crypto_aead * tfm,const u8 * key,unsigned int keylen)1149*4882a593Smuzhiyun static int aead_setkey(struct crypto_aead *tfm, const u8 *key,
1150*4882a593Smuzhiyun 			unsigned int keylen)
1151*4882a593Smuzhiyun {
1152*4882a593Smuzhiyun 	struct ixp_ctx *ctx = crypto_aead_ctx(tfm);
1153*4882a593Smuzhiyun 	struct crypto_authenc_keys keys;
1154*4882a593Smuzhiyun 
1155*4882a593Smuzhiyun 	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
1156*4882a593Smuzhiyun 		goto badkey;
1157*4882a593Smuzhiyun 
1158*4882a593Smuzhiyun 	if (keys.authkeylen > sizeof(ctx->authkey))
1159*4882a593Smuzhiyun 		goto badkey;
1160*4882a593Smuzhiyun 
1161*4882a593Smuzhiyun 	if (keys.enckeylen > sizeof(ctx->enckey))
1162*4882a593Smuzhiyun 		goto badkey;
1163*4882a593Smuzhiyun 
1164*4882a593Smuzhiyun 	memcpy(ctx->authkey, keys.authkey, keys.authkeylen);
1165*4882a593Smuzhiyun 	memcpy(ctx->enckey, keys.enckey, keys.enckeylen);
1166*4882a593Smuzhiyun 	ctx->authkey_len = keys.authkeylen;
1167*4882a593Smuzhiyun 	ctx->enckey_len = keys.enckeylen;
1168*4882a593Smuzhiyun 
1169*4882a593Smuzhiyun 	memzero_explicit(&keys, sizeof(keys));
1170*4882a593Smuzhiyun 	return aead_setup(tfm, crypto_aead_authsize(tfm));
1171*4882a593Smuzhiyun badkey:
1172*4882a593Smuzhiyun 	memzero_explicit(&keys, sizeof(keys));
1173*4882a593Smuzhiyun 	return -EINVAL;
1174*4882a593Smuzhiyun }
1175*4882a593Smuzhiyun 
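/*
 * Like aead_setkey(), except the cipher half of the blob must also
 * pass verify_aead_des3_key(), which rejects degenerate 3DES keys
 * (e.g. K1 == K2) where weak-key checking applies.
 */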
1176*4882a593Smuzhiyun static int des3_aead_setkey(struct crypto_aead *tfm, const u8 *key,
1177*4882a593Smuzhiyun 			    unsigned int keylen)
1178*4882a593Smuzhiyun {
1179*4882a593Smuzhiyun 	struct ixp_ctx *ctx = crypto_aead_ctx(tfm);
1180*4882a593Smuzhiyun 	struct crypto_authenc_keys keys;
1181*4882a593Smuzhiyun 	int err;
1182*4882a593Smuzhiyun 
1183*4882a593Smuzhiyun 	err = crypto_authenc_extractkeys(&keys, key, keylen);
1184*4882a593Smuzhiyun 	if (unlikely(err))
1185*4882a593Smuzhiyun 		goto badkey;
1186*4882a593Smuzhiyun 
1187*4882a593Smuzhiyun 	err = -EINVAL;
1188*4882a593Smuzhiyun 	if (keys.authkeylen > sizeof(ctx->authkey))
1189*4882a593Smuzhiyun 		goto badkey;
1190*4882a593Smuzhiyun 
1191*4882a593Smuzhiyun 	err = verify_aead_des3_key(tfm, keys.enckey, keys.enckeylen);
1192*4882a593Smuzhiyun 	if (err)
1193*4882a593Smuzhiyun 		goto badkey;
1194*4882a593Smuzhiyun 
1195*4882a593Smuzhiyun 	memcpy(ctx->authkey, keys.authkey, keys.authkeylen);
1196*4882a593Smuzhiyun 	memcpy(ctx->enckey, keys.enckey, keys.enckeylen);
1197*4882a593Smuzhiyun 	ctx->authkey_len = keys.authkeylen;
1198*4882a593Smuzhiyun 	ctx->enckey_len = keys.enckeylen;
1199*4882a593Smuzhiyun 
1200*4882a593Smuzhiyun 	memzero_explicit(&keys, sizeof(keys));
1201*4882a593Smuzhiyun 	return aead_setup(tfm, crypto_aead_authsize(tfm));
1202*4882a593Smuzhiyun badkey:
1203*4882a593Smuzhiyun 	memzero_explicit(&keys, sizeof(keys));
1204*4882a593Smuzhiyun 	return err;
1205*4882a593Smuzhiyun }
1206*4882a593Smuzhiyun 
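/*
 * Thin wrappers around aead_perform(); the second argument selects
 * the direction (1 = encrypt, 0 = decrypt).
 */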
1207*4882a593Smuzhiyun static int aead_encrypt(struct aead_request *req)
1208*4882a593Smuzhiyun {
1209*4882a593Smuzhiyun 	return aead_perform(req, 1, req->assoclen, req->cryptlen, req->iv);
1210*4882a593Smuzhiyun }
1211*4882a593Smuzhiyun 
1212*4882a593Smuzhiyun static int aead_decrypt(struct aead_request *req)
1213*4882a593Smuzhiyun {
1214*4882a593Smuzhiyun 	return aead_perform(req, 0, req->assoclen, req->cryptlen, req->iv);
1215*4882a593Smuzhiyun }
1216*4882a593Smuzhiyun 
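/*
 * skcipher templates.  cfg_enc/cfg_dec are the NPE crypt-config words:
 * direction (CIPH_ENCR/CIPH_DECR) | cipher (MOD_DES/MOD_3DES/MOD_AES)
 * | chaining mode | key length in 32-bit words.  The DES/3DES entries
 * hard-code KEYLEN_192 (the key slot apparently always spans the full
 * 24-byte 3DES block), while the AES entries get their length filled
 * in at setkey time.  CTR uses CIPH_ENCR for both directions because
 * decryption applies the same keystream.  Callbacks left NULL here are
 * given the common ablk_* defaults during registration below.
 */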
1217*4882a593Smuzhiyun static struct ixp_alg ixp4xx_algos[] = {
1218*4882a593Smuzhiyun {
1219*4882a593Smuzhiyun 	.crypto	= {
1220*4882a593Smuzhiyun 		.base.cra_name		= "cbc(des)",
1221*4882a593Smuzhiyun 		.base.cra_blocksize	= DES_BLOCK_SIZE,
1222*4882a593Smuzhiyun 
1223*4882a593Smuzhiyun 		.min_keysize		= DES_KEY_SIZE,
1224*4882a593Smuzhiyun 		.max_keysize		= DES_KEY_SIZE,
1225*4882a593Smuzhiyun 		.ivsize			= DES_BLOCK_SIZE,
1226*4882a593Smuzhiyun 	},
1227*4882a593Smuzhiyun 	.cfg_enc = CIPH_ENCR | MOD_DES | MOD_CBC_ENC | KEYLEN_192,
1228*4882a593Smuzhiyun 	.cfg_dec = CIPH_DECR | MOD_DES | MOD_CBC_DEC | KEYLEN_192,
1229*4882a593Smuzhiyun 
1230*4882a593Smuzhiyun }, {
1231*4882a593Smuzhiyun 	.crypto	= {
1232*4882a593Smuzhiyun 		.base.cra_name		= "ecb(des)",
1233*4882a593Smuzhiyun 		.base.cra_blocksize	= DES_BLOCK_SIZE,
1234*4882a593Smuzhiyun 		.min_keysize		= DES_KEY_SIZE,
1235*4882a593Smuzhiyun 		.max_keysize		= DES_KEY_SIZE,
1236*4882a593Smuzhiyun 	},
1237*4882a593Smuzhiyun 	.cfg_enc = CIPH_ENCR | MOD_DES | MOD_ECB | KEYLEN_192,
1238*4882a593Smuzhiyun 	.cfg_dec = CIPH_DECR | MOD_DES | MOD_ECB | KEYLEN_192,
1239*4882a593Smuzhiyun }, {
1240*4882a593Smuzhiyun 	.crypto	= {
1241*4882a593Smuzhiyun 		.base.cra_name		= "cbc(des3_ede)",
1242*4882a593Smuzhiyun 		.base.cra_blocksize	= DES3_EDE_BLOCK_SIZE,
1243*4882a593Smuzhiyun 
1244*4882a593Smuzhiyun 		.min_keysize		= DES3_EDE_KEY_SIZE,
1245*4882a593Smuzhiyun 		.max_keysize		= DES3_EDE_KEY_SIZE,
1246*4882a593Smuzhiyun 		.ivsize			= DES3_EDE_BLOCK_SIZE,
1247*4882a593Smuzhiyun 		.setkey			= ablk_des3_setkey,
1248*4882a593Smuzhiyun 	},
1249*4882a593Smuzhiyun 	.cfg_enc = CIPH_ENCR | MOD_3DES | MOD_CBC_ENC | KEYLEN_192,
1250*4882a593Smuzhiyun 	.cfg_dec = CIPH_DECR | MOD_3DES | MOD_CBC_DEC | KEYLEN_192,
1251*4882a593Smuzhiyun }, {
1252*4882a593Smuzhiyun 	.crypto	= {
1253*4882a593Smuzhiyun 		.base.cra_name		= "ecb(des3_ede)",
1254*4882a593Smuzhiyun 		.base.cra_blocksize	= DES3_EDE_BLOCK_SIZE,
1255*4882a593Smuzhiyun 
1256*4882a593Smuzhiyun 		.min_keysize		= DES3_EDE_KEY_SIZE,
1257*4882a593Smuzhiyun 		.max_keysize		= DES3_EDE_KEY_SIZE,
1258*4882a593Smuzhiyun 		.setkey			= ablk_des3_setkey,
1259*4882a593Smuzhiyun 	},
1260*4882a593Smuzhiyun 	.cfg_enc = CIPH_ENCR | MOD_3DES | MOD_ECB | KEYLEN_192,
1261*4882a593Smuzhiyun 	.cfg_dec = CIPH_DECR | MOD_3DES | MOD_ECB | KEYLEN_192,
1262*4882a593Smuzhiyun }, {
1263*4882a593Smuzhiyun 	.crypto	= {
1264*4882a593Smuzhiyun 		.base.cra_name		= "cbc(aes)",
1265*4882a593Smuzhiyun 		.base.cra_blocksize	= AES_BLOCK_SIZE,
1266*4882a593Smuzhiyun 
1267*4882a593Smuzhiyun 		.min_keysize		= AES_MIN_KEY_SIZE,
1268*4882a593Smuzhiyun 		.max_keysize		= AES_MAX_KEY_SIZE,
1269*4882a593Smuzhiyun 		.ivsize			= AES_BLOCK_SIZE,
1270*4882a593Smuzhiyun 	},
1271*4882a593Smuzhiyun 	.cfg_enc = CIPH_ENCR | MOD_AES | MOD_CBC_ENC,
1272*4882a593Smuzhiyun 	.cfg_dec = CIPH_DECR | MOD_AES | MOD_CBC_DEC,
1273*4882a593Smuzhiyun }, {
1274*4882a593Smuzhiyun 	.crypto	= {
1275*4882a593Smuzhiyun 		.base.cra_name		= "ecb(aes)",
1276*4882a593Smuzhiyun 		.base.cra_blocksize	= AES_BLOCK_SIZE,
1277*4882a593Smuzhiyun 
1278*4882a593Smuzhiyun 		.min_keysize		= AES_MIN_KEY_SIZE,
1279*4882a593Smuzhiyun 		.max_keysize		= AES_MAX_KEY_SIZE,
1280*4882a593Smuzhiyun 	},
1281*4882a593Smuzhiyun 	.cfg_enc = CIPH_ENCR | MOD_AES | MOD_ECB,
1282*4882a593Smuzhiyun 	.cfg_dec = CIPH_DECR | MOD_AES | MOD_ECB,
1283*4882a593Smuzhiyun }, {
1284*4882a593Smuzhiyun 	.crypto	= {
1285*4882a593Smuzhiyun 		.base.cra_name		= "ctr(aes)",
1286*4882a593Smuzhiyun 		.base.cra_blocksize	= 1,
1287*4882a593Smuzhiyun 
1288*4882a593Smuzhiyun 		.min_keysize		= AES_MIN_KEY_SIZE,
1289*4882a593Smuzhiyun 		.max_keysize		= AES_MAX_KEY_SIZE,
1290*4882a593Smuzhiyun 		.ivsize			= AES_BLOCK_SIZE,
1291*4882a593Smuzhiyun 	},
1292*4882a593Smuzhiyun 	.cfg_enc = CIPH_ENCR | MOD_AES | MOD_CTR,
1293*4882a593Smuzhiyun 	.cfg_dec = CIPH_ENCR | MOD_AES | MOD_CTR,
1294*4882a593Smuzhiyun }, {
1295*4882a593Smuzhiyun 	.crypto	= {
1296*4882a593Smuzhiyun 		.base.cra_name		= "rfc3686(ctr(aes))",
1297*4882a593Smuzhiyun 		.base.cra_blocksize	= 1,
1298*4882a593Smuzhiyun 
1299*4882a593Smuzhiyun 		.min_keysize		= AES_MIN_KEY_SIZE,
1300*4882a593Smuzhiyun 		.max_keysize		= AES_MAX_KEY_SIZE,
1301*4882a593Smuzhiyun 		.ivsize			= AES_BLOCK_SIZE,
1302*4882a593Smuzhiyun 		.setkey			= ablk_rfc3686_setkey,
1303*4882a593Smuzhiyun 		.encrypt		= ablk_rfc3686_crypt,
1304*4882a593Smuzhiyun 		.decrypt		= ablk_rfc3686_crypt,
1305*4882a593Smuzhiyun 	},
1306*4882a593Smuzhiyun 	.cfg_enc = CIPH_ENCR | MOD_AES | MOD_CTR,
1307*4882a593Smuzhiyun 	.cfg_dec = CIPH_ENCR | MOD_AES | MOD_CTR,
1308*4882a593Smuzhiyun } };
1309*4882a593Smuzhiyun 
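/*
 * AEAD (IPsec-style authenc) templates: HMAC-MD5 or HMAC-SHA1 paired
 * with CBC mode of DES, 3DES or AES.  '.hash' references the per-hash
 * parameter structures (hash_alg_md5/hash_alg_sha1) defined earlier
 * in this file; the cfg words are built as in the skcipher table.
 */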
1310*4882a593Smuzhiyun static struct ixp_aead_alg ixp4xx_aeads[] = {
1311*4882a593Smuzhiyun {
1312*4882a593Smuzhiyun 	.crypto	= {
1313*4882a593Smuzhiyun 		.base = {
1314*4882a593Smuzhiyun 			.cra_name	= "authenc(hmac(md5),cbc(des))",
1315*4882a593Smuzhiyun 			.cra_blocksize	= DES_BLOCK_SIZE,
1316*4882a593Smuzhiyun 		},
1317*4882a593Smuzhiyun 		.ivsize		= DES_BLOCK_SIZE,
1318*4882a593Smuzhiyun 		.maxauthsize	= MD5_DIGEST_SIZE,
1319*4882a593Smuzhiyun 	},
1320*4882a593Smuzhiyun 	.hash = &hash_alg_md5,
1321*4882a593Smuzhiyun 	.cfg_enc = CIPH_ENCR | MOD_DES | MOD_CBC_ENC | KEYLEN_192,
1322*4882a593Smuzhiyun 	.cfg_dec = CIPH_DECR | MOD_DES | MOD_CBC_DEC | KEYLEN_192,
1323*4882a593Smuzhiyun }, {
1324*4882a593Smuzhiyun 	.crypto	= {
1325*4882a593Smuzhiyun 		.base = {
1326*4882a593Smuzhiyun 			.cra_name	= "authenc(hmac(md5),cbc(des3_ede))",
1327*4882a593Smuzhiyun 			.cra_blocksize	= DES3_EDE_BLOCK_SIZE,
1328*4882a593Smuzhiyun 		},
1329*4882a593Smuzhiyun 		.ivsize		= DES3_EDE_BLOCK_SIZE,
1330*4882a593Smuzhiyun 		.maxauthsize	= MD5_DIGEST_SIZE,
1331*4882a593Smuzhiyun 		.setkey		= des3_aead_setkey,
1332*4882a593Smuzhiyun 	},
1333*4882a593Smuzhiyun 	.hash = &hash_alg_md5,
1334*4882a593Smuzhiyun 	.cfg_enc = CIPH_ENCR | MOD_3DES | MOD_CBC_ENC | KEYLEN_192,
1335*4882a593Smuzhiyun 	.cfg_dec = CIPH_DECR | MOD_3DES | MOD_CBC_DEC | KEYLEN_192,
1336*4882a593Smuzhiyun }, {
1337*4882a593Smuzhiyun 	.crypto	= {
1338*4882a593Smuzhiyun 		.base = {
1339*4882a593Smuzhiyun 			.cra_name	= "authenc(hmac(sha1),cbc(des))",
1340*4882a593Smuzhiyun 			.cra_blocksize	= DES_BLOCK_SIZE,
1341*4882a593Smuzhiyun 		},
1342*4882a593Smuzhiyun 		.ivsize		= DES_BLOCK_SIZE,
1343*4882a593Smuzhiyun 		.maxauthsize	= SHA1_DIGEST_SIZE,
1344*4882a593Smuzhiyun 	},
1345*4882a593Smuzhiyun 	.hash = &hash_alg_sha1,
1346*4882a593Smuzhiyun 	.cfg_enc = CIPH_ENCR | MOD_DES | MOD_CBC_ENC | KEYLEN_192,
1347*4882a593Smuzhiyun 	.cfg_dec = CIPH_DECR | MOD_DES | MOD_CBC_DEC | KEYLEN_192,
1348*4882a593Smuzhiyun }, {
1349*4882a593Smuzhiyun 	.crypto	= {
1350*4882a593Smuzhiyun 		.base = {
1351*4882a593Smuzhiyun 			.cra_name	= "authenc(hmac(sha1),cbc(des3_ede))",
1352*4882a593Smuzhiyun 			.cra_blocksize	= DES3_EDE_BLOCK_SIZE,
1353*4882a593Smuzhiyun 		},
1354*4882a593Smuzhiyun 		.ivsize		= DES3_EDE_BLOCK_SIZE,
1355*4882a593Smuzhiyun 		.maxauthsize	= SHA1_DIGEST_SIZE,
1356*4882a593Smuzhiyun 		.setkey		= des3_aead_setkey,
1357*4882a593Smuzhiyun 	},
1358*4882a593Smuzhiyun 	.hash = &hash_alg_sha1,
1359*4882a593Smuzhiyun 	.cfg_enc = CIPH_ENCR | MOD_3DES | MOD_CBC_ENC | KEYLEN_192,
1360*4882a593Smuzhiyun 	.cfg_dec = CIPH_DECR | MOD_3DES | MOD_CBC_DEC | KEYLEN_192,
1361*4882a593Smuzhiyun }, {
1362*4882a593Smuzhiyun 	.crypto	= {
1363*4882a593Smuzhiyun 		.base = {
1364*4882a593Smuzhiyun 			.cra_name	= "authenc(hmac(md5),cbc(aes))",
1365*4882a593Smuzhiyun 			.cra_blocksize	= AES_BLOCK_SIZE,
1366*4882a593Smuzhiyun 		},
1367*4882a593Smuzhiyun 		.ivsize		= AES_BLOCK_SIZE,
1368*4882a593Smuzhiyun 		.maxauthsize	= MD5_DIGEST_SIZE,
1369*4882a593Smuzhiyun 	},
1370*4882a593Smuzhiyun 	.hash = &hash_alg_md5,
1371*4882a593Smuzhiyun 	.cfg_enc = CIPH_ENCR | MOD_AES | MOD_CBC_ENC,
1372*4882a593Smuzhiyun 	.cfg_dec = CIPH_DECR | MOD_AES | MOD_CBC_DEC,
1373*4882a593Smuzhiyun }, {
1374*4882a593Smuzhiyun 	.crypto	= {
1375*4882a593Smuzhiyun 		.base = {
1376*4882a593Smuzhiyun 			.cra_name	= "authenc(hmac(sha1),cbc(aes))",
1377*4882a593Smuzhiyun 			.cra_blocksize	= AES_BLOCK_SIZE,
1378*4882a593Smuzhiyun 		},
1379*4882a593Smuzhiyun 		.ivsize		= AES_BLOCK_SIZE,
1380*4882a593Smuzhiyun 		.maxauthsize	= SHA1_DIGEST_SIZE,
1381*4882a593Smuzhiyun 	},
1382*4882a593Smuzhiyun 	.hash = &hash_alg_sha1,
1383*4882a593Smuzhiyun 	.cfg_enc = CIPH_ENCR | MOD_AES | MOD_CBC_ENC,
1384*4882a593Smuzhiyun 	.cfg_dec = CIPH_DECR | MOD_AES | MOD_CBC_DEC,
1385*4882a593Smuzhiyun } };
1386*4882a593Smuzhiyun 
1387*4882a593Smuzhiyun #define IXP_POSTFIX "-ixp4xx"
1388*4882a593Smuzhiyun 
1389*4882a593Smuzhiyun static const struct platform_device_info ixp_dev_info __initdata = {
1390*4882a593Smuzhiyun 	.name		= DRIVER_NAME,
1391*4882a593Smuzhiyun 	.id		= 0,
1392*4882a593Smuzhiyun 	.dma_mask	= DMA_BIT_MASK(32),
1393*4882a593Smuzhiyun };
1394*4882a593Smuzhiyun 
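/*
 * Register a platform device so the driver has a struct device with a
 * 32-bit DMA mask for its allocations, bring up the NPE and queues via
 * init_ixp_crypto(), then register every algorithm whose "-ixp4xx"
 * driver name fits in CRYPTO_MAX_ALG_NAME and whose cipher this NPE
 * firmware supports (support_aes gates the AES-based entries).
 */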
1395*4882a593Smuzhiyun static int __init ixp_module_init(void)
1396*4882a593Smuzhiyun {
1397*4882a593Smuzhiyun 	int num = ARRAY_SIZE(ixp4xx_algos);
1398*4882a593Smuzhiyun 	int i, err;
1399*4882a593Smuzhiyun 
1400*4882a593Smuzhiyun 	pdev = platform_device_register_full(&ixp_dev_info);
1401*4882a593Smuzhiyun 	if (IS_ERR(pdev))
1402*4882a593Smuzhiyun 		return PTR_ERR(pdev);
1403*4882a593Smuzhiyun 
1404*4882a593Smuzhiyun 	spin_lock_init(&desc_lock);
1405*4882a593Smuzhiyun 	spin_lock_init(&emerg_lock);
1406*4882a593Smuzhiyun 
1407*4882a593Smuzhiyun 	err = init_ixp_crypto(&pdev->dev);
1408*4882a593Smuzhiyun 	if (err) {
1409*4882a593Smuzhiyun 		platform_device_unregister(pdev);
1410*4882a593Smuzhiyun 		return err;
1411*4882a593Smuzhiyun 	}
1412*4882a593Smuzhiyun 	for (i = 0; i < num; i++) {
1413*4882a593Smuzhiyun 		struct skcipher_alg *cra = &ixp4xx_algos[i].crypto;
1414*4882a593Smuzhiyun 
1415*4882a593Smuzhiyun 		if (snprintf(cra->base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
1416*4882a593Smuzhiyun 			     "%s"IXP_POSTFIX, cra->base.cra_name) >=
1417*4882a593Smuzhiyun 		    CRYPTO_MAX_ALG_NAME)
1418*4882a593Smuzhiyun 			continue;
1421*4882a593Smuzhiyun 		if (!support_aes && (ixp4xx_algos[i].cfg_enc & MOD_AES))
1422*4882a593Smuzhiyun 			continue;
1424*4882a593Smuzhiyun 
1425*4882a593Smuzhiyun 		/* block ciphers */
1426*4882a593Smuzhiyun 		cra->base.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY |
1427*4882a593Smuzhiyun 				      CRYPTO_ALG_ASYNC |
1428*4882a593Smuzhiyun 				      CRYPTO_ALG_ALLOCATES_MEMORY;
1429*4882a593Smuzhiyun 		if (!cra->setkey)
1430*4882a593Smuzhiyun 			cra->setkey = ablk_setkey;
1431*4882a593Smuzhiyun 		if (!cra->encrypt)
1432*4882a593Smuzhiyun 			cra->encrypt = ablk_encrypt;
1433*4882a593Smuzhiyun 		if (!cra->decrypt)
1434*4882a593Smuzhiyun 			cra->decrypt = ablk_decrypt;
1435*4882a593Smuzhiyun 		cra->init = init_tfm_ablk;
1436*4882a593Smuzhiyun 		cra->exit = exit_tfm_ablk;
1437*4882a593Smuzhiyun 
1438*4882a593Smuzhiyun 		cra->base.cra_ctxsize = sizeof(struct ixp_ctx);
1439*4882a593Smuzhiyun 		cra->base.cra_module = THIS_MODULE;
1440*4882a593Smuzhiyun 		cra->base.cra_alignmask = 3;
1441*4882a593Smuzhiyun 		cra->base.cra_priority = 300;
1442*4882a593Smuzhiyun 		if (crypto_register_skcipher(cra))
1443*4882a593Smuzhiyun 			printk(KERN_ERR "Failed to register '%s'\n",
1444*4882a593Smuzhiyun 				cra->base.cra_name);
1445*4882a593Smuzhiyun 		else
1446*4882a593Smuzhiyun 			ixp4xx_algos[i].registered = 1;
1447*4882a593Smuzhiyun 	}
1448*4882a593Smuzhiyun 
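	/* Register the AEAD algorithms the same way; entries without a
	 * setkey callback fall back to the generic aead_setkey().
	 */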
1449*4882a593Smuzhiyun 	for (i = 0; i < ARRAY_SIZE(ixp4xx_aeads); i++) {
1450*4882a593Smuzhiyun 		struct aead_alg *cra = &ixp4xx_aeads[i].crypto;
1451*4882a593Smuzhiyun 
1452*4882a593Smuzhiyun 		if (snprintf(cra->base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
1453*4882a593Smuzhiyun 			     "%s"IXP_POSTFIX, cra->base.cra_name) >=
1454*4882a593Smuzhiyun 		    CRYPTO_MAX_ALG_NAME)
1455*4882a593Smuzhiyun 			continue;
1456*4882a593Smuzhiyun 		if (!support_aes && (ixp4xx_aeads[i].cfg_enc & MOD_AES))
1457*4882a593Smuzhiyun 			continue;
1458*4882a593Smuzhiyun 
1459*4882a593Smuzhiyun 		/* authenc */
1460*4882a593Smuzhiyun 		cra->base.cra_flags = CRYPTO_ALG_KERN_DRIVER_ONLY |
1461*4882a593Smuzhiyun 				      CRYPTO_ALG_ASYNC |
1462*4882a593Smuzhiyun 				      CRYPTO_ALG_ALLOCATES_MEMORY;
1463*4882a593Smuzhiyun 		cra->setkey = cra->setkey ?: aead_setkey;
1464*4882a593Smuzhiyun 		cra->setauthsize = aead_setauthsize;
1465*4882a593Smuzhiyun 		cra->encrypt = aead_encrypt;
1466*4882a593Smuzhiyun 		cra->decrypt = aead_decrypt;
1467*4882a593Smuzhiyun 		cra->init = init_tfm_aead;
1468*4882a593Smuzhiyun 		cra->exit = exit_tfm_aead;
1469*4882a593Smuzhiyun 
1470*4882a593Smuzhiyun 		cra->base.cra_ctxsize = sizeof(struct ixp_ctx);
1471*4882a593Smuzhiyun 		cra->base.cra_module = THIS_MODULE;
1472*4882a593Smuzhiyun 		cra->base.cra_alignmask = 3;
1473*4882a593Smuzhiyun 		cra->base.cra_priority = 300;
1474*4882a593Smuzhiyun 
1475*4882a593Smuzhiyun 		if (crypto_register_aead(cra))
1476*4882a593Smuzhiyun 			printk(KERN_ERR "Failed to register '%s'\n",
1477*4882a593Smuzhiyun 				cra->base.cra_driver_name);
1478*4882a593Smuzhiyun 		else
1479*4882a593Smuzhiyun 			ixp4xx_aeads[i].registered = 1;
1480*4882a593Smuzhiyun 	}
1481*4882a593Smuzhiyun 	return 0;
1482*4882a593Smuzhiyun }
1483*4882a593Smuzhiyun 
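/*
 * Teardown mirrors init: unregister only what actually registered
 * (tracked by the 'registered' flags), release the NPE and queue
 * resources, and drop the platform device last.
 */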
1484*4882a593Smuzhiyun static void __exit ixp_module_exit(void)
1485*4882a593Smuzhiyun {
1486*4882a593Smuzhiyun 	int num = ARRAY_SIZE(ixp4xx_algos);
1487*4882a593Smuzhiyun 	int i;
1488*4882a593Smuzhiyun 
1489*4882a593Smuzhiyun 	for (i = 0; i < ARRAY_SIZE(ixp4xx_aeads); i++) {
1490*4882a593Smuzhiyun 		if (ixp4xx_aeads[i].registered)
1491*4882a593Smuzhiyun 			crypto_unregister_aead(&ixp4xx_aeads[i].crypto);
1492*4882a593Smuzhiyun 	}
1493*4882a593Smuzhiyun 
1494*4882a593Smuzhiyun 	for (i = 0; i < num; i++) {
1495*4882a593Smuzhiyun 		if (ixp4xx_algos[i].registered)
1496*4882a593Smuzhiyun 			crypto_unregister_skcipher(&ixp4xx_algos[i].crypto);
1497*4882a593Smuzhiyun 	}
1498*4882a593Smuzhiyun 	release_ixp_crypto(&pdev->dev);
1499*4882a593Smuzhiyun 	platform_device_unregister(pdev);
1500*4882a593Smuzhiyun }
1501*4882a593Smuzhiyun 
1502*4882a593Smuzhiyun module_init(ixp_module_init);
1503*4882a593Smuzhiyun module_exit(ixp_module_exit);
1504*4882a593Smuzhiyun 
1505*4882a593Smuzhiyun MODULE_LICENSE("GPL");
1506*4882a593Smuzhiyun MODULE_AUTHOR("Christian Hohnstaedt <chohnstaedt@innominate.com>");
1507*4882a593Smuzhiyun MODULE_DESCRIPTION("IXP4xx hardware crypto");
1508*4882a593Smuzhiyun 
1509