xref: /OK3568_Linux_fs/kernel/drivers/crypto/talitos.c (revision 4882a59341e53eb6f0b4789bf948001014eff981)
1*4882a593Smuzhiyun // SPDX-License-Identifier: GPL-2.0-or-later
2*4882a593Smuzhiyun /*
3*4882a593Smuzhiyun  * talitos - Freescale Integrated Security Engine (SEC) device driver
4*4882a593Smuzhiyun  *
5*4882a593Smuzhiyun  * Copyright (c) 2008-2011 Freescale Semiconductor, Inc.
6*4882a593Smuzhiyun  *
7*4882a593Smuzhiyun  * Scatterlist Crypto API glue code copied from files with the following:
8*4882a593Smuzhiyun  * Copyright (c) 2006-2007 Herbert Xu <herbert@gondor.apana.org.au>
9*4882a593Smuzhiyun  *
10*4882a593Smuzhiyun  * Crypto algorithm registration code copied from hifn driver:
11*4882a593Smuzhiyun  * 2007+ Copyright (c) Evgeniy Polyakov <johnpol@2ka.mipt.ru>
12*4882a593Smuzhiyun  * All rights reserved.
13*4882a593Smuzhiyun  */
14*4882a593Smuzhiyun 
15*4882a593Smuzhiyun #include <linux/kernel.h>
16*4882a593Smuzhiyun #include <linux/module.h>
17*4882a593Smuzhiyun #include <linux/mod_devicetable.h>
18*4882a593Smuzhiyun #include <linux/device.h>
19*4882a593Smuzhiyun #include <linux/interrupt.h>
20*4882a593Smuzhiyun #include <linux/crypto.h>
21*4882a593Smuzhiyun #include <linux/hw_random.h>
22*4882a593Smuzhiyun #include <linux/of_address.h>
23*4882a593Smuzhiyun #include <linux/of_irq.h>
24*4882a593Smuzhiyun #include <linux/of_platform.h>
25*4882a593Smuzhiyun #include <linux/dma-mapping.h>
26*4882a593Smuzhiyun #include <linux/io.h>
27*4882a593Smuzhiyun #include <linux/spinlock.h>
28*4882a593Smuzhiyun #include <linux/rtnetlink.h>
29*4882a593Smuzhiyun #include <linux/slab.h>
30*4882a593Smuzhiyun 
31*4882a593Smuzhiyun #include <crypto/algapi.h>
32*4882a593Smuzhiyun #include <crypto/aes.h>
33*4882a593Smuzhiyun #include <crypto/internal/des.h>
34*4882a593Smuzhiyun #include <crypto/sha.h>
35*4882a593Smuzhiyun #include <crypto/md5.h>
36*4882a593Smuzhiyun #include <crypto/internal/aead.h>
37*4882a593Smuzhiyun #include <crypto/authenc.h>
38*4882a593Smuzhiyun #include <crypto/internal/skcipher.h>
39*4882a593Smuzhiyun #include <crypto/hash.h>
40*4882a593Smuzhiyun #include <crypto/internal/hash.h>
41*4882a593Smuzhiyun #include <crypto/scatterwalk.h>
42*4882a593Smuzhiyun 
43*4882a593Smuzhiyun #include "talitos.h"
44*4882a593Smuzhiyun 
/*
 * Write a dma address and length into a h/w descriptor pointer,
 * honouring the SEC1 vs SEC2+ layout difference.
 */
static void to_talitos_ptr(struct talitos_ptr *ptr, dma_addr_t dma_addr,
			   unsigned int len, bool is_sec1)
{
	ptr->ptr = cpu_to_be32(lower_32_bits(dma_addr));
	if (!is_sec1) {
		/* SEC2+: 16-bit length plus extended address bits 32-35 */
		ptr->len = cpu_to_be16(len);
		ptr->eptr = upper_32_bits(dma_addr);
	} else {
		/* SEC1: single 16-bit length field, 32-bit addressing only */
		ptr->len1 = cpu_to_be16(len);
	}
}
56*4882a593Smuzhiyun 
/*
 * Duplicate a h/w descriptor pointer, copying whichever length (and
 * extended-address) fields exist for this SEC generation.
 */
static void copy_talitos_ptr(struct talitos_ptr *dst_ptr,
			     struct talitos_ptr *src_ptr, bool is_sec1)
{
	dst_ptr->ptr = src_ptr->ptr;
	if (!is_sec1) {
		dst_ptr->len = src_ptr->len;
		dst_ptr->eptr = src_ptr->eptr;
	} else {
		dst_ptr->len1 = src_ptr->len1;
	}
}
68*4882a593Smuzhiyun 
/* Read back the length stored in a h/w descriptor pointer (CPU order). */
static unsigned short from_talitos_ptr_len(struct talitos_ptr *ptr,
					   bool is_sec1)
{
	return be16_to_cpu(is_sec1 ? ptr->len1 : ptr->len);
}
77*4882a593Smuzhiyun 
/* Set the jump/extent byte; SEC1 descriptors have no j_extent field. */
static void to_talitos_ptr_ext_set(struct talitos_ptr *ptr, u8 val,
				   bool is_sec1)
{
	if (is_sec1)
		return;
	ptr->j_extent = val;
}
84*4882a593Smuzhiyun 
/* OR bits into the jump/extent byte; no-op on SEC1 (field absent). */
static void to_talitos_ptr_ext_or(struct talitos_ptr *ptr, u8 val, bool is_sec1)
{
	if (is_sec1)
		return;
	ptr->j_extent |= val;
}
90*4882a593Smuzhiyun 
91*4882a593Smuzhiyun /*
92*4882a593Smuzhiyun  * map virtual single (contiguous) pointer to h/w descriptor pointer
93*4882a593Smuzhiyun  */
__map_single_talitos_ptr(struct device * dev,struct talitos_ptr * ptr,unsigned int len,void * data,enum dma_data_direction dir,unsigned long attrs)94*4882a593Smuzhiyun static void __map_single_talitos_ptr(struct device *dev,
95*4882a593Smuzhiyun 				     struct talitos_ptr *ptr,
96*4882a593Smuzhiyun 				     unsigned int len, void *data,
97*4882a593Smuzhiyun 				     enum dma_data_direction dir,
98*4882a593Smuzhiyun 				     unsigned long attrs)
99*4882a593Smuzhiyun {
100*4882a593Smuzhiyun 	dma_addr_t dma_addr = dma_map_single_attrs(dev, data, len, dir, attrs);
101*4882a593Smuzhiyun 	struct talitos_private *priv = dev_get_drvdata(dev);
102*4882a593Smuzhiyun 	bool is_sec1 = has_ftr_sec1(priv);
103*4882a593Smuzhiyun 
104*4882a593Smuzhiyun 	to_talitos_ptr(ptr, dma_addr, len, is_sec1);
105*4882a593Smuzhiyun }
106*4882a593Smuzhiyun 
/* Map a single buffer into a descriptor pointer with default DMA attrs. */
static void map_single_talitos_ptr(struct device *dev,
				   struct talitos_ptr *ptr,
				   unsigned int len, void *data,
				   enum dma_data_direction dir)
{
	__map_single_talitos_ptr(dev, ptr, len, data, dir, 0);
}
114*4882a593Smuzhiyun 
/*
 * Same as map_single_talitos_ptr(), but passes DMA_ATTR_SKIP_CPU_SYNC so
 * the mapping itself performs no CPU cache maintenance.
 */
static void map_single_talitos_ptr_nosync(struct device *dev,
					  struct talitos_ptr *ptr,
					  unsigned int len, void *data,
					  enum dma_data_direction dir)
{
	__map_single_talitos_ptr(dev, ptr, len, data, dir,
				 DMA_ATTR_SKIP_CPU_SYNC);
}
123*4882a593Smuzhiyun 
124*4882a593Smuzhiyun /*
125*4882a593Smuzhiyun  * unmap bus single (contiguous) h/w descriptor pointer
126*4882a593Smuzhiyun  */
static void unmap_single_talitos_ptr(struct device *dev,
				     struct talitos_ptr *ptr,
				     enum dma_data_direction dir)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);

	/*
	 * NOTE(review): only the low 32 address bits (ptr->ptr) are handed
	 * back to dma_unmap_single(); ptr->eptr (bits 32-35 on SEC2+) is
	 * dropped — presumably the platforms in use never map above 4GiB.
	 * Confirm for 36-bit DMA configurations.
	 */
	dma_unmap_single(dev, be32_to_cpu(ptr->ptr),
			 from_talitos_ptr_len(ptr, is_sec1), dir);
}
137*4882a593Smuzhiyun 
/*
 * Reset a single channel and re-arm its configuration bits.
 * Returns -EIO if the self-clearing reset bit is still set after
 * TALITOS_TIMEOUT polls.
 */
static int reset_channel(struct device *dev, int ch)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	unsigned int timeout = TALITOS_TIMEOUT;
	bool is_sec1 = has_ftr_sec1(priv);

	if (is_sec1) {
		/* SEC1: the reset bit lives in CCCR_LO */
		setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
			  TALITOS1_CCCR_LO_RESET);

		/* busy-wait for the h/w to clear the self-resetting bit */
		while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR_LO) &
			TALITOS1_CCCR_LO_RESET) && --timeout)
			cpu_relax();
	} else {
		/* SEC2+: the reset bit lives in CCCR */
		setbits32(priv->chan[ch].reg + TALITOS_CCCR,
			  TALITOS2_CCCR_RESET);

		while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR) &
			TALITOS2_CCCR_RESET) && --timeout)
			cpu_relax();
	}

	if (timeout == 0) {
		dev_err(dev, "failed to reset channel %d\n", ch);
		return -EIO;
	}

	/* set 36-bit addressing, done writeback enable and done IRQ enable */
	setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO, TALITOS_CCCR_LO_EAE |
		  TALITOS_CCCR_LO_CDWE | TALITOS_CCCR_LO_CDIE);
	/* enable chaining descriptors */
	if (is_sec1)
		setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
			  TALITOS_CCCR_LO_NE);

	/* and ICCR writeback, if available */
	if (priv->features & TALITOS_FTR_HW_AUTH_CHECK)
		setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
		          TALITOS_CCCR_LO_IWSE);

	return 0;
}
180*4882a593Smuzhiyun 
/*
 * Issue a software reset of the whole SEC block.
 * Returns -EIO if the self-clearing SWR bit is still set after
 * TALITOS_TIMEOUT polls.
 */
static int reset_device(struct device *dev)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	unsigned int timeout = TALITOS_TIMEOUT;
	bool is_sec1 = has_ftr_sec1(priv);
	u32 mcr = is_sec1 ? TALITOS1_MCR_SWR : TALITOS2_MCR_SWR;

	setbits32(priv->reg + TALITOS_MCR, mcr);

	while ((in_be32(priv->reg + TALITOS_MCR) & mcr)
	       && --timeout)
		cpu_relax();

	/*
	 * With a second IRQ line present, set the RCA1|RCA3 channel
	 * reassignment bits (presumably routing channels 1 and 3 to the
	 * second interrupt — verify against the SEC reference manual).
	 */
	if (priv->irq[1]) {
		mcr = TALITOS_MCR_RCA1 | TALITOS_MCR_RCA3;
		setbits32(priv->reg + TALITOS_MCR, mcr);
	}

	if (timeout == 0) {
		dev_err(dev, "failed to reset device\n");
		return -EIO;
	}

	return 0;
}
206*4882a593Smuzhiyun 
207*4882a593Smuzhiyun /*
208*4882a593Smuzhiyun  * Reset and initialize the device
209*4882a593Smuzhiyun  */
static int init_device(struct device *dev)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	int ch, err;
	bool is_sec1 = has_ftr_sec1(priv);

	/*
	 * Master reset
	 * errata documentation: warning: certain SEC interrupts
	 * are not fully cleared by writing the MCR:SWR bit,
	 * set bit twice to completely reset
	 */
	err = reset_device(dev);
	if (err)
		return err;

	err = reset_device(dev);
	if (err)
		return err;

	/* reset channels */
	for (ch = 0; ch < priv->num_channels; ch++) {
		err = reset_channel(dev, ch);
		if (err)
			return err;
	}

	/* enable channel done and error interrupts */
	if (is_sec1) {
		/* SEC1 mask bits are cleared to enable an interrupt */
		clrbits32(priv->reg + TALITOS_IMR, TALITOS1_IMR_INIT);
		clrbits32(priv->reg + TALITOS_IMR_LO, TALITOS1_IMR_LO_INIT);
		/* disable parity error check in DEU (erroneous? test vect.) */
		setbits32(priv->reg_deu + TALITOS_EUICR, TALITOS1_DEUICR_KPE);
	} else {
		/* SEC2+ mask bits are set to enable an interrupt */
		setbits32(priv->reg + TALITOS_IMR, TALITOS2_IMR_INIT);
		setbits32(priv->reg + TALITOS_IMR_LO, TALITOS2_IMR_LO_INIT);
	}

	/* disable integrity check error interrupts (use writeback instead) */
	if (priv->features & TALITOS_FTR_HW_AUTH_CHECK)
		setbits32(priv->reg_mdeu + TALITOS_EUICR_LO,
		          TALITOS_MDEUICR_LO_ICE);

	return 0;
}
255*4882a593Smuzhiyun 
256*4882a593Smuzhiyun /**
257*4882a593Smuzhiyun  * talitos_submit - submits a descriptor to the device for processing
258*4882a593Smuzhiyun  * @dev:	the SEC device to be used
259*4882a593Smuzhiyun  * @ch:		the SEC device channel to be used
260*4882a593Smuzhiyun  * @desc:	the descriptor to be processed by the device
261*4882a593Smuzhiyun  * @callback:	whom to call when processing is complete
262*4882a593Smuzhiyun  * @context:	a handle for use by caller (optional)
263*4882a593Smuzhiyun  *
264*4882a593Smuzhiyun  * desc must contain valid dma-mapped (bus physical) address pointers.
265*4882a593Smuzhiyun  * callback must check err and feedback in descriptor header
266*4882a593Smuzhiyun  * for device processing status.
267*4882a593Smuzhiyun  */
static int talitos_submit(struct device *dev, int ch, struct talitos_desc *desc,
			  void (*callback)(struct device *dev,
					   struct talitos_desc *desc,
					   void *context, int error),
			  void *context)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	struct talitos_request *request;
	unsigned long flags;
	int head;
	bool is_sec1 = has_ftr_sec1(priv);

	spin_lock_irqsave(&priv->chan[ch].head_lock, flags);

	/* submit_count reaching zero means no fifo slots are left */
	if (!atomic_inc_not_zero(&priv->chan[ch].submit_count)) {
		/* h/w fifo is full */
		spin_unlock_irqrestore(&priv->chan[ch].head_lock, flags);
		return -EAGAIN;
	}

	head = priv->chan[ch].head;
	request = &priv->chan[ch].fifo[head];

	/* map descriptor and save caller data */
	if (is_sec1) {
		/* SEC1 consumes the descriptor starting at hdr1, not hdr */
		desc->hdr1 = desc->hdr;
		request->dma_desc = dma_map_single(dev, &desc->hdr1,
						   TALITOS_DESC_SIZE,
						   DMA_BIDIRECTIONAL);
	} else {
		request->dma_desc = dma_map_single(dev, desc,
						   TALITOS_DESC_SIZE,
						   DMA_BIDIRECTIONAL);
	}
	request->callback = callback;
	request->context = context;

	/* increment fifo head */
	priv->chan[ch].head = (priv->chan[ch].head + 1) & (priv->fifo_len - 1);

	/* publish the filled-in request before marking the slot in use;
	 * flush_channel() treats a non-NULL desc as "slot occupied" */
	smp_wmb();
	request->desc = desc;

	/* GO! */
	wmb();
	out_be32(priv->chan[ch].reg + TALITOS_FF,
		 upper_32_bits(request->dma_desc));
	out_be32(priv->chan[ch].reg + TALITOS_FF_LO,
		 lower_32_bits(request->dma_desc));

	spin_unlock_irqrestore(&priv->chan[ch].head_lock, flags);

	return -EINPROGRESS;
}
322*4882a593Smuzhiyun 
get_request_hdr(struct talitos_request * request,bool is_sec1)323*4882a593Smuzhiyun static __be32 get_request_hdr(struct talitos_request *request, bool is_sec1)
324*4882a593Smuzhiyun {
325*4882a593Smuzhiyun 	struct talitos_edesc *edesc;
326*4882a593Smuzhiyun 
327*4882a593Smuzhiyun 	if (!is_sec1)
328*4882a593Smuzhiyun 		return request->desc->hdr;
329*4882a593Smuzhiyun 
330*4882a593Smuzhiyun 	if (!request->desc->next_desc)
331*4882a593Smuzhiyun 		return request->desc->hdr1;
332*4882a593Smuzhiyun 
333*4882a593Smuzhiyun 	edesc = container_of(request->desc, struct talitos_edesc, desc);
334*4882a593Smuzhiyun 
335*4882a593Smuzhiyun 	return ((struct talitos_desc *)(edesc->buf + edesc->dma_len))->hdr1;
336*4882a593Smuzhiyun }
337*4882a593Smuzhiyun 
338*4882a593Smuzhiyun /*
339*4882a593Smuzhiyun  * process what was done, notify callback of error if not
340*4882a593Smuzhiyun  */
/*
 * Reap completed requests from a channel's s/w fifo, starting at the tail.
 * Done descriptors complete with status 0; if @error is set, descriptors
 * without their done bit complete with @error instead.  Callbacks run with
 * the tail lock dropped; the loop reloads the tail afterwards.
 */
static void flush_channel(struct device *dev, int ch, int error, int reset_ch)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	struct talitos_request *request, saved_req;
	unsigned long flags;
	int tail, status;
	bool is_sec1 = has_ftr_sec1(priv);

	spin_lock_irqsave(&priv->chan[ch].tail_lock, flags);

	tail = priv->chan[ch].tail;
	while (priv->chan[ch].fifo[tail].desc) {
		__be32 hdr;

		request = &priv->chan[ch].fifo[tail];

		/* descriptors with their done bits set don't get the error */
		rmb();
		hdr = get_request_hdr(request, is_sec1);

		if ((hdr & DESC_HDR_DONE) == DESC_HDR_DONE)
			status = 0;
		else
			if (!error)
				break;	/* not done and no error: stop at first pending */
			else
				status = error;

		dma_unmap_single(dev, request->dma_desc,
				 TALITOS_DESC_SIZE,
				 DMA_BIDIRECTIONAL);

		/* copy entries so we can call callback outside lock */
		saved_req.desc = request->desc;
		saved_req.callback = request->callback;
		saved_req.context = request->context;

		/* release request entry in fifo */
		smp_wmb();
		request->desc = NULL;

		/* increment fifo tail */
		priv->chan[ch].tail = (tail + 1) & (priv->fifo_len - 1);

		spin_unlock_irqrestore(&priv->chan[ch].tail_lock, flags);

		/* free the slot talitos_submit() accounted for */
		atomic_dec(&priv->chan[ch].submit_count);

		saved_req.callback(dev, saved_req.desc, saved_req.context,
				   status);
		/* channel may resume processing in single desc error case */
		if (error && !reset_ch && status == error)
			return;
		spin_lock_irqsave(&priv->chan[ch].tail_lock, flags);
		tail = priv->chan[ch].tail;
	}

	spin_unlock_irqrestore(&priv->chan[ch].tail_lock, flags);
}
400*4882a593Smuzhiyun 
401*4882a593Smuzhiyun /*
402*4882a593Smuzhiyun  * process completed requests for channels that have done status
403*4882a593Smuzhiyun  */
/*
 * SEC1 channel-done tasklet body: flush every channel whose done bit is
 * set in ch_done_mask (SEC1 ISR bit layout: ch0 = bit 28, ch1 = bit 30,
 * ch2 = bit 16, ch3 = bit 18), then re-enable those done interrupts.
 * SEC1 interrupt masks are active-low: clrbits32() unmasks.
 */
#define DEF_TALITOS1_DONE(name, ch_done_mask)				\
static void talitos1_done_##name(unsigned long data)			\
{									\
	struct device *dev = (struct device *)data;			\
	struct talitos_private *priv = dev_get_drvdata(dev);		\
	unsigned long flags;						\
									\
	if (ch_done_mask & 0x10000000)					\
		flush_channel(dev, 0, 0, 0);			\
	if (ch_done_mask & 0x40000000)					\
		flush_channel(dev, 1, 0, 0);			\
	if (ch_done_mask & 0x00010000)					\
		flush_channel(dev, 2, 0, 0);			\
	if (ch_done_mask & 0x00040000)					\
		flush_channel(dev, 3, 0, 0);			\
									\
	/* At this point, all completed channels have been processed */	\
	/* Unmask done interrupts for channels completed later on. */	\
	spin_lock_irqsave(&priv->reg_lock, flags);			\
	clrbits32(priv->reg + TALITOS_IMR, ch_done_mask);		\
	clrbits32(priv->reg + TALITOS_IMR_LO, TALITOS1_IMR_LO_INIT);	\
	spin_unlock_irqrestore(&priv->reg_lock, flags);			\
}

/* one tasklet for all four channels, one for channel 0 alone */
DEF_TALITOS1_DONE(4ch, TALITOS1_ISR_4CHDONE)
DEF_TALITOS1_DONE(ch0, TALITOS1_ISR_CH_0_DONE)
430*4882a593Smuzhiyun 
/*
 * SEC2+ channel-done tasklet body: flush every channel whose done bit is
 * set in ch_done_mask (SEC2 ISR bit layout: ch0 = bit 0, ch1 = bit 2,
 * ch2 = bit 4, ch3 = bit 6), then re-enable those done interrupts.
 * SEC2+ interrupt masks are active-high: setbits32() unmasks.
 */
#define DEF_TALITOS2_DONE(name, ch_done_mask)				\
static void talitos2_done_##name(unsigned long data)			\
{									\
	struct device *dev = (struct device *)data;			\
	struct talitos_private *priv = dev_get_drvdata(dev);		\
	unsigned long flags;						\
									\
	if (ch_done_mask & 1)						\
		flush_channel(dev, 0, 0, 0);				\
	if (ch_done_mask & (1 << 2))					\
		flush_channel(dev, 1, 0, 0);				\
	if (ch_done_mask & (1 << 4))					\
		flush_channel(dev, 2, 0, 0);				\
	if (ch_done_mask & (1 << 6))					\
		flush_channel(dev, 3, 0, 0);				\
									\
	/* At this point, all completed channels have been processed */	\
	/* Unmask done interrupts for channels completed later on. */	\
	spin_lock_irqsave(&priv->reg_lock, flags);			\
	setbits32(priv->reg + TALITOS_IMR, ch_done_mask);		\
	setbits32(priv->reg + TALITOS_IMR_LO, TALITOS2_IMR_LO_INIT);	\
	spin_unlock_irqrestore(&priv->reg_lock, flags);			\
}

/* variants for all channels, channel 0 only, and the per-IRQ pairs */
DEF_TALITOS2_DONE(4ch, TALITOS2_ISR_4CHDONE)
DEF_TALITOS2_DONE(ch0, TALITOS2_ISR_CH_0_DONE)
DEF_TALITOS2_DONE(ch0_2, TALITOS2_ISR_CH_0_2_DONE)
DEF_TALITOS2_DONE(ch1_3, TALITOS2_ISR_CH_1_3_DONE)
459*4882a593Smuzhiyun 
460*4882a593Smuzhiyun /*
461*4882a593Smuzhiyun  * locate current (offending) descriptor
462*4882a593Smuzhiyun  */
/*
 * Return the header of the descriptor the channel is currently pointing
 * at (per CDPR/CDPR_LO), or 0 if it cannot be located in the s/w fifo.
 */
static __be32 current_desc_hdr(struct device *dev, int ch)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	int tail, iter;
	dma_addr_t cur_desc;

	/* assemble the 64-bit current descriptor bus address */
	cur_desc = ((u64)in_be32(priv->chan[ch].reg + TALITOS_CDPR)) << 32;
	cur_desc |= in_be32(priv->chan[ch].reg + TALITOS_CDPR_LO);

	if (!cur_desc) {
		dev_err(dev, "CDPR is NULL, giving up search for offending descriptor\n");
		return 0;
	}

	tail = priv->chan[ch].tail;

	/*
	 * Walk the fifo from the tail looking for a request whose own
	 * mapping, or whose chained next_desc, matches cur_desc; give up
	 * after one full wrap.
	 */
	iter = tail;
	while (priv->chan[ch].fifo[iter].dma_desc != cur_desc &&
	       priv->chan[ch].fifo[iter].desc->next_desc != cpu_to_be32(cur_desc)) {
		iter = (iter + 1) & (priv->fifo_len - 1);
		if (iter == tail) {
			dev_err(dev, "couldn't locate current descriptor\n");
			return 0;
		}
	}

	if (priv->chan[ch].fifo[iter].desc->next_desc == cpu_to_be32(cur_desc)) {
		struct talitos_edesc *edesc;

		/* chained case: the offending descriptor is the extra one
		 * stored at the end of the edesc buffer */
		edesc = container_of(priv->chan[ch].fifo[iter].desc,
				     struct talitos_edesc, desc);
		return ((struct talitos_desc *)
			(edesc->buf + edesc->dma_len))->hdr;
	}

	return priv->chan[ch].fifo[iter].desc->hdr;
}
500*4882a593Smuzhiyun 
501*4882a593Smuzhiyun /*
502*4882a593Smuzhiyun  * user diagnostics; report root cause of error based on execution unit status
503*4882a593Smuzhiyun  */
static void report_eu_error(struct device *dev, int ch, __be32 desc_hdr)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	int i;

	/* no header located: fall back to the first word of the channel's
	 * descriptor buffer to identify the execution unit */
	if (!desc_hdr)
		desc_hdr = cpu_to_be32(in_be32(priv->chan[ch].reg + TALITOS_DESCBUF));

	/* dump the interrupt status of the primary execution unit */
	switch (desc_hdr & DESC_HDR_SEL0_MASK) {
	case DESC_HDR_SEL0_AFEU:
		dev_err(dev, "AFEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_afeu + TALITOS_EUISR),
			in_be32(priv->reg_afeu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_DEU:
		dev_err(dev, "DEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_deu + TALITOS_EUISR),
			in_be32(priv->reg_deu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_MDEUA:
	case DESC_HDR_SEL0_MDEUB:
		dev_err(dev, "MDEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_mdeu + TALITOS_EUISR),
			in_be32(priv->reg_mdeu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_RNG:
		dev_err(dev, "RNGUISR 0x%08x_%08x\n",
			in_be32(priv->reg_rngu + TALITOS_ISR),
			in_be32(priv->reg_rngu + TALITOS_ISR_LO));
		break;
	case DESC_HDR_SEL0_PKEU:
		dev_err(dev, "PKEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_pkeu + TALITOS_EUISR),
			in_be32(priv->reg_pkeu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_AESU:
		dev_err(dev, "AESUISR 0x%08x_%08x\n",
			in_be32(priv->reg_aesu + TALITOS_EUISR),
			in_be32(priv->reg_aesu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_CRCU:
		dev_err(dev, "CRCUISR 0x%08x_%08x\n",
			in_be32(priv->reg_crcu + TALITOS_EUISR),
			in_be32(priv->reg_crcu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_KEU:
		/* NOTE(review): KEU status is read via reg_pkeu here —
		 * verify against the SEC register map that KEU shares the
		 * PKEU register window */
		dev_err(dev, "KEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_pkeu + TALITOS_EUISR),
			in_be32(priv->reg_pkeu + TALITOS_EUISR_LO));
		break;
	}

	/* dump the interrupt status of the secondary execution unit */
	switch (desc_hdr & DESC_HDR_SEL1_MASK) {
	case DESC_HDR_SEL1_MDEUA:
	case DESC_HDR_SEL1_MDEUB:
		dev_err(dev, "MDEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_mdeu + TALITOS_EUISR),
			in_be32(priv->reg_mdeu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL1_CRCU:
		dev_err(dev, "CRCUISR 0x%08x_%08x\n",
			in_be32(priv->reg_crcu + TALITOS_EUISR),
			in_be32(priv->reg_crcu + TALITOS_EUISR_LO));
		break;
	}

	/* dump the channel's 8-entry descriptor buffer */
	for (i = 0; i < 8; i++)
		dev_err(dev, "DESCBUF 0x%08x_%08x\n",
			in_be32(priv->chan[ch].reg + TALITOS_DESCBUF + 8*i),
			in_be32(priv->chan[ch].reg + TALITOS_DESCBUF_LO + 8*i));
}
575*4882a593Smuzhiyun 
/*
 * recover from error interrupts
 *
 * For every channel flagged in @isr: decode and log the error causes
 * from CCPSR_LO, flush the channel's in-flight requests, then either
 * reset the channel or (SEC2+ only) ask it to continue past the error.
 * A channel that cannot be restarted, or any error bit outside the
 * per-channel error mask, escalates to a full device reset.
 */
static void talitos_error(struct device *dev, u32 isr, u32 isr_lo)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	unsigned int timeout = TALITOS_TIMEOUT;
	int ch, error, reset_dev = 0;
	u32 v_lo;
	bool is_sec1 = has_ftr_sec1(priv);
	int reset_ch = is_sec1 ? 1 : 0; /* only SEC2 supports continuation */

	for (ch = 0; ch < priv->num_channels; ch++) {
		/* skip channels without errors */
		if (is_sec1) {
			/* bits 29, 31, 17, 19 */
			if (!(isr & (1 << (29 + (ch & 1) * 2 - (ch & 2) * 6))))
				continue;
		} else {
			if (!(isr & (1 << (ch * 2 + 1))))
				continue;
		}

		/* default completion status unless a more specific cause hits */
		error = -EINVAL;

		v_lo = in_be32(priv->chan[ch].reg + TALITOS_CCPSR_LO);

		if (v_lo & TALITOS_CCPSR_LO_DOF) {
			dev_err(dev, "double fetch fifo overflow error\n");
			error = -EAGAIN;
			/* NOTE(review): reset_ch stays set for subsequent
			 * channels in this pass; matches mainline behaviour.
			 */
			reset_ch = 1;
		}
		if (v_lo & TALITOS_CCPSR_LO_SOF) {
			/* h/w dropped descriptor */
			dev_err(dev, "single fetch fifo overflow error\n");
			error = -EAGAIN;
		}
		if (v_lo & TALITOS_CCPSR_LO_MDTE)
			dev_err(dev, "master data transfer error\n");
		if (v_lo & TALITOS_CCPSR_LO_SGDLZ)
			dev_err(dev, is_sec1 ? "pointer not complete error\n"
					     : "s/g data length zero error\n");
		if (v_lo & TALITOS_CCPSR_LO_FPZ)
			dev_err(dev, is_sec1 ? "parity error\n"
					     : "fetch pointer zero error\n");
		if (v_lo & TALITOS_CCPSR_LO_IDH)
			dev_err(dev, "illegal descriptor header error\n");
		if (v_lo & TALITOS_CCPSR_LO_IEU)
			dev_err(dev, is_sec1 ? "static assignment error\n"
					     : "invalid exec unit error\n");
		if (v_lo & TALITOS_CCPSR_LO_EU)
			report_eu_error(dev, ch, current_desc_hdr(dev, ch));
		/* the scatter/gather error bits only exist on SEC2+ */
		if (!is_sec1) {
			if (v_lo & TALITOS_CCPSR_LO_GB)
				dev_err(dev, "gather boundary error\n");
			if (v_lo & TALITOS_CCPSR_LO_GRL)
				dev_err(dev, "gather return/length error\n");
			if (v_lo & TALITOS_CCPSR_LO_SB)
				dev_err(dev, "scatter boundary error\n");
			if (v_lo & TALITOS_CCPSR_LO_SRL)
				dev_err(dev, "scatter return/length error\n");
		}

		/* complete the channel's outstanding requests with 'error' */
		flush_channel(dev, ch, error, reset_ch);

		if (reset_ch) {
			reset_channel(dev, ch);
		} else {
			/* SEC2: request continuation and wait for the bit to clear */
			setbits32(priv->chan[ch].reg + TALITOS_CCCR,
				  TALITOS2_CCCR_CONT);
			setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO, 0);
			while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR) &
			       TALITOS2_CCCR_CONT) && --timeout)
				cpu_relax();
			if (timeout == 0) {
				dev_err(dev, "failed to restart channel %d\n",
					ch);
				reset_dev = 1;
			}
		}
	}
	/* global or unrecoverable errors: reset and reinit the whole device */
	if (reset_dev || (is_sec1 && isr & ~TALITOS1_ISR_4CHERR) ||
	    (!is_sec1 && isr & ~TALITOS2_ISR_4CHERR) || isr_lo) {
		if (is_sec1 && (isr_lo & TALITOS1_ISR_TEA_ERR))
			dev_err(dev, "TEA error: ISR 0x%08x_%08x\n",
				isr, isr_lo);
		else
			dev_err(dev, "done overflow, internal time out, or "
				"rngu error: ISR 0x%08x_%08x\n", isr, isr_lo);

		/* purge request queues */
		for (ch = 0; ch < priv->num_channels; ch++)
			flush_channel(dev, ch, -EIO, 1);

		/* reset and reinitialize the device */
		init_device(dev);
	}
}
674*4882a593Smuzhiyun 
/*
 * DEF_TALITOS1_INTERRUPT() - generate a SEC1 top-half IRQ handler.
 *
 * The generated handler reads and acknowledges ISR/ISR_LO under
 * reg_lock.  On a channel error (or a SEC1 init error in ISR_LO) it
 * drops the lock and lets talitos_error() recover; otherwise it masks
 * further done interrupts and schedules the done tasklet, which unmasks
 * them on exit.  Note the masking uses setbits32() here while the SEC2
 * variant uses clrbits32(): the SEC1 interrupt registers have inverted
 * polarity.
 */
#define DEF_TALITOS1_INTERRUPT(name, ch_done_mask, ch_err_mask, tlet)	       \
static irqreturn_t talitos1_interrupt_##name(int irq, void *data)	       \
{									       \
	struct device *dev = data;					       \
	struct talitos_private *priv = dev_get_drvdata(dev);		       \
	u32 isr, isr_lo;						       \
	unsigned long flags;						       \
									       \
	spin_lock_irqsave(&priv->reg_lock, flags);			       \
	isr = in_be32(priv->reg + TALITOS_ISR);				       \
	isr_lo = in_be32(priv->reg + TALITOS_ISR_LO);			       \
	/* Acknowledge interrupt */					       \
	out_be32(priv->reg + TALITOS_ICR, isr & (ch_done_mask | ch_err_mask)); \
	out_be32(priv->reg + TALITOS_ICR_LO, isr_lo);			       \
									       \
	if (unlikely(isr & ch_err_mask || isr_lo & TALITOS1_IMR_LO_INIT)) {    \
		spin_unlock_irqrestore(&priv->reg_lock, flags);		       \
		talitos_error(dev, isr & ch_err_mask, isr_lo);		       \
	}								       \
	else {								       \
		if (likely(isr & ch_done_mask)) {			       \
			/* mask further done interrupts. */		       \
			setbits32(priv->reg + TALITOS_IMR, ch_done_mask);      \
			/* done_task will unmask done interrupts at exit */    \
			tasklet_schedule(&priv->done_task[tlet]);	       \
		}							       \
		spin_unlock_irqrestore(&priv->reg_lock, flags);		       \
	}								       \
									       \
	return (isr & (ch_done_mask | ch_err_mask) || isr_lo) ? IRQ_HANDLED :  \
								IRQ_NONE;      \
}

DEF_TALITOS1_INTERRUPT(4ch, TALITOS1_ISR_4CHDONE, TALITOS1_ISR_4CHERR, 0)
709*4882a593Smuzhiyun 
/*
 * DEF_TALITOS2_INTERRUPT() - generate a SEC2+ top-half IRQ handler.
 *
 * Same structure as the SEC1 variant: read/ack ISR under reg_lock,
 * hand channel errors (or any ISR_LO bit) to talitos_error() with the
 * lock dropped, otherwise mask done interrupts (clrbits32 - normal
 * polarity on SEC2) and kick the done tasklet for this IRQ's channel
 * pair.  Three instantiations below cover the single-IRQ case and the
 * split ch0/2 - ch1/3 dual-IRQ wiring.
 */
#define DEF_TALITOS2_INTERRUPT(name, ch_done_mask, ch_err_mask, tlet)	       \
static irqreturn_t talitos2_interrupt_##name(int irq, void *data)	       \
{									       \
	struct device *dev = data;					       \
	struct talitos_private *priv = dev_get_drvdata(dev);		       \
	u32 isr, isr_lo;						       \
	unsigned long flags;						       \
									       \
	spin_lock_irqsave(&priv->reg_lock, flags);			       \
	isr = in_be32(priv->reg + TALITOS_ISR);				       \
	isr_lo = in_be32(priv->reg + TALITOS_ISR_LO);			       \
	/* Acknowledge interrupt */					       \
	out_be32(priv->reg + TALITOS_ICR, isr & (ch_done_mask | ch_err_mask)); \
	out_be32(priv->reg + TALITOS_ICR_LO, isr_lo);			       \
									       \
	if (unlikely(isr & ch_err_mask || isr_lo)) {			       \
		spin_unlock_irqrestore(&priv->reg_lock, flags);		       \
		talitos_error(dev, isr & ch_err_mask, isr_lo);		       \
	}								       \
	else {								       \
		if (likely(isr & ch_done_mask)) {			       \
			/* mask further done interrupts. */		       \
			clrbits32(priv->reg + TALITOS_IMR, ch_done_mask);      \
			/* done_task will unmask done interrupts at exit */    \
			tasklet_schedule(&priv->done_task[tlet]);	       \
		}							       \
		spin_unlock_irqrestore(&priv->reg_lock, flags);		       \
	}								       \
									       \
	return (isr & (ch_done_mask | ch_err_mask) || isr_lo) ? IRQ_HANDLED :  \
								IRQ_NONE;      \
}

DEF_TALITOS2_INTERRUPT(4ch, TALITOS2_ISR_4CHDONE, TALITOS2_ISR_4CHERR, 0)
DEF_TALITOS2_INTERRUPT(ch0_2, TALITOS2_ISR_CH_0_2_DONE, TALITOS2_ISR_CH_0_2_ERR,
		       0)
DEF_TALITOS2_INTERRUPT(ch1_3, TALITOS2_ISR_CH_1_3_DONE, TALITOS2_ISR_CH_1_3_ERR,
		       1)
748*4882a593Smuzhiyun 
749*4882a593Smuzhiyun /*
750*4882a593Smuzhiyun  * hwrng
751*4882a593Smuzhiyun  */
talitos_rng_data_present(struct hwrng * rng,int wait)752*4882a593Smuzhiyun static int talitos_rng_data_present(struct hwrng *rng, int wait)
753*4882a593Smuzhiyun {
754*4882a593Smuzhiyun 	struct device *dev = (struct device *)rng->priv;
755*4882a593Smuzhiyun 	struct talitos_private *priv = dev_get_drvdata(dev);
756*4882a593Smuzhiyun 	u32 ofl;
757*4882a593Smuzhiyun 	int i;
758*4882a593Smuzhiyun 
759*4882a593Smuzhiyun 	for (i = 0; i < 20; i++) {
760*4882a593Smuzhiyun 		ofl = in_be32(priv->reg_rngu + TALITOS_EUSR_LO) &
761*4882a593Smuzhiyun 		      TALITOS_RNGUSR_LO_OFL;
762*4882a593Smuzhiyun 		if (ofl || !wait)
763*4882a593Smuzhiyun 			break;
764*4882a593Smuzhiyun 		udelay(10);
765*4882a593Smuzhiyun 	}
766*4882a593Smuzhiyun 
767*4882a593Smuzhiyun 	return !!ofl;
768*4882a593Smuzhiyun }
769*4882a593Smuzhiyun 
/*
 * talitos_rng_data_read() - hwrng .data_read callback.
 *
 * The RNGU FIFO must be drained with 64-bit accesses, so the high word
 * is read and discarded and only the low word is handed to the caller.
 * Always returns sizeof(u32).
 */
static int talitos_rng_data_read(struct hwrng *rng, u32 *data)
{
	struct device *dev = (struct device *)rng->priv;
	struct talitos_private *priv = dev_get_drvdata(dev);

	/* rng fifo requires 64-bit accesses: consume high word, keep low */
	(void)in_be32(priv->reg_rngu + TALITOS_EU_FIFO);
	*data = in_be32(priv->reg_rngu + TALITOS_EU_FIFO_LO);

	return sizeof(u32);
}
781*4882a593Smuzhiyun 
talitos_rng_init(struct hwrng * rng)782*4882a593Smuzhiyun static int talitos_rng_init(struct hwrng *rng)
783*4882a593Smuzhiyun {
784*4882a593Smuzhiyun 	struct device *dev = (struct device *)rng->priv;
785*4882a593Smuzhiyun 	struct talitos_private *priv = dev_get_drvdata(dev);
786*4882a593Smuzhiyun 	unsigned int timeout = TALITOS_TIMEOUT;
787*4882a593Smuzhiyun 
788*4882a593Smuzhiyun 	setbits32(priv->reg_rngu + TALITOS_EURCR_LO, TALITOS_RNGURCR_LO_SR);
789*4882a593Smuzhiyun 	while (!(in_be32(priv->reg_rngu + TALITOS_EUSR_LO)
790*4882a593Smuzhiyun 		 & TALITOS_RNGUSR_LO_RD)
791*4882a593Smuzhiyun 	       && --timeout)
792*4882a593Smuzhiyun 		cpu_relax();
793*4882a593Smuzhiyun 	if (timeout == 0) {
794*4882a593Smuzhiyun 		dev_err(dev, "failed to reset rng hw\n");
795*4882a593Smuzhiyun 		return -ENODEV;
796*4882a593Smuzhiyun 	}
797*4882a593Smuzhiyun 
798*4882a593Smuzhiyun 	/* start generating */
799*4882a593Smuzhiyun 	setbits32(priv->reg_rngu + TALITOS_EUDSR_LO, 0);
800*4882a593Smuzhiyun 
801*4882a593Smuzhiyun 	return 0;
802*4882a593Smuzhiyun }
803*4882a593Smuzhiyun 
talitos_register_rng(struct device * dev)804*4882a593Smuzhiyun static int talitos_register_rng(struct device *dev)
805*4882a593Smuzhiyun {
806*4882a593Smuzhiyun 	struct talitos_private *priv = dev_get_drvdata(dev);
807*4882a593Smuzhiyun 	int err;
808*4882a593Smuzhiyun 
809*4882a593Smuzhiyun 	priv->rng.name		= dev_driver_string(dev);
810*4882a593Smuzhiyun 	priv->rng.init		= talitos_rng_init;
811*4882a593Smuzhiyun 	priv->rng.data_present	= talitos_rng_data_present;
812*4882a593Smuzhiyun 	priv->rng.data_read	= talitos_rng_data_read;
813*4882a593Smuzhiyun 	priv->rng.priv		= (unsigned long)dev;
814*4882a593Smuzhiyun 
815*4882a593Smuzhiyun 	err = hwrng_register(&priv->rng);
816*4882a593Smuzhiyun 	if (!err)
817*4882a593Smuzhiyun 		priv->rng_registered = true;
818*4882a593Smuzhiyun 
819*4882a593Smuzhiyun 	return err;
820*4882a593Smuzhiyun }
821*4882a593Smuzhiyun 
talitos_unregister_rng(struct device * dev)822*4882a593Smuzhiyun static void talitos_unregister_rng(struct device *dev)
823*4882a593Smuzhiyun {
824*4882a593Smuzhiyun 	struct talitos_private *priv = dev_get_drvdata(dev);
825*4882a593Smuzhiyun 
826*4882a593Smuzhiyun 	if (!priv->rng_registered)
827*4882a593Smuzhiyun 		return;
828*4882a593Smuzhiyun 
829*4882a593Smuzhiyun 	hwrng_unregister(&priv->rng);
830*4882a593Smuzhiyun 	priv->rng_registered = false;
831*4882a593Smuzhiyun }
832*4882a593Smuzhiyun 
/*
 * crypto alg
 */
#define TALITOS_CRA_PRIORITY		3000
/*
 * Defines a priority for doing AEAD with descriptors type
 * HMAC_SNOOP_NO_AFEA (HSNA) instead of type IPSEC_ESP
 */
#define TALITOS_CRA_PRIORITY_AEAD_HSNA	(TALITOS_CRA_PRIORITY - 1)
/* worst-case authenc key: an AES key plus a block-sized HMAC auth key */
#ifdef CONFIG_CRYPTO_DEV_TALITOS2
#define TALITOS_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + SHA512_BLOCK_SIZE)
#else
#define TALITOS_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + SHA256_BLOCK_SIZE)
#endif
#define TALITOS_MAX_IV_LENGTH		16 /* max of AES_BLOCK_SIZE, DES3_EDE_BLOCK_SIZE */
848*4882a593Smuzhiyun 
/* per-transform (tfm) context shared by the AEAD/skcipher/ahash paths */
struct talitos_ctx {
	struct device *dev;		/* talitos device owning this tfm */
	int ch;				/* channel this tfm submits to */
	__be32 desc_hdr_template;	/* precomputed descriptor header */
	u8 key[TALITOS_MAX_KEY_SIZE];	/* auth key first, then enc key */
	u8 iv[TALITOS_MAX_IV_LENGTH];
	dma_addr_t dma_key;		/* DMA mapping of key[] */
	unsigned int keylen;		/* total valid bytes in key[] */
	unsigned int enckeylen;		/* encryption-key portion length */
	unsigned int authkeylen;	/* authentication-key portion length */
};
860*4882a593Smuzhiyun 
#define HASH_MAX_BLOCK_SIZE		SHA512_BLOCK_SIZE
#define TALITOS_MDEU_MAX_CONTEXT_SIZE	TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512

/* per-request state for the ahash algorithms */
struct talitos_ahash_req_ctx {
	u32 hw_context[TALITOS_MDEU_MAX_CONTEXT_SIZE / sizeof(u32)]; /* MDEU ctx */
	unsigned int hw_context_size;
	u8 buf[2][HASH_MAX_BLOCK_SIZE];	/* staging buffers for partial data */
	int buf_idx;			/* index of the active buf[] entry */
	unsigned int swinit;
	unsigned int first;
	unsigned int last;
	unsigned int to_hash_later;	/* bytes deferred to a later update */
	unsigned int nbuf;		/* bytes currently buffered */
	struct scatterlist bufsl[2];
	struct scatterlist *psrc;
};

/* serialized hash state produced by .export and consumed by .import */
struct talitos_export_state {
	u32 hw_context[TALITOS_MDEU_MAX_CONTEXT_SIZE / sizeof(u32)];
	u8 buf[HASH_MAX_BLOCK_SIZE];
	unsigned int swinit;
	unsigned int first;
	unsigned int last;
	unsigned int to_hash_later;
	unsigned int nbuf;
};
887*4882a593Smuzhiyun 
aead_setkey(struct crypto_aead * authenc,const u8 * key,unsigned int keylen)888*4882a593Smuzhiyun static int aead_setkey(struct crypto_aead *authenc,
889*4882a593Smuzhiyun 		       const u8 *key, unsigned int keylen)
890*4882a593Smuzhiyun {
891*4882a593Smuzhiyun 	struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
892*4882a593Smuzhiyun 	struct device *dev = ctx->dev;
893*4882a593Smuzhiyun 	struct crypto_authenc_keys keys;
894*4882a593Smuzhiyun 
895*4882a593Smuzhiyun 	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
896*4882a593Smuzhiyun 		goto badkey;
897*4882a593Smuzhiyun 
898*4882a593Smuzhiyun 	if (keys.authkeylen + keys.enckeylen > TALITOS_MAX_KEY_SIZE)
899*4882a593Smuzhiyun 		goto badkey;
900*4882a593Smuzhiyun 
901*4882a593Smuzhiyun 	if (ctx->keylen)
902*4882a593Smuzhiyun 		dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
903*4882a593Smuzhiyun 
904*4882a593Smuzhiyun 	memcpy(ctx->key, keys.authkey, keys.authkeylen);
905*4882a593Smuzhiyun 	memcpy(&ctx->key[keys.authkeylen], keys.enckey, keys.enckeylen);
906*4882a593Smuzhiyun 
907*4882a593Smuzhiyun 	ctx->keylen = keys.authkeylen + keys.enckeylen;
908*4882a593Smuzhiyun 	ctx->enckeylen = keys.enckeylen;
909*4882a593Smuzhiyun 	ctx->authkeylen = keys.authkeylen;
910*4882a593Smuzhiyun 	ctx->dma_key = dma_map_single(dev, ctx->key, ctx->keylen,
911*4882a593Smuzhiyun 				      DMA_TO_DEVICE);
912*4882a593Smuzhiyun 
913*4882a593Smuzhiyun 	memzero_explicit(&keys, sizeof(keys));
914*4882a593Smuzhiyun 	return 0;
915*4882a593Smuzhiyun 
916*4882a593Smuzhiyun badkey:
917*4882a593Smuzhiyun 	memzero_explicit(&keys, sizeof(keys));
918*4882a593Smuzhiyun 	return -EINVAL;
919*4882a593Smuzhiyun }
920*4882a593Smuzhiyun 
/*
 * aead_des3_setkey() - setkey for authenc(hmac(*),cbc(des3_ede)) tfms.
 *
 * Like aead_setkey() but additionally runs the encryption half through
 * verify_aead_des3_key() before accepting it.  On success the auth key
 * is stored at the start of ctx->key with the enc key right after it,
 * and the combined key is DMA-mapped for the SEC.  Stack copies of the
 * key material are wiped on every exit path.
 */
static int aead_des3_setkey(struct crypto_aead *authenc,
			    const u8 *key, unsigned int keylen)
{
	struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
	struct device *dev = ctx->dev;
	struct crypto_authenc_keys keys;
	int err;

	err = crypto_authenc_extractkeys(&keys, key, keylen);
	if (unlikely(err))
		goto out;

	err = -EINVAL;
	if (keys.authkeylen + keys.enckeylen > TALITOS_MAX_KEY_SIZE)
		goto out;

	err = verify_aead_des3_key(authenc, keys.enckey, keys.enckeylen);
	if (err)
		goto out;

	/* release the DMA mapping of any previously installed key */
	if (ctx->keylen)
		dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);

	memcpy(ctx->key, keys.authkey, keys.authkeylen);
	memcpy(&ctx->key[keys.authkeylen], keys.enckey, keys.enckeylen);

	ctx->keylen = keys.authkeylen + keys.enckeylen;
	ctx->enckeylen = keys.enckeylen;
	ctx->authkeylen = keys.authkeylen;
	ctx->dma_key = dma_map_single(dev, ctx->key, ctx->keylen,
				      DMA_TO_DEVICE);

out:
	memzero_explicit(&keys, sizeof(keys));
	return err;
}
957*4882a593Smuzhiyun 
/*
 * talitos_sg_unmap() - undo the DMA mapping of a request's src/dst lists.
 *
 * On SEC1, a multi-entry destination was bounced through the driver's
 * linear buffer (edesc->buf / dma_link_tbl), so the result is synced
 * and copied back out to @dst here.  Lists that were mapped with
 * dma_map_sg() (single entry, or any list on SEC2+) are unmapped with
 * the same direction they were mapped with; an in-place operation
 * (src == dst) was mapped once, bidirectionally.
 */
static void talitos_sg_unmap(struct device *dev,
			     struct talitos_edesc *edesc,
			     struct scatterlist *src,
			     struct scatterlist *dst,
			     unsigned int len, unsigned int offset)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	unsigned int src_nents = edesc->src_nents ? : 1;
	unsigned int dst_nents = edesc->dst_nents ? : 1;

	/* SEC1 bounce path: recover the device's output from the buffer */
	if (is_sec1 && dst && dst_nents > 1) {
		dma_sync_single_for_device(dev, edesc->dma_link_tbl + offset,
					   len, DMA_FROM_DEVICE);
		sg_pcopy_from_buffer(dst, dst_nents, edesc->buf + offset, len,
				     offset);
	}
	if (src != dst) {
		if (src_nents == 1 || !is_sec1)
			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);

		if (dst && (dst_nents == 1 || !is_sec1))
			dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
	} else if (src_nents == 1 || !is_sec1) {
		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
	}
}
985*4882a593Smuzhiyun 
/*
 * ipsec_esp_unmap() - release the DMA resources of a completed AEAD req.
 *
 * Mirrors the mapping done when the descriptor was built: the hash
 * result pointer (ptr[6], IPSEC_ESP type only), the cipher-IV pointer
 * (ptr[2] or ptr[3] depending on descriptor type), the src/dst
 * scatterlists, and the link table.  For the HMAC_SNOOP_NO_AFEA type
 * the last ivsize bytes of the result are also copied into ctx->iv
 * (NOTE(review): presumably for IV chaining - confirm against callers).
 */
static void ipsec_esp_unmap(struct device *dev,
			    struct talitos_edesc *edesc,
			    struct aead_request *areq, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	unsigned int authsize = crypto_aead_authsize(aead);
	/* on decrypt, areq->cryptlen includes the trailing ICV */
	unsigned int cryptlen = areq->cryptlen - (encrypt ? 0 : authsize);
	bool is_ipsec_esp = edesc->desc.hdr & DESC_HDR_TYPE_IPSEC_ESP;
	struct talitos_ptr *civ_ptr = &edesc->desc.ptr[is_ipsec_esp ? 2 : 3];

	if (is_ipsec_esp)
		unmap_single_talitos_ptr(dev, &edesc->desc.ptr[6],
					 DMA_FROM_DEVICE);
	unmap_single_talitos_ptr(dev, civ_ptr, DMA_TO_DEVICE);

	talitos_sg_unmap(dev, edesc, areq->src, areq->dst,
			 cryptlen + authsize, areq->assoclen);

	if (edesc->dma_len)
		dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
				 DMA_BIDIRECTIONAL);

	if (!is_ipsec_esp) {
		unsigned int dst_nents = edesc->dst_nents ? : 1;

		sg_pcopy_to_buffer(areq->dst, dst_nents, ctx->iv, ivsize,
				   areq->assoclen + cryptlen - ivsize);
	}
}
1017*4882a593Smuzhiyun 
1018*4882a593Smuzhiyun /*
1019*4882a593Smuzhiyun  * ipsec_esp descriptor callbacks
1020*4882a593Smuzhiyun  */
ipsec_esp_encrypt_done(struct device * dev,struct talitos_desc * desc,void * context,int err)1021*4882a593Smuzhiyun static void ipsec_esp_encrypt_done(struct device *dev,
1022*4882a593Smuzhiyun 				   struct talitos_desc *desc, void *context,
1023*4882a593Smuzhiyun 				   int err)
1024*4882a593Smuzhiyun {
1025*4882a593Smuzhiyun 	struct aead_request *areq = context;
1026*4882a593Smuzhiyun 	struct crypto_aead *authenc = crypto_aead_reqtfm(areq);
1027*4882a593Smuzhiyun 	unsigned int ivsize = crypto_aead_ivsize(authenc);
1028*4882a593Smuzhiyun 	struct talitos_edesc *edesc;
1029*4882a593Smuzhiyun 
1030*4882a593Smuzhiyun 	edesc = container_of(desc, struct talitos_edesc, desc);
1031*4882a593Smuzhiyun 
1032*4882a593Smuzhiyun 	ipsec_esp_unmap(dev, edesc, areq, true);
1033*4882a593Smuzhiyun 
1034*4882a593Smuzhiyun 	dma_unmap_single(dev, edesc->iv_dma, ivsize, DMA_TO_DEVICE);
1035*4882a593Smuzhiyun 
1036*4882a593Smuzhiyun 	kfree(edesc);
1037*4882a593Smuzhiyun 
1038*4882a593Smuzhiyun 	aead_request_complete(areq, err);
1039*4882a593Smuzhiyun }
1040*4882a593Smuzhiyun 
ipsec_esp_decrypt_swauth_done(struct device * dev,struct talitos_desc * desc,void * context,int err)1041*4882a593Smuzhiyun static void ipsec_esp_decrypt_swauth_done(struct device *dev,
1042*4882a593Smuzhiyun 					  struct talitos_desc *desc,
1043*4882a593Smuzhiyun 					  void *context, int err)
1044*4882a593Smuzhiyun {
1045*4882a593Smuzhiyun 	struct aead_request *req = context;
1046*4882a593Smuzhiyun 	struct crypto_aead *authenc = crypto_aead_reqtfm(req);
1047*4882a593Smuzhiyun 	unsigned int authsize = crypto_aead_authsize(authenc);
1048*4882a593Smuzhiyun 	struct talitos_edesc *edesc;
1049*4882a593Smuzhiyun 	char *oicv, *icv;
1050*4882a593Smuzhiyun 
1051*4882a593Smuzhiyun 	edesc = container_of(desc, struct talitos_edesc, desc);
1052*4882a593Smuzhiyun 
1053*4882a593Smuzhiyun 	ipsec_esp_unmap(dev, edesc, req, false);
1054*4882a593Smuzhiyun 
1055*4882a593Smuzhiyun 	if (!err) {
1056*4882a593Smuzhiyun 		/* auth check */
1057*4882a593Smuzhiyun 		oicv = edesc->buf + edesc->dma_len;
1058*4882a593Smuzhiyun 		icv = oicv - authsize;
1059*4882a593Smuzhiyun 
1060*4882a593Smuzhiyun 		err = crypto_memneq(oicv, icv, authsize) ? -EBADMSG : 0;
1061*4882a593Smuzhiyun 	}
1062*4882a593Smuzhiyun 
1063*4882a593Smuzhiyun 	kfree(edesc);
1064*4882a593Smuzhiyun 
1065*4882a593Smuzhiyun 	aead_request_complete(req, err);
1066*4882a593Smuzhiyun }
1067*4882a593Smuzhiyun 
ipsec_esp_decrypt_hwauth_done(struct device * dev,struct talitos_desc * desc,void * context,int err)1068*4882a593Smuzhiyun static void ipsec_esp_decrypt_hwauth_done(struct device *dev,
1069*4882a593Smuzhiyun 					  struct talitos_desc *desc,
1070*4882a593Smuzhiyun 					  void *context, int err)
1071*4882a593Smuzhiyun {
1072*4882a593Smuzhiyun 	struct aead_request *req = context;
1073*4882a593Smuzhiyun 	struct talitos_edesc *edesc;
1074*4882a593Smuzhiyun 
1075*4882a593Smuzhiyun 	edesc = container_of(desc, struct talitos_edesc, desc);
1076*4882a593Smuzhiyun 
1077*4882a593Smuzhiyun 	ipsec_esp_unmap(dev, edesc, req, false);
1078*4882a593Smuzhiyun 
1079*4882a593Smuzhiyun 	/* check ICV auth status */
1080*4882a593Smuzhiyun 	if (!err && ((desc->hdr_lo & DESC_HDR_LO_ICCR1_MASK) !=
1081*4882a593Smuzhiyun 		     DESC_HDR_LO_ICCR1_PASS))
1082*4882a593Smuzhiyun 		err = -EBADMSG;
1083*4882a593Smuzhiyun 
1084*4882a593Smuzhiyun 	kfree(edesc);
1085*4882a593Smuzhiyun 
1086*4882a593Smuzhiyun 	aead_request_complete(req, err);
1087*4882a593Smuzhiyun }
1088*4882a593Smuzhiyun 
/*
 * convert scatterlist to SEC h/w link table format
 * stop at cryptlen bytes
 *
 * Emits @datalen bytes of main data plus @elen extent bytes (e.g. the
 * ICV), starting @offset bytes into @sg.  An sg entry straddling the
 * datalen/elen boundary is split into two link-table entries so the
 * extent begins on its own entry.  The entry covering the last sg
 * element is rounded up with ALIGN(@cryptlen, @align) padding.
 * Returns the number of entries written; the last one is tagged with
 * DESC_PTR_LNKTBL_RET.
 */
static int sg_to_link_tbl_offset(struct scatterlist *sg, int sg_count,
				 unsigned int offset, int datalen, int elen,
				 struct talitos_ptr *link_tbl_ptr, int align)
{
	/* one extra entry may be needed for the data/extent split */
	int n_sg = elen ? sg_count + 1 : sg_count;
	int count = 0;
	int cryptlen = datalen + elen;
	int padding = ALIGN(cryptlen, align) - cryptlen;

	while (cryptlen && sg && n_sg--) {
		unsigned int len = sg_dma_len(sg);

		/* skip whole entries until @offset is consumed */
		if (offset >= len) {
			offset -= len;
			goto next;
		}

		len -= offset;

		if (len > cryptlen)
			len = cryptlen;

		/* entry spans the end of the main data: split it there */
		if (datalen > 0 && len > datalen) {
			to_talitos_ptr(link_tbl_ptr + count,
				       sg_dma_address(sg) + offset, datalen, 0);
			to_talitos_ptr_ext_set(link_tbl_ptr + count, 0, 0);
			count++;
			len -= datalen;
			offset += datalen;
		}
		to_talitos_ptr(link_tbl_ptr + count,
			       sg_dma_address(sg) + offset, sg_next(sg) ? len : len + padding, 0);
		to_talitos_ptr_ext_set(link_tbl_ptr + count, 0, 0);
		count++;
		cryptlen -= len;
		datalen -= len;
		offset = 0;

next:
		sg = sg_next(sg);
	}

	/* tag end of link table */
	if (count > 0)
		to_talitos_ptr_ext_set(link_tbl_ptr + count - 1,
				       DESC_PTR_LNKTBL_RET, 0);

	return count;
}
1142*4882a593Smuzhiyun 
/*
 * talitos_sg_map_ext() - point one descriptor entry at scatterlist data.
 *
 * Fills @ptr so the SEC can access @len bytes starting @offset into
 * @src, plus @elen extent bytes, with the length rounded up to @align.
 * Decision tree:
 *   - NULL @src: null pointer entry;
 *   - single mapped segment (and !@force): direct pointer;
 *   - SEC1: data was bounced into the linear buffer, point into
 *     dma_link_tbl directly;
 *   - otherwise build a h/w link table at @tbl_off; if it collapses to
 *     a single entry, copy it inline, else emit a LNKTBL_JUMP pointer.
 * Returns the number of segments used (1 for the degenerate cases).
 */
static int talitos_sg_map_ext(struct device *dev, struct scatterlist *src,
			      unsigned int len, struct talitos_edesc *edesc,
			      struct talitos_ptr *ptr, int sg_count,
			      unsigned int offset, int tbl_off, int elen,
			      bool force, int align)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	int aligned_len = ALIGN(len, align);

	if (!src) {
		to_talitos_ptr(ptr, 0, 0, is_sec1);
		return 1;
	}
	to_talitos_ptr_ext_set(ptr, elen, is_sec1);
	if (sg_count == 1 && !force) {
		to_talitos_ptr(ptr, sg_dma_address(src) + offset, aligned_len, is_sec1);
		return sg_count;
	}
	if (is_sec1) {
		to_talitos_ptr(ptr, edesc->dma_link_tbl + offset, aligned_len, is_sec1);
		return sg_count;
	}
	sg_count = sg_to_link_tbl_offset(src, sg_count, offset, len, elen,
					 &edesc->link_tbl[tbl_off], align);
	if (sg_count == 1 && !force) {
		/* Only one segment now, so no link tbl needed*/
		copy_talitos_ptr(ptr, &edesc->link_tbl[tbl_off], is_sec1);
		return sg_count;
	}
	to_talitos_ptr(ptr, edesc->dma_link_tbl +
			    tbl_off * sizeof(struct talitos_ptr), aligned_len, is_sec1);
	to_talitos_ptr_ext_or(ptr, DESC_PTR_LNKTBL_JUMP, is_sec1);

	return sg_count;
}
1179*4882a593Smuzhiyun 
/*
 * talitos_sg_map() - common-case wrapper around talitos_sg_map_ext():
 * no extent bytes, no forced link table, no alignment padding.
 */
static int talitos_sg_map(struct device *dev, struct scatterlist *src,
			  unsigned int len, struct talitos_edesc *edesc,
			  struct talitos_ptr *ptr, int sg_count,
			  unsigned int offset, int tbl_off)
{
	return talitos_sg_map_ext(dev, src, len, edesc, ptr, sg_count, offset,
				  tbl_off, 0, false, 1);
}
1188*4882a593Smuzhiyun 
/*
 * fill in and submit ipsec_esp descriptor
 *
 * Programs the seven descriptor pointers for an AEAD (authenc) request
 * and queues the descriptor on the device channel.  On submit failure
 * the extended descriptor is unmapped and freed here; on success the
 * completion is reported asynchronously through @callback.
 */
static int ipsec_esp(struct talitos_edesc *edesc, struct aead_request *areq,
		     bool encrypt,
		     void (*callback)(struct device *dev,
				      struct talitos_desc *desc,
				      void *context, int error))
{
	struct crypto_aead *aead = crypto_aead_reqtfm(areq);
	unsigned int authsize = crypto_aead_authsize(aead);
	struct talitos_ctx *ctx = crypto_aead_ctx(aead);
	struct device *dev = ctx->dev;
	struct talitos_desc *desc = &edesc->desc;
	/* on decrypt, the trailing ICV is not counted as ciphertext */
	unsigned int cryptlen = areq->cryptlen - (encrypt ? 0 : authsize);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	int tbl_off = 0;
	int sg_count, ret;
	int elen = 0;
	bool sync_needed = false;
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	bool is_ipsec_esp = desc->hdr & DESC_HDR_TYPE_IPSEC_ESP;
	/* IV and cipher-key pointer slots are swapped between desc types */
	struct talitos_ptr *civ_ptr = &desc->ptr[is_ipsec_esp ? 2 : 3];
	struct talitos_ptr *ckey_ptr = &desc->ptr[is_ipsec_esp ? 3 : 2];
	/* generated ICV is placed at the tail of the link table region */
	dma_addr_t dma_icv = edesc->dma_link_tbl + edesc->dma_len - authsize;

	/* hmac key */
	to_talitos_ptr(&desc->ptr[0], ctx->dma_key, ctx->authkeylen, is_sec1);

	/*
	 * SEC1 cannot walk a multi-entry S/G list here: linearize the input
	 * into edesc->buf instead of DMA-mapping the scatterlist.
	 */
	sg_count = edesc->src_nents ?: 1;
	if (is_sec1 && sg_count > 1)
		sg_copy_to_buffer(areq->src, sg_count, edesc->buf,
				  areq->assoclen + cryptlen);
	else
		sg_count = dma_map_sg(dev, areq->src, sg_count,
				      (areq->src == areq->dst) ?
				      DMA_BIDIRECTIONAL : DMA_TO_DEVICE);

	/* hmac data */
	ret = talitos_sg_map(dev, areq->src, areq->assoclen, edesc,
			     &desc->ptr[1], sg_count, 0, tbl_off);

	/* ret > 1 means link table entries were consumed */
	if (ret > 1) {
		tbl_off += ret;
		sync_needed = true;
	}

	/* cipher iv */
	to_talitos_ptr(civ_ptr, edesc->iv_dma, ivsize, is_sec1);

	/* cipher key (stored right after the auth key in the key buffer) */
	to_talitos_ptr(ckey_ptr, ctx->dma_key  + ctx->authkeylen,
		       ctx->enckeylen, is_sec1);

	/*
	 * cipher in
	 * map and adjust cipher len to aead request cryptlen.
	 * extent is bytes of HMAC postpended to ciphertext,
	 * typically 12 for ipsec
	 */
	if (is_ipsec_esp && (desc->hdr & DESC_HDR_MODE1_MDEU_CICV))
		elen = authsize;

	ret = talitos_sg_map_ext(dev, areq->src, cryptlen, edesc, &desc->ptr[4],
				 sg_count, areq->assoclen, tbl_off, elen,
				 false, 1);

	if (ret > 1) {
		tbl_off += ret;
		sync_needed = true;
	}

	/* cipher out */
	if (areq->src != areq->dst) {
		sg_count = edesc->dst_nents ? : 1;
		if (!is_sec1 || sg_count == 1)
			dma_map_sg(dev, areq->dst, sg_count, DMA_FROM_DEVICE);
	}

	/* on ipsec-esp encrypt, the h/w appends the ICV as extent bytes */
	if (is_ipsec_esp && encrypt)
		elen = authsize;
	else
		elen = 0;
	ret = talitos_sg_map_ext(dev, areq->dst, cryptlen, edesc, &desc->ptr[5],
				 sg_count, areq->assoclen, tbl_off, elen,
				 is_ipsec_esp && !encrypt, 1);
	tbl_off += ret;

	if (!encrypt && is_ipsec_esp) {
		struct talitos_ptr *tbl_ptr = &edesc->link_tbl[tbl_off];

		/* Add an entry to the link table for ICV data */
		to_talitos_ptr_ext_set(tbl_ptr - 1, 0, is_sec1);
		to_talitos_ptr_ext_set(tbl_ptr, DESC_PTR_LNKTBL_RET, is_sec1);

		/* icv data follows link tables */
		to_talitos_ptr(tbl_ptr, dma_icv, authsize, is_sec1);
		to_talitos_ptr_ext_or(&desc->ptr[5], authsize, is_sec1);
		sync_needed = true;
	} else if (!encrypt) {
		/* software auth check: h/w writes its ICV for later compare */
		to_talitos_ptr(&desc->ptr[6], dma_icv, authsize, is_sec1);
		sync_needed = true;
	} else if (!is_ipsec_esp) {
		/* non-ipsec encrypt: generated ICV goes to dst via ptr[6] */
		talitos_sg_map(dev, areq->dst, authsize, edesc, &desc->ptr[6],
			       sg_count, areq->assoclen + cryptlen, tbl_off);
	}

	/* iv out */
	if (is_ipsec_esp)
		map_single_talitos_ptr(dev, &desc->ptr[6], ivsize, ctx->iv,
				       DMA_FROM_DEVICE);

	/* flush CPU-written link tables to memory before the h/w reads them */
	if (sync_needed)
		dma_sync_single_for_device(dev, edesc->dma_link_tbl,
					   edesc->dma_len,
					   DMA_BIDIRECTIONAL);

	ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
	if (ret != -EINPROGRESS) {
		ipsec_esp_unmap(dev, edesc, areq, encrypt);
		kfree(edesc);
	}
	return ret;
}
1314*4882a593Smuzhiyun 
/*
 * allocate and map the extended descriptor
 *
 * Works out how many S/G entries src/dst need, sizes the link-table
 * region (plus room for the generated ICV and, if @icv_stashing, a
 * stashed copy of the incoming ICV), copies the IV to the tail of the
 * allocation and DMA-maps it, then maps the link-table region
 * bidirectionally.  Returns ERR_PTR() on failure.
 */
static struct talitos_edesc *talitos_edesc_alloc(struct device *dev,
						 struct scatterlist *src,
						 struct scatterlist *dst,
						 u8 *iv,
						 unsigned int assoclen,
						 unsigned int cryptlen,
						 unsigned int authsize,
						 unsigned int ivsize,
						 int icv_stashing,
						 u32 cryptoflags,
						 bool encrypt)
{
	struct talitos_edesc *edesc;
	int src_nents, dst_nents, alloc_len, dma_len, src_len, dst_len;
	dma_addr_t iv_dma = 0;
	gfp_t flags = cryptoflags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
		      GFP_ATOMIC;
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	int max_len = is_sec1 ? TALITOS1_MAX_DATA_LEN : TALITOS2_MAX_DATA_LEN;

	if (cryptlen + authsize > max_len) {
		dev_err(dev, "length exceeds h/w max limit\n");
		return ERR_PTR(-EINVAL);
	}

	if (!dst || dst == src) {
		/* in-place operation: src must also cover the ICV */
		src_len = assoclen + cryptlen + authsize;
		src_nents = sg_nents_for_len(src, src_len);
		if (src_nents < 0) {
			dev_err(dev, "Invalid number of src SG.\n");
			return ERR_PTR(-EINVAL);
		}
		/* nents == 0 encodes "single contiguous segment" */
		src_nents = (src_nents == 1) ? 0 : src_nents;
		dst_nents = dst ? src_nents : 0;
		dst_len = 0;
	} else { /* dst && dst != src*/
		/* ICV read from src on decrypt, written to dst on encrypt */
		src_len = assoclen + cryptlen + (encrypt ? 0 : authsize);
		src_nents = sg_nents_for_len(src, src_len);
		if (src_nents < 0) {
			dev_err(dev, "Invalid number of src SG.\n");
			return ERR_PTR(-EINVAL);
		}
		src_nents = (src_nents == 1) ? 0 : src_nents;
		dst_len = assoclen + cryptlen + (encrypt ? authsize : 0);
		dst_nents = sg_nents_for_len(dst, dst_len);
		if (dst_nents < 0) {
			dev_err(dev, "Invalid number of dst SG.\n");
			return ERR_PTR(-EINVAL);
		}
		dst_nents = (dst_nents == 1) ? 0 : dst_nents;
	}

	/*
	 * allocate space for base edesc plus the link tables,
	 * allowing for two separate entries for AD and generated ICV (+ 2),
	 * and space for two sets of ICVs (stashed and generated)
	 */
	alloc_len = sizeof(struct talitos_edesc);
	if (src_nents || dst_nents || !encrypt) {
		if (is_sec1)
			/* SEC1 linearizes data into bounce buffers instead */
			dma_len = (src_nents ? src_len : 0) +
				  (dst_nents ? dst_len : 0) + authsize;
		else
			dma_len = (src_nents + dst_nents + 2) *
				  sizeof(struct talitos_ptr) + authsize;
		alloc_len += dma_len;
	} else {
		dma_len = 0;
	}
	alloc_len += icv_stashing ? authsize : 0;

	/* if its a ahash, add space for a second desc next to the first one */
	if (is_sec1 && !dst)
		alloc_len += sizeof(struct talitos_desc);
	alloc_len += ivsize;

	edesc = kmalloc(alloc_len, GFP_DMA | flags);
	if (!edesc)
		return ERR_PTR(-ENOMEM);
	if (ivsize) {
		/* IV lives at the very end of the allocation */
		iv = memcpy(((u8 *)edesc) + alloc_len - ivsize, iv, ivsize);
		iv_dma = dma_map_single(dev, iv, ivsize, DMA_TO_DEVICE);
	}
	memset(&edesc->desc, 0, sizeof(edesc->desc));

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->iv_dma = iv_dma;
	edesc->dma_len = dma_len;
	if (dma_len)
		edesc->dma_link_tbl = dma_map_single(dev, &edesc->link_tbl[0],
						     edesc->dma_len,
						     DMA_BIDIRECTIONAL);

	return edesc;
}
1415*4882a593Smuzhiyun 
/*
 * Allocate the extended descriptor for an AEAD request.
 * @icv_stashing requests extra room for stashing the incoming ICV
 * (used for the software auth check on decrypt).
 */
static struct talitos_edesc *aead_edesc_alloc(struct aead_request *areq, u8 *iv,
					      int icv_stashing, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = areq->cryptlen;

	/* on decrypt, the trailing ICV is not part of the payload */
	if (!encrypt)
		cryptlen -= authsize;

	return talitos_edesc_alloc(ctx->dev, areq->src, areq->dst, iv,
				   areq->assoclen, cryptlen, authsize, ivsize,
				   icv_stashing, areq->base.flags, encrypt);
}
1430*4882a593Smuzhiyun 
aead_encrypt(struct aead_request * req)1431*4882a593Smuzhiyun static int aead_encrypt(struct aead_request *req)
1432*4882a593Smuzhiyun {
1433*4882a593Smuzhiyun 	struct crypto_aead *authenc = crypto_aead_reqtfm(req);
1434*4882a593Smuzhiyun 	struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
1435*4882a593Smuzhiyun 	struct talitos_edesc *edesc;
1436*4882a593Smuzhiyun 
1437*4882a593Smuzhiyun 	/* allocate extended descriptor */
1438*4882a593Smuzhiyun 	edesc = aead_edesc_alloc(req, req->iv, 0, true);
1439*4882a593Smuzhiyun 	if (IS_ERR(edesc))
1440*4882a593Smuzhiyun 		return PTR_ERR(edesc);
1441*4882a593Smuzhiyun 
1442*4882a593Smuzhiyun 	/* set encrypt */
1443*4882a593Smuzhiyun 	edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_MODE0_ENCRYPT;
1444*4882a593Smuzhiyun 
1445*4882a593Smuzhiyun 	return ipsec_esp(edesc, req, true, ipsec_esp_encrypt_done);
1446*4882a593Smuzhiyun }
1447*4882a593Smuzhiyun 
aead_decrypt(struct aead_request * req)1448*4882a593Smuzhiyun static int aead_decrypt(struct aead_request *req)
1449*4882a593Smuzhiyun {
1450*4882a593Smuzhiyun 	struct crypto_aead *authenc = crypto_aead_reqtfm(req);
1451*4882a593Smuzhiyun 	unsigned int authsize = crypto_aead_authsize(authenc);
1452*4882a593Smuzhiyun 	struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
1453*4882a593Smuzhiyun 	struct talitos_private *priv = dev_get_drvdata(ctx->dev);
1454*4882a593Smuzhiyun 	struct talitos_edesc *edesc;
1455*4882a593Smuzhiyun 	void *icvdata;
1456*4882a593Smuzhiyun 
1457*4882a593Smuzhiyun 	/* allocate extended descriptor */
1458*4882a593Smuzhiyun 	edesc = aead_edesc_alloc(req, req->iv, 1, false);
1459*4882a593Smuzhiyun 	if (IS_ERR(edesc))
1460*4882a593Smuzhiyun 		return PTR_ERR(edesc);
1461*4882a593Smuzhiyun 
1462*4882a593Smuzhiyun 	if ((edesc->desc.hdr & DESC_HDR_TYPE_IPSEC_ESP) &&
1463*4882a593Smuzhiyun 	    (priv->features & TALITOS_FTR_HW_AUTH_CHECK) &&
1464*4882a593Smuzhiyun 	    ((!edesc->src_nents && !edesc->dst_nents) ||
1465*4882a593Smuzhiyun 	     priv->features & TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT)) {
1466*4882a593Smuzhiyun 
1467*4882a593Smuzhiyun 		/* decrypt and check the ICV */
1468*4882a593Smuzhiyun 		edesc->desc.hdr = ctx->desc_hdr_template |
1469*4882a593Smuzhiyun 				  DESC_HDR_DIR_INBOUND |
1470*4882a593Smuzhiyun 				  DESC_HDR_MODE1_MDEU_CICV;
1471*4882a593Smuzhiyun 
1472*4882a593Smuzhiyun 		/* reset integrity check result bits */
1473*4882a593Smuzhiyun 
1474*4882a593Smuzhiyun 		return ipsec_esp(edesc, req, false,
1475*4882a593Smuzhiyun 				 ipsec_esp_decrypt_hwauth_done);
1476*4882a593Smuzhiyun 	}
1477*4882a593Smuzhiyun 
1478*4882a593Smuzhiyun 	/* Have to check the ICV with software */
1479*4882a593Smuzhiyun 	edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND;
1480*4882a593Smuzhiyun 
1481*4882a593Smuzhiyun 	/* stash incoming ICV for later cmp with ICV generated by the h/w */
1482*4882a593Smuzhiyun 	icvdata = edesc->buf + edesc->dma_len;
1483*4882a593Smuzhiyun 
1484*4882a593Smuzhiyun 	sg_pcopy_to_buffer(req->src, edesc->src_nents ? : 1, icvdata, authsize,
1485*4882a593Smuzhiyun 			   req->assoclen + req->cryptlen - authsize);
1486*4882a593Smuzhiyun 
1487*4882a593Smuzhiyun 	return ipsec_esp(edesc, req, false, ipsec_esp_decrypt_swauth_done);
1488*4882a593Smuzhiyun }
1489*4882a593Smuzhiyun 
skcipher_setkey(struct crypto_skcipher * cipher,const u8 * key,unsigned int keylen)1490*4882a593Smuzhiyun static int skcipher_setkey(struct crypto_skcipher *cipher,
1491*4882a593Smuzhiyun 			     const u8 *key, unsigned int keylen)
1492*4882a593Smuzhiyun {
1493*4882a593Smuzhiyun 	struct talitos_ctx *ctx = crypto_skcipher_ctx(cipher);
1494*4882a593Smuzhiyun 	struct device *dev = ctx->dev;
1495*4882a593Smuzhiyun 
1496*4882a593Smuzhiyun 	if (ctx->keylen)
1497*4882a593Smuzhiyun 		dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
1498*4882a593Smuzhiyun 
1499*4882a593Smuzhiyun 	memcpy(&ctx->key, key, keylen);
1500*4882a593Smuzhiyun 	ctx->keylen = keylen;
1501*4882a593Smuzhiyun 
1502*4882a593Smuzhiyun 	ctx->dma_key = dma_map_single(dev, ctx->key, keylen, DMA_TO_DEVICE);
1503*4882a593Smuzhiyun 
1504*4882a593Smuzhiyun 	return 0;
1505*4882a593Smuzhiyun }
1506*4882a593Smuzhiyun 
/* Validate a DES key (weak-key check) before installing it. */
static int skcipher_des_setkey(struct crypto_skcipher *cipher,
				 const u8 *key, unsigned int keylen)
{
	int err = verify_skcipher_des_key(cipher, key);

	if (err)
		return err;

	return skcipher_setkey(cipher, key, keylen);
}
1513*4882a593Smuzhiyun 
/* Validate a 3DES key (parity/degeneracy check) before installing it. */
static int skcipher_des3_setkey(struct crypto_skcipher *cipher,
				  const u8 *key, unsigned int keylen)
{
	int err = verify_skcipher_des3_key(cipher, key);

	if (err)
		return err;

	return skcipher_setkey(cipher, key, keylen);
}
1520*4882a593Smuzhiyun 
skcipher_aes_setkey(struct crypto_skcipher * cipher,const u8 * key,unsigned int keylen)1521*4882a593Smuzhiyun static int skcipher_aes_setkey(struct crypto_skcipher *cipher,
1522*4882a593Smuzhiyun 				  const u8 *key, unsigned int keylen)
1523*4882a593Smuzhiyun {
1524*4882a593Smuzhiyun 	if (keylen == AES_KEYSIZE_128 || keylen == AES_KEYSIZE_192 ||
1525*4882a593Smuzhiyun 	    keylen == AES_KEYSIZE_256)
1526*4882a593Smuzhiyun 		return skcipher_setkey(cipher, key, keylen);
1527*4882a593Smuzhiyun 
1528*4882a593Smuzhiyun 	return -EINVAL;
1529*4882a593Smuzhiyun }
1530*4882a593Smuzhiyun 
/*
 * Undo all DMA mappings set up for a skcipher request: IV-out (ptr[5]),
 * the src/dst data scatterlists, IV-in (ptr[1]) and, when one was
 * built, the link table region.
 */
static void common_nonsnoop_unmap(struct device *dev,
				  struct talitos_edesc *edesc,
				  struct skcipher_request *areq)
{
	unmap_single_talitos_ptr(dev, &edesc->desc.ptr[5], DMA_FROM_DEVICE);

	talitos_sg_unmap(dev, edesc, areq->src, areq->dst, areq->cryptlen, 0);
	unmap_single_talitos_ptr(dev, &edesc->desc.ptr[1], DMA_TO_DEVICE);

	if (edesc->dma_len)
		dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
				 DMA_BIDIRECTIONAL);
}
1544*4882a593Smuzhiyun 
skcipher_done(struct device * dev,struct talitos_desc * desc,void * context,int err)1545*4882a593Smuzhiyun static void skcipher_done(struct device *dev,
1546*4882a593Smuzhiyun 			    struct talitos_desc *desc, void *context,
1547*4882a593Smuzhiyun 			    int err)
1548*4882a593Smuzhiyun {
1549*4882a593Smuzhiyun 	struct skcipher_request *areq = context;
1550*4882a593Smuzhiyun 	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
1551*4882a593Smuzhiyun 	struct talitos_ctx *ctx = crypto_skcipher_ctx(cipher);
1552*4882a593Smuzhiyun 	unsigned int ivsize = crypto_skcipher_ivsize(cipher);
1553*4882a593Smuzhiyun 	struct talitos_edesc *edesc;
1554*4882a593Smuzhiyun 
1555*4882a593Smuzhiyun 	edesc = container_of(desc, struct talitos_edesc, desc);
1556*4882a593Smuzhiyun 
1557*4882a593Smuzhiyun 	common_nonsnoop_unmap(dev, edesc, areq);
1558*4882a593Smuzhiyun 	memcpy(areq->iv, ctx->iv, ivsize);
1559*4882a593Smuzhiyun 
1560*4882a593Smuzhiyun 	kfree(edesc);
1561*4882a593Smuzhiyun 
1562*4882a593Smuzhiyun 	areq->base.complete(&areq->base, err);
1563*4882a593Smuzhiyun }
1564*4882a593Smuzhiyun 
/*
 * Fill in and submit the skcipher descriptor: IV and key pointers,
 * cipher-in from src (ptr[3]), cipher-out to dst (ptr[4]) and IV-out
 * (ptr[5]), then queue it on the device channel.  On submit failure
 * everything is unmapped and the edesc freed here.
 */
static int common_nonsnoop(struct talitos_edesc *edesc,
			   struct skcipher_request *areq,
			   void (*callback) (struct device *dev,
					     struct talitos_desc *desc,
					     void *context, int error))
{
	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_skcipher_ctx(cipher);
	struct device *dev = ctx->dev;
	struct talitos_desc *desc = &edesc->desc;
	unsigned int cryptlen = areq->cryptlen;
	unsigned int ivsize = crypto_skcipher_ivsize(cipher);
	int sg_count, ret;
	bool sync_needed = false;
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	/*
	 * AES-CTR input is mapped with a 16-byte alignment argument below —
	 * presumably a h/w requirement for CTR link-table entries (TODO:
	 * confirm against talitos_sg_map_ext() / the SEC reference manual).
	 */
	bool is_ctr = (desc->hdr & DESC_HDR_SEL0_MASK) == DESC_HDR_SEL0_AESU &&
		      (desc->hdr & DESC_HDR_MODE0_AESU_MASK) == DESC_HDR_MODE0_AESU_CTR;

	/* first DWORD empty */

	/* cipher iv */
	to_talitos_ptr(&desc->ptr[1], edesc->iv_dma, ivsize, is_sec1);

	/* cipher key */
	to_talitos_ptr(&desc->ptr[2], ctx->dma_key, ctx->keylen, is_sec1);

	/* SEC1 cannot walk a multi-entry S/G list: linearize into edesc->buf */
	sg_count = edesc->src_nents ?: 1;
	if (is_sec1 && sg_count > 1)
		sg_copy_to_buffer(areq->src, sg_count, edesc->buf,
				  cryptlen);
	else
		sg_count = dma_map_sg(dev, areq->src, sg_count,
				      (areq->src == areq->dst) ?
				      DMA_BIDIRECTIONAL : DMA_TO_DEVICE);
	/*
	 * cipher in
	 */
	sg_count = talitos_sg_map_ext(dev, areq->src, cryptlen, edesc, &desc->ptr[3],
				      sg_count, 0, 0, 0, false, is_ctr ? 16 : 1);
	if (sg_count > 1)
		sync_needed = true;

	/* cipher out */
	if (areq->src != areq->dst) {
		sg_count = edesc->dst_nents ? : 1;
		if (!is_sec1 || sg_count == 1)
			dma_map_sg(dev, areq->dst, sg_count, DMA_FROM_DEVICE);
	}

	ret = talitos_sg_map(dev, areq->dst, cryptlen, edesc, &desc->ptr[4],
			     sg_count, 0, (edesc->src_nents + 1));
	if (ret > 1)
		sync_needed = true;

	/* iv out */
	map_single_talitos_ptr(dev, &desc->ptr[5], ivsize, ctx->iv,
			       DMA_FROM_DEVICE);

	/* last DWORD empty */

	/* flush CPU-written link tables before the h/w fetches them */
	if (sync_needed)
		dma_sync_single_for_device(dev, edesc->dma_link_tbl,
					   edesc->dma_len, DMA_BIDIRECTIONAL);

	ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
	if (ret != -EINPROGRESS) {
		common_nonsnoop_unmap(dev, edesc, areq);
		kfree(edesc);
	}
	return ret;
}
1637*4882a593Smuzhiyun 
/*
 * Allocate the extended descriptor for a plain skcipher request:
 * no assoclen, no authsize and no ICV stashing.
 */
static struct talitos_edesc *skcipher_edesc_alloc(struct skcipher_request *
						    areq, bool encrypt)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_skcipher_ctx(tfm);

	return talitos_edesc_alloc(ctx->dev, areq->src, areq->dst, areq->iv,
				   0, areq->cryptlen, 0,
				   crypto_skcipher_ivsize(tfm), 0,
				   areq->base.flags, encrypt);
}
1649*4882a593Smuzhiyun 
skcipher_encrypt(struct skcipher_request * areq)1650*4882a593Smuzhiyun static int skcipher_encrypt(struct skcipher_request *areq)
1651*4882a593Smuzhiyun {
1652*4882a593Smuzhiyun 	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
1653*4882a593Smuzhiyun 	struct talitos_ctx *ctx = crypto_skcipher_ctx(cipher);
1654*4882a593Smuzhiyun 	struct talitos_edesc *edesc;
1655*4882a593Smuzhiyun 	unsigned int blocksize =
1656*4882a593Smuzhiyun 			crypto_tfm_alg_blocksize(crypto_skcipher_tfm(cipher));
1657*4882a593Smuzhiyun 
1658*4882a593Smuzhiyun 	if (!areq->cryptlen)
1659*4882a593Smuzhiyun 		return 0;
1660*4882a593Smuzhiyun 
1661*4882a593Smuzhiyun 	if (areq->cryptlen % blocksize)
1662*4882a593Smuzhiyun 		return -EINVAL;
1663*4882a593Smuzhiyun 
1664*4882a593Smuzhiyun 	/* allocate extended descriptor */
1665*4882a593Smuzhiyun 	edesc = skcipher_edesc_alloc(areq, true);
1666*4882a593Smuzhiyun 	if (IS_ERR(edesc))
1667*4882a593Smuzhiyun 		return PTR_ERR(edesc);
1668*4882a593Smuzhiyun 
1669*4882a593Smuzhiyun 	/* set encrypt */
1670*4882a593Smuzhiyun 	edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_MODE0_ENCRYPT;
1671*4882a593Smuzhiyun 
1672*4882a593Smuzhiyun 	return common_nonsnoop(edesc, areq, skcipher_done);
1673*4882a593Smuzhiyun }
1674*4882a593Smuzhiyun 
skcipher_decrypt(struct skcipher_request * areq)1675*4882a593Smuzhiyun static int skcipher_decrypt(struct skcipher_request *areq)
1676*4882a593Smuzhiyun {
1677*4882a593Smuzhiyun 	struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
1678*4882a593Smuzhiyun 	struct talitos_ctx *ctx = crypto_skcipher_ctx(cipher);
1679*4882a593Smuzhiyun 	struct talitos_edesc *edesc;
1680*4882a593Smuzhiyun 	unsigned int blocksize =
1681*4882a593Smuzhiyun 			crypto_tfm_alg_blocksize(crypto_skcipher_tfm(cipher));
1682*4882a593Smuzhiyun 
1683*4882a593Smuzhiyun 	if (!areq->cryptlen)
1684*4882a593Smuzhiyun 		return 0;
1685*4882a593Smuzhiyun 
1686*4882a593Smuzhiyun 	if (areq->cryptlen % blocksize)
1687*4882a593Smuzhiyun 		return -EINVAL;
1688*4882a593Smuzhiyun 
1689*4882a593Smuzhiyun 	/* allocate extended descriptor */
1690*4882a593Smuzhiyun 	edesc = skcipher_edesc_alloc(areq, false);
1691*4882a593Smuzhiyun 	if (IS_ERR(edesc))
1692*4882a593Smuzhiyun 		return PTR_ERR(edesc);
1693*4882a593Smuzhiyun 
1694*4882a593Smuzhiyun 	edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND;
1695*4882a593Smuzhiyun 
1696*4882a593Smuzhiyun 	return common_nonsnoop(edesc, areq, skcipher_done);
1697*4882a593Smuzhiyun }
1698*4882a593Smuzhiyun 
/*
 * Undo all DMA mappings made for a hash request and, when this was the
 * final chunk, copy the digest out to areq->result.  Also handles the
 * optional chained second descriptor (desc2), which lives right after
 * the link-table region in edesc->buf.
 */
static void common_nonsnoop_hash_unmap(struct device *dev,
				       struct talitos_edesc *edesc,
				       struct ahash_request *areq)
{
	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	struct talitos_desc *desc = &edesc->desc;
	/* second descriptor, if chained, sits right after the link tables */
	struct talitos_desc *desc2 = (struct talitos_desc *)
				     (edesc->buf + edesc->dma_len);

	/* hash context out (ptr[5]); avoid double-unmap when both descs share it */
	unmap_single_talitos_ptr(dev, &edesc->desc.ptr[5], DMA_FROM_DEVICE);
	if (desc->next_desc &&
	    desc->ptr[5].ptr != desc2->ptr[5].ptr)
		unmap_single_talitos_ptr(dev, &desc2->ptr[5], DMA_FROM_DEVICE);
	/* final chunk: hw_context now holds the digest for the caller */
	if (req_ctx->last)
		memcpy(areq->result, req_ctx->hw_context,
		       crypto_ahash_digestsize(tfm));

	if (req_ctx->psrc)
		talitos_sg_unmap(dev, edesc, req_ctx->psrc, NULL, 0, 0);

	/* When using hashctx-in, must unmap it. */
	if (from_talitos_ptr_len(&edesc->desc.ptr[1], is_sec1))
		unmap_single_talitos_ptr(dev, &edesc->desc.ptr[1],
					 DMA_TO_DEVICE);
	else if (desc->next_desc)
		unmap_single_talitos_ptr(dev, &desc2->ptr[1],
					 DMA_TO_DEVICE);

	/* SEC1 maps the buffered partial block via ptr[3] */
	if (is_sec1 && req_ctx->nbuf)
		unmap_single_talitos_ptr(dev, &desc->ptr[3],
					 DMA_TO_DEVICE);

	if (edesc->dma_len)
		dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
				 DMA_BIDIRECTIONAL);

	if (edesc->desc.next_desc)
		dma_unmap_single(dev, be32_to_cpu(edesc->desc.next_desc),
				 TALITOS_DESC_SIZE, DMA_BIDIRECTIONAL);
}
1742*4882a593Smuzhiyun 
ahash_done(struct device * dev,struct talitos_desc * desc,void * context,int err)1743*4882a593Smuzhiyun static void ahash_done(struct device *dev,
1744*4882a593Smuzhiyun 		       struct talitos_desc *desc, void *context,
1745*4882a593Smuzhiyun 		       int err)
1746*4882a593Smuzhiyun {
1747*4882a593Smuzhiyun 	struct ahash_request *areq = context;
1748*4882a593Smuzhiyun 	struct talitos_edesc *edesc =
1749*4882a593Smuzhiyun 		 container_of(desc, struct talitos_edesc, desc);
1750*4882a593Smuzhiyun 	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1751*4882a593Smuzhiyun 
1752*4882a593Smuzhiyun 	if (!req_ctx->last && req_ctx->to_hash_later) {
1753*4882a593Smuzhiyun 		/* Position any partial block for next update/final/finup */
1754*4882a593Smuzhiyun 		req_ctx->buf_idx = (req_ctx->buf_idx + 1) & 1;
1755*4882a593Smuzhiyun 		req_ctx->nbuf = req_ctx->to_hash_later;
1756*4882a593Smuzhiyun 	}
1757*4882a593Smuzhiyun 	common_nonsnoop_hash_unmap(dev, edesc, areq);
1758*4882a593Smuzhiyun 
1759*4882a593Smuzhiyun 	kfree(edesc);
1760*4882a593Smuzhiyun 
1761*4882a593Smuzhiyun 	areq->base.complete(&areq->base, err);
1762*4882a593Smuzhiyun }
1763*4882a593Smuzhiyun 
/*
 * SEC1 doesn't like hashing of 0 sized message, so we do the padding
 * ourself and submit a padded block
 */
static void talitos_handle_buggy_hash(struct talitos_ctx *ctx,
			       struct talitos_edesc *edesc,
			       struct talitos_ptr *ptr)
{
	/*
	 * A pre-padded empty message: 0x80 terminator followed by zeros
	 * (MD5/SHA-style padding; 64 bytes presumably matches the engine's
	 * block size for the supported digests -- confirm for SHA384/512).
	 */
	static u8 padded_hash[64] = {
		0x80, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
		0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
		0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
		0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	};

	pr_err_once("Bug in SEC1, padding ourself\n");
	/* Hardware must not pad again on top of our manual padding */
	edesc->desc.hdr &= ~DESC_HDR_MODE0_MDEU_PAD;
	map_single_talitos_ptr(ctx->dev, ptr, sizeof(padded_hash),
			       (char *)padded_hash, DMA_TO_DEVICE);
}
1784*4882a593Smuzhiyun 
/*
 * Build and submit the SEC descriptor for one hash pass over @length
 * bytes of req_ctx->psrc; @callback runs on completion (ahash_done).
 *
 * Descriptor pointer usage for the hash descriptor type:
 *   ptr[1] = hash context in (skipped on first pass with h/w init)
 *   ptr[2] = HMAC key (only when a key is set)
 *   ptr[3] = data in
 *   ptr[5] = digest out (last pass) or context out (intermediate pass)
 * ptr[0], ptr[4] and ptr[6] stay empty.
 */
static int common_nonsnoop_hash(struct talitos_edesc *edesc,
				struct ahash_request *areq, unsigned int length,
				void (*callback) (struct device *dev,
						  struct talitos_desc *desc,
						  void *context, int error))
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	struct device *dev = ctx->dev;
	struct talitos_desc *desc = &edesc->desc;
	int ret;
	bool sync_needed = false;
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	int sg_count;

	/* first DWORD empty */

	/* hash context in */
	if (!req_ctx->first || req_ctx->swinit) {
		map_single_talitos_ptr_nosync(dev, &desc->ptr[1],
					      req_ctx->hw_context_size,
					      req_ctx->hw_context,
					      DMA_TO_DEVICE);
		req_ctx->swinit = 0;
	}
	/* Indicate next op is not the first. */
	req_ctx->first = 0;

	/* HMAC key */
	if (ctx->keylen)
		to_talitos_ptr(&desc->ptr[2], ctx->dma_key, ctx->keylen,
			       is_sec1);

	/* On SEC1 the buffered bytes are fed through ptr[3] separately */
	if (is_sec1 && req_ctx->nbuf)
		length -= req_ctx->nbuf;

	sg_count = edesc->src_nents ?: 1;
	if (is_sec1 && sg_count > 1)
		sg_copy_to_buffer(req_ctx->psrc, sg_count, edesc->buf, length);
	else if (length)
		sg_count = dma_map_sg(dev, req_ctx->psrc, sg_count,
				      DMA_TO_DEVICE);
	/*
	 * data in
	 */
	if (is_sec1 && req_ctx->nbuf) {
		map_single_talitos_ptr(dev, &desc->ptr[3], req_ctx->nbuf,
				       req_ctx->buf[req_ctx->buf_idx],
				       DMA_TO_DEVICE);
	} else {
		sg_count = talitos_sg_map(dev, req_ctx->psrc, length, edesc,
					  &desc->ptr[3], sg_count, 0, 0);
		if (sg_count > 1)
			sync_needed = true; /* link table must reach the device */
	}

	/* fifth DWORD empty */

	/* hash/HMAC out -or- hash context out */
	if (req_ctx->last)
		map_single_talitos_ptr(dev, &desc->ptr[5],
				       crypto_ahash_digestsize(tfm),
				       req_ctx->hw_context, DMA_FROM_DEVICE);
	else
		map_single_talitos_ptr_nosync(dev, &desc->ptr[5],
					      req_ctx->hw_context_size,
					      req_ctx->hw_context,
					      DMA_FROM_DEVICE);

	/* last DWORD empty */

	/* SEC1 zero-length input: substitute a manually padded block */
	if (is_sec1 && from_talitos_ptr_len(&desc->ptr[3], true) == 0)
		talitos_handle_buggy_hash(ctx, edesc, &desc->ptr[3]);

	/*
	 * SEC1 with both buffered and fresh data: chain a second descriptor
	 * that continues the hash over the fresh data once the first one has
	 * consumed the buffered partial block.
	 */
	if (is_sec1 && req_ctx->nbuf && length) {
		struct talitos_desc *desc2 = (struct talitos_desc *)
					     (edesc->buf + edesc->dma_len);
		dma_addr_t next_desc;

		memset(desc2, 0, sizeof(*desc2));
		desc2->hdr = desc->hdr;
		desc2->hdr &= ~DESC_HDR_MODE0_MDEU_INIT;
		desc2->hdr1 = desc2->hdr;
		/* First descriptor: don't pad, continue, no done notify */
		desc->hdr &= ~DESC_HDR_MODE0_MDEU_PAD;
		desc->hdr |= DESC_HDR_MODE0_MDEU_CONT;
		desc->hdr &= ~DESC_HDR_DONE_NOTIFY;

		if (desc->ptr[1].ptr)
			copy_talitos_ptr(&desc2->ptr[1], &desc->ptr[1],
					 is_sec1);
		else
			map_single_talitos_ptr_nosync(dev, &desc2->ptr[1],
						      req_ctx->hw_context_size,
						      req_ctx->hw_context,
						      DMA_TO_DEVICE);
		copy_talitos_ptr(&desc2->ptr[2], &desc->ptr[2], is_sec1);
		sg_count = talitos_sg_map(dev, req_ctx->psrc, length, edesc,
					  &desc2->ptr[3], sg_count, 0, 0);
		if (sg_count > 1)
			sync_needed = true;
		copy_talitos_ptr(&desc2->ptr[5], &desc->ptr[5], is_sec1);
		/* First descriptor emits context, not the final digest */
		if (req_ctx->last)
			map_single_talitos_ptr_nosync(dev, &desc->ptr[5],
						      req_ctx->hw_context_size,
						      req_ctx->hw_context,
						      DMA_FROM_DEVICE);

		next_desc = dma_map_single(dev, &desc2->hdr1, TALITOS_DESC_SIZE,
					   DMA_BIDIRECTIONAL);
		desc->next_desc = cpu_to_be32(next_desc);
	}

	if (sync_needed)
		dma_sync_single_for_device(dev, edesc->dma_link_tbl,
					   edesc->dma_len, DMA_BIDIRECTIONAL);

	ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
	if (ret != -EINPROGRESS) {
		/* Submit failed: undo all mappings and free the descriptor */
		common_nonsnoop_hash_unmap(dev, edesc, areq);
		kfree(edesc);
	}
	return ret;
}
1910*4882a593Smuzhiyun 
ahash_edesc_alloc(struct ahash_request * areq,unsigned int nbytes)1911*4882a593Smuzhiyun static struct talitos_edesc *ahash_edesc_alloc(struct ahash_request *areq,
1912*4882a593Smuzhiyun 					       unsigned int nbytes)
1913*4882a593Smuzhiyun {
1914*4882a593Smuzhiyun 	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
1915*4882a593Smuzhiyun 	struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
1916*4882a593Smuzhiyun 	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1917*4882a593Smuzhiyun 	struct talitos_private *priv = dev_get_drvdata(ctx->dev);
1918*4882a593Smuzhiyun 	bool is_sec1 = has_ftr_sec1(priv);
1919*4882a593Smuzhiyun 
1920*4882a593Smuzhiyun 	if (is_sec1)
1921*4882a593Smuzhiyun 		nbytes -= req_ctx->nbuf;
1922*4882a593Smuzhiyun 
1923*4882a593Smuzhiyun 	return talitos_edesc_alloc(ctx->dev, req_ctx->psrc, NULL, NULL, 0,
1924*4882a593Smuzhiyun 				   nbytes, 0, 0, 0, areq->base.flags, false);
1925*4882a593Smuzhiyun }
1926*4882a593Smuzhiyun 
/*
 * Begin a new hash operation: reset the per-request buffering state and
 * pick the hardware context size for the transform's digest width.
 */
static int ahash_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
	struct device *dev = ctx->dev;
	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	unsigned int size;
	dma_addr_t dma;

	/* Initialize the context */
	req_ctx->buf_idx = 0;
	req_ctx->nbuf = 0;
	req_ctx->first = 1; /* first indicates h/w must init its context */
	req_ctx->swinit = 0; /* assume h/w init of context */
	/* MD5/SHA1/SHA256 share one context size; SHA384/512 need a larger one */
	size =	(crypto_ahash_digestsize(tfm) <= SHA256_DIGEST_SIZE)
			? TALITOS_MDEU_CONTEXT_SIZE_MD5_SHA1_SHA256
			: TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512;
	req_ctx->hw_context_size = size;

	/*
	 * Map/unmap round trip with no data transfer: presumably flushes
	 * hw_context from the CPU cache so the later _nosync mappings in
	 * common_nonsnoop_hash() are safe on non-coherent platforms --
	 * NOTE(review): confirm against map_single_talitos_ptr_nosync().
	 */
	dma = dma_map_single(dev, req_ctx->hw_context, req_ctx->hw_context_size,
			     DMA_TO_DEVICE);
	dma_unmap_single(dev, dma, req_ctx->hw_context_size, DMA_TO_DEVICE);

	return 0;
}
1952*4882a593Smuzhiyun 
1953*4882a593Smuzhiyun /*
1954*4882a593Smuzhiyun  * on h/w without explicit sha224 support, we initialize h/w context
1955*4882a593Smuzhiyun  * manually with sha224 constants, and tell it to run sha256.
1956*4882a593Smuzhiyun  */
ahash_init_sha224_swinit(struct ahash_request * areq)1957*4882a593Smuzhiyun static int ahash_init_sha224_swinit(struct ahash_request *areq)
1958*4882a593Smuzhiyun {
1959*4882a593Smuzhiyun 	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1960*4882a593Smuzhiyun 
1961*4882a593Smuzhiyun 	req_ctx->hw_context[0] = SHA224_H0;
1962*4882a593Smuzhiyun 	req_ctx->hw_context[1] = SHA224_H1;
1963*4882a593Smuzhiyun 	req_ctx->hw_context[2] = SHA224_H2;
1964*4882a593Smuzhiyun 	req_ctx->hw_context[3] = SHA224_H3;
1965*4882a593Smuzhiyun 	req_ctx->hw_context[4] = SHA224_H4;
1966*4882a593Smuzhiyun 	req_ctx->hw_context[5] = SHA224_H5;
1967*4882a593Smuzhiyun 	req_ctx->hw_context[6] = SHA224_H6;
1968*4882a593Smuzhiyun 	req_ctx->hw_context[7] = SHA224_H7;
1969*4882a593Smuzhiyun 
1970*4882a593Smuzhiyun 	/* init 64-bit count */
1971*4882a593Smuzhiyun 	req_ctx->hw_context[8] = 0;
1972*4882a593Smuzhiyun 	req_ctx->hw_context[9] = 0;
1973*4882a593Smuzhiyun 
1974*4882a593Smuzhiyun 	ahash_init(areq);
1975*4882a593Smuzhiyun 	req_ctx->swinit = 1;/* prevent h/w initting context with sha256 values*/
1976*4882a593Smuzhiyun 
1977*4882a593Smuzhiyun 	return 0;
1978*4882a593Smuzhiyun }
1979*4882a593Smuzhiyun 
/*
 * Common worker for update/final/finup/digest: buffer inputs smaller than
 * one block, hash complete blocks now, stash any trailing partial block
 * for the next call, then build a descriptor and submit it to the SEC.
 *
 * @areq:   the hash request (req_ctx->last selects final vs. middle pass)
 * @nbytes: number of fresh bytes available in areq->src
 *
 * Returns 0 when the data was merely buffered, -EINPROGRESS when a
 * descriptor was queued, or a negative error.
 */
static int ahash_process_req(struct ahash_request *areq, unsigned int nbytes)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	struct talitos_edesc *edesc;
	unsigned int blocksize =
			crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
	unsigned int nbytes_to_hash;
	unsigned int to_hash_later;
	unsigned int nsg;
	int nents;
	struct device *dev = ctx->dev;
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	u8 *ctx_buf = req_ctx->buf[req_ctx->buf_idx];

	if (!req_ctx->last && (nbytes + req_ctx->nbuf <= blocksize)) {
		/* Buffer up to one whole block */
		nents = sg_nents_for_len(areq->src, nbytes);
		if (nents < 0) {
			dev_err(ctx->dev, "Invalid number of src SG.\n");
			return nents;
		}
		sg_copy_to_buffer(areq->src, nents,
				  ctx_buf + req_ctx->nbuf, nbytes);
		req_ctx->nbuf += nbytes;
		return 0;
	}

	/* At least (blocksize + 1) bytes are available to hash */
	nbytes_to_hash = nbytes + req_ctx->nbuf;
	to_hash_later = nbytes_to_hash & (blocksize - 1);

	if (req_ctx->last)
		to_hash_later = 0;
	else if (to_hash_later)
		/* There is a partial block. Hash the full block(s) now */
		nbytes_to_hash -= to_hash_later;
	else {
		/* Keep one block buffered */
		nbytes_to_hash -= blocksize;
		to_hash_later = blocksize;
	}

	/* Chain in any previously buffered data */
	if (!is_sec1 && req_ctx->nbuf) {
		/* SEC2+: prepend the buffered bytes via a small scatterlist */
		nsg = (req_ctx->nbuf < nbytes_to_hash) ? 2 : 1;
		sg_init_table(req_ctx->bufsl, nsg);
		sg_set_buf(req_ctx->bufsl, ctx_buf, req_ctx->nbuf);
		if (nsg > 1)
			sg_chain(req_ctx->bufsl, 2, areq->src);
		req_ctx->psrc = req_ctx->bufsl;
	} else if (is_sec1 && req_ctx->nbuf && req_ctx->nbuf < blocksize) {
		/* SEC1: top the buffer up to a full block, then hash the rest
		 * of areq->src from the offset past the copied bytes.
		 */
		int offset;

		if (nbytes_to_hash > blocksize)
			offset = blocksize - req_ctx->nbuf;
		else
			offset = nbytes_to_hash - req_ctx->nbuf;
		nents = sg_nents_for_len(areq->src, offset);
		if (nents < 0) {
			dev_err(ctx->dev, "Invalid number of src SG.\n");
			return nents;
		}
		sg_copy_to_buffer(areq->src, nents,
				  ctx_buf + req_ctx->nbuf, offset);
		req_ctx->nbuf += offset;
		req_ctx->psrc = scatterwalk_ffwd(req_ctx->bufsl, areq->src,
						 offset);
	} else
		req_ctx->psrc = areq->src;

	/* Save the tail in the other ping-pong buffer; ahash_done() flips
	 * buf_idx to it once this pass completes.
	 */
	if (to_hash_later) {
		nents = sg_nents_for_len(areq->src, nbytes);
		if (nents < 0) {
			dev_err(ctx->dev, "Invalid number of src SG.\n");
			return nents;
		}
		sg_pcopy_to_buffer(areq->src, nents,
				   req_ctx->buf[(req_ctx->buf_idx + 1) & 1],
				      to_hash_later,
				      nbytes - to_hash_later);
	}
	req_ctx->to_hash_later = to_hash_later;

	/* Allocate extended descriptor */
	edesc = ahash_edesc_alloc(areq, nbytes_to_hash);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	edesc->desc.hdr = ctx->desc_hdr_template;

	/* On last one, request SEC to pad; otherwise continue */
	if (req_ctx->last)
		edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_PAD;
	else
		edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_CONT;

	/* request SEC to INIT hash. */
	if (req_ctx->first && !req_ctx->swinit)
		edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_INIT;

	/* When the tfm context has a keylen, it's an HMAC.
	 * A first or last (ie. not middle) descriptor must request HMAC.
	 */
	if (ctx->keylen && (req_ctx->first || req_ctx->last))
		edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_HMAC;

	return common_nonsnoop_hash(edesc, areq, nbytes_to_hash, ahash_done);
}
2091*4882a593Smuzhiyun 
ahash_update(struct ahash_request * areq)2092*4882a593Smuzhiyun static int ahash_update(struct ahash_request *areq)
2093*4882a593Smuzhiyun {
2094*4882a593Smuzhiyun 	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2095*4882a593Smuzhiyun 
2096*4882a593Smuzhiyun 	req_ctx->last = 0;
2097*4882a593Smuzhiyun 
2098*4882a593Smuzhiyun 	return ahash_process_req(areq, areq->nbytes);
2099*4882a593Smuzhiyun }
2100*4882a593Smuzhiyun 
ahash_final(struct ahash_request * areq)2101*4882a593Smuzhiyun static int ahash_final(struct ahash_request *areq)
2102*4882a593Smuzhiyun {
2103*4882a593Smuzhiyun 	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2104*4882a593Smuzhiyun 
2105*4882a593Smuzhiyun 	req_ctx->last = 1;
2106*4882a593Smuzhiyun 
2107*4882a593Smuzhiyun 	return ahash_process_req(areq, 0);
2108*4882a593Smuzhiyun }
2109*4882a593Smuzhiyun 
ahash_finup(struct ahash_request * areq)2110*4882a593Smuzhiyun static int ahash_finup(struct ahash_request *areq)
2111*4882a593Smuzhiyun {
2112*4882a593Smuzhiyun 	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2113*4882a593Smuzhiyun 
2114*4882a593Smuzhiyun 	req_ctx->last = 1;
2115*4882a593Smuzhiyun 
2116*4882a593Smuzhiyun 	return ahash_process_req(areq, areq->nbytes);
2117*4882a593Smuzhiyun }
2118*4882a593Smuzhiyun 
ahash_digest(struct ahash_request * areq)2119*4882a593Smuzhiyun static int ahash_digest(struct ahash_request *areq)
2120*4882a593Smuzhiyun {
2121*4882a593Smuzhiyun 	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2122*4882a593Smuzhiyun 	struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
2123*4882a593Smuzhiyun 
2124*4882a593Smuzhiyun 	ahash->init(areq);
2125*4882a593Smuzhiyun 	req_ctx->last = 1;
2126*4882a593Smuzhiyun 
2127*4882a593Smuzhiyun 	return ahash_process_req(areq, areq->nbytes);
2128*4882a593Smuzhiyun }
2129*4882a593Smuzhiyun 
/*
 * Export the intermediate hash state so the operation can later be
 * resumed with ahash_import().
 */
static int ahash_export(struct ahash_request *areq, void *out)
{
	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	struct talitos_export_state *export = out;
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
	struct device *dev = ctx->dev;
	dma_addr_t dma;

	/*
	 * Map/unmap round trip with DMA_FROM_DEVICE and no data transfer:
	 * presumably invalidates the CPU cache so we read the context the
	 * SEC last wrote via the _nosync mappings -- NOTE(review): confirm
	 * against map_single_talitos_ptr_nosync().
	 */
	dma = dma_map_single(dev, req_ctx->hw_context, req_ctx->hw_context_size,
			     DMA_FROM_DEVICE);
	dma_unmap_single(dev, dma, req_ctx->hw_context_size, DMA_FROM_DEVICE);

	memcpy(export->hw_context, req_ctx->hw_context,
	       req_ctx->hw_context_size);
	/* Also preserve any partial block buffered for the next pass */
	memcpy(export->buf, req_ctx->buf[req_ctx->buf_idx], req_ctx->nbuf);
	export->swinit = req_ctx->swinit;
	export->first = req_ctx->first;
	export->last = req_ctx->last;
	export->to_hash_later = req_ctx->to_hash_later;
	export->nbuf = req_ctx->nbuf;

	return 0;
}
2154*4882a593Smuzhiyun 
/*
 * Restore a hash operation from state previously saved by ahash_export().
 * The buffered partial block always lands in buf[0] since buf_idx is
 * reset to 0 by the memset.
 */
static int ahash_import(struct ahash_request *areq, const void *in)
{
	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
	struct device *dev = ctx->dev;
	const struct talitos_export_state *export = in;
	unsigned int size;
	dma_addr_t dma;

	memset(req_ctx, 0, sizeof(*req_ctx));
	/* Same digest-width based context sizing as ahash_init() */
	size = (crypto_ahash_digestsize(tfm) <= SHA256_DIGEST_SIZE)
			? TALITOS_MDEU_CONTEXT_SIZE_MD5_SHA1_SHA256
			: TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512;
	req_ctx->hw_context_size = size;
	memcpy(req_ctx->hw_context, export->hw_context, size);
	memcpy(req_ctx->buf[0], export->buf, export->nbuf);
	req_ctx->swinit = export->swinit;
	req_ctx->first = export->first;
	req_ctx->last = export->last;
	req_ctx->to_hash_later = export->to_hash_later;
	req_ctx->nbuf = export->nbuf;

	/*
	 * Map/unmap round trip with DMA_TO_DEVICE: presumably flushes the
	 * restored context out of the CPU cache for the later _nosync
	 * device mappings -- NOTE(review): confirm, mirrors ahash_init().
	 */
	dma = dma_map_single(dev, req_ctx->hw_context, req_ctx->hw_context_size,
			     DMA_TO_DEVICE);
	dma_unmap_single(dev, dma, req_ctx->hw_context_size, DMA_TO_DEVICE);

	return 0;
}
2184*4882a593Smuzhiyun 
/*
 * Synchronously digest a long HMAC key into @hash using this tfm's own
 * hash algorithm.  The tfm key length is zeroed first so the operation
 * runs as a plain (unkeyed) hash of @key.
 */
static int keyhash(struct crypto_ahash *tfm, const u8 *key, unsigned int keylen,
		   u8 *hash)
{
	struct talitos_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
	struct ahash_request *req;
	struct scatterlist sg[1];
	struct crypto_wait wait;
	int ret;

	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req)
		return -ENOMEM;

	crypto_init_wait(&wait);

	/* Keep tfm keylen == 0 during hash of the long key */
	ctx->keylen = 0;

	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   crypto_req_done, &wait);
	sg_init_one(&sg[0], key, keylen);
	ahash_request_set_crypt(req, sg, hash, keylen);

	ret = crypto_wait_req(crypto_ahash_digest(req), &wait);

	ahash_request_free(req);

	return ret;
}
2215*4882a593Smuzhiyun 
ahash_setkey(struct crypto_ahash * tfm,const u8 * key,unsigned int keylen)2216*4882a593Smuzhiyun static int ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
2217*4882a593Smuzhiyun 			unsigned int keylen)
2218*4882a593Smuzhiyun {
2219*4882a593Smuzhiyun 	struct talitos_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2220*4882a593Smuzhiyun 	struct device *dev = ctx->dev;
2221*4882a593Smuzhiyun 	unsigned int blocksize =
2222*4882a593Smuzhiyun 			crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
2223*4882a593Smuzhiyun 	unsigned int digestsize = crypto_ahash_digestsize(tfm);
2224*4882a593Smuzhiyun 	unsigned int keysize = keylen;
2225*4882a593Smuzhiyun 	u8 hash[SHA512_DIGEST_SIZE];
2226*4882a593Smuzhiyun 	int ret;
2227*4882a593Smuzhiyun 
2228*4882a593Smuzhiyun 	if (keylen <= blocksize)
2229*4882a593Smuzhiyun 		memcpy(ctx->key, key, keysize);
2230*4882a593Smuzhiyun 	else {
2231*4882a593Smuzhiyun 		/* Must get the hash of the long key */
2232*4882a593Smuzhiyun 		ret = keyhash(tfm, key, keylen, hash);
2233*4882a593Smuzhiyun 
2234*4882a593Smuzhiyun 		if (ret)
2235*4882a593Smuzhiyun 			return -EINVAL;
2236*4882a593Smuzhiyun 
2237*4882a593Smuzhiyun 		keysize = digestsize;
2238*4882a593Smuzhiyun 		memcpy(ctx->key, hash, digestsize);
2239*4882a593Smuzhiyun 	}
2240*4882a593Smuzhiyun 
2241*4882a593Smuzhiyun 	if (ctx->keylen)
2242*4882a593Smuzhiyun 		dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
2243*4882a593Smuzhiyun 
2244*4882a593Smuzhiyun 	ctx->keylen = keysize;
2245*4882a593Smuzhiyun 	ctx->dma_key = dma_map_single(dev, ctx->key, keysize, DMA_TO_DEVICE);
2246*4882a593Smuzhiyun 
2247*4882a593Smuzhiyun 	return 0;
2248*4882a593Smuzhiyun }
2249*4882a593Smuzhiyun 
2250*4882a593Smuzhiyun 
/*
 * Template describing one algorithm this driver can register with the
 * crypto API, plus the SEC descriptor header used to run it.
 */
struct talitos_alg_template {
	u32 type;	/* CRYPTO_ALG_TYPE_* selecting the union member */
	u32 priority;	/* registration priority (0 presumably means the
			 * driver default -- confirm at registration site) */
	union {
		struct skcipher_alg skcipher;
		struct ahash_alg hash;
		struct aead_alg aead;
	} alg;
	__be32 desc_hdr_template;	/* DESC_HDR_* bits for this algorithm */
};
2261*4882a593Smuzhiyun 
2262*4882a593Smuzhiyun static struct talitos_alg_template driver_algs[] = {
2263*4882a593Smuzhiyun 	/* AEAD algorithms.  These use a single-pass ipsec_esp descriptor */
2264*4882a593Smuzhiyun 	{	.type = CRYPTO_ALG_TYPE_AEAD,
2265*4882a593Smuzhiyun 		.alg.aead = {
2266*4882a593Smuzhiyun 			.base = {
2267*4882a593Smuzhiyun 				.cra_name = "authenc(hmac(sha1),cbc(aes))",
2268*4882a593Smuzhiyun 				.cra_driver_name = "authenc-hmac-sha1-"
2269*4882a593Smuzhiyun 						   "cbc-aes-talitos",
2270*4882a593Smuzhiyun 				.cra_blocksize = AES_BLOCK_SIZE,
2271*4882a593Smuzhiyun 				.cra_flags = CRYPTO_ALG_ASYNC |
2272*4882a593Smuzhiyun 					     CRYPTO_ALG_ALLOCATES_MEMORY,
2273*4882a593Smuzhiyun 			},
2274*4882a593Smuzhiyun 			.ivsize = AES_BLOCK_SIZE,
2275*4882a593Smuzhiyun 			.maxauthsize = SHA1_DIGEST_SIZE,
2276*4882a593Smuzhiyun 		},
2277*4882a593Smuzhiyun 		.desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2278*4882a593Smuzhiyun 			             DESC_HDR_SEL0_AESU |
2279*4882a593Smuzhiyun 		                     DESC_HDR_MODE0_AESU_CBC |
2280*4882a593Smuzhiyun 		                     DESC_HDR_SEL1_MDEUA |
2281*4882a593Smuzhiyun 		                     DESC_HDR_MODE1_MDEU_INIT |
2282*4882a593Smuzhiyun 		                     DESC_HDR_MODE1_MDEU_PAD |
2283*4882a593Smuzhiyun 		                     DESC_HDR_MODE1_MDEU_SHA1_HMAC,
2284*4882a593Smuzhiyun 	},
2285*4882a593Smuzhiyun 	{	.type = CRYPTO_ALG_TYPE_AEAD,
2286*4882a593Smuzhiyun 		.priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2287*4882a593Smuzhiyun 		.alg.aead = {
2288*4882a593Smuzhiyun 			.base = {
2289*4882a593Smuzhiyun 				.cra_name = "authenc(hmac(sha1),cbc(aes))",
2290*4882a593Smuzhiyun 				.cra_driver_name = "authenc-hmac-sha1-"
2291*4882a593Smuzhiyun 						   "cbc-aes-talitos-hsna",
2292*4882a593Smuzhiyun 				.cra_blocksize = AES_BLOCK_SIZE,
2293*4882a593Smuzhiyun 				.cra_flags = CRYPTO_ALG_ASYNC |
2294*4882a593Smuzhiyun 					     CRYPTO_ALG_ALLOCATES_MEMORY,
2295*4882a593Smuzhiyun 			},
2296*4882a593Smuzhiyun 			.ivsize = AES_BLOCK_SIZE,
2297*4882a593Smuzhiyun 			.maxauthsize = SHA1_DIGEST_SIZE,
2298*4882a593Smuzhiyun 		},
2299*4882a593Smuzhiyun 		.desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2300*4882a593Smuzhiyun 				     DESC_HDR_SEL0_AESU |
2301*4882a593Smuzhiyun 				     DESC_HDR_MODE0_AESU_CBC |
2302*4882a593Smuzhiyun 				     DESC_HDR_SEL1_MDEUA |
2303*4882a593Smuzhiyun 				     DESC_HDR_MODE1_MDEU_INIT |
2304*4882a593Smuzhiyun 				     DESC_HDR_MODE1_MDEU_PAD |
2305*4882a593Smuzhiyun 				     DESC_HDR_MODE1_MDEU_SHA1_HMAC,
2306*4882a593Smuzhiyun 	},
2307*4882a593Smuzhiyun 	{	.type = CRYPTO_ALG_TYPE_AEAD,
2308*4882a593Smuzhiyun 		.alg.aead = {
2309*4882a593Smuzhiyun 			.base = {
2310*4882a593Smuzhiyun 				.cra_name = "authenc(hmac(sha1),"
2311*4882a593Smuzhiyun 					    "cbc(des3_ede))",
2312*4882a593Smuzhiyun 				.cra_driver_name = "authenc-hmac-sha1-"
2313*4882a593Smuzhiyun 						   "cbc-3des-talitos",
2314*4882a593Smuzhiyun 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2315*4882a593Smuzhiyun 				.cra_flags = CRYPTO_ALG_ASYNC |
2316*4882a593Smuzhiyun 					     CRYPTO_ALG_ALLOCATES_MEMORY,
2317*4882a593Smuzhiyun 			},
2318*4882a593Smuzhiyun 			.ivsize = DES3_EDE_BLOCK_SIZE,
2319*4882a593Smuzhiyun 			.maxauthsize = SHA1_DIGEST_SIZE,
2320*4882a593Smuzhiyun 			.setkey = aead_des3_setkey,
2321*4882a593Smuzhiyun 		},
2322*4882a593Smuzhiyun 		.desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2323*4882a593Smuzhiyun 			             DESC_HDR_SEL0_DEU |
2324*4882a593Smuzhiyun 		                     DESC_HDR_MODE0_DEU_CBC |
2325*4882a593Smuzhiyun 		                     DESC_HDR_MODE0_DEU_3DES |
2326*4882a593Smuzhiyun 		                     DESC_HDR_SEL1_MDEUA |
2327*4882a593Smuzhiyun 		                     DESC_HDR_MODE1_MDEU_INIT |
2328*4882a593Smuzhiyun 		                     DESC_HDR_MODE1_MDEU_PAD |
2329*4882a593Smuzhiyun 		                     DESC_HDR_MODE1_MDEU_SHA1_HMAC,
2330*4882a593Smuzhiyun 	},
	/* hmac(sha1) + 3DES-CBC using the HMAC_SNOOP_NO_AFEU descriptor */
	{	.type = CRYPTO_ALG_TYPE_AEAD,
		.priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "cbc-3des-talitos-hsna",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY,
			},
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
			.setkey = aead_des3_setkey,
		},
		.desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
				     DESC_HDR_SEL0_DEU |
				     DESC_HDR_MODE0_DEU_CBC |
				     DESC_HDR_MODE0_DEU_3DES |
				     DESC_HDR_SEL1_MDEUA |
				     DESC_HDR_MODE1_MDEU_INIT |
				     DESC_HDR_MODE1_MDEU_PAD |
				     DESC_HDR_MODE1_MDEU_SHA1_HMAC,
	},
	/* hmac(sha224) + AES-CBC using the IPSEC_ESP descriptor */
	{       .type = CRYPTO_ALG_TYPE_AEAD,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "cbc-aes-talitos",
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY,
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
				     DESC_HDR_SEL0_AESU |
				     DESC_HDR_MODE0_AESU_CBC |
				     DESC_HDR_SEL1_MDEUA |
				     DESC_HDR_MODE1_MDEU_INIT |
				     DESC_HDR_MODE1_MDEU_PAD |
				     DESC_HDR_MODE1_MDEU_SHA224_HMAC,
	},
	/* hmac(sha224) + AES-CBC, HMAC_SNOOP_NO_AFEU variant (HSNA priority) */
	{       .type = CRYPTO_ALG_TYPE_AEAD,
		.priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "cbc-aes-talitos-hsna",
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY,
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
				     DESC_HDR_SEL0_AESU |
				     DESC_HDR_MODE0_AESU_CBC |
				     DESC_HDR_SEL1_MDEUA |
				     DESC_HDR_MODE1_MDEU_INIT |
				     DESC_HDR_MODE1_MDEU_PAD |
				     DESC_HDR_MODE1_MDEU_SHA224_HMAC,
	},
2399*4882a593Smuzhiyun 	{	.type = CRYPTO_ALG_TYPE_AEAD,
2400*4882a593Smuzhiyun 		.alg.aead = {
2401*4882a593Smuzhiyun 			.base = {
2402*4882a593Smuzhiyun 				.cra_name = "authenc(hmac(sha224),"
2403*4882a593Smuzhiyun 					    "cbc(des3_ede))",
2404*4882a593Smuzhiyun 				.cra_driver_name = "authenc-hmac-sha224-"
2405*4882a593Smuzhiyun 						   "cbc-3des-talitos",
2406*4882a593Smuzhiyun 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2407*4882a593Smuzhiyun 				.cra_flags = CRYPTO_ALG_ASYNC |
2408*4882a593Smuzhiyun 					     CRYPTO_ALG_ALLOCATES_MEMORY,
2409*4882a593Smuzhiyun 			},
2410*4882a593Smuzhiyun 			.ivsize = DES3_EDE_BLOCK_SIZE,
2411*4882a593Smuzhiyun 			.maxauthsize = SHA224_DIGEST_SIZE,
2412*4882a593Smuzhiyun 			.setkey = aead_des3_setkey,
2413*4882a593Smuzhiyun 		},
2414*4882a593Smuzhiyun 		.desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2415*4882a593Smuzhiyun 			             DESC_HDR_SEL0_DEU |
2416*4882a593Smuzhiyun 		                     DESC_HDR_MODE0_DEU_CBC |
2417*4882a593Smuzhiyun 		                     DESC_HDR_MODE0_DEU_3DES |
2418*4882a593Smuzhiyun 		                     DESC_HDR_SEL1_MDEUA |
2419*4882a593Smuzhiyun 		                     DESC_HDR_MODE1_MDEU_INIT |
2420*4882a593Smuzhiyun 		                     DESC_HDR_MODE1_MDEU_PAD |
2421*4882a593Smuzhiyun 		                     DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2422*4882a593Smuzhiyun 	},
	/* hmac(sha224) + 3DES-CBC, HMAC_SNOOP_NO_AFEU variant (HSNA priority) */
	{	.type = CRYPTO_ALG_TYPE_AEAD,
		.priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "cbc-3des-talitos-hsna",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY,
			},
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
			.setkey = aead_des3_setkey,
		},
		.desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
				     DESC_HDR_SEL0_DEU |
				     DESC_HDR_MODE0_DEU_CBC |
				     DESC_HDR_MODE0_DEU_3DES |
				     DESC_HDR_SEL1_MDEUA |
				     DESC_HDR_MODE1_MDEU_INIT |
				     DESC_HDR_MODE1_MDEU_PAD |
				     DESC_HDR_MODE1_MDEU_SHA224_HMAC,
	},
2448*4882a593Smuzhiyun 	{	.type = CRYPTO_ALG_TYPE_AEAD,
2449*4882a593Smuzhiyun 		.alg.aead = {
2450*4882a593Smuzhiyun 			.base = {
2451*4882a593Smuzhiyun 				.cra_name = "authenc(hmac(sha256),cbc(aes))",
2452*4882a593Smuzhiyun 				.cra_driver_name = "authenc-hmac-sha256-"
2453*4882a593Smuzhiyun 						   "cbc-aes-talitos",
2454*4882a593Smuzhiyun 				.cra_blocksize = AES_BLOCK_SIZE,
2455*4882a593Smuzhiyun 				.cra_flags = CRYPTO_ALG_ASYNC |
2456*4882a593Smuzhiyun 					     CRYPTO_ALG_ALLOCATES_MEMORY,
2457*4882a593Smuzhiyun 			},
2458*4882a593Smuzhiyun 			.ivsize = AES_BLOCK_SIZE,
2459*4882a593Smuzhiyun 			.maxauthsize = SHA256_DIGEST_SIZE,
2460*4882a593Smuzhiyun 		},
2461*4882a593Smuzhiyun 		.desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2462*4882a593Smuzhiyun 			             DESC_HDR_SEL0_AESU |
2463*4882a593Smuzhiyun 		                     DESC_HDR_MODE0_AESU_CBC |
2464*4882a593Smuzhiyun 		                     DESC_HDR_SEL1_MDEUA |
2465*4882a593Smuzhiyun 		                     DESC_HDR_MODE1_MDEU_INIT |
2466*4882a593Smuzhiyun 		                     DESC_HDR_MODE1_MDEU_PAD |
2467*4882a593Smuzhiyun 		                     DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2468*4882a593Smuzhiyun 	},
	/* hmac(sha256) + AES-CBC, HMAC_SNOOP_NO_AFEU variant (HSNA priority) */
	{	.type = CRYPTO_ALG_TYPE_AEAD,
		.priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "cbc-aes-talitos-hsna",
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY,
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
				     DESC_HDR_SEL0_AESU |
				     DESC_HDR_MODE0_AESU_CBC |
				     DESC_HDR_SEL1_MDEUA |
				     DESC_HDR_MODE1_MDEU_INIT |
				     DESC_HDR_MODE1_MDEU_PAD |
				     DESC_HDR_MODE1_MDEU_SHA256_HMAC,
	},
2491*4882a593Smuzhiyun 	{	.type = CRYPTO_ALG_TYPE_AEAD,
2492*4882a593Smuzhiyun 		.alg.aead = {
2493*4882a593Smuzhiyun 			.base = {
2494*4882a593Smuzhiyun 				.cra_name = "authenc(hmac(sha256),"
2495*4882a593Smuzhiyun 					    "cbc(des3_ede))",
2496*4882a593Smuzhiyun 				.cra_driver_name = "authenc-hmac-sha256-"
2497*4882a593Smuzhiyun 						   "cbc-3des-talitos",
2498*4882a593Smuzhiyun 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2499*4882a593Smuzhiyun 				.cra_flags = CRYPTO_ALG_ASYNC |
2500*4882a593Smuzhiyun 					     CRYPTO_ALG_ALLOCATES_MEMORY,
2501*4882a593Smuzhiyun 			},
2502*4882a593Smuzhiyun 			.ivsize = DES3_EDE_BLOCK_SIZE,
2503*4882a593Smuzhiyun 			.maxauthsize = SHA256_DIGEST_SIZE,
2504*4882a593Smuzhiyun 			.setkey = aead_des3_setkey,
2505*4882a593Smuzhiyun 		},
2506*4882a593Smuzhiyun 		.desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2507*4882a593Smuzhiyun 			             DESC_HDR_SEL0_DEU |
2508*4882a593Smuzhiyun 		                     DESC_HDR_MODE0_DEU_CBC |
2509*4882a593Smuzhiyun 		                     DESC_HDR_MODE0_DEU_3DES |
2510*4882a593Smuzhiyun 		                     DESC_HDR_SEL1_MDEUA |
2511*4882a593Smuzhiyun 		                     DESC_HDR_MODE1_MDEU_INIT |
2512*4882a593Smuzhiyun 		                     DESC_HDR_MODE1_MDEU_PAD |
2513*4882a593Smuzhiyun 		                     DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2514*4882a593Smuzhiyun 	},
	/* hmac(sha256) + 3DES-CBC, HMAC_SNOOP_NO_AFEU variant (HSNA priority) */
	{	.type = CRYPTO_ALG_TYPE_AEAD,
		.priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "cbc-3des-talitos-hsna",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY,
			},
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
			.setkey = aead_des3_setkey,
		},
		.desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
				     DESC_HDR_SEL0_DEU |
				     DESC_HDR_MODE0_DEU_CBC |
				     DESC_HDR_MODE0_DEU_3DES |
				     DESC_HDR_SEL1_MDEUA |
				     DESC_HDR_MODE1_MDEU_INIT |
				     DESC_HDR_MODE1_MDEU_PAD |
				     DESC_HDR_MODE1_MDEU_SHA256_HMAC,
	},
2540*4882a593Smuzhiyun 	{	.type = CRYPTO_ALG_TYPE_AEAD,
2541*4882a593Smuzhiyun 		.alg.aead = {
2542*4882a593Smuzhiyun 			.base = {
2543*4882a593Smuzhiyun 				.cra_name = "authenc(hmac(sha384),cbc(aes))",
2544*4882a593Smuzhiyun 				.cra_driver_name = "authenc-hmac-sha384-"
2545*4882a593Smuzhiyun 						   "cbc-aes-talitos",
2546*4882a593Smuzhiyun 				.cra_blocksize = AES_BLOCK_SIZE,
2547*4882a593Smuzhiyun 				.cra_flags = CRYPTO_ALG_ASYNC |
2548*4882a593Smuzhiyun 					     CRYPTO_ALG_ALLOCATES_MEMORY,
2549*4882a593Smuzhiyun 			},
2550*4882a593Smuzhiyun 			.ivsize = AES_BLOCK_SIZE,
2551*4882a593Smuzhiyun 			.maxauthsize = SHA384_DIGEST_SIZE,
2552*4882a593Smuzhiyun 		},
2553*4882a593Smuzhiyun 		.desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2554*4882a593Smuzhiyun 			             DESC_HDR_SEL0_AESU |
2555*4882a593Smuzhiyun 		                     DESC_HDR_MODE0_AESU_CBC |
2556*4882a593Smuzhiyun 		                     DESC_HDR_SEL1_MDEUB |
2557*4882a593Smuzhiyun 		                     DESC_HDR_MODE1_MDEU_INIT |
2558*4882a593Smuzhiyun 		                     DESC_HDR_MODE1_MDEU_PAD |
2559*4882a593Smuzhiyun 		                     DESC_HDR_MODE1_MDEUB_SHA384_HMAC,
2560*4882a593Smuzhiyun 	},
2561*4882a593Smuzhiyun 	{	.type = CRYPTO_ALG_TYPE_AEAD,
2562*4882a593Smuzhiyun 		.alg.aead = {
2563*4882a593Smuzhiyun 			.base = {
2564*4882a593Smuzhiyun 				.cra_name = "authenc(hmac(sha384),"
2565*4882a593Smuzhiyun 					    "cbc(des3_ede))",
2566*4882a593Smuzhiyun 				.cra_driver_name = "authenc-hmac-sha384-"
2567*4882a593Smuzhiyun 						   "cbc-3des-talitos",
2568*4882a593Smuzhiyun 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2569*4882a593Smuzhiyun 				.cra_flags = CRYPTO_ALG_ASYNC |
2570*4882a593Smuzhiyun 					     CRYPTO_ALG_ALLOCATES_MEMORY,
2571*4882a593Smuzhiyun 			},
2572*4882a593Smuzhiyun 			.ivsize = DES3_EDE_BLOCK_SIZE,
2573*4882a593Smuzhiyun 			.maxauthsize = SHA384_DIGEST_SIZE,
2574*4882a593Smuzhiyun 			.setkey = aead_des3_setkey,
2575*4882a593Smuzhiyun 		},
2576*4882a593Smuzhiyun 		.desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2577*4882a593Smuzhiyun 			             DESC_HDR_SEL0_DEU |
2578*4882a593Smuzhiyun 		                     DESC_HDR_MODE0_DEU_CBC |
2579*4882a593Smuzhiyun 		                     DESC_HDR_MODE0_DEU_3DES |
2580*4882a593Smuzhiyun 		                     DESC_HDR_SEL1_MDEUB |
2581*4882a593Smuzhiyun 		                     DESC_HDR_MODE1_MDEU_INIT |
2582*4882a593Smuzhiyun 		                     DESC_HDR_MODE1_MDEU_PAD |
2583*4882a593Smuzhiyun 		                     DESC_HDR_MODE1_MDEUB_SHA384_HMAC,
2584*4882a593Smuzhiyun 	},
2585*4882a593Smuzhiyun 	{	.type = CRYPTO_ALG_TYPE_AEAD,
2586*4882a593Smuzhiyun 		.alg.aead = {
2587*4882a593Smuzhiyun 			.base = {
2588*4882a593Smuzhiyun 				.cra_name = "authenc(hmac(sha512),cbc(aes))",
2589*4882a593Smuzhiyun 				.cra_driver_name = "authenc-hmac-sha512-"
2590*4882a593Smuzhiyun 						   "cbc-aes-talitos",
2591*4882a593Smuzhiyun 				.cra_blocksize = AES_BLOCK_SIZE,
2592*4882a593Smuzhiyun 				.cra_flags = CRYPTO_ALG_ASYNC |
2593*4882a593Smuzhiyun 					     CRYPTO_ALG_ALLOCATES_MEMORY,
2594*4882a593Smuzhiyun 			},
2595*4882a593Smuzhiyun 			.ivsize = AES_BLOCK_SIZE,
2596*4882a593Smuzhiyun 			.maxauthsize = SHA512_DIGEST_SIZE,
2597*4882a593Smuzhiyun 		},
2598*4882a593Smuzhiyun 		.desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2599*4882a593Smuzhiyun 			             DESC_HDR_SEL0_AESU |
2600*4882a593Smuzhiyun 		                     DESC_HDR_MODE0_AESU_CBC |
2601*4882a593Smuzhiyun 		                     DESC_HDR_SEL1_MDEUB |
2602*4882a593Smuzhiyun 		                     DESC_HDR_MODE1_MDEU_INIT |
2603*4882a593Smuzhiyun 		                     DESC_HDR_MODE1_MDEU_PAD |
2604*4882a593Smuzhiyun 		                     DESC_HDR_MODE1_MDEUB_SHA512_HMAC,
2605*4882a593Smuzhiyun 	},
2606*4882a593Smuzhiyun 	{	.type = CRYPTO_ALG_TYPE_AEAD,
2607*4882a593Smuzhiyun 		.alg.aead = {
2608*4882a593Smuzhiyun 			.base = {
2609*4882a593Smuzhiyun 				.cra_name = "authenc(hmac(sha512),"
2610*4882a593Smuzhiyun 					    "cbc(des3_ede))",
2611*4882a593Smuzhiyun 				.cra_driver_name = "authenc-hmac-sha512-"
2612*4882a593Smuzhiyun 						   "cbc-3des-talitos",
2613*4882a593Smuzhiyun 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2614*4882a593Smuzhiyun 				.cra_flags = CRYPTO_ALG_ASYNC |
2615*4882a593Smuzhiyun 					     CRYPTO_ALG_ALLOCATES_MEMORY,
2616*4882a593Smuzhiyun 			},
2617*4882a593Smuzhiyun 			.ivsize = DES3_EDE_BLOCK_SIZE,
2618*4882a593Smuzhiyun 			.maxauthsize = SHA512_DIGEST_SIZE,
2619*4882a593Smuzhiyun 			.setkey = aead_des3_setkey,
2620*4882a593Smuzhiyun 		},
2621*4882a593Smuzhiyun 		.desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2622*4882a593Smuzhiyun 			             DESC_HDR_SEL0_DEU |
2623*4882a593Smuzhiyun 		                     DESC_HDR_MODE0_DEU_CBC |
2624*4882a593Smuzhiyun 		                     DESC_HDR_MODE0_DEU_3DES |
2625*4882a593Smuzhiyun 		                     DESC_HDR_SEL1_MDEUB |
2626*4882a593Smuzhiyun 		                     DESC_HDR_MODE1_MDEU_INIT |
2627*4882a593Smuzhiyun 		                     DESC_HDR_MODE1_MDEU_PAD |
2628*4882a593Smuzhiyun 		                     DESC_HDR_MODE1_MDEUB_SHA512_HMAC,
2629*4882a593Smuzhiyun 	},
2630*4882a593Smuzhiyun 	{	.type = CRYPTO_ALG_TYPE_AEAD,
2631*4882a593Smuzhiyun 		.alg.aead = {
2632*4882a593Smuzhiyun 			.base = {
2633*4882a593Smuzhiyun 				.cra_name = "authenc(hmac(md5),cbc(aes))",
2634*4882a593Smuzhiyun 				.cra_driver_name = "authenc-hmac-md5-"
2635*4882a593Smuzhiyun 						   "cbc-aes-talitos",
2636*4882a593Smuzhiyun 				.cra_blocksize = AES_BLOCK_SIZE,
2637*4882a593Smuzhiyun 				.cra_flags = CRYPTO_ALG_ASYNC |
2638*4882a593Smuzhiyun 					     CRYPTO_ALG_ALLOCATES_MEMORY,
2639*4882a593Smuzhiyun 			},
2640*4882a593Smuzhiyun 			.ivsize = AES_BLOCK_SIZE,
2641*4882a593Smuzhiyun 			.maxauthsize = MD5_DIGEST_SIZE,
2642*4882a593Smuzhiyun 		},
2643*4882a593Smuzhiyun 		.desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2644*4882a593Smuzhiyun 			             DESC_HDR_SEL0_AESU |
2645*4882a593Smuzhiyun 		                     DESC_HDR_MODE0_AESU_CBC |
2646*4882a593Smuzhiyun 		                     DESC_HDR_SEL1_MDEUA |
2647*4882a593Smuzhiyun 		                     DESC_HDR_MODE1_MDEU_INIT |
2648*4882a593Smuzhiyun 		                     DESC_HDR_MODE1_MDEU_PAD |
2649*4882a593Smuzhiyun 		                     DESC_HDR_MODE1_MDEU_MD5_HMAC,
2650*4882a593Smuzhiyun 	},
	/* hmac(md5) + AES-CBC, HMAC_SNOOP_NO_AFEU variant (HSNA priority) */
	{	.type = CRYPTO_ALG_TYPE_AEAD,
		.priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(aes))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "cbc-aes-talitos-hsna",
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY,
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
				     DESC_HDR_SEL0_AESU |
				     DESC_HDR_MODE0_AESU_CBC |
				     DESC_HDR_SEL1_MDEUA |
				     DESC_HDR_MODE1_MDEU_INIT |
				     DESC_HDR_MODE1_MDEU_PAD |
				     DESC_HDR_MODE1_MDEU_MD5_HMAC,
	},
2673*4882a593Smuzhiyun 	{	.type = CRYPTO_ALG_TYPE_AEAD,
2674*4882a593Smuzhiyun 		.alg.aead = {
2675*4882a593Smuzhiyun 			.base = {
2676*4882a593Smuzhiyun 				.cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2677*4882a593Smuzhiyun 				.cra_driver_name = "authenc-hmac-md5-"
2678*4882a593Smuzhiyun 						   "cbc-3des-talitos",
2679*4882a593Smuzhiyun 				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2680*4882a593Smuzhiyun 				.cra_flags = CRYPTO_ALG_ASYNC |
2681*4882a593Smuzhiyun 					     CRYPTO_ALG_ALLOCATES_MEMORY,
2682*4882a593Smuzhiyun 			},
2683*4882a593Smuzhiyun 			.ivsize = DES3_EDE_BLOCK_SIZE,
2684*4882a593Smuzhiyun 			.maxauthsize = MD5_DIGEST_SIZE,
2685*4882a593Smuzhiyun 			.setkey = aead_des3_setkey,
2686*4882a593Smuzhiyun 		},
2687*4882a593Smuzhiyun 		.desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2688*4882a593Smuzhiyun 			             DESC_HDR_SEL0_DEU |
2689*4882a593Smuzhiyun 		                     DESC_HDR_MODE0_DEU_CBC |
2690*4882a593Smuzhiyun 		                     DESC_HDR_MODE0_DEU_3DES |
2691*4882a593Smuzhiyun 		                     DESC_HDR_SEL1_MDEUA |
2692*4882a593Smuzhiyun 		                     DESC_HDR_MODE1_MDEU_INIT |
2693*4882a593Smuzhiyun 		                     DESC_HDR_MODE1_MDEU_PAD |
2694*4882a593Smuzhiyun 		                     DESC_HDR_MODE1_MDEU_MD5_HMAC,
2695*4882a593Smuzhiyun 	},
	/* hmac(md5) + 3DES-CBC, HMAC_SNOOP_NO_AFEU variant (HSNA priority) */
	{	.type = CRYPTO_ALG_TYPE_AEAD,
		.priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "cbc-3des-talitos-hsna",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY,
			},
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
			.setkey = aead_des3_setkey,
		},
		.desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
				     DESC_HDR_SEL0_DEU |
				     DESC_HDR_MODE0_DEU_CBC |
				     DESC_HDR_MODE0_DEU_3DES |
				     DESC_HDR_SEL1_MDEUA |
				     DESC_HDR_MODE1_MDEU_INIT |
				     DESC_HDR_MODE1_MDEU_PAD |
				     DESC_HDR_MODE1_MDEU_MD5_HMAC,
	},
	/* SKCIPHER algorithms. */
	/* ecb(aes): AESU, no mode bits (ECB), no IV */
	{	.type = CRYPTO_ALG_TYPE_SKCIPHER,
		.alg.skcipher = {
			.base.cra_name = "ecb(aes)",
			.base.cra_driver_name = "ecb-aes-talitos",
			.base.cra_blocksize = AES_BLOCK_SIZE,
			.base.cra_flags = CRYPTO_ALG_ASYNC |
					  CRYPTO_ALG_ALLOCATES_MEMORY,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.setkey = skcipher_aes_setkey,
		},
		.desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
				     DESC_HDR_SEL0_AESU,
	},
	/* cbc(aes): AESU in CBC mode */
	{	.type = CRYPTO_ALG_TYPE_SKCIPHER,
		.alg.skcipher = {
			.base.cra_name = "cbc(aes)",
			.base.cra_driver_name = "cbc-aes-talitos",
			.base.cra_blocksize = AES_BLOCK_SIZE,
			.base.cra_flags = CRYPTO_ALG_ASYNC |
					  CRYPTO_ALG_ALLOCATES_MEMORY,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			.setkey = skcipher_aes_setkey,
		},
		.desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
				     DESC_HDR_SEL0_AESU |
				     DESC_HDR_MODE0_AESU_CBC,
	},
	/*
	 * ctr(aes) using the AESU CTR non-snoop descriptor type.
	 * NOTE(review): this entry and the next share the driver name
	 * "ctr-aes-talitos" and differ only in descriptor type;
	 * presumably only the variant supported by the probed hardware
	 * is registered -- verify against the registration logic.
	 */
	{	.type = CRYPTO_ALG_TYPE_SKCIPHER,
		.alg.skcipher = {
			.base.cra_name = "ctr(aes)",
			.base.cra_driver_name = "ctr-aes-talitos",
			.base.cra_blocksize = 1,
			.base.cra_flags = CRYPTO_ALG_ASYNC |
					  CRYPTO_ALG_ALLOCATES_MEMORY,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			.setkey = skcipher_aes_setkey,
		},
		.desc_hdr_template = DESC_HDR_TYPE_AESU_CTR_NONSNOOP |
				     DESC_HDR_SEL0_AESU |
				     DESC_HDR_MODE0_AESU_CTR,
	},
	/* ctr(aes) variant using the common non-snoop descriptor type */
	{	.type = CRYPTO_ALG_TYPE_SKCIPHER,
		.alg.skcipher = {
			.base.cra_name = "ctr(aes)",
			.base.cra_driver_name = "ctr-aes-talitos",
			.base.cra_blocksize = 1,
			.base.cra_flags = CRYPTO_ALG_ASYNC |
					  CRYPTO_ALG_ALLOCATES_MEMORY,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			.setkey = skcipher_aes_setkey,
		},
		.desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
				     DESC_HDR_SEL0_AESU |
				     DESC_HDR_MODE0_AESU_CTR,
	},
	/* ecb(des): DEU, no mode bits (single DES, ECB), no IV */
	{	.type = CRYPTO_ALG_TYPE_SKCIPHER,
		.alg.skcipher = {
			.base.cra_name = "ecb(des)",
			.base.cra_driver_name = "ecb-des-talitos",
			.base.cra_blocksize = DES_BLOCK_SIZE,
			.base.cra_flags = CRYPTO_ALG_ASYNC |
					  CRYPTO_ALG_ALLOCATES_MEMORY,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.setkey = skcipher_des_setkey,
		},
		.desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
				     DESC_HDR_SEL0_DEU,
	},
	/* cbc(des): DEU in CBC mode */
	{	.type = CRYPTO_ALG_TYPE_SKCIPHER,
		.alg.skcipher = {
			.base.cra_name = "cbc(des)",
			.base.cra_driver_name = "cbc-des-talitos",
			.base.cra_blocksize = DES_BLOCK_SIZE,
			.base.cra_flags = CRYPTO_ALG_ASYNC |
					  CRYPTO_ALG_ALLOCATES_MEMORY,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
			.setkey = skcipher_des_setkey,
		},
		.desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
				     DESC_HDR_SEL0_DEU |
				     DESC_HDR_MODE0_DEU_CBC,
	},
	/* ecb(des3_ede): DEU in 3DES mode, no IV */
	{	.type = CRYPTO_ALG_TYPE_SKCIPHER,
		.alg.skcipher = {
			.base.cra_name = "ecb(des3_ede)",
			.base.cra_driver_name = "ecb-3des-talitos",
			.base.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.base.cra_flags = CRYPTO_ALG_ASYNC |
					  CRYPTO_ALG_ALLOCATES_MEMORY,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.setkey = skcipher_des3_setkey,
		},
		.desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
				     DESC_HDR_SEL0_DEU |
				     DESC_HDR_MODE0_DEU_3DES,
	},
2828*4882a593Smuzhiyun 	{	.type = CRYPTO_ALG_TYPE_SKCIPHER,
2829*4882a593Smuzhiyun 		.alg.skcipher = {
2830*4882a593Smuzhiyun 			.base.cra_name = "cbc(des3_ede)",
2831*4882a593Smuzhiyun 			.base.cra_driver_name = "cbc-3des-talitos",
2832*4882a593Smuzhiyun 			.base.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2833*4882a593Smuzhiyun 			.base.cra_flags = CRYPTO_ALG_ASYNC |
2834*4882a593Smuzhiyun 					  CRYPTO_ALG_ALLOCATES_MEMORY,
2835*4882a593Smuzhiyun 			.min_keysize = DES3_EDE_KEY_SIZE,
2836*4882a593Smuzhiyun 			.max_keysize = DES3_EDE_KEY_SIZE,
2837*4882a593Smuzhiyun 			.ivsize = DES3_EDE_BLOCK_SIZE,
2838*4882a593Smuzhiyun 			.setkey = skcipher_des3_setkey,
2839*4882a593Smuzhiyun 		},
2840*4882a593Smuzhiyun 		.desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2841*4882a593Smuzhiyun 			             DESC_HDR_SEL0_DEU |
2842*4882a593Smuzhiyun 		                     DESC_HDR_MODE0_DEU_CBC |
2843*4882a593Smuzhiyun 		                     DESC_HDR_MODE0_DEU_3DES,
2844*4882a593Smuzhiyun 	},
	/* AHASH algorithms. */
	/* md5: MDEU-A in MD5 mode */
	{	.type = CRYPTO_ALG_TYPE_AHASH,
		.alg.hash = {
			.halg.digestsize = MD5_DIGEST_SIZE,
			.halg.statesize = sizeof(struct talitos_export_state),
			.halg.base = {
				.cra_name = "md5",
				.cra_driver_name = "md5-talitos",
				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY,
			}
		},
		.desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
				     DESC_HDR_SEL0_MDEUA |
				     DESC_HDR_MODE0_MDEU_MD5,
	},
	/* sha1: MDEU-A in SHA1 mode */
	{	.type = CRYPTO_ALG_TYPE_AHASH,
		.alg.hash = {
			.halg.digestsize = SHA1_DIGEST_SIZE,
			.halg.statesize = sizeof(struct talitos_export_state),
			.halg.base = {
				.cra_name = "sha1",
				.cra_driver_name = "sha1-talitos",
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY,
			}
		},
		.desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
				     DESC_HDR_SEL0_MDEUA |
				     DESC_HDR_MODE0_MDEU_SHA1,
	},
	/* sha224: MDEU-A in SHA224 mode */
	{	.type = CRYPTO_ALG_TYPE_AHASH,
		.alg.hash = {
			.halg.digestsize = SHA224_DIGEST_SIZE,
			.halg.statesize = sizeof(struct talitos_export_state),
			.halg.base = {
				.cra_name = "sha224",
				.cra_driver_name = "sha224-talitos",
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY,
			}
		},
		.desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
				     DESC_HDR_SEL0_MDEUA |
				     DESC_HDR_MODE0_MDEU_SHA224,
	},
	/* sha256: MDEU-A in SHA256 mode */
	{	.type = CRYPTO_ALG_TYPE_AHASH,
		.alg.hash = {
			.halg.digestsize = SHA256_DIGEST_SIZE,
			.halg.statesize = sizeof(struct talitos_export_state),
			.halg.base = {
				.cra_name = "sha256",
				.cra_driver_name = "sha256-talitos",
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY,
			}
		},
		.desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
				     DESC_HDR_SEL0_MDEUA |
				     DESC_HDR_MODE0_MDEU_SHA256,
	},
	/* sha384: MDEU-B in SHA384 mode */
	{	.type = CRYPTO_ALG_TYPE_AHASH,
		.alg.hash = {
			.halg.digestsize = SHA384_DIGEST_SIZE,
			.halg.statesize = sizeof(struct talitos_export_state),
			.halg.base = {
				.cra_name = "sha384",
				.cra_driver_name = "sha384-talitos",
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY,
			}
		},
		.desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
				     DESC_HDR_SEL0_MDEUB |
				     DESC_HDR_MODE0_MDEUB_SHA384,
	},
	/* sha512: MDEU-B in SHA512 mode */
	{	.type = CRYPTO_ALG_TYPE_AHASH,
		.alg.hash = {
			.halg.digestsize = SHA512_DIGEST_SIZE,
			.halg.statesize = sizeof(struct talitos_export_state),
			.halg.base = {
				.cra_name = "sha512",
				.cra_driver_name = "sha512-talitos",
				.cra_blocksize = SHA512_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY,
			}
		},
		.desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
				     DESC_HDR_SEL0_MDEUB |
				     DESC_HDR_MODE0_MDEUB_SHA512,
	},
	/* hmac(md5): same MDEU-A MD5 template as the plain hash */
	{	.type = CRYPTO_ALG_TYPE_AHASH,
		.alg.hash = {
			.halg.digestsize = MD5_DIGEST_SIZE,
			.halg.statesize = sizeof(struct talitos_export_state),
			.halg.base = {
				.cra_name = "hmac(md5)",
				.cra_driver_name = "hmac-md5-talitos",
				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY,
			}
		},
		.desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
				     DESC_HDR_SEL0_MDEUA |
				     DESC_HDR_MODE0_MDEU_MD5,
	},
	/* hmac(sha1): same MDEU-A SHA1 template as the plain hash */
	{	.type = CRYPTO_ALG_TYPE_AHASH,
		.alg.hash = {
			.halg.digestsize = SHA1_DIGEST_SIZE,
			.halg.statesize = sizeof(struct talitos_export_state),
			.halg.base = {
				.cra_name = "hmac(sha1)",
				.cra_driver_name = "hmac-sha1-talitos",
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC |
					     CRYPTO_ALG_ALLOCATES_MEMORY,
			}
		},
		.desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
				     DESC_HDR_SEL0_MDEUA |
				     DESC_HDR_MODE0_MDEU_SHA1,
	},
2974*4882a593Smuzhiyun 	{	.type = CRYPTO_ALG_TYPE_AHASH,
2975*4882a593Smuzhiyun 		.alg.hash = {
2976*4882a593Smuzhiyun 			.halg.digestsize = SHA224_DIGEST_SIZE,
2977*4882a593Smuzhiyun 			.halg.statesize = sizeof(struct talitos_export_state),
2978*4882a593Smuzhiyun 			.halg.base = {
2979*4882a593Smuzhiyun 				.cra_name = "hmac(sha224)",
2980*4882a593Smuzhiyun 				.cra_driver_name = "hmac-sha224-talitos",
2981*4882a593Smuzhiyun 				.cra_blocksize = SHA224_BLOCK_SIZE,
2982*4882a593Smuzhiyun 				.cra_flags = CRYPTO_ALG_ASYNC |
2983*4882a593Smuzhiyun 					     CRYPTO_ALG_ALLOCATES_MEMORY,
2984*4882a593Smuzhiyun 			}
2985*4882a593Smuzhiyun 		},
2986*4882a593Smuzhiyun 		.desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2987*4882a593Smuzhiyun 				     DESC_HDR_SEL0_MDEUA |
2988*4882a593Smuzhiyun 				     DESC_HDR_MODE0_MDEU_SHA224,
2989*4882a593Smuzhiyun 	},
2990*4882a593Smuzhiyun 	{	.type = CRYPTO_ALG_TYPE_AHASH,
2991*4882a593Smuzhiyun 		.alg.hash = {
2992*4882a593Smuzhiyun 			.halg.digestsize = SHA256_DIGEST_SIZE,
2993*4882a593Smuzhiyun 			.halg.statesize = sizeof(struct talitos_export_state),
2994*4882a593Smuzhiyun 			.halg.base = {
2995*4882a593Smuzhiyun 				.cra_name = "hmac(sha256)",
2996*4882a593Smuzhiyun 				.cra_driver_name = "hmac-sha256-talitos",
2997*4882a593Smuzhiyun 				.cra_blocksize = SHA256_BLOCK_SIZE,
2998*4882a593Smuzhiyun 				.cra_flags = CRYPTO_ALG_ASYNC |
2999*4882a593Smuzhiyun 					     CRYPTO_ALG_ALLOCATES_MEMORY,
3000*4882a593Smuzhiyun 			}
3001*4882a593Smuzhiyun 		},
3002*4882a593Smuzhiyun 		.desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
3003*4882a593Smuzhiyun 				     DESC_HDR_SEL0_MDEUA |
3004*4882a593Smuzhiyun 				     DESC_HDR_MODE0_MDEU_SHA256,
3005*4882a593Smuzhiyun 	},
3006*4882a593Smuzhiyun 	{	.type = CRYPTO_ALG_TYPE_AHASH,
3007*4882a593Smuzhiyun 		.alg.hash = {
3008*4882a593Smuzhiyun 			.halg.digestsize = SHA384_DIGEST_SIZE,
3009*4882a593Smuzhiyun 			.halg.statesize = sizeof(struct talitos_export_state),
3010*4882a593Smuzhiyun 			.halg.base = {
3011*4882a593Smuzhiyun 				.cra_name = "hmac(sha384)",
3012*4882a593Smuzhiyun 				.cra_driver_name = "hmac-sha384-talitos",
3013*4882a593Smuzhiyun 				.cra_blocksize = SHA384_BLOCK_SIZE,
3014*4882a593Smuzhiyun 				.cra_flags = CRYPTO_ALG_ASYNC |
3015*4882a593Smuzhiyun 					     CRYPTO_ALG_ALLOCATES_MEMORY,
3016*4882a593Smuzhiyun 			}
3017*4882a593Smuzhiyun 		},
3018*4882a593Smuzhiyun 		.desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
3019*4882a593Smuzhiyun 				     DESC_HDR_SEL0_MDEUB |
3020*4882a593Smuzhiyun 				     DESC_HDR_MODE0_MDEUB_SHA384,
3021*4882a593Smuzhiyun 	},
3022*4882a593Smuzhiyun 	{	.type = CRYPTO_ALG_TYPE_AHASH,
3023*4882a593Smuzhiyun 		.alg.hash = {
3024*4882a593Smuzhiyun 			.halg.digestsize = SHA512_DIGEST_SIZE,
3025*4882a593Smuzhiyun 			.halg.statesize = sizeof(struct talitos_export_state),
3026*4882a593Smuzhiyun 			.halg.base = {
3027*4882a593Smuzhiyun 				.cra_name = "hmac(sha512)",
3028*4882a593Smuzhiyun 				.cra_driver_name = "hmac-sha512-talitos",
3029*4882a593Smuzhiyun 				.cra_blocksize = SHA512_BLOCK_SIZE,
3030*4882a593Smuzhiyun 				.cra_flags = CRYPTO_ALG_ASYNC |
3031*4882a593Smuzhiyun 					     CRYPTO_ALG_ALLOCATES_MEMORY,
3032*4882a593Smuzhiyun 			}
3033*4882a593Smuzhiyun 		},
3034*4882a593Smuzhiyun 		.desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
3035*4882a593Smuzhiyun 				     DESC_HDR_SEL0_MDEUB |
3036*4882a593Smuzhiyun 				     DESC_HDR_MODE0_MDEUB_SHA512,
3037*4882a593Smuzhiyun 	}
3038*4882a593Smuzhiyun };
3039*4882a593Smuzhiyun 
/*
 * Per-algorithm registration record: ties one copied template (and its
 * descriptor header) to the SEC device it was registered against, and
 * links it into talitos_private's alg_list for teardown in talitos_remove().
 */
struct talitos_crypto_alg {
	struct list_head entry;		/* node in talitos_private alg_list */
	struct device *dev;		/* SEC device owning this algorithm */
	struct talitos_alg_template algt;	/* private copy of the template */
};
3045*4882a593Smuzhiyun 
talitos_init_common(struct talitos_ctx * ctx,struct talitos_crypto_alg * talitos_alg)3046*4882a593Smuzhiyun static int talitos_init_common(struct talitos_ctx *ctx,
3047*4882a593Smuzhiyun 			       struct talitos_crypto_alg *talitos_alg)
3048*4882a593Smuzhiyun {
3049*4882a593Smuzhiyun 	struct talitos_private *priv;
3050*4882a593Smuzhiyun 
3051*4882a593Smuzhiyun 	/* update context with ptr to dev */
3052*4882a593Smuzhiyun 	ctx->dev = talitos_alg->dev;
3053*4882a593Smuzhiyun 
3054*4882a593Smuzhiyun 	/* assign SEC channel to tfm in round-robin fashion */
3055*4882a593Smuzhiyun 	priv = dev_get_drvdata(ctx->dev);
3056*4882a593Smuzhiyun 	ctx->ch = atomic_inc_return(&priv->last_chan) &
3057*4882a593Smuzhiyun 		  (priv->num_channels - 1);
3058*4882a593Smuzhiyun 
3059*4882a593Smuzhiyun 	/* copy descriptor header template value */
3060*4882a593Smuzhiyun 	ctx->desc_hdr_template = talitos_alg->algt.desc_hdr_template;
3061*4882a593Smuzhiyun 
3062*4882a593Smuzhiyun 	/* select done notification */
3063*4882a593Smuzhiyun 	ctx->desc_hdr_template |= DESC_HDR_DONE_NOTIFY;
3064*4882a593Smuzhiyun 
3065*4882a593Smuzhiyun 	return 0;
3066*4882a593Smuzhiyun }
3067*4882a593Smuzhiyun 
talitos_cra_init_aead(struct crypto_aead * tfm)3068*4882a593Smuzhiyun static int talitos_cra_init_aead(struct crypto_aead *tfm)
3069*4882a593Smuzhiyun {
3070*4882a593Smuzhiyun 	struct aead_alg *alg = crypto_aead_alg(tfm);
3071*4882a593Smuzhiyun 	struct talitos_crypto_alg *talitos_alg;
3072*4882a593Smuzhiyun 	struct talitos_ctx *ctx = crypto_aead_ctx(tfm);
3073*4882a593Smuzhiyun 
3074*4882a593Smuzhiyun 	talitos_alg = container_of(alg, struct talitos_crypto_alg,
3075*4882a593Smuzhiyun 				   algt.alg.aead);
3076*4882a593Smuzhiyun 
3077*4882a593Smuzhiyun 	return talitos_init_common(ctx, talitos_alg);
3078*4882a593Smuzhiyun }
3079*4882a593Smuzhiyun 
talitos_cra_init_skcipher(struct crypto_skcipher * tfm)3080*4882a593Smuzhiyun static int talitos_cra_init_skcipher(struct crypto_skcipher *tfm)
3081*4882a593Smuzhiyun {
3082*4882a593Smuzhiyun 	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
3083*4882a593Smuzhiyun 	struct talitos_crypto_alg *talitos_alg;
3084*4882a593Smuzhiyun 	struct talitos_ctx *ctx = crypto_skcipher_ctx(tfm);
3085*4882a593Smuzhiyun 
3086*4882a593Smuzhiyun 	talitos_alg = container_of(alg, struct talitos_crypto_alg,
3087*4882a593Smuzhiyun 				   algt.alg.skcipher);
3088*4882a593Smuzhiyun 
3089*4882a593Smuzhiyun 	return talitos_init_common(ctx, talitos_alg);
3090*4882a593Smuzhiyun }
3091*4882a593Smuzhiyun 
talitos_cra_init_ahash(struct crypto_tfm * tfm)3092*4882a593Smuzhiyun static int talitos_cra_init_ahash(struct crypto_tfm *tfm)
3093*4882a593Smuzhiyun {
3094*4882a593Smuzhiyun 	struct crypto_alg *alg = tfm->__crt_alg;
3095*4882a593Smuzhiyun 	struct talitos_crypto_alg *talitos_alg;
3096*4882a593Smuzhiyun 	struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);
3097*4882a593Smuzhiyun 
3098*4882a593Smuzhiyun 	talitos_alg = container_of(__crypto_ahash_alg(alg),
3099*4882a593Smuzhiyun 				   struct talitos_crypto_alg,
3100*4882a593Smuzhiyun 				   algt.alg.hash);
3101*4882a593Smuzhiyun 
3102*4882a593Smuzhiyun 	ctx->keylen = 0;
3103*4882a593Smuzhiyun 	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
3104*4882a593Smuzhiyun 				 sizeof(struct talitos_ahash_req_ctx));
3105*4882a593Smuzhiyun 
3106*4882a593Smuzhiyun 	return talitos_init_common(ctx, talitos_alg);
3107*4882a593Smuzhiyun }
3108*4882a593Smuzhiyun 
talitos_cra_exit(struct crypto_tfm * tfm)3109*4882a593Smuzhiyun static void talitos_cra_exit(struct crypto_tfm *tfm)
3110*4882a593Smuzhiyun {
3111*4882a593Smuzhiyun 	struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);
3112*4882a593Smuzhiyun 	struct device *dev = ctx->dev;
3113*4882a593Smuzhiyun 
3114*4882a593Smuzhiyun 	if (ctx->keylen)
3115*4882a593Smuzhiyun 		dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
3116*4882a593Smuzhiyun }
3117*4882a593Smuzhiyun 
3118*4882a593Smuzhiyun /*
3119*4882a593Smuzhiyun  * given the alg's descriptor header template, determine whether descriptor
3120*4882a593Smuzhiyun  * type and primary/secondary execution units required match the hw
3121*4882a593Smuzhiyun  * capabilities description provided in the device tree node.
3122*4882a593Smuzhiyun  */
static int hw_supports(struct device *dev, __be32 desc_hdr_template)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	int ok;

	/* the descriptor type and primary EU must both be advertised */
	ok = (priv->desc_types & (1 << DESC_TYPE(desc_hdr_template))) &&
	     (priv->exec_units & (1 << PRIMARY_EU(desc_hdr_template)));

	/* a secondary EU is optional; check it only when the template uses one */
	if (SECONDARY_EU(desc_hdr_template))
		ok = ok && (priv->exec_units &
			    (1 << SECONDARY_EU(desc_hdr_template)));

	return ok;
}
3137*4882a593Smuzhiyun 
/*
 * talitos_remove() - tear down the SEC device.
 *
 * Unregisters every crypto algorithm registered at probe time,
 * unregisters the hwrng (when the hardware advertised an RNG execution
 * unit), releases the interrupt line(s) and finally kills the
 * completion tasklets.  All memory is devm-managed, so no explicit
 * frees are needed here.  Also called from talitos_probe()'s error
 * path, so every step must tolerate partially initialized state.
 */
static int talitos_remove(struct platform_device *ofdev)
{
	struct device *dev = &ofdev->dev;
	struct talitos_private *priv = dev_get_drvdata(dev);
	struct talitos_crypto_alg *t_alg, *n;
	int i;

	/* unregister each algorithm and drop it from the bookkeeping list */
	list_for_each_entry_safe(t_alg, n, &priv->alg_list, entry) {
		switch (t_alg->algt.type) {
		case CRYPTO_ALG_TYPE_SKCIPHER:
			crypto_unregister_skcipher(&t_alg->algt.alg.skcipher);
			break;
		case CRYPTO_ALG_TYPE_AEAD:
			crypto_unregister_aead(&t_alg->algt.alg.aead);
			break;
		case CRYPTO_ALG_TYPE_AHASH:
			crypto_unregister_ahash(&t_alg->algt.alg.hash);
			break;
		}
		list_del(&t_alg->entry);
	}

	if (hw_supports(dev, DESC_HDR_SEL0_RNG))
		talitos_unregister_rng(dev);

	/* at most two irq lines are mapped (see talitos_probe_irq) */
	for (i = 0; i < 2; i++)
		if (priv->irq[i]) {
			free_irq(priv->irq[i], dev);
			irq_dispose_mapping(priv->irq[i]);
		}

	/* done_task[1] is only initialized when a secondary irq is in use */
	tasklet_kill(&priv->done_task[0]);
	if (priv->irq[1])
		tasklet_kill(&priv->done_task[1]);

	return 0;
}
3175*4882a593Smuzhiyun 
/*
 * talitos_alg_alloc() - instantiate one algorithm from a driver template.
 *
 * Copies @template into a freshly devm-allocated talitos_crypto_alg,
 * wires up the type-specific init/setkey/encrypt/... callbacks and
 * rejects algorithms the present hardware revision cannot run.
 *
 * Return: the new record on success, ERR_PTR(-ENOTSUPP) when the
 * algorithm is unsupported on this SEC version, ERR_PTR(-ENOMEM) on
 * allocation failure, or ERR_PTR(-EINVAL) for an unknown template type.
 */
static struct talitos_crypto_alg *talitos_alg_alloc(struct device *dev,
						    struct talitos_alg_template
						           *template)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	struct talitos_crypto_alg *t_alg;
	struct crypto_alg *alg;

	t_alg = devm_kzalloc(dev, sizeof(struct talitos_crypto_alg),
			     GFP_KERNEL);
	if (!t_alg)
		return ERR_PTR(-ENOMEM);

	/* work on a private copy so per-device tweaks don't touch the template */
	t_alg->algt = *template;

	switch (t_alg->algt.type) {
	case CRYPTO_ALG_TYPE_SKCIPHER:
		alg = &t_alg->algt.alg.skcipher.base;
		alg->cra_exit = talitos_cra_exit;
		t_alg->algt.alg.skcipher.init = talitos_cra_init_skcipher;
		/* keep a setkey the template provided; default otherwise */
		t_alg->algt.alg.skcipher.setkey =
			t_alg->algt.alg.skcipher.setkey ?: skcipher_setkey;
		t_alg->algt.alg.skcipher.encrypt = skcipher_encrypt;
		t_alg->algt.alg.skcipher.decrypt = skcipher_decrypt;
		/* on SEC2+, ctr(aes) requires the dedicated AESU-CTR descriptor */
		if (!strcmp(alg->cra_name, "ctr(aes)") && !has_ftr_sec1(priv) &&
		    DESC_TYPE(t_alg->algt.desc_hdr_template) !=
		    DESC_TYPE(DESC_HDR_TYPE_AESU_CTR_NONSNOOP)) {
			devm_kfree(dev, t_alg);
			return ERR_PTR(-ENOTSUPP);
		}
		break;
	case CRYPTO_ALG_TYPE_AEAD:
		alg = &t_alg->algt.alg.aead.base;
		alg->cra_exit = talitos_cra_exit;
		t_alg->algt.alg.aead.init = talitos_cra_init_aead;
		/* keep a setkey the template provided; default otherwise */
		t_alg->algt.alg.aead.setkey = t_alg->algt.alg.aead.setkey ?:
					      aead_setkey;
		t_alg->algt.alg.aead.encrypt = aead_encrypt;
		t_alg->algt.alg.aead.decrypt = aead_decrypt;
		/* sha224-based authenc needs hardware SHA-224 init support */
		if (!(priv->features & TALITOS_FTR_SHA224_HWINIT) &&
		    !strncmp(alg->cra_name, "authenc(hmac(sha224)", 20)) {
			devm_kfree(dev, t_alg);
			return ERR_PTR(-ENOTSUPP);
		}
		break;
	case CRYPTO_ALG_TYPE_AHASH:
		alg = &t_alg->algt.alg.hash.halg.base;
		alg->cra_init = talitos_cra_init_ahash;
		alg->cra_exit = talitos_cra_exit;
		t_alg->algt.alg.hash.init = ahash_init;
		t_alg->algt.alg.hash.update = ahash_update;
		t_alg->algt.alg.hash.final = ahash_final;
		t_alg->algt.alg.hash.finup = ahash_finup;
		t_alg->algt.alg.hash.digest = ahash_digest;
		/* only keyed (hmac) hashes get a setkey callback */
		if (!strncmp(alg->cra_name, "hmac", 4))
			t_alg->algt.alg.hash.setkey = ahash_setkey;
		t_alg->algt.alg.hash.import = ahash_import;
		t_alg->algt.alg.hash.export = ahash_export;

		if (!(priv->features & TALITOS_FTR_HMAC_OK) &&
		    !strncmp(alg->cra_name, "hmac", 4)) {
			devm_kfree(dev, t_alg);
			return ERR_PTR(-ENOTSUPP);
		}
		/*
		 * Without hardware SHA-224 init, run sha224 on the SHA-256
		 * engine with a software-provided initial state.
		 */
		if (!(priv->features & TALITOS_FTR_SHA224_HWINIT) &&
		    (!strcmp(alg->cra_name, "sha224") ||
		     !strcmp(alg->cra_name, "hmac(sha224)"))) {
			t_alg->algt.alg.hash.init = ahash_init_sha224_swinit;
			t_alg->algt.desc_hdr_template =
					DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
					DESC_HDR_SEL0_MDEUA |
					DESC_HDR_MODE0_MDEU_SHA256;
		}
		break;
	default:
		dev_err(dev, "unknown algorithm type %d\n", t_alg->algt.type);
		devm_kfree(dev, t_alg);
		return ERR_PTR(-EINVAL);
	}

	alg->cra_module = THIS_MODULE;
	/* template may override the default registration priority */
	if (t_alg->algt.priority)
		alg->cra_priority = t_alg->algt.priority;
	else
		alg->cra_priority = TALITOS_CRA_PRIORITY;
	/* SEC1 requires 4-byte aligned buffers */
	if (has_ftr_sec1(priv))
		alg->cra_alignmask = 3;
	else
		alg->cra_alignmask = 0;
	alg->cra_ctxsize = sizeof(struct talitos_ctx);
	alg->cra_flags |= CRYPTO_ALG_KERN_DRIVER_ONLY;

	t_alg->dev = dev;

	return t_alg;
}
3272*4882a593Smuzhiyun 
/*
 * talitos_probe_irq() - map and request the SEC interrupt line(s).
 *
 * SEC1 devices use a single interrupt for all channels.  SEC2+ devices
 * may provide a second line: when present, irq[0] serves channels 0/2
 * and irq[1] serves channels 1/3; otherwise irq[0] handles all four.
 * On failure the irq mapping is disposed and the corresponding
 * priv->irq[] slot is zeroed, so talitos_remove() can skip it safely.
 */
static int talitos_probe_irq(struct platform_device *ofdev)
{
	struct device *dev = &ofdev->dev;
	struct device_node *np = ofdev->dev.of_node;
	struct talitos_private *priv = dev_get_drvdata(dev);
	int err;
	bool is_sec1 = has_ftr_sec1(priv);

	priv->irq[0] = irq_of_parse_and_map(np, 0);
	if (!priv->irq[0]) {
		dev_err(dev, "failed to map irq\n");
		return -EINVAL;
	}
	if (is_sec1) {
		/* SEC1: one line covers all four channels */
		err = request_irq(priv->irq[0], talitos1_interrupt_4ch, 0,
				  dev_driver_string(dev), dev);
		goto primary_out;
	}

	priv->irq[1] = irq_of_parse_and_map(np, 1);

	/* get the primary irq line */
	if (!priv->irq[1]) {
		/* single-line SEC2+: irq[0] serves all four channels */
		err = request_irq(priv->irq[0], talitos2_interrupt_4ch, 0,
				  dev_driver_string(dev), dev);
		goto primary_out;
	}

	/* dual-line SEC2+: irq[0] serves channels 0 and 2 */
	err = request_irq(priv->irq[0], talitos2_interrupt_ch0_2, 0,
			  dev_driver_string(dev), dev);
	if (err)
		goto primary_out;

	/* get the secondary irq line (channels 1 and 3) */
	err = request_irq(priv->irq[1], talitos2_interrupt_ch1_3, 0,
			  dev_driver_string(dev), dev);
	if (err) {
		/* fall back gracefully: clear irq[1] rather than failing */
		dev_err(dev, "failed to request secondary irq\n");
		irq_dispose_mapping(priv->irq[1]);
		priv->irq[1] = 0;
	}

	return err;

primary_out:
	if (err) {
		dev_err(dev, "failed to request primary irq\n");
		irq_dispose_mapping(priv->irq[0]);
		priv->irq[0] = 0;
	}

	return err;
}
3326*4882a593Smuzhiyun 
/*
 * talitos_probe() - discover and initialize one SEC device.
 *
 * Maps the register block, reads the capability properties from the
 * device tree, sets version-specific feature flags and register
 * offsets, requests interrupts, initializes per-channel state, resets
 * the hardware, then registers the hwrng and every supported crypto
 * algorithm.  On any failure, talitos_remove() unwinds whatever was
 * set up so far (allocations are devm-managed).
 */
static int talitos_probe(struct platform_device *ofdev)
{
	struct device *dev = &ofdev->dev;
	struct device_node *np = ofdev->dev.of_node;
	struct talitos_private *priv;
	int i, err;
	int stride;
	struct resource *res;

	priv = devm_kzalloc(dev, sizeof(struct talitos_private), GFP_KERNEL);
	if (!priv)
		return -ENOMEM;

	/* init early so the err_out path (talitos_remove) can walk it */
	INIT_LIST_HEAD(&priv->alg_list);

	dev_set_drvdata(dev, priv);

	priv->ofdev = ofdev;

	spin_lock_init(&priv->reg_lock);

	res = platform_get_resource(ofdev, IORESOURCE_MEM, 0);
	if (!res)
		return -ENXIO;
	priv->reg = devm_ioremap(dev, res->start, resource_size(res));
	if (!priv->reg) {
		dev_err(dev, "failed to of_iomap\n");
		err = -ENOMEM;
		goto err_out;
	}

	/* get SEC version capabilities from device tree */
	of_property_read_u32(np, "fsl,num-channels", &priv->num_channels);
	of_property_read_u32(np, "fsl,channel-fifo-len", &priv->chfifo_len);
	of_property_read_u32(np, "fsl,exec-units-mask", &priv->exec_units);
	of_property_read_u32(np, "fsl,descriptor-types-mask",
			     &priv->desc_types);

	/*
	 * num_channels must be a power of two: talitos_init_common() uses
	 * (num_channels - 1) as a round-robin mask.
	 */
	if (!is_power_of_2(priv->num_channels) || !priv->chfifo_len ||
	    !priv->exec_units || !priv->desc_types) {
		dev_err(dev, "invalid property data in device tree node\n");
		err = -EINVAL;
		goto err_out;
	}

	if (of_device_is_compatible(np, "fsl,sec3.0"))
		priv->features |= TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT;

	if (of_device_is_compatible(np, "fsl,sec2.1"))
		priv->features |= TALITOS_FTR_HW_AUTH_CHECK |
				  TALITOS_FTR_SHA224_HWINIT |
				  TALITOS_FTR_HMAC_OK;

	if (of_device_is_compatible(np, "fsl,sec1.0"))
		priv->features |= TALITOS_FTR_SEC1;

	/* per-version execution-unit register offsets and channel stride */
	if (of_device_is_compatible(np, "fsl,sec1.2")) {
		priv->reg_deu = priv->reg + TALITOS12_DEU;
		priv->reg_aesu = priv->reg + TALITOS12_AESU;
		priv->reg_mdeu = priv->reg + TALITOS12_MDEU;
		stride = TALITOS1_CH_STRIDE;
	} else if (of_device_is_compatible(np, "fsl,sec1.0")) {
		priv->reg_deu = priv->reg + TALITOS10_DEU;
		priv->reg_aesu = priv->reg + TALITOS10_AESU;
		priv->reg_mdeu = priv->reg + TALITOS10_MDEU;
		priv->reg_afeu = priv->reg + TALITOS10_AFEU;
		priv->reg_rngu = priv->reg + TALITOS10_RNGU;
		priv->reg_pkeu = priv->reg + TALITOS10_PKEU;
		stride = TALITOS1_CH_STRIDE;
	} else {
		priv->reg_deu = priv->reg + TALITOS2_DEU;
		priv->reg_aesu = priv->reg + TALITOS2_AESU;
		priv->reg_mdeu = priv->reg + TALITOS2_MDEU;
		priv->reg_afeu = priv->reg + TALITOS2_AFEU;
		priv->reg_rngu = priv->reg + TALITOS2_RNGU;
		priv->reg_pkeu = priv->reg + TALITOS2_PKEU;
		priv->reg_keu = priv->reg + TALITOS2_KEU;
		priv->reg_crcu = priv->reg + TALITOS2_CRCU;
		stride = TALITOS2_CH_STRIDE;
	}

	err = talitos_probe_irq(ofdev);
	if (err)
		goto err_out;

	/* pick the completion tasklet(s) matching the irq configuration */
	if (has_ftr_sec1(priv)) {
		if (priv->num_channels == 1)
			tasklet_init(&priv->done_task[0], talitos1_done_ch0,
				     (unsigned long)dev);
		else
			tasklet_init(&priv->done_task[0], talitos1_done_4ch,
				     (unsigned long)dev);
	} else {
		if (priv->irq[1]) {
			tasklet_init(&priv->done_task[0], talitos2_done_ch0_2,
				     (unsigned long)dev);
			tasklet_init(&priv->done_task[1], talitos2_done_ch1_3,
				     (unsigned long)dev);
		} else if (priv->num_channels == 1) {
			tasklet_init(&priv->done_task[0], talitos2_done_ch0,
				     (unsigned long)dev);
		} else {
			tasklet_init(&priv->done_task[0], talitos2_done_4ch,
				     (unsigned long)dev);
		}
	}

	priv->chan = devm_kcalloc(dev,
				  priv->num_channels,
				  sizeof(struct talitos_channel),
				  GFP_KERNEL);
	if (!priv->chan) {
		dev_err(dev, "failed to allocate channel management space\n");
		err = -ENOMEM;
		goto err_out;
	}

	priv->fifo_len = roundup_pow_of_two(priv->chfifo_len);

	for (i = 0; i < priv->num_channels; i++) {
		priv->chan[i].reg = priv->reg + stride * (i + 1);
		if (!priv->irq[1] || !(i & 1))
			priv->chan[i].reg += TALITOS_CH_BASE_OFFSET;

		spin_lock_init(&priv->chan[i].head_lock);
		spin_lock_init(&priv->chan[i].tail_lock);

		priv->chan[i].fifo = devm_kcalloc(dev,
						priv->fifo_len,
						sizeof(struct talitos_request),
						GFP_KERNEL);
		if (!priv->chan[i].fifo) {
			dev_err(dev, "failed to allocate request fifo %d\n", i);
			err = -ENOMEM;
			goto err_out;
		}

		/* leave headroom of one in the fifo accounting */
		atomic_set(&priv->chan[i].submit_count,
			   -(priv->chfifo_len - 1));
	}

	/* SEC hardware addresses up to 36 bits of DMA */
	dma_set_mask(dev, DMA_BIT_MASK(36));

	/* reset and initialize the h/w */
	err = init_device(dev);
	if (err) {
		dev_err(dev, "failed to initialize device\n");
		goto err_out;
	}

	/* register the RNG, if available */
	if (hw_supports(dev, DESC_HDR_SEL0_RNG)) {
		err = talitos_register_rng(dev);
		if (err) {
			dev_err(dev, "failed to register hwrng: %d\n", err);
			goto err_out;
		} else
			dev_info(dev, "hwrng\n");
	}

	/* register crypto algorithms the device supports */
	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		if (hw_supports(dev, driver_algs[i].desc_hdr_template)) {
			struct talitos_crypto_alg *t_alg;
			struct crypto_alg *alg = NULL;

			t_alg = talitos_alg_alloc(dev, &driver_algs[i]);
			if (IS_ERR(t_alg)) {
				err = PTR_ERR(t_alg);
				/* -ENOTSUPP just means "skip this one" */
				if (err == -ENOTSUPP)
					continue;
				goto err_out;
			}

			switch (t_alg->algt.type) {
			case CRYPTO_ALG_TYPE_SKCIPHER:
				err = crypto_register_skcipher(
						&t_alg->algt.alg.skcipher);
				alg = &t_alg->algt.alg.skcipher.base;
				break;

			case CRYPTO_ALG_TYPE_AEAD:
				err = crypto_register_aead(
					&t_alg->algt.alg.aead);
				alg = &t_alg->algt.alg.aead.base;
				break;

			case CRYPTO_ALG_TYPE_AHASH:
				err = crypto_register_ahash(
						&t_alg->algt.alg.hash);
				alg = &t_alg->algt.alg.hash.halg.base;
				break;
			}
			if (err) {
				dev_err(dev, "%s alg registration failed\n",
					alg->cra_driver_name);
				devm_kfree(dev, t_alg);
			} else
				list_add_tail(&t_alg->entry, &priv->alg_list);
		}
	}
	if (!list_empty(&priv->alg_list))
		dev_info(dev, "%s algorithms registered in /proc/crypto\n",
			 (char *)of_get_property(np, "compatible", NULL));

	return 0;

err_out:
	talitos_remove(ofdev);

	return err;
}
3539*4882a593Smuzhiyun 
/*
 * Device tree match table.  SEC1 and SEC2+ families are independently
 * selectable at build time; more specific compatibles ("fsl,sec1.2",
 * "fsl,sec2.1", "fsl,sec3.0") are matched via these base entries and
 * distinguished in talitos_probe().
 */
static const struct of_device_id talitos_match[] = {
#ifdef CONFIG_CRYPTO_DEV_TALITOS1
	{
		.compatible = "fsl,sec1.0",
	},
#endif
#ifdef CONFIG_CRYPTO_DEV_TALITOS2
	{
		.compatible = "fsl,sec2.0",
	},
#endif
	{},
};
MODULE_DEVICE_TABLE(of, talitos_match);
3554*4882a593Smuzhiyun 
/* platform driver glue: bind probe/remove to the OF match table above */
static struct platform_driver talitos_driver = {
	.driver = {
		.name = "talitos",
		.of_match_table = talitos_match,
	},
	.probe = talitos_probe,
	.remove = talitos_remove,
};

module_platform_driver(talitos_driver);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Kim Phillips <kim.phillips@freescale.com>");
MODULE_DESCRIPTION("Freescale integrated security engine (SEC) driver");
3569