// SPDX-License-Identifier: GPL-2.0-only
/**
 * Copyright (C) ST-Ericsson SA 2010
 * Author: Shujuan Chen <shujuan.chen@stericsson.com> for ST-Ericsson.
 * Author: Jonas Linde <jonas.linde@stericsson.com> for ST-Ericsson.
 * Author: Niklas Hernaeus <niklas.hernaeus@stericsson.com> for ST-Ericsson.
 * Author: Joakim Bech <joakim.xx.bech@stericsson.com> for ST-Ericsson.
 * Author: Berne Hebark <berne.herbark@stericsson.com> for ST-Ericsson.
 */

#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/types.h>

#include "cryp_p.h"
#include "cryp.h"

/**
 * cryp_wait_until_done - wait until the device logic is not busy
 * @device_data: Pointer to the device data struct for base address.
 */
void cryp_wait_until_done(struct cryp_device_data *device_data)
{
	while (cryp_is_logic_busy(device_data))
		cpu_relax();
}

/**
 * cryp_check - This routine checks Peripheral and PCell Id
 * @device_data: Pointer to the device data struct for base address.
 */
int cryp_check(struct cryp_device_data *device_data)
{
	int peripheralid2 = 0;

	if (NULL == device_data)
		return -EINVAL;

	peripheralid2 = readl_relaxed(&device_data->base->periphId2);

	if (peripheralid2 != CRYP_PERIPHERAL_ID2_DB8500)
		return -EPERM;

	/* Check Peripheral and Pcell Id Register for CRYP */
	if ((CRYP_PERIPHERAL_ID0 ==
		readl_relaxed(&device_data->base->periphId0))
	    && (CRYP_PERIPHERAL_ID1 ==
		    readl_relaxed(&device_data->base->periphId1))
	    && (CRYP_PERIPHERAL_ID3 ==
		    readl_relaxed(&device_data->base->periphId3))
	    && (CRYP_PCELL_ID0 ==
		    readl_relaxed(&device_data->base->pcellId0))
	    && (CRYP_PCELL_ID1 ==
		    readl_relaxed(&device_data->base->pcellId1))
	    && (CRYP_PCELL_ID2 ==
		    readl_relaxed(&device_data->base->pcellId2))
	    && (CRYP_PCELL_ID3 ==
		    readl_relaxed(&device_data->base->pcellId3))) {
		return 0;
	}

	return -EPERM;
}

/**
 * cryp_activity - This routine enables/disables the cryptography function.
 * @device_data: Pointer to the device data struct for base address.
 * @cryp_crypen: Enable/Disable functionality
 */
void cryp_activity(struct cryp_device_data *device_data,
		   enum cryp_crypen cryp_crypen)
{
	CRYP_PUT_BITS(&device_data->base->cr,
		      cryp_crypen,
		      CRYP_CR_CRYPEN_POS,
		      CRYP_CR_CRYPEN_MASK);
}

/**
 * cryp_flush_inoutfifo - Resets both the input and the output FIFOs
 * @device_data: Pointer to the device data struct for base address.
 */
void cryp_flush_inoutfifo(struct cryp_device_data *device_data)
{
	/*
	 * We always need to disable the hardware before trying to flush the
	 * FIFO. This is something that isn't written in the design
	 * specification, but we have been informed by the hardware designers
	 * that this must be done.
	 */
	cryp_activity(device_data, CRYP_CRYPEN_DISABLE);
	cryp_wait_until_done(device_data);

	CRYP_SET_BITS(&device_data->base->cr, CRYP_CR_FFLUSH_MASK);
	/*
	 * CRYP_SR_INFIFO_READY_MASK is the expected value on the status
	 * register when starting a new calculation, which means the input
	 * FIFO is not full and the input FIFO is empty.
	 */
	while (readl_relaxed(&device_data->base->sr) !=
	       CRYP_SR_INFIFO_READY_MASK)
		cpu_relax();
}

/**
 * cryp_set_configuration - This routine sets the control register (cr) of the CRYP IP
 * @device_data: Pointer to the device data struct for base address.
 * @cryp_config: Pointer to the configuration parameter
 * @control_register: The control register to be written later on.
 */
int cryp_set_configuration(struct cryp_device_data *device_data,
			   struct cryp_config *cryp_config,
			   u32 *control_register)
{
	u32 cr_for_kse;

	if (NULL == device_data || NULL == cryp_config)
		return -EINVAL;

	*control_register |= (cryp_config->keysize << CRYP_CR_KEYSIZE_POS);

	/* Prepare key for decryption in AES_ECB and AES_CBC mode. */
	if ((CRYP_ALGORITHM_DECRYPT == cryp_config->algodir) &&
	    ((CRYP_ALGO_AES_ECB == cryp_config->algomode) ||
	     (CRYP_ALGO_AES_CBC == cryp_config->algomode))) {
		cr_for_kse = *control_register;
		/*
		 * This seems a bit odd, but it is indeed needed to set this to
		 * encrypt even though it is a decryption that we are doing. It
		 * is also mentioned in the design spec that you need to do
		 * this. After the key preparation for decrypting is done you
		 * should set algodir back to decryption, which is done outside
		 * this if statement.
		 *
		 * According to the design specification we should set mode ECB
		 * during key preparation even though we might be running CBC
		 * when entering this function.
		 *
		 * Writing to KSE_ENABLED will drop CRYPEN when key preparation
		 * is done. Therefore we need to set CRYPEN again outside this
		 * if statement when running decryption.
		 */
		cr_for_kse |= ((CRYP_ALGORITHM_ENCRYPT << CRYP_CR_ALGODIR_POS) |
			       (CRYP_ALGO_AES_ECB << CRYP_CR_ALGOMODE_POS) |
			       (CRYP_CRYPEN_ENABLE << CRYP_CR_CRYPEN_POS) |
			       (KSE_ENABLED << CRYP_CR_KSE_POS));

		writel_relaxed(cr_for_kse, &device_data->base->cr);
		cryp_wait_until_done(device_data);
	}

	*control_register |=
		((cryp_config->algomode << CRYP_CR_ALGOMODE_POS) |
		 (cryp_config->algodir << CRYP_CR_ALGODIR_POS));

	return 0;
}
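
/*
 * Illustrative sketch (not part of the original driver): one plausible way a
 * caller could combine cryp_set_configuration() with the other helpers in
 * this file. The "config" variable and the error handling are hypothetical;
 * the helper calls and the cr register below are the ones used in this file.
 *
 *	u32 cr = 0;
 *
 *	cryp_activity(device_data, CRYP_CRYPEN_DISABLE);
 *	cryp_wait_until_done(device_data);
 *	if (cryp_set_configuration(device_data, &config, &cr))
 *		return -EINVAL;
 *	writel_relaxed(cr, &device_data->base->cr);
 *	cryp_activity(device_data, CRYP_CRYPEN_ENABLE);
 */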

/**
 * cryp_configure_protection - set the protection bits in the CRYP logic.
 * @device_data: Pointer to the device data struct for base address.
 * @p_protect_config:	Pointer to the protection mode and
 *			secure mode configuration
 */
int cryp_configure_protection(struct cryp_device_data *device_data,
			      struct cryp_protection_config *p_protect_config)
{
	if (NULL == p_protect_config)
		return -EINVAL;

	CRYP_WRITE_BIT(&device_data->base->cr,
		       (u32) p_protect_config->secure_access,
		       CRYP_CR_SECURE_MASK);
	CRYP_PUT_BITS(&device_data->base->cr,
		      p_protect_config->privilege_access,
		      CRYP_CR_PRLG_POS,
		      CRYP_CR_PRLG_MASK);

	return 0;
}

/**
 * cryp_is_logic_busy - returns the busy status of the CRYP logic
 * @device_data: Pointer to the device data struct for base address.
 */
int cryp_is_logic_busy(struct cryp_device_data *device_data)
{
	return CRYP_TEST_BITS(&device_data->base->sr,
			      CRYP_SR_BUSY_MASK);
}

/**
 * cryp_configure_for_dma - configures the CRYP IP for DMA operation
 * @device_data: Pointer to the device data struct for base address.
 * @dma_req: Specifies the DMA request type value.
 */
void cryp_configure_for_dma(struct cryp_device_data *device_data,
			    enum cryp_dma_req_type dma_req)
{
	CRYP_SET_BITS(&device_data->base->dmacr,
		      (u32) dma_req);
}

/**
 * cryp_configure_key_values - configures the key values for CRYP operations
 * @device_data: Pointer to the device data struct for base address.
 * @key_reg_index: Key value index register
 * @key_value: The key value struct
 */
int cryp_configure_key_values(struct cryp_device_data *device_data,
			      enum cryp_key_reg_index key_reg_index,
			      struct cryp_key_value key_value)
{
	while (cryp_is_logic_busy(device_data))
		cpu_relax();

	switch (key_reg_index) {
	case CRYP_KEY_REG_1:
		writel_relaxed(key_value.key_value_left,
				&device_data->base->key_1_l);
		writel_relaxed(key_value.key_value_right,
				&device_data->base->key_1_r);
		break;
	case CRYP_KEY_REG_2:
		writel_relaxed(key_value.key_value_left,
				&device_data->base->key_2_l);
		writel_relaxed(key_value.key_value_right,
				&device_data->base->key_2_r);
		break;
	case CRYP_KEY_REG_3:
		writel_relaxed(key_value.key_value_left,
				&device_data->base->key_3_l);
		writel_relaxed(key_value.key_value_right,
				&device_data->base->key_3_r);
		break;
	case CRYP_KEY_REG_4:
		writel_relaxed(key_value.key_value_left,
				&device_data->base->key_4_l);
		writel_relaxed(key_value.key_value_right,
				&device_data->base->key_4_r);
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

/**
 * cryp_configure_init_vector - configures the initialization vector register
 * @device_data: Pointer to the device data struct for base address.
 * @init_vector_index: Specifies the index of the init vector.
 * @init_vector_value: Specifies the value for the init vector.
 */
int cryp_configure_init_vector(struct cryp_device_data *device_data,
			       enum cryp_init_vector_index
			       init_vector_index,
			       struct cryp_init_vector_value
			       init_vector_value)
{
	while (cryp_is_logic_busy(device_data))
		cpu_relax();

	switch (init_vector_index) {
	case CRYP_INIT_VECTOR_INDEX_0:
		writel_relaxed(init_vector_value.init_value_left,
		       &device_data->base->init_vect_0_l);
		writel_relaxed(init_vector_value.init_value_right,
		       &device_data->base->init_vect_0_r);
		break;
	case CRYP_INIT_VECTOR_INDEX_1:
		writel_relaxed(init_vector_value.init_value_left,
		       &device_data->base->init_vect_1_l);
		writel_relaxed(init_vector_value.init_value_right,
		       &device_data->base->init_vect_1_r);
		break;
	default:
		return -EINVAL;
	}

	return 0;
}
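
/*
 * Illustrative sketch (not part of the original driver): loading one key
 * register pair and one CBC initialization vector with the helpers above.
 * The key/IV word sources (key_words, iv_words) are hypothetical; the enum
 * values and struct fields are the ones used in this file.
 *
 *	struct cryp_key_value key = {
 *		.key_value_left  = key_words[0],
 *		.key_value_right = key_words[1],
 *	};
 *	struct cryp_init_vector_value iv = {
 *		.init_value_left  = iv_words[0],
 *		.init_value_right = iv_words[1],
 *	};
 *
 *	if (cryp_configure_key_values(device_data, CRYP_KEY_REG_1, key) ||
 *	    cryp_configure_init_vector(device_data, CRYP_INIT_VECTOR_INDEX_0,
 *				       iv))
 *		return -EINVAL;
 */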

/**
 * cryp_save_device_context -	Store hardware registers and
 *				other device context parameter
 * @device_data: Pointer to the device data struct for base address.
 * @ctx: Crypto device context
 * @cryp_mode: Mode of operation; when CRYP_MODE_DMA, the DMA requests are
 *	       disabled before the context is saved.
 */
void cryp_save_device_context(struct cryp_device_data *device_data,
			      struct cryp_device_context *ctx,
			      int cryp_mode)
{
	enum cryp_algo_mode algomode;
	struct cryp_register __iomem *src_reg = device_data->base;
	struct cryp_config *config =
		(struct cryp_config *)device_data->current_ctx;

	/*
	 * Always start by disabling the hardware and wait for it to finish the
	 * ongoing calculations before trying to reprogram it.
	 */
	cryp_activity(device_data, CRYP_CRYPEN_DISABLE);
	cryp_wait_until_done(device_data);

	if (cryp_mode == CRYP_MODE_DMA)
		cryp_configure_for_dma(device_data, CRYP_DMA_DISABLE_BOTH);

	if (CRYP_TEST_BITS(&src_reg->sr, CRYP_SR_IFEM_MASK) == 0)
		ctx->din = readl_relaxed(&src_reg->din);

	ctx->cr = readl_relaxed(&src_reg->cr) & CRYP_CR_CONTEXT_SAVE_MASK;

	switch (config->keysize) {
	case CRYP_KEY_SIZE_256:
		ctx->key_4_l = readl_relaxed(&src_reg->key_4_l);
		ctx->key_4_r = readl_relaxed(&src_reg->key_4_r);
		fallthrough;

	case CRYP_KEY_SIZE_192:
		ctx->key_3_l = readl_relaxed(&src_reg->key_3_l);
		ctx->key_3_r = readl_relaxed(&src_reg->key_3_r);
		fallthrough;

	case CRYP_KEY_SIZE_128:
		ctx->key_2_l = readl_relaxed(&src_reg->key_2_l);
		ctx->key_2_r = readl_relaxed(&src_reg->key_2_r);
		fallthrough;

	default:
		ctx->key_1_l = readl_relaxed(&src_reg->key_1_l);
		ctx->key_1_r = readl_relaxed(&src_reg->key_1_r);
	}

	/* Save IV for CBC mode for both AES and DES. */
	algomode = ((ctx->cr & CRYP_CR_ALGOMODE_MASK) >> CRYP_CR_ALGOMODE_POS);
	if (algomode == CRYP_ALGO_TDES_CBC ||
	    algomode == CRYP_ALGO_DES_CBC ||
	    algomode == CRYP_ALGO_AES_CBC) {
		ctx->init_vect_0_l = readl_relaxed(&src_reg->init_vect_0_l);
		ctx->init_vect_0_r = readl_relaxed(&src_reg->init_vect_0_r);
		ctx->init_vect_1_l = readl_relaxed(&src_reg->init_vect_1_l);
		ctx->init_vect_1_r = readl_relaxed(&src_reg->init_vect_1_r);
	}
}

/**
 * cryp_restore_device_context -	Restore hardware registers and
 *					other device context parameter
 * @device_data: Pointer to the device data struct for base address.
 * @ctx: Crypto device context
 */
void cryp_restore_device_context(struct cryp_device_data *device_data,
				 struct cryp_device_context *ctx)
{
	struct cryp_register __iomem *reg = device_data->base;
	struct cryp_config *config =
		(struct cryp_config *)device_data->current_ctx;

	/*
	 * Fall through for all items in switch statement. DES is captured in
	 * the default.
	 */
	switch (config->keysize) {
	case CRYP_KEY_SIZE_256:
		writel_relaxed(ctx->key_4_l, &reg->key_4_l);
		writel_relaxed(ctx->key_4_r, &reg->key_4_r);
		fallthrough;

	case CRYP_KEY_SIZE_192:
		writel_relaxed(ctx->key_3_l, &reg->key_3_l);
		writel_relaxed(ctx->key_3_r, &reg->key_3_r);
		fallthrough;

	case CRYP_KEY_SIZE_128:
		writel_relaxed(ctx->key_2_l, &reg->key_2_l);
		writel_relaxed(ctx->key_2_r, &reg->key_2_r);
		fallthrough;

	default:
		writel_relaxed(ctx->key_1_l, &reg->key_1_l);
		writel_relaxed(ctx->key_1_r, &reg->key_1_r);
	}

	/* Restore IV for CBC mode for AES and DES. */
	if (config->algomode == CRYP_ALGO_TDES_CBC ||
	    config->algomode == CRYP_ALGO_DES_CBC ||
	    config->algomode == CRYP_ALGO_AES_CBC) {
		writel_relaxed(ctx->init_vect_0_l, &reg->init_vect_0_l);
		writel_relaxed(ctx->init_vect_0_r, &reg->init_vect_0_r);
		writel_relaxed(ctx->init_vect_1_l, &reg->init_vect_1_l);
		writel_relaxed(ctx->init_vect_1_r, &reg->init_vect_1_r);
	}
}
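
/*
 * Illustrative sketch (not part of the original driver): how the context
 * save/restore pair above might bracket sharing of the hardware between two
 * requests. The "ctx" storage and the surrounding synchronization are
 * hypothetical; the helper calls are the ones defined in this file.
 *
 *	cryp_save_device_context(device_data, &ctx, cryp_mode);
 *		(another request uses the CRYP block here)
 *	cryp_restore_device_context(device_data, &ctx);
 *	cryp_activity(device_data, CRYP_CRYPEN_ENABLE);
 */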