1*4882a593Smuzhiyun // SPDX-License-Identifier: GPL-2.0
2*4882a593Smuzhiyun /*
3*4882a593Smuzhiyun * Copyright (C) 2017 Marvell
4*4882a593Smuzhiyun *
5*4882a593Smuzhiyun * Antoine Tenart <antoine.tenart@free-electrons.com>
6*4882a593Smuzhiyun */
7*4882a593Smuzhiyun
8*4882a593Smuzhiyun #include <asm/unaligned.h>
9*4882a593Smuzhiyun #include <linux/device.h>
10*4882a593Smuzhiyun #include <linux/dma-mapping.h>
11*4882a593Smuzhiyun #include <linux/dmapool.h>
12*4882a593Smuzhiyun #include <crypto/aead.h>
13*4882a593Smuzhiyun #include <crypto/aes.h>
14*4882a593Smuzhiyun #include <crypto/authenc.h>
15*4882a593Smuzhiyun #include <crypto/chacha.h>
16*4882a593Smuzhiyun #include <crypto/ctr.h>
17*4882a593Smuzhiyun #include <crypto/internal/des.h>
18*4882a593Smuzhiyun #include <crypto/gcm.h>
19*4882a593Smuzhiyun #include <crypto/ghash.h>
20*4882a593Smuzhiyun #include <crypto/poly1305.h>
21*4882a593Smuzhiyun #include <crypto/sha.h>
22*4882a593Smuzhiyun #include <crypto/sm3.h>
23*4882a593Smuzhiyun #include <crypto/sm4.h>
24*4882a593Smuzhiyun #include <crypto/xts.h>
25*4882a593Smuzhiyun #include <crypto/skcipher.h>
26*4882a593Smuzhiyun #include <crypto/internal/aead.h>
27*4882a593Smuzhiyun #include <crypto/internal/skcipher.h>
28*4882a593Smuzhiyun
29*4882a593Smuzhiyun #include "safexcel.h"
30*4882a593Smuzhiyun
/* Direction of the cipher operation, as selected per request */
enum safexcel_cipher_direction {
	SAFEXCEL_ENCRYPT,
	SAFEXCEL_DECRYPT,
};
35*4882a593Smuzhiyun
/* Base cipher algorithm configured into the context */
enum safexcel_cipher_alg {
	SAFEXCEL_DES,
	SAFEXCEL_3DES,
	SAFEXCEL_AES,
	SAFEXCEL_CHACHA20,
	SAFEXCEL_SM4,
};
43*4882a593Smuzhiyun
/*
 * Per-transform (tfm) context for both skcipher and AEAD algorithms.
 * Holds the key material and the precomputed engine configuration that
 * is written into the command descriptors for each request.
 */
struct safexcel_cipher_ctx {
	struct safexcel_context base;
	struct safexcel_crypto_priv *priv;

	u32 mode;		/* CONTEXT_CONTROL_CRYPTO_MODE_* cipher mode */
	enum safexcel_cipher_alg alg;
	u8 aead; /* !=0=AEAD, 2=IPSec ESP AEAD, 3=IPsec ESP GMAC */
	u8 xcm;  /* 0=authenc, 1=GCM, 2 reserved for CCM */
	u8 aadskip;		/* bytes of assoclen not fed to the hash */
	u8 blocksz;		/* cipher block size, also CBC IV size */
	u32 ivmask;		/* descriptor option bits for IV loading */
	u32 ctrinit;		/* initial counter value (0 or 1) for CTR/GCM */

	__le32 key[16];		/* cipher key as loaded into the context record */
	u32 nonce;		/* RFC3686/ESP salt, kept in wire byte order */
	unsigned int key_len, xts;	/* xts!=0 halves the per-key AES size */

	/* All the below is AEAD specific */
	u32 hash_alg;		/* CONTEXT_CONTROL_CRYPTO_ALG_* hash selector */
	u32 state_sz;		/* hash state size (ipad/opad digest size) */

	struct crypto_cipher *hkaes;	/* AES cipher for GHASH subkey derivation */
	struct crypto_aead *fback;	/* software fallback for unsupported cases */
};
68*4882a593Smuzhiyun
/* Per-request state, stored in the crypto request's context area */
struct safexcel_cipher_req {
	enum safexcel_cipher_direction direction;
	/* Number of result descriptors associated to the request */
	unsigned int rdescs;
	bool needs_inv;		/* context record must be invalidated first */
	int nr_src, nr_dst;	/* mapped scatterlist entry counts */
};
76*4882a593Smuzhiyun
/*
 * Load the IV for a plain skcipher operation into the command descriptor
 * token area and return the number of 32 bit token words consumed.
 */
static int safexcel_skcipher_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
				struct safexcel_command_desc *cdesc)
{
	u32 *tok = cdesc->control_data.token;
	int ivwords;

	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
		/* 32 bit nonce */
		tok[0] = ctx->nonce;
		/* 64 bit IV part */
		memcpy(&tok[1], iv, 8);
		/* 32 bit counter, start at 0 or 1 (big endian!) */
		tok[3] = (__force u32)cpu_to_be32(ctx->ctrinit);
		ivwords = 4;
	} else if (ctx->alg == SAFEXCEL_CHACHA20) {
		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
		/* 96 bit nonce part */
		memcpy(&tok[0], &iv[4], 12);
		/* 32 bit counter */
		tok[3] = *(u32 *)iv;
		ivwords = 4;
	} else {
		/* CBC et al: full blocksize IV, option bits depend on size */
		cdesc->control_data.options |= ctx->ivmask;
		memcpy(tok, iv, ctx->blocksz);
		ivwords = ctx->blocksz / sizeof(u32);
	}

	return ivwords;
}
104*4882a593Smuzhiyun
/*
 * Build the processing token for a plain skcipher request: load the IV,
 * then emit a single DIRECTION instruction covering @length bytes of
 * crypto payload.
 */
static void safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				    struct safexcel_command_desc *cdesc,
				    struct safexcel_token *atoken,
				    u32 length)
{
	struct safexcel_token *dirtok;

	if (safexcel_skcipher_iv(ctx, iv, cdesc) == 4) {
		/* IV filled the whole cdesc token area, continue in atoken */
		cdesc->additional_cdata_size = 1;
		dirtok = atoken;
	} else {
		/* IV left room in cdesc for the instruction itself */
		dirtok = (struct safexcel_token *)(cdesc->control_data.token + 2);
		/* Pad the remaining slot with a NOP */
		eip197_noop_token(&dirtok[1]);
	}

	dirtok->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
	dirtok->packet_length = length;
	dirtok->stat = EIP197_TOKEN_STAT_LAST_PACKET |
		       EIP197_TOKEN_STAT_LAST_HASH;
	dirtok->instructions = EIP197_TOKEN_INS_LAST |
			       EIP197_TOKEN_INS_TYPE_CRYPTO |
			       EIP197_TOKEN_INS_TYPE_OUTPUT;
}
133*4882a593Smuzhiyun
/*
 * Load the IV for an AEAD operation into the 4-dword command descriptor
 * token area. The layout depends on the cipher mode: CTR/ESP uses
 * nonce + 64 bit IV + counter, GCM/ChaCha use a 96 bit IV + counter,
 * everything else (CBC) takes a full-blocksize IV.
 */
static void safexcel_aead_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
			     struct safexcel_command_desc *cdesc)
{
	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD ||
	    ctx->aead & EIP197_AEAD_TYPE_IPSEC_ESP) {	/* _ESP and _ESP_GMAC */
		/* 32 bit nonce */
		cdesc->control_data.token[0] = ctx->nonce;
		/* 64 bit IV part */
		memcpy(&cdesc->control_data.token[1], iv, 8);
		/* 32 bit counter, start at 0 or 1 (big endian!) */
		cdesc->control_data.token[3] =
			(__force u32)cpu_to_be32(ctx->ctrinit);
		return;
	}
	if (ctx->xcm == EIP197_XCM_MODE_GCM || ctx->alg == SAFEXCEL_CHACHA20) {
		/* 96 bit IV part */
		memcpy(&cdesc->control_data.token[0], iv, 12);
		/* 32 bit counter, start at 0 or 1 (big endian!) */
		cdesc->control_data.token[3] =
			(__force u32)cpu_to_be32(ctx->ctrinit);
		return;
	}
	/* CBC */
	memcpy(cdesc->control_data.token, iv, ctx->blocksz);
}
159*4882a593Smuzhiyun
/*
 * Build the full instruction token stream for an AEAD request.
 *
 * The stream is assembled into @atoken (additional token words following
 * the command descriptor) and varies with the mode: CCM prepends the
 * CBC-MAC B0 block and encoded AAD length, ESP modes skip the explicit
 * IV in the input, GCM/CCM insert enc(Y0), and decrypt appends ICV
 * retrieve+verify instructions where encrypt appends an ICV insert.
 * The final token word count is written back into the descriptor.
 */
static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				struct safexcel_command_desc *cdesc,
				struct safexcel_token *atoken,
				enum safexcel_cipher_direction direction,
				u32 cryptlen, u32 assoclen, u32 digestsize)
{
	struct safexcel_token *aadref;
	int atoksize = 2; /* Start with minimum size */
	int assocadj = assoclen - ctx->aadskip, aadalign;

	/* Always 4 dwords of embedded IV for AEAD modes */
	cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;

	/* On decrypt, the trailing ICV is not part of the crypto payload */
	if (direction == SAFEXCEL_DECRYPT)
		cryptlen -= digestsize;

	if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM)) {
		/* Construct IV block B0 for the CBC-MAC */
		u8 *final_iv = (u8 *)cdesc->control_data.token;
		u8 *cbcmaciv = (u8 *)&atoken[1];
		__le32 *aadlen = (__le32 *)&atoken[5];

		if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
			/* Length + nonce */
			cdesc->control_data.token[0] = ctx->nonce;
			/* Fixup flags byte */
			*(__le32 *)cbcmaciv =
				cpu_to_le32(ctx->nonce |
					    ((assocadj > 0) << 6) |
					    ((digestsize - 2) << 2));
			/* 64 bit IV part */
			memcpy(&cdesc->control_data.token[1], iv, 8);
			memcpy(cbcmaciv + 4, iv, 8);
			/* Start counter at 0 */
			cdesc->control_data.token[3] = 0;
			/* Message length */
			*(__be32 *)(cbcmaciv + 12) = cpu_to_be32(cryptlen);
		} else {
			/* Variable length IV part */
			memcpy(final_iv, iv, 15 - iv[0]);
			memcpy(cbcmaciv, iv, 15 - iv[0]);
			/* Start variable length counter at 0 */
			memset(final_iv + 15 - iv[0], 0, iv[0] + 1);
			memset(cbcmaciv + 15 - iv[0], 0, iv[0] - 1);
			/* fixup flags byte */
			cbcmaciv[0] |= ((assocadj > 0) << 6) |
				       ((digestsize - 2) << 2);
			/* insert lower 2 bytes of message length */
			cbcmaciv[14] = cryptlen >> 8;
			cbcmaciv[15] = cryptlen & 255;
		}

		/* Feed B0 (and the AAD length words, if any) to the hash */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = AES_BLOCK_SIZE +
					((assocadj > 0) << 1);
		atoken->stat = 0;
		atoken->instructions = EIP197_TOKEN_INS_ORIGIN_TOKEN |
				       EIP197_TOKEN_INS_TYPE_HASH;

		if (likely(assocadj)) {
			/* AAD length encoded big endian in 2 bytes */
			*aadlen = cpu_to_le32((assocadj >> 8) |
					      (assocadj & 255) << 8);
			atoken += 6;
			atoksize += 7;
		} else {
			atoken += 5;
			atoksize += 6;
		}

		/* Process AAD data */
		aadref = atoken;
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = assocadj;
		atoken->stat = 0;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
		atoken++;

		/* For CCM only, align AAD data towards hash engine */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		aadalign = (assocadj + 2) & 15;
		atoken->packet_length = assocadj && aadalign ?
						16 - aadalign :
						0;
		if (likely(cryptlen)) {
			atoken->stat = 0;
			atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
		} else {
			/* No payload follows, so this closes the hash */
			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
			atoken->instructions = EIP197_TOKEN_INS_LAST |
					       EIP197_TOKEN_INS_TYPE_HASH;
		}
	} else {
		safexcel_aead_iv(ctx, iv, cdesc);

		/* Process AAD data */
		aadref = atoken;
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = assocadj;
		/* Provisionally last; fixed up below if payload follows */
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
		atoken->instructions = EIP197_TOKEN_INS_LAST |
				       EIP197_TOKEN_INS_TYPE_HASH;
	}
	atoken++;

	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
		/* For ESP mode (and not GMAC), skip over the IV */
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = EIP197_AEAD_IPSEC_IV_SIZE;
		atoken->stat = 0;
		atoken->instructions = 0;
		atoken++;
		atoksize++;
	} else if (unlikely(ctx->alg == SAFEXCEL_CHACHA20 &&
			    direction == SAFEXCEL_DECRYPT)) {
		/* Poly-chacha decryption needs a dummy NOP here ... */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = 16; /* According to Op Manual */
		atoken->stat = 0;
		atoken->instructions = 0;
		atoken++;
		atoksize++;
	}

	if (ctx->xcm) {
		/* For GCM and CCM, obtain enc(Y0) */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT_REMRES;
		atoken->packet_length = 0;
		atoken->stat = 0;
		atoken->instructions = AES_BLOCK_SIZE;
		atoken++;

		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = AES_BLOCK_SIZE;
		atoken->stat = 0;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
				       EIP197_TOKEN_INS_TYPE_CRYPTO;
		atoken++;
		atoksize += 2;
	}

	if (likely(cryptlen || ctx->alg == SAFEXCEL_CHACHA20)) {
		/* Fixup stat field for AAD direction instruction */
		aadref->stat = 0;

		/* Process crypto data */
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = cryptlen;

		if (unlikely(ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC)) {
			/* Fixup instruction field for AAD dir instruction */
			aadref->instructions = EIP197_TOKEN_INS_TYPE_HASH;

			/* Do not send to crypt engine in case of GMAC */
			atoken->instructions = EIP197_TOKEN_INS_LAST |
					       EIP197_TOKEN_INS_TYPE_HASH |
					       EIP197_TOKEN_INS_TYPE_OUTPUT;
		} else {
			atoken->instructions = EIP197_TOKEN_INS_LAST |
					       EIP197_TOKEN_INS_TYPE_CRYPTO |
					       EIP197_TOKEN_INS_TYPE_HASH |
					       EIP197_TOKEN_INS_TYPE_OUTPUT;
		}

		cryptlen &= 15;
		if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM && cryptlen)) {
			atoken->stat = 0;
			/* For CCM only, pad crypto data to the hash engine */
			atoken++;
			atoksize++;
			atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
			atoken->packet_length = 16 - cryptlen;
			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
			atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
		} else {
			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
		}
		atoken++;
		atoksize++;
	}

	if (direction == SAFEXCEL_ENCRYPT) {
		/* Append ICV */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = digestsize;
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
			       EIP197_TOKEN_STAT_LAST_PACKET;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
				       EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
	} else {
		/* Extract ICV */
		atoken->opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
		atoken->packet_length = digestsize;
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
			       EIP197_TOKEN_STAT_LAST_PACKET;
		atoken->instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
		atoken++;
		atoksize++;

		/* Verify ICV */
		atoken->opcode = EIP197_TOKEN_OPCODE_VERIFY;
		atoken->packet_length = digestsize |
					EIP197_TOKEN_HASH_RESULT_VERIFY;
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
			       EIP197_TOKEN_STAT_LAST_PACKET;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
	}

	/* Fixup length of the token in the command descriptor */
	cdesc->additional_cdata_size = atoksize;
}
370*4882a593Smuzhiyun
safexcel_skcipher_aes_setkey(struct crypto_skcipher * ctfm,const u8 * key,unsigned int len)371*4882a593Smuzhiyun static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
372*4882a593Smuzhiyun const u8 *key, unsigned int len)
373*4882a593Smuzhiyun {
374*4882a593Smuzhiyun struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
375*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
376*4882a593Smuzhiyun struct safexcel_crypto_priv *priv = ctx->base.priv;
377*4882a593Smuzhiyun struct crypto_aes_ctx aes;
378*4882a593Smuzhiyun int ret, i;
379*4882a593Smuzhiyun
380*4882a593Smuzhiyun ret = aes_expandkey(&aes, key, len);
381*4882a593Smuzhiyun if (ret)
382*4882a593Smuzhiyun return ret;
383*4882a593Smuzhiyun
384*4882a593Smuzhiyun if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
385*4882a593Smuzhiyun for (i = 0; i < len / sizeof(u32); i++) {
386*4882a593Smuzhiyun if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
387*4882a593Smuzhiyun ctx->base.needs_inv = true;
388*4882a593Smuzhiyun break;
389*4882a593Smuzhiyun }
390*4882a593Smuzhiyun }
391*4882a593Smuzhiyun }
392*4882a593Smuzhiyun
393*4882a593Smuzhiyun for (i = 0; i < len / sizeof(u32); i++)
394*4882a593Smuzhiyun ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
395*4882a593Smuzhiyun
396*4882a593Smuzhiyun ctx->key_len = len;
397*4882a593Smuzhiyun
398*4882a593Smuzhiyun memzero_explicit(&aes, sizeof(aes));
399*4882a593Smuzhiyun return 0;
400*4882a593Smuzhiyun }
401*4882a593Smuzhiyun
safexcel_aead_setkey(struct crypto_aead * ctfm,const u8 * key,unsigned int len)402*4882a593Smuzhiyun static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
403*4882a593Smuzhiyun unsigned int len)
404*4882a593Smuzhiyun {
405*4882a593Smuzhiyun struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
406*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
407*4882a593Smuzhiyun struct safexcel_crypto_priv *priv = ctx->base.priv;
408*4882a593Smuzhiyun struct crypto_authenc_keys keys;
409*4882a593Smuzhiyun struct crypto_aes_ctx aes;
410*4882a593Smuzhiyun int err = -EINVAL, i;
411*4882a593Smuzhiyun const char *alg;
412*4882a593Smuzhiyun
413*4882a593Smuzhiyun if (unlikely(crypto_authenc_extractkeys(&keys, key, len)))
414*4882a593Smuzhiyun goto badkey;
415*4882a593Smuzhiyun
416*4882a593Smuzhiyun if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
417*4882a593Smuzhiyun /* Must have at least space for the nonce here */
418*4882a593Smuzhiyun if (unlikely(keys.enckeylen < CTR_RFC3686_NONCE_SIZE))
419*4882a593Smuzhiyun goto badkey;
420*4882a593Smuzhiyun /* last 4 bytes of key are the nonce! */
421*4882a593Smuzhiyun ctx->nonce = *(u32 *)(keys.enckey + keys.enckeylen -
422*4882a593Smuzhiyun CTR_RFC3686_NONCE_SIZE);
423*4882a593Smuzhiyun /* exclude the nonce here */
424*4882a593Smuzhiyun keys.enckeylen -= CTR_RFC3686_NONCE_SIZE;
425*4882a593Smuzhiyun }
426*4882a593Smuzhiyun
427*4882a593Smuzhiyun /* Encryption key */
428*4882a593Smuzhiyun switch (ctx->alg) {
429*4882a593Smuzhiyun case SAFEXCEL_DES:
430*4882a593Smuzhiyun err = verify_aead_des_key(ctfm, keys.enckey, keys.enckeylen);
431*4882a593Smuzhiyun if (unlikely(err))
432*4882a593Smuzhiyun goto badkey;
433*4882a593Smuzhiyun break;
434*4882a593Smuzhiyun case SAFEXCEL_3DES:
435*4882a593Smuzhiyun err = verify_aead_des3_key(ctfm, keys.enckey, keys.enckeylen);
436*4882a593Smuzhiyun if (unlikely(err))
437*4882a593Smuzhiyun goto badkey;
438*4882a593Smuzhiyun break;
439*4882a593Smuzhiyun case SAFEXCEL_AES:
440*4882a593Smuzhiyun err = aes_expandkey(&aes, keys.enckey, keys.enckeylen);
441*4882a593Smuzhiyun if (unlikely(err))
442*4882a593Smuzhiyun goto badkey;
443*4882a593Smuzhiyun break;
444*4882a593Smuzhiyun case SAFEXCEL_SM4:
445*4882a593Smuzhiyun if (unlikely(keys.enckeylen != SM4_KEY_SIZE))
446*4882a593Smuzhiyun goto badkey;
447*4882a593Smuzhiyun break;
448*4882a593Smuzhiyun default:
449*4882a593Smuzhiyun dev_err(priv->dev, "aead: unsupported cipher algorithm\n");
450*4882a593Smuzhiyun goto badkey;
451*4882a593Smuzhiyun }
452*4882a593Smuzhiyun
453*4882a593Smuzhiyun if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
454*4882a593Smuzhiyun for (i = 0; i < keys.enckeylen / sizeof(u32); i++) {
455*4882a593Smuzhiyun if (le32_to_cpu(ctx->key[i]) !=
456*4882a593Smuzhiyun ((u32 *)keys.enckey)[i]) {
457*4882a593Smuzhiyun ctx->base.needs_inv = true;
458*4882a593Smuzhiyun break;
459*4882a593Smuzhiyun }
460*4882a593Smuzhiyun }
461*4882a593Smuzhiyun }
462*4882a593Smuzhiyun
463*4882a593Smuzhiyun /* Auth key */
464*4882a593Smuzhiyun switch (ctx->hash_alg) {
465*4882a593Smuzhiyun case CONTEXT_CONTROL_CRYPTO_ALG_SHA1:
466*4882a593Smuzhiyun alg = "safexcel-sha1";
467*4882a593Smuzhiyun break;
468*4882a593Smuzhiyun case CONTEXT_CONTROL_CRYPTO_ALG_SHA224:
469*4882a593Smuzhiyun alg = "safexcel-sha224";
470*4882a593Smuzhiyun break;
471*4882a593Smuzhiyun case CONTEXT_CONTROL_CRYPTO_ALG_SHA256:
472*4882a593Smuzhiyun alg = "safexcel-sha256";
473*4882a593Smuzhiyun break;
474*4882a593Smuzhiyun case CONTEXT_CONTROL_CRYPTO_ALG_SHA384:
475*4882a593Smuzhiyun alg = "safexcel-sha384";
476*4882a593Smuzhiyun break;
477*4882a593Smuzhiyun case CONTEXT_CONTROL_CRYPTO_ALG_SHA512:
478*4882a593Smuzhiyun alg = "safexcel-sha512";
479*4882a593Smuzhiyun break;
480*4882a593Smuzhiyun case CONTEXT_CONTROL_CRYPTO_ALG_SM3:
481*4882a593Smuzhiyun alg = "safexcel-sm3";
482*4882a593Smuzhiyun break;
483*4882a593Smuzhiyun default:
484*4882a593Smuzhiyun dev_err(priv->dev, "aead: unsupported hash algorithm\n");
485*4882a593Smuzhiyun goto badkey;
486*4882a593Smuzhiyun }
487*4882a593Smuzhiyun
488*4882a593Smuzhiyun if (safexcel_hmac_setkey(&ctx->base, keys.authkey, keys.authkeylen,
489*4882a593Smuzhiyun alg, ctx->state_sz))
490*4882a593Smuzhiyun goto badkey;
491*4882a593Smuzhiyun
492*4882a593Smuzhiyun /* Now copy the keys into the context */
493*4882a593Smuzhiyun for (i = 0; i < keys.enckeylen / sizeof(u32); i++)
494*4882a593Smuzhiyun ctx->key[i] = cpu_to_le32(((u32 *)keys.enckey)[i]);
495*4882a593Smuzhiyun ctx->key_len = keys.enckeylen;
496*4882a593Smuzhiyun
497*4882a593Smuzhiyun memzero_explicit(&keys, sizeof(keys));
498*4882a593Smuzhiyun return 0;
499*4882a593Smuzhiyun
500*4882a593Smuzhiyun badkey:
501*4882a593Smuzhiyun memzero_explicit(&keys, sizeof(keys));
502*4882a593Smuzhiyun return err;
503*4882a593Smuzhiyun }
504*4882a593Smuzhiyun
/*
 * Fill in the context control words (control0/control1) of the command
 * descriptor: operation type (encrypt/decrypt, hash order), digest mode
 * (XCM/HMAC), cipher algorithm/key size and total context record size.
 *
 * Returns 0 on success or -EINVAL for an unsupported AES key size.
 */
static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
				    struct crypto_async_request *async,
				    struct safexcel_cipher_req *sreq,
				    struct safexcel_command_desc *cdesc)
{
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	/* Context size starts with the cipher key, in 32 bit words */
	int ctrl_size = ctx->key_len / sizeof(u32);

	cdesc->control_data.control1 = ctx->mode;

	if (ctx->aead) {
		/* Take in account the ipad+opad digests */
		if (ctx->xcm) {
			/* GCM/CCM: single XCM digest state follows the key */
			ctrl_size += ctx->state_sz / sizeof(u32);
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_DIGEST_XCM |
				ctx->hash_alg |
				CONTEXT_CONTROL_SIZE(ctrl_size);
		} else if (ctx->alg == SAFEXCEL_CHACHA20) {
			/* Chacha20-Poly1305 */
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20 |
				(sreq->direction == SAFEXCEL_ENCRYPT ?
					CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT :
					CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN) |
				ctx->hash_alg |
				CONTEXT_CONTROL_SIZE(ctrl_size);
			/* Type already encoded above, skip the common path */
			return 0;
		} else {
			/* HMAC: both ipad and opad states follow the key */
			ctrl_size += ctx->state_sz / sizeof(u32) * 2;
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_DIGEST_HMAC |
				ctx->hash_alg |
				CONTEXT_CONTROL_SIZE(ctrl_size);
		}

		/* CCM and GMAC hash before encrypting; others the reverse */
		if (sreq->direction == SAFEXCEL_ENCRYPT &&
		    (ctx->xcm == EIP197_XCM_MODE_CCM ||
		     ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC))
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_HASH_ENCRYPT_OUT;
		else if (sreq->direction == SAFEXCEL_ENCRYPT)
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
		else if (ctx->xcm == EIP197_XCM_MODE_CCM)
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_DECRYPT_HASH_IN;
		else
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
	} else {
		/* Plain skcipher: crypto only, no hash */
		if (sreq->direction == SAFEXCEL_ENCRYPT)
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_TYPE_CRYPTO_OUT |
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_SIZE(ctrl_size);
		else
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_TYPE_CRYPTO_IN |
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_SIZE(ctrl_size);
	}

	if (ctx->alg == SAFEXCEL_DES) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_DES;
	} else if (ctx->alg == SAFEXCEL_3DES) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_3DES;
	} else if (ctx->alg == SAFEXCEL_AES) {
		/* For XTS, key_len holds two keys, hence the >> ctx->xts */
		switch (ctx->key_len >> ctx->xts) {
		case AES_KEYSIZE_128:
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_CRYPTO_ALG_AES128;
			break;
		case AES_KEYSIZE_192:
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_CRYPTO_ALG_AES192;
			break;
		case AES_KEYSIZE_256:
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_CRYPTO_ALG_AES256;
			break;
		default:
			dev_err(priv->dev, "aes keysize not supported: %u\n",
				ctx->key_len >> ctx->xts);
			return -EINVAL;
		}
	} else if (ctx->alg == SAFEXCEL_CHACHA20) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20;
	} else if (ctx->alg == SAFEXCEL_SM4) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_SM4;
	}

	return 0;
}
606*4882a593Smuzhiyun
/*
 * Completion handler for a skcipher request: consume all result
 * descriptors belonging to the request, unmap the DMA scatterlists and
 * copy back the next-IV for CBC encryption.
 *
 * Returns the number of result descriptors consumed; the request status
 * is reported through *ret and *should_complete.
 */
static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
				      struct crypto_async_request *async,
				      struct scatterlist *src,
				      struct scatterlist *dst,
				      unsigned int cryptlen,
				      struct safexcel_cipher_req *sreq,
				      bool *should_complete, int *ret)
{
	struct skcipher_request *areq = skcipher_request_cast(async);
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct safexcel_result_desc *rdesc;
	int ndesc = 0;

	*ret = 0;

	if (unlikely(!sreq->rdescs))
		return 0;

	while (sreq->rdescs--) {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: result: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		/* Only latch the first error; keep draining descriptors */
		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	}

	safexcel_complete(priv, ring);

	if (src == dst) {
		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
		dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
	}

	/*
	 * Update IV in req from last crypto output word for CBC modes
	 */
	if ((!ctx->aead) && (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
	    (sreq->direction == SAFEXCEL_ENCRYPT)) {
		/* For encrypt take the last output word */
		sg_pcopy_to_buffer(dst, sreq->nr_dst, areq->iv,
				   crypto_skcipher_ivsize(skcipher),
				   (cryptlen -
				    crypto_skcipher_ivsize(skcipher)));
	}

	*should_complete = true;

	return ndesc;
}
666*4882a593Smuzhiyun
/*
 * Build and submit the command and result descriptor chains for one
 * cipher or AEAD request on the given ring.
 *
 * DMA-maps src/dst (bidirectionally if in-place), loads key and - for
 * AEAD - the inner/outer hash state into the per-tfm context record,
 * then emits one command descriptor per source segment and one result
 * descriptor per destination segment (skipping the AAD area, which the
 * engine does not write). On any descriptor-ring shortage, everything
 * emitted so far is rolled back and the buffers are unmapped.
 *
 * On success *commands / *results return the number of descriptors
 * queued; return value is 0 or a negative errno.
 */
static int safexcel_send_req(struct crypto_async_request *base, int ring,
			     struct safexcel_cipher_req *sreq,
			     struct scatterlist *src, struct scatterlist *dst,
			     unsigned int cryptlen, unsigned int assoclen,
			     unsigned int digestsize, u8 *iv, int *commands,
			     int *results)
{
	struct skcipher_request *areq = skcipher_request_cast(base);
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct safexcel_command_desc *cdesc;
	struct safexcel_command_desc *first_cdesc = NULL;
	struct safexcel_result_desc *rdesc, *first_rdesc = NULL;
	struct scatterlist *sg;
	unsigned int totlen;
	unsigned int totlen_src = cryptlen + assoclen;
	unsigned int totlen_dst = totlen_src;
	struct safexcel_token *atoken;
	int n_cdesc = 0, n_rdesc = 0;
	int queued, i, ret = 0;
	bool first = true;

	sreq->nr_src = sg_nents_for_len(src, totlen_src);

	if (ctx->aead) {
		/*
		 * AEAD has auth tag appended to output for encrypt and
		 * removed from the output for decrypt!
		 */
		if (sreq->direction == SAFEXCEL_DECRYPT)
			totlen_dst -= digestsize;
		else
			totlen_dst += digestsize;

		/* Load ipad (and opad for non-XCM modes) after the key */
		memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
		       &ctx->base.ipad, ctx->state_sz);
		if (!ctx->xcm)
			memcpy(ctx->base.ctxr->data + (ctx->key_len +
			       ctx->state_sz) / sizeof(u32), &ctx->base.opad,
			       ctx->state_sz);
	} else if ((ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
		   (sreq->direction == SAFEXCEL_DECRYPT)) {
		/*
		 * Save IV from last crypto input word for CBC modes in decrypt
		 * direction. Need to do this first in case of inplace operation
		 * as it will be overwritten.
		 */
		sg_pcopy_to_buffer(src, sreq->nr_src, areq->iv,
				   crypto_skcipher_ivsize(skcipher),
				   (totlen_src -
				    crypto_skcipher_ivsize(skcipher)));
	}

	sreq->nr_dst = sg_nents_for_len(dst, totlen_dst);

	/*
	 * Remember actual input length, source buffer length may be
	 * updated in case of inline operation below.
	 */
	totlen = totlen_src;
	queued = totlen_src;

	if (src == dst) {
		/* In-place: one mapping must cover the larger of src/dst */
		sreq->nr_src = max(sreq->nr_src, sreq->nr_dst);
		sreq->nr_dst = sreq->nr_src;
		if (unlikely((totlen_src || totlen_dst) &&
		    (sreq->nr_src <= 0))) {
			dev_err(priv->dev, "In-place buffer not large enough (need %d bytes)!",
				max(totlen_src, totlen_dst));
			return -EINVAL;
		}
		dma_map_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
	} else {
		if (unlikely(totlen_src && (sreq->nr_src <= 0))) {
			dev_err(priv->dev, "Source buffer not large enough (need %d bytes)!",
				totlen_src);
			return -EINVAL;
		}
		dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);

		if (unlikely(totlen_dst && (sreq->nr_dst <= 0))) {
			dev_err(priv->dev, "Dest buffer not large enough (need %d bytes)!",
				totlen_dst);
			dma_unmap_sg(priv->dev, src, sreq->nr_src,
				     DMA_TO_DEVICE);
			return -EINVAL;
		}
		dma_map_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
	}

	memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);

	if (!totlen) {
		/*
		 * The EIP97 cannot deal with zero length input packets!
		 * So stuff a dummy command descriptor indicating a 1 byte
		 * (dummy) input packet, using the context record as source.
		 */
		first_cdesc = safexcel_add_cdesc(priv, ring,
						 1, 1, ctx->base.ctxr_dma,
						 1, 1, ctx->base.ctxr_dma,
						 &atoken);
		if (IS_ERR(first_cdesc)) {
			/* No space left in the command descriptor ring */
			ret = PTR_ERR(first_cdesc);
			goto cdesc_rollback;
		}
		n_cdesc = 1;
		goto skip_cdesc;
	}

	/* command descriptors: one per source segment, capped at totlen */
	for_each_sg(src, sg, sreq->nr_src, i) {
		int len = sg_dma_len(sg);

		/* Do not overflow the request */
		if (queued < len)
			len = queued;

		cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
					   !(queued - len),
					   sg_dma_address(sg), len, totlen,
					   ctx->base.ctxr_dma, &atoken);
		if (IS_ERR(cdesc)) {
			/* No space left in the command descriptor ring */
			ret = PTR_ERR(cdesc);
			goto cdesc_rollback;
		}

		if (!n_cdesc)
			first_cdesc = cdesc;

		n_cdesc++;
		queued -= len;
		if (!queued)
			break;
	}
skip_cdesc:
	/* Add context control words and token to first command descriptor */
	safexcel_context_control(ctx, base, sreq, first_cdesc);
	if (ctx->aead)
		safexcel_aead_token(ctx, iv, first_cdesc, atoken,
				    sreq->direction, cryptlen,
				    assoclen, digestsize);
	else
		safexcel_skcipher_token(ctx, iv, first_cdesc, atoken,
					cryptlen);

	/* result descriptors: one per destination segment actually written */
	for_each_sg(dst, sg, sreq->nr_dst, i) {
		bool last = (i == sreq->nr_dst - 1);
		u32 len = sg_dma_len(sg);

		/* only allow the part of the buffer we know we need */
		if (len > totlen_dst)
			len = totlen_dst;
		if (unlikely(!len))
			break;
		totlen_dst -= len;

		/* skip over AAD space in buffer - not written */
		if (assoclen) {
			if (assoclen >= len) {
				assoclen -= len;
				continue;
			}
			rdesc = safexcel_add_rdesc(priv, ring, first, last,
						   sg_dma_address(sg) +
						   assoclen,
						   len - assoclen);
			assoclen = 0;
		} else {
			rdesc = safexcel_add_rdesc(priv, ring, first, last,
						   sg_dma_address(sg),
						   len);
		}
		if (IS_ERR(rdesc)) {
			/* No space left in the result descriptor ring */
			ret = PTR_ERR(rdesc);
			goto rdesc_rollback;
		}
		if (first) {
			first_rdesc = rdesc;
			first = false;
		}
		n_rdesc++;
	}

	if (unlikely(first)) {
		/*
		 * Special case: AEAD decrypt with only AAD data.
		 * In this case there is NO output data from the engine,
		 * but the engine still needs a result descriptor!
		 * Create a dummy one just for catching the result token.
		 */
		rdesc = safexcel_add_rdesc(priv, ring, true, true, 0, 0);
		if (IS_ERR(rdesc)) {
			/* No space left in the result descriptor ring */
			ret = PTR_ERR(rdesc);
			goto rdesc_rollback;
		}
		first_rdesc = rdesc;
		n_rdesc = 1;
	}

	safexcel_rdr_req_set(priv, ring, first_rdesc, base);

	*commands = n_cdesc;
	*results = n_rdesc;
	return 0;

rdesc_rollback:
	/* Undo everything queued so far, then unmap the buffers */
	for (i = 0; i < n_rdesc; i++)
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].rdr);
cdesc_rollback:
	for (i = 0; i < n_cdesc; i++)
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);

	if (src == dst) {
		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
		dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
	}

	return ret;
}
895*4882a593Smuzhiyun
/*
 * Process the result of a context-record invalidation request.
 *
 * When the tfm is being torn down (base.exit_inv set) the context
 * record is freed and the request completes. Otherwise the original
 * crypto request that triggered the invalidation is re-queued on a
 * freshly selected ring so it can now be processed.
 *
 * Returns the number of result descriptors consumed.
 */
static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
				      int ring,
				      struct crypto_async_request *base,
				      struct safexcel_cipher_req *sreq,
				      bool *should_complete, int *ret)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_result_desc *rdesc;
	int ndesc = 0, enq_ret;

	*ret = 0;

	if (unlikely(!sreq->rdescs))
		return 0;

	while (sreq->rdescs--) {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: invalidate: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		/* Only the first error encountered is reported back */
		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	}

	safexcel_complete(priv, ring);

	if (ctx->base.exit_inv) {
		/* tfm teardown: release the context record for good */
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);

		*should_complete = true;

		return ndesc;
	}

	/* Invalidation done: re-queue the original request on a new ring */
	ring = safexcel_select_ring(priv);
	ctx->base.ring = ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	/* -EINPROGRESS is the expected (success) enqueue result */
	if (enq_ret != -EINPROGRESS)
		*ret = enq_ret;

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	/* Do not complete yet - the re-queued request will do that */
	*should_complete = false;

	return ndesc;
}
954*4882a593Smuzhiyun
/*
 * Result dispatcher for skcipher requests: route to the invalidation
 * handler when an invalidation was pending, otherwise to the normal
 * request result handler.
 */
static int safexcel_skcipher_handle_result(struct safexcel_crypto_priv *priv,
					   int ring,
					   struct crypto_async_request *async,
					   bool *should_complete, int *ret)
{
	struct skcipher_request *sk_req = skcipher_request_cast(async);
	struct safexcel_cipher_req *cipher_req = skcipher_request_ctx(sk_req);

	/* A pending cache invalidation takes precedence; clear the flag */
	if (cipher_req->needs_inv) {
		cipher_req->needs_inv = false;
		return safexcel_handle_inv_result(priv, ring, async,
						  cipher_req, should_complete,
						  ret);
	}

	return safexcel_handle_req_result(priv, ring, async, sk_req->src,
					  sk_req->dst, sk_req->cryptlen,
					  cipher_req, should_complete, ret);
}
976*4882a593Smuzhiyun
/*
 * Result dispatcher for AEAD requests: route to the invalidation
 * handler when an invalidation was pending, otherwise to the normal
 * request result handler (data length includes the auth tag).
 */
static int safexcel_aead_handle_result(struct safexcel_crypto_priv *priv,
				       int ring,
				       struct crypto_async_request *async,
				       bool *should_complete, int *ret)
{
	struct aead_request *aead_req = aead_request_cast(async);
	struct crypto_aead *aead = crypto_aead_reqtfm(aead_req);
	struct safexcel_cipher_req *cipher_req = aead_request_ctx(aead_req);

	/* A pending cache invalidation takes precedence; clear the flag */
	if (cipher_req->needs_inv) {
		cipher_req->needs_inv = false;
		return safexcel_handle_inv_result(priv, ring, async,
						  cipher_req, should_complete,
						  ret);
	}

	return safexcel_handle_req_result(priv, ring, async, aead_req->src,
					  aead_req->dst,
					  aead_req->cryptlen +
					  crypto_aead_authsize(aead),
					  cipher_req, should_complete, ret);
}
1000*4882a593Smuzhiyun
safexcel_cipher_send_inv(struct crypto_async_request * base,int ring,int * commands,int * results)1001*4882a593Smuzhiyun static int safexcel_cipher_send_inv(struct crypto_async_request *base,
1002*4882a593Smuzhiyun int ring, int *commands, int *results)
1003*4882a593Smuzhiyun {
1004*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
1005*4882a593Smuzhiyun struct safexcel_crypto_priv *priv = ctx->base.priv;
1006*4882a593Smuzhiyun int ret;
1007*4882a593Smuzhiyun
1008*4882a593Smuzhiyun ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
1009*4882a593Smuzhiyun if (unlikely(ret))
1010*4882a593Smuzhiyun return ret;
1011*4882a593Smuzhiyun
1012*4882a593Smuzhiyun *commands = 1;
1013*4882a593Smuzhiyun *results = 1;
1014*4882a593Smuzhiyun
1015*4882a593Smuzhiyun return 0;
1016*4882a593Smuzhiyun }
1017*4882a593Smuzhiyun
safexcel_skcipher_send(struct crypto_async_request * async,int ring,int * commands,int * results)1018*4882a593Smuzhiyun static int safexcel_skcipher_send(struct crypto_async_request *async, int ring,
1019*4882a593Smuzhiyun int *commands, int *results)
1020*4882a593Smuzhiyun {
1021*4882a593Smuzhiyun struct skcipher_request *req = skcipher_request_cast(async);
1022*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
1023*4882a593Smuzhiyun struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
1024*4882a593Smuzhiyun struct safexcel_crypto_priv *priv = ctx->base.priv;
1025*4882a593Smuzhiyun int ret;
1026*4882a593Smuzhiyun
1027*4882a593Smuzhiyun BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
1028*4882a593Smuzhiyun
1029*4882a593Smuzhiyun if (sreq->needs_inv) {
1030*4882a593Smuzhiyun ret = safexcel_cipher_send_inv(async, ring, commands, results);
1031*4882a593Smuzhiyun } else {
1032*4882a593Smuzhiyun struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1033*4882a593Smuzhiyun u8 input_iv[AES_BLOCK_SIZE];
1034*4882a593Smuzhiyun
1035*4882a593Smuzhiyun /*
1036*4882a593Smuzhiyun * Save input IV in case of CBC decrypt mode
1037*4882a593Smuzhiyun * Will be overwritten with output IV prior to use!
1038*4882a593Smuzhiyun */
1039*4882a593Smuzhiyun memcpy(input_iv, req->iv, crypto_skcipher_ivsize(skcipher));
1040*4882a593Smuzhiyun
1041*4882a593Smuzhiyun ret = safexcel_send_req(async, ring, sreq, req->src,
1042*4882a593Smuzhiyun req->dst, req->cryptlen, 0, 0, input_iv,
1043*4882a593Smuzhiyun commands, results);
1044*4882a593Smuzhiyun }
1045*4882a593Smuzhiyun
1046*4882a593Smuzhiyun sreq->rdescs = *results;
1047*4882a593Smuzhiyun return ret;
1048*4882a593Smuzhiyun }
1049*4882a593Smuzhiyun
safexcel_aead_send(struct crypto_async_request * async,int ring,int * commands,int * results)1050*4882a593Smuzhiyun static int safexcel_aead_send(struct crypto_async_request *async, int ring,
1051*4882a593Smuzhiyun int *commands, int *results)
1052*4882a593Smuzhiyun {
1053*4882a593Smuzhiyun struct aead_request *req = aead_request_cast(async);
1054*4882a593Smuzhiyun struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1055*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
1056*4882a593Smuzhiyun struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1057*4882a593Smuzhiyun struct safexcel_crypto_priv *priv = ctx->base.priv;
1058*4882a593Smuzhiyun int ret;
1059*4882a593Smuzhiyun
1060*4882a593Smuzhiyun BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
1061*4882a593Smuzhiyun
1062*4882a593Smuzhiyun if (sreq->needs_inv)
1063*4882a593Smuzhiyun ret = safexcel_cipher_send_inv(async, ring, commands, results);
1064*4882a593Smuzhiyun else
1065*4882a593Smuzhiyun ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
1066*4882a593Smuzhiyun req->cryptlen, req->assoclen,
1067*4882a593Smuzhiyun crypto_aead_authsize(tfm), req->iv,
1068*4882a593Smuzhiyun commands, results);
1069*4882a593Smuzhiyun sreq->rdescs = *results;
1070*4882a593Smuzhiyun return ret;
1071*4882a593Smuzhiyun }
1072*4882a593Smuzhiyun
safexcel_cipher_exit_inv(struct crypto_tfm * tfm,struct crypto_async_request * base,struct safexcel_cipher_req * sreq,struct safexcel_inv_result * result)1073*4882a593Smuzhiyun static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm,
1074*4882a593Smuzhiyun struct crypto_async_request *base,
1075*4882a593Smuzhiyun struct safexcel_cipher_req *sreq,
1076*4882a593Smuzhiyun struct safexcel_inv_result *result)
1077*4882a593Smuzhiyun {
1078*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1079*4882a593Smuzhiyun struct safexcel_crypto_priv *priv = ctx->base.priv;
1080*4882a593Smuzhiyun int ring = ctx->base.ring;
1081*4882a593Smuzhiyun
1082*4882a593Smuzhiyun init_completion(&result->completion);
1083*4882a593Smuzhiyun
1084*4882a593Smuzhiyun ctx = crypto_tfm_ctx(base->tfm);
1085*4882a593Smuzhiyun ctx->base.exit_inv = true;
1086*4882a593Smuzhiyun sreq->needs_inv = true;
1087*4882a593Smuzhiyun
1088*4882a593Smuzhiyun spin_lock_bh(&priv->ring[ring].queue_lock);
1089*4882a593Smuzhiyun crypto_enqueue_request(&priv->ring[ring].queue, base);
1090*4882a593Smuzhiyun spin_unlock_bh(&priv->ring[ring].queue_lock);
1091*4882a593Smuzhiyun
1092*4882a593Smuzhiyun queue_work(priv->ring[ring].workqueue,
1093*4882a593Smuzhiyun &priv->ring[ring].work_data.work);
1094*4882a593Smuzhiyun
1095*4882a593Smuzhiyun wait_for_completion(&result->completion);
1096*4882a593Smuzhiyun
1097*4882a593Smuzhiyun if (result->error) {
1098*4882a593Smuzhiyun dev_warn(priv->dev,
1099*4882a593Smuzhiyun "cipher: sync: invalidate: completion error %d\n",
1100*4882a593Smuzhiyun result->error);
1101*4882a593Smuzhiyun return result->error;
1102*4882a593Smuzhiyun }
1103*4882a593Smuzhiyun
1104*4882a593Smuzhiyun return 0;
1105*4882a593Smuzhiyun }
1106*4882a593Smuzhiyun
/*
 * Invalidate the context record of an skcipher tfm using a request
 * allocated on the stack (no memory allocation on the teardown path).
 */
static int safexcel_skcipher_exit_inv(struct crypto_tfm *tfm)
{
	EIP197_REQUEST_ON_STACK(req, skcipher, EIP197_SKCIPHER_REQ_SIZE);
	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	struct safexcel_inv_result result = {};

	/* Clear the request head before wiring up callback and tfm */
	memset(req, 0, sizeof(struct skcipher_request));

	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      safexcel_inv_complete, &result);
	skcipher_request_set_tfm(req, __crypto_skcipher_cast(tfm));

	/* Blocks until the invalidation has been processed */
	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}
1121*4882a593Smuzhiyun
/*
 * Invalidate the context record of an AEAD tfm using a request
 * allocated on the stack (no memory allocation on the teardown path).
 */
static int safexcel_aead_exit_inv(struct crypto_tfm *tfm)
{
	EIP197_REQUEST_ON_STACK(req, aead, EIP197_AEAD_REQ_SIZE);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	struct safexcel_inv_result result = {};

	/* Clear the request head before wiring up callback and tfm */
	memset(req, 0, sizeof(struct aead_request));

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  safexcel_inv_complete, &result);
	aead_request_set_tfm(req, __crypto_aead_cast(tfm));

	/* Blocks until the invalidation has been processed */
	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}
1136*4882a593Smuzhiyun
/*
 * Common entry point for queuing a cipher/AEAD request on the engine.
 *
 * Allocates a DMA context record on first use (and pins the tfm to a
 * ring); on subsequent uses schedules a cache invalidation first if the
 * context has changed (e.g. rekey) and the record cache is active.
 *
 * Returns the crypto_enqueue_request() status (normally -EINPROGRESS),
 * or -ENOMEM if the context record could not be allocated.
 */
static int safexcel_queue_req(struct crypto_async_request *base,
			      struct safexcel_cipher_req *sreq,
			      enum safexcel_cipher_direction dir)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret, ring;

	sreq->needs_inv = false;
	sreq->direction = dir;

	if (ctx->base.ctxr) {
		/* Context exists: invalidate the cached copy if it is stale */
		if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
			sreq->needs_inv = true;
			ctx->base.needs_inv = false;
		}
	} else {
		/* First use: pick a ring and allocate the context record */
		ctx->base.ring = safexcel_select_ring(priv);
		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
						 EIP197_GFP_FLAGS(*base),
						 &ctx->base.ctxr_dma);
		if (!ctx->base.ctxr)
			return -ENOMEM;
	}

	ring = ctx->base.ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	/* Kick the ring worker to start processing the queue */
	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	return ret;
}
1173*4882a593Smuzhiyun
safexcel_encrypt(struct skcipher_request * req)1174*4882a593Smuzhiyun static int safexcel_encrypt(struct skcipher_request *req)
1175*4882a593Smuzhiyun {
1176*4882a593Smuzhiyun return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1177*4882a593Smuzhiyun SAFEXCEL_ENCRYPT);
1178*4882a593Smuzhiyun }
1179*4882a593Smuzhiyun
safexcel_decrypt(struct skcipher_request * req)1180*4882a593Smuzhiyun static int safexcel_decrypt(struct skcipher_request *req)
1181*4882a593Smuzhiyun {
1182*4882a593Smuzhiyun return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1183*4882a593Smuzhiyun SAFEXCEL_DECRYPT);
1184*4882a593Smuzhiyun }
1185*4882a593Smuzhiyun
safexcel_skcipher_cra_init(struct crypto_tfm * tfm)1186*4882a593Smuzhiyun static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
1187*4882a593Smuzhiyun {
1188*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1189*4882a593Smuzhiyun struct safexcel_alg_template *tmpl =
1190*4882a593Smuzhiyun container_of(tfm->__crt_alg, struct safexcel_alg_template,
1191*4882a593Smuzhiyun alg.skcipher.base);
1192*4882a593Smuzhiyun
1193*4882a593Smuzhiyun crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
1194*4882a593Smuzhiyun sizeof(struct safexcel_cipher_req));
1195*4882a593Smuzhiyun
1196*4882a593Smuzhiyun ctx->base.priv = tmpl->priv;
1197*4882a593Smuzhiyun
1198*4882a593Smuzhiyun ctx->base.send = safexcel_skcipher_send;
1199*4882a593Smuzhiyun ctx->base.handle_result = safexcel_skcipher_handle_result;
1200*4882a593Smuzhiyun ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
1201*4882a593Smuzhiyun ctx->ctrinit = 1;
1202*4882a593Smuzhiyun return 0;
1203*4882a593Smuzhiyun }
1204*4882a593Smuzhiyun
/*
 * Common tfm teardown: wipe key material from the software context and,
 * if present, from the DMA context record.
 *
 * Returns -ENOMEM as a sentinel when no context record was ever
 * allocated, telling the callers to skip the invalidation step; 0
 * otherwise. The value is used as a flag only, never propagated.
 */
static int safexcel_cipher_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* memzero_explicit() cannot be optimized away - wipes key material */
	memzero_explicit(ctx->key, sizeof(ctx->key));

	/* context not allocated, skip invalidation */
	if (!ctx->base.ctxr)
		return -ENOMEM;

	memzero_explicit(ctx->base.ctxr->data, sizeof(ctx->base.ctxr->data));
	return 0;
}
1218*4882a593Smuzhiyun
safexcel_skcipher_cra_exit(struct crypto_tfm * tfm)1219*4882a593Smuzhiyun static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
1220*4882a593Smuzhiyun {
1221*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1222*4882a593Smuzhiyun struct safexcel_crypto_priv *priv = ctx->base.priv;
1223*4882a593Smuzhiyun int ret;
1224*4882a593Smuzhiyun
1225*4882a593Smuzhiyun if (safexcel_cipher_cra_exit(tfm))
1226*4882a593Smuzhiyun return;
1227*4882a593Smuzhiyun
1228*4882a593Smuzhiyun if (priv->flags & EIP197_TRC_CACHE) {
1229*4882a593Smuzhiyun ret = safexcel_skcipher_exit_inv(tfm);
1230*4882a593Smuzhiyun if (ret)
1231*4882a593Smuzhiyun dev_warn(priv->dev, "skcipher: invalidation error %d\n",
1232*4882a593Smuzhiyun ret);
1233*4882a593Smuzhiyun } else {
1234*4882a593Smuzhiyun dma_pool_free(priv->context_pool, ctx->base.ctxr,
1235*4882a593Smuzhiyun ctx->base.ctxr_dma);
1236*4882a593Smuzhiyun }
1237*4882a593Smuzhiyun }
1238*4882a593Smuzhiyun
safexcel_aead_cra_exit(struct crypto_tfm * tfm)1239*4882a593Smuzhiyun static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
1240*4882a593Smuzhiyun {
1241*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1242*4882a593Smuzhiyun struct safexcel_crypto_priv *priv = ctx->base.priv;
1243*4882a593Smuzhiyun int ret;
1244*4882a593Smuzhiyun
1245*4882a593Smuzhiyun if (safexcel_cipher_cra_exit(tfm))
1246*4882a593Smuzhiyun return;
1247*4882a593Smuzhiyun
1248*4882a593Smuzhiyun if (priv->flags & EIP197_TRC_CACHE) {
1249*4882a593Smuzhiyun ret = safexcel_aead_exit_inv(tfm);
1250*4882a593Smuzhiyun if (ret)
1251*4882a593Smuzhiyun dev_warn(priv->dev, "aead: invalidation error %d\n",
1252*4882a593Smuzhiyun ret);
1253*4882a593Smuzhiyun } else {
1254*4882a593Smuzhiyun dma_pool_free(priv->context_pool, ctx->base.ctxr,
1255*4882a593Smuzhiyun ctx->base.ctxr_dma);
1256*4882a593Smuzhiyun }
1257*4882a593Smuzhiyun }
1258*4882a593Smuzhiyun
safexcel_skcipher_aes_ecb_cra_init(struct crypto_tfm * tfm)1259*4882a593Smuzhiyun static int safexcel_skcipher_aes_ecb_cra_init(struct crypto_tfm *tfm)
1260*4882a593Smuzhiyun {
1261*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1262*4882a593Smuzhiyun
1263*4882a593Smuzhiyun safexcel_skcipher_cra_init(tfm);
1264*4882a593Smuzhiyun ctx->alg = SAFEXCEL_AES;
1265*4882a593Smuzhiyun ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1266*4882a593Smuzhiyun ctx->blocksz = 0;
1267*4882a593Smuzhiyun ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1268*4882a593Smuzhiyun return 0;
1269*4882a593Smuzhiyun }
1270*4882a593Smuzhiyun
1271*4882a593Smuzhiyun struct safexcel_alg_template safexcel_alg_ecb_aes = {
1272*4882a593Smuzhiyun .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1273*4882a593Smuzhiyun .algo_mask = SAFEXCEL_ALG_AES,
1274*4882a593Smuzhiyun .alg.skcipher = {
1275*4882a593Smuzhiyun .setkey = safexcel_skcipher_aes_setkey,
1276*4882a593Smuzhiyun .encrypt = safexcel_encrypt,
1277*4882a593Smuzhiyun .decrypt = safexcel_decrypt,
1278*4882a593Smuzhiyun .min_keysize = AES_MIN_KEY_SIZE,
1279*4882a593Smuzhiyun .max_keysize = AES_MAX_KEY_SIZE,
1280*4882a593Smuzhiyun .base = {
1281*4882a593Smuzhiyun .cra_name = "ecb(aes)",
1282*4882a593Smuzhiyun .cra_driver_name = "safexcel-ecb-aes",
1283*4882a593Smuzhiyun .cra_priority = SAFEXCEL_CRA_PRIORITY,
1284*4882a593Smuzhiyun .cra_flags = CRYPTO_ALG_ASYNC |
1285*4882a593Smuzhiyun CRYPTO_ALG_ALLOCATES_MEMORY |
1286*4882a593Smuzhiyun CRYPTO_ALG_KERN_DRIVER_ONLY,
1287*4882a593Smuzhiyun .cra_blocksize = AES_BLOCK_SIZE,
1288*4882a593Smuzhiyun .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1289*4882a593Smuzhiyun .cra_alignmask = 0,
1290*4882a593Smuzhiyun .cra_init = safexcel_skcipher_aes_ecb_cra_init,
1291*4882a593Smuzhiyun .cra_exit = safexcel_skcipher_cra_exit,
1292*4882a593Smuzhiyun .cra_module = THIS_MODULE,
1293*4882a593Smuzhiyun },
1294*4882a593Smuzhiyun },
1295*4882a593Smuzhiyun };
1296*4882a593Smuzhiyun
safexcel_skcipher_aes_cbc_cra_init(struct crypto_tfm * tfm)1297*4882a593Smuzhiyun static int safexcel_skcipher_aes_cbc_cra_init(struct crypto_tfm *tfm)
1298*4882a593Smuzhiyun {
1299*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1300*4882a593Smuzhiyun
1301*4882a593Smuzhiyun safexcel_skcipher_cra_init(tfm);
1302*4882a593Smuzhiyun ctx->alg = SAFEXCEL_AES;
1303*4882a593Smuzhiyun ctx->blocksz = AES_BLOCK_SIZE;
1304*4882a593Smuzhiyun ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1305*4882a593Smuzhiyun return 0;
1306*4882a593Smuzhiyun }
1307*4882a593Smuzhiyun
1308*4882a593Smuzhiyun struct safexcel_alg_template safexcel_alg_cbc_aes = {
1309*4882a593Smuzhiyun .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1310*4882a593Smuzhiyun .algo_mask = SAFEXCEL_ALG_AES,
1311*4882a593Smuzhiyun .alg.skcipher = {
1312*4882a593Smuzhiyun .setkey = safexcel_skcipher_aes_setkey,
1313*4882a593Smuzhiyun .encrypt = safexcel_encrypt,
1314*4882a593Smuzhiyun .decrypt = safexcel_decrypt,
1315*4882a593Smuzhiyun .min_keysize = AES_MIN_KEY_SIZE,
1316*4882a593Smuzhiyun .max_keysize = AES_MAX_KEY_SIZE,
1317*4882a593Smuzhiyun .ivsize = AES_BLOCK_SIZE,
1318*4882a593Smuzhiyun .base = {
1319*4882a593Smuzhiyun .cra_name = "cbc(aes)",
1320*4882a593Smuzhiyun .cra_driver_name = "safexcel-cbc-aes",
1321*4882a593Smuzhiyun .cra_priority = SAFEXCEL_CRA_PRIORITY,
1322*4882a593Smuzhiyun .cra_flags = CRYPTO_ALG_ASYNC |
1323*4882a593Smuzhiyun CRYPTO_ALG_ALLOCATES_MEMORY |
1324*4882a593Smuzhiyun CRYPTO_ALG_KERN_DRIVER_ONLY,
1325*4882a593Smuzhiyun .cra_blocksize = AES_BLOCK_SIZE,
1326*4882a593Smuzhiyun .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1327*4882a593Smuzhiyun .cra_alignmask = 0,
1328*4882a593Smuzhiyun .cra_init = safexcel_skcipher_aes_cbc_cra_init,
1329*4882a593Smuzhiyun .cra_exit = safexcel_skcipher_cra_exit,
1330*4882a593Smuzhiyun .cra_module = THIS_MODULE,
1331*4882a593Smuzhiyun },
1332*4882a593Smuzhiyun },
1333*4882a593Smuzhiyun };
1334*4882a593Smuzhiyun
safexcel_skcipher_aes_cfb_cra_init(struct crypto_tfm * tfm)1335*4882a593Smuzhiyun static int safexcel_skcipher_aes_cfb_cra_init(struct crypto_tfm *tfm)
1336*4882a593Smuzhiyun {
1337*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1338*4882a593Smuzhiyun
1339*4882a593Smuzhiyun safexcel_skcipher_cra_init(tfm);
1340*4882a593Smuzhiyun ctx->alg = SAFEXCEL_AES;
1341*4882a593Smuzhiyun ctx->blocksz = AES_BLOCK_SIZE;
1342*4882a593Smuzhiyun ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
1343*4882a593Smuzhiyun return 0;
1344*4882a593Smuzhiyun }
1345*4882a593Smuzhiyun
1346*4882a593Smuzhiyun struct safexcel_alg_template safexcel_alg_cfb_aes = {
1347*4882a593Smuzhiyun .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1348*4882a593Smuzhiyun .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
1349*4882a593Smuzhiyun .alg.skcipher = {
1350*4882a593Smuzhiyun .setkey = safexcel_skcipher_aes_setkey,
1351*4882a593Smuzhiyun .encrypt = safexcel_encrypt,
1352*4882a593Smuzhiyun .decrypt = safexcel_decrypt,
1353*4882a593Smuzhiyun .min_keysize = AES_MIN_KEY_SIZE,
1354*4882a593Smuzhiyun .max_keysize = AES_MAX_KEY_SIZE,
1355*4882a593Smuzhiyun .ivsize = AES_BLOCK_SIZE,
1356*4882a593Smuzhiyun .base = {
1357*4882a593Smuzhiyun .cra_name = "cfb(aes)",
1358*4882a593Smuzhiyun .cra_driver_name = "safexcel-cfb-aes",
1359*4882a593Smuzhiyun .cra_priority = SAFEXCEL_CRA_PRIORITY,
1360*4882a593Smuzhiyun .cra_flags = CRYPTO_ALG_ASYNC |
1361*4882a593Smuzhiyun CRYPTO_ALG_ALLOCATES_MEMORY |
1362*4882a593Smuzhiyun CRYPTO_ALG_KERN_DRIVER_ONLY,
1363*4882a593Smuzhiyun .cra_blocksize = 1,
1364*4882a593Smuzhiyun .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1365*4882a593Smuzhiyun .cra_alignmask = 0,
1366*4882a593Smuzhiyun .cra_init = safexcel_skcipher_aes_cfb_cra_init,
1367*4882a593Smuzhiyun .cra_exit = safexcel_skcipher_cra_exit,
1368*4882a593Smuzhiyun .cra_module = THIS_MODULE,
1369*4882a593Smuzhiyun },
1370*4882a593Smuzhiyun },
1371*4882a593Smuzhiyun };
1372*4882a593Smuzhiyun
safexcel_skcipher_aes_ofb_cra_init(struct crypto_tfm * tfm)1373*4882a593Smuzhiyun static int safexcel_skcipher_aes_ofb_cra_init(struct crypto_tfm *tfm)
1374*4882a593Smuzhiyun {
1375*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1376*4882a593Smuzhiyun
1377*4882a593Smuzhiyun safexcel_skcipher_cra_init(tfm);
1378*4882a593Smuzhiyun ctx->alg = SAFEXCEL_AES;
1379*4882a593Smuzhiyun ctx->blocksz = AES_BLOCK_SIZE;
1380*4882a593Smuzhiyun ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
1381*4882a593Smuzhiyun return 0;
1382*4882a593Smuzhiyun }
1383*4882a593Smuzhiyun
1384*4882a593Smuzhiyun struct safexcel_alg_template safexcel_alg_ofb_aes = {
1385*4882a593Smuzhiyun .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1386*4882a593Smuzhiyun .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
1387*4882a593Smuzhiyun .alg.skcipher = {
1388*4882a593Smuzhiyun .setkey = safexcel_skcipher_aes_setkey,
1389*4882a593Smuzhiyun .encrypt = safexcel_encrypt,
1390*4882a593Smuzhiyun .decrypt = safexcel_decrypt,
1391*4882a593Smuzhiyun .min_keysize = AES_MIN_KEY_SIZE,
1392*4882a593Smuzhiyun .max_keysize = AES_MAX_KEY_SIZE,
1393*4882a593Smuzhiyun .ivsize = AES_BLOCK_SIZE,
1394*4882a593Smuzhiyun .base = {
1395*4882a593Smuzhiyun .cra_name = "ofb(aes)",
1396*4882a593Smuzhiyun .cra_driver_name = "safexcel-ofb-aes",
1397*4882a593Smuzhiyun .cra_priority = SAFEXCEL_CRA_PRIORITY,
1398*4882a593Smuzhiyun .cra_flags = CRYPTO_ALG_ASYNC |
1399*4882a593Smuzhiyun CRYPTO_ALG_ALLOCATES_MEMORY |
1400*4882a593Smuzhiyun CRYPTO_ALG_KERN_DRIVER_ONLY,
1401*4882a593Smuzhiyun .cra_blocksize = 1,
1402*4882a593Smuzhiyun .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1403*4882a593Smuzhiyun .cra_alignmask = 0,
1404*4882a593Smuzhiyun .cra_init = safexcel_skcipher_aes_ofb_cra_init,
1405*4882a593Smuzhiyun .cra_exit = safexcel_skcipher_cra_exit,
1406*4882a593Smuzhiyun .cra_module = THIS_MODULE,
1407*4882a593Smuzhiyun },
1408*4882a593Smuzhiyun },
1409*4882a593Smuzhiyun };
1410*4882a593Smuzhiyun
safexcel_skcipher_aesctr_setkey(struct crypto_skcipher * ctfm,const u8 * key,unsigned int len)1411*4882a593Smuzhiyun static int safexcel_skcipher_aesctr_setkey(struct crypto_skcipher *ctfm,
1412*4882a593Smuzhiyun const u8 *key, unsigned int len)
1413*4882a593Smuzhiyun {
1414*4882a593Smuzhiyun struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
1415*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1416*4882a593Smuzhiyun struct safexcel_crypto_priv *priv = ctx->base.priv;
1417*4882a593Smuzhiyun struct crypto_aes_ctx aes;
1418*4882a593Smuzhiyun int ret, i;
1419*4882a593Smuzhiyun unsigned int keylen;
1420*4882a593Smuzhiyun
1421*4882a593Smuzhiyun /* last 4 bytes of key are the nonce! */
1422*4882a593Smuzhiyun ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
1423*4882a593Smuzhiyun /* exclude the nonce here */
1424*4882a593Smuzhiyun keylen = len - CTR_RFC3686_NONCE_SIZE;
1425*4882a593Smuzhiyun ret = aes_expandkey(&aes, key, keylen);
1426*4882a593Smuzhiyun if (ret)
1427*4882a593Smuzhiyun return ret;
1428*4882a593Smuzhiyun
1429*4882a593Smuzhiyun if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
1430*4882a593Smuzhiyun for (i = 0; i < keylen / sizeof(u32); i++) {
1431*4882a593Smuzhiyun if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
1432*4882a593Smuzhiyun ctx->base.needs_inv = true;
1433*4882a593Smuzhiyun break;
1434*4882a593Smuzhiyun }
1435*4882a593Smuzhiyun }
1436*4882a593Smuzhiyun }
1437*4882a593Smuzhiyun
1438*4882a593Smuzhiyun for (i = 0; i < keylen / sizeof(u32); i++)
1439*4882a593Smuzhiyun ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
1440*4882a593Smuzhiyun
1441*4882a593Smuzhiyun ctx->key_len = keylen;
1442*4882a593Smuzhiyun
1443*4882a593Smuzhiyun memzero_explicit(&aes, sizeof(aes));
1444*4882a593Smuzhiyun return 0;
1445*4882a593Smuzhiyun }
1446*4882a593Smuzhiyun
safexcel_skcipher_aes_ctr_cra_init(struct crypto_tfm * tfm)1447*4882a593Smuzhiyun static int safexcel_skcipher_aes_ctr_cra_init(struct crypto_tfm *tfm)
1448*4882a593Smuzhiyun {
1449*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1450*4882a593Smuzhiyun
1451*4882a593Smuzhiyun safexcel_skcipher_cra_init(tfm);
1452*4882a593Smuzhiyun ctx->alg = SAFEXCEL_AES;
1453*4882a593Smuzhiyun ctx->blocksz = AES_BLOCK_SIZE;
1454*4882a593Smuzhiyun ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
1455*4882a593Smuzhiyun return 0;
1456*4882a593Smuzhiyun }
1457*4882a593Smuzhiyun
1458*4882a593Smuzhiyun struct safexcel_alg_template safexcel_alg_ctr_aes = {
1459*4882a593Smuzhiyun .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1460*4882a593Smuzhiyun .algo_mask = SAFEXCEL_ALG_AES,
1461*4882a593Smuzhiyun .alg.skcipher = {
1462*4882a593Smuzhiyun .setkey = safexcel_skcipher_aesctr_setkey,
1463*4882a593Smuzhiyun .encrypt = safexcel_encrypt,
1464*4882a593Smuzhiyun .decrypt = safexcel_decrypt,
1465*4882a593Smuzhiyun /* Add nonce size */
1466*4882a593Smuzhiyun .min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
1467*4882a593Smuzhiyun .max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
1468*4882a593Smuzhiyun .ivsize = CTR_RFC3686_IV_SIZE,
1469*4882a593Smuzhiyun .base = {
1470*4882a593Smuzhiyun .cra_name = "rfc3686(ctr(aes))",
1471*4882a593Smuzhiyun .cra_driver_name = "safexcel-ctr-aes",
1472*4882a593Smuzhiyun .cra_priority = SAFEXCEL_CRA_PRIORITY,
1473*4882a593Smuzhiyun .cra_flags = CRYPTO_ALG_ASYNC |
1474*4882a593Smuzhiyun CRYPTO_ALG_ALLOCATES_MEMORY |
1475*4882a593Smuzhiyun CRYPTO_ALG_KERN_DRIVER_ONLY,
1476*4882a593Smuzhiyun .cra_blocksize = 1,
1477*4882a593Smuzhiyun .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1478*4882a593Smuzhiyun .cra_alignmask = 0,
1479*4882a593Smuzhiyun .cra_init = safexcel_skcipher_aes_ctr_cra_init,
1480*4882a593Smuzhiyun .cra_exit = safexcel_skcipher_cra_exit,
1481*4882a593Smuzhiyun .cra_module = THIS_MODULE,
1482*4882a593Smuzhiyun },
1483*4882a593Smuzhiyun },
1484*4882a593Smuzhiyun };
1485*4882a593Smuzhiyun
safexcel_des_setkey(struct crypto_skcipher * ctfm,const u8 * key,unsigned int len)1486*4882a593Smuzhiyun static int safexcel_des_setkey(struct crypto_skcipher *ctfm, const u8 *key,
1487*4882a593Smuzhiyun unsigned int len)
1488*4882a593Smuzhiyun {
1489*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
1490*4882a593Smuzhiyun struct safexcel_crypto_priv *priv = ctx->base.priv;
1491*4882a593Smuzhiyun int ret;
1492*4882a593Smuzhiyun
1493*4882a593Smuzhiyun ret = verify_skcipher_des_key(ctfm, key);
1494*4882a593Smuzhiyun if (ret)
1495*4882a593Smuzhiyun return ret;
1496*4882a593Smuzhiyun
1497*4882a593Smuzhiyun /* if context exits and key changed, need to invalidate it */
1498*4882a593Smuzhiyun if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
1499*4882a593Smuzhiyun if (memcmp(ctx->key, key, len))
1500*4882a593Smuzhiyun ctx->base.needs_inv = true;
1501*4882a593Smuzhiyun
1502*4882a593Smuzhiyun memcpy(ctx->key, key, len);
1503*4882a593Smuzhiyun ctx->key_len = len;
1504*4882a593Smuzhiyun
1505*4882a593Smuzhiyun return 0;
1506*4882a593Smuzhiyun }
1507*4882a593Smuzhiyun
safexcel_skcipher_des_cbc_cra_init(struct crypto_tfm * tfm)1508*4882a593Smuzhiyun static int safexcel_skcipher_des_cbc_cra_init(struct crypto_tfm *tfm)
1509*4882a593Smuzhiyun {
1510*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1511*4882a593Smuzhiyun
1512*4882a593Smuzhiyun safexcel_skcipher_cra_init(tfm);
1513*4882a593Smuzhiyun ctx->alg = SAFEXCEL_DES;
1514*4882a593Smuzhiyun ctx->blocksz = DES_BLOCK_SIZE;
1515*4882a593Smuzhiyun ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1516*4882a593Smuzhiyun ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1517*4882a593Smuzhiyun return 0;
1518*4882a593Smuzhiyun }
1519*4882a593Smuzhiyun
1520*4882a593Smuzhiyun struct safexcel_alg_template safexcel_alg_cbc_des = {
1521*4882a593Smuzhiyun .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1522*4882a593Smuzhiyun .algo_mask = SAFEXCEL_ALG_DES,
1523*4882a593Smuzhiyun .alg.skcipher = {
1524*4882a593Smuzhiyun .setkey = safexcel_des_setkey,
1525*4882a593Smuzhiyun .encrypt = safexcel_encrypt,
1526*4882a593Smuzhiyun .decrypt = safexcel_decrypt,
1527*4882a593Smuzhiyun .min_keysize = DES_KEY_SIZE,
1528*4882a593Smuzhiyun .max_keysize = DES_KEY_SIZE,
1529*4882a593Smuzhiyun .ivsize = DES_BLOCK_SIZE,
1530*4882a593Smuzhiyun .base = {
1531*4882a593Smuzhiyun .cra_name = "cbc(des)",
1532*4882a593Smuzhiyun .cra_driver_name = "safexcel-cbc-des",
1533*4882a593Smuzhiyun .cra_priority = SAFEXCEL_CRA_PRIORITY,
1534*4882a593Smuzhiyun .cra_flags = CRYPTO_ALG_ASYNC |
1535*4882a593Smuzhiyun CRYPTO_ALG_ALLOCATES_MEMORY |
1536*4882a593Smuzhiyun CRYPTO_ALG_KERN_DRIVER_ONLY,
1537*4882a593Smuzhiyun .cra_blocksize = DES_BLOCK_SIZE,
1538*4882a593Smuzhiyun .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1539*4882a593Smuzhiyun .cra_alignmask = 0,
1540*4882a593Smuzhiyun .cra_init = safexcel_skcipher_des_cbc_cra_init,
1541*4882a593Smuzhiyun .cra_exit = safexcel_skcipher_cra_exit,
1542*4882a593Smuzhiyun .cra_module = THIS_MODULE,
1543*4882a593Smuzhiyun },
1544*4882a593Smuzhiyun },
1545*4882a593Smuzhiyun };
1546*4882a593Smuzhiyun
safexcel_skcipher_des_ecb_cra_init(struct crypto_tfm * tfm)1547*4882a593Smuzhiyun static int safexcel_skcipher_des_ecb_cra_init(struct crypto_tfm *tfm)
1548*4882a593Smuzhiyun {
1549*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1550*4882a593Smuzhiyun
1551*4882a593Smuzhiyun safexcel_skcipher_cra_init(tfm);
1552*4882a593Smuzhiyun ctx->alg = SAFEXCEL_DES;
1553*4882a593Smuzhiyun ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1554*4882a593Smuzhiyun ctx->blocksz = 0;
1555*4882a593Smuzhiyun ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1556*4882a593Smuzhiyun return 0;
1557*4882a593Smuzhiyun }
1558*4882a593Smuzhiyun
1559*4882a593Smuzhiyun struct safexcel_alg_template safexcel_alg_ecb_des = {
1560*4882a593Smuzhiyun .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1561*4882a593Smuzhiyun .algo_mask = SAFEXCEL_ALG_DES,
1562*4882a593Smuzhiyun .alg.skcipher = {
1563*4882a593Smuzhiyun .setkey = safexcel_des_setkey,
1564*4882a593Smuzhiyun .encrypt = safexcel_encrypt,
1565*4882a593Smuzhiyun .decrypt = safexcel_decrypt,
1566*4882a593Smuzhiyun .min_keysize = DES_KEY_SIZE,
1567*4882a593Smuzhiyun .max_keysize = DES_KEY_SIZE,
1568*4882a593Smuzhiyun .base = {
1569*4882a593Smuzhiyun .cra_name = "ecb(des)",
1570*4882a593Smuzhiyun .cra_driver_name = "safexcel-ecb-des",
1571*4882a593Smuzhiyun .cra_priority = SAFEXCEL_CRA_PRIORITY,
1572*4882a593Smuzhiyun .cra_flags = CRYPTO_ALG_ASYNC |
1573*4882a593Smuzhiyun CRYPTO_ALG_ALLOCATES_MEMORY |
1574*4882a593Smuzhiyun CRYPTO_ALG_KERN_DRIVER_ONLY,
1575*4882a593Smuzhiyun .cra_blocksize = DES_BLOCK_SIZE,
1576*4882a593Smuzhiyun .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1577*4882a593Smuzhiyun .cra_alignmask = 0,
1578*4882a593Smuzhiyun .cra_init = safexcel_skcipher_des_ecb_cra_init,
1579*4882a593Smuzhiyun .cra_exit = safexcel_skcipher_cra_exit,
1580*4882a593Smuzhiyun .cra_module = THIS_MODULE,
1581*4882a593Smuzhiyun },
1582*4882a593Smuzhiyun },
1583*4882a593Smuzhiyun };
1584*4882a593Smuzhiyun
safexcel_des3_ede_setkey(struct crypto_skcipher * ctfm,const u8 * key,unsigned int len)1585*4882a593Smuzhiyun static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
1586*4882a593Smuzhiyun const u8 *key, unsigned int len)
1587*4882a593Smuzhiyun {
1588*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
1589*4882a593Smuzhiyun struct safexcel_crypto_priv *priv = ctx->base.priv;
1590*4882a593Smuzhiyun int err;
1591*4882a593Smuzhiyun
1592*4882a593Smuzhiyun err = verify_skcipher_des3_key(ctfm, key);
1593*4882a593Smuzhiyun if (err)
1594*4882a593Smuzhiyun return err;
1595*4882a593Smuzhiyun
1596*4882a593Smuzhiyun /* if context exits and key changed, need to invalidate it */
1597*4882a593Smuzhiyun if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
1598*4882a593Smuzhiyun if (memcmp(ctx->key, key, len))
1599*4882a593Smuzhiyun ctx->base.needs_inv = true;
1600*4882a593Smuzhiyun
1601*4882a593Smuzhiyun memcpy(ctx->key, key, len);
1602*4882a593Smuzhiyun ctx->key_len = len;
1603*4882a593Smuzhiyun
1604*4882a593Smuzhiyun return 0;
1605*4882a593Smuzhiyun }
1606*4882a593Smuzhiyun
safexcel_skcipher_des3_cbc_cra_init(struct crypto_tfm * tfm)1607*4882a593Smuzhiyun static int safexcel_skcipher_des3_cbc_cra_init(struct crypto_tfm *tfm)
1608*4882a593Smuzhiyun {
1609*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1610*4882a593Smuzhiyun
1611*4882a593Smuzhiyun safexcel_skcipher_cra_init(tfm);
1612*4882a593Smuzhiyun ctx->alg = SAFEXCEL_3DES;
1613*4882a593Smuzhiyun ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1614*4882a593Smuzhiyun ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1615*4882a593Smuzhiyun ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1616*4882a593Smuzhiyun return 0;
1617*4882a593Smuzhiyun }
1618*4882a593Smuzhiyun
1619*4882a593Smuzhiyun struct safexcel_alg_template safexcel_alg_cbc_des3_ede = {
1620*4882a593Smuzhiyun .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1621*4882a593Smuzhiyun .algo_mask = SAFEXCEL_ALG_DES,
1622*4882a593Smuzhiyun .alg.skcipher = {
1623*4882a593Smuzhiyun .setkey = safexcel_des3_ede_setkey,
1624*4882a593Smuzhiyun .encrypt = safexcel_encrypt,
1625*4882a593Smuzhiyun .decrypt = safexcel_decrypt,
1626*4882a593Smuzhiyun .min_keysize = DES3_EDE_KEY_SIZE,
1627*4882a593Smuzhiyun .max_keysize = DES3_EDE_KEY_SIZE,
1628*4882a593Smuzhiyun .ivsize = DES3_EDE_BLOCK_SIZE,
1629*4882a593Smuzhiyun .base = {
1630*4882a593Smuzhiyun .cra_name = "cbc(des3_ede)",
1631*4882a593Smuzhiyun .cra_driver_name = "safexcel-cbc-des3_ede",
1632*4882a593Smuzhiyun .cra_priority = SAFEXCEL_CRA_PRIORITY,
1633*4882a593Smuzhiyun .cra_flags = CRYPTO_ALG_ASYNC |
1634*4882a593Smuzhiyun CRYPTO_ALG_ALLOCATES_MEMORY |
1635*4882a593Smuzhiyun CRYPTO_ALG_KERN_DRIVER_ONLY,
1636*4882a593Smuzhiyun .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1637*4882a593Smuzhiyun .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1638*4882a593Smuzhiyun .cra_alignmask = 0,
1639*4882a593Smuzhiyun .cra_init = safexcel_skcipher_des3_cbc_cra_init,
1640*4882a593Smuzhiyun .cra_exit = safexcel_skcipher_cra_exit,
1641*4882a593Smuzhiyun .cra_module = THIS_MODULE,
1642*4882a593Smuzhiyun },
1643*4882a593Smuzhiyun },
1644*4882a593Smuzhiyun };
1645*4882a593Smuzhiyun
safexcel_skcipher_des3_ecb_cra_init(struct crypto_tfm * tfm)1646*4882a593Smuzhiyun static int safexcel_skcipher_des3_ecb_cra_init(struct crypto_tfm *tfm)
1647*4882a593Smuzhiyun {
1648*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1649*4882a593Smuzhiyun
1650*4882a593Smuzhiyun safexcel_skcipher_cra_init(tfm);
1651*4882a593Smuzhiyun ctx->alg = SAFEXCEL_3DES;
1652*4882a593Smuzhiyun ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1653*4882a593Smuzhiyun ctx->blocksz = 0;
1654*4882a593Smuzhiyun ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1655*4882a593Smuzhiyun return 0;
1656*4882a593Smuzhiyun }
1657*4882a593Smuzhiyun
1658*4882a593Smuzhiyun struct safexcel_alg_template safexcel_alg_ecb_des3_ede = {
1659*4882a593Smuzhiyun .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1660*4882a593Smuzhiyun .algo_mask = SAFEXCEL_ALG_DES,
1661*4882a593Smuzhiyun .alg.skcipher = {
1662*4882a593Smuzhiyun .setkey = safexcel_des3_ede_setkey,
1663*4882a593Smuzhiyun .encrypt = safexcel_encrypt,
1664*4882a593Smuzhiyun .decrypt = safexcel_decrypt,
1665*4882a593Smuzhiyun .min_keysize = DES3_EDE_KEY_SIZE,
1666*4882a593Smuzhiyun .max_keysize = DES3_EDE_KEY_SIZE,
1667*4882a593Smuzhiyun .base = {
1668*4882a593Smuzhiyun .cra_name = "ecb(des3_ede)",
1669*4882a593Smuzhiyun .cra_driver_name = "safexcel-ecb-des3_ede",
1670*4882a593Smuzhiyun .cra_priority = SAFEXCEL_CRA_PRIORITY,
1671*4882a593Smuzhiyun .cra_flags = CRYPTO_ALG_ASYNC |
1672*4882a593Smuzhiyun CRYPTO_ALG_ALLOCATES_MEMORY |
1673*4882a593Smuzhiyun CRYPTO_ALG_KERN_DRIVER_ONLY,
1674*4882a593Smuzhiyun .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1675*4882a593Smuzhiyun .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1676*4882a593Smuzhiyun .cra_alignmask = 0,
1677*4882a593Smuzhiyun .cra_init = safexcel_skcipher_des3_ecb_cra_init,
1678*4882a593Smuzhiyun .cra_exit = safexcel_skcipher_cra_exit,
1679*4882a593Smuzhiyun .cra_module = THIS_MODULE,
1680*4882a593Smuzhiyun },
1681*4882a593Smuzhiyun },
1682*4882a593Smuzhiyun };
1683*4882a593Smuzhiyun
safexcel_aead_encrypt(struct aead_request * req)1684*4882a593Smuzhiyun static int safexcel_aead_encrypt(struct aead_request *req)
1685*4882a593Smuzhiyun {
1686*4882a593Smuzhiyun struct safexcel_cipher_req *creq = aead_request_ctx(req);
1687*4882a593Smuzhiyun
1688*4882a593Smuzhiyun return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
1689*4882a593Smuzhiyun }
1690*4882a593Smuzhiyun
safexcel_aead_decrypt(struct aead_request * req)1691*4882a593Smuzhiyun static int safexcel_aead_decrypt(struct aead_request *req)
1692*4882a593Smuzhiyun {
1693*4882a593Smuzhiyun struct safexcel_cipher_req *creq = aead_request_ctx(req);
1694*4882a593Smuzhiyun
1695*4882a593Smuzhiyun return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
1696*4882a593Smuzhiyun }
1697*4882a593Smuzhiyun
safexcel_aead_cra_init(struct crypto_tfm * tfm)1698*4882a593Smuzhiyun static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
1699*4882a593Smuzhiyun {
1700*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1701*4882a593Smuzhiyun struct safexcel_alg_template *tmpl =
1702*4882a593Smuzhiyun container_of(tfm->__crt_alg, struct safexcel_alg_template,
1703*4882a593Smuzhiyun alg.aead.base);
1704*4882a593Smuzhiyun
1705*4882a593Smuzhiyun crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
1706*4882a593Smuzhiyun sizeof(struct safexcel_cipher_req));
1707*4882a593Smuzhiyun
1708*4882a593Smuzhiyun ctx->base.priv = tmpl->priv;
1709*4882a593Smuzhiyun
1710*4882a593Smuzhiyun ctx->alg = SAFEXCEL_AES; /* default */
1711*4882a593Smuzhiyun ctx->blocksz = AES_BLOCK_SIZE;
1712*4882a593Smuzhiyun ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
1713*4882a593Smuzhiyun ctx->ctrinit = 1;
1714*4882a593Smuzhiyun ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC; /* default */
1715*4882a593Smuzhiyun ctx->aead = true;
1716*4882a593Smuzhiyun ctx->base.send = safexcel_aead_send;
1717*4882a593Smuzhiyun ctx->base.handle_result = safexcel_aead_handle_result;
1718*4882a593Smuzhiyun return 0;
1719*4882a593Smuzhiyun }
1720*4882a593Smuzhiyun
safexcel_aead_sha1_cra_init(struct crypto_tfm * tfm)1721*4882a593Smuzhiyun static int safexcel_aead_sha1_cra_init(struct crypto_tfm *tfm)
1722*4882a593Smuzhiyun {
1723*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1724*4882a593Smuzhiyun
1725*4882a593Smuzhiyun safexcel_aead_cra_init(tfm);
1726*4882a593Smuzhiyun ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
1727*4882a593Smuzhiyun ctx->state_sz = SHA1_DIGEST_SIZE;
1728*4882a593Smuzhiyun return 0;
1729*4882a593Smuzhiyun }
1730*4882a593Smuzhiyun
1731*4882a593Smuzhiyun struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes = {
1732*4882a593Smuzhiyun .type = SAFEXCEL_ALG_TYPE_AEAD,
1733*4882a593Smuzhiyun .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
1734*4882a593Smuzhiyun .alg.aead = {
1735*4882a593Smuzhiyun .setkey = safexcel_aead_setkey,
1736*4882a593Smuzhiyun .encrypt = safexcel_aead_encrypt,
1737*4882a593Smuzhiyun .decrypt = safexcel_aead_decrypt,
1738*4882a593Smuzhiyun .ivsize = AES_BLOCK_SIZE,
1739*4882a593Smuzhiyun .maxauthsize = SHA1_DIGEST_SIZE,
1740*4882a593Smuzhiyun .base = {
1741*4882a593Smuzhiyun .cra_name = "authenc(hmac(sha1),cbc(aes))",
1742*4882a593Smuzhiyun .cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-aes",
1743*4882a593Smuzhiyun .cra_priority = SAFEXCEL_CRA_PRIORITY,
1744*4882a593Smuzhiyun .cra_flags = CRYPTO_ALG_ASYNC |
1745*4882a593Smuzhiyun CRYPTO_ALG_ALLOCATES_MEMORY |
1746*4882a593Smuzhiyun CRYPTO_ALG_KERN_DRIVER_ONLY,
1747*4882a593Smuzhiyun .cra_blocksize = AES_BLOCK_SIZE,
1748*4882a593Smuzhiyun .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1749*4882a593Smuzhiyun .cra_alignmask = 0,
1750*4882a593Smuzhiyun .cra_init = safexcel_aead_sha1_cra_init,
1751*4882a593Smuzhiyun .cra_exit = safexcel_aead_cra_exit,
1752*4882a593Smuzhiyun .cra_module = THIS_MODULE,
1753*4882a593Smuzhiyun },
1754*4882a593Smuzhiyun },
1755*4882a593Smuzhiyun };
1756*4882a593Smuzhiyun
safexcel_aead_sha256_cra_init(struct crypto_tfm * tfm)1757*4882a593Smuzhiyun static int safexcel_aead_sha256_cra_init(struct crypto_tfm *tfm)
1758*4882a593Smuzhiyun {
1759*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1760*4882a593Smuzhiyun
1761*4882a593Smuzhiyun safexcel_aead_cra_init(tfm);
1762*4882a593Smuzhiyun ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
1763*4882a593Smuzhiyun ctx->state_sz = SHA256_DIGEST_SIZE;
1764*4882a593Smuzhiyun return 0;
1765*4882a593Smuzhiyun }
1766*4882a593Smuzhiyun
1767*4882a593Smuzhiyun struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = {
1768*4882a593Smuzhiyun .type = SAFEXCEL_ALG_TYPE_AEAD,
1769*4882a593Smuzhiyun .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
1770*4882a593Smuzhiyun .alg.aead = {
1771*4882a593Smuzhiyun .setkey = safexcel_aead_setkey,
1772*4882a593Smuzhiyun .encrypt = safexcel_aead_encrypt,
1773*4882a593Smuzhiyun .decrypt = safexcel_aead_decrypt,
1774*4882a593Smuzhiyun .ivsize = AES_BLOCK_SIZE,
1775*4882a593Smuzhiyun .maxauthsize = SHA256_DIGEST_SIZE,
1776*4882a593Smuzhiyun .base = {
1777*4882a593Smuzhiyun .cra_name = "authenc(hmac(sha256),cbc(aes))",
1778*4882a593Smuzhiyun .cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-aes",
1779*4882a593Smuzhiyun .cra_priority = SAFEXCEL_CRA_PRIORITY,
1780*4882a593Smuzhiyun .cra_flags = CRYPTO_ALG_ASYNC |
1781*4882a593Smuzhiyun CRYPTO_ALG_ALLOCATES_MEMORY |
1782*4882a593Smuzhiyun CRYPTO_ALG_KERN_DRIVER_ONLY,
1783*4882a593Smuzhiyun .cra_blocksize = AES_BLOCK_SIZE,
1784*4882a593Smuzhiyun .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1785*4882a593Smuzhiyun .cra_alignmask = 0,
1786*4882a593Smuzhiyun .cra_init = safexcel_aead_sha256_cra_init,
1787*4882a593Smuzhiyun .cra_exit = safexcel_aead_cra_exit,
1788*4882a593Smuzhiyun .cra_module = THIS_MODULE,
1789*4882a593Smuzhiyun },
1790*4882a593Smuzhiyun },
1791*4882a593Smuzhiyun };
1792*4882a593Smuzhiyun
safexcel_aead_sha224_cra_init(struct crypto_tfm * tfm)1793*4882a593Smuzhiyun static int safexcel_aead_sha224_cra_init(struct crypto_tfm *tfm)
1794*4882a593Smuzhiyun {
1795*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1796*4882a593Smuzhiyun
1797*4882a593Smuzhiyun safexcel_aead_cra_init(tfm);
1798*4882a593Smuzhiyun ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
1799*4882a593Smuzhiyun ctx->state_sz = SHA256_DIGEST_SIZE;
1800*4882a593Smuzhiyun return 0;
1801*4882a593Smuzhiyun }
1802*4882a593Smuzhiyun
1803*4882a593Smuzhiyun struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
1804*4882a593Smuzhiyun .type = SAFEXCEL_ALG_TYPE_AEAD,
1805*4882a593Smuzhiyun .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
1806*4882a593Smuzhiyun .alg.aead = {
1807*4882a593Smuzhiyun .setkey = safexcel_aead_setkey,
1808*4882a593Smuzhiyun .encrypt = safexcel_aead_encrypt,
1809*4882a593Smuzhiyun .decrypt = safexcel_aead_decrypt,
1810*4882a593Smuzhiyun .ivsize = AES_BLOCK_SIZE,
1811*4882a593Smuzhiyun .maxauthsize = SHA224_DIGEST_SIZE,
1812*4882a593Smuzhiyun .base = {
1813*4882a593Smuzhiyun .cra_name = "authenc(hmac(sha224),cbc(aes))",
1814*4882a593Smuzhiyun .cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-aes",
1815*4882a593Smuzhiyun .cra_priority = SAFEXCEL_CRA_PRIORITY,
1816*4882a593Smuzhiyun .cra_flags = CRYPTO_ALG_ASYNC |
1817*4882a593Smuzhiyun CRYPTO_ALG_ALLOCATES_MEMORY |
1818*4882a593Smuzhiyun CRYPTO_ALG_KERN_DRIVER_ONLY,
1819*4882a593Smuzhiyun .cra_blocksize = AES_BLOCK_SIZE,
1820*4882a593Smuzhiyun .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1821*4882a593Smuzhiyun .cra_alignmask = 0,
1822*4882a593Smuzhiyun .cra_init = safexcel_aead_sha224_cra_init,
1823*4882a593Smuzhiyun .cra_exit = safexcel_aead_cra_exit,
1824*4882a593Smuzhiyun .cra_module = THIS_MODULE,
1825*4882a593Smuzhiyun },
1826*4882a593Smuzhiyun },
1827*4882a593Smuzhiyun };
1828*4882a593Smuzhiyun
safexcel_aead_sha512_cra_init(struct crypto_tfm * tfm)1829*4882a593Smuzhiyun static int safexcel_aead_sha512_cra_init(struct crypto_tfm *tfm)
1830*4882a593Smuzhiyun {
1831*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1832*4882a593Smuzhiyun
1833*4882a593Smuzhiyun safexcel_aead_cra_init(tfm);
1834*4882a593Smuzhiyun ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1835*4882a593Smuzhiyun ctx->state_sz = SHA512_DIGEST_SIZE;
1836*4882a593Smuzhiyun return 0;
1837*4882a593Smuzhiyun }
1838*4882a593Smuzhiyun
1839*4882a593Smuzhiyun struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_aes = {
1840*4882a593Smuzhiyun .type = SAFEXCEL_ALG_TYPE_AEAD,
1841*4882a593Smuzhiyun .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
1842*4882a593Smuzhiyun .alg.aead = {
1843*4882a593Smuzhiyun .setkey = safexcel_aead_setkey,
1844*4882a593Smuzhiyun .encrypt = safexcel_aead_encrypt,
1845*4882a593Smuzhiyun .decrypt = safexcel_aead_decrypt,
1846*4882a593Smuzhiyun .ivsize = AES_BLOCK_SIZE,
1847*4882a593Smuzhiyun .maxauthsize = SHA512_DIGEST_SIZE,
1848*4882a593Smuzhiyun .base = {
1849*4882a593Smuzhiyun .cra_name = "authenc(hmac(sha512),cbc(aes))",
1850*4882a593Smuzhiyun .cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-aes",
1851*4882a593Smuzhiyun .cra_priority = SAFEXCEL_CRA_PRIORITY,
1852*4882a593Smuzhiyun .cra_flags = CRYPTO_ALG_ASYNC |
1853*4882a593Smuzhiyun CRYPTO_ALG_ALLOCATES_MEMORY |
1854*4882a593Smuzhiyun CRYPTO_ALG_KERN_DRIVER_ONLY,
1855*4882a593Smuzhiyun .cra_blocksize = AES_BLOCK_SIZE,
1856*4882a593Smuzhiyun .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1857*4882a593Smuzhiyun .cra_alignmask = 0,
1858*4882a593Smuzhiyun .cra_init = safexcel_aead_sha512_cra_init,
1859*4882a593Smuzhiyun .cra_exit = safexcel_aead_cra_exit,
1860*4882a593Smuzhiyun .cra_module = THIS_MODULE,
1861*4882a593Smuzhiyun },
1862*4882a593Smuzhiyun },
1863*4882a593Smuzhiyun };
1864*4882a593Smuzhiyun
safexcel_aead_sha384_cra_init(struct crypto_tfm * tfm)1865*4882a593Smuzhiyun static int safexcel_aead_sha384_cra_init(struct crypto_tfm *tfm)
1866*4882a593Smuzhiyun {
1867*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1868*4882a593Smuzhiyun
1869*4882a593Smuzhiyun safexcel_aead_cra_init(tfm);
1870*4882a593Smuzhiyun ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1871*4882a593Smuzhiyun ctx->state_sz = SHA512_DIGEST_SIZE;
1872*4882a593Smuzhiyun return 0;
1873*4882a593Smuzhiyun }
1874*4882a593Smuzhiyun
/* authenc(hmac(sha384),cbc(aes)): AES-CBC encryption with HMAC-SHA384 authentication */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	/* SHA384 runs on the hardware's SHA2-512 engine */
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1900*4882a593Smuzhiyun
safexcel_aead_sha1_des3_cra_init(struct crypto_tfm * tfm)1901*4882a593Smuzhiyun static int safexcel_aead_sha1_des3_cra_init(struct crypto_tfm *tfm)
1902*4882a593Smuzhiyun {
1903*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1904*4882a593Smuzhiyun
1905*4882a593Smuzhiyun safexcel_aead_sha1_cra_init(tfm);
1906*4882a593Smuzhiyun ctx->alg = SAFEXCEL_3DES; /* override default */
1907*4882a593Smuzhiyun ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1908*4882a593Smuzhiyun ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1909*4882a593Smuzhiyun return 0;
1910*4882a593Smuzhiyun }
1911*4882a593Smuzhiyun
/* authenc(hmac(sha1),cbc(des3_ede)): 3DES-EDE-CBC encryption with HMAC-SHA1 authentication */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1937*4882a593Smuzhiyun
safexcel_aead_sha256_des3_cra_init(struct crypto_tfm * tfm)1938*4882a593Smuzhiyun static int safexcel_aead_sha256_des3_cra_init(struct crypto_tfm *tfm)
1939*4882a593Smuzhiyun {
1940*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1941*4882a593Smuzhiyun
1942*4882a593Smuzhiyun safexcel_aead_sha256_cra_init(tfm);
1943*4882a593Smuzhiyun ctx->alg = SAFEXCEL_3DES; /* override default */
1944*4882a593Smuzhiyun ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1945*4882a593Smuzhiyun ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1946*4882a593Smuzhiyun return 0;
1947*4882a593Smuzhiyun }
1948*4882a593Smuzhiyun
/* authenc(hmac(sha256),cbc(des3_ede)): 3DES-EDE-CBC encryption with HMAC-SHA256 authentication */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1974*4882a593Smuzhiyun
safexcel_aead_sha224_des3_cra_init(struct crypto_tfm * tfm)1975*4882a593Smuzhiyun static int safexcel_aead_sha224_des3_cra_init(struct crypto_tfm *tfm)
1976*4882a593Smuzhiyun {
1977*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1978*4882a593Smuzhiyun
1979*4882a593Smuzhiyun safexcel_aead_sha224_cra_init(tfm);
1980*4882a593Smuzhiyun ctx->alg = SAFEXCEL_3DES; /* override default */
1981*4882a593Smuzhiyun ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1982*4882a593Smuzhiyun ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1983*4882a593Smuzhiyun return 0;
1984*4882a593Smuzhiyun }
1985*4882a593Smuzhiyun
/* authenc(hmac(sha224),cbc(des3_ede)): 3DES-EDE-CBC encryption with HMAC-SHA224 authentication */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	/* SHA224 runs on the hardware's SHA2-256 engine */
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2011*4882a593Smuzhiyun
safexcel_aead_sha512_des3_cra_init(struct crypto_tfm * tfm)2012*4882a593Smuzhiyun static int safexcel_aead_sha512_des3_cra_init(struct crypto_tfm *tfm)
2013*4882a593Smuzhiyun {
2014*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2015*4882a593Smuzhiyun
2016*4882a593Smuzhiyun safexcel_aead_sha512_cra_init(tfm);
2017*4882a593Smuzhiyun ctx->alg = SAFEXCEL_3DES; /* override default */
2018*4882a593Smuzhiyun ctx->blocksz = DES3_EDE_BLOCK_SIZE;
2019*4882a593Smuzhiyun ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2020*4882a593Smuzhiyun return 0;
2021*4882a593Smuzhiyun }
2022*4882a593Smuzhiyun
/* authenc(hmac(sha512),cbc(des3_ede)): 3DES-EDE-CBC encryption with HMAC-SHA512 authentication */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2048*4882a593Smuzhiyun
safexcel_aead_sha384_des3_cra_init(struct crypto_tfm * tfm)2049*4882a593Smuzhiyun static int safexcel_aead_sha384_des3_cra_init(struct crypto_tfm *tfm)
2050*4882a593Smuzhiyun {
2051*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2052*4882a593Smuzhiyun
2053*4882a593Smuzhiyun safexcel_aead_sha384_cra_init(tfm);
2054*4882a593Smuzhiyun ctx->alg = SAFEXCEL_3DES; /* override default */
2055*4882a593Smuzhiyun ctx->blocksz = DES3_EDE_BLOCK_SIZE;
2056*4882a593Smuzhiyun ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2057*4882a593Smuzhiyun return 0;
2058*4882a593Smuzhiyun }
2059*4882a593Smuzhiyun
/* authenc(hmac(sha384),cbc(des3_ede)): 3DES-EDE-CBC encryption with HMAC-SHA384 authentication */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	/* SHA384 runs on the hardware's SHA2-512 engine */
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2085*4882a593Smuzhiyun
safexcel_aead_sha1_des_cra_init(struct crypto_tfm * tfm)2086*4882a593Smuzhiyun static int safexcel_aead_sha1_des_cra_init(struct crypto_tfm *tfm)
2087*4882a593Smuzhiyun {
2088*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2089*4882a593Smuzhiyun
2090*4882a593Smuzhiyun safexcel_aead_sha1_cra_init(tfm);
2091*4882a593Smuzhiyun ctx->alg = SAFEXCEL_DES; /* override default */
2092*4882a593Smuzhiyun ctx->blocksz = DES_BLOCK_SIZE;
2093*4882a593Smuzhiyun ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2094*4882a593Smuzhiyun return 0;
2095*4882a593Smuzhiyun }
2096*4882a593Smuzhiyun
/* authenc(hmac(sha1),cbc(des)): DES-CBC encryption with HMAC-SHA1 authentication */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2122*4882a593Smuzhiyun
safexcel_aead_sha256_des_cra_init(struct crypto_tfm * tfm)2123*4882a593Smuzhiyun static int safexcel_aead_sha256_des_cra_init(struct crypto_tfm *tfm)
2124*4882a593Smuzhiyun {
2125*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2126*4882a593Smuzhiyun
2127*4882a593Smuzhiyun safexcel_aead_sha256_cra_init(tfm);
2128*4882a593Smuzhiyun ctx->alg = SAFEXCEL_DES; /* override default */
2129*4882a593Smuzhiyun ctx->blocksz = DES_BLOCK_SIZE;
2130*4882a593Smuzhiyun ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2131*4882a593Smuzhiyun return 0;
2132*4882a593Smuzhiyun }
2133*4882a593Smuzhiyun
/* authenc(hmac(sha256),cbc(des)): DES-CBC encryption with HMAC-SHA256 authentication */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2159*4882a593Smuzhiyun
safexcel_aead_sha224_des_cra_init(struct crypto_tfm * tfm)2160*4882a593Smuzhiyun static int safexcel_aead_sha224_des_cra_init(struct crypto_tfm *tfm)
2161*4882a593Smuzhiyun {
2162*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2163*4882a593Smuzhiyun
2164*4882a593Smuzhiyun safexcel_aead_sha224_cra_init(tfm);
2165*4882a593Smuzhiyun ctx->alg = SAFEXCEL_DES; /* override default */
2166*4882a593Smuzhiyun ctx->blocksz = DES_BLOCK_SIZE;
2167*4882a593Smuzhiyun ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2168*4882a593Smuzhiyun return 0;
2169*4882a593Smuzhiyun }
2170*4882a593Smuzhiyun
/* authenc(hmac(sha224),cbc(des)): DES-CBC encryption with HMAC-SHA224 authentication */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	/* SHA224 runs on the hardware's SHA2-256 engine */
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2196*4882a593Smuzhiyun
safexcel_aead_sha512_des_cra_init(struct crypto_tfm * tfm)2197*4882a593Smuzhiyun static int safexcel_aead_sha512_des_cra_init(struct crypto_tfm *tfm)
2198*4882a593Smuzhiyun {
2199*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2200*4882a593Smuzhiyun
2201*4882a593Smuzhiyun safexcel_aead_sha512_cra_init(tfm);
2202*4882a593Smuzhiyun ctx->alg = SAFEXCEL_DES; /* override default */
2203*4882a593Smuzhiyun ctx->blocksz = DES_BLOCK_SIZE;
2204*4882a593Smuzhiyun ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2205*4882a593Smuzhiyun return 0;
2206*4882a593Smuzhiyun }
2207*4882a593Smuzhiyun
/* authenc(hmac(sha512),cbc(des)): DES-CBC encryption with HMAC-SHA512 authentication */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2233*4882a593Smuzhiyun
safexcel_aead_sha384_des_cra_init(struct crypto_tfm * tfm)2234*4882a593Smuzhiyun static int safexcel_aead_sha384_des_cra_init(struct crypto_tfm *tfm)
2235*4882a593Smuzhiyun {
2236*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2237*4882a593Smuzhiyun
2238*4882a593Smuzhiyun safexcel_aead_sha384_cra_init(tfm);
2239*4882a593Smuzhiyun ctx->alg = SAFEXCEL_DES; /* override default */
2240*4882a593Smuzhiyun ctx->blocksz = DES_BLOCK_SIZE;
2241*4882a593Smuzhiyun ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2242*4882a593Smuzhiyun return 0;
2243*4882a593Smuzhiyun }
2244*4882a593Smuzhiyun
/* authenc(hmac(sha384),cbc(des)): DES-CBC encryption with HMAC-SHA384 authentication */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	/* SHA384 runs on the hardware's SHA2-512 engine */
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2270*4882a593Smuzhiyun
safexcel_aead_sha1_ctr_cra_init(struct crypto_tfm * tfm)2271*4882a593Smuzhiyun static int safexcel_aead_sha1_ctr_cra_init(struct crypto_tfm *tfm)
2272*4882a593Smuzhiyun {
2273*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2274*4882a593Smuzhiyun
2275*4882a593Smuzhiyun safexcel_aead_sha1_cra_init(tfm);
2276*4882a593Smuzhiyun ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2277*4882a593Smuzhiyun return 0;
2278*4882a593Smuzhiyun }
2279*4882a593Smuzhiyun
/* authenc(hmac(sha1),rfc3686(ctr(aes))): AES-CTR (RFC3686) encryption with HMAC-SHA1 authentication */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			/* CTR behaves as a stream cipher: block size 1 */
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2305*4882a593Smuzhiyun
safexcel_aead_sha256_ctr_cra_init(struct crypto_tfm * tfm)2306*4882a593Smuzhiyun static int safexcel_aead_sha256_ctr_cra_init(struct crypto_tfm *tfm)
2307*4882a593Smuzhiyun {
2308*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2309*4882a593Smuzhiyun
2310*4882a593Smuzhiyun safexcel_aead_sha256_cra_init(tfm);
2311*4882a593Smuzhiyun ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2312*4882a593Smuzhiyun return 0;
2313*4882a593Smuzhiyun }
2314*4882a593Smuzhiyun
/* authenc(hmac(sha256),rfc3686(ctr(aes))): AES-CTR (RFC3686) encryption with HMAC-SHA256 authentication */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			/* CTR behaves as a stream cipher: block size 1 */
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2340*4882a593Smuzhiyun
safexcel_aead_sha224_ctr_cra_init(struct crypto_tfm * tfm)2341*4882a593Smuzhiyun static int safexcel_aead_sha224_ctr_cra_init(struct crypto_tfm *tfm)
2342*4882a593Smuzhiyun {
2343*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2344*4882a593Smuzhiyun
2345*4882a593Smuzhiyun safexcel_aead_sha224_cra_init(tfm);
2346*4882a593Smuzhiyun ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2347*4882a593Smuzhiyun return 0;
2348*4882a593Smuzhiyun }
2349*4882a593Smuzhiyun
/* authenc(hmac(sha224),rfc3686(ctr(aes))): AES-CTR (RFC3686) encryption with HMAC-SHA224 authentication */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	/* SHA224 runs on the hardware's SHA2-256 engine */
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			/* CTR behaves as a stream cipher: block size 1 */
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2375*4882a593Smuzhiyun
safexcel_aead_sha512_ctr_cra_init(struct crypto_tfm * tfm)2376*4882a593Smuzhiyun static int safexcel_aead_sha512_ctr_cra_init(struct crypto_tfm *tfm)
2377*4882a593Smuzhiyun {
2378*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2379*4882a593Smuzhiyun
2380*4882a593Smuzhiyun safexcel_aead_sha512_cra_init(tfm);
2381*4882a593Smuzhiyun ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2382*4882a593Smuzhiyun return 0;
2383*4882a593Smuzhiyun }
2384*4882a593Smuzhiyun
/* authenc(hmac(sha512),rfc3686(ctr(aes))): AES-CTR (RFC3686) encryption with HMAC-SHA512 authentication */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			/* CTR behaves as a stream cipher: block size 1 */
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2410*4882a593Smuzhiyun
safexcel_aead_sha384_ctr_cra_init(struct crypto_tfm * tfm)2411*4882a593Smuzhiyun static int safexcel_aead_sha384_ctr_cra_init(struct crypto_tfm *tfm)
2412*4882a593Smuzhiyun {
2413*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2414*4882a593Smuzhiyun
2415*4882a593Smuzhiyun safexcel_aead_sha384_cra_init(tfm);
2416*4882a593Smuzhiyun ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2417*4882a593Smuzhiyun return 0;
2418*4882a593Smuzhiyun }
2419*4882a593Smuzhiyun
/* authenc(hmac(sha384),rfc3686(ctr(aes))): AES-CTR (RFC3686) encryption with HMAC-SHA384 authentication */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	/* SHA384 runs on the hardware's SHA2-512 engine */
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			/* CTR behaves as a stream cipher: block size 1 */
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2445*4882a593Smuzhiyun
/*
 * Set the combined AES-XTS key. Per the XTS convention the supplied key is
 * twice the cipher key length: the first half is the data (cipher) key and
 * the second half is the tweak key. Both halves are expanded and stored
 * back-to-back in ctx->key in little-endian form for the engine.
 *
 * Returns 0 on success or a negative errno from key verification/expansion.
 */
static int safexcel_skcipher_aesxts_setkey(struct crypto_skcipher *ctfm,
					   const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct crypto_aes_ctx aes;
	int ret, i;
	unsigned int keylen;

	/* Check for illegal XTS keys */
	ret = xts_verify_key(ctfm, key, len);
	if (ret)
		return ret;

	/* Only half of the key data is cipher key */
	keylen = (len >> 1);
	ret = aes_expandkey(&aes, key, keylen);
	if (ret)
		return ret;

	/*
	 * If a context record may live in the engine's record cache, compare
	 * the cached cipher key against the new one *before* overwriting it,
	 * so a key change marks the old context for invalidation.
	 */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < keylen / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < keylen / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

	/* The other half is the tweak key */
	ret = aes_expandkey(&aes, (u8 *)(key + keylen), keylen);
	if (ret)
		return ret;

	/* Same compare-before-overwrite check for the tweak key half */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < keylen / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i + keylen / sizeof(u32)]) !=
			    aes.key_enc[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	/* Tweak key is stored directly after the cipher key */
	for (i = 0; i < keylen / sizeof(u32); i++)
		ctx->key[i + keylen / sizeof(u32)] =
			cpu_to_le32(aes.key_enc[i]);

	ctx->key_len = keylen << 1;

	/* Wipe the expanded key material from the stack */
	memzero_explicit(&aes, sizeof(aes));
	return 0;
}
2503*4882a593Smuzhiyun
safexcel_skcipher_aes_xts_cra_init(struct crypto_tfm * tfm)2504*4882a593Smuzhiyun static int safexcel_skcipher_aes_xts_cra_init(struct crypto_tfm *tfm)
2505*4882a593Smuzhiyun {
2506*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2507*4882a593Smuzhiyun
2508*4882a593Smuzhiyun safexcel_skcipher_cra_init(tfm);
2509*4882a593Smuzhiyun ctx->alg = SAFEXCEL_AES;
2510*4882a593Smuzhiyun ctx->blocksz = AES_BLOCK_SIZE;
2511*4882a593Smuzhiyun ctx->xts = 1;
2512*4882a593Smuzhiyun ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XTS;
2513*4882a593Smuzhiyun return 0;
2514*4882a593Smuzhiyun }
2515*4882a593Smuzhiyun
safexcel_encrypt_xts(struct skcipher_request * req)2516*4882a593Smuzhiyun static int safexcel_encrypt_xts(struct skcipher_request *req)
2517*4882a593Smuzhiyun {
2518*4882a593Smuzhiyun if (req->cryptlen < XTS_BLOCK_SIZE)
2519*4882a593Smuzhiyun return -EINVAL;
2520*4882a593Smuzhiyun return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2521*4882a593Smuzhiyun SAFEXCEL_ENCRYPT);
2522*4882a593Smuzhiyun }
2523*4882a593Smuzhiyun
safexcel_decrypt_xts(struct skcipher_request * req)2524*4882a593Smuzhiyun static int safexcel_decrypt_xts(struct skcipher_request *req)
2525*4882a593Smuzhiyun {
2526*4882a593Smuzhiyun if (req->cryptlen < XTS_BLOCK_SIZE)
2527*4882a593Smuzhiyun return -EINVAL;
2528*4882a593Smuzhiyun return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2529*4882a593Smuzhiyun SAFEXCEL_DECRYPT);
2530*4882a593Smuzhiyun }
2531*4882a593Smuzhiyun
2532*4882a593Smuzhiyun struct safexcel_alg_template safexcel_alg_xts_aes = {
2533*4882a593Smuzhiyun .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
2534*4882a593Smuzhiyun .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XTS,
2535*4882a593Smuzhiyun .alg.skcipher = {
2536*4882a593Smuzhiyun .setkey = safexcel_skcipher_aesxts_setkey,
2537*4882a593Smuzhiyun .encrypt = safexcel_encrypt_xts,
2538*4882a593Smuzhiyun .decrypt = safexcel_decrypt_xts,
2539*4882a593Smuzhiyun /* XTS actually uses 2 AES keys glued together */
2540*4882a593Smuzhiyun .min_keysize = AES_MIN_KEY_SIZE * 2,
2541*4882a593Smuzhiyun .max_keysize = AES_MAX_KEY_SIZE * 2,
2542*4882a593Smuzhiyun .ivsize = XTS_BLOCK_SIZE,
2543*4882a593Smuzhiyun .base = {
2544*4882a593Smuzhiyun .cra_name = "xts(aes)",
2545*4882a593Smuzhiyun .cra_driver_name = "safexcel-xts-aes",
2546*4882a593Smuzhiyun .cra_priority = SAFEXCEL_CRA_PRIORITY,
2547*4882a593Smuzhiyun .cra_flags = CRYPTO_ALG_ASYNC |
2548*4882a593Smuzhiyun CRYPTO_ALG_ALLOCATES_MEMORY |
2549*4882a593Smuzhiyun CRYPTO_ALG_KERN_DRIVER_ONLY,
2550*4882a593Smuzhiyun .cra_blocksize = XTS_BLOCK_SIZE,
2551*4882a593Smuzhiyun .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2552*4882a593Smuzhiyun .cra_alignmask = 0,
2553*4882a593Smuzhiyun .cra_init = safexcel_skcipher_aes_xts_cra_init,
2554*4882a593Smuzhiyun .cra_exit = safexcel_skcipher_cra_exit,
2555*4882a593Smuzhiyun .cra_module = THIS_MODULE,
2556*4882a593Smuzhiyun },
2557*4882a593Smuzhiyun },
2558*4882a593Smuzhiyun };
2559*4882a593Smuzhiyun
safexcel_aead_gcm_setkey(struct crypto_aead * ctfm,const u8 * key,unsigned int len)2560*4882a593Smuzhiyun static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
2561*4882a593Smuzhiyun unsigned int len)
2562*4882a593Smuzhiyun {
2563*4882a593Smuzhiyun struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
2564*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2565*4882a593Smuzhiyun struct safexcel_crypto_priv *priv = ctx->base.priv;
2566*4882a593Smuzhiyun struct crypto_aes_ctx aes;
2567*4882a593Smuzhiyun u32 hashkey[AES_BLOCK_SIZE >> 2];
2568*4882a593Smuzhiyun int ret, i;
2569*4882a593Smuzhiyun
2570*4882a593Smuzhiyun ret = aes_expandkey(&aes, key, len);
2571*4882a593Smuzhiyun if (ret) {
2572*4882a593Smuzhiyun memzero_explicit(&aes, sizeof(aes));
2573*4882a593Smuzhiyun return ret;
2574*4882a593Smuzhiyun }
2575*4882a593Smuzhiyun
2576*4882a593Smuzhiyun if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2577*4882a593Smuzhiyun for (i = 0; i < len / sizeof(u32); i++) {
2578*4882a593Smuzhiyun if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
2579*4882a593Smuzhiyun ctx->base.needs_inv = true;
2580*4882a593Smuzhiyun break;
2581*4882a593Smuzhiyun }
2582*4882a593Smuzhiyun }
2583*4882a593Smuzhiyun }
2584*4882a593Smuzhiyun
2585*4882a593Smuzhiyun for (i = 0; i < len / sizeof(u32); i++)
2586*4882a593Smuzhiyun ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2587*4882a593Smuzhiyun
2588*4882a593Smuzhiyun ctx->key_len = len;
2589*4882a593Smuzhiyun
2590*4882a593Smuzhiyun /* Compute hash key by encrypting zeroes with cipher key */
2591*4882a593Smuzhiyun crypto_cipher_clear_flags(ctx->hkaes, CRYPTO_TFM_REQ_MASK);
2592*4882a593Smuzhiyun crypto_cipher_set_flags(ctx->hkaes, crypto_aead_get_flags(ctfm) &
2593*4882a593Smuzhiyun CRYPTO_TFM_REQ_MASK);
2594*4882a593Smuzhiyun ret = crypto_cipher_setkey(ctx->hkaes, key, len);
2595*4882a593Smuzhiyun if (ret)
2596*4882a593Smuzhiyun return ret;
2597*4882a593Smuzhiyun
2598*4882a593Smuzhiyun memset(hashkey, 0, AES_BLOCK_SIZE);
2599*4882a593Smuzhiyun crypto_cipher_encrypt_one(ctx->hkaes, (u8 *)hashkey, (u8 *)hashkey);
2600*4882a593Smuzhiyun
2601*4882a593Smuzhiyun if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2602*4882a593Smuzhiyun for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
2603*4882a593Smuzhiyun if (be32_to_cpu(ctx->base.ipad.be[i]) != hashkey[i]) {
2604*4882a593Smuzhiyun ctx->base.needs_inv = true;
2605*4882a593Smuzhiyun break;
2606*4882a593Smuzhiyun }
2607*4882a593Smuzhiyun }
2608*4882a593Smuzhiyun }
2609*4882a593Smuzhiyun
2610*4882a593Smuzhiyun for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
2611*4882a593Smuzhiyun ctx->base.ipad.be[i] = cpu_to_be32(hashkey[i]);
2612*4882a593Smuzhiyun
2613*4882a593Smuzhiyun memzero_explicit(hashkey, AES_BLOCK_SIZE);
2614*4882a593Smuzhiyun memzero_explicit(&aes, sizeof(aes));
2615*4882a593Smuzhiyun return 0;
2616*4882a593Smuzhiyun }
2617*4882a593Smuzhiyun
safexcel_aead_gcm_cra_init(struct crypto_tfm * tfm)2618*4882a593Smuzhiyun static int safexcel_aead_gcm_cra_init(struct crypto_tfm *tfm)
2619*4882a593Smuzhiyun {
2620*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2621*4882a593Smuzhiyun
2622*4882a593Smuzhiyun safexcel_aead_cra_init(tfm);
2623*4882a593Smuzhiyun ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_GHASH;
2624*4882a593Smuzhiyun ctx->state_sz = GHASH_BLOCK_SIZE;
2625*4882a593Smuzhiyun ctx->xcm = EIP197_XCM_MODE_GCM;
2626*4882a593Smuzhiyun ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
2627*4882a593Smuzhiyun
2628*4882a593Smuzhiyun ctx->hkaes = crypto_alloc_cipher("aes", 0, 0);
2629*4882a593Smuzhiyun return PTR_ERR_OR_ZERO(ctx->hkaes);
2630*4882a593Smuzhiyun }
2631*4882a593Smuzhiyun
safexcel_aead_gcm_cra_exit(struct crypto_tfm * tfm)2632*4882a593Smuzhiyun static void safexcel_aead_gcm_cra_exit(struct crypto_tfm *tfm)
2633*4882a593Smuzhiyun {
2634*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2635*4882a593Smuzhiyun
2636*4882a593Smuzhiyun crypto_free_cipher(ctx->hkaes);
2637*4882a593Smuzhiyun safexcel_aead_cra_exit(tfm);
2638*4882a593Smuzhiyun }
2639*4882a593Smuzhiyun
/* Validate the requested GCM tag length via the generic helper. */
static int safexcel_aead_gcm_setauthsize(struct crypto_aead *tfm,
					 unsigned int authsize)
{
	return crypto_gcm_check_authsize(authsize);
}
2645*4882a593Smuzhiyun
2646*4882a593Smuzhiyun struct safexcel_alg_template safexcel_alg_gcm = {
2647*4882a593Smuzhiyun .type = SAFEXCEL_ALG_TYPE_AEAD,
2648*4882a593Smuzhiyun .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
2649*4882a593Smuzhiyun .alg.aead = {
2650*4882a593Smuzhiyun .setkey = safexcel_aead_gcm_setkey,
2651*4882a593Smuzhiyun .setauthsize = safexcel_aead_gcm_setauthsize,
2652*4882a593Smuzhiyun .encrypt = safexcel_aead_encrypt,
2653*4882a593Smuzhiyun .decrypt = safexcel_aead_decrypt,
2654*4882a593Smuzhiyun .ivsize = GCM_AES_IV_SIZE,
2655*4882a593Smuzhiyun .maxauthsize = GHASH_DIGEST_SIZE,
2656*4882a593Smuzhiyun .base = {
2657*4882a593Smuzhiyun .cra_name = "gcm(aes)",
2658*4882a593Smuzhiyun .cra_driver_name = "safexcel-gcm-aes",
2659*4882a593Smuzhiyun .cra_priority = SAFEXCEL_CRA_PRIORITY,
2660*4882a593Smuzhiyun .cra_flags = CRYPTO_ALG_ASYNC |
2661*4882a593Smuzhiyun CRYPTO_ALG_ALLOCATES_MEMORY |
2662*4882a593Smuzhiyun CRYPTO_ALG_KERN_DRIVER_ONLY,
2663*4882a593Smuzhiyun .cra_blocksize = 1,
2664*4882a593Smuzhiyun .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2665*4882a593Smuzhiyun .cra_alignmask = 0,
2666*4882a593Smuzhiyun .cra_init = safexcel_aead_gcm_cra_init,
2667*4882a593Smuzhiyun .cra_exit = safexcel_aead_gcm_cra_exit,
2668*4882a593Smuzhiyun .cra_module = THIS_MODULE,
2669*4882a593Smuzhiyun },
2670*4882a593Smuzhiyun },
2671*4882a593Smuzhiyun };
2672*4882a593Smuzhiyun
/*
 * CCM setkey: the same AES key is used for both encryption and the
 * CBC-MAC, so it is stored twice - little-endian in ctx->key[] for the
 * cipher, and big-endian in the ipad area (after two reserved AES blocks)
 * for the XCBC/CBC-MAC engine.
 */
static int safexcel_aead_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
				    unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct crypto_aes_ctx aes;
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret) {
		memzero_explicit(&aes, sizeof(aes));
		return ret;
	}

	/* Key change with a live HW context requires record invalidation */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < len / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	/* Store cipher key (LE) and CBC-MAC key (BE, after 2 AES blocks) */
	for (i = 0; i < len / sizeof(u32); i++) {
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
		ctx->base.ipad.be[i + 2 * AES_BLOCK_SIZE / sizeof(u32)] =
			cpu_to_be32(aes.key_enc[i]);
	}

	ctx->key_len = len;
	/* Hash state: 2 fixed AES blocks plus the (variable-size) MAC key */
	ctx->state_sz = 2 * AES_BLOCK_SIZE + len;

	/* Select the XCBC variant matching the AES key size */
	if (len == AES_KEYSIZE_192)
		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
	else if (len == AES_KEYSIZE_256)
		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
	else
		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}
2716*4882a593Smuzhiyun
safexcel_aead_ccm_cra_init(struct crypto_tfm * tfm)2717*4882a593Smuzhiyun static int safexcel_aead_ccm_cra_init(struct crypto_tfm *tfm)
2718*4882a593Smuzhiyun {
2719*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2720*4882a593Smuzhiyun
2721*4882a593Smuzhiyun safexcel_aead_cra_init(tfm);
2722*4882a593Smuzhiyun ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2723*4882a593Smuzhiyun ctx->state_sz = 3 * AES_BLOCK_SIZE;
2724*4882a593Smuzhiyun ctx->xcm = EIP197_XCM_MODE_CCM;
2725*4882a593Smuzhiyun ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
2726*4882a593Smuzhiyun ctx->ctrinit = 0;
2727*4882a593Smuzhiyun return 0;
2728*4882a593Smuzhiyun }
2729*4882a593Smuzhiyun
safexcel_aead_ccm_setauthsize(struct crypto_aead * tfm,unsigned int authsize)2730*4882a593Smuzhiyun static int safexcel_aead_ccm_setauthsize(struct crypto_aead *tfm,
2731*4882a593Smuzhiyun unsigned int authsize)
2732*4882a593Smuzhiyun {
2733*4882a593Smuzhiyun /* Borrowed from crypto/ccm.c */
2734*4882a593Smuzhiyun switch (authsize) {
2735*4882a593Smuzhiyun case 4:
2736*4882a593Smuzhiyun case 6:
2737*4882a593Smuzhiyun case 8:
2738*4882a593Smuzhiyun case 10:
2739*4882a593Smuzhiyun case 12:
2740*4882a593Smuzhiyun case 14:
2741*4882a593Smuzhiyun case 16:
2742*4882a593Smuzhiyun break;
2743*4882a593Smuzhiyun default:
2744*4882a593Smuzhiyun return -EINVAL;
2745*4882a593Smuzhiyun }
2746*4882a593Smuzhiyun
2747*4882a593Smuzhiyun return 0;
2748*4882a593Smuzhiyun }
2749*4882a593Smuzhiyun
safexcel_ccm_encrypt(struct aead_request * req)2750*4882a593Smuzhiyun static int safexcel_ccm_encrypt(struct aead_request *req)
2751*4882a593Smuzhiyun {
2752*4882a593Smuzhiyun struct safexcel_cipher_req *creq = aead_request_ctx(req);
2753*4882a593Smuzhiyun
2754*4882a593Smuzhiyun if (req->iv[0] < 1 || req->iv[0] > 7)
2755*4882a593Smuzhiyun return -EINVAL;
2756*4882a593Smuzhiyun
2757*4882a593Smuzhiyun return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
2758*4882a593Smuzhiyun }
2759*4882a593Smuzhiyun
safexcel_ccm_decrypt(struct aead_request * req)2760*4882a593Smuzhiyun static int safexcel_ccm_decrypt(struct aead_request *req)
2761*4882a593Smuzhiyun {
2762*4882a593Smuzhiyun struct safexcel_cipher_req *creq = aead_request_ctx(req);
2763*4882a593Smuzhiyun
2764*4882a593Smuzhiyun if (req->iv[0] < 1 || req->iv[0] > 7)
2765*4882a593Smuzhiyun return -EINVAL;
2766*4882a593Smuzhiyun
2767*4882a593Smuzhiyun return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
2768*4882a593Smuzhiyun }
2769*4882a593Smuzhiyun
2770*4882a593Smuzhiyun struct safexcel_alg_template safexcel_alg_ccm = {
2771*4882a593Smuzhiyun .type = SAFEXCEL_ALG_TYPE_AEAD,
2772*4882a593Smuzhiyun .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
2773*4882a593Smuzhiyun .alg.aead = {
2774*4882a593Smuzhiyun .setkey = safexcel_aead_ccm_setkey,
2775*4882a593Smuzhiyun .setauthsize = safexcel_aead_ccm_setauthsize,
2776*4882a593Smuzhiyun .encrypt = safexcel_ccm_encrypt,
2777*4882a593Smuzhiyun .decrypt = safexcel_ccm_decrypt,
2778*4882a593Smuzhiyun .ivsize = AES_BLOCK_SIZE,
2779*4882a593Smuzhiyun .maxauthsize = AES_BLOCK_SIZE,
2780*4882a593Smuzhiyun .base = {
2781*4882a593Smuzhiyun .cra_name = "ccm(aes)",
2782*4882a593Smuzhiyun .cra_driver_name = "safexcel-ccm-aes",
2783*4882a593Smuzhiyun .cra_priority = SAFEXCEL_CRA_PRIORITY,
2784*4882a593Smuzhiyun .cra_flags = CRYPTO_ALG_ASYNC |
2785*4882a593Smuzhiyun CRYPTO_ALG_ALLOCATES_MEMORY |
2786*4882a593Smuzhiyun CRYPTO_ALG_KERN_DRIVER_ONLY,
2787*4882a593Smuzhiyun .cra_blocksize = 1,
2788*4882a593Smuzhiyun .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2789*4882a593Smuzhiyun .cra_alignmask = 0,
2790*4882a593Smuzhiyun .cra_init = safexcel_aead_ccm_cra_init,
2791*4882a593Smuzhiyun .cra_exit = safexcel_aead_cra_exit,
2792*4882a593Smuzhiyun .cra_module = THIS_MODULE,
2793*4882a593Smuzhiyun },
2794*4882a593Smuzhiyun },
2795*4882a593Smuzhiyun };
2796*4882a593Smuzhiyun
safexcel_chacha20_setkey(struct safexcel_cipher_ctx * ctx,const u8 * key)2797*4882a593Smuzhiyun static void safexcel_chacha20_setkey(struct safexcel_cipher_ctx *ctx,
2798*4882a593Smuzhiyun const u8 *key)
2799*4882a593Smuzhiyun {
2800*4882a593Smuzhiyun struct safexcel_crypto_priv *priv = ctx->base.priv;
2801*4882a593Smuzhiyun
2802*4882a593Smuzhiyun if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
2803*4882a593Smuzhiyun if (memcmp(ctx->key, key, CHACHA_KEY_SIZE))
2804*4882a593Smuzhiyun ctx->base.needs_inv = true;
2805*4882a593Smuzhiyun
2806*4882a593Smuzhiyun memcpy(ctx->key, key, CHACHA_KEY_SIZE);
2807*4882a593Smuzhiyun ctx->key_len = CHACHA_KEY_SIZE;
2808*4882a593Smuzhiyun }
2809*4882a593Smuzhiyun
safexcel_skcipher_chacha20_setkey(struct crypto_skcipher * ctfm,const u8 * key,unsigned int len)2810*4882a593Smuzhiyun static int safexcel_skcipher_chacha20_setkey(struct crypto_skcipher *ctfm,
2811*4882a593Smuzhiyun const u8 *key, unsigned int len)
2812*4882a593Smuzhiyun {
2813*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
2814*4882a593Smuzhiyun
2815*4882a593Smuzhiyun if (len != CHACHA_KEY_SIZE)
2816*4882a593Smuzhiyun return -EINVAL;
2817*4882a593Smuzhiyun
2818*4882a593Smuzhiyun safexcel_chacha20_setkey(ctx, key);
2819*4882a593Smuzhiyun
2820*4882a593Smuzhiyun return 0;
2821*4882a593Smuzhiyun }
2822*4882a593Smuzhiyun
safexcel_skcipher_chacha20_cra_init(struct crypto_tfm * tfm)2823*4882a593Smuzhiyun static int safexcel_skcipher_chacha20_cra_init(struct crypto_tfm *tfm)
2824*4882a593Smuzhiyun {
2825*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2826*4882a593Smuzhiyun
2827*4882a593Smuzhiyun safexcel_skcipher_cra_init(tfm);
2828*4882a593Smuzhiyun ctx->alg = SAFEXCEL_CHACHA20;
2829*4882a593Smuzhiyun ctx->ctrinit = 0;
2830*4882a593Smuzhiyun ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32;
2831*4882a593Smuzhiyun return 0;
2832*4882a593Smuzhiyun }
2833*4882a593Smuzhiyun
2834*4882a593Smuzhiyun struct safexcel_alg_template safexcel_alg_chacha20 = {
2835*4882a593Smuzhiyun .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
2836*4882a593Smuzhiyun .algo_mask = SAFEXCEL_ALG_CHACHA20,
2837*4882a593Smuzhiyun .alg.skcipher = {
2838*4882a593Smuzhiyun .setkey = safexcel_skcipher_chacha20_setkey,
2839*4882a593Smuzhiyun .encrypt = safexcel_encrypt,
2840*4882a593Smuzhiyun .decrypt = safexcel_decrypt,
2841*4882a593Smuzhiyun .min_keysize = CHACHA_KEY_SIZE,
2842*4882a593Smuzhiyun .max_keysize = CHACHA_KEY_SIZE,
2843*4882a593Smuzhiyun .ivsize = CHACHA_IV_SIZE,
2844*4882a593Smuzhiyun .base = {
2845*4882a593Smuzhiyun .cra_name = "chacha20",
2846*4882a593Smuzhiyun .cra_driver_name = "safexcel-chacha20",
2847*4882a593Smuzhiyun .cra_priority = SAFEXCEL_CRA_PRIORITY,
2848*4882a593Smuzhiyun .cra_flags = CRYPTO_ALG_ASYNC |
2849*4882a593Smuzhiyun CRYPTO_ALG_ALLOCATES_MEMORY |
2850*4882a593Smuzhiyun CRYPTO_ALG_KERN_DRIVER_ONLY,
2851*4882a593Smuzhiyun .cra_blocksize = 1,
2852*4882a593Smuzhiyun .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2853*4882a593Smuzhiyun .cra_alignmask = 0,
2854*4882a593Smuzhiyun .cra_init = safexcel_skcipher_chacha20_cra_init,
2855*4882a593Smuzhiyun .cra_exit = safexcel_skcipher_cra_exit,
2856*4882a593Smuzhiyun .cra_module = THIS_MODULE,
2857*4882a593Smuzhiyun },
2858*4882a593Smuzhiyun },
2859*4882a593Smuzhiyun };
2860*4882a593Smuzhiyun
safexcel_aead_chachapoly_setkey(struct crypto_aead * ctfm,const u8 * key,unsigned int len)2861*4882a593Smuzhiyun static int safexcel_aead_chachapoly_setkey(struct crypto_aead *ctfm,
2862*4882a593Smuzhiyun const u8 *key, unsigned int len)
2863*4882a593Smuzhiyun {
2864*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_aead_ctx(ctfm);
2865*4882a593Smuzhiyun
2866*4882a593Smuzhiyun if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP &&
2867*4882a593Smuzhiyun len > EIP197_AEAD_IPSEC_NONCE_SIZE) {
2868*4882a593Smuzhiyun /* ESP variant has nonce appended to key */
2869*4882a593Smuzhiyun len -= EIP197_AEAD_IPSEC_NONCE_SIZE;
2870*4882a593Smuzhiyun ctx->nonce = *(u32 *)(key + len);
2871*4882a593Smuzhiyun }
2872*4882a593Smuzhiyun if (len != CHACHA_KEY_SIZE)
2873*4882a593Smuzhiyun return -EINVAL;
2874*4882a593Smuzhiyun
2875*4882a593Smuzhiyun safexcel_chacha20_setkey(ctx, key);
2876*4882a593Smuzhiyun
2877*4882a593Smuzhiyun return 0;
2878*4882a593Smuzhiyun }
2879*4882a593Smuzhiyun
safexcel_aead_chachapoly_setauthsize(struct crypto_aead * tfm,unsigned int authsize)2880*4882a593Smuzhiyun static int safexcel_aead_chachapoly_setauthsize(struct crypto_aead *tfm,
2881*4882a593Smuzhiyun unsigned int authsize)
2882*4882a593Smuzhiyun {
2883*4882a593Smuzhiyun if (authsize != POLY1305_DIGEST_SIZE)
2884*4882a593Smuzhiyun return -EINVAL;
2885*4882a593Smuzhiyun return 0;
2886*4882a593Smuzhiyun }
2887*4882a593Smuzhiyun
/*
 * Common ChaCha20-Poly1305 request path. Large requests go to the HW
 * queue; "small" ones (zero/short payload, or ESP requests with too
 * little AAD) are redirected to the software fallback AEAD, since the
 * engine cannot handle those corner cases.
 */
static int safexcel_aead_chachapoly_crypt(struct aead_request *req,
					  enum safexcel_cipher_direction dir)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	/* NOTE: subreq aliases creq - the request ctx area is reused as the
	 * fallback subrequest (reqsize was enlarged in cra_init for this) */
	struct aead_request *subreq = aead_request_ctx(req);
	/* +1 u32 to hold the appended ESP nonce, if any */
	u32 key[CHACHA_KEY_SIZE / sizeof(u32) + 1];
	int ret = 0;

	/*
	 * Instead of wasting time detecting umpteen silly corner cases,
	 * just dump all "small" requests to the fallback implementation.
	 * HW would not be faster on such small requests anyway.
	 */
	if (likely((ctx->aead != EIP197_AEAD_TYPE_IPSEC_ESP ||
		    req->assoclen >= EIP197_AEAD_IPSEC_IV_SIZE) &&
		   req->cryptlen > POLY1305_DIGEST_SIZE)) {
		return safexcel_queue_req(&req->base, creq, dir);
	}

	/* HW cannot do full (AAD+payload) zero length, use fallback */
	memcpy(key, ctx->key, CHACHA_KEY_SIZE);
	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
		/* ESP variant has nonce appended to the key */
		key[CHACHA_KEY_SIZE / sizeof(u32)] = ctx->nonce;
		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
					 CHACHA_KEY_SIZE +
					 EIP197_AEAD_IPSEC_NONCE_SIZE);
	} else {
		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
					 CHACHA_KEY_SIZE);
	}
	if (ret) {
		/* Mirror the fallback's request flags back to our tfm */
		crypto_aead_clear_flags(aead, CRYPTO_TFM_REQ_MASK);
		crypto_aead_set_flags(aead, crypto_aead_get_flags(ctx->fback) &
					    CRYPTO_TFM_REQ_MASK);
		return ret;
	}

	/* Forward the whole request to the software implementation */
	aead_request_set_tfm(subreq, ctx->fback);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	return (dir ==  SAFEXCEL_ENCRYPT) ?
		crypto_aead_encrypt(subreq) :
		crypto_aead_decrypt(subreq);
}
2940*4882a593Smuzhiyun
/* Encrypt entry point: thin wrapper around the common chachapoly path. */
static int safexcel_aead_chachapoly_encrypt(struct aead_request *req)
{
	return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_ENCRYPT);
}
2945*4882a593Smuzhiyun
/* Decrypt entry point: thin wrapper around the common chachapoly path. */
static int safexcel_aead_chachapoly_decrypt(struct aead_request *req)
{
	return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_DECRYPT);
}
2950*4882a593Smuzhiyun
/*
 * Common AEAD init for algorithms that need a software fallback: runs the
 * standard init, then allocates a SW implementation of the same cra_name
 * and enlarges the request size so the request ctx can double as the
 * fallback subrequest.
 */
static int safexcel_aead_fallback_cra_init(struct crypto_tfm *tfm)
{
	struct crypto_aead *aead = __crypto_aead_cast(tfm);
	struct aead_alg *alg = crypto_aead_alg(aead);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);

	/* Allocate fallback implementation */
	ctx->fback = crypto_alloc_aead(alg->base.cra_name, 0,
				       CRYPTO_ALG_ASYNC |
				       CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->fback))
		return PTR_ERR(ctx->fback);

	/* Request ctx must fit both our own state and a fallback subreq */
	crypto_aead_set_reqsize(aead, max(sizeof(struct safexcel_cipher_req),
					  sizeof(struct aead_request) +
					  crypto_aead_reqsize(ctx->fback)));

	return 0;
}
2972*4882a593Smuzhiyun
safexcel_aead_chachapoly_cra_init(struct crypto_tfm * tfm)2973*4882a593Smuzhiyun static int safexcel_aead_chachapoly_cra_init(struct crypto_tfm *tfm)
2974*4882a593Smuzhiyun {
2975*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2976*4882a593Smuzhiyun
2977*4882a593Smuzhiyun safexcel_aead_fallback_cra_init(tfm);
2978*4882a593Smuzhiyun ctx->alg = SAFEXCEL_CHACHA20;
2979*4882a593Smuzhiyun ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32 |
2980*4882a593Smuzhiyun CONTEXT_CONTROL_CHACHA20_MODE_CALC_OTK;
2981*4882a593Smuzhiyun ctx->ctrinit = 0;
2982*4882a593Smuzhiyun ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_POLY1305;
2983*4882a593Smuzhiyun ctx->state_sz = 0; /* Precomputed by HW */
2984*4882a593Smuzhiyun return 0;
2985*4882a593Smuzhiyun }
2986*4882a593Smuzhiyun
safexcel_aead_fallback_cra_exit(struct crypto_tfm * tfm)2987*4882a593Smuzhiyun static void safexcel_aead_fallback_cra_exit(struct crypto_tfm *tfm)
2988*4882a593Smuzhiyun {
2989*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2990*4882a593Smuzhiyun
2991*4882a593Smuzhiyun crypto_free_aead(ctx->fback);
2992*4882a593Smuzhiyun safexcel_aead_cra_exit(tfm);
2993*4882a593Smuzhiyun }
2994*4882a593Smuzhiyun
2995*4882a593Smuzhiyun struct safexcel_alg_template safexcel_alg_chachapoly = {
2996*4882a593Smuzhiyun .type = SAFEXCEL_ALG_TYPE_AEAD,
2997*4882a593Smuzhiyun .algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
2998*4882a593Smuzhiyun .alg.aead = {
2999*4882a593Smuzhiyun .setkey = safexcel_aead_chachapoly_setkey,
3000*4882a593Smuzhiyun .setauthsize = safexcel_aead_chachapoly_setauthsize,
3001*4882a593Smuzhiyun .encrypt = safexcel_aead_chachapoly_encrypt,
3002*4882a593Smuzhiyun .decrypt = safexcel_aead_chachapoly_decrypt,
3003*4882a593Smuzhiyun .ivsize = CHACHAPOLY_IV_SIZE,
3004*4882a593Smuzhiyun .maxauthsize = POLY1305_DIGEST_SIZE,
3005*4882a593Smuzhiyun .base = {
3006*4882a593Smuzhiyun .cra_name = "rfc7539(chacha20,poly1305)",
3007*4882a593Smuzhiyun .cra_driver_name = "safexcel-chacha20-poly1305",
3008*4882a593Smuzhiyun /* +1 to put it above HW chacha + SW poly */
3009*4882a593Smuzhiyun .cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
3010*4882a593Smuzhiyun .cra_flags = CRYPTO_ALG_ASYNC |
3011*4882a593Smuzhiyun CRYPTO_ALG_ALLOCATES_MEMORY |
3012*4882a593Smuzhiyun CRYPTO_ALG_KERN_DRIVER_ONLY |
3013*4882a593Smuzhiyun CRYPTO_ALG_NEED_FALLBACK,
3014*4882a593Smuzhiyun .cra_blocksize = 1,
3015*4882a593Smuzhiyun .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3016*4882a593Smuzhiyun .cra_alignmask = 0,
3017*4882a593Smuzhiyun .cra_init = safexcel_aead_chachapoly_cra_init,
3018*4882a593Smuzhiyun .cra_exit = safexcel_aead_fallback_cra_exit,
3019*4882a593Smuzhiyun .cra_module = THIS_MODULE,
3020*4882a593Smuzhiyun },
3021*4882a593Smuzhiyun },
3022*4882a593Smuzhiyun };
3023*4882a593Smuzhiyun
safexcel_aead_chachapolyesp_cra_init(struct crypto_tfm * tfm)3024*4882a593Smuzhiyun static int safexcel_aead_chachapolyesp_cra_init(struct crypto_tfm *tfm)
3025*4882a593Smuzhiyun {
3026*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3027*4882a593Smuzhiyun int ret;
3028*4882a593Smuzhiyun
3029*4882a593Smuzhiyun ret = safexcel_aead_chachapoly_cra_init(tfm);
3030*4882a593Smuzhiyun ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
3031*4882a593Smuzhiyun ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
3032*4882a593Smuzhiyun return ret;
3033*4882a593Smuzhiyun }
3034*4882a593Smuzhiyun
3035*4882a593Smuzhiyun struct safexcel_alg_template safexcel_alg_chachapoly_esp = {
3036*4882a593Smuzhiyun .type = SAFEXCEL_ALG_TYPE_AEAD,
3037*4882a593Smuzhiyun .algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
3038*4882a593Smuzhiyun .alg.aead = {
3039*4882a593Smuzhiyun .setkey = safexcel_aead_chachapoly_setkey,
3040*4882a593Smuzhiyun .setauthsize = safexcel_aead_chachapoly_setauthsize,
3041*4882a593Smuzhiyun .encrypt = safexcel_aead_chachapoly_encrypt,
3042*4882a593Smuzhiyun .decrypt = safexcel_aead_chachapoly_decrypt,
3043*4882a593Smuzhiyun .ivsize = CHACHAPOLY_IV_SIZE - EIP197_AEAD_IPSEC_NONCE_SIZE,
3044*4882a593Smuzhiyun .maxauthsize = POLY1305_DIGEST_SIZE,
3045*4882a593Smuzhiyun .base = {
3046*4882a593Smuzhiyun .cra_name = "rfc7539esp(chacha20,poly1305)",
3047*4882a593Smuzhiyun .cra_driver_name = "safexcel-chacha20-poly1305-esp",
3048*4882a593Smuzhiyun /* +1 to put it above HW chacha + SW poly */
3049*4882a593Smuzhiyun .cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
3050*4882a593Smuzhiyun .cra_flags = CRYPTO_ALG_ASYNC |
3051*4882a593Smuzhiyun CRYPTO_ALG_ALLOCATES_MEMORY |
3052*4882a593Smuzhiyun CRYPTO_ALG_KERN_DRIVER_ONLY |
3053*4882a593Smuzhiyun CRYPTO_ALG_NEED_FALLBACK,
3054*4882a593Smuzhiyun .cra_blocksize = 1,
3055*4882a593Smuzhiyun .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3056*4882a593Smuzhiyun .cra_alignmask = 0,
3057*4882a593Smuzhiyun .cra_init = safexcel_aead_chachapolyesp_cra_init,
3058*4882a593Smuzhiyun .cra_exit = safexcel_aead_fallback_cra_exit,
3059*4882a593Smuzhiyun .cra_module = THIS_MODULE,
3060*4882a593Smuzhiyun },
3061*4882a593Smuzhiyun },
3062*4882a593Smuzhiyun };
3063*4882a593Smuzhiyun
safexcel_skcipher_sm4_setkey(struct crypto_skcipher * ctfm,const u8 * key,unsigned int len)3064*4882a593Smuzhiyun static int safexcel_skcipher_sm4_setkey(struct crypto_skcipher *ctfm,
3065*4882a593Smuzhiyun const u8 *key, unsigned int len)
3066*4882a593Smuzhiyun {
3067*4882a593Smuzhiyun struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
3068*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3069*4882a593Smuzhiyun struct safexcel_crypto_priv *priv = ctx->base.priv;
3070*4882a593Smuzhiyun
3071*4882a593Smuzhiyun if (len != SM4_KEY_SIZE)
3072*4882a593Smuzhiyun return -EINVAL;
3073*4882a593Smuzhiyun
3074*4882a593Smuzhiyun if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
3075*4882a593Smuzhiyun if (memcmp(ctx->key, key, SM4_KEY_SIZE))
3076*4882a593Smuzhiyun ctx->base.needs_inv = true;
3077*4882a593Smuzhiyun
3078*4882a593Smuzhiyun memcpy(ctx->key, key, SM4_KEY_SIZE);
3079*4882a593Smuzhiyun ctx->key_len = SM4_KEY_SIZE;
3080*4882a593Smuzhiyun
3081*4882a593Smuzhiyun return 0;
3082*4882a593Smuzhiyun }
3083*4882a593Smuzhiyun
safexcel_sm4_blk_encrypt(struct skcipher_request * req)3084*4882a593Smuzhiyun static int safexcel_sm4_blk_encrypt(struct skcipher_request *req)
3085*4882a593Smuzhiyun {
3086*4882a593Smuzhiyun /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3087*4882a593Smuzhiyun if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3088*4882a593Smuzhiyun return -EINVAL;
3089*4882a593Smuzhiyun else
3090*4882a593Smuzhiyun return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
3091*4882a593Smuzhiyun SAFEXCEL_ENCRYPT);
3092*4882a593Smuzhiyun }
3093*4882a593Smuzhiyun
safexcel_sm4_blk_decrypt(struct skcipher_request * req)3094*4882a593Smuzhiyun static int safexcel_sm4_blk_decrypt(struct skcipher_request *req)
3095*4882a593Smuzhiyun {
3096*4882a593Smuzhiyun /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3097*4882a593Smuzhiyun if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3098*4882a593Smuzhiyun return -EINVAL;
3099*4882a593Smuzhiyun else
3100*4882a593Smuzhiyun return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
3101*4882a593Smuzhiyun SAFEXCEL_DECRYPT);
3102*4882a593Smuzhiyun }
3103*4882a593Smuzhiyun
safexcel_skcipher_sm4_ecb_cra_init(struct crypto_tfm * tfm)3104*4882a593Smuzhiyun static int safexcel_skcipher_sm4_ecb_cra_init(struct crypto_tfm *tfm)
3105*4882a593Smuzhiyun {
3106*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3107*4882a593Smuzhiyun
3108*4882a593Smuzhiyun safexcel_skcipher_cra_init(tfm);
3109*4882a593Smuzhiyun ctx->alg = SAFEXCEL_SM4;
3110*4882a593Smuzhiyun ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
3111*4882a593Smuzhiyun ctx->blocksz = 0;
3112*4882a593Smuzhiyun ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
3113*4882a593Smuzhiyun return 0;
3114*4882a593Smuzhiyun }
3115*4882a593Smuzhiyun
3116*4882a593Smuzhiyun struct safexcel_alg_template safexcel_alg_ecb_sm4 = {
3117*4882a593Smuzhiyun .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3118*4882a593Smuzhiyun .algo_mask = SAFEXCEL_ALG_SM4,
3119*4882a593Smuzhiyun .alg.skcipher = {
3120*4882a593Smuzhiyun .setkey = safexcel_skcipher_sm4_setkey,
3121*4882a593Smuzhiyun .encrypt = safexcel_sm4_blk_encrypt,
3122*4882a593Smuzhiyun .decrypt = safexcel_sm4_blk_decrypt,
3123*4882a593Smuzhiyun .min_keysize = SM4_KEY_SIZE,
3124*4882a593Smuzhiyun .max_keysize = SM4_KEY_SIZE,
3125*4882a593Smuzhiyun .base = {
3126*4882a593Smuzhiyun .cra_name = "ecb(sm4)",
3127*4882a593Smuzhiyun .cra_driver_name = "safexcel-ecb-sm4",
3128*4882a593Smuzhiyun .cra_priority = SAFEXCEL_CRA_PRIORITY,
3129*4882a593Smuzhiyun .cra_flags = CRYPTO_ALG_ASYNC |
3130*4882a593Smuzhiyun CRYPTO_ALG_ALLOCATES_MEMORY |
3131*4882a593Smuzhiyun CRYPTO_ALG_KERN_DRIVER_ONLY,
3132*4882a593Smuzhiyun .cra_blocksize = SM4_BLOCK_SIZE,
3133*4882a593Smuzhiyun .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3134*4882a593Smuzhiyun .cra_alignmask = 0,
3135*4882a593Smuzhiyun .cra_init = safexcel_skcipher_sm4_ecb_cra_init,
3136*4882a593Smuzhiyun .cra_exit = safexcel_skcipher_cra_exit,
3137*4882a593Smuzhiyun .cra_module = THIS_MODULE,
3138*4882a593Smuzhiyun },
3139*4882a593Smuzhiyun },
3140*4882a593Smuzhiyun };
3141*4882a593Smuzhiyun
safexcel_skcipher_sm4_cbc_cra_init(struct crypto_tfm * tfm)3142*4882a593Smuzhiyun static int safexcel_skcipher_sm4_cbc_cra_init(struct crypto_tfm *tfm)
3143*4882a593Smuzhiyun {
3144*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3145*4882a593Smuzhiyun
3146*4882a593Smuzhiyun safexcel_skcipher_cra_init(tfm);
3147*4882a593Smuzhiyun ctx->alg = SAFEXCEL_SM4;
3148*4882a593Smuzhiyun ctx->blocksz = SM4_BLOCK_SIZE;
3149*4882a593Smuzhiyun ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
3150*4882a593Smuzhiyun return 0;
3151*4882a593Smuzhiyun }
3152*4882a593Smuzhiyun
3153*4882a593Smuzhiyun struct safexcel_alg_template safexcel_alg_cbc_sm4 = {
3154*4882a593Smuzhiyun .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3155*4882a593Smuzhiyun .algo_mask = SAFEXCEL_ALG_SM4,
3156*4882a593Smuzhiyun .alg.skcipher = {
3157*4882a593Smuzhiyun .setkey = safexcel_skcipher_sm4_setkey,
3158*4882a593Smuzhiyun .encrypt = safexcel_sm4_blk_encrypt,
3159*4882a593Smuzhiyun .decrypt = safexcel_sm4_blk_decrypt,
3160*4882a593Smuzhiyun .min_keysize = SM4_KEY_SIZE,
3161*4882a593Smuzhiyun .max_keysize = SM4_KEY_SIZE,
3162*4882a593Smuzhiyun .ivsize = SM4_BLOCK_SIZE,
3163*4882a593Smuzhiyun .base = {
3164*4882a593Smuzhiyun .cra_name = "cbc(sm4)",
3165*4882a593Smuzhiyun .cra_driver_name = "safexcel-cbc-sm4",
3166*4882a593Smuzhiyun .cra_priority = SAFEXCEL_CRA_PRIORITY,
3167*4882a593Smuzhiyun .cra_flags = CRYPTO_ALG_ASYNC |
3168*4882a593Smuzhiyun CRYPTO_ALG_ALLOCATES_MEMORY |
3169*4882a593Smuzhiyun CRYPTO_ALG_KERN_DRIVER_ONLY,
3170*4882a593Smuzhiyun .cra_blocksize = SM4_BLOCK_SIZE,
3171*4882a593Smuzhiyun .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3172*4882a593Smuzhiyun .cra_alignmask = 0,
3173*4882a593Smuzhiyun .cra_init = safexcel_skcipher_sm4_cbc_cra_init,
3174*4882a593Smuzhiyun .cra_exit = safexcel_skcipher_cra_exit,
3175*4882a593Smuzhiyun .cra_module = THIS_MODULE,
3176*4882a593Smuzhiyun },
3177*4882a593Smuzhiyun },
3178*4882a593Smuzhiyun };
3179*4882a593Smuzhiyun
safexcel_skcipher_sm4_ofb_cra_init(struct crypto_tfm * tfm)3180*4882a593Smuzhiyun static int safexcel_skcipher_sm4_ofb_cra_init(struct crypto_tfm *tfm)
3181*4882a593Smuzhiyun {
3182*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3183*4882a593Smuzhiyun
3184*4882a593Smuzhiyun safexcel_skcipher_cra_init(tfm);
3185*4882a593Smuzhiyun ctx->alg = SAFEXCEL_SM4;
3186*4882a593Smuzhiyun ctx->blocksz = SM4_BLOCK_SIZE;
3187*4882a593Smuzhiyun ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
3188*4882a593Smuzhiyun return 0;
3189*4882a593Smuzhiyun }
3190*4882a593Smuzhiyun
3191*4882a593Smuzhiyun struct safexcel_alg_template safexcel_alg_ofb_sm4 = {
3192*4882a593Smuzhiyun .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3193*4882a593Smuzhiyun .algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
3194*4882a593Smuzhiyun .alg.skcipher = {
3195*4882a593Smuzhiyun .setkey = safexcel_skcipher_sm4_setkey,
3196*4882a593Smuzhiyun .encrypt = safexcel_encrypt,
3197*4882a593Smuzhiyun .decrypt = safexcel_decrypt,
3198*4882a593Smuzhiyun .min_keysize = SM4_KEY_SIZE,
3199*4882a593Smuzhiyun .max_keysize = SM4_KEY_SIZE,
3200*4882a593Smuzhiyun .ivsize = SM4_BLOCK_SIZE,
3201*4882a593Smuzhiyun .base = {
3202*4882a593Smuzhiyun .cra_name = "ofb(sm4)",
3203*4882a593Smuzhiyun .cra_driver_name = "safexcel-ofb-sm4",
3204*4882a593Smuzhiyun .cra_priority = SAFEXCEL_CRA_PRIORITY,
3205*4882a593Smuzhiyun .cra_flags = CRYPTO_ALG_ASYNC |
3206*4882a593Smuzhiyun CRYPTO_ALG_ALLOCATES_MEMORY |
3207*4882a593Smuzhiyun CRYPTO_ALG_KERN_DRIVER_ONLY,
3208*4882a593Smuzhiyun .cra_blocksize = 1,
3209*4882a593Smuzhiyun .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3210*4882a593Smuzhiyun .cra_alignmask = 0,
3211*4882a593Smuzhiyun .cra_init = safexcel_skcipher_sm4_ofb_cra_init,
3212*4882a593Smuzhiyun .cra_exit = safexcel_skcipher_cra_exit,
3213*4882a593Smuzhiyun .cra_module = THIS_MODULE,
3214*4882a593Smuzhiyun },
3215*4882a593Smuzhiyun },
3216*4882a593Smuzhiyun };
3217*4882a593Smuzhiyun
safexcel_skcipher_sm4_cfb_cra_init(struct crypto_tfm * tfm)3218*4882a593Smuzhiyun static int safexcel_skcipher_sm4_cfb_cra_init(struct crypto_tfm *tfm)
3219*4882a593Smuzhiyun {
3220*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3221*4882a593Smuzhiyun
3222*4882a593Smuzhiyun safexcel_skcipher_cra_init(tfm);
3223*4882a593Smuzhiyun ctx->alg = SAFEXCEL_SM4;
3224*4882a593Smuzhiyun ctx->blocksz = SM4_BLOCK_SIZE;
3225*4882a593Smuzhiyun ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
3226*4882a593Smuzhiyun return 0;
3227*4882a593Smuzhiyun }
3228*4882a593Smuzhiyun
3229*4882a593Smuzhiyun struct safexcel_alg_template safexcel_alg_cfb_sm4 = {
3230*4882a593Smuzhiyun .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3231*4882a593Smuzhiyun .algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
3232*4882a593Smuzhiyun .alg.skcipher = {
3233*4882a593Smuzhiyun .setkey = safexcel_skcipher_sm4_setkey,
3234*4882a593Smuzhiyun .encrypt = safexcel_encrypt,
3235*4882a593Smuzhiyun .decrypt = safexcel_decrypt,
3236*4882a593Smuzhiyun .min_keysize = SM4_KEY_SIZE,
3237*4882a593Smuzhiyun .max_keysize = SM4_KEY_SIZE,
3238*4882a593Smuzhiyun .ivsize = SM4_BLOCK_SIZE,
3239*4882a593Smuzhiyun .base = {
3240*4882a593Smuzhiyun .cra_name = "cfb(sm4)",
3241*4882a593Smuzhiyun .cra_driver_name = "safexcel-cfb-sm4",
3242*4882a593Smuzhiyun .cra_priority = SAFEXCEL_CRA_PRIORITY,
3243*4882a593Smuzhiyun .cra_flags = CRYPTO_ALG_ASYNC |
3244*4882a593Smuzhiyun CRYPTO_ALG_ALLOCATES_MEMORY |
3245*4882a593Smuzhiyun CRYPTO_ALG_KERN_DRIVER_ONLY,
3246*4882a593Smuzhiyun .cra_blocksize = 1,
3247*4882a593Smuzhiyun .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3248*4882a593Smuzhiyun .cra_alignmask = 0,
3249*4882a593Smuzhiyun .cra_init = safexcel_skcipher_sm4_cfb_cra_init,
3250*4882a593Smuzhiyun .cra_exit = safexcel_skcipher_cra_exit,
3251*4882a593Smuzhiyun .cra_module = THIS_MODULE,
3252*4882a593Smuzhiyun },
3253*4882a593Smuzhiyun },
3254*4882a593Smuzhiyun };
3255*4882a593Smuzhiyun
safexcel_skcipher_sm4ctr_setkey(struct crypto_skcipher * ctfm,const u8 * key,unsigned int len)3256*4882a593Smuzhiyun static int safexcel_skcipher_sm4ctr_setkey(struct crypto_skcipher *ctfm,
3257*4882a593Smuzhiyun const u8 *key, unsigned int len)
3258*4882a593Smuzhiyun {
3259*4882a593Smuzhiyun struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
3260*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3261*4882a593Smuzhiyun
3262*4882a593Smuzhiyun /* last 4 bytes of key are the nonce! */
3263*4882a593Smuzhiyun ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
3264*4882a593Smuzhiyun /* exclude the nonce here */
3265*4882a593Smuzhiyun len -= CTR_RFC3686_NONCE_SIZE;
3266*4882a593Smuzhiyun
3267*4882a593Smuzhiyun return safexcel_skcipher_sm4_setkey(ctfm, key, len);
3268*4882a593Smuzhiyun }
3269*4882a593Smuzhiyun
safexcel_skcipher_sm4_ctr_cra_init(struct crypto_tfm * tfm)3270*4882a593Smuzhiyun static int safexcel_skcipher_sm4_ctr_cra_init(struct crypto_tfm *tfm)
3271*4882a593Smuzhiyun {
3272*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3273*4882a593Smuzhiyun
3274*4882a593Smuzhiyun safexcel_skcipher_cra_init(tfm);
3275*4882a593Smuzhiyun ctx->alg = SAFEXCEL_SM4;
3276*4882a593Smuzhiyun ctx->blocksz = SM4_BLOCK_SIZE;
3277*4882a593Smuzhiyun ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3278*4882a593Smuzhiyun return 0;
3279*4882a593Smuzhiyun }
3280*4882a593Smuzhiyun
3281*4882a593Smuzhiyun struct safexcel_alg_template safexcel_alg_ctr_sm4 = {
3282*4882a593Smuzhiyun .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3283*4882a593Smuzhiyun .algo_mask = SAFEXCEL_ALG_SM4,
3284*4882a593Smuzhiyun .alg.skcipher = {
3285*4882a593Smuzhiyun .setkey = safexcel_skcipher_sm4ctr_setkey,
3286*4882a593Smuzhiyun .encrypt = safexcel_encrypt,
3287*4882a593Smuzhiyun .decrypt = safexcel_decrypt,
3288*4882a593Smuzhiyun /* Add nonce size */
3289*4882a593Smuzhiyun .min_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
3290*4882a593Smuzhiyun .max_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
3291*4882a593Smuzhiyun .ivsize = CTR_RFC3686_IV_SIZE,
3292*4882a593Smuzhiyun .base = {
3293*4882a593Smuzhiyun .cra_name = "rfc3686(ctr(sm4))",
3294*4882a593Smuzhiyun .cra_driver_name = "safexcel-ctr-sm4",
3295*4882a593Smuzhiyun .cra_priority = SAFEXCEL_CRA_PRIORITY,
3296*4882a593Smuzhiyun .cra_flags = CRYPTO_ALG_ASYNC |
3297*4882a593Smuzhiyun CRYPTO_ALG_ALLOCATES_MEMORY |
3298*4882a593Smuzhiyun CRYPTO_ALG_KERN_DRIVER_ONLY,
3299*4882a593Smuzhiyun .cra_blocksize = 1,
3300*4882a593Smuzhiyun .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3301*4882a593Smuzhiyun .cra_alignmask = 0,
3302*4882a593Smuzhiyun .cra_init = safexcel_skcipher_sm4_ctr_cra_init,
3303*4882a593Smuzhiyun .cra_exit = safexcel_skcipher_cra_exit,
3304*4882a593Smuzhiyun .cra_module = THIS_MODULE,
3305*4882a593Smuzhiyun },
3306*4882a593Smuzhiyun },
3307*4882a593Smuzhiyun };
3308*4882a593Smuzhiyun
safexcel_aead_sm4_blk_encrypt(struct aead_request * req)3309*4882a593Smuzhiyun static int safexcel_aead_sm4_blk_encrypt(struct aead_request *req)
3310*4882a593Smuzhiyun {
3311*4882a593Smuzhiyun /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3312*4882a593Smuzhiyun if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3313*4882a593Smuzhiyun return -EINVAL;
3314*4882a593Smuzhiyun
3315*4882a593Smuzhiyun return safexcel_queue_req(&req->base, aead_request_ctx(req),
3316*4882a593Smuzhiyun SAFEXCEL_ENCRYPT);
3317*4882a593Smuzhiyun }
3318*4882a593Smuzhiyun
safexcel_aead_sm4_blk_decrypt(struct aead_request * req)3319*4882a593Smuzhiyun static int safexcel_aead_sm4_blk_decrypt(struct aead_request *req)
3320*4882a593Smuzhiyun {
3321*4882a593Smuzhiyun struct crypto_aead *tfm = crypto_aead_reqtfm(req);
3322*4882a593Smuzhiyun
3323*4882a593Smuzhiyun /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3324*4882a593Smuzhiyun if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
3325*4882a593Smuzhiyun return -EINVAL;
3326*4882a593Smuzhiyun
3327*4882a593Smuzhiyun return safexcel_queue_req(&req->base, aead_request_ctx(req),
3328*4882a593Smuzhiyun SAFEXCEL_DECRYPT);
3329*4882a593Smuzhiyun }
3330*4882a593Smuzhiyun
safexcel_aead_sm4cbc_sha1_cra_init(struct crypto_tfm * tfm)3331*4882a593Smuzhiyun static int safexcel_aead_sm4cbc_sha1_cra_init(struct crypto_tfm *tfm)
3332*4882a593Smuzhiyun {
3333*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3334*4882a593Smuzhiyun
3335*4882a593Smuzhiyun safexcel_aead_cra_init(tfm);
3336*4882a593Smuzhiyun ctx->alg = SAFEXCEL_SM4;
3337*4882a593Smuzhiyun ctx->blocksz = SM4_BLOCK_SIZE;
3338*4882a593Smuzhiyun ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
3339*4882a593Smuzhiyun ctx->state_sz = SHA1_DIGEST_SIZE;
3340*4882a593Smuzhiyun return 0;
3341*4882a593Smuzhiyun }
3342*4882a593Smuzhiyun
3343*4882a593Smuzhiyun struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_sm4 = {
3344*4882a593Smuzhiyun .type = SAFEXCEL_ALG_TYPE_AEAD,
3345*4882a593Smuzhiyun .algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
3346*4882a593Smuzhiyun .alg.aead = {
3347*4882a593Smuzhiyun .setkey = safexcel_aead_setkey,
3348*4882a593Smuzhiyun .encrypt = safexcel_aead_sm4_blk_encrypt,
3349*4882a593Smuzhiyun .decrypt = safexcel_aead_sm4_blk_decrypt,
3350*4882a593Smuzhiyun .ivsize = SM4_BLOCK_SIZE,
3351*4882a593Smuzhiyun .maxauthsize = SHA1_DIGEST_SIZE,
3352*4882a593Smuzhiyun .base = {
3353*4882a593Smuzhiyun .cra_name = "authenc(hmac(sha1),cbc(sm4))",
3354*4882a593Smuzhiyun .cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-sm4",
3355*4882a593Smuzhiyun .cra_priority = SAFEXCEL_CRA_PRIORITY,
3356*4882a593Smuzhiyun .cra_flags = CRYPTO_ALG_ASYNC |
3357*4882a593Smuzhiyun CRYPTO_ALG_ALLOCATES_MEMORY |
3358*4882a593Smuzhiyun CRYPTO_ALG_KERN_DRIVER_ONLY,
3359*4882a593Smuzhiyun .cra_blocksize = SM4_BLOCK_SIZE,
3360*4882a593Smuzhiyun .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3361*4882a593Smuzhiyun .cra_alignmask = 0,
3362*4882a593Smuzhiyun .cra_init = safexcel_aead_sm4cbc_sha1_cra_init,
3363*4882a593Smuzhiyun .cra_exit = safexcel_aead_cra_exit,
3364*4882a593Smuzhiyun .cra_module = THIS_MODULE,
3365*4882a593Smuzhiyun },
3366*4882a593Smuzhiyun },
3367*4882a593Smuzhiyun };
3368*4882a593Smuzhiyun
safexcel_aead_fallback_setkey(struct crypto_aead * ctfm,const u8 * key,unsigned int len)3369*4882a593Smuzhiyun static int safexcel_aead_fallback_setkey(struct crypto_aead *ctfm,
3370*4882a593Smuzhiyun const u8 *key, unsigned int len)
3371*4882a593Smuzhiyun {
3372*4882a593Smuzhiyun struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3373*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3374*4882a593Smuzhiyun
3375*4882a593Smuzhiyun /* Keep fallback cipher synchronized */
3376*4882a593Smuzhiyun return crypto_aead_setkey(ctx->fback, (u8 *)key, len) ?:
3377*4882a593Smuzhiyun safexcel_aead_setkey(ctfm, key, len);
3378*4882a593Smuzhiyun }
3379*4882a593Smuzhiyun
safexcel_aead_fallback_setauthsize(struct crypto_aead * ctfm,unsigned int authsize)3380*4882a593Smuzhiyun static int safexcel_aead_fallback_setauthsize(struct crypto_aead *ctfm,
3381*4882a593Smuzhiyun unsigned int authsize)
3382*4882a593Smuzhiyun {
3383*4882a593Smuzhiyun struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3384*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3385*4882a593Smuzhiyun
3386*4882a593Smuzhiyun /* Keep fallback cipher synchronized */
3387*4882a593Smuzhiyun return crypto_aead_setauthsize(ctx->fback, authsize);
3388*4882a593Smuzhiyun }
3389*4882a593Smuzhiyun
safexcel_aead_fallback_crypt(struct aead_request * req,enum safexcel_cipher_direction dir)3390*4882a593Smuzhiyun static int safexcel_aead_fallback_crypt(struct aead_request *req,
3391*4882a593Smuzhiyun enum safexcel_cipher_direction dir)
3392*4882a593Smuzhiyun {
3393*4882a593Smuzhiyun struct crypto_aead *aead = crypto_aead_reqtfm(req);
3394*4882a593Smuzhiyun struct crypto_tfm *tfm = crypto_aead_tfm(aead);
3395*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3396*4882a593Smuzhiyun struct aead_request *subreq = aead_request_ctx(req);
3397*4882a593Smuzhiyun
3398*4882a593Smuzhiyun aead_request_set_tfm(subreq, ctx->fback);
3399*4882a593Smuzhiyun aead_request_set_callback(subreq, req->base.flags, req->base.complete,
3400*4882a593Smuzhiyun req->base.data);
3401*4882a593Smuzhiyun aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
3402*4882a593Smuzhiyun req->iv);
3403*4882a593Smuzhiyun aead_request_set_ad(subreq, req->assoclen);
3404*4882a593Smuzhiyun
3405*4882a593Smuzhiyun return (dir == SAFEXCEL_ENCRYPT) ?
3406*4882a593Smuzhiyun crypto_aead_encrypt(subreq) :
3407*4882a593Smuzhiyun crypto_aead_decrypt(subreq);
3408*4882a593Smuzhiyun }
3409*4882a593Smuzhiyun
safexcel_aead_sm4cbc_sm3_encrypt(struct aead_request * req)3410*4882a593Smuzhiyun static int safexcel_aead_sm4cbc_sm3_encrypt(struct aead_request *req)
3411*4882a593Smuzhiyun {
3412*4882a593Smuzhiyun struct safexcel_cipher_req *creq = aead_request_ctx(req);
3413*4882a593Smuzhiyun
3414*4882a593Smuzhiyun /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3415*4882a593Smuzhiyun if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3416*4882a593Smuzhiyun return -EINVAL;
3417*4882a593Smuzhiyun else if (req->cryptlen || req->assoclen) /* If input length > 0 only */
3418*4882a593Smuzhiyun return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
3419*4882a593Smuzhiyun
3420*4882a593Smuzhiyun /* HW cannot do full (AAD+payload) zero length, use fallback */
3421*4882a593Smuzhiyun return safexcel_aead_fallback_crypt(req, SAFEXCEL_ENCRYPT);
3422*4882a593Smuzhiyun }
3423*4882a593Smuzhiyun
safexcel_aead_sm4cbc_sm3_decrypt(struct aead_request * req)3424*4882a593Smuzhiyun static int safexcel_aead_sm4cbc_sm3_decrypt(struct aead_request *req)
3425*4882a593Smuzhiyun {
3426*4882a593Smuzhiyun struct safexcel_cipher_req *creq = aead_request_ctx(req);
3427*4882a593Smuzhiyun struct crypto_aead *tfm = crypto_aead_reqtfm(req);
3428*4882a593Smuzhiyun
3429*4882a593Smuzhiyun /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3430*4882a593Smuzhiyun if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
3431*4882a593Smuzhiyun return -EINVAL;
3432*4882a593Smuzhiyun else if (req->cryptlen > crypto_aead_authsize(tfm) || req->assoclen)
3433*4882a593Smuzhiyun /* If input length > 0 only */
3434*4882a593Smuzhiyun return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
3435*4882a593Smuzhiyun
3436*4882a593Smuzhiyun /* HW cannot do full (AAD+payload) zero length, use fallback */
3437*4882a593Smuzhiyun return safexcel_aead_fallback_crypt(req, SAFEXCEL_DECRYPT);
3438*4882a593Smuzhiyun }
3439*4882a593Smuzhiyun
safexcel_aead_sm4cbc_sm3_cra_init(struct crypto_tfm * tfm)3440*4882a593Smuzhiyun static int safexcel_aead_sm4cbc_sm3_cra_init(struct crypto_tfm *tfm)
3441*4882a593Smuzhiyun {
3442*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3443*4882a593Smuzhiyun
3444*4882a593Smuzhiyun safexcel_aead_fallback_cra_init(tfm);
3445*4882a593Smuzhiyun ctx->alg = SAFEXCEL_SM4;
3446*4882a593Smuzhiyun ctx->blocksz = SM4_BLOCK_SIZE;
3447*4882a593Smuzhiyun ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
3448*4882a593Smuzhiyun ctx->state_sz = SM3_DIGEST_SIZE;
3449*4882a593Smuzhiyun return 0;
3450*4882a593Smuzhiyun }
3451*4882a593Smuzhiyun
3452*4882a593Smuzhiyun struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_cbc_sm4 = {
3453*4882a593Smuzhiyun .type = SAFEXCEL_ALG_TYPE_AEAD,
3454*4882a593Smuzhiyun .algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
3455*4882a593Smuzhiyun .alg.aead = {
3456*4882a593Smuzhiyun .setkey = safexcel_aead_fallback_setkey,
3457*4882a593Smuzhiyun .setauthsize = safexcel_aead_fallback_setauthsize,
3458*4882a593Smuzhiyun .encrypt = safexcel_aead_sm4cbc_sm3_encrypt,
3459*4882a593Smuzhiyun .decrypt = safexcel_aead_sm4cbc_sm3_decrypt,
3460*4882a593Smuzhiyun .ivsize = SM4_BLOCK_SIZE,
3461*4882a593Smuzhiyun .maxauthsize = SM3_DIGEST_SIZE,
3462*4882a593Smuzhiyun .base = {
3463*4882a593Smuzhiyun .cra_name = "authenc(hmac(sm3),cbc(sm4))",
3464*4882a593Smuzhiyun .cra_driver_name = "safexcel-authenc-hmac-sm3-cbc-sm4",
3465*4882a593Smuzhiyun .cra_priority = SAFEXCEL_CRA_PRIORITY,
3466*4882a593Smuzhiyun .cra_flags = CRYPTO_ALG_ASYNC |
3467*4882a593Smuzhiyun CRYPTO_ALG_ALLOCATES_MEMORY |
3468*4882a593Smuzhiyun CRYPTO_ALG_KERN_DRIVER_ONLY |
3469*4882a593Smuzhiyun CRYPTO_ALG_NEED_FALLBACK,
3470*4882a593Smuzhiyun .cra_blocksize = SM4_BLOCK_SIZE,
3471*4882a593Smuzhiyun .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3472*4882a593Smuzhiyun .cra_alignmask = 0,
3473*4882a593Smuzhiyun .cra_init = safexcel_aead_sm4cbc_sm3_cra_init,
3474*4882a593Smuzhiyun .cra_exit = safexcel_aead_fallback_cra_exit,
3475*4882a593Smuzhiyun .cra_module = THIS_MODULE,
3476*4882a593Smuzhiyun },
3477*4882a593Smuzhiyun },
3478*4882a593Smuzhiyun };
3479*4882a593Smuzhiyun
safexcel_aead_sm4ctr_sha1_cra_init(struct crypto_tfm * tfm)3480*4882a593Smuzhiyun static int safexcel_aead_sm4ctr_sha1_cra_init(struct crypto_tfm *tfm)
3481*4882a593Smuzhiyun {
3482*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3483*4882a593Smuzhiyun
3484*4882a593Smuzhiyun safexcel_aead_sm4cbc_sha1_cra_init(tfm);
3485*4882a593Smuzhiyun ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3486*4882a593Smuzhiyun return 0;
3487*4882a593Smuzhiyun }
3488*4882a593Smuzhiyun
3489*4882a593Smuzhiyun struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_sm4 = {
3490*4882a593Smuzhiyun .type = SAFEXCEL_ALG_TYPE_AEAD,
3491*4882a593Smuzhiyun .algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
3492*4882a593Smuzhiyun .alg.aead = {
3493*4882a593Smuzhiyun .setkey = safexcel_aead_setkey,
3494*4882a593Smuzhiyun .encrypt = safexcel_aead_encrypt,
3495*4882a593Smuzhiyun .decrypt = safexcel_aead_decrypt,
3496*4882a593Smuzhiyun .ivsize = CTR_RFC3686_IV_SIZE,
3497*4882a593Smuzhiyun .maxauthsize = SHA1_DIGEST_SIZE,
3498*4882a593Smuzhiyun .base = {
3499*4882a593Smuzhiyun .cra_name = "authenc(hmac(sha1),rfc3686(ctr(sm4)))",
3500*4882a593Smuzhiyun .cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-sm4",
3501*4882a593Smuzhiyun .cra_priority = SAFEXCEL_CRA_PRIORITY,
3502*4882a593Smuzhiyun .cra_flags = CRYPTO_ALG_ASYNC |
3503*4882a593Smuzhiyun CRYPTO_ALG_ALLOCATES_MEMORY |
3504*4882a593Smuzhiyun CRYPTO_ALG_KERN_DRIVER_ONLY,
3505*4882a593Smuzhiyun .cra_blocksize = 1,
3506*4882a593Smuzhiyun .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3507*4882a593Smuzhiyun .cra_alignmask = 0,
3508*4882a593Smuzhiyun .cra_init = safexcel_aead_sm4ctr_sha1_cra_init,
3509*4882a593Smuzhiyun .cra_exit = safexcel_aead_cra_exit,
3510*4882a593Smuzhiyun .cra_module = THIS_MODULE,
3511*4882a593Smuzhiyun },
3512*4882a593Smuzhiyun },
3513*4882a593Smuzhiyun };
3514*4882a593Smuzhiyun
safexcel_aead_sm4ctr_sm3_cra_init(struct crypto_tfm * tfm)3515*4882a593Smuzhiyun static int safexcel_aead_sm4ctr_sm3_cra_init(struct crypto_tfm *tfm)
3516*4882a593Smuzhiyun {
3517*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3518*4882a593Smuzhiyun
3519*4882a593Smuzhiyun safexcel_aead_sm4cbc_sm3_cra_init(tfm);
3520*4882a593Smuzhiyun ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3521*4882a593Smuzhiyun return 0;
3522*4882a593Smuzhiyun }
3523*4882a593Smuzhiyun
3524*4882a593Smuzhiyun struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_ctr_sm4 = {
3525*4882a593Smuzhiyun .type = SAFEXCEL_ALG_TYPE_AEAD,
3526*4882a593Smuzhiyun .algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
3527*4882a593Smuzhiyun .alg.aead = {
3528*4882a593Smuzhiyun .setkey = safexcel_aead_setkey,
3529*4882a593Smuzhiyun .encrypt = safexcel_aead_encrypt,
3530*4882a593Smuzhiyun .decrypt = safexcel_aead_decrypt,
3531*4882a593Smuzhiyun .ivsize = CTR_RFC3686_IV_SIZE,
3532*4882a593Smuzhiyun .maxauthsize = SM3_DIGEST_SIZE,
3533*4882a593Smuzhiyun .base = {
3534*4882a593Smuzhiyun .cra_name = "authenc(hmac(sm3),rfc3686(ctr(sm4)))",
3535*4882a593Smuzhiyun .cra_driver_name = "safexcel-authenc-hmac-sm3-ctr-sm4",
3536*4882a593Smuzhiyun .cra_priority = SAFEXCEL_CRA_PRIORITY,
3537*4882a593Smuzhiyun .cra_flags = CRYPTO_ALG_ASYNC |
3538*4882a593Smuzhiyun CRYPTO_ALG_ALLOCATES_MEMORY |
3539*4882a593Smuzhiyun CRYPTO_ALG_KERN_DRIVER_ONLY,
3540*4882a593Smuzhiyun .cra_blocksize = 1,
3541*4882a593Smuzhiyun .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3542*4882a593Smuzhiyun .cra_alignmask = 0,
3543*4882a593Smuzhiyun .cra_init = safexcel_aead_sm4ctr_sm3_cra_init,
3544*4882a593Smuzhiyun .cra_exit = safexcel_aead_cra_exit,
3545*4882a593Smuzhiyun .cra_module = THIS_MODULE,
3546*4882a593Smuzhiyun },
3547*4882a593Smuzhiyun },
3548*4882a593Smuzhiyun };
3549*4882a593Smuzhiyun
safexcel_rfc4106_gcm_setkey(struct crypto_aead * ctfm,const u8 * key,unsigned int len)3550*4882a593Smuzhiyun static int safexcel_rfc4106_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
3551*4882a593Smuzhiyun unsigned int len)
3552*4882a593Smuzhiyun {
3553*4882a593Smuzhiyun struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3554*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3555*4882a593Smuzhiyun
3556*4882a593Smuzhiyun /* last 4 bytes of key are the nonce! */
3557*4882a593Smuzhiyun ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
3558*4882a593Smuzhiyun
3559*4882a593Smuzhiyun len -= CTR_RFC3686_NONCE_SIZE;
3560*4882a593Smuzhiyun return safexcel_aead_gcm_setkey(ctfm, key, len);
3561*4882a593Smuzhiyun }
3562*4882a593Smuzhiyun
/* Validate the RFC4106 authsize; the generic helper accepts 8/12/16. */
static int safexcel_rfc4106_gcm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	return crypto_rfc4106_check_authsize(authsize);
}
3568*4882a593Smuzhiyun
safexcel_rfc4106_encrypt(struct aead_request * req)3569*4882a593Smuzhiyun static int safexcel_rfc4106_encrypt(struct aead_request *req)
3570*4882a593Smuzhiyun {
3571*4882a593Smuzhiyun return crypto_ipsec_check_assoclen(req->assoclen) ?:
3572*4882a593Smuzhiyun safexcel_aead_encrypt(req);
3573*4882a593Smuzhiyun }
3574*4882a593Smuzhiyun
safexcel_rfc4106_decrypt(struct aead_request * req)3575*4882a593Smuzhiyun static int safexcel_rfc4106_decrypt(struct aead_request *req)
3576*4882a593Smuzhiyun {
3577*4882a593Smuzhiyun return crypto_ipsec_check_assoclen(req->assoclen) ?:
3578*4882a593Smuzhiyun safexcel_aead_decrypt(req);
3579*4882a593Smuzhiyun }
3580*4882a593Smuzhiyun
safexcel_rfc4106_gcm_cra_init(struct crypto_tfm * tfm)3581*4882a593Smuzhiyun static int safexcel_rfc4106_gcm_cra_init(struct crypto_tfm *tfm)
3582*4882a593Smuzhiyun {
3583*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3584*4882a593Smuzhiyun int ret;
3585*4882a593Smuzhiyun
3586*4882a593Smuzhiyun ret = safexcel_aead_gcm_cra_init(tfm);
3587*4882a593Smuzhiyun ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
3588*4882a593Smuzhiyun ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
3589*4882a593Smuzhiyun return ret;
3590*4882a593Smuzhiyun }
3591*4882a593Smuzhiyun
3592*4882a593Smuzhiyun struct safexcel_alg_template safexcel_alg_rfc4106_gcm = {
3593*4882a593Smuzhiyun .type = SAFEXCEL_ALG_TYPE_AEAD,
3594*4882a593Smuzhiyun .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
3595*4882a593Smuzhiyun .alg.aead = {
3596*4882a593Smuzhiyun .setkey = safexcel_rfc4106_gcm_setkey,
3597*4882a593Smuzhiyun .setauthsize = safexcel_rfc4106_gcm_setauthsize,
3598*4882a593Smuzhiyun .encrypt = safexcel_rfc4106_encrypt,
3599*4882a593Smuzhiyun .decrypt = safexcel_rfc4106_decrypt,
3600*4882a593Smuzhiyun .ivsize = GCM_RFC4106_IV_SIZE,
3601*4882a593Smuzhiyun .maxauthsize = GHASH_DIGEST_SIZE,
3602*4882a593Smuzhiyun .base = {
3603*4882a593Smuzhiyun .cra_name = "rfc4106(gcm(aes))",
3604*4882a593Smuzhiyun .cra_driver_name = "safexcel-rfc4106-gcm-aes",
3605*4882a593Smuzhiyun .cra_priority = SAFEXCEL_CRA_PRIORITY,
3606*4882a593Smuzhiyun .cra_flags = CRYPTO_ALG_ASYNC |
3607*4882a593Smuzhiyun CRYPTO_ALG_ALLOCATES_MEMORY |
3608*4882a593Smuzhiyun CRYPTO_ALG_KERN_DRIVER_ONLY,
3609*4882a593Smuzhiyun .cra_blocksize = 1,
3610*4882a593Smuzhiyun .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3611*4882a593Smuzhiyun .cra_alignmask = 0,
3612*4882a593Smuzhiyun .cra_init = safexcel_rfc4106_gcm_cra_init,
3613*4882a593Smuzhiyun .cra_exit = safexcel_aead_gcm_cra_exit,
3614*4882a593Smuzhiyun },
3615*4882a593Smuzhiyun },
3616*4882a593Smuzhiyun };
3617*4882a593Smuzhiyun
safexcel_rfc4543_gcm_setauthsize(struct crypto_aead * tfm,unsigned int authsize)3618*4882a593Smuzhiyun static int safexcel_rfc4543_gcm_setauthsize(struct crypto_aead *tfm,
3619*4882a593Smuzhiyun unsigned int authsize)
3620*4882a593Smuzhiyun {
3621*4882a593Smuzhiyun if (authsize != GHASH_DIGEST_SIZE)
3622*4882a593Smuzhiyun return -EINVAL;
3623*4882a593Smuzhiyun
3624*4882a593Smuzhiyun return 0;
3625*4882a593Smuzhiyun }
3626*4882a593Smuzhiyun
safexcel_rfc4543_gcm_cra_init(struct crypto_tfm * tfm)3627*4882a593Smuzhiyun static int safexcel_rfc4543_gcm_cra_init(struct crypto_tfm *tfm)
3628*4882a593Smuzhiyun {
3629*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3630*4882a593Smuzhiyun int ret;
3631*4882a593Smuzhiyun
3632*4882a593Smuzhiyun ret = safexcel_aead_gcm_cra_init(tfm);
3633*4882a593Smuzhiyun ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP_GMAC;
3634*4882a593Smuzhiyun return ret;
3635*4882a593Smuzhiyun }
3636*4882a593Smuzhiyun
3637*4882a593Smuzhiyun struct safexcel_alg_template safexcel_alg_rfc4543_gcm = {
3638*4882a593Smuzhiyun .type = SAFEXCEL_ALG_TYPE_AEAD,
3639*4882a593Smuzhiyun .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
3640*4882a593Smuzhiyun .alg.aead = {
3641*4882a593Smuzhiyun .setkey = safexcel_rfc4106_gcm_setkey,
3642*4882a593Smuzhiyun .setauthsize = safexcel_rfc4543_gcm_setauthsize,
3643*4882a593Smuzhiyun .encrypt = safexcel_rfc4106_encrypt,
3644*4882a593Smuzhiyun .decrypt = safexcel_rfc4106_decrypt,
3645*4882a593Smuzhiyun .ivsize = GCM_RFC4543_IV_SIZE,
3646*4882a593Smuzhiyun .maxauthsize = GHASH_DIGEST_SIZE,
3647*4882a593Smuzhiyun .base = {
3648*4882a593Smuzhiyun .cra_name = "rfc4543(gcm(aes))",
3649*4882a593Smuzhiyun .cra_driver_name = "safexcel-rfc4543-gcm-aes",
3650*4882a593Smuzhiyun .cra_priority = SAFEXCEL_CRA_PRIORITY,
3651*4882a593Smuzhiyun .cra_flags = CRYPTO_ALG_ASYNC |
3652*4882a593Smuzhiyun CRYPTO_ALG_ALLOCATES_MEMORY |
3653*4882a593Smuzhiyun CRYPTO_ALG_KERN_DRIVER_ONLY,
3654*4882a593Smuzhiyun .cra_blocksize = 1,
3655*4882a593Smuzhiyun .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3656*4882a593Smuzhiyun .cra_alignmask = 0,
3657*4882a593Smuzhiyun .cra_init = safexcel_rfc4543_gcm_cra_init,
3658*4882a593Smuzhiyun .cra_exit = safexcel_aead_gcm_cra_exit,
3659*4882a593Smuzhiyun },
3660*4882a593Smuzhiyun },
3661*4882a593Smuzhiyun };
3662*4882a593Smuzhiyun
/*
 * Set the key for rfc4309(ccm(aes)): the trailing 3 bytes of the key
 * material are the RFC4309 salt. The CCM L/nonce prefix is packed
 * byte-wise into ctx->nonce, so no unaligned-access concern here.
 */
static int safexcel_rfc4309_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
				       unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* First byte of the nonce = L = always 3 for RFC4309 (4 byte ctr) */
	*(u8 *)&ctx->nonce = EIP197_AEAD_IPSEC_COUNTER_SIZE - 1;
	/* last 3 bytes of key are the nonce! */
	memcpy((u8 *)&ctx->nonce + 1, key + len -
	       EIP197_AEAD_IPSEC_CCM_NONCE_SIZE,
	       EIP197_AEAD_IPSEC_CCM_NONCE_SIZE);

	len -= EIP197_AEAD_IPSEC_CCM_NONCE_SIZE;
	return safexcel_aead_ccm_setkey(ctfm, key, len);
}
3679*4882a593Smuzhiyun
/*
 * RFC4309 restricts the ICV to 8, 12 or 16 bytes (same check as the
 * generic crypto/ccm.c implementation).
 */
static int safexcel_rfc4309_ccm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	if (authsize != 8 && authsize != 12 && authsize != 16)
		return -EINVAL;

	return 0;
}
3695*4882a593Smuzhiyun
safexcel_rfc4309_ccm_encrypt(struct aead_request * req)3696*4882a593Smuzhiyun static int safexcel_rfc4309_ccm_encrypt(struct aead_request *req)
3697*4882a593Smuzhiyun {
3698*4882a593Smuzhiyun struct safexcel_cipher_req *creq = aead_request_ctx(req);
3699*4882a593Smuzhiyun
3700*4882a593Smuzhiyun /* Borrowed from crypto/ccm.c */
3701*4882a593Smuzhiyun if (req->assoclen != 16 && req->assoclen != 20)
3702*4882a593Smuzhiyun return -EINVAL;
3703*4882a593Smuzhiyun
3704*4882a593Smuzhiyun return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
3705*4882a593Smuzhiyun }
3706*4882a593Smuzhiyun
safexcel_rfc4309_ccm_decrypt(struct aead_request * req)3707*4882a593Smuzhiyun static int safexcel_rfc4309_ccm_decrypt(struct aead_request *req)
3708*4882a593Smuzhiyun {
3709*4882a593Smuzhiyun struct safexcel_cipher_req *creq = aead_request_ctx(req);
3710*4882a593Smuzhiyun
3711*4882a593Smuzhiyun /* Borrowed from crypto/ccm.c */
3712*4882a593Smuzhiyun if (req->assoclen != 16 && req->assoclen != 20)
3713*4882a593Smuzhiyun return -EINVAL;
3714*4882a593Smuzhiyun
3715*4882a593Smuzhiyun return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
3716*4882a593Smuzhiyun }
3717*4882a593Smuzhiyun
safexcel_rfc4309_ccm_cra_init(struct crypto_tfm * tfm)3718*4882a593Smuzhiyun static int safexcel_rfc4309_ccm_cra_init(struct crypto_tfm *tfm)
3719*4882a593Smuzhiyun {
3720*4882a593Smuzhiyun struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3721*4882a593Smuzhiyun int ret;
3722*4882a593Smuzhiyun
3723*4882a593Smuzhiyun ret = safexcel_aead_ccm_cra_init(tfm);
3724*4882a593Smuzhiyun ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
3725*4882a593Smuzhiyun ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
3726*4882a593Smuzhiyun return ret;
3727*4882a593Smuzhiyun }
3728*4882a593Smuzhiyun
3729*4882a593Smuzhiyun struct safexcel_alg_template safexcel_alg_rfc4309_ccm = {
3730*4882a593Smuzhiyun .type = SAFEXCEL_ALG_TYPE_AEAD,
3731*4882a593Smuzhiyun .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
3732*4882a593Smuzhiyun .alg.aead = {
3733*4882a593Smuzhiyun .setkey = safexcel_rfc4309_ccm_setkey,
3734*4882a593Smuzhiyun .setauthsize = safexcel_rfc4309_ccm_setauthsize,
3735*4882a593Smuzhiyun .encrypt = safexcel_rfc4309_ccm_encrypt,
3736*4882a593Smuzhiyun .decrypt = safexcel_rfc4309_ccm_decrypt,
3737*4882a593Smuzhiyun .ivsize = EIP197_AEAD_IPSEC_IV_SIZE,
3738*4882a593Smuzhiyun .maxauthsize = AES_BLOCK_SIZE,
3739*4882a593Smuzhiyun .base = {
3740*4882a593Smuzhiyun .cra_name = "rfc4309(ccm(aes))",
3741*4882a593Smuzhiyun .cra_driver_name = "safexcel-rfc4309-ccm-aes",
3742*4882a593Smuzhiyun .cra_priority = SAFEXCEL_CRA_PRIORITY,
3743*4882a593Smuzhiyun .cra_flags = CRYPTO_ALG_ASYNC |
3744*4882a593Smuzhiyun CRYPTO_ALG_ALLOCATES_MEMORY |
3745*4882a593Smuzhiyun CRYPTO_ALG_KERN_DRIVER_ONLY,
3746*4882a593Smuzhiyun .cra_blocksize = 1,
3747*4882a593Smuzhiyun .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3748*4882a593Smuzhiyun .cra_alignmask = 0,
3749*4882a593Smuzhiyun .cra_init = safexcel_rfc4309_ccm_cra_init,
3750*4882a593Smuzhiyun .cra_exit = safexcel_aead_cra_exit,
3751*4882a593Smuzhiyun .cra_module = THIS_MODULE,
3752*4882a593Smuzhiyun },
3753*4882a593Smuzhiyun },
3754*4882a593Smuzhiyun };
3755