/*
 * This file is part of the Chelsio T6 Crypto driver for Linux.
 *
 * Copyright (c) 2003-2016 Chelsio Communications, Inc. All rights reserved.
 *
 * This software is available to you under a choice of one of two
 * licenses.  You may choose to be licensed under the terms of the GNU
 * General Public License (GPL) Version 2, available from the file
 * COPYING in the main directory of this source tree, or the
 * OpenIB.org BSD license below:
 *
 *     Redistribution and use in source and binary forms, with or
 *     without modification, are permitted provided that the following
 *     conditions are met:
 *
 *      - Redistributions of source code must retain the above
 *        copyright notice, this list of conditions and the following
 *        disclaimer.
 *
 *      - Redistributions in binary form must reproduce the above
 *        copyright notice, this list of conditions and the following
 *        disclaimer in the documentation and/or other materials
 *        provided with the distribution.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
 * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
 * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
 * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 *
 * Written and Maintained by:
 *	Manoj Malviya (manojmalviya@chelsio.com)
 *	Atul Gupta (atul.gupta@chelsio.com)
 *	Jitendra Lulla (jlulla@chelsio.com)
 *	Yeshaswi M R Gowda (yeshaswi@chelsio.com)
 *	Harsh Jain (harsh@chelsio.com)
 */

#define pr_fmt(fmt) "chcr:" fmt

#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/crypto.h>
#include <linux/skbuff.h>
#include <linux/rtnetlink.h>
#include <linux/highmem.h>
#include <linux/scatterlist.h>

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <crypto/hash.h>
#include <crypto/gcm.h>
#include <crypto/sha.h>
#include <crypto/authenc.h>
#include <crypto/ctr.h>
#include <crypto/gf128mul.h>
#include <crypto/internal/aead.h>
#include <crypto/null.h>
#include <crypto/internal/skcipher.h>
#include <crypto/aead.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>

#include "t4fw_api.h"
#include "t4_msg.h"
#include "chcr_core.h"
#include "chcr_algo.h"
#include "chcr_crypto.h"

#define IV AES_BLOCK_SIZE

static unsigned int sgl_ent_len[] = {
	0, 0, 16, 24, 40, 48, 64, 72, 88,
	96, 112, 120, 136, 144, 160, 168, 184,
	192, 208, 216, 232, 240, 256, 264, 280,
	288, 304, 312, 328, 336, 352, 360, 376
};

static unsigned int dsgl_ent_len[] = {
	0, 32, 32, 48, 48, 64, 64, 80, 80,
	112, 112, 128, 128, 144, 144, 160, 160,
	192, 192, 208, 208, 224, 224, 240, 240,
	272, 272, 288, 288, 304, 304, 320, 320
};

static u32 round_constant[11] = {
	0x01000000, 0x02000000, 0x04000000, 0x08000000,
	0x10000000, 0x20000000, 0x40000000, 0x80000000,
	0x1B000000, 0x36000000, 0x6C000000
};
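
/*
 * round_constant[] holds the AES key-schedule round constants (Rcon)
 * in the most-significant byte of each word, the form consumed by
 * get_aes_decrypt_key() below: round 1 uses Rcon = 0x01, stored as
 * 0x01000000, and so on.
 */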

static int chcr_handle_cipher_resp(struct skcipher_request *req,
				   unsigned char *input, int err);

static inline struct chcr_aead_ctx *AEAD_CTX(struct chcr_context *ctx)
{
	return ctx->crypto_ctx->aeadctx;
}

static inline struct ablk_ctx *ABLK_CTX(struct chcr_context *ctx)
{
	return ctx->crypto_ctx->ablkctx;
}

static inline struct hmac_ctx *HMAC_CTX(struct chcr_context *ctx)
{
	return ctx->crypto_ctx->hmacctx;
}

static inline struct chcr_gcm_ctx *GCM_CTX(struct chcr_aead_ctx *gctx)
{
	return gctx->ctx->gcm;
}

static inline struct chcr_authenc_ctx *AUTHENC_CTX(struct chcr_aead_ctx *gctx)
{
	return gctx->ctx->authenc;
}

static inline struct uld_ctx *ULD_CTX(struct chcr_context *ctx)
{
	return container_of(ctx->dev, struct uld_ctx, dev);
}

static inline int is_ofld_imm(const struct sk_buff *skb)
{
	return (skb->len <= SGE_MAX_WR_LEN);
}

static inline void chcr_init_hctx_per_wr(struct chcr_ahash_req_ctx *reqctx)
{
	memset(&reqctx->hctx_wr, 0, sizeof(struct chcr_hctx_per_wr));
}

static int sg_nents_xlen(struct scatterlist *sg, unsigned int reqlen,
			 unsigned int entlen,
			 unsigned int skip)
{
	int nents = 0;
	unsigned int less;
	unsigned int skip_len = 0;

	while (sg && skip) {
		if (sg_dma_len(sg) <= skip) {
			skip -= sg_dma_len(sg);
			skip_len = 0;
			sg = sg_next(sg);
		} else {
			skip_len = skip;
			skip = 0;
		}
	}

	while (sg && reqlen) {
		less = min(reqlen, sg_dma_len(sg) - skip_len);
		nents += DIV_ROUND_UP(less, entlen);
		reqlen -= less;
		skip_len = 0;
		sg = sg_next(sg);
	}
	return nents;
}
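
/*
 * Illustrative example (not from the original source): a single
 * DMA-mapped 10 KB segment with skip = 0 and entlen = CHCR_SRC_SG_SIZE
 * makes sg_nents_xlen() return DIV_ROUND_UP(10240, CHCR_SRC_SG_SIZE),
 * i.e. the number of hardware SGL slots needed after splitting the
 * segment to the per-entry size limit.
 */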

static inline int get_aead_subtype(struct crypto_aead *aead)
{
	struct aead_alg *alg = crypto_aead_alg(aead);
	struct chcr_alg_template *chcr_crypto_alg =
		container_of(alg, struct chcr_alg_template, alg.aead);
	return chcr_crypto_alg->type & CRYPTO_ALG_SUB_TYPE_MASK;
}

void chcr_verify_tag(struct aead_request *req, u8 *input, int *err)
{
	u8 temp[SHA512_DIGEST_SIZE];
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	int authsize = crypto_aead_authsize(tfm);
	struct cpl_fw6_pld *fw6_pld;
	int cmp = 0;

	fw6_pld = (struct cpl_fw6_pld *)input;
	if ((get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106) ||
	    (get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_GCM)) {
		cmp = crypto_memneq(&fw6_pld->data[2], (fw6_pld + 1), authsize);
	} else {
		sg_pcopy_to_buffer(req->src, sg_nents(req->src), temp,
				   authsize, req->assoclen +
				   req->cryptlen - authsize);
		cmp = crypto_memneq(temp, (fw6_pld + 1), authsize);
	}
	if (cmp)
		*err = -EBADMSG;
	else
		*err = 0;
}

static int chcr_inc_wrcount(struct chcr_dev *dev)
{
	if (dev->state == CHCR_DETACH)
		return 1;
	atomic_inc(&dev->inflight);
	return 0;
}

static inline void chcr_dec_wrcount(struct chcr_dev *dev)
{
	atomic_dec(&dev->inflight);
}

static inline int chcr_handle_aead_resp(struct aead_request *req,
					unsigned char *input,
					int err)
{
	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct chcr_dev *dev = a_ctx(tfm)->dev;

	chcr_aead_common_exit(req);
	if (reqctx->verify == VERIFY_SW) {
		chcr_verify_tag(req, input, &err);
		reqctx->verify = VERIFY_HW;
	}
	chcr_dec_wrcount(dev);
	req->base.complete(&req->base, err);

	return err;
}

static void get_aes_decrypt_key(unsigned char *dec_key,
				const unsigned char *key,
				unsigned int keylength)
{
	u32 temp;
	u32 w_ring[MAX_NK];
	int i, j, k;
	u8 nr, nk;

	switch (keylength) {
	case AES_KEYLENGTH_128BIT:
		nk = KEYLENGTH_4BYTES;
		nr = NUMBER_OF_ROUNDS_10;
		break;
	case AES_KEYLENGTH_192BIT:
		nk = KEYLENGTH_6BYTES;
		nr = NUMBER_OF_ROUNDS_12;
		break;
	case AES_KEYLENGTH_256BIT:
		nk = KEYLENGTH_8BYTES;
		nr = NUMBER_OF_ROUNDS_14;
		break;
	default:
		return;
	}
	for (i = 0; i < nk; i++)
		w_ring[i] = get_unaligned_be32(&key[i * 4]);

	i = 0;
	temp = w_ring[nk - 1];
	while (i + nk < (nr + 1) * 4) {
		if (!(i % nk)) {
			/* RotWord(temp) */
			temp = (temp << 8) | (temp >> 24);
			temp = aes_ks_subword(temp);
			temp ^= round_constant[i / nk];
		} else if (nk == 8 && (i % 4 == 0)) {
			temp = aes_ks_subword(temp);
		}
		w_ring[i % nk] ^= temp;
		temp = w_ring[i % nk];
		i++;
	}
	i--;
	for (k = 0, j = i % nk; k < nk; k++) {
		put_unaligned_be32(w_ring[j], &dec_key[k * 4]);
		j--;
		if (j < 0)
			j += nk;
	}
}
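
/*
 * get_aes_decrypt_key() runs the forward AES key schedule while keeping
 * only a ring of the last nk words, so when the loop ends w_ring[] holds
 * the final round key in rotated order and the copy-out loop unrotates
 * it.  An equivalent software check, sketched with the kernel AES
 * library (assumption: dec_key matches the tail of the expanded key):
 *
 *	struct crypto_aes_ctx aes;
 *
 *	aes_expandkey(&aes, key, keylength / 8);
 *	// dec_key should now equal the last nk words of aes.key_enc,
 *	// stored in big-endian word order.
 */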

static struct crypto_shash *chcr_alloc_shash(unsigned int ds)
{
	struct crypto_shash *base_hash = ERR_PTR(-EINVAL);

	switch (ds) {
	case SHA1_DIGEST_SIZE:
		base_hash = crypto_alloc_shash("sha1", 0, 0);
		break;
	case SHA224_DIGEST_SIZE:
		base_hash = crypto_alloc_shash("sha224", 0, 0);
		break;
	case SHA256_DIGEST_SIZE:
		base_hash = crypto_alloc_shash("sha256", 0, 0);
		break;
	case SHA384_DIGEST_SIZE:
		base_hash = crypto_alloc_shash("sha384", 0, 0);
		break;
	case SHA512_DIGEST_SIZE:
		base_hash = crypto_alloc_shash("sha512", 0, 0);
		break;
	}

	return base_hash;
}

static int chcr_compute_partial_hash(struct shash_desc *desc,
				     char *iopad, char *result_hash,
				     int digest_size)
{
	struct sha1_state sha1_st;
	struct sha256_state sha256_st;
	struct sha512_state sha512_st;
	int error;

	if (digest_size == SHA1_DIGEST_SIZE) {
		error = crypto_shash_init(desc) ?:
			crypto_shash_update(desc, iopad, SHA1_BLOCK_SIZE) ?:
			crypto_shash_export(desc, (void *)&sha1_st);
		memcpy(result_hash, sha1_st.state, SHA1_DIGEST_SIZE);
	} else if (digest_size == SHA224_DIGEST_SIZE) {
		error = crypto_shash_init(desc) ?:
			crypto_shash_update(desc, iopad, SHA256_BLOCK_SIZE) ?:
			crypto_shash_export(desc, (void *)&sha256_st);
		memcpy(result_hash, sha256_st.state, SHA256_DIGEST_SIZE);
	} else if (digest_size == SHA256_DIGEST_SIZE) {
		error = crypto_shash_init(desc) ?:
			crypto_shash_update(desc, iopad, SHA256_BLOCK_SIZE) ?:
			crypto_shash_export(desc, (void *)&sha256_st);
		memcpy(result_hash, sha256_st.state, SHA256_DIGEST_SIZE);
	} else if (digest_size == SHA384_DIGEST_SIZE) {
		error = crypto_shash_init(desc) ?:
			crypto_shash_update(desc, iopad, SHA512_BLOCK_SIZE) ?:
			crypto_shash_export(desc, (void *)&sha512_st);
		memcpy(result_hash, sha512_st.state, SHA512_DIGEST_SIZE);
	} else if (digest_size == SHA512_DIGEST_SIZE) {
		error = crypto_shash_init(desc) ?:
			crypto_shash_update(desc, iopad, SHA512_BLOCK_SIZE) ?:
			crypto_shash_export(desc, (void *)&sha512_st);
		memcpy(result_hash, sha512_st.state, SHA512_DIGEST_SIZE);
	} else {
		error = -EINVAL;
		pr_err("Unknown digest size %d\n", digest_size);
	}
	return error;
}
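
/*
 * This is the standard HMAC precomputation: hash one block of
 * key XOR ipad (or opad) and export the raw compression-function state,
 * so the hardware can resume the inner and outer hashes without ever
 * seeing the key.  Illustrative use (buffer names are local to callers
 * elsewhere in this driver):
 *
 *	chcr_compute_partial_hash(desc, ipad, istate, SHA256_DIGEST_SIZE);
 *	chcr_compute_partial_hash(desc, opad, ostate, SHA256_DIGEST_SIZE);
 */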

static void chcr_change_order(char *buf, int ds)
{
	int i;

	if (ds == SHA512_DIGEST_SIZE) {
		for (i = 0; i < (ds / sizeof(u64)); i++)
			*((__be64 *)buf + i) =
				cpu_to_be64(*((u64 *)buf + i));
	} else {
		for (i = 0; i < (ds / sizeof(u32)); i++)
			*((__be32 *)buf + i) =
				cpu_to_be32(*((u32 *)buf + i));
	}
}

static inline int is_hmac(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct chcr_alg_template *chcr_crypto_alg =
		container_of(__crypto_ahash_alg(alg), struct chcr_alg_template,
			     alg.hash);
	if (chcr_crypto_alg->type == CRYPTO_ALG_TYPE_HMAC)
		return 1;
	return 0;
}

static inline void dsgl_walk_init(struct dsgl_walk *walk,
				  struct cpl_rx_phys_dsgl *dsgl)
{
	walk->dsgl = dsgl;
	walk->nents = 0;
	walk->to = (struct phys_sge_pairs *)(dsgl + 1);
}

static inline void dsgl_walk_end(struct dsgl_walk *walk, unsigned short qid,
				 int pci_chan_id)
{
	struct cpl_rx_phys_dsgl *phys_cpl;

	phys_cpl = walk->dsgl;

	phys_cpl->op_to_tid = htonl(CPL_RX_PHYS_DSGL_OPCODE_V(CPL_RX_PHYS_DSGL)
				    | CPL_RX_PHYS_DSGL_ISRDMA_V(0));
	phys_cpl->pcirlxorder_to_noofsgentr =
		htonl(CPL_RX_PHYS_DSGL_PCIRLXORDER_V(0) |
		      CPL_RX_PHYS_DSGL_PCINOSNOOP_V(0) |
		      CPL_RX_PHYS_DSGL_PCITPHNTENB_V(0) |
		      CPL_RX_PHYS_DSGL_PCITPHNT_V(0) |
		      CPL_RX_PHYS_DSGL_DCAID_V(0) |
		      CPL_RX_PHYS_DSGL_NOOFSGENTR_V(walk->nents));
	phys_cpl->rss_hdr_int.opcode = CPL_RX_PHYS_ADDR;
	phys_cpl->rss_hdr_int.qid = htons(qid);
	phys_cpl->rss_hdr_int.hash_val = 0;
	phys_cpl->rss_hdr_int.channel = pci_chan_id;
}

static inline void dsgl_walk_add_page(struct dsgl_walk *walk,
				      size_t size,
				      dma_addr_t addr)
{
	int j;

	if (!size)
		return;
	j = walk->nents;
	walk->to->len[j % 8] = htons(size);
	walk->to->addr[j % 8] = cpu_to_be64(addr);
	j++;
	if ((j % 8) == 0)
		walk->to++;
	walk->nents = j;
}

static void dsgl_walk_add_sg(struct dsgl_walk *walk,
			     struct scatterlist *sg,
			     unsigned int slen,
			     unsigned int skip)
{
	int skip_len = 0;
	unsigned int left_size = slen, len = 0;
	unsigned int j = walk->nents;
	int offset, ent_len;

	if (!slen)
		return;
	while (sg && skip) {
		if (sg_dma_len(sg) <= skip) {
			skip -= sg_dma_len(sg);
			skip_len = 0;
			sg = sg_next(sg);
		} else {
			skip_len = skip;
			skip = 0;
		}
	}

	while (left_size && sg) {
		len = min_t(u32, left_size, sg_dma_len(sg) - skip_len);
		offset = 0;
		while (len) {
			ent_len = min_t(u32, len, CHCR_DST_SG_SIZE);
			walk->to->len[j % 8] = htons(ent_len);
			walk->to->addr[j % 8] = cpu_to_be64(sg_dma_address(sg) +
							    offset + skip_len);
			offset += ent_len;
			len -= ent_len;
			j++;
			if ((j % 8) == 0)
				walk->to++;
		}
		walk->last_sg = sg;
		walk->last_sg_len = min_t(u32, left_size, sg_dma_len(sg) -
					  skip_len) + skip_len;
		left_size -= min_t(u32, left_size, sg_dma_len(sg) - skip_len);
		skip_len = 0;
		sg = sg_next(sg);
	}
	walk->nents = j;
}
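
/*
 * The destination gather list is written as struct phys_sge_pairs, each
 * holding eight (len, addr) slots, which is why both walkers above index
 * with (j % 8) and advance walk->to only once a pair block fills up.
 */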

static inline void ulptx_walk_init(struct ulptx_walk *walk,
				   struct ulptx_sgl *ulp)
{
	walk->sgl = ulp;
	walk->nents = 0;
	walk->pair_idx = 0;
	walk->pair = ulp->sge;
	walk->last_sg = NULL;
	walk->last_sg_len = 0;
}

static inline void ulptx_walk_end(struct ulptx_walk *walk)
{
	walk->sgl->cmd_nsge = htonl(ULPTX_CMD_V(ULP_TX_SC_DSGL) |
			      ULPTX_NSGE_V(walk->nents));
}

static inline void ulptx_walk_add_page(struct ulptx_walk *walk,
				       size_t size,
				       dma_addr_t addr)
{
	if (!size)
		return;

	if (walk->nents == 0) {
		walk->sgl->len0 = cpu_to_be32(size);
		walk->sgl->addr0 = cpu_to_be64(addr);
	} else {
		walk->pair->addr[walk->pair_idx] = cpu_to_be64(addr);
		walk->pair->len[walk->pair_idx] = cpu_to_be32(size);
		walk->pair_idx = !walk->pair_idx;
		if (!walk->pair_idx)
			walk->pair++;
	}
	walk->nents++;
}

static void ulptx_walk_add_sg(struct ulptx_walk *walk,
			      struct scatterlist *sg,
			      unsigned int len,
			      unsigned int skip)
{
	int small;
	int skip_len = 0;
	unsigned int sgmin;

	if (!len)
		return;
	while (sg && skip) {
		if (sg_dma_len(sg) <= skip) {
			skip -= sg_dma_len(sg);
			skip_len = 0;
			sg = sg_next(sg);
		} else {
			skip_len = skip;
			skip = 0;
		}
	}
	WARN(!sg, "SG should not be null here\n");
	if (sg && (walk->nents == 0)) {
		small = min_t(unsigned int, sg_dma_len(sg) - skip_len, len);
		sgmin = min_t(unsigned int, small, CHCR_SRC_SG_SIZE);
		walk->sgl->len0 = cpu_to_be32(sgmin);
		walk->sgl->addr0 = cpu_to_be64(sg_dma_address(sg) + skip_len);
		walk->nents++;
		len -= sgmin;
		walk->last_sg = sg;
		walk->last_sg_len = sgmin + skip_len;
		skip_len += sgmin;
		if (sg_dma_len(sg) == skip_len) {
			sg = sg_next(sg);
			skip_len = 0;
		}
	}

	while (sg && len) {
		small = min(sg_dma_len(sg) - skip_len, len);
		sgmin = min_t(unsigned int, small, CHCR_SRC_SG_SIZE);
		walk->pair->len[walk->pair_idx] = cpu_to_be32(sgmin);
		walk->pair->addr[walk->pair_idx] =
			cpu_to_be64(sg_dma_address(sg) + skip_len);
		walk->pair_idx = !walk->pair_idx;
		walk->nents++;
		if (!walk->pair_idx)
			walk->pair++;
		len -= sgmin;
		skip_len += sgmin;
		walk->last_sg = sg;
		walk->last_sg_len = skip_len;
		if (sg_dma_len(sg) == skip_len) {
			sg = sg_next(sg);
			skip_len = 0;
		}
	}
}
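
/*
 * A ULPTX SGL stores its first entry inline (len0/addr0) and the rest
 * as two-entry pairs, so unlike the destination walker the source
 * walker special-cases nents == 0 and then toggles pair_idx between the
 * two slots of each struct ulptx_sge_pair.
 */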

static inline int get_cryptoalg_subtype(struct crypto_skcipher *tfm)
{
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct chcr_alg_template *chcr_crypto_alg =
		container_of(alg, struct chcr_alg_template, alg.skcipher);

	return chcr_crypto_alg->type & CRYPTO_ALG_SUB_TYPE_MASK;
}

static int cxgb4_is_crypto_q_full(struct net_device *dev, unsigned int idx)
{
	struct adapter *adap = netdev2adap(dev);
	struct sge_uld_txq_info *txq_info =
		adap->sge.uld_txq_info[CXGB4_TX_CRYPTO];
	struct sge_uld_txq *txq;
	int ret = 0;

	local_bh_disable();
	txq = &txq_info->uldtxq[idx];
	spin_lock(&txq->sendq.lock);
	if (txq->full)
		ret = -1;
	spin_unlock(&txq->sendq.lock);
	local_bh_enable();
	return ret;
}

static int generate_copy_rrkey(struct ablk_ctx *ablkctx,
			       struct _key_ctx *key_ctx)
{
	if (ablkctx->ciph_mode == CHCR_SCMD_CIPHER_MODE_AES_CBC) {
		memcpy(key_ctx->key, ablkctx->rrkey, ablkctx->enckey_len);
	} else {
		memcpy(key_ctx->key,
		       ablkctx->key + (ablkctx->enckey_len >> 1),
		       ablkctx->enckey_len >> 1);
		memcpy(key_ctx->key + (ablkctx->enckey_len >> 1),
		       ablkctx->rrkey, ablkctx->enckey_len >> 1);
	}
	return 0;
}
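
/*
 * For plain CBC decryption the key context carries just the reverse
 * round key from get_aes_decrypt_key().  In the split-key case the key
 * blob holds two halves (for XTS the second half is the tweak key), so
 * the second half is copied first and the reverse round key follows it.
 */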

static int chcr_hash_ent_in_wr(struct scatterlist *src,
			       unsigned int minsg,
			       unsigned int space,
			       unsigned int srcskip)
{
	int srclen = 0;
	int srcsg = minsg;
	int soffset = 0, sless;

	if (sg_dma_len(src) == srcskip) {
		src = sg_next(src);
		srcskip = 0;
	}
	while (src && space > (sgl_ent_len[srcsg + 1])) {
		sless = min_t(unsigned int, sg_dma_len(src) - soffset - srcskip,
			      CHCR_SRC_SG_SIZE);
		srclen += sless;
		soffset += sless;
		srcsg++;
		if (sg_dma_len(src) == (soffset + srcskip)) {
			src = sg_next(src);
			soffset = 0;
			srcskip = 0;
		}
	}
	return srclen;
}

static int chcr_sg_ent_in_wr(struct scatterlist *src,
			     struct scatterlist *dst,
			     unsigned int minsg,
			     unsigned int space,
			     unsigned int srcskip,
			     unsigned int dstskip)
{
	int srclen = 0, dstlen = 0;
	int srcsg = minsg, dstsg = minsg;
	int offset = 0, soffset = 0, less, sless = 0;

	if (sg_dma_len(src) == srcskip) {
		src = sg_next(src);
		srcskip = 0;
	}
	if (sg_dma_len(dst) == dstskip) {
		dst = sg_next(dst);
		dstskip = 0;
	}

	while (src && dst &&
	       space > (sgl_ent_len[srcsg + 1] + dsgl_ent_len[dstsg])) {
		sless = min_t(unsigned int, sg_dma_len(src) - srcskip - soffset,
			      CHCR_SRC_SG_SIZE);
		srclen += sless;
		srcsg++;
		offset = 0;
		while (dst && ((dstsg + 1) <= MAX_DSGL_ENT) &&
		       space > (sgl_ent_len[srcsg] + dsgl_ent_len[dstsg + 1])) {
			if (srclen <= dstlen)
				break;
			less = min_t(unsigned int, sg_dma_len(dst) - offset -
				     dstskip, CHCR_DST_SG_SIZE);
			dstlen += less;
			offset += less;
			if ((offset + dstskip) == sg_dma_len(dst)) {
				dst = sg_next(dst);
				offset = 0;
			}
			dstsg++;
			dstskip = 0;
		}
		soffset += sless;
		if ((soffset + srcskip) == sg_dma_len(src)) {
			src = sg_next(src);
			srcskip = 0;
			soffset = 0;
		}
	}
	return min(srclen, dstlen);
}
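
/*
 * Both helpers above answer the same budgeting question: given "space"
 * bytes left in a work request, how many payload bytes fit before the
 * source SGL overhead (sgl_ent_len[]) and, for ciphers, the destination
 * DSGL overhead (dsgl_ent_len[]) exceed it.  Returning min(srclen,
 * dstlen) keeps the chunk fully readable and fully writable.
 */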

static int chcr_cipher_fallback(struct crypto_skcipher *cipher,
				struct skcipher_request *req,
				u8 *iv,
				unsigned short op_type)
{
	struct chcr_skcipher_req_ctx *reqctx = skcipher_request_ctx(req);
	int err;

	skcipher_request_set_tfm(&reqctx->fallback_req, cipher);
	skcipher_request_set_callback(&reqctx->fallback_req, req->base.flags,
				      req->base.complete, req->base.data);
	skcipher_request_set_crypt(&reqctx->fallback_req, req->src, req->dst,
				   req->cryptlen, iv);

	err = op_type ? crypto_skcipher_decrypt(&reqctx->fallback_req) :
			crypto_skcipher_encrypt(&reqctx->fallback_req);

	return err;
}
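
/*
 * The fallback request reuses the original request's src/dst and
 * completion callback, so a caller only selects the software tfm, IV
 * and direction.  Illustrative call (argument values as typically used
 * by callers later in this driver):
 *
 *	err = chcr_cipher_fallback(ablkctx->sw_cipher, req,
 *				   reqctx->iv, reqctx->op);
 */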

static inline int get_qidxs(struct crypto_async_request *req,
			    unsigned int *txqidx, unsigned int *rxqidx)
{
	struct crypto_tfm *tfm = req->tfm;
	int ret = 0;

	switch (tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_AEAD:
	{
		struct aead_request *aead_req =
			container_of(req, struct aead_request, base);
		struct chcr_aead_reqctx *reqctx = aead_request_ctx(aead_req);
		*txqidx = reqctx->txqidx;
		*rxqidx = reqctx->rxqidx;
		break;
	}
	case CRYPTO_ALG_TYPE_SKCIPHER:
	{
		struct skcipher_request *sk_req =
			container_of(req, struct skcipher_request, base);
		struct chcr_skcipher_req_ctx *reqctx =
			skcipher_request_ctx(sk_req);
		*txqidx = reqctx->txqidx;
		*rxqidx = reqctx->rxqidx;
		break;
	}
	case CRYPTO_ALG_TYPE_AHASH:
	{
		struct ahash_request *ahash_req =
			container_of(req, struct ahash_request, base);
		struct chcr_ahash_req_ctx *reqctx =
			ahash_request_ctx(ahash_req);
		*txqidx = reqctx->txqidx;
		*rxqidx = reqctx->rxqidx;
		break;
	}
	default:
		ret = -EINVAL;
		/* should never get here */
		BUG();
		break;
	}
	return ret;
}

static inline void create_wreq(struct chcr_context *ctx,
			       struct chcr_wr *chcr_req,
			       struct crypto_async_request *req,
			       unsigned int imm,
			       int hash_sz,
			       unsigned int len16,
			       unsigned int sc_len,
			       unsigned int lcb)
{
	struct uld_ctx *u_ctx = ULD_CTX(ctx);
	unsigned int tx_channel_id, rx_channel_id;
	unsigned int txqidx = 0, rxqidx = 0;
	unsigned int qid, fid, portno;

	get_qidxs(req, &txqidx, &rxqidx);
	qid = u_ctx->lldi.rxq_ids[rxqidx];
	fid = u_ctx->lldi.rxq_ids[0];
	portno = rxqidx / ctx->rxq_perchan;
	tx_channel_id = txqidx / ctx->txq_perchan;
	rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[portno]);

	chcr_req->wreq.op_to_cctx_size = FILL_WR_OP_CCTX_SIZE;
	chcr_req->wreq.pld_size_hash_size =
		htonl(FW_CRYPTO_LOOKASIDE_WR_HASH_SIZE_V(hash_sz));
	chcr_req->wreq.len16_pkd =
		htonl(FW_CRYPTO_LOOKASIDE_WR_LEN16_V(DIV_ROUND_UP(len16, 16)));
	chcr_req->wreq.cookie = cpu_to_be64((uintptr_t)req);
	chcr_req->wreq.rx_chid_to_rx_q_id = FILL_WR_RX_Q_ID(rx_channel_id, qid,
							    !!lcb, txqidx);

	chcr_req->ulptx.cmd_dest = FILL_ULPTX_CMD_DEST(tx_channel_id, fid);
	chcr_req->ulptx.len = htonl((DIV_ROUND_UP(len16, 16) -
				     ((sizeof(chcr_req->wreq)) >> 4)));
	chcr_req->sc_imm.cmd_more = FILL_CMD_MORE(!imm);
	chcr_req->sc_imm.len = cpu_to_be32(sizeof(struct cpl_tx_sec_pdu) +
					   sizeof(chcr_req->key_ctx) + sc_len);
}
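
/*
 * create_wreq() fills only the common work-request framing: the
 * FW_CRYPTO_LOOKASIDE_WR header (length in 16-byte units, response
 * queue, and a cookie that round-trips back in the CPL_FW6_PLD reply)
 * plus the ULP_TX header in front of the security PDU.  Everything
 * mode-specific is filled into chcr_req by the caller before the skb
 * is queued.
 */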

/**
 * create_cipher_wr - form the WR for cipher operations
 * @wrparam: cipher work request parameters, carrying the skcipher
 *	request, the ingress qid where the response of this WR should
 *	be received, and the number of bytes to process in this WR.
 */
static struct sk_buff *create_cipher_wr(struct cipher_wr_param *wrparam)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(wrparam->req);
	struct chcr_context *ctx = c_ctx(tfm);
	struct uld_ctx *u_ctx = ULD_CTX(ctx);
	struct ablk_ctx *ablkctx = ABLK_CTX(ctx);
	struct sk_buff *skb = NULL;
	struct chcr_wr *chcr_req;
	struct cpl_rx_phys_dsgl *phys_cpl;
	struct ulptx_sgl *ulptx;
	struct chcr_skcipher_req_ctx *reqctx =
		skcipher_request_ctx(wrparam->req);
	unsigned int temp = 0, transhdr_len, dst_size;
	int error;
	int nents;
	unsigned int kctx_len;
	gfp_t flags = wrparam->req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ?
			GFP_KERNEL : GFP_ATOMIC;
	struct adapter *adap = padap(ctx->dev);
	unsigned int rx_channel_id = reqctx->rxqidx / ctx->rxq_perchan;

	rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]);
	nents = sg_nents_xlen(reqctx->dstsg, wrparam->bytes, CHCR_DST_SG_SIZE,
			      reqctx->dst_ofst);
	dst_size = get_space_for_phys_dsgl(nents);
	kctx_len = roundup(ablkctx->enckey_len, 16);
	transhdr_len = CIPHER_TRANSHDR_SIZE(kctx_len, dst_size);
	nents = sg_nents_xlen(reqctx->srcsg, wrparam->bytes,
			      CHCR_SRC_SG_SIZE, reqctx->src_ofst);
	temp = reqctx->imm ? roundup(wrparam->bytes, 16) :
			     (sgl_len(nents) * 8);
	transhdr_len += temp;
	transhdr_len = roundup(transhdr_len, 16);
	skb = alloc_skb(SGE_MAX_WR_LEN, flags);
	if (!skb) {
		error = -ENOMEM;
		goto err;
	}
	chcr_req = __skb_put_zero(skb, transhdr_len);
	chcr_req->sec_cpl.op_ivinsrtofst =
			FILL_SEC_CPL_OP_IVINSR(rx_channel_id, 2, 1);

	chcr_req->sec_cpl.pldlen = htonl(IV + wrparam->bytes);
	chcr_req->sec_cpl.aadstart_cipherstop_hi =
			FILL_SEC_CPL_CIPHERSTOP_HI(0, 0, IV + 1, 0);

	chcr_req->sec_cpl.cipherstop_lo_authinsert =
			FILL_SEC_CPL_AUTHINSERT(0, 0, 0, 0);
	chcr_req->sec_cpl.seqno_numivs = FILL_SEC_CPL_SCMD0_SEQNO(reqctx->op, 0,
							 ablkctx->ciph_mode,
							 0, 0, IV >> 1);
	chcr_req->sec_cpl.ivgen_hdrlen = FILL_SEC_CPL_IVGEN_HDRLEN(0, 0, 0,
								   0, 1, dst_size);

	chcr_req->key_ctx.ctx_hdr = ablkctx->key_ctx_hdr;
	if ((reqctx->op == CHCR_DECRYPT_OP) &&
	    (!(get_cryptoalg_subtype(tfm) ==
	       CRYPTO_ALG_SUB_TYPE_CTR)) &&
	    (!(get_cryptoalg_subtype(tfm) ==
	       CRYPTO_ALG_SUB_TYPE_CTR_RFC3686))) {
		generate_copy_rrkey(ablkctx, &chcr_req->key_ctx);
	} else {
		if ((ablkctx->ciph_mode == CHCR_SCMD_CIPHER_MODE_AES_CBC) ||
		    (ablkctx->ciph_mode == CHCR_SCMD_CIPHER_MODE_AES_CTR)) {
			memcpy(chcr_req->key_ctx.key, ablkctx->key,
			       ablkctx->enckey_len);
		} else {
			memcpy(chcr_req->key_ctx.key, ablkctx->key +
			       (ablkctx->enckey_len >> 1),
			       ablkctx->enckey_len >> 1);
			memcpy(chcr_req->key_ctx.key +
			       (ablkctx->enckey_len >> 1),
			       ablkctx->key,
			       ablkctx->enckey_len >> 1);
		}
	}
	phys_cpl = (struct cpl_rx_phys_dsgl *)((u8 *)(chcr_req + 1) + kctx_len);
	ulptx = (struct ulptx_sgl *)((u8 *)(phys_cpl + 1) + dst_size);
	chcr_add_cipher_src_ent(wrparam->req, ulptx, wrparam);
	chcr_add_cipher_dst_ent(wrparam->req, phys_cpl, wrparam, wrparam->qid);

	atomic_inc(&adap->chcr_stats.cipher_rqst);
	temp = sizeof(struct cpl_rx_phys_dsgl) + dst_size + kctx_len + IV
		+ (reqctx->imm ? (wrparam->bytes) : 0);
	create_wreq(c_ctx(tfm), chcr_req, &(wrparam->req->base), reqctx->imm, 0,
		    transhdr_len, temp,
		    ablkctx->ciph_mode == CHCR_SCMD_CIPHER_MODE_AES_CBC);
	reqctx->skb = skb;

	if (reqctx->op && (ablkctx->ciph_mode ==
			   CHCR_SCMD_CIPHER_MODE_AES_CBC))
		sg_pcopy_to_buffer(wrparam->req->src,
			sg_nents(wrparam->req->src), wrparam->req->iv, 16,
			reqctx->processed + wrparam->bytes - AES_BLOCK_SIZE);

	return skb;
err:
	return ERR_PTR(error);
}

static inline int chcr_keyctx_ck_size(unsigned int keylen)
{
	int ck_size = 0;

	if (keylen == AES_KEYSIZE_128)
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_128;
	else if (keylen == AES_KEYSIZE_192)
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_192;
	else if (keylen == AES_KEYSIZE_256)
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_256;
	else
		ck_size = 0;

	return ck_size;
}

static int chcr_cipher_fallback_setkey(struct crypto_skcipher *cipher,
				       const u8 *key,
				       unsigned int keylen)
{
	struct ablk_ctx *ablkctx = ABLK_CTX(c_ctx(cipher));

	crypto_skcipher_clear_flags(ablkctx->sw_cipher,
				    CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(ablkctx->sw_cipher,
				  cipher->base.crt_flags & CRYPTO_TFM_REQ_MASK);
	return crypto_skcipher_setkey(ablkctx->sw_cipher, key, keylen);
}

static int chcr_aes_cbc_setkey(struct crypto_skcipher *cipher,
			       const u8 *key,
			       unsigned int keylen)
{
	struct ablk_ctx *ablkctx = ABLK_CTX(c_ctx(cipher));
	unsigned int ck_size, context_size;
	u16 alignment = 0;
	int err;

	err = chcr_cipher_fallback_setkey(cipher, key, keylen);
	if (err)
		goto badkey_err;

	ck_size = chcr_keyctx_ck_size(keylen);
	alignment = ck_size == CHCR_KEYCTX_CIPHER_KEY_SIZE_192 ? 8 : 0;
	memcpy(ablkctx->key, key, keylen);
	ablkctx->enckey_len = keylen;
	get_aes_decrypt_key(ablkctx->rrkey, ablkctx->key, keylen << 3);
	context_size = (KEY_CONTEXT_HDR_SALT_AND_PAD +
			keylen + alignment) >> 4;

	ablkctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size, CHCR_KEYCTX_NO_KEY,
						0, 0, context_size);
	ablkctx->ciph_mode = CHCR_SCMD_CIPHER_MODE_AES_CBC;
	return 0;
badkey_err:
	ablkctx->enckey_len = 0;

	return err;
}

static int chcr_aes_ctr_setkey(struct crypto_skcipher *cipher,
			       const u8 *key,
			       unsigned int keylen)
{
	struct ablk_ctx *ablkctx = ABLK_CTX(c_ctx(cipher));
	unsigned int ck_size, context_size;
	u16 alignment = 0;
	int err;

	err = chcr_cipher_fallback_setkey(cipher, key, keylen);
	if (err)
		goto badkey_err;
	ck_size = chcr_keyctx_ck_size(keylen);
	alignment = (ck_size == CHCR_KEYCTX_CIPHER_KEY_SIZE_192) ? 8 : 0;
	memcpy(ablkctx->key, key, keylen);
	ablkctx->enckey_len = keylen;
	context_size = (KEY_CONTEXT_HDR_SALT_AND_PAD +
			keylen + alignment) >> 4;

	ablkctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size, CHCR_KEYCTX_NO_KEY,
						0, 0, context_size);
	ablkctx->ciph_mode = CHCR_SCMD_CIPHER_MODE_AES_CTR;

	return 0;
badkey_err:
	ablkctx->enckey_len = 0;

	return err;
}

static int chcr_aes_rfc3686_setkey(struct crypto_skcipher *cipher,
				   const u8 *key,
				   unsigned int keylen)
{
	struct ablk_ctx *ablkctx = ABLK_CTX(c_ctx(cipher));
	unsigned int ck_size, context_size;
	u16 alignment = 0;
	int err;

	if (keylen < CTR_RFC3686_NONCE_SIZE)
		return -EINVAL;
	memcpy(ablkctx->nonce, key + (keylen - CTR_RFC3686_NONCE_SIZE),
	       CTR_RFC3686_NONCE_SIZE);

	keylen -= CTR_RFC3686_NONCE_SIZE;
	err = chcr_cipher_fallback_setkey(cipher, key, keylen);
	if (err)
		goto badkey_err;

	ck_size = chcr_keyctx_ck_size(keylen);
	alignment = (ck_size == CHCR_KEYCTX_CIPHER_KEY_SIZE_192) ? 8 : 0;
	memcpy(ablkctx->key, key, keylen);
	ablkctx->enckey_len = keylen;
	context_size = (KEY_CONTEXT_HDR_SALT_AND_PAD +
			keylen + alignment) >> 4;

	ablkctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size, CHCR_KEYCTX_NO_KEY,
						0, 0, context_size);
	ablkctx->ciph_mode = CHCR_SCMD_CIPHER_MODE_AES_CTR;

	return 0;
badkey_err:
	ablkctx->enckey_len = 0;

	return err;
}

static void ctr_add_iv(u8 *dstiv, u8 *srciv, u32 add)
{
	unsigned int size = AES_BLOCK_SIZE;
	__be32 *b = (__be32 *)(dstiv + size);
	u32 c, prev;

	memcpy(dstiv, srciv, AES_BLOCK_SIZE);
	for (; size >= 4; size -= 4) {
		prev = be32_to_cpu(*--b);
		c = prev + add;
		*b = cpu_to_be32(c);
		if (prev < c)
			break;
		add = 1;
	}
}
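
/*
 * The loop above is a 128-bit big-endian add with carry, walking from
 * the least-significant word up.  Worked example: if the low 32-bit
 * word is 0xffffffff and add = 2, the word wraps to 0x00000001; since
 * prev > c the loop continues and propagates a carry of 1 into the
 * next word.
 */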

static unsigned int adjust_ctr_overflow(u8 *iv, u32 bytes)
{
	__be32 *b = (__be32 *)(iv + AES_BLOCK_SIZE);
	u64 c;
	u32 temp = be32_to_cpu(*--b);

	temp = ~temp;
	c = (u64)temp + 1; /* number of blocks that can be processed
			    * without overflow
			    */
	if ((bytes / AES_BLOCK_SIZE) >= c)
		bytes = c * AES_BLOCK_SIZE;
	return bytes;
}
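
/*
 * Worked example: if the low IV word is 0xfffffffe, then temp = 1 and
 * c = 2, i.e. only two more AES blocks fit before the 32-bit counter
 * wraps; a longer request is clamped to 2 * AES_BLOCK_SIZE so the
 * caller can re-issue the remainder with an adjusted IV.
 */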

static int chcr_update_tweak(struct skcipher_request *req, u8 *iv,
			     u32 isfinal)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct ablk_ctx *ablkctx = ABLK_CTX(c_ctx(tfm));
	struct chcr_skcipher_req_ctx *reqctx = skcipher_request_ctx(req);
	struct crypto_aes_ctx aes;
	int ret, i;
	u8 *key;
	unsigned int keylen;
	int round = reqctx->last_req_len / AES_BLOCK_SIZE;
	int round8 = round / 8;

	memcpy(iv, reqctx->iv, AES_BLOCK_SIZE);

	keylen = ablkctx->enckey_len / 2;
	key = ablkctx->key + keylen;
	/* For a 192 bit key remove the padded zeroes which were
	 * added in chcr_xts_setkey
	 */
	if (KEY_CONTEXT_CK_SIZE_G(ntohl(ablkctx->key_ctx_hdr))
			== CHCR_KEYCTX_CIPHER_KEY_SIZE_192)
		ret = aes_expandkey(&aes, key, keylen - 8);
	else
		ret = aes_expandkey(&aes, key, keylen);
	if (ret)
		return ret;
	aes_encrypt(&aes, iv, iv);
	for (i = 0; i < round8; i++)
		gf128mul_x8_ble((le128 *)iv, (le128 *)iv);

	for (i = 0; i < (round % 8); i++)
		gf128mul_x_ble((le128 *)iv, (le128 *)iv);

	if (!isfinal)
		aes_decrypt(&aes, iv, iv);

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}
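
/*
 * XTS tweak catch-up: the encrypted tweak is advanced one GF(2^128)
 * doubling per AES block already processed (gf128mul_x8_ble() batches
 * eight doublings), reproducing the tweak the hardware would have
 * reached.  For non-final chunks the result is decrypted back because
 * the next WR will re-encrypt the IV it is handed.
 */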

static int chcr_update_cipher_iv(struct skcipher_request *req,
				 struct cpl_fw6_pld *fw6_pld, u8 *iv)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct chcr_skcipher_req_ctx *reqctx = skcipher_request_ctx(req);
	int subtype = get_cryptoalg_subtype(tfm);
	int ret = 0;

	if (subtype == CRYPTO_ALG_SUB_TYPE_CTR)
		ctr_add_iv(iv, req->iv, (reqctx->processed /
			   AES_BLOCK_SIZE));
	else if (subtype == CRYPTO_ALG_SUB_TYPE_CTR_RFC3686)
		*(__be32 *)(reqctx->iv + CTR_RFC3686_NONCE_SIZE +
			CTR_RFC3686_IV_SIZE) = cpu_to_be32((reqctx->processed /
						AES_BLOCK_SIZE) + 1);
	else if (subtype == CRYPTO_ALG_SUB_TYPE_XTS)
		ret = chcr_update_tweak(req, iv, 0);
	else if (subtype == CRYPTO_ALG_SUB_TYPE_CBC) {
		if (reqctx->op)
			/* Updated before sending last WR */
			memcpy(iv, req->iv, AES_BLOCK_SIZE);
		else
			memcpy(iv, &fw6_pld->data[2], AES_BLOCK_SIZE);
	}

	return ret;
}

/* We need a separate function for the final IV because in RFC3686 the
 * initial counter starts from 1 and the 8-byte IV buffer remains
 * constant across subsequent update requests.
 */
1135*4882a593Smuzhiyun
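/* Counter block layout used for rfc3686(ctr(aes)), as assembled in
 * process_cipher():
 *
 *	bytes  0..3  : nonce taken from the key (CTR_RFC3686_NONCE_SIZE)
 *	bytes  4..11 : per-request IV (CTR_RFC3686_IV_SIZE)
 *	bytes 12..15 : big-endian block counter, initialised to 1
 */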
static int chcr_final_cipher_iv(struct skcipher_request *req,
				struct cpl_fw6_pld *fw6_pld, u8 *iv)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct chcr_skcipher_req_ctx *reqctx = skcipher_request_ctx(req);
	int subtype = get_cryptoalg_subtype(tfm);
	int ret = 0;

	if (subtype == CRYPTO_ALG_SUB_TYPE_CTR)
		ctr_add_iv(iv, req->iv, DIV_ROUND_UP(reqctx->processed,
						     AES_BLOCK_SIZE));
	else if (subtype == CRYPTO_ALG_SUB_TYPE_XTS) {
		if (!reqctx->partial_req)
			memcpy(iv, reqctx->iv, AES_BLOCK_SIZE);
		else
			ret = chcr_update_tweak(req, iv, 1);
	} else if (subtype == CRYPTO_ALG_SUB_TYPE_CBC) {
		/* Already updated for decrypt */
		if (!reqctx->op)
			memcpy(iv, &fw6_pld->data[2], AES_BLOCK_SIZE);
	}
	return ret;
}

static int chcr_handle_cipher_resp(struct skcipher_request *req,
				   unsigned char *input, int err)
{
	struct chcr_skcipher_req_ctx *reqctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cpl_fw6_pld *fw6_pld = (struct cpl_fw6_pld *)input;
	struct ablk_ctx *ablkctx = ABLK_CTX(c_ctx(tfm));
	struct uld_ctx *u_ctx = ULD_CTX(c_ctx(tfm));
	struct chcr_dev *dev = c_ctx(tfm)->dev;
	struct chcr_context *ctx = c_ctx(tfm);
	struct adapter *adap = padap(ctx->dev);
	struct cipher_wr_param wrparam;
	struct sk_buff *skb;
	int bytes;

	if (err)
		goto unmap;
	if (req->cryptlen == reqctx->processed) {
		chcr_cipher_dma_unmap(&ULD_CTX(c_ctx(tfm))->lldi.pdev->dev,
				      req);
		err = chcr_final_cipher_iv(req, fw6_pld, req->iv);
		goto complete;
	}

	if (!reqctx->imm) {
		bytes = chcr_sg_ent_in_wr(reqctx->srcsg, reqctx->dstsg, 0,
					  CIP_SPACE_LEFT(ablkctx->enckey_len),
					  reqctx->src_ofst, reqctx->dst_ofst);
		if ((bytes + reqctx->processed) >= req->cryptlen)
			bytes = req->cryptlen - reqctx->processed;
		else
			bytes = rounddown(bytes, 16);
	} else {
		/* CTR mode counter overflow */
		bytes = req->cryptlen - reqctx->processed;
	}
	err = chcr_update_cipher_iv(req, fw6_pld, reqctx->iv);
	if (err)
		goto unmap;

	if (unlikely(bytes == 0)) {
		chcr_cipher_dma_unmap(&ULD_CTX(c_ctx(tfm))->lldi.pdev->dev,
				      req);
		memcpy(req->iv, reqctx->init_iv, IV);
		atomic_inc(&adap->chcr_stats.fallback);
		err = chcr_cipher_fallback(ablkctx->sw_cipher, req, req->iv,
					   reqctx->op);
		goto complete;
	}

	if (get_cryptoalg_subtype(tfm) ==
	    CRYPTO_ALG_SUB_TYPE_CTR)
		bytes = adjust_ctr_overflow(reqctx->iv, bytes);
	wrparam.qid = u_ctx->lldi.rxq_ids[reqctx->rxqidx];
	wrparam.req = req;
	wrparam.bytes = bytes;
	skb = create_cipher_wr(&wrparam);
	if (IS_ERR(skb)) {
		pr_err("%s: Failed to form WR. No memory\n", __func__);
		err = PTR_ERR(skb);
		goto unmap;
	}
	skb->dev = u_ctx->lldi.ports[0];
	set_wr_txq(skb, CPL_PRIORITY_DATA, reqctx->txqidx);
	chcr_send_wr(skb);
	reqctx->last_req_len = bytes;
	reqctx->processed += bytes;
	if (get_cryptoalg_subtype(tfm) ==
	    CRYPTO_ALG_SUB_TYPE_CBC && req->base.flags ==
	    CRYPTO_TFM_REQ_MAY_SLEEP) {
		complete(&ctx->cbc_aes_aio_done);
	}
	return 0;
unmap:
	chcr_cipher_dma_unmap(&ULD_CTX(c_ctx(tfm))->lldi.pdev->dev, req);
complete:
	if (get_cryptoalg_subtype(tfm) ==
	    CRYPTO_ALG_SUB_TYPE_CBC && req->base.flags ==
	    CRYPTO_TFM_REQ_MAY_SLEEP) {
		complete(&ctx->cbc_aes_aio_done);
	}
	chcr_dec_wrcount(dev);
	req->base.complete(&req->base, err);
	return err;
}

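/*
 * process_cipher() validates the request, chooses between the immediate
 * (payload inlined in the work request) and DSGL paths, prepares the
 * IV/counter material and builds the first cipher work request.  Requests
 * the hardware cannot take (zero length, XTS partial blocks) are punted
 * to the software fallback tfm.
 */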
static int process_cipher(struct skcipher_request *req,
			  unsigned short qid,
			  struct sk_buff **skb,
			  unsigned short op_type)
{
	struct chcr_skcipher_req_ctx *reqctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	unsigned int ivsize = crypto_skcipher_ivsize(tfm);
	struct ablk_ctx *ablkctx = ABLK_CTX(c_ctx(tfm));
	struct adapter *adap = padap(c_ctx(tfm)->dev);
	struct cipher_wr_param wrparam;
	int bytes, err = -EINVAL;
	int subtype;

	reqctx->processed = 0;
	reqctx->partial_req = 0;
	if (!req->iv)
		goto error;
	subtype = get_cryptoalg_subtype(tfm);
	if ((ablkctx->enckey_len == 0) || (ivsize > AES_BLOCK_SIZE) ||
	    (req->cryptlen == 0) ||
	    (req->cryptlen % crypto_skcipher_blocksize(tfm))) {
		if (req->cryptlen == 0 && subtype != CRYPTO_ALG_SUB_TYPE_XTS)
			goto fallback;
		else if (req->cryptlen % crypto_skcipher_blocksize(tfm) &&
			 subtype == CRYPTO_ALG_SUB_TYPE_XTS)
			goto fallback;
		pr_err("AES: Invalid value of Key Len %d nbytes %d IV Len %d\n",
		       ablkctx->enckey_len, req->cryptlen, ivsize);
		goto error;
	}

	err = chcr_cipher_dma_map(&ULD_CTX(c_ctx(tfm))->lldi.pdev->dev, req);
	if (err)
		goto error;
	if (req->cryptlen < (SGE_MAX_WR_LEN - (sizeof(struct chcr_wr) +
					       AES_MIN_KEY_SIZE +
					       sizeof(struct cpl_rx_phys_dsgl) +
					       /* Min dsgl size */
					       32))) {
		/* Can be sent as Immediate */
		unsigned int dnents = 0, transhdr_len, phys_dsgl, kctx_len;

		dnents = sg_nents_xlen(req->dst, req->cryptlen,
				       CHCR_DST_SG_SIZE, 0);
		phys_dsgl = get_space_for_phys_dsgl(dnents);
		kctx_len = roundup(ablkctx->enckey_len, 16);
		transhdr_len = CIPHER_TRANSHDR_SIZE(kctx_len, phys_dsgl);
		reqctx->imm = (transhdr_len + IV + req->cryptlen) <=
			SGE_MAX_WR_LEN;
		bytes = IV + req->cryptlen;
	} else {
		reqctx->imm = 0;
	}

	if (!reqctx->imm) {
		bytes = chcr_sg_ent_in_wr(req->src, req->dst, 0,
					  CIP_SPACE_LEFT(ablkctx->enckey_len),
					  0, 0);
		if ((bytes + reqctx->processed) >= req->cryptlen)
			bytes = req->cryptlen - reqctx->processed;
		else
			bytes = rounddown(bytes, 16);
	} else {
		bytes = req->cryptlen;
	}
	if (subtype == CRYPTO_ALG_SUB_TYPE_CTR)
		bytes = adjust_ctr_overflow(req->iv, bytes);

	if (subtype == CRYPTO_ALG_SUB_TYPE_CTR_RFC3686) {
		memcpy(reqctx->iv, ablkctx->nonce, CTR_RFC3686_NONCE_SIZE);
		memcpy(reqctx->iv + CTR_RFC3686_NONCE_SIZE, req->iv,
		       CTR_RFC3686_IV_SIZE);

		/* initialize counter portion of counter block */
		*(__be32 *)(reqctx->iv + CTR_RFC3686_NONCE_SIZE +
			CTR_RFC3686_IV_SIZE) = cpu_to_be32(1);
		memcpy(reqctx->init_iv, reqctx->iv, IV);
	} else {
		memcpy(reqctx->iv, req->iv, IV);
		memcpy(reqctx->init_iv, req->iv, IV);
	}
	if (unlikely(bytes == 0)) {
		chcr_cipher_dma_unmap(&ULD_CTX(c_ctx(tfm))->lldi.pdev->dev,
				      req);
fallback:	atomic_inc(&adap->chcr_stats.fallback);
		err = chcr_cipher_fallback(ablkctx->sw_cipher, req,
					   subtype ==
					   CRYPTO_ALG_SUB_TYPE_CTR_RFC3686 ?
					   reqctx->iv : req->iv,
					   op_type);
		goto error;
	}
	reqctx->op = op_type;
	reqctx->srcsg = req->src;
	reqctx->dstsg = req->dst;
	reqctx->src_ofst = 0;
	reqctx->dst_ofst = 0;
	wrparam.qid = qid;
	wrparam.req = req;
	wrparam.bytes = bytes;
	*skb = create_cipher_wr(&wrparam);
	if (IS_ERR(*skb)) {
		err = PTR_ERR(*skb);
		goto unmap;
	}
	reqctx->processed = bytes;
	reqctx->last_req_len = bytes;
	reqctx->partial_req = !!(req->cryptlen - reqctx->processed);

	return 0;
unmap:
	chcr_cipher_dma_unmap(&ULD_CTX(c_ctx(tfm))->lldi.pdev->dev, req);
error:
	return err;
}

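/*
 * The submission paths below return -EINPROGRESS on success, per the
 * async crypto API convention; completion is reported from
 * chcr_handle_cipher_resp().  For CBC requests whose flags are exactly
 * CRYPTO_TFM_REQ_MAY_SLEEP, chcr_aes_encrypt() additionally waits on
 * cbc_aes_aio_done, which chcr_handle_cipher_resp() completes, before
 * returning.
 */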
static int chcr_aes_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct chcr_skcipher_req_ctx *reqctx = skcipher_request_ctx(req);
	struct chcr_dev *dev = c_ctx(tfm)->dev;
	struct sk_buff *skb = NULL;
	int err;
	struct uld_ctx *u_ctx = ULD_CTX(c_ctx(tfm));
	struct chcr_context *ctx = c_ctx(tfm);
	unsigned int cpu;

	cpu = get_cpu();
	reqctx->txqidx = cpu % ctx->ntxq;
	reqctx->rxqidx = cpu % ctx->nrxq;
	put_cpu();

	err = chcr_inc_wrcount(dev);
	if (err)
		return -ENXIO;
	if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
					    reqctx->txqidx) &&
		     (!(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG)))) {
		err = -ENOSPC;
		goto error;
	}

	err = process_cipher(req, u_ctx->lldi.rxq_ids[reqctx->rxqidx],
			     &skb, CHCR_ENCRYPT_OP);
	if (err || !skb)
		return err;
	skb->dev = u_ctx->lldi.ports[0];
	set_wr_txq(skb, CPL_PRIORITY_DATA, reqctx->txqidx);
	chcr_send_wr(skb);
	if (get_cryptoalg_subtype(tfm) ==
	    CRYPTO_ALG_SUB_TYPE_CBC && req->base.flags ==
	    CRYPTO_TFM_REQ_MAY_SLEEP) {
		reqctx->partial_req = 1;
		wait_for_completion(&ctx->cbc_aes_aio_done);
	}
	return -EINPROGRESS;
error:
	chcr_dec_wrcount(dev);
	return err;
}

static int chcr_aes_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct chcr_skcipher_req_ctx *reqctx = skcipher_request_ctx(req);
	struct uld_ctx *u_ctx = ULD_CTX(c_ctx(tfm));
	struct chcr_dev *dev = c_ctx(tfm)->dev;
	struct sk_buff *skb = NULL;
	int err;
	struct chcr_context *ctx = c_ctx(tfm);
	unsigned int cpu;

	cpu = get_cpu();
	reqctx->txqidx = cpu % ctx->ntxq;
	reqctx->rxqidx = cpu % ctx->nrxq;
	put_cpu();

	err = chcr_inc_wrcount(dev);
	if (err)
		return -ENXIO;

	if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
					    reqctx->txqidx) &&
		     (!(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))))
		return -ENOSPC;
	err = process_cipher(req, u_ctx->lldi.rxq_ids[reqctx->rxqidx],
			     &skb, CHCR_DECRYPT_OP);
	if (err || !skb)
		return err;
	skb->dev = u_ctx->lldi.ports[0];
	set_wr_txq(skb, CPL_PRIORITY_DATA, reqctx->txqidx);
	chcr_send_wr(skb);
	return -EINPROGRESS;
}

static int chcr_device_init(struct chcr_context *ctx)
{
	struct uld_ctx *u_ctx = NULL;
	int txq_perchan, ntxq;
	int err = 0, rxq_perchan;

	if (!ctx->dev) {
		u_ctx = assign_chcr_device();
		if (!u_ctx) {
			err = -ENXIO;
			pr_err("chcr device assignment fails\n");
			goto out;
		}
		ctx->dev = &u_ctx->dev;
		ntxq = u_ctx->lldi.ntxq;
		rxq_perchan = u_ctx->lldi.nrxq / u_ctx->lldi.nchan;
		txq_perchan = ntxq / u_ctx->lldi.nchan;
		ctx->ntxq = ntxq;
		ctx->nrxq = u_ctx->lldi.nrxq;
		ctx->rxq_perchan = rxq_perchan;
		ctx->txq_perchan = txq_perchan;
	}
out:
	return err;
}

static int chcr_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct chcr_context *ctx = crypto_skcipher_ctx(tfm);
	struct ablk_ctx *ablkctx = ABLK_CTX(ctx);

	ablkctx->sw_cipher = crypto_alloc_skcipher(alg->base.cra_name, 0,
						   CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ablkctx->sw_cipher)) {
		pr_err("failed to allocate fallback for %s\n", alg->base.cra_name);
		return PTR_ERR(ablkctx->sw_cipher);
	}
	init_completion(&ctx->cbc_aes_aio_done);
	crypto_skcipher_set_reqsize(tfm, sizeof(struct chcr_skcipher_req_ctx) +
					 crypto_skcipher_reqsize(ablkctx->sw_cipher));

	return chcr_device_init(ctx);
}

static int chcr_rfc3686_init(struct crypto_skcipher *tfm)
{
	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
	struct chcr_context *ctx = crypto_skcipher_ctx(tfm);
	struct ablk_ctx *ablkctx = ABLK_CTX(ctx);

	/* RFC3686 initialises the IV counter value to 1, so rfc3686(ctr(aes))
	 * cannot be used as the fallback in chcr_handle_cipher_resp
	 */
	ablkctx->sw_cipher = crypto_alloc_skcipher("ctr(aes)", 0,
						   CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ablkctx->sw_cipher)) {
		pr_err("failed to allocate fallback for %s\n", alg->base.cra_name);
		return PTR_ERR(ablkctx->sw_cipher);
	}
	crypto_skcipher_set_reqsize(tfm, sizeof(struct chcr_skcipher_req_ctx) +
				    crypto_skcipher_reqsize(ablkctx->sw_cipher));
	return chcr_device_init(ctx);
}

static void chcr_exit_tfm(struct crypto_skcipher *tfm)
{
	struct chcr_context *ctx = crypto_skcipher_ctx(tfm);
	struct ablk_ctx *ablkctx = ABLK_CTX(ctx);

	crypto_free_skcipher(ablkctx->sw_cipher);
}

static int get_alg_config(struct algo_param *params,
			  unsigned int auth_size)
{
	switch (auth_size) {
	case SHA1_DIGEST_SIZE:
		params->mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_160;
		params->auth_mode = CHCR_SCMD_AUTH_MODE_SHA1;
		params->result_size = SHA1_DIGEST_SIZE;
		break;
	case SHA224_DIGEST_SIZE:
		params->mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_256;
		params->auth_mode = CHCR_SCMD_AUTH_MODE_SHA224;
		params->result_size = SHA256_DIGEST_SIZE;
		break;
	case SHA256_DIGEST_SIZE:
		params->mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_256;
		params->auth_mode = CHCR_SCMD_AUTH_MODE_SHA256;
		params->result_size = SHA256_DIGEST_SIZE;
		break;
	case SHA384_DIGEST_SIZE:
		params->mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_512;
		params->auth_mode = CHCR_SCMD_AUTH_MODE_SHA512_384;
		params->result_size = SHA512_DIGEST_SIZE;
		break;
	case SHA512_DIGEST_SIZE:
		params->mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_512;
		params->auth_mode = CHCR_SCMD_AUTH_MODE_SHA512_512;
		params->result_size = SHA512_DIGEST_SIZE;
		break;
	default:
		pr_err("ERROR, unsupported digest size\n");
		return -EINVAL;
	}
	return 0;
}

static inline void chcr_free_shash(struct crypto_shash *base_hash)
{
	crypto_free_shash(base_hash);
}

/**
 * create_hash_wr - Create hash work request
 * @req: crypto ahash request
 * @param: hash work request parameters
 */
static struct sk_buff *create_hash_wr(struct ahash_request *req,
				      struct hash_wr_param *param)
{
	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct chcr_context *ctx = h_ctx(tfm);
	struct hmac_ctx *hmacctx = HMAC_CTX(ctx);
	struct sk_buff *skb = NULL;
	struct uld_ctx *u_ctx = ULD_CTX(ctx);
	struct chcr_wr *chcr_req;
	struct ulptx_sgl *ulptx;
	unsigned int nents = 0, transhdr_len;
	unsigned int temp = 0;
	gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
		GFP_ATOMIC;
	struct adapter *adap = padap(h_ctx(tfm)->dev);
	int error = 0;
	unsigned int rx_channel_id = req_ctx->rxqidx / ctx->rxq_perchan;

	rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]);
	transhdr_len = HASH_TRANSHDR_SIZE(param->kctx_len);
	req_ctx->hctx_wr.imm = (transhdr_len + param->bfr_len +
				param->sg_len) <= SGE_MAX_WR_LEN;
	nents = sg_nents_xlen(req_ctx->hctx_wr.srcsg, param->sg_len,
			      CHCR_SRC_SG_SIZE, req_ctx->hctx_wr.src_ofst);
	nents += param->bfr_len ? 1 : 0;
	transhdr_len += req_ctx->hctx_wr.imm ? roundup(param->bfr_len +
				param->sg_len, 16) : (sgl_len(nents) * 8);
	transhdr_len = roundup(transhdr_len, 16);

	skb = alloc_skb(transhdr_len, flags);
	if (!skb)
		return ERR_PTR(-ENOMEM);
	chcr_req = __skb_put_zero(skb, transhdr_len);

	chcr_req->sec_cpl.op_ivinsrtofst =
		FILL_SEC_CPL_OP_IVINSR(rx_channel_id, 2, 0);

	chcr_req->sec_cpl.pldlen = htonl(param->bfr_len + param->sg_len);

	chcr_req->sec_cpl.aadstart_cipherstop_hi =
		FILL_SEC_CPL_CIPHERSTOP_HI(0, 0, 0, 0);
	chcr_req->sec_cpl.cipherstop_lo_authinsert =
		FILL_SEC_CPL_AUTHINSERT(0, 1, 0, 0);
	chcr_req->sec_cpl.seqno_numivs =
		FILL_SEC_CPL_SCMD0_SEQNO(0, 0, 0, param->alg_prm.auth_mode,
					 param->opad_needed, 0);

	chcr_req->sec_cpl.ivgen_hdrlen =
		FILL_SEC_CPL_IVGEN_HDRLEN(param->last, param->more, 0, 1, 0, 0);

	memcpy(chcr_req->key_ctx.key, req_ctx->partial_hash,
	       param->alg_prm.result_size);

	if (param->opad_needed)
		memcpy(chcr_req->key_ctx.key +
		       ((param->alg_prm.result_size <= 32) ? 32 :
			CHCR_HASH_MAX_DIGEST_SIZE),
		       hmacctx->opad, param->alg_prm.result_size);

	chcr_req->key_ctx.ctx_hdr = FILL_KEY_CTX_HDR(CHCR_KEYCTX_NO_KEY,
						     param->alg_prm.mk_size, 0,
						     param->opad_needed,
						     ((param->kctx_len +
						      sizeof(chcr_req->key_ctx)) >> 4));
	chcr_req->sec_cpl.scmd1 = cpu_to_be64((u64)param->scmd1);
	ulptx = (struct ulptx_sgl *)((u8 *)(chcr_req + 1) + param->kctx_len +
				     DUMMY_BYTES);
	if (param->bfr_len != 0) {
		req_ctx->hctx_wr.dma_addr =
			dma_map_single(&u_ctx->lldi.pdev->dev, req_ctx->reqbfr,
				       param->bfr_len, DMA_TO_DEVICE);
		if (dma_mapping_error(&u_ctx->lldi.pdev->dev,
				      req_ctx->hctx_wr.dma_addr)) {
			error = -ENOMEM;
			goto err;
		}
		req_ctx->hctx_wr.dma_len = param->bfr_len;
	} else {
		req_ctx->hctx_wr.dma_addr = 0;
	}
	chcr_add_hash_src_ent(req, ulptx, param);
	/* Request up to the max WR size */
	temp = param->kctx_len + DUMMY_BYTES + (req_ctx->hctx_wr.imm ?
				(param->sg_len + param->bfr_len) : 0);
	atomic_inc(&adap->chcr_stats.digest_rqst);
	create_wreq(h_ctx(tfm), chcr_req, &req->base, req_ctx->hctx_wr.imm,
		    param->hash_size, transhdr_len,
		    temp, 0);
	req_ctx->hctx_wr.skb = skb;
	return skb;
err:
	kfree_skb(skb);
	return ERR_PTR(error);
}

static int chcr_ahash_update(struct ahash_request *req)
{
	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req);
	struct crypto_ahash *rtfm = crypto_ahash_reqtfm(req);
	struct uld_ctx *u_ctx = ULD_CTX(h_ctx(rtfm));
	struct chcr_context *ctx = h_ctx(rtfm);
	struct chcr_dev *dev = h_ctx(rtfm)->dev;
	struct sk_buff *skb;
	u8 remainder = 0, bs;
	unsigned int nbytes = req->nbytes;
	struct hash_wr_param params;
	int error;
	unsigned int cpu;

	cpu = get_cpu();
	req_ctx->txqidx = cpu % ctx->ntxq;
	req_ctx->rxqidx = cpu % ctx->nrxq;
	put_cpu();

	bs = crypto_tfm_alg_blocksize(crypto_ahash_tfm(rtfm));

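	/*
	 * Partial blocks are buffered in reqbfr until a full hardware
	 * block is available.  With illustrative numbers bs = 64,
	 * reqlen = 10 already buffered and nbytes = 100 new bytes:
	 * remainder = (100 + 10) % 64 = 46, so 64 bytes are hashed now
	 * and 46 bytes are carried over to the next call.
	 */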
	if (nbytes + req_ctx->reqlen >= bs) {
		remainder = (nbytes + req_ctx->reqlen) % bs;
		nbytes = nbytes + req_ctx->reqlen - remainder;
	} else {
		sg_pcopy_to_buffer(req->src, sg_nents(req->src), req_ctx->reqbfr
				   + req_ctx->reqlen, nbytes, 0);
		req_ctx->reqlen += nbytes;
		return 0;
	}
	error = chcr_inc_wrcount(dev);
	if (error)
		return -ENXIO;
	/* Detach state for CHCR means lldi or padap is freed. Increasing
	 * the inflight count for the dev guarantees that lldi and padap
	 * remain valid
	 */
	if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
					    req_ctx->txqidx) &&
		     (!(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG)))) {
		error = -ENOSPC;
		goto err;
	}

	chcr_init_hctx_per_wr(req_ctx);
	error = chcr_hash_dma_map(&u_ctx->lldi.pdev->dev, req);
	if (error) {
		error = -ENOMEM;
		goto err;
	}
	get_alg_config(&params.alg_prm, crypto_ahash_digestsize(rtfm));
	params.kctx_len = roundup(params.alg_prm.result_size, 16);
	params.sg_len = chcr_hash_ent_in_wr(req->src, !!req_ctx->reqlen,
					    HASH_SPACE_LEFT(params.kctx_len), 0);
	if (params.sg_len > req->nbytes)
		params.sg_len = req->nbytes;
	params.sg_len = rounddown(params.sg_len + req_ctx->reqlen, bs) -
			req_ctx->reqlen;
	params.opad_needed = 0;
	params.more = 1;
	params.last = 0;
	params.bfr_len = req_ctx->reqlen;
	params.scmd1 = 0;
	req_ctx->hctx_wr.srcsg = req->src;

	params.hash_size = params.alg_prm.result_size;
	req_ctx->data_len += params.sg_len + params.bfr_len;
	skb = create_hash_wr(req, &params);
	if (IS_ERR(skb)) {
		error = PTR_ERR(skb);
		goto unmap;
	}

	req_ctx->hctx_wr.processed += params.sg_len;
	if (remainder) {
		/* Swap buffers */
		swap(req_ctx->reqbfr, req_ctx->skbfr);
		sg_pcopy_to_buffer(req->src, sg_nents(req->src),
				   req_ctx->reqbfr, remainder, req->nbytes -
				   remainder);
	}
	req_ctx->reqlen = remainder;
	skb->dev = u_ctx->lldi.ports[0];
	set_wr_txq(skb, CPL_PRIORITY_DATA, req_ctx->txqidx);
	chcr_send_wr(skb);
	return -EINPROGRESS;
unmap:
	chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req);
err:
	chcr_dec_wrcount(dev);
	return error;
}

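/*
 * Hand-build a final MD-style padding block: a 0x80 byte, zero fill, and
 * the total message length in bits as a 64-bit big-endian value in the
 * last eight bytes of the block (offset 56 for a 64-byte block, 120 for
 * a 128-byte block).  scmd1 is the length in bytes, hence the << 3.
 */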
static void create_last_hash_block(char *bfr_ptr, unsigned int bs, u64 scmd1)
{
	memset(bfr_ptr, 0, bs);
	*bfr_ptr = 0x80;
	if (bs == 64)
		*(__be64 *)(bfr_ptr + 56) = cpu_to_be64(scmd1 << 3);
	else
		*(__be64 *)(bfr_ptr + 120) = cpu_to_be64(scmd1 << 3);
}

static int chcr_ahash_final(struct ahash_request *req)
{
	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req);
	struct crypto_ahash *rtfm = crypto_ahash_reqtfm(req);
	struct chcr_dev *dev = h_ctx(rtfm)->dev;
	struct hash_wr_param params;
	struct sk_buff *skb;
	struct uld_ctx *u_ctx = ULD_CTX(h_ctx(rtfm));
	struct chcr_context *ctx = h_ctx(rtfm);
	u8 bs = crypto_tfm_alg_blocksize(crypto_ahash_tfm(rtfm));
	int error;
	unsigned int cpu;

	cpu = get_cpu();
	req_ctx->txqidx = cpu % ctx->ntxq;
	req_ctx->rxqidx = cpu % ctx->nrxq;
	put_cpu();

	error = chcr_inc_wrcount(dev);
	if (error)
		return -ENXIO;

	chcr_init_hctx_per_wr(req_ctx);
	if (is_hmac(crypto_ahash_tfm(rtfm)))
		params.opad_needed = 1;
	else
		params.opad_needed = 0;
	params.sg_len = 0;
	req_ctx->hctx_wr.isfinal = 1;
	get_alg_config(&params.alg_prm, crypto_ahash_digestsize(rtfm));
	params.kctx_len = roundup(params.alg_prm.result_size, 16);
	if (is_hmac(crypto_ahash_tfm(rtfm))) {
		params.opad_needed = 1;
		params.kctx_len *= 2;
	} else {
		params.opad_needed = 0;
	}

	req_ctx->hctx_wr.result = 1;
	params.bfr_len = req_ctx->reqlen;
	req_ctx->data_len += params.bfr_len + params.sg_len;
	req_ctx->hctx_wr.srcsg = req->src;
	if (req_ctx->reqlen == 0) {
		create_last_hash_block(req_ctx->reqbfr, bs, req_ctx->data_len);
		params.last = 0;
		params.more = 1;
		params.scmd1 = 0;
		params.bfr_len = bs;
	} else {
		params.scmd1 = req_ctx->data_len;
		params.last = 1;
		params.more = 0;
	}
	params.hash_size = crypto_ahash_digestsize(rtfm);
	skb = create_hash_wr(req, &params);
	if (IS_ERR(skb)) {
		error = PTR_ERR(skb);
		goto err;
	}
	req_ctx->reqlen = 0;
	skb->dev = u_ctx->lldi.ports[0];
	set_wr_txq(skb, CPL_PRIORITY_DATA, req_ctx->txqidx);
	chcr_send_wr(skb);
	return -EINPROGRESS;
err:
	chcr_dec_wrcount(dev);
	return error;
}

static int chcr_ahash_finup(struct ahash_request *req)
{
	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req);
	struct crypto_ahash *rtfm = crypto_ahash_reqtfm(req);
	struct chcr_dev *dev = h_ctx(rtfm)->dev;
	struct uld_ctx *u_ctx = ULD_CTX(h_ctx(rtfm));
	struct chcr_context *ctx = h_ctx(rtfm);
	struct sk_buff *skb;
	struct hash_wr_param params;
	u8 bs;
	int error;
	unsigned int cpu;

	cpu = get_cpu();
	req_ctx->txqidx = cpu % ctx->ntxq;
	req_ctx->rxqidx = cpu % ctx->nrxq;
	put_cpu();

	bs = crypto_tfm_alg_blocksize(crypto_ahash_tfm(rtfm));
	error = chcr_inc_wrcount(dev);
	if (error)
		return -ENXIO;

	if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
					    req_ctx->txqidx) &&
		     (!(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG)))) {
		error = -ENOSPC;
		goto err;
	}
	chcr_init_hctx_per_wr(req_ctx);
	error = chcr_hash_dma_map(&u_ctx->lldi.pdev->dev, req);
	if (error) {
		error = -ENOMEM;
		goto err;
	}

	get_alg_config(&params.alg_prm, crypto_ahash_digestsize(rtfm));
	params.kctx_len = roundup(params.alg_prm.result_size, 16);
	if (is_hmac(crypto_ahash_tfm(rtfm))) {
		params.kctx_len *= 2;
		params.opad_needed = 1;
	} else {
		params.opad_needed = 0;
	}

	params.sg_len = chcr_hash_ent_in_wr(req->src, !!req_ctx->reqlen,
					    HASH_SPACE_LEFT(params.kctx_len), 0);
	if (params.sg_len < req->nbytes) {
		if (is_hmac(crypto_ahash_tfm(rtfm))) {
			params.kctx_len /= 2;
			params.opad_needed = 0;
		}
		params.last = 0;
		params.more = 1;
		params.sg_len = rounddown(params.sg_len + req_ctx->reqlen, bs)
					- req_ctx->reqlen;
		params.hash_size = params.alg_prm.result_size;
		params.scmd1 = 0;
	} else {
		params.last = 1;
		params.more = 0;
		params.sg_len = req->nbytes;
		params.hash_size = crypto_ahash_digestsize(rtfm);
		params.scmd1 = req_ctx->data_len + req_ctx->reqlen +
			       params.sg_len;
	}
	params.bfr_len = req_ctx->reqlen;
	req_ctx->data_len += params.bfr_len + params.sg_len;
	req_ctx->hctx_wr.result = 1;
	req_ctx->hctx_wr.srcsg = req->src;
	if ((req_ctx->reqlen + req->nbytes) == 0) {
		create_last_hash_block(req_ctx->reqbfr, bs, req_ctx->data_len);
		params.last = 0;
		params.more = 1;
		params.scmd1 = 0;
		params.bfr_len = bs;
	}
	skb = create_hash_wr(req, &params);
	if (IS_ERR(skb)) {
		error = PTR_ERR(skb);
		goto unmap;
	}
	req_ctx->reqlen = 0;
	req_ctx->hctx_wr.processed += params.sg_len;
	skb->dev = u_ctx->lldi.ports[0];
	set_wr_txq(skb, CPL_PRIORITY_DATA, req_ctx->txqidx);
	chcr_send_wr(skb);
	return -EINPROGRESS;
unmap:
	chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req);
err:
	chcr_dec_wrcount(dev);
	return error;
}

static int chcr_ahash_digest(struct ahash_request *req)
{
	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req);
	struct crypto_ahash *rtfm = crypto_ahash_reqtfm(req);
	struct chcr_dev *dev = h_ctx(rtfm)->dev;
	struct uld_ctx *u_ctx = ULD_CTX(h_ctx(rtfm));
	struct chcr_context *ctx = h_ctx(rtfm);
	struct sk_buff *skb;
	struct hash_wr_param params;
	u8 bs;
	int error;
	unsigned int cpu;

	cpu = get_cpu();
	req_ctx->txqidx = cpu % ctx->ntxq;
	req_ctx->rxqidx = cpu % ctx->nrxq;
	put_cpu();

	rtfm->init(req);
	bs = crypto_tfm_alg_blocksize(crypto_ahash_tfm(rtfm));
	error = chcr_inc_wrcount(dev);
	if (error)
		return -ENXIO;

	if (unlikely(cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
					    req_ctx->txqidx) &&
		     (!(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG)))) {
		error = -ENOSPC;
		goto err;
	}

	chcr_init_hctx_per_wr(req_ctx);
	error = chcr_hash_dma_map(&u_ctx->lldi.pdev->dev, req);
	if (error) {
		error = -ENOMEM;
		goto err;
	}

	get_alg_config(&params.alg_prm, crypto_ahash_digestsize(rtfm));
	params.kctx_len = roundup(params.alg_prm.result_size, 16);
	if (is_hmac(crypto_ahash_tfm(rtfm))) {
		params.kctx_len *= 2;
		params.opad_needed = 1;
	} else {
		params.opad_needed = 0;
	}
	params.sg_len = chcr_hash_ent_in_wr(req->src, !!req_ctx->reqlen,
					    HASH_SPACE_LEFT(params.kctx_len), 0);
	if (params.sg_len < req->nbytes) {
		if (is_hmac(crypto_ahash_tfm(rtfm))) {
			params.kctx_len /= 2;
			params.opad_needed = 0;
		}
		params.last = 0;
		params.more = 1;
		params.scmd1 = 0;
		params.sg_len = rounddown(params.sg_len, bs);
		params.hash_size = params.alg_prm.result_size;
	} else {
		params.sg_len = req->nbytes;
		params.hash_size = crypto_ahash_digestsize(rtfm);
		params.last = 1;
		params.more = 0;
		params.scmd1 = req->nbytes + req_ctx->data_len;
	}
	params.bfr_len = 0;
	req_ctx->hctx_wr.result = 1;
	req_ctx->hctx_wr.srcsg = req->src;
	req_ctx->data_len += params.bfr_len + params.sg_len;

	if (req->nbytes == 0) {
		create_last_hash_block(req_ctx->reqbfr, bs, req_ctx->data_len);
		params.more = 1;
		params.bfr_len = bs;
	}

	skb = create_hash_wr(req, &params);
	if (IS_ERR(skb)) {
		error = PTR_ERR(skb);
		goto unmap;
	}
	req_ctx->hctx_wr.processed += params.sg_len;
	skb->dev = u_ctx->lldi.ports[0];
	set_wr_txq(skb, CPL_PRIORITY_DATA, req_ctx->txqidx);
	chcr_send_wr(skb);
	return -EINPROGRESS;
unmap:
	chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req);
err:
	chcr_dec_wrcount(dev);
	return error;
}

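/*
 * chcr_ahash_continue() is called from the response path
 * (chcr_handle_ahash_resp() below) to issue the next work request when a
 * hash operation does not fit into a single WR.
 */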
static int chcr_ahash_continue(struct ahash_request *req)
{
	struct chcr_ahash_req_ctx *reqctx = ahash_request_ctx(req);
	struct chcr_hctx_per_wr *hctx_wr = &reqctx->hctx_wr;
	struct crypto_ahash *rtfm = crypto_ahash_reqtfm(req);
	struct chcr_context *ctx = h_ctx(rtfm);
	struct uld_ctx *u_ctx = ULD_CTX(ctx);
	struct sk_buff *skb;
	struct hash_wr_param params;
	u8 bs;
	int error;
	unsigned int cpu;

	cpu = get_cpu();
	reqctx->txqidx = cpu % ctx->ntxq;
	reqctx->rxqidx = cpu % ctx->nrxq;
	put_cpu();

	bs = crypto_tfm_alg_blocksize(crypto_ahash_tfm(rtfm));
	get_alg_config(&params.alg_prm, crypto_ahash_digestsize(rtfm));
	params.kctx_len = roundup(params.alg_prm.result_size, 16);
	if (is_hmac(crypto_ahash_tfm(rtfm))) {
		params.kctx_len *= 2;
		params.opad_needed = 1;
	} else {
		params.opad_needed = 0;
	}
	params.sg_len = chcr_hash_ent_in_wr(hctx_wr->srcsg, 0,
					    HASH_SPACE_LEFT(params.kctx_len),
					    hctx_wr->src_ofst);
	if ((params.sg_len + hctx_wr->processed) > req->nbytes)
		params.sg_len = req->nbytes - hctx_wr->processed;
	if (!hctx_wr->result ||
	    ((params.sg_len + hctx_wr->processed) < req->nbytes)) {
		if (is_hmac(crypto_ahash_tfm(rtfm))) {
			params.kctx_len /= 2;
			params.opad_needed = 0;
		}
		params.last = 0;
		params.more = 1;
		params.sg_len = rounddown(params.sg_len, bs);
		params.hash_size = params.alg_prm.result_size;
		params.scmd1 = 0;
	} else {
		params.last = 1;
		params.more = 0;
		params.hash_size = crypto_ahash_digestsize(rtfm);
		params.scmd1 = reqctx->data_len + params.sg_len;
	}
	params.bfr_len = 0;
	reqctx->data_len += params.sg_len;
	skb = create_hash_wr(req, &params);
	if (IS_ERR(skb)) {
		error = PTR_ERR(skb);
		goto err;
	}
	hctx_wr->processed += params.sg_len;
	skb->dev = u_ctx->lldi.ports[0];
	set_wr_txq(skb, CPL_PRIORITY_DATA, reqctx->txqidx);
	chcr_send_wr(skb);
	return 0;
err:
	return error;
}

static inline void chcr_handle_ahash_resp(struct ahash_request *req,
					  unsigned char *input,
					  int err)
{
	struct chcr_ahash_req_ctx *reqctx = ahash_request_ctx(req);
	struct chcr_hctx_per_wr *hctx_wr = &reqctx->hctx_wr;
	int digestsize, updated_digestsize;
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct uld_ctx *u_ctx = ULD_CTX(h_ctx(tfm));
	struct chcr_dev *dev = h_ctx(tfm)->dev;

	if (input == NULL)
		goto out;
	digestsize = crypto_ahash_digestsize(crypto_ahash_reqtfm(req));
	updated_digestsize = digestsize;
	if (digestsize == SHA224_DIGEST_SIZE)
		updated_digestsize = SHA256_DIGEST_SIZE;
	else if (digestsize == SHA384_DIGEST_SIZE)
		updated_digestsize = SHA512_DIGEST_SIZE;
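	/*
	 * SHA-224 and SHA-384 are truncations of SHA-256 and SHA-512, so
	 * the intermediate state carried between work requests has the
	 * width of the parent digest.
	 */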

	if (hctx_wr->dma_addr) {
		dma_unmap_single(&u_ctx->lldi.pdev->dev, hctx_wr->dma_addr,
				 hctx_wr->dma_len, DMA_TO_DEVICE);
		hctx_wr->dma_addr = 0;
	}
	if (hctx_wr->isfinal || ((hctx_wr->processed + reqctx->reqlen) ==
				 req->nbytes)) {
		if (hctx_wr->result == 1) {
			hctx_wr->result = 0;
			memcpy(req->result, input + sizeof(struct cpl_fw6_pld),
			       digestsize);
		} else {
			memcpy(reqctx->partial_hash,
			       input + sizeof(struct cpl_fw6_pld),
			       updated_digestsize);
		}
		goto unmap;
	}
	memcpy(reqctx->partial_hash, input + sizeof(struct cpl_fw6_pld),
	       updated_digestsize);

	err = chcr_ahash_continue(req);
	if (err)
		goto unmap;
	return;
unmap:
	if (hctx_wr->is_sg_map)
		chcr_hash_dma_unmap(&u_ctx->lldi.pdev->dev, req);

out:
	chcr_dec_wrcount(dev);
	req->base.complete(&req->base, err);
}

/*
 * chcr_handle_resp - Unmap the DMA buffers associated with the request
 * @req: crypto request
 */
int chcr_handle_resp(struct crypto_async_request *req, unsigned char *input,
		     int err)
{
	struct crypto_tfm *tfm = req->tfm;
	struct chcr_context *ctx = crypto_tfm_ctx(tfm);
	struct adapter *adap = padap(ctx->dev);

	switch (tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_AEAD:
		err = chcr_handle_aead_resp(aead_request_cast(req), input, err);
		break;

	case CRYPTO_ALG_TYPE_SKCIPHER:
		chcr_handle_cipher_resp(skcipher_request_cast(req),
					input, err);
		break;
	case CRYPTO_ALG_TYPE_AHASH:
		chcr_handle_ahash_resp(ahash_request_cast(req), input, err);
	}
	atomic_inc(&adap->chcr_stats.complete);
	return err;
}
static int chcr_ahash_export(struct ahash_request *areq, void *out)
{
	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	struct chcr_ahash_req_ctx *state = out;

	state->reqlen = req_ctx->reqlen;
	state->data_len = req_ctx->data_len;
	memcpy(state->bfr1, req_ctx->reqbfr, req_ctx->reqlen);
	memcpy(state->partial_hash, req_ctx->partial_hash,
	       CHCR_HASH_MAX_DIGEST_SIZE);
	chcr_init_hctx_per_wr(state);
	return 0;
}

static int chcr_ahash_import(struct ahash_request *areq, const void *in)
{
	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	struct chcr_ahash_req_ctx *state = (struct chcr_ahash_req_ctx *)in;

	req_ctx->reqlen = state->reqlen;
	req_ctx->data_len = state->data_len;
	req_ctx->reqbfr = req_ctx->bfr1;
	req_ctx->skbfr = req_ctx->bfr2;
	memcpy(req_ctx->bfr1, state->bfr1, CHCR_HASH_MAX_BLOCK_SIZE_128);
	memcpy(req_ctx->partial_hash, state->partial_hash,
	       CHCR_HASH_MAX_DIGEST_SIZE);
	chcr_init_hctx_per_wr(req_ctx);
	return 0;
}

static int chcr_ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct hmac_ctx *hmacctx = HMAC_CTX(h_ctx(tfm));
	unsigned int digestsize = crypto_ahash_digestsize(tfm);
	unsigned int bs = crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
	unsigned int i, err = 0, updated_digestsize;

	SHASH_DESC_ON_STACK(shash, hmacctx->base_hash);

	/* Use the key to calculate the ipad and opad. The ipad will be sent
	 * with the first request's data, and the opad with the final hash
	 * result. They live in hmacctx->ipad and hmacctx->opad respectively.
	 */
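	/*
	 * Standard HMAC key schedule: ipad = K' ^ 0x36...36 and
	 * opad = K' ^ 0x5c...5c over one block; IPAD_DATA and OPAD_DATA
	 * are the 32-bit repetitions of those pad bytes, applied one
	 * word at a time in the loop below.
	 */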
2214*4882a593Smuzhiyun shash->tfm = hmacctx->base_hash;
2215*4882a593Smuzhiyun if (keylen > bs) {
2216*4882a593Smuzhiyun err = crypto_shash_digest(shash, key, keylen,
2217*4882a593Smuzhiyun hmacctx->ipad);
2218*4882a593Smuzhiyun if (err)
2219*4882a593Smuzhiyun goto out;
2220*4882a593Smuzhiyun keylen = digestsize;
2221*4882a593Smuzhiyun } else {
2222*4882a593Smuzhiyun memcpy(hmacctx->ipad, key, keylen);
2223*4882a593Smuzhiyun }
2224*4882a593Smuzhiyun memset(hmacctx->ipad + keylen, 0, bs - keylen);
2225*4882a593Smuzhiyun memcpy(hmacctx->opad, hmacctx->ipad, bs);
2226*4882a593Smuzhiyun
2227*4882a593Smuzhiyun for (i = 0; i < bs / sizeof(int); i++) {
2228*4882a593Smuzhiyun *((unsigned int *)(&hmacctx->ipad) + i) ^= IPAD_DATA;
2229*4882a593Smuzhiyun *((unsigned int *)(&hmacctx->opad) + i) ^= OPAD_DATA;
2230*4882a593Smuzhiyun }
2231*4882a593Smuzhiyun
2232*4882a593Smuzhiyun updated_digestsize = digestsize;
2233*4882a593Smuzhiyun if (digestsize == SHA224_DIGEST_SIZE)
2234*4882a593Smuzhiyun updated_digestsize = SHA256_DIGEST_SIZE;
2235*4882a593Smuzhiyun else if (digestsize == SHA384_DIGEST_SIZE)
2236*4882a593Smuzhiyun updated_digestsize = SHA512_DIGEST_SIZE;
2237*4882a593Smuzhiyun err = chcr_compute_partial_hash(shash, hmacctx->ipad,
2238*4882a593Smuzhiyun hmacctx->ipad, digestsize);
2239*4882a593Smuzhiyun if (err)
2240*4882a593Smuzhiyun goto out;
2241*4882a593Smuzhiyun chcr_change_order(hmacctx->ipad, updated_digestsize);
2242*4882a593Smuzhiyun
2243*4882a593Smuzhiyun err = chcr_compute_partial_hash(shash, hmacctx->opad,
2244*4882a593Smuzhiyun hmacctx->opad, digestsize);
2245*4882a593Smuzhiyun if (err)
2246*4882a593Smuzhiyun goto out;
2247*4882a593Smuzhiyun chcr_change_order(hmacctx->opad, updated_digestsize);
2248*4882a593Smuzhiyun out:
2249*4882a593Smuzhiyun return err;
2250*4882a593Smuzhiyun }
2251*4882a593Smuzhiyun
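/* XTS keys arrive as two concatenated AES keys (key1 | key2). The
 * hardware key context needs each key to start on a 16-byte boundary,
 * so a 48-byte input (two 192-bit keys) is expanded in place to 64
 * bytes:
 *
 *	bytes  0..23 key1, bytes 24..31 zero pad
 *	bytes 32..55 key2, bytes 56..63 zero pad
 */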
static int chcr_aes_xts_setkey(struct crypto_skcipher *cipher, const u8 *key,
			       unsigned int key_len)
{
	struct ablk_ctx *ablkctx = ABLK_CTX(c_ctx(cipher));
	unsigned short context_size = 0;
	int err;

	err = chcr_cipher_fallback_setkey(cipher, key, key_len);
	if (err)
		goto badkey_err;

	memcpy(ablkctx->key, key, key_len);
	ablkctx->enckey_len = key_len;
	get_aes_decrypt_key(ablkctx->rrkey, ablkctx->key, key_len << 2);
	context_size = (KEY_CONTEXT_HDR_SALT_AND_PAD + key_len) >> 4;
	/* Both XTS keys must be aligned to a 16-byte boundary by padding
	 * with zeroes, so each 24-byte key gets 8 bytes of padding.
	 */
	if (key_len == 48) {
		context_size = (KEY_CONTEXT_HDR_SALT_AND_PAD + key_len
				+ 16) >> 4;
		memmove(ablkctx->key + 32, ablkctx->key + 24, 24);
		memset(ablkctx->key + 24, 0, 8);
		memset(ablkctx->key + 56, 0, 8);
		ablkctx->enckey_len = 64;
		ablkctx->key_ctx_hdr =
			FILL_KEY_CTX_HDR(CHCR_KEYCTX_CIPHER_KEY_SIZE_192,
					 CHCR_KEYCTX_NO_KEY, 1,
					 0, context_size);
	} else {
		ablkctx->key_ctx_hdr =
			FILL_KEY_CTX_HDR((key_len == AES_KEYSIZE_256) ?
					 CHCR_KEYCTX_CIPHER_KEY_SIZE_128 :
					 CHCR_KEYCTX_CIPHER_KEY_SIZE_256,
					 CHCR_KEYCTX_NO_KEY, 1,
					 0, context_size);
	}
	ablkctx->ciph_mode = CHCR_SCMD_CIPHER_MODE_AES_XTS;
	return 0;
badkey_err:
	ablkctx->enckey_len = 0;

	return err;
}

static int chcr_sha_init(struct ahash_request *areq)
{
	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	int digestsize = crypto_ahash_digestsize(tfm);

	req_ctx->data_len = 0;
	req_ctx->reqlen = 0;
	req_ctx->reqbfr = req_ctx->bfr1;
	req_ctx->skbfr = req_ctx->bfr2;
	copy_hash_init_values(req_ctx->partial_hash, digestsize);

	return 0;
}

static int chcr_sha_cra_init(struct crypto_tfm *tfm)
{
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct chcr_ahash_req_ctx));
	return chcr_device_init(crypto_tfm_ctx(tfm));
}

static int chcr_hmac_init(struct ahash_request *areq)
{
	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	struct crypto_ahash *rtfm = crypto_ahash_reqtfm(areq);
	struct hmac_ctx *hmacctx = HMAC_CTX(h_ctx(rtfm));
	unsigned int digestsize = crypto_ahash_digestsize(rtfm);
	unsigned int bs = crypto_tfm_alg_blocksize(crypto_ahash_tfm(rtfm));

	chcr_sha_init(areq);
	req_ctx->data_len = bs;
	if (is_hmac(crypto_ahash_tfm(rtfm))) {
		if (digestsize == SHA224_DIGEST_SIZE)
			memcpy(req_ctx->partial_hash, hmacctx->ipad,
			       SHA256_DIGEST_SIZE);
		else if (digestsize == SHA384_DIGEST_SIZE)
			memcpy(req_ctx->partial_hash, hmacctx->ipad,
			       SHA512_DIGEST_SIZE);
		else
			memcpy(req_ctx->partial_hash, hmacctx->ipad,
			       digestsize);
	}
	return 0;
}

static int chcr_hmac_cra_init(struct crypto_tfm *tfm)
{
	struct chcr_context *ctx = crypto_tfm_ctx(tfm);
	struct hmac_ctx *hmacctx = HMAC_CTX(ctx);
	unsigned int digestsize =
		crypto_ahash_digestsize(__crypto_ahash_cast(tfm));

	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct chcr_ahash_req_ctx));
	hmacctx->base_hash = chcr_alloc_shash(digestsize);
	if (IS_ERR(hmacctx->base_hash))
		return PTR_ERR(hmacctx->base_hash);
	return chcr_device_init(crypto_tfm_ctx(tfm));
}

static void chcr_hmac_cra_exit(struct crypto_tfm *tfm)
{
	struct chcr_context *ctx = crypto_tfm_ctx(tfm);
	struct hmac_ctx *hmacctx = HMAC_CTX(ctx);

	if (hmacctx->base_hash) {
		chcr_free_shash(hmacctx->base_hash);
		hmacctx->base_hash = NULL;
	}
}

inline void chcr_aead_common_exit(struct aead_request *req)
{
	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct uld_ctx *u_ctx = ULD_CTX(a_ctx(tfm));

	chcr_aead_dma_unmap(&u_ctx->lldi.pdev->dev, req, reqctx->op);
}

static int chcr_aead_common_init(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
	unsigned int authsize = crypto_aead_authsize(tfm);
	int error = -EINVAL;

	/* validate key size */
	if (aeadctx->enckey_len == 0)
		goto err;
	if (reqctx->op && req->cryptlen < authsize)
		goto err;
	if (reqctx->b0_len)
		reqctx->scratch_pad = reqctx->iv + IV;
	else
		reqctx->scratch_pad = NULL;

	error = chcr_aead_dma_map(&ULD_CTX(a_ctx(tfm))->lldi.pdev->dev, req,
				  reqctx->op);
	if (error) {
		error = -ENOMEM;
		goto err;
	}

	return 0;
err:
	return error;
}

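/* Decide whether a request must be punted to the software fallback:
 * a zero-length payload, a destination list longer than one DSGL can
 * describe, AAD beyond what the unit accepts, or a work request that
 * exceeds the SGE limit.
 */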
static int chcr_aead_need_fallback(struct aead_request *req, int dst_nents,
				   int aadmax, int wrlen,
				   unsigned short op_type)
{
	unsigned int authsize = crypto_aead_authsize(crypto_aead_reqtfm(req));

	if (((req->cryptlen - (op_type ? authsize : 0)) == 0) ||
	    dst_nents > MAX_DSGL_ENT ||
	    (req->assoclen > aadmax) ||
	    (wrlen > SGE_MAX_WR_LEN))
		return 1;
	return 0;
}

static int chcr_aead_fallback(struct aead_request *req, unsigned short op_type)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
	struct aead_request *subreq = aead_request_ctx(req);

	aead_request_set_tfm(subreq, aeadctx->sw_cipher);
	aead_request_set_callback(subreq, req->base.flags,
				  req->base.complete, req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);
	return op_type ? crypto_aead_decrypt(subreq) :
			 crypto_aead_encrypt(subreq);
}

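/* Build the work request for AUTHENC (cipher plus hash) offload. The
 * hardware input is laid out as AAD | IV | payload, with the IV
 * authenticated along with the AAD; the key context carries the
 * cipher key followed by the precomputed ipad/opad partial hashes.
 */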
static struct sk_buff *create_authenc_wr(struct aead_request *req,
					 unsigned short qid,
					 int size)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct chcr_context *ctx = a_ctx(tfm);
	struct uld_ctx *u_ctx = ULD_CTX(ctx);
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
	struct chcr_authenc_ctx *actx = AUTHENC_CTX(aeadctx);
	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
	struct sk_buff *skb = NULL;
	struct chcr_wr *chcr_req;
	struct cpl_rx_phys_dsgl *phys_cpl;
	struct ulptx_sgl *ulptx;
	unsigned int transhdr_len;
	unsigned int dst_size = 0, temp, subtype = get_aead_subtype(tfm);
	unsigned int kctx_len = 0, dnents, snents;
	unsigned int authsize = crypto_aead_authsize(tfm);
	int error = -EINVAL;
	u8 *ivptr;
	int null = 0;
	gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
		GFP_ATOMIC;
	struct adapter *adap = padap(ctx->dev);
	unsigned int rx_channel_id = reqctx->rxqidx / ctx->rxq_perchan;

	rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]);
	if (req->cryptlen == 0)
		return NULL;

	reqctx->b0_len = 0;
	error = chcr_aead_common_init(req);
	if (error)
		return ERR_PTR(error);

	if (subtype == CRYPTO_ALG_SUB_TYPE_CBC_NULL ||
	    subtype == CRYPTO_ALG_SUB_TYPE_CTR_NULL) {
		null = 1;
	}
	dnents = sg_nents_xlen(req->dst, req->assoclen + req->cryptlen +
		(reqctx->op ? -authsize : authsize), CHCR_DST_SG_SIZE, 0);
	dnents += MIN_AUTH_SG; // For IV
	snents = sg_nents_xlen(req->src, req->assoclen + req->cryptlen,
			       CHCR_SRC_SG_SIZE, 0);
	dst_size = get_space_for_phys_dsgl(dnents);
	kctx_len = (KEY_CONTEXT_CTX_LEN_G(ntohl(aeadctx->key_ctx_hdr)) << 4)
		- sizeof(chcr_req->key_ctx);
	transhdr_len = CIPHER_TRANSHDR_SIZE(kctx_len, dst_size);
	reqctx->imm = (transhdr_len + req->assoclen + req->cryptlen) <
			SGE_MAX_WR_LEN;
	temp = reqctx->imm ? roundup(req->assoclen + req->cryptlen, 16)
			: (sgl_len(snents) * 8);
	transhdr_len += temp;
	transhdr_len = roundup(transhdr_len, 16);

	if (chcr_aead_need_fallback(req, dnents, T6_MAX_AAD_SIZE,
				    transhdr_len, reqctx->op)) {
		atomic_inc(&adap->chcr_stats.fallback);
		chcr_aead_common_exit(req);
		return ERR_PTR(chcr_aead_fallback(req, reqctx->op));
	}
	skb = alloc_skb(transhdr_len, flags);
	if (!skb) {
		error = -ENOMEM;
		goto err;
	}

	chcr_req = __skb_put_zero(skb, transhdr_len);

	temp = (reqctx->op == CHCR_ENCRYPT_OP) ? 0 : authsize;

	/*
	 * The input order is AAD, IV and payload, where the IV is included
	 * as part of the authenticated data. All other fields are filled
	 * according to the hardware spec.
	 */
	chcr_req->sec_cpl.op_ivinsrtofst =
			FILL_SEC_CPL_OP_IVINSR(rx_channel_id, 2, 1);
	chcr_req->sec_cpl.pldlen = htonl(req->assoclen + IV + req->cryptlen);
	chcr_req->sec_cpl.aadstart_cipherstop_hi = FILL_SEC_CPL_CIPHERSTOP_HI(
					null ? 0 : 1 + IV,
					null ? 0 : IV + req->assoclen,
					req->assoclen + IV + 1,
					(temp & 0x1F0) >> 4);
	chcr_req->sec_cpl.cipherstop_lo_authinsert = FILL_SEC_CPL_AUTHINSERT(
					temp & 0xF,
					null ? 0 : req->assoclen + IV + 1,
					temp, temp);
	if (subtype == CRYPTO_ALG_SUB_TYPE_CTR_NULL ||
	    subtype == CRYPTO_ALG_SUB_TYPE_CTR_SHA)
		temp = CHCR_SCMD_CIPHER_MODE_AES_CTR;
	else
		temp = CHCR_SCMD_CIPHER_MODE_AES_CBC;
	chcr_req->sec_cpl.seqno_numivs = FILL_SEC_CPL_SCMD0_SEQNO(reqctx->op,
					(reqctx->op == CHCR_ENCRYPT_OP) ? 1 : 0,
					temp,
					actx->auth_mode, aeadctx->hmac_ctrl,
					IV >> 1);
	chcr_req->sec_cpl.ivgen_hdrlen = FILL_SEC_CPL_IVGEN_HDRLEN(0, 0, 1,
					0, 0, dst_size);

	chcr_req->key_ctx.ctx_hdr = aeadctx->key_ctx_hdr;
	if (reqctx->op == CHCR_ENCRYPT_OP ||
	    subtype == CRYPTO_ALG_SUB_TYPE_CTR_SHA ||
	    subtype == CRYPTO_ALG_SUB_TYPE_CTR_NULL)
		memcpy(chcr_req->key_ctx.key, aeadctx->key,
		       aeadctx->enckey_len);
	else
		memcpy(chcr_req->key_ctx.key, actx->dec_rrkey,
		       aeadctx->enckey_len);

	memcpy(chcr_req->key_ctx.key + roundup(aeadctx->enckey_len, 16),
	       actx->h_iopad, kctx_len - roundup(aeadctx->enckey_len, 16));
	phys_cpl = (struct cpl_rx_phys_dsgl *)((u8 *)(chcr_req + 1) + kctx_len);
	ivptr = (u8 *)(phys_cpl + 1) + dst_size;
	ulptx = (struct ulptx_sgl *)(ivptr + IV);
	if (subtype == CRYPTO_ALG_SUB_TYPE_CTR_SHA ||
	    subtype == CRYPTO_ALG_SUB_TYPE_CTR_NULL) {
		memcpy(ivptr, aeadctx->nonce, CTR_RFC3686_NONCE_SIZE);
		memcpy(ivptr + CTR_RFC3686_NONCE_SIZE, req->iv,
		       CTR_RFC3686_IV_SIZE);
		*(__be32 *)(ivptr + CTR_RFC3686_NONCE_SIZE +
			    CTR_RFC3686_IV_SIZE) = cpu_to_be32(1);
	} else {
		memcpy(ivptr, req->iv, IV);
	}
	chcr_add_aead_dst_ent(req, phys_cpl, qid);
	chcr_add_aead_src_ent(req, ulptx);
	atomic_inc(&adap->chcr_stats.cipher_rqst);
	temp = sizeof(struct cpl_rx_phys_dsgl) + dst_size + IV +
		kctx_len + (reqctx->imm ? (req->assoclen + req->cryptlen) : 0);
	create_wreq(a_ctx(tfm), chcr_req, &req->base, reqctx->imm, size,
		    transhdr_len, temp, 0);
	reqctx->skb = skb;

	return skb;
err:
	chcr_aead_common_exit(req);

	return ERR_PTR(error);
}

int chcr_aead_dma_map(struct device *dev,
		      struct aead_request *req,
		      unsigned short op_type)
{
	int error;
	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(tfm);
	int src_len, dst_len;

	/* calculate and handle src and dst sg length separately
	 * for in-place and out-of-place operations
	 */
	if (req->src == req->dst) {
		src_len = req->assoclen + req->cryptlen + (op_type ?
							   0 : authsize);
		dst_len = src_len;
	} else {
		src_len = req->assoclen + req->cryptlen;
		dst_len = req->assoclen + req->cryptlen + (op_type ?
							   -authsize : authsize);
	}

	if (!req->cryptlen || !src_len || !dst_len)
		return 0;
	reqctx->iv_dma = dma_map_single(dev, reqctx->iv, (IV + reqctx->b0_len),
					DMA_BIDIRECTIONAL);
	if (dma_mapping_error(dev, reqctx->iv_dma))
		return -ENOMEM;
	if (reqctx->b0_len)
		reqctx->b0_dma = reqctx->iv_dma + IV;
	else
		reqctx->b0_dma = 0;
	if (req->src == req->dst) {
		error = dma_map_sg(dev, req->src,
				   sg_nents_for_len(req->src, src_len),
				   DMA_BIDIRECTIONAL);
		if (!error)
			goto err;
	} else {
		error = dma_map_sg(dev, req->src,
				   sg_nents_for_len(req->src, src_len),
				   DMA_TO_DEVICE);
		if (!error)
			goto err;
		error = dma_map_sg(dev, req->dst,
				   sg_nents_for_len(req->dst, dst_len),
				   DMA_FROM_DEVICE);
		if (!error) {
			dma_unmap_sg(dev, req->src,
				     sg_nents_for_len(req->src, src_len),
				     DMA_TO_DEVICE);
			goto err;
		}
	}

	return 0;
err:
	dma_unmap_single(dev, reqctx->iv_dma, (IV + reqctx->b0_len),
			 DMA_BIDIRECTIONAL);
	return -ENOMEM;
}

void chcr_aead_dma_unmap(struct device *dev,
			 struct aead_request *req,
			 unsigned short op_type)
{
	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(tfm);
	int src_len, dst_len;

	/* calculate and handle src and dst sg length separately
	 * for in-place and out-of-place operations
	 */
	if (req->src == req->dst) {
		src_len = req->assoclen + req->cryptlen + (op_type ?
							   0 : authsize);
		dst_len = src_len;
	} else {
		src_len = req->assoclen + req->cryptlen;
		dst_len = req->assoclen + req->cryptlen + (op_type ?
							   -authsize : authsize);
	}

	if (!req->cryptlen || !src_len || !dst_len)
		return;

	dma_unmap_single(dev, reqctx->iv_dma, (IV + reqctx->b0_len),
			 DMA_BIDIRECTIONAL);
	if (req->src == req->dst) {
		dma_unmap_sg(dev, req->src,
			     sg_nents_for_len(req->src, src_len),
			     DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(dev, req->src,
			     sg_nents_for_len(req->src, src_len),
			     DMA_TO_DEVICE);
		dma_unmap_sg(dev, req->dst,
			     sg_nents_for_len(req->dst, dst_len),
			     DMA_FROM_DEVICE);
	}
}

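/* Source data is handed to the hardware either inline (immediate data
 * copied straight into the work request) or as a ULPTX scatter-gather
 * list, depending on the reqctx->imm decision made at WR-build time.
 */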
void chcr_add_aead_src_ent(struct aead_request *req,
			   struct ulptx_sgl *ulptx)
{
	struct ulptx_walk ulp_walk;
	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);

	if (reqctx->imm) {
		u8 *buf = (u8 *)ulptx;

		if (reqctx->b0_len) {
			memcpy(buf, reqctx->scratch_pad, reqctx->b0_len);
			buf += reqctx->b0_len;
		}
		sg_pcopy_to_buffer(req->src, sg_nents(req->src),
				   buf, req->cryptlen + req->assoclen, 0);
	} else {
		ulptx_walk_init(&ulp_walk, ulptx);
		if (reqctx->b0_len)
			ulptx_walk_add_page(&ulp_walk, reqctx->b0_len,
					    reqctx->b0_dma);
		ulptx_walk_add_sg(&ulp_walk, req->src, req->cryptlen +
				  req->assoclen, 0);
		ulptx_walk_end(&ulp_walk);
	}
}

void chcr_add_aead_dst_ent(struct aead_request *req,
			   struct cpl_rx_phys_dsgl *phys_cpl,
			   unsigned short qid)
{
	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct dsgl_walk dsgl_walk;
	unsigned int authsize = crypto_aead_authsize(tfm);
	struct chcr_context *ctx = a_ctx(tfm);
	struct uld_ctx *u_ctx = ULD_CTX(ctx);
	u32 temp;
	unsigned int rx_channel_id = reqctx->rxqidx / ctx->rxq_perchan;

	rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]);
	dsgl_walk_init(&dsgl_walk, phys_cpl);
	dsgl_walk_add_page(&dsgl_walk, IV + reqctx->b0_len, reqctx->iv_dma);
	temp = req->assoclen + req->cryptlen +
		(reqctx->op ? -authsize : authsize);
	dsgl_walk_add_sg(&dsgl_walk, req->dst, temp, 0);
	dsgl_walk_end(&dsgl_walk, qid, rx_channel_id);
}

void chcr_add_cipher_src_ent(struct skcipher_request *req,
			     void *ulptx,
			     struct cipher_wr_param *wrparam)
{
	struct ulptx_walk ulp_walk;
	struct chcr_skcipher_req_ctx *reqctx = skcipher_request_ctx(req);
	u8 *buf = ulptx;

	memcpy(buf, reqctx->iv, IV);
	buf += IV;
	if (reqctx->imm) {
		sg_pcopy_to_buffer(req->src, sg_nents(req->src),
				   buf, wrparam->bytes, reqctx->processed);
	} else {
		ulptx_walk_init(&ulp_walk, (struct ulptx_sgl *)buf);
		ulptx_walk_add_sg(&ulp_walk, reqctx->srcsg, wrparam->bytes,
				  reqctx->src_ofst);
		reqctx->srcsg = ulp_walk.last_sg;
		reqctx->src_ofst = ulp_walk.last_sg_len;
		ulptx_walk_end(&ulp_walk);
	}
}

void chcr_add_cipher_dst_ent(struct skcipher_request *req,
			     struct cpl_rx_phys_dsgl *phys_cpl,
			     struct cipher_wr_param *wrparam,
			     unsigned short qid)
{
	struct chcr_skcipher_req_ctx *reqctx = skcipher_request_ctx(req);
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(wrparam->req);
	struct chcr_context *ctx = c_ctx(tfm);
	struct uld_ctx *u_ctx = ULD_CTX(ctx);
	struct dsgl_walk dsgl_walk;
	unsigned int rx_channel_id = reqctx->rxqidx / ctx->rxq_perchan;

	rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]);
	dsgl_walk_init(&dsgl_walk, phys_cpl);
	dsgl_walk_add_sg(&dsgl_walk, reqctx->dstsg, wrparam->bytes,
			 reqctx->dst_ofst);
	reqctx->dstsg = dsgl_walk.last_sg;
	reqctx->dst_ofst = dsgl_walk.last_sg_len;
	dsgl_walk_end(&dsgl_walk, qid, rx_channel_id);
}

void chcr_add_hash_src_ent(struct ahash_request *req,
			   struct ulptx_sgl *ulptx,
			   struct hash_wr_param *param)
{
	struct ulptx_walk ulp_walk;
	struct chcr_ahash_req_ctx *reqctx = ahash_request_ctx(req);

	if (reqctx->hctx_wr.imm) {
		u8 *buf = (u8 *)ulptx;

		if (param->bfr_len) {
			memcpy(buf, reqctx->reqbfr, param->bfr_len);
			buf += param->bfr_len;
		}

		sg_pcopy_to_buffer(reqctx->hctx_wr.srcsg,
				   sg_nents(reqctx->hctx_wr.srcsg), buf,
				   param->sg_len, 0);
	} else {
		ulptx_walk_init(&ulp_walk, ulptx);
		if (param->bfr_len)
			ulptx_walk_add_page(&ulp_walk, param->bfr_len,
					    reqctx->hctx_wr.dma_addr);
		ulptx_walk_add_sg(&ulp_walk, reqctx->hctx_wr.srcsg,
				  param->sg_len, reqctx->hctx_wr.src_ofst);
		reqctx->hctx_wr.srcsg = ulp_walk.last_sg;
		reqctx->hctx_wr.src_ofst = ulp_walk.last_sg_len;
		ulptx_walk_end(&ulp_walk);
	}
}

int chcr_hash_dma_map(struct device *dev,
		      struct ahash_request *req)
{
	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req);
	int error = 0;

	if (!req->nbytes)
		return 0;
	error = dma_map_sg(dev, req->src, sg_nents(req->src),
			   DMA_TO_DEVICE);
	if (!error)
		return -ENOMEM;
	req_ctx->hctx_wr.is_sg_map = 1;
	return 0;
}

void chcr_hash_dma_unmap(struct device *dev,
			 struct ahash_request *req)
{
	struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(req);

	if (!req->nbytes)
		return;

	dma_unmap_sg(dev, req->src, sg_nents(req->src),
		     DMA_TO_DEVICE);
	req_ctx->hctx_wr.is_sg_map = 0;
}

int chcr_cipher_dma_map(struct device *dev,
			struct skcipher_request *req)
{
	int error;

	if (req->src == req->dst) {
		error = dma_map_sg(dev, req->src, sg_nents(req->src),
				   DMA_BIDIRECTIONAL);
		if (!error)
			goto err;
	} else {
		error = dma_map_sg(dev, req->src, sg_nents(req->src),
				   DMA_TO_DEVICE);
		if (!error)
			goto err;
		error = dma_map_sg(dev, req->dst, sg_nents(req->dst),
				   DMA_FROM_DEVICE);
		if (!error) {
			dma_unmap_sg(dev, req->src, sg_nents(req->src),
				     DMA_TO_DEVICE);
			goto err;
		}
	}

	return 0;
err:
	return -ENOMEM;
}

void chcr_cipher_dma_unmap(struct device *dev,
			   struct skcipher_request *req)
{
	if (req->src == req->dst) {
		dma_unmap_sg(dev, req->src, sg_nents(req->src),
			     DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(dev, req->src, sg_nents(req->src),
			     DMA_TO_DEVICE);
		dma_unmap_sg(dev, req->dst, sg_nents(req->dst),
			     DMA_FROM_DEVICE);
	}
}

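/* Encode the CCM message length into the last csize bytes of the B0
 * block, big-endian, as in RFC 3610: for example msglen = 0x1234 with
 * csize = 3 produces the bytes 00 12 34. Lengths that do not fit in
 * the chosen field width are rejected.
 */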
static int set_msg_len(u8 *block, unsigned int msglen, int csize)
{
	__be32 data;

	memset(block, 0, csize);
	block += csize;

	if (csize >= 4)
		csize = 4;
	else if (msglen > (unsigned int)(1 << (8 * csize)))
		return -EOVERFLOW;

	data = cpu_to_be32(msglen);
	memcpy(block - csize, (u8 *)&data + 4 - csize, csize);

	return 0;
}

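/* Assemble the CCM B0 block from the formatted IV (RFC 3610). Byte 0
 * carries the flags: bits 0-2 hold L' (the length-field size minus
 * one), bits 3-5 encode (authsize - 2) / 2 and bit 6 is set when
 * associated data is present; the message length goes in the last
 * l = L' + 1 bytes.
 */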
static int generate_b0(struct aead_request *req, u8 *ivptr,
		       unsigned short op_type)
{
	unsigned int l, lp, m;
	int rc;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
	u8 *b0 = reqctx->scratch_pad;

	m = crypto_aead_authsize(aead);

	memcpy(b0, ivptr, 16);

	lp = b0[0];
	l = lp + 1;

	/* set m, bits 3-5 */
	*b0 |= (8 * ((m - 2) / 2));

	/* set adata, bit 6, if associated data is used */
	if (req->assoclen)
		*b0 |= 64;
	rc = set_msg_len(b0 + 16 - l,
			 (op_type == CHCR_DECRYPT_OP) ?
			 req->cryptlen - m : req->cryptlen, l);

	return rc;
}

static inline int crypto_ccm_check_iv(const u8 *iv)
{
	/* 2 <= L <= 8, so 1 <= L' <= 7. */
	if (iv[0] < 1 || iv[0] > 7)
		return -EINVAL;

	return 0;
}

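/* Format the 16-byte CCM IV. For RFC 4309 (CCM for IPsec ESP) it is
 * built as flags (L' = 3) | 3-byte salt | 8-byte per-packet IV |
 * zeroed counter; otherwise the caller's IV is taken as-is. A non-zero
 * AAD length is additionally prepended to the AAD as a big-endian
 * 16-bit field in the scratch area right after B0.
 */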
static int ccm_format_packet(struct aead_request *req,
			     u8 *ivptr,
			     unsigned int sub_type,
			     unsigned short op_type,
			     unsigned int assoclen)
{
	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
	int rc = 0;

	if (sub_type == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309) {
		ivptr[0] = 3;
		memcpy(ivptr + 1, &aeadctx->salt[0], 3);
		memcpy(ivptr + 4, req->iv, 8);
		memset(ivptr + 12, 0, 4);
	} else {
		memcpy(ivptr, req->iv, 16);
	}
	if (assoclen)
		put_unaligned_be16(assoclen, &reqctx->scratch_pad[16]);

	rc = generate_b0(req, ivptr, op_type);
	/* zero the ctr value */
	memset(ivptr + 15 - ivptr[0], 0, ivptr[0] + 1);
	return rc;
}

static void fill_sec_cpl_for_aead(struct cpl_tx_sec_pdu *sec_cpl,
				  unsigned int dst_size,
				  struct aead_request *req,
				  unsigned short op_type)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct chcr_context *ctx = a_ctx(tfm);
	struct uld_ctx *u_ctx = ULD_CTX(ctx);
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
	unsigned int cipher_mode = CHCR_SCMD_CIPHER_MODE_AES_CCM;
	unsigned int mac_mode = CHCR_SCMD_AUTH_MODE_CBCMAC;
	unsigned int rx_channel_id = reqctx->rxqidx / ctx->rxq_perchan;
	unsigned int ccm_xtra;
	unsigned int tag_offset = 0, auth_offset = 0;
	unsigned int assoclen;

	rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]);

	if (get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309)
		assoclen = req->assoclen - 8;
	else
		assoclen = req->assoclen;
	ccm_xtra = CCM_B0_SIZE +
		((assoclen) ? CCM_AAD_FIELD_SIZE : 0);

	auth_offset = req->cryptlen ?
		(req->assoclen + IV + 1 + ccm_xtra) : 0;
	if (op_type == CHCR_DECRYPT_OP) {
		if (crypto_aead_authsize(tfm) != req->cryptlen)
			tag_offset = crypto_aead_authsize(tfm);
		else
			auth_offset = 0;
	}

	sec_cpl->op_ivinsrtofst = FILL_SEC_CPL_OP_IVINSR(rx_channel_id, 2, 1);
	sec_cpl->pldlen =
		htonl(req->assoclen + IV + req->cryptlen + ccm_xtra);
	/* For CCM there will always be a B0 block, so AAD always starts at 1 */
	sec_cpl->aadstart_cipherstop_hi = FILL_SEC_CPL_CIPHERSTOP_HI(
				1 + IV, IV + assoclen + ccm_xtra,
				req->assoclen + IV + 1 + ccm_xtra, 0);

	sec_cpl->cipherstop_lo_authinsert = FILL_SEC_CPL_AUTHINSERT(0,
					auth_offset, tag_offset,
					(op_type == CHCR_ENCRYPT_OP) ? 0 :
					crypto_aead_authsize(tfm));
	sec_cpl->seqno_numivs = FILL_SEC_CPL_SCMD0_SEQNO(op_type,
					(op_type == CHCR_ENCRYPT_OP) ? 0 : 1,
					cipher_mode, mac_mode,
					aeadctx->hmac_ctrl, IV >> 1);

	sec_cpl->ivgen_hdrlen = FILL_SEC_CPL_IVGEN_HDRLEN(0, 0, 1, 0,
					0, dst_size);
}

static int aead_ccm_validate_input(unsigned short op_type,
				   struct aead_request *req,
				   struct chcr_aead_ctx *aeadctx,
				   unsigned int sub_type)
{
	if (sub_type != CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309) {
		if (crypto_ccm_check_iv(req->iv)) {
			pr_err("CCM: IV check fails\n");
			return -EINVAL;
		}
	} else {
		if (req->assoclen != 16 && req->assoclen != 20) {
			pr_err("RFC4309: Invalid AAD length %u\n",
			       req->assoclen);
			return -EINVAL;
		}
	}
	return 0;
}

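/* Build the CCM work request. B0 (and, when AAD is present, the AAD
 * length field) travels in front of the payload, so both the
 * immediate-data decision and the CPL lengths account for
 * reqctx->b0_len on top of AAD | IV | payload.
 */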
static struct sk_buff *create_aead_ccm_wr(struct aead_request *req,
					  unsigned short qid,
					  int size)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
	struct sk_buff *skb = NULL;
	struct chcr_wr *chcr_req;
	struct cpl_rx_phys_dsgl *phys_cpl;
	struct ulptx_sgl *ulptx;
	unsigned int transhdr_len;
	unsigned int dst_size = 0, kctx_len, dnents, temp, snents;
	unsigned int sub_type, assoclen = req->assoclen;
	unsigned int authsize = crypto_aead_authsize(tfm);
	int error = -EINVAL;
	u8 *ivptr;
	gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
		GFP_ATOMIC;
	struct adapter *adap = padap(a_ctx(tfm)->dev);

	sub_type = get_aead_subtype(tfm);
	if (sub_type == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309)
		assoclen -= 8;
	reqctx->b0_len = CCM_B0_SIZE + (assoclen ? CCM_AAD_FIELD_SIZE : 0);
	error = chcr_aead_common_init(req);
	if (error)
		return ERR_PTR(error);

	error = aead_ccm_validate_input(reqctx->op, req, aeadctx, sub_type);
	if (error)
		goto err;
	dnents = sg_nents_xlen(req->dst, req->assoclen + req->cryptlen
			       + (reqctx->op ? -authsize : authsize),
			       CHCR_DST_SG_SIZE, 0);
	dnents += MIN_CCM_SG; // For IV and B0
	dst_size = get_space_for_phys_dsgl(dnents);
	snents = sg_nents_xlen(req->src, req->assoclen + req->cryptlen,
			       CHCR_SRC_SG_SIZE, 0);
	snents += MIN_CCM_SG; // For B0
	kctx_len = roundup(aeadctx->enckey_len, 16) * 2;
	transhdr_len = CIPHER_TRANSHDR_SIZE(kctx_len, dst_size);
	reqctx->imm = (transhdr_len + req->assoclen + req->cryptlen +
		       reqctx->b0_len) <= SGE_MAX_WR_LEN;
	temp = reqctx->imm ? roundup(req->assoclen + req->cryptlen +
				     reqctx->b0_len, 16) :
		(sgl_len(snents) * 8);
	transhdr_len += temp;
	transhdr_len = roundup(transhdr_len, 16);

	if (chcr_aead_need_fallback(req, dnents, T6_MAX_AAD_SIZE -
				    reqctx->b0_len, transhdr_len, reqctx->op)) {
		atomic_inc(&adap->chcr_stats.fallback);
		chcr_aead_common_exit(req);
		return ERR_PTR(chcr_aead_fallback(req, reqctx->op));
	}
	skb = alloc_skb(transhdr_len, flags);

	if (!skb) {
		error = -ENOMEM;
		goto err;
	}

	chcr_req = __skb_put_zero(skb, transhdr_len);

	fill_sec_cpl_for_aead(&chcr_req->sec_cpl, dst_size, req, reqctx->op);

	chcr_req->key_ctx.ctx_hdr = aeadctx->key_ctx_hdr;
	memcpy(chcr_req->key_ctx.key, aeadctx->key, aeadctx->enckey_len);
	memcpy(chcr_req->key_ctx.key + roundup(aeadctx->enckey_len, 16),
	       aeadctx->key, aeadctx->enckey_len);

	phys_cpl = (struct cpl_rx_phys_dsgl *)((u8 *)(chcr_req + 1) + kctx_len);
	ivptr = (u8 *)(phys_cpl + 1) + dst_size;
	ulptx = (struct ulptx_sgl *)(ivptr + IV);
	error = ccm_format_packet(req, ivptr, sub_type, reqctx->op, assoclen);
	if (error)
		goto dstmap_fail;
	chcr_add_aead_dst_ent(req, phys_cpl, qid);
	chcr_add_aead_src_ent(req, ulptx);

	atomic_inc(&adap->chcr_stats.aead_rqst);
	temp = sizeof(struct cpl_rx_phys_dsgl) + dst_size + IV +
		kctx_len + (reqctx->imm ? (req->assoclen + req->cryptlen +
					   reqctx->b0_len) : 0);
	create_wreq(a_ctx(tfm), chcr_req, &req->base, reqctx->imm, 0,
		    transhdr_len, temp, 0);
	reqctx->skb = skb;

	return skb;
dstmap_fail:
	kfree_skb(skb);
err:
	chcr_aead_common_exit(req);
	return ERR_PTR(error);
}

static struct sk_buff *create_gcm_wr(struct aead_request *req,
				     unsigned short qid,
				     int size)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct chcr_context *ctx = a_ctx(tfm);
	struct uld_ctx *u_ctx = ULD_CTX(ctx);
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
	struct sk_buff *skb = NULL;
	struct chcr_wr *chcr_req;
	struct cpl_rx_phys_dsgl *phys_cpl;
	struct ulptx_sgl *ulptx;
	unsigned int transhdr_len, dnents = 0, snents;
	unsigned int dst_size = 0, temp = 0, kctx_len, assoclen = req->assoclen;
	unsigned int authsize = crypto_aead_authsize(tfm);
	int error = -EINVAL;
	u8 *ivptr;
	gfp_t flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
		GFP_ATOMIC;
	struct adapter *adap = padap(ctx->dev);
	unsigned int rx_channel_id = reqctx->rxqidx / ctx->rxq_perchan;

	rx_channel_id = cxgb4_port_e2cchan(u_ctx->lldi.ports[rx_channel_id]);
	if (get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106)
		assoclen = req->assoclen - 8;

	reqctx->b0_len = 0;
	error = chcr_aead_common_init(req);
	if (error)
		return ERR_PTR(error);
	dnents = sg_nents_xlen(req->dst, req->assoclen + req->cryptlen +
			       (reqctx->op ? -authsize : authsize),
			       CHCR_DST_SG_SIZE, 0);
	snents = sg_nents_xlen(req->src, req->assoclen + req->cryptlen,
			       CHCR_SRC_SG_SIZE, 0);
	dnents += MIN_GCM_SG; // For IV
	dst_size = get_space_for_phys_dsgl(dnents);
	kctx_len = roundup(aeadctx->enckey_len, 16) + AEAD_H_SIZE;
	transhdr_len = CIPHER_TRANSHDR_SIZE(kctx_len, dst_size);
	reqctx->imm = (transhdr_len + req->assoclen + req->cryptlen) <=
			SGE_MAX_WR_LEN;
	temp = reqctx->imm ? roundup(req->assoclen + req->cryptlen, 16) :
		(sgl_len(snents) * 8);
	transhdr_len += temp;
	transhdr_len = roundup(transhdr_len, 16);
	if (chcr_aead_need_fallback(req, dnents, T6_MAX_AAD_SIZE,
				    transhdr_len, reqctx->op)) {
		atomic_inc(&adap->chcr_stats.fallback);
		chcr_aead_common_exit(req);
		return ERR_PTR(chcr_aead_fallback(req, reqctx->op));
	}
	skb = alloc_skb(transhdr_len, flags);
	if (!skb) {
		error = -ENOMEM;
		goto err;
	}

	chcr_req = __skb_put_zero(skb, transhdr_len);

	// Offset of tag from end
	temp = (reqctx->op == CHCR_ENCRYPT_OP) ? 0 : authsize;
	chcr_req->sec_cpl.op_ivinsrtofst = FILL_SEC_CPL_OP_IVINSR(
						rx_channel_id, 2, 1);
	chcr_req->sec_cpl.pldlen =
		htonl(req->assoclen + IV + req->cryptlen);
	chcr_req->sec_cpl.aadstart_cipherstop_hi = FILL_SEC_CPL_CIPHERSTOP_HI(
					assoclen ? 1 + IV : 0,
					assoclen ? IV + assoclen : 0,
					req->assoclen + IV + 1, 0);
	chcr_req->sec_cpl.cipherstop_lo_authinsert =
			FILL_SEC_CPL_AUTHINSERT(0, req->assoclen + IV + 1,
						temp, temp);
	chcr_req->sec_cpl.seqno_numivs =
			FILL_SEC_CPL_SCMD0_SEQNO(reqctx->op, (reqctx->op ==
					CHCR_ENCRYPT_OP) ? 1 : 0,
					CHCR_SCMD_CIPHER_MODE_AES_GCM,
					CHCR_SCMD_AUTH_MODE_GHASH,
					aeadctx->hmac_ctrl, IV >> 1);
	chcr_req->sec_cpl.ivgen_hdrlen = FILL_SEC_CPL_IVGEN_HDRLEN(0, 0, 1,
					0, 0, dst_size);
	chcr_req->key_ctx.ctx_hdr = aeadctx->key_ctx_hdr;
	memcpy(chcr_req->key_ctx.key, aeadctx->key, aeadctx->enckey_len);
	memcpy(chcr_req->key_ctx.key + roundup(aeadctx->enckey_len, 16),
	       GCM_CTX(aeadctx)->ghash_h, AEAD_H_SIZE);

	phys_cpl = (struct cpl_rx_phys_dsgl *)((u8 *)(chcr_req + 1) + kctx_len);
	ivptr = (u8 *)(phys_cpl + 1) + dst_size;
	/* prepare a 16 byte iv: S A L T | IV | 0x00000001 */
	if (get_aead_subtype(tfm) ==
	    CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106) {
		memcpy(ivptr, aeadctx->salt, 4);
		memcpy(ivptr + 4, req->iv, GCM_RFC4106_IV_SIZE);
	} else {
		memcpy(ivptr, req->iv, GCM_AES_IV_SIZE);
	}
	put_unaligned_be32(0x01, &ivptr[12]);
	ulptx = (struct ulptx_sgl *)(ivptr + 16);

	chcr_add_aead_dst_ent(req, phys_cpl, qid);
	chcr_add_aead_src_ent(req, ulptx);
	atomic_inc(&adap->chcr_stats.aead_rqst);
	temp = sizeof(struct cpl_rx_phys_dsgl) + dst_size + IV +
		kctx_len + (reqctx->imm ? (req->assoclen + req->cryptlen) : 0);
	create_wreq(a_ctx(tfm), chcr_req, &req->base, reqctx->imm, size,
		    transhdr_len, temp, reqctx->verify);
	reqctx->skb = skb;
	return skb;

err:
	chcr_aead_common_exit(req);
	return ERR_PTR(error);
}

static int chcr_aead_cra_init(struct crypto_aead *tfm)
{
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
	struct aead_alg *alg = crypto_aead_alg(tfm);

	aeadctx->sw_cipher = crypto_alloc_aead(alg->base.cra_name, 0,
					       CRYPTO_ALG_NEED_FALLBACK |
					       CRYPTO_ALG_ASYNC);
	if (IS_ERR(aeadctx->sw_cipher))
		return PTR_ERR(aeadctx->sw_cipher);
	crypto_aead_set_reqsize(tfm, max(sizeof(struct chcr_aead_reqctx),
				 sizeof(struct aead_request) +
				 crypto_aead_reqsize(aeadctx->sw_cipher)));
	return chcr_device_init(a_ctx(tfm));
}

static void chcr_aead_cra_exit(struct crypto_aead *tfm)
{
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));

	crypto_free_aead(aeadctx->sw_cipher);
}

static int chcr_authenc_null_setauthsize(struct crypto_aead *tfm,
					 unsigned int authsize)
{
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));

	aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NOP;
	aeadctx->mayverify = VERIFY_HW;
	return crypto_aead_setauthsize(aeadctx->sw_cipher, authsize);
}

static int chcr_authenc_setauthsize(struct crypto_aead *tfm,
				    unsigned int authsize)
{
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));
	u32 maxauth = crypto_aead_maxauthsize(tfm);

	/* The SHA1 authsize used in IPsec is 12, not 10, so maxauthsize / 2
	 * does not hold for SHA1. The authsize == 12 check must therefore
	 * come before the authsize == (maxauth >> 1) check.
	 */
	if (authsize == ICV_4) {
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL1;
		aeadctx->mayverify = VERIFY_HW;
	} else if (authsize == ICV_6) {
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL2;
		aeadctx->mayverify = VERIFY_HW;
	} else if (authsize == ICV_10) {
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_TRUNC_RFC4366;
		aeadctx->mayverify = VERIFY_HW;
	} else if (authsize == ICV_12) {
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_IPSEC_96BIT;
		aeadctx->mayverify = VERIFY_HW;
	} else if (authsize == ICV_14) {
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL3;
		aeadctx->mayverify = VERIFY_HW;
	} else if (authsize == (maxauth >> 1)) {
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_DIV2;
		aeadctx->mayverify = VERIFY_HW;
	} else if (authsize == maxauth) {
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
		aeadctx->mayverify = VERIFY_HW;
	} else {
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
		aeadctx->mayverify = VERIFY_SW;
	}
	return crypto_aead_setauthsize(aeadctx->sw_cipher, authsize);
}

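/* GCM ICV (tag) lengths of 4, 8, 12, 14 and 16 bytes map directly to a
 * hardware truncation mode; 13- and 15-byte tags come back untruncated
 * from hardware and are verified in software instead.
 */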
static int chcr_gcm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));

	switch (authsize) {
	case ICV_4:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL1;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_8:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_DIV2;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_12:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_IPSEC_96BIT;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_14:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL3;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_16:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_13:
	case ICV_15:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
		aeadctx->mayverify = VERIFY_SW;
		break;
	default:
		return -EINVAL;
	}
	return crypto_aead_setauthsize(aeadctx->sw_cipher, authsize);
}

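/* rfc4106(gcm(aes)) and rfc4309(ccm(aes)) restrict the ICV to 8, 12 or
 * 16 bytes, so only those truncation modes are accepted here.
 */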
static int chcr_4106_4309_setauthsize(struct crypto_aead *tfm,
				      unsigned int authsize)
{
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));

	switch (authsize) {
	case ICV_8:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_DIV2;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_12:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_IPSEC_96BIT;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_16:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
		aeadctx->mayverify = VERIFY_HW;
		break;
	default:
		return -EINVAL;
	}
	return crypto_aead_setauthsize(aeadctx->sw_cipher, authsize);
}

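/* CCM allows any even tag length from 4 to 16 bytes, and each one has a
 * matching hardware truncation mode, so no software verification is
 * needed.
 */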
static int chcr_ccm_setauthsize(struct crypto_aead *tfm,
				unsigned int authsize)
{
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(tfm));

	switch (authsize) {
	case ICV_4:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL1;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_6:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL2;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_8:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_DIV2;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_10:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_TRUNC_RFC4366;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_12:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_IPSEC_96BIT;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_14:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_PL3;
		aeadctx->mayverify = VERIFY_HW;
		break;
	case ICV_16:
		aeadctx->hmac_ctrl = CHCR_SCMD_HMAC_CTRL_NO_TRUNC;
		aeadctx->mayverify = VERIFY_HW;
		break;
	default:
		return -EINVAL;
	}
	return crypto_aead_setauthsize(aeadctx->sw_cipher, authsize);
}

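/* Common CCM key setup: validate the AES key length, build the key
 * context header and cache the raw key. CCM uses the same AES key for
 * the CBC-MAC and the CTR encryption, which is presumably why the key
 * context reserves room for two rounded-up copies of it.
 */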
static int chcr_ccm_common_setkey(struct crypto_aead *aead,
				  const u8 *key,
				  unsigned int keylen)
{
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(aead));
	unsigned char ck_size, mk_size;
	int key_ctx_size = 0;

	key_ctx_size = sizeof(struct _key_ctx) + roundup(keylen, 16) * 2;
	if (keylen == AES_KEYSIZE_128) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_128;
		mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_128;
	} else if (keylen == AES_KEYSIZE_192) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_192;
		mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_192;
	} else if (keylen == AES_KEYSIZE_256) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_256;
		mk_size = CHCR_KEYCTX_MAC_KEY_SIZE_256;
	} else {
		aeadctx->enckey_len = 0;
		return -EINVAL;
	}
	aeadctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size, mk_size, 0, 0,
						key_ctx_size >> 4);
	memcpy(aeadctx->key, key, keylen);
	aeadctx->enckey_len = keylen;

	return 0;
}

static int chcr_aead_ccm_setkey(struct crypto_aead *aead,
				const u8 *key,
				unsigned int keylen)
{
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(aead));
	int error;

	crypto_aead_clear_flags(aeadctx->sw_cipher, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(aeadctx->sw_cipher, crypto_aead_get_flags(aead) &
			      CRYPTO_TFM_REQ_MASK);
	error = crypto_aead_setkey(aeadctx->sw_cipher, key, keylen);
	if (error)
		return error;
	return chcr_ccm_common_setkey(aead, key, keylen);
}

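/* For rfc4309(ccm(aes)) the last 3 bytes of the key material are the
 * implicit nonce salt (RFC 4309); stash them in the context before
 * programming the remaining bytes as the AES key.
 */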
static int chcr_aead_rfc4309_setkey(struct crypto_aead *aead, const u8 *key,
				    unsigned int keylen)
{
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(aead));
	int error;

	if (keylen < 3) {
		aeadctx->enckey_len = 0;
		return -EINVAL;
	}
	crypto_aead_clear_flags(aeadctx->sw_cipher, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(aeadctx->sw_cipher, crypto_aead_get_flags(aead) &
			      CRYPTO_TFM_REQ_MASK);
	error = crypto_aead_setkey(aeadctx->sw_cipher, key, keylen);
	if (error)
		return error;
	keylen -= 3;
	memcpy(aeadctx->salt, key + keylen, 3);
	return chcr_ccm_common_setkey(aead, key, keylen);
}

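/* GCM key setup. Besides programming the AES key, precompute the GHASH
 * subkey H = CIPH_K(0^128) with a software AES expansion so the hardware
 * key context can carry it alongside the cipher key. For rfc4106 the
 * trailing 4 key bytes are the nonce salt.
 */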
static int chcr_gcm_setkey(struct crypto_aead *aead, const u8 *key,
			   unsigned int keylen)
{
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(aead));
	struct chcr_gcm_ctx *gctx = GCM_CTX(aeadctx);
	unsigned int ck_size;
	int ret = 0, key_ctx_size = 0;
	struct crypto_aes_ctx aes;

	aeadctx->enckey_len = 0;
	crypto_aead_clear_flags(aeadctx->sw_cipher, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(aeadctx->sw_cipher, crypto_aead_get_flags(aead)
			      & CRYPTO_TFM_REQ_MASK);
	ret = crypto_aead_setkey(aeadctx->sw_cipher, key, keylen);
	if (ret)
		goto out;

	if (get_aead_subtype(aead) == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106 &&
	    keylen > 3) {
		keylen -= 4;	/* the nonce/salt is in the last 4 bytes */
		memcpy(aeadctx->salt, key + keylen, 4);
	}
	if (keylen == AES_KEYSIZE_128) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_128;
	} else if (keylen == AES_KEYSIZE_192) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_192;
	} else if (keylen == AES_KEYSIZE_256) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_256;
	} else {
		pr_err("GCM: Invalid key length %d\n", keylen);
		ret = -EINVAL;
		goto out;
	}

	memcpy(aeadctx->key, key, keylen);
	aeadctx->enckey_len = keylen;
	key_ctx_size = sizeof(struct _key_ctx) + roundup(keylen, 16) +
		AEAD_H_SIZE;
	aeadctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size,
						CHCR_KEYCTX_MAC_KEY_SIZE_128,
						0, 0,
						key_ctx_size >> 4);
	/* Calculate H = CIPH(K, 0 repeated 16 times); it goes into the
	 * key context.
	 */
	ret = aes_expandkey(&aes, key, keylen);
	if (ret) {
		aeadctx->enckey_len = 0;
		goto out;
	}
	memset(gctx->ghash_h, 0, AEAD_H_SIZE);
	aes_encrypt(&aes, gctx->ghash_h, gctx->ghash_h);
	memzero_explicit(&aes, sizeof(aes));

out:
	return ret;
}

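/* authenc key setup. The authentication key is consumed here to
 * precompute the HMAC inner and outer partial digests, H(K ^ ipad) and
 * H(K ^ opad), which are what the hardware needs; only the cipher key is
 * kept verbatim in the key context.
 */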
static int chcr_authenc_setkey(struct crypto_aead *authenc, const u8 *key,
			       unsigned int keylen)
{
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(authenc));
	struct chcr_authenc_ctx *actx = AUTHENC_CTX(aeadctx);
	/* keys holds both the authentication and the cipher key */
	struct crypto_authenc_keys keys;
	unsigned int bs, subtype;
	unsigned int max_authsize = crypto_aead_alg(authenc)->maxauthsize;
	int err = 0, i, key_ctx_len = 0;
	unsigned char ck_size = 0;
	unsigned char pad[CHCR_HASH_MAX_BLOCK_SIZE_128] = { 0 };
	struct crypto_shash *base_hash = ERR_PTR(-EINVAL);
	struct algo_param param;
	int align;
	u8 *o_ptr = NULL;

	crypto_aead_clear_flags(aeadctx->sw_cipher, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(aeadctx->sw_cipher, crypto_aead_get_flags(authenc)
			      & CRYPTO_TFM_REQ_MASK);
	err = crypto_aead_setkey(aeadctx->sw_cipher, key, keylen);
	if (err)
		goto out;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto out;

	if (get_alg_config(&param, max_authsize)) {
		pr_err("Unsupported digest size\n");
		goto out;
	}
	subtype = get_aead_subtype(authenc);
	if (subtype == CRYPTO_ALG_SUB_TYPE_CTR_SHA ||
	    subtype == CRYPTO_ALG_SUB_TYPE_CTR_NULL) {
		if (keys.enckeylen < CTR_RFC3686_NONCE_SIZE)
			goto out;
		memcpy(aeadctx->nonce, keys.enckey + (keys.enckeylen
		       - CTR_RFC3686_NONCE_SIZE), CTR_RFC3686_NONCE_SIZE);
		keys.enckeylen -= CTR_RFC3686_NONCE_SIZE;
	}
	if (keys.enckeylen == AES_KEYSIZE_128) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_128;
	} else if (keys.enckeylen == AES_KEYSIZE_192) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_192;
	} else if (keys.enckeylen == AES_KEYSIZE_256) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_256;
	} else {
		pr_err("Unsupported cipher key\n");
		goto out;
	}

	/* Copy only the encryption key. The auth key is consumed here to
	 * generate h(ipad) and h(opad), so it is not needed again;
	 * authkeylen equals the hash digest size.
	 */
	memcpy(aeadctx->key, keys.enckey, keys.enckeylen);
	aeadctx->enckey_len = keys.enckeylen;
	if (subtype == CRYPTO_ALG_SUB_TYPE_CBC_SHA ||
	    subtype == CRYPTO_ALG_SUB_TYPE_CBC_NULL) {
		get_aes_decrypt_key(actx->dec_rrkey, aeadctx->key,
				    aeadctx->enckey_len << 3);
	}
	base_hash = chcr_alloc_shash(max_authsize);
	if (IS_ERR(base_hash)) {
		pr_err("Base driver cannot be loaded\n");
		goto out;
	}
	{
		SHASH_DESC_ON_STACK(shash, base_hash);

		shash->tfm = base_hash;
		bs = crypto_shash_blocksize(base_hash);
		align = KEYCTX_ALIGN_PAD(max_authsize);
		o_ptr = actx->h_iopad + param.result_size + align;

		if (keys.authkeylen > bs) {
			err = crypto_shash_digest(shash, keys.authkey,
						  keys.authkeylen,
						  o_ptr);
			if (err) {
				pr_err("Base driver cannot be loaded\n");
				goto out;
			}
			keys.authkeylen = max_authsize;
		} else
			memcpy(o_ptr, keys.authkey, keys.authkeylen);

		/* Compute the ipad digest */
		memset(pad + keys.authkeylen, 0, bs - keys.authkeylen);
		memcpy(pad, o_ptr, keys.authkeylen);
		for (i = 0; i < bs >> 2; i++)
			*((unsigned int *)pad + i) ^= IPAD_DATA;

		if (chcr_compute_partial_hash(shash, pad, actx->h_iopad,
					      max_authsize))
			goto out;
		/* Compute the opad digest */
		memset(pad + keys.authkeylen, 0, bs - keys.authkeylen);
		memcpy(pad, o_ptr, keys.authkeylen);
		for (i = 0; i < bs >> 2; i++)
			*((unsigned int *)pad + i) ^= OPAD_DATA;

		if (chcr_compute_partial_hash(shash, pad, o_ptr, max_authsize))
			goto out;

		/* Convert the ipad and opad digests to network byte order */
		chcr_change_order(actx->h_iopad, param.result_size);
		chcr_change_order(o_ptr, param.result_size);
		key_ctx_len = sizeof(struct _key_ctx) +
			roundup(keys.enckeylen, 16) +
			(param.result_size + align) * 2;
		aeadctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size, param.mk_size,
						0, 1, key_ctx_len >> 4);
		actx->auth_mode = param.auth_mode;
		chcr_free_shash(base_hash);

		memzero_explicit(&keys, sizeof(keys));
		return 0;
	}
out:
	aeadctx->enckey_len = 0;
	memzero_explicit(&keys, sizeof(keys));
	if (!IS_ERR(base_hash))
		chcr_free_shash(base_hash);
	return -EINVAL;
}

static int chcr_aead_digest_null_setkey(struct crypto_aead *authenc,
					const u8 *key, unsigned int keylen)
{
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(a_ctx(authenc));
	struct chcr_authenc_ctx *actx = AUTHENC_CTX(aeadctx);
	/* keys holds both the authentication and the cipher key */
	struct crypto_authenc_keys keys;
	int err;
	unsigned int subtype;
	int key_ctx_len = 0;
	unsigned char ck_size = 0;

	crypto_aead_clear_flags(aeadctx->sw_cipher, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(aeadctx->sw_cipher, crypto_aead_get_flags(authenc)
			      & CRYPTO_TFM_REQ_MASK);
	err = crypto_aead_setkey(aeadctx->sw_cipher, key, keylen);
	if (err)
		goto out;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto out;

	subtype = get_aead_subtype(authenc);
	if (subtype == CRYPTO_ALG_SUB_TYPE_CTR_SHA ||
	    subtype == CRYPTO_ALG_SUB_TYPE_CTR_NULL) {
		if (keys.enckeylen < CTR_RFC3686_NONCE_SIZE)
			goto out;
		memcpy(aeadctx->nonce, keys.enckey + (keys.enckeylen
		       - CTR_RFC3686_NONCE_SIZE), CTR_RFC3686_NONCE_SIZE);
		keys.enckeylen -= CTR_RFC3686_NONCE_SIZE;
	}
	if (keys.enckeylen == AES_KEYSIZE_128) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_128;
	} else if (keys.enckeylen == AES_KEYSIZE_192) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_192;
	} else if (keys.enckeylen == AES_KEYSIZE_256) {
		ck_size = CHCR_KEYCTX_CIPHER_KEY_SIZE_256;
	} else {
		pr_err("Unsupported cipher key %d\n", keys.enckeylen);
		goto out;
	}
	memcpy(aeadctx->key, keys.enckey, keys.enckeylen);
	aeadctx->enckey_len = keys.enckeylen;
	if (subtype == CRYPTO_ALG_SUB_TYPE_CBC_SHA ||
	    subtype == CRYPTO_ALG_SUB_TYPE_CBC_NULL) {
		get_aes_decrypt_key(actx->dec_rrkey, aeadctx->key,
				    aeadctx->enckey_len << 3);
	}
	key_ctx_len = sizeof(struct _key_ctx) + roundup(keys.enckeylen, 16);

	aeadctx->key_ctx_hdr = FILL_KEY_CTX_HDR(ck_size, CHCR_KEYCTX_NO_KEY, 0,
						0, key_ctx_len >> 4);
	actx->auth_mode = CHCR_SCMD_AUTH_MODE_NOP;
	memzero_explicit(&keys, sizeof(keys));
	return 0;
out:
	aeadctx->enckey_len = 0;
	memzero_explicit(&keys, sizeof(keys));
	return -EINVAL;
}

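/* Common AEAD submission path: fall back to software when the device is
 * detaching, return -ENOSPC when the crypto queue is full and the
 * request may not be backlogged, otherwise build a work request and hand
 * it to the hardware to complete asynchronously (-EINPROGRESS).
 */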
static int chcr_aead_op(struct aead_request *req,
			int size,
			create_wr_t create_wr_fn)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
	struct chcr_context *ctx = a_ctx(tfm);
	struct uld_ctx *u_ctx = ULD_CTX(ctx);
	struct sk_buff *skb;
	struct chcr_dev *cdev;

	cdev = a_ctx(tfm)->dev;
	if (!cdev) {
		pr_err("%s : No crypto device.\n", __func__);
		return -ENXIO;
	}

	if (chcr_inc_wrcount(cdev)) {
		/* The detach state for CHCR means lldi or padap has been
		 * freed; we cannot take a WR reference here, so hand the
		 * request to the software fallback.
		 */
		return chcr_aead_fallback(req, reqctx->op);
	}

	if (cxgb4_is_crypto_q_full(u_ctx->lldi.ports[0],
				   reqctx->txqidx) &&
	    (!(req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG))) {
		chcr_dec_wrcount(cdev);
		return -ENOSPC;
	}

	if (get_aead_subtype(tfm) == CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106 &&
	    crypto_ipsec_check_assoclen(req->assoclen) != 0) {
		pr_err("RFC4106: Invalid value of assoclen %d\n",
		       req->assoclen);
		return -EINVAL;
	}

	/* Build a work request (WR) from req */
	skb = create_wr_fn(req, u_ctx->lldi.rxq_ids[reqctx->rxqidx], size);

	if (IS_ERR_OR_NULL(skb)) {
		chcr_dec_wrcount(cdev);
		return PTR_ERR_OR_ZERO(skb);
	}

	skb->dev = u_ctx->lldi.ports[0];
	set_wr_txq(skb, CPL_PRIORITY_DATA, reqctx->txqidx);
	chcr_send_wr(skb);
	return -EINPROGRESS;
}

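/* The encrypt/decrypt entry points spread requests across the available
 * TX/RX queues keyed on the submitting CPU, then dispatch to the
 * authenc, CCM or GCM work-request builder matching the algorithm
 * subtype.
 */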
static int chcr_aead_encrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
	struct chcr_context *ctx = a_ctx(tfm);
	unsigned int cpu;

	cpu = get_cpu();
	reqctx->txqidx = cpu % ctx->ntxq;
	reqctx->rxqidx = cpu % ctx->nrxq;
	put_cpu();

	reqctx->verify = VERIFY_HW;
	reqctx->op = CHCR_ENCRYPT_OP;

	switch (get_aead_subtype(tfm)) {
	case CRYPTO_ALG_SUB_TYPE_CTR_SHA:
	case CRYPTO_ALG_SUB_TYPE_CBC_SHA:
	case CRYPTO_ALG_SUB_TYPE_CBC_NULL:
	case CRYPTO_ALG_SUB_TYPE_CTR_NULL:
		return chcr_aead_op(req, 0, create_authenc_wr);
	case CRYPTO_ALG_SUB_TYPE_AEAD_CCM:
	case CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309:
		return chcr_aead_op(req, 0, create_aead_ccm_wr);
	default:
		return chcr_aead_op(req, 0, create_gcm_wr);
	}
}

static int chcr_aead_decrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct chcr_context *ctx = a_ctx(tfm);
	struct chcr_aead_ctx *aeadctx = AEAD_CTX(ctx);
	struct chcr_aead_reqctx *reqctx = aead_request_ctx(req);
	int size;
	unsigned int cpu;

	cpu = get_cpu();
	reqctx->txqidx = cpu % ctx->ntxq;
	reqctx->rxqidx = cpu % ctx->nrxq;
	put_cpu();

	if (aeadctx->mayverify == VERIFY_SW) {
		size = crypto_aead_maxauthsize(tfm);
		reqctx->verify = VERIFY_SW;
	} else {
		size = 0;
		reqctx->verify = VERIFY_HW;
	}
	reqctx->op = CHCR_DECRYPT_OP;
	switch (get_aead_subtype(tfm)) {
	case CRYPTO_ALG_SUB_TYPE_CBC_SHA:
	case CRYPTO_ALG_SUB_TYPE_CTR_SHA:
	case CRYPTO_ALG_SUB_TYPE_CBC_NULL:
	case CRYPTO_ALG_SUB_TYPE_CTR_NULL:
		return chcr_aead_op(req, size, create_authenc_wr);
	case CRYPTO_ALG_SUB_TYPE_AEAD_CCM:
	case CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309:
		return chcr_aead_op(req, size, create_aead_ccm_wr);
	default:
		return chcr_aead_op(req, size, create_gcm_wr);
	}
}

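/* Template table of every algorithm this driver can register. The type
 * field carries both the crypto API type and a driver-private subtype
 * that selects the work-request builder at request time.
 */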
static struct chcr_alg_template driver_algs[] = {
	/* AES-CBC */
	{
		.type = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_SUB_TYPE_CBC,
		.is_registered = 0,
		.alg.skcipher = {
			.base.cra_name = "cbc(aes)",
			.base.cra_driver_name = "cbc-aes-chcr",
			.base.cra_blocksize = AES_BLOCK_SIZE,

			.init = chcr_init_tfm,
			.exit = chcr_exit_tfm,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			.setkey = chcr_aes_cbc_setkey,
			.encrypt = chcr_aes_encrypt,
			.decrypt = chcr_aes_decrypt,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_SUB_TYPE_XTS,
		.is_registered = 0,
		.alg.skcipher = {
			.base.cra_name = "xts(aes)",
			.base.cra_driver_name = "xts-aes-chcr",
			.base.cra_blocksize = AES_BLOCK_SIZE,

			.init = chcr_init_tfm,
			.exit = chcr_exit_tfm,
			.min_keysize = 2 * AES_MIN_KEY_SIZE,
			.max_keysize = 2 * AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			.setkey = chcr_aes_xts_setkey,
			.encrypt = chcr_aes_encrypt,
			.decrypt = chcr_aes_decrypt,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_SUB_TYPE_CTR,
		.is_registered = 0,
		.alg.skcipher = {
			.base.cra_name = "ctr(aes)",
			.base.cra_driver_name = "ctr-aes-chcr",
			.base.cra_blocksize = 1,

			.init = chcr_init_tfm,
			.exit = chcr_exit_tfm,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			.setkey = chcr_aes_ctr_setkey,
			.encrypt = chcr_aes_encrypt,
			.decrypt = chcr_aes_decrypt,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_SKCIPHER |
			CRYPTO_ALG_SUB_TYPE_CTR_RFC3686,
		.is_registered = 0,
		.alg.skcipher = {
			.base.cra_name = "rfc3686(ctr(aes))",
			.base.cra_driver_name = "rfc3686-ctr-aes-chcr",
			.base.cra_blocksize = 1,

			.init = chcr_rfc3686_init,
			.exit = chcr_exit_tfm,
			.min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
			.ivsize = CTR_RFC3686_IV_SIZE,
			.setkey = chcr_aes_rfc3686_setkey,
			.encrypt = chcr_aes_encrypt,
			.decrypt = chcr_aes_decrypt,
		}
	},
	/* SHA */
	{
		.type = CRYPTO_ALG_TYPE_AHASH,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA1_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "sha1",
				.cra_driver_name = "sha1-chcr",
				.cra_blocksize = SHA1_BLOCK_SIZE,
			}
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AHASH,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA256_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "sha256",
				.cra_driver_name = "sha256-chcr",
				.cra_blocksize = SHA256_BLOCK_SIZE,
			}
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AHASH,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA224_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "sha224",
				.cra_driver_name = "sha224-chcr",
				.cra_blocksize = SHA224_BLOCK_SIZE,
			}
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AHASH,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA384_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "sha384",
				.cra_driver_name = "sha384-chcr",
				.cra_blocksize = SHA384_BLOCK_SIZE,
			}
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AHASH,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA512_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "sha512",
				.cra_driver_name = "sha512-chcr",
				.cra_blocksize = SHA512_BLOCK_SIZE,
			}
		}
	},
	/* HMAC */
	{
		.type = CRYPTO_ALG_TYPE_HMAC,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA1_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "hmac(sha1)",
				.cra_driver_name = "hmac-sha1-chcr",
				.cra_blocksize = SHA1_BLOCK_SIZE,
			}
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_HMAC,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA224_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "hmac(sha224)",
				.cra_driver_name = "hmac-sha224-chcr",
				.cra_blocksize = SHA224_BLOCK_SIZE,
			}
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_HMAC,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA256_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "hmac(sha256)",
				.cra_driver_name = "hmac-sha256-chcr",
				.cra_blocksize = SHA256_BLOCK_SIZE,
			}
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_HMAC,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA384_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "hmac(sha384)",
				.cra_driver_name = "hmac-sha384-chcr",
				.cra_blocksize = SHA384_BLOCK_SIZE,
			}
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_HMAC,
		.is_registered = 0,
		.alg.hash = {
			.halg.digestsize = SHA512_DIGEST_SIZE,
			.halg.base = {
				.cra_name = "hmac(sha512)",
				.cra_driver_name = "hmac-sha512-chcr",
				.cra_blocksize = SHA512_BLOCK_SIZE,
			}
		}
	},
	/* AEAD algorithms */
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_GCM,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "gcm(aes)",
				.cra_driver_name = "gcm-aes-chcr",
				.cra_blocksize = 1,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize = sizeof(struct chcr_context) +
					       sizeof(struct chcr_aead_ctx) +
					       sizeof(struct chcr_gcm_ctx),
			},
			.ivsize = GCM_AES_IV_SIZE,
			.maxauthsize = GHASH_DIGEST_SIZE,
			.setkey = chcr_gcm_setkey,
			.setauthsize = chcr_gcm_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_RFC4106,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "rfc4106(gcm(aes))",
				.cra_driver_name = "rfc4106-gcm-aes-chcr",
				.cra_blocksize = 1,
				.cra_priority = CHCR_AEAD_PRIORITY + 1,
				.cra_ctxsize = sizeof(struct chcr_context) +
					       sizeof(struct chcr_aead_ctx) +
					       sizeof(struct chcr_gcm_ctx),
			},
			.ivsize = GCM_RFC4106_IV_SIZE,
			.maxauthsize = GHASH_DIGEST_SIZE,
			.setkey = chcr_gcm_setkey,
			.setauthsize = chcr_4106_4309_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_CCM,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "ccm(aes)",
				.cra_driver_name = "ccm-aes-chcr",
				.cra_blocksize = 1,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize = sizeof(struct chcr_context) +
					       sizeof(struct chcr_aead_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = GHASH_DIGEST_SIZE,
			.setkey = chcr_aead_ccm_setkey,
			.setauthsize = chcr_ccm_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_AEAD_RFC4309,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "rfc4309(ccm(aes))",
				.cra_driver_name = "rfc4309-ccm-aes-chcr",
				.cra_blocksize = 1,
				.cra_priority = CHCR_AEAD_PRIORITY + 1,
				.cra_ctxsize = sizeof(struct chcr_context) +
					       sizeof(struct chcr_aead_ctx),
			},
			.ivsize = 8,
			.maxauthsize = GHASH_DIGEST_SIZE,
			.setkey = chcr_aead_rfc4309_setkey,
			.setauthsize = chcr_4106_4309_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_CBC_SHA,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(aes))",
				.cra_driver_name =
					"authenc-hmac-sha1-cbc-aes-chcr",
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize = sizeof(struct chcr_context) +
					       sizeof(struct chcr_aead_ctx) +
					       sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_CBC_SHA,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(aes))",
				.cra_driver_name =
					"authenc-hmac-sha256-cbc-aes-chcr",
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize = sizeof(struct chcr_context) +
					       sizeof(struct chcr_aead_ctx) +
					       sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_CBC_SHA,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(aes))",
				.cra_driver_name =
					"authenc-hmac-sha224-cbc-aes-chcr",
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize = sizeof(struct chcr_context) +
					       sizeof(struct chcr_aead_ctx) +
					       sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_CBC_SHA,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(aes))",
				.cra_driver_name =
					"authenc-hmac-sha384-cbc-aes-chcr",
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize = sizeof(struct chcr_context) +
					       sizeof(struct chcr_aead_ctx) +
					       sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_CBC_SHA,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(aes))",
				.cra_driver_name =
					"authenc-hmac-sha512-cbc-aes-chcr",
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize = sizeof(struct chcr_context) +
					       sizeof(struct chcr_aead_ctx) +
					       sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_CBC_NULL,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(digest_null,cbc(aes))",
				.cra_driver_name =
					"authenc-digest_null-cbc-aes-chcr",
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize = sizeof(struct chcr_context) +
					       sizeof(struct chcr_aead_ctx) +
					       sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = 0,
			.setkey = chcr_aead_digest_null_setkey,
			.setauthsize = chcr_authenc_null_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_CTR_SHA,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
				.cra_driver_name =
					"authenc-hmac-sha1-rfc3686-ctr-aes-chcr",
				.cra_blocksize = 1,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize = sizeof(struct chcr_context) +
					       sizeof(struct chcr_aead_ctx) +
					       sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_CTR_SHA,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
				.cra_driver_name =
					"authenc-hmac-sha256-rfc3686-ctr-aes-chcr",
				.cra_blocksize = 1,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize = sizeof(struct chcr_context) +
					       sizeof(struct chcr_aead_ctx) +
					       sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_CTR_SHA,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),rfc3686(ctr(aes)))",
				.cra_driver_name =
					"authenc-hmac-sha224-rfc3686-ctr-aes-chcr",
				.cra_blocksize = 1,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize = sizeof(struct chcr_context) +
					       sizeof(struct chcr_aead_ctx) +
					       sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_CTR_SHA,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
				.cra_driver_name =
					"authenc-hmac-sha384-rfc3686-ctr-aes-chcr",
				.cra_blocksize = 1,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize = sizeof(struct chcr_context) +
					       sizeof(struct chcr_aead_ctx) +
					       sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_CTR_SHA,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
				.cra_driver_name =
					"authenc-hmac-sha512-rfc3686-ctr-aes-chcr",
				.cra_blocksize = 1,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize = sizeof(struct chcr_context) +
					       sizeof(struct chcr_aead_ctx) +
					       sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
			.setkey = chcr_authenc_setkey,
			.setauthsize = chcr_authenc_setauthsize,
		}
	},
	{
		.type = CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_SUB_TYPE_CTR_NULL,
		.is_registered = 0,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(digest_null,rfc3686(ctr(aes)))",
				.cra_driver_name =
					"authenc-digest_null-rfc3686-ctr-aes-chcr",
				.cra_blocksize = 1,
				.cra_priority = CHCR_AEAD_PRIORITY,
				.cra_ctxsize = sizeof(struct chcr_context) +
					       sizeof(struct chcr_aead_ctx) +
					       sizeof(struct chcr_authenc_ctx),
			},
			.ivsize = CTR_RFC3686_IV_SIZE,
			.maxauthsize = 0,
			.setkey = chcr_aead_digest_null_setkey,
			.setauthsize = chcr_authenc_null_setauthsize,
		}
	},
};

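/* Once registered, these algorithms are reached through the normal
 * kernel crypto API by cra_name (or by cra_driver_name to force this
 * implementation). A minimal sketch of a caller, assuming a populated
 * 16-byte key[], an iv[] and pre-built src/dst scatterlists:
 *
 *	DECLARE_CRYPTO_WAIT(wait);
 *	struct crypto_aead *tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
 *	struct aead_request *req = aead_request_alloc(tfm, GFP_KERNEL);
 *
 *	crypto_aead_setkey(tfm, key, 16);
 *	crypto_aead_setauthsize(tfm, 16);
 *	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				  crypto_req_done, &wait);
 *	aead_request_set_crypt(req, src, dst, cryptlen, iv);
 *	aead_request_set_ad(req, assoclen);
 *	err = crypto_wait_req(crypto_aead_encrypt(req), &wait);
 *
 * Error handling and cleanup are omitted for brevity.
 */
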
/*
 * chcr_unregister_alg - Deregister crypto algorithms from the kernel
 * framework.
 */
static int chcr_unregister_alg(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		switch (driver_algs[i].type & CRYPTO_ALG_TYPE_MASK) {
		case CRYPTO_ALG_TYPE_SKCIPHER:
			if (driver_algs[i].is_registered && refcount_read(
			    &driver_algs[i].alg.skcipher.base.cra_refcnt)
			    == 1) {
				crypto_unregister_skcipher(
						&driver_algs[i].alg.skcipher);
				driver_algs[i].is_registered = 0;
			}
			break;
		case CRYPTO_ALG_TYPE_AEAD:
			if (driver_algs[i].is_registered && refcount_read(
			    &driver_algs[i].alg.aead.base.cra_refcnt) == 1) {
				crypto_unregister_aead(
						&driver_algs[i].alg.aead);
				driver_algs[i].is_registered = 0;
			}
			break;
		case CRYPTO_ALG_TYPE_AHASH:
			if (driver_algs[i].is_registered && refcount_read(
			    &driver_algs[i].alg.hash.halg.base.cra_refcnt)
			    == 1) {
				crypto_unregister_ahash(
						&driver_algs[i].alg.hash);
				driver_algs[i].is_registered = 0;
			}
			break;
		}
	}
	return 0;
}

#define SZ_AHASH_CTX sizeof(struct chcr_context)
#define SZ_AHASH_H_CTX (sizeof(struct chcr_context) + sizeof(struct hmac_ctx))
#define SZ_AHASH_REQ_CTX sizeof(struct chcr_ahash_req_ctx)

4430*4882a593Smuzhiyun /*
4431*4882a593Smuzhiyun * chcr_register_alg - Register crypto algorithms with kernel framework.
4432*4882a593Smuzhiyun */
static int chcr_register_alg(void)
{
	struct crypto_alg ai;
	struct ahash_alg *a_hash;
	int err = 0, i;
	char *name = NULL;

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		if (driver_algs[i].is_registered)
			continue;
		switch (driver_algs[i].type & CRYPTO_ALG_TYPE_MASK) {
		case CRYPTO_ALG_TYPE_SKCIPHER:
			driver_algs[i].alg.skcipher.base.cra_priority =
				CHCR_CRA_PRIORITY;
			driver_algs[i].alg.skcipher.base.cra_module = THIS_MODULE;
			driver_algs[i].alg.skcipher.base.cra_flags =
				CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_ASYNC |
				CRYPTO_ALG_ALLOCATES_MEMORY |
				CRYPTO_ALG_NEED_FALLBACK;
			driver_algs[i].alg.skcipher.base.cra_ctxsize =
				sizeof(struct chcr_context) +
				sizeof(struct ablk_ctx);
			driver_algs[i].alg.skcipher.base.cra_alignmask = 0;

			err = crypto_register_skcipher(&driver_algs[i].alg.skcipher);
			name = driver_algs[i].alg.skcipher.base.cra_driver_name;
			break;
		case CRYPTO_ALG_TYPE_AEAD:
			driver_algs[i].alg.aead.base.cra_flags =
				CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK |
				CRYPTO_ALG_ALLOCATES_MEMORY;
			driver_algs[i].alg.aead.encrypt = chcr_aead_encrypt;
			driver_algs[i].alg.aead.decrypt = chcr_aead_decrypt;
			driver_algs[i].alg.aead.init = chcr_aead_cra_init;
			driver_algs[i].alg.aead.exit = chcr_aead_cra_exit;
			driver_algs[i].alg.aead.base.cra_module = THIS_MODULE;
			err = crypto_register_aead(&driver_algs[i].alg.aead);
			name = driver_algs[i].alg.aead.base.cra_driver_name;
			break;
		case CRYPTO_ALG_TYPE_AHASH:
			a_hash = &driver_algs[i].alg.hash;
			a_hash->update = chcr_ahash_update;
			a_hash->final = chcr_ahash_final;
			a_hash->finup = chcr_ahash_finup;
			a_hash->digest = chcr_ahash_digest;
			a_hash->export = chcr_ahash_export;
			a_hash->import = chcr_ahash_import;
			a_hash->halg.statesize = SZ_AHASH_REQ_CTX;
			a_hash->halg.base.cra_priority = CHCR_CRA_PRIORITY;
			a_hash->halg.base.cra_module = THIS_MODULE;
			a_hash->halg.base.cra_flags =
				CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY;
			a_hash->halg.base.cra_alignmask = 0;
			a_hash->halg.base.cra_exit = NULL;

			if (driver_algs[i].type == CRYPTO_ALG_TYPE_HMAC) {
				a_hash->halg.base.cra_init = chcr_hmac_cra_init;
				a_hash->halg.base.cra_exit = chcr_hmac_cra_exit;
				a_hash->init = chcr_hmac_init;
				a_hash->setkey = chcr_ahash_setkey;
				a_hash->halg.base.cra_ctxsize = SZ_AHASH_H_CTX;
			} else {
				a_hash->init = chcr_sha_init;
				a_hash->halg.base.cra_ctxsize = SZ_AHASH_CTX;
				a_hash->halg.base.cra_init = chcr_sha_cra_init;
			}
			err = crypto_register_ahash(&driver_algs[i].alg.hash);
			ai = driver_algs[i].alg.hash.halg.base;
			name = ai.cra_driver_name;
			break;
		}
		if (err) {
			pr_err("%s: algorithm registration failed\n", name);
			goto register_err;
		}
		driver_algs[i].is_registered = 1;
	}
	return 0;

register_err:
	chcr_unregister_alg();
	return err;
}
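
/*
 * Illustrative only (not part of the driver): once chcr_register_alg()
 * succeeds, consumers reach these implementations through the generic
 * crypto API, and CHCR_CRA_PRIORITY decides whether the core prefers
 * them over software implementations. A minimal sketch, assuming
 * "cbc(aes)" is one of the modes in driver_algs[]:
 *
 *	struct crypto_skcipher *tfm;
 *
 *	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	** ...set key, build skcipher requests, encrypt/decrypt... **
 *	crypto_free_skcipher(tfm);
 */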

/*
 * start_crypto - Register the crypto algorithms.
 * This should be called once, when the first device comes up. After this
 * the kernel will start calling driver APIs for crypto operations.
 */
int start_crypto(void)
{
	return chcr_register_alg();
}

/*
 * stop_crypto - Deregister all the crypto algorithms with the kernel.
 * This should be called once, when the last device goes down. After this
 * the kernel will not call the driver APIs for crypto operations.
 */
int stop_crypto(void)
{
	chcr_unregister_alg();
	return 0;
}
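
/*
 * A minimal sketch of the expected call pattern (names here are
 * illustrative, not the driver's actual API): the device-management code
 * is assumed to count live devices so registration happens exactly once
 * on the first device and teardown exactly once on the last.
 *
 *	static atomic_t example_dev_count = ATOMIC_INIT(0);
 *
 *	static int example_device_up(void)
 *	{
 *		if (atomic_inc_return(&example_dev_count) == 1)
 *			return start_crypto();
 *		return 0;
 *	}
 *
 *	static void example_device_down(void)
 *	{
 *		if (atomic_dec_and_test(&example_dev_count))
 *			stop_crypto();
 *	}
 */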