// SPDX-License-Identifier: GPL-2.0-only
/*
 * AES GCM routines supporting the Power 7+ Nest Accelerators driver
 *
 * Copyright (C) 2012 International Business Machines Inc.
 *
 * Author: Kent Yoder <yoder1@us.ibm.com>
 */

#include <crypto/internal/aead.h>
#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <crypto/gcm.h>
#include <crypto/scatterwalk.h>
#include <linux/module.h>
#include <linux/types.h>
#include <asm/vio.h>

#include "nx_csbcpb.h"
#include "nx.h"

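/*
 * Program the AES key into both coprocessor control blocks: the GCM
 * CPB used for the encrypt/decrypt pass and the GCA CPB used to hash
 * the associated data.
 */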
static int gcm_aes_nx_set_key(struct crypto_aead *tfm,
			      const u8 *in_key,
			      unsigned int key_len)
{
	struct nx_crypto_ctx *nx_ctx = crypto_aead_ctx(tfm);
	struct nx_csbcpb *csbcpb = nx_ctx->csbcpb;
	struct nx_csbcpb *csbcpb_aead = nx_ctx->csbcpb_aead;

	nx_ctx_init(nx_ctx, HCOP_FC_AES);

	switch (key_len) {
	case AES_KEYSIZE_128:
		NX_CPB_SET_KEY_SIZE(csbcpb, NX_KS_AES_128);
		NX_CPB_SET_KEY_SIZE(csbcpb_aead, NX_KS_AES_128);
		nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_128];
		break;
	case AES_KEYSIZE_192:
		NX_CPB_SET_KEY_SIZE(csbcpb, NX_KS_AES_192);
		NX_CPB_SET_KEY_SIZE(csbcpb_aead, NX_KS_AES_192);
		nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_192];
		break;
	case AES_KEYSIZE_256:
		NX_CPB_SET_KEY_SIZE(csbcpb, NX_KS_AES_256);
		NX_CPB_SET_KEY_SIZE(csbcpb_aead, NX_KS_AES_256);
		nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_256];
		break;
	default:
		return -EINVAL;
	}

	csbcpb->cpb.hdr.mode = NX_MODE_AES_GCM;
	memcpy(csbcpb->cpb.aes_gcm.key, in_key, key_len);

	csbcpb_aead->cpb.hdr.mode = NX_MODE_AES_GCA;
	memcpy(csbcpb_aead->cpb.aes_gca.key, in_key, key_len);

	return 0;
}

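/*
 * An RFC 4106 key is the AES key with a 4-byte nonce (salt) appended.
 * Split off the nonce, stash it in the context, and program the rest
 * as a regular GCM key.
 */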
static int gcm4106_aes_nx_set_key(struct crypto_aead *tfm,
				  const u8 *in_key,
				  unsigned int key_len)
{
	struct nx_crypto_ctx *nx_ctx = crypto_aead_ctx(tfm);
	char *nonce = nx_ctx->priv.gcm.nonce;
	int rc;

	if (key_len < 4)
		return -EINVAL;

	key_len -= 4;

	rc = gcm_aes_nx_set_key(tfm, in_key, key_len);
	if (rc)
		goto out;

	memcpy(nonce, in_key + key_len, 4);
out:
	return rc;
}

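/* RFC 4106 permits only 8-, 12- and 16-byte ICVs. */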
static int gcm4106_aes_nx_setauthsize(struct crypto_aead *tfm,
				      unsigned int authsize)
{
	switch (authsize) {
	case 8:
	case 12:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

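/*
 * Hash the associated data via the GCA coprocessor op, walking req->src
 * in chunks bounded by the sg-list and data-length limits and chaining
 * the partial pattern between h-calls.  AAD of one block or less is
 * simply copied to @out for the GCM operation to consume directly.
 */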
static int nx_gca(struct nx_crypto_ctx *nx_ctx,
		  struct aead_request *req,
		  u8 *out,
		  unsigned int assoclen)
{
	int rc;
	struct nx_csbcpb *csbcpb_aead = nx_ctx->csbcpb_aead;
	struct scatter_walk walk;
	struct nx_sg *nx_sg = nx_ctx->in_sg;
	unsigned int nbytes = assoclen;
	unsigned int processed = 0, to_process;
	unsigned int max_sg_len;

	if (nbytes <= AES_BLOCK_SIZE) {
		scatterwalk_start(&walk, req->src);
		scatterwalk_copychunks(out, &walk, nbytes, SCATTERWALK_FROM_SG);
		scatterwalk_done(&walk, SCATTERWALK_FROM_SG, 0);
		return 0;
	}

	NX_CPB_FDM(csbcpb_aead) &= ~NX_FDM_CONTINUATION;

	/* page_limit: number of sg entries that fit on one page */
	max_sg_len = min_t(u64, nx_driver.of.max_sg_len/sizeof(struct nx_sg),
			   nx_ctx->ap->sglen);
	max_sg_len = min_t(u64, max_sg_len,
			   nx_ctx->ap->databytelen/NX_PAGE_SIZE);

	do {
		/*
		 * to_process: the data chunk to process in this update.
		 * This value is bound by sg list limits.
		 */
		to_process = min_t(u64, nbytes - processed,
				   nx_ctx->ap->databytelen);
		to_process = min_t(u64, to_process,
				   NX_PAGE_SIZE * (max_sg_len - 1));

		nx_sg = nx_walk_and_build(nx_ctx->in_sg, max_sg_len,
					  req->src, processed, &to_process);

		if ((to_process + processed) < nbytes)
			NX_CPB_FDM(csbcpb_aead) |= NX_FDM_INTERMEDIATE;
		else
			NX_CPB_FDM(csbcpb_aead) &= ~NX_FDM_INTERMEDIATE;

		nx_ctx->op_aead.inlen = (nx_ctx->in_sg - nx_sg)
					* sizeof(struct nx_sg);

		rc = nx_hcall_sync(nx_ctx, &nx_ctx->op_aead,
				   req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP);
		if (rc)
			return rc;

		memcpy(csbcpb_aead->cpb.aes_gca.in_pat,
		       csbcpb_aead->cpb.aes_gca.out_pat,
		       AES_BLOCK_SIZE);
		NX_CPB_FDM(csbcpb_aead) |= NX_FDM_CONTINUATION;

		atomic_inc(&(nx_ctx->stats->aes_ops));
		atomic64_add(assoclen, &(nx_ctx->stats->aes_bytes));

		processed += to_process;
	} while (processed < nbytes);

	memcpy(out, csbcpb_aead->cpb.aes_gca.out_pat, AES_BLOCK_SIZE);

	return rc;
}

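/*
 * AAD-only requests (cryptlen == 0, assoclen != 0): run the engine in
 * GMAC mode over the associated data, restoring GCM mode on the way
 * out.
 */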
static int gmac(struct aead_request *req, const u8 *iv, unsigned int assoclen)
{
	int rc;
	struct nx_crypto_ctx *nx_ctx =
		crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct nx_csbcpb *csbcpb = nx_ctx->csbcpb;
	struct nx_sg *nx_sg;
	unsigned int nbytes = assoclen;
	unsigned int processed = 0, to_process;
	unsigned int max_sg_len;

	/* Set GMAC mode */
	csbcpb->cpb.hdr.mode = NX_MODE_AES_GMAC;

	NX_CPB_FDM(csbcpb) &= ~NX_FDM_CONTINUATION;

	/* page_limit: number of sg entries that fit on one page */
	max_sg_len = min_t(u64, nx_driver.of.max_sg_len/sizeof(struct nx_sg),
			   nx_ctx->ap->sglen);
	max_sg_len = min_t(u64, max_sg_len,
			   nx_ctx->ap->databytelen/NX_PAGE_SIZE);

	/* Copy IV */
	memcpy(csbcpb->cpb.aes_gcm.iv_or_cnt, iv, AES_BLOCK_SIZE);

	do {
		/*
		 * to_process: the data chunk to process in this update.
		 * This value is bound by sg list limits.
		 */
		to_process = min_t(u64, nbytes - processed,
				   nx_ctx->ap->databytelen);
		to_process = min_t(u64, to_process,
				   NX_PAGE_SIZE * (max_sg_len - 1));

		nx_sg = nx_walk_and_build(nx_ctx->in_sg, max_sg_len,
					  req->src, processed, &to_process);

		if ((to_process + processed) < nbytes)
			NX_CPB_FDM(csbcpb) |= NX_FDM_INTERMEDIATE;
		else
			NX_CPB_FDM(csbcpb) &= ~NX_FDM_INTERMEDIATE;

		nx_ctx->op.inlen = (nx_ctx->in_sg - nx_sg)
				   * sizeof(struct nx_sg);

		csbcpb->cpb.aes_gcm.bit_length_data = 0;
		csbcpb->cpb.aes_gcm.bit_length_aad = 8 * nbytes;

		rc = nx_hcall_sync(nx_ctx, &nx_ctx->op,
				   req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP);
		if (rc)
			goto out;

		memcpy(csbcpb->cpb.aes_gcm.in_pat_or_aad,
		       csbcpb->cpb.aes_gcm.out_pat_or_mac, AES_BLOCK_SIZE);
		memcpy(csbcpb->cpb.aes_gcm.in_s0,
		       csbcpb->cpb.aes_gcm.out_s0, AES_BLOCK_SIZE);

		NX_CPB_FDM(csbcpb) |= NX_FDM_CONTINUATION;

		atomic_inc(&(nx_ctx->stats->aes_ops));
		atomic64_add(assoclen, &(nx_ctx->stats->aes_bytes));

		processed += to_process;
	} while (processed < nbytes);

out:
	/* Restore GCM mode */
	csbcpb->cpb.hdr.mode = NX_MODE_AES_GCM;
	return rc;
}

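/*
 * Fully empty requests (no payload, no AAD): per nx_wb 4.8.1.3 the tag
 * is just the IV/counter block encrypted under the key, so run a
 * single-block AES ECB pass over the IV.
 */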
static int gcm_empty(struct aead_request *req, const u8 *iv, int enc)
{
	int rc;
	struct nx_crypto_ctx *nx_ctx =
		crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct nx_csbcpb *csbcpb = nx_ctx->csbcpb;
	char out[AES_BLOCK_SIZE];
	struct nx_sg *in_sg, *out_sg;
	int len;

	/*
	 * For scenarios where the input message is zero length, AES CTR mode
	 * may be used. Set the source data to be a single block (16B) of all
	 * zeros, and set the input IV value to be the same as the GMAC IV
	 * value. - nx_wb 4.8.1.3
	 */

	/* Change to ECB mode */
	csbcpb->cpb.hdr.mode = NX_MODE_AES_ECB;
	memcpy(csbcpb->cpb.aes_ecb.key, csbcpb->cpb.aes_gcm.key,
	       sizeof(csbcpb->cpb.aes_ecb.key));
	if (enc)
		NX_CPB_FDM(csbcpb) |= NX_FDM_ENDE_ENCRYPT;
	else
		NX_CPB_FDM(csbcpb) &= ~NX_FDM_ENDE_ENCRYPT;

	len = AES_BLOCK_SIZE;

	/* Encrypt the counter/IV */
	in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) iv,
				 &len, nx_ctx->ap->sglen);

	if (len != AES_BLOCK_SIZE)
		return -EINVAL;

	len = sizeof(out);
	out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *) out, &len,
				  nx_ctx->ap->sglen);

	if (len != sizeof(out))
		return -EINVAL;

	nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg);
	nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg);

	rc = nx_hcall_sync(nx_ctx, &nx_ctx->op,
			   req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP);
	if (rc)
		goto out;
	atomic_inc(&(nx_ctx->stats->aes_ops));

	/* Copy out the auth tag */
	memcpy(csbcpb->cpb.aes_gcm.out_pat_or_mac, out,
	       crypto_aead_authsize(crypto_aead_reqtfm(req)));
out:
	/* Restore GCM mode */
	csbcpb->cpb.hdr.mode = NX_MODE_AES_GCM;

	/*
	 * The ECB key uses the same CPB region as the GCM AAD and counter,
	 * so it's safe to just fill it with zeroes.
	 */
	memset(csbcpb->cpb.aes_ecb.key, 0, sizeof(csbcpb->cpb.aes_ecb.key));

	return rc;
}

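/*
 * Core GCM path: hash the AAD (via nx_gca, gmac or gcm_empty), then
 * walk the payload in hardware-sized chunks, chaining the counter and
 * partial tag between h-calls.  On encrypt the tag is appended to the
 * destination; on decrypt it is recomputed and compared against the
 * tag in the source with crypto_memneq().
 */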
static int gcm_aes_nx_crypt(struct aead_request *req, int enc,
			    unsigned int assoclen)
{
	struct nx_crypto_ctx *nx_ctx =
		crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct nx_gcm_rctx *rctx = aead_request_ctx(req);
	struct nx_csbcpb *csbcpb = nx_ctx->csbcpb;
	unsigned int nbytes = req->cryptlen;
	unsigned int processed = 0, to_process;
	unsigned long irq_flags;
	int rc = -EINVAL;

	spin_lock_irqsave(&nx_ctx->lock, irq_flags);

	/* initialize the counter */
	*(u32 *)&rctx->iv[NX_GCM_CTR_OFFSET] = 1;

	if (nbytes == 0) {
		if (assoclen == 0)
			rc = gcm_empty(req, rctx->iv, enc);
		else
			rc = gmac(req, rctx->iv, assoclen);
		if (rc)
			goto out;
		else
			goto mac;
	}

	/* Process associated data */
	csbcpb->cpb.aes_gcm.bit_length_aad = assoclen * 8;
	if (assoclen) {
		rc = nx_gca(nx_ctx, req, csbcpb->cpb.aes_gcm.in_pat_or_aad,
			    assoclen);
		if (rc)
			goto out;
	}

	/* Set flags for encryption */
	NX_CPB_FDM(csbcpb) &= ~NX_FDM_CONTINUATION;
	if (enc) {
		NX_CPB_FDM(csbcpb) |= NX_FDM_ENDE_ENCRYPT;
	} else {
		NX_CPB_FDM(csbcpb) &= ~NX_FDM_ENDE_ENCRYPT;
		nbytes -= crypto_aead_authsize(crypto_aead_reqtfm(req));
	}

	do {
		to_process = nbytes - processed;

		csbcpb->cpb.aes_gcm.bit_length_data = nbytes * 8;
		rc = nx_build_sg_lists(nx_ctx, rctx->iv, req->dst,
				       req->src, &to_process,
				       processed + req->assoclen,
				       csbcpb->cpb.aes_gcm.iv_or_cnt);
		if (rc)
			goto out;

		if ((to_process + processed) < nbytes)
			NX_CPB_FDM(csbcpb) |= NX_FDM_INTERMEDIATE;
		else
			NX_CPB_FDM(csbcpb) &= ~NX_FDM_INTERMEDIATE;

		rc = nx_hcall_sync(nx_ctx, &nx_ctx->op,
				   req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP);
		if (rc)
			goto out;

		memcpy(rctx->iv, csbcpb->cpb.aes_gcm.out_cnt, AES_BLOCK_SIZE);
		memcpy(csbcpb->cpb.aes_gcm.in_pat_or_aad,
		       csbcpb->cpb.aes_gcm.out_pat_or_mac, AES_BLOCK_SIZE);
		memcpy(csbcpb->cpb.aes_gcm.in_s0,
		       csbcpb->cpb.aes_gcm.out_s0, AES_BLOCK_SIZE);

		NX_CPB_FDM(csbcpb) |= NX_FDM_CONTINUATION;

		atomic_inc(&(nx_ctx->stats->aes_ops));
		atomic64_add(csbcpb->csb.processed_byte_count,
			     &(nx_ctx->stats->aes_bytes));

		processed += to_process;
	} while (processed < nbytes);

mac:
	if (enc) {
		/* copy out the auth tag */
		scatterwalk_map_and_copy(
			csbcpb->cpb.aes_gcm.out_pat_or_mac,
			req->dst, req->assoclen + nbytes,
			crypto_aead_authsize(crypto_aead_reqtfm(req)),
			SCATTERWALK_TO_SG);
	} else {
		u8 *itag = nx_ctx->priv.gcm.iauth_tag;
		u8 *otag = csbcpb->cpb.aes_gcm.out_pat_or_mac;

		scatterwalk_map_and_copy(
			itag, req->src, req->assoclen + nbytes,
			crypto_aead_authsize(crypto_aead_reqtfm(req)),
			SCATTERWALK_FROM_SG);
		rc = crypto_memneq(itag, otag,
				   crypto_aead_authsize(crypto_aead_reqtfm(req))) ?
		     -EBADMSG : 0;
	}
out:
	spin_unlock_irqrestore(&nx_ctx->lock, irq_flags);
	return rc;
}

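/* gcm(aes) encrypt: the caller's 12-byte IV seeds the counter block. */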
static int gcm_aes_nx_encrypt(struct aead_request *req)
{
	struct nx_gcm_rctx *rctx = aead_request_ctx(req);
	char *iv = rctx->iv;

	memcpy(iv, req->iv, GCM_AES_IV_SIZE);

	return gcm_aes_nx_crypt(req, 1, req->assoclen);
}

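/* gcm(aes) decrypt: same IV handling as the encrypt path. */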
static int gcm_aes_nx_decrypt(struct aead_request *req)
{
	struct nx_gcm_rctx *rctx = aead_request_ctx(req);
	char *iv = rctx->iv;

	memcpy(iv, req->iv, GCM_AES_IV_SIZE);

	return gcm_aes_nx_crypt(req, 0, req->assoclen);
}

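/*
 * rfc4106(gcm(aes)) encrypt: the nonce is the 4 salt bytes saved at
 * setkey time followed by the 8-byte per-request IV.  Those 8 IV bytes
 * also trail the AAD region, so they are trimmed from the AAD length.
 */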
static int gcm4106_aes_nx_encrypt(struct aead_request *req)
{
	struct nx_crypto_ctx *nx_ctx =
		crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct nx_gcm_rctx *rctx = aead_request_ctx(req);
	char *iv = rctx->iv;
	char *nonce = nx_ctx->priv.gcm.nonce;

	memcpy(iv, nonce, NX_GCM4106_NONCE_LEN);
	memcpy(iv + NX_GCM4106_NONCE_LEN, req->iv, 8);

	if (req->assoclen < 8)
		return -EINVAL;

	return gcm_aes_nx_crypt(req, 1, req->assoclen - 8);
}

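/* rfc4106(gcm(aes)) decrypt: same nonce and AAD handling as encrypt. */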
static int gcm4106_aes_nx_decrypt(struct aead_request *req)
{
	struct nx_crypto_ctx *nx_ctx =
		crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct nx_gcm_rctx *rctx = aead_request_ctx(req);
	char *iv = rctx->iv;
	char *nonce = nx_ctx->priv.gcm.nonce;

	memcpy(iv, nonce, NX_GCM4106_NONCE_LEN);
	memcpy(iv + NX_GCM4106_NONCE_LEN, req->iv, 8);

	if (req->assoclen < 8)
		return -EINVAL;

	return gcm_aes_nx_crypt(req, 0, req->assoclen - 8);
}

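/*
 * Algorithm templates exported to the rest of the driver; registration
 * with the crypto API is expected to happen in the common NX driver
 * code (nx.c).
 */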
struct aead_alg nx_gcm_aes_alg = {
	.base = {
		.cra_name        = "gcm(aes)",
		.cra_driver_name = "gcm-aes-nx",
		.cra_priority    = 300,
		.cra_blocksize   = 1,
		.cra_ctxsize     = sizeof(struct nx_crypto_ctx),
		.cra_module      = THIS_MODULE,
	},
	.init        = nx_crypto_ctx_aes_gcm_init,
	.exit        = nx_crypto_ctx_aead_exit,
	.ivsize      = GCM_AES_IV_SIZE,
	.maxauthsize = AES_BLOCK_SIZE,
	.setkey      = gcm_aes_nx_set_key,
	.encrypt     = gcm_aes_nx_encrypt,
	.decrypt     = gcm_aes_nx_decrypt,
};

struct aead_alg nx_gcm4106_aes_alg = {
	.base = {
		.cra_name        = "rfc4106(gcm(aes))",
		.cra_driver_name = "rfc4106-gcm-aes-nx",
		.cra_priority    = 300,
		.cra_blocksize   = 1,
		.cra_ctxsize     = sizeof(struct nx_crypto_ctx),
		.cra_module      = THIS_MODULE,
	},
	.init        = nx_crypto_ctx_aes_gcm_init,
	.exit        = nx_crypto_ctx_aead_exit,
	.ivsize      = GCM_RFC4106_IV_SIZE,
	.maxauthsize = AES_BLOCK_SIZE,
	.setkey      = gcm4106_aes_nx_set_key,
	.setauthsize = gcm4106_aes_nx_setauthsize,
	.encrypt     = gcm4106_aes_nx_encrypt,
	.decrypt     = gcm4106_aes_nx_decrypt,
};