// SPDX-License-Identifier: GPL-2.0-only
/*
 * AMD Cryptographic Coprocessor (CCP) AES GCM crypto API support
 *
 * Copyright (C) 2016,2017 Advanced Micro Devices, Inc.
 *
 * Author: Gary R Hook <gary.hook@amd.com>
 */

#include <linux/module.h>
#include <linux/sched.h>
#include <linux/delay.h>
#include <linux/scatterlist.h>
#include <linux/crypto.h>
#include <crypto/internal/aead.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/ctr.h>
#include <crypto/gcm.h>
#include <crypto/scatterwalk.h>

#include "ccp-crypto.h"

static int ccp_aes_gcm_complete(struct crypto_async_request *async_req, int ret)
{
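	/* No post-processing is needed; just pass the status through */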
	return ret;
}

static int ccp_aes_gcm_setkey(struct crypto_aead *tfm, const u8 *key,
			      unsigned int key_len)
{
	struct ccp_ctx *ctx = crypto_aead_ctx(tfm);

	switch (key_len) {
	case AES_KEYSIZE_128:
		ctx->u.aes.type = CCP_AES_TYPE_128;
		break;
	case AES_KEYSIZE_192:
		ctx->u.aes.type = CCP_AES_TYPE_192;
		break;
	case AES_KEYSIZE_256:
		ctx->u.aes.type = CCP_AES_TYPE_256;
		break;
	default:
		return -EINVAL;
	}

	ctx->u.aes.mode = CCP_AES_MODE_GCM;
	ctx->u.aes.key_len = key_len;

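	/* Keep a local copy of the key and describe it with a scatterlist
	 * so it can be handed to the CCP with each request.
	 */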
	memcpy(ctx->u.aes.key, key, key_len);
	sg_init_one(&ctx->u.aes.key_sg, ctx->u.aes.key, key_len);

	return 0;
}

static int ccp_aes_gcm_setauthsize(struct crypto_aead *tfm,
				   unsigned int authsize)
{
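	/* Only the tag lengths permitted for GCM (16-12, 8 and 4 bytes)
	 * are accepted.
	 */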
	switch (authsize) {
	case 16:
	case 15:
	case 14:
	case 13:
	case 12:
	case 8:
	case 4:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

static int ccp_aes_gcm_crypt(struct aead_request *req, bool encrypt)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct ccp_ctx *ctx = crypto_aead_ctx(tfm);
	struct ccp_aes_req_ctx *rctx = aead_request_ctx(req);
	struct scatterlist *iv_sg = NULL;
	unsigned int iv_len = 0;
	int i;
	int ret = 0;

	if (!ctx->u.aes.key_len)
		return -EINVAL;

	if (ctx->u.aes.mode != CCP_AES_MODE_GCM)
		return -EINVAL;

	if (!req->iv)
		return -EINVAL;

	/*
	 * 5 parts:
	 *   plaintext/ciphertext input
	 *   AAD
	 *   key
	 *   IV
	 *   Destination+tag buffer
	 */

	/* Prepare the IV: 12 bytes + an integer (counter) */
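	/* With a 96-bit IV, GCM defines the initial counter block as the IV
	 * followed by a 32-bit counter set to 1.
	 */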
	memcpy(rctx->iv, req->iv, GCM_AES_IV_SIZE);
	for (i = 0; i < 3; i++)
		rctx->iv[i + GCM_AES_IV_SIZE] = 0;
	rctx->iv[AES_BLOCK_SIZE - 1] = 1;

	/* Set up a scatterlist for the IV */
	iv_sg = &rctx->iv_sg;
	iv_len = AES_BLOCK_SIZE;
	sg_init_one(iv_sg, rctx->iv, iv_len);

	/* The AAD + plaintext are concatenated in the src buffer */
	memset(&rctx->cmd, 0, sizeof(rctx->cmd));
	INIT_LIST_HEAD(&rctx->cmd.entry);
	rctx->cmd.engine = CCP_ENGINE_AES;
	rctx->cmd.u.aes.authsize = crypto_aead_authsize(tfm);
	rctx->cmd.u.aes.type = ctx->u.aes.type;
	rctx->cmd.u.aes.mode = ctx->u.aes.mode;
	rctx->cmd.u.aes.action = encrypt;
	rctx->cmd.u.aes.key = &ctx->u.aes.key_sg;
	rctx->cmd.u.aes.key_len = ctx->u.aes.key_len;
	rctx->cmd.u.aes.iv = iv_sg;
	rctx->cmd.u.aes.iv_len = iv_len;
	rctx->cmd.u.aes.src = req->src;
	rctx->cmd.u.aes.src_len = req->cryptlen;
	rctx->cmd.u.aes.aad_len = req->assoclen;

	/* The cipher text + the tag are in the dst buffer */
	rctx->cmd.u.aes.dst = req->dst;

	ret = ccp_crypto_enqueue_request(&req->base, &rctx->cmd);

	return ret;
}

static int ccp_aes_gcm_encrypt(struct aead_request *req)
{
	return ccp_aes_gcm_crypt(req, CCP_AES_ACTION_ENCRYPT);
}

static int ccp_aes_gcm_decrypt(struct aead_request *req)
{
	return ccp_aes_gcm_crypt(req, CCP_AES_ACTION_DECRYPT);
}

static int ccp_aes_gcm_cra_init(struct crypto_aead *tfm)
{
	struct ccp_ctx *ctx = crypto_aead_ctx(tfm);

	ctx->complete = ccp_aes_gcm_complete;
	ctx->u.aes.key_len = 0;

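	/* The per-request context holds the CCP command and the IV buffer */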
	crypto_aead_set_reqsize(tfm, sizeof(struct ccp_aes_req_ctx));

	return 0;
}

static void ccp_aes_gcm_cra_exit(struct crypto_tfm *tfm)
{
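	/* Nothing is allocated in cra_init, so there is nothing to free */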
}

static struct aead_alg ccp_aes_gcm_defaults = {
	.setkey = ccp_aes_gcm_setkey,
	.setauthsize = ccp_aes_gcm_setauthsize,
	.encrypt = ccp_aes_gcm_encrypt,
	.decrypt = ccp_aes_gcm_decrypt,
	.init = ccp_aes_gcm_cra_init,
	.ivsize = GCM_AES_IV_SIZE,
	.maxauthsize = AES_BLOCK_SIZE,
	.base = {
		.cra_flags	= CRYPTO_ALG_ASYNC |
				  CRYPTO_ALG_ALLOCATES_MEMORY |
				  CRYPTO_ALG_KERN_DRIVER_ONLY |
				  CRYPTO_ALG_NEED_FALLBACK,
		.cra_blocksize	= AES_BLOCK_SIZE,
		.cra_ctxsize	= sizeof(struct ccp_ctx),
		.cra_priority	= CCP_CRA_PRIORITY,
		.cra_exit	= ccp_aes_gcm_cra_exit,
		.cra_module	= THIS_MODULE,
	},
};

struct ccp_aes_aead_def {
	enum ccp_aes_mode mode;
	unsigned int version;
	const char *name;
	const char *driver_name;
	unsigned int blocksize;
	unsigned int ivsize;
	struct aead_alg *alg_defaults;
};

static struct ccp_aes_aead_def aes_aead_algs[] = {
	{
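		/* gcm(aes) is only registered on version 5.0 or later CCPs */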
		.mode		= CCP_AES_MODE_GHASH,
		.version	= CCP_VERSION(5, 0),
		.name		= "gcm(aes)",
		.driver_name	= "gcm-aes-ccp",
		.blocksize	= 1,
		.ivsize		= AES_BLOCK_SIZE,
		.alg_defaults	= &ccp_aes_gcm_defaults,
	},
};

static int ccp_register_aes_aead(struct list_head *head,
				 const struct ccp_aes_aead_def *def)
{
	struct ccp_crypto_aead *ccp_aead;
	struct aead_alg *alg;
	int ret;

	ccp_aead = kzalloc(sizeof(*ccp_aead), GFP_KERNEL);
	if (!ccp_aead)
		return -ENOMEM;

	INIT_LIST_HEAD(&ccp_aead->entry);

	ccp_aead->mode = def->mode;

	/* Copy the defaults and override as necessary */
	alg = &ccp_aead->alg;
	*alg = *def->alg_defaults;
	snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", def->name);
	snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
		 def->driver_name);
	alg->base.cra_blocksize = def->blocksize;

	ret = crypto_register_aead(alg);
	if (ret) {
		pr_err("%s aead algorithm registration error (%d)\n",
		       alg->base.cra_name, ret);
		kfree(ccp_aead);
		return ret;
	}

	list_add(&ccp_aead->entry, head);

	return 0;
}

int ccp_register_aes_aeads(struct list_head *head)
{
	int i, ret;
	unsigned int ccpversion = ccp_version();

	for (i = 0; i < ARRAY_SIZE(aes_aead_algs); i++) {
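		/* Skip algorithms that need a newer CCP than is present */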
		if (aes_aead_algs[i].version > ccpversion)
			continue;
		ret = ccp_register_aes_aead(head, &aes_aead_algs[i]);
		if (ret)
			return ret;
	}

	return 0;
}