xref: /OK3568_Linux_fs/kernel/drivers/crypto/ccp/ccp-crypto-aes-xts.c (revision 4882a59341e53eb6f0b4789bf948001014eff981)
// SPDX-License-Identifier: GPL-2.0-only
/*
 * AMD Cryptographic Coprocessor (CCP) AES XTS crypto API support
 *
 * Copyright (C) 2013,2017 Advanced Micro Devices, Inc.
 *
 * Author: Gary R Hook <gary.hook@amd.com>
 * Author: Tom Lendacky <thomas.lendacky@amd.com>
 */

#include <linux/module.h>
#include <linux/sched.h>
#include <linux/delay.h>
#include <linux/scatterlist.h>
#include <crypto/aes.h>
#include <crypto/xts.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>

#include "ccp-crypto.h"

struct ccp_aes_xts_def {
	const char *name;
	const char *drv_name;
};

static const struct ccp_aes_xts_def aes_xts_algs[] = {
	{
		.name		= "xts(aes)",
		.drv_name	= "xts-aes-ccp",
	},
};

struct ccp_unit_size_map {
	unsigned int size;
	u32 value;
};

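/*
 * Map of the XTS data unit (sector) sizes the CCP engine accepts to the
 * encoding used in the command descriptor. A request is offloaded to the
 * CCP only when its length exactly matches one of these sizes; anything
 * else is handed to the software fallback (see ccp_aes_xts_crypt() below).
 */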
static struct ccp_unit_size_map xts_unit_sizes[] = {
	{
		.size   = 16,
		.value	= CCP_XTS_AES_UNIT_SIZE_16,
	},
	{
		.size   = 512,
		.value	= CCP_XTS_AES_UNIT_SIZE_512,
	},
	{
		.size   = 1024,
		.value	= CCP_XTS_AES_UNIT_SIZE_1024,
	},
	{
		.size   = 2048,
		.value	= CCP_XTS_AES_UNIT_SIZE_2048,
	},
	{
		.size   = 4096,
		.value	= CCP_XTS_AES_UNIT_SIZE_4096,
	},
};

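/*
 * Completion callback for a queued CCP XTS command: on success, copy the
 * IV held in the request context back into the caller's skcipher request.
 */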
static int ccp_aes_xts_complete(struct crypto_async_request *async_req, int ret)
{
	struct skcipher_request *req = skcipher_request_cast(async_req);
	struct ccp_aes_req_ctx *rctx = skcipher_request_ctx(req);

	if (ret)
		return ret;

	memcpy(req->iv, rctx->iv, AES_BLOCK_SIZE);

	return 0;
}

static int ccp_aes_xts_setkey(struct crypto_skcipher *tfm, const u8 *key,
			      unsigned int key_len)
{
	struct ccp_ctx *ctx = crypto_skcipher_ctx(tfm);
	unsigned int ccpversion = ccp_version();
	int ret;

	ret = xts_verify_key(tfm, key, key_len);
	if (ret)
		return ret;

	/* Version 3 devices support 128-bit keys; version 5 devices can
	 * accommodate 128- and 256-bit keys.
	 */
	switch (key_len) {
	case AES_KEYSIZE_128 * 2:
		memcpy(ctx->u.aes.key, key, key_len);
		break;
	case AES_KEYSIZE_256 * 2:
		if (ccpversion > CCP_VERSION(3, 0))
			memcpy(ctx->u.aes.key, key, key_len);
		break;
	}
	ctx->u.aes.key_len = key_len / 2;
	sg_init_one(&ctx->u.aes.key_sg, ctx->u.aes.key, key_len);

	return crypto_skcipher_setkey(ctx->u.aes.tfm_skcipher, key, key_len);
}

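/*
 * Common encrypt/decrypt path. Requests without a key or an IV are rejected
 * with -EINVAL. Requests whose length does not match a supported unit size,
 * or whose key size the device cannot handle (only AES-128 before version 5
 * hardware), are forwarded to the software xts(aes) fallback transform;
 * everything else is packaged into a ccp_cmd and queued to the CCP.
 */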
static int ccp_aes_xts_crypt(struct skcipher_request *req,
			     unsigned int encrypt)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct ccp_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct ccp_aes_req_ctx *rctx = skcipher_request_ctx(req);
	unsigned int ccpversion = ccp_version();
	unsigned int fallback = 0;
	unsigned int unit;
	u32 unit_size;
	int ret;

	if (!ctx->u.aes.key_len)
		return -EINVAL;

	if (!req->iv)
		return -EINVAL;

	/* Check conditions under which the CCP can fulfill a request. The
	 * device can handle input plaintext of a length that is a multiple
	 * of the unit_size, but the crypto implementation only supports
	 * the unit_size being equal to the input length. This limits the
	 * number of scenarios we can handle.
	 */
	unit_size = CCP_XTS_AES_UNIT_SIZE__LAST;
	for (unit = 0; unit < ARRAY_SIZE(xts_unit_sizes); unit++) {
		if (req->cryptlen == xts_unit_sizes[unit].size) {
			unit_size = unit;
			break;
		}
	}
	/* The CCP has restrictions on block sizes. Also, a version 3 device
	 * only supports AES-128 operations; version 5 CCPs support both
	 * AES-128 and -256 operations.
	 */
	if (unit_size == CCP_XTS_AES_UNIT_SIZE__LAST)
		fallback = 1;
	if ((ccpversion < CCP_VERSION(5, 0)) &&
	    (ctx->u.aes.key_len != AES_KEYSIZE_128))
		fallback = 1;
	if ((ctx->u.aes.key_len != AES_KEYSIZE_128) &&
	    (ctx->u.aes.key_len != AES_KEYSIZE_256))
		fallback = 1;
	if (fallback) {
		/* Use the fallback to process the request for any
		 * unsupported unit sizes or key sizes
		 */
		skcipher_request_set_tfm(&rctx->fallback_req,
					 ctx->u.aes.tfm_skcipher);
		skcipher_request_set_callback(&rctx->fallback_req,
					      req->base.flags,
					      req->base.complete,
					      req->base.data);
		skcipher_request_set_crypt(&rctx->fallback_req, req->src,
					   req->dst, req->cryptlen, req->iv);
		ret = encrypt ? crypto_skcipher_encrypt(&rctx->fallback_req) :
				crypto_skcipher_decrypt(&rctx->fallback_req);
		return ret;
	}

	memcpy(rctx->iv, req->iv, AES_BLOCK_SIZE);
	sg_init_one(&rctx->iv_sg, rctx->iv, AES_BLOCK_SIZE);

	memset(&rctx->cmd, 0, sizeof(rctx->cmd));
	INIT_LIST_HEAD(&rctx->cmd.entry);
	rctx->cmd.engine = CCP_ENGINE_XTS_AES_128;
	rctx->cmd.u.xts.type = CCP_AES_TYPE_128;
	rctx->cmd.u.xts.action = (encrypt) ? CCP_AES_ACTION_ENCRYPT
					   : CCP_AES_ACTION_DECRYPT;
	rctx->cmd.u.xts.unit_size = unit_size;
	rctx->cmd.u.xts.key = &ctx->u.aes.key_sg;
	rctx->cmd.u.xts.key_len = ctx->u.aes.key_len;
	rctx->cmd.u.xts.iv = &rctx->iv_sg;
	rctx->cmd.u.xts.iv_len = AES_BLOCK_SIZE;
	rctx->cmd.u.xts.src = req->src;
	rctx->cmd.u.xts.src_len = req->cryptlen;
	rctx->cmd.u.xts.dst = req->dst;

	ret = ccp_crypto_enqueue_request(&req->base, &rctx->cmd);

	return ret;
}

static int ccp_aes_xts_encrypt(struct skcipher_request *req)
{
	return ccp_aes_xts_crypt(req, 1);
}

static int ccp_aes_xts_decrypt(struct skcipher_request *req)
{
	return ccp_aes_xts_crypt(req, 0);
}

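/*
 * Transform init: allocate the software xts(aes) fallback and size the
 * request context so that a fallback sub-request can live alongside the
 * CCP command in each skcipher request.
 */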
static int ccp_aes_xts_init_tfm(struct crypto_skcipher *tfm)
{
	struct ccp_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *fallback_tfm;

	ctx->complete = ccp_aes_xts_complete;
	ctx->u.aes.key_len = 0;

	fallback_tfm = crypto_alloc_skcipher("xts(aes)", 0,
					     CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(fallback_tfm)) {
		pr_warn("could not load fallback driver xts(aes)\n");
		return PTR_ERR(fallback_tfm);
	}
	ctx->u.aes.tfm_skcipher = fallback_tfm;

	crypto_skcipher_set_reqsize(tfm, sizeof(struct ccp_aes_req_ctx) +
					 crypto_skcipher_reqsize(fallback_tfm));

	return 0;
}

static void ccp_aes_xts_exit_tfm(struct crypto_skcipher *tfm)
{
	struct ccp_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->u.aes.tfm_skcipher);
}

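/*
 * Allocate and register one skcipher_alg instance for an aes_xts_algs entry.
 * Note that min_keysize/max_keysize are doubled because an XTS key carries
 * two AES keys (the data key and the tweak key) back to back.
 */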
static int ccp_register_aes_xts_alg(struct list_head *head,
				    const struct ccp_aes_xts_def *def)
{
	struct ccp_crypto_skcipher_alg *ccp_alg;
	struct skcipher_alg *alg;
	int ret;

	ccp_alg = kzalloc(sizeof(*ccp_alg), GFP_KERNEL);
	if (!ccp_alg)
		return -ENOMEM;

	INIT_LIST_HEAD(&ccp_alg->entry);

	alg = &ccp_alg->alg;

	snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", def->name);
	snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
		 def->drv_name);
	alg->base.cra_flags	= CRYPTO_ALG_ASYNC |
				  CRYPTO_ALG_ALLOCATES_MEMORY |
				  CRYPTO_ALG_KERN_DRIVER_ONLY |
				  CRYPTO_ALG_NEED_FALLBACK;
	alg->base.cra_blocksize	= AES_BLOCK_SIZE;
	alg->base.cra_ctxsize	= sizeof(struct ccp_ctx);
	alg->base.cra_priority	= CCP_CRA_PRIORITY;
	alg->base.cra_module	= THIS_MODULE;

	alg->setkey		= ccp_aes_xts_setkey;
	alg->encrypt		= ccp_aes_xts_encrypt;
	alg->decrypt		= ccp_aes_xts_decrypt;
	alg->min_keysize	= AES_MIN_KEY_SIZE * 2;
	alg->max_keysize	= AES_MAX_KEY_SIZE * 2;
	alg->ivsize		= AES_BLOCK_SIZE;
	alg->init		= ccp_aes_xts_init_tfm;
	alg->exit		= ccp_aes_xts_exit_tfm;

	ret = crypto_register_skcipher(alg);
	if (ret) {
		pr_err("%s skcipher algorithm registration error (%d)\n",
		       alg->base.cra_name, ret);
		kfree(ccp_alg);
		return ret;
	}

	list_add(&ccp_alg->entry, head);

	return 0;
}

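/*
 * Register every algorithm in aes_xts_algs, adding each successfully
 * registered entry to the caller's list so it can be unregistered later.
 * This is intended to be called from the CCP crypto layer's registration
 * path. Once registered, kernel users obtain the transform through the
 * generic crypto API, and the core selects this implementation when it is
 * the highest-priority "xts(aes)" provider, e.g. (illustrative only):
 *
 *	struct crypto_skcipher *tfm = crypto_alloc_skcipher("xts(aes)", 0, 0);
 *	crypto_skcipher_setkey(tfm, key, 2 * AES_KEYSIZE_256);
 */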
int ccp_register_aes_xts_algs(struct list_head *head)
{
	int i, ret;

	for (i = 0; i < ARRAY_SIZE(aes_xts_algs); i++) {
		ret = ccp_register_aes_xts_alg(head, &aes_xts_algs[i]);
		if (ret)
			return ret;
	}

	return 0;
}