// SPDX-License-Identifier: GPL-2.0-only
/**
 * AES ECB routines supporting the Power 7+ Nest Accelerators driver
 *
 * Copyright (C) 2011-2012 International Business Machines Inc.
 *
 * Author: Kent Yoder <yoder1@us.ibm.com>
 */

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <linux/module.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <asm/vio.h>

#include "nx_csbcpb.h"
#include "nx.h"

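/*
 * Set the AES key for this tfm: initialize the NX context for the AES
 * function code, pick the key-size-specific NX properties, flag the CPB
 * for ECB mode and copy the key into the coprocessor parameter block.
 * Unsupported key lengths are rejected with -EINVAL.
 */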
static int ecb_aes_nx_set_key(struct crypto_skcipher *tfm,
			      const u8               *in_key,
			      unsigned int            key_len)
{
	struct nx_crypto_ctx *nx_ctx = crypto_skcipher_ctx(tfm);
	struct nx_csbcpb *csbcpb = (struct nx_csbcpb *)nx_ctx->csbcpb;

	nx_ctx_init(nx_ctx, HCOP_FC_AES);

	switch (key_len) {
	case AES_KEYSIZE_128:
		NX_CPB_SET_KEY_SIZE(csbcpb, NX_KS_AES_128);
		nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_128];
		break;
	case AES_KEYSIZE_192:
		NX_CPB_SET_KEY_SIZE(csbcpb, NX_KS_AES_192);
		nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_192];
		break;
	case AES_KEYSIZE_256:
		NX_CPB_SET_KEY_SIZE(csbcpb, NX_KS_AES_256);
		nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_256];
		break;
	default:
		return -EINVAL;
	}

	csbcpb->cpb.hdr.mode = NX_MODE_AES_ECB;
	memcpy(csbcpb->cpb.aes_ecb.key, in_key, key_len);

	return 0;
}

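/*
 * Process a request in ECB mode. Requests larger than the NX hardware
 * can handle in a single operation are walked in chunks under
 * nx_ctx->lock: nx_build_sg_lists() builds the scatter/gather lists and
 * may trim to_process to what fits, nx_hcall_sync() issues the hcall,
 * and the loop repeats until req->cryptlen bytes have been handled.
 */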
static int ecb_aes_nx_crypt(struct skcipher_request *req,
			    int enc)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct nx_crypto_ctx *nx_ctx = crypto_skcipher_ctx(tfm);
	struct nx_csbcpb *csbcpb = nx_ctx->csbcpb;
	unsigned long irq_flags;
	unsigned int processed = 0, to_process;
	int rc;

	spin_lock_irqsave(&nx_ctx->lock, irq_flags);

	if (enc)
		NX_CPB_FDM(csbcpb) |= NX_FDM_ENDE_ENCRYPT;
	else
		NX_CPB_FDM(csbcpb) &= ~NX_FDM_ENDE_ENCRYPT;

	do {
		to_process = req->cryptlen - processed;

		rc = nx_build_sg_lists(nx_ctx, NULL, req->dst, req->src,
				       &to_process, processed, NULL);
		if (rc)
			goto out;

		if (!nx_ctx->op.inlen || !nx_ctx->op.outlen) {
			rc = -EINVAL;
			goto out;
		}

		rc = nx_hcall_sync(nx_ctx, &nx_ctx->op,
				   req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP);
		if (rc)
			goto out;

		atomic_inc(&(nx_ctx->stats->aes_ops));
		atomic64_add(csbcpb->csb.processed_byte_count,
			     &(nx_ctx->stats->aes_bytes));

		processed += to_process;
	} while (processed < req->cryptlen);

out:
	spin_unlock_irqrestore(&nx_ctx->lock, irq_flags);
	return rc;
}

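/* Direction-selecting wrappers used as the skcipher encrypt/decrypt entry points. */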
static int ecb_aes_nx_encrypt(struct skcipher_request *req)
{
	return ecb_aes_nx_crypt(req, 1);
}

static int ecb_aes_nx_decrypt(struct skcipher_request *req)
{
	return ecb_aes_nx_crypt(req, 0);
}

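/*
 * skcipher algorithm descriptor for "ecb(aes)" (driver name "ecb-aes-nx"),
 * registered with the crypto API by the NX driver core.
 */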
struct skcipher_alg nx_ecb_aes_alg = {
	.base.cra_name		= "ecb(aes)",
	.base.cra_driver_name	= "ecb-aes-nx",
	.base.cra_priority	= 300,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_alignmask	= 0xf,
	.base.cra_ctxsize	= sizeof(struct nx_crypto_ctx),
	.base.cra_module	= THIS_MODULE,
	.init			= nx_crypto_ctx_aes_ecb_init,
	.exit			= nx_crypto_ctx_skcipher_exit,
	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.setkey			= ecb_aes_nx_set_key,
	.encrypt		= ecb_aes_nx_encrypt,
	.decrypt		= ecb_aes_nx_decrypt,
};