xref: /OK3568_Linux_fs/kernel/include/crypto/internal/skcipher.h (revision 4882a59341e53eb6f0b4789bf948001014eff981)
1*4882a593Smuzhiyun /* SPDX-License-Identifier: GPL-2.0-or-later */
2*4882a593Smuzhiyun /*
3*4882a593Smuzhiyun  * Symmetric key ciphers.
4*4882a593Smuzhiyun  *
5*4882a593Smuzhiyun  * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
6*4882a593Smuzhiyun  */
7*4882a593Smuzhiyun 
8*4882a593Smuzhiyun #ifndef _CRYPTO_INTERNAL_SKCIPHER_H
9*4882a593Smuzhiyun #define _CRYPTO_INTERNAL_SKCIPHER_H
10*4882a593Smuzhiyun 
11*4882a593Smuzhiyun #include <crypto/algapi.h>
12*4882a593Smuzhiyun #include <crypto/internal/cipher.h>
13*4882a593Smuzhiyun #include <crypto/skcipher.h>
14*4882a593Smuzhiyun #include <linux/list.h>
15*4882a593Smuzhiyun #include <linux/types.h>
16*4882a593Smuzhiyun 
17*4882a593Smuzhiyun struct aead_request;
18*4882a593Smuzhiyun struct rtattr;
19*4882a593Smuzhiyun 
20*4882a593Smuzhiyun struct skcipher_instance {
21*4882a593Smuzhiyun 	void (*free)(struct skcipher_instance *inst);
22*4882a593Smuzhiyun 	union {
23*4882a593Smuzhiyun 		struct {
24*4882a593Smuzhiyun 			char head[offsetof(struct skcipher_alg, base)];
25*4882a593Smuzhiyun 			struct crypto_instance base;
26*4882a593Smuzhiyun 		} s;
27*4882a593Smuzhiyun 		struct skcipher_alg alg;
28*4882a593Smuzhiyun 	};
29*4882a593Smuzhiyun };
30*4882a593Smuzhiyun 
31*4882a593Smuzhiyun struct crypto_skcipher_spawn {
32*4882a593Smuzhiyun 	struct crypto_spawn base;
33*4882a593Smuzhiyun };
34*4882a593Smuzhiyun 
35*4882a593Smuzhiyun struct skcipher_walk {
36*4882a593Smuzhiyun 	union {
37*4882a593Smuzhiyun 		struct {
38*4882a593Smuzhiyun 			struct page *page;
39*4882a593Smuzhiyun 			unsigned long offset;
40*4882a593Smuzhiyun 		} phys;
41*4882a593Smuzhiyun 
42*4882a593Smuzhiyun 		struct {
43*4882a593Smuzhiyun 			u8 *page;
44*4882a593Smuzhiyun 			void *addr;
45*4882a593Smuzhiyun 		} virt;
46*4882a593Smuzhiyun 	} src, dst;
47*4882a593Smuzhiyun 
48*4882a593Smuzhiyun 	struct scatter_walk in;
49*4882a593Smuzhiyun 	unsigned int nbytes;
50*4882a593Smuzhiyun 
51*4882a593Smuzhiyun 	struct scatter_walk out;
52*4882a593Smuzhiyun 	unsigned int total;
53*4882a593Smuzhiyun 
54*4882a593Smuzhiyun 	struct list_head buffers;
55*4882a593Smuzhiyun 
56*4882a593Smuzhiyun 	u8 *page;
57*4882a593Smuzhiyun 	u8 *buffer;
58*4882a593Smuzhiyun 	u8 *oiv;
59*4882a593Smuzhiyun 	void *iv;
60*4882a593Smuzhiyun 
61*4882a593Smuzhiyun 	unsigned int ivsize;
62*4882a593Smuzhiyun 
63*4882a593Smuzhiyun 	int flags;
64*4882a593Smuzhiyun 	unsigned int blocksize;
65*4882a593Smuzhiyun 	unsigned int stride;
66*4882a593Smuzhiyun 	unsigned int alignmask;
67*4882a593Smuzhiyun };
68*4882a593Smuzhiyun 
skcipher_crypto_instance(struct skcipher_instance * inst)69*4882a593Smuzhiyun static inline struct crypto_instance *skcipher_crypto_instance(
70*4882a593Smuzhiyun 	struct skcipher_instance *inst)
71*4882a593Smuzhiyun {
72*4882a593Smuzhiyun 	return &inst->s.base;
73*4882a593Smuzhiyun }
74*4882a593Smuzhiyun 
skcipher_alg_instance(struct crypto_skcipher * skcipher)75*4882a593Smuzhiyun static inline struct skcipher_instance *skcipher_alg_instance(
76*4882a593Smuzhiyun 	struct crypto_skcipher *skcipher)
77*4882a593Smuzhiyun {
78*4882a593Smuzhiyun 	return container_of(crypto_skcipher_alg(skcipher),
79*4882a593Smuzhiyun 			    struct skcipher_instance, alg);
80*4882a593Smuzhiyun }
81*4882a593Smuzhiyun 
/* Return the template-private context area of an skcipher instance. */
static inline void *skcipher_instance_ctx(struct skcipher_instance *inst)
{
	return crypto_instance_ctx(skcipher_crypto_instance(inst));
}
86*4882a593Smuzhiyun 
skcipher_request_complete(struct skcipher_request * req,int err)87*4882a593Smuzhiyun static inline void skcipher_request_complete(struct skcipher_request *req, int err)
88*4882a593Smuzhiyun {
89*4882a593Smuzhiyun 	req->base.complete(&req->base, err);
90*4882a593Smuzhiyun }
91*4882a593Smuzhiyun 
92*4882a593Smuzhiyun int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn,
93*4882a593Smuzhiyun 			 struct crypto_instance *inst,
94*4882a593Smuzhiyun 			 const char *name, u32 type, u32 mask);
95*4882a593Smuzhiyun 
crypto_drop_skcipher(struct crypto_skcipher_spawn * spawn)96*4882a593Smuzhiyun static inline void crypto_drop_skcipher(struct crypto_skcipher_spawn *spawn)
97*4882a593Smuzhiyun {
98*4882a593Smuzhiyun 	crypto_drop_spawn(&spawn->base);
99*4882a593Smuzhiyun }
100*4882a593Smuzhiyun 
crypto_skcipher_spawn_alg(struct crypto_skcipher_spawn * spawn)101*4882a593Smuzhiyun static inline struct skcipher_alg *crypto_skcipher_spawn_alg(
102*4882a593Smuzhiyun 	struct crypto_skcipher_spawn *spawn)
103*4882a593Smuzhiyun {
104*4882a593Smuzhiyun 	return container_of(spawn->base.alg, struct skcipher_alg, base);
105*4882a593Smuzhiyun }
106*4882a593Smuzhiyun 
/* Alias of crypto_skcipher_spawn_alg(); kept for API symmetry. */
static inline struct skcipher_alg *crypto_spawn_skcipher_alg(
	struct crypto_skcipher_spawn *spawn)
{
	return crypto_skcipher_spawn_alg(spawn);
}
112*4882a593Smuzhiyun 
crypto_spawn_skcipher(struct crypto_skcipher_spawn * spawn)113*4882a593Smuzhiyun static inline struct crypto_skcipher *crypto_spawn_skcipher(
114*4882a593Smuzhiyun 	struct crypto_skcipher_spawn *spawn)
115*4882a593Smuzhiyun {
116*4882a593Smuzhiyun 	return crypto_spawn_tfm2(&spawn->base);
117*4882a593Smuzhiyun }
118*4882a593Smuzhiyun 
crypto_skcipher_set_reqsize(struct crypto_skcipher * skcipher,unsigned int reqsize)119*4882a593Smuzhiyun static inline void crypto_skcipher_set_reqsize(
120*4882a593Smuzhiyun 	struct crypto_skcipher *skcipher, unsigned int reqsize)
121*4882a593Smuzhiyun {
122*4882a593Smuzhiyun 	skcipher->reqsize = reqsize;
123*4882a593Smuzhiyun }
124*4882a593Smuzhiyun 
/* Registration/unregistration of skcipher algorithms and instances. */
int crypto_register_skcipher(struct skcipher_alg *alg);
void crypto_unregister_skcipher(struct skcipher_alg *alg);
int crypto_register_skciphers(struct skcipher_alg *algs, int count);
void crypto_unregister_skciphers(struct skcipher_alg *algs, int count);
int skcipher_register_instance(struct crypto_template *tmpl,
			       struct skcipher_instance *inst);
131*4882a593Smuzhiyun 
132*4882a593Smuzhiyun int skcipher_walk_done(struct skcipher_walk *walk, int err);
133*4882a593Smuzhiyun int skcipher_walk_virt(struct skcipher_walk *walk,
134*4882a593Smuzhiyun 		       struct skcipher_request *req,
135*4882a593Smuzhiyun 		       bool atomic);
136*4882a593Smuzhiyun void skcipher_walk_atomise(struct skcipher_walk *walk);
137*4882a593Smuzhiyun int skcipher_walk_async(struct skcipher_walk *walk,
138*4882a593Smuzhiyun 			struct skcipher_request *req);
139*4882a593Smuzhiyun int skcipher_walk_aead_encrypt(struct skcipher_walk *walk,
140*4882a593Smuzhiyun 			       struct aead_request *req, bool atomic);
141*4882a593Smuzhiyun int skcipher_walk_aead_decrypt(struct skcipher_walk *walk,
142*4882a593Smuzhiyun 			       struct aead_request *req, bool atomic);
143*4882a593Smuzhiyun void skcipher_walk_complete(struct skcipher_walk *walk, int err);
144*4882a593Smuzhiyun 
skcipher_walk_abort(struct skcipher_walk * walk)145*4882a593Smuzhiyun static inline void skcipher_walk_abort(struct skcipher_walk *walk)
146*4882a593Smuzhiyun {
147*4882a593Smuzhiyun 	skcipher_walk_done(walk, -ECANCELED);
148*4882a593Smuzhiyun }
149*4882a593Smuzhiyun 
crypto_skcipher_ctx(struct crypto_skcipher * tfm)150*4882a593Smuzhiyun static inline void *crypto_skcipher_ctx(struct crypto_skcipher *tfm)
151*4882a593Smuzhiyun {
152*4882a593Smuzhiyun 	return crypto_tfm_ctx(&tfm->base);
153*4882a593Smuzhiyun }
154*4882a593Smuzhiyun 
skcipher_request_ctx(struct skcipher_request * req)155*4882a593Smuzhiyun static inline void *skcipher_request_ctx(struct skcipher_request *req)
156*4882a593Smuzhiyun {
157*4882a593Smuzhiyun 	return req->__ctx;
158*4882a593Smuzhiyun }
159*4882a593Smuzhiyun 
skcipher_request_flags(struct skcipher_request * req)160*4882a593Smuzhiyun static inline u32 skcipher_request_flags(struct skcipher_request *req)
161*4882a593Smuzhiyun {
162*4882a593Smuzhiyun 	return req->base.flags;
163*4882a593Smuzhiyun }
164*4882a593Smuzhiyun 
crypto_skcipher_alg_min_keysize(struct skcipher_alg * alg)165*4882a593Smuzhiyun static inline unsigned int crypto_skcipher_alg_min_keysize(
166*4882a593Smuzhiyun 	struct skcipher_alg *alg)
167*4882a593Smuzhiyun {
168*4882a593Smuzhiyun 	return alg->min_keysize;
169*4882a593Smuzhiyun }
170*4882a593Smuzhiyun 
crypto_skcipher_alg_max_keysize(struct skcipher_alg * alg)171*4882a593Smuzhiyun static inline unsigned int crypto_skcipher_alg_max_keysize(
172*4882a593Smuzhiyun 	struct skcipher_alg *alg)
173*4882a593Smuzhiyun {
174*4882a593Smuzhiyun 	return alg->max_keysize;
175*4882a593Smuzhiyun }
176*4882a593Smuzhiyun 
crypto_skcipher_alg_walksize(struct skcipher_alg * alg)177*4882a593Smuzhiyun static inline unsigned int crypto_skcipher_alg_walksize(
178*4882a593Smuzhiyun 	struct skcipher_alg *alg)
179*4882a593Smuzhiyun {
180*4882a593Smuzhiyun 	return alg->walksize;
181*4882a593Smuzhiyun }
182*4882a593Smuzhiyun 
/**
 * crypto_skcipher_walksize() - obtain walk size
 * @tfm: cipher handle
 *
 * In some cases, algorithms can only perform optimally when operating on
 * multiple blocks in parallel. This is reflected by the walksize, which
 * must be a multiple of the chunksize (or equal if the concern does not
 * apply)
 *
 * Return: walk size in bytes
 */
static inline unsigned int crypto_skcipher_walksize(
	struct crypto_skcipher *tfm)
{
	return crypto_skcipher_alg_walksize(crypto_skcipher_alg(tfm));
}
199*4882a593Smuzhiyun 
200*4882a593Smuzhiyun /* Helpers for simple block cipher modes of operation */
/* Helpers for simple block cipher modes of operation */
struct skcipher_ctx_simple {
	struct crypto_cipher *cipher;	/* underlying block cipher */
};

/* Return the underlying single-block cipher of a "simple" mode tfm. */
static inline struct crypto_cipher *
skcipher_cipher_simple(struct crypto_skcipher *tfm)
{
	struct skcipher_ctx_simple *ctx = crypto_skcipher_ctx(tfm);

	return ctx->cipher;
}
211*4882a593Smuzhiyun 
/* Allocate a template instance for a simple block cipher mode. */
struct skcipher_instance *skcipher_alloc_instance_simple(
	struct crypto_template *tmpl, struct rtattr **tb);
214*4882a593Smuzhiyun 
/* Return the underlying cipher algorithm of a "simple" mode instance. */
static inline struct crypto_alg *skcipher_ialg_simple(
	struct skcipher_instance *inst)
{
	struct crypto_cipher_spawn *spawn = skcipher_instance_ctx(inst);

	return crypto_spawn_cipher_alg(spawn);
}
222*4882a593Smuzhiyun 
223*4882a593Smuzhiyun #endif	/* _CRYPTO_INTERNAL_SKCIPHER_H */
224*4882a593Smuzhiyun 
225