/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Symmetric key ciphers.
 *
 * Copyright (c) 2007 Herbert Xu <herbert@gondor.apana.org.au>
 */

#ifndef _CRYPTO_INTERNAL_SKCIPHER_H
#define _CRYPTO_INTERNAL_SKCIPHER_H

#include <crypto/algapi.h>
#include <crypto/internal/cipher.h>
#include <crypto/scatterwalk.h>
#include <crypto/skcipher.h>
#include <linux/types.h>

/*
 * Set this if your algorithm is sync but needs a reqsize larger
 * than MAX_SYNC_SKCIPHER_REQSIZE.
 *
 * This reuses a bit (CRYPTO_ALG_OPTIONAL_KEY) that is otherwise
 * specific to hash algorithms, so the two meanings cannot collide.
 */
#define CRYPTO_ALG_SKCIPHER_REQSIZE_LARGE CRYPTO_ALG_OPTIONAL_KEY
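
/*
 * Illustrative sketch (not from this header): a hypothetical sync
 * algorithm whose request context exceeds MAX_SYNC_SKCIPHER_REQSIZE
 * would advertise the flag in its cra_flags:
 *
 *	static struct skcipher_alg example_alg = {
 *		.base.cra_name	= "example(cipher)",
 *		.base.cra_flags	= CRYPTO_ALG_SKCIPHER_REQSIZE_LARGE,
 *		...
 *	};
 *
 * crypto_alloc_sync_skcipher() masks such algorithms out, since their
 * requests do not fit the on-stack SYNC_SKCIPHER_REQUEST_ON_STACK()
 * buffer.
 */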

struct aead_request;
struct rtattr;

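/*
 * Note on the layout of the two instance structs below: the anonymous
 * union overlays a full algorithm struct with a struct that places a
 * crypto_instance at the offset of the algorithm's embedded crypto_alg
 * base.  The same bytes can therefore be viewed either as a
 * crypto_instance (for the generic instance machinery) or as the
 * algorithm it carries.
 */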
struct skcipher_instance {
	void (*free)(struct skcipher_instance *inst);
	union {
		struct {
			char head[offsetof(struct skcipher_alg, base)];
			struct crypto_instance base;
		} s;
		struct skcipher_alg alg;
	};
};

struct lskcipher_instance {
	void (*free)(struct lskcipher_instance *inst);
	union {
		struct {
			char head[offsetof(struct lskcipher_alg, co.base)];
			struct crypto_instance base;
		} s;
		struct lskcipher_alg alg;
	};
};

struct crypto_skcipher_spawn {
	struct crypto_spawn base;
};

struct crypto_lskcipher_spawn {
	struct crypto_spawn base;
};

static inline struct crypto_instance *skcipher_crypto_instance(
	struct skcipher_instance *inst)
{
	return &inst->s.base;
}

static inline struct crypto_instance *lskcipher_crypto_instance(
	struct lskcipher_instance *inst)
{
	return &inst->s.base;
}

static inline struct skcipher_instance *skcipher_alg_instance(
	struct crypto_skcipher *skcipher)
{
	return container_of(crypto_skcipher_alg(skcipher),
			    struct skcipher_instance, alg);
}

static inline struct lskcipher_instance *lskcipher_alg_instance(
	struct crypto_lskcipher *lskcipher)
{
	return container_of(crypto_lskcipher_alg(lskcipher),
			    struct lskcipher_instance, alg);
}

static inline void *skcipher_instance_ctx(struct skcipher_instance *inst)
{
	return crypto_instance_ctx(skcipher_crypto_instance(inst));
}

static inline void *lskcipher_instance_ctx(struct lskcipher_instance *inst)
{
	return crypto_instance_ctx(lskcipher_crypto_instance(inst));
}

static inline void skcipher_request_complete(struct skcipher_request *req, int err)
{
	crypto_request_complete(&req->base, err);
}

int crypto_grab_skcipher(struct crypto_skcipher_spawn *spawn,
			 struct crypto_instance *inst,
			 const char *name, u32 type, u32 mask);

int crypto_grab_lskcipher(struct crypto_lskcipher_spawn *spawn,
			  struct crypto_instance *inst,
			  const char *name, u32 type, u32 mask);
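
/*
 * Minimal sketch of the grab/drop pattern, assuming a hypothetical
 * template's ->create() callback (names here are illustrative, not
 * part of this API):
 *
 *	struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);
 *	int err;
 *
 *	err = crypto_grab_skcipher(spawn, skcipher_crypto_instance(inst),
 *				   cipher_name, 0, mask);
 *	if (err)
 *		return err;
 *
 * A successful grab holds a reference on the underlying algorithm;
 * it is released with crypto_drop_skcipher() below, typically from
 * the instance's ->free() callback.
 */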

static inline void crypto_drop_skcipher(struct crypto_skcipher_spawn *spawn)
{
	crypto_drop_spawn(&spawn->base);
}

static inline void crypto_drop_lskcipher(struct crypto_lskcipher_spawn *spawn)
{
	crypto_drop_spawn(&spawn->base);
}

static inline struct lskcipher_alg *crypto_lskcipher_spawn_alg(
	struct crypto_lskcipher_spawn *spawn)
{
	return container_of(spawn->base.alg, struct lskcipher_alg, co.base);
}

static inline struct skcipher_alg_common *crypto_spawn_skcipher_alg_common(
	struct crypto_skcipher_spawn *spawn)
{
	return container_of(spawn->base.alg, struct skcipher_alg_common, base);
}

static inline struct lskcipher_alg *crypto_spawn_lskcipher_alg(
	struct crypto_lskcipher_spawn *spawn)
{
	return crypto_lskcipher_spawn_alg(spawn);
}

static inline struct crypto_skcipher *crypto_spawn_skcipher(
	struct crypto_skcipher_spawn *spawn)
{
	return crypto_spawn_tfm2(&spawn->base);
}

static inline struct crypto_lskcipher *crypto_spawn_lskcipher(
	struct crypto_lskcipher_spawn *spawn)
{
	return crypto_spawn_tfm2(&spawn->base);
}

static inline void crypto_skcipher_set_reqsize(
	struct crypto_skcipher *skcipher, unsigned int reqsize)
{
	skcipher->reqsize = reqsize;
}

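/*
 * DMA-aware variant: pads the request size so that the context
 * returned by skcipher_request_ctx_dma() can be realigned to the
 * platform's DMA alignment.  The expression below adds the worst-case
 * padding: crypto_dma_align() rounded down to a multiple of the base
 * context alignment.  For example (illustrative numbers only), with
 * crypto_dma_align() == 128 and crypto_tfm_ctx_alignment() == 8, each
 * request grows by 128 & ~7 == 128 bytes of alignment slack.
 */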
static inline void crypto_skcipher_set_reqsize_dma(
	struct crypto_skcipher *skcipher, unsigned int reqsize)
{
	reqsize += crypto_dma_align() & ~(crypto_tfm_ctx_alignment() - 1);
	skcipher->reqsize = reqsize;
}

int crypto_register_skcipher(struct skcipher_alg *alg);
void crypto_unregister_skcipher(struct skcipher_alg *alg);
int crypto_register_skciphers(struct skcipher_alg *algs, int count);
void crypto_unregister_skciphers(struct skcipher_alg *algs, int count);
int skcipher_register_instance(struct crypto_template *tmpl,
			       struct skcipher_instance *inst);

int crypto_register_lskcipher(struct lskcipher_alg *alg);
void crypto_unregister_lskcipher(struct lskcipher_alg *alg);
int crypto_register_lskciphers(struct lskcipher_alg *algs, int count);
void crypto_unregister_lskciphers(struct lskcipher_alg *algs, int count);
int lskcipher_register_instance(struct crypto_template *tmpl,
				struct lskcipher_instance *inst);

int skcipher_walk_virt(struct skcipher_walk *__restrict walk,
		       struct skcipher_request *__restrict req,
		       bool atomic);
int skcipher_walk_aead_encrypt(struct skcipher_walk *__restrict walk,
			       struct aead_request *__restrict req,
			       bool atomic);
int skcipher_walk_aead_decrypt(struct skcipher_walk *__restrict walk,
			       struct aead_request *__restrict req,
			       bool atomic);
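
/*
 * Typical walk loop, sketched for illustration only (error handling
 * abbreviated; skcipher_walk_done() is declared elsewhere in the
 * crypto layer):
 *
 *	struct skcipher_walk walk;
 *	int err;
 *
 *	err = skcipher_walk_virt(&walk, req, false);
 *	while (walk.nbytes) {
 *		// Process walk.nbytes bytes from walk.src.virt.addr
 *		// into walk.dst.virt.addr; this is a multiple of the
 *		// block size except possibly on the final iteration.
 *		err = skcipher_walk_done(&walk, 0);
 *	}
 *	return err;
 */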

static inline void *crypto_skcipher_ctx(struct crypto_skcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_lskcipher_ctx(struct crypto_lskcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_skcipher_ctx_dma(struct crypto_skcipher *tfm)
{
	return crypto_tfm_ctx_dma(&tfm->base);
}

static inline void *skcipher_request_ctx(struct skcipher_request *req)
{
	return req->__ctx;
}

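/*
 * DMA-aware view of the request context: realigns the raw context
 * pointer to crypto_dma_align().  Pairs with
 * crypto_skcipher_set_reqsize_dma(), which reserves the extra bytes
 * this realignment may consume.  If the base context alignment is
 * already sufficient, the pointer is returned unchanged.
 */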
static inline void *skcipher_request_ctx_dma(struct skcipher_request *req)
{
	unsigned int align = crypto_dma_align();

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;

	return PTR_ALIGN(skcipher_request_ctx(req), align);
}

static inline u32 skcipher_request_flags(struct skcipher_request *req)
{
	return req->base.flags;
}

/* Helpers for simple block cipher modes of operation */
struct skcipher_ctx_simple {
	struct crypto_cipher *cipher;	/* underlying block cipher */
};
static inline struct crypto_cipher *
skcipher_cipher_simple(struct crypto_skcipher *tfm)
{
	struct skcipher_ctx_simple *ctx = crypto_skcipher_ctx(tfm);

	return ctx->cipher;
}

struct skcipher_instance *skcipher_alloc_instance_simple(
	struct crypto_template *tmpl, struct rtattr **tb);
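
/*
 * Sketch of how a mode template might use the simple-instance helper
 * (modeled on patterns in crypto/; the example_* names are
 * hypothetical):
 *
 *	static int example_create(struct crypto_template *tmpl,
 *				  struct rtattr **tb)
 *	{
 *		struct skcipher_instance *inst;
 *		int err;
 *
 *		inst = skcipher_alloc_instance_simple(tmpl, tb);
 *		if (IS_ERR(inst))
 *			return PTR_ERR(inst);
 *
 *		inst->alg.encrypt = example_encrypt;
 *		inst->alg.decrypt = example_decrypt;
 *
 *		err = skcipher_register_instance(tmpl, inst);
 *		if (err)
 *			inst->free(inst);
 *		return err;
 *	}
 */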

static inline struct crypto_alg *skcipher_ialg_simple(
	struct skcipher_instance *inst)
{
	struct crypto_cipher_spawn *spawn = skcipher_instance_ctx(inst);

	return crypto_spawn_cipher_alg(spawn);
}

static inline struct crypto_lskcipher *lskcipher_cipher_simple(
	struct crypto_lskcipher *tfm)
{
	struct crypto_lskcipher **ctx = crypto_lskcipher_ctx(tfm);

	return *ctx;
}

struct lskcipher_instance *lskcipher_alloc_instance_simple(
	struct crypto_template *tmpl, struct rtattr **tb);

static inline struct lskcipher_alg *lskcipher_ialg_simple(
	struct lskcipher_instance *inst)
{
	struct crypto_lskcipher_spawn *spawn = lskcipher_instance_ctx(inst);

	return crypto_lskcipher_spawn_alg(spawn);
}

#endif	/* _CRYPTO_INTERNAL_SKCIPHER_H */