// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Asynchronous Cryptographic Hash operations.
 *
 * This is the implementation of the ahash (asynchronous hash) API. It differs
 * from shash (synchronous hash) in that ahash supports asynchronous operations,
 * and it hashes data from scatterlists instead of virtually addressed buffers.
 *
 * The ahash API provides access to both ahash and shash algorithms. The shash
 * API only provides access to shash algorithms.
 *
 * Copyright (c) 2008 Loc Ho <lho@amcc.com>
 */
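
/*
 * A minimal usage sketch (illustrative only, not part of this file's
 * logic): computing a digest over a linear buffer with the ahash API,
 * assuming the "sha256" algorithm is available and eliding error
 * handling for brevity.
 *
 *	struct crypto_ahash *tfm = crypto_alloc_ahash("sha256", 0, 0);
 *	struct ahash_request *req = ahash_request_alloc(tfm, GFP_KERNEL);
 *	struct scatterlist sg;
 *	DECLARE_CRYPTO_WAIT(wait);
 *	u8 digest[SHA256_DIGEST_SIZE];
 *
 *	sg_init_one(&sg, data, len);
 *	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				   crypto_req_done, &wait);
 *	ahash_request_set_crypt(req, &sg, digest, len);
 *	crypto_wait_req(crypto_ahash_digest(req), &wait);
 *	ahash_request_free(req);
 *	crypto_free_ahash(tfm);
 */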

#include <crypto/scatterwalk.h>
#include <linux/cryptouser.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/mm.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/string.h>
#include <linux/string_choices.h>
#include <net/netlink.h>

#include "hash.h"

#define CRYPTO_ALG_TYPE_AHASH_MASK	0x0000000e

static int ahash_def_finup(struct ahash_request *req);

static inline bool crypto_ahash_block_only(struct crypto_ahash *tfm)
{
	return crypto_ahash_alg(tfm)->halg.base.cra_flags &
	       CRYPTO_AHASH_ALG_BLOCK_ONLY;
}

static inline bool crypto_ahash_final_nonzero(struct crypto_ahash *tfm)
{
	return crypto_ahash_alg(tfm)->halg.base.cra_flags &
	       CRYPTO_AHASH_ALG_FINAL_NONZERO;
}

static inline bool crypto_ahash_need_fallback(struct crypto_ahash *tfm)
{
	return crypto_ahash_alg(tfm)->halg.base.cra_flags &
	       CRYPTO_ALG_NEED_FALLBACK;
}

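/*
 * Common completion callback for the two-step operations below.  When
 * the underlying step reports -EINPROGRESS the notification is simply
 * forwarded to the original completion function; otherwise @finish
 * performs the follow-up work and the original completion function is
 * invoked with its result, unless that follow-up is itself still in
 * flight.
 */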
static inline void ahash_op_done(void *data, int err,
				 int (*finish)(struct ahash_request *, int))
{
	struct ahash_request *areq = data;
	crypto_completion_t compl;

	compl = areq->saved_complete;
	data = areq->saved_data;
	if (err == -EINPROGRESS)
		goto out;

	areq->base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	err = finish(areq, err);
	if (err == -EINPROGRESS || err == -EBUSY)
		return;

out:
	compl(data, err);
}

static int hash_walk_next(struct crypto_hash_walk *walk)
{
	unsigned int offset = walk->offset;
	unsigned int nbytes = min(walk->entrylen,
				  ((unsigned int)(PAGE_SIZE)) - offset);

	walk->data = kmap_local_page(walk->pg);
	walk->data += offset;
	walk->entrylen -= nbytes;
	return nbytes;
}

static int hash_walk_new_entry(struct crypto_hash_walk *walk)
{
	struct scatterlist *sg;

	sg = walk->sg;
	walk->offset = sg->offset;
	walk->pg = sg_page(walk->sg) + (walk->offset >> PAGE_SHIFT);
	walk->offset = offset_in_page(walk->offset);
	walk->entrylen = sg->length;

	if (walk->entrylen > walk->total)
		walk->entrylen = walk->total;
	walk->total -= walk->entrylen;

	return hash_walk_next(walk);
}

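/*
 * The hash walk helpers below let hash implementations process a
 * request's scatterlist one mapped chunk at a time: start the walk with
 * crypto_hash_walk_first() and report each chunk's result through
 * crypto_hash_walk_done().  A typical loop (shash_ahash_update() below
 * is the canonical user):
 *
 *	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
 *	     nbytes = crypto_hash_walk_done(&walk, nbytes))
 *		nbytes = crypto_shash_update(desc, walk.data, nbytes);
 *
 * Virtual-address requests are returned as a single chunk and no
 * mapping takes place.
 */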
int crypto_hash_walk_first(struct ahash_request *req,
			   struct crypto_hash_walk *walk)
{
	walk->total = req->nbytes;
	walk->entrylen = 0;

	if (!walk->total)
		return 0;

	walk->flags = req->base.flags;

	if (ahash_request_isvirt(req)) {
		walk->data = req->svirt;
		walk->total = 0;
		return req->nbytes;
	}

	walk->sg = req->src;

	return hash_walk_new_entry(walk);
}
EXPORT_SYMBOL_GPL(crypto_hash_walk_first);

int crypto_hash_walk_done(struct crypto_hash_walk *walk, int err)
{
	if ((walk->flags & CRYPTO_AHASH_REQ_VIRT))
		return err;

	walk->data -= walk->offset;

	kunmap_local(walk->data);
	crypto_yield(walk->flags);

	if (err)
		return err;

	if (walk->entrylen) {
		walk->offset = 0;
		walk->pg++;
		return hash_walk_next(walk);
	}

	if (!walk->total)
		return 0;

	walk->sg = sg_next(walk->sg);

	return hash_walk_new_entry(walk);
}
EXPORT_SYMBOL_GPL(crypto_hash_walk_done);

/*
 * For an ahash tfm that is using an shash algorithm (instead of an ahash
 * algorithm), this returns the underlying shash tfm.
 */
static inline struct crypto_shash *ahash_to_shash(struct crypto_ahash *tfm)
{
	return *(struct crypto_shash **)crypto_ahash_ctx(tfm);
}

static inline struct shash_desc *prepare_shash_desc(struct ahash_request *req,
						    struct crypto_ahash *tfm)
{
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = ahash_to_shash(tfm);
	return desc;
}

int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);

int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		return crypto_shash_final(desc, req->result);

	do {
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	unsigned int nbytes = req->nbytes;
	struct scatterlist *sg;
	unsigned int offset;
	struct page *page;
	const u8 *data;
	int err;

	data = req->svirt;
	if (!nbytes || ahash_request_isvirt(req))
		return crypto_shash_digest(desc, data, nbytes, req->result);

	sg = req->src;
	if (nbytes > sg->length)
		return crypto_shash_init(desc) ?:
		       shash_ahash_finup(req, desc);

	page = sg_page(sg);
	offset = sg->offset;
	data = lowmem_page_address(page) + offset;
	if (!IS_ENABLED(CONFIG_HIGHMEM))
		return crypto_shash_digest(desc, data, nbytes, req->result);

	page += offset >> PAGE_SHIFT;
	offset = offset_in_page(offset);

	if (nbytes > (unsigned int)PAGE_SIZE - offset)
		return crypto_shash_init(desc) ?:
		       shash_ahash_finup(req, desc);

	data = kmap_local_page(page);
	err = crypto_shash_digest(desc, data + offset, nbytes,
				  req->result);
	kunmap_local(data);
	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

static void crypto_exit_ahash_using_shash(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}

static int crypto_init_ahash_using_shash(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	crt->using_shash = true;
	*ctx = shash;
	tfm->exit = crypto_exit_ahash_using_shash;

	crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
				    CRYPTO_TFM_NEED_KEY);

	return 0;
}

static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key,
			  unsigned int keylen)
{
	return -ENOSYS;
}

static void ahash_set_needkey(struct crypto_ahash *tfm, struct ahash_alg *alg)
{
	if (alg->setkey != ahash_nosetkey &&
	    !(alg->halg.base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY))
		crypto_ahash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
}

int crypto_ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
			unsigned int keylen)
{
	if (likely(tfm->using_shash)) {
		struct crypto_shash *shash = ahash_to_shash(tfm);
		int err;

		err = crypto_shash_setkey(shash, key, keylen);
		if (unlikely(err)) {
			crypto_ahash_set_flags(tfm,
					       crypto_shash_get_flags(shash) &
					       CRYPTO_TFM_NEED_KEY);
			return err;
		}
	} else {
		struct ahash_alg *alg = crypto_ahash_alg(tfm);
		int err;

		err = alg->setkey(tfm, key, keylen);
		if (!err && crypto_ahash_need_fallback(tfm))
			err = crypto_ahash_setkey(crypto_ahash_fb(tfm),
						  key, keylen);
		if (unlikely(err)) {
			ahash_set_needkey(tfm, alg);
			return err;
		}
	}
	crypto_ahash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_ahash_setkey);

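/*
 * Invoke an operation on the algorithm itself, or, for a virtual
 * address request that the algorithm cannot handle, on the fallback
 * tfm.  A fallback digest is simply re-issued; finup and update first
 * transfer the partial state to the fallback via export/import, and
 * update additionally transfers the resulting state back afterwards.
 */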
static int ahash_do_req_chain(struct ahash_request *req,
			      int (*const *op)(struct ahash_request *req))
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	int err;

	if (crypto_ahash_req_virt(tfm) || !ahash_request_isvirt(req))
		return (*op)(req);

	if (crypto_ahash_statesize(tfm) > HASH_MAX_STATESIZE)
		return -ENOSYS;

	if (!crypto_ahash_need_fallback(tfm))
		return -ENOSYS;

	if (crypto_hash_no_export_core(tfm))
		return -ENOSYS;

	{
		u8 state[HASH_MAX_STATESIZE];

		if (op == &crypto_ahash_alg(tfm)->digest) {
			ahash_request_set_tfm(req, crypto_ahash_fb(tfm));
			err = crypto_ahash_digest(req);
			goto out_no_state;
		}

		err = crypto_ahash_export(req, state);
		ahash_request_set_tfm(req, crypto_ahash_fb(tfm));
		err = err ?: crypto_ahash_import(req, state);

		if (op == &crypto_ahash_alg(tfm)->finup) {
			err = err ?: crypto_ahash_finup(req);
			goto out_no_state;
		}

		err = err ?:
		      crypto_ahash_update(req) ?:
		      crypto_ahash_export(req, state);

		ahash_request_set_tfm(req, tfm);
		return err ?: crypto_ahash_import(req, state);

out_no_state:
		ahash_request_set_tfm(req, tfm);
		return err;
	}
}

int crypto_ahash_init(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return crypto_shash_init(prepare_shash_desc(req, tfm));
	if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;
	if (ahash_req_on_stack(req) && ahash_is_async(tfm))
		return -EAGAIN;
	if (crypto_ahash_block_only(tfm)) {
		u8 *buf = ahash_request_ctx(req);

		buf += crypto_ahash_reqsize(tfm) - 1;
		*buf = 0;
	}
	return crypto_ahash_alg(tfm)->init(req);
}
EXPORT_SYMBOL_GPL(crypto_ahash_init);

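/*
 * Temporarily hijack the request's completion callback so that a
 * multi-step operation can schedule its follow-up step when the current
 * step completes asynchronously.  ahash_restore_req() undoes this.
 */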
static void ahash_save_req(struct ahash_request *req, crypto_completion_t cplt)
{
	req->saved_complete = req->base.complete;
	req->saved_data = req->base.data;
	req->base.complete = cplt;
	req->base.data = req;
}

static void ahash_restore_req(struct ahash_request *req)
{
	req->base.complete = req->saved_complete;
	req->base.data = req->saved_data;
}

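/*
 * Second half of an update on a block-only algorithm: undo the
 * scatterlist stitching done by crypto_ahash_update(), then stash the
 * unprocessed tail (a non-negative return value from the algorithm is
 * the number of leftover bytes) in the partial-block buffer for the
 * next call.
 */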
static int ahash_update_finish(struct ahash_request *req, int err)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	bool nonzero = crypto_ahash_final_nonzero(tfm);
	int bs = crypto_ahash_blocksize(tfm);
	u8 *blenp = ahash_request_ctx(req);
	int blen;
	u8 *buf;

	blenp += crypto_ahash_reqsize(tfm) - 1;
	blen = *blenp;
	buf = blenp - bs;

	if (blen) {
		req->src = req->sg_head + 1;
		if (sg_is_chain(req->src))
			req->src = sg_chain_ptr(req->src);
	}

	req->nbytes += nonzero - blen;

	blen = err < 0 ? 0 : err + nonzero;
	if (ahash_request_isvirt(req))
		memcpy(buf, req->svirt + req->nbytes - blen, blen);
	else
		memcpy_from_sglist(buf, req->src, req->nbytes - blen, blen);
	*blenp = blen;

	ahash_restore_req(req);

	return err;
}

static void ahash_update_done(void *data, int err)
{
	ahash_op_done(data, err, ahash_update_finish);
}

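/*
 * For block-only algorithms, update may only pass whole blocks down to
 * the driver: short input is accumulated in the partial-block buffer at
 * the end of the request context, and any previously buffered tail is
 * prepended to the request via a scatterlist entry stitched on in
 * front.  Algorithms flagged FINAL_NONZERO additionally have one byte
 * held back so the final operation never sees empty input.
 */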
int crypto_ahash_update(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	bool nonzero = crypto_ahash_final_nonzero(tfm);
	int bs = crypto_ahash_blocksize(tfm);
	u8 *blenp = ahash_request_ctx(req);
	int blen, err;
	u8 *buf;

	if (likely(tfm->using_shash))
		return shash_ahash_update(req, ahash_request_ctx(req));
	if (ahash_req_on_stack(req) && ahash_is_async(tfm))
		return -EAGAIN;
	if (!crypto_ahash_block_only(tfm))
		return ahash_do_req_chain(req, &crypto_ahash_alg(tfm)->update);

	blenp += crypto_ahash_reqsize(tfm) - 1;
	blen = *blenp;
	buf = blenp - bs;

	if (blen + req->nbytes < bs + nonzero) {
		if (ahash_request_isvirt(req))
			memcpy(buf + blen, req->svirt, req->nbytes);
		else
			memcpy_from_sglist(buf + blen, req->src, 0,
					   req->nbytes);

		*blenp += req->nbytes;
		return 0;
	}

	if (blen) {
		memset(req->sg_head, 0, sizeof(req->sg_head[0]));
		sg_set_buf(req->sg_head, buf, blen);
		if (req->src != req->sg_head + 1)
			sg_chain(req->sg_head, 2, req->src);
		req->src = req->sg_head;
		req->nbytes += blen;
	}
	req->nbytes -= nonzero;

	ahash_save_req(req, ahash_update_done);

	err = ahash_do_req_chain(req, &crypto_ahash_alg(tfm)->update);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

	return ahash_update_finish(req, err);
}
EXPORT_SYMBOL_GPL(crypto_ahash_update);

static int ahash_finup_finish(struct ahash_request *req, int err)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	u8 *blenp = ahash_request_ctx(req);
	int blen;

	blenp += crypto_ahash_reqsize(tfm) - 1;
	blen = *blenp;

	if (blen) {
		if (sg_is_last(req->src))
			req->src = NULL;
		else {
			req->src = req->sg_head + 1;
			if (sg_is_chain(req->src))
				req->src = sg_chain_ptr(req->src);
		}
		req->nbytes -= blen;
	}

	ahash_restore_req(req);

	return err;
}

static void ahash_finup_done(void *data, int err)
{
	ahash_op_done(data, err, ahash_finup_finish);
}

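/*
 * As with update, a block-only finup prepends any buffered partial
 * block before invoking the driver; ahash_finup_finish() above then
 * restores the caller's scatterlist and byte count.
 */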
int crypto_ahash_finup(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	int bs = crypto_ahash_blocksize(tfm);
	u8 *blenp = ahash_request_ctx(req);
	int blen, err;
	u8 *buf;

	if (likely(tfm->using_shash))
		return shash_ahash_finup(req, ahash_request_ctx(req));
	if (ahash_req_on_stack(req) && ahash_is_async(tfm))
		return -EAGAIN;
	if (!crypto_ahash_alg(tfm)->finup)
		return ahash_def_finup(req);
	if (!crypto_ahash_block_only(tfm))
		return ahash_do_req_chain(req, &crypto_ahash_alg(tfm)->finup);

	blenp += crypto_ahash_reqsize(tfm) - 1;
	blen = *blenp;
	buf = blenp - bs;

	if (blen) {
		memset(req->sg_head, 0, sizeof(req->sg_head[0]));
		sg_set_buf(req->sg_head, buf, blen);
		if (!req->src)
			sg_mark_end(req->sg_head);
		else if (req->src != req->sg_head + 1)
			sg_chain(req->sg_head, 2, req->src);
		req->src = req->sg_head;
		req->nbytes += blen;
	}

	ahash_save_req(req, ahash_finup_done);

	err = ahash_do_req_chain(req, &crypto_ahash_alg(tfm)->finup);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

	return ahash_finup_finish(req, err);
}
EXPORT_SYMBOL_GPL(crypto_ahash_finup);

int crypto_ahash_digest(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return shash_ahash_digest(req, prepare_shash_desc(req, tfm));
	if (ahash_req_on_stack(req) && ahash_is_async(tfm))
		return -EAGAIN;
	if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;
	return ahash_do_req_chain(req, &crypto_ahash_alg(tfm)->digest);
}
EXPORT_SYMBOL_GPL(crypto_ahash_digest);

static void ahash_def_finup_done2(void *data, int err)
{
	struct ahash_request *areq = data;

	if (err == -EINPROGRESS)
		return;

	ahash_restore_req(areq);
	ahash_request_complete(areq, err);
}

static int ahash_def_finup_finish1(struct ahash_request *req, int err)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (err)
		goto out;

	req->base.complete = ahash_def_finup_done2;

	err = crypto_ahash_alg(tfm)->final(req);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

out:
	ahash_restore_req(req);
	return err;
}

static void ahash_def_finup_done1(void *data, int err)
{
	ahash_op_done(data, err, ahash_def_finup_finish1);
}

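/*
 * Default finup implementation for algorithms that only provide
 * ->update() and ->final(): run the two steps back to back, chaining
 * through the saved completion callback whenever a step goes
 * asynchronous.
 */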
static int ahash_def_finup(struct ahash_request *req)
{
	int err;

	ahash_save_req(req, ahash_def_finup_done1);

	err = crypto_ahash_update(req);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

	return ahash_def_finup_finish1(req, err);
}

int crypto_ahash_export_core(struct ahash_request *req, void *out)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return crypto_shash_export_core(ahash_request_ctx(req), out);
	return crypto_ahash_alg(tfm)->export_core(req, out);
}
EXPORT_SYMBOL_GPL(crypto_ahash_export_core);

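/*
 * For block-only algorithms the exported state additionally carries the
 * partial-block buffer and its length byte at the very end, so that the
 * state can be imported into another tfm (e.g. the fallback) mid-block.
 */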
int crypto_ahash_export(struct ahash_request *req, void *out)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return crypto_shash_export(ahash_request_ctx(req), out);
	if (crypto_ahash_block_only(tfm)) {
		unsigned int plen = crypto_ahash_blocksize(tfm) + 1;
		unsigned int reqsize = crypto_ahash_reqsize(tfm);
		unsigned int ss = crypto_ahash_statesize(tfm);
		u8 *buf = ahash_request_ctx(req);

		memcpy(out + ss - plen, buf + reqsize - plen, plen);
	}
	return crypto_ahash_alg(tfm)->export(req, out);
}
EXPORT_SYMBOL_GPL(crypto_ahash_export);

int crypto_ahash_import_core(struct ahash_request *req, const void *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return crypto_shash_import_core(prepare_shash_desc(req, tfm),
						in);
	if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;
	return crypto_ahash_alg(tfm)->import_core(req, in);
}
EXPORT_SYMBOL_GPL(crypto_ahash_import_core);

int crypto_ahash_import(struct ahash_request *req, const void *in)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

	if (likely(tfm->using_shash))
		return crypto_shash_import(prepare_shash_desc(req, tfm), in);
	if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;
	if (crypto_ahash_block_only(tfm)) {
		unsigned int reqsize = crypto_ahash_reqsize(tfm);
		u8 *buf = ahash_request_ctx(req);

		buf[reqsize - 1] = 0;
	}
	return crypto_ahash_alg(tfm)->import(req, in);
}
EXPORT_SYMBOL_GPL(crypto_ahash_import);

static void crypto_ahash_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
	struct ahash_alg *alg = crypto_ahash_alg(hash);

	if (alg->exit_tfm)
		alg->exit_tfm(hash);
	else if (tfm->__crt_alg->cra_exit)
		tfm->__crt_alg->cra_exit(tfm);

	if (crypto_ahash_need_fallback(hash))
		crypto_free_ahash(crypto_ahash_fb(hash));
}

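/*
 * Set up a newly allocated ahash tfm: shash-backed algorithms get their
 * underlying shash instantiated instead; otherwise a synchronous,
 * virtual-address-capable fallback is allocated if the algorithm asked
 * for one, and the request size of synchronous tfms is checked against
 * (and padded up to) the limits that let requests live on the stack.
 */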
static int crypto_ahash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
	struct ahash_alg *alg = crypto_ahash_alg(hash);
	struct crypto_ahash *fb = NULL;
	int err;

	crypto_ahash_set_statesize(hash, alg->halg.statesize);
	crypto_ahash_set_reqsize(hash, crypto_tfm_alg_reqsize(tfm));

	if (tfm->__crt_alg->cra_type == &crypto_shash_type)
		return crypto_init_ahash_using_shash(tfm);

	if (crypto_ahash_need_fallback(hash)) {
		fb = crypto_alloc_ahash(crypto_ahash_alg_name(hash),
					CRYPTO_ALG_REQ_VIRT,
					CRYPTO_ALG_ASYNC |
					CRYPTO_ALG_REQ_VIRT |
					CRYPTO_AHASH_ALG_NO_EXPORT_CORE);
		if (IS_ERR(fb))
			return PTR_ERR(fb);

		tfm->fb = crypto_ahash_tfm(fb);
	}

	ahash_set_needkey(hash, alg);

	tfm->exit = crypto_ahash_exit_tfm;

	if (alg->init_tfm)
		err = alg->init_tfm(hash);
	else if (tfm->__crt_alg->cra_init)
		err = tfm->__crt_alg->cra_init(tfm);
	else
		return 0;

	if (err)
		goto out_free_sync_hash;

	if (!ahash_is_async(hash) && crypto_ahash_reqsize(hash) >
				     MAX_SYNC_HASH_REQSIZE)
		goto out_exit_tfm;

	BUILD_BUG_ON(HASH_MAX_DESCSIZE > MAX_SYNC_HASH_REQSIZE);
	if (crypto_ahash_reqsize(hash) < HASH_MAX_DESCSIZE)
		crypto_ahash_set_reqsize(hash, HASH_MAX_DESCSIZE);

	return 0;

out_exit_tfm:
	if (alg->exit_tfm)
		alg->exit_tfm(hash);
	else if (tfm->__crt_alg->cra_exit)
		tfm->__crt_alg->cra_exit(tfm);
	err = -EINVAL;
out_free_sync_hash:
	crypto_free_ahash(fb);
	return err;
}

static unsigned int crypto_ahash_extsize(struct crypto_alg *alg)
{
	if (alg->cra_type == &crypto_shash_type)
		return sizeof(struct crypto_shash *);

	return crypto_alg_extsize(alg);
}

static void crypto_ahash_free_instance(struct crypto_instance *inst)
{
	struct ahash_instance *ahash = ahash_instance(inst);

	ahash->free(ahash);
}

static int __maybe_unused crypto_ahash_report(
	struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;

	memset(&rhash, 0, sizeof(rhash));

	strscpy(rhash.type, "ahash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = __crypto_hash_alg_common(alg)->digestsize;

	return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
}

static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
{
	seq_printf(m, "type         : ahash\n");
	seq_printf(m, "async        : %s\n",
		   str_yes_no(alg->cra_flags & CRYPTO_ALG_ASYNC));
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n",
		   __crypto_hash_alg_common(alg)->digestsize);
}

static const struct crypto_type crypto_ahash_type = {
	.extsize = crypto_ahash_extsize,
	.init_tfm = crypto_ahash_init_tfm,
	.free = crypto_ahash_free_instance,
#ifdef CONFIG_PROC_FS
	.show = crypto_ahash_show,
#endif
#if IS_ENABLED(CONFIG_CRYPTO_USER)
	.report = crypto_ahash_report,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_AHASH_MASK,
	.type = CRYPTO_ALG_TYPE_AHASH,
	.tfmsize = offsetof(struct crypto_ahash, base),
	.algsize = offsetof(struct ahash_alg, halg.base),
};

int crypto_grab_ahash(struct crypto_ahash_spawn *spawn,
		      struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	spawn->base.frontend = &crypto_ahash_type;
	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_ahash);

struct crypto_ahash *crypto_alloc_ahash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_ahash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_ahash);

int crypto_has_ahash(const char *alg_name, u32 type, u32 mask)
{
	return crypto_type_has_alg(alg_name, &crypto_ahash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_has_ahash);

bool crypto_hash_alg_has_setkey(struct hash_alg_common *halg)
{
	struct crypto_alg *alg = &halg->base;

	if (alg->cra_type == &crypto_shash_type)
		return crypto_shash_alg_has_setkey(__crypto_shash_alg(alg));

	return __crypto_ahash_alg(alg)->setkey != ahash_nosetkey;
}
EXPORT_SYMBOL_GPL(crypto_hash_alg_has_setkey);

struct crypto_ahash *crypto_clone_ahash(struct crypto_ahash *hash)
{
	struct hash_alg_common *halg = crypto_hash_alg_common(hash);
	struct crypto_tfm *tfm = crypto_ahash_tfm(hash);
	struct crypto_ahash *fb = NULL;
	struct crypto_ahash *nhash;
	struct ahash_alg *alg;
	int err;

	if (!crypto_hash_alg_has_setkey(halg)) {
		tfm = crypto_tfm_get(tfm);
		if (IS_ERR(tfm))
			return ERR_CAST(tfm);

		return hash;
	}

	nhash = crypto_clone_tfm(&crypto_ahash_type, tfm);

	if (IS_ERR(nhash))
		return nhash;

	nhash->reqsize = hash->reqsize;
	nhash->statesize = hash->statesize;

	if (likely(hash->using_shash)) {
		struct crypto_shash **nctx = crypto_ahash_ctx(nhash);
		struct crypto_shash *shash;

		shash = crypto_clone_shash(ahash_to_shash(hash));
		if (IS_ERR(shash)) {
			err = PTR_ERR(shash);
			goto out_free_nhash;
		}
		crypto_ahash_tfm(nhash)->exit = crypto_exit_ahash_using_shash;
		nhash->using_shash = true;
		*nctx = shash;
		return nhash;
	}

	if (crypto_ahash_need_fallback(hash)) {
		fb = crypto_clone_ahash(crypto_ahash_fb(hash));
		err = PTR_ERR(fb);
		if (IS_ERR(fb))
			goto out_free_nhash;

		crypto_ahash_tfm(nhash)->fb = crypto_ahash_tfm(fb);
	}

	err = -ENOSYS;
	alg = crypto_ahash_alg(hash);
	if (!alg->clone_tfm)
		goto out_free_fb;

	err = alg->clone_tfm(nhash, hash);
	if (err)
		goto out_free_fb;

	crypto_ahash_tfm(nhash)->exit = crypto_ahash_exit_tfm;

	return nhash;

out_free_fb:
	crypto_free_ahash(fb);
out_free_nhash:
	crypto_free_ahash(nhash);
	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_clone_ahash);

static int ahash_default_export_core(struct ahash_request *req, void *out)
{
	return -ENOSYS;
}

static int ahash_default_import_core(struct ahash_request *req, const void *in)
{
	return -ENOSYS;
}

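/*
 * Validate and fix up an ahash algorithm prior to registration: sanity
 * check the state and request sizes, mark algorithms that require a
 * fallback (anything asynchronous or incapable of virtual addresses,
 * unless fallbacks are explicitly disallowed), and reserve room for the
 * partial-block buffer of block-only algorithms.
 */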
static int ahash_prepare_alg(struct ahash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;
	int err;

	if (alg->halg.statesize == 0)
		return -EINVAL;

	if (base->cra_reqsize && base->cra_reqsize < alg->halg.statesize)
		return -EINVAL;

	if (!(base->cra_flags & CRYPTO_ALG_ASYNC) &&
	    base->cra_reqsize > MAX_SYNC_HASH_REQSIZE)
		return -EINVAL;

	if (base->cra_flags & CRYPTO_ALG_NEED_FALLBACK &&
	    base->cra_flags & CRYPTO_ALG_NO_FALLBACK)
		return -EINVAL;

	err = hash_prepare_alg(&alg->halg);
	if (err)
		return err;

	base->cra_type = &crypto_ahash_type;
	base->cra_flags |= CRYPTO_ALG_TYPE_AHASH;

	if ((base->cra_flags ^ CRYPTO_ALG_REQ_VIRT) &
	    (CRYPTO_ALG_ASYNC | CRYPTO_ALG_REQ_VIRT) &&
	    !(base->cra_flags & CRYPTO_ALG_NO_FALLBACK))
		base->cra_flags |= CRYPTO_ALG_NEED_FALLBACK;

	if (!alg->setkey)
		alg->setkey = ahash_nosetkey;

	if (base->cra_flags & CRYPTO_AHASH_ALG_BLOCK_ONLY) {
		BUILD_BUG_ON(MAX_ALGAPI_BLOCKSIZE >= 256);
		if (!alg->finup)
			return -EINVAL;

		base->cra_reqsize += base->cra_blocksize + 1;
		alg->halg.statesize += base->cra_blocksize + 1;
		alg->export_core = alg->export;
		alg->import_core = alg->import;
	} else if (!alg->export_core || !alg->import_core) {
		alg->export_core = ahash_default_export_core;
		alg->import_core = ahash_default_import_core;
		base->cra_flags |= CRYPTO_AHASH_ALG_NO_EXPORT_CORE;
	}

	return 0;
}

int crypto_register_ahash(struct ahash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;
	int err;

	err = ahash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_ahash);

void crypto_unregister_ahash(struct ahash_alg *alg)
{
	crypto_unregister_alg(&alg->halg.base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_ahash);

int crypto_register_ahashes(struct ahash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_ahash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_ahash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_ahashes);

void crypto_unregister_ahashes(struct ahash_alg *algs, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_ahash(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_ahashes);

int ahash_register_instance(struct crypto_template *tmpl,
			    struct ahash_instance *inst)
{
	int err;

	if (WARN_ON(!inst->free))
		return -EINVAL;

	err = ahash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, ahash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(ahash_register_instance);

void ahash_request_free(struct ahash_request *req)
{
	if (unlikely(!req))
		return;

	if (!ahash_req_on_stack(req)) {
		kfree(req);
		return;
	}

	ahash_request_zero(req);
}
EXPORT_SYMBOL_GPL(ahash_request_free);

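/*
 * One-shot hash of a virtually addressed buffer: the request is placed
 * on the stack and issued through the fallback tfm, so no allocation is
 * needed.
 */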
int crypto_hash_digest(struct crypto_ahash *tfm, const u8 *data,
		       unsigned int len, u8 *out)
{
	HASH_REQUEST_ON_STACK(req, crypto_ahash_fb(tfm));
	int err;

	ahash_request_set_callback(req, 0, NULL, NULL);
	ahash_request_set_virt(req, data, out, len);
	err = crypto_ahash_digest(req);

	ahash_request_zero(req);

	return err;
}
EXPORT_SYMBOL_GPL(crypto_hash_digest);

void ahash_free_singlespawn_instance(struct ahash_instance *inst)
{
	crypto_drop_spawn(ahash_instance_ctx(inst));
	kfree(inst);
}
EXPORT_SYMBOL_GPL(ahash_free_singlespawn_instance);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Asynchronous cryptographic hash type");