// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/scatterwalk.h>
#include <linux/cryptouser.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/seq_file.h>
#include <linux/string.h>
#include <net/netlink.h>

#include "hash.h"

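/*
 * Predicates for the algorithm flags that drive the partial-block
 * handling below.  As used here: BLOCK_ONLY algorithms only accept
 * whole blocks in ->update (the API buffers any remainder),
 * FINAL_NONZERO algorithms need a nonempty final block, and FINUP_MAX
 * algorithms can take the entire remaining input in one ->finup call.
 */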
static inline bool crypto_shash_block_only(struct crypto_shash *tfm)
{
	return crypto_shash_alg(tfm)->base.cra_flags &
	       CRYPTO_AHASH_ALG_BLOCK_ONLY;
}

static inline bool crypto_shash_final_nonzero(struct crypto_shash *tfm)
{
	return crypto_shash_alg(tfm)->base.cra_flags &
	       CRYPTO_AHASH_ALG_FINAL_NONZERO;
}

static inline bool crypto_shash_finup_max(struct crypto_shash *tfm)
{
	return crypto_shash_alg(tfm)->base.cra_flags &
	       CRYPTO_AHASH_ALG_FINUP_MAX;
}

int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
		    unsigned int keylen)
{
	return -ENOSYS;
}
EXPORT_SYMBOL_GPL(shash_no_setkey);

static void shash_set_needkey(struct crypto_shash *tfm, struct shash_alg *alg)
{
	if (crypto_shash_alg_needs_key(alg))
		crypto_shash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
}

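/*
 * Forward the key to the algorithm's ->setkey.  On failure the tfm is
 * re-marked as needing a key so a partially keyed transform cannot be
 * used; on success the NEED_KEY flag is cleared.
 */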
int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	int err;

	err = shash->setkey(tfm, key, keylen);
	if (unlikely(err)) {
		shash_set_needkey(tfm, shash);
		return err;
	}

	crypto_shash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);

static int __crypto_shash_init(struct shash_desc *desc)
{
	struct crypto_shash *tfm = desc->tfm;

	if (crypto_shash_block_only(tfm)) {
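		/*
		 * The last byte of the descriptor context holds the
		 * number of bytes currently buffered in the partial
		 * block; start with an empty partial block.
		 */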
		u8 *buf = shash_desc_ctx(desc);

		buf += crypto_shash_descsize(tfm) - 1;
		*buf = 0;
	}

	return crypto_shash_alg(tfm)->init(desc);
}

int crypto_shash_init(struct shash_desc *desc)
{
	if (crypto_shash_get_flags(desc->tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;
	return __crypto_shash_init(desc);
}
EXPORT_SYMBOL_GPL(crypto_shash_init);

static int shash_default_finup(struct shash_desc *desc, const u8 *data,
			       unsigned int len, u8 *out)
{
	struct shash_alg *shash = crypto_shash_alg(desc->tfm);

	return shash->update(desc, data, len) ?:
	       shash->final(desc, out);
}

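/*
 * Run a finup-style operation, then wipe the descriptor context so no
 * hash state is left behind (it may live on the caller's stack).
 */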
static int crypto_shash_op_and_zero(
	int (*op)(struct shash_desc *desc, const u8 *data,
		  unsigned int len, u8 *out),
	struct shash_desc *desc, const u8 *data, unsigned int len, u8 *out)
{
	int err;

	err = op(desc, data, len, out);
	memset(shash_desc_ctx(desc), 0, crypto_shash_descsize(desc->tfm));
	return err;
}

int crypto_shash_finup(struct shash_desc *restrict desc, const u8 *data,
		       unsigned int len, u8 *restrict out)
{
	struct crypto_shash *tfm = desc->tfm;
	u8 *blenp = shash_desc_ctx(desc);
	bool finup_max, nonzero;
	unsigned int bs;
	int err;
	u8 *buf;

	if (!crypto_shash_block_only(tfm)) {
		if (out)
			goto finup;
		return crypto_shash_alg(tfm)->update(desc, data, len);
	}

	finup_max = out && crypto_shash_finup_max(tfm);

	/* Retain extra block for final nonzero algorithms. */
	nonzero = crypto_shash_final_nonzero(tfm);

	/*
	 * The partial block buffer follows the algorithm desc context.
	 * The byte following that contains the length.
	 */
	blenp += crypto_shash_descsize(tfm) - 1;
	bs = crypto_shash_blocksize(tfm);
	buf = blenp - bs;

	if (likely(!*blenp && finup_max))
		goto finup;

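	/*
	 * Feed full blocks to ->update, first topping up any buffered
	 * partial block.  For FINAL_NONZERO algorithms one extra byte
	 * is held back so the final block is never empty.  A positive
	 * return from ->update counts bytes it left unprocessed.
	 */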
	while ((*blenp + len) >= bs + nonzero) {
		unsigned int nbytes = len - nonzero;
		const u8 *src = data;

		if (*blenp) {
			memcpy(buf + *blenp, data, bs - *blenp);
			nbytes = bs;
			src = buf;
		}

		err = crypto_shash_alg(tfm)->update(desc, src, nbytes);
		if (err < 0)
			return err;

		data += nbytes - err - *blenp;
		len -= nbytes - err - *blenp;
		*blenp = 0;
	}

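	/*
	 * Stash any tail in the partial block buffer.  If no digest was
	 * requested we are done; otherwise finish up on the buffered
	 * remainder.
	 */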
	if (*blenp || !out) {
		memcpy(buf + *blenp, data, len);
		*blenp += len;
		if (!out)
			return 0;
		data = buf;
		len = *blenp;
	}

finup:
	return crypto_shash_op_and_zero(crypto_shash_alg(tfm)->finup, desc,
					data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

static int shash_default_digest(struct shash_desc *desc, const u8 *data,
				unsigned int len, u8 *out)
{
	return __crypto_shash_init(desc) ?:
	       crypto_shash_finup(desc, data, len, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;

	if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;

	return crypto_shash_op_and_zero(crypto_shash_alg(tfm)->digest, desc,
					data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);

int crypto_shash_tfm_digest(struct crypto_shash *tfm, const u8 *data,
			    unsigned int len, u8 *out)
{
	SHASH_DESC_ON_STACK(desc, tfm);

	desc->tfm = tfm;
	return crypto_shash_digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_tfm_digest);
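
/*
 * Example usage (an illustrative sketch, not code from this file; it
 * assumes a "sha256" implementation is available and abbreviates error
 * handling):
 *
 *	u8 digest[32];
 *	struct crypto_shash *tfm;
 *	int err;
 *
 *	tfm = crypto_alloc_shash("sha256", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	err = crypto_shash_tfm_digest(tfm, data, len, digest);
 *	crypto_free_shash(tfm);
 */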
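/*
 * Export the hash state.  When the algorithm supplies no callback, the
 * descriptor context itself is the canonical state and a plain memcpy
 * suffices; for block-only algorithms the trailing partial block plus
 * length byte is not part of the core state and is excluded here.
 */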
static int __crypto_shash_export(struct shash_desc *desc, void *out,
				 int (*export)(struct shash_desc *desc,
					       void *out))
{
	struct crypto_shash *tfm = desc->tfm;
	u8 *buf = shash_desc_ctx(desc);
	unsigned int plen, ss;

	plen = crypto_shash_blocksize(tfm) + 1;
	ss = crypto_shash_statesize(tfm);
	if (crypto_shash_block_only(tfm))
		ss -= plen;
	if (!export) {
		memcpy(out, buf, ss);
		return 0;
	}

	return export(desc, out);
}

int crypto_shash_export_core(struct shash_desc *desc, void *out)
{
	return __crypto_shash_export(desc, out,
				     crypto_shash_alg(desc->tfm)->export_core);
}
EXPORT_SYMBOL_GPL(crypto_shash_export_core);

int crypto_shash_export(struct shash_desc *desc, void *out)
{
	struct crypto_shash *tfm = desc->tfm;

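	/*
	 * Place the buffered partial block and its length byte at the
	 * end of the exported state, ahead of the core state export.
	 */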
	if (crypto_shash_block_only(tfm)) {
		unsigned int plen = crypto_shash_blocksize(tfm) + 1;
		unsigned int descsize = crypto_shash_descsize(tfm);
		unsigned int ss = crypto_shash_statesize(tfm);
		u8 *buf = shash_desc_ctx(desc);

		memcpy(out + ss - plen, buf + descsize - plen, plen);
	}
	return __crypto_shash_export(desc, out, crypto_shash_alg(tfm)->export);
}
EXPORT_SYMBOL_GPL(crypto_shash_export);

static int __crypto_shash_import(struct shash_desc *desc, const void *in,
				 int (*import)(struct shash_desc *desc,
					       const void *in))
{
	struct crypto_shash *tfm = desc->tfm;
	unsigned int descsize, plen, ss;
	u8 *buf = shash_desc_ctx(desc);

	if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;

	ss = crypto_shash_statesize(tfm);
	if (crypto_shash_block_only(tfm)) {
		plen = crypto_shash_blocksize(tfm) + 1;
		ss -= plen;
		descsize = crypto_shash_descsize(tfm);
		buf[descsize - 1] = 0;
	}
	if (!import) {
		memcpy(buf, in, ss);
		return 0;
	}

	return import(desc, in);
}

int crypto_shash_import_core(struct shash_desc *desc, const void *in)
{
	return __crypto_shash_import(desc, in,
				     crypto_shash_alg(desc->tfm)->import_core);
}
EXPORT_SYMBOL_GPL(crypto_shash_import_core);

int crypto_shash_import(struct shash_desc *desc, const void *in)
{
	struct crypto_shash *tfm = desc->tfm;
	int err;

	err = __crypto_shash_import(desc, in, crypto_shash_alg(tfm)->import);
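	/*
	 * Restore the partial block from the end of the imported state
	 * and sanity-check its length byte, which must be smaller than
	 * the block size plus one.
	 */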
	if (crypto_shash_block_only(tfm)) {
		unsigned int plen = crypto_shash_blocksize(tfm) + 1;
		unsigned int descsize = crypto_shash_descsize(tfm);
		unsigned int ss = crypto_shash_statesize(tfm);
		u8 *buf = shash_desc_ctx(desc);

		memcpy(buf + descsize - plen, in + ss - plen, plen);
		if (buf[descsize - 1] >= plen)
			err = -EOVERFLOW;
	}
	return err;
}
EXPORT_SYMBOL_GPL(crypto_shash_import);

static void crypto_shash_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);
	struct shash_alg *alg = crypto_shash_alg(hash);

	alg->exit_tfm(hash);
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);
	struct shash_alg *alg = crypto_shash_alg(hash);

	shash_set_needkey(hash, alg);

	if (alg->exit_tfm)
		tfm->exit = crypto_shash_exit_tfm;

	if (!alg->init_tfm)
		return 0;

	return alg->init_tfm(hash);
}

static void crypto_shash_free_instance(struct crypto_instance *inst)
{
	struct shash_instance *shash = shash_instance(inst);

	shash->free(shash);
}

static int __maybe_unused crypto_shash_report(
	struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;
	struct shash_alg *salg = __crypto_shash_alg(alg);

	memset(&rhash, 0, sizeof(rhash));

	strscpy(rhash.type, "shash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = salg->digestsize;

	return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
}

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}

const struct crypto_type crypto_shash_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_shash_init_tfm,
	.free = crypto_shash_free_instance,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
#if IS_ENABLED(CONFIG_CRYPTO_USER)
	.report = crypto_shash_report,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
	.algsize = offsetof(struct shash_alg, base),
};

int crypto_grab_shash(struct crypto_shash_spawn *spawn,
		      struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	spawn->base.frontend = &crypto_shash_type;
	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_shash);

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);

int crypto_has_shash(const char *alg_name, u32 type, u32 mask)
{
	return crypto_type_has_alg(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_has_shash);

struct crypto_shash *crypto_clone_shash(struct crypto_shash *hash)
{
	struct crypto_tfm *tfm = crypto_shash_tfm(hash);
	struct shash_alg *alg = crypto_shash_alg(hash);
	struct crypto_shash *nhash;
	int err;

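	/*
	 * Keyless algorithms carry no tfm-local state, so cloning just
	 * takes another reference on the existing tfm.
	 */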
	if (!crypto_shash_alg_has_setkey(alg)) {
		tfm = crypto_tfm_get(tfm);
		if (IS_ERR(tfm))
			return ERR_CAST(tfm);

		return hash;
	}

	if (!alg->clone_tfm && (alg->init_tfm || alg->base.cra_init))
		return ERR_PTR(-ENOSYS);

	nhash = crypto_clone_tfm(&crypto_shash_type, tfm);
	if (IS_ERR(nhash))
		return nhash;

	if (alg->clone_tfm) {
		err = alg->clone_tfm(nhash, hash);
		if (err) {
			crypto_free_shash(nhash);
			return ERR_PTR(err);
		}
	}

	if (alg->exit_tfm)
		crypto_shash_tfm(nhash)->exit = crypto_shash_exit_tfm;

	return nhash;
}
EXPORT_SYMBOL_GPL(crypto_clone_shash);

int hash_prepare_alg(struct hash_alg_common *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > HASH_MAX_DIGESTSIZE)
		return -EINVAL;

	/* alignmask is not useful for hashes, so it is not supported. */
	if (base->cra_alignmask)
		return -EINVAL;

	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;

	return 0;
}

static int shash_default_export_core(struct shash_desc *desc, void *out)
{
	return -ENOSYS;
}

static int shash_default_import_core(struct shash_desc *desc, const void *in)
{
	return -ENOSYS;
}

static int shash_prepare_alg(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;
	int err;

	if ((alg->export && !alg->import) || (alg->import && !alg->export))
		return -EINVAL;

	err = hash_prepare_alg(&alg->halg);
	if (err)
		return err;

	base->cra_type = &crypto_shash_type;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;
	base->cra_flags |= CRYPTO_ALG_REQ_VIRT;

	/*
	 * Handle missing optional functions.  For each one we can either
	 * install a default here, or we can leave the pointer as NULL and check
	 * the pointer for NULL in crypto_shash_*(), avoiding an indirect call
	 * when the default behavior is desired.  For ->finup and ->digest we
	 * install defaults, since for optimal performance algorithms should
	 * implement these anyway.  On the other hand, for ->import and
	 * ->export the common case and best performance comes from the simple
	 * memcpy of the shash_desc_ctx, so when those pointers are NULL we
	 * leave them NULL and provide the memcpy with no indirect call.
	 */
	if (!alg->finup)
		alg->finup = shash_default_finup;
	if (!alg->digest)
		alg->digest = shash_default_digest;
	if (!alg->export && !alg->halg.statesize)
		alg->halg.statesize = alg->descsize;
	if (!alg->setkey)
		alg->setkey = shash_no_setkey;

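	/*
	 * Block-only algorithms get extra room in the descriptor and
	 * exported state for the partial block plus its length byte;
	 * their own ->export/->import already produce the core state.
	 */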
	if (base->cra_flags & CRYPTO_AHASH_ALG_BLOCK_ONLY) {
		BUILD_BUG_ON(MAX_ALGAPI_BLOCKSIZE >= 256);
		alg->descsize += base->cra_blocksize + 1;
		alg->statesize += base->cra_blocksize + 1;
		alg->export_core = alg->export;
		alg->import_core = alg->import;
	} else if (!alg->export_core || !alg->import_core) {
		alg->export_core = shash_default_export_core;
		alg->import_core = shash_default_import_core;
		base->cra_flags |= CRYPTO_AHASH_ALG_NO_EXPORT_CORE;
	}

	if (alg->descsize > HASH_MAX_DESCSIZE)
		return -EINVAL;
	if (alg->statesize > HASH_MAX_STATESIZE)
		return -EINVAL;

	base->cra_reqsize = sizeof(struct shash_desc) + alg->descsize;

	return 0;
}

int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = shash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);
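
/*
 * Registration example (an illustrative sketch; the callbacks, sizes
 * and names below are placeholders for a real algorithm, not code from
 * this file):
 *
 *	static struct shash_alg example_alg = {
 *		.digestsize	= 32,
 *		.init		= example_init,
 *		.update		= example_update,
 *		.final		= example_final,
 *		.descsize	= sizeof(struct example_ctx),
 *		.base		= {
 *			.cra_name	 = "example",
 *			.cra_driver_name = "example-generic",
 *			.cra_blocksize	 = 64,
 *			.cra_module	 = THIS_MODULE,
 *		},
 *	};
 *
 *	err = crypto_register_shash(&example_alg);
 */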

void crypto_unregister_shash(struct shash_alg *alg)
{
	crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

int crypto_register_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_shash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_shashes);

void crypto_unregister_shashes(struct shash_alg *algs, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shashes);

int shash_register_instance(struct crypto_template *tmpl,
			    struct shash_instance *inst)
{
	int err;

	if (WARN_ON(!inst->free))
		return -EINVAL;

	err = shash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_singlespawn_instance(struct shash_instance *inst)
{
	crypto_drop_spawn(shash_instance_ctx(inst));
	kfree(inst);
}
EXPORT_SYMBOL_GPL(shash_free_singlespawn_instance);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");