// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * CCM: Counter with CBC-MAC
 *
 * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 */

#include <crypto/internal/aead.h>
#include <crypto/internal/cipher.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <crypto/utils.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/string.h>

struct ccm_instance_ctx {
	struct crypto_skcipher_spawn ctr;
	struct crypto_ahash_spawn mac;
};

struct crypto_ccm_ctx {
	struct crypto_ahash *mac;
	struct crypto_skcipher *ctr;
};

struct crypto_rfc4309_ctx {
	struct crypto_aead *child;
	u8 nonce[3];
};

struct crypto_rfc4309_req_ctx {
	struct scatterlist src[3];
	struct scatterlist dst[3];
	struct aead_request subreq;
};

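/*
 * Per-request context.  odata initially holds the B_0 block and then receives
 * the computed CBC-MAC; auth_tag holds the received tag on decryption.
 * src/dst are scatterlists with a 16-byte tag slot chained in front of the
 * payload, so a single CTR pass covers both the tag (counter block 0) and the
 * data (counter blocks 1 onwards).
 */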
struct crypto_ccm_req_priv_ctx {
	u8 odata[16];
	u8 idata[16];
	u8 auth_tag[16];
	u32 flags;
	struct scatterlist src[3];
	struct scatterlist dst[3];
	union {
		struct ahash_request ahreq;
		struct skcipher_request skreq;
	};
};

struct cbcmac_tfm_ctx {
	struct crypto_cipher *child;
};

static inline struct crypto_ccm_req_priv_ctx *crypto_ccm_reqctx(
	struct aead_request *req)
{
	unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req));

	return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
}

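/*
 * Encode the message length big-endian into the last csize (= L) bytes of
 * the B_0 block.  For L < 4 the length must fit into L bytes, otherwise the
 * request is rejected with -EOVERFLOW.
 */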
static int set_msg_len(u8 *block, unsigned int msglen, int csize)
{
	__be32 data;

	memset(block, 0, csize);
	block += csize;

	if (csize >= 4)
		csize = 4;
	else if (msglen > (1 << (8 * csize)))
		return -EOVERFLOW;

	data = cpu_to_be32(msglen);
	memcpy(block - csize, (u8 *)&data + 4 - csize, csize);

	return 0;
}

static int crypto_ccm_setkey(struct crypto_aead *aead, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_skcipher *ctr = ctx->ctr;
	struct crypto_ahash *mac = ctx->mac;
	int err;

	crypto_skcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(ctr, crypto_aead_get_flags(aead) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(ctr, key, keylen);
	if (err)
		return err;

	crypto_ahash_clear_flags(mac, CRYPTO_TFM_REQ_MASK);
	crypto_ahash_set_flags(mac, crypto_aead_get_flags(aead) &
				    CRYPTO_TFM_REQ_MASK);
	return crypto_ahash_setkey(mac, key, keylen);
}

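/* RFC 3610 permits tag lengths of 4, 6, 8, 10, 12, 14 or 16 bytes. */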
static int crypto_ccm_setauthsize(struct crypto_aead *tfm,
				  unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 6:
	case 8:
	case 10:
	case 12:
	case 14:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

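/*
 * Build the B_0 block: the IV already carries L' = L - 1 in byte 0 and the
 * nonce in the following bytes; fold in the Adata flag and the encoded tag
 * length (M - 2) / 2, then write the message length into the final L bytes.
 */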
static int format_input(u8 *info, struct aead_request *req,
			unsigned int cryptlen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int lp = req->iv[0];
	unsigned int l = lp + 1;
	unsigned int m;

	m = crypto_aead_authsize(aead);

	memcpy(info, req->iv, 16);

	/* format control info per RFC 3610 and
	 * NIST Special Publication 800-38C
	 */
	*info |= (8 * ((m - 2) / 2));
	if (req->assoclen)
		*info |= 64;

	return set_msg_len(info + 16 - l, cryptlen, l);
}

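/*
 * Encode the associated data length: two big-endian bytes if it is below
 * 0xff00, otherwise the 0xfffe marker followed by a four-byte length.
 */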
static int format_adata(u8 *adata, unsigned int a)
{
	int len = 0;

	/* add control info for associated data
	 * RFC 3610 and NIST Special Publication 800-38C
	 */
	if (a < 65280) {
		*(__be16 *)adata = cpu_to_be16(a);
		len = 2;
	} else {
		*(__be16 *)adata = cpu_to_be16(0xfffe);
		*(__be32 *)&adata[2] = cpu_to_be32(a);
		len = 6;
	}

	return len;
}

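/*
 * Compute the CBC-MAC into pctx->odata: hash B_0, then the length-prefixed
 * associated data chained straight off req->src, then the payload.  The
 * associated data section is explicitly zero padded out to a 16-byte block
 * boundary; a trailing partial payload block is absorbed by cbcmac's finup.
 */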
static int crypto_ccm_auth(struct aead_request *req, struct scatterlist *plain,
			   unsigned int cryptlen)
{
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	struct ahash_request *ahreq = &pctx->ahreq;
	unsigned int assoclen = req->assoclen;
	struct scatterlist sg[3];
	u8 *odata = pctx->odata;
	u8 *idata = pctx->idata;
	int ilen, err;

	/* format control data for input */
	err = format_input(odata, req, cryptlen);
	if (err)
		goto out;

	sg_init_table(sg, 3);
	sg_set_buf(&sg[0], odata, 16);

	/* format associated data and compute into mac */
	if (assoclen) {
		ilen = format_adata(idata, assoclen);
		sg_set_buf(&sg[1], idata, ilen);
		sg_chain(sg, 3, req->src);
	} else {
		ilen = 0;
		sg_chain(sg, 2, req->src);
	}

	ahash_request_set_tfm(ahreq, ctx->mac);
	ahash_request_set_callback(ahreq, pctx->flags, NULL, NULL);
	ahash_request_set_crypt(ahreq, sg, NULL, assoclen + ilen + 16);
	err = crypto_ahash_init(ahreq);
	if (err)
		goto out;
	err = crypto_ahash_update(ahreq);
	if (err)
		goto out;

	/* we need to pad the MAC input to a round multiple of the block size */
	ilen = 16 - (assoclen + ilen) % 16;
	if (ilen < 16) {
		memset(idata, 0, ilen);
		sg_init_table(sg, 2);
		sg_set_buf(&sg[0], idata, ilen);
		if (plain)
			sg_chain(sg, 2, plain);
		plain = sg;
		cryptlen += ilen;
	}

	ahash_request_set_crypt(ahreq, plain, odata, cryptlen);
	err = crypto_ahash_finup(ahreq);
out:
	return err;
}

static void crypto_ccm_encrypt_done(void *data, int err)
{
	struct aead_request *req = data;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	u8 *odata = pctx->odata;

	if (!err)
		scatterwalk_map_and_copy(odata, req->dst,
					 req->assoclen + req->cryptlen,
					 crypto_aead_authsize(aead), 1);
	aead_request_complete(req, err);
}

static inline int crypto_ccm_check_iv(const u8 *iv)
{
	/* 2 <= L <= 8, so 1 <= L' <= 7. */
	if (1 > iv[0] || iv[0] > 7)
		return -EINVAL;

	return 0;
}

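/*
 * Prepare the counter block and scatterlists: zero the trailing iv[0] + 1
 * bytes of the IV (the L-byte counter field) so it becomes A_0 with counter
 * zero, and chain a 16-byte tag slot in front of the payload (past the
 * associated data) for both src and dst.
 */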
static int crypto_ccm_init_crypt(struct aead_request *req, u8 *tag)
{
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct scatterlist *sg;
	u8 *iv = req->iv;
	int err;

	err = crypto_ccm_check_iv(iv);
	if (err)
		return err;

	pctx->flags = aead_request_flags(req);

	/* Note: rfc 3610 and NIST 800-38C require counter of
	 * zero to encrypt auth tag.
	 */
	memset(iv + 15 - iv[0], 0, iv[0] + 1);

	sg_init_table(pctx->src, 3);
	sg_set_buf(pctx->src, tag, 16);
	sg = scatterwalk_ffwd(pctx->src + 1, req->src, req->assoclen);
	if (sg != pctx->src + 1)
		sg_chain(pctx->src, 2, sg);

	if (req->src != req->dst) {
		sg_init_table(pctx->dst, 3);
		sg_set_buf(pctx->dst, tag, 16);
		sg = scatterwalk_ffwd(pctx->dst + 1, req->dst, req->assoclen);
		if (sg != pctx->dst + 1)
			sg_chain(pctx->dst, 2, sg);
	}

	return 0;
}

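/*
 * MAC-then-encrypt: compute the CBC-MAC over the plaintext into odata, CTR
 * encrypt the tag (counter block 0) and the payload in a single pass, then
 * copy the first authsize bytes of the encrypted tag to the end of dst.
 */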
static int crypto_ccm_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct skcipher_request *skreq = &pctx->skreq;
	struct scatterlist *dst;
	unsigned int cryptlen = req->cryptlen;
	u8 *odata = pctx->odata;
	u8 *iv = req->iv;
	int err;

	err = crypto_ccm_init_crypt(req, odata);
	if (err)
		return err;

	err = crypto_ccm_auth(req, sg_next(pctx->src), cryptlen);
	if (err)
		return err;

	dst = pctx->src;
	if (req->src != req->dst)
		dst = pctx->dst;

	skcipher_request_set_tfm(skreq, ctx->ctr);
	skcipher_request_set_callback(skreq, pctx->flags,
				      crypto_ccm_encrypt_done, req);
	skcipher_request_set_crypt(skreq, pctx->src, dst, cryptlen + 16, iv);
	err = crypto_skcipher_encrypt(skreq);
	if (err)
		return err;

	/* copy authtag to end of dst */
	scatterwalk_map_and_copy(odata, sg_next(dst), cryptlen,
				 crypto_aead_authsize(aead), 1);
	return err;
}

static void crypto_ccm_decrypt_done(void *data, int err)
{
	struct aead_request *req = data;
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen - authsize;
	struct scatterlist *dst;

	pctx->flags = 0;

	dst = sg_next(req->src == req->dst ? pctx->src : pctx->dst);

	if (!err) {
		err = crypto_ccm_auth(req, dst, cryptlen);
		if (!err && crypto_memneq(pctx->auth_tag, pctx->odata, authsize))
			err = -EBADMSG;
	}
	aead_request_complete(req, err);
}

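/*
 * Decrypt-then-verify: pull the received tag out of src, CTR decrypt the tag
 * and the ciphertext in one pass, recompute the CBC-MAC over the recovered
 * plaintext and compare the two tags with crypto_memneq().
 */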
static int crypto_ccm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct skcipher_request *skreq = &pctx->skreq;
	struct scatterlist *dst;
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = req->cryptlen;
	u8 *authtag = pctx->auth_tag;
	u8 *odata = pctx->odata;
	u8 *iv = pctx->idata;
	int err;

	cryptlen -= authsize;

	err = crypto_ccm_init_crypt(req, authtag);
	if (err)
		return err;

	scatterwalk_map_and_copy(authtag, sg_next(pctx->src), cryptlen,
				 authsize, 0);

	dst = pctx->src;
	if (req->src != req->dst)
		dst = pctx->dst;

	memcpy(iv, req->iv, 16);

	skcipher_request_set_tfm(skreq, ctx->ctr);
	skcipher_request_set_callback(skreq, pctx->flags,
				      crypto_ccm_decrypt_done, req);
	skcipher_request_set_crypt(skreq, pctx->src, dst, cryptlen + 16, iv);
	err = crypto_skcipher_decrypt(skreq);
	if (err)
		return err;

	err = crypto_ccm_auth(req, sg_next(dst), cryptlen);
	if (err)
		return err;

	/* verify */
	if (crypto_memneq(authtag, odata, authsize))
		return -EBADMSG;

	return err;
}

static int crypto_ccm_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct ccm_instance_ctx *ictx = aead_instance_ctx(inst);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_ahash *mac;
	struct crypto_skcipher *ctr;
	unsigned long align;
	int err;

	mac = crypto_spawn_ahash(&ictx->mac);
	if (IS_ERR(mac))
		return PTR_ERR(mac);

	ctr = crypto_spawn_skcipher(&ictx->ctr);
	err = PTR_ERR(ctr);
	if (IS_ERR(ctr))
		goto err_free_mac;

	ctx->mac = mac;
	ctx->ctr = ctr;

	align = crypto_aead_alignmask(tfm);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	crypto_aead_set_reqsize(
		tfm,
		align + sizeof(struct crypto_ccm_req_priv_ctx) +
		max(crypto_ahash_reqsize(mac), crypto_skcipher_reqsize(ctr)));

	return 0;

err_free_mac:
	crypto_free_ahash(mac);
	return err;
}

static void crypto_ccm_exit_tfm(struct crypto_aead *tfm)
{
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_ahash(ctx->mac);
	crypto_free_skcipher(ctx->ctr);
}

static void crypto_ccm_free(struct aead_instance *inst)
{
	struct ccm_instance_ctx *ctx = aead_instance_ctx(inst);

	crypto_drop_ahash(&ctx->mac);
	crypto_drop_skcipher(&ctx->ctr);
	kfree(inst);
}

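/*
 * Instantiate ccm_base(ctr_name, mac_name): grab the cbcmac ahash and the ctr
 * skcipher, check that the MAC produces 16-byte digests, that the skcipher
 * really is CTR mode with a 16-byte IV, and that both wrap the same
 * underlying block cipher.
 */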
static int crypto_ccm_create_common(struct crypto_template *tmpl,
				    struct rtattr **tb,
				    const char *ctr_name,
				    const char *mac_name)
{
	struct skcipher_alg_common *ctr;
	u32 mask;
	struct aead_instance *inst;
	struct ccm_instance_ctx *ictx;
	struct hash_alg_common *mac;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_AEAD, &mask);
	if (err)
		return err;

	inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;
	ictx = aead_instance_ctx(inst);

	err = crypto_grab_ahash(&ictx->mac, aead_crypto_instance(inst),
				mac_name, 0, mask | CRYPTO_ALG_ASYNC);
	if (err)
		goto err_free_inst;
	mac = crypto_spawn_ahash_alg(&ictx->mac);

	err = -EINVAL;
	if (strncmp(mac->base.cra_name, "cbcmac(", 7) != 0 ||
	    mac->digestsize != 16)
		goto err_free_inst;

	err = crypto_grab_skcipher(&ictx->ctr, aead_crypto_instance(inst),
				   ctr_name, 0, mask);
	if (err)
		goto err_free_inst;
	ctr = crypto_spawn_skcipher_alg_common(&ictx->ctr);

	/* The skcipher algorithm must be CTR mode, using 16-byte blocks. */
	err = -EINVAL;
	if (strncmp(ctr->base.cra_name, "ctr(", 4) != 0 ||
	    ctr->ivsize != 16 || ctr->base.cra_blocksize != 1)
		goto err_free_inst;

	/* ctr and cbcmac must use the same underlying block cipher. */
	if (strcmp(ctr->base.cra_name + 4, mac->base.cra_name + 7) != 0)
		goto err_free_inst;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "ccm(%s", ctr->base.cra_name + 4) >= CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;

	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "ccm_base(%s,%s)", ctr->base.cra_driver_name,
		     mac->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;

	inst->alg.base.cra_priority = (mac->base.cra_priority +
				       ctr->base.cra_priority) / 2;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = ctr->base.cra_alignmask;
	inst->alg.ivsize = 16;
	inst->alg.chunksize = ctr->chunksize;
	inst->alg.maxauthsize = 16;
	inst->alg.base.cra_ctxsize = sizeof(struct crypto_ccm_ctx);
	inst->alg.init = crypto_ccm_init_tfm;
	inst->alg.exit = crypto_ccm_exit_tfm;
	inst->alg.setkey = crypto_ccm_setkey;
	inst->alg.setauthsize = crypto_ccm_setauthsize;
	inst->alg.encrypt = crypto_ccm_encrypt;
	inst->alg.decrypt = crypto_ccm_decrypt;

	inst->free = crypto_ccm_free;

	err = aead_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		crypto_ccm_free(inst);
	}
	return err;
}

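/* "ccm(X)" is shorthand for "ccm_base(ctr(X),cbcmac(X))". */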
static int crypto_ccm_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	const char *cipher_name;
	char ctr_name[CRYPTO_MAX_ALG_NAME];
	char mac_name[CRYPTO_MAX_ALG_NAME];

	cipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(cipher_name))
		return PTR_ERR(cipher_name);

	if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)",
		     cipher_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	if (snprintf(mac_name, CRYPTO_MAX_ALG_NAME, "cbcmac(%s)",
		     cipher_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	return crypto_ccm_create_common(tmpl, tb, ctr_name, mac_name);
}

static int crypto_ccm_base_create(struct crypto_template *tmpl,
				  struct rtattr **tb)
{
	const char *ctr_name;
	const char *mac_name;

	ctr_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(ctr_name))
		return PTR_ERR(ctr_name);

	mac_name = crypto_attr_alg_name(tb[2]);
	if (IS_ERR(mac_name))
		return PTR_ERR(mac_name);

	return crypto_ccm_create_common(tmpl, tb, ctr_name, mac_name);
}

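/*
 * RFC 4309 (CCM in IPsec ESP): the last three bytes of the key material are
 * the salt, which becomes the fixed part of the nonce; the remainder is the
 * actual key handed down to the inner CCM instance.
 */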
static int crypto_rfc4309_setkey(struct crypto_aead *parent, const u8 *key,
				 unsigned int keylen)
{
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent);
	struct crypto_aead *child = ctx->child;

	if (keylen < 3)
		return -EINVAL;

	keylen -= 3;
	memcpy(ctx->nonce, key + keylen, 3);

	crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
				     CRYPTO_TFM_REQ_MASK);
	return crypto_aead_setkey(child, key, keylen);
}

static int crypto_rfc4309_setauthsize(struct crypto_aead *parent,
				      unsigned int authsize)
{
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent);

	switch (authsize) {
	case 8:
	case 12:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return crypto_aead_setauthsize(ctx->child, authsize);
}

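/*
 * Build the inner CCM request.  The 16-byte IV is the flags byte (L' = 3,
 * i.e. a 4-byte counter), the 3-byte salt and the caller's 8-byte per-packet
 * IV.  The trailing 8 bytes of the associated data carry that same IV, so
 * they are dropped and the remaining AAD is bounced through a linear buffer.
 */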
static struct aead_request *crypto_rfc4309_crypt(struct aead_request *req)
{
	struct crypto_rfc4309_req_ctx *rctx = aead_request_ctx(req);
	struct aead_request *subreq = &rctx->subreq;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_aead *child = ctx->child;
	struct scatterlist *sg;
	u8 *iv = PTR_ALIGN((u8 *)(subreq + 1) + crypto_aead_reqsize(child),
			   crypto_aead_alignmask(child) + 1);

	/* L' */
	iv[0] = 3;

	memcpy(iv + 1, ctx->nonce, 3);
	memcpy(iv + 4, req->iv, 8);

	scatterwalk_map_and_copy(iv + 16, req->src, 0, req->assoclen - 8, 0);

	sg_init_table(rctx->src, 3);
	sg_set_buf(rctx->src, iv + 16, req->assoclen - 8);
	sg = scatterwalk_ffwd(rctx->src + 1, req->src, req->assoclen);
	if (sg != rctx->src + 1)
		sg_chain(rctx->src, 2, sg);

	if (req->src != req->dst) {
		sg_init_table(rctx->dst, 3);
		sg_set_buf(rctx->dst, iv + 16, req->assoclen - 8);
		sg = scatterwalk_ffwd(rctx->dst + 1, req->dst, req->assoclen);
		if (sg != rctx->dst + 1)
			sg_chain(rctx->dst, 2, sg);
	}

	aead_request_set_tfm(subreq, child);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, rctx->src,
			       req->src == req->dst ? rctx->src : rctx->dst,
			       req->cryptlen, iv);
	aead_request_set_ad(subreq, req->assoclen - 8);

	return subreq;
}

static int crypto_rfc4309_encrypt(struct aead_request *req)
{
	if (req->assoclen != 16 && req->assoclen != 20)
		return -EINVAL;

	req = crypto_rfc4309_crypt(req);

	return crypto_aead_encrypt(req);
}

static int crypto_rfc4309_decrypt(struct aead_request *req)
{
	if (req->assoclen != 16 && req->assoclen != 20)
		return -EINVAL;

	req = crypto_rfc4309_crypt(req);

	return crypto_aead_decrypt(req);
}

static int crypto_rfc4309_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct crypto_aead_spawn *spawn = aead_instance_ctx(inst);
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *aead;
	unsigned long align;

	aead = crypto_spawn_aead(spawn);
	if (IS_ERR(aead))
		return PTR_ERR(aead);

	ctx->child = aead;

	align = crypto_aead_alignmask(aead);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	crypto_aead_set_reqsize(
		tfm,
		sizeof(struct crypto_rfc4309_req_ctx) +
		ALIGN(crypto_aead_reqsize(aead), crypto_tfm_ctx_alignment()) +
		align + 32);

	return 0;
}

static void crypto_rfc4309_exit_tfm(struct crypto_aead *tfm)
{
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
}

static void crypto_rfc4309_free(struct aead_instance *inst)
{
	crypto_drop_aead(aead_instance_ctx(inst));
	kfree(inst);
}

static int crypto_rfc4309_create(struct crypto_template *tmpl,
				 struct rtattr **tb)
{
	u32 mask;
	struct aead_instance *inst;
	struct crypto_aead_spawn *spawn;
	struct aead_alg *alg;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_AEAD, &mask);
	if (err)
		return err;

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	spawn = aead_instance_ctx(inst);
	err = crypto_grab_aead(spawn, aead_crypto_instance(inst),
			       crypto_attr_alg_name(tb[1]), 0, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_aead_alg(spawn);

	err = -EINVAL;

	/* We only support 16-byte blocks. */
	if (crypto_aead_alg_ivsize(alg) != 16)
		goto err_free_inst;

	/* Not a stream cipher? */
	if (alg->base.cra_blocksize != 1)
		goto err_free_inst;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4309(%s)", alg->base.cra_name) >=
	    CRYPTO_MAX_ALG_NAME ||
	    snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4309(%s)", alg->base.cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;

	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

	inst->alg.ivsize = 8;
	inst->alg.chunksize = crypto_aead_alg_chunksize(alg);
	inst->alg.maxauthsize = 16;

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc4309_ctx);

	inst->alg.init = crypto_rfc4309_init_tfm;
	inst->alg.exit = crypto_rfc4309_exit_tfm;

	inst->alg.setkey = crypto_rfc4309_setkey;
	inst->alg.setauthsize = crypto_rfc4309_setauthsize;
	inst->alg.encrypt = crypto_rfc4309_encrypt;
	inst->alg.decrypt = crypto_rfc4309_decrypt;

	inst->free = crypto_rfc4309_free;

	err = aead_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		crypto_rfc4309_free(inst);
	}
	return err;
}

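/*
 * Plain CBC-MAC exposed as a block-only shash: the digest state is a single
 * block of the underlying cipher.  update() absorbs whole blocks (XOR, then
 * encrypt) and returns the number of leftover bytes to the caller; finup()
 * folds in a final partial block (implicitly zero padded) before emitting
 * the digest.
 */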
static int crypto_cbcmac_digest_setkey(struct crypto_shash *parent,
				       const u8 *inkey, unsigned int keylen)
{
	struct cbcmac_tfm_ctx *ctx = crypto_shash_ctx(parent);

	return crypto_cipher_setkey(ctx->child, inkey, keylen);
}

static int crypto_cbcmac_digest_init(struct shash_desc *pdesc)
{
	int bs = crypto_shash_digestsize(pdesc->tfm);
	u8 *dg = shash_desc_ctx(pdesc);

	memset(dg, 0, bs);
	return 0;
}

static int crypto_cbcmac_digest_update(struct shash_desc *pdesc, const u8 *p,
				       unsigned int len)
{
	struct crypto_shash *parent = pdesc->tfm;
	struct cbcmac_tfm_ctx *tctx = crypto_shash_ctx(parent);
	struct crypto_cipher *tfm = tctx->child;
	int bs = crypto_shash_digestsize(parent);
	u8 *dg = shash_desc_ctx(pdesc);

	do {
		crypto_xor(dg, p, bs);
		crypto_cipher_encrypt_one(tfm, dg, dg);
		p += bs;
		len -= bs;
	} while (len >= bs);
	return len;
}

static int crypto_cbcmac_digest_finup(struct shash_desc *pdesc, const u8 *src,
				      unsigned int len, u8 *out)
{
	struct crypto_shash *parent = pdesc->tfm;
	struct cbcmac_tfm_ctx *tctx = crypto_shash_ctx(parent);
	struct crypto_cipher *tfm = tctx->child;
	int bs = crypto_shash_digestsize(parent);
	u8 *dg = shash_desc_ctx(pdesc);

	if (len) {
		crypto_xor(dg, src, len);
		crypto_cipher_encrypt_one(tfm, out, dg);
		return 0;
	}
	memcpy(out, dg, bs);
	return 0;
}

static int cbcmac_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_cipher *cipher;
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
	struct crypto_cipher_spawn *spawn = crypto_instance_ctx(inst);
	struct cbcmac_tfm_ctx *ctx = crypto_tfm_ctx(tfm);

	cipher = crypto_spawn_cipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	return 0;
}

static void cbcmac_exit_tfm(struct crypto_tfm *tfm)
{
	struct cbcmac_tfm_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(ctx->child);
}

static int cbcmac_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct shash_instance *inst;
	struct crypto_cipher_spawn *spawn;
	struct crypto_alg *alg;
	u32 mask;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH, &mask);
	if (err)
		return err;

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;
	spawn = shash_instance_ctx(inst);

	err = crypto_grab_cipher(spawn, shash_crypto_instance(inst),
				 crypto_attr_alg_name(tb[1]), 0, mask);
	if (err)
		goto err_free_inst;
	alg = crypto_spawn_cipher_alg(spawn);

	err = crypto_inst_setname(shash_crypto_instance(inst), tmpl->name, alg);
	if (err)
		goto err_free_inst;

	inst->alg.base.cra_priority = alg->cra_priority;
	inst->alg.base.cra_blocksize = alg->cra_blocksize;

	inst->alg.digestsize = alg->cra_blocksize;
	inst->alg.descsize = alg->cra_blocksize;

	inst->alg.base.cra_flags = CRYPTO_AHASH_ALG_BLOCK_ONLY;
	inst->alg.base.cra_ctxsize = sizeof(struct cbcmac_tfm_ctx);
	inst->alg.base.cra_init = cbcmac_init_tfm;
	inst->alg.base.cra_exit = cbcmac_exit_tfm;

	inst->alg.init = crypto_cbcmac_digest_init;
	inst->alg.update = crypto_cbcmac_digest_update;
	inst->alg.finup = crypto_cbcmac_digest_finup;
	inst->alg.setkey = crypto_cbcmac_digest_setkey;

	inst->free = shash_free_singlespawn_instance;

	err = shash_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		shash_free_singlespawn_instance(inst);
	}
	return err;
}

static struct crypto_template crypto_ccm_tmpls[] = {
	{
		.name = "cbcmac",
		.create = cbcmac_create,
		.module = THIS_MODULE,
	}, {
		.name = "ccm_base",
		.create = crypto_ccm_base_create,
		.module = THIS_MODULE,
	}, {
		.name = "ccm",
		.create = crypto_ccm_create,
		.module = THIS_MODULE,
	}, {
		.name = "rfc4309",
		.create = crypto_rfc4309_create,
		.module = THIS_MODULE,
	},
};

static int __init crypto_ccm_module_init(void)
{
	return crypto_register_templates(crypto_ccm_tmpls,
					 ARRAY_SIZE(crypto_ccm_tmpls));
}

static void __exit crypto_ccm_module_exit(void)
{
	crypto_unregister_templates(crypto_ccm_tmpls,
				    ARRAY_SIZE(crypto_ccm_tmpls));
}

module_init(crypto_ccm_module_init);
module_exit(crypto_ccm_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Counter with CBC MAC");
MODULE_ALIAS_CRYPTO("ccm_base");
MODULE_ALIAS_CRYPTO("rfc4309");
MODULE_ALIAS_CRYPTO("ccm");
MODULE_ALIAS_CRYPTO("cbcmac");
MODULE_IMPORT_NS("CRYPTO_INTERNAL");