  1. /*
  2. * CCM: Counter with CBC-MAC
  3. *
  4. * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
  5. *
  6. * This program is free software; you can redistribute it and/or modify it
  7. * under the terms of the GNU General Public License as published by the Free
  8. * Software Foundation; either version 2 of the License, or (at your option)
  9. * any later version.
  10. *
  11. */
  12. #include <crypto/internal/aead.h>
  13. #include <crypto/internal/skcipher.h>
  14. #include <crypto/scatterwalk.h>
  15. #include <linux/err.h>
  16. #include <linux/init.h>
  17. #include <linux/kernel.h>
  18. #include <linux/module.h>
  19. #include <linux/slab.h>
  20. #include "internal.h"
/*
 * Per-instance context: the two sub-algorithm spawns a ccm(...) instance
 * is built from -- a CTR-mode skcipher for payload encryption and the
 * underlying raw block cipher used to compute the CBC-MAC.
 */
struct ccm_instance_ctx {
	struct crypto_skcipher_spawn ctr;
	struct crypto_spawn cipher;
};
/* Per-transform context: the instantiated sub-transforms of one ccm tfm. */
struct crypto_ccm_ctx {
	struct crypto_cipher *cipher;	/* raw block cipher for the CBC-MAC */
	struct crypto_ablkcipher *ctr;	/* CTR mode for tag + payload */
};
/* rfc4309 wrapper context: child CCM aead plus the keyed 3-byte salt. */
struct crypto_rfc4309_ctx {
	struct crypto_aead *child;
	u8 nonce[3];	/* salt: first 3 bytes of the CCM nonce (RFC 4309) */
};
/* Per-request context for the rfc4309 wrapper. */
struct crypto_rfc4309_req_ctx {
	struct scatterlist src[3];	/* rebuilt source: bounced AD | payload */
	struct scatterlist dst[3];	/* rebuilt destination, if distinct */
	struct aead_request subreq;	/* request forwarded to the child aead */
};
/* Per-request private state for ccm itself. */
struct crypto_ccm_req_priv_ctx {
	u8 odata[16];		/* running CBC-MAC value (starts from B_0) */
	u8 idata[16];		/* staging buffer for partial MAC blocks */
	u8 auth_tag[16];	/* tag extracted from the input on decryption */
	u32 ilen;		/* number of bytes buffered in idata */
	u32 flags;		/* saved request flags, used by crypto_yield() */
	struct scatterlist src[3];	/* 16-byte tag block chained before payload */
	struct scatterlist dst[3];
	struct ablkcipher_request abreq;	/* CTR subrequest; kept last so its
						 * request context can trail this
						 * struct (see reqsize setup) */
};
  48. static inline struct crypto_ccm_req_priv_ctx *crypto_ccm_reqctx(
  49. struct aead_request *req)
  50. {
  51. unsigned long align = crypto_aead_alignmask(crypto_aead_reqtfm(req));
  52. return (void *)PTR_ALIGN((u8 *)aead_request_ctx(req), align + 1);
  53. }
  54. static int set_msg_len(u8 *block, unsigned int msglen, int csize)
  55. {
  56. __be32 data;
  57. memset(block, 0, csize);
  58. block += csize;
  59. if (csize >= 4)
  60. csize = 4;
  61. else if (msglen > (1 << (8 * csize)))
  62. return -EOVERFLOW;
  63. data = cpu_to_be32(msglen);
  64. memcpy(block - csize, (u8 *)&data + 4 - csize, csize);
  65. return 0;
  66. }
  67. static int crypto_ccm_setkey(struct crypto_aead *aead, const u8 *key,
  68. unsigned int keylen)
  69. {
  70. struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
  71. struct crypto_ablkcipher *ctr = ctx->ctr;
  72. struct crypto_cipher *tfm = ctx->cipher;
  73. int err = 0;
  74. crypto_ablkcipher_clear_flags(ctr, CRYPTO_TFM_REQ_MASK);
  75. crypto_ablkcipher_set_flags(ctr, crypto_aead_get_flags(aead) &
  76. CRYPTO_TFM_REQ_MASK);
  77. err = crypto_ablkcipher_setkey(ctr, key, keylen);
  78. crypto_aead_set_flags(aead, crypto_ablkcipher_get_flags(ctr) &
  79. CRYPTO_TFM_RES_MASK);
  80. if (err)
  81. goto out;
  82. crypto_cipher_clear_flags(tfm, CRYPTO_TFM_REQ_MASK);
  83. crypto_cipher_set_flags(tfm, crypto_aead_get_flags(aead) &
  84. CRYPTO_TFM_REQ_MASK);
  85. err = crypto_cipher_setkey(tfm, key, keylen);
  86. crypto_aead_set_flags(aead, crypto_cipher_get_flags(tfm) &
  87. CRYPTO_TFM_RES_MASK);
  88. out:
  89. return err;
  90. }
  91. static int crypto_ccm_setauthsize(struct crypto_aead *tfm,
  92. unsigned int authsize)
  93. {
  94. switch (authsize) {
  95. case 4:
  96. case 6:
  97. case 8:
  98. case 10:
  99. case 12:
  100. case 14:
  101. case 16:
  102. break;
  103. default:
  104. return -EINVAL;
  105. }
  106. return 0;
  107. }
  108. static int format_input(u8 *info, struct aead_request *req,
  109. unsigned int cryptlen)
  110. {
  111. struct crypto_aead *aead = crypto_aead_reqtfm(req);
  112. unsigned int lp = req->iv[0];
  113. unsigned int l = lp + 1;
  114. unsigned int m;
  115. m = crypto_aead_authsize(aead);
  116. memcpy(info, req->iv, 16);
  117. /* format control info per RFC 3610 and
  118. * NIST Special Publication 800-38C
  119. */
  120. *info |= (8 * ((m - 2) / 2));
  121. if (req->assoclen)
  122. *info |= 64;
  123. return set_msg_len(info + 16 - l, cryptlen, l);
  124. }
  125. static int format_adata(u8 *adata, unsigned int a)
  126. {
  127. int len = 0;
  128. /* add control info for associated data
  129. * RFC 3610 and NIST Special Publication 800-38C
  130. */
  131. if (a < 65280) {
  132. *(__be16 *)adata = cpu_to_be16(a);
  133. len = 2;
  134. } else {
  135. *(__be16 *)adata = cpu_to_be16(0xfffe);
  136. *(__be32 *)&adata[2] = cpu_to_be32(a);
  137. len = 6;
  138. }
  139. return len;
  140. }
/*
 * Fold @n bytes at @data into the running CBC-MAC kept in pctx->odata.
 * Input need not be block-aligned: a partial block is staged in
 * pctx->idata (pctx->ilen bytes already buffered) until it can be
 * completed by a later call.
 */
static void compute_mac(struct crypto_cipher *tfm, u8 *data, int n,
			struct crypto_ccm_req_priv_ctx *pctx)
{
	unsigned int bs = 16;
	u8 *odata = pctx->odata;
	u8 *idata = pctx->idata;
	int datalen, getlen;

	datalen = n;

	/* first time in here, block may be partially filled. */
	getlen = bs - pctx->ilen;
	if (datalen >= getlen) {
		/* complete the buffered block, then absorb it */
		memcpy(idata + pctx->ilen, data, getlen);
		crypto_xor(odata, idata, bs);
		crypto_cipher_encrypt_one(tfm, odata, odata);
		datalen -= getlen;
		data += getlen;
		pctx->ilen = 0;
	}

	/* now encrypt rest of data */
	while (datalen >= bs) {
		crypto_xor(odata, data, bs);
		crypto_cipher_encrypt_one(tfm, odata, odata);
		datalen -= bs;
		data += bs;
	}

	/*
	 * check and see if there's leftover data that wasn't
	 * enough to fill a block; keep it buffered for the next call.
	 */
	if (datalen) {
		memcpy(idata + pctx->ilen, data, datalen);
		pctx->ilen += datalen;
	}
}
/*
 * Walk @len bytes of scatterlist @sg and feed them to the CBC-MAC.
 * Any trailing partial block left in pctx->idata is zero-padded and
 * absorbed before returning, so the MAC is block-complete afterwards.
 */
static void get_data_to_compute(struct crypto_cipher *tfm,
				struct crypto_ccm_req_priv_ctx *pctx,
				struct scatterlist *sg, unsigned int len)
{
	struct scatter_walk walk;
	u8 *data_src;
	int n;

	scatterwalk_start(&walk, sg);

	while (len) {
		n = scatterwalk_clamp(&walk, len);
		if (!n) {
			/* current sg entry exhausted: step to the next one */
			scatterwalk_start(&walk, sg_next(walk.sg));
			n = scatterwalk_clamp(&walk, len);
		}
		data_src = scatterwalk_map(&walk);

		compute_mac(tfm, data_src, n, pctx);
		len -= n;

		scatterwalk_unmap(data_src);
		scatterwalk_advance(&walk, n);
		scatterwalk_done(&walk, 0, len);
		if (len)
			crypto_yield(pctx->flags);
	}

	/* any leftover needs padding and then encrypted */
	if (pctx->ilen) {
		int padlen;
		u8 *odata = pctx->odata;
		u8 *idata = pctx->idata;

		padlen = 16 - pctx->ilen;
		memset(idata + pctx->ilen, 0, padlen);
		crypto_xor(odata, idata, 16);

		crypto_cipher_encrypt_one(tfm, odata, odata);
		pctx->ilen = 0;
	}
}
/*
 * Compute the CCM authentication value over B_0, the length-prefixed
 * associated data, and the plaintext in @plain.  The unencrypted MAC
 * is left in pctx->odata; the CTR pass later encrypts it into the tag.
 */
static int crypto_ccm_auth(struct aead_request *req, struct scatterlist *plain,
			   unsigned int cryptlen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct crypto_cipher *cipher = ctx->cipher;
	unsigned int assoclen = req->assoclen;
	u8 *odata = pctx->odata;
	u8 *idata = pctx->idata;
	int err;

	/* format control data for input */
	err = format_input(odata, req, cryptlen);
	if (err)
		goto out;

	/* encrypt first block to use as start in computing mac */
	crypto_cipher_encrypt_one(cipher, odata, odata);

	/* format associated data and compute into mac */
	if (assoclen) {
		/* AD length header is pre-buffered, AD bytes follow from sg */
		pctx->ilen = format_adata(idata, assoclen);
		get_data_to_compute(cipher, pctx, req->src, req->assoclen);
	} else {
		pctx->ilen = 0;
	}

	/* compute plaintext into mac */
	if (cryptlen)
		get_data_to_compute(cipher, pctx, plain, cryptlen);

out:
	return err;
}
  239. static void crypto_ccm_encrypt_done(struct crypto_async_request *areq, int err)
  240. {
  241. struct aead_request *req = areq->data;
  242. struct crypto_aead *aead = crypto_aead_reqtfm(req);
  243. struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
  244. u8 *odata = pctx->odata;
  245. if (!err)
  246. scatterwalk_map_and_copy(odata, req->dst,
  247. req->assoclen + req->cryptlen,
  248. crypto_aead_authsize(aead), 1);
  249. aead_request_complete(req, err);
  250. }
  251. static inline int crypto_ccm_check_iv(const u8 *iv)
  252. {
  253. /* 2 <= L <= 8, so 1 <= L' <= 7. */
  254. if (1 > iv[0] || iv[0] > 7)
  255. return -EINVAL;
  256. return 0;
  257. }
/*
 * Common encrypt/decrypt setup: validate the IV, zero its counter
 * field, and build 3-entry scatterlists that prepend the 16-byte @tag
 * block to the payload portion of req->src (and req->dst when they
 * differ), so one CTR pass can cover tag + payload.
 */
static int crypto_ccm_init_crypt(struct aead_request *req, u8 *tag)
{
	struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
	struct scatterlist *sg;
	u8 *iv = req->iv;
	int err;

	err = crypto_ccm_check_iv(iv);
	if (err)
		return err;

	pctx->flags = aead_request_flags(req);

	/*
	 * Note: rfc 3610 and NIST 800-38C require counter of
	 * zero to encrypt auth tag.
	 */
	memset(iv + 15 - iv[0], 0, iv[0] + 1);

	sg_init_table(pctx->src, 3);
	sg_set_buf(pctx->src, tag, 16);
	sg = scatterwalk_ffwd(pctx->src + 1, req->src, req->assoclen);
	if (sg != pctx->src + 1)
		sg_chain(pctx->src, 2, sg);

	if (req->src != req->dst) {
		sg_init_table(pctx->dst, 3);
		sg_set_buf(pctx->dst, tag, 16);
		sg = scatterwalk_ffwd(pctx->dst + 1, req->dst, req->assoclen);
		if (sg != pctx->dst + 1)
			sg_chain(pctx->dst, 2, sg);
	}

	return 0;
}
  286. static int crypto_ccm_encrypt(struct aead_request *req)
  287. {
  288. struct crypto_aead *aead = crypto_aead_reqtfm(req);
  289. struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
  290. struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
  291. struct ablkcipher_request *abreq = &pctx->abreq;
  292. struct scatterlist *dst;
  293. unsigned int cryptlen = req->cryptlen;
  294. u8 *odata = pctx->odata;
  295. u8 *iv = req->iv;
  296. int err;
  297. err = crypto_ccm_init_crypt(req, odata);
  298. if (err)
  299. return err;
  300. err = crypto_ccm_auth(req, sg_next(pctx->src), cryptlen);
  301. if (err)
  302. return err;
  303. dst = pctx->src;
  304. if (req->src != req->dst)
  305. dst = pctx->dst;
  306. ablkcipher_request_set_tfm(abreq, ctx->ctr);
  307. ablkcipher_request_set_callback(abreq, pctx->flags,
  308. crypto_ccm_encrypt_done, req);
  309. ablkcipher_request_set_crypt(abreq, pctx->src, dst, cryptlen + 16, iv);
  310. err = crypto_ablkcipher_encrypt(abreq);
  311. if (err)
  312. return err;
  313. /* copy authtag to end of dst */
  314. scatterwalk_map_and_copy(odata, sg_next(dst), cryptlen,
  315. crypto_aead_authsize(aead), 1);
  316. return err;
  317. }
  318. static void crypto_ccm_decrypt_done(struct crypto_async_request *areq,
  319. int err)
  320. {
  321. struct aead_request *req = areq->data;
  322. struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
  323. struct crypto_aead *aead = crypto_aead_reqtfm(req);
  324. unsigned int authsize = crypto_aead_authsize(aead);
  325. unsigned int cryptlen = req->cryptlen - authsize;
  326. struct scatterlist *dst;
  327. pctx->flags = 0;
  328. dst = sg_next(req->src == req->dst ? pctx->src : pctx->dst);
  329. if (!err) {
  330. err = crypto_ccm_auth(req, dst, cryptlen);
  331. if (!err && crypto_memneq(pctx->auth_tag, pctx->odata, authsize))
  332. err = -EBADMSG;
  333. }
  334. aead_request_complete(req, err);
  335. }
  336. static int crypto_ccm_decrypt(struct aead_request *req)
  337. {
  338. struct crypto_aead *aead = crypto_aead_reqtfm(req);
  339. struct crypto_ccm_ctx *ctx = crypto_aead_ctx(aead);
  340. struct crypto_ccm_req_priv_ctx *pctx = crypto_ccm_reqctx(req);
  341. struct ablkcipher_request *abreq = &pctx->abreq;
  342. struct scatterlist *dst;
  343. unsigned int authsize = crypto_aead_authsize(aead);
  344. unsigned int cryptlen = req->cryptlen;
  345. u8 *authtag = pctx->auth_tag;
  346. u8 *odata = pctx->odata;
  347. u8 *iv = req->iv;
  348. int err;
  349. cryptlen -= authsize;
  350. err = crypto_ccm_init_crypt(req, authtag);
  351. if (err)
  352. return err;
  353. scatterwalk_map_and_copy(authtag, sg_next(pctx->src), cryptlen,
  354. authsize, 0);
  355. dst = pctx->src;
  356. if (req->src != req->dst)
  357. dst = pctx->dst;
  358. ablkcipher_request_set_tfm(abreq, ctx->ctr);
  359. ablkcipher_request_set_callback(abreq, pctx->flags,
  360. crypto_ccm_decrypt_done, req);
  361. ablkcipher_request_set_crypt(abreq, pctx->src, dst, cryptlen + 16, iv);
  362. err = crypto_ablkcipher_decrypt(abreq);
  363. if (err)
  364. return err;
  365. err = crypto_ccm_auth(req, sg_next(dst), cryptlen);
  366. if (err)
  367. return err;
  368. /* verify */
  369. if (crypto_memneq(authtag, odata, authsize))
  370. return -EBADMSG;
  371. return err;
  372. }
  373. static int crypto_ccm_init_tfm(struct crypto_aead *tfm)
  374. {
  375. struct aead_instance *inst = aead_alg_instance(tfm);
  376. struct ccm_instance_ctx *ictx = aead_instance_ctx(inst);
  377. struct crypto_ccm_ctx *ctx = crypto_aead_ctx(tfm);
  378. struct crypto_cipher *cipher;
  379. struct crypto_ablkcipher *ctr;
  380. unsigned long align;
  381. int err;
  382. cipher = crypto_spawn_cipher(&ictx->cipher);
  383. if (IS_ERR(cipher))
  384. return PTR_ERR(cipher);
  385. ctr = crypto_spawn_skcipher(&ictx->ctr);
  386. err = PTR_ERR(ctr);
  387. if (IS_ERR(ctr))
  388. goto err_free_cipher;
  389. ctx->cipher = cipher;
  390. ctx->ctr = ctr;
  391. align = crypto_aead_alignmask(tfm);
  392. align &= ~(crypto_tfm_ctx_alignment() - 1);
  393. crypto_aead_set_reqsize(
  394. tfm,
  395. align + sizeof(struct crypto_ccm_req_priv_ctx) +
  396. crypto_ablkcipher_reqsize(ctr));
  397. return 0;
  398. err_free_cipher:
  399. crypto_free_cipher(cipher);
  400. return err;
  401. }
  402. static void crypto_ccm_exit_tfm(struct crypto_aead *tfm)
  403. {
  404. struct crypto_ccm_ctx *ctx = crypto_aead_ctx(tfm);
  405. crypto_free_cipher(ctx->cipher);
  406. crypto_free_ablkcipher(ctx->ctr);
  407. }
  408. static void crypto_ccm_free(struct aead_instance *inst)
  409. {
  410. struct ccm_instance_ctx *ctx = aead_instance_ctx(inst);
  411. crypto_drop_spawn(&ctx->cipher);
  412. crypto_drop_skcipher(&ctx->ctr);
  413. kfree(inst);
  414. }
/*
 * Shared instance constructor for the "ccm" and "ccm_base" templates.
 * Looks up / grabs the raw block cipher and the CTR skcipher, checks
 * that they are usable for CCM (16-byte block cipher; stream-like CTR
 * with a 16-byte IV), fills in the aead_alg and registers it.
 */
static int crypto_ccm_create_common(struct crypto_template *tmpl,
				    struct rtattr **tb,
				    const char *full_name,
				    const char *ctr_name,
				    const char *cipher_name)
{
	struct crypto_attr_type *algt;
	struct aead_instance *inst;
	struct crypto_alg *ctr;
	struct crypto_alg *cipher;
	struct ccm_instance_ctx *ictx;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return -EINVAL;

	cipher = crypto_alg_mod_lookup(cipher_name, CRYPTO_ALG_TYPE_CIPHER,
				       CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	/* CCM is only defined for 128-bit block ciphers */
	err = -EINVAL;
	if (cipher->cra_blocksize != 16)
		goto out_put_cipher;

	inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL);
	err = -ENOMEM;
	if (!inst)
		goto out_put_cipher;

	ictx = aead_instance_ctx(inst);
	err = crypto_init_spawn(&ictx->cipher, cipher,
				aead_crypto_instance(inst),
				CRYPTO_ALG_TYPE_MASK);
	if (err)
		goto err_free_inst;

	crypto_set_skcipher_spawn(&ictx->ctr, aead_crypto_instance(inst));
	err = crypto_grab_skcipher(&ictx->ctr, ctr_name, 0,
				   crypto_requires_sync(algt->type,
							algt->mask));
	if (err)
		goto err_drop_cipher;

	ctr = crypto_skcipher_spawn_alg(&ictx->ctr);

	/* Not a stream cipher? */
	err = -EINVAL;
	if (ctr->cra_blocksize != 1)
		goto err_drop_ctr;

	/* We want the real thing! */
	if (ctr->cra_ablkcipher.ivsize != 16)
		goto err_drop_ctr;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "ccm_base(%s,%s)", ctr->cra_driver_name,
		     cipher->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_drop_ctr;

	/* full_name was length-checked by the caller */
	memcpy(inst->alg.base.cra_name, full_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.base.cra_flags = ctr->cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = (cipher->cra_priority +
				       ctr->cra_priority) / 2;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = cipher->cra_alignmask |
				       ctr->cra_alignmask |
				       (__alignof__(u32) - 1);
	inst->alg.ivsize = 16;
	inst->alg.maxauthsize = 16;
	inst->alg.base.cra_ctxsize = sizeof(struct crypto_ccm_ctx);
	inst->alg.init = crypto_ccm_init_tfm;
	inst->alg.exit = crypto_ccm_exit_tfm;
	inst->alg.setkey = crypto_ccm_setkey;
	inst->alg.setauthsize = crypto_ccm_setauthsize;
	inst->alg.encrypt = crypto_ccm_encrypt;
	inst->alg.decrypt = crypto_ccm_decrypt;
	inst->free = crypto_ccm_free;

	err = aead_register_instance(tmpl, inst);
	if (err)
		goto err_drop_ctr;

out_put_cipher:
	/* drop the lookup ref; the spawn holds its own from here on */
	crypto_mod_put(cipher);
	return err;

err_drop_ctr:
	crypto_drop_skcipher(&ictx->ctr);
err_drop_cipher:
	crypto_drop_spawn(&ictx->cipher);
err_free_inst:
	kfree(inst);
	goto out_put_cipher;
}
  500. static int crypto_ccm_create(struct crypto_template *tmpl, struct rtattr **tb)
  501. {
  502. const char *cipher_name;
  503. char ctr_name[CRYPTO_MAX_ALG_NAME];
  504. char full_name[CRYPTO_MAX_ALG_NAME];
  505. cipher_name = crypto_attr_alg_name(tb[1]);
  506. if (IS_ERR(cipher_name))
  507. return PTR_ERR(cipher_name);
  508. if (snprintf(ctr_name, CRYPTO_MAX_ALG_NAME, "ctr(%s)",
  509. cipher_name) >= CRYPTO_MAX_ALG_NAME)
  510. return -ENAMETOOLONG;
  511. if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "ccm(%s)", cipher_name) >=
  512. CRYPTO_MAX_ALG_NAME)
  513. return -ENAMETOOLONG;
  514. return crypto_ccm_create_common(tmpl, tb, full_name, ctr_name,
  515. cipher_name);
  516. }
/* "ccm(cipher)" template */
static struct crypto_template crypto_ccm_tmpl = {
	.name = "ccm",
	.create = crypto_ccm_create,
	.module = THIS_MODULE,
};
  522. static int crypto_ccm_base_create(struct crypto_template *tmpl,
  523. struct rtattr **tb)
  524. {
  525. const char *ctr_name;
  526. const char *cipher_name;
  527. char full_name[CRYPTO_MAX_ALG_NAME];
  528. ctr_name = crypto_attr_alg_name(tb[1]);
  529. if (IS_ERR(ctr_name))
  530. return PTR_ERR(ctr_name);
  531. cipher_name = crypto_attr_alg_name(tb[2]);
  532. if (IS_ERR(cipher_name))
  533. return PTR_ERR(cipher_name);
  534. if (snprintf(full_name, CRYPTO_MAX_ALG_NAME, "ccm_base(%s,%s)",
  535. ctr_name, cipher_name) >= CRYPTO_MAX_ALG_NAME)
  536. return -ENAMETOOLONG;
  537. return crypto_ccm_create_common(tmpl, tb, full_name, ctr_name,
  538. cipher_name);
  539. }
/* "ccm_base(ctr,cipher)" template */
static struct crypto_template crypto_ccm_base_tmpl = {
	.name = "ccm_base",
	.create = crypto_ccm_base_create,
	.module = THIS_MODULE,
};
  545. static int crypto_rfc4309_setkey(struct crypto_aead *parent, const u8 *key,
  546. unsigned int keylen)
  547. {
  548. struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent);
  549. struct crypto_aead *child = ctx->child;
  550. int err;
  551. if (keylen < 3)
  552. return -EINVAL;
  553. keylen -= 3;
  554. memcpy(ctx->nonce, key + keylen, 3);
  555. crypto_aead_clear_flags(child, CRYPTO_TFM_REQ_MASK);
  556. crypto_aead_set_flags(child, crypto_aead_get_flags(parent) &
  557. CRYPTO_TFM_REQ_MASK);
  558. err = crypto_aead_setkey(child, key, keylen);
  559. crypto_aead_set_flags(parent, crypto_aead_get_flags(child) &
  560. CRYPTO_TFM_RES_MASK);
  561. return err;
  562. }
  563. static int crypto_rfc4309_setauthsize(struct crypto_aead *parent,
  564. unsigned int authsize)
  565. {
  566. struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(parent);
  567. switch (authsize) {
  568. case 8:
  569. case 12:
  570. case 16:
  571. break;
  572. default:
  573. return -EINVAL;
  574. }
  575. return crypto_aead_setauthsize(ctx->child, authsize);
  576. }
/*
 * Build the child CCM request: synthesize the 16-byte CCM IV
 * (L' = 3 flags octet, 3-byte salt, 8-byte per-packet IV), strip the
 * trailing 8 IV bytes off the associated data by bouncing the leading
 * AD bytes into the buffer behind the IV, and rebuild the src/dst
 * scatterlists around that bounce buffer.  The IV + bounce buffer
 * lives in the aligned tail of the request context (see init_tfm).
 */
static struct aead_request *crypto_rfc4309_crypt(struct aead_request *req)
{
	struct crypto_rfc4309_req_ctx *rctx = aead_request_ctx(req);
	struct aead_request *subreq = &rctx->subreq;
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(aead);
	struct crypto_aead *child = ctx->child;
	struct scatterlist *sg;
	u8 *iv = PTR_ALIGN((u8 *)(subreq + 1) + crypto_aead_reqsize(child),
			   crypto_aead_alignmask(child) + 1);

	/* L' */
	iv[0] = 3;

	memcpy(iv + 1, ctx->nonce, 3);
	memcpy(iv + 4, req->iv, 8);

	/* bounce the first assoclen - 8 AD bytes to the buffer at iv + 16 */
	scatterwalk_map_and_copy(iv + 16, req->src, 0, req->assoclen - 8, 0);

	sg_init_table(rctx->src, 3);
	sg_set_buf(rctx->src, iv + 16, req->assoclen - 8);
	sg = scatterwalk_ffwd(rctx->src + 1, req->src, req->assoclen);
	if (sg != rctx->src + 1)
		sg_chain(rctx->src, 2, sg);

	if (req->src != req->dst) {
		sg_init_table(rctx->dst, 3);
		sg_set_buf(rctx->dst, iv + 16, req->assoclen - 8);
		sg = scatterwalk_ffwd(rctx->dst + 1, req->dst, req->assoclen);
		if (sg != rctx->dst + 1)
			sg_chain(rctx->dst, 2, sg);
	}

	aead_request_set_tfm(subreq, child);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, rctx->src,
			       req->src == req->dst ? rctx->src : rctx->dst,
			       req->cryptlen, iv);
	aead_request_set_ad(subreq, req->assoclen - 8);

	return subreq;
}
  613. static int crypto_rfc4309_encrypt(struct aead_request *req)
  614. {
  615. if (req->assoclen != 16 && req->assoclen != 20)
  616. return -EINVAL;
  617. req = crypto_rfc4309_crypt(req);
  618. return crypto_aead_encrypt(req);
  619. }
  620. static int crypto_rfc4309_decrypt(struct aead_request *req)
  621. {
  622. if (req->assoclen != 16 && req->assoclen != 20)
  623. return -EINVAL;
  624. req = crypto_rfc4309_crypt(req);
  625. return crypto_aead_decrypt(req);
  626. }
  627. static int crypto_rfc4309_init_tfm(struct crypto_aead *tfm)
  628. {
  629. struct aead_instance *inst = aead_alg_instance(tfm);
  630. struct crypto_aead_spawn *spawn = aead_instance_ctx(inst);
  631. struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(tfm);
  632. struct crypto_aead *aead;
  633. unsigned long align;
  634. aead = crypto_spawn_aead(spawn);
  635. if (IS_ERR(aead))
  636. return PTR_ERR(aead);
  637. ctx->child = aead;
  638. align = crypto_aead_alignmask(aead);
  639. align &= ~(crypto_tfm_ctx_alignment() - 1);
  640. crypto_aead_set_reqsize(
  641. tfm,
  642. sizeof(struct crypto_rfc4309_req_ctx) +
  643. ALIGN(crypto_aead_reqsize(aead), crypto_tfm_ctx_alignment()) +
  644. align + 32);
  645. return 0;
  646. }
  647. static void crypto_rfc4309_exit_tfm(struct crypto_aead *tfm)
  648. {
  649. struct crypto_rfc4309_ctx *ctx = crypto_aead_ctx(tfm);
  650. crypto_free_aead(ctx->child);
  651. }
  652. static void crypto_rfc4309_free(struct aead_instance *inst)
  653. {
  654. crypto_drop_aead(aead_instance_ctx(inst));
  655. kfree(inst);
  656. }
/*
 * "rfc4309(ccm(...))" template constructor: wrap an existing CCM
 * instance, checking it looks like a CCM stream AEAD (16-byte IV,
 * blocksize 1) before registering the wrapper.
 */
static int crypto_rfc4309_create(struct crypto_template *tmpl,
				 struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	struct aead_instance *inst;
	struct crypto_aead_spawn *spawn;
	struct aead_alg *alg;
	const char *ccm_name;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
		return -EINVAL;

	ccm_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(ccm_name))
		return PTR_ERR(ccm_name);

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	spawn = aead_instance_ctx(inst);
	crypto_set_aead_spawn(spawn, aead_crypto_instance(inst));
	err = crypto_grab_aead(spawn, ccm_name, 0,
			       crypto_requires_sync(algt->type, algt->mask));
	if (err)
		goto out_free_inst;

	alg = crypto_spawn_aead_alg(spawn);

	err = -EINVAL;

	/* We only support 16-byte blocks. */
	if (crypto_aead_alg_ivsize(alg) != 16)
		goto out_drop_alg;

	/* Not a stream cipher? */
	if (alg->base.cra_blocksize != 1)
		goto out_drop_alg;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4309(%s)", alg->base.cra_name) >=
	    CRYPTO_MAX_ALG_NAME ||
	    snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "rfc4309(%s)", alg->base.cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto out_drop_alg;

	inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC;
	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;
	inst->alg.ivsize = 8;	/* caller supplies only the 64-bit ESP IV */
	inst->alg.maxauthsize = 16;
	inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc4309_ctx);
	inst->alg.init = crypto_rfc4309_init_tfm;
	inst->alg.exit = crypto_rfc4309_exit_tfm;
	inst->alg.setkey = crypto_rfc4309_setkey;
	inst->alg.setauthsize = crypto_rfc4309_setauthsize;
	inst->alg.encrypt = crypto_rfc4309_encrypt;
	inst->alg.decrypt = crypto_rfc4309_decrypt;
	inst->free = crypto_rfc4309_free;

	err = aead_register_instance(tmpl, inst);
	if (err)
		goto out_drop_alg;

out:
	return err;

out_drop_alg:
	crypto_drop_aead(spawn);
out_free_inst:
	kfree(inst);
	goto out;
}
/* "rfc4309(ccm(...))" template */
static struct crypto_template crypto_rfc4309_tmpl = {
	.name = "rfc4309",
	.create = crypto_rfc4309_create,
	.module = THIS_MODULE,
};
  729. static int __init crypto_ccm_module_init(void)
  730. {
  731. int err;
  732. err = crypto_register_template(&crypto_ccm_base_tmpl);
  733. if (err)
  734. goto out;
  735. err = crypto_register_template(&crypto_ccm_tmpl);
  736. if (err)
  737. goto out_undo_base;
  738. err = crypto_register_template(&crypto_rfc4309_tmpl);
  739. if (err)
  740. goto out_undo_ccm;
  741. out:
  742. return err;
  743. out_undo_ccm:
  744. crypto_unregister_template(&crypto_ccm_tmpl);
  745. out_undo_base:
  746. crypto_unregister_template(&crypto_ccm_base_tmpl);
  747. goto out;
  748. }
  749. static void __exit crypto_ccm_module_exit(void)
  750. {
  751. crypto_unregister_template(&crypto_rfc4309_tmpl);
  752. crypto_unregister_template(&crypto_ccm_tmpl);
  753. crypto_unregister_template(&crypto_ccm_base_tmpl);
  754. }
module_init(crypto_ccm_module_init);
module_exit(crypto_ccm_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Counter with CBC MAC");
/* aliases so requests for any of the three template names load this module */
MODULE_ALIAS_CRYPTO("ccm_base");
MODULE_ALIAS_CRYPTO("rfc4309");
MODULE_ALIAS_CRYPTO("ccm");