/*
 * aes-ce-ccm-glue.c - AES-CCM transform for ARMv8 with Crypto Extensions
 *
 * Copyright (C) 2013 - 2014 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
  10. #include <asm/neon.h>
  11. #include <asm/unaligned.h>
  12. #include <crypto/aes.h>
  13. #include <crypto/algapi.h>
  14. #include <crypto/scatterwalk.h>
  15. #include <crypto/internal/aead.h>
  16. #include <linux/module.h>
  17. #include "aes-ce-setkey.h"
  18. static int num_rounds(struct crypto_aes_ctx *ctx)
  19. {
  20. /*
  21. * # of rounds specified by AES:
  22. * 128 bit key 10 rounds
  23. * 192 bit key 12 rounds
  24. * 256 bit key 14 rounds
  25. * => n byte key => 6 + (n/4) rounds
  26. */
  27. return 6 + ctx->key_length / 4;
  28. }
/*
 * Inner-loop primitives implemented in assembler using the ARMv8 Crypto
 * Extensions (presumably in the accompanying aes-ce-ccm-core.S — confirm).
 * All of them must be called between kernel_neon_begin()/kernel_neon_end().
 */
asmlinkage void ce_aes_ccm_auth_data(u8 mac[], u8 const in[], u32 abytes,
				     u32 *macp, u32 const rk[], u32 rounds);

asmlinkage void ce_aes_ccm_encrypt(u8 out[], u8 const in[], u32 cbytes,
				   u32 const rk[], u32 rounds, u8 mac[],
				   u8 ctr[]);

asmlinkage void ce_aes_ccm_decrypt(u8 out[], u8 const in[], u32 cbytes,
				   u32 const rk[], u32 rounds, u8 mac[],
				   u8 ctr[]);

asmlinkage void ce_aes_ccm_final(u8 mac[], u8 const ctr[], u32 const rk[],
				 u32 rounds);
  39. static int ccm_setkey(struct crypto_aead *tfm, const u8 *in_key,
  40. unsigned int key_len)
  41. {
  42. struct crypto_aes_ctx *ctx = crypto_aead_ctx(tfm);
  43. int ret;
  44. ret = ce_aes_expandkey(ctx, in_key, key_len);
  45. if (!ret)
  46. return 0;
  47. tfm->base.crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
  48. return -EINVAL;
  49. }
  50. static int ccm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
  51. {
  52. if ((authsize & 1) || authsize < 4)
  53. return -EINVAL;
  54. return 0;
  55. }
/*
 * Build the CCM B0 block (the CBC-MAC IV) in maciv[] from the request IV
 * and the message length, then turn req->iv into the initial counter
 * block by zeroing its length field.
 * Returns 0, or -EINVAL / -EOVERFLOW on malformed CCM parameters.
 */
static int ccm_init_mac(struct aead_request *req, u8 maciv[], u32 msglen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	/* the last 8 bytes of B0 carry the big-endian message length */
	__be32 *n = (__be32 *)&maciv[AES_BLOCK_SIZE - 8];
	/* iv[0] encodes L - 1, the size of the CCM length field in bytes */
	u32 l = req->iv[0] + 1;

	/* verify that CCM dimension 'L' is set correctly in the IV */
	if (l < 2 || l > 8)
		return -EINVAL;

	/* verify that msglen can in fact be represented in L bytes */
	if (l < 4 && msglen >> (8 * l))
		return -EOVERFLOW;

	/*
	 * Even if the CCM spec allows L values of up to 8, the Linux cryptoapi
	 * uses a u32 type to represent msglen so the top 4 bytes are always 0.
	 */
	n[0] = 0;
	n[1] = cpu_to_be32(msglen);

	/* copy flags byte + nonce; the length field is filled in above */
	memcpy(maciv, req->iv, AES_BLOCK_SIZE - l);

	/*
	 * Meaning of byte 0 according to CCM spec (RFC 3610/NIST 800-38C)
	 * - bits 0..2	: max # of bytes required to represent msglen, minus 1
	 *		  (already set by caller)
	 * - bits 3..5	: size of auth tag (1 => 4 bytes, 2 => 6 bytes, etc)
	 * - bit 6	: indicates presence of authenticate-only data
	 */
	maciv[0] |= (crypto_aead_authsize(aead) - 2) << 2;
	if (req->assoclen)
		maciv[0] |= 0x40;

	/* zero the length field so req->iv becomes the initial CTR block */
	memset(&req->iv[AES_BLOCK_SIZE - l], 0, l);
	return 0;
}
/*
 * Fold the associated data into the CBC-MAC: first feed in the AAD
 * length encoding mandated by RFC 3610, then walk the source
 * scatterlist and pass each mapped chunk to the asm CBC-MAC helper.
 * Must be called with the NEON unit enabled.
 */
static void ccm_calculate_auth_mac(struct aead_request *req, u8 mac[])
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	/* on-stack AAD length header: 2-byte short or 6-byte long encoding */
	struct __packed { __be16 l; __be32 h; u16 len; } ltag;
	struct scatter_walk walk;
	u32 len = req->assoclen;
	u32 macp = 0;	/* running byte offset into the current MAC block */

	/* prepend the AAD with a length tag */
	if (len < 0xff00) {
		/* short form: plain 16-bit big-endian length */
		ltag.l = cpu_to_be16(len);
		ltag.len = 2;
	} else {
		/* long form: 0xfffe marker followed by 32-bit length */
		ltag.l = cpu_to_be16(0xfffe);
		put_unaligned_be32(len, &ltag.h);
		ltag.len = 6;
	}

	ce_aes_ccm_auth_data(mac, (u8 *)&ltag, ltag.len, &macp, ctx->key_enc,
			     num_rounds(ctx));
	scatterwalk_start(&walk, req->src);

	do {
		u32 n = scatterwalk_clamp(&walk, len);
		u8 *p;

		if (!n) {
			/* current sg entry exhausted; advance to the next */
			scatterwalk_start(&walk, sg_next(walk.sg));
			n = scatterwalk_clamp(&walk, len);
		}
		p = scatterwalk_map(&walk);
		ce_aes_ccm_auth_data(mac, p, n, &macp, ctx->key_enc,
				     num_rounds(ctx));
		len -= n;

		scatterwalk_unmap(p);
		scatterwalk_advance(&walk, n);
		scatterwalk_done(&walk, 0, len);
	} while (len);
}
/*
 * AEAD encrypt path: MAC the associated data, CTR-encrypt the plaintext
 * while folding it into the CBC-MAC, then append the auth tag (truncated
 * to the configured authsize) after the ciphertext in dst.
 */
static int ccm_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	struct blkcipher_desc desc = { .info = req->iv };
	struct blkcipher_walk walk;
	struct scatterlist srcbuf[2];
	struct scatterlist dstbuf[2];
	struct scatterlist *src;
	struct scatterlist *dst;
	u8 __aligned(8) mac[AES_BLOCK_SIZE];
	u8 buf[AES_BLOCK_SIZE];
	u32 len = req->cryptlen;
	int err;

	/* build B0 / initial counter block; validates the CCM IV */
	err = ccm_init_mac(req, mac, len);
	if (err)
		return err;

	kernel_neon_begin_partial(6);

	if (req->assoclen)
		ccm_calculate_auth_mac(req, mac);

	/* preserve the original iv for the final round */
	memcpy(buf, req->iv, AES_BLOCK_SIZE);

	/* fast-forward past the AAD so the walk covers only the payload */
	src = scatterwalk_ffwd(srcbuf, req->src, req->assoclen);
	dst = src;
	if (req->src != req->dst)
		dst = scatterwalk_ffwd(dstbuf, req->dst, req->assoclen);

	blkcipher_walk_init(&walk, dst, src, len);
	err = blkcipher_aead_walk_virt_block(&desc, &walk, aead,
					     AES_BLOCK_SIZE);

	while (walk.nbytes) {
		u32 tail = walk.nbytes % AES_BLOCK_SIZE;

		/* only the final chunk may process a partial last block */
		if (walk.nbytes == len)
			tail = 0;

		ce_aes_ccm_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   walk.nbytes - tail, ctx->key_enc,
				   num_rounds(ctx), mac, walk.iv);

		len -= walk.nbytes - tail;
		err = blkcipher_walk_done(&desc, &walk, tail);
	}
	if (!err)
		ce_aes_ccm_final(mac, buf, ctx->key_enc, num_rounds(ctx));

	kernel_neon_end();

	if (err)
		return err;

	/* copy authtag to end of dst */
	scatterwalk_map_and_copy(mac, dst, req->cryptlen,
				 crypto_aead_authsize(aead), 1);

	return 0;
}
/*
 * AEAD decrypt path: MAC the associated data, CTR-decrypt the ciphertext
 * while folding it into the CBC-MAC, then compare the computed tag with
 * the tag stored at the end of src using a constant-time comparison.
 * Returns -EBADMSG on authentication failure.
 */
static int ccm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int authsize = crypto_aead_authsize(aead);
	struct blkcipher_desc desc = { .info = req->iv };
	struct blkcipher_walk walk;
	struct scatterlist srcbuf[2];
	struct scatterlist dstbuf[2];
	struct scatterlist *src;
	struct scatterlist *dst;
	u8 __aligned(8) mac[AES_BLOCK_SIZE];
	u8 buf[AES_BLOCK_SIZE];
	/* payload length excludes the trailing auth tag */
	u32 len = req->cryptlen - authsize;
	int err;

	/* build B0 / initial counter block; validates the CCM IV */
	err = ccm_init_mac(req, mac, len);
	if (err)
		return err;

	kernel_neon_begin_partial(6);

	if (req->assoclen)
		ccm_calculate_auth_mac(req, mac);

	/* preserve the original iv for the final round */
	memcpy(buf, req->iv, AES_BLOCK_SIZE);

	/* fast-forward past the AAD so the walk covers only the payload */
	src = scatterwalk_ffwd(srcbuf, req->src, req->assoclen);
	dst = src;
	if (req->src != req->dst)
		dst = scatterwalk_ffwd(dstbuf, req->dst, req->assoclen);

	blkcipher_walk_init(&walk, dst, src, len);
	err = blkcipher_aead_walk_virt_block(&desc, &walk, aead,
					     AES_BLOCK_SIZE);

	while (walk.nbytes) {
		u32 tail = walk.nbytes % AES_BLOCK_SIZE;

		/* only the final chunk may process a partial last block */
		if (walk.nbytes == len)
			tail = 0;

		ce_aes_ccm_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   walk.nbytes - tail, ctx->key_enc,
				   num_rounds(ctx), mac, walk.iv);

		len -= walk.nbytes - tail;
		err = blkcipher_walk_done(&desc, &walk, tail);
	}
	if (!err)
		ce_aes_ccm_final(mac, buf, ctx->key_enc, num_rounds(ctx));

	kernel_neon_end();

	if (err)
		return err;

	/* compare calculated auth tag with the stored one */
	scatterwalk_map_and_copy(buf, src, req->cryptlen - authsize,
				 authsize, 0);

	if (crypto_memneq(mac, buf, authsize))
		return -EBADMSG;
	return 0;
}
/* AEAD algorithm descriptor registered with the crypto API. */
static struct aead_alg ccm_aes_alg = {
	.base = {
		.cra_name		= "ccm(aes)",
		.cra_driver_name	= "ccm-aes-ce",
		.cra_priority		= 300,
		/* CCM behaves as a stream mode: blocksize 1 */
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_alignmask		= 7,
		.cra_module		= THIS_MODULE,
	},
	.ivsize		= AES_BLOCK_SIZE,
	.maxauthsize	= AES_BLOCK_SIZE,
	.setkey		= ccm_setkey,
	.setauthsize	= ccm_setauthsize,
	.encrypt	= ccm_encrypt,
	.decrypt	= ccm_decrypt,
};
  241. static int __init aes_mod_init(void)
  242. {
  243. if (!(elf_hwcap & HWCAP_AES))
  244. return -ENODEV;
  245. return crypto_register_aead(&ccm_aes_alg);
  246. }
/* Module unload: drop the AEAD registration. */
static void __exit aes_mod_exit(void)
{
	crypto_unregister_aead(&ccm_aes_alg);
}
/* Standard module entry/exit hooks and metadata. */
module_init(aes_mod_init);
module_exit(aes_mod_exit);

MODULE_DESCRIPTION("Synchronous AES in CCM mode using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("ccm(aes)");