/*
 * linux/arch/arm64/crypto/aes-glue.c - wrapper code for ARMv8 AES
 *
 * Copyright (C) 2013 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#include <asm/neon.h>
#include <asm/hwcap.h>
#include <crypto/aes.h>
#include <crypto/ablk_helper.h>
#include <crypto/algapi.h>
#include <linux/module.h>
#include <linux/cpufeature.h>

#include "aes-ce-setkey.h"
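
/*
 * Depending on whether USE_V8_CRYPTO_EXTENSIONS is defined at build time,
 * this glue code is backed either by the ARMv8 Crypto Extensions
 * instructions ("ce") or by a plain NEON bit-sliced implementation
 * ("neon"); the arm64 crypto Makefile builds the file once for each
 * variant. The macros below simply rename the shared entry points so that
 * the rest of the file is identical for both builds.
 */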
#ifdef USE_V8_CRYPTO_EXTENSIONS
#define MODE			"ce"
#define PRIO			300
#define aes_setkey		ce_aes_setkey
#define aes_expandkey		ce_aes_expandkey
#define aes_ecb_encrypt		ce_aes_ecb_encrypt
#define aes_ecb_decrypt		ce_aes_ecb_decrypt
#define aes_cbc_encrypt		ce_aes_cbc_encrypt
#define aes_cbc_decrypt		ce_aes_cbc_decrypt
#define aes_ctr_encrypt		ce_aes_ctr_encrypt
#define aes_xts_encrypt		ce_aes_xts_encrypt
#define aes_xts_decrypt		ce_aes_xts_decrypt
MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 Crypto Extensions");
#else
#define MODE			"neon"
#define PRIO			200
#define aes_setkey		crypto_aes_set_key
#define aes_expandkey		crypto_aes_expand_key
#define aes_ecb_encrypt		neon_aes_ecb_encrypt
#define aes_ecb_decrypt		neon_aes_ecb_decrypt
#define aes_cbc_encrypt		neon_aes_cbc_encrypt
#define aes_cbc_decrypt		neon_aes_cbc_decrypt
#define aes_ctr_encrypt		neon_aes_ctr_encrypt
#define aes_xts_encrypt		neon_aes_xts_encrypt
#define aes_xts_decrypt		neon_aes_xts_decrypt
MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 NEON");
MODULE_ALIAS_CRYPTO("ecb(aes)");
MODULE_ALIAS_CRYPTO("cbc(aes)");
MODULE_ALIAS_CRYPTO("ctr(aes)");
MODULE_ALIAS_CRYPTO("xts(aes)");
#endif

MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");

/* defined in aes-modes.S */
asmlinkage void aes_ecb_encrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks, int first);
asmlinkage void aes_ecb_decrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks, int first);

asmlinkage void aes_cbc_encrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks, u8 iv[], int first);
asmlinkage void aes_cbc_decrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks, u8 iv[], int first);

asmlinkage void aes_ctr_encrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks, u8 ctr[], int first);

asmlinkage void aes_xts_encrypt(u8 out[], u8 const in[], u8 const rk1[],
				int rounds, int blocks, u8 const rk2[], u8 iv[],
				int first);
asmlinkage void aes_xts_decrypt(u8 out[], u8 const in[], u8 const rk1[],
				int rounds, int blocks, u8 const rk2[], u8 iv[],
				int first);
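
/*
 * 'first' is nonzero only on the first call of a walk: it tells the
 * assembly routines to load the round keys (and the IV, where one is used)
 * into NEON registers, which then stay live for the remaining calls issued
 * inside the same kernel_neon_begin()/kernel_neon_end() section.
 */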

struct crypto_aes_xts_ctx {
	struct crypto_aes_ctx key1;
	struct crypto_aes_ctx __aligned(8) key2;
};

static int xts_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct crypto_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	/* XTS takes a double-length key: the first half is the data
	 * encryption key, the second half the tweak key. */
	ret = aes_expandkey(&ctx->key1, in_key, key_len / 2);
	if (!ret)
		ret = aes_expandkey(&ctx->key2, &in_key[key_len / 2],
				    key_len / 2);
	if (!ret)
		return 0;

	tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
	return -EINVAL;
}
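
/*
 * All handlers below derive the round count from the key length:
 * 6 + key_length / 4 yields 10, 12 or 14 rounds for 16, 24 or 32 byte
 * (AES-128/192/256) keys, as specified by FIPS-197.
 */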

static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	int err, first, rounds = 6 + ctx->key_length / 4;
	struct blkcipher_walk walk;
	unsigned int blocks;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_enc, rounds, blocks, first);
		err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();
	return err;
}

static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	int err, first, rounds = 6 + ctx->key_length / 4;
	struct blkcipher_walk walk;
	unsigned int blocks;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_dec, rounds, blocks, first);
		err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();
	return err;
}

static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	int err, first, rounds = 6 + ctx->key_length / 4;
	struct blkcipher_walk walk;
	unsigned int blocks;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_enc, rounds, blocks, walk.iv,
				first);
		err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();
	return err;
}

static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	int err, first, rounds = 6 + ctx->key_length / 4;
	struct blkcipher_walk walk;
	unsigned int blocks;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_dec, rounds, blocks, walk.iv,
				first);
		err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();
	return err;
}
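
/*
 * Unlike the ECB/CBC handlers above, CTR is a stream cipher, so the walk
 * runs in AES_BLOCK_SIZE granularity and a partial final block is handled
 * explicitly: the keystream for the tail is produced into an aligned stack
 * buffer, and only the remaining 'nbytes' of it are copied out.
 */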
static int ctr_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	int err, first, rounds = 6 + ctx->key_length / 4;
	struct blkcipher_walk walk;
	int blocks;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);

	first = 1;
	kernel_neon_begin();
	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		aes_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_enc, rounds, blocks, walk.iv,
				first);
		first = 0;
		nbytes -= blocks * AES_BLOCK_SIZE;
		if (nbytes && nbytes == walk.nbytes % AES_BLOCK_SIZE)
			break;
		err = blkcipher_walk_done(desc, &walk,
					  walk.nbytes % AES_BLOCK_SIZE);
	}
	if (walk.nbytes % AES_BLOCK_SIZE) {
		u8 *tdst = walk.dst.virt.addr + blocks * AES_BLOCK_SIZE;
		u8 *tsrc = walk.src.virt.addr + blocks * AES_BLOCK_SIZE;
		u8 __aligned(8) tail[AES_BLOCK_SIZE];

		/*
		 * Minimum alignment is 8 bytes, so if nbytes is <= 8, we need
		 * to tell aes_ctr_encrypt() to only read half a block.
		 */
		blocks = (nbytes <= 8) ? -1 : 1;
		aes_ctr_encrypt(tail, tsrc, (u8 *)ctx->key_enc, rounds,
				blocks, walk.iv, first);
		memcpy(tdst, tail, nbytes);
		err = blkcipher_walk_done(desc, &walk, 0);
	}
	kernel_neon_end();

	return err;
}

static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_aes_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	int err, first, rounds = 6 + ctx->key1.key_length / 4;
	struct blkcipher_walk walk;
	unsigned int blocks;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key1.key_enc, rounds, blocks,
				(u8 *)ctx->key2.key_enc, walk.iv, first);
		err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();

	return err;
}

static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_aes_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	int err, first, rounds = 6 + ctx->key1.key_length / 4;
	struct blkcipher_walk walk;
	unsigned int blocks;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key1.key_dec, rounds, blocks,
				(u8 *)ctx->key2.key_enc, walk.iv, first);
		err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();

	return err;
}
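
/*
 * The first four entries are the synchronous blkcipher implementations,
 * marked CRYPTO_ALG_INTERNAL since they touch the NEON unit directly and
 * must not be selected by generic users. The remaining four are the async
 * ablkcipher front ends built on ablk_helper, which invoke the internal
 * versions directly when the NEON unit is usable and defer to cryptd
 * otherwise. Note that .decrypt for CTR points at ctr_encrypt: in counter
 * mode the two operations are identical.
 */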
static struct crypto_alg aes_algs[] = { {
	.cra_name		= "__ecb-aes-" MODE,
	.cra_driver_name	= "__driver-ecb-aes-" MODE,
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_blkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= aes_setkey,
		.encrypt	= ecb_encrypt,
		.decrypt	= ecb_decrypt,
	},
}, {
	.cra_name		= "__cbc-aes-" MODE,
	.cra_driver_name	= "__driver-cbc-aes-" MODE,
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_blkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= aes_setkey,
		.encrypt	= cbc_encrypt,
		.decrypt	= cbc_decrypt,
	},
}, {
	.cra_name		= "__ctr-aes-" MODE,
	.cra_driver_name	= "__driver-ctr-aes-" MODE,
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_blkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= aes_setkey,
		.encrypt	= ctr_encrypt,
		.decrypt	= ctr_encrypt,
	},
}, {
	.cra_name		= "__xts-aes-" MODE,
	.cra_driver_name	= "__driver-xts-aes-" MODE,
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_xts_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_blkcipher = {
		.min_keysize	= 2 * AES_MIN_KEY_SIZE,
		.max_keysize	= 2 * AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= xts_set_key,
		.encrypt	= xts_encrypt,
		.decrypt	= xts_decrypt,
	},
}, {
	.cra_name		= "ecb(aes)",
	.cra_driver_name	= "ecb-aes-" MODE,
	.cra_priority		= PRIO,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= ablk_set_key,
		.encrypt	= ablk_encrypt,
		.decrypt	= ablk_decrypt,
	}
}, {
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "cbc-aes-" MODE,
	.cra_priority		= PRIO,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= ablk_set_key,
		.encrypt	= ablk_encrypt,
		.decrypt	= ablk_decrypt,
	}
}, {
	.cra_name		= "ctr(aes)",
	.cra_driver_name	= "ctr-aes-" MODE,
	.cra_priority		= PRIO,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= ablk_set_key,
		.encrypt	= ablk_encrypt,
		.decrypt	= ablk_decrypt,
	}
}, {
	.cra_name		= "xts(aes)",
	.cra_driver_name	= "xts-aes-" MODE,
	.cra_priority		= PRIO,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_ablkcipher = {
		.min_keysize	= 2 * AES_MIN_KEY_SIZE,
		.max_keysize	= 2 * AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= ablk_set_key,
		.encrypt	= ablk_encrypt,
		.decrypt	= ablk_decrypt,
	}
} };

static int __init aes_init(void)
{
	return crypto_register_algs(aes_algs, ARRAY_SIZE(aes_algs));
}

static void __exit aes_exit(void)
{
	crypto_unregister_algs(aes_algs, ARRAY_SIZE(aes_algs));
}
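
/*
 * The Crypto Extensions build is only auto-loaded on CPUs that advertise
 * the AES instructions in their hwcaps; the NEON fallback has no such
 * hardware dependency and registers unconditionally.
 */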
#ifdef USE_V8_CRYPTO_EXTENSIONS
module_cpu_feature_match(AES, aes_init);
#else
module_init(aes_init);
#endif
module_exit(aes_exit);