/* serpent_avx2_glue.c */
/*
 * Glue Code for x86_64/AVX2 assembler optimized version of Serpent
 *
 * Copyright © 2012-2013 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 */
#include <linux/module.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/ablk_helper.h>
#include <crypto/algapi.h>
#include <crypto/ctr.h>
#include <crypto/lrw.h>
#include <crypto/xts.h>
#include <crypto/serpent.h>
#include <asm/fpu/api.h>
#include <asm/crypto/serpent-avx.h>
#include <asm/crypto/glue_helper.h>

/* Blocks handled per call by the 16-way AVX2 assembler routines below. */
#define SERPENT_AVX2_PARALLEL_BLOCKS 16
/* 16-way AVX2 parallel cipher functions (implemented in assembler). */
asmlinkage void serpent_ecb_enc_16way(struct serpent_ctx *ctx, u8 *dst,
				      const u8 *src);
asmlinkage void serpent_ecb_dec_16way(struct serpent_ctx *ctx, u8 *dst,
				      const u8 *src);
/* CBC decryption can be parallelized; encryption cannot (see cbc_encrypt). */
asmlinkage void serpent_cbc_dec_16way(void *ctx, u128 *dst, const u128 *src);
asmlinkage void serpent_ctr_16way(void *ctx, u128 *dst, const u128 *src,
				  le128 *iv);
asmlinkage void serpent_xts_enc_16way(struct serpent_ctx *ctx, u8 *dst,
				      const u8 *src, le128 *iv);
asmlinkage void serpent_xts_dec_16way(struct serpent_ctx *ctx, u8 *dst,
				      const u8 *src, le128 *iv);
/*
 * ECB encryption dispatch table: entries ordered widest (16-way AVX2) to
 * narrowest (single block, plain C) so the glue layer can pick the largest
 * routine the remaining data allows.
 */
static const struct common_glue_ctx serpent_enc = {
	.num_funcs = 3,
	/* FPU is claimed from 8 blocks up, matching the 8-way AVX path. */
	.fpu_blocks_limit = 8,

	.funcs = { {
		.num_blocks = 16,
		.fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_enc_16way) }
	}, {
		.num_blocks = 8,
		.fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_enc_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_encrypt) }
	} }
};
/* CTR dispatch table: 16-way AVX2, 8-way AVX, then single-block fallback. */
static const struct common_glue_ctx serpent_ctr = {
	.num_funcs = 3,
	.fpu_blocks_limit = 8,

	.funcs = { {
		.num_blocks = 16,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(serpent_ctr_16way) }
	}, {
		.num_blocks = 8,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(serpent_ctr_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(__serpent_crypt_ctr) }
	} }
};
/* XTS encryption dispatch table, widest routine first. */
static const struct common_glue_ctx serpent_enc_xts = {
	.num_funcs = 3,
	.fpu_blocks_limit = 8,

	.funcs = { {
		.num_blocks = 16,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_enc_16way) }
	}, {
		.num_blocks = 8,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_enc_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_enc) }
	} }
};
/* ECB decryption dispatch table, widest routine first. */
static const struct common_glue_ctx serpent_dec = {
	.num_funcs = 3,
	.fpu_blocks_limit = 8,

	.funcs = { {
		.num_blocks = 16,
		.fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_dec_16way) }
	}, {
		.num_blocks = 8,
		.fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_dec_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_decrypt) }
	} }
};
/*
 * CBC decryption dispatch table. Unlike CBC encryption, decryption has no
 * inter-block dependency and so can use the parallel assembler routines.
 */
static const struct common_glue_ctx serpent_dec_cbc = {
	.num_funcs = 3,
	.fpu_blocks_limit = 8,

	.funcs = { {
		.num_blocks = 16,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(serpent_cbc_dec_16way) }
	}, {
		.num_blocks = 8,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(serpent_cbc_dec_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(__serpent_decrypt) }
	} }
};
/* XTS decryption dispatch table, widest routine first. */
static const struct common_glue_ctx serpent_dec_xts = {
	.num_funcs = 3,
	.fpu_blocks_limit = 8,

	.funcs = { {
		.num_blocks = 16,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_dec_16way) }
	}, {
		.num_blocks = 8,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_dec_8way_avx) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_dec) }
	} }
};
  122. static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  123. struct scatterlist *src, unsigned int nbytes)
  124. {
  125. return glue_ecb_crypt_128bit(&serpent_enc, desc, dst, src, nbytes);
  126. }
  127. static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  128. struct scatterlist *src, unsigned int nbytes)
  129. {
  130. return glue_ecb_crypt_128bit(&serpent_dec, desc, dst, src, nbytes);
  131. }
  132. static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  133. struct scatterlist *src, unsigned int nbytes)
  134. {
  135. return glue_cbc_encrypt_128bit(GLUE_FUNC_CAST(__serpent_encrypt), desc,
  136. dst, src, nbytes);
  137. }
  138. static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  139. struct scatterlist *src, unsigned int nbytes)
  140. {
  141. return glue_cbc_decrypt_128bit(&serpent_dec_cbc, desc, dst, src,
  142. nbytes);
  143. }
  144. static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  145. struct scatterlist *src, unsigned int nbytes)
  146. {
  147. return glue_ctr_crypt_128bit(&serpent_ctr, desc, dst, src, nbytes);
  148. }
  149. static inline bool serpent_fpu_begin(bool fpu_enabled, unsigned int nbytes)
  150. {
  151. /* since reusing AVX functions, starts using FPU at 8 parallel blocks */
  152. return glue_fpu_begin(SERPENT_BLOCK_SIZE, 8, NULL, fpu_enabled, nbytes);
  153. }
  154. static inline void serpent_fpu_end(bool fpu_enabled)
  155. {
  156. glue_fpu_end(fpu_enabled);
  157. }
/* Per-request state threaded through the LRW encrypt/decrypt callbacks. */
struct crypt_priv {
	struct serpent_ctx *ctx;	/* key schedule */
	bool fpu_enabled;		/* tracks FPU ownership across calls */
};
  162. static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
  163. {
  164. const unsigned int bsize = SERPENT_BLOCK_SIZE;
  165. struct crypt_priv *ctx = priv;
  166. int i;
  167. ctx->fpu_enabled = serpent_fpu_begin(ctx->fpu_enabled, nbytes);
  168. if (nbytes >= SERPENT_AVX2_PARALLEL_BLOCKS * bsize) {
  169. serpent_ecb_enc_16way(ctx->ctx, srcdst, srcdst);
  170. srcdst += bsize * SERPENT_AVX2_PARALLEL_BLOCKS;
  171. nbytes -= bsize * SERPENT_AVX2_PARALLEL_BLOCKS;
  172. }
  173. while (nbytes >= SERPENT_PARALLEL_BLOCKS * bsize) {
  174. serpent_ecb_enc_8way_avx(ctx->ctx, srcdst, srcdst);
  175. srcdst += bsize * SERPENT_PARALLEL_BLOCKS;
  176. nbytes -= bsize * SERPENT_PARALLEL_BLOCKS;
  177. }
  178. for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
  179. __serpent_encrypt(ctx->ctx, srcdst, srcdst);
  180. }
  181. static void decrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
  182. {
  183. const unsigned int bsize = SERPENT_BLOCK_SIZE;
  184. struct crypt_priv *ctx = priv;
  185. int i;
  186. ctx->fpu_enabled = serpent_fpu_begin(ctx->fpu_enabled, nbytes);
  187. if (nbytes >= SERPENT_AVX2_PARALLEL_BLOCKS * bsize) {
  188. serpent_ecb_dec_16way(ctx->ctx, srcdst, srcdst);
  189. srcdst += bsize * SERPENT_AVX2_PARALLEL_BLOCKS;
  190. nbytes -= bsize * SERPENT_AVX2_PARALLEL_BLOCKS;
  191. }
  192. while (nbytes >= SERPENT_PARALLEL_BLOCKS * bsize) {
  193. serpent_ecb_dec_8way_avx(ctx->ctx, srcdst, srcdst);
  194. srcdst += bsize * SERPENT_PARALLEL_BLOCKS;
  195. nbytes -= bsize * SERPENT_PARALLEL_BLOCKS;
  196. }
  197. for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
  198. __serpent_decrypt(ctx->ctx, srcdst, srcdst);
  199. }
  200. static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  201. struct scatterlist *src, unsigned int nbytes)
  202. {
  203. struct serpent_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
  204. be128 buf[SERPENT_AVX2_PARALLEL_BLOCKS];
  205. struct crypt_priv crypt_ctx = {
  206. .ctx = &ctx->serpent_ctx,
  207. .fpu_enabled = false,
  208. };
  209. struct lrw_crypt_req req = {
  210. .tbuf = buf,
  211. .tbuflen = sizeof(buf),
  212. .table_ctx = &ctx->lrw_table,
  213. .crypt_ctx = &crypt_ctx,
  214. .crypt_fn = encrypt_callback,
  215. };
  216. int ret;
  217. desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
  218. ret = lrw_crypt(desc, dst, src, nbytes, &req);
  219. serpent_fpu_end(crypt_ctx.fpu_enabled);
  220. return ret;
  221. }
  222. static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  223. struct scatterlist *src, unsigned int nbytes)
  224. {
  225. struct serpent_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
  226. be128 buf[SERPENT_AVX2_PARALLEL_BLOCKS];
  227. struct crypt_priv crypt_ctx = {
  228. .ctx = &ctx->serpent_ctx,
  229. .fpu_enabled = false,
  230. };
  231. struct lrw_crypt_req req = {
  232. .tbuf = buf,
  233. .tbuflen = sizeof(buf),
  234. .table_ctx = &ctx->lrw_table,
  235. .crypt_ctx = &crypt_ctx,
  236. .crypt_fn = decrypt_callback,
  237. };
  238. int ret;
  239. desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
  240. ret = lrw_crypt(desc, dst, src, nbytes, &req);
  241. serpent_fpu_end(crypt_ctx.fpu_enabled);
  242. return ret;
  243. }
  244. static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  245. struct scatterlist *src, unsigned int nbytes)
  246. {
  247. struct serpent_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
  248. return glue_xts_crypt_128bit(&serpent_enc_xts, desc, dst, src, nbytes,
  249. XTS_TWEAK_CAST(__serpent_encrypt),
  250. &ctx->tweak_ctx, &ctx->crypt_ctx);
  251. }
  252. static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  253. struct scatterlist *src, unsigned int nbytes)
  254. {
  255. struct serpent_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
  256. return glue_xts_crypt_128bit(&serpent_dec_xts, desc, dst, src, nbytes,
  257. XTS_TWEAK_CAST(__serpent_encrypt),
  258. &ctx->tweak_ctx, &ctx->crypt_ctx);
  259. }
/*
 * Registered algorithms. Entries 0-4 are the internal synchronous helpers
 * (CRYPTO_ALG_INTERNAL, priority 0, "__"-prefixed names); entries 5-9 are
 * the user-visible async wrappers (priority 600) built on ablk_helper.
 */
static struct crypto_alg srp_algs[10] = { {
	.cra_name		= "__ecb-serpent-avx2",
	.cra_driver_name	= "__driver-ecb-serpent-avx2",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct serpent_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(srp_algs[0].cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.setkey		= serpent_setkey,
			.encrypt	= ecb_encrypt,
			.decrypt	= ecb_decrypt,
		},
	},
}, {
	.cra_name		= "__cbc-serpent-avx2",
	.cra_driver_name	= "__driver-cbc-serpent-avx2",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct serpent_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(srp_algs[1].cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.setkey		= serpent_setkey,
			.encrypt	= cbc_encrypt,
			.decrypt	= cbc_decrypt,
		},
	},
}, {
	.cra_name		= "__ctr-serpent-avx2",
	.cra_driver_name	= "__driver-ctr-serpent-avx2",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	/* CTR is a stream mode: blocksize 1. */
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct serpent_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(srp_algs[2].cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= serpent_setkey,
			/* CTR is symmetric: same routine both directions. */
			.encrypt	= ctr_crypt,
			.decrypt	= ctr_crypt,
		},
	},
}, {
	.cra_name		= "__lrw-serpent-avx2",
	.cra_driver_name	= "__driver-lrw-serpent-avx2",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct serpent_lrw_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(srp_algs[3].cra_list),
	.cra_exit		= lrw_serpent_exit_tfm,
	.cra_u = {
		.blkcipher = {
			/* LRW key = cipher key + one extra tweak block. */
			.min_keysize	= SERPENT_MIN_KEY_SIZE +
					  SERPENT_BLOCK_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE +
					  SERPENT_BLOCK_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= lrw_serpent_setkey,
			.encrypt	= lrw_encrypt,
			.decrypt	= lrw_decrypt,
		},
	},
}, {
	.cra_name		= "__xts-serpent-avx2",
	.cra_driver_name	= "__driver-xts-serpent-avx2",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct serpent_xts_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(srp_algs[4].cra_list),
	.cra_u = {
		.blkcipher = {
			/* XTS uses two full cipher keys. */
			.min_keysize	= SERPENT_MIN_KEY_SIZE * 2,
			.max_keysize	= SERPENT_MAX_KEY_SIZE * 2,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= xts_serpent_setkey,
			.encrypt	= xts_encrypt,
			.decrypt	= xts_decrypt,
		},
	},
}, {
	.cra_name		= "ecb(serpent)",
	.cra_driver_name	= "ecb-serpent-avx2",
	.cra_priority		= 600,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(srp_algs[5].cra_list),
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "cbc(serpent)",
	.cra_driver_name	= "cbc-serpent-avx2",
	.cra_priority		= 600,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(srp_algs[6].cra_list),
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			/* CBC encrypt is serial; use the sync helper path. */
			.encrypt	= __ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "ctr(serpent)",
	.cra_driver_name	= "ctr-serpent-avx2",
	.cra_priority		= 600,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(srp_algs[7].cra_list),
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			/* CTR decryption == encryption. */
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_encrypt,
			.geniv		= "chainiv",
		},
	},
}, {
	.cra_name		= "lrw(serpent)",
	.cra_driver_name	= "lrw-serpent-avx2",
	.cra_priority		= 600,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(srp_algs[8].cra_list),
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE +
					  SERPENT_BLOCK_SIZE,
			.max_keysize	= SERPENT_MAX_KEY_SIZE +
					  SERPENT_BLOCK_SIZE,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "xts(serpent)",
	.cra_driver_name	= "xts-serpent-avx2",
	.cra_priority		= 600,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= SERPENT_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(srp_algs[9].cra_list),
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= SERPENT_MIN_KEY_SIZE * 2,
			.max_keysize	= SERPENT_MAX_KEY_SIZE * 2,
			.ivsize		= SERPENT_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
} };
  489. static int __init init(void)
  490. {
  491. const char *feature_name;
  492. if (!cpu_has_avx2 || !cpu_has_osxsave) {
  493. pr_info("AVX2 instructions are not detected.\n");
  494. return -ENODEV;
  495. }
  496. if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
  497. &feature_name)) {
  498. pr_info("CPU feature '%s' is not supported.\n", feature_name);
  499. return -ENODEV;
  500. }
  501. return crypto_register_algs(srp_algs, ARRAY_SIZE(srp_algs));
  502. }
  503. static void __exit fini(void)
  504. {
  505. crypto_unregister_algs(srp_algs, ARRAY_SIZE(srp_algs));
  506. }
module_init(init);
module_exit(fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Serpent Cipher Algorithm, AVX2 optimized");
/* Allow auto-loading when "serpent" is requested by name. */
MODULE_ALIAS_CRYPTO("serpent");
MODULE_ALIAS_CRYPTO("serpent-asm");