/* arch/x86/crypto/serpent_avx_glue.c */
/*
 * Glue Code for AVX assembler versions of Serpent Cipher
 *
 * Copyright (C) 2012 Johannes Goetzfried
 * <Johannes.Goetzfried@informatik.stud.uni-erlangen.de>
 *
 * Copyright © 2011-2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307
 * USA
 *
 */
#include <linux/module.h>
#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/ablk_helper.h>
#include <crypto/algapi.h>
#include <crypto/serpent.h>
#include <crypto/cryptd.h>
#include <crypto/b128ops.h>
#include <crypto/ctr.h>
#include <crypto/lrw.h>
#include <crypto/xts.h>
#include <asm/fpu/api.h>
#include <asm/crypto/serpent-avx.h>
#include <asm/crypto/glue_helper.h>
  41. /* 8-way parallel cipher functions */
  42. asmlinkage void serpent_ecb_enc_8way_avx(struct serpent_ctx *ctx, u8 *dst,
  43. const u8 *src);
  44. EXPORT_SYMBOL_GPL(serpent_ecb_enc_8way_avx);
  45. asmlinkage void serpent_ecb_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
  46. const u8 *src);
  47. EXPORT_SYMBOL_GPL(serpent_ecb_dec_8way_avx);
  48. asmlinkage void serpent_cbc_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
  49. const u8 *src);
  50. EXPORT_SYMBOL_GPL(serpent_cbc_dec_8way_avx);
  51. asmlinkage void serpent_ctr_8way_avx(struct serpent_ctx *ctx, u8 *dst,
  52. const u8 *src, le128 *iv);
  53. EXPORT_SYMBOL_GPL(serpent_ctr_8way_avx);
  54. asmlinkage void serpent_xts_enc_8way_avx(struct serpent_ctx *ctx, u8 *dst,
  55. const u8 *src, le128 *iv);
  56. EXPORT_SYMBOL_GPL(serpent_xts_enc_8way_avx);
  57. asmlinkage void serpent_xts_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
  58. const u8 *src, le128 *iv);
  59. EXPORT_SYMBOL_GPL(serpent_xts_dec_8way_avx);
  60. void __serpent_crypt_ctr(void *ctx, u128 *dst, const u128 *src, le128 *iv)
  61. {
  62. be128 ctrblk;
  63. le128_to_be128(&ctrblk, iv);
  64. le128_inc(iv);
  65. __serpent_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk);
  66. u128_xor(dst, src, (u128 *)&ctrblk);
  67. }
  68. EXPORT_SYMBOL_GPL(__serpent_crypt_ctr);
  69. void serpent_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
  70. {
  71. glue_xts_crypt_128bit_one(ctx, dst, src, iv,
  72. GLUE_FUNC_CAST(__serpent_encrypt));
  73. }
  74. EXPORT_SYMBOL_GPL(serpent_xts_enc);
  75. void serpent_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
  76. {
  77. glue_xts_crypt_128bit_one(ctx, dst, src, iv,
  78. GLUE_FUNC_CAST(__serpent_decrypt));
  79. }
  80. EXPORT_SYMBOL_GPL(serpent_xts_dec);
  81. static const struct common_glue_ctx serpent_enc = {
  82. .num_funcs = 2,
  83. .fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,
  84. .funcs = { {
  85. .num_blocks = SERPENT_PARALLEL_BLOCKS,
  86. .fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_enc_8way_avx) }
  87. }, {
  88. .num_blocks = 1,
  89. .fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_encrypt) }
  90. } }
  91. };
  92. static const struct common_glue_ctx serpent_ctr = {
  93. .num_funcs = 2,
  94. .fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,
  95. .funcs = { {
  96. .num_blocks = SERPENT_PARALLEL_BLOCKS,
  97. .fn_u = { .ctr = GLUE_CTR_FUNC_CAST(serpent_ctr_8way_avx) }
  98. }, {
  99. .num_blocks = 1,
  100. .fn_u = { .ctr = GLUE_CTR_FUNC_CAST(__serpent_crypt_ctr) }
  101. } }
  102. };
  103. static const struct common_glue_ctx serpent_enc_xts = {
  104. .num_funcs = 2,
  105. .fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,
  106. .funcs = { {
  107. .num_blocks = SERPENT_PARALLEL_BLOCKS,
  108. .fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_enc_8way_avx) }
  109. }, {
  110. .num_blocks = 1,
  111. .fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_enc) }
  112. } }
  113. };
  114. static const struct common_glue_ctx serpent_dec = {
  115. .num_funcs = 2,
  116. .fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,
  117. .funcs = { {
  118. .num_blocks = SERPENT_PARALLEL_BLOCKS,
  119. .fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_dec_8way_avx) }
  120. }, {
  121. .num_blocks = 1,
  122. .fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_decrypt) }
  123. } }
  124. };
  125. static const struct common_glue_ctx serpent_dec_cbc = {
  126. .num_funcs = 2,
  127. .fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,
  128. .funcs = { {
  129. .num_blocks = SERPENT_PARALLEL_BLOCKS,
  130. .fn_u = { .cbc = GLUE_CBC_FUNC_CAST(serpent_cbc_dec_8way_avx) }
  131. }, {
  132. .num_blocks = 1,
  133. .fn_u = { .cbc = GLUE_CBC_FUNC_CAST(__serpent_decrypt) }
  134. } }
  135. };
  136. static const struct common_glue_ctx serpent_dec_xts = {
  137. .num_funcs = 2,
  138. .fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,
  139. .funcs = { {
  140. .num_blocks = SERPENT_PARALLEL_BLOCKS,
  141. .fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_dec_8way_avx) }
  142. }, {
  143. .num_blocks = 1,
  144. .fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_dec) }
  145. } }
  146. };
  147. static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  148. struct scatterlist *src, unsigned int nbytes)
  149. {
  150. return glue_ecb_crypt_128bit(&serpent_enc, desc, dst, src, nbytes);
  151. }
  152. static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  153. struct scatterlist *src, unsigned int nbytes)
  154. {
  155. return glue_ecb_crypt_128bit(&serpent_dec, desc, dst, src, nbytes);
  156. }
  157. static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  158. struct scatterlist *src, unsigned int nbytes)
  159. {
  160. return glue_cbc_encrypt_128bit(GLUE_FUNC_CAST(__serpent_encrypt), desc,
  161. dst, src, nbytes);
  162. }
  163. static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  164. struct scatterlist *src, unsigned int nbytes)
  165. {
  166. return glue_cbc_decrypt_128bit(&serpent_dec_cbc, desc, dst, src,
  167. nbytes);
  168. }
  169. static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  170. struct scatterlist *src, unsigned int nbytes)
  171. {
  172. return glue_ctr_crypt_128bit(&serpent_ctr, desc, dst, src, nbytes);
  173. }
  174. static inline bool serpent_fpu_begin(bool fpu_enabled, unsigned int nbytes)
  175. {
  176. return glue_fpu_begin(SERPENT_BLOCK_SIZE, SERPENT_PARALLEL_BLOCKS,
  177. NULL, fpu_enabled, nbytes);
  178. }
  179. static inline void serpent_fpu_end(bool fpu_enabled)
  180. {
  181. glue_fpu_end(fpu_enabled);
  182. }
  183. struct crypt_priv {
  184. struct serpent_ctx *ctx;
  185. bool fpu_enabled;
  186. };
  187. static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
  188. {
  189. const unsigned int bsize = SERPENT_BLOCK_SIZE;
  190. struct crypt_priv *ctx = priv;
  191. int i;
  192. ctx->fpu_enabled = serpent_fpu_begin(ctx->fpu_enabled, nbytes);
  193. if (nbytes == bsize * SERPENT_PARALLEL_BLOCKS) {
  194. serpent_ecb_enc_8way_avx(ctx->ctx, srcdst, srcdst);
  195. return;
  196. }
  197. for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
  198. __serpent_encrypt(ctx->ctx, srcdst, srcdst);
  199. }
  200. static void decrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
  201. {
  202. const unsigned int bsize = SERPENT_BLOCK_SIZE;
  203. struct crypt_priv *ctx = priv;
  204. int i;
  205. ctx->fpu_enabled = serpent_fpu_begin(ctx->fpu_enabled, nbytes);
  206. if (nbytes == bsize * SERPENT_PARALLEL_BLOCKS) {
  207. serpent_ecb_dec_8way_avx(ctx->ctx, srcdst, srcdst);
  208. return;
  209. }
  210. for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
  211. __serpent_decrypt(ctx->ctx, srcdst, srcdst);
  212. }
  213. int lrw_serpent_setkey(struct crypto_tfm *tfm, const u8 *key,
  214. unsigned int keylen)
  215. {
  216. struct serpent_lrw_ctx *ctx = crypto_tfm_ctx(tfm);
  217. int err;
  218. err = __serpent_setkey(&ctx->serpent_ctx, key, keylen -
  219. SERPENT_BLOCK_SIZE);
  220. if (err)
  221. return err;
  222. return lrw_init_table(&ctx->lrw_table, key + keylen -
  223. SERPENT_BLOCK_SIZE);
  224. }
  225. EXPORT_SYMBOL_GPL(lrw_serpent_setkey);
  226. static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  227. struct scatterlist *src, unsigned int nbytes)
  228. {
  229. struct serpent_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
  230. be128 buf[SERPENT_PARALLEL_BLOCKS];
  231. struct crypt_priv crypt_ctx = {
  232. .ctx = &ctx->serpent_ctx,
  233. .fpu_enabled = false,
  234. };
  235. struct lrw_crypt_req req = {
  236. .tbuf = buf,
  237. .tbuflen = sizeof(buf),
  238. .table_ctx = &ctx->lrw_table,
  239. .crypt_ctx = &crypt_ctx,
  240. .crypt_fn = encrypt_callback,
  241. };
  242. int ret;
  243. desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
  244. ret = lrw_crypt(desc, dst, src, nbytes, &req);
  245. serpent_fpu_end(crypt_ctx.fpu_enabled);
  246. return ret;
  247. }
  248. static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  249. struct scatterlist *src, unsigned int nbytes)
  250. {
  251. struct serpent_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
  252. be128 buf[SERPENT_PARALLEL_BLOCKS];
  253. struct crypt_priv crypt_ctx = {
  254. .ctx = &ctx->serpent_ctx,
  255. .fpu_enabled = false,
  256. };
  257. struct lrw_crypt_req req = {
  258. .tbuf = buf,
  259. .tbuflen = sizeof(buf),
  260. .table_ctx = &ctx->lrw_table,
  261. .crypt_ctx = &crypt_ctx,
  262. .crypt_fn = decrypt_callback,
  263. };
  264. int ret;
  265. desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
  266. ret = lrw_crypt(desc, dst, src, nbytes, &req);
  267. serpent_fpu_end(crypt_ctx.fpu_enabled);
  268. return ret;
  269. }
  270. void lrw_serpent_exit_tfm(struct crypto_tfm *tfm)
  271. {
  272. struct serpent_lrw_ctx *ctx = crypto_tfm_ctx(tfm);
  273. lrw_free_table(&ctx->lrw_table);
  274. }
  275. EXPORT_SYMBOL_GPL(lrw_serpent_exit_tfm);
  276. int xts_serpent_setkey(struct crypto_tfm *tfm, const u8 *key,
  277. unsigned int keylen)
  278. {
  279. struct serpent_xts_ctx *ctx = crypto_tfm_ctx(tfm);
  280. u32 *flags = &tfm->crt_flags;
  281. int err;
  282. /* key consists of keys of equal size concatenated, therefore
  283. * the length must be even
  284. */
  285. if (keylen % 2) {
  286. *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
  287. return -EINVAL;
  288. }
  289. /* first half of xts-key is for crypt */
  290. err = __serpent_setkey(&ctx->crypt_ctx, key, keylen / 2);
  291. if (err)
  292. return err;
  293. /* second half of xts-key is for tweak */
  294. return __serpent_setkey(&ctx->tweak_ctx, key + keylen / 2, keylen / 2);
  295. }
  296. EXPORT_SYMBOL_GPL(xts_serpent_setkey);
  297. static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  298. struct scatterlist *src, unsigned int nbytes)
  299. {
  300. struct serpent_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
  301. return glue_xts_crypt_128bit(&serpent_enc_xts, desc, dst, src, nbytes,
  302. XTS_TWEAK_CAST(__serpent_encrypt),
  303. &ctx->tweak_ctx, &ctx->crypt_ctx);
  304. }
  305. static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
  306. struct scatterlist *src, unsigned int nbytes)
  307. {
  308. struct serpent_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
  309. return glue_xts_crypt_128bit(&serpent_dec_xts, desc, dst, src, nbytes,
  310. XTS_TWEAK_CAST(__serpent_encrypt),
  311. &ctx->tweak_ctx, &ctx->crypt_ctx);
  312. }
  313. static struct crypto_alg serpent_algs[10] = { {
  314. .cra_name = "__ecb-serpent-avx",
  315. .cra_driver_name = "__driver-ecb-serpent-avx",
  316. .cra_priority = 0,
  317. .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
  318. CRYPTO_ALG_INTERNAL,
  319. .cra_blocksize = SERPENT_BLOCK_SIZE,
  320. .cra_ctxsize = sizeof(struct serpent_ctx),
  321. .cra_alignmask = 0,
  322. .cra_type = &crypto_blkcipher_type,
  323. .cra_module = THIS_MODULE,
  324. .cra_u = {
  325. .blkcipher = {
  326. .min_keysize = SERPENT_MIN_KEY_SIZE,
  327. .max_keysize = SERPENT_MAX_KEY_SIZE,
  328. .setkey = serpent_setkey,
  329. .encrypt = ecb_encrypt,
  330. .decrypt = ecb_decrypt,
  331. },
  332. },
  333. }, {
  334. .cra_name = "__cbc-serpent-avx",
  335. .cra_driver_name = "__driver-cbc-serpent-avx",
  336. .cra_priority = 0,
  337. .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
  338. CRYPTO_ALG_INTERNAL,
  339. .cra_blocksize = SERPENT_BLOCK_SIZE,
  340. .cra_ctxsize = sizeof(struct serpent_ctx),
  341. .cra_alignmask = 0,
  342. .cra_type = &crypto_blkcipher_type,
  343. .cra_module = THIS_MODULE,
  344. .cra_u = {
  345. .blkcipher = {
  346. .min_keysize = SERPENT_MIN_KEY_SIZE,
  347. .max_keysize = SERPENT_MAX_KEY_SIZE,
  348. .setkey = serpent_setkey,
  349. .encrypt = cbc_encrypt,
  350. .decrypt = cbc_decrypt,
  351. },
  352. },
  353. }, {
  354. .cra_name = "__ctr-serpent-avx",
  355. .cra_driver_name = "__driver-ctr-serpent-avx",
  356. .cra_priority = 0,
  357. .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
  358. CRYPTO_ALG_INTERNAL,
  359. .cra_blocksize = 1,
  360. .cra_ctxsize = sizeof(struct serpent_ctx),
  361. .cra_alignmask = 0,
  362. .cra_type = &crypto_blkcipher_type,
  363. .cra_module = THIS_MODULE,
  364. .cra_u = {
  365. .blkcipher = {
  366. .min_keysize = SERPENT_MIN_KEY_SIZE,
  367. .max_keysize = SERPENT_MAX_KEY_SIZE,
  368. .ivsize = SERPENT_BLOCK_SIZE,
  369. .setkey = serpent_setkey,
  370. .encrypt = ctr_crypt,
  371. .decrypt = ctr_crypt,
  372. },
  373. },
  374. }, {
  375. .cra_name = "__lrw-serpent-avx",
  376. .cra_driver_name = "__driver-lrw-serpent-avx",
  377. .cra_priority = 0,
  378. .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
  379. CRYPTO_ALG_INTERNAL,
  380. .cra_blocksize = SERPENT_BLOCK_SIZE,
  381. .cra_ctxsize = sizeof(struct serpent_lrw_ctx),
  382. .cra_alignmask = 0,
  383. .cra_type = &crypto_blkcipher_type,
  384. .cra_module = THIS_MODULE,
  385. .cra_exit = lrw_serpent_exit_tfm,
  386. .cra_u = {
  387. .blkcipher = {
  388. .min_keysize = SERPENT_MIN_KEY_SIZE +
  389. SERPENT_BLOCK_SIZE,
  390. .max_keysize = SERPENT_MAX_KEY_SIZE +
  391. SERPENT_BLOCK_SIZE,
  392. .ivsize = SERPENT_BLOCK_SIZE,
  393. .setkey = lrw_serpent_setkey,
  394. .encrypt = lrw_encrypt,
  395. .decrypt = lrw_decrypt,
  396. },
  397. },
  398. }, {
  399. .cra_name = "__xts-serpent-avx",
  400. .cra_driver_name = "__driver-xts-serpent-avx",
  401. .cra_priority = 0,
  402. .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
  403. CRYPTO_ALG_INTERNAL,
  404. .cra_blocksize = SERPENT_BLOCK_SIZE,
  405. .cra_ctxsize = sizeof(struct serpent_xts_ctx),
  406. .cra_alignmask = 0,
  407. .cra_type = &crypto_blkcipher_type,
  408. .cra_module = THIS_MODULE,
  409. .cra_u = {
  410. .blkcipher = {
  411. .min_keysize = SERPENT_MIN_KEY_SIZE * 2,
  412. .max_keysize = SERPENT_MAX_KEY_SIZE * 2,
  413. .ivsize = SERPENT_BLOCK_SIZE,
  414. .setkey = xts_serpent_setkey,
  415. .encrypt = xts_encrypt,
  416. .decrypt = xts_decrypt,
  417. },
  418. },
  419. }, {
  420. .cra_name = "ecb(serpent)",
  421. .cra_driver_name = "ecb-serpent-avx",
  422. .cra_priority = 500,
  423. .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
  424. .cra_blocksize = SERPENT_BLOCK_SIZE,
  425. .cra_ctxsize = sizeof(struct async_helper_ctx),
  426. .cra_alignmask = 0,
  427. .cra_type = &crypto_ablkcipher_type,
  428. .cra_module = THIS_MODULE,
  429. .cra_init = ablk_init,
  430. .cra_exit = ablk_exit,
  431. .cra_u = {
  432. .ablkcipher = {
  433. .min_keysize = SERPENT_MIN_KEY_SIZE,
  434. .max_keysize = SERPENT_MAX_KEY_SIZE,
  435. .setkey = ablk_set_key,
  436. .encrypt = ablk_encrypt,
  437. .decrypt = ablk_decrypt,
  438. },
  439. },
  440. }, {
  441. .cra_name = "cbc(serpent)",
  442. .cra_driver_name = "cbc-serpent-avx",
  443. .cra_priority = 500,
  444. .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
  445. .cra_blocksize = SERPENT_BLOCK_SIZE,
  446. .cra_ctxsize = sizeof(struct async_helper_ctx),
  447. .cra_alignmask = 0,
  448. .cra_type = &crypto_ablkcipher_type,
  449. .cra_module = THIS_MODULE,
  450. .cra_init = ablk_init,
  451. .cra_exit = ablk_exit,
  452. .cra_u = {
  453. .ablkcipher = {
  454. .min_keysize = SERPENT_MIN_KEY_SIZE,
  455. .max_keysize = SERPENT_MAX_KEY_SIZE,
  456. .ivsize = SERPENT_BLOCK_SIZE,
  457. .setkey = ablk_set_key,
  458. .encrypt = __ablk_encrypt,
  459. .decrypt = ablk_decrypt,
  460. },
  461. },
  462. }, {
  463. .cra_name = "ctr(serpent)",
  464. .cra_driver_name = "ctr-serpent-avx",
  465. .cra_priority = 500,
  466. .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
  467. .cra_blocksize = 1,
  468. .cra_ctxsize = sizeof(struct async_helper_ctx),
  469. .cra_alignmask = 0,
  470. .cra_type = &crypto_ablkcipher_type,
  471. .cra_module = THIS_MODULE,
  472. .cra_init = ablk_init,
  473. .cra_exit = ablk_exit,
  474. .cra_u = {
  475. .ablkcipher = {
  476. .min_keysize = SERPENT_MIN_KEY_SIZE,
  477. .max_keysize = SERPENT_MAX_KEY_SIZE,
  478. .ivsize = SERPENT_BLOCK_SIZE,
  479. .setkey = ablk_set_key,
  480. .encrypt = ablk_encrypt,
  481. .decrypt = ablk_encrypt,
  482. .geniv = "chainiv",
  483. },
  484. },
  485. }, {
  486. .cra_name = "lrw(serpent)",
  487. .cra_driver_name = "lrw-serpent-avx",
  488. .cra_priority = 500,
  489. .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
  490. .cra_blocksize = SERPENT_BLOCK_SIZE,
  491. .cra_ctxsize = sizeof(struct async_helper_ctx),
  492. .cra_alignmask = 0,
  493. .cra_type = &crypto_ablkcipher_type,
  494. .cra_module = THIS_MODULE,
  495. .cra_init = ablk_init,
  496. .cra_exit = ablk_exit,
  497. .cra_u = {
  498. .ablkcipher = {
  499. .min_keysize = SERPENT_MIN_KEY_SIZE +
  500. SERPENT_BLOCK_SIZE,
  501. .max_keysize = SERPENT_MAX_KEY_SIZE +
  502. SERPENT_BLOCK_SIZE,
  503. .ivsize = SERPENT_BLOCK_SIZE,
  504. .setkey = ablk_set_key,
  505. .encrypt = ablk_encrypt,
  506. .decrypt = ablk_decrypt,
  507. },
  508. },
  509. }, {
  510. .cra_name = "xts(serpent)",
  511. .cra_driver_name = "xts-serpent-avx",
  512. .cra_priority = 500,
  513. .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
  514. .cra_blocksize = SERPENT_BLOCK_SIZE,
  515. .cra_ctxsize = sizeof(struct async_helper_ctx),
  516. .cra_alignmask = 0,
  517. .cra_type = &crypto_ablkcipher_type,
  518. .cra_module = THIS_MODULE,
  519. .cra_init = ablk_init,
  520. .cra_exit = ablk_exit,
  521. .cra_u = {
  522. .ablkcipher = {
  523. .min_keysize = SERPENT_MIN_KEY_SIZE * 2,
  524. .max_keysize = SERPENT_MAX_KEY_SIZE * 2,
  525. .ivsize = SERPENT_BLOCK_SIZE,
  526. .setkey = ablk_set_key,
  527. .encrypt = ablk_encrypt,
  528. .decrypt = ablk_decrypt,
  529. },
  530. },
  531. } };
  532. static int __init serpent_init(void)
  533. {
  534. const char *feature_name;
  535. if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
  536. &feature_name)) {
  537. pr_info("CPU feature '%s' is not supported.\n", feature_name);
  538. return -ENODEV;
  539. }
  540. return crypto_register_algs(serpent_algs, ARRAY_SIZE(serpent_algs));
  541. }
  542. static void __exit serpent_exit(void)
  543. {
  544. crypto_unregister_algs(serpent_algs, ARRAY_SIZE(serpent_algs));
  545. }
  546. module_init(serpent_init);
  547. module_exit(serpent_exit);
  548. MODULE_DESCRIPTION("Serpent Cipher Algorithm, AVX optimized");
  549. MODULE_LICENSE("GPL");
  550. MODULE_ALIAS_CRYPTO("serpent");