cast6_avx_glue.c

/*
 * Glue Code for the AVX assembler implementation of the Cast6 Cipher
 *
 * Copyright (C) 2012 Johannes Goetzfried
 *     <Johannes.Goetzfried@informatik.stud.uni-erlangen.de>
 *
 * Copyright © 2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307
 * USA
 *
 */
#include <linux/module.h>
#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/ablk_helper.h>
#include <crypto/algapi.h>
#include <crypto/cast6.h>
#include <crypto/cryptd.h>
#include <crypto/b128ops.h>
#include <crypto/ctr.h>
#include <crypto/lrw.h>
#include <crypto/xts.h>
#include <asm/fpu/api.h>
#include <asm/crypto/glue_helper.h>

#define CAST6_PARALLEL_BLOCKS 8
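
/*
 * Eight-block parallel primitives provided by the AVX assembler part of
 * this driver (cast6-avx-x86_64-asm_64.S).  Each call processes
 * CAST6_PARALLEL_BLOCKS blocks of CAST6_BLOCK_SIZE bytes; the caller must
 * have taken the FPU/AVX state beforehand.
 */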
asmlinkage void cast6_ecb_enc_8way(struct cast6_ctx *ctx, u8 *dst,
                                   const u8 *src);
asmlinkage void cast6_ecb_dec_8way(struct cast6_ctx *ctx, u8 *dst,
                                   const u8 *src);

asmlinkage void cast6_cbc_dec_8way(struct cast6_ctx *ctx, u8 *dst,
                                   const u8 *src);
asmlinkage void cast6_ctr_8way(struct cast6_ctx *ctx, u8 *dst, const u8 *src,
                               le128 *iv);

asmlinkage void cast6_xts_enc_8way(struct cast6_ctx *ctx, u8 *dst,
                                   const u8 *src, le128 *iv);
asmlinkage void cast6_xts_dec_8way(struct cast6_ctx *ctx, u8 *dst,
                                   const u8 *src, le128 *iv);
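
/* One-block fallbacks used when fewer than CAST6_PARALLEL_BLOCKS blocks remain. */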
static void cast6_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
        glue_xts_crypt_128bit_one(ctx, dst, src, iv,
                                  GLUE_FUNC_CAST(__cast6_encrypt));
}

static void cast6_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
        glue_xts_crypt_128bit_one(ctx, dst, src, iv,
                                  GLUE_FUNC_CAST(__cast6_decrypt));
}
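
/*
 * CTR fallback for one block: take a big-endian snapshot of the counter,
 * advance the little-endian counter for the next block, then encrypt the
 * snapshot and XOR it into the data.
 */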
static void cast6_crypt_ctr(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
        be128 ctrblk;

        le128_to_be128(&ctrblk, iv);
        le128_inc(iv);

        __cast6_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk);
        u128_xor(dst, src, (u128 *)&ctrblk);
}
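
/*
 * Dispatch tables for the generic glue_helper code.  Entries are tried in
 * order: the 8-way AVX routine whenever at least CAST6_PARALLEL_BLOCKS
 * blocks are left, otherwise the one-block C implementation.
 */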
static const struct common_glue_ctx cast6_enc = {
        .num_funcs = 2,
        .fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = CAST6_PARALLEL_BLOCKS,
                .fn_u = { .ecb = GLUE_FUNC_CAST(cast6_ecb_enc_8way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .ecb = GLUE_FUNC_CAST(__cast6_encrypt) }
        } }
};

static const struct common_glue_ctx cast6_ctr = {
        .num_funcs = 2,
        .fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = CAST6_PARALLEL_BLOCKS,
                .fn_u = { .ctr = GLUE_CTR_FUNC_CAST(cast6_ctr_8way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .ctr = GLUE_CTR_FUNC_CAST(cast6_crypt_ctr) }
        } }
};

static const struct common_glue_ctx cast6_enc_xts = {
        .num_funcs = 2,
        .fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = CAST6_PARALLEL_BLOCKS,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(cast6_xts_enc_8way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(cast6_xts_enc) }
        } }
};

static const struct common_glue_ctx cast6_dec = {
        .num_funcs = 2,
        .fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = CAST6_PARALLEL_BLOCKS,
                .fn_u = { .ecb = GLUE_FUNC_CAST(cast6_ecb_dec_8way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .ecb = GLUE_FUNC_CAST(__cast6_decrypt) }
        } }
};

static const struct common_glue_ctx cast6_dec_cbc = {
        .num_funcs = 2,
        .fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = CAST6_PARALLEL_BLOCKS,
                .fn_u = { .cbc = GLUE_CBC_FUNC_CAST(cast6_cbc_dec_8way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .cbc = GLUE_CBC_FUNC_CAST(__cast6_decrypt) }
        } }
};

static const struct common_glue_ctx cast6_dec_xts = {
        .num_funcs = 2,
        .fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = CAST6_PARALLEL_BLOCKS,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(cast6_xts_dec_8way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(cast6_xts_dec) }
        } }
};
static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        return glue_ecb_crypt_128bit(&cast6_enc, desc, dst, src, nbytes);
}

static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        return glue_ecb_crypt_128bit(&cast6_dec, desc, dst, src, nbytes);
}

static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        return glue_cbc_encrypt_128bit(GLUE_FUNC_CAST(__cast6_encrypt), desc,
                                       dst, src, nbytes);
}

static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        return glue_cbc_decrypt_128bit(&cast6_dec_cbc, desc, dst, src,
                                       nbytes);
}

static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                     struct scatterlist *src, unsigned int nbytes)
{
        return glue_ctr_crypt_128bit(&cast6_ctr, desc, dst, src, nbytes);
}
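
/*
 * Taking the FPU is expensive, so it is only enabled for requests with at
 * least CAST6_PARALLEL_BLOCKS blocks and then kept until cast6_fpu_end()
 * is called.
 */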
static inline bool cast6_fpu_begin(bool fpu_enabled, unsigned int nbytes)
{
        return glue_fpu_begin(CAST6_BLOCK_SIZE, CAST6_PARALLEL_BLOCKS,
                              NULL, fpu_enabled, nbytes);
}

static inline void cast6_fpu_end(bool fpu_enabled)
{
        glue_fpu_end(fpu_enabled);
}

struct crypt_priv {
        struct cast6_ctx *ctx;
        bool fpu_enabled;
};
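
/*
 * In-place ECB callbacks invoked by lrw_crypt(): use the 8-way routine when
 * handed a full parallel chunk, otherwise process one block at a time.
 */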
static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
        const unsigned int bsize = CAST6_BLOCK_SIZE;
        struct crypt_priv *ctx = priv;
        int i;

        ctx->fpu_enabled = cast6_fpu_begin(ctx->fpu_enabled, nbytes);

        if (nbytes == bsize * CAST6_PARALLEL_BLOCKS) {
                cast6_ecb_enc_8way(ctx->ctx, srcdst, srcdst);
                return;
        }

        for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
                __cast6_encrypt(ctx->ctx, srcdst, srcdst);
}

static void decrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
        const unsigned int bsize = CAST6_BLOCK_SIZE;
        struct crypt_priv *ctx = priv;
        int i;

        ctx->fpu_enabled = cast6_fpu_begin(ctx->fpu_enabled, nbytes);

        if (nbytes == bsize * CAST6_PARALLEL_BLOCKS) {
                cast6_ecb_dec_8way(ctx->ctx, srcdst, srcdst);
                return;
        }

        for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
                __cast6_decrypt(ctx->ctx, srcdst, srcdst);
}
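
/*
 * LRW wrapper: the supplied key is the CAST6 key followed by one extra
 * 16-byte block used to initialise the LRW multiplication table.
 */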
struct cast6_lrw_ctx {
        struct lrw_table_ctx lrw_table;
        struct cast6_ctx cast6_ctx;
};

static int lrw_cast6_setkey(struct crypto_tfm *tfm, const u8 *key,
                            unsigned int keylen)
{
        struct cast6_lrw_ctx *ctx = crypto_tfm_ctx(tfm);
        int err;

        err = __cast6_setkey(&ctx->cast6_ctx, key, keylen - CAST6_BLOCK_SIZE,
                             &tfm->crt_flags);
        if (err)
                return err;

        return lrw_init_table(&ctx->lrw_table, key + keylen - CAST6_BLOCK_SIZE);
}
static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct cast6_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        be128 buf[CAST6_PARALLEL_BLOCKS];
        struct crypt_priv crypt_ctx = {
                .ctx = &ctx->cast6_ctx,
                .fpu_enabled = false,
        };
        struct lrw_crypt_req req = {
                .tbuf = buf,
                .tbuflen = sizeof(buf),
                .table_ctx = &ctx->lrw_table,
                .crypt_ctx = &crypt_ctx,
                .crypt_fn = encrypt_callback,
        };
        int ret;

        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
        ret = lrw_crypt(desc, dst, src, nbytes, &req);
        cast6_fpu_end(crypt_ctx.fpu_enabled);

        return ret;
}

static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct cast6_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        be128 buf[CAST6_PARALLEL_BLOCKS];
        struct crypt_priv crypt_ctx = {
                .ctx = &ctx->cast6_ctx,
                .fpu_enabled = false,
        };
        struct lrw_crypt_req req = {
                .tbuf = buf,
                .tbuflen = sizeof(buf),
                .table_ctx = &ctx->lrw_table,
                .crypt_ctx = &crypt_ctx,
                .crypt_fn = decrypt_callback,
        };
        int ret;

        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
        ret = lrw_crypt(desc, dst, src, nbytes, &req);
        cast6_fpu_end(crypt_ctx.fpu_enabled);

        return ret;
}

static void lrw_exit_tfm(struct crypto_tfm *tfm)
{
        struct cast6_lrw_ctx *ctx = crypto_tfm_ctx(tfm);

        lrw_free_table(&ctx->lrw_table);
}
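
/*
 * XTS wrapper: the key is two CAST6 keys of equal size concatenated, the
 * first half for the data cipher and the second half for the tweak cipher.
 */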
struct cast6_xts_ctx {
        struct cast6_ctx tweak_ctx;
        struct cast6_ctx crypt_ctx;
};

static int xts_cast6_setkey(struct crypto_tfm *tfm, const u8 *key,
                            unsigned int keylen)
{
        struct cast6_xts_ctx *ctx = crypto_tfm_ctx(tfm);
        u32 *flags = &tfm->crt_flags;
        int err;

        /* key consists of keys of equal size concatenated, therefore
         * the length must be even
         */
        if (keylen % 2) {
                *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        }

        /* first half of xts-key is for crypt */
        err = __cast6_setkey(&ctx->crypt_ctx, key, keylen / 2, flags);
        if (err)
                return err;

        /* second half of xts-key is for tweak */
        return __cast6_setkey(&ctx->tweak_ctx, key + keylen / 2, keylen / 2,
                              flags);
}
static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct cast6_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

        return glue_xts_crypt_128bit(&cast6_enc_xts, desc, dst, src, nbytes,
                                     XTS_TWEAK_CAST(__cast6_encrypt),
                                     &ctx->tweak_ctx, &ctx->crypt_ctx);
}

static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct cast6_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

        return glue_xts_crypt_128bit(&cast6_dec_xts, desc, dst, src, nbytes,
                                     XTS_TWEAK_CAST(__cast6_encrypt),
                                     &ctx->tweak_ctx, &ctx->crypt_ctx);
}
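
/*
 * Algorithm registrations.  The first five "__"-prefixed entries are the
 * synchronous blkcipher implementations, marked CRYPTO_ALG_INTERNAL so they
 * are only reachable through the wrappers.  The last five are the public
 * ablkcipher algorithms built on ablk_helper/cryptd, which defer work to
 * cryptd when the FPU cannot be used in the caller's context.
 */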
static struct crypto_alg cast6_algs[10] = { {
        .cra_name = "__ecb-cast6-avx",
        .cra_driver_name = "__driver-ecb-cast6-avx",
        .cra_priority = 0,
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
                     CRYPTO_ALG_INTERNAL,
        .cra_blocksize = CAST6_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct cast6_ctx),
        .cra_alignmask = 0,
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize = CAST6_MIN_KEY_SIZE,
                        .max_keysize = CAST6_MAX_KEY_SIZE,
                        .setkey = cast6_setkey,
                        .encrypt = ecb_encrypt,
                        .decrypt = ecb_decrypt,
                },
        },
}, {
        .cra_name = "__cbc-cast6-avx",
        .cra_driver_name = "__driver-cbc-cast6-avx",
        .cra_priority = 0,
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
                     CRYPTO_ALG_INTERNAL,
        .cra_blocksize = CAST6_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct cast6_ctx),
        .cra_alignmask = 0,
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize = CAST6_MIN_KEY_SIZE,
                        .max_keysize = CAST6_MAX_KEY_SIZE,
                        .setkey = cast6_setkey,
                        .encrypt = cbc_encrypt,
                        .decrypt = cbc_decrypt,
                },
        },
}, {
        .cra_name = "__ctr-cast6-avx",
        .cra_driver_name = "__driver-ctr-cast6-avx",
        .cra_priority = 0,
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
                     CRYPTO_ALG_INTERNAL,
        .cra_blocksize = 1,
        .cra_ctxsize = sizeof(struct cast6_ctx),
        .cra_alignmask = 0,
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize = CAST6_MIN_KEY_SIZE,
                        .max_keysize = CAST6_MAX_KEY_SIZE,
                        .ivsize = CAST6_BLOCK_SIZE,
                        .setkey = cast6_setkey,
                        .encrypt = ctr_crypt,
                        .decrypt = ctr_crypt,
                },
        },
}, {
        .cra_name = "__lrw-cast6-avx",
        .cra_driver_name = "__driver-lrw-cast6-avx",
        .cra_priority = 0,
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
                     CRYPTO_ALG_INTERNAL,
        .cra_blocksize = CAST6_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct cast6_lrw_ctx),
        .cra_alignmask = 0,
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_exit = lrw_exit_tfm,
        .cra_u = {
                .blkcipher = {
                        .min_keysize = CAST6_MIN_KEY_SIZE +
                                       CAST6_BLOCK_SIZE,
                        .max_keysize = CAST6_MAX_KEY_SIZE +
                                       CAST6_BLOCK_SIZE,
                        .ivsize = CAST6_BLOCK_SIZE,
                        .setkey = lrw_cast6_setkey,
                        .encrypt = lrw_encrypt,
                        .decrypt = lrw_decrypt,
                },
        },
}, {
        .cra_name = "__xts-cast6-avx",
        .cra_driver_name = "__driver-xts-cast6-avx",
        .cra_priority = 0,
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER |
                     CRYPTO_ALG_INTERNAL,
        .cra_blocksize = CAST6_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct cast6_xts_ctx),
        .cra_alignmask = 0,
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize = CAST6_MIN_KEY_SIZE * 2,
                        .max_keysize = CAST6_MAX_KEY_SIZE * 2,
                        .ivsize = CAST6_BLOCK_SIZE,
                        .setkey = xts_cast6_setkey,
                        .encrypt = xts_encrypt,
                        .decrypt = xts_decrypt,
                },
        },
}, {
        .cra_name = "ecb(cast6)",
        .cra_driver_name = "ecb-cast6-avx",
        .cra_priority = 200,
        .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
        .cra_blocksize = CAST6_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct async_helper_ctx),
        .cra_alignmask = 0,
        .cra_type = &crypto_ablkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_init = ablk_init,
        .cra_exit = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize = CAST6_MIN_KEY_SIZE,
                        .max_keysize = CAST6_MAX_KEY_SIZE,
                        .setkey = ablk_set_key,
                        .encrypt = ablk_encrypt,
                        .decrypt = ablk_decrypt,
                },
        },
}, {
        .cra_name = "cbc(cast6)",
        .cra_driver_name = "cbc-cast6-avx",
        .cra_priority = 200,
        .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
        .cra_blocksize = CAST6_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct async_helper_ctx),
        .cra_alignmask = 0,
        .cra_type = &crypto_ablkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_init = ablk_init,
        .cra_exit = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize = CAST6_MIN_KEY_SIZE,
                        .max_keysize = CAST6_MAX_KEY_SIZE,
                        .ivsize = CAST6_BLOCK_SIZE,
                        .setkey = ablk_set_key,
                        .encrypt = __ablk_encrypt,
                        .decrypt = ablk_decrypt,
                },
        },
}, {
        .cra_name = "ctr(cast6)",
        .cra_driver_name = "ctr-cast6-avx",
        .cra_priority = 200,
        .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
        .cra_blocksize = 1,
        .cra_ctxsize = sizeof(struct async_helper_ctx),
        .cra_alignmask = 0,
        .cra_type = &crypto_ablkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_init = ablk_init,
        .cra_exit = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize = CAST6_MIN_KEY_SIZE,
                        .max_keysize = CAST6_MAX_KEY_SIZE,
                        .ivsize = CAST6_BLOCK_SIZE,
                        .setkey = ablk_set_key,
                        .encrypt = ablk_encrypt,
                        .decrypt = ablk_encrypt,
                        .geniv = "chainiv",
                },
        },
}, {
        .cra_name = "lrw(cast6)",
        .cra_driver_name = "lrw-cast6-avx",
        .cra_priority = 200,
        .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
        .cra_blocksize = CAST6_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct async_helper_ctx),
        .cra_alignmask = 0,
        .cra_type = &crypto_ablkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_init = ablk_init,
        .cra_exit = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize = CAST6_MIN_KEY_SIZE +
                                       CAST6_BLOCK_SIZE,
                        .max_keysize = CAST6_MAX_KEY_SIZE +
                                       CAST6_BLOCK_SIZE,
                        .ivsize = CAST6_BLOCK_SIZE,
                        .setkey = ablk_set_key,
                        .encrypt = ablk_encrypt,
                        .decrypt = ablk_decrypt,
                },
        },
}, {
        .cra_name = "xts(cast6)",
        .cra_driver_name = "xts-cast6-avx",
        .cra_priority = 200,
        .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
        .cra_blocksize = CAST6_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct async_helper_ctx),
        .cra_alignmask = 0,
        .cra_type = &crypto_ablkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_init = ablk_init,
        .cra_exit = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize = CAST6_MIN_KEY_SIZE * 2,
                        .max_keysize = CAST6_MAX_KEY_SIZE * 2,
                        .ivsize = CAST6_BLOCK_SIZE,
                        .setkey = ablk_set_key,
                        .encrypt = ablk_encrypt,
                        .decrypt = ablk_decrypt,
                },
        },
} };
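
/*
 * AVX instructions use the SSE and YMM register state, so both xstate
 * components must be supported and enabled before the algorithms are
 * registered.
 */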
static int __init cast6_init(void)
{
        const char *feature_name;

        if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
                               &feature_name)) {
                pr_info("CPU feature '%s' is not supported.\n", feature_name);
                return -ENODEV;
        }

        return crypto_register_algs(cast6_algs, ARRAY_SIZE(cast6_algs));
}

static void __exit cast6_exit(void)
{
        crypto_unregister_algs(cast6_algs, ARRAY_SIZE(cast6_algs));
}

module_init(cast6_init);
module_exit(cast6_exit);

MODULE_DESCRIPTION("Cast6 Cipher Algorithm, AVX optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS_CRYPTO("cast6");