aes_s390.c

/*
 * Cryptographic API.
 *
 * s390 implementation of the AES Cipher Algorithm.
 *
 * s390 Version:
 *   Copyright IBM Corp. 2005, 2007
 *   Author(s): Jan Glauber (jang@de.ibm.com)
 *		Sebastian Siewior <sebastian@breakpoint.cc> SW-Fallback
 *
 * Derived from "crypto/aes_generic.c"
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#define KMSG_COMPONENT "aes_s390"
#define pr_fmt(fmt) KMSG_COMPONENT ": " fmt

#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <linux/init.h>
#include <linux/spinlock.h>
#include "crypt_s390.h"

#define AES_KEYLEN_128		1
#define AES_KEYLEN_192		2
#define AES_KEYLEN_256		4

static u8 *ctrblk;
static DEFINE_SPINLOCK(ctrblk_lock);
static char keylen_flag;

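/*
 * Per-tfm context: the raw key material, the KM/KMC/KMCTR function codes
 * selected at setkey time (enc/dec), and a software fallback tfm that is
 * used when the machine does not accelerate the requested key length.
 */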
struct s390_aes_ctx {
	u8 key[AES_MAX_KEY_SIZE];
	long enc;
	long dec;
	int key_len;
	union {
		struct crypto_blkcipher *blk;
		struct crypto_cipher *cip;
	} fallback;
};

struct pcc_param {
	u8 key[32];
	u8 tweak[16];
	u8 block[16];
	u8 bit[16];
	u8 xts[16];
};

struct s390_xts_ctx {
	u8 key[32];
	u8 pcc_key[32];
	long enc;
	long dec;
	int key_len;
	struct crypto_blkcipher *fallback;
};

/*
 * Check if the key_len is supported by the HW.
 * Returns 0 if it is, a positive number if it is not and software fallback is
 * required or a negative number in case the key size is not valid
 */
static int need_fallback(unsigned int key_len)
{
	switch (key_len) {
	case 16:
		if (!(keylen_flag & AES_KEYLEN_128))
			return 1;
		break;
	case 24:
		if (!(keylen_flag & AES_KEYLEN_192))
			return 1;
		break;
	case 32:
		if (!(keylen_flag & AES_KEYLEN_256))
			return 1;
		break;
	default:
		return -1;
		break;
	}
	return 0;
}

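/*
 * Forward the caller's request flags to the fallback cipher, set its key,
 * and copy any result flags (e.g. bad key length) back to the caller's tfm.
 */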
static int setkey_fallback_cip(struct crypto_tfm *tfm, const u8 *in_key,
			       unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	sctx->fallback.cip->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	sctx->fallback.cip->base.crt_flags |= (tfm->crt_flags &
			CRYPTO_TFM_REQ_MASK);

	ret = crypto_cipher_setkey(sctx->fallback.cip, in_key, key_len);
	if (ret) {
		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
		tfm->crt_flags |= (sctx->fallback.cip->base.crt_flags &
				CRYPTO_TFM_RES_MASK);
	}
	return ret;
}

static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;
	int ret;

	ret = need_fallback(key_len);
	if (ret < 0) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	sctx->key_len = key_len;
	if (!ret) {
		memcpy(sctx->key, in_key, key_len);
		return 0;
	}

	return setkey_fallback_cip(tfm, in_key, key_len);
}

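/*
 * Single-block encrypt/decrypt: dispatch to the KM (cipher message) function
 * code matching the key length, or to the fallback cipher when the hardware
 * cannot handle this key size.
 */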
static void aes_encrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	if (unlikely(need_fallback(sctx->key_len))) {
		crypto_cipher_encrypt_one(sctx->fallback.cip, out, in);
		return;
	}

	switch (sctx->key_len) {
	case 16:
		crypt_s390_km(KM_AES_128_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 24:
		crypt_s390_km(KM_AES_192_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 32:
		crypt_s390_km(KM_AES_256_ENCRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	}
}

static void aes_decrypt(struct crypto_tfm *tfm, u8 *out, const u8 *in)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	if (unlikely(need_fallback(sctx->key_len))) {
		crypto_cipher_decrypt_one(sctx->fallback.cip, out, in);
		return;
	}

	switch (sctx->key_len) {
	case 16:
		crypt_s390_km(KM_AES_128_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 24:
		crypt_s390_km(KM_AES_192_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	case 32:
		crypt_s390_km(KM_AES_256_DECRYPT, &sctx->key, out, in,
			      AES_BLOCK_SIZE);
		break;
	}
}

static int fallback_init_cip(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	sctx->fallback.cip = crypto_alloc_cipher(name, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(sctx->fallback.cip)) {
		pr_err("Allocating AES fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(sctx->fallback.cip);
	}

	return 0;
}

static void fallback_exit_cip(struct crypto_tfm *tfm)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(sctx->fallback.cip);
	sctx->fallback.cip = NULL;
}

static struct crypto_alg aes_alg = {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-s390",
	.cra_priority		= CRYPT_S390_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER |
				  CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
	.cra_module		= THIS_MODULE,
	.cra_init		= fallback_init_cip,
	.cra_exit		= fallback_exit_cip,
	.cra_u			= {
		.cipher = {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= aes_encrypt,
			.cia_decrypt		= aes_decrypt,
		}
	}
};

static int setkey_fallback_blk(struct crypto_tfm *tfm, const u8 *key,
			       unsigned int len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	unsigned int ret;

	sctx->fallback.blk->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	sctx->fallback.blk->base.crt_flags |= (tfm->crt_flags &
			CRYPTO_TFM_REQ_MASK);

	ret = crypto_blkcipher_setkey(sctx->fallback.blk, key, len);
	if (ret) {
		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
		tfm->crt_flags |= (sctx->fallback.blk->base.crt_flags &
				CRYPTO_TFM_RES_MASK);
	}
	return ret;
}

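/*
 * Run a request through the software fallback: temporarily point the
 * descriptor at the fallback blkcipher tfm, perform the operation with the
 * caller's IV, then restore the original tfm.
 */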
static int fallback_blk_dec(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
{
	unsigned int ret;
	struct crypto_blkcipher *tfm;
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);

	tfm = desc->tfm;
	desc->tfm = sctx->fallback.blk;

	ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);

	desc->tfm = tfm;
	return ret;
}

static int fallback_blk_enc(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
{
	unsigned int ret;
	struct crypto_blkcipher *tfm;
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);

	tfm = desc->tfm;
	desc->tfm = sctx->fallback.blk;

	ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);

	desc->tfm = tfm;
	return ret;
}

static int ecb_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = need_fallback(key_len);
	if (ret > 0) {
		sctx->key_len = key_len;
		return setkey_fallback_blk(tfm, in_key, key_len);
	}

	switch (key_len) {
	case 16:
		sctx->enc = KM_AES_128_ENCRYPT;
		sctx->dec = KM_AES_128_DECRYPT;
		break;
	case 24:
		sctx->enc = KM_AES_192_ENCRYPT;
		sctx->dec = KM_AES_192_DECRYPT;
		break;
	case 32:
		sctx->enc = KM_AES_256_ENCRYPT;
		sctx->dec = KM_AES_256_DECRYPT;
		break;
	}

	return aes_set_key(tfm, in_key, key_len);
}

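/*
 * ECB mode: walk the scatterlists and feed each run of complete AES blocks
 * to the KM instruction; partial trailing bytes are left for the next walk
 * iteration. crypt_s390_km() returns the number of bytes processed, so any
 * other result is treated as an I/O error.
 */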
static int ecb_aes_crypt(struct blkcipher_desc *desc, long func, void *param,
			 struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes;

	while ((nbytes = walk->nbytes)) {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_km(func, param, out, in, n);
		if (ret < 0 || ret != n)
			return -EIO;

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	}

	return ret;
}

static int ecb_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_enc(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, sctx->enc, sctx->key, &walk);
}

static int ecb_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_dec(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_aes_crypt(desc, sctx->dec, sctx->key, &walk);
}

static int fallback_init_blk(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	sctx->fallback.blk = crypto_alloc_blkcipher(name, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(sctx->fallback.blk)) {
		pr_err("Allocating AES fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(sctx->fallback.blk);
	}

	return 0;
}

static void fallback_exit_blk(struct crypto_tfm *tfm)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	crypto_free_blkcipher(sctx->fallback.blk);
	sctx->fallback.blk = NULL;
}

static struct crypto_alg ecb_aes_alg = {
	.cra_name		= "ecb(aes)",
	.cra_driver_name	= "ecb-aes-s390",
	.cra_priority		= CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= fallback_init_blk,
	.cra_exit		= fallback_exit_blk,
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= ecb_aes_set_key,
			.encrypt	= ecb_aes_encrypt,
			.decrypt	= ecb_aes_decrypt,
		}
	}
};

static int cbc_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = need_fallback(key_len);
	if (ret > 0) {
		sctx->key_len = key_len;
		return setkey_fallback_blk(tfm, in_key, key_len);
	}

	switch (key_len) {
	case 16:
		sctx->enc = KMC_AES_128_ENCRYPT;
		sctx->dec = KMC_AES_128_DECRYPT;
		break;
	case 24:
		sctx->enc = KMC_AES_192_ENCRYPT;
		sctx->dec = KMC_AES_192_DECRYPT;
		break;
	case 32:
		sctx->enc = KMC_AES_256_ENCRYPT;
		sctx->dec = KMC_AES_256_DECRYPT;
		break;
	}

	return aes_set_key(tfm, in_key, key_len);
}

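/*
 * CBC mode via the KMC (cipher message with chaining) instruction. The
 * parameter block is the chaining value (IV) followed by the key; the updated
 * chaining value is copied back into walk->iv once the request is complete.
 */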
static int cbc_aes_crypt(struct blkcipher_desc *desc, long func,
			 struct blkcipher_walk *walk)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes = walk->nbytes;
	struct {
		u8 iv[AES_BLOCK_SIZE];
		u8 key[AES_MAX_KEY_SIZE];
	} param;

	if (!nbytes)
		goto out;

	memcpy(param.iv, walk->iv, AES_BLOCK_SIZE);
	memcpy(param.key, sctx->key, sctx->key_len);
	do {
		/* only use complete blocks */
		unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
		u8 *out = walk->dst.virt.addr;
		u8 *in = walk->src.virt.addr;

		ret = crypt_s390_kmc(func, &param, out, in, n);
		if (ret < 0 || ret != n)
			return -EIO;

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	} while ((nbytes = walk->nbytes));
	memcpy(walk->iv, param.iv, AES_BLOCK_SIZE);

out:
	return ret;
}

static int cbc_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_enc(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, sctx->enc, &walk);
}

static int cbc_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(need_fallback(sctx->key_len)))
		return fallback_blk_dec(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return cbc_aes_crypt(desc, sctx->dec, &walk);
}

static struct crypto_alg cbc_aes_alg = {
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "cbc-aes-s390",
	.cra_priority		= CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= fallback_init_blk,
	.cra_exit		= fallback_exit_blk,
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= cbc_aes_set_key,
			.encrypt	= cbc_aes_encrypt,
			.decrypt	= cbc_aes_decrypt,
		}
	}
};

static int xts_fallback_setkey(struct crypto_tfm *tfm, const u8 *key,
			       unsigned int len)
{
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
	unsigned int ret;

	xts_ctx->fallback->base.crt_flags &= ~CRYPTO_TFM_REQ_MASK;
	xts_ctx->fallback->base.crt_flags |= (tfm->crt_flags &
			CRYPTO_TFM_REQ_MASK);

	ret = crypto_blkcipher_setkey(xts_ctx->fallback, key, len);
	if (ret) {
		tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
		tfm->crt_flags |= (xts_ctx->fallback->base.crt_flags &
				CRYPTO_TFM_RES_MASK);
	}
	return ret;
}

static int xts_fallback_decrypt(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct crypto_blkcipher *tfm;
	unsigned int ret;

	tfm = desc->tfm;
	desc->tfm = xts_ctx->fallback;

	ret = crypto_blkcipher_decrypt_iv(desc, dst, src, nbytes);

	desc->tfm = tfm;
	return ret;
}

static int xts_fallback_encrypt(struct blkcipher_desc *desc,
		struct scatterlist *dst, struct scatterlist *src,
		unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct crypto_blkcipher *tfm;
	unsigned int ret;

	tfm = desc->tfm;
	desc->tfm = xts_ctx->fallback;

	ret = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);

	desc->tfm = tfm;
	return ret;
}

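/*
 * XTS uses two AES keys of equal size: the first half of in_key is the
 * data-encryption key, the second half is the tweak key handed to PCC.
 * The hardware only implements XTS-AES-128 and XTS-AES-256, so a 48-byte
 * (2 x 192-bit) key is routed to the software fallback.
 */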
static int xts_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;

	switch (key_len) {
	case 32:
		xts_ctx->enc = KM_XTS_128_ENCRYPT;
		xts_ctx->dec = KM_XTS_128_DECRYPT;
		memcpy(xts_ctx->key + 16, in_key, 16);
		memcpy(xts_ctx->pcc_key + 16, in_key + 16, 16);
		break;
	case 48:
		xts_ctx->enc = 0;
		xts_ctx->dec = 0;
		xts_fallback_setkey(tfm, in_key, key_len);
		break;
	case 64:
		xts_ctx->enc = KM_XTS_256_ENCRYPT;
		xts_ctx->dec = KM_XTS_256_DECRYPT;
		memcpy(xts_ctx->key, in_key, 32);
		memcpy(xts_ctx->pcc_key, in_key + 32, 32);
		break;
	default:
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}
	xts_ctx->key_len = key_len;
	return 0;
}

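/*
 * The PCC (perform cryptographic computation) instruction derives the initial
 * XTS tweak from the IV and the tweak key; the result in pcc_param.xts seeds
 * the KM-XTS parameter block. Since the 128-bit keys are stored in the second
 * half of the 32-byte key fields, "offset" is 16 for AES-128 (skipping the
 * unused bytes) and 0 for AES-256.
 */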
static int xts_aes_crypt(struct blkcipher_desc *desc, long func,
			 struct s390_xts_ctx *xts_ctx,
			 struct blkcipher_walk *walk)
{
	unsigned int offset = (xts_ctx->key_len >> 1) & 0x10;
	int ret = blkcipher_walk_virt(desc, walk);
	unsigned int nbytes = walk->nbytes;
	unsigned int n;
	u8 *in, *out;
	struct pcc_param pcc_param;
	struct {
		u8 key[32];
		u8 init[16];
	} xts_param;

	if (!nbytes)
		goto out;

	memset(pcc_param.block, 0, sizeof(pcc_param.block));
	memset(pcc_param.bit, 0, sizeof(pcc_param.bit));
	memset(pcc_param.xts, 0, sizeof(pcc_param.xts));
	memcpy(pcc_param.tweak, walk->iv, sizeof(pcc_param.tweak));
	memcpy(pcc_param.key, xts_ctx->pcc_key, 32);
	ret = crypt_s390_pcc(func, &pcc_param.key[offset]);
	if (ret < 0)
		return -EIO;

	memcpy(xts_param.key, xts_ctx->key, 32);
	memcpy(xts_param.init, pcc_param.xts, 16);
	do {
		/* only use complete blocks */
		n = nbytes & ~(AES_BLOCK_SIZE - 1);
		out = walk->dst.virt.addr;
		in = walk->src.virt.addr;

		ret = crypt_s390_km(func, &xts_param.key[offset], out, in, n);
		if (ret < 0 || ret != n)
			return -EIO;

		nbytes &= AES_BLOCK_SIZE - 1;
		ret = blkcipher_walk_done(desc, walk, nbytes);
	} while ((nbytes = walk->nbytes));
out:
	return ret;
}

static int xts_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(xts_ctx->key_len == 48))
		return xts_fallback_encrypt(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_aes_crypt(desc, xts_ctx->enc, xts_ctx, &walk);
}

static int xts_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_xts_ctx *xts_ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	if (unlikely(xts_ctx->key_len == 48))
		return xts_fallback_decrypt(desc, dst, src, nbytes);

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return xts_aes_crypt(desc, xts_ctx->dec, xts_ctx, &walk);
}

static int xts_fallback_init(struct crypto_tfm *tfm)
{
	const char *name = tfm->__crt_alg->cra_name;
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);

	xts_ctx->fallback = crypto_alloc_blkcipher(name, 0,
			CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(xts_ctx->fallback)) {
		pr_err("Allocating XTS fallback algorithm %s failed\n",
		       name);
		return PTR_ERR(xts_ctx->fallback);
	}
	return 0;
}

static void xts_fallback_exit(struct crypto_tfm *tfm)
{
	struct s390_xts_ctx *xts_ctx = crypto_tfm_ctx(tfm);

	crypto_free_blkcipher(xts_ctx->fallback);
	xts_ctx->fallback = NULL;
}

static struct crypto_alg xts_aes_alg = {
	.cra_name		= "xts(aes)",
	.cra_driver_name	= "xts-aes-s390",
	.cra_priority		= CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_NEED_FALLBACK,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct s390_xts_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= xts_fallback_init,
	.cra_exit		= xts_fallback_exit,
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= 2 * AES_MIN_KEY_SIZE,
			.max_keysize	= 2 * AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= xts_aes_set_key,
			.encrypt	= xts_aes_encrypt,
			.decrypt	= xts_aes_decrypt,
		}
	}
};

static int xts_aes_alg_reg;

static int ctr_aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			   unsigned int key_len)
{
	struct s390_aes_ctx *sctx = crypto_tfm_ctx(tfm);

	switch (key_len) {
	case 16:
		sctx->enc = KMCTR_AES_128_ENCRYPT;
		sctx->dec = KMCTR_AES_128_DECRYPT;
		break;
	case 24:
		sctx->enc = KMCTR_AES_192_ENCRYPT;
		sctx->dec = KMCTR_AES_192_DECRYPT;
		break;
	case 32:
		sctx->enc = KMCTR_AES_256_ENCRYPT;
		sctx->dec = KMCTR_AES_256_DECRYPT;
		break;
	}

	return aes_set_key(tfm, in_key, key_len);
}

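/*
 * Fill the counter-block buffer with consecutive counter values (the first
 * block is already set by the caller) so that a single KMCTR call can process
 * up to PAGE_SIZE bytes. Returns the number of bytes the buffer covers.
 */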
static unsigned int __ctrblk_init(u8 *ctrptr, unsigned int nbytes)
{
	unsigned int i, n;

	/* only use complete blocks, max. PAGE_SIZE */
	n = (nbytes > PAGE_SIZE) ? PAGE_SIZE : nbytes & ~(AES_BLOCK_SIZE - 1);
	for (i = AES_BLOCK_SIZE; i < n; i += AES_BLOCK_SIZE) {
		memcpy(ctrptr + i, ctrptr + i - AES_BLOCK_SIZE,
		       AES_BLOCK_SIZE);
		crypto_inc(ctrptr + i, AES_BLOCK_SIZE);
	}
	return n;
}

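/*
 * CTR mode via KMCTR. If the shared, page-sized counter buffer can be locked,
 * whole runs of counter blocks are generated and processed in one go;
 * otherwise a single on-stack counter block is used and the data is handled
 * one block at a time. A trailing partial block is encrypted into a scratch
 * buffer and only nbytes of it are copied to the destination.
 */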
static int ctr_aes_crypt(struct blkcipher_desc *desc, long func,
			 struct s390_aes_ctx *sctx, struct blkcipher_walk *walk)
{
	int ret = blkcipher_walk_virt_block(desc, walk, AES_BLOCK_SIZE);
	unsigned int n, nbytes;
	u8 buf[AES_BLOCK_SIZE], ctrbuf[AES_BLOCK_SIZE];
	u8 *out, *in, *ctrptr = ctrbuf;

	if (!walk->nbytes)
		return ret;

	if (spin_trylock(&ctrblk_lock))
		ctrptr = ctrblk;

	memcpy(ctrptr, walk->iv, AES_BLOCK_SIZE);
	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
		out = walk->dst.virt.addr;
		in = walk->src.virt.addr;
		while (nbytes >= AES_BLOCK_SIZE) {
			if (ctrptr == ctrblk)
				n = __ctrblk_init(ctrptr, nbytes);
			else
				n = AES_BLOCK_SIZE;
			ret = crypt_s390_kmctr(func, sctx->key, out, in,
					       n, ctrptr);
			if (ret < 0 || ret != n) {
				if (ctrptr == ctrblk)
					spin_unlock(&ctrblk_lock);
				return -EIO;
			}
			if (n > AES_BLOCK_SIZE)
				memcpy(ctrptr, ctrptr + n - AES_BLOCK_SIZE,
				       AES_BLOCK_SIZE);
			crypto_inc(ctrptr, AES_BLOCK_SIZE);
			out += n;
			in += n;
			nbytes -= n;
		}
		ret = blkcipher_walk_done(desc, walk, nbytes);
	}
	if (ctrptr == ctrblk) {
		if (nbytes)
			memcpy(ctrbuf, ctrptr, AES_BLOCK_SIZE);
		else
			memcpy(walk->iv, ctrptr, AES_BLOCK_SIZE);
		spin_unlock(&ctrblk_lock);
	} else {
		if (!nbytes)
			memcpy(walk->iv, ctrptr, AES_BLOCK_SIZE);
	}
	/*
	 * final block may be < AES_BLOCK_SIZE, copy only nbytes
	 */
	if (nbytes) {
		out = walk->dst.virt.addr;
		in = walk->src.virt.addr;
		ret = crypt_s390_kmctr(func, sctx->key, buf, in,
				       AES_BLOCK_SIZE, ctrbuf);
		if (ret < 0 || ret != AES_BLOCK_SIZE)
			return -EIO;
		memcpy(out, buf, nbytes);
		crypto_inc(ctrbuf, AES_BLOCK_SIZE);
		ret = blkcipher_walk_done(desc, walk, 0);
		memcpy(walk->iv, ctrbuf, AES_BLOCK_SIZE);
	}

	return ret;
}

static int ctr_aes_encrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_aes_crypt(desc, sctx->enc, sctx, &walk);
}

static int ctr_aes_decrypt(struct blkcipher_desc *desc,
			   struct scatterlist *dst, struct scatterlist *src,
			   unsigned int nbytes)
{
	struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ctr_aes_crypt(desc, sctx->dec, sctx, &walk);
}

static struct crypto_alg ctr_aes_alg = {
	.cra_name		= "ctr(aes)",
	.cra_driver_name	= "ctr-aes-s390",
	.cra_priority		= CRYPT_S390_COMPOSITE_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct s390_aes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u			= {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ctr_aes_set_key,
			.encrypt	= ctr_aes_encrypt,
			.decrypt	= ctr_aes_decrypt,
		}
	}
};

static int ctr_aes_alg_reg;

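/*
 * Probe which MSA functions the machine provides, register the cipher modes
 * that can be accelerated, and unwind the registrations in reverse order on
 * failure. XTS and CTR additionally depend on the MSA4 KM-XTS/KMCTR functions.
 */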
static int __init aes_s390_init(void)
{
	int ret;

	if (crypt_s390_func_available(KM_AES_128_ENCRYPT, CRYPT_S390_MSA))
		keylen_flag |= AES_KEYLEN_128;
	if (crypt_s390_func_available(KM_AES_192_ENCRYPT, CRYPT_S390_MSA))
		keylen_flag |= AES_KEYLEN_192;
	if (crypt_s390_func_available(KM_AES_256_ENCRYPT, CRYPT_S390_MSA))
		keylen_flag |= AES_KEYLEN_256;

	if (!keylen_flag)
		return -EOPNOTSUPP;

	/* z9 109 and z9 BC/EC only support 128 bit key length */
	if (keylen_flag == AES_KEYLEN_128)
		pr_info("AES hardware acceleration is only available for"
			" 128-bit keys\n");

	ret = crypto_register_alg(&aes_alg);
	if (ret)
		goto aes_err;

	ret = crypto_register_alg(&ecb_aes_alg);
	if (ret)
		goto ecb_aes_err;

	ret = crypto_register_alg(&cbc_aes_alg);
	if (ret)
		goto cbc_aes_err;

	if (crypt_s390_func_available(KM_XTS_128_ENCRYPT,
			CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
	    crypt_s390_func_available(KM_XTS_256_ENCRYPT,
			CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
		ret = crypto_register_alg(&xts_aes_alg);
		if (ret)
			goto xts_aes_err;
		xts_aes_alg_reg = 1;
	}

	if (crypt_s390_func_available(KMCTR_AES_128_ENCRYPT,
				CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
	    crypt_s390_func_available(KMCTR_AES_192_ENCRYPT,
				CRYPT_S390_MSA | CRYPT_S390_MSA4) &&
	    crypt_s390_func_available(KMCTR_AES_256_ENCRYPT,
				CRYPT_S390_MSA | CRYPT_S390_MSA4)) {
		ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
		if (!ctrblk) {
			ret = -ENOMEM;
			goto ctr_aes_err;
		}
		ret = crypto_register_alg(&ctr_aes_alg);
		if (ret) {
			free_page((unsigned long) ctrblk);
			goto ctr_aes_err;
		}
		ctr_aes_alg_reg = 1;
	}

out:
	return ret;

ctr_aes_err:
	crypto_unregister_alg(&xts_aes_alg);
xts_aes_err:
	crypto_unregister_alg(&cbc_aes_alg);
cbc_aes_err:
	crypto_unregister_alg(&ecb_aes_alg);
ecb_aes_err:
	crypto_unregister_alg(&aes_alg);
aes_err:
	goto out;
}

static void __exit aes_s390_fini(void)
{
	if (ctr_aes_alg_reg) {
		crypto_unregister_alg(&ctr_aes_alg);
		free_page((unsigned long) ctrblk);
	}
	if (xts_aes_alg_reg)
		crypto_unregister_alg(&xts_aes_alg);
	crypto_unregister_alg(&cbc_aes_alg);
	crypto_unregister_alg(&ecb_aes_alg);
	crypto_unregister_alg(&aes_alg);
}

module_cpu_feature_match(MSA, aes_s390_init);
module_exit(aes_s390_fini);

MODULE_ALIAS_CRYPTO("aes-all");
MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm");
MODULE_LICENSE("GPL");