skcipher.c

/*
 * Symmetric key cipher operations.
 *
 * Generic encrypt/decrypt wrapper for ciphers, handles operations across
 * multiple page boundaries by using temporary blocks. In user context,
 * the kernel is given a chance to schedule us once per page.
 *
 * Copyright (c) 2015 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/internal/skcipher.h>
#include <linux/bug.h>
#include <linux/module.h>

#include "internal.h"
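
/*
 * This file exposes legacy blkcipher and ablkcipher algorithms through the
 * newer skcipher interface.  The skcipher context only needs to hold a
 * pointer to the underlying legacy transform, so that is all the extra
 * size we request.
 */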
static unsigned int crypto_skcipher_extsize(struct crypto_alg *alg)
{
	if (alg->cra_type == &crypto_blkcipher_type)
		return sizeof(struct crypto_blkcipher *);

	BUG_ON(alg->cra_type != &crypto_ablkcipher_type &&
	       alg->cra_type != &crypto_givcipher_type);

	return sizeof(struct crypto_ablkcipher *);
}
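
/*
 * Forward the key to the legacy blkcipher, propagating request flags down
 * and result flags back up to the skcipher tfm.
 */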
static int skcipher_setkey_blkcipher(struct crypto_skcipher *tfm,
				     const u8 *key, unsigned int keylen)
{
	struct crypto_blkcipher **ctx = crypto_skcipher_ctx(tfm);
	struct crypto_blkcipher *blkcipher = *ctx;
	int err;

	crypto_blkcipher_clear_flags(blkcipher, ~0);
	crypto_blkcipher_set_flags(blkcipher, crypto_skcipher_get_flags(tfm) &
					      CRYPTO_TFM_REQ_MASK);
	err = crypto_blkcipher_setkey(blkcipher, key, keylen);
	crypto_skcipher_set_flags(tfm, crypto_blkcipher_get_flags(blkcipher) &
				       CRYPTO_TFM_RES_MASK);

	return err;
}
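
/*
 * Translate a skcipher request into a blkcipher_desc on the stack and
 * invoke the legacy synchronous encrypt/decrypt routine directly.
 */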
static int skcipher_crypt_blkcipher(struct skcipher_request *req,
				    int (*crypt)(struct blkcipher_desc *,
						 struct scatterlist *,
						 struct scatterlist *,
						 unsigned int))
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_blkcipher **ctx = crypto_skcipher_ctx(tfm);
	struct blkcipher_desc desc = {
		.tfm = *ctx,
		.info = req->iv,
		.flags = req->base.flags,
	};

	return crypt(&desc, req->dst, req->src, req->cryptlen);
}

static int skcipher_encrypt_blkcipher(struct skcipher_request *req)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher);
	struct blkcipher_alg *alg = &tfm->__crt_alg->cra_blkcipher;

	return skcipher_crypt_blkcipher(req, alg->encrypt);
}

static int skcipher_decrypt_blkcipher(struct skcipher_request *req)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher);
	struct blkcipher_alg *alg = &tfm->__crt_alg->cra_blkcipher;

	return skcipher_crypt_blkcipher(req, alg->decrypt);
}

static void crypto_exit_skcipher_ops_blkcipher(struct crypto_tfm *tfm)
{
	struct crypto_blkcipher **ctx = crypto_tfm_ctx(tfm);

	crypto_free_blkcipher(*ctx);
}
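
/*
 * Allocate the legacy blkcipher transform, stash it in the skcipher
 * context and point the skcipher ops at the wrappers above.
 */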
static int crypto_init_skcipher_ops_blkcipher(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct crypto_skcipher *skcipher = __crypto_skcipher_cast(tfm);
	struct crypto_blkcipher **ctx = crypto_tfm_ctx(tfm);
	struct crypto_blkcipher *blkcipher;
	struct crypto_tfm *btfm;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	btfm = __crypto_alloc_tfm(calg, CRYPTO_ALG_TYPE_BLKCIPHER,
				  CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(btfm)) {
		crypto_mod_put(calg);
		return PTR_ERR(btfm);
	}

	blkcipher = __crypto_blkcipher_cast(btfm);
	*ctx = blkcipher;
	tfm->exit = crypto_exit_skcipher_ops_blkcipher;

	skcipher->setkey = skcipher_setkey_blkcipher;
	skcipher->encrypt = skcipher_encrypt_blkcipher;
	skcipher->decrypt = skcipher_decrypt_blkcipher;
	skcipher->ivsize = crypto_blkcipher_ivsize(blkcipher);
	skcipher->has_setkey = calg->cra_blkcipher.max_keysize;

	return 0;
}
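
/*
 * Same flag propagation as the blkcipher case, but against the
 * asynchronous ablkcipher interface.
 */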
static int skcipher_setkey_ablkcipher(struct crypto_skcipher *tfm,
				      const u8 *key, unsigned int keylen)
{
	struct crypto_ablkcipher **ctx = crypto_skcipher_ctx(tfm);
	struct crypto_ablkcipher *ablkcipher = *ctx;
	int err;

	crypto_ablkcipher_clear_flags(ablkcipher, ~0);
	crypto_ablkcipher_set_flags(ablkcipher,
				    crypto_skcipher_get_flags(tfm) &
				    CRYPTO_TFM_REQ_MASK);
	err = crypto_ablkcipher_setkey(ablkcipher, key, keylen);
	crypto_skcipher_set_flags(tfm,
				  crypto_ablkcipher_get_flags(ablkcipher) &
				  CRYPTO_TFM_RES_MASK);

	return err;
}
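
/*
 * Build an ablkcipher_request in the skcipher request context (space was
 * reserved via skcipher->reqsize during init) and forward it to the
 * legacy encrypt/decrypt routine.
 */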
static int skcipher_crypt_ablkcipher(struct skcipher_request *req,
				     int (*crypt)(struct ablkcipher_request *))
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_ablkcipher **ctx = crypto_skcipher_ctx(tfm);
	struct ablkcipher_request *subreq = skcipher_request_ctx(req);

	ablkcipher_request_set_tfm(subreq, *ctx);
	ablkcipher_request_set_callback(subreq, skcipher_request_flags(req),
					req->base.complete, req->base.data);
	ablkcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				     req->iv);

	return crypt(subreq);
}

static int skcipher_encrypt_ablkcipher(struct skcipher_request *req)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher);
	struct ablkcipher_alg *alg = &tfm->__crt_alg->cra_ablkcipher;

	return skcipher_crypt_ablkcipher(req, alg->encrypt);
}

static int skcipher_decrypt_ablkcipher(struct skcipher_request *req)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(skcipher);
	struct ablkcipher_alg *alg = &tfm->__crt_alg->cra_ablkcipher;

	return skcipher_crypt_ablkcipher(req, alg->decrypt);
}

static void crypto_exit_skcipher_ops_ablkcipher(struct crypto_tfm *tfm)
{
	struct crypto_ablkcipher **ctx = crypto_tfm_ctx(tfm);

	crypto_free_ablkcipher(*ctx);
}
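
/*
 * As above, but for the asynchronous case: besides wiring up the ops,
 * reserve request-context space for the enclosed ablkcipher_request.
 */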
static int crypto_init_skcipher_ops_ablkcipher(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct crypto_skcipher *skcipher = __crypto_skcipher_cast(tfm);
	struct crypto_ablkcipher **ctx = crypto_tfm_ctx(tfm);
	struct crypto_ablkcipher *ablkcipher;
	struct crypto_tfm *abtfm;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	abtfm = __crypto_alloc_tfm(calg, 0, 0);
	if (IS_ERR(abtfm)) {
		crypto_mod_put(calg);
		return PTR_ERR(abtfm);
	}

	ablkcipher = __crypto_ablkcipher_cast(abtfm);
	*ctx = ablkcipher;
	tfm->exit = crypto_exit_skcipher_ops_ablkcipher;

	skcipher->setkey = skcipher_setkey_ablkcipher;
	skcipher->encrypt = skcipher_encrypt_ablkcipher;
	skcipher->decrypt = skcipher_decrypt_ablkcipher;
	skcipher->ivsize = crypto_ablkcipher_ivsize(ablkcipher);
	skcipher->reqsize = crypto_ablkcipher_reqsize(ablkcipher) +
			    sizeof(struct ablkcipher_request);
	skcipher->has_setkey = calg->cra_ablkcipher.max_keysize;

	return 0;
}
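
/*
 * Dispatch on the algorithm's cra_type to pick the matching compat init;
 * any other type reaching an skcipher tfm is a bug.
 */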
static int crypto_skcipher_init_tfm(struct crypto_tfm *tfm)
{
	if (tfm->__crt_alg->cra_type == &crypto_blkcipher_type)
		return crypto_init_skcipher_ops_blkcipher(tfm);

	BUG_ON(tfm->__crt_alg->cra_type != &crypto_ablkcipher_type &&
	       tfm->__crt_alg->cra_type != &crypto_givcipher_type);

	return crypto_init_skcipher_ops_ablkcipher(tfm);
}
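
/*
 * The "type2" suffix distinguishes this from the legacy
 * crypto_skcipher_type used by the old givcipher code.
 */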
static const struct crypto_type crypto_skcipher_type2 = {
	.extsize = crypto_skcipher_extsize,
	.init_tfm = crypto_skcipher_init_tfm,
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_BLKCIPHER_MASK,
	.type = CRYPTO_ALG_TYPE_BLKCIPHER,
	.tfmsize = offsetof(struct crypto_skcipher, base),
};

struct crypto_skcipher *crypto_alloc_skcipher(const char *alg_name,
					      u32 type, u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_skcipher_type2, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_skcipher);
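
/*
 * A minimal usage sketch (not part of this file): allocate a "cbc(aes)"
 * transform, set a key and submit one request.  Error handling is elided
 * and the key/buffer/IV setup is only illustrative; a real caller must
 * also handle -EINPROGRESS/-EBUSY from asynchronous implementations.
 *
 *	struct crypto_skcipher *tfm;
 *	struct skcipher_request *req;
 *	struct scatterlist sg;
 *	u8 iv[16] = { 0 };
 *
 *	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	crypto_skcipher_setkey(tfm, key, 16);
 *	req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *	sg_init_one(&sg, buf, buflen);
 *	skcipher_request_set_crypt(req, &sg, &sg, buflen, iv);
 *	err = crypto_skcipher_encrypt(req);
 *	skcipher_request_free(req);
 *	crypto_free_skcipher(tfm);
 */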

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Symmetric key cipher type");