cipher.c

// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API.
 *
 * Single-block cipher operations.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/algapi.h>
#include <crypto/internal/cipher.h>
#include <linux/kernel.h>
#include <linux/crypto.h>
#include <linux/errno.h>
#include <linux/slab.h>
#include <linux/string.h>
#include "internal.h"

static int setkey_unaligned(struct crypto_cipher *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct cipher_alg *cia = crypto_cipher_alg(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	int ret;
	u8 *buffer, *alignbuffer;
	unsigned long absize;

	/* Over-allocate so an aligned pointer can be carved out of the buffer. */
	absize = keylen + alignmask;
	buffer = kmalloc(absize, GFP_ATOMIC);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	ret = cia->cia_setkey(crypto_cipher_tfm(tfm), alignbuffer, keylen);
	/* Zeroize the temporary key copy before freeing it. */
	kfree_sensitive(buffer);
	return ret;
}

int crypto_cipher_setkey(struct crypto_cipher *tfm,
			 const u8 *key, unsigned int keylen)
{
	struct cipher_alg *cia = crypto_cipher_alg(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);

	if (keylen < cia->cia_min_keysize || keylen > cia->cia_max_keysize)
		return -EINVAL;

	if ((unsigned long)key & alignmask)
		return setkey_unaligned(tfm, key, keylen);

	return cia->cia_setkey(crypto_cipher_tfm(tfm), key, keylen);
}
EXPORT_SYMBOL_NS_GPL(crypto_cipher_setkey, CRYPTO_INTERNAL);
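
/*
 * Example (editor's sketch, not part of the original file): keying a
 * single-block cipher. The "aes" algorithm name and the 16-byte toy key
 * are illustrative assumptions. Callers need not care about pointer
 * alignment: crypto_cipher_setkey() above transparently bounces a
 * misaligned key through setkey_unaligned().
 */
static int __maybe_unused example_cipher_setkey(void)
{
	static const u8 key[16] = { [0 ... 15] = 0x42 };	/* toy key */
	struct crypto_cipher *tfm;
	int err;

	tfm = crypto_alloc_cipher("aes", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* A length outside [cia_min_keysize, cia_max_keysize] gets -EINVAL. */
	err = crypto_cipher_setkey(tfm, key, sizeof(key));

	crypto_free_cipher(tfm);
	return err;
}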

static inline void cipher_crypt_one(struct crypto_cipher *tfm,
				    u8 *dst, const u8 *src, bool enc)
{
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	struct cipher_alg *cia = crypto_cipher_alg(tfm);
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		enc ? cia->cia_encrypt : cia->cia_decrypt;

	if (unlikely(((unsigned long)dst | (unsigned long)src) & alignmask)) {
		unsigned int bs = crypto_cipher_blocksize(tfm);
		u8 buffer[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
		u8 *tmp = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);

		/* Bounce the block through an aligned on-stack buffer. */
		memcpy(tmp, src, bs);
		fn(crypto_cipher_tfm(tfm), tmp, tmp);
		memcpy(dst, tmp, bs);
	} else {
		fn(crypto_cipher_tfm(tfm), dst, src);
	}
}

void crypto_cipher_encrypt_one(struct crypto_cipher *tfm,
			       u8 *dst, const u8 *src)
{
	cipher_crypt_one(tfm, dst, src, true);
}
EXPORT_SYMBOL_NS_GPL(crypto_cipher_encrypt_one, CRYPTO_INTERNAL);

void crypto_cipher_decrypt_one(struct crypto_cipher *tfm,
			       u8 *dst, const u8 *src)
{
	cipher_crypt_one(tfm, dst, src, false);
}
EXPORT_SYMBOL_NS_GPL(crypto_cipher_decrypt_one, CRYPTO_INTERNAL);
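
/*
 * Example (editor's sketch, not part of the original file): a one-block
 * encrypt/decrypt round trip. The tfm is assumed to be already keyed;
 * buffers may be arbitrarily aligned, since cipher_crypt_one() above
 * falls back to an aligned stack bounce buffer when needed.
 */
static bool __maybe_unused example_cipher_roundtrip(struct crypto_cipher *tfm,
						    const u8 *block)
{
	unsigned int bs = crypto_cipher_blocksize(tfm);
	u8 ct[MAX_CIPHER_BLOCKSIZE], pt[MAX_CIPHER_BLOCKSIZE];

	crypto_cipher_encrypt_one(tfm, ct, block);
	crypto_cipher_decrypt_one(tfm, pt, ct);

	/* Decryption must invert encryption for any block cipher. */
	return !memcmp(pt, block, bs);
}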

struct crypto_cipher *crypto_clone_cipher(struct crypto_cipher *cipher)
{
	struct crypto_tfm *tfm = crypto_cipher_tfm(cipher);
	struct crypto_alg *alg = tfm->__crt_alg;
	struct crypto_cipher *ncipher;
	struct crypto_tfm *ntfm;

	if (alg->cra_init)
		return ERR_PTR(-ENOSYS);

	if (unlikely(!crypto_mod_get(alg)))
		return ERR_PTR(-ESTALE);

	ntfm = __crypto_alloc_tfmgfp(alg, CRYPTO_ALG_TYPE_CIPHER,
				     CRYPTO_ALG_TYPE_MASK, GFP_ATOMIC);
	if (IS_ERR(ntfm)) {
		crypto_mod_put(alg);
		return ERR_CAST(ntfm);
	}

	ntfm->crt_flags = tfm->crt_flags;

	ncipher = __crypto_cipher_cast(ntfm);

	return ncipher;
}
EXPORT_SYMBOL_GPL(crypto_clone_cipher);
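
/*
 * Example (editor's sketch, not part of the original file): duplicating a
 * transform with crypto_clone_cipher(). The clone path allocates with
 * GFP_ATOMIC, so it is usable where sleeping is not allowed. As the code
 * above shows, only crt_flags is carried over to the new tfm in this
 * listing, so this sketch re-keys the clone; the key and its length are
 * caller-supplied assumptions.
 */
static struct crypto_cipher *__maybe_unused
example_clone_and_key(struct crypto_cipher *orig, const u8 *key,
		      unsigned int keylen)
{
	struct crypto_cipher *clone;
	int err;

	clone = crypto_clone_cipher(orig);
	if (IS_ERR(clone))
		return clone;

	err = crypto_cipher_setkey(clone, key, keylen);
	if (err) {
		crypto_free_cipher(clone);
		return ERR_PTR(err);
	}

	return clone;
}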