cipher.c

// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API.
 *
 * Single-block cipher operations.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/algapi.h>
#include <crypto/internal/cipher.h>
#include <linux/kernel.h>
#include <linux/crypto.h>
#include <linux/errno.h>
#include <linux/slab.h>
#include <linux/string.h>
#include "internal.h"

/*
 * Copy the key into a kmalloc'd bounce buffer that satisfies the
 * algorithm's alignment mask, hand the aligned copy to ->cia_setkey(),
 * then wipe and free the copy.
 */
static int setkey_unaligned(struct crypto_cipher *tfm, const u8 *key,
                            unsigned int keylen)
{
        struct cipher_alg *cia = crypto_cipher_alg(tfm);
        unsigned long alignmask = crypto_cipher_alignmask(tfm);
        int ret;
        u8 *buffer, *alignbuffer;
        unsigned long absize;

        absize = keylen + alignmask;
        buffer = kmalloc(absize, GFP_ATOMIC);
        if (!buffer)
                return -ENOMEM;

        alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
        memcpy(alignbuffer, key, keylen);
        ret = cia->cia_setkey(crypto_cipher_tfm(tfm), alignbuffer, keylen);
        memset(alignbuffer, 0, keylen);
        kfree(buffer);
        return ret;
}

int crypto_cipher_setkey(struct crypto_cipher *tfm,
                         const u8 *key, unsigned int keylen)
{
        struct cipher_alg *cia = crypto_cipher_alg(tfm);
        unsigned long alignmask = crypto_cipher_alignmask(tfm);

        if (keylen < cia->cia_min_keysize || keylen > cia->cia_max_keysize)
                return -EINVAL;

        if ((unsigned long)key & alignmask)
                return setkey_unaligned(tfm, key, keylen);

        return cia->cia_setkey(crypto_cipher_tfm(tfm), key, keylen);
}
EXPORT_SYMBOL_NS_GPL(crypto_cipher_setkey, CRYPTO_INTERNAL);

/*
 * Encrypt or decrypt a single block.  If either buffer violates the
 * algorithm's alignment mask, bounce the block through an aligned
 * on-stack buffer.
 */
static inline void cipher_crypt_one(struct crypto_cipher *tfm,
                                    u8 *dst, const u8 *src, bool enc)
{
        unsigned long alignmask = crypto_cipher_alignmask(tfm);
        struct cipher_alg *cia = crypto_cipher_alg(tfm);
        void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
                enc ? cia->cia_encrypt : cia->cia_decrypt;

        if (unlikely(((unsigned long)dst | (unsigned long)src) & alignmask)) {
                unsigned int bs = crypto_cipher_blocksize(tfm);
                u8 buffer[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
                u8 *tmp = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);

                memcpy(tmp, src, bs);
                fn(crypto_cipher_tfm(tfm), tmp, tmp);
                memcpy(dst, tmp, bs);
        } else {
                fn(crypto_cipher_tfm(tfm), dst, src);
        }
}

void crypto_cipher_encrypt_one(struct crypto_cipher *tfm,
                               u8 *dst, const u8 *src)
{
        cipher_crypt_one(tfm, dst, src, true);
}
EXPORT_SYMBOL_NS_GPL(crypto_cipher_encrypt_one, CRYPTO_INTERNAL);

void crypto_cipher_decrypt_one(struct crypto_cipher *tfm,
                               u8 *dst, const u8 *src)
{
        cipher_crypt_one(tfm, dst, src, false);
}
EXPORT_SYMBOL_NS_GPL(crypto_cipher_decrypt_one, CRYPTO_INTERNAL);
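
/*
 * Usage sketch (not part of the original file): one way a kernel-side
 * caller could drive the single-block API above.  The "aes" algorithm
 * name, the all-zero 128-bit key and the minimal error handling are
 * illustrative assumptions only.  A module using these symbols would
 * also need MODULE_IMPORT_NS(CRYPTO_INTERNAL), since they are exported
 * into the CRYPTO_INTERNAL namespace.
 */
#include <linux/err.h>

static int __maybe_unused example_encrypt_one_block(void)
{
        static const u8 key[16];                /* assumed 128-bit key */
        u8 block[16] = "single block in";       /* exactly one AES block */
        struct crypto_cipher *tfm;
        int ret;

        tfm = crypto_alloc_cipher("aes", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        ret = crypto_cipher_setkey(tfm, key, sizeof(key));
        if (!ret)
                /* in-place encryption of a single block */
                crypto_cipher_encrypt_one(tfm, block, block);

        crypto_free_cipher(tfm);
        return ret;
}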