diff --git a/arch/arm/crypto/chacha-neon-glue.c b/arch/arm/crypto/chacha-neon-glue.c
--- a/arch/arm/crypto/chacha-neon-glue.c
+++ b/arch/arm/crypto/chacha-neon-glue.c
@@ -20,7 +20,7 @@
*/
#include <crypto/algapi.h>
-#include <crypto/chacha.h>
+#include <crypto/internal/chacha.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/kernel.h>
diff --git a/arch/arm64/crypto/chacha-neon-glue.c b/arch/arm64/crypto/chacha-neon-glue.c
--- a/arch/arm64/crypto/chacha-neon-glue.c
+++ b/arch/arm64/crypto/chacha-neon-glue.c
@@ -20,7 +20,7 @@
*/
#include <crypto/algapi.h>
-#include <crypto/chacha.h>
+#include <crypto/internal/chacha.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/kernel.h>
diff --git a/arch/x86/crypto/chacha_glue.c b/arch/x86/crypto/chacha_glue.c
--- a/arch/x86/crypto/chacha_glue.c
+++ b/arch/x86/crypto/chacha_glue.c
@@ -7,7 +7,7 @@
*/
#include <crypto/algapi.h>
-#include <crypto/chacha.h>
+#include <crypto/internal/chacha.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/kernel.h>
diff --git a/crypto/Kconfig b/crypto/Kconfig
--- a/crypto/Kconfig
+++ b/crypto/Kconfig
@@ -1374,6 +1374,14 @@ config CRYPTO_SALSA20
The Salsa20 stream cipher algorithm is designed by Daniel J.
Bernstein <djb@cr.yp.to>. See <http://cr.yp.to/snuffle.html>
+config CRYPTO_ARCH_HAVE_LIB_CHACHA
+ tristate
+
+config CRYPTO_LIB_CHACHA
+ tristate
+ default y
+ depends on CRYPTO_ARCH_HAVE_LIB_CHACHA || !CRYPTO_ARCH_HAVE_LIB_CHACHA
+
config CRYPTO_CHACHA20
tristate "ChaCha stream cipher algorithms"
select CRYPTO_BLKCIPHER
diff --git a/crypto/chacha_generic.c b/crypto/chacha_generic.c
--- a/crypto/chacha_generic.c
+++ b/crypto/chacha_generic.c
@@ -8,29 +8,10 @@
#include <asm/unaligned.h>
#include <crypto/algapi.h>
-#include <crypto/chacha.h>
+#include <crypto/internal/chacha.h>
#include <crypto/internal/skcipher.h>
#include <linux/module.h>
-static void chacha_docrypt(u32 *state, u8 *dst, const u8 *src,
- unsigned int bytes, int nrounds)
-{
- /* aligned to potentially speed up crypto_xor() */
- u8 stream[CHACHA_BLOCK_SIZE] __aligned(sizeof(long));
-
- while (bytes >= CHACHA_BLOCK_SIZE) {
- chacha_block(state, stream, nrounds);
- crypto_xor_cpy(dst, src, stream, CHACHA_BLOCK_SIZE);
- bytes -= CHACHA_BLOCK_SIZE;
- dst += CHACHA_BLOCK_SIZE;
- src += CHACHA_BLOCK_SIZE;
- }
- if (bytes) {
- chacha_block(state, stream, nrounds);
- crypto_xor_cpy(dst, src, stream, bytes);
- }
-}
-
static int chacha_stream_xor(struct skcipher_request *req,
const struct chacha_ctx *ctx, const u8 *iv)
{
@@ -48,8 +29,8 @@ static int chacha_stream_xor(struct skcipher_request *req,
if (nbytes < walk.total)
nbytes = round_down(nbytes, CHACHA_BLOCK_SIZE);
- chacha_docrypt(state, walk.dst.virt.addr, walk.src.virt.addr,
- nbytes, ctx->nrounds);
+ chacha_crypt_generic(state, walk.dst.virt.addr,
+ walk.src.virt.addr, nbytes, ctx->nrounds);
err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
}
@@ -58,22 +39,7 @@ static int chacha_stream_xor(struct skcipher_request *req,
void crypto_chacha_init(u32 *state, const struct chacha_ctx *ctx, const u8 *iv)
{
- state[0] = 0x61707865; /* "expa" */
- state[1] = 0x3320646e; /* "nd 3" */
- state[2] = 0x79622d32; /* "2-by" */
- state[3] = 0x6b206574; /* "te k" */
- state[4] = ctx->key[0];
- state[5] = ctx->key[1];
- state[6] = ctx->key[2];
- state[7] = ctx->key[3];
- state[8] = ctx->key[4];
- state[9] = ctx->key[5];
- state[10] = ctx->key[6];
- state[11] = ctx->key[7];
- state[12] = get_unaligned_le32(iv + 0);
- state[13] = get_unaligned_le32(iv + 4);
- state[14] = get_unaligned_le32(iv + 8);
- state[15] = get_unaligned_le32(iv + 12);
+ chacha_init_generic(state, ctx->key, iv);
}
EXPORT_SYMBOL_GPL(crypto_chacha_init);
@@ -126,7 +92,7 @@ int crypto_xchacha_crypt(struct skcipher_request *req)
/* Compute the subkey given the original key and first 128 nonce bits */
crypto_chacha_init(state, ctx, req->iv);
- hchacha_block(state, subctx.key, ctx->nrounds);
+ hchacha_block_generic(state, subctx.key, ctx->nrounds);
subctx.nrounds = ctx->nrounds;
/* Build the real IV */
diff --git a/include/crypto/chacha.h b/include/crypto/chacha.h
--- a/include/crypto/chacha.h
+++ b/include/crypto/chacha.h
@@ -15,9 +15,8 @@
#ifndef _CRYPTO_CHACHA_H
#define _CRYPTO_CHACHA_H
-#include <crypto/skcipher.h>
+#include <asm/unaligned.h>
#include <linux/types.h>
-#include <linux/crypto.h>
/* 32-bit stream position, then 96-bit nonce (RFC7539 convention) */
#define CHACHA_IV_SIZE 16
@@ -29,26 +28,42 @@
/* 192-bit nonce, then 64-bit stream position */
#define XCHACHA_IV_SIZE 32
-struct chacha_ctx {
- u32 key[8];
- int nrounds;
-};
-
-void chacha_block(u32 *state, u8 *stream, int nrounds);
+void chacha_block_generic(u32 *state, u8 *stream, int nrounds);
static inline void chacha20_block(u32 *state, u8 *stream)
{
- chacha_block(state, stream, 20);
+ chacha_block_generic(state, stream, 20);
+}
+void hchacha_block_generic(const u32 *in, u32 *out, int nrounds);
+
+static inline void chacha_init_generic(u32 *state, const u32 *key, const u8 *iv)
+{
+ state[0] = 0x61707865; /* "expa" */
+ state[1] = 0x3320646e; /* "nd 3" */
+ state[2] = 0x79622d32; /* "2-by" */
+ state[3] = 0x6b206574; /* "te k" */
+ state[4] = key[0];
+ state[5] = key[1];
+ state[6] = key[2];
+ state[7] = key[3];
+ state[8] = key[4];
+ state[9] = key[5];
+ state[10] = key[6];
+ state[11] = key[7];
+ state[12] = get_unaligned_le32(iv + 0);
+ state[13] = get_unaligned_le32(iv + 4);
+ state[14] = get_unaligned_le32(iv + 8);
+ state[15] = get_unaligned_le32(iv + 12);
}
-void hchacha_block(const u32 *in, u32 *out, int nrounds);
-void crypto_chacha_init(u32 *state, const struct chacha_ctx *ctx, const u8 *iv);
+static inline void chacha_init(u32 *state, const u32 *key, const u8 *iv)
+{
+ chacha_init_generic(state, key, iv);
+}
-int crypto_chacha20_setkey(struct crypto_skcipher *tfm, const u8 *key,
- unsigned int keysize);
-int crypto_chacha12_setkey(struct crypto_skcipher *tfm, const u8 *key,
- unsigned int keysize);
+void chacha_crypt(u32 *state, u8 *dst, const u8 *src, unsigned int bytes,
+ int nrounds);
-int crypto_chacha_crypt(struct skcipher_request *req);
-int crypto_xchacha_crypt(struct skcipher_request *req);
+void chacha_crypt_generic(u32 *state, u8 *dst, const u8 *src,
+ unsigned int bytes, int nrounds);
#endif /* _CRYPTO_CHACHA_H */
diff --git a/include/crypto/internal/chacha.h b/include/crypto/internal/chacha.h
new file mode 100644
--- /dev/null
+++ b/include/crypto/internal/chacha.h
@@ -0,0 +1,25 @@
+/* SPDX-License-Identifier: GPL-2.0 */
+
+#ifndef _CRYPTO_INTERNAL_CHACHA_H
+#define _CRYPTO_INTERNAL_CHACHA_H
+
+#include <crypto/chacha.h>
+#include <crypto/skcipher.h>
+#include <linux/crypto.h>
+
+struct chacha_ctx {
+ u32 key[8];
+ int nrounds;
+};
+
+void crypto_chacha_init(u32 *state, const struct chacha_ctx *ctx, const u8 *iv);
+
+int crypto_chacha20_setkey(struct crypto_skcipher *tfm, const u8 *key,
+ unsigned int keysize);
+int crypto_chacha12_setkey(struct crypto_skcipher *tfm, const u8 *key,
+ unsigned int keysize);
+
+int crypto_chacha_crypt(struct skcipher_request *req);
+int crypto_xchacha_crypt(struct skcipher_request *req);
+
+#endif /* _CRYPTO_INTERNAL_CHACHA_H */
diff --git a/lib/Makefile b/lib/Makefile
--- a/lib/Makefile
+++ b/lib/Makefile
@@ -30,8 +30,7 @@ endif
lib-y := ctype.o string.o vsprintf.o cmdline.o \
rbtree.o radix-tree.o timerqueue.o xarray.o \
- idr.o extable.o \
- sha1.o chacha.o irq_regs.o argv_split.o \
+ idr.o extable.o sha1.o irq_regs.o argv_split.o \
flex_proportions.o ratelimit.o show_mem.o \
is_single_threaded.o plist.o decompress.o kobject_uevent.o \
earlycpio.o seq_buf.o siphash.o dec_and_lock.o \
diff --git a/lib/crypto/Makefile b/lib/crypto/Makefile
--- a/lib/crypto/Makefile
+++ b/lib/crypto/Makefile
@@ -1,13 +1,16 @@
# SPDX-License-Identifier: GPL-2.0
-obj-$(CONFIG_CRYPTO_LIB_AES) += libaes.o
-libaes-y := aes.o
+# chacha is used by the /dev/random driver which is always builtin
+obj-y += chacha.o
-obj-$(CONFIG_CRYPTO_LIB_ARC4) += libarc4.o
-libarc4-y := arc4.o
+obj-$(CONFIG_CRYPTO_LIB_AES) += libaes.o
+libaes-y := aes.o
-obj-$(CONFIG_CRYPTO_LIB_DES) += libdes.o
-libdes-y := des.o
+obj-$(CONFIG_CRYPTO_LIB_ARC4) += libarc4.o
+libarc4-y := arc4.o
-obj-$(CONFIG_CRYPTO_LIB_SHA256) += libsha256.o
-libsha256-y := sha256.o
+obj-$(CONFIG_CRYPTO_LIB_DES) += libdes.o
+libdes-y := des.o
+
+obj-$(CONFIG_CRYPTO_LIB_SHA256) += libsha256.o
+libsha256-y := sha256.o
diff --git a/lib/chacha.c b/lib/crypto/chacha.c
similarity index 76%
rename from lib/chacha.c
rename to lib/crypto/chacha.c
--- a/lib/chacha.c
+++ b/lib/crypto/chacha.c
@@ -5,11 +5,14 @@
* Copyright (C) 2015 Martin Willi
*/
+#include <linux/bug.h>
#include <linux/kernel.h>
#include <linux/export.h>
#include <linux/bitops.h>
+#include <linux/string.h>
#include <linux/cryptohash.h>
#include <asm/unaligned.h>
+#include <crypto/algapi.h> // for crypto_xor_cpy
#include <crypto/chacha.h>
static void chacha_permute(u32 *x, int nrounds)
@@ -72,7 +75,7 @@ static void chacha_permute(u32 *x, int nrounds)
* The caller has already converted the endianness of the input. This function
* also handles incrementing the block counter in the input matrix.
*/
-void chacha_block(u32 *state, u8 *stream, int nrounds)
+void chacha_block_generic(u32 *state, u8 *stream, int nrounds)
{
u32 x[16];
int i;
@@ -86,7 +89,7 @@ void chacha_block(u32 *state, u8 *stream, int nrounds)
state[12]++;
}
-EXPORT_SYMBOL(chacha_block);
+EXPORT_SYMBOL(chacha_block_generic);
/**
* hchacha_block - abbreviated ChaCha core, for XChaCha
@@ -99,7 +102,7 @@ EXPORT_SYMBOL(chacha_block);
* skips the final addition of the initial state, and outputs only certain words
* of the state. It should not be used for streaming directly.
*/
-void hchacha_block(const u32 *in, u32 *out, int nrounds)
+void hchacha_block_generic(const u32 *in, u32 *out, int nrounds)
{
u32 x[16];
@@ -110,4 +113,30 @@ void hchacha_block(const u32 *in, u32 *out, int nrounds)
memcpy(&out[0], &x[0], 16);
memcpy(&out[4], &x[12], 16);
}
-EXPORT_SYMBOL(hchacha_block);
+EXPORT_SYMBOL(hchacha_block_generic);
+
+void chacha_crypt_generic(u32 *state, u8 *dst, const u8 *src,
+ unsigned int bytes, int nrounds)
+{
+ /* aligned to potentially speed up crypto_xor() */
+ u8 stream[CHACHA_BLOCK_SIZE] __aligned(sizeof(long));
+
+ while (bytes >= CHACHA_BLOCK_SIZE) {
+ chacha_block_generic(state, stream, nrounds);
+ crypto_xor_cpy(dst, src, stream, CHACHA_BLOCK_SIZE);
+ bytes -= CHACHA_BLOCK_SIZE;
+ dst += CHACHA_BLOCK_SIZE;
+ src += CHACHA_BLOCK_SIZE;
+ }
+ if (bytes) {
+ chacha_block_generic(state, stream, nrounds);
+ crypto_xor_cpy(dst, src, stream, bytes);
+ }
+}
+EXPORT_SYMBOL(chacha_crypt_generic);
+
+#ifndef CONFIG_CRYPTO_ARCH_HAVE_LIB_CHACHA
+extern void chacha_crypt(u32 *, u8 *, const u8 *, unsigned int, int)
+ __alias(chacha_crypt_generic);
+EXPORT_SYMBOL(chacha_crypt);
+#endif
Move the existing shared ChaCha code into lib/crypto, and at the same
time, split the support header into a public version and an internal
version that is only intended for consumption by crypto implementations.

Also, refactor the generic implementation so it is only exposed as the
chacha_crypt() library function if the architecture does not override it
with its own implementation, which may itself fall back to the generic
routine if needed.

While at it, tidy up lib/crypto/Makefile a bit so we are ready for some
new arrivals.

Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
---
 arch/arm/crypto/chacha-neon-glue.c   |  2 +-
 arch/arm64/crypto/chacha-neon-glue.c |  2 +-
 arch/x86/crypto/chacha_glue.c        |  2 +-
 crypto/Kconfig                       |  8 ++++
 crypto/chacha_generic.c              | 44 ++----------------
 include/crypto/chacha.h              | 49 +++++++++++++-------
 include/crypto/internal/chacha.h     | 25 ++++++++++
 lib/Makefile                         |  3 +-
 lib/crypto/Makefile                  | 19 ++++----
 lib/{ => crypto}/chacha.c            | 37 +++++++++++++--
 10 files changed, 118 insertions(+), 73 deletions(-)

-- 
2.17.1
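For illustration only (not part of the patch): a minimal sketch of how a
kernel consumer could drive the new library interface once this lands.
The helper name example_chacha20_encrypt() is hypothetical, and it assumes
the existing CHACHA_IV_SIZE definition from <crypto/chacha.h> plus a key
that has already been expanded into eight 32-bit words.

#include <crypto/chacha.h>

static void example_chacha20_encrypt(u8 *dst, const u8 *src,
				     unsigned int len, const u32 key[8],
				     const u8 iv[CHACHA_IV_SIZE])
{
	u32 state[16];

	/* Load the ChaCha constants, key and IV into the 16-word state. */
	chacha_init(state, key, iv);

	/*
	 * XOR the buffer with the keystream; 20 selects the 20-round
	 * variant. Per this patch, chacha_crypt() resolves to an arch
	 * implementation when one is provided, and to the exported
	 * chacha_crypt_generic() alias otherwise.
	 */
	chacha_crypt(state, dst, src, len, 20);
}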