@@ -74,16 +74,10 @@ void hchacha_block_arch(const u32 *state, u32 *stream, int nrounds)
kernel_neon_end();
}
}
EXPORT_SYMBOL(hchacha_block_arch);
-void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv)
-{
- chacha_init_generic(state, key, iv);
-}
-EXPORT_SYMBOL(chacha_init_arch);
-
void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src, unsigned int bytes,
int nrounds)
{
if (!IS_ENABLED(CONFIG_KERNEL_MODE_NEON) || !neon_usable() ||
bytes <= CHACHA_BLOCK_SIZE) {
@@ -114,11 +108,11 @@ static int chacha_stream_xor(struct skcipher_request *req,
u32 state[16];
int err;
err = skcipher_walk_virt(&walk, req, false);
- chacha_init_generic(state, ctx->key, iv);
+ chacha_init(state, ctx->key, iv);
while (walk.nbytes > 0) {
unsigned int nbytes = walk.nbytes;
if (nbytes < walk.total)
@@ -164,11 +158,11 @@ static int do_xchacha(struct skcipher_request *req, bool neon)
struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
struct chacha_ctx subctx;
u32 state[16];
u8 real_iv[16];
- chacha_init_generic(state, ctx->key, req->iv);
+ chacha_init(state, ctx->key, req->iv);
if (!IS_ENABLED(CONFIG_KERNEL_MODE_NEON) || !neon) {
hchacha_block_arm(state, subctx.key, ctx->nrounds);
} else {
kernel_neon_begin();
@@ -72,16 +72,10 @@ void hchacha_block_arch(const u32 *state, u32 *stream, int nrounds)
kernel_neon_end();
}
}
EXPORT_SYMBOL(hchacha_block_arch);
-void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv)
-{
- chacha_init_generic(state, key, iv);
-}
-EXPORT_SYMBOL(chacha_init_arch);
-
void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src, unsigned int bytes,
int nrounds)
{
if (!static_branch_likely(&have_neon) || bytes <= CHACHA_BLOCK_SIZE ||
!crypto_simd_usable())
@@ -108,11 +102,11 @@ static int chacha_neon_stream_xor(struct skcipher_request *req,
u32 state[16];
int err;
err = skcipher_walk_virt(&walk, req, false);
- chacha_init_generic(state, ctx->key, iv);
+ chacha_init(state, ctx->key, iv);
while (walk.nbytes > 0) {
unsigned int nbytes = walk.nbytes;
if (nbytes < walk.total)
@@ -149,11 +143,11 @@ static int xchacha_neon(struct skcipher_request *req)
struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
struct chacha_ctx subctx;
u32 state[16];
u8 real_iv[16];
- chacha_init_generic(state, ctx->key, req->iv);
+ chacha_init(state, ctx->key, req->iv);
hchacha_block_arch(state, subctx.key, ctx->nrounds);
subctx.nrounds = ctx->nrounds;
memcpy(&real_iv[0], req->iv + 24, 8);
memcpy(&real_iv[8], req->iv + 16, 8);
@@ -18,26 +18,20 @@ asmlinkage void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src,
EXPORT_SYMBOL(chacha_crypt_arch);
asmlinkage void hchacha_block_arch(const u32 *state, u32 *stream, int nrounds);
EXPORT_SYMBOL(hchacha_block_arch);
-void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv)
-{
- chacha_init_generic(state, key, iv);
-}
-EXPORT_SYMBOL(chacha_init_arch);
-
static int chacha_mips_stream_xor(struct skcipher_request *req,
const struct chacha_ctx *ctx, const u8 *iv)
{
struct skcipher_walk walk;
u32 state[16];
int err;
err = skcipher_walk_virt(&walk, req, false);
- chacha_init_generic(state, ctx->key, iv);
+ chacha_init(state, ctx->key, iv);
while (walk.nbytes > 0) {
unsigned int nbytes = walk.nbytes;
if (nbytes < walk.total)
@@ -65,11 +59,11 @@ static int xchacha_mips(struct skcipher_request *req)
struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
struct chacha_ctx subctx;
u32 state[16];
u8 real_iv[16];
- chacha_init_generic(state, ctx->key, req->iv);
+ chacha_init(state, ctx->key, req->iv);
hchacha_block(state, subctx.key, ctx->nrounds);
subctx.nrounds = ctx->nrounds;
memcpy(&real_iv[0], req->iv + 24, 8);
@@ -55,16 +55,10 @@ void hchacha_block_arch(const u32 *state, u32 *stream, int nrounds)
{
hchacha_block_generic(state, stream, nrounds);
}
EXPORT_SYMBOL(hchacha_block_arch);
-void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv)
-{
- chacha_init_generic(state, key, iv);
-}
-EXPORT_SYMBOL(chacha_init_arch);
-
void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src, unsigned int bytes,
int nrounds)
{
if (!static_branch_likely(&have_p10) || bytes <= CHACHA_BLOCK_SIZE ||
!crypto_simd_usable())
@@ -93,11 +87,11 @@ static int chacha_p10_stream_xor(struct skcipher_request *req,
err = skcipher_walk_virt(&walk, req, false);
if (err)
return err;
- chacha_init_generic(state, ctx->key, iv);
+ chacha_init(state, ctx->key, iv);
while (walk.nbytes > 0) {
unsigned int nbytes = walk.nbytes;
if (nbytes < walk.total)
@@ -135,11 +129,11 @@ static int xchacha_p10(struct skcipher_request *req)
struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
struct chacha_ctx subctx;
u32 state[16];
u8 real_iv[16];
- chacha_init_generic(state, ctx->key, req->iv);
+ chacha_init(state, ctx->key, req->iv);
hchacha_block_arch(state, subctx.key, ctx->nrounds);
subctx.nrounds = ctx->nrounds;
memcpy(&real_iv[0], req->iv + 24, 8);
memcpy(&real_iv[8], req->iv + 16, 8);
@@ -39,11 +39,11 @@ static int chacha20_s390(struct skcipher_request *req)
struct skcipher_walk walk;
unsigned int nbytes;
int rc;
rc = skcipher_walk_virt(&walk, req, false);
- chacha_init_generic(state, ctx->key, req->iv);
+ chacha_init(state, ctx->key, req->iv);
while (walk.nbytes > 0) {
nbytes = walk.nbytes;
if (nbytes < walk.total)
nbytes = round_down(nbytes, walk.stride);
@@ -67,16 +67,10 @@ void hchacha_block_arch(const u32 *state, u32 *stream, int nrounds)
/* TODO: implement hchacha_block_arch() in assembly */
hchacha_block_generic(state, stream, nrounds);
}
EXPORT_SYMBOL(hchacha_block_arch);
-void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv)
-{
- chacha_init_generic(state, key, iv);
-}
-EXPORT_SYMBOL(chacha_init_arch);
-
void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src,
unsigned int bytes, int nrounds)
{
/* s390 chacha20 implementation has 20 rounds hard-coded,
* it cannot handle a block of data or less, but otherwise
@@ -131,16 +131,10 @@ void hchacha_block_arch(const u32 *state, u32 *stream, int nrounds)
kernel_fpu_end();
}
}
EXPORT_SYMBOL(hchacha_block_arch);
-void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv)
-{
- chacha_init_generic(state, key, iv);
-}
-EXPORT_SYMBOL(chacha_init_arch);
-
void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src, unsigned int bytes,
int nrounds)
{
if (!static_branch_likely(&chacha_use_simd) || !crypto_simd_usable() ||
bytes <= CHACHA_BLOCK_SIZE)
@@ -167,11 +161,11 @@ static int chacha_simd_stream_xor(struct skcipher_request *req,
struct skcipher_walk walk;
int err;
err = skcipher_walk_virt(&walk, req, false);
- chacha_init_generic(state, ctx->key, iv);
+ chacha_init(state, ctx->key, iv);
while (walk.nbytes > 0) {
unsigned int nbytes = walk.nbytes;
if (nbytes < walk.total)
@@ -209,11 +203,11 @@ static int xchacha_simd(struct skcipher_request *req)
struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
u32 state[CHACHA_STATE_WORDS] __aligned(8);
struct chacha_ctx subctx;
u8 real_iv[16];
- chacha_init_generic(state, ctx->key, req->iv);
+ chacha_init(state, ctx->key, req->iv);
if (req->cryptlen > CHACHA_BLOCK_SIZE && crypto_simd_usable()) {
kernel_fpu_begin();
hchacha_block_ssse3(state, subctx.key, ctx->nrounds);
kernel_fpu_end();
@@ -19,11 +19,11 @@ static int chacha_stream_xor(struct skcipher_request *req,
u32 state[16];
int err;
err = skcipher_walk_virt(&walk, req, false);
- chacha_init_generic(state, ctx->key, iv);
+ chacha_init(state, ctx->key, iv);
while (walk.nbytes > 0) {
unsigned int nbytes = walk.nbytes;
if (nbytes < walk.total)
@@ -52,11 +52,11 @@ static int crypto_xchacha_crypt(struct skcipher_request *req)
struct chacha_ctx subctx;
u32 state[16];
u8 real_iv[16];
/* Compute the subkey given the original key and first 128 nonce bits */
- chacha_init_generic(state, ctx->key, req->iv);
+ chacha_init(state, ctx->key, req->iv);
hchacha_block_generic(state, subctx.key, ctx->nrounds);
subctx.nrounds = ctx->nrounds;
/* Build the real IV */
memcpy(&real_iv[0], req->iv + 24, 8); /* stream position */
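All of the xchacha request handlers touched above share the derivation that the comments in crypto_xchacha_crypt() spell out: initialize the state, run one HChaCha block to produce the subkey, then build the inner 16-byte IV from the tail of the 32-byte XChaCha IV. A minimal kernel-style sketch of that common pattern with the renamed helper (illustrative only; each arch glue calls its own HChaCha variant such as hchacha_block_arm(), hchacha_block_ssse3(), or hchacha_block_arch() rather than the generic one used here):

#include <crypto/chacha.h>
#include <linux/string.h>

/*
 * Illustrative only: the XChaCha subkey/IV derivation shared by the
 * xchacha_*() handlers above.  @key is the 256-bit key as eight u32
 * words, @iv is the 32-byte XChaCha IV.
 */
static void xchacha_derive_example(u32 subkey[8], u8 real_iv[16],
				   const u32 *key, const u8 *iv, int nrounds)
{
	u32 state[16];

	/* Compute the subkey given the original key and first 128 nonce bits */
	chacha_init(state, key, iv);
	hchacha_block_generic(state, subkey, nrounds);

	/* Build the real IV: stream position first, then the last 64 nonce bits */
	memcpy(&real_iv[0], iv + 24, 8);
	memcpy(&real_iv[8], iv + 16, 8);
}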
@@ -60,12 +60,11 @@ static inline void chacha_init_consts(u32 *state)
state[1] = CHACHA_CONSTANT_ND_3;
state[2] = CHACHA_CONSTANT_2_BY;
state[3] = CHACHA_CONSTANT_TE_K;
}
-void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv);
-static inline void chacha_init_generic(u32 *state, const u32 *key, const u8 *iv)
+static inline void chacha_init(u32 *state, const u32 *key, const u8 *iv)
{
chacha_init_consts(state);
state[4] = key[0];
state[5] = key[1];
state[6] = key[2];
@@ -78,18 +77,10 @@ static inline void chacha_init_generic(u32 *state, const u32 *key, const u8 *iv)
state[13] = get_unaligned_le32(iv + 4);
state[14] = get_unaligned_le32(iv + 8);
state[15] = get_unaligned_le32(iv + 12);
}
-static inline void chacha_init(u32 *state, const u32 *key, const u8 *iv)
-{
- if (IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_CHACHA))
- chacha_init_arch(state, key, iv);
- else
- chacha_init_generic(state, key, iv);
-}
-
void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src,
unsigned int bytes, int nrounds);
void chacha_crypt_generic(u32 *state, u8 *dst, const u8 *src,
unsigned int bytes, int nrounds);
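For reference, the state that the renamed chacha_init() fills in is the usual RFC 7539 layout; the hunk above only shows the tail of the initializer, so the full picture is summarized here as a comment (a summary, not part of the patch):

/*
 * ChaCha state after chacha_init(state, key, iv):
 *   state[0..3]   the "expand 32-byte k" constants
 *   state[4..11]  256-bit key as eight little-endian u32 words
 *   state[12]     block counter, loaded from iv[0..3]
 *   state[13..15] 96-bit nonce, loaded from iv[4..15]
 */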
@@ -64,11 +64,11 @@ static int test_lib_chacha(u8 *revert, u8 *cipher, u8 *plain)
print_hex_dump(KERN_INFO, "iv: ", DUMP_PREFIX_OFFSET,
16, 1, iv, 16, 1);
}
/* Encrypt */
- chacha_init_arch(chacha_state, (u32*)key, iv);
+ chacha_init(chacha_state, (u32 *)key, iv);
start = ktime_get_ns();
chacha_crypt_arch(chacha_state, cipher, plain, data_size, 20);
end = ktime_get_ns();
@@ -79,11 +79,11 @@ static int test_lib_chacha(u8 *revert, u8 *cipher, u8 *plain)
(data_size > 64 ? 64 : data_size), 1);
pr_info("lib encryption took: %lld nsec", end - start);
/* Decrypt */
- chacha_init_arch(chacha_state, (u32 *)key, iv);
+ chacha_init(chacha_state, (u32 *)key, iv);
start = ktime_get_ns();
chacha_crypt_arch(chacha_state, revert, cipher, data_size, 20);
end = ktime_get_ns();
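With the dispatcher gone, direct library users such as the tcrypt benchmark above simply call chacha_init() followed by the crypt helpers. A hedged one-shot sketch of that usage (the wrapper name is made up for illustration; chacha_crypt() dispatches to chacha_crypt_arch() when an arch implementation is built in, otherwise to the generic code):

#include <crypto/chacha.h>

/* Illustrative one-shot ChaCha20 encryption using the renamed initializer. */
static void chacha20_encrypt_example(u8 *dst, const u8 *src, unsigned int len,
				     const u32 key[CHACHA_KEY_SIZE / sizeof(u32)],
				     const u8 iv[CHACHA_IV_SIZE])
{
	u32 state[CHACHA_STATE_WORDS];

	chacha_init(state, key, iv);	/* was chacha_init_generic() / chacha_init_arch() */
	chacha_crypt(state, dst, src, len, 20);
}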