author     Eric Biggers <ebiggers@kernel.org>  2025-08-27 08:11:25 -0700
committer  Eric Biggers <ebiggers@kernel.org>  2025-08-29 09:50:19 -0700
commit     13cecc526d8fe7eeb9b136159738688a1a10cd82 (patch)
tree       4499f5fb8bada21d451533734e89aa771d7adf06 /include/crypto
parent     1ae46b6eb5b9a97978fe12a71f5de53ab977297f (diff)
lib/crypto: chacha: Consolidate into single module
Consolidate the ChaCha code into a single module (excluding chacha-block-generic.c, which remains always built-in for random.c), similar to various other algorithms:

- Each arch now provides a header file lib/crypto/$(SRCARCH)/chacha.h, replacing lib/crypto/$(SRCARCH)/chacha*.c.  The header defines chacha_crypt_arch() and hchacha_block_arch().  It is included by lib/crypto/chacha.c, and thus the code gets built into the single libchacha module, with improved inlining in some cases.

- Whether arch-optimized ChaCha is buildable is now controlled centrally by lib/crypto/Kconfig instead of by lib/crypto/$(SRCARCH)/Kconfig.  The conditions for enabling it remain the same as before, and it remains enabled by default.

- Any additional arch-specific translation units for the optimized ChaCha code, such as assembly files, are now compiled by lib/crypto/Makefile instead of lib/crypto/$(SRCARCH)/Makefile.

This removes the last use for the Makefile and Kconfig files in the arm64, mips, powerpc, riscv, and s390 subdirectories of lib/crypto/.  So also remove those files and the references to them.

Reviewed-by: Ard Biesheuvel <ardb@kernel.org>
Link: https://lore.kernel.org/r/20250827151131.27733-7-ebiggers@kernel.org
Signed-off-by: Eric Biggers <ebiggers@kernel.org>
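For illustration, the first point can be pictured as the sketch below. This is a minimal sketch, not the patch contents: it assumes the dispatch previously done inline in include/crypto/chacha.h moves essentially unchanged into lib/crypto/chacha.c, which now pulls in the per-arch header when one is buildable. The function names and CONFIG_CRYPTO_ARCH_HAVE_LIB_CHACHA come from the old header shown in the diff; the file layout and export type are assumptions.

/*
 * Sketch of lib/crypto/chacha.c after consolidation (hypothetical).
 */
#include <crypto/chacha.h>

#ifdef CONFIG_CRYPTO_ARCH_HAVE_LIB_CHACHA
#include "chacha.h"	/* the per-arch lib/crypto/$(SRCARCH)/chacha.h */
#endif

void chacha_crypt(struct chacha_state *state, u8 *dst, const u8 *src,
		  unsigned int bytes, int nrounds)
{
#ifdef CONFIG_CRYPTO_ARCH_HAVE_LIB_CHACHA
	chacha_crypt_arch(state, dst, src, bytes, nrounds);
#else
	chacha_crypt_generic(state, dst, src, bytes, nrounds);
#endif
}
EXPORT_SYMBOL_GPL(chacha_crypt);	/* export type assumed for the sketch */

Because the arch hook now comes from a header included directly by lib/crypto/chacha.c, chacha_crypt_arch() can be a static inline wrapper around the assembly entry point, which is presumably where the "improved inlining in some cases" comes from.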
Diffstat (limited to 'include/crypto')
-rw-r--r--  include/crypto/chacha.h  28
1 file changed, 4 insertions(+), 24 deletions(-)
diff --git a/include/crypto/chacha.h b/include/crypto/chacha.h
index be25a0b65a05..38e26dff27b0 100644
--- a/include/crypto/chacha.h
+++ b/include/crypto/chacha.h
@@ -45,19 +45,11 @@ static inline void chacha20_block(struct chacha_state *state,
chacha_block_generic(state, out, 20);
}
-void hchacha_block_arch(const struct chacha_state *state,
- u32 out[HCHACHA_OUT_WORDS], int nrounds);
void hchacha_block_generic(const struct chacha_state *state,
u32 out[HCHACHA_OUT_WORDS], int nrounds);
-static inline void hchacha_block(const struct chacha_state *state,
- u32 out[HCHACHA_OUT_WORDS], int nrounds)
-{
- if (IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_CHACHA))
- hchacha_block_arch(state, out, nrounds);
- else
- hchacha_block_generic(state, out, nrounds);
-}
+void hchacha_block(const struct chacha_state *state,
+ u32 out[HCHACHA_OUT_WORDS], int nrounds);
enum chacha_constants { /* expand 32-byte k */
CHACHA_CONSTANT_EXPA = 0x61707865U,
@@ -93,20 +85,8 @@ static inline void chacha_init(struct chacha_state *state,
state->x[15] = get_unaligned_le32(iv + 12);
}
-void chacha_crypt_arch(struct chacha_state *state, u8 *dst, const u8 *src,
- unsigned int bytes, int nrounds);
-void chacha_crypt_generic(struct chacha_state *state, u8 *dst, const u8 *src,
- unsigned int bytes, int nrounds);
-
-static inline void chacha_crypt(struct chacha_state *state,
- u8 *dst, const u8 *src,
- unsigned int bytes, int nrounds)
-{
- if (IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_CHACHA))
- chacha_crypt_arch(state, dst, src, bytes, nrounds);
- else
- chacha_crypt_generic(state, dst, src, bytes, nrounds);
-}
+void chacha_crypt(struct chacha_state *state, u8 *dst, const u8 *src,
+ unsigned int bytes, int nrounds);
static inline void chacha20_crypt(struct chacha_state *state,
u8 *dst, const u8 *src, unsigned int bytes)
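As the diff shows, chacha_crypt() and hchacha_block() change from static inline dispatch wrappers into out-of-line functions provided by the libchacha module. Call sites are unaffected; a hypothetical caller (not part of this patch) still looks like:

#include <crypto/chacha.h>

/* Encrypt a buffer in place with 20-round ChaCha; 'state' is assumed
 * to have been set up with chacha_init() beforehand. */
static void example_encrypt(struct chacha_state *state,
			    u8 *buf, unsigned int len)
{
	chacha_crypt(state, buf, buf, len, 20);
}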