summary refs log tree commit diff
diff options
context:
space:
mode:
-rw-r--r--lib/crypto/arm/sha256.h10
-rw-r--r--lib/crypto/arm64/sha256.h10
-rw-r--r--lib/crypto/riscv/sha256.h8
-rw-r--r--lib/crypto/x86/sha256.h3
4 files changed, 15 insertions, 16 deletions
diff --git a/lib/crypto/arm/sha256.h b/lib/crypto/arm/sha256.h
index da75cbdc51d4..eab713e650f3 100644
--- a/lib/crypto/arm/sha256.h
+++ b/lib/crypto/arm/sha256.h
@@ -5,7 +5,10 @@
* Copyright 2025 Google LLC
*/
#include <asm/neon.h>
-#include <crypto/internal/simd.h>
+#include <asm/simd.h>
+
+static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_neon);
+static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_ce);
asmlinkage void sha256_block_data_order(struct sha256_block_state *state,
const u8 *data, size_t nblocks);
@@ -14,14 +17,11 @@ asmlinkage void sha256_block_data_order_neon(struct sha256_block_state *state,
asmlinkage void sha256_ce_transform(struct sha256_block_state *state,
const u8 *data, size_t nblocks);
-static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_neon);
-static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_ce);
-
static void sha256_blocks(struct sha256_block_state *state,
const u8 *data, size_t nblocks)
{
if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) &&
- static_branch_likely(&have_neon) && crypto_simd_usable()) {
+ static_branch_likely(&have_neon) && likely(may_use_simd())) {
kernel_neon_begin();
if (static_branch_likely(&have_ce))
sha256_ce_transform(state, data, nblocks);
diff --git a/lib/crypto/arm64/sha256.h b/lib/crypto/arm64/sha256.h
index a211966c124a..d95f1077c32b 100644
--- a/lib/crypto/arm64/sha256.h
+++ b/lib/crypto/arm64/sha256.h
@@ -5,9 +5,12 @@
* Copyright 2025 Google LLC
*/
#include <asm/neon.h>
-#include <crypto/internal/simd.h>
+#include <asm/simd.h>
#include <linux/cpufeature.h>
+static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_neon);
+static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_ce);
+
asmlinkage void sha256_block_data_order(struct sha256_block_state *state,
const u8 *data, size_t nblocks);
asmlinkage void sha256_block_neon(struct sha256_block_state *state,
@@ -15,14 +18,11 @@ asmlinkage void sha256_block_neon(struct sha256_block_state *state,
asmlinkage size_t __sha256_ce_transform(struct sha256_block_state *state,
const u8 *data, size_t nblocks);
-static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_neon);
-static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_ce);
-
static void sha256_blocks(struct sha256_block_state *state,
const u8 *data, size_t nblocks)
{
if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) &&
- static_branch_likely(&have_neon) && crypto_simd_usable()) {
+ static_branch_likely(&have_neon) && likely(may_use_simd())) {
if (static_branch_likely(&have_ce)) {
do {
size_t rem;
diff --git a/lib/crypto/riscv/sha256.h b/lib/crypto/riscv/sha256.h
index c0f79c18f119..f36f68d2e88c 100644
--- a/lib/crypto/riscv/sha256.h
+++ b/lib/crypto/riscv/sha256.h
@@ -9,19 +9,19 @@
* Author: Jerry Shih <jerry.shih@sifive.com>
*/
+#include <asm/simd.h>
#include <asm/vector.h>
-#include <crypto/internal/simd.h>
+
+static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_extensions);
asmlinkage void
sha256_transform_zvknha_or_zvknhb_zvkb(struct sha256_block_state *state,
const u8 *data, size_t nblocks);
-static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_extensions);
-
static void sha256_blocks(struct sha256_block_state *state,
const u8 *data, size_t nblocks)
{
- if (static_branch_likely(&have_extensions) && crypto_simd_usable()) {
+ if (static_branch_likely(&have_extensions) && likely(may_use_simd())) {
kernel_vector_begin();
sha256_transform_zvknha_or_zvknhb_zvkb(state, data, nblocks);
kernel_vector_end();
diff --git a/lib/crypto/x86/sha256.h b/lib/crypto/x86/sha256.h
index 669bc06538b6..c852396ef319 100644
--- a/lib/crypto/x86/sha256.h
+++ b/lib/crypto/x86/sha256.h
@@ -5,7 +5,6 @@
* Copyright 2025 Google LLC
*/
#include <asm/fpu/api.h>
-#include <crypto/internal/simd.h>
#include <linux/static_call.h>
DEFINE_STATIC_CALL(sha256_blocks_x86, sha256_blocks_generic);
@@ -16,7 +15,7 @@ DEFINE_STATIC_CALL(sha256_blocks_x86, sha256_blocks_generic);
static void c_fn(struct sha256_block_state *state, const u8 *data, \
size_t nblocks) \
{ \
- if (likely(crypto_simd_usable())) { \
+ if (likely(irq_fpu_usable())) { \
kernel_fpu_begin(); \
asm_fn(state, data, nblocks); \
kernel_fpu_end(); \