author    Jinjie Ruan <ruanjinjie@huawei.com>    2025-08-15 11:06:31 +0800
committer Will Deacon <will@kernel.org>          2025-09-11 15:55:34 +0100
commit    64f4b8b15f1c3c9a4e416fc5b5b4dc354b78e75e (patch)
tree      9107895c5228317c218933bf0c60a183d3f27844 /arch/arm64/include/asm/preempt.h
parent    3c973c51bfbaf356367afa46b94f9100a7d672f2 (diff)
arm64: entry: Refactor preempt_schedule_irq() check code
To align the structure of the code with irqentry_exit_cond_resched() from
the generic entry code, hoist the need_irq_preemption() and IS_ENABLED()
checks earlier, and define separate preemption check functions depending
on whether dynamic preemption is enabled.

Signed-off-by: Jinjie Ruan <ruanjinjie@huawei.com>
Reviewed-by: Ada Couprie Diaz <ada.coupriediaz@arm.com>
Acked-by: Catalin Marinas <catalin.marinas@arm.com>
Signed-off-by: Will Deacon <will@kernel.org>
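For reference, the generic entry code this aligns with splits the check into a raw function plus a dynamic wrapper gated by a static key. The following is a condensed sketch of that pattern from kernel/entry/common.c, not verbatim kernel source; the arm64-side definitions added by this series live in arch/arm64/kernel/entry-common.c and are outside this file's diff, and details such as the RCU and debug-entry sanity checks are omitted here:

/*
 * Condensed sketch of the generic pattern: an unconditional raw check,
 * plus a dynamic wrapper used when CONFIG_PREEMPT_DYNAMIC selects the
 * preemption model at boot time.
 */
void raw_irqentry_exit_cond_resched(void)
{
	if (!preempt_count()) {
		if (need_resched())
			preempt_schedule_irq();
	}
}

#ifdef CONFIG_PREEMPT_DYNAMIC
DEFINE_STATIC_KEY_TRUE(sk_dynamic_irqentry_exit_cond_resched);

void dynamic_irqentry_exit_cond_resched(void)
{
	/* The preempt= command-line option can disable this at runtime. */
	if (!static_branch_unlikely(&sk_dynamic_irqentry_exit_cond_resched))
		return;
	raw_irqentry_exit_cond_resched();
}
#endif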
Diffstat (limited to 'arch/arm64/include/asm/preempt.h')
-rw-r--r--  arch/arm64/include/asm/preempt.h | 6
1 file changed, 6 insertions, 0 deletions
diff --git a/arch/arm64/include/asm/preempt.h b/arch/arm64/include/asm/preempt.h
index 0159b625cc7f..c2437ea0790f 100644
--- a/arch/arm64/include/asm/preempt.h
+++ b/arch/arm64/include/asm/preempt.h
@@ -85,6 +85,7 @@ static inline bool should_resched(int preempt_offset)
 void preempt_schedule(void);
 void preempt_schedule_notrace(void);
+void raw_irqentry_exit_cond_resched(void);
 
 #ifdef CONFIG_PREEMPT_DYNAMIC
 
 DECLARE_STATIC_KEY_TRUE(sk_dynamic_irqentry_exit_cond_resched);
@@ -92,13 +93,18 @@ void dynamic_preempt_schedule(void);
 #define __preempt_schedule() dynamic_preempt_schedule()
 void dynamic_preempt_schedule_notrace(void);
 #define __preempt_schedule_notrace() dynamic_preempt_schedule_notrace()
+void dynamic_irqentry_exit_cond_resched(void);
+#define irqentry_exit_cond_resched() dynamic_irqentry_exit_cond_resched()
 
 #else /* CONFIG_PREEMPT_DYNAMIC */
 
 #define __preempt_schedule() preempt_schedule()
 #define __preempt_schedule_notrace() preempt_schedule_notrace()
+#define irqentry_exit_cond_resched() raw_irqentry_exit_cond_resched()
 
 #endif /* CONFIG_PREEMPT_DYNAMIC */
+#else /* CONFIG_PREEMPTION */
+#define irqentry_exit_cond_resched() {}
 #endif /* CONFIG_PREEMPTION */
 
 #endif /* __ASM_PREEMPT_H */
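Taken together, the hunks above make irqentry_exit_cond_resched() resolve to dynamic_irqentry_exit_cond_resched() under CONFIG_PREEMPT_DYNAMIC, to raw_irqentry_exit_cond_resched() for a statically preemptible kernel, and to an empty statement when CONFIG_PREEMPTION is off, so the entry code can call it unconditionally. The standalone C program below is an illustration only: it reuses the header's names, substitutes a plain boolean for the kernel's static key, and uses printf placeholders for the real reschedule logic. Compiling it with or without -DCONFIG_PREEMPTION / -DCONFIG_PREEMPT_DYNAMIC exercises the same three-way dispatch.

#include <stdbool.h>
#include <stdio.h>

/* Stand-in for the unconditional reschedule check. */
static void raw_irqentry_exit_cond_resched(void)
{
	printf("reschedule check ran\n");
}

#ifdef CONFIG_PREEMPTION

#ifdef CONFIG_PREEMPT_DYNAMIC
/* Stand-in for sk_dynamic_irqentry_exit_cond_resched (a static key in the kernel). */
static bool dynamic_resched_enabled = true;

static void dynamic_irqentry_exit_cond_resched(void)
{
	if (!dynamic_resched_enabled)	/* runtime on/off switch */
		return;
	raw_irqentry_exit_cond_resched();
}
#define irqentry_exit_cond_resched() dynamic_irqentry_exit_cond_resched()
#else /* CONFIG_PREEMPT_DYNAMIC */
#define irqentry_exit_cond_resched() raw_irqentry_exit_cond_resched()
#endif /* CONFIG_PREEMPT_DYNAMIC */

#else /* CONFIG_PREEMPTION */
/* No preemption: the hook compiles down to nothing. */
#define irqentry_exit_cond_resched() {}
#endif /* CONFIG_PREEMPTION */

int main(void)
{
	/* The call site needs no #ifdefs of its own. */
	irqentry_exit_cond_resched();
	return 0;
}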