Diffstat (limited to 'arch/riscv/include')
-rw-r--r--  arch/riscv/include/asm/arch_hweight.h  | 24
-rw-r--r--  arch/riscv/include/asm/bitops.h         | 32
-rw-r--r--  arch/riscv/include/asm/checksum.h       | 13
-rw-r--r--  arch/riscv/include/asm/cmpxchg.h        | 12
-rw-r--r--  arch/riscv/include/asm/hwcap.h          |  1
-rw-r--r--  arch/riscv/include/asm/hwprobe.h        |  2
-rw-r--r--  arch/riscv/include/asm/insn-def.h       | 79
-rw-r--r--  arch/riscv/include/asm/pgtable.h        | 15
-rw-r--r--  arch/riscv/include/asm/vector.h         |  1
-rw-r--r--  arch/riscv/include/uapi/asm/hwprobe.h   |  3
10 files changed, 116 insertions, 66 deletions
diff --git a/arch/riscv/include/asm/arch_hweight.h b/arch/riscv/include/asm/arch_hweight.h
index 0e7cdbbec8ef..f3c0831beefc 100644
--- a/arch/riscv/include/asm/arch_hweight.h
+++ b/arch/riscv/include/asm/arch_hweight.h
@@ -19,10 +19,10 @@
 
 static __always_inline unsigned int __arch_hweight32(unsigned int w)
 {
-#if defined(CONFIG_RISCV_ISA_ZBB) && defined(CONFIG_TOOLCHAIN_HAS_ZBB)
-        asm goto(ALTERNATIVE("j %l[legacy]", "nop", 0,
-                             RISCV_ISA_EXT_ZBB, 1)
-                 : : : : legacy);
+        if (!(IS_ENABLED(CONFIG_RISCV_ISA_ZBB) &&
+              IS_ENABLED(CONFIG_TOOLCHAIN_HAS_ZBB) &&
+              riscv_has_extension_likely(RISCV_ISA_EXT_ZBB)))
+                return __sw_hweight32(w);
 
         asm (".option push\n"
              ".option arch,+zbb\n"
@@ -31,10 +31,6 @@ static __always_inline unsigned int __arch_hweight32(unsigned int w)
              : "=r" (w) : "r" (w) :);
 
         return w;
-
-legacy:
-#endif
-        return __sw_hweight32(w);
 }
 
 static inline unsigned int __arch_hweight16(unsigned int w)
@@ -50,10 +46,10 @@ static inline unsigned int __arch_hweight8(unsigned int w)
 #if BITS_PER_LONG == 64
 static __always_inline unsigned long __arch_hweight64(__u64 w)
 {
-#if defined(CONFIG_RISCV_ISA_ZBB) && defined(CONFIG_TOOLCHAIN_HAS_ZBB)
-        asm goto(ALTERNATIVE("j %l[legacy]", "nop", 0,
-                             RISCV_ISA_EXT_ZBB, 1)
-                 : : : : legacy);
+        if (!(IS_ENABLED(CONFIG_RISCV_ISA_ZBB) &&
+              IS_ENABLED(CONFIG_TOOLCHAIN_HAS_ZBB) &&
+              riscv_has_extension_likely(RISCV_ISA_EXT_ZBB)))
+                return __sw_hweight64(w);
 
         asm (".option push\n"
              ".option arch,+zbb\n"
@@ -62,10 +58,6 @@ static __always_inline unsigned long __arch_hweight64(__u64 w)
              : "=r" (w) : "r" (w) :);
 
         return w;
-
-legacy:
-#endif
-        return __sw_hweight64(w);
 }
 #else /* BITS_PER_LONG == 64 */
 static inline unsigned long __arch_hweight64(__u64 w)
diff --git a/arch/riscv/include/asm/bitops.h b/arch/riscv/include/asm/bitops.h
index 77880677b06e..238092125c11 100644
--- a/arch/riscv/include/asm/bitops.h
+++ b/arch/riscv/include/asm/bitops.h
@@ -47,9 +47,8 @@
 
 static __always_inline __attribute_const__ unsigned long variable__ffs(unsigned long word)
 {
-        asm goto(ALTERNATIVE("j %l[legacy]", "nop", 0,
-                             RISCV_ISA_EXT_ZBB, 1)
-                 : : : : legacy);
+        if (!riscv_has_extension_likely(RISCV_ISA_EXT_ZBB))
+                return generic___ffs(word);
 
         asm volatile (".option push\n"
                       ".option arch,+zbb\n"
@@ -58,9 +57,6 @@ static __always_inline __attribute_const__ unsigned long variable__ffs(unsigned
                       : "=r" (word) : "r" (word) :);
 
         return word;
-
-legacy:
-        return generic___ffs(word);
 }
 
 /**
@@ -76,9 +72,8 @@ legacy:
 
 static __always_inline __attribute_const__ unsigned long variable__fls(unsigned long word)
 {
-        asm goto(ALTERNATIVE("j %l[legacy]", "nop", 0,
-                             RISCV_ISA_EXT_ZBB, 1)
-                 : : : : legacy);
+        if (!riscv_has_extension_likely(RISCV_ISA_EXT_ZBB))
+                return generic___fls(word);
 
         asm volatile (".option push\n"
                       ".option arch,+zbb\n"
@@ -87,9 +82,6 @@ static __always_inline __attribute_const__ unsigned long variable__fls(unsigned
                       : "=r" (word) : "r" (word) :);
 
         return BITS_PER_LONG - 1 - word;
-
-legacy:
-        return generic___fls(word);
 }
 
 /**
@@ -105,9 +97,8 @@ legacy:
 
 static __always_inline __attribute_const__ int variable_ffs(int x)
 {
-        asm goto(ALTERNATIVE("j %l[legacy]", "nop", 0,
-                             RISCV_ISA_EXT_ZBB, 1)
-                 : : : : legacy);
+        if (!riscv_has_extension_likely(RISCV_ISA_EXT_ZBB))
+                return generic_ffs(x);
 
         if (!x)
                 return 0;
@@ -119,9 +110,6 @@ static __always_inline __attribute_const__ int variable_ffs(int x)
                       : "=r" (x) : "r" (x) :);
 
         return x + 1;
-
-legacy:
-        return generic_ffs(x);
 }
 
 /**
@@ -137,9 +125,8 @@ legacy:
 
 static __always_inline int variable_fls(unsigned int x)
 {
-        asm goto(ALTERNATIVE("j %l[legacy]", "nop", 0,
-                             RISCV_ISA_EXT_ZBB, 1)
-                 : : : : legacy);
+        if (!riscv_has_extension_likely(RISCV_ISA_EXT_ZBB))
+                return generic_fls(x);
 
         if (!x)
                 return 0;
@@ -151,9 +138,6 @@ static __always_inline int variable_fls(unsigned int x)
                       : "=r" (x) : "r" (x) :);
 
         return 32 - x;
-
-legacy:
-        return generic_fls(x);
 }
 
 /**
diff --git a/arch/riscv/include/asm/checksum.h b/arch/riscv/include/asm/checksum.h
index da378856f1d5..945cce34be92 100644
--- a/arch/riscv/include/asm/checksum.h
+++ b/arch/riscv/include/asm/checksum.h
@@ -49,16 +49,11 @@ static inline __sum16 ip_fast_csum(const void *iph, unsigned int ihl)
          * ZBB only saves three instructions on 32-bit and five on 64-bit so not
          * worth checking if supported without Alternatives.
          */
-        if (IS_ENABLED(CONFIG_RISCV_ISA_ZBB) && IS_ENABLED(CONFIG_TOOLCHAIN_HAS_ZBB)) {
+        if (IS_ENABLED(CONFIG_RISCV_ISA_ZBB) &&
+            IS_ENABLED(CONFIG_TOOLCHAIN_HAS_ZBB) &&
+            riscv_has_extension_likely(RISCV_ISA_EXT_ZBB)) {
                 unsigned long fold_temp;
 
-                asm goto(ALTERNATIVE("j %l[no_zbb]", "nop", 0,
-                                     RISCV_ISA_EXT_ZBB, 1)
-                         :
-                         :
-                         :
-                         : no_zbb);
-
                 if (IS_ENABLED(CONFIG_32BIT)) {
                         asm(".option push \n\
                         .option arch,+zbb \n\
@@ -81,7 +76,7 @@ static inline __sum16 ip_fast_csum(const void *iph, unsigned int ihl)
                 }
                 return (__force __sum16)(csum >> 16);
         }
-no_zbb:
+
 #ifndef CONFIG_32BIT
         csum += ror64(csum, 32);
         csum >>= 32;
diff --git a/arch/riscv/include/asm/cmpxchg.h b/arch/riscv/include/asm/cmpxchg.h
index 122e1485d39a..8712cf9c69dc 100644
--- a/arch/riscv/include/asm/cmpxchg.h
+++ b/arch/riscv/include/asm/cmpxchg.h
@@ -373,9 +373,10 @@ static __always_inline void __cmpwait(volatile void *ptr,
         u32 *__ptr32b;
         ulong __s, __val, __mask;
 
-        asm goto(ALTERNATIVE("j %l[no_zawrs]", "nop",
-                             0, RISCV_ISA_EXT_ZAWRS, 1)
-                 : : : : no_zawrs);
+        if (!riscv_has_extension_likely(RISCV_ISA_EXT_ZAWRS)) {
+                ALT_RISCV_PAUSE();
+                return;
+        }
 
         switch (size) {
         case 1:
@@ -437,11 +438,6 @@ static __always_inline void __cmpwait(volatile void *ptr,
         default:
                 BUILD_BUG();
         }
-
-        return;
-
-no_zawrs:
-        ALT_RISCV_PAUSE();
 }
 
 #define __cmpwait_relaxed(ptr, val) \
diff --git a/arch/riscv/include/asm/hwcap.h b/arch/riscv/include/asm/hwcap.h
index f98fcb5c17d5..dfe57b215e6c 100644
--- a/arch/riscv/include/asm/hwcap.h
+++ b/arch/riscv/include/asm/hwcap.h
@@ -107,6 +107,7 @@
 #define RISCV_ISA_EXT_ZALRSC		98
 #define RISCV_ISA_EXT_ZICBOP		99
 #define RISCV_ISA_EXT_SVRSW60T59B	100
+#define RISCV_ISA_EXT_ZALASR		101
 
 #define RISCV_ISA_EXT_XLINUXENVCFG	127
diff --git a/arch/riscv/include/asm/hwprobe.h b/arch/riscv/include/asm/hwprobe.h
index 58f8dda73259..8c572a464719 100644
--- a/arch/riscv/include/asm/hwprobe.h
+++ b/arch/riscv/include/asm/hwprobe.h
@@ -8,7 +8,7 @@
 
 #include <uapi/asm/hwprobe.h>
 
-#define RISCV_HWPROBE_MAX_KEY 14
+#define RISCV_HWPROBE_MAX_KEY 15
 
 static inline bool riscv_hwprobe_key_is_valid(__s64 key)
 {
diff --git a/arch/riscv/include/asm/insn-def.h b/arch/riscv/include/asm/insn-def.h
index d29da6ccd3dd..7c6daf116756 100644
--- a/arch/riscv/include/asm/insn-def.h
+++ b/arch/riscv/include/asm/insn-def.h
@@ -179,6 +179,7 @@
 #define RV___RS1(v)	__RV_REG(v)
 #define RV___RS2(v)	__RV_REG(v)
 
+#define RV_OPCODE_AMO		RV_OPCODE(47)
 #define RV_OPCODE_MISC_MEM	RV_OPCODE(15)
 #define RV_OPCODE_OP_IMM	RV_OPCODE(19)
 #define RV_OPCODE_SYSTEM	RV_OPCODE(115)
@@ -208,6 +209,84 @@
 	__ASM_STR(.error "hlv.d requires 64-bit support")
 #endif
 
+#define LB_AQ(dest, addr)				\
+	INSN_R(OPCODE_AMO, FUNC3(0), FUNC7(26),		\
+	       RD(dest), RS1(addr), __RS2(0))
+
+#define LB_AQRL(dest, addr)				\
+	INSN_R(OPCODE_AMO, FUNC3(0), FUNC7(27),		\
+	       RD(dest), RS1(addr), __RS2(0))
+
+#define LH_AQ(dest, addr)				\
+	INSN_R(OPCODE_AMO, FUNC3(1), FUNC7(26),		\
+	       RD(dest), RS1(addr), __RS2(0))
+
+#define LH_AQRL(dest, addr)				\
+	INSN_R(OPCODE_AMO, FUNC3(1), FUNC7(27),		\
+	       RD(dest), RS1(addr), __RS2(0))
+
+#define LW_AQ(dest, addr)				\
+	INSN_R(OPCODE_AMO, FUNC3(2), FUNC7(26),		\
+	       RD(dest), RS1(addr), __RS2(0))
+
+#define LW_AQRL(dest, addr)				\
+	INSN_R(OPCODE_AMO, FUNC3(2), FUNC7(27),		\
+	       RD(dest), RS1(addr), __RS2(0))
+
+#define SB_RL(src, addr)				\
+	INSN_R(OPCODE_AMO, FUNC3(0), FUNC7(29),		\
+	       __RD(0), RS1(addr), RS2(src))
+
+#define SB_AQRL(src, addr)				\
+	INSN_R(OPCODE_AMO, FUNC3(0), FUNC7(31),		\
+	       __RD(0), RS1(addr), RS2(src))
+
+#define SH_RL(src, addr)				\
+	INSN_R(OPCODE_AMO, FUNC3(1), FUNC7(29),		\
+	       __RD(0), RS1(addr), RS2(src))
+
+#define SH_AQRL(src, addr)				\
+	INSN_R(OPCODE_AMO, FUNC3(1), FUNC7(31),		\
+	       __RD(0), RS1(addr), RS2(src))
+
+#define SW_RL(src, addr)				\
+	INSN_R(OPCODE_AMO, FUNC3(2), FUNC7(29),		\
+	       __RD(0), RS1(addr), RS2(src))
+
+#define SW_AQRL(src, addr)				\
+	INSN_R(OPCODE_AMO, FUNC3(2), FUNC7(31),		\
+	       __RD(0), RS1(addr), RS2(src))
+
+#ifdef CONFIG_64BIT
+#define LD_AQ(dest, addr)				\
+	INSN_R(OPCODE_AMO, FUNC3(3), FUNC7(26),		\
+	       RD(dest), RS1(addr), __RS2(0))
+
+#define LD_AQRL(dest, addr)				\
+	INSN_R(OPCODE_AMO, FUNC3(3), FUNC7(27),		\
+	       RD(dest), RS1(addr), __RS2(0))
+
+#define SD_RL(src, addr)				\
+	INSN_R(OPCODE_AMO, FUNC3(3), FUNC7(29),		\
+	       __RD(0), RS1(addr), RS2(src))
+
+#define SD_AQRL(src, addr)				\
+	INSN_R(OPCODE_AMO, FUNC3(3), FUNC7(31),		\
+	       __RD(0), RS1(addr), RS2(src))
+#else
+#define LD_AQ(dest, addr)				\
+	__ASM_STR(.error "ld.aq requires 64-bit support")
+
+#define LD_AQRL(dest, addr)				\
+	__ASM_STR(.error "ld.aqrl requires 64-bit support")
+
+#define SD_RL(dest, addr)				\
+	__ASM_STR(.error "sd.rl requires 64-bit support")
+
+#define SD_AQRL(dest, addr)				\
+	__ASM_STR(.error "sd.aqrl requires 64-bit support")
+#endif
+
 #define SINVAL_VMA(vaddr, asid)				\
 	INSN_R(OPCODE_SYSTEM, FUNC3(0), FUNC7(11),	\
 	       __RD(0), RS1(vaddr), RS2(asid))
diff --git a/arch/riscv/include/asm/pgtable.h b/arch/riscv/include/asm/pgtable.h
index 1c311193e7da..8bd36ac842eb 100644
--- a/arch/riscv/include/asm/pgtable.h
+++ b/arch/riscv/include/asm/pgtable.h
@@ -567,8 +567,13 @@ static inline void update_mmu_cache_range(struct vm_fault *vmf,
 		struct vm_area_struct *vma, unsigned long address,
 		pte_t *ptep, unsigned int nr)
 {
-	asm goto(ALTERNATIVE("nop", "j %l[svvptc]", 0, RISCV_ISA_EXT_SVVPTC, 1)
-		 : : : : svvptc);
+	/*
+	 * Svvptc guarantees that the new valid pte will be visible within
+	 * a bounded timeframe, so when the uarch does not cache invalid
+	 * entries, we don't have to do anything.
+	 */
+	if (riscv_has_extension_unlikely(RISCV_ISA_EXT_SVVPTC))
+		return;
 
 	/*
 	 * The kernel assumes that TLBs don't cache invalid entries, but
@@ -580,12 +585,6 @@ static inline void update_mmu_cache_range(struct vm_fault *vmf,
 
 	while (nr--)
 		local_flush_tlb_page(address + nr * PAGE_SIZE);
-
-svvptc:;
-	/*
-	 * Svvptc guarantees that the new valid pte will be visible within
-	 * a bounded timeframe, so when the uarch does not cache invalid
-	 * entries, we don't have to do anything.
-	 */
 }
 #define update_mmu_cache(vma, addr, ptep) \
 	update_mmu_cache_range(NULL, vma, addr, ptep, 1)
diff --git a/arch/riscv/include/asm/vector.h b/arch/riscv/include/asm/vector.h
index b61786d43c20..e7aa449368ad 100644
--- a/arch/riscv/include/asm/vector.h
+++ b/arch/riscv/include/asm/vector.h
@@ -51,6 +51,7 @@ void put_cpu_vector_context(void);
 void riscv_v_thread_free(struct task_struct *tsk);
 void __init riscv_v_setup_ctx_cache(void);
 void riscv_v_thread_alloc(struct task_struct *tsk);
+void __init update_regset_vector_info(unsigned long size);
 
 static inline u32 riscv_v_flags(void)
 {
diff --git a/arch/riscv/include/uapi/asm/hwprobe.h b/arch/riscv/include/uapi/asm/hwprobe.h
index 5d30a4fae37a..1edea2331b8b 100644
--- a/arch/riscv/include/uapi/asm/hwprobe.h
+++ b/arch/riscv/include/uapi/asm/hwprobe.h
@@ -82,6 +82,8 @@ struct riscv_hwprobe {
 #define		RISCV_HWPROBE_EXT_ZAAMO		(1ULL << 56)
 #define		RISCV_HWPROBE_EXT_ZALRSC	(1ULL << 57)
 #define		RISCV_HWPROBE_EXT_ZABHA		(1ULL << 58)
+#define		RISCV_HWPROBE_EXT_ZALASR	(1ULL << 59)
+#define		RISCV_HWPROBE_EXT_ZICBOP	(1ULL << 60)
 #define RISCV_HWPROBE_KEY_CPUPERF_0	5
 #define  RISCV_HWPROBE_MISALIGNED_UNKNOWN	(0 << 0)
 #define  RISCV_HWPROBE_MISALIGNED_EMULATED	(1 << 0)
@@ -107,6 +109,7 @@ struct riscv_hwprobe {
 #define RISCV_HWPROBE_KEY_ZICBOM_BLOCK_SIZE	12
 #define RISCV_HWPROBE_KEY_VENDOR_EXT_SIFIVE_0	13
 #define RISCV_HWPROBE_KEY_VENDOR_EXT_MIPS_0	14
+#define RISCV_HWPROBE_KEY_ZICBOP_BLOCK_SIZE	15
 /* Increase RISCV_HWPROBE_MAX_KEY when adding items. */
 
 /* Flags */
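
The arch_hweight.h, bitops.h, checksum.h and cmpxchg.h hunks above all make the same conversion: the asm goto ALTERNATIVE jump to a legacy:/no_zbb:/no_zawrs: label is replaced by an early return guarded by riscv_has_extension_likely() (or riscv_has_extension_unlikely() in pgtable.h). As an illustration only, this is roughly what variable__ffs() looks like after the change, reconstructed from the hunks above; the ctz mnemonic is written out directly and is an assumption, since the actual instruction line sits between the two hunks and the kernel spells it via a macro:

static __always_inline __attribute_const__ unsigned long variable__ffs(unsigned long word)
{
        /* No run-time Zbb support: fall back to the generic helper. */
        if (!riscv_has_extension_likely(RISCV_ISA_EXT_ZBB))
                return generic___ffs(word);

        /* Zbb is available: count trailing zeros in a single instruction. */
        asm volatile (".option push\n"
                      ".option arch,+zbb\n"
                      "ctz %0, %0\n"
                      ".option pop\n"
                      : "=r" (word) : "r" (word) :);

        return word;
}

The cmpxchg.h change follows the same shape, with ALT_RISCV_PAUSE() as the fallback when Zawrs is absent.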
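
insn-def.h gains .insn encodings for the Zalasr load-acquire/store-release instructions (lb/lh/lw/ld with .aq or .aqrl and sb/sh/sw/sd with .rl or .aqrl) on the AMO major opcode (47, i.e. 0x2f). The patch itself only defines the macros; the snippet below is a hypothetical sketch of how one of them could be used from inline assembly, in the same style as the existing HLV_*/HLVX_* users of INSN_R. The helper name is made up and is not part of this series:

#include <linux/types.h>
#include <asm/insn-def.h>

/* Hypothetical 32-bit load-acquire built on the new LW_AQ encoding. */
static inline u32 example_load_acquire_u32(const u32 *p)
{
        u32 val;

        /* LW_AQ(rd, rs1) emits a ".insn r" string encoding lw.aq rd, (rs1). */
        asm volatile(LW_AQ(%0, %1)
                     : "=r" (val)
                     : "r" (p)
                     : "memory");

        return val;
}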
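
On the UAPI side, Zalasr and Zicbop become discoverable from userspace: two new RISCV_HWPROBE_EXT_* bits under RISCV_HWPROBE_KEY_IMA_EXT_0 plus a new RISCV_HWPROBE_KEY_ZICBOP_BLOCK_SIZE key, which is why RISCV_HWPROBE_MAX_KEY moves from 14 to 15. A rough userspace sketch, assuming headers installed from a kernel that carries this change and calling the riscv_hwprobe syscall directly (no libc wrapper is assumed):

#include <stdio.h>
#include <unistd.h>
#include <sys/syscall.h>
#include <asm/hwprobe.h>
#include <asm/unistd.h>

int main(void)
{
        struct riscv_hwprobe pairs[] = {
                { .key = RISCV_HWPROBE_KEY_IMA_EXT_0 },
                { .key = RISCV_HWPROBE_KEY_ZICBOP_BLOCK_SIZE },
        };

        /* cpusetsize == 0 with cpus == NULL queries all online CPUs. */
        if (syscall(__NR_riscv_hwprobe, pairs, 2, 0, NULL, 0))
                return 1;

        if (pairs[0].value & RISCV_HWPROBE_EXT_ZALASR)
                printf("Zalasr is supported\n");
        printf("Zicbop block size: %llu bytes\n",
               (unsigned long long)pairs[1].value);

        return 0;
}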