arm64: bitops: patch in lse instructions when supported by the CPU
[deliverable/linux.git] arch/arm64/include/asm/lse.h

#ifndef __ASM_LSE_H
#define __ASM_LSE_H

#if defined(CONFIG_AS_LSE) && defined(CONFIG_ARM64_LSE_ATOMICS)

#include <linux/stringify.h>
#include <asm/alternative.h>
#include <asm/cpufeature.h>

#ifdef __ASSEMBLER__

.arch_extension	lse

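/*
 * Emit the LL/SC instruction by default; the alternatives framework
 * patches in the LSE form at boot on CPUs that advertise the feature.
 */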
.macro alt_lse, llsc, lse
	alternative_insn	"\llsc", "\lse", ARM64_HAS_LSE_ATOMICS
.endm

#else	/* __ASSEMBLER__ */

__asm__(".arch_extension	lse");

/* Move the ll/sc atomics out-of-line */
#define __LL_SC_INLINE
#define __LL_SC_PREFIX(x)	__ll_sc_##x
#define __LL_SC_EXPORT(x)	EXPORT_SYMBOL(__LL_SC_PREFIX(x))

/* Macro for constructing calls to out-of-line ll/sc atomics */
#define __LL_SC_CALL(op)	"bl\t" __stringify(__LL_SC_PREFIX(op)) "\n"

/* In-line patching at runtime */
#define ARM64_LSE_ATOMIC_INSN(llsc, lse)				\
	ALTERNATIVE(llsc, lse, ARM64_HAS_LSE_ATOMICS)

#endif	/* __ASSEMBLER__ */
#else	/* CONFIG_AS_LSE && CONFIG_ARM64_LSE_ATOMICS */

#ifdef __ASSEMBLER__

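/* LSE not available: always fall back to the LL/SC sequence. */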
.macro alt_lse, llsc, lse
	\llsc
.endm

#else	/* __ASSEMBLER__ */

#define __LL_SC_INLINE		static inline
#define __LL_SC_PREFIX(x)	x
#define __LL_SC_EXPORT(x)

#define ARM64_LSE_ATOMIC_INSN(llsc, lse)	llsc

#endif	/* __ASSEMBLER__ */
#endif	/* CONFIG_AS_LSE && CONFIG_ARM64_LSE_ATOMICS */
#endif	/* __ASM_LSE_H */
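
For context, the sketch below shows how a C caller might combine these macros: the inline asm branches to the out-of-line LL/SC routine by default, and the alternatives framework rewrites that branch into a single LSE instruction at boot on CPUs that support the extension. This is an illustrative sketch, not an in-tree definition; the function name, the __ll_sc_ callee it implies, and the simplified constraints are assumptions for the example, and the actual users (e.g. asm/atomic_lse.h and arch/arm64/lib/bitops.S) differ in detail.

/*
 * Illustrative sketch only.  On an LSE-capable CPU the patched code
 * executes a single stadd; otherwise it branches to the out-of-line
 * LL/SC implementation, __ll_sc_example_atomic_add (hypothetical name).
 */
static inline void example_atomic_add(int i, atomic_t *v)
{
	register int w0 asm ("w0") = i;		/* out-of-line callee expects i in w0 */
	register atomic_t *x1 asm ("x1") = v;	/* ... and v in x1 */

	asm volatile(ARM64_LSE_ATOMIC_INSN(
	/* default: call the out-of-line LL/SC version */
	__LL_SC_CALL(example_atomic_add),
	/* alternative: a single LSE atomic, patched in at boot */
	"	stadd	%w[i], %[v]\n")
	: [i] "+r" (w0), [v] "+Q" (v->counter)
	: "r" (x1)
	: "x30");				/* bl clobbers the link register */
}

On the assembly side, alt_lse pairs each LL/SC instruction with its LSE replacement (or a nop), so an exclusive load/modify/store loop collapses to one atomic instruction on LSE-capable parts. Again a sketch with arbitrary registers, not the in-tree bitops code:

	/* Atomically OR the mask in x3 into the word at [x1] (sketch only) */
alt_lse	"1:	ldxr	x2, [x1]",	"stset	x3, [x1]"
alt_lse	"	orr	x2, x2, x3",	"nop"
alt_lse	"	stxr	w0, x2, [x1]",	"nop"
alt_lse	"	cbnz	w0, 1b",	"nop"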