/* SPDX-License-Identifier: MIT */
/* SPDX-FileCopyrightText: 2021 Mathieu Desnoyers <mathieu.desnoyers@efficios.com> */
/*
 * rseq/compiler.h
 *
 * Work-around asm goto compiler bugs.
 */
#ifndef _RSEQ_COMPILER_H
#define _RSEQ_COMPILER_H

#if defined __cplusplus
# include <type_traits>	/* for std::remove_cv */
#endif

/* Branch-prediction hints mapped onto GCC/Clang __builtin_expect. */
#define rseq_likely(x)		__builtin_expect(!!(x), 1)
#define rseq_unlikely(x)	__builtin_expect(!!(x), 0)

/*
 * Compiler-level barrier: the "memory" clobber forbids the compiler from
 * reordering or caching memory accesses across this point. It emits no
 * machine instruction and does NOT order accesses performed by the CPU.
 */
#define rseq_barrier()		__asm__ __volatile__("" : : : "memory")

/*
 * Instruct the compiler to perform only a single access to a variable
 * (prohibits merging and refetching). The compiler is also forbidden to reorder
 * successive instances of RSEQ_ACCESS_ONCE(), but only when the compiler is aware of
 * particular ordering. Compiler ordering can be ensured, for example, by
 * putting two RSEQ_ACCESS_ONCE() in separate C statements.
 *
 * This macro does absolutely -nothing- to prevent the CPU from reordering,
 * merging, or refetching absolutely anything at any time. Its main intended
 * use is to mediate communication between process-level code and irq/NMI
 * handlers, all running on the same CPU.
 */
#define RSEQ_ACCESS_ONCE(x)	(*(__volatile__ __typeof__(x) *)&(x))

/*
 * Single-access store/load wrappers built on RSEQ_ACCESS_ONCE(). The
 * __extension__ ({ ... }) statement expression silences pedantic warnings
 * about the GNU extension while keeping (v) evaluated exactly once.
 */
#define RSEQ_WRITE_ONCE(x, v)	__extension__ ({ RSEQ_ACCESS_ONCE(x) = (v); })
#define RSEQ_READ_ONCE(x)	RSEQ_ACCESS_ONCE(x)
37
/*
 * gcc prior to 4.8.2 miscompiles asm goto.
 * https://gcc.gnu.org/bugzilla/show_bug.cgi?id=58670
 *
 * gcc prior to 8.1.0 miscompiles asm goto at O1.
 * https://gcc.gnu.org/bugzilla/show_bug.cgi?id=103908
 *
 * clang prior to version 13.0.1 miscompiles asm goto at O2.
 * https://github.com/llvm/llvm-project/issues/52735
 *
 * Work around these issues by adding a volatile inline asm with
 * memory clobber in the fallthrough after the asm goto and at each
 * label target. Emit this for all compilers in case other similar
 * issues are found in the future.
 */
/* Same expansion as rseq_barrier(): empty asm with a "memory" clobber. */
#define rseq_after_asm_goto()	__asm__ __volatile__ ("" : : : "memory")
dd76f2d6 54
/*
 * Combine two tokens. Two macro levels are required so that macro
 * arguments are fully expanded before the ## paste takes place
 * (pasting directly in a single level would concatenate the argument
 * names themselves, not their expansions).
 */
#define RSEQ__COMBINE_TOKENS(_tokena, _tokenb)	\
	_tokena##_tokenb
#define RSEQ_COMBINE_TOKENS(_tokena, _tokenb)	\
	RSEQ__COMBINE_TOKENS(_tokena, _tokenb)
60
/*
 * Number of bits in a `long`. Prefer the compiler-provided
 * __SIZEOF_LONG__ (GCC/Clang) when available; otherwise fall back to
 * the LP64 data-model macros. Both spellings are checked because some
 * toolchains define only `__LP64__` while others define only `_LP64`.
 */
#if defined(__SIZEOF_LONG__)
#define RSEQ_BITS_PER_LONG	(__SIZEOF_LONG__ * 8)
#elif defined(_LP64) || defined(__LP64__)
#define RSEQ_BITS_PER_LONG	64
#else
#define RSEQ_BITS_PER_LONG	32
#endif
68
#ifdef __cplusplus
/*
 * C++ version: strip references, then cv-qualifiers, via <type_traits>.
 * NOTE(review): the dependent ::type members would need `typename` if
 * this macro were expanded inside a template; this form assumes
 * non-template usage — confirm against callers.
 */
#define rseq_unqual_scalar_typeof(x)					\
	std::remove_cv<std::remove_reference<decltype(x)>::type>::type
#else
/*
 * Expand to a pair of _Generic associations mapping the signed and
 * unsigned variants of `type` to an unqualified zero constant of that
 * same type.
 */
#define rseq_scalar_type_to_expr(type)					\
	unsigned type: (unsigned type)0,				\
	signed type: (signed type)0

/*
 * Use C11 _Generic to express unqualified type from expression. This removes
 * volatile qualifier from expression type.
 *
 * Plain `char` is listed on its own because `char`, `signed char` and
 * `unsigned char` are three distinct types in C. Expressions whose type
 * is not in the table (e.g. pointers) fall through to `default:` and
 * keep their own type.
 */
#define rseq_unqual_scalar_typeof(x)					\
	__typeof__(							\
		_Generic((x),						\
			char: (char)0,					\
			rseq_scalar_type_to_expr(char),			\
			rseq_scalar_type_to_expr(short),		\
			rseq_scalar_type_to_expr(int),			\
			rseq_scalar_type_to_expr(long),			\
			rseq_scalar_type_to_expr(long long),		\
			default: (x)					\
		)							\
	)
#endif
94
#endif  /* _RSEQ_COMPILER_H */