Fix: racy init_done initialization
[librseq.git] / include / rseq / arch / ppc.h
... / ...
CommitLineData
1/* SPDX-License-Identifier: MIT */
2/* SPDX-FileCopyrightText: 2016-2024 Mathieu Desnoyers <mathieu.desnoyers@efficios.com> */
3/* SPDX-FileCopyrightText: 2016-2018 Boqun Feng <boqun.feng@gmail.com> */
4
5/*
6 * rseq/arch/ppc.h
7 */
8
9#ifndef _RSEQ_RSEQ_H
10#error "Never use <rseq/arch/ppc.h> directly; include <rseq/rseq.h> instead."
11#endif
12
13/*
14 * RSEQ_ASM_*() macro helpers are internal to the librseq headers. Those
15 * are not part of the public API.
16 */
17
18/*
19 * RSEQ_SIG is used with the following trap instruction:
20 *
21 * powerpc-be: 0f e5 00 0b twui r5,11
22 * powerpc64-le: 0b 00 e5 0f twui r5,11
23 * powerpc64-be: 0f e5 00 0b twui r5,11
24 */
25
/*
 * This 32-bit word is emitted immediately before each abort handler by
 * RSEQ_ASM_DEFINE_ABORT below. NOTE(review): per the rseq ABI the kernel
 * compares the word preceding the abort IP against the signature
 * registered at rseq registration time — see rseq(2) to confirm.
 */
26#define RSEQ_SIG 0x0fe5000b
28/*
29 * Refer to the Linux kernel memory model (LKMM) for documentation of
30 * the memory barriers.
31 */
32
33/* CPU memory barrier. */
/* "sync" is the heavyweight full barrier in the Power ISA (orders all accesses). */
34#define rseq_smp_mb() __asm__ __volatile__ ("sync" ::: "memory", "cc")
35/* Only used internally in this header. */
/*
 * "lwsync" is the lightweight barrier: it orders all access pairs except
 * store->load, which is sufficient for acquire/release, rmb and wmb below.
 */
36#define __rseq_smp_lwsync() __asm__ __volatile__ ("lwsync" ::: "memory", "cc")
37/* CPU read memory barrier */
38#define rseq_smp_rmb() __rseq_smp_lwsync()
39/* CPU write memory barrier */
40#define rseq_smp_wmb() __rseq_smp_lwsync()
41
42/* Acquire: One-way permeable barrier. */
/*
 * Read *p once, then issue lwsync so that memory accesses after the
 * acquire cannot be reordered before the load. Evaluates to the loaded
 * value (GCC statement expression, hence __extension__).
 */
43#define rseq_smp_load_acquire(p) \
44__extension__ ({ \
45 rseq_unqual_scalar_typeof(*(p)) ____p1 = RSEQ_READ_ONCE(*(p)); \
46 __rseq_smp_lwsync(); \
47 ____p1; \
48})
49
50/* Acquire barrier after control dependency. */
51#define rseq_smp_acquire__after_ctrl_dep() __rseq_smp_lwsync()
52
53/* Release: One-way permeable barrier. */
/*
 * Issue lwsync, then write *p once, so that memory accesses before the
 * release cannot be reordered after the store.
 */
54#define rseq_smp_store_release(p, v) \
55do { \
56 __rseq_smp_lwsync(); \
57 RSEQ_WRITE_ONCE(*(p), v); \
58} while (0)
59
60/*
61 * Helper macros to define and access a variable of long integer type.
62 * Only used internally in rseq headers.
63 */
/*
 * Long is 64-bit on ppc64 (std/ld/ldx/cmpd*) and 32-bit otherwise
 * (stw/lwz/lwzx/cmpw*). The %U and %X operand modifiers let the
 * compiler emit the update/indexed addressing forms permitted by the
 * "m" constraint of the named operand @arg (GCC PowerPC-specific
 * output modifiers).
 */
64#ifdef RSEQ_ARCH_PPC64
65# define RSEQ_ASM_STORE_LONG(arg) "std%U[" __rseq_str(arg) "]%X[" __rseq_str(arg) "] " /* To memory ("m" constraint) */
66# define RSEQ_ASM_STORE_INT(arg) "stw%U[" __rseq_str(arg) "]%X[" __rseq_str(arg) "] " /* To memory ("m" constraint) */
67# define RSEQ_ASM_LOAD_LONG(arg) "ld%U[" __rseq_str(arg) "]%X[" __rseq_str(arg) "] " /* From memory ("m" constraint) */
68# define RSEQ_ASM_LOAD_INT(arg) "lwz%U[" __rseq_str(arg) "]%X[" __rseq_str(arg) "] " /* From memory ("m" constraint) */
69# define RSEQ_ASM_LOADX_LONG "ldx " /* From base register ("b" constraint) */
70# define RSEQ_ASM_CMP_LONG "cmpd " /* Register-to-register comparison */
71# define RSEQ_ASM_CMP_LONG_INT "cmpdi " /* Register-to-immediate comparison */
72#else
73# define RSEQ_ASM_STORE_LONG(arg) "stw%U[" __rseq_str(arg) "]%X[" __rseq_str(arg) "] " /* To memory ("m" constraint) */
74# define RSEQ_ASM_STORE_INT(arg) RSEQ_ASM_STORE_LONG(arg) /* To memory ("m" constraint) */
75# define RSEQ_ASM_LOAD_LONG(arg) "lwz%U[" __rseq_str(arg) "]%X[" __rseq_str(arg) "] " /* From memory ("m" constraint) */
76# define RSEQ_ASM_LOAD_INT(arg) RSEQ_ASM_LOAD_LONG(arg) /* From memory ("m" constraint) */
77# define RSEQ_ASM_LOADX_LONG "lwzx " /* From base register ("b" constraint) */
78# define RSEQ_ASM_CMP_LONG "cmpw " /* Register-to-register comparison */
79# define RSEQ_ASM_CMP_LONG_INT "cmpwi " /* Register-to-immediate comparison */
80#endif
81
82/*
83 * Helper macros to define a variable of pointer type stored in a 64-bit
84 * integer. Only used internally in rseq headers.
85 */
86#ifdef RSEQ_ARCH_PPC64
87# define RSEQ_ASM_U64_PTR(x) ".quad " x
88#else
89/* 32-bit only supported on big endian. */
/*
 * Emit a zero high word followed by the 32-bit pointer in the low word,
 * so the pair forms a big-endian 64-bit value as the ABI expects.
 */
90# define RSEQ_ASM_U64_PTR(x) ".long 0x0, " x
91#endif
92
/* Emit a 32-bit integer constant in the assembly stream. */
93#define RSEQ_ASM_U32(x) ".long " x
94
95/* Common architecture support macros. */
96#include "rseq/arch/generic/common.h"
97
98/*
99 * Define a critical section abort handler.
100 *
101 * @label:
102 * Local label to the abort handler.
103 * @teardown:
104 * Sequence of instructions to run on abort.
105 * @abort_label:
106 * C label to jump to at the end of the sequence.
107 */
/*
 * The handler lives in the dedicated "__rseq_failure" executable
 * section. The RSEQ_SIG word is emitted immediately before @label so
 * the signature precedes the abort IP. The final branch targets a C
 * label, so this macro must be used inside an asm goto statement
 * ("%l" operand).
 */
108#define RSEQ_ASM_DEFINE_ABORT(label, teardown, abort_label) \
109 ".pushsection __rseq_failure, \"ax\"\n\t" \
110 RSEQ_ASM_U32(__rseq_str(RSEQ_SIG)) "\n\t" \
111 __rseq_str(label) ":\n\t" \
112 teardown \
113 "b %l[" __rseq_str(abort_label) "]\n\t" \
114 ".popsection\n\t"
115
116/*
117 * Store the address of the critical section descriptor structure at
118 * @cs_label into the @rseq_cs pointer and emit the label @label, which
119 * is the beginning of the sequence of consecutive assembly instructions.
120 *
121 * @label:
122 * Local label to the beginning of the sequence of consecutive assembly
123 * instructions.
124 * @cs_label:
125 * Source local label to the critical section descriptor structure.
126 * @rseq_cs:
127 * Destination pointer where to store the address of the critical
128 * section descriptor structure.
129 *
130 * Clobbers r17 (the scratch register used throughout this header).
131 */
130#ifdef RSEQ_ARCH_PPC64
/*
 * ppc64: materialize the full 64-bit address of @cs_label in r17,
 * 16 bits at a time (@highest/@higher into the upper half, shifted
 * into place by rldicr, then @high/@l), and store it with std.
 */
131# define RSEQ_ASM_STORE_RSEQ_CS(label, cs_label, rseq_cs) \
132 RSEQ_INJECT_ASM(1) \
133 "lis %%r17, (" __rseq_str(cs_label) ")@highest\n\t" \
134 "ori %%r17, %%r17, (" __rseq_str(cs_label) ")@higher\n\t" \
135 "rldicr %%r17, %%r17, 32, 31\n\t" \
136 "oris %%r17, %%r17, (" __rseq_str(cs_label) ")@high\n\t" \
137 "ori %%r17, %%r17, (" __rseq_str(cs_label) ")@l\n\t" \
138 "std %%r17, %[" __rseq_str(rseq_cs) "]\n\t" \
139 __rseq_str(label) ":\n\t"
140#else
/* ppc32: classic @ha/@l pair builds the 32-bit address in r17. */
141# define RSEQ_ASM_STORE_RSEQ_CS(label, cs_label, rseq_cs) \
142 RSEQ_INJECT_ASM(1) \
143 "lis %%r17, (" __rseq_str(cs_label) ")@ha\n\t" \
144 "addi %%r17, %%r17, (" __rseq_str(cs_label) ")@l\n\t" \
145 RSEQ_ASM_STORE_INT(rseq_cs) "%%r17, %[" __rseq_str(rseq_cs) "]\n\t" \
146 __rseq_str(label) ":\n\t"
147#endif
148
149/* Jump to local label @label when @cpu_id != @current_cpu_id. */
/* Clobbers r17 and condition register field cr7. */
150#define RSEQ_ASM_CBNE_CPU_ID(cpu_id, current_cpu_id, label) \
151 RSEQ_INJECT_ASM(2) \
152 RSEQ_ASM_LOAD_INT(current_cpu_id) "%%r17, %[" __rseq_str(current_cpu_id) "]\n\t" \
153 "cmpw cr7, %[" __rseq_str(cpu_id) "], %%r17\n\t" \
154 "bne- cr7, " __rseq_str(label) "\n\t"
155
156/*
157 * RSEQ_ASM_OPs: asm operations for rseq. Only used internally by rseq headers.
158 * RSEQ_ASM_OP_R_*: has hard-coded registers in it
159 * RSEQ_ASM_OP_* (else): doesn't have hard-coded registers(unless cr7)
160 */
161
162/* Jump to local label @label when @var != @expect. */
/* Clobbers r17 and cr7; the hinted "bne-" marks the branch as unlikely. */
163#define RSEQ_ASM_OP_CBNE(var, expect, label) \
164 RSEQ_ASM_LOAD_LONG(var) "%%r17, %[" __rseq_str(var) "]\n\t" \
165 RSEQ_ASM_CMP_LONG "cr7, %%r17, %[" __rseq_str(expect) "]\n\t" \
166 "bne- cr7, " __rseq_str(label) "\n\t"
167
168/* Jump to local label @label when @var == @expect. */
/* Clobbers r17 and cr7; the hinted "beq-" marks the branch as unlikely. */
169#define RSEQ_ASM_OP_CBEQ(var, expect, label) \
170 RSEQ_ASM_LOAD_LONG(var) "%%r17, %[" __rseq_str(var) "]\n\t" \
171 RSEQ_ASM_CMP_LONG "cr7, %%r17, %[" __rseq_str(expect) "]\n\t" \
172 "beq- cr7, " __rseq_str(label) "\n\t"
173
/* All RSEQ_ASM_OP_R_* helpers below use r17 as the working register. */
174/* Store @value to address @var. */
175#define RSEQ_ASM_OP_STORE(value, var) \
176 RSEQ_ASM_STORE_LONG(var) "%[" __rseq_str(value) "], %[" __rseq_str(var) "]\n\t"
177
178/* Load @var to r17 */
179#define RSEQ_ASM_OP_R_LOAD(var) \
180 RSEQ_ASM_LOAD_LONG(var) "%%r17, %[" __rseq_str(var) "]\n\t"
181
182/* Store r17 to @var */
183#define RSEQ_ASM_OP_R_STORE(var) \
184 RSEQ_ASM_STORE_LONG(var) "%%r17, %[" __rseq_str(var) "]\n\t"
185
186/* Add @count to r17 */
187#define RSEQ_ASM_OP_R_ADD(count) \
188 "add %%r17, %[" __rseq_str(count) "], %%r17\n\t"
189
190/* Load (r17 + voffp) to r17 */
/* Indexed load: r17 = *(long *)(r17 + voffp), for pointer-chasing (list pop). */
191#define RSEQ_ASM_OP_R_LOADX(voffp) \
192 RSEQ_ASM_LOADX_LONG "%%r17, %[" __rseq_str(voffp) "], %%r17\n\t"
193
194/*
195 * Copy @len bytes from @src to @dst. This is an inefficient bytewise
196 * copy and could be improved in the future.
197 *
198 * Hard-coded register protocol: r19 = byte count, r20 = src, r21 = dst,
199 * r18 = byte scratch. r20/r21 are pre-decremented once so the lbzu/stbu
200 * update forms walk both buffers one byte per iteration. Local label
201 * 222 is the copy loop, 333 the exit (taken immediately when len == 0).
202 * Clobbers r18-r21 and cr0 (implicit in the compare/branch).
203 */
198#define RSEQ_ASM_OP_R_BYTEWISE_MEMCPY() \
199 RSEQ_ASM_CMP_LONG_INT "%%r19, 0\n\t" \
200 "beq 333f\n\t" \
201 "addi %%r20, %%r20, -1\n\t" \
202 "addi %%r21, %%r21, -1\n\t" \
203 "222:\n\t" \
204 "lbzu %%r18, 1(%%r20)\n\t" \
205 "stbu %%r18, 1(%%r21)\n\t" \
206 "addi %%r19, %%r19, -1\n\t" \
207 RSEQ_ASM_CMP_LONG_INT "%%r19, 0\n\t" \
208 "bne 222b\n\t" \
209 "333:\n\t" \
210
210
211/*
212 * End-of-sequence store of r17 to address @var. Emit
213 * @post_commit_label label after the store instruction.
214 */
/* @post_commit_label marks the first address after the commit store. */
215#define RSEQ_ASM_OP_R_FINAL_STORE(var, post_commit_label) \
216 RSEQ_ASM_STORE_LONG(var) "%%r17, %[" __rseq_str(var) "]\n\t" \
217 __rseq_str(post_commit_label) ":\n\t"
218
219/*
220 * End-of-sequence store of @value to address @var. Emit
221 * @post_commit_label label after the store instruction.
222 */
/* Same as above, but the stored value comes from operand @value, not r17. */
223#define RSEQ_ASM_OP_FINAL_STORE(value, var, post_commit_label) \
224 RSEQ_ASM_STORE_LONG(var) "%[" __rseq_str(value) "], %[" __rseq_str(var) "]\n\t" \
225 __rseq_str(post_commit_label) ":\n\t"
226
/*
 * Template instantiation: each inclusion of "rseq/arch/ppc/bits.h"
 * expands one variant of the rseq APIs, selected by the
 * RSEQ_TEMPLATE_INDEX_* (per-cpu-id / per-mm-cid / none) and
 * RSEQ_TEMPLATE_MO_* (relaxed / release) macros defined around it.
 */
227/* Per-cpu-id indexing. */
228
229#define RSEQ_TEMPLATE_INDEX_CPU_ID
230#define RSEQ_TEMPLATE_MO_RELAXED
231#include "rseq/arch/ppc/bits.h"
232#undef RSEQ_TEMPLATE_MO_RELAXED
233
234#define RSEQ_TEMPLATE_MO_RELEASE
235#include "rseq/arch/ppc/bits.h"
236#undef RSEQ_TEMPLATE_MO_RELEASE
237#undef RSEQ_TEMPLATE_INDEX_CPU_ID
238
239/* Per-mm-cid indexing. */
240
241#define RSEQ_TEMPLATE_INDEX_MM_CID
242#define RSEQ_TEMPLATE_MO_RELAXED
243#include "rseq/arch/ppc/bits.h"
244#undef RSEQ_TEMPLATE_MO_RELAXED
245
246#define RSEQ_TEMPLATE_MO_RELEASE
247#include "rseq/arch/ppc/bits.h"
248#undef RSEQ_TEMPLATE_MO_RELEASE
249#undef RSEQ_TEMPLATE_INDEX_MM_CID
250
251/* APIs which are not indexed. */
252
253#define RSEQ_TEMPLATE_INDEX_NONE
254#define RSEQ_TEMPLATE_MO_RELAXED
255#include "rseq/arch/ppc/bits.h"
256#undef RSEQ_TEMPLATE_MO_RELAXED
257#undef RSEQ_TEMPLATE_INDEX_NONE
This page took 0.034008 seconds and 5 git commands to generate.