/*
 * This file contains the instructions exercised by the test titled:
 *
 *	"Test x86 instruction decoder - new instructions"
 *
 * Note that the 'Expecting' comment lines are consumed by the
 * gen-insn-x86-dat.awk script and have the format:
 *
 *	Expecting: <op> <branch> <rel>
 *
 * If this file is changed, remember to run the gen-insn-x86-dat.sh
 * script and commit the result.
 *
 * Refer to insn-x86.c for more details.
 */

int main(void)
{
	/* Following line is a marker for the awk script - do not change */
	asm volatile("rdtsc"); /* Start here */

#ifdef __x86_64__

	/* bndmk m64, bnd */
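	/*
	 * MPX (Memory Protection Extensions) adds four 128-bit bounds
	 * registers, %bnd0-%bnd3, each holding a lower and an upper
	 * bound.  bndmk computes bounds from the effective address of
	 * its memory operand without accessing memory.
	 */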

	asm volatile("bndmk (%rax), %bnd0");
	asm volatile("bndmk (%r8), %bnd0");
	asm volatile("bndmk (0x12345678), %bnd0");
	asm volatile("bndmk (%rax), %bnd3");
	asm volatile("bndmk (%rcx,%rax,1), %bnd0");
	asm volatile("bndmk 0x12345678(,%rax,1), %bnd0");
	asm volatile("bndmk (%rax,%rcx,1), %bnd0");
	asm volatile("bndmk (%rax,%rcx,8), %bnd0");
	asm volatile("bndmk 0x12(%rax), %bnd0");
	asm volatile("bndmk 0x12(%rbp), %bnd0");
	asm volatile("bndmk 0x12(%rcx,%rax,1), %bnd0");
	asm volatile("bndmk 0x12(%rbp,%rax,1), %bnd0");
	asm volatile("bndmk 0x12(%rax,%rcx,1), %bnd0");
	asm volatile("bndmk 0x12(%rax,%rcx,8), %bnd0");
	asm volatile("bndmk 0x12345678(%rax), %bnd0");
	asm volatile("bndmk 0x12345678(%rbp), %bnd0");
	asm volatile("bndmk 0x12345678(%rcx,%rax,1), %bnd0");
	asm volatile("bndmk 0x12345678(%rbp,%rax,1), %bnd0");
	asm volatile("bndmk 0x12345678(%rax,%rcx,1), %bnd0");
	asm volatile("bndmk 0x12345678(%rax,%rcx,8), %bnd0");

	/* bndcl r/m64, bnd */
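	/* bndcl raises #BR if the operand is below the lower bound. */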

	asm volatile("bndcl (%rax), %bnd0");
	asm volatile("bndcl (%r8), %bnd0");
	asm volatile("bndcl (0x12345678), %bnd0");
	asm volatile("bndcl (%rax), %bnd3");
	asm volatile("bndcl (%rcx,%rax,1), %bnd0");
	asm volatile("bndcl 0x12345678(,%rax,1), %bnd0");
	asm volatile("bndcl (%rax,%rcx,1), %bnd0");
	asm volatile("bndcl (%rax,%rcx,8), %bnd0");
	asm volatile("bndcl 0x12(%rax), %bnd0");
	asm volatile("bndcl 0x12(%rbp), %bnd0");
	asm volatile("bndcl 0x12(%rcx,%rax,1), %bnd0");
	asm volatile("bndcl 0x12(%rbp,%rax,1), %bnd0");
	asm volatile("bndcl 0x12(%rax,%rcx,1), %bnd0");
	asm volatile("bndcl 0x12(%rax,%rcx,8), %bnd0");
	asm volatile("bndcl 0x12345678(%rax), %bnd0");
	asm volatile("bndcl 0x12345678(%rbp), %bnd0");
	asm volatile("bndcl 0x12345678(%rcx,%rax,1), %bnd0");
	asm volatile("bndcl 0x12345678(%rbp,%rax,1), %bnd0");
	asm volatile("bndcl 0x12345678(%rax,%rcx,1), %bnd0");
	asm volatile("bndcl 0x12345678(%rax,%rcx,8), %bnd0");
	asm volatile("bndcl %rax, %bnd0");

	/* bndcu r/m64, bnd */
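	/* bndcu checks the upper bound, which is stored in one's complement form. */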

	asm volatile("bndcu (%rax), %bnd0");
	asm volatile("bndcu (%r8), %bnd0");
	asm volatile("bndcu (0x12345678), %bnd0");
	asm volatile("bndcu (%rax), %bnd3");
	asm volatile("bndcu (%rcx,%rax,1), %bnd0");
	asm volatile("bndcu 0x12345678(,%rax,1), %bnd0");
	asm volatile("bndcu (%rax,%rcx,1), %bnd0");
	asm volatile("bndcu (%rax,%rcx,8), %bnd0");
	asm volatile("bndcu 0x12(%rax), %bnd0");
	asm volatile("bndcu 0x12(%rbp), %bnd0");
	asm volatile("bndcu 0x12(%rcx,%rax,1), %bnd0");
	asm volatile("bndcu 0x12(%rbp,%rax,1), %bnd0");
	asm volatile("bndcu 0x12(%rax,%rcx,1), %bnd0");
	asm volatile("bndcu 0x12(%rax,%rcx,8), %bnd0");
	asm volatile("bndcu 0x12345678(%rax), %bnd0");
	asm volatile("bndcu 0x12345678(%rbp), %bnd0");
	asm volatile("bndcu 0x12345678(%rcx,%rax,1), %bnd0");
	asm volatile("bndcu 0x12345678(%rbp,%rax,1), %bnd0");
	asm volatile("bndcu 0x12345678(%rax,%rcx,1), %bnd0");
	asm volatile("bndcu 0x12345678(%rax,%rcx,8), %bnd0");
	asm volatile("bndcu %rax, %bnd0");

	/* bndcn r/m64, bnd */
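	/* bndcn is the same check for an upper bound that is not complemented. */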

	asm volatile("bndcn (%rax), %bnd0");
	asm volatile("bndcn (%r8), %bnd0");
	asm volatile("bndcn (0x12345678), %bnd0");
	asm volatile("bndcn (%rax), %bnd3");
	asm volatile("bndcn (%rcx,%rax,1), %bnd0");
	asm volatile("bndcn 0x12345678(,%rax,1), %bnd0");
	asm volatile("bndcn (%rax,%rcx,1), %bnd0");
	asm volatile("bndcn (%rax,%rcx,8), %bnd0");
	asm volatile("bndcn 0x12(%rax), %bnd0");
	asm volatile("bndcn 0x12(%rbp), %bnd0");
	asm volatile("bndcn 0x12(%rcx,%rax,1), %bnd0");
	asm volatile("bndcn 0x12(%rbp,%rax,1), %bnd0");
	asm volatile("bndcn 0x12(%rax,%rcx,1), %bnd0");
	asm volatile("bndcn 0x12(%rax,%rcx,8), %bnd0");
	asm volatile("bndcn 0x12345678(%rax), %bnd0");
	asm volatile("bndcn 0x12345678(%rbp), %bnd0");
	asm volatile("bndcn 0x12345678(%rcx,%rax,1), %bnd0");
	asm volatile("bndcn 0x12345678(%rbp,%rax,1), %bnd0");
	asm volatile("bndcn 0x12345678(%rax,%rcx,1), %bnd0");
	asm volatile("bndcn 0x12345678(%rax,%rcx,8), %bnd0");
	asm volatile("bndcn %rax, %bnd0");

	/* bndmov m128, bnd */
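	/* bndmov copies a bounds pair between a bound register and m128 or another bound register. */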

	asm volatile("bndmov (%rax), %bnd0");
	asm volatile("bndmov (%r8), %bnd0");
	asm volatile("bndmov (0x12345678), %bnd0");
	asm volatile("bndmov (%rax), %bnd3");
	asm volatile("bndmov (%rcx,%rax,1), %bnd0");
	asm volatile("bndmov 0x12345678(,%rax,1), %bnd0");
	asm volatile("bndmov (%rax,%rcx,1), %bnd0");
	asm volatile("bndmov (%rax,%rcx,8), %bnd0");
	asm volatile("bndmov 0x12(%rax), %bnd0");
	asm volatile("bndmov 0x12(%rbp), %bnd0");
	asm volatile("bndmov 0x12(%rcx,%rax,1), %bnd0");
	asm volatile("bndmov 0x12(%rbp,%rax,1), %bnd0");
	asm volatile("bndmov 0x12(%rax,%rcx,1), %bnd0");
	asm volatile("bndmov 0x12(%rax,%rcx,8), %bnd0");
	asm volatile("bndmov 0x12345678(%rax), %bnd0");
	asm volatile("bndmov 0x12345678(%rbp), %bnd0");
	asm volatile("bndmov 0x12345678(%rcx,%rax,1), %bnd0");
	asm volatile("bndmov 0x12345678(%rbp,%rax,1), %bnd0");
	asm volatile("bndmov 0x12345678(%rax,%rcx,1), %bnd0");
	asm volatile("bndmov 0x12345678(%rax,%rcx,8), %bnd0");

	/* bndmov bnd, m128 */

	asm volatile("bndmov %bnd0, (%rax)");
	asm volatile("bndmov %bnd0, (%r8)");
	asm volatile("bndmov %bnd0, (0x12345678)");
	asm volatile("bndmov %bnd3, (%rax)");
	asm volatile("bndmov %bnd0, (%rcx,%rax,1)");
	asm volatile("bndmov %bnd0, 0x12345678(,%rax,1)");
	asm volatile("bndmov %bnd0, (%rax,%rcx,1)");
	asm volatile("bndmov %bnd0, (%rax,%rcx,8)");
	asm volatile("bndmov %bnd0, 0x12(%rax)");
	asm volatile("bndmov %bnd0, 0x12(%rbp)");
	asm volatile("bndmov %bnd0, 0x12(%rcx,%rax,1)");
	asm volatile("bndmov %bnd0, 0x12(%rbp,%rax,1)");
	asm volatile("bndmov %bnd0, 0x12(%rax,%rcx,1)");
	asm volatile("bndmov %bnd0, 0x12(%rax,%rcx,8)");
	asm volatile("bndmov %bnd0, 0x12345678(%rax)");
	asm volatile("bndmov %bnd0, 0x12345678(%rbp)");
	asm volatile("bndmov %bnd0, 0x12345678(%rcx,%rax,1)");
	asm volatile("bndmov %bnd0, 0x12345678(%rbp,%rax,1)");
	asm volatile("bndmov %bnd0, 0x12345678(%rax,%rcx,1)");
	asm volatile("bndmov %bnd0, 0x12345678(%rax,%rcx,8)");

	/* bndmov bnd2, bnd1 */

	asm volatile("bndmov %bnd0, %bnd1");
	asm volatile("bndmov %bnd1, %bnd0");

	/* bndldx mib, bnd */
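	/*
	 * bndldx/bndstx use the 'mib' (memory operand with index and
	 * base) form of SIB addressing: the base addresses the bound
	 * table and the index register holds the pointer value, so the
	 * scale field is not meaningful and no scale-8 forms appear.
	 */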

	asm volatile("bndldx (%rax), %bnd0");
	asm volatile("bndldx (%r8), %bnd0");
	asm volatile("bndldx (0x12345678), %bnd0");
	asm volatile("bndldx (%rax), %bnd3");
	asm volatile("bndldx (%rcx,%rax,1), %bnd0");
	asm volatile("bndldx 0x12345678(,%rax,1), %bnd0");
	asm volatile("bndldx (%rax,%rcx,1), %bnd0");
	asm volatile("bndldx 0x12(%rax), %bnd0");
	asm volatile("bndldx 0x12(%rbp), %bnd0");
	asm volatile("bndldx 0x12(%rcx,%rax,1), %bnd0");
	asm volatile("bndldx 0x12(%rbp,%rax,1), %bnd0");
	asm volatile("bndldx 0x12(%rax,%rcx,1), %bnd0");
	asm volatile("bndldx 0x12345678(%rax), %bnd0");
	asm volatile("bndldx 0x12345678(%rbp), %bnd0");
	asm volatile("bndldx 0x12345678(%rcx,%rax,1), %bnd0");
	asm volatile("bndldx 0x12345678(%rbp,%rax,1), %bnd0");
	asm volatile("bndldx 0x12345678(%rax,%rcx,1), %bnd0");

	/* bndstx bnd, mib */

	asm volatile("bndstx %bnd0, (%rax)");
	asm volatile("bndstx %bnd0, (%r8)");
	asm volatile("bndstx %bnd0, (0x12345678)");
	asm volatile("bndstx %bnd3, (%rax)");
	asm volatile("bndstx %bnd0, (%rcx,%rax,1)");
	asm volatile("bndstx %bnd0, 0x12345678(,%rax,1)");
	asm volatile("bndstx %bnd0, (%rax,%rcx,1)");
	asm volatile("bndstx %bnd0, 0x12(%rax)");
	asm volatile("bndstx %bnd0, 0x12(%rbp)");
	asm volatile("bndstx %bnd0, 0x12(%rcx,%rax,1)");
	asm volatile("bndstx %bnd0, 0x12(%rbp,%rax,1)");
	asm volatile("bndstx %bnd0, 0x12(%rax,%rcx,1)");
	asm volatile("bndstx %bnd0, 0x12345678(%rax)");
	asm volatile("bndstx %bnd0, 0x12345678(%rbp)");
	asm volatile("bndstx %bnd0, 0x12345678(%rcx,%rax,1)");
	asm volatile("bndstx %bnd0, 0x12345678(%rbp,%rax,1)");
	asm volatile("bndstx %bnd0, 0x12345678(%rax,%rcx,1)");

	/* bnd prefix on call, ret, jmp and all jcc */
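	/*
	 * The 0xF2 (bnd) prefix on near branches tells MPX-aware
	 * hardware to preserve the bound registers across the branch.
	 */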

	asm volatile("bnd call label1"); /* Expecting: call unconditional 0 */
	asm volatile("bnd call *(%eax)"); /* Expecting: call indirect 0 */
	asm volatile("bnd ret"); /* Expecting: ret indirect 0 */
	asm volatile("bnd jmp label1"); /* Expecting: jmp unconditional 0 */
	asm volatile("bnd jmp label1"); /* Expecting: jmp unconditional 0 */
	asm volatile("bnd jmp *(%ecx)"); /* Expecting: jmp indirect 0 */
	asm volatile("bnd jne label1"); /* Expecting: jcc conditional 0 */

	/* sha1rnds4 imm8, xmm2/m128, xmm1 */
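	/* imm8 bits 1:0 select which of the four SHA-1 round functions/constants to use. */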

	asm volatile("sha1rnds4 $0x0, %xmm1, %xmm0");
	asm volatile("sha1rnds4 $0x91, %xmm7, %xmm2");
	asm volatile("sha1rnds4 $0x91, %xmm8, %xmm0");
	asm volatile("sha1rnds4 $0x91, %xmm7, %xmm8");
	asm volatile("sha1rnds4 $0x91, %xmm15, %xmm8");
	asm volatile("sha1rnds4 $0x91, (%rax), %xmm0");
	asm volatile("sha1rnds4 $0x91, (%r8), %xmm0");
	asm volatile("sha1rnds4 $0x91, (0x12345678), %xmm0");
	asm volatile("sha1rnds4 $0x91, (%rax), %xmm3");
	asm volatile("sha1rnds4 $0x91, (%rcx,%rax,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(,%rax,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, (%rax,%rcx,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, (%rax,%rcx,8), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12(%rax), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12(%rbp), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12(%rcx,%rax,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12(%rbp,%rax,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12(%rax,%rcx,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12(%rax,%rcx,8), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%rax), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%rbp), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%rcx,%rax,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%rbp,%rax,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%rax,%rcx,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%rax,%rcx,8), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%rax,%rcx,8), %xmm15");

	/* sha1nexte xmm2/m128, xmm1 */
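	/* sha1nexte computes the SHA-1 state variable E after four rounds. */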

	asm volatile("sha1nexte %xmm1, %xmm0");
	asm volatile("sha1nexte %xmm7, %xmm2");
	asm volatile("sha1nexte %xmm8, %xmm0");
	asm volatile("sha1nexte %xmm7, %xmm8");
	asm volatile("sha1nexte %xmm15, %xmm8");
	asm volatile("sha1nexte (%rax), %xmm0");
	asm volatile("sha1nexte (%r8), %xmm0");
	asm volatile("sha1nexte (0x12345678), %xmm0");
	asm volatile("sha1nexte (%rax), %xmm3");
	asm volatile("sha1nexte (%rcx,%rax,1), %xmm0");
	asm volatile("sha1nexte 0x12345678(,%rax,1), %xmm0");
	asm volatile("sha1nexte (%rax,%rcx,1), %xmm0");
	asm volatile("sha1nexte (%rax,%rcx,8), %xmm0");
	asm volatile("sha1nexte 0x12(%rax), %xmm0");
	asm volatile("sha1nexte 0x12(%rbp), %xmm0");
	asm volatile("sha1nexte 0x12(%rcx,%rax,1), %xmm0");
	asm volatile("sha1nexte 0x12(%rbp,%rax,1), %xmm0");
	asm volatile("sha1nexte 0x12(%rax,%rcx,1), %xmm0");
	asm volatile("sha1nexte 0x12(%rax,%rcx,8), %xmm0");
	asm volatile("sha1nexte 0x12345678(%rax), %xmm0");
	asm volatile("sha1nexte 0x12345678(%rbp), %xmm0");
	asm volatile("sha1nexte 0x12345678(%rcx,%rax,1), %xmm0");
	asm volatile("sha1nexte 0x12345678(%rbp,%rax,1), %xmm0");
	asm volatile("sha1nexte 0x12345678(%rax,%rcx,1), %xmm0");
	asm volatile("sha1nexte 0x12345678(%rax,%rcx,8), %xmm0");
	asm volatile("sha1nexte 0x12345678(%rax,%rcx,8), %xmm15");

	/* sha1msg1 xmm2/m128, xmm1 */
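	/* sha1msg1 is the intermediate step of the SHA-1 message schedule. */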

	asm volatile("sha1msg1 %xmm1, %xmm0");
	asm volatile("sha1msg1 %xmm7, %xmm2");
	asm volatile("sha1msg1 %xmm8, %xmm0");
	asm volatile("sha1msg1 %xmm7, %xmm8");
	asm volatile("sha1msg1 %xmm15, %xmm8");
	asm volatile("sha1msg1 (%rax), %xmm0");
	asm volatile("sha1msg1 (%r8), %xmm0");
	asm volatile("sha1msg1 (0x12345678), %xmm0");
	asm volatile("sha1msg1 (%rax), %xmm3");
	asm volatile("sha1msg1 (%rcx,%rax,1), %xmm0");
	asm volatile("sha1msg1 0x12345678(,%rax,1), %xmm0");
	asm volatile("sha1msg1 (%rax,%rcx,1), %xmm0");
	asm volatile("sha1msg1 (%rax,%rcx,8), %xmm0");
	asm volatile("sha1msg1 0x12(%rax), %xmm0");
	asm volatile("sha1msg1 0x12(%rbp), %xmm0");
	asm volatile("sha1msg1 0x12(%rcx,%rax,1), %xmm0");
	asm volatile("sha1msg1 0x12(%rbp,%rax,1), %xmm0");
	asm volatile("sha1msg1 0x12(%rax,%rcx,1), %xmm0");
	asm volatile("sha1msg1 0x12(%rax,%rcx,8), %xmm0");
	asm volatile("sha1msg1 0x12345678(%rax), %xmm0");
	asm volatile("sha1msg1 0x12345678(%rbp), %xmm0");
	asm volatile("sha1msg1 0x12345678(%rcx,%rax,1), %xmm0");
	asm volatile("sha1msg1 0x12345678(%rbp,%rax,1), %xmm0");
	asm volatile("sha1msg1 0x12345678(%rax,%rcx,1), %xmm0");
	asm volatile("sha1msg1 0x12345678(%rax,%rcx,8), %xmm0");
	asm volatile("sha1msg1 0x12345678(%rax,%rcx,8), %xmm15");

	/* sha1msg2 xmm2/m128, xmm1 */
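	/* sha1msg2 completes the schedule for the next four SHA-1 message dwords. */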

	asm volatile("sha1msg2 %xmm1, %xmm0");
	asm volatile("sha1msg2 %xmm7, %xmm2");
	asm volatile("sha1msg2 %xmm8, %xmm0");
	asm volatile("sha1msg2 %xmm7, %xmm8");
	asm volatile("sha1msg2 %xmm15, %xmm8");
	asm volatile("sha1msg2 (%rax), %xmm0");
	asm volatile("sha1msg2 (%r8), %xmm0");
	asm volatile("sha1msg2 (0x12345678), %xmm0");
	asm volatile("sha1msg2 (%rax), %xmm3");
	asm volatile("sha1msg2 (%rcx,%rax,1), %xmm0");
	asm volatile("sha1msg2 0x12345678(,%rax,1), %xmm0");
	asm volatile("sha1msg2 (%rax,%rcx,1), %xmm0");
	asm volatile("sha1msg2 (%rax,%rcx,8), %xmm0");
	asm volatile("sha1msg2 0x12(%rax), %xmm0");
	asm volatile("sha1msg2 0x12(%rbp), %xmm0");
	asm volatile("sha1msg2 0x12(%rcx,%rax,1), %xmm0");
	asm volatile("sha1msg2 0x12(%rbp,%rax,1), %xmm0");
	asm volatile("sha1msg2 0x12(%rax,%rcx,1), %xmm0");
	asm volatile("sha1msg2 0x12(%rax,%rcx,8), %xmm0");
	asm volatile("sha1msg2 0x12345678(%rax), %xmm0");
	asm volatile("sha1msg2 0x12345678(%rbp), %xmm0");
	asm volatile("sha1msg2 0x12345678(%rcx,%rax,1), %xmm0");
	asm volatile("sha1msg2 0x12345678(%rbp,%rax,1), %xmm0");
	asm volatile("sha1msg2 0x12345678(%rax,%rcx,1), %xmm0");
	asm volatile("sha1msg2 0x12345678(%rax,%rcx,8), %xmm0");
	asm volatile("sha1msg2 0x12345678(%rax,%rcx,8), %xmm15");

	/* sha256rnds2 <XMM0>, xmm2/m128, xmm1 */
	/* Note sha256rnds2 has an implicit operand 'xmm0' */
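	/* The implicit %xmm0 supplies the message-plus-constant words for the two rounds. */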

	asm volatile("sha256rnds2 %xmm4, %xmm1");
	asm volatile("sha256rnds2 %xmm7, %xmm2");
	asm volatile("sha256rnds2 %xmm8, %xmm1");
	asm volatile("sha256rnds2 %xmm7, %xmm8");
	asm volatile("sha256rnds2 %xmm15, %xmm8");
	asm volatile("sha256rnds2 (%rax), %xmm1");
	asm volatile("sha256rnds2 (%r8), %xmm1");
	asm volatile("sha256rnds2 (0x12345678), %xmm1");
	asm volatile("sha256rnds2 (%rax), %xmm3");
	asm volatile("sha256rnds2 (%rcx,%rax,1), %xmm1");
	asm volatile("sha256rnds2 0x12345678(,%rax,1), %xmm1");
	asm volatile("sha256rnds2 (%rax,%rcx,1), %xmm1");
	asm volatile("sha256rnds2 (%rax,%rcx,8), %xmm1");
	asm volatile("sha256rnds2 0x12(%rax), %xmm1");
	asm volatile("sha256rnds2 0x12(%rbp), %xmm1");
	asm volatile("sha256rnds2 0x12(%rcx,%rax,1), %xmm1");
	asm volatile("sha256rnds2 0x12(%rbp,%rax,1), %xmm1");
	asm volatile("sha256rnds2 0x12(%rax,%rcx,1), %xmm1");
	asm volatile("sha256rnds2 0x12(%rax,%rcx,8), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%rax), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%rbp), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%rcx,%rax,1), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%rbp,%rax,1), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%rax,%rcx,1), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%rax,%rcx,8), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%rax,%rcx,8), %xmm15");

	/* sha256msg1 xmm2/m128, xmm1 */
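	/* sha256msg1 performs the sigma0 (intermediate) part of the SHA-256 message schedule. */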

	asm volatile("sha256msg1 %xmm1, %xmm0");
	asm volatile("sha256msg1 %xmm7, %xmm2");
	asm volatile("sha256msg1 %xmm8, %xmm0");
	asm volatile("sha256msg1 %xmm7, %xmm8");
	asm volatile("sha256msg1 %xmm15, %xmm8");
	asm volatile("sha256msg1 (%rax), %xmm0");
	asm volatile("sha256msg1 (%r8), %xmm0");
	asm volatile("sha256msg1 (0x12345678), %xmm0");
	asm volatile("sha256msg1 (%rax), %xmm3");
	asm volatile("sha256msg1 (%rcx,%rax,1), %xmm0");
	asm volatile("sha256msg1 0x12345678(,%rax,1), %xmm0");
	asm volatile("sha256msg1 (%rax,%rcx,1), %xmm0");
	asm volatile("sha256msg1 (%rax,%rcx,8), %xmm0");
	asm volatile("sha256msg1 0x12(%rax), %xmm0");
	asm volatile("sha256msg1 0x12(%rbp), %xmm0");
	asm volatile("sha256msg1 0x12(%rcx,%rax,1), %xmm0");
	asm volatile("sha256msg1 0x12(%rbp,%rax,1), %xmm0");
	asm volatile("sha256msg1 0x12(%rax,%rcx,1), %xmm0");
	asm volatile("sha256msg1 0x12(%rax,%rcx,8), %xmm0");
	asm volatile("sha256msg1 0x12345678(%rax), %xmm0");
	asm volatile("sha256msg1 0x12345678(%rbp), %xmm0");
	asm volatile("sha256msg1 0x12345678(%rcx,%rax,1), %xmm0");
	asm volatile("sha256msg1 0x12345678(%rbp,%rax,1), %xmm0");
	asm volatile("sha256msg1 0x12345678(%rax,%rcx,1), %xmm0");
	asm volatile("sha256msg1 0x12345678(%rax,%rcx,8), %xmm0");
	asm volatile("sha256msg1 0x12345678(%rax,%rcx,8), %xmm15");

	/* sha256msg2 xmm2/m128, xmm1 */
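	/* sha256msg2 performs the final sigma1 part of the SHA-256 message schedule. */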

	asm volatile("sha256msg2 %xmm1, %xmm0");
	asm volatile("sha256msg2 %xmm7, %xmm2");
	asm volatile("sha256msg2 %xmm8, %xmm0");
	asm volatile("sha256msg2 %xmm7, %xmm8");
	asm volatile("sha256msg2 %xmm15, %xmm8");
	asm volatile("sha256msg2 (%rax), %xmm0");
	asm volatile("sha256msg2 (%r8), %xmm0");
	asm volatile("sha256msg2 (0x12345678), %xmm0");
	asm volatile("sha256msg2 (%rax), %xmm3");
	asm volatile("sha256msg2 (%rcx,%rax,1), %xmm0");
	asm volatile("sha256msg2 0x12345678(,%rax,1), %xmm0");
	asm volatile("sha256msg2 (%rax,%rcx,1), %xmm0");
	asm volatile("sha256msg2 (%rax,%rcx,8), %xmm0");
	asm volatile("sha256msg2 0x12(%rax), %xmm0");
	asm volatile("sha256msg2 0x12(%rbp), %xmm0");
	asm volatile("sha256msg2 0x12(%rcx,%rax,1), %xmm0");
	asm volatile("sha256msg2 0x12(%rbp,%rax,1), %xmm0");
	asm volatile("sha256msg2 0x12(%rax,%rcx,1), %xmm0");
	asm volatile("sha256msg2 0x12(%rax,%rcx,8), %xmm0");
	asm volatile("sha256msg2 0x12345678(%rax), %xmm0");
	asm volatile("sha256msg2 0x12345678(%rbp), %xmm0");
	asm volatile("sha256msg2 0x12345678(%rcx,%rax,1), %xmm0");
	asm volatile("sha256msg2 0x12345678(%rbp,%rax,1), %xmm0");
	asm volatile("sha256msg2 0x12345678(%rax,%rcx,1), %xmm0");
	asm volatile("sha256msg2 0x12345678(%rax,%rcx,8), %xmm0");
	asm volatile("sha256msg2 0x12345678(%rax,%rcx,8), %xmm15");

	/* clflushopt m8 */
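	/* clflushopt flushes a cache line like clflush but with weaker ordering, so flushes of different lines can proceed concurrently. */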

	asm volatile("clflushopt (%rax)");
	asm volatile("clflushopt (%r8)");
	asm volatile("clflushopt (0x12345678)");
	asm volatile("clflushopt 0x12345678(%rax,%rcx,8)");
	asm volatile("clflushopt 0x12345678(%r8,%rcx,8)");
	/* Also check instructions in the same group encoding as clflushopt */
	asm volatile("clflush (%rax)");
	asm volatile("clflush (%r8)");
	asm volatile("sfence");

	/* clwb m8 */
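	/* clwb writes a cache line back to memory without requiring its invalidation. */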

	asm volatile("clwb (%rax)");
	asm volatile("clwb (%r8)");
	asm volatile("clwb (0x12345678)");
	asm volatile("clwb 0x12345678(%rax,%rcx,8)");
	asm volatile("clwb 0x12345678(%r8,%rcx,8)");
	/* Also check instructions in the same group encoding as clwb */
	asm volatile("xsaveopt (%rax)");
	asm volatile("xsaveopt (%r8)");
	asm volatile("mfence");

	/* xsavec mem */
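	/* xsavec saves extended processor state in the compacted format. */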

	asm volatile("xsavec (%rax)");
	asm volatile("xsavec (%r8)");
	asm volatile("xsavec (0x12345678)");
	asm volatile("xsavec 0x12345678(%rax,%rcx,8)");
	asm volatile("xsavec 0x12345678(%r8,%rcx,8)");

	/* xsaves mem */
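	/* xsaves is the privileged, compacted save that also covers supervisor state. */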

	asm volatile("xsaves (%rax)");
	asm volatile("xsaves (%r8)");
	asm volatile("xsaves (0x12345678)");
	asm volatile("xsaves 0x12345678(%rax,%rcx,8)");
	asm volatile("xsaves 0x12345678(%r8,%rcx,8)");

	/* xrstors mem */
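	/* xrstors restores state saved by xsaves. */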

	asm volatile("xrstors (%rax)");
	asm volatile("xrstors (%r8)");
	asm volatile("xrstors (0x12345678)");
	asm volatile("xrstors 0x12345678(%rax,%rcx,8)");
	asm volatile("xrstors 0x12345678(%r8,%rcx,8)");

#else /* #ifdef __x86_64__ */
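	/*
	 * The 32-bit (i386) forms of the same instruction groups
	 * follow; see the 64-bit section above for what each does.
	 */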

	/* bndmk m32, bnd */

	asm volatile("bndmk (%eax), %bnd0");
	asm volatile("bndmk (0x12345678), %bnd0");
	asm volatile("bndmk (%eax), %bnd3");
	asm volatile("bndmk (%ecx,%eax,1), %bnd0");
	asm volatile("bndmk 0x12345678(,%eax,1), %bnd0");
	asm volatile("bndmk (%eax,%ecx,1), %bnd0");
	asm volatile("bndmk (%eax,%ecx,8), %bnd0");
	asm volatile("bndmk 0x12(%eax), %bnd0");
	asm volatile("bndmk 0x12(%ebp), %bnd0");
	asm volatile("bndmk 0x12(%ecx,%eax,1), %bnd0");
	asm volatile("bndmk 0x12(%ebp,%eax,1), %bnd0");
	asm volatile("bndmk 0x12(%eax,%ecx,1), %bnd0");
	asm volatile("bndmk 0x12(%eax,%ecx,8), %bnd0");
	asm volatile("bndmk 0x12345678(%eax), %bnd0");
	asm volatile("bndmk 0x12345678(%ebp), %bnd0");
	asm volatile("bndmk 0x12345678(%ecx,%eax,1), %bnd0");
	asm volatile("bndmk 0x12345678(%ebp,%eax,1), %bnd0");
	asm volatile("bndmk 0x12345678(%eax,%ecx,1), %bnd0");
	asm volatile("bndmk 0x12345678(%eax,%ecx,8), %bnd0");

	/* bndcl r/m32, bnd */

	asm volatile("bndcl (%eax), %bnd0");
	asm volatile("bndcl (0x12345678), %bnd0");
	asm volatile("bndcl (%eax), %bnd3");
	asm volatile("bndcl (%ecx,%eax,1), %bnd0");
	asm volatile("bndcl 0x12345678(,%eax,1), %bnd0");
	asm volatile("bndcl (%eax,%ecx,1), %bnd0");
	asm volatile("bndcl (%eax,%ecx,8), %bnd0");
	asm volatile("bndcl 0x12(%eax), %bnd0");
	asm volatile("bndcl 0x12(%ebp), %bnd0");
	asm volatile("bndcl 0x12(%ecx,%eax,1), %bnd0");
	asm volatile("bndcl 0x12(%ebp,%eax,1), %bnd0");
	asm volatile("bndcl 0x12(%eax,%ecx,1), %bnd0");
	asm volatile("bndcl 0x12(%eax,%ecx,8), %bnd0");
	asm volatile("bndcl 0x12345678(%eax), %bnd0");
	asm volatile("bndcl 0x12345678(%ebp), %bnd0");
	asm volatile("bndcl 0x12345678(%ecx,%eax,1), %bnd0");
	asm volatile("bndcl 0x12345678(%ebp,%eax,1), %bnd0");
	asm volatile("bndcl 0x12345678(%eax,%ecx,1), %bnd0");
	asm volatile("bndcl 0x12345678(%eax,%ecx,8), %bnd0");
	asm volatile("bndcl %eax, %bnd0");

	/* bndcu r/m32, bnd */

	asm volatile("bndcu (%eax), %bnd0");
	asm volatile("bndcu (0x12345678), %bnd0");
	asm volatile("bndcu (%eax), %bnd3");
	asm volatile("bndcu (%ecx,%eax,1), %bnd0");
	asm volatile("bndcu 0x12345678(,%eax,1), %bnd0");
	asm volatile("bndcu (%eax,%ecx,1), %bnd0");
	asm volatile("bndcu (%eax,%ecx,8), %bnd0");
	asm volatile("bndcu 0x12(%eax), %bnd0");
	asm volatile("bndcu 0x12(%ebp), %bnd0");
	asm volatile("bndcu 0x12(%ecx,%eax,1), %bnd0");
	asm volatile("bndcu 0x12(%ebp,%eax,1), %bnd0");
	asm volatile("bndcu 0x12(%eax,%ecx,1), %bnd0");
	asm volatile("bndcu 0x12(%eax,%ecx,8), %bnd0");
	asm volatile("bndcu 0x12345678(%eax), %bnd0");
	asm volatile("bndcu 0x12345678(%ebp), %bnd0");
	asm volatile("bndcu 0x12345678(%ecx,%eax,1), %bnd0");
	asm volatile("bndcu 0x12345678(%ebp,%eax,1), %bnd0");
	asm volatile("bndcu 0x12345678(%eax,%ecx,1), %bnd0");
	asm volatile("bndcu 0x12345678(%eax,%ecx,8), %bnd0");
	asm volatile("bndcu %eax, %bnd0");

	/* bndcn r/m32, bnd */

	asm volatile("bndcn (%eax), %bnd0");
	asm volatile("bndcn (0x12345678), %bnd0");
	asm volatile("bndcn (%eax), %bnd3");
	asm volatile("bndcn (%ecx,%eax,1), %bnd0");
	asm volatile("bndcn 0x12345678(,%eax,1), %bnd0");
	asm volatile("bndcn (%eax,%ecx,1), %bnd0");
	asm volatile("bndcn (%eax,%ecx,8), %bnd0");
	asm volatile("bndcn 0x12(%eax), %bnd0");
	asm volatile("bndcn 0x12(%ebp), %bnd0");
	asm volatile("bndcn 0x12(%ecx,%eax,1), %bnd0");
	asm volatile("bndcn 0x12(%ebp,%eax,1), %bnd0");
	asm volatile("bndcn 0x12(%eax,%ecx,1), %bnd0");
	asm volatile("bndcn 0x12(%eax,%ecx,8), %bnd0");
	asm volatile("bndcn 0x12345678(%eax), %bnd0");
	asm volatile("bndcn 0x12345678(%ebp), %bnd0");
	asm volatile("bndcn 0x12345678(%ecx,%eax,1), %bnd0");
	asm volatile("bndcn 0x12345678(%ebp,%eax,1), %bnd0");
	asm volatile("bndcn 0x12345678(%eax,%ecx,1), %bnd0");
	asm volatile("bndcn 0x12345678(%eax,%ecx,8), %bnd0");
	asm volatile("bndcn %eax, %bnd0");

	/* bndmov m64, bnd */

	asm volatile("bndmov (%eax), %bnd0");
	asm volatile("bndmov (0x12345678), %bnd0");
	asm volatile("bndmov (%eax), %bnd3");
	asm volatile("bndmov (%ecx,%eax,1), %bnd0");
	asm volatile("bndmov 0x12345678(,%eax,1), %bnd0");
	asm volatile("bndmov (%eax,%ecx,1), %bnd0");
	asm volatile("bndmov (%eax,%ecx,8), %bnd0");
	asm volatile("bndmov 0x12(%eax), %bnd0");
	asm volatile("bndmov 0x12(%ebp), %bnd0");
	asm volatile("bndmov 0x12(%ecx,%eax,1), %bnd0");
	asm volatile("bndmov 0x12(%ebp,%eax,1), %bnd0");
	asm volatile("bndmov 0x12(%eax,%ecx,1), %bnd0");
	asm volatile("bndmov 0x12(%eax,%ecx,8), %bnd0");
	asm volatile("bndmov 0x12345678(%eax), %bnd0");
	asm volatile("bndmov 0x12345678(%ebp), %bnd0");
	asm volatile("bndmov 0x12345678(%ecx,%eax,1), %bnd0");
	asm volatile("bndmov 0x12345678(%ebp,%eax,1), %bnd0");
	asm volatile("bndmov 0x12345678(%eax,%ecx,1), %bnd0");
	asm volatile("bndmov 0x12345678(%eax,%ecx,8), %bnd0");

	/* bndmov bnd, m64 */

	asm volatile("bndmov %bnd0, (%eax)");
	asm volatile("bndmov %bnd0, (0x12345678)");
	asm volatile("bndmov %bnd3, (%eax)");
	asm volatile("bndmov %bnd0, (%ecx,%eax,1)");
	asm volatile("bndmov %bnd0, 0x12345678(,%eax,1)");
	asm volatile("bndmov %bnd0, (%eax,%ecx,1)");
	asm volatile("bndmov %bnd0, (%eax,%ecx,8)");
	asm volatile("bndmov %bnd0, 0x12(%eax)");
	asm volatile("bndmov %bnd0, 0x12(%ebp)");
	asm volatile("bndmov %bnd0, 0x12(%ecx,%eax,1)");
	asm volatile("bndmov %bnd0, 0x12(%ebp,%eax,1)");
	asm volatile("bndmov %bnd0, 0x12(%eax,%ecx,1)");
	asm volatile("bndmov %bnd0, 0x12(%eax,%ecx,8)");
	asm volatile("bndmov %bnd0, 0x12345678(%eax)");
	asm volatile("bndmov %bnd0, 0x12345678(%ebp)");
	asm volatile("bndmov %bnd0, 0x12345678(%ecx,%eax,1)");
	asm volatile("bndmov %bnd0, 0x12345678(%ebp,%eax,1)");
	asm volatile("bndmov %bnd0, 0x12345678(%eax,%ecx,1)");
	asm volatile("bndmov %bnd0, 0x12345678(%eax,%ecx,8)");

	/* bndmov bnd2, bnd1 */

	asm volatile("bndmov %bnd0, %bnd1");
	asm volatile("bndmov %bnd1, %bnd0");

	/* bndldx mib, bnd */

	asm volatile("bndldx (%eax), %bnd0");
	asm volatile("bndldx (0x12345678), %bnd0");
	asm volatile("bndldx (%eax), %bnd3");
	asm volatile("bndldx (%ecx,%eax,1), %bnd0");
	asm volatile("bndldx 0x12345678(,%eax,1), %bnd0");
	asm volatile("bndldx (%eax,%ecx,1), %bnd0");
	asm volatile("bndldx 0x12(%eax), %bnd0");
	asm volatile("bndldx 0x12(%ebp), %bnd0");
	asm volatile("bndldx 0x12(%ecx,%eax,1), %bnd0");
	asm volatile("bndldx 0x12(%ebp,%eax,1), %bnd0");
	asm volatile("bndldx 0x12(%eax,%ecx,1), %bnd0");
	asm volatile("bndldx 0x12345678(%eax), %bnd0");
	asm volatile("bndldx 0x12345678(%ebp), %bnd0");
	asm volatile("bndldx 0x12345678(%ecx,%eax,1), %bnd0");
	asm volatile("bndldx 0x12345678(%ebp,%eax,1), %bnd0");
	asm volatile("bndldx 0x12345678(%eax,%ecx,1), %bnd0");

	/* bndstx bnd, mib */

	asm volatile("bndstx %bnd0, (%eax)");
	asm volatile("bndstx %bnd0, (0x12345678)");
	asm volatile("bndstx %bnd3, (%eax)");
	asm volatile("bndstx %bnd0, (%ecx,%eax,1)");
	asm volatile("bndstx %bnd0, 0x12345678(,%eax,1)");
	asm volatile("bndstx %bnd0, (%eax,%ecx,1)");
	asm volatile("bndstx %bnd0, 0x12(%eax)");
	asm volatile("bndstx %bnd0, 0x12(%ebp)");
	asm volatile("bndstx %bnd0, 0x12(%ecx,%eax,1)");
	asm volatile("bndstx %bnd0, 0x12(%ebp,%eax,1)");
	asm volatile("bndstx %bnd0, 0x12(%eax,%ecx,1)");
	asm volatile("bndstx %bnd0, 0x12345678(%eax)");
	asm volatile("bndstx %bnd0, 0x12345678(%ebp)");
	asm volatile("bndstx %bnd0, 0x12345678(%ecx,%eax,1)");
	asm volatile("bndstx %bnd0, 0x12345678(%ebp,%eax,1)");
	asm volatile("bndstx %bnd0, 0x12345678(%eax,%ecx,1)");

	/* bnd prefix on call, ret, jmp and all jcc */

	asm volatile("bnd call label1"); /* Expecting: call unconditional 0xfffffffc */
	asm volatile("bnd call *(%eax)"); /* Expecting: call indirect 0 */
	asm volatile("bnd ret"); /* Expecting: ret indirect 0 */
	asm volatile("bnd jmp label1"); /* Expecting: jmp unconditional 0xfffffffc */
	asm volatile("bnd jmp label1"); /* Expecting: jmp unconditional 0xfffffffc */
	asm volatile("bnd jmp *(%ecx)"); /* Expecting: jmp indirect 0 */
	asm volatile("bnd jne label1"); /* Expecting: jcc conditional 0xfffffffc */

	/* sha1rnds4 imm8, xmm2/m128, xmm1 */

	asm volatile("sha1rnds4 $0x0, %xmm1, %xmm0");
	asm volatile("sha1rnds4 $0x91, %xmm7, %xmm2");
	asm volatile("sha1rnds4 $0x91, (%eax), %xmm0");
	asm volatile("sha1rnds4 $0x91, (0x12345678), %xmm0");
	asm volatile("sha1rnds4 $0x91, (%eax), %xmm3");
	asm volatile("sha1rnds4 $0x91, (%ecx,%eax,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(,%eax,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, (%eax,%ecx,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, (%eax,%ecx,8), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12(%eax), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12(%ebp), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12(%ecx,%eax,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12(%ebp,%eax,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12(%eax,%ecx,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12(%eax,%ecx,8), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%eax), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%ebp), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%ecx,%eax,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%ebp,%eax,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%eax,%ecx,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%eax,%ecx,8), %xmm0");

	/* sha1nexte xmm2/m128, xmm1 */

	asm volatile("sha1nexte %xmm1, %xmm0");
	asm volatile("sha1nexte %xmm7, %xmm2");
	asm volatile("sha1nexte (%eax), %xmm0");
	asm volatile("sha1nexte (0x12345678), %xmm0");
	asm volatile("sha1nexte (%eax), %xmm3");
	asm volatile("sha1nexte (%ecx,%eax,1), %xmm0");
	asm volatile("sha1nexte 0x12345678(,%eax,1), %xmm0");
	asm volatile("sha1nexte (%eax,%ecx,1), %xmm0");
	asm volatile("sha1nexte (%eax,%ecx,8), %xmm0");
	asm volatile("sha1nexte 0x12(%eax), %xmm0");
	asm volatile("sha1nexte 0x12(%ebp), %xmm0");
	asm volatile("sha1nexte 0x12(%ecx,%eax,1), %xmm0");
	asm volatile("sha1nexte 0x12(%ebp,%eax,1), %xmm0");
	asm volatile("sha1nexte 0x12(%eax,%ecx,1), %xmm0");
	asm volatile("sha1nexte 0x12(%eax,%ecx,8), %xmm0");
	asm volatile("sha1nexte 0x12345678(%eax), %xmm0");
	asm volatile("sha1nexte 0x12345678(%ebp), %xmm0");
	asm volatile("sha1nexte 0x12345678(%ecx,%eax,1), %xmm0");
	asm volatile("sha1nexte 0x12345678(%ebp,%eax,1), %xmm0");
	asm volatile("sha1nexte 0x12345678(%eax,%ecx,1), %xmm0");
	asm volatile("sha1nexte 0x12345678(%eax,%ecx,8), %xmm0");

	/* sha1msg1 xmm2/m128, xmm1 */

	asm volatile("sha1msg1 %xmm1, %xmm0");
	asm volatile("sha1msg1 %xmm7, %xmm2");
	asm volatile("sha1msg1 (%eax), %xmm0");
	asm volatile("sha1msg1 (0x12345678), %xmm0");
	asm volatile("sha1msg1 (%eax), %xmm3");
	asm volatile("sha1msg1 (%ecx,%eax,1), %xmm0");
	asm volatile("sha1msg1 0x12345678(,%eax,1), %xmm0");
	asm volatile("sha1msg1 (%eax,%ecx,1), %xmm0");
	asm volatile("sha1msg1 (%eax,%ecx,8), %xmm0");
	asm volatile("sha1msg1 0x12(%eax), %xmm0");
	asm volatile("sha1msg1 0x12(%ebp), %xmm0");
	asm volatile("sha1msg1 0x12(%ecx,%eax,1), %xmm0");
	asm volatile("sha1msg1 0x12(%ebp,%eax,1), %xmm0");
	asm volatile("sha1msg1 0x12(%eax,%ecx,1), %xmm0");
	asm volatile("sha1msg1 0x12(%eax,%ecx,8), %xmm0");
	asm volatile("sha1msg1 0x12345678(%eax), %xmm0");
	asm volatile("sha1msg1 0x12345678(%ebp), %xmm0");
	asm volatile("sha1msg1 0x12345678(%ecx,%eax,1), %xmm0");
	asm volatile("sha1msg1 0x12345678(%ebp,%eax,1), %xmm0");
	asm volatile("sha1msg1 0x12345678(%eax,%ecx,1), %xmm0");
	asm volatile("sha1msg1 0x12345678(%eax,%ecx,8), %xmm0");

	/* sha1msg2 xmm2/m128, xmm1 */

	asm volatile("sha1msg2 %xmm1, %xmm0");
	asm volatile("sha1msg2 %xmm7, %xmm2");
	asm volatile("sha1msg2 (%eax), %xmm0");
	asm volatile("sha1msg2 (0x12345678), %xmm0");
	asm volatile("sha1msg2 (%eax), %xmm3");
	asm volatile("sha1msg2 (%ecx,%eax,1), %xmm0");
	asm volatile("sha1msg2 0x12345678(,%eax,1), %xmm0");
	asm volatile("sha1msg2 (%eax,%ecx,1), %xmm0");
	asm volatile("sha1msg2 (%eax,%ecx,8), %xmm0");
	asm volatile("sha1msg2 0x12(%eax), %xmm0");
	asm volatile("sha1msg2 0x12(%ebp), %xmm0");
	asm volatile("sha1msg2 0x12(%ecx,%eax,1), %xmm0");
	asm volatile("sha1msg2 0x12(%ebp,%eax,1), %xmm0");
	asm volatile("sha1msg2 0x12(%eax,%ecx,1), %xmm0");
	asm volatile("sha1msg2 0x12(%eax,%ecx,8), %xmm0");
	asm volatile("sha1msg2 0x12345678(%eax), %xmm0");
	asm volatile("sha1msg2 0x12345678(%ebp), %xmm0");
	asm volatile("sha1msg2 0x12345678(%ecx,%eax,1), %xmm0");
	asm volatile("sha1msg2 0x12345678(%ebp,%eax,1), %xmm0");
	asm volatile("sha1msg2 0x12345678(%eax,%ecx,1), %xmm0");
	asm volatile("sha1msg2 0x12345678(%eax,%ecx,8), %xmm0");

	/* sha256rnds2 <XMM0>, xmm2/m128, xmm1 */
	/* Note sha256rnds2 has an implicit operand 'xmm0' */

	asm volatile("sha256rnds2 %xmm4, %xmm1");
	asm volatile("sha256rnds2 %xmm7, %xmm2");
	asm volatile("sha256rnds2 (%eax), %xmm1");
	asm volatile("sha256rnds2 (0x12345678), %xmm1");
	asm volatile("sha256rnds2 (%eax), %xmm3");
	asm volatile("sha256rnds2 (%ecx,%eax,1), %xmm1");
	asm volatile("sha256rnds2 0x12345678(,%eax,1), %xmm1");
	asm volatile("sha256rnds2 (%eax,%ecx,1), %xmm1");
	asm volatile("sha256rnds2 (%eax,%ecx,8), %xmm1");
	asm volatile("sha256rnds2 0x12(%eax), %xmm1");
	asm volatile("sha256rnds2 0x12(%ebp), %xmm1");
	asm volatile("sha256rnds2 0x12(%ecx,%eax,1), %xmm1");
	asm volatile("sha256rnds2 0x12(%ebp,%eax,1), %xmm1");
	asm volatile("sha256rnds2 0x12(%eax,%ecx,1), %xmm1");
	asm volatile("sha256rnds2 0x12(%eax,%ecx,8), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%eax), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%ebp), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%ecx,%eax,1), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%ebp,%eax,1), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%eax,%ecx,1), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%eax,%ecx,8), %xmm1");

	/* sha256msg1 xmm2/m128, xmm1 */

	asm volatile("sha256msg1 %xmm1, %xmm0");
	asm volatile("sha256msg1 %xmm7, %xmm2");
	asm volatile("sha256msg1 (%eax), %xmm0");
	asm volatile("sha256msg1 (0x12345678), %xmm0");
	asm volatile("sha256msg1 (%eax), %xmm3");
	asm volatile("sha256msg1 (%ecx,%eax,1), %xmm0");
	asm volatile("sha256msg1 0x12345678(,%eax,1), %xmm0");
	asm volatile("sha256msg1 (%eax,%ecx,1), %xmm0");
	asm volatile("sha256msg1 (%eax,%ecx,8), %xmm0");
	asm volatile("sha256msg1 0x12(%eax), %xmm0");
	asm volatile("sha256msg1 0x12(%ebp), %xmm0");
	asm volatile("sha256msg1 0x12(%ecx,%eax,1), %xmm0");
	asm volatile("sha256msg1 0x12(%ebp,%eax,1), %xmm0");
	asm volatile("sha256msg1 0x12(%eax,%ecx,1), %xmm0");
	asm volatile("sha256msg1 0x12(%eax,%ecx,8), %xmm0");
	asm volatile("sha256msg1 0x12345678(%eax), %xmm0");
	asm volatile("sha256msg1 0x12345678(%ebp), %xmm0");
	asm volatile("sha256msg1 0x12345678(%ecx,%eax,1), %xmm0");
	asm volatile("sha256msg1 0x12345678(%ebp,%eax,1), %xmm0");
	asm volatile("sha256msg1 0x12345678(%eax,%ecx,1), %xmm0");
	asm volatile("sha256msg1 0x12345678(%eax,%ecx,8), %xmm0");

	/* sha256msg2 xmm2/m128, xmm1 */

	asm volatile("sha256msg2 %xmm1, %xmm0");
	asm volatile("sha256msg2 %xmm7, %xmm2");
	asm volatile("sha256msg2 (%eax), %xmm0");
	asm volatile("sha256msg2 (0x12345678), %xmm0");
	asm volatile("sha256msg2 (%eax), %xmm3");
	asm volatile("sha256msg2 (%ecx,%eax,1), %xmm0");
	asm volatile("sha256msg2 0x12345678(,%eax,1), %xmm0");
	asm volatile("sha256msg2 (%eax,%ecx,1), %xmm0");
	asm volatile("sha256msg2 (%eax,%ecx,8), %xmm0");
	asm volatile("sha256msg2 0x12(%eax), %xmm0");
	asm volatile("sha256msg2 0x12(%ebp), %xmm0");
	asm volatile("sha256msg2 0x12(%ecx,%eax,1), %xmm0");
	asm volatile("sha256msg2 0x12(%ebp,%eax,1), %xmm0");
	asm volatile("sha256msg2 0x12(%eax,%ecx,1), %xmm0");
	asm volatile("sha256msg2 0x12(%eax,%ecx,8), %xmm0");
	asm volatile("sha256msg2 0x12345678(%eax), %xmm0");
	asm volatile("sha256msg2 0x12345678(%ebp), %xmm0");
	asm volatile("sha256msg2 0x12345678(%ecx,%eax,1), %xmm0");
	asm volatile("sha256msg2 0x12345678(%ebp,%eax,1), %xmm0");
	asm volatile("sha256msg2 0x12345678(%eax,%ecx,1), %xmm0");
	asm volatile("sha256msg2 0x12345678(%eax,%ecx,8), %xmm0");

	/* clflushopt m8 */

	asm volatile("clflushopt (%eax)");
	asm volatile("clflushopt (0x12345678)");
	asm volatile("clflushopt 0x12345678(%eax,%ecx,8)");
	/* Also check instructions in the same group encoding as clflushopt */
	asm volatile("clflush (%eax)");
	asm volatile("sfence");

	/* clwb m8 */

	asm volatile("clwb (%eax)");
	asm volatile("clwb (0x12345678)");
	asm volatile("clwb 0x12345678(%eax,%ecx,8)");
	/* Also check instructions in the same group encoding as clwb */
	asm volatile("xsaveopt (%eax)");
	asm volatile("mfence");

	/* xsavec mem */

	asm volatile("xsavec (%eax)");
	asm volatile("xsavec (0x12345678)");
	asm volatile("xsavec 0x12345678(%eax,%ecx,8)");

	/* xsaves mem */

	asm volatile("xsaves (%eax)");
	asm volatile("xsaves (0x12345678)");
	asm volatile("xsaves 0x12345678(%eax,%ecx,8)");

	/* xrstors mem */

	asm volatile("xrstors (%eax)");
	asm volatile("xrstors (0x12345678)");
	asm volatile("xrstors 0x12345678(%eax,%ecx,8)");

#endif /* #ifndef __x86_64__ */

	/* pcommit */
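	/*
	 * pcommit was defined to commit stores to persistent memory;
	 * Intel later deprecated it, but the encoding must still decode.
	 */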

	asm volatile("pcommit");

	/* Following line is a marker for the awk script - do not change */
	asm volatile("rdtsc"); /* Stop here */

	return 0;
}