/*
 * This file contains instructions for testing by the test titled:
 *
 *	"Test x86 instruction decoder - new instructions"
 *
 * Note that the 'Expecting' comment lines are consumed by the
 * gen-insn-x86-dat.awk script and have the format:
 *
 *	Expecting: <op> <branch> <rel>
 *
 * If this file is changed, remember to run the gen-insn-x86-dat.sh
 * script and commit the result.
 *
 * Refer to insn-x86.c for more details.
 */
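
/*
 * For example, further down this file the line
 *
 *	asm volatile("bnd call label1");
 *
 * carries an 'Expecting' comment of "call unconditional 0", i.e.
 * <op> is "call", <branch> is "unconditional" and <rel> is 0.
 */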

int main(void)
{
	/* Following line is a marker for the awk script - do not change */
	asm volatile("rdtsc"); /* Start here */

	/* Test fix for vcvtph2ps in x86-opcode-map.txt */
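	/*
	 * (The xmm-source/ymm-destination form below converts eight packed
	 * half-precision values to single precision; this VEX.256 encoding
	 * is presumably the case the opcode-map fix addresses.)
	 */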

	asm volatile("vcvtph2ps %xmm3,%ymm5");

#ifdef __x86_64__

	/* bndmk m64, bnd */

	asm volatile("bndmk (%rax), %bnd0");
	asm volatile("bndmk (%r8), %bnd0");
	asm volatile("bndmk (0x12345678), %bnd0");
	asm volatile("bndmk (%rax), %bnd3");
	asm volatile("bndmk (%rcx,%rax,1), %bnd0");
	asm volatile("bndmk 0x12345678(,%rax,1), %bnd0");
	asm volatile("bndmk (%rax,%rcx,1), %bnd0");
	asm volatile("bndmk (%rax,%rcx,8), %bnd0");
	asm volatile("bndmk 0x12(%rax), %bnd0");
	asm volatile("bndmk 0x12(%rbp), %bnd0");
	asm volatile("bndmk 0x12(%rcx,%rax,1), %bnd0");
	asm volatile("bndmk 0x12(%rbp,%rax,1), %bnd0");
	asm volatile("bndmk 0x12(%rax,%rcx,1), %bnd0");
	asm volatile("bndmk 0x12(%rax,%rcx,8), %bnd0");
	asm volatile("bndmk 0x12345678(%rax), %bnd0");
	asm volatile("bndmk 0x12345678(%rbp), %bnd0");
	asm volatile("bndmk 0x12345678(%rcx,%rax,1), %bnd0");
	asm volatile("bndmk 0x12345678(%rbp,%rax,1), %bnd0");
	asm volatile("bndmk 0x12345678(%rax,%rcx,1), %bnd0");
	asm volatile("bndmk 0x12345678(%rax,%rcx,8), %bnd0");

	/* bndcl r/m64, bnd */

	asm volatile("bndcl (%rax), %bnd0");
	asm volatile("bndcl (%r8), %bnd0");
	asm volatile("bndcl (0x12345678), %bnd0");
	asm volatile("bndcl (%rax), %bnd3");
	asm volatile("bndcl (%rcx,%rax,1), %bnd0");
	asm volatile("bndcl 0x12345678(,%rax,1), %bnd0");
	asm volatile("bndcl (%rax,%rcx,1), %bnd0");
	asm volatile("bndcl (%rax,%rcx,8), %bnd0");
	asm volatile("bndcl 0x12(%rax), %bnd0");
	asm volatile("bndcl 0x12(%rbp), %bnd0");
	asm volatile("bndcl 0x12(%rcx,%rax,1), %bnd0");
	asm volatile("bndcl 0x12(%rbp,%rax,1), %bnd0");
	asm volatile("bndcl 0x12(%rax,%rcx,1), %bnd0");
	asm volatile("bndcl 0x12(%rax,%rcx,8), %bnd0");
	asm volatile("bndcl 0x12345678(%rax), %bnd0");
	asm volatile("bndcl 0x12345678(%rbp), %bnd0");
	asm volatile("bndcl 0x12345678(%rcx,%rax,1), %bnd0");
	asm volatile("bndcl 0x12345678(%rbp,%rax,1), %bnd0");
	asm volatile("bndcl 0x12345678(%rax,%rcx,1), %bnd0");
	asm volatile("bndcl 0x12345678(%rax,%rcx,8), %bnd0");
	asm volatile("bndcl %rax, %bnd0");

	/* bndcu r/m64, bnd */

	asm volatile("bndcu (%rax), %bnd0");
	asm volatile("bndcu (%r8), %bnd0");
	asm volatile("bndcu (0x12345678), %bnd0");
	asm volatile("bndcu (%rax), %bnd3");
	asm volatile("bndcu (%rcx,%rax,1), %bnd0");
	asm volatile("bndcu 0x12345678(,%rax,1), %bnd0");
	asm volatile("bndcu (%rax,%rcx,1), %bnd0");
	asm volatile("bndcu (%rax,%rcx,8), %bnd0");
	asm volatile("bndcu 0x12(%rax), %bnd0");
	asm volatile("bndcu 0x12(%rbp), %bnd0");
	asm volatile("bndcu 0x12(%rcx,%rax,1), %bnd0");
	asm volatile("bndcu 0x12(%rbp,%rax,1), %bnd0");
	asm volatile("bndcu 0x12(%rax,%rcx,1), %bnd0");
	asm volatile("bndcu 0x12(%rax,%rcx,8), %bnd0");
	asm volatile("bndcu 0x12345678(%rax), %bnd0");
	asm volatile("bndcu 0x12345678(%rbp), %bnd0");
	asm volatile("bndcu 0x12345678(%rcx,%rax,1), %bnd0");
	asm volatile("bndcu 0x12345678(%rbp,%rax,1), %bnd0");
	asm volatile("bndcu 0x12345678(%rax,%rcx,1), %bnd0");
	asm volatile("bndcu 0x12345678(%rax,%rcx,8), %bnd0");
	asm volatile("bndcu %rax, %bnd0");

	/* bndcn r/m64, bnd */

	asm volatile("bndcn (%rax), %bnd0");
	asm volatile("bndcn (%r8), %bnd0");
	asm volatile("bndcn (0x12345678), %bnd0");
	asm volatile("bndcn (%rax), %bnd3");
	asm volatile("bndcn (%rcx,%rax,1), %bnd0");
	asm volatile("bndcn 0x12345678(,%rax,1), %bnd0");
	asm volatile("bndcn (%rax,%rcx,1), %bnd0");
	asm volatile("bndcn (%rax,%rcx,8), %bnd0");
	asm volatile("bndcn 0x12(%rax), %bnd0");
	asm volatile("bndcn 0x12(%rbp), %bnd0");
	asm volatile("bndcn 0x12(%rcx,%rax,1), %bnd0");
	asm volatile("bndcn 0x12(%rbp,%rax,1), %bnd0");
	asm volatile("bndcn 0x12(%rax,%rcx,1), %bnd0");
	asm volatile("bndcn 0x12(%rax,%rcx,8), %bnd0");
	asm volatile("bndcn 0x12345678(%rax), %bnd0");
	asm volatile("bndcn 0x12345678(%rbp), %bnd0");
	asm volatile("bndcn 0x12345678(%rcx,%rax,1), %bnd0");
	asm volatile("bndcn 0x12345678(%rbp,%rax,1), %bnd0");
	asm volatile("bndcn 0x12345678(%rax,%rcx,1), %bnd0");
	asm volatile("bndcn 0x12345678(%rax,%rcx,8), %bnd0");
	asm volatile("bndcn %rax, %bnd0");

	/* bndmov m128, bnd */

	asm volatile("bndmov (%rax), %bnd0");
	asm volatile("bndmov (%r8), %bnd0");
	asm volatile("bndmov (0x12345678), %bnd0");
	asm volatile("bndmov (%rax), %bnd3");
	asm volatile("bndmov (%rcx,%rax,1), %bnd0");
	asm volatile("bndmov 0x12345678(,%rax,1), %bnd0");
	asm volatile("bndmov (%rax,%rcx,1), %bnd0");
	asm volatile("bndmov (%rax,%rcx,8), %bnd0");
	asm volatile("bndmov 0x12(%rax), %bnd0");
	asm volatile("bndmov 0x12(%rbp), %bnd0");
	asm volatile("bndmov 0x12(%rcx,%rax,1), %bnd0");
	asm volatile("bndmov 0x12(%rbp,%rax,1), %bnd0");
	asm volatile("bndmov 0x12(%rax,%rcx,1), %bnd0");
	asm volatile("bndmov 0x12(%rax,%rcx,8), %bnd0");
	asm volatile("bndmov 0x12345678(%rax), %bnd0");
	asm volatile("bndmov 0x12345678(%rbp), %bnd0");
	asm volatile("bndmov 0x12345678(%rcx,%rax,1), %bnd0");
	asm volatile("bndmov 0x12345678(%rbp,%rax,1), %bnd0");
	asm volatile("bndmov 0x12345678(%rax,%rcx,1), %bnd0");
	asm volatile("bndmov 0x12345678(%rax,%rcx,8), %bnd0");

	/* bndmov bnd, m128 */

	asm volatile("bndmov %bnd0, (%rax)");
	asm volatile("bndmov %bnd0, (%r8)");
	asm volatile("bndmov %bnd0, (0x12345678)");
	asm volatile("bndmov %bnd3, (%rax)");
	asm volatile("bndmov %bnd0, (%rcx,%rax,1)");
	asm volatile("bndmov %bnd0, 0x12345678(,%rax,1)");
	asm volatile("bndmov %bnd0, (%rax,%rcx,1)");
	asm volatile("bndmov %bnd0, (%rax,%rcx,8)");
	asm volatile("bndmov %bnd0, 0x12(%rax)");
	asm volatile("bndmov %bnd0, 0x12(%rbp)");
	asm volatile("bndmov %bnd0, 0x12(%rcx,%rax,1)");
	asm volatile("bndmov %bnd0, 0x12(%rbp,%rax,1)");
	asm volatile("bndmov %bnd0, 0x12(%rax,%rcx,1)");
	asm volatile("bndmov %bnd0, 0x12(%rax,%rcx,8)");
	asm volatile("bndmov %bnd0, 0x12345678(%rax)");
	asm volatile("bndmov %bnd0, 0x12345678(%rbp)");
	asm volatile("bndmov %bnd0, 0x12345678(%rcx,%rax,1)");
	asm volatile("bndmov %bnd0, 0x12345678(%rbp,%rax,1)");
	asm volatile("bndmov %bnd0, 0x12345678(%rax,%rcx,1)");
	asm volatile("bndmov %bnd0, 0x12345678(%rax,%rcx,8)");

	/* bndmov bnd2, bnd1 */

	asm volatile("bndmov %bnd0, %bnd1");
	asm volatile("bndmov %bnd1, %bnd0");

	/* bndldx mib, bnd */

	asm volatile("bndldx (%rax), %bnd0");
	asm volatile("bndldx (%r8), %bnd0");
	asm volatile("bndldx (0x12345678), %bnd0");
	asm volatile("bndldx (%rax), %bnd3");
	asm volatile("bndldx (%rcx,%rax,1), %bnd0");
	asm volatile("bndldx 0x12345678(,%rax,1), %bnd0");
	asm volatile("bndldx (%rax,%rcx,1), %bnd0");
	asm volatile("bndldx 0x12(%rax), %bnd0");
	asm volatile("bndldx 0x12(%rbp), %bnd0");
	asm volatile("bndldx 0x12(%rcx,%rax,1), %bnd0");
	asm volatile("bndldx 0x12(%rbp,%rax,1), %bnd0");
	asm volatile("bndldx 0x12(%rax,%rcx,1), %bnd0");
	asm volatile("bndldx 0x12345678(%rax), %bnd0");
	asm volatile("bndldx 0x12345678(%rbp), %bnd0");
	asm volatile("bndldx 0x12345678(%rcx,%rax,1), %bnd0");
	asm volatile("bndldx 0x12345678(%rbp,%rax,1), %bnd0");
	asm volatile("bndldx 0x12345678(%rax,%rcx,1), %bnd0");

	/* bndstx bnd, mib */

	asm volatile("bndstx %bnd0, (%rax)");
	asm volatile("bndstx %bnd0, (%r8)");
	asm volatile("bndstx %bnd0, (0x12345678)");
	asm volatile("bndstx %bnd3, (%rax)");
	asm volatile("bndstx %bnd0, (%rcx,%rax,1)");
	asm volatile("bndstx %bnd0, 0x12345678(,%rax,1)");
	asm volatile("bndstx %bnd0, (%rax,%rcx,1)");
	asm volatile("bndstx %bnd0, 0x12(%rax)");
	asm volatile("bndstx %bnd0, 0x12(%rbp)");
	asm volatile("bndstx %bnd0, 0x12(%rcx,%rax,1)");
	asm volatile("bndstx %bnd0, 0x12(%rbp,%rax,1)");
	asm volatile("bndstx %bnd0, 0x12(%rax,%rcx,1)");
	asm volatile("bndstx %bnd0, 0x12345678(%rax)");
	asm volatile("bndstx %bnd0, 0x12345678(%rbp)");
	asm volatile("bndstx %bnd0, 0x12345678(%rcx,%rax,1)");
	asm volatile("bndstx %bnd0, 0x12345678(%rbp,%rax,1)");
	asm volatile("bndstx %bnd0, 0x12345678(%rax,%rcx,1)");

	/* bnd prefix on call, ret, jmp and all jcc */
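	/*
	 * (The bnd prefix is the 0xF2 prefix byte applied to a branch; with
	 * MPX it keeps the bound registers valid across the branch instead
	 * of re-initializing them.)
	 */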

	asm volatile("bnd call label1"); /* Expecting: call unconditional 0 */
	asm volatile("bnd call *(%eax)"); /* Expecting: call indirect 0 */
	asm volatile("bnd ret"); /* Expecting: ret indirect 0 */
	asm volatile("bnd jmp label1"); /* Expecting: jmp unconditional 0 */
	asm volatile("bnd jmp label1"); /* Expecting: jmp unconditional 0 */
	asm volatile("bnd jmp *(%ecx)"); /* Expecting: jmp indirect 0 */
	asm volatile("bnd jne label1"); /* Expecting: jcc conditional 0 */

	/* sha1rnds4 imm8, xmm2/m128, xmm1 */

	asm volatile("sha1rnds4 $0x0, %xmm1, %xmm0");
	asm volatile("sha1rnds4 $0x91, %xmm7, %xmm2");
	asm volatile("sha1rnds4 $0x91, %xmm8, %xmm0");
	asm volatile("sha1rnds4 $0x91, %xmm7, %xmm8");
	asm volatile("sha1rnds4 $0x91, %xmm15, %xmm8");
	asm volatile("sha1rnds4 $0x91, (%rax), %xmm0");
	asm volatile("sha1rnds4 $0x91, (%r8), %xmm0");
	asm volatile("sha1rnds4 $0x91, (0x12345678), %xmm0");
	asm volatile("sha1rnds4 $0x91, (%rax), %xmm3");
	asm volatile("sha1rnds4 $0x91, (%rcx,%rax,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(,%rax,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, (%rax,%rcx,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, (%rax,%rcx,8), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12(%rax), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12(%rbp), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12(%rcx,%rax,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12(%rbp,%rax,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12(%rax,%rcx,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12(%rax,%rcx,8), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%rax), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%rbp), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%rcx,%rax,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%rbp,%rax,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%rax,%rcx,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%rax,%rcx,8), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%rax,%rcx,8), %xmm15");

	/* sha1nexte xmm2/m128, xmm1 */

	asm volatile("sha1nexte %xmm1, %xmm0");
	asm volatile("sha1nexte %xmm7, %xmm2");
	asm volatile("sha1nexte %xmm8, %xmm0");
	asm volatile("sha1nexte %xmm7, %xmm8");
	asm volatile("sha1nexte %xmm15, %xmm8");
	asm volatile("sha1nexte (%rax), %xmm0");
	asm volatile("sha1nexte (%r8), %xmm0");
	asm volatile("sha1nexte (0x12345678), %xmm0");
	asm volatile("sha1nexte (%rax), %xmm3");
	asm volatile("sha1nexte (%rcx,%rax,1), %xmm0");
	asm volatile("sha1nexte 0x12345678(,%rax,1), %xmm0");
	asm volatile("sha1nexte (%rax,%rcx,1), %xmm0");
	asm volatile("sha1nexte (%rax,%rcx,8), %xmm0");
	asm volatile("sha1nexte 0x12(%rax), %xmm0");
	asm volatile("sha1nexte 0x12(%rbp), %xmm0");
	asm volatile("sha1nexte 0x12(%rcx,%rax,1), %xmm0");
	asm volatile("sha1nexte 0x12(%rbp,%rax,1), %xmm0");
	asm volatile("sha1nexte 0x12(%rax,%rcx,1), %xmm0");
	asm volatile("sha1nexte 0x12(%rax,%rcx,8), %xmm0");
	asm volatile("sha1nexte 0x12345678(%rax), %xmm0");
	asm volatile("sha1nexte 0x12345678(%rbp), %xmm0");
	asm volatile("sha1nexte 0x12345678(%rcx,%rax,1), %xmm0");
	asm volatile("sha1nexte 0x12345678(%rbp,%rax,1), %xmm0");
	asm volatile("sha1nexte 0x12345678(%rax,%rcx,1), %xmm0");
	asm volatile("sha1nexte 0x12345678(%rax,%rcx,8), %xmm0");
	asm volatile("sha1nexte 0x12345678(%rax,%rcx,8), %xmm15");

	/* sha1msg1 xmm2/m128, xmm1 */

	asm volatile("sha1msg1 %xmm1, %xmm0");
	asm volatile("sha1msg1 %xmm7, %xmm2");
	asm volatile("sha1msg1 %xmm8, %xmm0");
	asm volatile("sha1msg1 %xmm7, %xmm8");
	asm volatile("sha1msg1 %xmm15, %xmm8");
	asm volatile("sha1msg1 (%rax), %xmm0");
	asm volatile("sha1msg1 (%r8), %xmm0");
	asm volatile("sha1msg1 (0x12345678), %xmm0");
	asm volatile("sha1msg1 (%rax), %xmm3");
	asm volatile("sha1msg1 (%rcx,%rax,1), %xmm0");
	asm volatile("sha1msg1 0x12345678(,%rax,1), %xmm0");
	asm volatile("sha1msg1 (%rax,%rcx,1), %xmm0");
	asm volatile("sha1msg1 (%rax,%rcx,8), %xmm0");
	asm volatile("sha1msg1 0x12(%rax), %xmm0");
	asm volatile("sha1msg1 0x12(%rbp), %xmm0");
	asm volatile("sha1msg1 0x12(%rcx,%rax,1), %xmm0");
	asm volatile("sha1msg1 0x12(%rbp,%rax,1), %xmm0");
	asm volatile("sha1msg1 0x12(%rax,%rcx,1), %xmm0");
	asm volatile("sha1msg1 0x12(%rax,%rcx,8), %xmm0");
	asm volatile("sha1msg1 0x12345678(%rax), %xmm0");
	asm volatile("sha1msg1 0x12345678(%rbp), %xmm0");
	asm volatile("sha1msg1 0x12345678(%rcx,%rax,1), %xmm0");
	asm volatile("sha1msg1 0x12345678(%rbp,%rax,1), %xmm0");
	asm volatile("sha1msg1 0x12345678(%rax,%rcx,1), %xmm0");
	asm volatile("sha1msg1 0x12345678(%rax,%rcx,8), %xmm0");
	asm volatile("sha1msg1 0x12345678(%rax,%rcx,8), %xmm15");

	/* sha1msg2 xmm2/m128, xmm1 */

	asm volatile("sha1msg2 %xmm1, %xmm0");
	asm volatile("sha1msg2 %xmm7, %xmm2");
	asm volatile("sha1msg2 %xmm8, %xmm0");
	asm volatile("sha1msg2 %xmm7, %xmm8");
	asm volatile("sha1msg2 %xmm15, %xmm8");
	asm volatile("sha1msg2 (%rax), %xmm0");
	asm volatile("sha1msg2 (%r8), %xmm0");
	asm volatile("sha1msg2 (0x12345678), %xmm0");
	asm volatile("sha1msg2 (%rax), %xmm3");
	asm volatile("sha1msg2 (%rcx,%rax,1), %xmm0");
	asm volatile("sha1msg2 0x12345678(,%rax,1), %xmm0");
	asm volatile("sha1msg2 (%rax,%rcx,1), %xmm0");
	asm volatile("sha1msg2 (%rax,%rcx,8), %xmm0");
	asm volatile("sha1msg2 0x12(%rax), %xmm0");
	asm volatile("sha1msg2 0x12(%rbp), %xmm0");
	asm volatile("sha1msg2 0x12(%rcx,%rax,1), %xmm0");
	asm volatile("sha1msg2 0x12(%rbp,%rax,1), %xmm0");
	asm volatile("sha1msg2 0x12(%rax,%rcx,1), %xmm0");
	asm volatile("sha1msg2 0x12(%rax,%rcx,8), %xmm0");
	asm volatile("sha1msg2 0x12345678(%rax), %xmm0");
	asm volatile("sha1msg2 0x12345678(%rbp), %xmm0");
	asm volatile("sha1msg2 0x12345678(%rcx,%rax,1), %xmm0");
	asm volatile("sha1msg2 0x12345678(%rbp,%rax,1), %xmm0");
	asm volatile("sha1msg2 0x12345678(%rax,%rcx,1), %xmm0");
	asm volatile("sha1msg2 0x12345678(%rax,%rcx,8), %xmm0");
	asm volatile("sha1msg2 0x12345678(%rax,%rcx,8), %xmm15");

	/* sha256rnds2 <XMM0>, xmm2/m128, xmm1 */
	/* Note sha256rnds2 has an implicit operand 'xmm0' */
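	/*
	 * (%xmm0 supplies the round's message-schedule/constant words; being
	 * implicit it is never encoded, so only two operands appear below.)
	 */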

	asm volatile("sha256rnds2 %xmm4, %xmm1");
	asm volatile("sha256rnds2 %xmm7, %xmm2");
	asm volatile("sha256rnds2 %xmm8, %xmm1");
	asm volatile("sha256rnds2 %xmm7, %xmm8");
	asm volatile("sha256rnds2 %xmm15, %xmm8");
	asm volatile("sha256rnds2 (%rax), %xmm1");
	asm volatile("sha256rnds2 (%r8), %xmm1");
	asm volatile("sha256rnds2 (0x12345678), %xmm1");
	asm volatile("sha256rnds2 (%rax), %xmm3");
	asm volatile("sha256rnds2 (%rcx,%rax,1), %xmm1");
	asm volatile("sha256rnds2 0x12345678(,%rax,1), %xmm1");
	asm volatile("sha256rnds2 (%rax,%rcx,1), %xmm1");
	asm volatile("sha256rnds2 (%rax,%rcx,8), %xmm1");
	asm volatile("sha256rnds2 0x12(%rax), %xmm1");
	asm volatile("sha256rnds2 0x12(%rbp), %xmm1");
	asm volatile("sha256rnds2 0x12(%rcx,%rax,1), %xmm1");
	asm volatile("sha256rnds2 0x12(%rbp,%rax,1), %xmm1");
	asm volatile("sha256rnds2 0x12(%rax,%rcx,1), %xmm1");
	asm volatile("sha256rnds2 0x12(%rax,%rcx,8), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%rax), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%rbp), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%rcx,%rax,1), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%rbp,%rax,1), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%rax,%rcx,1), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%rax,%rcx,8), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%rax,%rcx,8), %xmm15");

	/* sha256msg1 xmm2/m128, xmm1 */

	asm volatile("sha256msg1 %xmm1, %xmm0");
	asm volatile("sha256msg1 %xmm7, %xmm2");
	asm volatile("sha256msg1 %xmm8, %xmm0");
	asm volatile("sha256msg1 %xmm7, %xmm8");
	asm volatile("sha256msg1 %xmm15, %xmm8");
	asm volatile("sha256msg1 (%rax), %xmm0");
	asm volatile("sha256msg1 (%r8), %xmm0");
	asm volatile("sha256msg1 (0x12345678), %xmm0");
	asm volatile("sha256msg1 (%rax), %xmm3");
	asm volatile("sha256msg1 (%rcx,%rax,1), %xmm0");
	asm volatile("sha256msg1 0x12345678(,%rax,1), %xmm0");
	asm volatile("sha256msg1 (%rax,%rcx,1), %xmm0");
	asm volatile("sha256msg1 (%rax,%rcx,8), %xmm0");
	asm volatile("sha256msg1 0x12(%rax), %xmm0");
	asm volatile("sha256msg1 0x12(%rbp), %xmm0");
	asm volatile("sha256msg1 0x12(%rcx,%rax,1), %xmm0");
	asm volatile("sha256msg1 0x12(%rbp,%rax,1), %xmm0");
	asm volatile("sha256msg1 0x12(%rax,%rcx,1), %xmm0");
	asm volatile("sha256msg1 0x12(%rax,%rcx,8), %xmm0");
	asm volatile("sha256msg1 0x12345678(%rax), %xmm0");
	asm volatile("sha256msg1 0x12345678(%rbp), %xmm0");
	asm volatile("sha256msg1 0x12345678(%rcx,%rax,1), %xmm0");
	asm volatile("sha256msg1 0x12345678(%rbp,%rax,1), %xmm0");
	asm volatile("sha256msg1 0x12345678(%rax,%rcx,1), %xmm0");
	asm volatile("sha256msg1 0x12345678(%rax,%rcx,8), %xmm0");
	asm volatile("sha256msg1 0x12345678(%rax,%rcx,8), %xmm15");

	/* sha256msg2 xmm2/m128, xmm1 */

	asm volatile("sha256msg2 %xmm1, %xmm0");
	asm volatile("sha256msg2 %xmm7, %xmm2");
	asm volatile("sha256msg2 %xmm8, %xmm0");
	asm volatile("sha256msg2 %xmm7, %xmm8");
	asm volatile("sha256msg2 %xmm15, %xmm8");
	asm volatile("sha256msg2 (%rax), %xmm0");
	asm volatile("sha256msg2 (%r8), %xmm0");
	asm volatile("sha256msg2 (0x12345678), %xmm0");
	asm volatile("sha256msg2 (%rax), %xmm3");
	asm volatile("sha256msg2 (%rcx,%rax,1), %xmm0");
	asm volatile("sha256msg2 0x12345678(,%rax,1), %xmm0");
	asm volatile("sha256msg2 (%rax,%rcx,1), %xmm0");
	asm volatile("sha256msg2 (%rax,%rcx,8), %xmm0");
	asm volatile("sha256msg2 0x12(%rax), %xmm0");
	asm volatile("sha256msg2 0x12(%rbp), %xmm0");
	asm volatile("sha256msg2 0x12(%rcx,%rax,1), %xmm0");
	asm volatile("sha256msg2 0x12(%rbp,%rax,1), %xmm0");
	asm volatile("sha256msg2 0x12(%rax,%rcx,1), %xmm0");
	asm volatile("sha256msg2 0x12(%rax,%rcx,8), %xmm0");
	asm volatile("sha256msg2 0x12345678(%rax), %xmm0");
	asm volatile("sha256msg2 0x12345678(%rbp), %xmm0");
	asm volatile("sha256msg2 0x12345678(%rcx,%rax,1), %xmm0");
	asm volatile("sha256msg2 0x12345678(%rbp,%rax,1), %xmm0");
	asm volatile("sha256msg2 0x12345678(%rax,%rcx,1), %xmm0");
	asm volatile("sha256msg2 0x12345678(%rax,%rcx,8), %xmm0");
	asm volatile("sha256msg2 0x12345678(%rax,%rcx,8), %xmm15");

	/* clflushopt m8 */

	asm volatile("clflushopt (%rax)");
	asm volatile("clflushopt (%r8)");
	asm volatile("clflushopt (0x12345678)");
	asm volatile("clflushopt 0x12345678(%rax,%rcx,8)");
	asm volatile("clflushopt 0x12345678(%r8,%rcx,8)");
	/* Also check instructions in the same group encoding as clflushopt */
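	/*
	 * (clflushopt is 66 0F AE /7; clflush is the same reg=7 slot without
	 * the 66 prefix, and sfence is its register-operand form.)
	 */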
436 asm volatile("clflush (%rax)");
437 asm volatile("clflush (%r8)");
438 asm volatile("sfence");
439
440 /* clwb m8 */
441
442 asm volatile("clwb (%rax)");
443 asm volatile("clwb (%r8)");
444 asm volatile("clwb (0x12345678)");
445 asm volatile("clwb 0x12345678(%rax,%rcx,8)");
446 asm volatile("clwb 0x12345678(%r8,%rcx,8)");
447 /* Also check instructions in the same group encoding as clwb */
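	/*
	 * (clwb is 66 0F AE /6; xsaveopt shares the reg=6 slot without the
	 * 66 prefix, and mfence is its register-operand form.)
	 */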
	asm volatile("xsaveopt (%rax)");
	asm volatile("xsaveopt (%r8)");
	asm volatile("mfence");

	/* xsavec mem */

	asm volatile("xsavec (%rax)");
	asm volatile("xsavec (%r8)");
	asm volatile("xsavec (0x12345678)");
	asm volatile("xsavec 0x12345678(%rax,%rcx,8)");
	asm volatile("xsavec 0x12345678(%r8,%rcx,8)");

	/* xsaves mem */

	asm volatile("xsaves (%rax)");
	asm volatile("xsaves (%r8)");
	asm volatile("xsaves (0x12345678)");
	asm volatile("xsaves 0x12345678(%rax,%rcx,8)");
	asm volatile("xsaves 0x12345678(%r8,%rcx,8)");

	/* xrstors mem */

	asm volatile("xrstors (%rax)");
	asm volatile("xrstors (%r8)");
	asm volatile("xrstors (0x12345678)");
	asm volatile("xrstors 0x12345678(%rax,%rcx,8)");
	asm volatile("xrstors 0x12345678(%r8,%rcx,8)");

#else /* #ifdef __x86_64__ */

	/* bndmk m32, bnd */

	asm volatile("bndmk (%eax), %bnd0");
	asm volatile("bndmk (0x12345678), %bnd0");
	asm volatile("bndmk (%eax), %bnd3");
	asm volatile("bndmk (%ecx,%eax,1), %bnd0");
	asm volatile("bndmk 0x12345678(,%eax,1), %bnd0");
	asm volatile("bndmk (%eax,%ecx,1), %bnd0");
	asm volatile("bndmk (%eax,%ecx,8), %bnd0");
	asm volatile("bndmk 0x12(%eax), %bnd0");
	asm volatile("bndmk 0x12(%ebp), %bnd0");
	asm volatile("bndmk 0x12(%ecx,%eax,1), %bnd0");
	asm volatile("bndmk 0x12(%ebp,%eax,1), %bnd0");
	asm volatile("bndmk 0x12(%eax,%ecx,1), %bnd0");
	asm volatile("bndmk 0x12(%eax,%ecx,8), %bnd0");
	asm volatile("bndmk 0x12345678(%eax), %bnd0");
	asm volatile("bndmk 0x12345678(%ebp), %bnd0");
	asm volatile("bndmk 0x12345678(%ecx,%eax,1), %bnd0");
	asm volatile("bndmk 0x12345678(%ebp,%eax,1), %bnd0");
	asm volatile("bndmk 0x12345678(%eax,%ecx,1), %bnd0");
	asm volatile("bndmk 0x12345678(%eax,%ecx,8), %bnd0");

	/* bndcl r/m32, bnd */

	asm volatile("bndcl (%eax), %bnd0");
	asm volatile("bndcl (0x12345678), %bnd0");
	asm volatile("bndcl (%eax), %bnd3");
	asm volatile("bndcl (%ecx,%eax,1), %bnd0");
	asm volatile("bndcl 0x12345678(,%eax,1), %bnd0");
	asm volatile("bndcl (%eax,%ecx,1), %bnd0");
	asm volatile("bndcl (%eax,%ecx,8), %bnd0");
	asm volatile("bndcl 0x12(%eax), %bnd0");
	asm volatile("bndcl 0x12(%ebp), %bnd0");
	asm volatile("bndcl 0x12(%ecx,%eax,1), %bnd0");
	asm volatile("bndcl 0x12(%ebp,%eax,1), %bnd0");
	asm volatile("bndcl 0x12(%eax,%ecx,1), %bnd0");
	asm volatile("bndcl 0x12(%eax,%ecx,8), %bnd0");
	asm volatile("bndcl 0x12345678(%eax), %bnd0");
	asm volatile("bndcl 0x12345678(%ebp), %bnd0");
	asm volatile("bndcl 0x12345678(%ecx,%eax,1), %bnd0");
	asm volatile("bndcl 0x12345678(%ebp,%eax,1), %bnd0");
	asm volatile("bndcl 0x12345678(%eax,%ecx,1), %bnd0");
	asm volatile("bndcl 0x12345678(%eax,%ecx,8), %bnd0");
	asm volatile("bndcl %eax, %bnd0");

	/* bndcu r/m32, bnd */

	asm volatile("bndcu (%eax), %bnd0");
	asm volatile("bndcu (0x12345678), %bnd0");
	asm volatile("bndcu (%eax), %bnd3");
	asm volatile("bndcu (%ecx,%eax,1), %bnd0");
	asm volatile("bndcu 0x12345678(,%eax,1), %bnd0");
	asm volatile("bndcu (%eax,%ecx,1), %bnd0");
	asm volatile("bndcu (%eax,%ecx,8), %bnd0");
	asm volatile("bndcu 0x12(%eax), %bnd0");
	asm volatile("bndcu 0x12(%ebp), %bnd0");
	asm volatile("bndcu 0x12(%ecx,%eax,1), %bnd0");
	asm volatile("bndcu 0x12(%ebp,%eax,1), %bnd0");
	asm volatile("bndcu 0x12(%eax,%ecx,1), %bnd0");
	asm volatile("bndcu 0x12(%eax,%ecx,8), %bnd0");
	asm volatile("bndcu 0x12345678(%eax), %bnd0");
	asm volatile("bndcu 0x12345678(%ebp), %bnd0");
	asm volatile("bndcu 0x12345678(%ecx,%eax,1), %bnd0");
	asm volatile("bndcu 0x12345678(%ebp,%eax,1), %bnd0");
	asm volatile("bndcu 0x12345678(%eax,%ecx,1), %bnd0");
	asm volatile("bndcu 0x12345678(%eax,%ecx,8), %bnd0");
	asm volatile("bndcu %eax, %bnd0");

	/* bndcn r/m32, bnd */

	asm volatile("bndcn (%eax), %bnd0");
	asm volatile("bndcn (0x12345678), %bnd0");
	asm volatile("bndcn (%eax), %bnd3");
	asm volatile("bndcn (%ecx,%eax,1), %bnd0");
	asm volatile("bndcn 0x12345678(,%eax,1), %bnd0");
	asm volatile("bndcn (%eax,%ecx,1), %bnd0");
	asm volatile("bndcn (%eax,%ecx,8), %bnd0");
	asm volatile("bndcn 0x12(%eax), %bnd0");
	asm volatile("bndcn 0x12(%ebp), %bnd0");
	asm volatile("bndcn 0x12(%ecx,%eax,1), %bnd0");
	asm volatile("bndcn 0x12(%ebp,%eax,1), %bnd0");
	asm volatile("bndcn 0x12(%eax,%ecx,1), %bnd0");
	asm volatile("bndcn 0x12(%eax,%ecx,8), %bnd0");
	asm volatile("bndcn 0x12345678(%eax), %bnd0");
	asm volatile("bndcn 0x12345678(%ebp), %bnd0");
	asm volatile("bndcn 0x12345678(%ecx,%eax,1), %bnd0");
	asm volatile("bndcn 0x12345678(%ebp,%eax,1), %bnd0");
	asm volatile("bndcn 0x12345678(%eax,%ecx,1), %bnd0");
	asm volatile("bndcn 0x12345678(%eax,%ecx,8), %bnd0");
	asm volatile("bndcn %eax, %bnd0");

	/* bndmov m64, bnd */

	asm volatile("bndmov (%eax), %bnd0");
	asm volatile("bndmov (0x12345678), %bnd0");
	asm volatile("bndmov (%eax), %bnd3");
	asm volatile("bndmov (%ecx,%eax,1), %bnd0");
	asm volatile("bndmov 0x12345678(,%eax,1), %bnd0");
	asm volatile("bndmov (%eax,%ecx,1), %bnd0");
	asm volatile("bndmov (%eax,%ecx,8), %bnd0");
	asm volatile("bndmov 0x12(%eax), %bnd0");
	asm volatile("bndmov 0x12(%ebp), %bnd0");
	asm volatile("bndmov 0x12(%ecx,%eax,1), %bnd0");
	asm volatile("bndmov 0x12(%ebp,%eax,1), %bnd0");
	asm volatile("bndmov 0x12(%eax,%ecx,1), %bnd0");
	asm volatile("bndmov 0x12(%eax,%ecx,8), %bnd0");
	asm volatile("bndmov 0x12345678(%eax), %bnd0");
	asm volatile("bndmov 0x12345678(%ebp), %bnd0");
	asm volatile("bndmov 0x12345678(%ecx,%eax,1), %bnd0");
	asm volatile("bndmov 0x12345678(%ebp,%eax,1), %bnd0");
	asm volatile("bndmov 0x12345678(%eax,%ecx,1), %bnd0");
	asm volatile("bndmov 0x12345678(%eax,%ecx,8), %bnd0");

	/* bndmov bnd, m64 */

	asm volatile("bndmov %bnd0, (%eax)");
	asm volatile("bndmov %bnd0, (0x12345678)");
	asm volatile("bndmov %bnd3, (%eax)");
	asm volatile("bndmov %bnd0, (%ecx,%eax,1)");
	asm volatile("bndmov %bnd0, 0x12345678(,%eax,1)");
	asm volatile("bndmov %bnd0, (%eax,%ecx,1)");
	asm volatile("bndmov %bnd0, (%eax,%ecx,8)");
	asm volatile("bndmov %bnd0, 0x12(%eax)");
	asm volatile("bndmov %bnd0, 0x12(%ebp)");
	asm volatile("bndmov %bnd0, 0x12(%ecx,%eax,1)");
	asm volatile("bndmov %bnd0, 0x12(%ebp,%eax,1)");
	asm volatile("bndmov %bnd0, 0x12(%eax,%ecx,1)");
	asm volatile("bndmov %bnd0, 0x12(%eax,%ecx,8)");
	asm volatile("bndmov %bnd0, 0x12345678(%eax)");
	asm volatile("bndmov %bnd0, 0x12345678(%ebp)");
	asm volatile("bndmov %bnd0, 0x12345678(%ecx,%eax,1)");
	asm volatile("bndmov %bnd0, 0x12345678(%ebp,%eax,1)");
	asm volatile("bndmov %bnd0, 0x12345678(%eax,%ecx,1)");
	asm volatile("bndmov %bnd0, 0x12345678(%eax,%ecx,8)");

	/* bndmov bnd2, bnd1 */

	asm volatile("bndmov %bnd0, %bnd1");
	asm volatile("bndmov %bnd1, %bnd0");

	/* bndldx mib, bnd */

	asm volatile("bndldx (%eax), %bnd0");
	asm volatile("bndldx (0x12345678), %bnd0");
	asm volatile("bndldx (%eax), %bnd3");
	asm volatile("bndldx (%ecx,%eax,1), %bnd0");
	asm volatile("bndldx 0x12345678(,%eax,1), %bnd0");
	asm volatile("bndldx (%eax,%ecx,1), %bnd0");
	asm volatile("bndldx 0x12(%eax), %bnd0");
	asm volatile("bndldx 0x12(%ebp), %bnd0");
	asm volatile("bndldx 0x12(%ecx,%eax,1), %bnd0");
	asm volatile("bndldx 0x12(%ebp,%eax,1), %bnd0");
	asm volatile("bndldx 0x12(%eax,%ecx,1), %bnd0");
	asm volatile("bndldx 0x12345678(%eax), %bnd0");
	asm volatile("bndldx 0x12345678(%ebp), %bnd0");
	asm volatile("bndldx 0x12345678(%ecx,%eax,1), %bnd0");
	asm volatile("bndldx 0x12345678(%ebp,%eax,1), %bnd0");
	asm volatile("bndldx 0x12345678(%eax,%ecx,1), %bnd0");

	/* bndstx bnd, mib */

	asm volatile("bndstx %bnd0, (%eax)");
	asm volatile("bndstx %bnd0, (0x12345678)");
	asm volatile("bndstx %bnd3, (%eax)");
	asm volatile("bndstx %bnd0, (%ecx,%eax,1)");
	asm volatile("bndstx %bnd0, 0x12345678(,%eax,1)");
	asm volatile("bndstx %bnd0, (%eax,%ecx,1)");
	asm volatile("bndstx %bnd0, 0x12(%eax)");
	asm volatile("bndstx %bnd0, 0x12(%ebp)");
	asm volatile("bndstx %bnd0, 0x12(%ecx,%eax,1)");
	asm volatile("bndstx %bnd0, 0x12(%ebp,%eax,1)");
	asm volatile("bndstx %bnd0, 0x12(%eax,%ecx,1)");
	asm volatile("bndstx %bnd0, 0x12345678(%eax)");
	asm volatile("bndstx %bnd0, 0x12345678(%ebp)");
	asm volatile("bndstx %bnd0, 0x12345678(%ecx,%eax,1)");
	asm volatile("bndstx %bnd0, 0x12345678(%ebp,%eax,1)");
	asm volatile("bndstx %bnd0, 0x12345678(%eax,%ecx,1)");

	/* bnd prefix on call, ret, jmp and all jcc */
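	/*
	 * (Unlike the 64-bit case above, the direct forms here expect
	 * 0xfffffffc rather than 0: 32-bit ELF uses REL relocations, so the
	 * PC-relative addend of -4 is stored in the instruction itself.)
	 */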

	asm volatile("bnd call label1"); /* Expecting: call unconditional 0xfffffffc */
	asm volatile("bnd call *(%eax)"); /* Expecting: call indirect 0 */
	asm volatile("bnd ret"); /* Expecting: ret indirect 0 */
	asm volatile("bnd jmp label1"); /* Expecting: jmp unconditional 0xfffffffc */
	asm volatile("bnd jmp label1"); /* Expecting: jmp unconditional 0xfffffffc */
	asm volatile("bnd jmp *(%ecx)"); /* Expecting: jmp indirect 0 */
	asm volatile("bnd jne label1"); /* Expecting: jcc conditional 0xfffffffc */

	/* sha1rnds4 imm8, xmm2/m128, xmm1 */

	asm volatile("sha1rnds4 $0x0, %xmm1, %xmm0");
	asm volatile("sha1rnds4 $0x91, %xmm7, %xmm2");
	asm volatile("sha1rnds4 $0x91, (%eax), %xmm0");
	asm volatile("sha1rnds4 $0x91, (0x12345678), %xmm0");
	asm volatile("sha1rnds4 $0x91, (%eax), %xmm3");
	asm volatile("sha1rnds4 $0x91, (%ecx,%eax,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(,%eax,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, (%eax,%ecx,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, (%eax,%ecx,8), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12(%eax), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12(%ebp), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12(%ecx,%eax,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12(%ebp,%eax,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12(%eax,%ecx,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12(%eax,%ecx,8), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%eax), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%ebp), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%ecx,%eax,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%ebp,%eax,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%eax,%ecx,1), %xmm0");
	asm volatile("sha1rnds4 $0x91, 0x12345678(%eax,%ecx,8), %xmm0");

	/* sha1nexte xmm2/m128, xmm1 */

	asm volatile("sha1nexte %xmm1, %xmm0");
	asm volatile("sha1nexte %xmm7, %xmm2");
	asm volatile("sha1nexte (%eax), %xmm0");
	asm volatile("sha1nexte (0x12345678), %xmm0");
	asm volatile("sha1nexte (%eax), %xmm3");
	asm volatile("sha1nexte (%ecx,%eax,1), %xmm0");
	asm volatile("sha1nexte 0x12345678(,%eax,1), %xmm0");
	asm volatile("sha1nexte (%eax,%ecx,1), %xmm0");
	asm volatile("sha1nexte (%eax,%ecx,8), %xmm0");
	asm volatile("sha1nexte 0x12(%eax), %xmm0");
	asm volatile("sha1nexte 0x12(%ebp), %xmm0");
	asm volatile("sha1nexte 0x12(%ecx,%eax,1), %xmm0");
	asm volatile("sha1nexte 0x12(%ebp,%eax,1), %xmm0");
	asm volatile("sha1nexte 0x12(%eax,%ecx,1), %xmm0");
	asm volatile("sha1nexte 0x12(%eax,%ecx,8), %xmm0");
	asm volatile("sha1nexte 0x12345678(%eax), %xmm0");
	asm volatile("sha1nexte 0x12345678(%ebp), %xmm0");
	asm volatile("sha1nexte 0x12345678(%ecx,%eax,1), %xmm0");
	asm volatile("sha1nexte 0x12345678(%ebp,%eax,1), %xmm0");
	asm volatile("sha1nexte 0x12345678(%eax,%ecx,1), %xmm0");
	asm volatile("sha1nexte 0x12345678(%eax,%ecx,8), %xmm0");

	/* sha1msg1 xmm2/m128, xmm1 */

	asm volatile("sha1msg1 %xmm1, %xmm0");
	asm volatile("sha1msg1 %xmm7, %xmm2");
	asm volatile("sha1msg1 (%eax), %xmm0");
	asm volatile("sha1msg1 (0x12345678), %xmm0");
	asm volatile("sha1msg1 (%eax), %xmm3");
	asm volatile("sha1msg1 (%ecx,%eax,1), %xmm0");
	asm volatile("sha1msg1 0x12345678(,%eax,1), %xmm0");
	asm volatile("sha1msg1 (%eax,%ecx,1), %xmm0");
	asm volatile("sha1msg1 (%eax,%ecx,8), %xmm0");
	asm volatile("sha1msg1 0x12(%eax), %xmm0");
	asm volatile("sha1msg1 0x12(%ebp), %xmm0");
	asm volatile("sha1msg1 0x12(%ecx,%eax,1), %xmm0");
	asm volatile("sha1msg1 0x12(%ebp,%eax,1), %xmm0");
	asm volatile("sha1msg1 0x12(%eax,%ecx,1), %xmm0");
	asm volatile("sha1msg1 0x12(%eax,%ecx,8), %xmm0");
	asm volatile("sha1msg1 0x12345678(%eax), %xmm0");
	asm volatile("sha1msg1 0x12345678(%ebp), %xmm0");
	asm volatile("sha1msg1 0x12345678(%ecx,%eax,1), %xmm0");
	asm volatile("sha1msg1 0x12345678(%ebp,%eax,1), %xmm0");
	asm volatile("sha1msg1 0x12345678(%eax,%ecx,1), %xmm0");
	asm volatile("sha1msg1 0x12345678(%eax,%ecx,8), %xmm0");

	/* sha1msg2 xmm2/m128, xmm1 */

	asm volatile("sha1msg2 %xmm1, %xmm0");
	asm volatile("sha1msg2 %xmm7, %xmm2");
	asm volatile("sha1msg2 (%eax), %xmm0");
	asm volatile("sha1msg2 (0x12345678), %xmm0");
	asm volatile("sha1msg2 (%eax), %xmm3");
	asm volatile("sha1msg2 (%ecx,%eax,1), %xmm0");
	asm volatile("sha1msg2 0x12345678(,%eax,1), %xmm0");
	asm volatile("sha1msg2 (%eax,%ecx,1), %xmm0");
	asm volatile("sha1msg2 (%eax,%ecx,8), %xmm0");
	asm volatile("sha1msg2 0x12(%eax), %xmm0");
	asm volatile("sha1msg2 0x12(%ebp), %xmm0");
	asm volatile("sha1msg2 0x12(%ecx,%eax,1), %xmm0");
	asm volatile("sha1msg2 0x12(%ebp,%eax,1), %xmm0");
	asm volatile("sha1msg2 0x12(%eax,%ecx,1), %xmm0");
	asm volatile("sha1msg2 0x12(%eax,%ecx,8), %xmm0");
	asm volatile("sha1msg2 0x12345678(%eax), %xmm0");
	asm volatile("sha1msg2 0x12345678(%ebp), %xmm0");
	asm volatile("sha1msg2 0x12345678(%ecx,%eax,1), %xmm0");
	asm volatile("sha1msg2 0x12345678(%ebp,%eax,1), %xmm0");
	asm volatile("sha1msg2 0x12345678(%eax,%ecx,1), %xmm0");
	asm volatile("sha1msg2 0x12345678(%eax,%ecx,8), %xmm0");

	/* sha256rnds2 <XMM0>, xmm2/m128, xmm1 */
	/* Note sha256rnds2 has an implicit operand 'xmm0' */

	asm volatile("sha256rnds2 %xmm4, %xmm1");
	asm volatile("sha256rnds2 %xmm7, %xmm2");
	asm volatile("sha256rnds2 (%eax), %xmm1");
	asm volatile("sha256rnds2 (0x12345678), %xmm1");
	asm volatile("sha256rnds2 (%eax), %xmm3");
	asm volatile("sha256rnds2 (%ecx,%eax,1), %xmm1");
	asm volatile("sha256rnds2 0x12345678(,%eax,1), %xmm1");
	asm volatile("sha256rnds2 (%eax,%ecx,1), %xmm1");
	asm volatile("sha256rnds2 (%eax,%ecx,8), %xmm1");
	asm volatile("sha256rnds2 0x12(%eax), %xmm1");
	asm volatile("sha256rnds2 0x12(%ebp), %xmm1");
	asm volatile("sha256rnds2 0x12(%ecx,%eax,1), %xmm1");
	asm volatile("sha256rnds2 0x12(%ebp,%eax,1), %xmm1");
	asm volatile("sha256rnds2 0x12(%eax,%ecx,1), %xmm1");
	asm volatile("sha256rnds2 0x12(%eax,%ecx,8), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%eax), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%ebp), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%ecx,%eax,1), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%ebp,%eax,1), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%eax,%ecx,1), %xmm1");
	asm volatile("sha256rnds2 0x12345678(%eax,%ecx,8), %xmm1");

	/* sha256msg1 xmm2/m128, xmm1 */

	asm volatile("sha256msg1 %xmm1, %xmm0");
	asm volatile("sha256msg1 %xmm7, %xmm2");
	asm volatile("sha256msg1 (%eax), %xmm0");
	asm volatile("sha256msg1 (0x12345678), %xmm0");
	asm volatile("sha256msg1 (%eax), %xmm3");
	asm volatile("sha256msg1 (%ecx,%eax,1), %xmm0");
	asm volatile("sha256msg1 0x12345678(,%eax,1), %xmm0");
	asm volatile("sha256msg1 (%eax,%ecx,1), %xmm0");
	asm volatile("sha256msg1 (%eax,%ecx,8), %xmm0");
	asm volatile("sha256msg1 0x12(%eax), %xmm0");
	asm volatile("sha256msg1 0x12(%ebp), %xmm0");
	asm volatile("sha256msg1 0x12(%ecx,%eax,1), %xmm0");
	asm volatile("sha256msg1 0x12(%ebp,%eax,1), %xmm0");
	asm volatile("sha256msg1 0x12(%eax,%ecx,1), %xmm0");
	asm volatile("sha256msg1 0x12(%eax,%ecx,8), %xmm0");
	asm volatile("sha256msg1 0x12345678(%eax), %xmm0");
	asm volatile("sha256msg1 0x12345678(%ebp), %xmm0");
	asm volatile("sha256msg1 0x12345678(%ecx,%eax,1), %xmm0");
	asm volatile("sha256msg1 0x12345678(%ebp,%eax,1), %xmm0");
	asm volatile("sha256msg1 0x12345678(%eax,%ecx,1), %xmm0");
	asm volatile("sha256msg1 0x12345678(%eax,%ecx,8), %xmm0");

	/* sha256msg2 xmm2/m128, xmm1 */

	asm volatile("sha256msg2 %xmm1, %xmm0");
	asm volatile("sha256msg2 %xmm7, %xmm2");
	asm volatile("sha256msg2 (%eax), %xmm0");
	asm volatile("sha256msg2 (0x12345678), %xmm0");
	asm volatile("sha256msg2 (%eax), %xmm3");
	asm volatile("sha256msg2 (%ecx,%eax,1), %xmm0");
	asm volatile("sha256msg2 0x12345678(,%eax,1), %xmm0");
	asm volatile("sha256msg2 (%eax,%ecx,1), %xmm0");
	asm volatile("sha256msg2 (%eax,%ecx,8), %xmm0");
	asm volatile("sha256msg2 0x12(%eax), %xmm0");
	asm volatile("sha256msg2 0x12(%ebp), %xmm0");
	asm volatile("sha256msg2 0x12(%ecx,%eax,1), %xmm0");
	asm volatile("sha256msg2 0x12(%ebp,%eax,1), %xmm0");
	asm volatile("sha256msg2 0x12(%eax,%ecx,1), %xmm0");
	asm volatile("sha256msg2 0x12(%eax,%ecx,8), %xmm0");
	asm volatile("sha256msg2 0x12345678(%eax), %xmm0");
	asm volatile("sha256msg2 0x12345678(%ebp), %xmm0");
	asm volatile("sha256msg2 0x12345678(%ecx,%eax,1), %xmm0");
	asm volatile("sha256msg2 0x12345678(%ebp,%eax,1), %xmm0");
	asm volatile("sha256msg2 0x12345678(%eax,%ecx,1), %xmm0");
	asm volatile("sha256msg2 0x12345678(%eax,%ecx,8), %xmm0");

	/* clflushopt m8 */

	asm volatile("clflushopt (%eax)");
	asm volatile("clflushopt (0x12345678)");
	asm volatile("clflushopt 0x12345678(%eax,%ecx,8)");
	/* Also check instructions in the same group encoding as clflushopt */
	asm volatile("clflush (%eax)");
	asm volatile("sfence");

	/* clwb m8 */

	asm volatile("clwb (%eax)");
	asm volatile("clwb (0x12345678)");
	asm volatile("clwb 0x12345678(%eax,%ecx,8)");
	/* Also check instructions in the same group encoding as clwb */
	asm volatile("xsaveopt (%eax)");
	asm volatile("mfence");

	/* xsavec mem */

	asm volatile("xsavec (%eax)");
	asm volatile("xsavec (0x12345678)");
	asm volatile("xsavec 0x12345678(%eax,%ecx,8)");

	/* xsaves mem */

	asm volatile("xsaves (%eax)");
	asm volatile("xsaves (0x12345678)");
	asm volatile("xsaves 0x12345678(%eax,%ecx,8)");

	/* xrstors mem */

	asm volatile("xrstors (%eax)");
	asm volatile("xrstors (0x12345678)");
	asm volatile("xrstors 0x12345678(%eax,%ecx,8)");

#endif /* #ifndef __x86_64__ */

	/* pcommit */
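	/*
	 * (pcommit is 66 0F AE F8, the 66-prefixed form of sfence's slot;
	 * the instruction was later withdrawn from the ISA, but the decoder
	 * entry is exercised here.)
	 */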

	asm volatile("pcommit");

	/* Following line is a marker for the awk script - do not change */
	asm volatile("rdtsc"); /* Stop here */

	return 0;
}