# Check 32bit AVX512{VBMI2,VL} instructions

# Assembler test input: each line exercises one encoding form of the
# AVX512-VBMI2 instructions under AVX512VL (128-bit xmm and 256-bit ymm
# vector lengths), in 32-bit mode.  Lines tagged "Disp8" use a memory
# displacement that is an exact multiple of the instruction's EVEX tuple
# size N, so the assembler must emit the compressed 8-bit displacement
# (disp8*N) encoding; the corresponding dump test checks the produced
# bytes, so instruction text and order must not be changed.

.allow_index_reg
.text
_start:
# vpcompressb: byte elements, tuple size N=1, so Disp8 covers -128..127 (126 fits).
vpcompressb %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{VBMI2,VL}
vpcompressb %xmm6, 126(%edx){%k7} # AVX512{VBMI2,VL} Disp8
vpcompressb %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{VBMI2,VL}
vpcompressb %ymm6, 126(%edx){%k7} # AVX512{VBMI2,VL} Disp8
vpcompressb %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpcompressb %xmm5, %xmm6{%k7}{z} # AVX512{VBMI2,VL}
vpcompressb %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpcompressb %ymm5, %ymm6{%k7}{z} # AVX512{VBMI2,VL}

# vpcompressw: word elements, tuple size N=2 (128 = 64*2 fits Disp8).
vpcompressw %xmm6, -123456(%esp,%esi,8){%k7} # AVX512{VBMI2,VL}
vpcompressw %xmm6, 128(%edx){%k7} # AVX512{VBMI2,VL} Disp8
vpcompressw %ymm6, -123456(%esp,%esi,8){%k7} # AVX512{VBMI2,VL}
vpcompressw %ymm6, 128(%edx){%k7} # AVX512{VBMI2,VL} Disp8
vpcompressw %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpcompressw %xmm5, %xmm6{%k7}{z} # AVX512{VBMI2,VL}
vpcompressw %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpcompressw %ymm5, %ymm6{%k7}{z} # AVX512{VBMI2,VL}

# vpexpandb: byte elements, tuple size N=1.
vpexpandb (%ecx), %xmm6{%k7}{z} # AVX512{VBMI2,VL}
vpexpandb -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{VBMI2,VL}
vpexpandb 126(%edx), %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpexpandb (%ecx), %ymm6{%k7}{z} # AVX512{VBMI2,VL}
vpexpandb -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{VBMI2,VL}
vpexpandb 126(%edx), %ymm6{%k7} # AVX512{VBMI2,VL} Disp8
vpexpandb %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpexpandb %xmm5, %xmm6{%k7}{z} # AVX512{VBMI2,VL}
vpexpandb %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpexpandb %ymm5, %ymm6{%k7}{z} # AVX512{VBMI2,VL}

# vpexpandw: word elements, tuple size N=2.
vpexpandw (%ecx), %xmm6{%k7}{z} # AVX512{VBMI2,VL}
vpexpandw -123456(%esp,%esi,8), %xmm6{%k7} # AVX512{VBMI2,VL}
vpexpandw 128(%edx), %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpexpandw (%ecx), %ymm6{%k7}{z} # AVX512{VBMI2,VL}
vpexpandw -123456(%esp,%esi,8), %ymm6{%k7} # AVX512{VBMI2,VL}
vpexpandw 128(%edx), %ymm6{%k7} # AVX512{VBMI2,VL} Disp8
vpexpandw %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpexpandw %xmm5, %xmm6{%k7}{z} # AVX512{VBMI2,VL}
vpexpandw %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpexpandw %ymm5, %ymm6{%k7}{z} # AVX512{VBMI2,VL}

# vpshldvw: full-vector memory operand, N=16 (xmm, 2032=127*16) / N=32 (ymm, 4064=127*32).
vpshldvw %xmm4, %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshldvw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{VBMI2,VL}
vpshldvw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshldvw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshldvw %ymm4, %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshldvw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{VBMI2,VL}
vpshldvw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshldvw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL} Disp8

# vpshldvd: adds dword broadcast forms; broadcast tuple N=4 (508=127*4).
vpshldvd %xmm4, %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshldvd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{VBMI2,VL}
vpshldvd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshldvd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshldvd 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshldvd %ymm4, %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshldvd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{VBMI2,VL}
vpshldvd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshldvd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshldvd 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL} Disp8

# vpshldvq: qword broadcast tuple N=8 (1016=127*8).
vpshldvq %xmm4, %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshldvq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{VBMI2,VL}
vpshldvq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshldvq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshldvq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshldvq %ymm4, %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshldvq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{VBMI2,VL}
vpshldvq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshldvq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshldvq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL} Disp8

# vpshrdvw/vpshrdvd/vpshrdvq: right-shift counterparts, same operand forms as above.
vpshrdvw %xmm4, %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshrdvw %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{VBMI2,VL}
vpshrdvw -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshrdvw 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshrdvw %ymm4, %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshrdvw %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{VBMI2,VL}
vpshrdvw -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshrdvw 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL} Disp8

vpshrdvd %xmm4, %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshrdvd %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{VBMI2,VL}
vpshrdvd -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshrdvd 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshrdvd 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshrdvd %ymm4, %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshrdvd %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{VBMI2,VL}
vpshrdvd -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshrdvd 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshrdvd 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL} Disp8

vpshrdvq %xmm4, %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshrdvq %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{VBMI2,VL}
vpshrdvq -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshrdvq 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshrdvq 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshrdvq %ymm4, %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshrdvq %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{VBMI2,VL}
vpshrdvq -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshrdvq 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshrdvq 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL} Disp8

# Immediate-count forms (VPSHLDW/D/Q, VPSHRDW/D/Q): same memory/broadcast
# tuple rules, with an extra imm8 shift count operand.
vpshldw $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshldw $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{VBMI2,VL}
vpshldw $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshldw $123, 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshldw $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshldw $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{VBMI2,VL}
vpshldw $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshldw $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL} Disp8

vpshldd $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshldd $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{VBMI2,VL}
vpshldd $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshldd $123, 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshldd $123, 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshldd $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshldd $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{VBMI2,VL}
vpshldd $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshldd $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshldd $123, 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL} Disp8

vpshldq $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshldq $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{VBMI2,VL}
vpshldq $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshldq $123, 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshldq $123, 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshldq $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshldq $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{VBMI2,VL}
vpshldq $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshldq $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshldq $123, 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL} Disp8

vpshrdw $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshrdw $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{VBMI2,VL}
vpshrdw $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshrdw $123, 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshrdw $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshrdw $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{VBMI2,VL}
vpshrdw $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshrdw $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL} Disp8

vpshrdd $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshrdd $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{VBMI2,VL}
vpshrdd $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshrdd $123, 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshrdd $123, 508(%edx){1to4}, %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshrdd $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshrdd $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{VBMI2,VL}
vpshrdd $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshrdd $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshrdd $123, 508(%edx){1to8}, %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL} Disp8

vpshrdq $0xab, %xmm4, %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshrdq $0xab, %xmm4, %xmm5, %xmm6{%k7}{z} # AVX512{VBMI2,VL}
vpshrdq $123, -123456(%esp,%esi,8), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL}
vpshrdq $123, 2032(%edx), %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshrdq $123, 1016(%edx){1to2}, %xmm5, %xmm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshrdq $0xab, %ymm4, %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshrdq $0xab, %ymm4, %ymm5, %ymm6{%k7}{z} # AVX512{VBMI2,VL}
vpshrdq $123, -123456(%esp,%esi,8), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL}
vpshrdq $123, 4064(%edx), %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL} Disp8
vpshrdq $123, 1016(%edx){1to4}, %ymm5, %ymm6{%k7} # AVX512{VBMI2,VL} Disp8

# Same instruction set repeated in Intel syntax (note: immediate is the
# last operand and writemask/zeroing attach to the destination).
.intel_syntax noprefix
vpcompressb XMMWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{VBMI2,VL}
vpcompressb XMMWORD PTR [edx+126]{k7}, xmm6 # AVX512{VBMI2,VL} Disp8
vpcompressb YMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{VBMI2,VL}
vpcompressb YMMWORD PTR [edx+126]{k7}, ymm6 # AVX512{VBMI2,VL} Disp8
vpcompressb xmm6{k7}, xmm5 # AVX512{VBMI2,VL}
vpcompressb xmm6{k7}{z}, xmm5 # AVX512{VBMI2,VL}
vpcompressb ymm6{k7}, ymm5 # AVX512{VBMI2,VL}
vpcompressb ymm6{k7}{z}, ymm5 # AVX512{VBMI2,VL}

vpcompressw XMMWORD PTR [esp+esi*8-123456]{k7}, xmm6 # AVX512{VBMI2,VL}
vpcompressw XMMWORD PTR [edx+128]{k7}, xmm6 # AVX512{VBMI2,VL} Disp8
vpcompressw YMMWORD PTR [esp+esi*8-123456]{k7}, ymm6 # AVX512{VBMI2,VL}
vpcompressw YMMWORD PTR [edx+128]{k7}, ymm6 # AVX512{VBMI2,VL} Disp8
vpcompressw xmm6{k7}, xmm5 # AVX512{VBMI2,VL}
vpcompressw xmm6{k7}{z}, xmm5 # AVX512{VBMI2,VL}
vpcompressw ymm6{k7}, ymm5 # AVX512{VBMI2,VL}
vpcompressw ymm6{k7}{z}, ymm5 # AVX512{VBMI2,VL}

vpexpandb xmm6{k7}{z}, XMMWORD PTR [ecx] # AVX512{VBMI2,VL}
vpexpandb xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{VBMI2,VL}
vpexpandb xmm6{k7}, XMMWORD PTR [edx+126] # AVX512{VBMI2,VL} Disp8
vpexpandb ymm6{k7}{z}, YMMWORD PTR [ecx] # AVX512{VBMI2,VL}
vpexpandb ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{VBMI2,VL}
vpexpandb ymm6{k7}, YMMWORD PTR [edx+126] # AVX512{VBMI2,VL} Disp8
vpexpandb xmm6{k7}, xmm5 # AVX512{VBMI2,VL}
vpexpandb xmm6{k7}{z}, xmm5 # AVX512{VBMI2,VL}
vpexpandb ymm6{k7}, ymm5 # AVX512{VBMI2,VL}
vpexpandb ymm6{k7}{z}, ymm5 # AVX512{VBMI2,VL}

vpexpandw xmm6{k7}{z}, XMMWORD PTR [ecx] # AVX512{VBMI2,VL}
vpexpandw xmm6{k7}, XMMWORD PTR [esp+esi*8-123456] # AVX512{VBMI2,VL}
vpexpandw xmm6{k7}, XMMWORD PTR [edx+128] # AVX512{VBMI2,VL} Disp8
vpexpandw ymm6{k7}{z}, YMMWORD PTR [ecx] # AVX512{VBMI2,VL}
vpexpandw ymm6{k7}, YMMWORD PTR [esp+esi*8-123456] # AVX512{VBMI2,VL}
vpexpandw ymm6{k7}, YMMWORD PTR [edx+128] # AVX512{VBMI2,VL} Disp8
vpexpandw xmm6{k7}, xmm5 # AVX512{VBMI2,VL}
vpexpandw xmm6{k7}{z}, xmm5 # AVX512{VBMI2,VL}
vpexpandw ymm6{k7}, ymm5 # AVX512{VBMI2,VL}
vpexpandw ymm6{k7}{z}, ymm5 # AVX512{VBMI2,VL}

vpshldvw xmm6{k7}, xmm5, xmm4 # AVX512{VBMI2,VL}
vpshldvw xmm6{k7}{z}, xmm5, xmm4 # AVX512{VBMI2,VL}
vpshldvw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{VBMI2,VL}
vpshldvw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{VBMI2,VL} Disp8
vpshldvw ymm6{k7}, ymm5, ymm4 # AVX512{VBMI2,VL}
vpshldvw ymm6{k7}{z}, ymm5, ymm4 # AVX512{VBMI2,VL}
vpshldvw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{VBMI2,VL}
vpshldvw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{VBMI2,VL} Disp8

vpshldvd xmm6{k7}, xmm5, xmm4 # AVX512{VBMI2,VL}
vpshldvd xmm6{k7}{z}, xmm5, xmm4 # AVX512{VBMI2,VL}
vpshldvd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{VBMI2,VL}
vpshldvd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{VBMI2,VL} Disp8
vpshldvd xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{VBMI2,VL} Disp8
vpshldvd ymm6{k7}, ymm5, ymm4 # AVX512{VBMI2,VL}
vpshldvd ymm6{k7}{z}, ymm5, ymm4 # AVX512{VBMI2,VL}
vpshldvd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{VBMI2,VL}
vpshldvd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{VBMI2,VL} Disp8
vpshldvd ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{VBMI2,VL} Disp8

vpshldvq xmm6{k7}, xmm5, xmm4 # AVX512{VBMI2,VL}
vpshldvq xmm6{k7}{z}, xmm5, xmm4 # AVX512{VBMI2,VL}
vpshldvq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{VBMI2,VL}
vpshldvq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{VBMI2,VL} Disp8
vpshldvq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{VBMI2,VL} Disp8
vpshldvq ymm6{k7}, ymm5, ymm4 # AVX512{VBMI2,VL}
vpshldvq ymm6{k7}{z}, ymm5, ymm4 # AVX512{VBMI2,VL}
vpshldvq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{VBMI2,VL}
vpshldvq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{VBMI2,VL} Disp8
vpshldvq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{VBMI2,VL} Disp8

vpshrdvw xmm6{k7}, xmm5, xmm4 # AVX512{VBMI2,VL}
vpshrdvw xmm6{k7}{z}, xmm5, xmm4 # AVX512{VBMI2,VL}
vpshrdvw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{VBMI2,VL}
vpshrdvw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{VBMI2,VL} Disp8
vpshrdvw ymm6{k7}, ymm5, ymm4 # AVX512{VBMI2,VL}
vpshrdvw ymm6{k7}{z}, ymm5, ymm4 # AVX512{VBMI2,VL}
vpshrdvw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{VBMI2,VL}
vpshrdvw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{VBMI2,VL} Disp8

vpshrdvd xmm6{k7}, xmm5, xmm4 # AVX512{VBMI2,VL}
vpshrdvd xmm6{k7}{z}, xmm5, xmm4 # AVX512{VBMI2,VL}
vpshrdvd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{VBMI2,VL}
vpshrdvd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{VBMI2,VL} Disp8
vpshrdvd xmm6{k7}, xmm5, [edx+508]{1to4} # AVX512{VBMI2,VL} Disp8
vpshrdvd ymm6{k7}, ymm5, ymm4 # AVX512{VBMI2,VL}
vpshrdvd ymm6{k7}{z}, ymm5, ymm4 # AVX512{VBMI2,VL}
vpshrdvd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{VBMI2,VL}
vpshrdvd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{VBMI2,VL} Disp8
vpshrdvd ymm6{k7}, ymm5, [edx+508]{1to8} # AVX512{VBMI2,VL} Disp8

vpshrdvq xmm6{k7}, xmm5, xmm4 # AVX512{VBMI2,VL}
vpshrdvq xmm6{k7}{z}, xmm5, xmm4 # AVX512{VBMI2,VL}
vpshrdvq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456] # AVX512{VBMI2,VL}
vpshrdvq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032] # AVX512{VBMI2,VL} Disp8
vpshrdvq xmm6{k7}, xmm5, [edx+1016]{1to2} # AVX512{VBMI2,VL} Disp8
vpshrdvq ymm6{k7}, ymm5, ymm4 # AVX512{VBMI2,VL}
vpshrdvq ymm6{k7}{z}, ymm5, ymm4 # AVX512{VBMI2,VL}
vpshrdvq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456] # AVX512{VBMI2,VL}
vpshrdvq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064] # AVX512{VBMI2,VL} Disp8
vpshrdvq ymm6{k7}, ymm5, [edx+1016]{1to4} # AVX512{VBMI2,VL} Disp8

vpshldw xmm6{k7}, xmm5, xmm4, 0xab # AVX512{VBMI2,VL}
vpshldw xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512{VBMI2,VL}
vpshldw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{VBMI2,VL}
vpshldw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123 # AVX512{VBMI2,VL} Disp8
vpshldw ymm6{k7}, ymm5, ymm4, 0xab # AVX512{VBMI2,VL}
vpshldw ymm6{k7}{z}, ymm5, ymm4, 0xab # AVX512{VBMI2,VL}
vpshldw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{VBMI2,VL}
vpshldw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123 # AVX512{VBMI2,VL} Disp8

vpshldd xmm6{k7}, xmm5, xmm4, 0xab # AVX512{VBMI2,VL}
vpshldd xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512{VBMI2,VL}
vpshldd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{VBMI2,VL}
vpshldd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123 # AVX512{VBMI2,VL} Disp8
vpshldd xmm6{k7}, xmm5, [edx+508]{1to4}, 123 # AVX512{VBMI2,VL} Disp8
vpshldd ymm6{k7}, ymm5, ymm4, 0xab # AVX512{VBMI2,VL}
vpshldd ymm6{k7}{z}, ymm5, ymm4, 0xab # AVX512{VBMI2,VL}
vpshldd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{VBMI2,VL}
vpshldd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123 # AVX512{VBMI2,VL} Disp8
vpshldd ymm6{k7}, ymm5, [edx+508]{1to8}, 123 # AVX512{VBMI2,VL} Disp8

vpshldq xmm6{k7}, xmm5, xmm4, 0xab # AVX512{VBMI2,VL}
vpshldq xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512{VBMI2,VL}
vpshldq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{VBMI2,VL}
vpshldq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123 # AVX512{VBMI2,VL} Disp8
vpshldq xmm6{k7}, xmm5, [edx+1016]{1to2}, 123 # AVX512{VBMI2,VL} Disp8
vpshldq ymm6{k7}, ymm5, ymm4, 0xab # AVX512{VBMI2,VL}
vpshldq ymm6{k7}{z}, ymm5, ymm4, 0xab # AVX512{VBMI2,VL}
vpshldq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{VBMI2,VL}
vpshldq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123 # AVX512{VBMI2,VL} Disp8
vpshldq ymm6{k7}, ymm5, [edx+1016]{1to4}, 123 # AVX512{VBMI2,VL} Disp8

vpshrdw xmm6{k7}, xmm5, xmm4, 0xab # AVX512{VBMI2,VL}
vpshrdw xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512{VBMI2,VL}
vpshrdw xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{VBMI2,VL}
vpshrdw xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123 # AVX512{VBMI2,VL} Disp8
vpshrdw ymm6{k7}, ymm5, ymm4, 0xab # AVX512{VBMI2,VL}
vpshrdw ymm6{k7}{z}, ymm5, ymm4, 0xab # AVX512{VBMI2,VL}
vpshrdw ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{VBMI2,VL}
vpshrdw ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123 # AVX512{VBMI2,VL} Disp8
# vpshrdd follows immediately (no separating blank line in the original).
vpshrdd xmm6{k7}, xmm5, xmm4, 0xab # AVX512{VBMI2,VL}
vpshrdd xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512{VBMI2,VL}
vpshrdd xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{VBMI2,VL}
vpshrdd xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123 # AVX512{VBMI2,VL} Disp8
vpshrdd xmm6{k7}, xmm5, [edx+508]{1to4}, 123 # AVX512{VBMI2,VL} Disp8
vpshrdd ymm6{k7}, ymm5, ymm4, 0xab # AVX512{VBMI2,VL}
vpshrdd ymm6{k7}{z}, ymm5, ymm4, 0xab # AVX512{VBMI2,VL}
vpshrdd ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{VBMI2,VL}
vpshrdd ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123 # AVX512{VBMI2,VL} Disp8
vpshrdd ymm6{k7}, ymm5, [edx+508]{1to8}, 123 # AVX512{VBMI2,VL} Disp8

vpshrdq xmm6{k7}, xmm5, xmm4, 0xab # AVX512{VBMI2,VL}
vpshrdq xmm6{k7}{z}, xmm5, xmm4, 0xab # AVX512{VBMI2,VL}
vpshrdq xmm6{k7}, xmm5, XMMWORD PTR [esp+esi*8-123456], 123 # AVX512{VBMI2,VL}
vpshrdq xmm6{k7}, xmm5, XMMWORD PTR [edx+2032], 123 # AVX512{VBMI2,VL} Disp8
vpshrdq xmm6{k7}, xmm5, [edx+1016]{1to2}, 123 # AVX512{VBMI2,VL} Disp8
vpshrdq ymm6{k7}, ymm5, ymm4, 0xab # AVX512{VBMI2,VL}
vpshrdq ymm6{k7}{z}, ymm5, ymm4, 0xab # AVX512{VBMI2,VL}
vpshrdq ymm6{k7}, ymm5, YMMWORD PTR [esp+esi*8-123456], 123 # AVX512{VBMI2,VL}
vpshrdq ymm6{k7}, ymm5, YMMWORD PTR [edx+4064], 123 # AVX512{VBMI2,VL} Disp8
vpshrdq ymm6{k7}, ymm5, [edx+1016]{1to4}, 123 # AVX512{VBMI2,VL} Disp8