+/* Return 1 iff the first SIZE elements of X are all zero.  SIZE must
+   be 1, 2 or 3; any other value aborts.  */
+
+static INLINE int
+uints_all_zero (const unsigned int *x, unsigned int size)
+{
+  if (size < 1 || size > 3)
+    abort ();
+  if (size > 2 && x[2])
+    return 0;
+  if (size > 1 && x[1])
+    return 0;
+  return !x[0];
+}
+
+/* Store V into each of the first SIZE elements of X.  SIZE must be
+   1, 2 or 3; any other value aborts.  */
+
+static INLINE void
+uints_set (unsigned int *x, unsigned int v, unsigned int size)
+{
+  if (size < 1 || size > 3)
+    abort ();
+  if (size > 2)
+    x[2] = v;
+  if (size > 1)
+    x[1] = v;
+  x[0] = v;
+}
+
+/* Return 1 iff the first SIZE elements of X and Y compare equal.
+   SIZE must be 1, 2 or 3; any other value aborts.  Cases fall
+   through deliberately so each larger SIZE also checks the smaller
+   indices.  */
+
+static INLINE int
+uints_equal (const unsigned int *x, const unsigned int *y,
+	     unsigned int size)
+{
+  switch (size)
+    {
+    case 3:
+      if (x[2] != y[2])
+	return 0;
+      /* Fall through.  */
+    case 2:
+      if (x[1] != y[1])
+	return 0;
+      /* Fall through.  */
+    case 1:
+      return x[0] == y[0];
+    default:
+      abort ();
+    }
+}
+
+/* Apply the uints_* helpers to any structure exposing an "array"
+   member of unsigned ints (e.g. i386_cpu_flags, i386_operand_type),
+   passing the member's element count as the size.  */
+#define UINTS_ALL_ZERO(x) \
+  uints_all_zero ((x).array, ARRAY_SIZE ((x).array))
+#define UINTS_SET(x, v) \
+  uints_set ((x).array, v, ARRAY_SIZE ((x).array))
+#define UINTS_CLEAR(x) \
+  uints_set ((x).array, 0, ARRAY_SIZE ((x).array))
+#define UINTS_EQUAL(x, y) \
+  uints_equal ((x).array, (y).array, ARRAY_SIZE ((x).array))
+
+/* Return 1 if cpu flags F are compatible with the current flag_code:
+   the insn must not be excluded from 64-bit mode (cpuno64) when
+   assembling 64-bit code, nor require 64-bit mode (cpu64) when not.  */
+
+static INLINE int
+cpu_flags_check_cpu64 (i386_cpu_flags f)
+{
+  if (flag_code == CODE_64BIT)
+    return !f.bitfield.cpuno64;
+  return !f.bitfield.cpu64;
+}
+
+/* Return the element-wise bitwise AND of cpu flag sets X and Y.  */
+
+static INLINE i386_cpu_flags
+cpu_flags_and (i386_cpu_flags x, i386_cpu_flags y)
+{
+  unsigned int n = ARRAY_SIZE (x.array);
+
+  if (n < 1 || n > 3)
+    abort ();
+  if (n > 2)
+    x.array[2] &= y.array[2];
+  if (n > 1)
+    x.array[1] &= y.array[1];
+  x.array[0] &= y.array[0];
+  return x;
+}
+
+/* Return the element-wise bitwise OR of cpu flag sets X and Y.  */
+
+static INLINE i386_cpu_flags
+cpu_flags_or (i386_cpu_flags x, i386_cpu_flags y)
+{
+  unsigned int n = ARRAY_SIZE (x.array);
+
+  if (n < 1 || n > 3)
+    abort ();
+  if (n > 2)
+    x.array[2] |= y.array[2];
+  if (n > 1)
+    x.array[1] |= y.array[1];
+  x.array[0] |= y.array[0];
+  return x;
+}
+
+/* Return 3 if there is a perfect match, 2 if compatible with 64bit,
+   1 if compatible with arch, 0 if there is no match.  */
+
+static int
+cpu_flags_match (i386_cpu_flags x)
+{
+  /* Bit 1: the 64-bit/non-64-bit mode requirement is satisfied.  */
+  int match = cpu_flags_check_cpu64 (x) ? 2 : 0;
+
+  /* The mode bits were accounted for above; strip them before
+     comparing against the enabled architecture features.  */
+  x.bitfield.cpu64 = 0;
+  x.bitfield.cpuno64 = 0;
+
+  /* Bit 0: the insn either needs no special feature, or shares at
+     least one feature bit with the active architecture flags.  */
+  if (UINTS_ALL_ZERO (x))
+    match |= 1;
+  else
+    {
+      i386_cpu_flags active = cpu_arch_flags;
+
+      active.bitfield.cpu64 = 0;
+      active.bitfield.cpuno64 = 0;
+      if (!UINTS_ALL_ZERO (cpu_flags_and (x, active)))
+	match |= 1;
+    }
+
+  return match;
+}
+
+/* Return the element-wise bitwise AND of operand types X and Y.  */
+
+static INLINE i386_operand_type
+operand_type_and (i386_operand_type x, i386_operand_type y)
+{
+  unsigned int n = ARRAY_SIZE (x.array);
+
+  if (n < 1 || n > 3)
+    abort ();
+  if (n > 2)
+    x.array[2] &= y.array[2];
+  if (n > 1)
+    x.array[1] &= y.array[1];
+  x.array[0] &= y.array[0];
+  return x;
+}
+
+/* Return the element-wise bitwise OR of operand types X and Y.  */
+
+static INLINE i386_operand_type
+operand_type_or (i386_operand_type x, i386_operand_type y)
+{
+  unsigned int n = ARRAY_SIZE (x.array);
+
+  if (n < 1 || n > 3)
+    abort ();
+  if (n > 2)
+    x.array[2] |= y.array[2];
+  if (n > 1)
+    x.array[1] |= y.array[1];
+  x.array[0] |= y.array[0];
+  return x;
+}
+
+/* Return the element-wise bitwise XOR of operand types X and Y.  */
+
+static INLINE i386_operand_type
+operand_type_xor (i386_operand_type x, i386_operand_type y)
+{
+  unsigned int n = ARRAY_SIZE (x.array);
+
+  if (n < 1 || n > 3)
+    abort ();
+  if (n > 2)
+    x.array[2] ^= y.array[2];
+  if (n > 1)
+    x.array[1] ^= y.array[1];
+  x.array[0] ^= y.array[0];
+  return x;
+}
+
+/* Frequently used operand type masks, built from the generated
+   OPERAND_TYPE_* initializer macros.  */
+static const i386_operand_type acc32 = OPERAND_TYPE_ACC32;
+static const i386_operand_type acc64 = OPERAND_TYPE_ACC64;
+static const i386_operand_type control = OPERAND_TYPE_CONTROL;
+static const i386_operand_type reg16_inoutportreg
+  = OPERAND_TYPE_REG16_INOUTPORTREG;
+static const i386_operand_type disp16 = OPERAND_TYPE_DISP16;
+static const i386_operand_type disp32 = OPERAND_TYPE_DISP32;
+static const i386_operand_type disp32s = OPERAND_TYPE_DISP32S;
+static const i386_operand_type disp16_32 = OPERAND_TYPE_DISP16_32;
+static const i386_operand_type anydisp
+  = OPERAND_TYPE_ANYDISP;
+static const i386_operand_type regxmm = OPERAND_TYPE_REGXMM;
+static const i386_operand_type imm8 = OPERAND_TYPE_IMM8;
+static const i386_operand_type imm8s = OPERAND_TYPE_IMM8S;
+static const i386_operand_type imm16 = OPERAND_TYPE_IMM16;
+static const i386_operand_type imm32 = OPERAND_TYPE_IMM32;
+static const i386_operand_type imm32s = OPERAND_TYPE_IMM32S;
+static const i386_operand_type imm64 = OPERAND_TYPE_IMM64;
+static const i386_operand_type imm16_32 = OPERAND_TYPE_IMM16_32;
+static const i386_operand_type imm16_32s = OPERAND_TYPE_IMM16_32S;
+static const i386_operand_type imm16_32_32s = OPERAND_TYPE_IMM16_32_32S;
+
+/* Broad operand classes tested by operand_type_check.  */
+enum operand_type
+{
+  reg,		/* any reg8/reg16/reg32/reg64 bit */
+  imm,		/* any immediate bit */
+  disp,		/* any displacement bit */
+  anymem	/* any displacement bit or baseindex */
+};
+
+/* Return nonzero iff operand type T has at least one bit of class C
+   set.  Aborts on an unknown class.  */
+
+static INLINE int
+operand_type_check (i386_operand_type t, enum operand_type c)
+{
+  if (c == reg)
+    return (t.bitfield.reg8
+	    || t.bitfield.reg16
+	    || t.bitfield.reg32
+	    || t.bitfield.reg64);
+
+  if (c == imm)
+    return (t.bitfield.imm8
+	    || t.bitfield.imm8s
+	    || t.bitfield.imm16
+	    || t.bitfield.imm32
+	    || t.bitfield.imm32s
+	    || t.bitfield.imm64);
+
+  if (c == disp)
+    return (t.bitfield.disp8
+	    || t.bitfield.disp16
+	    || t.bitfield.disp32
+	    || t.bitfield.disp32s
+	    || t.bitfield.disp64);
+
+  /* A memory operand is anything with a displacement or a base/index
+     register, i.e. the disp class widened by baseindex.  */
+  if (c == anymem)
+    return (operand_type_check (t, disp) || t.bitfield.baseindex);
+
+  abort ();
+}
+
+/* Return 1 if there is no conflict in 8bit/16bit/32bit/64bit on
+   operand J for instruction template T.  */
+
+static INLINE int
+match_reg_size (const template *t, unsigned int j)
+{
+  /* Every register-width bit present on the given operand must also
+     be permitted by the template's operand type.  */
+  return ((!i.types[j].bitfield.byte
+	   || t->operand_types[j].bitfield.byte)
+	  && (!i.types[j].bitfield.word
+	      || t->operand_types[j].bitfield.word)
+	  && (!i.types[j].bitfield.dword
+	      || t->operand_types[j].bitfield.dword)
+	  && (!i.types[j].bitfield.qword
+	      || t->operand_types[j].bitfield.qword));
+}
+
+/* Return 1 if there is no conflict in any size on operand J for
+   instruction template T.  */
+
+static INLINE int
+match_mem_size (const template *t, unsigned int j)
+{
+  /* Besides the register widths, the memory-only widths (and the
+     "unspecified size" marker) set on the given operand must be
+     allowed by the template.  */
+  return (match_reg_size (t, j)
+	  && (!i.types[j].bitfield.unspecified
+	      || t->operand_types[j].bitfield.unspecified)
+	  && (!i.types[j].bitfield.fword
+	      || t->operand_types[j].bitfield.fword)
+	  && (!i.types[j].bitfield.tbyte
+	      || t->operand_types[j].bitfield.tbyte)
+	  && (!i.types[j].bitfield.xmmword
+	      || t->operand_types[j].bitfield.xmmword));
+}
+
+/* Return 1 if there is no size conflict on any operands for
+   instruction template T. */
+
+static INLINE int
+operand_size_match (const template *t)
+{
+  unsigned int j;
+  int match = 1;
+
+  /* Don't check jump instructions. */
+  if (t->opcode_modifier.jump
+      || t->opcode_modifier.jumpbyte
+      || t->opcode_modifier.jumpdword
+      || t->opcode_modifier.jumpintersegment)
+    return match;
+
+  /* Check memory and accumulator operand size. */
+  for (j = 0; j < i.operands; j++)
+    {
+      /* Templates marked "anysize" accept any operand size.  */
+      if (t->operand_types[j].bitfield.anysize)
+	continue;
+
+      /* Accumulator operands only need to agree in register width.  */
+      if (t->operand_types[j].bitfield.acc && !match_reg_size (t, j))
+	{
+	  match = 0;
+	  break;
+	}
+
+      /* Memory operands must agree in all widths.  */
+      if (i.types[j].bitfield.mem && !match_mem_size (t, j))
+	{
+	  match = 0;
+	  break;
+	}
+    }
+
+  /* If the straight match failed but the template supports reversed
+     operand order (D or FloatD), try the reverse direction too.  */
+  if (match
+      || (!t->opcode_modifier.d && !t->opcode_modifier.floatd))
+    return match;
+
+  /* Check reverse. */
+  assert (i.operands == 2);
+
+  match = 1;
+  for (j = 0; j < 2; j++)
+    {
+      /* Re-run the size checks against the other operand slot
+	 (index 1 - j).  */
+      if (t->operand_types[j].bitfield.acc
+	  && !match_reg_size (t, j ? 0 : 1))
+	{
+	  match = 0;
+	  break;
+	}
+
+      if (i.types[j].bitfield.mem
+	  && !match_mem_size (t, j ? 0 : 1))
+	{
+	  match = 0;
+	  break;
+	}
+    }
+
+  return match;
+}
+
+/* Return 1 if the overlap OVERLAP between a template operand type and
+   given operand type GIVEN constitutes a usable match: the overlap
+   must retain at least one real type bit once size and attribute bits
+   are stripped, and must agree with GIVEN on the baseindex and
+   jumpabsolute attributes.  */
+
+static INLINE int
+operand_type_match (i386_operand_type overlap,
+		    i386_operand_type given)
+{
+  i386_operand_type mask = overlap;
+
+  /* Size and attribute bits alone do not make a match.  */
+  mask.bitfield.jumpabsolute = 0;
+  mask.bitfield.unspecified = 0;
+  mask.bitfield.byte = 0;
+  mask.bitfield.word = 0;
+  mask.bitfield.dword = 0;
+  mask.bitfield.fword = 0;
+  mask.bitfield.qword = 0;
+  mask.bitfield.tbyte = 0;
+  mask.bitfield.xmmword = 0;
+
+  if (UINTS_ALL_ZERO (mask))
+    return 0;
+
+  return (given.bitfield.baseindex == overlap.bitfield.baseindex
+	  && given.bitfield.jumpabsolute == overlap.bitfield.jumpabsolute);
+}
+
+/* If given types G0 and G1 are registers they must be of the same type
+   unless the expected operand type register overlap is null.
+   Note that Acc in a template matches every size of reg.  */
+
+static INLINE int
+operand_type_register_match (i386_operand_type m0,
+			     i386_operand_type g0,
+			     i386_operand_type t0,
+			     i386_operand_type m1,
+			     i386_operand_type g1,
+			     i386_operand_type t1)
+{
+  /* Only pairs of register operands need checking.  */
+  if (!operand_type_check (g0, reg) || !operand_type_check (g1, reg))
+    return 1;
+
+  /* Identical register classes always match.  */
+  if (g0.bitfield.reg8 == g1.bitfield.reg8
+      && g0.bitfield.reg16 == g1.bitfield.reg16
+      && g0.bitfield.reg32 == g1.bitfield.reg32
+      && g0.bitfield.reg64 == g1.bitfield.reg64)
+    return 1;
+
+  /* An accumulator overlap widens the template side to every size.  */
+  if (m0.bitfield.acc)
+    {
+      t0.bitfield.reg8 = 1;
+      t0.bitfield.reg16 = 1;
+      t0.bitfield.reg32 = 1;
+      t0.bitfield.reg64 = 1;
+    }
+
+  if (m1.bitfield.acc)
+    {
+      t1.bitfield.reg8 = 1;
+      t1.bitfield.reg16 = 1;
+      t1.bitfield.reg32 = 1;
+      t1.bitfield.reg64 = 1;
+    }
+
+  /* Mismatched classes are tolerated only when the templates share no
+     register width at all.  */
+  return !((t0.bitfield.reg8 & t1.bitfield.reg8)
+	   | (t0.bitfield.reg16 & t1.bitfield.reg16)
+	   | (t0.bitfield.reg32 & t1.bitfield.reg32)
+	   | (t0.bitfield.reg64 & t1.bitfield.reg64));
+}
+