+2019-05-09  Matthew Malcomson  <matthew.malcomson@arm.com>
+
+ * config/tc-aarch64.c (REG_ZR): New macro for the zero register.
+ (parse_address_main): Account for new addressing mode [Zn.S, Xm].
+ (parse_operands): Handle new SVE_ADDR_ZX operand.
+
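For context, a minimal sketch of the new vector-plus-scalar addressing
mode in assembly.  The SVE2 non-temporal gather loads are used here as
illustrative consumers of the new operand; any instruction taking
SVE_ADDR_ZX is parsed the same way:

	ldnt1b	{ z0.s }, p0/z, [z1.s, x2]	// 32-bit elements: [Zn.S, Xm]
	ldnt1d	{ z0.d }, p0/z, [z1.d, x2]	// 64-bit elements: [Zn.D, Xm]
	ldnt1b	{ z0.s }, p0/z, [z1.s]		// Xm omitted: treated as xzr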
2019-05-09  Matthew Malcomson  <matthew.malcomson@arm.com>

* config/tc-aarch64.c (parse_operands): Handle new SVE_Zm3_11_INDEX
operand.
/* Some well known registers that we refer to directly elsewhere. */
#define REG_SP 31
+#define REG_ZR 31
/* Instructions take 4 bytes in the object file. */
#define INSN_SIZE 4
[base,Zm.D,(S|U)XTW {#imm}] // ignores top 32 bits of Zm.D elements
[Zn.S,#imm]
[Zn.D,#imm]
+ [Zn.S{, Xm}]
+ [Zn.D{, Xm}]
[Zn.S,Zm.S{,LSL #imm}] // in ADR
[Zn.D,Zm.D{,LSL #imm}] // in ADR
[Zn.D,Zm.D,(S|U)XTW {#imm}] // in ADR
return FALSE;
}
/* We only accept:
+ [base,Xm] # For vector plus scalar SVE2 indexing.
[base,Xm{,LSL #imm}]
[base,Xm,SXTX {#imm}]
[base,Wm,(S|U)XTW {#imm}] */
return FALSE;
}
if (aarch64_get_qualifier_esize (*base_qualifier)
- != aarch64_get_qualifier_esize (*offset_qualifier))
+ != aarch64_get_qualifier_esize (*offset_qualifier)
+ && (operand->type != AARCH64_OPND_SVE_ADDR_ZX
+ || *base_qualifier != AARCH64_OPND_QLF_S_S
+ || *offset_qualifier != AARCH64_OPND_QLF_X))
{
set_syntax_error (_("offset has different size from base"));
return FALSE;
}
/* If at this point neither .preind nor .postind is set, we have a
- bare [Rn]{!}; reject [Rn]! accept [Rn] as a shorthand for [Rn,#0]. */
+ bare [Rn]{!}; reject [Rn]! accept [Rn] as a shorthand for [Rn,#0].
+ For SVE2 vector plus scalar offsets, allow [Zn.<T>] as shorthand for
+ [Zn.<T>, xzr]. */
if (operand->addr.preind == 0 && operand->addr.postind == 0)
{
if (operand->addr.writeback)
}
operand->addr.preind = 1;
- inst.reloc.exp.X_op = O_constant;
- inst.reloc.exp.X_add_number = 0;
+ if (operand->type == AARCH64_OPND_SVE_ADDR_ZX)
+ {
+ operand->addr.offset.is_reg = 1;
+ operand->addr.offset.regno = REG_ZR;
+ *offset_qualifier = AARCH64_OPND_QLF_X;
+ }
+ else
+ {
+ inst.reloc.exp.X_op = O_constant;
+ inst.reloc.exp.X_add_number = 0;
+ }
}
*str = p;
info->qualifier = offset_qualifier;
goto regoff_addr;
+ case AARCH64_OPND_SVE_ADDR_ZX:
+ /* [Zn.<T>{, <Xm>}]. */
+ po_misc_or_fail (parse_sve_address (&str, info, &base_qualifier,
+ &offset_qualifier));
+ /* The base qualifier must be S_S or S_D and the offset qualifier
+ must be X.  */
+ if ((base_qualifier != AARCH64_OPND_QLF_S_S
+ && base_qualifier != AARCH64_OPND_QLF_S_D)
+ || offset_qualifier != AARCH64_OPND_QLF_X)
+ {
+ set_syntax_error (_("invalid addressing mode"));
+ goto failure;
+ }
+ info->qualifier = base_qualifier;
+ if (!info->addr.offset.is_reg || info->addr.pcrel
+ || !info->addr.preind || info->addr.writeback
+ || info->shifter.operator_present != 0)
+ {
+ set_syntax_error (_("invalid addressing mode"));
+ goto failure;
+ }
+ info->shifter.kind = AARCH64_MOD_LSL;
+ break;
+
case AARCH64_OPND_SVE_ADDR_ZI_U5:
case AARCH64_OPND_SVE_ADDR_ZI_U5x2:
case AARCH64_OPND_SVE_ADDR_ZI_U5x4:
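To summarise the new parse_operands case, a sketch of the forms it
accepts and rejects (the mnemonic is again illustrative):

	ldnt1b	{ z0.s }, p0/z, [z1.s, x2]	// accepted: base S_S, offset X
	ldnt1b	{ z0.d }, p0/z, [z1.d, x2]	// accepted: base S_D, offset X
	ldnt1b	{ z0.s }, p0/z, [z1.b, x2]	// rejected: base must be .s or .d
	ldnt1b	{ z0.s }, p0/z, [z1.s, w2]	// rejected: offset must be an X register

Rejected forms report "invalid addressing mode", matching the
set_syntax_error calls above.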
+2019-05-09  Matthew Malcomson  <matthew.malcomson@arm.com>
+
+ * opcode/aarch64.h (enum aarch64_opnd): New SVE_ADDR_ZX operand.
+
2019-05-09  Matthew Malcomson  <matthew.malcomson@arm.com>

* opcode/aarch64.h (enum aarch64_opnd): New SVE_Zm3_11_INDEX operand.
AARCH64_OPND_SVE_ADDR_RX_LSL1, /* SVE [<Xn|SP>, <Xm>, LSL #1]. */
AARCH64_OPND_SVE_ADDR_RX_LSL2, /* SVE [<Xn|SP>, <Xm>, LSL #2]. */
AARCH64_OPND_SVE_ADDR_RX_LSL3, /* SVE [<Xn|SP>, <Xm>, LSL #3]. */
+ AARCH64_OPND_SVE_ADDR_ZX, /* SVE [Zn.<T>{, <Xm>}]. */
AARCH64_OPND_SVE_ADDR_RZ, /* SVE [<Xn|SP>, Zm.D]. */
AARCH64_OPND_SVE_ADDR_RZ_LSL1, /* SVE [<Xn|SP>, Zm.D, LSL #1]. */
AARCH64_OPND_SVE_ADDR_RZ_LSL2, /* SVE [<Xn|SP>, Zm.D, LSL #2]. */
+2019-05-09  Matthew Malcomson  <matthew.malcomson@arm.com>
+
+ * aarch64-asm-2.c: Regenerated.
+ * aarch64-dis-2.c: Regenerated.
+ * aarch64-opc-2.c: Regenerated.
+ * aarch64-opc.c (operand_general_constraint_met_p): Add constraint
+ checking for SVE_ADDR_ZX.
+ (aarch64_print_operand): Add printing for SVE_ADDR_ZX.
+ * aarch64-tbl.h (AARCH64_OPERANDS): Use new SVE_ADDR_ZX operand.
+
2019-05-09  Matthew Malcomson  <matthew.malcomson@arm.com>

* aarch64-asm-2.c: Regenerated.
case 28:
case 29:
case 30:
- case 161:
case 162:
case 163:
case 164:
case 168:
case 169:
case 170:
- case 183:
+ case 171:
case 184:
case 185:
case 186:
case 189:
case 190:
case 191:
- case 196:
- case 199:
+ case 192:
+ case 197:
+ case 200:
return aarch64_ins_regno (self, info, code, inst, errors);
case 14:
return aarch64_ins_reg_extended (self, info, code, inst, errors);
case 32:
case 33:
case 34:
- case 202:
+ case 203:
return aarch64_ins_reglane (self, info, code, inst, errors);
case 35:
return aarch64_ins_reglist (self, info, code, inst, errors);
case 80:
case 81:
case 82:
- case 158:
- case 160:
- case 175:
+ case 159:
+ case 161:
case 176:
case 177:
case 178:
case 180:
case 181:
case 182:
- case 201:
+ case 183:
+ case 202:
return aarch64_ins_imm (self, info, code, inst, errors);
case 43:
case 44:
case 47:
return aarch64_ins_advsimd_imm_modified (self, info, code, inst, errors);
case 51:
- case 148:
+ case 149:
return aarch64_ins_fpimm (self, info, code, inst, errors);
case 68:
- case 156:
+ case 157:
return aarch64_ins_limm (self, info, code, inst, errors);
case 69:
return aarch64_ins_aimm (self, info, code, inst, errors);
return aarch64_ins_fbits (self, info, code, inst, errors);
case 73:
case 74:
- case 153:
+ case 154:
return aarch64_ins_imm_rotate2 (self, info, code, inst, errors);
case 75:
- case 152:
- case 154:
+ case 153:
+ case 155:
return aarch64_ins_imm_rotate1 (self, info, code, inst, errors);
case 76:
case 77:
case 128:
case 129:
case 130:
- return aarch64_ins_sve_addr_rr_lsl (self, info, code, inst, errors);
case 131:
+ return aarch64_ins_sve_addr_rr_lsl (self, info, code, inst, errors);
case 132:
case 133:
case 134:
case 136:
case 137:
case 138:
- return aarch64_ins_sve_addr_rz_xtw (self, info, code, inst, errors);
case 139:
+ return aarch64_ins_sve_addr_rz_xtw (self, info, code, inst, errors);
case 140:
case 141:
case 142:
- return aarch64_ins_sve_addr_zi_u5 (self, info, code, inst, errors);
case 143:
- return aarch64_ins_sve_addr_zz_lsl (self, info, code, inst, errors);
+ return aarch64_ins_sve_addr_zi_u5 (self, info, code, inst, errors);
case 144:
- return aarch64_ins_sve_addr_zz_sxtw (self, info, code, inst, errors);
+ return aarch64_ins_sve_addr_zz_lsl (self, info, code, inst, errors);
case 145:
- return aarch64_ins_sve_addr_zz_uxtw (self, info, code, inst, errors);
+ return aarch64_ins_sve_addr_zz_sxtw (self, info, code, inst, errors);
case 146:
- return aarch64_ins_sve_aimm (self, info, code, inst, errors);
+ return aarch64_ins_sve_addr_zz_uxtw (self, info, code, inst, errors);
case 147:
+ return aarch64_ins_sve_aimm (self, info, code, inst, errors);
+ case 148:
return aarch64_ins_sve_asimm (self, info, code, inst, errors);
- case 149:
- return aarch64_ins_sve_float_half_one (self, info, code, inst, errors);
case 150:
- return aarch64_ins_sve_float_half_two (self, info, code, inst, errors);
+ return aarch64_ins_sve_float_half_one (self, info, code, inst, errors);
case 151:
+ return aarch64_ins_sve_float_half_two (self, info, code, inst, errors);
+ case 152:
return aarch64_ins_sve_float_zero_one (self, info, code, inst, errors);
- case 155:
+ case 156:
return aarch64_ins_inv_limm (self, info, code, inst, errors);
- case 157:
+ case 158:
return aarch64_ins_sve_limm_mov (self, info, code, inst, errors);
- case 159:
+ case 160:
return aarch64_ins_sve_scale (self, info, code, inst, errors);
- case 171:
case 172:
- return aarch64_ins_sve_shlimm (self, info, code, inst, errors);
case 173:
+ return aarch64_ins_sve_shlimm (self, info, code, inst, errors);
case 174:
+ case 175:
return aarch64_ins_sve_shrimm (self, info, code, inst, errors);
- case 192:
case 193:
case 194:
case 195:
+ case 196:
return aarch64_ins_sve_quad_index (self, info, code, inst, errors);
- case 197:
- return aarch64_ins_sve_index (self, info, code, inst, errors);
case 198:
- case 200:
+ return aarch64_ins_sve_index (self, info, code, inst, errors);
+ case 199:
+ case 201:
return aarch64_ins_sve_reglist (self, info, code, inst, errors);
default: assert (0); abort ();
}
case 28:
case 29:
case 30:
- case 161:
case 162:
case 163:
case 164:
case 168:
case 169:
case 170:
- case 183:
+ case 171:
case 184:
case 185:
case 186:
case 189:
case 190:
case 191:
- case 196:
- case 199:
+ case 192:
+ case 197:
+ case 200:
return aarch64_ext_regno (self, info, code, inst, errors);
case 9:
return aarch64_ext_regrt_sysins (self, info, code, inst, errors);
case 32:
case 33:
case 34:
- case 202:
+ case 203:
return aarch64_ext_reglane (self, info, code, inst, errors);
case 35:
return aarch64_ext_reglist (self, info, code, inst, errors);
case 80:
case 81:
case 82:
- case 158:
- case 160:
- case 175:
+ case 159:
+ case 161:
case 176:
case 177:
case 178:
case 180:
case 181:
case 182:
- case 201:
+ case 183:
+ case 202:
return aarch64_ext_imm (self, info, code, inst, errors);
case 43:
case 44:
case 48:
return aarch64_ext_shll_imm (self, info, code, inst, errors);
case 51:
- case 148:
+ case 149:
return aarch64_ext_fpimm (self, info, code, inst, errors);
case 68:
- case 156:
+ case 157:
return aarch64_ext_limm (self, info, code, inst, errors);
case 69:
return aarch64_ext_aimm (self, info, code, inst, errors);
return aarch64_ext_fbits (self, info, code, inst, errors);
case 73:
case 74:
- case 153:
+ case 154:
return aarch64_ext_imm_rotate2 (self, info, code, inst, errors);
case 75:
- case 152:
- case 154:
+ case 153:
+ case 155:
return aarch64_ext_imm_rotate1 (self, info, code, inst, errors);
case 76:
case 77:
case 128:
case 129:
case 130:
- return aarch64_ext_sve_addr_rr_lsl (self, info, code, inst, errors);
case 131:
+ return aarch64_ext_sve_addr_rr_lsl (self, info, code, inst, errors);
case 132:
case 133:
case 134:
case 136:
case 137:
case 138:
- return aarch64_ext_sve_addr_rz_xtw (self, info, code, inst, errors);
case 139:
+ return aarch64_ext_sve_addr_rz_xtw (self, info, code, inst, errors);
case 140:
case 141:
case 142:
- return aarch64_ext_sve_addr_zi_u5 (self, info, code, inst, errors);
case 143:
- return aarch64_ext_sve_addr_zz_lsl (self, info, code, inst, errors);
+ return aarch64_ext_sve_addr_zi_u5 (self, info, code, inst, errors);
case 144:
- return aarch64_ext_sve_addr_zz_sxtw (self, info, code, inst, errors);
+ return aarch64_ext_sve_addr_zz_lsl (self, info, code, inst, errors);
case 145:
- return aarch64_ext_sve_addr_zz_uxtw (self, info, code, inst, errors);
+ return aarch64_ext_sve_addr_zz_sxtw (self, info, code, inst, errors);
case 146:
- return aarch64_ext_sve_aimm (self, info, code, inst, errors);
+ return aarch64_ext_sve_addr_zz_uxtw (self, info, code, inst, errors);
case 147:
+ return aarch64_ext_sve_aimm (self, info, code, inst, errors);
+ case 148:
return aarch64_ext_sve_asimm (self, info, code, inst, errors);
- case 149:
- return aarch64_ext_sve_float_half_one (self, info, code, inst, errors);
case 150:
- return aarch64_ext_sve_float_half_two (self, info, code, inst, errors);
+ return aarch64_ext_sve_float_half_one (self, info, code, inst, errors);
case 151:
+ return aarch64_ext_sve_float_half_two (self, info, code, inst, errors);
+ case 152:
return aarch64_ext_sve_float_zero_one (self, info, code, inst, errors);
- case 155:
+ case 156:
return aarch64_ext_inv_limm (self, info, code, inst, errors);
- case 157:
+ case 158:
return aarch64_ext_sve_limm_mov (self, info, code, inst, errors);
- case 159:
+ case 160:
return aarch64_ext_sve_scale (self, info, code, inst, errors);
- case 171:
case 172:
- return aarch64_ext_sve_shlimm (self, info, code, inst, errors);
case 173:
+ return aarch64_ext_sve_shlimm (self, info, code, inst, errors);
case 174:
+ case 175:
return aarch64_ext_sve_shrimm (self, info, code, inst, errors);
- case 192:
case 193:
case 194:
case 195:
+ case 196:
return aarch64_ext_sve_quad_index (self, info, code, inst, errors);
- case 197:
- return aarch64_ext_sve_index (self, info, code, inst, errors);
case 198:
- case 200:
+ return aarch64_ext_sve_index (self, info, code, inst, errors);
+ case 199:
+ case 201:
return aarch64_ext_sve_reglist (self, info, code, inst, errors);
default: assert (0); abort ();
}
{AARCH64_OPND_CLASS_ADDRESS, "SVE_ADDR_RX_LSL1", (1 << OPD_F_OD_LSB) | OPD_F_NO_ZR | OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {FLD_Rn,FLD_Rm}, "an address with a scalar register offset"},
{AARCH64_OPND_CLASS_ADDRESS, "SVE_ADDR_RX_LSL2", (2 << OPD_F_OD_LSB) | OPD_F_NO_ZR | OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {FLD_Rn,FLD_Rm}, "an address with a scalar register offset"},
{AARCH64_OPND_CLASS_ADDRESS, "SVE_ADDR_RX_LSL3", (3 << OPD_F_OD_LSB) | OPD_F_NO_ZR | OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {FLD_Rn,FLD_Rm}, "an address with a scalar register offset"},
+ {AARCH64_OPND_CLASS_ADDRESS, "SVE_ADDR_ZX", OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {FLD_SVE_Zn,FLD_Rm}, "vector of address with a scalar register offset"},
{AARCH64_OPND_CLASS_ADDRESS, "SVE_ADDR_RZ", OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {FLD_Rn,FLD_SVE_Zm_16}, "an address with a vector register offset"},
{AARCH64_OPND_CLASS_ADDRESS, "SVE_ADDR_RZ_LSL1", 1 << OPD_F_OD_LSB | OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {FLD_Rn,FLD_SVE_Zm_16}, "an address with a vector register offset"},
{AARCH64_OPND_CLASS_ADDRESS, "SVE_ADDR_RZ_LSL2", 2 << OPD_F_OD_LSB | OPD_F_HAS_INSERTER | OPD_F_HAS_EXTRACTOR, {FLD_Rn,FLD_SVE_Zm_16}, "an address with a vector register offset"},
max_value = 7;
goto sve_imm_offset;
+ case AARCH64_OPND_SVE_ADDR_ZX:
+ /* Everything is already ensured by parse_operands or
+ aarch64_ext_sve_addr_rr_lsl (because this is a very specific
+ operand type).  */
+ assert (opnd->addr.offset.is_reg);
+ assert (opnd->addr.preind);
+ assert ((aarch64_operands[type].flags & OPD_F_NO_ZR) == 0);
+ assert (opnd->shifter.kind == AARCH64_MOD_LSL);
+ assert (opnd->shifter.operator_present == 0);
+ break;
+
case AARCH64_OPND_SVE_ADDR_R:
case AARCH64_OPND_SVE_ADDR_RR:
case AARCH64_OPND_SVE_ADDR_RR_LSL1:
get_offset_int_reg_name (opnd));
break;
+ case AARCH64_OPND_SVE_ADDR_ZX:
+ print_register_offset_address
+ (buf, size, opnd,
+ get_addr_sve_reg_name (opnd->addr.base_regno, opnd->qualifier),
+ get_64bit_int_reg_name (opnd->addr.offset.regno, 0));
+ break;
+
case AARCH64_OPND_SVE_ADDR_RZ:
case AARCH64_OPND_SVE_ADDR_RZ_LSL1:
case AARCH64_OPND_SVE_ADDR_RZ_LSL2:
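As a usage note, the new aarch64_print_operand case renders the base as
an SVE vector register and the offset with its 64-bit integer register
name, so the operand text produced for SVE_ADDR_ZX is expected to look
like (illustrative register numbers):

	[z1.s, x2]
	[z3.d, x0]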
Y(ADDRESS, sve_addr_rr_lsl, "SVE_ADDR_RX_LSL3", \
(3 << OPD_F_OD_LSB) | OPD_F_NO_ZR, F(FLD_Rn,FLD_Rm), \
"an address with a scalar register offset") \
+ Y(ADDRESS, sve_addr_rr_lsl, "SVE_ADDR_ZX", \
+ 0 << OPD_F_OD_LSB, F(FLD_SVE_Zn,FLD_Rm), \
+ "vector of address with a scalar register offset") \
Y(ADDRESS, sve_addr_rr_lsl, "SVE_ADDR_RZ", 0 << OPD_F_OD_LSB, \
F(FLD_Rn,FLD_SVE_Zm_16), \
"an address with a vector register offset") \