X-Git-Url: http://drtracing.org/?a=blobdiff_plain;f=include%2Fopcode%2Faarch64.h;h=a7a7459c5fb5a928399792ca38ed204683923f59;hb=3bd82c86f0f432bd7653101069bf056fda14b7cd;hp=7130d2dd0c6e94b9f3e0714458db991503e139cf;hpb=b90efa5b79ac1524ec260f8eb89d1be37e0219a7;p=deliverable%2Fbinutils-gdb.git diff --git a/include/opcode/aarch64.h b/include/opcode/aarch64.h index 7130d2dd0c..a7a7459c5f 100644 --- a/include/opcode/aarch64.h +++ b/include/opcode/aarch64.h @@ -1,6 +1,6 @@ /* AArch64 assembler/disassembler support. - Copyright (C) 2009-2015 Free Software Foundation, Inc. + Copyright (C) 2009-2019 Free Software Foundation, Inc. Contributed by ARM Ltd. This file is part of GNU Binutils. @@ -27,32 +27,125 @@ #include #include +#ifdef __cplusplus +extern "C" { +#endif + /* The offset for pc-relative addressing is currently defined to be 0. */ #define AARCH64_PCREL_OFFSET 0 typedef uint32_t aarch64_insn; /* The following bitmasks control CPU features. */ +#define AARCH64_FEATURE_SHA2 0x200000000ULL /* SHA2 instructions. */ +#define AARCH64_FEATURE_AES 0x800000000ULL /* AES instructions. */ +#define AARCH64_FEATURE_V8_4 0x000000800ULL /* ARMv8.4 processors. */ +#define AARCH64_FEATURE_SM4 0x100000000ULL /* SM3 & SM4 instructions. */ +#define AARCH64_FEATURE_SHA3 0x400000000ULL /* SHA3 instructions. */ #define AARCH64_FEATURE_V8 0x00000001 /* All processors. */ +#define AARCH64_FEATURE_V8_2 0x00000020 /* ARMv8.2 processors. */ +#define AARCH64_FEATURE_V8_3 0x00000040 /* ARMv8.3 processors. */ #define AARCH64_FEATURE_CRYPTO 0x00010000 /* Crypto instructions. */ #define AARCH64_FEATURE_FP 0x00020000 /* FP instructions. */ #define AARCH64_FEATURE_SIMD 0x00040000 /* SIMD instructions. */ #define AARCH64_FEATURE_CRC 0x00080000 /* CRC instructions. */ #define AARCH64_FEATURE_LSE 0x00100000 /* LSE instructions. */ +#define AARCH64_FEATURE_PAN 0x00200000 /* PAN instructions. */ +#define AARCH64_FEATURE_LOR 0x00400000 /* LOR instructions. */ +#define AARCH64_FEATURE_RDMA 0x00800000 /* v8.1 SIMD instructions. */ +#define AARCH64_FEATURE_V8_1 0x01000000 /* v8.1 features. */ +#define AARCH64_FEATURE_F16 0x02000000 /* v8.2 FP16 instructions. */ +#define AARCH64_FEATURE_RAS 0x04000000 /* RAS Extensions. */ +#define AARCH64_FEATURE_PROFILE 0x08000000 /* Statistical Profiling. */ +#define AARCH64_FEATURE_SVE 0x10000000 /* SVE instructions. */ +#define AARCH64_FEATURE_RCPC 0x20000000 /* RCPC instructions. */ +#define AARCH64_FEATURE_COMPNUM 0x40000000 /* Complex # instructions. */ +#define AARCH64_FEATURE_DOTPROD 0x080000000 /* Dot Product instructions. */ +#define AARCH64_FEATURE_F16_FML 0x1000000000ULL /* v8.2 FP16FML ins. */ +#define AARCH64_FEATURE_V8_5 0x2000000000ULL /* ARMv8.5 processors. */ + +/* Flag Manipulation insns. */ +#define AARCH64_FEATURE_FLAGMANIP 0x4000000000ULL +/* FRINT[32,64][Z,X] insns. */ +#define AARCH64_FEATURE_FRINTTS 0x8000000000ULL +/* SB instruction. */ +#define AARCH64_FEATURE_SB 0x10000000000ULL +/* Execution and Data Prediction Restriction instructions. */ +#define AARCH64_FEATURE_PREDRES 0x20000000000ULL +/* DC CVADP. */ +#define AARCH64_FEATURE_CVADP 0x40000000000ULL +/* Random Number instructions. */ +#define AARCH64_FEATURE_RNG 0x80000000000ULL +/* BTI instructions. */ +#define AARCH64_FEATURE_BTI 0x100000000000ULL +/* SCXTNUM_ELx. */ +#define AARCH64_FEATURE_SCXTNUM 0x200000000000ULL +/* ID_PFR2 instructions. */ +#define AARCH64_FEATURE_ID_PFR2 0x400000000000ULL +/* SSBS mechanism enabled. */ +#define AARCH64_FEATURE_SSBS 0x800000000000ULL +/* Memory Tagging Extension. 
*/ +#define AARCH64_FEATURE_MEMTAG 0x1000000000000ULL +/* Transactional Memory Extension. */ +#define AARCH64_FEATURE_TME 0x2000000000000ULL + +/* SVE2 instructions. */ +#define AARCH64_FEATURE_SVE2 0x000000010 +#define AARCH64_FEATURE_SVE2_AES 0x000000080 +#define AARCH64_FEATURE_SVE2_BITPERM 0x000000100 +#define AARCH64_FEATURE_SVE2_SM4 0x000000200 +#define AARCH64_FEATURE_SVE2_SHA3 0x000000400 /* Architectures are the sum of the base and extensions. */ #define AARCH64_ARCH_V8 AARCH64_FEATURE (AARCH64_FEATURE_V8, \ AARCH64_FEATURE_FP \ | AARCH64_FEATURE_SIMD) +#define AARCH64_ARCH_V8_1 AARCH64_FEATURE (AARCH64_ARCH_V8, \ + AARCH64_FEATURE_CRC \ + | AARCH64_FEATURE_V8_1 \ + | AARCH64_FEATURE_LSE \ + | AARCH64_FEATURE_PAN \ + | AARCH64_FEATURE_LOR \ + | AARCH64_FEATURE_RDMA) +#define AARCH64_ARCH_V8_2 AARCH64_FEATURE (AARCH64_ARCH_V8_1, \ + AARCH64_FEATURE_V8_2 \ + | AARCH64_FEATURE_RAS) +#define AARCH64_ARCH_V8_3 AARCH64_FEATURE (AARCH64_ARCH_V8_2, \ + AARCH64_FEATURE_V8_3 \ + | AARCH64_FEATURE_RCPC \ + | AARCH64_FEATURE_COMPNUM) +#define AARCH64_ARCH_V8_4 AARCH64_FEATURE (AARCH64_ARCH_V8_3, \ + AARCH64_FEATURE_V8_4 \ + | AARCH64_FEATURE_DOTPROD \ + | AARCH64_FEATURE_F16_FML) +#define AARCH64_ARCH_V8_5 AARCH64_FEATURE (AARCH64_ARCH_V8_4, \ + AARCH64_FEATURE_V8_5 \ + | AARCH64_FEATURE_FLAGMANIP \ + | AARCH64_FEATURE_FRINTTS \ + | AARCH64_FEATURE_SB \ + | AARCH64_FEATURE_PREDRES \ + | AARCH64_FEATURE_CVADP \ + | AARCH64_FEATURE_BTI \ + | AARCH64_FEATURE_SCXTNUM \ + | AARCH64_FEATURE_ID_PFR2 \ + | AARCH64_FEATURE_SSBS) + + #define AARCH64_ARCH_NONE AARCH64_FEATURE (0, 0) #define AARCH64_ANY AARCH64_FEATURE (-1, 0) /* Any basic core. */ /* CPU-specific features. */ -typedef unsigned long aarch64_feature_set; +typedef unsigned long long aarch64_feature_set; -#define AARCH64_CPU_HAS_FEATURE(CPU,FEAT) \ +#define AARCH64_CPU_HAS_ALL_FEATURES(CPU,FEAT) \ + ((~(CPU) & (FEAT)) == 0) + +#define AARCH64_CPU_HAS_ANY_FEATURES(CPU,FEAT) \ (((CPU) & (FEAT)) != 0) +#define AARCH64_CPU_HAS_FEATURE(CPU,FEAT) \ + AARCH64_CPU_HAS_ALL_FEATURES (CPU,FEAT) + #define AARCH64_MERGE_FEATURE_SETS(TARG,F1,F2) \ do \ { \ @@ -69,9 +162,6 @@ typedef unsigned long aarch64_feature_set; #define AARCH64_FEATURE(core,coproc) ((core) | (coproc)) -#define AARCH64_OPCODE_HAS_FEATURE(OPC,FEAT) \ - (((OPC) & (FEAT)) != 0) - enum aarch64_operand_class { AARCH64_OPND_CLASS_NIL, @@ -82,7 +172,8 @@ enum aarch64_operand_class AARCH64_OPND_CLASS_SIMD_ELEMENT, AARCH64_OPND_CLASS_SISD_REG, AARCH64_OPND_CLASS_SIMD_REGLIST, - AARCH64_OPND_CLASS_CP_REG, + AARCH64_OPND_CLASS_SVE_REG, + AARCH64_OPND_CLASS_PRED_REG, AARCH64_OPND_CLASS_ADDRESS, AARCH64_OPND_CLASS_IMMEDIATE, AARCH64_OPND_CLASS_SYSTEM, @@ -101,12 +192,14 @@ enum aarch64_opnd AARCH64_OPND_Rm, /* Integer register as source. */ AARCH64_OPND_Rt, /* Integer register used in ld/st instructions. */ AARCH64_OPND_Rt2, /* Integer register used in ld/st pair instructions. */ + AARCH64_OPND_Rt_SP, /* Integer Rt or SP used in STG instructions. */ AARCH64_OPND_Rs, /* Integer register used in ld/st exclusive. */ AARCH64_OPND_Ra, /* Integer register used in ddp_3src instructions. */ AARCH64_OPND_Rt_SYS, /* Integer register used in system instructions. */ AARCH64_OPND_Rd_SP, /* Integer Rd or SP. */ AARCH64_OPND_Rn_SP, /* Integer Rn or SP. */ + AARCH64_OPND_Rm_SP, /* Integer Rm or SP. */ AARCH64_OPND_PAIRREG, /* Paired register operand. */ AARCH64_OPND_Rm_EXT, /* Integer Rm extended. */ AARCH64_OPND_Rm_SFT, /* Integer Rm shifted. 
*/ @@ -122,6 +215,7 @@ enum aarch64_opnd AARCH64_OPND_Sn, /* AdvSIMD Scalar Sn. */ AARCH64_OPND_Sm, /* AdvSIMD Scalar Sm. */ + AARCH64_OPND_Va, /* AdvSIMD Vector Va. */ AARCH64_OPND_Vd, /* AdvSIMD Vector Vd. */ AARCH64_OPND_Vn, /* AdvSIMD Vector Vn. */ AARCH64_OPND_Vm, /* AdvSIMD Vector Vm. */ @@ -130,16 +224,19 @@ enum aarch64_opnd AARCH64_OPND_Ed, /* AdvSIMD Vector Element Vd. */ AARCH64_OPND_En, /* AdvSIMD Vector Element Vn. */ AARCH64_OPND_Em, /* AdvSIMD Vector Element Vm. */ + AARCH64_OPND_Em16, /* AdvSIMD Vector Element Vm restricted to V0 - V15 when + qualifier is S_H. */ AARCH64_OPND_LVn, /* AdvSIMD Vector register list used in e.g. TBL. */ AARCH64_OPND_LVt, /* AdvSIMD Vector register list used in ld/st. */ AARCH64_OPND_LVt_AL, /* AdvSIMD Vector register list for loading single structure to all lanes. */ AARCH64_OPND_LEt, /* AdvSIMD Vector Element list. */ - AARCH64_OPND_Cn, /* Co-processor register in CRn field. */ - AARCH64_OPND_Cm, /* Co-processor register in CRm field. */ + AARCH64_OPND_CRn, /* Co-processor register in CRn field. */ + AARCH64_OPND_CRm, /* Co-processor register in CRm field. */ AARCH64_OPND_IDX, /* AdvSIMD EXT index operand. */ + AARCH64_OPND_MASK, /* AdvSIMD EXT index operand. */ AARCH64_OPND_IMM_VLSL,/* Immediate for shifting vector registers left. */ AARCH64_OPND_IMM_VLSR,/* Immediate for shifting vector registers right. */ AARCH64_OPND_SIMD_IMM,/* AdvSIMD modified immediate without shift. */ @@ -154,13 +251,17 @@ enum aarch64_opnd AARCH64_OPND_IMMS, /* Immediate # in e.g. BFM. */ AARCH64_OPND_WIDTH, /* Immediate # in e.g. BFI. */ AARCH64_OPND_IMM, /* Immediate. */ + AARCH64_OPND_IMM_2, /* Immediate. */ AARCH64_OPND_UIMM3_OP1,/* Unsigned 3-bit immediate in the op1 field. */ AARCH64_OPND_UIMM3_OP2,/* Unsigned 3-bit immediate in the op2 field. */ AARCH64_OPND_UIMM4, /* Unsigned 4-bit immediate in the CRm field. */ + AARCH64_OPND_UIMM4_ADDG,/* Unsigned 4-bit immediate in addg/subg. */ AARCH64_OPND_UIMM7, /* Unsigned 7-bit immediate in the CRm:op2 fields. */ + AARCH64_OPND_UIMM10, /* Unsigned 10-bit immediate in addg/subg. */ AARCH64_OPND_BIT_NUM, /* Immediate. */ AARCH64_OPND_EXCEPTION,/* imm16 operand in exception instructions. */ AARCH64_OPND_CCMP_IMM,/* Immediate in conditional compare instructions. */ + AARCH64_OPND_SIMM5, /* 5-bit signed immediate in the imm5 field. */ AARCH64_OPND_NZCV, /* Flag bit specifier giving an alternative value for each condition flag. */ @@ -169,6 +270,9 @@ enum aarch64_opnd AARCH64_OPND_HALF, /* #{, LSL #} operand in move wide. */ AARCH64_OPND_FBITS, /* FP # operand in e.g. SCVTF */ AARCH64_OPND_IMM_MOV, /* Immediate operand for the MOV alias. */ + AARCH64_OPND_IMM_ROT1, /* Immediate rotate operand for FCMLA. */ + AARCH64_OPND_IMM_ROT2, /* Immediate rotate operand for indexed FCMLA. */ + AARCH64_OPND_IMM_ROT3, /* Immediate rotate operand for FCADD. */ AARCH64_OPND_COND, /* Standard condition as the last operand. */ AARCH64_OPND_COND1, /* Same as the above, but excluding AL and NV. */ @@ -190,8 +294,14 @@ enum aarch64_opnd friendly feature of using LDR/STR as the the mnemonic name for LDUR/STUR instructions wherever there is no ambiguity. */ + AARCH64_OPND_ADDR_SIMM10, /* Address of signed 10-bit immediate. */ + AARCH64_OPND_ADDR_SIMM11, /* Address with a signed 11-bit (multiple of + 16) immediate. */ AARCH64_OPND_ADDR_UIMM12, /* Address of unsigned 12-bit immediate. */ + AARCH64_OPND_ADDR_SIMM13, /* Address with a signed 13-bit (multiple of + 16) immediate. */ AARCH64_OPND_SIMD_ADDR_SIMPLE,/* Address of ld/st multiple structures. 
*/ + AARCH64_OPND_ADDR_OFFSET, /* Address with an optional 9-bit immediate. */ AARCH64_OPND_SIMD_ADDR_POST, /* Address of ld/st multiple post-indexed. */ AARCH64_OPND_SYSREG, /* System register operand. */ @@ -200,9 +310,116 @@ enum aarch64_opnd AARCH64_OPND_SYSREG_DC, /* System register operand. */ AARCH64_OPND_SYSREG_IC, /* System register operand. */ AARCH64_OPND_SYSREG_TLBI, /* System register operand. */ + AARCH64_OPND_SYSREG_SR, /* System register RCTX operand. */ AARCH64_OPND_BARRIER, /* Barrier operand. */ AARCH64_OPND_BARRIER_ISB, /* Barrier operand for ISB. */ AARCH64_OPND_PRFOP, /* Prefetch operation. */ + AARCH64_OPND_BARRIER_PSB, /* Barrier operand for PSB. */ + AARCH64_OPND_BTI_TARGET, /* BTI {}. */ + + AARCH64_OPND_SVE_ADDR_RI_S4x16, /* SVE [, #*16]. */ + AARCH64_OPND_SVE_ADDR_RI_S4xVL, /* SVE [, #, MUL VL]. */ + AARCH64_OPND_SVE_ADDR_RI_S4x2xVL, /* SVE [, #*2, MUL VL]. */ + AARCH64_OPND_SVE_ADDR_RI_S4x3xVL, /* SVE [, #*3, MUL VL]. */ + AARCH64_OPND_SVE_ADDR_RI_S4x4xVL, /* SVE [, #*4, MUL VL]. */ + AARCH64_OPND_SVE_ADDR_RI_S6xVL, /* SVE [, #, MUL VL]. */ + AARCH64_OPND_SVE_ADDR_RI_S9xVL, /* SVE [, #, MUL VL]. */ + AARCH64_OPND_SVE_ADDR_RI_U6, /* SVE [, #]. */ + AARCH64_OPND_SVE_ADDR_RI_U6x2, /* SVE [, #*2]. */ + AARCH64_OPND_SVE_ADDR_RI_U6x4, /* SVE [, #*4]. */ + AARCH64_OPND_SVE_ADDR_RI_U6x8, /* SVE [, #*8]. */ + AARCH64_OPND_SVE_ADDR_R, /* SVE []. */ + AARCH64_OPND_SVE_ADDR_RR, /* SVE [, ]. */ + AARCH64_OPND_SVE_ADDR_RR_LSL1, /* SVE [, , LSL #1]. */ + AARCH64_OPND_SVE_ADDR_RR_LSL2, /* SVE [, , LSL #2]. */ + AARCH64_OPND_SVE_ADDR_RR_LSL3, /* SVE [, , LSL #3]. */ + AARCH64_OPND_SVE_ADDR_RX, /* SVE [, ]. */ + AARCH64_OPND_SVE_ADDR_RX_LSL1, /* SVE [, , LSL #1]. */ + AARCH64_OPND_SVE_ADDR_RX_LSL2, /* SVE [, , LSL #2]. */ + AARCH64_OPND_SVE_ADDR_RX_LSL3, /* SVE [, , LSL #3]. */ + AARCH64_OPND_SVE_ADDR_RZ, /* SVE [, Zm.D]. */ + AARCH64_OPND_SVE_ADDR_RZ_LSL1, /* SVE [, Zm.D, LSL #1]. */ + AARCH64_OPND_SVE_ADDR_RZ_LSL2, /* SVE [, Zm.D, LSL #2]. */ + AARCH64_OPND_SVE_ADDR_RZ_LSL3, /* SVE [, Zm.D, LSL #3]. */ + AARCH64_OPND_SVE_ADDR_RZ_XTW_14, /* SVE [, Zm., (S|U)XTW]. + Bit 14 controls S/U choice. */ + AARCH64_OPND_SVE_ADDR_RZ_XTW_22, /* SVE [, Zm., (S|U)XTW]. + Bit 22 controls S/U choice. */ + AARCH64_OPND_SVE_ADDR_RZ_XTW1_14, /* SVE [, Zm., (S|U)XTW #1]. + Bit 14 controls S/U choice. */ + AARCH64_OPND_SVE_ADDR_RZ_XTW1_22, /* SVE [, Zm., (S|U)XTW #1]. + Bit 22 controls S/U choice. */ + AARCH64_OPND_SVE_ADDR_RZ_XTW2_14, /* SVE [, Zm., (S|U)XTW #2]. + Bit 14 controls S/U choice. */ + AARCH64_OPND_SVE_ADDR_RZ_XTW2_22, /* SVE [, Zm., (S|U)XTW #2]. + Bit 22 controls S/U choice. */ + AARCH64_OPND_SVE_ADDR_RZ_XTW3_14, /* SVE [, Zm., (S|U)XTW #3]. + Bit 14 controls S/U choice. */ + AARCH64_OPND_SVE_ADDR_RZ_XTW3_22, /* SVE [, Zm., (S|U)XTW #3]. + Bit 22 controls S/U choice. */ + AARCH64_OPND_SVE_ADDR_ZI_U5, /* SVE [Zn., #]. */ + AARCH64_OPND_SVE_ADDR_ZI_U5x2, /* SVE [Zn., #*2]. */ + AARCH64_OPND_SVE_ADDR_ZI_U5x4, /* SVE [Zn., #*4]. */ + AARCH64_OPND_SVE_ADDR_ZI_U5x8, /* SVE [Zn., #*8]. */ + AARCH64_OPND_SVE_ADDR_ZZ_LSL, /* SVE [Zn., Zm,, LSL #]. */ + AARCH64_OPND_SVE_ADDR_ZZ_SXTW, /* SVE [Zn., Zm,, SXTW #]. */ + AARCH64_OPND_SVE_ADDR_ZZ_UXTW, /* SVE [Zn., Zm,, UXTW #]. */ + AARCH64_OPND_SVE_AIMM, /* SVE unsigned arithmetic immediate. */ + AARCH64_OPND_SVE_ASIMM, /* SVE signed arithmetic immediate. */ + AARCH64_OPND_SVE_FPIMM8, /* SVE 8-bit floating-point immediate. */ + AARCH64_OPND_SVE_I1_HALF_ONE, /* SVE choice between 0.5 and 1.0. 
*/ + AARCH64_OPND_SVE_I1_HALF_TWO, /* SVE choice between 0.5 and 2.0. */ + AARCH64_OPND_SVE_I1_ZERO_ONE, /* SVE choice between 0.0 and 1.0. */ + AARCH64_OPND_SVE_IMM_ROT1, /* SVE 1-bit rotate operand (90 or 270). */ + AARCH64_OPND_SVE_IMM_ROT2, /* SVE 2-bit rotate operand (N*90). */ + AARCH64_OPND_SVE_IMM_ROT3, /* SVE cadd 1-bit rotate (90 or 270). */ + AARCH64_OPND_SVE_INV_LIMM, /* SVE inverted logical immediate. */ + AARCH64_OPND_SVE_LIMM, /* SVE logical immediate. */ + AARCH64_OPND_SVE_LIMM_MOV, /* SVE logical immediate for MOV. */ + AARCH64_OPND_SVE_PATTERN, /* SVE vector pattern enumeration. */ + AARCH64_OPND_SVE_PATTERN_SCALED, /* Likewise, with additional MUL factor. */ + AARCH64_OPND_SVE_PRFOP, /* SVE prefetch operation. */ + AARCH64_OPND_SVE_Pd, /* SVE p0-p15 in Pd. */ + AARCH64_OPND_SVE_Pg3, /* SVE p0-p7 in Pg. */ + AARCH64_OPND_SVE_Pg4_5, /* SVE p0-p15 in Pg, bits [8,5]. */ + AARCH64_OPND_SVE_Pg4_10, /* SVE p0-p15 in Pg, bits [13,10]. */ + AARCH64_OPND_SVE_Pg4_16, /* SVE p0-p15 in Pg, bits [19,16]. */ + AARCH64_OPND_SVE_Pm, /* SVE p0-p15 in Pm. */ + AARCH64_OPND_SVE_Pn, /* SVE p0-p15 in Pn. */ + AARCH64_OPND_SVE_Pt, /* SVE p0-p15 in Pt. */ + AARCH64_OPND_SVE_Rm, /* Integer Rm or ZR, alt. SVE position. */ + AARCH64_OPND_SVE_Rn_SP, /* Integer Rn or SP, alt. SVE position. */ + AARCH64_OPND_SVE_SHLIMM_PRED, /* SVE shift left amount (predicated). */ + AARCH64_OPND_SVE_SHLIMM_UNPRED, /* SVE shift left amount (unpredicated). */ + AARCH64_OPND_SVE_SHRIMM_PRED, /* SVE shift right amount (predicated). */ + AARCH64_OPND_SVE_SHRIMM_UNPRED, /* SVE shift right amount (unpredicated). */ + AARCH64_OPND_SVE_SIMM5, /* SVE signed 5-bit immediate. */ + AARCH64_OPND_SVE_SIMM5B, /* SVE secondary signed 5-bit immediate. */ + AARCH64_OPND_SVE_SIMM6, /* SVE signed 6-bit immediate. */ + AARCH64_OPND_SVE_SIMM8, /* SVE signed 8-bit immediate. */ + AARCH64_OPND_SVE_UIMM3, /* SVE unsigned 3-bit immediate. */ + AARCH64_OPND_SVE_UIMM7, /* SVE unsigned 7-bit immediate. */ + AARCH64_OPND_SVE_UIMM8, /* SVE unsigned 8-bit immediate. */ + AARCH64_OPND_SVE_UIMM8_53, /* SVE split unsigned 8-bit immediate. */ + AARCH64_OPND_SVE_VZn, /* Scalar SIMD&FP register in Zn field. */ + AARCH64_OPND_SVE_Vd, /* Scalar SIMD&FP register in Vd. */ + AARCH64_OPND_SVE_Vm, /* Scalar SIMD&FP register in Vm. */ + AARCH64_OPND_SVE_Vn, /* Scalar SIMD&FP register in Vn. */ + AARCH64_OPND_SVE_Za_5, /* SVE vector register in Za, bits [9,5]. */ + AARCH64_OPND_SVE_Za_16, /* SVE vector register in Za, bits [20,16]. */ + AARCH64_OPND_SVE_Zd, /* SVE vector register in Zd. */ + AARCH64_OPND_SVE_Zm_5, /* SVE vector register in Zm, bits [9,5]. */ + AARCH64_OPND_SVE_Zm_16, /* SVE vector register in Zm, bits [20,16]. */ + AARCH64_OPND_SVE_Zm3_INDEX, /* z0-z7[0-3] in Zm, bits [20,16]. */ + AARCH64_OPND_SVE_Zm3_22_INDEX, /* z0-z7[0-7] in Zm3_INDEX plus bit 22. */ + AARCH64_OPND_SVE_Zm4_INDEX, /* z0-z15[0-1] in Zm, bits [20,16]. */ + AARCH64_OPND_SVE_Zn, /* SVE vector register in Zn. */ + AARCH64_OPND_SVE_Zn_INDEX, /* Indexed SVE vector register, for DUP. */ + AARCH64_OPND_SVE_ZnxN, /* SVE vector register list in Zn. */ + AARCH64_OPND_SVE_Zt, /* SVE vector register in Zt. */ + AARCH64_OPND_SVE_ZtxN, /* SVE vector register list in Zt. */ + AARCH64_OPND_TME_UIMM16, /* TME unsigned 16-bit immediate. */ + AARCH64_OPND_SM3_IMM2, /* SM3 encodes lane in bits [13, 14]. */ }; /* Qualifier constrains an operand. 
It either specifies a variant of an @@ -238,6 +455,11 @@ enum aarch64_opnd_qualifier AARCH64_OPND_QLF_S_S, AARCH64_OPND_QLF_S_D, AARCH64_OPND_QLF_S_Q, + /* This type qualifier has a special meaning in that it means that 4 x 1 byte + are selected by the instruction. Other than that it has no difference + with AARCH64_OPND_QLF_S_B in encoding. It is here purely for syntactical + reasons and is an exception from normal AArch64 disassembly scheme. */ + AARCH64_OPND_QLF_S_4B, /* Qualifying an operand which is a SIMD vector register or a SIMD vector register list; indicating register shape. @@ -245,8 +467,10 @@ enum aarch64_opnd_qualifier a use is only for the ease of operand encoding/decoding and qualifier sequence matching; such a use should not be applied widely; use the value constraint qualifiers for immediate operands wherever possible. */ + AARCH64_OPND_QLF_V_4B, AARCH64_OPND_QLF_V_8B, AARCH64_OPND_QLF_V_16B, + AARCH64_OPND_QLF_V_2H, AARCH64_OPND_QLF_V_4H, AARCH64_OPND_QLF_V_8H, AARCH64_OPND_QLF_V_2S, @@ -255,7 +479,15 @@ enum aarch64_opnd_qualifier AARCH64_OPND_QLF_V_2D, AARCH64_OPND_QLF_V_1Q, + AARCH64_OPND_QLF_P_Z, + AARCH64_OPND_QLF_P_M, + + /* Used in scaled signed immediate that are scaled by a Tag granule + like in stg, st2g, etc. */ + AARCH64_OPND_QLF_imm_tag, + /* Constraint on value. */ + AARCH64_OPND_QLF_CR, /* CRn, CRm. */ AARCH64_OPND_QLF_imm_0_7, AARCH64_OPND_QLF_imm_0_15, AARCH64_OPND_QLF_imm_0_31, @@ -331,6 +563,7 @@ enum aarch64_insn_class ldst_immpost, ldst_immpre, ldst_imm9, /* immpost or immpre */ + ldst_imm10, /* LDRAA/LDRAB */ ldst_pos, ldst_regoff, ldst_unpriv, @@ -346,7 +579,23 @@ enum aarch64_insn_class movewide, pcreladdr, ic_system, + sve_cpy, + sve_index, + sve_limm, + sve_misc, + sve_movprfx, + sve_pred_zm, + sve_shift_pred, + sve_shift_unpred, + sve_size_bhs, + sve_size_bhsd, + sve_size_hsd, + sve_size_hsd2, + sve_size_sd, testbranch, + cryptosm3, + cryptosm4, + dotproduct, }; /* Opcode enumerators. */ @@ -410,6 +659,7 @@ enum aarch64_op OP_SBFX, OP_SBFIZ, OP_BFI, + OP_BFC, /* ARMv8.2. */ OP_UBFIZ, OP_UXTB, OP_UXTH, @@ -435,9 +685,34 @@ enum aarch64_op OP_UXTL, OP_UXTL2, + OP_MOV_P_P, + OP_MOV_Z_P_Z, + OP_MOV_Z_V, + OP_MOV_Z_Z, + OP_MOV_Z_Zi, + OP_MOVM_P_P_P, + OP_MOVS_P_P, + OP_MOVZS_P_P_P, + OP_MOVZ_P_P_P, + OP_NOTS_P_P_P_Z, + OP_NOT_P_P_P_Z, + + OP_FCMLA_ELEM, /* ARMv8.3, indexed element version. */ + OP_TOTAL_NUM, /* Pseudo. */ }; +/* Error types. */ +enum err_type +{ + ERR_OK, + ERR_UND, + ERR_UNP, + ERR_NYI, + ERR_VFI, + ERR_NR_ENTRIES +}; + /* Maximum number of operands an instruction can have. */ #define AARCH64_MAX_OPND_NUM 6 /* Maximum number of qualifier sequences an instruction can have. */ @@ -459,6 +734,13 @@ empty_qualifier_sequence_p (const aarch64_opnd_qualifier_t *qualifiers) return TRUE; } +/* Forward declare error reporting type. */ +typedef struct aarch64_operand_error aarch64_operand_error; +/* Forward declare instruction sequence type. */ +typedef struct aarch64_instr_sequence aarch64_instr_sequence; +/* Forward declare instruction definition. */ +typedef struct aarch64_inst aarch64_inst; + /* This structure holds information for a particular opcode. */ struct aarch64_opcode @@ -497,7 +779,19 @@ struct aarch64_opcode aarch64_opnd_qualifier_seq_t qualifiers_list[AARCH64_MAX_QLF_SEQ_NUM]; /* Flags providing information about this instruction */ - uint32_t flags; + uint64_t flags; + + /* Extra constraints on the instruction that the verifier checks. 
*/ + uint32_t constraints; + + /* If nonzero, this operand and operand 0 are both registers and + are required to have the same register number. */ + unsigned char tied_operand; + + /* If non-NULL, a function to verify that a given instruction is valid. */ + enum err_type (* verifier) (const struct aarch64_inst *, const aarch64_insn, + bfd_vma, bfd_boolean, aarch64_operand_error *, + struct aarch64_instr_sequence *); }; typedef struct aarch64_opcode aarch64_opcode; @@ -555,7 +849,24 @@ extern aarch64_opcode aarch64_opcode_table[]; #define F_OD(X) (((X) & 0x7) << 24) /* Instruction has the field of 'sz'. */ #define F_LSE_SZ (1 << 27) -/* Next bit is 28. */ +/* Require an exact qualifier match, even for NIL qualifiers. */ +#define F_STRICT (1ULL << 28) +/* This system instruction is used to read system registers. */ +#define F_SYS_READ (1ULL << 29) +/* This system instruction is used to write system registers. */ +#define F_SYS_WRITE (1ULL << 30) +/* This instruction has an extra constraint on it that imposes a requirement on + subsequent instructions. */ +#define F_SCAN (1ULL << 31) +/* Next bit is 32. */ + +/* Instruction constraints. */ +/* This instruction has a predication constraint on the instruction at PC+4. */ +#define C_SCAN_MOVPRFX (1U << 0) +/* This instruction's operation width is determined by the operand with the + largest element size. */ +#define C_MAX_ELEM (1U << 1) +/* Next bit is 2. */ static inline bfd_boolean alias_opcode_p (const aarch64_opcode *opcode) @@ -618,6 +929,7 @@ struct aarch64_name_value_pair extern const struct aarch64_name_value_pair aarch64_operand_modifiers []; extern const struct aarch64_name_value_pair aarch64_barrier_options [16]; extern const struct aarch64_name_value_pair aarch64_prfops [32]; +extern const struct aarch64_name_value_pair aarch64_hint_options []; typedef struct { @@ -629,18 +941,28 @@ typedef struct extern const aarch64_sys_reg aarch64_sys_regs []; extern const aarch64_sys_reg aarch64_pstatefields []; extern bfd_boolean aarch64_sys_reg_deprecated_p (const aarch64_sys_reg *); +extern bfd_boolean aarch64_sys_reg_supported_p (const aarch64_feature_set, + const aarch64_sys_reg *); +extern bfd_boolean aarch64_pstatefield_supported_p (const aarch64_feature_set, + const aarch64_sys_reg *); typedef struct { - const char *template; + const char *name; uint32_t value; - int has_xt; + uint32_t flags ; } aarch64_sys_ins_reg; +extern bfd_boolean aarch64_sys_ins_reg_has_xt (const aarch64_sys_ins_reg *); +extern bfd_boolean +aarch64_sys_ins_reg_supported_p (const aarch64_feature_set, + const aarch64_sys_ins_reg *); + extern const aarch64_sys_ins_reg aarch64_sys_regs_ic []; extern const aarch64_sys_ins_reg aarch64_sys_regs_dc []; extern const aarch64_sys_ins_reg aarch64_sys_regs_at []; extern const aarch64_sys_ins_reg aarch64_sys_regs_tlbi []; +extern const aarch64_sys_ins_reg aarch64_sys_regs_sr []; /* Shift/extending operator kinds. N.B. order is important; keep aarch64_operand_modifiers synced. */ @@ -660,6 +982,8 @@ enum aarch64_modifier_kind AARCH64_MOD_SXTH, AARCH64_MOD_SXTW, AARCH64_MOD_SXTX, + AARCH64_MOD_MUL, + AARCH64_MOD_MUL_VL, }; bfd_boolean @@ -673,7 +997,7 @@ typedef struct { /* A list of names with the first one as the disassembly preference; terminated by NULL if fewer than 3. */ - const char *names[3]; + const char *names[4]; aarch64_insn value; } aarch64_cond; @@ -698,8 +1022,8 @@ struct aarch64_opnd_info } reg; struct { - unsigned regno : 5; - unsigned index : 4; + unsigned int regno; + int64_t index; } reglane; /* e.g. LVn. 
*/ struct @@ -709,7 +1033,7 @@ struct aarch64_opnd_info /* 1 if it is a list of reg element. */ unsigned has_index : 1; /* Lane index; valid only when has_index is 1. */ - unsigned index : 4; + int64_t index; } reglist; /* e.g. immediate or pc relative address offset. */ struct @@ -735,13 +1059,22 @@ struct aarch64_opnd_info unsigned preind : 1; /* Pre-indexed. */ unsigned postind : 1; /* Post-indexed. */ } addr; + + struct + { + /* The encoding of the system register. */ + aarch64_insn value; + + /* The system register flags. */ + uint32_t flags; + } sysreg; + const aarch64_cond *cond; - /* The encoding of the system register. */ - aarch64_insn sysreg; /* The encoding of the PSTATE field. */ aarch64_insn pstatefield; const aarch64_sys_ins_reg *sysins_op; const struct aarch64_name_value_pair *barrier; + const struct aarch64_name_value_pair *hint_option; const struct aarch64_name_value_pair *prfop; }; @@ -750,10 +1083,10 @@ struct aarch64_opnd_info struct { enum aarch64_modifier_kind kind; - int amount; unsigned operator_present: 1; /* Only valid during encoding. */ /* Value of the 'S' field in ld/st reg offset; used only in decoding. */ unsigned amount_present: 1; + int64_t amount; } shifter; unsigned skip:1; /* Operand is not completed if there is a fixup needed @@ -795,7 +1128,13 @@ struct aarch64_inst aarch64_opnd_info operands[AARCH64_MAX_OPND_NUM]; }; -typedef struct aarch64_inst aarch64_inst; +/* Defining the HINT #imm values for the aarch64_hint_options. */ +#define HINT_OPD_CSYNC 0x11 +#define HINT_OPD_C 0x22 +#define HINT_OPD_J 0x24 +#define HINT_OPD_JC 0x26 +#define HINT_OPD_NULL 0x00 + /* Diagnosis related declaration and interface. */ @@ -816,6 +1155,10 @@ typedef struct aarch64_inst aarch64_inst; No syntax error, but the operands are not a valid combination, e.g. FMOV D0,S0 + AARCH64_OPDE_UNTIED_OPERAND + The asm failed to use the same register for a destination operand + and a tied source operand. + AARCH64_OPDE_OUT_OF_RANGE Error about some immediate value out of a valid range. @@ -852,6 +1195,7 @@ enum aarch64_operand_error_kind AARCH64_OPDE_SYNTAX_ERROR, AARCH64_OPDE_FATAL_SYNTAX_ERROR, AARCH64_OPDE_INVALID_VARIANT, + AARCH64_OPDE_UNTIED_OPERAND, AARCH64_OPDE_OUT_OF_RANGE, AARCH64_OPDE_UNALIGNED, AARCH64_OPDE_REG_LIST, @@ -865,16 +1209,30 @@ struct aarch64_operand_error int index; const char *error; int data[3]; /* Some data for extra information. */ + bfd_boolean non_fatal; }; -typedef struct aarch64_operand_error aarch64_operand_error; +/* AArch64 sequence structure used to track instructions with F_SCAN + dependencies for both assembler and disassembler. */ +struct aarch64_instr_sequence +{ + /* The instruction that caused this sequence to be opened. */ + aarch64_inst *instr; + /* The number of instructions the above instruction allows to be kept in the + sequence before an automatic close is done. */ + int num_insns; + /* The instructions currently added to the sequence. */ + aarch64_inst **current_insns; + /* The number of instructions already in the sequence. */ + int next_insn; +}; /* Encoding entrypoint. */ extern int aarch64_opcode_encode (const aarch64_opcode *, const aarch64_inst *, aarch64_insn *, aarch64_opnd_qualifier_t *, - aarch64_operand_error *); + aarch64_operand_error *, aarch64_instr_sequence *); extern const aarch64_opcode * aarch64_replace_opcode (struct aarch64_inst *, @@ -889,7 +1247,8 @@ aarch64_get_opcode (enum aarch64_op); /* Generate the string representation of an operand. 
*/ extern void aarch64_print_operand (char *, size_t, bfd_vma, const aarch64_opcode *, - const aarch64_opnd_info *, int, int *, bfd_vma *); + const aarch64_opnd_info *, int, int *, bfd_vma *, + char **); /* Miscellaneous interface. */ @@ -900,14 +1259,24 @@ extern aarch64_opnd_qualifier_t aarch64_get_expected_qualifier (const aarch64_opnd_qualifier_seq_t *, int, const aarch64_opnd_qualifier_t, int); +extern bfd_boolean +aarch64_is_destructive_by_operands (const aarch64_opcode *); + extern int aarch64_num_of_operands (const aarch64_opcode *); extern int aarch64_stack_pointer_p (const aarch64_opnd_info *); -extern -int aarch64_zero_register_p (const aarch64_opnd_info *); +extern int +aarch64_zero_register_p (const aarch64_opnd_info *); + +extern enum err_type +aarch64_decode_insn (aarch64_insn, aarch64_inst *, bfd_boolean, + aarch64_operand_error *); + +extern void +init_insn_sequence (const struct aarch64_inst *, aarch64_instr_sequence *); /* Given an operand qualifier, return the expected data element size of a qualified operand. */ @@ -923,6 +1292,9 @@ aarch64_get_operand_name (enum aarch64_opnd); extern const char * aarch64_get_operand_desc (enum aarch64_opnd); +extern bfd_boolean +aarch64_sve_dupm_mov_immediate_p (uint64_t, int); + #ifdef DEBUG_AARCH64 extern int debug_dump; @@ -945,4 +1317,11 @@ aarch64_verbose (const char *, ...) __attribute__ ((format (printf, 1, 2))); #define DEBUG_TRACE_IF(C, M, ...) ; #endif /* DEBUG_AARCH64 */ +extern const char *const aarch64_sve_pattern_array[32]; +extern const char *const aarch64_sve_prfop_array[16]; + +#ifdef __cplusplus +} +#endif + #endif /* OPCODE_AARCH64_H */
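
A minimal, self-contained sketch (not part of the patch) of how the feature-set macros defined above compose. It illustrates the two semantic changes visible in this diff: aarch64_feature_set widens to unsigned long long, and AARCH64_CPU_HAS_FEATURE now requires all requested bits (it is an alias for AARCH64_CPU_HAS_ALL_FEATURES) rather than any of them. The macro bodies and bit values are copied from the definitions above; the ULL suffixes and the main() driver are added here purely for illustration.

/* Illustrative sketch only; constants and macro bodies mirror the header.  */
#include <stdio.h>

typedef unsigned long long aarch64_feature_set;

#define AARCH64_FEATURE_V8    0x00000001ULL   /* All processors.    */
#define AARCH64_FEATURE_FP    0x00020000ULL   /* FP instructions.   */
#define AARCH64_FEATURE_SIMD  0x00040000ULL   /* SIMD instructions. */
#define AARCH64_FEATURE_SVE   0x10000000ULL   /* SVE instructions.  */

/* Architectures are the sum of the base and extensions.  */
#define AARCH64_FEATURE(core, coproc) ((core) | (coproc))

/* "All" semantics: every requested bit must be present in CPU.  */
#define AARCH64_CPU_HAS_ALL_FEATURES(CPU, FEAT) ((~(CPU) & (FEAT)) == 0)
/* "Any" semantics: at least one requested bit must be present.  */
#define AARCH64_CPU_HAS_ANY_FEATURES(CPU, FEAT) (((CPU) & (FEAT)) != 0)
/* After this patch, AARCH64_CPU_HAS_FEATURE uses the "all" form.  */
#define AARCH64_CPU_HAS_FEATURE(CPU, FEAT) \
  AARCH64_CPU_HAS_ALL_FEATURES (CPU, FEAT)

int
main (void)
{
  /* Base v8: the core feature plus FP and SIMD, as in AARCH64_ARCH_V8.  */
  aarch64_feature_set cpu
    = AARCH64_FEATURE (AARCH64_FEATURE_V8,
                       AARCH64_FEATURE_FP | AARCH64_FEATURE_SIMD);

  /* Both FP and SIMD are present, so the "all" test succeeds (prints 1).  */
  printf ("FP|SIMD (all): %d\n",
          AARCH64_CPU_HAS_FEATURE (cpu,
                                   AARCH64_FEATURE_FP | AARCH64_FEATURE_SIMD));

  /* SVE is missing, so requiring FP and SVE together fails (prints 0)...  */
  printf ("FP|SVE  (all): %d\n",
          AARCH64_CPU_HAS_FEATURE (cpu,
                                   AARCH64_FEATURE_FP | AARCH64_FEATURE_SVE));

  /* ...while the "any" test still succeeds because FP is present.  */
  printf ("FP|SVE  (any): %d\n",
          AARCH64_CPU_HAS_ANY_FEATURES (cpu,
                                        AARCH64_FEATURE_FP
                                        | AARCH64_FEATURE_SVE));
  return 0;
}

With the pre-patch definition, AARCH64_CPU_HAS_FEATURE (cpu, AARCH64_FEATURE_FP | AARCH64_FEATURE_SVE) would have reported success as long as either bit was set; callers that need the weaker test now use AARCH64_CPU_HAS_ANY_FEATURES explicitly.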