};
/* Types of processor to assemble for. */
-#define ARM_1 ARM_ARCH_V1
-#define ARM_2 ARM_ARCH_V2
-#define ARM_3 ARM_ARCH_V2S
-#define ARM_250 ARM_ARCH_V2S
-#define ARM_6 ARM_ARCH_V3
-#define ARM_7 ARM_ARCH_V3
-#define ARM_8 ARM_ARCH_V4
-#define ARM_9 ARM_ARCH_V4T
-#define ARM_STRONG ARM_ARCH_V4
-#define ARM_CPU_MASK 0x0000000f /* XXX? */
-
#ifndef CPU_DEFAULT
#if defined __XSCALE__
-#define CPU_DEFAULT (ARM_ARCH_XSCALE)
+#define CPU_DEFAULT ARM_ARCH_XSCALE
#else
#if defined __thumb__
-#define CPU_DEFAULT (ARM_ARCH_V5T)
-#else
-#define CPU_DEFAULT ARM_ANY
+#define CPU_DEFAULT ARM_ARCH_V5T
#endif
#endif
#endif
#define streq(a, b) (strcmp (a, b) == 0)
-static unsigned long cpu_variant;
+static arm_feature_set cpu_variant;
+static arm_feature_set arm_arch_used;
+static arm_feature_set thumb_arch_used;
/* Flags stored in private area of BFD structure. */
static int uses_apcs_26 = FALSE;
/* Variables that we set while parsing command-line options. Once all
options have been read we re-process these values to set the real
assembly flags. */
-static int legacy_cpu = -1;
-static int legacy_fpu = -1;
-
-static int mcpu_cpu_opt = -1;
-static int mcpu_fpu_opt = -1;
-static int march_cpu_opt = -1;
-static int march_fpu_opt = -1;
-static int mfpu_opt = -1;
+static const arm_feature_set *legacy_cpu = NULL;
+static const arm_feature_set *legacy_fpu = NULL;
+
+static const arm_feature_set *mcpu_cpu_opt = NULL;
+static const arm_feature_set *mcpu_fpu_opt = NULL;
+static const arm_feature_set *march_cpu_opt = NULL;
+static const arm_feature_set *march_fpu_opt = NULL;
+static const arm_feature_set *mfpu_opt = NULL;
+
+/* Constants for known architecture features. */
+static const arm_feature_set fpu_default = FPU_DEFAULT;
+static const arm_feature_set fpu_arch_vfp_v1 = FPU_ARCH_VFP_V1;
+static const arm_feature_set fpu_arch_vfp_v2 = FPU_ARCH_VFP_V2;
+static const arm_feature_set fpu_arch_fpa = FPU_ARCH_FPA;
+static const arm_feature_set fpu_any_hard = FPU_ANY_HARD;
+static const arm_feature_set fpu_arch_maverick = FPU_ARCH_MAVERICK;
+static const arm_feature_set fpu_endian_pure = FPU_ARCH_ENDIAN_PURE;
+
+#ifdef CPU_DEFAULT
+static const arm_feature_set cpu_default = CPU_DEFAULT;
+#endif
+
+static const arm_feature_set arm_ext_v1 = ARM_FEATURE (ARM_EXT_V1, 0);
+static const arm_feature_set arm_ext_v2 = ARM_FEATURE (ARM_EXT_V2, 0);
+static const arm_feature_set arm_ext_v2s = ARM_FEATURE (ARM_EXT_V2S, 0);
+static const arm_feature_set arm_ext_v3 = ARM_FEATURE (ARM_EXT_V3, 0);
+static const arm_feature_set arm_ext_v3m = ARM_FEATURE (ARM_EXT_V3M, 0);
+static const arm_feature_set arm_ext_v4 = ARM_FEATURE (ARM_EXT_V4, 0);
+static const arm_feature_set arm_ext_v4t = ARM_FEATURE (ARM_EXT_V4T, 0);
+static const arm_feature_set arm_ext_v5 = ARM_FEATURE (ARM_EXT_V5, 0);
+static const arm_feature_set arm_ext_v4t_5 =
+ ARM_FEATURE (ARM_EXT_V4T | ARM_EXT_V5, 0);
+static const arm_feature_set arm_ext_v5t = ARM_FEATURE (ARM_EXT_V5T, 0);
+static const arm_feature_set arm_ext_v5e = ARM_FEATURE (ARM_EXT_V5E, 0);
+static const arm_feature_set arm_ext_v5exp = ARM_FEATURE (ARM_EXT_V5ExP, 0);
+static const arm_feature_set arm_ext_v5j = ARM_FEATURE (ARM_EXT_V5J, 0);
+static const arm_feature_set arm_ext_v6 = ARM_FEATURE (ARM_EXT_V6, 0);
+static const arm_feature_set arm_ext_v6k = ARM_FEATURE (ARM_EXT_V6K, 0);
+static const arm_feature_set arm_ext_v6z = ARM_FEATURE (ARM_EXT_V6Z, 0);
+static const arm_feature_set arm_ext_v6t2 = ARM_FEATURE (ARM_EXT_V6T2, 0);
+static const arm_feature_set arm_ext_v6_notm = ARM_FEATURE (ARM_EXT_V6_NOTM, 0);
+static const arm_feature_set arm_ext_div = ARM_FEATURE (ARM_EXT_DIV, 0);
+static const arm_feature_set arm_ext_v7 = ARM_FEATURE (ARM_EXT_V7, 0);
+static const arm_feature_set arm_ext_v7a = ARM_FEATURE (ARM_EXT_V7A, 0);
+static const arm_feature_set arm_ext_v7r = ARM_FEATURE (ARM_EXT_V7R, 0);
+static const arm_feature_set arm_ext_v7m = ARM_FEATURE (ARM_EXT_V7M, 0);
+
+static const arm_feature_set arm_arch_any = ARM_ANY;
+static const arm_feature_set arm_arch_full = ARM_FEATURE (-1, -1);
+static const arm_feature_set arm_arch_t2 = ARM_ARCH_THUMB2;
+static const arm_feature_set arm_arch_none = ARM_ARCH_NONE;
+
+static const arm_feature_set arm_cext_iwmmxt =
+ ARM_FEATURE (0, ARM_CEXT_IWMMXT);
+static const arm_feature_set arm_cext_xscale =
+ ARM_FEATURE (0, ARM_CEXT_XSCALE);
+static const arm_feature_set arm_cext_maverick =
+ ARM_FEATURE (0, ARM_CEXT_MAVERICK);
+static const arm_feature_set fpu_fpa_ext_v1 = ARM_FEATURE (0, FPU_FPA_EXT_V1);
+static const arm_feature_set fpu_fpa_ext_v2 = ARM_FEATURE (0, FPU_FPA_EXT_V2);
+static const arm_feature_set fpu_vfp_ext_v1xd =
+ ARM_FEATURE (0, FPU_VFP_EXT_V1xD);
+static const arm_feature_set fpu_vfp_ext_v1 = ARM_FEATURE (0, FPU_VFP_EXT_V1);
+static const arm_feature_set fpu_vfp_ext_v2 = ARM_FEATURE (0, FPU_VFP_EXT_V2);
+
static int mfloat_abi_opt = -1;
+/* Record user cpu selection for object attributes. */
+static arm_feature_set selected_cpu = ARM_ARCH_NONE;
+/* Must be long enough to hold any of the names in arm_cpus. */
+static char selected_cpu_name[16];
#ifdef OBJ_ELF
# ifdef EABI_DEFAULT
static int meabi_flags = EABI_DEFAULT;
unsigned long field;
};
+struct asm_barrier_opt
+{
+ const char *template;
+ unsigned long value;
+};
+
/* The bit that distinguishes CPSR and SPSR. */
#define SPSR_BIT (1 << 22)
unsigned int tvalue;
/* Which architecture variant provides this instruction. */
- unsigned long avariant;
- unsigned long tvariant;
+ const arm_feature_set *avariant;
+ const arm_feature_set *tvariant;
/* Function to call to encode instruction in ARM format. */
void (* aencode) (void);
#define DATA_OP_SHIFT 21
+#define T2_OPCODE_MASK 0xfe1fffff
+#define T2_DATA_OP_SHIFT 21
+
/* Codes to distinguish the arithmetic instructions. */
#define OPCODE_AND 0
#define OPCODE_EOR 1
#define OPCODE_BIC 14
#define OPCODE_MVN 15
+#define T2_OPCODE_AND 0
+#define T2_OPCODE_BIC 1
+#define T2_OPCODE_ORR 2
+#define T2_OPCODE_ORN 3
+#define T2_OPCODE_EOR 4
+#define T2_OPCODE_ADD 8
+#define T2_OPCODE_ADC 10
+#define T2_OPCODE_SBC 11
+#define T2_OPCODE_SUB 13
+#define T2_OPCODE_RSB 14
+
#define T_OPCODE_MUL 0x4340
#define T_OPCODE_TST 0x4200
#define T_OPCODE_CMN 0x42c0
#define THUMB_SIZE 2 /* Size of thumb instruction. */
#define THUMB_PP_PC_LR 0x0100
#define THUMB_LOAD_BIT 0x0800
+#define THUMB2_LOAD_BIT 0x00100000
#define BAD_ARGS _("bad arguments to instruction")
#define BAD_PC _("r15 not allowed here")
#define BAD_OVERLAP _("registers may not be the same")
#define BAD_HIREG _("lo register required")
#define BAD_THUMB32 _("instruction not supported in Thumb16 mode")
+#define BAD_ADDR_MODE _("instruction does not accept this addressing mode")
+#define BAD_BRANCH _("branch must be last instruction in IT block")
+#define BAD_NOT_IT _("instruction not allowed in IT block")
static struct hash_control *arm_ops_hsh;
static struct hash_control *arm_cond_hsh;
static struct hash_control *arm_shift_hsh;
static struct hash_control *arm_psr_hsh;
+static struct hash_control *arm_v7m_psr_hsh;
static struct hash_control *arm_reg_hsh;
static struct hash_control *arm_reloc_hsh;
+static struct hash_control *arm_barrier_opt_hsh;
/* Stuff needed to resolve the label ambiguity
As:
}
else
{
- if (cpu_variant & FPU_ARCH_VFP)
+ if (ARM_CPU_HAS_FEATURE (cpu_variant, fpu_endian_pure))
for (i = prec - 1; i >= 0; i--)
{
md_number_to_chars (litP, (valueT) words[i], 2);
#ifdef REGISTER_PREFIX
if (*start != REGISTER_PREFIX)
- return FAIL;
+ return NULL;
start++;
#endif
#ifdef OPTIONAL_REGISTER_PREFIX
}
/* As above, but the register must be of type TYPE, and the return
- value is the register number or NULL. */
+ value is the register number or FAIL. */
static int
arm_reg_parse (char **ccp, enum arm_reg_type type)
case REG_TYPE_MVFX:
case REG_TYPE_MVDX:
/* Generic coprocessor register names are allowed for these. */
- if (reg->type == REG_TYPE_CN)
+ if (reg && reg->type == REG_TYPE_CN)
return reg->number;
break;
case REG_TYPE_MMXWC:
/* WC includes WCG. ??? I'm not sure this is true for all
instructions that take WC registers. */
- if (reg->type == REG_TYPE_MMXWCG)
+ if (reg && reg->type == REG_TYPE_MMXWCG)
return reg->number;
break;
case 16:
if (! thumb_mode)
{
- if (! (cpu_variant & ARM_EXT_V4T))
+ if (!ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v4t))
as_bad (_("selected processor does not support THUMB opcodes"));
thumb_mode = 1;
case 32:
if (thumb_mode)
{
- if ((cpu_variant & ARM_ALL) == ARM_EXT_V4T)
+ if (!ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v1))
as_bad (_("selected processor does not support ARM opcodes"));
thumb_mode = 0;
unwind.pending_offset = 0;
}
- /* See if we can use the short opcodes. These pop a block of upto 8
- registers starting with r4, plus maybe r14. */
- for (n = 0; n < 8; n++)
- {
- /* Break at the first non-saved register. */
- if ((range & (1 << (n + 4))) == 0)
- break;
- }
- /* See if there are any other bits set. */
- if (n == 0 || (range & (0xfff0 << n) & 0xbff0) != 0)
- {
- /* Use the long form. */
- op = 0x8000 | ((range >> 4) & 0xfff);
- add_unwind_opcode (op, 2);
- }
- else
+ /* Pop r4-r15. */
+ if (range & 0xfff0)
{
- /* Use the short form. */
- if (range & 0x4000)
- op = 0xa8; /* Pop r14. */
+ /* See if we can use the short opcodes. These pop a block of up to 8
+ registers starting with r4, plus maybe r14. */
+ for (n = 0; n < 8; n++)
+ {
+ /* Break at the first non-saved register. */
+ if ((range & (1 << (n + 4))) == 0)
+ break;
+ }
+ /* See if there are any other bits set. */
+ if (n == 0 || (range & (0xfff0 << n) & 0xbff0) != 0)
+ {
+ /* Use the long form. */
+ op = 0x8000 | ((range >> 4) & 0xfff);
+ add_unwind_opcode (op, 2);
+ }
else
- op = 0xa0; /* Do not pop r14. */
- op |= (n - 1);
- add_unwind_opcode (op, 1);
+ {
+ /* Use the short form. */
+ if (range & 0x4000)
+ op = 0xa8; /* Pop r14. */
+ else
+ op = 0xa0; /* Do not pop r14. */
+ op |= (n - 1);
+ add_unwind_opcode (op, 1);
+ }
}
/* Pop r0-r3. */
demand_empty_rest_of_line ();
}
+
+
+/* Parse a .eabi_attribute directive. */
+
+static void
+s_arm_eabi_attribute (int ignored ATTRIBUTE_UNUSED)
+{
+ expressionS exp;
+ bfd_boolean is_string;
+ int tag;
+ unsigned int i = 0;
+ char *s = NULL;
+ char saved_char;
+
+ expression (& exp);
+ if (exp.X_op != O_constant)
+ goto bad;
+
+ tag = exp.X_add_number;
+ if (tag == 4 || tag == 5 || tag == 32 || (tag > 32 && (tag & 1) != 0))
+ is_string = 1;
+ else
+ is_string = 0;
+
+ if (skip_past_comma (&input_line_pointer) == FAIL)
+ goto bad;
+ if (tag == 32 || !is_string)
+ {
+ expression (& exp);
+ if (exp.X_op != O_constant)
+ {
+ as_bad (_("expected numeric constant"));
+ ignore_rest_of_line ();
+ return;
+ }
+ i = exp.X_add_number;
+ }
+ if (tag == Tag_compatibility
+ && skip_past_comma (&input_line_pointer) == FAIL)
+ {
+ as_bad (_("expected comma"));
+ ignore_rest_of_line ();
+ return;
+ }
+ if (is_string)
+ {
+ skip_whitespace(input_line_pointer);
+ if (*input_line_pointer != '"')
+ goto bad_string;
+ input_line_pointer++;
+ s = input_line_pointer;
+ while (*input_line_pointer && *input_line_pointer != '"')
+ input_line_pointer++;
+ if (*input_line_pointer != '"')
+ goto bad_string;
+ saved_char = *input_line_pointer;
+ *input_line_pointer = 0;
+ }
+ else
+ {
+ s = NULL;
+ saved_char = 0;
+ }
+
+ if (tag == Tag_compatibility)
+ elf32_arm_add_eabi_attr_compat (stdoutput, i, s);
+ else if (is_string)
+ elf32_arm_add_eabi_attr_string (stdoutput, tag, s);
+ else
+ elf32_arm_add_eabi_attr_int (stdoutput, tag, i);
+
+ if (s)
+ {
+ *input_line_pointer = saved_char;
+ input_line_pointer++;
+ }
+ demand_empty_rest_of_line ();
+ return;
+bad_string:
+ as_bad (_("bad string constant"));
+ ignore_rest_of_line ();
+ return;
+bad:
+ as_bad (_("expected <tag> , <value>"));
+ ignore_rest_of_line ();
+}
#endif /* OBJ_ELF */
+static void s_arm_arch (int);
+static void s_arm_cpu (int);
+static void s_arm_fpu (int);
+
/* This table describes all the machine specific pseudo-ops the assembler
has to support. The fields are:
pseudo-op name without dot
{ "ltorg", s_ltorg, 0 },
{ "pool", s_ltorg, 0 },
{ "syntax", s_syntax, 0 },
+ { "cpu", s_arm_cpu, 0 },
+ { "arch", s_arm_arch, 0 },
+ { "fpu", s_arm_fpu, 0 },
#ifdef OBJ_ELF
{ "word", s_arm_elf_cons, 4 },
{ "long", s_arm_elf_cons, 4 },
{ "pad", s_arm_unwind_pad, 0 },
{ "setfp", s_arm_unwind_setfp, 0 },
{ "unwind_raw", s_arm_unwind_raw, 0 },
+ { "eabi_attribute", s_arm_eabi_attribute, 0 },
#else
{ "word", cons, 4},
#endif
{
char *p;
unsigned long psr_field;
+ const struct asm_psr *psr;
+ char *start;
/* CPSR's and SPSR's can now be lowercase. This is just a convenience
feature for ease of use and backwards compatibility. */
p = *str;
- if (*p == 's' || *p == 'S')
+ if (strncasecmp (p, "SPSR", 4) == 0)
psr_field = SPSR_BIT;
- else if (*p == 'c' || *p == 'C')
+ else if (strncasecmp (p, "CPSR", 4) == 0)
psr_field = 0;
else
- goto error;
+ {
+ start = p;
+ do
+ p++;
+ while (ISALNUM (*p) || *p == '_');
- p++;
- if (strncasecmp (p, "PSR", 3) != 0)
- goto error;
- p += 3;
+ psr = hash_find_n (arm_v7m_psr_hsh, start, p - start);
+ if (!psr)
+ return FAIL;
+
+ *str = p;
+ return psr->field;
+ }
+ p += 4;
if (*p == '_')
{
/* A suffix follows. */
- const struct asm_psr *psr;
- char *start;
-
p++;
start = p;
return c->value;
}
+/* Parse an option for a barrier instruction. Returns the encoding for the
+ option, or FAIL. */
+static int
+parse_barrier (char **str)
+{
+  char *p, *q;
+  const struct asm_barrier_opt *o;
+
+  /* Scan the alphabetic option name starting at *str.  */
+  p = q = *str;
+  while (ISALPHA (*q))
+    q++;
+
+  /* Look the name up in the barrier-option hash table.  */
+  o = hash_find_n (arm_barrier_opt_hsh, p, q - p);
+  if (!o)
+    return FAIL;
+
+  /* Consume the option text and hand back its encoding.  */
+  *str = q;
+  return o->value;
+}
+
/* Parse the operands of a table branch instruction. Similar to a memory
operand. */
static int
int reg;
if (skip_past_char (&p, '[') == FAIL)
- return FAIL;
+ {
+ inst.error = _("'[' expected");
+ return FAIL;
+ }
if ((reg = arm_reg_parse (&p, REG_TYPE_RN)) == FAIL)
{
inst.operands[0].reg = reg;
if (skip_past_comma (&p) == FAIL)
- return FAIL;
+ {
+ inst.error = _("',' expected");
+ return FAIL;
+ }
if ((reg = arm_reg_parse (&p, REG_TYPE_RN)) == FAIL)
{
OP_oSHar, /* ASR immediate */
OP_oSHllar, /* LSL or ASR immediate */
OP_oROR, /* ROR 0/8/16/24 */
+ OP_oBARRIER, /* Option argument for a barrier instruction. */
OP_FIRST_OPTIONAL = OP_oI7b
};
case OP_oROR: val = parse_ror (&str); break;
case OP_PSR: val = parse_psr (&str); break;
case OP_COND: val = parse_cond (&str); break;
+ case OP_oBARRIER:val = parse_barrier (&str); break;
case OP_TB:
po_misc_or_fail (parse_tb (&str));
case OP_oROR:
case OP_PSR:
case OP_COND:
+ case OP_oBARRIER:
case OP_REGLST:
case OP_VRSLST:
case OP_VRDLST:
failure:
if (!backtrack_pos)
- return FAIL;
+ {
+ /* The parse routine should already have set inst.error, but set a
+	   default here just in case. */
+ if (!inst.error)
+ inst.error = _("syntax error");
+ return FAIL;
+ }
/* Do not backtrack over a trailing optional argument that
absorbed some text. We will only fail again, with the
probably less helpful than the current one. */
if (backtrack_index == i && backtrack_pos != str
&& upat[i+1] == OP_stop)
- return FAIL;
+ {
+ if (!inst.error)
+ inst.error = _("syntax error");
+ return FAIL;
+ }
/* Try again, skipping the optional argument at backtrack_pos. */
str = backtrack_pos;
static int
move_or_literal_pool (int i, bfd_boolean thumb_p, bfd_boolean mode_3)
{
- if ((inst.instruction & (thumb_p ? THUMB_LOAD_BIT : LOAD_BIT)) == 0)
+ unsigned long tbit;
+
+ if (thumb_p)
+ tbit = (inst.instruction > 0xffff) ? THUMB2_LOAD_BIT : THUMB_LOAD_BIT;
+ else
+ tbit = LOAD_BIT;
+
+ if ((inst.instruction & tbit) == 0)
{
inst.error = _("invalid pseudo operation");
return 1;
{
if (thumb_p)
{
- if ((inst.reloc.exp.X_add_number & ~0xFF) == 0)
+ if (!unified_syntax && (inst.reloc.exp.X_add_number & ~0xFF) == 0)
{
/* This can be done with a mov(1) instruction. */
inst.instruction = T_OPCODE_MOV_I8 | (inst.operands[i].reg << 8);
}
static void
+do_barrier (void)
+{
+  /* An explicit barrier option must either be the SY-class encoding
+     (0x4n) or the full-system value 0xf.  */
+  if (inst.operands[0].present)
+    {
+      constraint ((inst.instruction & 0xf0) != 0x40
+		  && inst.operands[0].imm != 0xf,
+		  _("bad barrier type"));
+      inst.instruction |= inst.operands[0].imm;
+    }
+  else
+    inst.instruction |= 0xf;
+}
+
+static void
do_bfc (void)
{
unsigned int msb = inst.operands[1].imm + inst.operands[2].imm;
static void
do_branch (void)
{
- encode_branch (BFD_RELOC_ARM_PCREL_BRANCH);
+#ifdef OBJ_ELF
+ if (EF_ARM_EABI_VERSION (meabi_flags) >= EF_ARM_EABI_VER4)
+ encode_branch (BFD_RELOC_ARM_PCREL_JUMP);
+ else
+#endif
+ encode_branch (BFD_RELOC_ARM_PCREL_BRANCH);
+}
+
+static void
+do_bl (void)
+{
+#ifdef OBJ_ELF
+ if (EF_ARM_EABI_VERSION (meabi_flags) >= EF_ARM_EABI_VER4)
+ {
+ if (inst.cond == COND_ALWAYS)
+ encode_branch (BFD_RELOC_ARM_PCREL_CALL);
+ else
+ encode_branch (BFD_RELOC_ARM_PCREL_JUMP);
+ }
+ else
+#endif
+ encode_branch (BFD_RELOC_ARM_PCREL_BRANCH);
}
/* ARM V5 branch-link-exchange instruction (argument parse)
conditionally, and the opcode must be adjusted. */
constraint (inst.cond != COND_ALWAYS, BAD_COND);
inst.instruction = 0xfa000000;
- encode_branch (BFD_RELOC_ARM_PCREL_BLX);
+#ifdef OBJ_ELF
+ if (EF_ARM_EABI_VERSION (meabi_flags) >= EF_ARM_EABI_VER4)
+ encode_branch (BFD_RELOC_ARM_PCREL_CALL);
+ else
+#endif
+ encode_branch (BFD_RELOC_ARM_PCREL_BLX);
}
}
}
static void
+do_dbg (void)
+{
+ inst.instruction |= inst.operands[0].imm;
+}
+
+static void
do_it (void)
{
/* There is no IT instruction in ARM mode. We
constraint (!inst.operands[1].isreg || !inst.operands[1].preind
|| inst.operands[1].postind || inst.operands[1].writeback
|| inst.operands[1].immisreg || inst.operands[1].shifted
- || inst.operands[1].negative,
- _("instruction does not accept this addressing mode"));
-
- constraint (inst.operands[1].reg == REG_PC, BAD_PC);
+ || inst.operands[1].negative
+ /* This can arise if the programmer has written
+ strex rN, rM, foo
+ or if they have mistakenly used a register name as the last
+ operand, eg:
+ strex rN, rM, rX
+ It is very difficult to distinguish between these two cases
+ because "rX" might actually be a label. ie the register
+ name has been occluded by a symbol of the same name. So we
+ just generate a general 'bad addressing mode' type error
+ message and leave it up to the programmer to discover the
+ true cause and fix their mistake. */
+ || (inst.operands[1].reg == REG_PC),
+ BAD_ADDR_MODE);
constraint (inst.reloc.exp.X_op != O_constant
|| inst.reloc.exp.X_add_number != 0,
_("writeback used in preload instruction"));
constraint (!inst.operands[0].preind,
_("unindexed addressing used in preload instruction"));
- inst.instruction |= inst.operands[0].reg;
encode_arm_addr_mode_2 (0, /*is_t=*/FALSE);
}
+/* ARMv7: PLI <addr_mode> */
+static void
+do_pli (void)
+{
+  /* PLI only accepts a plain pre-indexed register address: reject
+     post-indexed, writeback and unindexed forms.  */
+  constraint (!inst.operands[0].isreg,
+	      _("'[' expected after PLI mnemonic"));
+  constraint (inst.operands[0].postind,
+	      _("post-indexed expression used in preload instruction"));
+  constraint (inst.operands[0].writeback,
+	      _("writeback used in preload instruction"));
+  constraint (!inst.operands[0].preind,
+	      _("unindexed addressing used in preload instruction"));
+  encode_arm_addr_mode_2 (0, /*is_t=*/FALSE);
+  /* NOTE(review): the pre-index bit is cleared from the final encoding;
+     presumably PLI's base opcode expects P == 0 — confirm against the
+     ARM ARM.  */
+  inst.instruction &= ~PRE_INDEX;
+}
+
static void
do_push_pop (void)
{
inst.instruction |= Rm;
if (inst.operands[2].isreg) /* Rd, {Rm,} Rs */
{
- constraint (inst.operands[0].reg != Rm,
- _("source1 and dest must be same register"));
inst.instruction |= inst.operands[2].reg << 8;
inst.instruction |= SHIFT_BY_REG;
}
constraint (!inst.operands[2].isreg || !inst.operands[2].preind
|| inst.operands[2].postind || inst.operands[2].writeback
|| inst.operands[2].immisreg || inst.operands[2].shifted
- || inst.operands[2].negative,
- _("instruction does not accept this addressing mode"));
-
- constraint (inst.operands[2].reg == REG_PC, BAD_PC);
+ || inst.operands[2].negative
+ /* See comment in do_ldrex(). */
+ || (inst.operands[2].reg == REG_PC),
+ BAD_ADDR_MODE);
constraint (inst.operands[0].reg == inst.operands[1].reg
|| inst.operands[0].reg == inst.operands[2].reg, BAD_OVERLAP);
bfd_boolean is_pc = (inst.operands[i].reg == REG_PC);
constraint (!inst.operands[i].isreg,
- _("Thumb does not support the ldr =N pseudo-operation"));
+ _("Instruction does not support =N addresses"));
inst.instruction |= inst.operands[i].reg << 16;
if (inst.operands[i].immisreg)
constraint (inst.operands[i].shifted && inst.operands[i].shift_kind != SHIFT_LSL,
_("Thumb supports only LSL in shifted register indexing"));
- inst.instruction |= inst.operands[1].imm;
+ inst.instruction |= inst.operands[i].imm;
if (inst.operands[i].shifted)
{
constraint (inst.reloc.exp.X_op != O_constant,
{
constraint (is_pc && inst.operands[i].writeback,
_("cannot use writeback with PC-relative addressing"));
- constraint (is_t && inst.operands[1].writeback,
+ constraint (is_t && inst.operands[i].writeback,
_("cannot use writeback with this instruction"));
if (is_d)
}
static void
+do_t_barrier (void)
+{
+  /* An explicit barrier option must either be the SY-class encoding
+     (0x4n) or the full-system value 0xf.  */
+  if (inst.operands[0].present)
+    {
+      constraint ((inst.instruction & 0xf0) != 0x40
+		  && inst.operands[0].imm != 0xf,
+		  _("bad barrier type"));
+      inst.instruction |= inst.operands[0].imm;
+    }
+  else
+    inst.instruction |= 0xf;
+}
+
+static void
do_t_bfc (void)
{
unsigned int msb = inst.operands[1].imm + inst.operands[2].imm;
static void
do_t_blx (void)
{
+ constraint (current_it_mask && current_it_mask != 0x10, BAD_BRANCH);
if (inst.operands[0].isreg)
/* We have a register, so this is BLX(2). */
inst.instruction |= inst.operands[0].reg << 3;
{
/* No register. This must be BLX(1). */
inst.instruction = 0xf000e800;
- inst.reloc.type = BFD_RELOC_THUMB_PCREL_BLX;
+#ifdef OBJ_ELF
+ if (EF_ARM_EABI_VERSION (meabi_flags) >= EF_ARM_EABI_VER4)
+ inst.reloc.type = BFD_RELOC_THUMB_PCREL_BRANCH23;
+ else
+#endif
+ inst.reloc.type = BFD_RELOC_THUMB_PCREL_BLX;
inst.reloc.pc_rel = 1;
}
}
do_t_branch (void)
{
int opcode;
- if (inst.cond != COND_ALWAYS)
+ int cond;
+
+ if (current_it_mask)
+ {
+ /* Conditional branches inside IT blocks are encoded as unconditional
+ branches. */
+ cond = COND_ALWAYS;
+ /* A branch must be the last instruction in an IT block. */
+ constraint (current_it_mask != 0x10, BAD_BRANCH);
+ }
+ else
+ cond = inst.cond;
+
+ if (cond != COND_ALWAYS)
opcode = T_MNEM_bcond;
else
opcode = inst.instruction;
if (unified_syntax && inst.size_req == 4)
{
inst.instruction = THUMB_OP32(opcode);
- if (inst.cond == COND_ALWAYS)
+ if (cond == COND_ALWAYS)
inst.reloc.type = BFD_RELOC_THUMB_PCREL_BRANCH25;
else
{
- assert (inst.cond != 0xF);
- inst.instruction |= inst.cond << 22;
+ assert (cond != 0xF);
+ inst.instruction |= cond << 22;
inst.reloc.type = BFD_RELOC_THUMB_PCREL_BRANCH20;
}
}
else
{
inst.instruction = THUMB_OP16(opcode);
- if (inst.cond == COND_ALWAYS)
+ if (cond == COND_ALWAYS)
inst.reloc.type = BFD_RELOC_THUMB_PCREL_BRANCH12;
else
{
- inst.instruction |= inst.cond << 8;
+ inst.instruction |= cond << 8;
inst.reloc.type = BFD_RELOC_THUMB_PCREL_BRANCH9;
}
/* Allow section relaxation. */
static void
do_t_bkpt (void)
{
+ constraint (inst.cond != COND_ALWAYS,
+ _("instruction is always unconditional"));
if (inst.operands[0].present)
{
constraint (inst.operands[0].imm > 255,
static void
do_t_branch23 (void)
{
+ constraint (current_it_mask && current_it_mask != 0x10, BAD_BRANCH);
inst.reloc.type = BFD_RELOC_THUMB_PCREL_BRANCH23;
inst.reloc.pc_rel = 1;
static void
do_t_bx (void)
{
+ constraint (current_it_mask && current_it_mask != 0x10, BAD_BRANCH);
inst.instruction |= inst.operands[0].reg << 3;
/* ??? FIXME: Should add a hacky reloc here if reg is REG_PC. The reloc
should cause the alignment to be checked once it is known. This is
static void
do_t_bxj (void)
{
+ constraint (current_it_mask && current_it_mask != 0x10, BAD_BRANCH);
if (inst.operands[0].reg == REG_PC)
as_tsktsk (_("use of r15 in bxj is not really useful"));
}
static void
+do_t_cps (void)
+{
+ constraint (current_it_mask, BAD_NOT_IT);
+ inst.instruction |= inst.operands[0].imm;
+}
+
+static void
do_t_cpsi (void)
{
+ constraint (current_it_mask, BAD_NOT_IT);
if (unified_syntax
- && (inst.operands[1].present || inst.size_req == 4))
+ && (inst.operands[1].present || inst.size_req == 4)
+ && ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v6_notm))
{
unsigned int imod = (inst.instruction & 0x0030) >> 4;
inst.instruction = 0xf3af8000;
}
else
{
- constraint (inst.operands[1].present,
+ constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v1)
+ && (inst.operands[0].imm & 4),
+ _("selected processor does not support 'A' form "
+ "of this instruction"));
+ constraint (inst.operands[1].present || inst.size_req == 4,
_("Thumb does not support the 2-argument "
"form of this instruction"));
inst.instruction |= inst.operands[0].imm;
static void
do_t_czb (void)
{
+ constraint (current_it_mask, BAD_NOT_IT);
constraint (inst.operands[0].reg > 7, BAD_HIREG);
inst.instruction |= inst.operands[0].reg;
inst.reloc.pc_rel = 1;
}
static void
+do_t_dbg (void)
+{
+ inst.instruction |= inst.operands[0].imm;
+}
+
+static void
+do_t_div (void)
+{
+ if (!inst.operands[1].present)
+ inst.operands[1].reg = inst.operands[0].reg;
+ inst.instruction |= inst.operands[0].reg << 8;
+ inst.instruction |= inst.operands[1].reg << 16;
+ inst.instruction |= inst.operands[2].reg;
+}
+
+static void
do_t_hint (void)
{
if (unified_syntax && inst.size_req == 4)
{
unsigned int cond = inst.operands[0].imm;
+ constraint (current_it_mask, BAD_NOT_IT);
current_it_mask = (inst.instruction & 0xf) | 0x10;
current_cc = cond;
|| inst.operands[1].postind || inst.operands[1].writeback
|| inst.operands[1].immisreg || inst.operands[1].shifted
|| inst.operands[1].negative,
- _("instruction does not accept this addressing mode"));
+ BAD_ADDR_MODE);
inst.instruction |= inst.operands[0].reg << 12;
inst.instruction |= inst.operands[1].reg << 16;
opcode = inst.instruction;
if (unified_syntax)
{
+ if (!inst.operands[1].isreg)
+ {
+ if (opcode <= 0xffff)
+ inst.instruction = THUMB_OP32 (opcode);
+ if (move_or_literal_pool (0, /*thumb_p=*/TRUE, /*mode_3=*/FALSE))
+ return;
+ }
if (inst.operands[1].isreg
&& !inst.operands[1].writeback
&& !inst.operands[1].shifted && !inst.operands[1].postind
static void
do_t_mrs (void)
{
-  /* mrs only accepts CPSR/SPSR/CPSR_all/SPSR_all. */
-  constraint ((inst.operands[1].imm & (PSR_c|PSR_x|PSR_s|PSR_f))
-	      != (PSR_c|PSR_f),
-	      _("'CPSR' or 'SPSR' expected"));
+  int flags;
+  flags = inst.operands[1].imm & (PSR_c|PSR_x|PSR_s|PSR_f|SPSR_BIT);
+  if (flags == 0)
+    {
+      constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v7m),
+		  _("selected processor does not support "
+		     "requested special purpose register"));
+    }
+  else
+    {
+      constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v1),
+		  _("selected processor does not support "
+		     "requested special purpose register"));
+      /* mrs only accepts CPSR/SPSR/CPSR_all/SPSR_all. */
+      constraint ((flags & ~SPSR_BIT) != (PSR_c|PSR_f),
+		  _("'CPSR' or 'SPSR' expected"));
+    }
+
   inst.instruction |= inst.operands[0].reg << 8;
-  inst.instruction |= (inst.operands[1].imm & SPSR_BIT) >> 2;
+  inst.instruction |= (flags & SPSR_BIT) >> 2;
+  inst.instruction |= inst.operands[1].imm & 0xff;
}
static void
do_t_msr (void)
{
+ int flags;
+
constraint (!inst.operands[1].isreg,
_("Thumb encoding does not support an immediate here"));
- inst.instruction |= (inst.operands[0].imm & SPSR_BIT) >> 2;
- inst.instruction |= (inst.operands[0].imm & ~SPSR_BIT) >> 8;
+ flags = inst.operands[0].imm;
+ if (flags & ~0xff)
+ {
+ constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v1),
+ _("selected processor does not support "
+ "requested special purpose register"));
+ }
+ else
+ {
+ constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v7m),
+ _("selected processor does not support "
+ "requested special purpose register"));
+ flags |= PSR_f;
+ }
+ inst.instruction |= (flags & SPSR_BIT) >> 2;
+ inst.instruction |= (flags & ~SPSR_BIT) >> 8;
+ inst.instruction |= (flags & 0xff);
inst.instruction |= inst.operands[1].reg << 16;
}
static void
do_t_setend (void)
{
+ constraint (current_it_mask, BAD_NOT_IT);
if (inst.operands[0].imm)
inst.instruction |= 0x8;
}
|| inst.operands[2].postind || inst.operands[2].writeback
|| inst.operands[2].immisreg || inst.operands[2].shifted
|| inst.operands[2].negative,
- _("instruction does not accept this addressing mode"));
+ BAD_ADDR_MODE);
inst.instruction |= inst.operands[0].reg << 8;
inst.instruction |= inst.operands[1].reg << 12;
int half;
half = (inst.instruction & 0x10) != 0;
+ constraint (current_it_mask && current_it_mask != 0x10, BAD_BRANCH);
+ constraint (inst.operands[0].immisreg,
+ _("instruction requires register index"));
constraint (inst.operands[0].imm == 15,
_("PC is not a valid index register"));
constraint (!half && inst.operands[0].shifted,
_("instruction does not allow shifted index"));
- constraint (half && !inst.operands[0].shifted,
- _("instruction requires shifted index"));
inst.instruction |= (inst.operands[0].reg << 16) | inst.operands[0].imm;
}
symbolS *sym;
int offset;
+#ifdef OBJ_ELF
+ /* The size of the instruction is unknown, so tie the debug info to the
+ start of the instruction. */
+ dwarf2_emit_insn (0);
+#endif
+
switch (inst.reloc.exp.X_op)
{
case O_symbol:
to = frag_var (rs_machine_dependent, INSN_SIZE, THUMB_SIZE,
inst.relax, sym, offset, NULL/*offset, opcode*/);
md_number_to_chars (to, inst.instruction, THUMB_SIZE);
-
-#ifdef OBJ_ELF
- dwarf2_emit_insn (INSN_SIZE);
-#endif
}
/* Write a 32-bit thumb instruction to buf. */
OT_cinfix3, /* Instruction takes a conditional infix,
beginning at character index 3. (In
unified mode, it becomes a suffix.) */
+ OT_cinfix3_legacy, /* Legacy instruction takes a conditional infix at
+ character index 3, even in unified mode. Used for
+ legacy instructions where suffix and infix forms
+ may be ambiguous. */
OT_csuf_or_in3, /* Instruction takes either a conditional
- suffix or an infix at character index 3.
- (In unified mode, a suffix only. */
+ suffix or an infix at character index 3. */
OT_odd_infix_unc, /* This is the unconditional variant of an
instruction that takes a conditional infix
at an unusual position. In unified mode,
char *affix;
const struct asm_opcode *opcode;
const struct asm_cond *cond;
+ char save[2];
/* Scan up to the end of the mnemonic, which must end in white space,
'.' (in unified mode only), or end of string. */
/* step CE */
switch (opcode->tag)
{
+ case OT_cinfix3_legacy:
+ /* Ignore conditional suffixes matched on infix only mnemonics. */
+ break;
+
case OT_cinfix3:
case OT_odd_infix_unc:
- /* Some mnemonics are ambiguous between infix and suffix
- conditions. Disambiguate based on assembly syntax. */
if (!unified_syntax)
- break;
+ return 0;
/* else fall through */
case OT_csuffix:
case OT_unconditional:
case OT_unconditionalF:
- /* delayed diagnostic */
- inst.error = BAD_COND;
- inst.cond = COND_ALWAYS;
+ if (thumb_mode)
+ {
+ inst.cond = cond->value;
+ }
+ else
+ {
+ /* delayed diagnostic */
+ inst.error = BAD_COND;
+ inst.cond = COND_ALWAYS;
+ }
return opcode;
default:
/* Look for infixed mnemonic in the usual position. */
affix = base + 3;
cond = hash_find_n (arm_cond_hsh, affix, 2);
- if (cond)
- {
- char save[2];
- memcpy (save, affix, 2);
- memmove (affix, affix + 2, (end - affix) - 2);
- opcode = hash_find_n (arm_ops_hsh, base, (end - base) - 2);
- memmove (affix + 2, affix, (end - affix) - 2);
- memcpy (affix, save, 2);
- }
- if (opcode && (opcode->tag == OT_cinfix3 || opcode->tag == OT_csuf_or_in3))
+ if (!cond)
+ return 0;
+
+ memcpy (save, affix, 2);
+ memmove (affix, affix + 2, (end - affix) - 2);
+ opcode = hash_find_n (arm_ops_hsh, base, (end - base) - 2);
+ memmove (affix + 2, affix, (end - affix) - 2);
+ memcpy (affix, save, 2);
+
+ if (opcode && (opcode->tag == OT_cinfix3 || opcode->tag == OT_csuf_or_in3
+ || opcode->tag == OT_cinfix3_legacy))
{
/* step CM */
- if (unified_syntax)
+ if (unified_syntax && opcode->tag == OT_cinfix3)
as_warn (_("conditional infixes are deprecated in unified syntax"));
inst.cond = cond->value;
if (thumb_mode)
{
- unsigned long variant;
+ arm_feature_set variant;
variant = cpu_variant;
/* Only allow coprocessor instructions on Thumb-2 capable devices. */
- if ((variant & ARM_EXT_V6T2) == 0)
- variant &= ARM_ANY;
+ if (!ARM_CPU_HAS_FEATURE (variant, arm_arch_t2))
+ ARM_CLEAR_FEATURE (variant, variant, fpu_any_hard);
/* Check that this instruction is supported for this CPU. */
- if (thumb_mode == 1 && (opcode->tvariant & variant) == 0)
+ if (!opcode->tvariant
+ || (thumb_mode == 1
+ && !ARM_CPU_HAS_FEATURE (variant, *opcode->tvariant)))
{
as_bad (_("selected processor does not support `%s'"), str);
return;
{
int cond;
cond = current_cc ^ ((current_it_mask >> 4) & 1) ^ 1;
- if (cond != inst.cond)
+ current_it_mask <<= 1;
+ current_it_mask &= 0x1f;
+ /* The BKPT instruction is unconditional even in an IT block. */
+ if (!inst.error
+ && cond != inst.cond && opcode->tencode != do_t_bkpt)
{
as_bad (_("incorrect condition in IT block"));
return;
}
- current_it_mask <<= 1;
- current_it_mask &= 0x1f;
}
else if (inst.cond != COND_ALWAYS && opcode->tencode != do_t_branch)
{
return;
}
}
+ ARM_MERGE_FEATURE_SETS (thumb_arch_used, thumb_arch_used,
+ *opcode->tvariant);
+ /* Many Thumb-2 instructions also have Thumb-1 variants, so explicitly
+ set those bits when Thumb-2 32-bit instructions are seen, i.e.
+ anything other than bl/blx.
+ This is overly pessimistic for relaxable instructions. */
+ if ((inst.size == 4 && (inst.instruction & 0xf800e800) != 0xf000e800)
+ || inst.relax)
+ ARM_MERGE_FEATURE_SETS (thumb_arch_used, thumb_arch_used,
+ arm_ext_v6t2);
}
else
{
/* Check that this instruction is supported for this CPU. */
- if ((opcode->avariant & cpu_variant) == 0)
+ if (!opcode->avariant ||
+ !ARM_CPU_HAS_FEATURE (cpu_variant, *opcode->avariant))
{
as_bad (_("selected processor does not support `%s'"), str);
return;
inst.size = INSN_SIZE;
if (!parse_operands (p, opcode->operands))
opcode->aencode ();
+ /* Arm mode bx is marked as both v4T and v5 because it's still required
+ on a hypothetical non-thumb v5 core. */
+ if (ARM_CPU_HAS_FEATURE (*opcode->avariant, arm_ext_v4t)
+ || ARM_CPU_HAS_FEATURE (*opcode->avariant, arm_ext_v5))
+ ARM_MERGE_FEATURE_SETS (arm_arch_used, arm_arch_used, arm_ext_v4t);
+ else
+ ARM_MERGE_FEATURE_SETS (arm_arch_used, arm_arch_used,
+ *opcode->avariant);
}
output_inst (str);
}
{"cxsf", PSR_c | PSR_x | PSR_s | PSR_f},
};
+/* Table of V7M psr names. */
+static const struct asm_psr v7m_psrs[] =
+{
+ {"apsr", 0 },
+ {"iapsr", 1 },
+ {"eapsr", 2 },
+ {"psr", 3 },
+ {"ipsr", 5 },
+ {"epsr", 6 },
+ {"iepsr", 7 },
+ {"msp", 8 },
+ {"psp", 9 },
+ {"primask", 16},
+ {"basepri", 17},
+ {"basepri_max", 18},
+ {"faultmask", 19},
+ {"control", 20}
+};
+
/* Table of all shift-in-operand names. */
static const struct asm_shift_name shift_names [] =
{
{"al", 0xe}
};
+static struct asm_barrier_opt barrier_opt_names[] =
+{
+ { "sy", 0xf },
+ { "un", 0x7 },
+ { "st", 0xe },
+ { "unst", 0x6 }
+};
+
/* Table of ARM-format instructions. */
/* Macros for gluing together operand strings. N.B. In all cases
TxCM(m1,m2, aop, T_MNEM_##top, nops, ops, ae, te)
/* Mnemonic that cannot be conditionalized. The ARM condition-code
- field is still 0xE. */
+ field is still 0xE. Many of the Thumb variants can be executed
+ conditionally, so this is checked separately. */
#define TUE(mnem, op, top, nops, ops, ae, te) \
{ #mnem, OPS##nops ops, OT_unconditional, 0x##op, 0x##top, ARM_VARIANT, \
THUMB_VARIANT, do_##ae, do_##te }
#define C3(mnem, op, nops, ops, ae) \
{ #mnem, OPS##nops ops, OT_cinfix3, 0x##op, 0x0, ARM_VARIANT, 0, do_##ae, NULL }
+/* Legacy mnemonics that always have conditional infix after the third
+ character. */
+#define CL(mnem, op, nops, ops, ae) \
+ { #mnem, OPS##nops ops, OT_cinfix3_legacy, \
+ 0x##op, 0x0, ARM_VARIANT, 0, do_##ae, NULL }
+
/* Coprocessor instructions. Isomorphic between Arm and Thumb-2. */
#define cCE(mnem, op, nops, ops, ae) \
{ #mnem, OPS##nops ops, OT_csuffix, 0x##op, 0xe##op, ARM_VARIANT, ARM_VARIANT, do_##ae, do_##ae }
-#define cC3(mnem, op, nops, ops, ae) \
- { #mnem, OPS##nops ops, OT_cinfix3, 0x##op, 0xe##op, ARM_VARIANT, ARM_VARIANT, do_##ae, do_##ae }
+/* Legacy coprocessor instructions where conditional infix and conditional
+ suffix are ambiguous. For consistency this includes all FPA instructions,
+ not just the potentially ambiguous ones. */
+#define cCL(mnem, op, nops, ops, ae) \
+ { #mnem, OPS##nops ops, OT_cinfix3_legacy, \
+ 0x##op, 0xe##op, ARM_VARIANT, ARM_VARIANT, do_##ae, do_##ae }
+
+/* Coprocessor, takes either a suffix or a position-3 infix
+ (for an FPA corner case). */
+#define C3E(mnem, op, nops, ops, ae) \
+ { #mnem, OPS##nops ops, OT_csuf_or_in3, \
+ 0x##op, 0xe##op, ARM_VARIANT, ARM_VARIANT, do_##ae, do_##ae }
#define xCM_(m1, m2, m3, op, nops, ops, ae) \
{ #m1 #m2 #m3, OPS##nops ops, \
/* Thumb-only, unconditional. */
#define UT(mnem, op, nops, ops, te) TUE(mnem, 0, op, nops, ops, 0, te)
-/* ARM-only, takes either a suffix or a position-3 infix
- (for an FPA corner case). */
-#define C3E(mnem, op, nops, ops, ae) \
- { #mnem, OPS##nops ops, OT_csuf_or_in3, 0x##op, 0, ARM_VARIANT, 0, do_##ae, 0 }
-
static const struct asm_opcode insns[] =
{
-#define ARM_VARIANT ARM_EXT_V1 /* Core ARM Instructions. */
-#define THUMB_VARIANT ARM_EXT_V4T
+#define ARM_VARIANT &arm_ext_v1 /* Core ARM Instructions. */
+#define THUMB_VARIANT &arm_ext_v4t
tCE(and, 0000000, and, 3, (RR, oRR, SH), arit, t_arit3c),
tC3(ands, 0100000, ands, 3, (RR, oRR, SH), arit, t_arit3c),
tCE(eor, 0200000, eor, 3, (RR, oRR, SH), arit, t_arit3c),
have Thumb equivalents. */
tCE(tst, 1100000, tst, 2, (RR, SH), cmp, t_mvn_tst),
tC3(tsts, 1100000, tst, 2, (RR, SH), cmp, t_mvn_tst),
- C3(tstp, 110f000, 2, (RR, SH), cmp),
+ CL(tstp, 110f000, 2, (RR, SH), cmp),
tCE(cmp, 1500000, cmp, 2, (RR, SH), cmp, t_mov_cmp),
tC3(cmps, 1500000, cmp, 2, (RR, SH), cmp, t_mov_cmp),
- C3(cmpp, 150f000, 2, (RR, SH), cmp),
+ CL(cmpp, 150f000, 2, (RR, SH), cmp),
tCE(cmn, 1700000, cmn, 2, (RR, SH), cmp, t_mvn_tst),
tC3(cmns, 1700000, cmn, 2, (RR, SH), cmp, t_mvn_tst),
- C3(cmnp, 170f000, 2, (RR, SH), cmp),
+ CL(cmnp, 170f000, 2, (RR, SH), cmp),
tCE(mov, 1a00000, mov, 2, (RR, SH), mov, t_mov_cmp),
tC3(movs, 1b00000, movs, 2, (RR, SH), mov, t_mov_cmp),
tCE(str, 4000000, str, 2, (RR, ADDR), ldst, t_ldst),
tC3(strb, 4400000, strb, 2, (RR, ADDR), ldst, t_ldst),
+ tCE(stm, 8800000, stmia, 2, (RRw, REGLST), ldmstm, t_ldmstm),
tC3(stmia, 8800000, stmia, 2, (RRw, REGLST), ldmstm, t_ldmstm),
tC3(stmea, 8800000, stmia, 2, (RRw, REGLST), ldmstm, t_ldmstm),
+ tCE(ldm, 8900000, ldmia, 2, (RRw, REGLST), ldmstm, t_ldmstm),
tC3(ldmia, 8900000, ldmia, 2, (RRw, REGLST), ldmstm, t_ldmstm),
tC3(ldmfd, 8900000, ldmia, 2, (RRw, REGLST), ldmstm, t_ldmstm),
TCE(swi, f000000, df00, 1, (EXPi), swi, t_swi),
+ TCE(svc, f000000, df00, 1, (EXPi), swi, t_swi),
tCE(b, a000000, b, 1, (EXPr), branch, t_branch),
- TCE(bl, b000000, f000f800, 1, (EXPr), branch, t_branch23),
+ TCE(bl, b000000, f000f800, 1, (EXPr), bl, t_branch23),
/* Pseudo ops. */
tCE(adr, 28f0000, adr, 2, (RR, EXP), adr, t_adr),
tCE(pop, 8bd0000, pop, 1, (REGLST), push_pop, t_push_pop),
#undef THUMB_VARIANT
-#define THUMB_VARIANT ARM_EXT_V6
+#define THUMB_VARIANT &arm_ext_v6
TCE(cpy, 1a00000, 4600, 2, (RR, RR), rd_rm, t_cpy),
/* V1 instructions with no Thumb analogue prior to V6T2. */
#undef THUMB_VARIANT
-#define THUMB_VARIANT ARM_EXT_V6T2
+#define THUMB_VARIANT &arm_ext_v6t2
TCE(rsb, 0600000, ebc00000, 3, (RR, oRR, SH), arit, t_rsb),
TC3(rsbs, 0700000, ebd00000, 3, (RR, oRR, SH), arit, t_rsb),
TCE(teq, 1300000, ea900f00, 2, (RR, SH), cmp, t_mvn_tst),
TC3(teqs, 1300000, ea900f00, 2, (RR, SH), cmp, t_mvn_tst),
- C3(teqp, 130f000, 2, (RR, SH), cmp),
+ CL(teqp, 130f000, 2, (RR, SH), cmp),
TC3(ldrt, 4300000, f8500e00, 2, (RR, ADDR), ldstt, t_ldstt),
- TC3(ldrbt, 4700000, f8300e00, 2, (RR, ADDR), ldstt, t_ldstt),
+ TC3(ldrbt, 4700000, f8100e00, 2, (RR, ADDR), ldstt, t_ldstt),
TC3(strt, 4200000, f8400e00, 2, (RR, ADDR), ldstt, t_ldstt),
- TC3(strbt, 4600000, f8200e00, 2, (RR, ADDR), ldstt, t_ldstt),
+ TC3(strbt, 4600000, f8000e00, 2, (RR, ADDR), ldstt, t_ldstt),
TC3(stmdb, 9000000, e9000000, 2, (RRw, REGLST), ldmstm, t_ldmstm),
TC3(stmfd, 9000000, e9000000, 2, (RRw, REGLST), ldmstm, t_ldmstm),
C3(ldmfa, 8100000, 2, (RRw, REGLST), ldmstm),
#undef ARM_VARIANT
-#define ARM_VARIANT ARM_EXT_V2 /* ARM 2 - multiplies. */
+#define ARM_VARIANT &arm_ext_v2 /* ARM 2 - multiplies. */
#undef THUMB_VARIANT
-#define THUMB_VARIANT ARM_EXT_V4T
+#define THUMB_VARIANT &arm_ext_v4t
tCE(mul, 0000090, mul, 3, (RRnpc, RRnpc, oRR), mul, t_mul),
tC3(muls, 0100090, muls, 3, (RRnpc, RRnpc, oRR), mul, t_mul),
#undef THUMB_VARIANT
-#define THUMB_VARIANT ARM_EXT_V6T2
+#define THUMB_VARIANT &arm_ext_v6t2
TCE(mla, 0200090, fb000000, 4, (RRnpc, RRnpc, RRnpc, RRnpc), mlas, t_mla),
C3(mlas, 0300090, 4, (RRnpc, RRnpc, RRnpc, RRnpc), mlas),
TCE(mrc, e100010, ee100010, 6, (RCP, I7b, RR, RCN, RCN, oI7b), co_reg, co_reg),
#undef ARM_VARIANT
-#define ARM_VARIANT ARM_EXT_V2S /* ARM 3 - swp instructions. */
+#define ARM_VARIANT &arm_ext_v2s /* ARM 3 - swp instructions. */
CE(swp, 1000090, 3, (RRnpc, RRnpc, RRnpcb), rd_rm_rn),
C3(swpb, 1400090, 3, (RRnpc, RRnpc, RRnpcb), rd_rm_rn),
#undef ARM_VARIANT
-#define ARM_VARIANT ARM_EXT_V3 /* ARM 6 Status register instructions. */
+#define ARM_VARIANT &arm_ext_v3 /* ARM 6 Status register instructions. */
TCE(mrs, 10f0000, f3ef8000, 2, (RR, PSR), mrs, t_mrs),
TCE(msr, 120f000, f3808000, 2, (PSR, RR_EXi), msr, t_msr),
#undef ARM_VARIANT
-#define ARM_VARIANT ARM_EXT_V3M /* ARM 7M long multiplies. */
+#define ARM_VARIANT &arm_ext_v3m /* ARM 7M long multiplies. */
TCE(smull, 0c00090, fb800000, 4, (RRnpc, RRnpc, RRnpc, RRnpc), mull, t_mull),
CM(smull,s, 0d00090, 4, (RRnpc, RRnpc, RRnpc, RRnpc), mull),
TCE(umull, 0800090, fba00000, 4, (RRnpc, RRnpc, RRnpc, RRnpc), mull, t_mull),
CM(umlal,s, 0b00090, 4, (RRnpc, RRnpc, RRnpc, RRnpc), mull),
#undef ARM_VARIANT
-#define ARM_VARIANT ARM_EXT_V4 /* ARM Architecture 4. */
+#define ARM_VARIANT &arm_ext_v4 /* ARM Architecture 4. */
#undef THUMB_VARIANT
-#define THUMB_VARIANT ARM_EXT_V4T
+#define THUMB_VARIANT &arm_ext_v4t
tC3(ldrh, 01000b0, ldrh, 2, (RR, ADDR), ldstv4, t_ldst),
tC3(strh, 00000b0, strh, 2, (RR, ADDR), ldstv4, t_ldst),
tC3(ldrsh, 01000f0, ldrsh, 2, (RR, ADDR), ldstv4, t_ldst),
tCM(ld,sb, 01000d0, ldrsb, 2, (RR, ADDR), ldstv4, t_ldst),
#undef ARM_VARIANT
-#define ARM_VARIANT ARM_EXT_V4T|ARM_EXT_V5
+#define ARM_VARIANT &arm_ext_v4t_5
/* ARM Architecture 4T. */
/* Note: bx (and blx) are required on V5, even if the processor does
not support Thumb. */
TCE(bx, 12fff10, 4700, 1, (RR), bx, t_bx),
#undef ARM_VARIANT
-#define ARM_VARIANT ARM_EXT_V5 /* ARM Architecture 5T. */
+#define ARM_VARIANT &arm_ext_v5 /* ARM Architecture 5T. */
#undef THUMB_VARIANT
-#define THUMB_VARIANT ARM_EXT_V5T
+#define THUMB_VARIANT &arm_ext_v5t
/* Note: blx has 2 variants; the .value coded here is for
BLX(2). Only this variant has conditional execution. */
TCE(blx, 12fff30, 4780, 1, (RR_EXr), blx, t_blx),
TUE(bkpt, 1200070, be00, 1, (oIffffb), bkpt, t_bkpt),
#undef THUMB_VARIANT
-#define THUMB_VARIANT ARM_EXT_V6T2
+#define THUMB_VARIANT &arm_ext_v6t2
TCE(clz, 16f0f10, fab0f080, 2, (RRnpc, RRnpc), rd_rm, t_clz),
TUF(ldc2, c100000, fc100000, 3, (RCP, RCN, ADDR), lstc, lstc),
TUF(ldc2l, c500000, fc500000, 3, (RCP, RCN, ADDR), lstc, lstc),
TUF(mrc2, e100010, fe100010, 6, (RCP, I7b, RR, RCN, RCN, oI7b), co_reg, co_reg),
#undef ARM_VARIANT
-#define ARM_VARIANT ARM_EXT_V5ExP /* ARM Architecture 5TExP. */
+#define ARM_VARIANT &arm_ext_v5exp /* ARM Architecture 5TExP. */
TCE(smlabb, 1000080, fb100000, 4, (RRnpc, RRnpc, RRnpc, RRnpc), smla, t_mla),
TCE(smlatb, 10000a0, fb100020, 4, (RRnpc, RRnpc, RRnpc, RRnpc), smla, t_mla),
TCE(smlabt, 10000c0, fb100010, 4, (RRnpc, RRnpc, RRnpc, RRnpc), smla, t_mla),
TCE(qdsub, 1600050, fa80f0b0, 3, (RRnpc, RRnpc, RRnpc), rd_rm_rn, rd_rm_rn),
#undef ARM_VARIANT
-#define ARM_VARIANT ARM_EXT_V5E /* ARM Architecture 5TE. */
+#define ARM_VARIANT &arm_ext_v5e /* ARM Architecture 5TE. */
TUF(pld, 450f000, f810f000, 1, (ADDR), pld, t_pld),
TC3(ldrd, 00000d0, e9500000, 3, (RRnpc, oRRnpc, ADDR), ldrd, t_ldstd),
TC3(strd, 00000f0, e9400000, 3, (RRnpc, oRRnpc, ADDR), ldrd, t_ldstd),
TCE(mrrc, c500000, ec500000, 5, (RCP, I15b, RRnpc, RRnpc, RCN), co_reg2c, co_reg2c),
#undef ARM_VARIANT
-#define ARM_VARIANT ARM_EXT_V5J /* ARM Architecture 5TEJ. */
+#define ARM_VARIANT &arm_ext_v5j /* ARM Architecture 5TEJ. */
TCE(bxj, 12fff20, f3c08f00, 1, (RR), bxj, t_bxj),
#undef ARM_VARIANT
-#define ARM_VARIANT ARM_EXT_V6 /* ARM V6. */
+#define ARM_VARIANT &arm_ext_v6 /* ARM V6. */
#undef THUMB_VARIANT
-#define THUMB_VARIANT ARM_EXT_V6
+#define THUMB_VARIANT &arm_ext_v6
TUF(cpsie, 1080000, b660, 2, (CPSF, oI31b), cpsi, t_cpsi),
TUF(cpsid, 10c0000, b670, 2, (CPSF, oI31b), cpsi, t_cpsi),
tCE(rev, 6bf0f30, rev, 2, (RRnpc, RRnpc), rd_rm, t_rev),
TUF(setend, 1010000, b650, 1, (ENDI), setend, t_setend),
#undef THUMB_VARIANT
-#define THUMB_VARIANT ARM_EXT_V6T2
- TUF(cps, 1020000, f3af8100, 1, (I31b), imm0, imm0),
+#define THUMB_VARIANT &arm_ext_v6t2
TCE(ldrex, 1900f9f, e8500f00, 2, (RRnpc, ADDR), ldrex, t_ldrex),
TUF(mcrr2, c400000, fc400000, 5, (RCP, I15b, RRnpc, RRnpc, RCN), co_reg2c, co_reg2c),
TUF(mrrc2, c500000, fc500000, 5, (RCP, I15b, RRnpc, RRnpc, RCN), co_reg2c, co_reg2c),
+
+ TCE(ssat, 6a00010, f3000000, 4, (RRnpc, I32, RRnpc, oSHllar),ssat, t_ssat),
+ TCE(usat, 6e00010, f3800000, 4, (RRnpc, I31, RRnpc, oSHllar),usat, t_usat),
+
+/* ARM V6 not included in V7M (eg. integer SIMD). */
+#undef THUMB_VARIANT
+#define THUMB_VARIANT &arm_ext_v6_notm
+ TUF(cps, 1020000, f3af8100, 1, (I31b), imm0, t_cps),
TCE(pkhbt, 6800010, eac00000, 4, (RRnpc, RRnpc, RRnpc, oSHll), pkhbt, t_pkhbt),
TCE(pkhtb, 6800050, eac00020, 4, (RRnpc, RRnpc, RRnpc, oSHar), pkhtb, t_pkhtb),
TCE(qadd16, 6200f10, fa90f010, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
TCE(uxtab16, 6c00070, fa30f080, 4, (RRnpc, RRnpc, RRnpc, oROR), sxtah, t_sxtah),
TCE(uxtab, 6e00070, fa50f080, 4, (RRnpc, RRnpc, RRnpc, oROR), sxtah, t_sxtah),
TCE(uxtb16, 6cf0070, fa3ff080, 3, (RRnpc, RRnpc, oROR), sxth, t_sxth),
- TCE(sel, 68000b0, faa0f080, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
+ TCE(sel, 6800fb0, faa0f080, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
TCE(smlad, 7000010, fb200000, 4, (RRnpc, RRnpc, RRnpc, RRnpc),smla, t_mla),
TCE(smladx, 7000030, fb200010, 4, (RRnpc, RRnpc, RRnpc, RRnpc),smla, t_mla),
TCE(smlald, 7400010, fbc000c0, 4, (RRnpc, RRnpc, RRnpc, RRnpc),smlal,t_mlal),
UF(srsib, 9cd0500, 1, (I31w), srs),
UF(srsda, 84d0500, 1, (I31w), srs),
TUF(srsdb, 94d0500, e800c000, 1, (I31w), srs, srs),
- TCE(ssat, 6a00010, f3000000, 4, (RRnpc, I32, RRnpc, oSHllar),ssat, t_ssat),
TCE(ssat16, 6a00f30, f3200000, 3, (RRnpc, I16, RRnpc), ssat16, t_ssat16),
TCE(strex, 1800f90, e8400000, 3, (RRnpc, RRnpc, ADDR), strex, t_strex),
TCE(umaal, 0400090, fbe00060, 4, (RRnpc, RRnpc, RRnpc, RRnpc),smlal, t_mlal),
TCE(usad8, 780f010, fb70f000, 3, (RRnpc, RRnpc, RRnpc), smul, t_simd),
TCE(usada8, 7800010, fb700000, 4, (RRnpc, RRnpc, RRnpc, RRnpc),smla, t_mla),
- TCE(usat, 6e00010, f3800000, 4, (RRnpc, I31, RRnpc, oSHllar),usat, t_usat),
TCE(usat16, 6e00f30, f3a00000, 3, (RRnpc, I15, RRnpc), usat16, t_usat16),
#undef ARM_VARIANT
-#define ARM_VARIANT ARM_EXT_V6K
+#define ARM_VARIANT &arm_ext_v6k
#undef THUMB_VARIANT
-#define THUMB_VARIANT ARM_EXT_V6K
+#define THUMB_VARIANT &arm_ext_v6k
tCE(yield, 320f001, yield, 0, (), noargs, t_hint),
tCE(wfe, 320f002, wfe, 0, (), noargs, t_hint),
tCE(wfi, 320f003, wfi, 0, (), noargs, t_hint),
tCE(sev, 320f004, sev, 0, (), noargs, t_hint),
#undef THUMB_VARIANT
-#define THUMB_VARIANT ARM_EXT_V6T2
+#define THUMB_VARIANT &arm_ext_v6_notm
+ TCE(ldrexd, 1b00f9f, e8d0007f, 3, (RRnpc, oRRnpc, RRnpcb), ldrexd, t_ldrexd),
+ TCE(strexd, 1a00f90, e8c00070, 4, (RRnpc, RRnpc, oRRnpc, RRnpcb), strexd, t_strexd),
+
+#undef THUMB_VARIANT
+#define THUMB_VARIANT &arm_ext_v6t2
TCE(ldrexb, 1d00f9f, e8d00f4f, 2, (RRnpc, RRnpcb), rd_rn, rd_rn),
TCE(ldrexh, 1f00f9f, e8d00f5f, 2, (RRnpc, RRnpcb), rd_rn, rd_rn),
- TCE(ldrexd, 1b00f9f, e8d0007f, 3, (RRnpc, oRRnpc, RRnpcb), ldrexd, t_ldrexd),
TCE(strexb, 1c00f90, e8c00f40, 3, (RRnpc, RRnpc, ADDR), strex, rm_rd_rn),
TCE(strexh, 1e00f90, e8c00f50, 3, (RRnpc, RRnpc, ADDR), strex, rm_rd_rn),
- TCE(strexd, 1a00f90, e8c00070, 4, (RRnpc, RRnpc, oRRnpc, RRnpcb), strexd, t_strexd),
TUF(clrex, 57ff01f, f3bf8f2f, 0, (), noargs, noargs),
#undef ARM_VARIANT
-#define ARM_VARIANT ARM_EXT_V6Z
+#define ARM_VARIANT &arm_ext_v6z
TCE(smc, 1600070, f7f08000, 1, (EXPi), smc, t_smc),
#undef ARM_VARIANT
-#define ARM_VARIANT ARM_EXT_V6T2
+#define ARM_VARIANT &arm_ext_v6t2
TCE(bfc, 7c0001f, f36f0000, 3, (RRnpc, I31, I32), bfc, t_bfc),
TCE(bfi, 7c00010, f3600000, 4, (RRnpc, RRnpc_I0, I31, I32), bfi, t_bfi),
TCE(sbfx, 7a00050, f3400000, 4, (RR, RR, I31, I32), bfx, t_bfx),
/* Thumb2 only instructions. */
#undef ARM_VARIANT
-#define ARM_VARIANT 0
+#define ARM_VARIANT NULL
TCE(addw, 0, f2000000, 3, (RR, RR, EXPi), 0, t_add_sub_w),
TCE(subw, 0, f2a00000, 3, (RR, RR, EXPi), 0, t_add_sub_w),
TCE(tbb, 0, e8d0f000, 1, (TB), 0, t_tb),
TCE(tbh, 0, e8d0f010, 1, (TB), 0, t_tb),
+ /* Thumb-2 hardware division instructions (R and M profiles only). */
+#undef THUMB_VARIANT
+#define THUMB_VARIANT &arm_ext_div
+ TCE(sdiv, 0, fb90f0f0, 3, (RR, oRR, RR), 0, t_div),
+ TCE(udiv, 0, fbb0f0f0, 3, (RR, oRR, RR), 0, t_div),
+
+ /* ARM V7 instructions. */
+#undef ARM_VARIANT
+#define ARM_VARIANT &arm_ext_v7
+#undef THUMB_VARIANT
+#define THUMB_VARIANT &arm_ext_v7
+ TUF(pli, 450f000, f910f000, 1, (ADDR), pli, t_pld),
+ TCE(dbg, 320f0f0, f3af80f0, 1, (I15), dbg, t_dbg),
+ TUF(dmb, 57ff050, f3bf8f50, 1, (oBARRIER), barrier, t_barrier),
+ TUF(dsb, 57ff040, f3bf8f40, 1, (oBARRIER), barrier, t_barrier),
+ TUF(isb, 57ff060, f3bf8f60, 1, (oBARRIER), barrier, t_barrier),
+
#undef ARM_VARIANT
-#define ARM_VARIANT FPU_FPA_EXT_V1 /* Core FPA instruction set (V1). */
+#define ARM_VARIANT &fpu_fpa_ext_v1 /* Core FPA instruction set (V1). */
cCE(wfs, e200110, 1, (RR), rd),
cCE(rfs, e300110, 1, (RR), rd),
cCE(wfc, e400110, 1, (RR), rd),
cCE(rfc, e500110, 1, (RR), rd),
- cC3(ldfs, c100100, 2, (RF, ADDR), rd_cpaddr),
- cC3(ldfd, c108100, 2, (RF, ADDR), rd_cpaddr),
- cC3(ldfe, c500100, 2, (RF, ADDR), rd_cpaddr),
- cC3(ldfp, c508100, 2, (RF, ADDR), rd_cpaddr),
-
- cC3(stfs, c000100, 2, (RF, ADDR), rd_cpaddr),
- cC3(stfd, c008100, 2, (RF, ADDR), rd_cpaddr),
- cC3(stfe, c400100, 2, (RF, ADDR), rd_cpaddr),
- cC3(stfp, c408100, 2, (RF, ADDR), rd_cpaddr),
-
- cC3(mvfs, e008100, 2, (RF, RF_IF), rd_rm),
- cC3(mvfsp, e008120, 2, (RF, RF_IF), rd_rm),
- cC3(mvfsm, e008140, 2, (RF, RF_IF), rd_rm),
- cC3(mvfsz, e008160, 2, (RF, RF_IF), rd_rm),
- cC3(mvfd, e008180, 2, (RF, RF_IF), rd_rm),
- cC3(mvfdp, e0081a0, 2, (RF, RF_IF), rd_rm),
- cC3(mvfdm, e0081c0, 2, (RF, RF_IF), rd_rm),
- cC3(mvfdz, e0081e0, 2, (RF, RF_IF), rd_rm),
- cC3(mvfe, e088100, 2, (RF, RF_IF), rd_rm),
- cC3(mvfep, e088120, 2, (RF, RF_IF), rd_rm),
- cC3(mvfem, e088140, 2, (RF, RF_IF), rd_rm),
- cC3(mvfez, e088160, 2, (RF, RF_IF), rd_rm),
-
- cC3(mnfs, e108100, 2, (RF, RF_IF), rd_rm),
- cC3(mnfsp, e108120, 2, (RF, RF_IF), rd_rm),
- cC3(mnfsm, e108140, 2, (RF, RF_IF), rd_rm),
- cC3(mnfsz, e108160, 2, (RF, RF_IF), rd_rm),
- cC3(mnfd, e108180, 2, (RF, RF_IF), rd_rm),
- cC3(mnfdp, e1081a0, 2, (RF, RF_IF), rd_rm),
- cC3(mnfdm, e1081c0, 2, (RF, RF_IF), rd_rm),
- cC3(mnfdz, e1081e0, 2, (RF, RF_IF), rd_rm),
- cC3(mnfe, e188100, 2, (RF, RF_IF), rd_rm),
- cC3(mnfep, e188120, 2, (RF, RF_IF), rd_rm),
- cC3(mnfem, e188140, 2, (RF, RF_IF), rd_rm),
- cC3(mnfez, e188160, 2, (RF, RF_IF), rd_rm),
-
- cC3(abss, e208100, 2, (RF, RF_IF), rd_rm),
- cC3(abssp, e208120, 2, (RF, RF_IF), rd_rm),
- cC3(abssm, e208140, 2, (RF, RF_IF), rd_rm),
- cC3(abssz, e208160, 2, (RF, RF_IF), rd_rm),
- cC3(absd, e208180, 2, (RF, RF_IF), rd_rm),
- cC3(absdp, e2081a0, 2, (RF, RF_IF), rd_rm),
- cC3(absdm, e2081c0, 2, (RF, RF_IF), rd_rm),
- cC3(absdz, e2081e0, 2, (RF, RF_IF), rd_rm),
- cC3(abse, e288100, 2, (RF, RF_IF), rd_rm),
- cC3(absep, e288120, 2, (RF, RF_IF), rd_rm),
- cC3(absem, e288140, 2, (RF, RF_IF), rd_rm),
- cC3(absez, e288160, 2, (RF, RF_IF), rd_rm),
-
- cC3(rnds, e308100, 2, (RF, RF_IF), rd_rm),
- cC3(rndsp, e308120, 2, (RF, RF_IF), rd_rm),
- cC3(rndsm, e308140, 2, (RF, RF_IF), rd_rm),
- cC3(rndsz, e308160, 2, (RF, RF_IF), rd_rm),
- cC3(rndd, e308180, 2, (RF, RF_IF), rd_rm),
- cC3(rnddp, e3081a0, 2, (RF, RF_IF), rd_rm),
- cC3(rnddm, e3081c0, 2, (RF, RF_IF), rd_rm),
- cC3(rnddz, e3081e0, 2, (RF, RF_IF), rd_rm),
- cC3(rnde, e388100, 2, (RF, RF_IF), rd_rm),
- cC3(rndep, e388120, 2, (RF, RF_IF), rd_rm),
- cC3(rndem, e388140, 2, (RF, RF_IF), rd_rm),
- cC3(rndez, e388160, 2, (RF, RF_IF), rd_rm),
-
- cC3(sqts, e408100, 2, (RF, RF_IF), rd_rm),
- cC3(sqtsp, e408120, 2, (RF, RF_IF), rd_rm),
- cC3(sqtsm, e408140, 2, (RF, RF_IF), rd_rm),
- cC3(sqtsz, e408160, 2, (RF, RF_IF), rd_rm),
- cC3(sqtd, e408180, 2, (RF, RF_IF), rd_rm),
- cC3(sqtdp, e4081a0, 2, (RF, RF_IF), rd_rm),
- cC3(sqtdm, e4081c0, 2, (RF, RF_IF), rd_rm),
- cC3(sqtdz, e4081e0, 2, (RF, RF_IF), rd_rm),
- cC3(sqte, e488100, 2, (RF, RF_IF), rd_rm),
- cC3(sqtep, e488120, 2, (RF, RF_IF), rd_rm),
- cC3(sqtem, e488140, 2, (RF, RF_IF), rd_rm),
- cC3(sqtez, e488160, 2, (RF, RF_IF), rd_rm),
-
- cC3(logs, e508100, 2, (RF, RF_IF), rd_rm),
- cC3(logsp, e508120, 2, (RF, RF_IF), rd_rm),
- cC3(logsm, e508140, 2, (RF, RF_IF), rd_rm),
- cC3(logsz, e508160, 2, (RF, RF_IF), rd_rm),
- cC3(logd, e508180, 2, (RF, RF_IF), rd_rm),
- cC3(logdp, e5081a0, 2, (RF, RF_IF), rd_rm),
- cC3(logdm, e5081c0, 2, (RF, RF_IF), rd_rm),
- cC3(logdz, e5081e0, 2, (RF, RF_IF), rd_rm),
- cC3(loge, e588100, 2, (RF, RF_IF), rd_rm),
- cC3(logep, e588120, 2, (RF, RF_IF), rd_rm),
- cC3(logem, e588140, 2, (RF, RF_IF), rd_rm),
- cC3(logez, e588160, 2, (RF, RF_IF), rd_rm),
-
- cC3(lgns, e608100, 2, (RF, RF_IF), rd_rm),
- cC3(lgnsp, e608120, 2, (RF, RF_IF), rd_rm),
- cC3(lgnsm, e608140, 2, (RF, RF_IF), rd_rm),
- cC3(lgnsz, e608160, 2, (RF, RF_IF), rd_rm),
- cC3(lgnd, e608180, 2, (RF, RF_IF), rd_rm),
- cC3(lgndp, e6081a0, 2, (RF, RF_IF), rd_rm),
- cC3(lgndm, e6081c0, 2, (RF, RF_IF), rd_rm),
- cC3(lgndz, e6081e0, 2, (RF, RF_IF), rd_rm),
- cC3(lgne, e688100, 2, (RF, RF_IF), rd_rm),
- cC3(lgnep, e688120, 2, (RF, RF_IF), rd_rm),
- cC3(lgnem, e688140, 2, (RF, RF_IF), rd_rm),
- cC3(lgnez, e688160, 2, (RF, RF_IF), rd_rm),
-
- cC3(exps, e708100, 2, (RF, RF_IF), rd_rm),
- cC3(expsp, e708120, 2, (RF, RF_IF), rd_rm),
- cC3(expsm, e708140, 2, (RF, RF_IF), rd_rm),
- cC3(expsz, e708160, 2, (RF, RF_IF), rd_rm),
- cC3(expd, e708180, 2, (RF, RF_IF), rd_rm),
- cC3(expdp, e7081a0, 2, (RF, RF_IF), rd_rm),
- cC3(expdm, e7081c0, 2, (RF, RF_IF), rd_rm),
- cC3(expdz, e7081e0, 2, (RF, RF_IF), rd_rm),
- cC3(expe, e788100, 2, (RF, RF_IF), rd_rm),
- cC3(expep, e788120, 2, (RF, RF_IF), rd_rm),
- cC3(expem, e788140, 2, (RF, RF_IF), rd_rm),
- cC3(expdz, e788160, 2, (RF, RF_IF), rd_rm),
-
- cC3(sins, e808100, 2, (RF, RF_IF), rd_rm),
- cC3(sinsp, e808120, 2, (RF, RF_IF), rd_rm),
- cC3(sinsm, e808140, 2, (RF, RF_IF), rd_rm),
- cC3(sinsz, e808160, 2, (RF, RF_IF), rd_rm),
- cC3(sind, e808180, 2, (RF, RF_IF), rd_rm),
- cC3(sindp, e8081a0, 2, (RF, RF_IF), rd_rm),
- cC3(sindm, e8081c0, 2, (RF, RF_IF), rd_rm),
- cC3(sindz, e8081e0, 2, (RF, RF_IF), rd_rm),
- cC3(sine, e888100, 2, (RF, RF_IF), rd_rm),
- cC3(sinep, e888120, 2, (RF, RF_IF), rd_rm),
- cC3(sinem, e888140, 2, (RF, RF_IF), rd_rm),
- cC3(sinez, e888160, 2, (RF, RF_IF), rd_rm),
-
- cC3(coss, e908100, 2, (RF, RF_IF), rd_rm),
- cC3(cossp, e908120, 2, (RF, RF_IF), rd_rm),
- cC3(cossm, e908140, 2, (RF, RF_IF), rd_rm),
- cC3(cossz, e908160, 2, (RF, RF_IF), rd_rm),
- cC3(cosd, e908180, 2, (RF, RF_IF), rd_rm),
- cC3(cosdp, e9081a0, 2, (RF, RF_IF), rd_rm),
- cC3(cosdm, e9081c0, 2, (RF, RF_IF), rd_rm),
- cC3(cosdz, e9081e0, 2, (RF, RF_IF), rd_rm),
- cC3(cose, e988100, 2, (RF, RF_IF), rd_rm),
- cC3(cosep, e988120, 2, (RF, RF_IF), rd_rm),
- cC3(cosem, e988140, 2, (RF, RF_IF), rd_rm),
- cC3(cosez, e988160, 2, (RF, RF_IF), rd_rm),
-
- cC3(tans, ea08100, 2, (RF, RF_IF), rd_rm),
- cC3(tansp, ea08120, 2, (RF, RF_IF), rd_rm),
- cC3(tansm, ea08140, 2, (RF, RF_IF), rd_rm),
- cC3(tansz, ea08160, 2, (RF, RF_IF), rd_rm),
- cC3(tand, ea08180, 2, (RF, RF_IF), rd_rm),
- cC3(tandp, ea081a0, 2, (RF, RF_IF), rd_rm),
- cC3(tandm, ea081c0, 2, (RF, RF_IF), rd_rm),
- cC3(tandz, ea081e0, 2, (RF, RF_IF), rd_rm),
- cC3(tane, ea88100, 2, (RF, RF_IF), rd_rm),
- cC3(tanep, ea88120, 2, (RF, RF_IF), rd_rm),
- cC3(tanem, ea88140, 2, (RF, RF_IF), rd_rm),
- cC3(tanez, ea88160, 2, (RF, RF_IF), rd_rm),
-
- cC3(asns, eb08100, 2, (RF, RF_IF), rd_rm),
- cC3(asnsp, eb08120, 2, (RF, RF_IF), rd_rm),
- cC3(asnsm, eb08140, 2, (RF, RF_IF), rd_rm),
- cC3(asnsz, eb08160, 2, (RF, RF_IF), rd_rm),
- cC3(asnd, eb08180, 2, (RF, RF_IF), rd_rm),
- cC3(asndp, eb081a0, 2, (RF, RF_IF), rd_rm),
- cC3(asndm, eb081c0, 2, (RF, RF_IF), rd_rm),
- cC3(asndz, eb081e0, 2, (RF, RF_IF), rd_rm),
- cC3(asne, eb88100, 2, (RF, RF_IF), rd_rm),
- cC3(asnep, eb88120, 2, (RF, RF_IF), rd_rm),
- cC3(asnem, eb88140, 2, (RF, RF_IF), rd_rm),
- cC3(asnez, eb88160, 2, (RF, RF_IF), rd_rm),
-
- cC3(acss, ec08100, 2, (RF, RF_IF), rd_rm),
- cC3(acssp, ec08120, 2, (RF, RF_IF), rd_rm),
- cC3(acssm, ec08140, 2, (RF, RF_IF), rd_rm),
- cC3(acssz, ec08160, 2, (RF, RF_IF), rd_rm),
- cC3(acsd, ec08180, 2, (RF, RF_IF), rd_rm),
- cC3(acsdp, ec081a0, 2, (RF, RF_IF), rd_rm),
- cC3(acsdm, ec081c0, 2, (RF, RF_IF), rd_rm),
- cC3(acsdz, ec081e0, 2, (RF, RF_IF), rd_rm),
- cC3(acse, ec88100, 2, (RF, RF_IF), rd_rm),
- cC3(acsep, ec88120, 2, (RF, RF_IF), rd_rm),
- cC3(acsem, ec88140, 2, (RF, RF_IF), rd_rm),
- cC3(acsez, ec88160, 2, (RF, RF_IF), rd_rm),
-
- cC3(atns, ed08100, 2, (RF, RF_IF), rd_rm),
- cC3(atnsp, ed08120, 2, (RF, RF_IF), rd_rm),
- cC3(atnsm, ed08140, 2, (RF, RF_IF), rd_rm),
- cC3(atnsz, ed08160, 2, (RF, RF_IF), rd_rm),
- cC3(atnd, ed08180, 2, (RF, RF_IF), rd_rm),
- cC3(atndp, ed081a0, 2, (RF, RF_IF), rd_rm),
- cC3(atndm, ed081c0, 2, (RF, RF_IF), rd_rm),
- cC3(atndz, ed081e0, 2, (RF, RF_IF), rd_rm),
- cC3(atne, ed88100, 2, (RF, RF_IF), rd_rm),
- cC3(atnep, ed88120, 2, (RF, RF_IF), rd_rm),
- cC3(atnem, ed88140, 2, (RF, RF_IF), rd_rm),
- cC3(atnez, ed88160, 2, (RF, RF_IF), rd_rm),
-
- cC3(urds, ee08100, 2, (RF, RF_IF), rd_rm),
- cC3(urdsp, ee08120, 2, (RF, RF_IF), rd_rm),
- cC3(urdsm, ee08140, 2, (RF, RF_IF), rd_rm),
- cC3(urdsz, ee08160, 2, (RF, RF_IF), rd_rm),
- cC3(urdd, ee08180, 2, (RF, RF_IF), rd_rm),
- cC3(urddp, ee081a0, 2, (RF, RF_IF), rd_rm),
- cC3(urddm, ee081c0, 2, (RF, RF_IF), rd_rm),
- cC3(urddz, ee081e0, 2, (RF, RF_IF), rd_rm),
- cC3(urde, ee88100, 2, (RF, RF_IF), rd_rm),
- cC3(urdep, ee88120, 2, (RF, RF_IF), rd_rm),
- cC3(urdem, ee88140, 2, (RF, RF_IF), rd_rm),
- cC3(urdez, ee88160, 2, (RF, RF_IF), rd_rm),
-
- cC3(nrms, ef08100, 2, (RF, RF_IF), rd_rm),
- cC3(nrmsp, ef08120, 2, (RF, RF_IF), rd_rm),
- cC3(nrmsm, ef08140, 2, (RF, RF_IF), rd_rm),
- cC3(nrmsz, ef08160, 2, (RF, RF_IF), rd_rm),
- cC3(nrmd, ef08180, 2, (RF, RF_IF), rd_rm),
- cC3(nrmdp, ef081a0, 2, (RF, RF_IF), rd_rm),
- cC3(nrmdm, ef081c0, 2, (RF, RF_IF), rd_rm),
- cC3(nrmdz, ef081e0, 2, (RF, RF_IF), rd_rm),
- cC3(nrme, ef88100, 2, (RF, RF_IF), rd_rm),
- cC3(nrmep, ef88120, 2, (RF, RF_IF), rd_rm),
- cC3(nrmem, ef88140, 2, (RF, RF_IF), rd_rm),
- cC3(nrmez, ef88160, 2, (RF, RF_IF), rd_rm),
-
- cC3(adfs, e000100, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(adfsp, e000120, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(adfsm, e000140, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(adfsz, e000160, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(adfd, e000180, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(adfdp, e0001a0, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(adfdm, e0001c0, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(adfdz, e0001e0, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(adfe, e080100, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(adfep, e080120, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(adfem, e080140, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(adfez, e080160, 3, (RF, RF, RF_IF), rd_rn_rm),
-
- cC3(sufs, e200100, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(sufsp, e200120, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(sufsm, e200140, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(sufsz, e200160, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(sufd, e200180, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(sufdp, e2001a0, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(sufdm, e2001c0, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(sufdz, e2001e0, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(sufe, e280100, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(sufep, e280120, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(sufem, e280140, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(sufez, e280160, 3, (RF, RF, RF_IF), rd_rn_rm),
-
- cC3(rsfs, e300100, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rsfsp, e300120, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rsfsm, e300140, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rsfsz, e300160, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rsfd, e300180, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rsfdp, e3001a0, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rsfdm, e3001c0, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rsfdz, e3001e0, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rsfe, e380100, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rsfep, e380120, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rsfem, e380140, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rsfez, e380160, 3, (RF, RF, RF_IF), rd_rn_rm),
-
- cC3(mufs, e100100, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(mufsp, e100120, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(mufsm, e100140, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(mufsz, e100160, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(mufd, e100180, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(mufdp, e1001a0, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(mufdm, e1001c0, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(mufdz, e1001e0, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(mufe, e180100, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(mufep, e180120, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(mufem, e180140, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(mufez, e180160, 3, (RF, RF, RF_IF), rd_rn_rm),
-
- cC3(dvfs, e400100, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(dvfsp, e400120, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(dvfsm, e400140, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(dvfsz, e400160, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(dvfd, e400180, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(dvfdp, e4001a0, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(dvfdm, e4001c0, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(dvfdz, e4001e0, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(dvfe, e480100, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(dvfep, e480120, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(dvfem, e480140, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(dvfez, e480160, 3, (RF, RF, RF_IF), rd_rn_rm),
-
- cC3(rdfs, e500100, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rdfsp, e500120, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rdfsm, e500140, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rdfsz, e500160, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rdfd, e500180, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rdfdp, e5001a0, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rdfdm, e5001c0, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rdfdz, e5001e0, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rdfe, e580100, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rdfep, e580120, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rdfem, e580140, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rdfez, e580160, 3, (RF, RF, RF_IF), rd_rn_rm),
-
- cC3(pows, e600100, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(powsp, e600120, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(powsm, e600140, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(powsz, e600160, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(powd, e600180, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(powdp, e6001a0, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(powdm, e6001c0, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(powdz, e6001e0, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(powe, e680100, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(powep, e680120, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(powem, e680140, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(powez, e680160, 3, (RF, RF, RF_IF), rd_rn_rm),
-
- cC3(rpws, e700100, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rpwsp, e700120, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rpwsm, e700140, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rpwsz, e700160, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rpwd, e700180, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rpwdp, e7001a0, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rpwdm, e7001c0, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rpwdz, e7001e0, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rpwe, e780100, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rpwep, e780120, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rpwem, e780140, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rpwez, e780160, 3, (RF, RF, RF_IF), rd_rn_rm),
-
- cC3(rmfs, e800100, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rmfsp, e800120, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rmfsm, e800140, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rmfsz, e800160, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rmfd, e800180, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rmfdp, e8001a0, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rmfdm, e8001c0, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rmfdz, e8001e0, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rmfe, e880100, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rmfep, e880120, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rmfem, e880140, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(rmfez, e880160, 3, (RF, RF, RF_IF), rd_rn_rm),
-
- cC3(fmls, e900100, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(fmlsp, e900120, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(fmlsm, e900140, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(fmlsz, e900160, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(fmld, e900180, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(fmldp, e9001a0, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(fmldm, e9001c0, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(fmldz, e9001e0, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(fmle, e980100, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(fmlep, e980120, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(fmlem, e980140, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(fmlez, e980160, 3, (RF, RF, RF_IF), rd_rn_rm),
-
- cC3(fdvs, ea00100, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(fdvsp, ea00120, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(fdvsm, ea00140, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(fdvsz, ea00160, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(fdvd, ea00180, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(fdvdp, ea001a0, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(fdvdm, ea001c0, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(fdvdz, ea001e0, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(fdve, ea80100, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(fdvep, ea80120, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(fdvem, ea80140, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(fdvez, ea80160, 3, (RF, RF, RF_IF), rd_rn_rm),
-
- cC3(frds, eb00100, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(frdsp, eb00120, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(frdsm, eb00140, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(frdsz, eb00160, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(frdd, eb00180, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(frddp, eb001a0, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(frddm, eb001c0, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(frddz, eb001e0, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(frde, eb80100, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(frdep, eb80120, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(frdem, eb80140, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(frdez, eb80160, 3, (RF, RF, RF_IF), rd_rn_rm),
-
- cC3(pols, ec00100, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(polsp, ec00120, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(polsm, ec00140, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(polsz, ec00160, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(pold, ec00180, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(poldp, ec001a0, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(poldm, ec001c0, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(poldz, ec001e0, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(pole, ec80100, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(polep, ec80120, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(polem, ec80140, 3, (RF, RF, RF_IF), rd_rn_rm),
- cC3(polez, ec80160, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(ldfs, c100100, 2, (RF, ADDR), rd_cpaddr),
+ cCL(ldfd, c108100, 2, (RF, ADDR), rd_cpaddr),
+ cCL(ldfe, c500100, 2, (RF, ADDR), rd_cpaddr),
+ cCL(ldfp, c508100, 2, (RF, ADDR), rd_cpaddr),
+
+ cCL(stfs, c000100, 2, (RF, ADDR), rd_cpaddr),
+ cCL(stfd, c008100, 2, (RF, ADDR), rd_cpaddr),
+ cCL(stfe, c400100, 2, (RF, ADDR), rd_cpaddr),
+ cCL(stfp, c408100, 2, (RF, ADDR), rd_cpaddr),
+
+ cCL(mvfs, e008100, 2, (RF, RF_IF), rd_rm),
+ cCL(mvfsp, e008120, 2, (RF, RF_IF), rd_rm),
+ cCL(mvfsm, e008140, 2, (RF, RF_IF), rd_rm),
+ cCL(mvfsz, e008160, 2, (RF, RF_IF), rd_rm),
+ cCL(mvfd, e008180, 2, (RF, RF_IF), rd_rm),
+ cCL(mvfdp, e0081a0, 2, (RF, RF_IF), rd_rm),
+ cCL(mvfdm, e0081c0, 2, (RF, RF_IF), rd_rm),
+ cCL(mvfdz, e0081e0, 2, (RF, RF_IF), rd_rm),
+ cCL(mvfe, e088100, 2, (RF, RF_IF), rd_rm),
+ cCL(mvfep, e088120, 2, (RF, RF_IF), rd_rm),
+ cCL(mvfem, e088140, 2, (RF, RF_IF), rd_rm),
+ cCL(mvfez, e088160, 2, (RF, RF_IF), rd_rm),
+
+ cCL(mnfs, e108100, 2, (RF, RF_IF), rd_rm),
+ cCL(mnfsp, e108120, 2, (RF, RF_IF), rd_rm),
+ cCL(mnfsm, e108140, 2, (RF, RF_IF), rd_rm),
+ cCL(mnfsz, e108160, 2, (RF, RF_IF), rd_rm),
+ cCL(mnfd, e108180, 2, (RF, RF_IF), rd_rm),
+ cCL(mnfdp, e1081a0, 2, (RF, RF_IF), rd_rm),
+ cCL(mnfdm, e1081c0, 2, (RF, RF_IF), rd_rm),
+ cCL(mnfdz, e1081e0, 2, (RF, RF_IF), rd_rm),
+ cCL(mnfe, e188100, 2, (RF, RF_IF), rd_rm),
+ cCL(mnfep, e188120, 2, (RF, RF_IF), rd_rm),
+ cCL(mnfem, e188140, 2, (RF, RF_IF), rd_rm),
+ cCL(mnfez, e188160, 2, (RF, RF_IF), rd_rm),
+
+ cCL(abss, e208100, 2, (RF, RF_IF), rd_rm),
+ cCL(abssp, e208120, 2, (RF, RF_IF), rd_rm),
+ cCL(abssm, e208140, 2, (RF, RF_IF), rd_rm),
+ cCL(abssz, e208160, 2, (RF, RF_IF), rd_rm),
+ cCL(absd, e208180, 2, (RF, RF_IF), rd_rm),
+ cCL(absdp, e2081a0, 2, (RF, RF_IF), rd_rm),
+ cCL(absdm, e2081c0, 2, (RF, RF_IF), rd_rm),
+ cCL(absdz, e2081e0, 2, (RF, RF_IF), rd_rm),
+ cCL(abse, e288100, 2, (RF, RF_IF), rd_rm),
+ cCL(absep, e288120, 2, (RF, RF_IF), rd_rm),
+ cCL(absem, e288140, 2, (RF, RF_IF), rd_rm),
+ cCL(absez, e288160, 2, (RF, RF_IF), rd_rm),
+
+ cCL(rnds, e308100, 2, (RF, RF_IF), rd_rm),
+ cCL(rndsp, e308120, 2, (RF, RF_IF), rd_rm),
+ cCL(rndsm, e308140, 2, (RF, RF_IF), rd_rm),
+ cCL(rndsz, e308160, 2, (RF, RF_IF), rd_rm),
+ cCL(rndd, e308180, 2, (RF, RF_IF), rd_rm),
+ cCL(rnddp, e3081a0, 2, (RF, RF_IF), rd_rm),
+ cCL(rnddm, e3081c0, 2, (RF, RF_IF), rd_rm),
+ cCL(rnddz, e3081e0, 2, (RF, RF_IF), rd_rm),
+ cCL(rnde, e388100, 2, (RF, RF_IF), rd_rm),
+ cCL(rndep, e388120, 2, (RF, RF_IF), rd_rm),
+ cCL(rndem, e388140, 2, (RF, RF_IF), rd_rm),
+ cCL(rndez, e388160, 2, (RF, RF_IF), rd_rm),
+
+ cCL(sqts, e408100, 2, (RF, RF_IF), rd_rm),
+ cCL(sqtsp, e408120, 2, (RF, RF_IF), rd_rm),
+ cCL(sqtsm, e408140, 2, (RF, RF_IF), rd_rm),
+ cCL(sqtsz, e408160, 2, (RF, RF_IF), rd_rm),
+ cCL(sqtd, e408180, 2, (RF, RF_IF), rd_rm),
+ cCL(sqtdp, e4081a0, 2, (RF, RF_IF), rd_rm),
+ cCL(sqtdm, e4081c0, 2, (RF, RF_IF), rd_rm),
+ cCL(sqtdz, e4081e0, 2, (RF, RF_IF), rd_rm),
+ cCL(sqte, e488100, 2, (RF, RF_IF), rd_rm),
+ cCL(sqtep, e488120, 2, (RF, RF_IF), rd_rm),
+ cCL(sqtem, e488140, 2, (RF, RF_IF), rd_rm),
+ cCL(sqtez, e488160, 2, (RF, RF_IF), rd_rm),
+
+ cCL(logs, e508100, 2, (RF, RF_IF), rd_rm),
+ cCL(logsp, e508120, 2, (RF, RF_IF), rd_rm),
+ cCL(logsm, e508140, 2, (RF, RF_IF), rd_rm),
+ cCL(logsz, e508160, 2, (RF, RF_IF), rd_rm),
+ cCL(logd, e508180, 2, (RF, RF_IF), rd_rm),
+ cCL(logdp, e5081a0, 2, (RF, RF_IF), rd_rm),
+ cCL(logdm, e5081c0, 2, (RF, RF_IF), rd_rm),
+ cCL(logdz, e5081e0, 2, (RF, RF_IF), rd_rm),
+ cCL(loge, e588100, 2, (RF, RF_IF), rd_rm),
+ cCL(logep, e588120, 2, (RF, RF_IF), rd_rm),
+ cCL(logem, e588140, 2, (RF, RF_IF), rd_rm),
+ cCL(logez, e588160, 2, (RF, RF_IF), rd_rm),
+
+ cCL(lgns, e608100, 2, (RF, RF_IF), rd_rm),
+ cCL(lgnsp, e608120, 2, (RF, RF_IF), rd_rm),
+ cCL(lgnsm, e608140, 2, (RF, RF_IF), rd_rm),
+ cCL(lgnsz, e608160, 2, (RF, RF_IF), rd_rm),
+ cCL(lgnd, e608180, 2, (RF, RF_IF), rd_rm),
+ cCL(lgndp, e6081a0, 2, (RF, RF_IF), rd_rm),
+ cCL(lgndm, e6081c0, 2, (RF, RF_IF), rd_rm),
+ cCL(lgndz, e6081e0, 2, (RF, RF_IF), rd_rm),
+ cCL(lgne, e688100, 2, (RF, RF_IF), rd_rm),
+ cCL(lgnep, e688120, 2, (RF, RF_IF), rd_rm),
+ cCL(lgnem, e688140, 2, (RF, RF_IF), rd_rm),
+ cCL(lgnez, e688160, 2, (RF, RF_IF), rd_rm),
+
+ cCL(exps, e708100, 2, (RF, RF_IF), rd_rm),
+ cCL(expsp, e708120, 2, (RF, RF_IF), rd_rm),
+ cCL(expsm, e708140, 2, (RF, RF_IF), rd_rm),
+ cCL(expsz, e708160, 2, (RF, RF_IF), rd_rm),
+ cCL(expd, e708180, 2, (RF, RF_IF), rd_rm),
+ cCL(expdp, e7081a0, 2, (RF, RF_IF), rd_rm),
+ cCL(expdm, e7081c0, 2, (RF, RF_IF), rd_rm),
+ cCL(expdz, e7081e0, 2, (RF, RF_IF), rd_rm),
+ cCL(expe, e788100, 2, (RF, RF_IF), rd_rm),
+ cCL(expep, e788120, 2, (RF, RF_IF), rd_rm),
+ cCL(expem, e788140, 2, (RF, RF_IF), rd_rm),
 + cCL(expez, e788160, 2, (RF, RF_IF), rd_rm),
+
+ cCL(sins, e808100, 2, (RF, RF_IF), rd_rm),
+ cCL(sinsp, e808120, 2, (RF, RF_IF), rd_rm),
+ cCL(sinsm, e808140, 2, (RF, RF_IF), rd_rm),
+ cCL(sinsz, e808160, 2, (RF, RF_IF), rd_rm),
+ cCL(sind, e808180, 2, (RF, RF_IF), rd_rm),
+ cCL(sindp, e8081a0, 2, (RF, RF_IF), rd_rm),
+ cCL(sindm, e8081c0, 2, (RF, RF_IF), rd_rm),
+ cCL(sindz, e8081e0, 2, (RF, RF_IF), rd_rm),
+ cCL(sine, e888100, 2, (RF, RF_IF), rd_rm),
+ cCL(sinep, e888120, 2, (RF, RF_IF), rd_rm),
+ cCL(sinem, e888140, 2, (RF, RF_IF), rd_rm),
+ cCL(sinez, e888160, 2, (RF, RF_IF), rd_rm),
+
+ cCL(coss, e908100, 2, (RF, RF_IF), rd_rm),
+ cCL(cossp, e908120, 2, (RF, RF_IF), rd_rm),
+ cCL(cossm, e908140, 2, (RF, RF_IF), rd_rm),
+ cCL(cossz, e908160, 2, (RF, RF_IF), rd_rm),
+ cCL(cosd, e908180, 2, (RF, RF_IF), rd_rm),
+ cCL(cosdp, e9081a0, 2, (RF, RF_IF), rd_rm),
+ cCL(cosdm, e9081c0, 2, (RF, RF_IF), rd_rm),
+ cCL(cosdz, e9081e0, 2, (RF, RF_IF), rd_rm),
+ cCL(cose, e988100, 2, (RF, RF_IF), rd_rm),
+ cCL(cosep, e988120, 2, (RF, RF_IF), rd_rm),
+ cCL(cosem, e988140, 2, (RF, RF_IF), rd_rm),
+ cCL(cosez, e988160, 2, (RF, RF_IF), rd_rm),
+
+ cCL(tans, ea08100, 2, (RF, RF_IF), rd_rm),
+ cCL(tansp, ea08120, 2, (RF, RF_IF), rd_rm),
+ cCL(tansm, ea08140, 2, (RF, RF_IF), rd_rm),
+ cCL(tansz, ea08160, 2, (RF, RF_IF), rd_rm),
+ cCL(tand, ea08180, 2, (RF, RF_IF), rd_rm),
+ cCL(tandp, ea081a0, 2, (RF, RF_IF), rd_rm),
+ cCL(tandm, ea081c0, 2, (RF, RF_IF), rd_rm),
+ cCL(tandz, ea081e0, 2, (RF, RF_IF), rd_rm),
+ cCL(tane, ea88100, 2, (RF, RF_IF), rd_rm),
+ cCL(tanep, ea88120, 2, (RF, RF_IF), rd_rm),
+ cCL(tanem, ea88140, 2, (RF, RF_IF), rd_rm),
+ cCL(tanez, ea88160, 2, (RF, RF_IF), rd_rm),
+
+ cCL(asns, eb08100, 2, (RF, RF_IF), rd_rm),
+ cCL(asnsp, eb08120, 2, (RF, RF_IF), rd_rm),
+ cCL(asnsm, eb08140, 2, (RF, RF_IF), rd_rm),
+ cCL(asnsz, eb08160, 2, (RF, RF_IF), rd_rm),
+ cCL(asnd, eb08180, 2, (RF, RF_IF), rd_rm),
+ cCL(asndp, eb081a0, 2, (RF, RF_IF), rd_rm),
+ cCL(asndm, eb081c0, 2, (RF, RF_IF), rd_rm),
+ cCL(asndz, eb081e0, 2, (RF, RF_IF), rd_rm),
+ cCL(asne, eb88100, 2, (RF, RF_IF), rd_rm),
+ cCL(asnep, eb88120, 2, (RF, RF_IF), rd_rm),
+ cCL(asnem, eb88140, 2, (RF, RF_IF), rd_rm),
+ cCL(asnez, eb88160, 2, (RF, RF_IF), rd_rm),
+
+ cCL(acss, ec08100, 2, (RF, RF_IF), rd_rm),
+ cCL(acssp, ec08120, 2, (RF, RF_IF), rd_rm),
+ cCL(acssm, ec08140, 2, (RF, RF_IF), rd_rm),
+ cCL(acssz, ec08160, 2, (RF, RF_IF), rd_rm),
+ cCL(acsd, ec08180, 2, (RF, RF_IF), rd_rm),
+ cCL(acsdp, ec081a0, 2, (RF, RF_IF), rd_rm),
+ cCL(acsdm, ec081c0, 2, (RF, RF_IF), rd_rm),
+ cCL(acsdz, ec081e0, 2, (RF, RF_IF), rd_rm),
+ cCL(acse, ec88100, 2, (RF, RF_IF), rd_rm),
+ cCL(acsep, ec88120, 2, (RF, RF_IF), rd_rm),
+ cCL(acsem, ec88140, 2, (RF, RF_IF), rd_rm),
+ cCL(acsez, ec88160, 2, (RF, RF_IF), rd_rm),
+
+ cCL(atns, ed08100, 2, (RF, RF_IF), rd_rm),
+ cCL(atnsp, ed08120, 2, (RF, RF_IF), rd_rm),
+ cCL(atnsm, ed08140, 2, (RF, RF_IF), rd_rm),
+ cCL(atnsz, ed08160, 2, (RF, RF_IF), rd_rm),
+ cCL(atnd, ed08180, 2, (RF, RF_IF), rd_rm),
+ cCL(atndp, ed081a0, 2, (RF, RF_IF), rd_rm),
+ cCL(atndm, ed081c0, 2, (RF, RF_IF), rd_rm),
+ cCL(atndz, ed081e0, 2, (RF, RF_IF), rd_rm),
+ cCL(atne, ed88100, 2, (RF, RF_IF), rd_rm),
+ cCL(atnep, ed88120, 2, (RF, RF_IF), rd_rm),
+ cCL(atnem, ed88140, 2, (RF, RF_IF), rd_rm),
+ cCL(atnez, ed88160, 2, (RF, RF_IF), rd_rm),
+
+ cCL(urds, ee08100, 2, (RF, RF_IF), rd_rm),
+ cCL(urdsp, ee08120, 2, (RF, RF_IF), rd_rm),
+ cCL(urdsm, ee08140, 2, (RF, RF_IF), rd_rm),
+ cCL(urdsz, ee08160, 2, (RF, RF_IF), rd_rm),
+ cCL(urdd, ee08180, 2, (RF, RF_IF), rd_rm),
+ cCL(urddp, ee081a0, 2, (RF, RF_IF), rd_rm),
+ cCL(urddm, ee081c0, 2, (RF, RF_IF), rd_rm),
+ cCL(urddz, ee081e0, 2, (RF, RF_IF), rd_rm),
+ cCL(urde, ee88100, 2, (RF, RF_IF), rd_rm),
+ cCL(urdep, ee88120, 2, (RF, RF_IF), rd_rm),
+ cCL(urdem, ee88140, 2, (RF, RF_IF), rd_rm),
+ cCL(urdez, ee88160, 2, (RF, RF_IF), rd_rm),
+
+ cCL(nrms, ef08100, 2, (RF, RF_IF), rd_rm),
+ cCL(nrmsp, ef08120, 2, (RF, RF_IF), rd_rm),
+ cCL(nrmsm, ef08140, 2, (RF, RF_IF), rd_rm),
+ cCL(nrmsz, ef08160, 2, (RF, RF_IF), rd_rm),
+ cCL(nrmd, ef08180, 2, (RF, RF_IF), rd_rm),
+ cCL(nrmdp, ef081a0, 2, (RF, RF_IF), rd_rm),
+ cCL(nrmdm, ef081c0, 2, (RF, RF_IF), rd_rm),
+ cCL(nrmdz, ef081e0, 2, (RF, RF_IF), rd_rm),
+ cCL(nrme, ef88100, 2, (RF, RF_IF), rd_rm),
+ cCL(nrmep, ef88120, 2, (RF, RF_IF), rd_rm),
+ cCL(nrmem, ef88140, 2, (RF, RF_IF), rd_rm),
+ cCL(nrmez, ef88160, 2, (RF, RF_IF), rd_rm),
+
+ cCL(adfs, e000100, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(adfsp, e000120, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(adfsm, e000140, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(adfsz, e000160, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(adfd, e000180, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(adfdp, e0001a0, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(adfdm, e0001c0, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(adfdz, e0001e0, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(adfe, e080100, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(adfep, e080120, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(adfem, e080140, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(adfez, e080160, 3, (RF, RF, RF_IF), rd_rn_rm),
+
+ cCL(sufs, e200100, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(sufsp, e200120, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(sufsm, e200140, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(sufsz, e200160, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(sufd, e200180, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(sufdp, e2001a0, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(sufdm, e2001c0, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(sufdz, e2001e0, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(sufe, e280100, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(sufep, e280120, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(sufem, e280140, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(sufez, e280160, 3, (RF, RF, RF_IF), rd_rn_rm),
+
+ cCL(rsfs, e300100, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rsfsp, e300120, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rsfsm, e300140, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rsfsz, e300160, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rsfd, e300180, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rsfdp, e3001a0, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rsfdm, e3001c0, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rsfdz, e3001e0, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rsfe, e380100, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rsfep, e380120, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rsfem, e380140, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rsfez, e380160, 3, (RF, RF, RF_IF), rd_rn_rm),
+
+ cCL(mufs, e100100, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(mufsp, e100120, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(mufsm, e100140, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(mufsz, e100160, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(mufd, e100180, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(mufdp, e1001a0, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(mufdm, e1001c0, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(mufdz, e1001e0, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(mufe, e180100, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(mufep, e180120, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(mufem, e180140, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(mufez, e180160, 3, (RF, RF, RF_IF), rd_rn_rm),
+
+ cCL(dvfs, e400100, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(dvfsp, e400120, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(dvfsm, e400140, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(dvfsz, e400160, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(dvfd, e400180, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(dvfdp, e4001a0, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(dvfdm, e4001c0, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(dvfdz, e4001e0, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(dvfe, e480100, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(dvfep, e480120, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(dvfem, e480140, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(dvfez, e480160, 3, (RF, RF, RF_IF), rd_rn_rm),
+
+ cCL(rdfs, e500100, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rdfsp, e500120, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rdfsm, e500140, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rdfsz, e500160, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rdfd, e500180, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rdfdp, e5001a0, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rdfdm, e5001c0, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rdfdz, e5001e0, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rdfe, e580100, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rdfep, e580120, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rdfem, e580140, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rdfez, e580160, 3, (RF, RF, RF_IF), rd_rn_rm),
+
+ cCL(pows, e600100, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(powsp, e600120, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(powsm, e600140, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(powsz, e600160, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(powd, e600180, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(powdp, e6001a0, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(powdm, e6001c0, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(powdz, e6001e0, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(powe, e680100, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(powep, e680120, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(powem, e680140, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(powez, e680160, 3, (RF, RF, RF_IF), rd_rn_rm),
+
+ cCL(rpws, e700100, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rpwsp, e700120, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rpwsm, e700140, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rpwsz, e700160, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rpwd, e700180, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rpwdp, e7001a0, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rpwdm, e7001c0, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rpwdz, e7001e0, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rpwe, e780100, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rpwep, e780120, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rpwem, e780140, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rpwez, e780160, 3, (RF, RF, RF_IF), rd_rn_rm),
+
+ cCL(rmfs, e800100, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rmfsp, e800120, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rmfsm, e800140, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rmfsz, e800160, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rmfd, e800180, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rmfdp, e8001a0, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rmfdm, e8001c0, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rmfdz, e8001e0, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rmfe, e880100, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rmfep, e880120, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rmfem, e880140, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(rmfez, e880160, 3, (RF, RF, RF_IF), rd_rn_rm),
+
+ cCL(fmls, e900100, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(fmlsp, e900120, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(fmlsm, e900140, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(fmlsz, e900160, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(fmld, e900180, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(fmldp, e9001a0, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(fmldm, e9001c0, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(fmldz, e9001e0, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(fmle, e980100, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(fmlep, e980120, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(fmlem, e980140, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(fmlez, e980160, 3, (RF, RF, RF_IF), rd_rn_rm),
+
+ cCL(fdvs, ea00100, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(fdvsp, ea00120, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(fdvsm, ea00140, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(fdvsz, ea00160, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(fdvd, ea00180, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(fdvdp, ea001a0, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(fdvdm, ea001c0, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(fdvdz, ea001e0, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(fdve, ea80100, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(fdvep, ea80120, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(fdvem, ea80140, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(fdvez, ea80160, 3, (RF, RF, RF_IF), rd_rn_rm),
+
+ cCL(frds, eb00100, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(frdsp, eb00120, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(frdsm, eb00140, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(frdsz, eb00160, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(frdd, eb00180, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(frddp, eb001a0, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(frddm, eb001c0, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(frddz, eb001e0, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(frde, eb80100, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(frdep, eb80120, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(frdem, eb80140, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(frdez, eb80160, 3, (RF, RF, RF_IF), rd_rn_rm),
+
+ cCL(pols, ec00100, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(polsp, ec00120, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(polsm, ec00140, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(polsz, ec00160, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(pold, ec00180, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(poldp, ec001a0, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(poldm, ec001c0, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(poldz, ec001e0, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(pole, ec80100, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(polep, ec80120, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(polem, ec80140, 3, (RF, RF, RF_IF), rd_rn_rm),
+ cCL(polez, ec80160, 3, (RF, RF, RF_IF), rd_rn_rm),
cCE(cmf, e90f110, 2, (RF, RF_IF), fpa_cmp),
C3E(cmfe, ed0f110, 2, (RF, RF_IF), fpa_cmp),
cCE(cnf, eb0f110, 2, (RF, RF_IF), fpa_cmp),
C3E(cnfe, ef0f110, 2, (RF, RF_IF), fpa_cmp),
- cC3(flts, e000110, 2, (RF, RR), rn_rd),
- cC3(fltsp, e000130, 2, (RF, RR), rn_rd),
- cC3(fltsm, e000150, 2, (RF, RR), rn_rd),
- cC3(fltsz, e000170, 2, (RF, RR), rn_rd),
- cC3(fltd, e000190, 2, (RF, RR), rn_rd),
- cC3(fltdp, e0001b0, 2, (RF, RR), rn_rd),
- cC3(fltdm, e0001d0, 2, (RF, RR), rn_rd),
- cC3(fltdz, e0001f0, 2, (RF, RR), rn_rd),
- cC3(flte, e080110, 2, (RF, RR), rn_rd),
- cC3(fltep, e080130, 2, (RF, RR), rn_rd),
- cC3(fltem, e080150, 2, (RF, RR), rn_rd),
- cC3(fltez, e080170, 2, (RF, RR), rn_rd),
+ cCL(flts, e000110, 2, (RF, RR), rn_rd),
+ cCL(fltsp, e000130, 2, (RF, RR), rn_rd),
+ cCL(fltsm, e000150, 2, (RF, RR), rn_rd),
+ cCL(fltsz, e000170, 2, (RF, RR), rn_rd),
+ cCL(fltd, e000190, 2, (RF, RR), rn_rd),
+ cCL(fltdp, e0001b0, 2, (RF, RR), rn_rd),
+ cCL(fltdm, e0001d0, 2, (RF, RR), rn_rd),
+ cCL(fltdz, e0001f0, 2, (RF, RR), rn_rd),
+ cCL(flte, e080110, 2, (RF, RR), rn_rd),
+ cCL(fltep, e080130, 2, (RF, RR), rn_rd),
+ cCL(fltem, e080150, 2, (RF, RR), rn_rd),
+ cCL(fltez, e080170, 2, (RF, RR), rn_rd),
/* The implementation of the FIX instruction is broken on some
assemblers, in that it accepts a precision specifier as well as a
To be more compatible, we accept it as well, though of course it
does not set any bits. */
cCE(fix, e100110, 2, (RR, RF), rd_rm),
- cC3(fixp, e100130, 2, (RR, RF), rd_rm),
- cC3(fixm, e100150, 2, (RR, RF), rd_rm),
- cC3(fixz, e100170, 2, (RR, RF), rd_rm),
- cC3(fixsp, e100130, 2, (RR, RF), rd_rm),
- cC3(fixsm, e100150, 2, (RR, RF), rd_rm),
- cC3(fixsz, e100170, 2, (RR, RF), rd_rm),
- cC3(fixdp, e100130, 2, (RR, RF), rd_rm),
- cC3(fixdm, e100150, 2, (RR, RF), rd_rm),
- cC3(fixdz, e100170, 2, (RR, RF), rd_rm),
- cC3(fixep, e100130, 2, (RR, RF), rd_rm),
- cC3(fixem, e100150, 2, (RR, RF), rd_rm),
- cC3(fixez, e100170, 2, (RR, RF), rd_rm),
+ cCL(fixp, e100130, 2, (RR, RF), rd_rm),
+ cCL(fixm, e100150, 2, (RR, RF), rd_rm),
+ cCL(fixz, e100170, 2, (RR, RF), rd_rm),
+ cCL(fixsp, e100130, 2, (RR, RF), rd_rm),
+ cCL(fixsm, e100150, 2, (RR, RF), rd_rm),
+ cCL(fixsz, e100170, 2, (RR, RF), rd_rm),
+ cCL(fixdp, e100130, 2, (RR, RF), rd_rm),
+ cCL(fixdm, e100150, 2, (RR, RF), rd_rm),
+ cCL(fixdz, e100170, 2, (RR, RF), rd_rm),
+ cCL(fixep, e100130, 2, (RR, RF), rd_rm),
+ cCL(fixem, e100150, 2, (RR, RF), rd_rm),
+ cCL(fixez, e100170, 2, (RR, RF), rd_rm),
/* Instructions that were new with the real FPA, call them V2. */
#undef ARM_VARIANT
-#define ARM_VARIANT FPU_FPA_EXT_V2
+#define ARM_VARIANT &fpu_fpa_ext_v2
cCE(lfm, c100200, 3, (RF, I4b, ADDR), fpa_ldmstm),
- cC3(lfmfd, c900200, 3, (RF, I4b, ADDR), fpa_ldmstm),
- cC3(lfmea, d100200, 3, (RF, I4b, ADDR), fpa_ldmstm),
+ cCL(lfmfd, c900200, 3, (RF, I4b, ADDR), fpa_ldmstm),
+ cCL(lfmea, d100200, 3, (RF, I4b, ADDR), fpa_ldmstm),
cCE(sfm, c000200, 3, (RF, I4b, ADDR), fpa_ldmstm),
- cC3(sfmfd, d000200, 3, (RF, I4b, ADDR), fpa_ldmstm),
- cC3(sfmea, c800200, 3, (RF, I4b, ADDR), fpa_ldmstm),
+ cCL(sfmfd, d000200, 3, (RF, I4b, ADDR), fpa_ldmstm),
+ cCL(sfmea, c800200, 3, (RF, I4b, ADDR), fpa_ldmstm),
#undef ARM_VARIANT
-#define ARM_VARIANT FPU_VFP_EXT_V1xD /* VFP V1xD (single precision). */
+#define ARM_VARIANT &fpu_vfp_ext_v1xd /* VFP V1xD (single precision). */
/* Moves and type conversions. */
cCE(fcpys, eb00a40, 2, (RVS, RVS), vfp_sp_monadic),
cCE(fmrs, e100a10, 2, (RR, RVS), vfp_reg_from_sp),
cCE(fcmpezs, eb50ac0, 1, (RVS), vfp_sp_compare_z),
#undef ARM_VARIANT
-#define ARM_VARIANT FPU_VFP_EXT_V1 /* VFP V1 (Double precision). */
+#define ARM_VARIANT &fpu_vfp_ext_v1 /* VFP V1 (Double precision). */
/* Moves and type conversions. */
cCE(fcpyd, eb00b40, 2, (RVD, RVD), rd_rm),
cCE(fcvtds, eb70ac0, 2, (RVD, RVS), vfp_dp_sp_cvt),
cCE(fcmpezd, eb50bc0, 1, (RVD), rd),
#undef ARM_VARIANT
-#define ARM_VARIANT FPU_VFP_EXT_V2
+#define ARM_VARIANT &fpu_vfp_ext_v2
cCE(fmsrr, c400a10, 3, (VRSLST, RR, RR), vfp_sp2_from_reg2),
cCE(fmrrs, c500a10, 3, (RR, RR, VRSLST), vfp_reg2_from_sp2),
cCE(fmdrr, c400b10, 3, (RVD, RR, RR), rm_rd_rn),
cCE(fmrrd, c500b10, 3, (RR, RR, RVD), rd_rn_rm),
#undef ARM_VARIANT
-#define ARM_VARIANT ARM_CEXT_XSCALE /* Intel XScale extensions. */
+#define ARM_VARIANT &arm_cext_xscale /* Intel XScale extensions. */
cCE(mia, e200010, 3, (RXA, RRnpc, RRnpc), xsc_mia),
cCE(miaph, e280010, 3, (RXA, RRnpc, RRnpc), xsc_mia),
cCE(miabb, e2c0010, 3, (RXA, RRnpc, RRnpc), xsc_mia),
cCE(mra, c500000, 3, (RRnpc, RRnpc, RXA), xsc_mra),
#undef ARM_VARIANT
-#define ARM_VARIANT ARM_CEXT_IWMMXT /* Intel Wireless MMX technology. */
+#define ARM_VARIANT &arm_cext_iwmmxt /* Intel Wireless MMX technology. */
cCE(tandcb, e13f130, 1, (RR), iwmmxt_tandorc),
cCE(tandch, e53f130, 1, (RR), iwmmxt_tandorc),
cCE(tandcw, e93f130, 1, (RR), iwmmxt_tandorc),
cCE(wzero, e300000, 1, (RIWR), iwmmxt_wzero),
#undef ARM_VARIANT
-#define ARM_VARIANT ARM_CEXT_MAVERICK /* Cirrus Maverick instructions. */
+#define ARM_VARIANT &arm_cext_maverick /* Cirrus Maverick instructions. */
cCE(cfldrs, c100400, 2, (RMF, ADDR), rd_cpaddr),
cCE(cfldrd, c500400, 2, (RMD, ADDR), rd_cpaddr),
cCE(cfldr32, c100500, 2, (RMFX, ADDR), rd_cpaddr),
#undef TUF
#undef TCC
#undef cCE
-#undef cC3
+#undef cCL
+#undef C3E
#undef CE
#undef CM
#undef UE
/* ARM mode branches are offset by +8. However, the Windows CE
loader expects the relocation not to take this into account. */
case BFD_RELOC_ARM_PCREL_BRANCH:
+ case BFD_RELOC_ARM_PCREL_CALL:
+ case BFD_RELOC_ARM_PCREL_JUMP:
case BFD_RELOC_ARM_PCREL_BLX:
case BFD_RELOC_ARM_PLT32:
#ifdef TE_WINCE
return value;
}
+/* Like negate_data_op, but for Thumb-2. */
+
+static unsigned int
+thumb32_negate_data_op (offsetT *instruction, offsetT value)
+{
+ int op, new_inst;
+ int rd;
+ offsetT negated, inverted;
+
+ negated = encode_thumb32_immediate (-value);
+ inverted = encode_thumb32_immediate (~value);
+
+ rd = (*instruction >> 8) & 0xf;
+ op = (*instruction >> T2_DATA_OP_SHIFT) & 0xf;
+ switch (op)
+ {
+ /* ADD <-> SUB. Includes CMP <-> CMN. */
+ case T2_OPCODE_SUB:
+ new_inst = T2_OPCODE_ADD;
+ value = negated;
+ break;
+
+ case T2_OPCODE_ADD:
+ new_inst = T2_OPCODE_SUB;
+ value = negated;
+ break;
+
+ /* ORR <-> ORN. Includes MOV <-> MVN. */
+ case T2_OPCODE_ORR:
+ new_inst = T2_OPCODE_ORN;
+ value = inverted;
+ break;
+
+ case T2_OPCODE_ORN:
+ new_inst = T2_OPCODE_ORR;
+ value = inverted;
+ break;
+
+ /* AND <-> BIC. TST has no inverted equivalent. */
+ case T2_OPCODE_AND:
+ new_inst = T2_OPCODE_BIC;
+ if (rd == 15)
+ value = FAIL;
+ else
+ value = inverted;
+ break;
+
+ case T2_OPCODE_BIC:
+ new_inst = T2_OPCODE_AND;
+ value = inverted;
+ break;
+
+ /* ADC <-> SBC */
+ case T2_OPCODE_ADC:
+ new_inst = T2_OPCODE_SBC;
+ value = inverted;
+ break;
+
+ case T2_OPCODE_SBC:
+ new_inst = T2_OPCODE_ADC;
+ value = inverted;
+ break;
+
+ /* We cannot do anything. */
+ default:
+ return FAIL;
+ }
+
+ if (value == FAIL)
+ return FAIL;
+
+ *instruction &= T2_OPCODE_MASK;
+ *instruction |= new_inst << T2_DATA_OP_SHIFT;
+ return value;
+}
+
/* Read a 32-bit thumb instruction from buf. */
static unsigned long
get_thumb32_insn (char * buf)
break;
case BFD_RELOC_ARM_OFFSET_IMM:
+ if (!fixP->fx_done && seg->use_rela_p)
+ value = 0;
+
case BFD_RELOC_ARM_LITERAL:
sign = value >= 0;
break;
}
value /= 4;
- if (value >= 0xff)
+ if (value > 0xff)
{
as_bad_where (fixP->fx_file, fixP->fx_line,
_("offset out of range"));
newval |= (1 << 23);
else
value = -value;
- if (value >= 0xfff)
+ if (value > 0xfff)
{
as_bad_where (fixP->fx_file, fixP->fx_line,
_("offset out of range"));
newval |= (1 << 9);
else
value = -value;
- if (value >= 0xff)
+ if (value > 0xff)
{
as_bad_where (fixP->fx_file, fixP->fx_line,
_("offset out of range"));
else if ((newval & 0x00000f00) == 0x00000e00)
{
/* T-instruction: positive 8-bit offset. */
- if (value < 0 || value >= 0xff)
+ if (value < 0 || value > 0xff)
{
as_bad_where (fixP->fx_file, fixP->fx_line,
_("offset out of range"));
/* FUTURE: Implement analogue of negate_data_op for T32. */
if (fixP->fx_r_type == BFD_RELOC_ARM_T32_IMMEDIATE)
- newimm = encode_thumb32_immediate (value);
+ {
+ newimm = encode_thumb32_immediate (value);
+ if (newimm == (unsigned int) FAIL)
+ newimm = thumb32_negate_data_op (&newval, value);
+ }
else
{
/* 12 bit immediate for addw/subw. */
md_number_to_chars (buf, newval, INSN_SIZE);
break;
- case BFD_RELOC_ARM_PCREL_BRANCH:
#ifdef OBJ_ELF
+ case BFD_RELOC_ARM_PCREL_CALL:
+ newval = md_chars_to_number (buf, INSN_SIZE);
+ if ((newval & 0xf0000000) == 0xf0000000)
+ temp = 1;
+ else
+ temp = 3;
+ goto arm_branch_common;
+
+ case BFD_RELOC_ARM_PCREL_JUMP:
case BFD_RELOC_ARM_PLT32:
#endif
+ case BFD_RELOC_ARM_PCREL_BRANCH:
+ temp = 3;
+ goto arm_branch_common;
+ case BFD_RELOC_ARM_PCREL_BLX:
+ temp = 1;
+ arm_branch_common:
/* We are going to store value (shifted right by two) in the
- instruction, in a 24 bit, signed field. Bits 0 and 1 must be
- clear, and bits 26 through 32 either all clear or all set. */
- if (value & 0x00000003)
+ instruction, in a 24 bit, signed field. Bits 26 through 32 either
+ all clear or all set and bit 0 must be clear. For B/BL bit 1 must
 + also be clear. */
+ if (value & temp)
as_bad_where (fixP->fx_file, fixP->fx_line,
_("misaligned branch destination"));
if ((value & (offsetT)0xfe000000) != (offsetT)0
{
newval = md_chars_to_number (buf, INSN_SIZE);
newval |= (value >> 2) & 0x00ffffff;
- md_number_to_chars (buf, newval, INSN_SIZE);
- }
- break;
-
- case BFD_RELOC_ARM_PCREL_BLX:
- /* BLX allows bit 1 to be set in the branch destination, since
- it targets a Thumb instruction which is only required to be
- aligned modulo 2. Other constraints are as for B/BL. */
- if (value & 0x00000001)
- as_bad_where (fixP->fx_file, fixP->fx_line,
- _("misaligned BLX destination"));
- if ((value & (offsetT)0xfe000000) != (offsetT)0
- && (value & (offsetT)0xfe000000) != (offsetT)0xfe000000)
- as_bad_where (fixP->fx_file, fixP->fx_line,
- _("branch out of range"));
-
- if (fixP->fx_done || !seg->use_rela_p)
- {
- offsetT hbit;
- hbit = (value >> 1) & 1;
- value = (value >> 2) & 0x00ffffff;
-
- newval = md_chars_to_number (buf, INSN_SIZE);
- newval |= value | hbit << 24;
+ /* Set the H bit on BLX instructions. */
+ if (temp == 1)
+ {
+ if (value & 2)
+ newval |= 0x01000000;
+ else
+ newval &= ~0x01000000;
+ }
md_number_to_chars (buf, newval, INSN_SIZE);
}
break;
if (fixP->fx_done || !seg->use_rela_p)
{
newval = md_chars_to_number (buf, THUMB_SIZE);
- newval |= ((value & 0x2e) << 2) | ((value & 0x40) << 3);
+ newval |= ((value & 0x3e) << 2) | ((value & 0x40) << 3);
md_number_to_chars (buf, newval, THUMB_SIZE);
}
break;
if (value & 3)
as_bad_where (fixP->fx_file, fixP->fx_line,
_("invalid offset, target not word aligned (0x%08lX)"),
- (((unsigned int) fixP->fx_frag->fr_address
- + (unsigned int) fixP->fx_where) & ~3) + value);
+ (((unsigned long) fixP->fx_frag->fr_address
+ + (unsigned long) fixP->fx_where) & ~3)
+ + (unsigned long) value);
if (value & ~0x3fc)
as_bad_where (fixP->fx_file, fixP->fx_line,
format. */
arelent *
-tc_gen_reloc (asection * section ATTRIBUTE_UNUSED,
- fixS * fixp)
+tc_gen_reloc (asection *section, fixS *fixp)
{
arelent * reloc;
bfd_reloc_code_real_type code;
reloc->address = fixp->fx_frag->fr_address + fixp->fx_where;
if (fixp->fx_pcrel)
- fixp->fx_offset = reloc->address;
+ {
+ if (section->use_rela_p)
+ fixp->fx_offset -= md_pcrel_from_section (fixp, section);
+ else
+ fixp->fx_offset = reloc->address;
+ }
reloc->addend = fixp->fx_offset;
switch (fixp->fx_r_type)
case BFD_RELOC_ARM_TARGET2:
case BFD_RELOC_ARM_TLS_LE32:
case BFD_RELOC_ARM_TLS_LDO32:
+ case BFD_RELOC_ARM_PCREL_CALL:
+ case BFD_RELOC_ARM_PCREL_JUMP:
code = fixp->fx_r_type;
break;
return NULL;
case BFD_RELOC_ARM_OFFSET_IMM:
+ if (section->use_rela_p)
+ {
+ code = fixp->fx_r_type;
+ break;
+ }
+
if (fixp->fx_addsy != NULL
&& !S_IS_DEFINED (fixp->fx_addsy)
&& S_IS_LOCAL (fixp->fx_addsy))
|| (arm_cond_hsh = hash_new ()) == NULL
|| (arm_shift_hsh = hash_new ()) == NULL
|| (arm_psr_hsh = hash_new ()) == NULL
+ || (arm_v7m_psr_hsh = hash_new ()) == NULL
|| (arm_reg_hsh = hash_new ()) == NULL
- || (arm_reloc_hsh = hash_new ()) == NULL)
+ || (arm_reloc_hsh = hash_new ()) == NULL
+ || (arm_barrier_opt_hsh = hash_new ()) == NULL)
as_fatal (_("virtual memory exhausted"));
for (i = 0; i < sizeof (insns) / sizeof (struct asm_opcode); i++)
hash_insert (arm_shift_hsh, shift_names[i].name, (PTR) (shift_names + i));
for (i = 0; i < sizeof (psrs) / sizeof (struct asm_psr); i++)
hash_insert (arm_psr_hsh, psrs[i].template, (PTR) (psrs + i));
+ for (i = 0; i < sizeof (v7m_psrs) / sizeof (struct asm_psr); i++)
+ hash_insert (arm_v7m_psr_hsh, v7m_psrs[i].template, (PTR) (v7m_psrs + i));
for (i = 0; i < sizeof (reg_names) / sizeof (struct reg_entry); i++)
hash_insert (arm_reg_hsh, reg_names[i].name, (PTR) (reg_names + i));
+ for (i = 0;
+ i < sizeof (barrier_opt_names) / sizeof (struct asm_barrier_opt);
+ i++)
+ hash_insert (arm_barrier_opt_hsh, barrier_opt_names[i].template,
+ (PTR) (barrier_opt_names + i));
#ifdef OBJ_ELF
for (i = 0; i < sizeof (reloc_names) / sizeof (struct reloc_entry); i++)
hash_insert (arm_reloc_hsh, reloc_names[i].name, (PTR) (reloc_names + i));
-mcpu= over -march= if both are set (as for GCC); and we prefer
-mfpu= over any other way of setting the floating point unit.
Use of legacy options with new options are faulted. */
- if (legacy_cpu != -1)
+ if (legacy_cpu)
{
- if (mcpu_cpu_opt != -1 || march_cpu_opt != -1)
+ if (mcpu_cpu_opt || march_cpu_opt)
as_bad (_("use of old and new-style options to set CPU type"));
mcpu_cpu_opt = legacy_cpu;
}
- else if (mcpu_cpu_opt == -1)
+ else if (!mcpu_cpu_opt)
mcpu_cpu_opt = march_cpu_opt;
- if (legacy_fpu != -1)
+ if (legacy_fpu)
{
- if (mfpu_opt != -1)
+ if (mfpu_opt)
as_bad (_("use of old and new-style options to set FPU type"));
mfpu_opt = legacy_fpu;
}
- else if (mfpu_opt == -1)
+ else if (!mfpu_opt)
{
#if !(defined (TE_LINUX) || defined (TE_NetBSD) || defined (TE_VXWORKS))
/* Some environments specify a default FPU. If they don't, infer it
from the processor. */
- if (mcpu_fpu_opt != -1)
+ if (mcpu_fpu_opt)
mfpu_opt = mcpu_fpu_opt;
else
mfpu_opt = march_fpu_opt;
#else
- mfpu_opt = FPU_DEFAULT;
+ mfpu_opt = &fpu_default;
#endif
}
- if (mfpu_opt == -1)
+ if (!mfpu_opt)
{
- if (mcpu_cpu_opt == -1)
- mfpu_opt = FPU_DEFAULT;
- else if (mcpu_cpu_opt & ARM_EXT_V5)
- mfpu_opt = FPU_ARCH_VFP_V2;
+ if (!mcpu_cpu_opt)
+ mfpu_opt = &fpu_default;
+ else if (ARM_CPU_HAS_FEATURE (*mcpu_cpu_opt, arm_ext_v5))
+ mfpu_opt = &fpu_arch_vfp_v2;
else
- mfpu_opt = FPU_ARCH_FPA;
+ mfpu_opt = &fpu_arch_fpa;
}
- if (mcpu_cpu_opt == -1)
- mcpu_cpu_opt = CPU_DEFAULT;
+#ifdef CPU_DEFAULT
+ if (!mcpu_cpu_opt)
+ {
+ mcpu_cpu_opt = &cpu_default;
+ selected_cpu = cpu_default;
+ }
+#else
+ if (mcpu_cpu_opt)
+ selected_cpu = *mcpu_cpu_opt;
+ else
+ mcpu_cpu_opt = &arm_arch_any;
+#endif
- cpu_variant = mcpu_cpu_opt | mfpu_opt;
+ ARM_MERGE_FEATURE_SETS (cpu_variant, *mcpu_cpu_opt, *mfpu_opt);
+
+ arm_arch_used = thumb_arch_used = arm_arch_none;
#if defined OBJ_COFF || defined OBJ_ELF
{
if (support_interwork) flags |= F_INTERWORK;
if (uses_apcs_float) flags |= F_APCS_FLOAT;
if (pic_code) flags |= F_PIC;
- if ((cpu_variant & FPU_ANY) == FPU_NONE
- || (cpu_variant & FPU_ANY) == FPU_ARCH_VFP) /* VFP layout only. */
+ if (!ARM_CPU_HAS_FEATURE (cpu_variant, fpu_any_hard))
flags |= F_SOFT_FLOAT;
switch (mfloat_abi_opt)
break;
}
- /* Using VFP conventions (even if soft-float). */
- if (cpu_variant & FPU_VFP_EXT_NONE)
+ /* Using pure-endian doubles (even if soft-float). */
+ if (ARM_CPU_HAS_FEATURE (cpu_variant, fpu_endian_pure))
flags |= F_VFP_FLOAT;
#if defined OBJ_ELF
- if (cpu_variant & FPU_ARCH_MAVERICK)
+ if (ARM_CPU_HAS_FEATURE (cpu_variant, fpu_arch_maverick))
flags |= EF_ARM_MAVERICK_FLOAT;
break;
case EF_ARM_EABI_VER4:
+ case EF_ARM_EABI_VER5:
/* No additional flags to set. */
break;
#endif
/* Record the CPU type as well. */
- switch (cpu_variant & ARM_CPU_MASK)
- {
- case ARM_2:
- mach = bfd_mach_arm_2;
- break;
-
- case ARM_3: /* Also ARM_250. */
- mach = bfd_mach_arm_2a;
- break;
-
- case ARM_6: /* Also ARM_7. */
- mach = bfd_mach_arm_3;
- break;
-
- default:
- mach = bfd_mach_arm_unknown;
- break;
- }
-
- /* Catch special cases. */
- if (cpu_variant & ARM_CEXT_IWMMXT)
+ if (ARM_CPU_HAS_FEATURE (cpu_variant, arm_cext_iwmmxt))
mach = bfd_mach_arm_iWMMXt;
- else if (cpu_variant & ARM_CEXT_XSCALE)
+ else if (ARM_CPU_HAS_FEATURE (cpu_variant, arm_cext_xscale))
mach = bfd_mach_arm_XScale;
- else if (cpu_variant & ARM_CEXT_MAVERICK)
+ else if (ARM_CPU_HAS_FEATURE (cpu_variant, arm_cext_maverick))
mach = bfd_mach_arm_ep9312;
- else if (cpu_variant & ARM_EXT_V5E)
+ else if (ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v5e))
mach = bfd_mach_arm_5TE;
- else if (cpu_variant & ARM_EXT_V5)
+ else if (ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v5))
{
- if (cpu_variant & ARM_EXT_V4T)
+ if (ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v4t))
mach = bfd_mach_arm_5T;
else
mach = bfd_mach_arm_5;
}
- else if (cpu_variant & ARM_EXT_V4)
+ else if (ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v4))
{
- if (cpu_variant & ARM_EXT_V4T)
+ if (ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v4t))
mach = bfd_mach_arm_4T;
else
mach = bfd_mach_arm_4;
}
- else if (cpu_variant & ARM_EXT_V3M)
+ else if (ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v3m))
mach = bfd_mach_arm_3M;
+ else if (ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v3))
+ mach = bfd_mach_arm_3;
+ else if (ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v2s))
+ mach = bfd_mach_arm_2a;
+ else if (ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v2))
+ mach = bfd_mach_arm_2;
+ else
+ mach = bfd_mach_arm_unknown;
bfd_set_arch_mach (stdoutput, TARGET_ARCH, mach);
}
/* These are recognized by the assembler, but have no affect on code. */
{"mapcs-frame", N_("use frame pointer"), NULL, 0, NULL},
{"mapcs-stack-check", N_("use stack size checking"), NULL, 0, NULL},
+ {NULL, NULL, NULL, 0, NULL}
+};
+
+struct arm_legacy_option_table
+{
+ char *option; /* Option name to match. */
+ const arm_feature_set **var; /* Variable to change. */
+ const arm_feature_set value; /* What to change it to. */
+ char *deprecated; /* If non-null, print this message. */
+};
+const struct arm_legacy_option_table arm_legacy_opts[] =
+{
/* DON'T add any new processors to this list -- we want the whole list
to go away... Add them to the processors table instead. */
- {"marm1", NULL, &legacy_cpu, ARM_ARCH_V1, N_("use -mcpu=arm1")},
- {"m1", NULL, &legacy_cpu, ARM_ARCH_V1, N_("use -mcpu=arm1")},
- {"marm2", NULL, &legacy_cpu, ARM_ARCH_V2, N_("use -mcpu=arm2")},
- {"m2", NULL, &legacy_cpu, ARM_ARCH_V2, N_("use -mcpu=arm2")},
- {"marm250", NULL, &legacy_cpu, ARM_ARCH_V2S, N_("use -mcpu=arm250")},
- {"m250", NULL, &legacy_cpu, ARM_ARCH_V2S, N_("use -mcpu=arm250")},
- {"marm3", NULL, &legacy_cpu, ARM_ARCH_V2S, N_("use -mcpu=arm3")},
- {"m3", NULL, &legacy_cpu, ARM_ARCH_V2S, N_("use -mcpu=arm3")},
- {"marm6", NULL, &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm6")},
- {"m6", NULL, &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm6")},
- {"marm600", NULL, &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm600")},
- {"m600", NULL, &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm600")},
- {"marm610", NULL, &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm610")},
- {"m610", NULL, &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm610")},
- {"marm620", NULL, &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm620")},
- {"m620", NULL, &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm620")},
- {"marm7", NULL, &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm7")},
- {"m7", NULL, &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm7")},
- {"marm70", NULL, &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm70")},
- {"m70", NULL, &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm70")},
- {"marm700", NULL, &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm700")},
- {"m700", NULL, &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm700")},
- {"marm700i", NULL, &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm700i")},
- {"m700i", NULL, &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm700i")},
- {"marm710", NULL, &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm710")},
- {"m710", NULL, &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm710")},
- {"marm710c", NULL, &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm710c")},
- {"m710c", NULL, &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm710c")},
- {"marm720", NULL, &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm720")},
- {"m720", NULL, &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm720")},
- {"marm7d", NULL, &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm7d")},
- {"m7d", NULL, &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm7d")},
- {"marm7di", NULL, &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm7di")},
- {"m7di", NULL, &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm7di")},
- {"marm7m", NULL, &legacy_cpu, ARM_ARCH_V3M, N_("use -mcpu=arm7m")},
- {"m7m", NULL, &legacy_cpu, ARM_ARCH_V3M, N_("use -mcpu=arm7m")},
- {"marm7dm", NULL, &legacy_cpu, ARM_ARCH_V3M, N_("use -mcpu=arm7dm")},
- {"m7dm", NULL, &legacy_cpu, ARM_ARCH_V3M, N_("use -mcpu=arm7dm")},
- {"marm7dmi", NULL, &legacy_cpu, ARM_ARCH_V3M, N_("use -mcpu=arm7dmi")},
- {"m7dmi", NULL, &legacy_cpu, ARM_ARCH_V3M, N_("use -mcpu=arm7dmi")},
- {"marm7100", NULL, &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm7100")},
- {"m7100", NULL, &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm7100")},
- {"marm7500", NULL, &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm7500")},
- {"m7500", NULL, &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm7500")},
- {"marm7500fe", NULL, &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm7500fe")},
- {"m7500fe", NULL, &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm7500fe")},
- {"marm7t", NULL, &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm7tdmi")},
- {"m7t", NULL, &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm7tdmi")},
- {"marm7tdmi", NULL, &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm7tdmi")},
- {"m7tdmi", NULL, &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm7tdmi")},
- {"marm710t", NULL, &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm710t")},
- {"m710t", NULL, &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm710t")},
- {"marm720t", NULL, &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm720t")},
- {"m720t", NULL, &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm720t")},
- {"marm740t", NULL, &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm740t")},
- {"m740t", NULL, &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm740t")},
- {"marm8", NULL, &legacy_cpu, ARM_ARCH_V4, N_("use -mcpu=arm8")},
- {"m8", NULL, &legacy_cpu, ARM_ARCH_V4, N_("use -mcpu=arm8")},
- {"marm810", NULL, &legacy_cpu, ARM_ARCH_V4, N_("use -mcpu=arm810")},
- {"m810", NULL, &legacy_cpu, ARM_ARCH_V4, N_("use -mcpu=arm810")},
- {"marm9", NULL, &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm9")},
- {"m9", NULL, &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm9")},
- {"marm9tdmi", NULL, &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm9tdmi")},
- {"m9tdmi", NULL, &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm9tdmi")},
- {"marm920", NULL, &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm920")},
- {"m920", NULL, &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm920")},
- {"marm940", NULL, &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm940")},
- {"m940", NULL, &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm940")},
- {"mstrongarm", NULL, &legacy_cpu, ARM_ARCH_V4, N_("use -mcpu=strongarm")},
- {"mstrongarm110", NULL, &legacy_cpu, ARM_ARCH_V4,
+ {"marm1", &legacy_cpu, ARM_ARCH_V1, N_("use -mcpu=arm1")},
+ {"m1", &legacy_cpu, ARM_ARCH_V1, N_("use -mcpu=arm1")},
+ {"marm2", &legacy_cpu, ARM_ARCH_V2, N_("use -mcpu=arm2")},
+ {"m2", &legacy_cpu, ARM_ARCH_V2, N_("use -mcpu=arm2")},
+ {"marm250", &legacy_cpu, ARM_ARCH_V2S, N_("use -mcpu=arm250")},
+ {"m250", &legacy_cpu, ARM_ARCH_V2S, N_("use -mcpu=arm250")},
+ {"marm3", &legacy_cpu, ARM_ARCH_V2S, N_("use -mcpu=arm3")},
+ {"m3", &legacy_cpu, ARM_ARCH_V2S, N_("use -mcpu=arm3")},
+ {"marm6", &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm6")},
+ {"m6", &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm6")},
+ {"marm600", &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm600")},
+ {"m600", &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm600")},
+ {"marm610", &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm610")},
+ {"m610", &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm610")},
+ {"marm620", &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm620")},
+ {"m620", &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm620")},
+ {"marm7", &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm7")},
+ {"m7", &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm7")},
+ {"marm70", &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm70")},
+ {"m70", &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm70")},
+ {"marm700", &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm700")},
+ {"m700", &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm700")},
+ {"marm700i", &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm700i")},
+ {"m700i", &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm700i")},
+ {"marm710", &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm710")},
+ {"m710", &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm710")},
+ {"marm710c", &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm710c")},
+ {"m710c", &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm710c")},
+ {"marm720", &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm720")},
+ {"m720", &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm720")},
+ {"marm7d", &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm7d")},
+ {"m7d", &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm7d")},
+ {"marm7di", &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm7di")},
+ {"m7di", &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm7di")},
+ {"marm7m", &legacy_cpu, ARM_ARCH_V3M, N_("use -mcpu=arm7m")},
+ {"m7m", &legacy_cpu, ARM_ARCH_V3M, N_("use -mcpu=arm7m")},
+ {"marm7dm", &legacy_cpu, ARM_ARCH_V3M, N_("use -mcpu=arm7dm")},
+ {"m7dm", &legacy_cpu, ARM_ARCH_V3M, N_("use -mcpu=arm7dm")},
+ {"marm7dmi", &legacy_cpu, ARM_ARCH_V3M, N_("use -mcpu=arm7dmi")},
+ {"m7dmi", &legacy_cpu, ARM_ARCH_V3M, N_("use -mcpu=arm7dmi")},
+ {"marm7100", &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm7100")},
+ {"m7100", &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm7100")},
+ {"marm7500", &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm7500")},
+ {"m7500", &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm7500")},
+ {"marm7500fe", &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm7500fe")},
+ {"m7500fe", &legacy_cpu, ARM_ARCH_V3, N_("use -mcpu=arm7500fe")},
+ {"marm7t", &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm7tdmi")},
+ {"m7t", &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm7tdmi")},
+ {"marm7tdmi", &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm7tdmi")},
+ {"m7tdmi", &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm7tdmi")},
+ {"marm710t", &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm710t")},
+ {"m710t", &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm710t")},
+ {"marm720t", &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm720t")},
+ {"m720t", &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm720t")},
+ {"marm740t", &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm740t")},
+ {"m740t", &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm740t")},
+ {"marm8", &legacy_cpu, ARM_ARCH_V4, N_("use -mcpu=arm8")},
+ {"m8", &legacy_cpu, ARM_ARCH_V4, N_("use -mcpu=arm8")},
+ {"marm810", &legacy_cpu, ARM_ARCH_V4, N_("use -mcpu=arm810")},
+ {"m810", &legacy_cpu, ARM_ARCH_V4, N_("use -mcpu=arm810")},
+ {"marm9", &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm9")},
+ {"m9", &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm9")},
+ {"marm9tdmi", &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm9tdmi")},
+ {"m9tdmi", &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm9tdmi")},
+ {"marm920", &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm920")},
+ {"m920", &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm920")},
+ {"marm940", &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm940")},
+ {"m940", &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm940")},
+ {"mstrongarm", &legacy_cpu, ARM_ARCH_V4, N_("use -mcpu=strongarm")},
+ {"mstrongarm110", &legacy_cpu, ARM_ARCH_V4,
N_("use -mcpu=strongarm110")},
- {"mstrongarm1100", NULL, &legacy_cpu, ARM_ARCH_V4,
+ {"mstrongarm1100", &legacy_cpu, ARM_ARCH_V4,
N_("use -mcpu=strongarm1100")},
- {"mstrongarm1110", NULL, &legacy_cpu, ARM_ARCH_V4,
+ {"mstrongarm1110", &legacy_cpu, ARM_ARCH_V4,
N_("use -mcpu=strongarm1110")},
- {"mxscale", NULL, &legacy_cpu, ARM_ARCH_XSCALE, N_("use -mcpu=xscale")},
- {"miwmmxt", NULL, &legacy_cpu, ARM_ARCH_IWMMXT, N_("use -mcpu=iwmmxt")},
- {"mall", NULL, &legacy_cpu, ARM_ANY, N_("use -mcpu=all")},
+ {"mxscale", &legacy_cpu, ARM_ARCH_XSCALE, N_("use -mcpu=xscale")},
+ {"miwmmxt", &legacy_cpu, ARM_ARCH_IWMMXT, N_("use -mcpu=iwmmxt")},
+ {"mall", &legacy_cpu, ARM_ANY, N_("use -mcpu=all")},
/* Architecture variants -- don't add any more to this list either. */
- {"mv2", NULL, &legacy_cpu, ARM_ARCH_V2, N_("use -march=armv2")},
- {"marmv2", NULL, &legacy_cpu, ARM_ARCH_V2, N_("use -march=armv2")},
- {"mv2a", NULL, &legacy_cpu, ARM_ARCH_V2S, N_("use -march=armv2a")},
- {"marmv2a", NULL, &legacy_cpu, ARM_ARCH_V2S, N_("use -march=armv2a")},
- {"mv3", NULL, &legacy_cpu, ARM_ARCH_V3, N_("use -march=armv3")},
- {"marmv3", NULL, &legacy_cpu, ARM_ARCH_V3, N_("use -march=armv3")},
- {"mv3m", NULL, &legacy_cpu, ARM_ARCH_V3M, N_("use -march=armv3m")},
- {"marmv3m", NULL, &legacy_cpu, ARM_ARCH_V3M, N_("use -march=armv3m")},
- {"mv4", NULL, &legacy_cpu, ARM_ARCH_V4, N_("use -march=armv4")},
- {"marmv4", NULL, &legacy_cpu, ARM_ARCH_V4, N_("use -march=armv4")},
- {"mv4t", NULL, &legacy_cpu, ARM_ARCH_V4T, N_("use -march=armv4t")},
- {"marmv4t", NULL, &legacy_cpu, ARM_ARCH_V4T, N_("use -march=armv4t")},
- {"mv5", NULL, &legacy_cpu, ARM_ARCH_V5, N_("use -march=armv5")},
- {"marmv5", NULL, &legacy_cpu, ARM_ARCH_V5, N_("use -march=armv5")},
- {"mv5t", NULL, &legacy_cpu, ARM_ARCH_V5T, N_("use -march=armv5t")},
- {"marmv5t", NULL, &legacy_cpu, ARM_ARCH_V5T, N_("use -march=armv5t")},
- {"mv5e", NULL, &legacy_cpu, ARM_ARCH_V5TE, N_("use -march=armv5te")},
- {"marmv5e", NULL, &legacy_cpu, ARM_ARCH_V5TE, N_("use -march=armv5te")},
+ {"mv2", &legacy_cpu, ARM_ARCH_V2, N_("use -march=armv2")},
+ {"marmv2", &legacy_cpu, ARM_ARCH_V2, N_("use -march=armv2")},
+ {"mv2a", &legacy_cpu, ARM_ARCH_V2S, N_("use -march=armv2a")},
+ {"marmv2a", &legacy_cpu, ARM_ARCH_V2S, N_("use -march=armv2a")},
+ {"mv3", &legacy_cpu, ARM_ARCH_V3, N_("use -march=armv3")},
+ {"marmv3", &legacy_cpu, ARM_ARCH_V3, N_("use -march=armv3")},
+ {"mv3m", &legacy_cpu, ARM_ARCH_V3M, N_("use -march=armv3m")},
+ {"marmv3m", &legacy_cpu, ARM_ARCH_V3M, N_("use -march=armv3m")},
+ {"mv4", &legacy_cpu, ARM_ARCH_V4, N_("use -march=armv4")},
+ {"marmv4", &legacy_cpu, ARM_ARCH_V4, N_("use -march=armv4")},
+ {"mv4t", &legacy_cpu, ARM_ARCH_V4T, N_("use -march=armv4t")},
+ {"marmv4t", &legacy_cpu, ARM_ARCH_V4T, N_("use -march=armv4t")},
+ {"mv5", &legacy_cpu, ARM_ARCH_V5, N_("use -march=armv5")},
+ {"marmv5", &legacy_cpu, ARM_ARCH_V5, N_("use -march=armv5")},
+ {"mv5t", &legacy_cpu, ARM_ARCH_V5T, N_("use -march=armv5t")},
+ {"marmv5t", &legacy_cpu, ARM_ARCH_V5T, N_("use -march=armv5t")},
+ {"mv5e", &legacy_cpu, ARM_ARCH_V5TE, N_("use -march=armv5te")},
+ {"marmv5e", &legacy_cpu, ARM_ARCH_V5TE, N_("use -march=armv5te")},
/* Floating point variants -- don't add any more to this list either. */
- {"mfpe-old", NULL, &legacy_fpu, FPU_ARCH_FPE, N_("use -mfpu=fpe")},
- {"mfpa10", NULL, &legacy_fpu, FPU_ARCH_FPA, N_("use -mfpu=fpa10")},
- {"mfpa11", NULL, &legacy_fpu, FPU_ARCH_FPA, N_("use -mfpu=fpa11")},
- {"mno-fpu", NULL, &legacy_fpu, 0,
+ {"mfpe-old", &legacy_fpu, FPU_ARCH_FPE, N_("use -mfpu=fpe")},
+ {"mfpa10", &legacy_fpu, FPU_ARCH_FPA, N_("use -mfpu=fpa10")},
+ {"mfpa11", &legacy_fpu, FPU_ARCH_FPA, N_("use -mfpu=fpa11")},
+ {"mno-fpu", &legacy_fpu, ARM_ARCH_NONE,
N_("use either -mfpu=softfpa or -mfpu=softvfp")},
- {NULL, NULL, NULL, 0, NULL}
+ {NULL, NULL, ARM_ARCH_NONE, NULL}
};
struct arm_cpu_option_table
{
char *name;
- int value;
+ const arm_feature_set value;
/* For some CPUs we assume an FPU unless the user explicitly sets
-mfpu=... */
- int default_fpu;
+ const arm_feature_set default_fpu;
+ /* The canonical name of the CPU, or NULL to use NAME converted to upper
+ case. */
+ const char *canonical_name;
};
/* This list should, at a minimum, contain all the cpu names
recognized by GCC. */
-static struct arm_cpu_option_table arm_cpus[] =
-{
- {"all", ARM_ANY, FPU_ARCH_FPA},
- {"arm1", ARM_ARCH_V1, FPU_ARCH_FPA},
- {"arm2", ARM_ARCH_V2, FPU_ARCH_FPA},
- {"arm250", ARM_ARCH_V2S, FPU_ARCH_FPA},
- {"arm3", ARM_ARCH_V2S, FPU_ARCH_FPA},
- {"arm6", ARM_ARCH_V3, FPU_ARCH_FPA},
- {"arm60", ARM_ARCH_V3, FPU_ARCH_FPA},
- {"arm600", ARM_ARCH_V3, FPU_ARCH_FPA},
- {"arm610", ARM_ARCH_V3, FPU_ARCH_FPA},
- {"arm620", ARM_ARCH_V3, FPU_ARCH_FPA},
- {"arm7", ARM_ARCH_V3, FPU_ARCH_FPA},
- {"arm7m", ARM_ARCH_V3M, FPU_ARCH_FPA},
- {"arm7d", ARM_ARCH_V3, FPU_ARCH_FPA},
- {"arm7dm", ARM_ARCH_V3M, FPU_ARCH_FPA},
- {"arm7di", ARM_ARCH_V3, FPU_ARCH_FPA},
- {"arm7dmi", ARM_ARCH_V3M, FPU_ARCH_FPA},
- {"arm70", ARM_ARCH_V3, FPU_ARCH_FPA},
- {"arm700", ARM_ARCH_V3, FPU_ARCH_FPA},
- {"arm700i", ARM_ARCH_V3, FPU_ARCH_FPA},
- {"arm710", ARM_ARCH_V3, FPU_ARCH_FPA},
- {"arm710t", ARM_ARCH_V4T, FPU_ARCH_FPA},
- {"arm720", ARM_ARCH_V3, FPU_ARCH_FPA},
- {"arm720t", ARM_ARCH_V4T, FPU_ARCH_FPA},
- {"arm740t", ARM_ARCH_V4T, FPU_ARCH_FPA},
- {"arm710c", ARM_ARCH_V3, FPU_ARCH_FPA},
- {"arm7100", ARM_ARCH_V3, FPU_ARCH_FPA},
- {"arm7500", ARM_ARCH_V3, FPU_ARCH_FPA},
- {"arm7500fe", ARM_ARCH_V3, FPU_ARCH_FPA},
- {"arm7t", ARM_ARCH_V4T, FPU_ARCH_FPA},
- {"arm7tdmi", ARM_ARCH_V4T, FPU_ARCH_FPA},
- {"arm7tdmi-s", ARM_ARCH_V4T, FPU_ARCH_FPA},
- {"arm8", ARM_ARCH_V4, FPU_ARCH_FPA},
- {"arm810", ARM_ARCH_V4, FPU_ARCH_FPA},
- {"strongarm", ARM_ARCH_V4, FPU_ARCH_FPA},
- {"strongarm1", ARM_ARCH_V4, FPU_ARCH_FPA},
- {"strongarm110", ARM_ARCH_V4, FPU_ARCH_FPA},
- {"strongarm1100", ARM_ARCH_V4, FPU_ARCH_FPA},
- {"strongarm1110", ARM_ARCH_V4, FPU_ARCH_FPA},
- {"arm9", ARM_ARCH_V4T, FPU_ARCH_FPA},
- {"arm920", ARM_ARCH_V4T, FPU_ARCH_FPA},
- {"arm920t", ARM_ARCH_V4T, FPU_ARCH_FPA},
- {"arm922t", ARM_ARCH_V4T, FPU_ARCH_FPA},
- {"arm940t", ARM_ARCH_V4T, FPU_ARCH_FPA},
- {"arm9tdmi", ARM_ARCH_V4T, FPU_ARCH_FPA},
+static const struct arm_cpu_option_table arm_cpus[] =
+{
+ {"all", ARM_ANY, FPU_ARCH_FPA, NULL},
+ {"arm1", ARM_ARCH_V1, FPU_ARCH_FPA, NULL},
+ {"arm2", ARM_ARCH_V2, FPU_ARCH_FPA, NULL},
+ {"arm250", ARM_ARCH_V2S, FPU_ARCH_FPA, NULL},
+ {"arm3", ARM_ARCH_V2S, FPU_ARCH_FPA, NULL},
+ {"arm6", ARM_ARCH_V3, FPU_ARCH_FPA, NULL},
+ {"arm60", ARM_ARCH_V3, FPU_ARCH_FPA, NULL},
+ {"arm600", ARM_ARCH_V3, FPU_ARCH_FPA, NULL},
+ {"arm610", ARM_ARCH_V3, FPU_ARCH_FPA, NULL},
+ {"arm620", ARM_ARCH_V3, FPU_ARCH_FPA, NULL},
+ {"arm7", ARM_ARCH_V3, FPU_ARCH_FPA, NULL},
+ {"arm7m", ARM_ARCH_V3M, FPU_ARCH_FPA, NULL},
+ {"arm7d", ARM_ARCH_V3, FPU_ARCH_FPA, NULL},
+ {"arm7dm", ARM_ARCH_V3M, FPU_ARCH_FPA, NULL},
+ {"arm7di", ARM_ARCH_V3, FPU_ARCH_FPA, NULL},
+ {"arm7dmi", ARM_ARCH_V3M, FPU_ARCH_FPA, NULL},
+ {"arm70", ARM_ARCH_V3, FPU_ARCH_FPA, NULL},
+ {"arm700", ARM_ARCH_V3, FPU_ARCH_FPA, NULL},
+ {"arm700i", ARM_ARCH_V3, FPU_ARCH_FPA, NULL},
+ {"arm710", ARM_ARCH_V3, FPU_ARCH_FPA, NULL},
+ {"arm710t", ARM_ARCH_V4T, FPU_ARCH_FPA, NULL},
+ {"arm720", ARM_ARCH_V3, FPU_ARCH_FPA, NULL},
+ {"arm720t", ARM_ARCH_V4T, FPU_ARCH_FPA, NULL},
+ {"arm740t", ARM_ARCH_V4T, FPU_ARCH_FPA, NULL},
+ {"arm710c", ARM_ARCH_V3, FPU_ARCH_FPA, NULL},
+ {"arm7100", ARM_ARCH_V3, FPU_ARCH_FPA, NULL},
+ {"arm7500", ARM_ARCH_V3, FPU_ARCH_FPA, NULL},
+ {"arm7500fe", ARM_ARCH_V3, FPU_ARCH_FPA, NULL},
+ {"arm7t", ARM_ARCH_V4T, FPU_ARCH_FPA, NULL},
+ {"arm7tdmi", ARM_ARCH_V4T, FPU_ARCH_FPA, NULL},
+ {"arm7tdmi-s", ARM_ARCH_V4T, FPU_ARCH_FPA, NULL},
+ {"arm8", ARM_ARCH_V4, FPU_ARCH_FPA, NULL},
+ {"arm810", ARM_ARCH_V4, FPU_ARCH_FPA, NULL},
+ {"strongarm", ARM_ARCH_V4, FPU_ARCH_FPA, NULL},
+ {"strongarm1", ARM_ARCH_V4, FPU_ARCH_FPA, NULL},
+ {"strongarm110", ARM_ARCH_V4, FPU_ARCH_FPA, NULL},
+ {"strongarm1100", ARM_ARCH_V4, FPU_ARCH_FPA, NULL},
+ {"strongarm1110", ARM_ARCH_V4, FPU_ARCH_FPA, NULL},
+ {"arm9", ARM_ARCH_V4T, FPU_ARCH_FPA, NULL},
+ {"arm920", ARM_ARCH_V4T, FPU_ARCH_FPA, "ARM920T"},
+ {"arm920t", ARM_ARCH_V4T, FPU_ARCH_FPA, NULL},
+ {"arm922t", ARM_ARCH_V4T, FPU_ARCH_FPA, NULL},
+ {"arm940t", ARM_ARCH_V4T, FPU_ARCH_FPA, NULL},
+ {"arm9tdmi", ARM_ARCH_V4T, FPU_ARCH_FPA, NULL},
/* For V5 or later processors we default to using VFP; but the user
should really set the FPU type explicitly. */
- {"arm9e-r0", ARM_ARCH_V5TExP, FPU_ARCH_VFP_V2},
- {"arm9e", ARM_ARCH_V5TE, FPU_ARCH_VFP_V2},
- {"arm926ej", ARM_ARCH_V5TEJ, FPU_ARCH_VFP_V2},
- {"arm926ejs", ARM_ARCH_V5TEJ, FPU_ARCH_VFP_V2},
- {"arm926ej-s", ARM_ARCH_V5TEJ, FPU_ARCH_VFP_V2},
- {"arm946e-r0", ARM_ARCH_V5TExP, FPU_ARCH_VFP_V2},
- {"arm946e", ARM_ARCH_V5TE, FPU_ARCH_VFP_V2},
- {"arm966e-r0", ARM_ARCH_V5TExP, FPU_ARCH_VFP_V2},
- {"arm966e", ARM_ARCH_V5TE, FPU_ARCH_VFP_V2},
- {"arm10t", ARM_ARCH_V5T, FPU_ARCH_VFP_V1},
- {"arm10e", ARM_ARCH_V5TE, FPU_ARCH_VFP_V2},
- {"arm1020", ARM_ARCH_V5TE, FPU_ARCH_VFP_V2},
- {"arm1020t", ARM_ARCH_V5T, FPU_ARCH_VFP_V1},
- {"arm1020e", ARM_ARCH_V5TE, FPU_ARCH_VFP_V2},
- {"arm1026ejs", ARM_ARCH_V5TEJ, FPU_ARCH_VFP_V2},
- {"arm1026ej-s", ARM_ARCH_V5TEJ, FPU_ARCH_VFP_V2},
- {"arm1136js", ARM_ARCH_V6, FPU_NONE},
- {"arm1136j-s", ARM_ARCH_V6, FPU_NONE},
- {"arm1136jfs", ARM_ARCH_V6, FPU_ARCH_VFP_V2},
- {"arm1136jf-s", ARM_ARCH_V6, FPU_ARCH_VFP_V2},
- {"mpcore", ARM_ARCH_V6K, FPU_ARCH_VFP_V2},
- {"mpcorenovfp", ARM_ARCH_V6K, FPU_NONE},
- {"arm1176jz-s", ARM_ARCH_V6ZK, FPU_NONE},
- {"arm1176jzf-s", ARM_ARCH_V6ZK, FPU_ARCH_VFP_V2},
+ {"arm9e-r0", ARM_ARCH_V5TExP, FPU_ARCH_VFP_V2, NULL},
+ {"arm9e", ARM_ARCH_V5TE, FPU_ARCH_VFP_V2, NULL},
+ {"arm926ej", ARM_ARCH_V5TEJ, FPU_ARCH_VFP_V2, "ARM926EJ-S"},
+ {"arm926ejs", ARM_ARCH_V5TEJ, FPU_ARCH_VFP_V2, "ARM926EJ-S"},
+ {"arm926ej-s", ARM_ARCH_V5TEJ, FPU_ARCH_VFP_V2, NULL},
+ {"arm946e-r0", ARM_ARCH_V5TExP, FPU_ARCH_VFP_V2, NULL},
+ {"arm946e", ARM_ARCH_V5TE, FPU_ARCH_VFP_V2, "ARM946E-S"},
+ {"arm946e-s", ARM_ARCH_V5TE, FPU_ARCH_VFP_V2, NULL},
+ {"arm966e-r0", ARM_ARCH_V5TExP, FPU_ARCH_VFP_V2, NULL},
+ {"arm966e", ARM_ARCH_V5TE, FPU_ARCH_VFP_V2, "ARM966E-S"},
+ {"arm966e-s", ARM_ARCH_V5TE, FPU_ARCH_VFP_V2, NULL},
+ {"arm968e-s", ARM_ARCH_V5TE, FPU_ARCH_VFP_V2, NULL},
+ {"arm10t", ARM_ARCH_V5T, FPU_ARCH_VFP_V1, NULL},
+ {"arm10tdmi", ARM_ARCH_V5T, FPU_ARCH_VFP_V1, NULL},
+ {"arm10e", ARM_ARCH_V5TE, FPU_ARCH_VFP_V2, NULL},
+ {"arm1020", ARM_ARCH_V5TE, FPU_ARCH_VFP_V2, "ARM1020E"},
+ {"arm1020t", ARM_ARCH_V5T, FPU_ARCH_VFP_V1, NULL},
+ {"arm1020e", ARM_ARCH_V5TE, FPU_ARCH_VFP_V2, NULL},
+ {"arm1022e", ARM_ARCH_V5TE, FPU_ARCH_VFP_V2, NULL},
+ {"arm1026ejs", ARM_ARCH_V5TEJ, FPU_ARCH_VFP_V2, "ARM1026EJ-S"},
+ {"arm1026ej-s", ARM_ARCH_V5TEJ, FPU_ARCH_VFP_V2, NULL},
+ {"arm1136js", ARM_ARCH_V6, FPU_NONE, "ARM1136J-S"},
+ {"arm1136j-s", ARM_ARCH_V6, FPU_NONE, NULL},
+ {"arm1136jfs", ARM_ARCH_V6, FPU_ARCH_VFP_V2, "ARM1136JF-S"},
+ {"arm1136jf-s", ARM_ARCH_V6, FPU_ARCH_VFP_V2, NULL},
+ {"mpcore", ARM_ARCH_V6K, FPU_ARCH_VFP_V2, NULL},
+ {"mpcorenovfp", ARM_ARCH_V6K, FPU_NONE, NULL},
+ {"arm1156t2-s", ARM_ARCH_V6T2, FPU_NONE, NULL},
+ {"arm1156t2f-s", ARM_ARCH_V6T2, FPU_ARCH_VFP_V2, NULL},
+ {"arm1176jz-s", ARM_ARCH_V6ZK, FPU_NONE, NULL},
+ {"arm1176jzf-s", ARM_ARCH_V6ZK, FPU_ARCH_VFP_V2, NULL},
+ {"cortex-a8", ARM_ARCH_V7A, FPU_ARCH_VFP_V2, NULL},
+ {"cortex-r4", ARM_ARCH_V7R, FPU_NONE, NULL},
+ {"cortex-m3", ARM_ARCH_V7M, FPU_NONE, NULL},
/* ??? XSCALE is really an architecture. */
- {"xscale", ARM_ARCH_XSCALE, FPU_ARCH_VFP_V2},
+ {"xscale", ARM_ARCH_XSCALE, FPU_ARCH_VFP_V2, NULL},
/* ??? iwmmxt is not a processor. */
- {"iwmmxt", ARM_ARCH_IWMMXT, FPU_ARCH_VFP_V2},
- {"i80200", ARM_ARCH_XSCALE, FPU_ARCH_VFP_V2},
+ {"iwmmxt", ARM_ARCH_IWMMXT, FPU_ARCH_VFP_V2, NULL},
+ {"i80200", ARM_ARCH_XSCALE, FPU_ARCH_VFP_V2, NULL},
/* Maverick */
- {"ep9312", ARM_ARCH_V4T | ARM_CEXT_MAVERICK, FPU_ARCH_MAVERICK},
- {NULL, 0, 0}
+ {"ep9312", ARM_FEATURE(ARM_AEXT_V4T, ARM_CEXT_MAVERICK), FPU_ARCH_MAVERICK, "ARM920T"},
+ {NULL, ARM_ARCH_NONE, ARM_ARCH_NONE, NULL}
};
struct arm_arch_option_table
{
char *name;
- int value;
- int default_fpu;
+ const arm_feature_set value;
+ const arm_feature_set default_fpu;
};
/* This list should, at a minimum, contain all the architecture names
recognized by GCC. */
-static struct arm_arch_option_table arm_archs[] =
+static const struct arm_arch_option_table arm_archs[] =
{
{"all", ARM_ANY, FPU_ARCH_FPA},
{"armv1", ARM_ARCH_V1, FPU_ARCH_FPA},
{"armv6kt2", ARM_ARCH_V6KT2, FPU_ARCH_VFP},
{"armv6zt2", ARM_ARCH_V6ZT2, FPU_ARCH_VFP},
{"armv6zkt2", ARM_ARCH_V6ZKT2, FPU_ARCH_VFP},
+ {"armv7", ARM_ARCH_V7, FPU_ARCH_VFP},
+ {"armv7a", ARM_ARCH_V7A, FPU_ARCH_VFP},
+ {"armv7r", ARM_ARCH_V7R, FPU_ARCH_VFP},
+ {"armv7m", ARM_ARCH_V7M, FPU_ARCH_VFP},
{"xscale", ARM_ARCH_XSCALE, FPU_ARCH_VFP},
{"iwmmxt", ARM_ARCH_IWMMXT, FPU_ARCH_VFP},
- {NULL, 0, 0}
+ {NULL, ARM_ARCH_NONE, ARM_ARCH_NONE}
};
/* ISA extensions in the co-processor space. */
-struct arm_option_value_table
+struct arm_option_cpu_value_table
{
char *name;
- int value;
+ const arm_feature_set value;
};
-static struct arm_option_value_table arm_extensions[] =
+static const struct arm_option_cpu_value_table arm_extensions[] =
{
- {"maverick", ARM_CEXT_MAVERICK},
- {"xscale", ARM_CEXT_XSCALE},
- {"iwmmxt", ARM_CEXT_IWMMXT},
- {NULL, 0}
+ {"maverick", ARM_FEATURE (0, ARM_CEXT_MAVERICK)},
+ {"xscale", ARM_FEATURE (0, ARM_CEXT_XSCALE)},
+ {"iwmmxt", ARM_FEATURE (0, ARM_CEXT_IWMMXT)},
+ {NULL, ARM_ARCH_NONE}
};
/* This list should, at a minimum, contain all the fpu names
recognized by GCC. */
-static struct arm_option_value_table arm_fpus[] =
+static const struct arm_option_cpu_value_table arm_fpus[] =
{
{"softfpa", FPU_NONE},
{"fpe", FPU_ARCH_FPE},
{"arm1136jfs", FPU_ARCH_VFP_V2},
{"arm1136jf-s", FPU_ARCH_VFP_V2},
{"maverick", FPU_ARCH_MAVERICK},
- {NULL, 0}
+ {NULL, ARM_ARCH_NONE}
+};
+
+struct arm_option_value_table
+{
+ char *name;
+ long value;
};
-static struct arm_option_value_table arm_float_abis[] =
+static const struct arm_option_value_table arm_float_abis[] =
{
{"hard", ARM_FLOAT_ABI_HARD},
{"softfp", ARM_FLOAT_ABI_SOFTFP},
{"soft", ARM_FLOAT_ABI_SOFT},
- {NULL, 0}
+ {NULL, 0}
};
#ifdef OBJ_ELF
-/* We only know how to output GNU and ver 4 (AAELF) formats. */
-static struct arm_option_value_table arm_eabis[] =
+/* We only know how to output GNU and ver 4/5 (AAELF) formats. */
+static const struct arm_option_value_table arm_eabis[] =
{
{"gnu", EF_ARM_EABI_UNKNOWN},
{"4", EF_ARM_EABI_VER4},
- {NULL, 0}
+ {"5", EF_ARM_EABI_VER5},
+ {NULL, 0}
};
#endif
};
static int
-arm_parse_extension (char * str, int * opt_p)
+arm_parse_extension (char * str, const arm_feature_set **opt_p)
{
+ arm_feature_set *ext_set = xmalloc (sizeof (arm_feature_set));
+
+ /* Copy the feature set, so that we can modify it. */
+ *ext_set = **opt_p;
+ *opt_p = ext_set;
+
while (str != NULL && *str != 0)
{
- struct arm_option_value_table * opt;
+ const struct arm_option_cpu_value_table * opt;
char * ext;
int optlen;
for (opt = arm_extensions; opt->name != NULL; opt++)
if (strncmp (opt->name, str, optlen) == 0)
{
- *opt_p |= opt->value;
+ ARM_MERGE_FEATURE_SETS (*ext_set, *ext_set, opt->value);
break;
}
static int
arm_parse_cpu (char * str)
{
- struct arm_cpu_option_table * opt;
+ const struct arm_cpu_option_table * opt;
char * ext = strchr (str, '+');
int optlen;
for (opt = arm_cpus; opt->name != NULL; opt++)
if (strncmp (opt->name, str, optlen) == 0)
{
- mcpu_cpu_opt = opt->value;
- mcpu_fpu_opt = opt->default_fpu;
+ mcpu_cpu_opt = &opt->value;
+ mcpu_fpu_opt = &opt->default_fpu;
+ if (opt->canonical_name)
+ strcpy (selected_cpu_name, opt->canonical_name);
+ else
+ {
+ int i;
+ for (i = 0; i < optlen; i++)
+ selected_cpu_name[i] = TOUPPER (opt->name[i]);
+ selected_cpu_name[i] = 0;
+ }
if (ext != NULL)
return arm_parse_extension (ext, &mcpu_cpu_opt);
static int
arm_parse_arch (char * str)
{
- struct arm_arch_option_table *opt;
+ const struct arm_arch_option_table *opt;
char *ext = strchr (str, '+');
int optlen;
return 0;
}
-
for (opt = arm_archs; opt->name != NULL; opt++)
if (streq (opt->name, str))
{
- march_cpu_opt = opt->value;
- march_fpu_opt = opt->default_fpu;
+ march_cpu_opt = &opt->value;
+ march_fpu_opt = &opt->default_fpu;
+ strcpy (selected_cpu_name, opt->name);
if (ext != NULL)
return arm_parse_extension (ext, &march_cpu_opt);
static int
arm_parse_fpu (char * str)
{
- struct arm_option_value_table * opt;
+ const struct arm_option_cpu_value_table * opt;
for (opt = arm_fpus; opt->name != NULL; opt++)
if (streq (opt->name, str))
{
- mfpu_opt = opt->value;
+ mfpu_opt = &opt->value;
return 1;
}
static int
arm_parse_float_abi (char * str)
{
- struct arm_option_value_table * opt;
+ const struct arm_option_value_table * opt;
for (opt = arm_float_abis; opt->name != NULL; opt++)
if (streq (opt->name, str))
static int
arm_parse_eabi (char * str)
{
- struct arm_option_value_table *opt;
+ const struct arm_option_value_table *opt;
for (opt = arm_eabis; opt->name != NULL; opt++)
if (streq (opt->name, str))
md_parse_option (int c, char * arg)
{
struct arm_option_table *opt;
+ const struct arm_legacy_option_table *fopt;
struct arm_long_option_table *lopt;
switch (c)
}
}
+  /* Scan the table of deprecated single-letter options (the legacy
+     spellings).  OPTION[0] is the option letter; the rest of OPTION
+     must match ARG exactly (or ARG must be absent for a bare letter).  */
+  for (fopt = arm_legacy_opts; fopt->option != NULL; fopt++)
+    {
+      if (c == fopt->option[0]
+	  && ((arg == NULL && fopt->option[1] == 0)
+	      /* Guard against a NULL ARG: streq would pass NULL to
+		 strcmp, which is undefined behaviour.  */
+	      || (arg != NULL && streq (arg, fopt->option + 1))))
+	{
+#if WARN_DEPRECATED
+	  /* If the option is deprecated, tell the user.  */
+	  if (fopt->deprecated != NULL)
+	    as_tsktsk (_("option `-%c%s' is deprecated: %s"), c,
+		       arg ? arg : "", _(fopt->deprecated));
+#endif
+
+	  /* Options with no associated variable are recognized but
+	     have no further effect.  */
+	  if (fopt->var != NULL)
+	    *fopt->var = &fopt->value;
+
+	  return 1;
+	}
+    }
+
for (lopt = arm_long_opts; lopt->option != NULL; lopt++)
{
/* These options are expected to have an argument. */
-EL assemble code for a little-endian cpu\n"));
#endif
}
+
+
+#ifdef OBJ_ELF
+/* Pairs an EABI Tag_CPU_arch attribute value with the architecture
+   feature set it denotes.  */
+typedef struct
+{
+ int val;
+ arm_feature_set flags;
+} cpu_arch_ver_table;
+
+/* Mapping from CPU features to EABI CPU arch values. Table must be sorted
+ least features first. */
+static const cpu_arch_ver_table cpu_arch_ver[] =
+{
+ {1, ARM_ARCH_V4},
+ {2, ARM_ARCH_V4T},
+ {3, ARM_ARCH_V5},
+ {4, ARM_ARCH_V5TE},
+ {5, ARM_ARCH_V5TEJ},
+ {6, ARM_ARCH_V6},
+ {7, ARM_ARCH_V6Z},
+ {8, ARM_ARCH_V6K},
+ {9, ARM_ARCH_V6T2},
+ /* All three v7 profiles map to the same arch attribute value.  */
+ {10, ARM_ARCH_V7A},
+ {10, ARM_ARCH_V7R},
+ {10, ARM_ARCH_V7M},
+ /* Sentinel: val == 0 terminates the table.  */
+ {0, ARM_ARCH_NONE}
+};
+
+/* Set the public EABI object attributes (Tag_CPU_name, Tag_CPU_arch,
+   Tag_CPU_arch_profile, Tag_ARM_ISA_use, Tag_THUMB_ISA_use,
+   Tag_VFP_arch and Tag_WMMX_arch) on STDOUTPUT, derived from the
+   features actually used plus those of the selected cpu.  */
+static void
+aeabi_set_public_attributes (void)
+{
+  int arch;
+  arm_feature_set flags;
+  arm_feature_set tmp;
+  const cpu_arch_ver_table *p;
+
+  /* Choose the architecture based on the capabilities of the requested cpu
+     (if any) and/or the instructions actually used.  */
+  ARM_MERGE_FEATURE_SETS (flags, arm_arch_used, thumb_arch_used);
+  /* NOTE(review): assumes mfpu_opt is non-NULL here -- presumably it is
+     defaulted during option processing; confirm against md_begin.  */
+  ARM_MERGE_FEATURE_SETS (flags, flags, *mfpu_opt);
+  ARM_MERGE_FEATURE_SETS (flags, flags, selected_cpu);
+
+  /* Walk the (least-features-first) table; each matching entry strips
+     its features from TMP, so the last match yields the smallest arch
+     value covering everything used.  */
+  tmp = flags;
+  arch = 0;
+  for (p = cpu_arch_ver; p->val; p++)
+    {
+      if (ARM_CPU_HAS_FEATURE (tmp, p->flags))
+	{
+	  arch = p->val;
+	  ARM_CLEAR_FEATURE (tmp, tmp, p->flags);
+	}
+    }
+
+  /* Tag_CPU_name.  */
+  if (selected_cpu_name[0])
+    {
+      /* Named Q rather than P to avoid shadowing the table cursor above.  */
+      char *q;
+
+      q = selected_cpu_name;
+      /* Upper-case the suffix of "armv..." names in place.  */
+      if (strncmp (q, "armv", 4) == 0)
+	{
+	  int i;
+
+	  q += 4;
+	  for (i = 0; q[i]; i++)
+	    q[i] = TOUPPER (q[i]);
+	}
+      elf32_arm_add_eabi_attr_string (stdoutput, 5, q);
+    }
+  /* Tag_CPU_arch.  */
+  elf32_arm_add_eabi_attr_int (stdoutput, 6, arch);
+  /* Tag_CPU_arch_profile.  */
+  if (ARM_CPU_HAS_FEATURE (flags, arm_ext_v7a))
+    elf32_arm_add_eabi_attr_int (stdoutput, 7, 'A');
+  else if (ARM_CPU_HAS_FEATURE (flags, arm_ext_v7r))
+    elf32_arm_add_eabi_attr_int (stdoutput, 7, 'R');
+  else if (ARM_CPU_HAS_FEATURE (flags, arm_ext_v7m))
+    elf32_arm_add_eabi_attr_int (stdoutput, 7, 'M');
+  /* Tag_ARM_ISA_use.  */
+  if (ARM_CPU_HAS_FEATURE (arm_arch_used, arm_arch_full))
+    elf32_arm_add_eabi_attr_int (stdoutput, 8, 1);
+  /* Tag_THUMB_ISA_use: 2 when Thumb-2 instructions were used.  */
+  if (ARM_CPU_HAS_FEATURE (thumb_arch_used, arm_arch_full))
+    elf32_arm_add_eabi_attr_int (stdoutput, 9,
+	ARM_CPU_HAS_FEATURE (thumb_arch_used, arm_arch_t2) ? 2 : 1);
+  /* Tag_VFP_arch.
+     NOTE(review): VFP feature bits are tested against the *_arch_used
+     sets; confirm FPU features are accumulated there and not only in
+     FLAGS.  */
+  if (ARM_CPU_HAS_FEATURE (thumb_arch_used, fpu_arch_vfp_v2)
+      || ARM_CPU_HAS_FEATURE (arm_arch_used, fpu_arch_vfp_v2))
+    elf32_arm_add_eabi_attr_int (stdoutput, 10, 2);
+  else if (ARM_CPU_HAS_FEATURE (thumb_arch_used, fpu_arch_vfp_v1)
+	   || ARM_CPU_HAS_FEATURE (arm_arch_used, fpu_arch_vfp_v1))
+    elf32_arm_add_eabi_attr_int (stdoutput, 10, 1);
+  /* Tag_WMMX_arch.  */
+  if (ARM_CPU_HAS_FEATURE (thumb_arch_used, arm_cext_iwmmxt)
+      || ARM_CPU_HAS_FEATURE (arm_arch_used, arm_cext_iwmmxt))
+    elf32_arm_add_eabi_attr_int (stdoutput, 11, 1);
+}
+
+/* Add the .ARM.attributes section to the output.  Emits nothing for
+   EABI versions before 4, which have no build-attributes section.  */
+void
+arm_md_end (void)
+{
+  segT s;
+  char *p;
+  offsetT size;
+
+  if (EF_ARM_EABI_VERSION (meabi_flags) < EF_ARM_EABI_VER4)
+    return;
+
+  aeabi_set_public_attributes ();
+  size = elf32_arm_eabi_attr_size (stdoutput);
+  s = subseg_new (".ARM.attributes", 0);
+  bfd_set_section_flags (stdoutput, s, SEC_READONLY | SEC_DATA);
+  /* Reserve room in the current frag and serialize the attributes
+     directly into it.  (The unused ADDR = frag_now_fix () dead store
+     has been removed.)  */
+  p = frag_more (size);
+  elf32_arm_set_eabi_attr_contents (stdoutput, (bfd_byte *) p, size);
+}
+#endif /* OBJ_ELF */
+
+
+/* Parse a .cpu directive.  Selects the named cpu for subsequent code,
+   records its canonical name for the EABI attributes, and recomputes
+   the active feature set.  */
+
+static void
+s_arm_cpu (int ignored ATTRIBUTE_UNUSED)
+{
+  const struct arm_cpu_option_table *opt;
+  char *name;
+  char saved_char;
+
+  /* Isolate the cpu name by temporarily NUL-terminating it in the
+     input buffer; SAVED_CHAR is restored on every exit path.  */
+  name = input_line_pointer;
+  while (*input_line_pointer && !ISSPACE (*input_line_pointer))
+    input_line_pointer++;
+  saved_char = *input_line_pointer;
+  *input_line_pointer = 0;
+
+  /* Skip the first "all" entry.  */
+  for (opt = arm_cpus + 1; opt->name != NULL; opt++)
+    if (streq (opt->name, name))
+      {
+	mcpu_cpu_opt = &opt->value;
+	selected_cpu = opt->value;
+	if (opt->canonical_name)
+	  strcpy (selected_cpu_name, opt->canonical_name);
+	else
+	  {
+	    /* No canonical name: use the upper-cased table name.  */
+	    int i;
+	    for (i = 0; opt->name[i]; i++)
+	      selected_cpu_name[i] = TOUPPER (opt->name[i]);
+	    selected_cpu_name[i] = 0;
+	  }
+	/* NOTE(review): assumes mfpu_opt is non-NULL by the time this
+	   directive is seen -- confirm it is defaulted at startup.  */
+	ARM_MERGE_FEATURE_SETS (cpu_variant, *mcpu_cpu_opt, *mfpu_opt);
+	*input_line_pointer = saved_char;
+	demand_empty_rest_of_line ();
+	return;
+      }
+  as_bad (_("unknown cpu `%s'"), name);
+  *input_line_pointer = saved_char;
+  ignore_rest_of_line ();
+}
+
+
+/* Parse a .arch directive.  Selects the named architecture for
+   subsequent code and recomputes the active feature set.  */
+
+static void
+s_arm_arch (int ignored ATTRIBUTE_UNUSED)
+{
+  const struct arm_arch_option_table *opt;
+  char saved_char;
+  char *name;
+
+  /* Isolate the architecture name by temporarily NUL-terminating it;
+     SAVED_CHAR is restored on every exit path.  */
+  name = input_line_pointer;
+  while (*input_line_pointer && !ISSPACE (*input_line_pointer))
+    input_line_pointer++;
+  saved_char = *input_line_pointer;
+  *input_line_pointer = 0;
+
+  /* Skip the first "all" entry.  */
+  for (opt = arm_archs + 1; opt->name != NULL; opt++)
+    if (streq (opt->name, name))
+      {
+	mcpu_cpu_opt = &opt->value;
+	selected_cpu = opt->value;
+	strcpy (selected_cpu_name, opt->name);
+	ARM_MERGE_FEATURE_SETS (cpu_variant, *mcpu_cpu_opt, *mfpu_opt);
+	*input_line_pointer = saved_char;
+	demand_empty_rest_of_line ();
+	return;
+      }
+
+  /* No trailing \n: as_bad already terminates the message.  */
+  as_bad (_("unknown architecture `%s'"), name);
+  *input_line_pointer = saved_char;
+  ignore_rest_of_line ();
+}
+
+
+/* Parse a .fpu directive.  Selects the named floating-point unit for
+   subsequent code and recomputes the active feature set.  */
+
+static void
+s_arm_fpu (int ignored ATTRIBUTE_UNUSED)
+{
+  const struct arm_option_cpu_value_table *opt;
+  char saved_char;
+  char *name;
+
+  /* Isolate the fpu name by temporarily NUL-terminating it;
+     SAVED_CHAR is restored on every exit path.  */
+  name = input_line_pointer;
+  while (*input_line_pointer && !ISSPACE (*input_line_pointer))
+    input_line_pointer++;
+  saved_char = *input_line_pointer;
+  *input_line_pointer = 0;
+
+  for (opt = arm_fpus; opt->name != NULL; opt++)
+    if (streq (opt->name, name))
+      {
+	mfpu_opt = &opt->value;
+	/* NOTE(review): assumes mcpu_cpu_opt is non-NULL by the time
+	   this directive is seen -- confirm it is defaulted at startup.  */
+	ARM_MERGE_FEATURE_SETS (cpu_variant, *mcpu_cpu_opt, *mfpu_opt);
+	*input_line_pointer = saved_char;
+	demand_empty_rest_of_line ();
+	return;
+      }
+
+  /* No trailing \n: as_bad already terminates the message.  */
+  as_bad (_("unknown floating point format `%s'"), name);
+  *input_line_pointer = saved_char;
+  ignore_rest_of_line ();
+}
+