(neon_type_mask): Add N_UNT.
(neon_check_type): Don't always decay typed to untyped sizes.
(do_crypto_2op_1): New function.
(do_aese): Likewise.
(do_aesd): Likewise.
(do_aesmc): Likewise.
(do_aesimc): Likewise.
(insns): Add AES instructions.
* gas/testsuite/gas/arm/armv8-a+crypto.d: New testcase.
* gas/testsuite/gas/arm/armv8-a+crypto.s: Likewise.
* opcodes/arm-dis.c (neon_opcodes): Add support for AES instructions.
2012-08-24 Matthew Gretton-Dann <matthew.gretton-dann@arm.com>
+ * config/tc-arm.c (NEON_ENC_TAB): Add aes entry.
+ (neon_type_mask): Add N_UNT.
+ (neon_check_type): Don't always decay typed to untyped sizes.
+ (do_crypto_2op_1): New function.
+ (do_aese): Likewise.
+ (do_aesd): Likewise.
+ (do_aesmc): Likewise.
+ (do_aesimc): Likewise.
+ (insns): Add AES instructions.
+
+2012-08-24 Matthew Gretton-Dann <matthew.gretton-dann@arm.com>
+
* config/tc-arm.c (el_type_type_check): Add handling for 16-bit
floating point types.
(do_neon_cvttb_2): New function.
X(vminnm, 0xe800a40, 0x3200f10, N_INV), \
X(vcvta, 0xebc0a40, 0x3bb0000, N_INV), \
X(vrintr, 0xeb60a40, 0x3ba0400, N_INV), \
- X(vrinta, 0xeb80a40, 0x3ba0400, N_INV)
+ X(vrinta, 0xeb80a40, 0x3ba0400, N_INV), \
+ X(aes, 0x3b00300, N_INV, N_INV)
enum neon_opc
{
N_KEY = 0x1000000, /* Key element (main type specifier). */
N_EQK = 0x2000000, /* Given operand has the same type & size as the key. */
N_VFP = 0x4000000, /* VFP mode: operand size must match register width. */
+ N_UNT = 0x8000000, /* Must be explicitly untyped. */
N_DBL = 0x0000001, /* If N_EQK, this operand is twice the size. */
N_HLF = 0x0000002, /* If N_EQK, this operand is half the size. */
N_SGN = 0x0000004, /* If N_EQK, this operand is forced to be signed. */
/* If only untyped args are allowed, decay any more specific types to
them. Some instructions only care about signs for some element
sizes, so handle that properly. */
- if ((g_size == 8 && (types_allowed & N_8) != 0)
- || (g_size == 16 && (types_allowed & N_16) != 0)
- || (g_size == 32 && (types_allowed & N_32) != 0)
- || (g_size == 64 && (types_allowed & N_64) != 0))
+ if (((types_allowed & N_UNT) == 0)
+ && ((g_size == 8 && (types_allowed & N_8) != 0)
+ || (g_size == 16 && (types_allowed & N_16) != 0)
+ || (g_size == 32 && (types_allowed & N_32) != 0)
+ || (g_size == 64 && (types_allowed & N_64) != 0)))
g_type = NT_untyped;
if (pass == 0)
do_vrint_1 (neon_cvt_mode_m);
}
+/* Crypto v1 instructions. */
+static void
+do_crypto_2op_1 (unsigned elttype, int op)
+{
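+  /* These instructions are unconditional and may not appear inside an
+     IT block.  */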
+ set_it_insn_type (OUTSIDE_IT_INSN);
+
+ if (neon_check_type (2, NS_QQ, N_EQK | N_UNT, elttype | N_UNT | N_KEY).type
+ == NT_invtype)
+ return;
+
+ inst.error = NULL;
+
+ NEON_ENCODE (INTEGER, inst);
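+  /* Vd goes in bits 15-12 with the D bit in bit 22; Vm goes in bits 3-0
+     with the M bit in bit 5.  */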
+ inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
+ inst.instruction |= HI1 (inst.operands[0].reg) << 22;
+ inst.instruction |= LOW4 (inst.operands[1].reg);
+ inst.instruction |= HI1 (inst.operands[1].reg) << 5;
+ if (op != -1)
+ inst.instruction |= op << 6;
+
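+  /* The ARM and Thumb encodings differ only in the top bits ORed in here.  */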
+ if (thumb_mode)
+ inst.instruction |= 0xfc000000;
+ else
+ inst.instruction |= 0xf0000000;
+}
+
+static void
+do_aese (void)
+{
+ do_crypto_2op_1 (N_8, 0);
+}
+
+static void
+do_aesd (void)
+{
+ do_crypto_2op_1 (N_8, 1);
+}
+
+static void
+do_aesmc (void)
+{
+ do_crypto_2op_1 (N_8, 2);
+}
+
+static void
+do_aesimc (void)
+{
+ do_crypto_2op_1 (N_8, 3);
+}
+
+
\f
/* Overall per-instruction processing. */
nUF(vrintp, _vrinta, 2, (RNSDQ, oRNSDQ), vrintp),
nUF(vrintm, _vrinta, 2, (RNSDQ, oRNSDQ), vrintm),
+ /* Crypto v1 extensions. */
+#undef ARM_VARIANT
+#define ARM_VARIANT & fpu_crypto_ext_armv8
+#undef THUMB_VARIANT
+#define THUMB_VARIANT & fpu_crypto_ext_armv8
+
+ nUF(aese, _aes, 2, (RNQ, RNQ), aese),
+ nUF(aesd, _aes, 2, (RNQ, RNQ), aesd),
+ nUF(aesmc, _aes, 2, (RNQ, RNQ), aesmc),
+ nUF(aesimc, _aes, 2, (RNQ, RNQ), aesimc),
+
+
#undef ARM_VARIANT
#define ARM_VARIANT & fpu_fpa_ext_v1 /* Core FPA instruction set (V1). */
#undef THUMB_VARIANT
2012-08-24 Matthew Gretton-Dann <matthew.gretton-dann@arm.com>
+ * gas/arm/armv8-a+crypto.d: New testcase.
+ * gas/arm/armv8-a+crypto.s: Likewise.
+
+2012-08-24 Matthew Gretton-Dann <matthew.gretton-dann@arm.com>
+
* gas/arm/armv8-a+fp.d: Update testcase.
* gas/arm/armv8-a+fp.s: Likewise.
* gas/arm/half-prec-vfpv3.s: Likewise.
--- /dev/null
+#name: Valid v8-a+cryptov1
+#objdump: -dr --prefix-addresses --show-raw-insn
+
+.*: +file format .*arm.*
+
+Disassembly of section .text:
+0[0-9a-f]+ <[^>]+> f3b00300 aese.8 q0, q0
+0[0-9a-f]+ <[^>]+> f3b0e30e aese.8 q7, q7
+0[0-9a-f]+ <[^>]+> f3f00320 aese.8 q8, q8
+0[0-9a-f]+ <[^>]+> f3f0e32e aese.8 q15, q15
+0[0-9a-f]+ <[^>]+> f3b00340 aesd.8 q0, q0
+0[0-9a-f]+ <[^>]+> f3b0e34e aesd.8 q7, q7
+0[0-9a-f]+ <[^>]+> f3f00360 aesd.8 q8, q8
+0[0-9a-f]+ <[^>]+> f3f0e36e aesd.8 q15, q15
+0[0-9a-f]+ <[^>]+> f3b00380 aesmc.8 q0, q0
+0[0-9a-f]+ <[^>]+> f3b0e38e aesmc.8 q7, q7
+0[0-9a-f]+ <[^>]+> f3f003a0 aesmc.8 q8, q8
+0[0-9a-f]+ <[^>]+> f3f0e3ae aesmc.8 q15, q15
+0[0-9a-f]+ <[^>]+> f3b003c0 aesimc.8 q0, q0
+0[0-9a-f]+ <[^>]+> f3b0e3ce aesimc.8 q7, q7
+0[0-9a-f]+ <[^>]+> f3f003e0 aesimc.8 q8, q8
+0[0-9a-f]+ <[^>]+> f3f0e3ee aesimc.8 q15, q15
+0[0-9a-f]+ <[^>]+> ffb0 0300 aese.8 q0, q0
+0[0-9a-f]+ <[^>]+> ffb0 e30e aese.8 q7, q7
+0[0-9a-f]+ <[^>]+> fff0 0320 aese.8 q8, q8
+0[0-9a-f]+ <[^>]+> fff0 e32e aese.8 q15, q15
+0[0-9a-f]+ <[^>]+> ffb0 0340 aesd.8 q0, q0
+0[0-9a-f]+ <[^>]+> ffb0 e34e aesd.8 q7, q7
+0[0-9a-f]+ <[^>]+> fff0 0360 aesd.8 q8, q8
+0[0-9a-f]+ <[^>]+> fff0 e36e aesd.8 q15, q15
+0[0-9a-f]+ <[^>]+> ffb0 0380 aesmc.8 q0, q0
+0[0-9a-f]+ <[^>]+> ffb0 e38e aesmc.8 q7, q7
+0[0-9a-f]+ <[^>]+> fff0 03a0 aesmc.8 q8, q8
+0[0-9a-f]+ <[^>]+> fff0 e3ae aesmc.8 q15, q15
+0[0-9a-f]+ <[^>]+> ffb0 03c0 aesimc.8 q0, q0
+0[0-9a-f]+ <[^>]+> ffb0 e3ce aesimc.8 q7, q7
+0[0-9a-f]+ <[^>]+> fff0 03e0 aesimc.8 q8, q8
+0[0-9a-f]+ <[^>]+> fff0 e3ee aesimc.8 q15, q15
--- /dev/null
+ .syntax unified
+ .arch armv8-a
+ .arch_extension crypto
+
+ .arm
+ aese.8 q0, q0
+ aese.8 q7, q7
+ aese.8 q8, q8
+ aese.8 q15, q15
+ aesd.8 q0, q0
+ aesd.8 q7, q7
+ aesd.8 q8, q8
+ aesd.8 q15, q15
+ aesmc.8 q0, q0
+ aesmc.8 q7, q7
+ aesmc.8 q8, q8
+ aesmc.8 q15, q15
+ aesimc.8 q0, q0
+ aesimc.8 q7, q7
+ aesimc.8 q8, q8
+ aesimc.8 q15, q15
+
+ .thumb
+ aese.8 q0, q0
+ aese.8 q7, q7
+ aese.8 q8, q8
+ aese.8 q15, q15
+ aesd.8 q0, q0
+ aesd.8 q7, q7
+ aesd.8 q8, q8
+ aesd.8 q15, q15
+ aesmc.8 q0, q0
+ aesmc.8 q7, q7
+ aesmc.8 q8, q8
+ aesmc.8 q15, q15
+ aesimc.8 q0, q0
+ aesimc.8 q7, q7
+ aesimc.8 q8, q8
+ aesimc.8 q15, q15
2012-08-24 Matthew Gretton-Dann <matthew.gretton-dann@arm.com>
+ * arm-dis.c (neon_opcodes): Add support for AES instructions.
+
+2012-08-24 Matthew Gretton-Dann <matthew.gretton-dann@arm.com>
+
* arm-dis.c (coprocessor_opcodes): Add support for HP/DP
conversions.
/* Two registers, miscellaneous. */
{FPU_NEON_EXT_ARMV8, 0xf3ba0400, 0xffbf0c10, "vrint%7-9?p?m?zaxn%u.f32.f32\t%12-15,22R, %0-3,5R"},
{FPU_NEON_EXT_ARMV8, 0xf3bb0000, 0xffbf0c10, "vcvt%8-9?mpna%u.%7?us32.f32\t%12-15,22R, %0-3,5R"},
+ {FPU_CRYPTO_EXT_ARMV8, 0xf3b00300, 0xffbf0fd0, "aese%u.8\t%12-15,22Q, %0-3,5Q"},
+ {FPU_CRYPTO_EXT_ARMV8, 0xf3b00340, 0xffbf0fd0, "aesd%u.8\t%12-15,22Q, %0-3,5Q"},
+ {FPU_CRYPTO_EXT_ARMV8, 0xf3b00380, 0xffbf0fd0, "aesmc%u.8\t%12-15,22Q, %0-3,5Q"},
+ {FPU_CRYPTO_EXT_ARMV8, 0xf3b003c0, 0xffbf0fd0, "aesimc%u.8\t%12-15,22Q, %0-3,5Q"},
{FPU_NEON_EXT_V1, 0xf2880a10, 0xfebf0fd0, "vmovl%c.%24?us8\t%12-15,22Q, %0-3,5D"},
{FPU_NEON_EXT_V1, 0xf2900a10, 0xfebf0fd0, "vmovl%c.%24?us16\t%12-15,22Q, %0-3,5D"},
{FPU_NEON_EXT_V1, 0xf2a00a10, 0xfebf0fd0, "vmovl%c.%24?us32\t%12-15,22Q, %0-3,5D"},