public static unsafe Vector128<nint> LoadScalarVector128(nint* address) { throw new PlatformNotSupportedException(); }
public static unsafe Vector128<nuint> LoadScalarVector128(nuint* address) { throw new PlatformNotSupportedException(); }
- // Floating-point sign bit operations
+ public static unsafe Vector128<sbyte> LoadScalarAndSplatVector128(sbyte* address) { throw new PlatformNotSupportedException(); }
+ public static unsafe Vector128<byte> LoadScalarAndSplatVector128(byte* address) { throw new PlatformNotSupportedException(); }
+ public static unsafe Vector128<short> LoadScalarAndSplatVector128(short* address) { throw new PlatformNotSupportedException(); }
+ public static unsafe Vector128<ushort> LoadScalarAndSplatVector128(ushort* address) { throw new PlatformNotSupportedException(); }
+ public static unsafe Vector128<int> LoadScalarAndSplatVector128(int* address) { throw new PlatformNotSupportedException(); }
+ public static unsafe Vector128<uint> LoadScalarAndSplatVector128(uint* address) { throw new PlatformNotSupportedException(); }
+ public static unsafe Vector128<long> LoadScalarAndSplatVector128(long* address) { throw new PlatformNotSupportedException(); }
+ public static unsafe Vector128<ulong> LoadScalarAndSplatVector128(ulong* address) { throw new PlatformNotSupportedException(); }
+ public static unsafe Vector128<float> LoadScalarAndSplatVector128(float* address) { throw new PlatformNotSupportedException(); }
+ public static unsafe Vector128<double> LoadScalarAndSplatVector128(double* address) { throw new PlatformNotSupportedException(); }
+ public static unsafe Vector128<nint> LoadScalarAndSplatVector128(nint* address) { throw new PlatformNotSupportedException(); }
+ public static unsafe Vector128<nuint> LoadScalarAndSplatVector128(nuint* address) { throw new PlatformNotSupportedException(); }
+
+ public static unsafe Vector128<sbyte> LoadScalarAndInsert(sbyte* address, Vector128<sbyte> vector, [ConstantExpected(Max = (byte)(15))] byte index) { throw new PlatformNotSupportedException(); }
+ public static unsafe Vector128<byte> LoadScalarAndInsert(byte* address, Vector128<byte> vector, [ConstantExpected(Max = (byte)(15))] byte index) { throw new PlatformNotSupportedException(); }
+ public static unsafe Vector128<short> LoadScalarAndInsert(short* address, Vector128<short> vector, [ConstantExpected(Max = (byte)(7))] byte index) { throw new PlatformNotSupportedException(); }
+ public static unsafe Vector128<ushort> LoadScalarAndInsert(ushort* address, Vector128<ushort> vector, [ConstantExpected(Max = (byte)(7))] byte index) { throw new PlatformNotSupportedException(); }
+ public static unsafe Vector128<int> LoadScalarAndInsert(int* address, Vector128<int> vector, [ConstantExpected(Max = (byte)(3))] byte index) { throw new PlatformNotSupportedException(); }
+ public static unsafe Vector128<uint> LoadScalarAndInsert(uint* address, Vector128<uint> vector, [ConstantExpected(Max = (byte)(3))] byte index) { throw new PlatformNotSupportedException(); }
+ public static unsafe Vector128<long> LoadScalarAndInsert(long* address, Vector128<long> vector, [ConstantExpected(Max = (byte)(1))] byte index) { throw new PlatformNotSupportedException(); }
+ public static unsafe Vector128<ulong> LoadScalarAndInsert(ulong* address, Vector128<ulong> vector, [ConstantExpected(Max = (byte)(1))] byte index) { throw new PlatformNotSupportedException(); }
+ public static unsafe Vector128<float> LoadScalarAndInsert(float* address, Vector128<float> vector, [ConstantExpected(Max = (byte)(3))] byte index) { throw new PlatformNotSupportedException(); }
+ public static unsafe Vector128<double> LoadScalarAndInsert(double* address, Vector128<double> vector, [ConstantExpected(Max = (byte)(1))] byte index) { throw new PlatformNotSupportedException(); }
+ public static unsafe Vector128<nint> LoadScalarAndInsert(nint* address, Vector128<nint> vector, [ConstantExpected(Max = (byte)(3))] byte index) { throw new PlatformNotSupportedException(); }
+ public static unsafe Vector128<nuint> LoadScalarAndInsert(nuint* address, Vector128<nuint> vector, [ConstantExpected(Max = (byte)(3))] byte index) { throw new PlatformNotSupportedException(); }
+
+ public static unsafe Vector128<short> LoadWideningVector128(sbyte* address) { throw new PlatformNotSupportedException(); }
+ public static unsafe Vector128<ushort> LoadWideningVector128(byte* address) { throw new PlatformNotSupportedException(); }
+ public static unsafe Vector128<int> LoadWideningVector128(short* address) { throw new PlatformNotSupportedException(); }
+ public static unsafe Vector128<uint> LoadWideningVector128(ushort* address) { throw new PlatformNotSupportedException(); }
+ public static unsafe Vector128<long> LoadWideningVector128(int* address) { throw new PlatformNotSupportedException(); }
+ public static unsafe Vector128<ulong> LoadWideningVector128(uint* address) { throw new PlatformNotSupportedException(); }
+
+ // Floating-point sign bit operations
public static Vector128<float> Negate(Vector128<float> value) { throw new PlatformNotSupportedException(); }
public static Vector128<double> Negate(Vector128<double> value) { throw new PlatformNotSupportedException(); }
[Intrinsic]
public static unsafe Vector128<nuint> LoadScalarVector128(nuint* address) => LoadScalarVector128(address);
- // Floating-point sign bit operations
+ /// <summary>
+ /// v128.load8_splat
+ /// </summary>
+ [Intrinsic]
+ public static unsafe Vector128<sbyte> LoadScalarAndSplatVector128(sbyte* address) => LoadScalarAndSplatVector128(address);
+ /// <summary>
+ /// v128.load8_splat
+ /// </summary>
+ [Intrinsic]
+ public static unsafe Vector128<byte> LoadScalarAndSplatVector128(byte* address) => LoadScalarAndSplatVector128(address);
+ /// <summary>
+ /// v128.load16_splat
+ /// </summary>
+ [Intrinsic]
+ public static unsafe Vector128<short> LoadScalarAndSplatVector128(short* address) => LoadScalarAndSplatVector128(address);
+ /// <summary>
+ /// v128.load16_splat
+ /// </summary>
+ [Intrinsic]
+ public static unsafe Vector128<ushort> LoadScalarAndSplatVector128(ushort* address) => LoadScalarAndSplatVector128(address);
+ /// <summary>
+ /// v128.load32_splat
+ /// </summary>
+ [Intrinsic]
+ public static unsafe Vector128<int> LoadScalarAndSplatVector128(int* address) => LoadScalarAndSplatVector128(address);
+ /// <summary>
+ /// v128.load32_splat
+ /// </summary>
+ [Intrinsic]
+ public static unsafe Vector128<uint> LoadScalarAndSplatVector128(uint* address) => LoadScalarAndSplatVector128(address);
+ /// <summary>
+ /// v128.load64_splat
+ /// </summary>
+ [Intrinsic]
+ public static unsafe Vector128<long> LoadScalarAndSplatVector128(long* address) => LoadScalarAndSplatVector128(address);
+ /// <summary>
+ /// v128.load64_splat
+ /// </summary>
+ [Intrinsic]
+ public static unsafe Vector128<ulong> LoadScalarAndSplatVector128(ulong* address) => LoadScalarAndSplatVector128(address);
+ /// <summary>
+ /// v128.load32_splat
+ /// </summary>
+ [Intrinsic]
+ public static unsafe Vector128<float> LoadScalarAndSplatVector128(float* address) => LoadScalarAndSplatVector128(address);
+ /// <summary>
+ /// v128.load64_splat
+ /// </summary>
+ [Intrinsic]
+ public static unsafe Vector128<double> LoadScalarAndSplatVector128(double* address) => LoadScalarAndSplatVector128(address);
+ /// <summary>
+ /// v128.load32_splat
+ /// </summary>
+ [Intrinsic]
+ public static unsafe Vector128<nint> LoadScalarAndSplatVector128(nint* address) => LoadScalarAndSplatVector128(address);
+ /// <summary>
+ /// v128.load32_splat
+ /// </summary>
+ [Intrinsic]
+ public static unsafe Vector128<nuint> LoadScalarAndSplatVector128(nuint* address) => LoadScalarAndSplatVector128(address);
+
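+ // Illustrative usage (not part of this change): with x == 7, the splat load
+ // reads one element and repeats it across every lane, lowering to a single
+ // splat-load instruction:
+ //   int x = 7;
+ //   Vector128<int> v = PackedSimd.LoadScalarAndSplatVector128(&x); // <7, 7, 7, 7> via v128.load32_splat
+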
+ /// <summary>
+ /// v128.load8_lane
+ /// </summary>
+ [Intrinsic]
+ public static unsafe Vector128<sbyte> LoadScalarAndInsert(sbyte* address, Vector128<sbyte> vector, [ConstantExpected(Max = (byte)(15))] byte index) => LoadScalarAndInsert(address, vector, index); // takes ImmLaneIdx16
+ /// <summary>
+ /// v128.load8_lane
+ /// </summary>
+ [Intrinsic]
+ public static unsafe Vector128<byte> LoadScalarAndInsert(byte* address, Vector128<byte> vector, [ConstantExpected(Max = (byte)(15))] byte index) => LoadScalarAndInsert(address, vector, index); // takes ImmLaneIdx16
+ /// <summary>
+ /// v128.load16_lane
+ /// </summary>
+ [Intrinsic]
+ public static unsafe Vector128<short> LoadScalarAndInsert(short* address, Vector128<short> vector, [ConstantExpected(Max = (byte)(7))] byte index) => LoadScalarAndInsert(address, vector, index); // takes ImmLaneIdx8
+ /// <summary>
+ /// v128.load16_lane
+ /// </summary>
+ [Intrinsic]
+ public static unsafe Vector128<ushort> LoadScalarAndInsert(ushort* address, Vector128<ushort> vector, [ConstantExpected(Max = (byte)(7))] byte index) => LoadScalarAndInsert(address, vector, index); // takes ImmLaneIdx8
+ /// <summary>
+ /// v128.load32_lane
+ /// </summary>
+ [Intrinsic]
+ public static unsafe Vector128<int> LoadScalarAndInsert(int* address, Vector128<int> vector, [ConstantExpected(Max = (byte)(3))] byte index) => LoadScalarAndInsert(address, vector, index); // takes ImmLaneIdx4
+ /// <summary>
+ /// v128.load32_lane
+ /// </summary>
+ [Intrinsic]
+ public static unsafe Vector128<uint> LoadScalarAndInsert(uint* address, Vector128<uint> vector, [ConstantExpected(Max = (byte)(3))] byte index) => LoadScalarAndInsert(address, vector, index); // takes ImmLaneIdx4
+ /// <summary>
+ /// v128.load64_lane
+ /// </summary>
+ [Intrinsic]
+ public static unsafe Vector128<long> LoadScalarAndInsert(long* address, Vector128<long> vector, [ConstantExpected(Max = (byte)(1))] byte index) => LoadScalarAndInsert(address, vector, index); // takes ImmLaneIdx2
+ /// <summary>
+ /// v128.load64_lane
+ /// </summary>
+ [Intrinsic]
+ public static unsafe Vector128<ulong> LoadScalarAndInsert(ulong* address, Vector128<ulong> vector, [ConstantExpected(Max = (byte)(1))] byte index) => LoadScalarAndInsert(address, vector, index); // takes ImmLaneIdx2
+ /// <summary>
+ /// v128.load32_lane
+ /// </summary>
+ [Intrinsic]
+ public static unsafe Vector128<float> LoadScalarAndInsert(float* address, Vector128<float> vector, [ConstantExpected(Max = (byte)(3))] byte index) => LoadScalarAndInsert(address, vector, index); // takes ImmLaneIdx4
+ /// <summary>
+ /// v128.load64_lane
+ /// </summary>
+ [Intrinsic]
+ public static unsafe Vector128<double> LoadScalarAndInsert(double* address, Vector128<double> vector, [ConstantExpected(Max = (byte)(1))] byte index) => LoadScalarAndInsert(address, vector, index); // takes ImmLaneIdx2
+ /// <summary>
+ /// v128.load32_lane
+ /// </summary>
+ [Intrinsic]
+ public static unsafe Vector128<nint> LoadScalarAndInsert(nint* address, Vector128<nint> vector, [ConstantExpected(Max = (byte)(3))] byte index) => LoadScalarAndInsert(address, vector, index); // takes ImmLaneIdx4
+ /// <summary>
+ /// v128.load32_lane
+ /// </summary>
+ [Intrinsic]
+ public static unsafe Vector128<nuint> LoadScalarAndInsert(nuint* address, Vector128<nuint> vector, [ConstantExpected(Max = (byte)(3))] byte index) => LoadScalarAndInsert(address, vector, index); // takes ImmLaneIdx4
+
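+ // Illustrative usage (not part of this change): replace lane 2 of an existing
+ // vector with a freshly loaded scalar; the lane index must be a constant:
+ //   Vector128<int> v = Vector128.Create(1, 2, 3, 4);
+ //   int x = 42;
+ //   v = PackedSimd.LoadScalarAndInsert(&x, v, 2); // <1, 2, 42, 4> via v128.load32_lane
+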
+ /// <summary>
+ /// v128.load8x8_s
+ /// </summary>
+ [Intrinsic]
+ public static unsafe Vector128<short> LoadWideningVector128(sbyte* address) => LoadWideningVector128(address);
+ /// <summary>
+ /// v128.load8x8_u
+ /// </summary>
+ [Intrinsic]
+ public static unsafe Vector128<ushort> LoadWideningVector128(byte* address) => LoadWideningVector128(address);
+ /// <summary>
+ /// v128.load16x4_s
+ /// </summary>
+ [Intrinsic]
+ public static unsafe Vector128<int> LoadWideningVector128(short* address) => LoadWideningVector128(address);
+ /// <summary>
+ /// v128.load16x4_u
+ /// </summary>
+ [Intrinsic]
+ public static unsafe Vector128<uint> LoadWideningVector128(ushort* address) => LoadWideningVector128(address);
+ /// <summary>
+ /// v128.load32x2_s
+ /// </summary>
+ [Intrinsic]
+ public static unsafe Vector128<long> LoadWideningVector128(int* address) => LoadWideningVector128(address);
+ /// <summary>
+ /// v128.load32x2_u
+ /// </summary>
+ [Intrinsic]
+ public static unsafe Vector128<ulong> LoadWideningVector128(uint* address) => LoadWideningVector128(address);
+
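+ // Illustrative usage (not part of this change): widen eight sbytes to eight
+ // shorts with a single v128.load8x8_s; the pointer must reference at least
+ // eight readable bytes:
+ //   Vector128<short> wide = PackedSimd.LoadWideningVector128(p); // p is sbyte*
+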
+ // Floating-point sign bit operations
/// <summary>
/// f32x4.neg
public static unsafe Vector128<double> LoadScalarVector128(double* address) { throw null; }
public static unsafe Vector128<nint> LoadScalarVector128(nint* address) { throw null; }
public static unsafe Vector128<nuint> LoadScalarVector128(nuint* address) { throw null; }
+ public static unsafe Vector128<sbyte> LoadScalarAndSplatVector128(sbyte* address) { throw null; }
+ public static unsafe Vector128<byte> LoadScalarAndSplatVector128(byte* address) { throw null; }
+ public static unsafe Vector128<short> LoadScalarAndSplatVector128(short* address) { throw null; }
+ public static unsafe Vector128<ushort> LoadScalarAndSplatVector128(ushort* address) { throw null; }
+ public static unsafe Vector128<int> LoadScalarAndSplatVector128(int* address) { throw null; }
+ public static unsafe Vector128<uint> LoadScalarAndSplatVector128(uint* address) { throw null; }
+ public static unsafe Vector128<long> LoadScalarAndSplatVector128(long* address) { throw null; }
+ public static unsafe Vector128<ulong> LoadScalarAndSplatVector128(ulong* address) { throw null; }
+ public static unsafe Vector128<float> LoadScalarAndSplatVector128(float* address) { throw null; }
+ public static unsafe Vector128<double> LoadScalarAndSplatVector128(double* address) { throw null; }
+ public static unsafe Vector128<nint> LoadScalarAndSplatVector128(nint* address) { throw null; }
+ public static unsafe Vector128<nuint> LoadScalarAndSplatVector128(nuint* address) { throw null; }
+ public static unsafe Vector128<sbyte> LoadScalarAndInsert(sbyte* address, Vector128<sbyte> vector, [ConstantExpected(Max = (byte)(15))] byte index) { throw null; }
+ public static unsafe Vector128<byte> LoadScalarAndInsert(byte* address, Vector128<byte> vector, [ConstantExpected(Max = (byte)(15))] byte index) { throw null; }
+ public static unsafe Vector128<short> LoadScalarAndInsert(short* address, Vector128<short> vector, [ConstantExpected(Max = (byte)(7))] byte index) { throw null; }
+ public static unsafe Vector128<ushort> LoadScalarAndInsert(ushort* address, Vector128<ushort> vector, [ConstantExpected(Max = (byte)(7))] byte index) { throw null; }
+ public static unsafe Vector128<int> LoadScalarAndInsert(int* address, Vector128<int> vector, [ConstantExpected(Max = (byte)(3))] byte index) { throw null; }
+ public static unsafe Vector128<uint> LoadScalarAndInsert(uint* address, Vector128<uint> vector, [ConstantExpected(Max = (byte)(3))] byte index) { throw null; }
+ public static unsafe Vector128<long> LoadScalarAndInsert(long* address, Vector128<long> vector, [ConstantExpected(Max = (byte)(1))] byte index) { throw null; }
+ public static unsafe Vector128<ulong> LoadScalarAndInsert(ulong* address, Vector128<ulong> vector, [ConstantExpected(Max = (byte)(1))] byte index) { throw null; }
+ public static unsafe Vector128<float> LoadScalarAndInsert(float* address, Vector128<float> vector, [ConstantExpected(Max = (byte)(3))] byte index) { throw null; }
+ public static unsafe Vector128<double> LoadScalarAndInsert(double* address, Vector128<double> vector, [ConstantExpected(Max = (byte)(1))] byte index) { throw null; }
+ public static unsafe Vector128<nint> LoadScalarAndInsert(nint* address, Vector128<nint> vector, [ConstantExpected(Max = (byte)(3))] byte index) { throw null; }
+ public static unsafe Vector128<nuint> LoadScalarAndInsert(nuint* address, Vector128<nuint> vector, [ConstantExpected(Max = (byte)(3))] byte index) { throw null; }
+ public static unsafe Vector128<short> LoadWideningVector128(sbyte* address) { throw null; }
+ public static unsafe Vector128<ushort> LoadWideningVector128(byte* address) { throw null; }
+ public static unsafe Vector128<int> LoadWideningVector128(short* address) { throw null; }
+ public static unsafe Vector128<uint> LoadWideningVector128(ushort* address) { throw null; }
+ public static unsafe Vector128<long> LoadWideningVector128(int* address) { throw null; }
+ public static unsafe Vector128<ulong> LoadWideningVector128(uint* address) { throw null; }
public static Vector128<float> Negate(Vector128<float> value) { throw null; }
public static Vector128<double> Negate(Vector128<double> value) { throw null; }
public static Vector128<float> Abs(Vector128<float> value) { throw null; }
break;
}
+ case OP_WASM_SIMD_LOAD_SCALAR_INSERT: {
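+ /* Load one unaligned scalar and insert it into the lane selected by arg3,
+  * bitcasting through the matching integer vector type when the incoming
+  * vector's element type (e.g. R4/R8) differs from the lowering type. */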
+ LLVMTypeRef rtype;
+ switch (inst_c1_type (ins)) {
+ case MONO_TYPE_I1:
+ case MONO_TYPE_U1:
+ rtype = v128_i1_t;
+ break;
+ case MONO_TYPE_I2:
+ case MONO_TYPE_U2:
+ rtype = v128_i2_t;
+ break;
+ case MONO_TYPE_I:
+ case MONO_TYPE_U:
+ case MONO_TYPE_I4:
+ case MONO_TYPE_U4:
+ case MONO_TYPE_R4:
+ rtype = v128_i4_t;
+ break;
+ case MONO_TYPE_I8:
+ case MONO_TYPE_U8:
+ case MONO_TYPE_R8:
+ rtype = v128_i8_t;
+ break;
+ default:
+ g_assert_not_reached ();
+ }
+ LLVMTypeRef srctype = LLVMGetElementType (rtype);
+ LLVMValueRef addr = convert (ctx, lhs, pointer_type (srctype));
+ LLVMValueRef scalar = mono_llvm_build_aligned_load (builder, srctype, addr, "", FALSE, 1);
+ LLVMTypeRef vtype = LLVMTypeOf (rhs);
+ LLVMValueRef vec = vtype == rtype ? rhs : LLVMBuildBitCast (builder, rhs, rtype, "");
+ vec = LLVMBuildInsertElement (builder, vec, scalar, arg3, "");
+ if (vtype != rtype)
+ vec = LLVMBuildBitCast (builder, vec, vtype, "");
+ values [ins->dreg] = vec;
+ break;
+ }
+ case OP_WASM_SIMD_LOAD_WIDENING: {
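+ /* Load a 64-bit half-width vector and sign- or zero-extend each element
+  * to twice its width, matching v128.loadNxM_s / v128.loadNxM_u. */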
+ MonoTypeEnum ptype = inst_c1_type (ins);
+ LLVMTypeRef srctype = NULL, dsttype = NULL;
+ switch (ptype) {
+ case MONO_TYPE_U1:
+ case MONO_TYPE_I1:
+ srctype = v64_i1_t;
+ dsttype = v128_i2_t;
+ break;
+ case MONO_TYPE_U2:
+ case MONO_TYPE_I2:
+ srctype = v64_i2_t;
+ dsttype = v128_i4_t;
+ break;
+ case MONO_TYPE_U:
+ case MONO_TYPE_I:
+ case MONO_TYPE_U4:
+ case MONO_TYPE_I4:
+ srctype = v64_i4_t;
+ dsttype = v128_i8_t;
+ break;
+ default:
+ g_assert_not_reached ();
+ }
+ LLVMValueRef addr = convert (ctx, lhs, pointer_type (srctype));
+ LLVMValueRef narrow = mono_llvm_build_aligned_load (builder, srctype, addr, "", FALSE, 1);
+ values [ins->dreg] = primitive_type_is_unsigned (ptype) ? LLVMBuildZExt (builder, narrow, dsttype, "") : LLVMBuildSExt (builder, narrow, dsttype, "");
+ break;
+ }
+ case OP_WASM_SIMD_LOAD_SCALAR_SPLAT: {
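+ /* Load a single unaligned element and broadcast it to all lanes,
+  * matching v128.loadN_splat. */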
+ int elems;
+ LLVMTypeRef rtype;
+ switch (inst_c1_type (ins)) {
+ case MONO_TYPE_I1:
+ case MONO_TYPE_U1:
+ rtype = v128_i1_t;
+ elems = 16;
+ break;
+ case MONO_TYPE_I2:
+ case MONO_TYPE_U2:
+ rtype = v128_i2_t;
+ elems = 8;
+ break;
+ case MONO_TYPE_I:
+ case MONO_TYPE_U:
+ case MONO_TYPE_I4:
+ case MONO_TYPE_U4:
+ rtype = v128_i4_t;
+ elems = 4;
+ break;
+ case MONO_TYPE_R4:
+ rtype = v128_r4_t;
+ elems = 4;
+ break;
+ case MONO_TYPE_I8:
+ case MONO_TYPE_U8:
+ rtype = v128_i8_t;
+ elems = 2;
+ break;
+ case MONO_TYPE_R8:
+ rtype = v128_r8_t;
+ elems = 2;
+ break;
+ default:
+ g_assert_not_reached ();
+ }
+ LLVMTypeRef srctype = LLVMGetElementType (rtype);
+ LLVMValueRef addr = convert (ctx, lhs, pointer_type (srctype));
+ LLVMValueRef scalar = mono_llvm_build_aligned_load (builder, srctype, addr, "", FALSE, 1);
+ values [ins->dreg] = broadcast_element (ctx, scalar, elems);
+ break;
+ }
#endif
#if defined(TARGET_ARM64) || defined(TARGET_X86) || defined(TARGET_AMD64) || defined(TARGET_WASM)
case OP_XEQUAL: {
MINI_OP(OP_WASM_SIMD_CONV_R8_TO_R4, "wasm_simd_conv_r8_to_r4", XREG, XREG, NONE)
MINI_OP(OP_WASM_SIMD_CONV_R8_TO_I4_ZERO, "wasm_simd_conv_r8_to_i4_zero", XREG, XREG, NONE)
MINI_OP(OP_WASM_SIMD_CONV_U4_TO_R8_LOW, "wasm_simd_conv_u4_to_r8_low", XREG, XREG, NONE)
+MINI_OP3(OP_WASM_SIMD_LOAD_SCALAR_INSERT, "wasm_simd_load_scalar_insert", XREG, IREG, XREG, IREG)
+MINI_OP(OP_WASM_SIMD_LOAD_SCALAR_SPLAT, "wasm_simd_load_scalar_splat", XREG, IREG, NONE)
+MINI_OP(OP_WASM_SIMD_LOAD_WIDENING, "wasm_simd_load_widening", XREG, IREG, NONE)
MINI_OP(OP_WASM_SIMD_SEXT_LOWER, "wasm_simd_ext_lower_s", XREG, XREG, NONE)
MINI_OP(OP_WASM_SIMD_SEXT_UPPER, "wasm_simd_ext_upper_s", XREG, XREG, NONE)
MINI_OP(OP_WASM_SIMD_ZEXT_LOWER, "wasm_simd_ext_lower_u", XREG, XREG, NONE)
{SN_Dot, OP_XOP_X_X_X, INTRINS_WASM_DOT},
{SN_ExtractLane},
{SN_Floor, OP_XOP_OVR_X_X, INTRINS_SIMD_FLOOR},
+ {SN_LoadScalarAndInsert, OP_WASM_SIMD_LOAD_SCALAR_INSERT},
+ {SN_LoadScalarAndSplatVector128, OP_WASM_SIMD_LOAD_SCALAR_SPLAT},
{SN_LoadScalarVector128},
{SN_LoadVector128, OP_LOADX_MEMBASE},
+ {SN_LoadWideningVector128, OP_WASM_SIMD_LOAD_WIDENING},
{SN_Max, OP_XBINOP, OP_IMAX, OP_XBINOP, OP_IMAX_UN, OP_XBINOP, OP_FMAX},
{SN_Min, OP_XBINOP, OP_IMIN, OP_XBINOP, OP_IMIN_UN, OP_XBINOP, OP_FMIN},
{SN_Multiply},
METHOD(ConvertToInt32Saturate)
METHOD(ConvertToUnsignedInt32Saturate)
METHOD(ExtractLane)
+METHOD(LoadScalarAndInsert)
+METHOD(LoadScalarAndSplatVector128)
+METHOD(LoadWideningVector128)
METHOD(MultiplyRoundedSaturateQ15)
METHOD(PseudoMax)
METHOD(PseudoMin)