// Copyright 2012 the V8 project authors. All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following
-// disclaimer in the documentation and/or other materials provided
-// with the distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived
-// from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
#include "v8.h"
}
-void CpuFeatures::Probe() {
+void CpuFeatures::Probe(bool serializer_enabled) {
ASSERT(supported_ == CpuFeatures::kDefaultCpuFeatures);
#ifdef DEBUG
initialized_ = true;
#endif
supported_ = kDefaultCpuFeatures;
- if (Serializer::enabled()) {
+ if (serializer_enabled) {
supported_ |= OS::CpuFeaturesImpliedByPlatform();
return; // No features if we might serialize.
}
ASSERT(cpu.has_sse2());
probed_features |= static_cast<uint64_t>(1) << SSE2;
- // CMOD must be available on every x64 CPU.
+ // CMOV must be available on every x64 CPU.
ASSERT(cpu.has_cmov());
probed_features |= static_cast<uint64_t>(1) << CMOV;
#endif
// Patch the code.
- patcher.masm()->movq(kScratchRegister, target, RelocInfo::NONE64);
+ patcher.masm()->movp(kScratchRegister, reinterpret_cast<void*>(target),
+ Assembler::RelocInfoNone());
patcher.masm()->call(kScratchRegister);
// Check that the size of the code generated is as expected.
const int
Register::kRegisterCodeByAllocationIndex[kMaxNumAllocatableRegisters] = {
- // rax, rbx, rdx, rcx, rdi, r8, r9, r11, r14, r15
- 0, 3, 2, 1, 7, 8, 9, 11, 14, 15
+ // rax, rbx, rdx, rcx, rsi, rdi, r8, r9, r11, r14, r15
+ 0, 3, 2, 1, 6, 7, 8, 9, 11, 14, 15
};
const int Register::kAllocationIndexByRegisterCode[kNumRegisters] = {
- 0, 3, 2, 1, -1, -1, -1, 4, 5, 6, -1, 7, -1, -1, 8, 9
+ 0, 3, 2, 1, -1, -1, 4, 5, 6, 7, -1, 8, -1, -1, 9, 10
};
// Assembler Instruction implementations.
-void Assembler::arithmetic_op(byte opcode, Register reg, const Operand& op) {
+void Assembler::arithmetic_op(byte opcode,
+ Register reg,
+ const Operand& op,
+ int size) {
EnsureSpace ensure_space(this);
- emit_rex_64(reg, op);
+ emit_rex(reg, op, size);
emit(opcode);
emit_operand(reg, op);
}
-void Assembler::arithmetic_op(byte opcode, Register reg, Register rm_reg) {
+void Assembler::arithmetic_op(byte opcode,
+ Register reg,
+ Register rm_reg,
+ int size) {
EnsureSpace ensure_space(this);
ASSERT((opcode & 0xC6) == 2);
if (rm_reg.low_bits() == 4) { // Forces SIB byte.
// Swap reg and rm_reg and change opcode operand order.
- emit_rex_64(rm_reg, reg);
+ emit_rex(rm_reg, reg, size);
emit(opcode ^ 0x02);
emit_modrm(rm_reg, reg);
} else {
- emit_rex_64(reg, rm_reg);
+ emit_rex(reg, rm_reg, size);
emit(opcode);
emit_modrm(reg, rm_reg);
}
}
-void Assembler::arithmetic_op_32(byte opcode, Register reg, Register rm_reg) {
+void Assembler::arithmetic_op_8(byte opcode, Register reg, const Operand& op) {
+ EnsureSpace ensure_space(this);
+ if (!reg.is_byte_register()) {
+ // Register is not one of al, bl, cl, dl. Its encoding needs REX.
+ emit_rex_32(reg);
+ }
+ emit(opcode);
+ emit_operand(reg, op);
+}
+
+
+void Assembler::arithmetic_op_8(byte opcode, Register reg, Register rm_reg) {
EnsureSpace ensure_space(this);
ASSERT((opcode & 0xC6) == 2);
- if (rm_reg.low_bits() == 4) { // Forces SIB byte.
+ if (rm_reg.low_bits() == 4) { // Forces SIB byte.
// Swap reg and rm_reg and change opcode operand order.
- emit_optional_rex_32(rm_reg, reg);
- emit(opcode ^ 0x02); // E.g. 0x03 -> 0x01 for ADD.
+ if (!rm_reg.is_byte_register() || !reg.is_byte_register()) {
+ // Register is not one of al, bl, cl, dl. Its encoding needs REX.
+ emit_rex_32(rm_reg, reg);
+ }
+ emit(opcode ^ 0x02);
emit_modrm(rm_reg, reg);
} else {
- emit_optional_rex_32(reg, rm_reg);
+ if (!reg.is_byte_register() || !rm_reg.is_byte_register()) {
+ // Register is not one of al, bl, cl, dl. Its encoding needs REX.
+ emit_rex_32(reg, rm_reg);
+ }
emit(opcode);
emit_modrm(reg, rm_reg);
}
}
-void Assembler::arithmetic_op_32(byte opcode,
- Register reg,
- const Operand& rm_reg) {
- EnsureSpace ensure_space(this);
- emit_optional_rex_32(reg, rm_reg);
- emit(opcode);
- emit_operand(reg, rm_reg);
-}
-
-
void Assembler::immediate_arithmetic_op(byte subcode,
Register dst,
- Immediate src) {
+ Immediate src,
+ int size) {
EnsureSpace ensure_space(this);
- emit_rex_64(dst);
+ emit_rex(dst, size);
if (is_int8(src.value_)) {
emit(0x83);
emit_modrm(subcode, dst);
void Assembler::immediate_arithmetic_op(byte subcode,
const Operand& dst,
- Immediate src) {
+ Immediate src,
+ int size) {
EnsureSpace ensure_space(this);
- emit_rex_64(dst);
+ emit_rex(dst, size);
if (is_int8(src.value_)) {
emit(0x83);
emit_operand(subcode, dst);
}
-void Assembler::immediate_arithmetic_op_32(byte subcode,
- Register dst,
- Immediate src) {
- EnsureSpace ensure_space(this);
- emit_optional_rex_32(dst);
- if (is_int8(src.value_)) {
- emit(0x83);
- emit_modrm(subcode, dst);
- emit(src.value_);
- } else if (dst.is(rax)) {
- emit(0x05 | (subcode << 3));
- emitl(src.value_);
- } else {
- emit(0x81);
- emit_modrm(subcode, dst);
- emitl(src.value_);
- }
-}
-
-
-void Assembler::immediate_arithmetic_op_32(byte subcode,
- const Operand& dst,
- Immediate src) {
- EnsureSpace ensure_space(this);
- emit_optional_rex_32(dst);
- if (is_int8(src.value_)) {
- emit(0x83);
- emit_operand(subcode, dst);
- emit(src.value_);
- } else {
- emit(0x81);
- emit_operand(subcode, dst);
- emitl(src.value_);
- }
-}
-
-
void Assembler::immediate_arithmetic_op_8(byte subcode,
const Operand& dst,
Immediate src) {
Immediate src) {
EnsureSpace ensure_space(this);
if (!dst.is_byte_register()) {
- // Use 64-bit mode byte registers.
- emit_rex_64(dst);
+ // Register is not one of al, bl, cl, dl. Its encoding needs REX.
+ emit_rex_32(dst);
}
ASSERT(is_int8(src.value_) || is_uint8(src.value_));
emit(0x80);
}
-void Assembler::shift(Register dst, Immediate shift_amount, int subcode) {
+void Assembler::shift(Register dst,
+ Immediate shift_amount,
+ int subcode,
+ int size) {
EnsureSpace ensure_space(this);
- ASSERT(is_uint6(shift_amount.value_)); // illegal shift count
+ ASSERT(size == kInt64Size ? is_uint6(shift_amount.value_)
+ : is_uint5(shift_amount.value_));
if (shift_amount.value_ == 1) {
- emit_rex_64(dst);
+ emit_rex(dst, size);
emit(0xD1);
emit_modrm(subcode, dst);
} else {
- emit_rex_64(dst);
+ emit_rex(dst, size);
emit(0xC1);
emit_modrm(subcode, dst);
emit(shift_amount.value_);
}
-void Assembler::shift(Register dst, int subcode) {
- EnsureSpace ensure_space(this);
- emit_rex_64(dst);
- emit(0xD3);
- emit_modrm(subcode, dst);
-}
-
-
-void Assembler::shift_32(Register dst, int subcode) {
+void Assembler::shift(Register dst, int subcode, int size) {
EnsureSpace ensure_space(this);
- emit_optional_rex_32(dst);
+ emit_rex(dst, size);
emit(0xD3);
emit_modrm(subcode, dst);
}
-void Assembler::shift_32(Register dst, Immediate shift_amount, int subcode) {
- EnsureSpace ensure_space(this);
- ASSERT(is_uint5(shift_amount.value_)); // illegal shift count
- if (shift_amount.value_ == 1) {
- emit_optional_rex_32(dst);
- emit(0xD1);
- emit_modrm(subcode, dst);
- } else {
- emit_optional_rex_32(dst);
- emit(0xC1);
- emit_modrm(subcode, dst);
- emit(shift_amount.value_);
- }
-}
-
-
void Assembler::bt(const Operand& dst, Register src) {
EnsureSpace ensure_space(this);
emit_rex_64(src, dst);
}
+void Assembler::bsrl(Register dst, Register src) {
+ EnsureSpace ensure_space(this);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0xBD);
+ emit_modrm(dst, src);
+}
+
+
void Assembler::call(Label* L) {
positions_recorder()->WriteRecordedPositions();
EnsureSpace ensure_space(this);
}
-void Assembler::decq(Register dst) {
- EnsureSpace ensure_space(this);
- emit_rex_64(dst);
- emit(0xFF);
- emit_modrm(0x1, dst);
-}
-
-
-void Assembler::decq(const Operand& dst) {
- EnsureSpace ensure_space(this);
- emit_rex_64(dst);
- emit(0xFF);
- emit_operand(1, dst);
-}
-
-
-void Assembler::decl(Register dst) {
+void Assembler::emit_dec(Register dst, int size) {
EnsureSpace ensure_space(this);
- emit_optional_rex_32(dst);
+ emit_rex(dst, size);
emit(0xFF);
emit_modrm(0x1, dst);
}
-void Assembler::decl(const Operand& dst) {
+void Assembler::emit_dec(const Operand& dst, int size) {
EnsureSpace ensure_space(this);
- emit_optional_rex_32(dst);
+ emit_rex(dst, size);
emit(0xFF);
emit_operand(1, dst);
}
}
-void Assembler::idivq(Register src) {
- EnsureSpace ensure_space(this);
- emit_rex_64(src);
- emit(0xF7);
- emit_modrm(0x7, src);
-}
-
-
-void Assembler::idivl(Register src) {
+void Assembler::emit_idiv(Register src, int size) {
EnsureSpace ensure_space(this);
- emit_optional_rex_32(src);
+ emit_rex(src, size);
emit(0xF7);
emit_modrm(0x7, src);
}
-void Assembler::imul(Register src) {
+void Assembler::emit_imul(Register src, int size) {
EnsureSpace ensure_space(this);
- emit_rex_64(src);
+ emit_rex(src, size);
emit(0xF7);
emit_modrm(0x5, src);
}
-void Assembler::imul(Register dst, Register src) {
- EnsureSpace ensure_space(this);
- emit_rex_64(dst, src);
- emit(0x0F);
- emit(0xAF);
- emit_modrm(dst, src);
-}
-
-
-void Assembler::imul(Register dst, const Operand& src) {
- EnsureSpace ensure_space(this);
- emit_rex_64(dst, src);
- emit(0x0F);
- emit(0xAF);
- emit_operand(dst, src);
-}
-
-
-void Assembler::imul(Register dst, Register src, Immediate imm) {
- EnsureSpace ensure_space(this);
- emit_rex_64(dst, src);
- if (is_int8(imm.value_)) {
- emit(0x6B);
- emit_modrm(dst, src);
- emit(imm.value_);
- } else {
- emit(0x69);
- emit_modrm(dst, src);
- emitl(imm.value_);
- }
-}
-
-
-void Assembler::imull(Register dst, Register src) {
+void Assembler::emit_imul(Register dst, Register src, int size) {
EnsureSpace ensure_space(this);
- emit_optional_rex_32(dst, src);
+ emit_rex(dst, src, size);
emit(0x0F);
emit(0xAF);
emit_modrm(dst, src);
}
-void Assembler::imull(Register dst, const Operand& src) {
+void Assembler::emit_imul(Register dst, const Operand& src, int size) {
EnsureSpace ensure_space(this);
- emit_optional_rex_32(dst, src);
+ emit_rex(dst, src, size);
emit(0x0F);
emit(0xAF);
emit_operand(dst, src);
}
-void Assembler::imull(Register dst, Register src, Immediate imm) {
+void Assembler::emit_imul(Register dst, Register src, Immediate imm, int size) {
EnsureSpace ensure_space(this);
- emit_optional_rex_32(dst, src);
+ emit_rex(dst, src, size);
if (is_int8(imm.value_)) {
emit(0x6B);
emit_modrm(dst, src);
}
-void Assembler::incq(Register dst) {
+void Assembler::emit_inc(Register dst, int size) {
EnsureSpace ensure_space(this);
- emit_rex_64(dst);
+ emit_rex(dst, size);
emit(0xFF);
emit_modrm(0x0, dst);
}
-void Assembler::incq(const Operand& dst) {
- EnsureSpace ensure_space(this);
- emit_rex_64(dst);
- emit(0xFF);
- emit_operand(0, dst);
-}
-
-
-void Assembler::incl(const Operand& dst) {
+void Assembler::emit_inc(const Operand& dst, int size) {
EnsureSpace ensure_space(this);
- emit_optional_rex_32(dst);
+ emit_rex(dst, size);
emit(0xFF);
emit_operand(0, dst);
}
-void Assembler::incl(Register dst) {
- EnsureSpace ensure_space(this);
- emit_optional_rex_32(dst);
- emit(0xFF);
- emit_modrm(0, dst);
-}
-
-
void Assembler::int3() {
EnsureSpace ensure_space(this);
emit(0xCC);
}
-void Assembler::lea(Register dst, const Operand& src) {
- EnsureSpace ensure_space(this);
- emit_rex_64(dst, src);
- emit(0x8D);
- emit_operand(dst, src);
-}
-
-
-void Assembler::leal(Register dst, const Operand& src) {
+void Assembler::emit_lea(Register dst, const Operand& src, int size) {
EnsureSpace ensure_space(this);
- emit_optional_rex_32(dst, src);
+ emit_rex(dst, src, size);
emit(0x8D);
emit_operand(dst, src);
}
void Assembler::load_rax(void* value, RelocInfo::Mode mode) {
EnsureSpace ensure_space(this);
- emit(0x48); // REX.W
- emit(0xA1);
- emitp(value, mode);
+ if (kPointerSize == kInt64Size) {
+ emit(0x48); // REX.W
+ emit(0xA1);
+ emitp(value, mode);
+ } else {
+ ASSERT(kPointerSize == kInt32Size);
+ emit(0xA1);
+ emitp(value, mode);
+ // In 64-bit mode, need to zero extend the operand to 8 bytes.
+    // See 2.2.1.4 in Intel 64 and IA-32 Architectures Software
+ // Developer's Manual Volume 2.
+ emitl(0);
+ }
}
}
-void Assembler::movw(const Operand& dst, Register src) {
+void Assembler::movb(const Operand& dst, Immediate imm) {
EnsureSpace ensure_space(this);
- emit(0x66);
- emit_optional_rex_32(src, dst);
- emit(0x89);
- emit_operand(src, dst);
+ emit_optional_rex_32(dst);
+ emit(0xC6);
+ emit_operand(0x0, dst);
+ emit(static_cast<byte>(imm.value_));
}
-void Assembler::movl(Register dst, const Operand& src) {
+void Assembler::movw(Register dst, const Operand& src) {
EnsureSpace ensure_space(this);
+ emit(0x66);
emit_optional_rex_32(dst, src);
emit(0x8B);
emit_operand(dst, src);
}
-void Assembler::movl(Register dst, Register src) {
- EnsureSpace ensure_space(this);
- if (src.low_bits() == 4) {
- emit_optional_rex_32(src, dst);
- emit(0x89);
- emit_modrm(src, dst);
- } else {
- emit_optional_rex_32(dst, src);
- emit(0x8B);
- emit_modrm(dst, src);
- }
-}
-
-
-void Assembler::movl(const Operand& dst, Register src) {
+void Assembler::movw(const Operand& dst, Register src) {
EnsureSpace ensure_space(this);
+ emit(0x66);
emit_optional_rex_32(src, dst);
emit(0x89);
emit_operand(src, dst);
}
-void Assembler::movl(const Operand& dst, Immediate value) {
+void Assembler::movw(const Operand& dst, Immediate imm) {
EnsureSpace ensure_space(this);
+ emit(0x66);
emit_optional_rex_32(dst);
emit(0xC7);
emit_operand(0x0, dst);
- emit(value);
-}
-
-
-void Assembler::movl(Register dst, Immediate value) {
- EnsureSpace ensure_space(this);
- emit_optional_rex_32(dst);
- emit(0xB8 + dst.low_bits());
- emit(value);
+ emit(static_cast<byte>(imm.value_ & 0xff));
+ emit(static_cast<byte>(imm.value_ >> 8));
}
-void Assembler::movq(Register dst, const Operand& src) {
+void Assembler::emit_mov(Register dst, const Operand& src, int size) {
EnsureSpace ensure_space(this);
- emit_rex_64(dst, src);
+ emit_rex(dst, src, size);
emit(0x8B);
emit_operand(dst, src);
}
-void Assembler::movq(Register dst, Register src) {
+void Assembler::emit_mov(Register dst, Register src, int size) {
EnsureSpace ensure_space(this);
if (src.low_bits() == 4) {
- emit_rex_64(src, dst);
+ emit_rex(src, dst, size);
emit(0x89);
emit_modrm(src, dst);
} else {
- emit_rex_64(dst, src);
+ emit_rex(dst, src, size);
emit(0x8B);
emit_modrm(dst, src);
}
}
-void Assembler::movq(Register dst, Immediate value) {
- EnsureSpace ensure_space(this);
- emit_rex_64(dst);
- emit(0xC7);
- emit_modrm(0x0, dst);
- emit(value); // Only 32-bit immediates are possible, not 8-bit immediates.
-}
-
-
-void Assembler::movq(const Operand& dst, Register src) {
+void Assembler::emit_mov(const Operand& dst, Register src, int size) {
EnsureSpace ensure_space(this);
- emit_rex_64(src, dst);
+ emit_rex(src, dst, size);
emit(0x89);
emit_operand(src, dst);
}
-void Assembler::movq(Register dst, void* value, RelocInfo::Mode rmode) {
- // This method must not be used with heap object references. The stored
- // address is not GC safe. Use the handle version instead.
- ASSERT(rmode > RelocInfo::LAST_GCED_ENUM);
+void Assembler::emit_mov(Register dst, Immediate value, int size) {
EnsureSpace ensure_space(this);
- emit_rex_64(dst);
- emit(0xB8 | dst.low_bits());
- emitp(value, rmode);
+ emit_rex(dst, size);
+ if (size == kInt64Size) {
+ emit(0xC7);
+ emit_modrm(0x0, dst);
+ } else {
+ ASSERT(size == kInt32Size);
+ emit(0xB8 + dst.low_bits());
+ }
+ emit(value);
}
-void Assembler::movq(Register dst, int64_t value, RelocInfo::Mode rmode) {
- // Non-relocatable values might not need a 64-bit representation.
- ASSERT(RelocInfo::IsNone(rmode));
- if (is_uint32(value)) {
- movl(dst, Immediate(static_cast<int32_t>(value)));
- } else if (is_int32(value)) {
- movq(dst, Immediate(static_cast<int32_t>(value)));
- } else {
- // Value cannot be represented by 32 bits, so do a full 64 bit immediate
- // value.
- EnsureSpace ensure_space(this);
- emit_rex_64(dst);
- emit(0xB8 | dst.low_bits());
- emitq(value);
- }
+void Assembler::emit_mov(const Operand& dst, Immediate value, int size) {
+ EnsureSpace ensure_space(this);
+ emit_rex(dst, size);
+ emit(0xC7);
+ emit_operand(0x0, dst);
+ emit(value);
}
-void Assembler::movq(Register dst, ExternalReference ref) {
- Address value = reinterpret_cast<Address>(ref.address());
- movq(dst, value, RelocInfo::EXTERNAL_REFERENCE);
+void Assembler::movp(Register dst, void* value, RelocInfo::Mode rmode) {
+ EnsureSpace ensure_space(this);
+ emit_rex(dst, kPointerSize);
+ emit(0xB8 | dst.low_bits());
+ emitp(value, rmode);
}
-void Assembler::movq(const Operand& dst, Immediate value) {
+void Assembler::movq(Register dst, int64_t value) {
EnsureSpace ensure_space(this);
emit_rex_64(dst);
- emit(0xC7);
- emit_operand(0, dst);
- emit(value);
+ emit(0xB8 | dst.low_bits());
+ emitq(value);
+}
+
+
+void Assembler::movq(Register dst, uint64_t value) {
+ movq(dst, static_cast<int64_t>(value));
}
}
-void Assembler::movq(Register dst, Handle<Object> value, RelocInfo::Mode mode) {
- AllowDeferredHandleDereference using_raw_address;
- // If there is no relocation info, emit the value of the handle efficiently
- // (possibly using less that 8 bytes for the value).
- if (RelocInfo::IsNone(mode)) {
- // There is no possible reason to store a heap pointer without relocation
- // info, so it must be a smi.
- ASSERT(value->IsSmi());
- movq(dst, reinterpret_cast<int64_t>(*value), RelocInfo::NONE64);
- } else {
- EnsureSpace ensure_space(this);
- ASSERT(value->IsHeapObject());
- ASSERT(!isolate()->heap()->InNewSpace(*value));
- emit_rex_64(dst);
- emit(0xB8 | dst.low_bits());
- emitp(value.location(), mode);
- }
+void Assembler::movsxbl(Register dst, const Operand& src) {
+ EnsureSpace ensure_space(this);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0xBE);
+ emit_operand(dst, src);
}
}
+void Assembler::movsxwl(Register dst, const Operand& src) {
+ EnsureSpace ensure_space(this);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0xBF);
+ emit_operand(dst, src);
+}
+
+
void Assembler::movsxwq(Register dst, const Operand& src) {
EnsureSpace ensure_space(this);
emit_rex_64(dst, src);
}
-void Assembler::movzxbq(Register dst, const Operand& src) {
+void Assembler::emit_movzxb(Register dst, const Operand& src, int size) {
EnsureSpace ensure_space(this);
// 32 bit operations zero the top 32 bits of 64 bit registers. Therefore
// there is no need to make this a 64 bit operation.
}
-void Assembler::movzxbl(Register dst, const Operand& src) {
- EnsureSpace ensure_space(this);
- emit_optional_rex_32(dst, src);
- emit(0x0F);
- emit(0xB6);
- emit_operand(dst, src);
-}
-
-
-void Assembler::movzxwq(Register dst, const Operand& src) {
- EnsureSpace ensure_space(this);
- emit_optional_rex_32(dst, src);
- emit(0x0F);
- emit(0xB7);
- emit_operand(dst, src);
-}
-
-
-void Assembler::movzxwl(Register dst, const Operand& src) {
+void Assembler::emit_movzxw(Register dst, const Operand& src, int size) {
EnsureSpace ensure_space(this);
+ // 32 bit operations zero the top 32 bits of 64 bit registers. Therefore
+ // there is no need to make this a 64 bit operation.
emit_optional_rex_32(dst, src);
emit(0x0F);
emit(0xB7);
}
-void Assembler::movzxwl(Register dst, Register src) {
+void Assembler::emit_movzxw(Register dst, Register src, int size) {
EnsureSpace ensure_space(this);
+ // 32 bit operations zero the top 32 bits of 64 bit registers. Therefore
+ // there is no need to make this a 64 bit operation.
emit_optional_rex_32(dst, src);
emit(0x0F);
emit(0xB7);
}
-void Assembler::repmovsl() {
- EnsureSpace ensure_space(this);
- emit(0xF3);
- emit(0xA5);
-}
-
-
-void Assembler::repmovsq() {
+void Assembler::emit_repmovs(int size) {
EnsureSpace ensure_space(this);
emit(0xF3);
- emit_rex_64();
+ emit_rex(size);
emit(0xA5);
}
}
-void Assembler::neg(Register dst) {
- EnsureSpace ensure_space(this);
- emit_rex_64(dst);
- emit(0xF7);
- emit_modrm(0x3, dst);
-}
-
-
-void Assembler::negl(Register dst) {
+void Assembler::emit_neg(Register dst, int size) {
EnsureSpace ensure_space(this);
- emit_optional_rex_32(dst);
+ emit_rex(dst, size);
emit(0xF7);
emit_modrm(0x3, dst);
}
-void Assembler::neg(const Operand& dst) {
+void Assembler::emit_neg(const Operand& dst, int size) {
EnsureSpace ensure_space(this);
emit_rex_64(dst);
emit(0xF7);
}
-void Assembler::not_(Register dst) {
+void Assembler::emit_not(Register dst, int size) {
EnsureSpace ensure_space(this);
- emit_rex_64(dst);
+ emit_rex(dst, size);
emit(0xF7);
emit_modrm(0x2, dst);
}
-void Assembler::not_(const Operand& dst) {
+void Assembler::emit_not(const Operand& dst, int size) {
EnsureSpace ensure_space(this);
- emit_rex_64(dst);
+ emit_rex(dst, size);
emit(0xF7);
emit_operand(2, dst);
}
-void Assembler::notl(Register dst) {
- EnsureSpace ensure_space(this);
- emit_optional_rex_32(dst);
- emit(0xF7);
- emit_modrm(0x2, dst);
-}
-
-
void Assembler::Nop(int n) {
// The recommended muti-byte sequences of NOP instructions from the Intel 64
// and IA-32 Architectures Software Developer's Manual.
}
-void Assembler::pop(Register dst) {
+void Assembler::popq(Register dst) {
EnsureSpace ensure_space(this);
emit_optional_rex_32(dst);
emit(0x58 | dst.low_bits());
}
-void Assembler::pop(const Operand& dst) {
+void Assembler::popq(const Operand& dst) {
EnsureSpace ensure_space(this);
emit_optional_rex_32(dst);
emit(0x8F);
}
-void Assembler::push(Register src) {
+void Assembler::pushq(Register src) {
EnsureSpace ensure_space(this);
emit_optional_rex_32(src);
emit(0x50 | src.low_bits());
}
-void Assembler::push(const Operand& src) {
+void Assembler::pushq(const Operand& src) {
EnsureSpace ensure_space(this);
emit_optional_rex_32(src);
emit(0xFF);
}
-void Assembler::push(Immediate value) {
+void Assembler::pushq(Immediate value) {
EnsureSpace ensure_space(this);
if (is_int8(value.value_)) {
emit(0x6A);
}
-void Assembler::push_imm32(int32_t imm32) {
+void Assembler::pushq_imm32(int32_t imm32) {
EnsureSpace ensure_space(this);
emit(0x68);
emitl(imm32);
}
-void Assembler::xchgq(Register dst, Register src) {
+void Assembler::emit_xchg(Register dst, Register src, int size) {
EnsureSpace ensure_space(this);
if (src.is(rax) || dst.is(rax)) { // Single-byte encoding
Register other = src.is(rax) ? dst : src;
- emit_rex_64(other);
+ emit_rex(other, size);
emit(0x90 | other.low_bits());
} else if (dst.low_bits() == 4) {
- emit_rex_64(dst, src);
+ emit_rex(dst, src, size);
emit(0x87);
emit_modrm(dst, src);
} else {
- emit_rex_64(src, dst);
+ emit_rex(src, dst, size);
emit(0x87);
emit_modrm(src, dst);
}
}
-void Assembler::xchgl(Register dst, Register src) {
+void Assembler::store_rax(void* dst, RelocInfo::Mode mode) {
EnsureSpace ensure_space(this);
- if (src.is(rax) || dst.is(rax)) { // Single-byte encoding
- Register other = src.is(rax) ? dst : src;
- emit_optional_rex_32(other);
- emit(0x90 | other.low_bits());
- } else if (dst.low_bits() == 4) {
- emit_optional_rex_32(dst, src);
- emit(0x87);
- emit_modrm(dst, src);
+ if (kPointerSize == kInt64Size) {
+ emit(0x48); // REX.W
+ emit(0xA3);
+ emitp(dst, mode);
} else {
- emit_optional_rex_32(src, dst);
- emit(0x87);
- emit_modrm(src, dst);
+ ASSERT(kPointerSize == kInt32Size);
+ emit(0xA3);
+ emitp(dst, mode);
+ // In 64-bit mode, need to zero extend the operand to 8 bytes.
+    // See 2.2.1.4 in Intel 64 and IA-32 Architectures Software
+ // Developer's Manual Volume 2.
+ emitl(0);
}
}
-void Assembler::store_rax(void* dst, RelocInfo::Mode mode) {
- EnsureSpace ensure_space(this);
- emit(0x48); // REX.W
- emit(0xA3);
- emitp(dst, mode);
-}
-
-
void Assembler::store_rax(ExternalReference ref) {
store_rax(ref.address(), RelocInfo::EXTERNAL_REFERENCE);
}
}
-void Assembler::testl(Register dst, Register src) {
+void Assembler::emit_test(Register dst, Register src, int size) {
EnsureSpace ensure_space(this);
if (src.low_bits() == 4) {
- emit_optional_rex_32(src, dst);
+ emit_rex(src, dst, size);
emit(0x85);
emit_modrm(src, dst);
} else {
- emit_optional_rex_32(dst, src);
+ emit_rex(dst, src, size);
emit(0x85);
emit_modrm(dst, src);
}
}
-void Assembler::testl(Register reg, Immediate mask) {
+void Assembler::emit_test(Register reg, Immediate mask, int size) {
// testl with a mask that fits in the low byte is exactly testb.
if (is_uint8(mask.value_)) {
testb(reg, mask);
}
EnsureSpace ensure_space(this);
if (reg.is(rax)) {
+ emit_rex(rax, size);
emit(0xA9);
emit(mask);
} else {
- emit_optional_rex_32(rax, reg);
+ emit_rex(reg, size);
emit(0xF7);
emit_modrm(0x0, reg);
emit(mask);
}
-void Assembler::testl(const Operand& op, Immediate mask) {
+void Assembler::emit_test(const Operand& op, Immediate mask, int size) {
// testl with a mask that fits in the low byte is exactly testb.
if (is_uint8(mask.value_)) {
testb(op, mask);
return;
}
EnsureSpace ensure_space(this);
- emit_optional_rex_32(rax, op);
+ emit_rex(rax, op, size);
emit(0xF7);
emit_operand(rax, op); // Operation code 0
emit(mask);
}
-void Assembler::testl(const Operand& op, Register reg) {
+void Assembler::emit_test(const Operand& op, Register reg, int size) {
EnsureSpace ensure_space(this);
- emit_optional_rex_32(reg, op);
+ emit_rex(reg, op, size);
emit(0x85);
emit_operand(reg, op);
}
-void Assembler::testq(const Operand& op, Register reg) {
- EnsureSpace ensure_space(this);
- emit_rex_64(reg, op);
- emit(0x85);
- emit_operand(reg, op);
-}
-
-
-void Assembler::testq(Register dst, Register src) {
- EnsureSpace ensure_space(this);
- if (src.low_bits() == 4) {
- emit_rex_64(src, dst);
- emit(0x85);
- emit_modrm(src, dst);
- } else {
- emit_rex_64(dst, src);
- emit(0x85);
- emit_modrm(dst, src);
- }
-}
-
-
-void Assembler::testq(Register dst, Immediate mask) {
- if (is_uint8(mask.value_)) {
- testb(dst, mask);
- return;
- }
- EnsureSpace ensure_space(this);
- if (dst.is(rax)) {
- emit_rex_64();
- emit(0xA9);
- emit(mask);
- } else {
- emit_rex_64(dst);
- emit(0xF7);
- emit_modrm(0, dst);
- emit(mask);
- }
-}
-
-
// FPU instructions.
}
-// SSE 2 operations.
-
-void Assembler::movd(XMMRegister dst, Register src) {
+void Assembler::andps(XMMRegister dst, const Operand& src) {
EnsureSpace ensure_space(this);
- emit(0x66);
emit_optional_rex_32(dst, src);
emit(0x0F);
- emit(0x6E);
+ emit(0x54);
emit_sse_operand(dst, src);
}
-void Assembler::movd(Register dst, XMMRegister src) {
+void Assembler::orps(XMMRegister dst, XMMRegister src) {
EnsureSpace ensure_space(this);
- emit(0x66);
- emit_optional_rex_32(src, dst);
+ emit_optional_rex_32(dst, src);
emit(0x0F);
- emit(0x7E);
- emit_sse_operand(src, dst);
+ emit(0x56);
+ emit_sse_operand(dst, src);
}
-void Assembler::movq(XMMRegister dst, Register src) {
+void Assembler::orps(XMMRegister dst, const Operand& src) {
EnsureSpace ensure_space(this);
- emit(0x66);
- emit_rex_64(dst, src);
+ emit_optional_rex_32(dst, src);
emit(0x0F);
- emit(0x6E);
+ emit(0x56);
emit_sse_operand(dst, src);
}
-void Assembler::movq(Register dst, XMMRegister src) {
+void Assembler::xorps(XMMRegister dst, XMMRegister src) {
EnsureSpace ensure_space(this);
- emit(0x66);
- emit_rex_64(src, dst);
+ emit_optional_rex_32(dst, src);
emit(0x0F);
- emit(0x7E);
- emit_sse_operand(src, dst);
+ emit(0x57);
+ emit_sse_operand(dst, src);
}
-void Assembler::movq(XMMRegister dst, XMMRegister src) {
+void Assembler::xorps(XMMRegister dst, const Operand& src) {
EnsureSpace ensure_space(this);
- if (dst.low_bits() == 4) {
- // Avoid unnecessary SIB byte.
- emit(0xf3);
- emit_optional_rex_32(dst, src);
- emit(0x0F);
- emit(0x7e);
- emit_sse_operand(dst, src);
- } else {
- emit(0x66);
- emit_optional_rex_32(src, dst);
- emit(0x0F);
- emit(0xD6);
- emit_sse_operand(src, dst);
- }
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x57);
+ emit_sse_operand(dst, src);
}
-void Assembler::movdqa(const Operand& dst, XMMRegister src) {
+void Assembler::addps(XMMRegister dst, XMMRegister src) {
EnsureSpace ensure_space(this);
- emit(0x66);
- emit_rex_64(src, dst);
+ emit_optional_rex_32(dst, src);
emit(0x0F);
- emit(0x7F);
- emit_sse_operand(src, dst);
+ emit(0x58);
+ emit_sse_operand(dst, src);
}
-void Assembler::movdqa(XMMRegister dst, const Operand& src) {
+void Assembler::addps(XMMRegister dst, const Operand& src) {
EnsureSpace ensure_space(this);
- emit(0x66);
- emit_rex_64(dst, src);
+ emit_optional_rex_32(dst, src);
emit(0x0F);
- emit(0x6F);
+ emit(0x58);
emit_sse_operand(dst, src);
}
-void Assembler::movdqu(const Operand& dst, XMMRegister src) {
+void Assembler::subps(XMMRegister dst, XMMRegister src) {
EnsureSpace ensure_space(this);
- emit(0xF3);
- emit_rex_64(src, dst);
+ emit_optional_rex_32(dst, src);
emit(0x0F);
- emit(0x7F);
- emit_sse_operand(src, dst);
+ emit(0x5C);
+ emit_sse_operand(dst, src);
}
-void Assembler::movdqu(XMMRegister dst, const Operand& src) {
+void Assembler::subps(XMMRegister dst, const Operand& src) {
EnsureSpace ensure_space(this);
- emit(0xF3);
- emit_rex_64(dst, src);
+ emit_optional_rex_32(dst, src);
emit(0x0F);
- emit(0x6F);
+ emit(0x5C);
emit_sse_operand(dst, src);
}
-void Assembler::extractps(Register dst, XMMRegister src, byte imm8) {
- ASSERT(IsEnabled(SSE4_1));
- ASSERT(is_uint8(imm8));
+void Assembler::mulps(XMMRegister dst, XMMRegister src) {
EnsureSpace ensure_space(this);
- emit(0x66);
- emit_optional_rex_32(src, dst);
+ emit_optional_rex_32(dst, src);
emit(0x0F);
- emit(0x3A);
- emit(0x17);
- emit_sse_operand(src, dst);
- emit(imm8);
+ emit(0x59);
+ emit_sse_operand(dst, src);
}
-void Assembler::movsd(const Operand& dst, XMMRegister src) {
+void Assembler::mulps(XMMRegister dst, const Operand& src) {
EnsureSpace ensure_space(this);
- emit(0xF2); // double
- emit_optional_rex_32(src, dst);
+ emit_optional_rex_32(dst, src);
emit(0x0F);
- emit(0x11); // store
- emit_sse_operand(src, dst);
+ emit(0x59);
+ emit_sse_operand(dst, src);
}
-void Assembler::movsd(XMMRegister dst, XMMRegister src) {
+void Assembler::divps(XMMRegister dst, XMMRegister src) {
EnsureSpace ensure_space(this);
- emit(0xF2); // double
emit_optional_rex_32(dst, src);
emit(0x0F);
- emit(0x10); // load
+ emit(0x5E);
emit_sse_operand(dst, src);
}
-void Assembler::movsd(XMMRegister dst, const Operand& src) {
+void Assembler::divps(XMMRegister dst, const Operand& src) {
EnsureSpace ensure_space(this);
- emit(0xF2); // double
emit_optional_rex_32(dst, src);
emit(0x0F);
- emit(0x10); // load
+ emit(0x5E);
emit_sse_operand(dst, src);
}
-void Assembler::movaps(XMMRegister dst, XMMRegister src) {
+void Assembler::addpd(XMMRegister dst, XMMRegister src) {
EnsureSpace ensure_space(this);
- if (src.low_bits() == 4) {
- // Try to avoid an unnecessary SIB byte.
- emit_optional_rex_32(src, dst);
- emit(0x0F);
- emit(0x29);
- emit_sse_operand(src, dst);
- } else {
- emit_optional_rex_32(dst, src);
- emit(0x0F);
- emit(0x28);
- emit_sse_operand(dst, src);
- }
+ emit(0x66);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x58);
+ emit_sse_operand(dst, src);
}
-void Assembler::movapd(XMMRegister dst, XMMRegister src) {
+void Assembler::addpd(XMMRegister dst, const Operand& src) {
EnsureSpace ensure_space(this);
- if (src.low_bits() == 4) {
- // Try to avoid an unnecessary SIB byte.
- emit(0x66);
- emit_optional_rex_32(src, dst);
- emit(0x0F);
- emit(0x29);
- emit_sse_operand(src, dst);
- } else {
- emit(0x66);
- emit_optional_rex_32(dst, src);
- emit(0x0F);
- emit(0x28);
- emit_sse_operand(dst, src);
- }
+ emit(0x66);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x58);
+ emit_sse_operand(dst, src);
}
-void Assembler::movss(XMMRegister dst, const Operand& src) {
+void Assembler::subpd(XMMRegister dst, XMMRegister src) {
EnsureSpace ensure_space(this);
- emit(0xF3); // single
+ emit(0x66);
emit_optional_rex_32(dst, src);
emit(0x0F);
- emit(0x10); // load
+ emit(0x5C);
emit_sse_operand(dst, src);
}
-void Assembler::movss(const Operand& src, XMMRegister dst) {
+void Assembler::subpd(XMMRegister dst, const Operand& src) {
EnsureSpace ensure_space(this);
- emit(0xF3); // single
+ emit(0x66);
emit_optional_rex_32(dst, src);
emit(0x0F);
- emit(0x11); // store
+ emit(0x5C);
emit_sse_operand(dst, src);
}
-void Assembler::cvttss2si(Register dst, const Operand& src) {
+void Assembler::mulpd(XMMRegister dst, XMMRegister src) {
EnsureSpace ensure_space(this);
- emit(0xF3);
+ emit(0x66);
emit_optional_rex_32(dst, src);
emit(0x0F);
- emit(0x2C);
- emit_operand(dst, src);
+ emit(0x59);
+ emit_sse_operand(dst, src);
}
-void Assembler::cvttss2si(Register dst, XMMRegister src) {
+void Assembler::mulpd(XMMRegister dst, const Operand& src) {
EnsureSpace ensure_space(this);
- emit(0xF3);
+ emit(0x66);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x59);
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::divpd(XMMRegister dst, XMMRegister src) {
+ EnsureSpace ensure_space(this);
+ emit(0x66);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x5E);
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::divpd(XMMRegister dst, const Operand& src) {
+ EnsureSpace ensure_space(this);
+ emit(0x66);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x5E);
+ emit_sse_operand(dst, src);
+}
+
+
+// SSE 2 operations.
+
+void Assembler::movd(XMMRegister dst, Register src) {
+ EnsureSpace ensure_space(this);
+ emit(0x66);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x6E);
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::movd(Register dst, XMMRegister src) {
+ EnsureSpace ensure_space(this);
+ emit(0x66);
+ emit_optional_rex_32(src, dst);
+ emit(0x0F);
+ emit(0x7E);
+ emit_sse_operand(src, dst);
+}
+
+
+void Assembler::movq(XMMRegister dst, Register src) {
+ EnsureSpace ensure_space(this);
+ emit(0x66);
+ emit_rex_64(dst, src);
+ emit(0x0F);
+ emit(0x6E);
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::movq(Register dst, XMMRegister src) {
+ EnsureSpace ensure_space(this);
+ emit(0x66);
+ emit_rex_64(src, dst);
+ emit(0x0F);
+ emit(0x7E);
+ emit_sse_operand(src, dst);
+}
+
+
+void Assembler::movq(XMMRegister dst, XMMRegister src) {
+ EnsureSpace ensure_space(this);
+ if (dst.low_bits() == 4) {
+ // Avoid unnecessary SIB byte.
+ emit(0xf3);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x7e);
+ emit_sse_operand(dst, src);
+ } else {
+ emit(0x66);
+ emit_optional_rex_32(src, dst);
+ emit(0x0F);
+ emit(0xD6);
+ emit_sse_operand(src, dst);
+ }
+}
+
+
+void Assembler::movdqa(const Operand& dst, XMMRegister src) {
+ EnsureSpace ensure_space(this);
+ emit(0x66);
+ emit_rex_64(src, dst);
+ emit(0x0F);
+ emit(0x7F);
+ emit_sse_operand(src, dst);
+}
+
+
+void Assembler::movdqa(XMMRegister dst, const Operand& src) {
+ EnsureSpace ensure_space(this);
+ emit(0x66);
+ emit_rex_64(dst, src);
+ emit(0x0F);
+ emit(0x6F);
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::movdqu(const Operand& dst, XMMRegister src) {
+ EnsureSpace ensure_space(this);
+ emit(0xF3);
+ emit_rex_64(src, dst);
+ emit(0x0F);
+ emit(0x7F);
+ emit_sse_operand(src, dst);
+}
+
+
+void Assembler::movdqu(XMMRegister dst, const Operand& src) {
+ EnsureSpace ensure_space(this);
+ emit(0xF3);
+ emit_rex_64(dst, src);
+ emit(0x0F);
+ emit(0x6F);
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::extractps(Register dst, XMMRegister src, byte imm8) {
+ ASSERT(IsEnabled(SSE4_1));
+ ASSERT(is_uint8(imm8));
+ EnsureSpace ensure_space(this);
+ emit(0x66);
+ emit_optional_rex_32(src, dst);
+ emit(0x0F);
+ emit(0x3A);
+ emit(0x17);
+ emit_sse_operand(src, dst);
+ emit(imm8);
+}
+
+
+void Assembler::insertps(XMMRegister dst, XMMRegister src, byte imm8) {
+ ASSERT(CpuFeatures::IsSupported(SSE4_1));
+ ASSERT(is_uint8(imm8));
+ EnsureSpace ensure_space(this);
+ emit(0x66);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x3A);
+ emit(0x21);
+ emit_sse_operand(dst, src);
+ emit(imm8);
+}
+
+
+void Assembler::pinsrd(XMMRegister dst, Register src, byte imm8) {
+ ASSERT(CpuFeatures::IsSupported(SSE4_1));
+ ASSERT(is_uint8(imm8));
+ EnsureSpace ensure_space(this);
+ emit(0x66);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x3A);
+ emit(0x22);
+ emit_sse_operand(dst, src);
+ emit(imm8);
+}
+
+
+void Assembler::movsd(const Operand& dst, XMMRegister src) {
+ EnsureSpace ensure_space(this);
+ emit(0xF2); // double
+ emit_optional_rex_32(src, dst);
+ emit(0x0F);
+ emit(0x11); // store
+ emit_sse_operand(src, dst);
+}
+
+
+void Assembler::movsd(XMMRegister dst, XMMRegister src) {
+ EnsureSpace ensure_space(this);
+ emit(0xF2); // double
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x10); // load
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::movsd(XMMRegister dst, const Operand& src) {
+ EnsureSpace ensure_space(this);
+ emit(0xF2); // double
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x10); // load
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::movaps(XMMRegister dst, XMMRegister src) {
+ EnsureSpace ensure_space(this);
+ if (src.low_bits() == 4) {
+ // Try to avoid an unnecessary SIB byte.
+ emit_optional_rex_32(src, dst);
+ emit(0x0F);
+ emit(0x29);
+ emit_sse_operand(src, dst);
+ } else {
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x28);
+ emit_sse_operand(dst, src);
+ }
+}
+
+
+void Assembler::movups(XMMRegister dst, const Operand& src) {
+ EnsureSpace ensure_space(this);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x10);
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::movups(const Operand& dst, XMMRegister src) {
+ EnsureSpace ensure_space(this);
+ emit_optional_rex_32(src, dst);
+ emit(0x0F);
+ emit(0x11);
+ emit_sse_operand(src, dst);
+}
+
+
+void Assembler::shufps(XMMRegister dst, XMMRegister src, byte imm8) {
+ ASSERT(is_uint8(imm8));
+ EnsureSpace ensure_space(this);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0xC6);
+ emit_sse_operand(dst, src);
+ emit(imm8);
+}
+
+
+void Assembler::shufpd(XMMRegister dst, XMMRegister src, byte imm8) {
+ ASSERT(is_uint8(imm8));
+ EnsureSpace ensure_space(this);
+ emit(0x66);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0xC6);
+ emit_sse_operand(dst, src);
+ emit(imm8);
+}
+
+
+void Assembler::movapd(XMMRegister dst, XMMRegister src) {
+ EnsureSpace ensure_space(this);
+ if (src.low_bits() == 4) {
+ // Try to avoid an unnecessary SIB byte.
+ emit(0x66);
+ emit_optional_rex_32(src, dst);
+ emit(0x0F);
+ emit(0x29);
+ emit_sse_operand(src, dst);
+ } else {
+ emit(0x66);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x28);
+ emit_sse_operand(dst, src);
+ }
+}
+
+
+void Assembler::movss(XMMRegister dst, const Operand& src) {
+ EnsureSpace ensure_space(this);
+ emit(0xF3); // single
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x10); // load
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::movss(const Operand& src, XMMRegister dst) {
+ EnsureSpace ensure_space(this);
+ emit(0xF3); // single
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x11); // store
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::psllq(XMMRegister reg, byte imm8) {
+ EnsureSpace ensure_space(this);
+ emit(0x66);
+ emit(0x0F);
+ emit(0x73);
+ emit_sse_operand(rsi, reg); // rsi == 6
+ emit(imm8);
+}
+
+
+void Assembler::cvttss2si(Register dst, const Operand& src) {
+ EnsureSpace ensure_space(this);
+ emit(0xF3);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x2C);
+ emit_operand(dst, src);
+}
+
+
+void Assembler::cvttss2si(Register dst, XMMRegister src) {
+ EnsureSpace ensure_space(this);
+ emit(0xF3);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x2C);
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::cvttsd2si(Register dst, const Operand& src) {
+ EnsureSpace ensure_space(this);
+ emit(0xF2);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x2C);
+ emit_operand(dst, src);
+}
+
+
+void Assembler::cvttsd2si(Register dst, XMMRegister src) {
+ EnsureSpace ensure_space(this);
+ emit(0xF2);
emit_optional_rex_32(dst, src);
emit(0x0F);
emit(0x2C);
}
-void Assembler::cvttsd2si(Register dst, const Operand& src) {
+void Assembler::cvttsd2siq(Register dst, XMMRegister src) {
+ EnsureSpace ensure_space(this);
+ emit(0xF2);
+ emit_rex_64(dst, src);
+ emit(0x0F);
+ emit(0x2C);
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::cvtlsi2sd(XMMRegister dst, const Operand& src) {
+ EnsureSpace ensure_space(this);
+ emit(0xF2);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x2A);
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::cvtlsi2sd(XMMRegister dst, Register src) {
+ EnsureSpace ensure_space(this);
+ emit(0xF2);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x2A);
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::cvtlsi2ss(XMMRegister dst, Register src) {
+ EnsureSpace ensure_space(this);
+ emit(0xF3);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x2A);
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::cvtqsi2sd(XMMRegister dst, Register src) {
+ EnsureSpace ensure_space(this);
+ emit(0xF2);
+ emit_rex_64(dst, src);
+ emit(0x0F);
+ emit(0x2A);
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::cvtss2sd(XMMRegister dst, XMMRegister src) {
+ EnsureSpace ensure_space(this);
+ emit(0xF3);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x5A);
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::cvtss2sd(XMMRegister dst, const Operand& src) {
+ EnsureSpace ensure_space(this);
+ emit(0xF3);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x5A);
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::cvtsd2ss(XMMRegister dst, XMMRegister src) {
+ EnsureSpace ensure_space(this);
+ emit(0xF2);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x5A);
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::cvtsd2si(Register dst, XMMRegister src) {
+ EnsureSpace ensure_space(this);
+ emit(0xF2);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x2D);
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::cvtsd2siq(Register dst, XMMRegister src) {
+ EnsureSpace ensure_space(this);
+ emit(0xF2);
+ emit_rex_64(dst, src);
+ emit(0x0F);
+ emit(0x2D);
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::addsd(XMMRegister dst, XMMRegister src) {
+ EnsureSpace ensure_space(this);
+ emit(0xF2);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x58);
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::addsd(XMMRegister dst, const Operand& src) {
+ EnsureSpace ensure_space(this);
+ emit(0xF2);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x58);
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::mulsd(XMMRegister dst, XMMRegister src) {
+ EnsureSpace ensure_space(this);
+ emit(0xF2);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x59);
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::mulsd(XMMRegister dst, const Operand& src) {
+ EnsureSpace ensure_space(this);
+ emit(0xF2);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x59);
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::subsd(XMMRegister dst, XMMRegister src) {
+ EnsureSpace ensure_space(this);
+ emit(0xF2);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x5C);
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::divsd(XMMRegister dst, XMMRegister src) {
+ EnsureSpace ensure_space(this);
+ emit(0xF2);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x5E);
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::andpd(XMMRegister dst, XMMRegister src) {
+ EnsureSpace ensure_space(this);
+ emit(0x66);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x54);
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::andpd(XMMRegister dst, const Operand& src) {
+ EnsureSpace ensure_space(this);
+ emit(0x66);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x54);
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::orpd(XMMRegister dst, XMMRegister src) {
+ EnsureSpace ensure_space(this);
+ emit(0x66);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x56);
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::xorpd(XMMRegister dst, XMMRegister src) {
+ EnsureSpace ensure_space(this);
+ emit(0x66);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x57);
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::xorpd(XMMRegister dst, const Operand& src) {
+ EnsureSpace ensure_space(this);
+ emit(0x66);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x57);
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::sqrtsd(XMMRegister dst, XMMRegister src) {
+ EnsureSpace ensure_space(this);
+ emit(0xF2);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x51);
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::ucomisd(XMMRegister dst, XMMRegister src) {
+ EnsureSpace ensure_space(this);
+ emit(0x66);
+ emit_optional_rex_32(dst, src);
+ emit(0x0f);
+ emit(0x2e);
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::ucomisd(XMMRegister dst, const Operand& src) {
+ EnsureSpace ensure_space(this);
+ emit(0x66);
+ emit_optional_rex_32(dst, src);
+ emit(0x0f);
+ emit(0x2e);
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::cmpltsd(XMMRegister dst, XMMRegister src) {
+ EnsureSpace ensure_space(this);
+ emit(0xF2);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0xC2);
+ emit_sse_operand(dst, src);
+ emit(0x01); // LT == 1
+}
+
+
+void Assembler::cmpps(XMMRegister dst, XMMRegister src, int8_t cmp) {
+ EnsureSpace ensure_space(this);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0xC2);
+ emit_sse_operand(dst, src);
+ emit(cmp);
+}
+
+
+void Assembler::cmpeqps(XMMRegister dst, XMMRegister src) {
+ cmpps(dst, src, 0x0);
+}
+
+
+void Assembler::cmpltps(XMMRegister dst, XMMRegister src) {
+ cmpps(dst, src, 0x1);
+}
+
+
+void Assembler::cmpleps(XMMRegister dst, XMMRegister src) {
+ cmpps(dst, src, 0x2);
+}
+
+
+void Assembler::cmpneqps(XMMRegister dst, XMMRegister src) {
+ cmpps(dst, src, 0x4);
+}
+
+
+void Assembler::cmpnltps(XMMRegister dst, XMMRegister src) {
+ cmpps(dst, src, 0x5);
+}
+
+
+void Assembler::cmpnleps(XMMRegister dst, XMMRegister src) {
+ cmpps(dst, src, 0x6);
+}
+
+
+void Assembler::pslld(XMMRegister reg, int8_t shift) {
+ EnsureSpace ensure_space(this);
+ emit(0x66);
+ emit_optional_rex_32(reg);
+ emit(0x0F);
+ emit(0x72);
+ emit_sse_operand(rsi, reg); // rsi == 6
+ emit(shift);
+}
+
+
+void Assembler::pslld(XMMRegister dst, XMMRegister src) {
EnsureSpace ensure_space(this);
+ emit(0x66);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
emit(0xF2);
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::psrld(XMMRegister reg, int8_t shift) {
+ EnsureSpace ensure_space(this);
+ emit(0x66);
+ emit_optional_rex_32(reg);
+ emit(0x0F);
+ emit(0x72);
+ emit_sse_operand(rdx, reg); // rdx == 2
+ emit(shift);
+}
+
+
+void Assembler::psrld(XMMRegister dst, XMMRegister src) {
+ EnsureSpace ensure_space(this);
+ emit(0x66);
emit_optional_rex_32(dst, src);
emit(0x0F);
- emit(0x2C);
- emit_operand(dst, src);
+ emit(0xD2);
+ emit_sse_operand(dst, src);
}
-void Assembler::cvttsd2si(Register dst, XMMRegister src) {
+void Assembler::psrad(XMMRegister reg, int8_t shift) {
EnsureSpace ensure_space(this);
- emit(0xF2);
+ emit(0x66);
+ emit_optional_rex_32(reg);
+ emit(0x0F);
+ emit(0x72);
+ emit_sse_operand(rsp, reg); // rsp == 4
+ emit(shift);
+}
+
+
+void Assembler::psrad(XMMRegister dst, XMMRegister src) {
+ EnsureSpace ensure_space(this);
+ emit(0x66);
emit_optional_rex_32(dst, src);
emit(0x0F);
- emit(0x2C);
+ emit(0xE2);
emit_sse_operand(dst, src);
}
-void Assembler::cvttsd2siq(Register dst, XMMRegister src) {
+void Assembler::pcmpeqd(XMMRegister dst, XMMRegister src) {
EnsureSpace ensure_space(this);
- emit(0xF2);
- emit_rex_64(dst, src);
+ emit(0x66);
+ emit_optional_rex_32(dst, src);
emit(0x0F);
- emit(0x2C);
+ emit(0x76);
emit_sse_operand(dst, src);
}
-void Assembler::cvtlsi2sd(XMMRegister dst, const Operand& src) {
+void Assembler::pcmpgtd(XMMRegister dst, XMMRegister src) {
EnsureSpace ensure_space(this);
- emit(0xF2);
+ emit(0x66);
emit_optional_rex_32(dst, src);
emit(0x0F);
- emit(0x2A);
+ emit(0x66);
emit_sse_operand(dst, src);
}
-void Assembler::cvtlsi2sd(XMMRegister dst, Register src) {
+void Assembler::roundsd(XMMRegister dst, XMMRegister src,
+ Assembler::RoundingMode mode) {
+ ASSERT(IsEnabled(SSE4_1));
+ EnsureSpace ensure_space(this);
+ emit(0x66);
+ emit_optional_rex_32(dst, src);
+ emit(0x0f);
+ emit(0x3a);
+ emit(0x0b);
+ emit_sse_operand(dst, src);
 + // Mask precision exception.
+ emit(static_cast<byte>(mode) | 0x8);
+}
+
+
+void Assembler::movmskpd(Register dst, XMMRegister src) {
+ EnsureSpace ensure_space(this);
+ emit(0x66);
+ emit_optional_rex_32(dst, src);
+ emit(0x0f);
+ emit(0x50);
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::movmskps(Register dst, XMMRegister src) {
+ EnsureSpace ensure_space(this);
+ emit_optional_rex_32(dst, src);
+ emit(0x0f);
+ emit(0x50);
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::minps(XMMRegister dst, XMMRegister src) {
EnsureSpace ensure_space(this);
- emit(0xF2);
emit_optional_rex_32(dst, src);
emit(0x0F);
- emit(0x2A);
+ emit(0x5D);
emit_sse_operand(dst, src);
}
-void Assembler::cvtlsi2ss(XMMRegister dst, Register src) {
+void Assembler::minps(XMMRegister dst, const Operand& src) {
EnsureSpace ensure_space(this);
- emit(0xF3);
emit_optional_rex_32(dst, src);
emit(0x0F);
- emit(0x2A);
+ emit(0x5D);
emit_sse_operand(dst, src);
}
-void Assembler::cvtqsi2sd(XMMRegister dst, Register src) {
+void Assembler::maxps(XMMRegister dst, XMMRegister src) {
EnsureSpace ensure_space(this);
- emit(0xF2);
- emit_rex_64(dst, src);
+ emit_optional_rex_32(dst, src);
emit(0x0F);
- emit(0x2A);
+ emit(0x5F);
emit_sse_operand(dst, src);
}
-void Assembler::cvtss2sd(XMMRegister dst, XMMRegister src) {
+void Assembler::maxps(XMMRegister dst, const Operand& src) {
EnsureSpace ensure_space(this);
- emit(0xF3);
emit_optional_rex_32(dst, src);
emit(0x0F);
- emit(0x5A);
+ emit(0x5F);
emit_sse_operand(dst, src);
}
-void Assembler::cvtss2sd(XMMRegister dst, const Operand& src) {
+void Assembler::minpd(XMMRegister dst, XMMRegister src) {
EnsureSpace ensure_space(this);
- emit(0xF3);
+ emit(0x66);
emit_optional_rex_32(dst, src);
emit(0x0F);
- emit(0x5A);
+ emit(0x5D);
emit_sse_operand(dst, src);
}
-void Assembler::cvtsd2ss(XMMRegister dst, XMMRegister src) {
+void Assembler::minpd(XMMRegister dst, const Operand& src) {
EnsureSpace ensure_space(this);
- emit(0xF2);
+ emit(0x66);
emit_optional_rex_32(dst, src);
emit(0x0F);
- emit(0x5A);
+ emit(0x5D);
emit_sse_operand(dst, src);
}
-void Assembler::cvtsd2si(Register dst, XMMRegister src) {
+void Assembler::maxpd(XMMRegister dst, XMMRegister src) {
EnsureSpace ensure_space(this);
- emit(0xF2);
+ emit(0x66);
emit_optional_rex_32(dst, src);
emit(0x0F);
- emit(0x2D);
+ emit(0x5F);
emit_sse_operand(dst, src);
}
-void Assembler::cvtsd2siq(Register dst, XMMRegister src) {
+void Assembler::maxpd(XMMRegister dst, const Operand& src) {
EnsureSpace ensure_space(this);
- emit(0xF2);
- emit_rex_64(dst, src);
+ emit(0x66);
+ emit_optional_rex_32(dst, src);
emit(0x0F);
- emit(0x2D);
+ emit(0x5F);
emit_sse_operand(dst, src);
}
-void Assembler::addsd(XMMRegister dst, XMMRegister src) {
+void Assembler::rcpps(XMMRegister dst, XMMRegister src) {
EnsureSpace ensure_space(this);
- emit(0xF2);
emit_optional_rex_32(dst, src);
emit(0x0F);
- emit(0x58);
+ emit(0x53);
emit_sse_operand(dst, src);
}
-void Assembler::addsd(XMMRegister dst, const Operand& src) {
+void Assembler::rcpps(XMMRegister dst, const Operand& src) {
EnsureSpace ensure_space(this);
- emit(0xF2);
emit_optional_rex_32(dst, src);
emit(0x0F);
- emit(0x58);
+ emit(0x53);
emit_sse_operand(dst, src);
}
-void Assembler::mulsd(XMMRegister dst, XMMRegister src) {
+void Assembler::rsqrtps(XMMRegister dst, XMMRegister src) {
EnsureSpace ensure_space(this);
- emit(0xF2);
emit_optional_rex_32(dst, src);
emit(0x0F);
- emit(0x59);
+ emit(0x52);
emit_sse_operand(dst, src);
}
-void Assembler::mulsd(XMMRegister dst, const Operand& src) {
+void Assembler::rsqrtps(XMMRegister dst, const Operand& src) {
EnsureSpace ensure_space(this);
- emit(0xF2);
emit_optional_rex_32(dst, src);
emit(0x0F);
- emit(0x59);
+ emit(0x52);
emit_sse_operand(dst, src);
}
-void Assembler::subsd(XMMRegister dst, XMMRegister src) {
+void Assembler::sqrtps(XMMRegister dst, XMMRegister src) {
EnsureSpace ensure_space(this);
- emit(0xF2);
emit_optional_rex_32(dst, src);
emit(0x0F);
- emit(0x5C);
+ emit(0x51);
emit_sse_operand(dst, src);
}
-void Assembler::divsd(XMMRegister dst, XMMRegister src) {
+void Assembler::sqrtps(XMMRegister dst, const Operand& src) {
EnsureSpace ensure_space(this);
- emit(0xF2);
emit_optional_rex_32(dst, src);
emit(0x0F);
- emit(0x5E);
+ emit(0x51);
emit_sse_operand(dst, src);
}
-void Assembler::andpd(XMMRegister dst, XMMRegister src) {
+void Assembler::sqrtpd(XMMRegister dst, XMMRegister src) {
EnsureSpace ensure_space(this);
emit(0x66);
emit_optional_rex_32(dst, src);
emit(0x0F);
- emit(0x54);
+ emit(0x51);
emit_sse_operand(dst, src);
}
-void Assembler::orpd(XMMRegister dst, XMMRegister src) {
+void Assembler::sqrtpd(XMMRegister dst, const Operand& src) {
EnsureSpace ensure_space(this);
emit(0x66);
emit_optional_rex_32(dst, src);
emit(0x0F);
- emit(0x56);
+ emit(0x51);
emit_sse_operand(dst, src);
}
-void Assembler::xorpd(XMMRegister dst, XMMRegister src) {
+void Assembler::cvtdq2ps(XMMRegister dst, XMMRegister src) {
EnsureSpace ensure_space(this);
- emit(0x66);
emit_optional_rex_32(dst, src);
emit(0x0F);
- emit(0x57);
+ emit(0x5B);
emit_sse_operand(dst, src);
}
-void Assembler::xorps(XMMRegister dst, XMMRegister src) {
+void Assembler::cvtdq2ps(XMMRegister dst, const Operand& src) {
EnsureSpace ensure_space(this);
emit_optional_rex_32(dst, src);
emit(0x0F);
- emit(0x57);
+ emit(0x5B);
emit_sse_operand(dst, src);
}
-void Assembler::sqrtsd(XMMRegister dst, XMMRegister src) {
+void Assembler::paddd(XMMRegister dst, XMMRegister src) {
EnsureSpace ensure_space(this);
- emit(0xF2);
+ emit(0x66);
emit_optional_rex_32(dst, src);
emit(0x0F);
- emit(0x51);
+ emit(0xFE);
emit_sse_operand(dst, src);
}
-void Assembler::ucomisd(XMMRegister dst, XMMRegister src) {
+void Assembler::paddd(XMMRegister dst, const Operand& src) {
EnsureSpace ensure_space(this);
emit(0x66);
emit_optional_rex_32(dst, src);
- emit(0x0f);
- emit(0x2e);
+ emit(0x0F);
+ emit(0xFE);
emit_sse_operand(dst, src);
}
-void Assembler::ucomisd(XMMRegister dst, const Operand& src) {
+void Assembler::psubd(XMMRegister dst, XMMRegister src) {
EnsureSpace ensure_space(this);
emit(0x66);
emit_optional_rex_32(dst, src);
- emit(0x0f);
- emit(0x2e);
+ emit(0x0F);
+ emit(0xFA);
emit_sse_operand(dst, src);
}
-void Assembler::cmpltsd(XMMRegister dst, XMMRegister src) {
+void Assembler::psubd(XMMRegister dst, const Operand& src) {
EnsureSpace ensure_space(this);
- emit(0xF2);
+ emit(0x66);
emit_optional_rex_32(dst, src);
emit(0x0F);
- emit(0xC2);
+ emit(0xFA);
emit_sse_operand(dst, src);
- emit(0x01); // LT == 1
}
-void Assembler::roundsd(XMMRegister dst, XMMRegister src,
- Assembler::RoundingMode mode) {
+void Assembler::pmulld(XMMRegister dst, XMMRegister src) {
ASSERT(IsEnabled(SSE4_1));
EnsureSpace ensure_space(this);
emit(0x66);
emit_optional_rex_32(dst, src);
- emit(0x0f);
- emit(0x3a);
- emit(0x0b);
+ emit(0x0F);
+ emit(0x38);
+ emit(0x40);
emit_sse_operand(dst, src);
- // Mask precision exeption.
- emit(static_cast<byte>(mode) | 0x8);
}
-void Assembler::movmskpd(Register dst, XMMRegister src) {
+void Assembler::pmulld(XMMRegister dst, const Operand& src) {
EnsureSpace ensure_space(this);
emit(0x66);
emit_optional_rex_32(dst, src);
- emit(0x0f);
- emit(0x50);
+ emit(0x0F);
+ emit(0xF4);
emit_sse_operand(dst, src);
}
-void Assembler::movmskps(Register dst, XMMRegister src) {
+void Assembler::pmuludq(XMMRegister dst, XMMRegister src) {
EnsureSpace ensure_space(this);
+ emit(0x66);
emit_optional_rex_32(dst, src);
- emit(0x0f);
- emit(0x50);
+ emit(0x0F);
+ emit(0xF4);
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::pmuludq(XMMRegister dst, const Operand& src) {
+ EnsureSpace ensure_space(this);
+ emit(0x66);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0xF4);
emit_sse_operand(dst, src);
}
+void Assembler::punpackldq(XMMRegister dst, XMMRegister src) {
+ EnsureSpace ensure_space(this);
+ emit(0x66);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x62);
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::punpackldq(XMMRegister dst, const Operand& src) {
+ EnsureSpace ensure_space(this);
+ emit(0x66);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x62);
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::psrldq(XMMRegister dst, uint8_t shift) {
+ EnsureSpace ensure_space(this);
+ emit(0x66);
+ emit_optional_rex_32(dst);
+ emit(0x0F);
+ emit(0x73);
+ emit_sse_operand(dst);
+ emit(shift);
+}
+
+
+void Assembler::cvtps2dq(XMMRegister dst, XMMRegister src) {
+ EnsureSpace ensure_space(this);
+ emit(0x66);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x5B);
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::cvtps2dq(XMMRegister dst, const Operand& src) {
+ EnsureSpace ensure_space(this);
+ emit(0x66);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x5B);
+ emit_sse_operand(dst, src);
+}
+
+
+void Assembler::pshufd(XMMRegister dst, XMMRegister src, uint8_t shuffle) {
+ EnsureSpace ensure_space(this);
+ emit(0x66);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x70);
+ emit_sse_operand(dst, src);
+ emit(shuffle);
+}
+
+
void Assembler::emit_sse_operand(XMMRegister reg, const Operand& adr) {
Register ireg = { reg.code() };
emit_operand(ireg, adr);
}
+void Assembler::emit_sse_operand(XMMRegister dst) {
+ emit(0xD8 | dst.low_bits());
+}
+
+
void Assembler::db(uint8_t data) {
EnsureSpace ensure_space(this);
emit(data);
ASSERT(!RelocInfo::IsNone(rmode));
if (rmode == RelocInfo::EXTERNAL_REFERENCE) {
// Don't record external references unless the heap will be serialized.
-#ifdef DEBUG
- if (!Serializer::enabled()) {
- Serializer::TooLateToEnableNow();
- }
-#endif
- if (!Serializer::enabled() && !emit_debug_code()) {
+ if (!Serializer::enabled(isolate()) && !emit_debug_code()) {
return;
}
} else if (rmode == RelocInfo::CODE_AGE_SEQUENCE) {
}
+Handle<ConstantPoolArray> Assembler::NewConstantPool(Isolate* isolate) {
+ // No out-of-line constant pool support.
+ ASSERT(!FLAG_enable_ool_constant_pool);
+ return isolate->factory()->empty_constant_pool_array();
+}
+
+
+void Assembler::PopulateConstantPool(ConstantPoolArray* constant_pool) {
+ // No out-of-line constant pool support.
+ ASSERT(!FLAG_enable_ool_constant_pool);
+ return;
+}
+
+
const int RelocInfo::kApplyMask = RelocInfo::kCodeTargetMask |
1 << RelocInfo::RUNTIME_ENTRY |
1 << RelocInfo::INTERNAL_REFERENCE |
return (1 << rmode_) & kApplyMask;
}
+
+bool RelocInfo::IsInConstantPool() {
+ return false;
+}
+
+
} } // namespace v8::internal
#endif // V8_TARGET_ARCH_X64