}
} else if (src.type() == Constant::kFloat32) {
// TODO(turbofan): Can we do better here?
- __ movl(kScratchRegister, Immediate(bit_cast<int32_t>(src.ToFloat32())));
+ uint32_t src_const = bit_cast<uint32_t>(src.ToFloat32());
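+ // For register destinations, Move() picks the cheapest way to materialize
+ // the bit pattern (xorps, pcmpeqd plus shifts, or a scratch-register load).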
if (destination->IsDoubleRegister()) {
- XMMRegister dst = g.ToDoubleRegister(destination);
- __ movq(dst, kScratchRegister);
+ __ Move(g.ToDoubleRegister(destination), src_const);
} else {
DCHECK(destination->IsDoubleStackSlot());
Operand dst = g.ToOperand(destination);
- __ movl(dst, kScratchRegister);
+ __ movl(dst, Immediate(src_const));
}
} else {
DCHECK_EQ(Constant::kFloat64, src.type());
- __ movq(kScratchRegister, bit_cast<int64_t>(src.ToFloat64()));
+ uint64_t src_const = bit_cast<uint64_t>(src.ToFloat64());
if (destination->IsDoubleRegister()) {
- __ movq(g.ToDoubleRegister(destination), kScratchRegister);
+ __ Move(g.ToDoubleRegister(destination), src_const);
} else {
DCHECK(destination->IsDoubleStackSlot());
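+ // x64 has no move of a 64-bit immediate to memory, so the constant has to
+ // go through the scratch register.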
+ __ movq(kScratchRegister, src_const);
__ movq(g.ToOperand(destination), kScratchRegister);
}
}
}
+void Assembler::emit_optional_rex_32(XMMRegister rm_reg) {
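+ // The XMM register sits in the ModR/M r/m field; xmm8..xmm15 need REX.B.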
+ if (rm_reg.high_bit()) emit(0x41);
+}
+
+
void Assembler::emit_optional_rex_32(const Operand& op) {
if (op.rex_ != 0) emit(0x40 | op.rex_);
}
void Assembler::psllq(XMMRegister reg, byte imm8) {
EnsureSpace ensure_space(this);
emit(0x66);
+ emit_optional_rex_32(reg);
emit(0x0F);
emit(0x73);
emit_sse_operand(rsi, reg); // rsi == 6
emit(imm8);
}
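+// psrlq xmm, imm8 (66 [REX] 0F 73 /2 ib): shift each quadword right by imm8.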
+void Assembler::psrlq(XMMRegister reg, byte imm8) {
+ EnsureSpace ensure_space(this);
+ emit(0x66);
+ emit_optional_rex_32(reg);
+ emit(0x0F);
+ emit(0x73);
+ emit_sse_operand(rdx, reg); // rdx == 2
+ emit(imm8);
+}
+
+
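+// pslld xmm, imm8 (66 [REX] 0F 72 /6 ib): shift each doubleword left by imm8.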
+void Assembler::pslld(XMMRegister reg, byte imm8) {
+ EnsureSpace ensure_space(this);
+ emit(0x66);
+ emit_optional_rex_32(reg);
+ emit(0x0F);
+ emit(0x72);
+ emit_sse_operand(rsi, reg); // rsi == 6
+ emit(imm8);
+}
+
+
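+// psrld xmm, imm8 (66 [REX] 0F 72 /2 ib): shift each doubleword right by imm8.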
+void Assembler::psrld(XMMRegister reg, byte imm8) {
+ EnsureSpace ensure_space(this);
+ emit(0x66);
+ emit_optional_rex_32(reg);
+ emit(0x0F);
+ emit(0x72);
+ emit_sse_operand(rdx, reg); // rdx == 2
+ emit(imm8);
+}
+
+
void Assembler::cvttss2si(Register dst, const Operand& src) {
EnsureSpace ensure_space(this);
emit(0xF3);
emit_optional_rex_32(dst, src);
emit(0x0F);
emit(0x2C);
emit_operand(dst, src);
}
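+// pcmpeqd xmm, xmm (66 [REX] 0F 76 /r): compare packed doublewords for
+// equality; with dst == src this sets every bit of dst to 1.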
+void Assembler::pcmpeqd(XMMRegister dst, XMMRegister src) {
+ EnsureSpace ensure_space(this);
+ emit(0x66);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x76);
+ emit_sse_operand(dst, src);
+}
+
+
void Assembler::emit_sse_operand(XMMRegister reg, const Operand& adr) {
Register ireg = { reg.code() };
emit_operand(ireg, adr);
void movapd(XMMRegister dst, XMMRegister src);
void psllq(XMMRegister reg, byte imm8);
+ void psrlq(XMMRegister reg, byte imm8);
+ void pslld(XMMRegister reg, byte imm8);
+ void psrld(XMMRegister reg, byte imm8);
void cvttsd2si(Register dst, const Operand& src);
void cvttsd2si(Register dst, XMMRegister src);
void ucomisd(XMMRegister dst, XMMRegister src);
void ucomisd(XMMRegister dst, const Operand& src);
void cmpltsd(XMMRegister dst, XMMRegister src);
+ void pcmpeqd(XMMRegister dst, XMMRegister src);
void movmskpd(Register dst, XMMRegister src);
// Optionally do as emit_rex_32(Register) if the register number has
// the high bit set.
inline void emit_optional_rex_32(Register rm_reg);
+ inline void emit_optional_rex_32(XMMRegister rm_reg);
// Optionally do as emit_rex_32(const Operand&) if the operand register
// numbers have a high bit set.
} else if (opcode == 0x50) {
AppendToBuffer("movmskpd %s,", NameOfCPURegister(regop));
current += PrintRightXMMOperand(current);
+ } else if (opcode == 0x72) {
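+ // 66 0F 72 ib group: regop 6 is pslld, otherwise print psrld.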
+ current += 1;
+ AppendToBuffer("%s,%s,%d", (regop == 6) ? "pslld" : "psrld",
+ NameOfXMMRegister(rm), *current & 0x7f);
+ current += 1;
} else if (opcode == 0x73) {
current += 1;
- DCHECK(regop == 6);
- AppendToBuffer("psllq,%s,%d", NameOfXMMRegister(rm), *current & 0x7f);
+ AppendToBuffer("%s,%s,%d", (regop == 6) ? "psllq" : "psrlq",
+ NameOfXMMRegister(rm), *current & 0x7f);
current += 1;
} else {
const char* mnemonic = "?";
}
+void MacroAssembler::Move(XMMRegister dst, uint32_t src) {
+ if (src == 0) {
+ xorps(dst, dst);
+ } else {
+ unsigned cnt = base::bits::CountPopulation32(src);
+ unsigned nlz = base::bits::CountLeadingZeros32(src);
+ unsigned ntz = base::bits::CountTrailingZeros32(src);
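+ // nlz + cnt + ntz == 32 means the set bits form a single contiguous run,
+ // so the value can be built from all-ones (pcmpeqd) and at most two
+ // shifts, avoiding the round trip through kScratchRegister.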
+ if (nlz + cnt + ntz == 32) {
+ pcmpeqd(dst, dst);
+ if (ntz == 0) {
+ psrld(dst, 32 - cnt);
+ } else {
+ pslld(dst, 32 - cnt);
+ if (nlz != 0) psrld(dst, nlz);
+ }
+ } else {
+ movl(kScratchRegister, Immediate(src));
+ movq(dst, kScratchRegister);
+ }
+ }
+}
+
+
+void MacroAssembler::Move(XMMRegister dst, uint64_t src) {
+ uint32_t lower = static_cast<uint32_t>(src);
+ uint32_t upper = static_cast<uint32_t>(src >> 32);
+ if (upper == 0) {
+ Move(dst, lower);
+ } else {
+ unsigned cnt = base::bits::CountPopulation64(src);
+ unsigned nlz = base::bits::CountLeadingZeros64(src);
+ unsigned ntz = base::bits::CountTrailingZeros64(src);
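+ // As in the 32-bit case, a contiguous run of set bits is built from
+ // all-ones and shifts. E.g. for 0x0000FFFF00000000, psllq by 48 keeps the
+ // top 16 bits and psrlq by nlz == 16 moves them into place.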
+ if (nlz + cnt + ntz == 64) {
+ pcmpeqd(dst, dst);
+ if (ntz == 0) {
+ psrlq(dst, 64 - cnt);
+ } else {
+ psllq(dst, 64 - cnt);
+ if (nlz != 0) psrlq(dst, nlz);
+ }
+ } else if (lower == 0) {
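+ // Only the upper half is set: materialize it as a 32-bit constant and
+ // shift it into the upper 32 bits.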
+ Move(dst, upper);
+ psllq(dst, 32);
+ } else {
+ movq(kScratchRegister, src);
+ movq(dst, kScratchRegister);
+ }
+ }
+}
+
+
void MacroAssembler::Cmp(Register dst, Handle<Object> source) {
AllowDeferredHandleDereference smi_check;
if (source->IsSmi()) {
movp(dst, reinterpret_cast<void*>(value.location()), rmode);
}
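+ // Load a 32-bit or 64-bit bit pattern into an XMM register, using the
+ // cheapest instruction sequence for the given constant.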
+ void Move(XMMRegister dst, uint32_t src);
+ void Move(XMMRegister dst, uint64_t src);
+
// Control Flow
void Jump(Address destination, RelocInfo::Mode rmode);
void Jump(ExternalReference ext);
__ ucomisd(xmm0, xmm1);
__ andpd(xmm0, xmm1);
+
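+ // Packed shift-by-immediate and packed compare instructions.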
+ __ pslld(xmm0, 6);
+ __ psrld(xmm0, 6);
+ __ psllq(xmm0, 6);
+ __ psrlq(xmm0, 6);
+
+ __ pcmpeqd(xmm1, xmm0);
}
// cmov.