void Assembler::movsxbq(Register dst, const Operand& src) {
EnsureSpace ensure_space(this);
last_pc_ = pc_;
- emit_rex_32(dst, src);
+ emit_rex_64(dst, src);
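// A sign extension into a 64-bit destination needs REX.W, i.e. emit_rex_64;
// emit_rex_32 would have encoded only a 32-bit movsx.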
emit(0x0F);
emit(0xBE);
emit_operand(dst, src);
void Assembler::movzxbq(Register dst, const Operand& src) {
EnsureSpace ensure_space(this);
last_pc_ = pc_;
- emit_rex_64(dst, src);
+ emit_optional_rex_32(dst, src);
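// No REX.W needed for zero extension: writing a 32-bit result already
// clears bits 63..32 on x64, so the optional REX (emitted only when an
// extended register is involved) yields the same 64-bit value with a
// shorter encoding. The same reasoning applies to movzxwq below.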
emit(0x0F);
emit(0xB6);
emit_operand(dst, src);
void Assembler::movzxwq(Register dst, const Operand& src) {
EnsureSpace ensure_space(this);
last_pc_ = pc_;
- emit_rex_64(dst, src);
+ emit_optional_rex_32(dst, src);
emit(0x0F);
emit(0xB7);
emit_operand(dst, src);
__ j(equal, &adapted);
// No arguments adaptor frame. Copy fixed number of arguments.
- __ movq(rax, Immediate(scope()->num_parameters()));
+ __ Set(rax, scope()->num_parameters());
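// Set() presumably picks a shorter encoding than movq with an Immediate,
// e.g. xorl for zero and a 32-bit move for small counts.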
for (int i = 0; i < scope()->num_parameters(); i++) {
__ push(frame_->ParameterAt(i));
}
Label non_number_comparison;
Label unordered;
FloatingPointHelper::LoadSSE2UnknownOperands(masm, &non_number_comparison);
+ __ xorl(rax, rax);
+ __ xorl(rcx, rcx);
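// Clear rax and rcx before the compare: xorl clobbers EFLAGS, and the
// setcc below writes only the low byte of its destination register.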
__ ucomisd(xmm0, xmm1);
// Don't base result on EFLAGS when a NaN is involved.
__ j(parity_even, &unordered);
// Return a result of -1, 0, or 1, based on EFLAGS.
- __ movq(rax, Immediate(0)); // equal
- __ movq(rcx, Immediate(1));
- __ cmovq(above, rax, rcx);
- __ movq(rcx, Immediate(-1));
- __ cmovq(below, rax, rcx);
+ __ setcc(above, rax);
+ __ setcc(below, rcx);
+ __ subq(rax, rcx);
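// rax = (above ? 1 : 0) - (below ? 1 : 0), i.e. 1, 0 or -1 as required,
// without the cmov/immediate sequence removed above.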
__ ret(2 * kPointerSize); // rax, rdx were pushed
// If one of the numbers was NaN, then the result is always false.
if (index == 4 && (base & 7) == 4 && scale == 0 /*times_1*/) {
// index == rsp means no index. Only use sib byte with no index for
// rsp and r12 base.
- AppendToBuffer("[%s]", (this->*register_name)(base));
+ AppendToBuffer("[%s]", NameOfCPURegister(base));
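// NameOfCPURegister(code) presumably maps the register code directly to
// its 64-bit name, so the register_name member-pointer indirection is no
// longer needed in these branches.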
return 2;
} else if (base == 5) {
// base == rbp means no base register (when mod == 0).
int32_t disp = *reinterpret_cast<int32_t*>(modrmp + 2);
AppendToBuffer("[%s*%d+0x%x]",
- (this->*register_name)(index),
+ NameOfCPURegister(index),
1 << scale, disp);
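// 6 bytes consumed: modrm, sib and the 32-bit displacement.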
return 6;
} else if (index != 4 && base != 5) {
// [base+index*scale]
AppendToBuffer("[%s+%s*%d]",
- (this->*register_name)(base),
- (this->*register_name)(index),
+ NameOfCPURegister(base),
+ NameOfCPURegister(index),
1 << scale);
return 2;
} else {
return 1;
}
} else {
- AppendToBuffer("[%s]", (this->*register_name)(rm));
+ AppendToBuffer("[%s]", NameOfCPURegister(rm));
return 1;
}
break;
int disp = (mod == 2) ? *reinterpret_cast<int32_t*>(modrmp + 2)
: *reinterpret_cast<char*>(modrmp + 2);
if (index == 4 && (base & 7) == 4 && scale == 0 /*times_1*/) {
if (-disp > 0) {
- AppendToBuffer("[%s-0x%x]", (this->*register_name)(base), -disp);
+ AppendToBuffer("[%s-0x%x]", NameOfCPURegister(base), -disp);
} else {
- AppendToBuffer("[%s+0x%x]", (this->*register_name)(base), disp);
+ AppendToBuffer("[%s+0x%x]", NameOfCPURegister(base), disp);
}
} else {
if (-disp > 0) {
AppendToBuffer("[%s+%s*%d-0x%x]",
- (this->*register_name)(base),
- (this->*register_name)(index),
+ NameOfCPURegister(base),
+ NameOfCPURegister(index),
1 << scale,
-disp);
} else {
AppendToBuffer("[%s+%s*%d+0x%x]",
- (this->*register_name)(base),
- (this->*register_name)(index),
+ NameOfCPURegister(base),
+ NameOfCPURegister(index),
1 << scale,
disp);
}
int disp = (mod == 2) ? *reinterpret_cast<int32_t*>(modrmp + 1)
: *reinterpret_cast<char*>(modrmp + 1);
if (-disp > 0) {
- AppendToBuffer("[%s-0x%x]", (this->*register_name)(rm), -disp);
+ AppendToBuffer("[%s-0x%x]", NameOfCPURegister(rm), -disp);
} else {
- AppendToBuffer("[%s+0x%x]", (this->*register_name)(rm), disp);
+ AppendToBuffer("[%s+0x%x]", NameOfCPURegister(rm), disp);
}
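// mod == 2 carried a 32-bit displacement (modrm + 4 bytes), mod == 1 an
// 8-bit one (modrm + 1 byte).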
return (mod == 2) ? 5 : 2;
}
// Most runtime routines don't need the number of
// arguments passed in because it is constant. At some point we
// should remove this need and make the runtime routine entry code
// smarter.
- movq(rax, Immediate(num_arguments));
+ Set(rax, num_arguments);
movq(rbx, ExternalReference(f));
CEntryStub ces(f->result_size);
CallStub(&ces);
void MacroAssembler::CallExternalReference(const ExternalReference& ext,
int num_arguments) {
- movq(rax, Immediate(num_arguments));
+ Set(rax, num_arguments);
movq(rbx, ext);
CEntryStub stub(1);
// Most runtime routines don't need the number of
// arguments passed in because it is constant. At some point we
// should remove this need and make the runtime routine entry code
// smarter.
- movq(rax, Immediate(num_arguments));
+ Set(rax, num_arguments);
JumpToExternalReference(ext, result_size);
}
if (first.is(second)) {
return CheckSmi(first);
}
- movl(kScratchRegister, first);
- orl(kScratchRegister, second);
- testb(kScratchRegister, Immediate(kSmiTagMask));
+ ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3);
+ leal(kScratchRegister, Operand(first, second, times_1, 0));
+ testb(kScratchRegister, Immediate(0x03));
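// With smis tagged 00 and heap objects tagged 01 in the low two bits
// (see the ASSERT above), the sum formed by leal has both low bits clear
// only if both operands are smis: 00+01 = 01 and 01+01 = 10. One
// lea/test pair replaces the old mov/or/test sequence.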
return zero;
}
if (expected.immediate() == actual.immediate()) {
definitely_matches = true;
} else {
- movq(rax, Immediate(actual.immediate()));
+ Set(rax, actual.immediate());
if (expected.immediate() ==
SharedFunctionInfo::kDontAdaptArgumentsSentinel) {
// Don't worry about adapting arguments for built-ins that
// don't want that done. Skip adaptation code by making it look
// like we have a match between expected and actual number of
// arguments.
definitely_matches = true;
} else {
- movq(rbx, Immediate(expected.immediate()));
+ Set(rbx, expected.immediate());
}
}
} else {
cmpq(expected.reg(), Immediate(actual.immediate()));
j(equal, &invoke);
ASSERT(expected.reg().is(rbx));
- movq(rax, Immediate(actual.immediate()));
+ Set(rax, actual.immediate());
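// The ASSERT above presumably reflects the arguments-adaptor calling
// convention: actual argument count in rax, expected count in rbx.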
} else if (!expected.reg().is(actual.reg())) {
// Both expected and actual are in (different) registers. This
// is the case when we invoke functions using call and apply.
// call trampolines per different arguments counts encountered.
Result num_args = cgen()->allocator()->Allocate(rax);
ASSERT(num_args.is_valid());
- __ movq(num_args.reg(), Immediate(arg_count));
+ __ Set(num_args.reg(), arg_count);
function.Unuse();
num_args.Unuse();