}
+// Emit "mov dst, imm32" using the short-form encoding 0xB8+r: the
+// destination register is folded into the opcode byte, and the 32-bit
+// immediate (possibly with relocation info) follows via emit(x).
+void Assembler::mov(Register dst, const Immediate& x) {
+  EnsureSpace ensure_space(this);
+  last_pc_ = pc_;  // remember start of this instruction for the patcher
+  EMIT(0xB8 | dst.code());
+  emit(x);
+}
+
+
void Assembler::mov(Register dst, Handle<Object> handle) {
EnsureSpace ensure_space(this);
last_pc_ = pc_;
}
+// Emit "mov dst, src" as opcode 0x89 /r (MOV r/m32, r32). The ModR/M
+// byte 0xC0 | src<<3 | dst selects register-direct mode with src in
+// the reg field and dst in the r/m field.
+void Assembler::mov(Register dst, Register src) {
+  EnsureSpace ensure_space(this);
+  last_pc_ = pc_;  // remember start of this instruction for the patcher
+  EMIT(0x89);
+  EMIT(0xC0 | src.code() << 3 | dst.code());
+}
+
+
void Assembler::mov(const Operand& dst, const Immediate& x) {
EnsureSpace ensure_space(this);
last_pc_ = pc_;
void mov_w(const Operand& dst, Register src);
void mov(Register dst, int32_t imm32);
+ void mov(Register dst, const Immediate& x);
void mov(Register dst, Handle<Object> handle);
void mov(Register dst, const Operand& src);
+ void mov(Register dst, Register src);
void mov(const Operand& dst, const Immediate& x);
void mov(const Operand& dst, Handle<Object> handle);
void mov(const Operand& dst, Register src);
// ebx: JSObject
// edi: start of next object
__ mov(Operand(ebx, JSObject::kMapOffset), eax);
- __ mov(Operand(ecx), Factory::empty_fixed_array());
+ __ mov(ecx, Factory::empty_fixed_array());
__ mov(Operand(ebx, JSObject::kPropertiesOffset), ecx);
__ mov(Operand(ebx, JSObject::kElementsOffset), ecx);
// Set extra fields in the newly allocated object.
// ebx: JSObject
// edi: start of next object
{ Label loop, entry;
- __ mov(Operand(edx), Factory::undefined_value());
+ __ mov(edx, Factory::undefined_value());
__ lea(ecx, Operand(ebx, JSObject::kHeaderSize));
__ jmp(&entry);
__ bind(&loop);
// edi: FixedArray
// ecx: start of next object
{ Label loop, entry;
- __ mov(Operand(edx), Factory::undefined_value());
+ __ mov(edx, Factory::undefined_value());
__ lea(eax, Operand(edi, FixedArray::kHeaderSize));
__ jmp(&entry);
__ bind(&loop);
__ push(edi); // save edi across the call
__ push(ebx);
__ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
- __ mov(Operand(ebx), eax);
+ __ mov(ebx, eax);
__ pop(edi); // restore edi after the call
// Get the arguments count and untag it.
__ RecordComment("// Calling from debug break to runtime - come in - over");
#endif
__ Set(eax, Immediate(0)); // no arguments
- __ mov(Operand(ebx), Immediate(ExternalReference::debug_break()));
+ __ mov(ebx, Immediate(ExternalReference::debug_break()));
CEntryDebugBreakStub ceb;
__ CallStub(&ceb);
} else {
deferred = new DeferredInlinedSmiSubReversed(this, edx, overwrite_mode);
__ mov(edx, Operand(eax));
- __ mov(Operand(eax), Immediate(value));
+ __ mov(eax, Immediate(value));
__ sub(eax, Operand(edx));
}
__ j(overflow, deferred->enter(), not_taken);
CompareStub stub(cc_, strict_);
// Setup parameters and call stub.
__ mov(edx, Operand(eax));
- __ mov(Operand(eax), Immediate(Smi::FromInt(value_)));
+ __ Set(eax, Immediate(Smi::FromInt(value_)));
__ CallStub(&stub);
__ cmp(eax, 0);
// "result" is returned in the flags
// running with --gc-greedy set.
if (FLAG_gc_greedy) {
Failure* failure = Failure::RetryAfterGC(0);
- __ mov(Operand(eax), Immediate(reinterpret_cast<int32_t>(failure)));
+ __ mov(eax, Immediate(reinterpret_cast<int32_t>(failure)));
}
GenerateCore(masm, &throw_normal_exception,
&throw_out_of_memory_exception,
// Do full GC and retry runtime call one final time.
Failure* failure = Failure::InternalError();
- __ mov(Operand(eax), Immediate(reinterpret_cast<int32_t>(failure)));
+ __ mov(eax, Immediate(reinterpret_cast<int32_t>(failure)));
GenerateCore(masm,
&throw_normal_exception,
&throw_out_of_memory_exception,
// stub, because the builtin stubs may not have been generated yet.
if (is_construct) {
ExternalReference construct_entry(Builtins::JSConstructEntryTrampoline);
- __ mov(Operand(edx), Immediate(construct_entry));
+ __ mov(edx, Immediate(construct_entry));
} else {
ExternalReference entry(Builtins::JSEntryTrampoline);
- __ mov(Operand(edx), Immediate(entry));
+ __ mov(edx, Immediate(entry));
}
__ mov(edx, Operand(edx, 0)); // deref address
__ lea(edx, FieldOperand(edx, Code::kHeaderSize));
// Call the entry.
CEntryStub stub;
- __ mov(Operand(eax), Immediate(2));
- __ mov(Operand(ebx), Immediate(f));
+ __ mov(eax, Immediate(2));
+ __ mov(ebx, Immediate(f));
__ CallStub(&stub);
// Move result to edi and exit the internal frame.
- __ mov(Operand(edi), eax);
+ __ mov(edi, eax);
__ LeaveInternalFrame();
// Check if the receiver is a global object of some sort.
if (x.is_zero()) {
xor_(dst, Operand(dst)); // shorter than mov
} else {
- mov(Operand(dst), x);
+ mov(dst, x);
}
}
if (num_arguments > 0) {
add(Operand(esp), Immediate(num_arguments * kPointerSize));
}
- mov(Operand(eax), Immediate(Factory::undefined_value()));
+ mov(eax, Immediate(Factory::undefined_value()));
}
// arguments passed in because it is constant. At some point we
// should remove this need and make the runtime routine entry code
// smarter.
- mov(Operand(eax), Immediate(num_arguments));
+ Set(eax, Immediate(num_arguments));
JumpToBuiltin(ext);
}
void MacroAssembler::JumpToBuiltin(const ExternalReference& ext) {
// Set the entry point and jump to the C entry runtime stub.
- mov(Operand(ebx), Immediate(ext));
+ mov(ebx, Immediate(ext));
CEntryStub ces;
jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}
Handle<Code> adaptor =
Handle<Code>(Builtins::builtin(Builtins::ArgumentsAdaptorTrampoline));
if (!code_constant.is_null()) {
- mov(Operand(edx), Immediate(code_constant));
+ mov(edx, Immediate(code_constant));
add(Operand(edx), Immediate(Code::kHeaderSize - kHeapObjectTag));
} else if (!code_operand.is_reg(edx)) {
mov(edx, code_operand);
if ((transition != NULL) && (object->map()->unused_property_fields() == 0)) {
// The properties must be extended before we can store the value.
  // We jump to a runtime call that extends the properties array.
- __ mov(Operand(ecx), Immediate(Handle<Map>(transition)));
+ __ mov(ecx, Immediate(Handle<Map>(transition)));
Handle<Code> ic(Builtins::builtin(storage_extend));
__ jmp(ic, RelocInfo::CODE_TARGET);
return;
}
// Get the function and setup the context.
- __ mov(Operand(edi), Immediate(Handle<JSFunction>(function)));
+ __ mov(edi, Immediate(Handle<JSFunction>(function)));
__ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
// Jump to the cached code (tail call).
// Perform call.
ExternalReference load_interceptor =
ExternalReference(IC_Utility(IC::kLoadInterceptorProperty));
- __ mov(Operand(eax), Immediate(3));
- __ mov(Operand(ebx), Immediate(load_interceptor));
+ __ mov(eax, Immediate(3));
+ __ mov(ebx, Immediate(load_interceptor));
CEntryStub stub;
__ CallStub(&stub);
// Move result to edi and restore receiver.
- __ mov(Operand(edi), eax);
+ __ mov(edi, eax);
__ mov(edx, Operand(ebp, (argc + 2) * kPointerSize)); // receiver
// Exit frame.
// Handle store cache miss.
__ bind(&miss);
- __ mov(Operand(ecx), Immediate(Handle<String>(name))); // restore name
+ __ mov(ecx, Immediate(Handle<String>(name))); // restore name
Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
__ jmp(ic, RelocInfo::CODE_TARGET);
// Handle store cache miss.
__ bind(&miss);
- __ mov(Operand(ecx), Immediate(Handle<String>(name))); // restore name
+ __ mov(ecx, Immediate(Handle<String>(name))); // restore name
Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
__ jmp(ic, RelocInfo::CODE_TARGET);
// Handle store cache miss.
__ bind(&miss);
- __ mov(Operand(ecx), Immediate(Handle<String>(name))); // restore name
+ __ mov(ecx, Immediate(Handle<String>(name))); // restore name
Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
__ jmp(ic, RelocInfo::CODE_TARGET);