ExternalReference::address_of_regexp_stack_memory_address(isolate);
ExternalReference address_of_regexp_stack_memory_size =
ExternalReference::address_of_regexp_stack_memory_size(isolate);
- __ movq(kScratchRegister, address_of_regexp_stack_memory_size);
- __ movq(kScratchRegister, Operand(kScratchRegister, 0));
+ __ Load(kScratchRegister, address_of_regexp_stack_memory_size);
__ testq(kScratchRegister, kScratchRegister);
__ j(zero, &runtime);
// Argument 8: Pass current isolate address.
// __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kPointerSize),
// Immediate(ExternalReference::isolate_address()));
- __ movq(kScratchRegister, ExternalReference::isolate_address());
+ __ LoadAddress(kScratchRegister, ExternalReference::isolate_address());
__ movq(Operand(rsp, (argument_slots_on_stack - 1) * kPointerSize),
kScratchRegister);
#endif
// Argument 5: static offsets vector buffer.
- __ movq(r8, ExternalReference::address_of_static_offsets_vector(isolate));
+ __ LoadAddress(r8,
+ ExternalReference::address_of_static_offsets_vector(isolate));
// Argument 5 passed in r8 on Linux and on the stack on Windows.
#ifdef _WIN64
__ movq(Operand(rsp, (argument_slots_on_stack - 4) * kPointerSize), r8);
__ RecordWrite(rcx, RegExpImpl::kLastInputOffset, rax, rdi);
// Get the static offsets vector filled by the native regexp code.
- __ movq(rcx, ExternalReference::address_of_static_offsets_vector(isolate));
+ __ LoadAddress(rcx,
+ ExternalReference::address_of_static_offsets_vector(isolate));
// rbx: last_match_info backing store (FixedArray)
// rcx: offsets vector
// TODO(592): Rerunning the RegExp to get the stack overflow exception.
ExternalReference pending_exception_address(
Isolate::k_pending_exception_address, isolate);
- __ movq(rbx, pending_exception_address);
- __ movq(rax, Operand(rbx, 0));
+ Operand pending_exception_operand =
+ masm->ExternalOperand(pending_exception_address, rbx);
+ __ movq(rax, pending_exception_operand);
__ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
__ cmpq(rax, rdx);
__ j(equal, &runtime);
- __ movq(Operand(rbx, 0), rdx);
+ __ movq(pending_exception_operand, rdx);
__ CompareRoot(rax, Heap::kTerminationExceptionRootIndex);
NearLabel termination_exception;
ExternalReference scope_depth =
ExternalReference::heap_always_allocate_scope_depth(masm->isolate());
if (always_allocate_scope) {
- __ movq(kScratchRegister, scope_depth);
- __ incl(Operand(kScratchRegister, 0));
+ Operand scope_depth_operand = masm->ExternalOperand(scope_depth);
+ __ incl(scope_depth_operand);
}
// Call C function.
// Pass a pointer to the Arguments object as the first argument.
// Return result in single register (rax).
__ lea(rcx, StackSpaceOperand(0));
- __ movq(rdx, ExternalReference::isolate_address());
+ __ LoadAddress(rdx, ExternalReference::isolate_address());
} else {
ASSERT_EQ(2, result_size_);
// Pass a pointer to the result location as the first argument.
__ lea(rcx, StackSpaceOperand(2));
// Pass a pointer to the Arguments object as the second argument.
__ lea(rdx, StackSpaceOperand(0));
- __ movq(r8, ExternalReference::isolate_address());
+ __ LoadAddress(r8, ExternalReference::isolate_address());
}
#else // _WIN64
// Result is in rax - do not destroy this register!
if (always_allocate_scope) {
- __ movq(kScratchRegister, scope_depth);
- __ decl(Operand(kScratchRegister, 0));
+ Operand scope_depth_operand = masm->ExternalOperand(scope_depth);
+ __ decl(scope_depth_operand);
}
// Check for failure result.
// Retrieve the pending exception and clear the variable.
ExternalReference pending_exception_address(
Isolate::k_pending_exception_address, masm->isolate());
- __ movq(kScratchRegister, pending_exception_address);
- __ movq(rax, Operand(kScratchRegister, 0));
- __ movq(rdx, ExternalReference::the_hole_value_location(masm->isolate()));
- __ movq(rdx, Operand(rdx, 0));
- __ movq(Operand(kScratchRegister, 0), rdx);
+ Operand pending_exception_operand =
+ masm->ExternalOperand(pending_exception_address);
+ __ movq(rax, pending_exception_operand);
+ __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
+ __ movq(pending_exception_operand, rdx);
// Special handling of termination exceptions which are uncatchable
// by javascript code.
#ifdef ENABLE_LOGGING_AND_PROFILING
Label not_outermost_js, not_outermost_js_2;
#endif
-
- // Setup frame.
- __ push(rbp);
- __ movq(rbp, rsp);
-
- // Push the stack frame type marker twice.
- int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
- // Scratch register is neither callee-save, nor an argument register on any
- // platform. It's free to use at this point.
- // Cannot use smi-register for loading yet.
- __ movq(kScratchRegister,
- reinterpret_cast<uint64_t>(Smi::FromInt(marker)),
- RelocInfo::NONE);
- __ push(kScratchRegister); // context slot
- __ push(kScratchRegister); // function slot
- // Save callee-saved registers (X64/Win64 calling conventions).
- __ push(r12);
- __ push(r13);
- __ push(r14);
- __ push(r15);
+ { // NOLINT. Scope block confuses linter.
+ MacroAssembler::NoRootArrayScope uninitialized_root_register(masm);
+ // Setup frame.
+ __ push(rbp);
+ __ movq(rbp, rsp);
+
+ // Push the stack frame type marker twice.
+ int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
+ // Scratch register is neither callee-save, nor an argument register on any
+ // platform. It's free to use at this point.
+ // Cannot use smi-register for loading yet.
+ __ movq(kScratchRegister,
+ reinterpret_cast<uint64_t>(Smi::FromInt(marker)),
+ RelocInfo::NONE);
+ __ push(kScratchRegister); // context slot
+ __ push(kScratchRegister); // function slot
+ // Save callee-saved registers (X64/Win64 calling conventions).
+ __ push(r12);
+ __ push(r13);
+ __ push(r14);
+ __ push(r15);
#ifdef _WIN64
- __ push(rdi); // Only callee save in Win64 ABI, argument in AMD64 ABI.
- __ push(rsi); // Only callee save in Win64 ABI, argument in AMD64 ABI.
+ __ push(rdi); // Only callee save in Win64 ABI, argument in AMD64 ABI.
+ __ push(rsi); // Only callee save in Win64 ABI, argument in AMD64 ABI.
#endif
- __ push(rbx);
- // TODO(X64): On Win64, if we ever use XMM6-XMM15, the low low 64 bits are
- // callee save as well.
+ __ push(rbx);
+ // TODO(X64): On Win64, if we ever use XMM6-XMM15, the low low 64 bits are
+ // callee save as well.
+
+ // Set up the roots and smi constant registers.
+ // Needs to be done before any further smi loads.
+ __ InitializeSmiConstantRegister();
+ __ InitializeRootRegister();
+ }
Isolate* isolate = masm->isolate();
// Save copies of the top frame descriptor on the stack.
ExternalReference c_entry_fp(Isolate::k_c_entry_fp_address, isolate);
- __ load_rax(c_entry_fp);
- __ push(rax);
-
- // Set up the roots and smi constant registers.
- // Needs to be done before any further smi loads.
- __ InitializeRootRegister();
- __ InitializeSmiConstantRegister();
+ {
+ Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
+ __ push(c_entry_fp_operand);
+ }
#ifdef ENABLE_LOGGING_AND_PROFILING
// If this is the outermost JS call, set js_entry_sp value.
ExternalReference js_entry_sp(Isolate::k_js_entry_sp_address, isolate);
- __ load_rax(js_entry_sp);
+ __ Load(rax, js_entry_sp);
__ testq(rax, rax);
__ j(not_zero, ¬_outermost_js);
__ movq(rax, rbp);
- __ store_rax(js_entry_sp);
+ __ Store(js_entry_sp, rax);
__ bind(¬_outermost_js);
#endif
// exception field in the JSEnv and return a failure sentinel.
ExternalReference pending_exception(Isolate::k_pending_exception_address,
isolate);
- __ store_rax(pending_exception);
+ __ Store(pending_exception, rax);
__ movq(rax, Failure::Exception(), RelocInfo::NONE);
__ jmp(&exit);
__ PushTryHandler(IN_JS_ENTRY, JS_ENTRY_HANDLER);
// Clear any pending exceptions.
- __ load_rax(ExternalReference::the_hole_value_location(isolate));
- __ store_rax(pending_exception);
+ __ LoadRoot(rax, Heap::kTheHoleValueRootIndex);
+ __ Store(pending_exception, rax);
// Fake a receiver (NULL).
__ push(Immediate(0)); // receiver
if (is_construct) {
ExternalReference construct_entry(Builtins::JSConstructEntryTrampoline,
isolate);
- __ load_rax(construct_entry);
+ __ Load(rax, construct_entry);
} else {
ExternalReference entry(Builtins::JSEntryTrampoline, isolate);
- __ load_rax(entry);
+ __ Load(rax, entry);
}
__ lea(kScratchRegister, FieldOperand(rax, Code::kHeaderSize));
__ call(kScratchRegister);
// Unlink this frame from the handler chain.
- __ movq(kScratchRegister,
- ExternalReference(Isolate::k_handler_address, isolate));
- __ pop(Operand(kScratchRegister, 0));
+ Operand handler_operand =
+ masm->ExternalOperand(ExternalReference(Isolate::k_handler_address,
+ isolate));
+ __ pop(handler_operand);
// Pop next_sp.
__ addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
// Restore the top frame descriptor from the stack.
__ bind(&exit);
- __ movq(kScratchRegister,
- ExternalReference(Isolate::k_c_entry_fp_address, isolate));
- __ pop(Operand(kScratchRegister, 0));
+ {
+ Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
+ __ pop(c_entry_fp_operand);
+ }
// Restore callee-saved registers (X64 conventions).
__ pop(rbx);
static const int kOffsetToResultValue = 21;
// The last 4 bytes of the instruction sequence
// movq(rax, FieldOperand(rdi, HeapObject::kMapOffset)
- // Move(kScratchRegister, Factory::the_hole_value)
+ // Move(kScratchRegister, FACTORY->the_hole_value())
// in front of the hole value address.
static const unsigned int kWordBeforeMapCheckValue = 0xBA49FF78;
// The last 4 bytes of the instruction sequence
: Assembler(buffer, size),
generating_stub_(false),
allow_stub_calls_(true),
+ root_array_available_(true),
code_object_(HEAP->undefined_value()) {
}
+// Computes the displacement of |other| from the value held in the root
+// array register (the isolate's roots array address biased by
+// kRootRegisterBias).  Callers use this to address external references
+// relative to kRootRegister when the delta fits in 32 bits.
+static intptr_t RootRegisterDelta(ExternalReference other) {
+  Address roots_register_value = kRootRegisterBias +
+      reinterpret_cast<Address>(Isolate::Current()->heap()->roots_address());
+  intptr_t delta = other.address() - roots_register_value;
+  return delta;
+}
+
+
+// Returns an Operand addressing the memory named by |target|.  When the
+// root array register is valid and the target lies within a 32-bit
+// displacement of it, the returned operand is kRootRegister-relative and
+// no register is clobbered; otherwise the 64-bit address is materialized
+// into |scratch| and an (scratch, 0) operand is returned.
+Operand MacroAssembler::ExternalOperand(ExternalReference target,
+                                        Register scratch) {
+  if (root_array_available_ && !Serializer::enabled()) {
+    intptr_t delta = RootRegisterDelta(target);
+    if (is_int32(delta)) {
+      // Root-relative addressing is not snapshot-safe; forbid enabling the
+      // serializer from here on.
+      Serializer::TooLateToEnableNow();
+      // Cast explicitly, consistent with Load/Store/LoadAddress, rather
+      // than implicitly narrowing intptr_t to the int32_t displacement.
+      return Operand(kRootRegister, static_cast<int32_t>(delta));
+    }
+  }
+  movq(scratch, target);
+  return Operand(scratch, 0);
+}
+
+
+// Loads the value stored at external reference |source| into
+// |destination|.  Fast path: one root-register-relative movq when the
+// address is within int32 range of kRootRegister.  Slow path: the short
+// load_rax encoding when the destination is rax, otherwise an indirect
+// load that clobbers kScratchRegister.
+void MacroAssembler::Load(Register destination, ExternalReference source) {
+  if (root_array_available_ && !Serializer::enabled()) {
+    intptr_t delta = RootRegisterDelta(source);
+    if (is_int32(delta)) {
+      Serializer::TooLateToEnableNow();
+      movq(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
+      return;
+    }
+  }
+  // Safe code.
+  if (destination.is(rax)) {
+    load_rax(source);
+  } else {
+    movq(kScratchRegister, source);
+    movq(destination, Operand(kScratchRegister, 0));
+  }
+}
+
+
+// Stores |source| to the memory named by external reference
+// |destination|.  Mirrors Load: root-register-relative store when in
+// range, otherwise store_rax for rax or an indirect store that clobbers
+// kScratchRegister.
+void MacroAssembler::Store(ExternalReference destination, Register source) {
+  if (root_array_available_ && !Serializer::enabled()) {
+    intptr_t delta = RootRegisterDelta(destination);
+    if (is_int32(delta)) {
+      Serializer::TooLateToEnableNow();
+      movq(Operand(kRootRegister, static_cast<int32_t>(delta)), source);
+      return;
+    }
+  }
+  // Safe code.
+  if (source.is(rax)) {
+    store_rax(destination);
+  } else {
+    movq(kScratchRegister, destination);
+    movq(Operand(kScratchRegister, 0), source);
+  }
+}
+
+
+// Puts the address named by |source| itself (not the value stored there)
+// into |destination|.  Emits a short root-relative lea when in range,
+// otherwise a movq with the full 64-bit immediate.  LoadAddressSize below
+// must be kept in sync with the instruction sizes emitted here.
+void MacroAssembler::LoadAddress(Register destination,
+                                 ExternalReference source) {
+  if (root_array_available_ && !Serializer::enabled()) {
+    intptr_t delta = RootRegisterDelta(source);
+    if (is_int32(delta)) {
+      Serializer::TooLateToEnableNow();
+      lea(destination, Operand(kRootRegister, static_cast<int32_t>(delta)));
+      return;
+    }
+  }
+  // Safe code.
+  movq(destination, source);
+}
+
+
+// Returns the number of code bytes that LoadAddress(<reg>, source) would
+// emit, without emitting anything.  Used to compute call-sequence sizes.
+int MacroAssembler::LoadAddressSize(ExternalReference source) {
+  if (root_array_available_ && !Serializer::enabled()) {
+    // This calculation depends on the internals of LoadAddress.
+    // Its correctness is ensured by the asserts in the Call
+    // instruction below.
+    intptr_t delta = RootRegisterDelta(source);
+    if (is_int32(delta)) {
+      Serializer::TooLateToEnableNow();
+      // Operand is lea(scratch, Operand(kRootRegister, delta));
+      // Opcodes : REX.W 8D ModRM Disp8/Disp32 - 4 or 7.
+      int size = 4;
+      if (!is_int8(static_cast<int32_t>(delta))) {
+        size += 3;  // Need full four-byte displacement in lea.
+      }
+      return size;
+    }
+  }
+  // Size of movq(destination, src);  (REX.W + opcode + 8-byte immediate.)
+  return 10;
+}
+
+
void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
+  // Root accesses require kRootRegister to hold the roots array.
+  ASSERT(root_array_available_);
  movq(destination, Operand(kRootRegister,
                            (index << kPointerSizeLog2) - kRootRegisterBias));
}
void MacroAssembler::LoadRootIndexed(Register destination,
Register variable_offset,
int fixed_offset) {
+ ASSERT(root_array_available_);
movq(destination,
Operand(kRootRegister,
variable_offset, times_pointer_size,
void MacroAssembler::StoreRoot(Register source, Heap::RootListIndex index) {
+  // Root accesses require kRootRegister to hold the roots array.
+  ASSERT(root_array_available_);
  movq(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias),
       source);
}
void MacroAssembler::PushRoot(Heap::RootListIndex index) {
+  // Root accesses require kRootRegister to hold the roots array.
+  ASSERT(root_array_available_);
  push(Operand(kRootRegister, (index << kPointerSizeLog2) - kRootRegisterBias));
}
void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
+  // Root accesses require kRootRegister to hold the roots array.
+  ASSERT(root_array_available_);
  cmpq(with, Operand(kRootRegister,
                     (index << kPointerSizeLog2) - kRootRegisterBias));
}
void MacroAssembler::CompareRoot(const Operand& with,
Heap::RootListIndex index) {
+ ASSERT(root_array_available_);
ASSERT(!with.AddressUsesRegister(kScratchRegister));
LoadRoot(kScratchRegister, index);
cmpq(with, kScratchRegister);
void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
const Runtime::Function* function = Runtime::FunctionForId(id);
Set(rax, function->nargs);
- movq(rbx, ExternalReference(function, isolate()));
+ LoadAddress(rbx, ExternalReference(function, isolate()));
CEntryStub ces(1);
ces.SaveDoubles();
CallStub(&ces);
// should remove this need and make the runtime routine entry code
// smarter.
Set(rax, num_arguments);
- movq(rbx, ExternalReference(f, isolate()));
+ LoadAddress(rbx, ExternalReference(f, isolate()));
CEntryStub ces(f->result_size);
CallStub(&ces);
}
// should remove this need and make the runtime routine entry code
// smarter.
Set(rax, num_arguments);
- movq(rbx, ExternalReference(f, isolate()));
+ LoadAddress(rbx, ExternalReference(f, isolate()));
CEntryStub ces(f->result_size);
return TryCallStub(&ces);
}
void MacroAssembler::CallExternalReference(const ExternalReference& ext,
int num_arguments) {
Set(rax, num_arguments);
- movq(rbx, ext);
+ LoadAddress(rbx, ext);
CEntryStub stub(1);
CallStub(&stub);
movq(Operand(base_reg, kLimitOffset), prev_limit_reg);
movq(prev_limit_reg, rax);
#ifdef _WIN64
- movq(rcx, ExternalReference::isolate_address());
+ LoadAddress(rcx, ExternalReference::isolate_address());
#else
- movq(rdi, ExternalReference::isolate_address());
+ LoadAddress(rdi, ExternalReference::isolate_address());
#endif
- movq(rax, ExternalReference::delete_handle_scope_extensions(isolate()));
+ LoadAddress(rax,
+ ExternalReference::delete_handle_scope_extensions(isolate()));
call(rax);
movq(rax, prev_limit_reg);
jmp(&leave_exit_frame);
void MacroAssembler::JumpToExternalReference(const ExternalReference& ext,
int result_size) {
// Set the entry point and jump to the C entry runtime stub.
- movq(rbx, ext);
+ LoadAddress(rbx, ext);
CEntryStub ces(result_size);
jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}
MaybeObject* MacroAssembler::TryJumpToExternalReference(
const ExternalReference& ext, int result_size) {
// Set the entry point and jump to the C entry runtime stub.
- movq(rbx, ext);
+ LoadAddress(rbx, ext);
CEntryStub ces(result_size);
return TryTailCallStub(&ces);
}
void MacroAssembler::Jump(ExternalReference ext) {
- movq(kScratchRegister, ext);
+ LoadAddress(kScratchRegister, ext);
jmp(kScratchRegister);
}
}
+// Size in bytes of the sequence Call(ext) emits: LoadAddress of ext into
+// kScratchRegister followed by an indirect call through it.
+int MacroAssembler::CallSize(ExternalReference ext) {
+  // Opcode for call kScratchRegister is: Rex.B FF D4 (three bytes).
+  const int kCallInstructionSize = 3;
+  return LoadAddressSize(ext) + kCallInstructionSize;
+}
+
+
void MacroAssembler::Call(ExternalReference ext) {
#ifdef DEBUG
-  int pre_position = pc_offset();
+  // Precompute the expected end offset; CallSize must match what is emitted.
+  int end_position = pc_offset() + CallSize(ext);
#endif
-  movq(kScratchRegister, ext);
+  // May emit a short root-relative lea instead of a 10-byte movq.
+  LoadAddress(kScratchRegister, ext);
  call(kScratchRegister);
#ifdef DEBUG
-  int post_position = pc_offset();
-  CHECK_EQ(pre_position + CallSize(ext), post_position);
+  CHECK_EQ(end_position, pc_offset());
#endif
}
void MacroAssembler::Call(Address destination, RelocInfo::Mode rmode) {
#ifdef DEBUG
-  int pre_position = pc_offset();
+  // Precompute the expected end offset; CallSize must match what is emitted.
+  int end_position = pc_offset() + CallSize(destination, rmode);
#endif
  movq(kScratchRegister, destination, rmode);
  call(kScratchRegister);
#ifdef DEBUG
-  int post_position = pc_offset();
-  CHECK_EQ(pre_position + CallSize(destination, rmode), post_position);
+  // (expected, actual) argument order, consistent with the other Call
+  // overloads' size checks.
+  CHECK_EQ(end_position, pc_offset());
#endif
}
void MacroAssembler::Call(Handle<Code> code_object, RelocInfo::Mode rmode) {
#ifdef DEBUG
-  int pre_position = pc_offset();
+  // Precompute the expected end offset; CallSize must match what is emitted.
+  int end_position = pc_offset() + CallSize(code_object);
#endif
  ASSERT(RelocInfo::IsCodeTarget(rmode));
  call(code_object, rmode);
#ifdef DEBUG
-  int post_position = pc_offset();
-  CHECK_EQ(pre_position + CallSize(code_object), post_position);
+  CHECK_EQ(end_position, pc_offset());
#endif
}
push(Immediate(0)); // NULL frame pointer.
}
// Save the current handler.
- movq(kScratchRegister,
- ExternalReference(Isolate::k_handler_address, isolate()));
- push(Operand(kScratchRegister, 0));
+ Operand handler_operand =
+ ExternalOperand(ExternalReference(Isolate::k_handler_address, isolate()));
+ push(handler_operand);
// Link this handler.
- movq(Operand(kScratchRegister, 0), rsp);
+ movq(handler_operand, rsp);
}
void MacroAssembler::PopTryHandler() {
  ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
  // Unlink this handler.
-  movq(kScratchRegister,
-       ExternalReference(Isolate::k_handler_address, isolate()));
-  pop(Operand(kScratchRegister, 0));
+  // Pop the next-handler field from the stack directly into the
+  // handler-chain head slot.
+  Operand handler_operand =
+      ExternalOperand(ExternalReference(Isolate::k_handler_address, isolate()));
+  pop(handler_operand);
  // Remove the remaining fields.
  addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}
}
ExternalReference handler_address(Isolate::k_handler_address, isolate());
- movq(kScratchRegister, handler_address);
- movq(rsp, Operand(kScratchRegister, 0));
+ Operand handler_operand = ExternalOperand(handler_address);
+ movq(rsp, handler_operand);
// get next in chain
- pop(rcx);
- movq(Operand(kScratchRegister, 0), rcx);
+ pop(handler_operand);
pop(rbp); // pop frame pointer
pop(rdx); // remove state
}
// Fetch top stack handler.
ExternalReference handler_address(Isolate::k_handler_address, isolate());
- movq(kScratchRegister, handler_address);
- movq(rsp, Operand(kScratchRegister, 0));
+ Load(rsp, handler_address);
// Unwind the handlers until the ENTRY handler is found.
NearLabel loop, done;
bind(&done);
// Set the top handler address to next handler past the current ENTRY handler.
- movq(kScratchRegister, handler_address);
- pop(Operand(kScratchRegister, 0));
+ Operand handler_operand = ExternalOperand(handler_address);
+ pop(handler_operand);
if (type == OUT_OF_MEMORY) {
// Set external caught exception to false.
ExternalReference external_caught(
Isolate::k_external_caught_exception_address, isolate());
movq(rax, Immediate(false));
- store_rax(external_caught);
+ Store(external_caught, rax);
// Set pending exception and rax to out of memory exception.
ExternalReference pending_exception(Isolate::k_pending_exception_address,
isolate());
movq(rax, Failure::OutOfMemoryException(), RelocInfo::NONE);
- store_rax(pending_exception);
+ Store(pending_exception, rax);
}
// Clear the context pointer.
// Restore registers from handler.
STATIC_ASSERT(StackHandlerConstants::kNextOffset + kPointerSize ==
- StackHandlerConstants::kFPOffset);
+ StackHandlerConstants::kFPOffset);
pop(rbp); // FP
STATIC_ASSERT(StackHandlerConstants::kFPOffset + kPointerSize ==
- StackHandlerConstants::kStateOffset);
+ StackHandlerConstants::kStateOffset);
pop(rdx); // State
STATIC_ASSERT(StackHandlerConstants::kStateOffset + kPointerSize ==
- StackHandlerConstants::kPCOffset);
+ StackHandlerConstants::kPCOffset);
ret(0);
}
void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
-    movq(kScratchRegister, ExternalReference(counter));
-    movl(Operand(kScratchRegister, 0), Immediate(value));
+    Operand counter_operand = ExternalOperand(ExternalReference(counter));
+    // The counter is a 32-bit int (IncrementCounter/DecrementCounter use
+    // incl/addl/decl/subl on it), so keep the original's 32-bit store;
+    // movq would write 8 bytes, clobbering the 4 bytes after the counter.
+    movl(counter_operand, Immediate(value));
  }
}
void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
-    movq(kScratchRegister, ExternalReference(counter));
-    Operand operand(kScratchRegister, 0);
+    // Address the counter through the root register when it is in range.
+    Operand counter_operand = ExternalOperand(ExternalReference(counter));
    if (value == 1) {
-      incl(operand);
+      incl(counter_operand);
    } else {
-      addl(operand, Immediate(value));
+      addl(counter_operand, Immediate(value));
    }
  }
}
void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
-    movq(kScratchRegister, ExternalReference(counter));
-    Operand operand(kScratchRegister, 0);
+    // Address the counter through the root register when it is in range.
+    Operand counter_operand = ExternalOperand(ExternalReference(counter));
    if (value == 1) {
-      decl(operand);
+      decl(counter_operand);
    } else {
-      subl(operand, Immediate(value));
+      subl(counter_operand, Immediate(value));
    }
  }
}
void MacroAssembler::DebugBreak() {
  ASSERT(allow_stub_calls());
  Set(rax, 0);  // No arguments.
-  movq(rbx, ExternalReference(Runtime::kDebugBreak, isolate()));
+  // May emit a short root-relative lea instead of a 10-byte movq.
+  LoadAddress(rbx, ExternalReference(Runtime::kDebugBreak, isolate()));
  CEntryStub ces(1);
  Call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
}
movq(r14, rax); // Backup rax in callee-save register.
}
- movq(kScratchRegister,
- ExternalReference(Isolate::k_c_entry_fp_address, isolate()));
- movq(Operand(kScratchRegister, 0), rbp);
-
- movq(kScratchRegister,
- ExternalReference(Isolate::k_context_address, isolate()));
- movq(Operand(kScratchRegister, 0), rsi);
+ Store(ExternalReference(Isolate::k_c_entry_fp_address, isolate()), rbp);
+ Store(ExternalReference(Isolate::k_context_address, isolate()), rsi);
}
void MacroAssembler::LeaveExitFrameEpilogue() {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Isolate::k_context_address, isolate());
-  movq(kScratchRegister, context_address);
-  movq(rsi, Operand(kScratchRegister, 0));
+  Operand context_operand = ExternalOperand(context_address);
+  movq(rsi, context_operand);
#ifdef DEBUG
-  movq(Operand(kScratchRegister, 0), Immediate(0));
+  // Reuses context_operand; the intervening movq does not touch the
+  // register (if any) the operand is based on.
+  movq(context_operand, Immediate(0));
#endif
  // Clear the top frame.
  ExternalReference c_entry_fp_address(Isolate::k_c_entry_fp_address,
                                       isolate());
-  movq(kScratchRegister, c_entry_fp_address);
-  movq(Operand(kScratchRegister, 0), Immediate(0));
+  Operand c_entry_fp_operand = ExternalOperand(c_entry_fp_address);
+  movq(c_entry_fp_operand, Immediate(0));
}
ASSERT(!scratch.is_valid());
#ifdef DEBUG
// Assert that result actually contains top on entry.
- movq(kScratchRegister, new_space_allocation_top);
- cmpq(result, Operand(kScratchRegister, 0));
+ Operand top_operand = ExternalOperand(new_space_allocation_top);
+ cmpq(result, top_operand);
Check(equal, "Unexpected allocation top");
#endif
return;
// Move address of new object to result. Use scratch register if available,
// and keep address in scratch until call to UpdateAllocationTopHelper.
if (scratch.is_valid()) {
- movq(scratch, new_space_allocation_top);
+ LoadAddress(scratch, new_space_allocation_top);
movq(result, Operand(scratch, 0));
- } else if (result.is(rax)) {
- load_rax(new_space_allocation_top);
} else {
- movq(kScratchRegister, new_space_allocation_top);
- movq(result, Operand(kScratchRegister, 0));
+ Load(result, new_space_allocation_top);
}
}
ExternalReference::new_space_allocation_top_address(isolate());
// Update new top.
- if (result_end.is(rax)) {
- // rax can be stored directly to a memory location.
- store_rax(new_space_allocation_top);
+ if (scratch.is_valid()) {
+ // Scratch already contains address of allocation top.
+ movq(Operand(scratch, 0), result_end);
} else {
- // Register required - use scratch provided if available.
- if (scratch.is_valid()) {
- movq(Operand(scratch, 0), result_end);
- } else {
- movq(kScratchRegister, new_space_allocation_top);
- movq(Operand(kScratchRegister, 0), result_end);
- }
+ Store(new_space_allocation_top, result_end);
}
}
}
addq(top_reg, Immediate(object_size));
j(carry, gc_required);
- movq(kScratchRegister, new_space_allocation_limit);
- cmpq(top_reg, Operand(kScratchRegister, 0));
+ Operand limit_operand = ExternalOperand(new_space_allocation_limit);
+ cmpq(top_reg, limit_operand);
j(above, gc_required);
// Update allocation top.
lea(result_end, Operand(element_count, element_size, header_size));
addq(result_end, result);
j(carry, gc_required);
- movq(kScratchRegister, new_space_allocation_limit);
- cmpq(result_end, Operand(kScratchRegister, 0));
+ Operand limit_operand = ExternalOperand(new_space_allocation_limit);
+ cmpq(result_end, limit_operand);
j(above, gc_required);
// Update allocation top.
}
addq(result_end, result);
j(carry, gc_required);
- movq(kScratchRegister, new_space_allocation_limit);
- cmpq(result_end, Operand(kScratchRegister, 0));
+ Operand limit_operand = ExternalOperand(new_space_allocation_limit);
+ cmpq(result_end, limit_operand);
j(above, gc_required);
// Update allocation top.
// Make sure the object has no tag before resetting top.
and_(object, Immediate(~kHeapObjectTagMask));
- movq(kScratchRegister, new_space_allocation_top);
+ Operand top_operand = ExternalOperand(new_space_allocation_top);
#ifdef DEBUG
- cmpq(object, Operand(kScratchRegister, 0));
+ cmpq(object, top_operand);
Check(below, "Undo allocation of non allocated memory");
#endif
- movq(Operand(kScratchRegister, 0), object);
+ movq(top_operand, object);
}
void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
-  movq(rax, function);
+  // Materialize the C function's address (root-relative lea when possible).
+  LoadAddress(rax, function);
  CallCFunction(rax, num_arguments);
}
Register arg_to_reg[] = {rdi, rsi, rdx, rcx, r8, r9};
#endif
Register reg = arg_to_reg[num_arguments];
- movq(reg, ExternalReference::isolate_address());
+ LoadAddress(reg, ExternalReference::isolate_address());
} else {
// Push Isolate pointer after all parameters.
int argument_slots_on_stack =
ArgumentStackSlotsForCFunctionCall(num_arguments);
- movq(kScratchRegister, ExternalReference::isolate_address());
+ LoadAddress(kScratchRegister, ExternalReference::isolate_address());
movq(Operand(rsp, argument_slots_on_stack * kPointerSize),
kScratchRegister);
}