}
-void RelocInfo::Visit(ObjectVisitor* visitor) {
+void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
RelocInfo::Mode mode = rmode();
if (mode == RelocInfo::EMBEDDED_OBJECT) {
visitor->VisitEmbeddedPointer(this);
} else if (RelocInfo::IsCodeAgeSequence(mode)) {
visitor->VisitCodeAgeSequence(this);
#ifdef ENABLE_DEBUGGER_SUPPORT
- // TODO(isolates): Get a cached isolate below.
} else if (((RelocInfo::IsJSReturn(mode) &&
IsPatchedReturnSequence()) ||
(RelocInfo::IsDebugBreakSlot(mode) &&
IsPatchedDebugBreakSlotSequence())) &&
- Isolate::Current()->debug()->has_break_points()) {
+ isolate->debug()->has_break_points()) {
visitor->VisitDebugTarget(this);
#endif
} else if (RelocInfo::IsRuntimeEntry(mode)) {
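// A minimal caller-side sketch (assuming a Code* |code|, an int |mode_mask|,
// and an ObjectVisitor* |visitor|); the Code::CodeIterateBody hunk further
// down does exactly this:
//
//   Isolate* isolate = code->GetIsolate();
//   for (RelocIterator it(code, mode_mask); !it.done(); it.next()) {
//     it.rinfo()->Visit(isolate, visitor);
//   }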
// See assembler-arm-inl.h for inlined constructors
Operand::Operand(Handle<Object> handle) {
-#ifdef DEBUG
- Isolate* isolate = Isolate::Current();
-#endif
AllowDeferredHandleDereference using_raw_address;
rm_ = no_reg;
// Verify all Objects referred to by code are NOT in new space.
Object* obj = *handle;
- ASSERT(!isolate->heap()->InNewSpace(obj));
if (obj->IsHeapObject()) {
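+ // Smis are never in new space; for heap objects the owning heap is
+ // reachable from the object itself, so no isolate lookup is needed.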
+ ASSERT(!HeapObject::cast(obj)->GetHeap()->InNewSpace(obj));
imm32_ = reinterpret_cast<intptr_t>(handle.location());
rmode_ = RelocInfo::EMBEDDED_OBJECT;
} else {
}
-bool WriteInt32ToHeapNumberStub::IsPregenerated() {
+bool WriteInt32ToHeapNumberStub::IsPregenerated(Isolate* isolate) {
// These variants are compiled ahead of time. See next method.
if (the_int_.is(r1) && the_heap_number_.is(r0) && scratch_.is(r2)) {
return true;
}
-bool CEntryStub::IsPregenerated() {
- return (!save_doubles_ || Isolate::Current()->fp_stubs_generated()) &&
+bool CEntryStub::IsPregenerated(Isolate* isolate) {
+ return (!save_doubles_ || isolate->fp_stubs_generated()) &&
result_size_ == 1;
}
#undef REG
-bool RecordWriteStub::IsPregenerated() {
+bool RecordWriteStub::IsPregenerated(Isolate* isolate) {
for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
!entry->object.is(no_reg);
entry++) {
void Generate(MacroAssembler* masm);
- virtual bool IsPregenerated() { return true; }
+ virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE { return true; }
static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
virtual bool SometimesSetsUpAFrame() { return false; }
the_heap_number_(the_heap_number),
scratch_(scratch) { }
- bool IsPregenerated();
+ virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE;
static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
private:
INCREMENTAL_COMPACTION
};
- virtual bool IsPregenerated();
+ virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE;
static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
virtual bool SometimesSetsUpAFrame() { return false; }
CodePatcher patcher(rinfo()->pc(), Assembler::kJSReturnSequenceInstructions);
patcher.masm()->ldr(v8::internal::ip, MemOperand(v8::internal::pc, 0));
patcher.masm()->blx(v8::internal::ip);
- patcher.Emit(Isolate::Current()->debug()->debug_break_return()->entry());
+ patcher.Emit(
+ debug_info_->GetIsolate()->debug()->debug_break_return()->entry());
patcher.masm()->bkpt(0);
}
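// The patchers above and below now recover the isolate from the DebugInfo
// object being iterated (debug_info_ is assumed to be a Handle<DebugInfo>
// member of BreakLocationIterator), rather than from Isolate::Current().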
CodePatcher patcher(rinfo()->pc(), Assembler::kDebugBreakSlotInstructions);
patcher.masm()->ldr(v8::internal::ip, MemOperand(v8::internal::pc, 0));
patcher.masm()->blx(v8::internal::ip);
- patcher.Emit(Isolate::Current()->debug()->debug_break_slot()->entry());
+ patcher.Emit(
+ debug_info_->GetIsolate()->debug()->debug_break_slot()->entry());
}
extra_state,
Code::NORMAL,
argc);
- Isolate::Current()->stub_cache()->GenerateProbe(
+ masm->isolate()->stub_cache()->GenerateProbe(
masm, flags, r1, r2, r3, r4, r5, r6);
// If the stub cache probing failed, the receiver might be a value.
// Probe the stub cache for the value object.
__ bind(&probe);
- Isolate::Current()->stub_cache()->GenerateProbe(
+ masm->isolate()->stub_cache()->GenerateProbe(
masm, flags, r1, r2, r3, r4, r5, r6);
__ bind(&miss);
Code::Flags flags = Code::ComputeFlags(
Code::STUB, MONOMORPHIC, Code::kNoExtraICState,
Code::NORMAL, Code::LOAD_IC);
- Isolate::Current()->stub_cache()->GenerateProbe(
+ masm->isolate()->stub_cache()->GenerateProbe(
masm, flags, r0, r2, r3, r4, r5, r6);
// Cache miss: Jump to runtime.
Code::STUB, MONOMORPHIC, strict_mode,
Code::NORMAL, Code::STORE_IC);
- Isolate::Current()->stub_cache()->GenerateProbe(
+ masm->isolate()->stub_cache()->GenerateProbe(
masm, flags, r1, r2, r3, r4, r5, r6);
// Cache miss: Jump to runtime.
masm_->GetCode(&code_desc);
Handle<Code> code = isolate()->factory()->NewCode(
code_desc, Code::ComputeFlags(Code::REGEXP), masm_->CodeObject());
- PROFILE(Isolate::Current(), RegExpCodeCreateEvent(*code, *source));
+ PROFILE(masm_->isolate(), RegExpCodeCreateEvent(*code, *source));
return Handle<HeapObject>::cast(code);
}
Code* re_code,
Address re_frame) {
Isolate* isolate = frame_entry<Isolate*>(re_frame, kIsolate);
- ASSERT(isolate == Isolate::Current());
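+ // The isolate is reloaded from the regexp frame's kIsolate slot above, so
+ // there is no need to cross-check it against thread-local state here.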
if (isolate->stack_guard()->IsStackOverflow()) {
isolate->StackOverflow();
return EXCEPTION;
INLINE(Object** call_object_address());
template<typename StaticVisitor> inline void Visit(Heap* heap);
- inline void Visit(ObjectVisitor* v);
+ inline void Visit(Isolate* isolate, ObjectVisitor* v);
// Patch the code with some other code.
void PatchCode(byte* instructions, int instruction_count);
if (UseSpecialCache()
? FindCodeInSpecialCache(&code, isolate)
: FindCodeInCache(&code, isolate)) {
- ASSERT(IsPregenerated() == code->is_pregenerated());
+ ASSERT(IsPregenerated(isolate) == code->is_pregenerated());
return Handle<Code>(code);
}
virtual ~CodeStub() {}
bool CompilingCallsToThisStubIsGCSafe(Isolate* isolate) {
- bool is_pregenerated = IsPregenerated();
+ bool is_pregenerated = IsPregenerated(isolate);
Code* code = NULL;
CHECK(!is_pregenerated || FindCodeInCache(&code, isolate));
return is_pregenerated;
}
// See comment above, where Instanceof is defined.
- virtual bool IsPregenerated() { return false; }
+ virtual bool IsPregenerated(Isolate* isolate) { return false; }
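+ // Stubs that answer true must already have their code in the cache; see
+ // CompilingCallsToThisStubIsGCSafe() above, which CHECKs exactly that.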
static void GenerateStubsAheadOfTime(Isolate* isolate);
static void GenerateFPStubs(Isolate* isolate);
virtual Handle<Code> GenerateCode(Isolate* isolate);
- virtual bool IsPregenerated() { return true; }
+ virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE { return true; }
static void GenerateAheadOfTime(Isolate* isolate);
// time, so it's OK to call it from other stubs that can't cope with GC during
// their code generation. On machines that always have gp registers (x64) we
// can generate both variants ahead of time.
- virtual bool IsPregenerated();
+ virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE;
static void GenerateAheadOfTime(Isolate* isolate);
private:
bool always_allocate_scope);
+ Isolate* isolate_;
// Number of pointers/values returned.
const int result_size_;
SaveFPRegsMode save_doubles_;
return ContextCheckModeBits::decode(bit_field_);
}
- virtual bool IsPregenerated() {
+ virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE {
// We only pre-generate stubs that verify correct context.
return context_mode() == CONTEXT_CHECK_REQUIRED;
}
kind_ = kind;
}
- virtual bool IsPregenerated() { return true; }
+ virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE { return true; }
static void GenerateStubsAheadOfTime(Isolate* isolate);
static void InstallDescriptors(Isolate* isolate);
explicit StubFailureTrampolineStub(StubFunctionMode function_mode)
: fp_registers_(CanUseFPRegisters()), function_mode_(function_mode) {}
- virtual bool IsPregenerated() { return true; }
+ virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE { return true; }
static void GenerateAheadOfTime(Isolate* isolate);
}
-void RelocInfo::Visit(ObjectVisitor* visitor) {
+void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
RelocInfo::Mode mode = rmode();
if (mode == RelocInfo::EMBEDDED_OBJECT) {
visitor->VisitEmbeddedPointer(this);
} else if (RelocInfo::IsCodeAgeSequence(mode)) {
visitor->VisitCodeAgeSequence(this);
#ifdef ENABLE_DEBUGGER_SUPPORT
- // TODO(isolates): Get a cached isolate below.
} else if (((RelocInfo::IsJSReturn(mode) &&
IsPatchedReturnSequence()) ||
(RelocInfo::IsDebugBreakSlot(mode) &&
IsPatchedDebugBreakSlotSequence())) &&
- Isolate::Current()->debug()->has_break_points()) {
+ isolate->debug()->has_break_points()) {
visitor->VisitDebugTarget(this);
#endif
} else if (IsRuntimeEntry(mode)) {
Immediate::Immediate(Handle<Object> handle) {
-#ifdef DEBUG
- Isolate* isolate = Isolate::Current();
-#endif
AllowDeferredHandleDereference using_raw_address;
// Verify all Objects referred to by code are NOT in new space.
Object* obj = *handle;
- ASSERT(!isolate->heap()->InNewSpace(obj));
if (obj->IsHeapObject()) {
+ ASSERT(!HeapObject::cast(obj)->GetHeap()->InNewSpace(obj));
x_ = reinterpret_cast<intptr_t>(handle.location());
rmode_ = RelocInfo::EMBEDDED_OBJECT;
} else {
}
-bool CEntryStub::IsPregenerated() {
- return (!save_doubles_ || Isolate::Current()->fp_stubs_generated()) &&
+bool CEntryStub::IsPregenerated(Isolate* isolate) {
+ return (!save_doubles_ || isolate->fp_stubs_generated()) &&
result_size_ == 1;
}
#undef REG
-bool RecordWriteStub::IsPregenerated() {
+bool RecordWriteStub::IsPregenerated(Isolate* isolate) {
for (const AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
!entry->object.is(no_reg);
entry++) {
void Generate(MacroAssembler* masm);
- virtual bool IsPregenerated() { return true; }
+ virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE { return true; }
static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
virtual bool SometimesSetsUpAFrame() { return false; }
INCREMENTAL_COMPACTION
};
- virtual bool IsPregenerated();
+ virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE;
static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
virtual bool SometimesSetsUpAFrame() { return false; }
void BreakLocationIterator::SetDebugBreakAtReturn() {
ASSERT(Assembler::kJSReturnSequenceLength >=
Assembler::kCallInstructionLength);
- Isolate* isolate = Isolate::Current();
- rinfo()->PatchCodeWithCall(isolate->debug()->debug_break_return()->entry(),
+ rinfo()->PatchCodeWithCall(
+ debug_info_->GetIsolate()->debug()->debug_break_return()->entry(),
Assembler::kJSReturnSequenceLength - Assembler::kCallInstructionLength);
}
void BreakLocationIterator::SetDebugBreakAtSlot() {
ASSERT(IsDebugBreakSlot());
- Isolate* isolate = Isolate::Current();
+ Isolate* isolate = debug_info_->GetIsolate();
rinfo()->PatchCodeWithCall(
isolate->debug()->debug_break_slot()->entry(),
Assembler::kDebugBreakSlotLength - Assembler::kCallInstructionLength);
Code::Flags flags = Code::ComputeFlags(
Code::STUB, MONOMORPHIC, Code::kNoExtraICState,
Code::NORMAL, Code::LOAD_IC);
- Isolate::Current()->stub_cache()->GenerateProbe(
+ masm->isolate()->stub_cache()->GenerateProbe(
masm, flags, edx, ecx, ebx, eax);
// Cache miss: Jump to runtime.
Code::Flags flags = Code::ComputeFlags(
Code::STUB, MONOMORPHIC, strict_mode,
Code::NORMAL, Code::STORE_IC);
- Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, edx, ecx, ebx,
- no_reg);
+ masm->isolate()->stub_cache()->GenerateProbe(
+ masm, flags, edx, ecx, ebx, no_reg);
// Cache miss: Jump to runtime.
GenerateMiss(masm);
Code* re_code,
Address re_frame) {
Isolate* isolate = frame_entry<Isolate*>(re_frame, kIsolate);
- ASSERT(isolate == Isolate::Current());
if (isolate->stack_guard()->IsStackOverflow()) {
isolate->StackOverflow();
return EXCEPTION;
};
-static inline void UpdateSlot(ObjectVisitor* v,
+static inline void UpdateSlot(Isolate* isolate,
+ ObjectVisitor* v,
SlotsBuffer::SlotType slot_type,
Address addr) {
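+ // A recorded slot carries only an address and a type tag; reconstruct a
+ // RelocInfo for it and visit it with the caller-supplied isolate.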
switch (slot_type) {
case SlotsBuffer::CODE_TARGET_SLOT: {
RelocInfo rinfo(addr, RelocInfo::CODE_TARGET, 0, NULL);
- rinfo.Visit(v);
+ rinfo.Visit(isolate, v);
break;
}
case SlotsBuffer::CODE_ENTRY_SLOT: {
}
case SlotsBuffer::DEBUG_TARGET_SLOT: {
RelocInfo rinfo(addr, RelocInfo::DEBUG_BREAK_SLOT, 0, NULL);
- if (rinfo.IsPatchedDebugBreakSlotSequence()) rinfo.Visit(v);
+ if (rinfo.IsPatchedDebugBreakSlotSequence()) rinfo.Visit(isolate, v);
break;
}
case SlotsBuffer::JS_RETURN_SLOT: {
RelocInfo rinfo(addr, RelocInfo::JS_RETURN, 0, NULL);
- if (rinfo.IsPatchedReturnSequence()) rinfo.Visit(v);
+ if (rinfo.IsPatchedReturnSequence()) rinfo.Visit(isolate, v);
break;
}
case SlotsBuffer::EMBEDDED_OBJECT_SLOT: {
RelocInfo rinfo(addr, RelocInfo::EMBEDDED_OBJECT, 0, NULL);
- rinfo.Visit(v);
+ rinfo.Visit(isolate, v);
break;
}
default:
} else {
++slot_idx;
ASSERT(slot_idx < idx_);
- UpdateSlot(&v,
+ UpdateSlot(heap->isolate(),
+ &v,
DecodeSlotType(slot),
reinterpret_cast<Address>(slots_[slot_idx]));
}
ASSERT(slot_idx < idx_);
Address pc = reinterpret_cast<Address>(slots_[slot_idx]);
if (!IsOnInvalidatedCodeObject(pc)) {
- UpdateSlot(&v,
+ UpdateSlot(heap->isolate(),
+ &v,
DecodeSlotType(slot),
reinterpret_cast<Address>(slots_[slot_idx]));
}
IteratePointer(v, kTypeFeedbackInfoOffset);
RelocIterator it(this, mode_mask);
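+ // Fetch the isolate once per code object rather than once per RelocInfo.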
+ Isolate* isolate = this->GetIsolate();
for (; !it.done(); it.next()) {
- it.rinfo()->Visit(v);
+ it.rinfo()->Visit(isolate, v);
}
}
}
-void RelocInfo::Visit(ObjectVisitor* visitor) {
+void RelocInfo::Visit(Isolate* isolate, ObjectVisitor* visitor) {
RelocInfo::Mode mode = rmode();
if (mode == RelocInfo::EMBEDDED_OBJECT) {
visitor->VisitEmbeddedPointer(this);
} else if (RelocInfo::IsCodeAgeSequence(mode)) {
visitor->VisitCodeAgeSequence(this);
#ifdef ENABLE_DEBUGGER_SUPPORT
- // TODO(isolates): Get a cached isolate below.
} else if (((RelocInfo::IsJSReturn(mode) &&
IsPatchedReturnSequence()) ||
(RelocInfo::IsDebugBreakSlot(mode) &&
IsPatchedDebugBreakSlotSequence())) &&
- Isolate::Current()->debug()->has_break_points()) {
+ isolate->debug()->has_break_points()) {
visitor->VisitDebugTarget(this);
#endif
} else if (RelocInfo::IsRuntimeEntry(mode)) {
ExternalReference::transcendental_cache_array_address(masm->isolate());
__ movq(rax, cache_array);
int cache_array_index =
- type_ * sizeof(Isolate::Current()->transcendental_cache()->caches_[0]);
+ type_ * sizeof(masm->isolate()->transcendental_cache()->caches_[0]);
__ movq(rax, Operand(rax, cache_array_index));
// rax points to the cache for the type type_.
// If NULL, the cache hasn't been initialized yet, so go through runtime.
__ SetCallKind(rcx, CALL_AS_METHOD);
__ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
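+ // |isolate| below is assumed to be bound earlier in this builtin, e.g.
+ // Isolate* isolate = masm->isolate(); (not shown in this hunk).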
Handle<Code> adaptor =
- Isolate::Current()->builtins()->ArgumentsAdaptorTrampoline();
+ isolate->builtins()->ArgumentsAdaptorTrampoline();
__ Jump(adaptor, RelocInfo::CODE_TARGET);
}
}
-bool CEntryStub::IsPregenerated() {
+bool CEntryStub::IsPregenerated(Isolate* isolate) {
#ifdef _WIN64
return result_size_ == 1;
#else
#undef REG
-bool RecordWriteStub::IsPregenerated() {
+bool RecordWriteStub::IsPregenerated(Isolate* isolate) {
for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
!entry->object.is(no_reg);
entry++) {
void Generate(MacroAssembler* masm);
- virtual bool IsPregenerated() { return true; }
+ virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE { return true; }
static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
virtual bool SometimesSetsUpAFrame() { return false; }
INCREMENTAL_COMPACTION
};
- virtual bool IsPregenerated();
+ virtual bool IsPregenerated(Isolate* isolate) V8_OVERRIDE;
static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
virtual bool SometimesSetsUpAFrame() { return false; }
void BreakLocationIterator::SetDebugBreakAtReturn() {
ASSERT(Assembler::kJSReturnSequenceLength >= Assembler::kCallSequenceLength);
rinfo()->PatchCodeWithCall(
- Isolate::Current()->debug()->debug_break_return()->entry(),
+ debug_info_->GetIsolate()->debug()->debug_break_return()->entry(),
Assembler::kJSReturnSequenceLength - Assembler::kCallSequenceLength);
}
void BreakLocationIterator::SetDebugBreakAtSlot() {
ASSERT(IsDebugBreakSlot());
rinfo()->PatchCodeWithCall(
- Isolate::Current()->debug()->debug_break_slot()->entry(),
+ debug_info_->GetIsolate()->debug()->debug_break_slot()->entry(),
Assembler::kDebugBreakSlotLength - Assembler::kCallSequenceLength);
}
extra_state,
Code::NORMAL,
argc);
- Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, rdx, rcx, rbx,
- rax);
+ masm->isolate()->stub_cache()->GenerateProbe(
+ masm, flags, rdx, rcx, rbx, rax);
// If the stub cache probing failed, the receiver might be a value.
// For value objects, we use the map of the prototype objects for
// Probe the stub cache for the value object.
__ bind(&probe);
- Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, rdx, rcx, rbx,
- no_reg);
+ masm->isolate()->stub_cache()->GenerateProbe(
+ masm, flags, rdx, rcx, rbx, no_reg);
__ bind(&miss);
}
Code::Flags flags = Code::ComputeFlags(
Code::STUB, MONOMORPHIC, Code::kNoExtraICState,
Code::NORMAL, Code::LOAD_IC);
- Isolate::Current()->stub_cache()->GenerateProbe(
+ masm->isolate()->stub_cache()->GenerateProbe(
masm, flags, rax, rcx, rbx, rdx);
GenerateMiss(masm);
Code::Flags flags = Code::ComputeFlags(
Code::STUB, MONOMORPHIC, strict_mode,
Code::NORMAL, Code::STORE_IC);
- Isolate::Current()->stub_cache()->GenerateProbe(masm, flags, rdx, rcx, rbx,
- no_reg);
+ masm->isolate()->stub_cache()->GenerateProbe(
+ masm, flags, rdx, rcx, rbx, no_reg);
// Cache miss: Jump to runtime.
GenerateMiss(masm);
Code* re_code,
Address re_frame) {
Isolate* isolate = frame_entry<Isolate*>(re_frame, kIsolate);
- ASSERT(isolate == Isolate::Current());
if (isolate->stack_guard()->IsStackOverflow()) {
isolate->StackOverflow();
return EXCEPTION;