}
-bool WriteInt32ToHeapNumberStub::CompilingCallsToThisStubIsGCSafe() {
+bool WriteInt32ToHeapNumberStub::IsPregenerated() {
// These variants are compiled ahead of time. See next method.
if (the_int_.is(r1) && the_heap_number_.is(r0) && scratch_.is(r2)) {
return true;
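
For context, the fixed-register check above follows a simple pattern: only the register assignment that GenerateFixedRegStubsAheadOfTime() actually emits may report itself as pregenerated, since any other combination would have to be compiled on demand. A minimal sketch of that pattern, with hypothetical register and stub types rather than the V8 ones:

#include <cassert>

enum Register { r0, r1, r2, r3 };

// Hypothetical stand-in for a stub parameterized by registers.
struct FixedRegStub {
  Register the_int, the_heap_number, scratch;

  // Only the register assignment compiled ahead of time is guaranteed to be
  // in the code cache, so only that assignment may claim to be pregenerated.
  bool IsPregenerated() const {
    return the_int == r1 && the_heap_number == r0 && scratch == r2;
  }
};

int main() {
  assert(FixedRegStub{r1, r0, r2}.IsPregenerated());   // the ahead-of-time variant
  assert(!FixedRegStub{r3, r0, r2}.IsPregenerated());  // generated lazily instead
  return 0;
}
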
void BinaryOpStub::Generate(MacroAssembler* masm) {
+ // Explicitly allow generation of nested stubs. It is safe here because
+ // generation code does not use any raw pointers.
+ AllowStubCallsScope allow_stub_calls(masm, true);
+
switch (operands_type_) {
case BinaryOpIC::UNINITIALIZED:
GenerateTypeTransition(masm);
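
The AllowStubCallsScope used in the hunk above is an RAII guard around the assembler's allow_stub_calls flag. A minimal sketch of the save-and-restore behaviour it presumably has follows; the assembler type here is a simplified stand-in, not the real MacroAssembler:

#include <cassert>

// Simplified stand-in holding just the flag the scope manipulates.
class AssemblerFlags {
 public:
  bool allow_stub_calls() const { return allow_stub_calls_; }
  void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
 private:
  bool allow_stub_calls_ = false;
};

// RAII guard: override the flag for the scope's lifetime, then restore it.
class AllowStubCallsScope {
 public:
  AllowStubCallsScope(AssemblerFlags* masm, bool allow)
      : masm_(masm), previous_(masm->allow_stub_calls()) {
    masm_->set_allow_stub_calls(allow);
  }
  ~AllowStubCallsScope() { masm_->set_allow_stub_calls(previous_); }
 private:
  AssemblerFlags* masm_;
  bool previous_;
};

int main() {
  AssemblerFlags masm;
  {
    AllowStubCallsScope scope(&masm, true);
    assert(masm.allow_stub_calls());   // nested stub calls permitted here
  }
  assert(!masm.allow_stub_calls());    // previous value restored on exit
  return 0;
}
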
}
-bool CEntryStub::CompilingCallsToThisStubIsGCSafe() {
+bool CEntryStub::IsPregenerated() {
return (!save_doubles_ || ISOLATE->fp_stubs_generated()) &&
result_size_ == 1;
}
void CodeStub::GenerateStubsAheadOfTime() {
WriteInt32ToHeapNumberStub::GenerateFixedRegStubsAheadOfTime();
+ StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime();
+ RecordWriteStub::GenerateFixedRegStubsAheadOfTime();
}
};
-bool RecordWriteStub::CompilingCallsToThisStubIsGCSafe() {
+bool RecordWriteStub::IsPregenerated() {
for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
!entry->object.is(no_reg);
entry++) {
void Generate(MacroAssembler* masm);
- virtual bool CompilingCallsToThisStubIsGCSafe() { return true; }
+ virtual bool IsPregenerated() { return true; }
static void GenerateFixedRegStubsAheadOfTime();
virtual bool SometimesSetsUpAFrame() { return false; }
the_heap_number_(the_heap_number),
scratch_(scratch) { }
- bool CompilingCallsToThisStubIsGCSafe();
+ bool IsPregenerated();
static void GenerateFixedRegStubsAheadOfTime();
private:
INCREMENTAL_COMPACTION
};
- virtual bool CompilingCallsToThisStubIsGCSafe();
+ virtual bool IsPregenerated();
static void GenerateFixedRegStubsAheadOfTime();
virtual bool SometimesSetsUpAFrame() { return false; }
void MacroAssembler::TailCallStub(CodeStub* stub, Condition cond) {
- ASSERT(stub->CompilingCallsToThisStubIsGCSafe() || allow_stub_calls_);
+ ASSERT(allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe());
Jump(stub->GetCode(), RelocInfo::CODE_TARGET, cond);
}
bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
if (!has_frame_ && stub->SometimesSetsUpAFrame()) return false;
- return stub->CompilingCallsToThisStubIsGCSafe() || allow_stub_calls_;
+ return allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe();
}
virtual ~CodeStub() {}
+ bool CompilingCallsToThisStubIsGCSafe() {
+ bool is_pregenerated = IsPregenerated();
+#ifdef DEBUG
+ Code* code = NULL;
+ ASSERT(!is_pregenerated || FindCodeInCache(&code));
+#endif
+ return is_pregenerated;
+ }
+
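
Taken together with the reordered asserts in TailCallStub and AllowThisStubCall, the intent seems to be: the old query name survives as a thin wrapper over IsPregenerated(), the debug-only assertion documents that a pregenerated stub's code must already sit in the cache, and call sites test the cheap allow_stub_calls_ flag first, presumably so the debug-mode cache lookup is skipped whenever stub calls are explicitly allowed. A compact sketch with simplified stand-in types, not the V8 classes:

#include <cassert>

struct FakeStub {
  bool pregenerated = false;
  bool in_code_cache = false;

  bool IsPregenerated() const { return pregenerated; }
  bool FindCodeInCache() const { return in_code_cache; }

  // Calling this stub while compiling another stub is only GC safe if its
  // code already exists; the assert catches stubs that claim pregeneration
  // without actually having been generated ahead of time.
  bool CompilingCallsToThisStubIsGCSafe() const {
    bool is_pregenerated = IsPregenerated();
    assert(!is_pregenerated || FindCodeInCache());
    return is_pregenerated;
  }
};

// Like the reordered asserts above: the cheap flag is tested first, so the
// cache lookup in the debug assert never runs when stub calls are allowed.
static bool AllowThisStubCall(const FakeStub& stub, bool allow_stub_calls) {
  return allow_stub_calls || stub.CompilingCallsToThisStubIsGCSafe();
}

int main() {
  FakeStub pregen;
  pregen.pregenerated = true;
  pregen.in_code_cache = true;
  assert(AllowThisStubCall(pregen, false));     // safe: code already cached
  assert(AllowThisStubCall(FakeStub(), true));  // safe: calls explicitly allowed
  return 0;
}
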
// See comment above, where Instanceof is defined.
- virtual bool CompilingCallsToThisStubIsGCSafe() {
+ virtual bool IsPregenerated() {
return MajorKey() <= Instanceof;
}
// time, so it's OK to call it from other stubs that can't cope with GC during
// their code generation. On machines that always have fp registers (x64) we
// can generate both variants ahead of time.
- virtual bool CompilingCallsToThisStubIsGCSafe();
+ virtual bool IsPregenerated();
private:
void GenerateCore(MacroAssembler* masm,
void BinaryOpStub::Generate(MacroAssembler* masm) {
+ // Explicitly allow generation of nested stubs. It is safe here because
+ // generation code does not use any raw pointers.
+ AllowStubCallsScope allow_stub_calls(masm, true);
+
switch (operands_type_) {
case BinaryOpIC::UNINITIALIZED:
GenerateTypeTransition(masm);
}
-bool CEntryStub::CompilingCallsToThisStubIsGCSafe() {
+bool CEntryStub::IsPregenerated() {
return (!save_doubles_ || ISOLATE->fp_stubs_generated()) &&
result_size_ == 1;
}
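
The CEntryStub comment earlier ("the version of this stub that doesn't save doubles is generated ahead of time") matches the predicate above: the plain, single-result variant is always pregenerated, while the doubles-saving variant only counts once the FP stubs have actually been generated for this isolate. A tiny sketch of that predicate with hypothetical names, not the V8 API:

#include <cassert>

// Hypothetical stand-ins for the pieces the predicate consults.
struct FakeIsolate {
  bool fp_stubs_generated = false;
};

struct FakeCEntryStub {
  bool save_doubles;
  int result_size;

  bool IsPregenerated(const FakeIsolate& isolate) const {
    // The non-doubles, single-result variant is always compiled ahead of
    // time; the doubles-saving variant only once the FP stubs exist.
    return (!save_doubles || isolate.fp_stubs_generated) && result_size == 1;
  }
};

int main() {
  FakeIsolate isolate;
  assert(FakeCEntryStub{false, 1}.IsPregenerated(isolate));
  assert(!FakeCEntryStub{true, 1}.IsPregenerated(isolate));  // FP stubs not yet generated
  isolate.fp_stubs_generated = true;
  assert(FakeCEntryStub{true, 1}.IsPregenerated(isolate));
  return 0;
}
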
void CodeStub::GenerateStubsAheadOfTime() {
+ StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime();
+ // It is important that the store buffer overflow stubs are generated first.
+ RecordWriteStub::GenerateFixedRegStubsAheadOfTime();
}
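
The ordering comment above is the crux of this hunk: the record-write stubs are themselves pregenerated and their generated code calls the store buffer overflow stub, so that stub has to be in the code cache before the record-write stubs are compiled. A small sketch of the constraint, with hypothetical names rather than the V8 API:

#include <cassert>
#include <set>
#include <string>

// Stand-in for the stub code cache.
static std::set<std::string> code_cache;

static void GenerateStoreBufferOverflowStub() {
  code_cache.insert("StoreBufferOverflow");
}

static void GenerateRecordWriteStubs() {
  // The generated record-write code jumps to the store buffer overflow stub,
  // so generating it here must not trigger a nested (allocating) compile:
  // the callee has to be cached already.
  assert(code_cache.count("StoreBufferOverflow") == 1);
  code_cache.insert("RecordWrite");
}

int main() {
  GenerateStoreBufferOverflowStub();  // must run first
  GenerateRecordWriteStubs();
  return 0;
}
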
// GenerateStoreField calls the stub with two different permutations of
// registers. This is the second.
{ ebx, ecx, edx, EMIT_REMEMBERED_SET },
- // StoreIC::GenerateNormal via GenerateDictionaryStore.
+ // StoreIC::GenerateNormal via GenerateDictionaryStore, CompileArrayPushCall.
{ ebx, edi, edx, EMIT_REMEMBERED_SET },
// KeyedStoreIC::GenerateGeneric.
{ ebx, edx, ecx, EMIT_REMEMBERED_SET },
};
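
The table just above is what makes RecordWriteStub::IsPregenerated() (next hunk) explicit: an instance only counts as pregenerated if its exact register and remembered-set combination appears in the ahead-of-time list, which is terminated by a no_reg sentinel. A self-contained sketch of that table-driven lookup, with simplified types rather than the V8 ones:

#include <cassert>

enum Register { no_reg, eax, ebx, ecx, edx, edi };
enum RememberedSetAction { EMIT_REMEMBERED_SET, OMIT_REMEMBERED_SET };

struct AheadOfTimeEntry {
  Register object, value, address;
  RememberedSetAction action;
};

// Sentinel-terminated list of the combinations generated ahead of time.
static const AheadOfTimeEntry kAheadOfTime[] = {
  { ebx, ecx, edx, EMIT_REMEMBERED_SET },
  { ebx, edi, edx, EMIT_REMEMBERED_SET },
  { ebx, edx, ecx, EMIT_REMEMBERED_SET },
  { no_reg, no_reg, no_reg, EMIT_REMEMBERED_SET },  // terminator
};

static bool IsPregenerated(Register object, Register value, Register address,
                           RememberedSetAction action) {
  for (const AheadOfTimeEntry* entry = kAheadOfTime;
       entry->object != no_reg;
       entry++) {
    if (entry->object == object && entry->value == value &&
        entry->address == address && entry->action == action) {
      return true;
    }
  }
  return false;  // other combinations are generated lazily, so not GC safe
}

int main() {
  assert(IsPregenerated(ebx, ecx, edx, EMIT_REMEMBERED_SET));
  assert(!IsPregenerated(eax, ecx, edx, EMIT_REMEMBERED_SET));
  return 0;
}
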
-bool RecordWriteStub::CompilingCallsToThisStubIsGCSafe() {
+bool RecordWriteStub::IsPregenerated() {
for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
!entry->object.is(no_reg);
entry++) {
void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime() {
StoreBufferOverflowStub stub1(kDontSaveFPRegs);
stub1.GetCode();
- StoreBufferOverflowStub stub2(kSaveFPRegs);
- stub2.GetCode();
+
+ CpuFeatures::TryForceFeatureScope scope(SSE2);
+ if (CpuFeatures::IsSupported(SSE2)) {
+ StoreBufferOverflowStub stub2(kSaveFPRegs);
+ stub2.GetCode();
+ }
}
void Generate(MacroAssembler* masm);
- virtual bool CompilingCallsToThisStubIsGCSafe() { return true; }
+ virtual bool IsPregenerated() { return true; }
static void GenerateFixedRegStubsAheadOfTime();
virtual bool SometimesSetsUpAFrame() { return false; }
INCREMENTAL_COMPACTION
};
- virtual bool CompilingCallsToThisStubIsGCSafe();
+ virtual bool IsPregenerated();
static void GenerateFixedRegStubsAheadOfTime();
virtual bool SometimesSetsUpAFrame() { return false; }
void MacroAssembler::TailCallStub(CodeStub* stub) {
- ASSERT(stub->CompilingCallsToThisStubIsGCSafe() || allow_stub_calls_);
+ ASSERT(allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe());
jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
}
bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
if (!has_frame_ && stub->SometimesSetsUpAFrame()) return false;
- return stub->CompilingCallsToThisStubIsGCSafe() || allow_stub_calls_;
+ return allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe();
}
}
-bool CEntryStub::CompilingCallsToThisStubIsGCSafe() {
+bool CEntryStub::IsPregenerated() {
return (!save_doubles_ || ISOLATE->fp_stubs_generated()) &&
result_size_ == 1;
}
void MacroAssembler::TailCallStub(CodeStub* stub) {
- ASSERT(stub->CompilingCallsToThisStubIsGCSafe() || allow_stub_calls_);
+ ASSERT(allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe());
Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
}
bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
if (!has_frame_ && stub->SometimesSetsUpAFrame()) return false;
- return stub->CompilingCallsToThisStubIsGCSafe() || allow_stub_calls_;
+ return allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe();
}
void BinaryOpStub::Generate(MacroAssembler* masm) {
+ // Explicitly allow generation of nested stubs. It is safe here because
+ // generation code does not use any raw pointers.
+ AllowStubCallsScope allow_stub_calls(masm, true);
+
switch (operands_type_) {
case BinaryOpIC::UNINITIALIZED:
GenerateTypeTransition(masm);
}
-bool CEntryStub::CompilingCallsToThisStubIsGCSafe() {
+bool CEntryStub::IsPregenerated() {
return result_size_ == 1;
}
};
-bool RecordWriteStub::CompilingCallsToThisStubIsGCSafe() {
+bool RecordWriteStub::IsPregenerated() {
for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
!entry->object.is(no_reg);
entry++) {
void Generate(MacroAssembler* masm);
- virtual bool CompilingCallsToThisStubIsGCSafe() { return true; }
+ virtual bool IsPregenerated() { return true; }
static void GenerateFixedRegStubsAheadOfTime();
virtual bool SometimesSetsUpAFrame() { return false; }
INCREMENTAL_COMPACTION
};
- virtual bool CompilingCallsToThisStubIsGCSafe();
+ virtual bool IsPregenerated();
static void GenerateFixedRegStubsAheadOfTime();
virtual bool SometimesSetsUpAFrame() { return false; }
void MacroAssembler::TailCallStub(CodeStub* stub) {
- ASSERT(stub->CompilingCallsToThisStubIsGCSafe() || allow_stub_calls_);
+ ASSERT(allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe());
Jump(stub->GetCode(), RelocInfo::CODE_TARGET);
}
bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
if (!has_frame_ && stub->SometimesSetsUpAFrame()) return false;
- return stub->CompilingCallsToThisStubIsGCSafe() || allow_stub_calls_;
+ return allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe();
}