LInstruction* LChunkBuilder::DoBranch(HBranch* instr) {
HValue* value = instr->value();
if (value->EmitAtUses()) {
- HBasicBlock* successor = HConstant::cast(value)->ToBoolean()
+ HBasicBlock* successor = HConstant::cast(value)->BooleanValue()
? instr->FirstSuccessor()
: instr->SecondSuccessor();
return new(zone()) LGoto(successor->block_id());
Isolate* isolate = graph->isolate();
AssertNoAllocation no_gc;
NoHandleAllocation no_handles(isolate);
- NoHandleDereference no_deref(isolate);
+ HandleDereferenceGuard no_deref(isolate, HandleDereferenceGuard::DISALLOW);
ASSERT(graph != NULL);
SmartArrayPointer<char> bailout_reason;
OptimizingCompiler::Status OptimizingCompiler::OptimizeGraph() {
AssertNoAllocation no_gc;
NoHandleAllocation no_handles(isolate());
- NoHandleDereference no_deref(isolate());
+ HandleDereferenceGuard no_deref(isolate(), HandleDereferenceGuard::DISALLOW);
ASSERT(last_status() == SUCCEEDED);
Timer t(this, &time_taken_to_optimize_);
new(info->zone()) OptimizingCompiler(*info);
OptimizingCompiler::Status status = compiler->CreateGraph();
if (status == OptimizingCompiler::SUCCEEDED) {
+ // Do a scavenge to put off the next scavenge as far as possible.
+ // This may mitigate the problem of GVN blocking the next scavenge.
+ isolate->heap()->CollectGarbage(NEW_SPACE, "parallel recompile");
closure->MarkInRecompileQueue();
shared->code()->set_profiler_ticks(0);
info.Detach();
void Compiler::InstallOptimizedCode(OptimizingCompiler* optimizing_compiler) {
SmartPointer<CompilationInfo> info(optimizing_compiler->info());
ASSERT(info->closure()->IsMarkedForInstallingRecompiledCode());
+ // While waiting for the optimizer thread, OSR may have already done all
+ // the work and disabled optimization of this function for some reason.
+ if (info->shared_info()->optimization_disabled()) {
+ info->SetCode(Handle<Code>(info->shared_info()->code()));
+ InstallFullCode(*info);
+ return;
+ }
Isolate* isolate = info->isolate();
VMState state(isolate, PARALLEL_COMPILER);
void Interrupt();
bool IsTerminateExecution();
void TerminateExecution();
- bool IsCodeReadyEvent();
- void RequestCodeReadyEvent();
#ifdef ENABLE_DEBUGGER_SUPPORT
bool IsDebugBreak();
void DebugBreak();
void FlagList::EnforceFlagImplications() {
#define FLAG_MODE_DEFINE_IMPLICATIONS
#include "flag-definitions.h"
+#undef FLAG_MODE_DEFINE_IMPLICATIONS
}
} } // namespace v8::internal
template <typename T>
+// Compares the referents of two handles for pointer identity (not slot
+// identity).
+inline bool Handle<T>::is_identical_to(const Handle<T> other) const {
+ ASSERT(location_ == NULL ||
+ reinterpret_cast<Address>(*location_) != kZapValue);
+#ifdef DEBUG
+ if (FLAG_enable_slow_asserts) {
+ Isolate* isolate = Isolate::Current();
+ // Dereferencing is checked as safe when this thread has permission,
+ // when relocation is locked (objects cannot move), or when we are not
+ // on the optimizer thread.
+ CHECK(isolate->AllowHandleDereference() ||
+ Heap::RelocationLock::IsLocked(isolate->heap()) ||
+ !isolate->optimizing_compiler_thread()->IsOptimizerThread());
+ }
+#endif // DEBUG
+ return *location_ == *other.location_;
+}
+
+
+template <typename T>
+// Dereferences the handle. In slow-assert builds this verifies that the
+// current thread is currently allowed to dereference handles at all.
inline T* Handle<T>::operator*() const {
ASSERT(location_ != NULL);
ASSERT(reinterpret_cast<Address>(*location_) != kHandleZapValue);
- SLOW_ASSERT(ISOLATE->allow_handle_deref());
+ SLOW_ASSERT(Isolate::Current()->AllowHandleDereference());
return *BitCast<T**>(location_);
}
+// Returns the handle's raw slot address. Note that the zap-value ASSERT
+// below reads the slot contents, hence the dereference permission check.
inline T** Handle<T>::location() const {
ASSERT(location_ == NULL ||
reinterpret_cast<Address>(*location_) != kZapValue);
- SLOW_ASSERT(ISOLATE->allow_handle_deref());
+ SLOW_ASSERT(Isolate::Current()->AllowHandleDereference());
return location_;
}
}
+// Scoped guard that saves the current handle-dereference permission and
+// installs |state| (ALLOW or DISALLOW). Unlike the old per-isolate flag,
+// permission is tracked per thread side (see SetAllowHandleDereference),
+// so no opt-out for parallel recompilation is needed anymore.
-NoHandleDereference::NoHandleDereference(Isolate* isolate)
+HandleDereferenceGuard::HandleDereferenceGuard(Isolate* isolate, State state)
: isolate_(isolate) {
- // The guard is set on a per-isolate basis, so it affects all threads.
- // That's why we can only use it when running without parallel recompilation.
- if (FLAG_parallel_recompilation) return;
- old_state_ = isolate->allow_handle_deref();
- isolate_->set_allow_handle_deref(false);
+ old_state_ = isolate_->AllowHandleDereference();
+ isolate_->SetAllowHandleDereference(state == ALLOW);
}
-NoHandleDereference::~NoHandleDereference() {
- if (FLAG_parallel_recompilation) return;
- isolate_->set_allow_handle_deref(old_state_);
+// Restores the dereference permission captured at construction time.
+HandleDereferenceGuard::~HandleDereferenceGuard() {
+ isolate_->SetAllowHandleDereference(old_state_);
}
-
-AllowHandleDereference::AllowHandleDereference(Isolate* isolate)
- : isolate_(isolate) {
- // The guard is set on a per-isolate basis, so it affects all threads.
- // That's why we can only use it when running without parallel recompilation.
- if (FLAG_parallel_recompilation) return;
- old_state_ = isolate->allow_handle_deref();
- isolate_->set_allow_handle_deref(true);
-}
-
-
-AllowHandleDereference::~AllowHandleDereference() {
- if (FLAG_parallel_recompilation) return;
- isolate_->set_allow_handle_deref(old_state_);
-}
#endif
-
} } // namespace v8::internal
#endif // V8_HANDLES_INL_H_
INLINE(T* operator ->() const) { return operator*(); }
// Check if this handle refers to the exact same object as the other handle.
- bool is_identical_to(const Handle<T> other) const {
- return *location_ == *other.location_;
- }
+ INLINE(bool is_identical_to(const Handle<T> other) const);
// Provides the C++ dereference operator.
INLINE(T* operator*() const);
};
-class NoHandleDereference BASE_EMBEDDED {
- public:
-#ifndef DEBUG
- explicit NoHandleDereference(Isolate* isolate) {}
- ~NoHandleDereference() {}
-#else
- explicit inline NoHandleDereference(Isolate* isolate);
- inline ~NoHandleDereference();
- private:
- Isolate* isolate_;
- bool old_state_;
-#endif
-};
-
-
-class AllowHandleDereference BASE_EMBEDDED {
+class HandleDereferenceGuard BASE_EMBEDDED {
public:
+ enum State { ALLOW, DISALLOW };
#ifndef DEBUG
- explicit AllowHandleDereference(Isolate* isolate) {}
- ~AllowHandleDereference() {}
+ HandleDereferenceGuard(Isolate* isolate, State state) { }
+ ~HandleDereferenceGuard() { }
#else
- explicit inline AllowHandleDereference(Isolate* isolate);
- inline ~AllowHandleDereference();
+ inline HandleDereferenceGuard(Isolate* isolate, State state);
+ inline ~HandleDereferenceGuard();
private:
Isolate* isolate_;
bool old_state_;
store_buffer()->SetUp();
if (FLAG_parallel_recompilation) relocation_mutex_ = OS::CreateMutex();
+#ifdef DEBUG
+ relocation_mutex_locked_ = false;
+#endif // DEBUG
return true;
}
// Acquires the heap's relocation mutex (only when parallel recompilation
// is enabled). In debug builds it also records that the mutex is held so
// IsLocked() can be used in assertions.
explicit RelocationLock(Heap* heap) : heap_(heap) {
if (FLAG_parallel_recompilation) {
heap_->relocation_mutex_->Lock();
+#ifdef DEBUG
+ heap_->relocation_mutex_locked_ = true;
+#endif // DEBUG
}
}
+
// Releases the relocation mutex. The debug flag is cleared before
// unlocking so IsLocked() never reports a lock that is about to be
// dropped.
~RelocationLock() {
if (FLAG_parallel_recompilation) {
+#ifdef DEBUG
+ heap_->relocation_mutex_locked_ = false;
+#endif // DEBUG
heap_->relocation_mutex_->Unlock();
}
}
+#ifdef DEBUG
+ static bool IsLocked(Heap* heap) {
+ return heap->relocation_mutex_locked_;
+ }
+#endif // DEBUG
+
private:
Heap* heap_;
};
MemoryChunk* chunks_queued_for_free_;
Mutex* relocation_mutex_;
+#ifdef DEBUG
+ bool relocation_mutex_locked_;
+#endif // DEBUG
friend class Factory;
friend class GCTracer;
}
-HType HType::TypeFromValue(Isolate* isolate, Handle<Object> value) {
- // Handle dereferencing is safe here: an object's type as checked below
- // never changes.
- AllowHandleDereference allow_handle_deref(isolate);
-
+HType HType::TypeFromValue(Handle<Object> value) {
HType result = HType::Tagged();
if (value->IsSmi()) {
result = HType::Smi();
}
if (check_ == IS_INTERNALIZED_STRING && value()->IsConstant()) {
- // Dereferencing is safe here:
- // an internalized string cannot become non-internalized.
- AllowHandleDereference allow_handle_deref(isolate());
- if (HConstant::cast(value())->handle()->IsInternalizedString()) return NULL;
+ if (HConstant::cast(value())->HasInternalizedStringValue()) return NULL;
}
return this;
}
HConstant::HConstant(Handle<Object> handle, Representation r)
- : handle_(handle),
- has_int32_value_(false),
- has_double_value_(false) {
- // Dereferencing here is safe: the value of a number object does not change.
- AllowHandleDereference allow_handle_deref(Isolate::Current());
+ : handle_(handle),
+ has_int32_value_(false),
+ has_double_value_(false),
+ is_internalized_string_(false),
+ boolean_value_(handle->BooleanValue()) {
if (handle_->IsNumber()) {
double n = handle_->Number();
has_int32_value_ = IsInteger32(n);
int32_value_ = DoubleToInt32(n);
double_value_ = n;
has_double_value_ = true;
+ } else {
+ type_from_value_ = HType::TypeFromValue(handle_);
+ is_internalized_string_ = handle_->IsInternalizedString();
}
if (r.IsNone()) {
if (has_int32_value_) {
}
-HConstant::HConstant(int32_t integer_value, Representation r)
+// Constructor for constants whose HType and boolean value are already
+// known, so the handle never needs to be dereferenced here (safe to call
+// from the compiler thread).
+// NOTE(review): the parameter is spelled "is_internalize_string" here but
+// "is_internalized_string" in the class declaration — consider unifying.
+HConstant::HConstant(Handle<Object> handle,
+ Representation r,
+ HType type,
+ bool is_internalize_string,
+ bool boolean_value)
+ : handle_(handle),
+ has_int32_value_(false),
+ has_double_value_(false),
+ is_internalized_string_(is_internalize_string),
+ boolean_value_(boolean_value),
+ type_from_value_(type) {
+ ASSERT(!handle.is_null());
+ ASSERT(!type.IsUninitialized());
+ ASSERT(!type.IsTaggedNumber());
+ Initialize(r);
+}
+
+
+// Integer constant; boolean value follows ES ToBoolean (nonzero == true).
+// NOTE(review): optional_handle is accepted but never stored — the member
+// initializer list visible here has no handle_(optional_handle). Callers
+// such as CopyToRepresentation pass handle_ expecting it to be preserved;
+// confirm whether the handle_ initializer is missing.
+HConstant::HConstant(int32_t integer_value,
+ Representation r,
+ Handle<Object> optional_handle)
: has_int32_value_(true),
has_double_value_(true),
+ is_internalized_string_(false),
+ boolean_value_(integer_value != 0),
int32_value_(integer_value),
double_value_(FastI2D(integer_value)) {
Initialize(r);
}
-HConstant::HConstant(double double_value, Representation r)
+HConstant::HConstant(double double_value,
+ Representation r,
+ Handle<Object> optional_handle)
: has_int32_value_(IsInteger32(double_value)),
has_double_value_(true),
+ is_internalized_string_(false),
+ boolean_value_(double_value != 0 && !isnan(double_value)),
int32_value_(DoubleToInt32(double_value)),
double_value_(double_value) {
Initialize(r);
// Returns an equivalent constant in representation |r|, or NULL if the
// value cannot be represented that way. Uses the cached type/boolean
// bits so no handle dereference is required.
HConstant* HConstant::CopyToRepresentation(Representation r, Zone* zone) const {
if (r.IsInteger32() && !has_int32_value_) return NULL;
if (r.IsDouble() && !has_double_value_) return NULL;
- if (handle_.is_null()) {
- ASSERT(has_int32_value_ || has_double_value_);
- if (has_int32_value_) return new(zone) HConstant(int32_value_, r);
- return new(zone) HConstant(double_value_, r);
- }
- return new(zone) HConstant(handle_, r);
+ // Numeric constants carry their (possibly null) handle along.
+ if (has_int32_value_) return new(zone) HConstant(int32_value_, r, handle_);
+ if (has_double_value_) return new(zone) HConstant(double_value_, r, handle_);
+ ASSERT(!handle_.is_null());
+ return new(zone) HConstant(
+ handle_, r, type_from_value_, is_internalized_string_, boolean_value_);
}
// Returns a copy of this constant truncated to Integer32 (reusing the
// existing handle when present), or NULL when no numeric value is cached.
HConstant* HConstant::CopyToTruncatedInt32(Zone* zone) const {
if (has_int32_value_) {
- if (handle_.is_null()) {
- return new(zone) HConstant(int32_value_, Representation::Integer32());
- } else {
- // Re-use the existing Handle if possible.
- return new(zone) HConstant(handle_, Representation::Integer32());
- }
- } else if (has_double_value_) {
- return new(zone) HConstant(DoubleToInt32(double_value_),
- Representation::Integer32());
- } else {
- return NULL;
- }
-}
-
-
-bool HConstant::ToBoolean() {
- // Converts the constant's boolean value according to
- // ECMAScript section 9.2 ToBoolean conversion.
- if (HasInteger32Value()) return Integer32Value() != 0;
- if (HasDoubleValue()) {
- double v = DoubleValue();
- return v != 0 && !isnan(v);
+ return new(zone) HConstant(
+ int32_value_, Representation::Integer32(), handle_);
}
- // Dereferencing is safe: singletons do not change and strings are
- // immutable.
- AllowHandleDereference allow_handle_deref(isolate());
- if (handle_->IsTrue()) return true;
- if (handle_->IsFalse()) return false;
- if (handle_->IsUndefined()) return false;
- if (handle_->IsNull()) return false;
- if (handle_->IsString() && String::cast(*handle_)->length() == 0) {
- return false;
+ if (has_double_value_) {
+ return new(zone) HConstant(
+ DoubleToInt32(double_value_), Representation::Integer32(), handle_);
}
- return true;
+ return NULL;
}
+
void HConstant::PrintDataTo(StringStream* stream) {
if (has_int32_value_) {
stream->Add("%d ", int32_value_);
return Smi::IsValid(int32_value_) ? HType::Smi() : HType::HeapNumber();
}
if (has_double_value_) return HType::HeapNumber();
- return HType::TypeFromValue(isolate(), handle_);
+ ASSERT(!type_from_value_.IsUninitialized());
+ return type_from_value_;
}
return HType(static_cast<Type>(type_ & other.type_));
}
- bool Equals(const HType& other) {
+ bool Equals(const HType& other) const {
return type_ == other.type_;
}
return Combine(other).Equals(other);
}
- bool IsTagged() {
+ bool IsTagged() const {
ASSERT(type_ != kUninitialized);
return ((type_ & kTagged) == kTagged);
}
- bool IsTaggedPrimitive() {
+ bool IsTaggedPrimitive() const {
ASSERT(type_ != kUninitialized);
return ((type_ & kTaggedPrimitive) == kTaggedPrimitive);
}
- bool IsTaggedNumber() {
+ bool IsTaggedNumber() const {
ASSERT(type_ != kUninitialized);
return ((type_ & kTaggedNumber) == kTaggedNumber);
}
- bool IsSmi() {
+ bool IsSmi() const {
ASSERT(type_ != kUninitialized);
return ((type_ & kSmi) == kSmi);
}
- bool IsHeapNumber() {
+ bool IsHeapNumber() const {
ASSERT(type_ != kUninitialized);
return ((type_ & kHeapNumber) == kHeapNumber);
}
- bool IsString() {
+ bool IsString() const {
ASSERT(type_ != kUninitialized);
return ((type_ & kString) == kString);
}
- bool IsBoolean() {
+ bool IsBoolean() const {
ASSERT(type_ != kUninitialized);
return ((type_ & kBoolean) == kBoolean);
}
- bool IsNonPrimitive() {
+ bool IsNonPrimitive() const {
ASSERT(type_ != kUninitialized);
return ((type_ & kNonPrimitive) == kNonPrimitive);
}
- bool IsJSArray() {
+ bool IsJSArray() const {
ASSERT(type_ != kUninitialized);
return ((type_ & kJSArray) == kJSArray);
}
- bool IsJSObject() {
+ bool IsJSObject() const {
ASSERT(type_ != kUninitialized);
return ((type_ & kJSObject) == kJSObject);
}
- bool IsUninitialized() {
+ bool IsUninitialized() const {
return type_ == kUninitialized;
}
- bool IsHeapObject() {
+ bool IsHeapObject() const {
ASSERT(type_ != kUninitialized);
return IsHeapNumber() || IsString() || IsNonPrimitive();
}
- static HType TypeFromValue(Isolate* isolate, Handle<Object> value);
+ static HType TypeFromValue(Handle<Object> value);
const char* ToString();
virtual intptr_t Hashcode() {
ASSERT_ALLOCATION_DISABLED;
// Dereferencing to use the object's raw address for hashing is safe.
- AllowHandleDereference allow_handle_deref(isolate());
+ HandleDereferenceGuard allow_handle_deref(isolate(),
+ HandleDereferenceGuard::ALLOW);
+ SLOW_ASSERT(Heap::RelocationLock::IsLocked(isolate()->heap()) ||
+ !isolate()->optimizing_compiler_thread()->IsOptimizerThread());
intptr_t hash = 0;
for (int i = 0; i < prototypes_.length(); i++) {
hash = 17 * hash + reinterpret_cast<intptr_t>(*prototypes_[i]);
class HConstant: public HTemplateInstruction<0> {
public:
HConstant(Handle<Object> handle, Representation r);
- HConstant(int32_t value, Representation r);
- HConstant(double value, Representation r);
+ HConstant(int32_t value,
+ Representation r,
+ Handle<Object> optional_handle = Handle<Object>::null());
+ HConstant(double value,
+ Representation r,
+ Handle<Object> optional_handle = Handle<Object>::null());
+ HConstant(Handle<Object> handle,
+ Representation r,
+ HType type,
+ bool is_internalized_string,
+ bool boolean_value);
Handle<Object> handle() {
if (handle_.is_null()) {
Heap* heap = isolate()->heap();
// We should have handled minus_zero_value and nan_value in the
// has_double_value_ clause above.
- // Dereferencing is safe to compare against singletons.
- AllowHandleDereference allow_handle_deref(isolate());
+ // Dereferencing is safe to compare against immovable singletons.
+ HandleDereferenceGuard allow_handle_deref(isolate(),
+ HandleDereferenceGuard::ALLOW);
ASSERT(*handle_ != heap->minus_zero_value());
ASSERT(*handle_ != heap->nan_value());
return *handle_ == heap->undefined_value() ||
bool HasStringValue() const {
if (has_double_value_ || has_int32_value_) return false;
ASSERT(!handle_.is_null());
- return handle_->IsString();
+ return type_from_value_.IsString();
}
Handle<String> StringValue() const {
ASSERT(HasStringValue());
return Handle<String>::cast(handle_);
}
+ bool HasInternalizedStringValue() const {
+ return HasStringValue() && is_internalized_string_;
+ }
- bool ToBoolean();
+ bool BooleanValue() const { return boolean_value_; }
bool IsUint32() {
return HasInteger32Value() && (Integer32Value() >= 0);
} else {
ASSERT(!handle_.is_null());
// Dereferencing to use the object's raw address for hashing is safe.
- AllowHandleDereference allow_handle_deref(isolate());
+ HandleDereferenceGuard allow_handle_deref(isolate(),
+ HandleDereferenceGuard::ALLOW);
+ SLOW_ASSERT(Heap::RelocationLock::IsLocked(isolate()->heap()) ||
+ !isolate()->optimizing_compiler_thread()->IsOptimizerThread());
hash = reinterpret_cast<intptr_t>(*handle_);
}
// not the converse.
bool has_int32_value_ : 1;
bool has_double_value_ : 1;
+ bool is_internalized_string_ : 1; // TODO(yangguo): make this part of HType.
+ bool boolean_value_ : 1;
int32_t int32_value_;
double double_value_;
+ HType type_from_value_;
};
virtual intptr_t Hashcode() {
ASSERT_ALLOCATION_DISABLED;
// Dereferencing to use the object's raw address for hashing is safe.
- AllowHandleDereference allow_handle_deref(isolate());
+ HandleDereferenceGuard allow_handle_deref(isolate(),
+ HandleDereferenceGuard::ALLOW);
+ SLOW_ASSERT(Heap::RelocationLock::IsLocked(isolate()->heap()) ||
+ !isolate()->optimizing_compiler_thread()->IsOptimizerThread());
return reinterpret_cast<intptr_t>(*cell_);
}
void HGraph::Verify(bool do_full_verify) const {
// Allow dereferencing for debug mode verification.
// NOTE(review): the next statement constructs an unnamed temporary
// Heap::RelocationLock that is destroyed immediately at the end of the
// full-expression, so relocation is NOT held for the verification below.
// Bind it to a named local if the lock is meant to cover Verify().
- AllowHandleDereference allow_handle_deref(isolate());
+ Heap::RelocationLock(isolate()->heap());
+ HandleDereferenceGuard allow_handle_deref(isolate(),
+ HandleDereferenceGuard::ALLOW);
for (int i = 0; i < blocks_.length(); i++) {
HBasicBlock* block = blocks_.at(i);
#endif
-HConstant* HGraph::GetConstant(SetOncePointer<HConstant>* pointer,
- Handle<Object> value) {
- if (!pointer->is_set()) {
- HConstant* constant = new(zone()) HConstant(value,
- Representation::Tagged());
- constant->InsertAfter(GetConstantUndefined());
- pointer->set(constant);
- }
- return pointer->get();
-}
-
-
HConstant* HGraph::GetConstantInt32(SetOncePointer<HConstant>* pointer,
int32_t value) {
if (!pointer->is_set()) {
}
-HConstant* HGraph::GetConstantTrue() {
- return GetConstant(&constant_true_, isolate()->factory()->true_value());
+// Defines HGraph::GetConstant<Name>(): a lazily-created, cached singleton
+// HConstant built with a known HType and boolean value, so using it never
+// requires a handle dereference. The hard-coded "false" argument is
+// is_internalized_string. (Comments must stay outside the macro body: a
+// // comment line would swallow its trailing line-continuation backslash.)
+#define DEFINE_GET_CONSTANT(Name, name, htype, boolean_value) \
+HConstant* HGraph::GetConstant##Name() { \
+ if (!constant_##name##_.is_set()) { \
+ HConstant* constant = new(zone()) HConstant( \
+ isolate()->factory()->name##_value(), \
+ Representation::Tagged(), \
+ htype, \
+ false, \
+ boolean_value); \
+ constant->InsertAfter(GetConstantUndefined()); \
+ constant_##name##_.set(constant); \
+ } \
+ return constant_##name##_.get(); \
}
-HConstant* HGraph::GetConstantFalse() {
- return GetConstant(&constant_false_, isolate()->factory()->false_value());
-}
+DEFINE_GET_CONSTANT(True, true, HType::Boolean(), true)
+DEFINE_GET_CONSTANT(False, false, HType::Boolean(), false)
+DEFINE_GET_CONSTANT(Hole, the_hole, HType::Tagged(), false)
-
-HConstant* HGraph::GetConstantHole() {
- return GetConstant(&constant_hole_, isolate()->factory()->the_hole_value());
-}
+#undef DEFINE_GET_CONSTANT
HGraphBuilder::CheckBuilder::CheckBuilder(HGraphBuilder* builder, BailoutId id)
}
if (value->IsConstant()) {
HConstant* constant_value = HConstant::cast(value);
- if (constant_value->ToBoolean()) {
+ if (constant_value->BooleanValue()) {
builder->current_block()->Goto(if_true(), builder->function_state());
} else {
builder->current_block()->Goto(if_false(), builder->function_state());
void HGraph::GlobalValueNumbering() {
// Perform common subexpression elimination and loop-invariant code motion.
if (FLAG_use_gvn) {
+ // We use objects' raw addresses for identification, so they must not move.
+ Heap::RelocationLock relocation_lock(isolate()->heap());
HPhase phase("H_Global value numbering", this);
HGlobalValueNumberer gvn(this, info());
bool removed_side_effects = gvn.Analyze();
if (left_value->IsConstant()) {
HConstant* left_constant = HConstant::cast(left_value);
- if ((is_logical_and && left_constant->ToBoolean()) ||
- (!is_logical_and && !left_constant->ToBoolean())) {
+ if ((is_logical_and && left_constant->BooleanValue()) ||
+ (!is_logical_and && !left_constant->BooleanValue())) {
Drop(1); // left_value.
CHECK_BAILOUT(VisitForValue(expr->right()));
}
void HTracer::TraceLithium(const char* name, LChunk* chunk) {
- AllowHandleDereference allow_handle_deref(chunk->isolate());
+ ASSERT(!FLAG_parallel_recompilation);
+ HandleDereferenceGuard allow_handle_deref(chunk->isolate(),
+ HandleDereferenceGuard::ALLOW);
Trace(name, chunk->graph(), chunk);
}
void HTracer::TraceHydrogen(const char* name, HGraph* graph) {
- AllowHandleDereference allow_handle_deref(graph->isolate());
+ ASSERT(!FLAG_parallel_recompilation);
+ HandleDereferenceGuard allow_handle_deref(graph->isolate(),
+ HandleDereferenceGuard::ALLOW);
Trace(name, graph, NULL);
}
}
private:
- HConstant* GetConstant(SetOncePointer<HConstant>* pointer,
- Handle<Object> value);
HConstant* GetConstantInt32(SetOncePointer<HConstant>* pointer,
int32_t integer_value);
SetOncePointer<HConstant> constant_minus1_;
SetOncePointer<HConstant> constant_true_;
SetOncePointer<HConstant> constant_false_;
- SetOncePointer<HConstant> constant_hole_;
+ SetOncePointer<HConstant> constant_the_hole_;
SetOncePointer<HArgumentsObject> arguments_object_;
SetOncePointer<HBasicBlock> osr_loop_entry_;
if (value->EmitAtUses()) {
ASSERT(value->IsConstant());
ASSERT(!value->representation().IsDouble());
- HBasicBlock* successor = HConstant::cast(value)->ToBoolean()
+ HBasicBlock* successor = HConstant::cast(value)->BooleanValue()
? instr->FirstSuccessor()
: instr->SecondSuccessor();
return new(zone()) LGoto(successor->block_id());
memset(code_kind_statistics_, 0,
sizeof(code_kind_statistics_[0]) * Code::NUMBER_OF_KINDS);
- allow_handle_deref_ = true;
+ allow_compiler_thread_handle_deref_ = true;
+ allow_execution_thread_handle_deref_ = true;
#endif
#ifdef ENABLE_DEBUGGER_SUPPORT
}
+#ifdef DEBUG
+// Returns whether the current thread may dereference handles. Permission
+// is tracked separately for the optimizer thread and for all other
+// (execution) threads; when both flags are set the thread-id lookup is
+// skipped entirely.
+bool Isolate::AllowHandleDereference() {
+ if (allow_execution_thread_handle_deref_ &&
+ allow_compiler_thread_handle_deref_) {
+ // Short-cut to avoid polling thread id.
+ return true;
+ }
+ if (FLAG_parallel_recompilation &&
+ optimizing_compiler_thread()->IsOptimizerThread()) {
+ return allow_compiler_thread_handle_deref_;
+ } else {
+ return allow_execution_thread_handle_deref_;
+ }
+}
+
+
+// Sets the dereference permission for whichever side (compiler thread or
+// execution threads) the caller is currently running on.
+void Isolate::SetAllowHandleDereference(bool allow) {
+ if (FLAG_parallel_recompilation &&
+ optimizing_compiler_thread()->IsOptimizerThread()) {
+ allow_compiler_thread_handle_deref_ = allow;
+ } else {
+ allow_execution_thread_handle_deref_ = allow;
+ }
+}
+#endif
+
+
HStatistics* Isolate::GetHStatistics() {
if (hstatistics() == NULL) set_hstatistics(new HStatistics());
return hstatistics();
int* code_kind_statistics() { return code_kind_statistics_; }
- bool allow_handle_deref() { return allow_handle_deref_; }
- void set_allow_handle_deref(bool allow) { allow_handle_deref_ = allow; }
+ bool AllowHandleDereference();
+
+ void SetAllowHandleDereference(bool allow);
#endif
#if defined(V8_TARGET_ARCH_ARM) && !defined(__arm__) || \
JSObject::SpillInformation js_spill_information_;
int code_kind_statistics_[Code::NUMBER_OF_KINDS];
- bool allow_handle_deref_;
+ bool allow_compiler_thread_handle_deref_;
+ bool allow_execution_thread_handle_deref_;
#endif
#ifdef ENABLE_DEBUGGER_SUPPORT
LInstruction* LChunkBuilder::DoBranch(HBranch* instr) {
HValue* value = instr->value();
if (value->EmitAtUses()) {
- HBasicBlock* successor = HConstant::cast(value)->ToBoolean()
+ HBasicBlock* successor = HConstant::cast(value)->BooleanValue()
? instr->FirstSuccessor()
: instr->SecondSuccessor();
return new(zone()) LGoto(successor->block_id());
void OptimizingCompilerThread::CompileNext() {
- Heap::RelocationLock relocation_lock(isolate_->heap());
OptimizingCompiler* optimizing_compiler = NULL;
input_queue_.Dequeue(&optimizing_compiler);
Barrier_AtomicIncrement(&queue_length_, static_cast<Atomic32>(-1));
FLAG_gc_global = true;
FLAG_max_new_space_size = (1 << (kPageSizeBits - 10)) * 2;
}
+ if (FLAG_trace_hydrogen) FLAG_parallel_recompilation = false;
OS::SetUp();
CPU::SetUp();
use_crankshaft_ = FLAG_crankshaft
if (value->EmitAtUses()) {
ASSERT(value->IsConstant());
ASSERT(!value->representation().IsDouble());
- HBasicBlock* successor = HConstant::cast(value)->ToBoolean()
+ HBasicBlock* successor = HConstant::cast(value)->BooleanValue()
? instr->FirstSuccessor()
: instr->SecondSuccessor();
return new(zone()) LGoto(successor->block_id());