#include "src/compiler.h"
+#include "src/ast-numbering.h"
#include "src/bootstrapper.h"
#include "src/codegen.h"
#include "src/compilation-cache.h"
if (!script_.is_null() && script_->type()->value() == Script::TYPE_NATIVE) {
MarkAsNative();
}
+ // Compiling for the snapshot typically results in different code than
+ // compiling later on. This means that code recompiled with deoptimization
+ // support won't be "equivalent" (as defined by SharedFunctionInfo::
+ // EnableDeoptimizationSupport), so it will replace the old code and all
+ // its type feedback. To avoid this, always compile functions in the snapshot
+ // with deoptimization support.
+ if (isolate_->serializer_enabled()) EnableDeoptimizationSupport();
+
if (isolate_->debug()->is_active()) MarkAsDebug();
if (FLAG_context_specialization) MarkAsContextSpecializing();
if (FLAG_turbo_inlining) MarkAsInliningEnabled();
DCHECK(scope_ == NULL);
scope_ = scope;
- int length = function()->slot_count();
if (feedback_vector_.is_null()) {
// Allocate the feedback vector too.
- feedback_vector_ = isolate()->factory()->NewTypeFeedbackVector(length);
+ feedback_vector_ = isolate()->factory()->NewTypeFeedbackVector(
+ function()->slot_count(), function()->ic_slot_count());
}
- DCHECK(feedback_vector_->length() == length);
+ DCHECK(feedback_vector_->Slots() == function()->slot_count() &&
+ feedback_vector_->ICSlots() == function()->ic_slot_count());
}
OptimizedCompileJob::Status OptimizedCompileJob::CreateGraph() {
- DCHECK(isolate()->use_crankshaft());
DCHECK(info()->IsOptimizing());
DCHECK(!info()->IsCompilingForDebugging());
compiler::Pipeline pipeline(info());
pipeline.GenerateCode();
if (!info()->code().is_null()) {
- if (FLAG_turbo_deoptimization) {
- info()->context()->native_context()->AddOptimizedCode(*info()->code());
- }
return SetLastStatus(SUCCEEDED);
}
}
DCHECK(last_status() == SUCCEEDED);
// TODO(turbofan): Currently everything is done in the first phase.
if (!info()->code().is_null()) {
+ if (FLAG_turbo_deoptimization) {
+ info()->context()->native_context()->AddOptimizedCode(*info()->code());
+ }
RecordOptimizationStats();
return last_status();
}
static bool CompileUnoptimizedCode(CompilationInfo* info) {
DCHECK(AllowCompilation::IsAllowed(info->isolate()));
- DCHECK(info->function() != NULL);
- if (!Rewriter::Rewrite(info)) return false;
- if (!Scope::Analyze(info)) return false;
- DCHECK(info->scope() != NULL);
-
- if (!FullCodeGenerator::MakeCode(info)) {
+ if (!Compiler::Analyze(info) || !FullCodeGenerator::MakeCode(info)) {
Isolate* isolate = info->isolate();
if (!isolate->has_pending_exception()) isolate->StackOverflow();
return false;
shared->set_strict_mode(lit->strict_mode());
SetExpectedNofPropertiesFromEstimate(shared, lit->expected_property_count());
shared->set_bailout_reason(lit->dont_optimize_reason());
- shared->set_ast_node_count(lit->ast_node_count());
// Compile unoptimized code.
if (!CompileUnoptimizedCode(info)) return MaybeHandle<Code>();
}
-static bool CompileOptimizedPrologue(CompilationInfo* info) {
- if (!Parser::Parse(info)) return false;
+// Runs AST numbering over the parsed function and, when a SharedFunctionInfo
+// is attached, mirrors the resulting AST node count onto it.
+static bool Renumber(CompilationInfo* info) {
+ if (!AstNumbering::Renumber(info->function(), info->zone())) return false;
+ if (!info->shared_info().is_null()) {
+ info->shared_info()->set_ast_node_count(info->function()->ast_node_count());
+ }
+ return true;
+}
+
+
+// Post-parse analysis pipeline: AST rewriting, scope analysis and AST
+// numbering. Requires that parsing has already populated info->function().
+bool Compiler::Analyze(CompilationInfo* info) {
+ DCHECK(info->function() != NULL);
if (!Rewriter::Rewrite(info)) return false;
if (!Scope::Analyze(info)) return false;
+ if (!Renumber(info)) return false;
DCHECK(info->scope() != NULL);
return true;
}
+// Parses and then runs the full analysis pipeline; this is the successor of
+// the removed CompileOptimizedPrologue.
+bool Compiler::ParseAndAnalyze(CompilationInfo* info) {
+ if (!Parser::Parse(info)) return false;
+ return Compiler::Analyze(info);
+}
+
+
static bool GetOptimizedCodeNow(CompilationInfo* info) {
- if (!CompileOptimizedPrologue(info)) return false;
+ if (!Compiler::ParseAndAnalyze(info)) return false;
TimerEventScope<TimerEventRecompileSynchronous> timer(info->isolate());
}
CompilationHandleScope handle_scope(info);
- if (!CompileOptimizedPrologue(info)) return false;
+ if (!Compiler::ParseAndAnalyze(info)) return false;
info->SaveHandles(); // Copy handles to the compilation handle scope.
TimerEventScope<TimerEventRecompileSynchronous> timer(info->isolate());
MaybeHandle<Code> Compiler::GetLazyCode(Handle<JSFunction> function) {
- DCHECK(!function->GetIsolate()->has_pending_exception());
+ Isolate* isolate = function->GetIsolate();
+ DCHECK(!isolate->has_pending_exception());
DCHECK(!function->is_compiled());
-
- if (FLAG_turbo_asm && function->shared()->asm_function()) {
+ // If the debugger is active, do not compile with turbofan unless we can
+ // deopt from turbofan code.
+ if (FLAG_turbo_asm && function->shared()->asm_function() &&
+ (FLAG_turbo_deoptimization || !isolate->debug()->is_active())) {
CompilationInfoWithZone info(function);
- VMState<COMPILER> state(info.isolate());
- PostponeInterruptsScope postpone(info.isolate());
+ VMState<COMPILER> state(isolate);
+ PostponeInterruptsScope postpone(isolate);
info.SetOptimizing(BailoutId::None(),
Handle<Code>(function->shared()->code()));
info.MarkAsTypingEnabled();
info.MarkAsInliningDisabled();
- if (GetOptimizedCodeNow(&info)) return info.code();
+ if (GetOptimizedCodeNow(&info)) {
+ DCHECK(function->shared()->is_compiled());
+ return info.code();
+ }
}
if (function->shared()->is_compiled()) {
CompilationInfoWithZone info(function);
Handle<Code> result;
- ASSIGN_RETURN_ON_EXCEPTION(info.isolate(), result,
- GetUnoptimizedCodeCommon(&info), Code);
+ ASSIGN_RETURN_ON_EXCEPTION(isolate, result, GetUnoptimizedCodeCommon(&info),
+ Code);
- if (FLAG_always_opt &&
- info.isolate()->use_crankshaft() &&
+ if (FLAG_always_opt && isolate->use_crankshaft() &&
!info.shared_info()->optimization_disabled() &&
- !info.isolate()->DebuggerHasBreakPoints()) {
+ !isolate->DebuggerHasBreakPoints()) {
Handle<Code> opt_code;
if (Compiler::GetOptimizedCode(
function, result,
// TODO(turbofan): In the future, unoptimized code with deopt support could
// be generated lazily once deopt is triggered.
bool Compiler::EnsureDeoptimizationSupport(CompilationInfo* info) {
+ DCHECK(info->function() != NULL);
+ DCHECK(info->scope() != NULL);
if (!info->shared_info()->has_deoptimization_support()) {
CompilationInfoWithZone unoptimized(info->shared_info());
// Note that we use the same AST that we will use for generating the
DCHECK(info->is_eval() || info->is_global());
+ info->MarkAsToplevel();
+
Handle<SharedFunctionInfo> result;
{ VMState<COMPILER> state(info->isolate());
compile_options == ScriptCompiler::kConsumeCodeCache &&
!isolate->debug()->is_loaded()) {
HistogramTimerScope timer(isolate->counters()->compile_deserialize());
- return CodeSerializer::Deserialize(isolate, *cached_data, source);
+ Handle<SharedFunctionInfo> result;
+ if (CodeSerializer::Deserialize(isolate, *cached_data, source)
+ .ToHandle(&result)) {
+ return result;
+ }
+ // Deserializer failed. Fall through to compile.
} else {
maybe_result = compilation_cache->LookupScript(
source, script_name, line_offset, column_offset,
result = CompileToplevel(&info);
if (extension == NULL && !result.is_null() && !result->dont_cache()) {
compilation_cache->PutScript(source, context, result);
- if (FLAG_serialize_toplevel &&
+ // TODO(yangguo): Issue 3628
+ // With block scoping, top-level variables may resolve to a global
+ // context, which makes the code context-dependent.
+ if (FLAG_serialize_toplevel && !FLAG_harmony_scoping &&
compile_options == ScriptCompiler::kProduceCodeCache) {
HistogramTimerScope histogram_timer(
isolate->counters()->compile_serialize());
*cached_data = CodeSerializer::Serialize(isolate, result, source);
if (FLAG_profile_deserialization) {
- PrintF("[Compiling and serializing %d bytes took %0.3f ms]\n",
- (*cached_data)->length(), timer.Elapsed().InMillisecondsF());
+ PrintF("[Compiling and serializing took %0.3f ms]\n",
+ timer.Elapsed().InMillisecondsF());
}
}
}
bool allow_lazy = literal->AllowsLazyCompilation() &&
!DebuggerWantsEagerCompilation(&info, allow_lazy_without_ctx);
+
+ if (outer_info->is_toplevel() && outer_info->will_serialize()) {
+ // Make sure that if the toplevel code is to be serialized, every
+ // inner function is allowed to be compiled lazily.
+ DCHECK(allow_lazy);
+ }
+
// Generate code
Handle<ScopeInfo> scope_info;
if (FLAG_lazy && allow_lazy && !literal->is_parenthesized()) {
Handle<Code> code = isolate->builtins()->CompileLazy();
info.SetCode(code);
scope_info = Handle<ScopeInfo>(ScopeInfo::Empty(isolate));
- } else if (FullCodeGenerator::MakeCode(&info)) {
+ } else if (Renumber(&info) && FullCodeGenerator::MakeCode(&info)) {
DCHECK(!info.code().is_null());
scope_info = ScopeInfo::Create(info.scope(), info.zone());
} else {
bool Compiler::DebuggerWantsEagerCompilation(CompilationInfo* info,
bool allow_lazy_without_ctx) {
- return LiveEditFunctionTracker::IsActive(info->isolate()) ||
- (info->isolate()->DebuggerHasBreakPoints() && !allow_lazy_without_ctx);
+ // When LiveEdit is active, always request eager compilation.
+ if (LiveEditFunctionTracker::IsActive(info->isolate())) return true;
+ Debug* debug = info->isolate()->debug();
+ // NOTE(review): this widens the old DebuggerHasBreakPoints() condition to
+ // also treat a merely-active debugger as "debugging" — confirm intended.
+ bool debugging = debug->is_active() || debug->has_break_points();
+ return debugging && !allow_lazy_without_ctx;
}