Refactor the compiling pipeline.
author    yangguo@chromium.org <yangguo@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
Mon, 23 Dec 2013 14:30:35 +0000 (14:30 +0000)
committer yangguo@chromium.org <yangguo@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
Mon, 23 Dec 2013 14:30:35 +0000 (14:30 +0000)
Goals:
 - Easier to read, with more suitable identifiers.
 - Better distinction between compiling optimized and unoptimized code.
 - The compiler no longer installs code on the function (see the sketch below).
 - Easier to add features (e.g. caching optimized code for OSR).
 - Removal of unnecessary code.
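To illustrate the third goal, here is a minimal caller-side sketch based on the
Compiler::GetUnoptimizedCode and Compiler::EnsureCompiled entry points added in
src/compiler.cc below. The helper name is hypothetical; the pattern itself is
exactly what Compiler::EnsureCompiled wraps:

    // Before: JSFunction::CompileLazy(function, flag) compiled the function
    // and installed the code on it as a side effect.
    // After: the compiler only returns a Code handle; installation is explicit.
    static bool EnsureMyFunctionIsCompiled(Handle<JSFunction> function) {
      if (function->is_compiled()) return true;
      Handle<Code> code = Compiler::GetUnoptimizedCode(function);
      if (code.is_null()) return false;   // compilation failed
      function->ReplaceCode(*code);       // caller installs the code
      return true;
    }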

R=titzer@chromium.org

Review URL: https://codereview.chromium.org/110203002

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@18409 ce2b1a6d-e550-0410-aec6-3dcde31c8c00

39 files changed:
src/accessors.cc
src/api.cc
src/arm/builtins-arm.cc
src/bootstrapper.cc
src/builtins.h
src/compilation-cache.cc
src/compilation-cache.h
src/compiler.cc
src/compiler.h
src/debug.cc
src/debug.h
src/factory.cc
src/full-codegen.cc
src/full-codegen.h
src/ia32/builtins-ia32.cc
src/ic.cc
src/liveedit.cc
src/log.cc
src/mark-compact.cc
src/mips/builtins-mips.cc
src/objects-inl.h
src/objects.cc
src/objects.h
src/optimizing-compiler-thread.cc
src/optimizing-compiler-thread.h
src/parser.h
src/runtime-profiler.cc
src/runtime.cc
src/runtime.h
src/x64/builtins-x64.cc
test/cctest/test-compiler.cc
test/mjsunit/compiler/concurrent-invalidate-transition-map.js
test/mjsunit/compiler/concurrent-proto-change.js
test/mjsunit/concurrent-initial-prototype-change.js
test/mjsunit/fuzz-natives-part1.js
test/mjsunit/fuzz-natives-part2.js
test/mjsunit/fuzz-natives-part3.js
test/mjsunit/fuzz-natives-part4.js
test/mjsunit/regress/regress-2618.js

diff --git a/src/accessors.cc b/src/accessors.cc
index 4da9dd4..ba84c9a 100644
@@ -28,6 +28,7 @@
 #include "v8.h"
 #include "accessors.h"
 
+#include "compiler.h"
 #include "contexts.h"
 #include "deoptimizer.h"
 #include "execution.h"
@@ -648,9 +649,9 @@ MaybeObject* Accessors::FunctionGetLength(Isolate* isolate,
   // If the function isn't compiled yet, the length is not computed correctly
   // yet. Compile it now and return the right length.
   HandleScope scope(isolate);
-  Handle<JSFunction> handle(function);
-  if (JSFunction::CompileLazy(handle, KEEP_EXCEPTION)) {
-    return Smi::FromInt(handle->shared()->length());
+  Handle<JSFunction> function_handle(function);
+  if (Compiler::EnsureCompiled(function_handle, KEEP_EXCEPTION)) {
+    return Smi::FromInt(function_handle->shared()->length());
   }
   return Failure::Exception();
 }
diff --git a/src/api.cc b/src/api.cc
index 640c01b..c5023f8 100644
@@ -1720,16 +1720,16 @@ Local<Script> Script::New(v8::Handle<String> source,
       pre_data_impl = NULL;
     }
     i::Handle<i::SharedFunctionInfo> result =
-      i::Compiler::Compile(str,
-                           name_obj,
-                           line_offset,
-                           column_offset,
-                           is_shared_cross_origin,
-                           isolate->global_context(),
-                           NULL,
-                           pre_data_impl,
-                           Utils::OpenHandle(*script_data, true),
-                           i::NOT_NATIVES_CODE);
+      i::Compiler::CompileScript(str,
+                                 name_obj,
+                                 line_offset,
+                                 column_offset,
+                                 is_shared_cross_origin,
+                                 isolate->global_context(),
+                                 NULL,
+                                 pre_data_impl,
+                                 Utils::OpenHandle(*script_data, true),
+                                 i::NOT_NATIVES_CODE);
     has_pending_exception = result.is_null();
     EXCEPTION_BAILOUT_CHECK(isolate, Local<Script>());
     raw_result = *result;
diff --git a/src/arm/builtins-arm.cc b/src/arm/builtins-arm.cc
index d95b746..fb319df 100644
@@ -289,8 +289,8 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
 }
 
 
-static void CallRuntimePassFunction(MacroAssembler* masm,
-                                    Runtime::FunctionId function_id) {
+static void CallRuntimePassFunction(
+    MacroAssembler* masm, Runtime::FunctionId function_id) {
   FrameScope scope(masm, StackFrame::INTERNAL);
   // Push a copy of the function onto the stack.
   __ push(r1);
@@ -313,7 +313,13 @@ static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
 }
 
 
-void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
+static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
+  __ add(r0, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
+  __ Jump(r0);
+}
+
+
+void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
   // Checking whether the queued function is ready for install is optional,
   // since we come across interrupts and stack checks elsewhere.  However,
   // not checking may delay installing ready functions, and always checking
@@ -324,22 +330,14 @@ void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
   __ cmp(sp, Operand(ip));
   __ b(hs, &ok);
 
-  CallRuntimePassFunction(masm, Runtime::kTryInstallRecompiledCode);
-  // Tail call to returned code.
-  __ add(r0, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
-  __ Jump(r0);
+  CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
+  GenerateTailCallToReturnedCode(masm);
 
   __ bind(&ok);
   GenerateTailCallToSharedCode(masm);
 }
 
 
-void Builtins::Generate_ConcurrentRecompile(MacroAssembler* masm) {
-  CallRuntimePassFunction(masm, Runtime::kConcurrentRecompile);
-  GenerateTailCallToSharedCode(masm);
-}
-
-
 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                            bool is_api_function,
                                            bool count_constructions) {
@@ -774,19 +772,38 @@ void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
 }
 
 
-void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
-  CallRuntimePassFunction(masm, Runtime::kLazyCompile);
-  // Do a tail-call of the compiled function.
-  __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
-  __ Jump(r2);
+void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) {
+  CallRuntimePassFunction(masm, Runtime::kCompileUnoptimized);
+  GenerateTailCallToReturnedCode(masm);
 }
 
 
-void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
-  CallRuntimePassFunction(masm, Runtime::kLazyRecompile);
-  // Do a tail-call of the compiled function.
-  __ add(r2, r0, Operand(Code::kHeaderSize - kHeapObjectTag));
-  __ Jump(r2);
+static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
+  FrameScope scope(masm, StackFrame::INTERNAL);
+  // Push a copy of the function onto the stack.
+  __ push(r1);
+  // Push call kind information and function as parameter to the runtime call.
+  __ Push(r5, r1);
+  // Whether to compile in a background thread.
+  __ Push(masm->isolate()->factory()->ToBoolean(concurrent));
+
+  __ CallRuntime(Runtime::kCompileOptimized, 2);
+  // Restore call kind information.
+  __ pop(r5);
+  // Restore receiver.
+  __ pop(r1);
+}
+
+
+void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
+  CallCompileOptimized(masm, false);
+  GenerateTailCallToReturnedCode(masm);
+}
+
+
+void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
+  CallCompileOptimized(masm, true);
+  GenerateTailCallToReturnedCode(masm);
 }
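
CallCompileOptimized above passes the function together with a boolean that
selects concurrent compilation to Runtime::kCompileOptimized. The runtime side
is not shown in this excerpt (src/runtime.cc is in the file list); the sketch
below is only an assumed shape for such an entry point, reusing the
Compiler::GetOptimizedCode signature visible in src/compiler.cc further down.
The body and the Compiler::CONCURRENT enum value are assumptions, not a quote
from the patch:

    RUNTIME_FUNCTION(MaybeObject*, Runtime_CompileOptimized) {
      HandleScope scope(isolate);
      ASSERT(args.length() == 2);
      CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
      bool concurrent = args[1]->IsTrue();  // flag pushed by the builtin

      Handle<Code> unoptimized(function->shared()->code());
      Handle<Code> code = Compiler::GetOptimizedCode(
          function, unoptimized,
          concurrent ? Compiler::CONCURRENT : Compiler::NOT_CONCURRENT);
      // Fall back to the unoptimized code if no optimized code was produced.
      function->ReplaceCode(code.is_null() ? *unoptimized : *code);
      return function->code();
    }

The same builtin pattern is repeated for ia32, x64 and mips in the files listed
above.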
 
 
diff --git a/src/bootstrapper.cc b/src/bootstrapper.cc
index c47bdf5..f27ca7a 100644
@@ -1499,7 +1499,7 @@ bool Genesis::CompileScriptCached(Isolate* isolate,
   if (cache == NULL || !cache->Lookup(name, &function_info)) {
     ASSERT(source->IsOneByteRepresentation());
     Handle<String> script_name = factory->NewStringFromUtf8(name);
-    function_info = Compiler::Compile(
+    function_info = Compiler::CompileScript(
         source,
         script_name,
         0,
@@ -2354,7 +2354,7 @@ bool Genesis::InstallJSBuiltins(Handle<JSBuiltinsObject> builtins) {
     Handle<JSFunction> function
         = Handle<JSFunction>(JSFunction::cast(function_object));
     builtins->set_javascript_builtin(id, *function);
-    if (!JSFunction::CompileLazy(function, CLEAR_EXCEPTION)) {
+    if (!Compiler::EnsureCompiled(function, CLEAR_EXCEPTION)) {
       return false;
     }
     builtins->set_javascript_builtin_code(id, function->shared()->code());
diff --git a/src/builtins.h b/src/builtins.h
index edc13f7..affb253 100644
@@ -88,7 +88,7 @@ enum BuiltinExtraArguments {
 #define BUILTIN_LIST_A(V)                                               \
   V(ArgumentsAdaptorTrampoline,     BUILTIN, UNINITIALIZED,             \
                                     kNoExtraICState)                    \
-  V(InRecompileQueue,               BUILTIN, UNINITIALIZED,             \
+  V(InOptimizationQueue,            BUILTIN, UNINITIALIZED,             \
                                     kNoExtraICState)                    \
   V(JSConstructStubCountdown,       BUILTIN, UNINITIALIZED,             \
                                     kNoExtraICState)                    \
@@ -100,11 +100,11 @@ enum BuiltinExtraArguments {
                                     kNoExtraICState)                    \
   V(JSConstructEntryTrampoline,     BUILTIN, UNINITIALIZED,             \
                                     kNoExtraICState)                    \
-  V(LazyCompile,                    BUILTIN, UNINITIALIZED,             \
+  V(CompileUnoptimized,             BUILTIN, UNINITIALIZED,             \
                                     kNoExtraICState)                    \
-  V(LazyRecompile,                  BUILTIN, UNINITIALIZED,             \
+  V(CompileOptimized,               BUILTIN, UNINITIALIZED,             \
                                     kNoExtraICState)                    \
-  V(ConcurrentRecompile,            BUILTIN, UNINITIALIZED,             \
+  V(CompileOptimizedConcurrent,     BUILTIN, UNINITIALIZED,             \
                                     kNoExtraICState)                    \
   V(NotifyDeoptimized,              BUILTIN, UNINITIALIZED,             \
                                     kNoExtraICState)                    \
@@ -385,15 +385,15 @@ class Builtins {
   static void Generate_Adaptor(MacroAssembler* masm,
                                CFunctionId id,
                                BuiltinExtraArguments extra_args);
-  static void Generate_InRecompileQueue(MacroAssembler* masm);
-  static void Generate_ConcurrentRecompile(MacroAssembler* masm);
+  static void Generate_CompileUnoptimized(MacroAssembler* masm);
+  static void Generate_InOptimizationQueue(MacroAssembler* masm);
+  static void Generate_CompileOptimized(MacroAssembler* masm);
+  static void Generate_CompileOptimizedConcurrent(MacroAssembler* masm);
   static void Generate_JSConstructStubCountdown(MacroAssembler* masm);
   static void Generate_JSConstructStubGeneric(MacroAssembler* masm);
   static void Generate_JSConstructStubApi(MacroAssembler* masm);
   static void Generate_JSEntryTrampoline(MacroAssembler* masm);
   static void Generate_JSConstructEntryTrampoline(MacroAssembler* masm);
-  static void Generate_LazyCompile(MacroAssembler* masm);
-  static void Generate_LazyRecompile(MacroAssembler* masm);
   static void Generate_NotifyDeoptimized(MacroAssembler* masm);
   static void Generate_NotifySoftDeoptimized(MacroAssembler* masm);
   static void Generate_NotifyLazyDeoptimized(MacroAssembler* masm);
diff --git a/src/compilation-cache.cc b/src/compilation-cache.cc
index fffe5da..a69ef4c 100644
@@ -421,7 +421,6 @@ Handle<SharedFunctionInfo> CompilationCache::LookupScript(
 Handle<SharedFunctionInfo> CompilationCache::LookupEval(
     Handle<String> source,
     Handle<Context> context,
-    bool is_global,
     LanguageMode language_mode,
     int scope_position) {
   if (!IsEnabled()) {
@@ -429,7 +428,7 @@ Handle<SharedFunctionInfo> CompilationCache::LookupEval(
   }
 
   Handle<SharedFunctionInfo> result;
-  if (is_global) {
+  if (context->IsNativeContext()) {
     result = eval_global_.Lookup(
         source, context, language_mode, scope_position);
   } else {
@@ -454,9 +453,7 @@ Handle<FixedArray> CompilationCache::LookupRegExp(Handle<String> source,
 void CompilationCache::PutScript(Handle<String> source,
                                  Handle<Context> context,
                                  Handle<SharedFunctionInfo> function_info) {
-  if (!IsEnabled()) {
-    return;
-  }
+  if (!IsEnabled()) return;
 
   script_.Put(source, context, function_info);
 }
@@ -464,15 +461,12 @@ void CompilationCache::PutScript(Handle<String> source,
 
 void CompilationCache::PutEval(Handle<String> source,
                                Handle<Context> context,
-                               bool is_global,
                                Handle<SharedFunctionInfo> function_info,
                                int scope_position) {
-  if (!IsEnabled()) {
-    return;
-  }
+  if (!IsEnabled()) return;
 
   HandleScope scope(isolate());
-  if (is_global) {
+  if (context->IsNativeContext()) {
     eval_global_.Put(source, context, function_info, scope_position);
   } else {
     ASSERT(scope_position != RelocInfo::kNoPosition);
diff --git a/src/compilation-cache.h b/src/compilation-cache.h
index 414e09e..ead52b5 100644
@@ -222,7 +222,6 @@ class CompilationCache {
   // contain a script for the given source string.
   Handle<SharedFunctionInfo> LookupEval(Handle<String> source,
                                         Handle<Context> context,
-                                        bool is_global,
                                         LanguageMode language_mode,
                                         int scope_position);
 
@@ -241,7 +240,6 @@ class CompilationCache {
   // with the shared function info. This may overwrite an existing mapping.
   void PutEval(Handle<String> source,
                Handle<Context> context,
-               bool is_global,
                Handle<SharedFunctionInfo> function_info,
                int scope_position);
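
With the is_global parameter gone from both LookupEval and PutEval, the choice
between the global and the contextual eval cache is now made inside the cache
from context->IsNativeContext() (see the compilation-cache.cc hunks above). A
hypothetical call site against the new signatures, with CompileEvalSomehow
standing in for the actual compile step:

    CompilationCache* cache = isolate->compilation_cache();
    Handle<SharedFunctionInfo> shared =
        cache->LookupEval(source, context, language_mode, scope_position);
    if (shared.is_null()) {
      shared = CompileEvalSomehow(source, context);  // hypothetical helper
      if (!shared.is_null() && !shared->dont_cache()) {
        cache->PutEval(source, context, shared, scope_position);
      }
    }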
 
diff --git a/src/compiler.cc b/src/compiler.cc
index 6b7786f..82bec65 100644
@@ -59,7 +59,6 @@ CompilationInfo::CompilationInfo(Handle<Script> script,
     : flags_(LanguageModeField::encode(CLASSIC_MODE)),
       script_(script),
       osr_ast_id_(BailoutId::None()),
-      osr_pc_offset_(0),
       parameter_count_(0) {
   Initialize(script->GetIsolate(), BASE, zone);
 }
@@ -71,7 +70,6 @@ CompilationInfo::CompilationInfo(Handle<SharedFunctionInfo> shared_info,
       shared_info_(shared_info),
       script_(Handle<Script>(Script::cast(shared_info->script()))),
       osr_ast_id_(BailoutId::None()),
-      osr_pc_offset_(0),
       parameter_count_(0) {
   Initialize(script_->GetIsolate(), BASE, zone);
 }
@@ -85,7 +83,6 @@ CompilationInfo::CompilationInfo(Handle<JSFunction> closure,
       script_(Handle<Script>(Script::cast(shared_info_->script()))),
       context_(closure->context()),
       osr_ast_id_(BailoutId::None()),
-      osr_pc_offset_(0),
       parameter_count_(0) {
   Initialize(script_->GetIsolate(), BASE, zone);
 }
@@ -97,7 +94,6 @@ CompilationInfo::CompilationInfo(HydrogenCodeStub* stub,
     : flags_(LanguageModeField::encode(CLASSIC_MODE) |
              IsLazy::encode(true)),
       osr_ast_id_(BailoutId::None()),
-      osr_pc_offset_(0),
       parameter_count_(0) {
   Initialize(isolate, STUB, zone);
   code_stub_ = stub;
@@ -243,86 +239,6 @@ bool CompilationInfo::ShouldSelfOptimize() {
 }
 
 
-// Determine whether to use the full compiler for all code. If the flag
-// --always-full-compiler is specified this is the case. For the virtual frame
-// based compiler the full compiler is also used if a debugger is connected, as
-// the code from the full compiler supports more precise break points. For the
-// crankshaft adaptive compiler, debugging the optimized code is not possible at
-// all. However, crankshaft supports recompilation of functions, so in this case
-// the full compiler need not be used if a debugger is attached, but only if
-// break points have actually been set.
-static bool IsDebuggerActive(Isolate* isolate) {
-#ifdef ENABLE_DEBUGGER_SUPPORT
-  return isolate->use_crankshaft() ?
-    isolate->debug()->has_break_points() :
-    isolate->debugger()->IsDebuggerActive();
-#else
-  return false;
-#endif
-}
-
-
-static bool AlwaysFullCompiler(Isolate* isolate) {
-  return FLAG_always_full_compiler || IsDebuggerActive(isolate);
-}
-
-
-void RecompileJob::RecordOptimizationStats() {
-  Handle<JSFunction> function = info()->closure();
-  if (!function->IsOptimized()) {
-    // Concurrent recompilation and OSR may race.  Increment only once.
-    int opt_count = function->shared()->opt_count();
-    function->shared()->set_opt_count(opt_count + 1);
-  }
-  double ms_creategraph = time_taken_to_create_graph_.InMillisecondsF();
-  double ms_optimize = time_taken_to_optimize_.InMillisecondsF();
-  double ms_codegen = time_taken_to_codegen_.InMillisecondsF();
-  if (FLAG_trace_opt) {
-    PrintF("[optimizing ");
-    function->ShortPrint();
-    PrintF(" - took %0.3f, %0.3f, %0.3f ms]\n", ms_creategraph, ms_optimize,
-           ms_codegen);
-  }
-  if (FLAG_trace_opt_stats) {
-    static double compilation_time = 0.0;
-    static int compiled_functions = 0;
-    static int code_size = 0;
-
-    compilation_time += (ms_creategraph + ms_optimize + ms_codegen);
-    compiled_functions++;
-    code_size += function->shared()->SourceSize();
-    PrintF("Compiled: %d functions with %d byte source size in %fms.\n",
-           compiled_functions,
-           code_size,
-           compilation_time);
-  }
-  if (FLAG_hydrogen_stats) {
-    isolate()->GetHStatistics()->IncrementSubtotals(time_taken_to_create_graph_,
-                                                    time_taken_to_optimize_,
-                                                    time_taken_to_codegen_);
-  }
-}
-
-
-// A return value of true indicates the compilation pipeline is still
-// going, not necessarily that we optimized the code.
-static bool MakeCrankshaftCode(CompilationInfo* info) {
-  RecompileJob job(info);
-  RecompileJob::Status status = job.CreateGraph();
-
-  if (status != RecompileJob::SUCCEEDED) {
-    return status != RecompileJob::FAILED;
-  }
-  status = job.OptimizeGraph();
-  if (status != RecompileJob::SUCCEEDED) {
-    status = job.AbortOptimization();
-    return status != RecompileJob::FAILED;
-  }
-  status = job.GenerateAndInstallCode();
-  return status != RecompileJob::FAILED;
-}
-
-
 class HOptimizedGraphBuilderWithPositions: public HOptimizedGraphBuilder {
  public:
   explicit HOptimizedGraphBuilderWithPositions(CompilationInfo* info)
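
The MakeCrankshaftCode helper removed above shows the phase ordering that the
renamed job keeps: create the graph, optimize it, then generate code, where a
bailout is not treated as a hard failure. With the new names introduced by this
patch (OptimizedCompileJob, and GenerateCode instead of GenerateAndInstallCode,
see the hunks below), an equivalent synchronous driver would look roughly like
the sketch here; the function name is illustrative and the bailout/failure
distinction is simplified:

    static bool RunOptimizedCompileJob(CompilationInfo* info) {
      OptimizedCompileJob job(info);
      if (job.CreateGraph() != OptimizedCompileJob::SUCCEEDED) return false;
      if (job.OptimizeGraph() != OptimizedCompileJob::SUCCEEDED) return false;
      if (job.GenerateCode() != OptimizedCompileJob::SUCCEEDED) return false;
      // Unlike the old GenerateAndInstallCode, installing info->code() on the
      // closure is now left to the caller.
      return true;
    }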
@@ -359,7 +275,26 @@ class HOptimizedGraphBuilderWithPositions: public HOptimizedGraphBuilder {
 };
 
 
-RecompileJob::Status RecompileJob::CreateGraph() {
+// Determine whether to use the full compiler for all code. If the flag
+// --always-full-compiler is specified this is the case. For the virtual frame
+// based compiler the full compiler is also used if a debugger is connected, as
+// the code from the full compiler supports more precise break points. For the
+// crankshaft adaptive compiler, debugging the optimized code is not possible at
+// all. However, crankshaft supports recompilation of functions, so in this case
+// the full compiler need not be used if a debugger is attached, but only if
+// break points have actually been set.
+static bool IsDebuggerActive(Isolate* isolate) {
+#ifdef ENABLE_DEBUGGER_SUPPORT
+  return isolate->use_crankshaft() ?
+    isolate->debug()->has_break_points() :
+    isolate->debugger()->IsDebuggerActive();
+#else
+  return false;
+#endif
+}
+
+
+OptimizedCompileJob::Status OptimizedCompileJob::CreateGraph() {
   ASSERT(isolate()->use_crankshaft());
   ASSERT(info()->IsOptimizing());
   ASSERT(!info()->IsCompilingForDebugging());
@@ -375,18 +310,15 @@ RecompileJob::Status RecompileJob::CreateGraph() {
   // Fall back to using the full code generator if it's not possible
   // to use the Hydrogen-based optimizing compiler. We already have
   // generated code for this from the shared function object.
-  if (AlwaysFullCompiler(isolate())) {
-    info()->AbortOptimization();
-    return SetLastStatus(BAILED_OUT);
-  }
+  if (FLAG_always_full_compiler) return AbortOptimization();
+  if (IsDebuggerActive(isolate())) return AbortOptimization(kDebuggerIsActive);
 
   // Limit the number of times we re-compile a functions with
   // the optimizing compiler.
   const int kMaxOptCount =
       FLAG_deopt_every_n_times == 0 ? FLAG_max_opt_count : 1000;
   if (info()->opt_count() > kMaxOptCount) {
-    info()->set_bailout_reason(kOptimizedTooManyTimes);
-    return AbortOptimization();
+    return AbortAndDisableOptimization(kOptimizedTooManyTimes);
   }
 
   // Due to an encoding limit on LUnallocated operands in the Lithium
@@ -399,21 +331,18 @@ RecompileJob::Status RecompileJob::CreateGraph() {
   const int parameter_limit = -LUnallocated::kMinFixedSlotIndex;
   Scope* scope = info()->scope();
   if ((scope->num_parameters() + 1) > parameter_limit) {
-    info()->set_bailout_reason(kTooManyParameters);
-    return AbortOptimization();
+    return AbortAndDisableOptimization(kTooManyParameters);
   }
 
   const int locals_limit = LUnallocated::kMaxFixedSlotIndex;
   if (info()->is_osr() &&
       scope->num_parameters() + 1 + scope->num_stack_slots() > locals_limit) {
-    info()->set_bailout_reason(kTooManyParametersLocals);
-    return AbortOptimization();
+    return AbortAndDisableOptimization(kTooManyParametersLocals);
   }
 
   // Take --hydrogen-filter into account.
   if (!info()->closure()->PassesFilter(FLAG_hydrogen_filter)) {
-    info()->AbortOptimization();
-    return SetLastStatus(BAILED_OUT);
+    return AbortOptimization(kHydrogenFilter);
   }
 
   // Recompile the unoptimized version of the code if the current version
@@ -473,7 +402,6 @@ RecompileJob::Status RecompileJob::CreateGraph() {
   graph_ = graph_builder_->CreateGraph();
 
   if (isolate()->has_pending_exception()) {
-    info()->SetCode(Handle<Code>::null());
     return SetLastStatus(FAILED);
   }
 
@@ -483,24 +411,21 @@ RecompileJob::Status RecompileJob::CreateGraph() {
   ASSERT(!graph_builder_->inline_bailout() || graph_ == NULL);
   if (graph_ == NULL) {
     if (graph_builder_->inline_bailout()) {
-      info_->AbortOptimization();
-      return SetLastStatus(BAILED_OUT);
-    } else {
       return AbortOptimization();
+    } else {
+      return AbortAndDisableOptimization();
     }
   }
 
   if (info()->HasAbortedDueToDependencyChange()) {
-    info_->set_bailout_reason(kBailedOutDueToDependencyChange);
-    info_->AbortOptimization();
-    return SetLastStatus(BAILED_OUT);
+    return AbortOptimization(kBailedOutDueToDependencyChange);
   }
 
   return SetLastStatus(SUCCEEDED);
 }
 
 
-RecompileJob::Status RecompileJob::OptimizeGraph() {
+OptimizedCompileJob::Status OptimizedCompileJob::OptimizeGraph() {
   DisallowHeapAllocation no_allocation;
   DisallowHandleAllocation no_handles;
   DisallowHandleDereference no_deref;
@@ -510,20 +435,19 @@ RecompileJob::Status RecompileJob::OptimizeGraph() {
   Timer t(this, &time_taken_to_optimize_);
   ASSERT(graph_ != NULL);
   BailoutReason bailout_reason = kNoReason;
-  if (!graph_->Optimize(&bailout_reason)) {
-    if (bailout_reason != kNoReason) graph_builder_->Bailout(bailout_reason);
-    return SetLastStatus(BAILED_OUT);
-  } else {
+
+  if (graph_->Optimize(&bailout_reason)) {
     chunk_ = LChunk::NewChunk(graph_);
-    if (chunk_ == NULL) {
-      return SetLastStatus(BAILED_OUT);
-    }
+    if (chunk_ != NULL) return SetLastStatus(SUCCEEDED);
+  } else if (bailout_reason != kNoReason) {
+    graph_builder_->Bailout(bailout_reason);
   }
-  return SetLastStatus(SUCCEEDED);
+
+  return AbortOptimization();
 }
 
 
-RecompileJob::Status RecompileJob::GenerateAndInstallCode() {
+OptimizedCompileJob::Status OptimizedCompileJob::GenerateCode() {
   ASSERT(last_status() == SUCCEEDED);
   ASSERT(!info()->HasAbortedDueToDependencyChange());
   DisallowCodeDependencyChange no_dependency_change;
@@ -539,9 +463,9 @@ RecompileJob::Status RecompileJob::GenerateAndInstallCode() {
     Handle<Code> optimized_code = chunk_->Codegen();
     if (optimized_code.is_null()) {
       if (info()->bailout_reason() == kNoReason) {
-        info()->set_bailout_reason(kCodeGenerationFailed);
+        info_->set_bailout_reason(kCodeGenerationFailed);
       }
-      return AbortOptimization();
+      return AbortAndDisableOptimization();
     }
     info()->SetCode(optimized_code);
   }
@@ -552,54 +476,40 @@ RecompileJob::Status RecompileJob::GenerateAndInstallCode() {
 }
 
 
-static bool GenerateCode(CompilationInfo* info) {
-  bool is_optimizing = info->isolate()->use_crankshaft() &&
-                       !info->IsCompilingForDebugging() &&
-                       info->IsOptimizing();
-  if (is_optimizing) {
-    Logger::TimerEventScope timer(
-        info->isolate(), Logger::TimerEventScope::v8_recompile_synchronous);
-    return MakeCrankshaftCode(info);
-  } else {
-    if (info->IsOptimizing()) {
-      // Have the CompilationInfo decide if the compilation should be
-      // BASE or NONOPT.
-      info->DisableOptimization();
-    }
-    Logger::TimerEventScope timer(
-        info->isolate(), Logger::TimerEventScope::v8_compile_full_code);
-    return FullCodeGenerator::MakeCode(info);
+void OptimizedCompileJob::RecordOptimizationStats() {
+  Handle<JSFunction> function = info()->closure();
+  if (!function->IsOptimized()) {
+    // Concurrent recompilation and OSR may race.  Increment only once.
+    int opt_count = function->shared()->opt_count();
+    function->shared()->set_opt_count(opt_count + 1);
   }
-}
-
-
-static bool MakeCode(CompilationInfo* info) {
-  // Precondition: code has been parsed.  Postcondition: the code field in
-  // the compilation info is set if compilation succeeded.
-  ASSERT(info->function() != NULL);
-  return Rewriter::Rewrite(info) && Scope::Analyze(info) && GenerateCode(info);
-}
-
-
-#ifdef ENABLE_DEBUGGER_SUPPORT
-bool Compiler::MakeCodeForLiveEdit(CompilationInfo* info) {
-  // Precondition: code has been parsed.  Postcondition: the code field in
-  // the compilation info is set if compilation succeeded.
-  bool succeeded = MakeCode(info);
-  if (!info->shared_info().is_null()) {
-    Handle<ScopeInfo> scope_info = ScopeInfo::Create(info->scope(),
-                                                     info->zone());
-    info->shared_info()->set_scope_info(*scope_info);
+  double ms_creategraph = time_taken_to_create_graph_.InMillisecondsF();
+  double ms_optimize = time_taken_to_optimize_.InMillisecondsF();
+  double ms_codegen = time_taken_to_codegen_.InMillisecondsF();
+  if (FLAG_trace_opt) {
+    PrintF("[optimizing ");
+    function->ShortPrint();
+    PrintF(" - took %0.3f, %0.3f, %0.3f ms]\n", ms_creategraph, ms_optimize,
+           ms_codegen);
   }
-  return succeeded;
-}
-#endif
-
+  if (FLAG_trace_opt_stats) {
+    static double compilation_time = 0.0;
+    static int compiled_functions = 0;
+    static int code_size = 0;
 
-static bool DebuggerWantsEagerCompilation(CompilationInfo* info,
-                                          bool allow_lazy_without_ctx = false) {
-  return LiveEditFunctionTracker::IsActive(info->isolate()) ||
-         (info->isolate()->DebuggerHasBreakPoints() && !allow_lazy_without_ctx);
+    compilation_time += (ms_creategraph + ms_optimize + ms_codegen);
+    compiled_functions++;
+    code_size += function->shared()->SourceSize();
+    PrintF("Compiled: %d functions with %d byte source size in %fms.\n",
+           compiled_functions,
+           code_size,
+           compilation_time);
+  }
+  if (FLAG_hydrogen_stats) {
+    isolate()->GetHStatistics()->IncrementSubtotals(time_taken_to_create_graph_,
+                                                    time_taken_to_optimize_,
+                                                    time_taken_to_codegen_);
+  }
 }
 
 
@@ -630,148 +540,379 @@ void SetExpectedNofPropertiesFromEstimate(Handle<SharedFunctionInfo> shared,
 }
 
 
-static Handle<SharedFunctionInfo> MakeFunctionInfo(CompilationInfo* info) {
-  Isolate* isolate = info->isolate();
-  PostponeInterruptsScope postpone(isolate);
+static void UpdateSharedFunctionInfo(CompilationInfo* info) {
+  // Update the shared function info with the compiled code and the
+  // scope info.  Please note, that the order of the shared function
+  // info initialization is important since set_scope_info might
+  // trigger a GC, causing the ASSERT below to be invalid if the code
+  // was flushed. By setting the code object last we avoid this.
+  Handle<SharedFunctionInfo> shared = info->shared_info();
+  Handle<ScopeInfo> scope_info =
+      ScopeInfo::Create(info->scope(), info->zone());
+  shared->set_scope_info(*scope_info);
 
-  ASSERT(!isolate->native_context().is_null());
-  Handle<Script> script = info->script();
-  // TODO(svenpanne) Obscure place for this, perhaps move to OnBeforeCompile?
-  FixedArray* array = isolate->native_context()->embedder_data();
-  script->set_context_data(array->get(0));
+  Handle<Code> code = info->code();
+  CHECK(code->kind() == Code::FUNCTION);
+  shared->ReplaceCode(*code);
+  if (shared->optimization_disabled()) code->set_optimizable(false);
 
-#ifdef ENABLE_DEBUGGER_SUPPORT
-  if (info->is_eval()) {
-    script->set_compilation_type(Script::COMPILATION_TYPE_EVAL);
-    // For eval scripts add information on the function from which eval was
-    // called.
-    if (info->is_eval()) {
-      StackTraceFrameIterator it(isolate);
-      if (!it.done()) {
-        script->set_eval_from_shared(it.frame()->function()->shared());
-        Code* code = it.frame()->LookupCode();
-        int offset = static_cast<int>(
-            it.frame()->pc() - code->instruction_start());
-        script->set_eval_from_instructions_offset(Smi::FromInt(offset));
-      }
-    }
-  }
+  // Set the expected number of properties for instances.
+  FunctionLiteral* lit = info->function();
+  int expected = lit->expected_property_count();
+  SetExpectedNofPropertiesFromEstimate(shared, expected);
 
-  // Notify debugger
-  isolate->debugger()->OnBeforeCompile(script);
-#endif
+  // Check the function has compiled code.
+  ASSERT(shared->is_compiled());
+  shared->set_dont_optimize_reason(lit->dont_optimize_reason());
+  shared->set_dont_inline(lit->flags()->Contains(kDontInline));
+  shared->set_ast_node_count(lit->ast_node_count());
+  shared->set_language_mode(lit->language_mode());
+}
 
-  // Only allow non-global compiles for eval.
-  ASSERT(info->is_eval() || info->is_global());
-  {
-    Parser parser(info);
-    if ((info->pre_parse_data() != NULL ||
-         String::cast(script->source())->length() > FLAG_min_preparse_length) &&
-        !DebuggerWantsEagerCompilation(info))
-      parser.set_allow_lazy(true);
-    if (!parser.Parse()) {
-      return Handle<SharedFunctionInfo>::null();
-    }
+
+// Sets the function info on a function.
+// The start_position points to the first '(' character after the function name
+// in the full script source. When counting characters in the script source,
+// the first character is number 0 (not 1).
+static void SetFunctionInfo(Handle<SharedFunctionInfo> function_info,
+                            FunctionLiteral* lit,
+                            bool is_toplevel,
+                            Handle<Script> script) {
+  function_info->set_length(lit->parameter_count());
+  function_info->set_formal_parameter_count(lit->parameter_count());
+  function_info->set_script(*script);
+  function_info->set_function_token_position(lit->function_token_position());
+  function_info->set_start_position(lit->start_position());
+  function_info->set_end_position(lit->end_position());
+  function_info->set_is_expression(lit->is_expression());
+  function_info->set_is_anonymous(lit->is_anonymous());
+  function_info->set_is_toplevel(is_toplevel);
+  function_info->set_inferred_name(*lit->inferred_name());
+  function_info->set_allows_lazy_compilation(lit->AllowsLazyCompilation());
+  function_info->set_allows_lazy_compilation_without_context(
+      lit->AllowsLazyCompilationWithoutContext());
+  function_info->set_language_mode(lit->language_mode());
+  function_info->set_uses_arguments(lit->scope()->arguments() != NULL);
+  function_info->set_has_duplicate_parameters(lit->has_duplicate_parameters());
+  function_info->set_ast_node_count(lit->ast_node_count());
+  function_info->set_is_function(lit->is_function());
+  function_info->set_dont_optimize_reason(lit->dont_optimize_reason());
+  function_info->set_dont_inline(lit->flags()->Contains(kDontInline));
+  function_info->set_dont_cache(lit->flags()->Contains(kDontCache));
+  function_info->set_is_generator(lit->is_generator());
+}
+
+
+static bool CompileUnoptimizedCode(CompilationInfo* info) {
+  ASSERT(info->function() != NULL);
+  if (!Rewriter::Rewrite(info)) return false;
+  if (!Scope::Analyze(info)) return false;
+  ASSERT(info->scope() != NULL);
+
+  if (!FullCodeGenerator::MakeCode(info)) {
+    Isolate* isolate = info->isolate();
+    if (!isolate->has_pending_exception()) isolate->StackOverflow();
+    return false;
   }
+  return true;
+}
 
-  FunctionLiteral* lit = info->function();
-  LiveEditFunctionTracker live_edit_tracker(isolate, lit);
-  Handle<SharedFunctionInfo> result;
-  {
-    // Measure how long it takes to do the compilation; only take the
-    // rest of the function into account to avoid overlap with the
-    // parsing statistics.
-    HistogramTimer* rate = info->is_eval()
-          ? info->isolate()->counters()->compile_eval()
-          : info->isolate()->counters()->compile();
-    HistogramTimerScope timer(rate);
 
-    // Compile the code.
-    if (!MakeCode(info)) {
-      if (!isolate->has_pending_exception()) isolate->StackOverflow();
-      return Handle<SharedFunctionInfo>::null();
-    }
+static Handle<Code> GetUnoptimizedCodeCommon(CompilationInfo* info) {
+  VMState<COMPILER> state(info->isolate());
+  PostponeInterruptsScope postpone(info->isolate());
+  if (!Parser::Parse(info)) return Handle<Code>::null();
+  LanguageMode language_mode = info->function()->language_mode();
+  info->SetLanguageMode(language_mode);
 
-    // Allocate function.
-    ASSERT(!info->code().is_null());
-    result =
-        isolate->factory()->NewSharedFunctionInfo(
-            lit->name(),
-            lit->materialized_literal_count(),
-            lit->is_generator(),
-            info->code(),
-            ScopeInfo::Create(info->scope(), info->zone()));
+  if (!CompileUnoptimizedCode(info)) return Handle<Code>::null();
+  Compiler::RecordFunctionCompilation(
+      Logger::LAZY_COMPILE_TAG, info, info->shared_info());
+  UpdateSharedFunctionInfo(info);
+  ASSERT_EQ(Code::FUNCTION, info->code()->kind());
+  return info->code();
+}
 
-    ASSERT_EQ(RelocInfo::kNoPosition, lit->function_token_position());
-    Compiler::SetFunctionInfo(result, lit, true, script);
-
-    if (script->name()->IsString()) {
-      PROFILE(isolate, CodeCreateEvent(
-          info->is_eval()
-          ? Logger::EVAL_TAG
-              : Logger::ToNativeByScript(Logger::SCRIPT_TAG, *script),
-                *info->code(),
-                *result,
-                info,
-                String::cast(script->name())));
-      GDBJIT(AddCode(Handle<String>(String::cast(script->name())),
-                     script,
-                     info->code(),
-                     info));
-    } else {
-      PROFILE(isolate, CodeCreateEvent(
-          info->is_eval()
-          ? Logger::EVAL_TAG
-              : Logger::ToNativeByScript(Logger::SCRIPT_TAG, *script),
-                *info->code(),
-                *result,
-                info,
-                isolate->heap()->empty_string()));
-      GDBJIT(AddCode(Handle<String>(), script, info->code(), info));
-    }
 
-    // Hint to the runtime system used when allocating space for initial
-    // property space by setting the expected number of properties for
-    // the instances of the function.
-    SetExpectedNofPropertiesFromEstimate(result,
-                                         lit->expected_property_count());
+Handle<Code> Compiler::GetUnoptimizedCode(Handle<JSFunction> function) {
+  ASSERT(!function->GetIsolate()->has_pending_exception());
+  ASSERT(!function->is_compiled());
+  if (function->shared()->is_compiled()) {
+    return Handle<Code>(function->shared()->code());
+  }
 
-    script->set_compilation_state(Script::COMPILATION_STATE_COMPILED);
+  CompilationInfoWithZone info(function);
+  Handle<Code> result = GetUnoptimizedCodeCommon(&info);
+  ASSERT_EQ(result.is_null(), info.isolate()->has_pending_exception());
+
+  if (FLAG_always_opt &&
+      !result.is_null() &&
+      info.isolate()->use_crankshaft() &&
+      !info.shared_info()->optimization_disabled() &&
+      !info.isolate()->DebuggerHasBreakPoints()) {
+    Handle<Code> opt_code = Compiler::GetOptimizedCode(
+        function, result, Compiler::NOT_CONCURRENT);
+    if (!opt_code.is_null()) result = opt_code;
   }
 
-#ifdef ENABLE_DEBUGGER_SUPPORT
-  // Notify debugger
-  isolate->debugger()->OnAfterCompile(
-      script, Debugger::NO_AFTER_COMPILE_FLAGS);
-#endif
+  return result;
+}
 
-  live_edit_tracker.RecordFunctionInfo(result, lit, info->zone());
 
+Handle<Code> Compiler::GetUnoptimizedCode(Handle<SharedFunctionInfo> shared) {
+  ASSERT(!shared->GetIsolate()->has_pending_exception());
+  ASSERT(!shared->is_compiled());
+
+  CompilationInfoWithZone info(shared);
+  Handle<Code> result = GetUnoptimizedCodeCommon(&info);
+  ASSERT_EQ(result.is_null(), info.isolate()->has_pending_exception());
   return result;
 }
 
 
-Handle<SharedFunctionInfo> Compiler::Compile(Handle<String> source,
-                                             Handle<Object> script_name,
-                                             int line_offset,
-                                             int column_offset,
-                                             bool is_shared_cross_origin,
-                                             Handle<Context> context,
-                                             v8::Extension* extension,
-                                             ScriptDataImpl* pre_data,
-                                             Handle<Object> script_data,
-                                             NativesFlag natives) {
-  Isolate* isolate = source->GetIsolate();
-  int source_length = source->length();
-  isolate->counters()->total_load_size()->Increment(source_length);
-  isolate->counters()->total_compile_size()->Increment(source_length);
+bool Compiler::EnsureCompiled(Handle<JSFunction> function,
+                              ClearExceptionFlag flag) {
+  if (function->is_compiled()) return true;
+  Handle<Code> code = Compiler::GetUnoptimizedCode(function);
+  if (code.is_null()) {
+    if (flag == CLEAR_EXCEPTION) {
+      function->GetIsolate()->clear_pending_exception();
+    }
+    return false;
+  }
+  function->ReplaceCode(*code);
+  ASSERT(function->is_compiled());
+  return true;
+}
 
-  // The VM is in the COMPILER state until exiting this function.
+
+// Compile full code for debugging. This code will have debug break slots
+// and deoptimization information. Deoptimization information is required
+// in case that an optimized version of this function is still activated on
+// the stack. It will also make sure that the full code is compiled with
+// the same flags as the previous version, that is flags which can change
+// the code generated. The current method of mapping from already compiled
+// full code without debug break slots to full code with debug break slots
+// depends on the generated code is otherwise exactly the same.
+// If compilation fails, just keep the existing code.
+Handle<Code> Compiler::GetCodeForDebugging(Handle<JSFunction> function) {
+  CompilationInfoWithZone info(function);
+  Isolate* isolate = info.isolate();
   VMState<COMPILER> state(isolate);
 
-  CompilationCache* compilation_cache = isolate->compilation_cache();
+  ASSERT(!isolate->has_pending_exception());
+  Handle<Code> old_code(function->shared()->code());
+  ASSERT(old_code->kind() == Code::FUNCTION);
+  ASSERT(!old_code->has_debug_break_slots());
+
+  info.MarkCompilingForDebugging();
+  if (old_code->is_compiled_optimizable()) {
+    info.EnableDeoptimizationSupport();
+  } else {
+    info.MarkNonOptimizable();
+  }
+  Handle<Code> new_code = GetUnoptimizedCodeCommon(&info);
+  if (new_code.is_null()) {
+    isolate->clear_pending_exception();
+  } else {
+    ASSERT_EQ(old_code->is_compiled_optimizable(),
+              new_code->is_compiled_optimizable());
+  }
+  return new_code;
+}
+
+
+#ifdef ENABLE_DEBUGGER_SUPPORT
+void Compiler::CompileForLiveEdit(Handle<Script> script) {
+  // TODO(635): support extensions.
+  CompilationInfoWithZone info(script);
+  VMState<COMPILER> state(info.isolate());
+
+  info.MarkAsGlobal();
+  if (!Parser::Parse(&info)) return;
+  LanguageMode language_mode = info.function()->language_mode();
+  info.SetLanguageMode(language_mode);
+
+  LiveEditFunctionTracker tracker(info.isolate(), info.function());
+  if (!CompileUnoptimizedCode(&info)) return;
+  if (!info.shared_info().is_null()) {
+    Handle<ScopeInfo> scope_info = ScopeInfo::Create(info.scope(),
+                                                     info.zone());
+    info.shared_info()->set_scope_info(*scope_info);
+  }
+  tracker.RecordRootFunctionInfo(info.code());
+}
+#endif
+
+
+static bool DebuggerWantsEagerCompilation(CompilationInfo* info,
+                                          bool allow_lazy_without_ctx = false) {
+  return LiveEditFunctionTracker::IsActive(info->isolate()) ||
+         (info->isolate()->DebuggerHasBreakPoints() && !allow_lazy_without_ctx);
+}
+
+
+static Handle<SharedFunctionInfo> CompileToplevel(CompilationInfo* info) {
+  Isolate* isolate = info->isolate();
+  ASSERT(!isolate->native_context().is_null());
+  Handle<Script> script = info->script();
+
+  // TODO(svenpanne) Obscure place for this, perhaps move to OnBeforeCompile?
+  FixedArray* array = isolate->native_context()->embedder_data();
+  script->set_context_data(array->get(0));
+
+#ifdef ENABLE_DEBUGGER_SUPPORT
+  isolate->debugger()->OnBeforeCompile(script);
+#endif
+
+  ASSERT(info->is_eval() || info->is_global());
+
+  bool parse_allow_lazy =
+      (info->pre_parse_data() != NULL ||
+       String::cast(script->source())->length() > FLAG_min_preparse_length) &&
+      !DebuggerWantsEagerCompilation(info);
 
-  // Do a lookup in the compilation cache but not for extensions.
+  Handle<SharedFunctionInfo> result;
+
+  { VMState<COMPILER> state(info->isolate());
+    if (!Parser::Parse(info, parse_allow_lazy)) {
+      return Handle<SharedFunctionInfo>::null();
+    }
+
+    FunctionLiteral* lit = info->function();
+    LiveEditFunctionTracker live_edit_tracker(isolate, lit);
+
+    // Measure how long it takes to do the compilation; only take the
+    // rest of the function into account to avoid overlap with the
+    // parsing statistics.
+    HistogramTimer* rate = info->is_eval()
+          ? info->isolate()->counters()->compile_eval()
+          : info->isolate()->counters()->compile();
+    HistogramTimerScope timer(rate);
+
+    // Compile the code.
+    if (!CompileUnoptimizedCode(info)) {
+      return Handle<SharedFunctionInfo>::null();
+    }
+
+    // Allocate function.
+    ASSERT(!info->code().is_null());
+    result = isolate->factory()->NewSharedFunctionInfo(
+        lit->name(),
+        lit->materialized_literal_count(),
+        lit->is_generator(),
+        info->code(),
+        ScopeInfo::Create(info->scope(), info->zone()));
+
+    ASSERT_EQ(RelocInfo::kNoPosition, lit->function_token_position());
+    SetFunctionInfo(result, lit, true, script);
+
+    Handle<String> script_name = script->name()->IsString()
+        ? Handle<String>(String::cast(script->name()))
+        : isolate->factory()->empty_string();
+    Logger::LogEventsAndTags log_tag = info->is_eval()
+        ? Logger::EVAL_TAG
+        : Logger::ToNativeByScript(Logger::SCRIPT_TAG, *script);
+
+    PROFILE(isolate, CodeCreateEvent(
+                log_tag, *info->code(), *result, info, *script_name));
+    GDBJIT(AddCode(script_name, script, info->code(), info));
+
+    // Hint to the runtime system used when allocating space for initial
+    // property space by setting the expected number of properties for
+    // the instances of the function.
+    SetExpectedNofPropertiesFromEstimate(result,
+                                         lit->expected_property_count());
+
+    script->set_compilation_state(Script::COMPILATION_STATE_COMPILED);
+
+    live_edit_tracker.RecordFunctionInfo(result, lit, info->zone());
+  }
+
+#ifdef ENABLE_DEBUGGER_SUPPORT
+  isolate->debugger()->OnAfterCompile(script, Debugger::NO_AFTER_COMPILE_FLAGS);
+#endif
+
+  return result;
+}
+
+
+Handle<JSFunction> Compiler::GetFunctionFromEval(Handle<String> source,
+                                                 Handle<Context> context,
+                                                 LanguageMode language_mode,
+                                                 ParseRestriction restriction,
+                                                 int scope_position) {
+  Isolate* isolate = source->GetIsolate();
+  int source_length = source->length();
+  isolate->counters()->total_eval_size()->Increment(source_length);
+  isolate->counters()->total_compile_size()->Increment(source_length);
+
+  CompilationCache* compilation_cache = isolate->compilation_cache();
+  Handle<SharedFunctionInfo> shared_info = compilation_cache->LookupEval(
+      source, context, language_mode, scope_position);
+
+  if (shared_info.is_null()) {
+    Handle<Script> script = isolate->factory()->NewScript(source);
+    CompilationInfoWithZone info(script);
+    info.MarkAsEval();
+    if (context->IsNativeContext()) info.MarkAsGlobal();
+    info.SetLanguageMode(language_mode);
+    info.SetParseRestriction(restriction);
+    info.SetContext(context);
+
+#ifdef ENABLE_DEBUGGER_SUPPORT
+    Debug::RecordEvalCaller(script);
+#endif  // ENABLE_DEBUGGER_SUPPORT
+
+    shared_info = CompileToplevel(&info);
+
+    if (shared_info.is_null()) {
+      return Handle<JSFunction>::null();
+    } else {
+      // Explicitly disable optimization for eval code. We're not yet prepared
+      // to handle eval-code in the optimizing compiler.
+      shared_info->DisableOptimization(kEval);
+
+      // If caller is strict mode, the result must be in strict mode or
+      // extended mode as well, but not the other way around. Consider:
+      // eval("'use strict'; ...");
+      ASSERT(language_mode != STRICT_MODE || !shared_info->is_classic_mode());
+      // If caller is in extended mode, the result must also be in
+      // extended mode.
+      ASSERT(language_mode != EXTENDED_MODE ||
+             shared_info->is_extended_mode());
+      if (!shared_info->dont_cache()) {
+        compilation_cache->PutEval(
+            source, context, shared_info, scope_position);
+      }
+    }
+  } else if (shared_info->ic_age() != isolate->heap()->global_ic_age()) {
+    shared_info->ResetForNewContext(isolate->heap()->global_ic_age());
+  }
+
+  return isolate->factory()->NewFunctionFromSharedFunctionInfo(
+      shared_info, context, NOT_TENURED);
+}
+
+
+Handle<SharedFunctionInfo> Compiler::CompileScript(Handle<String> source,
+                                                   Handle<Object> script_name,
+                                                   int line_offset,
+                                                   int column_offset,
+                                                   bool is_shared_cross_origin,
+                                                   Handle<Context> context,
+                                                   v8::Extension* extension,
+                                                   ScriptDataImpl* pre_data,
+                                                   Handle<Object> script_data,
+                                                   NativesFlag natives) {
+  Isolate* isolate = source->GetIsolate();
+  int source_length = source->length();
+  isolate->counters()->total_load_size()->Increment(source_length);
+  isolate->counters()->total_compile_size()->Increment(source_length);
+
+  CompilationCache* compilation_cache = isolate->compilation_cache();
+
+  // Do a lookup in the compilation cache but not for extensions.
   Handle<SharedFunctionInfo> result;
   if (extension == NULL) {
     result = compilation_cache->LookupScript(source,
@@ -816,14 +957,12 @@ Handle<SharedFunctionInfo> Compiler::Compile(Handle<String> source,
     if (FLAG_use_strict) {
       info.SetLanguageMode(FLAG_harmony_scoping ? EXTENDED_MODE : STRICT_MODE);
     }
-    result = MakeFunctionInfo(&info);
+    result = CompileToplevel(&info);
     if (extension == NULL && !result.is_null() && !result->dont_cache()) {
       compilation_cache->PutScript(source, context, result);
     }
-  } else {
-    if (result->ic_age() != isolate->heap()->global_ic_age()) {
+  } else if (result->ic_age() != isolate->heap()->global_ic_age()) {
       result->ResetForNewContext(isolate->heap()->global_ic_age());
-    }
   }
 
   if (result.is_null()) isolate->ReportPendingMessages();
@@ -831,130 +970,82 @@ Handle<SharedFunctionInfo> Compiler::Compile(Handle<String> source,
 }
 
 
-Handle<SharedFunctionInfo> Compiler::CompileEval(Handle<String> source,
-                                                 Handle<Context> context,
-                                                 bool is_global,
-                                                 LanguageMode language_mode,
-                                                 ParseRestriction restriction,
-                                                 int scope_position) {
-  Isolate* isolate = source->GetIsolate();
-  int source_length = source->length();
-  isolate->counters()->total_eval_size()->Increment(source_length);
-  isolate->counters()->total_compile_size()->Increment(source_length);
-
-  // The VM is in the COMPILER state until exiting this function.
-  VMState<COMPILER> state(isolate);
-
-  // Do a lookup in the compilation cache; if the entry is not there, invoke
-  // the compiler and add the result to the cache.
-  Handle<SharedFunctionInfo> result;
-  CompilationCache* compilation_cache = isolate->compilation_cache();
-  result = compilation_cache->LookupEval(source,
-                                         context,
-                                         is_global,
-                                         language_mode,
-                                         scope_position);
+Handle<SharedFunctionInfo> Compiler::BuildFunctionInfo(FunctionLiteral* literal,
+                                                       Handle<Script> script) {
+  // Precondition: code has been parsed and scopes have been analyzed.
+  CompilationInfoWithZone info(script);
+  info.SetFunction(literal);
+  info.SetScope(literal->scope());
+  info.SetLanguageMode(literal->scope()->language_mode());
 
-  if (result.is_null()) {
-    // Create a script object describing the script to be compiled.
-    Handle<Script> script = isolate->factory()->NewScript(source);
-    CompilationInfoWithZone info(script);
-    info.MarkAsEval();
-    if (is_global) info.MarkAsGlobal();
-    info.SetLanguageMode(language_mode);
-    info.SetParseRestriction(restriction);
-    info.SetContext(context);
-    result = MakeFunctionInfo(&info);
-    if (!result.is_null()) {
-      // Explicitly disable optimization for eval code. We're not yet prepared
-      // to handle eval-code in the optimizing compiler.
-      result->DisableOptimization(kEval);
+  Isolate* isolate = info.isolate();
+  Factory* factory = isolate->factory();
+  LiveEditFunctionTracker live_edit_tracker(isolate, literal);
+  // Determine if the function can be lazily compiled. This is necessary to
+  // allow some of our builtin JS files to be lazily compiled. These
+  // builtins cannot be handled lazily by the parser, since we have to know
+  // if a function uses the special natives syntax, which is something the
+  // parser records.
+  // If the debugger requests compilation for break points, we cannot be
+  // aggressive about lazy compilation, because it might trigger compilation
+  // of functions without an outer context when setting a breakpoint through
+  // Debug::FindSharedFunctionInfoInScript.
+  bool allow_lazy_without_ctx = literal->AllowsLazyCompilationWithoutContext();
+  bool allow_lazy = literal->AllowsLazyCompilation() &&
+      !DebuggerWantsEagerCompilation(&info, allow_lazy_without_ctx);
 
-      // If caller is strict mode, the result must be in strict mode or
-      // extended mode as well, but not the other way around. Consider:
-      // eval("'use strict'; ...");
-      ASSERT(language_mode != STRICT_MODE || !result->is_classic_mode());
-      // If caller is in extended mode, the result must also be in
-      // extended mode.
-      ASSERT(language_mode != EXTENDED_MODE ||
-             result->is_extended_mode());
-      if (!result->dont_cache()) {
-        compilation_cache->PutEval(
-            source, context, is_global, result, scope_position);
-      }
-    }
+  // Generate code
+  Handle<ScopeInfo> scope_info;
+  if (FLAG_lazy && allow_lazy && !literal->is_parenthesized()) {
+    Handle<Code> code = isolate->builtins()->CompileUnoptimized();
+    info.SetCode(code);
+    scope_info = Handle<ScopeInfo>(ScopeInfo::Empty(isolate));
+  } else if (FullCodeGenerator::MakeCode(&info)) {
+    ASSERT(!info.code().is_null());
+    scope_info = ScopeInfo::Create(info.scope(), info.zone());
   } else {
-    if (result->ic_age() != isolate->heap()->global_ic_age()) {
-      result->ResetForNewContext(isolate->heap()->global_ic_age());
-    }
+    return Handle<SharedFunctionInfo>::null();
   }
 
+  // Create a shared function info object.
+  Handle<SharedFunctionInfo> result =
+      factory->NewSharedFunctionInfo(literal->name(),
+                                     literal->materialized_literal_count(),
+                                     literal->is_generator(),
+                                     info.code(),
+                                     scope_info);
+  SetFunctionInfo(result, literal, false, script);
+  RecordFunctionCompilation(Logger::FUNCTION_TAG, &info, result);
+  result->set_allows_lazy_compilation(allow_lazy);
+  result->set_allows_lazy_compilation_without_context(allow_lazy_without_ctx);
+
+  // Set the expected number of properties for instances and return
+  // the resulting function.
+  SetExpectedNofPropertiesFromEstimate(result,
+                                       literal->expected_property_count());
+  live_edit_tracker.RecordFunctionInfo(result, literal, info.zone());
   return result;
 }
 
 
-static bool InstallFullCode(CompilationInfo* info) {
-  // Update the shared function info with the compiled code and the
-  // scope info.  Please note, that the order of the shared function
-  // info initialization is important since set_scope_info might
-  // trigger a GC, causing the ASSERT below to be invalid if the code
-  // was flushed. By setting the code object last we avoid this.
-  Handle<SharedFunctionInfo> shared = info->shared_info();
-  Handle<Code> code = info->code();
-  CHECK(code->kind() == Code::FUNCTION);
-  Handle<JSFunction> function = info->closure();
-  Handle<ScopeInfo> scope_info =
-      ScopeInfo::Create(info->scope(), info->zone());
-  shared->set_scope_info(*scope_info);
-  shared->ReplaceCode(*code);
-  if (!function.is_null()) {
-    function->ReplaceCode(*code);
-    ASSERT(!function->IsOptimized());
-  }
-
-  // Set the expected number of properties for instances.
-  FunctionLiteral* lit = info->function();
-  int expected = lit->expected_property_count();
-  SetExpectedNofPropertiesFromEstimate(shared, expected);
-
-  // Check the function has compiled code.
-  ASSERT(shared->is_compiled());
-  shared->set_dont_optimize_reason(lit->dont_optimize_reason());
-  shared->set_dont_inline(lit->flags()->Contains(kDontInline));
-  shared->set_ast_node_count(lit->ast_node_count());
-
-  if (info->isolate()->use_crankshaft() &&
-      !function.is_null() &&
-      !shared->optimization_disabled()) {
-    // If we're asked to always optimize, we compile the optimized
-    // version of the function right away - unless the debugger is
-    // active as it makes no sense to compile optimized code then.
-    if (FLAG_always_opt &&
-        !info->isolate()->DebuggerHasBreakPoints()) {
-      CompilationInfoWithZone optimized(function);
-      optimized.SetOptimizing(BailoutId::None());
-      return Compiler::CompileLazy(&optimized);
+static Handle<Code> GetCodeFromOptimizedCodeMap(Handle<JSFunction> function) {
+  if (FLAG_cache_optimized_code) {
+    Handle<SharedFunctionInfo> shared(function->shared());
+    DisallowHeapAllocation no_gc;
+    int index = shared->SearchOptimizedCodeMap(
+        function->context()->native_context());
+    if (index > 0) {
+      if (FLAG_trace_opt) {
+        PrintF("[found optimized code for ");
+        function->ShortPrint();
+        PrintF("]\n");
+      }
+      FixedArray* literals = shared->GetLiteralsFromOptimizedCodeMap(index);
+      if (literals != NULL) function->set_literals(literals);
+      return Handle<Code>(shared->GetCodeFromOptimizedCodeMap(index));
     }
   }
-  return true;
-}
-
-
-static void InstallCodeCommon(CompilationInfo* info) {
-  Handle<SharedFunctionInfo> shared = info->shared_info();
-  Handle<Code> code = info->code();
-  ASSERT(!code.is_null());
-
-  // Set optimizable to false if this is disallowed by the shared
-  // function info, e.g., we might have flushed the code and must
-  // reset this bit when lazy compiling the code again.
-  if (shared->optimization_disabled()) code->set_optimizable(false);
-
-  if (shared->code() == *code) {
-    // Do not send compilation event for the same code twice.
-    return;
-  }
-  Compiler::RecordFunctionCompilation(Logger::LAZY_COMPILE_TAG, info, shared);
+  return Handle<Code>::null();
 }
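
GetCodeFromOptimizedCodeMap above searches the shared function info's cache by the function's native context, restores the cached literals, and returns the cached code (or an empty handle on a miss). The following is a minimal standalone model of such a per-context cache; the types are hypothetical stand-ins, not V8's actual FixedArray layout.

    // Standalone sketch of a per-native-context optimized-code cache.
    #include <cstdio>
    #include <vector>

    struct Context  { int id; };
    struct Code     { const char* name; };
    struct Literals { int count; };

    // One cache entry per native context the function was optimized in.
    struct Entry { const Context* native_context; Code code; Literals literals; };

    struct OptimizedCodeMap {
      std::vector<Entry> entries;
      // Returns the matching entry, or nullptr on a cache miss.
      const Entry* Search(const Context* ctx) const {
        for (const Entry& e : entries)
          if (e.native_context == ctx) return &e;
        return nullptr;
      }
    };

    int main() {
      Context ctx_a{1}, ctx_b{2};
      OptimizedCodeMap map;
      map.entries.push_back({&ctx_a, {"optimized code (ctx_a)"}, {3}});

      if (const Entry* hit = map.Search(&ctx_a)) {
        // Install literals first, then reuse the cached code, mirroring the
        // order used in the patch.
        std::printf("hit: %s, %d literals\n", hit->code.name, hit->literals.count);
      }
      std::printf("ctx_b %s\n", map.Search(&ctx_b) ? "hit" : "miss");
      return 0;
    }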
 
 
@@ -974,317 +1065,162 @@ static void InsertCodeIntoOptimizedCodeMap(CompilationInfo* info) {
 }
 
 
-static bool InstallCodeFromOptimizedCodeMap(CompilationInfo* info) {
-  if (!info->IsOptimizing()) return false;  // Nothing to look up.
+static bool CompileOptimizedPrologue(CompilationInfo* info) {
+  if (!Parser::Parse(info)) return false;
+  LanguageMode language_mode = info->function()->language_mode();
+  info->SetLanguageMode(language_mode);
 
-  // Lookup non-OSR optimized code.
-  if (FLAG_cache_optimized_code && !info->is_osr()) {
-    Handle<SharedFunctionInfo> shared = info->shared_info();
-    Handle<JSFunction> function = info->closure();
-    ASSERT(!function.is_null());
-    Handle<Context> native_context(function->context()->native_context());
-    int index = shared->SearchOptimizedCodeMap(*native_context);
-    if (index > 0) {
-      if (FLAG_trace_opt) {
-        PrintF("[found optimized code for ");
-        function->ShortPrint();
-        PrintF("]\n");
-      }
-      // Caching of optimized code enabled and optimized code found.
-      shared->InstallFromOptimizedCodeMap(*function, index);
-      return true;
-    }
-  }
-  return false;
+  if (!Rewriter::Rewrite(info)) return false;
+  if (!Scope::Analyze(info)) return false;
+  ASSERT(info->scope() != NULL);
+  return true;
 }
 
 
-bool Compiler::CompileLazy(CompilationInfo* info) {
-  Isolate* isolate = info->isolate();
-
-  // The VM is in the COMPILER state until exiting this function.
-  VMState<COMPILER> state(isolate);
-
-  PostponeInterruptsScope postpone(isolate);
-
-  Handle<SharedFunctionInfo> shared = info->shared_info();
-  int compiled_size = shared->end_position() - shared->start_position();
-  isolate->counters()->total_compile_size()->Increment(compiled_size);
-
-  if (InstallCodeFromOptimizedCodeMap(info)) return true;
-
-  // Generate the AST for the lazily compiled function.
-  if (Parser::Parse(info)) {
-    // Measure how long it takes to do the lazy compilation; only take the
-    // rest of the function into account to avoid overlap with the lazy
-    // parsing statistics.
-    HistogramTimerScope timer(isolate->counters()->compile_lazy());
-
-    // After parsing we know the function's language mode. Remember it.
-    LanguageMode language_mode = info->function()->language_mode();
-    info->SetLanguageMode(language_mode);
-    shared->set_language_mode(language_mode);
+static bool GetOptimizedCodeNow(CompilationInfo* info) {
+  if (!CompileOptimizedPrologue(info)) return false;
 
-    // Compile the code.
-    if (!MakeCode(info)) {
-      if (!isolate->has_pending_exception()) {
-        isolate->StackOverflow();
-      }
-    } else {
-      InstallCodeCommon(info);
-
-      if (info->IsOptimizing()) {
-        // Optimized code successfully created.
-        Handle<Code> code = info->code();
-        ASSERT(shared->scope_info() != ScopeInfo::Empty(isolate));
-        // TODO(titzer): Only replace the code if it was not an OSR compile.
-        info->closure()->ReplaceCode(*code);
-        InsertCodeIntoOptimizedCodeMap(info);
-        return true;
-      } else if (!info->is_osr()) {
-        // Compilation failed. Replace with full code if not OSR compile.
-        return InstallFullCode(info);
-      }
-    }
-  }
-
-  ASSERT(info->code().is_null());
-  return false;
+  Logger::TimerEventScope timer(
+      info->isolate(), Logger::TimerEventScope::v8_recompile_synchronous);
+
+  OptimizedCompileJob job(info);
+  if (job.CreateGraph() != OptimizedCompileJob::SUCCEEDED) return false;
+  if (job.OptimizeGraph() != OptimizedCompileJob::SUCCEEDED) return false;
+  if (job.GenerateCode() != OptimizedCompileJob::SUCCEEDED) return false;
+
+  // Success!
+  ASSERT(!info->isolate()->has_pending_exception());
+  InsertCodeIntoOptimizedCodeMap(info);
+  Compiler::RecordFunctionCompilation(
+      Logger::LAZY_COMPILE_TAG, info, info->shared_info());
+  return true;
 }
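
GetOptimizedCodeNow drives all three phases of an OptimizedCompileJob on the main thread and bails out at the first phase that does not report SUCCEEDED. A standalone sketch of that short-circuiting shape; the phase bodies are placeholders, not the real optimization pipeline.

    // Standalone sketch: three-phase compile job with early exit.
    #include <cstdio>

    enum Status { SUCCEEDED, BAILED_OUT, FAILED };

    // Stand-in for a compile job: each phase can fail independently and the
    // caller stops at the first non-success, as GetOptimizedCodeNow does.
    class CompileJob {
     public:
      Status CreateGraph()   { return SUCCEEDED; }  // build the IR
      Status OptimizeGraph() { return SUCCEEDED; }  // run optimization passes
      Status GenerateCode()  { return SUCCEEDED; }  // emit machine code
    };

    static bool GetOptimizedCodeNow(CompileJob* job) {
      if (job->CreateGraph() != SUCCEEDED) return false;
      if (job->OptimizeGraph() != SUCCEEDED) return false;
      if (job->GenerateCode() != SUCCEEDED) return false;
      return true;  // success: the caller may now cache and install the code
    }

    int main() {
      CompileJob job;
      std::printf("synchronous optimization %s\n",
                  GetOptimizedCodeNow(&job) ? "succeeded" : "failed");
      return 0;
    }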
 
 
-bool Compiler::RecompileConcurrent(Handle<JSFunction> closure,
-                                   Handle<Code> unoptimized,
-                                   uint32_t osr_pc_offset) {
-  bool compiling_for_osr = (osr_pc_offset != 0);
-
-  Isolate* isolate = closure->GetIsolate();
-  // Here we prepare compile data for the concurrent recompilation thread, but
-  // this still happens synchronously and interrupts execution.
-  Logger::TimerEventScope timer(
-      isolate, Logger::TimerEventScope::v8_recompile_synchronous);
-
+static bool GetOptimizedCodeLater(CompilationInfo* info) {
+  Isolate* isolate = info->isolate();
   if (!isolate->optimizing_compiler_thread()->IsQueueAvailable()) {
     if (FLAG_trace_concurrent_recompilation) {
       PrintF("  ** Compilation queue full, will retry optimizing ");
-      closure->PrintName();
-      PrintF(" on next run.\n");
+      info->closure()->PrintName();
+      PrintF(" later.\n");
     }
     return false;
   }
 
-  SmartPointer<CompilationInfo> info(new CompilationInfoWithZone(closure));
-  Handle<SharedFunctionInfo> shared = info->shared_info();
+  CompilationHandleScope handle_scope(info);
+  if (!CompileOptimizedPrologue(info)) return false;
+  info->SaveHandles();  // Copy handles to the compilation handle scope.
+
+  Logger::TimerEventScope timer(
+      isolate, Logger::TimerEventScope::v8_recompile_synchronous);
 
-  if (compiling_for_osr) {
-    BailoutId osr_ast_id = unoptimized->TranslatePcOffsetToAstId(osr_pc_offset);
-    ASSERT(!osr_ast_id.IsNone());
-    info->SetOptimizing(osr_ast_id);
-    info->SetOsrInfo(unoptimized, osr_pc_offset);
+  OptimizedCompileJob* job = new(info->zone()) OptimizedCompileJob(info);
+  OptimizedCompileJob::Status status = job->CreateGraph();
+  if (status != OptimizedCompileJob::SUCCEEDED) return false;
+  isolate->optimizing_compiler_thread()->QueueForOptimization(job);
 
-    if (FLAG_trace_osr) {
-      PrintF("[COSR - attempt to queue ");
-      closure->PrintName();
-      PrintF(" at AST id %d]\n", osr_ast_id.ToInt());
+  if (FLAG_trace_concurrent_recompilation) {
+    PrintF("  ** Queued ");
+    info->closure()->PrintName();

+    if (info->is_osr()) {
+      PrintF(" for concurrent OSR at %d.\n", info->osr_ast_id().ToInt());
+    } else {
+      PrintF(" for concurrent optimization.\n");
     }
-  } else {
-    info->SetOptimizing(BailoutId::None());
   }
+  return true;
+}
+
 
+Handle<Code> Compiler::GetOptimizedCode(Handle<JSFunction> function,
+                                        Handle<Code> current_code,
+                                        ConcurrencyMode mode,
+                                        BailoutId osr_ast_id) {
+  if (osr_ast_id.IsNone()) {  // No cache for OSR.
+    Handle<Code> cached_code = GetCodeFromOptimizedCodeMap(function);
+    if (!cached_code.is_null()) return cached_code;
+  }
+
+  SmartPointer<CompilationInfo> info(new CompilationInfoWithZone(function));
+  Isolate* isolate = info->isolate();
   VMState<COMPILER> state(isolate);
+  ASSERT(!isolate->has_pending_exception());
   PostponeInterruptsScope postpone(isolate);
 
+  Handle<SharedFunctionInfo> shared = info->shared_info();
+  ASSERT_NE(ScopeInfo::Empty(isolate), shared->scope_info());
   int compiled_size = shared->end_position() - shared->start_position();
   isolate->counters()->total_compile_size()->Increment(compiled_size);
+  current_code->set_profiler_ticks(0);
 
-  {
-    CompilationHandleScope handle_scope(info.get());
+  info->SetOptimizing(osr_ast_id, current_code);
 
-    if (!compiling_for_osr && InstallCodeFromOptimizedCodeMap(info.get())) {
-      return true;
+  if (mode == CONCURRENT) {
+    if (GetOptimizedCodeLater(info.get())) {
+      info.Detach();  // The background recompile job owns this now.
+      return isolate->builtins()->InOptimizationQueue();
     }
+  } else {
+    if (GetOptimizedCodeNow(info.get())) return info->code();
+  }
 
-    if (Parser::Parse(info.get())) {
-      LanguageMode language_mode = info->function()->language_mode();
-      info->SetLanguageMode(language_mode);
-      shared->set_language_mode(language_mode);
-      info->SaveHandles();
-
-      if (Rewriter::Rewrite(info.get()) && Scope::Analyze(info.get())) {
-        RecompileJob* job = new(info->zone()) RecompileJob(info.get());
-        RecompileJob::Status status = job->CreateGraph();
-        if (status == RecompileJob::SUCCEEDED) {
-          info.Detach();
-          unoptimized->set_profiler_ticks(0);
-          isolate->optimizing_compiler_thread()->QueueForOptimization(job);
-          ASSERT(!isolate->has_pending_exception());
-          return true;
-        } else if (status == RecompileJob::BAILED_OUT) {
-          isolate->clear_pending_exception();
-          InstallFullCode(info.get());
-        }
-      }
-    }
+  // Failed.
+  if (FLAG_trace_opt) {
+    PrintF("[failed to optimize ");
+    function->PrintName();
+    PrintF("]\n");
   }
 
   if (isolate->has_pending_exception()) isolate->clear_pending_exception();
-  return false;
+  return Handle<Code>::null();
 }
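
Compiler::GetOptimizedCode above first consults the code map (skipped for OSR), then either optimizes synchronously or queues a background job and returns the InOptimizationQueue builtin as a placeholder; an empty handle signals failure to the caller. The standalone sketch below models only that dispatch, with stand-in types rather than real handles.

    // Standalone sketch: synchronous vs. concurrent optimization dispatch.
    #include <cstdio>
    #include <string>

    enum ConcurrencyMode { NOT_CONCURRENT, CONCURRENT };

    struct Code {
      std::string name;
      bool is_null() const { return name.empty(); }
    };

    static Code LookupCache(bool is_osr) {
      // No caching for OSR code in this model, as in the patch.
      return is_osr ? Code{} : Code{"cached optimized code"};
    }

    static bool OptimizeNow()   { return true; }  // synchronous pipeline
    static bool QueueForLater() { return true; }  // hand off to background thread

    static Code GetOptimizedCode(ConcurrencyMode mode, bool is_osr) {
      Code cached = LookupCache(is_osr);
      if (!cached.is_null()) return cached;

      if (mode == CONCURRENT) {
        // The placeholder keeps the function callable while the job is pending.
        if (QueueForLater()) return Code{"InOptimizationQueue placeholder"};
      } else {
        if (OptimizeNow()) return Code{"freshly optimized code"};
      }
      return Code{};  // empty: caller falls back to unoptimized code
    }

    int main() {
      std::printf("%s\n",
                  GetOptimizedCode(CONCURRENT, /*is_osr=*/true).name.c_str());
      std::printf("%s\n",
                  GetOptimizedCode(NOT_CONCURRENT, true).name.c_str());
      return 0;
    }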
 
 
-Handle<Code> Compiler::InstallOptimizedCode(RecompileJob* job) {
+Handle<Code> Compiler::GetConcurrentlyOptimizedCode(OptimizedCompileJob* job) {
+  // Take ownership of compilation info.  Deleting compilation info
+  // also tears down the zone and the recompile job.
   SmartPointer<CompilationInfo> info(job->info());
-  // The function may have already been optimized by OSR.  Simply continue.
-  // Except when OSR already disabled optimization for some reason.
-  if (info->shared_info()->optimization_disabled()) {
-    info->AbortOptimization();
-    InstallFullCode(info.get());
-    if (FLAG_trace_concurrent_recompilation) {
-      PrintF("  ** aborting optimization for ");
-      info->closure()->PrintName();
-      PrintF(" as it has been disabled.\n");
-    }
-    ASSERT(!info->closure()->IsInRecompileQueue());
-    return Handle<Code>::null();
-  }
-
   Isolate* isolate = info->isolate();
+
   VMState<COMPILER> state(isolate);
   Logger::TimerEventScope timer(
       isolate, Logger::TimerEventScope::v8_recompile_synchronous);
-  // If crankshaft succeeded, install the optimized code else install
-  // the unoptimized code.
-  RecompileJob::Status status = job->last_status();
-  if (info->HasAbortedDueToDependencyChange()) {
-    info->set_bailout_reason(kBailedOutDueToDependencyChange);
-    status = job->AbortOptimization();
-  } else if (status != RecompileJob::SUCCEEDED) {
-    info->set_bailout_reason(kFailedBailedOutLastTime);
-    status = job->AbortOptimization();
-  } else if (isolate->DebuggerHasBreakPoints()) {
-    info->set_bailout_reason(kDebuggerIsActive);
-    status = job->AbortOptimization();
-  } else {
-    status = job->GenerateAndInstallCode();
-    ASSERT(status == RecompileJob::SUCCEEDED ||
-           status == RecompileJob::BAILED_OUT);
-  }
 
-  InstallCodeCommon(info.get());
-  if (status == RecompileJob::SUCCEEDED) {
-    Handle<Code> code = info->code();
-    ASSERT(info->shared_info()->scope_info() != ScopeInfo::Empty(isolate));
-    info->closure()->ReplaceCode(*code);
-    if (info->shared_info()->SearchOptimizedCodeMap(
-            info->closure()->context()->native_context()) == -1) {
-      InsertCodeIntoOptimizedCodeMap(info.get());
-    }
-    if (FLAG_trace_concurrent_recompilation) {
-      PrintF("  ** Optimized code for ");
-      info->closure()->PrintName();
-      PrintF(" installed.\n");
-    }
-  } else {
-    info->AbortOptimization();
-    InstallFullCode(info.get());
+  Handle<SharedFunctionInfo> shared = info->shared_info();
+  shared->code()->set_profiler_ticks(0);
+
+  // 1) Optimization may have failed.
+  // 2) The function may have already been optimized by OSR.  Simply continue.
+  //    Except when OSR already disabled optimization for some reason.
+  // 3) The code may have already been invalidated due to dependency change.
+  // 4) Debugger may have been activated.
+
+  if (job->last_status() != OptimizedCompileJob::SUCCEEDED ||
+      shared->optimization_disabled() ||
+      info->HasAbortedDueToDependencyChange() ||
+      isolate->DebuggerHasBreakPoints()) {
+    return Handle<Code>::null();
   }
-  // Optimized code is finally replacing unoptimized code.  Reset the latter's
-  // profiler ticks to prevent too soon re-opt after a deopt.
-  info->shared_info()->code()->set_profiler_ticks(0);
-  ASSERT(!info->closure()->IsInRecompileQueue());
-  return (status == RecompileJob::SUCCEEDED) ? info->code()
-                                             : Handle<Code>::null();
-}
-
 
-Handle<SharedFunctionInfo> Compiler::BuildFunctionInfo(FunctionLiteral* literal,
-                                                       Handle<Script> script) {
-  // Precondition: code has been parsed and scopes have been analyzed.
-  CompilationInfoWithZone info(script);
-  info.SetFunction(literal);
-  info.SetScope(literal->scope());
-  info.SetLanguageMode(literal->scope()->language_mode());
-
-  Isolate* isolate = info.isolate();
-  Factory* factory = isolate->factory();
-  LiveEditFunctionTracker live_edit_tracker(isolate, literal);
-  // Determine if the function can be lazily compiled. This is necessary to
-  // allow some of our builtin JS files to be lazily compiled. These
-  // builtins cannot be handled lazily by the parser, since we have to know
-  // if a function uses the special natives syntax, which is something the
-  // parser records.
-  // If the debugger requests compilation for break points, we cannot be
-  // aggressive about lazy compilation, because it might trigger compilation
-  // of functions without an outer context when setting a breakpoint through
-  // Debug::FindSharedFunctionInfoInScript.
-  bool allow_lazy_without_ctx = literal->AllowsLazyCompilationWithoutContext();
-  bool allow_lazy = literal->AllowsLazyCompilation() &&
-      !DebuggerWantsEagerCompilation(&info, allow_lazy_without_ctx);
-
-  Handle<ScopeInfo> scope_info(ScopeInfo::Empty(isolate));
-
-  // Generate code
-  if (FLAG_lazy && allow_lazy && !literal->is_parenthesized()) {
-    Handle<Code> code = isolate->builtins()->LazyCompile();
-    info.SetCode(code);
-  } else if (GenerateCode(&info)) {
-    ASSERT(!info.code().is_null());
-    scope_info = ScopeInfo::Create(info.scope(), info.zone());
-  } else {
-    return Handle<SharedFunctionInfo>::null();
+  if (job->GenerateCode() != OptimizedCompileJob::SUCCEEDED) {
+    return Handle<Code>::null();
   }
 
-  // Create a shared function info object.
-  Handle<SharedFunctionInfo> result =
-      factory->NewSharedFunctionInfo(literal->name(),
-                                     literal->materialized_literal_count(),
-                                     literal->is_generator(),
-                                     info.code(),
-                                     scope_info);
-  SetFunctionInfo(result, literal, false, script);
-  RecordFunctionCompilation(Logger::FUNCTION_TAG, &info, result);
-  result->set_allows_lazy_compilation(allow_lazy);
-  result->set_allows_lazy_compilation_without_context(allow_lazy_without_ctx);
-
-  // Set the expected number of properties for instances and return
-  // the resulting function.
-  SetExpectedNofPropertiesFromEstimate(result,
-                                       literal->expected_property_count());
-  live_edit_tracker.RecordFunctionInfo(result, literal, info.zone());
-  return result;
-}
+  Compiler::RecordFunctionCompilation(
+      Logger::LAZY_COMPILE_TAG, info.get(), shared);
+  if (info->shared_info()->SearchOptimizedCodeMap(
+          info->context()->native_context()) == -1) {
+    InsertCodeIntoOptimizedCodeMap(info.get());
+  }
 
+  if (FLAG_trace_concurrent_recompilation) {
+    PrintF("  ** Optimized code for ");
+    info->closure()->PrintName();
+    PrintF(" generated.\n");
+  }
 
-// Sets the function info on a function.
-// The start_position points to the first '(' character after the function name
-// in the full script source. When counting characters in the script source the
-// the first character is number 0 (not 1).
-void Compiler::SetFunctionInfo(Handle<SharedFunctionInfo> function_info,
-                               FunctionLiteral* lit,
-                               bool is_toplevel,
-                               Handle<Script> script) {
-  function_info->set_length(lit->parameter_count());
-  function_info->set_formal_parameter_count(lit->parameter_count());
-  function_info->set_script(*script);
-  function_info->set_function_token_position(lit->function_token_position());
-  function_info->set_start_position(lit->start_position());
-  function_info->set_end_position(lit->end_position());
-  function_info->set_is_expression(lit->is_expression());
-  function_info->set_is_anonymous(lit->is_anonymous());
-  function_info->set_is_toplevel(is_toplevel);
-  function_info->set_inferred_name(*lit->inferred_name());
-  function_info->set_allows_lazy_compilation(lit->AllowsLazyCompilation());
-  function_info->set_allows_lazy_compilation_without_context(
-      lit->AllowsLazyCompilationWithoutContext());
-  function_info->set_language_mode(lit->language_mode());
-  function_info->set_uses_arguments(lit->scope()->arguments() != NULL);
-  function_info->set_has_duplicate_parameters(lit->has_duplicate_parameters());
-  function_info->set_ast_node_count(lit->ast_node_count());
-  function_info->set_is_function(lit->is_function());
-  function_info->set_dont_optimize_reason(lit->dont_optimize_reason());
-  function_info->set_dont_inline(lit->flags()->Contains(kDontInline));
-  function_info->set_dont_cache(lit->flags()->Contains(kDontCache));
-  function_info->set_is_generator(lit->is_generator());
+  return Handle<Code>(*info->code());
 }
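
Before accepting a background job's result, GetConcurrentlyOptimizedCode re-checks the conditions listed in the numbered comment above: the job succeeded, optimization was not disabled in the meantime, no code dependency changed, and the debugger did not attach. The same guard written as a single standalone predicate; the field names are stand-ins.

    // Standalone sketch: install-time checks for concurrently compiled code.
    #include <cstdio>

    struct JobResult {
      bool succeeded;
      bool optimization_disabled;  // e.g. disabled while the job was running
      bool dependency_changed;     // invalidated by a map/prototype change
      bool debugger_attached;      // break points make optimized code unusable
    };

    // Returns true only if the concurrently produced code may be installed.
    static bool MayInstall(const JobResult& r) {
      return r.succeeded &&
             !r.optimization_disabled &&
             !r.dependency_changed &&
             !r.debugger_attached;
    }

    int main() {
      JobResult ok{true, false, false, false};
      JobResult stale{true, false, true, false};
      std::printf("ok: %d, stale: %d\n", MayInstall(ok), MayInstall(stale));
      return 0;
    }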
 
 
@@ -1301,31 +1237,18 @@ void Compiler::RecordFunctionCompilation(Logger::LogEventsAndTags tag,
       info->isolate()->cpu_profiler()->is_profiling()) {
     Handle<Script> script = info->script();
     Handle<Code> code = info->code();
-    if (*code == info->isolate()->builtins()->builtin(Builtins::kLazyCompile))
+    if (code.is_identical_to(info->isolate()->builtins()->CompileUnoptimized()))
       return;
     int line_num = GetScriptLineNumber(script, shared->start_position()) + 1;
     int column_num =
         GetScriptColumnNumber(script, shared->start_position()) + 1;
     USE(line_num);
-    if (script->name()->IsString()) {
-      PROFILE(info->isolate(),
-              CodeCreateEvent(Logger::ToNativeByScript(tag, *script),
-                              *code,
-                              *shared,
-                              info,
-                              String::cast(script->name()),
-                              line_num,
-                              column_num));
-    } else {
-      PROFILE(info->isolate(),
-              CodeCreateEvent(Logger::ToNativeByScript(tag, *script),
-                              *code,
-                              *shared,
-                              info,
-                              info->isolate()->heap()->empty_string(),
-                              line_num,
-                              column_num));
-    }
+    String* script_name = script->name()->IsString()
+        ? String::cast(script->name())
+        : info->isolate()->heap()->empty_string();
+    Logger::LogEventsAndTags log_tag = Logger::ToNativeByScript(tag, *script);
+    PROFILE(info->isolate(), CodeCreateEvent(
+        log_tag, *code, *shared, info, script_name, line_num, column_num));
   }
 
   GDBJIT(AddCode(Handle<String>(shared->DebugName()),
index 7599c13..4d7c1a2 100644 (file)
@@ -84,8 +84,7 @@ class CompilationInfo {
   ScriptDataImpl* pre_parse_data() const { return pre_parse_data_; }
   Handle<Context> context() const { return context_; }
   BailoutId osr_ast_id() const { return osr_ast_id_; }
-  uint32_t osr_pc_offset() const { return osr_pc_offset_; }
-  Handle<Code> osr_patched_code() const { return osr_patched_code_; }
+  Handle<Code> unoptimized_code() const { return unoptimized_code_; }
   int opt_count() const { return opt_count_; }
   int num_parameters() const;
   int num_heap_slots() const;
@@ -189,19 +188,16 @@ class CompilationInfo {
   void SetContext(Handle<Context> context) {
     context_ = context;
   }
-  void MarkCompilingForDebugging(Handle<Code> current_code) {
-    ASSERT(mode_ != OPTIMIZE);
-    ASSERT(current_code->kind() == Code::FUNCTION);
+
+  void MarkCompilingForDebugging() {
     flags_ |= IsCompilingForDebugging::encode(true);
-    if (current_code->is_compiled_optimizable()) {
-      EnableDeoptimizationSupport();
-    } else {
-      mode_ = CompilationInfo::NONOPT;
-    }
   }
   bool IsCompilingForDebugging() {
     return IsCompilingForDebugging::decode(flags_);
   }
+  void MarkNonOptimizable() {
+    SetMode(CompilationInfo::NONOPT);
+  }
 
   bool ShouldTrapOnDeopt() const {
     return (FLAG_trap_on_deopt && IsOptimizing()) ||
@@ -221,9 +217,11 @@ class CompilationInfo {
   bool IsOptimizing() const { return mode_ == OPTIMIZE; }
   bool IsOptimizable() const { return mode_ == BASE; }
   bool IsStub() const { return mode_ == STUB; }
-  void SetOptimizing(BailoutId osr_ast_id) {
+  void SetOptimizing(BailoutId osr_ast_id, Handle<Code> unoptimized) {
+    ASSERT(!shared_info_.is_null());
     SetMode(OPTIMIZE);
     osr_ast_id_ = osr_ast_id;
+    unoptimized_code_ = unoptimized;
   }
   void DisableOptimization();
 
@@ -239,11 +237,6 @@ class CompilationInfo {
   // Determines whether or not to insert a self-optimization header.
   bool ShouldSelfOptimize();
 
-  // Reset code to the unoptimized version when optimization is aborted.
-  void AbortOptimization() {
-    SetCode(handle(shared_info()->code()));
-  }
-
   void set_deferred_handles(DeferredHandles* deferred_handles) {
     ASSERT(deferred_handles_ == NULL);
     deferred_handles_ = deferred_handles;
@@ -266,7 +259,7 @@ class CompilationInfo {
     SaveHandle(&shared_info_);
     SaveHandle(&context_);
     SaveHandle(&script_);
-    SaveHandle(&osr_patched_code_);
+    SaveHandle(&unoptimized_code_);
   }
 
   BailoutReason bailout_reason() const { return bailout_reason_; }
@@ -313,13 +306,8 @@ class CompilationInfo {
     return abort_due_to_dependency_;
   }
 
-  void SetOsrInfo(Handle<Code> code, uint32_t pc_offset) {
-    osr_patched_code_ = code;
-    osr_pc_offset_ = pc_offset;
-  }
-
-  bool HasSameOsrEntry(Handle<JSFunction> function, uint32_t pc_offset) {
-    return osr_pc_offset_ == pc_offset && function.is_identical_to(closure_);
+  bool HasSameOsrEntry(Handle<JSFunction> function, BailoutId osr_ast_id) {
+    return osr_ast_id_ == osr_ast_id && function.is_identical_to(closure_);
   }
 
  protected:
@@ -416,13 +404,10 @@ class CompilationInfo {
   // Compilation mode flag and whether deoptimization is allowed.
   Mode mode_;
   BailoutId osr_ast_id_;
-  // The pc_offset corresponding to osr_ast_id_ in unoptimized code.
-  // We can look this up in the back edge table, but cache it for quick access.
-  uint32_t osr_pc_offset_;
   // The unoptimized code we patched for OSR may not be the shared code
   // afterwards, since we may need to compile it again to include deoptimization
   // data.  Keep track which code we patched.
-  Handle<Code> osr_patched_code_;
+  Handle<Code> unoptimized_code_;
 
   // Flag whether compilation needs to be aborted due to dependency change.
   bool abort_due_to_dependency_;
@@ -518,9 +503,9 @@ class LChunk;
 // fail, bail-out to the full code generator or succeed.  Apart from
 // their return value, the status of the phase last run can be checked
 // using last_status().
-class RecompileJob: public ZoneObject {
+class OptimizedCompileJob: public ZoneObject {
  public:
-  explicit RecompileJob(CompilationInfo* info)
+  explicit OptimizedCompileJob(CompilationInfo* info)
       : info_(info),
         graph_builder_(NULL),
         graph_(NULL),
@@ -534,14 +519,21 @@ class RecompileJob: public ZoneObject {
 
   MUST_USE_RESULT Status CreateGraph();
   MUST_USE_RESULT Status OptimizeGraph();
-  MUST_USE_RESULT Status GenerateAndInstallCode();
+  MUST_USE_RESULT Status GenerateCode();
 
   Status last_status() const { return last_status_; }
   CompilationInfo* info() const { return info_; }
   Isolate* isolate() const { return info()->isolate(); }
 
-  MUST_USE_RESULT Status AbortOptimization() {
-    info_->AbortOptimization();
+  MUST_USE_RESULT Status AbortOptimization(
+      BailoutReason reason = kNoReason) {
+    if (reason != kNoReason) info_->set_bailout_reason(reason);
+    return SetLastStatus(BAILED_OUT);
+  }
+
+  MUST_USE_RESULT Status AbortAndDisableOptimization(
+      BailoutReason reason = kNoReason) {
+    if (reason != kNoReason) info_->set_bailout_reason(reason);
     info_->shared_info()->DisableOptimization(info_->bailout_reason());
     return SetLastStatus(BAILED_OUT);
   }
@@ -571,7 +563,7 @@ class RecompileJob: public ZoneObject {
   void RecordOptimizationStats();
 
   struct Timer {
-    Timer(RecompileJob* job, TimeDelta* location)
+    Timer(OptimizedCompileJob* job, TimeDelta* location)
         : job_(job), location_(location) {
       ASSERT(location_ != NULL);
       timer_.Start();
@@ -581,7 +573,7 @@ class RecompileJob: public ZoneObject {
       *location_ += timer_.Elapsed();
     }
 
-    RecompileJob* job_;
+    OptimizedCompileJob* job_;
     ElapsedTimer timer_;
     TimeDelta* location_;
   };
@@ -601,57 +593,53 @@ class RecompileJob: public ZoneObject {
 
 class Compiler : public AllStatic {
  public:
-  // Call count before primitive functions trigger their own optimization.
-  static const int kCallsUntilPrimitiveOpt = 200;
+  static Handle<Code> GetUnoptimizedCode(Handle<JSFunction> function);
+  static Handle<Code> GetUnoptimizedCode(Handle<SharedFunctionInfo> shared);
+  static bool EnsureCompiled(Handle<JSFunction> function,
+                             ClearExceptionFlag flag);
+  static Handle<Code> GetCodeForDebugging(Handle<JSFunction> function);
 
-  // All routines return a SharedFunctionInfo.
-  // If an error occurs an exception is raised and the return handle
-  // contains NULL.
+#ifdef ENABLE_DEBUGGER_SUPPORT
+  static void CompileForLiveEdit(Handle<Script> script);
+#endif
 
-  // Compile a String source within a context.
-  static Handle<SharedFunctionInfo> Compile(Handle<String> source,
-                                            Handle<Object> script_name,
-                                            int line_offset,
-                                            int column_offset,
-                                            bool is_shared_cross_origin,
-                                            Handle<Context> context,
-                                            v8::Extension* extension,
-                                            ScriptDataImpl* pre_data,
-                                            Handle<Object> script_data,
-                                            NativesFlag is_natives_code);
-
-  // Compile a String source within a context for Eval.
-  static Handle<SharedFunctionInfo> CompileEval(Handle<String> source,
+  // Compile a String source within a context for eval.
+  static Handle<JSFunction> GetFunctionFromEval(Handle<String> source,
                                                 Handle<Context> context,
-                                                bool is_global,
                                                 LanguageMode language_mode,
                                                 ParseRestriction restriction,
                                                 int scope_position);
 
-  // Compile from function info (used for lazy compilation). Returns true on
-  // success and false if the compilation resulted in a stack overflow.
-  static bool CompileLazy(CompilationInfo* info);
-
-  static bool RecompileConcurrent(Handle<JSFunction> function,
-                                  Handle<Code> unoptimized,
-                                  uint32_t osr_pc_offset = 0);
-
-  // Compile a shared function info object (the function is possibly lazily
-  // compiled).
+  // Compile a String source within a context.
+  static Handle<SharedFunctionInfo> CompileScript(Handle<String> source,
+                                                  Handle<Object> script_name,
+                                                  int line_offset,
+                                                  int column_offset,
+                                                  bool is_shared_cross_origin,
+                                                  Handle<Context> context,
+                                                  v8::Extension* extension,
+                                                  ScriptDataImpl* pre_data,
+                                                  Handle<Object> script_data,
+                                                  NativesFlag is_natives_code);
+
+  // Create a shared function info object (the code may be lazily compiled).
   static Handle<SharedFunctionInfo> BuildFunctionInfo(FunctionLiteral* node,
                                                       Handle<Script> script);
 
-  // Set the function info for a newly compiled function.
-  static void SetFunctionInfo(Handle<SharedFunctionInfo> function_info,
-                              FunctionLiteral* lit,
-                              bool is_toplevel,
-                              Handle<Script> script);
+  enum ConcurrencyMode { NOT_CONCURRENT, CONCURRENT };
 
-  static Handle<Code> InstallOptimizedCode(RecompileJob* job);
+  // Generate and return optimized code or start a concurrent optimization job.
+  // In the latter case, return the InOptimizationQueue builtin.  On failure,
+  // return the empty handle.
+  static Handle<Code> GetOptimizedCode(
+      Handle<JSFunction> function,
+      Handle<Code> current_code,
+      ConcurrencyMode mode,
+      BailoutId osr_ast_id = BailoutId::None());
 
-#ifdef ENABLE_DEBUGGER_SUPPORT
-  static bool MakeCodeForLiveEdit(CompilationInfo* info);
-#endif
+  // Generate and return code from previously queued optimization job.
+  // On failure, return the empty handle.
+  static Handle<Code> GetConcurrentlyOptimizedCode(OptimizedCompileJob* job);
 
   static void RecordFunctionCompilation(Logger::LogEventsAndTags tag,
                                         CompilationInfo* info,
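
The reworked Compiler interface above hands code back to the caller instead of installing it: GetUnoptimizedCode and GetOptimizedCode return a handle that may be empty, and call sites such as the debug.cc hunk further down install the result themselves via ReplaceCode. A hedged usage sketch of that shape with simplified stand-in signatures, not the real handle-based ones:

    // Standalone sketch: caller-driven code installation.
    #include <cstdio>
    #include <string>

    struct Code {
      std::string name;
      bool is_null() const { return name.empty(); }
    };

    struct Function {
      Code code;
      void ReplaceCode(const Code& c) { code = c; }
    };

    namespace Compiler {
    Code GetUnoptimizedCode(Function*) { return Code{"unoptimized"}; }
    Code GetOptimizedCode(Function*)   { return Code{"optimized"}; }
    }

    int main() {
      Function f;
      // In this model the caller installs whatever the compiler returns.
      Code unopt = Compiler::GetUnoptimizedCode(&f);
      if (!unopt.is_null()) f.ReplaceCode(unopt);

      Code opt = Compiler::GetOptimizedCode(&f);
      if (!opt.is_null()) f.ReplaceCode(opt);  // empty handle: keep unoptimized

      std::printf("function now runs: %s\n", f.code.name.c_str());
      return 0;
    }
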
index dbbfe7e..4a7fa6b 100644 (file)
@@ -783,14 +783,13 @@ bool Debug::CompileDebuggerScript(Isolate* isolate, int index) {
 
   // Compile the script.
   Handle<SharedFunctionInfo> function_info;
-  function_info = Compiler::Compile(source_code,
-                                    script_name,
-                                    0, 0,
-                                    false,
-                                    context,
-                                    NULL, NULL,
-                                    Handle<String>::null(),
-                                    NATIVES_CODE);
+  function_info = Compiler::CompileScript(source_code,
+                                          script_name, 0, 0,
+                                          false,
+                                          context,
+                                          NULL, NULL,
+                                          Handle<String>::null(),
+                                          NATIVES_CODE);
 
   // Silently ignore stack overflows during compilation.
   if (function_info.is_null()) {
@@ -1868,41 +1867,6 @@ void Debug::ClearStepNext() {
 }
 
 
-// Helper function to compile full code for debugging. This code will
-// have debug break slots and deoptimization information. Deoptimization
-// information is required in case that an optimized version of this
-// function is still activated on the stack. It will also make sure that
-// the full code is compiled with the same flags as the previous version,
-// that is flags which can change the code generated. The current method
-// of mapping from already compiled full code without debug break slots
-// to full code with debug break slots depends on the generated code is
-// otherwise exactly the same.
-static bool CompileFullCodeForDebugging(Handle<JSFunction> function,
-                                        Handle<Code> current_code) {
-  ASSERT(!current_code->has_debug_break_slots());
-
-  CompilationInfoWithZone info(function);
-  info.MarkCompilingForDebugging(current_code);
-  ASSERT(!info.shared_info()->is_compiled());
-  ASSERT(!info.isolate()->has_pending_exception());
-
-  // Use compile lazy which will end up compiling the full code in the
-  // configuration configured above.
-  bool result = Compiler::CompileLazy(&info);
-  ASSERT(result != info.isolate()->has_pending_exception());
-  info.isolate()->clear_pending_exception();
-#if DEBUG
-  if (result) {
-    Handle<Code> new_code(function->shared()->code());
-    ASSERT(new_code->has_debug_break_slots());
-    ASSERT(current_code->is_compiled_optimizable() ==
-           new_code->is_compiled_optimizable());
-  }
-#endif
-  return result;
-}
-
-
 static void CollectActiveFunctionsFromThread(
     Isolate* isolate,
     ThreadLocalTop* top,
@@ -2059,8 +2023,7 @@ void Debug::PrepareForBreakPoints() {
 
     Deoptimizer::DeoptimizeAll(isolate_);
 
-    Handle<Code> lazy_compile =
-        Handle<Code>(isolate_->builtins()->builtin(Builtins::kLazyCompile));
+    Handle<Code> lazy_compile = isolate_->builtins()->CompileUnoptimized();
 
     // There will be at least one break point when we are done.
     has_break_points_ = true;
@@ -2112,9 +2075,9 @@ void Debug::PrepareForBreakPoints() {
             function->set_code(*lazy_compile);
             function->shared()->set_code(*lazy_compile);
           } else if (kind == Code::BUILTIN &&
-              (function->IsInRecompileQueue() ||
-               function->IsMarkedForLazyRecompilation() ||
-               function->IsMarkedForConcurrentRecompilation())) {
+              (function->IsInOptimizationQueue() ||
+               function->IsMarkedForOptimization() ||
+               function->IsMarkedForConcurrentOptimization())) {
             // Abort in-flight compilation.
             Code* shared_code = function->shared()->code();
             if (shared_code->kind() == Code::FUNCTION &&
@@ -2159,19 +2122,12 @@ void Debug::PrepareForBreakPoints() {
       if (!shared->code()->has_debug_break_slots()) {
         // Try to compile the full code with debug break slots. If it
         // fails just keep the current code.
-        Handle<Code> current_code(function->shared()->code());
-        shared->set_code(*lazy_compile);
         bool prev_force_debugger_active =
             isolate_->debugger()->force_debugger_active();
         isolate_->debugger()->set_force_debugger_active(true);
-        ASSERT(current_code->kind() == Code::FUNCTION);
-        CompileFullCodeForDebugging(function, current_code);
+        function->ReplaceCode(*Compiler::GetCodeForDebugging(function));
         isolate_->debugger()->set_force_debugger_active(
             prev_force_debugger_active);
-        if (!shared->is_compiled()) {
-          shared->set_code(*current_code);
-          continue;
-        }
       }
 
       // Keep function code in sync with shared function info.
@@ -2284,11 +2240,10 @@ Object* Debug::FindSharedFunctionInfoInScript(Handle<Script> script,
       // will compile all inner functions that cannot be compiled without a
       // context, because Compiler::BuildFunctionInfo checks whether the
       // debugger is active.
-      if (target_function.is_null()) {
-        SharedFunctionInfo::CompileLazy(target, KEEP_EXCEPTION);
-      } else {
-        JSFunction::CompileLazy(target_function, KEEP_EXCEPTION);
-      }
+      Handle<Code> result = target_function.is_null()
+          ? Compiler::GetUnoptimizedCode(target)
+          : Compiler::GetUnoptimizedCode(target_function);
+      if (result.is_null()) return isolate_->heap()->undefined_value();
     }
   }  // End while loop.
 
@@ -2312,7 +2267,7 @@ bool Debug::EnsureDebugInfo(Handle<SharedFunctionInfo> shared,
 
   // Ensure function is compiled. Return false if this failed.
   if (!function.is_null() &&
-      !JSFunction::EnsureCompiled(function, CLEAR_EXCEPTION)) {
+      !Compiler::EnsureCompiled(function, CLEAR_EXCEPTION)) {
     return false;
   }
 
@@ -2598,6 +2553,21 @@ Handle<FixedArray> Debug::GetLoadedScripts() {
 }
 
 
+void Debug::RecordEvalCaller(Handle<Script> script) {
+  script->set_compilation_type(Script::COMPILATION_TYPE_EVAL);
+  // For eval scripts add information on the function from which eval was
+  // called.
+  StackTraceFrameIterator it(script->GetIsolate());
+  if (!it.done()) {
+    script->set_eval_from_shared(it.frame()->function()->shared());
+    Code* code = it.frame()->LookupCode();
+    int offset = static_cast<int>(
+        it.frame()->pc() - code->instruction_start());
+    script->set_eval_from_instructions_offset(Smi::FromInt(offset));
+  }
+}
+
+
 void Debug::AfterGarbageCollection() {
   // Generate events for collected scripts.
   if (script_cache_ != NULL) {
index 7eedfd2..d1b3b23 100644 (file)
@@ -424,6 +424,9 @@ class Debug {
   void AddScriptToScriptCache(Handle<Script> script);
   Handle<FixedArray> GetLoadedScripts();
 
+  // Record function from which eval was called.
+  static void RecordEvalCaller(Handle<Script> script);
+
   // Garbage collection notifications.
   void AfterGarbageCollection();
 
index 47a56a0..1a70f6c 100644 (file)
@@ -940,7 +940,10 @@ Handle<JSFunction> Factory::NewFunctionFromSharedFunctionInfo(
 
   if (index > 0) {
     // Caching of optimized code enabled and optimized code found.
-    function_info->InstallFromOptimizedCodeMap(*result, index);
+    FixedArray* literals =
+        function_info->GetLiteralsFromOptimizedCodeMap(index);
+    if (literals != NULL) result->set_literals(literals);
+    result->ReplaceCode(function_info->GetCodeFromOptimizedCodeMap(index));
     return result;
   }
 
@@ -951,7 +954,7 @@ Handle<JSFunction> Factory::NewFunctionFromSharedFunctionInfo(
       function_info->allows_lazy_compilation() &&
       !function_info->optimization_disabled() &&
       !isolate()->DebuggerHasBreakPoints()) {
-    result->MarkForLazyRecompilation();
+    result->MarkForOptimization();
   }
   return result;
 }
index a40b61e..ff4e8d3 100644 (file)
@@ -312,6 +312,10 @@ void BreakableStatementChecker::VisitThisFunction(ThisFunction* expr) {
 
 bool FullCodeGenerator::MakeCode(CompilationInfo* info) {
   Isolate* isolate = info->isolate();
+
+  Logger::TimerEventScope timer(
+      isolate, Logger::TimerEventScope::v8_compile_full_code);
+
   Handle<Script> script = info->script();
   if (!script->IsUndefined() && !script->source()->IsUndefined()) {
     int len = String::cast(script->source())->length();
@@ -1644,8 +1648,7 @@ bool FullCodeGenerator::TryLiteralCompare(CompareOperation* expr) {
 }
 
 
-void BackEdgeTable::Patch(Isolate* isolate,
-                          Code* unoptimized) {
+void BackEdgeTable::Patch(Isolate* isolate, Code* unoptimized) {
   DisallowHeapAllocation no_gc;
   Code* patch = isolate->builtins()->builtin(Builtins::kOnStackReplacement);
 
@@ -1668,8 +1671,7 @@ void BackEdgeTable::Patch(Isolate* isolate,
 }
 
 
-void BackEdgeTable::Revert(Isolate* isolate,
-                           Code* unoptimized) {
+void BackEdgeTable::Revert(Isolate* isolate, Code* unoptimized) {
   DisallowHeapAllocation no_gc;
   Code* patch = isolate->builtins()->builtin(Builtins::kInterruptCheck);
 
@@ -1694,29 +1696,23 @@ void BackEdgeTable::Revert(Isolate* isolate,
 }
 
 
-void BackEdgeTable::AddStackCheck(CompilationInfo* info) {
+void BackEdgeTable::AddStackCheck(Handle<Code> code, uint32_t pc_offset) {
   DisallowHeapAllocation no_gc;
-  Isolate* isolate = info->isolate();
-  Code* code = *info->osr_patched_code();
-  Address pc = code->instruction_start() + info->osr_pc_offset();
-  ASSERT_EQ(info->osr_ast_id().ToInt(),
-            code->TranslatePcOffsetToAstId(info->osr_pc_offset()).ToInt());
-  ASSERT_NE(INTERRUPT, GetBackEdgeState(isolate, code, pc));
+  Isolate* isolate = code->GetIsolate();
+  Address pc = code->instruction_start() + pc_offset;
   Code* patch = isolate->builtins()->builtin(Builtins::kOsrAfterStackCheck);
-  PatchAt(code, pc, OSR_AFTER_STACK_CHECK, patch);
+  PatchAt(*code, pc, OSR_AFTER_STACK_CHECK, patch);
 }
 
 
-void BackEdgeTable::RemoveStackCheck(CompilationInfo* info) {
+void BackEdgeTable::RemoveStackCheck(Handle<Code> code, uint32_t pc_offset) {
   DisallowHeapAllocation no_gc;
-  Isolate* isolate = info->isolate();
-  Code* code = *info->osr_patched_code();
-  Address pc = code->instruction_start() + info->osr_pc_offset();
-  ASSERT_EQ(info->osr_ast_id().ToInt(),
-            code->TranslatePcOffsetToAstId(info->osr_pc_offset()).ToInt());
-  if (GetBackEdgeState(isolate, code, pc) == OSR_AFTER_STACK_CHECK) {
+  Isolate* isolate = code->GetIsolate();
+  Address pc = code->instruction_start() + pc_offset;
+
+  if (OSR_AFTER_STACK_CHECK == GetBackEdgeState(isolate, *code, pc)) {
     Code* patch = isolate->builtins()->builtin(Builtins::kOnStackReplacement);
-    PatchAt(code, pc, ON_STACK_REPLACEMENT, patch);
+    PatchAt(*code, pc, ON_STACK_REPLACEMENT, patch);
   }
 }
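
AddStackCheck and RemoveStackCheck above now take the code object and pc offset directly, and RemoveStackCheck only reverts a site that is still in the OSR_AFTER_STACK_CHECK state. A toy standalone model of that patch/revert logic; the table below is a stand-in for the real back-edge encoding.

    // Standalone sketch: patching back-edge state at a pc offset.
    #include <cstdio>
    #include <map>

    enum BackEdgeState { INTERRUPT, ON_STACK_REPLACEMENT, OSR_AFTER_STACK_CHECK };

    // Toy "code object": a map from pc offset to the state patched at that site.
    struct Code { std::map<unsigned, BackEdgeState> back_edges; };

    static void AddStackCheck(Code* code, unsigned pc_offset) {
      code->back_edges[pc_offset] = OSR_AFTER_STACK_CHECK;
    }

    static void RemoveStackCheck(Code* code, unsigned pc_offset) {
      // Only revert if the site is still in the stack-check state.
      std::map<unsigned, BackEdgeState>::iterator it =
          code->back_edges.find(pc_offset);
      if (it != code->back_edges.end() && it->second == OSR_AFTER_STACK_CHECK) {
        it->second = ON_STACK_REPLACEMENT;
      }
    }

    int main() {
      Code code;
      code.back_edges[0x40] = ON_STACK_REPLACEMENT;
      AddStackCheck(&code, 0x40);
      RemoveStackCheck(&code, 0x40);
      std::printf("state at 0x40: %d\n", code.back_edges[0x40]);  // back to OSR
      return 0;
    }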
 
index 11d5341..6fd61c0 100644 (file)
@@ -928,10 +928,10 @@ class BackEdgeTable {
 
   // Change a back edge patched for on-stack replacement to perform a
   // stack check first.
-  static void AddStackCheck(CompilationInfo* info);
+  static void AddStackCheck(Handle<Code> code, uint32_t pc_offset);
 
-  // Remove the stack check, if available, and replace by on-stack replacement.
-  static void RemoveStackCheck(CompilationInfo* info);
+  // Revert the patch by AddStackCheck.
+  static void RemoveStackCheck(Handle<Code> code, uint32_t pc_offset);
 
   // Return the current patch state of the back edge.
   static BackEdgeState GetBackEdgeState(Isolate* isolate,
index 2de5ef8..1f73a7d 100644 (file)
@@ -74,8 +74,8 @@ void Builtins::Generate_Adaptor(MacroAssembler* masm,
 }
 
 
-static void CallRuntimePassFunction(MacroAssembler* masm,
-                                    Runtime::FunctionId function_id) {
+static void CallRuntimePassFunction(
+    MacroAssembler* masm, Runtime::FunctionId function_id) {
   FrameScope scope(masm, StackFrame::INTERNAL);
   // Push a copy of the function.
   __ push(edi);
@@ -100,7 +100,13 @@ static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
 }
 
 
-void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
+static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
+  __ lea(eax, FieldOperand(eax, Code::kHeaderSize));
+  __ jmp(eax);
+}
+
+
+void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
   // Checking whether the queued function is ready for install is optional,
   // since we come across interrupts and stack checks elsewhere.  However,
   // not checking may delay installing ready functions, and always checking
@@ -112,22 +118,14 @@ void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
   __ cmp(esp, Operand::StaticVariable(stack_limit));
   __ j(above_equal, &ok, Label::kNear);
 
-  CallRuntimePassFunction(masm, Runtime::kTryInstallRecompiledCode);
-  // Tail call to returned code.
-  __ lea(eax, FieldOperand(eax, Code::kHeaderSize));
-  __ jmp(eax);
+  CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
+  GenerateTailCallToReturnedCode(masm);
 
   __ bind(&ok);
   GenerateTailCallToSharedCode(masm);
 }
 
 
-void Builtins::Generate_ConcurrentRecompile(MacroAssembler* masm) {
-  CallRuntimePassFunction(masm, Runtime::kConcurrentRecompile);
-  GenerateTailCallToSharedCode(masm);
-}
-
-
 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                            bool is_api_function,
                                            bool count_constructions) {
@@ -509,19 +507,41 @@ void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
 }
 
 
-void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
-  CallRuntimePassFunction(masm, Runtime::kLazyCompile);
-  // Do a tail-call of the compiled function.
-  __ lea(eax, FieldOperand(eax, Code::kHeaderSize));
-  __ jmp(eax);
+void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) {
+  CallRuntimePassFunction(masm, Runtime::kCompileUnoptimized);
+  GenerateTailCallToReturnedCode(masm);
 }
 
 
-void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
-  CallRuntimePassFunction(masm, Runtime::kLazyRecompile);
-  // Do a tail-call of the compiled function.
-  __ lea(eax, FieldOperand(eax, Code::kHeaderSize));
-  __ jmp(eax);
+
+static void CallCompileOptimized(MacroAssembler* masm, bool concurrent) {
+  FrameScope scope(masm, StackFrame::INTERNAL);
+  // Push a copy of the function.
+  __ push(edi);
+  // Push call kind information.
+  __ push(ecx);
+  // Function is also the parameter to the runtime call.
+  __ push(edi);
+  // Whether to compile in a background thread.
+  __ Push(masm->isolate()->factory()->ToBoolean(concurrent));
+
+  __ CallRuntime(Runtime::kCompileOptimized, 2);
+  // Restore call kind information.
+  __ pop(ecx);
+  // Restore receiver.
+  __ pop(edi);
+}
+
+
+void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
+  CallCompileOptimized(masm, false);
+  GenerateTailCallToReturnedCode(masm);
+}
+
+
+void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
+  CallCompileOptimized(masm, true);
+  GenerateTailCallToReturnedCode(masm);
 }
 
 
index dd7df98..af9b19e 100644 (file)
--- a/src/ic.cc
+++ b/src/ic.cc
@@ -2054,7 +2054,7 @@ RUNTIME_FUNCTION(MaybeObject*, CallIC_Miss) {
   if (raw_function->is_compiled()) return raw_function;
 
   Handle<JSFunction> function(raw_function);
-  JSFunction::CompileLazy(function, CLEAR_EXCEPTION);
+  Compiler::EnsureCompiled(function, CLEAR_EXCEPTION);
   return *function;
 }
 
@@ -2075,7 +2075,7 @@ RUNTIME_FUNCTION(MaybeObject*, KeyedCallIC_Miss) {
   if (raw_function->is_compiled()) return raw_function;
 
   Handle<JSFunction> function(raw_function, isolate);
-  JSFunction::CompileLazy(function, CLEAR_EXCEPTION);
+  Compiler::EnsureCompiled(function, CLEAR_EXCEPTION);
   return *function;
 }
 
@@ -2155,7 +2155,7 @@ RUNTIME_FUNCTION(MaybeObject*, KeyedCallIC_MissFromStubFailure) {
   if (raw_function->is_compiled()) return raw_function;
 
   Handle<JSFunction> function(raw_function, isolate);
-  JSFunction::CompileLazy(function, CLEAR_EXCEPTION);
+  Compiler::EnsureCompiled(function, CLEAR_EXCEPTION);
   return *function;
 }
 
index 3d459d4..002e062 100644 (file)
@@ -602,27 +602,6 @@ Handle<JSArray> LiveEdit::CompareStrings(Handle<String> s1,
 }
 
 
-static void CompileScriptForTracker(Isolate* isolate, Handle<Script> script) {
-  // TODO(635): support extensions.
-  PostponeInterruptsScope postpone(isolate);
-
-  // Build AST.
-  CompilationInfoWithZone info(script);
-  info.MarkAsGlobal();
-  // Parse and don't allow skipping lazy functions.
-  if (Parser::Parse(&info)) {
-    // Compile the code.
-    LiveEditFunctionTracker tracker(info.isolate(), info.function());
-    if (Compiler::MakeCodeForLiveEdit(&info)) {
-      ASSERT(!info.code().is_null());
-      tracker.RecordRootFunctionInfo(info.code());
-    } else {
-      info.isolate()->StackOverflow();
-    }
-  }
-}
-
-
 // Unwraps JSValue object, returning its field "value"
 static Handle<Object> UnwrapJSValue(Handle<JSValue> jsValue) {
   return Handle<Object>(jsValue->value(), jsValue->GetIsolate());
@@ -951,7 +930,7 @@ JSArray* LiveEdit::GatherCompileInfo(Handle<Script> script,
     try_catch.SetVerbose(true);
 
     // A logical 'try' section.
-    CompileScriptForTracker(isolate, script);
+    Compiler::CompileForLiveEdit(script);
   }
 
   // A logical 'catch' section.
index 95362c0..25ce4ee 100644 (file)
@@ -1432,8 +1432,7 @@ void Logger::CodeCreateEvent(LogEventsAndTags tag,
   CALL_LISTENERS(CodeCreateEvent(tag, code, shared, info, name));
 
   if (!FLAG_log_code || !log_->IsEnabled()) return;
-  if (code == isolate_->builtins()->builtin(
-      Builtins::kLazyCompile))
+  if (code == isolate_->builtins()->builtin(Builtins::kCompileUnoptimized))
     return;
 
   Log::MessageBuilder msg(log_);
@@ -1967,8 +1966,8 @@ void Logger::LogCompiledFunctions() {
   // During iteration, there can be heap allocation due to
   // GetScriptLineNumber call.
   for (int i = 0; i < compiled_funcs_count; ++i) {
-    if (*code_objects[i] == isolate_->builtins()->builtin(
-        Builtins::kLazyCompile))
+    if (code_objects[i].is_identical_to(
+            isolate_->builtins()->CompileUnoptimized()))
       continue;
     LogExistingFunction(sfis[i], code_objects[i]);
   }
index 1e78093..0a67935 100644 (file)
@@ -986,7 +986,8 @@ void MarkCompactCollector::Finish() {
 // objects have been marked.
 
 void CodeFlusher::ProcessJSFunctionCandidates() {
-  Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile);
+  Code* lazy_compile =
+      isolate_->builtins()->builtin(Builtins::kCompileUnoptimized);
   Object* undefined = isolate_->heap()->undefined_value();
 
   JSFunction* candidate = jsfunction_candidates_head_;
@@ -1031,7 +1032,8 @@ void CodeFlusher::ProcessJSFunctionCandidates() {
 
 
 void CodeFlusher::ProcessSharedFunctionInfoCandidates() {
-  Code* lazy_compile = isolate_->builtins()->builtin(Builtins::kLazyCompile);
+  Code* lazy_compile =
+      isolate_->builtins()->builtin(Builtins::kCompileUnoptimized);
 
   SharedFunctionInfo* candidate = shared_function_info_candidates_head_;
   SharedFunctionInfo* next_candidate;
index ae8e3e7..efa78a2 100644 (file)
@@ -297,8 +297,8 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
 }
 
 
-static void CallRuntimePassFunction(MacroAssembler* masm,
-                                    Runtime::FunctionId function_id) {
+static void CallRuntimePassFunction(
+    MacroAssembler* masm, Runtime::FunctionId function_id) {
   FrameScope scope(masm, StackFrame::INTERNAL);
   // Push a copy of the function onto the stack.
   // Push call kind information and function as parameter to the runtime call.
@@ -318,7 +318,13 @@ static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
 }
 
 
-void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
+static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
+  __ Addu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
+  __ Jump(at);
+}
+
+
+void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
   // Checking whether the queued function is ready for install is optional,
   // since we come across interrupts and stack checks elsewhere.  However,
   // not checking may delay installing ready functions, and always checking
@@ -328,22 +334,14 @@ void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
   __ LoadRoot(t0, Heap::kStackLimitRootIndex);
   __ Branch(&ok, hs, sp, Operand(t0));
 
-  CallRuntimePassFunction(masm, Runtime::kTryInstallRecompiledCode);
-  // Tail call to returned code.
-  __ Addu(at, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
-  __ Jump(at);
+  CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
+  GenerateTailCallToReturnedCode(masm);
 
   __ bind(&ok);
   GenerateTailCallToSharedCode(masm);
 }
 
 
-void Builtins::Generate_ConcurrentRecompile(MacroAssembler* masm) {
-  CallRuntimePassFunction(masm, Runtime::kConcurrentRecompile);
-  GenerateTailCallToSharedCode(masm);
-}
-
-
 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                            bool is_api_function,
                                            bool count_constructions) {
@@ -790,22 +788,40 @@ void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
 }
 
 
-void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
-  CallRuntimePassFunction(masm, Runtime::kLazyCompile);
-  // Do a tail-call of the compiled function.
-  __ Addu(t9, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
-  __ Jump(t9);
+void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) {
+  CallRuntimePassFunction(masm, Runtime::kCompileUnoptimized);
+  GenerateTailCallToReturnedCode(masm);
 }
 
 
-void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
-  CallRuntimePassFunction(masm, Runtime::kLazyRecompile);
-  // Do a tail-call of the compiled function.
-  __ Addu(t9, v0, Operand(Code::kHeaderSize - kHeapObjectTag));
-  __ Jump(t9);
+static void CallCompileOptimized(MacroAssembler* masm,
+                                 bool concurrent) {
+  FrameScope scope(masm, StackFrame::INTERNAL);
+  // Push a copy of the function onto the stack.
+  // Push call kind information and function as parameter to the runtime call.
+  __ Push(a1, t1, a1);
+  // Whether to compile in a background thread.
+  __ Push(masm->isolate()->factory()->ToBoolean(concurrent));
+
+  __ CallRuntime(Runtime::kCompileOptimized, 2);
+  // Restore call kind information and receiver.
+  __ Pop(a1, t1);
+}
+
+
+void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
+  CallCompileOptimized(masm, false);
+  GenerateTailCallToReturnedCode(masm);
+}
+
+
+void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
+  CallCompileOptimized(masm, true);
+  GenerateTailCallToReturnedCode(masm);
 }
 
 
+
 static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
   // For now, we are relying on the fact that make_code_young doesn't do any
   // garbage collection which allows us to save/restore the registers without
index 311afc0..185f9d0 100644 (file)
@@ -4950,7 +4950,7 @@ void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
 
 bool SharedFunctionInfo::is_compiled() {
   return code() !=
-      GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
+      GetIsolate()->builtins()->builtin(Builtins::kCompileUnoptimized);
 }
 
 
@@ -5073,20 +5073,21 @@ bool JSFunction::IsOptimizable() {
 }
 
 
-bool JSFunction::IsMarkedForLazyRecompilation() {
-  return code() == GetIsolate()->builtins()->builtin(Builtins::kLazyRecompile);
+bool JSFunction::IsMarkedForOptimization() {
+  return code() == GetIsolate()->builtins()->builtin(
+      Builtins::kCompileOptimized);
 }
 
 
-bool JSFunction::IsMarkedForConcurrentRecompilation() {
+bool JSFunction::IsMarkedForConcurrentOptimization() {
   return code() == GetIsolate()->builtins()->builtin(
-      Builtins::kConcurrentRecompile);
+      Builtins::kCompileOptimizedConcurrent);
 }
 
 
-bool JSFunction::IsInRecompileQueue() {
+bool JSFunction::IsInOptimizationQueue() {
   return code() == GetIsolate()->builtins()->builtin(
-      Builtins::kInRecompileQueue);
+      Builtins::kInOptimizationQueue);
 }
 
 
@@ -5196,7 +5197,8 @@ bool JSFunction::should_have_prototype() {
 
 
 bool JSFunction::is_compiled() {
-  return code() != GetIsolate()->builtins()->builtin(Builtins::kLazyCompile);
+  return code() !=
+      GetIsolate()->builtins()->builtin(Builtins::kCompileUnoptimized);
 }
 
 
index 8c5f903..b9dcaca 100644 (file)
@@ -9475,19 +9475,19 @@ void JSFunction::JSFunctionIterateBody(int object_size, ObjectVisitor* v) {
 }
 
 
-void JSFunction::MarkForLazyRecompilation() {
+void JSFunction::MarkForOptimization() {
   ASSERT(is_compiled() || GetIsolate()->DebuggerHasBreakPoints());
   ASSERT(!IsOptimized());
   ASSERT(shared()->allows_lazy_compilation() ||
          code()->optimizable());
   ASSERT(!shared()->is_generator());
   set_code_no_write_barrier(
-      GetIsolate()->builtins()->builtin(Builtins::kLazyRecompile));
+      GetIsolate()->builtins()->builtin(Builtins::kCompileOptimized));
   // No write barrier required, since the builtin is part of the root set.
 }
 
 
-void JSFunction::MarkForConcurrentRecompilation() {
+void JSFunction::MarkForConcurrentOptimization() {
   ASSERT(is_compiled() || GetIsolate()->DebuggerHasBreakPoints());
   ASSERT(!IsOptimized());
   ASSERT(shared()->allows_lazy_compilation() || code()->optimizable());
@@ -9499,16 +9499,16 @@ void JSFunction::MarkForConcurrentRecompilation() {
     PrintF(" for concurrent recompilation.\n");
   }
   set_code_no_write_barrier(
-      GetIsolate()->builtins()->builtin(Builtins::kConcurrentRecompile));
+      GetIsolate()->builtins()->builtin(Builtins::kCompileOptimizedConcurrent));
   // No write barrier required, since the builtin is part of the root set.
 }
 
 
-void JSFunction::MarkInRecompileQueue() {
+void JSFunction::MarkInOptimizationQueue() {
   // We can only arrive here via the concurrent-recompilation builtin.  If
   // break points were set, the code would point to the lazy-compile builtin.
   ASSERT(!GetIsolate()->DebuggerHasBreakPoints());
-  ASSERT(IsMarkedForConcurrentRecompilation() && !IsOptimized());
+  ASSERT(IsMarkedForConcurrentOptimization() && !IsOptimized());
   ASSERT(shared()->allows_lazy_compilation() || code()->optimizable());
   ASSERT(GetIsolate()->concurrent_recompilation_enabled());
   if (FLAG_trace_concurrent_recompilation) {
@@ -9517,33 +9517,11 @@ void JSFunction::MarkInRecompileQueue() {
     PrintF(" for concurrent recompilation.\n");
   }
   set_code_no_write_barrier(
-      GetIsolate()->builtins()->builtin(Builtins::kInRecompileQueue));
+      GetIsolate()->builtins()->builtin(Builtins::kInOptimizationQueue));
   // No write barrier required, since the builtin is part of the root set.
 }
 
 
-static bool CompileLazyHelper(CompilationInfo* info,
-                              ClearExceptionFlag flag) {
-  // Compile the source information to a code object.
-  ASSERT(info->IsOptimizing() || !info->shared_info()->is_compiled());
-  ASSERT(!info->isolate()->has_pending_exception());
-  bool result = Compiler::CompileLazy(info);
-  ASSERT(result != info->isolate()->has_pending_exception());
-  if (!result && flag == CLEAR_EXCEPTION) {
-    info->isolate()->clear_pending_exception();
-  }
-  return result;
-}
-
-
-bool SharedFunctionInfo::CompileLazy(Handle<SharedFunctionInfo> shared,
-                                     ClearExceptionFlag flag) {
-  ASSERT(shared->allows_lazy_compilation_without_context());
-  CompilationInfoWithZone info(shared);
-  return CompileLazyHelper(&info, flag);
-}
-
-
 void SharedFunctionInfo::AddToOptimizedCodeMap(
     Handle<SharedFunctionInfo> shared,
     Handle<Context> native_context,
@@ -9604,19 +9582,25 @@ MaybeObject* SharedFunctionInfo::AddToOptimizedCodeMap(Context* native_context,
 }
 
 
-void SharedFunctionInfo::InstallFromOptimizedCodeMap(JSFunction* function,
-                                                     int index) {
+FixedArray* SharedFunctionInfo::GetLiteralsFromOptimizedCodeMap(int index) {
   ASSERT(index > kEntriesStart);
   FixedArray* code_map = FixedArray::cast(optimized_code_map());
   if (!bound()) {
     FixedArray* cached_literals = FixedArray::cast(code_map->get(index + 1));
-    ASSERT(cached_literals != NULL);
-    function->set_literals(cached_literals);
+    ASSERT_NE(NULL, cached_literals);
+    return cached_literals;
   }
+  return NULL;
+}
+
+
+
+Code* SharedFunctionInfo::GetCodeFromOptimizedCodeMap(int index) {
+  ASSERT(index > kEntriesStart);
+  FixedArray* code_map = FixedArray::cast(optimized_code_map());
   Code* code = Code::cast(code_map->get(index));
-  ASSERT(code != NULL);
-  ASSERT(function->context()->native_context() == code_map->get(index - 1));
-  function->ReplaceCode(code);
+  ASSERT_NE(NULL, code);
+  return code;
 }
 
 
@@ -9682,50 +9666,6 @@ void SharedFunctionInfo::TrimOptimizedCodeMap(int shrink_by) {
 }
 
 
-bool JSFunction::CompileLazy(Handle<JSFunction> function,
-                             ClearExceptionFlag flag) {
-  bool result = true;
-  if (function->shared()->is_compiled()) {
-    function->ReplaceCode(function->shared()->code());
-  } else {
-    ASSERT(function->shared()->allows_lazy_compilation());
-    CompilationInfoWithZone info(function);
-    result = CompileLazyHelper(&info, flag);
-    ASSERT(!result || function->is_compiled());
-  }
-  return result;
-}
-
-
-Handle<Code> JSFunction::CompileOsr(Handle<JSFunction> function,
-                                    BailoutId osr_ast_id,
-                                    ClearExceptionFlag flag) {
-  CompilationInfoWithZone info(function);
-  info.SetOptimizing(osr_ast_id);
-  if (CompileLazyHelper(&info, flag)) {
-    // TODO(titzer): don't install the OSR code.
-    // ASSERT(function->code() != *info.code());
-    return info.code();
-  } else {
-    return Handle<Code>::null();
-  }
-}
-
-
-bool JSFunction::CompileOptimized(Handle<JSFunction> function,
-                                  ClearExceptionFlag flag) {
-  CompilationInfoWithZone info(function);
-  info.SetOptimizing(BailoutId::None());
-  return CompileLazyHelper(&info, flag);
-}
-
-
-bool JSFunction::EnsureCompiled(Handle<JSFunction> function,
-                                ClearExceptionFlag flag) {
-  return function->is_compiled() || CompileLazy(function, flag);
-}
-
-
 void JSObject::OptimizeAsPrototype(Handle<JSObject> object) {
   if (object->IsGlobalObject()) return;
 
@@ -10698,6 +10638,18 @@ BailoutId Code::TranslatePcOffsetToAstId(uint32_t pc_offset) {
 }
 
 
+uint32_t Code::TranslateAstIdToPcOffset(BailoutId ast_id) {
+  DisallowHeapAllocation no_gc;
+  ASSERT(kind() == FUNCTION);
+  BackEdgeTable back_edges(this, &no_gc);
+  for (uint32_t i = 0; i < back_edges.length(); i++) {
+    if (back_edges.ast_id(i) == ast_id) return back_edges.pc_offset(i);
+  }
+  UNREACHABLE();  // We expect to find the back edge.
+  return 0;
+}
+
+
 void Code::MakeCodeAgeSequenceYoung(byte* sequence, Isolate* isolate) {
   PatchPlatformCodeAge(isolate, sequence, kNoAgeCodeAge, NO_MARKING_PARITY);
 }
index 7abee00..a24f30e 100644 (file)
@@ -1056,47 +1056,47 @@ class MaybeObject BASE_EMBEDDED {
                                                                               \
   V(k32BitValueInRegisterIsNotZeroExtended,                                   \
     "32 bit value in register is not zero-extended")                          \
-  V(kAlignmentMarkerExpected, "alignment marker expected")                    \
+  V(kAlignmentMarkerExpected, "Alignment marker expected")                    \
   V(kAllocationIsNotDoubleAligned, "Allocation is not double aligned")        \
   V(kAPICallReturnedInvalidObject, "API call returned invalid object")        \
   V(kArgumentsObjectValueInATestContext,                                      \
-    "arguments object value in a test context")                               \
-  V(kArrayBoilerplateCreationFailed, "array boilerplate creation failed")     \
-  V(kArrayIndexConstantValueTooBig, "array index constant value too big")     \
-  V(kAssignmentToArguments, "assignment to arguments")                        \
+    "Arguments object value in a test context")                               \
+  V(kArrayBoilerplateCreationFailed, "Array boilerplate creation failed")     \
+  V(kArrayIndexConstantValueTooBig, "Array index constant value too big")     \
+  V(kAssignmentToArguments, "Assignment to arguments")                        \
   V(kAssignmentToLetVariableBeforeInitialization,                             \
-    "assignment to let variable before initialization")                       \
-  V(kAssignmentToLOOKUPVariable, "assignment to LOOKUP variable")             \
+    "Assignment to let variable before initialization")                       \
+  V(kAssignmentToLOOKUPVariable, "Assignment to LOOKUP variable")             \
   V(kAssignmentToParameterFunctionUsesArgumentsObject,                        \
-    "assignment to parameter, function uses arguments object")                \
+    "Assignment to parameter, function uses arguments object")                \
   V(kAssignmentToParameterInArgumentsObject,                                  \
-    "assignment to parameter in arguments object")                            \
+    "Assignment to parameter in arguments object")                            \
   V(kAttemptToUseUndefinedCache, "Attempt to use undefined cache")            \
   V(kBadValueContextForArgumentsObjectValue,                                  \
-    "bad value context for arguments object value")                           \
+    "Bad value context for arguments object value")                           \
   V(kBadValueContextForArgumentsValue,                                        \
-    "bad value context for arguments value")                                  \
-  V(kBailedOutDueToDependencyChange, "bailed out due to dependency change")   \
-  V(kBailoutWasNotPrepared, "bailout was not prepared")                       \
+    "Bad value context for arguments value")                                  \
+  V(kBailedOutDueToDependencyChange, "Bailed out due to dependency change")   \
+  V(kBailoutWasNotPrepared, "Bailout was not prepared")                       \
   V(kBinaryStubGenerateFloatingPointCode,                                     \
     "BinaryStub_GenerateFloatingPointCode")                                   \
   V(kBothRegistersWereSmisInSelectNonSmi,                                     \
     "Both registers were smis in SelectNonSmi")                               \
   V(kCallToAJavaScriptRuntimeFunction,                                        \
-    "call to a JavaScript runtime function")                                  \
+    "Call to a JavaScript runtime function")                                  \
   V(kCannotTranslatePositionInChangedArea,                                    \
     "Cannot translate position in changed area")                              \
-  V(kCodeGenerationFailed, "code generation failed")                          \
-  V(kCodeObjectNotProperlyPatched, "code object not properly patched")        \
-  V(kCompoundAssignmentToLookupSlot, "compound assignment to lookup slot")    \
-  V(kContextAllocatedArguments, "context-allocated arguments")                \
-  V(kDebuggerIsActive, "debugger is active")                                  \
+  V(kCodeGenerationFailed, "Code generation failed")                          \
+  V(kCodeObjectNotProperlyPatched, "Code object not properly patched")        \
+  V(kCompoundAssignmentToLookupSlot, "Compound assignment to lookup slot")    \
+  V(kContextAllocatedArguments, "Context-allocated arguments")                \
+  V(kDebuggerIsActive, "Debugger is active")                                  \
   V(kDebuggerStatement, "DebuggerStatement")                                  \
   V(kDeclarationInCatchContext, "Declaration in catch context")               \
   V(kDeclarationInWithContext, "Declaration in with context")                 \
   V(kDefaultNaNModeNotSet, "Default NaN mode not set")                        \
-  V(kDeleteWithGlobalVariable, "delete with global variable")                 \
-  V(kDeleteWithNonGlobalVariable, "delete with non-global variable")          \
+  V(kDeleteWithGlobalVariable, "Delete with global variable")                 \
+  V(kDeleteWithNonGlobalVariable, "Delete with non-global variable")          \
   V(kDestinationOfCopyNotAligned, "Destination of copy not aligned")          \
   V(kDontDeleteCellsCannotContainTheHole,                                     \
     "DontDelete cells can't contain the hole")                                \
@@ -1104,9 +1104,9 @@ class MaybeObject BASE_EMBEDDED {
     "DoPushArgument not implemented for double type")                         \
   V(kEmitLoadRegisterUnsupportedDoubleImmediate,                              \
     "EmitLoadRegister: Unsupported double immediate")                         \
-  V(kEval, "eval")                                                            \
+  V(kEval, "Eval")                                                            \
   V(kExpected0AsASmiSentinel, "Expected 0 as a Smi sentinel")                 \
-  V(kExpectedAlignmentMarker, "expected alignment marker")                    \
+  V(kExpectedAlignmentMarker, "Expected alignment marker")                    \
   V(kExpectedAllocationSiteInCell,                                            \
     "Expected AllocationSite in property cell")                               \
   V(kExpectedPropertyCellInRegisterA2,                                        \
@@ -1119,47 +1119,48 @@ class MaybeObject BASE_EMBEDDED {
     "Expecting alignment for CopyBytes")                                      \
   V(kExportDeclaration, "Export declaration")                                 \
   V(kExternalStringExpectedButNotFound,                                       \
-    "external string expected, but not found")                                \
-  V(kFailedBailedOutLastTime, "failed/bailed out last time")                  \
+    "External string expected, but not found")                                \
+  V(kFailedBailedOutLastTime, "Failed/bailed out last time")                  \
   V(kForInStatementIsNotFastCase, "ForInStatement is not fast case")          \
   V(kForInStatementOptimizationIsDisabled,                                    \
     "ForInStatement optimization is disabled")                                \
   V(kForInStatementWithNonLocalEachVariable,                                  \
     "ForInStatement with non-local each variable")                            \
   V(kForOfStatement, "ForOfStatement")                                        \
-  V(kFrameIsExpectedToBeAligned, "frame is expected to be aligned")           \
-  V(kFunctionCallsEval, "function calls eval")                                \
-  V(kFunctionIsAGenerator, "function is a generator")                         \
-  V(kFunctionWithIllegalRedeclaration, "function with illegal redeclaration") \
+  V(kFrameIsExpectedToBeAligned, "Frame is expected to be aligned")           \
+  V(kFunctionCallsEval, "Function calls eval")                                \
+  V(kFunctionIsAGenerator, "Function is a generator")                         \
+  V(kFunctionWithIllegalRedeclaration, "Function with illegal redeclaration") \
   V(kGeneratedCodeIsTooLarge, "Generated code is too large")                  \
   V(kGeneratorFailedToResume, "Generator failed to resume")                   \
-  V(kGenerator, "generator")                                                  \
+  V(kGenerator, "Generator")                                                  \
   V(kGlobalFunctionsMustHaveInitialMap,                                       \
     "Global functions must have initial map")                                 \
   V(kHeapNumberMapRegisterClobbered, "HeapNumberMap register clobbered")      \
+  V(kHydrogenFilter, "Optimization disabled by filter")                       \
   V(kImportDeclaration, "Import declaration")                                 \
   V(kImproperObjectOnPrototypeChainForStore,                                  \
-    "improper object on prototype chain for store")                           \
+    "Improper object on prototype chain for store")                           \
   V(kIndexIsNegative, "Index is negative")                                    \
   V(kIndexIsTooLarge, "Index is too large")                                   \
-  V(kInlinedRuntimeFunctionClassOf, "inlined runtime function: ClassOf")      \
+  V(kInlinedRuntimeFunctionClassOf, "Inlined runtime function: ClassOf")      \
   V(kInlinedRuntimeFunctionFastAsciiArrayJoin,                                \
-    "inlined runtime function: FastAsciiArrayJoin")                           \
+    "Inlined runtime function: FastAsciiArrayJoin")                           \
   V(kInlinedRuntimeFunctionGeneratorNext,                                     \
-    "inlined runtime function: GeneratorNext")                                \
+    "Inlined runtime function: GeneratorNext")                                \
   V(kInlinedRuntimeFunctionGeneratorThrow,                                    \
-    "inlined runtime function: GeneratorThrow")                               \
+    "Inlined runtime function: GeneratorThrow")                               \
   V(kInlinedRuntimeFunctionGetFromCache,                                      \
-    "inlined runtime function: GetFromCache")                                 \
+    "Inlined runtime function: GetFromCache")                                 \
   V(kInlinedRuntimeFunctionIsNonNegativeSmi,                                  \
-    "inlined runtime function: IsNonNegativeSmi")                             \
+    "Inlined runtime function: IsNonNegativeSmi")                             \
   V(kInlinedRuntimeFunctionIsRegExpEquivalent,                                \
-    "inlined runtime function: IsRegExpEquivalent")                           \
+    "Inlined runtime function: IsRegExpEquivalent")                           \
   V(kInlinedRuntimeFunctionIsStringWrapperSafeForDefaultValueOf,              \
-    "inlined runtime function: IsStringWrapperSafeForDefaultValueOf")         \
-  V(kInliningBailedOut, "inlining bailed out")                                \
+    "Inlined runtime function: IsStringWrapperSafeForDefaultValueOf")         \
+  V(kInliningBailedOut, "Inlining bailed out")                                \
   V(kInputGPRIsExpectedToHaveUpper32Cleared,                                  \
-    "input GPR is expected to have upper32 cleared")                          \
+    "Input GPR is expected to have upper32 cleared")                          \
   V(kInstanceofStubUnexpectedCallSiteCacheCheck,                              \
     "InstanceofStub unexpected call site cache (check)")                      \
   V(kInstanceofStubUnexpectedCallSiteCacheCmp1,                               \
@@ -1174,9 +1175,9 @@ class MaybeObject BASE_EMBEDDED {
   V(kInvalidElementsKindForInternalArrayOrInternalPackedArray,                \
     "Invalid ElementsKind for InternalArray or InternalPackedArray")          \
   V(kInvalidHandleScopeLevel, "Invalid HandleScope level")                    \
-  V(kInvalidLeftHandSideInAssignment, "invalid left-hand side in assignment") \
-  V(kInvalidLhsInCompoundAssignment, "invalid lhs in compound assignment")    \
-  V(kInvalidLhsInCountOperation, "invalid lhs in count operation")            \
+  V(kInvalidLeftHandSideInAssignment, "Invalid left-hand side in assignment") \
+  V(kInvalidLhsInCompoundAssignment, "Invalid lhs in compound assignment")    \
+  V(kInvalidLhsInCountOperation, "Invalid lhs in count operation")            \
   V(kInvalidMinLength, "Invalid min_length")                                  \
   V(kJSGlobalObjectNativeContextShouldBeANativeContext,                       \
     "JSGlobalObject::native_context should be a native context")              \
@@ -1192,7 +1193,7 @@ class MaybeObject BASE_EMBEDDED {
     "LiveEdit frame dropping is not supported on mips")                       \
   V(kLiveEdit, "LiveEdit")                                                    \
   V(kLookupVariableInCountOperation,                                          \
-    "lookup variable in count operation")                                     \
+    "Lookup variable in count operation")                                     \
   V(kMapIsNoLongerInEax, "Map is no longer in eax")                           \
   V(kModuleDeclaration, "Module declaration")                                 \
   V(kModuleLiteral, "Module literal")                                         \
@@ -1201,26 +1202,26 @@ class MaybeObject BASE_EMBEDDED {
   V(kModuleVariable, "Module variable")                                       \
   V(kModuleUrl, "Module url")                                                 \
   V(kNativeFunctionLiteral, "Native function literal")                        \
-  V(kNoCasesLeft, "no cases left")                                            \
+  V(kNoCasesLeft, "No cases left")                                            \
   V(kNoEmptyArraysHereInEmitFastAsciiArrayJoin,                               \
     "No empty arrays here in EmitFastAsciiArrayJoin")                         \
   V(kNonInitializerAssignmentToConst,                                         \
-    "non-initializer assignment to const")                                    \
+    "Non-initializer assignment to const")                                    \
   V(kNonSmiIndex, "Non-smi index")                                            \
   V(kNonSmiKeyInArrayLiteral, "Non-smi key in array literal")                 \
   V(kNonSmiValue, "Non-smi value")                                            \
   V(kNonObject, "Non-object value")                                           \
   V(kNotEnoughVirtualRegistersForValues,                                      \
-    "not enough virtual registers for values")                                \
+    "Not enough virtual registers for values")                                \
   V(kNotEnoughSpillSlotsForOsr,                                               \
-    "not enough spill slots for OSR")                                         \
+    "Not enough spill slots for OSR")                                         \
   V(kNotEnoughVirtualRegistersRegalloc,                                       \
-    "not enough virtual registers (regalloc)")                                \
-  V(kObjectFoundInSmiOnlyArray, "object found in smi-only array")             \
+    "Not enough virtual registers (regalloc)")                                \
+  V(kObjectFoundInSmiOnlyArray, "Object found in smi-only array")             \
   V(kObjectLiteralWithComplexProperty,                                        \
     "Object literal with complex property")                                   \
   V(kOddballInStringTableIsNotUndefinedOrTheHole,                             \
-    "oddball in string table is not undefined or the hole")                   \
+    "Oddball in string table is not undefined or the hole")                   \
   V(kOperandIsASmiAndNotAName, "Operand is a smi and not a name")             \
   V(kOperandIsASmiAndNotAString, "Operand is a smi and not a string")         \
   V(kOperandIsASmi, "Operand is a smi")                                       \
@@ -1230,24 +1231,25 @@ class MaybeObject BASE_EMBEDDED {
   V(kOperandIsNotAString, "Operand is not a string")                          \
   V(kOperandIsNotSmi, "Operand is not smi")                                   \
   V(kOperandNotANumber, "Operand not a number")                               \
-  V(kOptimizedTooManyTimes, "optimized too many times")                       \
+  V(kOptimizationDisabled, "Optimization is disabled")                        \
+  V(kOptimizedTooManyTimes, "Optimized too many times")                       \
   V(kOutOfVirtualRegistersWhileTryingToAllocateTempRegister,                  \
     "Out of virtual registers while trying to allocate temp register")        \
-  V(kParseScopeError, "parse/scope error")                                    \
-  V(kPossibleDirectCallToEval, "possible direct call to eval")                \
+  V(kParseScopeError, "Parse/scope error")                                    \
+  V(kPossibleDirectCallToEval, "Possible direct call to eval")                \
   V(kPropertyAllocationCountFailed, "Property allocation count failed")       \
   V(kReceivedInvalidReturnAddress, "Received invalid return address")         \
   V(kReferenceToAVariableWhichRequiresDynamicLookup,                          \
-    "reference to a variable which requires dynamic lookup")                  \
+    "Reference to a variable which requires dynamic lookup")                  \
   V(kReferenceToGlobalLexicalVariable,                                        \
-    "reference to global lexical variable")                                   \
-  V(kReferenceToUninitializedVariable, "reference to uninitialized variable") \
+    "Reference to global lexical variable")                                   \
+  V(kReferenceToUninitializedVariable, "Reference to uninitialized variable") \
   V(kRegisterDidNotMatchExpectedRoot, "Register did not match expected root") \
-  V(kRegisterWasClobbered, "register was clobbered")                          \
+  V(kRegisterWasClobbered, "Register was clobbered")                          \
   V(kScopedBlock, "ScopedBlock")                                              \
   V(kSmiAdditionOverflow, "Smi addition overflow")                            \
   V(kSmiSubtractionOverflow, "Smi subtraction overflow")                      \
-  V(kStackFrameTypesMustMatch, "stack frame types must match")                \
+  V(kStackFrameTypesMustMatch, "Stack frame types must match")                \
   V(kSwitchStatementMixedOrNonLiteralSwitchLabels,                            \
     "SwitchStatement: mixed or non-literal switch labels")                    \
   V(kSwitchStatementTooManyClauses, "SwitchStatement: too many clauses")      \
@@ -1259,8 +1261,8 @@ class MaybeObject BASE_EMBEDDED {
     "The instruction to patch should be a lui")                               \
   V(kTheInstructionToPatchShouldBeAnOri,                                      \
     "The instruction to patch should be an ori")                              \
-  V(kTooManyParametersLocals, "too many parameters/locals")                   \
-  V(kTooManyParameters, "too many parameters")                                \
+  V(kTooManyParametersLocals, "Too many parameters/locals")                   \
+  V(kTooManyParameters, "Too many parameters")                                \
   V(kTooManySpillSlotsNeededForOSR, "Too many spill slots needed for OSR")    \
   V(kToOperandIsDoubleRegisterUnimplemented,                                  \
     "ToOperand IsDoubleRegister unimplemented")                               \
@@ -1311,23 +1313,23 @@ class MaybeObject BASE_EMBEDDED {
   V(kUnexpectedUnusedPropertiesOfStringWrapper,                               \
     "Unexpected unused properties of string wrapper")                         \
   V(kUninitializedKSmiConstantRegister, "Uninitialized kSmiConstantRegister") \
-  V(kUnknown, "unknown")                                                      \
+  V(kUnknown, "Unknown")                                                      \
   V(kUnsupportedConstCompoundAssignment,                                      \
-    "unsupported const compound assignment")                                  \
+    "Unsupported const compound assignment")                                  \
   V(kUnsupportedCountOperationWithConst,                                      \
-    "unsupported count operation with const")                                 \
-  V(kUnsupportedDoubleImmediate, "unsupported double immediate")              \
-  V(kUnsupportedLetCompoundAssignment, "unsupported let compound assignment") \
+    "Unsupported count operation with const")                                 \
+  V(kUnsupportedDoubleImmediate, "Unsupported double immediate")              \
+  V(kUnsupportedLetCompoundAssignment, "Unsupported let compound assignment") \
   V(kUnsupportedLookupSlotInDeclaration,                                      \
-    "unsupported lookup slot in declaration")                                 \
+    "Unsupported lookup slot in declaration")                                 \
   V(kUnsupportedNonPrimitiveCompare, "Unsupported non-primitive compare")     \
   V(kUnsupportedPhiUseOfArguments, "Unsupported phi use of arguments")        \
   V(kUnsupportedPhiUseOfConstVariable,                                        \
     "Unsupported phi use of const variable")                                  \
-  V(kUnsupportedTaggedImmediate, "unsupported tagged immediate")              \
+  V(kUnsupportedTaggedImmediate, "Unsupported tagged immediate")              \
   V(kVariableResolvedToWithContext, "Variable resolved to with context")      \
   V(kWeShouldNotHaveAnEmptyLexicalContext,                                    \
-    "we should not have an empty lexical context")                            \
+    "We should not have an empty lexical context")                            \
   V(kWithStatement, "WithStatement")                                          \
   V(kWrongAddressOrValuePassedToRecordWrite,                                  \
     "Wrong address or value passed to RecordWrite")                           \
@@ -5358,6 +5360,7 @@ class Code: public HeapObject {
   void ClearTypeFeedbackCells(Heap* heap);
 
   BailoutId TranslatePcOffsetToAstId(uint32_t pc_offset);
+  uint32_t TranslateAstIdToPcOffset(BailoutId ast_id);
 
 #define DECLARE_CODE_AGE_ENUM(X) k##X##CodeAge,
   enum Age {
@@ -6540,7 +6543,9 @@ class SharedFunctionInfo: public HeapObject {
 
-  // Installs optimized code from the code map on the given closure. The
-  // index has to be consistent with a search result as defined above.
-  void InstallFromOptimizedCodeMap(JSFunction* function, int index);
+  // Retrieve optimized code or cached literals from the code map. The
+  // index has to be consistent with a search result as defined above.
+  FixedArray* GetLiteralsFromOptimizedCodeMap(int index);
+
+  Code* GetCodeFromOptimizedCodeMap(int index);
 
   // Clear optimized code map.
   void ClearOptimizedCodeMap();
@@ -6924,12 +6929,6 @@ class SharedFunctionInfo: public HeapObject {
 
   void ResetForNewContext(int new_ic_age);
 
-  // Helper to compile the shared code.  Returns true on success, false on
-  // failure (e.g., stack overflow during compilation). This is only used by
-  // the debugger, it is not possible to compile without a context otherwise.
-  static bool CompileLazy(Handle<SharedFunctionInfo> shared,
-                          ClearExceptionFlag flag);
-
   // Casting.
   static inline SharedFunctionInfo* cast(Object* obj);
 
@@ -7260,29 +7259,20 @@ class JSFunction: public JSObject {
 
-  // Mark this function for lazy recompilation. The function will be
-  // recompiled the next time it is executed.
+  // Mark this function for optimization. The function will be recompiled
+  // the next time it is executed.
-  void MarkForLazyRecompilation();
-  void MarkForConcurrentRecompilation();
-  void MarkInRecompileQueue();
-
-  // Helpers to compile this function.  Returns true on success, false on
-  // failure (e.g., stack overflow during compilation).
-  static bool EnsureCompiled(Handle<JSFunction> function,
-                             ClearExceptionFlag flag);
-  static bool CompileLazy(Handle<JSFunction> function,
-                          ClearExceptionFlag flag);
-  static Handle<Code> CompileOsr(Handle<JSFunction> function,
-                                 BailoutId osr_ast_id,
-                                 ClearExceptionFlag flag);
+  void MarkForOptimization();
+  void MarkForConcurrentOptimization();
+  void MarkInOptimizationQueue();
+
   static bool CompileOptimized(Handle<JSFunction> function,
                                ClearExceptionFlag flag);
 
-  // Tells whether or not the function is already marked for lazy
-  // recompilation.
+  // Tells whether or not the function is already marked for
+  // optimization.
-  inline bool IsMarkedForLazyRecompilation();
-  inline bool IsMarkedForConcurrentRecompilation();
+  inline bool IsMarkedForOptimization();
+  inline bool IsMarkedForConcurrentOptimization();
 
-  // Tells whether or not the function is in the concurrent optimization queue.
+  // Tells whether or not the function is in the concurrent optimization queue.
-  inline bool IsInRecompileQueue();
+  inline bool IsInOptimizationQueue();
 
   // [literals_or_bindings]: Fixed array holding either
   // the materialized literals or the bindings of a bound function.
index 32a7f97..d215070 100644 (file)
@@ -106,10 +106,10 @@ void OptimizingCompilerThread::Run() {
 }
 
 
-RecompileJob* OptimizingCompilerThread::NextInput() {
+OptimizedCompileJob* OptimizingCompilerThread::NextInput() {
   LockGuard<Mutex> access_input_queue_(&input_queue_mutex_);
   if (input_queue_length_ == 0) return NULL;
-  RecompileJob* job = input_queue_[InputQueueIndex(0)];
+  OptimizedCompileJob* job = input_queue_[InputQueueIndex(0)];
   ASSERT_NE(NULL, job);
   input_queue_shift_ = InputQueueIndex(1);
   input_queue_length_--;
@@ -118,13 +118,13 @@ RecompileJob* OptimizingCompilerThread::NextInput() {
 
 
 void OptimizingCompilerThread::CompileNext() {
-  RecompileJob* job = NextInput();
+  OptimizedCompileJob* job = NextInput();
   ASSERT_NE(NULL, job);
 
   // The function may have already been optimized by OSR.  Simply continue.
-  RecompileJob::Status status = job->OptimizeGraph();
+  OptimizedCompileJob::Status status = job->OptimizeGraph();
   USE(status);   // Prevent an unused-variable error in release mode.
-  ASSERT(status != RecompileJob::FAILED);
+  ASSERT(status != OptimizedCompileJob::FAILED);
 
   // The function may have already been optimized by OSR.  Simply continue.
   // Use a mutex to make sure that functions marked for install
@@ -134,13 +134,18 @@ void OptimizingCompilerThread::CompileNext() {
 }
 
 
-static void DisposeRecompileJob(RecompileJob* job,
-                                bool restore_function_code) {
+static void DisposeOptimizedCompileJob(OptimizedCompileJob* job,
+                                       bool restore_function_code) {
   // The recompile job is allocated in the CompilationInfo's zone.
   CompilationInfo* info = job->info();
   if (restore_function_code) {
     if (info->is_osr()) {
-      if (!job->IsWaitingForInstall()) BackEdgeTable::RemoveStackCheck(info);
+      if (!job->IsWaitingForInstall()) {
+        // Remove stack check that guards OSR entry on original code.
+        Handle<Code> code = info->unoptimized_code();
+        uint32_t offset = code->TranslateAstIdToPcOffset(info->osr_ast_id());
+        BackEdgeTable::RemoveStackCheck(code, offset);
+      }
     } else {
       Handle<JSFunction> function = info->closure();
       function->ReplaceCode(function->shared()->code());
@@ -151,25 +156,25 @@ static void DisposeRecompileJob(RecompileJob* job,
 
 
 void OptimizingCompilerThread::FlushInputQueue(bool restore_function_code) {
-  RecompileJob* job;
+  OptimizedCompileJob* job;
   while ((job = NextInput())) {
     // This should not block, since we have one signal on the input queue
     // semaphore corresponding to each element in the input queue.
     input_queue_semaphore_.Wait();
     // OSR jobs are dealt with separately.
     if (!job->info()->is_osr()) {
-      DisposeRecompileJob(job, restore_function_code);
+      DisposeOptimizedCompileJob(job, restore_function_code);
     }
   }
 }
 
 
 void OptimizingCompilerThread::FlushOutputQueue(bool restore_function_code) {
-  RecompileJob* job;
+  OptimizedCompileJob* job;
   while (output_queue_.Dequeue(&job)) {
     // OSR jobs are dealt with separately.
     if (!job->info()->is_osr()) {
-      DisposeRecompileJob(job, restore_function_code);
+      DisposeOptimizedCompileJob(job, restore_function_code);
     }
   }
 }
@@ -178,7 +183,7 @@ void OptimizingCompilerThread::FlushOutputQueue(bool restore_function_code) {
 void OptimizingCompilerThread::FlushOsrBuffer(bool restore_function_code) {
   for (int i = 0; i < osr_buffer_capacity_; i++) {
     if (osr_buffer_[i] != NULL) {
-      DisposeRecompileJob(osr_buffer_[i], restore_function_code);
+      DisposeOptimizedCompileJob(osr_buffer_[i], restore_function_code);
       osr_buffer_[i] = NULL;
     }
   }
@@ -236,9 +241,10 @@ void OptimizingCompilerThread::InstallOptimizedFunctions() {
   ASSERT(!IsOptimizerThread());
   HandleScope handle_scope(isolate_);
 
-  RecompileJob* job;
+  OptimizedCompileJob* job;
   while (output_queue_.Dequeue(&job)) {
     CompilationInfo* info = job->info();
+    Handle<JSFunction> function(*info->closure());
     if (info->is_osr()) {
       if (FLAG_trace_osr) {
         PrintF("[COSR - ");
@@ -247,26 +253,25 @@ void OptimizingCompilerThread::InstallOptimizedFunctions() {
                info->osr_ast_id().ToInt());
       }
       job->WaitForInstall();
-      BackEdgeTable::RemoveStackCheck(info);
+      // Remove stack check that guards OSR entry on original code.
+      Handle<Code> code = info->unoptimized_code();
+      uint32_t offset = code->TranslateAstIdToPcOffset(info->osr_ast_id());
+      BackEdgeTable::RemoveStackCheck(code, offset);
     } else {
-      Compiler::InstallOptimizedCode(job);
+      Handle<Code> code = Compiler::GetConcurrentlyOptimizedCode(job);
+      function->ReplaceCode(
+          code.is_null() ? function->shared()->code() : *code);
     }
   }
 }
 
 
-void OptimizingCompilerThread::QueueForOptimization(RecompileJob* job) {
+void OptimizingCompilerThread::QueueForOptimization(OptimizedCompileJob* job) {
   ASSERT(IsQueueAvailable());
   ASSERT(!IsOptimizerThread());
   CompilationInfo* info = job->info();
   if (info->is_osr()) {
-    if (FLAG_trace_concurrent_recompilation) {
-      PrintF("  ** Queueing ");
-      info->closure()->PrintName();
-      PrintF(" for concurrent on-stack replacement.\n");
-    }
     osr_attempts_++;
-    BackEdgeTable::AddStackCheck(info);
     AddToOsrBuffer(job);
     // Add job to the front of the input queue.
     LockGuard<Mutex> access_input_queue(&input_queue_mutex_);
@@ -276,7 +281,6 @@ void OptimizingCompilerThread::QueueForOptimization(RecompileJob* job) {
     input_queue_[InputQueueIndex(0)] = job;
     input_queue_length_++;
   } else {
-    info->closure()->MarkInRecompileQueue();
     // Add job to the back of the input queue.
     LockGuard<Mutex> access_input_queue(&input_queue_mutex_);
     ASSERT_LT(input_queue_length_, input_queue_capacity_);
@@ -300,14 +304,14 @@ void OptimizingCompilerThread::Unblock() {
 }
 
 
-RecompileJob* OptimizingCompilerThread::FindReadyOSRCandidate(
-    Handle<JSFunction> function, uint32_t osr_pc_offset) {
+OptimizedCompileJob* OptimizingCompilerThread::FindReadyOSRCandidate(
+    Handle<JSFunction> function, BailoutId osr_ast_id) {
   ASSERT(!IsOptimizerThread());
   for (int i = 0; i < osr_buffer_capacity_; i++) {
-    RecompileJob* current = osr_buffer_[i];
+    OptimizedCompileJob* current = osr_buffer_[i];
     if (current != NULL &&
         current->IsWaitingForInstall() &&
-        current->info()->HasSameOsrEntry(function, osr_pc_offset)) {
+        current->info()->HasSameOsrEntry(function, osr_ast_id)) {
       osr_hits_++;
       osr_buffer_[i] = NULL;
       return current;
@@ -318,12 +322,12 @@ RecompileJob* OptimizingCompilerThread::FindReadyOSRCandidate(
 
 
 bool OptimizingCompilerThread::IsQueuedForOSR(Handle<JSFunction> function,
-                                              uint32_t osr_pc_offset) {
+                                              BailoutId osr_ast_id) {
   ASSERT(!IsOptimizerThread());
   for (int i = 0; i < osr_buffer_capacity_; i++) {
-    RecompileJob* current = osr_buffer_[i];
+    OptimizedCompileJob* current = osr_buffer_[i];
     if (current != NULL &&
-        current->info()->HasSameOsrEntry(function, osr_pc_offset)) {
+        current->info()->HasSameOsrEntry(function, osr_ast_id)) {
       return !current->IsWaitingForInstall();
     }
   }
@@ -334,7 +338,7 @@ bool OptimizingCompilerThread::IsQueuedForOSR(Handle<JSFunction> function,
 bool OptimizingCompilerThread::IsQueuedForOSR(JSFunction* function) {
   ASSERT(!IsOptimizerThread());
   for (int i = 0; i < osr_buffer_capacity_; i++) {
-    RecompileJob* current = osr_buffer_[i];
+    OptimizedCompileJob* current = osr_buffer_[i];
     if (current != NULL && *current->info()->closure() == function) {
       return !current->IsWaitingForInstall();
     }
@@ -343,10 +347,10 @@ bool OptimizingCompilerThread::IsQueuedForOSR(JSFunction* function) {
 }
 
 
-void OptimizingCompilerThread::AddToOsrBuffer(RecompileJob* job) {
+void OptimizingCompilerThread::AddToOsrBuffer(OptimizedCompileJob* job) {
   ASSERT(!IsOptimizerThread());
   // Find the next slot that is empty or has a stale job.
-  RecompileJob* stale = NULL;
+  OptimizedCompileJob* stale = NULL;
   while (true) {
     stale = osr_buffer_[osr_buffer_cursor_];
     if (stale == NULL || stale->IsWaitingForInstall()) break;
@@ -362,7 +366,7 @@ void OptimizingCompilerThread::AddToOsrBuffer(RecompileJob* job) {
       info->closure()->PrintName();
       PrintF(", AST id %d]\n", info->osr_ast_id().ToInt());
     }
-    DisposeRecompileJob(stale, false);
+    DisposeOptimizedCompileJob(stale, false);
   }
   osr_buffer_[osr_buffer_cursor_] = job;
   osr_buffer_cursor_ = (osr_buffer_cursor_ + 1) % osr_buffer_capacity_;
index 795fa65..eae1f60 100644 (file)
@@ -40,7 +40,7 @@ namespace v8 {
 namespace internal {
 
 class HOptimizedGraphBuilder;
-class RecompileJob;
+class OptimizedCompileJob;
 class SharedFunctionInfo;
 
 class OptimizingCompilerThread : public Thread {
@@ -62,10 +62,10 @@ class OptimizingCompilerThread : public Thread {
       osr_attempts_(0),
       blocked_jobs_(0) {
     NoBarrier_Store(&stop_thread_, static_cast<AtomicWord>(CONTINUE));
-    input_queue_ = NewArray<RecompileJob*>(input_queue_capacity_);
+    input_queue_ = NewArray<OptimizedCompileJob*>(input_queue_capacity_);
     if (FLAG_concurrent_osr) {
       // Allocate and mark OSR buffer slots as empty.
-      osr_buffer_ = NewArray<RecompileJob*>(osr_buffer_capacity_);
+      osr_buffer_ = NewArray<OptimizedCompileJob*>(osr_buffer_capacity_);
       for (int i = 0; i < osr_buffer_capacity_; i++) osr_buffer_[i] = NULL;
     }
   }
@@ -75,12 +75,12 @@ class OptimizingCompilerThread : public Thread {
   void Run();
   void Stop();
   void Flush();
-  void QueueForOptimization(RecompileJob* optimizing_compiler);
+  void QueueForOptimization(OptimizedCompileJob* optimizing_compiler);
   void Unblock();
   void InstallOptimizedFunctions();
-  RecompileJob* FindReadyOSRCandidate(Handle<JSFunction> function,
-                                      uint32_t osr_pc_offset);
-  bool IsQueuedForOSR(Handle<JSFunction> function, uint32_t osr_pc_offset);
+  OptimizedCompileJob* FindReadyOSRCandidate(Handle<JSFunction> function,
+                                             BailoutId osr_ast_id);
+  bool IsQueuedForOSR(Handle<JSFunction> function, BailoutId osr_ast_id);
 
   bool IsQueuedForOSR(JSFunction* function);
 
@@ -112,11 +112,11 @@ class OptimizingCompilerThread : public Thread {
   void FlushOutputQueue(bool restore_function_code);
   void FlushOsrBuffer(bool restore_function_code);
   void CompileNext();
-  RecompileJob* NextInput();
+  OptimizedCompileJob* NextInput();
 
   // Add a recompilation task for OSR to the cyclic buffer, awaiting OSR entry.
   // Tasks evicted from the cyclic buffer are discarded.
-  void AddToOsrBuffer(RecompileJob* compiler);
+  void AddToOsrBuffer(OptimizedCompileJob* compiler);
 
   inline int InputQueueIndex(int i) {
     int result = (i + input_queue_shift_) % input_queue_capacity_;
@@ -135,17 +135,17 @@ class OptimizingCompilerThread : public Thread {
   Semaphore input_queue_semaphore_;
 
   // Circular queue of incoming recompilation tasks (including OSR).
-  RecompileJob** input_queue_;
+  OptimizedCompileJob** input_queue_;
   int input_queue_capacity_;
   int input_queue_length_;
   int input_queue_shift_;
   Mutex input_queue_mutex_;
 
   // Queue of recompilation tasks ready to be installed (excluding OSR).
-  UnboundQueue<RecompileJob*> output_queue_;
+  UnboundQueue<OptimizedCompileJob*> output_queue_;
 
   // Cyclic buffer of recompilation tasks for OSR.
-  RecompileJob** osr_buffer_;
+  OptimizedCompileJob** osr_buffer_;
   int osr_buffer_capacity_;
   int osr_buffer_cursor_;
 
index dd8e600..d3c24d1 100644 (file)
@@ -418,7 +418,12 @@ class Parser : public ParserBase {
   // Parses the source code represented by the compilation info and sets its
   // function literal.  Returns false (and deallocates any allocated AST
   // nodes) if parsing failed.
-  static bool Parse(CompilationInfo* info) { return Parser(info).Parse(); }
+  static bool Parse(CompilationInfo* info,
+                    bool allow_lazy = false) {
+    Parser parser(info);
+    parser.set_allow_lazy(allow_lazy);
+    return parser.Parse();
+  }
   bool Parse();
 
  private:
index 390222d..5784e4d 100644 (file)
@@ -124,11 +124,11 @@ void RuntimeProfiler::Optimize(JSFunction* function, const char* reason) {
       // recompilation race.  This goes away as soon as OSR becomes one-shot.
       return;
     }
-    ASSERT(!function->IsInRecompileQueue());
-    function->MarkForConcurrentRecompilation();
+    ASSERT(!function->IsInOptimizationQueue());
+    function->MarkForConcurrentOptimization();
   } else {
     // The next call to the function will trigger optimization.
-    function->MarkForLazyRecompilation();
+    function->MarkForOptimization();
   }
 }
 
@@ -186,7 +186,7 @@ void RuntimeProfiler::OptimizeNow() {
     Code* shared_code = shared->code();
 
     if (shared_code->kind() != Code::FUNCTION) continue;
-    if (function->IsInRecompileQueue()) continue;
+    if (function->IsInOptimizationQueue()) continue;
 
     if (FLAG_always_osr &&
         shared_code->allow_osr_at_loop_nesting_level() == 0) {
@@ -198,8 +198,8 @@ void RuntimeProfiler::OptimizeNow() {
       }
       // Fall through and do a normal optimized compile as well.
     } else if (!frame->is_optimized() &&
-        (function->IsMarkedForLazyRecompilation() ||
-         function->IsMarkedForConcurrentRecompilation() ||
+        (function->IsMarkedForOptimization() ||
+         function->IsMarkedForConcurrentOptimization() ||
          function->IsOptimized())) {
       // Attempt OSR if we are still running unoptimized code even though the
       // the function has long been marked or even already been optimized.
index 7d23455..a175836 100644 (file)
@@ -2957,7 +2957,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SetCode) {
   Handle<SharedFunctionInfo> target_shared(target->shared());
   Handle<SharedFunctionInfo> source_shared(source->shared());
 
-  if (!JSFunction::EnsureCompiled(source, KEEP_EXCEPTION)) {
+  if (!Compiler::EnsureCompiled(source, KEEP_EXCEPTION)) {
     return Failure::Exception();
   }
 
@@ -8267,7 +8267,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_NewObject) {
 
   // The function should be compiled for the optimization hints to be
   // available.
-  JSFunction::EnsureCompiled(function, CLEAR_EXCEPTION);
+  Compiler::EnsureCompiled(function, CLEAR_EXCEPTION);
 
   Handle<SharedFunctionInfo> shared(function->shared(), isolate);
   if (!function->has_initial_map() &&
@@ -8299,42 +8299,53 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FinalizeInstanceSize) {
 }
 
 
-RUNTIME_FUNCTION(MaybeObject*, Runtime_LazyCompile) {
+RUNTIME_FUNCTION(MaybeObject*, Runtime_CompileUnoptimized) {
   HandleScope scope(isolate);
   ASSERT(args.length() == 1);
 
   Handle<JSFunction> function = args.at<JSFunction>(0);
 #ifdef DEBUG
   if (FLAG_trace_lazy && !function->shared()->is_compiled()) {
-    PrintF("[lazy: ");
+    PrintF("[unoptimized: ");
     function->PrintName();
     PrintF("]\n");
   }
 #endif
 
   // Compile the target function.
-  ASSERT(!function->is_compiled());
-  if (!JSFunction::CompileLazy(function, KEEP_EXCEPTION)) {
-    return Failure::Exception();
-  }
+  ASSERT(function->shared()->allows_lazy_compilation());
+
+  Handle<Code> code = Compiler::GetUnoptimizedCode(function);
+  RETURN_IF_EMPTY_HANDLE(isolate, code);
+  function->ReplaceCode(*code);
 
   // All done. Return the compiled code.
   ASSERT(function->is_compiled());
-  return function->code();
+  ASSERT(function->code()->kind() == Code::FUNCTION ||
+         (FLAG_always_opt &&
+          function->code()->kind() == Code::OPTIMIZED_FUNCTION));
+  return *code;
 }
 
 
-bool AllowOptimization(Isolate* isolate, Handle<JSFunction> function) {
-  // If the function is not compiled ignore the lazy
-  // recompilation. This can happen if the debugger is activated and
-  // the function is returned to the not compiled state.
-  if (!function->shared()->is_compiled()) return false;
-
-  // If the function is not optimizable or debugger is active continue using the
-  // code from the full compiler.
-  if (!isolate->use_crankshaft() ||
-      function->shared()->optimization_disabled() ||
-      isolate->DebuggerHasBreakPoints()) {
+RUNTIME_FUNCTION(MaybeObject*, Runtime_CompileOptimized) {
+  HandleScope scope(isolate);
+  ASSERT(args.length() == 2);
+  Handle<JSFunction> function = args.at<JSFunction>(0);
+  CONVERT_BOOLEAN_ARG_CHECKED(concurrent, 1);
+
+  Handle<Code> unoptimized(function->shared()->code());
+  if (!function->shared()->is_compiled()) {
+    // If the function is not compiled, do not optimize.
+    // This can happen if the debugger is activated and
+    // the function is returned to the not compiled state.
+    // TODO(yangguo): reconsider this.
+    function->ReplaceCode(function->shared()->code());
+  } else if (!isolate->use_crankshaft() ||
+             function->shared()->optimization_disabled() ||
+             isolate->DebuggerHasBreakPoints()) {
+    // If the function is not optimizable or debugger is active continue
+    // using the code from the full compiler.
     if (FLAG_trace_opt) {
       PrintF("[failed to optimize ");
       function->PrintName();
@@ -8342,53 +8353,21 @@ bool AllowOptimization(Isolate* isolate, Handle<JSFunction> function) {
           function->shared()->optimization_disabled() ? "F" : "T",
           isolate->DebuggerHasBreakPoints() ? "T" : "F");
     }
-    return false;
+    function->ReplaceCode(*unoptimized);
+  } else {
+    Compiler::ConcurrencyMode mode = concurrent ? Compiler::CONCURRENT
+                                                : Compiler::NOT_CONCURRENT;
+    Handle<Code> code = Compiler::GetOptimizedCode(function, unoptimized, mode);
+    function->ReplaceCode(code.is_null() ? *unoptimized : *code);
   }
-  return true;
-}
-
 
-RUNTIME_FUNCTION(MaybeObject*, Runtime_LazyRecompile) {
-  HandleScope scope(isolate);
-  ASSERT(args.length() == 1);
-  Handle<JSFunction> function = args.at<JSFunction>(0);
-
-  if (!AllowOptimization(isolate, function)) {
-    function->ReplaceCode(function->shared()->code());
-    return function->code();
-  }
-  function->shared()->code()->set_profiler_ticks(0);
-  if (JSFunction::CompileOptimized(function, CLEAR_EXCEPTION)) {
-    return function->code();
-  }
-  if (FLAG_trace_opt) {
-    PrintF("[failed to optimize ");
-    function->PrintName();
-    PrintF(": optimized compilation failed]\n");
-  }
-  function->ReplaceCode(function->shared()->code());
+  ASSERT(function->code()->kind() == Code::FUNCTION ||
+         function->code()->kind() == Code::OPTIMIZED_FUNCTION ||
+         function->IsInOptimizationQueue());
   return function->code();
 }
 
 
-RUNTIME_FUNCTION(MaybeObject*, Runtime_ConcurrentRecompile) {
-  HandleScope handle_scope(isolate);
-  ASSERT(args.length() == 1);
-  CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
-  if (!AllowOptimization(isolate, function)) {
-    function->ReplaceCode(function->shared()->code());
-    return isolate->heap()->undefined_value();
-  }
-  Handle<Code> shared_code(function->shared()->code());
-  shared_code->set_profiler_ticks(0);
-  ASSERT(isolate->concurrent_recompilation_enabled());
-  if (!Compiler::RecompileConcurrent(function, shared_code)) {
-    function->ReplaceCode(*shared_code);
-  }
-  return isolate->heap()->undefined_value();
-}
-
-
 class ActivationsFinder : public ThreadVisitor {
  public:
   Code* code_;
@@ -8529,7 +8508,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_OptimizeFunctionOnNextCall) {
   CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
 
   if (!function->IsOptimizable()) return isolate->heap()->undefined_value();
-  function->MarkForLazyRecompilation();
+  function->MarkForOptimization();
 
   Code* unoptimized = function->shared()->code();
   if (args.length() == 2 &&
@@ -8545,7 +8524,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_OptimizeFunctionOnNextCall) {
       }
     } else if (type->IsOneByteEqualTo(STATIC_ASCII_VECTOR("concurrent")) &&
                isolate->concurrent_recompilation_enabled()) {
-      function->MarkForConcurrentRecompilation();
+      function->MarkForConcurrentOptimization();
     }
   }
 
@@ -8579,7 +8558,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetOptimizationStatus) {
   CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
   if (isolate->concurrent_recompilation_enabled() &&
       sync_with_compiler_thread) {
-    while (function->IsInRecompileQueue()) {
+    while (function->IsInOptimizationQueue()) {
       isolate->optimizing_compiler_thread()->InstallOptimizedFunctions();
       OS::Sleep(50);
     }
@@ -8615,9 +8594,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetOptimizationCount) {
 
 static bool IsSuitableForOnStackReplacement(Isolate* isolate,
                                             Handle<JSFunction> function,
-                                            Handle<Code> unoptimized) {
+                                            Handle<Code> current_code) {
   // Keep track of whether we've succeeded in optimizing.
-  if (!isolate->use_crankshaft() || !unoptimized->optimizable()) return false;
+  if (!isolate->use_crankshaft() || !current_code->optimizable()) return false;
   // If we are trying to do OSR when there are already optimized
   // activations of the function, it means (a) the function is directly or
   // indirectly recursive and (b) an optimized invocation has been
@@ -8636,79 +8615,79 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CompileForOnStackReplacement) {
   HandleScope scope(isolate);
   ASSERT(args.length() == 1);
   CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
-  Handle<Code> unoptimized(function->shared()->code(), isolate);
+  Handle<Code> caller_code(function->shared()->code());
+
+  // We're not prepared to handle a function with arguments object.
+  ASSERT(!function->shared()->uses_arguments());
 
   // Passing the PC in the javascript frame from the caller directly is
   // not GC safe, so we walk the stack to get it.
   JavaScriptFrameIterator it(isolate);
   JavaScriptFrame* frame = it.frame();
-  if (!unoptimized->contains(frame->pc())) {
+  if (!caller_code->contains(frame->pc())) {
     // Code on the stack may not be the code object referenced by the shared
     // function info.  It may have been replaced to include deoptimization data.
-    unoptimized = Handle<Code>(frame->LookupCode());
+    caller_code = Handle<Code>(frame->LookupCode());
   }
 
-  uint32_t pc_offset = static_cast<uint32_t>(frame->pc() -
-                                             unoptimized->instruction_start());
+  uint32_t pc_offset = static_cast<uint32_t>(
+      frame->pc() - caller_code->instruction_start());
 
 #ifdef DEBUG
   ASSERT_EQ(frame->function(), *function);
-  ASSERT_EQ(frame->LookupCode(), *unoptimized);
-  ASSERT(unoptimized->contains(frame->pc()));
+  ASSERT_EQ(frame->LookupCode(), *caller_code);
+  ASSERT(caller_code->contains(frame->pc()));
 #endif  // DEBUG
 
-  // We're not prepared to handle a function with arguments object.
-  ASSERT(!function->shared()->uses_arguments());
 
+  BailoutId ast_id = caller_code->TranslatePcOffsetToAstId(pc_offset);
+  ASSERT(!ast_id.IsNone());
+
+  Compiler::ConcurrencyMode mode = isolate->concurrent_osr_enabled()
+      ? Compiler::CONCURRENT : Compiler::NOT_CONCURRENT;
   Handle<Code> result = Handle<Code>::null();
-  BailoutId ast_id = BailoutId::None();
 
-  if (isolate->concurrent_osr_enabled()) {
-    if (isolate->optimizing_compiler_thread()->
-            IsQueuedForOSR(function, pc_offset)) {
-      // Still waiting for the optimizing compiler thread to finish.  Carry on.
+  OptimizedCompileJob* job = NULL;
+  if (mode == Compiler::CONCURRENT) {
+    // Gate the OSR entry with a stack check.
+    BackEdgeTable::AddStackCheck(caller_code, pc_offset);
+    // Poll already queued compilation jobs.
+    OptimizingCompilerThread* thread = isolate->optimizing_compiler_thread();
+    if (thread->IsQueuedForOSR(function, ast_id)) {
       if (FLAG_trace_osr) {
-        PrintF("[COSR - polling recompile tasks for ");
+        PrintF("[OSR - Still waiting for queued: ");
         function->PrintName();
-        PrintF("]\n");
+        PrintF(" at AST id %d]\n", ast_id.ToInt());
       }
       return NULL;
     }
 
-    RecompileJob* job = isolate->optimizing_compiler_thread()->
-        FindReadyOSRCandidate(function, pc_offset);
+    job = thread->FindReadyOSRCandidate(function, ast_id);
+  }
 
-    if (job == NULL) {
-      if (IsSuitableForOnStackReplacement(isolate, function, unoptimized) &&
-          Compiler::RecompileConcurrent(function, unoptimized, pc_offset)) {
-        if (function->IsMarkedForLazyRecompilation() ||
-            function->IsMarkedForConcurrentRecompilation()) {
-          // Prevent regular recompilation if we queue this for OSR.
-          // TODO(yangguo): remove this as soon as OSR becomes one-shot.
-          function->ReplaceCode(function->shared()->code());
-        }
-        return NULL;
-      }
-      // Fall through to the end in case of failure.
-    } else {
-      // TODO(titzer): don't install the OSR code into the function.
-      ast_id = job->info()->osr_ast_id();
-      result = Compiler::InstallOptimizedCode(job);
+  if (job != NULL) {
+    if (FLAG_trace_osr) {
+      PrintF("[OSR - Found ready: ");
+      function->PrintName();
+      PrintF(" at AST id %d]\n", ast_id.ToInt());
     }
-  } else if (IsSuitableForOnStackReplacement(isolate, function, unoptimized)) {
-    ast_id = unoptimized->TranslatePcOffsetToAstId(pc_offset);
-    ASSERT(!ast_id.IsNone());
+    result = Compiler::GetConcurrentlyOptimizedCode(job);
+  } else if (result.is_null() &&
+             IsSuitableForOnStackReplacement(isolate, function, caller_code)) {
     if (FLAG_trace_osr) {
-      PrintF("[OSR - replacing at AST id %d in ", ast_id.ToInt());
+      PrintF("[OSR - Compiling: ");
       function->PrintName();
-      PrintF("]\n");
+      PrintF(" at AST id %d]\n", ast_id.ToInt());
+    }
+    result = Compiler::GetOptimizedCode(function, caller_code, mode, ast_id);
+    if (result.is_identical_to(isolate->builtins()->InOptimizationQueue())) {
+      // Optimization is queued.  Return to check later.
+      return NULL;
     }
-    // Attempt OSR compilation.
-    result = JSFunction::CompileOsr(function, ast_id, CLEAR_EXCEPTION);
   }
 
   // Revert the patched back edge table, regardless of whether OSR succeeds.
-  BackEdgeTable::Revert(isolate, *unoptimized);
+  BackEdgeTable::Revert(isolate, *caller_code);
 
   // Check whether we ended up with usable optimized code.
   if (!result.is_null() && result->kind() == Code::OPTIMIZED_FUNCTION) {
@@ -8718,26 +8697,27 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CompileForOnStackReplacement) {
     if (data->OsrPcOffset()->value() >= 0) {
       ASSERT(BailoutId(data->OsrAstId()->value()) == ast_id);
       if (FLAG_trace_osr) {
-        PrintF("[OSR - entry at AST id %d, offset %d in optimized code]\n",
+        PrintF("[OSR - Entry at AST id %d, offset %d in optimized code]\n",
                ast_id.ToInt(), data->OsrPcOffset()->value());
       }
       // TODO(titzer): this is a massive hack to make the deopt counts
       // match. Fix heuristics for reenabling optimizations!
       function->shared()->increment_deopt_count();
+
+      // TODO(titzer): Do not install code into the function.
+      function->ReplaceCode(*result);
       return *result;
     }
   }
 
+  // Failed.
   if (FLAG_trace_osr) {
-    PrintF("[OSR - optimization failed for ");
+    PrintF("[OSR - Failed: ");
     function->PrintName();
-    PrintF("]\n");
+    PrintF(" at AST id %d]\n", ast_id.ToInt());
   }
 
-  if (function->IsMarkedForLazyRecompilation() ||
-      function->IsMarkedForConcurrentRecompilation()) {
-    function->ReplaceCode(function->shared()->code());
-  }
+  function->ReplaceCode(function->shared()->code());
   return NULL;
 }
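
The sketch below models the decision flow that the hunks above give Runtime_CompileForOnStackReplacement. All type and helper names are simplified stand-ins for the V8 internals, and the InOptimizationQueue sentinel and tracing are left out; it is a reading aid under those assumptions, not code from this patch.

    // Self-contained model of the new OSR flow (stand-in types, not V8's).
    enum class ConcurrencyMode { kNotConcurrent, kConcurrent };

    struct Code {};                 // stands in for Handle<Code>
    struct OptimizedCompileJob {};  // stands in for a queued OSR compile job

    // Trivial stubs for the helpers used by the runtime function.
    static bool IsQueuedForOSR() { return false; }
    static OptimizedCompileJob* FindReadyOSRCandidate() { return nullptr; }
    static Code* GetConcurrentlyOptimizedCode(OptimizedCompileJob*) {
      return nullptr;
    }
    static Code* GetOptimizedCode(ConcurrencyMode) { return nullptr; }
    static bool IsSuitableForOnStackReplacement() { return true; }
    static void AddStackCheck() {}
    static void RevertBackEdgeTable() {}

    // Mirrors the order of checks above:
    //  (1) a job for this function/AST id is still queued -> keep waiting,
    //  (2) a queued job has finished                      -> take its code,
    //  (3) otherwise, if OSR is suitable here             -> request code now,
    //      which in kConcurrent mode may merely enqueue a new job.
    // The patched back edges are reverted before returning, except on the
    // early "still queued" return, so execution re-enters here later.
    static Code* OsrFlow(ConcurrencyMode mode) {
      OptimizedCompileJob* job = nullptr;
      if (mode == ConcurrencyMode::kConcurrent) {
        AddStackCheck();                       // gate the OSR entry point
        if (IsQueuedForOSR()) return nullptr;  // (1) still compiling
        job = FindReadyOSRCandidate();
      }
      Code* result = nullptr;
      if (job != nullptr) {
        result = GetConcurrentlyOptimizedCode(job);   // (2)
      } else if (IsSuitableForOnStackReplacement()) {
        result = GetOptimizedCode(mode);              // (3)
      }
      RevertBackEdgeTable();
      return result;  // NULL means no usable optimized code yet
    }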
 
@@ -9439,7 +9419,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_StackGuard) {
 }
 
 
-RUNTIME_FUNCTION(MaybeObject*, Runtime_TryInstallRecompiledCode) {
+RUNTIME_FUNCTION(MaybeObject*, Runtime_TryInstallOptimizedCode) {
   HandleScope scope(isolate);
   ASSERT(args.length() == 1);
   CONVERT_ARG_HANDLE_CHECKED(JSFunction, function, 0);
@@ -9698,13 +9678,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CompileString) {
   // Compile source string in the native context.
   ParseRestriction restriction = function_literal_only
       ? ONLY_SINGLE_FUNCTION_LITERAL : NO_PARSE_RESTRICTION;
-  Handle<SharedFunctionInfo> shared = Compiler::CompileEval(
-      source, context, true, CLASSIC_MODE, restriction, RelocInfo::kNoPosition);
-  RETURN_IF_EMPTY_HANDLE(isolate, shared);
-  Handle<JSFunction> fun =
-      isolate->factory()->NewFunctionFromSharedFunctionInfo(shared,
-                                                            context,
-                                                            NOT_TENURED);
+  Handle<JSFunction> fun = Compiler::GetFunctionFromEval(
+      source, context, CLASSIC_MODE, restriction, RelocInfo::kNoPosition);
+  RETURN_IF_EMPTY_HANDLE(isolate, fun);
   return *fun;
 }
 
@@ -9730,18 +9706,11 @@ static ObjectPair CompileGlobalEval(Isolate* isolate,
 
   // Deal with a normal eval call with a string argument. Compile it
   // and return the compiled function bound in the local context.
-  Handle<SharedFunctionInfo> shared = Compiler::CompileEval(
-      source,
-      context,
-      context->IsNativeContext(),
-      language_mode,
-      NO_PARSE_RESTRICTION,
-      scope_position);
-  RETURN_IF_EMPTY_HANDLE_VALUE(isolate, shared,
+  static const ParseRestriction restriction = NO_PARSE_RESTRICTION;
+  Handle<JSFunction> compiled = Compiler::GetFunctionFromEval(
+      source, context, language_mode, restriction, scope_position);
+  RETURN_IF_EMPTY_HANDLE_VALUE(isolate, compiled,
                                MakePair(Failure::Exception(), NULL));
-  Handle<JSFunction> compiled =
-      isolate->factory()->NewFunctionFromSharedFunctionInfo(
-          shared, context, NOT_TENURED);
   return MakePair(*compiled, *receiver);
 }
 
@@ -12573,7 +12542,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SetScriptBreakPoint) {
   if (!isolate->debug()->SetBreakPointForScript(script, break_point_object_arg,
                                                 &source_position,
                                                 alignment)) {
-    return  isolate->heap()->undefined_value();
+    return isolate->heap()->undefined_value();
   }
 
   return Smi::FromInt(source_position);
@@ -12733,18 +12702,14 @@ static MaybeObject* DebugEvaluate(Isolate* isolate,
     context = isolate->factory()->NewWithContext(closure, context, extension);
   }
 
-  Handle<SharedFunctionInfo> shared = Compiler::CompileEval(
-      source,
-      context,
-      context->IsNativeContext(),
-      CLASSIC_MODE,
-      NO_PARSE_RESTRICTION,
-      RelocInfo::kNoPosition);
-  RETURN_IF_EMPTY_HANDLE(isolate, shared);
-
   Handle<JSFunction> eval_fun =
-      isolate->factory()->NewFunctionFromSharedFunctionInfo(
-          shared, context, NOT_TENURED);
+      Compiler::GetFunctionFromEval(source,
+                                    context,
+                                    CLASSIC_MODE,
+                                    NO_PARSE_RESTRICTION,
+                                    RelocInfo::kNoPosition);
+  RETURN_IF_EMPTY_HANDLE(isolate, eval_fun);
+
   bool pending_exception;
   Handle<Object> result = Execution::Call(
       isolate, eval_fun, receiver, 0, NULL, &pending_exception);
@@ -13160,7 +13125,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugDisassembleFunction) {
   ASSERT(args.length() == 1);
   // Get the function and make sure it is compiled.
   CONVERT_ARG_HANDLE_CHECKED(JSFunction, func, 0);
-  if (!JSFunction::EnsureCompiled(func, KEEP_EXCEPTION)) {
+  if (!Compiler::EnsureCompiled(func, KEEP_EXCEPTION)) {
     return Failure::Exception();
   }
   func->code()->PrintLn();
@@ -13175,7 +13140,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DebugDisassembleConstructor) {
   ASSERT(args.length() == 1);
   // Get the function and make sure it is compiled.
   CONVERT_ARG_HANDLE_CHECKED(JSFunction, func, 0);
-  if (!JSFunction::EnsureCompiled(func, KEEP_EXCEPTION)) {
+  if (!Compiler::EnsureCompiled(func, KEEP_EXCEPTION)) {
     return Failure::Exception();
   }
   func->shared()->construct_stub()->PrintLn();
index fe8a26b..af6c112 100644 (file)
@@ -86,10 +86,9 @@ namespace internal {
   F(GetConstructorDelegate, 1, 1) \
   F(NewArgumentsFast, 3, 1) \
   F(NewStrictArgumentsFast, 3, 1) \
-  F(LazyCompile, 1, 1) \
-  F(LazyRecompile, 1, 1) \
-  F(ConcurrentRecompile, 1, 1) \
-  F(TryInstallRecompiledCode, 1, 1) \
+  F(CompileUnoptimized, 1, 1) \
+  F(CompileOptimized, 2, 1) \
+  F(TryInstallOptimizedCode, 1, 1) \
   F(NotifyDeoptimized, 1, 1) \
   F(NotifyStubFailure, 0, 1) \
   F(DeoptimizeFunction, 1, 1) \
index f2c955c..7803073 100644 (file)
@@ -73,8 +73,8 @@ void Builtins::Generate_Adaptor(MacroAssembler* masm,
 }
 
 
-static void CallRuntimePassFunction(MacroAssembler* masm,
-                                    Runtime::FunctionId function_id) {
+static void CallRuntimePassFunction(
+    MacroAssembler* masm, Runtime::FunctionId function_id) {
   FrameScope scope(masm, StackFrame::INTERNAL);
   // Push a copy of the function onto the stack.
   __ push(rdi);
@@ -101,7 +101,13 @@ static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
 }
 
 
-void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
+static void GenerateTailCallToReturnedCode(MacroAssembler* masm) {
+  __ lea(rax, FieldOperand(rax, Code::kHeaderSize));
+  __ jmp(rax);
+}
+
+
+void Builtins::Generate_InOptimizationQueue(MacroAssembler* masm) {
   // Checking whether the queued function is ready for install is optional,
   // since we come across interrupts and stack checks elsewhere.  However,
   // not checking may delay installing ready functions, and always checking
@@ -111,22 +117,14 @@ void Builtins::Generate_InRecompileQueue(MacroAssembler* masm) {
   __ CompareRoot(rsp, Heap::kStackLimitRootIndex);
   __ j(above_equal, &ok);
 
-  CallRuntimePassFunction(masm, Runtime::kTryInstallRecompiledCode);
-  // Tail call to returned code.
-  __ lea(rax, FieldOperand(rax, Code::kHeaderSize));
-  __ jmp(rax);
+  CallRuntimePassFunction(masm, Runtime::kTryInstallOptimizedCode);
+  GenerateTailCallToReturnedCode(masm);
 
   __ bind(&ok);
   GenerateTailCallToSharedCode(masm);
 }
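
The stack-limit comparison in Generate_InOptimizationQueue above is a cheap cue: only when an interrupt is already due does the builtin pay for the runtime call that tries to install finished optimized code. A minimal stand-alone sketch of that branch structure, with stand-in helpers in place of the real macro-assembler calls:

    // Model of Generate_InOptimizationQueue's two paths (stand-in helpers).
    typedef void (*CodeEntry)();

    static bool InterruptPending() { return false; }  // models rsp < stack limit
    static CodeEntry SharedUnoptimizedCode() { return nullptr; }
    static CodeEntry TryInstallOptimizedCodeRuntime() { return nullptr; }

    static CodeEntry InOptimizationQueueModel() {
      if (!InterruptPending()) {
        // rsp >= stack limit: skip the optional install check and keep
        // running the function's shared (unoptimized) code.
        return SharedUnoptimizedCode();
      }
      // rsp < stack limit: an interrupt is due anyway, so the extra runtime
      // call is cheap; tail-call whatever code it hands back.
      return TryInstallOptimizedCodeRuntime();
    }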
 
 
-void Builtins::Generate_ConcurrentRecompile(MacroAssembler* masm) {
-  CallRuntimePassFunction(masm, Runtime::kConcurrentRecompile);
-  GenerateTailCallToSharedCode(masm);
-}
-
-
 static void Generate_JSConstructStubHelper(MacroAssembler* masm,
                                            bool is_api_function,
                                            bool count_constructions) {
@@ -573,19 +571,41 @@ void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
 }
 
 
-void Builtins::Generate_LazyCompile(MacroAssembler* masm) {
-  CallRuntimePassFunction(masm, Runtime::kLazyCompile);
-  // Do a tail-call of the compiled function.
-  __ lea(rax, FieldOperand(rax, Code::kHeaderSize));
-  __ jmp(rax);
+void Builtins::Generate_CompileUnoptimized(MacroAssembler* masm) {
+  CallRuntimePassFunction(masm, Runtime::kCompileUnoptimized);
+  GenerateTailCallToReturnedCode(masm);
 }
 
 
-void Builtins::Generate_LazyRecompile(MacroAssembler* masm) {
-  CallRuntimePassFunction(masm, Runtime::kLazyRecompile);
-  // Do a tail-call of the compiled function.
-  __ lea(rax, FieldOperand(rax, Code::kHeaderSize));
-  __ jmp(rax);
+static void CallCompileOptimized(MacroAssembler* masm,
+                                 bool concurrent) {
+  FrameScope scope(masm, StackFrame::INTERNAL);
+  // Push a copy of the function onto the stack.
+  __ push(rdi);
+  // Push call kind information.
+  __ push(rcx);
+  // Function is also the parameter to the runtime call.
+  __ push(rdi);
+  // Whether to compile in a background thread.
+  __ Push(masm->isolate()->factory()->ToBoolean(concurrent));
+
+  __ CallRuntime(Runtime::kCompileOptimized, 2);
+  // Restore call kind information.
+  __ pop(rcx);
+  // Restore receiver.
+  __ pop(rdi);
+}
+
+
+void Builtins::Generate_CompileOptimized(MacroAssembler* masm) {
+  CallCompileOptimized(masm, false);
+  GenerateTailCallToReturnedCode(masm);
+}
+
+
+void Builtins::Generate_CompileOptimizedConcurrent(MacroAssembler* masm) {
+  CallCompileOptimized(masm, true);
+  GenerateTailCallToReturnedCode(masm);
 }
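
A detail of CallCompileOptimized above that is easy to misread: of its four pushes, only the last two (the function and the ToBoolean(concurrent) flag) are the arguments consumed by CallRuntime(Runtime::kCompileOptimized, 2), matching the F(CompileOptimized, 2, 1) declaration in runtime.h; the first two merely save rdi and rcx across the call and are popped back afterwards. This is also why the old LazyRecompile/ConcurrentRecompile pair collapses into a single runtime entry selected by a boolean. A small stand-alone model of that stack discipline (plain values in place of registers, not generated code):

    #include <cassert>
    #include <string>
    #include <vector>

    // Model of the stack discipline in CallCompileOptimized.
    static void CallCompileOptimizedModel(bool concurrent) {
      std::vector<std::string> stack;
      stack.push_back("saved rdi (copy of the function)");
      stack.push_back("saved rcx (call kind information)");
      stack.push_back("arg 0: the function");
      stack.push_back(concurrent ? "arg 1: true" : "arg 1: false");

      // CallRuntime(Runtime::kCompileOptimized, 2) consumes the two
      // arguments on top of the stack.
      stack.pop_back();
      stack.pop_back();

      // The builtin then pops the saved values back into rcx and rdi.
      stack.pop_back();  // -> rcx
      stack.pop_back();  // -> rdi
      assert(stack.empty());
    }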
 
 
index 4bedbb9..48de81f 100644 (file)
@@ -104,16 +104,15 @@ static Handle<JSFunction> Compile(const char* source) {
   Handle<String> source_code(
       isolate->factory()->NewStringFromUtf8(CStrVector(source)));
   Handle<SharedFunctionInfo> shared_function =
-      Compiler::Compile(source_code,
-                        Handle<String>(),
-                        0,
-                        0,
-                        false,
-                        Handle<Context>(isolate->native_context()),
-                        NULL,
-                        NULL,
-                        Handle<String>::null(),
-                        NOT_NATIVES_CODE);
+      Compiler::CompileScript(source_code,
+                              Handle<String>(),
+                              0,
+                              0,
+                              false,
+                              Handle<Context>(isolate->native_context()),
+                              NULL, NULL,
+                              Handle<String>::null(),
+                              NOT_NATIVES_CODE);
   return isolate->factory()->NewFunctionFromSharedFunctionInfo(
       shared_function, isolate->native_context());
 }
index 699534f..2a20790 100644 (file)
@@ -59,3 +59,5 @@ assertUnoptimized(add_field, "no sync");
 %UnblockConcurrentRecompilation();
 // Sync with background thread to conclude optimization that bailed out.
 assertUnoptimized(add_field, "sync");
+// Clear type info for stress runs.
+%ClearFunctionTypeFeedback(add_field);
index e126465..3abf292 100644 (file)
@@ -54,3 +54,5 @@ assertUnoptimized(f, "no sync");
 // Optimization eventually bails out due to map dependency.
 assertUnoptimized(f, "sync");
 assertEquals(2, f(o));
+// Clear type info for stress runs.
+%ClearFunctionTypeFeedback(f);
index d5b1b99..1b6f97b 100644 (file)
@@ -55,3 +55,5 @@ assertUnoptimized(f1, "no sync");
 // Sync with background thread to conclude optimization, which bails out
 // due to map dependency.
 assertUnoptimized(f1, "sync");
+// Clear type info for stress runs.
+%ClearFunctionTypeFeedback(f1);
index 88d505a..98dd83a 100644 (file)
@@ -149,9 +149,9 @@ var knownProblems = {
   "PushCatchContext": true,
   "PushBlockContext": true,
   "PushModuleContext": true,
-  "LazyCompile": true,
-  "LazyRecompile": true,
-  "ConcurrentRecompile": true,
+  "CompileUnoptimized": true,
+  "CompileOptimized": true,
+  "CompileOptimizedConcurrent": true,
   "NotifyDeoptimized": true,
   "NotifyStubFailure": true,
   "NotifyOSR": true,
index e86a05d..b4a474c 100644 (file)
@@ -149,9 +149,9 @@ var knownProblems = {
   "PushCatchContext": true,
   "PushBlockContext": true,
   "PushModuleContext": true,
-  "LazyCompile": true,
-  "LazyRecompile": true,
-  "ConcurrentRecompile": true,
+  "CompileUnoptimized": true,
+  "CompileOptimized": true,
+  "CompileOptimizedConcurrent": true,
   "NotifyDeoptimized": true,
   "NotifyStubFailure": true,
   "NotifyOSR": true,
index 0d5ec6f..121b8f5 100644 (file)
@@ -149,9 +149,9 @@ var knownProblems = {
   "PushCatchContext": true,
   "PushBlockContext": true,
   "PushModuleContext": true,
-  "LazyCompile": true,
-  "LazyRecompile": true,
-  "ConcurrentRecompile": true,
+  "CompileUnoptimized": true,
+  "CompileOptimized": true,
+  "CompileOptimizedConcurrent": true,
   "NotifyDeoptimized": true,
   "NotifyStubFailure": true,
   "NotifyOSR": true,
index a17a840..f7a4f7e 100644 (file)
@@ -149,9 +149,9 @@ var knownProblems = {
   "PushCatchContext": true,
   "PushBlockContext": true,
   "PushModuleContext": true,
-  "LazyCompile": true,
-  "LazyRecompile": true,
-  "ConcurrentRecompile": true,
+  "CompileUnoptimized": true,
+  "CompileOptimized": true,
+  "CompileOptimizedConcurrent": true,
   "NotifyDeoptimized": true,
   "NotifyStubFailure": true,
   "NotifyOSR": true,
index d1afa36..363557b 100644 (file)
@@ -25,7 +25,7 @@
 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
-// Flags: --use-osr --allow-natives-syntax
+// Flags: --use-osr --allow-natives-syntax --no-concurrent-osr
 
 function f() {
   do {