Rename "OptimizingCompiler" to the more suitable "RecompileJob".
author: yangguo@chromium.org <yangguo@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
Wed, 25 Sep 2013 10:01:18 +0000 (10:01 +0000)
committer: yangguo@chromium.org <yangguo@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
Wed, 25 Sep 2013 10:01:18 +0000 (10:01 +0000)
R=mvstanton@chromium.org
BUG=

Review URL: https://codereview.chromium.org/24543002

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@16939 ce2b1a6d-e550-0410-aec6-3dcde31c8c00

src/compiler.cc
src/compiler.h
src/optimizing-compiler-thread.cc
src/optimizing-compiler-thread.h
src/runtime.cc

index 47634ec..01e261a 100644 (file)
@@ -260,7 +260,7 @@ static bool AlwaysFullCompiler(Isolate* isolate) {
 }
 
 
-void OptimizingCompiler::RecordOptimizationStats() {
+void RecompileJob::RecordOptimizationStats() {
   Handle<JSFunction> function = info()->closure();
   int opt_count = function->shared()->opt_count();
   function->shared()->set_opt_count(opt_count + 1);
@@ -297,23 +297,23 @@ void OptimizingCompiler::RecordOptimizationStats() {
 // A return value of true indicates the compilation pipeline is still
 // going, not necessarily that we optimized the code.
 static bool MakeCrankshaftCode(CompilationInfo* info) {
-  OptimizingCompiler compiler(info);
-  OptimizingCompiler::Status status = compiler.CreateGraph();
+  RecompileJob job(info);
+  RecompileJob::Status status = job.CreateGraph();
 
-  if (status != OptimizingCompiler::SUCCEEDED) {
-    return status != OptimizingCompiler::FAILED;
+  if (status != RecompileJob::SUCCEEDED) {
+    return status != RecompileJob::FAILED;
   }
-  status = compiler.OptimizeGraph();
-  if (status != OptimizingCompiler::SUCCEEDED) {
-    status = compiler.AbortOptimization();
-    return status != OptimizingCompiler::FAILED;
+  status = job.OptimizeGraph();
+  if (status != RecompileJob::SUCCEEDED) {
+    status = job.AbortOptimization();
+    return status != RecompileJob::FAILED;
   }
-  status = compiler.GenerateAndInstallCode();
-  return status != OptimizingCompiler::FAILED;
+  status = job.GenerateAndInstallCode();
+  return status != RecompileJob::FAILED;
 }
 
 
-OptimizingCompiler::Status OptimizingCompiler::CreateGraph() {
+RecompileJob::Status RecompileJob::CreateGraph() {
   ASSERT(isolate()->use_crankshaft());
   ASSERT(info()->IsOptimizing());
   ASSERT(!info()->IsCompilingForDebugging());
@@ -452,7 +452,7 @@ OptimizingCompiler::Status OptimizingCompiler::CreateGraph() {
 }
 
 
-OptimizingCompiler::Status OptimizingCompiler::OptimizeGraph() {
+RecompileJob::Status RecompileJob::OptimizeGraph() {
   DisallowHeapAllocation no_allocation;
   DisallowHandleAllocation no_handles;
   DisallowHandleDereference no_deref;
@@ -475,7 +475,7 @@ OptimizingCompiler::Status OptimizingCompiler::OptimizeGraph() {
 }
 
 
-OptimizingCompiler::Status OptimizingCompiler::GenerateAndInstallCode() {
+RecompileJob::Status RecompileJob::GenerateAndInstallCode() {
   ASSERT(last_status() == SUCCEEDED);
   ASSERT(!info()->HasAbortedDueToDependencyChange());
   DisallowCodeDependencyChange no_dependency_change;
@@ -1032,16 +1032,15 @@ bool Compiler::RecompileConcurrent(Handle<JSFunction> closure,
       info->SaveHandles();
 
       if (Rewriter::Rewrite(*info) && Scope::Analyze(*info)) {
-        OptimizingCompiler* compiler =
-            new(info->zone()) OptimizingCompiler(*info);
-        OptimizingCompiler::Status status = compiler->CreateGraph();
-        if (status == OptimizingCompiler::SUCCEEDED) {
+        RecompileJob* job = new(info->zone()) RecompileJob(*info);
+        RecompileJob::Status status = job->CreateGraph();
+        if (status == RecompileJob::SUCCEEDED) {
           info.Detach();
           shared->code()->set_profiler_ticks(0);
-          isolate->optimizing_compiler_thread()->QueueForOptimization(compiler);
+          isolate->optimizing_compiler_thread()->QueueForOptimization(job);
           ASSERT(!isolate->has_pending_exception());
           return true;
-        } else if (status == OptimizingCompiler::BAILED_OUT) {
+        } else if (status == RecompileJob::BAILED_OUT) {
           isolate->clear_pending_exception();
           InstallFullCode(*info);
         }
@@ -1054,9 +1053,8 @@ bool Compiler::RecompileConcurrent(Handle<JSFunction> closure,
 }
 
 
-Handle<Code> Compiler::InstallOptimizedCode(
-    OptimizingCompiler* optimizing_compiler) {
-  SmartPointer<CompilationInfo> info(optimizing_compiler->info());
+Handle<Code> Compiler::InstallOptimizedCode(RecompileJob* job) {
+  SmartPointer<CompilationInfo> info(job->info());
   // The function may have already been optimized by OSR.  Simply continue.
   // Except when OSR already disabled optimization for some reason.
   if (info->shared_info()->optimization_disabled()) {
@@ -1077,24 +1075,24 @@ Handle<Code> Compiler::InstallOptimizedCode(
       isolate, Logger::TimerEventScope::v8_recompile_synchronous);
   // If crankshaft succeeded, install the optimized code else install
   // the unoptimized code.
-  OptimizingCompiler::Status status = optimizing_compiler->last_status();
+  RecompileJob::Status status = job->last_status();
   if (info->HasAbortedDueToDependencyChange()) {
     info->set_bailout_reason(kBailedOutDueToDependencyChange);
-    status = optimizing_compiler->AbortOptimization();
-  } else if (status != OptimizingCompiler::SUCCEEDED) {
+    status = job->AbortOptimization();
+  } else if (status != RecompileJob::SUCCEEDED) {
     info->set_bailout_reason(kFailedBailedOutLastTime);
-    status = optimizing_compiler->AbortOptimization();
+    status = job->AbortOptimization();
   } else if (isolate->DebuggerHasBreakPoints()) {
     info->set_bailout_reason(kDebuggerIsActive);
-    status = optimizing_compiler->AbortOptimization();
+    status = job->AbortOptimization();
   } else {
-    status = optimizing_compiler->GenerateAndInstallCode();
-    ASSERT(status == OptimizingCompiler::SUCCEEDED ||
-           status == OptimizingCompiler::BAILED_OUT);
+    status = job->GenerateAndInstallCode();
+    ASSERT(status == RecompileJob::SUCCEEDED ||
+           status == RecompileJob::BAILED_OUT);
   }
 
   InstallCodeCommon(*info);
-  if (status == OptimizingCompiler::SUCCEEDED) {
+  if (status == RecompileJob::SUCCEEDED) {
     Handle<Code> code = info->code();
     ASSERT(info->shared_info()->scope_info() != ScopeInfo::Empty(isolate));
     info->closure()->ReplaceCode(*code);
@@ -1115,8 +1113,8 @@ Handle<Code> Compiler::InstallOptimizedCode(
   // profiler ticks to prevent too soon re-opt after a deopt.
   info->shared_info()->code()->set_profiler_ticks(0);
   ASSERT(!info->closure()->IsInRecompileQueue());
-  return (status == OptimizingCompiler::SUCCEEDED) ? info->code()
-                                                   : Handle<Code>::null();
+  return (status == RecompileJob::SUCCEEDED) ? info->code()
+                                             : Handle<Code>::null();
 }
 
 
index 96b71af..dc7c19f 100644 (file)
@@ -506,9 +506,9 @@ class LChunk;
 // fail, bail-out to the full code generator or succeed.  Apart from
 // their return value, the status of the phase last run can be checked
 // using last_status().
-class OptimizingCompiler: public ZoneObject {
+class RecompileJob: public ZoneObject {
  public:
-  explicit OptimizingCompiler(CompilationInfo* info)
+  explicit RecompileJob(CompilationInfo* info)
       : info_(info),
         graph_builder_(NULL),
         graph_(NULL),
@@ -558,9 +558,8 @@ class OptimizingCompiler: public ZoneObject {
   void RecordOptimizationStats();
 
   struct Timer {
-    Timer(OptimizingCompiler* compiler, TimeDelta* location)
-        : compiler_(compiler),
-          location_(location) {
+    Timer(RecompileJob* job, TimeDelta* location)
+        : job_(job), location_(location) {
       ASSERT(location_ != NULL);
       timer_.Start();
     }
@@ -569,7 +568,7 @@ class OptimizingCompiler: public ZoneObject {
       *location_ += timer_.Elapsed();
     }
 
-    OptimizingCompiler* compiler_;
+    RecompileJob* job_;
     ElapsedTimer timer_;
     TimeDelta* location_;
   };
@@ -634,7 +633,7 @@ class Compiler : public AllStatic {
                               bool is_toplevel,
                               Handle<Script> script);
 
-  static Handle<Code> InstallOptimizedCode(OptimizingCompiler* info);
+  static Handle<Code> InstallOptimizedCode(RecompileJob* job);
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
   static bool MakeCodeForLiveEdit(CompilationInfo* info);
index 2a327ff..1f72cc0 100644 (file)
@@ -94,28 +94,29 @@ void OptimizingCompilerThread::Run() {
 
 
 void OptimizingCompilerThread::CompileNext() {
-  OptimizingCompiler* optimizing_compiler = NULL;
-  bool result = input_queue_.Dequeue(&optimizing_compiler);
+  RecompileJob* job = NULL;
+  bool result = input_queue_.Dequeue(&job);
   USE(result);
   ASSERT(result);
   Barrier_AtomicIncrement(&queue_length_, static_cast<Atomic32>(-1));
 
   // The function may have already been optimized by OSR.  Simply continue.
-  OptimizingCompiler::Status status = optimizing_compiler->OptimizeGraph();
+  RecompileJob::Status status = job->OptimizeGraph();
   USE(status);   // Prevent an unused-variable error in release mode.
-  ASSERT(status != OptimizingCompiler::FAILED);
+  ASSERT(status != RecompileJob::FAILED);
 
   // The function may have already been optimized by OSR.  Simply continue.
   // Use a mutex to make sure that functions marked for install
   // are always also queued.
   LockGuard<Mutex> access_queue(&queue_mutex_);
-  output_queue_.Enqueue(optimizing_compiler);
+  output_queue_.Enqueue(job);
   isolate_->stack_guard()->RequestInstallCode();
 }
 
 
-static void DisposeOptimizingCompiler(OptimizingCompiler* compiler,
-                                      bool restore_function_code) {
+static void DisposeRecompileJob(RecompileJob* compiler,
+                                bool restore_function_code) {
+  // The recompile job is allocated in the CompilationInfo's zone.
   CompilationInfo* info = compiler->info();
   if (restore_function_code) {
     Handle<JSFunction> function = info->closure();
@@ -126,15 +127,14 @@ static void DisposeOptimizingCompiler(OptimizingCompiler* compiler,
 
 
 void OptimizingCompilerThread::FlushInputQueue(bool restore_function_code) {
-  OptimizingCompiler* optimizing_compiler;
-  // The optimizing compiler is allocated in the CompilationInfo's zone.
-  while (input_queue_.Dequeue(&optimizing_compiler)) {
+  RecompileJob* job;
+  while (input_queue_.Dequeue(&job)) {
     // This should not block, since we have one signal on the input queue
     // semaphore corresponding to each element in the input queue.
     input_queue_semaphore_.Wait();
-    if (optimizing_compiler->info()->osr_ast_id().IsNone()) {
+    if (job->info()->osr_ast_id().IsNone()) {
       // OSR jobs are dealt with separately.
-      DisposeOptimizingCompiler(optimizing_compiler, restore_function_code);
+      DisposeRecompileJob(job, restore_function_code);
     }
   }
   Release_Store(&queue_length_, static_cast<AtomicWord>(0));
@@ -142,27 +142,24 @@ void OptimizingCompilerThread::FlushInputQueue(bool restore_function_code) {
 
 
 void OptimizingCompilerThread::FlushOutputQueue(bool restore_function_code) {
-  OptimizingCompiler* optimizing_compiler;
-  // The optimizing compiler is allocated in the CompilationInfo's zone.
+  RecompileJob* job;
   while (true) {
     { LockGuard<Mutex> access_queue(&queue_mutex_);
-      if (!output_queue_.Dequeue(&optimizing_compiler)) break;
+      if (!output_queue_.Dequeue(&job)) break;
     }
-    if (optimizing_compiler->info()->osr_ast_id().IsNone()) {
+    if (job->info()->osr_ast_id().IsNone()) {
       // OSR jobs are dealt with separately.
-      DisposeOptimizingCompiler(optimizing_compiler, restore_function_code);
+      DisposeRecompileJob(job, restore_function_code);
     }
   }
 }
 
 
 void OptimizingCompilerThread::FlushOsrBuffer(bool restore_function_code) {
-  OptimizingCompiler* optimizing_compiler;
+  RecompileJob* job;
   for (int i = 0; i < osr_buffer_size_; i++) {
-    optimizing_compiler = osr_buffer_[i];
-    if (optimizing_compiler != NULL) {
-      DisposeOptimizingCompiler(optimizing_compiler, restore_function_code);
-    }
+    job = osr_buffer_[i];
+    if (job != NULL) DisposeRecompileJob(job, restore_function_code);
   }
   osr_cursor_ = 0;
 }
@@ -215,14 +212,14 @@ void OptimizingCompilerThread::InstallOptimizedFunctions() {
   ASSERT(!IsOptimizerThread());
   HandleScope handle_scope(isolate_);
 
-  OptimizingCompiler* compiler;
+  RecompileJob* job;
   while (true) {
     { LockGuard<Mutex> access_queue(&queue_mutex_);
-      if (!output_queue_.Dequeue(&compiler)) break;
+      if (!output_queue_.Dequeue(&job)) break;
     }
-    CompilationInfo* info = compiler->info();
+    CompilationInfo* info = job->info();
     if (info->osr_ast_id().IsNone()) {
-      Compiler::InstallOptimizedCode(compiler);
+      Compiler::InstallOptimizedCode(job);
     } else {
       if (FLAG_trace_osr) {
         PrintF("[COSR - ");
@@ -230,19 +227,18 @@ void OptimizingCompilerThread::InstallOptimizedFunctions() {
         PrintF(" is ready for install and entry at AST id %d]\n",
                info->osr_ast_id().ToInt());
       }
-      compiler->WaitForInstall();
+      job->WaitForInstall();
       BackEdgeTable::RemoveStackCheck(info);
     }
   }
 }
 
 
-void OptimizingCompilerThread::QueueForOptimization(
-    OptimizingCompiler* optimizing_compiler) {
+void OptimizingCompilerThread::QueueForOptimization(RecompileJob* job) {
   ASSERT(IsQueueAvailable());
   ASSERT(!IsOptimizerThread());
   Barrier_AtomicIncrement(&queue_length_, static_cast<Atomic32>(1));
-  CompilationInfo* info = optimizing_compiler->info();
+  CompilationInfo* info = job->info();
   if (info->osr_ast_id().IsNone()) {
     info->closure()->MarkInRecompileQueue();
   } else {
@@ -251,19 +247,19 @@ void OptimizingCompilerThread::QueueForOptimization(
       info->closure()->PrintName();
       PrintF(" for concurrent on-stack replacement.\n");
     }
-    AddToOsrBuffer(optimizing_compiler);
+    AddToOsrBuffer(job);
     osr_attempts_++;
     BackEdgeTable::AddStackCheck(info);
   }
-  input_queue_.Enqueue(optimizing_compiler);
+  input_queue_.Enqueue(job);
   input_queue_semaphore_.Signal();
 }
 
 
-OptimizingCompiler* OptimizingCompilerThread::FindReadyOSRCandidate(
+RecompileJob* OptimizingCompilerThread::FindReadyOSRCandidate(
     Handle<JSFunction> function, uint32_t osr_pc_offset) {
   ASSERT(!IsOptimizerThread());
-  OptimizingCompiler* result = NULL;
+  RecompileJob* result = NULL;
   for (int i = 0; i < osr_buffer_size_; i++) {
     result = osr_buffer_[i];
     if (result == NULL) continue;
@@ -303,11 +299,11 @@ bool OptimizingCompilerThread::IsQueuedForOSR(JSFunction* function) {
 }
 
 
-void OptimizingCompilerThread::AddToOsrBuffer(OptimizingCompiler* compiler) {
+void OptimizingCompilerThread::AddToOsrBuffer(RecompileJob* job) {
   ASSERT(!IsOptimizerThread());
   // Store into next empty slot or replace next stale OSR job that's waiting
   // in vain.  Dispose in the latter case.
-  OptimizingCompiler* stale;
+  RecompileJob* stale;
   while (true) {
     stale = osr_buffer_[osr_cursor_];
     if (stale == NULL) break;
@@ -319,13 +315,13 @@ void OptimizingCompilerThread::AddToOsrBuffer(OptimizingCompiler* compiler) {
         PrintF(", AST id %d]\n", info->osr_ast_id().ToInt());
       }
       BackEdgeTable::RemoveStackCheck(info);
-      DisposeOptimizingCompiler(stale, false);
+      DisposeRecompileJob(stale, false);
       break;
     }
     AdvanceOsrCursor();
   }
 
-  osr_buffer_[osr_cursor_] = compiler;
+  osr_buffer_[osr_cursor_] = job;
   AdvanceOsrCursor();
 }
 
index ee06c22..2165a4f 100644 (file)
@@ -40,7 +40,7 @@ namespace v8 {
 namespace internal {
 
 class HOptimizedGraphBuilder;
-class OptimizingCompiler;
+class RecompileJob;
 class SharedFunctionInfo;
 
 class OptimizingCompilerThread : public Thread {
@@ -60,7 +60,7 @@ class OptimizingCompilerThread : public Thread {
     NoBarrier_Store(&queue_length_, static_cast<AtomicWord>(0));
     if (FLAG_concurrent_osr) {
       osr_buffer_size_ = FLAG_concurrent_recompilation_queue_length + 4;
-      osr_buffer_ = NewArray<OptimizingCompiler*>(osr_buffer_size_);
+      osr_buffer_ = NewArray<RecompileJob*>(osr_buffer_size_);
       for (int i = 0; i < osr_buffer_size_; i++) osr_buffer_[i] = NULL;
     }
   }
@@ -72,10 +72,10 @@ class OptimizingCompilerThread : public Thread {
   void Run();
   void Stop();
   void Flush();
-  void QueueForOptimization(OptimizingCompiler* optimizing_compiler);
+  void QueueForOptimization(RecompileJob* job);
   void InstallOptimizedFunctions();
-  OptimizingCompiler* FindReadyOSRCandidate(Handle<JSFunction> function,
-                                            uint32_t osr_pc_offset);
+  RecompileJob* FindReadyOSRCandidate(Handle<JSFunction> function,
+                                      uint32_t osr_pc_offset);
   bool IsQueuedForOSR(Handle<JSFunction> function, uint32_t osr_pc_offset);
 
   bool IsQueuedForOSR(JSFunction* function);
@@ -108,7 +108,7 @@ class OptimizingCompilerThread : public Thread {
 
   // Add a recompilation task for OSR to the cyclic buffer, awaiting OSR entry.
   // Tasks evicted from the cyclic buffer are discarded.
-  void AddToOsrBuffer(OptimizingCompiler* compiler);
+  void AddToOsrBuffer(RecompileJob* job);
   void AdvanceOsrCursor() {
     osr_cursor_ = (osr_cursor_ + 1) % osr_buffer_size_;
   }
@@ -123,13 +123,13 @@ class OptimizingCompilerThread : public Thread {
   Semaphore input_queue_semaphore_;
 
   // Queue of incoming recompilation tasks (including OSR).
-  UnboundQueue<OptimizingCompiler*> input_queue_;
+  UnboundQueue<RecompileJob*> input_queue_;
   // Queue of recompilation tasks ready to be installed (excluding OSR).
-  UnboundQueue<OptimizingCompiler*> output_queue_;
+  UnboundQueue<RecompileJob*> output_queue_;
   // Cyclic buffer of recompilation tasks for OSR.
   // TODO(yangguo): This may keep zombie tasks indefinitely, holding on to
   //                a lot of memory.  Fix this.
-  OptimizingCompiler** osr_buffer_;
+  RecompileJob** osr_buffer_;
   // Cursor for the cyclic buffer.
   int osr_cursor_;
   int osr_buffer_size_;
index 6d8cc5d..f097681 100644 (file)
@@ -8621,10 +8621,10 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CompileForOnStackReplacement) {
       return NULL;
     }
 
-    OptimizingCompiler* compiler = isolate->optimizing_compiler_thread()->
+    RecompileJob* job = isolate->optimizing_compiler_thread()->
         FindReadyOSRCandidate(function, pc_offset);
 
-    if (compiler == NULL) {
+    if (job == NULL) {
       if (IsSuitableForOnStackReplacement(isolate, function, unoptimized) &&
           Compiler::RecompileConcurrent(function, pc_offset)) {
         if (function->IsMarkedForLazyRecompilation() ||
@@ -8638,8 +8638,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CompileForOnStackReplacement) {
       // Fall through to the end in case of failure.
     } else {
       // TODO(titzer): don't install the OSR code into the function.
-      ast_id = compiler->info()->osr_ast_id();
-      result = Compiler::InstallOptimizedCode(compiler);
+      ast_id = job->info()->osr_ast_id();
+      result = Compiler::InstallOptimizedCode(job);
     }
   } else if (IsSuitableForOnStackReplacement(isolate, function, unoptimized)) {
     ast_id = unoptimized->TranslatePcOffsetToAstId(pc_offset);