Upstream version 7.36.149.0
[platform/framework/web/crosswalk.git] / src / v8 / src / compiler.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "v8.h"
6
7 #include "compiler.h"
8
9 #include "bootstrapper.h"
10 #include "codegen.h"
11 #include "compilation-cache.h"
12 #include "cpu-profiler.h"
13 #include "debug.h"
14 #include "deoptimizer.h"
15 #include "full-codegen.h"
16 #include "gdb-jit.h"
17 #include "typing.h"
18 #include "hydrogen.h"
19 #include "isolate-inl.h"
20 #include "lithium.h"
21 #include "liveedit.h"
22 #include "parser.h"
23 #include "rewriter.h"
24 #include "runtime-profiler.h"
25 #include "scanner-character-streams.h"
26 #include "scopeinfo.h"
27 #include "scopes.h"
28 #include "vm-state-inl.h"
29
30 namespace v8 {
31 namespace internal {
32
33
// Compilation of a whole script (eager, top-level): starts in sloppy mode
// and BASE (unoptimized) mode; all AST allocations go into |zone|.
CompilationInfo::CompilationInfo(Handle<Script> script,
                                 Zone* zone)
    : flags_(StrictModeField::encode(SLOPPY)),
      script_(script),
      osr_ast_id_(BailoutId::None()),
      parameter_count_(0),
      this_has_uses_(true),
      optimization_id_(-1) {
  Initialize(script->GetIsolate(), BASE, zone);
}
44
45
// Lazy compilation of a function for which only the SharedFunctionInfo is
// available (no live closure/context).  The owning script is recovered from
// the shared info.
CompilationInfo::CompilationInfo(Handle<SharedFunctionInfo> shared_info,
                                 Zone* zone)
    : flags_(StrictModeField::encode(SLOPPY) | IsLazy::encode(true)),
      shared_info_(shared_info),
      script_(Handle<Script>(Script::cast(shared_info->script()))),
      osr_ast_id_(BailoutId::None()),
      parameter_count_(0),
      this_has_uses_(true),
      optimization_id_(-1) {
  Initialize(script_->GetIsolate(), BASE, zone);
}
57
58
// Lazy compilation of a live closure: shared info, script and context are
// all derived from the closure itself.
CompilationInfo::CompilationInfo(Handle<JSFunction> closure,
                                 Zone* zone)
    : flags_(StrictModeField::encode(SLOPPY) | IsLazy::encode(true)),
      closure_(closure),
      shared_info_(Handle<SharedFunctionInfo>(closure->shared())),
      script_(Handle<Script>(Script::cast(shared_info_->script()))),
      context_(closure->context()),
      osr_ast_id_(BailoutId::None()),
      parameter_count_(0),
      this_has_uses_(true),
      optimization_id_(-1) {
  Initialize(script_->GetIsolate(), BASE, zone);
}
72
73
// Compilation of a Hydrogen code stub: no script or shared info; the stub
// pointer is stored after Initialize() because Initialize() clears
// code_stub_ as part of the common field setup.
CompilationInfo::CompilationInfo(HydrogenCodeStub* stub,
                                 Isolate* isolate,
                                 Zone* zone)
    : flags_(StrictModeField::encode(SLOPPY) | IsLazy::encode(true)),
      osr_ast_id_(BailoutId::None()),
      parameter_count_(0),
      this_has_uses_(true),
      optimization_id_(-1) {
  Initialize(isolate, STUB, zone);
  code_stub_ = stub;
}
85
86
// Common field initialization shared by all constructors.  Note that stub
// compilations return early: everything after the STUB check (script type,
// strict mode, bailout reason, feedback vector) only applies to functions.
void CompilationInfo::Initialize(Isolate* isolate,
                                 Mode mode,
                                 Zone* zone) {
  isolate_ = isolate;
  function_ = NULL;
  scope_ = NULL;
  global_scope_ = NULL;
  extension_ = NULL;
  cached_data_ = NULL;
  cached_data_mode_ = NO_CACHED_DATA;
  zone_ = zone;
  deferred_handles_ = NULL;
  code_stub_ = NULL;
  prologue_offset_ = Code::kPrologueOffsetNotSet;
  // Carry over the optimization attempt count from a previous compilation
  // of the same function, if any.
  opt_count_ = shared_info().is_null() ? 0 : shared_info()->opt_count();
  // Frame-range bookkeeping is only needed while the CPU profiler runs.
  no_frame_ranges_ = isolate->cpu_profiler()->is_profiling()
                   ? new List<OffsetRange>(2) : NULL;
  for (int i = 0; i < DependentCode::kGroupCount; i++) {
    dependencies_[i] = NULL;
  }
  if (mode == STUB) {
    mode_ = STUB;
    return;
  }
  mode_ = mode;
  abort_due_to_dependency_ = false;
  if (script_->type()->value() == Script::TYPE_NATIVE) {
    MarkAsNative();
  }
  if (!shared_info_.is_null()) {
    // The flags_ initializers above always start in SLOPPY; inherit the
    // real mode from the shared function info.
    ASSERT(strict_mode() == SLOPPY);
    SetStrictMode(shared_info_->strict_mode());
  }
  set_bailout_reason(kUnknown);

  if (!shared_info().is_null() && shared_info()->is_compiled()) {
    // We should initialize the CompilationInfo feedback vector from the
    // passed in shared info, rather than creating a new one.
    feedback_vector_ = Handle<FixedArray>(shared_info()->feedback_vector(),
                                          isolate);
  }
}
129
130
// Releases heap-allocated bookkeeping; dependencies_ entries are
// zone-allocated and must have been committed or rolled back by now.
CompilationInfo::~CompilationInfo() {
  delete deferred_handles_;
  delete no_frame_ranges_;
#ifdef DEBUG
  // Check that no dependent maps have been added or added dependent maps have
  // been rolled back or committed.
  for (int i = 0; i < DependentCode::kGroupCount; i++) {
    ASSERT_EQ(NULL, dependencies_[i]);
  }
#endif  // DEBUG
}
142
143
144 void CompilationInfo::CommitDependencies(Handle<Code> code) {
145   for (int i = 0; i < DependentCode::kGroupCount; i++) {
146     ZoneList<Handle<HeapObject> >* group_objects = dependencies_[i];
147     if (group_objects == NULL) continue;
148     ASSERT(!object_wrapper_.is_null());
149     for (int j = 0; j < group_objects->length(); j++) {
150       DependentCode::DependencyGroup group =
151           static_cast<DependentCode::DependencyGroup>(i);
152       DependentCode* dependent_code =
153           DependentCode::ForObject(group_objects->at(j), group);
154       dependent_code->UpdateToFinishedCode(group, this, *code);
155     }
156     dependencies_[i] = NULL;  // Zone-allocated, no need to delete.
157   }
158 }
159
160
161 void CompilationInfo::RollbackDependencies() {
162   // Unregister from all dependent maps if not yet committed.
163   for (int i = 0; i < DependentCode::kGroupCount; i++) {
164     ZoneList<Handle<HeapObject> >* group_objects = dependencies_[i];
165     if (group_objects == NULL) continue;
166     for (int j = 0; j < group_objects->length(); j++) {
167       DependentCode::DependencyGroup group =
168           static_cast<DependentCode::DependencyGroup>(i);
169       DependentCode* dependent_code =
170           DependentCode::ForObject(group_objects->at(j), group);
171       dependent_code->RemoveCompilationInfo(group, this);
172     }
173     dependencies_[i] = NULL;  // Zone-allocated, no need to delete.
174   }
175 }
176
177
178 int CompilationInfo::num_parameters() const {
179   if (IsStub()) {
180     ASSERT(parameter_count_ > 0);
181     return parameter_count_;
182   } else {
183     return scope()->num_parameters();
184   }
185 }
186
187
188 int CompilationInfo::num_heap_slots() const {
189   if (IsStub()) {
190     return 0;
191   } else {
192     return scope()->num_heap_slots();
193   }
194 }
195
196
197 Code::Flags CompilationInfo::flags() const {
198   if (IsStub()) {
199     return Code::ComputeFlags(code_stub()->GetCodeKind(),
200                               code_stub()->GetICState(),
201                               code_stub()->GetExtraICState(),
202                               code_stub()->GetStubType());
203   } else {
204     return Code::ComputeFlags(Code::OPTIMIZED_FUNCTION);
205   }
206 }
207
208
209 // Disable optimization for the rest of the compilation pipeline.
210 void CompilationInfo::DisableOptimization() {
211   bool is_optimizable_closure =
212     FLAG_optimize_closures &&
213     closure_.is_null() &&
214     !scope_->HasTrivialOuterContext() &&
215     !scope_->outer_scope_calls_sloppy_eval() &&
216     !scope_->inside_with();
217   SetMode(is_optimizable_closure ? BASE : NONOPT);
218 }
219
220
221 // Primitive functions are unlikely to be picked up by the stack-walking
222 // profiler, so they trigger their own optimization when they're called
223 // for the SharedFunctionInfo::kCallsUntilPrimitiveOptimization-th time.
224 bool CompilationInfo::ShouldSelfOptimize() {
225   return FLAG_crankshaft &&
226       !function()->flags()->Contains(kDontSelfOptimize) &&
227       !function()->dont_optimize() &&
228       function()->scope()->AllowsLazyCompilation() &&
229       (shared_info().is_null() || !shared_info()->optimization_disabled());
230 }
231
232
233 void CompilationInfo::PrepareForCompilation(Scope* scope) {
234   ASSERT(scope_ == NULL);
235   scope_ = scope;
236
237   int length = function()->slot_count();
238   if (feedback_vector_.is_null()) {
239     // Allocate the feedback vector too.
240     feedback_vector_ = isolate()->factory()->NewTypeFeedbackVector(length);
241   }
242   ASSERT(feedback_vector_->length() == length);
243 }
244
245
// A graph builder that additionally records source positions: every visited
// expression/statement node with a known position updates the builder's
// current source position before being handed to the base visitor.  Module
// and declaration nodes carry no positions and are forwarded unchanged.
class HOptimizedGraphBuilderWithPositions: public HOptimizedGraphBuilder {
 public:
  explicit HOptimizedGraphBuilderWithPositions(CompilationInfo* info)
      : HOptimizedGraphBuilder(info) {
  }

#define DEF_VISIT(type)                                 \
  virtual void Visit##type(type* node) V8_OVERRIDE {    \
    if (node->position() != RelocInfo::kNoPosition) {   \
      SetSourcePosition(node->position());              \
    }                                                   \
    HOptimizedGraphBuilder::Visit##type(node);          \
  }
  EXPRESSION_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT

#define DEF_VISIT(type)                                          \
  virtual void Visit##type(type* node) V8_OVERRIDE {             \
    if (node->position() != RelocInfo::kNoPosition) {            \
      SetSourcePosition(node->position());                       \
    }                                                            \
    HOptimizedGraphBuilder::Visit##type(node);                   \
  }
  STATEMENT_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT

#define DEF_VISIT(type)                                            \
  virtual void Visit##type(type* node) V8_OVERRIDE {               \
    HOptimizedGraphBuilder::Visit##type(node);                     \
  }
  MODULE_NODE_LIST(DEF_VISIT)
  DECLARATION_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT
};
280
281
282 // Determine whether to use the full compiler for all code. If the flag
283 // --always-full-compiler is specified this is the case. For the virtual frame
284 // based compiler the full compiler is also used if a debugger is connected, as
285 // the code from the full compiler supports mode precise break points. For the
286 // crankshaft adaptive compiler debugging the optimized code is not possible at
287 // all. However crankshaft support recompilation of functions, so in this case
288 // the full compiler need not be be used if a debugger is attached, but only if
289 // break points has actually been set.
290 static bool IsDebuggerActive(Isolate* isolate) {
291   return isolate->use_crankshaft() ?
292     isolate->debug()->has_break_points() :
293     isolate->debugger()->IsDebuggerActive();
294 }
295
296
// Phase 1 of an optimized compile: validate that optimization is possible,
// (re)compile unoptimized code with deopt support if needed, then build the
// Hydrogen graph.  Returns SUCCEEDED, FAILED (pending exception), or one of
// the abort statuses (with or without permanently disabling optimization
// for the function).
OptimizedCompileJob::Status OptimizedCompileJob::CreateGraph() {
  ASSERT(isolate()->use_crankshaft());
  ASSERT(info()->IsOptimizing());
  ASSERT(!info()->IsCompilingForDebugging());

  // We should never arrive here if there is no code object on the
  // shared function object.
  ASSERT(info()->shared_info()->code()->kind() == Code::FUNCTION);

  // We should never arrive here if optimization has been disabled on the
  // shared function info.
  ASSERT(!info()->shared_info()->optimization_disabled());

  // Fall back to using the full code generator if it's not possible
  // to use the Hydrogen-based optimizing compiler. We already have
  // generated code for this from the shared function object.
  if (FLAG_always_full_compiler) return AbortOptimization();
  if (IsDebuggerActive(isolate())) return AbortOptimization(kDebuggerIsActive);

  // Limit the number of times we re-compile a functions with
  // the optimizing compiler.
  const int kMaxOptCount =
      FLAG_deopt_every_n_times == 0 ? FLAG_max_opt_count : 1000;
  if (info()->opt_count() > kMaxOptCount) {
    return AbortAndDisableOptimization(kOptimizedTooManyTimes);
  }

  // Due to an encoding limit on LUnallocated operands in the Lithium
  // language, we cannot optimize functions with too many formal parameters
  // or perform on-stack replacement for function with too many
  // stack-allocated local variables.
  //
  // The encoding is as a signed value, with parameters and receiver using
  // the negative indices and locals the non-negative ones.
  const int parameter_limit = -LUnallocated::kMinFixedSlotIndex;
  Scope* scope = info()->scope();
  if ((scope->num_parameters() + 1) > parameter_limit) {
    return AbortAndDisableOptimization(kTooManyParameters);
  }

  const int locals_limit = LUnallocated::kMaxFixedSlotIndex;
  if (info()->is_osr() &&
      scope->num_parameters() + 1 + scope->num_stack_slots() > locals_limit) {
    return AbortAndDisableOptimization(kTooManyParametersLocals);
  }

  if (scope->HasIllegalRedeclaration()) {
    return AbortAndDisableOptimization(kFunctionWithIllegalRedeclaration);
  }

  // Take --hydrogen-filter into account.
  if (!info()->closure()->PassesFilter(FLAG_hydrogen_filter)) {
    return AbortOptimization(kHydrogenFilter);
  }

  // Recompile the unoptimized version of the code if the current version
  // doesn't have deoptimization support. Alternatively, we may decide to
  // run the full code generator to get a baseline for the compile-time
  // performance of the hydrogen-based compiler.
  bool should_recompile = !info()->shared_info()->has_deoptimization_support();
  if (should_recompile || FLAG_hydrogen_stats) {
    ElapsedTimer timer;
    if (FLAG_hydrogen_stats) {
      timer.Start();
    }
    CompilationInfoWithZone unoptimized(info()->shared_info());
    // Note that we use the same AST that we will use for generating the
    // optimized code.
    unoptimized.SetFunction(info()->function());
    unoptimized.PrepareForCompilation(info()->scope());
    unoptimized.SetContext(info()->context());
    if (should_recompile) unoptimized.EnableDeoptimizationSupport();
    bool succeeded = FullCodeGenerator::MakeCode(&unoptimized);
    if (should_recompile) {
      if (!succeeded) return SetLastStatus(FAILED);
      Handle<SharedFunctionInfo> shared = info()->shared_info();
      shared->EnableDeoptimizationSupport(*unoptimized.code());
      // The existing unoptimized code was replaced with the new one.
      Compiler::RecordFunctionCompilation(
          Logger::LAZY_COMPILE_TAG, &unoptimized, shared);
    }
    if (FLAG_hydrogen_stats) {
      isolate()->GetHStatistics()->IncrementFullCodeGen(timer.Elapsed());
    }
  }

  // Check that the unoptimized, shared code is ready for
  // optimizations.  When using the always_opt flag we disregard the
  // optimizable marker in the code object and optimize anyway. This
  // is safe as long as the unoptimized code has deoptimization
  // support.
  ASSERT(FLAG_always_opt || info()->shared_info()->code()->optimizable());
  ASSERT(info()->shared_info()->has_deoptimization_support());

  if (FLAG_trace_hydrogen) {
    Handle<String> name = info()->function()->debug_name();
    PrintF("-----------------------------------------------------------\n");
    PrintF("Compiling method %s using hydrogen\n", name->ToCString().get());
    isolate()->GetHTracer()->TraceCompilation(info());
  }

  // Type-check the function.
  AstTyper::Run(info());

  // With --hydrogen-track-positions, use the position-recording subclass
  // so generated code maps back to source locations.
  graph_builder_ = FLAG_hydrogen_track_positions
      ? new(info()->zone()) HOptimizedGraphBuilderWithPositions(info())
      : new(info()->zone()) HOptimizedGraphBuilder(info());

  Timer t(this, &time_taken_to_create_graph_);
  info()->set_this_has_uses(false);
  graph_ = graph_builder_->CreateGraph();

  if (isolate()->has_pending_exception()) {
    return SetLastStatus(FAILED);
  }

  // The function being compiled may have bailed out due to an inline
  // candidate bailing out.  In such a case, we don't disable
  // optimization on the shared_info.
  ASSERT(!graph_builder_->inline_bailout() || graph_ == NULL);
  if (graph_ == NULL) {
    if (graph_builder_->inline_bailout()) {
      return AbortOptimization();
    } else {
      return AbortAndDisableOptimization();
    }
  }

  if (info()->HasAbortedDueToDependencyChange()) {
    return AbortOptimization(kBailedOutDueToDependencyChange);
  }

  return SetLastStatus(SUCCEEDED);
}
431
432
// Phase 2: run the Hydrogen optimization passes and lower the graph to a
// Lithium chunk.  The Disallow* scopes enforce that this phase performs no
// heap/handle access, so it may run on a background compilation thread.
OptimizedCompileJob::Status OptimizedCompileJob::OptimizeGraph() {
  DisallowHeapAllocation no_allocation;
  DisallowHandleAllocation no_handles;
  DisallowHandleDereference no_deref;
  DisallowCodeDependencyChange no_dependency_change;

  ASSERT(last_status() == SUCCEEDED);
  Timer t(this, &time_taken_to_optimize_);
  ASSERT(graph_ != NULL);
  BailoutReason bailout_reason = kNoReason;

  if (graph_->Optimize(&bailout_reason)) {
    chunk_ = LChunk::NewChunk(graph_);
    if (chunk_ != NULL) return SetLastStatus(SUCCEEDED);
  } else if (bailout_reason != kNoReason) {
    // Record the specific bailout reason on the graph builder before
    // aborting.
    graph_builder_->Bailout(bailout_reason);
  }

  return AbortOptimization();
}
453
454
// Phase 3: generate machine code from the Lithium chunk, record stats and
// register the result on the native context's optimized-code list.
OptimizedCompileJob::Status OptimizedCompileJob::GenerateCode() {
  ASSERT(last_status() == SUCCEEDED);
  ASSERT(!info()->HasAbortedDueToDependencyChange());
  DisallowCodeDependencyChange no_dependency_change;
  {  // Scope for timer.
    Timer timer(this, &time_taken_to_codegen_);
    ASSERT(chunk_ != NULL);
    ASSERT(graph_ != NULL);
    // Deferred handles reference objects that were accessible during
    // graph creation.  To make sure that we don't encounter inconsistencies
    // between graph creation and code generation, we disallow accessing
    // objects through deferred handles during the latter, with exceptions.
    DisallowDeferredHandleDereference no_deferred_handle_deref;
    Handle<Code> optimized_code = chunk_->Codegen();
    if (optimized_code.is_null()) {
      // Preserve a more specific bailout reason if codegen already set one.
      if (info()->bailout_reason() == kNoReason) {
        info_->set_bailout_reason(kCodeGenerationFailed);
      }
      return AbortAndDisableOptimization();
    }
    info()->SetCode(optimized_code);
  }
  RecordOptimizationStats();
  // Add to the weak list of optimized code objects.
  info()->context()->native_context()->AddOptimizedCode(*info()->code());
  return SetLastStatus(SUCCEEDED);
}
482
483
// Bumps the function's opt_count and, depending on flags, prints per-compile
// timings (--trace-opt), cumulative stats (--trace-opt-stats; the statics
// accumulate across all compiles in the process), or feeds the Hydrogen
// statistics collector (--hydrogen-stats).
void OptimizedCompileJob::RecordOptimizationStats() {
  Handle<JSFunction> function = info()->closure();
  if (!function->IsOptimized()) {
    // Concurrent recompilation and OSR may race.  Increment only once.
    int opt_count = function->shared()->opt_count();
    function->shared()->set_opt_count(opt_count + 1);
  }
  double ms_creategraph = time_taken_to_create_graph_.InMillisecondsF();
  double ms_optimize = time_taken_to_optimize_.InMillisecondsF();
  double ms_codegen = time_taken_to_codegen_.InMillisecondsF();
  if (FLAG_trace_opt) {
    PrintF("[optimizing ");
    function->ShortPrint();
    PrintF(" - took %0.3f, %0.3f, %0.3f ms]\n", ms_creategraph, ms_optimize,
           ms_codegen);
  }
  if (FLAG_trace_opt_stats) {
    static double compilation_time = 0.0;
    static int compiled_functions = 0;
    static int code_size = 0;

    compilation_time += (ms_creategraph + ms_optimize + ms_codegen);
    compiled_functions++;
    code_size += function->shared()->SourceSize();
    PrintF("Compiled: %d functions with %d byte source size in %fms.\n",
           compiled_functions,
           code_size,
           compilation_time);
  }
  if (FLAG_hydrogen_stats) {
    isolate()->GetHStatistics()->IncrementSubtotals(time_taken_to_create_graph_,
                                                    time_taken_to_optimize_,
                                                    time_taken_to_codegen_);
  }
}
519
520
521 // Sets the expected number of properties based on estimate from compiler.
522 void SetExpectedNofPropertiesFromEstimate(Handle<SharedFunctionInfo> shared,
523                                           int estimate) {
524   // See the comment in SetExpectedNofProperties.
525   if (shared->live_objects_may_exist()) return;
526
527   // If no properties are added in the constructor, they are more likely
528   // to be added later.
529   if (estimate == 0) estimate = 2;
530
531   // TODO(yangguo): check whether those heuristics are still up-to-date.
532   // We do not shrink objects that go into a snapshot (yet), so we adjust
533   // the estimate conservatively.
534   if (Serializer::enabled(shared->GetIsolate())) {
535     estimate += 2;
536   } else if (FLAG_clever_optimizations) {
537     // Inobject slack tracking will reclaim redundant inobject space later,
538     // so we can afford to adjust the estimate generously.
539     estimate += 8;
540   } else {
541     estimate += 3;
542   }
543
544   shared->set_expected_nof_properties(estimate);
545 }
546
547
static void UpdateSharedFunctionInfo(CompilationInfo* info) {
  // Update the shared function info with the compiled code and the
  // scope info.  Please note, that the order of the shared function
  // info initialization is important since set_scope_info might
  // trigger a GC, causing the ASSERT below to be invalid if the code
  // was flushed. By setting the code object last we avoid this.
  Handle<SharedFunctionInfo> shared = info->shared_info();
  Handle<ScopeInfo> scope_info =
      ScopeInfo::Create(info->scope(), info->zone());
  shared->set_scope_info(*scope_info);

  Handle<Code> code = info->code();
  CHECK(code->kind() == Code::FUNCTION);
  shared->ReplaceCode(*code);
  // If optimization was previously disabled for this function, make the
  // freshly installed unoptimized code reflect that.
  if (shared->optimization_disabled()) code->set_optimizable(false);

  shared->set_feedback_vector(*info->feedback_vector());

  // Set the expected number of properties for instances.
  FunctionLiteral* lit = info->function();
  int expected = lit->expected_property_count();
  SetExpectedNofPropertiesFromEstimate(shared, expected);

  // Check the function has compiled code.
  ASSERT(shared->is_compiled());
  // Propagate compile-time analysis results from the literal to the
  // shared info for later optimization decisions.
  shared->set_dont_optimize_reason(lit->dont_optimize_reason());
  shared->set_dont_inline(lit->flags()->Contains(kDontInline));
  shared->set_ast_node_count(lit->ast_node_count());
  shared->set_strict_mode(lit->strict_mode());
}
578
579
// Sets the function info on a function.
// The start_position points to the first '(' character after the function name
// in the full script source. When counting characters in the script source the
// the first character is number 0 (not 1).
static void SetFunctionInfo(Handle<SharedFunctionInfo> function_info,
                            FunctionLiteral* lit,
                            bool is_toplevel,
                            Handle<Script> script) {
  // Straight transfer of the parser's findings onto the shared info.
  function_info->set_length(lit->parameter_count());
  function_info->set_formal_parameter_count(lit->parameter_count());
  function_info->set_script(*script);
  function_info->set_function_token_position(lit->function_token_position());
  function_info->set_start_position(lit->start_position());
  function_info->set_end_position(lit->end_position());
  function_info->set_is_expression(lit->is_expression());
  function_info->set_is_anonymous(lit->is_anonymous());
  function_info->set_is_toplevel(is_toplevel);
  function_info->set_inferred_name(*lit->inferred_name());
  function_info->set_allows_lazy_compilation(lit->AllowsLazyCompilation());
  function_info->set_allows_lazy_compilation_without_context(
      lit->AllowsLazyCompilationWithoutContext());
  function_info->set_strict_mode(lit->strict_mode());
  function_info->set_uses_arguments(lit->scope()->arguments() != NULL);
  function_info->set_has_duplicate_parameters(lit->has_duplicate_parameters());
  function_info->set_ast_node_count(lit->ast_node_count());
  function_info->set_is_function(lit->is_function());
  function_info->set_dont_optimize_reason(lit->dont_optimize_reason());
  function_info->set_dont_inline(lit->flags()->Contains(kDontInline));
  function_info->set_dont_cache(lit->flags()->Contains(kDontCache));
  function_info->set_is_generator(lit->is_generator());
}
611
612
613 static bool CompileUnoptimizedCode(CompilationInfo* info) {
614   ASSERT(info->function() != NULL);
615   if (!Rewriter::Rewrite(info)) return false;
616   if (!Scope::Analyze(info)) return false;
617   ASSERT(info->scope() != NULL);
618
619   if (!FullCodeGenerator::MakeCode(info)) {
620     Isolate* isolate = info->isolate();
621     if (!isolate->has_pending_exception()) isolate->StackOverflow();
622     return false;
623   }
624   return true;
625 }
626
627
// Parses and compiles unoptimized code for a function with an existing
// SharedFunctionInfo, updating the shared info with the result.  Returns an
// empty MaybeHandle (with a pending exception) on failure.
MUST_USE_RESULT static MaybeHandle<Code> GetUnoptimizedCodeCommon(
    CompilationInfo* info) {
  VMState<COMPILER> state(info->isolate());
  PostponeInterruptsScope postpone(info->isolate());
  if (!Parser::Parse(info)) return MaybeHandle<Code>();
  // The parser determines the actual language mode.
  info->SetStrictMode(info->function()->strict_mode());

  if (!CompileUnoptimizedCode(info)) return MaybeHandle<Code>();
  Compiler::RecordFunctionCompilation(
      Logger::LAZY_COMPILE_TAG, info, info->shared_info());
  UpdateSharedFunctionInfo(info);
  ASSERT_EQ(Code::FUNCTION, info->code()->kind());
  return info->code();
}
642
643
// Returns unoptimized code for |function|, compiling it if necessary.  With
// --always-opt it additionally attempts a synchronous optimized compile and
// returns that code instead when it succeeds.
MaybeHandle<Code> Compiler::GetUnoptimizedCode(Handle<JSFunction> function) {
  ASSERT(!function->GetIsolate()->has_pending_exception());
  ASSERT(!function->is_compiled());
  // Reuse code already compiled for another closure of the same function.
  if (function->shared()->is_compiled()) {
    return Handle<Code>(function->shared()->code());
  }

  CompilationInfoWithZone info(function);
  Handle<Code> result;
  ASSIGN_RETURN_ON_EXCEPTION(info.isolate(), result,
                             GetUnoptimizedCodeCommon(&info),
                             Code);

  if (FLAG_always_opt &&
      info.isolate()->use_crankshaft() &&
      !info.shared_info()->optimization_disabled() &&
      !info.isolate()->DebuggerHasBreakPoints()) {
    Handle<Code> opt_code;
    // Best effort: fall back to the unoptimized code if this fails.
    if (Compiler::GetOptimizedCode(
            function, result,
            Compiler::NOT_CONCURRENT).ToHandle(&opt_code)) {
      result = opt_code;
    }
  }

  return result;
}
671
672
// Variant used when only the SharedFunctionInfo is available (no closure);
// never attempts optimization.
MaybeHandle<Code> Compiler::GetUnoptimizedCode(
    Handle<SharedFunctionInfo> shared) {
  ASSERT(!shared->GetIsolate()->has_pending_exception());
  ASSERT(!shared->is_compiled());

  CompilationInfoWithZone info(shared);
  return GetUnoptimizedCodeCommon(&info);
}
681
682
683 bool Compiler::EnsureCompiled(Handle<JSFunction> function,
684                               ClearExceptionFlag flag) {
685   if (function->is_compiled()) return true;
686   MaybeHandle<Code> maybe_code = Compiler::GetUnoptimizedCode(function);
687   Handle<Code> code;
688   if (!maybe_code.ToHandle(&code)) {
689     if (flag == CLEAR_EXCEPTION) {
690       function->GetIsolate()->clear_pending_exception();
691     }
692     return false;
693   }
694   function->ReplaceCode(*code);
695   ASSERT(function->is_compiled());
696   return true;
697 }
698
699
// Compile full code for debugging. This code will have debug break slots
// and deoptimization information. Deoptimization information is required
// in case that an optimized version of this function is still activated on
// the stack. It will also make sure that the full code is compiled with
// the same flags as the previous version, that is flags which can change
// the code generated. The current method of mapping from already compiled
// full code without debug break slots to full code with debug break slots
// depends on the generated code is otherwise exactly the same.
// If compilation fails, just keep the existing code.
MaybeHandle<Code> Compiler::GetCodeForDebugging(Handle<JSFunction> function) {
  CompilationInfoWithZone info(function);
  Isolate* isolate = info.isolate();
  VMState<COMPILER> state(isolate);

  ASSERT(!isolate->has_pending_exception());
  Handle<Code> old_code(function->shared()->code());
  ASSERT(old_code->kind() == Code::FUNCTION);
  ASSERT(!old_code->has_debug_break_slots());

  info.MarkCompilingForDebugging();
  // Mirror the old code's optimizability so the new full code is generated
  // with the same flags (see the mapping requirement in the comment above).
  if (old_code->is_compiled_optimizable()) {
    info.EnableDeoptimizationSupport();
  } else {
    info.MarkNonOptimizable();
  }
  MaybeHandle<Code> maybe_new_code = GetUnoptimizedCodeCommon(&info);
  Handle<Code> new_code;
  if (!maybe_new_code.ToHandle(&new_code)) {
    // Compilation failed: swallow the exception and keep the old code.
    isolate->clear_pending_exception();
  } else {
    ASSERT_EQ(old_code->is_compiled_optimizable(),
              new_code->is_compiled_optimizable());
  }
  return maybe_new_code;
}
735
736
// Compiles a whole script for the LiveEdit feature, recording function info
// with the live edit tracker.  Failures are silently dropped (the caller
// inspects the tracker's state instead of a return value).
void Compiler::CompileForLiveEdit(Handle<Script> script) {
  // TODO(635): support extensions.
  CompilationInfoWithZone info(script);
  PostponeInterruptsScope postpone(info.isolate());
  VMState<COMPILER> state(info.isolate());

  info.MarkAsGlobal();
  if (!Parser::Parse(&info)) return;
  info.SetStrictMode(info.function()->strict_mode());

  LiveEditFunctionTracker tracker(info.isolate(), info.function());
  if (!CompileUnoptimizedCode(&info)) return;
  if (!info.shared_info().is_null()) {
    // Keep the shared info's scope info in sync with the fresh compile.
    Handle<ScopeInfo> scope_info = ScopeInfo::Create(info.scope(),
                                                     info.zone());
    info.shared_info()->set_scope_info(*scope_info);
  }
  tracker.RecordRootFunctionInfo(info.code());
}
756
757
758 static bool DebuggerWantsEagerCompilation(CompilationInfo* info,
759                                           bool allow_lazy_without_ctx = false) {
760   return LiveEditFunctionTracker::IsActive(info->isolate()) ||
761          (info->isolate()->DebuggerHasBreakPoints() && !allow_lazy_without_ctx);
762 }
763
764
// Compiles top-level code (global script or eval source): notifies the
// debugger, parses the source, generates unoptimized code for the root
// function literal, and wraps the result in a fresh SharedFunctionInfo.
// Returns a null handle if parsing or code generation fails.
static Handle<SharedFunctionInfo> CompileToplevel(CompilationInfo* info) {
  Isolate* isolate = info->isolate();
  // Block interrupts while compiling; released when this scope exits.
  PostponeInterruptsScope postpone(isolate);
  ASSERT(!isolate->native_context().is_null());
  Handle<Script> script = info->script();

  // TODO(svenpanne) Obscure place for this, perhaps move to OnBeforeCompile?
  FixedArray* array = isolate->native_context()->embedder_data();
  script->set_context_data(array->get(0));

  isolate->debugger()->OnBeforeCompile(script);

  // Only whole-script or eval compilations come through here.
  ASSERT(info->is_eval() || info->is_global());

  // Lazy parsing of inner functions is only worthwhile for sources above
  // the preparse threshold (or when consuming cached data), and must be
  // suppressed when the debugger wants everything compiled eagerly.
  bool parse_allow_lazy =
      (info->cached_data_mode() == CONSUME_CACHED_DATA ||
       String::cast(script->source())->length() > FLAG_min_preparse_length) &&
      !DebuggerWantsEagerCompilation(info);

  if (!parse_allow_lazy && info->cached_data_mode() != NO_CACHED_DATA) {
    // We are going to parse eagerly, but we either 1) have cached data produced
    // by lazy parsing or 2) are asked to generate cached data. We cannot use
    // the existing data, since it won't contain all the symbols we need for
    // eager parsing. In addition, it doesn't make sense to produce the data
    // when parsing eagerly. That data would contain all symbols, but no
    // functions, so it cannot be used to aid lazy parsing later.
    info->SetCachedData(NULL, NO_CACHED_DATA);
  }

  Handle<SharedFunctionInfo> result;

  { VMState<COMPILER> state(info->isolate());
    if (!Parser::Parse(info, parse_allow_lazy)) {
      return Handle<SharedFunctionInfo>::null();
    }

    FunctionLiteral* lit = info->function();
    LiveEditFunctionTracker live_edit_tracker(isolate, lit);

    // Measure how long it takes to do the compilation; only take the
    // rest of the function into account to avoid overlap with the
    // parsing statistics.
    HistogramTimer* rate = info->is_eval()
          ? info->isolate()->counters()->compile_eval()
          : info->isolate()->counters()->compile();
    HistogramTimerScope timer(rate);

    // Compile the code.
    if (!CompileUnoptimizedCode(info)) {
      return Handle<SharedFunctionInfo>::null();
    }

    // Allocate function.
    ASSERT(!info->code().is_null());
    result = isolate->factory()->NewSharedFunctionInfo(
        lit->name(),
        lit->materialized_literal_count(),
        lit->is_generator(),
        info->code(),
        ScopeInfo::Create(info->scope(), info->zone()),
        info->feedback_vector());

    // Top-level literals carry no function token position.
    ASSERT_EQ(RelocInfo::kNoPosition, lit->function_token_position());
    SetFunctionInfo(result, lit, true, script);

    Handle<String> script_name = script->name()->IsString()
        ? Handle<String>(String::cast(script->name()))
        : isolate->factory()->empty_string();
    Logger::LogEventsAndTags log_tag = info->is_eval()
        ? Logger::EVAL_TAG
        : Logger::ToNativeByScript(Logger::SCRIPT_TAG, *script);

    PROFILE(isolate, CodeCreateEvent(
                log_tag, *info->code(), *result, info, *script_name));
    GDBJIT(AddCode(script_name, script, info->code(), info));

    // Hint to the runtime system used when allocating space for initial
    // property space by setting the expected number of properties for
    // the instances of the function.
    SetExpectedNofPropertiesFromEstimate(result,
                                         lit->expected_property_count());

    script->set_compilation_state(Script::COMPILATION_STATE_COMPILED);

    live_edit_tracker.RecordFunctionInfo(result, lit, info->zone());
  }

  isolate->debugger()->OnAfterCompile(script, Debugger::NO_AFTER_COMPILE_FLAGS);

  return result;
}
856
857
// Returns a JSFunction for eval() source, compiling it if no matching
// entry exists in the compilation cache.  Freshly compiled eval code has
// optimization explicitly disabled, and is added to the cache unless it
// was marked as non-cacheable.  Returns an empty MaybeHandle if
// compilation fails.
MaybeHandle<JSFunction> Compiler::GetFunctionFromEval(
    Handle<String> source,
    Handle<Context> context,
    StrictMode strict_mode,
    ParseRestriction restriction,
    int scope_position) {
  Isolate* isolate = source->GetIsolate();
  int source_length = source->length();
  isolate->counters()->total_eval_size()->Increment(source_length);
  isolate->counters()->total_compile_size()->Increment(source_length);

  // The cache key includes the context, strict mode and scope position,
  // so only a compilation done under equivalent conditions is reused.
  CompilationCache* compilation_cache = isolate->compilation_cache();
  MaybeHandle<SharedFunctionInfo> maybe_shared_info =
      compilation_cache->LookupEval(source, context, strict_mode,
                                    scope_position);
  Handle<SharedFunctionInfo> shared_info;

  if (!maybe_shared_info.ToHandle(&shared_info)) {
    // Cache miss: compile the eval source as top-level code.
    Handle<Script> script = isolate->factory()->NewScript(source);
    CompilationInfoWithZone info(script);
    info.MarkAsEval();
    // An eval in the native context behaves like global code.
    if (context->IsNativeContext()) info.MarkAsGlobal();
    info.SetStrictMode(strict_mode);
    info.SetParseRestriction(restriction);
    info.SetContext(context);

    Debug::RecordEvalCaller(script);

    shared_info = CompileToplevel(&info);

    if (shared_info.is_null()) {
      return MaybeHandle<JSFunction>();
    } else {
      // Explicitly disable optimization for eval code. We're not yet prepared
      // to handle eval-code in the optimizing compiler.
      shared_info->DisableOptimization(kEval);

      // If caller is strict mode, the result must be in strict mode as well.
      ASSERT(strict_mode == SLOPPY || shared_info->strict_mode() == STRICT);
      if (!shared_info->dont_cache()) {
        compilation_cache->PutEval(
            source, context, shared_info, scope_position);
      }
    }
  } else if (shared_info->ic_age() != isolate->heap()->global_ic_age()) {
    // Cache hit from an older IC generation: reset IC state first.
    shared_info->ResetForNewContext(isolate->heap()->global_ic_age());
  }

  return isolate->factory()->NewFunctionFromSharedFunctionInfo(
      shared_info, context, NOT_TENURED);
}
909
910
// Compiles a full script, consulting the compilation cache first (except
// for extension scripts, which are never cached).  On a cache miss a new
// Script object is created, compiled as top-level code, and the result is
// cached unless it is an extension or marked non-cacheable.  Returns a
// null handle on failure, after reporting pending messages.
Handle<SharedFunctionInfo> Compiler::CompileScript(
    Handle<String> source,
    Handle<Object> script_name,
    int line_offset,
    int column_offset,
    bool is_shared_cross_origin,
    Handle<Context> context,
    v8::Extension* extension,
    ScriptData** cached_data,
    CachedDataMode cached_data_mode,
    NativesFlag natives) {
  // Validate the cached-data arguments against the requested mode.
  if (cached_data_mode == NO_CACHED_DATA) {
    cached_data = NULL;
  } else if (cached_data_mode == PRODUCE_CACHED_DATA) {
    ASSERT(cached_data && !*cached_data);
  } else {
    ASSERT(cached_data_mode == CONSUME_CACHED_DATA);
    ASSERT(cached_data && *cached_data);
  }
  Isolate* isolate = source->GetIsolate();
  int source_length = source->length();
  isolate->counters()->total_load_size()->Increment(source_length);
  isolate->counters()->total_compile_size()->Increment(source_length);

  CompilationCache* compilation_cache = isolate->compilation_cache();

  // Do a lookup in the compilation cache but not for extensions.
  MaybeHandle<SharedFunctionInfo> maybe_result;
  Handle<SharedFunctionInfo> result;
  if (extension == NULL) {
    maybe_result = compilation_cache->LookupScript(
        source, script_name, line_offset, column_offset,
        is_shared_cross_origin, context);
  }

  if (!maybe_result.ToHandle(&result)) {
    // No cache entry found. Compile the script.

    // Create a script object describing the script to be compiled.
    Handle<Script> script = isolate->factory()->NewScript(source);
    if (natives == NATIVES_CODE) {
      script->set_type(Smi::FromInt(Script::TYPE_NATIVE));
    }
    if (!script_name.is_null()) {
      script->set_name(*script_name);
      script->set_line_offset(Smi::FromInt(line_offset));
      script->set_column_offset(Smi::FromInt(column_offset));
    }
    script->set_is_shared_cross_origin(is_shared_cross_origin);

    // Compile the function and add it to the cache.
    CompilationInfoWithZone info(script);
    info.MarkAsGlobal();
    info.SetExtension(extension);
    info.SetCachedData(cached_data, cached_data_mode);
    info.SetContext(context);
    if (FLAG_use_strict) info.SetStrictMode(STRICT);
    result = CompileToplevel(&info);
    if (extension == NULL && !result.is_null() && !result->dont_cache()) {
      compilation_cache->PutScript(source, context, result);
    }
    if (result.is_null()) isolate->ReportPendingMessages();
  } else if (result->ic_age() != isolate->heap()->global_ic_age()) {
      // Cache hit from an older IC generation: reset IC state first.
      result->ResetForNewContext(isolate->heap()->global_ic_age());
  }
  return result;
}
978
979
// Builds a SharedFunctionInfo for an already-parsed function literal,
// choosing between lazy compilation (code is the CompileUnoptimized
// builtin stub) and immediate full code generation.  Returns a null
// handle if eager code generation fails.
Handle<SharedFunctionInfo> Compiler::BuildFunctionInfo(FunctionLiteral* literal,
                                                       Handle<Script> script) {
  // Precondition: code has been parsed and scopes have been analyzed.
  CompilationInfoWithZone info(script);
  info.SetFunction(literal);
  info.PrepareForCompilation(literal->scope());
  info.SetStrictMode(literal->scope()->strict_mode());

  Isolate* isolate = info.isolate();
  Factory* factory = isolate->factory();
  LiveEditFunctionTracker live_edit_tracker(isolate, literal);
  // Determine if the function can be lazily compiled. This is necessary to
  // allow some of our builtin JS files to be lazily compiled. These
  // builtins cannot be handled lazily by the parser, since we have to know
  // if a function uses the special natives syntax, which is something the
  // parser records.
  // If the debugger requests compilation for break points, we cannot be
  // aggressive about lazy compilation, because it might trigger compilation
  // of functions without an outer context when setting a breakpoint through
  // Debug::FindSharedFunctionInfoInScript.
  bool allow_lazy_without_ctx = literal->AllowsLazyCompilationWithoutContext();
  bool allow_lazy = literal->AllowsLazyCompilation() &&
      !DebuggerWantsEagerCompilation(&info, allow_lazy_without_ctx);

  // Generate code
  Handle<ScopeInfo> scope_info;
  if (FLAG_lazy && allow_lazy && !literal->is_parenthesized()) {
    // Defer compilation: install the lazy-compile stub and an empty scope.
    Handle<Code> code = isolate->builtins()->CompileUnoptimized();
    info.SetCode(code);
    scope_info = Handle<ScopeInfo>(ScopeInfo::Empty(isolate));
  } else if (FullCodeGenerator::MakeCode(&info)) {
    ASSERT(!info.code().is_null());
    scope_info = ScopeInfo::Create(info.scope(), info.zone());
  } else {
    return Handle<SharedFunctionInfo>::null();
  }

  // Create a shared function info object.
  Handle<SharedFunctionInfo> result =
      factory->NewSharedFunctionInfo(literal->name(),
                                     literal->materialized_literal_count(),
                                     literal->is_generator(),
                                     info.code(),
                                     scope_info,
                                     info.feedback_vector());
  SetFunctionInfo(result, literal, false, script);
  RecordFunctionCompilation(Logger::FUNCTION_TAG, &info, result);
  result->set_allows_lazy_compilation(allow_lazy);
  result->set_allows_lazy_compilation_without_context(allow_lazy_without_ctx);

  // Set the expected number of properties for instances and return
  // the resulting function.
  SetExpectedNofPropertiesFromEstimate(result,
                                       literal->expected_property_count());
  live_edit_tracker.RecordFunctionInfo(result, literal, info.zone());
  return result;
}
1037
1038
1039 MUST_USE_RESULT static MaybeHandle<Code> GetCodeFromOptimizedCodeMap(
1040     Handle<JSFunction> function,
1041     BailoutId osr_ast_id) {
1042   if (FLAG_cache_optimized_code) {
1043     Handle<SharedFunctionInfo> shared(function->shared());
1044     DisallowHeapAllocation no_gc;
1045     int index = shared->SearchOptimizedCodeMap(
1046         function->context()->native_context(), osr_ast_id);
1047     if (index > 0) {
1048       if (FLAG_trace_opt) {
1049         PrintF("[found optimized code for ");
1050         function->ShortPrint();
1051         if (!osr_ast_id.IsNone()) {
1052           PrintF(" at OSR AST id %d", osr_ast_id.ToInt());
1053         }
1054         PrintF("]\n");
1055       }
1056       FixedArray* literals = shared->GetLiteralsFromOptimizedCodeMap(index);
1057       if (literals != NULL) function->set_literals(literals);
1058       return Handle<Code>(shared->GetCodeFromOptimizedCodeMap(index));
1059     }
1060   }
1061   return MaybeHandle<Code>();
1062 }
1063
1064
1065 static void InsertCodeIntoOptimizedCodeMap(CompilationInfo* info) {
1066   Handle<Code> code = info->code();
1067   if (code->kind() != Code::OPTIMIZED_FUNCTION) return;  // Nothing to do.
1068
1069   // Cache optimized code.
1070   if (FLAG_cache_optimized_code) {
1071     Handle<JSFunction> function = info->closure();
1072     Handle<SharedFunctionInfo> shared(function->shared());
1073     Handle<FixedArray> literals(function->literals());
1074     Handle<Context> native_context(function->context()->native_context());
1075     SharedFunctionInfo::AddToOptimizedCodeMap(
1076         shared, native_context, code, literals, info->osr_ast_id());
1077   }
1078 }
1079
1080
1081 static bool CompileOptimizedPrologue(CompilationInfo* info) {
1082   if (!Parser::Parse(info)) return false;
1083   info->SetStrictMode(info->function()->strict_mode());
1084
1085   if (!Rewriter::Rewrite(info)) return false;
1086   if (!Scope::Analyze(info)) return false;
1087   ASSERT(info->scope() != NULL);
1088   return true;
1089 }
1090
1091
// Synchronously produces optimized code on the main thread: runs the
// front-end prologue, then all three OptimizedCompileJob phases
// (CreateGraph, OptimizeGraph, GenerateCode).  On success the result is
// cached in the optimized code map and logged; returns false on any
// phase failure.
static bool GetOptimizedCodeNow(CompilationInfo* info) {
  if (!CompileOptimizedPrologue(info)) return false;

  Logger::TimerEventScope timer(
      info->isolate(), Logger::TimerEventScope::v8_recompile_synchronous);

  OptimizedCompileJob job(info);
  if (job.CreateGraph() != OptimizedCompileJob::SUCCEEDED) return false;
  if (job.OptimizeGraph() != OptimizedCompileJob::SUCCEEDED) return false;
  if (job.GenerateCode() != OptimizedCompileJob::SUCCEEDED) return false;

  // Success!
  ASSERT(!info->isolate()->has_pending_exception());
  InsertCodeIntoOptimizedCodeMap(info);
  Compiler::RecordFunctionCompilation(
      Logger::LAZY_COMPILE_TAG, info, info->shared_info());
  return true;
}
1110
1111
// Queues an optimized compilation for the background compiler thread.
// Runs the prologue and graph creation on the main thread, then hands the
// zone-allocated job to the optimizing compiler thread, which owns it from
// then on.  Returns false if the queue is full or any main-thread phase
// fails (the caller then falls back or retries later).
static bool GetOptimizedCodeLater(CompilationInfo* info) {
  Isolate* isolate = info->isolate();
  if (!isolate->optimizing_compiler_thread()->IsQueueAvailable()) {
    if (FLAG_trace_concurrent_recompilation) {
      PrintF("  ** Compilation queue full, will retry optimizing ");
      info->closure()->PrintName();
      PrintF(" later.\n");
    }
    return false;
  }

  // Handles created during the prologue must survive until the background
  // job completes, so collect them in a dedicated compilation handle scope.
  CompilationHandleScope handle_scope(info);
  if (!CompileOptimizedPrologue(info)) return false;
  info->SaveHandles();  // Copy handles to the compilation handle scope.

  Logger::TimerEventScope timer(
      isolate, Logger::TimerEventScope::v8_recompile_synchronous);

  // The job is allocated in the compilation zone and freed with it.
  OptimizedCompileJob* job = new(info->zone()) OptimizedCompileJob(info);
  OptimizedCompileJob::Status status = job->CreateGraph();
  if (status != OptimizedCompileJob::SUCCEEDED) return false;
  isolate->optimizing_compiler_thread()->QueueForOptimization(job);

  if (FLAG_trace_concurrent_recompilation) {
    PrintF("  ** Queued ");
     info->closure()->PrintName();
    if (info->is_osr()) {
      PrintF(" for concurrent OSR at %d.\n", info->osr_ast_id().ToInt());
    } else {
      PrintF(" for concurrent optimization.\n");
    }
  }
  return true;
}
1146
1147
// Entry point for optimizing a function (optionally at an OSR point).
// First checks the optimized code map; on a miss, compiles either
// synchronously or by queueing a concurrent job depending on |mode|.
// Returns an empty MaybeHandle if optimization fails, after clearing any
// pending exception.
MaybeHandle<Code> Compiler::GetOptimizedCode(Handle<JSFunction> function,
                                             Handle<Code> current_code,
                                             ConcurrencyMode mode,
                                             BailoutId osr_ast_id) {
  Handle<Code> cached_code;
  if (GetCodeFromOptimizedCodeMap(
          function, osr_ast_id).ToHandle(&cached_code)) {
    return cached_code;
  }

  // The info (and its zone) is owned here unless the concurrent path
  // detaches it below.
  SmartPointer<CompilationInfo> info(new CompilationInfoWithZone(function));
  Isolate* isolate = info->isolate();
  VMState<COMPILER> state(isolate);
  ASSERT(!isolate->has_pending_exception());
  PostponeInterruptsScope postpone(isolate);

  Handle<SharedFunctionInfo> shared = info->shared_info();
  ASSERT_NE(ScopeInfo::Empty(isolate), shared->scope_info());
  int compiled_size = shared->end_position() - shared->start_position();
  isolate->counters()->total_compile_size()->Increment(compiled_size);
  // Reset ticks so the function must re-earn a future optimization attempt.
  current_code->set_profiler_ticks(0);

  info->SetOptimizing(osr_ast_id, current_code);

  if (mode == CONCURRENT) {
    if (GetOptimizedCodeLater(info.get())) {
      info.Detach();  // The background recompile job owns this now.
      // Return a placeholder; real code is installed when the job finishes.
      return isolate->builtins()->InOptimizationQueue();
    }
  } else {
    if (GetOptimizedCodeNow(info.get())) return info->code();
  }

  // Failed.
  if (FLAG_trace_opt) {
    PrintF("[failed to optimize ");
    function->PrintName();
    PrintF(": %s]\n", GetBailoutReason(info->bailout_reason()));
  }

  if (isolate->has_pending_exception()) isolate->clear_pending_exception();
  return MaybeHandle<Code>();
}
1191
1192
// Finalizes a concurrent recompilation on the main thread: generates code
// from the finished graph, logs and caches the result.  Returns a null
// handle when the job must be abandoned (see the numbered reasons below).
Handle<Code> Compiler::GetConcurrentlyOptimizedCode(OptimizedCompileJob* job) {
  // Take ownership of compilation info.  Deleting compilation info
  // also tears down the zone and the recompile job.
  SmartPointer<CompilationInfo> info(job->info());
  Isolate* isolate = info->isolate();

  VMState<COMPILER> state(isolate);
  Logger::TimerEventScope timer(
      isolate, Logger::TimerEventScope::v8_recompile_synchronous);

  Handle<SharedFunctionInfo> shared = info->shared_info();
  shared->code()->set_profiler_ticks(0);

  // 1) Optimization may have failed.
  // 2) The function may have already been optimized by OSR.  Simply continue.
  //    Except when OSR already disabled optimization for some reason.
  // 3) The code may have already been invalidated due to dependency change.
  // 4) Debugger may have been activated.

  if (job->last_status() != OptimizedCompileJob::SUCCEEDED ||
      shared->optimization_disabled() ||
      info->HasAbortedDueToDependencyChange() ||
      isolate->DebuggerHasBreakPoints()) {
    return Handle<Code>::null();
  }

  if (job->GenerateCode() != OptimizedCompileJob::SUCCEEDED) {
    return Handle<Code>::null();
  }

  Compiler::RecordFunctionCompilation(
      Logger::LAZY_COMPILE_TAG, info.get(), shared);
  // Avoid inserting a duplicate entry if OSR already populated the map.
  if (info->shared_info()->SearchOptimizedCodeMap(
          info->context()->native_context(), info->osr_ast_id()) == -1) {
    InsertCodeIntoOptimizedCodeMap(info.get());
  }

  if (FLAG_trace_concurrent_recompilation) {
    PrintF("  ** Optimized code for ");
    info->closure()->PrintName();
    PrintF(" generated.\n");
  }

  return Handle<Code>(*info->code());
}
1238
1239
// Emits code-creation events for the profiler/logger and registers the
// code with GDB JIT support.  Skips profiler logging for the lazy-compile
// builtin stub, which is shared and carries no per-function code.
void Compiler::RecordFunctionCompilation(Logger::LogEventsAndTags tag,
                                         CompilationInfo* info,
                                         Handle<SharedFunctionInfo> shared) {
  // SharedFunctionInfo is passed separately, because if CompilationInfo
  // was created using Script object, it will not have it.

  // Log the code generation. If source information is available include
  // script name and line number. Check explicitly whether logging is
  // enabled as finding the line number is not free.
  if (info->isolate()->logger()->is_logging_code_events() ||
      info->isolate()->cpu_profiler()->is_profiling()) {
    Handle<Script> script = info->script();
    Handle<Code> code = info->code();
    if (code.is_identical_to(
            info->isolate()->builtins()->CompileUnoptimized())) {
      return;
    }
    // Positions are 0-based internally; report them 1-based.
    int line_num = Script::GetLineNumber(script, shared->start_position()) + 1;
    int column_num =
        Script::GetColumnNumber(script, shared->start_position()) + 1;
    String* script_name = script->name()->IsString()
        ? String::cast(script->name())
        : info->isolate()->heap()->empty_string();
    Logger::LogEventsAndTags log_tag = Logger::ToNativeByScript(tag, *script);
    PROFILE(info->isolate(), CodeCreateEvent(
        log_tag, *code, *shared, info, script_name, line_num, column_num));
  }

  GDBJIT(AddCode(Handle<String>(shared->DebugName()),
                 Handle<Script>(info->script()),
                 Handle<Code>(info->code()),
                 info));
}
1273
1274
// Starts a named compilation phase.  With --hydrogen-stats enabled, it
// snapshots the compilation zone's allocation size and starts a timer so
// the destructor can report this phase's time and memory consumption.
CompilationPhase::CompilationPhase(const char* name, CompilationInfo* info)
    : name_(name), info_(info), zone_(info->isolate()) {
  if (FLAG_hydrogen_stats) {
    info_zone_start_allocation_size_ = info->zone()->allocation_size();
    timer_.Start();
  }
}
1282
1283
1284 CompilationPhase::~CompilationPhase() {
1285   if (FLAG_hydrogen_stats) {
1286     unsigned size = zone()->allocation_size();
1287     size += info_->zone()->allocation_size() - info_zone_start_allocation_size_;
1288     isolate()->GetHStatistics()->SaveTiming(name_, timer_.Elapsed(), size);
1289   }
1290 }
1291
1292
1293 bool CompilationPhase::ShouldProduceTraceOutput() const {
1294   // Trace if the appropriate trace flag is set and the phase name's first
1295   // character is in the FLAG_trace_phase command line parameter.
1296   AllowHandleDereference allow_deref;
1297   bool tracing_on = info()->IsStub()
1298       ? FLAG_trace_hydrogen_stubs
1299       : (FLAG_trace_hydrogen &&
1300          info()->closure()->PassesFilter(FLAG_trace_hydrogen_filter));
1301   return (tracing_on &&
1302       OS::StrChr(const_cast<char*>(FLAG_trace_phase), name_[0]) != NULL);
1303 }
1304
1305 } }  // namespace v8::internal