// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#include "src/compiler.h"

#include "src/ast-numbering.h"
#include "src/bootstrapper.h"
#include "src/codegen.h"
#include "src/compilation-cache.h"
#include "src/compiler/pipeline.h"
#include "src/cpu-profiler.h"
#include "src/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen.h"
#include "src/gdb-jit.h"
#include "src/hydrogen.h"
#include "src/isolate-inl.h"
#include "src/lithium.h"
#include "src/liveedit.h"
#include "src/messages.h"
#include "src/parser.h"
#include "src/prettyprinter.h"
#include "src/rewriter.h"
#include "src/runtime-profiler.h"
#include "src/scanner-character-streams.h"
#include "src/scopeinfo.h"
#include "src/scopes.h"
#include "src/typing.h"
#include "src/vm-state-inl.h"

namespace v8 {
namespace internal {

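// Streams a SourcePosition for tracing: "<?>" if unknown, otherwise the
// inlining id and position (or the raw position when tracking is off).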
std::ostream& operator<<(std::ostream& os, const SourcePosition& p) {
  if (p.IsUnknown()) {
    return os << "<?>";
  } else if (FLAG_hydrogen_track_positions) {
    return os << "<" << p.inlining_id() << ":" << p.position() << ">";
  } else {
    return os << "<0:" << p.raw() << ">";
  }
}

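// Takes ownership of a copy of the data if the caller's buffer is not
// pointer-aligned; otherwise the data is used in place without owning it.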
ScriptData::ScriptData(const byte* data, int length)
    : owns_data_(false), rejected_(false), data_(data), length_(length) {
  if (!IsAligned(reinterpret_cast<intptr_t>(data), kPointerAlignment)) {
    byte* copy = NewArray<byte>(length);
    DCHECK(IsAligned(reinterpret_cast<intptr_t>(copy), kPointerAlignment));
    CopyBytes(copy, data, length);
    data_ = copy;
    AcquireDataOwnership();
  }
}


CompilationInfo::CompilationInfo(Handle<Script> script, Zone* zone)
    : flags_(kThisHasUses),
      script_(script),
      source_stream_(NULL),
      osr_ast_id_(BailoutId::None()),
      parameter_count_(0),
      optimization_id_(-1),
      ast_value_factory_(NULL),
      ast_value_factory_owned_(false),
      aborted_due_to_dependency_change_(false),
      osr_expr_stack_height_(0) {
  Initialize(script->GetIsolate(), BASE, zone);
}


CompilationInfo::CompilationInfo(Handle<SharedFunctionInfo> shared_info,
                                 Zone* zone)
    : flags_(kLazy | kThisHasUses),
      shared_info_(shared_info),
      script_(Handle<Script>(Script::cast(shared_info->script()))),
      source_stream_(NULL),
      osr_ast_id_(BailoutId::None()),
      parameter_count_(0),
      optimization_id_(-1),
      ast_value_factory_(NULL),
      ast_value_factory_owned_(false),
      aborted_due_to_dependency_change_(false),
      osr_expr_stack_height_(0) {
  Initialize(script_->GetIsolate(), BASE, zone);
}


CompilationInfo::CompilationInfo(Handle<JSFunction> closure, Zone* zone)
    : flags_(kLazy | kThisHasUses),
      closure_(closure),
      shared_info_(Handle<SharedFunctionInfo>(closure->shared())),
      script_(Handle<Script>(Script::cast(shared_info_->script()))),
      source_stream_(NULL),
      context_(closure->context()),
      osr_ast_id_(BailoutId::None()),
      parameter_count_(0),
      optimization_id_(-1),
      ast_value_factory_(NULL),
      ast_value_factory_owned_(false),
      aborted_due_to_dependency_change_(false),
      osr_expr_stack_height_(0) {
  Initialize(script_->GetIsolate(), BASE, zone);
}


CompilationInfo::CompilationInfo(CodeStub* stub, Isolate* isolate, Zone* zone)
    : flags_(kLazy | kThisHasUses),
      source_stream_(NULL),
      osr_ast_id_(BailoutId::None()),
      parameter_count_(0),
      optimization_id_(-1),
      ast_value_factory_(NULL),
      ast_value_factory_owned_(false),
      aborted_due_to_dependency_change_(false),
      osr_expr_stack_height_(0) {
  Initialize(isolate, STUB, zone);
  code_stub_ = stub;
}


CompilationInfo::CompilationInfo(
    ScriptCompiler::ExternalSourceStream* stream,
    ScriptCompiler::StreamedSource::Encoding encoding, Isolate* isolate,
    Zone* zone)
    : flags_(kThisHasUses),
      source_stream_(stream),
      source_stream_encoding_(encoding),
      osr_ast_id_(BailoutId::None()),
      parameter_count_(0),
      optimization_id_(-1),
      ast_value_factory_(NULL),
      ast_value_factory_owned_(false),
      aborted_due_to_dependency_change_(false),
      osr_expr_stack_height_(0) {
  Initialize(isolate, BASE, zone);
}

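// Common initialization for all CompilationInfo constructors. Sets up default
// state and derives compilation flags from the isolate, V8 flags and the
// shared function info (if any).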
void CompilationInfo::Initialize(Isolate* isolate,
                                 Mode mode,
                                 Zone* zone) {
  isolate_ = isolate;
  function_ = NULL;
  scope_ = NULL;
  script_scope_ = NULL;
  extension_ = NULL;
  cached_data_ = NULL;
  compile_options_ = ScriptCompiler::kNoCompileOptions;
  zone_ = zone;
  deferred_handles_ = NULL;
  code_stub_ = NULL;
  prologue_offset_ = Code::kPrologueOffsetNotSet;
  opt_count_ = shared_info().is_null() ? 0 : shared_info()->opt_count();
  no_frame_ranges_ = isolate->cpu_profiler()->is_profiling()
                   ? new List<OffsetRange>(2) : NULL;
  if (FLAG_hydrogen_track_positions) {
    inlined_function_infos_ = new List<InlinedFunctionInfo>(5);
    inlining_id_to_function_id_ = new List<int>(5);
  } else {
    inlined_function_infos_ = NULL;
    inlining_id_to_function_id_ = NULL;
  }

  for (int i = 0; i < DependentCode::kGroupCount; i++) {
    dependencies_[i] = NULL;
  }
  if (mode == STUB) {
    mode_ = STUB;
    return;
  }
  mode_ = mode;
  if (!script_.is_null() && script_->type()->value() == Script::TYPE_NATIVE) {
    MarkAsNative();
  }
  // Compiling for the snapshot typically results in different code than
  // compiling later on. This means that code recompiled with deoptimization
  // support won't be "equivalent" (as defined by SharedFunctionInfo::
  // EnableDeoptimizationSupport), so it will replace the old code and all
  // its type feedback. To avoid this, always compile functions in the snapshot
  // with deoptimization support.
  if (isolate_->serializer_enabled()) EnableDeoptimizationSupport();

  if (isolate_->debug()->is_active()) MarkAsDebug();
  if (FLAG_context_specialization) MarkAsContextSpecializing();
  if (FLAG_turbo_inlining) MarkAsInliningEnabled();
  if (FLAG_turbo_splitting) MarkAsSplittingEnabled();
  if (FLAG_turbo_types) MarkAsTypingEnabled();

  if (!shared_info_.is_null()) {
    DCHECK(is_sloppy(language_mode()));
    SetLanguageMode(shared_info_->language_mode());
  }
  bailout_reason_ = kNoReason;

  if (!shared_info().is_null() && shared_info()->is_compiled()) {
    // We should initialize the CompilationInfo feedback vector from the
    // passed in shared info, rather than creating a new one.
    feedback_vector_ =
        Handle<TypeFeedbackVector>(shared_info()->feedback_vector(), isolate);
  }
}


CompilationInfo::~CompilationInfo() {
  if (GetFlag(kDisableFutureOptimization)) {
    shared_info()->DisableOptimization(bailout_reason());
  }
  delete deferred_handles_;
  delete no_frame_ranges_;
  delete inlined_function_infos_;
  delete inlining_id_to_function_id_;
  if (ast_value_factory_owned_) delete ast_value_factory_;
#ifdef DEBUG
  // Check that no dependent maps have been added, or that any added dependent
  // maps have been rolled back or committed.
  for (int i = 0; i < DependentCode::kGroupCount; i++) {
    DCHECK(!dependencies_[i]);
  }
#endif  // DEBUG
}


void CompilationInfo::CommitDependencies(Handle<Code> code) {
  bool has_dependencies = false;
  for (int i = 0; i < DependentCode::kGroupCount; i++) {
    has_dependencies |=
        dependencies_[i] != NULL && dependencies_[i]->length() > 0;
  }
  // Avoid creating a weak cell for code with no dependencies.
  if (!has_dependencies) return;

  AllowDeferredHandleDereference get_object_wrapper;
  WeakCell* cell = *Code::WeakCellFor(code);
  for (int i = 0; i < DependentCode::kGroupCount; i++) {
    ZoneList<Handle<HeapObject> >* group_objects = dependencies_[i];
    if (group_objects == NULL) continue;
    DCHECK(!object_wrapper_.is_null());
    for (int j = 0; j < group_objects->length(); j++) {
      DependentCode::DependencyGroup group =
          static_cast<DependentCode::DependencyGroup>(i);
      Foreign* info = *object_wrapper();
      DependentCode* dependent_code =
          DependentCode::ForObject(group_objects->at(j), group);
      dependent_code->UpdateToFinishedCode(group, info, cell);
    }
    dependencies_[i] = NULL;  // Zone-allocated, no need to delete.
  }
}


void CompilationInfo::RollbackDependencies() {
  AllowDeferredHandleDereference get_object_wrapper;
  // Unregister from all dependent maps if not yet committed.
  for (int i = 0; i < DependentCode::kGroupCount; i++) {
    ZoneList<Handle<HeapObject> >* group_objects = dependencies_[i];
    if (group_objects == NULL) continue;
    for (int j = 0; j < group_objects->length(); j++) {
      DependentCode::DependencyGroup group =
          static_cast<DependentCode::DependencyGroup>(i);
      Foreign* info = *object_wrapper();
      DependentCode* dependent_code =
          DependentCode::ForObject(group_objects->at(j), group);
      dependent_code->RemoveCompilationInfo(group, info);
    }
    dependencies_[i] = NULL;  // Zone-allocated, no need to delete.
  }
}


int CompilationInfo::num_parameters() const {
  if (IsStub()) {
    DCHECK(parameter_count_ > 0);
    return parameter_count_;
  } else {
    return scope()->num_parameters();
  }
}


int CompilationInfo::num_heap_slots() const {
  if (IsStub()) {
    return 0;
  } else {
    return scope()->num_heap_slots();
  }
}


Code::Flags CompilationInfo::flags() const {
  if (IsStub()) {
    return Code::ComputeFlags(code_stub()->GetCodeKind(),
                              code_stub()->GetICState(),
                              code_stub()->GetExtraICState(),
                              code_stub()->GetStubType());
  } else {
    return Code::ComputeFlags(Code::OPTIMIZED_FUNCTION);
  }
}


// Primitive functions are unlikely to be picked up by the stack-walking
// profiler, so they trigger their own optimization when they're called
// for the SharedFunctionInfo::kCallsUntilPrimitiveOptimization-th time.
bool CompilationInfo::ShouldSelfOptimize() {
  return FLAG_crankshaft &&
      !function()->flags()->Contains(kDontSelfOptimize) &&
      !function()->dont_optimize() &&
      function()->scope()->AllowsLazyCompilation() &&
      (shared_info().is_null() || !shared_info()->optimization_disabled());
}


void CompilationInfo::PrepareForCompilation(Scope* scope) {
  DCHECK(scope_ == NULL);
  scope_ = scope;
}


void CompilationInfo::EnsureFeedbackVector() {
  if (feedback_vector_.is_null()) {
    feedback_vector_ = isolate()->factory()->NewTypeFeedbackVector(
        function()->feedback_vector_spec());
  }
}


bool CompilationInfo::is_simple_parameter_list() {
  return scope_->is_simple_parameter_list();
}


int CompilationInfo::TraceInlinedFunction(Handle<SharedFunctionInfo> shared,
                                          SourcePosition position) {
  if (!FLAG_hydrogen_track_positions) {
    return 0;
  }

  DCHECK(inlined_function_infos_);
  DCHECK(inlining_id_to_function_id_);
  int id = 0;
  for (; id < inlined_function_infos_->length(); id++) {
    if (inlined_function_infos_->at(id).shared().is_identical_to(shared)) {
      break;
    }
  }
  if (id == inlined_function_infos_->length()) {
    inlined_function_infos_->Add(InlinedFunctionInfo(shared));

    if (!shared->script()->IsUndefined()) {
      Handle<Script> script(Script::cast(shared->script()));
      if (!script->source()->IsUndefined()) {
        CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer());
        OFStream os(tracing_scope.file());
        os << "--- FUNCTION SOURCE (" << shared->DebugName()->ToCString().get()
           << ") id{" << optimization_id() << "," << id << "} ---\n";
        {
          DisallowHeapAllocation no_allocation;
          int start = shared->start_position();
          int len = shared->end_position() - start;
          String::SubStringRange source(String::cast(script->source()), start,
                                        len);
          for (const auto& c : source) {
            os << AsReversiblyEscapedUC16(c);
          }
        }

        os << "\n--- END ---\n";
      }
    }
  }

  int inline_id = inlining_id_to_function_id_->length();
  inlining_id_to_function_id_->Add(id);

  if (inline_id != 0) {
    CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "INLINE (" << shared->DebugName()->ToCString().get() << ") id{"
       << optimization_id() << "," << id << "} AS " << inline_id << " AT "
       << position << std::endl;
  }

  return inline_id;
}

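// A graph builder that records the source position of every visited
// expression and statement node, so that the generated Hydrogen instructions
// carry accurate positions when position tracking is enabled.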
class HOptimizedGraphBuilderWithPositions: public HOptimizedGraphBuilder {
 public:
  explicit HOptimizedGraphBuilderWithPositions(CompilationInfo* info)
      : HOptimizedGraphBuilder(info) {
  }

#define DEF_VISIT(type)                               \
  void Visit##type(type* node) OVERRIDE {             \
    if (node->position() != RelocInfo::kNoPosition) { \
      SetSourcePosition(node->position());            \
    }                                                 \
    HOptimizedGraphBuilder::Visit##type(node);        \
  }
  EXPRESSION_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT

#define DEF_VISIT(type)                               \
  void Visit##type(type* node) OVERRIDE {             \
    if (node->position() != RelocInfo::kNoPosition) { \
      SetSourcePosition(node->position());            \
    }                                                 \
    HOptimizedGraphBuilder::Visit##type(node);        \
  }
  STATEMENT_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT

#define DEF_VISIT(type)                        \
  void Visit##type(type* node) OVERRIDE {      \
    HOptimizedGraphBuilder::Visit##type(node); \
  }
  MODULE_NODE_LIST(DEF_VISIT)
  DECLARATION_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT
};


OptimizedCompileJob::Status OptimizedCompileJob::CreateGraph() {
  DCHECK(info()->IsOptimizing());
  DCHECK(!info()->IsCompilingForDebugging());

  // Do not use Crankshaft/TurboFan if we need to be able to set break points.
  if (isolate()->debug()->has_break_points()) {
    return RetryOptimization(kDebuggerHasBreakPoints);
  }

  // Limit the number of times we re-compile a function with
  // the optimizing compiler.
  const int kMaxOptCount =
      FLAG_deopt_every_n_times == 0 ? FLAG_max_opt_count : 1000;
  if (info()->opt_count() > kMaxOptCount) {
    return AbortOptimization(kOptimizedTooManyTimes);
  }

  // Due to an encoding limit on LUnallocated operands in the Lithium
  // language, we cannot optimize functions with too many formal parameters
  // or perform on-stack replacement for functions with too many
  // stack-allocated local variables.
  //
  // The encoding is as a signed value, with parameters and receiver using
  // the negative indices and locals the non-negative ones.
  const int parameter_limit = -LUnallocated::kMinFixedSlotIndex;
  Scope* scope = info()->scope();
  if ((scope->num_parameters() + 1) > parameter_limit) {
    return AbortOptimization(kTooManyParameters);
  }

  const int locals_limit = LUnallocated::kMaxFixedSlotIndex;
  if (info()->is_osr() &&
      scope->num_parameters() + 1 + scope->num_stack_slots() > locals_limit) {
    return AbortOptimization(kTooManyParametersLocals);
  }

  if (scope->HasIllegalRedeclaration()) {
    return AbortOptimization(kFunctionWithIllegalRedeclaration);
  }

  // Check the whitelist for Crankshaft.
  if (!info()->closure()->PassesFilter(FLAG_hydrogen_filter)) {
    return AbortOptimization(kHydrogenFilter);
  }

  // Crankshaft requires a version of fullcode with deoptimization support.
  // Recompile the unoptimized version of the code if the current version
  // doesn't have deoptimization support already.
  // Otherwise, if we are gathering compilation time and space statistics
  // for hydrogen, gather baseline statistics for a fullcode compilation.
  bool should_recompile = !info()->shared_info()->has_deoptimization_support();
  if (should_recompile || FLAG_hydrogen_stats) {
    base::ElapsedTimer timer;
    if (FLAG_hydrogen_stats) {
      timer.Start();
    }
    if (!Compiler::EnsureDeoptimizationSupport(info())) {
      return SetLastStatus(FAILED);
    }
    if (FLAG_hydrogen_stats) {
      isolate()->GetHStatistics()->IncrementFullCodeGen(timer.Elapsed());
    }
  }

  DCHECK(info()->shared_info()->has_deoptimization_support());

  // Check the whitelist for TurboFan.
  if ((FLAG_turbo_asm && info()->shared_info()->asm_function()) ||
      info()->closure()->PassesFilter(FLAG_turbo_filter)) {
    if (FLAG_trace_opt) {
      OFStream os(stdout);
      os << "[compiling method " << Brief(*info()->closure())
         << " using TurboFan";
      if (info()->is_osr()) os << " OSR";
      os << "]" << std::endl;
    }
    Timer t(this, &time_taken_to_create_graph_);
    compiler::Pipeline pipeline(info());
    pipeline.GenerateCode();
    if (!info()->code().is_null()) {
      return SetLastStatus(SUCCEEDED);
    }
  }

  if (FLAG_trace_opt) {
    OFStream os(stdout);
    os << "[compiling method " << Brief(*info()->closure())
       << " using Crankshaft";
    if (info()->is_osr()) os << " OSR";
    os << "]" << std::endl;
  }

  if (FLAG_trace_hydrogen) {
    isolate()->GetHTracer()->TraceCompilation(info());
  }

  // Type-check the function.
  AstTyper::Run(info());

  // Optimization could have been disabled by the parser. Note that this check
  // is only needed because the Hydrogen graph builder is missing some bailouts.
  if (info()->shared_info()->optimization_disabled()) {
    return AbortOptimization(
        info()->shared_info()->disable_optimization_reason());
  }

  graph_builder_ = (FLAG_hydrogen_track_positions || FLAG_trace_ic)
      ? new(info()->zone()) HOptimizedGraphBuilderWithPositions(info())
      : new(info()->zone()) HOptimizedGraphBuilder(info());

  Timer t(this, &time_taken_to_create_graph_);
  info()->set_this_has_uses(false);
  graph_ = graph_builder_->CreateGraph();

  if (isolate()->has_pending_exception()) {
    return SetLastStatus(FAILED);
  }

  if (graph_ == NULL) return SetLastStatus(BAILED_OUT);

  if (info()->HasAbortedDueToDependencyChange()) {
    // Dependency has changed during graph creation. Let's try again later.
    return RetryOptimization(kBailedOutDueToDependencyChange);
  }

  return SetLastStatus(SUCCEEDED);
}


OptimizedCompileJob::Status OptimizedCompileJob::OptimizeGraph() {
  DisallowHeapAllocation no_allocation;
  DisallowHandleAllocation no_handles;
  DisallowHandleDereference no_deref;
  DisallowCodeDependencyChange no_dependency_change;

  DCHECK(last_status() == SUCCEEDED);
  // TODO(turbofan): Currently everything is done in the first phase.
  if (!info()->code().is_null()) {
    return last_status();
  }

  Timer t(this, &time_taken_to_optimize_);
  DCHECK(graph_ != NULL);
  BailoutReason bailout_reason = kNoReason;

  if (graph_->Optimize(&bailout_reason)) {
    chunk_ = LChunk::NewChunk(graph_);
    if (chunk_ != NULL) return SetLastStatus(SUCCEEDED);
  } else if (bailout_reason != kNoReason) {
    graph_builder_->Bailout(bailout_reason);
  }

  return SetLastStatus(BAILED_OUT);
}


OptimizedCompileJob::Status OptimizedCompileJob::GenerateCode() {
  DCHECK(last_status() == SUCCEEDED);
  // TODO(turbofan): Currently everything is done in the first phase.
  if (!info()->code().is_null()) {
    if (FLAG_turbo_deoptimization) {
      info()->context()->native_context()->AddOptimizedCode(*info()->code());
    }
    RecordOptimizationStats();
    return last_status();
  }

  DCHECK(!info()->HasAbortedDueToDependencyChange());
  DisallowCodeDependencyChange no_dependency_change;
  DisallowJavascriptExecution no_js(isolate());
  {  // Scope for timer.
    Timer timer(this, &time_taken_to_codegen_);
    DCHECK(chunk_ != NULL);
    DCHECK(graph_ != NULL);
    // Deferred handles reference objects that were accessible during
    // graph creation.  To make sure that we don't encounter inconsistencies
    // between graph creation and code generation, we disallow accessing
    // objects through deferred handles during the latter, with exceptions.
    DisallowDeferredHandleDereference no_deferred_handle_deref;
    Handle<Code> optimized_code = chunk_->Codegen();
    if (optimized_code.is_null()) {
      if (info()->bailout_reason() == kNoReason) {
        return AbortOptimization(kCodeGenerationFailed);
      }
      return SetLastStatus(BAILED_OUT);
    }
    info()->SetCode(optimized_code);
  }
  RecordOptimizationStats();
  // Add to the weak list of optimized code objects.
  info()->context()->native_context()->AddOptimizedCode(*info()->code());
  return SetLastStatus(SUCCEEDED);
}


void OptimizedCompileJob::RecordOptimizationStats() {
  Handle<JSFunction> function = info()->closure();
  if (!function->IsOptimized()) {
    // Concurrent recompilation and OSR may race.  Increment only once.
    int opt_count = function->shared()->opt_count();
    function->shared()->set_opt_count(opt_count + 1);
  }
  double ms_creategraph = time_taken_to_create_graph_.InMillisecondsF();
  double ms_optimize = time_taken_to_optimize_.InMillisecondsF();
  double ms_codegen = time_taken_to_codegen_.InMillisecondsF();
  if (FLAG_trace_opt) {
    PrintF("[optimizing ");
    function->ShortPrint();
    PrintF(" - took %0.3f, %0.3f, %0.3f ms]\n", ms_creategraph, ms_optimize,
           ms_codegen);
  }
  if (FLAG_trace_opt_stats) {
    static double compilation_time = 0.0;
    static int compiled_functions = 0;
    static int code_size = 0;

    compilation_time += (ms_creategraph + ms_optimize + ms_codegen);
    compiled_functions++;
    code_size += function->shared()->SourceSize();
    PrintF("Compiled: %d functions with %d byte source size in %fms.\n",
           compiled_functions,
           code_size,
           compilation_time);
  }
  if (FLAG_hydrogen_stats) {
    isolate()->GetHStatistics()->IncrementSubtotals(time_taken_to_create_graph_,
                                                    time_taken_to_optimize_,
                                                    time_taken_to_codegen_);
  }
}


// Sets the expected number of properties based on estimate from compiler.
void SetExpectedNofPropertiesFromEstimate(Handle<SharedFunctionInfo> shared,
                                          int estimate) {
  // If no properties are added in the constructor, they are more likely
  // to be added later.
  if (estimate == 0) estimate = 2;

  // TODO(yangguo): check whether those heuristics are still up-to-date.
  // We do not shrink objects that go into a snapshot (yet), so we adjust
  // the estimate conservatively.
  if (shared->GetIsolate()->serializer_enabled()) {
    estimate += 2;
  } else {
    // Inobject slack tracking will reclaim redundant inobject space later,
    // so we can afford to adjust the estimate generously.
    estimate += 8;
  }

  shared->set_expected_nof_properties(estimate);
}


static void MaybeDisableOptimization(Handle<SharedFunctionInfo> shared_info,
                                     BailoutReason bailout_reason) {
  if (bailout_reason != kNoReason) {
    shared_info->DisableOptimization(bailout_reason);
  }
}


static void RecordFunctionCompilation(Logger::LogEventsAndTags tag,
                                      CompilationInfo* info,
                                      Handle<SharedFunctionInfo> shared) {
  // SharedFunctionInfo is passed separately, because if CompilationInfo
  // was created from a Script object, it will not have one.

  // Log the code generation. If source information is available include
  // script name and line number. Check explicitly whether logging is
  // enabled as finding the line number is not free.
  if (info->isolate()->logger()->is_logging_code_events() ||
      info->isolate()->cpu_profiler()->is_profiling()) {
    Handle<Script> script = info->script();
    Handle<Code> code = info->code();
    if (code.is_identical_to(info->isolate()->builtins()->CompileLazy())) {
      return;
    }
    int line_num = Script::GetLineNumber(script, shared->start_position()) + 1;
    int column_num =
        Script::GetColumnNumber(script, shared->start_position()) + 1;
    String* script_name = script->name()->IsString()
                              ? String::cast(script->name())
                              : info->isolate()->heap()->empty_string();
    Logger::LogEventsAndTags log_tag = Logger::ToNativeByScript(tag, *script);
    PROFILE(info->isolate(),
            CodeCreateEvent(log_tag, *code, *shared, info, script_name,
                            line_num, column_num));
  }

  GDBJIT(AddCode(Handle<String>(shared->DebugName()),
                 Handle<Script>(info->script()), Handle<Code>(info->code()),
                 info));
}

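// Compiles unoptimized (full-codegen) code for the function described by the
// CompilationInfo; guarantees a pending exception on failure.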
static bool CompileUnoptimizedCode(CompilationInfo* info) {
  DCHECK(AllowCompilation::IsAllowed(info->isolate()));
  if (!Compiler::Analyze(info) || !FullCodeGenerator::MakeCode(info)) {
    Isolate* isolate = info->isolate();
    if (!isolate->has_pending_exception()) isolate->StackOverflow();
    return false;
  }
  return true;
}

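// Parses the function, compiles unoptimized code and updates the shared
// function info with the resulting code, scope info and feedback vector.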
MUST_USE_RESULT static MaybeHandle<Code> GetUnoptimizedCodeCommon(
    CompilationInfo* info) {
  VMState<COMPILER> state(info->isolate());
  PostponeInterruptsScope postpone(info->isolate());

  // Parse and update CompilationInfo with the results.
  if (!Parser::ParseStatic(info)) return MaybeHandle<Code>();
  Handle<SharedFunctionInfo> shared = info->shared_info();
  FunctionLiteral* lit = info->function();
  shared->set_language_mode(lit->language_mode());
  SetExpectedNofPropertiesFromEstimate(shared, lit->expected_property_count());
  MaybeDisableOptimization(shared, lit->dont_optimize_reason());

  // Compile unoptimized code.
  if (!CompileUnoptimizedCode(info)) return MaybeHandle<Code>();

  CHECK_EQ(Code::FUNCTION, info->code()->kind());
  RecordFunctionCompilation(Logger::LAZY_COMPILE_TAG, info, shared);

  // Update the shared function info with the scope info. Allocating the
  // ScopeInfo object may cause a GC.
  Handle<ScopeInfo> scope_info =
      ScopeInfo::Create(info->isolate(), info->zone(), info->scope());
  shared->set_scope_info(*scope_info);

  // Update the code and feedback vector for the shared function info.
  shared->ReplaceCode(*info->code());
  if (shared->optimization_disabled()) info->code()->set_optimizable(false);
  shared->set_feedback_vector(*info->feedback_vector());

  return info->code();
}

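// Looks up previously compiled optimized code for the function's native
// context and OSR entry point in the shared info's optimized code map.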
MUST_USE_RESULT static MaybeHandle<Code> GetCodeFromOptimizedCodeMap(
    Handle<JSFunction> function, BailoutId osr_ast_id) {
  if (FLAG_cache_optimized_code) {
    Handle<SharedFunctionInfo> shared(function->shared());
    // Bound functions are not cached.
    if (shared->bound()) return MaybeHandle<Code>();
    DisallowHeapAllocation no_gc;
    int index = shared->SearchOptimizedCodeMap(
        function->context()->native_context(), osr_ast_id);
    if (index > 0) {
      if (FLAG_trace_opt) {
        PrintF("[found optimized code for ");
        function->ShortPrint();
        if (!osr_ast_id.IsNone()) {
          PrintF(" at OSR AST id %d", osr_ast_id.ToInt());
        }
        PrintF("]\n");
      }
      FixedArray* literals = shared->GetLiteralsFromOptimizedCodeMap(index);
      if (literals != NULL) function->set_literals(literals);
      return Handle<Code>(shared->GetCodeFromOptimizedCodeMap(index));
    }
  }
  return MaybeHandle<Code>();
}


static void InsertCodeIntoOptimizedCodeMap(CompilationInfo* info) {
  Handle<Code> code = info->code();
  if (code->kind() != Code::OPTIMIZED_FUNCTION) return;  // Nothing to do.

  // Context specialization folds-in the context, so no sharing can occur.
  if (code->is_turbofanned() && info->is_context_specializing()) return;

  // Cache optimized code.
  if (FLAG_cache_optimized_code) {
    Handle<JSFunction> function = info->closure();
    Handle<SharedFunctionInfo> shared(function->shared());
    // Do not cache bound functions.
    if (shared->bound()) return;
    Handle<FixedArray> literals(function->literals());
    Handle<Context> native_context(function->context()->native_context());
    SharedFunctionInfo::AddToOptimizedCodeMap(shared, native_context, code,
                                              literals, info->osr_ast_id());
  }
}

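// Assigns AST node ids and, if a shared function info is present, copies the
// node count and relevant flags (dont-optimize, dont-cache) over to it.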
static bool Renumber(CompilationInfo* info) {
  if (!AstNumbering::Renumber(info->isolate(), info->zone(),
                              info->function())) {
    return false;
  }
  if (!info->shared_info().is_null()) {
    FunctionLiteral* lit = info->function();
    info->shared_info()->set_ast_node_count(lit->ast_node_count());
    MaybeDisableOptimization(info->shared_info(), lit->dont_optimize_reason());
    info->shared_info()->set_dont_cache(lit->flags()->Contains(kDontCache));
  }
  return true;
}


bool Compiler::Analyze(CompilationInfo* info) {
  DCHECK(info->function() != NULL);
  if (!Rewriter::Rewrite(info)) return false;
  if (!Scope::Analyze(info)) return false;
  if (!Renumber(info)) return false;
  DCHECK(info->scope() != NULL);
  return true;
}


bool Compiler::ParseAndAnalyze(CompilationInfo* info) {
  if (!Parser::ParseStatic(info)) return false;
  return Compiler::Analyze(info);
}

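// Runs the entire optimizing pipeline (graph creation, optimization, code
// generation) synchronously on the current thread.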
static bool GetOptimizedCodeNow(CompilationInfo* info) {
  if (!Compiler::ParseAndAnalyze(info)) return false;

  TimerEventScope<TimerEventRecompileSynchronous> timer(info->isolate());

  OptimizedCompileJob job(info);
  if (job.CreateGraph() != OptimizedCompileJob::SUCCEEDED ||
      job.OptimizeGraph() != OptimizedCompileJob::SUCCEEDED ||
      job.GenerateCode() != OptimizedCompileJob::SUCCEEDED) {
    if (FLAG_trace_opt) {
      PrintF("[aborted optimizing ");
      info->closure()->ShortPrint();
      PrintF(" because: %s]\n", GetBailoutReason(info->bailout_reason()));
    }
    return false;
  }

  // Success!
  DCHECK(!info->isolate()->has_pending_exception());
  InsertCodeIntoOptimizedCodeMap(info);
  RecordFunctionCompilation(Logger::LAZY_COMPILE_TAG, info,
                            info->shared_info());
  return true;
}

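// Builds the graph on the current thread and queues the job with the
// concurrent recompilation thread; the remaining phases run later.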
static bool GetOptimizedCodeLater(CompilationInfo* info) {
  Isolate* isolate = info->isolate();
  if (!isolate->optimizing_compiler_thread()->IsQueueAvailable()) {
    if (FLAG_trace_concurrent_recompilation) {
      PrintF("  ** Compilation queue full, will retry optimizing ");
      info->closure()->ShortPrint();
      PrintF(" later.\n");
    }
    return false;
  }

  CompilationHandleScope handle_scope(info);
  if (!Compiler::ParseAndAnalyze(info)) return false;
  info->SaveHandles();  // Copy handles to the compilation handle scope.

  TimerEventScope<TimerEventRecompileSynchronous> timer(info->isolate());

  OptimizedCompileJob* job = new (info->zone()) OptimizedCompileJob(info);
  OptimizedCompileJob::Status status = job->CreateGraph();
  if (status != OptimizedCompileJob::SUCCEEDED) return false;
  isolate->optimizing_compiler_thread()->QueueForOptimization(job);

  if (FLAG_trace_concurrent_recompilation) {
    PrintF("  ** Queued ");
    info->closure()->ShortPrint();
    if (info->is_osr()) {
      PrintF(" for concurrent OSR at %d.\n", info->osr_ast_id().ToInt());
    } else {
      PrintF(" for concurrent optimization.\n");
    }
  }
  return true;
}


MaybeHandle<Code> Compiler::GetUnoptimizedCode(Handle<JSFunction> function) {
  DCHECK(!function->GetIsolate()->has_pending_exception());
  DCHECK(!function->is_compiled());
  if (function->shared()->is_compiled()) {
    return Handle<Code>(function->shared()->code());
  }

  CompilationInfoWithZone info(function);
  Handle<Code> result;
  ASSIGN_RETURN_ON_EXCEPTION(info.isolate(), result,
                             GetUnoptimizedCodeCommon(&info),
                             Code);
  return result;
}


MaybeHandle<Code> Compiler::GetLazyCode(Handle<JSFunction> function) {
  Isolate* isolate = function->GetIsolate();
  DCHECK(!isolate->has_pending_exception());
  DCHECK(!function->is_compiled());
  AggregatedHistogramTimerScope timer(isolate->counters()->compile_lazy());
  // If the debugger is active, do not compile with turbofan unless we can
  // deopt from turbofan code.
  if (FLAG_turbo_asm && function->shared()->asm_function() &&
      (FLAG_turbo_deoptimization || !isolate->debug()->is_active())) {
    CompilationInfoWithZone info(function);

    VMState<COMPILER> state(isolate);
    PostponeInterruptsScope postpone(isolate);

    info.SetOptimizing(BailoutId::None(), handle(function->shared()->code()));
    info.MarkAsContextSpecializing();

    if (GetOptimizedCodeNow(&info)) {
      DCHECK(function->shared()->is_compiled());
      return info.code();
    }
    // We have failed compilation. If there was an exception clear it so that
    // we can compile unoptimized code.
    if (isolate->has_pending_exception()) isolate->clear_pending_exception();
  }

  if (function->shared()->is_compiled()) {
    return Handle<Code>(function->shared()->code());
  }

  CompilationInfoWithZone info(function);
  Handle<Code> result;
  ASSIGN_RETURN_ON_EXCEPTION(isolate, result, GetUnoptimizedCodeCommon(&info),
                             Code);

  if (FLAG_always_opt && isolate->use_crankshaft()) {
    Handle<Code> opt_code;
    if (Compiler::GetOptimizedCode(
            function, result,
            Compiler::NOT_CONCURRENT).ToHandle(&opt_code)) {
      result = opt_code;
    }
  }

  return result;
}


MaybeHandle<Code> Compiler::GetUnoptimizedCode(
    Handle<SharedFunctionInfo> shared) {
  DCHECK(!shared->GetIsolate()->has_pending_exception());
  DCHECK(!shared->is_compiled());

  CompilationInfoWithZone info(shared);
  return GetUnoptimizedCodeCommon(&info);
}


bool Compiler::EnsureCompiled(Handle<JSFunction> function,
                              ClearExceptionFlag flag) {
  if (function->is_compiled()) return true;
  MaybeHandle<Code> maybe_code = Compiler::GetLazyCode(function);
  Handle<Code> code;
  if (!maybe_code.ToHandle(&code)) {
    if (flag == CLEAR_EXCEPTION) {
      function->GetIsolate()->clear_pending_exception();
    }
    return false;
  }
  function->ReplaceCode(*code);
  DCHECK(function->is_compiled());
  return true;
}


// TODO(turbofan): In the future, unoptimized code with deopt support could
// be generated lazily once deopt is triggered.
bool Compiler::EnsureDeoptimizationSupport(CompilationInfo* info) {
  DCHECK(info->function() != NULL);
  DCHECK(info->scope() != NULL);
  if (!info->shared_info()->has_deoptimization_support()) {
    Handle<SharedFunctionInfo> shared = info->shared_info();
    CompilationInfoWithZone unoptimized(shared);
    // Note that we use the same AST that we will use for generating the
    // optimized code.
    unoptimized.SetFunction(info->function());
    unoptimized.PrepareForCompilation(info->scope());
    unoptimized.SetContext(info->context());
    unoptimized.EnableDeoptimizationSupport();
    // If the current code has reloc info for serialization, also include
    // reloc info for serialization for the new code, so that deopt support
    // can be added without losing IC state.
    if (shared->code()->kind() == Code::FUNCTION &&
        shared->code()->has_reloc_info_for_serialization()) {
      unoptimized.PrepareForSerializing();
    }
    if (!FullCodeGenerator::MakeCode(&unoptimized)) return false;

    shared->EnableDeoptimizationSupport(*unoptimized.code());
    shared->set_feedback_vector(*unoptimized.feedback_vector());

    // The scope info might not have been set if a lazily compiled
    // function is inlined before being called for the first time.
    if (shared->scope_info() == ScopeInfo::Empty(info->isolate())) {
      Handle<ScopeInfo> target_scope_info =
          ScopeInfo::Create(info->isolate(), info->zone(), info->scope());
      shared->set_scope_info(*target_scope_info);
    }

    // The existing unoptimized code was replaced with the new one.
    RecordFunctionCompilation(Logger::LAZY_COMPILE_TAG, &unoptimized, shared);
  }
  return true;
}

// Compile full code for debugging. This code will have debug break slots
// and deoptimization information. Deoptimization information is required
// in case an optimized version of this function is still activated on
// the stack. It will also make sure that the full code is compiled with
// the same flags as the previous version, that is, flags which can change
// the code generated. The current method of mapping from already compiled
// full code without debug break slots to full code with debug break slots
// depends on the generated code being otherwise exactly the same.
// If compilation fails, just keep the existing code.
MaybeHandle<Code> Compiler::GetDebugCode(Handle<JSFunction> function) {
  CompilationInfoWithZone info(function);
  Isolate* isolate = info.isolate();
  VMState<COMPILER> state(isolate);

  info.MarkAsDebug();

  DCHECK(!isolate->has_pending_exception());
  Handle<Code> old_code(function->shared()->code());
  DCHECK(old_code->kind() == Code::FUNCTION);
  DCHECK(!old_code->has_debug_break_slots());

  info.MarkCompilingForDebugging();
  if (old_code->is_compiled_optimizable()) {
    info.EnableDeoptimizationSupport();
  } else {
    info.MarkNonOptimizable();
  }
  MaybeHandle<Code> maybe_new_code = GetUnoptimizedCodeCommon(&info);
  Handle<Code> new_code;
  if (!maybe_new_code.ToHandle(&new_code)) {
    isolate->clear_pending_exception();
  } else {
    DCHECK_EQ(old_code->is_compiled_optimizable(),
              new_code->is_compiled_optimizable());
  }
  return maybe_new_code;
}


void Compiler::CompileForLiveEdit(Handle<Script> script) {
  // TODO(635): support extensions.
  CompilationInfoWithZone info(script);
  PostponeInterruptsScope postpone(info.isolate());
  VMState<COMPILER> state(info.isolate());

  info.MarkAsGlobal();
  if (!Parser::ParseStatic(&info)) return;

  LiveEditFunctionTracker tracker(info.isolate(), info.function());
  if (!CompileUnoptimizedCode(&info)) return;
  if (!info.shared_info().is_null()) {
    Handle<ScopeInfo> scope_info =
        ScopeInfo::Create(info.isolate(), info.zone(), info.scope());
    info.shared_info()->set_scope_info(*scope_info);
  }
  tracker.RecordRootFunctionInfo(info.code());
}

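// Parses (if not already parsed) and compiles top-level code (a script, eval
// or module), producing the root SharedFunctionInfo.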
static Handle<SharedFunctionInfo> CompileToplevel(CompilationInfo* info) {
  Isolate* isolate = info->isolate();
  PostponeInterruptsScope postpone(isolate);
  DCHECK(!isolate->native_context().is_null());
  Handle<Script> script = info->script();

  // TODO(svenpanne) Obscure place for this, perhaps move to OnBeforeCompile?
  FixedArray* array = isolate->native_context()->embedder_data();
  script->set_context_data(array->get(0));

  isolate->debug()->OnBeforeCompile(script);

  DCHECK(info->is_eval() || info->is_global() || info->is_module());

  info->MarkAsToplevel();

  Handle<SharedFunctionInfo> result;

  { VMState<COMPILER> state(info->isolate());
    if (info->function() == NULL) {
      // Parse the script if needed (if it's already parsed, function() is
      // non-NULL).
      bool parse_allow_lazy =
          (info->compile_options() == ScriptCompiler::kConsumeParserCache ||
           String::cast(script->source())->length() >
               FLAG_min_preparse_length) &&
          !Compiler::DebuggerWantsEagerCompilation(info);

      if (!parse_allow_lazy &&
          (info->compile_options() == ScriptCompiler::kProduceParserCache ||
           info->compile_options() == ScriptCompiler::kConsumeParserCache)) {
        // We are going to parse eagerly, but we either 1) have cached data
        // produced by lazy parsing or 2) are asked to generate cached data.
        // Eager parsing cannot benefit from cached data, and producing cached
        // data while parsing eagerly is not implemented.
        info->SetCachedData(NULL, ScriptCompiler::kNoCompileOptions);
      }
      if (!Parser::ParseStatic(info, parse_allow_lazy)) {
        return Handle<SharedFunctionInfo>::null();
      }
    }

    FunctionLiteral* lit = info->function();
    LiveEditFunctionTracker live_edit_tracker(isolate, lit);

    // Measure how long it takes to do the compilation; only take the
    // rest of the function into account to avoid overlap with the
    // parsing statistics.
    HistogramTimer* rate = info->is_eval()
          ? info->isolate()->counters()->compile_eval()
          : info->isolate()->counters()->compile();
    HistogramTimerScope timer(rate);

    // Compile the code.
    if (!CompileUnoptimizedCode(info)) {
      return Handle<SharedFunctionInfo>::null();
    }

    // Allocate function.
    DCHECK(!info->code().is_null());
    result = isolate->factory()->NewSharedFunctionInfo(
        lit->name(), lit->materialized_literal_count(), lit->kind(),
        info->code(),
        ScopeInfo::Create(info->isolate(), info->zone(), info->scope()),
        info->feedback_vector());

    DCHECK_EQ(RelocInfo::kNoPosition, lit->function_token_position());
    SharedFunctionInfo::InitFromFunctionLiteral(result, lit);
    result->set_script(*script);
    result->set_is_toplevel(true);

    Handle<String> script_name = script->name()->IsString()
        ? Handle<String>(String::cast(script->name()))
        : isolate->factory()->empty_string();
    Logger::LogEventsAndTags log_tag = info->is_eval()
        ? Logger::EVAL_TAG
        : Logger::ToNativeByScript(Logger::SCRIPT_TAG, *script);

    PROFILE(isolate, CodeCreateEvent(
                log_tag, *info->code(), *result, info, *script_name));
    GDBJIT(AddCode(script_name, script, info->code(), info));

    // Hint to the runtime system used when allocating space for initial
    // properties by setting the expected number of properties for
    // instances of the function.
1185     SetExpectedNofPropertiesFromEstimate(result,
1186                                          lit->expected_property_count());
1187
1188     if (!script.is_null())
1189       script->set_compilation_state(Script::COMPILATION_STATE_COMPILED);
1190
1191     live_edit_tracker.RecordFunctionInfo(result, lit, info->zone());
1192   }
1193
1194   isolate->debug()->OnAfterCompile(script);
1195
1196   return result;
1197 }
1198
1199
1200 MaybeHandle<JSFunction> Compiler::GetFunctionFromEval(
1201     Handle<String> source, Handle<SharedFunctionInfo> outer_info,
1202     Handle<Context> context, LanguageMode language_mode,
1203     ParseRestriction restriction, int scope_position) {
1204   Isolate* isolate = source->GetIsolate();
1205   int source_length = source->length();
1206   isolate->counters()->total_eval_size()->Increment(source_length);
1207   isolate->counters()->total_compile_size()->Increment(source_length);
1208
1209   CompilationCache* compilation_cache = isolate->compilation_cache();
1210   MaybeHandle<SharedFunctionInfo> maybe_shared_info =
1211       compilation_cache->LookupEval(source, outer_info, context, language_mode,
1212                                     scope_position);
1213   Handle<SharedFunctionInfo> shared_info;
1214
1215   if (!maybe_shared_info.ToHandle(&shared_info)) {
1216     Handle<Script> script = isolate->factory()->NewScript(source);
1217     CompilationInfoWithZone info(script);
1218     info.MarkAsEval();
1219     if (context->IsNativeContext()) info.MarkAsGlobal();
1220     info.SetLanguageMode(language_mode);
1221     info.SetParseRestriction(restriction);
1222     info.SetContext(context);
1223
1224     Debug::RecordEvalCaller(script);
1225
1226     shared_info = CompileToplevel(&info);
1227
1228     if (shared_info.is_null()) {
1229       return MaybeHandle<JSFunction>();
1230     } else {
1231       // Explicitly disable optimization for eval code. We're not yet prepared
1232       // to handle eval-code in the optimizing compiler.
1233       if (restriction != ONLY_SINGLE_FUNCTION_LITERAL) {
1234         shared_info->DisableOptimization(kEval);
1235       }
1236
1237       // If caller is strict mode, the result must be in strict mode as well.
1238       DCHECK(is_sloppy(language_mode) ||
1239              is_strict(shared_info->language_mode()));
1240       if (!shared_info->dont_cache()) {
1241         compilation_cache->PutEval(source, outer_info, context, shared_info,
1242                                    scope_position);
1243       }
1244     }
1245   } else if (shared_info->ic_age() != isolate->heap()->global_ic_age()) {
1246     shared_info->ResetForNewContext(isolate->heap()->global_ic_age());
1247   }
1248
1249   return isolate->factory()->NewFunctionFromSharedFunctionInfo(
1250       shared_info, context, NOT_TENURED);
1251 }
1252
1253
1254 Handle<SharedFunctionInfo> Compiler::CompileScript(
1255     Handle<String> source, Handle<Object> script_name, int line_offset,
1256     int column_offset, bool is_embedder_debug_script,
1257     bool is_shared_cross_origin, Handle<Context> context,
1258     v8::Extension* extension, ScriptData** cached_data,
1259     ScriptCompiler::CompileOptions compile_options, NativesFlag natives,
1260     bool is_module) {
1261   Isolate* isolate = source->GetIsolate();
1262   if (compile_options == ScriptCompiler::kNoCompileOptions) {
1263     cached_data = NULL;
1264   } else if (compile_options == ScriptCompiler::kProduceParserCache ||
1265              compile_options == ScriptCompiler::kProduceCodeCache) {
1266     DCHECK(cached_data && !*cached_data);
1267     DCHECK(extension == NULL);
1268     DCHECK(!isolate->debug()->is_loaded());
1269   } else {
1270     DCHECK(compile_options == ScriptCompiler::kConsumeParserCache ||
1271            compile_options == ScriptCompiler::kConsumeCodeCache);
1272     DCHECK(cached_data && *cached_data);
1273     DCHECK(extension == NULL);
1274   }
1275   int source_length = source->length();
1276   isolate->counters()->total_load_size()->Increment(source_length);
1277   isolate->counters()->total_compile_size()->Increment(source_length);
1278
1279   // TODO(rossberg): The natives do not yet obey strong mode rules
1280   // (for example, some macros use '==').
1281   bool use_strong = FLAG_use_strong && !isolate->bootstrapper()->IsActive();
1282   LanguageMode language_mode =
1283       construct_language_mode(FLAG_use_strict, use_strong);
1284
1285   CompilationCache* compilation_cache = isolate->compilation_cache();
1286
1287   // Do a lookup in the compilation cache but not for extensions.
1288   MaybeHandle<SharedFunctionInfo> maybe_result;
1289   Handle<SharedFunctionInfo> result;
1290   if (extension == NULL) {
1291     // First check per-isolate compilation cache.
1292     maybe_result = compilation_cache->LookupScript(
1293         source, script_name, line_offset, column_offset,
1294         is_embedder_debug_script, is_shared_cross_origin, context,
1295         language_mode);
1296     if (maybe_result.is_null() && FLAG_serialize_toplevel &&
1297         compile_options == ScriptCompiler::kConsumeCodeCache &&
1298         !isolate->debug()->is_loaded()) {
1299       // Then check cached code provided by embedder.
1300       HistogramTimerScope timer(isolate->counters()->compile_deserialize());
1301       Handle<SharedFunctionInfo> result;
1302       if (CodeSerializer::Deserialize(isolate, *cached_data, source)
1303               .ToHandle(&result)) {
1304         // Promote to per-isolate compilation cache.
1305         DCHECK(!result->dont_cache());
1306         compilation_cache->PutScript(source, context, language_mode, result);
1307         return result;
1308       }
1309       // Deserializer failed. Fall through to compile.
1310     }
1311   }
1312
1313   base::ElapsedTimer timer;
1314   if (FLAG_profile_deserialization && FLAG_serialize_toplevel &&
1315       compile_options == ScriptCompiler::kProduceCodeCache) {
1316     timer.Start();
1317   }
1318
1319   if (!maybe_result.ToHandle(&result)) {
1320     // No cache entry found. Compile the script.
1321
1322     // Create a script object describing the script to be compiled.
1323     Handle<Script> script = isolate->factory()->NewScript(source);
1324     if (natives == NATIVES_CODE) {
1325       script->set_type(Smi::FromInt(Script::TYPE_NATIVE));
1326     }
1327     if (!script_name.is_null()) {
1328       script->set_name(*script_name);
1329       script->set_line_offset(Smi::FromInt(line_offset));
1330       script->set_column_offset(Smi::FromInt(column_offset));
1331     }
1332     script->set_is_shared_cross_origin(is_shared_cross_origin);
1333     script->set_is_embedder_debug_script(is_embedder_debug_script);
1334
1335     // Compile the function and add it to the cache.
1336     CompilationInfoWithZone info(script);
1337     if (FLAG_harmony_modules && is_module) {
1338       info.MarkAsModule();
1339     } else {
1340       info.MarkAsGlobal();
1341     }
1342     info.SetCachedData(cached_data, compile_options);
1343     info.SetExtension(extension);
1344     info.SetContext(context);
1345     if (FLAG_serialize_toplevel &&
1346         compile_options == ScriptCompiler::kProduceCodeCache) {
1347       info.PrepareForSerializing();
1348     }
1349
1350     info.SetLanguageMode(
1351         static_cast<LanguageMode>(info.language_mode() | language_mode));
1352     result = CompileToplevel(&info);
1353     if (extension == NULL && !result.is_null() && !result->dont_cache()) {
1354       compilation_cache->PutScript(source, context, language_mode, result);
1355       if (FLAG_serialize_toplevel &&
1356           compile_options == ScriptCompiler::kProduceCodeCache) {
1357         HistogramTimerScope histogram_timer(
1358             isolate->counters()->compile_serialize());
1359         *cached_data = CodeSerializer::Serialize(isolate, result, source);
1360         if (FLAG_profile_deserialization) {
1361           PrintF("[Compiling and serializing took %0.3f ms]\n",
1362                  timer.Elapsed().InMillisecondsF());
1363         }
1364       }
1365     }
1366
1367     if (result.is_null()) isolate->ReportPendingMessages();
1368   } else if (result->ic_age() != isolate->heap()->global_ic_age()) {
1369     result->ResetForNewContext(isolate->heap()->global_ic_age());
1370   }
1371   return result;
1372 }
1373
1374
1375 Handle<SharedFunctionInfo> Compiler::CompileStreamedScript(
1376     CompilationInfo* info, int source_length) {
1377   Isolate* isolate = info->isolate();
1378   isolate->counters()->total_load_size()->Increment(source_length);
1379   isolate->counters()->total_compile_size()->Increment(source_length);
1380
1381   LanguageMode language_mode =
1382       construct_language_mode(FLAG_use_strict, FLAG_use_strong);
1383   info->SetLanguageMode(
1384       static_cast<LanguageMode>(info->language_mode() | language_mode));
1385
1386   // TODO(marja): FLAG_serialize_toplevel is not honoured and won't be; when the
1387   // real code caching lands, streaming needs to be adapted to use it.
1388   return CompileToplevel(info);
1389 }
1390
1391
Handle<SharedFunctionInfo> Compiler::BuildFunctionInfo(
    FunctionLiteral* literal, Handle<Script> script,
    CompilationInfo* outer_info) {
  // Precondition: code has been parsed and scopes have been analyzed.
  CompilationInfoWithZone info(script);
  info.SetFunction(literal);
  info.PrepareForCompilation(literal->scope());
  info.SetLanguageMode(literal->scope()->language_mode());
  if (outer_info->will_serialize()) info.PrepareForSerializing();

  Isolate* isolate = info.isolate();
  Factory* factory = isolate->factory();
  LiveEditFunctionTracker live_edit_tracker(isolate, literal);
  // Determine if the function can be lazily compiled. This is necessary to
  // allow some of our builtin JS files to be lazily compiled. These
  // builtins cannot be handled lazily by the parser, since we have to know
  // if a function uses the special natives syntax, which is something the
  // parser records.
  // If the debugger requests compilation for break points, we cannot be
  // aggressive about lazy compilation, because it might trigger compilation
  // of functions without an outer context when setting a breakpoint through
  // Debug::FindSharedFunctionInfoInScript.
  bool allow_lazy_without_ctx = literal->AllowsLazyCompilationWithoutContext();
  bool allow_lazy = literal->AllowsLazyCompilation() &&
      !DebuggerWantsEagerCompilation(&info, allow_lazy_without_ctx);

  if (outer_info->is_toplevel() && outer_info->will_serialize()) {
    // If the enclosing code is toplevel code that is going to be serialized,
    // the inner function must be compilable lazily: toplevel code is
    // serialized without the code of its inner functions.
    DCHECK(allow_lazy);
  }

  // Generate code
  Handle<ScopeInfo> scope_info;
  if (FLAG_lazy && allow_lazy && !literal->is_parenthesized()) {
    Handle<Code> code = isolate->builtins()->CompileLazy();
    info.SetCode(code);
    // In theory a lazily compiled function does not need a type feedback
    // vector, but some parts of the system expect all SharedFunctionInfo
    // instances to have one. The size of the vector depends on the number of
    // feedback-needing nodes in the tree, and with lazy parsing we may not
    // know that yet if this function has never been parsed before. In that
    // case the vector will be replaced the next time MakeCode is called.
    info.EnsureFeedbackVector();
    scope_info = Handle<ScopeInfo>(ScopeInfo::Empty(isolate));
  } else if (Renumber(&info) && FullCodeGenerator::MakeCode(&info)) {
    // MakeCode will ensure that the feedback vector is present and
    // appropriately sized.
    DCHECK(!info.code().is_null());
    scope_info = ScopeInfo::Create(info.isolate(), info.zone(), info.scope());
  } else {
    return Handle<SharedFunctionInfo>::null();
  }

  // Create a shared function info object.
  Handle<SharedFunctionInfo> result = factory->NewSharedFunctionInfo(
      literal->name(), literal->materialized_literal_count(), literal->kind(),
      info.code(), scope_info, info.feedback_vector());

  SharedFunctionInfo::InitFromFunctionLiteral(result, literal);
  result->set_script(*script);
  result->set_is_toplevel(false);

  RecordFunctionCompilation(Logger::FUNCTION_TAG, &info, result);
  result->set_allows_lazy_compilation(literal->AllowsLazyCompilation());
  result->set_allows_lazy_compilation_without_context(allow_lazy_without_ctx);

  // Set the expected number of properties for instances and return
  // the resulting function.
  SetExpectedNofPropertiesFromEstimate(result,
                                       literal->expected_property_count());
  live_edit_tracker.RecordFunctionInfo(result, literal, info.zone());
  return result;
}


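// Returns optimized code for |function|, reusing an entry from the optimized
// code map when one exists. Otherwise a new CompilationInfo is set up and the
// compile is either run synchronously or, in CONCURRENT mode, handed to a
// background recompilation job (in which case the InOptimizationQueue builtin
// is returned as a placeholder). An empty handle signals failure.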
MaybeHandle<Code> Compiler::GetOptimizedCode(Handle<JSFunction> function,
                                             Handle<Code> current_code,
                                             ConcurrencyMode mode,
                                             BailoutId osr_ast_id) {
  Handle<Code> cached_code;
  if (GetCodeFromOptimizedCodeMap(
          function, osr_ast_id).ToHandle(&cached_code)) {
    return cached_code;
  }

  SmartPointer<CompilationInfo> info(new CompilationInfoWithZone(function));
  Isolate* isolate = info->isolate();
  DCHECK(AllowCompilation::IsAllowed(isolate));
  VMState<COMPILER> state(isolate);
  DCHECK(isolate->use_crankshaft());
  DCHECK(!isolate->has_pending_exception());
  PostponeInterruptsScope postpone(isolate);

  Handle<SharedFunctionInfo> shared = info->shared_info();
  if (shared->code()->kind() != Code::FUNCTION ||
      ScopeInfo::Empty(isolate) == shared->scope_info()) {
    // The function was never compiled. Compile it unoptimized first.
    // TODO(titzer): reuse the AST and scope info from this compile.
    CompilationInfoWithZone nested(function);
    nested.EnableDeoptimizationSupport();
    if (!GetUnoptimizedCodeCommon(&nested).ToHandle(&current_code)) {
      return MaybeHandle<Code>();
    }
    shared->ReplaceCode(*current_code);
  }
  current_code->set_profiler_ticks(0);

  info->SetOptimizing(osr_ast_id, current_code);

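  // Dispatch the compile: the concurrent path queues a background job and
  // returns a placeholder, the synchronous path runs the whole pipeline here.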
  if (mode == CONCURRENT) {
    if (GetOptimizedCodeLater(info.get())) {
      info.Detach();  // The background recompile job owns this now.
      return isolate->builtins()->InOptimizationQueue();
    }
  } else {
    if (GetOptimizedCodeNow(info.get())) return info->code();
  }

  if (isolate->has_pending_exception()) isolate->clear_pending_exception();
  return MaybeHandle<Code>();
}


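// Finishes a recompilation job that ran on the background thread: if the job
// succeeded and its result is still usable, code is generated on the main
// thread and entered into the optimized code map. A null handle means the
// result was discarded.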
Handle<Code> Compiler::GetConcurrentlyOptimizedCode(OptimizedCompileJob* job) {
  // Take ownership of compilation info.  Deleting compilation info
  // also tears down the zone and the recompile job.
  SmartPointer<CompilationInfo> info(job->info());
  Isolate* isolate = info->isolate();

  VMState<COMPILER> state(isolate);
  TimerEventScope<TimerEventRecompileSynchronous> timer(info->isolate());

  Handle<SharedFunctionInfo> shared = info->shared_info();
  shared->code()->set_profiler_ticks(0);

  // 1) Optimization on the concurrent thread may have failed.
  // 2) The function may have already been optimized by OSR.  Simply continue.
  //    Except when OSR already disabled optimization for some reason.
  // 3) The code may have already been invalidated due to dependency change.
  // 4) Debugger may have been activated.
  // 5) Code generation may have failed.
  if (job->last_status() == OptimizedCompileJob::SUCCEEDED) {
    if (shared->optimization_disabled()) {
      job->RetryOptimization(kOptimizationDisabled);
    } else if (info->HasAbortedDueToDependencyChange()) {
      job->RetryOptimization(kBailedOutDueToDependencyChange);
    } else if (isolate->debug()->has_break_points()) {
      job->RetryOptimization(kDebuggerHasBreakPoints);
    } else if (job->GenerateCode() == OptimizedCompileJob::SUCCEEDED) {
      RecordFunctionCompilation(Logger::LAZY_COMPILE_TAG, info.get(), shared);
      if (info->shared_info()->SearchOptimizedCodeMap(
              info->context()->native_context(), info->osr_ast_id()) == -1) {
        InsertCodeIntoOptimizedCodeMap(info.get());
      }
      if (FLAG_trace_opt) {
        PrintF("[completed optimizing ");
        info->closure()->ShortPrint();
        PrintF("]\n");
      }
      return Handle<Code>(*info->code());
    }
  }

  DCHECK(job->last_status() != OptimizedCompileJob::SUCCEEDED);
  if (FLAG_trace_opt) {
    PrintF("[aborted optimizing ");
    info->closure()->ShortPrint();
    PrintF(" because: %s]\n", GetBailoutReason(info->bailout_reason()));
  }
  return Handle<Code>::null();
}


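// Lazy compilation is suppressed while LiveEdit is active, and while the
// debugger is attached for functions that cannot be compiled without an
// outer context (see the comment in BuildFunctionInfo above).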
bool Compiler::DebuggerWantsEagerCompilation(CompilationInfo* info,
                                             bool allow_lazy_without_ctx) {
  if (LiveEditFunctionTracker::IsActive(info->isolate())) return true;
  Debug* debug = info->isolate()->debug();
  bool debugging = debug->is_active() || debug->has_break_points();
  return debugging && !allow_lazy_without_ctx;
}


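// CompilationPhase is a scoped helper: when --hydrogen-stats is enabled, the
// constructor samples the zone allocation size and starts a timer, and the
// destructor reports the elapsed time and memory used by the phase.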
CompilationPhase::CompilationPhase(const char* name, CompilationInfo* info)
    : name_(name), info_(info) {
  if (FLAG_hydrogen_stats) {
    info_zone_start_allocation_size_ = info->zone()->allocation_size();
    timer_.Start();
  }
}


CompilationPhase::~CompilationPhase() {
  if (FLAG_hydrogen_stats) {
    size_t size = zone()->allocation_size();
    size += info_->zone()->allocation_size() - info_zone_start_allocation_size_;
    isolate()->GetHStatistics()->SaveTiming(name_, timer_.Elapsed(), size);
  }
}


bool CompilationPhase::ShouldProduceTraceOutput() const {
  // Trace if the appropriate trace flag is set and the phase name's first
  // character is in the FLAG_trace_phase command line parameter.
  AllowHandleDereference allow_deref;
  bool tracing_on = info()->IsStub()
      ? FLAG_trace_hydrogen_stubs
      : (FLAG_trace_hydrogen &&
         info()->closure()->PassesFilter(FLAG_trace_hydrogen_filter));
  return (tracing_on &&
      base::OS::StrChr(const_cast<char*>(FLAG_trace_phase), name_[0]) != NULL);
}


#if DEBUG
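// Debug-only helper that pretty-prints the AST of the function being compiled
// back as JavaScript-like source, for use in tests and manual debugging.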
void CompilationInfo::PrintAstForTesting() {
  PrintF("--- Source from AST ---\n%s\n",
         PrettyPrinter(isolate(), zone()).PrintProgram(function()));
}
#endif
} }  // namespace v8::internal