1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
7 #include "src/compiler.h"
9 #include "src/bootstrapper.h"
10 #include "src/codegen.h"
11 #include "src/compilation-cache.h"
12 #include "src/compiler/pipeline.h"
13 #include "src/cpu-profiler.h"
14 #include "src/debug.h"
15 #include "src/deoptimizer.h"
16 #include "src/full-codegen.h"
17 #include "src/gdb-jit.h"
18 #include "src/hydrogen.h"
19 #include "src/isolate-inl.h"
20 #include "src/lithium.h"
21 #include "src/liveedit.h"
22 #include "src/parser.h"
23 #include "src/rewriter.h"
24 #include "src/runtime-profiler.h"
25 #include "src/scanner-character-streams.h"
26 #include "src/scopeinfo.h"
27 #include "src/scopes.h"
28 #include "src/typing.h"
29 #include "src/vm-state-inl.h"
// Wraps externally supplied (pre)parse data. If the incoming buffer is not
// pointer-aligned, a private aligned copy is made and ownership of that copy
// is taken, so downstream consumers can rely on alignment.
35 ScriptData::ScriptData(const byte* data, int length)
36 : owns_data_(false), data_(data), length_(length) {
37 if (!IsAligned(reinterpret_cast<intptr_t>(data), kPointerAlignment)) {
// Misaligned input: copy into a freshly allocated (aligned) buffer.
38 byte* copy = NewArray<byte>(length);
39 DCHECK(IsAligned(reinterpret_cast<intptr_t>(copy), kPointerAlignment));
40 CopyBytes(copy, data, length);
// The copy belongs to this object; mark it for deletion in the destructor.
42 AcquireDataOwnership();
// Constructor for compiling a whole script (BASE mode, non-lazy).
47 CompilationInfo::CompilationInfo(Handle<Script> script, Zone* zone)
48 : flags_(kThisHasUses),
51 osr_ast_id_(BailoutId::None()),
54 ast_value_factory_(NULL),
55 ast_value_factory_owned_(false) {
56 Initialize(script->GetIsolate(), BASE, zone);
// Constructor used when there is no script at all (STUB mode).
60 CompilationInfo::CompilationInfo(Isolate* isolate, Zone* zone)
61 : flags_(kThisHasUses),
62 script_(Handle<Script>::null()),
64 osr_ast_id_(BailoutId::None()),
67 ast_value_factory_(NULL),
68 ast_value_factory_owned_(false) {
69 Initialize(isolate, STUB, zone);
// Constructor for lazily (re)compiling an already known SharedFunctionInfo;
// the script is recovered from the shared info.
73 CompilationInfo::CompilationInfo(Handle<SharedFunctionInfo> shared_info,
75 : flags_(kLazy | kThisHasUses),
76 shared_info_(shared_info),
77 script_(Handle<Script>(Script::cast(shared_info->script()))),
79 osr_ast_id_(BailoutId::None()),
82 ast_value_factory_(NULL),
83 ast_value_factory_owned_(false) {
84 Initialize(script_->GetIsolate(), BASE, zone);
// Constructor for lazily compiling a JSFunction closure; shared info, script
// and context are all derived from the closure itself.
88 CompilationInfo::CompilationInfo(Handle<JSFunction> closure, Zone* zone)
89 : flags_(kLazy | kThisHasUses),
91 shared_info_(Handle<SharedFunctionInfo>(closure->shared())),
92 script_(Handle<Script>(Script::cast(shared_info_->script()))),
94 context_(closure->context()),
95 osr_ast_id_(BailoutId::None()),
98 ast_value_factory_(NULL),
99 ast_value_factory_owned_(false) {
100 Initialize(script_->GetIsolate(), BASE, zone);
// Constructor for compiling a HydrogenCodeStub (STUB mode, no source stream,
// no optimization id).
104 CompilationInfo::CompilationInfo(HydrogenCodeStub* stub, Isolate* isolate,
106 : flags_(kLazy | kThisHasUses),
107 source_stream_(NULL),
108 osr_ast_id_(BailoutId::None()),
110 optimization_id_(-1),
111 ast_value_factory_(NULL),
112 ast_value_factory_owned_(false) {
113 Initialize(isolate, STUB, zone);
// Constructor for streamed (background-parsed) script compilation; the source
// arrives through an ExternalSourceStream with the given encoding.
118 CompilationInfo::CompilationInfo(
119 ScriptCompiler::ExternalSourceStream* stream,
120 ScriptCompiler::StreamedSource::Encoding encoding, Isolate* isolate,
122 : flags_(kThisHasUses),
123 source_stream_(stream),
124 source_stream_encoding_(encoding),
125 osr_ast_id_(BailoutId::None()),
127 optimization_id_(-1),
128 ast_value_factory_(NULL),
129 ast_value_factory_owned_(false) {
130 Initialize(isolate, BASE, zone);
// Shared initialization for all CompilationInfo constructors: resets state,
// transfers mode/flag defaults from the isolate and command-line flags, and
// seeds strict mode / feedback vector from the shared info when present.
134 void CompilationInfo::Initialize(Isolate* isolate,
140 global_scope_ = NULL;
143 compile_options_ = ScriptCompiler::kNoCompileOptions;
145 deferred_handles_ = NULL;
147 prologue_offset_ = Code::kPrologueOffsetNotSet;
148 opt_count_ = shared_info().is_null() ? 0 : shared_info()->opt_count();
// Frame-range bookkeeping is only needed while the CPU profiler is active.
149 no_frame_ranges_ = isolate->cpu_profiler()->is_profiling()
150 ? new List<OffsetRange>(2) : NULL;
151 for (int i = 0; i < DependentCode::kGroupCount; i++) {
152 dependencies_[i] = NULL;
159 if (!script_.is_null() && script_->type()->value() == Script::TYPE_NATIVE) {
162 if (isolate_->debug()->is_active()) MarkAsDebug();
// TurboFan-related behavior is driven by the corresponding flags.
163 if (FLAG_context_specialization) MarkAsContextSpecializing();
164 if (FLAG_turbo_inlining) MarkAsInliningEnabled();
165 if (FLAG_turbo_types) MarkAsTypingEnabled();
167 if (!shared_info_.is_null()) {
// Strict mode must not have been set yet; inherit it from the shared info.
168 DCHECK(strict_mode() == SLOPPY);
169 SetStrictMode(shared_info_->strict_mode());
171 bailout_reason_ = kUnknown;
173 if (!shared_info().is_null() && shared_info()->is_compiled()) {
174 // We should initialize the CompilationInfo feedback vector from the
175 // passed in shared info, rather than creating a new one.
177 Handle<TypeFeedbackVector>(shared_info()->feedback_vector(), isolate);
// Destructor: propagates a deferred optimization-disable to the shared info,
// frees owned auxiliary structures, and checks that all code dependencies
// were either committed or rolled back.
182 CompilationInfo::~CompilationInfo() {
183 if (GetFlag(kDisableFutureOptimization)) {
184 shared_info()->DisableOptimization(bailout_reason());
186 delete deferred_handles_;
187 delete no_frame_ranges_;
// The AST value factory may be shared; only delete it when owned.
188 if (ast_value_factory_owned_) delete ast_value_factory_;
190 // Check that no dependent maps have been added or added dependent maps have
191 // been rolled back or committed.
192 for (int i = 0; i < DependentCode::kGroupCount; i++) {
193 DCHECK_EQ(NULL, dependencies_[i]);
// Installs the finished code object into every dependency group that was
// registered during compilation, then clears the per-group lists.
199 void CompilationInfo::CommitDependencies(Handle<Code> code) {
200 for (int i = 0; i < DependentCode::kGroupCount; i++) {
201 ZoneList<Handle<HeapObject> >* group_objects = dependencies_[i];
202 if (group_objects == NULL) continue;
203 DCHECK(!object_wrapper_.is_null());
204 for (int j = 0; j < group_objects->length(); j++) {
205 DependentCode::DependencyGroup group =
206 static_cast<DependentCode::DependencyGroup>(i);
207 DependentCode* dependent_code =
208 DependentCode::ForObject(group_objects->at(j), group);
// Replace the placeholder (this CompilationInfo) with the real code object.
209 dependent_code->UpdateToFinishedCode(group, this, *code);
211 dependencies_[i] = NULL; // Zone-allocated, no need to delete.
// Counterpart of CommitDependencies: removes this CompilationInfo from all
// dependency groups when the compilation is abandoned before committing.
216 void CompilationInfo::RollbackDependencies() {
217 // Unregister from all dependent maps if not yet committed.
218 for (int i = 0; i < DependentCode::kGroupCount; i++) {
219 ZoneList<Handle<HeapObject> >* group_objects = dependencies_[i];
220 if (group_objects == NULL) continue;
221 for (int j = 0; j < group_objects->length(); j++) {
222 DependentCode::DependencyGroup group =
223 static_cast<DependentCode::DependencyGroup>(i);
224 DependentCode* dependent_code =
225 DependentCode::ForObject(group_objects->at(j), group);
226 dependent_code->RemoveCompilationInfo(group, this);
228 dependencies_[i] = NULL; // Zone-allocated, no need to delete.
// Number of formal parameters. Uses the cached parameter_count_ on one path
// (elided branch condition not visible here) and falls back to the scope.
233 int CompilationInfo::num_parameters() const {
235 DCHECK(parameter_count_ > 0);
236 return parameter_count_;
238 return scope()->num_parameters();
// Number of heap-allocated (context) slots, delegated to the scope.
243 int CompilationInfo::num_heap_slots() const {
247 return scope()->num_heap_slots();
// Computes Code::Flags for the produced code: stub compilations derive them
// from the code stub, other compilations default to OPTIMIZED_FUNCTION.
252 Code::Flags CompilationInfo::flags() const {
254 return Code::ComputeFlags(code_stub()->GetCodeKind(),
255 code_stub()->GetICState(),
256 code_stub()->GetExtraICState(),
257 code_stub()->GetStubType());
259 return Code::ComputeFlags(Code::OPTIMIZED_FUNCTION);
264 // Primitive functions are unlikely to be picked up by the stack-walking
265 // profiler, so they trigger their own optimization when they're called
266 // for the SharedFunctionInfo::kCallsUntilPrimitiveOptimization-th time.
// Returns true when Crankshaft is enabled and nothing about the function
// (flags, dont-optimize marker, lazy-compilation restrictions, or a disabled
// shared info) forbids self-triggered optimization.
267 bool CompilationInfo::ShouldSelfOptimize() {
268 return FLAG_crankshaft &&
269 !function()->flags()->Contains(kDontSelfOptimize) &&
270 !function()->dont_optimize() &&
271 function()->scope()->AllowsLazyCompilation() &&
272 (shared_info().is_null() || !shared_info()->optimization_disabled());
// Attaches the analyzed scope to this info (must not be set yet) and ensures
// a type feedback vector of the size demanded by the function literal exists.
276 void CompilationInfo::PrepareForCompilation(Scope* scope) {
277 DCHECK(scope_ == NULL);
280 int length = function()->slot_count();
281 if (feedback_vector_.is_null()) {
282 // Allocate the feedback vector too.
283 feedback_vector_ = isolate()->factory()->NewTypeFeedbackVector(length);
// An existing vector must already match the required slot count.
285 DCHECK(feedback_vector_->length() == length);
// Graph builder variant that records source positions: each expression and
// statement visit first sets the current source position (when the node has
// one) before delegating to the base HOptimizedGraphBuilder visitor.
289 class HOptimizedGraphBuilderWithPositions: public HOptimizedGraphBuilder {
291 explicit HOptimizedGraphBuilderWithPositions(CompilationInfo* info)
292 : HOptimizedGraphBuilder(info) {
// Expression nodes: track positions.
295 #define DEF_VISIT(type) \
296 virtual void Visit##type(type* node) OVERRIDE { \
297 if (node->position() != RelocInfo::kNoPosition) { \
298 SetSourcePosition(node->position()); \
300 HOptimizedGraphBuilder::Visit##type(node); \
302 EXPRESSION_NODE_LIST(DEF_VISIT)
// Statement nodes: track positions.
305 #define DEF_VISIT(type) \
306 virtual void Visit##type(type* node) OVERRIDE { \
307 if (node->position() != RelocInfo::kNoPosition) { \
308 SetSourcePosition(node->position()); \
310 HOptimizedGraphBuilder::Visit##type(node); \
312 STATEMENT_NODE_LIST(DEF_VISIT)
// Module and declaration nodes carry no position tracking; plain delegation.
315 #define DEF_VISIT(type) \
316 virtual void Visit##type(type* node) OVERRIDE { \
317 HOptimizedGraphBuilder::Visit##type(node); \
319 MODULE_NODE_LIST(DEF_VISIT)
320 DECLARATION_NODE_LIST(DEF_VISIT)
// Phase 1 of optimized compilation: validates that optimization is possible
// (no breakpoints, retry/parameter/local limits, filters), makes sure
// deoptimization-capable full code exists, optionally routes the function
// through the TurboFan pipeline, and otherwise type-checks the AST and builds
// the Hydrogen graph. Returns SUCCEEDED / FAILED / BAILED_OUT / retry status.
325 OptimizedCompileJob::Status OptimizedCompileJob::CreateGraph() {
326 DCHECK(isolate()->use_crankshaft());
327 DCHECK(info()->IsOptimizing());
328 DCHECK(!info()->IsCompilingForDebugging());
330 // We should never arrive here if optimization has been disabled on the
331 // shared function info.
332 DCHECK(!info()->shared_info()->optimization_disabled());
334 // Do not use crankshaft if we need to be able to set break points.
335 if (isolate()->DebuggerHasBreakPoints()) {
336 return RetryOptimization(kDebuggerHasBreakPoints);
339 // Limit the number of times we re-compile a functions with
340 // the optimizing compiler.
341 const int kMaxOptCount =
342 FLAG_deopt_every_n_times == 0 ? FLAG_max_opt_count : 1000;
343 if (info()->opt_count() > kMaxOptCount) {
344 return AbortOptimization(kOptimizedTooManyTimes);
347 // Due to an encoding limit on LUnallocated operands in the Lithium
348 // language, we cannot optimize functions with too many formal parameters
349 // or perform on-stack replacement for function with too many
350 // stack-allocated local variables.
352 // The encoding is as a signed value, with parameters and receiver using
353 // the negative indices and locals the non-negative ones.
354 const int parameter_limit = -LUnallocated::kMinFixedSlotIndex;
355 Scope* scope = info()->scope();
// +1 accounts for the receiver.
356 if ((scope->num_parameters() + 1) > parameter_limit) {
357 return AbortOptimization(kTooManyParameters);
360 const int locals_limit = LUnallocated::kMaxFixedSlotIndex;
361 if (info()->is_osr() &&
362 scope->num_parameters() + 1 + scope->num_stack_slots() > locals_limit) {
363 return AbortOptimization(kTooManyParametersLocals);
366 if (scope->HasIllegalRedeclaration()) {
367 return AbortOptimization(kFunctionWithIllegalRedeclaration);
370 // Check the whitelist for Crankshaft.
371 if (!info()->closure()->PassesFilter(FLAG_hydrogen_filter)) {
372 return AbortOptimization(kHydrogenFilter);
375 // Crankshaft requires a version of fullcode with deoptimization support.
376 // Recompile the unoptimized version of the code if the current version
377 // doesn't have deoptimization support already.
378 // Otherwise, if we are gathering compilation time and space statistics
379 // for hydrogen, gather baseline statistics for a fullcode compilation.
380 bool should_recompile = !info()->shared_info()->has_deoptimization_support();
381 if (should_recompile || FLAG_hydrogen_stats) {
382 base::ElapsedTimer timer;
383 if (FLAG_hydrogen_stats) {
386 if (!Compiler::EnsureDeoptimizationSupport(info())) {
387 return SetLastStatus(FAILED);
389 if (FLAG_hydrogen_stats) {
390 isolate()->GetHStatistics()->IncrementFullCodeGen(timer.Elapsed());
394 DCHECK(info()->shared_info()->has_deoptimization_support());
396 // Check the whitelist for TurboFan.
397 if ((FLAG_turbo_asm && info()->shared_info()->asm_function()) ||
398 info()->closure()->PassesFilter(FLAG_turbo_filter)) {
// TurboFan path: run the pipeline; on success the job is done here.
399 compiler::Pipeline pipeline(info());
400 pipeline.GenerateCode();
401 if (!info()->code().is_null()) {
402 if (FLAG_turbo_deoptimization) {
403 info()->context()->native_context()->AddOptimizedCode(*info()->code());
405 return SetLastStatus(SUCCEEDED);
409 if (FLAG_trace_hydrogen) {
410 Handle<String> name = info()->function()->debug_name();
411 PrintF("-----------------------------------------------------------\n");
412 PrintF("Compiling method %s using hydrogen\n", name->ToCString().get());
413 isolate()->GetHTracer()->TraceCompilation(info());
416 // Type-check the function.
417 AstTyper::Run(info());
// Use the position-tracking builder when positions are needed for tracing.
419 graph_builder_ = (FLAG_hydrogen_track_positions || FLAG_trace_ic)
420 ? new(info()->zone()) HOptimizedGraphBuilderWithPositions(info())
421 : new(info()->zone()) HOptimizedGraphBuilder(info());
423 Timer t(this, &time_taken_to_create_graph_);
424 info()->set_this_has_uses(false);
425 graph_ = graph_builder_->CreateGraph();
427 if (isolate()->has_pending_exception()) {
428 return SetLastStatus(FAILED);
431 if (graph_ == NULL) return SetLastStatus(BAILED_OUT);
433 if (info()->HasAbortedDueToDependencyChange()) {
434 // Dependency has changed during graph creation. Let's try again later.
435 return RetryOptimization(kBailedOutDueToDependencyChange);
438 return SetLastStatus(SUCCEEDED);
// Phase 2: optimizes the Hydrogen graph and lowers it to a Lithium chunk.
// Runs with heap/handle allocation and code-dependency changes disallowed so
// it can safely execute on a background thread. Skipped when phase 1 already
// produced code (TurboFan path).
442 OptimizedCompileJob::Status OptimizedCompileJob::OptimizeGraph() {
443 DisallowHeapAllocation no_allocation;
444 DisallowHandleAllocation no_handles;
445 DisallowHandleDereference no_deref;
446 DisallowCodeDependencyChange no_dependency_change;
448 DCHECK(last_status() == SUCCEEDED);
449 // TODO(turbofan): Currently everything is done in the first phase.
450 if (!info()->code().is_null()) {
451 return last_status();
454 Timer t(this, &time_taken_to_optimize_);
455 DCHECK(graph_ != NULL);
456 BailoutReason bailout_reason = kNoReason;
458 if (graph_->Optimize(&bailout_reason)) {
459 chunk_ = LChunk::NewChunk(graph_);
460 if (chunk_ != NULL) return SetLastStatus(SUCCEEDED);
461 } else if (bailout_reason != kNoReason) {
// Record the reason on the builder so it can be reported/acted upon.
462 graph_builder_->Bailout(bailout_reason);
465 return SetLastStatus(BAILED_OUT);
// Phase 3: generates machine code from the Lithium chunk, records stats, and
// registers the optimized code on the native context. Skipped when phase 1
// already produced code (TurboFan path).
469 OptimizedCompileJob::Status OptimizedCompileJob::GenerateCode() {
470 DCHECK(last_status() == SUCCEEDED);
471 // TODO(turbofan): Currently everything is done in the first phase.
472 if (!info()->code().is_null()) {
473 RecordOptimizationStats();
474 return last_status();
477 DCHECK(!info()->HasAbortedDueToDependencyChange());
478 DisallowCodeDependencyChange no_dependency_change;
479 DisallowJavascriptExecution no_js(isolate());
480 { // Scope for timer.
481 Timer timer(this, &time_taken_to_codegen_);
482 DCHECK(chunk_ != NULL);
483 DCHECK(graph_ != NULL);
484 // Deferred handles reference objects that were accessible during
485 // graph creation. To make sure that we don't encounter inconsistencies
486 // between graph creation and code generation, we disallow accessing
487 // objects through deferred handles during the latter, with exceptions.
488 DisallowDeferredHandleDereference no_deferred_handle_deref;
489 Handle<Code> optimized_code = chunk_->Codegen();
490 if (optimized_code.is_null()) {
491 if (info()->bailout_reason() == kNoReason) {
492 return AbortOptimization(kCodeGenerationFailed);
494 return SetLastStatus(BAILED_OUT);
496 info()->SetCode(optimized_code);
498 RecordOptimizationStats();
499 // Add to the weak list of optimized code objects.
500 info()->context()->native_context()->AddOptimizedCode(*info()->code());
501 return SetLastStatus(SUCCEEDED);
// Bumps the shared function's opt_count (once per optimization, guarding
// against concurrent/OSR races) and emits timing/size statistics under the
// various tracing flags.
505 void OptimizedCompileJob::RecordOptimizationStats() {
506 Handle<JSFunction> function = info()->closure();
507 if (!function->IsOptimized()) {
508 // Concurrent recompilation and OSR may race. Increment only once.
509 int opt_count = function->shared()->opt_count();
510 function->shared()->set_opt_count(opt_count + 1);
512 double ms_creategraph = time_taken_to_create_graph_.InMillisecondsF();
513 double ms_optimize = time_taken_to_optimize_.InMillisecondsF();
514 double ms_codegen = time_taken_to_codegen_.InMillisecondsF();
515 if (FLAG_trace_opt) {
516 PrintF("[optimizing ");
517 function->ShortPrint();
518 PrintF(" - took %0.3f, %0.3f, %0.3f ms]\n", ms_creategraph, ms_optimize,
521 if (FLAG_trace_opt_stats) {
// Process-wide accumulators across all optimizations (intentionally static).
522 static double compilation_time = 0.0;
523 static int compiled_functions = 0;
524 static int code_size = 0;
526 compilation_time += (ms_creategraph + ms_optimize + ms_codegen);
527 compiled_functions++;
528 code_size += function->shared()->SourceSize();
529 PrintF("Compiled: %d functions with %d byte source size in %fms.\n",
534 if (FLAG_hydrogen_stats) {
535 isolate()->GetHStatistics()->IncrementSubtotals(time_taken_to_create_graph_,
536 time_taken_to_optimize_,
537 time_taken_to_codegen_);
542 // Sets the expected number of properties based on estimate from compiler.
// The raw estimate is adjusted by heuristics (minimum of 2; different slack
// policies for snapshot serialization vs. clever optimizations) before being
// stored on the shared function info.
543 void SetExpectedNofPropertiesFromEstimate(Handle<SharedFunctionInfo> shared,
545 // If no properties are added in the constructor, they are more likely
546 // to be added later.
547 if (estimate == 0) estimate = 2;
549 // TODO(yangguo): check whether those heuristics are still up-to-date.
550 // We do not shrink objects that go into a snapshot (yet), so we adjust
551 // the estimate conservatively.
552 if (shared->GetIsolate()->serializer_enabled()) {
554 } else if (FLAG_clever_optimizations) {
555 // Inobject slack tracking will reclaim redundant inobject space later,
556 // so we can afford to adjust the estimate generously.
562 shared->set_expected_nof_properties(estimate);
566 // Sets the function info on a function.
567 // The start_position points to the first '(' character after the function name
568 // in the full script source. When counting characters in the script source the
569 // the first character is number 0 (not 1).
// Copies every compiler-determined property of the FunctionLiteral onto the
// SharedFunctionInfo: arity, script/source positions, laziness, strictness,
// parameter/AST characteristics and bailout/caching decisions.
570 static void SetFunctionInfo(Handle<SharedFunctionInfo> function_info,
571 FunctionLiteral* lit,
573 Handle<Script> script) {
574 function_info->set_length(lit->parameter_count());
575 function_info->set_formal_parameter_count(lit->parameter_count());
576 function_info->set_script(*script);
577 function_info->set_function_token_position(lit->function_token_position());
578 function_info->set_start_position(lit->start_position());
579 function_info->set_end_position(lit->end_position());
580 function_info->set_is_expression(lit->is_expression());
581 function_info->set_is_anonymous(lit->is_anonymous());
582 function_info->set_is_toplevel(is_toplevel);
583 function_info->set_inferred_name(*lit->inferred_name());
584 function_info->set_allows_lazy_compilation(lit->AllowsLazyCompilation());
585 function_info->set_allows_lazy_compilation_without_context(
586 lit->AllowsLazyCompilationWithoutContext());
587 function_info->set_strict_mode(lit->strict_mode());
588 function_info->set_uses_arguments(lit->scope()->arguments() != NULL);
589 function_info->set_has_duplicate_parameters(lit->has_duplicate_parameters());
590 function_info->set_ast_node_count(lit->ast_node_count());
591 function_info->set_is_function(lit->is_function());
592 function_info->set_bailout_reason(lit->dont_optimize_reason());
593 function_info->set_dont_cache(lit->flags()->Contains(kDontCache));
594 function_info->set_kind(lit->kind());
595 function_info->set_asm_function(lit->scope()->asm_function());
// Emits code-creation events for the profiler/logger (with script name and
// line/column when available) and registers the code with GDB JIT support.
599 static void RecordFunctionCompilation(Logger::LogEventsAndTags tag,
600 CompilationInfo* info,
601 Handle<SharedFunctionInfo> shared) {
602 // SharedFunctionInfo is passed separately, because if CompilationInfo
603 // was created using Script object, it will not have it.
605 // Log the code generation. If source information is available include
606 // script name and line number. Check explicitly whether logging is
607 // enabled as finding the line number is not free.
608 if (info->isolate()->logger()->is_logging_code_events() ||
609 info->isolate()->cpu_profiler()->is_profiling()) {
610 Handle<Script> script = info->script();
611 Handle<Code> code = info->code();
// The CompileLazy builtin is a placeholder, not real generated code.
612 if (code.is_identical_to(info->isolate()->builtins()->CompileLazy())) {
// Line/column numbers are converted to 1-based for logging.
615 int line_num = Script::GetLineNumber(script, shared->start_position()) + 1;
617 Script::GetColumnNumber(script, shared->start_position()) + 1;
618 String* script_name = script->name()->IsString()
619 ? String::cast(script->name())
620 : info->isolate()->heap()->empty_string();
621 Logger::LogEventsAndTags log_tag = Logger::ToNativeByScript(tag, *script);
622 PROFILE(info->isolate(),
623 CodeCreateEvent(log_tag, *code, *shared, info, script_name,
624 line_num, column_num));
627 GDBJIT(AddCode(Handle<String>(shared->DebugName()),
628 Handle<Script>(info->script()), Handle<Code>(info->code()),
// Runs the unoptimized pipeline on an already-parsed function: AST rewriting,
// scope analysis, then full-codegen. On codegen failure ensures a pending
// exception exists (stack overflow if none was set).
633 static bool CompileUnoptimizedCode(CompilationInfo* info) {
634 DCHECK(AllowCompilation::IsAllowed(info->isolate()));
635 DCHECK(info->function() != NULL);
636 if (!Rewriter::Rewrite(info)) return false;
637 if (!Scope::Analyze(info)) return false;
638 DCHECK(info->scope() != NULL);
640 if (!FullCodeGenerator::MakeCode(info)) {
641 Isolate* isolate = info->isolate();
642 if (!isolate->has_pending_exception()) isolate->StackOverflow();
// Shared path for producing unoptimized code: parses, copies literal-derived
// metadata to the shared info, compiles via full-codegen, logs the
// compilation, and updates scope info / code / feedback vector on the shared
// function info. Returns an empty MaybeHandle on failure.
649 MUST_USE_RESULT static MaybeHandle<Code> GetUnoptimizedCodeCommon(
650 CompilationInfo* info) {
651 VMState<COMPILER> state(info->isolate());
652 PostponeInterruptsScope postpone(info->isolate());
654 // Parse and update CompilationInfo with the results.
655 if (!Parser::Parse(info)) return MaybeHandle<Code>();
656 Handle<SharedFunctionInfo> shared = info->shared_info();
657 FunctionLiteral* lit = info->function();
658 shared->set_strict_mode(lit->strict_mode());
659 SetExpectedNofPropertiesFromEstimate(shared, lit->expected_property_count());
660 shared->set_bailout_reason(lit->dont_optimize_reason());
661 shared->set_ast_node_count(lit->ast_node_count());
663 // Compile unoptimized code.
664 if (!CompileUnoptimizedCode(info)) return MaybeHandle<Code>();
666 CHECK_EQ(Code::FUNCTION, info->code()->kind());
667 RecordFunctionCompilation(Logger::LAZY_COMPILE_TAG, info, shared);
669 // Update the shared function info with the scope info. Allocating the
670 // ScopeInfo object may cause a GC.
671 Handle<ScopeInfo> scope_info = ScopeInfo::Create(info->scope(), info->zone());
672 shared->set_scope_info(*scope_info);
674 // Update the code and feedback vector for the shared function info.
675 shared->ReplaceCode(*info->code());
676 if (shared->optimization_disabled()) info->code()->set_optimizable(false);
677 shared->set_feedback_vector(*info->feedback_vector());
// Looks up previously cached optimized code for (function, osr_ast_id) in the
// shared info's optimized code map, restoring cached literals when present.
// Returns empty when caching is off, the function is bound, or nothing cached.
683 MUST_USE_RESULT static MaybeHandle<Code> GetCodeFromOptimizedCodeMap(
684 Handle<JSFunction> function, BailoutId osr_ast_id) {
685 if (FLAG_cache_optimized_code) {
686 Handle<SharedFunctionInfo> shared(function->shared());
687 // Bound functions are not cached.
688 if (shared->bound()) return MaybeHandle<Code>();
689 DisallowHeapAllocation no_gc;
690 int index = shared->SearchOptimizedCodeMap(
691 function->context()->native_context(), osr_ast_id);
693 if (FLAG_trace_opt) {
694 PrintF("[found optimized code for ");
695 function->ShortPrint();
696 if (!osr_ast_id.IsNone()) {
697 PrintF(" at OSR AST id %d", osr_ast_id.ToInt());
701 FixedArray* literals = shared->GetLiteralsFromOptimizedCodeMap(index);
702 if (literals != NULL) function->set_literals(literals);
703 return Handle<Code>(shared->GetCodeFromOptimizedCodeMap(index));
706 return MaybeHandle<Code>();
// Stores freshly produced optimized code in the shared info's optimized code
// map so later instantiations can reuse it. Skips non-optimized code,
// context-specialized TurboFan code (not shareable) and bound functions.
710 static void InsertCodeIntoOptimizedCodeMap(CompilationInfo* info) {
711 Handle<Code> code = info->code();
712 if (code->kind() != Code::OPTIMIZED_FUNCTION) return; // Nothing to do.
714 // Context specialization folds-in the context, so no sharing can occur.
715 if (code->is_turbofanned() && info->is_context_specializing()) return;
717 // Cache optimized code.
718 if (FLAG_cache_optimized_code) {
719 Handle<JSFunction> function = info->closure();
720 Handle<SharedFunctionInfo> shared(function->shared());
721 // Do not cache bound functions.
722 if (shared->bound()) return;
723 Handle<FixedArray> literals(function->literals());
724 Handle<Context> native_context(function->context()->native_context());
725 SharedFunctionInfo::AddToOptimizedCodeMap(shared, native_context, code,
726 literals, info->osr_ast_id());
// Front-end portion shared by both optimized-compilation entry points:
// parse, rewrite, and analyze scopes; false on any failure.
731 static bool CompileOptimizedPrologue(CompilationInfo* info) {
732 if (!Parser::Parse(info)) return false;
733 if (!Rewriter::Rewrite(info)) return false;
734 if (!Scope::Analyze(info)) return false;
735 DCHECK(info->scope() != NULL);
// Synchronous (non-concurrent) optimized compilation: runs all three
// OptimizedCompileJob phases on the current thread, tracing aborts, and on
// success caches the code and logs the compilation.
740 static bool GetOptimizedCodeNow(CompilationInfo* info) {
741 if (!CompileOptimizedPrologue(info)) return false;
743 TimerEventScope<TimerEventRecompileSynchronous> timer(info->isolate());
745 OptimizedCompileJob job(info);
746 if (job.CreateGraph() != OptimizedCompileJob::SUCCEEDED ||
747 job.OptimizeGraph() != OptimizedCompileJob::SUCCEEDED ||
748 job.GenerateCode() != OptimizedCompileJob::SUCCEEDED) {
749 if (FLAG_trace_opt) {
750 PrintF("[aborted optimizing ");
751 info->closure()->ShortPrint();
752 PrintF(" because: %s]\n", GetBailoutReason(info->bailout_reason()));
// Success: the compile must not have left a pending exception.
758 DCHECK(!info->isolate()->has_pending_exception());
759 InsertCodeIntoOptimizedCodeMap(info);
760 RecordFunctionCompilation(Logger::LAZY_COMPILE_TAG, info,
761 info->shared_info());
762 if (FLAG_trace_opt) {
763 PrintF("[completed optimizing ");
764 info->closure()->ShortPrint();
// Concurrent optimized compilation: runs the prologue and graph creation on
// this thread (handles copied into a compilation handle scope), then queues
// the job for the background optimizing compiler thread. Returns false when
// the queue is full or any foreground step fails.
771 static bool GetOptimizedCodeLater(CompilationInfo* info) {
772 Isolate* isolate = info->isolate();
773 if (!isolate->optimizing_compiler_thread()->IsQueueAvailable()) {
774 if (FLAG_trace_concurrent_recompilation) {
775 PrintF(" ** Compilation queue full, will retry optimizing ");
776 info->closure()->ShortPrint();
782 CompilationHandleScope handle_scope(info);
783 if (!CompileOptimizedPrologue(info)) return false;
784 info->SaveHandles(); // Copy handles to the compilation handle scope.
786 TimerEventScope<TimerEventRecompileSynchronous> timer(info->isolate());
// The job is zone-allocated; its lifetime is tied to the info's zone.
788 OptimizedCompileJob* job = new (info->zone()) OptimizedCompileJob(info);
789 OptimizedCompileJob::Status status = job->CreateGraph();
790 if (status != OptimizedCompileJob::SUCCEEDED) return false;
791 isolate->optimizing_compiler_thread()->QueueForOptimization(job);
793 if (FLAG_trace_concurrent_recompilation) {
794 PrintF(" ** Queued ");
795 info->closure()->ShortPrint();
796 if (info->is_osr()) {
797 PrintF(" for concurrent OSR at %d.\n", info->osr_ast_id().ToInt());
799 PrintF(" for concurrent optimization.\n");
// Returns unoptimized code for the function, reusing the shared info's code
// when it is already compiled, otherwise compiling via the common path.
806 MaybeHandle<Code> Compiler::GetUnoptimizedCode(Handle<JSFunction> function) {
807 DCHECK(!function->GetIsolate()->has_pending_exception());
808 DCHECK(!function->is_compiled());
809 if (function->shared()->is_compiled()) {
810 return Handle<Code>(function->shared()->code());
813 CompilationInfoWithZone info(function);
815 ASSIGN_RETURN_ON_EXCEPTION(info.isolate(), result,
816 GetUnoptimizedCodeCommon(&info),
// Lazy-compilation entry point. asm.js functions may go straight through the
// optimizing (TurboFan) path under --turbo-asm; otherwise existing shared
// code is reused or fresh unoptimized code is produced, optionally followed
// by eager optimization under --always-opt.
822 MaybeHandle<Code> Compiler::GetLazyCode(Handle<JSFunction> function) {
823 DCHECK(!function->GetIsolate()->has_pending_exception());
824 DCHECK(!function->is_compiled());
826 if (FLAG_turbo_asm && function->shared()->asm_function()) {
827 CompilationInfoWithZone info(function);
829 VMState<COMPILER> state(info.isolate());
830 PostponeInterruptsScope postpone(info.isolate());
832 info.SetOptimizing(BailoutId::None(),
833 Handle<Code>(function->shared()->code()));
// asm.js: specialize to the context, enable typing, disable inlining.
835 info.MarkAsContextSpecializing();
836 info.MarkAsTypingEnabled();
837 info.MarkAsInliningDisabled();
839 if (GetOptimizedCodeNow(&info)) return info.code();
842 if (function->shared()->is_compiled()) {
843 return Handle<Code>(function->shared()->code());
846 CompilationInfoWithZone info(function);
848 ASSIGN_RETURN_ON_EXCEPTION(info.isolate(), result,
849 GetUnoptimizedCodeCommon(&info), Code);
// --always-opt: immediately try optimizing when nothing forbids it.
851 if (FLAG_always_opt &&
852 info.isolate()->use_crankshaft() &&
853 !info.shared_info()->optimization_disabled() &&
854 !info.isolate()->DebuggerHasBreakPoints()) {
855 Handle<Code> opt_code;
856 if (Compiler::GetOptimizedCode(
858 Compiler::NOT_CONCURRENT).ToHandle(&opt_code)) {
// Compiles unoptimized code directly from a SharedFunctionInfo (no closure).
867 MaybeHandle<Code> Compiler::GetUnoptimizedCode(
868 Handle<SharedFunctionInfo> shared) {
869 DCHECK(!shared->GetIsolate()->has_pending_exception());
870 DCHECK(!shared->is_compiled());
872 CompilationInfoWithZone info(shared);
873 return GetUnoptimizedCodeCommon(&info);
// Makes sure the function has code, compiling lazily if needed. On failure,
// optionally clears the pending exception per the ClearExceptionFlag.
877 bool Compiler::EnsureCompiled(Handle<JSFunction> function,
878 ClearExceptionFlag flag) {
879 if (function->is_compiled()) return true;
880 MaybeHandle<Code> maybe_code = Compiler::GetLazyCode(function);
882 if (!maybe_code.ToHandle(&code)) {
883 if (flag == CLEAR_EXCEPTION) {
884 function->GetIsolate()->clear_pending_exception();
888 function->ReplaceCode(*code);
889 DCHECK(function->is_compiled());
894 // TODO(turbofan): In the future, unoptimized code with deopt support could
895 // be generated lazily once deopt is triggered.
// Recompiles the unoptimized version with deoptimization support when it does
// not have it yet, reusing the AST/scope/context from the optimizing info;
// installs the new full code, feedback vector and (if missing) scope info on
// the shared function info.
896 bool Compiler::EnsureDeoptimizationSupport(CompilationInfo* info) {
897 if (!info->shared_info()->has_deoptimization_support()) {
898 CompilationInfoWithZone unoptimized(info->shared_info());
899 // Note that we use the same AST that we will use for generating the
901 unoptimized.SetFunction(info->function());
902 unoptimized.PrepareForCompilation(info->scope());
903 unoptimized.SetContext(info->context());
904 unoptimized.EnableDeoptimizationSupport();
905 if (!FullCodeGenerator::MakeCode(&unoptimized)) return false;
907 Handle<SharedFunctionInfo> shared = info->shared_info();
908 shared->EnableDeoptimizationSupport(*unoptimized.code());
909 shared->set_feedback_vector(*unoptimized.feedback_vector());
911 // The scope info might not have been set if a lazily compiled
912 // function is inlined before being called for the first time.
913 if (shared->scope_info() == ScopeInfo::Empty(info->isolate())) {
914 Handle<ScopeInfo> target_scope_info =
915 ScopeInfo::Create(info->scope(), info->zone());
916 shared->set_scope_info(*target_scope_info);
919 // The existing unoptimized code was replaced with the new one.
920 RecordFunctionCompilation(Logger::LAZY_COMPILE_TAG, &unoptimized, shared);
926 // Compile full code for debugging. This code will have debug break slots
927 // and deoptimization information. Deoptimization information is required
928 // in case that an optimized version of this function is still activated on
929 // the stack. It will also make sure that the full code is compiled with
930 // the same flags as the previous version, that is flags which can change
931 // the code generated. The current method of mapping from already compiled
932 // full code without debug break slots to full code with debug break slots
933 // depends on the generated code is otherwise exactly the same.
934 // If compilation fails, just keep the existing code.
935 MaybeHandle<Code> Compiler::GetDebugCode(Handle<JSFunction> function) {
936 CompilationInfoWithZone info(function);
937 Isolate* isolate = info.isolate();
938 VMState<COMPILER> state(isolate);
942 DCHECK(!isolate->has_pending_exception());
943 Handle<Code> old_code(function->shared()->code());
944 DCHECK(old_code->kind() == Code::FUNCTION);
945 DCHECK(!old_code->has_debug_break_slots());
947 info.MarkCompilingForDebugging();
// Mirror the old code's deopt-support setting so codegen stays identical.
948 if (old_code->is_compiled_optimizable()) {
949 info.EnableDeoptimizationSupport();
951 info.MarkNonOptimizable();
953 MaybeHandle<Code> maybe_new_code = GetUnoptimizedCodeCommon(&info);
954 Handle<Code> new_code;
955 if (!maybe_new_code.ToHandle(&new_code)) {
956 isolate->clear_pending_exception();
958 DCHECK_EQ(old_code->is_compiled_optimizable(),
959 new_code->is_compiled_optimizable());
961 return maybe_new_code;
// Compiles a whole script for LiveEdit: parses and compiles unoptimized code
// while a LiveEditFunctionTracker records function info, and refreshes the
// scope info on the resulting shared function info.
965 void Compiler::CompileForLiveEdit(Handle<Script> script) {
966 // TODO(635): support extensions.
967 CompilationInfoWithZone info(script);
968 PostponeInterruptsScope postpone(info.isolate());
969 VMState<COMPILER> state(info.isolate());
972 if (!Parser::Parse(&info)) return;
974 LiveEditFunctionTracker tracker(info.isolate(), info.function());
975 if (!CompileUnoptimizedCode(&info)) return;
976 if (!info.shared_info().is_null()) {
977 Handle<ScopeInfo> scope_info = ScopeInfo::Create(info.scope(),
979 info.shared_info()->set_scope_info(*scope_info);
981 tracker.RecordRootFunctionInfo(info.code());
// Parses (if needed) and compiles a top-level script or eval unit,
// producing a new SharedFunctionInfo. Returns a null handle on failure.
// NOTE(review): partial listing — the original file's line numbers are
// embedded per line and several lines (closing braces, the eval branch of
// the log_tag selection at 1055, the final return, etc.) are missing.
985 static Handle<SharedFunctionInfo> CompileToplevel(CompilationInfo* info) {
986 Isolate* isolate = info->isolate();
987 PostponeInterruptsScope postpone(isolate);
988 DCHECK(!isolate->native_context().is_null());
989 Handle<Script> script = info->script();
991 // TODO(svenpanne) Obscure place for this, perhaps move to OnBeforeCompile?
992 FixedArray* array = isolate->native_context()->embedder_data();
993 script->set_context_data(array->get(0));
995 isolate->debug()->OnBeforeCompile(script);
997 DCHECK(info->is_eval() || info->is_global());
999 Handle<SharedFunctionInfo> result;
1001 { VMState<COMPILER> state(info->isolate());
1002 if (info->function() == NULL) {
1003 // Parse the script if needed (if it's already parsed, function() is
// Lazy parsing is allowed for cache-consuming compiles or long sources,
// unless the debugger demands eager compilation.
1005 bool parse_allow_lazy =
1006 (info->compile_options() == ScriptCompiler::kConsumeParserCache ||
1007 String::cast(script->source())->length() >
1008 FLAG_min_preparse_length) &&
1009 !Compiler::DebuggerWantsEagerCompilation(info);
1011 if (!parse_allow_lazy &&
1012 (info->compile_options() == ScriptCompiler::kProduceParserCache ||
1013 info->compile_options() == ScriptCompiler::kConsumeParserCache)) {
1014 // We are going to parse eagerly, but we either 1) have cached data
1015 // produced by lazy parsing or 2) are asked to generate cached data.
1016 // Eager parsing cannot benefit from cached data, and producing cached
1017 // data while parsing eagerly is not implemented.
1018 info->SetCachedData(NULL, ScriptCompiler::kNoCompileOptions);
1020 if (!Parser::Parse(info, parse_allow_lazy)) {
1021 return Handle<SharedFunctionInfo>::null();
1025 FunctionLiteral* lit = info->function();
1026 LiveEditFunctionTracker live_edit_tracker(isolate, lit);
1028 // Measure how long it takes to do the compilation; only take the
1029 // rest of the function into account to avoid overlap with the
1030 // parsing statistics.
1031 HistogramTimer* rate = info->is_eval()
1032 ? info->isolate()->counters()->compile_eval()
1033 : info->isolate()->counters()->compile();
1034 HistogramTimerScope timer(rate);
1036 // Compile the code.
1037 if (!CompileUnoptimizedCode(info)) {
1038 return Handle<SharedFunctionInfo>::null();
1041 // Allocate function.
1042 DCHECK(!info->code().is_null());
1043 result = isolate->factory()->NewSharedFunctionInfo(
1044 lit->name(), lit->materialized_literal_count(), lit->kind(),
1045 info->code(), ScopeInfo::Create(info->scope(), info->zone()),
1046 info->feedback_vector());
1048 DCHECK_EQ(RelocInfo::kNoPosition, lit->function_token_position());
1049 SetFunctionInfo(result, lit, true, script);
1051 Handle<String> script_name = script->name()->IsString()
1052 ? Handle<String>(String::cast(script->name()))
1053 : isolate->factory()->empty_string();
// The eval branch of this ternary (original line 1055) is missing here.
1054 Logger::LogEventsAndTags log_tag = info->is_eval()
1056 : Logger::ToNativeByScript(Logger::SCRIPT_TAG, *script);
1058 PROFILE(isolate, CodeCreateEvent(
1059 log_tag, *info->code(), *result, info, *script_name));
1060 GDBJIT(AddCode(script_name, script, info->code(), info));
1062 // Hint to the runtime system used when allocating space for initial
1063 // property space by setting the expected number of properties for
1064 // the instances of the function.
1065 SetExpectedNofPropertiesFromEstimate(result,
1066 lit->expected_property_count());
1068 if (!script.is_null())
1069 script->set_compilation_state(Script::COMPILATION_STATE_COMPILED);
1071 live_edit_tracker.RecordFunctionInfo(result, lit, info->zone());
1074 isolate->debug()->OnAfterCompile(script);
// Returns a JSFunction for eval `source` in `context`, consulting the
// per-isolate eval compilation cache first and compiling on a miss.
// NOTE(review): partial listing — the original file's line numbers are
// embedded per line and some closing-brace/continuation lines are missing.
1080 MaybeHandle<JSFunction> Compiler::GetFunctionFromEval(
1081 Handle<String> source,
1082 Handle<Context> context,
1083 StrictMode strict_mode,
1084 ParseRestriction restriction,
1085 int scope_position) {
1086 Isolate* isolate = source->GetIsolate();
1087 int source_length = source->length();
1088 isolate->counters()->total_eval_size()->Increment(source_length);
1089 isolate->counters()->total_compile_size()->Increment(source_length);
1091 CompilationCache* compilation_cache = isolate->compilation_cache();
// Cache lookup; the LookupEval call's trailing argument line (1094,
// presumably scope_position) is missing from this view.
1092 MaybeHandle<SharedFunctionInfo> maybe_shared_info =
1093 compilation_cache->LookupEval(source, context, strict_mode,
1095 Handle<SharedFunctionInfo> shared_info;
1097 if (!maybe_shared_info.ToHandle(&shared_info)) {
// Cache miss: compile the eval source as a fresh top-level unit.
1098 Handle<Script> script = isolate->factory()->NewScript(source);
1099 CompilationInfoWithZone info(script);
1101 if (context->IsNativeContext()) info.MarkAsGlobal();
1102 info.SetStrictMode(strict_mode);
1103 info.SetParseRestriction(restriction);
1104 info.SetContext(context);
1106 Debug::RecordEvalCaller(script);
1108 shared_info = CompileToplevel(&info);
1110 if (shared_info.is_null()) {
1111 return MaybeHandle<JSFunction>();
1113 // Explicitly disable optimization for eval code. We're not yet prepared
1114 // to handle eval-code in the optimizing compiler.
1115 shared_info->DisableOptimization(kEval);
1117 // If caller is strict mode, the result must be in strict mode as well.
1118 DCHECK(strict_mode == SLOPPY || shared_info->strict_mode() == STRICT);
1119 if (!shared_info->dont_cache()) {
1120 compilation_cache->PutEval(
1121 source, context, shared_info, scope_position);
// Cache hit from a different GC era: reset IC-age-dependent state.
1124 } else if (shared_info->ic_age() != isolate->heap()->global_ic_age()) {
1125 shared_info->ResetForNewContext(isolate->heap()->global_ic_age());
1128 return isolate->factory()->NewFunctionFromSharedFunctionInfo(
1129 shared_info, context, NOT_TENURED);
// Compiles a full script: validates the compile-options/cached-data
// combination, tries the compilation cache (or code-cache deserialization),
// and on a miss compiles, caches, and optionally serializes the result.
// NOTE(review): partial listing — the original file's line numbers are
// embedded per line; `else` keywords, closing braces and a few statements
// (e.g. 1139, 1176-1178, the timer.Start() around 1175) are missing.
1133 Handle<SharedFunctionInfo> Compiler::CompileScript(
1134 Handle<String> source, Handle<Object> script_name, int line_offset,
1135 int column_offset, bool is_shared_cross_origin, Handle<Context> context,
1136 v8::Extension* extension, ScriptData** cached_data,
1137 ScriptCompiler::CompileOptions compile_options, NativesFlag natives) {
// Sanity-check the cached_data pointer against the requested options:
// produce => out-pointer must be empty; consume => data must be present.
1138 if (compile_options == ScriptCompiler::kNoCompileOptions) {
1140 } else if (compile_options == ScriptCompiler::kProduceParserCache ||
1141 compile_options == ScriptCompiler::kProduceCodeCache) {
1142 DCHECK(cached_data && !*cached_data);
1143 DCHECK(extension == NULL);
1145 DCHECK(compile_options == ScriptCompiler::kConsumeParserCache ||
1146 compile_options == ScriptCompiler::kConsumeCodeCache);
1147 DCHECK(cached_data && *cached_data);
1148 DCHECK(extension == NULL);
1150 Isolate* isolate = source->GetIsolate();
1151 int source_length = source->length();
1152 isolate->counters()->total_load_size()->Increment(source_length);
1153 isolate->counters()->total_compile_size()->Increment(source_length);
1155 CompilationCache* compilation_cache = isolate->compilation_cache();
1157 // Do a lookup in the compilation cache but not for extensions.
1158 MaybeHandle<SharedFunctionInfo> maybe_result;
1159 Handle<SharedFunctionInfo> result;
1160 if (extension == NULL) {
// Fast path: deserialize a previously produced code cache (skipped when
// the debugger is loaded, since serialized code lacks debug support).
1161 if (FLAG_serialize_toplevel &&
1162 compile_options == ScriptCompiler::kConsumeCodeCache &&
1163 !isolate->debug()->is_loaded()) {
1164 HistogramTimerScope timer(isolate->counters()->compile_deserialize());
1165 return CodeSerializer::Deserialize(isolate, *cached_data, source);
1167 maybe_result = compilation_cache->LookupScript(
1168 source, script_name, line_offset, column_offset,
1169 is_shared_cross_origin, context);
1173 base::ElapsedTimer timer;
1174 if (FLAG_profile_deserialization && FLAG_serialize_toplevel &&
1175 compile_options == ScriptCompiler::kProduceCodeCache) {
1179 if (!maybe_result.ToHandle(&result)) {
1180 // No cache entry found. Compile the script.
1182 // Create a script object describing the script to be compiled.
1183 Handle<Script> script = isolate->factory()->NewScript(source);
1184 if (natives == NATIVES_CODE) {
1185 script->set_type(Smi::FromInt(Script::TYPE_NATIVE));
1187 if (!script_name.is_null()) {
1188 script->set_name(*script_name);
1189 script->set_line_offset(Smi::FromInt(line_offset));
1190 script->set_column_offset(Smi::FromInt(column_offset));
1192 script->set_is_shared_cross_origin(is_shared_cross_origin);
1194 // Compile the function and add it to the cache.
1195 CompilationInfoWithZone info(script);
1196 info.MarkAsGlobal();
1197 info.SetCachedData(cached_data, compile_options);
1198 info.SetExtension(extension);
1199 info.SetContext(context);
1200 if (FLAG_serialize_toplevel &&
1201 compile_options == ScriptCompiler::kProduceCodeCache) {
1202 info.PrepareForSerializing();
1204 if (FLAG_use_strict) info.SetStrictMode(STRICT);
1206 result = CompileToplevel(&info);
// Cache the result (never for extensions or dont_cache scripts), and
// serialize a code cache if that was requested.
1207 if (extension == NULL && !result.is_null() && !result->dont_cache()) {
1208 compilation_cache->PutScript(source, context, result);
1209 if (FLAG_serialize_toplevel &&
1210 compile_options == ScriptCompiler::kProduceCodeCache) {
1211 HistogramTimerScope histogram_timer(
1212 isolate->counters()->compile_serialize());
1213 *cached_data = CodeSerializer::Serialize(isolate, result, source);
1214 if (FLAG_profile_deserialization) {
1215 PrintF("[Compiling and serializing %d bytes took %0.3f ms]\n",
1216 (*cached_data)->length(), timer.Elapsed().InMillisecondsF());
1221 if (result.is_null()) isolate->ReportPendingMessages();
1222 } else if (result->ic_age() != isolate->heap()->global_ic_age()) {
1223 result->ResetForNewContext(isolate->heap()->global_ic_age());
// Compiles a script whose source arrived via streaming; `info` was prepared
// by the streaming parser. Bumps size counters and delegates to
// CompileToplevel. NOTE(review): the closing brace (line 1239) is missing
// from this partial listing.
1229 Handle<SharedFunctionInfo> Compiler::CompileStreamedScript(
1230 CompilationInfo* info, int source_length) {
1231 Isolate* isolate = info->isolate();
1232 isolate->counters()->total_load_size()->Increment(source_length);
1233 isolate->counters()->total_compile_size()->Increment(source_length);
1235 if (FLAG_use_strict) info->SetStrictMode(STRICT);
1236 // TODO(marja): FLAG_serialize_toplevel is not honoured and won't be; when the
1237 // real code caching lands, streaming needs to be adapted to use it.
1238 return CompileToplevel(info);
// Builds a SharedFunctionInfo for an inner function literal found while
// compiling an outer unit. Either defers to lazy compilation (CompileLazy
// builtin) or generates full code now. NOTE(review): partial listing —
// `else` keywords, closing braces and a few lines (1259, 1267-1268, 1272,
// 1277, 1279-1280, 1295-1296) are missing from this view.
1242 Handle<SharedFunctionInfo> Compiler::BuildFunctionInfo(
1243 FunctionLiteral* literal, Handle<Script> script,
1244 CompilationInfo* outer_info) {
1245 // Precondition: code has been parsed and scopes have been analyzed.
1246 CompilationInfoWithZone info(script);
1247 info.SetFunction(literal);
1248 info.PrepareForCompilation(literal->scope());
1249 info.SetStrictMode(literal->scope()->strict_mode());
// Propagate serialization intent from the outer compilation.
1250 if (outer_info->will_serialize()) info.PrepareForSerializing();
1252 Isolate* isolate = info.isolate();
1253 Factory* factory = isolate->factory();
1254 LiveEditFunctionTracker live_edit_tracker(isolate, literal);
1255 // Determine if the function can be lazily compiled. This is necessary to
1256 // allow some of our builtin JS files to be lazily compiled. These
1257 // builtins cannot be handled lazily by the parser, since we have to know
1258 // if a function uses the special natives syntax, which is something the
1260 // If the debugger requests compilation for break points, we cannot be
1261 // aggressive about lazy compilation, because it might trigger compilation
1262 // of functions without an outer context when setting a breakpoint through
1263 // Debug::FindSharedFunctionInfoInScript.
1264 bool allow_lazy_without_ctx = literal->AllowsLazyCompilationWithoutContext();
1265 bool allow_lazy = literal->AllowsLazyCompilation() &&
1266 !DebuggerWantsEagerCompilation(&info, allow_lazy_without_ctx);
1269 Handle<ScopeInfo> scope_info;
// Lazy path: install the CompileLazy trampoline and an empty scope info;
// real code is generated on first call.
1270 if (FLAG_lazy && allow_lazy && !literal->is_parenthesized()) {
1271 Handle<Code> code = isolate->builtins()->CompileLazy();
1273 scope_info = Handle<ScopeInfo>(ScopeInfo::Empty(isolate));
1274 } else if (FullCodeGenerator::MakeCode(&info)) {
1275 DCHECK(!info.code().is_null());
1276 scope_info = ScopeInfo::Create(info.scope(), info.zone());
1278 return Handle<SharedFunctionInfo>::null();
1281 // Create a shared function info object.
1282 Handle<SharedFunctionInfo> result = factory->NewSharedFunctionInfo(
1283 literal->name(), literal->materialized_literal_count(), literal->kind(),
1284 info.code(), scope_info, info.feedback_vector());
1285 SetFunctionInfo(result, literal, false, script);
1286 RecordFunctionCompilation(Logger::FUNCTION_TAG, &info, result);
1287 result->set_allows_lazy_compilation(allow_lazy);
1288 result->set_allows_lazy_compilation_without_context(allow_lazy_without_ctx);
1290 // Set the expected number of properties for instances and return
1291 // the resulting function.
1292 SetExpectedNofPropertiesFromEstimate(result,
1293 literal->expected_property_count());
1294 live_edit_tracker.RecordFunctionInfo(result, literal, info.zone());
// Returns optimized code for `function`: first consults the optimized code
// map, otherwise runs the optimizing compiler either concurrently (returning
// the InOptimizationQueue builtin as a placeholder) or synchronously.
// On failure any pending exception is cleared and an empty handle returned.
// NOTE(review): partial listing — the original file's line numbers are
// embedded per line and some closing-brace lines are missing from this view.
1299 MaybeHandle<Code> Compiler::GetOptimizedCode(Handle<JSFunction> function,
1300 Handle<Code> current_code,
1301 ConcurrencyMode mode,
1302 BailoutId osr_ast_id) {
1303 Handle<Code> cached_code;
// Fast path: reuse code previously cached for this function/osr entry.
1304 if (GetCodeFromOptimizedCodeMap(
1305 function, osr_ast_id).ToHandle(&cached_code)) {
1309 SmartPointer<CompilationInfo> info(new CompilationInfoWithZone(function));
1310 Isolate* isolate = info->isolate();
1311 DCHECK(AllowCompilation::IsAllowed(isolate));
1312 VMState<COMPILER> state(isolate);
1313 DCHECK(!isolate->has_pending_exception());
1314 PostponeInterruptsScope postpone(isolate);
1316 Handle<SharedFunctionInfo> shared = info->shared_info();
1317 if (shared->code()->kind() != Code::FUNCTION ||
1318 ScopeInfo::Empty(isolate) == shared->scope_info()) {
1319 // The function was never compiled. Compile it unoptimized first.
1320 // TODO(titzer): reuse the AST and scope info from this compile.
1321 CompilationInfoWithZone nested(function);
1322 nested.EnableDeoptimizationSupport();
// Fix: restored "&current_code", which had been corrupted to "¤t_code"
// by an HTML-entity mis-decoding of "&curren".
1323 if (!GetUnoptimizedCodeCommon(&nested).ToHandle(&current_code)) {
1324 return MaybeHandle<Code>();
1326 shared->ReplaceCode(*current_code);
1328 current_code->set_profiler_ticks(0);
1330 info->SetOptimizing(osr_ast_id, current_code);
1332 if (mode == CONCURRENT) {
1333 if (GetOptimizedCodeLater(info.get())) {
1334 info.Detach(); // The background recompile job owns this now.
1335 return isolate->builtins()->InOptimizationQueue();
1338 if (GetOptimizedCodeNow(info.get())) return info->code();
// Optimization failed: clear the exception so the caller can keep
// running the existing unoptimized code.
1341 if (isolate->has_pending_exception()) isolate->clear_pending_exception();
1342 return MaybeHandle<Code>();
// Finalizes a background optimization job on the main thread: generates
// code if the concurrent phase succeeded and no bailout condition applies,
// otherwise retries/aborts. Returns a null handle on failure.
// NOTE(review): partial listing — the original file's line numbers are
// embedded per line and some closing-brace/PrintF lines are missing.
1346 Handle<Code> Compiler::GetConcurrentlyOptimizedCode(OptimizedCompileJob* job) {
1347 // Take ownership of compilation info. Deleting compilation info
1348 // also tears down the zone and the recompile job.
1349 SmartPointer<CompilationInfo> info(job->info());
1350 Isolate* isolate = info->isolate();
1352 VMState<COMPILER> state(isolate);
1353 TimerEventScope<TimerEventRecompileSynchronous> timer(info->isolate());
1355 Handle<SharedFunctionInfo> shared = info->shared_info();
1356 shared->code()->set_profiler_ticks(0);
1358 // 1) Optimization on the concurrent thread may have failed.
1359 // 2) The function may have already been optimized by OSR. Simply continue.
1360 // Except when OSR already disabled optimization for some reason.
1361 // 3) The code may have already been invalidated due to dependency change.
1362 // 4) Debugger may have been activated.
1363 // 5) Code generation may have failed.
1364 if (job->last_status() == OptimizedCompileJob::SUCCEEDED) {
1365 if (shared->optimization_disabled()) {
1366 job->RetryOptimization(kOptimizationDisabled);
1367 } else if (info->HasAbortedDueToDependencyChange()) {
1368 job->RetryOptimization(kBailedOutDueToDependencyChange);
1369 } else if (isolate->DebuggerHasBreakPoints()) {
1370 job->RetryOptimization(kDebuggerHasBreakPoints);
1371 } else if (job->GenerateCode() == OptimizedCompileJob::SUCCEEDED) {
1372 RecordFunctionCompilation(Logger::LAZY_COMPILE_TAG, info.get(), shared);
// Insert the new code into the optimized code map unless it is already
// there for this context/osr entry.
1373 if (info->shared_info()->SearchOptimizedCodeMap(
1374 info->context()->native_context(), info->osr_ast_id()) == -1) {
1375 InsertCodeIntoOptimizedCodeMap(info.get());
1377 if (FLAG_trace_opt) {
1378 PrintF("[completed optimizing ");
1379 info->closure()->ShortPrint();
1382 return Handle<Code>(*info->code());
1386 DCHECK(job->last_status() != OptimizedCompileJob::SUCCEEDED);
1387 if (FLAG_trace_opt) {
1388 PrintF("[aborted optimizing ");
1389 info->closure()->ShortPrint();
1390 PrintF(" because: %s]\n", GetBailoutReason(info->bailout_reason()));
1392 return Handle<Code>::null();
// True when lazy compilation must be bypassed for the debugger: either
// LiveEdit is active, or break points exist and lazy compilation without a
// context is not allowed. NOTE(review): the closing brace (line 1400) is
// missing from this partial listing.
1396 bool Compiler::DebuggerWantsEagerCompilation(CompilationInfo* info,
1397 bool allow_lazy_without_ctx) {
1398 return LiveEditFunctionTracker::IsActive(info->isolate()) ||
1399 (info->isolate()->DebuggerHasBreakPoints() && !allow_lazy_without_ctx);
// Scoped phase marker for hydrogen statistics: records the info-zone
// allocation size at phase start so the destructor can report the delta.
// NOTE(review): closing braces (lines ~1407-1408) are missing from this
// partial listing; timer start presumably happens in the omitted lines.
1403 CompilationPhase::CompilationPhase(const char* name, CompilationInfo* info)
1404 : name_(name), info_(info), zone_(info->isolate()) {
1405 if (FLAG_hydrogen_stats) {
1406 info_zone_start_allocation_size_ = info->zone()->allocation_size();
// Reports this phase's elapsed time and zone-allocation delta to the
// hydrogen statistics collector (only when --hydrogen-stats is on).
// NOTE(review): closing braces (lines ~1417-1418) are missing from this
// partial listing.
1412 CompilationPhase::~CompilationPhase() {
1413 if (FLAG_hydrogen_stats) {
// Total = this phase's own zone plus growth of the shared info zone.
1414 unsigned size = zone()->allocation_size();
1415 size += info_->zone()->allocation_size() - info_zone_start_allocation_size_;
1416 isolate()->GetHStatistics()->SaveTiming(name_, timer_.Elapsed(), size);
// Decides whether this phase should emit trace output: the relevant
// hydrogen trace flag must be set AND the phase name's first character must
// appear in --trace-phase. NOTE(review): the closing brace (line ~1431) is
// missing from this partial listing.
1421 bool CompilationPhase::ShouldProduceTraceOutput() const {
1422 // Trace if the appropriate trace flag is set and the phase name's first
1423 // character is in the FLAG_trace_phase command line parameter.
1424 AllowHandleDereference allow_deref;
1425 bool tracing_on = info()->IsStub()
1426 ? FLAG_trace_hydrogen_stubs
1427 : (FLAG_trace_hydrogen &&
1428 info()->closure()->PassesFilter(FLAG_trace_hydrogen_filter));
1429 return (tracing_on &&
1430 base::OS::StrChr(const_cast<char*>(FLAG_trace_phase), name_[0]) != NULL);
1433 } } // namespace v8::internal