1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
7 #include "src/compiler.h"
9 #include "src/bootstrapper.h"
10 #include "src/codegen.h"
11 #include "src/compilation-cache.h"
12 #include "src/cpu-profiler.h"
13 #include "src/debug.h"
14 #include "src/deoptimizer.h"
15 #include "src/full-codegen.h"
16 #include "src/gdb-jit.h"
17 #include "src/typing.h"
18 #include "src/hydrogen.h"
19 #include "src/isolate-inl.h"
20 #include "src/lithium.h"
21 #include "src/liveedit.h"
22 #include "src/parser.h"
23 #include "src/rewriter.h"
24 #include "src/runtime-profiler.h"
25 #include "src/scanner-character-streams.h"
26 #include "src/scopeinfo.h"
27 #include "src/scopes.h"
28 #include "src/vm-state-inl.h"
// Constructs a CompilationInfo for compiling a whole script (top-level code):
// sloppy mode by default, BASE compilation mode, no OSR entry point.
// NOTE(review): this listing is elided (embedded numbering jumps 34->36->38);
// the remaining parameters, initializers and closing brace are not visible.
34 CompilationInfo::CompilationInfo(Handle<Script> script,
36 : flags_(StrictModeField::encode(SLOPPY)),
38 osr_ast_id_(BailoutId::None()),
41 optimization_id_(-1) {
42 Initialize(script->GetIsolate(), BASE, zone);
// Constructs a CompilationInfo for (re)compiling an already-known function:
// marked lazy, script_ derived from the shared function info.
// NOTE(review): elided listing — some initializers and the closing brace are
// not visible in this view.
46 CompilationInfo::CompilationInfo(Handle<SharedFunctionInfo> shared_info,
48 : flags_(StrictModeField::encode(SLOPPY) | IsLazy::encode(true)),
49 shared_info_(shared_info),
50 script_(Handle<Script>(Script::cast(shared_info->script()))),
51 osr_ast_id_(BailoutId::None()),
54 optimization_id_(-1) {
55 Initialize(script_->GetIsolate(), BASE, zone);
// Constructs a CompilationInfo from a live closure: shared info and script are
// derived from the closure, and its context is captured so that the compiled
// code can be attached to the right native context later.
// NOTE(review): elided listing — closure_ initializer and closing brace are
// presumably among the missing lines; confirm against the full file.
59 CompilationInfo::CompilationInfo(Handle<JSFunction> closure,
61 : flags_(StrictModeField::encode(SLOPPY) | IsLazy::encode(true)),
63 shared_info_(Handle<SharedFunctionInfo>(closure->shared())),
64 script_(Handle<Script>(Script::cast(shared_info_->script()))),
65 context_(closure->context()),
66 osr_ast_id_(BailoutId::None()),
69 optimization_id_(-1) {
70 Initialize(script_->GetIsolate(), BASE, zone);
// Constructs a CompilationInfo for compiling a Hydrogen code stub — note the
// STUB mode passed to Initialize, unlike the BASE mode of the other ctors.
// NOTE(review): elided listing — stub/isolate parameters and closing brace are
// not visible here.
74 CompilationInfo::CompilationInfo(HydrogenCodeStub* stub,
77 : flags_(StrictModeField::encode(SLOPPY) | IsLazy::encode(true)),
78 osr_ast_id_(BailoutId::None()),
81 optimization_id_(-1) {
82 Initialize(isolate, STUB, zone);
// Shared initialization for all CompilationInfo constructors: resets cached
// state, seeds profiler/no-frame ranges, clears dependency slots, copies the
// strict mode from an existing SharedFunctionInfo, and reuses its feedback
// vector when the function is already compiled.
// NOTE(review): elided listing — several assignments between the visible lines
// are missing from this view; do not assume this is the complete body.
87 void CompilationInfo::Initialize(Isolate* isolate,
96 cached_data_mode_ = NO_CACHED_DATA;
98 deferred_handles_ = NULL;
100 prologue_offset_ = Code::kPrologueOffsetNotSet;
// opt_count_ mirrors the shared info's counter so re-optimization limits
// survive across CompilationInfo instances.
101 opt_count_ = shared_info().is_null() ? 0 : shared_info()->opt_count();
// Only pay for OffsetRange bookkeeping while the CPU profiler is running.
102 no_frame_ranges_ = isolate->cpu_profiler()->is_profiling()
103 ? new List<OffsetRange>(2) : NULL;
104 for (int i = 0; i < DependentCode::kGroupCount; i++) {
105 dependencies_[i] = NULL;
112 abort_due_to_dependency_ = false;
113 if (script_->type()->value() == Script::TYPE_NATIVE) MarkAsNative();
114 if (isolate_->debug()->is_active()) MarkAsDebug();
116 if (!shared_info_.is_null()) {
117 ASSERT(strict_mode() == SLOPPY);
118 SetStrictMode(shared_info_->strict_mode());
120 set_bailout_reason(kUnknown);
// Reuse the existing feedback vector rather than allocating a fresh one,
// so previously collected type feedback is preserved.
122 if (!shared_info().is_null() && shared_info()->is_compiled()) {
123 // We should initialize the CompilationInfo feedback vector from the
124 // passed in shared info, rather than creating a new one.
125 feedback_vector_ = Handle<FixedArray>(shared_info()->feedback_vector(),
// Destructor: releases heap-allocated members and verifies that every
// registered code dependency was either committed or rolled back.
131 CompilationInfo::~CompilationInfo() {
132 delete deferred_handles_;
133 delete no_frame_ranges_;
135 // Check that no dependent maps have been added or added dependent maps have
136 // been rolled back or committed.
137 for (int i = 0; i < DependentCode::kGroupCount; i++) {
138 ASSERT_EQ(NULL, dependencies_[i]);
// Finalizes all recorded code dependencies: for every dependency group,
// rewrites the placeholder entries to point at the finished code object, then
// clears the slot (the lists themselves are zone-allocated).
144 void CompilationInfo::CommitDependencies(Handle<Code> code) {
145 for (int i = 0; i < DependentCode::kGroupCount; i++) {
146 ZoneList<Handle<HeapObject> >* group_objects = dependencies_[i];
147 if (group_objects == NULL) continue;
148 ASSERT(!object_wrapper_.is_null());
149 for (int j = 0; j < group_objects->length(); j++) {
150 DependentCode::DependencyGroup group =
151 static_cast<DependentCode::DependencyGroup>(i);
152 DependentCode* dependent_code =
153 DependentCode::ForObject(group_objects->at(j), group);
154 dependent_code->UpdateToFinishedCode(group, this, *code);
156 dependencies_[i] = NULL; // Zone-allocated, no need to delete.
// Inverse of CommitDependencies: unregisters this CompilationInfo from every
// dependent-code list when compilation is abandoned before commit.
161 void CompilationInfo::RollbackDependencies() {
162 // Unregister from all dependent maps if not yet committed.
163 for (int i = 0; i < DependentCode::kGroupCount; i++) {
164 ZoneList<Handle<HeapObject> >* group_objects = dependencies_[i];
165 if (group_objects == NULL) continue;
166 for (int j = 0; j < group_objects->length(); j++) {
167 DependentCode::DependencyGroup group =
168 static_cast<DependentCode::DependencyGroup>(i);
169 DependentCode* dependent_code =
170 DependentCode::ForObject(group_objects->at(j), group);
171 dependent_code->RemoveCompilationInfo(group, this);
173 dependencies_[i] = NULL; // Zone-allocated, no need to delete.
// Returns the formal parameter count, either from the cached parameter_count_
// or from the scope.
// NOTE(review): elided listing — the conditional that selects between the two
// return paths (presumably an IsStub()/scope check) is not visible here.
178 int CompilationInfo::num_parameters() const {
180 ASSERT(parameter_count_ > 0);
181 return parameter_count_;
183 return scope()->num_parameters();
// Returns the number of heap-allocated (context) slots from the scope.
// NOTE(review): elided listing — an alternate branch before this return
// appears to be missing (numbering jumps 188->192).
188 int CompilationInfo::num_heap_slots() const {
192 return scope()->num_heap_slots();
// Computes the Code::Flags for the code being generated: stub flags are
// derived from the code stub itself, otherwise OPTIMIZED_FUNCTION flags.
// NOTE(review): elided listing — the IsStub()-style condition between the two
// returns is not visible here.
197 Code::Flags CompilationInfo::flags() const {
199 return Code::ComputeFlags(code_stub()->GetCodeKind(),
200 code_stub()->GetICState(),
201 code_stub()->GetExtraICState(),
202 code_stub()->GetStubType());
204 return Code::ComputeFlags(Code::OPTIMIZED_FUNCTION);
209 // Disable optimization for the rest of the compilation pipeline.
// Keeps BASE mode only for closures that remain optimizable under
// --optimize-closures (no trivial outer context, no sloppy-eval caller,
// not inside a 'with'); everything else is demoted to NONOPT.
210 void CompilationInfo::DisableOptimization() {
211 bool is_optimizable_closure =
212 FLAG_optimize_closures &&
213 closure_.is_null() &&
214 !scope_->HasTrivialOuterContext() &&
215 !scope_->outer_scope_calls_sloppy_eval() &&
216 !scope_->inside_with();
217 SetMode(is_optimizable_closure ? BASE : NONOPT);
221 // Primitive functions are unlikely to be picked up by the stack-walking
222 // profiler, so they trigger their own optimization when they're called
223 // for the SharedFunctionInfo::kCallsUntilPrimitiveOptimization-th time.
// True only when Crankshaft is enabled, the function itself carries no
// don't-optimize markers, lazy compilation is allowed for its scope, and
// optimization is not disabled on the shared info.
224 bool CompilationInfo::ShouldSelfOptimize() {
225 return FLAG_crankshaft &&
226 !function()->flags()->Contains(kDontSelfOptimize) &&
227 !function()->dont_optimize() &&
228 function()->scope()->AllowsLazyCompilation() &&
229 (shared_info().is_null() || !shared_info()->optimization_disabled());
// Binds the analyzed scope to this CompilationInfo and ensures a type
// feedback vector of the right length exists (allocating one if it was not
// inherited from a shared function info in Initialize()).
// NOTE(review): elided listing — the assignment of scope_ itself is among the
// lines missing from this view (numbering jumps 234->237).
233 void CompilationInfo::PrepareForCompilation(Scope* scope) {
234 ASSERT(scope_ == NULL);
237 int length = function()->slot_count();
238 if (feedback_vector_.is_null()) {
239 // Allocate the feedback vector too.
240 feedback_vector_ = isolate()->factory()->NewTypeFeedbackVector(length);
242 ASSERT(feedback_vector_->length() == length);
// Graph builder subclass used under --hydrogen-track-positions: overrides
// every expression/statement visitor (via the DEF_VISIT macro) to record the
// node's source position before delegating to the base visitor. Module and
// declaration visitors delegate without touching positions.
// NOTE(review): elided listing — the public: label, macro-closing braces and
// #undef DEF_VISIT lines between the three macro groups are not visible here.
246 class HOptimizedGraphBuilderWithPositions: public HOptimizedGraphBuilder {
248 explicit HOptimizedGraphBuilderWithPositions(CompilationInfo* info)
249 : HOptimizedGraphBuilder(info) {
252 #define DEF_VISIT(type) \
253 virtual void Visit##type(type* node) V8_OVERRIDE { \
254 if (node->position() != RelocInfo::kNoPosition) { \
255 SetSourcePosition(node->position()); \
257 HOptimizedGraphBuilder::Visit##type(node); \
259 EXPRESSION_NODE_LIST(DEF_VISIT)
262 #define DEF_VISIT(type) \
263 virtual void Visit##type(type* node) V8_OVERRIDE { \
264 if (node->position() != RelocInfo::kNoPosition) { \
265 SetSourcePosition(node->position()); \
267 HOptimizedGraphBuilder::Visit##type(node); \
269 STATEMENT_NODE_LIST(DEF_VISIT)
272 #define DEF_VISIT(type) \
273 virtual void Visit##type(type* node) V8_OVERRIDE { \
274 HOptimizedGraphBuilder::Visit##type(node); \
276 MODULE_NODE_LIST(DEF_VISIT)
277 DECLARATION_NODE_LIST(DEF_VISIT)
// Phase 1 of optimized compilation: validates that optimization is allowed
// (debugger, opt-count limit, Lithium operand-encoding limits, hydrogen
// filter), (re)compiles unoptimized code with deopt support if needed, runs
// the AST typer, and builds the Hydrogen graph. Returns SUCCEEDED, FAILED,
// or one of the Abort* statuses.
// NOTE(review): elided listing — many closing braces and some statements
// (e.g. the timer declaration used at old-line 368) fall in the gaps of this
// view; comments below describe only what is visible.
282 OptimizedCompileJob::Status OptimizedCompileJob::CreateGraph() {
283 ASSERT(isolate()->use_crankshaft());
284 ASSERT(info()->IsOptimizing());
285 ASSERT(!info()->IsCompilingForDebugging());
287 // We should never arrive here if there is no code object on the
288 // shared function object.
289 ASSERT(info()->shared_info()->code()->kind() == Code::FUNCTION);
291 // We should never arrive here if optimization has been disabled on the
292 // shared function info.
293 ASSERT(!info()->shared_info()->optimization_disabled());
295 // Fall back to using the full code generator if it's not possible
296 // to use the Hydrogen-based optimizing compiler. We already have
297 // generated code for this from the shared function object.
298 if (FLAG_always_full_compiler) return AbortOptimization();
300 // Do not use crankshaft if we need to be able to set break points.
301 if (isolate()->DebuggerHasBreakPoints()) {
302 return AbortOptimization(kDebuggerHasBreakPoints);
305 // Limit the number of times we re-compile a functions with
306 // the optimizing compiler.
307 const int kMaxOptCount =
308 FLAG_deopt_every_n_times == 0 ? FLAG_max_opt_count : 1000;
309 if (info()->opt_count() > kMaxOptCount) {
310 return AbortAndDisableOptimization(kOptimizedTooManyTimes);
313 // Due to an encoding limit on LUnallocated operands in the Lithium
314 // language, we cannot optimize functions with too many formal parameters
315 // or perform on-stack replacement for function with too many
316 // stack-allocated local variables.
318 // The encoding is as a signed value, with parameters and receiver using
319 // the negative indices and locals the non-negative ones.
320 const int parameter_limit = -LUnallocated::kMinFixedSlotIndex;
321 Scope* scope = info()->scope();
// "+ 1" accounts for the receiver in addition to the formal parameters.
322 if ((scope->num_parameters() + 1) > parameter_limit) {
323 return AbortAndDisableOptimization(kTooManyParameters);
326 const int locals_limit = LUnallocated::kMaxFixedSlotIndex;
327 if (info()->is_osr() &&
328 scope->num_parameters() + 1 + scope->num_stack_slots() > locals_limit) {
329 return AbortAndDisableOptimization(kTooManyParametersLocals);
332 if (scope->HasIllegalRedeclaration()) {
333 return AbortAndDisableOptimization(kFunctionWithIllegalRedeclaration);
336 // Take --hydrogen-filter into account.
337 if (!info()->closure()->PassesFilter(FLAG_hydrogen_filter)) {
338 return AbortOptimization(kHydrogenFilter);
341 // Recompile the unoptimized version of the code if the current version
342 // doesn't have deoptimization support. Alternatively, we may decide to
343 // run the full code generator to get a baseline for the compile-time
344 // performance of the hydrogen-based compiler.
345 bool should_recompile = !info()->shared_info()->has_deoptimization_support();
346 if (should_recompile || FLAG_hydrogen_stats) {
348 if (FLAG_hydrogen_stats) {
// Full-codegen recompile reuses the already-parsed AST and scope of the
// optimizing CompilationInfo to guarantee identical code shape.
351 CompilationInfoWithZone unoptimized(info()->shared_info());
352 // Note that we use the same AST that we will use for generating the
354 unoptimized.SetFunction(info()->function());
355 unoptimized.PrepareForCompilation(info()->scope());
356 unoptimized.SetContext(info()->context());
357 if (should_recompile) unoptimized.EnableDeoptimizationSupport();
358 bool succeeded = FullCodeGenerator::MakeCode(&unoptimized);
359 if (should_recompile) {
360 if (!succeeded) return SetLastStatus(FAILED);
361 Handle<SharedFunctionInfo> shared = info()->shared_info();
362 shared->EnableDeoptimizationSupport(*unoptimized.code());
363 // The existing unoptimized code was replaced with the new one.
364 Compiler::RecordFunctionCompilation(
365 Logger::LAZY_COMPILE_TAG, &unoptimized, shared);
367 if (FLAG_hydrogen_stats) {
368 isolate()->GetHStatistics()->IncrementFullCodeGen(timer.Elapsed());
372 // Check that the unoptimized, shared code is ready for
373 // optimizations. When using the always_opt flag we disregard the
374 // optimizable marker in the code object and optimize anyway. This
375 // is safe as long as the unoptimized code has deoptimization
377 ASSERT(FLAG_always_opt || info()->shared_info()->code()->optimizable());
378 ASSERT(info()->shared_info()->has_deoptimization_support());
380 if (FLAG_trace_hydrogen) {
381 Handle<String> name = info()->function()->debug_name();
382 PrintF("-----------------------------------------------------------\n");
383 PrintF("Compiling method %s using hydrogen\n", name->ToCString().get());
384 isolate()->GetHTracer()->TraceCompilation(info());
387 // Type-check the function.
388 AstTyper::Run(info());
// The position-tracking builder is only paid for when requested by flag;
// both variants are zone-allocated and owned by the compilation zone.
390 graph_builder_ = FLAG_hydrogen_track_positions
391 ? new(info()->zone()) HOptimizedGraphBuilderWithPositions(info())
392 : new(info()->zone()) HOptimizedGraphBuilder(info());
394 Timer t(this, &time_taken_to_create_graph_);
395 info()->set_this_has_uses(false);
396 graph_ = graph_builder_->CreateGraph();
398 if (isolate()->has_pending_exception()) {
399 return SetLastStatus(FAILED);
402 // The function being compiled may have bailed out due to an inline
403 // candidate bailing out. In such a case, we don't disable
404 // optimization on the shared_info.
405 ASSERT(!graph_builder_->inline_bailout() || graph_ == NULL);
406 if (graph_ == NULL) {
407 if (graph_builder_->inline_bailout()) {
408 return AbortOptimization();
410 return AbortAndDisableOptimization();
414 if (info()->HasAbortedDueToDependencyChange()) {
415 return AbortOptimization(kBailedOutDueToDependencyChange);
418 return SetLastStatus(SUCCEEDED);
// Phase 2 of optimized compilation: runs Hydrogen optimizations on the graph
// built in CreateGraph and lowers it to a Lithium chunk. The Disallow* scopes
// make this phase safe to run off the main thread (no heap allocation, no
// handle creation/dereference, no dependency changes).
422 OptimizedCompileJob::Status OptimizedCompileJob::OptimizeGraph() {
423 DisallowHeapAllocation no_allocation;
424 DisallowHandleAllocation no_handles;
425 DisallowHandleDereference no_deref;
426 DisallowCodeDependencyChange no_dependency_change;
428 ASSERT(last_status() == SUCCEEDED);
429 Timer t(this, &time_taken_to_optimize_);
430 ASSERT(graph_ != NULL);
431 BailoutReason bailout_reason = kNoReason;
433 if (graph_->Optimize(&bailout_reason)) {
434 chunk_ = LChunk::NewChunk(graph_);
435 if (chunk_ != NULL) return SetLastStatus(SUCCEEDED);
436 } else if (bailout_reason != kNoReason) {
437 graph_builder_->Bailout(bailout_reason);
// Reached when optimization failed or chunk building returned NULL.
440 return AbortOptimization();
// Phase 3 of optimized compilation: generates machine code from the Lithium
// chunk. On codegen failure, distinguishes transient bailouts (deprecated or
// unstable maps -> AbortOptimization, retry possible) from permanent failure
// (AbortAndDisableOptimization). On success, records stats and registers the
// code on the native context's weak optimized-code list.
444 OptimizedCompileJob::Status OptimizedCompileJob::GenerateCode() {
445 ASSERT(last_status() == SUCCEEDED);
446 ASSERT(!info()->HasAbortedDueToDependencyChange());
447 DisallowCodeDependencyChange no_dependency_change;
448 { // Scope for timer.
449 Timer timer(this, &time_taken_to_codegen_);
450 ASSERT(chunk_ != NULL);
451 ASSERT(graph_ != NULL);
452 // Deferred handles reference objects that were accessible during
453 // graph creation. To make sure that we don't encounter inconsistencies
454 // between graph creation and code generation, we disallow accessing
455 // objects through deferred handles during the latter, with exceptions.
456 DisallowDeferredHandleDereference no_deferred_handle_deref;
457 Handle<Code> optimized_code = chunk_->Codegen();
458 if (optimized_code.is_null()) {
459 if (info()->bailout_reason() == kNoReason) {
460 info_->set_bailout_reason(kCodeGenerationFailed);
461 } else if (info()->bailout_reason() == kMapBecameDeprecated) {
462 if (FLAG_trace_opt) {
463 PrintF("[aborted optimizing ");
464 info()->closure()->ShortPrint();
465 PrintF(" because a map became deprecated]\n");
467 return AbortOptimization();
468 } else if (info()->bailout_reason() == kMapBecameUnstable) {
469 if (FLAG_trace_opt) {
470 PrintF("[aborted optimizing ");
471 info()->closure()->ShortPrint();
472 PrintF(" because a map became unstable]\n");
474 return AbortOptimization();
// Any other codegen failure permanently disables optimization.
476 return AbortAndDisableOptimization();
478 info()->SetCode(optimized_code);
480 RecordOptimizationStats();
481 // Add to the weak list of optimized code objects.
482 info()->context()->native_context()->AddOptimizedCode(*info()->code());
483 return SetLastStatus(SUCCEEDED);
// Bumps the per-function optimization counter and, under the tracing flags,
// prints per-compilation timing and cumulative compile statistics; also feeds
// the Hydrogen statistics collector when --hydrogen-stats is on.
487 void OptimizedCompileJob::RecordOptimizationStats() {
488 Handle<JSFunction> function = info()->closure();
489 if (!function->IsOptimized()) {
490 // Concurrent recompilation and OSR may race. Increment only once.
491 int opt_count = function->shared()->opt_count();
492 function->shared()->set_opt_count(opt_count + 1);
494 double ms_creategraph = time_taken_to_create_graph_.InMillisecondsF();
495 double ms_optimize = time_taken_to_optimize_.InMillisecondsF();
496 double ms_codegen = time_taken_to_codegen_.InMillisecondsF();
497 if (FLAG_trace_opt) {
498 PrintF("[optimizing ");
499 function->ShortPrint();
500 PrintF(" - took %0.3f, %0.3f, %0.3f ms]\n", ms_creategraph, ms_optimize,
// NOTE(review): function-local statics below make these totals cumulative
// across all compilations in the process (not thread-safe by themselves).
503 if (FLAG_trace_opt_stats) {
504 static double compilation_time = 0.0;
505 static int compiled_functions = 0;
506 static int code_size = 0;
508 compilation_time += (ms_creategraph + ms_optimize + ms_codegen);
509 compiled_functions++;
510 code_size += function->shared()->SourceSize();
511 PrintF("Compiled: %d functions with %d byte source size in %fms.\n",
516 if (FLAG_hydrogen_stats) {
517 isolate()->GetHStatistics()->IncrementSubtotals(time_taken_to_create_graph_,
518 time_taken_to_optimize_,
519 time_taken_to_codegen_);
524 // Sets the expected number of properties based on estimate from compiler.
// NOTE(review): elided listing — the actual adjustments applied inside the
// serializer/clever-optimizations branches are not visible here; only the
// branch structure and the final store survive in this view.
525 void SetExpectedNofPropertiesFromEstimate(Handle<SharedFunctionInfo> shared,
527 // If no properties are added in the constructor, they are more likely
528 // to be added later.
529 if (estimate == 0) estimate = 2;
531 // TODO(yangguo): check whether those heuristics are still up-to-date.
532 // We do not shrink objects that go into a snapshot (yet), so we adjust
533 // the estimate conservatively.
534 if (shared->GetIsolate()->serializer_enabled()) {
536 } else if (FLAG_clever_optimizations) {
537 // Inobject slack tracking will reclaim redundant inobject space later,
538 // so we can afford to adjust the estimate generously.
544 shared->set_expected_nof_properties(estimate);
// Copies the results of a finished unoptimized compilation back onto the
// SharedFunctionInfo: scope info, code, feedback vector, property estimate,
// and the literal's compilation attributes (dont_optimize/inline, node count,
// strict mode).
548 static void UpdateSharedFunctionInfo(CompilationInfo* info) {
549 // Update the shared function info with the compiled code and the
550 // scope info. Please note, that the order of the shared function
551 // info initialization is important since set_scope_info might
552 // trigger a GC, causing the ASSERT below to be invalid if the code
553 // was flushed. By setting the code object last we avoid this.
554 Handle<SharedFunctionInfo> shared = info->shared_info();
555 Handle<ScopeInfo> scope_info =
556 ScopeInfo::Create(info->scope(), info->zone());
557 shared->set_scope_info(*scope_info);
559 Handle<Code> code = info->code();
560 CHECK(code->kind() == Code::FUNCTION);
561 shared->ReplaceCode(*code);
// Keep the new code consistent with a previously-disabled optimization.
562 if (shared->optimization_disabled()) code->set_optimizable(false);
564 shared->set_feedback_vector(*info->feedback_vector());
566 // Set the expected number of properties for instances.
567 FunctionLiteral* lit = info->function();
568 int expected = lit->expected_property_count();
569 SetExpectedNofPropertiesFromEstimate(shared, expected);
571 // Check the function has compiled code.
572 ASSERT(shared->is_compiled());
573 shared->set_dont_optimize_reason(lit->dont_optimize_reason());
574 shared->set_dont_inline(lit->flags()->Contains(kDontInline));
575 shared->set_ast_node_count(lit->ast_node_count());
576 shared->set_strict_mode(lit->strict_mode());
580 // Sets the function info on a function.
581 // The start_position points to the first '(' character after the function name
582 // in the full script source. When counting characters in the script source the
583 // the first character is number 0 (not 1).
// Transfers every statically-known attribute of the parsed FunctionLiteral
// onto the freshly-created SharedFunctionInfo.
// NOTE(review): elided listing — the is_toplevel parameter declaration is not
// visible here (numbering jumps 585->587) though the value is used below.
584 static void SetFunctionInfo(Handle<SharedFunctionInfo> function_info,
585 FunctionLiteral* lit,
587 Handle<Script> script) {
588 function_info->set_length(lit->parameter_count());
589 function_info->set_formal_parameter_count(lit->parameter_count());
590 function_info->set_script(*script);
591 function_info->set_function_token_position(lit->function_token_position());
592 function_info->set_start_position(lit->start_position());
593 function_info->set_end_position(lit->end_position());
594 function_info->set_is_expression(lit->is_expression());
595 function_info->set_is_anonymous(lit->is_anonymous());
596 function_info->set_is_toplevel(is_toplevel);
597 function_info->set_inferred_name(*lit->inferred_name());
598 function_info->set_allows_lazy_compilation(lit->AllowsLazyCompilation());
599 function_info->set_allows_lazy_compilation_without_context(
600 lit->AllowsLazyCompilationWithoutContext());
601 function_info->set_strict_mode(lit->strict_mode());
602 function_info->set_uses_arguments(lit->scope()->arguments() != NULL);
603 function_info->set_has_duplicate_parameters(lit->has_duplicate_parameters());
604 function_info->set_ast_node_count(lit->ast_node_count());
605 function_info->set_is_function(lit->is_function());
606 function_info->set_dont_optimize_reason(lit->dont_optimize_reason());
607 function_info->set_dont_inline(lit->flags()->Contains(kDontInline));
608 function_info->set_dont_cache(lit->flags()->Contains(kDontCache));
609 function_info->set_is_generator(lit->is_generator());
// Runs the unoptimized pipeline on an already-parsed function: AST rewriting,
// scope analysis, then full-codegen. On codegen failure a StackOverflow is
// scheduled if no other exception is already pending.
// NOTE(review): elided listing — the success return path is not visible here.
613 static bool CompileUnoptimizedCode(CompilationInfo* info) {
614 ASSERT(info->function() != NULL);
615 if (!Rewriter::Rewrite(info)) return false;
616 if (!Scope::Analyze(info)) return false;
617 ASSERT(info->scope() != NULL);
619 if (!FullCodeGenerator::MakeCode(info)) {
620 Isolate* isolate = info->isolate();
621 if (!isolate->has_pending_exception()) isolate->StackOverflow();
// Shared lazy-compilation path: parses, propagates the parsed strict mode,
// compiles unoptimized code, logs the compilation, and writes the result back
// to the shared function info. Returns an empty MaybeHandle on failure (an
// exception is then pending on the isolate).
628 MUST_USE_RESULT static MaybeHandle<Code> GetUnoptimizedCodeCommon(
629 CompilationInfo* info) {
630 VMState<COMPILER> state(info->isolate());
// Interrupts are postponed so compilation is not observably interleaved
// with e.g. stack-guard requests.
631 PostponeInterruptsScope postpone(info->isolate());
632 if (!Parser::Parse(info)) return MaybeHandle<Code>();
633 info->SetStrictMode(info->function()->strict_mode());
635 if (!CompileUnoptimizedCode(info)) return MaybeHandle<Code>();
636 Compiler::RecordFunctionCompilation(
637 Logger::LAZY_COMPILE_TAG, info, info->shared_info());
638 UpdateSharedFunctionInfo(info);
639 ASSERT_EQ(Code::FUNCTION, info->code()->kind());
// Returns unoptimized code for a function, reusing the shared info's code if
// it is already compiled. Under --always-opt it additionally attempts an
// eager (non-concurrent) optimized compile of the fresh result.
// NOTE(review): elided listing — the declaration of `result`, the arguments of
// GetOptimizedCode, and the return statements are not fully visible here.
644 MaybeHandle<Code> Compiler::GetUnoptimizedCode(Handle<JSFunction> function) {
645 ASSERT(!function->GetIsolate()->has_pending_exception());
646 ASSERT(!function->is_compiled());
647 if (function->shared()->is_compiled()) {
648 return Handle<Code>(function->shared()->code());
651 CompilationInfoWithZone info(function);
653 ASSIGN_RETURN_ON_EXCEPTION(info.isolate(), result,
654 GetUnoptimizedCodeCommon(&info),
657 if (FLAG_always_opt &&
658 info.isolate()->use_crankshaft() &&
659 !info.shared_info()->optimization_disabled() &&
660 !info.isolate()->DebuggerHasBreakPoints()) {
661 Handle<Code> opt_code;
662 if (Compiler::GetOptimizedCode(
664 Compiler::NOT_CONCURRENT).ToHandle(&opt_code)) {
// Overload that compiles directly from a SharedFunctionInfo (no closure):
// thin wrapper delegating to GetUnoptimizedCodeCommon.
673 MaybeHandle<Code> Compiler::GetUnoptimizedCode(
674 Handle<SharedFunctionInfo> shared) {
675 ASSERT(!shared->GetIsolate()->has_pending_exception());
676 ASSERT(!shared->is_compiled());
678 CompilationInfoWithZone info(shared);
679 return GetUnoptimizedCodeCommon(&info);
// Ensures the function has compiled code, compiling lazily if needed.
// Returns true on success; on failure optionally clears the pending exception
// (CLEAR_EXCEPTION) before returning.
// NOTE(review): elided listing — the declaration of `code` and the
// false/true returns are among the lines missing from this view.
683 bool Compiler::EnsureCompiled(Handle<JSFunction> function,
684 ClearExceptionFlag flag) {
685 if (function->is_compiled()) return true;
686 MaybeHandle<Code> maybe_code = Compiler::GetUnoptimizedCode(function);
688 if (!maybe_code.ToHandle(&code)) {
689 if (flag == CLEAR_EXCEPTION) {
690 function->GetIsolate()->clear_pending_exception();
694 function->ReplaceCode(*code);
695 ASSERT(function->is_compiled());
700 // Compile full code for debugging. This code will have debug break slots
701 // and deoptimization information. Deoptimization information is required
702 // in case that an optimized version of this function is still activated on
703 // the stack. It will also make sure that the full code is compiled with
704 // the same flags as the previous version, that is flags which can change
705 // the code generated. The current method of mapping from already compiled
706 // full code without debug break slots to full code with debug break slots
707 // depends on the generated code is otherwise exactly the same.
708 // If compilation fails, just keep the existing code.
709 MaybeHandle<Code> Compiler::GetCodeForDebugging(Handle<JSFunction> function) {
710 CompilationInfoWithZone info(function);
711 Isolate* isolate = info.isolate();
712 VMState<COMPILER> state(isolate);
716 ASSERT(!isolate->has_pending_exception());
717 Handle<Code> old_code(function->shared()->code());
718 ASSERT(old_code->kind() == Code::FUNCTION);
719 ASSERT(!old_code->has_debug_break_slots());
// Deopt support in the debug code must match the old code so that the two
// versions stay structurally identical (see header comment).
721 info.MarkCompilingForDebugging();
722 if (old_code->is_compiled_optimizable()) {
723 info.EnableDeoptimizationSupport();
725 info.MarkNonOptimizable();
727 MaybeHandle<Code> maybe_new_code = GetUnoptimizedCodeCommon(&info);
728 Handle<Code> new_code;
729 if (!maybe_new_code.ToHandle(&new_code)) {
// Per the header comment, failure keeps the existing code; the pending
// exception is dropped here.
730 isolate->clear_pending_exception();
732 ASSERT_EQ(old_code->is_compiled_optimizable(),
733 new_code->is_compiled_optimizable());
735 return maybe_new_code;
// Compiles a whole script for the LiveEdit (script hot-swap) machinery,
// recording function info via LiveEditFunctionTracker instead of producing a
// result; errors simply return early.
739 void Compiler::CompileForLiveEdit(Handle<Script> script) {
740 // TODO(635): support extensions.
741 CompilationInfoWithZone info(script);
742 PostponeInterruptsScope postpone(info.isolate());
743 VMState<COMPILER> state(info.isolate());
746 if (!Parser::Parse(&info)) return;
747 info.SetStrictMode(info.function()->strict_mode());
749 LiveEditFunctionTracker tracker(info.isolate(), info.function());
750 if (!CompileUnoptimizedCode(&info)) return;
751 if (!info.shared_info().is_null()) {
752 Handle<ScopeInfo> scope_info = ScopeInfo::Create(info.scope(),
754 info.shared_info()->set_scope_info(*scope_info);
756 tracker.RecordRootFunctionInfo(info.code());
// True when the debugger requires eager (non-lazy) compilation: LiveEdit is
// active, or breakpoints exist and lazy-without-context is not permitted.
760 static bool DebuggerWantsEagerCompilation(CompilationInfo* info,
761 bool allow_lazy_without_ctx = false) {
762 return LiveEditFunctionTracker::IsActive(info->isolate()) ||
763 (info->isolate()->DebuggerHasBreakPoints() && !allow_lazy_without_ctx);
// Compiles top-level code (global code or eval) end-to-end: notifies the
// debugger, decides on lazy vs. eager parsing (cached data is discarded when
// eager parsing makes it unusable), parses and compiles, allocates the
// resulting SharedFunctionInfo, logs/profiles the compilation, and records
// LiveEdit info. Returns a null handle on failure.
// NOTE(review): elided listing — several closing braces, the serializer
// branch around parse_allow_lazy, and the final return are not visible here.
767 static Handle<SharedFunctionInfo> CompileToplevel(CompilationInfo* info) {
768 Isolate* isolate = info->isolate();
769 PostponeInterruptsScope postpone(isolate);
770 ASSERT(!isolate->native_context().is_null());
771 Handle<Script> script = info->script();
773 // TODO(svenpanne) Obscure place for this, perhaps move to OnBeforeCompile?
774 FixedArray* array = isolate->native_context()->embedder_data();
775 script->set_context_data(array->get(0));
777 isolate->debug()->OnBeforeCompile(script);
779 ASSERT(info->is_eval() || info->is_global());
// Lazy parsing is only worthwhile for large sources (or when consuming
// cached data), and never when the debugger needs eager compilation.
781 bool parse_allow_lazy =
782 (info->cached_data_mode() == CONSUME_CACHED_DATA ||
783 String::cast(script->source())->length() > FLAG_min_preparse_length) &&
784 !DebuggerWantsEagerCompilation(info);
786 if (!parse_allow_lazy && info->cached_data_mode() != NO_CACHED_DATA) {
787 // We are going to parse eagerly, but we either 1) have cached data produced
788 // by lazy parsing or 2) are asked to generate cached data. We cannot use
789 // the existing data, since it won't contain all the symbols we need for
790 // eager parsing. In addition, it doesn't make sense to produce the data
791 // when parsing eagerly. That data would contain all symbols, but no
792 // functions, so it cannot be used to aid lazy parsing later.
793 info->SetCachedData(NULL, NO_CACHED_DATA);
796 Handle<SharedFunctionInfo> result;
798 { VMState<COMPILER> state(info->isolate());
799 if (!Parser::Parse(info, parse_allow_lazy)) {
800 return Handle<SharedFunctionInfo>::null();
803 FunctionLiteral* lit = info->function();
804 LiveEditFunctionTracker live_edit_tracker(isolate, lit);
806 // Measure how long it takes to do the compilation; only take the
807 // rest of the function into account to avoid overlap with the
808 // parsing statistics.
809 HistogramTimer* rate = info->is_eval()
810 ? info->isolate()->counters()->compile_eval()
811 : info->isolate()->counters()->compile();
812 HistogramTimerScope timer(rate);
815 if (!CompileUnoptimizedCode(info)) {
816 return Handle<SharedFunctionInfo>::null();
819 // Allocate function.
820 ASSERT(!info->code().is_null());
821 result = isolate->factory()->NewSharedFunctionInfo(
823 lit->materialized_literal_count(),
826 ScopeInfo::Create(info->scope(), info->zone()),
827 info->feedback_vector());
829 ASSERT_EQ(RelocInfo::kNoPosition, lit->function_token_position());
// true -> this SharedFunctionInfo represents top-level code.
830 SetFunctionInfo(result, lit, true, script);
832 Handle<String> script_name = script->name()->IsString()
833 ? Handle<String>(String::cast(script->name()))
834 : isolate->factory()->empty_string();
835 Logger::LogEventsAndTags log_tag = info->is_eval()
837 : Logger::ToNativeByScript(Logger::SCRIPT_TAG, *script);
839 PROFILE(isolate, CodeCreateEvent(
840 log_tag, *info->code(), *result, info, *script_name));
841 GDBJIT(AddCode(script_name, script, info->code(), info));
843 // Hint to the runtime system used when allocating space for initial
844 // property space by setting the expected number of properties for
845 // the instances of the function.
846 SetExpectedNofPropertiesFromEstimate(result,
847 lit->expected_property_count());
849 script->set_compilation_state(Script::COMPILATION_STATE_COMPILED);
851 live_edit_tracker.RecordFunctionInfo(result, lit, info->zone());
854 isolate->debug()->OnAfterCompile(script, Debug::NO_AFTER_COMPILE_FLAGS);
// Compiles (or fetches from the eval compilation cache) the code for an
// eval() call and wraps it into a fresh JSFunction bound to the given
// context. Optimization is explicitly disabled for eval code.
// NOTE(review): elided listing — some closing braces and (apparently) the
// is_eval marking between old-lines 879 and 881 are not visible here.
860 MaybeHandle<JSFunction> Compiler::GetFunctionFromEval(
861 Handle<String> source,
862 Handle<Context> context,
863 StrictMode strict_mode,
864 ParseRestriction restriction,
865 int scope_position) {
866 Isolate* isolate = source->GetIsolate();
867 int source_length = source->length();
868 isolate->counters()->total_eval_size()->Increment(source_length);
869 isolate->counters()->total_compile_size()->Increment(source_length);
871 CompilationCache* compilation_cache = isolate->compilation_cache();
872 MaybeHandle<SharedFunctionInfo> maybe_shared_info =
873 compilation_cache->LookupEval(source, context, strict_mode,
875 Handle<SharedFunctionInfo> shared_info;
877 if (!maybe_shared_info.ToHandle(&shared_info)) {
878 Handle<Script> script = isolate->factory()->NewScript(source);
879 CompilationInfoWithZone info(script);
881 if (context->IsNativeContext()) info.MarkAsGlobal();
882 info.SetStrictMode(strict_mode);
883 info.SetParseRestriction(restriction);
884 info.SetContext(context);
886 Debug::RecordEvalCaller(script);
888 shared_info = CompileToplevel(&info);
890 if (shared_info.is_null()) {
891 return MaybeHandle<JSFunction>();
893 // Explicitly disable optimization for eval code. We're not yet prepared
894 // to handle eval-code in the optimizing compiler.
895 shared_info->DisableOptimization(kEval);
897 // If caller is strict mode, the result must be in strict mode as well.
898 ASSERT(strict_mode == SLOPPY || shared_info->strict_mode() == STRICT);
// Results marked dont_cache (e.g. by the parser) are never memoized.
899 if (!shared_info->dont_cache()) {
900 compilation_cache->PutEval(
901 source, context, shared_info, scope_position);
904 } else if (shared_info->ic_age() != isolate->heap()->global_ic_age()) {
// Cache hit from an earlier GC epoch: reset IC-related state first.
905 shared_info->ResetForNewContext(isolate->heap()->global_ic_age());
908 return isolate->factory()->NewFunctionFromSharedFunctionInfo(
909 shared_info, context, NOT_TENURED);
// Compiles (or fetches from the script compilation cache) a full script.
// Validates the cached_data/cached_data_mode contract, consults the cache
// (skipped for extension scripts), and on a miss builds a Script object,
// compiles top-level code, and caches the result when permitted.
// NOTE(review): elided listing — the line_offset/column_offset parameters,
// several closing braces, the is_global marking, and the final return are not
// visible in this view.
913 Handle<SharedFunctionInfo> Compiler::CompileScript(
914 Handle<String> source,
915 Handle<Object> script_name,
918 bool is_shared_cross_origin,
919 Handle<Context> context,
920 v8::Extension* extension,
921 ScriptData** cached_data,
922 CachedDataMode cached_data_mode,
923 NativesFlag natives) {
// Contract: PRODUCE mode needs an empty out-slot, CONSUME mode a full one.
924 if (cached_data_mode == NO_CACHED_DATA) {
926 } else if (cached_data_mode == PRODUCE_CACHED_DATA) {
927 ASSERT(cached_data && !*cached_data);
929 ASSERT(cached_data_mode == CONSUME_CACHED_DATA);
930 ASSERT(cached_data && *cached_data);
932 Isolate* isolate = source->GetIsolate();
933 int source_length = source->length();
934 isolate->counters()->total_load_size()->Increment(source_length);
935 isolate->counters()->total_compile_size()->Increment(source_length);
937 CompilationCache* compilation_cache = isolate->compilation_cache();
939 // Do a lookup in the compilation cache but not for extensions.
940 MaybeHandle<SharedFunctionInfo> maybe_result;
941 Handle<SharedFunctionInfo> result;
942 if (extension == NULL) {
943 maybe_result = compilation_cache->LookupScript(
944 source, script_name, line_offset, column_offset,
945 is_shared_cross_origin, context);
948 if (!maybe_result.ToHandle(&result)) {
949 // No cache entry found. Compile the script.
951 // Create a script object describing the script to be compiled.
952 Handle<Script> script = isolate->factory()->NewScript(source);
953 if (natives == NATIVES_CODE) {
954 script->set_type(Smi::FromInt(Script::TYPE_NATIVE));
956 if (!script_name.is_null()) {
957 script->set_name(*script_name);
958 script->set_line_offset(Smi::FromInt(line_offset));
959 script->set_column_offset(Smi::FromInt(column_offset));
961 script->set_is_shared_cross_origin(is_shared_cross_origin);
963 // Compile the function and add it to the cache.
964 CompilationInfoWithZone info(script);
966 info.SetExtension(extension);
967 info.SetCachedData(cached_data, cached_data_mode);
968 info.SetContext(context);
969 if (FLAG_use_strict) info.SetStrictMode(STRICT);
970 result = CompileToplevel(&info);
// Extension scripts and dont_cache results are deliberately not memoized.
971 if (extension == NULL && !result.is_null() && !result->dont_cache()) {
972 compilation_cache->PutScript(source, context, result);
974 if (result.is_null()) isolate->ReportPendingMessages();
975 } else if (result->ic_age() != isolate->heap()->global_ic_age()) {
976 result->ResetForNewContext(isolate->heap()->global_ic_age());
// Builds a SharedFunctionInfo for an inner function literal encountered
// while compiling an enclosing script.  Depending on the lazy-compilation
// policy the function either gets the CompileUnoptimized trampoline (true
// lazy compilation) or is compiled eagerly with the full code generator.
// Returns a null handle on eager-compilation failure.
// NOTE(review): this listing elides several lines (its internal numbering
// skips values), so comments below describe only the visible code.
982 Handle<SharedFunctionInfo> Compiler::BuildFunctionInfo(FunctionLiteral* literal,
983 Handle<Script> script) {
984 // Precondition: code has been parsed and scopes have been analyzed.
985 CompilationInfoWithZone info(script);
986 info.SetFunction(literal);
987 info.PrepareForCompilation(literal->scope());
// Strict mode is inherited from the literal's own scope.
988 info.SetStrictMode(literal->scope()->strict_mode());
990 Isolate* isolate = info.isolate();
991 Factory* factory = isolate->factory();
// Tracks this function for LiveEdit (debugger script patching) support.
992 LiveEditFunctionTracker live_edit_tracker(isolate, literal);
993 // Determine if the function can be lazily compiled. This is necessary to
994 // allow some of our builtin JS files to be lazily compiled. These
995 // builtins cannot be handled lazily by the parser, since we have to know
996 // if a function uses the special natives syntax, which is something the
998 // If the debugger requests compilation for break points, we cannot be
999 // aggressive about lazy compilation, because it might trigger compilation
1000 // of functions without an outer context when setting a breakpoint through
1001 // Debug::FindSharedFunctionInfoInScript.
1002 bool allow_lazy_without_ctx = literal->AllowsLazyCompilationWithoutContext();
1003 bool allow_lazy = literal->AllowsLazyCompilation() &&
1004 !DebuggerWantsEagerCompilation(&info, allow_lazy_without_ctx);
1007 Handle<ScopeInfo> scope_info;
// Lazy path: install the CompileUnoptimized builtin as a placeholder so
// real compilation happens on first call.  Parenthesized literals are
// excluded — presumably because they are likely IIFEs; confirm upstream.
1008 if (FLAG_lazy && allow_lazy && !literal->is_parenthesized()) {
1009 Handle<Code> code = isolate->builtins()->CompileUnoptimized();
1011 scope_info = Handle<ScopeInfo>(ScopeInfo::Empty(isolate));
// Eager path: generate unoptimized code now via the full code generator.
1012 } else if (FullCodeGenerator::MakeCode(&info)) {
1013 ASSERT(!info.code().is_null());
1014 scope_info = ScopeInfo::Create(info.scope(), info.zone());
// Eager compilation failed (e.g. stack overflow during codegen).
1016 return Handle<SharedFunctionInfo>::null();
1019 // Create a shared function info object.
1020 Handle<SharedFunctionInfo> result =
1021 factory->NewSharedFunctionInfo(literal->name(),
1022 literal->materialized_literal_count(),
1023 literal->is_generator(),
1026 info.feedback_vector());
1027 SetFunctionInfo(result, literal, false, script);
1028 RecordFunctionCompilation(Logger::FUNCTION_TAG, &info, result);
// Remember the lazy-compilation decisions so later recompilation paths
// (e.g. after deopt or for breakpoints) can honor them.
1029 result->set_allows_lazy_compilation(allow_lazy);
1030 result->set_allows_lazy_compilation_without_context(allow_lazy_without_ctx);
1032 // Set the expected number of properties for instances and return
1033 // the resulting function.
1034 SetExpectedNofPropertiesFromEstimate(result,
1035 literal->expected_property_count());
1036 live_edit_tracker.RecordFunctionInfo(result, literal, info.zone());
// Looks up previously generated optimized code for |function| in its
// SharedFunctionInfo's optimized-code map, keyed by the function's native
// context and the OSR AST id.  Returns an empty MaybeHandle on a miss or
// when --cache-optimized-code is off.  Also restores the cached literals
// array onto the function when present.
// NOTE(review): this listing elides lines (internal numbering skips), e.g.
// the index-validity check guarding the hit path is not visible here.
1041 MUST_USE_RESULT static MaybeHandle<Code> GetCodeFromOptimizedCodeMap(
1042 Handle<JSFunction> function,
1043 BailoutId osr_ast_id) {
1044 if (FLAG_cache_optimized_code) {
1045 Handle<SharedFunctionInfo> shared(function->shared());
// Raw pointers (FixedArray*, Code below) are used inside this no-GC scope.
1046 DisallowHeapAllocation no_gc;
1047 int index = shared->SearchOptimizedCodeMap(
1048 function->context()->native_context(), osr_ast_id);
1050 if (FLAG_trace_opt) {
1051 PrintF("[found optimized code for ");
1052 function->ShortPrint();
1053 if (!osr_ast_id.IsNone()) {
1054 PrintF(" at OSR AST id %d", osr_ast_id.ToInt());
// Reinstall the literals that were cached alongside the code, if any.
1058 FixedArray* literals = shared->GetLiteralsFromOptimizedCodeMap(index);
1059 if (literals != NULL) function->set_literals(literals);
1060 return Handle<Code>(shared->GetCodeFromOptimizedCodeMap(index));
// Cache miss (or caching disabled): signal "no code" to the caller.
1063 return MaybeHandle<Code>();
// Caches the freshly compiled optimized code (plus the function's literals)
// in the SharedFunctionInfo's optimized-code map, keyed by native context
// and OSR AST id, so future instantiations in the same context can reuse it.
// No-op for non-optimized code or when --cache-optimized-code is off.
1067 static void InsertCodeIntoOptimizedCodeMap(CompilationInfo* info) {
1068 Handle<Code> code = info->code();
1069 if (code->kind() != Code::OPTIMIZED_FUNCTION) return; // Nothing to do.
1071 // Cache optimized code.
1072 if (FLAG_cache_optimized_code) {
1073 Handle<JSFunction> function = info->closure();
1074 Handle<SharedFunctionInfo> shared(function->shared());
1075 Handle<FixedArray> literals(function->literals());
1076 Handle<Context> native_context(function->context()->native_context());
1077 SharedFunctionInfo::AddToOptimizedCodeMap(
1078 shared, native_context, code, literals, info->osr_ast_id());
// Common front-end work shared by both optimization paths (synchronous and
// concurrent): parse, pick up the strict mode discovered by the parser,
// rewrite the AST, and run scope analysis.  Returns false if any step fails.
// NOTE(review): the final lines of this function are elided in this listing.
1083 static bool CompileOptimizedPrologue(CompilationInfo* info) {
1084 if (!Parser::Parse(info)) return false;
// Strict mode is only known after parsing, so propagate it now.
1085 info->SetStrictMode(info->function()->strict_mode());
1087 if (!Rewriter::Rewrite(info)) return false;
1088 if (!Scope::Analyze(info)) return false;
1089 ASSERT(info->scope() != NULL);
// Synchronous optimization: runs all three OptimizedCompileJob phases
// (CreateGraph, OptimizeGraph, GenerateCode) on the current thread.
// Returns false if the prologue or any phase fails.  On success the code
// is inserted into the optimized-code map and the compilation is logged.
1094 static bool GetOptimizedCodeNow(CompilationInfo* info) {
1095 if (!CompileOptimizedPrologue(info)) return false;
// Attribute the whole recompile to the synchronous-recompile timer bucket.
1097 Logger::TimerEventScope timer(
1098 info->isolate(), Logger::TimerEventScope::v8_recompile_synchronous);
1100 OptimizedCompileJob job(info);
1101 if (job.CreateGraph() != OptimizedCompileJob::SUCCEEDED) return false;
1102 if (job.OptimizeGraph() != OptimizedCompileJob::SUCCEEDED) return false;
1103 if (job.GenerateCode() != OptimizedCompileJob::SUCCEEDED) return false;
// Success path: no exception may be pending here.
1106 ASSERT(!info->isolate()->has_pending_exception());
1107 InsertCodeIntoOptimizedCodeMap(info);
1108 Compiler::RecordFunctionCompilation(
1109 Logger::LAZY_COMPILE_TAG, info, info->shared_info());
// Concurrent optimization: runs the prologue and graph creation on the
// main thread, then hands the job to the optimizing compiler thread.
// Returns false when the queue is full or any main-thread step fails.
// NOTE(review): lines are elided in this listing (e.g. the early returns
// after the queue-full trace and after queuing), so control flow between
// the visible statements is not fully shown.
1114 static bool GetOptimizedCodeLater(CompilationInfo* info) {
1115 Isolate* isolate = info->isolate();
1116 if (!isolate->optimizing_compiler_thread()->IsQueueAvailable()) {
1117 if (FLAG_trace_concurrent_recompilation) {
1118 PrintF(" ** Compilation queue full, will retry optimizing ");
1119 info->closure()->PrintName();
1120 PrintF(" later.\n");
// All handles created from here on must survive into the background job.
1125 CompilationHandleScope handle_scope(info);
1126 if (!CompileOptimizedPrologue(info)) return false;
1127 info->SaveHandles(); // Copy handles to the compilation handle scope.
1129 Logger::TimerEventScope timer(
1130 isolate, Logger::TimerEventScope::v8_recompile_synchronous);
// The job is zone-allocated; it lives as long as the CompilationInfo's zone.
1132 OptimizedCompileJob* job = new(info->zone()) OptimizedCompileJob(info);
// Graph creation must happen on the main thread; only OptimizeGraph runs
// on the background thread.
1133 OptimizedCompileJob::Status status = job->CreateGraph();
1134 if (status != OptimizedCompileJob::SUCCEEDED) return false;
1135 isolate->optimizing_compiler_thread()->QueueForOptimization(job);
1137 if (FLAG_trace_concurrent_recompilation) {
1138 PrintF(" ** Queued ");
1139 info->closure()->PrintName();
1140 if (info->is_osr()) {
1141 PrintF(" for concurrent OSR at %d.\n", info->osr_ast_id().ToInt());
1143 PrintF(" for concurrent optimization.\n");
// Entry point for requesting optimized code for |function| (optionally at
// an OSR position).  Order of attempts: (1) the optimized-code map cache,
// (2) queue for concurrent recompilation when |mode| == CONCURRENT —
// returning the InOptimizationQueue builtin as a placeholder — or
// (3) compile synchronously.  Returns an empty MaybeHandle on failure.
// NOTE(review): lines are elided in this listing (e.g. the early return on
// a cache hit and the failure-path plumbing are not fully visible).
1150 MaybeHandle<Code> Compiler::GetOptimizedCode(Handle<JSFunction> function,
1151 Handle<Code> current_code,
1152 ConcurrencyMode mode,
1153 BailoutId osr_ast_id) {
1154 Handle<Code> cached_code;
1155 if (GetCodeFromOptimizedCodeMap(
1156 function, osr_ast_id).ToHandle(&cached_code)) {
// SmartPointer owns the info unless it is detached to a background job.
1160 SmartPointer<CompilationInfo> info(new CompilationInfoWithZone(function));
1161 Isolate* isolate = info->isolate();
1162 VMState<COMPILER> state(isolate);
1163 ASSERT(!isolate->has_pending_exception());
// No interrupts (e.g. stack-guard requests) while compiling.
1164 PostponeInterruptsScope postpone(isolate);
// Optimization requires a real (non-empty) scope info on the shared info.
1166 Handle<SharedFunctionInfo> shared = info->shared_info();
1167 ASSERT_NE(ScopeInfo::Empty(isolate), shared->scope_info());
1168 int compiled_size = shared->end_position() - shared->start_position();
1169 isolate->counters()->total_compile_size()->Increment(compiled_size);
// Reset profiler ticks so the runtime profiler does not immediately
// re-request optimization of the same code.
1170 current_code->set_profiler_ticks(0);
1172 info->SetOptimizing(osr_ast_id, current_code);
1174 if (mode == CONCURRENT) {
1175 if (GetOptimizedCodeLater(info.get())) {
1176 info.Detach(); // The background recompile job owns this now.
1177 return isolate->builtins()->InOptimizationQueue();
1180 if (GetOptimizedCodeNow(info.get())) return info->code();
// Failure path: report why optimization bailed out when tracing.
1184 if (FLAG_trace_opt) {
1185 PrintF("[failed to optimize ");
1186 function->PrintName();
1187 PrintF(": %s]\n", GetBailoutReason(info->bailout_reason()));
// Optimization failures are not surfaced as exceptions to JS code.
1190 if (isolate->has_pending_exception()) isolate->clear_pending_exception();
1191 return MaybeHandle<Code>();
// Finishes a concurrent recompilation job on the main thread: checks that
// the background phases succeeded and that the result is still wanted,
// then runs the final (main-thread-only) code generation phase.  Returns a
// null handle whenever the result must be discarded.
// NOTE(review): lines are elided in this listing; comments describe only
// the visible code.
1195 Handle<Code> Compiler::GetConcurrentlyOptimizedCode(OptimizedCompileJob* job) {
1196 // Take ownership of compilation info. Deleting compilation info
1197 // also tears down the zone and the recompile job.
1198 SmartPointer<CompilationInfo> info(job->info());
1199 Isolate* isolate = info->isolate();
1201 VMState<COMPILER> state(isolate);
1202 Logger::TimerEventScope timer(
1203 isolate, Logger::TimerEventScope::v8_recompile_synchronous);
1205 Handle<SharedFunctionInfo> shared = info->shared_info();
1206 shared->code()->set_profiler_ticks(0);
1208 // 1) Optimization may have failed.
1209 // 2) The function may have already been optimized by OSR. Simply continue.
1210 // Except when OSR already disabled optimization for some reason.
1211 // 3) The code may have already been invalidated due to dependency change.
1212 // 4) Debugger may have been activated.
1214 if (job->last_status() != OptimizedCompileJob::SUCCEEDED ||
1215 shared->optimization_disabled() ||
1216 info->HasAbortedDueToDependencyChange() ||
1217 isolate->DebuggerHasBreakPoints()) {
1218 return Handle<Code>::null();
// Final phase: code generation must run on the main thread.
1221 if (job->GenerateCode() != OptimizedCompileJob::SUCCEEDED) {
1222 return Handle<Code>::null();
1225 Compiler::RecordFunctionCompilation(
1226 Logger::LAZY_COMPILE_TAG, info.get(), shared);
// Only insert into the optimized-code map if no entry appeared for this
// (context, osr id) pair while the job was in flight.
1227 if (info->shared_info()->SearchOptimizedCodeMap(
1228 info->context()->native_context(), info->osr_ast_id()) == -1) {
1229 InsertCodeIntoOptimizedCodeMap(info.get());
1232 if (FLAG_trace_concurrent_recompilation) {
1233 PrintF(" ** Optimized code for ");
1234 info->closure()->PrintName();
1235 PrintF(" generated.\n");
// Re-wrap in a fresh handle; |info| (and its handles) dies with this scope.
1238 return Handle<Code>(*info->code());
// Emits logging/profiling/GDB-JIT events for a finished compilation.
// @param tag     event tag (e.g. FUNCTION_TAG, LAZY_COMPILE_TAG).
// @param info    the compilation whose script/code are reported.
// @param shared  passed separately because a script-based CompilationInfo
//                has no shared function info of its own (see below).
// NOTE(review): lines are elided in this listing (e.g. the early return
// when code is the CompileUnoptimized trampoline appears truncated).
1242 void Compiler::RecordFunctionCompilation(Logger::LogEventsAndTags tag,
1243 CompilationInfo* info,
1244 Handle<SharedFunctionInfo> shared) {
1245 // SharedFunctionInfo is passed separately, because if CompilationInfo
1246 // was created using Script object, it will not have it.
1248 // Log the code generation. If source information is available include
1249 // script name and line number. Check explicitly whether logging is
1250 // enabled as finding the line number is not free.
1251 if (info->isolate()->logger()->is_logging_code_events() ||
1252 info->isolate()->cpu_profiler()->is_profiling()) {
1253 Handle<Script> script = info->script();
1254 Handle<Code> code = info->code();
// Skip logging for the lazy-compile placeholder builtin.
1255 if (code.is_identical_to(
1256 info->isolate()->builtins()->CompileUnoptimized())) {
// Script positions are 0-based; events report 1-based line/column.
1259 int line_num = Script::GetLineNumber(script, shared->start_position()) + 1;
1261 Script::GetColumnNumber(script, shared->start_position()) + 1;
1262 String* script_name = script->name()->IsString()
1263 ? String::cast(script->name())
1264 : info->isolate()->heap()->empty_string();
1265 Logger::LogEventsAndTags log_tag = Logger::ToNativeByScript(tag, *script);
1266 PROFILE(info->isolate(), CodeCreateEvent(
1267 log_tag, *code, *shared, info, script_name, line_num, column_num));
// Also register the code object with GDB JIT integration (if enabled).
1270 GDBJIT(AddCode(Handle<String>(shared->DebugName()),
1271 Handle<Script>(info->script()),
1272 Handle<Code>(info->code()),
// Starts a named compilation phase.  With --hydrogen-stats, snapshots the
// CompilationInfo zone's allocation size so the destructor can report how
// much this phase allocated.
1277 CompilationPhase::CompilationPhase(const char* name, CompilationInfo* info)
1278 : name_(name), info_(info), zone_(info->isolate()) {
1279 if (FLAG_hydrogen_stats) {
1280 info_zone_start_allocation_size_ = info->zone()->allocation_size();
// Ends the phase.  With --hydrogen-stats, records the phase's elapsed time
// and total allocation (its own zone plus the growth of the shared
// CompilationInfo zone since the constructor ran).
1286 CompilationPhase::~CompilationPhase() {
1287 if (FLAG_hydrogen_stats) {
1288 unsigned size = zone()->allocation_size();
1289 size += info_->zone()->allocation_size() - info_zone_start_allocation_size_;
1290 isolate()->GetHStatistics()->SaveTiming(name_, timer_.Elapsed(), size);
// Returns true when this phase should emit trace output: the relevant
// hydrogen trace flag is on (stubs vs. regular functions use different
// flags) and the phase name's first character appears in --trace-phase.
1295 bool CompilationPhase::ShouldProduceTraceOutput() const {
1296 // Trace if the appropriate trace flag is set and the phase name's first
1297 // character is in the FLAG_trace_phase command line parameter.
// Dereferencing handles here is safe; tracing happens on the main thread.
1298 AllowHandleDereference allow_deref;
1299 bool tracing_on = info()->IsStub()
1300 ? FLAG_trace_hydrogen_stubs
1301 : (FLAG_trace_hydrogen &&
1302 info()->closure()->PassesFilter(FLAG_trace_hydrogen_filter));
1303 return (tracing_on &&
1304 OS::StrChr(const_cast<char*>(FLAG_trace_phase), name_[0]) != NULL);
1307 } } // namespace v8::internal