1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
7 #include "src/compiler.h"
9 #include "src/bootstrapper.h"
10 #include "src/codegen.h"
11 #include "src/compilation-cache.h"
12 #include "src/compiler/pipeline.h"
13 #include "src/cpu-profiler.h"
14 #include "src/debug.h"
15 #include "src/deoptimizer.h"
16 #include "src/full-codegen.h"
17 #include "src/gdb-jit.h"
18 #include "src/hydrogen.h"
19 #include "src/isolate-inl.h"
20 #include "src/lithium.h"
21 #include "src/liveedit.h"
22 #include "src/parser.h"
23 #include "src/rewriter.h"
24 #include "src/runtime-profiler.h"
25 #include "src/scanner-character-streams.h"
26 #include "src/scopeinfo.h"
27 #include "src/scopes.h"
28 #include "src/typing.h"
29 #include "src/vm-state-inl.h"
// Wraps a caller-supplied byte buffer of preparse/cached data. If the buffer
// is not pointer-aligned, an aligned private copy is made and this object
// takes ownership of the copy via AcquireDataOwnership().
35 ScriptData::ScriptData(const byte* data, int length)
36 : owns_data_(false), data_(data), length_(length) {
37 if (!IsAligned(reinterpret_cast<intptr_t>(data), kPointerAlignment)) {
38 byte* copy = NewArray<byte>(length);
39 DCHECK(IsAligned(reinterpret_cast<intptr_t>(copy), kPointerAlignment));
40 CopyBytes(copy, data, length);
// NOTE(review): a line is elided here in this listing (original line 41);
// presumably it repoints data_ at |copy| before ownership is acquired.
42 AcquireDataOwnership();
// Constructor for compiling a whole script (non-lazy, sloppy mode by
// default). Delegates shared setup to Initialize() in BASE mode.
47 CompilationInfo::CompilationInfo(Handle<Script> script,
49 : flags_(StrictModeField::encode(SLOPPY)),
51 osr_ast_id_(BailoutId::None()),
55 ast_value_factory_(NULL),
56 ast_value_factory_owned_(false) {
57 Initialize(script->GetIsolate(), BASE, zone);
// Constructor used when there is no script (code-stub compilation);
// initializes in STUB mode with a null script handle.
61 CompilationInfo::CompilationInfo(Isolate* isolate, Zone* zone)
62 : flags_(StrictModeField::encode(SLOPPY)),
63 script_(Handle<Script>::null()),
64 osr_ast_id_(BailoutId::None()),
68 ast_value_factory_(NULL),
69 ast_value_factory_owned_(false) {
70 Initialize(isolate, STUB, zone);
// Constructor for lazily (re)compiling a function identified by its
// SharedFunctionInfo; the script is recovered from the shared info.
74 CompilationInfo::CompilationInfo(Handle<SharedFunctionInfo> shared_info,
76 : flags_(StrictModeField::encode(SLOPPY) | IsLazy::encode(true)),
77 shared_info_(shared_info),
78 script_(Handle<Script>(Script::cast(shared_info->script()))),
79 osr_ast_id_(BailoutId::None()),
83 ast_value_factory_(NULL),
84 ast_value_factory_owned_(false) {
85 Initialize(script_->GetIsolate(), BASE, zone);
// Constructor for compiling a concrete JSFunction closure: captures its
// shared info, script and context so optimizing compilers can specialize.
89 CompilationInfo::CompilationInfo(Handle<JSFunction> closure,
91 : flags_(StrictModeField::encode(SLOPPY) | IsLazy::encode(true)),
93 shared_info_(Handle<SharedFunctionInfo>(closure->shared())),
94 script_(Handle<Script>(Script::cast(shared_info_->script()))),
95 context_(closure->context()),
96 osr_ast_id_(BailoutId::None()),
100 ast_value_factory_(NULL),
101 ast_value_factory_owned_(false) {
102 Initialize(script_->GetIsolate(), BASE, zone);
// Constructor for compiling a HydrogenCodeStub; runs in STUB mode and has
// no associated user function (optimization_id_ of -1 marks "not a real
// optimized compile").
106 CompilationInfo::CompilationInfo(HydrogenCodeStub* stub,
109 : flags_(StrictModeField::encode(SLOPPY) | IsLazy::encode(true)),
110 osr_ast_id_(BailoutId::None()),
112 this_has_uses_(true),
113 optimization_id_(-1),
114 ast_value_factory_(NULL),
115 ast_value_factory_owned_(false) {
116 Initialize(isolate, STUB, zone);
// Shared constructor tail: resets all per-compilation state, inherits the
// strict mode from an existing shared info if present, and reuses its
// feedback vector rather than allocating a fresh one.
121 void CompilationInfo::Initialize(Isolate* isolate,
127 global_scope_ = NULL;
130 compile_options_ = ScriptCompiler::kNoCompileOptions;
132 deferred_handles_ = NULL;
134 prologue_offset_ = Code::kPrologueOffsetNotSet;
135 opt_count_ = shared_info().is_null() ? 0 : shared_info()->opt_count();
// Offset ranges are only tracked while the CPU profiler is active.
136 no_frame_ranges_ = isolate->cpu_profiler()->is_profiling()
137 ? new List<OffsetRange>(2) : NULL;
138 for (int i = 0; i < DependentCode::kGroupCount; i++) {
139 dependencies_[i] = NULL;
146 abort_due_to_dependency_ = false;
// Native scripts and an active debugger toggle compilation-wide flags.
147 if (script_->type()->value() == Script::TYPE_NATIVE) MarkAsNative();
148 if (isolate_->debug()->is_active()) MarkAsDebug();
150 if (!shared_info_.is_null()) {
151 DCHECK(strict_mode() == SLOPPY);
152 SetStrictMode(shared_info_->strict_mode());
154 set_bailout_reason(kUnknown);
// We should initialize the CompilationInfo feedback vector from the
// passed in shared info, rather than creating a new one.
156 if (!shared_info().is_null() && shared_info()->is_compiled()) {
157 // We should initialize the CompilationInfo feedback vector from the
158 // passed in shared info, rather than creating a new one.
159 feedback_vector_ = Handle<FixedArray>(shared_info()->feedback_vector(),
// Destructor: releases heap-allocated helpers and verifies that every code
// dependency registered during compilation was either committed or rolled
// back (dependencies_ slots must all be NULL by now).
165 CompilationInfo::~CompilationInfo() {
166 delete deferred_handles_;
167 delete no_frame_ranges_;
168 if (ast_value_factory_owned_) delete ast_value_factory_;
170 // Check that no dependent maps have been added or added dependent maps have
171 // been rolled back or committed.
172 for (int i = 0; i < DependentCode::kGroupCount; i++) {
173 DCHECK_EQ(NULL, dependencies_[i]);
// Installs |code| as the finished code object in every DependentCode list
// this compilation registered with, then clears the per-group lists (they
// are zone-allocated, so clearing the pointer is sufficient).
179 void CompilationInfo::CommitDependencies(Handle<Code> code) {
180 for (int i = 0; i < DependentCode::kGroupCount; i++) {
181 ZoneList<Handle<HeapObject> >* group_objects = dependencies_[i];
182 if (group_objects == NULL) continue;
183 DCHECK(!object_wrapper_.is_null());
184 for (int j = 0; j < group_objects->length(); j++) {
185 DependentCode::DependencyGroup group =
186 static_cast<DependentCode::DependencyGroup>(i);
187 DependentCode* dependent_code =
188 DependentCode::ForObject(group_objects->at(j), group);
// Replace the placeholder CompilationInfo entry with the real code object.
189 dependent_code->UpdateToFinishedCode(group, this, *code);
191 dependencies_[i] = NULL; // Zone-allocated, no need to delete.
// Counterpart to CommitDependencies for aborted compiles: unregisters this
// CompilationInfo from every dependency group it joined, then clears the
// zone-allocated lists.
196 void CompilationInfo::RollbackDependencies() {
197 // Unregister from all dependent maps if not yet committed.
198 for (int i = 0; i < DependentCode::kGroupCount; i++) {
199 ZoneList<Handle<HeapObject> >* group_objects = dependencies_[i];
200 if (group_objects == NULL) continue;
201 for (int j = 0; j < group_objects->length(); j++) {
202 DependentCode::DependencyGroup group =
203 static_cast<DependentCode::DependencyGroup>(i);
204 DependentCode* dependent_code =
205 DependentCode::ForObject(group_objects->at(j), group);
206 dependent_code->RemoveCompilationInfo(group, this);
208 dependencies_[i] = NULL; // Zone-allocated, no need to delete.
// Returns the number of formal parameters. An elided branch (original line
// 214) appears to select between an explicit parameter_count_ (stub case)
// and the scope's count — TODO(review): confirm against the full source.
213 int CompilationInfo::num_parameters() const {
215 DCHECK(parameter_count_ > 0);
216 return parameter_count_;
218 return scope()->num_parameters();
// Returns the number of context-allocated (heap) slots from the scope.
// NOTE(review): lines 224-226 are elided here; presumably a stub-mode
// early-return precedes this — confirm against the full source.
223 int CompilationInfo::num_heap_slots() const {
227 return scope()->num_heap_slots();
// Computes the Code::Flags for the generated code: derived from the code
// stub when compiling a stub, otherwise OPTIMIZED_FUNCTION.
232 Code::Flags CompilationInfo::flags() const {
234 return Code::ComputeFlags(code_stub()->GetCodeKind(),
235 code_stub()->GetICState(),
236 code_stub()->GetExtraICState(),
237 code_stub()->GetStubType());
239 return Code::ComputeFlags(Code::OPTIMIZED_FUNCTION);
244 // Disable optimization for the rest of the compilation pipeline.
// Exception: a closure in a sufficiently simple context (no closure handle
// yet, trivial outer context, no sloppy eval, not inside 'with') may still
// be optimized later, so it keeps BASE mode instead of NONOPT.
245 void CompilationInfo::DisableOptimization() {
246 bool is_optimizable_closure =
247 FLAG_optimize_closures &&
248 closure_.is_null() &&
249 !scope_->HasTrivialOuterContext() &&
250 !scope_->outer_scope_calls_sloppy_eval() &&
251 !scope_->inside_with();
252 SetMode(is_optimizable_closure ? BASE : NONOPT);
256 // Primitive functions are unlikely to be picked up by the stack-walking
257 // profiler, so they trigger their own optimization when they're called
258 // for the SharedFunctionInfo::kCallsUntilPrimitiveOptimization-th time.
// Returns true only when Crankshaft is on, the function carries no
// dont-optimize markers, lazy compilation is allowed, and optimization has
// not been disabled on the shared info.
259 bool CompilationInfo::ShouldSelfOptimize() {
260 return FLAG_crankshaft &&
261 !function()->flags()->Contains(kDontSelfOptimize) &&
262 !function()->dont_optimize() &&
263 function()->scope()->AllowsLazyCompilation() &&
264 (shared_info().is_null() || !shared_info()->optimization_disabled());
// Attaches the analyzed scope to this compilation (must not already have
// one) and ensures the type-feedback vector exists with the slot count the
// function literal requires.
268 void CompilationInfo::PrepareForCompilation(Scope* scope) {
269 DCHECK(scope_ == NULL);
272 int length = function()->slot_count();
273 if (feedback_vector_.is_null()) {
274 // Allocate the feedback vector too.
275 feedback_vector_ = isolate()->factory()->NewTypeFeedbackVector(length);
// A vector inherited from the shared info must already have the right size.
277 DCHECK(feedback_vector_->length() == length);
// Graph builder subclass that records source positions while visiting the
// AST: before delegating each expression/statement visit to the base
// class, it forwards the node's position (when known) via
// SetSourcePosition. Module/declaration nodes are forwarded unchanged.
281 class HOptimizedGraphBuilderWithPositions: public HOptimizedGraphBuilder {
283 explicit HOptimizedGraphBuilderWithPositions(CompilationInfo* info)
284 : HOptimizedGraphBuilder(info) {
287 #define DEF_VISIT(type) \
288 virtual void Visit##type(type* node) V8_OVERRIDE { \
289 if (node->position() != RelocInfo::kNoPosition) { \
290 SetSourcePosition(node->position()); \
292 HOptimizedGraphBuilder::Visit##type(node); \
294 EXPRESSION_NODE_LIST(DEF_VISIT)
297 #define DEF_VISIT(type) \
298 virtual void Visit##type(type* node) V8_OVERRIDE { \
299 if (node->position() != RelocInfo::kNoPosition) { \
300 SetSourcePosition(node->position()); \
302 HOptimizedGraphBuilder::Visit##type(node); \
304 STATEMENT_NODE_LIST(DEF_VISIT)
307 #define DEF_VISIT(type) \
308 virtual void Visit##type(type* node) V8_OVERRIDE { \
309 HOptimizedGraphBuilder::Visit##type(node); \
311 MODULE_NODE_LIST(DEF_VISIT)
312 DECLARATION_NODE_LIST(DEF_VISIT)
// Phase 1 of an optimized compile: validates that optimization is
// permitted, ensures fullcode with deopt support exists, optionally routes
// the function through TurboFan, and otherwise builds the Hydrogen graph.
// Returns SUCCEEDED / FAILED, or aborts optimization (optionally disabling
// it permanently on the shared info).
317 OptimizedCompileJob::Status OptimizedCompileJob::CreateGraph() {
318 DCHECK(isolate()->use_crankshaft());
319 DCHECK(info()->IsOptimizing());
320 DCHECK(!info()->IsCompilingForDebugging());
322 // We should never arrive here if there is no code object on the
323 // shared function object.
324 DCHECK(info()->shared_info()->code()->kind() == Code::FUNCTION);
326 // We should never arrive here if optimization has been disabled on the
327 // shared function info.
328 DCHECK(!info()->shared_info()->optimization_disabled());
330 // Fall back to using the full code generator if it's not possible
331 // to use the Hydrogen-based optimizing compiler. We already have
332 // generated code for this from the shared function object.
333 if (FLAG_always_full_compiler) return AbortOptimization();
335 // Do not use crankshaft if we need to be able to set break points.
336 if (isolate()->DebuggerHasBreakPoints()) {
337 return AbortOptimization(kDebuggerHasBreakPoints);
340 // Limit the number of times we re-compile a functions with
341 // the optimizing compiler.
342 const int kMaxOptCount =
343 FLAG_deopt_every_n_times == 0 ? FLAG_max_opt_count : 1000;
344 if (info()->opt_count() > kMaxOptCount) {
345 return AbortAndDisableOptimization(kOptimizedTooManyTimes);
348 // Due to an encoding limit on LUnallocated operands in the Lithium
349 // language, we cannot optimize functions with too many formal parameters
350 // or perform on-stack replacement for function with too many
351 // stack-allocated local variables.
353 // The encoding is as a signed value, with parameters and receiver using
354 // the negative indices and locals the non-negative ones.
355 const int parameter_limit = -LUnallocated::kMinFixedSlotIndex;
356 Scope* scope = info()->scope();
357 if ((scope->num_parameters() + 1) > parameter_limit) {
358 return AbortAndDisableOptimization(kTooManyParameters);
361 const int locals_limit = LUnallocated::kMaxFixedSlotIndex;
362 if (info()->is_osr() &&
363 scope->num_parameters() + 1 + scope->num_stack_slots() > locals_limit) {
364 return AbortAndDisableOptimization(kTooManyParametersLocals);
367 if (scope->HasIllegalRedeclaration()) {
368 return AbortAndDisableOptimization(kFunctionWithIllegalRedeclaration);
371 // Check the whitelist for Crankshaft.
372 if (!info()->closure()->PassesFilter(FLAG_hydrogen_filter)) {
373 return AbortOptimization(kHydrogenFilter);
376 // Crankshaft requires a version of fullcode with deoptimization support.
377 // Recompile the unoptimized version of the code if the current version
378 // doesn't have deoptimization support already.
379 // Otherwise, if we are gathering compilation time and space statistics
380 // for hydrogen, gather baseline statistics for a fullcode compilation.
381 bool should_recompile = !info()->shared_info()->has_deoptimization_support();
382 if (should_recompile || FLAG_hydrogen_stats) {
383 base::ElapsedTimer timer;
384 if (FLAG_hydrogen_stats) {
// Re-run the full code generator on the same AST/scope, this time with
// deoptimization support enabled, and install the result on the shared
// function info.
387 CompilationInfoWithZone unoptimized(info()->shared_info());
388 // Note that we use the same AST that we will use for generating the
390 unoptimized.SetFunction(info()->function());
391 unoptimized.PrepareForCompilation(info()->scope());
392 unoptimized.SetContext(info()->context());
393 if (should_recompile) unoptimized.EnableDeoptimizationSupport();
394 bool succeeded = FullCodeGenerator::MakeCode(&unoptimized);
395 if (should_recompile) {
396 if (!succeeded) return SetLastStatus(FAILED);
397 Handle<SharedFunctionInfo> shared = info()->shared_info();
398 shared->EnableDeoptimizationSupport(*unoptimized.code());
399 // The existing unoptimized code was replaced with the new one.
400 Compiler::RecordFunctionCompilation(
401 Logger::LAZY_COMPILE_TAG, &unoptimized, shared);
403 if (FLAG_hydrogen_stats) {
404 isolate()->GetHStatistics()->IncrementFullCodeGen(timer.Elapsed());
408 DCHECK(info()->shared_info()->has_deoptimization_support());
410 // Check the whitelist for TurboFan.
411 if (info()->closure()->PassesFilter(FLAG_turbo_filter) &&
412 // TODO(turbofan): Make try-catch work and remove this bailout.
413 info()->function()->dont_optimize_reason() != kTryCatchStatement &&
414 info()->function()->dont_optimize_reason() != kTryFinallyStatement &&
415 // TODO(turbofan): Make OSR work and remove this bailout.
// TurboFan path: if the pipeline produced code, we are done.
417 compiler::Pipeline pipeline(info());
418 pipeline.GenerateCode();
419 if (!info()->code().is_null()) {
420 return SetLastStatus(SUCCEEDED);
424 if (FLAG_trace_hydrogen) {
425 Handle<String> name = info()->function()->debug_name();
426 PrintF("-----------------------------------------------------------\n");
427 PrintF("Compiling method %s using hydrogen\n", name->ToCString().get());
428 isolate()->GetHTracer()->TraceCompilation(info());
431 // Type-check the function.
432 AstTyper::Run(info());
// Use the position-tracking builder when positions are needed for
// tracing/IC logging; both builders are zone-allocated.
434 graph_builder_ = (FLAG_hydrogen_track_positions || FLAG_trace_ic)
435 ? new(info()->zone()) HOptimizedGraphBuilderWithPositions(info())
436 : new(info()->zone()) HOptimizedGraphBuilder(info());
438 Timer t(this, &time_taken_to_create_graph_);
439 info()->set_this_has_uses(false);
440 graph_ = graph_builder_->CreateGraph();
442 if (isolate()->has_pending_exception()) {
443 return SetLastStatus(FAILED);
446 // The function being compiled may have bailed out due to an inline
447 // candidate bailing out. In such a case, we don't disable
448 // optimization on the shared_info.
449 DCHECK(!graph_builder_->inline_bailout() || graph_ == NULL);
450 if (graph_ == NULL) {
451 if (graph_builder_->inline_bailout()) {
452 return AbortOptimization();
454 return AbortAndDisableOptimization();
// A concurrent dependency change invalidates this graph; retry later
// rather than disabling optimization.
458 if (info()->HasAbortedDueToDependencyChange()) {
459 return AbortOptimization(kBailedOutDueToDependencyChange);
462 return SetLastStatus(SUCCEEDED);
// Phase 2: runs the Hydrogen optimization passes and builds the Lithium
// chunk. This phase may run off the main thread, hence the Disallow*
// scopes forbidding allocation, handle use, and dependency changes.
466 OptimizedCompileJob::Status OptimizedCompileJob::OptimizeGraph() {
467 DisallowHeapAllocation no_allocation;
468 DisallowHandleAllocation no_handles;
469 DisallowHandleDereference no_deref;
470 DisallowCodeDependencyChange no_dependency_change;
472 DCHECK(last_status() == SUCCEEDED);
473 // TODO(turbofan): Currently everything is done in the first phase.
474 if (!info()->code().is_null()) {
475 return last_status();
478 Timer t(this, &time_taken_to_optimize_);
479 DCHECK(graph_ != NULL);
480 BailoutReason bailout_reason = kNoReason;
482 if (graph_->Optimize(&bailout_reason)) {
483 chunk_ = LChunk::NewChunk(graph_);
484 if (chunk_ != NULL) return SetLastStatus(SUCCEEDED);
485 } else if (bailout_reason != kNoReason) {
// Record the reason on the graph builder before aborting below.
486 graph_builder_->Bailout(bailout_reason);
489 return AbortOptimization();
// Phase 3: generates machine code from the Lithium chunk, records stats,
// and registers the result on the native context's optimized-code list.
// Map deprecation/instability during codegen causes a soft abort (retry
// allowed); any other codegen failure disables optimization.
493 OptimizedCompileJob::Status OptimizedCompileJob::GenerateCode() {
494 DCHECK(last_status() == SUCCEEDED);
495 // TODO(turbofan): Currently everything is done in the first phase.
496 if (!info()->code().is_null()) {
497 RecordOptimizationStats();
498 return last_status();
501 DCHECK(!info()->HasAbortedDueToDependencyChange());
502 DisallowCodeDependencyChange no_dependency_change;
503 DisallowJavascriptExecution no_js(isolate());
504 { // Scope for timer.
505 Timer timer(this, &time_taken_to_codegen_);
506 DCHECK(chunk_ != NULL);
507 DCHECK(graph_ != NULL);
508 // Deferred handles reference objects that were accessible during
509 // graph creation. To make sure that we don't encounter inconsistencies
510 // between graph creation and code generation, we disallow accessing
511 // objects through deferred handles during the latter, with exceptions.
512 DisallowDeferredHandleDereference no_deferred_handle_deref;
513 Handle<Code> optimized_code = chunk_->Codegen();
514 if (optimized_code.is_null()) {
515 if (info()->bailout_reason() == kNoReason) {
516 info_->set_bailout_reason(kCodeGenerationFailed);
517 } else if (info()->bailout_reason() == kMapBecameDeprecated) {
518 if (FLAG_trace_opt) {
519 PrintF("[aborted optimizing ");
520 info()->closure()->ShortPrint();
521 PrintF(" because a map became deprecated]\n");
523 return AbortOptimization();
524 } else if (info()->bailout_reason() == kMapBecameUnstable) {
525 if (FLAG_trace_opt) {
526 PrintF("[aborted optimizing ");
527 info()->closure()->ShortPrint();
528 PrintF(" because a map became unstable]\n");
530 return AbortOptimization();
// Any other reason: give up on optimizing this function for good.
532 return AbortAndDisableOptimization();
534 info()->SetCode(optimized_code);
536 RecordOptimizationStats();
537 // Add to the weak list of optimized code objects.
538 info()->context()->native_context()->AddOptimizedCode(*info()->code());
539 return SetLastStatus(SUCCEEDED);
// Bumps the per-function optimization counter (once per function, even if
// concurrent recompilation and OSR race) and emits timing/size statistics
// under the --trace-opt / --trace-opt-stats / --hydrogen-stats flags.
543 void OptimizedCompileJob::RecordOptimizationStats() {
544 Handle<JSFunction> function = info()->closure();
545 if (!function->IsOptimized()) {
546 // Concurrent recompilation and OSR may race. Increment only once.
547 int opt_count = function->shared()->opt_count();
548 function->shared()->set_opt_count(opt_count + 1);
550 double ms_creategraph = time_taken_to_create_graph_.InMillisecondsF();
551 double ms_optimize = time_taken_to_optimize_.InMillisecondsF();
552 double ms_codegen = time_taken_to_codegen_.InMillisecondsF();
553 if (FLAG_trace_opt) {
554 PrintF("[optimizing ");
555 function->ShortPrint();
556 PrintF(" - took %0.3f, %0.3f, %0.3f ms]\n", ms_creategraph, ms_optimize,
// Process-wide aggregates; function-local statics are fine here since
// this path is main-thread-only tracing output.
559 if (FLAG_trace_opt_stats) {
560 static double compilation_time = 0.0;
561 static int compiled_functions = 0;
562 static int code_size = 0;
564 compilation_time += (ms_creategraph + ms_optimize + ms_codegen);
565 compiled_functions++;
566 code_size += function->shared()->SourceSize();
567 PrintF("Compiled: %d functions with %d byte source size in %fms.\n",
572 if (FLAG_hydrogen_stats) {
573 isolate()->GetHStatistics()->IncrementSubtotals(time_taken_to_create_graph_,
574 time_taken_to_optimize_,
575 time_taken_to_codegen_);
580 // Sets the expected number of properties based on estimate from compiler.
// The raw estimate is adjusted heuristically (minimum of 2; conservative
// when serializing for a snapshot; generous when inobject slack tracking
// can reclaim the excess) before being stored on the shared info.
581 void SetExpectedNofPropertiesFromEstimate(Handle<SharedFunctionInfo> shared,
583 // If no properties are added in the constructor, they are more likely
584 // to be added later.
585 if (estimate == 0) estimate = 2;
587 // TODO(yangguo): check whether those heuristics are still up-to-date.
588 // We do not shrink objects that go into a snapshot (yet), so we adjust
589 // the estimate conservatively.
590 if (shared->GetIsolate()->serializer_enabled()) {
592 } else if (FLAG_clever_optimizations) {
593 // Inobject slack tracking will reclaim redundant inobject space later,
594 // so we can afford to adjust the estimate generously.
600 shared->set_expected_nof_properties(estimate);
// Copies the results of an unoptimized compile (scope info, code,
// feedback vector, property estimate, bailout reason, AST node count,
// strict mode) onto the SharedFunctionInfo.
604 static void UpdateSharedFunctionInfo(CompilationInfo* info) {
605 // Update the shared function info with the compiled code and the
606 // scope info. Please note, that the order of the shared function
607 // info initialization is important since set_scope_info might
608 // trigger a GC, causing the DCHECK below to be invalid if the code
609 // was flushed. By setting the code object last we avoid this.
610 Handle<SharedFunctionInfo> shared = info->shared_info();
611 Handle<ScopeInfo> scope_info =
612 ScopeInfo::Create(info->scope(), info->zone());
613 shared->set_scope_info(*scope_info);
615 Handle<Code> code = info->code();
616 CHECK(code->kind() == Code::FUNCTION);
617 shared->ReplaceCode(*code);
618 if (shared->optimization_disabled()) code->set_optimizable(false);
620 shared->set_feedback_vector(*info->feedback_vector());
622 // Set the expected number of properties for instances.
623 FunctionLiteral* lit = info->function();
624 int expected = lit->expected_property_count();
625 SetExpectedNofPropertiesFromEstimate(shared, expected);
627 // Check the function has compiled code.
628 DCHECK(shared->is_compiled());
629 shared->set_bailout_reason(lit->dont_optimize_reason());
630 shared->set_ast_node_count(lit->ast_node_count());
631 shared->set_strict_mode(lit->strict_mode());
635 // Sets the function info on a function.
636 // The start_position points to the first '(' character after the function name
637 // in the full script source. When counting characters in the script source the
638 // the first character is number 0 (not 1).
// Bulk-copies every AST-derived attribute of |lit| onto the freshly created
// SharedFunctionInfo (positions, parameter counts, laziness, strict mode,
// generator/arrow flags, caching policy, etc.).
639 static void SetFunctionInfo(Handle<SharedFunctionInfo> function_info,
640 FunctionLiteral* lit,
642 Handle<Script> script) {
643 function_info->set_length(lit->parameter_count());
644 function_info->set_formal_parameter_count(lit->parameter_count());
645 function_info->set_script(*script);
646 function_info->set_function_token_position(lit->function_token_position());
647 function_info->set_start_position(lit->start_position());
648 function_info->set_end_position(lit->end_position());
649 function_info->set_is_expression(lit->is_expression());
650 function_info->set_is_anonymous(lit->is_anonymous());
651 function_info->set_is_toplevel(is_toplevel);
652 function_info->set_inferred_name(*lit->inferred_name());
653 function_info->set_allows_lazy_compilation(lit->AllowsLazyCompilation());
654 function_info->set_allows_lazy_compilation_without_context(
655 lit->AllowsLazyCompilationWithoutContext());
656 function_info->set_strict_mode(lit->strict_mode());
657 function_info->set_uses_arguments(lit->scope()->arguments() != NULL);
658 function_info->set_has_duplicate_parameters(lit->has_duplicate_parameters());
659 function_info->set_ast_node_count(lit->ast_node_count());
660 function_info->set_is_function(lit->is_function());
661 function_info->set_bailout_reason(lit->dont_optimize_reason());
662 function_info->set_dont_cache(lit->flags()->Contains(kDontCache));
663 function_info->set_is_generator(lit->is_generator());
664 function_info->set_is_arrow(lit->is_arrow());
// Runs the unoptimized pipeline on an already-parsed CompilationInfo:
// AST rewriting, scope analysis, then full code generation. On codegen
// failure, ensures a pending exception (StackOverflow) is set.
668 static bool CompileUnoptimizedCode(CompilationInfo* info) {
669 DCHECK(AllowCompilation::IsAllowed(info->isolate()));
670 DCHECK(info->function() != NULL);
671 if (!Rewriter::Rewrite(info)) return false;
672 if (!Scope::Analyze(info)) return false;
673 DCHECK(info->scope() != NULL);
675 if (!FullCodeGenerator::MakeCode(info)) {
676 Isolate* isolate = info->isolate();
677 if (!isolate->has_pending_exception()) isolate->StackOverflow();
// Parses and compiles unoptimized code for |info|, updating the shared
// function info on success. Returns an empty MaybeHandle on any failure
// (parse or compile); interrupts are postponed for the duration.
684 MUST_USE_RESULT static MaybeHandle<Code> GetUnoptimizedCodeCommon(
685 CompilationInfo* info) {
686 VMState<COMPILER> state(info->isolate());
687 PostponeInterruptsScope postpone(info->isolate());
688 if (!Parser::Parse(info)) return MaybeHandle<Code>();
689 info->SetStrictMode(info->function()->strict_mode());
691 if (!CompileUnoptimizedCode(info)) return MaybeHandle<Code>();
692 Compiler::RecordFunctionCompilation(
693 Logger::LAZY_COMPILE_TAG, info, info->shared_info());
694 UpdateSharedFunctionInfo(info);
695 DCHECK_EQ(Code::FUNCTION, info->code()->kind());
// Returns unoptimized code for |function|, reusing the shared info's code
// if already compiled. Under --always-opt, immediately attempts a
// non-concurrent optimized compile as well.
700 MaybeHandle<Code> Compiler::GetUnoptimizedCode(Handle<JSFunction> function) {
701 DCHECK(!function->GetIsolate()->has_pending_exception());
702 DCHECK(!function->is_compiled());
703 if (function->shared()->is_compiled()) {
704 return Handle<Code>(function->shared()->code());
707 CompilationInfoWithZone info(function);
709 ASSIGN_RETURN_ON_EXCEPTION(info.isolate(), result,
710 GetUnoptimizedCodeCommon(&info),
713 if (FLAG_always_opt &&
714 info.isolate()->use_crankshaft() &&
715 !info.shared_info()->optimization_disabled() &&
716 !info.isolate()->DebuggerHasBreakPoints()) {
717 Handle<Code> opt_code;
718 if (Compiler::GetOptimizedCode(
720 Compiler::NOT_CONCURRENT).ToHandle(&opt_code)) {
// Overload taking a SharedFunctionInfo directly (no closure/context):
// compiles unoptimized code via the common path.
729 MaybeHandle<Code> Compiler::GetUnoptimizedCode(
730 Handle<SharedFunctionInfo> shared) {
731 DCHECK(!shared->GetIsolate()->has_pending_exception());
732 DCHECK(!shared->is_compiled());
734 CompilationInfoWithZone info(shared);
735 return GetUnoptimizedCodeCommon(&info);
// Ensures |function| has compiled code, compiling lazily if needed.
// On failure, optionally clears the pending exception (CLEAR_EXCEPTION).
// Returns true iff the function ends up compiled.
739 bool Compiler::EnsureCompiled(Handle<JSFunction> function,
740 ClearExceptionFlag flag) {
741 if (function->is_compiled()) return true;
742 MaybeHandle<Code> maybe_code = Compiler::GetUnoptimizedCode(function);
744 if (!maybe_code.ToHandle(&code)) {
745 if (flag == CLEAR_EXCEPTION) {
746 function->GetIsolate()->clear_pending_exception();
750 function->ReplaceCode(*code);
751 DCHECK(function->is_compiled());
756 // Compile full code for debugging. This code will have debug break slots
757 // and deoptimization information. Deoptimization information is required
758 // in case that an optimized version of this function is still activated on
759 // the stack. It will also make sure that the full code is compiled with
760 // the same flags as the previous version, that is flags which can change
761 // the code generated. The current method of mapping from already compiled
762 // full code without debug break slots to full code with debug break slots
763 // depends on the generated code is otherwise exactly the same.
764 // If compilation fails, just keep the existing code.
765 MaybeHandle<Code> Compiler::GetCodeForDebugging(Handle<JSFunction> function) {
766 CompilationInfoWithZone info(function);
767 Isolate* isolate = info.isolate();
768 VMState<COMPILER> state(isolate);
772 DCHECK(!isolate->has_pending_exception());
773 Handle<Code> old_code(function->shared()->code());
774 DCHECK(old_code->kind() == Code::FUNCTION);
775 DCHECK(!old_code->has_debug_break_slots());
777 info.MarkCompilingForDebugging();
// Mirror the old code's optimizability so the new full code is generated
// with the same flags (see header comment above).
778 if (old_code->is_compiled_optimizable()) {
779 info.EnableDeoptimizationSupport();
781 info.MarkNonOptimizable();
783 MaybeHandle<Code> maybe_new_code = GetUnoptimizedCodeCommon(&info);
784 Handle<Code> new_code;
785 if (!maybe_new_code.ToHandle(&new_code)) {
786 isolate->clear_pending_exception();
788 DCHECK_EQ(old_code->is_compiled_optimizable(),
789 new_code->is_compiled_optimizable());
791 return maybe_new_code;
// Recompiles a whole script for LiveEdit: parses and compiles unoptimized
// code while a LiveEditFunctionTracker records function boundaries, then
// refreshes the scope info on the resulting shared function info.
795 void Compiler::CompileForLiveEdit(Handle<Script> script) {
796 // TODO(635): support extensions.
797 CompilationInfoWithZone info(script);
798 PostponeInterruptsScope postpone(info.isolate());
799 VMState<COMPILER> state(info.isolate());
802 if (!Parser::Parse(&info)) return;
803 info.SetStrictMode(info.function()->strict_mode());
805 LiveEditFunctionTracker tracker(info.isolate(), info.function());
806 if (!CompileUnoptimizedCode(&info)) return;
807 if (!info.shared_info().is_null()) {
808 Handle<ScopeInfo> scope_info = ScopeInfo::Create(info.scope(),
810 info.shared_info()->set_scope_info(*scope_info);
812 tracker.RecordRootFunctionInfo(info.code());
// True when the debugger requires everything compiled up front: either
// LiveEdit is active, or breakpoints are set and lazy compilation without
// a context is not permitted for this compile.
816 static bool DebuggerWantsEagerCompilation(CompilationInfo* info,
817 bool allow_lazy_without_ctx = false) {
818 return LiveEditFunctionTracker::IsActive(info->isolate()) ||
819 (info->isolate()->DebuggerHasBreakPoints() && !allow_lazy_without_ctx);
// Compiles top-level code (a global script or eval): parses (lazily when
// the source is long enough and the debugger allows it), compiles
// unoptimized code, allocates the top-level SharedFunctionInfo, logs the
// compilation, and notifies the debugger. Returns a null handle on
// failure.
823 static Handle<SharedFunctionInfo> CompileToplevel(CompilationInfo* info) {
824 Isolate* isolate = info->isolate();
825 PostponeInterruptsScope postpone(isolate);
826 DCHECK(!isolate->native_context().is_null());
827 Handle<Script> script = info->script();
829 // TODO(svenpanne) Obscure place for this, perhaps move to OnBeforeCompile?
830 FixedArray* array = isolate->native_context()->embedder_data();
831 script->set_context_data(array->get(0));
833 isolate->debug()->OnBeforeCompile(script);
835 DCHECK(info->is_eval() || info->is_global());
// Lazy parsing is worthwhile only for long sources (or when consuming a
// parser cache) and only when the debugger does not demand eager code.
837 bool parse_allow_lazy =
838 (info->compile_options() == ScriptCompiler::kConsumeParserCache ||
839 String::cast(script->source())->length() > FLAG_min_preparse_length) &&
840 !DebuggerWantsEagerCompilation(info);
842 if (!parse_allow_lazy &&
843 (info->compile_options() == ScriptCompiler::kProduceParserCache ||
844 info->compile_options() == ScriptCompiler::kConsumeParserCache)) {
845 // We are going to parse eagerly, but we either 1) have cached data produced
846 // by lazy parsing or 2) are asked to generate cached data. We cannot use
847 // the existing data, since it won't contain all the symbols we need for
848 // eager parsing. In addition, it doesn't make sense to produce the data
849 // when parsing eagerly. That data would contain all symbols, but no
850 // functions, so it cannot be used to aid lazy parsing later.
851 info->SetCachedData(NULL, ScriptCompiler::kNoCompileOptions);
854 Handle<SharedFunctionInfo> result;
856 { VMState<COMPILER> state(info->isolate());
857 if (!Parser::Parse(info, parse_allow_lazy)) {
858 return Handle<SharedFunctionInfo>::null();
861 FunctionLiteral* lit = info->function();
862 LiveEditFunctionTracker live_edit_tracker(isolate, lit);
864 // Measure how long it takes to do the compilation; only take the
865 // rest of the function into account to avoid overlap with the
866 // parsing statistics.
867 HistogramTimer* rate = info->is_eval()
868 ? info->isolate()->counters()->compile_eval()
869 : info->isolate()->counters()->compile();
870 HistogramTimerScope timer(rate);
873 if (!CompileUnoptimizedCode(info)) {
874 return Handle<SharedFunctionInfo>::null();
877 // Allocate function.
878 DCHECK(!info->code().is_null());
879 result = isolate->factory()->NewSharedFunctionInfo(
880 lit->name(), lit->materialized_literal_count(), lit->is_generator(),
881 lit->is_arrow(), info->code(),
882 ScopeInfo::Create(info->scope(), info->zone()),
883 info->feedback_vector());
885 DCHECK_EQ(RelocInfo::kNoPosition, lit->function_token_position());
886 SetFunctionInfo(result, lit, true, script);
888 Handle<String> script_name = script->name()->IsString()
889 ? Handle<String>(String::cast(script->name()))
890 : isolate->factory()->empty_string();
891 Logger::LogEventsAndTags log_tag = info->is_eval()
893 : Logger::ToNativeByScript(Logger::SCRIPT_TAG, *script);
895 PROFILE(isolate, CodeCreateEvent(
896 log_tag, *info->code(), *result, info, *script_name));
897 GDBJIT(AddCode(script_name, script, info->code(), info));
899 // Hint to the runtime system used when allocating space for initial
900 // property space by setting the expected number of properties for
901 // the instances of the function.
902 SetExpectedNofPropertiesFromEstimate(result,
903 lit->expected_property_count());
905 script->set_compilation_state(Script::COMPILATION_STATE_COMPILED);
907 live_edit_tracker.RecordFunctionInfo(result, lit, info->zone());
910 isolate->debug()->OnAfterCompile(script);
// Compiles (or fetches from the compilation cache) the code for an eval()
// call and returns a fresh closure for it in |context|. Eval code is
// never optimized; cacheable results are stored keyed on source, context,
// strict mode and scope position.
916 MaybeHandle<JSFunction> Compiler::GetFunctionFromEval(
917 Handle<String> source,
918 Handle<Context> context,
919 StrictMode strict_mode,
920 ParseRestriction restriction,
921 int scope_position) {
922 Isolate* isolate = source->GetIsolate();
923 int source_length = source->length();
924 isolate->counters()->total_eval_size()->Increment(source_length);
925 isolate->counters()->total_compile_size()->Increment(source_length);
927 CompilationCache* compilation_cache = isolate->compilation_cache();
928 MaybeHandle<SharedFunctionInfo> maybe_shared_info =
929 compilation_cache->LookupEval(source, context, strict_mode,
931 Handle<SharedFunctionInfo> shared_info;
933 if (!maybe_shared_info.ToHandle(&shared_info)) {
// Cache miss: compile the eval source as a top-level script.
934 Handle<Script> script = isolate->factory()->NewScript(source);
935 CompilationInfoWithZone info(script);
937 if (context->IsNativeContext()) info.MarkAsGlobal();
938 info.SetStrictMode(strict_mode);
939 info.SetParseRestriction(restriction);
940 info.SetContext(context);
942 Debug::RecordEvalCaller(script);
944 shared_info = CompileToplevel(&info);
946 if (shared_info.is_null()) {
947 return MaybeHandle<JSFunction>();
949 // Explicitly disable optimization for eval code. We're not yet prepared
950 // to handle eval-code in the optimizing compiler.
951 shared_info->DisableOptimization(kEval);
953 // If caller is strict mode, the result must be in strict mode as well.
954 DCHECK(strict_mode == SLOPPY || shared_info->strict_mode() == STRICT);
955 if (!shared_info->dont_cache()) {
956 compilation_cache->PutEval(
957 source, context, shared_info, scope_position);
// Cache hit from an earlier GC epoch: reset IC-age-dependent state.
960 } else if (shared_info->ic_age() != isolate->heap()->global_ic_age()) {
961 shared_info->ResetForNewContext(isolate->heap()->global_ic_age());
964 return isolate->factory()->NewFunctionFromSharedFunctionInfo(
965 shared_info, context, NOT_TENURED);
// Compiles a toplevel script, honoring the requested cache policy
// (produce/consume parser or code cache) and consulting the compilation
// cache for non-extension scripts. Returns the resulting
// SharedFunctionInfo, or reports pending messages on failure.
969 Handle<SharedFunctionInfo> Compiler::CompileScript(
970 Handle<String> source, Handle<Object> script_name, int line_offset,
971 int column_offset, bool is_shared_cross_origin, Handle<Context> context,
972 v8::Extension* extension, ScriptData** cached_data,
973 ScriptCompiler::CompileOptions compile_options, NativesFlag natives) {
// Sanity-check the cached_data/extension combination for each cache mode:
// producing expects an empty slot, consuming expects populated data, and
// extensions are incompatible with caching in either direction.
974 if (compile_options == ScriptCompiler::kNoCompileOptions) {
976 } else if (compile_options == ScriptCompiler::kProduceParserCache ||
977 compile_options == ScriptCompiler::kProduceCodeCache) {
978 DCHECK(cached_data && !*cached_data);
979 DCHECK(extension == NULL);
981 DCHECK(compile_options == ScriptCompiler::kConsumeParserCache ||
982 compile_options == ScriptCompiler::kConsumeCodeCache);
983 DCHECK(cached_data && *cached_data);
984 DCHECK(extension == NULL);
986 Isolate* isolate = source->GetIsolate();
987 int source_length = source->length();
988 isolate->counters()->total_load_size()->Increment(source_length);
989 isolate->counters()->total_compile_size()->Increment(source_length);
991 CompilationCache* compilation_cache = isolate->compilation_cache();
993 // Do a lookup in the compilation cache but not for extensions.
994 MaybeHandle<SharedFunctionInfo> maybe_result;
995 Handle<SharedFunctionInfo> result;
996 if (extension == NULL) {
// Deserializing a code cache bypasses compilation entirely; skipped while
// the debugger is loaded.
997 if (FLAG_serialize_toplevel &&
998 compile_options == ScriptCompiler::kConsumeCodeCache &&
999 !isolate->debug()->is_loaded()) {
1000 return CodeSerializer::Deserialize(isolate, *cached_data, source);
1002 maybe_result = compilation_cache->LookupScript(
1003 source, script_name, line_offset, column_offset,
1004 is_shared_cross_origin, context);
// Timer only used for the profile_deserialization trace output below.
1008 base::ElapsedTimer timer;
1009 if (FLAG_profile_deserialization && FLAG_serialize_toplevel &&
1010 compile_options == ScriptCompiler::kProduceCodeCache) {
1014 if (!maybe_result.ToHandle(&result)) {
1015 // No cache entry found. Compile the script.
1017 // Create a script object describing the script to be compiled.
1018 Handle<Script> script = isolate->factory()->NewScript(source);
1019 if (natives == NATIVES_CODE) {
1020 script->set_type(Smi::FromInt(Script::TYPE_NATIVE));
1022 if (!script_name.is_null()) {
1023 script->set_name(*script_name);
1024 script->set_line_offset(Smi::FromInt(line_offset));
1025 script->set_column_offset(Smi::FromInt(column_offset));
1027 script->set_is_shared_cross_origin(is_shared_cross_origin);
1029 // Compile the function and add it to the cache.
1030 CompilationInfoWithZone info(script);
1031 info.MarkAsGlobal();
1032 info.SetCachedData(cached_data, compile_options);
1033 info.SetExtension(extension);
1034 info.SetContext(context);
1035 if (FLAG_serialize_toplevel &&
1036 compile_options == ScriptCompiler::kProduceCodeCache) {
1037 info.PrepareForSerializing();
1039 if (FLAG_use_strict) info.SetStrictMode(STRICT);
1041 result = CompileToplevel(&info);
// Only cache non-extension results that are not marked dont_cache.
1042 if (extension == NULL && !result.is_null() && !result->dont_cache()) {
1043 compilation_cache->PutScript(source, context, result);
// When producing a code cache, serialize the fresh result into the
// caller-provided cached_data slot.
1044 if (FLAG_serialize_toplevel &&
1045 compile_options == ScriptCompiler::kProduceCodeCache) {
1046 *cached_data = CodeSerializer::Serialize(isolate, result, source);
1047 if (FLAG_profile_deserialization) {
1048 PrintF("[Compiling and serializing %d bytes took %0.3f ms]\n",
1049 (*cached_data)->length(), timer.Elapsed().InMillisecondsF());
1054 if (result.is_null()) isolate->ReportPendingMessages();
// Cache hit from an older GC epoch: reset IC-related state.
1055 } else if (result->ic_age() != isolate->heap()->global_ic_age()) {
1056 result->ResetForNewContext(isolate->heap()->global_ic_age());
// Builds a SharedFunctionInfo for an inner function literal found while
// compiling a script. Either defers codegen (lazy compilation, installing
// the CompileUnoptimized builtin as a trampoline) or compiles eagerly with
// the full code generator. Returns null on codegen failure.
1062 Handle<SharedFunctionInfo> Compiler::BuildFunctionInfo(
1063 FunctionLiteral* literal, Handle<Script> script,
1064 CompilationInfo* outer_info) {
1065 // Precondition: code has been parsed and scopes have been analyzed.
1066 CompilationInfoWithZone info(script);
1067 info.SetFunction(literal);
1068 info.PrepareForCompilation(literal->scope());
1069 info.SetStrictMode(literal->scope()->strict_mode());
// Inner functions inherit the outer compilation's serialization intent.
1070 if (outer_info->will_serialize()) info.PrepareForSerializing();
1072 Isolate* isolate = info.isolate();
1073 Factory* factory = isolate->factory();
1074 LiveEditFunctionTracker live_edit_tracker(isolate, literal);
1075 // Determine if the function can be lazily compiled. This is necessary to
1076 // allow some of our builtin JS files to be lazily compiled. These
1077 // builtins cannot be handled lazily by the parser, since we have to know
1078 // if a function uses the special natives syntax, which is something the
1080 // If the debugger requests compilation for break points, we cannot be
1081 // aggressive about lazy compilation, because it might trigger compilation
1082 // of functions without an outer context when setting a breakpoint through
1083 // Debug::FindSharedFunctionInfoInScript.
1084 bool allow_lazy_without_ctx = literal->AllowsLazyCompilationWithoutContext();
1085 bool allow_lazy = literal->AllowsLazyCompilation() &&
1086 !DebuggerWantsEagerCompilation(&info, allow_lazy_without_ctx);
1089 Handle<ScopeInfo> scope_info;
// Lazy path: install the lazy-compile trampoline and an empty scope info.
// Parenthesized literals are excluded (commonly IIFEs, compiled eagerly).
1090 if (FLAG_lazy && allow_lazy && !literal->is_parenthesized()) {
1091 Handle<Code> code = isolate->builtins()->CompileUnoptimized();
1093 scope_info = Handle<ScopeInfo>(ScopeInfo::Empty(isolate));
// Eager path: generate full (unoptimized) code now.
1094 } else if (FullCodeGenerator::MakeCode(&info)) {
1095 DCHECK(!info.code().is_null());
1096 scope_info = ScopeInfo::Create(info.scope(), info.zone());
// Codegen failed: signal with a null handle.
1098 return Handle<SharedFunctionInfo>::null();
1101 // Create a shared function info object.
1102 Handle<SharedFunctionInfo> result = factory->NewSharedFunctionInfo(
1103 literal->name(), literal->materialized_literal_count(),
1104 literal->is_generator(), literal->is_arrow(), info.code(), scope_info,
1105 info.feedback_vector());
1106 SetFunctionInfo(result, literal, false, script);
1107 RecordFunctionCompilation(Logger::FUNCTION_TAG, &info, result);
1108 result->set_allows_lazy_compilation(allow_lazy);
1109 result->set_allows_lazy_compilation_without_context(allow_lazy_without_ctx);
1111 // Set the expected number of properties for instances and return
1112 // the resulting function.
1113 SetExpectedNofPropertiesFromEstimate(result,
1114 literal->expected_property_count());
1115 live_edit_tracker.RecordFunctionInfo(result, literal, info.zone());
// Looks up previously generated optimized code for |function| (keyed by its
// native context and |osr_ast_id|) in the shared info's optimized code map.
// On a hit, also restores the cached literals array onto the function.
// Returns an empty MaybeHandle when caching is off or nothing is found.
1120 MUST_USE_RESULT static MaybeHandle<Code> GetCodeFromOptimizedCodeMap(
1121 Handle<JSFunction> function,
1122 BailoutId osr_ast_id) {
1123 if (FLAG_cache_optimized_code) {
1124 Handle<SharedFunctionInfo> shared(function->shared());
1125 // Bound functions are not cached.
1126 if (shared->bound()) return MaybeHandle<Code>();
// Raw pointers (literals, code) are used below; forbid GC until the
// handles are created.
1127 DisallowHeapAllocation no_gc;
1128 int index = shared->SearchOptimizedCodeMap(
1129 function->context()->native_context(), osr_ast_id);
1131 if (FLAG_trace_opt) {
1132 PrintF("[found optimized code for ");
1133 function->ShortPrint();
1134 if (!osr_ast_id.IsNone()) {
1135 PrintF(" at OSR AST id %d", osr_ast_id.ToInt());
// Restore the cached literals (if any) alongside the cached code.
1139 FixedArray* literals = shared->GetLiteralsFromOptimizedCodeMap(index);
1140 if (literals != NULL) function->set_literals(literals);
1141 return Handle<Code>(shared->GetCodeFromOptimizedCodeMap(index));
1144 return MaybeHandle<Code>();
// Caches freshly produced optimized code in the shared info's optimized
// code map, keyed by native context and OSR ast id. No-op for
// non-optimized code, context-specialized TurboFan code, bound functions,
// or when the cache flag is off.
1148 static void InsertCodeIntoOptimizedCodeMap(CompilationInfo* info) {
1149 Handle<Code> code = info->code();
1150 if (code->kind() != Code::OPTIMIZED_FUNCTION) return; // Nothing to do.
1152 // Context specialization folds-in the context, so no sharing can occur.
1153 if (code->is_turbofanned() && FLAG_context_specialization) return;
1155 // Cache optimized code.
1156 if (FLAG_cache_optimized_code) {
1157 Handle<JSFunction> function = info->closure();
1158 Handle<SharedFunctionInfo> shared(function->shared());
1159 // Do not cache bound functions.
1160 if (shared->bound()) return;
// Literals are cached together with the code so a later hit can restore
// both (see GetCodeFromOptimizedCodeMap).
1161 Handle<FixedArray> literals(function->literals());
1162 Handle<Context> native_context(function->context()->native_context());
1163 SharedFunctionInfo::AddToOptimizedCodeMap(
1164 shared, native_context, code, literals, info->osr_ast_id());
// Runs the front-end phases shared by both optimized-compile entry points:
// parse, propagate the parsed strict mode, rewrite the AST, and analyze
// scopes. Returns false as soon as any phase fails.
1169 static bool CompileOptimizedPrologue(CompilationInfo* info) {
1170 if (!Parser::Parse(info)) return false;
// Strict mode is only known after parsing; copy it onto the info.
1171 info->SetStrictMode(info->function()->strict_mode());
1173 if (!Rewriter::Rewrite(info)) return false;
1174 if (!Scope::Analyze(info)) return false;
1175 DCHECK(info->scope() != NULL);
// Synchronously runs the full optimizing pipeline (graph creation,
// optimization, code generation) on the current thread. On success,
// caches the code and logs the compilation; returns false on any failure.
1180 static bool GetOptimizedCodeNow(CompilationInfo* info) {
1181 if (!CompileOptimizedPrologue(info)) return false;
1183 TimerEventScope<TimerEventRecompileSynchronous> timer(info->isolate());
// All three job phases run back-to-back here, unlike the concurrent path
// where GenerateCode happens later on the main thread.
1185 OptimizedCompileJob job(info);
1186 if (job.CreateGraph() != OptimizedCompileJob::SUCCEEDED) return false;
1187 if (job.OptimizeGraph() != OptimizedCompileJob::SUCCEEDED) return false;
1188 if (job.GenerateCode() != OptimizedCompileJob::SUCCEEDED) return false;
1191 DCHECK(!info->isolate()->has_pending_exception());
1192 InsertCodeIntoOptimizedCodeMap(info);
1193 Compiler::RecordFunctionCompilation(
1194 Logger::LAZY_COMPILE_TAG, info, info->shared_info());
// Queues an optimizing compile on the concurrent recompilation thread.
// Graph creation still happens here on the calling thread; only the
// optimization phase is handed off. Returns false when the queue is full
// or graph creation fails.
1199 static bool GetOptimizedCodeLater(CompilationInfo* info) {
1200 Isolate* isolate = info->isolate();
1201 if (!isolate->optimizing_compiler_thread()->IsQueueAvailable()) {
1202 if (FLAG_trace_concurrent_recompilation) {
1203 PrintF(" ** Compilation queue full, will retry optimizing ");
1204 info->closure()->PrintName();
1205 PrintF(" later.\n");
// The info's handles must outlive this call since the job runs on another
// thread; copy them into a dedicated compilation handle scope.
1210 CompilationHandleScope handle_scope(info);
1211 if (!CompileOptimizedPrologue(info)) return false;
1212 info->SaveHandles(); // Copy handles to the compilation handle scope.
1214 TimerEventScope<TimerEventRecompileSynchronous> timer(info->isolate());
// Job is zone-allocated in the info's zone, so its lifetime is tied to the
// CompilationInfo that the background thread now owns.
1216 OptimizedCompileJob* job = new(info->zone()) OptimizedCompileJob(info);
1217 OptimizedCompileJob::Status status = job->CreateGraph();
1218 if (status != OptimizedCompileJob::SUCCEEDED) return false;
1219 isolate->optimizing_compiler_thread()->QueueForOptimization(job);
1221 if (FLAG_trace_concurrent_recompilation) {
1222 PrintF(" ** Queued ");
1223 info->closure()->PrintName();
1224 if (info->is_osr()) {
1225 PrintF(" for concurrent OSR at %d.\n", info->osr_ast_id().ToInt());
1227 PrintF(" for concurrent optimization.\n");
// Entry point for optimizing |function| (optionally at an OSR entry).
// Checks the optimized code map first, then either queues a concurrent
// recompile (returning the InOptimizationQueue builtin as a placeholder)
// or compiles synchronously. Returns an empty handle on failure, with any
// pending exception cleared.
1234 MaybeHandle<Code> Compiler::GetOptimizedCode(Handle<JSFunction> function,
1235 Handle<Code> current_code,
1236 ConcurrencyMode mode,
1237 BailoutId osr_ast_id) {
1238 Handle<Code> cached_code;
1239 if (GetCodeFromOptimizedCodeMap(
1240 function, osr_ast_id).ToHandle(&cached_code)) {
// The info owns a zone; SmartPointer ensures cleanup on every exit path
// except the explicit Detach below.
1244 SmartPointer<CompilationInfo> info(new CompilationInfoWithZone(function));
1245 Isolate* isolate = info->isolate();
1246 DCHECK(AllowCompilation::IsAllowed(isolate));
1247 VMState<COMPILER> state(isolate);
1248 DCHECK(!isolate->has_pending_exception());
1249 PostponeInterruptsScope postpone(isolate);
1251 Handle<SharedFunctionInfo> shared = info->shared_info();
// An empty scope info would mean the function was never fully compiled.
1252 DCHECK_NE(ScopeInfo::Empty(isolate), shared->scope_info());
1253 int compiled_size = shared->end_position() - shared->start_position();
1254 isolate->counters()->total_compile_size()->Increment(compiled_size);
// Reset profiler ticks so the runtime profiler re-counts hotness.
1255 current_code->set_profiler_ticks(0);
1257 info->SetOptimizing(osr_ast_id, current_code);
1259 if (mode == CONCURRENT) {
1260 if (GetOptimizedCodeLater(info.get())) {
1261 info.Detach(); // The background recompile job owns this now.
1262 return isolate->builtins()->InOptimizationQueue();
1265 if (GetOptimizedCodeNow(info.get())) return info->code();
1269 if (FLAG_trace_opt) {
1270 PrintF("[failed to optimize ");
1271 function->PrintName();
1272 PrintF(": %s]\n", GetBailoutReason(info->bailout_reason()));
// Optimization failure is not an error for the caller; swallow it.
1275 if (isolate->has_pending_exception()) isolate->clear_pending_exception();
1276 return MaybeHandle<Code>();
// Finalizes a concurrent recompile job on the main thread: bails out if
// the job failed, optimization was disabled, a dependency changed, or the
// debugger is active; otherwise generates code, logs it, caches it, and
// returns it. Returns a null handle on any bailout.
1280 Handle<Code> Compiler::GetConcurrentlyOptimizedCode(OptimizedCompileJob* job) {
1281 // Take ownership of compilation info. Deleting compilation info
1282 // also tears down the zone and the recompile job.
1283 SmartPointer<CompilationInfo> info(job->info());
1284 Isolate* isolate = info->isolate();
1286 VMState<COMPILER> state(isolate);
1287 TimerEventScope<TimerEventRecompileSynchronous> timer(info->isolate());
1289 Handle<SharedFunctionInfo> shared = info->shared_info();
// Reset ticks so the profiler re-evaluates hotness from scratch.
1290 shared->code()->set_profiler_ticks(0);
1292 // 1) Optimization may have failed.
1293 // 2) The function may have already been optimized by OSR. Simply continue.
1294 // Except when OSR already disabled optimization for some reason.
1295 // 3) The code may have already been invalidated due to dependency change.
1296 // 4) Debugger may have been activated.
1298 if (job->last_status() != OptimizedCompileJob::SUCCEEDED ||
1299 shared->optimization_disabled() ||
1300 info->HasAbortedDueToDependencyChange() ||
1301 isolate->DebuggerHasBreakPoints()) {
1302 return Handle<Code>::null();
// Code generation must happen on the main thread, after the background
// phases completed.
1305 if (job->GenerateCode() != OptimizedCompileJob::SUCCEEDED) {
1306 return Handle<Code>::null();
1309 Compiler::RecordFunctionCompilation(
1310 Logger::LAZY_COMPILE_TAG, info.get(), shared);
// Only insert if no equivalent entry appeared in the map meanwhile
// (e.g. from a racing OSR compile).
1311 if (info->shared_info()->SearchOptimizedCodeMap(
1312 info->context()->native_context(), info->osr_ast_id()) == -1) {
1313 InsertCodeIntoOptimizedCodeMap(info.get());
1316 if (FLAG_trace_concurrent_recompilation) {
1317 PrintF(" ** Optimized code for ");
1318 info->closure()->PrintName();
1319 PrintF(" generated.\n");
// Re-wrap in a fresh handle: |info| (and its handles) dies with this scope.
1322 return Handle<Code>(*info->code());
// Emits logger/profiler code-creation events and GDB JIT registration for
// a finished compilation. |shared| is passed explicitly because a
// script-based CompilationInfo has no shared info of its own.
1326 void Compiler::RecordFunctionCompilation(Logger::LogEventsAndTags tag,
1327 CompilationInfo* info,
1328 Handle<SharedFunctionInfo> shared) {
1329 // SharedFunctionInfo is passed separately, because if CompilationInfo
1330 // was created using Script object, it will not have it.
1332 // Log the code generation. If source information is available include
1333 // script name and line number. Check explicitly whether logging is
1334 // enabled as finding the line number is not free.
1335 if (info->isolate()->logger()->is_logging_code_events() ||
1336 info->isolate()->cpu_profiler()->is_profiling()) {
1337 Handle<Script> script = info->script();
1338 Handle<Code> code = info->code();
// The lazy-compile trampoline is not real generated code; skip it.
// NOTE(review): the skip itself is on an omitted line -- confirm against
// the full source.
1339 if (code.is_identical_to(
1340 info->isolate()->builtins()->CompileUnoptimized())) {
// Positions are converted to 1-based line/column numbers for the log.
1343 int line_num = Script::GetLineNumber(script, shared->start_position()) + 1;
1345 Script::GetColumnNumber(script, shared->start_position()) + 1;
1346 String* script_name = script->name()->IsString()
1347 ? String::cast(script->name())
1348 : info->isolate()->heap()->empty_string();
1349 Logger::LogEventsAndTags log_tag = Logger::ToNativeByScript(tag, *script);
1350 PROFILE(info->isolate(), CodeCreateEvent(
1351 log_tag, *code, *shared, info, script_name, line_num, column_num));
// GDBJIT is a no-op unless GDB JIT support is compiled in.
1354 GDBJIT(AddCode(Handle<String>(shared->DebugName()),
1355 Handle<Script>(info->script()),
1356 Handle<Code>(info->code()),
// Starts a named compilation phase. When hydrogen stats are enabled,
// snapshots the info zone's allocation size so the destructor can report
// only this phase's allocations.
1361 CompilationPhase::CompilationPhase(const char* name, CompilationInfo* info)
1362 : name_(name), info_(info), zone_(info->isolate()) {
1363 if (FLAG_hydrogen_stats) {
1364 info_zone_start_allocation_size_ = info->zone()->allocation_size();
// Ends the phase: when hydrogen stats are enabled, records elapsed time
// plus memory allocated in both the phase-local zone and the shared info
// zone (delta since the constructor's snapshot).
1370 CompilationPhase::~CompilationPhase() {
1371 if (FLAG_hydrogen_stats) {
1372 unsigned size = zone()->allocation_size();
1373 size += info_->zone()->allocation_size() - info_zone_start_allocation_size_;
1374 isolate()->GetHStatistics()->SaveTiming(name_, timer_.Elapsed(), size);
// Decides whether this phase should emit trace output: the relevant
// hydrogen trace flag must be on (stub vs. closure compilations use
// different flags) and the phase name's first character must appear in
// FLAG_trace_phase.
1379 bool CompilationPhase::ShouldProduceTraceOutput() const {
1380 // Trace if the appropriate trace flag is set and the phase name's first
1381 // character is in the FLAG_trace_phase command line parameter.
// PassesFilter dereferences the closure handle; allow that here.
1382 AllowHandleDereference allow_deref;
1383 bool tracing_on = info()->IsStub()
1384 ? FLAG_trace_hydrogen_stubs
1385 : (FLAG_trace_hydrogen &&
1386 info()->closure()->PassesFilter(FLAG_trace_hydrogen_filter));
1387 return (tracing_on &&
1388 base::OS::StrChr(const_cast<char*>(FLAG_trace_phase), name_[0]) != NULL);
1391 } } // namespace v8::internal