1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
8 #include "src/allocation.h"
10 #include "src/bailout-reason.h"
16 class AstValueFactory;
17 class HydrogenCodeStub;
// Constructor for an offset range; members are copied straight from the
// arguments. (Per AddNoFrameRange below, ranges are used as [from, to).)
// NOTE(review): the enclosing struct declaration is elided in this excerpt.
22 OffsetRange(int from, int to) : from(from), to(to) {}
28 // This class encapsulates encoding and decoding of sources positions from
29 // which hydrogen values originated.
30 // When FLAG_hydrogen_track_positions is set this object encodes the
31 // identifier of the inlining and absolute offset from the start of the
33 // When the flag is not set we simply track absolute offset from the
35 class SourcePosition {
// Factory for the distinguished "unknown" position.
37 static SourcePosition Unknown() {
38 return SourcePosition::FromRaw(kNoPosition);
41 bool IsUnknown() const { return value_ == kNoPosition; }
43 uint32_t position() const { return PositionField::decode(value_); }
// Only updates the position bits when position tracking is enabled;
// otherwise the call is a no-op (the plain-offset encoding is kept as is).
44 void set_position(uint32_t position) {
45 if (FLAG_hydrogen_track_positions) {
46 value_ = static_cast<uint32_t>(PositionField::update(value_, position));
52 uint32_t inlining_id() const { return InliningIdField::decode(value_); }
// Like set_position: the inlining-id bits are touched only under the flag.
53 void set_inlining_id(uint32_t inlining_id) {
54 if (FLAG_hydrogen_track_positions) {
56 static_cast<uint32_t>(InliningIdField::update(value_, inlining_id));
// Raw access to the full 32-bit encoded value.
60 uint32_t raw() const { return value_; }
// Sentinel "no position" value, mirroring RelocInfo::kNoPosition;
// IsUnknown() compares against it.
63 static const uint32_t kNoPosition =
64 static_cast<uint32_t>(RelocInfo::kNoPosition);
// Packing of value_: bits [0,9) hold the inlining id,
// bits [9,32) hold the position offset.
65 typedef BitField<uint32_t, 0, 9> InliningIdField;
67 // Offset from the start of the inlined function.
68 typedef BitField<uint32_t, 9, 23> PositionField;
70 friend class HPositionInfo;
71 friend class Deoptimizer;
// Wraps an already-encoded 32-bit value in a SourcePosition.
73 static SourcePosition FromRaw(uint32_t raw_position) {
74 SourcePosition position;
75 position.value_ = raw_position;
79 // If FLAG_hydrogen_track_positions is set contains bitfields InliningIdField
81 // Otherwise contains absolute offset from the script start.
// Streams a human-readable rendering of a SourcePosition (for tracing/logs).
86 std::ostream& operator<<(std::ostream& os, const SourcePosition& p);
// Per-inlining record kept by CompilationInfo: identifies the parent inlining
// (kNoParentId for the top-level function), the source position of the inline
// call site, and the pc offsets at which deopts into this inlining occurred.
89 struct InlinedFunctionInfo {
90 InlinedFunctionInfo(int parent_id, SourcePosition inline_position,
91 int script_id, int start_position)
92 : parent_id(parent_id),
93 inline_position(inline_position),
// NOTE(review): no initializer for the script_id parameter is visible in
// this excerpt — confirm against the full file that the member exists and
// is initialized there.
95 start_position(start_position) {}
97 SourcePosition inline_position;
// pc offsets of deopt points recorded for this inlining (see
// CompilationInfo::LogDeoptCallPosition) — assumed from the name; confirm.
100 std::vector<size_t> deopt_pc_offsets;
// Sentinel parent id used for the outermost (non-inlined) function.
102 static const int kNoParentId = -1;
106 // CompilationInfo encapsulates some information known at compile time. It
107 // is constructed based on the resources available at compile-time.
108 class CompilationInfo {
110 // Various configuration flags for a compilation, as well as some properties
111 // of the compiled code produced by a compilation.
// NOTE(review): bit 6 (the kDebug flag used by MarkAsDebug/is_debug below)
// is not visible in this excerpt — the enum jumps from 1 << 5 to 1 << 7.
113 kDeferredCalling = 1 << 0,
114 kNonDeferredCalling = 1 << 1,
115 kSavesCallerDoubles = 1 << 2,
116 kRequiresFrame = 1 << 3,
117 kMustNotHaveEagerFrame = 1 << 4,
118 kDeoptimizationSupport = 1 << 5,
120 kCompilingForDebugging = 1 << 7,
121 kSerializing = 1 << 8,
122 kContextSpecializing = 1 << 9,
123 kInliningEnabled = 1 << 10,
124 kTypingEnabled = 1 << 11,
125 kDisableFutureOptimization = 1 << 12,
126 kSplittingEnabled = 1 << 13,
127 kBuiltinInliningEnabled = 1 << 14,
128 kTypeFeedbackEnabled = 1 << 15
// Function compilations are built from a ParseInfo; stub compilations from a
// CodeStub plus explicit isolate/zone.
131 explicit CompilationInfo(ParseInfo* parse_info);
132 CompilationInfo(CodeStub* stub, Isolate* isolate, Zone* zone);
133 virtual ~CompilationInfo();
135 ParseInfo* parse_info() const { return parse_info_; }
137 // -----------------------------------------------------------
138 // TODO(titzer): inline and delete accessors of ParseInfo
139 // -----------------------------------------------------------
140 Handle<Script> script() const;
141 bool is_eval() const;
142 bool is_native() const;
143 bool is_module() const;
144 LanguageMode language_mode() const;
145 Handle<JSFunction> closure() const;
146 FunctionLiteral* function() const;
147 Scope* scope() const;
148 Handle<Context> context() const;
149 Handle<SharedFunctionInfo> shared_info() const;
150 bool has_shared_info() const;
151 // -----------------------------------------------------------
153 Isolate* isolate() const {
156 Zone* zone() { return zone_; }
// OSR compilations carry a non-None AST id for the on-stack-replacement entry.
157 bool is_osr() const { return !osr_ast_id_.IsNone(); }
158 Handle<Code> code() const { return code_; }
159 CodeStub* code_stub() const { return code_stub_; }
160 BailoutId osr_ast_id() const { return osr_ast_id_; }
161 Handle<Code> unoptimized_code() const { return unoptimized_code_; }
162 int opt_count() const { return opt_count_; }
163 int num_parameters() const;
164 int num_heap_slots() const;
165 Code::Flags flags() const;
166 bool has_scope() const { return scope() != nullptr; }
168 void set_parameter_count(int parameter_count) {
170 parameter_count_ = parameter_count;
173 bool is_tracking_positions() const { return track_positions_; }
// Flag accessors: MarkAsX() sets the bit, the matching predicate reads it.
175 bool is_calling() const {
176 return GetFlag(kDeferredCalling) || GetFlag(kNonDeferredCalling);
179 void MarkAsDeferredCalling() { SetFlag(kDeferredCalling); }
181 bool is_deferred_calling() const { return GetFlag(kDeferredCalling); }
183 void MarkAsNonDeferredCalling() { SetFlag(kNonDeferredCalling); }
185 bool is_non_deferred_calling() const { return GetFlag(kNonDeferredCalling); }
187 void MarkAsSavesCallerDoubles() { SetFlag(kSavesCallerDoubles); }
189 bool saves_caller_doubles() const { return GetFlag(kSavesCallerDoubles); }
191 void MarkAsRequiresFrame() { SetFlag(kRequiresFrame); }
193 bool requires_frame() const { return GetFlag(kRequiresFrame); }
195 void MarkMustNotHaveEagerFrame() { SetFlag(kMustNotHaveEagerFrame); }
197 bool GetMustNotHaveEagerFrame() const {
198 return GetFlag(kMustNotHaveEagerFrame);
201 void MarkAsDebug() { SetFlag(kDebug); }
203 bool is_debug() const { return GetFlag(kDebug); }
205 void PrepareForSerializing() { SetFlag(kSerializing); }
207 bool will_serialize() const { return GetFlag(kSerializing); }
209 void MarkAsContextSpecializing() { SetFlag(kContextSpecializing); }
211 bool is_context_specializing() const { return GetFlag(kContextSpecializing); }
213 void MarkAsTypeFeedbackEnabled() { SetFlag(kTypeFeedbackEnabled); }
215 bool is_type_feedback_enabled() const {
216 return GetFlag(kTypeFeedbackEnabled);
219 void MarkAsInliningEnabled() { SetFlag(kInliningEnabled); }
221 bool is_inlining_enabled() const { return GetFlag(kInliningEnabled); }
223 void MarkAsBuiltinInliningEnabled() { SetFlag(kBuiltinInliningEnabled); }
225 bool is_builtin_inlining_enabled() const {
226 return GetFlag(kBuiltinInliningEnabled);
229 void MarkAsTypingEnabled() { SetFlag(kTypingEnabled); }
231 bool is_typing_enabled() const { return GetFlag(kTypingEnabled); }
233 void MarkAsSplittingEnabled() { SetFlag(kSplittingEnabled); }
235 bool is_splitting_enabled() const { return GetFlag(kSplittingEnabled); }
// Code pre-aging only applies to non-serialized, size-optimized builds.
237 bool IsCodePreAgingActive() const {
238 return FLAG_optimize_for_size && FLAG_age_code && !will_serialize() &&
242 void EnsureFeedbackVector();
243 Handle<TypeFeedbackVector> feedback_vector() const {
244 return feedback_vector_;
246 void SetCode(Handle<Code> code) { code_ = code; }
248 void MarkCompilingForDebugging() { SetFlag(kCompilingForDebugging); }
249 bool IsCompilingForDebugging() { return GetFlag(kCompilingForDebugging); }
250 void MarkNonOptimizable() {
251 SetMode(CompilationInfo::NONOPT);
254 bool ShouldTrapOnDeopt() const {
255 return (FLAG_trap_on_deopt && IsOptimizing()) ||
256 (FLAG_trap_on_stub_deopt && IsStub());
// True only for function compilations whose closure has a global object.
259 bool has_global_object() const {
260 return !closure().is_null() &&
261 (closure()->context()->global_object() != NULL);
264 GlobalObject* global_object() const {
265 return has_global_object() ? closure()->context()->global_object() : NULL;
268 // Accessors for the different compilation modes.
269 bool IsOptimizing() const { return mode_ == OPTIMIZE; }
270 bool IsOptimizable() const { return mode_ == BASE; }
271 bool IsStub() const { return mode_ == STUB; }
// Switches this info to an optimizing compile: records the OSR entry (if
// any), remembers which unoptimized code was patched, and allocates a fresh
// optimization id from the isolate.
272 void SetOptimizing(BailoutId osr_ast_id, Handle<Code> unoptimized) {
273 DCHECK(!shared_info().is_null());
275 osr_ast_id_ = osr_ast_id;
276 unoptimized_code_ = unoptimized;
277 optimization_id_ = isolate()->NextOptimizationId();
280 void SetStub(CodeStub* code_stub) {
282 code_stub_ = code_stub;
285 // Deoptimization support.
286 bool HasDeoptimizationSupport() const {
287 return GetFlag(kDeoptimizationSupport);
289 void EnableDeoptimizationSupport() {
290 DCHECK(IsOptimizable());
291 SetFlag(kDeoptimizationSupport);
294 // Determines whether or not to insert a self-optimization header.
295 bool ShouldSelfOptimize();
// Takes ownership of deferred handles; may be set at most once (DCHECK).
297 void set_deferred_handles(DeferredHandles* deferred_handles) {
298 DCHECK(deferred_handles_ == NULL);
299 deferred_handles_ = deferred_handles;
// Lazily creates the zone-allocated dependency list for the given group.
302 ZoneList<Handle<HeapObject> >* dependencies(
303 DependentCode::DependencyGroup group) {
304 if (dependencies_[group] == NULL) {
305 dependencies_[group] = new(zone_) ZoneList<Handle<HeapObject> >(2, zone_);
307 return dependencies_[group];
310 void CommitDependencies(Handle<Code> code);
312 void RollbackDependencies();
// Re-creates handles in the current (new) handle scope so they stay valid.
314 void ReopenHandlesInNewHandleScope() {
315 unoptimized_code_ = Handle<Code>(*unoptimized_code_);
// Abort: records the first bailout reason and permanently disables future
// optimization attempts for this function.
318 void AbortOptimization(BailoutReason reason) {
319 DCHECK(reason != kNoReason);
320 if (bailout_reason_ == kNoReason) bailout_reason_ = reason;
321 SetFlag(kDisableFutureOptimization);
// Retry: records the reason but leaves future optimization enabled, unless
// a prior AbortOptimization already disabled it.
324 void RetryOptimization(BailoutReason reason) {
325 DCHECK(reason != kNoReason);
326 if (GetFlag(kDisableFutureOptimization)) return;
327 bailout_reason_ = reason;
330 BailoutReason bailout_reason() const { return bailout_reason_; }
// Prologue offset must be set exactly once before it is read (DCHECKs).
332 int prologue_offset() const {
333 DCHECK_NE(Code::kPrologueOffsetNotSet, prologue_offset_);
334 return prologue_offset_;
337 void set_prologue_offset(int prologue_offset) {
338 DCHECK_EQ(Code::kPrologueOffsetNotSet, prologue_offset_);
339 prologue_offset_ = prologue_offset;
342 // Adds offset range [from, to) where fp register does not point
343 // to the current frame base. Used in CPU profiler to detect stack
344 // samples where top frame is not set up.
345 inline void AddNoFrameRange(int from, int to) {
346 if (no_frame_ranges_) no_frame_ranges_->Add(OffsetRange(from, to));
// Transfers ownership of the collected ranges to the caller.
349 List<OffsetRange>* ReleaseNoFrameRanges() {
350 List<OffsetRange>* result = no_frame_ranges_;
351 no_frame_ranges_ = NULL;
// Throws std::out_of_range (via .at) for an unknown inlining id.
355 int start_position_for(uint32_t inlining_id) {
356 return inlined_function_infos_.at(inlining_id).start_position;
358 const std::vector<InlinedFunctionInfo>& inlined_function_infos() {
359 return inlined_function_infos_;
362 void LogDeoptCallPosition(int pc_offset, int inlining_id);
// NOTE(review): "pareint_id" looks like a typo for "parent_id". It is a
// declaration-only parameter name, so renaming it would not affect callers;
// fix here and in the definition together.
363 int TraceInlinedFunction(Handle<SharedFunctionInfo> shared,
364 SourcePosition position, int pareint_id);
// Lazily wraps `this` in a Foreign so it can be referenced from the heap.
366 Handle<Foreign> object_wrapper() {
367 if (object_wrapper_.is_null()) {
369 isolate()->factory()->NewForeign(reinterpret_cast<Address>(this));
371 return object_wrapper_;
// Main-thread only (DCHECKed): flags this compilation as abandoned because
// a code dependency changed while it was in flight.
374 void AbortDueToDependencyChange() {
375 DCHECK(!OptimizingCompilerThread::IsOptimizerThread(isolate()));
376 aborted_due_to_dependency_change_ = true;
379 bool HasAbortedDueToDependencyChange() const {
380 DCHECK(!OptimizingCompilerThread::IsOptimizerThread(isolate()));
381 return aborted_due_to_dependency_change_;
384 bool HasSameOsrEntry(Handle<JSFunction> function, BailoutId osr_ast_id) {
385 return osr_ast_id_ == osr_ast_id && function.is_identical_to(closure());
388 int optimization_id() const { return optimization_id_; }
390 int osr_expr_stack_height() { return osr_expr_stack_height_; }
391 void set_osr_expr_stack_height(int height) {
393 osr_expr_stack_height_ = height;
397 void PrintAstForTesting();
400 bool is_simple_parameter_list();
403 ParseInfo* parse_info_;
// Once kDisableFutureOptimization is set, propagate the opt-out (with its
// bailout reason) to the SharedFunctionInfo so it sticks across compiles.
405 void DisableFutureOptimization() {
406 if (GetFlag(kDisableFutureOptimization) && has_shared_info()) {
407 shared_info()->DisableOptimization(bailout_reason());
413 // BASE is generated by the full codegen, optionally prepared for bailouts.
414 // OPTIMIZE is optimized code generated by the Hydrogen-based backend.
415 // NONOPT is generated by the full codegen and is not prepared for
416 // recompilation/bailouts. These functions are never recompiled.
424 CompilationInfo(ParseInfo* parse_info, CodeStub* code_stub, Mode mode,
425 Isolate* isolate, Zone* zone);
429 void SetMode(Mode mode) {
// Bit-twiddling helpers over flags_.
433 void SetFlag(Flag flag) { flags_ |= flag; }
435 void SetFlag(Flag flag, bool value) {
436 flags_ = value ? flags_ | flag : flags_ & ~flag;
439 bool GetFlag(Flag flag) const { return (flags_ & flag) != 0; }
443 // For compiled stubs, the stub object
444 CodeStub* code_stub_;
445 // The compiled code.
448 // Used by codegen, ultimately kept rooted by the SharedFunctionInfo.
449 Handle<TypeFeedbackVector> feedback_vector_;
451 // Compilation mode flag and whether deoptimization is allowed.
453 BailoutId osr_ast_id_;
454 // The unoptimized code we patched for OSR may not be the shared code
455 // afterwards, since we may need to compile it again to include deoptimization
456 // data. Keep track which code we patched.
457 Handle<Code> unoptimized_code_;
459 // The zone from which the compilation pipeline working on this
460 // CompilationInfo allocates.
463 DeferredHandles* deferred_handles_;
465 ZoneList<Handle<HeapObject> >* dependencies_[DependentCode::kGroupCount];
467 BailoutReason bailout_reason_;
469 int prologue_offset_;
471 List<OffsetRange>* no_frame_ranges_;
472 std::vector<InlinedFunctionInfo> inlined_function_infos_;
473 bool track_positions_;
475 // A copy of shared_info()->opt_count() to avoid handle deref
476 // during graph optimization.
479 // Number of parameters used for compilation of stubs that require arguments.
480 int parameter_count_;
482 Handle<Foreign> object_wrapper_;
484 int optimization_id_;
486 // This flag is used by the main thread to track whether this compilation
487 // should be abandoned due to dependency change.
488 bool aborted_due_to_dependency_change_;
490 int osr_expr_stack_height_;
492 DISALLOW_COPY_AND_ASSIGN(CompilationInfo);
496 // A wrapper around a CompilationInfo that detaches the Handles from
497 // the underlying DeferredHandleScope and stores them in info_ on
499 class CompilationHandleScope BASE_EMBEDDED {
501 explicit CompilationHandleScope(CompilationInfo* info)
502 : deferred_(info->isolate()), info_(info) {}
// On scope exit, detach the deferred handles and hand ownership to the
// CompilationInfo (set_deferred_handles DCHECKs it is set only once).
503 ~CompilationHandleScope() {
504 info_->set_deferred_handles(deferred_.Detach());
508 DeferredHandleScope deferred_;
509 CompilationInfo* info_;
514 class HOptimizedGraphBuilder;
517 // A helper class that calls the three compilation phases in
518 // Crankshaft and keeps track of its state. The three phases
519 // CreateGraph, OptimizeGraph and GenerateAndInstallCode can either
520 // fail, bail-out to the full code generator or succeed. Apart from
521 // their return value, the status of the phase last run can be checked
522 // using last_status().
523 class OptimizedCompileJob: public ZoneObject {
// Starts in FAILED state; status advances as the phases below are run.
525 explicit OptimizedCompileJob(CompilationInfo* info)
527 graph_builder_(NULL),
530 last_status_(FAILED),
531 awaiting_install_(false) { }
534 FAILED, BAILED_OUT, SUCCEEDED
// The three compilation phases; each returns its status and must be checked.
537 MUST_USE_RESULT Status CreateGraph();
538 MUST_USE_RESULT Status OptimizeGraph();
539 MUST_USE_RESULT Status GenerateCode();
541 Status last_status() const { return last_status_; }
542 CompilationInfo* info() const { return info_; }
543 Isolate* isolate() const { return info()->isolate(); }
// Both bail-out helpers forward the reason to the CompilationInfo (which
// decides whether future optimization stays enabled) and record BAILED_OUT.
545 Status RetryOptimization(BailoutReason reason) {
546 info_->RetryOptimization(reason);
547 return SetLastStatus(BAILED_OUT);
550 Status AbortOptimization(BailoutReason reason) {
551 info_->AbortOptimization(reason);
552 return SetLastStatus(BAILED_OUT);
// Only OSR jobs wait for installation (DCHECKed).
555 void WaitForInstall() {
556 DCHECK(info_->is_osr());
557 awaiting_install_ = true;
560 bool IsWaitingForInstall() { return awaiting_install_; }
563 CompilationInfo* info_;
564 HOptimizedGraphBuilder* graph_builder_;
// Per-phase wall-clock accumulators, filled in by the Timer helper below.
567 base::TimeDelta time_taken_to_create_graph_;
568 base::TimeDelta time_taken_to_optimize_;
569 base::TimeDelta time_taken_to_codegen_;
571 bool awaiting_install_;
573 MUST_USE_RESULT Status SetLastStatus(Status status) {
574 last_status_ = status;
577 void RecordOptimizationStats();
// Scoped timing helper: accumulates timer_.Elapsed() into *location_
// (presumably in its destructor — the closing lines are elided here).
580 Timer(OptimizedCompileJob* job, base::TimeDelta* location)
581 : job_(job), location_(location) {
582 DCHECK(location_ != NULL);
587 *location_ += timer_.Elapsed();
590 OptimizedCompileJob* job_;
591 base::ElapsedTimer timer_;
592 base::TimeDelta* location_;
599 // General strategy: Source code is translated into an anonymous function w/o
600 // parameters which then can be executed. If the source code contains other
601 // functions, they will be compiled and allocated as part of the compilation
602 // of the source code.
604 // Please note this interface returns shared function infos. This means you
605 // need to call Factory::NewFunctionFromSharedFunctionInfo before you have a
606 // real function with a context.
608 class Compiler : public AllStatic {
// Code-generation entry points: unoptimized / lazy / debug variants.
// All return MaybeHandle — empty on failure (MUST_USE_RESULT enforces checks).
610 MUST_USE_RESULT static MaybeHandle<Code> GetUnoptimizedCode(
611 Handle<JSFunction> function);
612 MUST_USE_RESULT static MaybeHandle<Code> GetLazyCode(
613 Handle<JSFunction> function);
614 MUST_USE_RESULT static MaybeHandle<Code> GetUnoptimizedCode(
615 Handle<SharedFunctionInfo> shared);
616 MUST_USE_RESULT static MaybeHandle<Code> GetDebugCode(
617 Handle<JSFunction> function);
619 // Parser::Parse, then Compiler::Analyze.
620 static bool ParseAndAnalyze(ParseInfo* info);
621 // Rewrite, analyze scopes, and renumber.
622 static bool Analyze(ParseInfo* info);
623 // Adds deoptimization support, requires ParseAndAnalyze.
624 static bool EnsureDeoptimizationSupport(CompilationInfo* info);
626 static bool EnsureCompiled(Handle<JSFunction> function,
627 ClearExceptionFlag flag);
629 static void CompileForLiveEdit(Handle<Script> script);
631 // Compile a String source within a context for eval.
632 MUST_USE_RESULT static MaybeHandle<JSFunction> GetFunctionFromEval(
633 Handle<String> source, Handle<SharedFunctionInfo> outer_info,
634 Handle<Context> context, LanguageMode language_mode,
635 ParseRestriction restriction, int scope_position);
637 // Compile a String source within a context.
638 static Handle<SharedFunctionInfo> CompileScript(
639 Handle<String> source, Handle<Object> script_name, int line_offset,
640 int column_offset, bool is_debugger_script, bool is_shared_cross_origin,
641 Handle<Object> source_map_url, Handle<Context> context,
642 v8::Extension* extension, ScriptData** cached_data,
643 ScriptCompiler::CompileOptions compile_options,
644 NativesFlag is_natives_code, bool is_module);
646 static Handle<SharedFunctionInfo> CompileStreamedScript(Handle<Script> script,
650 // Create a shared function info object (the code may be lazily compiled).
651 static Handle<SharedFunctionInfo> BuildFunctionInfo(FunctionLiteral* node,
652 Handle<Script> script,
653 CompilationInfo* outer);
655 enum ConcurrencyMode { NOT_CONCURRENT, CONCURRENT };
657 // Generate and return optimized code or start a concurrent optimization job.
658 // In the latter case, return the InOptimizationQueue builtin. On failure,
659 // return the empty handle.
660 MUST_USE_RESULT static MaybeHandle<Code> GetOptimizedCode(
661 Handle<JSFunction> function,
662 Handle<Code> current_code,
663 ConcurrencyMode mode,
664 BailoutId osr_ast_id = BailoutId::None());
666 // Generate and return code from previously queued optimization job.
667 // On failure, return the empty handle.
668 static Handle<Code> GetConcurrentlyOptimizedCode(OptimizedCompileJob* job);
670 // TODO(titzer): move this method out of the compiler.
671 static bool DebuggerWantsEagerCompilation(
672 Isolate* isolate, bool allow_lazy_without_ctx = false);
// Scoped helper for one named compilation phase: carries the phase name,
// the CompilationInfo being worked on, a timer, and the zone's allocation
// size at phase start (members below) — non-copyable.
676 class CompilationPhase BASE_EMBEDDED {
678 CompilationPhase(const char* name, CompilationInfo* info);
682 bool ShouldProduceTraceOutput() const;
684 const char* name() const { return name_; }
685 CompilationInfo* info() const { return info_; }
686 Isolate* isolate() const { return info()->isolate(); }
687 Zone* zone() { return &zone_; }
691 CompilationInfo* info_;
// Allocation size of the info's zone at construction time; presumably used
// to report per-phase zone growth — confirm against the .cc file.
693 size_t info_zone_start_allocation_size_;
694 base::ElapsedTimer timer_;
696 DISALLOW_COPY_AND_ASSIGN(CompilationPhase);
699 } } // namespace v8::internal
701 #endif // V8_COMPILER_H_