1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
8 #include "src/allocation.h"
10 #include "src/bailout-reason.h"
11 #include "src/compilation-dependencies.h"
12 #include "src/signature.h"
// Forward declarations for types referenced by pointer/handle below.
18 class AstValueFactory;
19 class HydrogenCodeStub;
20 class JavaScriptFrame;
// Constructor of the OffsetRange helper: records a half-open [from, to)
// code-offset range (see CompilationInfo::AddNoFrameRange for its use).
// NOTE(review): the enclosing struct declaration is elided in this chunk;
// confirm the `from`/`to` member declarations against the full header.
25 OffsetRange(int from, int to) : from(from), to(to) {}
31 // This class encapsulates encoding and decoding of sources positions from
32 // which hydrogen values originated.
33 // When FLAG_hydrogen_track_positions is set this object encodes the
34 // identifier of the inlining and absolute offset from the start of the
36 // When the flag is not set we simply track absolute offset from the
38 class SourcePosition {
// Sentinel object meaning "no source position is known".
40 static SourcePosition Unknown() {
41 return SourcePosition::FromRaw(kNoPosition);
44 bool IsUnknown() const { return value_ == kNoPosition; }
// Offset (decoded from the PositionField bits of value_).
46 uint32_t position() const { return PositionField::decode(value_); }
// NOTE(review): the branch taken when FLAG_hydrogen_track_positions is off
// is elided in this chunk — confirm against the full header.
47 void set_position(uint32_t position) {
48 if (FLAG_hydrogen_track_positions) {
49 value_ = static_cast<uint32_t>(PositionField::update(value_, position));
// Identifier of the inlining this position belongs to (InliningIdField bits).
55 uint32_t inlining_id() const { return InliningIdField::decode(value_); }
56 void set_inlining_id(uint32_t inlining_id) {
57 if (FLAG_hydrogen_track_positions) {
59 static_cast<uint32_t>(InliningIdField::update(value_, inlining_id));
// Raw encoded value, for storage/serialization by the friend classes below.
63 uint32_t raw() const { return value_; }
66 static const uint32_t kNoPosition =
67 static_cast<uint32_t>(RelocInfo::kNoPosition);
// Bit layout of value_: low 9 bits = inlining id, next 23 bits = offset.
68 typedef BitField<uint32_t, 0, 9> InliningIdField;
70 // Offset from the start of the inlined function.
71 typedef BitField<uint32_t, 9, 23> PositionField;
73 friend class HPositionInfo;
74 friend class Deoptimizer;
// Reconstructs a SourcePosition from a previously obtained raw() value.
76 static SourcePosition FromRaw(uint32_t raw_position) {
77 SourcePosition position;
78 position.value_ = raw_position;
82 // If FLAG_hydrogen_track_positions is set contains bitfields InliningIdField
84 // Otherwise contains absolute offset from the script start.
// Pretty-printer for tracing/debug output; defined in the .cc file.
89 std::ostream& operator<<(std::ostream& os, const SourcePosition& p);
// Per-inlining bookkeeping: links an inlined function to its parent inlining
// and to the source position at which it was inlined.
92 struct InlinedFunctionInfo {
93 InlinedFunctionInfo(int parent_id, SourcePosition inline_position,
94 int script_id, int start_position)
95 : parent_id(parent_id),
96 inline_position(inline_position),
98 start_position(start_position) {}
// Position in the parent function where this inlining occurred.
100 SourcePosition inline_position;
// PC offsets of deopt points inside this inlining (see LogDeoptCallPosition).
103 std::vector<size_t> deopt_pc_offsets;
// parent_id value for the top-level (non-inlined) function.
105 static const int kNoParentId = -1;
109 // CompilationInfo encapsulates some information known at compile time. It
110 // is constructed based on the resources available at compile-time.
111 class CompilationInfo {
113 // Various configuration flags for a compilation, as well as some properties
114 // of the compiled code produced by a compilation.
// Bit values OR-ed into flags_; manipulated via SetFlag/GetFlag below.
116 kDeferredCalling = 1 << 0,
117 kNonDeferredCalling = 1 << 1,
118 kSavesCallerDoubles = 1 << 2,
119 kRequiresFrame = 1 << 3,
120 kMustNotHaveEagerFrame = 1 << 4,
121 kDeoptimizationSupport = 1 << 5,
123 kSerializing = 1 << 7,
124 kFunctionContextSpecializing = 1 << 8,
125 kFrameSpecializing = 1 << 9,
126 kInliningEnabled = 1 << 10,
127 kTypingEnabled = 1 << 11,
128 kDisableFutureOptimization = 1 << 12,
129 kSplittingEnabled = 1 << 13,
130 kTypeFeedbackEnabled = 1 << 14,
131 kDeoptimizationEnabled = 1 << 15,
132 kSourcePositionsEnabled = 1 << 16,
133 kFirstCompile = 1 << 17,
// Construction from parsed JS, from a code stub, or from a bare debug name.
136 explicit CompilationInfo(ParseInfo* parse_info);
137 CompilationInfo(CodeStub* stub, Isolate* isolate, Zone* zone);
138 CompilationInfo(const char* debug_name, Isolate* isolate, Zone* zone);
139 virtual ~CompilationInfo();
// May be NULL for stub compilations (see the stub constructor above).
141 ParseInfo* parse_info() const { return parse_info_; }
143 // -----------------------------------------------------------
144 // TODO(titzer): inline and delete accessors of ParseInfo
145 // -----------------------------------------------------------
146 Handle<Script> script() const;
147 bool is_eval() const;
148 bool is_native() const;
149 bool is_module() const;
150 LanguageMode language_mode() const;
151 Handle<JSFunction> closure() const;
152 FunctionLiteral* literal() const;
153 Scope* scope() const;
154 Handle<Context> context() const;
155 Handle<SharedFunctionInfo> shared_info() const;
156 bool has_shared_info() const;
157 bool has_context() const;
158 bool has_literal() const;
159 bool has_scope() const;
160 // -----------------------------------------------------------
162 Isolate* isolate() const {
165 Zone* zone() { return zone_; }
// On-stack replacement is in effect iff an OSR AST id was recorded.
166 bool is_osr() const { return !osr_ast_id_.IsNone(); }
167 Handle<Code> code() const { return code_; }
168 CodeStub* code_stub() const { return code_stub_; }
169 BailoutId osr_ast_id() const { return osr_ast_id_; }
170 Handle<Code> unoptimized_code() const { return unoptimized_code_; }
171 int opt_count() const { return opt_count_; }
172 int num_parameters() const;
173 int num_parameters_including_this() const;
174 bool is_this_defined() const;
175 int num_heap_slots() const;
177 void set_parameter_count(int parameter_count) {
179 parameter_count_ = parameter_count;
182 bool is_tracking_positions() const { return track_positions_; }
184 bool is_calling() const {
185 return GetFlag(kDeferredCalling) || GetFlag(kNonDeferredCalling);
// Flag setters/getters; each pair marks and queries one Flag bit.
188 void MarkAsDeferredCalling() { SetFlag(kDeferredCalling); }
190 bool is_deferred_calling() const { return GetFlag(kDeferredCalling); }
192 void MarkAsNonDeferredCalling() { SetFlag(kNonDeferredCalling); }
194 bool is_non_deferred_calling() const { return GetFlag(kNonDeferredCalling); }
196 void MarkAsSavesCallerDoubles() { SetFlag(kSavesCallerDoubles); }
198 bool saves_caller_doubles() const { return GetFlag(kSavesCallerDoubles); }
200 void MarkAsRequiresFrame() { SetFlag(kRequiresFrame); }
202 bool requires_frame() const { return GetFlag(kRequiresFrame); }
204 void MarkMustNotHaveEagerFrame() { SetFlag(kMustNotHaveEagerFrame); }
206 bool GetMustNotHaveEagerFrame() const {
207 return GetFlag(kMustNotHaveEagerFrame);
210 // Compiles marked as debug produce unoptimized code with debug break slots.
211 // Inner functions that cannot be compiled w/o context are compiled eagerly.
212 // Always include deoptimization support to avoid having to recompile again.
215 SetFlag(kDeoptimizationSupport);
// NOTE(review): kDebug (presumably 1 << 6) is declared in an elided line of
// the Flag enum above — confirm against the full header.
218 bool is_debug() const { return GetFlag(kDebug); }
220 void PrepareForSerializing() { SetFlag(kSerializing); }
222 bool will_serialize() const { return GetFlag(kSerializing); }
224 void MarkAsFunctionContextSpecializing() {
225 SetFlag(kFunctionContextSpecializing);
228 bool is_function_context_specializing() const {
229 return GetFlag(kFunctionContextSpecializing);
232 void MarkAsFrameSpecializing() { SetFlag(kFrameSpecializing); }
234 bool is_frame_specializing() const { return GetFlag(kFrameSpecializing); }
236 void MarkAsTypeFeedbackEnabled() { SetFlag(kTypeFeedbackEnabled); }
238 bool is_type_feedback_enabled() const {
239 return GetFlag(kTypeFeedbackEnabled);
242 void MarkAsDeoptimizationEnabled() { SetFlag(kDeoptimizationEnabled); }
244 bool is_deoptimization_enabled() const {
245 return GetFlag(kDeoptimizationEnabled);
248 void MarkAsSourcePositionsEnabled() { SetFlag(kSourcePositionsEnabled); }
250 bool is_source_positions_enabled() const {
251 return GetFlag(kSourcePositionsEnabled);
254 void MarkAsInliningEnabled() { SetFlag(kInliningEnabled); }
256 bool is_inlining_enabled() const { return GetFlag(kInliningEnabled); }
258 void MarkAsTypingEnabled() { SetFlag(kTypingEnabled); }
260 bool is_typing_enabled() const { return GetFlag(kTypingEnabled); }
262 void MarkAsSplittingEnabled() { SetFlag(kSplittingEnabled); }
264 bool is_splitting_enabled() const { return GetFlag(kSplittingEnabled); }
266 void MarkAsFirstCompile() { SetFlag(kFirstCompile); }
268 void MarkAsCompiled() { SetFlag(kFirstCompile, false); }
270 bool is_first_compile() const { return GetFlag(kFirstCompile); }
272 bool IsCodePreAgingActive() const {
273 return FLAG_optimize_for_size && FLAG_age_code && !will_serialize() &&
277 void EnsureFeedbackVector();
278 Handle<TypeFeedbackVector> feedback_vector() const {
279 return feedback_vector_;
281 void SetCode(Handle<Code> code) { code_ = code; }
283 bool ShouldTrapOnDeopt() const {
284 return (FLAG_trap_on_deopt && IsOptimizing()) ||
285 (FLAG_trap_on_stub_deopt && IsStub());
288 bool has_global_object() const {
289 return !closure().is_null() &&
290 (closure()->context()->global_object() != NULL);
293 GlobalObject* global_object() const {
294 return has_global_object() ? closure()->context()->global_object() : NULL;
297 // Accessors for the different compilation modes.
298 bool IsOptimizing() const { return mode_ == OPTIMIZE; }
299 bool IsStub() const { return mode_ == STUB; }
// Switches this compilation into optimizing mode: records the OSR entry and
// the unoptimized code being replaced, and assigns a fresh optimization id.
300 void SetOptimizing(BailoutId osr_ast_id, Handle<Code> unoptimized) {
301 DCHECK(has_shared_info());
303 osr_ast_id_ = osr_ast_id;
304 unoptimized_code_ = unoptimized;
305 optimization_id_ = isolate()->NextOptimizationId();
306 set_output_code_kind(Code::OPTIMIZED_FUNCTION);
309 void SetFunctionType(Type::FunctionType* function_type) {
310 function_type_ = function_type;
312 Type::FunctionType* function_type() const { return function_type_; }
314 void SetStub(CodeStub* code_stub);
316 // Deoptimization support.
317 bool HasDeoptimizationSupport() const {
318 return GetFlag(kDeoptimizationSupport);
320 void EnableDeoptimizationSupport() {
321 DCHECK_EQ(BASE, mode_);
322 SetFlag(kDeoptimizationSupport);
324 bool ShouldEnsureSpaceForLazyDeopt() { return !IsStub(); }
326 bool MustReplaceUndefinedReceiverWithGlobalProxy();
328 // Determines whether or not to insert a self-optimization header.
329 bool ShouldSelfOptimize();
// Takes ownership of |deferred_handles|; must only be set once.
331 void set_deferred_handles(DeferredHandles* deferred_handles) {
332 DCHECK(deferred_handles_ == NULL);
333 deferred_handles_ = deferred_handles;
336 void ReopenHandlesInNewHandleScope() {
337 unoptimized_code_ = Handle<Code>(*unoptimized_code_);
// Records |reason| (keeping the first one) and permanently disables any
// future optimization attempt for this function.
340 void AbortOptimization(BailoutReason reason) {
341 DCHECK(reason != kNoReason);
342 if (bailout_reason_ == kNoReason) bailout_reason_ = reason;
343 SetFlag(kDisableFutureOptimization);
// Records |reason| but leaves future optimization attempts possible,
// unless optimization has already been permanently disabled.
346 void RetryOptimization(BailoutReason reason) {
347 DCHECK(reason != kNoReason);
348 if (GetFlag(kDisableFutureOptimization)) return;
349 bailout_reason_ = reason;
352 BailoutReason bailout_reason() const { return bailout_reason_; }
354 int prologue_offset() const {
355 DCHECK_NE(Code::kPrologueOffsetNotSet, prologue_offset_);
356 return prologue_offset_;
359 void set_prologue_offset(int prologue_offset) {
360 DCHECK_EQ(Code::kPrologueOffsetNotSet, prologue_offset_);
361 prologue_offset_ = prologue_offset;
364 // Adds offset range [from, to) where fp register does not point
365 // to the current frame base. Used in CPU profiler to detect stack
366 // samples where top frame is not set up.
367 inline void AddNoFrameRange(int from, int to) {
368 if (no_frame_ranges_) no_frame_ranges_->Add(OffsetRange(from, to));
// Transfers ownership of the accumulated ranges to the caller.
371 List<OffsetRange>* ReleaseNoFrameRanges() {
372 List<OffsetRange>* result = no_frame_ranges_;
373 no_frame_ranges_ = NULL;
// Throws std::out_of_range (via .at) if |inlining_id| was never recorded.
377 int start_position_for(uint32_t inlining_id) {
378 return inlined_function_infos_.at(inlining_id).start_position;
380 const std::vector<InlinedFunctionInfo>& inlined_function_infos() {
381 return inlined_function_infos_;
// Records the pc offset of a deopt call site against the given inlining
// (stored in InlinedFunctionInfo::deopt_pc_offsets).
384 void LogDeoptCallPosition(int pc_offset, int inlining_id);
// Records |shared| as inlined at |position| under the inlining |parent_id|
// (kNoParentId for the top-level function); presumably returns the new
// inlining id — confirm against the definition in compiler.cc.
// NOTE(review): fixed misspelled parameter name "pareint_id" -> "parent_id";
// a declaration-only name, so callers and the definition are unaffected.
385 int TraceInlinedFunction(Handle<SharedFunctionInfo> shared,
386 SourcePosition position, int parent_id);
388 CompilationDependencies* dependencies() { return &dependencies_; }
// True iff |function|/|osr_ast_id| identify the same OSR attempt as this one.
390 bool HasSameOsrEntry(Handle<JSFunction> function, BailoutId osr_ast_id) {
391 return osr_ast_id_ == osr_ast_id && function.is_identical_to(closure());
394 int optimization_id() const { return optimization_id_; }
396 int osr_expr_stack_height() { return osr_expr_stack_height_; }
397 void set_osr_expr_stack_height(int height) {
399 osr_expr_stack_height_ = height;
401 JavaScriptFrame* osr_frame() const { return osr_frame_; }
402 void set_osr_frame(JavaScriptFrame* osr_frame) { osr_frame_ = osr_frame; }
405 void PrintAstForTesting();
408 bool has_simple_parameters();
// Functions inlined into this compilation, in the order they were added.
410 typedef std::vector<Handle<SharedFunctionInfo>> InlinedFunctionList;
411 InlinedFunctionList const& inlined_functions() const {
412 return inlined_functions_;
414 void AddInlinedFunction(Handle<SharedFunctionInfo> inlined_function) {
415 inlined_functions_.push_back(inlined_function);
418 base::SmartArrayPointer<char> GetDebugName() const;
420 Code::Kind output_code_kind() const { return output_code_kind_; }
422 void set_output_code_kind(Code::Kind kind) { output_code_kind_ = kind; }
425 ParseInfo* parse_info_;
// If optimization was permanently disabled, propagates the recorded bailout
// reason to the SharedFunctionInfo so later compiles skip optimization.
427 void DisableFutureOptimization() {
428 if (GetFlag(kDisableFutureOptimization) && has_shared_info()) {
429 shared_info()->DisableOptimization(bailout_reason());
435 // BASE is generated by the full codegen, optionally prepared for bailouts.
436 // OPTIMIZE is optimized code generated by the Hydrogen-based backend.
// Shared delegated constructor used by the three public constructors.
443 CompilationInfo(ParseInfo* parse_info, CodeStub* code_stub,
444 const char* debug_name, Mode mode, Isolate* isolate,
449 void SetMode(Mode mode) {
// flags_ bitset helpers; Flag values are single bits (see enum above).
453 void SetFlag(Flag flag) { flags_ |= flag; }
455 void SetFlag(Flag flag, bool value) {
456 flags_ = value ? flags_ | flag : flags_ & ~flag;
459 bool GetFlag(Flag flag) const { return (flags_ & flag) != 0; }
463 Code::Kind output_code_kind_;
465 // For compiled stubs, the stub object
466 CodeStub* code_stub_;
467 // The compiled code.
470 // Used by codegen, ultimately kept rooted by the SharedFunctionInfo.
471 Handle<TypeFeedbackVector> feedback_vector_;
473 // Compilation mode flag and whether deoptimization is allowed.
475 BailoutId osr_ast_id_;
476 // The unoptimized code we patched for OSR may not be the shared code
477 // afterwards, since we may need to compile it again to include deoptimization
478 // data. Keep track which code we patched.
479 Handle<Code> unoptimized_code_;
481 // The zone from which the compilation pipeline working on this
482 // CompilationInfo allocates.
485 DeferredHandles* deferred_handles_;
487 // Dependencies for this compilation, e.g. stable maps.
488 CompilationDependencies dependencies_;
490 BailoutReason bailout_reason_;
492 int prologue_offset_;
494 List<OffsetRange>* no_frame_ranges_;
495 std::vector<InlinedFunctionInfo> inlined_function_infos_;
496 bool track_positions_;
498 InlinedFunctionList inlined_functions_;
500 // A copy of shared_info()->opt_count() to avoid handle deref
501 // during graph optimization.
504 // Number of parameters used for compilation of stubs that require arguments.
505 int parameter_count_;
507 int optimization_id_;
509 int osr_expr_stack_height_;
511 // The current OSR frame for specialization or {nullptr}.
512 JavaScriptFrame* osr_frame_ = nullptr;
514 Type::FunctionType* function_type_;
516 const char* debug_name_;
518 DISALLOW_COPY_AND_ASSIGN(CompilationInfo);
522 // A wrapper around a CompilationInfo that detaches the Handles from
523 // the underlying DeferredHandleScope and stores them in info_ on
525 class CompilationHandleScope BASE_EMBEDDED {
527 explicit CompilationHandleScope(CompilationInfo* info)
528 : deferred_(info->isolate()), info_(info) {}
// On scope exit, hands the detached deferred handles to the
// CompilationInfo, which takes ownership (see set_deferred_handles).
529 ~CompilationHandleScope() {
530 info_->set_deferred_handles(deferred_.Detach());
534 DeferredHandleScope deferred_;
535 CompilationInfo* info_;
540 class HOptimizedGraphBuilder;
543 // A helper class that calls the three compilation phases in
544 // Crankshaft and keeps track of its state. The three phases
545 // CreateGraph, OptimizeGraph and GenerateCode can either
546 // fail, bail-out to the full code generator or succeed. Apart from
547 // their return value, the status of the phase last run can be checked
548 // using last_status().
549 class OptimizedCompileJob: public ZoneObject {
551 explicit OptimizedCompileJob(CompilationInfo* info)
553 graph_builder_(NULL),
556 last_status_(FAILED),
557 awaiting_install_(false) { }
560 FAILED, BAILED_OUT, SUCCEEDED
// The three phases; each returns its Status and must not be ignored.
563 MUST_USE_RESULT Status CreateGraph();
564 MUST_USE_RESULT Status OptimizeGraph();
565 MUST_USE_RESULT Status GenerateCode();
567 Status last_status() const { return last_status_; }
568 CompilationInfo* info() const { return info_; }
569 Isolate* isolate() const { return info()->isolate(); }
// Bails out but allows later optimization attempts (delegates to info_).
571 Status RetryOptimization(BailoutReason reason) {
572 info_->RetryOptimization(reason);
573 return SetLastStatus(BAILED_OUT);
// Bails out and permanently disables optimization (delegates to info_).
576 Status AbortOptimization(BailoutReason reason) {
577 info_->AbortOptimization(reason);
578 return SetLastStatus(BAILED_OUT);
// OSR jobs only: marks this job as parked until its code is installed.
581 void WaitForInstall() {
582 DCHECK(info_->is_osr());
583 awaiting_install_ = true;
586 bool IsWaitingForInstall() { return awaiting_install_; }
589 CompilationInfo* info_;
590 HOptimizedGraphBuilder* graph_builder_;
593 base::TimeDelta time_taken_to_create_graph_;
594 base::TimeDelta time_taken_to_optimize_;
595 base::TimeDelta time_taken_to_codegen_;
597 bool awaiting_install_;
599 MUST_USE_RESULT Status SetLastStatus(Status status) {
600 last_status_ = status;
603 void RecordOptimizationStats();
// Scoped timer that accumulates elapsed time into *location (one of the
// time_taken_* members above); accumulation happens on destruction —
// NOTE(review): the destructor header is elided in this chunk, confirm.
606 Timer(OptimizedCompileJob* job, base::TimeDelta* location)
607 : job_(job), location_(location) {
608 DCHECK(location_ != NULL);
613 *location_ += timer_.Elapsed();
616 OptimizedCompileJob* job_;
617 base::ElapsedTimer timer_;
618 base::TimeDelta* location_;
625 // General strategy: Source code is translated into an anonymous function w/o
626 // parameters which then can be executed. If the source code contains other
627 // functions, they will be compiled and allocated as part of the compilation
628 // of the source code.
630 // Please note this interface returns shared function infos. This means you
631 // need to call Factory::NewFunctionFromSharedFunctionInfo before you have a
632 // real function with a context.
634 class Compiler : public AllStatic {
// Lazy/unoptimized code acquisition; empty MaybeHandle signals failure.
636 MUST_USE_RESULT static MaybeHandle<Code> GetUnoptimizedCode(
637 Handle<JSFunction> function);
638 MUST_USE_RESULT static MaybeHandle<Code> GetLazyCode(
639 Handle<JSFunction> function);
640 MUST_USE_RESULT static MaybeHandle<Code> GetStubCode(
641 Handle<JSFunction> function, CodeStub* stub);
643 static bool Compile(Handle<JSFunction> function, ClearExceptionFlag flag);
644 static bool CompileDebugCode(Handle<JSFunction> function);
645 static bool CompileDebugCode(Handle<SharedFunctionInfo> shared);
646 static void CompileForLiveEdit(Handle<Script> script);
648 // Parser::Parse, then Compiler::Analyze.
649 static bool ParseAndAnalyze(ParseInfo* info);
650 // Rewrite, analyze scopes, and renumber.
651 static bool Analyze(ParseInfo* info);
652 // Adds deoptimization support, requires ParseAndAnalyze.
653 static bool EnsureDeoptimizationSupport(CompilationInfo* info);
655 // Compile a String source within a context for eval.
656 MUST_USE_RESULT static MaybeHandle<JSFunction> GetFunctionFromEval(
657 Handle<String> source, Handle<SharedFunctionInfo> outer_info,
658 Handle<Context> context, LanguageMode language_mode,
659 ParseRestriction restriction, int line_offset, int column_offset = 0,
660 Handle<Object> script_name = Handle<Object>(),
661 ScriptOriginOptions options = ScriptOriginOptions());
663 // Compile a String source within a context.
664 static Handle<SharedFunctionInfo> CompileScript(
665 Handle<String> source, Handle<Object> script_name, int line_offset,
666 int column_offset, ScriptOriginOptions resource_options,
667 Handle<Object> source_map_url, Handle<Context> context,
668 v8::Extension* extension, ScriptData** cached_data,
669 ScriptCompiler::CompileOptions compile_options,
670 NativesFlag is_natives_code, bool is_module);
// NOTE(review): remaining parameters of this declaration are elided in
// this chunk — see the full header for the complete signature.
672 static Handle<SharedFunctionInfo> CompileStreamedScript(Handle<Script> script,
676 // Create a shared function info object (the code may be lazily compiled).
677 static Handle<SharedFunctionInfo> GetSharedFunctionInfo(
678 FunctionLiteral* node, Handle<Script> script, CompilationInfo* outer);
680 enum ConcurrencyMode { NOT_CONCURRENT, CONCURRENT };
682 // Generate and return optimized code or start a concurrent optimization job.
683 // In the latter case, return the InOptimizationQueue builtin. On failure,
684 // return the empty handle.
685 MUST_USE_RESULT static MaybeHandle<Code> GetOptimizedCode(
686 Handle<JSFunction> function, Handle<Code> current_code,
687 ConcurrencyMode mode, BailoutId osr_ast_id = BailoutId::None(),
688 JavaScriptFrame* osr_frame = nullptr);
690 // Generate and return code from previously queued optimization job.
691 // On failure, return the empty handle.
692 static Handle<Code> GetConcurrentlyOptimizedCode(OptimizedCompileJob* job);
// Scoped helper for one named phase of a compilation; carries a timer and
// records the phase's starting zone allocation size, presumably for
// per-phase tracing/statistics — confirm against the definition in the .cc.
696 class CompilationPhase BASE_EMBEDDED {
698 CompilationPhase(const char* name, CompilationInfo* info);
702 bool ShouldProduceTraceOutput() const;
704 const char* name() const { return name_; }
705 CompilationInfo* info() const { return info_; }
706 Isolate* isolate() const { return info()->isolate(); }
707 Zone* zone() { return &zone_; }
711 CompilationInfo* info_;
// Zone allocation size snapshot taken at phase start.
713 size_t info_zone_start_allocation_size_;
714 base::ElapsedTimer timer_;
716 DISALLOW_COPY_AND_ASSIGN(CompilationPhase);
719 } // namespace internal
722 #endif // V8_COMPILER_H_