1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
8 #include "src/allocation.h"
16 class HydrogenCodeStub;
18 // ParseRestriction is used to restrict the set of valid statements in a
19 // unit of compilation. Restriction violations cause a syntax error.
20 enum ParseRestriction {
21 NO_PARSE_RESTRICTION, // All expressions are allowed.
22 ONLY_SINGLE_FUNCTION_LITERAL // Only a single FunctionLiteral expression.
32 OffsetRange(int from, int to) : from(from), to(to) {}
37 // CompilationInfo encapsulates some information known at compile time. It
38 // is constructed based on the resources available at compile-time.
39 class CompilationInfo {
41 CompilationInfo(Handle<JSFunction> closure, Zone* zone);
42 virtual ~CompilationInfo();
// Read-only accessors for the compilation inputs and intermediate state.
44 Isolate* isolate() const {
47 Zone* zone() { return zone_; }
// OSR (on-stack replacement) is active iff an OSR AST id was supplied.
48 bool is_osr() const { return !osr_ast_id_.IsNone(); }
49 bool is_lazy() const { return IsLazy::decode(flags_); }
50 bool is_eval() const { return IsEval::decode(flags_); }
51 bool is_global() const { return IsGlobal::decode(flags_); }
52 StrictMode strict_mode() const { return StrictModeField::decode(flags_); }
53 FunctionLiteral* function() const { return function_; }
54 Scope* scope() const { return scope_; }
55 Scope* global_scope() const { return global_scope_; }
56 Handle<Code> code() const { return code_; }
57 Handle<JSFunction> closure() const { return closure_; }
58 Handle<SharedFunctionInfo> shared_info() const { return shared_info_; }
59 Handle<Script> script() const { return script_; }
60 HydrogenCodeStub* code_stub() const {return code_stub_; }
61 v8::Extension* extension() const { return extension_; }
62 ScriptData** cached_data() const { return cached_data_; }
63 CachedDataMode cached_data_mode() const {
64 return cached_data_mode_;
66 Handle<Context> context() const { return context_; }
67 BailoutId osr_ast_id() const { return osr_ast_id_; }
68 Handle<Code> unoptimized_code() const { return unoptimized_code_; }
69 int opt_count() const { return opt_count_; }
70 int num_parameters() const;
71 int num_heap_slots() const;
72 Code::Flags flags() const;
// Flag setters; each one ORs the corresponding bit into flags_.
76 flags_ |= IsEval::encode(true);
80 flags_ |= IsGlobal::encode(true);
82 void set_parameter_count(int parameter_count) {
84 parameter_count_ = parameter_count;
// NOTE(review): the parameter name 'has_no_uses' reads inverted relative
// to what is stored ('this_has_uses_'); the value is stored as-is, so
// presumably the parameter is simply misnamed. TODO: confirm against
// callers and rename to 'has_uses'.
87 void set_this_has_uses(bool has_no_uses) {
88 this_has_uses_ = has_no_uses;
90 bool this_has_uses() {
91 return this_has_uses_;
// Strict mode may only transition away from SLOPPY, or be re-set to the
// same value (enforced by the ASSERT below).
93 void SetStrictMode(StrictMode strict_mode) {
94 ASSERT(this->strict_mode() == SLOPPY || this->strict_mode() == strict_mode);
95 flags_ = StrictModeField::update(flags_, strict_mode);
98 flags_ |= IsNative::encode(true);
101 bool is_native() const {
102 return IsNative::decode(flags_);
// True if the code makes any calls at all, deferred or non-deferred.
105 bool is_calling() const {
106 return is_deferred_calling() || is_non_deferred_calling();
109 void MarkAsDeferredCalling() {
110 flags_ |= IsDeferredCalling::encode(true);
113 bool is_deferred_calling() const {
114 return IsDeferredCalling::decode(flags_);
117 void MarkAsNonDeferredCalling() {
118 flags_ |= IsNonDeferredCalling::encode(true);
121 bool is_non_deferred_calling() const {
122 return IsNonDeferredCalling::decode(flags_);
125 void MarkAsSavesCallerDoubles() {
126 flags_ |= SavesCallerDoubles::encode(true);
129 bool saves_caller_doubles() const {
130 return SavesCallerDoubles::decode(flags_);
133 void MarkAsRequiresFrame() {
134 flags_ |= RequiresFrame::encode(true);
137 bool requires_frame() const {
138 return RequiresFrame::decode(flags_);
141 void MarkMustNotHaveEagerFrame() {
142 flags_ |= MustNotHaveEagerFrame::encode(true);
145 bool GetMustNotHaveEagerFrame() const {
146 return MustNotHaveEagerFrame::decode(flags_);
150 flags_ |= IsDebug::encode(true);
153 bool is_debug() const {
154 return IsDebug::decode(flags_);
// Code pre-aging applies only when optimizing for size with code aging
// enabled, and never for debug compilations.
157 bool IsCodePreAgingActive() const {
158 return FLAG_optimize_for_size && FLAG_age_code && !is_debug();
161 void SetParseRestriction(ParseRestriction restriction) {
// NOTE: 'ParseRestricitonField' is a typo'd name (should be
// 'ParseRestrictionField'); kept as-is to match the declaration below.
162 flags_ = ParseRestricitonField::update(flags_, restriction);
165 ParseRestriction parse_restriction() const {
166 return ParseRestricitonField::decode(flags_);
// The function literal may only be set once (see the ASSERT).
169 void SetFunction(FunctionLiteral* literal) {
170 ASSERT(function_ == NULL);
173 void PrepareForCompilation(Scope* scope);
// The global scope may only be set once (see the ASSERT).
174 void SetGlobalScope(Scope* global_scope) {
175 ASSERT(global_scope_ == NULL);
176 global_scope_ = global_scope;
178 Handle<FixedArray> feedback_vector() const {
179 return feedback_vector_;
181 void SetCode(Handle<Code> code) { code_ = code; }
182 void SetExtension(v8::Extension* extension) {
184 extension_ = extension;
186 void SetCachedData(ScriptData** cached_data,
187 CachedDataMode cached_data_mode) {
188 cached_data_mode_ = cached_data_mode;
189 if (cached_data_mode == NO_CACHED_DATA) {
193 cached_data_ = cached_data;
196 void SetContext(Handle<Context> context) {
200 void MarkCompilingForDebugging() {
201 flags_ |= IsCompilingForDebugging::encode(true);
203 bool IsCompilingForDebugging() {
204 return IsCompilingForDebugging::decode(flags_);
206 void MarkNonOptimizable() {
207 SetMode(CompilationInfo::NONOPT);
// Whether a deopt should trap, controlled by --trap-on-deopt /
// --trap-on-stub-deopt depending on the compilation mode.
210 bool ShouldTrapOnDeopt() const {
211 return (FLAG_trap_on_deopt && IsOptimizing()) ||
212 (FLAG_trap_on_stub_deopt && IsStub());
215 bool has_global_object() const {
216 return !closure().is_null() &&
217 (closure()->context()->global_object() != NULL);
220 GlobalObject* global_object() const {
221 return has_global_object() ? closure()->context()->global_object() : NULL;
224 // Accessors for the different compilation modes.
225 bool IsOptimizing() const { return mode_ == OPTIMIZE; }
226 bool IsOptimizable() const { return mode_ == BASE; }
227 bool IsStub() const { return mode_ == STUB; }
// Switches this info into optimizing mode, recording the OSR entry id
// and the unoptimized code, and allocating a fresh optimization id.
228 void SetOptimizing(BailoutId osr_ast_id, Handle<Code> unoptimized) {
229 ASSERT(!shared_info_.is_null());
231 osr_ast_id_ = osr_ast_id;
232 unoptimized_code_ = unoptimized;
233 optimization_id_ = isolate()->NextOptimizationId();
235 void DisableOptimization();
237 // Deoptimization support.
238 bool HasDeoptimizationSupport() const {
239 return SupportsDeoptimization::decode(flags_);
241 void EnableDeoptimizationSupport() {
242 ASSERT(IsOptimizable());
243 flags_ |= SupportsDeoptimization::encode(true);
246 // Determines whether or not to insert a self-optimization header.
247 bool ShouldSelfOptimize();
// Deferred handles may only be attached once (see the ASSERT).
249 void set_deferred_handles(DeferredHandles* deferred_handles) {
250 ASSERT(deferred_handles_ == NULL);
251 deferred_handles_ = deferred_handles;
// Returns the (lazily zone-allocated) list of heap objects this
// compilation depends on, for the given dependency group.
254 ZoneList<Handle<HeapObject> >* dependencies(
255 DependentCode::DependencyGroup group) {
256 if (dependencies_[group] == NULL) {
257 dependencies_[group] = new(zone_) ZoneList<Handle<HeapObject> >(2, zone_);
259 return dependencies_[group];
262 void CommitDependencies(Handle<Code> code);
264 void RollbackDependencies();
// Re-anchors the handles below via SaveHandle() — presumably into the
// deferred handle storage; the enclosing function's signature is not
// visible in this chunk.
267 SaveHandle(&closure_);
268 SaveHandle(&shared_info_);
269 SaveHandle(&context_);
270 SaveHandle(&script_);
271 SaveHandle(&unoptimized_code_);
274 BailoutReason bailout_reason() const { return bailout_reason_; }
275 void set_bailout_reason(BailoutReason reason) { bailout_reason_ = reason; }
// The prologue offset must be set before reading, and set exactly once
// (both directions ASSERTed below).
277 int prologue_offset() const {
278 ASSERT_NE(Code::kPrologueOffsetNotSet, prologue_offset_);
279 return prologue_offset_;
282 void set_prologue_offset(int prologue_offset) {
283 ASSERT_EQ(Code::kPrologueOffsetNotSet, prologue_offset_);
284 prologue_offset_ = prologue_offset;
287 // Adds offset range [from, to) where fp register does not point
288 // to the current frame base. Used in CPU profiler to detect stack
289 // samples where top frame is not set up.
290 inline void AddNoFrameRange(int from, int to) {
291 if (no_frame_ranges_) no_frame_ranges_->Add(OffsetRange(from, to));
// Transfers ownership of the accumulated ranges to the caller and
// clears the member.
294 List<OffsetRange>* ReleaseNoFrameRanges() {
295 List<OffsetRange>* result = no_frame_ranges_;
296 no_frame_ranges_ = NULL;
// Lazily creates a Foreign wrapping this CompilationInfo's address.
300 Handle<Foreign> object_wrapper() {
301 if (object_wrapper_.is_null()) {
303 isolate()->factory()->NewForeign(reinterpret_cast<Address>(this));
305 return object_wrapper_;
// Must not be called from the optimizer thread (see the ASSERTs).
308 void AbortDueToDependencyChange() {
309 ASSERT(!OptimizingCompilerThread::IsOptimizerThread(isolate()));
310 abort_due_to_dependency_ = true;
313 bool HasAbortedDueToDependencyChange() {
314 ASSERT(!OptimizingCompilerThread::IsOptimizerThread(isolate()));
315 return abort_due_to_dependency_;
// True if 'function' with 'osr_ast_id' matches this job's OSR entry.
318 bool HasSameOsrEntry(Handle<JSFunction> function, BailoutId osr_ast_id) {
319 return osr_ast_id_ == osr_ast_id && function.is_identical_to(closure_);
322 int optimization_id() const { return optimization_id_; }
// Constructors for the other compilation inputs (script, shared function
// info, code stub); the remaining parameters are elided in this chunk.
325 CompilationInfo(Handle<Script> script,
327 CompilationInfo(Handle<SharedFunctionInfo> shared_info,
329 CompilationInfo(HydrogenCodeStub* stub,
337 // BASE is generated by the full codegen, optionally prepared for bailouts.
338 // OPTIMIZE is optimized code generated by the Hydrogen-based backend.
339 // NONOPT is generated by the full codegen and is not prepared for
340 // recompilation/bailouts. These functions are never recompiled.
348 void Initialize(Isolate* isolate, Mode mode, Zone* zone);
// Mode changes require Crankshaft to be enabled (see the ASSERT).
350 void SetMode(Mode mode) {
351 ASSERT(isolate()->use_crankshaft());
355 // Flags using template class BitField<type, start, length>. All are
358 // Compilation is either eager or lazy.
359 class IsLazy: public BitField<bool, 0, 1> {};
360 // Flags that can be set for eager compilation.
361 class IsEval: public BitField<bool, 1, 1> {};
362 class IsGlobal: public BitField<bool, 2, 1> {};
363 // If the function is being compiled for the debugger.
364 class IsDebug: public BitField<bool, 3, 1> {};
365 // Strict mode - used in eager compilation.
366 class StrictModeField: public BitField<StrictMode, 4, 1> {};
367 // Is this a function from our natives.
368 class IsNative: public BitField<bool, 5, 1> {};
369 // Is this code being compiled with support for deoptimization.
370 class SupportsDeoptimization: public BitField<bool, 6, 1> {};
371 // If compiling for debugging produce just full code matching the
372 // initial mode setting.
373 class IsCompilingForDebugging: public BitField<bool, 7, 1> {};
374 // If the compiled code contains calls that require building a frame.
375 class IsCalling: public BitField<bool, 8, 1> {};
376 // If the compiled code contains deferred calls that require building a frame.
377 class IsDeferredCalling: public BitField<bool, 9, 1> {};
378 // If the compiled code contains non-deferred calls that require a frame.
379 class IsNonDeferredCalling: public BitField<bool, 10, 1> {};
380 // If the compiled code saves double caller registers that it clobbers.
381 class SavesCallerDoubles: public BitField<bool, 11, 1> {};
382 // If the set of valid statements is restricted.
// NOTE: name below is typo'd ('Restriciton'); renaming would touch the
// use sites above, so it is only flagged here. TODO: rename everywhere.
383 class ParseRestricitonField: public BitField<ParseRestriction, 12, 1> {};
384 // If the function requires a frame (for unspecified reasons).
385 class RequiresFrame: public BitField<bool, 13, 1> {};
386 // If the function cannot build a frame (for unspecified reasons).
387 class MustNotHaveEagerFrame: public BitField<bool, 14, 1> {};
391 // Fields filled in by the compilation pipeline.
392 // AST filled in by the parser.
393 FunctionLiteral* function_;
394 // The scope of the function literal as a convenience. Set to indicate
395 // that scopes have been analyzed.
397 // The global scope provided as a convenience.
398 Scope* global_scope_;
399 // For compiled stubs, the stub object.
400 HydrogenCodeStub* code_stub_;
401 // The compiled code.
404 // Possible initial inputs to the compilation process.
405 Handle<JSFunction> closure_;
406 Handle<SharedFunctionInfo> shared_info_;
407 Handle<Script> script_;
409 // Fields possibly needed for eager compilation, NULL by default.
410 v8::Extension* extension_;
411 ScriptData** cached_data_;
412 CachedDataMode cached_data_mode_;
414 // The context of the caller for eval code, and the global context for a
415 // global script. Will be a null handle otherwise.
416 Handle<Context> context_;
418 // Used by codegen, ultimately kept rooted by the SharedFunctionInfo.
419 Handle<FixedArray> feedback_vector_;
421 // Compilation mode flag and whether deoptimization is allowed.
423 BailoutId osr_ast_id_;
424 // The unoptimized code we patched for OSR may not be the shared code
425 // afterwards, since we may need to compile it again to include deoptimization
426 // data. Keep track which code we patched.
427 Handle<Code> unoptimized_code_;
429 // Flag whether compilation needs to be aborted due to dependency change.
430 bool abort_due_to_dependency_;
432 // The zone from which the compilation pipeline working on this
433 // CompilationInfo allocates.
436 DeferredHandles* deferred_handles_;
// Per-group lists of heap objects this code depends on; entries are
// lazily allocated by dependencies() above.
438 ZoneList<Handle<HeapObject> >* dependencies_[DependentCode::kGroupCount];
// Re-roots *object if it is non-null — presumably into the current
// deferred handle scope; the template header is not visible here.
441 void SaveHandle(Handle<T> *object) {
442 if (!object->is_null()) {
443 Handle<T> handle(*(*object));
448 BailoutReason bailout_reason_;
450 int prologue_offset_;
// Ranges where the frame pointer is not set up; see AddNoFrameRange()
// and ReleaseNoFrameRanges() above.
452 List<OffsetRange>* no_frame_ranges_;
454 // A copy of shared_info()->opt_count() to avoid handle deref
455 // during graph optimization.
458 // Number of parameters used for compilation of stubs that require arguments.
459 int parameter_count_;
463 Handle<Foreign> object_wrapper_;
465 int optimization_id_;
467 DISALLOW_COPY_AND_ASSIGN(CompilationInfo);
471 // Exactly like a CompilationInfo, except also creates and enters a
472 // Zone on construction and deallocates it on exit.
473 class CompilationInfoWithZone: public CompilationInfo {
// NOTE(review): each constructor passes &zone_ to the base class before
// zone_ itself is initialized; only the address is handed over at that
// point — presumably the base ctor does not dereference it during
// construction. TODO: confirm in CompilationInfo's constructors.
475 explicit CompilationInfoWithZone(Handle<Script> script)
476 : CompilationInfo(script, &zone_),
477 zone_(script->GetIsolate()) {}
478 explicit CompilationInfoWithZone(Handle<SharedFunctionInfo> shared_info)
479 : CompilationInfo(shared_info, &zone_),
480 zone_(shared_info->GetIsolate()) {}
481 explicit CompilationInfoWithZone(Handle<JSFunction> closure)
482 : CompilationInfo(closure, &zone_),
483 zone_(closure->GetIsolate()) {}
484 CompilationInfoWithZone(HydrogenCodeStub* stub, Isolate* isolate)
485 : CompilationInfo(stub, isolate, &zone_),
488 // Virtual destructor because a CompilationInfoWithZone has to exit the
489 // zone scope and get rid of dependent maps even when the destructor is
490 // called when cast as a CompilationInfo.
491 virtual ~CompilationInfoWithZone() {
492 RollbackDependencies();
500 // A wrapper around a CompilationInfo that detaches the Handles from
501 // the underlying DeferredHandleScope and stores them in info_ on
503 class CompilationHandleScope BASE_EMBEDDED {
505 explicit CompilationHandleScope(CompilationInfo* info)
506 : deferred_(info->isolate()), info_(info) {}
// On destruction, detaches the handles created inside this scope and
// hands them to the CompilationInfo for safekeeping.
507 ~CompilationHandleScope() {
508 info_->set_deferred_handles(deferred_.Detach());
512 DeferredHandleScope deferred_;
513 CompilationInfo* info_;
518 class HOptimizedGraphBuilder;
521 // A helper class that calls the three compilation phases in
522 // Crankshaft and keeps track of its state. The three phases
523 // CreateGraph, OptimizeGraph and GenerateAndInstallCode can either
524 // fail, bail-out to the full code generator or succeed. Apart from
525 // their return value, the status of the phase last run can be checked
526 // using last_status().
527 class OptimizedCompileJob: public ZoneObject {
// Does not own 'info'; graph builder starts out NULL and last_status_
// starts out FAILED (see initializer list).
529 explicit OptimizedCompileJob(CompilationInfo* info)
531 graph_builder_(NULL),
534 last_status_(FAILED),
535 awaiting_install_(false) { }
538 FAILED, BAILED_OUT, SUCCEEDED
// The three compilation phases; each returns the status it leaves the
// job in and must not be ignored (MUST_USE_RESULT).
541 MUST_USE_RESULT Status CreateGraph();
542 MUST_USE_RESULT Status OptimizeGraph();
543 MUST_USE_RESULT Status GenerateCode();
545 Status last_status() const { return last_status_; }
546 CompilationInfo* info() const { return info_; }
547 Isolate* isolate() const { return info()->isolate(); }
// Bails out of this optimization attempt, recording 'reason' on the
// CompilationInfo when one is given.
549 MUST_USE_RESULT Status AbortOptimization(
550 BailoutReason reason = kNoReason) {
551 if (reason != kNoReason) info_->set_bailout_reason(reason);
552 return SetLastStatus(BAILED_OUT);
// Like AbortOptimization(), but additionally disables future
// optimization of the function via its SharedFunctionInfo.
555 MUST_USE_RESULT Status AbortAndDisableOptimization(
556 BailoutReason reason = kNoReason) {
557 if (reason != kNoReason) info_->set_bailout_reason(reason);
558 // Reference to shared function info does not change between phases.
559 AllowDeferredHandleDereference allow_handle_dereference;
560 info_->shared_info()->DisableOptimization(info_->bailout_reason());
561 return SetLastStatus(BAILED_OUT);
// Only valid for OSR compilations (see the ASSERT).
564 void WaitForInstall() {
565 ASSERT(info_->is_osr());
566 awaiting_install_ = true;
569 bool IsWaitingForInstall() { return awaiting_install_; }
572 CompilationInfo* info_;
573 HOptimizedGraphBuilder* graph_builder_;
// Time spent in each of the three phases, for optimization statistics.
576 TimeDelta time_taken_to_create_graph_;
577 TimeDelta time_taken_to_optimize_;
578 TimeDelta time_taken_to_codegen_;
580 bool awaiting_install_;
582 MUST_USE_RESULT Status SetLastStatus(Status status) {
583 last_status_ = status;
586 void RecordOptimizationStats();
// Scoped timer that accumulates elapsed time into *location on exit.
589 Timer(OptimizedCompileJob* job, TimeDelta* location)
590 : job_(job), location_(location) {
591 ASSERT(location_ != NULL);
596 *location_ += timer_.Elapsed();
599 OptimizedCompileJob* job_;
601 TimeDelta* location_;
608 // General strategy: Source code is translated into an anonymous function w/o
609 // parameters which then can be executed. If the source code contains other
610 // functions, they will be compiled and allocated as part of the compilation
611 // of the source code.
613 // Please note this interface returns shared function infos. This means you
614 // need to call Factory::NewFunctionFromSharedFunctionInfo before you have a
615 // real function with a context.
617 class Compiler : public AllStatic {
// Get (compiling lazily if needed) the unoptimized code for a function
// or a shared function info. Empty MaybeHandle on failure.
619 MUST_USE_RESULT static MaybeHandle<Code> GetUnoptimizedCode(
620 Handle<JSFunction> function);
621 MUST_USE_RESULT static MaybeHandle<Code> GetUnoptimizedCode(
622 Handle<SharedFunctionInfo> shared);
623 static bool EnsureCompiled(Handle<JSFunction> function,
624 ClearExceptionFlag flag);
625 MUST_USE_RESULT static MaybeHandle<Code> GetCodeForDebugging(
626 Handle<JSFunction> function);
628 static void CompileForLiveEdit(Handle<Script> script);
630 // Compile a String source within a context for eval.
631 MUST_USE_RESULT static MaybeHandle<JSFunction> GetFunctionFromEval(
632 Handle<String> source,
633 Handle<Context> context,
634 StrictMode strict_mode,
635 ParseRestriction restriction,
638 // Compile a String source within a context.
639 static Handle<SharedFunctionInfo> CompileScript(
640 Handle<String> source,
641 Handle<Object> script_name,
644 bool is_shared_cross_origin,
645 Handle<Context> context,
646 v8::Extension* extension,
647 ScriptData** cached_data,
648 CachedDataMode cached_data_mode,
649 NativesFlag is_natives_code);
651 // Create a shared function info object (the code may be lazily compiled).
652 static Handle<SharedFunctionInfo> BuildFunctionInfo(FunctionLiteral* node,
653 Handle<Script> script);
655 enum ConcurrencyMode { NOT_CONCURRENT, CONCURRENT };
657 // Generate and return optimized code or start a concurrent optimization job.
658 // In the latter case, return the InOptimizationQueue builtin. On failure,
659 // return the empty handle.
660 MUST_USE_RESULT static MaybeHandle<Code> GetOptimizedCode(
661 Handle<JSFunction> function,
662 Handle<Code> current_code,
663 ConcurrencyMode mode,
664 BailoutId osr_ast_id = BailoutId::None());
666 // Generate and return code from previously queued optimization job.
667 // On failure, return the empty handle.
668 static Handle<Code> GetConcurrentlyOptimizedCode(OptimizedCompileJob* job);
670 static void RecordFunctionCompilation(Logger::LogEventsAndTags tag,
671 CompilationInfo* info,
672 Handle<SharedFunctionInfo> shared);
676 class CompilationPhase BASE_EMBEDDED {
678 CompilationPhase(const char* name, CompilationInfo* info);
// Whether tracing output should be produced for this phase —
// presumably driven by tracing flags; implementation not visible here.
682 bool ShouldProduceTraceOutput() const;
684 const char* name() const { return name_; }
685 CompilationInfo* info() const { return info_; }
686 Isolate* isolate() const { return info()->isolate(); }
687 Zone* zone() { return &zone_; }
691 CompilationInfo* info_;
// Allocation size of the info's zone at phase start — presumably used
// to compute per-phase allocation; confirm in the .cc file.
693 unsigned info_zone_start_allocation_size_;
696 DISALLOW_COPY_AND_ASSIGN(CompilationPhase);
700 } } // namespace v8::internal
702 #endif // V8_COMPILER_H_