1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
8 #include "allocation.h"
// Forward declaration: stubs compiled through this pipeline are Hydrogen
// code stubs; only the pointer is needed in this header.
16 class HydrogenCodeStub;
18 // ParseRestriction is used to restrict the set of valid statements in a
19 // unit of compilation. Restriction violations cause a syntax error.
20 enum ParseRestriction {
21 NO_PARSE_RESTRICTION, // All expressions are allowed.
22 ONLY_SINGLE_FUNCTION_LITERAL // Only a single FunctionLiteral expression.
// NOTE(review): the enum's closing brace and the OffsetRange struct header
// are elided in this excerpt; the line below is OffsetRange's constructor,
// which simply records a [from, to) byte-offset pair (see AddNoFrameRange).
32 OffsetRange(int from, int to) : from(from), to(to) {}
37 // CompilationInfo encapsulates some information known at compile time. It
38 // is constructed based on the resources available at compile-time.
// NOTE(review): this excerpt elides many original lines (closing braces,
// access labels, some declarations); the code below is kept byte-identical.
39 class CompilationInfo {
41 CompilationInfo(Handle<JSFunction> closure, Zone* zone);
42 virtual ~CompilationInfo();
// Simple accessors for compilation inputs and state. The boolean queries
// decode single bits out of flags_ (see the BitField classes further down).
44 Isolate* isolate() const {
47 Zone* zone() { return zone_; }
48 bool is_osr() const { return !osr_ast_id_.IsNone(); }
49 bool is_lazy() const { return IsLazy::decode(flags_); }
50 bool is_eval() const { return IsEval::decode(flags_); }
51 bool is_global() const { return IsGlobal::decode(flags_); }
52 StrictMode strict_mode() const { return StrictModeField::decode(flags_); }
53 bool is_in_loop() const { return IsInLoop::decode(flags_); }
54 FunctionLiteral* function() const { return function_; }
55 Scope* scope() const { return scope_; }
56 Scope* global_scope() const { return global_scope_; }
57 Handle<Code> code() const { return code_; }
58 Handle<JSFunction> closure() const { return closure_; }
59 Handle<SharedFunctionInfo> shared_info() const { return shared_info_; }
60 Handle<Script> script() const { return script_; }
61 HydrogenCodeStub* code_stub() const {return code_stub_; }
62 v8::Extension* extension() const { return extension_; }
63 ScriptData** cached_data() const { return cached_data_; }
64 CachedDataMode cached_data_mode() const {
65 return cached_data_mode_;
67 Handle<Context> context() const { return context_; }
68 BailoutId osr_ast_id() const { return osr_ast_id_; }
69 Handle<Code> unoptimized_code() const { return unoptimized_code_; }
70 int opt_count() const { return opt_count_; }
71 int num_parameters() const;
72 int num_heap_slots() const;
73 Code::Flags flags() const;
// The MarkAs*/Set* mutators below OR individual bits into flags_.
77 flags_ |= IsEval::encode(true);
81 flags_ |= IsGlobal::encode(true);
83 void set_parameter_count(int parameter_count) {
85 parameter_count_ = parameter_count;
// NOTE(review): the parameter name 'has_no_uses' contradicts the member it
// is stored into ('this_has_uses_') — likely a misnomer rather than an
// inversion, since the value is stored unmodified; confirm at call sites
// and rename to 'has_uses'.
88 void set_this_has_uses(bool has_no_uses) {
89 this_has_uses_ = has_no_uses;
91 bool this_has_uses() {
92 return this_has_uses_;
// Strict mode may only be set once (or re-set to the same value): the
// assert allows SLOPPY -> anything but forbids changing an already-strict
// setting to a different mode.
94 void SetStrictMode(StrictMode strict_mode) {
95 ASSERT(this->strict_mode() == SLOPPY || this->strict_mode() == strict_mode);
96 flags_ = StrictModeField::update(flags_, strict_mode);
100 flags_ |= IsInLoop::encode(true);
102 void MarkAsNative() {
103 flags_ |= IsNative::encode(true);
106 bool is_native() const {
107 return IsNative::decode(flags_);
110 bool is_calling() const {
111 return is_deferred_calling() || is_non_deferred_calling();
114 void MarkAsDeferredCalling() {
115 flags_ |= IsDeferredCalling::encode(true);
118 bool is_deferred_calling() const {
119 return IsDeferredCalling::decode(flags_);
122 void MarkAsNonDeferredCalling() {
123 flags_ |= IsNonDeferredCalling::encode(true);
126 bool is_non_deferred_calling() const {
127 return IsNonDeferredCalling::decode(flags_);
130 void MarkAsSavesCallerDoubles() {
131 flags_ |= SavesCallerDoubles::encode(true);
134 bool saves_caller_doubles() const {
135 return SavesCallerDoubles::decode(flags_);
138 void MarkAsRequiresFrame() {
139 flags_ |= RequiresFrame::encode(true);
142 bool requires_frame() const {
143 return RequiresFrame::decode(flags_);
// NOTE(review): 'ParseRestricitonField' is misspelled ('Restriciton');
// it is used consistently with its definition below, so behavior is
// unaffected, but it deserves a rename cleanup.
146 void SetParseRestriction(ParseRestriction restriction) {
147 flags_ = ParseRestricitonField::update(flags_, restriction);
150 ParseRestriction parse_restriction() const {
151 return ParseRestricitonField::decode(flags_);
// One-shot setters: the asserts enforce that the AST / global scope are
// installed exactly once.
154 void SetFunction(FunctionLiteral* literal) {
155 ASSERT(function_ == NULL);
158 void PrepareForCompilation(Scope* scope);
159 void SetGlobalScope(Scope* global_scope) {
160 ASSERT(global_scope_ == NULL);
161 global_scope_ = global_scope;
163 Handle<FixedArray> feedback_vector() const {
164 return feedback_vector_;
166 void SetCode(Handle<Code> code) { code_ = code; }
167 void SetExtension(v8::Extension* extension) {
169 extension_ = extension;
171 void SetCachedData(ScriptData** cached_data,
172 CachedDataMode cached_data_mode) {
173 cached_data_mode_ = cached_data_mode;
174 if (cached_data_mode == NO_CACHED_DATA) {
178 cached_data_ = cached_data;
181 void SetContext(Handle<Context> context) {
185 void MarkCompilingForDebugging() {
186 flags_ |= IsCompilingForDebugging::encode(true);
188 bool IsCompilingForDebugging() {
189 return IsCompilingForDebugging::decode(flags_);
191 void MarkNonOptimizable() {
192 SetMode(CompilationInfo::NONOPT);
195 bool ShouldTrapOnDeopt() const {
196 return (FLAG_trap_on_deopt && IsOptimizing()) ||
197 (FLAG_trap_on_stub_deopt && IsStub());
200 bool has_global_object() const {
201 return !closure().is_null() &&
202 (closure()->context()->global_object() != NULL);
205 GlobalObject* global_object() const {
206 return has_global_object() ? closure()->context()->global_object() : NULL;
209 // Accessors for the different compilation modes.
210 bool IsOptimizing() const { return mode_ == OPTIMIZE; }
211 bool IsOptimizable() const { return mode_ == BASE; }
212 bool IsStub() const { return mode_ == STUB; }
// Switches this info into optimizing mode, recording the OSR entry point
// and the unoptimized code being patched, and assigning a fresh
// optimization id from the isolate.
213 void SetOptimizing(BailoutId osr_ast_id, Handle<Code> unoptimized) {
214 ASSERT(!shared_info_.is_null());
216 osr_ast_id_ = osr_ast_id;
217 unoptimized_code_ = unoptimized;
218 optimization_id_ = isolate()->NextOptimizationId();
220 void DisableOptimization();
222 // Deoptimization support.
223 bool HasDeoptimizationSupport() const {
224 return SupportsDeoptimization::decode(flags_);
226 void EnableDeoptimizationSupport() {
227 ASSERT(IsOptimizable());
228 flags_ |= SupportsDeoptimization::encode(true);
231 // Determines whether or not to insert a self-optimization header.
232 bool ShouldSelfOptimize();
234 void set_deferred_handles(DeferredHandles* deferred_handles) {
235 ASSERT(deferred_handles_ == NULL);
236 deferred_handles_ = deferred_handles;
// Lazily allocates the per-group code-dependency list in the compilation
// zone (initial capacity 2).
239 ZoneList<Handle<HeapObject> >* dependencies(
240 DependentCode::DependencyGroup group) {
241 if (dependencies_[group] == NULL) {
242 dependencies_[group] = new(zone_) ZoneList<Handle<HeapObject> >(2, zone_);
244 return dependencies_[group];
247 void CommitDependencies(Handle<Code> code);
249 void RollbackDependencies();
// Re-creates each stored handle via SaveHandle (see the template below) so
// the copies are independent of the scope that originally allocated them.
252 SaveHandle(&closure_);
253 SaveHandle(&shared_info_);
254 SaveHandle(&context_);
255 SaveHandle(&script_);
256 SaveHandle(&unoptimized_code_);
259 BailoutReason bailout_reason() const { return bailout_reason_; }
260 void set_bailout_reason(BailoutReason reason) { bailout_reason_ = reason; }
// prologue_offset_ must be set exactly once: the getter asserts it has
// been set, the setter asserts it has not.
262 int prologue_offset() const {
263 ASSERT_NE(Code::kPrologueOffsetNotSet, prologue_offset_);
264 return prologue_offset_;
267 void set_prologue_offset(int prologue_offset) {
268 ASSERT_EQ(Code::kPrologueOffsetNotSet, prologue_offset_);
269 prologue_offset_ = prologue_offset;
272 // Adds offset range [from, to) where fp register does not point
273 // to the current frame base. Used in CPU profiler to detect stack
274 // samples where top frame is not set up.
275 inline void AddNoFrameRange(int from, int to) {
276 if (no_frame_ranges_) no_frame_ranges_->Add(OffsetRange(from, to));
// Transfers ownership of the collected ranges to the caller and clears
// the member, so subsequent AddNoFrameRange calls become no-ops.
279 List<OffsetRange>* ReleaseNoFrameRanges() {
280 List<OffsetRange>* result = no_frame_ranges_;
281 no_frame_ranges_ = NULL;
// Lazily wraps 'this' in a Foreign object so the raw pointer can be
// carried through handle-based APIs.
285 Handle<Foreign> object_wrapper() {
286 if (object_wrapper_.is_null()) {
288 isolate()->factory()->NewForeign(reinterpret_cast<Address>(this));
290 return object_wrapper_;
// The abort flag may only be touched from the main thread: both accessors
// assert we are not on the optimizer thread.
293 void AbortDueToDependencyChange() {
294 ASSERT(!OptimizingCompilerThread::IsOptimizerThread(isolate()));
295 abort_due_to_dependency_ = true;
298 bool HasAbortedDueToDependencyChange() {
299 ASSERT(!OptimizingCompilerThread::IsOptimizerThread(isolate()));
300 return abort_due_to_dependency_;
303 bool HasSameOsrEntry(Handle<JSFunction> function, BailoutId osr_ast_id) {
304 return osr_ast_id_ == osr_ast_id && function.is_identical_to(closure_);
307 int optimization_id() const { return optimization_id_; }
// Alternative constructors for script / shared-info / stub compilation
// (parameter tails elided in this excerpt).
310 CompilationInfo(Handle<Script> script,
312 CompilationInfo(Handle<SharedFunctionInfo> shared_info,
314 CompilationInfo(HydrogenCodeStub* stub,
322 // BASE is generated by the full codegen, optionally prepared for bailouts.
323 // OPTIMIZE is optimized code generated by the Hydrogen-based backend.
324 // NONOPT is generated by the full codegen and is not prepared for
325 // recompilation/bailouts. These functions are never recompiled.
333 void Initialize(Isolate* isolate, Mode mode, Zone* zone);
335 void SetMode(Mode mode) {
336 ASSERT(isolate()->use_crankshaft());
340 // Flags using template class BitField<type, start, length>. All are
343 // Compilation is either eager or lazy.
344 class IsLazy: public BitField<bool, 0, 1> {};
345 // Flags that can be set for eager compilation.
346 class IsEval: public BitField<bool, 1, 1> {};
347 class IsGlobal: public BitField<bool, 2, 1> {};
348 // Flags that can be set for lazy compilation.
349 class IsInLoop: public BitField<bool, 3, 1> {};
350 // Strict mode - used in eager compilation.
351 class StrictModeField: public BitField<StrictMode, 4, 1> {};
352 // Is this a function from our natives.
353 class IsNative: public BitField<bool, 5, 1> {};
354 // Is this code being compiled with support for deoptimization..
355 class SupportsDeoptimization: public BitField<bool, 6, 1> {};
356 // If compiling for debugging produce just full code matching the
357 // initial mode setting.
358 class IsCompilingForDebugging: public BitField<bool, 7, 1> {};
// NOTE(review): the three comments below are copy-paste duplicates
// ("calls that require building a frame") — only the RequiresFrame bit
// actually means that; the Is*Calling bits deserve their own wording.
359 // If the compiled code contains calls that require building a frame
360 class IsCalling: public BitField<bool, 8, 1> {};
361 // If the compiled code contains calls that require building a frame
362 class IsDeferredCalling: public BitField<bool, 9, 1> {};
363 // If the compiled code contains calls that require building a frame
364 class IsNonDeferredCalling: public BitField<bool, 10, 1> {};
365 // If the compiled code saves double caller registers that it clobbers.
366 class SavesCallerDoubles: public BitField<bool, 11, 1> {};
367 // If the set of valid statements is restricted.
368 class ParseRestricitonField: public BitField<ParseRestriction, 12, 1> {};
369 // If the function requires a frame (for unspecified reasons)
370 class RequiresFrame: public BitField<bool, 13, 1> {};
374 // Fields filled in by the compilation pipeline.
375 // AST filled in by the parser.
376 FunctionLiteral* function_;
377 // The scope of the function literal as a convenience. Set to indicate
378 // that scopes have been analyzed.
380 // The global scope provided as a convenience.
381 Scope* global_scope_;
382 // For compiled stubs, the stub object
383 HydrogenCodeStub* code_stub_;
384 // The compiled code.
387 // Possible initial inputs to the compilation process.
388 Handle<JSFunction> closure_;
389 Handle<SharedFunctionInfo> shared_info_;
390 Handle<Script> script_;
392 // Fields possibly needed for eager compilation, NULL by default.
393 v8::Extension* extension_;
394 ScriptData** cached_data_;
395 CachedDataMode cached_data_mode_;
397 // The context of the caller for eval code, and the global context for a
398 // global script. Will be a null handle otherwise.
399 Handle<Context> context_;
401 // Used by codegen, ultimately kept rooted by the SharedFunctionInfo.
402 Handle<FixedArray> feedback_vector_;
404 // Compilation mode flag and whether deoptimization is allowed.
406 BailoutId osr_ast_id_;
407 // The unoptimized code we patched for OSR may not be the shared code
408 // afterwards, since we may need to compile it again to include deoptimization
409 // data. Keep track which code we patched.
410 Handle<Code> unoptimized_code_;
412 // Flag whether compilation needs to be aborted due to dependency change.
413 bool abort_due_to_dependency_;
415 // The zone from which the compilation pipeline working on this
416 // CompilationInfo allocates.
419 DeferredHandles* deferred_handles_;
421 ZoneList<Handle<HeapObject> >* dependencies_[DependentCode::kGroupCount];
// Re-allocates *object as a fresh handle when non-null (the tail of this
// template function is elided in this excerpt).
424 void SaveHandle(Handle<T> *object) {
425 if (!object->is_null()) {
426 Handle<T> handle(*(*object));
431 BailoutReason bailout_reason_;
433 int prologue_offset_;
435 List<OffsetRange>* no_frame_ranges_;
437 // A copy of shared_info()->opt_count() to avoid handle deref
438 // during graph optimization.
441 // Number of parameters used for compilation of stubs that require arguments.
442 int parameter_count_;
446 Handle<Foreign> object_wrapper_;
448 int optimization_id_;
450 DISALLOW_COPY_AND_ASSIGN(CompilationInfo);
454 // Exactly like a CompilationInfo, except also creates and enters a
455 // Zone on construction and deallocates it on exit.
// NOTE(review): each constructor passes &zone_ to the base before the
// zone_ member itself is constructed (members initialize after the base).
// This is safe only if the CompilationInfo constructor merely stores the
// pointer and does not allocate from the Zone — confirm in the .cc file.
456 class CompilationInfoWithZone: public CompilationInfo {
458 explicit CompilationInfoWithZone(Handle<Script> script)
459 : CompilationInfo(script, &zone_),
460 zone_(script->GetIsolate()) {}
461 explicit CompilationInfoWithZone(Handle<SharedFunctionInfo> shared_info)
462 : CompilationInfo(shared_info, &zone_),
463 zone_(shared_info->GetIsolate()) {}
464 explicit CompilationInfoWithZone(Handle<JSFunction> closure)
465 : CompilationInfo(closure, &zone_),
466 zone_(closure->GetIsolate()) {}
467 CompilationInfoWithZone(HydrogenCodeStub* stub, Isolate* isolate)
468 : CompilationInfo(stub, isolate, &zone_),
471 // Virtual destructor because a CompilationInfoWithZone has to exit the
472 // zone scope and get rid of dependent maps even when the destructor is
473 // called when cast as a CompilationInfo.
474 virtual ~CompilationInfoWithZone() {
475 RollbackDependencies();
483 // A wrapper around a CompilationInfo that detaches the Handles from
484 // the underlying DeferredHandleScope and stores them in info_ on
// (destruction — continuation of the sentence above is elided in this
// excerpt; the destructor below performs the Detach-and-store.)
486 class CompilationHandleScope BASE_EMBEDDED {
488 explicit CompilationHandleScope(CompilationInfo* info)
489 : deferred_(info->isolate()), info_(info) {}
// Hands ownership of the deferred handles to the CompilationInfo; note
// CompilationInfo::set_deferred_handles asserts it is only set once.
490 ~CompilationHandleScope() {
491 info_->set_deferred_handles(deferred_.Detach());
495 DeferredHandleScope deferred_;
496 CompilationInfo* info_;
501 class HOptimizedGraphBuilder;
504 // A helper class that calls the three compilation phases in
505 // Crankshaft and keeps track of its state. The three phases
506 // CreateGraph, OptimizeGraph and GenerateAndInstallCode can either
507 // fail, bail-out to the full code generator or succeed. Apart from
508 // their return value, the status of the phase last run can be checked
509 // using last_status().
// NOTE(review): the comment above says 'GenerateAndInstallCode' but the
// declared phase below is GenerateCode(); update one or the other.
510 class OptimizedCompileJob: public ZoneObject {
512 explicit OptimizedCompileJob(CompilationInfo* info)
514 graph_builder_(NULL),
517 last_status_(FAILED),
518 awaiting_install_(false) { }
521 FAILED, BAILED_OUT, SUCCEEDED
// The three compilation phases; each returns the status it sets.
524 MUST_USE_RESULT Status CreateGraph();
525 MUST_USE_RESULT Status OptimizeGraph();
526 MUST_USE_RESULT Status GenerateCode();
528 Status last_status() const { return last_status_; }
529 CompilationInfo* info() const { return info_; }
530 Isolate* isolate() const { return info()->isolate(); }
// Records the bailout reason (if any) and marks the job bailed out;
// optimization of the function may still be retried later.
532 MUST_USE_RESULT Status AbortOptimization(
533 BailoutReason reason = kNoReason) {
534 if (reason != kNoReason) info_->set_bailout_reason(reason);
535 return SetLastStatus(BAILED_OUT);
// Like AbortOptimization, but additionally disables future optimization
// attempts on the shared function info.
538 MUST_USE_RESULT Status AbortAndDisableOptimization(
539 BailoutReason reason = kNoReason) {
540 if (reason != kNoReason) info_->set_bailout_reason(reason);
541 // Reference to shared function info does not change between phases.
542 AllowDeferredHandleDereference allow_handle_dereference;
543 info_->shared_info()->DisableOptimization(info_->bailout_reason());
544 return SetLastStatus(BAILED_OUT);
// Only OSR jobs wait for installation (asserted below).
547 void WaitForInstall() {
548 ASSERT(info_->is_osr());
549 awaiting_install_ = true;
552 bool IsWaitingForInstall() { return awaiting_install_; }
555 CompilationInfo* info_;
556 HOptimizedGraphBuilder* graph_builder_;
// Per-phase timing, accumulated by the Timer helper below and reported
// by RecordOptimizationStats().
559 TimeDelta time_taken_to_create_graph_;
560 TimeDelta time_taken_to_optimize_;
561 TimeDelta time_taken_to_codegen_;
563 bool awaiting_install_;
565 MUST_USE_RESULT Status SetLastStatus(Status status) {
566 last_status_ = status;
569 void RecordOptimizationStats();
// RAII timer: adds the elapsed time into *location on scope exit
// (body partially elided in this excerpt).
572 Timer(OptimizedCompileJob* job, TimeDelta* location)
573 : job_(job), location_(location) {
574 ASSERT(location_ != NULL);
579 *location_ += timer_.Elapsed();
582 OptimizedCompileJob* job_;
584 TimeDelta* location_;
591 // General strategy: Source code is translated into an anonymous function w/o
592 // parameters which then can be executed. If the source code contains other
593 // functions, they will be compiled and allocated as part of the compilation
594 // of the source code.
596 // Please note this interface returns shared function infos. This means you
597 // need to call Factory::NewFunctionFromSharedFunctionInfo before you have a
598 // real function with a context.
// Static-only entry points into the compilation pipeline (AllStatic base).
600 class Compiler : public AllStatic {
// Fetch (or lazily produce) unoptimized code for a function / shared info.
602 MUST_USE_RESULT static MaybeHandle<Code> GetUnoptimizedCode(
603 Handle<JSFunction> function);
604 MUST_USE_RESULT static MaybeHandle<Code> GetUnoptimizedCode(
605 Handle<SharedFunctionInfo> shared);
// Ensures 'function' has code; returns false on failure. NOTE(review):
// ClearExceptionFlag presumably controls whether a pending exception is
// cleared on failure — confirm in the implementation.
606 static bool EnsureCompiled(Handle<JSFunction> function,
607 ClearExceptionFlag flag);
608 MUST_USE_RESULT static MaybeHandle<Code> GetCodeForDebugging(
609 Handle<JSFunction> function);
611 static void CompileForLiveEdit(Handle<Script> script);
613 // Compile a String source within a context for eval.
// (Remaining parameters of this declaration are elided in this excerpt.)
614 MUST_USE_RESULT static MaybeHandle<JSFunction> GetFunctionFromEval(
615 Handle<String> source,
616 Handle<Context> context,
617 StrictMode strict_mode,
618 ParseRestriction restriction,
621 // Compile a String source within a context.
622 static Handle<SharedFunctionInfo> CompileScript(
623 Handle<String> source,
624 Handle<Object> script_name,
627 bool is_shared_cross_origin,
628 Handle<Context> context,
629 v8::Extension* extension,
630 ScriptData** cached_data,
631 CachedDataMode cached_data_mode,
632 NativesFlag is_natives_code);
634 // Create a shared function info object (the code may be lazily compiled).
635 static Handle<SharedFunctionInfo> BuildFunctionInfo(FunctionLiteral* node,
636 Handle<Script> script);
638 enum ConcurrencyMode { NOT_CONCURRENT, CONCURRENT };
640 // Generate and return optimized code or start a concurrent optimization job.
641 // In the latter case, return the InOptimizationQueue builtin. On failure,
642 // return the empty handle.
643 MUST_USE_RESULT static MaybeHandle<Code> GetOptimizedCode(
644 Handle<JSFunction> function,
645 Handle<Code> current_code,
646 ConcurrencyMode mode,
647 BailoutId osr_ast_id = BailoutId::None());
649 // Generate and return code from previously queued optimization job.
650 // On failure, return the empty handle.
651 static Handle<Code> GetConcurrentlyOptimizedCode(OptimizedCompileJob* job);
// Logs/profiles a finished compilation under the given event tag.
653 static void RecordFunctionCompilation(Logger::LogEventsAndTags tag,
654 CompilationInfo* info,
655 Handle<SharedFunctionInfo> shared);
// Scoped helper naming one phase of a compilation, with its own Zone;
// definition is partially elided in this excerpt.
659 class CompilationPhase BASE_EMBEDDED {
661 CompilationPhase(const char* name, CompilationInfo* info);
665 bool ShouldProduceTraceOutput() const;
667 const char* name() const { return name_; }
668 CompilationInfo* info() const { return info_; }
669 Isolate* isolate() const { return info()->isolate(); }
670 Zone* zone() { return &zone_; }
674 CompilationInfo* info_;
// Zone allocation size of the info's zone at phase start, used to measure
// what this phase allocated. NOTE(review): grounded only in the member
// name — confirm against the constructor in the .cc file.
676 unsigned info_zone_start_allocation_size_;
679 DISALLOW_COPY_AND_ASSIGN(CompilationPhase);
683 } } // namespace v8::internal
685 #endif // V8_COMPILER_H_