1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #ifndef V8_FULL_CODEGEN_H_
6 #define V8_FULL_CODEGEN_H_
10 #include "src/allocation.h"
11 #include "src/assert-scope.h"
13 #include "src/bit-vector.h"
14 #include "src/code-stubs.h"
15 #include "src/codegen.h"
16 #include "src/compiler.h"
17 #include "src/globals.h"
18 #include "src/objects.h"
23 // Forward declarations.
26 // AST node visitor which can tell whether a given statement will be breakable
27 // when the code is compiled by the full compiler in the debugger. This means
28 // that there will be an IC (load/store/call) in the code generated for the
29 // debugger to piggyback on.
30 class BreakableStatementChecker: public AstVisitor {
32  BreakableStatementChecker(Isolate* isolate, Zone* zone)
33      : is_breakable_(false) {
34    InitializeAstVisitor(isolate, zone);
// Visit the given node; the Visit* overrides (bodies not visible here)
// presumably set is_breakable_ when an IC site is found — TODO confirm.
37  void Check(Statement* stmt);
38  void Check(Expression* stmt);
// Result of the most recent Check() call.
40  bool is_breakable() { return is_breakable_; }
43  // AST node visit functions.
44 #define DECLARE_VISIT(type) virtual void Visit##type(type* node) OVERRIDE;
45   AST_NODE_LIST(DECLARE_VISIT)
50   DEFINE_AST_VISITOR_SUBCLASS_MEMBERS();
51   DISALLOW_COPY_AND_ASSIGN(BreakableStatementChecker);
55 // -----------------------------------------------------------------------------
56 // Full code generator.
58 class FullCodeGenerator: public AstVisitor {
// Constructs a generator bound to |masm| and compilation |info|.
// bailout_entries_ is pre-sized only when deopt support is on.
65   FullCodeGenerator(MacroAssembler* masm, CompilationInfo* info)
68         scope_(info->scope()),
73         bailout_entries_(info->HasDeoptimizationSupport()
74                              ? info->function()->ast_node_count() : 0,
76         back_edges_(2, info->zone()),
// Full-codegen never compiles stubs.
78     DCHECK(!info->IsStub());
84   static bool MakeCode(CompilationInfo* info);
86   // Encode state and pc-offset as a BitField<type, start, size>.
87   // Only use 30 bits because we encode the result as a smi.
88   class StateField : public BitField<State, 0, 1> { };
89   class PcField : public BitField<unsigned, 1, 30-1> { };
// Debug helper: printable name for a bailout State value.
91   static const char* State2String(State state) {
93       case NO_REGISTERS: return "NO_REGISTERS";
94       case TOS_REG: return "TOS_REG";
100   static const int kMaxBackEdgeWeight = 127;
102   // Platform-specific code size multiplier.
103 #if V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X87
104   static const int kCodeSizeMultiplier = 105;
105 #elif V8_TARGET_ARCH_X64
106   static const int kCodeSizeMultiplier = 170;
107 #elif V8_TARGET_ARCH_ARM
108   static const int kCodeSizeMultiplier = 149;
109 #elif V8_TARGET_ARCH_ARM64
110 // TODO(all): Copied ARM value. Check this is sensible for ARM64.
111   static const int kCodeSizeMultiplier = 149;
112 #elif V8_TARGET_ARCH_PPC64
113   static const int kCodeSizeMultiplier = 200;
114 #elif V8_TARGET_ARCH_PPC
115   static const int kCodeSizeMultiplier = 200;
116 #elif V8_TARGET_ARCH_MIPS
117   static const int kCodeSizeMultiplier = 149;
118 #elif V8_TARGET_ARCH_MIPS64
119   static const int kCodeSizeMultiplier = 149;
121 #error Unsupported target architecture.
// Base class for entries in the codegen's stack of nested control
// statements. Construction pushes onto the stack, destruction pops
// (RAII), so nesting mirrors the visitor's recursion.
130   class NestedStatement BASE_EMBEDDED {
132     explicit NestedStatement(FullCodeGenerator* codegen) : codegen_(codegen) {
133       // Link into codegen's nesting stack.
134       previous_ = codegen->nesting_stack_;
135       codegen->nesting_stack_ = this;
137     virtual ~NestedStatement() {
138       // Unlink from codegen's nesting stack.
139       DCHECK_EQ(this, codegen_->nesting_stack_);
140       codegen_->nesting_stack_ = previous_;
// Checked-downcast helpers; overridden by the matching subclasses.
143     virtual Breakable* AsBreakable() { return NULL; }
144     virtual Iteration* AsIteration() { return NULL; }
146     virtual bool IsContinueTarget(Statement* target) { return false; }
147     virtual bool IsBreakTarget(Statement* target) { return false; }
149     // Notify the statement that we are exiting it via break, continue, or
150     // return and give it a chance to generate cleanup code. Return the
151     // next outer statement in the nesting stack. We accumulate in
152     // *stack_depth the amount to drop the stack and in *context_length the
153     // number of context chain links to unwind as we traverse the nesting
154     // stack from an exit to its target.
155     virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
159     // Like the Exit() method above, but limited to accumulating stack depth.
160     virtual NestedStatement* AccumulateDepth(int* stack_depth) {
165     MacroAssembler* masm() { return codegen_->masm(); }
167     FullCodeGenerator* codegen_;
168     NestedStatement* previous_;
171     DISALLOW_COPY_AND_ASSIGN(NestedStatement);
174 // A breakable statement such as a block.
174   // A breakable statement such as a block.
175   class Breakable : public NestedStatement {
177     Breakable(FullCodeGenerator* codegen, BreakableStatement* statement)
178         : NestedStatement(codegen), statement_(statement) {
180     virtual ~Breakable() {}
182     virtual Breakable* AsBreakable() { return this; }
// A 'break' targets this entry iff it names the same AST statement.
183     virtual bool IsBreakTarget(Statement* target) {
184       return statement() == target;
187     BreakableStatement* statement() { return statement_; }
// Label bound at the statement's exit; 'break' jumps here.
188     Label* break_label() { return &break_label_; }
191     BreakableStatement* statement_;
195 // An iteration statement such as a while, for, or do loop.
195   // An iteration statement such as a while, for, or do loop.
196   class Iteration : public Breakable {
198     Iteration(FullCodeGenerator* codegen, IterationStatement* statement)
199         : Breakable(codegen, statement) {
201     virtual ~Iteration() {}
203     virtual Iteration* AsIteration() { return this; }
// A 'continue' targets this entry iff it names the same AST statement.
204     virtual bool IsContinueTarget(Statement* target) {
205       return statement() == target;
// Label bound at the loop's back edge; 'continue' jumps here.
208     Label* continue_label() { return &continue_label_; }
211     Label continue_label_;
214 // A nested block statement.
214   // A nested block statement.
215   class NestedBlock : public Breakable {
217     NestedBlock(FullCodeGenerator* codegen, Block* block)
218         : Breakable(codegen, block) {
220     virtual ~NestedBlock() {}
// On exit, a block with its own scope presumably contributes one
// context-chain link to unwind — elided body; TODO confirm.
222     virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
223       if (statement()->AsBlock()->scope() != NULL) {
230 // The try block of a try/catch statement.
230   // The try block of a try/catch statement.
231   class TryCatch : public NestedStatement {
// Stack slots occupied by the try-block's handler entry.
233     static const int kElementCount = TryBlockConstant::kElementCount;
235     explicit TryCatch(FullCodeGenerator* codegen) : NestedStatement(codegen) {}
236     virtual ~TryCatch() {}
// Leaving the try block drops the handler's kElementCount stack slots.
238     virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
239       *stack_depth += kElementCount;
242     virtual NestedStatement* AccumulateDepth(int* stack_depth) {
243       *stack_depth += kElementCount;
248 // The try block of a try/finally statement.
248   // The try block of a try/finally statement.
249   class TryFinally : public NestedStatement {
// Stack slots occupied by the try-block's handler entry.
251     static const int kElementCount = TryBlockConstant::kElementCount;
// |finally_entry| is the label of the finally clause to run on exit.
253     TryFinally(FullCodeGenerator* codegen, Label* finally_entry)
254         : NestedStatement(codegen), finally_entry_(finally_entry) {
256     virtual ~TryFinally() {}
// Non-inline: exiting must also emit a call into the finally block.
258     virtual NestedStatement* Exit(int* stack_depth, int* context_length);
259     virtual NestedStatement* AccumulateDepth(int* stack_depth) {
260       *stack_depth += kElementCount;
265     Label* finally_entry_;
268 // The finally block of a try/finally statement.
268   // The finally block of a try/finally statement.
269   class Finally : public NestedStatement {
// Stack slots the finally block keeps live while it runs.
271     static const int kElementCount = 3;
273     explicit Finally(FullCodeGenerator* codegen) : NestedStatement(codegen) {}
274     virtual ~Finally() {}
276     virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
277       *stack_depth += kElementCount;
280     virtual NestedStatement* AccumulateDepth(int* stack_depth) {
281       *stack_depth += kElementCount;
286 // The body of a for/in loop.
286   // The body of a for/in loop.
287   class ForIn : public Iteration {
// Stack slots holding the for/in iteration state.
289     static const int kElementCount = 5;
291     ForIn(FullCodeGenerator* codegen, ForInStatement* statement)
292         : Iteration(codegen, statement) {
296     virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
297       *stack_depth += kElementCount;
300     virtual NestedStatement* AccumulateDepth(int* stack_depth) {
301       *stack_depth += kElementCount;
307 // The body of a with or catch.
307   // The body of a with or catch.
308   class WithOrCatch : public NestedStatement {
310     explicit WithOrCatch(FullCodeGenerator* codegen)
311         : NestedStatement(codegen) {
313     virtual ~WithOrCatch() {}
// Exiting presumably unwinds one context-chain link — elided body;
// TODO confirm against full source.
315     virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
321   // A platform-specific utility to overwrite the accumulator register
322   // with a GC-safe value.
323   void ClearAccumulator();
325   // Determine whether or not to inline the smi case for the given
327   bool ShouldInlineSmiCase(Token::Value op);
329   // Helper function to convert a pure value into a test context. The value
330   // is expected on the stack or the accumulator, depending on the platform.
331   // See the platform-specific implementation for details.
332   void DoTest(Expression* condition,
335               Label* fall_through);
336   void DoTest(const TestContext* context);
338   // Helper function to split control flow and avoid a branch to the
339   // fall-through label if it is set up.
340 #if V8_TARGET_ARCH_MIPS
341   void Split(Condition cc,
346              Label* fall_through);
347 #elif V8_TARGET_ARCH_MIPS64
348   void Split(Condition cc,
353              Label* fall_through);
354 #elif V8_TARGET_ARCH_PPC
355   void Split(Condition cc, Label* if_true, Label* if_false, Label* fall_through,
357 #else  // All other arch.
358   void Split(Condition cc,
361              Label* fall_through);
364   // Load the value of a known (PARAMETER, LOCAL, or CONTEXT) variable into
365   // a register. Emits a context chain walk if necessary (so does
366   // SetVar) so avoid calling both on the same variable.
367   void GetVar(Register destination, Variable* var);
369   // Assign to a known (PARAMETER, LOCAL, or CONTEXT) variable. If it's in
370   // the context, the write barrier will be emitted and source, scratch0,
371   // scratch1 will be clobbered. Emits a context chain walk if necessary
372   // (so does GetVar) so avoid calling both on the same variable.
373   void SetVar(Variable* var,
378   // An operand used to read/write a stack-allocated (PARAMETER or LOCAL)
379   // variable. Writing does not need the write barrier.
380   MemOperand StackOperand(Variable* var);
382   // An operand used to read/write a known (PARAMETER, LOCAL, or CONTEXT)
383   // variable. May emit code to traverse the context chain, loading the
384   // found context into the scratch register. Writing to this operand will
385   // need the write barrier if location is CONTEXT.
386   MemOperand VarOperand(Variable* var, Register scratch);
// Visit |expr| for its side effects only; the result is discarded.
388   void VisitForEffect(Expression* expr) {
389     EffectContext context(this);
391     PrepareForBailout(expr, NO_REGISTERS);
// Visit |expr| leaving its value in the accumulator register.
394   void VisitForAccumulatorValue(Expression* expr) {
395     AccumulatorValueContext context(this);
397     PrepareForBailout(expr, TOS_REG);
// Visit |expr| leaving its value pushed on the stack.
400   void VisitForStackValue(Expression* expr) {
401     StackValueContext context(this);
403     PrepareForBailout(expr, NO_REGISTERS);
// Visit |expr| as a branch condition, jumping to the given labels.
406   void VisitForControl(Expression* expr,
409                        Label* fall_through) {
410     TestContext context(this, expr, if_true, if_false, fall_through);
412     // For test contexts, we prepare for bailout before branching, not at
413     // the end of the entire expression. This happens as part of visiting
417   void VisitInDuplicateContext(Expression* expr);
419   void VisitDeclarations(ZoneList<Declaration*>* declarations) OVERRIDE;
420   void DeclareModules(Handle<FixedArray> descriptions);
421   void DeclareGlobals(Handle<FixedArray> pairs);
422   int DeclareGlobalsFlags();
424   // Generate code to allocate all (including nested) modules and contexts.
425   // Because of recursive linking and the presence of module alias declarations,
426   // this has to be a separate pass _before_ populating or executing any module.
427   void AllocateModules(ZoneList<Declaration*>* declarations);
429   // Generate code to create an iterator result object. The "value" property is
430   // set to a value popped from the stack, and "done" is set according to the
431   // argument. The result object is left in the result register.
432   void EmitCreateIteratorResult(bool done);
434   // Try to perform a comparison as a fast inlined literal compare if
435   // the operands allow it. Returns true if the compare operations
436   // has been matched and all code generated; false otherwise.
437   bool TryLiteralCompare(CompareOperation* compare);
439   // Platform-specific code for comparing the type of a value with
440   // a given literal string.
441   void EmitLiteralCompareTypeof(Expression* expr,
442                                 Expression* sub_expr,
443                                 Handle<String> check);
445   // Platform-specific code for equality comparison with a nil-like value.
446   void EmitLiteralCompareNil(CompareOperation* expr,
447                              Expression* sub_expr,
// Record a deopt point for |node| / |id| with the given register state.
451   void PrepareForBailout(Expression* node, State state);
452   void PrepareForBailoutForId(BailoutId id, State state);
454   // Feedback slot support. The feedback vector will be cleared during gc and
455   // collected by the type-feedback oracle.
456   Handle<TypeFeedbackVector> FeedbackVector() const {
457     return info_->feedback_vector();
459   void EnsureSlotContainsAllocationSite(FeedbackVectorSlot slot);
460   void EnsureSlotContainsAllocationSite(FeedbackVectorICSlot slot);
462   // Returns a smi for the index into the FixedArray that backs the feedback
464   Smi* SmiFromSlot(FeedbackVectorSlot slot) const {
465     return Smi::FromInt(FeedbackVector()->GetIndex(slot));
468   Smi* SmiFromSlot(FeedbackVectorICSlot slot) const {
469     return Smi::FromInt(FeedbackVector()->GetIndex(slot));
472   // Record a call's return site offset, used to rebuild the frame if the
473   // called function was inlined at the site.
474   void RecordJSReturnSite(Call* call);
476   // Prepare for bailout before a test (or compare) and branch. If
477   // should_normalize, then the following comparison will not handle the
478   // canonical JS true value so we will insert a (dead) test against true at
479   // the actual bailout target from the optimized code. If not
480   // should_normalize, the true and false labels are ignored.
481   void PrepareForBailoutBeforeSplit(Expression* expr,
482                                     bool should_normalize,
486   // If enabled, emit debug code for checking that the current context is
487   // neither a with nor a catch context.
488   void EmitDebugCheckDeclarationContext(Variable* variable);
490   // This is meant to be called at loop back edges, |back_edge_target| is
491   // the jump target of the back edge and is used to approximate the amount
492   // of code inside the loop.
493   void EmitBackEdgeBookkeeping(IterationStatement* stmt,
494                                Label* back_edge_target);
495   // Record the OSR AST id corresponding to a back edge in the code.
496   void RecordBackEdge(BailoutId osr_ast_id);
497   // Emit a table of back edge ids, pcs and loop depths into the code stream.
498   // Return the offset of the start of the table.
499   unsigned EmitBackEdgeTable();
// Maintain the counter that drives optimization/OSR decisions.
501   void EmitProfilingCounterDecrement(int delta);
502   void EmitProfilingCounterReset();
504   // Emit code to pop values from the stack associated with nested statements
505   // like try/catch, try/finally, etc, running the finallies and unwinding the
506   // handlers as needed.
507   void EmitUnwindBeforeReturn();
509   // Platform-specific return sequence
510   void EmitReturnSequence();
512   // Platform-specific code sequences for calls
513   void EmitCall(Call* expr, CallICState::CallType = CallICState::FUNCTION);
514   void EmitSuperConstructorCall(Call* expr);
515   void EmitCallWithLoadIC(Call* expr);
516   void EmitSuperCallWithLoadIC(Call* expr);
517   void EmitKeyedCallWithLoadIC(Call* expr, Expression* key);
518   void EmitKeyedSuperCallWithLoadIC(Call* expr);
// Intrinsics the full code generator lowers inline instead of calling
// the runtime; one Emit##Name method is declared per entry below.
520 #define FOR_EACH_FULL_CODE_INTRINSIC(F) \
522   F(IsNonNegativeSmi)                   \
528   F(DefaultConstructorCallSuper)        \
534   F(StringCharFromCode)                 \
536   F(OneByteSeqStringSetChar)            \
537   F(TwoByteSeqStringSetChar)            \
541   F(IsUndetectableObject)               \
543   F(IsStringWrapperSafeForDefaultValueOf) \
546   F(HasCachedArrayIndex)                \
547   F(GetCachedArrayIndex)                \
548   F(FastOneByteArrayJoin)               \
551   F(DebugBreakInOptimizedCode)          \
553   F(StringCharCodeAt)                   \
558   F(RegExpConstructResult)              \
563 #define GENERATOR_DECLARATION(Name) void Emit##Name(CallRuntime* call);
564   FOR_EACH_FULL_CODE_INTRINSIC(GENERATOR_DECLARATION)
565 #undef GENERATOR_DECLARATION
567   // Platform-specific code for resuming generators.
568   void EmitGeneratorResume(Expression *generator,
570                            JSGeneratorObject::ResumeMode resume_mode);
572   // Platform-specific code for loading variables.
573   void EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
574                                      TypeofState typeof_state,
576   MemOperand ContextSlotOperandCheckExtensions(Variable* var, Label* slow);
577   void EmitDynamicLookupFastCase(VariableProxy* proxy,
578                                  TypeofState typeof_state,
581   void EmitVariableLoad(VariableProxy* proxy);
583   void EmitAccessor(Expression* expression);
585   // Expects the arguments and the function already pushed.
586   void EmitResolvePossiblyDirectEval(int arg_count);
588   // Platform-specific support for allocating a new closure based on
589   // the given function info.
590   void EmitNewClosure(Handle<SharedFunctionInfo> info, bool pretenure);
592   // Platform-specific support for compiling assignments.
594   // Left-hand side can only be a property, a global or a (parameter or local)
600     NAMED_SUPER_PROPERTY,
// Classify an assignment target: NULL means a plain variable; otherwise
// named vs. keyed, crossed with ordinary vs. super access.
604   static LhsKind GetAssignType(Property* property) {
605     if (property == NULL) return VARIABLE;
606     bool super_access = property->IsSuperAccess();
607     return (property->key()->IsPropertyName())
608                ? (super_access ? NAMED_SUPER_PROPERTY : NAMED_PROPERTY)
609                : (super_access ? KEYED_SUPER_PROPERTY : KEYED_PROPERTY);
612   // Load a value from a named property.
613   // The receiver is left on the stack by the IC.
614   void EmitNamedPropertyLoad(Property* expr);
616   // Load a value from super.named property.
617   // Expect receiver ('this' value) and home_object on the stack.
618   void EmitNamedSuperPropertyLoad(Property* expr);
620   // Load a value from super[keyed] property.
621   // Expect receiver ('this' value), home_object and key on the stack.
622   void EmitKeyedSuperPropertyLoad(Property* expr);
624   // Load a value from a keyed property.
625   // The receiver and the key is left on the stack by the IC.
626   void EmitKeyedPropertyLoad(Property* expr);
628   // Adds the properties to the class (function) object and to its prototype.
629   // Expects the class (function) in the accumulator. The class (function) is
630   // in the accumulator after installing all the properties.
631   void EmitClassDefineProperties(ClassLiteral* lit);
633   // Pushes the property key as a Name on the stack.
634   void EmitPropertyKey(ObjectLiteralProperty* property, BailoutId bailout_id);
636   // Apply the compound assignment operator. Expects the left operand on top
637   // of the stack and the right one in the accumulator.
638   void EmitBinaryOp(BinaryOperation* expr, Token::Value op);
640   // Helper functions for generating inlined smi code for certain
641   // binary operations.
642   void EmitInlineSmiBinaryOp(BinaryOperation* expr,
647   // Assign to the given expression as if via '='. The right-hand-side value
648   // is expected in the accumulator.
649   void EmitAssignment(Expression* expr);
651   // Complete a variable assignment. The right-hand-side value is expected
652   // in the accumulator.
653   void EmitVariableAssignment(Variable* var,
656   // Helper functions to EmitVariableAssignment
657   void EmitStoreToStackLocalOrContextSlot(Variable* var,
658                                           MemOperand location);
660   // Complete a named property assignment. The receiver is expected on top
661   // of the stack and the right-hand-side value in the accumulator.
662   void EmitNamedPropertyAssignment(Assignment* expr);
664   // Complete a super named property assignment. The right-hand-side value
665   // is expected in accumulator.
666   void EmitNamedSuperPropertyStore(Property* prop);
668   // Complete a keyed super property assignment. The right-hand-side value
669   // is expected in accumulator.
670   void EmitKeyedSuperPropertyStore(Property* prop);
672   // Complete a keyed property assignment. The receiver and key are
673   // expected on top of the stack and the right-hand-side value in the
675   void EmitKeyedPropertyAssignment(Assignment* expr);
677   void EmitLoadHomeObject(SuperReference* expr);
// Delegates to the AST-side predicate; kept here for codegen callers.
679   static bool NeedsHomeObject(Expression* expr) {
680     return FunctionLiteral::NeedsHomeObject(expr);
683   // Adds the [[HomeObject]] to |initializer| if it is a FunctionLiteral.
684   // The value of the initializer is expected to be at the top of the stack.
685   // |offset| is the offset in the stack where the home object can be found.
686   void EmitSetHomeObjectIfNeeded(Expression* initializer, int offset);
688   void EmitLoadSuperConstructor();
// IC call helpers; |id| ties the call site to type feedback.
690   void CallIC(Handle<Code> code,
691               TypeFeedbackId id = TypeFeedbackId::None());
693   void CallLoadIC(ContextualMode mode,
694                   TypeFeedbackId id = TypeFeedbackId::None());
695   void CallGlobalLoadIC(Handle<String> name);
696   void CallStoreIC(TypeFeedbackId id = TypeFeedbackId::None());
// Source-position bookkeeping for the debugger and stack traces.
698   void SetFunctionPosition(FunctionLiteral* fun);
699   void SetReturnPosition(FunctionLiteral* fun);
700   void SetStatementPosition(Statement* stmt);
701   void SetExpressionPosition(Expression* expr);
702   void SetSourcePosition(int pos);
704   // Non-local control flow support.
705   void EnterTryBlock(int handler_index, Label* handler);
706   void ExitTryBlock(int handler_index);
707   void EnterFinallyBlock();
708   void ExitFinallyBlock();
710   // Loop nesting counter.
711   int loop_depth() { return loop_depth_; }
712   void increment_loop_depth() { loop_depth_++; }
713   void decrement_loop_depth() {
714     DCHECK(loop_depth_ > 0);
718   MacroAssembler* masm() const { return masm_; }
// Current expression context (effect/value/test); see classes below.
720   class ExpressionContext;
721   const ExpressionContext* context() { return context_; }
722   void set_new_context(const ExpressionContext* context) { context_ = context; }
// Accessors forwarding to the CompilationInfo / current scope.
724   Handle<Script> script() { return info_->script(); }
725   bool is_eval() { return info_->is_eval(); }
726   bool is_native() { return info_->is_native(); }
727   LanguageMode language_mode() { return function()->language_mode(); }
728   bool is_simple_parameter_list() { return info_->is_simple_parameter_list(); }
729   FunctionLiteral* function() { return info_->function(); }
730   Scope* scope() { return scope_; }
// Platform-defined registers holding the result / current context.
732   static Register result_register();
733   static Register context_register();
735   // Set fields in the stack frame. Offsets are the frame pointer relative
736   // offsets defined in, e.g., StandardFrameConstants.
737   void StoreToFrameField(int frame_offset, Register value);
739   // Load a value from the current context. Indices are defined as an enum
740   // in v8::internal::Context.
741   void LoadContextField(Register dst, int context_index);
743   // Push the function argument for the runtime functions PushWithContext
744   // and PushCatchContext.
745   void PushFunctionArgumentForContextAllocation();
747   // AST node visit functions.
748 #define DECLARE_VISIT(type) virtual void Visit##type(type* node) OVERRIDE;
749   AST_NODE_LIST(DECLARE_VISIT)
// Specialized visitors for the forms of BinaryOperation.
752   void VisitComma(BinaryOperation* expr);
753   void VisitLogicalExpression(BinaryOperation* expr);
754   void VisitArithmeticExpression(BinaryOperation* expr);
756   void VisitForTypeofValue(Expression* expr);
// Attach deopt / type-feedback metadata to the finished Code object.
759   void PopulateDeoptimizationData(Handle<Code> code);
760   void PopulateTypeFeedbackInfo(Handle<Code> code);
762   Handle<HandlerTable> handler_table() { return handler_table_; }
// One deopt point: AST id plus StateField/PcField-encoded pc_and_state.
764   struct BailoutEntry {
766     unsigned pc_and_state;
// One loop back edge recorded for OSR; serialized by EmitBackEdgeTable().
769   struct BackEdgeEntry {
// Abstract base for the four expression contexts (effect, accumulator
// value, stack value, test). Installs itself as the codegen's current
// context on construction and restores the previous one on destruction.
775   class ExpressionContext BASE_EMBEDDED {
777     explicit ExpressionContext(FullCodeGenerator* codegen)
778         : masm_(codegen->masm()), old_(codegen->context()), codegen_(codegen) {
779       codegen->set_new_context(this);
782     virtual ~ExpressionContext() {
783       codegen_->set_new_context(old_);
786     Isolate* isolate() const { return codegen_->isolate(); }
788     // Convert constant control flow (true or false) to the result expected for
789     // this expression context.
790     virtual void Plug(bool flag) const = 0;
792     // Emit code to convert a pure value (in a register, known variable
793     // location, as a literal, or on top of the stack) into the result
794     // expected according to this expression context.
795     virtual void Plug(Register reg) const = 0;
796     virtual void Plug(Variable* var) const = 0;
797     virtual void Plug(Handle<Object> lit) const = 0;
798     virtual void Plug(Heap::RootListIndex index) const = 0;
799     virtual void PlugTOS() const = 0;
801     // Emit code to convert pure control flow to a pair of unbound labels into
802     // the result expected according to this expression context. The
803     // implementation will bind both labels unless it's a TestContext, which
804     // won't bind them at this point.
805     virtual void Plug(Label* materialize_true,
806                       Label* materialize_false) const = 0;
808     // Emit code to discard count elements from the top of stack, then convert
809     // a pure value into the result expected according to this expression
811     virtual void DropAndPlug(int count, Register reg) const = 0;
813     // Set up branch labels for a test expression. The three Label** parameters
814     // are output parameters.
815     virtual void PrepareTest(Label* materialize_true,
816                              Label* materialize_false,
819                              Label** fall_through) const = 0;
821     // Returns true if we are evaluating only for side effects (i.e. if the
822     // result will be discarded).
823     virtual bool IsEffect() const { return false; }
825     // Returns true if we are evaluating for the value (in accu/on stack).
826     virtual bool IsAccumulatorValue() const { return false; }
827     virtual bool IsStackValue() const { return false; }
829     // Returns true if we are branching on the value rather than materializing
830     // it. Only used for asserts.
831     virtual bool IsTest() const { return false; }
834     FullCodeGenerator* codegen() const { return codegen_; }
835     MacroAssembler* masm() const { return masm_; }
836     MacroAssembler* masm_;
839     const ExpressionContext* old_;
840     FullCodeGenerator* codegen_;
// Context for expressions whose value is wanted in the accumulator.
843   class AccumulatorValueContext : public ExpressionContext {
845     explicit AccumulatorValueContext(FullCodeGenerator* codegen)
846         : ExpressionContext(codegen) { }
848     virtual void Plug(bool flag) const;
849     virtual void Plug(Register reg) const;
850     virtual void Plug(Label* materialize_true, Label* materialize_false) const;
851     virtual void Plug(Variable* var) const;
852     virtual void Plug(Handle<Object> lit) const;
853     virtual void Plug(Heap::RootListIndex) const;
854     virtual void PlugTOS() const;
855     virtual void DropAndPlug(int count, Register reg) const;
856     virtual void PrepareTest(Label* materialize_true,
857                              Label* materialize_false,
860                              Label** fall_through) const;
861     virtual bool IsAccumulatorValue() const { return true; }
// Context for expressions whose value is wanted on top of the stack.
864   class StackValueContext : public ExpressionContext {
866     explicit StackValueContext(FullCodeGenerator* codegen)
867         : ExpressionContext(codegen) { }
869     virtual void Plug(bool flag) const;
870     virtual void Plug(Register reg) const;
871     virtual void Plug(Label* materialize_true, Label* materialize_false) const;
872     virtual void Plug(Variable* var) const;
873     virtual void Plug(Handle<Object> lit) const;
874     virtual void Plug(Heap::RootListIndex) const;
875     virtual void PlugTOS() const;
876     virtual void DropAndPlug(int count, Register reg) const;
877     virtual void PrepareTest(Label* materialize_true,
878                              Label* materialize_false,
881                              Label** fall_through) const;
882     virtual bool IsStackValue() const { return true; }
// Context for expressions used as branch conditions: the value is
// consumed as control flow via true/false/fall-through labels instead
// of being materialized.
885   class TestContext : public ExpressionContext {
887     TestContext(FullCodeGenerator* codegen,
888                 Expression* condition,
892         : ExpressionContext(codegen),
893           condition_(condition),
894           true_label_(true_label),
895           false_label_(false_label),
896           fall_through_(fall_through) { }
// Checked downcast; guarded by the IsTest() assert below.
898     static const TestContext* cast(const ExpressionContext* context) {
899       DCHECK(context->IsTest());
900       return reinterpret_cast<const TestContext*>(context);
903     Expression* condition() const { return condition_; }
904     Label* true_label() const { return true_label_; }
905     Label* false_label() const { return false_label_; }
906     Label* fall_through() const { return fall_through_; }
908     virtual void Plug(bool flag) const;
909     virtual void Plug(Register reg) const;
910     virtual void Plug(Label* materialize_true, Label* materialize_false) const;
911     virtual void Plug(Variable* var) const;
912     virtual void Plug(Handle<Object> lit) const;
913     virtual void Plug(Heap::RootListIndex) const;
914     virtual void PlugTOS() const;
915     virtual void DropAndPlug(int count, Register reg) const;
916     virtual void PrepareTest(Label* materialize_true,
917                              Label* materialize_false,
920                              Label** fall_through) const;
921     virtual bool IsTest() const { return true; }
924     Expression* condition_;
927     Label* fall_through_;
// Context for expressions evaluated only for side effects; the value
// is discarded.
930   class EffectContext : public ExpressionContext {
932     explicit EffectContext(FullCodeGenerator* codegen)
933         : ExpressionContext(codegen) { }
935     virtual void Plug(bool flag) const;
936     virtual void Plug(Register reg) const;
937     virtual void Plug(Label* materialize_true, Label* materialize_false) const;
938     virtual void Plug(Variable* var) const;
939     virtual void Plug(Handle<Object> lit) const;
940     virtual void Plug(Heap::RootListIndex) const;
941     virtual void PlugTOS() const;
942     virtual void DropAndPlug(int count, Register reg) const;
943     virtual void PrepareTest(Label* materialize_true,
944                              Label* materialize_false,
947                              Label** fall_through) const;
948     virtual bool IsEffect() const { return true; }
// RAII helper: enters a block's scope (allocating a context if the
// block has one) on construction and leaves it on destruction.
951   class EnterBlockScopeIfNeeded {
953     EnterBlockScopeIfNeeded(FullCodeGenerator* codegen, Scope* scope,
954                             BailoutId entry_id, BailoutId declarations_id,
956     ~EnterBlockScopeIfNeeded();
959     MacroAssembler* masm() const { return codegen_->masm(); }
961     FullCodeGenerator* codegen_;
// --- FullCodeGenerator instance state ---
967   MacroAssembler* masm_;
968   CompilationInfo* info_;
// Top of the NestedStatement stack maintained via RAII (see above).
971   NestedStatement* nesting_stack_;
973   ZoneList<Handle<Object> >* globals_;
974   Handle<FixedArray> modules_;
// Current expression context; swapped by the ExpressionContext ctor/dtor.
976   const ExpressionContext* context_;
977   ZoneList<BailoutEntry> bailout_entries_;
978   ZoneList<BackEdgeEntry> back_edges_;
980   Handle<HandlerTable> handler_table_;
981   Handle<Cell> profiling_counter_;
982   bool generate_debug_code_;
// NestedStatement's ctor/dtor touch nesting_stack_ directly.
984   friend class NestedStatement;
986   DEFINE_AST_VISITOR_SUBCLASS_MEMBERS();
987   DISALLOW_COPY_AND_ASSIGN(FullCodeGenerator);
991 // A map from property names to getter/setter pairs allocated in the zone.
991 // A map from property names to getter/setter pairs allocated in the zone.
992 class AccessorTable: public TemplateHashMap<Literal,
993                                             ObjectLiteral::Accessors,
994                                             ZoneAllocationPolicy> {
996   explicit AccessorTable(Zone* zone) :
997       TemplateHashMap<Literal, ObjectLiteral::Accessors,
998                       ZoneAllocationPolicy>(Literal::Match,
999                                             ZoneAllocationPolicy(zone)),
// Find-or-insert: a fresh zone-allocated Accessors pair is created on
// first lookup of a literal key.
1002   Iterator lookup(Literal* literal) {
1003     Iterator it = find(literal, true, ZoneAllocationPolicy(zone_));
1004     if (it->second == NULL) it->second = new(zone_) ObjectLiteral::Accessors();
// Read-only view over the back-edge table that EmitBackEdgeTable()
// serialized into a function's unoptimized code: a uint32 length
// followed by {ast_id, pc_offset, loop_depth} uint32 triples. The
// DisallowHeapAllocation parameter pins the Code object while the raw
// addresses below are in use.
1013 class BackEdgeTable {
1015   BackEdgeTable(Code* code, DisallowHeapAllocation* required) {
1016     DCHECK(code->kind() == Code::FUNCTION);
1017     instruction_start_ = code->instruction_start();
1018     Address table_address = instruction_start_ + code->back_edge_table_offset();
1019     length_ = Memory::uint32_at(table_address);
1020     start_ = table_address + kTableLengthSize;
1023   uint32_t length() { return length_; }
// Per-entry accessors, reading fields at the offsets defined below.
1025   BailoutId ast_id(uint32_t index) {
1026     return BailoutId(static_cast<int>(
1027         Memory::uint32_at(entry_at(index) + kAstIdOffset)));
1030   uint32_t loop_depth(uint32_t index) {
1031     return Memory::uint32_at(entry_at(index) + kLoopDepthOffset);
1034   uint32_t pc_offset(uint32_t index) {
1035     return Memory::uint32_at(entry_at(index) + kPcOffsetOffset);
1038   Address pc(uint32_t index) {
1039     return instruction_start_ + pc_offset(index);
1042   enum BackEdgeState {
1044     ON_STACK_REPLACEMENT,
1045     OSR_AFTER_STACK_CHECK
1048   // Increase allowed loop nesting level by one and patch those matching loops.
1049   static void Patch(Isolate* isolate, Code* unoptimized_code);
1051   // Patch the back edge to the target state, provided the correct callee.
1052   static void PatchAt(Code* unoptimized_code,
1054                       BackEdgeState target_state,
1055                       Code* replacement_code);
1057   // Change all patched back edges back to normal interrupts.
1058   static void Revert(Isolate* isolate,
1059                      Code* unoptimized_code);
1061   // Change a back edge patched for on-stack replacement to perform a
1062   // stack check first.
1063   static void AddStackCheck(Handle<Code> code, uint32_t pc_offset);
1065   // Revert the patch by AddStackCheck.
1066   static void RemoveStackCheck(Handle<Code> code, uint32_t pc_offset);
1068   // Return the current patch state of the back edge.
1069   static BackEdgeState GetBackEdgeState(Isolate* isolate,
1070                                         Code* unoptimized_code,
1074   // Verify that all back edges of a certain loop depth are patched.
1075   static bool Verify(Isolate* isolate, Code* unoptimized_code);
// Address of entry |index|; bounds-checked in debug builds.
1079   Address entry_at(uint32_t index) {
1080     DCHECK(index < length_);
1081     return start_ + index * kEntrySize;
// Table layout constants (all fields are 32-bit).
1084   static const int kTableLengthSize = kIntSize;
1085   static const int kAstIdOffset = 0 * kIntSize;
1086   static const int kPcOffsetOffset = 1 * kIntSize;
1087   static const int kLoopDepthOffset = 2 * kIntSize;
1088   static const int kEntrySize = 3 * kIntSize;
1091   Address instruction_start_;
1096 } } // namespace v8::internal
1098 #endif // V8_FULL_CODEGEN_H_