1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #ifndef V8_FULL_CODEGEN_H_
6 #define V8_FULL_CODEGEN_H_
10 #include "src/allocation.h"
11 #include "src/assert-scope.h"
13 #include "src/bit-vector.h"
14 #include "src/code-stubs.h"
15 #include "src/codegen.h"
16 #include "src/compiler.h"
17 #include "src/globals.h"
18 #include "src/objects.h"
23 // Forward declarations.
26 // AST node visitor which can tell whether a given statement will be breakable
27 // when the code is compiled by the full compiler in the debugger. This means
28 // that there will be an IC (load/store/call) in the code generated for the
29 // debugger to piggyback on.
30 class BreakableStatementChecker: public AstVisitor {
32 explicit BreakableStatementChecker(Zone* zone) : is_breakable_(false) {
33 InitializeAstVisitor(zone);
// Visit the given node; the result is reported through is_breakable().
36 void Check(Statement* stmt);
// NOTE(review): parameter is an Expression but is named |stmt| — consider renaming.
37 void Check(Expression* stmt);
// True iff a previously Check()ed node contained a breakable location.
39 bool is_breakable() { return is_breakable_; }
42 // AST node visit functions.
43 #define DECLARE_VISIT(type) virtual void Visit##type(type* node) OVERRIDE;
44 AST_NODE_LIST(DECLARE_VISIT)
49 DEFINE_AST_VISITOR_SUBCLASS_MEMBERS();
50 DISALLOW_COPY_AND_ASSIGN(BreakableStatementChecker);
54 // -----------------------------------------------------------------------------
55 // Full code generator.
57 class FullCodeGenerator: public AstVisitor {
// Construct a generator emitting into |masm| for the function described by
// |info|. The bailout-entry list is pre-sized only when deoptimization
// support is requested. Stubs are compiled elsewhere (see DCHECK below).
64 FullCodeGenerator(MacroAssembler* masm, CompilationInfo* info)
67 scope_(info->scope()),
72 bailout_entries_(info->HasDeoptimizationSupport()
73 ? info->function()->ast_node_count() : 0,
75 back_edges_(2, info->zone()),
77 DCHECK(!info->IsStub());
// Entry point: compile |info|'s function with the full (non-optimizing)
// code generator. Returns false on failure.
83 static bool MakeCode(CompilationInfo* info);
85 // Encode state and pc-offset as a BitField<type, start, size>.
86 // Only use 30 bits because we encode the result as a smi.
87 class StateField : public BitField<State, 0, 1> { };
88 class PcField : public BitField<unsigned, 1, 30-1> { };
// Printable name for a State value, for debugging/trace output.
90 static const char* State2String(State state) {
92 case NO_REGISTERS: return "NO_REGISTERS";
93 case TOS_REG: return "TOS_REG";
// Cap on the per-back-edge weight used for interrupt/OSR budgeting
// (see EmitBackEdgeBookkeeping below).
99 static const int kMaxBackEdgeWeight = 127;
101 // Platform-specific code size multiplier.
// Relative estimate of generated-code size per AST node for each target;
// the kBoot variant presumably applies while bootstrapping/snapshotting —
// TODO(review): confirm against the users of these constants.
102 #if V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X87
103 static const int kCodeSizeMultiplier = 105;
104 static const int kBootCodeSizeMultiplier = 100;
105 #elif V8_TARGET_ARCH_X64
106 static const int kCodeSizeMultiplier = 170;
107 static const int kBootCodeSizeMultiplier = 140;
108 #elif V8_TARGET_ARCH_ARM
109 static const int kCodeSizeMultiplier = 149;
110 static const int kBootCodeSizeMultiplier = 110;
111 #elif V8_TARGET_ARCH_ARM64
112 // TODO(all): Copied ARM value. Check this is sensible for ARM64.
113 static const int kCodeSizeMultiplier = 149;
114 static const int kBootCodeSizeMultiplier = 110;
115 #elif V8_TARGET_ARCH_MIPS
116 static const int kCodeSizeMultiplier = 149;
117 static const int kBootCodeSizeMultiplier = 120;
118 #elif V8_TARGET_ARCH_MIPS64
119 static const int kCodeSizeMultiplier = 149;
120 static const int kBootCodeSizeMultiplier = 120;
// Fail the build rather than silently using a wrong multiplier.
122 #error Unsupported target architecture.
// An entry in the codegen's nesting stack: statements (blocks, loops,
// try/catch, try/finally, with/catch bodies) that need cleanup code when
// exited non-locally via break, continue, or return.
131 class NestedStatement BASE_EMBEDDED {
133 explicit NestedStatement(FullCodeGenerator* codegen) : codegen_(codegen) {
134 // Link into codegen's nesting stack.
135 previous_ = codegen->nesting_stack_;
136 codegen->nesting_stack_ = this;
138 virtual ~NestedStatement() {
139 // Unlink from codegen's nesting stack.
140 DCHECK_EQ(this, codegen_->nesting_stack_);
141 codegen_->nesting_stack_ = previous_;
// Safe downcasts; overridden by the Breakable/Iteration subclasses.
144 virtual Breakable* AsBreakable() { return NULL; }
145 virtual Iteration* AsIteration() { return NULL; }
147 virtual bool IsContinueTarget(Statement* target) { return false; }
148 virtual bool IsBreakTarget(Statement* target) { return false; }
150 // Notify the statement that we are exiting it via break, continue, or
151 // return and give it a chance to generate cleanup code. Return the
152 // next outer statement in the nesting stack. We accumulate in
153 // *stack_depth the amount to drop the stack and in *context_length the
154 // number of context chain links to unwind as we traverse the nesting
155 // stack from an exit to its target.
156 virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
161 MacroAssembler* masm() { return codegen_->masm(); }
163 FullCodeGenerator* codegen_;
164 NestedStatement* previous_;
167 DISALLOW_COPY_AND_ASSIGN(NestedStatement);
170 // A breakable statement such as a block.
171 class Breakable : public NestedStatement {
173 Breakable(FullCodeGenerator* codegen, BreakableStatement* statement)
174 : NestedStatement(codegen), statement_(statement) {
176 virtual ~Breakable() {}
178 virtual Breakable* AsBreakable() { return this; }
// A 'break' targets this entry iff it names our AST statement.
179 virtual bool IsBreakTarget(Statement* target) {
180 return statement() == target;
183 BreakableStatement* statement() { return statement_; }
// Label that 'break' jumps to; bound by the statement's codegen.
184 Label* break_label() { return &break_label_; }
187 BreakableStatement* statement_;
191 // An iteration statement such as a while, for, or do loop.
192 class Iteration : public Breakable {
194 Iteration(FullCodeGenerator* codegen, IterationStatement* statement)
195 : Breakable(codegen, statement) {
197 virtual ~Iteration() {}
199 virtual Iteration* AsIteration() { return this; }
// A 'continue' targets this entry iff it names our AST statement.
200 virtual bool IsContinueTarget(Statement* target) {
201 return statement() == target;
// Label that 'continue' jumps to; bound by the loop's codegen.
204 Label* continue_label() { return &continue_label_; }
207 Label continue_label_;
210 // A nested block statement.
211 class NestedBlock : public Breakable {
213 NestedBlock(FullCodeGenerator* codegen, Block* block)
214 : Breakable(codegen, block) {
216 virtual ~NestedBlock() {}
// Exiting a scoped block unwinds one context-chain link (only blocks
// that introduced their own scope, hence the NULL check).
218 virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
219 if (statement()->AsBlock()->scope() != NULL) {
226 // The try block of a try/catch statement.
227 class TryCatch : public NestedStatement {
229 explicit TryCatch(FullCodeGenerator* codegen) : NestedStatement(codegen) {
231 virtual ~TryCatch() {}
// Platform-specific: unwinds the try handler on non-local exit.
233 virtual NestedStatement* Exit(int* stack_depth, int* context_length);
236 // The try block of a try/finally statement.
237 class TryFinally : public NestedStatement {
238 TryFinally(FullCodeGenerator* codegen, Label* finally_entry)
240 : NestedStatement(codegen), finally_entry_(finally_entry) {
242 virtual ~TryFinally() {}
// Platform-specific: runs the finally code (at finally_entry_) on exit.
244 virtual NestedStatement* Exit(int* stack_depth, int* context_length);
// Entry label of the associated finally block; not owned.
247 Label* finally_entry_;
250 // The finally block of a try/finally statement.
251 class Finally : public NestedStatement {
// Number of stack slots the finally block keeps live while it runs.
253 static const int kElementCount = 5;
255 explicit Finally(FullCodeGenerator* codegen) : NestedStatement(codegen) { }
256 virtual ~Finally() {}
// Exiting through a finally block drops its bookkeeping slots.
258 virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
259 *stack_depth += kElementCount;
264 // The body of a for/in loop.
265 class ForIn : public Iteration {
// Number of stack slots the for/in loop state occupies.
267 static const int kElementCount = 5;
269 ForIn(FullCodeGenerator* codegen, ForInStatement* statement)
270 : Iteration(codegen, statement) {
// Exiting a for/in loop drops its enumeration state from the stack.
274 virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
275 *stack_depth += kElementCount;
281 // The body of a with or catch.
282 class WithOrCatch : public NestedStatement {
284 explicit WithOrCatch(FullCodeGenerator* codegen)
285 : NestedStatement(codegen) {
287 virtual ~WithOrCatch() {}
// Exiting a with/catch body unwinds one context-chain link.
289 virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
295 // Type of a member function that generates inline code for a native function.
296 typedef void (FullCodeGenerator::*InlineFunctionGenerator)(CallRuntime* expr);
// Table of inline generators, indexed per INLINE_FUNCTION_LIST.
298 static const InlineFunctionGenerator kInlineFunctionGenerators[];
300 // A platform-specific utility to overwrite the accumulator register
301 // with a GC-safe value.
302 void ClearAccumulator();
304 // Determine whether or not to inline the smi case for the given
306 bool ShouldInlineSmiCase(Token::Value op);
308 // Helper function to convert a pure value into a test context. The value
309 // is expected on the stack or the accumulator, depending on the platform.
310 // See the platform-specific implementation for details.
311 void DoTest(Expression* condition,
314 Label* fall_through);
315 void DoTest(const TestContext* context);
317 // Helper function to split control flow and avoid a branch to the
318 // fall-through label if it is set up.
// MIPS variants take extra operands (elided here); see the
// platform-specific headers for the full signatures.
319 #if V8_TARGET_ARCH_MIPS
320 void Split(Condition cc,
325 Label* fall_through);
326 #elif V8_TARGET_ARCH_MIPS64
327 void Split(Condition cc,
332 Label* fall_through);
333 #else // All non-mips arch.
334 void Split(Condition cc,
337 Label* fall_through);
338 #endif // V8_TARGET_ARCH_MIPS
340 // Load the value of a known (PARAMETER, LOCAL, or CONTEXT) variable into
341 // a register. Emits a context chain walk if necessary (so does
342 // SetVar) so avoid calling both on the same variable.
343 void GetVar(Register destination, Variable* var);
345 // Assign to a known (PARAMETER, LOCAL, or CONTEXT) variable. If it's in
346 // the context, the write barrier will be emitted and source, scratch0,
347 // scratch1 will be clobbered. Emits a context chain walk if necessary
348 // (so does GetVar) so avoid calling both on the same variable.
349 void SetVar(Variable* var,
354 // An operand used to read/write a stack-allocated (PARAMETER or LOCAL)
355 // variable. Writing does not need the write barrier.
356 MemOperand StackOperand(Variable* var);
358 // An operand used to read/write a known (PARAMETER, LOCAL, or CONTEXT)
359 // variable. May emit code to traverse the context chain, loading the
360 // found context into the scratch register. Writing to this operand will
361 // need the write barrier if location is CONTEXT.
362 MemOperand VarOperand(Variable* var, Register scratch);
// Visit |expr| purely for its side effects (value discarded).
364 void VisitForEffect(Expression* expr) {
365 EffectContext context(this);
367 PrepareForBailout(expr, NO_REGISTERS);
// Visit |expr| leaving its value in the accumulator register.
370 void VisitForAccumulatorValue(Expression* expr) {
371 AccumulatorValueContext context(this);
373 PrepareForBailout(expr, TOS_REG);
// Visit |expr| leaving its value on top of the stack.
376 void VisitForStackValue(Expression* expr) {
377 StackValueContext context(this);
379 PrepareForBailout(expr, NO_REGISTERS);
// Visit |expr| as a branch condition, jumping to the given labels.
382 void VisitForControl(Expression* expr,
385 Label* fall_through) {
386 TestContext context(this, expr, if_true, if_false, fall_through);
388 // For test contexts, we prepare for bailout before branching, not at
389 // the end of the entire expression. This happens as part of visiting
393 void VisitInDuplicateContext(Expression* expr);
395 void VisitDeclarations(ZoneList<Declaration*>* declarations) OVERRIDE;
396 void DeclareModules(Handle<FixedArray> descriptions);
397 void DeclareGlobals(Handle<FixedArray> pairs);
398 int DeclareGlobalsFlags();
400 // Generate code to allocate all (including nested) modules and contexts.
401 // Because of recursive linking and the presence of module alias declarations,
402 // this has to be a separate pass _before_ populating or executing any module.
403 void AllocateModules(ZoneList<Declaration*>* declarations);
405 // Generate code to create an iterator result object. The "value" property is
406 // set to a value popped from the stack, and "done" is set according to the
407 // argument. The result object is left in the result register.
408 void EmitCreateIteratorResult(bool done);
410 // Try to perform a comparison as a fast inlined literal compare if
411 // the operands allow it. Returns true if the compare operation
412 // has been matched and all code generated; false otherwise.
413 bool TryLiteralCompare(CompareOperation* compare);
415 // Platform-specific code for comparing the type of a value with
416 // a given literal string.
417 void EmitLiteralCompareTypeof(Expression* expr,
418 Expression* sub_expr,
419 Handle<String> check);
421 // Platform-specific code for equality comparison with a nil-like value.
422 void EmitLiteralCompareNil(CompareOperation* expr,
423 Expression* sub_expr,
// Record a deoptimization bailout point for |node| with register |state|.
427 void PrepareForBailout(Expression* node, State state);
428 void PrepareForBailoutForId(BailoutId id, State state);
430 // Feedback slot support. The feedback vector will be cleared during gc and
431 // collected by the type-feedback oracle.
432 Handle<TypeFeedbackVector> FeedbackVector() const {
433 return info_->feedback_vector();
435 void EnsureSlotContainsAllocationSite(FeedbackVectorSlot slot);
436 void EnsureSlotContainsAllocationSite(FeedbackVectorICSlot slot);
438 // Returns a smi for the index into the FixedArray that backs the feedback
440 Smi* SmiFromSlot(FeedbackVectorSlot slot) const {
441 return Smi::FromInt(FeedbackVector()->GetIndex(slot));
// Same as above, for IC slots.
444 Smi* SmiFromSlot(FeedbackVectorICSlot slot) const {
445 return Smi::FromInt(FeedbackVector()->GetIndex(slot));
448 // Record a call's return site offset, used to rebuild the frame if the
449 // called function was inlined at the site.
450 void RecordJSReturnSite(Call* call);
452 // Prepare for bailout before a test (or compare) and branch. If
453 // should_normalize, then the following comparison will not handle the
454 // canonical JS true value so we will insert a (dead) test against true at
455 // the actual bailout target from the optimized code. If not
456 // should_normalize, the true and false labels are ignored.
457 void PrepareForBailoutBeforeSplit(Expression* expr,
458 bool should_normalize,
462 // If enabled, emit debug code for checking that the current context is
463 // neither a with nor a catch context.
464 void EmitDebugCheckDeclarationContext(Variable* variable);
466 // This is meant to be called at loop back edges, |back_edge_target| is
467 // the jump target of the back edge and is used to approximate the amount
468 // of code inside the loop.
469 void EmitBackEdgeBookkeeping(IterationStatement* stmt,
470 Label* back_edge_target);
471 // Record the OSR AST id corresponding to a back edge in the code.
472 void RecordBackEdge(BailoutId osr_ast_id);
473 // Emit a table of back edge ids, pcs and loop depths into the code stream.
474 // Return the offset of the start of the table.
475 unsigned EmitBackEdgeTable();
// Adjust/reset the interrupt-budget profiling counter.
477 void EmitProfilingCounterDecrement(int delta);
478 void EmitProfilingCounterReset();
480 // Emit code to pop values from the stack associated with nested statements
481 // like try/catch, try/finally, etc, running the finallies and unwinding the
482 // handlers as needed.
483 void EmitUnwindBeforeReturn();
485 // Platform-specific return sequence
486 void EmitReturnSequence();
488 // Platform-specific code sequences for calls
489 void EmitCall(Call* expr, CallICState::CallType = CallICState::FUNCTION);
490 void EmitCallWithLoadIC(Call* expr);
491 void EmitSuperCallWithLoadIC(Call* expr);
492 void EmitKeyedCallWithLoadIC(Call* expr, Expression* key);
493 void EmitKeyedSuperCallWithLoadIC(Call* expr);
495 // Platform-specific code for inline runtime calls.
496 InlineFunctionGenerator FindInlineFunctionGenerator(Runtime::FunctionId id);
498 void EmitInlineRuntimeCall(CallRuntime* expr);
// Declare one Emit<Name> generator per entry in INLINE_FUNCTION_LIST.
500 #define EMIT_INLINE_RUNTIME_CALL(name, x, y) \
501 void Emit##name(CallRuntime* expr);
502 INLINE_FUNCTION_LIST(EMIT_INLINE_RUNTIME_CALL)
503 #undef EMIT_INLINE_RUNTIME_CALL
505 // Platform-specific code for resuming generators.
506 void EmitGeneratorResume(Expression *generator,
508 JSGeneratorObject::ResumeMode resume_mode);
510 // Platform-specific code for loading variables.
511 void EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
512 TypeofState typeof_state,
514 MemOperand ContextSlotOperandCheckExtensions(Variable* var, Label* slow);
515 void EmitDynamicLookupFastCase(VariableProxy* proxy,
516 TypeofState typeof_state,
519 void EmitVariableLoad(VariableProxy* proxy);
521 void EmitAccessor(Expression* expression);
523 // Expects the arguments and the function already pushed.
524 void EmitResolvePossiblyDirectEval(int arg_count);
526 // Platform-specific support for allocating a new closure based on
527 // the given function info.
528 void EmitNewClosure(Handle<SharedFunctionInfo> info, bool pretenure);
530 // Platform-specific support for compiling assignments.
532 // Left-hand side can only be a property, a global or a (parameter or local)
538 NAMED_SUPER_PROPERTY,
// Classify an assignment target into its LhsKind; NULL means a
// plain variable (no property access involved).
542 static LhsKind GetAssignType(Property* property) {
543 if (property == NULL) return VARIABLE;
544 bool super_access = property->IsSuperAccess();
545 return (property->key()->IsPropertyName())
546 ? (super_access ? NAMED_SUPER_PROPERTY : NAMED_PROPERTY)
547 : (super_access ? KEYED_SUPER_PROPERTY : KEYED_PROPERTY);
550 // Load a value from a named property.
551 // The receiver is left on the stack by the IC.
552 void EmitNamedPropertyLoad(Property* expr);
554 // Load a value from super.named property.
555 // Expect receiver ('this' value) and home_object on the stack.
556 void EmitNamedSuperPropertyLoad(Property* expr);
558 // Load a value from super[keyed] property.
559 // Expect receiver ('this' value), home_object and key on the stack.
560 void EmitKeyedSuperPropertyLoad(Property* expr);
562 // Load a value from a keyed property.
563 // The receiver and the key are left on the stack by the IC.
564 void EmitKeyedPropertyLoad(Property* expr);
566 // Adds the properties to the class (function) object and to its prototype.
567 // Expects the class (function) in the accumulator. The class (function) is
568 // in the accumulator after installing all the properties.
569 void EmitClassDefineProperties(ClassLiteral* lit);
571 // Apply the compound assignment operator. Expects the left operand on top
572 // of the stack and the right one in the accumulator.
573 void EmitBinaryOp(BinaryOperation* expr,
577 // Helper functions for generating inlined smi code for certain
578 // binary operations.
579 void EmitInlineSmiBinaryOp(BinaryOperation* expr,
585 // Assign to the given expression as if via '='. The right-hand-side value
586 // is expected in the accumulator.
587 void EmitAssignment(Expression* expr);
589 // Shall an error be thrown if assignment with 'op' operation is performed
590 // on this variable in given language mode?
591 static bool IsSignallingAssignmentToConst(Variable* var, Token::Value op,
592 StrictMode strict_mode) {
593 if (var->mode() == CONST) return op != Token::INIT_CONST;
595 if (var->mode() == CONST_LEGACY) {
596 return strict_mode == STRICT && op != Token::INIT_CONST_LEGACY;
602 // Complete a variable assignment. The right-hand-side value is expected
603 // in the accumulator.
604 void EmitVariableAssignment(Variable* var,
607 // Helper functions to EmitVariableAssignment
608 void EmitStoreToStackLocalOrContextSlot(Variable* var,
609 MemOperand location);
611 // Complete a named property assignment. The receiver is expected on top
612 // of the stack and the right-hand-side value in the accumulator.
613 void EmitNamedPropertyAssignment(Assignment* expr);
615 // Complete a super named property assignment. The right-hand-side value
616 // is expected in the accumulator.
617 void EmitNamedSuperPropertyStore(Property* prop);
619 // Complete a keyed super property assignment. The right-hand-side value
620 // is expected in the accumulator.
621 void EmitKeyedSuperPropertyStore(Property* prop);
623 // Complete a keyed property assignment. The receiver and key are
624 // expected on top of the stack and the right-hand-side value in the
626 void EmitKeyedPropertyAssignment(Assignment* expr);
628 void EmitLoadHomeObject(SuperReference* expr);
// Thin wrapper: delegates the decision to FunctionLiteral.
630 static bool NeedsHomeObject(Expression* expr) {
631 return FunctionLiteral::NeedsHomeObject(expr);
634 // Adds the [[HomeObject]] to |initializer| if it is a FunctionLiteral.
635 // The value of the initializer is expected to be at the top of the stack.
636 // |offset| is the offset in the stack where the home object can be found.
637 void EmitSetHomeObjectIfNeeded(Expression* initializer, int offset);
639 void EmitLoadSuperConstructor(SuperReference* expr);
// Emit a call to the given IC stub, recording type feedback under |id|.
641 void CallIC(Handle<Code> code,
642 TypeFeedbackId id = TypeFeedbackId::None());
644 void CallLoadIC(ContextualMode mode,
645 TypeFeedbackId id = TypeFeedbackId::None());
646 void CallStoreIC(TypeFeedbackId id = TypeFeedbackId::None());
// Source-position bookkeeping for the generated code.
648 void SetFunctionPosition(FunctionLiteral* fun);
649 void SetReturnPosition(FunctionLiteral* fun);
650 void SetStatementPosition(Statement* stmt);
651 void SetExpressionPosition(Expression* expr);
652 void SetSourcePosition(int pos);
654 // Non-local control flow support.
655 void EnterFinallyBlock();
656 void ExitFinallyBlock();
658 // Loop nesting counter.
659 int loop_depth() { return loop_depth_; }
660 void increment_loop_depth() { loop_depth_++; }
661 void decrement_loop_depth() {
662 DCHECK(loop_depth_ > 0);
666 MacroAssembler* masm() { return masm_; }
668 class ExpressionContext;
// Current expression context (see ExpressionContext below).
669 const ExpressionContext* context() { return context_; }
670 void set_new_context(const ExpressionContext* context) { context_ = context; }
// Convenience accessors forwarding to the CompilationInfo.
672 Handle<Script> script() { return info_->script(); }
673 bool is_eval() { return info_->is_eval(); }
674 bool is_native() { return info_->is_native(); }
675 StrictMode strict_mode() { return function()->strict_mode(); }
676 FunctionLiteral* function() { return info_->function(); }
677 Scope* scope() { return scope_; }
// Platform-specific register assignments.
679 static Register result_register();
680 static Register context_register();
682 // Set fields in the stack frame. Offsets are the frame pointer relative
683 // offsets defined in, e.g., StandardFrameConstants.
684 void StoreToFrameField(int frame_offset, Register value);
686 // Load a value from the current context. Indices are defined as an enum
687 // in v8::internal::Context.
688 void LoadContextField(Register dst, int context_index);
690 // Push the function argument for the runtime functions PushWithContext
691 // and PushCatchContext.
692 void PushFunctionArgumentForContextAllocation();
694 // AST node visit functions.
695 #define DECLARE_VISIT(type) virtual void Visit##type(type* node) OVERRIDE;
696 AST_NODE_LIST(DECLARE_VISIT)
// Helpers for VisitBinaryOperation, split by operator kind.
699 void VisitComma(BinaryOperation* expr);
700 void VisitLogicalExpression(BinaryOperation* expr);
701 void VisitArithmeticExpression(BinaryOperation* expr);
703 void VisitForTypeofValue(Expression* expr);
// Attach deopt/type-feedback metadata to the finished Code object.
706 void PopulateDeoptimizationData(Handle<Code> code);
707 void PopulateTypeFeedbackInfo(Handle<Code> code);
709 Handle<FixedArray> handler_table() { return handler_table_; }
// One recorded bailout point: id plus packed pc/state (see StateField).
711 struct BailoutEntry {
713 unsigned pc_and_state;
// One recorded loop back edge (consumed by EmitBackEdgeTable).
716 struct BackEdgeEntry {
// Abstract expression-evaluation context: encodes where an expression's
// result is expected (discarded, accumulator, stack, or branch target).
// Constructor/destructor push/pop the context on the codegen (RAII).
722 class ExpressionContext BASE_EMBEDDED {
724 explicit ExpressionContext(FullCodeGenerator* codegen)
725 : masm_(codegen->masm()), old_(codegen->context()), codegen_(codegen) {
726 codegen->set_new_context(this);
729 virtual ~ExpressionContext() {
730 codegen_->set_new_context(old_);
733 Isolate* isolate() const { return codegen_->isolate(); }
735 // Convert constant control flow (true or false) to the result expected for
736 // this expression context.
737 virtual void Plug(bool flag) const = 0;
739 // Emit code to convert a pure value (in a register, known variable
740 // location, as a literal, or on top of the stack) into the result
741 // expected according to this expression context.
742 virtual void Plug(Register reg) const = 0;
743 virtual void Plug(Variable* var) const = 0;
744 virtual void Plug(Handle<Object> lit) const = 0;
745 virtual void Plug(Heap::RootListIndex index) const = 0;
746 virtual void PlugTOS() const = 0;
748 // Emit code to convert pure control flow to a pair of unbound labels into
749 // the result expected according to this expression context. The
750 // implementation will bind both labels unless it's a TestContext, which
751 // won't bind them at this point.
752 virtual void Plug(Label* materialize_true,
753 Label* materialize_false) const = 0;
755 // Emit code to discard count elements from the top of stack, then convert
756 // a pure value into the result expected according to this expression
758 virtual void DropAndPlug(int count, Register reg) const = 0;
760 // Set up branch labels for a test expression. The three Label** parameters
761 // are output parameters.
762 virtual void PrepareTest(Label* materialize_true,
763 Label* materialize_false,
766 Label** fall_through) const = 0;
768 // Returns true if we are evaluating only for side effects (i.e. if the
769 // result will be discarded).
770 virtual bool IsEffect() const { return false; }
772 // Returns true if we are evaluating for the value (in accu/on stack).
773 virtual bool IsAccumulatorValue() const { return false; }
774 virtual bool IsStackValue() const { return false; }
776 // Returns true if we are branching on the value rather than materializing
777 // it. Only used for asserts.
778 virtual bool IsTest() const { return false; }
781 FullCodeGenerator* codegen() const { return codegen_; }
782 MacroAssembler* masm() const { return masm_; }
783 MacroAssembler* masm_;
// Enclosing context, restored on destruction.
786 const ExpressionContext* old_;
787 FullCodeGenerator* codegen_;
// Context for expressions whose value is wanted in the accumulator register.
790 class AccumulatorValueContext : public ExpressionContext {
792 explicit AccumulatorValueContext(FullCodeGenerator* codegen)
793 : ExpressionContext(codegen) { }
795 virtual void Plug(bool flag) const;
796 virtual void Plug(Register reg) const;
797 virtual void Plug(Label* materialize_true, Label* materialize_false) const;
798 virtual void Plug(Variable* var) const;
799 virtual void Plug(Handle<Object> lit) const;
800 virtual void Plug(Heap::RootListIndex) const;
801 virtual void PlugTOS() const;
802 virtual void DropAndPlug(int count, Register reg) const;
803 virtual void PrepareTest(Label* materialize_true,
804 Label* materialize_false,
807 Label** fall_through) const;
808 virtual bool IsAccumulatorValue() const { return true; }
// Context for expressions whose value is wanted on top of the stack.
811 class StackValueContext : public ExpressionContext {
813 explicit StackValueContext(FullCodeGenerator* codegen)
814 : ExpressionContext(codegen) { }
816 virtual void Plug(bool flag) const;
817 virtual void Plug(Register reg) const;
818 virtual void Plug(Label* materialize_true, Label* materialize_false) const;
819 virtual void Plug(Variable* var) const;
820 virtual void Plug(Handle<Object> lit) const;
821 virtual void Plug(Heap::RootListIndex) const;
822 virtual void PlugTOS() const;
823 virtual void DropAndPlug(int count, Register reg) const;
824 virtual void PrepareTest(Label* materialize_true,
825 Label* materialize_false,
828 Label** fall_through) const;
829 virtual bool IsStackValue() const { return true; }
// Context for expressions used as branch conditions: the value is never
// materialized; control flows to true_label_/false_label_ instead.
832 class TestContext : public ExpressionContext {
834 TestContext(FullCodeGenerator* codegen,
835 Expression* condition,
839 : ExpressionContext(codegen),
840 condition_(condition),
841 true_label_(true_label),
842 false_label_(false_label),
843 fall_through_(fall_through) { }
// Checked downcast; the DCHECK relies on IsTest() being overridden below.
845 static const TestContext* cast(const ExpressionContext* context) {
846 DCHECK(context->IsTest());
847 return reinterpret_cast<const TestContext*>(context);
850 Expression* condition() const { return condition_; }
851 Label* true_label() const { return true_label_; }
852 Label* false_label() const { return false_label_; }
853 Label* fall_through() const { return fall_through_; }
855 virtual void Plug(bool flag) const;
856 virtual void Plug(Register reg) const;
857 virtual void Plug(Label* materialize_true, Label* materialize_false) const;
858 virtual void Plug(Variable* var) const;
859 virtual void Plug(Handle<Object> lit) const;
860 virtual void Plug(Heap::RootListIndex) const;
861 virtual void PlugTOS() const;
862 virtual void DropAndPlug(int count, Register reg) const;
863 virtual void PrepareTest(Label* materialize_true,
864 Label* materialize_false,
867 Label** fall_through) const;
868 virtual bool IsTest() const { return true; }
871 Expression* condition_;
874 Label* fall_through_;
// Context for expressions evaluated only for side effects (value discarded).
877 class EffectContext : public ExpressionContext {
879 explicit EffectContext(FullCodeGenerator* codegen)
880 : ExpressionContext(codegen) { }
882 virtual void Plug(bool flag) const;
883 virtual void Plug(Register reg) const;
884 virtual void Plug(Label* materialize_true, Label* materialize_false) const;
885 virtual void Plug(Variable* var) const;
886 virtual void Plug(Handle<Object> lit) const;
887 virtual void Plug(Heap::RootListIndex) const;
888 virtual void PlugTOS() const;
889 virtual void DropAndPlug(int count, Register reg) const;
890 virtual void PrepareTest(Label* materialize_true,
891 Label* materialize_false,
894 Label** fall_through) const;
895 virtual bool IsEffect() const { return true; }
// RAII helper: enters a block scope (if the block has one) on construction
// and leaves it on destruction — presumably also recording the given
// bailout ids; TODO(review): confirm against the .cc implementation.
898 class EnterBlockScopeIfNeeded {
900 EnterBlockScopeIfNeeded(FullCodeGenerator* codegen, Scope* scope,
901 BailoutId entry_id, BailoutId declarations_id,
903 ~EnterBlockScopeIfNeeded();
906 MacroAssembler* masm() const { return codegen_->masm(); }
908 FullCodeGenerator* codegen_;
// --- FullCodeGenerator data members ---
914 MacroAssembler* masm_;
915 CompilationInfo* info_;
// Innermost entry of the NestedStatement cleanup chain.
918 NestedStatement* nesting_stack_;
920 ZoneList<Handle<Object> >* globals_;
921 Handle<FixedArray> modules_;
// Current expression context; managed by ExpressionContext RAII.
923 const ExpressionContext* context_;
924 ZoneList<BailoutEntry> bailout_entries_;
925 ZoneList<BackEdgeEntry> back_edges_;
927 Handle<FixedArray> handler_table_;
928 Handle<Cell> profiling_counter_;
929 bool generate_debug_code_;
// NestedStatement's ctor/dtor mutate nesting_stack_ directly.
931 friend class NestedStatement;
933 DEFINE_AST_VISITOR_SUBCLASS_MEMBERS();
934 DISALLOW_COPY_AND_ASSIGN(FullCodeGenerator);
938 // A map from property names to getter/setter pairs allocated in the zone.
939 class AccessorTable: public TemplateHashMap<Literal,
940 ObjectLiteral::Accessors,
941 ZoneAllocationPolicy> {
943 explicit AccessorTable(Zone* zone) :
944 TemplateHashMap<Literal, ObjectLiteral::Accessors,
945 ZoneAllocationPolicy>(Literal::Match,
946 ZoneAllocationPolicy(zone)),
// Find-or-insert: creates a zone-allocated Accessors entry on first use.
949 Iterator lookup(Literal* literal) {
950 Iterator it = find(literal, true, ZoneAllocationPolicy(zone_));
951 if (it->second == NULL) it->second = new(zone_) ObjectLiteral::Accessors();
// Read-only view over the back-edge table that EmitBackEdgeTable() placed in
// the code stream: a uint32 length followed by {ast id, pc offset,
// loop depth} uint32 triples (see the kEntrySize constants below).
960 class BackEdgeTable {
962 BackEdgeTable(Code* code, DisallowHeapAllocation* required) {
963 DCHECK(code->kind() == Code::FUNCTION);
964 instruction_start_ = code->instruction_start();
965 Address table_address = instruction_start_ + code->back_edge_table_offset();
966 length_ = Memory::uint32_at(table_address);
967 start_ = table_address + kTableLengthSize;
970 uint32_t length() { return length_; }
972 BailoutId ast_id(uint32_t index) {
973 return BailoutId(static_cast<int>(
974 Memory::uint32_at(entry_at(index) + kAstIdOffset)));
977 uint32_t loop_depth(uint32_t index) {
978 return Memory::uint32_at(entry_at(index) + kLoopDepthOffset);
981 uint32_t pc_offset(uint32_t index) {
982 return Memory::uint32_at(entry_at(index) + kPcOffsetOffset);
// Absolute pc of the back edge (instruction start + stored offset).
985 Address pc(uint32_t index) {
986 return instruction_start_ + pc_offset(index);
991 ON_STACK_REPLACEMENT,
992 OSR_AFTER_STACK_CHECK
995 // Increase allowed loop nesting level by one and patch those matching loops.
996 static void Patch(Isolate* isolate, Code* unoptimized_code);
998 // Patch the back edge to the target state, provided the correct callee.
999 static void PatchAt(Code* unoptimized_code,
1001 BackEdgeState target_state,
1002 Code* replacement_code);
1004 // Change all patched back edges back to normal interrupts.
1005 static void Revert(Isolate* isolate,
1006 Code* unoptimized_code);
1008 // Change a back edge patched for on-stack replacement to perform a
1009 // stack check first.
1010 static void AddStackCheck(Handle<Code> code, uint32_t pc_offset);
1012 // Revert the patch by AddStackCheck.
1013 static void RemoveStackCheck(Handle<Code> code, uint32_t pc_offset);
1015 // Return the current patch state of the back edge.
1016 static BackEdgeState GetBackEdgeState(Isolate* isolate,
1017 Code* unoptimized_code,
1021 // Verify that all back edges of a certain loop depth are patched.
1022 static bool Verify(Isolate* isolate, Code* unoptimized_code);
// Address of the index-th table entry (bounds-checked in debug mode).
1026 Address entry_at(uint32_t index) {
1027 DCHECK(index < length_);
1028 return start_ + index * kEntrySize;
1031 static const int kTableLengthSize = kIntSize;
1032 static const int kAstIdOffset = 0 * kIntSize;
1033 static const int kPcOffsetOffset = 1 * kIntSize;
1034 static const int kLoopDepthOffset = 2 * kIntSize;
1035 static const int kEntrySize = 3 * kIntSize;
1038 Address instruction_start_;
1043 } } // namespace v8::internal
1045 #endif // V8_FULL_CODEGEN_H_