1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #ifndef V8_FULL_CODEGEN_H_
6 #define V8_FULL_CODEGEN_H_
10 #include "allocation.h"
11 #include "assert-scope.h"
13 #include "code-stubs.h"
16 #include "data-flow.h"
23 // Forward declarations.
26 // AST node visitor which can tell whether a given statement will be breakable
27 // when the code is compiled by the full compiler in the debugger. This means
28 // that there will be an IC (load/store/call) in the code generated for the
29 // debugger to piggyback on.
// NOTE(review): sampled listing — access specifiers, closing braces and some
// declarations between the numbered lines are not visible in this view.
30 class BreakableStatementChecker: public AstVisitor {
32 explicit BreakableStatementChecker(Zone* zone) : is_breakable_(false) {
33 InitializeAstVisitor(zone);
// Run the breakability check over one statement or expression.
36 void Check(Statement* stmt);
37 void Check(Expression* stmt);
// Result of the most recent Check() — presumably set by the Visit* methods;
// the setters are not visible here (TODO confirm).
39 bool is_breakable() { return is_breakable_; }
42 // AST node visit functions.
43 #define DECLARE_VISIT(type) virtual void Visit##type(type* node);
44 AST_NODE_LIST(DECLARE_VISIT)
49 DEFINE_AST_VISITOR_SUBCLASS_MEMBERS();
50 DISALLOW_COPY_AND_ASSIGN(BreakableStatementChecker);
54 // -----------------------------------------------------------------------------
55 // Full code generator.
// The full (baseline, non-optimizing) code generator. Visits the AST once
// and emits code directly via the MacroAssembler.
// NOTE(review): sampled listing — the constructor's initializer list and the
// State2String switch are only partially visible here.
57 class FullCodeGenerator: public AstVisitor {
64 FullCodeGenerator(MacroAssembler* masm, CompilationInfo* info)
67 scope_(info->scope()),
// Pre-size the bailout table only when deopt support is being generated.
72 bailout_entries_(info->HasDeoptimizationSupport()
73 ? info->function()->ast_node_count() : 0,
75 back_edges_(2, info->zone()),
// Static entry point: compile info's function with the full compiler.
82 static bool MakeCode(CompilationInfo* info);
84 // Encode state and pc-offset as a BitField<type, start, size>.
85 // Only use 30 bits because we encode the result as a smi.
86 class StateField : public BitField<State, 0, 1> { };
87 class PcField : public BitField<unsigned, 1, 30-1> { };
89 static const char* State2String(State state) {
91 case NO_REGISTERS: return "NO_REGISTERS";
92 case TOS_REG: return "TOS_REG";
// Saturation value for the per-back-edge interrupt weight.
98 static const int kMaxBackEdgeWeight = 127;
100 // Platform-specific code size multiplier.
// Per-architecture estimate of generated-code size relative to source size;
// the *Boot* variant presumably applies while bootstrapping (TODO confirm).
// NOTE(review): the #else branch and closing #endif (original lines around
// 117/119) are not visible in this sampled listing.
101 #if V8_TARGET_ARCH_IA32
102 static const int kCodeSizeMultiplier = 105;
103 static const int kBootCodeSizeMultiplier = 100;
104 #elif V8_TARGET_ARCH_X64
105 static const int kCodeSizeMultiplier = 170;
106 static const int kBootCodeSizeMultiplier = 140;
107 #elif V8_TARGET_ARCH_ARM
108 static const int kCodeSizeMultiplier = 149;
109 static const int kBootCodeSizeMultiplier = 110;
110 #elif V8_TARGET_ARCH_ARM64
111 // TODO(all): Copied ARM value. Check this is sensible for ARM64.
112 static const int kCodeSizeMultiplier = 149;
113 static const int kBootCodeSizeMultiplier = 110;
114 #elif V8_TARGET_ARCH_MIPS
115 static const int kCodeSizeMultiplier = 149;
116 static const int kBootCodeSizeMultiplier = 120;
118 #error Unsupported target architecture.
// Base class for the codegen's stack of statements that need cleanup on a
// non-local exit (break/continue/return). Construction pushes onto, and
// destruction pops from, the codegen's nesting stack (strict LIFO).
127 class NestedStatement BASE_EMBEDDED {
129 explicit NestedStatement(FullCodeGenerator* codegen) : codegen_(codegen) {
130 // Link into codegen's nesting stack.
131 previous_ = codegen->nesting_stack_;
132 codegen->nesting_stack_ = this;
134 virtual ~NestedStatement() {
135 // Unlink from codegen's nesting stack.
136 ASSERT_EQ(this, codegen_->nesting_stack_);
137 codegen_->nesting_stack_ = previous_;
// Poor-man's RTTI: subclasses override to return themselves.
140 virtual Breakable* AsBreakable() { return NULL; }
141 virtual Iteration* AsIteration() { return NULL; }
143 virtual bool IsContinueTarget(Statement* target) { return false; }
144 virtual bool IsBreakTarget(Statement* target) { return false; }
146 // Notify the statement that we are exiting it via break, continue, or
147 // return and give it a chance to generate cleanup code. Return the
148 // next outer statement in the nesting stack. We accumulate in
149 // *stack_depth the amount to drop the stack and in *context_length the
150 // number of context chain links to unwind as we traverse the nesting
151 // stack from an exit to its target.
152 virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
157 MacroAssembler* masm() { return codegen_->masm(); }
159 FullCodeGenerator* codegen_;
160 NestedStatement* previous_;
163 DISALLOW_COPY_AND_ASSIGN(NestedStatement);
166 // A breakable statement such as a block.
167 class Breakable : public NestedStatement {
169 Breakable(FullCodeGenerator* codegen, BreakableStatement* statement)
170 : NestedStatement(codegen), statement_(statement) {
172 virtual ~Breakable() {}
174 virtual Breakable* AsBreakable() { return this; }
175 virtual bool IsBreakTarget(Statement* target) {
176 return statement() == target;
179 BreakableStatement* statement() { return statement_; }
// Label that 'break' jumps bind to for this statement.
180 Label* break_label() { return &break_label_; }
183 BreakableStatement* statement_;
187 // An iteration statement such as a while, for, or do loop.
188 class Iteration : public Breakable {
190 Iteration(FullCodeGenerator* codegen, IterationStatement* statement)
191 : Breakable(codegen, statement) {
193 virtual ~Iteration() {}
195 virtual Iteration* AsIteration() { return this; }
196 virtual bool IsContinueTarget(Statement* target) {
197 return statement() == target;
// Label that 'continue' jumps bind to for this loop.
200 Label* continue_label() { return &continue_label_; }
203 Label continue_label_;
206 // A nested block statement.
207 class NestedBlock : public Breakable {
209 NestedBlock(FullCodeGenerator* codegen, Block* block)
210 : Breakable(codegen, block) {
212 virtual ~NestedBlock() {}
// Exiting a block with its own scope must unwind one context link;
// the body of this branch is not visible in this sampled listing.
214 virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
215 if (statement()->AsBlock()->scope() != NULL) {
222 // The try block of a try/catch statement.
223 class TryCatch : public NestedStatement {
225 explicit TryCatch(FullCodeGenerator* codegen) : NestedStatement(codegen) {
227 virtual ~TryCatch() {}
// Platform-specific: unwinds the handler on exit (defined elsewhere).
229 virtual NestedStatement* Exit(int* stack_depth, int* context_length);
232 // The try block of a try/finally statement.
233 class TryFinally : public NestedStatement {
235 TryFinally(FullCodeGenerator* codegen, Label* finally_entry)
236 : NestedStatement(codegen), finally_entry_(finally_entry) {
238 virtual ~TryFinally() {}
240 virtual NestedStatement* Exit(int* stack_depth, int* context_length);
// Entry label of the finally clause to run before leaving the try block.
243 Label* finally_entry_;
246 // The finally block of a try/finally statement.
247 class Finally : public NestedStatement {
// Number of extra stack slots the finally machinery keeps live.
249 static const int kElementCount = 5;
251 explicit Finally(FullCodeGenerator* codegen) : NestedStatement(codegen) { }
252 virtual ~Finally() {}
254 virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
255 *stack_depth += kElementCount;
260 // The body of a for/in loop.
261 class ForIn : public Iteration {
// Stack slots occupied by for/in loop state (enumerable, cache, etc.).
263 static const int kElementCount = 5;
265 ForIn(FullCodeGenerator* codegen, ForInStatement* statement)
266 : Iteration(codegen, statement) {
270 virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
271 *stack_depth += kElementCount;
277 // The body of a with or catch.
278 class WithOrCatch : public NestedStatement {
280 explicit WithOrCatch(FullCodeGenerator* codegen)
281 : NestedStatement(codegen) {
283 virtual ~WithOrCatch() {}
// Exiting a with/catch unwinds one context chain link; the body is not
// visible in this sampled listing.
285 virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
291 // Type of a member function that generates inline code for a native function.
292 typedef void (FullCodeGenerator::*InlineFunctionGenerator)(CallRuntime* expr);
// Table of generators, indexed in parallel with INLINE_FUNCTION_LIST.
294 static const InlineFunctionGenerator kInlineFunctionGenerators[];
296 // A platform-specific utility to overwrite the accumulator register
297 // with a GC-safe value.
298 void ClearAccumulator();
300 // Determine whether or not to inline the smi case for the given
302 bool ShouldInlineSmiCase(Token::Value op);
304 // Helper function to convert a pure value into a test context. The value
305 // is expected on the stack or the accumulator, depending on the platform.
306 // See the platform-specific implementation for details.
307 void DoTest(Expression* condition,
310 Label* fall_through);
311 void DoTest(const TestContext* context);
313 // Helper function to split control flow and avoid a branch to the
314 // fall-through label if it is set up.
// MIPS needs a different signature (extra compare operands, not visible in
// this sampled listing); other architectures share the second overload.
315 #if V8_TARGET_ARCH_MIPS
316 void Split(Condition cc,
321 Label* fall_through);
322 #else // All non-mips arch.
323 void Split(Condition cc,
326 Label* fall_through);
327 #endif // V8_TARGET_ARCH_MIPS
329 // Load the value of a known (PARAMETER, LOCAL, or CONTEXT) variable into
330 // a register. Emits a context chain walk if necessary (so does
331 // SetVar) so avoid calling both on the same variable.
332 void GetVar(Register destination, Variable* var);
334 // Assign to a known (PARAMETER, LOCAL, or CONTEXT) variable. If it's in
335 // the context, the write barrier will be emitted and source, scratch0,
336 // scratch1 will be clobbered. Emits a context chain walk if necessary
337 // (so does GetVar) so avoid calling both on the same variable.
338 void SetVar(Variable* var,
343 // An operand used to read/write a stack-allocated (PARAMETER or LOCAL)
344 // variable. Writing does not need the write barrier.
345 MemOperand StackOperand(Variable* var);
347 // An operand used to read/write a known (PARAMETER, LOCAL, or CONTEXT)
348 // variable. May emit code to traverse the context chain, loading the
349 // found context into the scratch register. Writing to this operand will
350 // need the write barrier if location is CONTEXT.
351 MemOperand VarOperand(Variable* var, Register scratch);
// Visit an expression for its side effects only; result is discarded.
353 void VisitForEffect(Expression* expr) {
354 EffectContext context(this);
356 PrepareForBailout(expr, NO_REGISTERS);
// Visit an expression leaving its value in the accumulator register.
359 void VisitForAccumulatorValue(Expression* expr) {
360 AccumulatorValueContext context(this);
362 PrepareForBailout(expr, TOS_REG);
// Visit an expression leaving its value pushed on the stack.
365 void VisitForStackValue(Expression* expr) {
366 StackValueContext context(this);
368 PrepareForBailout(expr, NO_REGISTERS);
// Visit an expression, branching on its value rather than materializing it.
371 void VisitForControl(Expression* expr,
374 Label* fall_through) {
375 TestContext context(this, expr, if_true, if_false, fall_through);
377 // For test contexts, we prepare for bailout before branching, not at
378 // the end of the entire expression. This happens as part of visiting
382 void VisitInDuplicateContext(Expression* expr);
// Emit code for variable/function/module declarations and global setup.
384 void VisitDeclarations(ZoneList<Declaration*>* declarations);
385 void DeclareModules(Handle<FixedArray> descriptions);
386 void DeclareGlobals(Handle<FixedArray> pairs);
387 int DeclareGlobalsFlags();
389 // Generate code to allocate all (including nested) modules and contexts.
390 // Because of recursive linking and the presence of module alias declarations,
391 // this has to be a separate pass _before_ populating or executing any module.
392 void AllocateModules(ZoneList<Declaration*>* declarations);
394 // Generate code to create an iterator result object. The "value" property is
395 // set to a value popped from the stack, and "done" is set according to the
396 // argument. The result object is left in the result register.
397 void EmitCreateIteratorResult(bool done);
399 // Try to perform a comparison as a fast inlined literal compare if
400 // the operands allow it. Returns true if the compare operation
401 // has been matched and all code generated; false otherwise.
402 bool TryLiteralCompare(CompareOperation* compare);
404 // Platform-specific code for comparing the type of a value with
405 // a given literal string.
406 void EmitLiteralCompareTypeof(Expression* expr,
407 Expression* sub_expr,
408 Handle<String> check);
410 // Platform-specific code for equality comparison with a nil-like value.
411 void EmitLiteralCompareNil(CompareOperation* expr,
412 Expression* sub_expr,
// Record deopt bailout points keyed by AST node / bailout id.
416 void PrepareForBailout(Expression* node, State state);
417 void PrepareForBailoutForId(BailoutId id, State state);
419 // Feedback slot support. The feedback vector will be cleared during gc and
420 // collected by the type-feedback oracle.
421 Handle<FixedArray> FeedbackVector() {
422 return info_->feedback_vector();
424 void EnsureSlotContainsAllocationSite(int slot);
426 // Record a call's return site offset, used to rebuild the frame if the
427 // called function was inlined at the site.
428 void RecordJSReturnSite(Call* call);
430 // Prepare for bailout before a test (or compare) and branch. If
431 // should_normalize, then the following comparison will not handle the
432 // canonical JS true value so we will insert a (dead) test against true at
433 // the actual bailout target from the optimized code. If not
434 // should_normalize, the true and false labels are ignored.
435 void PrepareForBailoutBeforeSplit(Expression* expr,
436 bool should_normalize,
440 // If enabled, emit debug code for checking that the current context is
441 // neither a with nor a catch context.
442 void EmitDebugCheckDeclarationContext(Variable* variable);
444 // This is meant to be called at loop back edges, |back_edge_target| is
445 // the jump target of the back edge and is used to approximate the amount
446 // of code inside the loop.
447 void EmitBackEdgeBookkeeping(IterationStatement* stmt,
448 Label* back_edge_target);
449 // Record the OSR AST id corresponding to a back edge in the code.
450 void RecordBackEdge(BailoutId osr_ast_id);
451 // Emit a table of back edge ids, pcs and loop depths into the code stream.
452 // Return the offset of the start of the table.
453 unsigned EmitBackEdgeTable();
// Maintain the interrupt/profiling counter used to trigger optimization.
455 void EmitProfilingCounterDecrement(int delta);
456 void EmitProfilingCounterReset();
458 // Emit code to pop values from the stack associated with nested statements
459 // like try/catch, try/finally, etc, running the finallies and unwinding the
460 // handlers as needed.
461 void EmitUnwindBeforeReturn();
463 // Platform-specific return sequence
464 void EmitReturnSequence();
466 // Platform-specific code sequences for calls
467 void EmitCall(Call* expr, CallIC::CallType = CallIC::FUNCTION);
468 void EmitCallWithLoadIC(Call* expr);
469 void EmitKeyedCallWithLoadIC(Call* expr, Expression* key);
471 // Platform-specific code for inline runtime calls.
472 InlineFunctionGenerator FindInlineFunctionGenerator(Runtime::FunctionId id);
474 void EmitInlineRuntimeCall(CallRuntime* expr);
// Declare one Emit<Name> member per entry in INLINE_FUNCTION_LIST.
476 #define EMIT_INLINE_RUNTIME_CALL(name, x, y) \
477 void Emit##name(CallRuntime* expr);
478 INLINE_FUNCTION_LIST(EMIT_INLINE_RUNTIME_CALL)
479 #undef EMIT_INLINE_RUNTIME_CALL
481 // Platform-specific code for resuming generators.
482 void EmitGeneratorResume(Expression *generator,
484 JSGeneratorObject::ResumeMode resume_mode);
486 // Platform-specific code for loading variables.
487 void EmitLoadGlobalCheckExtensions(Variable* var,
488 TypeofState typeof_state,
490 MemOperand ContextSlotOperandCheckExtensions(Variable* var, Label* slow);
491 void EmitDynamicLookupFastCase(Variable* var,
492 TypeofState typeof_state,
495 void EmitVariableLoad(VariableProxy* proxy);
497 void EmitAccessor(Expression* expression);
499 // Expects the arguments and the function already pushed.
500 void EmitResolvePossiblyDirectEval(int arg_count);
502 // Platform-specific support for allocating a new closure based on
503 // the given function info.
504 void EmitNewClosure(Handle<SharedFunctionInfo> info, bool pretenure);
506 // Platform-specific support for compiling assignments.
508 // Load a value from a named property.
509 // The receiver is left on the stack by the IC.
510 void EmitNamedPropertyLoad(Property* expr);
512 // Load a value from a keyed property.
513 // The receiver and the key are left on the stack by the IC.
514 void EmitKeyedPropertyLoad(Property* expr);
516 // Apply the compound assignment operator. Expects the left operand on top
517 // of the stack and the right one in the accumulator.
518 void EmitBinaryOp(BinaryOperation* expr,
522 // Helper functions for generating inlined smi code for certain
523 // binary operations.
524 void EmitInlineSmiBinaryOp(BinaryOperation* expr,
530 // Assign to the given expression as if via '='. The right-hand-side value
531 // is expected in the accumulator.
532 void EmitAssignment(Expression* expr);
534 // Complete a variable assignment. The right-hand-side value is expected
535 // in the accumulator.
536 void EmitVariableAssignment(Variable* var,
539 // Helper functions to EmitVariableAssignment
540 void EmitStoreToStackLocalOrContextSlot(Variable* var,
541 MemOperand location);
542 void EmitCallStoreContextSlot(Handle<String> name, StrictMode strict_mode);
544 // Complete a named property assignment. The receiver is expected on top
545 // of the stack and the right-hand-side value in the accumulator.
546 void EmitNamedPropertyAssignment(Assignment* expr);
548 // Complete a keyed property assignment. The receiver and key are
549 // expected on top of the stack and the right-hand-side value in the
551 void EmitKeyedPropertyAssignment(Assignment* expr);
// Helpers to invoke ICs, recording type feedback under the given id.
553 void CallIC(Handle<Code> code,
554 TypeFeedbackId id = TypeFeedbackId::None());
556 void CallLoadIC(ContextualMode mode,
557 TypeFeedbackId id = TypeFeedbackId::None());
558 void CallStoreIC(TypeFeedbackId id = TypeFeedbackId::None());
// Source-position bookkeeping for the generated code.
560 void SetFunctionPosition(FunctionLiteral* fun);
561 void SetReturnPosition(FunctionLiteral* fun);
562 void SetStatementPosition(Statement* stmt);
563 void SetExpressionPosition(Expression* expr);
564 void SetStatementPosition(int pos);
565 void SetSourcePosition(int pos);
567 // Non-local control flow support.
568 void EnterFinallyBlock();
569 void ExitFinallyBlock();
571 // Loop nesting counter.
572 int loop_depth() { return loop_depth_; }
573 void increment_loop_depth() { loop_depth_++; }
574 void decrement_loop_depth() {
575 ASSERT(loop_depth_ > 0);
579 MacroAssembler* masm() { return masm_; }
// Current expression context (see ExpressionContext below); contexts
// save/restore it RAII-style.
581 class ExpressionContext;
582 const ExpressionContext* context() { return context_; }
583 void set_new_context(const ExpressionContext* context) { context_ = context; }
// Accessors forwarding to the CompilationInfo.
585 Handle<Script> script() { return info_->script(); }
586 bool is_eval() { return info_->is_eval(); }
587 bool is_native() { return info_->is_native(); }
588 StrictMode strict_mode() { return function()->strict_mode(); }
589 FunctionLiteral* function() { return info_->function(); }
590 Scope* scope() { return scope_; }
// Platform-defined accumulator and context registers.
592 static Register result_register();
593 static Register context_register();
595 // Set fields in the stack frame. Offsets are the frame pointer relative
596 // offsets defined in, e.g., StandardFrameConstants.
597 void StoreToFrameField(int frame_offset, Register value);
599 // Load a value from the current context. Indices are defined as an enum
600 // in v8::internal::Context.
601 void LoadContextField(Register dst, int context_index);
603 // Push the function argument for the runtime functions PushWithContext
604 // and PushCatchContext.
605 void PushFunctionArgumentForContextAllocation();
607 // AST node visit functions.
608 #define DECLARE_VISIT(type) virtual void Visit##type(type* node);
609 AST_NODE_LIST(DECLARE_VISIT)
// Helpers splitting VisitBinaryOperation by operator kind.
612 void VisitComma(BinaryOperation* expr);
613 void VisitLogicalExpression(BinaryOperation* expr);
614 void VisitArithmeticExpression(BinaryOperation* expr);
616 void VisitForTypeofValue(Expression* expr);
// Attach deopt/type-feedback metadata to the finished Code object.
619 void PopulateDeoptimizationData(Handle<Code> code);
620 void PopulateTypeFeedbackInfo(Handle<Code> code);
622 Handle<FixedArray> handler_table() { return handler_table_; }
// Entry records for the bailout and back-edge tables; remaining fields of
// both structs are not visible in this sampled listing.
624 struct BailoutEntry {
626 unsigned pc_and_state;
629 struct BackEdgeEntry {
// Abstract expression context: captures where an expression's result should
// go (effect / accumulator / stack / control flow). Construction installs
// this context on the codegen; destruction restores the previous one.
635 class ExpressionContext BASE_EMBEDDED {
637 explicit ExpressionContext(FullCodeGenerator* codegen)
638 : masm_(codegen->masm()), old_(codegen->context()), codegen_(codegen) {
639 codegen->set_new_context(this);
642 virtual ~ExpressionContext() {
643 codegen_->set_new_context(old_);
646 Isolate* isolate() const { return codegen_->isolate(); }
648 // Convert constant control flow (true or false) to the result expected for
649 // this expression context.
650 virtual void Plug(bool flag) const = 0;
652 // Emit code to convert a pure value (in a register, known variable
653 // location, as a literal, or on top of the stack) into the result
654 // expected according to this expression context.
655 virtual void Plug(Register reg) const = 0;
656 virtual void Plug(Variable* var) const = 0;
657 virtual void Plug(Handle<Object> lit) const = 0;
658 virtual void Plug(Heap::RootListIndex index) const = 0;
659 virtual void PlugTOS() const = 0;
661 // Emit code to convert pure control flow to a pair of unbound labels into
662 // the result expected according to this expression context. The
663 // implementation will bind both labels unless it's a TestContext, which
664 // won't bind them at this point.
665 virtual void Plug(Label* materialize_true,
666 Label* materialize_false) const = 0;
668 // Emit code to discard count elements from the top of stack, then convert
669 // a pure value into the result expected according to this expression
671 virtual void DropAndPlug(int count, Register reg) const = 0;
673 // Set up branch labels for a test expression. The three Label** parameters
674 // are output parameters.
675 virtual void PrepareTest(Label* materialize_true,
676 Label* materialize_false,
679 Label** fall_through) const = 0;
681 // Returns true if we are evaluating only for side effects (i.e. if the
682 // result will be discarded).
683 virtual bool IsEffect() const { return false; }
685 // Returns true if we are evaluating for the value (in accu/on stack).
686 virtual bool IsAccumulatorValue() const { return false; }
687 virtual bool IsStackValue() const { return false; }
689 // Returns true if we are branching on the value rather than materializing
690 // it. Only used for asserts.
691 virtual bool IsTest() const { return false; }
694 FullCodeGenerator* codegen() const { return codegen_; }
695 MacroAssembler* masm() const { return masm_; }
696 MacroAssembler* masm_;
// Previous context, restored by the destructor.
699 const ExpressionContext* old_;
700 FullCodeGenerator* codegen_;
// Context for expressions whose value is wanted in the accumulator register.
703 class AccumulatorValueContext : public ExpressionContext {
705 explicit AccumulatorValueContext(FullCodeGenerator* codegen)
706 : ExpressionContext(codegen) { }
708 virtual void Plug(bool flag) const;
709 virtual void Plug(Register reg) const;
710 virtual void Plug(Label* materialize_true, Label* materialize_false) const;
711 virtual void Plug(Variable* var) const;
712 virtual void Plug(Handle<Object> lit) const;
713 virtual void Plug(Heap::RootListIndex) const;
714 virtual void PlugTOS() const;
715 virtual void DropAndPlug(int count, Register reg) const;
716 virtual void PrepareTest(Label* materialize_true,
717 Label* materialize_false,
720 Label** fall_through) const;
721 virtual bool IsAccumulatorValue() const { return true; }
// Context for expressions whose value is wanted on top of the stack.
724 class StackValueContext : public ExpressionContext {
726 explicit StackValueContext(FullCodeGenerator* codegen)
727 : ExpressionContext(codegen) { }
729 virtual void Plug(bool flag) const;
730 virtual void Plug(Register reg) const;
731 virtual void Plug(Label* materialize_true, Label* materialize_false) const;
732 virtual void Plug(Variable* var) const;
733 virtual void Plug(Handle<Object> lit) const;
734 virtual void Plug(Heap::RootListIndex) const;
735 virtual void PlugTOS() const;
736 virtual void DropAndPlug(int count, Register reg) const;
737 virtual void PrepareTest(Label* materialize_true,
738 Label* materialize_false,
741 Label** fall_through) const;
742 virtual bool IsStackValue() const { return true; }
// Context for expressions used as branch conditions: the value is never
// materialized; control jumps to true_label/false_label instead.
745 class TestContext : public ExpressionContext {
747 TestContext(FullCodeGenerator* codegen,
748 Expression* condition,
752 : ExpressionContext(codegen),
753 condition_(condition),
754 true_label_(true_label),
755 false_label_(false_label),
756 fall_through_(fall_through) { }
// Checked downcast; safe only because IsTest() uniquely identifies this
// subclass (asserted below).
758 static const TestContext* cast(const ExpressionContext* context) {
759 ASSERT(context->IsTest());
760 return reinterpret_cast<const TestContext*>(context);
763 Expression* condition() const { return condition_; }
764 Label* true_label() const { return true_label_; }
765 Label* false_label() const { return false_label_; }
766 Label* fall_through() const { return fall_through_; }
768 virtual void Plug(bool flag) const;
769 virtual void Plug(Register reg) const;
770 virtual void Plug(Label* materialize_true, Label* materialize_false) const;
771 virtual void Plug(Variable* var) const;
772 virtual void Plug(Handle<Object> lit) const;
773 virtual void Plug(Heap::RootListIndex) const;
774 virtual void PlugTOS() const;
775 virtual void DropAndPlug(int count, Register reg) const;
776 virtual void PrepareTest(Label* materialize_true,
777 Label* materialize_false,
780 Label** fall_through) const;
781 virtual bool IsTest() const { return true; }
784 Expression* condition_;
787 Label* fall_through_;
// Context for expressions evaluated for side effects only; the value is
// discarded.
790 class EffectContext : public ExpressionContext {
792 explicit EffectContext(FullCodeGenerator* codegen)
793 : ExpressionContext(codegen) { }
795 virtual void Plug(bool flag) const;
796 virtual void Plug(Register reg) const;
797 virtual void Plug(Label* materialize_true, Label* materialize_false) const;
798 virtual void Plug(Variable* var) const;
799 virtual void Plug(Handle<Object> lit) const;
800 virtual void Plug(Heap::RootListIndex) const;
801 virtual void PlugTOS() const;
802 virtual void DropAndPlug(int count, Register reg) const;
803 virtual void PrepareTest(Label* materialize_true,
804 Label* materialize_false,
807 Label** fall_through) const;
808 virtual bool IsEffect() const { return true; }
// Instance state of FullCodeGenerator. Several members (e.g. loop_depth_,
// break/continue labels) are not visible in this sampled listing.
811 MacroAssembler* masm_;
812 CompilationInfo* info_;
// Top of the LIFO stack of NestedStatements (see above).
815 NestedStatement* nesting_stack_;
817 ZoneList<Handle<Object> >* globals_;
818 Handle<FixedArray> modules_;
// Currently installed expression context.
820 const ExpressionContext* context_;
821 ZoneList<BailoutEntry> bailout_entries_;
822 GrowableBitVector prepared_bailout_ids_;
823 ZoneList<BackEdgeEntry> back_edges_;
825 Handle<FixedArray> handler_table_;
826 Handle<Cell> profiling_counter_;
827 bool generate_debug_code_;
829 friend class NestedStatement;
831 DEFINE_AST_VISITOR_SUBCLASS_MEMBERS();
832 DISALLOW_COPY_AND_ASSIGN(FullCodeGenerator);
836 // A map from property names to getter/setter pairs allocated in the zone.
837 class AccessorTable: public TemplateHashMap<Literal,
838 ObjectLiteral::Accessors,
839 ZoneAllocationPolicy> {
841 explicit AccessorTable(Zone* zone) :
842 TemplateHashMap<Literal, ObjectLiteral::Accessors,
843 ZoneAllocationPolicy>(Literal::Match,
844 ZoneAllocationPolicy(zone)),
// Find-or-insert: allocates a fresh Accessors record in the zone on first
// lookup of a literal key.
847 Iterator lookup(Literal* literal) {
848 Iterator it = find(literal, true, ZoneAllocationPolicy(zone_));
849 if (it->second == NULL) it->second = new(zone_) ObjectLiteral::Accessors();
// View over the back-edge table embedded in unoptimized code: a uint32
// length followed by (ast_id, pc_offset, loop_depth) triples. Used to patch
// loop back edges for interrupts and on-stack replacement (OSR).
858 class BackEdgeTable {
860 BackEdgeTable(Code* code, DisallowHeapAllocation* required) {
861 ASSERT(code->kind() == Code::FUNCTION);
862 instruction_start_ = code->instruction_start();
863 Address table_address = instruction_start_ + code->back_edge_table_offset();
864 length_ = Memory::uint32_at(table_address);
865 start_ = table_address + kTableLengthSize;
868 uint32_t length() { return length_; }
870 BailoutId ast_id(uint32_t index) {
871 return BailoutId(static_cast<int>(
872 Memory::uint32_at(entry_at(index) + kAstIdOffset)));
875 uint32_t loop_depth(uint32_t index) {
876 return Memory::uint32_at(entry_at(index) + kLoopDepthOffset);
879 uint32_t pc_offset(uint32_t index) {
880 return Memory::uint32_at(entry_at(index) + kPcOffsetOffset);
883 Address pc(uint32_t index) {
884 return instruction_start_ + pc_offset(index);
// BackEdgeState enumerators; the enum header and first enumerator are not
// visible in this sampled listing.
889 ON_STACK_REPLACEMENT,
890 OSR_AFTER_STACK_CHECK
893 // Patch all interrupts with allowed loop depth in the unoptimized code to
894 // unconditionally call replacement_code.
895 static void Patch(Isolate* isolate,
896 Code* unoptimized_code);
898 // Patch the back edge to the target state, provided the correct callee.
899 static void PatchAt(Code* unoptimized_code,
901 BackEdgeState target_state,
902 Code* replacement_code);
904 // Change all patched back edges back to normal interrupts.
905 static void Revert(Isolate* isolate,
906 Code* unoptimized_code);
908 // Change a back edge patched for on-stack replacement to perform a
909 // stack check first.
910 static void AddStackCheck(Handle<Code> code, uint32_t pc_offset);
912 // Revert the patch by AddStackCheck.
913 static void RemoveStackCheck(Handle<Code> code, uint32_t pc_offset);
915 // Return the current patch state of the back edge.
916 static BackEdgeState GetBackEdgeState(Isolate* isolate,
917 Code* unoptimized_code,
921 // Verify that all back edges of a certain loop depth are patched.
922 static bool Verify(Isolate* isolate,
923 Code* unoptimized_code,
924 int loop_nesting_level);
// Address of the index-th (ast_id, pc_offset, loop_depth) entry.
928 Address entry_at(uint32_t index) {
929 ASSERT(index < length_);
930 return start_ + index * kEntrySize;
// Table layout constants: 4-byte length prefix, then 12-byte entries.
933 static const int kTableLengthSize = kIntSize;
934 static const int kAstIdOffset = 0 * kIntSize;
935 static const int kPcOffsetOffset = 1 * kIntSize;
936 static const int kLoopDepthOffset = 2 * kIntSize;
937 static const int kEntrySize = 3 * kIntSize;
940 Address instruction_start_;
945 } } // namespace v8::internal
947 #endif // V8_FULL_CODEGEN_H_