1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28 #ifndef V8_FULL_CODEGEN_H_
29 #define V8_FULL_CODEGEN_H_
33 #include "allocation.h"
34 #include "assert-scope.h"
36 #include "code-stubs.h"
39 #include "data-flow.h"
46 // Forward declarations.
49 // AST node visitor which can tell whether a given statement will be breakable
50 // when the code is compiled by the full compiler in the debugger. This means
51 // that there will be an IC (load/store/call) in the code generated for the
52 // debugger to piggyback on.
53 class BreakableStatementChecker: public AstVisitor {
55 explicit BreakableStatementChecker(Zone* zone) : is_breakable_(false) {
56 InitializeAstVisitor(zone);
// Walk the given statement/expression; the result is reported through
// is_breakable().
59 void Check(Statement* stmt);
60 void Check(Expression* stmt);
// True iff a breakable point was found by the last Check() call.
62 bool is_breakable() { return is_breakable_; }
65 // AST node visit functions.
66 #define DECLARE_VISIT(type) virtual void Visit##type(type* node);
67 AST_NODE_LIST(DECLARE_VISIT)
72 DEFINE_AST_VISITOR_SUBCLASS_MEMBERS();
73 DISALLOW_COPY_AND_ASSIGN(BreakableStatementChecker);
77 // -----------------------------------------------------------------------------
78 // Full code generator.
80 class FullCodeGenerator: public AstVisitor {
87 FullCodeGenerator(MacroAssembler* masm, CompilationInfo* info)
90 scope_(info->scope()),
95 bailout_entries_(info->HasDeoptimizationSupport()
96 ? info->function()->ast_node_count() : 0,
98 back_edges_(2, info->zone()),
99 type_feedback_cells_(info->HasDeoptimizationSupport()
100 ? info->function()->ast_node_count() : 0,
108 static bool MakeCode(CompilationInfo* info);
110 // Encode state and pc-offset as a BitField<type, start, size>.
111 // Only use 30 bits because we encode the result as a smi.
112 class StateField : public BitField<State, 0, 1> { };
113 class PcField : public BitField<unsigned, 1, 30-1> { };
115 static const char* State2String(State state) {
117 case NO_REGISTERS: return "NO_REGISTERS";
118 case TOS_REG: return "TOS_REG";
124 static const int kMaxBackEdgeWeight = 127;
126 // Platform-specific code size multiplier.
127 #if V8_TARGET_ARCH_IA32
128 static const int kCodeSizeMultiplier = 100;
129 #elif V8_TARGET_ARCH_X64
130 static const int kCodeSizeMultiplier = 162;
131 #elif V8_TARGET_ARCH_ARM
132 static const int kCodeSizeMultiplier = 142;
133 #elif V8_TARGET_ARCH_MIPS
134 static const int kCodeSizeMultiplier = 142;
136 #error Unsupported target architecture.
// Base class for tracking nested statement scopes (blocks, loops, try
// statements). Construction pushes this entry onto the codegen's nesting
// stack; the destructor pops it (strict LIFO order, checked by ASSERT_EQ).
145 class NestedStatement BASE_EMBEDDED {
147 explicit NestedStatement(FullCodeGenerator* codegen) : codegen_(codegen) {
148 // Link into codegen's nesting stack.
149 previous_ = codegen->nesting_stack_;
150 codegen->nesting_stack_ = this;
152 virtual ~NestedStatement() {
153 // Unlink from codegen's nesting stack.
154 ASSERT_EQ(this, codegen_->nesting_stack_);
155 codegen_->nesting_stack_ = previous_;
// Downcast helpers; Breakable/Iteration override these to return |this|.
158 virtual Breakable* AsBreakable() { return NULL; }
159 virtual Iteration* AsIteration() { return NULL; }
// Predicates used to locate the target of a break/continue; overridden
// by the corresponding subclasses.
161 virtual bool IsContinueTarget(Statement* target) { return false; }
162 virtual bool IsBreakTarget(Statement* target) { return false; }
164 // Notify the statement that we are exiting it via break, continue, or
165 // return and give it a chance to generate cleanup code. Return the
166 // next outer statement in the nesting stack. We accumulate in
167 // *stack_depth the amount to drop the stack and in *context_length the
168 // number of context chain links to unwind as we traverse the nesting
169 // stack from an exit to its target.
170 virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
// Shortcut to the owning code generator's assembler.
175 MacroAssembler* masm() { return codegen_->masm(); }
// The owning code generator and the next-outer nested statement.
177 FullCodeGenerator* codegen_;
178 NestedStatement* previous_;
181 DISALLOW_COPY_AND_ASSIGN(NestedStatement);
184 // A breakable statement such as a block.
185 class Breakable : public NestedStatement {
187 Breakable(FullCodeGenerator* codegen, BreakableStatement* statement)
188 : NestedStatement(codegen), statement_(statement) {
190 virtual ~Breakable() {}
192 virtual Breakable* AsBreakable() { return this; }
193 virtual bool IsBreakTarget(Statement* target) {
194 return statement() == target;
197 BreakableStatement* statement() { return statement_; }
// Jump target for a 'break' out of this statement.
198 Label* break_label() { return &break_label_; }
201 BreakableStatement* statement_;
205 // An iteration statement such as a while, for, or do loop.
206 class Iteration : public Breakable {
208 Iteration(FullCodeGenerator* codegen, IterationStatement* statement)
209 : Breakable(codegen, statement) {
211 virtual ~Iteration() {}
213 virtual Iteration* AsIteration() { return this; }
214 virtual bool IsContinueTarget(Statement* target) {
215 return statement() == target;
// Jump target for a 'continue' of this loop.
218 Label* continue_label() { return &continue_label_; }
221 Label continue_label_;
224 // A nested block statement.
225 class NestedBlock : public Breakable {
227 NestedBlock(FullCodeGenerator* codegen, Block* block)
228 : Breakable(codegen, block) {
230 virtual ~NestedBlock() {}
// NOTE(review): a block with its own scope presumably contributes one
// context-chain link to unwind on exit — Exit body is elided in this
// listing; confirm against the full source.
232 virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
233 if (statement()->AsBlock()->scope() != NULL) {
240 // The try block of a try/catch statement.
241 class TryCatch : public NestedStatement {
243 explicit TryCatch(FullCodeGenerator* codegen) : NestedStatement(codegen) {
245 virtual ~TryCatch() {}
// Defined out of line; see the implementation file for the unwinding code.
247 virtual NestedStatement* Exit(int* stack_depth, int* context_length);
250 // The try block of a try/finally statement.
251 class TryFinally : public NestedStatement {
253 TryFinally(FullCodeGenerator* codegen, Label* finally_entry)
254 : NestedStatement(codegen), finally_entry_(finally_entry) {
256 virtual ~TryFinally() {}
// Defined out of line; see the implementation file for the unwinding code.
258 virtual NestedStatement* Exit(int* stack_depth, int* context_length);
// Entry label of the associated finally block.
261 Label* finally_entry_;
264 // The finally block of a try/finally statement.
265 class Finally : public NestedStatement {
// Number of extra stack slots a finally block keeps live; dropped when
// exiting through it (see Exit below).
267 static const int kElementCount = 5;
269 explicit Finally(FullCodeGenerator* codegen) : NestedStatement(codegen) { }
270 virtual ~Finally() {}
272 virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
273 *stack_depth += kElementCount;
278 // The body of a for/in loop.
279 class ForIn : public Iteration {
// Number of extra stack slots the for/in loop keeps live (enumeration
// state); dropped when exiting through it (see Exit below).
281 static const int kElementCount = 5;
283 ForIn(FullCodeGenerator* codegen, ForInStatement* statement)
284 : Iteration(codegen, statement) {
288 virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
289 *stack_depth += kElementCount;
295 // The body of a with or catch.
296 class WithOrCatch : public NestedStatement {
298 explicit WithOrCatch(FullCodeGenerator* codegen)
299 : NestedStatement(codegen) {
301 virtual ~WithOrCatch() {}
// NOTE(review): Exit presumably increments *context_length to unwind the
// pushed with/catch context — body is elided in this listing; confirm.
303 virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
309 // Type of a member function that generates inline code for a native function.
310 typedef void (FullCodeGenerator::*InlineFunctionGenerator)(CallRuntime* expr);
312 static const InlineFunctionGenerator kInlineFunctionGenerators[];
314 // A platform-specific utility to overwrite the accumulator register
315 // with a GC-safe value.
316 void ClearAccumulator();
318 // Determine whether or not to inline the smi case for the given
320 bool ShouldInlineSmiCase(Token::Value op);
322 // Helper function to convert a pure value into a test context. The value
323 // is expected on the stack or the accumulator, depending on the platform.
324 // See the platform-specific implementation for details.
325 void DoTest(Expression* condition,
328 Label* fall_through);
329 void DoTest(const TestContext* context);
331 // Helper function to split control flow and avoid a branch to the
332 // fall-through label if it is set up.
333 #if V8_TARGET_ARCH_MIPS
334 void Split(Condition cc,
339 Label* fall_through);
340 #else // All non-mips arch.
341 void Split(Condition cc,
344 Label* fall_through);
345 #endif // V8_TARGET_ARCH_MIPS
347 // Load the value of a known (PARAMETER, LOCAL, or CONTEXT) variable into
348 // a register. Emits a context chain walk if necessary (so does
349 // SetVar) so avoid calling both on the same variable.
350 void GetVar(Register destination, Variable* var);
352 // Assign to a known (PARAMETER, LOCAL, or CONTEXT) variable. If it's in
353 // the context, the write barrier will be emitted and source, scratch0,
354 // scratch1 will be clobbered. Emits a context chain walk if necessary
355 // (so does GetVar) so avoid calling both on the same variable.
356 void SetVar(Variable* var,
361 // An operand used to read/write a stack-allocated (PARAMETER or LOCAL)
362 // variable. Writing does not need the write barrier.
363 MemOperand StackOperand(Variable* var);
365 // An operand used to read/write a known (PARAMETER, LOCAL, or CONTEXT)
366 // variable. May emit code to traverse the context chain, loading the
367 // found context into the scratch register. Writing to this operand will
368 // need the write barrier if location is CONTEXT.
369 MemOperand VarOperand(Variable* var, Register scratch);
371 void VisitForEffect(Expression* expr) {
372 EffectContext context(this);
374 PrepareForBailout(expr, NO_REGISTERS);
377 void VisitForAccumulatorValue(Expression* expr) {
378 AccumulatorValueContext context(this);
380 PrepareForBailout(expr, TOS_REG);
383 void VisitForStackValue(Expression* expr) {
384 StackValueContext context(this);
386 PrepareForBailout(expr, NO_REGISTERS);
389 void VisitForControl(Expression* expr,
392 Label* fall_through) {
393 TestContext context(this, expr, if_true, if_false, fall_through);
395 // For test contexts, we prepare for bailout before branching, not at
396 // the end of the entire expression. This happens as part of visiting
400 void VisitInDuplicateContext(Expression* expr);
402 void VisitDeclarations(ZoneList<Declaration*>* declarations);
403 void DeclareModules(Handle<FixedArray> descriptions);
404 void DeclareGlobals(Handle<FixedArray> pairs);
405 int DeclareGlobalsFlags();
407 // Generate code to allocate all (including nested) modules and contexts.
408 // Because of recursive linking and the presence of module alias declarations,
409 // this has to be a separate pass _before_ populating or executing any module.
410 void AllocateModules(ZoneList<Declaration*>* declarations);
412 // Generate code to create an iterator result object. The "value" property is
413 // set to a value popped from the stack, and "done" is set according to the
414 // argument. The result object is left in the result register.
415 void EmitCreateIteratorResult(bool done);
417 // Try to perform a comparison as a fast inlined literal compare if
418 // the operands allow it. Returns true if the compare operations
419 // has been matched and all code generated; false otherwise.
420 bool TryLiteralCompare(CompareOperation* compare);
422 // Platform-specific code for comparing the type of a value with
423 // a given literal string.
424 void EmitLiteralCompareTypeof(Expression* expr,
425 Expression* sub_expr,
426 Handle<String> check);
428 // Platform-specific code for equality comparison with a nil-like value.
429 void EmitLiteralCompareNil(CompareOperation* expr,
430 Expression* sub_expr,
434 void PrepareForBailout(Expression* node, State state);
435 void PrepareForBailoutForId(BailoutId id, State state);
437 // Cache cell support. This associates AST ids with global property cells
438 // that will be cleared during GC and collected by the type-feedback oracle.
439 void RecordTypeFeedbackCell(TypeFeedbackId id, Handle<Cell> cell);
441 // Record a call's return site offset, used to rebuild the frame if the
442 // called function was inlined at the site.
443 void RecordJSReturnSite(Call* call);
445 // Prepare for bailout before a test (or compare) and branch. If
446 // should_normalize, then the following comparison will not handle the
447 // canonical JS true value so we will insert a (dead) test against true at
448 // the actual bailout target from the optimized code. If not
449 // should_normalize, the true and false labels are ignored.
450 void PrepareForBailoutBeforeSplit(Expression* expr,
451 bool should_normalize,
455 // If enabled, emit debug code for checking that the current context is
456 // neither a with nor a catch context.
457 void EmitDebugCheckDeclarationContext(Variable* variable);
459 // This is meant to be called at loop back edges, |back_edge_target| is
460 // the jump target of the back edge and is used to approximate the amount
461 // of code inside the loop.
462 void EmitBackEdgeBookkeeping(IterationStatement* stmt,
463 Label* back_edge_target);
464 // Record the OSR AST id corresponding to a back edge in the code.
465 void RecordBackEdge(BailoutId osr_ast_id);
466 // Emit a table of back edge ids, pcs and loop depths into the code stream.
467 // Return the offset of the start of the table.
468 unsigned EmitBackEdgeTable();
470 void EmitProfilingCounterDecrement(int delta);
471 void EmitProfilingCounterReset();
473 // Emit code to pop values from the stack associated with nested statements
474 // like try/catch, try/finally, etc, running the finallies and unwinding the
475 // handlers as needed.
476 void EmitUnwindBeforeReturn();
478 // Platform-specific return sequence
479 void EmitReturnSequence();
481 // Platform-specific code sequences for calls
482 void EmitCallWithStub(Call* expr);
483 void EmitCallWithIC(Call* expr);
484 void EmitKeyedCallWithIC(Call* expr, Expression* key);
486 // Platform-specific code for inline runtime calls.
487 InlineFunctionGenerator FindInlineFunctionGenerator(Runtime::FunctionId id);
489 void EmitInlineRuntimeCall(CallRuntime* expr);
491 #define EMIT_INLINE_RUNTIME_CALL(name, x, y) \
492 void Emit##name(CallRuntime* expr);
493 INLINE_FUNCTION_LIST(EMIT_INLINE_RUNTIME_CALL)
494 INLINE_RUNTIME_FUNCTION_LIST(EMIT_INLINE_RUNTIME_CALL)
495 #undef EMIT_INLINE_RUNTIME_CALL
497 // Platform-specific code for resuming generators.
498 void EmitGeneratorResume(Expression *generator,
500 JSGeneratorObject::ResumeMode resume_mode);
502 // Platform-specific code for loading variables.
503 void EmitLoadGlobalCheckExtensions(Variable* var,
504 TypeofState typeof_state,
506 MemOperand ContextSlotOperandCheckExtensions(Variable* var, Label* slow);
507 void EmitDynamicLookupFastCase(Variable* var,
508 TypeofState typeof_state,
511 void EmitVariableLoad(VariableProxy* proxy);
513 void EmitAccessor(Expression* expression);
515 // Expects the arguments and the function already pushed.
516 void EmitResolvePossiblyDirectEval(int arg_count);
518 // Platform-specific support for allocating a new closure based on
519 // the given function info.
520 void EmitNewClosure(Handle<SharedFunctionInfo> info, bool pretenure);
522 // Platform-specific support for compiling assignments.
524 // Load a value from a named property.
525 // The receiver is left on the stack by the IC.
526 void EmitNamedPropertyLoad(Property* expr);
528 // Load a value from a keyed property.
529 // The receiver and the key is left on the stack by the IC.
530 void EmitKeyedPropertyLoad(Property* expr);
532 // Apply the compound assignment operator. Expects the left operand on top
533 // of the stack and the right one in the accumulator.
534 void EmitBinaryOp(BinaryOperation* expr,
538 // Helper functions for generating inlined smi code for certain
539 // binary operations.
540 void EmitInlineSmiBinaryOp(BinaryOperation* expr,
546 // Assign to the given expression as if via '='. The right-hand-side value
547 // is expected in the accumulator.
548 void EmitAssignment(Expression* expr);
550 // Complete a variable assignment. The right-hand-side value is expected
551 // in the accumulator.
552 void EmitVariableAssignment(Variable* var,
555 // Complete a named property assignment. The receiver is expected on top
556 // of the stack and the right-hand-side value in the accumulator.
557 void EmitNamedPropertyAssignment(Assignment* expr);
559 // Complete a keyed property assignment. The receiver and key are
560 // expected on top of the stack and the right-hand-side value in the
562 void EmitKeyedPropertyAssignment(Assignment* expr);
564 void CallIC(Handle<Code> code,
565 ContextualMode mode = NOT_CONTEXTUAL,
566 TypeFeedbackId id = TypeFeedbackId::None());
568 void CallLoadIC(ContextualMode mode,
569 TypeFeedbackId id = TypeFeedbackId::None());
570 void CallStoreIC(ContextualMode mode,
571 TypeFeedbackId id = TypeFeedbackId::None());
573 void SetFunctionPosition(FunctionLiteral* fun);
574 void SetReturnPosition(FunctionLiteral* fun);
575 void SetStatementPosition(Statement* stmt);
576 void SetExpressionPosition(Expression* expr);
577 void SetStatementPosition(int pos);
578 void SetSourcePosition(int pos);
580 // Non-local control flow support.
581 void EnterFinallyBlock();
582 void ExitFinallyBlock();
584 // Loop nesting counter.
585 int loop_depth() { return loop_depth_; }
586 void increment_loop_depth() { loop_depth_++; }
587 void decrement_loop_depth() {
588 ASSERT(loop_depth_ > 0);
592 MacroAssembler* masm() { return masm_; }
594 class ExpressionContext;
595 const ExpressionContext* context() { return context_; }
596 void set_new_context(const ExpressionContext* context) { context_ = context; }
598 Handle<Script> script() { return info_->script(); }
599 bool is_eval() { return info_->is_eval(); }
600 bool is_native() { return info_->is_native(); }
601 bool is_classic_mode() { return language_mode() == CLASSIC_MODE; }
602 StrictModeFlag strict_mode() {
603 return is_classic_mode() ? kNonStrictMode : kStrictMode;
605 LanguageMode language_mode() { return function()->language_mode(); }
606 FunctionLiteral* function() { return info_->function(); }
607 Scope* scope() { return scope_; }
609 static Register result_register();
610 static Register context_register();
612 // Set fields in the stack frame. Offsets are the frame pointer relative
613 // offsets defined in, e.g., StandardFrameConstants.
614 void StoreToFrameField(int frame_offset, Register value);
616 // Load a value from the current context. Indices are defined as an enum
617 // in v8::internal::Context.
618 void LoadContextField(Register dst, int context_index);
620 // Push the function argument for the runtime functions PushWithContext
621 // and PushCatchContext.
622 void PushFunctionArgumentForContextAllocation();
624 // AST node visit functions.
625 #define DECLARE_VISIT(type) virtual void Visit##type(type* node);
626 AST_NODE_LIST(DECLARE_VISIT)
629 void VisitComma(BinaryOperation* expr);
630 void VisitLogicalExpression(BinaryOperation* expr);
631 void VisitArithmeticExpression(BinaryOperation* expr);
633 void VisitForTypeofValue(Expression* expr);
636 void PopulateDeoptimizationData(Handle<Code> code);
637 void PopulateTypeFeedbackInfo(Handle<Code> code);
638 void PopulateTypeFeedbackCells(Handle<Code> code);
640 Handle<FixedArray> handler_table() { return handler_table_; }
642 struct BailoutEntry {
644 unsigned pc_and_state;
647 struct BackEdgeEntry {
653 struct TypeFeedbackCellEntry {
654 TypeFeedbackId ast_id;
// RAII base class for expression evaluation contexts: the constructor
// installs |this| as the codegen's current context, and the destructor
// restores the previously active one.
659 class ExpressionContext BASE_EMBEDDED {
661 explicit ExpressionContext(FullCodeGenerator* codegen)
662 : masm_(codegen->masm()), old_(codegen->context()), codegen_(codegen) {
663 codegen->set_new_context(this);
666 virtual ~ExpressionContext() {
667 codegen_->set_new_context(old_);
670 Isolate* isolate() const { return codegen_->isolate(); }
672 // Convert constant control flow (true or false) to the result expected for
673 // this expression context.
674 virtual void Plug(bool flag) const = 0;
676 // Emit code to convert a pure value (in a register, known variable
677 // location, as a literal, or on top of the stack) into the result
678 // expected according to this expression context.
679 virtual void Plug(Register reg) const = 0;
680 virtual void Plug(Variable* var) const = 0;
681 virtual void Plug(Handle<Object> lit) const = 0;
682 virtual void Plug(Heap::RootListIndex index) const = 0;
683 virtual void PlugTOS() const = 0;
685 // Emit code to convert pure control flow to a pair of unbound labels into
686 // the result expected according to this expression context. The
687 // implementation will bind both labels unless it's a TestContext, which
688 // won't bind them at this point.
689 virtual void Plug(Label* materialize_true,
690 Label* materialize_false) const = 0;
692 // Emit code to discard count elements from the top of stack, then convert
693 // a pure value into the result expected according to this expression
695 virtual void DropAndPlug(int count, Register reg) const = 0;
697 // Set up branch labels for a test expression. The three Label** parameters
698 // are output parameters.
699 virtual void PrepareTest(Label* materialize_true,
700 Label* materialize_false,
703 Label** fall_through) const = 0;
705 // Returns true if we are evaluating only for side effects (i.e. if the
706 // result will be discarded).
707 virtual bool IsEffect() const { return false; }
709 // Returns true if we are evaluating for the value (in accu/on stack).
710 virtual bool IsAccumulatorValue() const { return false; }
711 virtual bool IsStackValue() const { return false; }
713 // Returns true if we are branching on the value rather than materializing
714 // it. Only used for asserts.
715 virtual bool IsTest() const { return false; }
718 FullCodeGenerator* codegen() const { return codegen_; }
719 MacroAssembler* masm() const { return masm_; }
// Saved state: the assembler, the previously active context, and the owner.
720 MacroAssembler* masm_;
723 const ExpressionContext* old_;
724 FullCodeGenerator* codegen_;
// Context in which the expression's value is left in the accumulator
// (result) register.
727 class AccumulatorValueContext : public ExpressionContext {
729 explicit AccumulatorValueContext(FullCodeGenerator* codegen)
730 : ExpressionContext(codegen) { }
732 virtual void Plug(bool flag) const;
733 virtual void Plug(Register reg) const;
734 virtual void Plug(Label* materialize_true, Label* materialize_false) const;
735 virtual void Plug(Variable* var) const;
736 virtual void Plug(Handle<Object> lit) const;
737 virtual void Plug(Heap::RootListIndex) const;
738 virtual void PlugTOS() const;
739 virtual void DropAndPlug(int count, Register reg) const;
740 virtual void PrepareTest(Label* materialize_true,
741 Label* materialize_false,
744 Label** fall_through) const;
745 virtual bool IsAccumulatorValue() const { return true; }
// Context in which the expression's value is pushed onto the stack.
748 class StackValueContext : public ExpressionContext {
750 explicit StackValueContext(FullCodeGenerator* codegen)
751 : ExpressionContext(codegen) { }
753 virtual void Plug(bool flag) const;
754 virtual void Plug(Register reg) const;
755 virtual void Plug(Label* materialize_true, Label* materialize_false) const;
756 virtual void Plug(Variable* var) const;
757 virtual void Plug(Handle<Object> lit) const;
758 virtual void Plug(Heap::RootListIndex) const;
759 virtual void PlugTOS() const;
760 virtual void DropAndPlug(int count, Register reg) const;
761 virtual void PrepareTest(Label* materialize_true,
762 Label* materialize_false,
765 Label** fall_through) const;
766 virtual bool IsStackValue() const { return true; }
// Context in which the expression is evaluated for control flow: the value
// is branched on (true_label_/false_label_) rather than materialized.
769 class TestContext : public ExpressionContext {
771 TestContext(FullCodeGenerator* codegen,
772 Expression* condition,
776 : ExpressionContext(codegen),
777 condition_(condition),
778 true_label_(true_label),
779 false_label_(false_label),
780 fall_through_(fall_through) { }
// Checked downcast; only valid when context->IsTest() holds.
782 static const TestContext* cast(const ExpressionContext* context) {
783 ASSERT(context->IsTest());
784 return reinterpret_cast<const TestContext*>(context);
787 Expression* condition() const { return condition_; }
788 Label* true_label() const { return true_label_; }
789 Label* false_label() const { return false_label_; }
790 Label* fall_through() const { return fall_through_; }
792 virtual void Plug(bool flag) const;
793 virtual void Plug(Register reg) const;
794 virtual void Plug(Label* materialize_true, Label* materialize_false) const;
795 virtual void Plug(Variable* var) const;
796 virtual void Plug(Handle<Object> lit) const;
797 virtual void Plug(Heap::RootListIndex) const;
798 virtual void PlugTOS() const;
799 virtual void DropAndPlug(int count, Register reg) const;
800 virtual void PrepareTest(Label* materialize_true,
801 Label* materialize_false,
804 Label** fall_through) const;
805 virtual bool IsTest() const { return true; }
808 Expression* condition_;
811 Label* fall_through_;
// Context in which the expression is evaluated only for its side effects;
// the resulting value is discarded.
814 class EffectContext : public ExpressionContext {
816 explicit EffectContext(FullCodeGenerator* codegen)
817 : ExpressionContext(codegen) { }
819 virtual void Plug(bool flag) const;
820 virtual void Plug(Register reg) const;
821 virtual void Plug(Label* materialize_true, Label* materialize_false) const;
822 virtual void Plug(Variable* var) const;
823 virtual void Plug(Handle<Object> lit) const;
824 virtual void Plug(Heap::RootListIndex) const;
825 virtual void PlugTOS() const;
826 virtual void DropAndPlug(int count, Register reg) const;
827 virtual void PrepareTest(Label* materialize_true,
828 Label* materialize_false,
831 Label** fall_through) const;
832 virtual bool IsEffect() const { return true; }
835 MacroAssembler* masm_;
836 CompilationInfo* info_;
839 NestedStatement* nesting_stack_;
841 ZoneList<Handle<Object> >* globals_;
842 Handle<FixedArray> modules_;
844 const ExpressionContext* context_;
845 ZoneList<BailoutEntry> bailout_entries_;
846 GrowableBitVector prepared_bailout_ids_;
847 ZoneList<BackEdgeEntry> back_edges_;
848 ZoneList<TypeFeedbackCellEntry> type_feedback_cells_;
850 Handle<FixedArray> handler_table_;
851 Handle<Cell> profiling_counter_;
852 bool generate_debug_code_;
854 friend class NestedStatement;
856 DEFINE_AST_VISITOR_SUBCLASS_MEMBERS();
857 DISALLOW_COPY_AND_ASSIGN(FullCodeGenerator);
861 // A map from property names to getter/setter pairs allocated in the zone.
862 class AccessorTable: public TemplateHashMap<Literal,
863 ObjectLiteral::Accessors,
864 ZoneAllocationPolicy> {
866 explicit AccessorTable(Zone* zone) :
867 TemplateHashMap<Literal, ObjectLiteral::Accessors,
868 ZoneAllocationPolicy>(Literal::Match,
869 ZoneAllocationPolicy(zone)),
// Find-or-create: inserts a zone-allocated empty Accessors record on the
// first lookup of |literal|, so callers can always fill in getter/setter.
872 Iterator lookup(Literal* literal) {
873 Iterator it = find(literal, true, ZoneAllocationPolicy(zone_));
874 if (it->second == NULL) it->second = new(zone_) ObjectLiteral::Accessors();
// Read-only accessor for the back edge table embedded in unoptimized
// (full-codegen) code; used when patching back edges for on-stack
// replacement (OSR).
883 class BackEdgeTable {
// The table lives in the instruction stream at back_edge_table_offset():
// a uint32 entry count followed by fixed-size entries.
885 BackEdgeTable(Code* code, DisallowHeapAllocation* required) {
886 ASSERT(code->kind() == Code::FUNCTION);
887 instruction_start_ = code->instruction_start();
888 Address table_address = instruction_start_ + code->back_edge_table_offset();
889 length_ = Memory::uint32_at(table_address);
890 start_ = table_address + kTableLengthSize;
// Number of entries in the table.
893 uint32_t length() { return length_; }
// Per-entry accessors; field offsets are the kXxxOffset constants below.
895 BailoutId ast_id(uint32_t index) {
896 return BailoutId(static_cast<int>(
897 Memory::uint32_at(entry_at(index) + kAstIdOffset)));
900 uint32_t loop_depth(uint32_t index) {
901 return Memory::uint32_at(entry_at(index) + kLoopDepthOffset);
904 uint32_t pc_offset(uint32_t index) {
905 return Memory::uint32_at(entry_at(index) + kPcOffsetOffset);
// Absolute code address of the back edge.
908 Address pc(uint32_t index) {
909 return instruction_start_ + pc_offset(index);
// Back edge patch states (the enum header line is elided in this listing).
914 ON_STACK_REPLACEMENT,
915 OSR_AFTER_STACK_CHECK
918 // Patch all interrupts with allowed loop depth in the unoptimized code to
919 // unconditionally call replacement_code.
920 static void Patch(Isolate* isolate,
921 Code* unoptimized_code);
923 // Patch the back edge to the target state, provided the correct callee.
924 static void PatchAt(Code* unoptimized_code,
926 BackEdgeState target_state,
927 Code* replacement_code);
929 // Change all patched back edges back to normal interrupts.
930 static void Revert(Isolate* isolate,
931 Code* unoptimized_code);
933 // Change a back edge patched for on-stack replacement to perform a
934 // stack check first.
935 static void AddStackCheck(Handle<Code> code, uint32_t pc_offset);
937 // Revert the patch by AddStackCheck.
938 static void RemoveStackCheck(Handle<Code> code, uint32_t pc_offset);
940 // Return the current patch state of the back edge.
941 static BackEdgeState GetBackEdgeState(Isolate* isolate,
942 Code* unoptimized_code,
946 // Verify that all back edges of a certain loop depth are patched.
947 static bool Verify(Isolate* isolate,
948 Code* unoptimized_code,
949 int loop_nesting_level);
// Address of the |index|-th entry.
953 Address entry_at(uint32_t index) {
954 ASSERT(index < length_);
955 return start_ + index * kEntrySize;
// Table layout: uint32 length, then entries of {ast id, pc offset,
// loop depth}, each field kIntSize wide.
958 static const int kTableLengthSize = kIntSize;
959 static const int kAstIdOffset = 0 * kIntSize;
960 static const int kPcOffsetOffset = 1 * kIntSize;
961 static const int kLoopDepthOffset = 2 * kIntSize;
962 static const int kEntrySize = 3 * kIntSize;
965 Address instruction_start_;
970 } } // namespace v8::internal
972 #endif // V8_FULL_CODEGEN_H_