1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28 #ifndef V8_FULL_CODEGEN_H_
29 #define V8_FULL_CODEGEN_H_
33 #include "allocation.h"
35 #include "code-stubs.h"
42 // Forward declarations.
45 // AST node visitor which can tell whether a given statement will be breakable
46 // when the code is compiled by the full compiler in the debugger. This means
47 // that there will be an IC (load/store/call) in the code generated for the
48 // debugger to piggyback on.
// NOTE(review): the embedded source line numbers jump (49, 51, 53, ...), so
// access specifiers, the #undef of DECLARE_VISIT, the is_breakable_ member,
// and the closing brace of this class are elided from this view -- confirm
// against the full header before editing.
49 class BreakableStatementChecker: public AstVisitor {
// Starts out assuming the checked node is not breakable; presumably the
// generated Visit* overrides below set the flag when they find an IC site.
51 BreakableStatementChecker() : is_breakable_(false) {}
// Entry points: walk the given statement or expression subtree.
53 void Check(Statement* stmt);
54 void Check(Expression* stmt);
// Result query after Check(): whether the debugger can break here
// (see the class comment above this class).
56 bool is_breakable() { return is_breakable_; }
59 // AST node visit functions.
60 #define DECLARE_VISIT(type) virtual void Visit##type(type* node);
61 AST_NODE_LIST(DECLARE_VISIT)
66 DISALLOW_COPY_AND_ASSIGN(BreakableStatementChecker);
70 // -----------------------------------------------------------------------------
71 // Full code generator.
// NOTE(review): elided listing -- access labels, the State enum declaration,
// several constructor initializers, and the switch wrapping State2String's
// cases are not visible here; confirm against the full header.
73 class FullCodeGenerator: public AstVisitor {
80 FullCodeGenerator(MacroAssembler* masm, CompilationInfo* info)
83 scope_(info->scope()),
// Pre-size the deopt bookkeeping lists (one slot per AST node) only when
// deoptimization support is compiled in; otherwise keep them empty.
88 bailout_entries_(info->HasDeoptimizationSupport()
89 ? info->function()->ast_node_count() : 0),
90 stack_checks_(2), // There's always at least one.
91 type_feedback_cells_(info->HasDeoptimizationSupport()
92 ? info->function()->ast_node_count() : 0),
93 ic_total_count_(0) { }
// Static entry point: generate full (unoptimized) code for the function
// described by |info|; presumably returns whether generation succeeded.
95 static bool MakeCode(CompilationInfo* info);
97 // Encode state and pc-offset as a BitField<type, start, size>.
98 // Only use 30 bits because we encode the result as a smi.
99 class StateField : public BitField<State, 0, 1> { };
100 class PcField : public BitField<unsigned, 1, 30-1> { };
// Debug aid: printable name for a State value.
102 static const char* State2String(State state) {
104 case NO_REGISTERS: return "NO_REGISTERS";
105 case TOS_REG: return "TOS_REG";
// Base class for an intrusive stack of statements the code generator is
// currently nested inside (loops, try blocks, with/catch, ...). The
// constructor/destructor pair links/unlinks RAII-style, so instances must
// be destroyed in strict LIFO order.
// NOTE(review): listing elided -- closing braces and access labels between
// members are not visible here.
117 class NestedStatement BASE_EMBEDDED {
119 explicit NestedStatement(FullCodeGenerator* codegen) : codegen_(codegen) {
120 // Link into codegen's nesting stack.
121 previous_ = codegen->nesting_stack_;
122 codegen->nesting_stack_ = this;
124 virtual ~NestedStatement() {
125 // Unlink from codegen's nesting stack.
126 ASSERT_EQ(this, codegen_->nesting_stack_);
127 codegen_->nesting_stack_ = previous_;
// Safe downcasts: overridden by the Breakable/Iteration subclasses.
130 virtual Breakable* AsBreakable() { return NULL; }
131 virtual Iteration* AsIteration() { return NULL; }
// Target tests used when unwinding for break/continue; the base class
// is never a target.
133 virtual bool IsContinueTarget(Statement* target) { return false; }
134 virtual bool IsBreakTarget(Statement* target) { return false; }
136 // Notify the statement that we are exiting it via break, continue, or
137 // return and give it a chance to generate cleanup code. Return the
138 // next outer statement in the nesting stack. We accumulate in
139 // *stack_depth the amount to drop the stack and in *context_length the
140 // number of context chain links to unwind as we traverse the nesting
141 // stack from an exit to its target.
142 virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
147 MacroAssembler* masm() { return codegen_->masm(); }
149 FullCodeGenerator* codegen_;
150 NestedStatement* previous_;
153 DISALLOW_COPY_AND_ASSIGN(NestedStatement);
156 // A breakable statement such as a block.
// NOTE(review): elided listing -- access labels, the break_label_ member
// declaration, and the closing brace are not visible here.
157 class Breakable : public NestedStatement {
159 Breakable(FullCodeGenerator* codegen, BreakableStatement* statement)
160 : NestedStatement(codegen), statement_(statement) {
162 virtual ~Breakable() {}
164 virtual Breakable* AsBreakable() { return this; }
// A 'break' targets this entry iff it names the same AST statement.
165 virtual bool IsBreakTarget(Statement* target) {
166 return statement() == target;
169 BreakableStatement* statement() { return statement_; }
// Label bound where control resumes after a break out of this statement.
170 Label* break_label() { return &break_label_; }
173 BreakableStatement* statement_;
177 // An iteration statement such as a while, for, or do loop.
// NOTE(review): elided listing -- access labels and the closing brace are
// not visible here.
178 class Iteration : public Breakable {
180 Iteration(FullCodeGenerator* codegen, IterationStatement* statement)
181 : Breakable(codegen, statement) {
183 virtual ~Iteration() {}
185 virtual Iteration* AsIteration() { return this; }
// A 'continue' targets this entry iff it names the same AST statement.
186 virtual bool IsContinueTarget(Statement* target) {
187 return statement() == target;
// Label bound where a 'continue' in the loop body jumps to.
190 Label* continue_label() { return &continue_label_; }
193 Label continue_label_;
196 // A nested block statement.
// NOTE(review): elided listing -- the body of Exit() past the scope test
// and the closing braces are not visible here.
197 class NestedBlock : public Breakable {
199 NestedBlock(FullCodeGenerator* codegen, Block* block)
200 : Breakable(codegen, block) {
202 virtual ~NestedBlock() {}
// On exit, blocks that introduced their own scope presumably need an
// extra context link unwound -- confirm in the elided body.
204 virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
205 if (statement()->AsBlock()->scope() != NULL) {
212 // The try block of a try/catch statement.
// NOTE(review): elided listing -- closing braces not visible here.
213 class TryCatch : public NestedStatement {
215 explicit TryCatch(FullCodeGenerator* codegen) : NestedStatement(codegen) {
217 virtual ~TryCatch() {}
// Out-of-line: emits cleanup when control leaves the try block early.
219 virtual NestedStatement* Exit(int* stack_depth, int* context_length);
222 // The try block of a try/finally statement.
// NOTE(review): elided listing -- closing braces and access labels are not
// visible here.
223 class TryFinally : public NestedStatement {
225 TryFinally(FullCodeGenerator* codegen, Label* finally_entry)
226 : NestedStatement(codegen), finally_entry_(finally_entry) {
228 virtual ~TryFinally() {}
// Out-of-line: early exits must run the finally code at finally_entry_.
230 virtual NestedStatement* Exit(int* stack_depth, int* context_length);
// Entry label of the associated finally block (not owned).
233 Label* finally_entry_;
236 // The finally block of a try/finally statement.
// NOTE(review): elided listing -- the remainder of Exit() and the closing
// braces are not visible here.
237 class Finally : public NestedStatement {
// Number of extra stack slots a finally block keeps live while running.
239 static const int kElementCount = 2;
241 explicit Finally(FullCodeGenerator* codegen) : NestedStatement(codegen) { }
242 virtual ~Finally() {}
// Exiting through a finally block drops its bookkeeping slots.
244 virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
245 *stack_depth += kElementCount;
250 // The body of a for/in loop.
// NOTE(review): elided listing -- destructor, the remainder of Exit(), and
// closing braces are not visible here.
251 class ForIn : public Iteration {
// Number of stack slots the for/in loop keeps live (iteration state).
253 static const int kElementCount = 5;
255 ForIn(FullCodeGenerator* codegen, ForInStatement* statement)
256 : Iteration(codegen, statement) {
// Exiting a for/in loop drops its iteration-state slots.
260 virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
261 *stack_depth += kElementCount;
267 // The body of a with or catch.
// NOTE(review): elided listing -- the body of Exit() (presumably bumping
// *context_length for the pushed context) and closing braces are not
// visible here.
268 class WithOrCatch : public NestedStatement {
270 explicit WithOrCatch(FullCodeGenerator* codegen)
271 : NestedStatement(codegen) {
273 virtual ~WithOrCatch() {}
275 virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
281 // Type of a member function that generates inline code for a native function.
282 typedef void (FullCodeGenerator::*InlineFunctionGenerator)(CallRuntime* expr);
// Table of generator member functions, indexed by inline-function id
// (defined in the platform-independent .cc -- not visible here).
284 static const InlineFunctionGenerator kInlineFunctionGenerators[];
286 // A platform-specific utility to overwrite the accumulator register
287 // with a GC-safe value.
288 void ClearAccumulator();
290 // Determine whether or not to inline the smi case for the given
292 bool ShouldInlineSmiCase(Token::Value op);
294 // Helper function to convert a pure value into a test context. The value
295 // is expected on the stack or the accumulator, depending on the platform.
296 // See the platform-specific implementation for details.
297 void DoTest(Expression* condition,
300 Label* fall_through);
301 void DoTest(const TestContext* context);
303 // Helper function to split control flow and avoid a branch to the
304 // fall-through label if it is set up.
// NOTE(review): the elided middle parameters of the two Split overloads
// differ per architecture (hence the #ifdef); confirm in the full header.
305 #ifdef V8_TARGET_ARCH_MIPS
306 void Split(Condition cc,
311 Label* fall_through);
312 #else // All non-mips arch.
313 void Split(Condition cc,
316 Label* fall_through);
317 #endif // V8_TARGET_ARCH_MIPS
319 // Load the value of a known (PARAMETER, LOCAL, or CONTEXT) variable into
320 // a register. Emits a context chain walk if necessary (so does
321 // SetVar) so avoid calling both on the same variable.
322 void GetVar(Register destination, Variable* var);
324 // Assign to a known (PARAMETER, LOCAL, or CONTEXT) variable. If it's in
325 // the context, the write barrier will be emitted and source, scratch0,
326 // scratch1 will be clobbered. Emits a context chain walk if necessary
327 // (so does GetVar) so avoid calling both on the same variable.
328 void SetVar(Variable* var,
333 // An operand used to read/write a stack-allocated (PARAMETER or LOCAL)
334 // variable. Writing does not need the write barrier.
335 MemOperand StackOperand(Variable* var);
337 // An operand used to read/write a known (PARAMETER, LOCAL, or CONTEXT)
338 // variable. May emit code to traverse the context chain, loading the
339 // found context into the scratch register. Writing to this operand will
340 // need the write barrier if location is CONTEXT.
341 MemOperand VarOperand(Variable* var, Register scratch);
// Visit an expression in a given expression context; each wrapper below
// installs the context RAII-style, then records a bailout point with the
// expected state (NO_REGISTERS / TOS_REG).
343 void VisitForEffect(Expression* expr) {
344 EffectContext context(this);
346 PrepareForBailout(expr, NO_REGISTERS);
349 void VisitForAccumulatorValue(Expression* expr) {
350 AccumulatorValueContext context(this);
352 PrepareForBailout(expr, TOS_REG);
355 void VisitForStackValue(Expression* expr) {
356 StackValueContext context(this);
358 PrepareForBailout(expr, NO_REGISTERS);
361 void VisitForControl(Expression* expr,
364 Label* fall_through) {
365 TestContext context(this, expr, if_true, if_false, fall_through);
367 // For test contexts, we prepare for bailout before branching, not at
368 // the end of the entire expression. This happens as part of visiting
372 void VisitInDuplicateContext(Expression* expr);
// Declarations and global-declaration flag computation.
374 void VisitDeclarations(ZoneList<Declaration*>* declarations);
375 void DeclareGlobals(Handle<FixedArray> pairs);
376 int DeclareGlobalsFlags();
378 // Try to perform a comparison as a fast inlined literal compare if
379 // the operands allow it. Returns true if the compare operation
380 // has been matched and all code generated; false otherwise.
381 bool TryLiteralCompare(CompareOperation* compare);
383 // Platform-specific code for comparing the type of a value with
384 // a given literal string.
385 void EmitLiteralCompareTypeof(Expression* expr,
386 Expression* sub_expr,
387 Handle<String> check);
389 // Platform-specific code for equality comparison with a nil-like value.
390 void EmitLiteralCompareNil(CompareOperation* expr,
391 Expression* sub_expr,
// Record a bailout (deopt entry) point for an AST node or explicit id,
// with the expected register state at that point.
395 void PrepareForBailout(Expression* node, State state);
396 void PrepareForBailoutForId(unsigned id, State state);
398 // Cache cell support. This associates AST ids with global property cells
399 // that will be cleared during GC and collected by the type-feedback oracle.
400 void RecordTypeFeedbackCell(unsigned id, Handle<JSGlobalPropertyCell> cell);
402 // Record a call's return site offset, used to rebuild the frame if the
403 // called function was inlined at the site.
404 void RecordJSReturnSite(Call* call);
406 // Prepare for bailout before a test (or compare) and branch. If
407 // should_normalize, then the following comparison will not handle the
408 // canonical JS true value so we will insert a (dead) test against true at
409 // the actual bailout target from the optimized code. If not
410 // should_normalize, the true and false labels are ignored.
411 void PrepareForBailoutBeforeSplit(Expression* expr,
412 bool should_normalize,
416 // If enabled, emit debug code for checking that the current context is
417 // neither a with nor a catch context.
418 void EmitDebugCheckDeclarationContext(Variable* variable);
420 // Platform-specific code for checking the stack limit at the back edge of
422 // This is meant to be called at loop back edges, |back_edge_target| is
423 // the jump target of the back edge and is used to approximate the amount
424 // of code inside the loop.
425 void EmitStackCheck(IterationStatement* stmt, Label* back_edge_target);
426 // Record the OSR AST id corresponding to a stack check in the code.
427 void RecordStackCheck(unsigned osr_ast_id);
428 // Emit a table of stack check ids and pcs into the code stream. Return
429 // the offset of the start of the table.
430 unsigned EmitStackCheckTable();
// Maintenance of the invocation/back-edge profiling counter.
432 void EmitProfilingCounterDecrement(int delta);
433 void EmitProfilingCounterReset();
435 // Platform-specific return sequence
436 void EmitReturnSequence();
438 // Platform-specific code sequences for calls
439 void EmitCallWithStub(Call* expr, CallFunctionFlags flags);
440 void EmitCallWithIC(Call* expr, Handle<Object> name, RelocInfo::Mode mode);
441 void EmitKeyedCallWithIC(Call* expr, Expression* key);
443 // Platform-specific code for inline runtime calls.
444 InlineFunctionGenerator FindInlineFunctionGenerator(Runtime::FunctionId id);
446 void EmitInlineRuntimeCall(CallRuntime* expr);
// Declare one Emit##name generator per inlinable runtime function.
448 #define EMIT_INLINE_RUNTIME_CALL(name, x, y) \
449 void Emit##name(CallRuntime* expr);
450 INLINE_FUNCTION_LIST(EMIT_INLINE_RUNTIME_CALL)
451 INLINE_RUNTIME_FUNCTION_LIST(EMIT_INLINE_RUNTIME_CALL)
452 #undef EMIT_INLINE_RUNTIME_CALL
454 // Platform-specific code for loading variables.
455 void EmitLoadGlobalCheckExtensions(Variable* var,
456 TypeofState typeof_state,
458 MemOperand ContextSlotOperandCheckExtensions(Variable* var, Label* slow);
459 void EmitDynamicLookupFastCase(Variable* var,
460 TypeofState typeof_state,
463 void EmitVariableLoad(VariableProxy* proxy);
465 void EmitAccessor(Expression* expression);
467 // Expects the arguments and the function already pushed.
468 void EmitResolvePossiblyDirectEval(int arg_count);
470 // Platform-specific support for allocating a new closure based on
471 // the given function info.
472 void EmitNewClosure(Handle<SharedFunctionInfo> info, bool pretenure);
474 // Platform-specific support for compiling assignments.
476 // Load a value from a named property.
477 // The receiver is left on the stack by the IC.
478 void EmitNamedPropertyLoad(Property* expr);
480 // Load a value from a keyed property.
481 // The receiver and the key are left on the stack by the IC.
482 void EmitKeyedPropertyLoad(Property* expr);
484 // Apply the compound assignment operator. Expects the left operand on top
485 // of the stack and the right one in the accumulator.
486 void EmitBinaryOp(BinaryOperation* expr,
490 // Helper functions for generating inlined smi code for certain
491 // binary operations.
492 void EmitInlineSmiBinaryOp(BinaryOperation* expr,
498 // Assign to the given expression as if via '='. The right-hand-side value
499 // is expected in the accumulator.
500 void EmitAssignment(Expression* expr);
502 // Complete a variable assignment. The right-hand-side value is expected
503 // in the accumulator.
504 void EmitVariableAssignment(Variable* var,
507 // Complete a named property assignment. The receiver is expected on top
508 // of the stack and the right-hand-side value in the accumulator.
509 void EmitNamedPropertyAssignment(Assignment* expr);
511 // Complete a keyed property assignment. The receiver and key are
512 // expected on top of the stack and the right-hand-side value in the
514 void EmitKeyedPropertyAssignment(Assignment* expr);
// Call an IC stub; defaults record a plain code-target reloc with no
// associated AST id.
516 void CallIC(Handle<Code> code,
517 RelocInfo::Mode rmode = RelocInfo::CODE_TARGET,
518 unsigned ast_id = kNoASTId);
// Source-position bookkeeping for the generated code.
520 void SetFunctionPosition(FunctionLiteral* fun);
521 void SetReturnPosition(FunctionLiteral* fun);
522 void SetStatementPosition(Statement* stmt);
523 void SetExpressionPosition(Expression* expr, int pos);
524 void SetStatementPosition(int pos);
525 void SetSourcePosition(int pos);
527 // Non-local control flow support.
528 void EnterFinallyBlock();
529 void ExitFinallyBlock();
531 // Loop nesting counter.
532 int loop_depth() { return loop_depth_; }
533 void increment_loop_depth() { loop_depth_++; }
534 void decrement_loop_depth() {
535 ASSERT(loop_depth_ > 0);
539 MacroAssembler* masm() { return masm_; }
// Current expression context (see ExpressionContext below); contexts
// install/restore themselves via set_new_context.
541 class ExpressionContext;
542 const ExpressionContext* context() { return context_; }
543 void set_new_context(const ExpressionContext* context) { context_ = context; }
// Convenience accessors forwarding to the CompilationInfo / function.
545 Handle<Script> script() { return info_->script(); }
546 bool is_eval() { return info_->is_eval(); }
547 bool is_native() { return info_->is_native(); }
548 bool is_classic_mode() { return language_mode() == CLASSIC_MODE; }
549 LanguageMode language_mode() { return function()->language_mode(); }
550 bool is_qml_mode() { return function()->qml_mode(); }
551 FunctionLiteral* function() { return info_->function(); }
552 Scope* scope() { return scope_; }
// Platform-specific register assignments for the accumulator/context.
554 static Register result_register();
555 static Register context_register();
557 // Set fields in the stack frame. Offsets are the frame pointer relative
558 // offsets defined in, e.g., StandardFrameConstants.
559 void StoreToFrameField(int frame_offset, Register value);
561 // Load a value from the current context. Indices are defined as an enum
562 // in v8::internal::Context.
563 void LoadContextField(Register dst, int context_index);
565 // Push the function argument for the runtime functions PushWithContext
566 // and PushCatchContext.
567 void PushFunctionArgumentForContextAllocation();
569 // AST node visit functions.
570 #define DECLARE_VISIT(type) virtual void Visit##type(type* node);
571 AST_NODE_LIST(DECLARE_VISIT)
574 void EmitUnaryOperation(UnaryOperation* expr, const char* comment);
// Specialized visitors for BinaryOperation sub-cases.
576 void VisitComma(BinaryOperation* expr);
577 void VisitLogicalExpression(BinaryOperation* expr);
578 void VisitArithmeticExpression(BinaryOperation* expr);
580 void VisitForTypeofValue(Expression* expr);
// Copy the recorded bailout/type-feedback bookkeeping into the final Code.
583 void PopulateDeoptimizationData(Handle<Code> code);
584 void PopulateTypeFeedbackInfo(Handle<Code> code);
585 void PopulateTypeFeedbackCells(Handle<Code> code);
587 Handle<FixedArray> handler_table() { return handler_table_; }
// One recorded bailout point (pc offset + expected-state bits).
// NOTE(review): elided listing -- the id field and closing braces of these
// structs are not visible here.
589 struct BailoutEntry {
591 unsigned pc_and_state;
// One recorded AST-id -> global property cell association.
594 struct TypeFeedbackCellEntry {
596 Handle<JSGlobalPropertyCell> cell;
// Abstract base for the expression-context stack: each context describes
// where an expression's result is wanted (effect, accumulator, stack, or
// control flow) and installs itself RAII-style on construction.
// NOTE(review): elided listing -- access labels and closing braces are not
// visible here.
600 class ExpressionContext BASE_EMBEDDED {
602 explicit ExpressionContext(FullCodeGenerator* codegen)
603 : masm_(codegen->masm()), old_(codegen->context()), codegen_(codegen) {
604 codegen->set_new_context(this);
// Destructor restores the previously active context.
607 virtual ~ExpressionContext() {
608 codegen_->set_new_context(old_);
611 Isolate* isolate() const { return codegen_->isolate(); }
613 // Convert constant control flow (true or false) to the result expected for
614 // this expression context.
615 virtual void Plug(bool flag) const = 0;
617 // Emit code to convert a pure value (in a register, known variable
618 // location, as a literal, or on top of the stack) into the result
619 // expected according to this expression context.
620 virtual void Plug(Register reg) const = 0;
621 virtual void Plug(Variable* var) const = 0;
622 virtual void Plug(Handle<Object> lit) const = 0;
623 virtual void Plug(Heap::RootListIndex index) const = 0;
624 virtual void PlugTOS() const = 0;
626 // Emit code to convert pure control flow to a pair of unbound labels into
627 // the result expected according to this expression context. The
628 // implementation will bind both labels unless it's a TestContext, which
629 // won't bind them at this point.
630 virtual void Plug(Label* materialize_true,
631 Label* materialize_false) const = 0;
633 // Emit code to discard count elements from the top of stack, then convert
634 // a pure value into the result expected according to this expression
636 virtual void DropAndPlug(int count, Register reg) const = 0;
638 // Set up branch labels for a test expression. The three Label** parameters
639 // are output parameters.
640 virtual void PrepareTest(Label* materialize_true,
641 Label* materialize_false,
644 Label** fall_through) const = 0;
646 // Returns true if we are evaluating only for side effects (i.e. if the
647 // result will be discarded).
648 virtual bool IsEffect() const { return false; }
650 // Returns true if we are evaluating for the value (in accu/on stack).
651 virtual bool IsAccumulatorValue() const { return false; }
652 virtual bool IsStackValue() const { return false; }
654 // Returns true if we are branching on the value rather than materializing
655 // it. Only used for asserts.
656 virtual bool IsTest() const { return false; }
659 FullCodeGenerator* codegen() const { return codegen_; }
660 MacroAssembler* masm() const { return masm_; }
661 MacroAssembler* masm_;
// Previously active context, restored by the destructor.
664 const ExpressionContext* old_;
665 FullCodeGenerator* codegen_;
// Context for expressions whose value is wanted in the accumulator register.
// NOTE(review): elided listing -- access labels and closing brace are not
// visible here.
668 class AccumulatorValueContext : public ExpressionContext {
670 explicit AccumulatorValueContext(FullCodeGenerator* codegen)
671 : ExpressionContext(codegen) { }
673 virtual void Plug(bool flag) const;
674 virtual void Plug(Register reg) const;
675 virtual void Plug(Label* materialize_true, Label* materialize_false) const;
676 virtual void Plug(Variable* var) const;
677 virtual void Plug(Handle<Object> lit) const;
678 virtual void Plug(Heap::RootListIndex) const;
679 virtual void PlugTOS() const;
680 virtual void DropAndPlug(int count, Register reg) const;
681 virtual void PrepareTest(Label* materialize_true,
682 Label* materialize_false,
685 Label** fall_through) const;
686 virtual bool IsAccumulatorValue() const { return true; }
// Context for expressions whose value is wanted on top of the stack.
// NOTE(review): elided listing -- access labels and closing brace are not
// visible here.
689 class StackValueContext : public ExpressionContext {
691 explicit StackValueContext(FullCodeGenerator* codegen)
692 : ExpressionContext(codegen) { }
694 virtual void Plug(bool flag) const;
695 virtual void Plug(Register reg) const;
696 virtual void Plug(Label* materialize_true, Label* materialize_false) const;
697 virtual void Plug(Variable* var) const;
698 virtual void Plug(Handle<Object> lit) const;
699 virtual void Plug(Heap::RootListIndex) const;
700 virtual void PlugTOS() const;
701 virtual void DropAndPlug(int count, Register reg) const;
702 virtual void PrepareTest(Label* materialize_true,
703 Label* materialize_false,
706 Label** fall_through) const;
707 virtual bool IsStackValue() const { return true; }
// Context for expressions evaluated purely for control flow: the result is
// a branch to true_label_/false_label_ rather than a materialized value.
// NOTE(review): elided listing -- the true_label_/false_label_ member
// declarations, access labels, and closing brace are not visible here.
710 class TestContext : public ExpressionContext {
712 TestContext(FullCodeGenerator* codegen,
713 Expression* condition,
717 : ExpressionContext(codegen),
718 condition_(condition),
719 true_label_(true_label),
720 false_label_(false_label),
721 fall_through_(fall_through) { }
// Checked downcast; relies on IsTest() below to guarantee the dynamic type.
723 static const TestContext* cast(const ExpressionContext* context) {
724 ASSERT(context->IsTest());
725 return reinterpret_cast<const TestContext*>(context);
728 Expression* condition() const { return condition_; }
729 Label* true_label() const { return true_label_; }
730 Label* false_label() const { return false_label_; }
731 Label* fall_through() const { return fall_through_; }
733 virtual void Plug(bool flag) const;
734 virtual void Plug(Register reg) const;
735 virtual void Plug(Label* materialize_true, Label* materialize_false) const;
736 virtual void Plug(Variable* var) const;
737 virtual void Plug(Handle<Object> lit) const;
738 virtual void Plug(Heap::RootListIndex) const;
739 virtual void PlugTOS() const;
740 virtual void DropAndPlug(int count, Register reg) const;
741 virtual void PrepareTest(Label* materialize_true,
742 Label* materialize_false,
745 Label** fall_through) const;
746 virtual bool IsTest() const { return true; }
749 Expression* condition_;
752 Label* fall_through_;
// Context for expressions evaluated only for side effects; the value is
// discarded.
// NOTE(review): elided listing -- access labels and closing brace are not
// visible here.
755 class EffectContext : public ExpressionContext {
757 explicit EffectContext(FullCodeGenerator* codegen)
758 : ExpressionContext(codegen) { }
760 virtual void Plug(bool flag) const;
761 virtual void Plug(Register reg) const;
762 virtual void Plug(Label* materialize_true, Label* materialize_false) const;
763 virtual void Plug(Variable* var) const;
764 virtual void Plug(Handle<Object> lit) const;
765 virtual void Plug(Heap::RootListIndex) const;
766 virtual void PlugTOS() const;
767 virtual void DropAndPlug(int count, Register reg) const;
768 virtual void PrepareTest(Label* materialize_true,
769 Label* materialize_false,
772 Label** fall_through) const;
773 virtual bool IsEffect() const { return true; }
// Instance state of FullCodeGenerator.
// NOTE(review): elided listing -- scope_, nesting_stack_ initialization,
// loop_depth_, ic_total_count_, and the class's closing brace are not
// visible here.
776 MacroAssembler* masm_;
777 CompilationInfo* info_;
// Top of the intrusive NestedStatement stack (see NestedStatement above).
780 NestedStatement* nesting_stack_;
782 ZoneList<Handle<Object> >* globals_;
// Currently active expression context (maintained by ExpressionContext).
783 const ExpressionContext* context_;
// Recorded bailout points, stack-check sites, and type-feedback cells,
// later copied into the generated Code object by the Populate* methods.
784 ZoneList<BailoutEntry> bailout_entries_;
785 ZoneList<BailoutEntry> stack_checks_;
786 ZoneList<TypeFeedbackCellEntry> type_feedback_cells_;
788 Handle<FixedArray> handler_table_;
789 Handle<JSGlobalPropertyCell> profiling_counter_;
// NestedStatement's ctor/dtor touch nesting_stack_ directly.
791 friend class NestedStatement;
793 DISALLOW_COPY_AND_ASSIGN(FullCodeGenerator);
797 // A map from property names to getter/setter pairs allocated in the zone.
// NOTE(review): elided listing -- the zone_ member, the rest of the
// constructor initializer list, the end of lookup(), and the closing brace
// are not visible here.
798 class AccessorTable: public TemplateHashMap<Literal,
799 ObjectLiteral::Accessors,
800 ZoneListAllocationPolicy> {
802 explicit AccessorTable(Zone* zone) :
803 TemplateHashMap<Literal,
804 ObjectLiteral::Accessors,
805 ZoneListAllocationPolicy>(Literal::Match),
// Find-or-insert: allocates a fresh zone Accessors pair on first lookup
// of a given literal.
808 Iterator lookup(Literal* literal) {
809 Iterator it = find(literal, true);
810 if (it->second == NULL) it->second = new(zone_) ObjectLiteral::Accessors();
819 } } // namespace v8::internal
821 #endif // V8_FULL_CODEGEN_H_