1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #ifndef V8_FULL_CODEGEN_FULL_CODEGEN_H_
6 #define V8_FULL_CODEGEN_FULL_CODEGEN_H_
8 #include "src/allocation.h"
9 #include "src/assert-scope.h"
11 #include "src/bit-vector.h"
12 #include "src/code-stubs.h"
13 #include "src/codegen.h"
14 #include "src/compiler.h"
15 #include "src/globals.h"
16 #include "src/objects.h"
17 #include "src/scopes.h"
22 // Forward declarations.
25 // -----------------------------------------------------------------------------
26 // Full code generator.
// Full code generator: a baseline compiler that walks the AST (it is an
// AstVisitor) and emits unoptimized native code through a MacroAssembler,
// while recording the bailout entries, back-edge table, and handler table
// needed for deoptimization support and on-stack replacement.
28 class FullCodeGenerator: public AstVisitor {
35 FullCodeGenerator(MacroAssembler* masm, CompilationInfo* info)
38 scope_(info->scope()),
44 bailout_entries_(info->HasDeoptimizationSupport()
45 ? info->literal()->ast_node_count()
48 back_edges_(2, info->zone()),
49 handler_table_(info->zone()),
51 DCHECK(!info->IsStub());
57 static bool MakeCode(CompilationInfo* info);
59 // Encode state and pc-offset as a BitField<type, start, size>.
60 // Only use 30 bits because we encode the result as a smi.
61 class StateField : public BitField<State, 0, 1> { };
62 class PcField : public BitField<unsigned, 1, 30-1> { };
64 static const char* State2String(State state) {
66 case NO_REGISTERS: return "NO_REGISTERS";
67 case TOS_REG: return "TOS_REG";
73 static const int kMaxBackEdgeWeight = 127;
75 // Platform-specific code size multiplier.
76 #if V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X87
77 static const int kCodeSizeMultiplier = 105;
78 #elif V8_TARGET_ARCH_X64
79 static const int kCodeSizeMultiplier = 165;
80 #elif V8_TARGET_ARCH_ARM
81 static const int kCodeSizeMultiplier = 149;
82 #elif V8_TARGET_ARCH_ARM64
83 // TODO(all): Copied ARM value. Check this is sensible for ARM64.
84 static const int kCodeSizeMultiplier = 149;
85 #elif V8_TARGET_ARCH_PPC64
86 static const int kCodeSizeMultiplier = 200;
87 #elif V8_TARGET_ARCH_PPC
88 static const int kCodeSizeMultiplier = 200;
89 #elif V8_TARGET_ARCH_MIPS
90 static const int kCodeSizeMultiplier = 149;
91 #elif V8_TARGET_ARCH_MIPS64
92 static const int kCodeSizeMultiplier = 149;
94 #error Unsupported target architecture.
// Tracks the static nesting of statements (blocks, loops, try/catch,
// try/finally, with) during code generation. Instances form a stack,
// linked through previous_ and rooted at codegen->nesting_stack_;
// construction pushes, destruction pops (enforced by the DCHECK below).
103 class NestedStatement BASE_EMBEDDED {
105 explicit NestedStatement(FullCodeGenerator* codegen) : codegen_(codegen) {
106 // Link into codegen's nesting stack.
107 previous_ = codegen->nesting_stack_;
108 codegen->nesting_stack_ = this;
110 virtual ~NestedStatement() {
111 // Unlink from codegen's nesting stack.
112 DCHECK_EQ(this, codegen_->nesting_stack_);
113 codegen_->nesting_stack_ = previous_;
116 virtual Breakable* AsBreakable() { return NULL; }
117 virtual Iteration* AsIteration() { return NULL; }
119 virtual bool IsContinueTarget(Statement* target) { return false; }
120 virtual bool IsBreakTarget(Statement* target) { return false; }
122 // Notify the statement that we are exiting it via break, continue, or
123 // return and give it a chance to generate cleanup code. Return the
124 // next outer statement in the nesting stack. We accumulate in
125 // *stack_depth the amount to drop the stack and in *context_length the
126 // number of context chain links to unwind as we traverse the nesting
127 // stack from an exit to its target.
128 virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
132 // Like the Exit() method above, but limited to accumulating stack depth.
133 virtual NestedStatement* AccumulateDepth(int* stack_depth) {
138 MacroAssembler* masm() { return codegen_->masm(); }
140 FullCodeGenerator* codegen_;
141 NestedStatement* previous_;
144 DISALLOW_COPY_AND_ASSIGN(NestedStatement);
147 // A breakable statement such as a block.
148 class Breakable : public NestedStatement {
150 Breakable(FullCodeGenerator* codegen, BreakableStatement* statement)
151 : NestedStatement(codegen), statement_(statement) {
153 virtual ~Breakable() {}
155 virtual Breakable* AsBreakable() { return this; }
156 virtual bool IsBreakTarget(Statement* target) {
157 return statement() == target;
160 BreakableStatement* statement() { return statement_; }
161 Label* break_label() { return &break_label_; }
164 BreakableStatement* statement_;
168 // An iteration statement such as a while, for, or do loop.
169 class Iteration : public Breakable {
171 Iteration(FullCodeGenerator* codegen, IterationStatement* statement)
172 : Breakable(codegen, statement) {
174 virtual ~Iteration() {}
176 virtual Iteration* AsIteration() { return this; }
177 virtual bool IsContinueTarget(Statement* target) {
178 return statement() == target;
181 Label* continue_label() { return &continue_label_; }
184 Label continue_label_;
187 // A nested block statement.
188 class NestedBlock : public Breakable {
190 NestedBlock(FullCodeGenerator* codegen, Block* block)
191 : Breakable(codegen, block) {
193 virtual ~NestedBlock() {}
195 virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
// A block that allocated context locals pushed one context chain link;
// account for it when unwinding through this block.
196 auto block_scope = statement()->AsBlock()->scope();
197 if (block_scope != nullptr) {
198 if (block_scope->ContextLocalCount() > 0) ++(*context_length);
204 // The try block of a try/catch statement.
205 class TryCatch : public NestedStatement {
207 static const int kElementCount = TryBlockConstant::kElementCount;
209 explicit TryCatch(FullCodeGenerator* codegen) : NestedStatement(codegen) {}
210 virtual ~TryCatch() {}
212 virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
213 *stack_depth += kElementCount;
216 virtual NestedStatement* AccumulateDepth(int* stack_depth) {
217 *stack_depth += kElementCount;
222 // The try block of a try/finally statement.
223 class TryFinally : public NestedStatement {
225 static const int kElementCount = TryBlockConstant::kElementCount;
227 TryFinally(FullCodeGenerator* codegen, Label* finally_entry)
228 : NestedStatement(codegen), finally_entry_(finally_entry) {
230 virtual ~TryFinally() {}
232 virtual NestedStatement* Exit(int* stack_depth, int* context_length);
233 virtual NestedStatement* AccumulateDepth(int* stack_depth) {
234 *stack_depth += kElementCount;
239 Label* finally_entry_;
242 // The finally block of a try/finally statement.
243 class Finally : public NestedStatement {
245 static const int kElementCount = 3;
247 explicit Finally(FullCodeGenerator* codegen) : NestedStatement(codegen) {}
248 virtual ~Finally() {}
250 virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
251 *stack_depth += kElementCount;
254 virtual NestedStatement* AccumulateDepth(int* stack_depth) {
255 *stack_depth += kElementCount;
260 // The body of a for/in loop.
261 class ForIn : public Iteration {
263 static const int kElementCount = 5;
265 ForIn(FullCodeGenerator* codegen, ForInStatement* statement)
266 : Iteration(codegen, statement) {
270 virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
271 *stack_depth += kElementCount;
274 virtual NestedStatement* AccumulateDepth(int* stack_depth) {
275 *stack_depth += kElementCount;
281 // The body of a with or catch.
282 class WithOrCatch : public NestedStatement {
284 explicit WithOrCatch(FullCodeGenerator* codegen)
285 : NestedStatement(codegen) {
287 virtual ~WithOrCatch() {}
289 virtual NestedStatement* Exit(int* stack_depth, int* context_length) {
295 // A platform-specific utility to overwrite the accumulator register
296 // with a GC-safe value.
297 void ClearAccumulator();
299 // Determine whether or not to inline the smi case for the given
301 bool ShouldInlineSmiCase(Token::Value op);
303 // Helper function to convert a pure value into a test context. The value
304 // is expected on the stack or the accumulator, depending on the platform.
305 // See the platform-specific implementation for details.
306 void DoTest(Expression* condition,
309 Label* fall_through);
310 void DoTest(const TestContext* context);
312 // Helper function to split control flow and avoid a branch to the
313 // fall-through label if it is set up.
314 #if V8_TARGET_ARCH_MIPS
315 void Split(Condition cc,
320 Label* fall_through);
321 #elif V8_TARGET_ARCH_MIPS64
322 void Split(Condition cc,
327 Label* fall_through);
328 #elif V8_TARGET_ARCH_PPC
329 void Split(Condition cc, Label* if_true, Label* if_false, Label* fall_through,
331 #else // All other arch.
332 void Split(Condition cc,
335 Label* fall_through);
338 // Load the value of a known (PARAMETER, LOCAL, or CONTEXT) variable into
339 // a register. Emits a context chain walk if necessary (so does
340 // SetVar) so avoid calling both on the same variable.
341 void GetVar(Register destination, Variable* var);
343 // Assign to a known (PARAMETER, LOCAL, or CONTEXT) variable. If it's in
344 // the context, the write barrier will be emitted and source, scratch0,
345 // scratch1 will be clobbered. Emits a context chain walk if necessary
346 // (so does GetVar) so avoid calling both on the same variable.
347 void SetVar(Variable* var,
352 // An operand used to read/write a stack-allocated (PARAMETER or LOCAL)
353 // variable. Writing does not need the write barrier.
354 MemOperand StackOperand(Variable* var);
356 // An operand used to read/write a known (PARAMETER, LOCAL, or CONTEXT)
357 // variable. May emit code to traverse the context chain, loading the
358 // found context into the scratch register. Writing to this operand will
359 // need the write barrier if location is CONTEXT.
360 MemOperand VarOperand(Variable* var, Register scratch);
// Visit an expression under each kind of ExpressionContext; the context
// object installs itself for the duration of the visit (RAII).
362 void VisitForEffect(Expression* expr) {
363 EffectContext context(this);
365 PrepareForBailout(expr, NO_REGISTERS);
368 void VisitForAccumulatorValue(Expression* expr) {
369 AccumulatorValueContext context(this);
371 PrepareForBailout(expr, TOS_REG);
374 void VisitForStackValue(Expression* expr) {
375 StackValueContext context(this);
377 PrepareForBailout(expr, NO_REGISTERS);
380 void VisitForControl(Expression* expr,
383 Label* fall_through) {
384 TestContext context(this, expr, if_true, if_false, fall_through);
386 // For test contexts, we prepare for bailout before branching, not at
387 // the end of the entire expression. This happens as part of visiting
391 void VisitInDuplicateContext(Expression* expr);
393 void VisitDeclarations(ZoneList<Declaration*>* declarations) override;
394 void DeclareModules(Handle<FixedArray> descriptions);
395 void DeclareGlobals(Handle<FixedArray> pairs);
396 int DeclareGlobalsFlags();
398 // Generate code to create an iterator result object. The "value" property is
399 // set to a value popped from the stack, and "done" is set according to the
400 // argument. The result object is left in the result register.
401 void EmitCreateIteratorResult(bool done);
403 // Try to perform a comparison as a fast inlined literal compare if
404 // the operands allow it. Returns true if the compare operations
405 // has been matched and all code generated; false otherwise.
406 bool TryLiteralCompare(CompareOperation* compare);
408 // Platform-specific code for comparing the type of a value with
409 // a given literal string.
410 void EmitLiteralCompareTypeof(Expression* expr,
411 Expression* sub_expr,
412 Handle<String> check);
414 // Platform-specific code for equality comparison with a nil-like value.
415 void EmitLiteralCompareNil(CompareOperation* expr,
416 Expression* sub_expr,
420 void PrepareForBailout(Expression* node, State state);
421 void PrepareForBailoutForId(BailoutId id, State state);
423 // Returns a smi for the index into the FixedArray that backs the feedback
425 Smi* SmiFromSlot(FeedbackVectorSlot slot) const {
426 return Smi::FromInt(TypeFeedbackVector::GetIndexFromSpec(
427 literal()->feedback_vector_spec(), slot));
430 Smi* SmiFromSlot(FeedbackVectorICSlot slot) const {
431 return Smi::FromInt(TypeFeedbackVector::GetIndexFromSpec(
432 literal()->feedback_vector_spec(), slot));
435 // Record a call's return site offset, used to rebuild the frame if the
436 // called function was inlined at the site.
437 void RecordJSReturnSite(Call* call);
439 // Prepare for bailout before a test (or compare) and branch. If
440 // should_normalize, then the following comparison will not handle the
441 // canonical JS true value so we will insert a (dead) test against true at
442 // the actual bailout target from the optimized code. If not
443 // should_normalize, the true and false labels are ignored.
444 void PrepareForBailoutBeforeSplit(Expression* expr,
445 bool should_normalize,
449 // If enabled, emit debug code for checking that the current context is
450 // neither a with nor a catch context.
451 void EmitDebugCheckDeclarationContext(Variable* variable);
453 // This is meant to be called at loop back edges, |back_edge_target| is
454 // the jump target of the back edge and is used to approximate the amount
455 // of code inside the loop.
456 void EmitBackEdgeBookkeeping(IterationStatement* stmt,
457 Label* back_edge_target);
458 // Record the OSR AST id corresponding to a back edge in the code.
459 void RecordBackEdge(BailoutId osr_ast_id);
460 // Emit a table of back edge ids, pcs and loop depths into the code stream.
461 // Return the offset of the start of the table.
462 unsigned EmitBackEdgeTable();
464 void EmitProfilingCounterDecrement(int delta);
465 void EmitProfilingCounterReset();
467 // Emit code to pop values from the stack associated with nested statements
468 // like try/catch, try/finally, etc, running the finallies and unwinding the
469 // handlers as needed.
470 void EmitUnwindBeforeReturn();
472 // Platform-specific return sequence
473 void EmitReturnSequence();
475 // Platform-specific code sequences for calls
476 void EmitCall(Call* expr, CallICState::CallType = CallICState::FUNCTION);
477 void EmitSuperConstructorCall(Call* expr);
478 void EmitCallWithLoadIC(Call* expr);
479 void EmitSuperCallWithLoadIC(Call* expr);
480 void EmitKeyedCallWithLoadIC(Call* expr, Expression* key);
481 void EmitKeyedSuperCallWithLoadIC(Call* expr);
// Runtime intrinsics that full-codegen expands inline; each F(Name) below
// yields an EmitName(CallRuntime*) declaration via GENERATOR_DECLARATION.
483 #define FOR_EACH_FULL_CODE_INTRINSIC(F) \
492 F(DefaultConstructorCallSuper) \
499 F(StringCharFromCode) \
501 F(OneByteSeqStringSetChar) \
502 F(TwoByteSeqStringSetChar) \
509 F(HasCachedArrayIndex) \
510 F(GetCachedArrayIndex) \
511 F(FastOneByteArrayJoin) \
514 F(DebugBreakInOptimizedCode) \
516 F(StringCharCodeAt) \
520 F(RegExpConstructResult) \
527 F(CreateIterResultObject)
529 #define GENERATOR_DECLARATION(Name) void Emit##Name(CallRuntime* call);
530 FOR_EACH_FULL_CODE_INTRINSIC(GENERATOR_DECLARATION)
531 #undef GENERATOR_DECLARATION
533 // Platform-specific code for resuming generators.
534 void EmitGeneratorResume(Expression *generator,
536 JSGeneratorObject::ResumeMode resume_mode);
538 // Platform-specific code for loading variables.
539 void EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
540 TypeofMode typeof_mode, Label* slow);
541 MemOperand ContextSlotOperandCheckExtensions(Variable* var, Label* slow);
542 void EmitDynamicLookupFastCase(VariableProxy* proxy, TypeofMode typeof_mode,
543 Label* slow, Label* done);
544 void EmitGlobalVariableLoad(VariableProxy* proxy, TypeofMode typeof_mode);
545 void EmitVariableLoad(VariableProxy* proxy,
546 TypeofMode typeof_mode = NOT_INSIDE_TYPEOF);
548 void EmitAccessor(ObjectLiteralProperty* property);
550 bool NeedsHoleCheckForLoad(VariableProxy* proxy);
552 // Expects the arguments and the function already pushed.
553 void EmitResolvePossiblyDirectEval(int arg_count);
555 // Platform-specific support for allocating a new closure based on
556 // the given function info.
557 void EmitNewClosure(Handle<SharedFunctionInfo> info, bool pretenure);
559 // Re-usable portions of CallRuntime
560 void EmitLoadJSRuntimeFunction(CallRuntime* expr);
561 void EmitCallJSRuntimeFunction(CallRuntime* expr);
563 // Load a value from a named property.
564 // The receiver is left on the stack by the IC.
565 void EmitNamedPropertyLoad(Property* expr);
567 // Load a value from super.named property.
568 // Expect receiver ('this' value) and home_object on the stack.
569 void EmitNamedSuperPropertyLoad(Property* expr);
571 // Load a value from super[keyed] property.
572 // Expect receiver ('this' value), home_object and key on the stack.
573 void EmitKeyedSuperPropertyLoad(Property* expr);
575 // Load a value from a keyed property.
576 // The receiver and the key is left on the stack by the IC.
577 void EmitKeyedPropertyLoad(Property* expr);
579 // Adds the properties to the class (function) object and to its prototype.
580 // Expects the class (function) in the accumulator. The class (function) is
581 // in the accumulator after installing all the properties.
582 void EmitClassDefineProperties(ClassLiteral* lit);
584 // Pushes the property key as a Name on the stack.
585 void EmitPropertyKey(ObjectLiteralProperty* property, BailoutId bailout_id);
587 // Apply the compound assignment operator. Expects the left operand on top
588 // of the stack and the right one in the accumulator.
589 void EmitBinaryOp(BinaryOperation* expr, Token::Value op);
591 // Helper functions for generating inlined smi code for certain
592 // binary operations.
593 void EmitInlineSmiBinaryOp(BinaryOperation* expr,
598 // Assign to the given expression as if via '='. The right-hand-side value
599 // is expected in the accumulator. slot is only used if FLAG_vector_stores
601 void EmitAssignment(Expression* expr, FeedbackVectorICSlot slot);
603 // Complete a variable assignment. The right-hand-side value is expected
604 // in the accumulator.
605 void EmitVariableAssignment(Variable* var, Token::Value op,
606 FeedbackVectorICSlot slot);
608 // Helper functions to EmitVariableAssignment
609 void EmitStoreToStackLocalOrContextSlot(Variable* var,
610 MemOperand location);
612 // Complete a named property assignment. The receiver is expected on top
613 // of the stack and the right-hand-side value in the accumulator.
614 void EmitNamedPropertyAssignment(Assignment* expr);
616 // Complete a super named property assignment. The right-hand-side value
617 // is expected in accumulator.
618 void EmitNamedSuperPropertyStore(Property* prop);
620 // Complete a super keyed property assignment. The right-hand-side value
621 // is expected in accumulator.
622 void EmitKeyedSuperPropertyStore(Property* prop);
624 // Complete a keyed property assignment. The receiver and key are
625 // expected on top of the stack and the right-hand-side value in the
627 void EmitKeyedPropertyAssignment(Assignment* expr);
629 static bool NeedsHomeObject(Expression* expr) {
630 return FunctionLiteral::NeedsHomeObject(expr);
633 // Adds the [[HomeObject]] to |initializer| if it is a FunctionLiteral.
634 // The value of the initializer is expected to be at the top of the stack.
635 // |offset| is the offset in the stack where the home object can be found.
636 void EmitSetHomeObject(Expression* initializer, int offset,
637 FeedbackVectorICSlot slot);
639 void EmitSetHomeObjectAccumulator(Expression* initializer, int offset,
640 FeedbackVectorICSlot slot);
642 void EmitLoadSuperConstructor(SuperCallReference* super_call_ref);
644 void CallIC(Handle<Code> code,
645 TypeFeedbackId id = TypeFeedbackId::None());
647 // Inside typeof reference errors are never thrown.
648 void CallLoadIC(TypeofMode typeof_mode, LanguageMode language_mode = SLOPPY,
649 TypeFeedbackId id = TypeFeedbackId::None());
650 void CallStoreIC(TypeFeedbackId id = TypeFeedbackId::None());
652 void SetFunctionPosition(FunctionLiteral* fun);
653 void SetReturnPosition(FunctionLiteral* fun);
655 enum InsertBreak { INSERT_BREAK, SKIP_BREAK };
657 // During stepping we want to be able to break at each statement, but not at
658 // every (sub-)expression. That is why by default we insert breaks at every
659 // statement position, but not at every expression position, unless stated
661 void SetStatementPosition(Statement* stmt,
662 InsertBreak insert_break = INSERT_BREAK);
663 void SetExpressionPosition(Expression* expr,
664 InsertBreak insert_break = SKIP_BREAK);
666 // Consider an expression a statement. As such, we also insert a break.
667 // This is used in loop headers where we want to break for each iteration.
668 void SetExpressionAsStatementPosition(Expression* expr);
670 void SetCallPosition(Expression* expr, int argc);
672 void SetConstructCallPosition(Expression* expr);
674 // Non-local control flow support.
675 void EnterTryBlock(int handler_index, Label* handler);
676 void ExitTryBlock(int handler_index);
677 void EnterFinallyBlock();
678 void ExitFinallyBlock();
679 void ClearPendingMessage();
681 // Loop nesting counter.
682 int loop_depth() { return loop_depth_; }
683 void increment_loop_depth() { loop_depth_++; }
684 void decrement_loop_depth() {
685 DCHECK(loop_depth_ > 0);
689 MacroAssembler* masm() const { return masm_; }
691 class ExpressionContext;
692 const ExpressionContext* context() { return context_; }
693 void set_new_context(const ExpressionContext* context) { context_ = context; }
695 Handle<Script> script() { return info_->script(); }
696 bool is_eval() { return info_->is_eval(); }
697 bool is_native() { return info_->is_native(); }
698 LanguageMode language_mode() { return literal()->language_mode(); }
699 bool has_simple_parameters() { return info_->has_simple_parameters(); }
700 FunctionLiteral* literal() const { return info_->literal(); }
701 Scope* scope() { return scope_; }
703 static Register result_register();
704 static Register context_register();
706 // Set fields in the stack frame. Offsets are the frame pointer relative
707 // offsets defined in, e.g., StandardFrameConstants.
708 void StoreToFrameField(int frame_offset, Register value);
710 // Load a value from the current context. Indices are defined as an enum
711 // in v8::internal::Context.
712 void LoadContextField(Register dst, int context_index);
714 // Push the function argument for the runtime functions PushWithContext
715 // and PushCatchContext.
716 void PushFunctionArgumentForContextAllocation();
718 void PushCalleeAndWithBaseObject(Call* expr);
720 // AST node visit functions.
721 #define DECLARE_VISIT(type) virtual void Visit##type(type* node) override;
722 AST_NODE_LIST(DECLARE_VISIT)
725 void VisitComma(BinaryOperation* expr);
726 void VisitLogicalExpression(BinaryOperation* expr);
727 void VisitArithmeticExpression(BinaryOperation* expr);
729 void VisitForTypeofValue(Expression* expr);
732 void PopulateDeoptimizationData(Handle<Code> code);
733 void PopulateTypeFeedbackInfo(Handle<Code> code);
734 void PopulateHandlerTable(Handle<Code> code);
736 bool MustCreateObjectLiteralWithRuntime(ObjectLiteral* expr) const;
737 bool MustCreateArrayLiteralWithRuntime(ArrayLiteral* expr) const;
739 void EmitLoadStoreICSlot(FeedbackVectorICSlot slot);
741 int NewHandlerTableEntry();
// Bookkeeping records accumulated during code generation and later written
// into the code object's deopt, back-edge, and handler tables.
743 struct BailoutEntry {
745 unsigned pc_and_state;
748 struct BackEdgeEntry {
754 struct HandlerTableEntry {
755 unsigned range_start;
757 unsigned handler_offset;
// An ExpressionContext encodes what the generated code should do with an
// expression's value: discard it (EffectContext), keep it in the
// accumulator (AccumulatorValueContext), push it (StackValueContext), or
// branch on it (TestContext). Construction installs the context on the
// codegen; destruction restores the previous one (RAII).
762 class ExpressionContext BASE_EMBEDDED {
764 explicit ExpressionContext(FullCodeGenerator* codegen)
765 : masm_(codegen->masm()), old_(codegen->context()), codegen_(codegen) {
766 codegen->set_new_context(this);
769 virtual ~ExpressionContext() {
770 codegen_->set_new_context(old_);
773 Isolate* isolate() const { return codegen_->isolate(); }
775 // Convert constant control flow (true or false) to the result expected for
776 // this expression context.
777 virtual void Plug(bool flag) const = 0;
779 // Emit code to convert a pure value (in a register, known variable
780 // location, as a literal, or on top of the stack) into the result
781 // expected according to this expression context.
782 virtual void Plug(Register reg) const = 0;
783 virtual void Plug(Variable* var) const = 0;
784 virtual void Plug(Handle<Object> lit) const = 0;
785 virtual void Plug(Heap::RootListIndex index) const = 0;
786 virtual void PlugTOS() const = 0;
788 // Emit code to convert pure control flow to a pair of unbound labels into
789 // the result expected according to this expression context. The
790 // implementation will bind both labels unless it's a TestContext, which
791 // won't bind them at this point.
792 virtual void Plug(Label* materialize_true,
793 Label* materialize_false) const = 0;
795 // Emit code to discard count elements from the top of stack, then convert
796 // a pure value into the result expected according to this expression
798 virtual void DropAndPlug(int count, Register reg) const = 0;
800 // Set up branch labels for a test expression. The three Label** parameters
801 // are output parameters.
802 virtual void PrepareTest(Label* materialize_true,
803 Label* materialize_false,
806 Label** fall_through) const = 0;
808 // Returns true if we are evaluating only for side effects (i.e. if the
809 // result will be discarded).
810 virtual bool IsEffect() const { return false; }
812 // Returns true if we are evaluating for the value (in accu/on stack).
813 virtual bool IsAccumulatorValue() const { return false; }
814 virtual bool IsStackValue() const { return false; }
816 // Returns true if we are branching on the value rather than materializing
817 // it. Only used for asserts.
818 virtual bool IsTest() const { return false; }
821 FullCodeGenerator* codegen() const { return codegen_; }
822 MacroAssembler* masm() const { return masm_; }
823 MacroAssembler* masm_;
826 const ExpressionContext* old_;
827 FullCodeGenerator* codegen_;
830 class AccumulatorValueContext : public ExpressionContext {
832 explicit AccumulatorValueContext(FullCodeGenerator* codegen)
833 : ExpressionContext(codegen) { }
835 virtual void Plug(bool flag) const;
836 virtual void Plug(Register reg) const;
837 virtual void Plug(Label* materialize_true, Label* materialize_false) const;
838 virtual void Plug(Variable* var) const;
839 virtual void Plug(Handle<Object> lit) const;
840 virtual void Plug(Heap::RootListIndex) const;
841 virtual void PlugTOS() const;
842 virtual void DropAndPlug(int count, Register reg) const;
843 virtual void PrepareTest(Label* materialize_true,
844 Label* materialize_false,
847 Label** fall_through) const;
848 virtual bool IsAccumulatorValue() const { return true; }
851 class StackValueContext : public ExpressionContext {
853 explicit StackValueContext(FullCodeGenerator* codegen)
854 : ExpressionContext(codegen) { }
856 virtual void Plug(bool flag) const;
857 virtual void Plug(Register reg) const;
858 virtual void Plug(Label* materialize_true, Label* materialize_false) const;
859 virtual void Plug(Variable* var) const;
860 virtual void Plug(Handle<Object> lit) const;
861 virtual void Plug(Heap::RootListIndex) const;
862 virtual void PlugTOS() const;
863 virtual void DropAndPlug(int count, Register reg) const;
864 virtual void PrepareTest(Label* materialize_true,
865 Label* materialize_false,
868 Label** fall_through) const;
869 virtual bool IsStackValue() const { return true; }
872 class TestContext : public ExpressionContext {
874 TestContext(FullCodeGenerator* codegen,
875 Expression* condition,
879 : ExpressionContext(codegen),
880 condition_(condition),
881 true_label_(true_label),
882 false_label_(false_label),
883 fall_through_(fall_through) { }
885 static const TestContext* cast(const ExpressionContext* context) {
886 DCHECK(context->IsTest());
887 return reinterpret_cast<const TestContext*>(context);
890 Expression* condition() const { return condition_; }
891 Label* true_label() const { return true_label_; }
892 Label* false_label() const { return false_label_; }
893 Label* fall_through() const { return fall_through_; }
895 virtual void Plug(bool flag) const;
896 virtual void Plug(Register reg) const;
897 virtual void Plug(Label* materialize_true, Label* materialize_false) const;
898 virtual void Plug(Variable* var) const;
899 virtual void Plug(Handle<Object> lit) const;
900 virtual void Plug(Heap::RootListIndex) const;
901 virtual void PlugTOS() const;
902 virtual void DropAndPlug(int count, Register reg) const;
903 virtual void PrepareTest(Label* materialize_true,
904 Label* materialize_false,
907 Label** fall_through) const;
908 virtual bool IsTest() const { return true; }
911 Expression* condition_;
914 Label* fall_through_;
917 class EffectContext : public ExpressionContext {
919 explicit EffectContext(FullCodeGenerator* codegen)
920 : ExpressionContext(codegen) { }
922 virtual void Plug(bool flag) const;
923 virtual void Plug(Register reg) const;
924 virtual void Plug(Label* materialize_true, Label* materialize_false) const;
925 virtual void Plug(Variable* var) const;
926 virtual void Plug(Handle<Object> lit) const;
927 virtual void Plug(Heap::RootListIndex) const;
928 virtual void PlugTOS() const;
929 virtual void DropAndPlug(int count, Register reg) const;
930 virtual void PrepareTest(Label* materialize_true,
931 Label* materialize_false,
934 Label** fall_through) const;
935 virtual bool IsEffect() const { return true; }
// RAII helper that enters a block's scope (allocating a block context when
// the scope requires one — see needs_block_context_) on construction and
// leaves it on destruction.
938 class EnterBlockScopeIfNeeded {
940 EnterBlockScopeIfNeeded(FullCodeGenerator* codegen, Scope* scope,
941 BailoutId entry_id, BailoutId declarations_id,
943 ~EnterBlockScopeIfNeeded();
946 MacroAssembler* masm() const { return codegen_->masm(); }
948 FullCodeGenerator* codegen_;
951 bool needs_block_context_;
954 MacroAssembler* masm_;
955 CompilationInfo* info_;
958 NestedStatement* nesting_stack_;
960 int try_catch_depth_;
961 ZoneList<Handle<Object> >* globals_;
962 Handle<FixedArray> modules_;
964 const ExpressionContext* context_;
965 ZoneList<BailoutEntry> bailout_entries_;
966 ZoneList<BackEdgeEntry> back_edges_;
967 ZoneVector<HandlerTableEntry> handler_table_;
969 Handle<Cell> profiling_counter_;
970 bool generate_debug_code_;
972 friend class NestedStatement;
974 DEFINE_AST_VISITOR_SUBCLASS_MEMBERS();
975 DISALLOW_COPY_AND_ASSIGN(FullCodeGenerator);
979 // A map from property names to getter/setter pairs allocated in the zone.
980 class AccessorTable: public TemplateHashMap<Literal,
981 ObjectLiteral::Accessors,
982 ZoneAllocationPolicy> {
984 explicit AccessorTable(Zone* zone) :
985 TemplateHashMap<Literal, ObjectLiteral::Accessors,
986 ZoneAllocationPolicy>(Literal::Match,
987 ZoneAllocationPolicy(zone)),
// Returns the entry for |literal|, zone-allocating a fresh Accessors pair
// the first time the key is seen (find() with insert == true leaves
// it->second NULL for a newly inserted key).
990 Iterator lookup(Literal* literal) {
991 Iterator it = find(literal, true, ZoneAllocationPolicy(zone_));
992 if (it->second == NULL) it->second = new(zone_) ObjectLiteral::Accessors();
// Read-only view over the back edge table embedded in a full-codegen
// Code object: a uint32 length followed by (ast id, pc offset, loop depth)
// triples (see the k*Offset constants below). Also hosts the static
// patching machinery that switches back edges between interrupt checks
// and on-stack replacement.
1001 class BackEdgeTable {
1003 BackEdgeTable(Code* code, DisallowHeapAllocation* required) {
1004 DCHECK(code->kind() == Code::FUNCTION);
1005 instruction_start_ = code->instruction_start();
1006 Address table_address = instruction_start_ + code->back_edge_table_offset();
1007 length_ = Memory::uint32_at(table_address);
1008 start_ = table_address + kTableLengthSize;
1011 uint32_t length() { return length_; }
1013 BailoutId ast_id(uint32_t index) {
1014 return BailoutId(static_cast<int>(
1015 Memory::uint32_at(entry_at(index) + kAstIdOffset)));
1018 uint32_t loop_depth(uint32_t index) {
1019 return Memory::uint32_at(entry_at(index) + kLoopDepthOffset);
1022 uint32_t pc_offset(uint32_t index) {
1023 return Memory::uint32_at(entry_at(index) + kPcOffsetOffset);
1026 Address pc(uint32_t index) {
1027 return instruction_start_ + pc_offset(index);
1030 enum BackEdgeState {
1032 ON_STACK_REPLACEMENT,
1033 OSR_AFTER_STACK_CHECK
1036 // Increase allowed loop nesting level by one and patch those matching loops.
1037 static void Patch(Isolate* isolate, Code* unoptimized_code);
1039 // Patch the back edge to the target state, provided the correct callee.
1040 static void PatchAt(Code* unoptimized_code,
1042 BackEdgeState target_state,
1043 Code* replacement_code);
1045 // Change all patched back edges back to normal interrupts.
1046 static void Revert(Isolate* isolate,
1047 Code* unoptimized_code);
1049 // Change a back edge patched for on-stack replacement to perform a
1050 // stack check first.
1051 static void AddStackCheck(Handle<Code> code, uint32_t pc_offset);
1053 // Revert the patch by AddStackCheck.
1054 static void RemoveStackCheck(Handle<Code> code, uint32_t pc_offset);
1056 // Return the current patch state of the back edge.
1057 static BackEdgeState GetBackEdgeState(Isolate* isolate,
1058 Code* unoptimized_code,
1062 // Verify that all back edges of a certain loop depth are patched.
1063 static bool Verify(Isolate* isolate, Code* unoptimized_code);
// Address of the |index|-th fixed-size entry; bounds-checked in debug mode.
1067 Address entry_at(uint32_t index) {
1068 DCHECK(index < length_);
1069 return start_ + index * kEntrySize;
// Table layout: a uint32 length prefix, then per-entry
// { ast id, pc offset, loop depth }, each an int-sized field.
1072 static const int kTableLengthSize = kIntSize;
1073 static const int kAstIdOffset = 0 * kIntSize;
1074 static const int kPcOffsetOffset = 1 * kIntSize;
1075 static const int kLoopDepthOffset = 2 * kIntSize;
1076 static const int kEntrySize = 3 * kIntSize;
1079 Address instruction_start_;
1084 } } // namespace v8::internal
1086 #endif // V8_FULL_CODEGEN_FULL_CODEGEN_H_