// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#include "src/codegen.h"
#include "src/compiler.h"
#include "src/debug.h"
#include "src/full-codegen.h"
#include "src/liveedit.h"
#include "src/macro-assembler.h"
#include "src/prettyprinter.h"
#include "src/scopeinfo.h"
#include "src/scopes.h"
#include "src/snapshot.h"

namespace v8 {
namespace internal {
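
// The BreakableStatementChecker is a one-shot AST visitor: Check() walks a
// single statement or expression and sets is_breakable_ when the debugger
// could stop inside it anyway (at a property load, a call, a debugger
// statement, and so on), so no explicit debug break slot is needed for it.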
void BreakableStatementChecker::Check(Statement* stmt) {
  Visit(stmt);
}


void BreakableStatementChecker::Check(Expression* expr) {
  Visit(expr);
}


void BreakableStatementChecker::VisitVariableDeclaration(
    VariableDeclaration* decl) {
}


void BreakableStatementChecker::VisitFunctionDeclaration(
    FunctionDeclaration* decl) {
}


void BreakableStatementChecker::VisitModuleDeclaration(
    ModuleDeclaration* decl) {
}


void BreakableStatementChecker::VisitImportDeclaration(
    ImportDeclaration* decl) {
}


void BreakableStatementChecker::VisitExportDeclaration(
    ExportDeclaration* decl) {
}


void BreakableStatementChecker::VisitModuleLiteral(ModuleLiteral* module) {
}


void BreakableStatementChecker::VisitModuleVariable(ModuleVariable* module) {
}


void BreakableStatementChecker::VisitModulePath(ModulePath* module) {
}


void BreakableStatementChecker::VisitModuleUrl(ModuleUrl* module) {
}


void BreakableStatementChecker::VisitModuleStatement(ModuleStatement* stmt) {
}


void BreakableStatementChecker::VisitBlock(Block* stmt) {
}


void BreakableStatementChecker::VisitExpressionStatement(
    ExpressionStatement* stmt) {
  // Check if expression is breakable.
  Visit(stmt->expression());
}


void BreakableStatementChecker::VisitEmptyStatement(EmptyStatement* stmt) {
}


void BreakableStatementChecker::VisitIfStatement(IfStatement* stmt) {
  // If the condition is breakable the if statement is breakable.
  Visit(stmt->condition());
}


void BreakableStatementChecker::VisitContinueStatement(
    ContinueStatement* stmt) {
}


void BreakableStatementChecker::VisitBreakStatement(BreakStatement* stmt) {
}


void BreakableStatementChecker::VisitReturnStatement(ReturnStatement* stmt) {
  // Return is breakable if the expression is.
  Visit(stmt->expression());
}


void BreakableStatementChecker::VisitWithStatement(WithStatement* stmt) {
  Visit(stmt->expression());
}


void BreakableStatementChecker::VisitSwitchStatement(SwitchStatement* stmt) {
  // Switch statements are breakable if the tag expression is.
  Visit(stmt->tag());
}


void BreakableStatementChecker::VisitDoWhileStatement(DoWhileStatement* stmt) {
  // Mark do while as breakable to avoid adding a break slot in front of it.
  is_breakable_ = true;
}


void BreakableStatementChecker::VisitWhileStatement(WhileStatement* stmt) {
  // Mark while statements breakable if the condition expression is.
  Visit(stmt->cond());
}


void BreakableStatementChecker::VisitForStatement(ForStatement* stmt) {
  // Mark for statements breakable if the condition expression is.
  if (stmt->cond() != NULL) {
    Visit(stmt->cond());
  }
}


void BreakableStatementChecker::VisitForInStatement(ForInStatement* stmt) {
  // Mark for in statements breakable if the enumerable expression is.
  Visit(stmt->enumerable());
}


void BreakableStatementChecker::VisitForOfStatement(ForOfStatement* stmt) {
  // For-of is breakable because of the next() call.
  is_breakable_ = true;
}


void BreakableStatementChecker::VisitTryCatchStatement(
    TryCatchStatement* stmt) {
  // Mark try catch as breakable to avoid adding a break slot in front of it.
  is_breakable_ = true;
}


void BreakableStatementChecker::VisitTryFinallyStatement(
    TryFinallyStatement* stmt) {
  // Mark try finally as breakable to avoid adding a break slot in front of it.
  is_breakable_ = true;
}


void BreakableStatementChecker::VisitDebuggerStatement(
    DebuggerStatement* stmt) {
  // The debugger statement is breakable.
  is_breakable_ = true;
}


void BreakableStatementChecker::VisitCaseClause(CaseClause* clause) {
}


void BreakableStatementChecker::VisitFunctionLiteral(FunctionLiteral* expr) {
}


void BreakableStatementChecker::VisitNativeFunctionLiteral(
    NativeFunctionLiteral* expr) {
}


void BreakableStatementChecker::VisitConditional(Conditional* expr) {
}


void BreakableStatementChecker::VisitVariableProxy(VariableProxy* expr) {
}


void BreakableStatementChecker::VisitLiteral(Literal* expr) {
}


void BreakableStatementChecker::VisitRegExpLiteral(RegExpLiteral* expr) {
}


void BreakableStatementChecker::VisitObjectLiteral(ObjectLiteral* expr) {
}


void BreakableStatementChecker::VisitArrayLiteral(ArrayLiteral* expr) {
}


void BreakableStatementChecker::VisitAssignment(Assignment* expr) {
  // If assigning to a property (including a global property) the assignment
  // is breakable.
  VariableProxy* proxy = expr->target()->AsVariableProxy();
  Property* prop = expr->target()->AsProperty();
  if (prop != NULL || (proxy != NULL && proxy->var()->IsUnallocated())) {
    is_breakable_ = true;
    return;
  }

  // Otherwise the assignment is breakable if the assigned value is.
  Visit(expr->value());
}


void BreakableStatementChecker::VisitYield(Yield* expr) {
  // Yield is breakable if the expression is.
  Visit(expr->expression());
}


void BreakableStatementChecker::VisitThrow(Throw* expr) {
  // Throw is breakable if the expression is.
  Visit(expr->exception());
}


void BreakableStatementChecker::VisitProperty(Property* expr) {
  // Property load is breakable.
  is_breakable_ = true;
}


void BreakableStatementChecker::VisitCall(Call* expr) {
  // Function calls both through IC and call stub are breakable.
  is_breakable_ = true;
}


void BreakableStatementChecker::VisitCallNew(CallNew* expr) {
  // Function calls through new are breakable.
  is_breakable_ = true;
}


void BreakableStatementChecker::VisitCallRuntime(CallRuntime* expr) {
}


void BreakableStatementChecker::VisitUnaryOperation(UnaryOperation* expr) {
  Visit(expr->expression());
}


void BreakableStatementChecker::VisitCountOperation(CountOperation* expr) {
  Visit(expr->expression());
}


void BreakableStatementChecker::VisitBinaryOperation(BinaryOperation* expr) {
  Visit(expr->left());
  if (expr->op() != Token::AND &&
      expr->op() != Token::OR) {
    Visit(expr->right());
  }
}


void BreakableStatementChecker::VisitCompareOperation(CompareOperation* expr) {
  Visit(expr->left());
  Visit(expr->right());
}


void BreakableStatementChecker::VisitThisFunction(ThisFunction* expr) {
}


void BreakableStatementChecker::VisitSuperReference(SuperReference* expr) {}
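
// Platform-independent code below emits instructions through the masm()
// accessor of the current code generator.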
#define __ ACCESS_MASM(masm())

bool FullCodeGenerator::MakeCode(CompilationInfo* info) {
  Isolate* isolate = info->isolate();

  TimerEventScope<TimerEventCompileFullCode> timer(info->isolate());

  Handle<Script> script = info->script();
  if (!script->IsUndefined() && !script->source()->IsUndefined()) {
    int len = String::cast(script->source())->length();
    isolate->counters()->total_full_codegen_source_size()->Increment(len);
  }
  CodeGenerator::MakeCodePrologue(info, "full");
  const int kInitialBufferSize = 4 * KB;
  MacroAssembler masm(info->isolate(), NULL, kInitialBufferSize);
  if (info->will_serialize()) masm.enable_serializer();

  LOG_CODE_EVENT(isolate,
                 CodeStartLinePosInfoRecordEvent(masm.positions_recorder()));

  FullCodeGenerator cgen(&masm, info);
  cgen.Generate();
  if (cgen.HasStackOverflow()) {
    DCHECK(!isolate->has_pending_exception());
    return false;
  }
  unsigned table_offset = cgen.EmitBackEdgeTable();

  Code::Flags flags = Code::ComputeFlags(Code::FUNCTION);
  Handle<Code> code = CodeGenerator::MakeCodeEpilogue(&masm, flags, info);
  code->set_optimizable(info->IsOptimizable() &&
                        !info->function()->dont_optimize() &&
                        info->function()->scope()->AllowsLazyCompilation());
  cgen.PopulateDeoptimizationData(code);
  cgen.PopulateTypeFeedbackInfo(code);
  code->set_has_deoptimization_support(info->HasDeoptimizationSupport());
  code->set_handler_table(*cgen.handler_table());
  code->set_compiled_optimizable(info->IsOptimizable());
  code->set_allow_osr_at_loop_nesting_level(0);
  code->set_profiler_ticks(0);
  code->set_back_edge_table_offset(table_offset);
  CodeGenerator::PrintCode(code, info);
  info->SetCode(code);
  void* line_info = masm.positions_recorder()->DetachJITHandlerData();
  LOG_CODE_EVENT(isolate, CodeEndLinePosInfoRecordEvent(*code, line_info));
  return true;
}

unsigned FullCodeGenerator::EmitBackEdgeTable() {
  // The back edge table consists of a length (in number of entries)
  // field, and then a sequence of entries.  Each entry is a triple of AST id,
  // code-relative pc offset, and loop depth.
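  //
  // For example, a table with two entries is emitted as seven consecutive
  // 32-bit words:
  //
  //   [ 2 | id0 | pc0 | depth0 | id1 | pc1 | depth1 ]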
  masm()->Align(kPointerSize);
  unsigned offset = masm()->pc_offset();
  unsigned length = back_edges_.length();
  __ dd(length);
  for (unsigned i = 0; i < length; ++i) {
    __ dd(back_edges_[i].id.ToInt());
    __ dd(back_edges_[i].pc);
    __ dd(back_edges_[i].loop_depth);
  }
  return offset;
}
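
// Lazily install an AllocationSite in the given feedback vector slot: the
// first call allocates a fresh site, later calls leave the slot untouched.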
void FullCodeGenerator::EnsureSlotContainsAllocationSite(int slot) {
  Handle<FixedArray> vector = FeedbackVector();
  if (!vector->get(slot)->IsAllocationSite()) {
    Handle<AllocationSite> allocation_site =
        isolate()->factory()->NewAllocationSite();
    vector->set(slot, *allocation_site);
  }
}

void FullCodeGenerator::PopulateDeoptimizationData(Handle<Code> code) {
  // Fill in the deoptimization information.
  DCHECK(info_->HasDeoptimizationSupport() || bailout_entries_.is_empty());
  if (!info_->HasDeoptimizationSupport()) return;
  int length = bailout_entries_.length();
  Handle<DeoptimizationOutputData> data =
      DeoptimizationOutputData::New(isolate(), length, TENURED);
  for (int i = 0; i < length; i++) {
    data->SetAstId(i, bailout_entries_[i].id);
    data->SetPcAndState(i, Smi::FromInt(bailout_entries_[i].pc_and_state));
  }
  code->set_deoptimization_data(*data);
}


void FullCodeGenerator::PopulateTypeFeedbackInfo(Handle<Code> code) {
  Handle<TypeFeedbackInfo> info = isolate()->factory()->NewTypeFeedbackInfo();
  info->set_ic_total_count(ic_total_count_);
  DCHECK(!isolate()->heap()->InNewSpace(*info));
  code->set_type_feedback_info(*info);
}

void FullCodeGenerator::Initialize() {
  InitializeAstVisitor(info_->zone());
  // The generation of debug code must match between the snapshot code and the
  // code that is generated later.  This is assumed by the debugger when it is
  // calculating PC offsets after generating a debug version of code.
  // Therefore we disable the production of debug code in the full compiler if
  // we are either generating a snapshot or we booted from a snapshot.
  generate_debug_code_ = FLAG_debug_code &&
                         !masm_->serializer_enabled() &&
                         !Snapshot::HaveASnapshotToStartFrom();
  masm_->set_emit_debug_code(generate_debug_code_);
  masm_->set_predictable_code_size(true);
}


void FullCodeGenerator::PrepareForBailout(Expression* node, State state) {
  PrepareForBailoutForId(node->id(), state);
}


void FullCodeGenerator::CallLoadIC(ContextualMode contextual_mode,
                                   TypeFeedbackId id) {
  ExtraICState extra_state = LoadIC::ComputeExtraICState(contextual_mode);
  Handle<Code> ic = LoadIC::initialize_stub(isolate(), extra_state);
  CallIC(ic, id);
}


void FullCodeGenerator::CallStoreIC(TypeFeedbackId id) {
  Handle<Code> ic = StoreIC::initialize_stub(isolate(), strict_mode());
  CallIC(ic, id);
}


void FullCodeGenerator::RecordJSReturnSite(Call* call) {
  // We record the offset of the function return so we can rebuild the frame
  // if the function was inlined, i.e., this is the return address in the
  // inlined function's frame.
  //
  // The state is ignored.  We defensively set it to TOS_REG, which is the
  // real state of the unoptimized code at the return site.
  PrepareForBailoutForId(call->ReturnId(), TOS_REG);
#ifdef DEBUG
  // In debug builds, mark the return so we can verify that this function
  // was called.
  DCHECK(!call->return_is_recorded_);
  call->return_is_recorded_ = true;
#endif
}

void FullCodeGenerator::PrepareForBailoutForId(BailoutId id, State state) {
  // There's no need to prepare this code for bailouts from already optimized
  // code or code that can't be optimized.
  if (!info_->HasDeoptimizationSupport()) return;
  unsigned pc_and_state =
      StateField::encode(state) | PcField::encode(masm_->pc_offset());
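  // The state and the pc offset are packed into a single Smi-sized word,
  // so both must fit in their respective bit fields.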
  DCHECK(Smi::IsValid(pc_and_state));
#ifdef DEBUG
  for (int i = 0; i < bailout_entries_.length(); ++i) {
    DCHECK(bailout_entries_[i].id != id);
  }
#endif
  BailoutEntry entry = { id, pc_and_state };
  bailout_entries_.Add(entry, zone());
}

void FullCodeGenerator::RecordBackEdge(BailoutId ast_id) {
  // The pc offset does not need to be encoded and packed together with a
  // state.
  DCHECK(masm_->pc_offset() > 0);
  DCHECK(loop_depth() > 0);
  uint8_t depth = Min(loop_depth(), Code::kMaxLoopNestingMarker);
  BackEdgeEntry entry =
      { ast_id, static_cast<unsigned>(masm_->pc_offset()), depth };
  back_edges_.Add(entry, zone());
}


bool FullCodeGenerator::ShouldInlineSmiCase(Token::Value op) {
  // Inline smi case inside loops, but not division and modulo which
  // are too complicated and take up too much space.
  if (op == Token::DIV || op == Token::MOD) return false;
  if (FLAG_always_inline_smi_code) return true;
  return loop_depth_ > 0;
}
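
// Each expression context "plugs" a computed value into its destination: an
// effect context drops it, an accumulator value context leaves it in the
// result register, a stack value context pushes it, and a test context
// branches on it.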
void FullCodeGenerator::EffectContext::Plug(Register reg) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Register reg) const {
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::Plug(Register reg) const {
  __ Push(reg);
}


void FullCodeGenerator::TestContext::Plug(Register reg) const {
  // For simplicity we always test the accumulator register.
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::PlugTOS() const {
  __ Drop(1);
}


void FullCodeGenerator::AccumulatorValueContext::PlugTOS() const {
  __ Pop(result_register());
}


void FullCodeGenerator::StackValueContext::PlugTOS() const {
}


void FullCodeGenerator::TestContext::PlugTOS() const {
  // For simplicity we always test the accumulator register.
  __ Pop(result_register());
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::PrepareTest(
    Label* materialize_true,
    Label* materialize_false,
    Label** if_true,
    Label** if_false,
    Label** fall_through) const {
  // In an effect context, the true and the false case branch to the
  // same label.
  *if_true = *if_false = *fall_through = materialize_true;
}


void FullCodeGenerator::AccumulatorValueContext::PrepareTest(
    Label* materialize_true,
    Label* materialize_false,
    Label** if_true,
    Label** if_false,
    Label** fall_through) const {
  *if_true = *fall_through = materialize_true;
  *if_false = materialize_false;
}


void FullCodeGenerator::StackValueContext::PrepareTest(
    Label* materialize_true,
    Label* materialize_false,
    Label** if_true,
    Label** if_false,
    Label** fall_through) const {
  *if_true = *fall_through = materialize_true;
  *if_false = materialize_false;
}


void FullCodeGenerator::TestContext::PrepareTest(
    Label* materialize_true,
    Label* materialize_false,
    Label** if_true,
    Label** if_false,
    Label** fall_through) const {
  *if_true = true_label_;
  *if_false = false_label_;
  *fall_through = fall_through_;
}


void FullCodeGenerator::DoTest(const TestContext* context) {
  DoTest(context->condition(),
         context->true_label(),
         context->false_label(),
         context->fall_through());
}

void FullCodeGenerator::AllocateModules(ZoneList<Declaration*>* declarations) {
  DCHECK(scope_->is_global_scope());

  for (int i = 0; i < declarations->length(); i++) {
    ModuleDeclaration* declaration = declarations->at(i)->AsModuleDeclaration();
    if (declaration != NULL) {
      ModuleLiteral* module = declaration->module()->AsModuleLiteral();
      if (module != NULL) {
        Comment cmnt(masm_, "[ Link nested modules");
        Scope* scope = module->body()->scope();
        Interface* interface = scope->interface();
        DCHECK(interface->IsModule() && interface->IsFrozen());

        interface->Allocate(scope->module_var()->index());

        // Set up module context.
        DCHECK(scope->interface()->Index() >= 0);
        __ Push(Smi::FromInt(scope->interface()->Index()));
        __ Push(scope->GetScopeInfo());
        __ CallRuntime(Runtime::kPushModuleContext, 2);
        StoreToFrameField(StandardFrameConstants::kContextOffset,
                          context_register());

        AllocateModules(scope->declarations());

        // Pop module context.
        LoadContextField(context_register(), Context::PREVIOUS_INDEX);
        // Update local stack frame context field.
        StoreToFrameField(StandardFrameConstants::kContextOffset,
                          context_register());
      }
    }
  }
}

// Modules have their own local scope, represented by their own context.
// Module instance objects have an accessor for every export that forwards
// access to the respective slot from the module's context. (Exports that are
// modules themselves, however, are simple data properties.)
//
// All modules have a _hosting_ scope/context, which (currently) is the
// (innermost) enclosing global scope. To deal with recursion, nested modules
// are hosted by the same scope as global ones.
//
// For every (global or nested) module literal, the hosting context has an
// internal slot that points directly to the respective module context. This
// enables quick access to (statically resolved) module members by
// 2-dimensional access through the hosting context. For example,
//
//   module A {
//     let x;
//     module B { let y; }
//   }
//   module C { let z; }
//
// allocates contexts as follows:
//
// [header| .A | .B | .C | A | C ]  (global)
//           |    |    |
//           |    |    +-- [header| z ]  (module)
//           |    |
//           |    +------- [header| y ]  (module)
//           |
//           +------------ [header| x | B ]  (module)
//
// Here, .A, .B, .C are the internal slots pointing to the hosted module
// contexts, whereas A, B, C hold the actual instance objects (note that every
// module context also points to the respective instance object through its
// extension slot in the header).
//
// To deal with arbitrary recursion and aliases between modules,
// they are created and initialized in several stages. Each stage applies to
// all modules in the hosting global scope, including nested ones.
//
// 1. Allocate: for each module _literal_, allocate the module contexts and
//    respective instance object and wire them up. This happens in the
//    PushModuleContext runtime function, as generated by AllocateModules
//    (invoked by VisitDeclarations in the hosting scope).
//
// 2. Bind: for each module _declaration_ (i.e. literals as well as aliases),
//    assign the respective instance object to respective local variables. This
//    happens in VisitModuleDeclaration, and uses the instance objects created
//    in the previous stage.
//    For each module _literal_, this phase also constructs a module descriptor
//    for the next stage. This happens in VisitModuleLiteral.
//
// 3. Populate: invoke the DeclareModules runtime function to populate each
//    _instance_ object with accessors for its exports. This is generated by
//    DeclareModules (invoked by VisitDeclarations in the hosting scope again),
//    and uses the descriptors generated in the previous stage.
//
// 4. Initialize: execute the module bodies (and other code) in sequence. This
//    happens by the separate statements generated for module bodies. To
//    reenter the module scopes properly, the parser inserted ModuleStatements.
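//
// For the example above, stage 1 creates the contexts for A, B, and C,
// stage 2 binds the variables A, B, and C to their instance objects, stage 3
// adds accessors for the exports x, y, and z to those instances, and stage 4
// runs the module bodies in source order.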

void FullCodeGenerator::VisitDeclarations(
    ZoneList<Declaration*>* declarations) {
  Handle<FixedArray> saved_modules = modules_;
  int saved_module_index = module_index_;
  ZoneList<Handle<Object> >* saved_globals = globals_;
  ZoneList<Handle<Object> > inner_globals(10, zone());
  globals_ = &inner_globals;

  if (scope_->num_modules() != 0) {
    // This is a scope hosting modules. Allocate a descriptor array to pass
    // to the runtime for initialization.
    Comment cmnt(masm_, "[ Allocate modules");
    DCHECK(scope_->is_global_scope());
    modules_ =
        isolate()->factory()->NewFixedArray(scope_->num_modules(), TENURED);
    module_index_ = 0;

    // Generate code for allocating all modules, including nested ones.
    // The allocated contexts are stored in internal variables in this scope.
    AllocateModules(declarations);
  }

  AstVisitor::VisitDeclarations(declarations);

  if (scope_->num_modules() != 0) {
    // Initialize modules from descriptor array.
    DCHECK(module_index_ == modules_->length());
    DeclareModules(modules_);
    modules_ = saved_modules;
    module_index_ = saved_module_index;
  }

  if (!globals_->is_empty()) {
    // Invoke the platform-dependent code generator to do the actual
    // declaration of the global functions and variables.
    Handle<FixedArray> array =
        isolate()->factory()->NewFixedArray(globals_->length(), TENURED);
    for (int i = 0; i < globals_->length(); ++i)
      array->set(i, *globals_->at(i));
    DeclareGlobals(array);
  }

  globals_ = saved_globals;
}

void FullCodeGenerator::VisitModuleLiteral(ModuleLiteral* module) {
  Block* block = module->body();
  Scope* saved_scope = scope();
  scope_ = block->scope();
  Interface* interface = scope_->interface();

  Comment cmnt(masm_, "[ ModuleLiteral");
  SetStatementPosition(block);

  DCHECK(!modules_.is_null());
  DCHECK(module_index_ < modules_->length());
  int index = module_index_++;

  // Set up module context.
  DCHECK(interface->Index() >= 0);
  __ Push(Smi::FromInt(interface->Index()));
  __ Push(Smi::FromInt(0));
  __ CallRuntime(Runtime::kPushModuleContext, 2);
  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());

  {
    Comment cmnt(masm_, "[ Declarations");
    VisitDeclarations(scope_->declarations());
  }

  // Populate the module description.
  Handle<ModuleInfo> description =
      ModuleInfo::Create(isolate(), interface, scope_);
  modules_->set(index, *description);

  scope_ = saved_scope;
  // Pop module context.
  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
  // Update local stack frame context field.
  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
}

void FullCodeGenerator::VisitModuleVariable(ModuleVariable* module) {
  // Nothing to do.
  // The instance object is resolved statically through the module's interface.
}


void FullCodeGenerator::VisitModulePath(ModulePath* module) {
  // Nothing to do.
  // The instance object is resolved statically through the module's interface.
}


void FullCodeGenerator::VisitModuleUrl(ModuleUrl* module) {
  // TODO(rossberg): dummy allocation for now.
  Scope* scope = module->body()->scope();
  Interface* interface = scope_->interface();

  DCHECK(interface->IsModule() && interface->IsFrozen());
  DCHECK(!modules_.is_null());
  DCHECK(module_index_ < modules_->length());
  interface->Allocate(scope->module_var()->index());
  int index = module_index_++;

  Handle<ModuleInfo> description =
      ModuleInfo::Create(isolate(), interface, scope_);
  modules_->set(index, *description);
}

int FullCodeGenerator::DeclareGlobalsFlags() {
  DCHECK(DeclareGlobalsStrictMode::is_valid(strict_mode()));
  return DeclareGlobalsEvalFlag::encode(is_eval()) |
         DeclareGlobalsNativeFlag::encode(is_native()) |
         DeclareGlobalsStrictMode::encode(strict_mode());
}


void FullCodeGenerator::SetFunctionPosition(FunctionLiteral* fun) {
  CodeGenerator::RecordPositions(masm_, fun->start_position());
}


void FullCodeGenerator::SetReturnPosition(FunctionLiteral* fun) {
  CodeGenerator::RecordPositions(masm_, fun->end_position() - 1);
}


void FullCodeGenerator::SetStatementPosition(Statement* stmt) {
  if (!info_->is_debug()) {
    CodeGenerator::RecordPositions(masm_, stmt->position());
  } else {
    // Check if the statement will be breakable without adding a debug break
    // slot.
    BreakableStatementChecker checker(zone());
    checker.Check(stmt);
    // Record the statement position right here if the statement is not
    // breakable.  For breakable statements the actual recording of the
    // position will be postponed to the breakable code (typically an IC).
    bool position_recorded = CodeGenerator::RecordPositions(
        masm_, stmt->position(), !checker.is_breakable());
    // If the position recording did record a new position generate a debug
    // break slot to make the statement breakable.
    if (position_recorded) {
      DebugCodegen::GenerateSlot(masm_);
    }
  }
}

void FullCodeGenerator::VisitSuperReference(SuperReference* super) {
  DCHECK(FLAG_harmony_classes);
  UNIMPLEMENTED();
}

void FullCodeGenerator::SetExpressionPosition(Expression* expr) {
  if (!info_->is_debug()) {
    CodeGenerator::RecordPositions(masm_, expr->position());
  } else {
    // Check if the expression will be breakable without adding a debug break
    // slot.
    BreakableStatementChecker checker(zone());
    checker.Check(expr);
    // Record a statement position right here if the expression is not
    // breakable.  For breakable expressions the actual recording of the
    // position will be postponed to the breakable code (typically an IC).
    // NOTE this will record a statement position for something which might
    // not be a statement.  As stepping in the debugger will only stop at
    // statement positions this is used for e.g. the condition expression of
    // a do while loop.
    bool position_recorded = CodeGenerator::RecordPositions(
        masm_, expr->position(), !checker.is_breakable());
    // If the position recording did record a new position generate a debug
    // break slot to make the statement breakable.
    if (position_recorded) {
      DebugCodegen::GenerateSlot(masm_);
    }
  }
}


void FullCodeGenerator::SetSourcePosition(int pos) {
  if (pos != RelocInfo::kNoPosition) {
    masm_->positions_recorder()->RecordPosition(pos);
  }
}

// Lookup table for code generators for special runtime calls which are
// generated inline.
#define INLINE_FUNCTION_GENERATOR_ADDRESS(Name, argc, ressize) \
    &FullCodeGenerator::Emit##Name,

const FullCodeGenerator::InlineFunctionGenerator
    FullCodeGenerator::kInlineFunctionGenerators[] = {
      INLINE_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_ADDRESS)
    };
#undef INLINE_FUNCTION_GENERATOR_ADDRESS


FullCodeGenerator::InlineFunctionGenerator
    FullCodeGenerator::FindInlineFunctionGenerator(Runtime::FunctionId id) {
  int lookup_index =
      static_cast<int>(id) - static_cast<int>(Runtime::kFirstInlineFunction);
  DCHECK(lookup_index >= 0);
  DCHECK(static_cast<size_t>(lookup_index) <
         ARRAY_SIZE(kInlineFunctionGenerators));
  return kInlineFunctionGenerators[lookup_index];
}


void FullCodeGenerator::EmitInlineRuntimeCall(CallRuntime* expr) {
  const Runtime::Function* function = expr->function();
  DCHECK(function != NULL);
  DCHECK(function->intrinsic_type == Runtime::INLINE);
  InlineFunctionGenerator generator =
      FindInlineFunctionGenerator(function->function_id);
  ((*this).*(generator))(expr);
}


void FullCodeGenerator::EmitGeneratorNext(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  EmitGeneratorResume(args->at(0), args->at(1), JSGeneratorObject::NEXT);
}


void FullCodeGenerator::EmitGeneratorThrow(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  EmitGeneratorResume(args->at(0), args->at(1), JSGeneratorObject::THROW);
}
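
// In unoptimized code a debug break in optimized code has no effect; the
// inline call simply produces the Smi 0.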
void FullCodeGenerator::EmitDebugBreakInOptimizedCode(CallRuntime* expr) {
  context()->Plug(handle(Smi::FromInt(0), isolate()));
}

void FullCodeGenerator::VisitBinaryOperation(BinaryOperation* expr) {
  switch (expr->op()) {
    case Token::COMMA:
      return VisitComma(expr);
    case Token::OR:
    case Token::AND:
      return VisitLogicalExpression(expr);
    default:
      return VisitArithmeticExpression(expr);
  }
}


void FullCodeGenerator::VisitInDuplicateContext(Expression* expr) {
  if (context()->IsEffect()) {
    VisitForEffect(expr);
  } else if (context()->IsAccumulatorValue()) {
    VisitForAccumulatorValue(expr);
  } else if (context()->IsStackValue()) {
    VisitForStackValue(expr);
  } else if (context()->IsTest()) {
    const TestContext* test = TestContext::cast(context());
    VisitForControl(expr, test->true_label(), test->false_label(),
                    test->fall_through());
  }
}


void FullCodeGenerator::VisitComma(BinaryOperation* expr) {
  Comment cmnt(masm_, "[ Comma");
  VisitForEffect(expr->left());
  VisitInDuplicateContext(expr->right());
}
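
// For `a && b` only a truthy `a` evaluates `b`, for `a || b` only a falsy
// one; the value of the whole expression is the last operand evaluated.
// The cases below differ only in where the surrounding context needs that
// value: nowhere, in the accumulator, on the stack, or as a branch.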
void FullCodeGenerator::VisitLogicalExpression(BinaryOperation* expr) {
  bool is_logical_and = expr->op() == Token::AND;
  Comment cmnt(masm_, is_logical_and ? "[ Logical AND" : "[ Logical OR");
  Expression* left = expr->left();
  Expression* right = expr->right();
  BailoutId right_id = expr->RightId();
  Label done;

  if (context()->IsTest()) {
    Label eval_right;
    const TestContext* test = TestContext::cast(context());
    if (is_logical_and) {
      VisitForControl(left, &eval_right, test->false_label(), &eval_right);
    } else {
      VisitForControl(left, test->true_label(), &eval_right, &eval_right);
    }
    PrepareForBailoutForId(right_id, NO_REGISTERS);
    __ bind(&eval_right);

  } else if (context()->IsAccumulatorValue()) {
    VisitForAccumulatorValue(left);
    // We want the value in the accumulator for the test, and on the stack in
    // case we need it.
    __ Push(result_register());
    Label discard, restore;
    if (is_logical_and) {
      DoTest(left, &discard, &restore, &restore);
    } else {
      DoTest(left, &restore, &discard, &restore);
    }
    __ bind(&restore);
    __ Pop(result_register());
    __ jmp(&done);
    __ bind(&discard);
    __ Drop(1);
    PrepareForBailoutForId(right_id, NO_REGISTERS);

  } else if (context()->IsStackValue()) {
    VisitForAccumulatorValue(left);
    // We want the value in the accumulator for the test, and on the stack in
    // case we need it.
    __ Push(result_register());
    Label discard;
    if (is_logical_and) {
      DoTest(left, &discard, &done, &discard);
    } else {
      DoTest(left, &done, &discard, &discard);
    }
    __ bind(&discard);
    __ Drop(1);
    PrepareForBailoutForId(right_id, NO_REGISTERS);

  } else {
    DCHECK(context()->IsEffect());
    Label eval_right;
    if (is_logical_and) {
      VisitForControl(left, &eval_right, &done, &eval_right);
    } else {
      VisitForControl(left, &done, &eval_right, &eval_right);
    }
    PrepareForBailoutForId(right_id, NO_REGISTERS);
    __ bind(&eval_right);
  }

  VisitInDuplicateContext(right);
  __ bind(&done);
}

void FullCodeGenerator::VisitArithmeticExpression(BinaryOperation* expr) {
  Token::Value op = expr->op();
  Comment cmnt(masm_, "[ ArithmeticExpression");
  Expression* left = expr->left();
  Expression* right = expr->right();
  OverwriteMode mode =
      left->ResultOverwriteAllowed()
      ? OVERWRITE_LEFT
      : (right->ResultOverwriteAllowed() ? OVERWRITE_RIGHT : NO_OVERWRITE);

  VisitForStackValue(left);
  VisitForAccumulatorValue(right);

  SetSourcePosition(expr->position());
  if (ShouldInlineSmiCase(op)) {
    EmitInlineSmiBinaryOp(expr, op, mode, left, right);
  } else {
    EmitBinaryOp(expr, op, mode);
  }
}

void FullCodeGenerator::VisitBlock(Block* stmt) {
  Comment cmnt(masm_, "[ Block");
  NestedBlock nested_block(this, stmt);
  SetStatementPosition(stmt);

  Scope* saved_scope = scope();
  // Push a block context when entering a block with block scoped variables.
  if (stmt->scope() == NULL) {
    PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  } else {
    scope_ = stmt->scope();
    DCHECK(!scope_->is_module_scope());
    { Comment cmnt(masm_, "[ Extend block context");
      __ Push(scope_->GetScopeInfo());
      PushFunctionArgumentForContextAllocation();
      __ CallRuntime(Runtime::kPushBlockContext, 2);

      // Replace the context stored in the frame.
      StoreToFrameField(StandardFrameConstants::kContextOffset,
                        context_register());
      PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
    }
    { Comment cmnt(masm_, "[ Declarations");
      VisitDeclarations(scope_->declarations());
      PrepareForBailoutForId(stmt->DeclsId(), NO_REGISTERS);
    }
  }

  VisitStatements(stmt->statements());
  scope_ = saved_scope;
  __ bind(nested_block.break_label());

  // Pop block context if necessary.
  if (stmt->scope() != NULL) {
    LoadContextField(context_register(), Context::PREVIOUS_INDEX);
    // Update local stack frame context field.
    StoreToFrameField(StandardFrameConstants::kContextOffset,
                      context_register());
  }
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}

void FullCodeGenerator::VisitModuleStatement(ModuleStatement* stmt) {
  Comment cmnt(masm_, "[ Module context");

  __ Push(Smi::FromInt(stmt->proxy()->interface()->Index()));
  __ Push(Smi::FromInt(0));
  __ CallRuntime(Runtime::kPushModuleContext, 2);
  StoreToFrameField(
      StandardFrameConstants::kContextOffset, context_register());

  Scope* saved_scope = scope_;
  scope_ = stmt->body()->scope();
  VisitStatements(stmt->body()->statements());
  scope_ = saved_scope;
  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
  // Update local stack frame context field.
  StoreToFrameField(StandardFrameConstants::kContextOffset,
                    context_register());
}

void FullCodeGenerator::VisitExpressionStatement(ExpressionStatement* stmt) {
  Comment cmnt(masm_, "[ ExpressionStatement");
  SetStatementPosition(stmt);
  VisitForEffect(stmt->expression());
}


void FullCodeGenerator::VisitEmptyStatement(EmptyStatement* stmt) {
  Comment cmnt(masm_, "[ EmptyStatement");
  SetStatementPosition(stmt);
}


void FullCodeGenerator::VisitIfStatement(IfStatement* stmt) {
  Comment cmnt(masm_, "[ IfStatement");
  SetStatementPosition(stmt);
  Label then_part, else_part, done;

  if (stmt->HasElseStatement()) {
    VisitForControl(stmt->condition(), &then_part, &else_part, &then_part);
    PrepareForBailoutForId(stmt->ThenId(), NO_REGISTERS);
    __ bind(&then_part);
    Visit(stmt->then_statement());
    __ jmp(&done);

    PrepareForBailoutForId(stmt->ElseId(), NO_REGISTERS);
    __ bind(&else_part);
    Visit(stmt->else_statement());
  } else {
    VisitForControl(stmt->condition(), &then_part, &done, &then_part);
    PrepareForBailoutForId(stmt->ThenId(), NO_REGISTERS);
    __ bind(&then_part);
    Visit(stmt->then_statement());

    PrepareForBailoutForId(stmt->ElseId(), NO_REGISTERS);
  }
  __ bind(&done);
  PrepareForBailoutForId(stmt->IfId(), NO_REGISTERS);
}

void FullCodeGenerator::VisitContinueStatement(ContinueStatement* stmt) {
  Comment cmnt(masm_, "[ ContinueStatement");
  SetStatementPosition(stmt);
  NestedStatement* current = nesting_stack_;
  int stack_depth = 0;
  int context_length = 0;
  // When continuing, we clobber the unpredictable value in the accumulator
  // with one that's safe for GC.  If we hit an exit from the try block of
  // try...finally on our way out, we will unconditionally preserve the
  // accumulator on the stack.
  ClearAccumulator();
  while (!current->IsContinueTarget(stmt->target())) {
    current = current->Exit(&stack_depth, &context_length);
  }
  __ Drop(stack_depth);
  if (context_length > 0) {
    while (context_length > 0) {
      LoadContextField(context_register(), Context::PREVIOUS_INDEX);
      --context_length;
    }
    StoreToFrameField(StandardFrameConstants::kContextOffset,
                      context_register());
  }

  __ jmp(current->AsIteration()->continue_label());
}


void FullCodeGenerator::VisitBreakStatement(BreakStatement* stmt) {
  Comment cmnt(masm_, "[ BreakStatement");
  SetStatementPosition(stmt);
  NestedStatement* current = nesting_stack_;
  int stack_depth = 0;
  int context_length = 0;
  // When breaking, we clobber the unpredictable value in the accumulator
  // with one that's safe for GC.  If we hit an exit from the try block of
  // try...finally on our way out, we will unconditionally preserve the
  // accumulator on the stack.
  ClearAccumulator();
  while (!current->IsBreakTarget(stmt->target())) {
    current = current->Exit(&stack_depth, &context_length);
  }
  __ Drop(stack_depth);
  if (context_length > 0) {
    while (context_length > 0) {
      LoadContextField(context_register(), Context::PREVIOUS_INDEX);
      --context_length;
    }
    StoreToFrameField(StandardFrameConstants::kContextOffset,
                      context_register());
  }

  __ jmp(current->AsBreakable()->break_label());
}

void FullCodeGenerator::EmitUnwindBeforeReturn() {
  NestedStatement* current = nesting_stack_;
  int stack_depth = 0;
  int context_length = 0;
  while (current != NULL) {
    current = current->Exit(&stack_depth, &context_length);
  }
  __ Drop(stack_depth);
}


void FullCodeGenerator::VisitReturnStatement(ReturnStatement* stmt) {
  Comment cmnt(masm_, "[ ReturnStatement");
  SetStatementPosition(stmt);
  Expression* expr = stmt->expression();
  VisitForAccumulatorValue(expr);
  EmitUnwindBeforeReturn();
  EmitReturnSequence();
}

void FullCodeGenerator::VisitWithStatement(WithStatement* stmt) {
  Comment cmnt(masm_, "[ WithStatement");
  SetStatementPosition(stmt);

  VisitForStackValue(stmt->expression());
  PushFunctionArgumentForContextAllocation();
  __ CallRuntime(Runtime::kPushWithContext, 2);
  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());

  Scope* saved_scope = scope();
  scope_ = stmt->scope();
  { WithOrCatch body(this);
    Visit(stmt->statement());
  }
  scope_ = saved_scope;

  // Pop context.
  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
  // Update local stack frame context field.
  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
}
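
// Each loop visitor below calls EmitBackEdgeBookkeeping at the jump back to
// the top of the loop; that back edge is where interrupt checks and on-stack
// replacement hook in (see BackEdgeTable at the end of this file).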
void FullCodeGenerator::VisitDoWhileStatement(DoWhileStatement* stmt) {
  Comment cmnt(masm_, "[ DoWhileStatement");
  SetStatementPosition(stmt);
  Label body, book_keeping;

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  __ bind(&body);
  Visit(stmt->body());

  // Record the position of the do while condition and make sure it is
  // possible to break on the condition.
  __ bind(loop_statement.continue_label());
  PrepareForBailoutForId(stmt->ContinueId(), NO_REGISTERS);
  SetExpressionPosition(stmt->cond());
  VisitForControl(stmt->cond(),
                  &book_keeping,
                  loop_statement.break_label(),
                  &book_keeping);

  // Check stack before looping.
  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
  __ bind(&book_keeping);
  EmitBackEdgeBookkeeping(stmt, &body);
  __ jmp(&body);

  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}

void FullCodeGenerator::VisitWhileStatement(WhileStatement* stmt) {
  Comment cmnt(masm_, "[ WhileStatement");
  Label loop, body;

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  __ bind(&loop);
  SetExpressionPosition(stmt->cond());
  VisitForControl(stmt->cond(),
                  &body,
                  loop_statement.break_label(),
                  &body);

  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&body);
  Visit(stmt->body());

  __ bind(loop_statement.continue_label());

  // Check stack before looping.
  EmitBackEdgeBookkeeping(stmt, &loop);
  __ jmp(&loop);

  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}

void FullCodeGenerator::VisitForStatement(ForStatement* stmt) {
  Comment cmnt(masm_, "[ ForStatement");
  Label test, body;

  Iteration loop_statement(this, stmt);

  // Set statement position for a break slot before entering the for-body.
  SetStatementPosition(stmt);

  if (stmt->init() != NULL) {
    Visit(stmt->init());
  }

  increment_loop_depth();
  // Emit the test at the bottom of the loop (even if empty).
  __ jmp(&test);

  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&body);
  Visit(stmt->body());

  PrepareForBailoutForId(stmt->ContinueId(), NO_REGISTERS);
  __ bind(loop_statement.continue_label());
  if (stmt->next() != NULL) {
    Visit(stmt->next());
  }

  // Emit the statement position here as this is where the for
  // statement code starts.
  SetStatementPosition(stmt);

  // Check stack before looping.
  EmitBackEdgeBookkeeping(stmt, &body);

  __ bind(&test);
  if (stmt->cond() != NULL) {
    VisitForControl(stmt->cond(),
                    &body,
                    loop_statement.break_label(),
                    loop_statement.break_label());
  } else {
    __ jmp(&body);
  }

  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}

void FullCodeGenerator::VisitTryCatchStatement(TryCatchStatement* stmt) {
  Comment cmnt(masm_, "[ TryCatchStatement");
  SetStatementPosition(stmt);
  // The try block adds a handler to the exception handler chain before
  // entering, and removes it again when exiting normally.  If an exception
  // is thrown during execution of the try block, the handler is consumed
  // and control is passed to the catch block with the exception in the
  // result register.

  Label try_entry, handler_entry, exit;
  __ jmp(&try_entry);
  __ bind(&handler_entry);
  handler_table()->set(stmt->index(), Smi::FromInt(handler_entry.pos()));
  // Exception handler code, the exception is in the result register.
  // Extend the context before executing the catch block.
  { Comment cmnt(masm_, "[ Extend catch context");
    __ Push(stmt->variable()->name());
    __ Push(result_register());
    PushFunctionArgumentForContextAllocation();
    __ CallRuntime(Runtime::kPushCatchContext, 3);
    StoreToFrameField(StandardFrameConstants::kContextOffset,
                      context_register());
  }

  Scope* saved_scope = scope();
  scope_ = stmt->scope();
  DCHECK(scope_->declarations()->is_empty());
  { WithOrCatch catch_body(this);
    Visit(stmt->catch_block());
  }
  // Restore the context.
  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
  scope_ = saved_scope;
  __ jmp(&exit);

  // Try block code. Sets up the exception handler chain.
  __ bind(&try_entry);
  __ PushTryHandler(StackHandler::CATCH, stmt->index());
  { TryCatch try_body(this);
    Visit(stmt->try_block());
  }
  __ PopTryHandler();
  __ bind(&exit);
}

void FullCodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
  Comment cmnt(masm_, "[ TryFinallyStatement");
  SetStatementPosition(stmt);
  // Try finally is compiled by setting up a try-handler on the stack while
  // executing the try body, and removing it again afterwards.
  //
  // The try-finally construct can enter the finally block in three ways:
  // 1. By exiting the try-block normally. This removes the try-handler and
  //    calls the finally block code before continuing.
  // 2. By exiting the try-block with a function-local control flow transfer
  //    (break/continue/return). The site of the, e.g., break removes the
  //    try handler and calls the finally block code before continuing
  //    its outward control transfer.
  // 3. By exiting the try-block with a thrown exception.
  //    This can happen in nested function calls. It traverses the try-handler
  //    chain and consumes the try-handler entry before jumping to the
  //    handler code. The handler code then calls the finally-block before
  //    rethrowing the exception.
  //
  // The finally block must assume a return address on top of the stack
  // (or in the link register on ARM chips) and a value (return value or
  // exception) in the result register (rax/eax/r0), both of which must
  // be preserved. The return address isn't GC-safe, so it should be
  // cooked before GC.
  Label try_entry, handler_entry, finally_entry;

  // Jump to try-handler setup and try-block code.
  __ jmp(&try_entry);
  __ bind(&handler_entry);
  handler_table()->set(stmt->index(), Smi::FromInt(handler_entry.pos()));
  // Exception handler code. This code is only executed when an exception
  // is thrown. The exception is in the result register, and must be
  // preserved by the finally block. Call the finally block and then
  // rethrow the exception if it returns.
  __ Call(&finally_entry);
  __ Push(result_register());
  __ CallRuntime(Runtime::kReThrow, 1);

  // Finally block implementation.
  __ bind(&finally_entry);
  EnterFinallyBlock();
  { Finally finally_body(this);
    Visit(stmt->finally_block());
  }
  ExitFinallyBlock();  // Return to the calling code.

  // Set up try handler.
  __ bind(&try_entry);
  __ PushTryHandler(StackHandler::FINALLY, stmt->index());
  { TryFinally try_body(this, &finally_entry);
    Visit(stmt->try_block());
  }
  __ PopTryHandler();
  // Execute the finally block on the way out.  Clobber the unpredictable
  // value in the result register with one that's safe for GC because the
  // finally block will unconditionally preserve the result register on the
  // stack.
  ClearAccumulator();
  __ Call(&finally_entry);
}

void FullCodeGenerator::VisitDebuggerStatement(DebuggerStatement* stmt) {
  Comment cmnt(masm_, "[ DebuggerStatement");
  SetStatementPosition(stmt);

  __ DebugBreak();
  // Ignore the return value.

  PrepareForBailoutForId(stmt->DebugBreakId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitCaseClause(CaseClause* clause) {
  UNREACHABLE();
}

void FullCodeGenerator::VisitConditional(Conditional* expr) {
  Comment cmnt(masm_, "[ Conditional");
  Label true_case, false_case, done;
  VisitForControl(expr->condition(), &true_case, &false_case, &true_case);

  PrepareForBailoutForId(expr->ThenId(), NO_REGISTERS);
  __ bind(&true_case);
  SetExpressionPosition(expr->then_expression());
  if (context()->IsTest()) {
    const TestContext* for_test = TestContext::cast(context());
    VisitForControl(expr->then_expression(),
                    for_test->true_label(),
                    for_test->false_label(),
                    NULL);
  } else {
    VisitInDuplicateContext(expr->then_expression());
    __ jmp(&done);
  }

  PrepareForBailoutForId(expr->ElseId(), NO_REGISTERS);
  __ bind(&false_case);
  SetExpressionPosition(expr->else_expression());
  VisitInDuplicateContext(expr->else_expression());
  // If control flow falls through Visit, merge it with true case here.
  if (!context()->IsTest()) {
    __ bind(&done);
  }
}


void FullCodeGenerator::VisitLiteral(Literal* expr) {
  Comment cmnt(masm_, "[ Literal");
  context()->Plug(expr->value());
}

void FullCodeGenerator::VisitFunctionLiteral(FunctionLiteral* expr) {
  Comment cmnt(masm_, "[ FunctionLiteral");

  // Build the function boilerplate and instantiate it.
  Handle<SharedFunctionInfo> function_info =
      Compiler::BuildFunctionInfo(expr, script(), info_);
  if (function_info.is_null()) {
    SetStackOverflow();
    return;
  }
  EmitNewClosure(function_info, expr->pretenure());
}


void FullCodeGenerator::VisitNativeFunctionLiteral(
    NativeFunctionLiteral* expr) {
  Comment cmnt(masm_, "[ NativeFunctionLiteral");

  // Compute the function template for the native function.
  Handle<String> name = expr->name();
  v8::Handle<v8::FunctionTemplate> fun_template =
      expr->extension()->GetNativeFunctionTemplate(
          reinterpret_cast<v8::Isolate*>(isolate()), v8::Utils::ToLocal(name));
  DCHECK(!fun_template.IsEmpty());

  // Instantiate the function and create a shared function info from it.
  Handle<JSFunction> fun = Utils::OpenHandle(*fun_template->GetFunction());
  const int literals = fun->NumberOfLiterals();
  Handle<Code> code = Handle<Code>(fun->shared()->code());
  Handle<Code> construct_stub = Handle<Code>(fun->shared()->construct_stub());
  bool is_generator = false;
  bool is_arrow = false;
  Handle<SharedFunctionInfo> shared =
      isolate()->factory()->NewSharedFunctionInfo(
          name, literals, is_generator, is_arrow, code,
          Handle<ScopeInfo>(fun->shared()->scope_info()),
          Handle<FixedArray>(fun->shared()->feedback_vector()));
  shared->set_construct_stub(*construct_stub);

  // Copy the function data to the shared function info.
  shared->set_function_data(fun->shared()->function_data());
  int parameters = fun->shared()->formal_parameter_count();
  shared->set_formal_parameter_count(parameters);

  EmitNewClosure(shared, false);
}


void FullCodeGenerator::VisitThrow(Throw* expr) {
  Comment cmnt(masm_, "[ Throw");
  VisitForStackValue(expr->exception());
  __ CallRuntime(Runtime::kThrow, 1);
  // Never returns here.
}

FullCodeGenerator::NestedStatement* FullCodeGenerator::TryCatch::Exit(
    int* stack_depth,
    int* context_length) {
  // The macros used here must preserve the result register.
  __ Drop(*stack_depth);
  __ PopTryHandler();
  *stack_depth = 0;
  return previous_;
}
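
// Fast paths for comparisons against a literal, such as `typeof x ==
// "string"`, `x == undefined`, or `x == null`; each gets a specialized
// code sequence instead of the generic compare.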
bool FullCodeGenerator::TryLiteralCompare(CompareOperation* expr) {
  Expression* sub_expr;
  Handle<String> check;
  if (expr->IsLiteralCompareTypeof(&sub_expr, &check)) {
    EmitLiteralCompareTypeof(expr, sub_expr, check);
    return true;
  }

  if (expr->IsLiteralCompareUndefined(&sub_expr, isolate())) {
    EmitLiteralCompareNil(expr, sub_expr, kUndefinedValue);
    return true;
  }

  if (expr->IsLiteralCompareNull(&sub_expr)) {
    EmitLiteralCompareNil(expr, sub_expr, kNullValue);
    return true;
  }

  return false;
}
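
// The BackEdgeTable functions below patch the interrupt checks that
// unoptimized code emits at loop back edges: Patch() redirects them to
// on-stack replacement, Revert() restores the plain interrupt check, and
// AddStackCheck()/RemoveStackCheck() toggle the OSR-after-stack-check state
// for a single back edge.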
void BackEdgeTable::Patch(Isolate* isolate, Code* unoptimized) {
  DisallowHeapAllocation no_gc;
  Code* patch = isolate->builtins()->builtin(Builtins::kOnStackReplacement);

  // Increment loop nesting level by one and iterate over the back edge table
  // to find the matching loops to patch the interrupt
  // call to an unconditional call to the replacement code.
  int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level() + 1;
  if (loop_nesting_level > Code::kMaxLoopNestingMarker) return;

  BackEdgeTable back_edges(unoptimized, &no_gc);
  for (uint32_t i = 0; i < back_edges.length(); i++) {
    if (static_cast<int>(back_edges.loop_depth(i)) == loop_nesting_level) {
      DCHECK_EQ(INTERRUPT, GetBackEdgeState(isolate,
                                            unoptimized,
                                            back_edges.pc(i)));
      PatchAt(unoptimized, back_edges.pc(i), ON_STACK_REPLACEMENT, patch);
    }
  }

  unoptimized->set_allow_osr_at_loop_nesting_level(loop_nesting_level);
  DCHECK(Verify(isolate, unoptimized));
}

void BackEdgeTable::Revert(Isolate* isolate, Code* unoptimized) {
  DisallowHeapAllocation no_gc;
  Code* patch = isolate->builtins()->builtin(Builtins::kInterruptCheck);

  // Iterate over the back edge table and revert the patched interrupt calls.
  int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level();

  BackEdgeTable back_edges(unoptimized, &no_gc);
  for (uint32_t i = 0; i < back_edges.length(); i++) {
    if (static_cast<int>(back_edges.loop_depth(i)) <= loop_nesting_level) {
      DCHECK_NE(INTERRUPT, GetBackEdgeState(isolate,
                                            unoptimized,
                                            back_edges.pc(i)));
      PatchAt(unoptimized, back_edges.pc(i), INTERRUPT, patch);
    }
  }

  unoptimized->set_allow_osr_at_loop_nesting_level(0);
  // Assert that none of the back edges are patched anymore.
  DCHECK(Verify(isolate, unoptimized));
}

void BackEdgeTable::AddStackCheck(Handle<Code> code, uint32_t pc_offset) {
  DisallowHeapAllocation no_gc;
  Isolate* isolate = code->GetIsolate();
  Address pc = code->instruction_start() + pc_offset;
  Code* patch = isolate->builtins()->builtin(Builtins::kOsrAfterStackCheck);
  PatchAt(*code, pc, OSR_AFTER_STACK_CHECK, patch);
}


void BackEdgeTable::RemoveStackCheck(Handle<Code> code, uint32_t pc_offset) {
  DisallowHeapAllocation no_gc;
  Isolate* isolate = code->GetIsolate();
  Address pc = code->instruction_start() + pc_offset;

  if (OSR_AFTER_STACK_CHECK == GetBackEdgeState(isolate, *code, pc)) {
    Code* patch = isolate->builtins()->builtin(Builtins::kOnStackReplacement);
    PatchAt(*code, pc, ON_STACK_REPLACEMENT, patch);
  }
}

#ifdef DEBUG
bool BackEdgeTable::Verify(Isolate* isolate, Code* unoptimized) {
  DisallowHeapAllocation no_gc;
  int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level();
  BackEdgeTable back_edges(unoptimized, &no_gc);
  for (uint32_t i = 0; i < back_edges.length(); i++) {
    uint32_t loop_depth = back_edges.loop_depth(i);
    CHECK_LE(static_cast<int>(loop_depth), Code::kMaxLoopNestingMarker);
    // Assert that all back edges for shallower loops (and only those)
    // have already been patched.
    CHECK_EQ((static_cast<int>(loop_depth) <= loop_nesting_level),
             GetBackEdgeState(isolate,
                              unoptimized,
                              back_edges.pc(i)) != INTERRUPT);
  }
  return true;
}
#endif  // DEBUG


#undef __


} }  // namespace v8::internal