1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
10 #include "full-codegen.h"
12 #include "macro-assembler.h"
13 #include "prettyprinter.h"
15 #include "scopeinfo.h"
17 #include "stub-cache.h"
22 void BreakableStatementChecker::Check(Statement* stmt) {
27 void BreakableStatementChecker::Check(Expression* expr) {
32 void BreakableStatementChecker::VisitVariableDeclaration(
33 VariableDeclaration* decl) {
36 void BreakableStatementChecker::VisitFunctionDeclaration(
37 FunctionDeclaration* decl) {
40 void BreakableStatementChecker::VisitModuleDeclaration(
41 ModuleDeclaration* decl) {
44 void BreakableStatementChecker::VisitImportDeclaration(
45 ImportDeclaration* decl) {
48 void BreakableStatementChecker::VisitExportDeclaration(
49 ExportDeclaration* decl) {
53 void BreakableStatementChecker::VisitModuleLiteral(ModuleLiteral* module) {
57 void BreakableStatementChecker::VisitModuleVariable(ModuleVariable* module) {
61 void BreakableStatementChecker::VisitModulePath(ModulePath* module) {
65 void BreakableStatementChecker::VisitModuleUrl(ModuleUrl* module) {
69 void BreakableStatementChecker::VisitModuleStatement(ModuleStatement* stmt) {
73 void BreakableStatementChecker::VisitBlock(Block* stmt) {
77 void BreakableStatementChecker::VisitExpressionStatement(
78 ExpressionStatement* stmt) {
79 // Check if the expression is breakable.
80 Visit(stmt->expression());
84 void BreakableStatementChecker::VisitEmptyStatement(EmptyStatement* stmt) {
88 void BreakableStatementChecker::VisitIfStatement(IfStatement* stmt) {
89 // If the condition is breakable the if statement is breakable.
90 Visit(stmt->condition());
94 void BreakableStatementChecker::VisitContinueStatement(
95 ContinueStatement* stmt) {
99 void BreakableStatementChecker::VisitBreakStatement(BreakStatement* stmt) {
103 void BreakableStatementChecker::VisitReturnStatement(ReturnStatement* stmt) {
104 // Return is breakable if the expression is.
105 Visit(stmt->expression());
109 void BreakableStatementChecker::VisitWithStatement(WithStatement* stmt) {
110 Visit(stmt->expression());
114 void BreakableStatementChecker::VisitSwitchStatement(SwitchStatement* stmt) {
115 // Switch statements are breakable if the tag expression is.
120 void BreakableStatementChecker::VisitDoWhileStatement(DoWhileStatement* stmt) {
121 // Mark do while as breakable to avoid adding a break slot in front of it.
122 is_breakable_ = true;
126 void BreakableStatementChecker::VisitWhileStatement(WhileStatement* stmt) {
127 // Mark while statements breakable if the condition expression is.
132 void BreakableStatementChecker::VisitForStatement(ForStatement* stmt) {
133 // Mark for statements breakable if the condition expression is.
134 if (stmt->cond() != NULL) {
140 void BreakableStatementChecker::VisitForInStatement(ForInStatement* stmt) {
141 // Mark for in statements breakable if the enumerable expression is.
142 Visit(stmt->enumerable());
146 void BreakableStatementChecker::VisitForOfStatement(ForOfStatement* stmt) {
147 // For-of is breakable because of the next() call.
148 is_breakable_ = true;
152 void BreakableStatementChecker::VisitTryCatchStatement(
153 TryCatchStatement* stmt) {
154 // Mark try catch as breakable to avoid adding a break slot in front of it.
155 is_breakable_ = true;
159 void BreakableStatementChecker::VisitTryFinallyStatement(
160 TryFinallyStatement* stmt) {
161 // Mark try finally as breakable to avoid adding a break slot in front of it.
162 is_breakable_ = true;
166 void BreakableStatementChecker::VisitDebuggerStatement(
167 DebuggerStatement* stmt) {
168 // The debugger statement is breakable.
169 is_breakable_ = true;
173 void BreakableStatementChecker::VisitCaseClause(CaseClause* clause) {
177 void BreakableStatementChecker::VisitFunctionLiteral(FunctionLiteral* expr) {
181 void BreakableStatementChecker::VisitNativeFunctionLiteral(
182 NativeFunctionLiteral* expr) {
186 void BreakableStatementChecker::VisitConditional(Conditional* expr) {
190 void BreakableStatementChecker::VisitVariableProxy(VariableProxy* expr) {
194 void BreakableStatementChecker::VisitLiteral(Literal* expr) {
198 void BreakableStatementChecker::VisitRegExpLiteral(RegExpLiteral* expr) {
202 void BreakableStatementChecker::VisitObjectLiteral(ObjectLiteral* expr) {
206 void BreakableStatementChecker::VisitArrayLiteral(ArrayLiteral* expr) {
210 void BreakableStatementChecker::VisitAssignment(Assignment* expr) {
211 // If assigning to a property (including a global property) the assignment is
212 // breakable.
213 VariableProxy* proxy = expr->target()->AsVariableProxy();
214 Property* prop = expr->target()->AsProperty();
215 if (prop != NULL || (proxy != NULL && proxy->var()->IsUnallocated())) {
216 is_breakable_ = true;
220 // Otherwise the assignment is breakable if the assigned value is.
221 Visit(expr->value());
225 void BreakableStatementChecker::VisitYield(Yield* expr) {
226 // Yield is breakable if the expression is.
227 Visit(expr->expression());
231 void BreakableStatementChecker::VisitThrow(Throw* expr) {
232 // Throw is breakable if the expression is.
233 Visit(expr->exception());
237 void BreakableStatementChecker::VisitProperty(Property* expr) {
238 // Property load is breakable.
239 is_breakable_ = true;
243 void BreakableStatementChecker::VisitCall(Call* expr) {
244 // Function calls both through IC and call stub are breakable.
245 is_breakable_ = true;
249 void BreakableStatementChecker::VisitCallNew(CallNew* expr) {
250 // Function calls through new are breakable.
251 is_breakable_ = true;
255 void BreakableStatementChecker::VisitCallRuntime(CallRuntime* expr) {
259 void BreakableStatementChecker::VisitUnaryOperation(UnaryOperation* expr) {
260 Visit(expr->expression());
264 void BreakableStatementChecker::VisitCountOperation(CountOperation* expr) {
265 Visit(expr->expression());
269 void BreakableStatementChecker::VisitBinaryOperation(BinaryOperation* expr) {
271 if (expr->op() != Token::AND &&
272 expr->op() != Token::OR) {
273 Visit(expr->right());
278 void BreakableStatementChecker::VisitCompareOperation(CompareOperation* expr) {
280 Visit(expr->right());
284 void BreakableStatementChecker::VisitThisFunction(ThisFunction* expr) {
288 #define __ ACCESS_MASM(masm())
290 bool FullCodeGenerator::MakeCode(CompilationInfo* info) {
291 Isolate* isolate = info->isolate();
293 Logger::TimerEventScope timer(
294 isolate, Logger::TimerEventScope::v8_compile_full_code);
296 Handle<Script> script = info->script();
297 if (!script->IsUndefined() && !script->source()->IsUndefined()) {
298 int len = String::cast(script->source())->length();
299 isolate->counters()->total_full_codegen_source_size()->Increment(len);
301 CodeGenerator::MakeCodePrologue(info, "full");
302 const int kInitialBufferSize = 4 * KB;
303 MacroAssembler masm(info->isolate(), NULL, kInitialBufferSize);
304 #ifdef ENABLE_GDB_JIT_INTERFACE
305 masm.positions_recorder()->StartGDBJITLineInfoRecording();
307 LOG_CODE_EVENT(isolate,
308 CodeStartLinePosInfoRecordEvent(masm.positions_recorder()));
310 FullCodeGenerator cgen(&masm, info);
312 if (cgen.HasStackOverflow()) {
313 ASSERT(!isolate->has_pending_exception());
316 unsigned table_offset = cgen.EmitBackEdgeTable();
318 Code::Flags flags = Code::ComputeFlags(Code::FUNCTION);
319 Handle<Code> code = CodeGenerator::MakeCodeEpilogue(&masm, flags, info);
320 code->set_optimizable(info->IsOptimizable() &&
321 !info->function()->dont_optimize() &&
322 info->function()->scope()->AllowsLazyCompilation());
323 cgen.PopulateDeoptimizationData(code);
324 cgen.PopulateTypeFeedbackInfo(code);
325 code->set_has_deoptimization_support(info->HasDeoptimizationSupport());
326 code->set_handler_table(*cgen.handler_table());
327 code->set_compiled_optimizable(info->IsOptimizable());
328 code->set_allow_osr_at_loop_nesting_level(0);
329 code->set_profiler_ticks(0);
330 code->set_back_edge_table_offset(table_offset);
331 code->set_back_edges_patched_for_osr(false);
332 CodeGenerator::PrintCode(code, info);
334 #ifdef ENABLE_GDB_JIT_INTERFACE
336 GDBJITLineInfo* lineinfo =
337 masm.positions_recorder()->DetachGDBJITLineInfo();
338 GDBJIT(RegisterDetailedLineInfo(*code, lineinfo));
341 void* line_info = masm.positions_recorder()->DetachJITHandlerData();
342 LOG_CODE_EVENT(isolate, CodeEndLinePosInfoRecordEvent(*code, line_info));
347 unsigned FullCodeGenerator::EmitBackEdgeTable() {
348 // The back edge table consists of a length (in number of entries)
349 // field, and then a sequence of entries. Each entry holds an AST id, a
350 // code-relative pc offset, and the loop depth, each emitted as one word.
351 masm()->Align(kIntSize);
352 unsigned offset = masm()->pc_offset();
353 unsigned length = back_edges_.length();
354 __ dd(length);
355 for (unsigned i = 0; i < length; ++i) {
356 __ dd(back_edges_[i].id.ToInt());
357 __ dd(back_edges_[i].pc);
358 __ dd(back_edges_[i].loop_depth);
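// Illustration only (not code used by V8): the layout that EmitBackEdgeTable
// writes, expressed as plain structs. Each __ dd() call above emits one
// 32-bit word, so the table is a length word followed by three-word entries.
// The struct and field names below are for exposition, not V8's BackEdgeTable
// accessor class.
struct BackEdgeTableEntryForIllustration {
  uint32_t ast_id;      // back_edges_[i].id.ToInt()
  uint32_t pc_offset;   // back_edges_[i].pc, relative to the code start
  uint32_t loop_depth;  // back_edges_[i].loop_depth
};
struct BackEdgeTableForIllustration {
  uint32_t length;  // number of entries that follow
  BackEdgeTableEntryForIllustration entries[1];  // 'length' entries in a row
};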
364 void FullCodeGenerator::EnsureSlotContainsAllocationSite(int slot) {
365 Handle<FixedArray> vector = FeedbackVector();
366 if (!vector->get(slot)->IsAllocationSite()) {
367 Handle<AllocationSite> allocation_site =
368 isolate()->factory()->NewAllocationSite();
369 vector->set(slot, *allocation_site);
374 void FullCodeGenerator::PopulateDeoptimizationData(Handle<Code> code) {
375 // Fill in the deoptimization information.
376 ASSERT(info_->HasDeoptimizationSupport() || bailout_entries_.is_empty());
377 if (!info_->HasDeoptimizationSupport()) return;
378 int length = bailout_entries_.length();
379 Handle<DeoptimizationOutputData> data =
380 DeoptimizationOutputData::New(isolate(), length, TENURED);
381 for (int i = 0; i < length; i++) {
382 data->SetAstId(i, bailout_entries_[i].id);
383 data->SetPcAndState(i, Smi::FromInt(bailout_entries_[i].pc_and_state));
385 code->set_deoptimization_data(*data);
389 void FullCodeGenerator::PopulateTypeFeedbackInfo(Handle<Code> code) {
390 Handle<TypeFeedbackInfo> info = isolate()->factory()->NewTypeFeedbackInfo();
391 info->set_ic_total_count(ic_total_count_);
392 ASSERT(!isolate()->heap()->InNewSpace(*info));
393 code->set_type_feedback_info(*info);
397 void FullCodeGenerator::Initialize() {
398 InitializeAstVisitor(info_->zone());
399 // The generation of debug code must match between the snapshot code and the
400 // code that is generated later. This is assumed by the debugger when it is
401 // calculating PC offsets after generating a debug version of code. Therefore
402 // we disable the production of debug code in the full compiler if we are
403 // either generating a snapshot or we booted from a snapshot.
404 generate_debug_code_ = FLAG_debug_code &&
405 !Serializer::enabled(isolate()) &&
406 !Snapshot::HaveASnapshotToStartFrom();
407 masm_->set_emit_debug_code(generate_debug_code_);
408 masm_->set_predictable_code_size(true);
412 void FullCodeGenerator::PrepareForBailout(Expression* node, State state) {
413 PrepareForBailoutForId(node->id(), state);
417 void FullCodeGenerator::CallLoadIC(ContextualMode contextual_mode,
419 ExtraICState extra_state = LoadIC::ComputeExtraICState(contextual_mode);
420 Handle<Code> ic = LoadIC::initialize_stub(isolate(), extra_state);
425 void FullCodeGenerator::CallStoreIC(TypeFeedbackId id) {
426 Handle<Code> ic = StoreIC::initialize_stub(isolate(), strict_mode());
431 void FullCodeGenerator::RecordJSReturnSite(Call* call) {
432 // We record the offset of the function return so we can rebuild the frame
433 // if the function was inlined, i.e., this is the return address in the
434 // inlined function's frame.
436 // The state is ignored. We defensively set it to TOS_REG, which is the
437 // real state of the unoptimized code at the return site.
438 PrepareForBailoutForId(call->ReturnId(), TOS_REG);
440 // In debug builds, mark the return so we can verify that this function
442 ASSERT(!call->return_is_recorded_);
443 call->return_is_recorded_ = true;
448 void FullCodeGenerator::PrepareForBailoutForId(BailoutId id, State state) {
449 // There's no need to prepare this code for bailouts from already optimized
450 // code or code that can't be optimized.
451 if (!info_->HasDeoptimizationSupport()) return;
452 unsigned pc_and_state =
453 StateField::encode(state) | PcField::encode(masm_->pc_offset());
454 ASSERT(Smi::IsValid(pc_and_state));
455 BailoutEntry entry = { id, pc_and_state };
456 ASSERT(!prepared_bailout_ids_.Contains(id.ToInt()));
457 prepared_bailout_ids_.Add(id.ToInt(), zone());
458 bailout_entries_.Add(entry, zone());
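// Illustration only (assumed field widths, not V8's actual StateField/PcField
// BitField layout): the encoding above packs a small state tag and a pc
// offset into a single unsigned word that must still be a valid Smi.
static inline unsigned EncodePcAndStateForIllustration(unsigned state,
                                                       unsigned pc_offset) {
  const unsigned kAssumedStateBits = 1;  // assumption: two states need 1 bit
  return (state & ((1u << kAssumedStateBits) - 1)) |
         (pc_offset << kAssumedStateBits);
}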
462 void FullCodeGenerator::RecordBackEdge(BailoutId ast_id) {
463 // The pc offset does not need to be encoded and packed together with a state.
464 ASSERT(masm_->pc_offset() > 0);
465 ASSERT(loop_depth() > 0);
466 uint8_t depth = Min(loop_depth(), Code::kMaxLoopNestingMarker);
467 BackEdgeEntry entry =
468 { ast_id, static_cast<unsigned>(masm_->pc_offset()), depth };
469 back_edges_.Add(entry, zone());
473 bool FullCodeGenerator::ShouldInlineSmiCase(Token::Value op) {
474 // Inline smi case inside loops, but not division and modulo which
475 // are too complicated and take up too much space.
476 if (op == Token::DIV || op == Token::MOD) return false;
477 if (FLAG_always_inline_smi_code) return true;
478 return loop_depth_ > 0;
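// For example (with FLAG_always_inline_smi_code off): "a + b" inside a loop
// body (loop_depth_ > 0) takes the inline smi fast path, while "a / b" and
// "a % b" always fall back to the generic binary op code, at any depth.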
482 void FullCodeGenerator::EffectContext::Plug(Register reg) const {
486 void FullCodeGenerator::AccumulatorValueContext::Plug(Register reg) const {
487 __ Move(result_register(), reg);
491 void FullCodeGenerator::StackValueContext::Plug(Register reg) const {
496 void FullCodeGenerator::TestContext::Plug(Register reg) const {
497 // For simplicity we always test the accumulator register.
498 __ Move(result_register(), reg);
499 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
500 codegen()->DoTest(this);
504 void FullCodeGenerator::EffectContext::PlugTOS() const {
509 void FullCodeGenerator::AccumulatorValueContext::PlugTOS() const {
510 __ Pop(result_register());
514 void FullCodeGenerator::StackValueContext::PlugTOS() const {
518 void FullCodeGenerator::TestContext::PlugTOS() const {
519 // For simplicity we always test the accumulator register.
520 __ Pop(result_register());
521 codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
522 codegen()->DoTest(this);
526 void FullCodeGenerator::EffectContext::PrepareTest(
527 Label* materialize_true,
528 Label* materialize_false,
531 Label** fall_through) const {
532 // In an effect context, the true and the false case branch to the
533 // same label.
534 *if_true = *if_false = *fall_through = materialize_true;
538 void FullCodeGenerator::AccumulatorValueContext::PrepareTest(
539 Label* materialize_true,
540 Label* materialize_false,
543 Label** fall_through) const {
544 *if_true = *fall_through = materialize_true;
545 *if_false = materialize_false;
549 void FullCodeGenerator::StackValueContext::PrepareTest(
550 Label* materialize_true,
551 Label* materialize_false,
554 Label** fall_through) const {
555 *if_true = *fall_through = materialize_true;
556 *if_false = materialize_false;
560 void FullCodeGenerator::TestContext::PrepareTest(
561 Label* materialize_true,
562 Label* materialize_false,
565 Label** fall_through) const {
566 *if_true = true_label_;
567 *if_false = false_label_;
568 *fall_through = fall_through_;
572 void FullCodeGenerator::DoTest(const TestContext* context) {
573 DoTest(context->condition(),
574 context->true_label(),
575 context->false_label(),
576 context->fall_through());
580 void FullCodeGenerator::AllocateModules(ZoneList<Declaration*>* declarations) {
581 ASSERT(scope_->is_global_scope());
583 for (int i = 0; i < declarations->length(); i++) {
584 ModuleDeclaration* declaration = declarations->at(i)->AsModuleDeclaration();
585 if (declaration != NULL) {
586 ModuleLiteral* module = declaration->module()->AsModuleLiteral();
587 if (module != NULL) {
588 Comment cmnt(masm_, "[ Link nested modules");
589 Scope* scope = module->body()->scope();
590 Interface* interface = scope->interface();
591 ASSERT(interface->IsModule() && interface->IsFrozen());
593 interface->Allocate(scope->module_var()->index());
595 // Set up module context.
596 ASSERT(scope->interface()->Index() >= 0);
597 __ Push(Smi::FromInt(scope->interface()->Index()));
598 __ Push(scope->GetScopeInfo());
599 __ CallRuntime(Runtime::kHiddenPushModuleContext, 2);
600 StoreToFrameField(StandardFrameConstants::kContextOffset,
603 AllocateModules(scope->declarations());
605 // Pop module context.
606 LoadContextField(context_register(), Context::PREVIOUS_INDEX);
607 // Update local stack frame context field.
608 StoreToFrameField(StandardFrameConstants::kContextOffset,
616 // Modules have their own local scope, represented by their own context.
617 // Module instance objects have an accessor for every export that forwards
618 // access to the respective slot from the module's context. (Exports that are
619 // modules themselves, however, are simple data properties.)
621 // All modules have a _hosting_ scope/context, which (currently) is the
622 // (innermost) enclosing global scope. To deal with recursion, nested modules
623 // are hosted by the same scope as global ones.
625 // For every (global or nested) module literal, the hosting context has an
626 // internal slot that points directly to the respective module context. This
627 // enables quick access to (statically resolved) module members by 2-dimensional
628 // access through the hosting context. For example,
630 //   module A {
631 //     let x;
632 //     module B { let y; }
633 //   }
634 //   module C { let z; }
636 // allocates contexts as follows:
638 // [header| .A | .B | .C | A | C ]  (global)
639 //           |    |    |
640 //           |    |    +-- [header| z ]        (module)
641 //           |    |
642 //           |    +------- [header| y ]        (module)
643 //           |
644 //           +------------ [header| x | B ]    (module)
646 // Here, .A, .B, .C are the internal slots pointing to the hosted module
647 // contexts, whereas A, B, C hold the actual instance objects (note that every
648 // module context also points to the respective instance object through its
649 // extension slot in the header).
651 // To deal with arbitrary recursion and aliases between modules,
652 // they are created and initialized in several stages. Each stage applies to
653 // all modules in the hosting global scope, including nested ones.
655 // 1. Allocate: for each module _literal_, allocate the module contexts and
656 // respective instance object and wire them up. This happens in the
657 // PushModuleContext runtime function, as generated by AllocateModules
658 // (invoked by VisitDeclarations in the hosting scope).
660 // 2. Bind: for each module _declaration_ (i.e. literals as well as aliases),
661 // assign the respective instance object to the respective local variables. This
662 // happens in VisitModuleDeclaration, and uses the instance objects created
663 // in the previous stage.
664 // For each module _literal_, this phase also constructs a module descriptor
665 // for the next stage. This happens in VisitModuleLiteral.
667 // 3. Populate: invoke the DeclareModules runtime function to populate each
668 // _instance_ object with accessors for its exports. This is generated by
669 // DeclareModules (invoked by VisitDeclarations in the hosting scope again),
670 // and uses the descriptors generated in the previous stage.
672 // 4. Initialize: execute the module bodies (and other code) in sequence. This
673 // happens by the separate statements generated for module bodies. To reenter
674 // the module scopes properly, the parser inserted ModuleStatements.
676 void FullCodeGenerator::VisitDeclarations(
677 ZoneList<Declaration*>* declarations) {
678 Handle<FixedArray> saved_modules = modules_;
679 int saved_module_index = module_index_;
680 ZoneList<Handle<Object> >* saved_globals = globals_;
681 ZoneList<Handle<Object> > inner_globals(10, zone());
682 globals_ = &inner_globals;
684 if (scope_->num_modules() != 0) {
685 // This is a scope hosting modules. Allocate a descriptor array to pass
686 // to the runtime for initialization.
687 Comment cmnt(masm_, "[ Allocate modules");
688 ASSERT(scope_->is_global_scope());
690 isolate()->factory()->NewFixedArray(scope_->num_modules(), TENURED);
693 // Generate code for allocating all modules, including nested ones.
694 // The allocated contexts are stored in internal variables in this scope.
695 AllocateModules(declarations);
698 AstVisitor::VisitDeclarations(declarations);
700 if (scope_->num_modules() != 0) {
701 // Initialize modules from descriptor array.
702 ASSERT(module_index_ == modules_->length());
703 DeclareModules(modules_);
704 modules_ = saved_modules;
705 module_index_ = saved_module_index;
708 if (!globals_->is_empty()) {
709 // Invoke the platform-dependent code generator to do the actual
710 // declaration of the global functions and variables.
711 Handle<FixedArray> array =
712 isolate()->factory()->NewFixedArray(globals_->length(), TENURED);
713 for (int i = 0; i < globals_->length(); ++i)
714 array->set(i, *globals_->at(i));
715 DeclareGlobals(array);
718 globals_ = saved_globals;
722 void FullCodeGenerator::VisitModuleLiteral(ModuleLiteral* module) {
723 Block* block = module->body();
724 Scope* saved_scope = scope();
725 scope_ = block->scope();
726 Interface* interface = scope_->interface();
728 Comment cmnt(masm_, "[ ModuleLiteral");
729 SetStatementPosition(block);
731 ASSERT(!modules_.is_null());
732 ASSERT(module_index_ < modules_->length());
733 int index = module_index_++;
735 // Set up module context.
736 ASSERT(interface->Index() >= 0);
737 __ Push(Smi::FromInt(interface->Index()));
738 __ Push(Smi::FromInt(0));
739 __ CallRuntime(Runtime::kHiddenPushModuleContext, 2);
740 StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
743 Comment cmnt(masm_, "[ Declarations");
744 VisitDeclarations(scope_->declarations());
747 // Populate the module description.
748 Handle<ModuleInfo> description =
749 ModuleInfo::Create(isolate(), interface, scope_);
750 modules_->set(index, *description);
752 scope_ = saved_scope;
753 // Pop module context.
754 LoadContextField(context_register(), Context::PREVIOUS_INDEX);
755 // Update local stack frame context field.
756 StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
760 void FullCodeGenerator::VisitModuleVariable(ModuleVariable* module) {
762 // The instance object is resolved statically through the module's interface.
766 void FullCodeGenerator::VisitModulePath(ModulePath* module) {
768 // The instance object is resolved statically through the module's interface.
772 void FullCodeGenerator::VisitModuleUrl(ModuleUrl* module) {
773 // TODO(rossberg): dummy allocation for now.
774 Scope* scope = module->body()->scope();
775 Interface* interface = scope_->interface();
777 ASSERT(interface->IsModule() && interface->IsFrozen());
778 ASSERT(!modules_.is_null());
779 ASSERT(module_index_ < modules_->length());
780 interface->Allocate(scope->module_var()->index());
781 int index = module_index_++;
783 Handle<ModuleInfo> description =
784 ModuleInfo::Create(isolate(), interface, scope_);
785 modules_->set(index, *description);
789 int FullCodeGenerator::DeclareGlobalsFlags() {
790 ASSERT(DeclareGlobalsStrictMode::is_valid(strict_mode()));
791 return DeclareGlobalsEvalFlag::encode(is_eval()) |
792 DeclareGlobalsNativeFlag::encode(is_native()) |
793 DeclareGlobalsStrictMode::encode(strict_mode());
797 void FullCodeGenerator::SetFunctionPosition(FunctionLiteral* fun) {
798 CodeGenerator::RecordPositions(masm_, fun->start_position());
802 void FullCodeGenerator::SetReturnPosition(FunctionLiteral* fun) {
803 CodeGenerator::RecordPositions(masm_, fun->end_position() - 1);
807 void FullCodeGenerator::SetStatementPosition(Statement* stmt) {
808 if (!isolate()->debugger()->IsDebuggerActive()) {
809 CodeGenerator::RecordPositions(masm_, stmt->position());
811 // Check if the statement will be breakable without adding a debug break
812 // slot.
813 BreakableStatementChecker checker(zone());
814 checker.Check(stmt);
815 // Record the statement position right here if the statement is not
816 // breakable. For breakable statements the actual recording of the
817 // position will be postponed to the breakable code (typically an IC).
818 bool position_recorded = CodeGenerator::RecordPositions(
819 masm_, stmt->position(), !checker.is_breakable());
820 // If the position recording did record a new position generate a debug
821 // break slot to make the statement breakable.
822 if (position_recorded) {
823 Debug::GenerateSlot(masm_);
829 void FullCodeGenerator::SetExpressionPosition(Expression* expr) {
830 if (!isolate()->debugger()->IsDebuggerActive()) {
831 CodeGenerator::RecordPositions(masm_, expr->position());
833 // Check if the expression will be breakable without adding a debug break
834 // slot.
835 BreakableStatementChecker checker(zone());
836 checker.Check(expr);
837 // Record a statement position right here if the expression is not
838 // breakable. For breakable expressions the actual recording of the
839 // position will be postponed to the breakable code (typically an IC).
840 // NOTE this will record a statement position for something which might
841 // not be a statement. As stepping in the debugger will only stop at
842 // statement positions this is used for e.g. the condition expression of
843 // a for statement.
844 bool position_recorded = CodeGenerator::RecordPositions(
845 masm_, expr->position(), !checker.is_breakable());
846 // If the position recording did record a new position generate a debug
847 // break slot to make the statement breakable.
848 if (position_recorded) {
849 Debug::GenerateSlot(masm_);
855 void FullCodeGenerator::SetStatementPosition(int pos) {
856 CodeGenerator::RecordPositions(masm_, pos);
860 void FullCodeGenerator::SetSourcePosition(int pos) {
861 if (pos != RelocInfo::kNoPosition) {
862 masm_->positions_recorder()->RecordPosition(pos);
867 // Lookup table for code generators for special runtime calls which are
868 // generated inline.
869 #define INLINE_FUNCTION_GENERATOR_ADDRESS(Name, argc, ressize) \
870 &FullCodeGenerator::Emit##Name,
872 const FullCodeGenerator::InlineFunctionGenerator
873 FullCodeGenerator::kInlineFunctionGenerators[] = {
874 INLINE_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_ADDRESS)
876 #undef INLINE_FUNCTION_GENERATOR_ADDRESS
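// For illustration: assuming INLINE_FUNCTION_LIST contains an entry such as
// F(IsSmi, 1, 1), the macro above turns that entry into
//   &FullCodeGenerator::EmitIsSmi,
// so kInlineFunctionGenerators is simply an array of member-function
// pointers in list order, indexed by FindInlineFunctionGenerator below.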
879 FullCodeGenerator::InlineFunctionGenerator
880 FullCodeGenerator::FindInlineFunctionGenerator(Runtime::FunctionId id) {
882 static_cast<int>(id) - static_cast<int>(Runtime::kFirstInlineFunction);
883 ASSERT(lookup_index >= 0);
884 ASSERT(static_cast<size_t>(lookup_index) <
885 ARRAY_SIZE(kInlineFunctionGenerators));
886 return kInlineFunctionGenerators[lookup_index];
890 void FullCodeGenerator::EmitInlineRuntimeCall(CallRuntime* expr) {
891 const Runtime::Function* function = expr->function();
892 ASSERT(function != NULL);
893 ASSERT(function->intrinsic_type == Runtime::INLINE);
894 InlineFunctionGenerator generator =
895 FindInlineFunctionGenerator(function->function_id);
896 ((*this).*(generator))(expr);
900 void FullCodeGenerator::EmitGeneratorNext(CallRuntime* expr) {
901 ZoneList<Expression*>* args = expr->arguments();
902 ASSERT(args->length() == 2);
903 EmitGeneratorResume(args->at(0), args->at(1), JSGeneratorObject::NEXT);
907 void FullCodeGenerator::EmitGeneratorThrow(CallRuntime* expr) {
908 ZoneList<Expression*>* args = expr->arguments();
909 ASSERT(args->length() == 2);
910 EmitGeneratorResume(args->at(0), args->at(1), JSGeneratorObject::THROW);
914 void FullCodeGenerator::EmitDebugBreakInOptimizedCode(CallRuntime* expr) {
915 context()->Plug(handle(Smi::FromInt(0), isolate()));
919 void FullCodeGenerator::VisitBinaryOperation(BinaryOperation* expr) {
920 switch (expr->op()) {
922 return VisitComma(expr);
925 return VisitLogicalExpression(expr);
927 return VisitArithmeticExpression(expr);
932 void FullCodeGenerator::VisitInDuplicateContext(Expression* expr) {
933 if (context()->IsEffect()) {
934 VisitForEffect(expr);
935 } else if (context()->IsAccumulatorValue()) {
936 VisitForAccumulatorValue(expr);
937 } else if (context()->IsStackValue()) {
938 VisitForStackValue(expr);
939 } else if (context()->IsTest()) {
940 const TestContext* test = TestContext::cast(context());
941 VisitForControl(expr, test->true_label(), test->false_label(),
942 test->fall_through());
947 void FullCodeGenerator::VisitComma(BinaryOperation* expr) {
948 Comment cmnt(masm_, "[ Comma");
949 VisitForEffect(expr->left());
950 VisitInDuplicateContext(expr->right());
954 void FullCodeGenerator::VisitLogicalExpression(BinaryOperation* expr) {
955 bool is_logical_and = expr->op() == Token::AND;
956 Comment cmnt(masm_, is_logical_and ? "[ Logical AND" : "[ Logical OR");
957 Expression* left = expr->left();
958 Expression* right = expr->right();
959 BailoutId right_id = expr->RightId();
962 if (context()->IsTest()) {
964 const TestContext* test = TestContext::cast(context());
965 if (is_logical_and) {
966 VisitForControl(left, &eval_right, test->false_label(), &eval_right);
968 VisitForControl(left, test->true_label(), &eval_right, &eval_right);
970 PrepareForBailoutForId(right_id, NO_REGISTERS);
971 __ bind(&eval_right);
973 } else if (context()->IsAccumulatorValue()) {
974 VisitForAccumulatorValue(left);
975 // We want the value in the accumulator for the test, and on the stack in
977 __ Push(result_register());
978 Label discard, restore;
979 if (is_logical_and) {
980 DoTest(left, &discard, &restore, &restore);
982 DoTest(left, &restore, &discard, &restore);
985 __ Pop(result_register());
989 PrepareForBailoutForId(right_id, NO_REGISTERS);
991 } else if (context()->IsStackValue()) {
992 VisitForAccumulatorValue(left);
993 // We want the value in the accumulator for the test, and on the stack in
995 __ Push(result_register());
997 if (is_logical_and) {
998 DoTest(left, &discard, &done, &discard);
1000 DoTest(left, &done, &discard, &discard);
1004 PrepareForBailoutForId(right_id, NO_REGISTERS);
1007 ASSERT(context()->IsEffect());
1009 if (is_logical_and) {
1010 VisitForControl(left, &eval_right, &done, &eval_right);
1012 VisitForControl(left, &done, &eval_right, &eval_right);
1014 PrepareForBailoutForId(right_id, NO_REGISTERS);
1015 __ bind(&eval_right);
1018 VisitInDuplicateContext(right);
1023 void FullCodeGenerator::VisitArithmeticExpression(BinaryOperation* expr) {
1024 Token::Value op = expr->op();
1025 Comment cmnt(masm_, "[ ArithmeticExpression");
1026 Expression* left = expr->left();
1027 Expression* right = expr->right();
1028 OverwriteMode mode =
1029 left->ResultOverwriteAllowed()
1031 : (right->ResultOverwriteAllowed() ? OVERWRITE_RIGHT : NO_OVERWRITE);
1033 VisitForStackValue(left);
1034 VisitForAccumulatorValue(right);
1036 SetSourcePosition(expr->position());
1037 if (ShouldInlineSmiCase(op)) {
1038 EmitInlineSmiBinaryOp(expr, op, mode, left, right);
1040 EmitBinaryOp(expr, op, mode);
1045 void FullCodeGenerator::VisitBlock(Block* stmt) {
1046 Comment cmnt(masm_, "[ Block");
1047 NestedBlock nested_block(this, stmt);
1048 SetStatementPosition(stmt);
1050 Scope* saved_scope = scope();
1051 // Push a block context when entering a block with block scoped variables.
1052 if (stmt->scope() != NULL) {
1053 scope_ = stmt->scope();
1054 ASSERT(!scope_->is_module_scope());
1055 { Comment cmnt(masm_, "[ Extend block context");
1056 __ Push(scope_->GetScopeInfo());
1057 PushFunctionArgumentForContextAllocation();
1058 __ CallRuntime(Runtime::kHiddenPushBlockContext, 2);
1060 // Replace the context stored in the frame.
1061 StoreToFrameField(StandardFrameConstants::kContextOffset,
1062 context_register());
1064 { Comment cmnt(masm_, "[ Declarations");
1065 VisitDeclarations(scope_->declarations());
1069 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
1070 VisitStatements(stmt->statements());
1071 scope_ = saved_scope;
1072 __ bind(nested_block.break_label());
1073 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1075 // Pop block context if necessary.
1076 if (stmt->scope() != NULL) {
1077 LoadContextField(context_register(), Context::PREVIOUS_INDEX);
1078 // Update local stack frame context field.
1079 StoreToFrameField(StandardFrameConstants::kContextOffset,
1080 context_register());
1085 void FullCodeGenerator::VisitModuleStatement(ModuleStatement* stmt) {
1086 Comment cmnt(masm_, "[ Module context");
1088 __ Push(Smi::FromInt(stmt->proxy()->interface()->Index()));
1089 __ Push(Smi::FromInt(0));
1090 __ CallRuntime(Runtime::kHiddenPushModuleContext, 2);
1092 StandardFrameConstants::kContextOffset, context_register());
1094 Scope* saved_scope = scope_;
1095 scope_ = stmt->body()->scope();
1096 VisitStatements(stmt->body()->statements());
1097 scope_ = saved_scope;
1098 LoadContextField(context_register(), Context::PREVIOUS_INDEX);
1099 // Update local stack frame context field.
1100 StoreToFrameField(StandardFrameConstants::kContextOffset,
1101 context_register());
1105 void FullCodeGenerator::VisitExpressionStatement(ExpressionStatement* stmt) {
1106 Comment cmnt(masm_, "[ ExpressionStatement");
1107 SetStatementPosition(stmt);
1108 VisitForEffect(stmt->expression());
1112 void FullCodeGenerator::VisitEmptyStatement(EmptyStatement* stmt) {
1113 Comment cmnt(masm_, "[ EmptyStatement");
1114 SetStatementPosition(stmt);
1118 void FullCodeGenerator::VisitIfStatement(IfStatement* stmt) {
1119 Comment cmnt(masm_, "[ IfStatement");
1120 SetStatementPosition(stmt);
1121 Label then_part, else_part, done;
1123 if (stmt->HasElseStatement()) {
1124 VisitForControl(stmt->condition(), &then_part, &else_part, &then_part);
1125 PrepareForBailoutForId(stmt->ThenId(), NO_REGISTERS);
1126 __ bind(&then_part);
1127 Visit(stmt->then_statement());
1130 PrepareForBailoutForId(stmt->ElseId(), NO_REGISTERS);
1131 __ bind(&else_part);
1132 Visit(stmt->else_statement());
1134 VisitForControl(stmt->condition(), &then_part, &done, &then_part);
1135 PrepareForBailoutForId(stmt->ThenId(), NO_REGISTERS);
1136 __ bind(&then_part);
1137 Visit(stmt->then_statement());
1139 PrepareForBailoutForId(stmt->ElseId(), NO_REGISTERS);
1142 PrepareForBailoutForId(stmt->IfId(), NO_REGISTERS);
1146 void FullCodeGenerator::VisitContinueStatement(ContinueStatement* stmt) {
1147 Comment cmnt(masm_, "[ ContinueStatement");
1148 SetStatementPosition(stmt);
1149 NestedStatement* current = nesting_stack_;
1150 int stack_depth = 0;
1151 int context_length = 0;
1152 // When continuing, we clobber the unpredictable value in the accumulator
1153 // with one that's safe for GC. If we hit an exit from the try block of
1154 // try...finally on our way out, we will unconditionally preserve the
1155 // accumulator on the stack.
1157 while (!current->IsContinueTarget(stmt->target())) {
1158 current = current->Exit(&stack_depth, &context_length);
1160 __ Drop(stack_depth);
1161 if (context_length > 0) {
1162 while (context_length > 0) {
1163 LoadContextField(context_register(), Context::PREVIOUS_INDEX);
1166 StoreToFrameField(StandardFrameConstants::kContextOffset,
1167 context_register());
1170 __ jmp(current->AsIteration()->continue_label());
1174 void FullCodeGenerator::VisitBreakStatement(BreakStatement* stmt) {
1175 Comment cmnt(masm_, "[ BreakStatement");
1176 SetStatementPosition(stmt);
1177 NestedStatement* current = nesting_stack_;
1178 int stack_depth = 0;
1179 int context_length = 0;
1180 // When breaking, we clobber the unpredictable value in the accumulator
1181 // with one that's safe for GC. If we hit an exit from the try block of
1182 // try...finally on our way out, we will unconditionally preserve the
1183 // accumulator on the stack.
1185 while (!current->IsBreakTarget(stmt->target())) {
1186 current = current->Exit(&stack_depth, &context_length);
1188 __ Drop(stack_depth);
1189 if (context_length > 0) {
1190 while (context_length > 0) {
1191 LoadContextField(context_register(), Context::PREVIOUS_INDEX);
1194 StoreToFrameField(StandardFrameConstants::kContextOffset,
1195 context_register());
1198 __ jmp(current->AsBreakable()->break_label());
1202 void FullCodeGenerator::EmitUnwindBeforeReturn() {
1203 NestedStatement* current = nesting_stack_;
1204 int stack_depth = 0;
1205 int context_length = 0;
1206 while (current != NULL) {
1207 current = current->Exit(&stack_depth, &context_length);
1209 __ Drop(stack_depth);
1213 void FullCodeGenerator::VisitReturnStatement(ReturnStatement* stmt) {
1214 Comment cmnt(masm_, "[ ReturnStatement");
1215 SetStatementPosition(stmt);
1216 Expression* expr = stmt->expression();
1217 VisitForAccumulatorValue(expr);
1218 EmitUnwindBeforeReturn();
1219 EmitReturnSequence();
1223 void FullCodeGenerator::VisitWithStatement(WithStatement* stmt) {
1224 Comment cmnt(masm_, "[ WithStatement");
1225 SetStatementPosition(stmt);
1227 VisitForStackValue(stmt->expression());
1228 PushFunctionArgumentForContextAllocation();
1229 __ CallRuntime(Runtime::kHiddenPushWithContext, 2);
1230 StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
1232 Scope* saved_scope = scope();
1233 scope_ = stmt->scope();
1234 { WithOrCatch body(this);
1235 Visit(stmt->statement());
1237 scope_ = saved_scope;
1240 LoadContextField(context_register(), Context::PREVIOUS_INDEX);
1241 // Update local stack frame context field.
1242 StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
1246 void FullCodeGenerator::VisitDoWhileStatement(DoWhileStatement* stmt) {
1247 Comment cmnt(masm_, "[ DoWhileStatement");
1248 SetStatementPosition(stmt);
1249 Label body, book_keeping;
1251 Iteration loop_statement(this, stmt);
1252 increment_loop_depth();
1255 Visit(stmt->body());
1257 // Record the position of the do while condition and make sure it is
1258 // possible to break on the condition.
1259 __ bind(loop_statement.continue_label());
1260 PrepareForBailoutForId(stmt->ContinueId(), NO_REGISTERS);
1261 SetExpressionPosition(stmt->cond());
1262 VisitForControl(stmt->cond(),
1264 loop_statement.break_label(),
1267 // Check stack before looping.
1268 PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
1269 __ bind(&book_keeping);
1270 EmitBackEdgeBookkeeping(stmt, &body);
1273 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1274 __ bind(loop_statement.break_label());
1275 decrement_loop_depth();
1279 void FullCodeGenerator::VisitWhileStatement(WhileStatement* stmt) {
1280 Comment cmnt(masm_, "[ WhileStatement");
1283 Iteration loop_statement(this, stmt);
1284 increment_loop_depth();
1286 // Emit the test at the bottom of the loop.
1289 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1291 Visit(stmt->body());
1293 // Emit the statement position here as this is where the while
1294 // statement code starts.
1295 __ bind(loop_statement.continue_label());
1296 SetStatementPosition(stmt);
1298 // Check stack before looping.
1299 EmitBackEdgeBookkeeping(stmt, &body);
1302 VisitForControl(stmt->cond(),
1304 loop_statement.break_label(),
1305 loop_statement.break_label());
1307 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1308 __ bind(loop_statement.break_label());
1309 decrement_loop_depth();
1313 void FullCodeGenerator::VisitForStatement(ForStatement* stmt) {
1314 Comment cmnt(masm_, "[ ForStatement");
1317 Iteration loop_statement(this, stmt);
1319 // Set statement position for a break slot before entering the for-body.
1320 SetStatementPosition(stmt);
1322 if (stmt->init() != NULL) {
1323 Visit(stmt->init());
1326 increment_loop_depth();
1327 // Emit the test at the bottom of the loop (even if empty).
1330 PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
1332 Visit(stmt->body());
1334 PrepareForBailoutForId(stmt->ContinueId(), NO_REGISTERS);
1335 __ bind(loop_statement.continue_label());
1336 if (stmt->next() != NULL) {
1337 Visit(stmt->next());
1340 // Emit the statement position here as this is where the for
1341 // statement code starts.
1342 SetStatementPosition(stmt);
1344 // Check stack before looping.
1345 EmitBackEdgeBookkeeping(stmt, &body);
1348 if (stmt->cond() != NULL) {
1349 VisitForControl(stmt->cond(),
1351 loop_statement.break_label(),
1352 loop_statement.break_label());
1357 PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
1358 __ bind(loop_statement.break_label());
1359 decrement_loop_depth();
1363 void FullCodeGenerator::VisitTryCatchStatement(TryCatchStatement* stmt) {
1364 Comment cmnt(masm_, "[ TryCatchStatement");
1365 SetStatementPosition(stmt);
1366 // The try block adds a handler to the exception handler chain before
1367 // entering, and removes it again when exiting normally. If an exception
1368 // is thrown during execution of the try block, the handler is consumed
1369 // and control is passed to the catch block with the exception in the
1370 // result register.
1372 Label try_entry, handler_entry, exit;
1374 __ bind(&handler_entry);
1375 handler_table()->set(stmt->index(), Smi::FromInt(handler_entry.pos()));
1376 // Exception handler code, the exception is in the result register.
1377 // Extend the context before executing the catch block.
1378 { Comment cmnt(masm_, "[ Extend catch context");
1379 __ Push(stmt->variable()->name());
1380 __ Push(result_register());
1381 PushFunctionArgumentForContextAllocation();
1382 __ CallRuntime(Runtime::kHiddenPushCatchContext, 3);
1383 StoreToFrameField(StandardFrameConstants::kContextOffset,
1384 context_register());
1387 Scope* saved_scope = scope();
1388 scope_ = stmt->scope();
1389 ASSERT(scope_->declarations()->is_empty());
1390 { WithOrCatch catch_body(this);
1391 Visit(stmt->catch_block());
1393 // Restore the context.
1394 LoadContextField(context_register(), Context::PREVIOUS_INDEX);
1395 StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
1396 scope_ = saved_scope;
1399 // Try block code. Sets up the exception handler chain.
1400 __ bind(&try_entry);
1401 __ PushTryHandler(StackHandler::CATCH, stmt->index());
1402 { TryCatch try_body(this);
1403 Visit(stmt->try_block());
1410 void FullCodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
1411 Comment cmnt(masm_, "[ TryFinallyStatement");
1412 SetStatementPosition(stmt);
1413 // Try finally is compiled by setting up a try-handler on the stack while
1414 // executing the try body, and removing it again afterwards.
1416 // The try-finally construct can enter the finally block in three ways:
1417 // 1. By exiting the try-block normally. This removes the try-handler and
1418 // calls the finally block code before continuing.
1419 // 2. By exiting the try-block with a function-local control flow transfer
1420 // (break/continue/return). The site of the transfer, e.g. the break, removes the
1421 // try handler and calls the finally block code before continuing
1422 // its outward control transfer.
1423 // 3. By exiting the try-block with a thrown exception.
1424 // This can happen in nested function calls. It traverses the try-handler
1425 // chain and consumes the try-handler entry before jumping to the
1426 // handler code. The handler code then calls the finally-block before
1427 // rethrowing the exception.
1429 // The finally block must assume a return address on top of the stack
1430 // (or in the link register on ARM chips) and a value (return value or
1431 // exception) in the result register (rax/eax/r0), both of which must
1432 // be preserved. The return address isn't GC-safe, so it should be
1433 // cooked before GC.
1434 Label try_entry, handler_entry, finally_entry;
1436 // Jump to try-handler setup and try-block code.
1438 __ bind(&handler_entry);
1439 handler_table()->set(stmt->index(), Smi::FromInt(handler_entry.pos()));
1440 // Exception handler code. This code is only executed when an exception
1441 // is thrown. The exception is in the result register, and must be
1442 // preserved by the finally block. Call the finally block and then
1443 // rethrow the exception if it returns.
1444 __ Call(&finally_entry);
1445 __ Push(result_register());
1446 __ CallRuntime(Runtime::kHiddenReThrow, 1);
1448 // Finally block implementation.
1449 __ bind(&finally_entry);
1450 EnterFinallyBlock();
1451 { Finally finally_body(this);
1452 Visit(stmt->finally_block());
1454 ExitFinallyBlock(); // Return to the calling code.
1456 // Set up try handler.
1457 __ bind(&try_entry);
1458 __ PushTryHandler(StackHandler::FINALLY, stmt->index());
1459 { TryFinally try_body(this, &finally_entry);
1460 Visit(stmt->try_block());
1463 // Execute the finally block on the way out. Clobber the unpredictable
1464 // value in the result register with one that's safe for GC because the
1465 // finally block will unconditionally preserve the result register on the
1466 // stack.
1468 __ Call(&finally_entry);
1472 void FullCodeGenerator::VisitDebuggerStatement(DebuggerStatement* stmt) {
1473 Comment cmnt(masm_, "[ DebuggerStatement");
1474 SetStatementPosition(stmt);
1477 // Ignore the return value.
1481 void FullCodeGenerator::VisitCaseClause(CaseClause* clause) {
1486 void FullCodeGenerator::VisitConditional(Conditional* expr) {
1487 Comment cmnt(masm_, "[ Conditional");
1488 Label true_case, false_case, done;
1489 VisitForControl(expr->condition(), &true_case, &false_case, &true_case);
1491 PrepareForBailoutForId(expr->ThenId(), NO_REGISTERS);
1492 __ bind(&true_case);
1493 SetExpressionPosition(expr->then_expression());
1494 if (context()->IsTest()) {
1495 const TestContext* for_test = TestContext::cast(context());
1496 VisitForControl(expr->then_expression(),
1497 for_test->true_label(),
1498 for_test->false_label(),
1501 VisitInDuplicateContext(expr->then_expression());
1505 PrepareForBailoutForId(expr->ElseId(), NO_REGISTERS);
1506 __ bind(&false_case);
1507 SetExpressionPosition(expr->else_expression());
1508 VisitInDuplicateContext(expr->else_expression());
1509 // If control flow falls through Visit, merge it with true case here.
1510 if (!context()->IsTest()) {
1516 void FullCodeGenerator::VisitLiteral(Literal* expr) {
1517 Comment cmnt(masm_, "[ Literal");
1518 context()->Plug(expr->value());
1522 void FullCodeGenerator::VisitFunctionLiteral(FunctionLiteral* expr) {
1523 Comment cmnt(masm_, "[ FunctionLiteral");
1525 // Build the function boilerplate and instantiate it.
1526 Handle<SharedFunctionInfo> function_info =
1527 Compiler::BuildFunctionInfo(expr, script());
1528 if (function_info.is_null()) {
1532 EmitNewClosure(function_info, expr->pretenure());
1536 void FullCodeGenerator::VisitNativeFunctionLiteral(
1537 NativeFunctionLiteral* expr) {
1538 Comment cmnt(masm_, "[ NativeFunctionLiteral");
1540 // Compute the function template for the native function.
1541 Handle<String> name = expr->name();
1542 v8::Handle<v8::FunctionTemplate> fun_template =
1543 expr->extension()->GetNativeFunctionTemplate(
1544 reinterpret_cast<v8::Isolate*>(isolate()), v8::Utils::ToLocal(name));
1545 ASSERT(!fun_template.IsEmpty());
1547 // Instantiate the function and create a shared function info from it.
1548 Handle<JSFunction> fun = Utils::OpenHandle(*fun_template->GetFunction());
1549 const int literals = fun->NumberOfLiterals();
1550 Handle<Code> code = Handle<Code>(fun->shared()->code());
1551 Handle<Code> construct_stub = Handle<Code>(fun->shared()->construct_stub());
1552 bool is_generator = false;
1553 Handle<SharedFunctionInfo> shared =
1554 isolate()->factory()->NewSharedFunctionInfo(
1555 name, literals, is_generator,
1556 code, Handle<ScopeInfo>(fun->shared()->scope_info()),
1557 Handle<FixedArray>(fun->shared()->feedback_vector()));
1558 shared->set_construct_stub(*construct_stub);
1560 // Copy the function data to the shared function info.
1561 shared->set_function_data(fun->shared()->function_data());
1562 int parameters = fun->shared()->formal_parameter_count();
1563 shared->set_formal_parameter_count(parameters);
1565 EmitNewClosure(shared, false);
1569 void FullCodeGenerator::VisitThrow(Throw* expr) {
1570 Comment cmnt(masm_, "[ Throw");
1571 VisitForStackValue(expr->exception());
1572 __ CallRuntime(Runtime::kHiddenThrow, 1);
1573 // Never returns here.
1577 FullCodeGenerator::NestedStatement* FullCodeGenerator::TryCatch::Exit(
1579 int* context_length) {
1580 // The macros used here must preserve the result register.
1581 __ Drop(*stack_depth);
1588 bool FullCodeGenerator::TryLiteralCompare(CompareOperation* expr) {
1589 Expression* sub_expr;
1590 Handle<String> check;
1591 if (expr->IsLiteralCompareTypeof(&sub_expr, &check)) {
1592 EmitLiteralCompareTypeof(expr, sub_expr, check);
1596 if (expr->IsLiteralCompareUndefined(&sub_expr, isolate())) {
1597 EmitLiteralCompareNil(expr, sub_expr, kUndefinedValue);
1601 if (expr->IsLiteralCompareNull(&sub_expr)) {
1602 EmitLiteralCompareNil(expr, sub_expr, kNullValue);
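// Illustrative examples of comparisons handled by the fast paths above:
//   typeof x == "number"  -> EmitLiteralCompareTypeof(expr, sub_expr, check)
//   x === undefined       -> EmitLiteralCompareNil(expr, sub_expr, kUndefinedValue)
//   x === null            -> EmitLiteralCompareNil(expr, sub_expr, kNullValue)
// Anything else falls through to the generic compare code in the caller.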
1610 void BackEdgeTable::Patch(Isolate* isolate, Code* unoptimized) {
1611 DisallowHeapAllocation no_gc;
1612 Code* patch = isolate->builtins()->builtin(Builtins::kOnStackReplacement);
1614 // Iterate over the back edge table and patch every interrupt
1615 // call to an unconditional call to the replacement code.
1616 int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level();
1618 BackEdgeTable back_edges(unoptimized, &no_gc);
1619 for (uint32_t i = 0; i < back_edges.length(); i++) {
1620 if (static_cast<int>(back_edges.loop_depth(i)) == loop_nesting_level) {
1621 ASSERT_EQ(INTERRUPT, GetBackEdgeState(isolate,
1624 PatchAt(unoptimized, back_edges.pc(i), ON_STACK_REPLACEMENT, patch);
1628 unoptimized->set_back_edges_patched_for_osr(true);
1629 ASSERT(Verify(isolate, unoptimized, loop_nesting_level));
1633 void BackEdgeTable::Revert(Isolate* isolate, Code* unoptimized) {
1634 DisallowHeapAllocation no_gc;
1635 Code* patch = isolate->builtins()->builtin(Builtins::kInterruptCheck);
1637 // Iterate over the back edge table and revert the patched interrupt calls.
1638 ASSERT(unoptimized->back_edges_patched_for_osr());
1639 int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level();
1641 BackEdgeTable back_edges(unoptimized, &no_gc);
1642 for (uint32_t i = 0; i < back_edges.length(); i++) {
1643 if (static_cast<int>(back_edges.loop_depth(i)) <= loop_nesting_level) {
1644 ASSERT_NE(INTERRUPT, GetBackEdgeState(isolate,
1647 PatchAt(unoptimized, back_edges.pc(i), INTERRUPT, patch);
1651 unoptimized->set_back_edges_patched_for_osr(false);
1652 unoptimized->set_allow_osr_at_loop_nesting_level(0);
1653 // Assert that none of the back edges are patched anymore.
1654 ASSERT(Verify(isolate, unoptimized, -1));
1658 void BackEdgeTable::AddStackCheck(Handle<Code> code, uint32_t pc_offset) {
1659 DisallowHeapAllocation no_gc;
1660 Isolate* isolate = code->GetIsolate();
1661 Address pc = code->instruction_start() + pc_offset;
1662 Code* patch = isolate->builtins()->builtin(Builtins::kOsrAfterStackCheck);
1663 PatchAt(*code, pc, OSR_AFTER_STACK_CHECK, patch);
1667 void BackEdgeTable::RemoveStackCheck(Handle<Code> code, uint32_t pc_offset) {
1668 DisallowHeapAllocation no_gc;
1669 Isolate* isolate = code->GetIsolate();
1670 Address pc = code->instruction_start() + pc_offset;
1672 if (OSR_AFTER_STACK_CHECK == GetBackEdgeState(isolate, *code, pc)) {
1673 Code* patch = isolate->builtins()->builtin(Builtins::kOnStackReplacement);
1674 PatchAt(*code, pc, ON_STACK_REPLACEMENT, patch);
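// Summary of the back edge states used above (illustrative, derived from the
// builtins referenced in this file):
//   INTERRUPT             -- original call to the InterruptCheck builtin.
//   ON_STACK_REPLACEMENT  -- call patched to the OnStackReplacement builtin
//                            (Patch, RemoveStackCheck).
//   OSR_AFTER_STACK_CHECK -- call patched to the OsrAfterStackCheck builtin
//                            (AddStackCheck).
// Revert() patches the affected calls back to INTERRUPT.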
1680 bool BackEdgeTable::Verify(Isolate* isolate,
1682 int loop_nesting_level) {
1683 DisallowHeapAllocation no_gc;
1684 BackEdgeTable back_edges(unoptimized, &no_gc);
1685 for (uint32_t i = 0; i < back_edges.length(); i++) {
1686 uint32_t loop_depth = back_edges.loop_depth(i);
1687 CHECK_LE(static_cast<int>(loop_depth), Code::kMaxLoopNestingMarker);
1688 // Assert that all back edges for shallower loops (and only those)
1689 // have already been patched.
1690 CHECK_EQ((static_cast<int>(loop_depth) <= loop_nesting_level),
1691 GetBackEdgeState(isolate,
1693 back_edges.pc(i)) != INTERRUPT);
1703 } } // namespace v8::internal