// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "codegen.h"
#include "compiler.h"
#include "debug.h"
#include "full-codegen.h"
#include "macro-assembler.h"
#include "prettyprinter.h"
#include "scopes.h"
#include "scopeinfo.h"
#include "snapshot.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {

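// BreakableStatementChecker marks an AST node as breakable if stopping at it
// in the debugger makes sense. SetStatementPosition and SetExpressionPosition
// below use it to decide whether an explicit debug break slot is needed,
// roughly:
//
//   BreakableStatementChecker checker(zone());
//   checker.Check(stmt);
//   if (!checker.is_breakable()) {
//     // Record the position here and emit a debug break slot.
//   }
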
void BreakableStatementChecker::Check(Statement* stmt) {
  Visit(stmt);
}


void BreakableStatementChecker::Check(Expression* expr) {
  Visit(expr);
}


void BreakableStatementChecker::VisitVariableDeclaration(
    VariableDeclaration* decl) {
}


void BreakableStatementChecker::VisitFunctionDeclaration(
    FunctionDeclaration* decl) {
}


void BreakableStatementChecker::VisitModuleDeclaration(
    ModuleDeclaration* decl) {
}


void BreakableStatementChecker::VisitImportDeclaration(
    ImportDeclaration* decl) {
}


void BreakableStatementChecker::VisitExportDeclaration(
    ExportDeclaration* decl) {
}


void BreakableStatementChecker::VisitModuleLiteral(ModuleLiteral* module) {
}


void BreakableStatementChecker::VisitModuleVariable(ModuleVariable* module) {
}


void BreakableStatementChecker::VisitModulePath(ModulePath* module) {
}


void BreakableStatementChecker::VisitModuleUrl(ModuleUrl* module) {
}


void BreakableStatementChecker::VisitModuleStatement(ModuleStatement* stmt) {
}


void BreakableStatementChecker::VisitBlock(Block* stmt) {
}

void BreakableStatementChecker::VisitExpressionStatement(
    ExpressionStatement* stmt) {
  // Check if expression is breakable.
  Visit(stmt->expression());
}


void BreakableStatementChecker::VisitEmptyStatement(EmptyStatement* stmt) {
}


void BreakableStatementChecker::VisitIfStatement(IfStatement* stmt) {
  // If the condition is breakable the if statement is breakable.
  Visit(stmt->condition());
}


void BreakableStatementChecker::VisitContinueStatement(
    ContinueStatement* stmt) {
}


void BreakableStatementChecker::VisitBreakStatement(BreakStatement* stmt) {
}


void BreakableStatementChecker::VisitReturnStatement(ReturnStatement* stmt) {
  // Return is breakable if the expression is.
  Visit(stmt->expression());
}


void BreakableStatementChecker::VisitWithStatement(WithStatement* stmt) {
  Visit(stmt->expression());
}

void BreakableStatementChecker::VisitSwitchStatement(SwitchStatement* stmt) {
  // Switch statements are breakable if the tag expression is.
  Visit(stmt->tag());
}


void BreakableStatementChecker::VisitDoWhileStatement(DoWhileStatement* stmt) {
  // Mark do while as breakable to avoid adding a break slot in front of it.
  is_breakable_ = true;
}


void BreakableStatementChecker::VisitWhileStatement(WhileStatement* stmt) {
  // Mark while statements breakable if the condition expression is.
  Visit(stmt->cond());
}


void BreakableStatementChecker::VisitForStatement(ForStatement* stmt) {
  // Mark for statements breakable if the condition expression is.
  if (stmt->cond() != NULL) {
    Visit(stmt->cond());
  }
}


void BreakableStatementChecker::VisitForInStatement(ForInStatement* stmt) {
  // Mark for in statements breakable if the enumerable expression is.
  Visit(stmt->enumerable());
}


void BreakableStatementChecker::VisitForOfStatement(ForOfStatement* stmt) {
  // For-of is breakable because of the next() call.
  is_breakable_ = true;
}

void BreakableStatementChecker::VisitTryCatchStatement(
    TryCatchStatement* stmt) {
  // Mark try catch as breakable to avoid adding a break slot in front of it.
  is_breakable_ = true;
}


void BreakableStatementChecker::VisitTryFinallyStatement(
    TryFinallyStatement* stmt) {
  // Mark try finally as breakable to avoid adding a break slot in front of it.
  is_breakable_ = true;
}


void BreakableStatementChecker::VisitDebuggerStatement(
    DebuggerStatement* stmt) {
  // The debugger statement is breakable.
  is_breakable_ = true;
}


void BreakableStatementChecker::VisitCaseClause(CaseClause* clause) {
}


void BreakableStatementChecker::VisitFunctionLiteral(FunctionLiteral* expr) {
}


void BreakableStatementChecker::VisitNativeFunctionLiteral(
    NativeFunctionLiteral* expr) {
}


void BreakableStatementChecker::VisitConditional(Conditional* expr) {
}


void BreakableStatementChecker::VisitVariableProxy(VariableProxy* expr) {
}


void BreakableStatementChecker::VisitLiteral(Literal* expr) {
}


void BreakableStatementChecker::VisitRegExpLiteral(RegExpLiteral* expr) {
}


void BreakableStatementChecker::VisitObjectLiteral(ObjectLiteral* expr) {
}


void BreakableStatementChecker::VisitArrayLiteral(ArrayLiteral* expr) {
}

void BreakableStatementChecker::VisitAssignment(Assignment* expr) {
  // If assigning to a property (including a global property) the assignment
  // is breakable.
  VariableProxy* proxy = expr->target()->AsVariableProxy();
  Property* prop = expr->target()->AsProperty();
  if (prop != NULL || (proxy != NULL && proxy->var()->IsUnallocated())) {
    is_breakable_ = true;
    return;
  }

  // Otherwise the assignment is breakable if the assigned value is.
  Visit(expr->value());
}


void BreakableStatementChecker::VisitYield(Yield* expr) {
  // Yield is breakable if the expression is.
  Visit(expr->expression());
}


void BreakableStatementChecker::VisitThrow(Throw* expr) {
  // Throw is breakable if the expression is.
  Visit(expr->exception());
}


void BreakableStatementChecker::VisitProperty(Property* expr) {
  // Property load is breakable.
  is_breakable_ = true;
}


void BreakableStatementChecker::VisitCall(Call* expr) {
  // Function calls both through IC and call stub are breakable.
  is_breakable_ = true;
}


void BreakableStatementChecker::VisitCallNew(CallNew* expr) {
  // Function calls through new are breakable.
  is_breakable_ = true;
}


void BreakableStatementChecker::VisitCallRuntime(CallRuntime* expr) {
}


void BreakableStatementChecker::VisitUnaryOperation(UnaryOperation* expr) {
  Visit(expr->expression());
}


void BreakableStatementChecker::VisitCountOperation(CountOperation* expr) {
  Visit(expr->expression());
}


void BreakableStatementChecker::VisitBinaryOperation(BinaryOperation* expr) {
  Visit(expr->left());
  if (expr->op() != Token::AND &&
      expr->op() != Token::OR) {
    Visit(expr->right());
  }
}


void BreakableStatementChecker::VisitCompareOperation(CompareOperation* expr) {
  Visit(expr->left());
  Visit(expr->right());
}


void BreakableStatementChecker::VisitThisFunction(ThisFunction* expr) {
}

#define __ ACCESS_MASM(masm())

bool FullCodeGenerator::MakeCode(CompilationInfo* info) {
  Isolate* isolate = info->isolate();

  Logger::TimerEventScope timer(
      isolate, Logger::TimerEventScope::v8_compile_full_code);

  Handle<Script> script = info->script();
  if (!script->IsUndefined() && !script->source()->IsUndefined()) {
    int len = String::cast(script->source())->length();
    isolate->counters()->total_full_codegen_source_size()->Increment(len);
  }
  CodeGenerator::MakeCodePrologue(info, "full");
  const int kInitialBufferSize = 4 * KB;
  MacroAssembler masm(info->isolate(), NULL, kInitialBufferSize);
#ifdef ENABLE_GDB_JIT_INTERFACE
  masm.positions_recorder()->StartGDBJITLineInfoRecording();
#endif
  LOG_CODE_EVENT(isolate,
                 CodeStartLinePosInfoRecordEvent(masm.positions_recorder()));

  FullCodeGenerator cgen(&masm, info);
  cgen.Generate();
  if (cgen.HasStackOverflow()) {
    ASSERT(!isolate->has_pending_exception());
    return false;
  }
  unsigned table_offset = cgen.EmitBackEdgeTable();

  Code::Flags flags = Code::ComputeFlags(Code::FUNCTION);
  Handle<Code> code = CodeGenerator::MakeCodeEpilogue(&masm, flags, info);
  code->set_optimizable(info->IsOptimizable() &&
                        !info->function()->dont_optimize() &&
                        info->function()->scope()->AllowsLazyCompilation());
  cgen.PopulateDeoptimizationData(code);
  cgen.PopulateTypeFeedbackInfo(code);
  cgen.PopulateTypeFeedbackCells(code);
  code->set_has_deoptimization_support(info->HasDeoptimizationSupport());
  code->set_handler_table(*cgen.handler_table());
#ifdef ENABLE_DEBUGGER_SUPPORT
  code->set_compiled_optimizable(info->IsOptimizable());
#endif  // ENABLE_DEBUGGER_SUPPORT
  code->set_allow_osr_at_loop_nesting_level(0);
  code->set_profiler_ticks(0);
  code->set_back_edge_table_offset(table_offset);
  code->set_back_edges_patched_for_osr(false);
  CodeGenerator::PrintCode(code, info);
  info->SetCode(code);
#ifdef ENABLE_GDB_JIT_INTERFACE
  if (FLAG_gdbjit) {
    GDBJITLineInfo* lineinfo =
        masm.positions_recorder()->DetachGDBJITLineInfo();
    GDBJIT(RegisterDetailedLineInfo(*code, lineinfo));
  }
#endif
  void* line_info = masm.positions_recorder()->DetachJITHandlerData();
  LOG_CODE_EVENT(isolate, CodeEndLinePosInfoRecordEvent(*code, line_info));
  return true;
}

unsigned FullCodeGenerator::EmitBackEdgeTable() {
  // The back edge table consists of a length (in number of entries)
  // field, and then a sequence of entries. Each entry is a triple of AST id,
  // code-relative pc offset, and loop depth.
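  //
  // Schematically, for a table with two entries the emitted data is:
  //
  //   [ length=2 | id0 | pc0 | depth0 | id1 | pc1 | depth1 ]
  //
  // with every field emitted as a 32-bit value by the dd() calls below.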
  masm()->Align(kIntSize);
  unsigned offset = masm()->pc_offset();
  unsigned length = back_edges_.length();
  __ dd(length);
  for (unsigned i = 0; i < length; ++i) {
    __ dd(back_edges_[i].id.ToInt());
    __ dd(back_edges_[i].pc);
    __ dd(back_edges_[i].loop_depth);
  }
  return offset;
}

void FullCodeGenerator::PopulateDeoptimizationData(Handle<Code> code) {
  // Fill in the deoptimization information.
  ASSERT(info_->HasDeoptimizationSupport() || bailout_entries_.is_empty());
  if (!info_->HasDeoptimizationSupport()) return;
  int length = bailout_entries_.length();
  Handle<DeoptimizationOutputData> data = isolate()->factory()->
      NewDeoptimizationOutputData(length, TENURED);
  for (int i = 0; i < length; i++) {
    data->SetAstId(i, bailout_entries_[i].id);
    data->SetPcAndState(i, Smi::FromInt(bailout_entries_[i].pc_and_state));
  }
  code->set_deoptimization_data(*data);
}


void FullCodeGenerator::PopulateTypeFeedbackInfo(Handle<Code> code) {
  Handle<TypeFeedbackInfo> info = isolate()->factory()->NewTypeFeedbackInfo();
  info->set_ic_total_count(ic_total_count_);
  ASSERT(!isolate()->heap()->InNewSpace(*info));
  code->set_type_feedback_info(*info);
}

void FullCodeGenerator::Initialize() {
  // The generation of debug code must match between the snapshot code and the
  // code that is generated later. This is assumed by the debugger when it is
  // calculating PC offsets after generating a debug version of code. Therefore
  // we disable the production of debug code in the full compiler if we are
  // either generating a snapshot or we booted from a snapshot.
  generate_debug_code_ = FLAG_debug_code &&
                         !Serializer::enabled() &&
                         !Snapshot::HaveASnapshotToStartFrom();
  masm_->set_emit_debug_code(generate_debug_code_);
  masm_->set_predictable_code_size(true);
  InitializeAstVisitor(info_->zone());
}

void FullCodeGenerator::PopulateTypeFeedbackCells(Handle<Code> code) {
  if (type_feedback_cells_.is_empty()) return;
  int length = type_feedback_cells_.length();
  int array_size = TypeFeedbackCells::LengthOfFixedArray(length);
  Handle<TypeFeedbackCells> cache = Handle<TypeFeedbackCells>::cast(
      isolate()->factory()->NewFixedArray(array_size, TENURED));
  for (int i = 0; i < length; i++) {
    cache->SetAstId(i, type_feedback_cells_[i].ast_id);
    cache->SetCell(i, *type_feedback_cells_[i].cell);
  }
  TypeFeedbackInfo::cast(code->type_feedback_info())->set_type_feedback_cells(
      *cache);
}

void FullCodeGenerator::PrepareForBailout(Expression* node, State state) {
  PrepareForBailoutForId(node->id(), state);
}


void FullCodeGenerator::CallLoadIC(ContextualMode mode, TypeFeedbackId id) {
  Handle<Code> ic = LoadIC::initialize_stub(isolate(), mode);
  CallIC(ic, mode, id);
}


void FullCodeGenerator::CallStoreIC(ContextualMode mode, TypeFeedbackId id) {
  Handle<Code> ic = StoreIC::initialize_stub(isolate(), strict_mode());
  CallIC(ic, mode, id);
}

void FullCodeGenerator::RecordJSReturnSite(Call* call) {
  // We record the offset of the function return so we can rebuild the frame
  // if the function was inlined, i.e., this is the return address in the
  // inlined function's frame.
  //
  // The state is ignored. We defensively set it to TOS_REG, which is the
  // real state of the unoptimized code at the return site.
  PrepareForBailoutForId(call->ReturnId(), TOS_REG);
#ifdef DEBUG
  // In debug builds, mark the return so we can verify that this function
  // is always called.
  ASSERT(!call->return_is_recorded_);
  call->return_is_recorded_ = true;
#endif
}

void FullCodeGenerator::PrepareForBailoutForId(BailoutId id, State state) {
  // There's no need to prepare this code for bailouts from already optimized
  // code or code that can't be optimized.
  if (!info_->HasDeoptimizationSupport()) return;
  unsigned pc_and_state =
      StateField::encode(state) | PcField::encode(masm_->pc_offset());
  ASSERT(Smi::IsValid(pc_and_state));
  BailoutEntry entry = { id, pc_and_state };
  ASSERT(!prepared_bailout_ids_.Contains(id.ToInt()));
  prepared_bailout_ids_.Add(id.ToInt(), zone());
  bailout_entries_.Add(entry, zone());
}

void FullCodeGenerator::RecordTypeFeedbackCell(
    TypeFeedbackId id, Handle<Cell> cell) {
  TypeFeedbackCellEntry entry = { id, cell };
  type_feedback_cells_.Add(entry, zone());
}


void FullCodeGenerator::RecordBackEdge(BailoutId ast_id) {
  // The pc offset does not need to be encoded and packed together with a
  // state.
  ASSERT(masm_->pc_offset() > 0);
  ASSERT(loop_depth() > 0);
  uint8_t depth = Min(loop_depth(), Code::kMaxLoopNestingMarker);
  BackEdgeEntry entry =
      { ast_id, static_cast<unsigned>(masm_->pc_offset()), depth };
  back_edges_.Add(entry, zone());
}

bool FullCodeGenerator::ShouldInlineSmiCase(Token::Value op) {
  // Inline smi case inside loops, but not division and modulo which
  // are too complicated and take up too much space.
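  //
  // For example, 'a + b' inside a loop body takes the inlined smi fast
  // path, while 'a / b' always falls back to the generic binary op code.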
  if (op == Token::DIV || op == Token::MOD) return false;
  if (FLAG_always_inline_smi_code) return true;
  return loop_depth_ > 0;
}

void FullCodeGenerator::EffectContext::Plug(Register reg) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Register reg) const {
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::Plug(Register reg) const {
  __ Push(reg);
}


void FullCodeGenerator::TestContext::Plug(Register reg) const {
  // For simplicity we always test the accumulator register.
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::PlugTOS() const {
  __ Drop(1);
}


void FullCodeGenerator::AccumulatorValueContext::PlugTOS() const {
  __ Pop(result_register());
}


void FullCodeGenerator::StackValueContext::PlugTOS() const {
}


void FullCodeGenerator::TestContext::PlugTOS() const {
  // For simplicity we always test the accumulator register.
  __ Pop(result_register());
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}

void FullCodeGenerator::EffectContext::PrepareTest(
    Label* materialize_true,
    Label* materialize_false,
    Label** if_true,
    Label** if_false,
    Label** fall_through) const {
  // In an effect context, the true and the false case branch to the
  // same label.
  *if_true = *if_false = *fall_through = materialize_true;
}


void FullCodeGenerator::AccumulatorValueContext::PrepareTest(
    Label* materialize_true,
    Label* materialize_false,
    Label** if_true,
    Label** if_false,
    Label** fall_through) const {
  *if_true = *fall_through = materialize_true;
  *if_false = materialize_false;
}


void FullCodeGenerator::StackValueContext::PrepareTest(
    Label* materialize_true,
    Label* materialize_false,
    Label** if_true,
    Label** if_false,
    Label** fall_through) const {
  *if_true = *fall_through = materialize_true;
  *if_false = materialize_false;
}


void FullCodeGenerator::TestContext::PrepareTest(
    Label* materialize_true,
    Label* materialize_false,
    Label** if_true,
    Label** if_false,
    Label** fall_through) const {
  *if_true = true_label_;
  *if_false = false_label_;
  *fall_through = fall_through_;
}


void FullCodeGenerator::DoTest(const TestContext* context) {
  DoTest(context->condition(),
         context->true_label(),
         context->false_label(),
         context->fall_through());
}

void FullCodeGenerator::AllocateModules(ZoneList<Declaration*>* declarations) {
  ASSERT(scope_->is_global_scope());

  for (int i = 0; i < declarations->length(); i++) {
    ModuleDeclaration* declaration = declarations->at(i)->AsModuleDeclaration();
    if (declaration != NULL) {
      ModuleLiteral* module = declaration->module()->AsModuleLiteral();
      if (module != NULL) {
        Comment cmnt(masm_, "[ Link nested modules");
        Scope* scope = module->body()->scope();
        Interface* interface = scope->interface();
        ASSERT(interface->IsModule() && interface->IsFrozen());

        interface->Allocate(scope->module_var()->index());

        // Set up module context.
        ASSERT(scope->interface()->Index() >= 0);
        __ Push(Smi::FromInt(scope->interface()->Index()));
        __ Push(scope->GetScopeInfo());
        __ CallRuntime(Runtime::kPushModuleContext, 2);
        StoreToFrameField(StandardFrameConstants::kContextOffset,
                          context_register());

        AllocateModules(scope->declarations());

        // Pop module context.
        LoadContextField(context_register(), Context::PREVIOUS_INDEX);
        // Update local stack frame context field.
        StoreToFrameField(StandardFrameConstants::kContextOffset,
                          context_register());
      }
    }
  }
}

// Modules have their own local scope, represented by their own context.
// Module instance objects have an accessor for every export that forwards
// access to the respective slot from the module's context. (Exports that are
// modules themselves, however, are simple data properties.)
//
// All modules have a _hosting_ scope/context, which (currently) is the
// (innermost) enclosing global scope. To deal with recursion, nested modules
// are hosted by the same scope as global ones.
//
// For every (global or nested) module literal, the hosting context has an
// internal slot that points directly to the respective module context. This
// enables quick access to (statically resolved) module members by
// 2-dimensional access through the hosting context. For example,
//
//   module A {
//     let x;
//     module B { let y; }
//   }
//   module C { let z; }
//
// allocates contexts as follows:
//
// [header| .A | .B | .C | A | C ]  (global)
//           |    |    |
//           |    |    +-- [header| z ]  (module)
//           |    |
//           |    +------- [header| y ]  (module)
//           |
//           +------------ [header| x | B ]  (module)
//
// Here, .A, .B, .C are the internal slots pointing to the hosted module
// contexts, whereas A, B, C hold the actual instance objects (note that every
// module context also points to the respective instance object through its
// extension slot in the header).
//
// To deal with arbitrary recursion and aliases between modules,
// they are created and initialized in several stages. Each stage applies to
// all modules in the hosting global scope, including nested ones.
//
// 1. Allocate: for each module _literal_, allocate the module contexts and
//    respective instance object and wire them up. This happens in the
//    PushModuleContext runtime function, as generated by AllocateModules
//    (invoked by VisitDeclarations in the hosting scope).
//
// 2. Bind: for each module _declaration_ (i.e. literals as well as aliases),
//    assign the respective instance object to respective local variables. This
//    happens in VisitModuleDeclaration, and uses the instance objects created
//    in the previous stage.
//    For each module _literal_, this phase also constructs a module descriptor
//    for the next stage. This happens in VisitModuleLiteral.
//
// 3. Populate: invoke the DeclareModules runtime function to populate each
//    _instance_ object with accessors for its exports. This is generated by
//    DeclareModules (invoked by VisitDeclarations in the hosting scope again),
//    and uses the descriptors generated in the previous stage.
//
// 4. Initialize: execute the module bodies (and other code) in sequence. This
//    happens by the separate statements generated for module bodies. To
//    reenter the module scopes properly, the parser inserted ModuleStatements.

void FullCodeGenerator::VisitDeclarations(
    ZoneList<Declaration*>* declarations) {
  Handle<FixedArray> saved_modules = modules_;
  int saved_module_index = module_index_;
  ZoneList<Handle<Object> >* saved_globals = globals_;
  ZoneList<Handle<Object> > inner_globals(10, zone());
  globals_ = &inner_globals;

  if (scope_->num_modules() != 0) {
    // This is a scope hosting modules. Allocate a descriptor array to pass
    // to the runtime for initialization.
    Comment cmnt(masm_, "[ Allocate modules");
    ASSERT(scope_->is_global_scope());
    modules_ =
        isolate()->factory()->NewFixedArray(scope_->num_modules(), TENURED);
    module_index_ = 0;

    // Generate code for allocating all modules, including nested ones.
    // The allocated contexts are stored in internal variables in this scope.
    AllocateModules(declarations);
  }

  AstVisitor::VisitDeclarations(declarations);

  if (scope_->num_modules() != 0) {
    // Initialize modules from descriptor array.
    ASSERT(module_index_ == modules_->length());
    DeclareModules(modules_);
    modules_ = saved_modules;
    module_index_ = saved_module_index;
  }

  if (!globals_->is_empty()) {
    // Invoke the platform-dependent code generator to do the actual
    // declaration of the global functions and variables.
    Handle<FixedArray> array =
        isolate()->factory()->NewFixedArray(globals_->length(), TENURED);
    for (int i = 0; i < globals_->length(); ++i)
      array->set(i, *globals_->at(i));
    DeclareGlobals(array);
  }

  globals_ = saved_globals;
}

void FullCodeGenerator::VisitModuleLiteral(ModuleLiteral* module) {
  Block* block = module->body();
  Scope* saved_scope = scope();
  scope_ = block->scope();
  Interface* interface = scope_->interface();

  Comment cmnt(masm_, "[ ModuleLiteral");
  SetStatementPosition(block);

  ASSERT(!modules_.is_null());
  ASSERT(module_index_ < modules_->length());
  int index = module_index_++;

  // Set up module context.
  ASSERT(interface->Index() >= 0);
  __ Push(Smi::FromInt(interface->Index()));
  __ Push(Smi::FromInt(0));
  __ CallRuntime(Runtime::kPushModuleContext, 2);
  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());

  {
    Comment cmnt(masm_, "[ Declarations");
    VisitDeclarations(scope_->declarations());
  }

  // Populate the module description.
  Handle<ModuleInfo> description =
      ModuleInfo::Create(isolate(), interface, scope_);
  modules_->set(index, *description);

  scope_ = saved_scope;
  // Pop module context.
  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
  // Update local stack frame context field.
  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
}

void FullCodeGenerator::VisitModuleVariable(ModuleVariable* module) {
  // Nothing to do.
  // The instance object is resolved statically through the module's interface.
}


void FullCodeGenerator::VisitModulePath(ModulePath* module) {
  // Nothing to do.
  // The instance object is resolved statically through the module's interface.
}


void FullCodeGenerator::VisitModuleUrl(ModuleUrl* module) {
  // TODO(rossberg): dummy allocation for now.
  Scope* scope = module->body()->scope();
  Interface* interface = scope_->interface();

  ASSERT(interface->IsModule() && interface->IsFrozen());
  ASSERT(!modules_.is_null());
  ASSERT(module_index_ < modules_->length());
  interface->Allocate(scope->module_var()->index());
  int index = module_index_++;

  Handle<ModuleInfo> description =
      ModuleInfo::Create(isolate(), interface, scope_);
  modules_->set(index, *description);
}

int FullCodeGenerator::DeclareGlobalsFlags() {
  ASSERT(DeclareGlobalsLanguageMode::is_valid(language_mode()));
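  // The resulting bit field is what the platform-specific DeclareGlobals
  // implementations pass (as a Smi) to the Runtime::kDeclareGlobals call.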
  return DeclareGlobalsEvalFlag::encode(is_eval()) |
         DeclareGlobalsNativeFlag::encode(is_native()) |
         DeclareGlobalsLanguageMode::encode(language_mode());
}

void FullCodeGenerator::SetFunctionPosition(FunctionLiteral* fun) {
  CodeGenerator::RecordPositions(masm_, fun->start_position());
}


void FullCodeGenerator::SetReturnPosition(FunctionLiteral* fun) {
  CodeGenerator::RecordPositions(masm_, fun->end_position() - 1);
}

void FullCodeGenerator::SetStatementPosition(Statement* stmt) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  if (!isolate()->debugger()->IsDebuggerActive()) {
    CodeGenerator::RecordPositions(masm_, stmt->position());
  } else {
    // Check if the statement will be breakable without adding a debug break
    // slot.
    BreakableStatementChecker checker(zone());
    checker.Check(stmt);
    // Record the statement position right here if the statement is not
    // breakable. For breakable statements the actual recording of the
    // position will be postponed to the breakable code (typically an IC).
    bool position_recorded = CodeGenerator::RecordPositions(
        masm_, stmt->position(), !checker.is_breakable());
    // If the position recording did record a new position generate a debug
    // break slot to make the statement breakable.
    if (position_recorded) {
      Debug::GenerateSlot(masm_);
    }
  }
#else
  CodeGenerator::RecordPositions(masm_, stmt->position());
#endif
}

void FullCodeGenerator::SetExpressionPosition(Expression* expr) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  if (!isolate()->debugger()->IsDebuggerActive()) {
    CodeGenerator::RecordPositions(masm_, expr->position());
  } else {
    // Check if the expression will be breakable without adding a debug break
    // slot.
    BreakableStatementChecker checker(zone());
    checker.Check(expr);
    // Record a statement position right here if the expression is not
    // breakable. For breakable expressions the actual recording of the
    // position will be postponed to the breakable code (typically an IC).
    // NOTE this will record a statement position for something which might
    // not be a statement. As stepping in the debugger will only stop at
    // statement positions this is used for e.g. the condition expression of
    // a do while loop.
    bool position_recorded = CodeGenerator::RecordPositions(
        masm_, expr->position(), !checker.is_breakable());
    // If the position recording did record a new position generate a debug
    // break slot to make the statement breakable.
    if (position_recorded) {
      Debug::GenerateSlot(masm_);
    }
  }
#else
  CodeGenerator::RecordPositions(masm_, expr->position());
#endif
}

void FullCodeGenerator::SetStatementPosition(int pos) {
  CodeGenerator::RecordPositions(masm_, pos);
}


void FullCodeGenerator::SetSourcePosition(int pos) {
  if (pos != RelocInfo::kNoPosition) {
    masm_->positions_recorder()->RecordPosition(pos);
  }
}

// Lookup table for code generators for special runtime calls which are
// generated inline.
#define INLINE_FUNCTION_GENERATOR_ADDRESS(Name, argc, ressize)          \
    &FullCodeGenerator::Emit##Name,

const FullCodeGenerator::InlineFunctionGenerator
    FullCodeGenerator::kInlineFunctionGenerators[] = {
  INLINE_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_ADDRESS)
  INLINE_RUNTIME_FUNCTION_LIST(INLINE_FUNCTION_GENERATOR_ADDRESS)
};

#undef INLINE_FUNCTION_GENERATOR_ADDRESS

FullCodeGenerator::InlineFunctionGenerator
    FullCodeGenerator::FindInlineFunctionGenerator(Runtime::FunctionId id) {
  int lookup_index =
      static_cast<int>(id) - static_cast<int>(Runtime::kFirstInlineFunction);
  ASSERT(lookup_index >= 0);
  ASSERT(static_cast<size_t>(lookup_index) <
         ARRAY_SIZE(kInlineFunctionGenerators));
  return kInlineFunctionGenerators[lookup_index];
}

void FullCodeGenerator::EmitInlineRuntimeCall(CallRuntime* expr) {
  const Runtime::Function* function = expr->function();
  ASSERT(function != NULL);
  ASSERT(function->intrinsic_type == Runtime::INLINE);
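  // For example, a '%_IsSmi(x)' call in natives code dispatches through the
  // table above to the platform-specific EmitIsSmi generator.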
  InlineFunctionGenerator generator =
      FindInlineFunctionGenerator(function->function_id);
  ((*this).*(generator))(expr);
}

void FullCodeGenerator::EmitGeneratorNext(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  EmitGeneratorResume(args->at(0), args->at(1), JSGeneratorObject::NEXT);
}


void FullCodeGenerator::EmitGeneratorThrow(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  EmitGeneratorResume(args->at(0), args->at(1), JSGeneratorObject::THROW);
}


void FullCodeGenerator::EmitDebugBreakInOptimizedCode(CallRuntime* expr) {
  context()->Plug(handle(Smi::FromInt(0), isolate()));
}

void FullCodeGenerator::VisitBinaryOperation(BinaryOperation* expr) {
  switch (expr->op()) {
    case Token::COMMA:
      return VisitComma(expr);
    case Token::OR:
    case Token::AND:
      return VisitLogicalExpression(expr);
    default:
      return VisitArithmeticExpression(expr);
  }
}

void FullCodeGenerator::VisitInDuplicateContext(Expression* expr) {
  if (context()->IsEffect()) {
    VisitForEffect(expr);
  } else if (context()->IsAccumulatorValue()) {
    VisitForAccumulatorValue(expr);
  } else if (context()->IsStackValue()) {
    VisitForStackValue(expr);
  } else if (context()->IsTest()) {
    const TestContext* test = TestContext::cast(context());
    VisitForControl(expr, test->true_label(), test->false_label(),
                    test->fall_through());
  }
}


void FullCodeGenerator::VisitComma(BinaryOperation* expr) {
  Comment cmnt(masm_, "[ Comma");
  VisitForEffect(expr->left());
  VisitInDuplicateContext(expr->right());
}

void FullCodeGenerator::VisitLogicalExpression(BinaryOperation* expr) {
  bool is_logical_and = expr->op() == Token::AND;
  Comment cmnt(masm_, is_logical_and ? "[ Logical AND" : "[ Logical OR");
  Expression* left = expr->left();
  Expression* right = expr->right();
  BailoutId right_id = expr->RightId();
  Label done;
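
  // The left operand is compiled differently depending on the surrounding
  // expression context: test, accumulator value, stack value, or effect.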
  if (context()->IsTest()) {
    Label eval_right;
    const TestContext* test = TestContext::cast(context());
    if (is_logical_and) {
      VisitForControl(left, &eval_right, test->false_label(), &eval_right);
    } else {
      VisitForControl(left, test->true_label(), &eval_right, &eval_right);
    }
    PrepareForBailoutForId(right_id, NO_REGISTERS);
    __ bind(&eval_right);

  } else if (context()->IsAccumulatorValue()) {
    VisitForAccumulatorValue(left);
    // We want the value in the accumulator for the test, and on the stack in
    // case we need it.
    __ Push(result_register());
    Label discard, restore;
    if (is_logical_and) {
      DoTest(left, &discard, &restore, &restore);
    } else {
      DoTest(left, &restore, &discard, &restore);
    }
    __ bind(&restore);
    __ Pop(result_register());
    __ jmp(&done);
    __ bind(&discard);
    __ Drop(1);
    PrepareForBailoutForId(right_id, NO_REGISTERS);

  } else if (context()->IsStackValue()) {
    VisitForAccumulatorValue(left);
    // We want the value in the accumulator for the test, and on the stack in
    // case we need it.
    __ Push(result_register());
    Label discard;
    if (is_logical_and) {
      DoTest(left, &discard, &done, &discard);
    } else {
      DoTest(left, &done, &discard, &discard);
    }
    __ bind(&discard);
    __ Drop(1);
    PrepareForBailoutForId(right_id, NO_REGISTERS);

  } else {
    ASSERT(context()->IsEffect());
    Label eval_right;
    if (is_logical_and) {
      VisitForControl(left, &eval_right, &done, &eval_right);
    } else {
      VisitForControl(left, &done, &eval_right, &eval_right);
    }
    PrepareForBailoutForId(right_id, NO_REGISTERS);
    __ bind(&eval_right);
  }

  VisitInDuplicateContext(right);
  __ bind(&done);
}

void FullCodeGenerator::VisitArithmeticExpression(BinaryOperation* expr) {
  Token::Value op = expr->op();
  Comment cmnt(masm_, "[ ArithmeticExpression");
  Expression* left = expr->left();
  Expression* right = expr->right();
  OverwriteMode mode =
      left->ResultOverwriteAllowed()
      ? OVERWRITE_LEFT
      : (right->ResultOverwriteAllowed() ? OVERWRITE_RIGHT : NO_OVERWRITE);

  VisitForStackValue(left);
  VisitForAccumulatorValue(right);

  SetSourcePosition(expr->position());
  if (ShouldInlineSmiCase(op)) {
    EmitInlineSmiBinaryOp(expr, op, mode, left, right);
  } else {
    EmitBinaryOp(expr, op, mode);
  }
}

void FullCodeGenerator::VisitBlock(Block* stmt) {
  Comment cmnt(masm_, "[ Block");
  NestedBlock nested_block(this, stmt);
  SetStatementPosition(stmt);

  Scope* saved_scope = scope();
  // Push a block context when entering a block with block scoped variables.
  if (stmt->scope() != NULL) {
    scope_ = stmt->scope();
    ASSERT(!scope_->is_module_scope());
    { Comment cmnt(masm_, "[ Extend block context");
      Handle<ScopeInfo> scope_info = scope_->GetScopeInfo();
      int heap_slots = scope_info->ContextLength() - Context::MIN_CONTEXT_SLOTS;
      __ Push(scope_info);
      PushFunctionArgumentForContextAllocation();
      if (heap_slots <= FastNewBlockContextStub::kMaximumSlots) {
        FastNewBlockContextStub stub(heap_slots);
        __ CallStub(&stub);
      } else {
        __ CallRuntime(Runtime::kPushBlockContext, 2);
      }

      // Replace the context stored in the frame.
      StoreToFrameField(StandardFrameConstants::kContextOffset,
                        context_register());
    }
    { Comment cmnt(masm_, "[ Declarations");
      VisitDeclarations(scope_->declarations());
    }
  }

  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  VisitStatements(stmt->statements());
  scope_ = saved_scope;
  __ bind(nested_block.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);

  // Pop block context if necessary.
  if (stmt->scope() != NULL) {
    LoadContextField(context_register(), Context::PREVIOUS_INDEX);
    // Update local stack frame context field.
    StoreToFrameField(StandardFrameConstants::kContextOffset,
                      context_register());
  }
}

void FullCodeGenerator::VisitModuleStatement(ModuleStatement* stmt) {
  Comment cmnt(masm_, "[ Module context");

  __ Push(Smi::FromInt(stmt->proxy()->interface()->Index()));
  __ Push(Smi::FromInt(0));
  __ CallRuntime(Runtime::kPushModuleContext, 2);
  StoreToFrameField(
      StandardFrameConstants::kContextOffset, context_register());

  Scope* saved_scope = scope_;
  scope_ = stmt->body()->scope();
  VisitStatements(stmt->body()->statements());
  scope_ = saved_scope;
  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
  // Update local stack frame context field.
  StoreToFrameField(StandardFrameConstants::kContextOffset,
                    context_register());
}

void FullCodeGenerator::VisitExpressionStatement(ExpressionStatement* stmt) {
  Comment cmnt(masm_, "[ ExpressionStatement");
  SetStatementPosition(stmt);
  VisitForEffect(stmt->expression());
}


void FullCodeGenerator::VisitEmptyStatement(EmptyStatement* stmt) {
  Comment cmnt(masm_, "[ EmptyStatement");
  SetStatementPosition(stmt);
}

void FullCodeGenerator::VisitIfStatement(IfStatement* stmt) {
  Comment cmnt(masm_, "[ IfStatement");
  SetStatementPosition(stmt);
  Label then_part, else_part, done;

  if (stmt->HasElseStatement()) {
    VisitForControl(stmt->condition(), &then_part, &else_part, &then_part);
    PrepareForBailoutForId(stmt->ThenId(), NO_REGISTERS);
    __ bind(&then_part);
    Visit(stmt->then_statement());
    __ jmp(&done);

    PrepareForBailoutForId(stmt->ElseId(), NO_REGISTERS);
    __ bind(&else_part);
    Visit(stmt->else_statement());
  } else {
    VisitForControl(stmt->condition(), &then_part, &done, &then_part);
    PrepareForBailoutForId(stmt->ThenId(), NO_REGISTERS);
    __ bind(&then_part);
    Visit(stmt->then_statement());

    PrepareForBailoutForId(stmt->ElseId(), NO_REGISTERS);
  }
  __ bind(&done);
  PrepareForBailoutForId(stmt->IfId(), NO_REGISTERS);
}

void FullCodeGenerator::VisitContinueStatement(ContinueStatement* stmt) {
  Comment cmnt(masm_, "[ ContinueStatement");
  SetStatementPosition(stmt);
  NestedStatement* current = nesting_stack_;
  int stack_depth = 0;
  int context_length = 0;
  // When continuing, we clobber the unpredictable value in the accumulator
  // with one that's safe for GC. If we hit an exit from the try block of
  // try...finally on our way out, we will unconditionally preserve the
  // accumulator on the stack.
  ClearAccumulator();
  while (!current->IsContinueTarget(stmt->target())) {
    current = current->Exit(&stack_depth, &context_length);
  }
  __ Drop(stack_depth);
  if (context_length > 0) {
    while (context_length > 0) {
      LoadContextField(context_register(), Context::PREVIOUS_INDEX);
      --context_length;
    }
    StoreToFrameField(StandardFrameConstants::kContextOffset,
                      context_register());
  }

  __ jmp(current->AsIteration()->continue_label());
}

void FullCodeGenerator::VisitBreakStatement(BreakStatement* stmt) {
  Comment cmnt(masm_, "[ BreakStatement");
  SetStatementPosition(stmt);
  NestedStatement* current = nesting_stack_;
  int stack_depth = 0;
  int context_length = 0;
  // When breaking, we clobber the unpredictable value in the accumulator
  // with one that's safe for GC. If we hit an exit from the try block of
  // try...finally on our way out, we will unconditionally preserve the
  // accumulator on the stack.
  ClearAccumulator();
  while (!current->IsBreakTarget(stmt->target())) {
    current = current->Exit(&stack_depth, &context_length);
  }
  __ Drop(stack_depth);
  if (context_length > 0) {
    while (context_length > 0) {
      LoadContextField(context_register(), Context::PREVIOUS_INDEX);
      --context_length;
    }
    StoreToFrameField(StandardFrameConstants::kContextOffset,
                      context_register());
  }

  __ jmp(current->AsBreakable()->break_label());
}

void FullCodeGenerator::EmitUnwindBeforeReturn() {
  NestedStatement* current = nesting_stack_;
  int stack_depth = 0;
  int context_length = 0;
  while (current != NULL) {
    current = current->Exit(&stack_depth, &context_length);
  }
  __ Drop(stack_depth);
}


void FullCodeGenerator::VisitReturnStatement(ReturnStatement* stmt) {
  Comment cmnt(masm_, "[ ReturnStatement");
  SetStatementPosition(stmt);
  Expression* expr = stmt->expression();
  VisitForAccumulatorValue(expr);
  EmitUnwindBeforeReturn();
  EmitReturnSequence();
}

void FullCodeGenerator::VisitWithStatement(WithStatement* stmt) {
  Comment cmnt(masm_, "[ WithStatement");
  SetStatementPosition(stmt);

  VisitForStackValue(stmt->expression());
  PushFunctionArgumentForContextAllocation();
  __ CallRuntime(Runtime::kPushWithContext, 2);
  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());

  Scope* saved_scope = scope();
  scope_ = stmt->scope();
  { WithOrCatch body(this);
    Visit(stmt->statement());
  }
  scope_ = saved_scope;

  // Pop context.
  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
  // Update local stack frame context field.
  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
}

void FullCodeGenerator::VisitDoWhileStatement(DoWhileStatement* stmt) {
  Comment cmnt(masm_, "[ DoWhileStatement");
  SetStatementPosition(stmt);
  Label body, book_keeping;

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  __ bind(&body);
  Visit(stmt->body());

  // Record the position of the do while condition and make sure it is
  // possible to break on the condition.
  __ bind(loop_statement.continue_label());
  PrepareForBailoutForId(stmt->ContinueId(), NO_REGISTERS);
  SetExpressionPosition(stmt->cond());
  VisitForControl(stmt->cond(),
                  &book_keeping,
                  loop_statement.break_label(),
                  &book_keeping);

  // Check stack before looping.
  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
  __ bind(&book_keeping);
  EmitBackEdgeBookkeeping(stmt, &body);
  __ jmp(&body);

  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}

void FullCodeGenerator::VisitWhileStatement(WhileStatement* stmt) {
  Comment cmnt(masm_, "[ WhileStatement");
  Label test, body;

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // Emit the test at the bottom of the loop.
  __ jmp(&test);

  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&body);
  Visit(stmt->body());

  // Emit the statement position here as this is where the while
  // statement code starts.
  __ bind(loop_statement.continue_label());
  SetStatementPosition(stmt);

  // Check stack before looping.
  EmitBackEdgeBookkeeping(stmt, &body);

  __ bind(&test);
  VisitForControl(stmt->cond(),
                  &body,
                  loop_statement.break_label(),
                  loop_statement.break_label());

  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}

void FullCodeGenerator::VisitForStatement(ForStatement* stmt) {
  Comment cmnt(masm_, "[ ForStatement");
  Label test, body;

  Iteration loop_statement(this, stmt);

  // Set statement position for a break slot before entering the for-body.
  SetStatementPosition(stmt);

  if (stmt->init() != NULL) {
    Visit(stmt->init());
  }

  increment_loop_depth();
  // Emit the test at the bottom of the loop (even if empty).
  __ jmp(&test);

  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&body);
  Visit(stmt->body());

  PrepareForBailoutForId(stmt->ContinueId(), NO_REGISTERS);
  __ bind(loop_statement.continue_label());
  if (stmt->next() != NULL) {
    Visit(stmt->next());
  }

  // Emit the statement position here as this is where the for
  // statement code starts.
  SetStatementPosition(stmt);

  // Check stack before looping.
  EmitBackEdgeBookkeeping(stmt, &body);

  __ bind(&test);
  if (stmt->cond() != NULL) {
    VisitForControl(stmt->cond(),
                    &body,
                    loop_statement.break_label(),
                    loop_statement.break_label());
  } else {
    __ jmp(&body);
  }

  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}

void FullCodeGenerator::VisitTryCatchStatement(TryCatchStatement* stmt) {
  Comment cmnt(masm_, "[ TryCatchStatement");
  SetStatementPosition(stmt);
  // The try block adds a handler to the exception handler chain before
  // entering, and removes it again when exiting normally. If an exception
  // is thrown during execution of the try block, the handler is consumed
  // and control is passed to the catch block with the exception in the
  // result register.

  Label try_entry, handler_entry, exit;
  __ jmp(&try_entry);
  __ bind(&handler_entry);
  handler_table()->set(stmt->index(), Smi::FromInt(handler_entry.pos()));
  // Exception handler code, the exception is in the result register.
  // Extend the context before executing the catch block.
  { Comment cmnt(masm_, "[ Extend catch context");
    __ Push(stmt->variable()->name());
    __ Push(result_register());
    PushFunctionArgumentForContextAllocation();
    __ CallRuntime(Runtime::kPushCatchContext, 3);
    StoreToFrameField(StandardFrameConstants::kContextOffset,
                      context_register());
  }

  Scope* saved_scope = scope();
  scope_ = stmt->scope();
  ASSERT(scope_->declarations()->is_empty());
  { WithOrCatch catch_body(this);
    Visit(stmt->catch_block());
  }
  // Restore the context.
  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
  scope_ = saved_scope;
  __ jmp(&exit);

  // Try block code. Sets up the exception handler chain.
  __ bind(&try_entry);
  __ PushTryHandler(StackHandler::CATCH, stmt->index());
  { TryCatch try_body(this);
    Visit(stmt->try_block());
  }
  __ PopTryHandler();
  __ bind(&exit);
}

void FullCodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
  Comment cmnt(masm_, "[ TryFinallyStatement");
  SetStatementPosition(stmt);
  // Try finally is compiled by setting up a try-handler on the stack while
  // executing the try body, and removing it again afterwards.
  //
  // The try-finally construct can enter the finally block in three ways:
  // 1. By exiting the try-block normally. This removes the try-handler and
  //    calls the finally block code before continuing.
  // 2. By exiting the try-block with a function-local control flow transfer
  //    (break/continue/return). The site of the, e.g., break removes the
  //    try handler and calls the finally block code before continuing
  //    its outward control transfer.
  // 3. By exiting the try-block with a thrown exception.
  //    This can happen in nested function calls. It traverses the try-handler
  //    chain and consumes the try-handler entry before jumping to the
  //    handler code. The handler code then calls the finally-block before
  //    rethrowing the exception.
  //
  // The finally block must assume a return address on top of the stack
  // (or in the link register on ARM chips) and a value (return value or
  // exception) in the result register (rax/eax/r0), both of which must
  // be preserved. The return address isn't GC-safe, so it should be
  // cooked before GC.
  Label try_entry, handler_entry, finally_entry;

  // Jump to try-handler setup and try-block code.
  __ jmp(&try_entry);
  __ bind(&handler_entry);
  handler_table()->set(stmt->index(), Smi::FromInt(handler_entry.pos()));
  // Exception handler code. This code is only executed when an exception
  // is thrown. The exception is in the result register, and must be
  // preserved by the finally block. Call the finally block and then
  // rethrow the exception if it returns.
  __ Call(&finally_entry);
  __ Push(result_register());
  __ CallRuntime(Runtime::kReThrow, 1);

  // Finally block implementation.
  __ bind(&finally_entry);
  EnterFinallyBlock();
  { Finally finally_body(this);
    Visit(stmt->finally_block());
  }
  ExitFinallyBlock();  // Return to the calling code.

  // Set up try handler.
  __ bind(&try_entry);
  __ PushTryHandler(StackHandler::FINALLY, stmt->index());
  { TryFinally try_body(this, &finally_entry);
    Visit(stmt->try_block());
  }
  __ PopTryHandler();
  // Execute the finally block on the way out. Clobber the unpredictable
  // value in the result register with one that's safe for GC because the
  // finally block will unconditionally preserve the result register on the
  // stack.
  ClearAccumulator();
  __ Call(&finally_entry);
}

void FullCodeGenerator::VisitDebuggerStatement(DebuggerStatement* stmt) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  Comment cmnt(masm_, "[ DebuggerStatement");
  SetStatementPosition(stmt);

  __ DebugBreak();
  // Ignore the return value.
#endif
}


void FullCodeGenerator::VisitCaseClause(CaseClause* clause) {
  UNREACHABLE();
}

void FullCodeGenerator::VisitConditional(Conditional* expr) {
  Comment cmnt(masm_, "[ Conditional");
  Label true_case, false_case, done;
  VisitForControl(expr->condition(), &true_case, &false_case, &true_case);

  PrepareForBailoutForId(expr->ThenId(), NO_REGISTERS);
  __ bind(&true_case);
  SetExpressionPosition(expr->then_expression());
  if (context()->IsTest()) {
    const TestContext* for_test = TestContext::cast(context());
    VisitForControl(expr->then_expression(),
                    for_test->true_label(),
                    for_test->false_label(),
                    NULL);
  } else {
    VisitInDuplicateContext(expr->then_expression());
    __ jmp(&done);
  }

  PrepareForBailoutForId(expr->ElseId(), NO_REGISTERS);
  __ bind(&false_case);
  SetExpressionPosition(expr->else_expression());
  VisitInDuplicateContext(expr->else_expression());
  // If control flow falls through Visit, merge it with true case here.
  if (!context()->IsTest()) {
    __ bind(&done);
  }
}


void FullCodeGenerator::VisitLiteral(Literal* expr) {
  Comment cmnt(masm_, "[ Literal");
  context()->Plug(expr->value());
}

void FullCodeGenerator::VisitFunctionLiteral(FunctionLiteral* expr) {
  Comment cmnt(masm_, "[ FunctionLiteral");

  // Build the function boilerplate and instantiate it.
  Handle<SharedFunctionInfo> function_info =
      Compiler::BuildFunctionInfo(expr, script());
  if (function_info.is_null()) {
    SetStackOverflow();
    return;
  }
  EmitNewClosure(function_info, expr->pretenure());
}

void FullCodeGenerator::VisitNativeFunctionLiteral(
    NativeFunctionLiteral* expr) {
  Comment cmnt(masm_, "[ NativeFunctionLiteral");

  // Compute the function template for the native function.
  Handle<String> name = expr->name();
  v8::Handle<v8::FunctionTemplate> fun_template =
      expr->extension()->GetNativeFunctionTemplate(
          reinterpret_cast<v8::Isolate*>(isolate()), v8::Utils::ToLocal(name));
  ASSERT(!fun_template.IsEmpty());

  // Instantiate the function and create a shared function info from it.
  Handle<JSFunction> fun = Utils::OpenHandle(*fun_template->GetFunction());
  const int literals = fun->NumberOfLiterals();
  Handle<Code> code = Handle<Code>(fun->shared()->code());
  Handle<Code> construct_stub = Handle<Code>(fun->shared()->construct_stub());
  bool is_generator = false;
  Handle<SharedFunctionInfo> shared =
      isolate()->factory()->NewSharedFunctionInfo(name, literals, is_generator,
          code, Handle<ScopeInfo>(fun->shared()->scope_info()));
  shared->set_construct_stub(*construct_stub);

  // Copy the function data to the shared function info.
  shared->set_function_data(fun->shared()->function_data());
  int parameters = fun->shared()->formal_parameter_count();
  shared->set_formal_parameter_count(parameters);

  EmitNewClosure(shared, false);
}

void FullCodeGenerator::VisitThrow(Throw* expr) {
  Comment cmnt(masm_, "[ Throw");
  VisitForStackValue(expr->exception());
  __ CallRuntime(Runtime::kThrow, 1);
  // Never returns here.
}


FullCodeGenerator::NestedStatement* FullCodeGenerator::TryCatch::Exit(
    int* stack_depth,
    int* context_length) {
  // The macros used here must preserve the result register.
  __ Drop(*stack_depth);
  __ PopTryHandler();
  *stack_depth = 0;
  return previous_;
}

bool FullCodeGenerator::TryLiteralCompare(CompareOperation* expr) {
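  // Fast paths exist for comparisons against literals, e.g.
  // 'typeof x == "string"', 'x === undefined', and 'x === null'.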
  Expression* sub_expr;
  Handle<String> check;
  if (expr->IsLiteralCompareTypeof(&sub_expr, &check)) {
    EmitLiteralCompareTypeof(expr, sub_expr, check);
    return true;
  }

  if (expr->IsLiteralCompareUndefined(&sub_expr, isolate())) {
    EmitLiteralCompareNil(expr, sub_expr, kUndefinedValue);
    return true;
  }

  if (expr->IsLiteralCompareNull(&sub_expr)) {
    EmitLiteralCompareNil(expr, sub_expr, kNullValue);
    return true;
  }

  return false;
}

void BackEdgeTable::Patch(Isolate* isolate, Code* unoptimized) {
  DisallowHeapAllocation no_gc;
  Code* patch = isolate->builtins()->builtin(Builtins::kOnStackReplacement);

  // Iterate over the back edge table and patch every interrupt
  // call to an unconditional call to the replacement code.
  int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level();

  BackEdgeTable back_edges(unoptimized, &no_gc);
  for (uint32_t i = 0; i < back_edges.length(); i++) {
    if (static_cast<int>(back_edges.loop_depth(i)) == loop_nesting_level) {
      ASSERT_EQ(INTERRUPT, GetBackEdgeState(isolate,
                                            unoptimized,
                                            back_edges.pc(i)));
      PatchAt(unoptimized, back_edges.pc(i), ON_STACK_REPLACEMENT, patch);
    }
  }

  unoptimized->set_back_edges_patched_for_osr(true);
  ASSERT(Verify(isolate, unoptimized, loop_nesting_level));
}

void BackEdgeTable::Revert(Isolate* isolate, Code* unoptimized) {
  DisallowHeapAllocation no_gc;
  Code* patch = isolate->builtins()->builtin(Builtins::kInterruptCheck);

  // Iterate over the back edge table and revert the patched interrupt calls.
  ASSERT(unoptimized->back_edges_patched_for_osr());
  int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level();

  BackEdgeTable back_edges(unoptimized, &no_gc);
  for (uint32_t i = 0; i < back_edges.length(); i++) {
    if (static_cast<int>(back_edges.loop_depth(i)) <= loop_nesting_level) {
      ASSERT_NE(INTERRUPT, GetBackEdgeState(isolate,
                                            unoptimized,
                                            back_edges.pc(i)));
      PatchAt(unoptimized, back_edges.pc(i), INTERRUPT, patch);
    }
  }

  unoptimized->set_back_edges_patched_for_osr(false);
  unoptimized->set_allow_osr_at_loop_nesting_level(0);
  // Assert that none of the back edges are patched anymore.
  ASSERT(Verify(isolate, unoptimized, -1));
}

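// Each back edge site is in one of three states: INTERRUPT (the unpatched
// stack check, restored by Revert above), ON_STACK_REPLACEMENT (set by Patch
// and RemoveStackCheck), and OSR_AFTER_STACK_CHECK (set by AddStackCheck).
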
void BackEdgeTable::AddStackCheck(Handle<Code> code, uint32_t pc_offset) {
  DisallowHeapAllocation no_gc;
  Isolate* isolate = code->GetIsolate();
  Address pc = code->instruction_start() + pc_offset;
  Code* patch = isolate->builtins()->builtin(Builtins::kOsrAfterStackCheck);
  PatchAt(*code, pc, OSR_AFTER_STACK_CHECK, patch);
}


void BackEdgeTable::RemoveStackCheck(Handle<Code> code, uint32_t pc_offset) {
  DisallowHeapAllocation no_gc;
  Isolate* isolate = code->GetIsolate();
  Address pc = code->instruction_start() + pc_offset;

  if (OSR_AFTER_STACK_CHECK == GetBackEdgeState(isolate, *code, pc)) {
    Code* patch = isolate->builtins()->builtin(Builtins::kOnStackReplacement);
    PatchAt(*code, pc, ON_STACK_REPLACEMENT, patch);
  }
}

#ifdef DEBUG
bool BackEdgeTable::Verify(Isolate* isolate,
                           Code* unoptimized,
                           int loop_nesting_level) {
  DisallowHeapAllocation no_gc;
  BackEdgeTable back_edges(unoptimized, &no_gc);
  for (uint32_t i = 0; i < back_edges.length(); i++) {
    uint32_t loop_depth = back_edges.loop_depth(i);
    CHECK_LE(static_cast<int>(loop_depth), Code::kMaxLoopNestingMarker);
    // Assert that all back edges for shallower loops (and only those)
    // have already been patched.
    CHECK_EQ((static_cast<int>(loop_depth) <= loop_nesting_level),
             GetBackEdgeState(isolate,
                              unoptimized,
                              back_edges.pc(i)) != INTERRUPT);
  }
  return true;
}
#endif  // DEBUG

#undef __


} }  // namespace v8::internal