// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "v8.h"

#if V8_TARGET_ARCH_IA32

#include "code-stubs.h"
#include "codegen.h"
#include "compiler.h"
#include "debug.h"
#include "full-codegen.h"
#include "isolate-inl.h"
#include "parser.h"
#include "scopes.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)

class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    ASSERT(patch_site_.is_bound() == info_emitted_);
  }

  void EmitJumpIfNotSmi(Register reg,
                        Label* target,
                        Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(not_carry, target, distance);  // Always taken before patched.
  }

  void EmitJumpIfSmi(Register reg,
                     Label* target,
                     Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(carry, target, distance);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
      ASSERT(is_int8(delta_to_patch_site));
      __ test(eax, Immediate(delta_to_patch_site));
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined smi code.
    }
  }

 private:
  // jc will be patched with jz, jnc will become jnz.
  void EmitJump(Condition cc, Label* target, Label::Distance distance) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    ASSERT(cc == carry || cc == not_carry);
    __ bind(&patch_site_);
    __ j(cc, target, distance);
  }

  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};
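// An illustrative sketch of the patching protocol above (editorial, not from
// the original source): a site emitted via EmitJumpIfNotSmi(edx, &slow)
// looks roughly like
//
//   test edx, 1   ;; kSmiTagMask == 1; test clears the carry flag
//   jnc slow      ;; always taken before patching
//
// Once type feedback shows the inlined smi code is usable, the IC system
// rewrites jnc into jnz (and jc into jz), so the branch then really tests
// the smi tag bit.  EmitPatchInfo() encodes the delta back to the patch site
// in the immediate of a test-eax instruction so the patcher can locate the
// jump to rewrite.
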
static void EmitStackCheck(MacroAssembler* masm_,
                           int pointers = 0,
                           Register scratch = esp) {
  Label ok;
  Isolate* isolate = masm_->isolate();
  ExternalReference stack_limit =
      ExternalReference::address_of_stack_limit(isolate);
  ASSERT(scratch.is(esp) == (pointers == 0));
  if (pointers != 0) {
    __ mov(scratch, esp);
    __ sub(scratch, Immediate(pointers * kPointerSize));
  }
  __ cmp(scratch, Operand::StaticVariable(stack_limit));
  __ j(above_equal, &ok, Label::kNear);
  __ call(isolate->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
  __ bind(&ok);
}
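// Usage note (an inference from the code above, not original commentary):
// EmitStackCheck(masm_) compares esp itself against the stack limit, while
// EmitStackCheck(masm_, locals_count, ecx) first computes
// esp - locals_count * kPointerSize into the scratch register, so a frame
// that is about to allocate many locals cannot silently overshoot the limit.
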
// Generate code for a JS function.  On entry to the function the receiver
// and arguments have been pushed on the stack left to right, with the
// return address on top of them.  The actual argument count matches the
// formal parameter count expected by the function.
//
// The live registers are:
//   o edi: the JS function object being called (i.e. ourselves)
//   o esi: our context
//   o ebp: our caller's frame pointer
//   o esp: stack pointer (pointing to return address)
//
// The function builds a JS frame.  Please see JavaScriptFrameConstants in
// frames-ia32.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);

  InitializeFeedbackVector();

  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ int3();
  }
#endif

  // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
  if (info->strict_mode() == SLOPPY && !info->is_native()) {
    Label ok;
    // +1 for return address.
    int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
    __ mov(ecx, Operand(esp, receiver_offset));

    __ cmp(ecx, isolate()->factory()->undefined_value());
    __ j(not_equal, &ok, Label::kNear);

    __ mov(ecx, GlobalObjectOperand());
    __ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalReceiverOffset));

    __ mov(Operand(esp, receiver_offset), ecx);

    __ bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(BUILD_FUNCTION_FRAME);
  info->AddNoFrameRange(0, masm_->pc_offset());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    ASSERT(!info->function()->is_generator() || locals_count == 0);
    if (locals_count == 1) {
      __ push(Immediate(isolate()->factory()->undefined_value()));
    } else if (locals_count > 1) {
      if (locals_count >= 128) {
        EmitStackCheck(masm_, locals_count, ecx);
      }
      __ mov(eax, Immediate(isolate()->factory()->undefined_value()));
      const int kMaxPushes = 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ mov(ecx, loop_iterations);
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ push(eax);
        }
        __ dec(ecx);
        __ j(not_zero, &loop_header, Label::kNear);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {
        __ push(eax);
      }
    }
  }

  bool function_in_register = true;

  // Possibly allocate a local context.
  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    // Argument to NewContext is the function, which is still in edi.
    if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
      __ push(edi);
      __ Push(info->scope()->GetScopeInfo());
      __ CallRuntime(Runtime::kHiddenNewGlobalContext, 2);
    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(heap_slots);
      __ CallStub(&stub);
    } else {
      __ push(edi);
      __ CallRuntime(Runtime::kHiddenNewFunctionContext, 1);
    }
    function_in_register = false;
    // Context is returned in eax.  It replaces the context passed to us.
    // It's saved in the stack and kept live in esi.
    __ mov(esi, eax);
    __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), eax);

    // Copy parameters into context if necessary.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ mov(eax, Operand(ebp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(var->index());
        __ mov(Operand(esi, context_offset), eax);
        // Update the write barrier. This clobbers eax and ebx.
        __ RecordWriteContextSlot(esi,
                                  context_offset,
                                  eax,
                                  ebx,
                                  kDontSaveFPRegs);
      }
    }
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (function_in_register) {
      __ push(edi);
    } else {
      __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ lea(edx,
           Operand(ebp, StandardFrameConstants::kCallerSPOffset + offset));
    __ push(edx);
    __ push(Immediate(Smi::FromInt(num_parameters)));
    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (strict_mode() == STRICT) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
    }
    ArgumentsAccessStub stub(type);
    __ CallStub(&stub);

    SetVar(arguments, eax, ebx, edx);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        ASSERT(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_LEGACY);
        ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      EmitStackCheck(masm_);
    }

    { Comment cmnt(masm_, "[ Body");
      ASSERT(loop_depth() == 0);
      VisitStatements(function()->body());
      ASSERT(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ mov(eax, isolate()->factory()->undefined_value());
    EmitReturnSequence();
  }
}
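// Illustrative frame sketch after the prologue above (an editorial summary
// of JavaScriptFrameConstants in frames-ia32.h, lowest address first):
//
//   esp    -> local n-1 ... local 0
//   ebp-8  -> the JSFunction (edi on entry)
//   ebp-4  -> the context (esi)
//   ebp    -> caller's ebp
//   ebp+4  -> return address
//   ebp+8  -> argument n-1 ... argument 0, then the receiver
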
void FullCodeGenerator::ClearAccumulator() {
  __ Move(eax, Immediate(Smi::FromInt(0)));
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(ebx, Immediate(profiling_counter_));
  __ sub(FieldOperand(ebx, Cell::kValueOffset),
         Immediate(Smi::FromInt(delta)));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  __ mov(ebx, Immediate(profiling_counter_));
  __ mov(FieldOperand(ebx, Cell::kValueOffset),
         Immediate(Smi::FromInt(reset_value)));
}


void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;

  ASSERT(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ j(positive, &ok, Label::kNear);
  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id.  This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}
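// A worked example of the weight computation above (editorial illustration,
// not original commentary): if the code generated since the back edge target
// is 1024 bytes, each iteration decrements the profiling counter by
// Min(kMaxBackEdgeWeight, Max(1, 1024 / kCodeSizeMultiplier)), so loops with
// large bodies exhaust the interrupt budget, and reach the InterruptCheck
// builtin that can trigger on-stack replacement, in fewer iterations than
// tight loops do.
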
void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ jmp(&return_label_);
  } else {
    // Common return label
    __ bind(&return_label_);
    if (FLAG_trace) {
      __ push(eax);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset();
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ j(positive, &ok, Label::kNear);
    __ push(eax);
    __ call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    __ pop(eax);
    EmitProfilingCounterReset();
    __ bind(&ok);
#ifdef DEBUG
    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);
#endif
    SetSourcePosition(function()->end_position() - 1);
    __ RecordJSReturn();
    // Do not use the leave instruction here because it is too short to
    // patch with the code required by the debugger.
    __ mov(esp, ebp);
    int no_frame_start = masm_->pc_offset();
    __ pop(ebp);

    int arguments_bytes = (info_->scope()->num_parameters() + 1) * kPointerSize;
    __ Ret(arguments_bytes, ecx);
#ifdef ENABLE_DEBUGGER_SUPPORT
    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    ASSERT(Assembler::kJSReturnSequenceLength <=
           masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
#endif
    info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
  }
}
void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  MemOperand operand = codegen()->VarOperand(var, result_register());
  // Memory operands can be pushed directly.
  __ push(operand);
}


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  if (lit->IsSmi()) {
    __ SafeMove(result_register(), Immediate(lit));
  } else {
    __ Move(result_register(), Immediate(lit));
  }
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  if (lit->IsSmi()) {
    __ SafePush(Immediate(lit));
  } else {
    __ push(Immediate(lit));
  }
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  ASSERT(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), lit);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  ASSERT(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ mov(Operand(esp, 0), reg);
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  ASSERT(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}
void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  ASSERT(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ mov(result_register(), isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ mov(result_register(), isolate()->factory()->false_value());
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ push(Immediate(isolate()->factory()->true_value()));
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ push(Immediate(isolate()->factory()->false_value()));
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  ASSERT(materialize_true == true_label_);
  ASSERT(materialize_false == false_label_);
}


void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Handle<Object> value = flag
      ? isolate()->factory()->true_value()
      : isolate()->factory()->false_value();
  __ mov(result_register(), value);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Handle<Object> value = flag
      ? isolate()->factory()->true_value()
      : isolate()->factory()->false_value();
  __ push(Immediate(value));
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ test(result_register(), result_register());
  // The stub returns nonzero for true.
  Split(not_zero, if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cc,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ j(cc, if_true);
  } else if (if_true == fall_through) {
    __ j(NegateCondition(cc), if_false);
  } else {
    __ j(cc, if_true);
    __ jmp(if_false);
  }
}
MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  ASSERT(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return Operand(ebp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  MemOperand location = VarOperand(var, dest);
  __ mov(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  ASSERT(!scratch0.is(src));
  ASSERT(!scratch0.is(scratch1));
  ASSERT(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ mov(location, src);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    int offset = Context::SlotOffset(var->index());
    ASSERT(!scratch0.is(esi) && !src.is(esi) && !scratch1.is(esi));
    __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
  }
}
void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ jmp(&skip, Label::kNear);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ cmp(eax, isolate()->factory()->true_value());
    Split(equal, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current context.
  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
    __ cmp(ebx, isolate()->factory()->with_context_map());
    __ Check(not_equal, kDeclarationInWithContext);
    __ cmp(ebx, isolate()->factory()->catch_context_map());
    __ Check(not_equal, kDeclarationInCatchContext);
  }
}
void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(), zone());
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ mov(StackOperand(variable),
               Immediate(isolate()->factory()->the_hole_value()));
      }
      break;

    case Variable::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ mov(ContextOperand(esi, variable->index()),
               Immediate(isolate()->factory()->the_hole_value()));
        // No write barrier since the hole value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ push(esi);
      __ push(Immediate(variable->name()));
      // VariableDeclaration nodes are always introduced in one of four modes.
      ASSERT(IsDeclaredVariableMode(mode));
      PropertyAttributes attr =
          IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
      __ push(Immediate(Smi::FromInt(attr)));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ push(Immediate(isolate()->factory()->the_hole_value()));
      } else {
        __ push(Immediate(Smi::FromInt(0)));  // Indicates no initial value.
      }
      __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);
      break;
    }
  }
}
void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script());
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ mov(StackOperand(variable), result_register());
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ mov(ContextOperand(esi, variable->index()), result_register());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(esi,
                                Context::SlotOffset(variable->index()),
                                result_register(),
                                ecx,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ push(esi);
      __ push(Immediate(variable->name()));
      __ push(Immediate(Smi::FromInt(NONE)));
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kHiddenDeclareContextSlot, 4);
      break;
    }
  }
}
void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  Variable* variable = declaration->proxy()->var();
  ASSERT(variable->location() == Variable::CONTEXT);
  ASSERT(variable->interface()->IsFrozen());

  Comment cmnt(masm_, "[ ModuleDeclaration");
  EmitDebugCheckDeclarationContext(variable);

  // Load instance object.
  __ LoadContext(eax, scope_->ContextChainLength(scope_->GlobalScope()));
  __ mov(eax, ContextOperand(eax, variable->interface()->Index()));
  __ mov(eax, ContextOperand(eax, Context::EXTENSION_INDEX));

  // Assign it.
  __ mov(ContextOperand(esi, variable->index()), eax);
  // We know that we have written a module, which is not a smi.
  __ RecordWriteContextSlot(esi,
                            Context::SlotOffset(variable->index()),
                            eax,
                            ecx,
                            kDontSaveFPRegs,
                            EMIT_REMEMBERED_SET,
                            OMIT_SMI_CHECK);
  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);

  // Traverse into body.
  Visit(declaration->module());
}


void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      // TODO(rossberg)
      break;

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      // TODO(rossberg)
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::LOOKUP:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
  // TODO(rossberg)
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ push(esi);  // The context is the first argument.
  __ Push(pairs);
  __ Push(Smi::FromInt(DeclareGlobalsFlags()));
  __ CallRuntime(Runtime::kHiddenDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kHiddenDeclareModules, 1);
  // Return value is ignored.
}
void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ mov(edx, Operand(esp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ mov(ecx, edx);
      __ or_(ecx, eax);
      patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);

      __ cmp(edx, eax);
      __ j(not_equal, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ jmp(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT);
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ jmp(&skip, Label::kNear);
    PrepareForBailout(clause, TOS_REG);
    __ cmp(eax, isolate()->factory()->true_value());
    __ j(not_equal, &next_test);
    __ Drop(1);
    __ jmp(clause->body_target());
    __ bind(&skip);

    __ test(eax, eax);
    __ j(not_equal, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ jmp(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ jmp(nested_statement.break_label());
  } else {
    __ jmp(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}
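// Illustrative behavior of the code above (an editorial inference from the
// emitted sequence): for `switch (v) { case a: ...; default: ...; case b: ... }`
// every non-default label is compared against v with '===' semantics in
// source order, the default clause is only remembered, and control falls
// back to it (or past the switch) after all tests fail.
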
void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  int slot = stmt->ForInFeedbackSlot();

  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop.  See ECMA-262 version 5, section 12.6.4.
  VisitForAccumulatorValue(stmt->enumerable());
  __ cmp(eax, isolate()->factory()->undefined_value());
  __ j(equal, &exit);
  __ cmp(eax, isolate()->factory()->null_value());
  __ j(equal, &exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(eax, &convert, Label::kNear);
  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
  __ j(above_equal, &done_convert, Label::kNear);
  __ bind(&convert);
  __ push(eax);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);
  __ push(eax);

  // Check for proxies.
  Label call_runtime, use_cache, fixed_array;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CmpObjectType(eax, LAST_JS_PROXY_TYPE, ecx);
  __ j(below_equal, &call_runtime);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(&call_runtime);

  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  __ jmp(&use_cache, Label::kNear);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(eax);
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
         isolate()->factory()->meta_map());
  __ j(not_equal, &fixed_array);

  // We got a map in register eax. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(edx, eax);
  __ cmp(edx, Immediate(Smi::FromInt(0)));
  __ j(equal, &no_descriptors);

  __ LoadInstanceDescriptors(eax, ecx);
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheOffset));
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(eax);  // Map.
  __ push(ecx);  // Enumeration cache.
  __ push(edx);  // Number of valid entries for the map in the enum cache.
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ add(esp, Immediate(kPointerSize));
  __ jmp(&exit);

  // We got a fixed array in register eax. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  Handle<Object> feedback = Handle<Object>(
      Smi::FromInt(TypeFeedbackInfo::kForInFastCaseMarker),
      isolate());
  StoreFeedbackVectorSlot(slot, feedback);

  // No need for a write barrier, we are storing a Smi in the feedback vector.
  __ LoadHeapObject(ebx, FeedbackVector());
  __ mov(FieldOperand(ebx, FixedArray::OffsetOfElementAt(slot)),
         Immediate(Smi::FromInt(TypeFeedbackInfo::kForInSlowCaseMarker)));

  __ mov(ebx, Immediate(Smi::FromInt(1)));  // Smi indicates slow check
  __ mov(ecx, Operand(esp, 0 * kPointerSize));  // Get enumerated object
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CmpObjectType(ecx, LAST_JS_PROXY_TYPE, ecx);
  __ j(above, &non_proxy);
  __ Move(ebx, Immediate(Smi::FromInt(0)));  // Zero indicates proxy
  __ bind(&non_proxy);
  __ push(ebx);  // Smi
  __ push(eax);  // Array
  __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
  __ push(eax);  // Fixed array length (as smi).
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  __ mov(eax, Operand(esp, 0 * kPointerSize));  // Get the current index.
  __ cmp(eax, Operand(esp, 1 * kPointerSize));  // Compare to the array length.
  __ j(above_equal, loop_statement.break_label());

  // Get the current entry of the array into register ebx.
  __ mov(ebx, Operand(esp, 2 * kPointerSize));
  __ mov(ebx, FieldOperand(ebx, eax, times_2, FixedArray::kHeaderSize));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register edx.
  __ mov(edx, Operand(esp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ mov(ecx, Operand(esp, 4 * kPointerSize));
  __ cmp(edx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ j(equal, &update_each, Label::kNear);

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  ASSERT(Smi::FromInt(0) == 0);
  __ test(edx, edx);
  __ j(zero, &update_each);

  // Convert the entry to a string or null if it isn't a property
  // anymore. If the property has been removed while iterating, we
  // just skip it.
  __ push(ecx);  // Enumerable.
  __ push(ebx);  // Current entry.
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ test(eax, eax);
  __ j(equal, loop_statement.continue_label());
  __ mov(ebx, eax);

  // Update the 'each' property or variable from the possibly filtered
  // entry in register ebx.
  __ bind(&update_each);
  __ mov(result_register(), ebx);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each());
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing the
  // index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ jmp(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ add(esp, Immediate(5 * kPointerSize));

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}
void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
  Comment cmnt(masm_, "[ ForOfStatement");
  SetStatementPosition(stmt);

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // var iterator = iterable[@@iterator]()
  VisitForAccumulatorValue(stmt->assign_iterator());

  // As with for-in, skip the loop if the iterator is null or undefined.
  __ CompareRoot(eax, Heap::kUndefinedValueRootIndex);
  __ j(equal, loop_statement.break_label());
  __ CompareRoot(eax, Heap::kNullValueRootIndex);
  __ j(equal, loop_statement.break_label());

  // Convert the iterator to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(eax, &convert);
  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
  __ j(above_equal, &done_convert);
  __ bind(&convert);
  __ push(eax);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);

  // Loop entry.
  __ bind(loop_statement.continue_label());

  // result = iterator.next()
  VisitForEffect(stmt->next_result());

  // if (result.done) break;
  Label result_not_done;
  VisitForControl(stmt->result_done(),
                  loop_statement.break_label(),
                  &result_not_done,
                  &result_not_done);
  __ bind(&result_not_done);

  // each = result.value
  VisitForEffect(stmt->assign_each());

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Check stack before looping.
  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
  EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
  __ jmp(loop_statement.continue_label());

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}
void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(info->strict_mode(), info->is_generator());
    __ mov(ebx, Immediate(info));
    __ CallStub(&stub);
  } else {
    __ push(esi);
    __ push(Immediate(info));
    __ push(Immediate(pretenure
                      ? isolate()->factory()->true_value()
                      : isolate()->factory()->false_value()));
    __ CallRuntime(Runtime::kHiddenNewClosure, 3);
  }
  context()->Plug(eax);
}


void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}
void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register context = esi;
  Register temp = edx;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
               Immediate(0));
        __ j(not_equal, slow);
      }
      // Load next context in chain.
      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering esi.
      context = temp;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.  If we have reached an eval scope, we check
    // all extensions from this point.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s != NULL && s->is_eval_scope()) {
    // Loop up the context chain.  There is no frame effect so it is
    // safe to use raw labels here.
    Label next, fast;
    if (!context.is(temp)) {
      __ mov(temp, context);
    }
    __ bind(&next);
    // Terminate at native context.
    __ cmp(FieldOperand(temp, HeapObject::kMapOffset),
           Immediate(isolate()->factory()->native_context_map()));
    __ j(equal, &fast, Label::kNear);
    // Check that extension is NULL.
    __ cmp(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
    __ j(not_equal, slow);
    // Load next context in chain.
    __ mov(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
    __ jmp(&next);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use a global
  // load IC call.
  __ mov(edx, GlobalObjectOperand());
  __ mov(ecx, var->name());
  ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
      ? NOT_CONTEXTUAL
      : CONTEXTUAL;
  CallLoadIC(mode);
}
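// Illustrative example for the walk above (an editorial assumption, not
// original commentary): in
//
//   function outer() { eval(str); function inner() { return x; } }
//
// a load of x from inner() cannot be fully resolved statically, because the
// eval may have introduced a binding for x; the emitted code therefore
// checks that every intervening context has no extension object before
// falling through to the fast global load.
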
MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  ASSERT(var->IsContextSlot());
  Register context = esi;
  Register temp = ebx;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
               Immediate(0));
        __ j(not_equal, slow);
      }
      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering esi.
      context = temp;
    }
  }
  // Check that last extension is NULL.
  __ cmp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
  __ j(not_equal, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return an esi-based operand (the write barrier cannot be allowed to
  // destroy the esi register).
  return ContextOperand(context, var->index());
}
void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
                                                  TypeofState typeof_state,
                                                  Label* slow,
                                                  Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables.  Eval is used a lot without
  // introducing variables.  In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
    __ jmp(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ mov(eax, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ cmp(eax, isolate()->factory()->the_hole_value());
      __ j(not_equal, done);
      if (local->mode() == CONST_LEGACY) {
        __ mov(eax, isolate()->factory()->undefined_value());
      } else {  // LET || CONST
        __ push(Immediate(var->name()));
        __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
      }
    }
    __ jmp(done);
  }
}
void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      // Use inline caching. Variable name is passed in ecx and the global
      // object in edx.
      __ mov(edx, GlobalObjectOperand());
      __ mov(ecx, var->name());
      CallLoadIC(CONTEXTUAL);
      context()->Plug(eax);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP.
        // always holds.
        ASSERT(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The check
        // can be skipped in the following situation: we have a LET or CONST
        // binding in harmony mode, both the Variable and the VariableProxy have
        // the same declaration scope (i.e. they are both in global code, in the
        // same function or in the same eval code) and the VariableProxy is in
        // the source physically located after the initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
          ASSERT(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST_LEGACY &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          Label done;
          GetVar(eax, var);
          __ cmp(eax, isolate()->factory()->the_hole_value());
          __ j(not_equal, &done, Label::kNear);
          if (var->mode() == LET || var->mode() == CONST) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            __ push(Immediate(var->name()));
            __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
          } else {
            // Uninitialized const bindings outside of harmony mode are unholed.
            ASSERT(var->mode() == CONST_LEGACY);
            __ mov(eax, isolate()->factory()->undefined_value());
          }
          __ bind(&done);
          context()->Plug(eax);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
      __ bind(&slow);
      __ push(esi);  // Context.
      __ push(Immediate(var->name()));
      __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);
      __ bind(&done);
      context()->Plug(eax);
      break;
    }
  }
}
void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // edi = JS function.
  // ecx = literals array.
  // ebx = regexp literal.
  // eax = regexp literal clone.
  __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ mov(ebx, FieldOperand(ecx, literal_offset));
  __ cmp(ebx, isolate()->factory()->undefined_value());
  __ j(not_equal, &materialized, Label::kNear);

  // Create regexp literal using runtime function
  // Result will be in eax.
  __ push(ecx);
  __ push(Immediate(Smi::FromInt(expr->literal_index())));
  __ push(Immediate(expr->pattern()));
  __ push(Immediate(expr->flags()));
  __ CallRuntime(Runtime::kHiddenMaterializeRegExpLiteral, 4);
  __ mov(ebx, eax);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ Allocate(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ push(ebx);
  __ push(Immediate(Smi::FromInt(size)));
  __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);
  __ pop(ebx);

  __ bind(&allocated);
  // Copy the content into the newly allocated memory.
  // (Unroll copy loop once for better throughput).
  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
    __ mov(edx, FieldOperand(ebx, i));
    __ mov(ecx, FieldOperand(ebx, i + kPointerSize));
    __ mov(FieldOperand(eax, i), edx);
    __ mov(FieldOperand(eax, i + kPointerSize), ecx);
  }
  if ((size % (2 * kPointerSize)) != 0) {
    __ mov(edx, FieldOperand(ebx, size - kPointerSize));
    __ mov(FieldOperand(eax, size - kPointerSize), edx);
  }
  context()->Plug(eax);
}
void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ push(Immediate(isolate()->factory()->null_value()));
  } else {
    VisitForStackValue(expression);
  }
}
void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  expr->BuildConstantProperties(isolate());
  Handle<FixedArray> constant_properties = expr->constant_properties();
  int flags = expr->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  flags |= expr->has_function()
      ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
  int properties_count = constant_properties->length() / 2;
  if (expr->may_store_doubles() || expr->depth() > 1 || Serializer::enabled() ||
      flags != ObjectLiteral::kFastElements ||
      properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
    __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ push(FieldOperand(edi, JSFunction::kLiteralsOffset));
    __ push(Immediate(Smi::FromInt(expr->literal_index())));
    __ push(Immediate(constant_properties));
    __ push(Immediate(Smi::FromInt(flags)));
    __ CallRuntime(Runtime::kHiddenCreateObjectLiteral, 4);
  } else {
    __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ mov(eax, FieldOperand(edi, JSFunction::kLiteralsOffset));
    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
    __ mov(ecx, Immediate(constant_properties));
    __ mov(edx, Immediate(Smi::FromInt(flags)));
    FastCloneShallowObjectStub stub(properties_count);
    __ CallStub(&stub);
  }

  // If result_saved is true the result is on top of the stack.  If
  // result_saved is false the result is in eax.
  bool result_saved = false;

  // Mark all computed expressions that are bound to a key that
  // is shadowed by a later occurrence of the same key. For the
  // marked expressions, no store code is emitted.
  expr->CalculateEmitStore(zone());

  AccessorTable accessor_table(zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(eax);  // Save result on the stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        ASSERT(!CompileTimeValue::IsCompileTimeValue(value));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            __ mov(ecx, Immediate(key->value()));
            __ mov(edx, Operand(esp, 0));
            CallStoreIC(key->LiteralFeedbackId());
            PrepareForBailoutForId(key->id(), NO_REGISTERS);
          } else {
            VisitForEffect(value);
          }
          break;
        }
        __ push(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ push(Immediate(Smi::FromInt(NONE)));  // PropertyAttributes
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        __ push(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ CallRuntime(Runtime::kSetPrototype, 2);
        } else {
          __ Drop(2);
        }
        break;
      case ObjectLiteral::Property::GETTER:
        accessor_table.lookup(key)->second->getter = value;
        break;
      case ObjectLiteral::Property::SETTER:
        accessor_table.lookup(key)->second->setter = value;
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ push(Operand(esp, 0));  // Duplicate receiver.
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ push(Immediate(Smi::FromInt(NONE)));
    __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
  }

  if (expr->has_function()) {
    ASSERT(result_saved);
    __ push(Operand(esp, 0));
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}
1733 void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
1734 Comment cmnt(masm_, "[ ArrayLiteral");
1736 expr->BuildConstantElements(isolate());
1737 int flags = expr->depth() == 1
1738 ? ArrayLiteral::kShallowElements
1739 : ArrayLiteral::kNoFlags;
1741 ZoneList<Expression*>* subexprs = expr->values();
1742 int length = subexprs->length();
1743 Handle<FixedArray> constant_elements = expr->constant_elements();
1744 ASSERT_EQ(2, constant_elements->length());
1745 ElementsKind constant_elements_kind =
1746 static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
1747 bool has_constant_fast_elements =
1748 IsFastObjectElementsKind(constant_elements_kind);
1749 Handle<FixedArrayBase> constant_elements_values(
1750 FixedArrayBase::cast(constant_elements->get(1)));
1752 AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
1753 if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
1754 // If the only customer of allocation sites is transitioning, then
1755 // we can turn it off if we don't have anywhere else to transition to.
1756 allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
1759 Heap* heap = isolate()->heap();
1760 if (has_constant_fast_elements &&
1761 constant_elements_values->map() == heap->fixed_cow_array_map()) {
1762 // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
1763 // change, so it's possible to specialize the stub in advance.
1764 __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
1765 __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1766 __ mov(eax, FieldOperand(ebx, JSFunction::kLiteralsOffset));
1767 __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
1768 __ mov(ecx, Immediate(constant_elements));
1769 FastCloneShallowArrayStub stub(
1770 FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
1771 allocation_site_mode,
1774 } else if (expr->depth() > 1 || Serializer::enabled() ||
1775 length > FastCloneShallowArrayStub::kMaximumClonedLength) {
1776 __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1777 __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
1778 __ push(Immediate(Smi::FromInt(expr->literal_index())));
1779 __ push(Immediate(constant_elements));
1780 __ push(Immediate(Smi::FromInt(flags)));
1781 __ CallRuntime(Runtime::kHiddenCreateArrayLiteral, 4);
1783 ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
1784 FLAG_smi_only_arrays);
1785 FastCloneShallowArrayStub::Mode mode =
1786 FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
1788 // If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
1789 // change, so it's possible to specialize the stub in advance.
1790 if (has_constant_fast_elements) {
1791 mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
1794 __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
1795 __ mov(eax, FieldOperand(ebx, JSFunction::kLiteralsOffset));
1796 __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
1797 __ mov(ecx, Immediate(constant_elements));
1798 FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
1802 bool result_saved = false; // Is the result saved to the stack?
1804 // Emit code to evaluate all the non-constant subexpressions and to store
1805 // them into the newly cloned array.
1806 for (int i = 0; i < length; i++) {
1807 Expression* subexpr = subexprs->at(i);
1808 // If the subexpression is a literal or a simple materialized literal it
1809 // is already set in the cloned array.
1810 if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
1812 if (!result_saved) {
1813 __ push(eax); // array literal.
1814 __ push(Immediate(Smi::FromInt(expr->literal_index())));
1815 result_saved = true;
1817 VisitForAccumulatorValue(subexpr);
    if (IsFastObjectElementsKind(constant_elements_kind)) {
      // Fast-case array literal with ElementsKind of FAST_*_ELEMENTS: such
      // elements cannot transition, so we do not need to call the runtime
      // stub to store them.
1822 int offset = FixedArray::kHeaderSize + (i * kPointerSize);
1823 __ mov(ebx, Operand(esp, kPointerSize)); // Copy of array literal.
1824 __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
1825 // Store the subexpression value in the array's elements.
1826 __ mov(FieldOperand(ebx, offset), result_register());
1827 // Update the write barrier for the array store.
      __ RecordWriteField(ebx, offset, result_register(), ecx,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          INLINE_SMI_CHECK);
    } else {
      // Store the subexpression value in the array's elements.
      __ mov(ecx, Immediate(Smi::FromInt(i)));
      StoreArrayLiteralElementStub stub;
      __ CallStub(&stub);
    }
    PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
  }

  if (result_saved) {
    __ add(esp, Immediate(kPointerSize));  // literal index
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}
1851 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1852 ASSERT(expr->target()->IsValidLeftHandSide());
  Comment cmnt(masm_, "[ Assignment");

  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* property = expr->target()->AsProperty();
  if (property != NULL) {
    assign_type = (property->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }
  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in edx.
        VisitForStackValue(property->obj());
        __ mov(edx, Operand(esp, 0));
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case KEYED_PROPERTY: {
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ mov(edx, Operand(esp, kPointerSize));  // Object.
        __ mov(ecx, Operand(esp, 0));             // Key.
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
    }
  }
  // For compound assignments we need another deoptimization point after the
  // variable/property load.
  if (expr->is_compound()) {
    AccumulatorValueContext result_context(this);
    { AccumulatorValueContext left_operand_context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), TOS_REG);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(property->LoadId(), TOS_REG);
          break;
      }
    }

    Token::Value op = expr->binary_op();
    __ push(eax);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
        ? OVERWRITE_RIGHT
        : NO_OVERWRITE;
    SetSourcePosition(expr->position() + 1);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            mode,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op, mode);
    }

    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), TOS_REG);
  } else {
    VisitForAccumulatorValue(expr->value());
  }
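  // For a compound assignment such as "x += y", the code above has loaded x,
  // pushed it, evaluated y into eax, and combined the two; the switch below
  // stores the result back into the target of the assignment.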
  // Record source position before possible IC call.
  SetSourcePosition(expr->position());

  // Store the value.
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(eax);
      break;
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}
1961 void FullCodeGenerator::VisitYield(Yield* expr) {
1962 Comment cmnt(masm_, "[ Yield");
1963 // Evaluate yielded value first; the initial iterator definition depends on
1964 // this. It stays on the stack while we update the iterator.
1965 VisitForStackValue(expr->expression());
1967 switch (expr->yield_kind()) {
    case Yield::SUSPEND:
      // Pop value from top-of-stack slot; box result into result register.
      EmitCreateIteratorResult(false);
      __ push(result_register());
      // Fall through.
    case Yield::INITIAL: {
      Label suspend, continuation, post_runtime, resume;
      __ jmp(&suspend);

      __ bind(&continuation);
      __ jmp(&resume);

      __ bind(&suspend);
1982 VisitForAccumulatorValue(expr->generator_object());
1983 ASSERT(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1984 __ mov(FieldOperand(eax, JSGeneratorObject::kContinuationOffset),
1985 Immediate(Smi::FromInt(continuation.pos())));
      __ mov(FieldOperand(eax, JSGeneratorObject::kContextOffset), esi);
      __ mov(ecx, esi);
      __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
                          kDontSaveFPRegs);
      __ lea(ebx, Operand(ebp, StandardFrameConstants::kExpressionsOffset));
      __ cmp(esp, ebx);
      __ j(equal, &post_runtime);
1993 __ push(eax); // generator object
1994 __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
1995 __ mov(context_register(),
1996 Operand(ebp, StandardFrameConstants::kContextOffset));
1997 __ bind(&post_runtime);
1998 __ pop(result_register());
1999 EmitReturnSequence();
      __ bind(&resume);
      context()->Plug(result_register());
      break;
    }
2006 case Yield::FINAL: {
2007 VisitForAccumulatorValue(expr->generator_object());
2008 __ mov(FieldOperand(result_register(),
2009 JSGeneratorObject::kContinuationOffset),
2010 Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
2011 // Pop value from top-of-stack slot, box result into result register.
2012 EmitCreateIteratorResult(true);
      EmitUnwindBeforeReturn();
      EmitReturnSequence();
      break;
    }
2018 case Yield::DELEGATING: {
2019 VisitForStackValue(expr->generator_object());
2021 // Initial stack layout is as follows:
2022 // [sp + 1 * kPointerSize] iter
2023 // [sp + 0 * kPointerSize] g
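
      // The code below corresponds roughly to this desugaring of
      // "yield* iter" (a sketch; exceptions are additionally routed to
      // iter.throw through the l_catch handler installed below):
      //
      //   var received = undefined;
      //   while (true) {
      //     var result = iter.next(received);  // l_next / l_call
      //     if (result.done) break;            // l_loop
      //     received = yield result;           // l_try / l_suspend
      //   }
      //   // result.value is the value of the whole yield* expression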
2025 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
2026 Label l_next, l_call, l_loop;
2027 // Initial send value is undefined.
      __ mov(eax, isolate()->factory()->undefined_value());
      __ jmp(&l_next);

      // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
      __ bind(&l_catch);
      handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
2034 __ mov(ecx, isolate()->factory()->throw_string()); // "throw"
2035 __ push(ecx); // "throw"
2036 __ push(Operand(esp, 2 * kPointerSize)); // iter
      __ push(eax);                                      // exception
      __ jmp(&l_call);

      // try { received = %yield result }
      // Shuffle the received result above a try handler and yield it without
      // re-boxing.
      __ bind(&l_try);
      __ pop(eax);                                       // result
      __ PushTryHandler(StackHandler::CATCH, expr->index());
      const int handler_size = StackHandlerConstants::kSize;
      __ push(eax);                                      // result
      __ jmp(&l_suspend);
      __ bind(&l_continuation);
      __ jmp(&l_resume);
      __ bind(&l_suspend);
      const int generator_object_depth = kPointerSize + handler_size;
      __ mov(eax, Operand(esp, generator_object_depth));
      __ push(eax);                                      // g
      ASSERT(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
      __ mov(FieldOperand(eax, JSGeneratorObject::kContinuationOffset),
             Immediate(Smi::FromInt(l_continuation.pos())));
      __ mov(FieldOperand(eax, JSGeneratorObject::kContextOffset), esi);
      __ mov(ecx, esi);
      __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
                          kDontSaveFPRegs);
      __ CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject, 1);
2063 __ mov(context_register(),
2064 Operand(ebp, StandardFrameConstants::kContextOffset));
2065 __ pop(eax); // result
2066 EmitReturnSequence();
      __ bind(&l_resume);                                // received in eax
      __ PopTryHandler();

      // receiver = iter; f = iter.next; arg = received;
      __ bind(&l_next);
      __ mov(ecx, isolate()->factory()->next_string());  // "next"
      __ push(ecx);                                      // "next"
      __ push(Operand(esp, 2 * kPointerSize));           // iter
      __ push(eax);                                      // received
      // result = receiver[f](arg);
      __ bind(&l_call);
      __ mov(edx, Operand(esp, kPointerSize));
      Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
      CallIC(ic, TypeFeedbackId::None());
      __ mov(edi, eax);
      __ mov(Operand(esp, 2 * kPointerSize), edi);
      CallFunctionStub stub(1, CALL_AS_METHOD);
      __ CallStub(&stub);

      __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
      __ Drop(1);  // The function is still on the stack; drop it.
      // if (!result.done) goto l_try;
      __ bind(&l_loop);
      __ push(eax);                                      // save result
      __ mov(edx, eax);                                  // result
      __ mov(ecx, isolate()->factory()->done_string());  // "done"
      CallLoadIC(NOT_CONTEXTUAL);                        // result.done in eax
      Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
      CallIC(bool_ic);
      __ test(eax, eax);
      __ j(zero, &l_try);

      // result.value
      __ pop(edx);                                        // result
      __ mov(ecx, isolate()->factory()->value_string());  // "value"
      CallLoadIC(NOT_CONTEXTUAL);                         // result.value in eax
      context()->DropAndPlug(2, eax);                     // drop iter and g
      break;
    }
  }
}
void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
    Expression *value,
    JSGeneratorObject::ResumeMode resume_mode) {
  // The value stays in eax, and is ultimately read by the resumed generator,
  // as if CallRuntime(Runtime::kHiddenSuspendJSGeneratorObject) returned it.
  // Or it is read to throw the value when the resumed generator is already
  // closed. ebx will hold the generator object until the activation has been
  // resumed.
  VisitForStackValue(generator);
  VisitForAccumulatorValue(value);
  __ pop(ebx);

  // Check generator state.
2124 Label wrong_state, closed_state, done;
2125 STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0);
2126 STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0);
2127 __ cmp(FieldOperand(ebx, JSGeneratorObject::kContinuationOffset),
2128 Immediate(Smi::FromInt(0)));
2129 __ j(equal, &closed_state);
2130 __ j(less, &wrong_state);
2132 // Load suspended function and context.
2133 __ mov(esi, FieldOperand(ebx, JSGeneratorObject::kContextOffset));
2134 __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset));
  // Push receiver.
  __ push(FieldOperand(ebx, JSGeneratorObject::kReceiverOffset));

  // Push holes for arguments to generator function.
  __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(edx,
         FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
2143 __ mov(ecx, isolate()->factory()->the_hole_value());
  Label push_argument_holes, push_frame;
  __ bind(&push_argument_holes);
  __ sub(edx, Immediate(Smi::FromInt(1)));
  __ j(carry, &push_frame);
  __ push(ecx);
  __ jmp(&push_argument_holes);
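  // edx holds the formal parameter count as a smi, so the loop above counts
  // it down by Smi::FromInt(1) and pushes one hole (ecx) per parameter;
  // resuming function* g(a, b) {}, for example, always pushes two holes,
  // however g was called.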
2151 // Enter a new JavaScript frame, and initialize its slots as they were when
2152 // the generator was suspended.
  Label resume_frame;
  __ bind(&push_frame);
  __ call(&resume_frame);
  __ jmp(&done);
  __ bind(&resume_frame);
  __ push(ebp);  // Caller's frame pointer.
  __ mov(ebp, esp);
  __ push(esi);  // Callee's context.
2161 __ push(edi); // Callee's JS Function.
  // Load the operand stack size.
  __ mov(edx, FieldOperand(ebx, JSGeneratorObject::kOperandStackOffset));
  __ mov(edx, FieldOperand(edx, FixedArray::kLengthOffset));
  __ SmiUntag(edx);

  // If we are sending a value and there is no operand stack, we can jump back
  // in directly.
  if (resume_mode == JSGeneratorObject::NEXT) {
    Label slow_resume;
    __ cmp(edx, Immediate(0));
    __ j(not_zero, &slow_resume);
    __ mov(edx, FieldOperand(edi, JSFunction::kCodeEntryOffset));
    __ mov(ecx, FieldOperand(ebx, JSGeneratorObject::kContinuationOffset));
    __ SmiUntag(ecx);
    __ add(edx, ecx);
    __ mov(FieldOperand(ebx, JSGeneratorObject::kContinuationOffset),
           Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
    __ jmp(edx);
    __ bind(&slow_resume);
  }
2184 // Otherwise, we push holes for the operand stack and call the runtime to fix
2185 // up the stack and the handlers.
  Label push_operand_holes, call_resume;
  __ bind(&push_operand_holes);
  __ sub(edx, Immediate(1));
  __ j(carry, &call_resume);
  __ push(ecx);
  __ jmp(&push_operand_holes);
  __ bind(&call_resume);
  __ push(ebx);
  __ push(result_register());
  __ Push(Smi::FromInt(resume_mode));
  __ CallRuntime(Runtime::kHiddenResumeJSGeneratorObject, 3);
2197 // Not reached: the runtime call returns elsewhere.
2198 __ Abort(kGeneratorFailedToResume);
  // Reached here when the generator is closed.
  __ bind(&closed_state);
  if (resume_mode == JSGeneratorObject::NEXT) {
    // Return a completed iterator result when the generator is closed.
    __ push(Immediate(isolate()->factory()->undefined_value()));
    // Pop value from top-of-stack slot; box result into result register.
    EmitCreateIteratorResult(true);
  } else {
    // Throw the provided value.
    __ push(eax);
    __ CallRuntime(Runtime::kHiddenThrow, 1);
  }
  __ jmp(&done);
  // Throw an error if we attempt to operate on a running generator.
  __ bind(&wrong_state);
  __ push(ebx);
  __ CallRuntime(Runtime::kHiddenThrowGeneratorStateError, 1);

  __ bind(&done);
  context()->Plug(result_register());
}
void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
  Label gc_required;
  Label allocated;

  Handle<Map> map(isolate()->native_context()->generator_result_map());

  __ Allocate(map->instance_size(), eax, ecx, edx, &gc_required, TAG_OBJECT);
  __ jmp(&allocated);
2233 __ bind(&gc_required);
2234 __ Push(Smi::FromInt(map->instance_size()));
2235 __ CallRuntime(Runtime::kHiddenAllocateInNewSpace, 1);
2236 __ mov(context_register(),
2237 Operand(ebp, StandardFrameConstants::kContextOffset));
  __ bind(&allocated);
  __ mov(ebx, map);
  __ pop(ecx);
  __ mov(edx, isolate()->factory()->ToBoolean(done));
2243 ASSERT_EQ(map->instance_size(), 5 * kPointerSize);
2244 __ mov(FieldOperand(eax, HeapObject::kMapOffset), ebx);
2245 __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
2246 isolate()->factory()->empty_fixed_array());
2247 __ mov(FieldOperand(eax, JSObject::kElementsOffset),
2248 isolate()->factory()->empty_fixed_array());
2249 __ mov(FieldOperand(eax, JSGeneratorObject::kResultValuePropertyOffset), ecx);
2250 __ mov(FieldOperand(eax, JSGeneratorObject::kResultDonePropertyOffset), edx);
  // Only the value field needs a write barrier, as the other values are in
  // the root set.
  __ RecordWriteField(eax, JSGeneratorObject::kResultValuePropertyOffset,
                      ecx, edx, kDontSaveFPRegs);
}
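
// The object built above is the iterator result record required by the
// iteration protocol; in JS terms this helper behaves roughly like
//   function createIteratorResult(value, done) {
//     return { value: value, done: done };
//   }
// with the map, properties, elements, and both fields stored by hand.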
2259 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2260 SetSourcePosition(prop->position());
2261 Literal* key = prop->key()->AsLiteral();
2262 ASSERT(!key->value()->IsSmi());
2263 __ mov(ecx, Immediate(key->value()));
  CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
}
2268 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2269 SetSourcePosition(prop->position());
2270 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
  CallIC(ic, prop->PropertyFeedbackId());
}
void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              OverwriteMode mode,
                                              Expression* left,
                                              Expression* right) {
  // Do combined smi check of the operands. Left operand is on the
  // stack. Right operand is in eax.
  Label smi_case, done, stub_call;
  __ pop(edx);
  __ mov(ecx, eax);
  __ or_(eax, edx);
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(eax, &smi_case, Label::kNear);
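  // The combined check above relies on smi tagging: with kSmiTag == 0 and
  // kSmiTagSize == 1, a value is a smi iff bit 0 is clear, so or-ing the two
  // operands leaves bit 0 set exactly when at least one of them is a heap
  // object, and a single EmitJumpIfSmi covers both.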
  __ bind(&stub_call);
  __ mov(eax, ecx);
  BinaryOpICStub stub(op, mode);
  CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
  patch_site.EmitPatchInfo();
  __ jmp(&done, Label::kNear);
  __ bind(&smi_case);
  __ mov(eax, edx);  // Copy left operand in case of a stub call.

  switch (op) {
    case Token::SAR:
      __ SmiUntag(ecx);
      __ sar_cl(eax);  // No checks of result necessary.
      __ and_(eax, Immediate(~kSmiTagMask));
      break;
    case Token::SHL: {
      Label result_ok;
      __ SmiUntag(eax);
      __ SmiUntag(ecx);
      __ shl_cl(eax);
      // Check that the *signed* result fits in a smi.
      __ cmp(eax, 0xc0000000);
      __ j(positive, &result_ok);
      __ SmiTag(eax);
      __ jmp(&stub_call);
      __ bind(&result_ok);
      __ SmiTag(eax);
      break;
    }
    case Token::SHR: {
      Label result_ok;
      __ SmiUntag(eax);
      __ SmiUntag(ecx);
      __ shr_cl(eax);
      // Check that the *unsigned* result fits in a smi; neither of the two
      // high-order bits may be set, or the value would not survive
      // re-tagging.
      __ test(eax, Immediate(0xc0000000));
      __ j(zero, &result_ok);
      __ SmiTag(eax);
      __ jmp(&stub_call);
      __ bind(&result_ok);
      __ SmiTag(eax);
      break;
    }
    case Token::ADD:
      __ add(eax, ecx);
      __ j(overflow, &stub_call);
      break;
    case Token::SUB:
      __ sub(eax, ecx);
      __ j(overflow, &stub_call);
      break;
    case Token::MUL: {
      __ SmiUntag(eax);
      __ imul(eax, ecx);
      __ j(overflow, &stub_call);
      // A zero result needs a sign check: 0 * -n must produce -0, which is
      // not representable as a smi.
      __ test(eax, eax);
      __ j(not_zero, &done, Label::kNear);
      __ mov(ebx, edx);
      __ or_(ebx, ecx);
      __ j(negative, &stub_call);
      break;
    }
    case Token::BIT_OR:
      __ or_(eax, ecx);
      break;
    case Token::BIT_AND:
      __ and_(eax, ecx);
      break;
    case Token::BIT_XOR:
      __ xor_(eax, ecx);
      break;
    default:
      UNREACHABLE();
  }

  __ bind(&done);
  context()->Plug(eax);
}
void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
                                     Token::Value op,
                                     OverwriteMode mode) {
  __ pop(edx);
  BinaryOpICStub stub(op, mode);
2375 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2376 CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
2377 patch_site.EmitPatchInfo();
  context()->Plug(eax);
}
2382 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2383 ASSERT(expr->IsValidLeftHandSide());
  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->AsProperty();
  if (prop != NULL) {
    assign_type = (prop->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }

  switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
2399 EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN);
      break;
    }
    case NAMED_PROPERTY: {
      __ push(eax);  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      __ mov(edx, eax);
      __ pop(eax);  // Restore value.
      __ mov(ecx, prop->key()->AsLiteral()->value());
      CallStoreIC();
      break;
    }
2412 case KEYED_PROPERTY: {
2413 __ push(eax); // Preserve value.
2414 VisitForStackValue(prop->obj());
2415 VisitForAccumulatorValue(prop->key());
2417 __ pop(edx); // Receiver.
2418 __ pop(eax); // Restore value.
2419 Handle<Code> ic = strict_mode() == SLOPPY
2420 ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
      CallIC(ic);
      break;
    }
  }
  context()->Plug(eax);
}
2430 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2431 Variable* var, MemOperand location) {
2432 __ mov(location, eax);
  if (var->IsContextSlot()) {
    __ mov(edx, eax);
    int offset = Context::SlotOffset(var->index());
    __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
  }
}
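
// Stack locals are scanned via the frame, but a context slot lives in a
// heap-allocated Context, so a store into one must be reported to the write
// barrier; here ecx holds the context and edx a copy of the stored value.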
2441 void FullCodeGenerator::EmitCallStoreContextSlot(
2442 Handle<String> name, StrictMode strict_mode) {
2443 __ push(eax); // Value.
2444 __ push(esi); // Context.
2445 __ push(Immediate(name));
2446 __ push(Immediate(Smi::FromInt(strict_mode)));
  __ CallRuntime(Runtime::kHiddenStoreContextSlot, 4);
}
void FullCodeGenerator::EmitVariableAssignment(Variable* var,
                                               Token::Value op) {
  if (var->IsUnallocated()) {
    // Global var, const, or let.
    __ mov(ecx, var->name());
    __ mov(edx, GlobalObjectOperand());
    CallStoreIC();

  } else if (op == Token::INIT_CONST_LEGACY) {
2460 // Const initializers need a write barrier.
2461 ASSERT(!var->IsParameter()); // No const parameters.
    if (var->IsLookupSlot()) {
      __ push(eax);
      __ push(esi);
      __ push(Immediate(var->name()));
      __ CallRuntime(Runtime::kHiddenInitializeConstContextSlot, 3);
    } else {
      ASSERT(var->IsStackLocal() || var->IsContextSlot());
      Label skip;
      MemOperand location = VarOperand(var, ecx);
2471 __ mov(edx, location);
2472 __ cmp(edx, isolate()->factory()->the_hole_value());
2473 __ j(not_equal, &skip, Label::kNear);
      EmitStoreToStackLocalOrContextSlot(var, location);
      __ bind(&skip);
    }

  } else if (var->mode() == LET && op != Token::INIT_LET) {
2479 // Non-initializing assignment to let variable needs a write barrier.
2480 if (var->IsLookupSlot()) {
2481 EmitCallStoreContextSlot(var->name(), strict_mode());
      ASSERT(var->IsStackAllocated() || var->IsContextSlot());
      Label assign;
      MemOperand location = VarOperand(var, ecx);
2486 __ mov(edx, location);
2487 __ cmp(edx, isolate()->factory()->the_hole_value());
2488 __ j(not_equal, &assign, Label::kNear);
2489 __ push(Immediate(var->name()));
      __ CallRuntime(Runtime::kHiddenThrowReferenceError, 1);
      __ bind(&assign);
      EmitStoreToStackLocalOrContextSlot(var, location);
    }

  } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
    // Assignment to var or initializing assignment to let/const in harmony
    // mode.
2498 if (var->IsLookupSlot()) {
2499 EmitCallStoreContextSlot(var->name(), strict_mode());
2501 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
2502 MemOperand location = VarOperand(var, ecx);
2503 if (generate_debug_code_ && op == Token::INIT_LET) {
2504 // Check for an uninitialized let binding.
2505 __ mov(edx, location);
2506 __ cmp(edx, isolate()->factory()->the_hole_value());
        __ Check(equal, kLetBindingReInitialization);
      }
      EmitStoreToStackLocalOrContextSlot(var, location);
    }
  }
  // Non-initializing assignments to consts are ignored.
}
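
// For example, a sloppy-mode legacy const reassignment such as
//   const x = 1; x = 2;
// reaches this function with a const-mode target and a non-initializing op,
// falls through every branch above, and is silently dropped.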
2516 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a named store IC.
  // eax    : value
  // esp[0] : receiver
2521 Property* prop = expr->target()->AsProperty();
2522 ASSERT(prop != NULL);
2523 ASSERT(prop->key()->AsLiteral() != NULL);
2525 // Record source code position before IC call.
2526 SetSourcePosition(expr->position());
  __ mov(ecx, prop->key()->AsLiteral()->value());
  __ pop(edx);
  CallStoreIC(expr->AssignmentFeedbackId());
  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(eax);
}
2535 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a keyed store IC.
  // eax               : value
  // esp[0]            : key
  // esp[kPointerSize] : receiver

  __ pop(ecx);  // Key.
  __ pop(edx);
2543 // Record source code position before IC call.
2544 SetSourcePosition(expr->position());
2545 Handle<Code> ic = strict_mode() == SLOPPY
2546 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2547 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2548 CallIC(ic, expr->AssignmentFeedbackId());
2550 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(eax);
}
2555 void FullCodeGenerator::VisitProperty(Property* expr) {
2556 Comment cmnt(masm_, "[ Property");
2557 Expression* key = expr->key();
2559 if (key->IsPropertyName()) {
2560 VisitForAccumulatorValue(expr->obj());
2561 __ mov(edx, result_register());
2562 EmitNamedPropertyLoad(expr);
2563 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
    context()->Plug(eax);
  } else {
    VisitForStackValue(expr->obj());
2567 VisitForAccumulatorValue(expr->key());
2568 __ pop(edx); // Object.
2569 __ mov(ecx, result_register()); // Key.
2570 EmitKeyedPropertyLoad(expr);
    context()->Plug(eax);
  }
}
2576 void FullCodeGenerator::CallIC(Handle<Code> code,
2577 TypeFeedbackId ast_id) {
  ic_total_count_++;
  __ call(code, RelocInfo::CODE_TARGET, ast_id);
}
2585 // Code common for calls using the IC.
2586 void FullCodeGenerator::EmitCallWithIC(Call* expr) {
2587 Expression* callee = expr->expression();
2588 ZoneList<Expression*>* args = expr->arguments();
2589 int arg_count = args->length();
2591 CallFunctionFlags flags;
2592 // Get the target function.
2593 if (callee->IsVariableProxy()) {
2594 { StackValueContext context(this);
2595 EmitVariableLoad(callee->AsVariableProxy());
      PrepareForBailout(callee, NO_REGISTERS);
    }
    // Push undefined as receiver. This is patched in the method prologue if it
2599 // is a sloppy mode method.
2600 __ push(Immediate(isolate()->factory()->undefined_value()));
    flags = NO_CALL_FUNCTION_FLAGS;
  } else {
    // Load the function from the receiver.
2604 ASSERT(callee->IsProperty());
2605 __ mov(edx, Operand(esp, 0));
2606 EmitNamedPropertyLoad(callee->AsProperty());
2607 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2608 // Push the target function under the receiver.
2609 __ push(Operand(esp, 0));
2610 __ mov(Operand(esp, kPointerSize), eax);
    flags = CALL_AS_METHOD;
  }

  // Load the arguments.
2615 { PreservePositionScope scope(masm()->positions_recorder());
2616 for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }

  // Record source position of the IC call.
2622 SetSourcePosition(expr->position());
2623 CallFunctionStub stub(arg_count, flags);
  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
  __ CallStub(&stub);
  RecordJSReturnSite(expr);
2628 // Restore context register.
2629 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  context()->DropAndPlug(1, eax);
}
2635 // Code common for calls using the IC.
void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
                                            Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);
2641 Expression* callee = expr->expression();
2642 ZoneList<Expression*>* args = expr->arguments();
2643 int arg_count = args->length();
2645 // Load the function from the receiver.
2646 ASSERT(callee->IsProperty());
2647 __ mov(edx, Operand(esp, 0));
  // Move the key into the right register for the keyed load IC.
  __ mov(ecx, eax);
  EmitKeyedPropertyLoad(callee->AsProperty());
2651 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2653 // Push the target function under the receiver.
2654 __ push(Operand(esp, 0));
2655 __ mov(Operand(esp, kPointerSize), eax);
2657 // Load the arguments.
2658 { PreservePositionScope scope(masm()->positions_recorder());
2659 for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }

  // Record source position of the IC call.
2665 SetSourcePosition(expr->position());
2666 CallFunctionStub stub(arg_count, CALL_AS_METHOD);
  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
  __ CallStub(&stub);
  RecordJSReturnSite(expr);
2671 // Restore context register.
2672 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  context()->DropAndPlug(1, eax);
}
2678 void FullCodeGenerator::EmitCallWithStub(Call* expr) {
2679 // Code common for calls using the call stub.
2680 ZoneList<Expression*>* args = expr->arguments();
2681 int arg_count = args->length();
2682 { PreservePositionScope scope(masm()->positions_recorder());
2683 for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }
  // Record source position for debugger.
2688 SetSourcePosition(expr->position());
2690 Handle<Object> uninitialized =
2691 TypeFeedbackInfo::UninitializedSentinel(isolate());
2692 StoreFeedbackVectorSlot(expr->CallFeedbackSlot(), uninitialized);
2693 __ LoadHeapObject(ebx, FeedbackVector());
2694 __ mov(edx, Immediate(Smi::FromInt(expr->CallFeedbackSlot())));
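  // CallFunctionStub with RECORD_CALL_TARGET expects the feedback vector in
  // ebx and the slot index in edx (set up above); the stub uses them to
  // record the call target for the optimizing compiler.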
2696 // Record call targets in unoptimized code.
2697 CallFunctionStub stub(arg_count, RECORD_CALL_TARGET);
  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
  __ CallStub(&stub);
2701 RecordJSReturnSite(expr);
2702 // Restore context register.
2703 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  context()->DropAndPlug(1, eax);
}
2708 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2709 // Push copy of the first argument or undefined if it doesn't exist.
2710 if (arg_count > 0) {
    __ push(Operand(esp, arg_count * kPointerSize));
  } else {
    __ push(Immediate(isolate()->factory()->undefined_value()));
  }
2716 // Push the receiver of the enclosing function.
2717 __ push(Operand(ebp, (2 + info_->scope()->num_parameters()) * kPointerSize));
2718 // Push the language mode.
2719 __ push(Immediate(Smi::FromInt(strict_mode())));
  // Push the start position of the scope the call resides in.
2722 __ push(Immediate(Smi::FromInt(scope()->start_position())));
2724 // Do the runtime call.
  __ CallRuntime(Runtime::kHiddenResolvePossiblyDirectEval, 5);
}
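
// For a direct eval such as eval('x + 1'), the resolved function may read
// and write bindings in the calling scope, which is why the receiver, the
// language mode, and the scope's start position are passed to the runtime
// above.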
void FullCodeGenerator::VisitCall(Call* expr) {
#ifdef DEBUG
  // We want to verify that RecordJSReturnSite gets called on all paths
  // through this function. Avoid early returns.
  expr->return_is_recorded_ = false;
#endif
2736 Comment cmnt(masm_, "[ Call");
2737 Expression* callee = expr->expression();
2738 Call::CallType call_type = expr->GetCallType(isolate());
2740 if (call_type == Call::POSSIBLY_EVAL_CALL) {
2741 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
2742 // to resolve the function we need to call and the receiver of the call.
2743 // Then we call the resolved function using the given arguments.
2744 ZoneList<Expression*>* args = expr->arguments();
2745 int arg_count = args->length();
2746 { PreservePositionScope pos_scope(masm()->positions_recorder());
2747 VisitForStackValue(callee);
2748 // Reserved receiver slot.
2749 __ push(Immediate(isolate()->factory()->undefined_value()));
2750 // Push the arguments.
      for (int i = 0; i < arg_count; i++) {
        VisitForStackValue(args->at(i));
      }

      // Push a copy of the function (found below the arguments) and
      // resolve eval.
      __ push(Operand(esp, (arg_count + 1) * kPointerSize));
2758 EmitResolvePossiblyDirectEval(arg_count);
2760 // The runtime call returns a pair of values in eax (function) and
2761 // edx (receiver). Touch up the stack with the right values.
2762 __ mov(Operand(esp, (arg_count + 0) * kPointerSize), edx);
      __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax);
    }
    // Record source position for debugger.
2766 SetSourcePosition(expr->position());
    CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
    __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
    __ CallStub(&stub);
2770 RecordJSReturnSite(expr);
2771 // Restore context register.
2772 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2773 context()->DropAndPlug(1, eax);
2775 } else if (call_type == Call::GLOBAL_CALL) {
2776 EmitCallWithIC(expr);
2778 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
2779 // Call to a lookup slot (dynamically introduced variable).
    VariableProxy* proxy = callee->AsVariableProxy();
    Label slow, done;
    { PreservePositionScope scope(masm()->positions_recorder());
2783 // Generate code for loading from variables potentially shadowed by
2784 // eval-introduced variables.
      EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
    }
    __ bind(&slow);
    // Call the runtime to find the function to call (returned in eax) and
    // the object holding it (returned in edx).
2790 __ push(context_register());
2791 __ push(Immediate(proxy->name()));
2792 __ CallRuntime(Runtime::kHiddenLoadContextSlot, 2);
2793 __ push(eax); // Function.
2794 __ push(edx); // Receiver.
2796 // If fast case code has been generated, emit code to push the function
2797 // and receiver and have the slow path jump around this code.
    if (done.is_linked()) {
      Label call;
      __ jmp(&call, Label::kNear);
      __ bind(&done);
      // Push function.
      __ push(eax);
      // The receiver is implicitly the global receiver. Indicate this by
      // passing undefined to the call function stub.
      __ push(Immediate(isolate()->factory()->undefined_value()));
      __ bind(&call);
    }

    // The receiver is either the global receiver or an object found by
    // LoadContextSlot.
    EmitCallWithStub(expr);
2814 } else if (call_type == Call::PROPERTY_CALL) {
2815 Property* property = callee->AsProperty();
2816 { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(property->obj());
    }
    if (property->key()->IsPropertyName()) {
      EmitCallWithIC(expr);
    } else {
      EmitKeyedCallWithIC(expr, property->key());
    }
  } else {
    ASSERT(call_type == Call::OTHER_CALL);
    // Call to an arbitrary expression not handled specially above.
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(callee);
    }
    __ push(Immediate(isolate()->factory()->undefined_value()));
    // Emit function call.
    EmitCallWithStub(expr);
  }
#ifdef DEBUG
  // RecordJSReturnSite should have been called.
  ASSERT(expr->return_is_recorded_);
#endif
}
2843 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2844 Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push constructor on the stack.  If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  VisitForStackValue(expr->expression());
2854 // Push the arguments ("left-to-right") on the stack.
2855 ZoneList<Expression*>* args = expr->arguments();
2856 int arg_count = args->length();
2857 for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }
2861 // Call the construct call builtin that handles allocation and
2862 // constructor invocation.
2863 SetSourcePosition(expr->position());
2865 // Load function and argument count into edi and eax.
2866 __ Move(eax, Immediate(arg_count));
2867 __ mov(edi, Operand(esp, arg_count * kPointerSize));
2869 // Record call targets in unoptimized code.
2870 Handle<Object> uninitialized =
2871 TypeFeedbackInfo::UninitializedSentinel(isolate());
2872 StoreFeedbackVectorSlot(expr->CallNewFeedbackSlot(), uninitialized);
2873 if (FLAG_pretenuring_call_new) {
2874 StoreFeedbackVectorSlot(expr->AllocationSiteFeedbackSlot(),
2875 isolate()->factory()->NewAllocationSite());
    ASSERT(expr->AllocationSiteFeedbackSlot() ==
           expr->CallNewFeedbackSlot() + 1);
  }
2880 __ LoadHeapObject(ebx, FeedbackVector());
2881 __ mov(edx, Immediate(Smi::FromInt(expr->CallNewFeedbackSlot())));
2883 CallConstructStub stub(RECORD_CALL_TARGET);
2884 __ call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
2885 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
  context()->Plug(eax);
}
2890 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2891 ZoneList<Expression*>* args = expr->arguments();
2892 ASSERT(args->length() == 1);
2894 VisitForAccumulatorValue(args->at(0));
2896 Label materialize_true, materialize_false;
2897 Label* if_true = NULL;
2898 Label* if_false = NULL;
2899 Label* fall_through = NULL;
2900 context()->PrepareTest(&materialize_true, &materialize_false,
2901 &if_true, &if_false, &fall_through);
2903 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2904 __ test(eax, Immediate(kSmiTagMask));
2905 Split(zero, if_true, if_false, fall_through);
  context()->Plug(if_true, if_false);
}
2911 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2912 ZoneList<Expression*>* args = expr->arguments();
2913 ASSERT(args->length() == 1);
2915 VisitForAccumulatorValue(args->at(0));
2917 Label materialize_true, materialize_false;
2918 Label* if_true = NULL;
2919 Label* if_false = NULL;
2920 Label* fall_through = NULL;
2921 context()->PrepareTest(&materialize_true, &materialize_false,
2922 &if_true, &if_false, &fall_through);
2924 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2925 __ test(eax, Immediate(kSmiTagMask | 0x80000000));
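  // A non-negative smi has both the tag bit (bit 0) and the sign bit
  // (bit 31) clear, so one test against the combined mask checks both at
  // once.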
2926 Split(zero, if_true, if_false, fall_through);
  context()->Plug(if_true, if_false);
}
2932 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2933 ZoneList<Expression*>* args = expr->arguments();
2934 ASSERT(args->length() == 1);
2936 VisitForAccumulatorValue(args->at(0));
2938 Label materialize_true, materialize_false;
2939 Label* if_true = NULL;
2940 Label* if_false = NULL;
2941 Label* fall_through = NULL;
2942 context()->PrepareTest(&materialize_true, &materialize_false,
2943 &if_true, &if_false, &fall_through);
2945 __ JumpIfSmi(eax, if_false);
2946 __ cmp(eax, isolate()->factory()->null_value());
2947 __ j(equal, if_true);
2948 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
2949 // Undetectable objects behave like undefined when tested with typeof.
2950 __ movzx_b(ecx, FieldOperand(ebx, Map::kBitFieldOffset));
2951 __ test(ecx, Immediate(1 << Map::kIsUndetectable));
2952 __ j(not_zero, if_false);
2953 __ movzx_b(ecx, FieldOperand(ebx, Map::kInstanceTypeOffset));
2954 __ cmp(ecx, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
2955 __ j(below, if_false);
2956 __ cmp(ecx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
2957 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2958 Split(below_equal, if_true, if_false, fall_through);
  context()->Plug(if_true, if_false);
}
2964 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2965 ZoneList<Expression*>* args = expr->arguments();
2966 ASSERT(args->length() == 1);
2968 VisitForAccumulatorValue(args->at(0));
2970 Label materialize_true, materialize_false;
2971 Label* if_true = NULL;
2972 Label* if_false = NULL;
2973 Label* fall_through = NULL;
2974 context()->PrepareTest(&materialize_true, &materialize_false,
2975 &if_true, &if_false, &fall_through);
2977 __ JumpIfSmi(eax, if_false);
2978 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ebx);
2979 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2980 Split(above_equal, if_true, if_false, fall_through);
  context()->Plug(if_true, if_false);
}
2986 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
2987 ZoneList<Expression*>* args = expr->arguments();
2988 ASSERT(args->length() == 1);
2990 VisitForAccumulatorValue(args->at(0));
2992 Label materialize_true, materialize_false;
2993 Label* if_true = NULL;
2994 Label* if_false = NULL;
2995 Label* fall_through = NULL;
2996 context()->PrepareTest(&materialize_true, &materialize_false,
2997 &if_true, &if_false, &fall_through);
2999 __ JumpIfSmi(eax, if_false);
3000 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
3001 __ movzx_b(ebx, FieldOperand(ebx, Map::kBitFieldOffset));
3002 __ test(ebx, Immediate(1 << Map::kIsUndetectable));
3003 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3004 Split(not_zero, if_true, if_false, fall_through);
  context()->Plug(if_true, if_false);
}
3010 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
3011 CallRuntime* expr) {
3012 ZoneList<Expression*>* args = expr->arguments();
3013 ASSERT(args->length() == 1);
3015 VisitForAccumulatorValue(args->at(0));
3017 Label materialize_true, materialize_false, skip_lookup;
3018 Label* if_true = NULL;
3019 Label* if_false = NULL;
3020 Label* fall_through = NULL;
3021 context()->PrepareTest(&materialize_true, &materialize_false,
3022 &if_true, &if_false, &fall_through);
3024 __ AssertNotSmi(eax);
  // Check whether this map has already been checked to be safe for default
  // valueOf.
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
3029 __ test_b(FieldOperand(ebx, Map::kBitField2Offset),
3030 1 << Map::kStringWrapperSafeForDefaultValueOf);
3031 __ j(not_zero, &skip_lookup);
3033 // Check for fast case object. Return false for slow case objects.
3034 __ mov(ecx, FieldOperand(eax, JSObject::kPropertiesOffset));
3035 __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
3036 __ cmp(ecx, isolate()->factory()->hash_table_map());
3037 __ j(equal, if_false);
3039 // Look for valueOf string in the descriptor array, and indicate false if
3040 // found. Since we omit an enumeration index check, if it is added via a
3041 // transition that shares its descriptor array, this is a false positive.
3042 Label entry, loop, done;
3044 // Skip loop if no descriptors are valid.
  __ NumberOfOwnDescriptors(ecx, ebx);
  __ cmp(ecx, 0);
  __ j(equal, &done);

  __ LoadInstanceDescriptors(ebx, ebx);
3050 // ebx: descriptor array.
3051 // ecx: valid entries in the descriptor array.
3052 // Calculate the end of the descriptor array.
3053 STATIC_ASSERT(kSmiTag == 0);
3054 STATIC_ASSERT(kSmiTagSize == 1);
3055 STATIC_ASSERT(kPointerSize == 4);
3056 __ imul(ecx, ecx, DescriptorArray::kDescriptorSize);
3057 __ lea(ecx, Operand(ebx, ecx, times_2, DescriptorArray::kFirstOffset));
3058 // Calculate location of the first key name.
3059 __ add(ebx, Immediate(DescriptorArray::kFirstOffset));
3060 // Loop through all the keys in the descriptor array. If one of these is the
3061 // internalized string "valueOf" the result is false.
  __ jmp(&entry);
  __ bind(&loop);
  __ mov(edx, FieldOperand(ebx, 0));
  __ cmp(edx, isolate()->factory()->value_of_string());
  __ j(equal, if_false);
  __ add(ebx, Immediate(DescriptorArray::kDescriptorSize * kPointerSize));
  __ bind(&entry);
  __ cmp(ebx, ecx);
  __ j(not_equal, &loop);

  __ bind(&done);
3074 // Reload map as register ebx was used as temporary above.
3075 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
3077 // Set the bit in the map to indicate that there is no local valueOf field.
3078 __ or_(FieldOperand(ebx, Map::kBitField2Offset),
3079 Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
3081 __ bind(&skip_lookup);
  // If a valueOf property is not found on the object, check that its
  // prototype is the unmodified String prototype. If not, the result is
  // false.
3085 __ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
3086 __ JumpIfSmi(ecx, if_false);
3087 __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ mov(edx, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ mov(edx,
         FieldOperand(edx, GlobalObject::kNativeContextOffset));
  __ cmp(ecx,
         ContextOperand(edx,
                        Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3094 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3095 Split(equal, if_true, if_false, fall_through);
  context()->Plug(if_true, if_false);
}
3101 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3102 ZoneList<Expression*>* args = expr->arguments();
3103 ASSERT(args->length() == 1);
3105 VisitForAccumulatorValue(args->at(0));
3107 Label materialize_true, materialize_false;
3108 Label* if_true = NULL;
3109 Label* if_false = NULL;
3110 Label* fall_through = NULL;
3111 context()->PrepareTest(&materialize_true, &materialize_false,
3112 &if_true, &if_false, &fall_through);
3114 __ JumpIfSmi(eax, if_false);
3115 __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
3116 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3117 Split(equal, if_true, if_false, fall_through);
  context()->Plug(if_true, if_false);
}
3123 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3124 ZoneList<Expression*>* args = expr->arguments();
3125 ASSERT(args->length() == 1);
3127 VisitForAccumulatorValue(args->at(0));
3129 Label materialize_true, materialize_false;
3130 Label* if_true = NULL;
3131 Label* if_false = NULL;
3132 Label* fall_through = NULL;
3133 context()->PrepareTest(&materialize_true, &materialize_false,
3134 &if_true, &if_false, &fall_through);
3136 Handle<Map> map = masm()->isolate()->factory()->heap_number_map();
3137 __ CheckMap(eax, map, if_false, DO_SMI_CHECK);
3138 // Check if the exponent half is 0x80000000. Comparing against 1 and
3139 // checking for overflow is the shortest possible encoding.
3140 __ cmp(FieldOperand(eax, HeapNumber::kExponentOffset), Immediate(0x1));
3141 __ j(no_overflow, if_false);
3142 __ cmp(FieldOperand(eax, HeapNumber::kMantissaOffset), Immediate(0x0));
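  // The cmp-against-1 above overflows only when the exponent word is
  // 0x80000000 (sign bit set, all other bits zero), and the mantissa compare
  // confirms the low word is zero; together they identify exactly the bit
  // pattern of -0.0.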
3143 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3144 Split(equal, if_true, if_false, fall_through);
  context()->Plug(if_true, if_false);
}
3151 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3152 ZoneList<Expression*>* args = expr->arguments();
3153 ASSERT(args->length() == 1);
3155 VisitForAccumulatorValue(args->at(0));
3157 Label materialize_true, materialize_false;
3158 Label* if_true = NULL;
3159 Label* if_false = NULL;
3160 Label* fall_through = NULL;
3161 context()->PrepareTest(&materialize_true, &materialize_false,
3162 &if_true, &if_false, &fall_through);
3164 __ JumpIfSmi(eax, if_false);
3165 __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
3166 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3167 Split(equal, if_true, if_false, fall_through);
  context()->Plug(if_true, if_false);
}
3173 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3174 ZoneList<Expression*>* args = expr->arguments();
3175 ASSERT(args->length() == 1);
3177 VisitForAccumulatorValue(args->at(0));
3179 Label materialize_true, materialize_false;
3180 Label* if_true = NULL;
3181 Label* if_false = NULL;
3182 Label* fall_through = NULL;
3183 context()->PrepareTest(&materialize_true, &materialize_false,
3184 &if_true, &if_false, &fall_through);
3186 __ JumpIfSmi(eax, if_false);
3187 __ CmpObjectType(eax, JS_REGEXP_TYPE, ebx);
3188 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3189 Split(equal, if_true, if_false, fall_through);
  context()->Plug(if_true, if_false);
}
3196 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3197 ASSERT(expr->arguments()->length() == 0);
3199 Label materialize_true, materialize_false;
3200 Label* if_true = NULL;
3201 Label* if_false = NULL;
3202 Label* fall_through = NULL;
3203 context()->PrepareTest(&materialize_true, &materialize_false,
3204 &if_true, &if_false, &fall_through);
3206 // Get the frame pointer for the calling frame.
3207 __ mov(eax, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
3209 // Skip the arguments adaptor frame if it exists.
3210 Label check_frame_marker;
3211 __ cmp(Operand(eax, StandardFrameConstants::kContextOffset),
3212 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3213 __ j(not_equal, &check_frame_marker);
3214 __ mov(eax, Operand(eax, StandardFrameConstants::kCallerFPOffset));
3216 // Check the marker in the calling frame.
3217 __ bind(&check_frame_marker);
3218 __ cmp(Operand(eax, StandardFrameConstants::kMarkerOffset),
3219 Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
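  // Every standard frame stores a marker smi; it equals
  // StackFrame::CONSTRUCT exactly when the frame was pushed by the construct
  // stub, i.e. when the surrounding function was invoked via 'new'.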
3220 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3221 Split(equal, if_true, if_false, fall_through);
  context()->Plug(if_true, if_false);
}
3227 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3228 ZoneList<Expression*>* args = expr->arguments();
3229 ASSERT(args->length() == 2);
3231 // Load the two objects into registers and perform the comparison.
3232 VisitForStackValue(args->at(0));
3233 VisitForAccumulatorValue(args->at(1));
3235 Label materialize_true, materialize_false;
3236 Label* if_true = NULL;
3237 Label* if_false = NULL;
3238 Label* fall_through = NULL;
3239 context()->PrepareTest(&materialize_true, &materialize_false,
3240 &if_true, &if_false, &fall_through);
  __ pop(ebx);
  __ cmp(eax, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3245 Split(equal, if_true, if_false, fall_through);
  context()->Plug(if_true, if_false);
}
3251 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3252 ZoneList<Expression*>* args = expr->arguments();
3253 ASSERT(args->length() == 1);
3255 // ArgumentsAccessStub expects the key in edx and the formal
3256 // parameter count in eax.
  VisitForAccumulatorValue(args->at(0));
  __ mov(edx, eax);
  __ Move(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
  __ CallStub(&stub);
  context()->Plug(eax);
}
3266 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);

  Label exit;
  // Get the number of formal parameters.
3271 __ Move(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
3273 // Check if the calling frame is an arguments adaptor frame.
3274 __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
3275 __ cmp(Operand(ebx, StandardFrameConstants::kContextOffset),
3276 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3277 __ j(not_equal, &exit);
  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
  __ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));

  __ bind(&exit);
  __ AssertSmi(eax);
  context()->Plug(eax);
}
3289 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3290 ZoneList<Expression*>* args = expr->arguments();
3291 ASSERT(args->length() == 1);
3292 Label done, null, function, non_function_constructor;
3294 VisitForAccumulatorValue(args->at(0));
3296 // If the object is a smi, we return null.
3297 __ JumpIfSmi(eax, &null);
3299 // Check that the object is a JS object but take special care of JS
3300 // functions to make sure they have 'Function' as their class.
3301 // Assume that there are only two callable types, and one of them is at
3302 // either end of the type range for JS object types. Saves extra comparisons.
3303 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3304 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, eax);
  // Map is now in eax.
  __ j(below, &null);
  STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                FIRST_SPEC_OBJECT_TYPE + 1);
  __ j(equal, &function);
3311 __ CmpInstanceType(eax, LAST_SPEC_OBJECT_TYPE);
3312 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3313 LAST_SPEC_OBJECT_TYPE - 1);
3314 __ j(equal, &function);
3315 // Assume that there is no larger type.
3316 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3318 // Check if the constructor in the map is a JS function.
3319 __ mov(eax, FieldOperand(eax, Map::kConstructorOffset));
3320 __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
3321 __ j(not_equal, &non_function_constructor);
3323 // eax now contains the constructor function. Grab the
3324 // instance class name from there.
3325 __ mov(eax, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
  __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kInstanceClassNameOffset));
  __ jmp(&done);

  // Functions have class 'Function'.
  __ bind(&function);
  __ mov(eax, isolate()->factory()->function_class_string());
  __ jmp(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ mov(eax, isolate()->factory()->Object_string());
  __ jmp(&done);

  // Non-JS objects have class null.
  __ bind(&null);
  __ mov(eax, isolate()->factory()->null_value());

  // All done.
  __ bind(&done);

  context()->Plug(eax);
}
3350 void FullCodeGenerator::EmitLog(CallRuntime* expr) {
  // Conditionally generate a log call.
  // Args:
  //   0 (literal string): The type of logging (corresponds to the flags).
3354 // This is used to determine whether or not to generate the log call.
3355 // 1 (string): Format string. Access the string at argument index 2
3356 // with '%2s' (see Logger::LogRuntime for all the formats).
3357 // 2 (array): Arguments to the format string.
3358 ZoneList<Expression*>* args = expr->arguments();
3359 ASSERT_EQ(args->length(), 3);
3360 if (CodeGenerator::ShouldGenerateLog(isolate(), args->at(0))) {
3361 VisitForStackValue(args->at(1));
3362 VisitForStackValue(args->at(2));
    __ CallRuntime(Runtime::kHiddenLog, 2);
  }
  // Finally, we're expected to leave a value on the top of the stack.
  __ mov(eax, isolate()->factory()->undefined_value());
  context()->Plug(eax);
}
3371 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  SubStringStub stub;
  ZoneList<Expression*>* args = expr->arguments();
3375 ASSERT(args->length() == 3);
3376 VisitForStackValue(args->at(0));
3377 VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  __ CallStub(&stub);
  context()->Plug(eax);
}
3384 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3385 // Load the arguments on the stack and call the stub.
3386 RegExpExecStub stub;
3387 ZoneList<Expression*>* args = expr->arguments();
3388 ASSERT(args->length() == 4);
3389 VisitForStackValue(args->at(0));
3390 VisitForStackValue(args->at(1));
3391 VisitForStackValue(args->at(2));
  VisitForStackValue(args->at(3));
  __ CallStub(&stub);
  context()->Plug(eax);
}
3398 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3399 ZoneList<Expression*>* args = expr->arguments();
3400 ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label done;
  // If the object is a smi return the object.
3406 __ JumpIfSmi(eax, &done, Label::kNear);
3407 // If the object is not a value type, return the object.
3408 __ CmpObjectType(eax, JS_VALUE_TYPE, ebx);
3409 __ j(not_equal, &done, Label::kNear);
  __ mov(eax, FieldOperand(eax, JSValue::kValueOffset));

  __ bind(&done);
  context()->Plug(eax);
}
3417 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3418 ZoneList<Expression*>* args = expr->arguments();
3419 ASSERT(args->length() == 2);
3420 ASSERT_NE(NULL, args->at(1)->AsLiteral());
3421 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3423 VisitForAccumulatorValue(args->at(0)); // Load the object.
3425 Label runtime, done, not_date_object;
3426 Register object = eax;
3427 Register result = eax;
3428 Register scratch = ecx;
3430 __ JumpIfSmi(object, ¬_date_object);
3431 __ CmpObjectType(object, JS_DATE_TYPE, scratch);
3432 __ j(not_equal, ¬_date_object);
3434 if (index->value() == 0) {
    __ mov(result, FieldOperand(object, JSDate::kValueOffset));
  } else {
    if (index->value() < JSDate::kFirstUncachedField) {
3439 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3440 __ mov(scratch, Operand::StaticVariable(stamp));
3441 __ cmp(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
3442 __ j(not_equal, &runtime, Label::kNear);
      __ mov(result, FieldOperand(object, JSDate::kValueOffset +
                                          kPointerSize * index->value()));
      __ jmp(&done, Label::kNear);
    }
    __ bind(&runtime);
    __ PrepareCallCFunction(2, scratch);
3449 __ mov(Operand(esp, 0), object);
3450 __ mov(Operand(esp, 1 * kPointerSize), Immediate(index));
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
    __ jmp(&done, Label::kNear);
  }

  __ bind(&not_date_object);
  __ CallRuntime(Runtime::kHiddenThrowNotDateError, 0);
  __ bind(&done);
  context()->Plug(result);
}
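
// JSDate caches computed fields (year, month, day, ...) together with the
// isolate's date-cache stamp; when the stamp compared above still matches,
// the cached field can be read directly, otherwise the C function
// recomputes it.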
3462 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3463 ZoneList<Expression*>* args = expr->arguments();
3464 ASSERT_EQ(3, args->length());
3466 Register string = eax;
3467 Register index = ebx;
3468 Register value = ecx;
3470 VisitForStackValue(args->at(1)); // index
3471 VisitForStackValue(args->at(2)); // value
  VisitForAccumulatorValue(args->at(0));  // string

  __ pop(value);
  __ pop(index);

  if (FLAG_debug_code) {
    __ test(value, Immediate(kSmiTagMask));
    __ Check(zero, kNonSmiValue);
    __ test(index, Immediate(kSmiTagMask));
    __ Check(zero, kNonSmiValue);
  }

  __ SmiUntag(value);
  __ SmiUntag(index);

  if (FLAG_debug_code) {
3488 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
  }

  __ mov_b(FieldOperand(string, index, times_1, SeqOneByteString::kHeaderSize),
           value);
  context()->Plug(string);
}
3498 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3499 ZoneList<Expression*>* args = expr->arguments();
3500 ASSERT_EQ(3, args->length());
3502 Register string = eax;
3503 Register index = ebx;
3504 Register value = ecx;
3506 VisitForStackValue(args->at(1)); // index
3507 VisitForStackValue(args->at(2)); // value
  VisitForAccumulatorValue(args->at(0));  // string

  __ pop(value);
  __ pop(index);

  if (FLAG_debug_code) {
3513 __ test(value, Immediate(kSmiTagMask));
3514 __ Check(zero, kNonSmiValue);
3515 __ test(index, Immediate(kSmiTagMask));
3516 __ Check(zero, kNonSmiValue);
    __ SmiUntag(index);
    static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
    __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
    __ SmiTag(index);
  }

  __ SmiUntag(value);
  // No need to untag the smi index for two-byte addressing: the smi tag
  // shift already multiplies the index by two, the two-byte character size.
  __ mov_w(FieldOperand(string, index, times_1, SeqTwoByteString::kHeaderSize),
           value);
  context()->Plug(string);
}
3531 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3532 // Load the arguments on the stack and call the runtime function.
3533 ZoneList<Expression*>* args = expr->arguments();
3534 ASSERT(args->length() == 2);
3535 VisitForStackValue(args->at(0));
3536 VisitForStackValue(args->at(1));
3538 if (CpuFeatures::IsSupported(SSE2)) {
    MathPowStub stub(MathPowStub::ON_STACK);
    __ CallStub(&stub);
  } else {
    __ CallRuntime(Runtime::kMath_pow, 2);
  }
  context()->Plug(eax);
}
3548 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3549 ZoneList<Expression*>* args = expr->arguments();
3550 ASSERT(args->length() == 2);
3552 VisitForStackValue(args->at(0)); // Load the object.
3553 VisitForAccumulatorValue(args->at(1)); // Load the value.
  __ pop(ebx);  // eax = value. ebx = object.

  Label done;
  // If the object is a smi, return the value.
3558 __ JumpIfSmi(ebx, &done, Label::kNear);
3560 // If the object is not a value type, return the value.
3561 __ CmpObjectType(ebx, JS_VALUE_TYPE, ecx);
3562 __ j(not_equal, &done, Label::kNear);
  // Store the value.
  __ mov(FieldOperand(ebx, JSValue::kValueOffset), eax);

  // Update the write barrier.  Save the value as it will be
  // overwritten by the write barrier code and is needed afterward.
  __ mov(edx, eax);
  __ RecordWriteField(ebx, JSValue::kValueOffset, edx, ecx, kDontSaveFPRegs);

  __ bind(&done);
  context()->Plug(eax);
}
3577 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3578 ZoneList<Expression*>* args = expr->arguments();
3579 ASSERT_EQ(args->length(), 1);
3581 // Load the argument into eax and call the stub.
3582 VisitForAccumulatorValue(args->at(0));
  NumberToStringStub stub;
  __ CallStub(&stub);
  context()->Plug(eax);
}
3590 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3591 ZoneList<Expression*>* args = expr->arguments();
3592 ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  Label done;
  StringCharFromCodeGenerator generator(eax, ebx);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(ebx);
}
3609 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3610 ZoneList<Expression*>* args = expr->arguments();
3611 ASSERT(args->length() == 2);
3613 VisitForStackValue(args->at(0));
3614 VisitForAccumulatorValue(args->at(1));
3616 Register object = ebx;
3617 Register index = eax;
  Register result = edx;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);
  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ Move(result, Immediate(isolate()->factory()->nan_value()));
  __ jmp(&done);
3641 __ bind(&need_conversion);
3642 // Move the undefined value into the result register, which will
3643 // trigger conversion.
  __ Move(result, Immediate(isolate()->factory()->undefined_value()));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}
3655 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3656 ZoneList<Expression*>* args = expr->arguments();
3657 ASSERT(args->length() == 2);
3659 VisitForStackValue(args->at(0));
3660 VisitForAccumulatorValue(args->at(1));
3662 Register object = ebx;
3663 Register index = eax;
3664 Register scratch = edx;
  Register result = eax;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);
3683 __ bind(&index_out_of_range);
3684 // When the index is out of range, the spec requires us to return
3685 // the empty string.
  __ Move(result, Immediate(isolate()->factory()->empty_string()));
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ Move(result, Immediate(Smi::FromInt(0)));
  __ jmp(&done);
3695 NopRuntimeCallHelper call_helper;
3696 generator.GenerateSlow(masm_, call_helper);
3699 context()->Plug(result);
void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  __ pop(edx);
  StringAddStub stub(STRING_ADD_CHECK_BOTH, NOT_TENURED);
  __ CallStub(&stub);
  context()->Plug(eax);
}
void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());

  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  StringCompareStub stub;
  __ CallStub(&stub);
  context()->Plug(eax);
}
void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
  // Load the argument on the stack and call the runtime function.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallRuntime(Runtime::kMath_log, 1);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
  // Load the argument on the stack and call the runtime function.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallRuntime(Runtime::kMath_sqrt, 1);
  context()->Plug(eax);
}
void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() >= 2);

  int arg_count = args->length() - 2;  // 2 ~ receiver and function.
  for (int i = 0; i < arg_count + 1; ++i) {
    VisitForStackValue(args->at(i));
  }
  VisitForAccumulatorValue(args->last());  // Function.

  Label runtime, done;
  // Check for non-function argument (including proxy).
  __ JumpIfSmi(eax, &runtime);
  __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
  __ j(not_equal, &runtime);

  // InvokeFunction requires the function in edi. Move it in there.
  __ mov(edi, result_register());
  ParameterCount count(arg_count);
  __ InvokeFunction(edi, count, CALL_FUNCTION, NullCallWrapper());
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  __ jmp(&done);

  __ bind(&runtime);
  __ push(eax);
  __ CallRuntime(Runtime::kCall, args->length());
  __ bind(&done);

  context()->Plug(eax);
}
void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  RegExpConstructResultStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForAccumulatorValue(args->at(2));
  __ pop(ebx);
  __ pop(ecx);
  __ CallStub(&stub);
  context()->Plug(eax);
}
void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());

  ASSERT_NE(NULL, args->at(0)->AsLiteral());
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      isolate()->native_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    __ Abort(kAttemptToUseUndefinedCache);
    __ mov(eax, isolate()->factory()->undefined_value());
    context()->Plug(eax);
    return;
  }

  VisitForAccumulatorValue(args->at(1));

  Register key = eax;
  Register cache = ebx;
  Register tmp = ecx;
  __ mov(cache, ContextOperand(esi, Context::GLOBAL_OBJECT_INDEX));
  __ mov(cache,
         FieldOperand(cache, GlobalObject::kNativeContextOffset));
  __ mov(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
  __ mov(cache,
         FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));

  Label done, not_found;
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
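  // The cache is a FixedArray of key/value pairs, and the "finger" is the
  // smi offset of the most recently hit key, so that entry is probed first.
  // With kSmiTag == 0 and kSmiTagSize == 1 a smi is its value shifted left
  // by one, so the tagged finger already encodes index * 2 and only needs
  // to be scaled by half the pointer size to address an element.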
  __ mov(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
  // tmp now holds finger offset as a smi.
  __ cmp(key, FixedArrayElementOperand(cache, tmp));
  __ j(not_equal, &not_found);

  __ mov(eax, FixedArrayElementOperand(cache, tmp, 1));
  __ jmp(&done);

  __ bind(&not_found);
  // Call runtime to perform the lookup.
  __ push(cache);
  __ push(key);
  __ CallRuntime(Runtime::kHiddenGetFromCache, 2);

  __ bind(&done);
  context()->Plug(eax);
}
void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  __ AssertString(eax);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);
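  // The hash field caches an array index when the string is a valid index;
  // the bits selected by kContainsCachedArrayIndexMask are all clear exactly
  // when a cached index is present, so a zero test result means true.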
  __ test(FieldOperand(eax, String::kHashFieldOffset),
          Immediate(String::kContainsCachedArrayIndexMask));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(eax);

  __ mov(eax, FieldOperand(eax, String::kHashFieldOffset));
  __ IndexFromHash(eax, eax);

  context()->Plug(eax);
}
void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
  Label bailout, done, one_char_separator, long_separator,
      non_trivial_array, not_size_one_array, loop,
      loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;

  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  // We will leave the separator on the stack until the end of the function.
  VisitForStackValue(args->at(1));
  // Load this to eax (= array)
  VisitForAccumulatorValue(args->at(0));
  // All aliases of the same register have disjoint lifetimes.
  Register array = eax;
  Register elements = no_reg;  // Will be eax.

  Register index = edx;

  Register string_length = ecx;

  Register string = esi;

  Register scratch = ebx;

  Register array_length = edi;
  Register result_pos = no_reg;  // Will be edi.

  // Separator operand is already pushed.
  Operand separator_operand = Operand(esp, 2 * kPointerSize);
  Operand result_operand = Operand(esp, 1 * kPointerSize);
  Operand array_length_operand = Operand(esp, 0);
  __ sub(esp, Immediate(2 * kPointerSize));
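  // After the sub, the two freshly reserved slots plus the pushed separator
  // give the stack layout described by the operands above:
  //   esp[2 * kPointerSize]: the separator string.
  //   esp[1 * kPointerSize]: scratch slot for the result string.
  //   esp[0]              : scratch slot for the untagged array length.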
  // Check that the array is a JSArray
  __ JumpIfSmi(array, &bailout);
  __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
  __ j(not_equal, &bailout);

  // Check that the array has fast elements.
  __ CheckFastElements(scratch, &bailout);

  // If the array has length zero, return the empty string.
  __ mov(array_length, FieldOperand(array, JSArray::kLengthOffset));
  __ SmiUntag(array_length);
  __ j(not_zero, &non_trivial_array);
  __ mov(result_operand, isolate()->factory()->empty_string());
  __ jmp(&done);

  // Save the array length.
  __ bind(&non_trivial_array);
  __ mov(array_length_operand, array_length);

  // Save the FixedArray containing array's elements.
  // End of array's live range.
  elements = array;
  __ mov(elements, FieldOperand(array, JSArray::kElementsOffset));
  array = no_reg;

  // Check that all array elements are sequential ASCII strings, and
  // accumulate the sum of their lengths, as a smi-encoded value.
  __ Move(index, Immediate(0));
  __ Move(string_length, Immediate(0));
  // Loop condition: while (index < length).
  // Live loop registers: index, array_length, string,
  //                      scratch, string_length, elements.
  if (generate_debug_code_) {
    __ cmp(index, array_length);
    __ Assert(less, kNoEmptyArraysHereInEmitFastAsciiArrayJoin);
  }
  __ bind(&loop);
  __ mov(string, FieldOperand(elements,
                              index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ JumpIfSmi(string, &bailout);
  __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  __ and_(scratch, Immediate(
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
  __ cmp(scratch, kStringTag | kOneByteStringTag | kSeqStringTag);
  __ j(not_equal, &bailout);
  __ add(string_length,
         FieldOperand(string, SeqOneByteString::kLengthOffset));
  __ j(overflow, &bailout);
  __ add(index, Immediate(1));
  __ cmp(index, array_length);
  __ j(less, &loop);

  // If array_length is 1, return elements[0], a string.
  __ cmp(array_length, 1);
  __ j(not_equal, &not_size_one_array);
  __ mov(scratch, FieldOperand(elements, FixedArray::kHeaderSize));
  __ mov(result_operand, scratch);
  __ jmp(&done);

  __ bind(&not_size_one_array);

  // End of array_length live range.
  result_pos = array_length;
  array_length = no_reg;

  // Live registers:
  // string_length: Sum of string lengths, as a smi.
  // elements: FixedArray of strings.

  // Check that the separator is a flat ASCII string.
  __ mov(string, separator_operand);
  __ JumpIfSmi(string, &bailout);
  __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  __ and_(scratch, Immediate(
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
  __ cmp(scratch, kStringTag | kOneByteStringTag | kSeqStringTag);
  __ j(not_equal, &bailout);

  // Add (separator length times array_length) - separator length
  // to string_length.
  __ mov(scratch, separator_operand);
  __ mov(scratch, FieldOperand(scratch, SeqOneByteString::kLengthOffset));
  __ sub(string_length, scratch);  // May be negative, temporarily.
  __ imul(scratch, array_length_operand);
  __ j(overflow, &bailout);
  __ add(string_length, scratch);
  __ j(overflow, &bailout);

  __ shr(string_length, 1);
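  // The accumulated length is still a smi; since kSmiTag == 0 and
  // kSmiTagSize == 1, shifting right by one untags it, and for sequential
  // ASCII strings the character count equals the byte count.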
  // Live registers and stack values:
  //   string_length
  //   elements
  __ AllocateAsciiString(result_pos, string_length, scratch,
                         index, string, &bailout);
  __ mov(result_operand, result_pos);
  __ lea(result_pos, FieldOperand(result_pos, SeqOneByteString::kHeaderSize));

  __ mov(string, separator_operand);
  __ cmp(FieldOperand(string, SeqOneByteString::kLengthOffset),
         Immediate(Smi::FromInt(1)));
  __ j(equal, &one_char_separator);
  __ j(greater, &long_separator);

  // Empty separator case
  __ mov(index, Immediate(0));
  __ jmp(&loop_1_condition);
  // Loop condition: while (index < length).
  __ bind(&loop_1);
  // Each iteration of the loop concatenates one string to the result.
  // Live values in registers:
  //   index: which element of the elements array we are adding to the result.
  //   result_pos: the position to which we are currently copying characters.
  //   elements: the FixedArray of strings we are joining.

  // Get string = array[index].
  __ mov(string, FieldOperand(elements, index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ lea(string,
         FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ add(index, Immediate(1));
  __ bind(&loop_1_condition);
  __ cmp(index, array_length_operand);
  __ j(less, &loop_1);  // End while (index < length).
  __ jmp(&done);
  // One-character separator case
  __ bind(&one_char_separator);
  // Replace separator with its ASCII character value.
  __ mov_b(scratch, FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ mov_b(separator_operand, scratch);

  __ Move(index, Immediate(0));
  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator
  __ jmp(&loop_2_entry);
  // Loop condition: while (index < length).
  __ bind(&loop_2);
  // Each iteration of the loop concatenates one string to the result.
  // Live values in registers:
  //   index: which element of the elements array we are adding to the result.
  //   result_pos: the position to which we are currently copying characters.

  // Copy the separator character to the result.
  __ mov_b(scratch, separator_operand);
  __ mov_b(Operand(result_pos, 0), scratch);
  __ inc(result_pos);

  __ bind(&loop_2_entry);
  // Get string = array[index].
  __ mov(string, FieldOperand(elements, index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ lea(string,
         FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ add(index, Immediate(1));

  __ cmp(index, array_length_operand);
  __ j(less, &loop_2);  // End while (index < length).
  __ jmp(&done);
  // Long separator case (separator is more than one character).
  __ bind(&long_separator);

  __ Move(index, Immediate(0));
  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator
  __ jmp(&loop_3_entry);
  // Loop condition: while (index < length).
  __ bind(&loop_3);
  // Each iteration of the loop concatenates one string to the result.
  // Live values in registers:
  //   index: which element of the elements array we are adding to the result.
  //   result_pos: the position to which we are currently copying characters.

  // Copy the separator to the result.
  __ mov(string, separator_operand);
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ lea(string,
         FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);

  __ bind(&loop_3_entry);
  // Get string = array[index].
  __ mov(string, FieldOperand(elements, index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ lea(string,
         FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ add(index, Immediate(1));

  __ cmp(index, array_length_operand);
  __ j(less, &loop_3);  // End while (index < length).
  __ jmp(&done);
  __ bind(&bailout);
  __ mov(result_operand, isolate()->factory()->undefined_value());
  __ bind(&done);
  __ mov(eax, result_operand);
  // Drop temp values from the stack, and restore context register.
  __ add(esp, Immediate(3 * kPointerSize));

  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  context()->Plug(eax);
}
void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  if (expr->function() != NULL &&
      expr->function()->intrinsic_type == Runtime::INLINE) {
    Comment cmnt(masm_, "[ InlineRuntimeCall");
    EmitInlineRuntimeCall(expr);
    return;
  }

  Comment cmnt(masm_, "[ CallRuntime");
  ZoneList<Expression*>* args = expr->arguments();

  if (expr->is_jsruntime()) {
    // Push the builtins object as receiver.
    __ mov(eax, GlobalObjectOperand());
    __ push(FieldOperand(eax, GlobalObject::kBuiltinsOffset));

    // Load the function from the receiver.
    __ mov(edx, Operand(esp, 0));
    __ mov(ecx, Immediate(expr->name()));
    CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());

    // Push the target function under the receiver.
    __ push(Operand(esp, 0));
    __ mov(Operand(esp, kPointerSize), eax);

    // Code common for calls using the IC.
    ZoneList<Expression*>* args = expr->arguments();
    int arg_count = args->length();
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    // Record source position of the IC call.
    SetSourcePosition(expr->position());
    CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
    __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
    __ CallStub(&stub);
    // Restore context register.
    __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
    context()->DropAndPlug(1, eax);
  } else {
    // Push the arguments ("left-to-right").
    int arg_count = args->length();
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    // Call the C runtime function.
    __ CallRuntime(expr->function(), arg_count);
    context()->Plug(eax);
  }
}
void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ push(Immediate(Smi::FromInt(strict_mode())));
        __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
        context()->Plug(eax);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode
        // but "delete this" is allowed.
        ASSERT(strict_mode() == SLOPPY || var->is_this());
        if (var->IsUnallocated()) {
          __ push(GlobalObjectOperand());
          __ push(Immediate(var->name()));
          __ push(Immediate(Smi::FromInt(SLOPPY)));
          __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
          context()->Plug(eax);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global variables is false. 'this' is
          // not really a variable, though we implement it as one. The
          // subexpression does not have side effects.
          context()->Plug(var->is_this());
        } else {
          // Non-global variable. Call the runtime to try to delete from the
          // context where the variable was introduced.
          __ push(context_register());
          __ push(Immediate(var->name()));
          __ CallRuntime(Runtime::kHiddenDeleteContextSlot, 2);
          context()->Plug(eax);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(isolate()->factory()->undefined_value());
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression. Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->true_value());
        } else {
          __ Push(isolate()->factory()->true_value());
        }
        __ jmp(&done, Label::kNear);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->false_value());
        } else {
          __ Push(isolate()->factory()->false_value());
        }
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      { StackValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ CallRuntime(Runtime::kTypeof, 1);
      context()->Plug(eax);
      break;
    }

    default:
      UNREACHABLE();
  }
}
void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  ASSERT(expr->expression()->IsValidLeftHandSide());

  Comment cmnt(masm_, "[ CountOperation");
  SetSourcePosition(expr->position());

  // Expression can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->expression()->AsProperty();
  // In case of a property we use the uninitialized expression context
  // of the key to detect a named property.
  if (prop != NULL) {
    assign_type =
        (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
  }

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ push(Immediate(Smi::FromInt(0)));
    }
    if (assign_type == NAMED_PROPERTY) {
      // Put the object both on the stack and in edx.
      VisitForAccumulatorValue(prop->obj());
      __ push(eax);
      __ mov(edx, eax);
      EmitNamedPropertyLoad(prop);
    } else {
      VisitForStackValue(prop->obj());
      VisitForStackValue(prop->key());
      __ mov(edx, Operand(esp, kPointerSize));  // Object.
      __ mov(ecx, Operand(esp, 0));             // Key.
      EmitKeyedPropertyLoad(prop);
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
  }

  // Inline smi case if we are in a loop.
  Label done, stub_call;
  JumpPatchSite patch_site(masm_);
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(eax, &slow, Label::kNear);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
        switch (assign_type) {
          case VARIABLE:
            __ push(eax);
            break;
          case NAMED_PROPERTY:
            __ mov(Operand(esp, kPointerSize), eax);
            break;
          case KEYED_PROPERTY:
            __ mov(Operand(esp, 2 * kPointerSize), eax);
            break;
        }
      }
    }

    if (expr->op() == Token::INC) {
      __ add(eax, Immediate(Smi::FromInt(1)));
    } else {
      __ sub(eax, Immediate(Smi::FromInt(1)));
    }
    __ j(no_overflow, &done, Label::kNear);
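    // Smi arithmetic works directly on tagged values: adding the tagged
    // constant Smi::FromInt(1) bumps the untagged value by one, and a
    // signed overflow means the result left the smi range, so the stub
    // must redo the operation on a heap number.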
    // Call stub. Undo operation first.
    if (expr->op() == Token::INC) {
      __ sub(eax, Immediate(Smi::FromInt(1)));
    } else {
      __ add(eax, Immediate(Smi::FromInt(1)));
    }
    __ jmp(&stub_call, Label::kNear);
    __ bind(&slow);
  }
  ToNumberStub convert_stub;
  __ CallStub(&convert_stub);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          __ push(eax);
          break;
        case NAMED_PROPERTY:
          __ mov(Operand(esp, kPointerSize), eax);
          break;
        case KEYED_PROPERTY:
          __ mov(Operand(esp, 2 * kPointerSize), eax);
          break;
      }
    }
  }

  // Record position before stub call.
  SetSourcePosition(expr->position());

  // Call stub for +1/-1.
  __ bind(&stub_call);
  __ mov(edx, eax);
  __ mov(eax, Immediate(Smi::FromInt(1)));
  BinaryOpICStub stub(expr->binary_op(), NO_OVERWRITE);
  CallIC(stub.GetCode(isolate()), expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in eax.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        // Perform the assignment as if via '='.
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN);
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(eax);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        // Perform the assignment as if via '='.
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN);
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(eax);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(ecx, prop->key()->AsLiteral()->value());
      __ pop(edx);
      CallStoreIC(expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ pop(ecx);
      __ pop(edx);
      Handle<Code> ic = strict_mode() == SLOPPY
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
      CallIC(ic, expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        // Result is on the stack
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
  }
}
void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
  VariableProxy* proxy = expr->AsVariableProxy();
  ASSERT(!context()->IsEffect());
  ASSERT(!context()->IsTest());

  if (proxy != NULL && proxy->var()->IsUnallocated()) {
    Comment cmnt(masm_, "[ Global variable");
    __ mov(edx, GlobalObjectOperand());
    __ mov(ecx, Immediate(proxy->name()));
    // Use a regular load, not a contextual load, to avoid a reference
    // error.
    CallLoadIC(NOT_CONTEXTUAL);
    PrepareForBailout(expr, TOS_REG);
    context()->Plug(eax);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    Comment cmnt(masm_, "[ Lookup slot");
    Label done, slow;

    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    __ push(esi);
    __ push(Immediate(proxy->name()));
    __ CallRuntime(Runtime::kHiddenLoadContextSlotNoReferenceError, 2);
    PrepareForBailout(expr, TOS_REG);
    __ bind(&done);

    context()->Plug(eax);
  } else {
    // This expression cannot throw a reference error at the top level.
    VisitInDuplicateContext(expr);
  }
}
void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  if (check->Equals(isolate()->heap()->number_string())) {
    __ JumpIfSmi(eax, if_true);
    __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
           isolate()->factory()->heap_number_map());
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->float32x4_string())) {
    __ JumpIfSmi(eax, if_false);
    __ CmpObjectType(eax, FLOAT32x4_TYPE, edx);
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->int32x4_string())) {
    __ JumpIfSmi(eax, if_false);
    __ CmpObjectType(eax, INT32x4_TYPE, edx);
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->string_string())) {
    __ JumpIfSmi(eax, if_false);
    __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
    __ j(above_equal, if_false);
    // Check for undetectable objects => false.
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    Split(zero, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->symbol_string())) {
    __ JumpIfSmi(eax, if_false);
    __ CmpObjectType(eax, SYMBOL_TYPE, edx);
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->boolean_string())) {
    __ cmp(eax, isolate()->factory()->true_value());
    __ j(equal, if_true);
    __ cmp(eax, isolate()->factory()->false_value());
    Split(equal, if_true, if_false, fall_through);
  } else if (FLAG_harmony_typeof &&
             check->Equals(isolate()->heap()->null_string())) {
    __ cmp(eax, isolate()->factory()->null_value());
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->undefined_string())) {
    __ cmp(eax, isolate()->factory()->undefined_value());
    __ j(equal, if_true);
    __ JumpIfSmi(eax, if_false);
    // Check for undetectable objects => true.
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ movzx_b(ecx, FieldOperand(edx, Map::kBitFieldOffset));
    __ test(ecx, Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->function_string())) {
    __ JumpIfSmi(eax, if_false);
    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
    __ CmpObjectType(eax, JS_FUNCTION_TYPE, edx);
    __ j(equal, if_true);
    __ CmpInstanceType(edx, JS_FUNCTION_PROXY_TYPE);
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->object_string())) {
    __ JumpIfSmi(eax, if_false);
    if (!FLAG_harmony_typeof) {
      __ cmp(eax, isolate()->factory()->null_value());
      __ j(equal, if_true);
    }
    __ CmpObjectType(eax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, edx);
    __ j(below, if_false);
    __ CmpInstanceType(edx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ j(above, if_false);
    // Check for undetectable objects => false.
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    Split(zero, if_true, if_false, fall_through);
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}
void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetSourcePosition(expr->position());

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow. Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ cmp(eax, isolate()->factory()->true_value());
      Split(equal, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      InstanceofStub stub(InstanceofStub::kNoFlags);
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ test(eax, eax);
      // The stub returns 0 for true.
      Split(zero, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cc = CompareIC::ComputeCondition(op);
      __ pop(edx);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
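        // A smi has a clear low tag bit, so or-ing the two operands gives a
        // value whose low bit is clear only if both are smis; one smi check
        // then covers both inputs at once.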
        Label slow_case;
        __ mov(ecx, edx);
        __ or_(ecx, eax);
        patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
        __ cmp(edx, eax);
        Split(cc, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      // Record position and call the compare IC.
      SetSourcePosition(expr->position());
      Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();

      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ test(eax, eax);
      Split(cc, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}
void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Handle<Object> nil_value = nil == kNullValue
      ? isolate()->factory()->null_value()
      : isolate()->factory()->undefined_value();
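  // null and undefined are singleton oddballs, so a strict equality check
  // reduces to a pointer comparison against the canonical value.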
  if (expr->op() == Token::EQ_STRICT) {
    __ cmp(eax, nil_value);
    Split(equal, if_true, if_false, fall_through);
  } else {
    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
    CallIC(ic, expr->CompareOperationFeedbackId());
    __ test(eax, eax);
    Split(not_zero, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}
void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(eax);
}
Register FullCodeGenerator::result_register() {
  return eax;
}


Register FullCodeGenerator::context_register() {
  return esi;
}
void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ mov(Operand(ebp, frame_offset), value);
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ mov(dst, ContextOperand(esi, context_index));
}
void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* declaration_scope = scope()->DeclarationScope();
  if (declaration_scope->is_global_scope() ||
      declaration_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code. Pass a smi sentinel and let the runtime look up the empty
    // function.
    __ push(Immediate(Smi::FromInt(0)));
  } else if (declaration_scope->is_eval_scope()) {
    // Contexts nested inside eval code have the same closure as the context
    // calling eval, not the anonymous closure containing the eval code.
    // Fetch it from the context.
    __ push(ContextOperand(esi, Context::CLOSURE_INDEX));
  } else {
    ASSERT(declaration_scope->is_function_scope());
    __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  }
}
// ----------------------------------------------------------------------------
// Non-local control flow support.

void FullCodeGenerator::EnterFinallyBlock() {
  // Cook return address on top of stack (smi encoded Code* delta)
  ASSERT(!result_register().is(edx));
  __ pop(edx);
  __ sub(edx, Immediate(masm_->CodeObject()));
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
  STATIC_ASSERT(kSmiTag == 0);
  __ SmiTag(edx);
  __ push(edx);
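  // Storing the return address as a code-relative smi (rather than a raw
  // pointer) keeps the stack slot valid even if the GC moves the code
  // object while the finally block is executing.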

  // Store result register while executing finally block.
  __ push(result_register());

  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(edx, Operand::StaticVariable(pending_message_obj));
  __ push(edx);

  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ mov(edx, Operand::StaticVariable(has_pending_message));
  __ SmiTag(edx);
  __ push(edx);

  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ mov(edx, Operand::StaticVariable(pending_message_script));
  __ push(edx);
}
void FullCodeGenerator::ExitFinallyBlock() {
  ASSERT(!result_register().is(edx));
  // Restore pending message from stack.
  __ pop(edx);
  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ mov(Operand::StaticVariable(pending_message_script), edx);

  __ pop(edx);
  __ SmiUntag(edx);
  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ mov(Operand::StaticVariable(has_pending_message), edx);

  __ pop(edx);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(Operand::StaticVariable(pending_message_obj), edx);

  // Restore result register from stack.
  __ pop(result_register());

  // Uncook return address.
  __ pop(edx);
  __ SmiUntag(edx);
  __ add(edx, Immediate(masm_->CodeObject()));
  __ jmp(edx);
}
#undef __

#define __ ACCESS_MASM(masm())
FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
    int* stack_depth,
    int* context_length) {
  // The macros used here must preserve the result register.

  // Because the handler block contains the context of the finally
  // code, we can restore it directly from there for the finally code
  // rather than iteratively unwinding contexts via their previous
  // links.
  __ Drop(*stack_depth);  // Down to the handler block.
  if (*context_length > 0) {
    // Restore the context to its dedicated register and the stack.
    __ mov(esi, Operand(esp, StackHandlerConstants::kContextOffset));
    __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
  }
  __ PopTryHandler();
  __ call(finally_entry_);

  *stack_depth = 0;
  *context_length = 0;
  return previous_;
}
static const byte kJnsInstruction = 0x79;
static const byte kJnsOffset = 0x11;
static const byte kNopByteOne = 0x66;
static const byte kNopByteTwo = 0x90;
#ifdef DEBUG
static const byte kCallInstruction = 0xe8;
#endif
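// 0x79 is the opcode of a short jns and 0x11 its 8-bit displacement, which
// jumps past the interrupt call laid down at a back edge. 0x66 0x90 is the
// two-byte nop used to overwrite (and thus disable) the jns, and 0xe8 is
// the opcode of a 32-bit relative call.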
void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  Address jns_offset_address = call_target_address - 2;
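  // A back edge site ends with a call whose pc follows its 4-byte operand,
  // so the call target operand starts at pc - kIntSize; the two-byte jns
  // (opcode plus 8-bit displacement) sits immediately before the call
  // opcode, i.e. 3 and 2 bytes before the operand.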
  switch (target_state) {
    case INTERRUPT:
      //     sub <profiling_counter>, <delta>  ;; Not changed
      //     jns ok
      //     call <interrupt stub>
      //   ok:
      *jns_instr_address = kJnsInstruction;
      *jns_offset_address = kJnsOffset;
      break;
    case ON_STACK_REPLACEMENT:
    case OSR_AFTER_STACK_CHECK:
      //     sub <profiling_counter>, <delta>  ;; Not changed
      //     nop
      //     nop
      //     call <on-stack replacement>
      //   ok:
      *jns_instr_address = kNopByteOne;
      *jns_offset_address = kNopByteTwo;
      break;
  }

  Assembler::set_target_address_at(call_target_address,
                                   unoptimized_code,
                                   replacement_code->entry());
  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, call_target_address, replacement_code);
}
BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  ASSERT_EQ(kCallInstruction, *(call_target_address - 1));

  if (*jns_instr_address == kJnsInstruction) {
    ASSERT_EQ(kJnsOffset, *(call_target_address - 2));
    ASSERT_EQ(isolate->builtins()->InterruptCheck()->entry(),
              Assembler::target_address_at(call_target_address,
                                           unoptimized_code));
    return INTERRUPT;
  }

  ASSERT_EQ(kNopByteOne, *jns_instr_address);
  ASSERT_EQ(kNopByteTwo, *(call_target_address - 2));

  if (Assembler::target_address_at(call_target_address, unoptimized_code) ==
      isolate->builtins()->OnStackReplacement()->entry()) {
    return ON_STACK_REPLACEMENT;
  }

  ASSERT_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(),
            Assembler::target_address_at(call_target_address,
                                         unoptimized_code));
  return OSR_AFTER_STACK_CHECK;
}


} }  // namespace v8::internal
#endif  // V8_TARGET_ARCH_IA32