// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#if V8_TARGET_ARCH_IA32

#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/compiler.h"
#include "src/debug.h"
#include "src/full-codegen.h"
#include "src/isolate-inl.h"
#include "src/parser.h"
#include "src/scopes.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    DCHECK(patch_site_.is_bound() == info_emitted_);
  }

  void EmitJumpIfNotSmi(Register reg,
                        Label* target,
                        Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(not_carry, target, distance);  // Always taken before patched.
  }

  void EmitJumpIfSmi(Register reg,
                     Label* target,
                     Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(carry, target, distance);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
      DCHECK(is_uint8(delta_to_patch_site));
      __ test(eax, Immediate(delta_to_patch_site));
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined smi code.
    }
  }

 private:
  // jc will be patched with jz, jnc will become jnz.
  void EmitJump(Condition cc, Label* target, Label::Distance distance) {
    DCHECK(!patch_site_.is_bound() && !info_emitted_);
    DCHECK(cc == carry || cc == not_carry);
    __ bind(&patch_site_);
    __ j(cc, target, distance);
  }

  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};
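// How the patch site works, as a sketch based on the comments above: on ia32,
// 'test reg, imm' always clears the carry flag, so the freshly emitted jc/jnc
// at the patch site acts as a never/always-taken jump. Once type feedback
// indicates smi operands, the IC miss handler rewrites the jcc opcode in place
// to jz/jnz, and the very same instruction becomes a real smi-tag test (ZF
// then reflects reg & kSmiTagMask). EmitPatchInfo() encodes the distance back
// to the patch site in the 8-bit immediate of a dummy 'test eax, imm8' so the
// patcher can locate the site from the IC call that follows it.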
// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right, with the
// return address on top of them. The actual argument count matches the
// formal parameter count expected by the function.
//
// The live registers are:
//   o edi: the JS function object being called (i.e. ourselves)
//   o esi: our context
//   o ebp: our caller's frame pointer
//   o esp: stack pointer (pointing to return address)
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-ia32.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);

  profiling_counter_ = isolate()->factory()->NewCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget), isolate()));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  ProfileEntryHookStub::MaybeCallEntryHook(masm_);

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsUtf8EqualTo(CStrVector(FLAG_stop_at))) {
    __ int3();
  }
#endif

  // Sloppy mode functions and builtins need to replace the receiver with the
  // global proxy when called as functions (without an explicit receiver
  // object).
  if (info->strict_mode() == SLOPPY && !info->is_native()) {
    Label ok;
    // +1 for return address.
    int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
    __ mov(ecx, Operand(esp, receiver_offset));

    __ cmp(ecx, isolate()->factory()->undefined_value());
    __ j(not_equal, &ok, Label::kNear);

    __ mov(ecx, GlobalObjectOperand());
    __ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalProxyOffset));

    __ mov(Operand(esp, receiver_offset), ecx);

    __ bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  info->set_prologue_offset(masm_->pc_offset());
  __ Prologue(info->IsCodePreAgingActive());
  info->AddNoFrameRange(0, masm_->pc_offset());

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    // Generators allocate locals, if any, in context slots.
    DCHECK(!info->function()->is_generator() || locals_count == 0);
    if (locals_count == 1) {
      __ push(Immediate(isolate()->factory()->undefined_value()));
    } else if (locals_count > 1) {
      if (locals_count >= 128) {
        Label ok;
        __ mov(ecx, esp);
        __ sub(ecx, Immediate(locals_count * kPointerSize));
        ExternalReference stack_limit =
            ExternalReference::address_of_real_stack_limit(isolate());
        __ cmp(ecx, Operand::StaticVariable(stack_limit));
        __ j(above_equal, &ok, Label::kNear);
        __ InvokeBuiltin(Builtins::STACK_OVERFLOW, CALL_FUNCTION);
        __ bind(&ok);
      }
      __ mov(eax, Immediate(isolate()->factory()->undefined_value()));
      const int kMaxPushes = 32;
      if (locals_count >= kMaxPushes) {
        int loop_iterations = locals_count / kMaxPushes;
        __ mov(ecx, loop_iterations);
        Label loop_header;
        __ bind(&loop_header);
        // Do pushes.
        for (int i = 0; i < kMaxPushes; i++) {
          __ push(eax);
        }
        __ dec(ecx);
        __ j(not_zero, &loop_header, Label::kNear);
      }
      int remaining = locals_count % kMaxPushes;
      // Emit the remaining pushes.
      for (int i = 0; i < remaining; i++) {
        __ push(eax);
      }
    }
  }

  bool function_in_register = true;

  // Possibly allocate a local context.
  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0) {
    Comment cmnt(masm_, "[ Allocate context");
    bool need_write_barrier = true;
    // Argument to NewContext is the function, which is still in edi.
    if (FLAG_harmony_scoping && info->scope()->is_global_scope()) {
      __ push(edi);
      __ Push(info->scope()->GetScopeInfo());
      __ CallRuntime(Runtime::kNewGlobalContext, 2);
    } else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub(isolate(), heap_slots);
      __ CallStub(&stub);
      // Result of FastNewContextStub is always in new space.
      need_write_barrier = false;
    } else {
      __ push(edi);
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register = false;
    // Context is returned in eax. It replaces the context passed to us.
    // It's saved in the stack and kept live in esi.
    __ mov(esi, eax);
    __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), eax);

    // Copy parameters into context if necessary.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ mov(eax, Operand(ebp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(var->index());
        __ mov(Operand(esi, context_offset), eax);
        // Update the write barrier. This clobbers eax and ebx.
        if (need_write_barrier) {
          __ RecordWriteContextSlot(esi,
                                    context_offset,
                                    eax,
                                    ebx,
                                    kDontSaveFPRegs);
        } else if (FLAG_debug_code) {
          Label done;
          __ JumpIfInNewSpace(esi, eax, &done, Label::kNear);
          __ Abort(kExpectedNewSpaceObject);
          __ bind(&done);
        }
      }
    }
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (function_in_register) {
      __ push(edi);
    } else {
      __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ lea(edx,
           Operand(ebp, StandardFrameConstants::kCallerSPOffset + offset));
    __ push(edx);
    __ push(Immediate(Smi::FromInt(num_parameters)));
    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (strict_mode() == STRICT) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_SLOPPY_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
    }
    ArgumentsAccessStub stub(isolate(), type);
    __ CallStub(&stub);

    SetVar(arguments, eax, ebx, edx);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(BailoutId::FunctionEntry(), NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        DCHECK(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_LEGACY);
        DCHECK(function->proxy()->var()->location() != Variable::UNALLOCATED);
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(BailoutId::Declarations(), NO_REGISTERS);
      Label ok;
      ExternalReference stack_limit
          = ExternalReference::address_of_stack_limit(isolate());
      __ cmp(esp, Operand::StaticVariable(stack_limit));
      __ j(above_equal, &ok, Label::kNear);
      __ call(isolate()->builtins()->StackCheck(), RelocInfo::CODE_TARGET);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      DCHECK(loop_depth() == 0);
      VisitStatements(function()->body());
      DCHECK(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ mov(eax, isolate()->factory()->undefined_value());
    EmitReturnSequence();
  }
}
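// Rough sketch of the JS frame that Generate() builds, with n parameters
// (offsets are from ebp; see JavaScriptFrameConstants in frames-ia32.h for
// the authoritative layout):
//
//   ebp + (2 + n) * kPointerSize : receiver, then the arguments below it
//   ebp + 1 * kPointerSize       : return address
//   ebp + 0                      : saved caller ebp
//   ebp - 1 * kPointerSize       : context (esi), stored above
//   ebp - 2 * kPointerSize       : JS function (edi)
//   ebp - 3 * kPointerSize, ...  : stack locals, filled with undefined by the
//                                  "Allocate locals" block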
void FullCodeGenerator::ClearAccumulator() {
  __ Move(eax, Immediate(Smi::FromInt(0)));
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(ebx, Immediate(profiling_counter_));
  __ sub(FieldOperand(ebx, Cell::kValueOffset),
         Immediate(Smi::FromInt(delta)));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  __ mov(ebx, Immediate(profiling_counter_));
  __ mov(FieldOperand(ebx, Cell::kValueOffset),
         Immediate(Smi::FromInt(reset_value)));
}
void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
                                                Label* back_edge_target) {
  Comment cmnt(masm_, "[ Back edge bookkeeping");
  Label ok;

  DCHECK(back_edge_target->is_bound());
  int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
  int weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
  EmitProfilingCounterDecrement(weight);
  __ j(positive, &ok, Label::kNear);
  __ call(isolate()->builtins()->InterruptCheck(), RelocInfo::CODE_TARGET);

  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordBackEdge(stmt->OsrEntryId());

  EmitProfilingCounterReset();

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}
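// Interrupt budget mechanics, in brief: the profiling counter cell starts at
// FLAG_interrupt_budget, and each back edge subtracts a weight that scales
// with the size of the loop body:
//   weight = Min(kMaxBackEdgeWeight, Max(1, distance / kCodeSizeMultiplier))
// So, purely as an illustration, a loop body around 10 * kCodeSizeMultiplier
// bytes long drains the budget roughly ten times faster than a tiny one.
// When the counter drops to zero or below, the InterruptCheck builtin runs
// (which may trigger on-stack replacement) and the counter is reset.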
void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ jmp(&return_label_);
  } else {
    // Common return label
    __ bind(&return_label_);
    if (FLAG_trace) {
      __ push(eax);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    // Pretend that the exit is a backwards jump to the entry.
    int weight = 1;
    if (info_->ShouldSelfOptimize()) {
      weight = FLAG_interrupt_budget / FLAG_self_opt_count;
    } else {
      int distance = masm_->pc_offset();
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kCodeSizeMultiplier));
    }
    EmitProfilingCounterDecrement(weight);
    Label ok;
    __ j(positive, &ok, Label::kNear);
    __ push(eax);
    __ call(isolate()->builtins()->InterruptCheck(),
            RelocInfo::CODE_TARGET);
    __ pop(eax);
    EmitProfilingCounterReset();
    __ bind(&ok);
#ifdef DEBUG
    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);
#endif
    SetSourcePosition(function()->end_position() - 1);
    __ RecordJSReturn();
    // Do not use the leave instruction here because it is too short to
    // patch with the code required by the debugger.
    __ mov(esp, ebp);
    int no_frame_start = masm_->pc_offset();
    __ pop(ebp);

    int arguments_bytes = (info_->scope()->num_parameters() + 1) * kPointerSize;
    __ Ret(arguments_bytes, ecx);
    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    DCHECK(Assembler::kJSReturnSequenceLength <=
           masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
    info_->AddNoFrameRange(no_frame_start, masm_->pc_offset());
  }
}
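// The emitted epilogue is, in effect:
//   mov esp, ebp
//   pop ebp
//   ret (num_parameters + 1) * kPointerSize   ; also pops the receiver
// The shorter 'leave' instruction is deliberately avoided (see the comment
// above) so the sequence stays at least Assembler::kJSReturnSequenceLength
// bytes long and the debugger can patch it in place with a debug-break call.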
void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  MemOperand operand = codegen()->VarOperand(var, result_register());
  // Memory operands can be pushed directly.
  __ push(operand);
}


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  if (lit->IsSmi()) {
    __ SafeMove(result_register(), Immediate(lit));
  } else {
    __ Move(result_register(), Immediate(lit));
  }
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  if (lit->IsSmi()) {
    __ SafePush(Immediate(lit));
  } else {
    __ push(Immediate(lit));
  }
}


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  DCHECK(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), lit);
    codegen()->DoTest(this);
  }
}
void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  DCHECK(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  DCHECK(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ mov(Operand(esp, 0), reg);
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  DCHECK(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}
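// A note on the four expression contexts used by the Plug/DropAndPlug
// families above: EffectContext discards the value, AccumulatorValueContext
// leaves it in eax, StackValueContext leaves it on top of the stack, and
// TestContext forwards control to true/false labels. Plug() delivers a value
// into the current context; DropAndPlug(count, reg) additionally removes
// 'count' stack slots left over from evaluating subexpressions.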
void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  DCHECK(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ mov(result_register(), isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ mov(result_register(), isolate()->factory()->false_value());
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ push(Immediate(isolate()->factory()->true_value()));
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ push(Immediate(isolate()->factory()->false_value()));
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  DCHECK(materialize_true == true_label_);
  DCHECK(materialize_false == false_label_);
}


void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Handle<Object> value = flag
      ? isolate()->factory()->true_value()
      : isolate()->factory()->false_value();
  __ mov(result_register(), value);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Handle<Object> value = flag
      ? isolate()->factory()->true_value()
      : isolate()->factory()->false_value();
  __ push(Immediate(value));
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  }
}
void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  Handle<Code> ic = ToBooleanStub::GetUninitialized(isolate());
  CallIC(ic, condition->test_id());
  __ test(result_register(), result_register());
  // The stub returns nonzero for true.
  Split(not_zero, if_true, if_false, fall_through);
}


void FullCodeGenerator::Split(Condition cc,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ j(cc, if_true);
  } else if (if_true == fall_through) {
    __ j(NegateCondition(cc), if_false);
  } else {
    __ j(cc, if_true);
    __ jmp(if_false);
  }
}
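// Split() avoids a redundant jump when one target is the fall-through: e.g.
// with cc == equal and fall_through == if_false it emits just 'je if_true'
// and lets execution fall straight into the false case.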
MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  DCHECK(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return Operand(ebp, offset);
}
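// Worked example of the arithmetic above (assuming parameters are indexed
// 0..n-1, as the scope allocator does): with n parameters, parameter i
// resolves to ebp + (n + 1 - i) * kPointerSize, which is exactly the i-th
// argument the caller pushed, above the return address and saved ebp. A
// stack local with index j sits below the fixed frame at
// ebp + kLocal0Offset - j * kPointerSize.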
MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  MemOperand location = VarOperand(var, dest);
  __ mov(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  DCHECK(var->IsContextSlot() || var->IsStackAllocated());
  DCHECK(!scratch0.is(src));
  DCHECK(!scratch0.is(scratch1));
  DCHECK(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ mov(location, src);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    int offset = Context::SlotOffset(var->index());
    DCHECK(!scratch0.is(esi) && !src.is(esi) && !scratch1.is(esi));
    __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
  }
}


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ jmp(&skip, Label::kNear);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ cmp(eax, isolate()->factory()->true_value());
    Split(equal, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current context.
  DCHECK_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (generate_debug_code_) {
    // Check that we're not inside a with or catch context.
    __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
    __ cmp(ebx, isolate()->factory()->with_context_map());
    __ Check(not_equal, kDeclarationInWithContext);
    __ cmp(ebx, isolate()->factory()->catch_context_map());
    __ Check(not_equal, kDeclarationInCatchContext);
  }
}
void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == LET || mode == CONST || mode == CONST_LEGACY;
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      globals_->Add(variable->name(), zone());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value(), zone());
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ mov(StackOperand(variable),
               Immediate(isolate()->factory()->the_hole_value()));
      }
      break;

    case Variable::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ mov(ContextOperand(esi, variable->index()),
               Immediate(isolate()->factory()->the_hole_value()));
        // No write barrier since the hole value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ push(esi);
      __ push(Immediate(variable->name()));
      // VariableDeclaration nodes are always introduced in one of four modes.
      DCHECK(IsDeclaredVariableMode(mode));
      PropertyAttributes attr =
          IsImmutableVariableMode(mode) ? READ_ONLY : NONE;
      __ push(Immediate(Smi::FromInt(attr)));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ push(Immediate(isolate()->factory()->the_hole_value()));
      } else {
        __ push(Immediate(Smi::FromInt(0)));  // Indicates no initial value.
      }
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}
void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      globals_->Add(variable->name(), zone());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script(), info_);
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function, zone());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ mov(StackOperand(variable), result_register());
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ mov(ContextOperand(esi, variable->index()), result_register());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(esi,
                                Context::SlotOffset(variable->index()),
                                result_register(),
                                ecx,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ push(esi);
      __ push(Immediate(variable->name()));
      __ push(Immediate(Smi::FromInt(NONE)));
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareLookupSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  Variable* variable = declaration->proxy()->var();
  DCHECK(variable->location() == Variable::CONTEXT);
  DCHECK(variable->interface()->IsFrozen());

  Comment cmnt(masm_, "[ ModuleDeclaration");
  EmitDebugCheckDeclarationContext(variable);

  // Load instance object.
  __ LoadContext(eax, scope_->ContextChainLength(scope_->GlobalScope()));
  __ mov(eax, ContextOperand(eax, variable->interface()->Index()));
  __ mov(eax, ContextOperand(eax, Context::EXTENSION_INDEX));

  // Assign it.
  __ mov(ContextOperand(esi, variable->index()), eax);
  // We know that we have written a module, which is not a smi.
  __ RecordWriteContextSlot(esi,
                            Context::SlotOffset(variable->index()),
                            eax,
                            ecx,
                            kDontSaveFPRegs,
                            EMIT_REMEMBERED_SET,
                            OMIT_SMI_CHECK);
  PrepareForBailoutForId(declaration->proxy()->id(), NO_REGISTERS);

  // Traverse into body.
  Visit(declaration->module());
}


void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      // TODO(rossberg)
      break;

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      // TODO(rossberg)
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::LOOKUP:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
  // TODO(rossberg)
}


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ push(esi);  // The context is the first argument.
  __ Push(pairs);
  __ Push(Smi::FromInt(DeclareGlobalsFlags()));
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::DeclareModules(Handle<FixedArray> descriptions) {
  // Call the runtime to declare the modules.
  __ Push(descriptions);
  __ CallRuntime(Runtime::kDeclareModules, 1);
  // Return value is ignored.
}
void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ mov(edx, Operand(esp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ mov(ecx, edx);
      __ or_(ecx, eax);
      patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);

      __ cmp(edx, eax);
      __ j(not_equal, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ jmp(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic = CompareIC::GetUninitialized(isolate(), Token::EQ_STRICT);
    CallIC(ic, clause->CompareId());
    patch_site.EmitPatchInfo();

    Label skip;
    __ jmp(&skip, Label::kNear);
    PrepareForBailout(clause, TOS_REG);
    __ cmp(eax, isolate()->factory()->true_value());
    __ j(not_equal, &next_test);
    __ Drop(1);
    __ jmp(clause->body_target());
    __ bind(&skip);

    __ test(eax, eax);
    __ j(not_equal, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ jmp(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ jmp(nested_statement.break_label());
  } else {
    __ jmp(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}
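// Layout illustration: for   switch (x) { case a: s1; case b: s2; default: s3; }
// the code above first emits the chain of '===' tests (test a, test b), each
// jumping to its body label on success, then the default/exit dispatch, and
// only afterwards the bodies s1, s2, s3 in source order, so fall-through
// between cases is simply sequential execution within the body section.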
void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  int slot = stmt->ForInFeedbackSlot();

  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. If the object is null or undefined, skip
  // over the loop. See ECMA-262 version 5, section 12.6.4.
  VisitForAccumulatorValue(stmt->enumerable());
  __ cmp(eax, isolate()->factory()->undefined_value());
  __ j(equal, &exit);
  __ cmp(eax, isolate()->factory()->null_value());
  __ j(equal, &exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(eax, &convert, Label::kNear);
  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
  __ j(above_equal, &done_convert, Label::kNear);
  __ bind(&convert);
  __ push(eax);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);
  __ push(eax);

  // Check for proxies.
  Label call_runtime, use_cache, fixed_array;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CmpObjectType(eax, LAST_JS_PROXY_TYPE, ecx);
  __ j(below_equal, &call_runtime);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(&call_runtime);

  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  __ jmp(&use_cache, Label::kNear);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(eax);
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
         isolate()->factory()->meta_map());
  __ j(not_equal, &fixed_array);

  // We got a map in register eax. Get the enumeration cache from it.
  Label no_descriptors;
  __ bind(&use_cache);

  __ EnumLength(edx, eax);
  __ cmp(edx, Immediate(Smi::FromInt(0)));
  __ j(equal, &no_descriptors);

  __ LoadInstanceDescriptors(eax, ecx);
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheOffset));
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(eax);  // Map.
  __ push(ecx);  // Enumeration cache.
  __ push(edx);  // Number of valid entries for the map in the enum cache.
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.
  __ jmp(&loop);

  __ bind(&no_descriptors);
  __ add(esp, Immediate(kPointerSize));
  __ jmp(&exit);

  // We got a fixed array in register eax. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  // No need for a write barrier, we are storing a Smi in the feedback vector.
  __ LoadHeapObject(ebx, FeedbackVector());
  __ mov(FieldOperand(ebx, FixedArray::OffsetOfElementAt(slot)),
         Immediate(TypeFeedbackInfo::MegamorphicSentinel(isolate())));

  __ mov(ebx, Immediate(Smi::FromInt(1)));  // Smi indicates slow check
  __ mov(ecx, Operand(esp, 0 * kPointerSize));  // Get enumerated object
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CmpObjectType(ecx, LAST_JS_PROXY_TYPE, ecx);
  __ j(above, &non_proxy);
  __ Move(ebx, Immediate(Smi::FromInt(0)));  // Zero indicates proxy
  __ bind(&non_proxy);
  __ push(ebx);  // Smi
  __ push(eax);  // Array
  __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
  __ push(eax);  // Fixed array length (as smi).
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  __ mov(eax, Operand(esp, 0 * kPointerSize));  // Get the current index.
  __ cmp(eax, Operand(esp, 1 * kPointerSize));  // Compare to the array length.
  __ j(above_equal, loop_statement.break_label());

  // Get the current entry of the array into register ebx.
  __ mov(ebx, Operand(esp, 2 * kPointerSize));
  __ mov(ebx, FieldOperand(ebx, eax, times_2, FixedArray::kHeaderSize));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register edx.
  __ mov(edx, Operand(esp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ mov(ecx, Operand(esp, 4 * kPointerSize));
  __ cmp(edx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ j(equal, &update_each, Label::kNear);

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  DCHECK(Smi::FromInt(0) == 0);
  __ test(edx, edx);
  __ j(zero, &update_each);

  // Convert the entry to a string or null if it isn't a property
  // anymore. If the property has been removed while iterating, we
  // just skip it.
  __ push(ecx);  // Enumerable.
  __ push(ebx);  // Current entry.
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ test(eax, eax);
  __ j(equal, loop_statement.continue_label());
  __ mov(ebx, eax);

  // Update the 'each' property or variable from the possibly filtered
  // entry in register ebx.
  __ bind(&update_each);
  __ mov(result_register(), ebx);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each());
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing the
  // index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));

  EmitBackEdgeBookkeeping(stmt, &loop);
  __ jmp(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ add(esp, Immediate(5 * kPointerSize));

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}
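// During the loop body the five for-in slots sit on the stack like this (the
// 'add esp, 5 * kPointerSize' above pops exactly these):
//   esp + 0 * kPointerSize : current index (smi)
//   esp + 1 * kPointerSize : array length (smi)
//   esp + 2 * kPointerSize : fixed array of keys
//   esp + 3 * kPointerSize : expected map, or Smi 1/0 for the slow/proxy case
//   esp + 4 * kPointerSize : the enumerable object itself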
void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
  Comment cmnt(masm_, "[ ForOfStatement");
  SetStatementPosition(stmt);

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // var iterator = iterable[Symbol.iterator]();
  VisitForEffect(stmt->assign_iterator());

  // Loop entry.
  __ bind(loop_statement.continue_label());

  // result = iterator.next()
  VisitForEffect(stmt->next_result());

  // if (result.done) break;
  Label result_not_done;
  VisitForControl(stmt->result_done(),
                  loop_statement.break_label(),
                  &result_not_done,
                  &result_not_done);
  __ bind(&result_not_done);

  // each = result.value
  VisitForEffect(stmt->assign_each());

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Check stack before looping.
  PrepareForBailoutForId(stmt->BackEdgeId(), NO_REGISTERS);
  EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
  __ jmp(loop_statement.continue_label());

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}
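// The visitation above corresponds to this desugaring (per the inline
// comments):
//   iterator = iterable[Symbol.iterator]();
//   loop: result = iterator.next();
//         if (result.done) break;
//         each = result.value;
//         <body>; goto loop;
// with back-edge bookkeeping hooked onto the loop's continue label.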
void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(isolate(),
                            info->strict_mode(),
                            info->is_generator());
    __ mov(ebx, Immediate(info));
    __ CallStub(&stub);
  } else {
    __ push(esi);
    __ push(Immediate(info));
    __ push(Immediate(pretenure
                      ? isolate()->factory()->true_value()
                      : isolate()->factory()->false_value()));
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(eax);
}


void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register context = esi;
  Register temp = edx;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
               Immediate(0));
        __ j(not_equal, slow);
      }
      // Load next context in chain.
      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering esi.
      context = temp;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions. If we have reached an eval scope, we check
    // all extensions from this point.
    if (!s->outer_scope_calls_sloppy_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s != NULL && s->is_eval_scope()) {
    // Loop up the context chain. There is no frame effect so it is
    // safe to use raw labels here.
    Label next, fast;
    if (!context.is(temp)) {
      __ mov(temp, context);
    }
    __ bind(&next);
    // Terminate at native context.
    __ cmp(FieldOperand(temp, HeapObject::kMapOffset),
           Immediate(isolate()->factory()->native_context_map()));
    __ j(equal, &fast, Label::kNear);
    // Check that extension is NULL.
    __ cmp(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
    __ j(not_equal, slow);
    // Load next context in chain.
    __ mov(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
    __ jmp(&next);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use a global
  // load IC call.
  __ mov(LoadIC::ReceiverRegister(), GlobalObjectOperand());
  __ mov(LoadIC::NameRegister(), proxy->var()->name());
  if (FLAG_vector_ics) {
    __ mov(LoadIC::SlotRegister(),
           Immediate(Smi::FromInt(proxy->VariableFeedbackSlot())));
  }

  ContextualMode mode = (typeof_state == INSIDE_TYPEOF)
      ? NOT_CONTEXTUAL
      : CONTEXTUAL;

  CallLoadIC(mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  DCHECK(var->IsContextSlot());
  Register context = esi;
  Register temp = ebx;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_sloppy_eval()) {
        // Check that extension is NULL.
        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
               Immediate(0));
        __ j(not_equal, slow);
      }
      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering esi.
      context = temp;
    }
  }
  // Check that last extension is NULL.
  __ cmp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
  __ j(not_equal, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return an esi-based operand (the write barrier cannot be allowed to
  // destroy the esi register).
  return ContextOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(VariableProxy* proxy,
                                                  TypeofState typeof_state,
                                                  Label* slow,
                                                  Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  Variable* var = proxy->var();
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(proxy, typeof_state, slow);
    __ jmp(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ mov(eax, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == LET || local->mode() == CONST ||
        local->mode() == CONST_LEGACY) {
      __ cmp(eax, isolate()->factory()->the_hole_value());
      __ j(not_equal, done);
      if (local->mode() == CONST_LEGACY) {
        __ mov(eax, isolate()->factory()->undefined_value());
      } else {  // LET || CONST
        __ push(Immediate(var->name()));
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
      }
    }
    __ jmp(done);
  }
}
void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "[ Global variable");
      __ mov(LoadIC::ReceiverRegister(), GlobalObjectOperand());
      __ mov(LoadIC::NameRegister(), var->name());
      if (FLAG_vector_ics) {
        __ mov(LoadIC::SlotRegister(),
               Immediate(Smi::FromInt(proxy->VariableFeedbackSlot())));
      }
      CallLoadIC(CONTEXTUAL);
      context()->Plug(eax);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot() ? "[ Context variable"
                                               : "[ Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP.
        // always holds.
        DCHECK(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The check
        // can be skipped in the following situation: we have a LET or CONST
        // binding in harmony mode, both the Variable and the VariableProxy have
        // the same declaration scope (i.e. they are both in global code, in the
        // same function or in the same eval code) and the VariableProxy is in
        // the source physically located after the initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          DCHECK(var->initializer_position() != RelocInfo::kNoPosition);
          DCHECK(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST_LEGACY &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          Label done;
          GetVar(eax, var);
          __ cmp(eax, isolate()->factory()->the_hole_value());
          __ j(not_equal, &done, Label::kNear);
          if (var->mode() == LET || var->mode() == CONST) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            __ push(Immediate(var->name()));
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
          } else {
            // Uninitialized const bindings outside of harmony mode are unholed.
            DCHECK(var->mode() == CONST_LEGACY);
            __ mov(eax, isolate()->factory()->undefined_value());
          }
          __ bind(&done);
          context()->Plug(eax);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ Lookup variable");
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
      __ bind(&slow);
      __ push(esi);  // Context.
      __ push(Immediate(var->name()));
      __ CallRuntime(Runtime::kLoadLookupSlot, 2);
      __ bind(&done);
      context()->Plug(eax);
      break;
    }
  }
}
void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // edi = JS function.
  // ecx = literals array.
  // ebx = regexp literal.
  // eax = regexp literal clone.
  __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ mov(ebx, FieldOperand(ecx, literal_offset));
  __ cmp(ebx, isolate()->factory()->undefined_value());
  __ j(not_equal, &materialized, Label::kNear);

  // Create regexp literal using runtime function
  // Result will be in eax.
  __ push(ecx);
  __ push(Immediate(Smi::FromInt(expr->literal_index())));
  __ push(Immediate(expr->pattern()));
  __ push(Immediate(expr->flags()));
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ mov(ebx, eax);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ Allocate(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ push(ebx);
  __ push(Immediate(Smi::FromInt(size)));
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ pop(ebx);

  __ bind(&allocated);
  // Copy the content into the newly allocated memory.
  // (Unroll copy loop once for better throughput).
  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
    __ mov(edx, FieldOperand(ebx, i));
    __ mov(ecx, FieldOperand(ebx, i + kPointerSize));
    __ mov(FieldOperand(eax, i), edx);
    __ mov(FieldOperand(eax, i + kPointerSize), ecx);
  }
  if ((size % (2 * kPointerSize)) != 0) {
    __ mov(edx, FieldOperand(ebx, size - kPointerSize));
    __ mov(FieldOperand(eax, size - kPointerSize), edx);
  }
  context()->Plug(eax);
}
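// Caching behavior, in short: the literals-array slot for this regexp starts
// out undefined; the first execution calls Runtime::kMaterializeRegExpLiteral
// to create and remember the JSRegExp, and every later execution just
// shallow-copies the cached object (the unrolled field copy above), so each
// evaluation of the literal yields a fresh object sharing the compiled
// pattern.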
void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ push(Immediate(isolate()->factory()->null_value()));
  } else {
    VisitForStackValue(expression);
  }
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");

  expr->BuildConstantProperties(isolate());
  Handle<FixedArray> constant_properties = expr->constant_properties();
  int flags = expr->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  flags |= expr->has_function()
      ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
  int properties_count = constant_properties->length() / 2;
  if (expr->may_store_doubles() || expr->depth() > 1 ||
      masm()->serializer_enabled() ||
      flags != ObjectLiteral::kFastElements ||
      properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
    __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ push(FieldOperand(edi, JSFunction::kLiteralsOffset));
    __ push(Immediate(Smi::FromInt(expr->literal_index())));
    __ push(Immediate(constant_properties));
    __ push(Immediate(Smi::FromInt(flags)));
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else {
    __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ mov(eax, FieldOperand(edi, JSFunction::kLiteralsOffset));
    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
    __ mov(ecx, Immediate(constant_properties));
    __ mov(edx, Immediate(Smi::FromInt(flags)));
    FastCloneShallowObjectStub stub(isolate(), properties_count);
    __ CallStub(&stub);
  }

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in eax.
  bool result_saved = false;

  // Mark all computed expressions that are bound to a key that
  // is shadowed by a later occurrence of the same key. For the
  // marked expressions, no store code is emitted.
  expr->CalculateEmitStore(zone());

  AccessorTable accessor_table(zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(eax);  // Save result on the stack
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        DCHECK(!CompileTimeValue::IsCompileTimeValue(value));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        if (key->value()->IsInternalizedString()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            DCHECK(StoreIC::ValueRegister().is(eax));
            __ mov(StoreIC::NameRegister(), Immediate(key->value()));
            __ mov(StoreIC::ReceiverRegister(), Operand(esp, 0));
            CallStoreIC(key->LiteralFeedbackId());
            PrepareForBailoutForId(key->id(), NO_REGISTERS);
          } else {
            VisitForEffect(value);
          }
          break;
        }
        __ push(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ push(Immediate(Smi::FromInt(SLOPPY)));  // Language mode
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::PROTOTYPE:
        __ push(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ CallRuntime(Runtime::kSetPrototype, 2);
        } else {
          __ Drop(2);
        }
        break;
      case ObjectLiteral::Property::GETTER:
        accessor_table.lookup(key)->second->getter = value;
        break;
      case ObjectLiteral::Property::SETTER:
        accessor_table.lookup(key)->second->setter = value;
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ push(Operand(esp, 0));  // Duplicate receiver.
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ push(Immediate(Smi::FromInt(NONE)));
    __ CallRuntime(Runtime::kDefineAccessorPropertyUnchecked, 5);
  }

  if (expr->has_function()) {
    DCHECK(result_saved);
    __ push(Operand(esp, 0));
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}
void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  expr->BuildConstantElements(isolate());
  int flags = expr->depth() == 1
      ? ArrayLiteral::kShallowElements
      : ArrayLiteral::kNoFlags;

  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();
  Handle<FixedArray> constant_elements = expr->constant_elements();
  DCHECK_EQ(2, constant_elements->length());
  ElementsKind constant_elements_kind =
      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
  bool has_constant_fast_elements =
      IsFastObjectElementsKind(constant_elements_kind);
  Handle<FixedArrayBase> constant_elements_values(
      FixedArrayBase::cast(constant_elements->get(1)));

  AllocationSiteMode allocation_site_mode = TRACK_ALLOCATION_SITE;
  if (has_constant_fast_elements && !FLAG_allocation_site_pretenuring) {
    // If the only customer of allocation sites is transitioning, then
    // we can turn it off if we don't have anywhere else to transition to.
    allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
  }

  if (expr->depth() > 1 || length > JSObject::kInitialMaxFastElementArray) {
    __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
    __ push(Immediate(Smi::FromInt(expr->literal_index())));
    __ push(Immediate(constant_elements));
    __ push(Immediate(Smi::FromInt(flags)));
    __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
  } else {
    __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    __ mov(eax, FieldOperand(ebx, JSFunction::kLiteralsOffset));
    __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
    __ mov(ecx, Immediate(constant_elements));
    FastCloneShallowArrayStub stub(isolate(), allocation_site_mode);
    __ CallStub(&stub);
  }

  bool result_saved = false;  // Is the result saved to the stack?

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;

    if (!result_saved) {
      __ push(eax);  // array literal.
      __ push(Immediate(Smi::FromInt(expr->literal_index())));
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);

    if (IsFastObjectElementsKind(constant_elements_kind)) {
      // Fast-case array literal with ElementsKind of FAST_*_ELEMENTS, they
      // cannot transition and don't need to call the runtime stub.
      int offset = FixedArray::kHeaderSize + (i * kPointerSize);
      __ mov(ebx, Operand(esp, kPointerSize));  // Copy of array literal.
      __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
      // Store the subexpression value in the array's elements.
      __ mov(FieldOperand(ebx, offset), result_register());
      // Update the write barrier for the array store.
      __ RecordWriteField(ebx, offset, result_register(), ecx,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          INLINE_SMI_CHECK);
    } else {
      // Store the subexpression value in the array's elements.
      __ mov(ecx, Immediate(Smi::FromInt(i)));
      StoreArrayLiteralElementStub stub(isolate());
      __ CallStub(&stub);
    }

    PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
  }

  if (result_saved) {
    __ add(esp, Immediate(kPointerSize));  // literal index
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}
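// Design note on the store fast path above: with a fast object ElementsKind
// the clone's backing store can be written directly (plus a write barrier),
// because such stores cannot trigger an elements-kind transition; otherwise
// StoreArrayLiteralElementStub handles possible transitions.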
1792 void FullCodeGenerator::VisitAssignment(Assignment* expr) {
1793 DCHECK(expr->target()->IsValidReferenceExpression());
1795 Comment cmnt(masm_, "[ Assignment");
1797 // Left-hand side can only be a property, a global or a (parameter or local)
1799 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1800 LhsKind assign_type = VARIABLE;
1801 Property* property = expr->target()->AsProperty();
1802 if (property != NULL) {
1803 assign_type = (property->key()->IsPropertyName())
1808 // Evaluate LHS expression.
1809 switch (assign_type) {
1811 // Nothing to do here.
1813 case NAMED_PROPERTY:
1814 if (expr->is_compound()) {
1815 // We need the receiver both on the stack and in the register.
1816 VisitForStackValue(property->obj());
1817 __ mov(LoadIC::ReceiverRegister(), Operand(esp, 0));
1819 VisitForStackValue(property->obj());
1822 case KEYED_PROPERTY: {
1823 if (expr->is_compound()) {
1824 VisitForStackValue(property->obj());
1825 VisitForStackValue(property->key());
1826 __ mov(LoadIC::ReceiverRegister(), Operand(esp, kPointerSize));
1827 __ mov(LoadIC::NameRegister(), Operand(esp, 0));
1829 VisitForStackValue(property->obj());
1830 VisitForStackValue(property->key());
1836 // For compound assignments we need another deoptimization point after the
1837 // variable/property load.
1838 if (expr->is_compound()) {
1839 AccumulatorValueContext result_context(this);
1840 { AccumulatorValueContext left_operand_context(this);
1841 switch (assign_type) {
1842 case VARIABLE:
1843 EmitVariableLoad(expr->target()->AsVariableProxy());
1844 PrepareForBailout(expr->target(), TOS_REG);
1845 break;
1846 case NAMED_PROPERTY:
1847 EmitNamedPropertyLoad(property);
1848 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1849 break;
1850 case KEYED_PROPERTY:
1851 EmitKeyedPropertyLoad(property);
1852 PrepareForBailoutForId(property->LoadId(), TOS_REG);
1853 break;
1854 }
1855 }
1857 Token::Value op = expr->binary_op();
1858 __ push(eax); // Left operand goes on the stack.
1859 VisitForAccumulatorValue(expr->value());
1861 OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
1862 ? OVERWRITE_RIGHT
1863 : NO_OVERWRITE;
1864 SetSourcePosition(expr->position() + 1);
1865 if (ShouldInlineSmiCase(op)) {
1866 EmitInlineSmiBinaryOp(expr->binary_operation(),
1867 op,
1868 mode,
1869 expr->target(),
1870 expr->value());
1871 } else {
1872 EmitBinaryOp(expr->binary_operation(), op, mode);
1873 }
1875 // Deoptimization point in case the binary operation may have side effects.
1876 PrepareForBailout(expr->binary_operation(), TOS_REG);
1877 } else {
1878 VisitForAccumulatorValue(expr->value());
1879 }
1881 // Record source position before possible IC call.
1882 SetSourcePosition(expr->position());
1885 switch (assign_type) {
1886 case VARIABLE:
1887 EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
1888 expr->op());
1889 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1890 context()->Plug(eax);
1891 break;
1892 case NAMED_PROPERTY:
1893 EmitNamedPropertyAssignment(expr);
1894 break;
1895 case KEYED_PROPERTY:
1896 EmitKeyedPropertyAssignment(expr);
1897 break;
1898 }
1899 }
1902 void FullCodeGenerator::VisitYield(Yield* expr) {
1903 Comment cmnt(masm_, "[ Yield");
1904 // Evaluate yielded value first; the initial iterator definition depends on
1905 // this. It stays on the stack while we update the iterator.
1906 VisitForStackValue(expr->expression());
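// Roughly: the implicit suspend on function entry is INITIAL, an explicit
// 'yield v' is SUSPEND, the generator's closing return is FINAL, and
// 'yield* g' is DELEGATING.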
1908 switch (expr->yield_kind()) {
1909 case Yield::SUSPEND:
1910 // Pop value from top-of-stack slot; box result into result register.
1911 EmitCreateIteratorResult(false);
1912 __ push(result_register());
1913 // Fall through.
1914 case Yield::INITIAL: {
1915 Label suspend, continuation, post_runtime, resume;
1917 __ jmp(&suspend);
1919 __ bind(&continuation);
1920 __ jmp(&resume);
1922 __ bind(&suspend);
1923 VisitForAccumulatorValue(expr->generator_object());
1924 DCHECK(continuation.pos() > 0 && Smi::IsValid(continuation.pos()));
1925 __ mov(FieldOperand(eax, JSGeneratorObject::kContinuationOffset),
1926 Immediate(Smi::FromInt(continuation.pos())));
1927 __ mov(FieldOperand(eax, JSGeneratorObject::kContextOffset), esi);
1928 __ mov(ecx, esi);
1929 __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
1930 kDontSaveFPRegs);
1931 __ lea(ebx, Operand(ebp, StandardFrameConstants::kExpressionsOffset));
1932 __ cmp(esp, ebx);
1933 __ j(equal, &post_runtime);
1934 __ push(eax); // generator object
1935 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
1936 __ mov(context_register(),
1937 Operand(ebp, StandardFrameConstants::kContextOffset));
1938 __ bind(&post_runtime);
1939 __ pop(result_register());
1940 EmitReturnSequence();
1942 __ bind(&resume);
1943 context()->Plug(result_register());
1944 break;
1945 }
1947 case Yield::FINAL: {
1948 VisitForAccumulatorValue(expr->generator_object());
1949 __ mov(FieldOperand(result_register(),
1950 JSGeneratorObject::kContinuationOffset),
1951 Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorClosed)));
1952 // Pop value from top-of-stack slot, box result into result register.
1953 EmitCreateIteratorResult(true);
1954 EmitUnwindBeforeReturn();
1955 EmitReturnSequence();
1956 break;
1957 }
1959 case Yield::DELEGATING: {
1960 VisitForStackValue(expr->generator_object());
1962 // Initial stack layout is as follows:
1963 // [sp + 1 * kPointerSize] iter
1964 // [sp + 0 * kPointerSize] g
1966 Label l_catch, l_try, l_suspend, l_continuation, l_resume;
1967 Label l_next, l_call, l_loop;
1968 Register load_receiver = LoadIC::ReceiverRegister();
1969 Register load_name = LoadIC::NameRegister();
1971 // Initial send value is undefined.
1972 __ mov(eax, isolate()->factory()->undefined_value());
1973 __ jmp(&l_next);
1975 // catch (e) { receiver = iter; f = 'throw'; arg = e; goto l_call; }
1976 __ bind(&l_catch);
1977 handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
1978 __ mov(load_name, isolate()->factory()->throw_string()); // "throw"
1979 __ push(load_name); // "throw"
1980 __ push(Operand(esp, 2 * kPointerSize)); // iter
1981 __ push(eax); // exception
1982 __ jmp(&l_call);
1984 // try { received = %yield result }
1985 // Shuffle the received result above a try handler and yield it without
1986 // re-boxing.
1987 __ bind(&l_try);
1988 __ pop(eax); // result
1989 __ PushTryHandler(StackHandler::CATCH, expr->index());
1990 const int handler_size = StackHandlerConstants::kSize;
1991 __ push(eax); // result
1992 __ jmp(&l_suspend);
1993 __ bind(&l_continuation);
1994 __ jmp(&l_resume);
1995 __ bind(&l_suspend);
1996 const int generator_object_depth = kPointerSize + handler_size;
1997 __ mov(eax, Operand(esp, generator_object_depth));
1999 DCHECK(l_continuation.pos() > 0 && Smi::IsValid(l_continuation.pos()));
2000 __ mov(FieldOperand(eax, JSGeneratorObject::kContinuationOffset),
2001 Immediate(Smi::FromInt(l_continuation.pos())));
2002 __ mov(FieldOperand(eax, JSGeneratorObject::kContextOffset), esi);
2003 __ mov(ecx, esi);
2004 __ RecordWriteField(eax, JSGeneratorObject::kContextOffset, ecx, edx,
2005 kDontSaveFPRegs);
2006 __ CallRuntime(Runtime::kSuspendJSGeneratorObject, 1);
2007 __ mov(context_register(),
2008 Operand(ebp, StandardFrameConstants::kContextOffset));
2009 __ pop(eax); // result
2010 EmitReturnSequence();
2011 __ bind(&l_resume); // received in eax
2012 __ PopTryHandler();
2014 // receiver = iter; f = iter.next; arg = received;
2016 __ bind(&l_next);
2017 __ mov(load_name, isolate()->factory()->next_string());
2018 __ push(load_name); // "next"
2019 __ push(Operand(esp, 2 * kPointerSize)); // iter
2020 __ push(eax); // received
2022 // result = receiver[f](arg);
2023 __ bind(&l_call);
2024 __ mov(load_receiver, Operand(esp, kPointerSize));
2025 if (FLAG_vector_ics) {
2026 __ mov(LoadIC::SlotRegister(),
2027 Immediate(Smi::FromInt(expr->KeyedLoadFeedbackSlot())));
2028 }
2029 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2030 CallIC(ic, TypeFeedbackId::None());
2031 __ mov(edi, eax);
2032 __ mov(Operand(esp, 2 * kPointerSize), edi);
2033 CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
2034 __ CallStub(&stub);
2036 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2037 __ Drop(1); // The function is still on the stack; drop it.
2039 // if (!result.done) goto l_try;
2040 __ bind(&l_loop);
2041 __ push(eax); // save result
2042 __ Move(load_receiver, eax); // result
2043 __ mov(load_name,
2044 isolate()->factory()->done_string()); // "done"
2045 if (FLAG_vector_ics) {
2046 __ mov(LoadIC::SlotRegister(),
2047 Immediate(Smi::FromInt(expr->DoneFeedbackSlot())));
2048 }
2049 CallLoadIC(NOT_CONTEXTUAL); // result.done in eax
2050 Handle<Code> bool_ic = ToBooleanStub::GetUninitialized(isolate());
2051 CallIC(bool_ic);
2052 __ test(eax, eax);
2053 __ j(zero, &l_try);
2055 // result.value
2056 __ pop(load_receiver); // result
2057 __ mov(load_name,
2058 isolate()->factory()->value_string()); // "value"
2059 if (FLAG_vector_ics) {
2060 __ mov(LoadIC::SlotRegister(),
2061 Immediate(Smi::FromInt(expr->ValueFeedbackSlot())));
2062 }
2063 CallLoadIC(NOT_CONTEXTUAL); // result.value in eax
2064 context()->DropAndPlug(2, eax); // drop iter and g
2065 break;
2066 }
2067 }
2068 }
2071 void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
2072 Expression *value,
2073 JSGeneratorObject::ResumeMode resume_mode) {
2074 // The value stays in eax, and is ultimately read by the resumed generator, as
2075 // if CallRuntime(Runtime::kSuspendJSGeneratorObject) returned it. Or it
2076 // is read to throw the value when the resumed generator is already closed.
2077 // ebx will hold the generator object until the activation has been resumed.
2078 VisitForStackValue(generator);
2079 VisitForAccumulatorValue(value);
2080 __ pop(ebx);
2082 // Check generator state.
2083 Label wrong_state, closed_state, done;
2084 STATIC_ASSERT(JSGeneratorObject::kGeneratorExecuting < 0);
2085 STATIC_ASSERT(JSGeneratorObject::kGeneratorClosed == 0);
2086 __ cmp(FieldOperand(ebx, JSGeneratorObject::kContinuationOffset),
2087 Immediate(Smi::FromInt(0)));
2088 __ j(equal, &closed_state);
2089 __ j(less, &wrong_state);
2091 // Load suspended function and context.
2092 __ mov(esi, FieldOperand(ebx, JSGeneratorObject::kContextOffset));
2093 __ mov(edi, FieldOperand(ebx, JSGeneratorObject::kFunctionOffset));
2096 __ push(FieldOperand(ebx, JSGeneratorObject::kReceiverOffset));
2098 // Push holes for arguments to generator function.
2099 __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2100 __ mov(edx,
2101 FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
2102 __ mov(ecx, isolate()->factory()->the_hole_value());
2103 Label push_argument_holes, push_frame;
2104 __ bind(&push_argument_holes);
2105 __ sub(edx, Immediate(Smi::FromInt(1)));
2106 __ j(carry, &push_frame);
2107 __ push(ecx);
2108 __ jmp(&push_argument_holes);
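// The actual arguments of the suspended activation are not stored in the
// generator object, so the rebuilt frame receives one hole per formal
// parameter instead.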
2110 // Enter a new JavaScript frame, and initialize its slots as they were when
2111 // the generator was suspended.
2112 Label resume_frame, done;
2113 __ bind(&push_frame);
2114 __ call(&resume_frame);
2115 __ jmp(&done);
2116 __ bind(&resume_frame);
2117 __ push(ebp); // Caller's frame pointer.
2118 __ mov(ebp, esp);
2119 __ push(esi); // Callee's context.
2120 __ push(edi); // Callee's JS Function.
2122 // Load the operand stack size.
2123 __ mov(edx, FieldOperand(ebx, JSGeneratorObject::kOperandStackOffset));
2124 __ mov(edx, FieldOperand(edx, FixedArray::kLengthOffset));
2125 __ SmiUntag(edx);
2127 // If we are sending a value and there is no operand stack, we can jump back
2128 // in directly.
2129 if (resume_mode == JSGeneratorObject::NEXT) {
2130 Label slow_resume;
2131 __ cmp(edx, Immediate(0));
2132 __ j(not_zero, &slow_resume);
2133 __ mov(edx, FieldOperand(edi, JSFunction::kCodeEntryOffset));
2134 __ mov(ecx, FieldOperand(ebx, JSGeneratorObject::kContinuationOffset));
2135 __ SmiUntag(ecx);
2136 __ lea(edx, Operand(edx, ecx, times_1, 0));
2137 __ mov(FieldOperand(ebx, JSGeneratorObject::kContinuationOffset),
2138 Immediate(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
2139 __ jmp(edx);
2140 __ bind(&slow_resume);
2141 }
2143 // Otherwise, we push holes for the operand stack and call the runtime to fix
2144 // up the stack and the handlers.
2145 Label push_operand_holes, call_resume;
2146 __ bind(&push_operand_holes);
2147 __ sub(edx, Immediate(1));
2148 __ j(carry, &call_resume);
2149 __ push(ecx);
2150 __ jmp(&push_operand_holes);
2151 __ bind(&call_resume);
2152 __ push(ebx);
2153 __ push(result_register());
2154 __ Push(Smi::FromInt(resume_mode));
2155 __ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
2156 // Not reached: the runtime call returns elsewhere.
2157 __ Abort(kGeneratorFailedToResume);
2159 // Reach here when generator is closed.
2160 __ bind(&closed_state);
2161 if (resume_mode == JSGeneratorObject::NEXT) {
2162 // Return completed iterator result when generator is closed.
2163 __ push(Immediate(isolate()->factory()->undefined_value()));
2164 // Pop value from top-of-stack slot; box result into result register.
2165 EmitCreateIteratorResult(true);
2166 } else {
2167 // Throw the provided value.
2168 __ push(eax);
2169 __ CallRuntime(Runtime::kThrow, 1);
2170 }
2171 __ jmp(&done);
2173 // Throw error if we attempt to operate on a running generator.
2174 __ bind(&wrong_state);
2175 __ push(ebx);
2176 __ CallRuntime(Runtime::kThrowGeneratorStateError, 1);
2178 __ bind(&done);
2179 context()->Plug(result_register());
2180 }
2183 void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
2184 Label gc_required;
2185 Label allocated;
2187 Handle<Map> map(isolate()->native_context()->iterator_result_map());
2189 __ Allocate(map->instance_size(), eax, ecx, edx, &gc_required, TAG_OBJECT);
2190 __ jmp(&allocated);
2192 __ bind(&gc_required);
2193 __ Push(Smi::FromInt(map->instance_size()));
2194 __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
2195 __ mov(context_register(),
2196 Operand(ebp, StandardFrameConstants::kContextOffset));
2198 __ bind(&allocated);
2199 __ mov(ebx, map);
2200 __ pop(ecx);
2201 __ mov(edx, isolate()->factory()->ToBoolean(done));
2202 DCHECK_EQ(map->instance_size(), 5 * kPointerSize);
2203 __ mov(FieldOperand(eax, HeapObject::kMapOffset), ebx);
2204 __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
2205 isolate()->factory()->empty_fixed_array());
2206 __ mov(FieldOperand(eax, JSObject::kElementsOffset),
2207 isolate()->factory()->empty_fixed_array());
2208 __ mov(FieldOperand(eax, JSGeneratorObject::kResultValuePropertyOffset), ecx);
2209 __ mov(FieldOperand(eax, JSGeneratorObject::kResultDonePropertyOffset), edx);
2211 // Only the value field needs a write barrier, as the other values are in the
2212 // root set.
2213 __ RecordWriteField(eax, JSGeneratorObject::kResultValuePropertyOffset,
2214 ecx, edx, kDontSaveFPRegs);
2215 }
2218 void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
2219 SetSourcePosition(prop->position());
2220 Literal* key = prop->key()->AsLiteral();
2221 DCHECK(!key->value()->IsSmi());
2222 __ mov(LoadIC::NameRegister(), Immediate(key->value()));
2223 if (FLAG_vector_ics) {
2224 __ mov(LoadIC::SlotRegister(),
2225 Immediate(Smi::FromInt(prop->PropertyFeedbackSlot())));
2226 CallLoadIC(NOT_CONTEXTUAL);
2227 } else {
2228 CallLoadIC(NOT_CONTEXTUAL, prop->PropertyFeedbackId());
2229 }
2230 }
2233 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
2234 SetSourcePosition(prop->position());
2235 Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
2236 if (FLAG_vector_ics) {
2237 __ mov(LoadIC::SlotRegister(),
2238 Immediate(Smi::FromInt(prop->PropertyFeedbackSlot())));
2239 CallIC(ic);
2240 } else {
2241 CallIC(ic, prop->PropertyFeedbackId());
2242 }
2243 }
2246 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
2247 Token::Value op,
2248 OverwriteMode mode,
2249 Expression* left,
2250 Expression* right) {
2251 // Do combined smi check of the operands. Left operand is on the
2252 // stack. Right operand is in eax.
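// Both operands are smis exactly when the bitwise OR of the two still has
// a clear tag bit (bit 0), so one test covers both registers; e.g.
// smi 2 (0b100) | smi 3 (0b110) keeps bit 0 clear, while any heap pointer
// contributes a set tag bit.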
2253 Label smi_case, done, stub_call;
2254 __ pop(edx);
2255 __ mov(ecx, eax);
2256 __ or_(eax, edx);
2257 JumpPatchSite patch_site(masm_);
2258 patch_site.EmitJumpIfSmi(eax, &smi_case, Label::kNear);
2260 __ bind(&stub_call);
2261 __ mov(eax, ecx);
2262 BinaryOpICStub stub(isolate(), op, mode);
2263 CallIC(stub.GetCode(), expr->BinaryOperationFeedbackId());
2264 patch_site.EmitPatchInfo();
2265 __ jmp(&done, Label::kNear);
2267 // smi case
2268 __ bind(&smi_case);
2269 __ mov(eax, edx); // Copy left operand in case of a stub call.
2271 switch (op) {
2272 case Token::SAR:
2273 __ SmiUntag(ecx);
2274 __ sar_cl(eax); // No checks of result necessary.
2275 __ and_(eax, Immediate(~kSmiTagMask));
2276 break;
2277 case Token::SHL: {
2278 Label result_ok;
2279 __ SmiUntag(eax);
2280 __ SmiUntag(ecx);
2281 __ shl_cl(eax);
2282 // Check that the *signed* result fits in a smi.
2283 __ cmp(eax, 0xc0000000);
2284 __ j(positive, &result_ok);
2285 __ SmiTag(eax);
2286 __ jmp(&stub_call);
2287 __ bind(&result_ok);
2288 __ SmiTag(eax);
2289 break;
2290 }
2291 case Token::SHR: {
2292 Label result_ok;
2293 __ SmiUntag(eax);
2294 __ SmiUntag(ecx);
2295 __ shr_cl(eax);
2296 __ test(eax, Immediate(0xc0000000));
2297 __ j(zero, &result_ok);
2298 __ SmiTag(eax);
2299 __ jmp(&stub_call);
2300 __ bind(&result_ok);
2301 __ SmiTag(eax);
2302 break;
2303 }
2304 case Token::ADD:
2305 __ add(eax, ecx);
2306 __ j(overflow, &stub_call);
2307 break;
2308 case Token::SUB:
2309 __ sub(eax, ecx);
2310 __ j(overflow, &stub_call);
2311 break;
2312 case Token::MUL: {
2313 __ SmiUntag(eax);
2314 __ imul(eax, ecx);
2315 __ j(overflow, &stub_call);
2316 __ test(eax, eax);
2317 __ j(not_zero, &done, Label::kNear);
2318 __ mov(ebx, edx);
2319 __ or_(ebx, ecx);
2320 __ j(negative, &stub_call);
2321 break;
2322 }
2323 case Token::BIT_OR:
2324 __ or_(eax, ecx);
2325 break;
2326 case Token::BIT_AND:
2327 __ and_(eax, ecx);
2328 break;
2329 case Token::BIT_XOR:
2330 __ xor_(eax, ecx);
2331 break;
2332 default:
2333 UNREACHABLE();
2334 }
2336 __ bind(&done);
2337 context()->Plug(eax);
2338 }
2341 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
2342 Token::Value op,
2343 OverwriteMode mode) {
2344 __ pop(edx);
2345 BinaryOpICStub stub(isolate(), op, mode);
2346 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
2347 CallIC(stub.GetCode(), expr->BinaryOperationFeedbackId());
2348 patch_site.EmitPatchInfo();
2349 context()->Plug(eax);
2350 }
2353 void FullCodeGenerator::EmitAssignment(Expression* expr) {
2354 DCHECK(expr->IsValidReferenceExpression());
2356 // Left-hand side can only be a property, a global or a (parameter or local)
2357 // slot.
2358 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
2359 LhsKind assign_type = VARIABLE;
2360 Property* prop = expr->AsProperty();
2361 if (prop != NULL) {
2362 assign_type = (prop->key()->IsPropertyName())
2363 ? NAMED_PROPERTY
2364 : KEYED_PROPERTY;
2365 }
2367 switch (assign_type) {
2368 case VARIABLE: {
2369 Variable* var = expr->AsVariableProxy()->var();
2370 EffectContext context(this);
2371 EmitVariableAssignment(var, Token::ASSIGN);
2372 break;
2373 }
2374 case NAMED_PROPERTY: {
2375 __ push(eax); // Preserve value.
2376 VisitForAccumulatorValue(prop->obj());
2377 __ Move(StoreIC::ReceiverRegister(), eax);
2378 __ pop(StoreIC::ValueRegister()); // Restore value.
2379 __ mov(StoreIC::NameRegister(), prop->key()->AsLiteral()->value());
2380 CallStoreIC();
2381 break;
2382 }
2383 case KEYED_PROPERTY: {
2384 __ push(eax); // Preserve value.
2385 VisitForStackValue(prop->obj());
2386 VisitForAccumulatorValue(prop->key());
2387 __ Move(KeyedStoreIC::NameRegister(), eax);
2388 __ pop(KeyedStoreIC::ReceiverRegister()); // Receiver.
2389 __ pop(KeyedStoreIC::ValueRegister()); // Restore value.
2390 Handle<Code> ic = strict_mode() == SLOPPY
2391 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2392 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2393 CallIC(ic);
2394 break;
2395 }
2396 }
2397 context()->Plug(eax);
2398 }
2401 void FullCodeGenerator::EmitStoreToStackLocalOrContextSlot(
2402 Variable* var, MemOperand location) {
2403 __ mov(location, eax);
2404 if (var->IsContextSlot()) {
2405 __ mov(edx, eax);
2406 int offset = Context::SlotOffset(var->index());
2407 __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
2408 }
2409 }
2412 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
2413 Token::Value op) {
2414 if (var->IsUnallocated()) {
2415 // Global var, const, or let.
2416 __ mov(StoreIC::NameRegister(), var->name());
2417 __ mov(StoreIC::ReceiverRegister(), GlobalObjectOperand());
2418 CallStoreIC();
2420 } else if (op == Token::INIT_CONST_LEGACY) {
2421 // Const initializers need a write barrier.
2422 DCHECK(!var->IsParameter()); // No const parameters.
2423 if (var->IsLookupSlot()) {
2424 __ push(eax);
2425 __ push(esi);
2426 __ push(Immediate(var->name()));
2427 __ CallRuntime(Runtime::kInitializeLegacyConstLookupSlot, 3);
2428 } else {
2429 DCHECK(var->IsStackLocal() || var->IsContextSlot());
2430 Label skip;
2431 MemOperand location = VarOperand(var, ecx);
2432 __ mov(edx, location);
2433 __ cmp(edx, isolate()->factory()->the_hole_value());
2434 __ j(not_equal, &skip, Label::kNear);
2435 EmitStoreToStackLocalOrContextSlot(var, location);
2436 __ bind(&skip);
2437 }
2439 } else if (var->mode() == LET && op != Token::INIT_LET) {
2440 // Non-initializing assignment to let variable needs a write barrier.
2441 DCHECK(!var->IsLookupSlot());
2442 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2443 Label assign;
2444 MemOperand location = VarOperand(var, ecx);
2445 __ mov(edx, location);
2446 __ cmp(edx, isolate()->factory()->the_hole_value());
2447 __ j(not_equal, &assign, Label::kNear);
2448 __ push(Immediate(var->name()));
2449 __ CallRuntime(Runtime::kThrowReferenceError, 1);
2450 __ bind(&assign);
2451 EmitStoreToStackLocalOrContextSlot(var, location);
2453 } else if (!var->is_const_mode() || op == Token::INIT_CONST) {
2454 if (var->IsLookupSlot()) {
2455 // Assignment to var.
2456 __ push(eax); // Value.
2457 __ push(esi); // Context.
2458 __ push(Immediate(var->name()));
2459 __ push(Immediate(Smi::FromInt(strict_mode())));
2460 __ CallRuntime(Runtime::kStoreLookupSlot, 4);
2461 } else {
2462 // Assignment to var or initializing assignment to let/const in harmony
2463 // mode.
2464 DCHECK(var->IsStackAllocated() || var->IsContextSlot());
2465 MemOperand location = VarOperand(var, ecx);
2466 if (generate_debug_code_ && op == Token::INIT_LET) {
2467 // Check for an uninitialized let binding.
2468 __ mov(edx, location);
2469 __ cmp(edx, isolate()->factory()->the_hole_value());
2470 __ Check(equal, kLetBindingReInitialization);
2471 }
2472 EmitStoreToStackLocalOrContextSlot(var, location);
2473 }
2474 }
2475 // Non-initializing assignments to consts are ignored.
2476 }
2479 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
2480 // Assignment to a property, using a named store IC.
2481 // eax : value
2482 // esp[0] : receiver
2484 Property* prop = expr->target()->AsProperty();
2485 DCHECK(prop != NULL);
2486 DCHECK(prop->key()->IsLiteral());
2488 // Record source code position before IC call.
2489 SetSourcePosition(expr->position());
2490 __ mov(StoreIC::NameRegister(), prop->key()->AsLiteral()->value());
2491 __ pop(StoreIC::ReceiverRegister());
2492 CallStoreIC(expr->AssignmentFeedbackId());
2493 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2494 context()->Plug(eax);
2498 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
2499 // Assignment to a property, using a keyed store IC.
2500 // eax : value
2501 // esp[0] : key
2502 // esp[kPointerSize] : receiver
2504 __ pop(KeyedStoreIC::NameRegister()); // Key.
2505 __ pop(KeyedStoreIC::ReceiverRegister());
2506 DCHECK(KeyedStoreIC::ValueRegister().is(eax));
2507 // Record source code position before IC call.
2508 SetSourcePosition(expr->position());
2509 Handle<Code> ic = strict_mode() == SLOPPY
2510 ? isolate()->builtins()->KeyedStoreIC_Initialize()
2511 : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
2512 CallIC(ic, expr->AssignmentFeedbackId());
2514 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
2515 context()->Plug(eax);
2519 void FullCodeGenerator::VisitProperty(Property* expr) {
2520 Comment cmnt(masm_, "[ Property");
2521 Expression* key = expr->key();
2523 if (key->IsPropertyName()) {
2524 VisitForAccumulatorValue(expr->obj());
2525 __ Move(LoadIC::ReceiverRegister(), result_register());
2526 EmitNamedPropertyLoad(expr);
2527 PrepareForBailoutForId(expr->LoadId(), TOS_REG);
2528 context()->Plug(eax);
2529 } else {
2530 VisitForStackValue(expr->obj());
2531 VisitForAccumulatorValue(expr->key());
2532 __ pop(LoadIC::ReceiverRegister()); // Object.
2533 __ Move(LoadIC::NameRegister(), result_register()); // Key.
2534 EmitKeyedPropertyLoad(expr);
2535 context()->Plug(eax);
2536 }
2537 }
2540 void FullCodeGenerator::CallIC(Handle<Code> code,
2541 TypeFeedbackId ast_id) {
2542 ic_total_count_++;
2543 __ call(code, RelocInfo::CODE_TARGET, ast_id);
2544 }
2547 // Code common for calls using the IC.
2548 void FullCodeGenerator::EmitCallWithLoadIC(Call* expr) {
2549 Expression* callee = expr->expression();
2551 CallIC::CallType call_type = callee->IsVariableProxy()
2552 ? CallIC::FUNCTION
2553 : CallIC::METHOD;
2554 // Get the target function.
2555 if (call_type == CallIC::FUNCTION) {
2556 { StackValueContext context(this);
2557 EmitVariableLoad(callee->AsVariableProxy());
2558 PrepareForBailout(callee, NO_REGISTERS);
2559 }
2560 // Push undefined as receiver. This is patched in the method prologue if it
2561 // is a sloppy mode method.
2562 __ push(Immediate(isolate()->factory()->undefined_value()));
2563 } else {
2564 // Load the function from the receiver.
2565 DCHECK(callee->IsProperty());
2566 __ mov(LoadIC::ReceiverRegister(), Operand(esp, 0));
2567 EmitNamedPropertyLoad(callee->AsProperty());
2568 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2569 // Push the target function under the receiver.
2570 __ push(Operand(esp, 0));
2571 __ mov(Operand(esp, kPointerSize), eax);
2572 }
2574 EmitCall(expr, call_type);
2575 }
2578 // Code common for calls using the IC.
2579 void FullCodeGenerator::EmitKeyedCallWithLoadIC(Call* expr,
2580 Expression* key) {
2581 // Load the key.
2582 VisitForAccumulatorValue(key);
2584 Expression* callee = expr->expression();
2586 // Load the function from the receiver.
2587 DCHECK(callee->IsProperty());
2588 __ mov(LoadIC::ReceiverRegister(), Operand(esp, 0));
2589 __ mov(LoadIC::NameRegister(), eax);
2590 EmitKeyedPropertyLoad(callee->AsProperty());
2591 PrepareForBailoutForId(callee->AsProperty()->LoadId(), TOS_REG);
2593 // Push the target function under the receiver.
2594 __ push(Operand(esp, 0));
2595 __ mov(Operand(esp, kPointerSize), eax);
2597 EmitCall(expr, CallIC::METHOD);
2598 }
2601 void FullCodeGenerator::EmitCall(Call* expr, CallIC::CallType call_type) {
2602 // Load the arguments.
2603 ZoneList<Expression*>* args = expr->arguments();
2604 int arg_count = args->length();
2605 { PreservePositionScope scope(masm()->positions_recorder());
2606 for (int i = 0; i < arg_count; i++) {
2607 VisitForStackValue(args->at(i));
2608 }
2609 }
2611 // Record source position of the IC call.
2612 SetSourcePosition(expr->position());
2613 Handle<Code> ic = CallIC::initialize_stub(
2614 isolate(), arg_count, call_type);
2615 __ Move(edx, Immediate(Smi::FromInt(expr->CallFeedbackSlot())));
2616 __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
2617 // Don't assign a type feedback id to the IC, since type feedback is provided
2618 // by the vector above.
2619 CallIC(ic);
2621 RecordJSReturnSite(expr);
2623 // Restore context register.
2624 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2626 context()->DropAndPlug(1, eax);
2627 }
2630 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2631 // Push copy of the first argument or undefined if it doesn't exist.
2632 if (arg_count > 0) {
2633 __ push(Operand(esp, arg_count * kPointerSize));
2634 } else {
2635 __ push(Immediate(isolate()->factory()->undefined_value()));
2636 }
2638 // Push the receiver of the enclosing function.
2639 __ push(Operand(ebp, (2 + info_->scope()->num_parameters()) * kPointerSize));
2640 // Push the language mode.
2641 __ push(Immediate(Smi::FromInt(strict_mode())));
2643 // Push the start position of the scope the call resides in.
2644 __ push(Immediate(Smi::FromInt(scope()->start_position())));
2646 // Do the runtime call.
2647 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
2648 }
2651 void FullCodeGenerator::VisitCall(Call* expr) {
2652 #ifdef DEBUG
2653 // We want to verify that RecordJSReturnSite gets called on all paths
2654 // through this function. Avoid early returns.
2655 expr->return_is_recorded_ = false;
2656 #endif
2658 Comment cmnt(masm_, "[ Call");
2659 Expression* callee = expr->expression();
2660 Call::CallType call_type = expr->GetCallType(isolate());
2662 if (call_type == Call::POSSIBLY_EVAL_CALL) {
2663 // In a call to eval, we first call RuntimeHidden_ResolvePossiblyDirectEval
2664 // to resolve the function we need to call and the receiver of the call.
2665 // Then we call the resolved function using the given arguments.
2666 ZoneList<Expression*>* args = expr->arguments();
2667 int arg_count = args->length();
2668 { PreservePositionScope pos_scope(masm()->positions_recorder());
2669 VisitForStackValue(callee);
2670 // Reserved receiver slot.
2671 __ push(Immediate(isolate()->factory()->undefined_value()));
2672 // Push the arguments.
2673 for (int i = 0; i < arg_count; i++) {
2674 VisitForStackValue(args->at(i));
2675 }
2677 // Push a copy of the function (found below the arguments) and
2678 // resolve eval.
2679 __ push(Operand(esp, (arg_count + 1) * kPointerSize));
2680 EmitResolvePossiblyDirectEval(arg_count);
2681 }
2682 // The runtime call returns a pair of values in eax (function) and
2683 // edx (receiver). Touch up the stack with the right values.
2684 __ mov(Operand(esp, (arg_count + 0) * kPointerSize), edx);
2685 __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax);
2687 // Record source position for debugger.
2688 SetSourcePosition(expr->position());
2689 CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
2690 __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
2691 __ CallStub(&stub);
2692 RecordJSReturnSite(expr);
2693 // Restore context register.
2694 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2695 context()->DropAndPlug(1, eax);
2697 } else if (call_type == Call::GLOBAL_CALL) {
2698 EmitCallWithLoadIC(expr);
2700 } else if (call_type == Call::LOOKUP_SLOT_CALL) {
2701 // Call to a lookup slot (dynamically introduced variable).
2702 VariableProxy* proxy = callee->AsVariableProxy();
2703 Label slow, done;
2704 { PreservePositionScope scope(masm()->positions_recorder());
2705 // Generate code for loading from variables potentially shadowed by
2706 // eval-introduced variables.
2707 EmitDynamicLookupFastCase(proxy, NOT_INSIDE_TYPEOF, &slow, &done);
2708 }
2709 __ bind(&slow);
2710 // Call the runtime to find the function to call (returned in eax) and
2711 // the object holding it (returned in edx).
2712 __ push(context_register());
2713 __ push(Immediate(proxy->name()));
2714 __ CallRuntime(Runtime::kLoadLookupSlot, 2);
2715 __ push(eax); // Function.
2716 __ push(edx); // Receiver.
2718 // If fast case code has been generated, emit code to push the function
2719 // and receiver and have the slow path jump around this code.
2720 if (done.is_linked()) {
2721 Label call;
2722 __ jmp(&call, Label::kNear);
2723 __ bind(&done);
2724 // Push function.
2725 __ push(eax);
2726 // The receiver is implicitly the global receiver. Indicate this by
2727 // passing undefined to the call function stub.
2728 __ push(Immediate(isolate()->factory()->undefined_value()));
2729 __ bind(&call);
2730 }
2732 // The receiver is either the global receiver or an object found by
2733 // LoadLookupSlot.
2734 EmitCall(expr);
2736 } else if (call_type == Call::PROPERTY_CALL) {
2737 Property* property = callee->AsProperty();
2738 { PreservePositionScope scope(masm()->positions_recorder());
2739 VisitForStackValue(property->obj());
2740 }
2741 if (property->key()->IsPropertyName()) {
2742 EmitCallWithLoadIC(expr);
2743 } else {
2744 EmitKeyedCallWithLoadIC(expr, property->key());
2745 }
2747 } else {
2748 DCHECK(call_type == Call::OTHER_CALL);
2749 // Call to an arbitrary expression not handled specially above.
2750 { PreservePositionScope scope(masm()->positions_recorder());
2751 VisitForStackValue(callee);
2752 }
2753 __ push(Immediate(isolate()->factory()->undefined_value()));
2754 // Emit function call.
2755 EmitCall(expr);
2756 }
2758 #ifdef DEBUG
2759 // RecordJSReturnSite should have been called.
2760 DCHECK(expr->return_is_recorded_);
2761 #endif
2762 }
2765 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2766 Comment cmnt(masm_, "[ CallNew");
2767 // According to ECMA-262, section 11.2.2, page 44, the function
2768 // expression in new calls must be evaluated before the
2769 // arguments.
2771 // Push constructor on the stack. If it's not a function it's used as
2772 // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
2773 // ignored.
2774 VisitForStackValue(expr->expression());
2776 // Push the arguments ("left-to-right") on the stack.
2777 ZoneList<Expression*>* args = expr->arguments();
2778 int arg_count = args->length();
2779 for (int i = 0; i < arg_count; i++) {
2780 VisitForStackValue(args->at(i));
2781 }
2783 // Call the construct call builtin that handles allocation and
2784 // constructor invocation.
2785 SetSourcePosition(expr->position());
2787 // Load function and argument count into edi and eax.
2788 __ Move(eax, Immediate(arg_count));
2789 __ mov(edi, Operand(esp, arg_count * kPointerSize));
2791 // Record call targets in unoptimized code.
2792 if (FLAG_pretenuring_call_new) {
2793 EnsureSlotContainsAllocationSite(expr->AllocationSiteFeedbackSlot());
2794 DCHECK(expr->AllocationSiteFeedbackSlot() ==
2795 expr->CallNewFeedbackSlot() + 1);
2796 }
2798 __ LoadHeapObject(ebx, FeedbackVector());
2799 __ mov(edx, Immediate(Smi::FromInt(expr->CallNewFeedbackSlot())));
2801 CallConstructStub stub(isolate(), RECORD_CONSTRUCTOR_TARGET);
2802 __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
2803 PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
2804 context()->Plug(eax);
2808 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2809 ZoneList<Expression*>* args = expr->arguments();
2810 DCHECK(args->length() == 1);
2812 VisitForAccumulatorValue(args->at(0));
2814 Label materialize_true, materialize_false;
2815 Label* if_true = NULL;
2816 Label* if_false = NULL;
2817 Label* fall_through = NULL;
2818 context()->PrepareTest(&materialize_true, &materialize_false,
2819 &if_true, &if_false, &fall_through);
2821 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2822 __ test(eax, Immediate(kSmiTagMask));
2823 Split(zero, if_true, if_false, fall_through);
2825 context()->Plug(if_true, if_false);
2829 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2830 ZoneList<Expression*>* args = expr->arguments();
2831 DCHECK(args->length() == 1);
2833 VisitForAccumulatorValue(args->at(0));
2835 Label materialize_true, materialize_false;
2836 Label* if_true = NULL;
2837 Label* if_false = NULL;
2838 Label* fall_through = NULL;
2839 context()->PrepareTest(&materialize_true, &materialize_false,
2840 &if_true, &if_false, &fall_through);
2842 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2843 __ test(eax, Immediate(kSmiTagMask | 0x80000000));
2844 Split(zero, if_true, if_false, fall_through);
2846 context()->Plug(if_true, if_false);
2850 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2851 ZoneList<Expression*>* args = expr->arguments();
2852 DCHECK(args->length() == 1);
2854 VisitForAccumulatorValue(args->at(0));
2856 Label materialize_true, materialize_false;
2857 Label* if_true = NULL;
2858 Label* if_false = NULL;
2859 Label* fall_through = NULL;
2860 context()->PrepareTest(&materialize_true, &materialize_false,
2861 &if_true, &if_false, &fall_through);
2863 __ JumpIfSmi(eax, if_false);
2864 __ cmp(eax, isolate()->factory()->null_value());
2865 __ j(equal, if_true);
2866 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
2867 // Undetectable objects behave like undefined when tested with typeof.
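// (The canonical undetectable object is document.all in browsers that
// embed V8: it is an object, yet typeof reports "undefined".)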
2868 __ movzx_b(ecx, FieldOperand(ebx, Map::kBitFieldOffset));
2869 __ test(ecx, Immediate(1 << Map::kIsUndetectable));
2870 __ j(not_zero, if_false);
2871 __ movzx_b(ecx, FieldOperand(ebx, Map::kInstanceTypeOffset));
2872 __ cmp(ecx, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
2873 __ j(below, if_false);
2874 __ cmp(ecx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
2875 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2876 Split(below_equal, if_true, if_false, fall_through);
2878 context()->Plug(if_true, if_false);
2882 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2883 ZoneList<Expression*>* args = expr->arguments();
2884 DCHECK(args->length() == 1);
2886 VisitForAccumulatorValue(args->at(0));
2888 Label materialize_true, materialize_false;
2889 Label* if_true = NULL;
2890 Label* if_false = NULL;
2891 Label* fall_through = NULL;
2892 context()->PrepareTest(&materialize_true, &materialize_false,
2893 &if_true, &if_false, &fall_through);
2895 __ JumpIfSmi(eax, if_false);
2896 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ebx);
2897 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2898 Split(above_equal, if_true, if_false, fall_through);
2900 context()->Plug(if_true, if_false);
2904 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
2905 ZoneList<Expression*>* args = expr->arguments();
2906 DCHECK(args->length() == 1);
2908 VisitForAccumulatorValue(args->at(0));
2910 Label materialize_true, materialize_false;
2911 Label* if_true = NULL;
2912 Label* if_false = NULL;
2913 Label* fall_through = NULL;
2914 context()->PrepareTest(&materialize_true, &materialize_false,
2915 &if_true, &if_false, &fall_through);
2917 __ JumpIfSmi(eax, if_false);
2918 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
2919 __ movzx_b(ebx, FieldOperand(ebx, Map::kBitFieldOffset));
2920 __ test(ebx, Immediate(1 << Map::kIsUndetectable));
2921 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2922 Split(not_zero, if_true, if_false, fall_through);
2924 context()->Plug(if_true, if_false);
2928 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
2929 CallRuntime* expr) {
2930 ZoneList<Expression*>* args = expr->arguments();
2931 DCHECK(args->length() == 1);
2933 VisitForAccumulatorValue(args->at(0));
2935 Label materialize_true, materialize_false, skip_lookup;
2936 Label* if_true = NULL;
2937 Label* if_false = NULL;
2938 Label* fall_through = NULL;
2939 context()->PrepareTest(&materialize_true, &materialize_false,
2940 &if_true, &if_false, &fall_through);
2942 __ AssertNotSmi(eax);
2944 // Check whether this map has already been checked to be safe for default
2945 // valueOf.
2946 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
2947 __ test_b(FieldOperand(ebx, Map::kBitField2Offset),
2948 1 << Map::kStringWrapperSafeForDefaultValueOf);
2949 __ j(not_zero, &skip_lookup);
2951 // Check for fast case object. Return false for slow case objects.
2952 __ mov(ecx, FieldOperand(eax, JSObject::kPropertiesOffset));
2953 __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
2954 __ cmp(ecx, isolate()->factory()->hash_table_map());
2955 __ j(equal, if_false);
2957 // Look for valueOf string in the descriptor array, and indicate false if
2958 // found. Since we omit an enumeration index check, if it is added via a
2959 // transition that shares its descriptor array, this is a false positive.
2960 Label entry, loop, done;
2962 // Skip loop if no descriptors are valid.
2963 __ NumberOfOwnDescriptors(ecx, ebx);
2964 __ test(ecx, ecx);
2965 __ j(zero, &done);
2967 __ LoadInstanceDescriptors(ebx, ebx);
2968 // ebx: descriptor array.
2969 // ecx: valid entries in the descriptor array.
2970 // Calculate the end of the descriptor array.
2971 STATIC_ASSERT(kSmiTag == 0);
2972 STATIC_ASSERT(kSmiTagSize == 1);
2973 STATIC_ASSERT(kPointerSize == 4);
2974 __ imul(ecx, ecx, DescriptorArray::kDescriptorSize);
2975 __ lea(ecx, Operand(ebx, ecx, times_4, DescriptorArray::kFirstOffset));
2976 // Calculate location of the first key name.
2977 __ add(ebx, Immediate(DescriptorArray::kFirstOffset));
2978 // Loop through all the keys in the descriptor array. If one of these is the
2979 // internalized string "valueOf" the result is false.
2980 __ jmp(&entry);
2981 __ bind(&loop);
2982 __ mov(edx, FieldOperand(ebx, 0));
2983 __ cmp(edx, isolate()->factory()->value_of_string());
2984 __ j(equal, if_false);
2985 __ add(ebx, Immediate(DescriptorArray::kDescriptorSize * kPointerSize));
2986 __ bind(&entry);
2987 __ cmp(ebx, ecx);
2988 __ j(not_equal, &loop);
2990 __ bind(&done);
2992 // Reload map as register ebx was used as temporary above.
2993 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
2995 // Set the bit in the map to indicate that there is no local valueOf field.
2996 __ or_(FieldOperand(ebx, Map::kBitField2Offset),
2997 Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
2999 __ bind(&skip_lookup);
3001 // If a valueOf property is not found on the object, check that its
3002 // prototype is the unmodified String prototype. If not, the result is false.
3003 __ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
3004 __ JumpIfSmi(ecx, if_false);
3005 __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
3006 __ mov(edx, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
3007 __ mov(edx,
3008 FieldOperand(edx, GlobalObject::kNativeContextOffset));
3009 __ cmp(ecx,
3010 ContextOperand(edx,
3011 Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
3012 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3013 Split(equal, if_true, if_false, fall_through);
3015 context()->Plug(if_true, if_false);
3019 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
3020 ZoneList<Expression*>* args = expr->arguments();
3021 DCHECK(args->length() == 1);
3023 VisitForAccumulatorValue(args->at(0));
3025 Label materialize_true, materialize_false;
3026 Label* if_true = NULL;
3027 Label* if_false = NULL;
3028 Label* fall_through = NULL;
3029 context()->PrepareTest(&materialize_true, &materialize_false,
3030 &if_true, &if_false, &fall_through);
3032 __ JumpIfSmi(eax, if_false);
3033 __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
3034 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3035 Split(equal, if_true, if_false, fall_through);
3037 context()->Plug(if_true, if_false);
3041 void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
3042 ZoneList<Expression*>* args = expr->arguments();
3043 DCHECK(args->length() == 1);
3045 VisitForAccumulatorValue(args->at(0));
3047 Label materialize_true, materialize_false;
3048 Label* if_true = NULL;
3049 Label* if_false = NULL;
3050 Label* fall_through = NULL;
3051 context()->PrepareTest(&materialize_true, &materialize_false,
3052 &if_true, &if_false, &fall_through);
3054 Handle<Map> map = masm()->isolate()->factory()->heap_number_map();
3055 __ CheckMap(eax, map, if_false, DO_SMI_CHECK);
3056 // Check if the exponent half is 0x80000000. Comparing against 1 and
3057 // checking for overflow is the shortest possible encoding.
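// (-0.0 is the IEEE-754 double 0x80000000'00000000: only the sign bit is
// set. Subtracting 1 from the exponent half overflows only for 0x80000000,
// i.e. kMinInt, so the overflow flag identifies the upper word exactly.)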
3058 __ cmp(FieldOperand(eax, HeapNumber::kExponentOffset), Immediate(0x1));
3059 __ j(no_overflow, if_false);
3060 __ cmp(FieldOperand(eax, HeapNumber::kMantissaOffset), Immediate(0x0));
3061 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3062 Split(equal, if_true, if_false, fall_through);
3064 context()->Plug(if_true, if_false);
3069 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
3070 ZoneList<Expression*>* args = expr->arguments();
3071 DCHECK(args->length() == 1);
3073 VisitForAccumulatorValue(args->at(0));
3075 Label materialize_true, materialize_false;
3076 Label* if_true = NULL;
3077 Label* if_false = NULL;
3078 Label* fall_through = NULL;
3079 context()->PrepareTest(&materialize_true, &materialize_false,
3080 &if_true, &if_false, &fall_through);
3082 __ JumpIfSmi(eax, if_false);
3083 __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
3084 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3085 Split(equal, if_true, if_false, fall_through);
3087 context()->Plug(if_true, if_false);
3091 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
3092 ZoneList<Expression*>* args = expr->arguments();
3093 DCHECK(args->length() == 1);
3095 VisitForAccumulatorValue(args->at(0));
3097 Label materialize_true, materialize_false;
3098 Label* if_true = NULL;
3099 Label* if_false = NULL;
3100 Label* fall_through = NULL;
3101 context()->PrepareTest(&materialize_true, &materialize_false,
3102 &if_true, &if_false, &fall_through);
3104 __ JumpIfSmi(eax, if_false);
3105 __ CmpObjectType(eax, JS_REGEXP_TYPE, ebx);
3106 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3107 Split(equal, if_true, if_false, fall_through);
3109 context()->Plug(if_true, if_false);
3114 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
3115 DCHECK(expr->arguments()->length() == 0);
3117 Label materialize_true, materialize_false;
3118 Label* if_true = NULL;
3119 Label* if_false = NULL;
3120 Label* fall_through = NULL;
3121 context()->PrepareTest(&materialize_true, &materialize_false,
3122 &if_true, &if_false, &fall_through);
3124 // Get the frame pointer for the calling frame.
3125 __ mov(eax, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
3127 // Skip the arguments adaptor frame if it exists.
3128 Label check_frame_marker;
3129 __ cmp(Operand(eax, StandardFrameConstants::kContextOffset),
3130 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3131 __ j(not_equal, &check_frame_marker);
3132 __ mov(eax, Operand(eax, StandardFrameConstants::kCallerFPOffset));
3134 // Check the marker in the calling frame.
3135 __ bind(&check_frame_marker);
3136 __ cmp(Operand(eax, StandardFrameConstants::kMarkerOffset),
3137 Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
3138 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3139 Split(equal, if_true, if_false, fall_through);
3141 context()->Plug(if_true, if_false);
3145 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
3146 ZoneList<Expression*>* args = expr->arguments();
3147 DCHECK(args->length() == 2);
3149 // Load the two objects into registers and perform the comparison.
3150 VisitForStackValue(args->at(0));
3151 VisitForAccumulatorValue(args->at(1));
3153 Label materialize_true, materialize_false;
3154 Label* if_true = NULL;
3155 Label* if_false = NULL;
3156 Label* fall_through = NULL;
3157 context()->PrepareTest(&materialize_true, &materialize_false,
3158 &if_true, &if_false, &fall_through);
3160 __ pop(ebx);
3161 __ cmp(eax, ebx);
3162 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3163 Split(equal, if_true, if_false, fall_through);
3165 context()->Plug(if_true, if_false);
3169 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
3170 ZoneList<Expression*>* args = expr->arguments();
3171 DCHECK(args->length() == 1);
3173 // ArgumentsAccessStub expects the key in edx and the formal
3174 // parameter count in eax.
3175 VisitForAccumulatorValue(args->at(0));
3177 __ Move(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
3178 ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
3179 __ CallStub(&stub);
3180 context()->Plug(eax);
3184 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
3185 DCHECK(expr->arguments()->length() == 0);
3187 Label exit;
3188 // Get the number of formal parameters.
3189 __ Move(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
3191 // Check if the calling frame is an arguments adaptor frame.
3192 __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
3193 __ cmp(Operand(ebx, StandardFrameConstants::kContextOffset),
3194 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3195 __ j(not_equal, &exit);
3197 // Arguments adaptor case: Read the arguments length from the
3198 // adaptor frame.
3199 __ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
3201 __ bind(&exit);
3202 __ AssertSmi(eax);
3203 context()->Plug(eax);
3204 }
3207 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
3208 ZoneList<Expression*>* args = expr->arguments();
3209 DCHECK(args->length() == 1);
3210 Label done, null, function, non_function_constructor;
3212 VisitForAccumulatorValue(args->at(0));
3214 // If the object is a smi, we return null.
3215 __ JumpIfSmi(eax, &null);
3217 // Check that the object is a JS object but take special care of JS
3218 // functions to make sure they have 'Function' as their class.
3219 // Assume that there are only two callable types, and one of them is at
3220 // either end of the type range for JS object types. Saves extra comparisons.
3221 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
3222 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, eax);
3223 // Map is now in eax.
3224 __ j(below, &null);
3225 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3226 FIRST_SPEC_OBJECT_TYPE + 1);
3227 __ j(equal, &function);
3229 __ CmpInstanceType(eax, LAST_SPEC_OBJECT_TYPE);
3230 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
3231 LAST_SPEC_OBJECT_TYPE - 1);
3232 __ j(equal, &function);
3233 // Assume that there is no larger type.
3234 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
3236 // Check if the constructor in the map is a JS function.
3237 __ mov(eax, FieldOperand(eax, Map::kConstructorOffset));
3238 __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
3239 __ j(not_equal, &non_function_constructor);
3241 // eax now contains the constructor function. Grab the
3242 // instance class name from there.
3243 __ mov(eax, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
3244 __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kInstanceClassNameOffset));
3245 __ jmp(&done);
3247 // Functions have class 'Function'.
3248 __ bind(&function);
3249 __ mov(eax, isolate()->factory()->Function_string());
3250 __ jmp(&done);
3252 // Objects with a non-function constructor have class 'Object'.
3253 __ bind(&non_function_constructor);
3254 __ mov(eax, isolate()->factory()->Object_string());
3255 __ jmp(&done);
3257 // Non-JS objects have class null.
3258 __ bind(&null);
3259 __ mov(eax, isolate()->factory()->null_value());
3261 // All done.
3262 __ bind(&done);
3264 context()->Plug(eax);
3268 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
3269 // Load the arguments on the stack and call the stub.
3270 SubStringStub stub(isolate());
3271 ZoneList<Expression*>* args = expr->arguments();
3272 DCHECK(args->length() == 3);
3273 VisitForStackValue(args->at(0));
3274 VisitForStackValue(args->at(1));
3275 VisitForStackValue(args->at(2));
3276 __ CallStub(&stub);
3277 context()->Plug(eax);
3278 }
3281 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
3282 // Load the arguments on the stack and call the stub.
3283 RegExpExecStub stub(isolate());
3284 ZoneList<Expression*>* args = expr->arguments();
3285 DCHECK(args->length() == 4);
3286 VisitForStackValue(args->at(0));
3287 VisitForStackValue(args->at(1));
3288 VisitForStackValue(args->at(2));
3289 VisitForStackValue(args->at(3));
3290 __ CallStub(&stub);
3291 context()->Plug(eax);
3292 }
3295 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
3296 ZoneList<Expression*>* args = expr->arguments();
3297 DCHECK(args->length() == 1);
3299 VisitForAccumulatorValue(args->at(0)); // Load the object.
3301 Label done;
3302 // If the object is a smi return the object.
3303 __ JumpIfSmi(eax, &done, Label::kNear);
3304 // If the object is not a value type, return the object.
3305 __ CmpObjectType(eax, JS_VALUE_TYPE, ebx);
3306 __ j(not_equal, &done, Label::kNear);
3307 __ mov(eax, FieldOperand(eax, JSValue::kValueOffset));
3309 __ bind(&done);
3310 context()->Plug(eax);
3311 }
3314 void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
3315 ZoneList<Expression*>* args = expr->arguments();
3316 DCHECK(args->length() == 2);
3317 DCHECK_NE(NULL, args->at(1)->AsLiteral());
3318 Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->value()));
3320 VisitForAccumulatorValue(args->at(0)); // Load the object.
3322 Label runtime, done, not_date_object;
3323 Register object = eax;
3324 Register result = eax;
3325 Register scratch = ecx;
3327 __ JumpIfSmi(object, ¬_date_object);
3328 __ CmpObjectType(object, JS_DATE_TYPE, scratch);
3329 __ j(not_equal, ¬_date_object);
3331 if (index->value() == 0) {
3332 __ mov(result, FieldOperand(object, JSDate::kValueOffset));
3333 __ jmp(&done, Label::kNear);
3334 } else {
3335 if (index->value() < JSDate::kFirstUncachedField) {
3336 ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
3337 __ mov(scratch, Operand::StaticVariable(stamp));
3338 __ cmp(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
3339 __ j(not_equal, &runtime, Label::kNear);
3340 __ mov(result, FieldOperand(object, JSDate::kValueOffset +
3341 kPointerSize * index->value()));
3342 __ jmp(&done, Label::kNear);
3343 }
3344 __ bind(&runtime);
3345 __ PrepareCallCFunction(2, scratch);
3346 __ mov(Operand(esp, 0), object);
3347 __ mov(Operand(esp, 1 * kPointerSize), Immediate(index));
3348 __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
3349 __ jmp(&done);
3350 }
3352 __ bind(&not_date_object);
3353 __ CallRuntime(Runtime::kThrowNotDateError, 0);
3354 __ bind(&done);
3355 context()->Plug(result);
3356 }
3359 void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
3360 ZoneList<Expression*>* args = expr->arguments();
3361 DCHECK_EQ(3, args->length());
3363 Register string = eax;
3364 Register index = ebx;
3365 Register value = ecx;
3367 VisitForStackValue(args->at(1)); // index
3368 VisitForStackValue(args->at(2)); // value
3369 VisitForAccumulatorValue(args->at(0)); // string
3371 __ pop(value);
3372 __ pop(index);
3374 if (FLAG_debug_code) {
3375 __ test(value, Immediate(kSmiTagMask));
3376 __ Check(zero, kNonSmiValue);
3377 __ test(index, Immediate(kSmiTagMask));
3378 __ Check(zero, kNonSmiValue);
3379 }
3381 __ SmiUntag(value);
3382 __ SmiUntag(index);
3384 if (FLAG_debug_code) {
3385 static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
3386 __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
3387 }
3389 __ mov_b(FieldOperand(string, index, times_1, SeqOneByteString::kHeaderSize),
3390 value);
3391 context()->Plug(string);
3392 }
3395 void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
3396 ZoneList<Expression*>* args = expr->arguments();
3397 DCHECK_EQ(3, args->length());
3399 Register string = eax;
3400 Register index = ebx;
3401 Register value = ecx;
3403 VisitForStackValue(args->at(1)); // index
3404 VisitForStackValue(args->at(2)); // value
3405 VisitForAccumulatorValue(args->at(0)); // string
3407 __ pop(value);
3408 __ pop(index);
3409 if (FLAG_debug_code) {
3410 __ test(value, Immediate(kSmiTagMask));
3411 __ Check(zero, kNonSmiValue);
3412 __ test(index, Immediate(kSmiTagMask));
3413 __ Check(zero, kNonSmiValue);
3415 static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
3416 __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
3417 }
3419 __ SmiUntag(value);
3421 // No need to untag a smi for two-byte addressing.
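// (A 32-bit smi stores v as v << 1, which is already the byte offset
// v * 2 that a two-byte character index requires, so the smi tag itself
// provides the times-two scaling.)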
3422 __ mov_w(FieldOperand(string, index, times_1, SeqTwoByteString::kHeaderSize),
3423 value);
3424 context()->Plug(string);
3425 }
3428 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
3429 // Load the arguments on the stack and call the runtime function.
3430 ZoneList<Expression*>* args = expr->arguments();
3431 DCHECK(args->length() == 2);
3432 VisitForStackValue(args->at(0));
3433 VisitForStackValue(args->at(1));
3435 MathPowStub stub(isolate(), MathPowStub::ON_STACK);
3436 __ CallStub(&stub);
3437 context()->Plug(eax);
3438 }
3441 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
3442 ZoneList<Expression*>* args = expr->arguments();
3443 DCHECK(args->length() == 2);
3445 VisitForStackValue(args->at(0)); // Load the object.
3446 VisitForAccumulatorValue(args->at(1)); // Load the value.
3447 __ pop(ebx); // eax = value. ebx = object.
3449 Label done;
3450 // If the object is a smi, return the value.
3451 __ JumpIfSmi(ebx, &done, Label::kNear);
3453 // If the object is not a value type, return the value.
3454 __ CmpObjectType(ebx, JS_VALUE_TYPE, ecx);
3455 __ j(not_equal, &done, Label::kNear);
3458 __ mov(FieldOperand(ebx, JSValue::kValueOffset), eax);
3460 // Update the write barrier. Save the value as it will be
3461 // overwritten by the write barrier code and is needed afterward.
3462 __ mov(edx, eax);
3463 __ RecordWriteField(ebx, JSValue::kValueOffset, edx, ecx, kDontSaveFPRegs);
3465 __ bind(&done);
3466 context()->Plug(eax);
3467 }
3470 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
3471 ZoneList<Expression*>* args = expr->arguments();
3472 DCHECK_EQ(args->length(), 1);
3474 // Load the argument into eax and call the stub.
3475 VisitForAccumulatorValue(args->at(0));
3477 NumberToStringStub stub(isolate());
3478 __ CallStub(&stub);
3479 context()->Plug(eax);
3480 }
3483 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
3484 ZoneList<Expression*>* args = expr->arguments();
3485 DCHECK(args->length() == 1);
3487 VisitForAccumulatorValue(args->at(0));
3489 Label done;
3490 StringCharFromCodeGenerator generator(eax, ebx);
3491 generator.GenerateFast(masm_);
3492 __ jmp(&done);
3494 NopRuntimeCallHelper call_helper;
3495 generator.GenerateSlow(masm_, call_helper);
3497 __ bind(&done);
3498 context()->Plug(ebx);
3499 }
3502 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
3503 ZoneList<Expression*>* args = expr->arguments();
3504 DCHECK(args->length() == 2);
3506 VisitForStackValue(args->at(0));
3507 VisitForAccumulatorValue(args->at(1));
3509 Register object = ebx;
3510 Register index = eax;
3511 Register result = edx;
3515 Label need_conversion;
3516 Label index_out_of_range;
3517 Label done;
3518 StringCharCodeAtGenerator generator(object,
3519 index,
3520 result,
3521 &need_conversion,
3522 &need_conversion,
3523 &index_out_of_range,
3524 STRING_INDEX_IS_NUMBER);
3525 generator.GenerateFast(masm_);
3526 __ jmp(&done);
3528 __ bind(&index_out_of_range);
3529 // When the index is out of range, the spec requires us to return
3530 // NaN.
3531 __ Move(result, Immediate(isolate()->factory()->nan_value()));
3532 __ jmp(&done);
3534 __ bind(&need_conversion);
3535 // Move the undefined value into the result register, which will
3536 // trigger conversion.
3537 __ Move(result, Immediate(isolate()->factory()->undefined_value()));
3538 __ jmp(&done);
3540 NopRuntimeCallHelper call_helper;
3541 generator.GenerateSlow(masm_, call_helper);
3544 context()->Plug(result);
3548 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
3549 ZoneList<Expression*>* args = expr->arguments();
3550 DCHECK(args->length() == 2);
3552 VisitForStackValue(args->at(0));
3553 VisitForAccumulatorValue(args->at(1));
3555 Register object = ebx;
3556 Register index = eax;
3557 Register scratch = edx;
3558 Register result = eax;
3562 Label need_conversion;
3563 Label index_out_of_range;
3564 Label done;
3565 StringCharAtGenerator generator(object,
3566 index,
3567 scratch,
3568 result,
3569 &need_conversion,
3570 &need_conversion,
3571 &index_out_of_range,
3572 STRING_INDEX_IS_NUMBER);
3573 generator.GenerateFast(masm_);
3574 __ jmp(&done);
3576 __ bind(&index_out_of_range);
3577 // When the index is out of range, the spec requires us to return
3578 // the empty string.
3579 __ Move(result, Immediate(isolate()->factory()->empty_string()));
3580 __ jmp(&done);
3582 __ bind(&need_conversion);
3583 // Move smi zero into the result register, which will trigger
3584 // conversion.
3585 __ Move(result, Immediate(Smi::FromInt(0)));
3586 __ jmp(&done);
3588 NopRuntimeCallHelper call_helper;
3589 generator.GenerateSlow(masm_, call_helper);
3591 __ bind(&done);
3592 context()->Plug(result);
3593 }
3596 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
3597 ZoneList<Expression*>* args = expr->arguments();
3598 DCHECK_EQ(2, args->length());
3599 VisitForStackValue(args->at(0));
3600 VisitForAccumulatorValue(args->at(1));
3602 __ pop(edx);
3603 StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
3604 __ CallStub(&stub);
3605 context()->Plug(eax);
3606 }
3609 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3610 ZoneList<Expression*>* args = expr->arguments();
3611 DCHECK_EQ(2, args->length());
3613 VisitForStackValue(args->at(0));
3614 VisitForStackValue(args->at(1));
3616 StringCompareStub stub(isolate());
3617 __ CallStub(&stub);
3618 context()->Plug(eax);
3619 }
3622 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3623 ZoneList<Expression*>* args = expr->arguments();
3624 DCHECK(args->length() >= 2);
3626 int arg_count = args->length() - 2; // 2 ~ receiver and function.
3627 for (int i = 0; i < arg_count + 1; ++i) {
3628 VisitForStackValue(args->at(i));
3629 }
3630 VisitForAccumulatorValue(args->last()); // Function.
3632 Label runtime, done;
3633 // Check for non-function argument (including proxy).
3634 __ JumpIfSmi(eax, &runtime);
3635 __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
3636 __ j(not_equal, &runtime);
3638 // InvokeFunction requires the function in edi. Move it in there.
3639 __ mov(edi, result_register());
3640 ParameterCount count(arg_count);
3641 __ InvokeFunction(edi, count, CALL_FUNCTION, NullCallWrapper());
3642 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
3643 __ jmp(&done);
3645 __ bind(&runtime);
3646 __ push(eax);
3647 __ CallRuntime(Runtime::kCall, args->length());
3648 __ bind(&done);
3650 context()->Plug(eax);
3651 }
3654 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3655 // Load the arguments on the stack and call the stub.
3656 RegExpConstructResultStub stub(isolate());
3657 ZoneList<Expression*>* args = expr->arguments();
3658 DCHECK(args->length() == 3);
3659 VisitForStackValue(args->at(0));
3660 VisitForStackValue(args->at(1));
3661 VisitForAccumulatorValue(args->at(2));
3662 __ pop(ebx);
3663 __ pop(ecx);
3664 __ CallStub(&stub);
3665 context()->Plug(eax);
3666 }
3669 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3670 ZoneList<Expression*>* args = expr->arguments();
3671 DCHECK_EQ(2, args->length());
3673 DCHECK_NE(NULL, args->at(0)->AsLiteral());
3674 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->value()))->value();
3676 Handle<FixedArray> jsfunction_result_caches(
3677 isolate()->native_context()->jsfunction_result_caches());
3678 if (jsfunction_result_caches->length() <= cache_id) {
3679 __ Abort(kAttemptToUseUndefinedCache);
3680 __ mov(eax, isolate()->factory()->undefined_value());
3681 context()->Plug(eax);
3682 return;
3683 }
3685 VisitForAccumulatorValue(args->at(1));
3687 Register key = eax;
3688 Register cache = ebx;
3689 Register tmp = ecx;
3690 __ mov(cache, ContextOperand(esi, Context::GLOBAL_OBJECT_INDEX));
3691 __ mov(cache,
3692 FieldOperand(cache, GlobalObject::kNativeContextOffset));
3693 __ mov(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
3694 __ mov(cache,
3695 FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3697 Label done, not_found;
3698 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3699 __ mov(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
3700 // tmp now holds finger offset as a smi.
3701 __ cmp(key, FixedArrayElementOperand(cache, tmp));
3702 __ j(not_equal, ¬_found);
3704 __ mov(eax, FixedArrayElementOperand(cache, tmp, 1)); // 1 ~ key offset.
3705 __ jmp(&done);
3707 __ bind(&not_found);
3708 // Call runtime to perform the lookup.
3709 __ push(cache);
3710 __ push(key);
3711 __ CallRuntime(Runtime::kGetFromCache, 2);
3713 __ bind(&done);
3714 context()->Plug(eax);
3715 }
void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  __ AssertString(eax);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ test(FieldOperand(eax, String::kHashFieldOffset),
          Immediate(String::kContainsCachedArrayIndexMask));
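  // The mask bits are clear exactly when the hash field caches an array
  // index, so "zero" selects the true branch.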
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}

void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  __ AssertString(eax);

  __ mov(eax, FieldOperand(eax, String::kHashFieldOffset));
  __ IndexFromHash(eax, eax);
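  // IndexFromHash leaves the decoded index smi-tagged in eax, ready to be
  // plugged as the result.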

  context()->Plug(eax);
}

void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
  Label bailout, done, one_char_separator, long_separator,
      non_trivial_array, not_size_one_array, loop,
      loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;

  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 2);
  // We will leave the separator on the stack until the end of the function.
  VisitForStackValue(args->at(1));
  // Load this to eax (= array)
  VisitForAccumulatorValue(args->at(0));
  // All aliases of the same register have disjoint lifetimes.
  Register array = eax;
  Register elements = no_reg;  // Will be eax.

  Register index = edx;

  Register string_length = ecx;

  Register string = esi;

  Register scratch = ebx;

  Register array_length = edi;
  Register result_pos = no_reg;  // Will be edi.

  // Separator operand is already pushed.
  Operand separator_operand = Operand(esp, 2 * kPointerSize);
  Operand result_operand = Operand(esp, 1 * kPointerSize);
  Operand array_length_operand = Operand(esp, 0);
  __ sub(esp, Immediate(2 * kPointerSize));
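  // After the sub above the scratch area is:
  //   esp[0]                : array_length
  //   esp[kPointerSize]     : result
  //   esp[2 * kPointerSize] : separator (pushed before entry)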
  // Check that the array is a JSArray
  __ JumpIfSmi(array, &bailout);
  __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
  __ j(not_equal, &bailout);

  // Check that the array has fast elements.
  __ CheckFastElements(scratch, &bailout);

  // If the array has length zero, return the empty string.
  __ mov(array_length, FieldOperand(array, JSArray::kLengthOffset));
  __ SmiUntag(array_length);
  __ j(not_zero, &non_trivial_array);
  __ mov(result_operand, isolate()->factory()->empty_string());
  __ jmp(&done);

  // Save the array length.
  __ bind(&non_trivial_array);
  __ mov(array_length_operand, array_length);

  // Save the FixedArray containing array's elements.
  // End of array's live range.
  elements = array;
  __ mov(elements, FieldOperand(array, JSArray::kElementsOffset));
  array = no_reg;


  // Check that all array elements are sequential ASCII strings, and
  // accumulate the sum of their lengths, as a smi-encoded value.
  __ Move(index, Immediate(0));
  __ Move(string_length, Immediate(0));
  // Loop condition: while (index < length).
  // Live loop registers: index, array_length, string,
  //                      scratch, string_length, elements.
  if (generate_debug_code_) {
    __ cmp(index, array_length);
    __ Assert(less, kNoEmptyArraysHereInEmitFastAsciiArrayJoin);
  }
  __ bind(&loop);
  __ mov(string, FieldOperand(elements,
                              index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ JumpIfSmi(string, &bailout);
  __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  __ and_(scratch, Immediate(
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
  __ cmp(scratch, kStringTag | kOneByteStringTag | kSeqStringTag);
  __ j(not_equal, &bailout);
  __ add(string_length,
         FieldOperand(string, SeqOneByteString::kLengthOffset));
  __ j(overflow, &bailout);
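  // Both operands are smi-tagged (tag is 0), so the sum stays smi-encoded;
  // overflow means the combined length no longer fits in a smi.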
  __ add(index, Immediate(1));
  __ cmp(index, array_length);
  __ j(less, &loop);

  // If array_length is 1, return elements[0], a string.
  __ cmp(array_length, 1);
  __ j(not_equal, &not_size_one_array);
  __ mov(scratch, FieldOperand(elements, FixedArray::kHeaderSize));
  __ mov(result_operand, scratch);
  __ jmp(&done);

  __ bind(&not_size_one_array);

  // End of array_length live range.
  result_pos = array_length;
  array_length = no_reg;

  // Live registers:
  // string_length: Sum of string lengths, as a smi.
  // elements: FixedArray of strings.

  // Check that the separator is a flat ASCII string.
  __ mov(string, separator_operand);
  __ JumpIfSmi(string, &bailout);
  __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  __ and_(scratch, Immediate(
      kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
  __ cmp(scratch, kStringTag | kOneByteStringTag | kSeqStringTag);
  __ j(not_equal, &bailout);

  // Add (separator length times array_length) - separator length
  // to string_length.
  __ mov(scratch, separator_operand);
  __ mov(scratch, FieldOperand(scratch, SeqOneByteString::kLengthOffset));
  __ sub(string_length, scratch);  // May be negative, temporarily.
  __ imul(scratch, array_length_operand);
  __ j(overflow, &bailout);
  __ add(string_length, scratch);
  __ j(overflow, &bailout);

  __ shr(string_length, 1);
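  // With kSmiTag == 0 and kSmiTagSize == 1, a logical shift right by one
  // untags the (non-negative) smi, leaving the length in characters.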
  // Live registers and stack values:
  //   string_length
  //   elements
  __ AllocateAsciiString(result_pos, string_length, scratch,
                         index, string, &bailout);
  __ mov(result_operand, result_pos);
  __ lea(result_pos, FieldOperand(result_pos, SeqOneByteString::kHeaderSize));
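  // result_pos now points at the first character slot, just past the string
  // header, ready for the byte copies below.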

  __ mov(string, separator_operand);
  __ cmp(FieldOperand(string, SeqOneByteString::kLengthOffset),
         Immediate(Smi::FromInt(1)));
  __ j(equal, &one_char_separator);
  __ j(greater, &long_separator);

  // Empty separator case
  __ mov(index, Immediate(0));
  __ jmp(&loop_1_condition);
  // Loop condition: while (index < length).
  __ bind(&loop_1);
  // Each iteration of the loop concatenates one string to the result.
  // Live values in registers:
  //   index: which element of the elements array we are adding to the result.
  //   result_pos: the position to which we are currently copying characters.
  //   elements: the FixedArray of strings we are joining.

  // Get string = array[index].
  __ mov(string, FieldOperand(elements, index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ lea(string,
         FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ add(index, Immediate(1));
  __ bind(&loop_1_condition);
  __ cmp(index, array_length_operand);
  __ j(less, &loop_1);  // End while (index < length).
  __ jmp(&done);


  // One-character separator case
  __ bind(&one_char_separator);
  // Replace separator with its ASCII character value.
  __ mov_b(scratch, FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ mov_b(separator_operand, scratch);

  __ Move(index, Immediate(0));
  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator
  __ jmp(&loop_2_entry);
  // Loop condition: while (index < length).
  __ bind(&loop_2);
  // Each iteration of the loop concatenates one string to the result.
  // Live values in registers:
  //   index: which element of the elements array we are adding to the result.
  //   result_pos: the position to which we are currently copying characters.

  // Copy the separator character to the result.
  __ mov_b(scratch, separator_operand);
  __ mov_b(Operand(result_pos, 0), scratch);
  __ inc(result_pos);

  __ bind(&loop_2_entry);
  // Get string = array[index].
  __ mov(string, FieldOperand(elements, index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ lea(string,
         FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ add(index, Immediate(1));

  __ cmp(index, array_length_operand);
  __ j(less, &loop_2);  // End while (index < length).
  __ jmp(&done);


  // Long separator case (separator is more than one character).
  __ bind(&long_separator);

  __ Move(index, Immediate(0));
  // Jump into the loop after the code that copies the separator, so the first
  // element is not preceded by a separator
  __ jmp(&loop_3_entry);
  // Loop condition: while (index < length).
  __ bind(&loop_3);
  // Each iteration of the loop concatenates one string to the result.
  // Live values in registers:
  //   index: which element of the elements array we are adding to the result.
  //   result_pos: the position to which we are currently copying characters.

  // Copy the separator to the result.
  __ mov(string, separator_operand);
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ lea(string,
         FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);

  __ bind(&loop_3_entry);
  // Get string = array[index].
  __ mov(string, FieldOperand(elements, index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ lea(string,
         FieldOperand(string, SeqOneByteString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ add(index, Immediate(1));

  __ cmp(index, array_length_operand);
  __ j(less, &loop_3);  // End while (index < length).
  __ jmp(&done);


  __ bind(&bailout);
  __ mov(result_operand, isolate()->factory()->undefined_value());
  __ bind(&done);
  __ mov(eax, result_operand);
  // Drop temp values from the stack, and restore context register.
  __ add(esp, Immediate(3 * kPointerSize));
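  // 3 * kPointerSize: the two scratch slots allocated on entry plus the
  // separator that was left on the stack.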
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  context()->Plug(eax);
}


void FullCodeGenerator::EmitDebugIsActive(CallRuntime* expr) {
  DCHECK(expr->arguments()->length() == 0);
  ExternalReference debug_is_active =
      ExternalReference::debug_is_active_address(isolate());
  __ movzx_b(eax, Operand::StaticVariable(debug_is_active));
  __ SmiTag(eax);
  context()->Plug(eax);
}


void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  if (expr->function() != NULL &&
      expr->function()->intrinsic_type == Runtime::INLINE) {
    Comment cmnt(masm_, "[ InlineRuntimeCall");
    EmitInlineRuntimeCall(expr);
    return;
  }

  Comment cmnt(masm_, "[ CallRuntime");
  ZoneList<Expression*>* args = expr->arguments();

  if (expr->is_jsruntime()) {
    // Push the builtins object as receiver.
    __ mov(eax, GlobalObjectOperand());
    __ push(FieldOperand(eax, GlobalObject::kBuiltinsOffset));

    // Load the function from the receiver.
    __ mov(LoadIC::ReceiverRegister(), Operand(esp, 0));
    __ mov(LoadIC::NameRegister(), Immediate(expr->name()));
    if (FLAG_vector_ics) {
      __ mov(LoadIC::SlotRegister(),
             Immediate(Smi::FromInt(expr->CallRuntimeFeedbackSlot())));
      CallLoadIC(NOT_CONTEXTUAL);
    } else {
      CallLoadIC(NOT_CONTEXTUAL, expr->CallRuntimeFeedbackId());
    }

    // Push the target function under the receiver.
    __ push(Operand(esp, 0));
    __ mov(Operand(esp, kPointerSize), eax);

    // Code common for calls using the IC.
    ZoneList<Expression*>* args = expr->arguments();
    int arg_count = args->length();
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    // Record source position of the IC call.
    SetSourcePosition(expr->position());
    CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
    __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
    __ CallStub(&stub);

    // Restore context register.
    __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
    context()->DropAndPlug(1, eax);
  } else {
    // Push the arguments ("left-to-right").
    int arg_count = args->length();
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    // Call the C runtime function.
    __ CallRuntime(expr->function(), arg_count);
    context()->Plug(eax);
  }
}


void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ push(Immediate(Smi::FromInt(strict_mode())));
        __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
        context()->Plug(eax);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode
        // but "delete this" is allowed.
        DCHECK(strict_mode() == SLOPPY || var->is_this());
        if (var->IsUnallocated()) {
          __ push(GlobalObjectOperand());
          __ push(Immediate(var->name()));
          __ push(Immediate(Smi::FromInt(SLOPPY)));
          __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
          context()->Plug(eax);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global variables is false. 'this' is
          // not really a variable, though we implement it as one. The
          // subexpression does not have side effects.
          context()->Plug(var->is_this());
        } else {
          // Non-global variable. Call the runtime to try to delete from the
          // context where the variable was introduced.
          __ push(context_register());
          __ push(Immediate(var->name()));
          __ CallRuntime(Runtime::kDeleteLookupSlot, 2);
          context()->Plug(eax);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(isolate()->factory()->undefined_value());
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression. Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        DCHECK(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->true_value());
        } else {
          __ Push(isolate()->factory()->true_value());
        }
        __ jmp(&done, Label::kNear);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->false_value());
        } else {
          __ Push(isolate()->factory()->false_value());
        }
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      { StackValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ CallRuntime(Runtime::kTypeof, 1);
      context()->Plug(eax);
      break;
    }

    default:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  DCHECK(expr->expression()->IsValidReferenceExpression());

  Comment cmnt(masm_, "[ CountOperation");
  SetSourcePosition(expr->position());

  // Expression can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->expression()->AsProperty();
  // In case of a property we use the uninitialized expression context
  // of the key to detect a named property.
  if (prop != NULL) {
    assign_type =
        (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
  }

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    DCHECK(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ push(Immediate(Smi::FromInt(0)));
    }
    if (assign_type == NAMED_PROPERTY) {
      // Put the object both on the stack and in the register.
      VisitForStackValue(prop->obj());
      __ mov(LoadIC::ReceiverRegister(), Operand(esp, 0));
      EmitNamedPropertyLoad(prop);
    } else {
      VisitForStackValue(prop->obj());
      VisitForStackValue(prop->key());
      __ mov(LoadIC::ReceiverRegister(),
             Operand(esp, kPointerSize));  // Object.
      __ mov(LoadIC::NameRegister(), Operand(esp, 0));  // Key.
      EmitKeyedPropertyLoad(prop);
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(prop->LoadId(), TOS_REG);
  }

  // Inline smi case if we are in a loop.
  Label done, stub_call;
  JumpPatchSite patch_site(masm_);
  if (ShouldInlineSmiCase(expr->op())) {
    Label slow;
    patch_site.EmitJumpIfNotSmi(eax, &slow, Label::kNear);

    // Save result for postfix expressions.
    if (expr->is_postfix()) {
      if (!context()->IsEffect()) {
        // Save the result on the stack. If we have a named or keyed property
        // we store the result under the receiver that is currently on top
        // of the stack.
        switch (assign_type) {
          case VARIABLE:
            __ push(eax);
            break;
          case NAMED_PROPERTY:
            __ mov(Operand(esp, kPointerSize), eax);
            break;
          case KEYED_PROPERTY:
            __ mov(Operand(esp, 2 * kPointerSize), eax);
            break;
        }
      }
    }

    if (expr->op() == Token::INC) {
      __ add(eax, Immediate(Smi::FromInt(1)));
    } else {
      __ sub(eax, Immediate(Smi::FromInt(1)));
    }
    __ j(no_overflow, &done, Label::kNear);
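    // Smi arithmetic works directly on the tagged value (the tag bit is 0);
    // overflow means the result left smi range, so undo the operation and
    // fall back to the stub below.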
    // Call stub. Undo operation first.
    if (expr->op() == Token::INC) {
      __ sub(eax, Immediate(Smi::FromInt(1)));
    } else {
      __ add(eax, Immediate(Smi::FromInt(1)));
    }
    __ jmp(&stub_call, Label::kNear);
    __ bind(&slow);
  }

  ToNumberStub convert_stub(isolate());
  __ CallStub(&convert_stub);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          __ push(eax);
          break;
        case NAMED_PROPERTY:
          __ mov(Operand(esp, kPointerSize), eax);
          break;
        case KEYED_PROPERTY:
          __ mov(Operand(esp, 2 * kPointerSize), eax);
          break;
      }
    }
  }

  // Record position before stub call.
  SetSourcePosition(expr->position());

  // Call stub for +1/-1.
  __ bind(&stub_call);
  __ mov(edx, eax);
  __ mov(eax, Immediate(Smi::FromInt(1)));
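  // The BinaryOpIC takes its left operand in edx and its right in eax, so
  // this computes "old value <op> 1".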
  BinaryOpICStub stub(isolate(), expr->binary_op(), NO_OVERWRITE);
  CallIC(stub.GetCode(), expr->CountBinOpFeedbackId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in eax.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        // Perform the assignment as if via '='.
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN);
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(eax);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        // Perform the assignment as if via '='.
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN);
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(eax);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(StoreIC::NameRegister(), prop->key()->AsLiteral()->value());
      __ pop(StoreIC::ReceiverRegister());
      CallStoreIC(expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ pop(KeyedStoreIC::NameRegister());
      __ pop(KeyedStoreIC::ReceiverRegister());
      Handle<Code> ic = strict_mode() == SLOPPY
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
      CallIC(ic, expr->CountStoreFeedbackId());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        // Result is on the stack
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
  }
}


void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
  VariableProxy* proxy = expr->AsVariableProxy();
  DCHECK(!context()->IsEffect());
  DCHECK(!context()->IsTest());

  if (proxy != NULL && proxy->var()->IsUnallocated()) {
    Comment cmnt(masm_, "[ Global variable");
    __ mov(LoadIC::ReceiverRegister(), GlobalObjectOperand());
    __ mov(LoadIC::NameRegister(), Immediate(proxy->name()));
    if (FLAG_vector_ics) {
      __ mov(LoadIC::SlotRegister(),
             Immediate(Smi::FromInt(proxy->VariableFeedbackSlot())));
    }
    // Use a regular load, not a contextual load, to avoid a reference
    // error.
    CallLoadIC(NOT_CONTEXTUAL);
    PrepareForBailout(expr, TOS_REG);
    context()->Plug(eax);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    Comment cmnt(masm_, "[ Lookup slot");
    Label done, slow;

    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    EmitDynamicLookupFastCase(proxy, INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    __ push(esi);
    __ push(Immediate(proxy->name()));
    __ CallRuntime(Runtime::kLoadLookupSlotNoReferenceError, 2);
    PrepareForBailout(expr, TOS_REG);
    __ bind(&done);

    context()->Plug(eax);
  } else {
    // This expression cannot throw a reference error at the top level.
    VisitInDuplicateContext(expr);
  }
}


void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Factory* factory = isolate()->factory();
  if (String::Equals(check, factory->number_string())) {
    __ JumpIfSmi(eax, if_true);
    __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
           isolate()->factory()->heap_number_map());
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->string_string())) {
    __ JumpIfSmi(eax, if_false);
    __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
    __ j(above_equal, if_false);
    // Check for undetectable objects => false.
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    Split(zero, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->symbol_string())) {
    __ JumpIfSmi(eax, if_false);
    __ CmpObjectType(eax, SYMBOL_TYPE, edx);
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->boolean_string())) {
    __ cmp(eax, isolate()->factory()->true_value());
    __ j(equal, if_true);
    __ cmp(eax, isolate()->factory()->false_value());
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->undefined_string())) {
    __ cmp(eax, isolate()->factory()->undefined_value());
    __ j(equal, if_true);
    __ JumpIfSmi(eax, if_false);
    // Check for undetectable objects => true.
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ movzx_b(ecx, FieldOperand(edx, Map::kBitFieldOffset));
    __ test(ecx, Immediate(1 << Map::kIsUndetectable));
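    // Undetectable objects (e.g. document.all) deliberately answer
    // "undefined" to typeof, so the bit being set selects the true branch.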
    Split(not_zero, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->function_string())) {
    __ JumpIfSmi(eax, if_false);
    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
    __ CmpObjectType(eax, JS_FUNCTION_TYPE, edx);
    __ j(equal, if_true);
    __ CmpInstanceType(edx, JS_FUNCTION_PROXY_TYPE);
    Split(equal, if_true, if_false, fall_through);
  } else if (String::Equals(check, factory->object_string())) {
    __ JumpIfSmi(eax, if_false);
    __ cmp(eax, isolate()->factory()->null_value());
    __ j(equal, if_true);
    __ CmpObjectType(eax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, edx);
    __ j(below, if_false);
    __ CmpInstanceType(edx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ j(above, if_false);
    // Check for undetectable objects => false.
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    Split(zero, if_true, if_false, fall_through);
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetSourcePosition(expr->position());

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow. Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ cmp(eax, isolate()->factory()->true_value());
      Split(equal, if_true, if_false, fall_through);
      break;

    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ test(eax, eax);
      // The stub returns 0 for true.
      Split(zero, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cc = CompareIC::ComputeCondition(op);
      __ pop(edx);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ mov(ecx, edx);
        __ or_(ecx, eax);
        patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
        __ cmp(edx, eax);
        Split(cc, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      // Record position and call the compare IC.
      SetSourcePosition(expr->position());
      Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op);
      CallIC(ic, expr->CompareOperationFeedbackId());
      patch_site.EmitPatchInfo();

      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ test(eax, eax);
      Split(cc, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  Handle<Object> nil_value = nil == kNullValue
      ? isolate()->factory()->null_value()
      : isolate()->factory()->undefined_value();
  if (expr->op() == Token::EQ_STRICT) {
    __ cmp(eax, nil_value);
    Split(equal, if_true, if_false, fall_through);
  } else {
    Handle<Code> ic = CompareNilICStub::GetUninitialized(isolate(), nil);
    CallIC(ic, expr->CompareOperationFeedbackId());
    __ test(eax, eax);
    Split(not_zero, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(eax);
}


Register FullCodeGenerator::result_register() {
  return eax;
}


Register FullCodeGenerator::context_register() {
  return esi;
}


void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  DCHECK_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ mov(Operand(ebp, frame_offset), value);
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ mov(dst, ContextOperand(esi, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* declaration_scope = scope()->DeclarationScope();
  if (declaration_scope->is_global_scope() ||
      declaration_scope->is_module_scope()) {
    // Contexts nested in the native context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code. Pass a smi sentinel and let the runtime look up the empty
    // function.
    __ push(Immediate(Smi::FromInt(0)));
  } else if (declaration_scope->is_eval_scope()) {
    // Contexts nested inside eval code have the same closure as the context
    // calling eval, not the anonymous closure containing the eval code.
    // Fetch it from the context.
    __ push(ContextOperand(esi, Context::CLOSURE_INDEX));
  } else {
    DCHECK(declaration_scope->is_function_scope());
    __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  }
}


// ----------------------------------------------------------------------------
// Non-local control flow support.

void FullCodeGenerator::EnterFinallyBlock() {
  // Cook return address on top of stack (smi encoded Code* delta)
  DCHECK(!result_register().is(edx));
  __ pop(edx);
  __ sub(edx, Immediate(masm_->CodeObject()));
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
  STATIC_ASSERT(kSmiTag == 0);
  __ SmiTag(edx);
  __ push(edx);
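  // Storing the delta rather than the raw return address keeps the slot a
  // valid smi, so the GC can move the code object without leaving a stale
  // pointer on the stack.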

  // Store result register while executing finally block.
  __ push(result_register());

  // Store pending message while executing finally block.
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(edx, Operand::StaticVariable(pending_message_obj));
  __ push(edx);

  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ mov(edx, Operand::StaticVariable(has_pending_message));
  __ SmiTag(edx);
  __ push(edx);

  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ mov(edx, Operand::StaticVariable(pending_message_script));
  __ push(edx);
}


void FullCodeGenerator::ExitFinallyBlock() {
  DCHECK(!result_register().is(edx));
  // Restore pending message from stack.
  __ pop(edx);
  ExternalReference pending_message_script =
      ExternalReference::address_of_pending_message_script(isolate());
  __ mov(Operand::StaticVariable(pending_message_script), edx);

  __ pop(edx);
  __ SmiUntag(edx);
  ExternalReference has_pending_message =
      ExternalReference::address_of_has_pending_message(isolate());
  __ mov(Operand::StaticVariable(has_pending_message), edx);

  __ pop(edx);
  ExternalReference pending_message_obj =
      ExternalReference::address_of_pending_message_obj(isolate());
  __ mov(Operand::StaticVariable(pending_message_obj), edx);

  // Restore result register from stack.
  __ pop(result_register());

  // Uncook return address.
  __ pop(edx);
  __ SmiUntag(edx);
  __ add(edx, Immediate(masm_->CodeObject()));
  __ jmp(edx);
}


#undef __
#define __ ACCESS_MASM(masm())

FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
    int* stack_depth,
    int* context_length) {
  // The macros used here must preserve the result register.

  // Because the handler block contains the context of the finally
  // code, we can restore it directly from there for the finally code
  // rather than iteratively unwinding contexts via their previous
  // links.
  __ Drop(*stack_depth);  // Down to the handler block.
  if (*context_length > 0) {
    // Restore the context to its dedicated register and the stack.
    __ mov(esi, Operand(esp, StackHandlerConstants::kContextOffset));
    __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
  }
  PopTryHandler();
  __ call(finally_entry_);

  *stack_depth = 0;
  *context_length = 0;
  return previous_;
}

#undef __


static const byte kJnsInstruction = 0x79;
static const byte kJnsOffset = 0x11;
static const byte kNopByteOne = 0x66;
static const byte kNopByteTwo = 0x90;
#ifdef DEBUG
static const byte kCallInstruction = 0xe8;
#endif

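// 0x79 0x11 encodes "jns +0x11" (skip the interrupt call when the counter is
// still non-negative); 0x66 0x90 is the two-byte nop that replaces it so the
// patched code always takes the call.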

void BackEdgeTable::PatchAt(Code* unoptimized_code,
                            Address pc,
                            BackEdgeState target_state,
                            Code* replacement_code) {
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  Address jns_offset_address = call_target_address - 2;

  switch (target_state) {
    case INTERRUPT:
      //   sub <profiling_counter>, <delta>  ;; Not changed
      //   jns ok
      //   call <interrupt stub>
      //   ok:
      *jns_instr_address = kJnsInstruction;
      *jns_offset_address = kJnsOffset;
      break;
    case ON_STACK_REPLACEMENT:
    case OSR_AFTER_STACK_CHECK:
      //   sub <profiling_counter>, <delta>  ;; Not changed
      //   nop
      //   nop
      //   call <on-stack replacement>
      //   ok:
      *jns_instr_address = kNopByteOne;
      *jns_offset_address = kNopByteTwo;
      break;
  }

  Assembler::set_target_address_at(call_target_address,
                                   unoptimized_code,
                                   replacement_code->entry());
  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, call_target_address, replacement_code);
}


BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
    Isolate* isolate,
    Code* unoptimized_code,
    Address pc) {
  Address call_target_address = pc - kIntSize;
  Address jns_instr_address = call_target_address - 3;
  DCHECK_EQ(kCallInstruction, *(call_target_address - 1));

  if (*jns_instr_address == kJnsInstruction) {
    DCHECK_EQ(kJnsOffset, *(call_target_address - 2));
    DCHECK_EQ(isolate->builtins()->InterruptCheck()->entry(),
              Assembler::target_address_at(call_target_address,
                                           unoptimized_code));
    return INTERRUPT;
  }

  DCHECK_EQ(kNopByteOne, *jns_instr_address);
  DCHECK_EQ(kNopByteTwo, *(call_target_address - 2));

  if (Assembler::target_address_at(call_target_address, unoptimized_code) ==
      isolate->builtins()->OnStackReplacement()->entry()) {
    return ON_STACK_REPLACEMENT;
  }

  DCHECK_EQ(isolate->builtins()->OsrAfterStackCheck()->entry(),
            Assembler::target_address_at(call_target_address,
                                         unoptimized_code));
  return OSR_AFTER_STACK_CHECK;
}


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32