// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_IA32)

#include "code-stubs.h"
#include "codegen.h"
#include "compiler.h"
#include "debug.h"
#include "full-codegen.h"
#include "isolate-inl.h"
#include "parser.h"
#include "scopes.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)
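

// A patch site is a location in generated code that the inline cache
// system later rewrites. After "test reg, kSmiTagMask" the zero flag is
// set exactly when reg is a smi, so patching the initial jc/jnc below to
// jz/jnz turns the always/never-taken jumps into real smi checks once
// type feedback is available. EmitPatchInfo() records the distance back
// to the patch site in the immediate of a side-effect-free
// "test eax, <delta>" instruction so the patching code can locate it.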
class JumpPatchSite BASE_EMBEDDED {
 public:
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
#ifdef DEBUG
    info_emitted_ = false;
#endif
  }

  ~JumpPatchSite() {
    ASSERT(patch_site_.is_bound() == info_emitted_);
  }

  void EmitJumpIfNotSmi(Register reg,
                        Label* target,
                        Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(not_carry, target, distance);  // Always taken before patched.
  }

  void EmitJumpIfSmi(Register reg,
                     Label* target,
                     Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(carry, target, distance);  // Never taken before patched.
  }

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
      ASSERT(is_int8(delta_to_patch_site));
      __ test(eax, Immediate(delta_to_patch_site));
#ifdef DEBUG
      info_emitted_ = true;
#endif
    } else {
      __ nop();  // Signals no inlined smi code.
    }
  }

 private:
  // jc will be patched with jz, jnc will become jnz.
  void EmitJump(Condition cc, Label* target, Label::Distance distance) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    ASSERT(cc == carry || cc == not_carry);
    __ bind(&patch_site_);
    __ j(cc, target, distance);
  }

  MacroAssembler* masm_;
  Label patch_site_;
#ifdef DEBUG
  bool info_emitted_;
#endif
};


// Generate code for a JS function.  On entry to the function the receiver
// and arguments have been pushed on the stack left to right, with the
// return address on top of them.  The actual argument count matches the
// formal parameter count expected by the function.
//
// The live registers are:
//   o edi: the JS function object being called (i.e. ourselves)
//   o esi: our context
//   o ebp: our caller's frame pointer
//   o esp: stack pointer (pointing to return address)
//
// The function builds a JS frame.  Please see JavaScriptFrameConstants in
// frames-ia32.h for its layout.
void FullCodeGenerator::Generate() {
  CompilationInfo* info = info_;
  handler_table_ =
      isolate()->factory()->NewFixedArray(function()->handler_count(), TENURED);
  profiling_counter_ = isolate()->factory()->NewJSGlobalPropertyCell(
      Handle<Smi>(Smi::FromInt(FLAG_interrupt_budget)));
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

#ifdef DEBUG
  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {
    __ int3();
  }
#endif

  // Strict mode functions and builtins need to replace the receiver
  // with undefined when called as functions (without an explicit
  // receiver object). ecx is zero for method calls and non-zero for
  // function calls.
  if (!info->is_classic_mode() || info->is_native()) {
    Label ok;
    __ test(ecx, ecx);
    __ j(zero, &ok, Label::kNear);
    // +1 for return address.
    int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
    __ mov(ecx, Operand(esp, receiver_offset));
    __ JumpIfSmi(ecx, &ok);
    __ CmpObjectType(ecx, JS_GLOBAL_PROXY_TYPE, ecx);
    __ j(not_equal, &ok, Label::kNear);
    __ mov(Operand(esp, receiver_offset),
           Immediate(isolate()->factory()->undefined_value()));
    __ bind(&ok);
  }

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  __ push(ebp);  // Caller's frame pointer.
  __ mov(ebp, esp);
  __ push(esi);  // Callee's context.
  __ push(edi);  // Callee's JS function.

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    if (locals_count == 1) {
      __ push(Immediate(isolate()->factory()->undefined_value()));
    } else if (locals_count > 1) {
      __ mov(eax, Immediate(isolate()->factory()->undefined_value()));
      for (int i = 0; i < locals_count; i++) {
        __ push(eax);
      }
    }
  }

  bool function_in_register = true;
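  // function_in_register tracks whether edi still holds the closure, so
  // later code can avoid reloading it from the frame until a stub or
  // runtime call may have clobbered the register.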

  // Possibly allocate a local context.
  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0 ||
      (scope()->is_qml_mode() && scope()->is_global_scope())) {
    Comment cmnt(masm_, "[ Allocate local context");
    // Argument to NewContext is the function, which is still in edi.
    __ push(edi);
    if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub((heap_slots < 0) ? 0 : heap_slots);
      __ CallStub(&stub);
    } else {
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    }
    function_in_register = false;
    // Context is returned in both eax and esi.  It replaces the context
    // passed to us.  It's saved in the stack and kept live in esi.
    __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);

    // Copy parameters into context if necessary.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ mov(eax, Operand(ebp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(var->index());
        __ mov(Operand(esi, context_offset), eax);
        // Update the write barrier. This clobbers eax and ebx.
        __ RecordWriteContextSlot(esi,
                                  context_offset,
                                  eax,
                                  ebx,
                                  kDontSaveFPRegs);
      }
    }
  }

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (function_in_register) {
      __ push(edi);
    } else {
      __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    }
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
    __ lea(edx,
           Operand(ebp, StandardFrameConstants::kCallerSPOffset + offset));
    __ push(edx);
    __ SafePush(Immediate(Smi::FromInt(num_parameters)));
    // Arguments to ArgumentsAccessStub:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (!is_classic_mode()) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW;
    } else {
      type = ArgumentsAccessStub::NEW_NON_STRICT_FAST;
    }
    ArgumentsAccessStub stub(type);
    __ CallStub(&stub);

    SetVar(arguments, eax, ebx, edx);
  }

  if (FLAG_trace) {
    __ CallRuntime(Runtime::kTraceEnter, 0);
  }

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

  } else {
    PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableDeclaration* function = scope()->function();
        ASSERT(function->proxy()->var()->mode() == CONST ||
               function->proxy()->var()->mode() == CONST_HARMONY);
        ASSERT(function->proxy()->var()->location() != Variable::UNALLOCATED);
        VisitVariableDeclaration(function);
      }
      VisitDeclarations(scope()->declarations());
    }

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(AstNode::kDeclarationsId, NO_REGISTERS);
      Label ok;
      ExternalReference stack_limit =
          ExternalReference::address_of_stack_limit(isolate());
      __ cmp(esp, Operand::StaticVariable(stack_limit));
      __ j(above_equal, &ok, Label::kNear);
      StackCheckStub stub;
      __ CallStub(&stub);
      __ bind(&ok);
    }

    { Comment cmnt(masm_, "[ Body");
      ASSERT(loop_depth() == 0);
      VisitStatements(function()->body());
      ASSERT(loop_depth() == 0);
    }
  }

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ mov(eax, isolate()->factory()->undefined_value());
    EmitReturnSequence();
  }
}


void FullCodeGenerator::ClearAccumulator() {
  __ Set(eax, Immediate(Smi::FromInt(0)));
}


void FullCodeGenerator::EmitProfilingCounterDecrement(int delta) {
  __ mov(ebx, Immediate(profiling_counter_));
  __ sub(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset),
         Immediate(Smi::FromInt(delta)));
}


void FullCodeGenerator::EmitProfilingCounterReset() {
  int reset_value = FLAG_interrupt_budget;
  if (info_->ShouldSelfOptimize() && !FLAG_retry_self_opt) {
    // Self-optimization is a one-off thing: if it fails, don't try again.
    reset_value = Smi::kMaxValue;
  }
  if (isolate()->IsDebuggerActive()) {
    // Detect debug break requests as soon as possible.
    reset_value = 10;
  }
  __ mov(ebx, Immediate(profiling_counter_));
  __ mov(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset),
         Immediate(Smi::FromInt(reset_value)));
}
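

// Note: the profiling counter is a JSGlobalPropertyCell holding a smi
// budget. Back edges and returns decrement it by a weight; when the
// budget goes negative the generated code calls an interrupt stub, and
// EmitProfilingCounterReset() restores the budget afterwards.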


static const int kMaxBackEdgeWeight = 127;
static const int kBackEdgeDistanceDivisor = 100;
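// With weighted back edges the interrupt weight is
//   Min(kMaxBackEdgeWeight, Max(1, distance / kBackEdgeDistanceDivisor)),
// so loops with more code between back edges drain the budget faster.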


void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt,
                                       Label* back_edge_target) {
  Comment cmnt(masm_, "[ Stack check");
  Label ok;

  if (FLAG_count_based_interrupts) {
    int weight = 1;
    if (FLAG_weighted_back_edges) {
      ASSERT(back_edge_target->is_bound());
      int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
      weight = Min(kMaxBackEdgeWeight,
                   Max(1, distance / kBackEdgeDistanceDivisor));
    }
    EmitProfilingCounterDecrement(weight);
    __ j(positive, &ok, Label::kNear);
    InterruptStub stub;
    __ CallStub(&stub);
  } else {
    // Count based interrupts happen often enough when they are enabled
    // that the additional stack checks are not necessary (they would
    // only check for interrupts).
    ExternalReference stack_limit =
        ExternalReference::address_of_stack_limit(isolate());
    __ cmp(esp, Operand::StaticVariable(stack_limit));
    __ j(above_equal, &ok, Label::kNear);
    StackCheckStub stub;
    __ CallStub(&stub);
  }

  // Record a mapping of this PC offset to the OSR id.  This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordStackCheck(stmt->OsrEntryId());

  // Loop stack checks can be patched to perform on-stack replacement. In
  // order to decide whether or not to perform OSR we embed the loop depth
  // in a test instruction after the call so we can extract it from the OSR
  // builtin.
  ASSERT(loop_depth() > 0);
  __ test(eax, Immediate(Min(loop_depth(), Code::kMaxLoopNestingMarker)));

  if (FLAG_count_based_interrupts) {
    EmitProfilingCounterReset();
  }

  __ bind(&ok);
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC.  This is used if the OSR
  // entry becomes the target of a bailout.  We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);
}


void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ jmp(&return_label_);
  } else {
    // Common return label.
    __ bind(&return_label_);
    if (FLAG_trace) {
      __ push(eax);
      __ CallRuntime(Runtime::kTraceExit, 1);
    }
    if (FLAG_interrupt_at_exit || FLAG_self_optimization) {
      // Pretend that the exit is a backwards jump to the entry.
      int weight = 1;
      if (info_->ShouldSelfOptimize()) {
        weight = FLAG_interrupt_budget / FLAG_self_opt_count;
      } else if (FLAG_weighted_back_edges) {
        int distance = masm_->pc_offset();
        weight = Min(kMaxBackEdgeWeight,
                     Max(1, distance / kBackEdgeDistanceDivisor));
      }
      EmitProfilingCounterDecrement(weight);
      Label ok;
      __ j(positive, &ok, Label::kNear);
      __ push(eax);
      if (info_->ShouldSelfOptimize() && FLAG_direct_self_opt) {
        __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
        __ CallRuntime(Runtime::kOptimizeFunctionOnNextCall, 1);
      } else {
        InterruptStub stub;
        __ CallStub(&stub);
      }
      __ pop(eax);
      EmitProfilingCounterReset();
      __ bind(&ok);
    }
#ifdef DEBUG
    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);
#endif
    SetSourcePosition(function()->end_position() - 1);
    __ RecordJSReturn();
    // Do not use the leave instruction here because it is too short to
    // patch with the code required by the debugger.
    __ mov(esp, ebp);
    __ pop(ebp);

    int arguments_bytes = (info_->scope()->num_parameters() + 1) * kPointerSize;
    __ Ret(arguments_bytes, ecx);
#ifdef ENABLE_DEBUGGER_SUPPORT
    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    ASSERT(Assembler::kJSReturnSequenceLength <=
           masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
#endif
  }
}
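

// The Plug() family moves a just-computed value into the destination the
// current expression context demands: EffectContext discards it,
// AccumulatorValueContext leaves it in the accumulator (eax),
// StackValueContext pushes it, and TestContext branches on it.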
void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
}


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  MemOperand operand = codegen()->VarOperand(var, result_register());
  // Memory operands can be pushed directly.
  __ push(operand);
}


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.
}


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
  if (lit->IsSmi()) {
    __ SafeSet(result_register(), Immediate(lit));
  } else {
    __ Set(result_register(), Immediate(lit));
  }
}


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
  if (lit->IsSmi()) {
    __ SafePush(Immediate(lit));
  } else {
    __ push(Immediate(lit));
  }
}
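

// TestContext::Plug(Handle<Object>) applies the ToBoolean conversion to
// literal values at compile time, so each known literal kind turns into
// an unconditional jump; only genuinely dynamic values fall back to the
// generic DoTest() path.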
void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  ASSERT(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
    } else {
      if (true_label_ != fall_through_) __ jmp(true_label_);
    }
  } else {
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), lit);
    codegen()->DoTest(this);
  }
}


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
}


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count,
    Register reg) const {
  ASSERT(count > 0);
  __ Drop(count);
  __ Move(result_register(), reg);
}


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  ASSERT(count > 0);
  if (count > 1) __ Drop(count - 1);
  __ mov(Operand(esp, 0), reg);
}


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  ASSERT(count > 0);
  // For simplicity we always test the accumulator register.
  __ Drop(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  ASSERT(materialize_true == materialize_false);
  __ bind(materialize_true);
}


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ mov(result_register(), isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ mov(result_register(), isolate()->factory()->false_value());
  __ bind(&done);
}


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  Label done;
  __ bind(materialize_true);
  __ push(Immediate(isolate()->factory()->true_value()));
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ push(Immediate(isolate()->factory()->false_value()));
  __ bind(&done);
}


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  ASSERT(materialize_true == true_label_);
  ASSERT(materialize_false == false_label_);
}


void FullCodeGenerator::EffectContext::Plug(bool flag) const {
}


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Handle<Object> value = flag
      ? isolate()->factory()->true_value()
      : isolate()->factory()->false_value();
  __ mov(result_register(), value);
}


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Handle<Object> value = flag
      ? isolate()->factory()->true_value()
      : isolate()->factory()->false_value();
  __ push(Immediate(value));
}


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
                                          true,
                                          true_label_,
                                          false_label_);
  if (flag) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  }
}


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* if_true,
                               Label* if_false,
                               Label* fall_through) {
  ToBooleanStub stub(result_register());
  __ push(result_register());
  __ CallStub(&stub, condition->test_id());
  __ test(result_register(), result_register());
  // The stub returns nonzero for true.
  Split(not_zero, if_true, if_false, fall_through);
}
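

// Split() emits at most one branch: whichever of if_true/if_false equals
// fall_through is reached by simply falling out of the emitted code, so
// no jump to an immediately following label is ever generated.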
void FullCodeGenerator::Split(Condition cc,
                              Label* if_true,
                              Label* if_false,
                              Label* fall_through) {
  if (if_false == fall_through) {
    __ j(cc, if_true);
  } else if (if_true == fall_through) {
    __ j(NegateCondition(cc), if_false);
  } else {
    __ j(cc, if_true);
    __ jmp(if_false);
  }
}


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  ASSERT(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
  } else {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  }
  return Operand(ebp, offset);
}


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  } else {
    return StackOperand(var);
  }
}


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  MemOperand location = VarOperand(var, dest);
  __ mov(dest, location);
}


void FullCodeGenerator::SetVar(Variable* var,
                               Register src,
                               Register scratch0,
                               Register scratch1) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  ASSERT(!scratch0.is(src));
  ASSERT(!scratch0.is(scratch1));
  ASSERT(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ mov(location, src);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    int offset = Context::SlotOffset(var->index());
    ASSERT(!scratch0.is(esi) && !src.is(esi) && !scratch1.is(esi));
    __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);
  }
}
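

// Note: when should_normalize is true below, straight-line execution
// jumps over the compare/split that follows the bailout point; that code
// is only reached when deoptimized code re-enters the unoptimized
// function here with the condition value in eax, and it re-dispatches to
// the branch the optimized code would have taken.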
void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
                                                     Label* if_true,
                                                     Label* if_false) {
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  Label skip;
  if (should_normalize) __ jmp(&skip, Label::kNear);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ cmp(eax, isolate()->factory()->true_value());
    Split(equal, if_true, if_false, NULL);
    __ bind(&skip);
  }
}


void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
  // The variable in the declaration always resides in the current function
  // context.
  ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
  if (FLAG_debug_code) {
    // Check that we're not inside a with or catch context.
    __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
    __ cmp(ebx, isolate()->factory()->with_context_map());
    __ Check(not_equal, "Declaration in with context.");
    __ cmp(ebx, isolate()->factory()->catch_context_map());
    __ Check(not_equal, "Declaration in catch context.");
  }
}


void FullCodeGenerator::VisitVariableDeclaration(
    VariableDeclaration* declaration) {
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // local context.
  VariableProxy* proxy = declaration->proxy();
  VariableMode mode = declaration->mode();
  Variable* variable = proxy->var();
  bool hole_init = mode == CONST || mode == CONST_HARMONY || mode == LET;
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      globals_->Add(variable->name());
      globals_->Add(variable->binding_needs_init()
                        ? isolate()->factory()->the_hole_value()
                        : isolate()->factory()->undefined_value());
      globals_->Add(isolate()->factory()->ToBoolean(variable->is_qml_global()));
      break;

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        __ mov(StackOperand(variable),
               Immediate(isolate()->factory()->the_hole_value()));
      }
      break;

    case Variable::CONTEXT:
      if (hole_init) {
        Comment cmnt(masm_, "[ VariableDeclaration");
        EmitDebugCheckDeclarationContext(variable);
        __ mov(ContextOperand(esi, variable->index()),
               Immediate(isolate()->factory()->the_hole_value()));
        // No write barrier since the hole value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      }
      break;

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ VariableDeclaration");
      __ push(esi);
      __ push(Immediate(variable->name()));
      // VariableDeclaration nodes are always introduced in one of four modes.
      ASSERT(mode == VAR || mode == LET ||
             mode == CONST || mode == CONST_HARMONY);
      PropertyAttributes attr = (mode == CONST || mode == CONST_HARMONY)
          ? READ_ONLY : NONE;
      __ push(Immediate(Smi::FromInt(attr)));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (hole_init) {
        __ push(Immediate(isolate()->factory()->the_hole_value()));
      } else {
        __ push(Immediate(Smi::FromInt(0)));  // Indicates no initial value.
      }
      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitFunctionDeclaration(
    FunctionDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      globals_->Add(variable->name());
      Handle<SharedFunctionInfo> function =
          Compiler::BuildFunctionInfo(declaration->fun(), script());
      // Check for stack-overflow exception.
      if (function.is_null()) return SetStackOverflow();
      globals_->Add(function);
      globals_->Add(isolate()->factory()->ToBoolean(variable->is_qml_global()));
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      VisitForAccumulatorValue(declaration->fun());
      __ mov(StackOperand(variable), result_register());
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      VisitForAccumulatorValue(declaration->fun());
      __ mov(ContextOperand(esi, variable->index()), result_register());
      // We know that we have written a function, which is not a smi.
      __ RecordWriteContextSlot(esi,
                                Context::SlotOffset(variable->index()),
                                result_register(),
                                ecx,
                                kDontSaveFPRegs,
                                EMIT_REMEMBERED_SET,
                                OMIT_SMI_CHECK);
      PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      break;
    }

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ FunctionDeclaration");
      __ push(esi);
      __ push(Immediate(variable->name()));
      __ push(Immediate(Smi::FromInt(NONE)));
      VisitForStackValue(declaration->fun());
      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
      break;
    }
  }
}


void FullCodeGenerator::VisitModuleDeclaration(ModuleDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  Handle<JSModule> instance = declaration->module()->interface()->Instance();
  ASSERT(!instance.is_null());

  switch (variable->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "[ ModuleDeclaration");
      globals_->Add(variable->name());
      globals_->Add(instance);
      globals_->Add(isolate()->factory()->ToBoolean(variable->is_qml_global()));
      Visit(declaration->module());
      break;
    }

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ModuleDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      __ mov(ContextOperand(esi, variable->index()), Immediate(instance));
      Visit(declaration->module());
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::LOOKUP:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitImportDeclaration(ImportDeclaration* declaration) {
  VariableProxy* proxy = declaration->proxy();
  Variable* variable = proxy->var();
  switch (variable->location()) {
    case Variable::UNALLOCATED:
      // TODO(rossberg)
      break;

    case Variable::CONTEXT: {
      Comment cmnt(masm_, "[ ImportDeclaration");
      EmitDebugCheckDeclarationContext(variable);
      // TODO(rossberg)
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::LOOKUP:
      UNREACHABLE();
  }
}


void FullCodeGenerator::VisitExportDeclaration(ExportDeclaration* declaration) {
  // TODO(rossberg)
}
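

// Note: in this QML-enabled tree each global declaration above contributes
// a (name, value, is_qml_global) triple to the globals_ list; DeclareGlobals
// hands that array to the runtime in a single call.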
void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ push(esi);  // The context is the first argument.
  __ push(Immediate(pairs));
  __ push(Immediate(Smi::FromInt(DeclareGlobalsFlags())));
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.
}


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;
      continue;
    }

    Comment cmnt(masm_, "[ Case comparison");
    __ bind(&next_test);
    next_test.Unuse();

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ mov(edx, Operand(esp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      Label slow_case;
      __ mov(ecx, edx);
      __ or_(ecx, eax);
      patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);

      __ cmp(edx, eax);
      __ j(not_equal, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ jmp(clause->body_target());
      __ bind(&slow_case);
    }

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
    CallIC(ic, RelocInfo::CODE_TARGET, clause->CompareId());
    patch_site.EmitPatchInfo();
    __ test(eax, eax);
    __ j(not_equal, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ jmp(clause->body_target());
  }

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ bind(&next_test);
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ jmp(nested_statement.break_label());
  } else {
    __ jmp(default_clause->body_target());
  }

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());
  }

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
}


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt);

  Label loop, exit;
  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. Both SpiderMonkey and JSC
  // ignore null and undefined in contrast to the specification; see
  // ECMA-262 section 12.6.4.
  VisitForAccumulatorValue(stmt->enumerable());
  __ cmp(eax, isolate()->factory()->undefined_value());
  __ j(equal, &exit);
  __ cmp(eax, isolate()->factory()->null_value());
  __ j(equal, &exit);

  PrepareForBailoutForId(stmt->PrepareId(), TOS_REG);

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(eax, &convert, Label::kNear);
  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
  __ j(above_equal, &done_convert, Label::kNear);
  __ bind(&convert);
  __ push(eax);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);
  __ push(eax);

  // Check for proxies.
  Label call_runtime, use_cache, fixed_array;
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CmpObjectType(eax, LAST_JS_PROXY_TYPE, ecx);
  __ j(below_equal, &call_runtime);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.
  __ CheckEnumCache(&call_runtime);

  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  __ jmp(&use_cache, Label::kNear);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(eax);
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
         isolate()->factory()->meta_map());
  __ j(not_equal, &fixed_array);

  // We got a map in register eax. Get the enumeration cache from it.
  __ bind(&use_cache);
  __ LoadInstanceDescriptors(eax, ecx);
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumerationIndexOffset));
  __ mov(edx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Set up the four remaining stack slots.
  __ push(eax);  // Map.
  __ push(edx);  // Enumeration cache.
  __ mov(eax, FieldOperand(edx, FixedArray::kLengthOffset));
  __ push(eax);  // Enumeration cache length (as smi).
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.
  __ jmp(&loop);

  // We got a fixed array in register eax. Iterate through that.
  Label non_proxy;
  __ bind(&fixed_array);

  Handle<JSGlobalPropertyCell> cell =
      isolate()->factory()->NewJSGlobalPropertyCell(
          Handle<Object>(
              Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker)));
  RecordTypeFeedbackCell(stmt->PrepareId(), cell);
  __ LoadHeapObject(ebx, cell);
  __ mov(FieldOperand(ebx, JSGlobalPropertyCell::kValueOffset),
         Immediate(Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker)));

  __ mov(ebx, Immediate(Smi::FromInt(1)));  // Smi indicates slow check.
  __ mov(ecx, Operand(esp, 0 * kPointerSize));  // Get enumerated object.
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CmpObjectType(ecx, LAST_JS_PROXY_TYPE, ecx);
  __ j(above, &non_proxy);
  __ mov(ebx, Immediate(Smi::FromInt(0)));  // Zero indicates proxy.
  __ bind(&non_proxy);
  __ push(ebx);  // Smi check marker.
  __ push(eax);  // Array.
  __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
  __ push(eax);  // Fixed array length (as smi).
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.
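
  // Both paths leave the same five slots on the stack, from top to bottom:
  //   [0] current index (smi)
  //   [1] array length (smi)
  //   [2] enum cache or fixed array of keys
  //   [3] expected map, or a smi marker in the slow/proxy case
  //   [4] the enumerable object itself
  // These are the five words removed at the break label below.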

  // Generate code for doing the condition check.
  PrepareForBailoutForId(stmt->BodyId(), NO_REGISTERS);
  __ bind(&loop);
  __ mov(eax, Operand(esp, 0 * kPointerSize));  // Get the current index.
  __ cmp(eax, Operand(esp, 1 * kPointerSize));  // Compare to the array length.
  __ j(above_equal, loop_statement.break_label());

  // Get the current entry of the array into register ebx.
  __ mov(ebx, Operand(esp, 2 * kPointerSize));
  __ mov(ebx, FieldOperand(ebx, eax, times_2, FixedArray::kHeaderSize));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register edx.
  __ mov(edx, Operand(esp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  Label update_each;
  __ mov(ecx, Operand(esp, 4 * kPointerSize));
  __ cmp(edx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ j(equal, &update_each, Label::kNear);

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  ASSERT(Smi::FromInt(0) == 0);
  __ test(edx, edx);
  __ j(zero, &update_each);

  // Convert the entry to a string or null if it isn't a property
  // anymore. If the property has been removed while iterating, we
  // just skip it.
  __ push(ecx);  // Enumerable.
  __ push(ebx);  // Current entry.
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ test(eax, eax);
  __ j(equal, loop_statement.continue_label());
  __ mov(ebx, eax);

  // Update the 'each' property or variable from the possibly filtered
  // entry in register ebx.
  __ bind(&update_each);
  __ mov(result_register(), ebx);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each());
  }

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing the
  // index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));

  EmitStackCheck(stmt, &loop);
  __ jmp(&loop);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ add(esp, Immediate(5 * kPointerSize));

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);
  __ bind(&exit);
  decrement_loop_depth();
}


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      !pretenure &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(info->language_mode());
    __ push(Immediate(info));
    __ CallStub(&stub);
  } else {
    __ push(esi);
    __ push(Immediate(info));
    __ push(Immediate(pretenure
                      ? isolate()->factory()->true_value()
                      : isolate()->factory()->false_value()));
    __ CallRuntime(Runtime::kNewClosure, 3);
  }
  context()->Plug(eax);
}


void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
                                                      TypeofState typeof_state,
                                                      Label* slow) {
  Register context = esi;
  Register temp = edx;

  Scope* s = scope();
  while (s != NULL) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_non_strict_eval()) {
        // Check that extension is NULL.
        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
               Immediate(0));
        __ j(not_equal, slow);
      }
      // Load next context in chain.
      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering esi.
      context = temp;
    }
    // If no outer scope calls eval, we do not need to check more
    // context extensions.  If we have reached an eval scope, we check
    // all extensions from this point.
    if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();
  }

  if (s != NULL && s->is_eval_scope()) {
    // Loop up the context chain.  There is no frame effect so it is
    // safe to use raw labels here.
    Label next, fast;
    if (!context.is(temp)) {
      __ mov(temp, context);
    }
    __ bind(&next);
    // Terminate at global context.
    __ cmp(FieldOperand(temp, HeapObject::kMapOffset),
           Immediate(isolate()->factory()->global_context_map()));
    __ j(equal, &fast, Label::kNear);
    // Check that extension is NULL.
    __ cmp(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
    __ j(not_equal, slow);
    // Load next context in chain.
    __ mov(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));
    __ jmp(&next);
    __ bind(&fast);
  }

  // All extension objects were empty and it is safe to use a global
  // load IC call.
  __ mov(edx, var->is_qml_global()
                  ? QmlGlobalObjectOperand()
                  : GlobalObjectOperand());
  __ mov(ecx, var->name());
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
      ? RelocInfo::CODE_TARGET
      : RelocInfo::CODE_TARGET_CONTEXT;
  CallIC(ic, mode);
}


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
                                                                Label* slow) {
  ASSERT(var->IsContextSlot());
  Register context = esi;
  Register temp = ebx;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_non_strict_eval()) {
        // Check that extension is NULL.
        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
               Immediate(0));
        __ j(not_equal, slow);
      }
      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering esi.
      context = temp;
    }
  }
  // Check that last extension is NULL.
  __ cmp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
  __ j(not_equal, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return an esi-based operand (the write barrier cannot be allowed to
  // destroy the esi register).
  return ContextOperand(context, var->index());
}


void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
                                                  TypeofState typeof_state,
                                                  Label* slow,
                                                  Label* done) {
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables.  Eval is used a lot without
  // introducing variables.  In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
    __ jmp(done);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ mov(eax, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == CONST ||
        local->mode() == CONST_HARMONY ||
        local->mode() == LET) {
      __ cmp(eax, isolate()->factory()->the_hole_value());
      __ j(not_equal, done);
      if (local->mode() == CONST) {
        __ mov(eax, isolate()->factory()->undefined_value());
      } else {  // LET || CONST_HARMONY
        __ push(Immediate(var->name()));
        __ CallRuntime(Runtime::kThrowReferenceError, 1);
      }
    }
    __ jmp(done);
  }
}


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "Global variable");
      // Use inline caching. Variable name is passed in ecx and the global
      // object in edx.
      __ mov(edx, var->is_qml_global()
                      ? QmlGlobalObjectOperand()
                      : GlobalObjectOperand());
      __ mov(ecx, var->name());
      Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
      CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
      context()->Plug(eax);
      break;
    }

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot()
                              ? "Context variable"
                              : "Stack variable");
      if (var->binding_needs_init()) {
        // var->scope() may be NULL when the proxy is located in eval code and
        // refers to a potential outside binding. Currently those bindings are
        // always looked up dynamically, i.e. in that case
        //     var->location() == LOOKUP.
        // always holds.
        ASSERT(var->scope() != NULL);

        // Check if the binding really needs an initialization check. The check
        // can be skipped in the following situation: we have a LET or CONST
        // binding in harmony mode, both the Variable and the VariableProxy have
        // the same declaration scope (i.e. they are both in global code, in the
        // same function or in the same eval code) and the VariableProxy is in
        // the source physically located after the initializer of the variable.
        //
        // We cannot skip any initialization checks for CONST in non-harmony
        // mode because const variables may be declared but never initialized:
        //   if (false) { const x; }; var y = x;
        //
        // The condition on the declaration scopes is a conservative check for
        // nested functions that access a binding and are called before the
        // binding is initialized:
        //   function() { f(); let x = 1; function f() { x = 2; } }
        //
        bool skip_init_check;
        if (var->scope()->DeclarationScope() != scope()->DeclarationScope()) {
          skip_init_check = false;
        } else {
          // Check that we always have valid source position.
          ASSERT(var->initializer_position() != RelocInfo::kNoPosition);
          ASSERT(proxy->position() != RelocInfo::kNoPosition);
          skip_init_check = var->mode() != CONST &&
              var->initializer_position() < proxy->position();
        }

        if (!skip_init_check) {
          // Let and const need a read barrier.
          Label done;
          GetVar(eax, var);
          __ cmp(eax, isolate()->factory()->the_hole_value());
          __ j(not_equal, &done, Label::kNear);
          if (var->mode() == LET || var->mode() == CONST_HARMONY) {
            // Throw a reference error when using an uninitialized let/const
            // binding in harmony mode.
            __ push(Immediate(var->name()));
            __ CallRuntime(Runtime::kThrowReferenceError, 1);
          } else {
            // Uninitialized const bindings outside of harmony mode are unholed.
            ASSERT(var->mode() == CONST);
            __ mov(eax, isolate()->factory()->undefined_value());
          }
          __ bind(&done);
          context()->Plug(eax);
          break;
        }
      }
      context()->Plug(var);
      break;
    }

    case Variable::LOOKUP: {
      Label done, slow;
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
      __ bind(&slow);
      Comment cmnt(masm_, "Lookup variable");
      __ push(esi);  // Context.
      __ push(Immediate(var->name()));
      __ CallRuntime(Runtime::kLoadContextSlot, 2);
      __ bind(&done);
      context()->Plug(eax);
      break;
    }
  }
}


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Label materialized;
  // Registers will be used as follows:
  // edi = JS function.
  // ecx = literals array.
  // ebx = regexp literal.
  // eax = regexp literal clone.
  __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ mov(ebx, FieldOperand(ecx, literal_offset));
  __ cmp(ebx, isolate()->factory()->undefined_value());
  __ j(not_equal, &materialized, Label::kNear);

  // Create regexp literal using runtime function.
  // Result will be in eax.
  __ push(ecx);
  __ push(Immediate(Smi::FromInt(expr->literal_index())));
  __ push(Immediate(expr->pattern()));
  __ push(Immediate(expr->flags()));
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
  __ mov(ebx, eax);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ AllocateInNewSpace(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
  __ jmp(&allocated);

  __ bind(&runtime_allocate);
  __ push(ebx);
  __ push(Immediate(Smi::FromInt(size)));
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);
  __ pop(ebx);

  __ bind(&allocated);
  // Copy the content into the newly allocated memory.
  // (Unroll copy loop once for better throughput.)
  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
    __ mov(edx, FieldOperand(ebx, i));
    __ mov(ecx, FieldOperand(ebx, i + kPointerSize));
    __ mov(FieldOperand(eax, i), edx);
    __ mov(FieldOperand(eax, i + kPointerSize), ecx);
  }
  if ((size % (2 * kPointerSize)) != 0) {
    __ mov(edx, FieldOperand(ebx, size - kPointerSize));
    __ mov(FieldOperand(eax, size - kPointerSize), edx);
  }
  context()->Plug(eax);
}


void FullCodeGenerator::EmitAccessor(Expression* expression) {
  if (expression == NULL) {
    __ push(Immediate(isolate()->factory()->null_value()));
  } else {
    VisitForStackValue(expression);
  }
}


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");
  Handle<FixedArray> constant_properties = expr->constant_properties();
  __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ push(FieldOperand(edi, JSFunction::kLiteralsOffset));
  __ push(Immediate(Smi::FromInt(expr->literal_index())));
  __ push(Immediate(constant_properties));
  int flags = expr->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  flags |= expr->has_function()
      ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
  __ push(Immediate(Smi::FromInt(flags)));
  int properties_count = constant_properties->length() / 2;
  if (expr->depth() > 1) {
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
  } else if (flags != ObjectLiteral::kFastElements ||
      properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
    __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
  } else {
    FastCloneShallowObjectStub stub(properties_count);
    __ CallStub(&stub);
  }

  // If result_saved is true the result is on top of the stack.  If
  // result_saved is false the result is in eax.
  bool result_saved = false;

  // Mark all computed expressions that are bound to a key that
  // is shadowed by a later occurrence of the same key. For the
  // marked expressions, no store code is emitted.
  expr->CalculateEmitStore();

  AccessorTable accessor_table(isolate()->zone());
  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(eax);  // Save result on the stack.
      result_saved = true;
    }
    switch (property->kind()) {
      case ObjectLiteral::Property::CONSTANT:
        UNREACHABLE();
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        ASSERT(!CompileTimeValue::IsCompileTimeValue(value));
        // Fall through.
      case ObjectLiteral::Property::COMPUTED:
        if (key->handle()->IsSymbol()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            __ mov(ecx, Immediate(key->handle()));
            __ mov(edx, Operand(esp, 0));
            Handle<Code> ic = is_classic_mode()
                ? isolate()->builtins()->StoreIC_Initialize()
                : isolate()->builtins()->StoreIC_Initialize_Strict();
            CallIC(ic, RelocInfo::CODE_TARGET, key->id());
            PrepareForBailoutForId(key->id(), NO_REGISTERS);
          } else {
            VisitForEffect(value);
          }
          break;
        }
        // Fall through.
      case ObjectLiteral::Property::PROTOTYPE:
        __ push(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ push(Immediate(Smi::FromInt(NONE)));  // PropertyAttributes
          __ CallRuntime(Runtime::kSetProperty, 4);
        } else {
          __ Drop(3);
        }
        break;
      case ObjectLiteral::Property::GETTER:
        accessor_table.lookup(key)->second->getter = value;
        break;
      case ObjectLiteral::Property::SETTER:
        accessor_table.lookup(key)->second->setter = value;
        break;
    }
  }

  // Emit code to define accessors, using only a single call to the runtime for
  // each pair of corresponding getters and setters.
  for (AccessorTable::Iterator it = accessor_table.begin();
       it != accessor_table.end();
       ++it) {
    __ push(Operand(esp, 0));  // Duplicate receiver.
    VisitForStackValue(it->first);
    EmitAccessor(it->second->getter);
    EmitAccessor(it->second->setter);
    __ push(Immediate(Smi::FromInt(NONE)));
    __ CallRuntime(Runtime::kDefineOrRedefineAccessorProperty, 5);
  }

  if (expr->has_function()) {
    ASSERT(result_saved);
    __ push(Operand(esp, 0));
    __ CallRuntime(Runtime::kToFastProperties, 1);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}


void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();
  Handle<FixedArray> constant_elements = expr->constant_elements();
  ASSERT_EQ(2, constant_elements->length());
  ElementsKind constant_elements_kind =
      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
  bool has_constant_fast_elements = constant_elements_kind == FAST_ELEMENTS;
  Handle<FixedArrayBase> constant_elements_values(
      FixedArrayBase::cast(constant_elements->get(1)));

  __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
  __ push(Immediate(Smi::FromInt(expr->literal_index())));
  __ push(Immediate(constant_elements));
  Heap* heap = isolate()->heap();
  if (has_constant_fast_elements &&
      constant_elements_values->map() == heap->fixed_cow_array_map()) {
    // If the elements are already FAST_ELEMENTS, the boilerplate cannot
    // change, so it's possible to specialize the stub in advance.
    __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
    FastCloneShallowArrayStub stub(
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
        length);
    __ CallStub(&stub);
  } else if (expr->depth() > 1) {
    __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
  } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
    __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
  } else {
    ASSERT(constant_elements_kind == FAST_ELEMENTS ||
           constant_elements_kind == FAST_SMI_ONLY_ELEMENTS ||
           FLAG_smi_only_arrays);
    // If the elements are already FAST_ELEMENTS, the boilerplate cannot
    // change, so it's possible to specialize the stub in advance.
    FastCloneShallowArrayStub::Mode mode = has_constant_fast_elements
        ? FastCloneShallowArrayStub::CLONE_ELEMENTS
        : FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
    FastCloneShallowArrayStub stub(mode, length);
    __ CallStub(&stub);
  }

  bool result_saved = false;  // Is the result saved to the stack?

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (subexpr->AsLiteral() != NULL ||
        CompileTimeValue::IsCompileTimeValue(subexpr)) {
      continue;
    }

    if (!result_saved) {
      __ push(eax);
      result_saved = true;
    }
    VisitForAccumulatorValue(subexpr);

    if (constant_elements_kind == FAST_ELEMENTS) {
      // Fast-case array literals with ElementsKind of FAST_ELEMENTS cannot
      // transition, so we don't need to call the runtime stub.
      int offset = FixedArray::kHeaderSize + (i * kPointerSize);
      __ mov(ebx, Operand(esp, 0));  // Copy of array literal.
      __ mov(ebx, FieldOperand(ebx, JSObject::kElementsOffset));
      // Store the subexpression value in the array's elements.
      __ mov(FieldOperand(ebx, offset), result_register());
      // Update the write barrier for the array store.
      __ RecordWriteField(ebx, offset, result_register(), ecx,
                          kDontSaveFPRegs,
                          EMIT_REMEMBERED_SET,
                          INLINE_SMI_CHECK);
    } else {
      // Store the subexpression value in the array's elements.
      __ mov(ebx, Operand(esp, 0));  // Copy of array literal.
      __ mov(edi, FieldOperand(ebx, JSObject::kMapOffset));
      __ mov(ecx, Immediate(Smi::FromInt(i)));
      __ mov(edx, Immediate(Smi::FromInt(expr->literal_index())));
      StoreArrayLiteralElementStub stub;
      __ CallStub(&stub);
    }

    PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);
  }

  if (result_saved) {
    context()->PlugTOS();
  } else {
    context()->Plug(eax);
  }
}


void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  Comment cmnt(masm_, "[ Assignment");
  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
  // on the left-hand side.
  if (!expr->target()->IsValidLeftHandSide()) {
    VisitForEffect(expr->target());
    return;
  }

  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* property = expr->target()->AsProperty();
  if (property != NULL) {
    assign_type = (property->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }

  // Evaluate LHS expression.
  switch (assign_type) {
    case VARIABLE:
      // Nothing to do here.
      break;
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in edx.
        VisitForStackValue(property->obj());
        __ mov(edx, Operand(esp, 0));
      } else {
        VisitForStackValue(property->obj());
      }
      break;
    case KEYED_PROPERTY: {
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ mov(edx, Operand(esp, kPointerSize));  // Object.
        __ mov(ecx, Operand(esp, 0));             // Key.
      } else {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
      }
      break;
    }
  }

  // For compound assignments we need another deoptimization point after the
  // variable/property load.
  if (expr->is_compound()) {
    AccumulatorValueContext result_context(this);
    { AccumulatorValueContext left_operand_context(this);
      switch (assign_type) {
        case VARIABLE:
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), TOS_REG);
          break;
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
          break;
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
          break;
      }
    }

    Token::Value op = expr->binary_op();
    __ push(eax);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
        ? OVERWRITE_RIGHT
        : NO_OVERWRITE;
    SetSourcePosition(expr->position() + 1);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
                            op,
                            mode,
                            expr->target(),
                            expr->value());
    } else {
      EmitBinaryOp(expr->binary_operation(), op, mode);
    }

    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), TOS_REG);
  } else {
    VisitForAccumulatorValue(expr->value());
  }

  // Record source position before possible IC call.
  SetSourcePosition(expr->position());

  // Store the value.
  switch (assign_type) {
    case VARIABLE:
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
                             expr->op());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(eax);
      break;
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
      break;
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);
      break;
  }
}
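
// In JS terms, the assignment forms handled here are:
//   x = v;        (VARIABLE)
//   o.name = v;   (NAMED_PROPERTY)
//   o[key] = v;   (KEYED_PROPERTY)
// and compound forms such as o.name += v, which additionally load the old
// value and combine it with the right-hand side before the store.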


void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  ASSERT(!key->handle()->IsSmi());
  __ mov(ecx, Immediate(key->handle()));
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
}


void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
  CallIC(ic, RelocInfo::CODE_TARGET, prop->id());
}
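
// Register conventions for the two load ICs, as used here and in
// VisitProperty below: the receiver is passed in edx in both cases, while
// ecx carries the property name for the named IC and the key for the keyed
// IC.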


void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                              Token::Value op,
                                              OverwriteMode mode,
                                              Expression* left,
                                              Expression* right) {
  // Do combined smi check of the operands. Left operand is on the
  // stack. Right operand is in eax.
  Label smi_case, done, stub_call;
  __ pop(edx);
  __ mov(ecx, eax);
  __ or_(eax, edx);
  JumpPatchSite patch_site(masm_);
  patch_site.EmitJumpIfSmi(eax, &smi_case, Label::kNear);

  __ bind(&stub_call);
  __ mov(eax, ecx);
  BinaryOpStub stub(op, mode);
  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
  patch_site.EmitPatchInfo();
  __ jmp(&done, Label::kNear);

  // Smi case.
  __ bind(&smi_case);
  __ mov(eax, edx);  // Copy left operand in case of a stub call.

  switch (op) {
    case Token::SAR:
      __ SmiUntag(eax);
      __ SmiUntag(ecx);
      __ sar_cl(eax);  // No checks of result necessary
      __ SmiTag(eax);
      break;
    case Token::SHL: {
      Label result_ok;
      __ SmiUntag(eax);
      __ SmiUntag(ecx);
      __ shl_cl(eax);
      // Check that the *signed* result fits in a smi.
      __ cmp(eax, 0xc0000000);
      __ j(positive, &result_ok);
      __ SmiTag(ecx);
      __ jmp(&stub_call);
      __ bind(&result_ok);
      __ SmiTag(eax);
      break;
    }
    case Token::SHR: {
      Label result_ok;
      __ SmiUntag(eax);
      __ SmiUntag(ecx);
      __ shr_cl(eax);
      // Check that the *unsigned* result fits in a smi.
      __ test(eax, Immediate(0xc0000000));
      __ j(zero, &result_ok);
      __ SmiTag(ecx);
      __ jmp(&stub_call);
      __ bind(&result_ok);
      __ SmiTag(eax);
      break;
    }
    case Token::ADD:
      __ add(eax, ecx);
      __ j(overflow, &stub_call);
      break;
    case Token::SUB:
      __ sub(eax, ecx);
      __ j(overflow, &stub_call);
      break;
    case Token::MUL: {
      __ SmiUntag(eax);
      __ imul(eax, ecx);
      __ j(overflow, &stub_call);
      __ test(eax, eax);
      __ j(not_zero, &done, Label::kNear);
      __ mov(ebx, edx);
      __ or_(ebx, ecx);
      __ j(negative, &stub_call);
      break;
    }
    case Token::BIT_OR:
      __ or_(eax, ecx);
      break;
    case Token::BIT_AND:
      __ and_(eax, ecx);
      break;
    case Token::BIT_XOR:
      __ xor_(eax, ecx);
      break;
    default:
      UNREACHABLE();
  }

  __ bind(&done);
  context()->Plug(eax);
}
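
// Why the inline smi arithmetic above is valid: with kSmiTag == 0 and a
// one-bit tag, a smi n is represented as 2*n, so for ADD/SUB the identity
// 2*a +/- 2*b == 2*(a +/- b) holds on the tagged values and only the
// overflow flag needs checking. MUL untags one operand first
// (2*a * b == 2*(a*b)), and the extra sign check catches a result of -0,
// which is not representable as a smi.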


void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
                                     Token::Value op,
                                     OverwriteMode mode) {
  __ pop(edx);
  BinaryOpStub stub(op, mode);
  JumpPatchSite patch_site(masm_);  // unbound, signals no inlined smi code.
  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
  patch_site.EmitPatchInfo();
  context()->Plug(eax);
}


void FullCodeGenerator::EmitAssignment(Expression* expr) {
  // Invalid left-hand sides are rewritten to have a 'throw
  // ReferenceError' on the left-hand side.
  if (!expr->IsValidLeftHandSide()) {
    VisitForEffect(expr);
    return;
  }

  // Left-hand side can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->AsProperty();
  if (prop != NULL) {
    assign_type = (prop->key()->IsPropertyName())
        ? NAMED_PROPERTY
        : KEYED_PROPERTY;
  }

  switch (assign_type) {
    case VARIABLE: {
      Variable* var = expr->AsVariableProxy()->var();
      EffectContext context(this);
      EmitVariableAssignment(var, Token::ASSIGN);
      break;
    }
    case NAMED_PROPERTY: {
      __ push(eax);  // Preserve value.
      VisitForAccumulatorValue(prop->obj());
      __ mov(edx, eax);
      __ pop(eax);  // Restore value.
      __ mov(ecx, prop->key()->AsLiteral()->handle());
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->StoreIC_Initialize()
          : isolate()->builtins()->StoreIC_Initialize_Strict();
      CallIC(ic);
      break;
    }
    case KEYED_PROPERTY: {
      __ push(eax);  // Preserve value.
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ mov(ecx, eax);
      __ pop(edx);  // Receiver.
      __ pop(eax);  // Restore value.
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
      CallIC(ic);
      break;
    }
  }
  context()->Plug(eax);
}


void FullCodeGenerator::EmitVariableAssignment(Variable* var,
                                               Token::Value op) {
  if (var->IsUnallocated()) {
    // Global var, const, or let.
    __ mov(ecx, var->name());
    __ mov(edx, var->is_qml_global() ? QmlGlobalObjectOperand()
                                     : GlobalObjectOperand());
    Handle<Code> ic = is_classic_mode()
        ? isolate()->builtins()->StoreIC_Initialize()
        : isolate()->builtins()->StoreIC_Initialize_Strict();
    CallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);

  } else if (op == Token::INIT_CONST) {
    // Const initializers need a write barrier.
    ASSERT(!var->IsParameter());  // No const parameters.
    if (var->IsStackLocal()) {
      Label skip;
      __ mov(edx, StackOperand(var));
      __ cmp(edx, isolate()->factory()->the_hole_value());
      __ j(not_equal, &skip);
      __ mov(StackOperand(var), eax);
      __ bind(&skip);
    } else {
      ASSERT(var->IsContextSlot() || var->IsLookupSlot());
      // Like var declarations, const declarations are hoisted to function
      // scope. However, unlike var initializers, const initializers are
      // able to drill a hole to that function context, even from inside a
      // 'with' context. We thus bypass the normal static scope lookup for
      // var->IsContextSlot().
      __ push(eax);
      __ push(esi);
      __ push(Immediate(var->name()));
      __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
    }

  } else if (var->mode() == LET && op != Token::INIT_LET) {
    // Non-initializing assignment to let variable needs a write barrier.
    if (var->IsLookupSlot()) {
      __ push(eax);  // Value.
      __ push(esi);  // Context.
      __ push(Immediate(var->name()));
      __ push(Immediate(Smi::FromInt(language_mode())));
      __ CallRuntime(Runtime::kStoreContextSlot, 4);
    } else {
      ASSERT(var->IsStackAllocated() || var->IsContextSlot());
      Label assign;
      MemOperand location = VarOperand(var, ecx);
      __ mov(edx, location);
      __ cmp(edx, isolate()->factory()->the_hole_value());
      __ j(not_equal, &assign, Label::kNear);
      __ push(Immediate(var->name()));
      __ CallRuntime(Runtime::kThrowReferenceError, 1);
      __ bind(&assign);
      __ mov(location, eax);
      if (var->IsContextSlot()) {
        __ mov(edx, eax);
        int offset = Context::SlotOffset(var->index());
        __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
      }
    }

  } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
    // Assignment to var or initializing assignment to let/const
    // in harmony mode.
    if (var->IsStackAllocated() || var->IsContextSlot()) {
      MemOperand location = VarOperand(var, ecx);
      if (FLAG_debug_code && op == Token::INIT_LET) {
        // Check for an uninitialized let binding.
        __ mov(edx, location);
        __ cmp(edx, isolate()->factory()->the_hole_value());
        __ Check(equal, "Let binding re-initialization.");
      }
      // Perform the assignment.
      __ mov(location, eax);
      if (var->IsContextSlot()) {
        __ mov(edx, eax);
        int offset = Context::SlotOffset(var->index());
        __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
      }
    } else {
      ASSERT(var->IsLookupSlot());
      __ push(eax);  // Value.
      __ push(esi);  // Context.
      __ push(Immediate(var->name()));
      __ push(Immediate(Smi::FromInt(language_mode())));
      __ CallRuntime(Runtime::kStoreContextSlot, 4);
    }
  }
  // Non-initializing assignments to consts are ignored.
}
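
// The hole checks above implement declaration semantics at the JS level:
// a non-initializing store to a classic-mode const is silently ignored,
// while assigning to a harmony let binding that still holds the hole
// (declared but not yet initialized) throws a ReferenceError.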


void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a named store IC.
  // eax    : value
  // esp[0] : receiver

  Property* prop = expr->target()->AsProperty();
  ASSERT(prop != NULL);
  ASSERT(prop->key()->AsLiteral() != NULL);

  // If the assignment starts a block of assignments to the same object,
  // change to slow case to avoid the quadratic behavior of repeatedly
  // adding fast properties.
  if (expr->starts_initialization_block()) {
    __ push(result_register());
    __ push(Operand(esp, kPointerSize));  // Receiver is now under value.
    __ CallRuntime(Runtime::kToSlowProperties, 1);
    __ pop(result_register());
  }

  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  __ mov(ecx, prop->key()->AsLiteral()->handle());
  if (expr->ends_initialization_block()) {
    __ mov(edx, Operand(esp, 0));
  } else {
    __ pop(edx);
  }
  Handle<Code> ic = is_classic_mode()
      ? isolate()->builtins()->StoreIC_Initialize()
      : isolate()->builtins()->StoreIC_Initialize_Strict();
  CallIC(ic, RelocInfo::CODE_TARGET, expr->id());

  // If the assignment ends an initialization block, revert to fast case.
  if (expr->ends_initialization_block()) {
    __ push(eax);  // Result of assignment, saved even if not needed.
    __ push(Operand(esp, kPointerSize));  // Receiver is under value.
    __ CallRuntime(Runtime::kToFastProperties, 1);
    __ pop(eax);
    __ Drop(1);
  }

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(eax);
}
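
// The initialization-block bracketing above targets straight-line store
// sequences such as
//   o.a = 1; o.b = 2; o.c = 3;
// the object is flipped to slow (dictionary) properties for the duration of
// the block and back to fast properties at the end, rather than extending
// fast-property maps once per store.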


void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
  // Assignment to a property, using a keyed store IC.
  // eax               : value
  // esp[0]            : key
  // esp[kPointerSize] : receiver

  // If the assignment starts a block of assignments to the same object,
  // change to slow case to avoid the quadratic behavior of repeatedly
  // adding fast properties.
  if (expr->starts_initialization_block()) {
    __ push(result_register());
    // Receiver is now under the key and value.
    __ push(Operand(esp, 2 * kPointerSize));
    __ CallRuntime(Runtime::kToSlowProperties, 1);
    __ pop(result_register());
  }

  __ pop(ecx);  // Key.
  if (expr->ends_initialization_block()) {
    __ mov(edx, Operand(esp, 0));  // Leave receiver on the stack for later.
  } else {
    __ pop(edx);
  }
  // Record source code position before IC call.
  SetSourcePosition(expr->position());
  Handle<Code> ic = is_classic_mode()
      ? isolate()->builtins()->KeyedStoreIC_Initialize()
      : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
  CallIC(ic, RelocInfo::CODE_TARGET, expr->id());

  // If the assignment ends an initialization block, revert to fast case.
  if (expr->ends_initialization_block()) {
    __ pop(edx);
    __ push(eax);  // Result of assignment, saved even if not needed.
    __ push(edx);
    __ CallRuntime(Runtime::kToFastProperties, 1);
    __ pop(eax);
  }

  PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
  context()->Plug(eax);
}


void FullCodeGenerator::VisitProperty(Property* expr) {
  Comment cmnt(masm_, "[ Property");
  Expression* key = expr->key();

  if (key->IsPropertyName()) {
    VisitForAccumulatorValue(expr->obj());
    __ mov(edx, result_register());
    EmitNamedPropertyLoad(expr);
    context()->Plug(eax);
  } else {
    VisitForStackValue(expr->obj());
    VisitForAccumulatorValue(expr->key());
    __ pop(edx);                     // Object.
    __ mov(ecx, result_register());  // Key.
    EmitKeyedPropertyLoad(expr);
    context()->Plug(eax);
  }
}


void FullCodeGenerator::CallIC(Handle<Code> code,
                               RelocInfo::Mode rmode,
                               unsigned ast_id) {
  ic_total_count_++;
  __ call(code, rmode, ast_id);
}


void FullCodeGenerator::EmitCallWithIC(Call* expr,
                                       Handle<Object> name,
                                       RelocInfo::Mode mode) {
  // Code common for calls using the IC.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
    __ Set(ecx, Immediate(name));
  }
  // Record source position of the IC call.
  SetSourcePosition(expr->position());
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
  CallIC(ic, mode, expr->id());
  RecordJSReturnSite(expr);
  // Restore context register.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  context()->Plug(eax);
}


void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
                                            Expression* key) {
  // Load the key.
  VisitForAccumulatorValue(key);

  // Swap the name of the function and the receiver on the stack to follow
  // the calling convention for call ICs.
  __ pop(ecx);
  __ push(eax);
  __ push(ecx);

  // Load the arguments.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }
  // Record source position of the IC call.
  SetSourcePosition(expr->position());
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
  __ mov(ecx, Operand(esp, (arg_count + 1) * kPointerSize));  // Key.
  CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
  RecordJSReturnSite(expr);
  // Restore context register.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  context()->DropAndPlug(1, eax);  // Drop the key still on the stack.
}


void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
  // Code common for calls using the call stub.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  { PreservePositionScope scope(masm()->positions_recorder());
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }
  }
  // Record source position for debugger.
  SetSourcePosition(expr->position());

  // Record call targets in unoptimized code, but not in the snapshot.
  if (!Serializer::enabled()) {
    flags = static_cast<CallFunctionFlags>(flags | RECORD_CALL_TARGET);
    Handle<Object> uninitialized =
        TypeFeedbackCells::UninitializedSentinel(isolate());
    Handle<JSGlobalPropertyCell> cell =
        isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
    RecordTypeFeedbackCell(expr->id(), cell);
    __ mov(ebx, cell);
  }

  CallFunctionStub stub(arg_count, flags);
  __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
  __ CallStub(&stub, expr->id());

  RecordJSReturnSite(expr);
  // Restore context register.
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  context()->DropAndPlug(1, eax);
}


void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
  // Push copy of the first argument or undefined if it doesn't exist.
  if (arg_count > 0) {
    __ push(Operand(esp, arg_count * kPointerSize));
  } else {
    __ push(Immediate(isolate()->factory()->undefined_value()));
  }

  // Push the receiver of the enclosing function.
  __ push(Operand(ebp, (2 + info_->scope()->num_parameters()) * kPointerSize));
  // Push the language mode.
  __ push(Immediate(Smi::FromInt(language_mode())));

  // Push the start position of the scope the call resides in.
  __ push(Immediate(Smi::FromInt(scope()->start_position())));

  // Push the qml mode flag.
  __ push(Immediate(Smi::FromInt(is_qml_mode())));

  // Do the runtime call.
  __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 6);
}
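
// Together with the copy of the function pushed by the caller, this makes up
// the six arguments of %ResolvePossiblyDirectEval: the function, a copy of
// the first argument, the enclosing receiver, the language mode, the scope
// start position, and the qml mode flag.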


void FullCodeGenerator::VisitCall(Call* expr) {
#ifdef DEBUG
  // We want to verify that RecordJSReturnSite gets called on all paths
  // through this function. Avoid early returns.
  expr->return_is_recorded_ = false;
#endif

  Comment cmnt(masm_, "[ Call");
  Expression* callee = expr->expression();
  VariableProxy* proxy = callee->AsVariableProxy();
  Property* property = callee->AsProperty();

  if (proxy != NULL && proxy->var()->is_possibly_eval()) {
    // In a call to eval, we first call %ResolvePossiblyDirectEval to
    // resolve the function we need to call and the receiver of the call.
    // Then we call the resolved function using the given arguments.
    ZoneList<Expression*>* args = expr->arguments();
    int arg_count = args->length();
    { PreservePositionScope pos_scope(masm()->positions_recorder());
      VisitForStackValue(callee);
      // Reserved receiver slot.
      __ push(Immediate(isolate()->factory()->undefined_value()));
      // Push the arguments.
      for (int i = 0; i < arg_count; i++) {
        VisitForStackValue(args->at(i));
      }

      // Push a copy of the function (found below the arguments) and
      // resolve eval.
      __ push(Operand(esp, (arg_count + 1) * kPointerSize));
      EmitResolvePossiblyDirectEval(arg_count);

      // The runtime call returns a pair of values in eax (function) and
      // edx (receiver). Touch up the stack with the right values.
      __ mov(Operand(esp, (arg_count + 0) * kPointerSize), edx);
      __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax);
    }
    // Record source position for debugger.
    SetSourcePosition(expr->position());
    CallFunctionStub stub(arg_count, RECEIVER_MIGHT_BE_IMPLICIT);
    __ mov(edi, Operand(esp, (arg_count + 1) * kPointerSize));
    __ CallStub(&stub);
    RecordJSReturnSite(expr);
    // Restore context register.
    __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
    context()->DropAndPlug(1, eax);

  } else if (proxy != NULL && proxy->var()->IsUnallocated()) {
    // Push global object as receiver for the call IC.
    __ push(proxy->var()->is_qml_global() ? QmlGlobalObjectOperand()
                                          : GlobalObjectOperand());
    EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);

  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    // Call to a lookup slot (dynamically introduced variable).
    Label slow, done;
    { PreservePositionScope scope(masm()->positions_recorder());
      // Generate code for loading from variables potentially shadowed by
      // eval-introduced variables.
      EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
    }
    __ bind(&slow);
    // Call the runtime to find the function to call (returned in eax) and
    // the object holding it (returned in edx).
    __ push(context_register());
    __ push(Immediate(proxy->name()));
    __ CallRuntime(Runtime::kLoadContextSlot, 2);
    __ push(eax);  // Function.
    __ push(edx);  // Receiver.

    // If fast case code has been generated, emit code to push the function
    // and receiver and have the slow path jump around this code.
    if (done.is_linked()) {
      Label call;
      __ jmp(&call, Label::kNear);
      __ bind(&done);
      // Push function.
      __ push(eax);
      // The receiver is implicitly the global receiver. Indicate this by
      // passing the hole to the call function stub.
      __ push(Immediate(isolate()->factory()->the_hole_value()));
      __ bind(&call);
    }

    // The receiver is either the global receiver or an object found by
    // LoadContextSlot. That object could be the hole if the receiver is
    // implicitly the global object.
    EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT);

  } else if (property != NULL) {
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(property->obj());
    }
    if (property->key()->IsPropertyName()) {
      EmitCallWithIC(expr,
                     property->key()->AsLiteral()->handle(),
                     RelocInfo::CODE_TARGET);
    } else {
      EmitKeyedCallWithIC(expr, property->key());
    }

  } else {
    // Call to an arbitrary expression not handled specially above.
    { PreservePositionScope scope(masm()->positions_recorder());
      VisitForStackValue(callee);
    }
    // Load global receiver object.
    __ mov(ebx, GlobalObjectOperand());
    __ push(FieldOperand(ebx, GlobalObject::kGlobalReceiverOffset));
    // Emit function call.
    EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
  }

#ifdef DEBUG
  // RecordJSReturnSite should have been called.
  ASSERT(expr->return_is_recorded_);
#endif
}
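
// In JS terms, the dispatch above distinguishes:
//   eval(x);    - possibly direct eval, resolved at runtime
//   f();        - unallocated (global) variable, through a call IC
//   g();        - lookup slot, e.g. under 'with' or eval-introduced bindings
//   o.m();      - named property call IC
//   o[k]();     - keyed property call IC
//   (expr)();   - anything else, through CallFunctionStub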


void FullCodeGenerator::VisitCallNew(CallNew* expr) {
  Comment cmnt(masm_, "[ CallNew");
  // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.

  // Push constructor on the stack. If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // ignored.
  VisitForStackValue(expr->expression());

  // Push the arguments ("left-to-right") on the stack.
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  // Call the construct call builtin that handles allocation and
  // constructor invocation.
  SetSourcePosition(expr->position());

  // Load function and argument count into edi and eax.
  __ SafeSet(eax, Immediate(arg_count));
  __ mov(edi, Operand(esp, arg_count * kPointerSize));

  // Record call targets in unoptimized code, but not in the snapshot.
  CallFunctionFlags flags;
  if (!Serializer::enabled()) {
    flags = RECORD_CALL_TARGET;
    Handle<Object> uninitialized =
        TypeFeedbackCells::UninitializedSentinel(isolate());
    Handle<JSGlobalPropertyCell> cell =
        isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
    RecordTypeFeedbackCell(expr->id(), cell);
    __ mov(ebx, cell);
  } else {
    flags = NO_CALL_FUNCTION_FLAGS;
  }

  CallConstructStub stub(flags);
  __ call(stub.GetCode(), RelocInfo::CONSTRUCT_CALL);
  PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ test(eax, Immediate(kSmiTagMask));
  Split(zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
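
// The test relies on the value representation: a smi n is stored as n << 1
// (low bit 0) while heap object pointers carry a 1 tag in the low bit, so
// testing against kSmiTagMask sets the zero flag exactly for smis.
// EmitIsNonNegativeSmi below folds the sign bit (0x80000000) into the same
// test.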


void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  __ test(eax, Immediate(kSmiTagMask | 0x80000000));
  Split(zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ cmp(eax, isolate()->factory()->null_value());
  __ j(equal, if_true);
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  // Undetectable objects behave like undefined when tested with typeof.
  __ movzx_b(ecx, FieldOperand(ebx, Map::kBitFieldOffset));
  __ test(ecx, Immediate(1 << Map::kIsUndetectable));
  __ j(not_zero, if_false);
  __ movzx_b(ecx, FieldOperand(ebx, Map::kInstanceTypeOffset));
  __ cmp(ecx, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
  __ j(below, if_false);
  __ cmp(ecx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(below_equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(above_equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kBitFieldOffset));
  __ test(ebx, Immediate(1 << Map::kIsUndetectable));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(not_zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
    CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  if (FLAG_debug_code) __ AbortIfSmi(eax);

  // Check whether this map has already been checked to be safe for default
  // valueOf.
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ test_b(FieldOperand(ebx, Map::kBitField2Offset),
            1 << Map::kStringWrapperSafeForDefaultValueOf);
  __ j(not_zero, if_true);

  // Check for fast case object. Return false for slow case objects.
  __ mov(ecx, FieldOperand(eax, JSObject::kPropertiesOffset));
  __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ cmp(ecx, FACTORY->hash_table_map());
  __ j(equal, if_false);

  // Look for valueOf symbol in the descriptor array, and indicate false if
  // found. The type is not checked, so if it is a transition it is a false
  // negative.
  __ LoadInstanceDescriptors(ebx, ebx);
  __ mov(ecx, FieldOperand(ebx, FixedArray::kLengthOffset));
  // ebx: descriptor array
  // ecx: length of descriptor array
  // Calculate the end of the descriptor array.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);
  STATIC_ASSERT(kPointerSize == 4);
  __ lea(ecx, Operand(ebx, ecx, times_2, FixedArray::kHeaderSize));
  // Calculate location of the first key name.
  __ add(ebx,
         Immediate(FixedArray::kHeaderSize +
                   DescriptorArray::kFirstIndex * kPointerSize));
  // Loop through all the keys in the descriptor array. If one of these is the
  // symbol valueOf the result is false.
  Label entry, loop;
  __ jmp(&entry);
  __ bind(&loop);
  __ mov(edx, FieldOperand(ebx, 0));
  __ cmp(edx, FACTORY->value_of_symbol());
  __ j(equal, if_false);
  __ add(ebx, Immediate(kPointerSize));
  __ bind(&entry);
  __ cmp(ebx, ecx);
  __ j(not_equal, &loop);

  // Reload map as register ebx was used as temporary above.
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));

  // If a valueOf property is not found on the object, check that its
  // prototype is the unmodified String prototype. If not, the result is
  // false.
  __ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
  __ JumpIfSmi(ecx, if_false);
  __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ mov(edx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ mov(edx,
         FieldOperand(edx, GlobalObject::kGlobalContextOffset));
  __ cmp(ecx,
         ContextOperand(edx,
                        Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
  __ j(not_equal, if_false);
  // Set the bit in the map to indicate that it has been checked safe for
  // default valueOf and set true result.
  __ or_(FieldOperand(ebx, Map::kBitField2Offset),
         Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
  __ jmp(if_true);

  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ JumpIfSmi(eax, if_false);
  __ CmpObjectType(eax, JS_REGEXP_TYPE, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  // Get the frame pointer for the calling frame.
  __ mov(eax, Operand(ebp, StandardFrameConstants::kCallerFPOffset));

  // Skip the arguments adaptor frame if it exists.
  Label check_frame_marker;
  __ cmp(Operand(eax, StandardFrameConstants::kContextOffset),
         Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &check_frame_marker);
  __ mov(eax, Operand(eax, StandardFrameConstants::kCallerFPOffset));

  // Check the marker in the calling frame.
  __ bind(&check_frame_marker);
  __ cmp(Operand(eax, StandardFrameConstants::kMarkerOffset),
         Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}
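
// This lets generated code distinguish 'new F()' from a plain 'F()' call
// inside F: a construct call leaves a CONSTRUCT marker in the caller's
// frame, with a possible arguments adaptor frame in between that has to be
// skipped first.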


void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  // Load the two objects into registers and perform the comparison.
  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ pop(ebx);
  __ cmp(eax, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(equal, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  // ArgumentsAccessStub expects the key in edx and the formal
  // parameter count in eax.
  VisitForAccumulatorValue(args->at(0));
  __ mov(edx, eax);
  __ SafeSet(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
  ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
  __ CallStub(&stub);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);

  Label exit;
  // Get the number of formal parameters.
  __ SafeSet(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));

  // Check if the calling frame is an arguments adaptor frame.
  __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
  __ cmp(Operand(ebx, StandardFrameConstants::kContextOffset),
         Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ j(not_equal, &exit);

  // Arguments adaptor case: Read the arguments length from the
  // adaptor frame.
  __ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));

  __ bind(&exit);
  if (FLAG_debug_code) __ AbortIfNotSmi(eax);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  Label done, null, function, non_function_constructor;

  VisitForAccumulatorValue(args->at(0));

  // If the object is a smi, we return null.
  __ JumpIfSmi(eax, &null);

  // Check that the object is a JS object but take special care of JS
  // functions to make sure they have 'Function' as their class.
  // Assume that there are only two callable types, and one of them is at
  // either end of the type range for JS object types. Saves extra comparisons.
  STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, eax);
  // Map is now in eax.
  __ j(below, &null);
  STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                FIRST_SPEC_OBJECT_TYPE + 1);
  __ j(equal, &function);

  __ CmpInstanceType(eax, LAST_SPEC_OBJECT_TYPE);
  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
                LAST_SPEC_OBJECT_TYPE - 1);
  __ j(equal, &function);
  // Assume that there is no larger type.
  STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);

  // Check if the constructor in the map is a JS function.
  __ mov(eax, FieldOperand(eax, Map::kConstructorOffset));
  __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
  __ j(not_equal, &non_function_constructor);

  // eax now contains the constructor function. Grab the
  // instance class name from there.
  __ mov(eax, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
  __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kInstanceClassNameOffset));
  __ jmp(&done);

  // Functions have class 'Function'.
  __ bind(&function);
  __ mov(eax, isolate()->factory()->function_class_symbol());
  __ jmp(&done);

  // Objects with a non-function constructor have class 'Object'.
  __ bind(&non_function_constructor);
  __ mov(eax, isolate()->factory()->Object_symbol());
  __ jmp(&done);

  // Non-JS objects have class null.
  __ bind(&null);
  __ mov(eax, isolate()->factory()->null_value());

  // All done.
  __ bind(&done);

  context()->Plug(eax);
}
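
// In effect: smis and other non-JS objects yield null, callable objects
// yield "Function", objects whose constructor is not a JS function yield
// "Object", and everything else yields its constructor's instance class
// name.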


void FullCodeGenerator::EmitLog(CallRuntime* expr) {
  // Conditionally generate a log call.
  // Args:
  //   0 (literal string): The type of logging (corresponds to the flags).
  //     This is used to determine whether or not to generate the log call.
  //   1 (string): Format string. Access the string at argument index 2
  //     with '%2s' (see Logger::LogRuntime for all the formats).
  //   2 (array): Arguments to the format string.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(args->length(), 3);
  if (CodeGenerator::ShouldGenerateLog(args->at(0))) {
    VisitForStackValue(args->at(1));
    VisitForStackValue(args->at(2));
    __ CallRuntime(Runtime::kLog, 2);
  }
  // Finally, we're expected to leave a value on the top of the stack.
  __ mov(eax, isolate()->factory()->undefined_value());
  context()->Plug(eax);
}


void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
  ASSERT(expr->arguments()->length() == 0);

  Label slow_allocate_heapnumber;
  Label heapnumber_allocated;

  __ AllocateHeapNumber(edi, ebx, ecx, &slow_allocate_heapnumber);
  __ jmp(&heapnumber_allocated);

  __ bind(&slow_allocate_heapnumber);
  // Allocate a heap number.
  __ CallRuntime(Runtime::kNumberAlloc, 0);
  __ mov(edi, eax);

  __ bind(&heapnumber_allocated);

  __ PrepareCallCFunction(1, ebx);
  __ mov(eax, ContextOperand(context_register(), Context::GLOBAL_INDEX));
  __ mov(eax, FieldOperand(eax, GlobalObject::kGlobalContextOffset));
  __ mov(Operand(esp, 0), eax);
  __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);

  // Convert 32 random bits in eax to 0.(32 random bits) in a double
  // by computing:
  // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)).
  // This is implemented on both SSE2 and FPU.
  if (CpuFeatures::IsSupported(SSE2)) {
    CpuFeatures::Scope fscope(SSE2);
    __ mov(ebx, Immediate(0x49800000));  // 1.0 x 2^20 as single.
    __ movd(xmm1, ebx);
    __ movd(xmm0, eax);
    __ cvtss2sd(xmm1, xmm1);
    __ xorps(xmm0, xmm1);
    __ subsd(xmm0, xmm1);
    __ movdbl(FieldOperand(edi, HeapNumber::kValueOffset), xmm0);
  } else {
    // 0x4130000000000000 is 1.0 x 2^20 as a double.
    __ mov(FieldOperand(edi, HeapNumber::kExponentOffset),
           Immediate(0x41300000));
    __ mov(FieldOperand(edi, HeapNumber::kMantissaOffset), eax);
    __ fld_d(FieldOperand(edi, HeapNumber::kValueOffset));
    __ mov(FieldOperand(edi, HeapNumber::kMantissaOffset), Immediate(0));
    __ fld_d(FieldOperand(edi, HeapNumber::kValueOffset));
    __ fsubp(1);
    __ fstp_d(FieldOperand(edi, HeapNumber::kValueOffset));
  }
  __ mov(eax, edi);
  context()->Plug(eax);
}
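
// The bit trick, worked through: placing the 32 random bits r in the low
// mantissa bits of a double whose exponent encodes 2^20 (high word
// 0x41300000) produces (1 + r * 2^-52) * 2^20 = 2^20 + r * 2^-32, so
// subtracting 2^20 leaves r * 2^-32, a uniform value in [0, 1).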


void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  SubStringStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  __ CallStub(&stub);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  RegExpExecStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 4);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  VisitForStackValue(args->at(3));
  __ CallStub(&stub);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label done;
  // If the object is a smi return the object.
  __ JumpIfSmi(eax, &done, Label::kNear);
  // If the object is not a value type, return the object.
  __ CmpObjectType(eax, JS_VALUE_TYPE, ebx);
  __ j(not_equal, &done, Label::kNear);
  __ mov(eax, FieldOperand(eax, JSValue::kValueOffset));

  __ bind(&done);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  ASSERT_NE(NULL, args->at(1)->AsLiteral());
  Smi* index = Smi::cast(*(args->at(1)->AsLiteral()->handle()));

  VisitForAccumulatorValue(args->at(0));  // Load the object.

  Label runtime, done;
  Register object = eax;
  Register result = eax;
  Register scratch = ecx;

  if (FLAG_debug_code) {
    __ AbortIfSmi(object);
    __ CmpObjectType(object, JS_DATE_TYPE, scratch);
    __ Assert(equal, "Trying to get date field from non-date.");
  }

  if (index->value() == 0) {
    __ mov(result, FieldOperand(object, JSDate::kValueOffset));
  } else {
    if (index->value() < JSDate::kFirstUncachedField) {
      ExternalReference stamp = ExternalReference::date_cache_stamp(isolate());
      __ mov(scratch, Operand::StaticVariable(stamp));
      __ cmp(scratch, FieldOperand(object, JSDate::kCacheStampOffset));
      __ j(not_equal, &runtime, Label::kNear);
      __ mov(result, FieldOperand(object, JSDate::kValueOffset +
                                          kPointerSize * index->value()));
      __ jmp(&done);
    }
    __ bind(&runtime);
    __ PrepareCallCFunction(2, scratch);
    __ mov(Operand(esp, 0), object);
    __ mov(Operand(esp, 1 * kPointerSize), Immediate(index));
    __ CallCFunction(ExternalReference::get_date_field_function(isolate()), 2);
    __ bind(&done);
  }
  context()->Plug(result);
}


void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
  // Load the arguments on the stack and call the runtime function.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  if (CpuFeatures::IsSupported(SSE2)) {
    MathPowStub stub(MathPowStub::ON_STACK);
    __ CallStub(&stub);
  } else {
    __ CallRuntime(Runtime::kMath_pow, 2);
  }
  context()->Plug(eax);
}


void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));  // Load the object.
  VisitForAccumulatorValue(args->at(1));  // Load the value.
  __ pop(ebx);  // eax = value. ebx = object.

  Label done;
  // If the object is a smi, return the value.
  __ JumpIfSmi(ebx, &done, Label::kNear);

  // If the object is not a value type, return the value.
  __ CmpObjectType(ebx, JS_VALUE_TYPE, ecx);
  __ j(not_equal, &done, Label::kNear);

  // Store the value.
  __ mov(FieldOperand(ebx, JSValue::kValueOffset), eax);

  // Update the write barrier. Save the value as it will be
  // overwritten by the write barrier code and is needed afterward.
  __ mov(edx, eax);
  __ RecordWriteField(ebx, JSValue::kValueOffset, edx, ecx, kDontSaveFPRegs);

  __ bind(&done);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(args->length(), 1);

  // Load the argument on the stack and call the stub.
  VisitForStackValue(args->at(0));

  NumberToStringStub stub;
  __ CallStub(&stub);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  Label done;
  StringCharFromCodeGenerator generator(eax, ebx);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(ebx);
}


void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = ebx;
  Register index = eax;
  Register result = edx;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharCodeAtGenerator generator(object,
                                      index,
                                      result,
                                      &need_conversion,
                                      &need_conversion,
                                      &index_out_of_range,
                                      STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
  __ Set(result, Immediate(isolate()->factory()->nan_value()));
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move the undefined value into the result register, which will
  // trigger conversion.
  __ Set(result, Immediate(isolate()->factory()->undefined_value()));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 2);

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));

  Register object = ebx;
  Register index = eax;
  Register scratch = edx;
  Register result = eax;

  __ pop(object);

  Label need_conversion;
  Label index_out_of_range;
  Label done;
  StringCharAtGenerator generator(object,
                                  index,
                                  scratch,
                                  result,
                                  &need_conversion,
                                  &need_conversion,
                                  &index_out_of_range,
                                  STRING_INDEX_IS_NUMBER);
  generator.GenerateFast(masm_);
  __ jmp(&done);

  __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // the empty string.
  __ Set(result, Immediate(isolate()->factory()->empty_string()));
  __ jmp(&done);

  __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
  __ Set(result, Immediate(Smi::FromInt(0)));
  __ jmp(&done);

  NopRuntimeCallHelper call_helper;
  generator.GenerateSlow(masm_, call_helper);

  __ bind(&done);
  context()->Plug(result);
}


void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());

  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  StringAddStub stub(NO_STRING_ADD_FLAGS);
  __ CallStub(&stub);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());

  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));

  StringCompareStub stub;
  __ CallStub(&stub);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitMathSin(CallRuntime* expr) {
  // Load the argument on the stack and call the stub.
  TranscendentalCacheStub stub(TranscendentalCache::SIN,
                               TranscendentalCacheStub::TAGGED);
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallStub(&stub);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitMathCos(CallRuntime* expr) {
  // Load the argument on the stack and call the stub.
  TranscendentalCacheStub stub(TranscendentalCache::COS,
                               TranscendentalCacheStub::TAGGED);
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallStub(&stub);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitMathTan(CallRuntime* expr) {
  // Load the argument on the stack and call the stub.
  TranscendentalCacheStub stub(TranscendentalCache::TAN,
                               TranscendentalCacheStub::TAGGED);
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallStub(&stub);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
  // Load the argument on the stack and call the stub.
  TranscendentalCacheStub stub(TranscendentalCache::LOG,
                               TranscendentalCacheStub::TAGGED);
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallStub(&stub);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
  // Load the argument on the stack and call the runtime function.
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForStackValue(args->at(0));
  __ CallRuntime(Runtime::kMath_sqrt, 1);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() >= 2);

  int arg_count = args->length() - 2;  // 2 ~ receiver and function.
  for (int i = 0; i < arg_count + 1; ++i) {
    VisitForStackValue(args->at(i));
  }
  VisitForAccumulatorValue(args->last());  // Function.

  // Check for proxy.
  Label proxy, done;
  __ CmpObjectType(eax, JS_FUNCTION_PROXY_TYPE, ebx);
  __ j(equal, &proxy);

  // InvokeFunction requires the function in edi. Move it in there.
  __ mov(edi, result_register());
  ParameterCount count(arg_count);
  __ InvokeFunction(edi, count, CALL_FUNCTION,
                    NullCallWrapper(), CALL_AS_METHOD);
  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  __ jmp(&done);

  __ bind(&proxy);
  __ push(eax);
  __ CallRuntime(Runtime::kCall, args->length());
  __ bind(&done);

  context()->Plug(eax);
}


void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  RegExpConstructResultStub stub;
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  __ CallStub(&stub);
  context()->Plug(eax);
}


void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());

  ASSERT_NE(NULL, args->at(0)->AsLiteral());
  int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();

  Handle<FixedArray> jsfunction_result_caches(
      isolate()->global_context()->jsfunction_result_caches());
  if (jsfunction_result_caches->length() <= cache_id) {
    __ Abort("Attempt to use undefined cache.");
    __ mov(eax, isolate()->factory()->undefined_value());
    context()->Plug(eax);
    return;
  }

  VisitForAccumulatorValue(args->at(1));

  Register key = eax;
  Register cache = ebx;
  Register tmp = ecx;
  __ mov(cache, ContextOperand(esi, Context::GLOBAL_INDEX));
  __ mov(cache,
         FieldOperand(cache, GlobalObject::kGlobalContextOffset));
  __ mov(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
  __ mov(cache,
         FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));

  Label done, not_found;
  // tmp now holds finger offset as a smi.
  STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
  __ mov(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
  __ cmp(key, CodeGenerator::FixedArrayElementOperand(cache, tmp));
  __ j(not_equal, &not_found);

  __ mov(eax, CodeGenerator::FixedArrayElementOperand(cache, tmp, 1));
  __ jmp(&done);

  __ bind(&not_found);
  // Call runtime to perform the lookup.
  __ push(cache);
  __ push(key);
  __ CallRuntime(Runtime::kGetFromCache, 2);

  __ bind(&done);
  context()->Plug(eax);
}
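
// Cache layout assumed by the fast path above: a JSFunctionResultCache is a
// FixedArray of (key, value) pairs with a finger index pointing at the most
// recently hit pair, so the inline code only compares the key under the
// finger before falling back to %GetFromCache for a full lookup.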


void FullCodeGenerator::EmitIsRegExpEquivalent(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT_EQ(2, args->length());

  Register right = eax;
  Register left = ebx;
  Register tmp = ecx;

  VisitForStackValue(args->at(0));
  VisitForAccumulatorValue(args->at(1));
  __ pop(left);

  Label done, fail, ok;
  __ cmp(left, right);
  __ j(equal, &ok);
  // Fail if either is a non-HeapObject.
  __ mov(tmp, left);
  __ and_(tmp, right);
  __ JumpIfSmi(tmp, &fail);
  __ mov(tmp, FieldOperand(left, HeapObject::kMapOffset));
  __ CmpInstanceType(tmp, JS_REGEXP_TYPE);
  __ j(not_equal, &fail);
  __ cmp(tmp, FieldOperand(right, HeapObject::kMapOffset));
  __ j(not_equal, &fail);
  __ mov(tmp, FieldOperand(left, JSRegExp::kDataOffset));
  __ cmp(tmp, FieldOperand(right, JSRegExp::kDataOffset));
  __ j(equal, &ok);
  __ bind(&fail);
  __ mov(eax, Immediate(isolate()->factory()->false_value()));
  __ jmp(&done);
  __ bind(&ok);
  __ mov(eax, Immediate(isolate()->factory()->true_value()));
  __ bind(&done);

  context()->Plug(eax);
}


void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);

  VisitForAccumulatorValue(args->at(0));

  if (FLAG_debug_code) {
    __ AbortIfNotString(eax);
  }

  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  __ test(FieldOperand(eax, String::kHashFieldOffset),
          Immediate(String::kContainsCachedArrayIndexMask));
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Split(zero, if_true, if_false, fall_through);

  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  ASSERT(args->length() == 1);
  VisitForAccumulatorValue(args->at(0));

  if (FLAG_debug_code) {
    __ AbortIfNotString(eax);
  }

  __ mov(eax, FieldOperand(eax, String::kHashFieldOffset));
  __ IndexFromHash(eax, eax);

  context()->Plug(eax);
}
3544 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
3545 Label bailout, done, one_char_separator, long_separator,
3546 non_trivial_array, not_size_one_array, loop,
3547 loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
3549 ZoneList<Expression*>* args = expr->arguments();
3550 ASSERT(args->length() == 2);
3551 // We will leave the separator on the stack until the end of the function.
3552 VisitForStackValue(args->at(1));
3553 // Load this to eax (= array)
3554 VisitForAccumulatorValue(args->at(0));
3555 // All aliases of the same register have disjoint lifetimes.
3556 Register array = eax;
3557 Register elements = no_reg; // Will be eax.
3559 Register index = edx;
3561 Register string_length = ecx;
3563 Register string = esi;
3565 Register scratch = ebx;
3567 Register array_length = edi;
3568 Register result_pos = no_reg; // Will be edi.
3570 // Separator operand is already pushed.
3571 Operand separator_operand = Operand(esp, 2 * kPointerSize);
3572 Operand result_operand = Operand(esp, 1 * kPointerSize);
3573 Operand array_length_operand = Operand(esp, 0);
3574 __ sub(esp, Immediate(2 * kPointerSize));
3576 // Check that the array is a JSArray
3577 __ JumpIfSmi(array, &bailout);
3578 __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
3579 __ j(not_equal, &bailout);
3581 // Check that the array has fast elements.
3582 __ CheckFastElements(scratch, &bailout);
3584 // If the array has length zero, return the empty string.
3585 __ mov(array_length, FieldOperand(array, JSArray::kLengthOffset));
3586 __ SmiUntag(array_length);
3587 __ j(not_zero, &non_trivial_array);
3588 __ mov(result_operand, isolate()->factory()->empty_string());
3591 // Save the array length.
3592 __ bind(&non_trivial_array);
3593 __ mov(array_length_operand, array_length);
3595 // Save the FixedArray containing array's elements.
3596 // End of array's live range.
3598 __ mov(elements, FieldOperand(array, JSArray::kElementsOffset));
3602 // Check that all array elements are sequential ASCII strings, and
3603 // accumulate the sum of their lengths, as a smi-encoded value.
3604 __ Set(index, Immediate(0));
3605 __ Set(string_length, Immediate(0));
3606 // Loop condition: while (index < length).
3607 // Live loop registers: index, array_length, string,
3608 // scratch, string_length, elements.
3609 if (FLAG_debug_code) {
3610 __ cmp(index, array_length);
3611 __ Assert(less, "No empty arrays here in EmitFastAsciiArrayJoin");
3614 __ mov(string, FieldOperand(elements,
3617 FixedArray::kHeaderSize));
3618 __ JumpIfSmi(string, &bailout);
3619 __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
3620 __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3621 __ and_(scratch, Immediate(
3622 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
3623 __ cmp(scratch, kStringTag | kAsciiStringTag | kSeqStringTag);
3624 __ j(not_equal, &bailout);
3625 __ add(string_length,
3626 FieldOperand(string, SeqAsciiString::kLengthOffset));
3627 __ j(overflow, &bailout);
3628 __ add(index, Immediate(1));
3629 __ cmp(index, array_length);
3632 // If array_length is 1, return elements[0], a string.
3633 __ cmp(array_length, 1);
3634 __ j(not_equal, ¬_size_one_array);
3635 __ mov(scratch, FieldOperand(elements, FixedArray::kHeaderSize));
3636 __ mov(result_operand, scratch);
3639 __ bind(¬_size_one_array);
3641 // End of array_length live range.
3642 result_pos = array_length;
3643 array_length = no_reg;
3646 // string_length: Sum of string lengths, as a smi.
3647 // elements: FixedArray of strings.
3649 // Check that the separator is a flat ASCII string.
3650 __ mov(string, separator_operand);
3651 __ JumpIfSmi(string, &bailout);
3652 __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
3653 __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3654 __ and_(scratch, Immediate(
3655 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
3656 __ cmp(scratch, ASCII_STRING_TYPE);
3657 __ j(not_equal, &bailout);
3659 // Add (separator length times array_length) - separator length
3660 // to string_length.
3661 __ mov(scratch, separator_operand);
3662 __ mov(scratch, FieldOperand(scratch, SeqAsciiString::kLengthOffset));
3663 __ sub(string_length, scratch); // May be negative, temporarily.
3664 __ imul(scratch, array_length_operand);
3665 __ j(overflow, &bailout);
3666 __ add(string_length, scratch);
3667 __ j(overflow, &bailout);
3669 __ shr(string_length, 1);
3670 // Live registers and stack values:
3673 __ AllocateAsciiString(result_pos, string_length, scratch,
3674 index, string, &bailout);
3675 __ mov(result_operand, result_pos);
3676 __ lea(result_pos, FieldOperand(result_pos, SeqAsciiString::kHeaderSize));
3679 __ mov(string, separator_operand);
3680 __ cmp(FieldOperand(string, SeqAsciiString::kLengthOffset),
3681 Immediate(Smi::FromInt(1)));
3682 __ j(equal, &one_char_separator);
3683 __ j(greater, &long_separator);
  // Empty separator case
  __ mov(index, Immediate(0));
  __ jmp(&loop_1_condition);
  // Loop condition: while (index < length).
  __ bind(&loop_1);
  // Each iteration of the loop concatenates one string to the result.
  // Live values in registers:
  //   index: which element of the elements array we are adding to the result.
  //   result_pos: the position to which we are currently copying characters.
  //   elements: the FixedArray of strings we are joining.

  // Get string = array[index].
  __ mov(string, FieldOperand(elements, index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ lea(string,
         FieldOperand(string, SeqAsciiString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ add(index, Immediate(1));
  __ bind(&loop_1_condition);
  __ cmp(index, array_length_operand);
  __ j(less, &loop_1);  // End while (index < length).
  __ jmp(&done);
  // One-character separator case
  __ bind(&one_char_separator);
  // Replace separator with its ASCII character value.
  __ mov_b(scratch, FieldOperand(string, SeqAsciiString::kHeaderSize));
  __ mov_b(separator_operand, scratch);
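  // Caching the lone separator byte in the separator stack slot lets the
  // loop below emit it with a single byte move per element instead of
  // re-reading the separator string header each iteration.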
  __ Set(index, Immediate(0));
  // Jump into the loop after the code that copies the separator, so the
  // first element is not preceded by a separator.
  __ jmp(&loop_2_entry);
  // Loop condition: while (index < length).
  __ bind(&loop_2);
  // Each iteration of the loop concatenates one string to the result.
  // Live values in registers:
  //   index: which element of the elements array we are adding to the result.
  //   result_pos: the position to which we are currently copying characters.

  // Copy the separator character to the result.
  __ mov_b(scratch, separator_operand);
  __ mov_b(Operand(result_pos, 0), scratch);
  __ inc(result_pos);

  __ bind(&loop_2_entry);
  // Get string = array[index].
  __ mov(string, FieldOperand(elements, index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ lea(string,
         FieldOperand(string, SeqAsciiString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ add(index, Immediate(1));

  __ cmp(index, array_length_operand);
  __ j(less, &loop_2);  // End while (index < length).
  __ jmp(&done);
  // Long separator case (separator is more than one character).
  __ bind(&long_separator);

  __ Set(index, Immediate(0));
  // Jump into the loop after the code that copies the separator, so the
  // first element is not preceded by a separator.
  __ jmp(&loop_3_entry);
  // Loop condition: while (index < length).
  __ bind(&loop_3);
  // Each iteration of the loop concatenates one string to the result.
  // Live values in registers:
  //   index: which element of the elements array we are adding to the result.
  //   result_pos: the position to which we are currently copying characters.

  // Copy the separator to the result.
  __ mov(string, separator_operand);
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ lea(string,
         FieldOperand(string, SeqAsciiString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);

  __ bind(&loop_3_entry);
  // Get string = array[index].
  __ mov(string, FieldOperand(elements, index,
                              times_pointer_size,
                              FixedArray::kHeaderSize));
  __ mov(string_length,
         FieldOperand(string, String::kLengthOffset));
  __ shr(string_length, 1);
  __ lea(string,
         FieldOperand(string, SeqAsciiString::kHeaderSize));
  __ CopyBytes(string, result_pos, string_length, scratch);
  __ add(index, Immediate(1));

  __ cmp(index, array_length_operand);
  __ j(less, &loop_3);  // End while (index < length).
  __ jmp(&done);

  __ bind(&bailout);
  __ mov(result_operand, isolate()->factory()->undefined_value());
  __ bind(&done);
  __ mov(eax, result_operand);
  // Drop temp values from the stack, and restore context register.
  __ add(esp, Immediate(3 * kPointerSize));

  __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  context()->Plug(eax);
}
void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  Handle<String> name = expr->name();
  if (name->length() > 0 && name->Get(0) == '_') {
    Comment cmnt(masm_, "[ InlineRuntimeCall");
    EmitInlineRuntimeCall(expr);
    return;
  }
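  // Runtime call names beginning with '_' (written %_Foo(...) in JS source)
  // refer to inline runtime functions and are expanded in place above
  // instead of being dispatched through the runtime system.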
  Comment cmnt(masm_, "[ CallRuntime");
  ZoneList<Expression*>* args = expr->arguments();

  if (expr->is_jsruntime()) {
    // Prepare for calling JS runtime function.
    __ mov(eax, GlobalObjectOperand());
    __ push(FieldOperand(eax, GlobalObject::kBuiltinsOffset));
  }

  // Push the arguments ("left-to-right").
  int arg_count = args->length();
  for (int i = 0; i < arg_count; i++) {
    VisitForStackValue(args->at(i));
  }

  if (expr->is_jsruntime()) {
    // Call the JS runtime function via a call IC.
    __ Set(ecx, Immediate(expr->name()));
    RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
    Handle<Code> ic =
        isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
    CallIC(ic, mode, expr->id());
    // Restore context register.
    __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
  } else {
    // Call the C runtime function.
    __ CallRuntime(expr->function(), arg_count);
  }
  context()->Plug(eax);
}
void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        StrictModeFlag strict_mode_flag = (language_mode() == CLASSIC_MODE)
            ? kNonStrictMode : kStrictMode;
        __ push(Immediate(Smi::FromInt(strict_mode_flag)));
        __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
        context()->Plug(eax);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode
        // but "delete this" is allowed.
        ASSERT(language_mode() == CLASSIC_MODE || var->is_this());
        if (var->IsUnallocated()) {
          __ push(var->is_qml_global()
                      ? QmlGlobalObjectOperand()
                      : GlobalObjectOperand());
          __ push(Immediate(var->name()));
          __ push(Immediate(Smi::FromInt(kNonStrictMode)));
          __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
          context()->Plug(eax);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global variables is false. 'this' is
          // not really a variable, though we implement it as one. The
          // subexpression does not have side effects.
          context()->Plug(var->is_this());
        } else {
          // Non-global variable. Call the runtime to try to delete from
          // the context where the variable was introduced.
          __ push(context_register());
          __ push(Immediate(var->name()));
          __ CallRuntime(Runtime::kDeleteContextSlot, 2);
          context()->Plug(eax);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(isolate()->factory()->undefined_value());
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression. Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->true_value());
        } else {
          __ Push(isolate()->factory()->true_value());
        }
        __ jmp(&done, Label::kNear);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->false_value());
        } else {
          __ Push(isolate()->factory()->false_value());
        }
        __ bind(&done);
      }
      break;
    }
    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      { StackValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
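      // VisitForTypeofValue performs a load that cannot throw a reference
      // error, so e.g. (typeof someUndeclaredGlobal) evaluates to
      // "undefined" instead of throwing.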
      __ CallRuntime(Runtime::kTypeof, 1);
      context()->Plug(eax);
      break;
    }
    case Token::ADD: {
      Comment cmt(masm_, "[ UnaryOperation (ADD)");
      VisitForAccumulatorValue(expr->expression());
      Label no_conversion;
      __ JumpIfSmi(result_register(), &no_conversion);
      ToNumberStub convert_stub;
      __ CallStub(&convert_stub);
      __ bind(&no_conversion);
      context()->Plug(result_register());
      break;
    }
    case Token::SUB:
      EmitUnaryOperation(expr, "[ UnaryOperation (SUB)");
      break;

    case Token::BIT_NOT:
      EmitUnaryOperation(expr, "[ UnaryOperation (BIT_NOT)");
      break;

    default:
      UNREACHABLE();
  }
}
void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
                                           const char* comment) {
  Comment cmt(masm_, comment);
  bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
  UnaryOverwriteMode overwrite =
      can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
  UnaryOpStub stub(expr->op(), overwrite);
  // UnaryOpStub expects the argument to be in the
  // accumulator register eax.
  VisitForAccumulatorValue(expr->expression());
  SetSourcePosition(expr->position());
  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
  context()->Plug(eax);
}
void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  Comment cmnt(masm_, "[ CountOperation");
  SetSourcePosition(expr->position());

  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
  // as the left-hand side.
  if (!expr->expression()->IsValidLeftHandSide()) {
    VisitForEffect(expr->expression());
    return;
  }

  // Expression can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->expression()->AsProperty();
  // In case of a property we use the uninitialized expression context
  // of the key to detect a named property.
  if (prop != NULL) {
    assign_type =
        (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
  }
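  // For example: x++ is VARIABLE, o.x++ is NAMED_PROPERTY, and o[i]++ is
  // KEYED_PROPERTY.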
  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ push(Immediate(Smi::FromInt(0)));
    }
    if (assign_type == NAMED_PROPERTY) {
      // Put the object both on the stack and in edx.
      VisitForAccumulatorValue(prop->obj());
      __ push(eax);
      __ mov(edx, eax);
      EmitNamedPropertyLoad(prop);
    } else {
      VisitForStackValue(prop->obj());
      VisitForStackValue(prop->key());
      __ mov(edx, Operand(esp, kPointerSize));  // Object.
      __ mov(ecx, Operand(esp, 0));             // Key.
      EmitKeyedPropertyLoad(prop);
    }
  }
  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(expr->CountId(), TOS_REG);
  }
  // Call ToNumber only if operand is not a smi.
  Label no_conversion;
  if (ShouldInlineSmiCase(expr->op())) {
    __ JumpIfSmi(eax, &no_conversion, Label::kNear);
  }
  ToNumberStub convert_stub;
  __ CallStub(&convert_stub);
  __ bind(&no_conversion);
  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          __ push(eax);
          break;
        case NAMED_PROPERTY:
          __ mov(Operand(esp, kPointerSize), eax);
          break;
        case KEYED_PROPERTY:
          __ mov(Operand(esp, 2 * kPointerSize), eax);
          break;
      }
    }
  }
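  // Stack after saving the old value (top to bottom), with the value now in
  // the slot reserved earlier:
  //   VARIABLE:        old value
  //   NAMED_PROPERTY:  receiver, old value
  //   KEYED_PROPERTY:  key, receiver, old value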
  // Inline smi case if we are in a loop.
  Label done, stub_call;
  JumpPatchSite patch_site(masm_);

  if (ShouldInlineSmiCase(expr->op())) {
    if (expr->op() == Token::INC) {
      __ add(eax, Immediate(Smi::FromInt(1)));
    } else {
      __ sub(eax, Immediate(Smi::FromInt(1)));
    }
    __ j(overflow, &stub_call, Label::kNear);
    // We could eliminate this smi check if we split the code at
    // the first smi check before calling ToNumber.
    patch_site.EmitJumpIfSmi(eax, &done, Label::kNear);

    __ bind(&stub_call);
    // Call stub. Undo operation first.
    if (expr->op() == Token::INC) {
      __ sub(eax, Immediate(Smi::FromInt(1)));
    } else {
      __ add(eax, Immediate(Smi::FromInt(1)));
    }
  }
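  // Note: Smi::FromInt(1) is the tagged constant 2 (value << 1, tag 0), so
  // the adds and subs above work directly on tagged values; a set overflow
  // flag means the result left the smi range and must go through the stub.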
  // Record position before stub call.
  SetSourcePosition(expr->position());

  // Call stub for +1/-1.
  __ mov(edx, eax);
  __ mov(eax, Immediate(Smi::FromInt(1)));
  BinaryOpStub stub(expr->binary_op(), NO_OVERWRITE);
  CallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId());
  patch_site.EmitPatchInfo();
  __ bind(&done);
4121 switch (assign_type) {
4123 if (expr->is_postfix()) {
4124 // Perform the assignment as if via '='.
4125 { EffectContext context(this);
4126 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4128 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4131 // For all contexts except EffectContext We have the result on
4132 // top of the stack.
4133 if (!context()->IsEffect()) {
4134 context()->PlugTOS();
4137 // Perform the assignment as if via '='.
4138 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
4140 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
4141 context()->Plug(eax);
    case NAMED_PROPERTY: {
      __ mov(ecx, prop->key()->AsLiteral()->handle());
      __ pop(edx);
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->StoreIC_Initialize()
          : isolate()->builtins()->StoreIC_Initialize_Strict();
      CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ pop(ecx);
      __ pop(edx);
      Handle<Code> ic = is_classic_mode()
          ? isolate()->builtins()->KeyedStoreIC_Initialize()
          : isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
      CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        // Result is on the stack
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
  }
}
void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
  VariableProxy* proxy = expr->AsVariableProxy();
  ASSERT(!context()->IsEffect());
  ASSERT(!context()->IsTest());

  if (proxy != NULL && proxy->var()->IsUnallocated()) {
    Comment cmnt(masm_, "Global variable");
    __ mov(edx, proxy->var()->is_qml_global()
                    ? QmlGlobalObjectOperand()
                    : GlobalObjectOperand());
    __ mov(ecx, Immediate(proxy->name()));
    Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
    // Use a regular load, not a contextual load, to avoid a reference
    // error.
    CallIC(ic);
    PrepareForBailout(expr, TOS_REG);
    context()->Plug(eax);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    Label done, slow;

    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    __ push(esi);
    __ push(Immediate(proxy->name()));
    __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
    PrepareForBailout(expr, TOS_REG);
    __ bind(&done);

    context()->Plug(eax);
  } else {
    // This expression cannot throw a reference error at the top level.
    VisitInDuplicateContext(expr);
  }
}
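// Emits code for comparisons of the form (typeof <sub_expr> == <literal>),
// e.g. (typeof x == "number"), branching directly on the relevant map and
// instance type checks instead of materializing the typeof result string.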
void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  if (check->Equals(isolate()->heap()->number_symbol())) {
    __ JumpIfSmi(eax, if_true);
    __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
           isolate()->factory()->heap_number_map());
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->string_symbol())) {
    __ JumpIfSmi(eax, if_false);
    __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
    __ j(above_equal, if_false);
    // Check for undetectable objects => false.
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    Split(zero, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->boolean_symbol())) {
    __ cmp(eax, isolate()->factory()->true_value());
    __ j(equal, if_true);
    __ cmp(eax, isolate()->factory()->false_value());
    Split(equal, if_true, if_false, fall_through);
  } else if (FLAG_harmony_typeof &&
             check->Equals(isolate()->heap()->null_symbol())) {
    __ cmp(eax, isolate()->factory()->null_value());
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->undefined_symbol())) {
    __ cmp(eax, isolate()->factory()->undefined_value());
    __ j(equal, if_true);
    __ JumpIfSmi(eax, if_false);
    // Check for undetectable objects => true.
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ movzx_b(ecx, FieldOperand(edx, Map::kBitFieldOffset));
    __ test(ecx, Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->function_symbol())) {
    __ JumpIfSmi(eax, if_false);
    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
    __ CmpObjectType(eax, JS_FUNCTION_TYPE, edx);
    __ j(equal, if_true);
    __ CmpInstanceType(edx, JS_FUNCTION_PROXY_TYPE);
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->object_symbol())) {
    __ JumpIfSmi(eax, if_false);
    if (!FLAG_harmony_typeof) {
      __ cmp(eax, isolate()->factory()->null_value());
      __ j(equal, if_true);
    }
    __ CmpObjectType(eax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, edx);
    __ j(below, if_false);
    __ CmpInstanceType(edx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ j(above, if_false);
    // Check for undetectable objects => false.
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    Split(zero, if_true, if_false, fall_through);
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
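  // Note: "undetectable" objects (e.g. an embedder-provided object such as
  // a browser's document.all) report typeof "undefined" above and are
  // excluded from "object".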
  context()->Plug(if_true, if_false);
}
void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetSourcePosition(expr->position());

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow. Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ cmp(eax, isolate()->factory()->true_value());
      Split(equal, if_true, if_false, fall_through);
      break;
    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      InstanceofStub stub(InstanceofStub::kNoFlags);
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ test(eax, eax);
      // The stub returns 0 for true.
      Split(zero, if_true, if_false, fall_through);
      break;
    }

    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cc = no_condition;
      switch (op) {
        case Token::EQ_STRICT:
        case Token::EQ:  cc = equal;         break;
        case Token::LT:  cc = less;          break;
        case Token::GT:  cc = greater;       break;
        case Token::LTE: cc = less_equal;    break;
        case Token::GTE: cc = greater_equal; break;
        case Token::IN:
        case Token::INSTANCEOF:
        default:         UNREACHABLE();
      }
      __ pop(edx);
      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ mov(ecx, edx);
        __ or_(ecx, eax);
        patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
        __ cmp(edx, eax);
        Split(cc, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      // Record position and call the compare IC.
      SetSourcePosition(expr->position());
      Handle<Code> ic = CompareIC::GetUninitialized(op);
      CallIC(ic, RelocInfo::CODE_TARGET, expr->id());
      patch_site.EmitPatchInfo();

      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ test(eax, eax);
      Split(cc, if_true, if_false, fall_through);
    }
  }
  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}
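// Emits code for comparisons of a value against null or undefined, e.g.
// (x == null). For the non-strict form, null, undefined, and undetectable
// objects all compare equal to either nil literal; the strict form matches
// only the exact nil value.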
void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Handle<Object> nil_value = nil == kNullValue ?
      isolate()->factory()->null_value() :
      isolate()->factory()->undefined_value();
  __ cmp(eax, nil_value);
  if (expr->op() == Token::EQ_STRICT) {
    Split(equal, if_true, if_false, fall_through);
  } else {
    Handle<Object> other_nil_value = nil == kNullValue ?
        isolate()->factory()->undefined_value() :
        isolate()->factory()->null_value();
    __ j(equal, if_true);
    __ cmp(eax, other_nil_value);
    __ j(equal, if_true);
    __ JumpIfSmi(eax, if_false);
    // It can be an undetectable object.
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ movzx_b(edx, FieldOperand(edx, Map::kBitFieldOffset));
    __ test(edx, Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}
void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(eax);
}
Register FullCodeGenerator::result_register() {
  return eax;
}
Register FullCodeGenerator::context_register() {
  return esi;
}
void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ mov(Operand(ebp, frame_offset), value);
}
void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ mov(dst, ContextOperand(esi, context_index));
}
void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* declaration_scope = scope()->DeclarationScope();
  if (declaration_scope->is_global_scope() ||
      declaration_scope->is_module_scope()) {
    // Contexts nested in the global context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code. Pass a smi sentinel and let the runtime look up the empty
    // function.
    __ push(Immediate(Smi::FromInt(0)));
  } else if (declaration_scope->is_eval_scope()) {
    // Contexts nested inside eval code have the same closure as the context
    // calling eval, not the anonymous closure containing the eval code.
    // Fetch it from the context.
    __ push(ContextOperand(esi, Context::CLOSURE_INDEX));
  } else {
    ASSERT(declaration_scope->is_function_scope());
    __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  }
}
// ----------------------------------------------------------------------------
// Non-local control flow support.
void FullCodeGenerator::EnterFinallyBlock() {
  // Cook return address on top of stack (smi encoded Code* delta).
  ASSERT(!result_register().is(edx));
  __ pop(edx);
  __ sub(edx, Immediate(masm_->CodeObject()));
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
  STATIC_ASSERT(kSmiTag == 0);
  __ SmiTag(edx);
  __ push(edx);
  // Store result register while executing finally block.
  __ push(result_register());
}
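// Note on cooking: a raw return address on the stack would become stale if
// the code object were moved, e.g. by the GC, while the finally block runs;
// a smi-tagged offset from the code object survives relocation and is
// restored by ExitFinallyBlock().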
void FullCodeGenerator::ExitFinallyBlock() {
  ASSERT(!result_register().is(edx));
  __ pop(result_register());
  // Uncook return address.
  __ pop(edx);
  __ SmiUntag(edx);
  __ add(edx, Immediate(masm_->CodeObject()));
  __ jmp(edx);
}

#undef __

#define __ ACCESS_MASM(masm())
FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
    int* stack_depth,
    int* context_length) {
  // The macros used here must preserve the result register.

  // Because the handler block contains the context of the finally
  // code, we can restore it directly from there for the finally code
  // rather than iteratively unwinding contexts via their previous
  // links.
  __ Drop(*stack_depth);  // Down to the handler block.
  if (*context_length > 0) {
    // Restore the context to its dedicated register and the stack.
    __ mov(esi, Operand(esp, StackHandlerConstants::kContextOffset));
    __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
  }
  __ PopTryHandler();
  __ call(finally_entry_);

  *stack_depth = 0;
  *context_length = 0;
  return previous_;
}

#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32