// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_IA32)

#include "code-stubs.h"
#include "full-codegen.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm_)

class JumpPatchSite BASE_EMBEDDED {
  explicit JumpPatchSite(MacroAssembler* masm) : masm_(masm) {
    info_emitted_ = false;

  ASSERT(patch_site_.is_bound() == info_emitted_);

  void EmitJumpIfNotSmi(Register reg,
                        Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(not_carry, target, distance);  // Always taken before patched.

  void EmitJumpIfSmi(Register reg,
                     Label::Distance distance = Label::kFar) {
    __ test(reg, Immediate(kSmiTagMask));
    EmitJump(carry, target, distance);  // Never taken before patched.

  void EmitPatchInfo() {
    if (patch_site_.is_bound()) {
      int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
      ASSERT(is_int8(delta_to_patch_site));
      __ test(eax, Immediate(delta_to_patch_site));
      __ nop();  // Signals no inlined code.
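
  // Note on patching: EmitJump below emits a jc/jnc whose offset is recorded
  // at patch_site_. EmitPatchInfo then encodes the distance back to that jump
  // in the 8-bit immediate of the test instruction above (or emits a nop when
  // no smi check was inlined). The IC miss path reads that immediate to find
  // the jump and rewrite jc -> jz / jnc -> jnz once smi operands have been
  // observed (see PatchInlinedSmiCode in the IA32 IC code).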
  // jc will be patched with jz, jnc will become jnz.
  void EmitJump(Condition cc, Label* target, Label::Distance distance) {
    ASSERT(!patch_site_.is_bound() && !info_emitted_);
    ASSERT(cc == carry || cc == not_carry);
    __ bind(&patch_site_);
    __ j(cc, target, distance);

  MacroAssembler* masm_;


// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right, with the
// return address on top of them. The actual argument count matches the
// formal parameter count expected by the function.
//
// The live registers are:
//   o edi: the JS function object being called (i.e. ourselves)
//   o esi: our context
//   o ebp: our caller's frame pointer
//   o esp: stack pointer (pointing to return address)
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-ia32.h for its layout.
void FullCodeGenerator::Generate(CompilationInfo* info) {
  ASSERT(info_ == NULL);
  scope_ = info->scope();
  SetFunctionPosition(function());
  Comment cmnt(masm_, "[ function compiled by full code generator");

  if (strlen(FLAG_stop_at) > 0 &&
      info->function()->name()->IsEqualTo(CStrVector(FLAG_stop_at))) {

  // Strict mode functions and builtins need to replace the receiver
  // with undefined when called as functions (without an explicit
  // receiver object). ecx is zero for method calls and non-zero for
  // function calls.
  if (info->is_strict_mode() || info->is_native()) {
    __ j(zero, &ok, Label::kNear);
    // +1 for return address.
    int receiver_offset = (info->scope()->num_parameters() + 1) * kPointerSize;
    __ mov(Operand(esp, receiver_offset),
           Immediate(isolate()->factory()->undefined_value()));

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done below).
  FrameScope frame_scope(masm_, StackFrame::MANUAL);

  __ push(ebp);  // Caller's frame pointer.
  __ mov(ebp, esp);
  __ push(esi);  // Callee's context.
  __ push(edi);  // Callee's JS Function.
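  // Frame layout from here on (higher to lower addresses): the caller's
  // arguments, the return address at ebp + 4, the saved ebp at ebp, the
  // context (esi) at ebp - 4, the function (edi) at ebp - 8, then the
  // stack-allocated locals. See JavaScriptFrameConstants in frames-ia32.h
  // for the exact constants.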

  { Comment cmnt(masm_, "[ Allocate locals");
    int locals_count = info->scope()->num_stack_slots();
    if (locals_count == 1) {
      __ push(Immediate(isolate()->factory()->undefined_value()));
    } else if (locals_count > 1) {
      __ mov(eax, Immediate(isolate()->factory()->undefined_value()));
      for (int i = 0; i < locals_count; i++) {

  bool function_in_register = true;

  // Possibly allocate a local context.
  int heap_slots = info->scope()->num_heap_slots() - Context::MIN_CONTEXT_SLOTS;
  if (heap_slots > 0 ||
      (scope()->is_qml_mode() && scope()->is_global_scope())) {
    Comment cmnt(masm_, "[ Allocate local context");
    // Argument to NewContext is the function, which is still in edi.
    if (heap_slots <= FastNewContextStub::kMaximumSlots) {
      FastNewContextStub stub((heap_slots < 0) ? 0 : heap_slots);
      __ CallRuntime(Runtime::kNewFunctionContext, 1);
    function_in_register = false;
    // Context is returned in both eax and esi. It replaces the context
    // passed to us. It's saved in the stack and kept live in esi.
    __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);

    // Copy parameters into context if necessary.
    int num_parameters = info->scope()->num_parameters();
    for (int i = 0; i < num_parameters; i++) {
      Variable* var = scope()->parameter(i);
      if (var->IsContextSlot()) {
        int parameter_offset = StandardFrameConstants::kCallerSPOffset +
            (num_parameters - 1 - i) * kPointerSize;
        // Load parameter from stack.
        __ mov(eax, Operand(ebp, parameter_offset));
        // Store it in the context.
        int context_offset = Context::SlotOffset(var->index());
        __ mov(Operand(esi, context_offset), eax);
        // Update the write barrier. This clobbers eax and ebx.
        __ RecordWriteContextSlot(esi,

  Variable* arguments = scope()->arguments();
  if (arguments != NULL) {
    // Function uses arguments object.
    Comment cmnt(masm_, "[ Allocate arguments object");
    if (function_in_register) {
      __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
    // Receiver is just before the parameters on the caller's stack.
    int num_parameters = info->scope()->num_parameters();
    int offset = num_parameters * kPointerSize;
           Operand(ebp, StandardFrameConstants::kCallerSPOffset + offset));
    __ SafePush(Immediate(Smi::FromInt(num_parameters)));
    // Arguments to ArgumentsAccessStub and/or New...:
    //   function, receiver address, parameter count.
    // The stub will rewrite receiver and parameter count if the previous
    // stack frame was an arguments adapter frame.
    ArgumentsAccessStub::Type type;
    if (is_strict_mode()) {
      type = ArgumentsAccessStub::NEW_STRICT;
    } else if (function()->has_duplicate_parameters()) {
      type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW;
      type = ArgumentsAccessStub::NEW_NON_STRICT_FAST;
    ArgumentsAccessStub stub(type);
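    // The stub leaves the freshly allocated arguments object in eax (SetVar
    // below stores it into the 'arguments' variable). NEW_STRICT builds an
    // unmapped arguments object; the NON_STRICT variants may alias the
    // formal parameters, with the SLOW variant covering duplicate parameter
    // names.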
    SetVar(arguments, eax, ebx, edx);

    __ CallRuntime(Runtime::kTraceEnter, 0);

  // Visit the declarations and body unless there is an illegal
  // redeclaration.
  if (scope()->HasIllegalRedeclaration()) {
    Comment cmnt(masm_, "[ Declarations");
    scope()->VisitIllegalRedeclaration(this);

    PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS);
    { Comment cmnt(masm_, "[ Declarations");
      // For named function expressions, declare the function name as a
      // constant.
      if (scope()->is_function_scope() && scope()->function() != NULL) {
        VariableProxy* proxy = scope()->function();
        ASSERT(proxy->var()->mode() == CONST ||
               proxy->var()->mode() == CONST_HARMONY);
        EmitDeclaration(proxy, proxy->var()->mode(), NULL, &ignored);
      VisitDeclarations(scope()->declarations());

    { Comment cmnt(masm_, "[ Stack check");
      PrepareForBailoutForId(AstNode::kDeclarationsId, NO_REGISTERS);
      ExternalReference stack_limit =
          ExternalReference::address_of_stack_limit(isolate());
      __ cmp(esp, Operand::StaticVariable(stack_limit));
      __ j(above_equal, &ok, Label::kNear);

    { Comment cmnt(masm_, "[ Body");
      ASSERT(loop_depth() == 0);
      VisitStatements(function()->body());
      ASSERT(loop_depth() == 0);

  // Always emit a 'return undefined' in case control fell off the end of
  // the body.
  { Comment cmnt(masm_, "[ return <undefined>;");
    __ mov(eax, isolate()->factory()->undefined_value());
    EmitReturnSequence();


void FullCodeGenerator::ClearAccumulator() {
  __ Set(eax, Immediate(Smi::FromInt(0)));


void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) {
  Comment cmnt(masm_, "[ Stack check");
  ExternalReference stack_limit =
      ExternalReference::address_of_stack_limit(isolate());
  __ cmp(esp, Operand::StaticVariable(stack_limit));
  __ j(above_equal, &ok, Label::kNear);

  // Record a mapping of this PC offset to the OSR id. This is used to find
  // the AST id from the unoptimized code in order to use it as a key into
  // the deoptimization input data found in the optimized code.
  RecordStackCheck(stmt->OsrEntryId());

  // Loop stack checks can be patched to perform on-stack replacement. In
  // order to decide whether or not to perform OSR we embed the loop depth
  // in a test instruction after the call so we can extract it from the OSR
  // builtin.
  ASSERT(loop_depth() > 0);
  __ test(eax, Immediate(Min(loop_depth(), Code::kMaxLoopNestingMarker)));

  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
  // Record a mapping of the OSR id to this PC. This is used if the OSR
  // entry becomes the target of a bailout. We don't expect it to be, but
  // we want it to work if it is.
  PrepareForBailoutForId(stmt->OsrEntryId(), NO_REGISTERS);


void FullCodeGenerator::EmitReturnSequence() {
  Comment cmnt(masm_, "[ Return sequence");
  if (return_label_.is_bound()) {
    __ jmp(&return_label_);

    // Common return label.
    __ bind(&return_label_);

    __ CallRuntime(Runtime::kTraceExit, 1);

    // Add a label for checking the size of the code used for returning.
    Label check_exit_codesize;
    masm_->bind(&check_exit_codesize);

    SetSourcePosition(function()->end_position() - 1);

    // Do not use the leave instruction here because it is too short to
    // patch with the code required by the debugger.

    int arguments_bytes = (info_->scope()->num_parameters() + 1) * kPointerSize;
    __ Ret(arguments_bytes, ecx);
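    // Example: for a function with two declared parameters, arguments_bytes
    // is (2 + 1) * 4 = 12, so the emitted 'ret 12' pops the return address
    // and then drops the receiver and both arguments from the caller's
    // stack.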
#ifdef ENABLE_DEBUGGER_SUPPORT
    // Check that the size of the code used for returning is large enough
    // for the debugger's requirements.
    ASSERT(Assembler::kJSReturnSequenceLength <=
           masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));


void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());


void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);


void FullCodeGenerator::StackValueContext::Plug(Variable* var) const {
  ASSERT(var->IsStackAllocated() || var->IsContextSlot());
  MemOperand operand = codegen()->VarOperand(var, result_register());
  // Memory operands can be pushed directly.


void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);


void FullCodeGenerator::EffectContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.


void FullCodeGenerator::StackValueContext::Plug(
    Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.


void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
  UNREACHABLE();  // Not used on IA32.


void FullCodeGenerator::EffectContext::Plug(Handle<Object> lit) const {


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Handle<Object> lit) const {
    __ SafeSet(result_register(), Immediate(lit));
    __ Set(result_register(), Immediate(lit));


void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
    __ SafePush(Immediate(lit));
    __ push(Immediate(lit));


void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
  ASSERT(!lit->IsUndetectableObject());  // There are no undetectable literals.
  if (lit->IsUndefined() || lit->IsNull() || lit->IsFalse()) {
    if (false_label_ != fall_through_) __ jmp(false_label_);
  } else if (lit->IsTrue() || lit->IsJSObject()) {
    if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsString()) {
    if (String::cast(*lit)->length() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
      if (true_label_ != fall_through_) __ jmp(true_label_);
  } else if (lit->IsSmi()) {
    if (Smi::cast(*lit)->value() == 0) {
      if (false_label_ != fall_through_) __ jmp(false_label_);
      if (true_label_ != fall_through_) __ jmp(true_label_);
    // For simplicity we always test the accumulator register.
    __ mov(result_register(), lit);
    codegen()->DoTest(this);


void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {


void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    Register reg) const {
  __ Move(result_register(), reg);


void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
                                                       Register reg) const {
  if (count > 1) __ Drop(count - 1);
  __ mov(Operand(esp, 0), reg);


void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  // For simplicity we always test the accumulator register.
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);


void FullCodeGenerator::EffectContext::Plug(Label* materialize_true,
                                            Label* materialize_false) const {
  ASSERT(materialize_true == materialize_false);
  __ bind(materialize_true);


void FullCodeGenerator::AccumulatorValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  __ bind(materialize_true);
  __ mov(result_register(), isolate()->factory()->true_value());
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ mov(result_register(), isolate()->factory()->false_value());


void FullCodeGenerator::StackValueContext::Plug(
    Label* materialize_true,
    Label* materialize_false) const {
  __ bind(materialize_true);
  __ push(Immediate(isolate()->factory()->true_value()));
  __ jmp(&done, Label::kNear);
  __ bind(materialize_false);
  __ push(Immediate(isolate()->factory()->false_value()));


void FullCodeGenerator::TestContext::Plug(Label* materialize_true,
                                          Label* materialize_false) const {
  ASSERT(materialize_true == true_label_);
  ASSERT(materialize_false == false_label_);


void FullCodeGenerator::EffectContext::Plug(bool flag) const {


void FullCodeGenerator::AccumulatorValueContext::Plug(bool flag) const {
  Handle<Object> value = flag
      ? isolate()->factory()->true_value()
      : isolate()->factory()->false_value();
  __ mov(result_register(), value);


void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
  Handle<Object> value = flag
      ? isolate()->factory()->true_value()
      : isolate()->factory()->false_value();
  __ push(Immediate(value));


void FullCodeGenerator::TestContext::Plug(bool flag) const {
  codegen()->PrepareForBailoutBeforeSplit(condition(),
    if (true_label_ != fall_through_) __ jmp(true_label_);
    if (false_label_ != fall_through_) __ jmp(false_label_);


void FullCodeGenerator::DoTest(Expression* condition,
                               Label* fall_through) {
  ToBooleanStub stub(result_register());
  __ push(result_register());
  __ CallStub(&stub, condition->test_id());
  __ test(result_register(), result_register());
  // The stub returns nonzero for true.
  Split(not_zero, if_true, if_false, fall_through);


void FullCodeGenerator::Split(Condition cc,
                              Label* fall_through) {
  if (if_false == fall_through) {
  } else if (if_true == fall_through) {
    __ j(NegateCondition(cc), if_false);
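    // Split emits at most one conditional jump per call: if the false target
    // is the fall-through, only the true branch needs a jump; if the true
    // target is the fall-through, the condition is negated and only the
    // false branch jumps; otherwise it jumps on cc and then unconditionally
    // to the false target.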


MemOperand FullCodeGenerator::StackOperand(Variable* var) {
  ASSERT(var->IsStackAllocated());
  // Offset is negative because higher indexes are at lower addresses.
  int offset = -var->index() * kPointerSize;
  // Adjust by a (parameter or local) base offset.
  if (var->IsParameter()) {
    offset += (info_->scope()->num_parameters() + 1) * kPointerSize;
    offset += JavaScriptFrameConstants::kLocal0Offset;
  return Operand(ebp, offset);
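  // Worked example, assuming the frame layout set up in Generate(): with n
  // parameters, parameter i resolves to ebp + (n + 1 - i) * kPointerSize
  // (the receiver sits one slot higher still), while stack local j resolves
  // to ebp + kLocal0Offset - j * kPointerSize, just below the saved context
  // and function slots.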


MemOperand FullCodeGenerator::VarOperand(Variable* var, Register scratch) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  if (var->IsContextSlot()) {
    int context_chain_length = scope()->ContextChainLength(var->scope());
    __ LoadContext(scratch, context_chain_length);
    return ContextOperand(scratch, var->index());
  return StackOperand(var);


void FullCodeGenerator::GetVar(Register dest, Variable* var) {
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  MemOperand location = VarOperand(var, dest);
  __ mov(dest, location);


void FullCodeGenerator::SetVar(Variable* var,
  ASSERT(var->IsContextSlot() || var->IsStackAllocated());
  ASSERT(!scratch0.is(src));
  ASSERT(!scratch0.is(scratch1));
  ASSERT(!scratch1.is(src));
  MemOperand location = VarOperand(var, scratch0);
  __ mov(location, src);

  // Emit the write barrier code if the location is in the heap.
  if (var->IsContextSlot()) {
    int offset = Context::SlotOffset(var->index());
    ASSERT(!scratch0.is(esi) && !src.is(esi) && !scratch1.is(esi));
    __ RecordWriteContextSlot(scratch0, offset, src, scratch1, kDontSaveFPRegs);


void FullCodeGenerator::PrepareForBailoutBeforeSplit(Expression* expr,
                                                     bool should_normalize,
  // Only prepare for bailouts before splits if we're in a test
  // context. Otherwise, we let the Visit function deal with the
  // preparation to avoid preparing with the same AST id twice.
  if (!context()->IsTest() || !info_->IsOptimizable()) return;

  if (should_normalize) __ jmp(&skip, Label::kNear);
  PrepareForBailout(expr, TOS_REG);
  if (should_normalize) {
    __ cmp(eax, isolate()->factory()->true_value());
    Split(equal, if_true, if_false, NULL);


void FullCodeGenerator::EmitDeclaration(VariableProxy* proxy,
                                        FunctionLiteral* function,
  // If it was not possible to allocate the variable at compile time, we
  // need to "declare" it at runtime to make sure it actually exists in the
  // global object.
  Variable* variable = proxy->var();
  bool binding_needs_init =
      mode == CONST || mode == CONST_HARMONY || mode == LET;
  switch (variable->location()) {
    case Variable::UNALLOCATED:

    case Variable::PARAMETER:
    case Variable::LOCAL:
      if (function != NULL) {
        Comment cmnt(masm_, "[ Declaration");
        VisitForAccumulatorValue(function);
        __ mov(StackOperand(variable), result_register());
      } else if (binding_needs_init) {
        Comment cmnt(masm_, "[ Declaration");
        __ mov(StackOperand(variable),
               Immediate(isolate()->factory()->the_hole_value()));

    case Variable::CONTEXT:
      // The variable in the decl always resides in the current function
      // context.
      ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
      if (FLAG_debug_code) {
        // Check that we're not inside a with or catch context.
        __ mov(ebx, FieldOperand(esi, HeapObject::kMapOffset));
        __ cmp(ebx, isolate()->factory()->with_context_map());
        __ Check(not_equal, "Declaration in with context.");
        __ cmp(ebx, isolate()->factory()->catch_context_map());
        __ Check(not_equal, "Declaration in catch context.");
      if (function != NULL) {
        Comment cmnt(masm_, "[ Declaration");
        VisitForAccumulatorValue(function);
        __ mov(ContextOperand(esi, variable->index()), result_register());
        // We know that we have written a function, which is not a smi.
        __ RecordWriteContextSlot(esi,
                                  Context::SlotOffset(variable->index()),
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);
      } else if (binding_needs_init) {
        Comment cmnt(masm_, "[ Declaration");
        __ mov(ContextOperand(esi, variable->index()),
               Immediate(isolate()->factory()->the_hole_value()));
        // No write barrier since the hole value is in old space.
        PrepareForBailoutForId(proxy->id(), NO_REGISTERS);

    case Variable::LOOKUP: {
      Comment cmnt(masm_, "[ Declaration");
      __ push(Immediate(variable->name()));
      // Declaration nodes are always introduced in one of four modes.
      ASSERT(mode == VAR ||
             mode == CONST_HARMONY ||
      PropertyAttributes attr = (mode == CONST || mode == CONST_HARMONY)
      __ push(Immediate(Smi::FromInt(attr)));
      // Push initial value, if any.
      // Note: For variables we must not push an initial value (such as
      // 'undefined') because we may have a (legal) redeclaration and we
      // must not destroy the current value.
      if (function != NULL) {
        VisitForStackValue(function);
      } else if (binding_needs_init) {
        __ push(Immediate(isolate()->factory()->the_hole_value()));
        __ push(Immediate(Smi::FromInt(0)));  // Indicates no initial value.
      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
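      // kDeclareContextSlot expects four arguments on the stack: the current
      // context, the variable name, the property attributes, and the initial
      // value (or Smi 0 when there is none).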


void FullCodeGenerator::VisitDeclaration(Declaration* decl) { }


void FullCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
  // Call the runtime to declare the globals.
  __ push(esi);  // The context is the first argument.
  __ push(Immediate(pairs));
  __ push(Immediate(Smi::FromInt(DeclareGlobalsFlags())));
  __ CallRuntime(Runtime::kDeclareGlobals, 3);
  // Return value is ignored.


void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  Comment cmnt(masm_, "[ SwitchStatement");
  Breakable nested_statement(this, stmt);
  SetStatementPosition(stmt);

  // Keep the switch value on the stack until a case matches.
  VisitForStackValue(stmt->tag());
  PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);

  ZoneList<CaseClause*>* clauses = stmt->cases();
  CaseClause* default_clause = NULL;  // Can occur anywhere in the list.

  Label next_test;  // Recycled for each test.
  // Compile all the tests with branches to their bodies.
  for (int i = 0; i < clauses->length(); i++) {
    CaseClause* clause = clauses->at(i);
    clause->body_target()->Unuse();

    // The default is not a test, but remember it as final fall through.
    if (clause->is_default()) {
      default_clause = clause;

    Comment cmnt(masm_, "[ Case comparison");

    // Compile the label expression.
    VisitForAccumulatorValue(clause->label());

    // Perform the comparison as if via '==='.
    __ mov(edx, Operand(esp, 0));  // Switch value.
    bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
    JumpPatchSite patch_site(masm_);
    if (inline_smi_code) {
      patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
      __ j(not_equal, &next_test);
      __ Drop(1);  // Switch value is no longer needed.
      __ jmp(clause->body_target());

    // Record position before stub call for type feedback.
    SetSourcePosition(clause->position());
    Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
    __ call(ic, RelocInfo::CODE_TARGET, clause->CompareId());
    patch_site.EmitPatchInfo();
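    // Note how the patch site cooperates with the CompareIC: the jump emitted
    // by EmitJumpIfNotSmi above is "always taken" at first, so every case
    // comparison initially goes through the IC. Once the IC has seen smi
    // operands it patches that jump (jnc -> jnz), after which smi comparisons
    // stay on the inlined fast path.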
    __ j(not_equal, &next_test);
    __ Drop(1);  // Switch value is no longer needed.
    __ jmp(clause->body_target());

  // Discard the test value and jump to the default if present, otherwise to
  // the end of the statement.
  __ Drop(1);  // Switch value is no longer needed.
  if (default_clause == NULL) {
    __ jmp(nested_statement.break_label());
    __ jmp(default_clause->body_target());

  // Compile all the case bodies.
  for (int i = 0; i < clauses->length(); i++) {
    Comment cmnt(masm_, "[ Case body");
    CaseClause* clause = clauses->at(i);
    __ bind(clause->body_target());
    PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
    VisitStatements(clause->statements());

  __ bind(nested_statement.break_label());
  PrepareForBailoutForId(stmt->ExitId(), NO_REGISTERS);


void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  Comment cmnt(masm_, "[ ForInStatement");
  SetStatementPosition(stmt);

  ForIn loop_statement(this, stmt);
  increment_loop_depth();

  // Get the object to enumerate over. Both SpiderMonkey and JSC
  // ignore null and undefined in contrast to the specification; see
  // ECMA-262 section 12.6.4.
  VisitForAccumulatorValue(stmt->enumerable());
  __ cmp(eax, isolate()->factory()->undefined_value());
  __ cmp(eax, isolate()->factory()->null_value());

  // Convert the object to a JS object.
  Label convert, done_convert;
  __ JumpIfSmi(eax, &convert, Label::kNear);
  __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ecx);
  __ j(above_equal, &done_convert, Label::kNear);
  __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
  __ bind(&done_convert);

  // Check for proxies.
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CmpObjectType(eax, LAST_JS_PROXY_TYPE, ecx);
  __ j(below_equal, &call_runtime);

  // Check cache validity in generated code. This is a fast case for
  // the JSObject::IsSimpleEnum cache validity checks. If we cannot
  // guarantee cache validity, call the runtime system to check cache
  // validity or get the property names in a fixed array.

  // Check that there are no elements. Register ecx contains the
  // current JS object we've reached through the prototype chain.
  __ cmp(FieldOperand(ecx, JSObject::kElementsOffset),
         isolate()->factory()->empty_fixed_array());
  __ j(not_equal, &call_runtime);

  // Check that instance descriptors are not empty so that we can
  // check for an enum cache. Leave the map in ebx for the subsequent
  // prototype load.
  __ mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ mov(edx, FieldOperand(ebx, Map::kInstanceDescriptorsOrBitField3Offset));
  __ JumpIfSmi(edx, &call_runtime);

  // Check that there is an enum cache in the non-empty instance
  // descriptors (edx). This is the case if the next enumeration
  // index field does not contain a smi.
  __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumerationIndexOffset));
  __ JumpIfSmi(edx, &call_runtime);

  // For all objects but the receiver, check that the cache is empty.
  Label check_prototype;
  __ j(equal, &check_prototype, Label::kNear);
  __ mov(edx, FieldOperand(edx, DescriptorArray::kEnumCacheBridgeCacheOffset));
  __ cmp(edx, isolate()->factory()->empty_fixed_array());
  __ j(not_equal, &call_runtime);

  // Load the prototype from the map and loop if non-null.
  __ bind(&check_prototype);
  __ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
  __ cmp(ecx, isolate()->factory()->null_value());
  __ j(not_equal, &next);

  // The enum cache is valid. Load the map of the object being
  // iterated over and use the cache for the iteration.
  __ mov(eax, FieldOperand(eax, HeapObject::kMapOffset));
  __ jmp(&use_cache, Label::kNear);

  // Get the set of properties to enumerate.
  __ bind(&call_runtime);
  __ push(eax);  // Duplicate the enumerable object on the stack.
  __ CallRuntime(Runtime::kGetPropertyNamesFast, 1);

  // If we got a map from the runtime call, we can do a fast
  // modification check. Otherwise, we got a fixed array, and we have
  // to do a slow check.
  __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
         isolate()->factory()->meta_map());
  __ j(not_equal, &fixed_array, Label::kNear);

  // We got a map in register eax. Get the enumeration cache from it.
  __ LoadInstanceDescriptors(eax, ecx);
  __ mov(ecx, FieldOperand(ecx, DescriptorArray::kEnumerationIndexOffset));
  __ mov(edx, FieldOperand(ecx, DescriptorArray::kEnumCacheBridgeCacheOffset));

  // Setup the four remaining stack slots.
  __ push(eax);  // Map.
  __ push(edx);  // Enumeration cache.
  __ mov(eax, FieldOperand(edx, FixedArray::kLengthOffset));
  __ push(eax);  // Enumeration cache length (as smi).
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.

  // We got a fixed array in register eax. Iterate through that.
  __ bind(&fixed_array);
  __ mov(ebx, Immediate(Smi::FromInt(1)));  // Smi indicates slow check.
  __ mov(ecx, Operand(esp, 0 * kPointerSize));  // Get enumerated object.
  STATIC_ASSERT(FIRST_JS_PROXY_TYPE == FIRST_SPEC_OBJECT_TYPE);
  __ CmpObjectType(ecx, LAST_JS_PROXY_TYPE, ecx);
  __ j(above, &non_proxy);
  __ mov(ebx, Immediate(Smi::FromInt(0)));  // Zero indicates proxy.
  __ push(eax);  // Array.
  __ mov(eax, FieldOperand(eax, FixedArray::kLengthOffset));
  __ push(eax);  // Fixed array length (as smi).
  __ push(Immediate(Smi::FromInt(0)));  // Initial index.
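  // From here on the loop works off five stack slots (top to bottom): the
  // current index (smi), the length (smi), the FixedArray of keys, the
  // expected map (or a smi marker in the slow/proxy case), and the
  // enumerable object itself; the condition check and body below address
  // them via esp-relative operands.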

  // Generate code for doing the condition check.
  __ mov(eax, Operand(esp, 0 * kPointerSize));  // Get the current index.
  __ cmp(eax, Operand(esp, 1 * kPointerSize));  // Compare to the array length.
  __ j(above_equal, loop_statement.break_label());

  // Get the current entry of the array into register ebx.
  __ mov(ebx, Operand(esp, 2 * kPointerSize));
  __ mov(ebx, FieldOperand(ebx, eax, times_2, FixedArray::kHeaderSize));

  // Get the expected map from the stack or a smi in the
  // permanent slow case into register edx.
  __ mov(edx, Operand(esp, 3 * kPointerSize));

  // Check if the expected map still matches that of the enumerable.
  // If not, we may have to filter the key.
  __ mov(ecx, Operand(esp, 4 * kPointerSize));
  __ cmp(edx, FieldOperand(ecx, HeapObject::kMapOffset));
  __ j(equal, &update_each, Label::kNear);

  // For proxies, no filtering is done.
  // TODO(rossberg): What if only a prototype is a proxy? Not specified yet.
  ASSERT(Smi::FromInt(0) == 0);
  __ j(zero, &update_each);

  // Convert the entry to a string or null if it isn't a property
  // anymore. If the property has been removed while iterating, we
  // just skip it.
  __ push(ecx);  // Enumerable.
  __ push(ebx);  // Current entry.
  __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
  __ j(equal, loop_statement.continue_label());

  // Update the 'each' property or variable from the possibly filtered
  // entry in register ebx.
  __ bind(&update_each);
  __ mov(result_register(), ebx);
  // Perform the assignment as if via '='.
  { EffectContext context(this);
    EmitAssignment(stmt->each(), stmt->AssignmentId());

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Generate code for going to the next element by incrementing the
  // index (smi) stored on top of the stack.
  __ bind(loop_statement.continue_label());
  __ add(Operand(esp, 0 * kPointerSize), Immediate(Smi::FromInt(1)));
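  // Adding Smi::FromInt(1) bumps the smi-encoded index: on ia32 a smi is the
  // integer shifted left by one, so this add increments the untagged index
  // by exactly one without needing to untag and retag.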
  EmitStackCheck(stmt);

  // Remove the pointers stored on the stack.
  __ bind(loop_statement.break_label());
  __ add(esp, Immediate(5 * kPointerSize));

  // Exit and decrement the loop depth.
  decrement_loop_depth();


void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
  // Use the fast case closure allocation code that allocates in new
  // space for nested functions that don't need literals cloning. If
  // we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt &&
      !FLAG_prepare_always_opt &&
      scope()->is_function_scope() &&
      info->num_literals() == 0) {
    FastNewClosureStub stub(info->strict_mode_flag());
    __ push(Immediate(info));
    __ push(Immediate(info));
    __ push(Immediate(pretenure
                      ? isolate()->factory()->true_value()
                      : isolate()->factory()->false_value()));
    __ CallRuntime(Runtime::kNewClosure, 3);
  context()->Plug(eax);


void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);


void FullCodeGenerator::EmitLoadGlobalCheckExtensions(Variable* var,
                                                      TypeofState typeof_state,
  Register context = esi;
  Register temp = edx;

    if (s->num_heap_slots() > 0) {
      if (s->calls_non_strict_eval()) {
        // Check that extension is NULL.
        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
        __ j(not_equal, slow);
      // Load next context in chain.
      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering esi.
    // If no outer scope calls eval, we do not need to check more
    // context extensions. If we have reached an eval scope, we check
    // all extensions from this point.
    if (!s->outer_scope_calls_non_strict_eval() || s->is_eval_scope()) break;
    s = s->outer_scope();

  if (s != NULL && s->is_eval_scope()) {
    // Loop up the context chain. There is no frame effect so it is
    // safe to use raw labels here.
    if (!context.is(temp)) {
      __ mov(temp, context);
    // Terminate at global context.
    __ cmp(FieldOperand(temp, HeapObject::kMapOffset),
           Immediate(isolate()->factory()->global_context_map()));
    __ j(equal, &fast, Label::kNear);
    // Check that extension is NULL.
    __ cmp(ContextOperand(temp, Context::EXTENSION_INDEX), Immediate(0));
    __ j(not_equal, slow);
    // Load next context in chain.
    __ mov(temp, ContextOperand(temp, Context::PREVIOUS_INDEX));

  // All extension objects were empty and it is safe to use a global
  // load IC call.
  __ mov(eax, var->is_qml_global() ? QmlGlobalObjectOperand()
                                   : GlobalObjectOperand());
  __ mov(ecx, var->name());
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
      ? RelocInfo::CODE_TARGET
      : RelocInfo::CODE_TARGET_CONTEXT;
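  // Contextual (CODE_TARGET_CONTEXT) loads throw a ReferenceError when the
  // global property is missing, which is what a plain variable reference
  // needs; inside typeof the plain CODE_TARGET mode is used so an undeclared
  // variable yields undefined instead of throwing.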


MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(Variable* var,
  ASSERT(var->IsContextSlot());
  Register context = esi;
  Register temp = ebx;

  for (Scope* s = scope(); s != var->scope(); s = s->outer_scope()) {
    if (s->num_heap_slots() > 0) {
      if (s->calls_non_strict_eval()) {
        // Check that extension is NULL.
        __ cmp(ContextOperand(context, Context::EXTENSION_INDEX),
        __ j(not_equal, slow);
      __ mov(temp, ContextOperand(context, Context::PREVIOUS_INDEX));
      // Walk the rest of the chain without clobbering esi.

  // Check that last extension is NULL.
  __ cmp(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0));
  __ j(not_equal, slow);

  // This function is used only for loads, not stores, so it's safe to
  // return an esi-based operand (the write barrier cannot be allowed to
  // destroy the esi register).
  return ContextOperand(context, var->index());


void FullCodeGenerator::EmitDynamicLookupFastCase(Variable* var,
                                                  TypeofState typeof_state,
  // Generate fast-case code for variables that might be shadowed by
  // eval-introduced variables. Eval is used a lot without
  // introducing variables. In those cases, we do not want to
  // perform a runtime call for all variables in the scope
  // containing the eval.
  if (var->mode() == DYNAMIC_GLOBAL) {
    EmitLoadGlobalCheckExtensions(var, typeof_state, slow);
  } else if (var->mode() == DYNAMIC_LOCAL) {
    Variable* local = var->local_if_not_shadowed();
    __ mov(eax, ContextSlotOperandCheckExtensions(local, slow));
    if (local->mode() == CONST ||
        local->mode() == CONST_HARMONY ||
        local->mode() == LET) {
      __ cmp(eax, isolate()->factory()->the_hole_value());
      __ j(not_equal, done);
      if (local->mode() == CONST) {
        __ mov(eax, isolate()->factory()->undefined_value());
      } else {  // LET || CONST_HARMONY
        __ push(Immediate(var->name()));
        __ CallRuntime(Runtime::kThrowReferenceError, 1);


void FullCodeGenerator::EmitVariableLoad(VariableProxy* proxy) {
  // Record position before possible IC call.
  SetSourcePosition(proxy->position());
  Variable* var = proxy->var();

  // Three cases: global variables, lookup variables, and all other types of
  // variables.
  switch (var->location()) {
    case Variable::UNALLOCATED: {
      Comment cmnt(masm_, "Global variable");
      // Use inline caching. Variable name is passed in ecx and the global
      // object in eax.
      __ mov(eax, var->is_qml_global() ? QmlGlobalObjectOperand()
                                       : GlobalObjectOperand());
      __ mov(ecx, var->name());
      Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
      __ call(ic, RelocInfo::CODE_TARGET_CONTEXT);
      context()->Plug(eax);

    case Variable::PARAMETER:
    case Variable::LOCAL:
    case Variable::CONTEXT: {
      Comment cmnt(masm_, var->IsContextSlot()
                              ? "Context variable"
                              : "Stack variable");
      if (!var->binding_needs_init()) {
        context()->Plug(var);
        // Let and const need a read barrier.
        __ cmp(eax, isolate()->factory()->the_hole_value());
        __ j(not_equal, &done, Label::kNear);
        if (var->mode() == LET || var->mode() == CONST_HARMONY) {
          // Throw a reference error when using an uninitialized let/const
          // binding in harmony mode.
          __ push(Immediate(var->name()));
          __ CallRuntime(Runtime::kThrowReferenceError, 1);
          // Uninitialized const bindings outside of harmony mode are unholed.
          ASSERT(var->mode() == CONST);
          __ mov(eax, isolate()->factory()->undefined_value());
      context()->Plug(eax);

    case Variable::LOOKUP: {
      // Generate code for loading from variables potentially shadowed
      // by eval-introduced variables.
      EmitDynamicLookupFastCase(var, NOT_INSIDE_TYPEOF, &slow, &done);
      Comment cmnt(masm_, "Lookup variable");
      __ push(esi);  // Context.
      __ push(Immediate(var->name()));
      __ CallRuntime(Runtime::kLoadContextSlot, 2);
      context()->Plug(eax);


void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  // Registers will be used as follows:
  // edi = JS function.
  // ecx = literals array.
  // ebx = regexp literal.
  // eax = regexp literal clone.
  __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset));
  int literal_offset =
      FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
  __ mov(ebx, FieldOperand(ecx, literal_offset));
  __ cmp(ebx, isolate()->factory()->undefined_value());
  __ j(not_equal, &materialized, Label::kNear);

  // Create regexp literal using runtime function.
  // Result will be in eax.
  __ push(Immediate(Smi::FromInt(expr->literal_index())));
  __ push(Immediate(expr->pattern()));
  __ push(Immediate(expr->flags()));
  __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);

  __ bind(&materialized);
  int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
  Label allocated, runtime_allocate;
  __ AllocateInNewSpace(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);

  __ bind(&runtime_allocate);
  __ push(Immediate(Smi::FromInt(size)));
  __ CallRuntime(Runtime::kAllocateInNewSpace, 1);

  __ bind(&allocated);
  // Copy the content into the newly allocated memory.
  // (Unroll copy loop once for better throughput).
  for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
    __ mov(edx, FieldOperand(ebx, i));
    __ mov(ecx, FieldOperand(ebx, i + kPointerSize));
    __ mov(FieldOperand(eax, i), edx);
    __ mov(FieldOperand(eax, i + kPointerSize), ecx);
  if ((size % (2 * kPointerSize)) != 0) {
    __ mov(edx, FieldOperand(ebx, size - kPointerSize));
    __ mov(FieldOperand(eax, size - kPointerSize), edx);
  context()->Plug(eax);


void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  Comment cmnt(masm_, "[ ObjectLiteral");
  __ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ push(FieldOperand(edi, JSFunction::kLiteralsOffset));
  __ push(Immediate(Smi::FromInt(expr->literal_index())));
  __ push(Immediate(expr->constant_properties()));
  int flags = expr->fast_elements()
      ? ObjectLiteral::kFastElements
      : ObjectLiteral::kNoFlags;
  flags |= expr->has_function()
      ? ObjectLiteral::kHasFunction
      : ObjectLiteral::kNoFlags;
  __ push(Immediate(Smi::FromInt(flags)));
  if (expr->depth() > 1) {
    __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
    __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);

  // If result_saved is true the result is on top of the stack. If
  // result_saved is false the result is in eax.
  bool result_saved = false;

  // Mark all computed expressions that are bound to a key that
  // is shadowed by a later occurrence of the same key. For the
  // marked expressions, no store code is emitted.
  expr->CalculateEmitStore();

  for (int i = 0; i < expr->properties()->length(); i++) {
    ObjectLiteral::Property* property = expr->properties()->at(i);
    if (property->IsCompileTimeValue()) continue;

    Literal* key = property->key();
    Expression* value = property->value();
    if (!result_saved) {
      __ push(eax);  // Save result on the stack.
      result_saved = true;
    switch (property->kind()) {
      case ObjectLiteral::Property::MATERIALIZED_LITERAL:
        ASSERT(!CompileTimeValue::IsCompileTimeValue(value));
      case ObjectLiteral::Property::COMPUTED:
        if (key->handle()->IsSymbol()) {
          if (property->emit_store()) {
            VisitForAccumulatorValue(value);
            __ mov(ecx, Immediate(key->handle()));
            __ mov(edx, Operand(esp, 0));
            Handle<Code> ic = is_strict_mode()
                ? isolate()->builtins()->StoreIC_Initialize_Strict()
                : isolate()->builtins()->StoreIC_Initialize();
            __ call(ic, RelocInfo::CODE_TARGET, key->id());
            PrepareForBailoutForId(key->id(), NO_REGISTERS);
            VisitForEffect(value);
      case ObjectLiteral::Property::PROTOTYPE:
        __ push(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(key);
        VisitForStackValue(value);
        if (property->emit_store()) {
          __ push(Immediate(Smi::FromInt(NONE)));  // PropertyAttributes
          __ CallRuntime(Runtime::kSetProperty, 4);
      case ObjectLiteral::Property::SETTER:
      case ObjectLiteral::Property::GETTER:
        __ push(Operand(esp, 0));  // Duplicate receiver.
        VisitForStackValue(key);
        __ push(Immediate(property->kind() == ObjectLiteral::Property::SETTER ?
        VisitForStackValue(value);
        __ CallRuntime(Runtime::kDefineAccessor, 4);
      default: UNREACHABLE();

  if (expr->has_function()) {
    ASSERT(result_saved);
    __ push(Operand(esp, 0));
    __ CallRuntime(Runtime::kToFastProperties, 1);

    context()->PlugTOS();
    context()->Plug(eax);


void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  Comment cmnt(masm_, "[ ArrayLiteral");

  ZoneList<Expression*>* subexprs = expr->values();
  int length = subexprs->length();
  Handle<FixedArray> constant_elements = expr->constant_elements();
  ASSERT_EQ(2, constant_elements->length());
  ElementsKind constant_elements_kind =
      static_cast<ElementsKind>(Smi::cast(constant_elements->get(0))->value());
  Handle<FixedArrayBase> constant_elements_values(
      FixedArrayBase::cast(constant_elements->get(1)));

  __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
  __ push(Immediate(Smi::FromInt(expr->literal_index())));
  __ push(Immediate(constant_elements));
  if (constant_elements_values->map() ==
      isolate()->heap()->fixed_cow_array_map()) {
    ASSERT(expr->depth() == 1);
    FastCloneShallowArrayStub stub(
        FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
    __ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
  } else if (expr->depth() > 1) {
    __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
  } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
    __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
    ASSERT(constant_elements_kind == FAST_ELEMENTS ||
           constant_elements_kind == FAST_SMI_ONLY_ELEMENTS ||
           FLAG_smi_only_arrays);
    FastCloneShallowArrayStub::Mode mode =
        constant_elements_kind == FAST_DOUBLE_ELEMENTS
            ? FastCloneShallowArrayStub::CLONE_DOUBLE_ELEMENTS
            : FastCloneShallowArrayStub::CLONE_ELEMENTS;
    FastCloneShallowArrayStub stub(mode, length);

  bool result_saved = false;  // Is the result saved to the stack?

  // Emit code to evaluate all the non-constant subexpressions and to store
  // them into the newly cloned array.
  for (int i = 0; i < length; i++) {
    Expression* subexpr = subexprs->at(i);
    // If the subexpression is a literal or a simple materialized literal it
    // is already set in the cloned array.
    if (subexpr->AsLiteral() != NULL ||
        CompileTimeValue::IsCompileTimeValue(subexpr)) {

    if (!result_saved) {
      result_saved = true;
    VisitForAccumulatorValue(subexpr);

    // Store the subexpression value in the array's elements.
    __ mov(ebx, Operand(esp, 0));  // Copy of array literal.
    __ mov(edi, FieldOperand(ebx, JSObject::kMapOffset));
    __ mov(ecx, Immediate(Smi::FromInt(i)));
    __ mov(edx, Immediate(Smi::FromInt(expr->literal_index())));
    StoreArrayLiteralElementStub stub;

    PrepareForBailoutForId(expr->GetIdForElement(i), NO_REGISTERS);

    context()->PlugTOS();
    context()->Plug(eax);


void FullCodeGenerator::VisitAssignment(Assignment* expr) {
  Comment cmnt(masm_, "[ Assignment");
  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
  // on the left-hand side.
  if (!expr->target()->IsValidLeftHandSide()) {
    VisitForEffect(expr->target());

  // Left-hand side can only be a property, a global or a (parameter or local)
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* property = expr->target()->AsProperty();
  if (property != NULL) {
    assign_type = (property->key()->IsPropertyName())

  // Evaluate LHS expression.
  switch (assign_type) {
      // Nothing to do here.
    case NAMED_PROPERTY:
      if (expr->is_compound()) {
        // We need the receiver both on the stack and in the accumulator.
        VisitForAccumulatorValue(property->obj());
        __ push(result_register());
        VisitForStackValue(property->obj());
    case KEYED_PROPERTY: {
      if (expr->is_compound()) {
        VisitForStackValue(property->obj());
        VisitForAccumulatorValue(property->key());
        __ mov(edx, Operand(esp, 0));
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());

  // For compound assignments we need another deoptimization point after the
  // variable/property load.
  if (expr->is_compound()) {
    AccumulatorValueContext result_context(this);
    { AccumulatorValueContext left_operand_context(this);
      switch (assign_type) {
          EmitVariableLoad(expr->target()->AsVariableProxy());
          PrepareForBailout(expr->target(), TOS_REG);
        case NAMED_PROPERTY:
          EmitNamedPropertyLoad(property);
          PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
        case KEYED_PROPERTY:
          EmitKeyedPropertyLoad(property);
          PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);

    Token::Value op = expr->binary_op();
    __ push(eax);  // Left operand goes on the stack.
    VisitForAccumulatorValue(expr->value());

    OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
    SetSourcePosition(expr->position() + 1);
    if (ShouldInlineSmiCase(op)) {
      EmitInlineSmiBinaryOp(expr->binary_operation(),
      EmitBinaryOp(expr->binary_operation(), op, mode);

    // Deoptimization point in case the binary operation may have side effects.
    PrepareForBailout(expr->binary_operation(), TOS_REG);
    VisitForAccumulatorValue(expr->value());

  // Record source position before possible IC call.
  SetSourcePosition(expr->position());

  switch (assign_type) {
      EmitVariableAssignment(expr->target()->AsVariableProxy()->var(),
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      context()->Plug(eax);
    case NAMED_PROPERTY:
      EmitNamedPropertyAssignment(expr);
    case KEYED_PROPERTY:
      EmitKeyedPropertyAssignment(expr);


void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Literal* key = prop->key()->AsLiteral();
  ASSERT(!key->handle()->IsSmi());
  __ mov(ecx, Immediate(key->handle()));
  Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
  __ call(ic, RelocInfo::CODE_TARGET, prop->id());


void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
  SetSourcePosition(prop->position());
  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
  __ call(ic, RelocInfo::CODE_TARGET, prop->id());
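  // KeyedLoadIC expects the receiver in edx and the key in eax (the
  // accumulator); both are set up by the caller before this point.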
1640 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
1644 Expression* right) {
1645 // Do combined smi check of the operands. Left operand is on the
1646 // stack. Right operand is in eax.
1647 Label smi_case, done, stub_call;
1651 JumpPatchSite patch_site(masm_);
1652 patch_site.EmitJumpIfSmi(eax, &smi_case, Label::kNear);
1654 __ bind(&stub_call);
1656 BinaryOpStub stub(op, mode);
1657 __ call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
1658 patch_site.EmitPatchInfo();
1659 __ jmp(&done, Label::kNear);
1663 __ mov(eax, edx); // Copy left operand in case of a stub call.
1669 __ sar_cl(eax); // No checks of result necessary
1677 // Check that the *signed* result fits in a smi.
1678 __ cmp(eax, 0xc0000000);
1679 __ j(positive, &result_ok);
1682 __ bind(&result_ok);
1691 __ test(eax, Immediate(0xc0000000));
1692 __ j(zero, &result_ok);
1695 __ bind(&result_ok);
1701 __ j(overflow, &stub_call);
1705 __ j(overflow, &stub_call);
1710 __ j(overflow, &stub_call);
1712 __ j(not_zero, &done, Label::kNear);
1715 __ j(negative, &stub_call);
1721 case Token::BIT_AND:
1724 case Token::BIT_XOR:
1732 context()->Plug(eax);
1736 void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
1738 OverwriteMode mode) {
1740 BinaryOpStub stub(op, mode);
1741 JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
1742 __ call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
1743 patch_site.EmitPatchInfo();
1744 context()->Plug(eax);
1748 void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
1749 // Invalid left-hand sides are rewritten to have a 'throw
1750 // ReferenceError' on the left-hand side.
1751 if (!expr->IsValidLeftHandSide()) {
1752 VisitForEffect(expr);
1756 // Left-hand side can only be a property, a global or a (parameter or local)
1758 enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
1759 LhsKind assign_type = VARIABLE;
1760 Property* prop = expr->AsProperty();
1762 assign_type = (prop->key()->IsPropertyName())
1767 switch (assign_type) {
1769 Variable* var = expr->AsVariableProxy()->var();
1770 EffectContext context(this);
1771 EmitVariableAssignment(var, Token::ASSIGN);
1774 case NAMED_PROPERTY: {
1775 __ push(eax); // Preserve value.
1776 VisitForAccumulatorValue(prop->obj());
1778 __ pop(eax); // Restore value.
1779 __ mov(ecx, prop->key()->AsLiteral()->handle());
1780 Handle<Code> ic = is_strict_mode()
1781 ? isolate()->builtins()->StoreIC_Initialize_Strict()
1782 : isolate()->builtins()->StoreIC_Initialize();
1786 case KEYED_PROPERTY: {
1787 __ push(eax); // Preserve value.
1788 VisitForStackValue(prop->obj());
1789 VisitForAccumulatorValue(prop->key());
1792 __ pop(eax); // Restore value.
1793 Handle<Code> ic = is_strict_mode()
1794 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
1795 : isolate()->builtins()->KeyedStoreIC_Initialize();
1800 PrepareForBailoutForId(bailout_ast_id, TOS_REG);
1801 context()->Plug(eax);
1805 void FullCodeGenerator::EmitVariableAssignment(Variable* var,
1807 if (var->IsUnallocated()) {
1808 // Global var, const, or let.
1809 __ mov(ecx, var->name());
1810 __ mov(edx, var->is_qml_global()?QmlGlobalObjectOperand():GlobalObjectOperand());
1811 Handle<Code> ic = is_strict_mode()
1812 ? isolate()->builtins()->StoreIC_Initialize_Strict()
1813 : isolate()->builtins()->StoreIC_Initialize();
1814 __ call(ic, RelocInfo::CODE_TARGET_CONTEXT);
1816 } else if (op == Token::INIT_CONST) {
1817 // Const initializers need a write barrier.
1818 ASSERT(!var->IsParameter()); // No const parameters.
1819 if (var->IsStackLocal()) {
1821 __ mov(edx, StackOperand(var));
1822 __ cmp(edx, isolate()->factory()->the_hole_value());
1823 __ j(not_equal, &skip);
1824 __ mov(StackOperand(var), eax);
1827 ASSERT(var->IsContextSlot() || var->IsLookupSlot());
1828 // Like var declarations, const declarations are hoisted to function
1829 // scope. However, unlike var initializers, const initializers are
1830 // able to drill a hole to that function context, even from inside a
1831 // 'with' context. We thus bypass the normal static scope lookup for
1832 // var->IsContextSlot().
1835 __ push(Immediate(var->name()));
1836 __ CallRuntime(Runtime::kInitializeConstContextSlot, 3);
1839 } else if (var->mode() == LET && op != Token::INIT_LET) {
1840 // Non-initializing assignment to let variable needs a write barrier.
1841 if (var->IsLookupSlot()) {
1842 __ push(eax); // Value.
1843 __ push(esi); // Context.
1844 __ push(Immediate(var->name()));
1845 __ push(Immediate(Smi::FromInt(strict_mode_flag())));
1846 __ CallRuntime(Runtime::kStoreContextSlot, 4);
1848 ASSERT(var->IsStackAllocated() || var->IsContextSlot());
1850 MemOperand location = VarOperand(var, ecx);
1851 __ mov(edx, location);
1852 __ cmp(edx, isolate()->factory()->the_hole_value());
1853 __ j(not_equal, &assign, Label::kNear);
1854 __ push(Immediate(var->name()));
1855 __ CallRuntime(Runtime::kThrowReferenceError, 1);
1857 __ mov(location, eax);
1858 if (var->IsContextSlot()) {
1860 int offset = Context::SlotOffset(var->index());
1861 __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
1865 } else if (!var->is_const_mode() || op == Token::INIT_CONST_HARMONY) {
1866 // Assignment to var or initializing assignment to let/const
1868 if (var->IsStackAllocated() || var->IsContextSlot()) {
1869 MemOperand location = VarOperand(var, ecx);
1870 if (FLAG_debug_code && op == Token::INIT_LET) {
1871 // Check for an uninitialized let binding.
1872 __ mov(edx, location);
1873 __ cmp(edx, isolate()->factory()->the_hole_value());
1874 __ Check(equal, "Let binding re-initialization.");
1876 // Perform the assignment.
1877 __ mov(location, eax);
1878 if (var->IsContextSlot()) {
1880 int offset = Context::SlotOffset(var->index());
1881 __ RecordWriteContextSlot(ecx, offset, edx, ebx, kDontSaveFPRegs);
1884 ASSERT(var->IsLookupSlot());
1885 __ push(eax); // Value.
1886 __ push(esi); // Context.
1887 __ push(Immediate(var->name()));
1888 __ push(Immediate(Smi::FromInt(strict_mode_flag())));
1889 __ CallRuntime(Runtime::kStoreContextSlot, 4);
1892 // Non-initializing assignments to consts are ignored.
1896 void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
1897 // Assignment to a property, using a named store IC.
1898 Property* prop = expr->target()->AsProperty();
1899 ASSERT(prop != NULL);
1900 ASSERT(prop->key()->AsLiteral() != NULL);
1902 // If the assignment starts a block of assignments to the same object,
1903 // change to slow case to avoid the quadratic behavior of repeatedly
1904 // adding fast properties.
1905 if (expr->starts_initialization_block()) {
1906 __ push(result_register());
1907 __ push(Operand(esp, kPointerSize)); // Receiver is now under value.
1908 __ CallRuntime(Runtime::kToSlowProperties, 1);
1909 __ pop(result_register());
1912 // Record source code position before IC call.
1913 SetSourcePosition(expr->position());
1914 __ mov(ecx, prop->key()->AsLiteral()->handle());
1915 if (expr->ends_initialization_block()) {
1916 __ mov(edx, Operand(esp, 0));
1920 Handle<Code> ic = is_strict_mode()
1921 ? isolate()->builtins()->StoreIC_Initialize_Strict()
1922 : isolate()->builtins()->StoreIC_Initialize();
1923 __ call(ic, RelocInfo::CODE_TARGET, expr->id());
1925 // If the assignment ends an initialization block, revert to fast case.
1926 if (expr->ends_initialization_block()) {
1927 __ push(eax); // Result of assignment, saved even if not needed.
1928 __ push(Operand(esp, kPointerSize)); // Receiver is under value.
1929 __ CallRuntime(Runtime::kToFastProperties, 1);
1933 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1934 context()->Plug(eax);
1938 void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
1939 // Assignment to a property, using a keyed store IC.
1941 // If the assignment starts a block of assignments to the same object,
1942 // change to slow case to avoid the quadratic behavior of repeatedly
1943 // adding fast properties.
1944 if (expr->starts_initialization_block()) {
1945 __ push(result_register());
1946 // Receiver is now under the key and value.
1947 __ push(Operand(esp, 2 * kPointerSize));
1948 __ CallRuntime(Runtime::kToSlowProperties, 1);
1949 __ pop(result_register());
1953 if (expr->ends_initialization_block()) {
1954 __ mov(edx, Operand(esp, 0)); // Leave receiver on the stack for later.
1958 // Record source code position before IC call.
1959 SetSourcePosition(expr->position());
1960 Handle<Code> ic = is_strict_mode()
1961 ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
1962 : isolate()->builtins()->KeyedStoreIC_Initialize();
1963 __ call(ic, RelocInfo::CODE_TARGET, expr->id());
1965 // If the assignment ends an initialization block, revert to fast case.
1966 if (expr->ends_initialization_block()) {
1968 __ push(eax); // Result of assignment, saved even if not needed.
1970 __ CallRuntime(Runtime::kToFastProperties, 1);
1974 PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
1975 context()->Plug(eax);
1979 void FullCodeGenerator::VisitProperty(Property* expr) {
1980 Comment cmnt(masm_, "[ Property");
1981 Expression* key = expr->key();
1983 if (key->IsPropertyName()) {
1984 VisitForAccumulatorValue(expr->obj());
1985 EmitNamedPropertyLoad(expr);
1986 context()->Plug(eax);
  } else {
    VisitForStackValue(expr->obj());
1989 VisitForAccumulatorValue(expr->key());
1991 EmitKeyedPropertyLoad(expr);
1992 context()->Plug(eax);
1997 void FullCodeGenerator::EmitCallWithIC(Call* expr,
1998 Handle<Object> name,
1999 RelocInfo::Mode mode) {
2000 // Code common for calls using the IC.
2001 ZoneList<Expression*>* args = expr->arguments();
2002 int arg_count = args->length();
2003 { PreservePositionScope scope(masm()->positions_recorder());
2004 for (int i = 0; i < arg_count; i++) {
2005 VisitForStackValue(args->at(i));
2007 __ Set(ecx, Immediate(name));
2009 // Record source position of the IC call.
2010 SetSourcePosition(expr->position());
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
2013 __ call(ic, mode, expr->id());
2014 RecordJSReturnSite(expr);
2015 // Restore context register.
2016 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2017 context()->Plug(eax);
void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
                                            Expression* key) {
  // Load the key.
2024 VisitForAccumulatorValue(key);
2026 // Swap the name of the function and the receiver on the stack to follow
  // the calling convention for call ICs.
  __ pop(ecx);
  __ push(eax);
  __ push(ecx);
2032 // Load the arguments.
2033 ZoneList<Expression*>* args = expr->arguments();
2034 int arg_count = args->length();
2035 { PreservePositionScope scope(masm()->positions_recorder());
2036 for (int i = 0; i < arg_count; i++) {
2037 VisitForStackValue(args->at(i));
2040 // Record source position of the IC call.
2041 SetSourcePosition(expr->position());
  Handle<Code> ic =
      isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count);
2044 __ mov(ecx, Operand(esp, (arg_count + 1) * kPointerSize)); // Key.
2045 __ call(ic, RelocInfo::CODE_TARGET, expr->id());
2046 RecordJSReturnSite(expr);
2047 // Restore context register.
2048 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2049 context()->DropAndPlug(1, eax); // Drop the key still on the stack.
2053 void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
2054 // Code common for calls using the call stub.
2055 ZoneList<Expression*>* args = expr->arguments();
2056 int arg_count = args->length();
2057 { PreservePositionScope scope(masm()->positions_recorder());
2058 for (int i = 0; i < arg_count; i++) {
2059 VisitForStackValue(args->at(i));
2062 // Record source position for debugger.
2063 SetSourcePosition(expr->position());
2065 // Record call targets in unoptimized code, but not in the snapshot.
2066 // TODO(1789): Reenable temporarily disabled recording CallFunctionStub
2067 // when the issue is fixed.
2068 bool record_call_target = false && !Serializer::enabled();
2069 if (record_call_target) {
2070 flags = static_cast<CallFunctionFlags>(flags | RECORD_CALL_TARGET);
2072 CallFunctionStub stub(arg_count, flags);
2073 __ CallStub(&stub, expr->id());
2074 if (record_call_target) {
2075 // There is a one element cache in the instruction stream.
2077 int return_site_offset = masm()->pc_offset();
2079 Handle<Object> uninitialized =
2080 CallFunctionStub::UninitializedSentinel(isolate());
2081 Handle<JSGlobalPropertyCell> cell =
2082 isolate()->factory()->NewJSGlobalPropertyCell(uninitialized);
2083 __ test(eax, Immediate(cell));
    // Patching code in the stub assumes the opcode is 1 byte and there is
    // a word for a pointer in the operand.
2086 ASSERT(masm()->pc_offset() - return_site_offset >= 1 + kPointerSize);
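    // The test instruction above is not used for its flags; it effectively
    // serves as a patchable site that embeds the property cell in the
    // instruction stream so the stub can find and update the one-element
    // cache mentioned above.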
2089 RecordJSReturnSite(expr);
2090 // Restore context register.
2091 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2092 context()->DropAndPlug(1, eax);
2096 void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
2097 // Push copy of the first argument or undefined if it doesn't exist.
2098 if (arg_count > 0) {
2099 __ push(Operand(esp, arg_count * kPointerSize));
  } else {
    __ push(Immediate(isolate()->factory()->undefined_value()));
  }
2104 // Push the receiver of the enclosing function.
2105 __ push(Operand(ebp, (2 + info_->scope()->num_parameters()) * kPointerSize));
2107 // Push the strict mode flag. In harmony mode every eval call
2108 // is a strict mode eval call.
2109 StrictModeFlag strict_mode =
2110 FLAG_harmony_scoping ? kStrictMode : strict_mode_flag();
2111 __ push(Immediate(Smi::FromInt(strict_mode)));
2113 // Push the qml mode flag
2114 __ push(Immediate(Smi::FromInt(is_qml_mode())));
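  // Together with the copy of the function pushed by the caller, the runtime
  // call below receives five arguments: the function, the first argument (or
  // undefined), the enclosing receiver, the strict-mode flag and the
  // QML-mode flag.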
2116 __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
2120 void FullCodeGenerator::VisitCall(Call* expr) {
2122 // We want to verify that RecordJSReturnSite gets called on all paths
2123 // through this function. Avoid early returns.
2124 expr->return_is_recorded_ = false;
2127 Comment cmnt(masm_, "[ Call");
2128 Expression* callee = expr->expression();
2129 VariableProxy* proxy = callee->AsVariableProxy();
2130 Property* property = callee->AsProperty();
2132 if (proxy != NULL && proxy->var()->is_possibly_eval()) {
2133 // In a call to eval, we first call %ResolvePossiblyDirectEval to
2134 // resolve the function we need to call and the receiver of the call.
2135 // Then we call the resolved function using the given arguments.
2136 ZoneList<Expression*>* args = expr->arguments();
2137 int arg_count = args->length();
2138 { PreservePositionScope pos_scope(masm()->positions_recorder());
2139 VisitForStackValue(callee);
2140 // Reserved receiver slot.
2141 __ push(Immediate(isolate()->factory()->undefined_value()));
2142 // Push the arguments.
2143 for (int i = 0; i < arg_count; i++) {
2144 VisitForStackValue(args->at(i));
2147 // Push a copy of the function (found below the arguments) and
2149 __ push(Operand(esp, (arg_count + 1) * kPointerSize));
2150 EmitResolvePossiblyDirectEval(arg_count);
2152 // The runtime call returns a pair of values in eax (function) and
2153 // edx (receiver). Touch up the stack with the right values.
2154 __ mov(Operand(esp, (arg_count + 0) * kPointerSize), edx);
2155 __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax);
2157 // Record source position for debugger.
2158 SetSourcePosition(expr->position());
2159 CallFunctionStub stub(arg_count, RECEIVER_MIGHT_BE_IMPLICIT);
2161 RecordJSReturnSite(expr);
2162 // Restore context register.
2163 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
2164 context()->DropAndPlug(1, eax);
2166 } else if (proxy != NULL && proxy->var()->IsUnallocated()) {
2167 // Push global object as receiver for the call IC.
    __ push(proxy->var()->is_qml_global() ? QmlGlobalObjectOperand()
                                          : GlobalObjectOperand());
2169 EmitCallWithIC(expr, proxy->name(), RelocInfo::CODE_TARGET_CONTEXT);
2171 } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
2172 // Call to a lookup slot (dynamically introduced variable).
2174 { PreservePositionScope scope(masm()->positions_recorder());
2175 // Generate code for loading from variables potentially shadowed by
2176 // eval-introduced variables.
2177 EmitDynamicLookupFastCase(proxy->var(), NOT_INSIDE_TYPEOF, &slow, &done);
2180 // Call the runtime to find the function to call (returned in eax) and
2181 // the object holding it (returned in edx).
2182 __ push(context_register());
2183 __ push(Immediate(proxy->name()));
2184 __ CallRuntime(Runtime::kLoadContextSlot, 2);
2185 __ push(eax); // Function.
2186 __ push(edx); // Receiver.
2188 // If fast case code has been generated, emit code to push the function
2189 // and receiver and have the slow path jump around this code.
2190 if (done.is_linked()) {
2192 __ jmp(&call, Label::kNear);
2196 // The receiver is implicitly the global receiver. Indicate this by
2197 // passing the hole to the call function stub.
2198 __ push(Immediate(isolate()->factory()->the_hole_value()));
2202 // The receiver is either the global receiver or an object found by
2203 // LoadContextSlot. That object could be the hole if the receiver is
2204 // implicitly the global object.
2205 EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT);
2207 } else if (property != NULL) {
2208 { PreservePositionScope scope(masm()->positions_recorder());
2209 VisitForStackValue(property->obj());
2211 if (property->key()->IsPropertyName()) {
2212 EmitCallWithIC(expr,
2213 property->key()->AsLiteral()->handle(),
2214 RelocInfo::CODE_TARGET);
2216 EmitKeyedCallWithIC(expr, property->key());
2220 // Call to an arbitrary expression not handled specially above.
2221 { PreservePositionScope scope(masm()->positions_recorder());
2222 VisitForStackValue(callee);
2224 // Load global receiver object.
2225 __ mov(ebx, GlobalObjectOperand());
2226 __ push(FieldOperand(ebx, GlobalObject::kGlobalReceiverOffset));
2227 // Emit function call.
2228 EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
2232 // RecordJSReturnSite should have been called.
2233 ASSERT(expr->return_is_recorded_);
2238 void FullCodeGenerator::VisitCallNew(CallNew* expr) {
2239 Comment cmnt(masm_, "[ CallNew");
2240 // According to ECMA-262, section 11.2.2, page 44, the function
  // expression in new calls must be evaluated before the
  // arguments.
2244 // Push constructor on the stack. If it's not a function it's used as
  // receiver for CALL_NON_FUNCTION, otherwise the value on the stack is
  // discarded.
2247 VisitForStackValue(expr->expression());
2249 // Push the arguments ("left-to-right") on the stack.
2250 ZoneList<Expression*>* args = expr->arguments();
2251 int arg_count = args->length();
2252 for (int i = 0; i < arg_count; i++) {
2253 VisitForStackValue(args->at(i));
2256 // Call the construct call builtin that handles allocation and
2257 // constructor invocation.
2258 SetSourcePosition(expr->position());
2260 // Load function and argument count into edi and eax.
2261 __ SafeSet(eax, Immediate(arg_count));
2262 __ mov(edi, Operand(esp, arg_count * kPointerSize));
2264 Handle<Code> construct_builtin =
2265 isolate()->builtins()->JSConstructCall();
2266 __ call(construct_builtin, RelocInfo::CONSTRUCT_CALL);
2267 context()->Plug(eax);
2271 void FullCodeGenerator::EmitIsSmi(CallRuntime* expr) {
2272 ZoneList<Expression*>* args = expr->arguments();
2273 ASSERT(args->length() == 1);
2275 VisitForAccumulatorValue(args->at(0));
2277 Label materialize_true, materialize_false;
2278 Label* if_true = NULL;
2279 Label* if_false = NULL;
2280 Label* fall_through = NULL;
2281 context()->PrepareTest(&materialize_true, &materialize_false,
2282 &if_true, &if_false, &fall_through);
2284 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
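  // Smis have the low tag bit clear (kSmiTag == 0), so the zero flag is set
  // exactly when eax holds a smi.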
2285 __ test(eax, Immediate(kSmiTagMask));
2286 Split(zero, if_true, if_false, fall_through);
2288 context()->Plug(if_true, if_false);
2292 void FullCodeGenerator::EmitIsNonNegativeSmi(CallRuntime* expr) {
2293 ZoneList<Expression*>* args = expr->arguments();
2294 ASSERT(args->length() == 1);
2296 VisitForAccumulatorValue(args->at(0));
2298 Label materialize_true, materialize_false;
2299 Label* if_true = NULL;
2300 Label* if_false = NULL;
2301 Label* fall_through = NULL;
2302 context()->PrepareTest(&materialize_true, &materialize_false,
2303 &if_true, &if_false, &fall_through);
2305 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
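  // Test the smi tag bit and the sign bit together: the result is zero only
  // for a value that is both a smi and non-negative.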
2306 __ test(eax, Immediate(kSmiTagMask | 0x80000000));
2307 Split(zero, if_true, if_false, fall_through);
2309 context()->Plug(if_true, if_false);
2313 void FullCodeGenerator::EmitIsObject(CallRuntime* expr) {
2314 ZoneList<Expression*>* args = expr->arguments();
2315 ASSERT(args->length() == 1);
2317 VisitForAccumulatorValue(args->at(0));
2319 Label materialize_true, materialize_false;
2320 Label* if_true = NULL;
2321 Label* if_false = NULL;
2322 Label* fall_through = NULL;
2323 context()->PrepareTest(&materialize_true, &materialize_false,
2324 &if_true, &if_false, &fall_through);
2326 __ JumpIfSmi(eax, if_false);
2327 __ cmp(eax, isolate()->factory()->null_value());
2328 __ j(equal, if_true);
2329 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
2330 // Undetectable objects behave like undefined when tested with typeof.
2331 __ movzx_b(ecx, FieldOperand(ebx, Map::kBitFieldOffset));
2332 __ test(ecx, Immediate(1 << Map::kIsUndetectable));
2333 __ j(not_zero, if_false);
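  // Finally require the instance type to be in the non-callable spec-object
  // range; callable objects such as functions do not count as 'object' for
  // this test.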
2334 __ movzx_b(ecx, FieldOperand(ebx, Map::kInstanceTypeOffset));
2335 __ cmp(ecx, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
2336 __ j(below, if_false);
2337 __ cmp(ecx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
2338 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2339 Split(below_equal, if_true, if_false, fall_through);
2341 context()->Plug(if_true, if_false);
2345 void FullCodeGenerator::EmitIsSpecObject(CallRuntime* expr) {
2346 ZoneList<Expression*>* args = expr->arguments();
2347 ASSERT(args->length() == 1);
2349 VisitForAccumulatorValue(args->at(0));
2351 Label materialize_true, materialize_false;
2352 Label* if_true = NULL;
2353 Label* if_false = NULL;
2354 Label* fall_through = NULL;
2355 context()->PrepareTest(&materialize_true, &materialize_false,
2356 &if_true, &if_false, &fall_through);
2358 __ JumpIfSmi(eax, if_false);
2359 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, ebx);
2360 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2361 Split(above_equal, if_true, if_false, fall_through);
2363 context()->Plug(if_true, if_false);
2367 void FullCodeGenerator::EmitIsUndetectableObject(CallRuntime* expr) {
2368 ZoneList<Expression*>* args = expr->arguments();
2369 ASSERT(args->length() == 1);
2371 VisitForAccumulatorValue(args->at(0));
2373 Label materialize_true, materialize_false;
2374 Label* if_true = NULL;
2375 Label* if_false = NULL;
2376 Label* fall_through = NULL;
2377 context()->PrepareTest(&materialize_true, &materialize_false,
2378 &if_true, &if_false, &fall_through);
2380 __ JumpIfSmi(eax, if_false);
2381 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
2382 __ movzx_b(ebx, FieldOperand(ebx, Map::kBitFieldOffset));
2383 __ test(ebx, Immediate(1 << Map::kIsUndetectable));
2384 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2385 Split(not_zero, if_true, if_false, fall_through);
2387 context()->Plug(if_true, if_false);
2391 void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
2392 CallRuntime* expr) {
2393 ZoneList<Expression*>* args = expr->arguments();
2394 ASSERT(args->length() == 1);
2396 VisitForAccumulatorValue(args->at(0));
2398 Label materialize_true, materialize_false;
2399 Label* if_true = NULL;
2400 Label* if_false = NULL;
2401 Label* fall_through = NULL;
2402 context()->PrepareTest(&materialize_true, &materialize_false,
2403 &if_true, &if_false, &fall_through);
2405 if (FLAG_debug_code) __ AbortIfSmi(eax);
  // Check whether this map has already been checked to be safe for default
  // valueOf.
2409 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
2410 __ test_b(FieldOperand(ebx, Map::kBitField2Offset),
2411 1 << Map::kStringWrapperSafeForDefaultValueOf);
2412 __ j(not_zero, if_true);
2414 // Check for fast case object. Return false for slow case objects.
2415 __ mov(ecx, FieldOperand(eax, JSObject::kPropertiesOffset));
2416 __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
2417 __ cmp(ecx, FACTORY->hash_table_map());
2418 __ j(equal, if_false);
2420 // Look for valueOf symbol in the descriptor array, and indicate false if
  // found. The type is not checked, so if it is a transition it is a false
  // negative.
2423 __ LoadInstanceDescriptors(ebx, ebx);
2424 __ mov(ecx, FieldOperand(ebx, FixedArray::kLengthOffset));
2425 // ebx: descriptor array
2426 // ecx: length of descriptor array
2427 // Calculate the end of the descriptor array.
2428 STATIC_ASSERT(kSmiTag == 0);
2429 STATIC_ASSERT(kSmiTagSize == 1);
2430 STATIC_ASSERT(kPointerSize == 4);
2431 __ lea(ecx, Operand(ebx, ecx, times_2, FixedArray::kHeaderSize));
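  // ecx holds the length as a smi (value << 1), so the times_2 scale turns it
  // into a byte offset of length * kPointerSize, per the STATIC_ASSERTs above.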
2432 // Calculate location of the first key name.
  __ add(ebx, Immediate(FixedArray::kHeaderSize +
                        DescriptorArray::kFirstIndex * kPointerSize));
2436 // Loop through all the keys in the descriptor array. If one of these is the
2437 // symbol valueOf the result is false.
2441 __ mov(edx, FieldOperand(ebx, 0));
2442 __ cmp(edx, FACTORY->value_of_symbol());
2443 __ j(equal, if_false);
2444 __ add(ebx, Immediate(kPointerSize));
2447 __ j(not_equal, &loop);
2449 // Reload map as register ebx was used as temporary above.
2450 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  // If a valueOf property is not found on the object, check that its
  // prototype is the unmodified String prototype. If not, the result is false.
2454 __ mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
2455 __ JumpIfSmi(ecx, if_false);
2456 __ mov(ecx, FieldOperand(ecx, HeapObject::kMapOffset));
2457 __ mov(edx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
  __ mov(edx,
         FieldOperand(edx, GlobalObject::kGlobalContextOffset));
  __ cmp(ecx,
         ContextOperand(edx,
                        Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
2463 __ j(not_equal, if_false);
2464 // Set the bit in the map to indicate that it has been checked safe for
2465 // default valueOf and set true result.
2466 __ or_(FieldOperand(ebx, Map::kBitField2Offset),
2467 Immediate(1 << Map::kStringWrapperSafeForDefaultValueOf));
2470 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2471 context()->Plug(if_true, if_false);
2475 void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
2476 ZoneList<Expression*>* args = expr->arguments();
2477 ASSERT(args->length() == 1);
2479 VisitForAccumulatorValue(args->at(0));
2481 Label materialize_true, materialize_false;
2482 Label* if_true = NULL;
2483 Label* if_false = NULL;
2484 Label* fall_through = NULL;
2485 context()->PrepareTest(&materialize_true, &materialize_false,
2486 &if_true, &if_false, &fall_through);
2488 __ JumpIfSmi(eax, if_false);
2489 __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
2490 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2491 Split(equal, if_true, if_false, fall_through);
2493 context()->Plug(if_true, if_false);
2497 void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
2498 ZoneList<Expression*>* args = expr->arguments();
2499 ASSERT(args->length() == 1);
2501 VisitForAccumulatorValue(args->at(0));
2503 Label materialize_true, materialize_false;
2504 Label* if_true = NULL;
2505 Label* if_false = NULL;
2506 Label* fall_through = NULL;
2507 context()->PrepareTest(&materialize_true, &materialize_false,
2508 &if_true, &if_false, &fall_through);
2510 __ JumpIfSmi(eax, if_false);
2511 __ CmpObjectType(eax, JS_ARRAY_TYPE, ebx);
2512 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2513 Split(equal, if_true, if_false, fall_through);
2515 context()->Plug(if_true, if_false);
2519 void FullCodeGenerator::EmitIsRegExp(CallRuntime* expr) {
2520 ZoneList<Expression*>* args = expr->arguments();
2521 ASSERT(args->length() == 1);
2523 VisitForAccumulatorValue(args->at(0));
2525 Label materialize_true, materialize_false;
2526 Label* if_true = NULL;
2527 Label* if_false = NULL;
2528 Label* fall_through = NULL;
2529 context()->PrepareTest(&materialize_true, &materialize_false,
2530 &if_true, &if_false, &fall_through);
2532 __ JumpIfSmi(eax, if_false);
2533 __ CmpObjectType(eax, JS_REGEXP_TYPE, ebx);
2534 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2535 Split(equal, if_true, if_false, fall_through);
2537 context()->Plug(if_true, if_false);
2542 void FullCodeGenerator::EmitIsConstructCall(CallRuntime* expr) {
2543 ASSERT(expr->arguments()->length() == 0);
2545 Label materialize_true, materialize_false;
2546 Label* if_true = NULL;
2547 Label* if_false = NULL;
2548 Label* fall_through = NULL;
2549 context()->PrepareTest(&materialize_true, &materialize_false,
2550 &if_true, &if_false, &fall_through);
2552 // Get the frame pointer for the calling frame.
2553 __ mov(eax, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2555 // Skip the arguments adaptor frame if it exists.
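  // An arguments adaptor frame is inserted when the actual and formal
  // argument counts differ; it stores the ARGUMENTS_ADAPTOR sentinel where an
  // ordinary frame keeps its context, which is how it is recognized here.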
2556 Label check_frame_marker;
2557 __ cmp(Operand(eax, StandardFrameConstants::kContextOffset),
2558 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2559 __ j(not_equal, &check_frame_marker);
2560 __ mov(eax, Operand(eax, StandardFrameConstants::kCallerFPOffset));
2562 // Check the marker in the calling frame.
2563 __ bind(&check_frame_marker);
2564 __ cmp(Operand(eax, StandardFrameConstants::kMarkerOffset),
2565 Immediate(Smi::FromInt(StackFrame::CONSTRUCT)));
2566 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2567 Split(equal, if_true, if_false, fall_through);
2569 context()->Plug(if_true, if_false);
2573 void FullCodeGenerator::EmitObjectEquals(CallRuntime* expr) {
2574 ZoneList<Expression*>* args = expr->arguments();
2575 ASSERT(args->length() == 2);
2577 // Load the two objects into registers and perform the comparison.
2578 VisitForStackValue(args->at(0));
2579 VisitForAccumulatorValue(args->at(1));
2581 Label materialize_true, materialize_false;
2582 Label* if_true = NULL;
2583 Label* if_false = NULL;
2584 Label* fall_through = NULL;
2585 context()->PrepareTest(&materialize_true, &materialize_false,
2586 &if_true, &if_false, &fall_through);
  __ pop(ebx);
  __ cmp(eax, ebx);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
2591 Split(equal, if_true, if_false, fall_through);
2593 context()->Plug(if_true, if_false);
2597 void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
2598 ZoneList<Expression*>* args = expr->arguments();
2599 ASSERT(args->length() == 1);
2601 // ArgumentsAccessStub expects the key in edx and the formal
2602 // parameter count in eax.
2603 VisitForAccumulatorValue(args->at(0));
2605 __ SafeSet(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
2606 ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
2608 context()->Plug(eax);
2612 void FullCodeGenerator::EmitArgumentsLength(CallRuntime* expr) {
2613 ASSERT(expr->arguments()->length() == 0);
2616 // Get the number of formal parameters.
2617 __ SafeSet(eax, Immediate(Smi::FromInt(info_->scope()->num_parameters())));
2619 // Check if the calling frame is an arguments adaptor frame.
2620 __ mov(ebx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
2621 __ cmp(Operand(ebx, StandardFrameConstants::kContextOffset),
2622 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
2623 __ j(not_equal, &exit);
2625 // Arguments adaptor case: Read the arguments length from the
2627 __ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
2630 if (FLAG_debug_code) __ AbortIfNotSmi(eax);
2631 context()->Plug(eax);
2635 void FullCodeGenerator::EmitClassOf(CallRuntime* expr) {
2636 ZoneList<Expression*>* args = expr->arguments();
2637 ASSERT(args->length() == 1);
2638 Label done, null, function, non_function_constructor;
2640 VisitForAccumulatorValue(args->at(0));
2642 // If the object is a smi, we return null.
2643 __ JumpIfSmi(eax, &null);
2645 // Check that the object is a JS object but take special care of JS
2646 // functions to make sure they have 'Function' as their class.
2647 // Assume that there are only two callable types, and one of them is at
2648 // either end of the type range for JS object types. Saves extra comparisons.
2649 STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
2650 __ CmpObjectType(eax, FIRST_SPEC_OBJECT_TYPE, eax);
  // Map is now in eax.
  __ j(below, &null);
2653 STATIC_ASSERT(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE ==
2654 FIRST_SPEC_OBJECT_TYPE + 1);
2655 __ j(equal, &function);
2657 __ CmpInstanceType(eax, LAST_SPEC_OBJECT_TYPE);
2658 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE ==
2659 LAST_SPEC_OBJECT_TYPE - 1);
2660 __ j(equal, &function);
2661 // Assume that there is no larger type.
2662 STATIC_ASSERT(LAST_NONCALLABLE_SPEC_OBJECT_TYPE == LAST_TYPE - 1);
2664 // Check if the constructor in the map is a JS function.
2665 __ mov(eax, FieldOperand(eax, Map::kConstructorOffset));
2666 __ CmpObjectType(eax, JS_FUNCTION_TYPE, ebx);
2667 __ j(not_equal, &non_function_constructor);
2669 // eax now contains the constructor function. Grab the
2670 // instance class name from there.
2671 __ mov(eax, FieldOperand(eax, JSFunction::kSharedFunctionInfoOffset));
2672 __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kInstanceClassNameOffset));
2675 // Functions have class 'Function'.
2677 __ mov(eax, isolate()->factory()->function_class_symbol());
2680 // Objects with a non-function constructor have class 'Object'.
2681 __ bind(&non_function_constructor);
2682 __ mov(eax, isolate()->factory()->Object_symbol());
2685 // Non-JS objects have class null.
2687 __ mov(eax, isolate()->factory()->null_value());
2692 context()->Plug(eax);
2696 void FullCodeGenerator::EmitLog(CallRuntime* expr) {
2697 // Conditionally generate a log call.
2699 // 0 (literal string): The type of logging (corresponds to the flags).
2700 // This is used to determine whether or not to generate the log call.
2701 // 1 (string): Format string. Access the string at argument index 2
2702 // with '%2s' (see Logger::LogRuntime for all the formats).
2703 // 2 (array): Arguments to the format string.
2704 ZoneList<Expression*>* args = expr->arguments();
2705 ASSERT_EQ(args->length(), 3);
2706 if (CodeGenerator::ShouldGenerateLog(args->at(0))) {
2707 VisitForStackValue(args->at(1));
2708 VisitForStackValue(args->at(2));
2709 __ CallRuntime(Runtime::kLog, 2);
2711 // Finally, we're expected to leave a value on the top of the stack.
2712 __ mov(eax, isolate()->factory()->undefined_value());
2713 context()->Plug(eax);
2717 void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
2718 ASSERT(expr->arguments()->length() == 0);
2720 Label slow_allocate_heapnumber;
2721 Label heapnumber_allocated;
2723 __ AllocateHeapNumber(edi, ebx, ecx, &slow_allocate_heapnumber);
2724 __ jmp(&heapnumber_allocated);
2726 __ bind(&slow_allocate_heapnumber);
2727 // Allocate a heap number.
2728 __ CallRuntime(Runtime::kNumberAlloc, 0);
2731 __ bind(&heapnumber_allocated);
2733 __ PrepareCallCFunction(1, ebx);
2734 __ mov(eax, ContextOperand(context_register(), Context::GLOBAL_INDEX));
2735 __ mov(eax, FieldOperand(eax, GlobalObject::kGlobalContextOffset));
2736 __ mov(Operand(esp, 0), eax);
2737 __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
  // Convert 32 random bits in eax to 0.(32 random bits) in a double by
  // computing ( 1.(20 0s)(32 random bits) x 2^20 ) - ( 1.0 x 2^20 ).
  // This is implemented on both SSE2 and FPU.
2743 if (CpuFeatures::IsSupported(SSE2)) {
2744 CpuFeatures::Scope fscope(SSE2);
2745 __ mov(ebx, Immediate(0x49800000)); // 1.0 x 2^20 as single.
    __ movd(xmm1, ebx);
    __ movd(xmm0, eax);
    __ cvtss2sd(xmm1, xmm1);
2749 __ xorps(xmm0, xmm1);
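    // The XOR splices the 32 random bits into the low mantissa bits of 2^20;
    // subtracting 2^20 below leaves a double in the range [0, 1).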
2750 __ subsd(xmm0, xmm1);
2751 __ movdbl(FieldOperand(edi, HeapNumber::kValueOffset), xmm0);
  } else {
    // 0x4130000000000000 is 1.0 x 2^20 as a double.
2754 __ mov(FieldOperand(edi, HeapNumber::kExponentOffset),
2755 Immediate(0x41300000));
2756 __ mov(FieldOperand(edi, HeapNumber::kMantissaOffset), eax);
2757 __ fld_d(FieldOperand(edi, HeapNumber::kValueOffset));
2758 __ mov(FieldOperand(edi, HeapNumber::kMantissaOffset), Immediate(0));
2759 __ fld_d(FieldOperand(edi, HeapNumber::kValueOffset));
2761 __ fstp_d(FieldOperand(edi, HeapNumber::kValueOffset));
2764 context()->Plug(eax);
2768 void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
2769 // Load the arguments on the stack and call the stub.
2771 ZoneList<Expression*>* args = expr->arguments();
2772 ASSERT(args->length() == 3);
2773 VisitForStackValue(args->at(0));
2774 VisitForStackValue(args->at(1));
2775 VisitForStackValue(args->at(2));
2777 context()->Plug(eax);
2781 void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
2782 // Load the arguments on the stack and call the stub.
2783 RegExpExecStub stub;
2784 ZoneList<Expression*>* args = expr->arguments();
2785 ASSERT(args->length() == 4);
2786 VisitForStackValue(args->at(0));
2787 VisitForStackValue(args->at(1));
2788 VisitForStackValue(args->at(2));
2789 VisitForStackValue(args->at(3));
2791 context()->Plug(eax);
2795 void FullCodeGenerator::EmitValueOf(CallRuntime* expr) {
2796 ZoneList<Expression*>* args = expr->arguments();
2797 ASSERT(args->length() == 1);
2799 VisitForAccumulatorValue(args->at(0)); // Load the object.
2802 // If the object is a smi return the object.
2803 __ JumpIfSmi(eax, &done, Label::kNear);
2804 // If the object is not a value type, return the object.
2805 __ CmpObjectType(eax, JS_VALUE_TYPE, ebx);
2806 __ j(not_equal, &done, Label::kNear);
2807 __ mov(eax, FieldOperand(eax, JSValue::kValueOffset));
2810 context()->Plug(eax);
2814 void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
2815 // Load the arguments on the stack and call the runtime function.
2816 ZoneList<Expression*>* args = expr->arguments();
2817 ASSERT(args->length() == 2);
2818 VisitForStackValue(args->at(0));
2819 VisitForStackValue(args->at(1));
2821 if (CpuFeatures::IsSupported(SSE2)) {
2825 __ CallRuntime(Runtime::kMath_pow, 2);
2827 context()->Plug(eax);
2831 void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
2832 ZoneList<Expression*>* args = expr->arguments();
2833 ASSERT(args->length() == 2);
2835 VisitForStackValue(args->at(0)); // Load the object.
2836 VisitForAccumulatorValue(args->at(1)); // Load the value.
2837 __ pop(ebx); // eax = value. ebx = object.
2840 // If the object is a smi, return the value.
2841 __ JumpIfSmi(ebx, &done, Label::kNear);
2843 // If the object is not a value type, return the value.
2844 __ CmpObjectType(ebx, JS_VALUE_TYPE, ecx);
2845 __ j(not_equal, &done, Label::kNear);
2848 __ mov(FieldOperand(ebx, JSValue::kValueOffset), eax);
2850 // Update the write barrier. Save the value as it will be
2851 // overwritten by the write barrier code and is needed afterward.
  __ mov(edx, eax);
  __ RecordWriteField(ebx, JSValue::kValueOffset, edx, ecx, kDontSaveFPRegs);
2856 context()->Plug(eax);
2860 void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
2861 ZoneList<Expression*>* args = expr->arguments();
2862 ASSERT_EQ(args->length(), 1);
2864 // Load the argument on the stack and call the stub.
2865 VisitForStackValue(args->at(0));
2867 NumberToStringStub stub;
2869 context()->Plug(eax);
2873 void FullCodeGenerator::EmitStringCharFromCode(CallRuntime* expr) {
2874 ZoneList<Expression*>* args = expr->arguments();
2875 ASSERT(args->length() == 1);
2877 VisitForAccumulatorValue(args->at(0));
2880 StringCharFromCodeGenerator generator(eax, ebx);
2881 generator.GenerateFast(masm_);
2884 NopRuntimeCallHelper call_helper;
2885 generator.GenerateSlow(masm_, call_helper);
2888 context()->Plug(ebx);
2892 void FullCodeGenerator::EmitStringCharCodeAt(CallRuntime* expr) {
2893 ZoneList<Expression*>* args = expr->arguments();
2894 ASSERT(args->length() == 2);
2896 VisitForStackValue(args->at(0));
2897 VisitForAccumulatorValue(args->at(1));
2899 Register object = ebx;
2900 Register index = eax;
2901 Register scratch = ecx;
2902 Register result = edx;
2906 Label need_conversion;
2907 Label index_out_of_range;
2909 StringCharCodeAtGenerator generator(object,
2915 &index_out_of_range,
2916 STRING_INDEX_IS_NUMBER);
2917 generator.GenerateFast(masm_);
2920 __ bind(&index_out_of_range);
  // When the index is out of range, the spec requires us to return
  // NaN.
2923 __ Set(result, Immediate(isolate()->factory()->nan_value()));
2926 __ bind(&need_conversion);
2927 // Move the undefined value into the result register, which will
2928 // trigger conversion.
2929 __ Set(result, Immediate(isolate()->factory()->undefined_value()));
2932 NopRuntimeCallHelper call_helper;
2933 generator.GenerateSlow(masm_, call_helper);
2936 context()->Plug(result);
2940 void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
2941 ZoneList<Expression*>* args = expr->arguments();
2942 ASSERT(args->length() == 2);
2944 VisitForStackValue(args->at(0));
2945 VisitForAccumulatorValue(args->at(1));
2947 Register object = ebx;
2948 Register index = eax;
2949 Register scratch1 = ecx;
2950 Register scratch2 = edx;
2951 Register result = eax;
2955 Label need_conversion;
2956 Label index_out_of_range;
2958 StringCharAtGenerator generator(object,
2965 &index_out_of_range,
2966 STRING_INDEX_IS_NUMBER);
2967 generator.GenerateFast(masm_);
2970 __ bind(&index_out_of_range);
2971 // When the index is out of range, the spec requires us to return
2972 // the empty string.
2973 __ Set(result, Immediate(isolate()->factory()->empty_string()));
2976 __ bind(&need_conversion);
  // Move smi zero into the result register, which will trigger
  // conversion.
2979 __ Set(result, Immediate(Smi::FromInt(0)));
2982 NopRuntimeCallHelper call_helper;
2983 generator.GenerateSlow(masm_, call_helper);
2986 context()->Plug(result);
2990 void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
2991 ZoneList<Expression*>* args = expr->arguments();
2992 ASSERT_EQ(2, args->length());
2994 VisitForStackValue(args->at(0));
2995 VisitForStackValue(args->at(1));
2997 StringAddStub stub(NO_STRING_ADD_FLAGS);
2999 context()->Plug(eax);
3003 void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
3004 ZoneList<Expression*>* args = expr->arguments();
3005 ASSERT_EQ(2, args->length());
3007 VisitForStackValue(args->at(0));
3008 VisitForStackValue(args->at(1));
3010 StringCompareStub stub;
3012 context()->Plug(eax);
3016 void FullCodeGenerator::EmitMathSin(CallRuntime* expr) {
3017 // Load the argument on the stack and call the stub.
3018 TranscendentalCacheStub stub(TranscendentalCache::SIN,
3019 TranscendentalCacheStub::TAGGED);
3020 ZoneList<Expression*>* args = expr->arguments();
3021 ASSERT(args->length() == 1);
3022 VisitForStackValue(args->at(0));
3024 context()->Plug(eax);
3028 void FullCodeGenerator::EmitMathCos(CallRuntime* expr) {
3029 // Load the argument on the stack and call the stub.
3030 TranscendentalCacheStub stub(TranscendentalCache::COS,
3031 TranscendentalCacheStub::TAGGED);
3032 ZoneList<Expression*>* args = expr->arguments();
3033 ASSERT(args->length() == 1);
3034 VisitForStackValue(args->at(0));
3036 context()->Plug(eax);
3040 void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
3041 // Load the argument on the stack and call the stub.
3042 TranscendentalCacheStub stub(TranscendentalCache::LOG,
3043 TranscendentalCacheStub::TAGGED);
3044 ZoneList<Expression*>* args = expr->arguments();
3045 ASSERT(args->length() == 1);
3046 VisitForStackValue(args->at(0));
3048 context()->Plug(eax);
3052 void FullCodeGenerator::EmitMathSqrt(CallRuntime* expr) {
3053 // Load the argument on the stack and call the runtime function.
3054 ZoneList<Expression*>* args = expr->arguments();
3055 ASSERT(args->length() == 1);
3056 VisitForStackValue(args->at(0));
3057 __ CallRuntime(Runtime::kMath_sqrt, 1);
3058 context()->Plug(eax);
3062 void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
3063 ZoneList<Expression*>* args = expr->arguments();
3064 ASSERT(args->length() >= 2);
3066 int arg_count = args->length() - 2; // 2 ~ receiver and function.
3067 for (int i = 0; i < arg_count + 1; ++i) {
3068 VisitForStackValue(args->at(i));
3070 VisitForAccumulatorValue(args->last()); // Function.
3072 // InvokeFunction requires the function in edi. Move it in there.
3073 __ mov(edi, result_register());
3074 ParameterCount count(arg_count);
3075 __ InvokeFunction(edi, count, CALL_FUNCTION,
3076 NullCallWrapper(), CALL_AS_METHOD);
3077 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
3078 context()->Plug(eax);
3082 void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
3083 // Load the arguments on the stack and call the stub.
3084 RegExpConstructResultStub stub;
3085 ZoneList<Expression*>* args = expr->arguments();
3086 ASSERT(args->length() == 3);
3087 VisitForStackValue(args->at(0));
3088 VisitForStackValue(args->at(1));
3089 VisitForStackValue(args->at(2));
3091 context()->Plug(eax);
3095 void FullCodeGenerator::EmitSwapElements(CallRuntime* expr) {
3096 ZoneList<Expression*>* args = expr->arguments();
3097 ASSERT(args->length() == 3);
3098 VisitForStackValue(args->at(0));
3099 VisitForStackValue(args->at(1));
3100 VisitForStackValue(args->at(2));
3103 Register object = eax;
3104 Register index_1 = ebx;
3105 Register index_2 = ecx;
3106 Register elements = edi;
3107 Register temp = edx;
3108 __ mov(object, Operand(esp, 2 * kPointerSize));
3109 // Fetch the map and check if array is in fast case.
3110 // Check that object doesn't require security checks and
3111 // has no indexed interceptor.
3112 __ CmpObjectType(object, JS_ARRAY_TYPE, temp);
3113 __ j(not_equal, &slow_case);
3114 __ test_b(FieldOperand(temp, Map::kBitFieldOffset),
3115 KeyedLoadIC::kSlowCaseBitFieldMask);
3116 __ j(not_zero, &slow_case);
3118 // Check the object's elements are in fast case and writable.
3119 __ mov(elements, FieldOperand(object, JSObject::kElementsOffset));
3120 __ cmp(FieldOperand(elements, HeapObject::kMapOffset),
3121 Immediate(isolate()->factory()->fixed_array_map()));
3122 __ j(not_equal, &slow_case);
3124 // Check that both indices are smis.
3125 __ mov(index_1, Operand(esp, 1 * kPointerSize));
3126 __ mov(index_2, Operand(esp, 0));
3127 __ mov(temp, index_1);
3128 __ or_(temp, index_2);
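  // If either index has its tag bit set, the OR is not a smi, so a single
  // check below covers both indices.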
3129 __ JumpIfNotSmi(temp, &slow_case);
3131 // Check that both indices are valid.
3132 __ mov(temp, FieldOperand(object, JSArray::kLengthOffset));
3133 __ cmp(temp, index_1);
3134 __ j(below_equal, &slow_case);
3135 __ cmp(temp, index_2);
3136 __ j(below_equal, &slow_case);
3138 // Bring addresses into index1 and index2.
3139 __ lea(index_1, CodeGenerator::FixedArrayElementOperand(elements, index_1));
3140 __ lea(index_2, CodeGenerator::FixedArrayElementOperand(elements, index_2));
3142 // Swap elements. Use object and temp as scratch registers.
3143 __ mov(object, Operand(index_1, 0));
3144 __ mov(temp, Operand(index_2, 0));
3145 __ mov(Operand(index_2, 0), object);
3146 __ mov(Operand(index_1, 0), temp);
3148 Label no_remembered_set;
3149 __ CheckPageFlag(elements,
3151 1 << MemoryChunk::SCAN_ON_SCAVENGE,
3155 // Possible optimization: do a check that both values are Smis
  // (OR them together and test against the smi mask).
3158 // We are swapping two objects in an array and the incremental marker never
3159 // pauses in the middle of scanning a single object. Therefore the
3160 // incremental marker is not disturbed, so we don't need to call the
3161 // RecordWrite stub that notifies the incremental marker.
3162 __ RememberedSetHelper(elements,
3166 MacroAssembler::kFallThroughAtEnd);
3167 __ RememberedSetHelper(elements,
3171 MacroAssembler::kFallThroughAtEnd);
3173 __ bind(&no_remembered_set);
3175 // We are done. Drop elements from the stack, and return undefined.
3176 __ add(esp, Immediate(3 * kPointerSize));
3177 __ mov(eax, isolate()->factory()->undefined_value());
3180 __ bind(&slow_case);
3181 __ CallRuntime(Runtime::kSwapElements, 3);
3184 context()->Plug(eax);
3188 void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
3189 ZoneList<Expression*>* args = expr->arguments();
3190 ASSERT_EQ(2, args->length());
3192 ASSERT_NE(NULL, args->at(0)->AsLiteral());
3193 int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
3195 Handle<FixedArray> jsfunction_result_caches(
3196 isolate()->global_context()->jsfunction_result_caches());
3197 if (jsfunction_result_caches->length() <= cache_id) {
3198 __ Abort("Attempt to use undefined cache.");
3199 __ mov(eax, isolate()->factory()->undefined_value());
3200 context()->Plug(eax);
3204 VisitForAccumulatorValue(args->at(1));
3207 Register cache = ebx;
3209 __ mov(cache, ContextOperand(esi, Context::GLOBAL_INDEX));
3211 FieldOperand(cache, GlobalObject::kGlobalContextOffset));
3212 __ mov(cache, ContextOperand(cache, Context::JSFUNCTION_RESULT_CACHES_INDEX));
3214 FieldOperand(cache, FixedArray::OffsetOfElementAt(cache_id)));
3216 Label done, not_found;
3217 // tmp now holds finger offset as a smi.
3218 STATIC_ASSERT(kSmiTag == 0 && kSmiTagSize == 1);
3219 __ mov(tmp, FieldOperand(cache, JSFunctionResultCache::kFingerOffset));
3220 __ cmp(key, CodeGenerator::FixedArrayElementOperand(cache, tmp));
3221 __ j(not_equal, ¬_found);
3223 __ mov(eax, CodeGenerator::FixedArrayElementOperand(cache, tmp, 1));
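  // The entry's key sits at the finger and its value immediately after it,
  // hence the additional element offset of 1 above.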
3226 __ bind(¬_found);
3227 // Call runtime to perform the lookup.
3230 __ CallRuntime(Runtime::kGetFromCache, 2);
3233 context()->Plug(eax);
3237 void FullCodeGenerator::EmitIsRegExpEquivalent(CallRuntime* expr) {
3238 ZoneList<Expression*>* args = expr->arguments();
3239 ASSERT_EQ(2, args->length());
3241 Register right = eax;
  Register left = ebx;
  Register tmp = ecx;
3245 VisitForStackValue(args->at(0));
3246 VisitForAccumulatorValue(args->at(1));
3249 Label done, fail, ok;
  __ cmp(left, right);
  __ j(equal, &ok);
3252 // Fail if either is a non-HeapObject.
  __ mov(tmp, left);
  __ and_(tmp, right);
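  // ANDing the two values clears the tag bit if either operand is a smi, so
  // the single JumpIfSmi below rejects both non-heap-object cases at once.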
3255 __ JumpIfSmi(tmp, &fail);
3256 __ mov(tmp, FieldOperand(left, HeapObject::kMapOffset));
3257 __ CmpInstanceType(tmp, JS_REGEXP_TYPE);
3258 __ j(not_equal, &fail);
3259 __ cmp(tmp, FieldOperand(right, HeapObject::kMapOffset));
3260 __ j(not_equal, &fail);
3261 __ mov(tmp, FieldOperand(left, JSRegExp::kDataOffset));
3262 __ cmp(tmp, FieldOperand(right, JSRegExp::kDataOffset));
3265 __ mov(eax, Immediate(isolate()->factory()->false_value()));
3268 __ mov(eax, Immediate(isolate()->factory()->true_value()));
3271 context()->Plug(eax);
3275 void FullCodeGenerator::EmitHasCachedArrayIndex(CallRuntime* expr) {
3276 ZoneList<Expression*>* args = expr->arguments();
3277 ASSERT(args->length() == 1);
3279 VisitForAccumulatorValue(args->at(0));
3281 if (FLAG_debug_code) {
3282 __ AbortIfNotString(eax);
3285 Label materialize_true, materialize_false;
3286 Label* if_true = NULL;
3287 Label* if_false = NULL;
3288 Label* fall_through = NULL;
3289 context()->PrepareTest(&materialize_true, &materialize_false,
3290 &if_true, &if_false, &fall_through);
3292 __ test(FieldOperand(eax, String::kHashFieldOffset),
3293 Immediate(String::kContainsCachedArrayIndexMask));
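  // The hash field caches an array index when the bits selected by
  // kContainsCachedArrayIndexMask are all clear, so 'zero' means true below.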
3294 PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
3295 Split(zero, if_true, if_false, fall_through);
3297 context()->Plug(if_true, if_false);
3301 void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
3302 ZoneList<Expression*>* args = expr->arguments();
3303 ASSERT(args->length() == 1);
3304 VisitForAccumulatorValue(args->at(0));
3306 if (FLAG_debug_code) {
3307 __ AbortIfNotString(eax);
3310 __ mov(eax, FieldOperand(eax, String::kHashFieldOffset));
3311 __ IndexFromHash(eax, eax);
3313 context()->Plug(eax);
3317 void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
3318 Label bailout, done, one_char_separator, long_separator,
3319 non_trivial_array, not_size_one_array, loop,
3320 loop_1, loop_1_condition, loop_2, loop_2_entry, loop_3, loop_3_entry;
3322 ZoneList<Expression*>* args = expr->arguments();
3323 ASSERT(args->length() == 2);
3324 // We will leave the separator on the stack until the end of the function.
3325 VisitForStackValue(args->at(1));
3326 // Load this to eax (= array)
3327 VisitForAccumulatorValue(args->at(0));
3328 // All aliases of the same register have disjoint lifetimes.
3329 Register array = eax;
3330 Register elements = no_reg; // Will be eax.
3332 Register index = edx;
3334 Register string_length = ecx;
3336 Register string = esi;
3338 Register scratch = ebx;
3340 Register array_length = edi;
3341 Register result_pos = no_reg; // Will be edi.
3343 // Separator operand is already pushed.
3344 Operand separator_operand = Operand(esp, 2 * kPointerSize);
3345 Operand result_operand = Operand(esp, 1 * kPointerSize);
3346 Operand array_length_operand = Operand(esp, 0);
3347 __ sub(esp, Immediate(2 * kPointerSize));
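  // Reserve two stack slots below the separator: one for the accumulated
  // result string and one for the untagged array length, matching the
  // operands defined above.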
3349 // Check that the array is a JSArray
3350 __ JumpIfSmi(array, &bailout);
3351 __ CmpObjectType(array, JS_ARRAY_TYPE, scratch);
3352 __ j(not_equal, &bailout);
3354 // Check that the array has fast elements.
3355 __ CheckFastElements(scratch, &bailout);
3357 // If the array has length zero, return the empty string.
3358 __ mov(array_length, FieldOperand(array, JSArray::kLengthOffset));
3359 __ SmiUntag(array_length);
3360 __ j(not_zero, &non_trivial_array);
3361 __ mov(result_operand, isolate()->factory()->empty_string());
3364 // Save the array length.
3365 __ bind(&non_trivial_array);
3366 __ mov(array_length_operand, array_length);
3368 // Save the FixedArray containing array's elements.
3369 // End of array's live range.
3371 __ mov(elements, FieldOperand(array, JSArray::kElementsOffset));
3375 // Check that all array elements are sequential ASCII strings, and
3376 // accumulate the sum of their lengths, as a smi-encoded value.
3377 __ Set(index, Immediate(0));
3378 __ Set(string_length, Immediate(0));
3379 // Loop condition: while (index < length).
3380 // Live loop registers: index, array_length, string,
3381 // scratch, string_length, elements.
3382 if (FLAG_debug_code) {
3383 __ cmp(index, array_length);
3384 __ Assert(less, "No empty arrays here in EmitFastAsciiArrayJoin");
3387 __ mov(string, FieldOperand(elements,
3390 FixedArray::kHeaderSize));
3391 __ JumpIfSmi(string, &bailout);
3392 __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
3393 __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3394 __ and_(scratch, Immediate(
3395 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
3396 __ cmp(scratch, kStringTag | kAsciiStringTag | kSeqStringTag);
3397 __ j(not_equal, &bailout);
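  // Only flat (sequential) ASCII strings are handled on this fast path;
  // cons, sliced and two-byte strings bail out to the runtime.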
3398 __ add(string_length,
3399 FieldOperand(string, SeqAsciiString::kLengthOffset));
3400 __ j(overflow, &bailout);
3401 __ add(index, Immediate(1));
3402 __ cmp(index, array_length);
3405 // If array_length is 1, return elements[0], a string.
3406 __ cmp(array_length, 1);
3407 __ j(not_equal, ¬_size_one_array);
3408 __ mov(scratch, FieldOperand(elements, FixedArray::kHeaderSize));
3409 __ mov(result_operand, scratch);
3412 __ bind(¬_size_one_array);
3414 // End of array_length live range.
3415 result_pos = array_length;
3416 array_length = no_reg;
3419 // string_length: Sum of string lengths, as a smi.
3420 // elements: FixedArray of strings.
3422 // Check that the separator is a flat ASCII string.
3423 __ mov(string, separator_operand);
3424 __ JumpIfSmi(string, &bailout);
3425 __ mov(scratch, FieldOperand(string, HeapObject::kMapOffset));
3426 __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
3427 __ and_(scratch, Immediate(
3428 kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask));
3429 __ cmp(scratch, ASCII_STRING_TYPE);
3430 __ j(not_equal, &bailout);
3432 // Add (separator length times array_length) - separator length
3433 // to string_length.
3434 __ mov(scratch, separator_operand);
3435 __ mov(scratch, FieldOperand(scratch, SeqAsciiString::kLengthOffset));
3436 __ sub(string_length, scratch); // May be negative, temporarily.
3437 __ imul(scratch, array_length_operand);
3438 __ j(overflow, &bailout);
3439 __ add(string_length, scratch);
3440 __ j(overflow, &bailout);
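  // string_length currently holds a smi; shifting right by one untags it
  // into a raw character count.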
3442 __ shr(string_length, 1);
3443 // Live registers and stack values:
3446 __ AllocateAsciiString(result_pos, string_length, scratch,
3447 index, string, &bailout);
3448 __ mov(result_operand, result_pos);
3449 __ lea(result_pos, FieldOperand(result_pos, SeqAsciiString::kHeaderSize));
3452 __ mov(string, separator_operand);
3453 __ cmp(FieldOperand(string, SeqAsciiString::kLengthOffset),
3454 Immediate(Smi::FromInt(1)));
3455 __ j(equal, &one_char_separator);
3456 __ j(greater, &long_separator);
3459 // Empty separator case
3460 __ mov(index, Immediate(0));
3461 __ jmp(&loop_1_condition);
3462 // Loop condition: while (index < length).
3464 // Each iteration of the loop concatenates one string to the result.
3465 // Live values in registers:
3466 // index: which element of the elements array we are adding to the result.
3467 // result_pos: the position to which we are currently copying characters.
3468 // elements: the FixedArray of strings we are joining.
3470 // Get string = array[index].
3471 __ mov(string, FieldOperand(elements, index,
3473 FixedArray::kHeaderSize));
3474 __ mov(string_length,
3475 FieldOperand(string, String::kLengthOffset));
3476 __ shr(string_length, 1);
3478 FieldOperand(string, SeqAsciiString::kHeaderSize));
3479 __ CopyBytes(string, result_pos, string_length, scratch);
3480 __ add(index, Immediate(1));
3481 __ bind(&loop_1_condition);
3482 __ cmp(index, array_length_operand);
3483 __ j(less, &loop_1); // End while (index < length).
3488 // One-character separator case
3489 __ bind(&one_char_separator);
3490 // Replace separator with its ascii character value.
3491 __ mov_b(scratch, FieldOperand(string, SeqAsciiString::kHeaderSize));
3492 __ mov_b(separator_operand, scratch);
3494 __ Set(index, Immediate(0));
3495 // Jump into the loop after the code that copies the separator, so the first
3496 // element is not preceded by a separator
3497 __ jmp(&loop_2_entry);
3498 // Loop condition: while (index < length).
3500 // Each iteration of the loop concatenates one string to the result.
3501 // Live values in registers:
3502 // index: which element of the elements array we are adding to the result.
3503 // result_pos: the position to which we are currently copying characters.
3505 // Copy the separator character to the result.
3506 __ mov_b(scratch, separator_operand);
3507 __ mov_b(Operand(result_pos, 0), scratch);
3510 __ bind(&loop_2_entry);
3511 // Get string = array[index].
3512 __ mov(string, FieldOperand(elements, index,
3514 FixedArray::kHeaderSize));
3515 __ mov(string_length,
3516 FieldOperand(string, String::kLengthOffset));
3517 __ shr(string_length, 1);
3519 FieldOperand(string, SeqAsciiString::kHeaderSize));
3520 __ CopyBytes(string, result_pos, string_length, scratch);
3521 __ add(index, Immediate(1));
3523 __ cmp(index, array_length_operand);
3524 __ j(less, &loop_2); // End while (index < length).
3528 // Long separator case (separator is more than one character).
3529 __ bind(&long_separator);
3531 __ Set(index, Immediate(0));
3532 // Jump into the loop after the code that copies the separator, so the first
3533 // element is not preceded by a separator
3534 __ jmp(&loop_3_entry);
3535 // Loop condition: while (index < length).
3537 // Each iteration of the loop concatenates one string to the result.
3538 // Live values in registers:
3539 // index: which element of the elements array we are adding to the result.
3540 // result_pos: the position to which we are currently copying characters.
3542 // Copy the separator to the result.
3543 __ mov(string, separator_operand);
3544 __ mov(string_length,
3545 FieldOperand(string, String::kLengthOffset));
3546 __ shr(string_length, 1);
3548 FieldOperand(string, SeqAsciiString::kHeaderSize));
3549 __ CopyBytes(string, result_pos, string_length, scratch);
3551 __ bind(&loop_3_entry);
3552 // Get string = array[index].
3553 __ mov(string, FieldOperand(elements, index,
3555 FixedArray::kHeaderSize));
3556 __ mov(string_length,
3557 FieldOperand(string, String::kLengthOffset));
3558 __ shr(string_length, 1);
3560 FieldOperand(string, SeqAsciiString::kHeaderSize));
3561 __ CopyBytes(string, result_pos, string_length, scratch);
3562 __ add(index, Immediate(1));
3564 __ cmp(index, array_length_operand);
3565 __ j(less, &loop_3); // End while (index < length).
3570 __ mov(result_operand, isolate()->factory()->undefined_value());
3572 __ mov(eax, result_operand);
3573 // Drop temp values from the stack, and restore context register.
3574 __ add(esp, Immediate(3 * kPointerSize));
3576 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
3577 context()->Plug(eax);
3581 void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
3582 Handle<String> name = expr->name();
3583 if (name->length() > 0 && name->Get(0) == '_') {
3584 Comment cmnt(masm_, "[ InlineRuntimeCall");
3585 EmitInlineRuntimeCall(expr);
3589 Comment cmnt(masm_, "[ CallRuntime");
3590 ZoneList<Expression*>* args = expr->arguments();
3592 if (expr->is_jsruntime()) {
3593 // Prepare for calling JS runtime function.
3594 __ mov(eax, GlobalObjectOperand());
3595 __ push(FieldOperand(eax, GlobalObject::kBuiltinsOffset));
3598 // Push the arguments ("left-to-right").
3599 int arg_count = args->length();
3600 for (int i = 0; i < arg_count; i++) {
3601 VisitForStackValue(args->at(i));
3604 if (expr->is_jsruntime()) {
3605 // Call the JS runtime function via a call IC.
3606 __ Set(ecx, Immediate(expr->name()));
3607 RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
    Handle<Code> ic =
        isolate()->stub_cache()->ComputeCallInitialize(arg_count, mode);
3610 __ call(ic, mode, expr->id());
3611 // Restore context register.
3612 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
3614 // Call the C runtime function.
3615 __ CallRuntime(expr->function(), arg_count);
3617 context()->Plug(eax);
void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  switch (expr->op()) {
    case Token::DELETE: {
      Comment cmnt(masm_, "[ UnaryOperation (DELETE)");
      Property* property = expr->expression()->AsProperty();
      VariableProxy* proxy = expr->expression()->AsVariableProxy();

      if (property != NULL) {
        VisitForStackValue(property->obj());
        VisitForStackValue(property->key());
        __ push(Immediate(Smi::FromInt(strict_mode_flag())));
        __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
        context()->Plug(eax);
      } else if (proxy != NULL) {
        Variable* var = proxy->var();
        // Delete of an unqualified identifier is disallowed in strict mode
        // but "delete this" is allowed.
        ASSERT(strict_mode_flag() == kNonStrictMode || var->is_this());
        if (var->IsUnallocated()) {
          __ push(var->is_qml_global() ? QmlGlobalObjectOperand() : GlobalObjectOperand());
          __ push(Immediate(var->name()));
          __ push(Immediate(Smi::FromInt(kNonStrictMode)));
          __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
          context()->Plug(eax);
        } else if (var->IsStackAllocated() || var->IsContextSlot()) {
          // Result of deleting non-global variables is false.  'this' is
          // not really a variable, though we implement it as one.  The
          // subexpression does not have side effects.
          context()->Plug(var->is_this());
        } else {
          // Non-global variable.  Call the runtime to try to delete from the
          // context where the variable was introduced.
          __ push(context_register());
          __ push(Immediate(var->name()));
          __ CallRuntime(Runtime::kDeleteContextSlot, 2);
          context()->Plug(eax);
        }
      } else {
        // Result of deleting non-property, non-variable reference is true.
        // The subexpression may have side effects.
        VisitForEffect(expr->expression());
        context()->Plug(true);
      }
      break;
    }

    case Token::VOID: {
      Comment cmnt(masm_, "[ UnaryOperation (VOID)");
      VisitForEffect(expr->expression());
      context()->Plug(isolate()->factory()->undefined_value());
      break;
    }

    case Token::NOT: {
      Comment cmnt(masm_, "[ UnaryOperation (NOT)");
      if (context()->IsEffect()) {
        // Unary NOT has no side effects so it's only necessary to visit the
        // subexpression.  Match the optimizing compiler by not branching.
        VisitForEffect(expr->expression());
      } else if (context()->IsTest()) {
        const TestContext* test = TestContext::cast(context());
        // The labels are swapped for the recursive call.
        VisitForControl(expr->expression(),
                        test->false_label(),
                        test->true_label(),
                        test->fall_through());
        context()->Plug(test->true_label(), test->false_label());
      } else {
        // We handle value contexts explicitly rather than simply visiting
        // for control and plugging the control flow into the context,
        // because we need to prepare a pair of extra administrative AST ids
        // for the optimizing compiler.
        ASSERT(context()->IsAccumulatorValue() || context()->IsStackValue());
        Label materialize_true, materialize_false, done;
        VisitForControl(expr->expression(),
                        &materialize_false,
                        &materialize_true,
                        &materialize_true);
        __ bind(&materialize_true);
        PrepareForBailoutForId(expr->MaterializeTrueId(), NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->true_value());
        } else {
          __ push(isolate()->factory()->true_value());
        }
        __ jmp(&done, Label::kNear);
        __ bind(&materialize_false);
        PrepareForBailoutForId(expr->MaterializeFalseId(), NO_REGISTERS);
        if (context()->IsAccumulatorValue()) {
          __ mov(eax, isolate()->factory()->false_value());
        } else {
          __ push(isolate()->factory()->false_value());
        }
        __ bind(&done);
      }
      break;
    }

    case Token::TYPEOF: {
      Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)");
      { StackValueContext context(this);
        VisitForTypeofValue(expr->expression());
      }
      __ CallRuntime(Runtime::kTypeof, 1);
      context()->Plug(eax);
      break;
    }

    case Token::ADD: {
      Comment cmt(masm_, "[ UnaryOperation (ADD)");
      VisitForAccumulatorValue(expr->expression());
      Label no_conversion;
      __ JumpIfSmi(result_register(), &no_conversion);
      ToNumberStub convert_stub;
      __ CallStub(&convert_stub);
      __ bind(&no_conversion);
      context()->Plug(result_register());
      break;
    }

    case Token::SUB:
      EmitUnaryOperation(expr, "[ UnaryOperation (SUB)");
      break;

    case Token::BIT_NOT:
      EmitUnaryOperation(expr, "[ UnaryOperation (BIT_NOT)");
      break;

    default:
      UNREACHABLE();
  }
}
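

// Shared helper for unary '-' and '~'. The operand is evaluated into eax
// and the generic UnaryOpStub does the actual operation; the overwrite mode
// tells the stub whether it may reuse the operand's heap number for the
// result.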
void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
                                           const char* comment) {
  Comment cmt(masm_, comment);
  bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
  UnaryOverwriteMode overwrite =
      can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
  UnaryOpStub stub(expr->op(), overwrite);
  // UnaryOpStub expects the argument to be in the
  // accumulator register eax.
  VisitForAccumulatorValue(expr->expression());
  SetSourcePosition(expr->position());
  __ call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
  context()->Plug(eax);
}
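

// Count operations (++x, --x, x++, x--). The general scheme is:
//   1. Load the target (variable or property) and convert it to a number.
//   2. For postfix in a value context, stash the original value so it can
//      become the expression's result (e.g. in 'y = x++' the old x is
//      assigned to y).
//   3. Add or subtract 1, inlining the smi case behind a patchable jump and
//      falling back to the generic BinaryOpStub on overflow or non-smis.
//   4. Store the new value back through the same path as a normal
//      assignment.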
void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
  Comment cmnt(masm_, "[ CountOperation");
  SetSourcePosition(expr->position());

  // Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
  // as the left-hand side.
  if (!expr->expression()->IsValidLeftHandSide()) {
    VisitForEffect(expr->expression());
    return;
  }

  // Expression can only be a property, a global or a (parameter or local)
  // slot.
  enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
  LhsKind assign_type = VARIABLE;
  Property* prop = expr->expression()->AsProperty();
  // In case of a property we use the uninitialized expression context
  // of the key to detect a named property.
  if (prop != NULL) {
    assign_type =
        (prop->key()->IsPropertyName()) ? NAMED_PROPERTY : KEYED_PROPERTY;
  }

  // Evaluate expression and get value.
  if (assign_type == VARIABLE) {
    ASSERT(expr->expression()->AsVariableProxy()->var() != NULL);
    AccumulatorValueContext context(this);
    EmitVariableLoad(expr->expression()->AsVariableProxy());
  } else {
    // Reserve space for result of postfix operation.
    if (expr->is_postfix() && !context()->IsEffect()) {
      __ push(Immediate(Smi::FromInt(0)));
    }
    if (assign_type == NAMED_PROPERTY) {
      // Put the object both on the stack and in the accumulator.
      VisitForAccumulatorValue(prop->obj());
      __ push(eax);
      EmitNamedPropertyLoad(prop);
    } else {
      VisitForStackValue(prop->obj());
      VisitForAccumulatorValue(prop->key());
      __ mov(edx, Operand(esp, 0));
      __ push(eax);
      EmitKeyedPropertyLoad(prop);
    }
  }

  // We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
  if (assign_type == VARIABLE) {
    PrepareForBailout(expr->expression(), TOS_REG);
  } else {
    PrepareForBailoutForId(expr->CountId(), TOS_REG);
  }

  // Call ToNumber only if operand is not a smi.
  Label no_conversion;
  if (ShouldInlineSmiCase(expr->op())) {
    __ JumpIfSmi(eax, &no_conversion, Label::kNear);
  }
  ToNumberStub convert_stub;
  __ CallStub(&convert_stub);
  __ bind(&no_conversion);

  // Save result for postfix expressions.
  if (expr->is_postfix()) {
    if (!context()->IsEffect()) {
      // Save the result on the stack. If we have a named or keyed property
      // we store the result under the receiver that is currently on top
      // of the stack.
      switch (assign_type) {
        case VARIABLE:
          __ push(eax);
          break;
        case NAMED_PROPERTY:
          __ mov(Operand(esp, kPointerSize), eax);
          break;
        case KEYED_PROPERTY:
          __ mov(Operand(esp, 2 * kPointerSize), eax);
          break;
      }
    }
  }

  // Inline smi case if we are in a loop.
  Label done, stub_call;
  JumpPatchSite patch_site(masm_);

  if (ShouldInlineSmiCase(expr->op())) {
    if (expr->op() == Token::INC) {
      __ add(eax, Immediate(Smi::FromInt(1)));
    } else {
      __ sub(eax, Immediate(Smi::FromInt(1)));
    }
    __ j(overflow, &stub_call, Label::kNear);
    // We could eliminate this smi check if we split the code at
    // the first smi check before calling ToNumber.
    patch_site.EmitJumpIfSmi(eax, &done, Label::kNear);

    __ bind(&stub_call);
    // Call stub. Undo operation first.
    if (expr->op() == Token::INC) {
      __ sub(eax, Immediate(Smi::FromInt(1)));
    } else {
      __ add(eax, Immediate(Smi::FromInt(1)));
    }
  }

  // Record position before stub call.
  SetSourcePosition(expr->position());

  // Call stub for +1/-1.
  __ mov(edx, eax);
  __ mov(eax, Immediate(Smi::FromInt(1)));
  BinaryOpStub stub(expr->binary_op(), NO_OVERWRITE);
  __ call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->CountId());
  patch_site.EmitPatchInfo();
  __ bind(&done);

  // Store the value returned in eax.
  switch (assign_type) {
    case VARIABLE:
      if (expr->is_postfix()) {
        // Perform the assignment as if via '='.
        { EffectContext context(this);
          EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                                 Token::ASSIGN);
          PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
          context.Plug(eax);
        }
        // For all contexts except EffectContext we have the result on
        // top of the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        // Perform the assignment as if via '='.
        EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
                               Token::ASSIGN);
        PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
        context()->Plug(eax);
      }
      break;
    case NAMED_PROPERTY: {
      __ mov(ecx, prop->key()->AsLiteral()->handle());
      __ pop(edx);
      Handle<Code> ic = is_strict_mode()
          ? isolate()->builtins()->StoreIC_Initialize_Strict()
          : isolate()->builtins()->StoreIC_Initialize();
      __ call(ic, RelocInfo::CODE_TARGET, expr->id());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
    case KEYED_PROPERTY: {
      __ pop(ecx);
      __ pop(edx);
      Handle<Code> ic = is_strict_mode()
          ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
          : isolate()->builtins()->KeyedStoreIC_Initialize();
      __ call(ic, RelocInfo::CODE_TARGET, expr->id());
      PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
      if (expr->is_postfix()) {
        // Result is on the stack.
        if (!context()->IsEffect()) {
          context()->PlugTOS();
        }
      } else {
        context()->Plug(eax);
      }
      break;
    }
  }
}
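

// Loads the value of an expression for a typeof test. Unresolvable global
// and dynamically looked-up names must not throw a ReferenceError here,
// so that e.g. "typeof undeclared_variable" yields 'undefined' instead of
// throwing.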
void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
  VariableProxy* proxy = expr->AsVariableProxy();
  ASSERT(!context()->IsEffect());
  ASSERT(!context()->IsTest());

  if (proxy != NULL && proxy->var()->IsUnallocated()) {
    Comment cmnt(masm_, "Global variable");
    __ mov(eax, proxy->var()->is_qml_global() ? QmlGlobalObjectOperand() : GlobalObjectOperand());
    __ mov(ecx, Immediate(proxy->name()));
    Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
    // Use a regular load, not a contextual load, to avoid a reference
    // error.
    __ call(ic);
    PrepareForBailout(expr, TOS_REG);
    context()->Plug(eax);
  } else if (proxy != NULL && proxy->var()->IsLookupSlot()) {
    Label done, slow;

    // Generate code for loading from variables potentially shadowed
    // by eval-introduced variables.
    EmitDynamicLookupFastCase(proxy->var(), INSIDE_TYPEOF, &slow, &done);

    __ bind(&slow);
    __ push(esi);
    __ push(Immediate(proxy->name()));
    __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2);
    PrepareForBailout(expr, TOS_REG);
    __ bind(&done);

    context()->Plug(eax);
  } else {
    // This expression cannot throw a reference error at the top level.
    VisitInDuplicateContext(expr);
  }
}
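

// Fast path for 'typeof expr == "string literal"' comparisons, e.g.
// "typeof x === 'number'". The operand's map and instance type are checked
// directly instead of materializing the typeof string and comparing it.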
void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
                                                 Expression* sub_expr,
                                                 Handle<String> check) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  { AccumulatorValueContext context(this);
    VisitForTypeofValue(sub_expr);
  }
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);

  if (check->Equals(isolate()->heap()->number_symbol())) {
    __ JumpIfSmi(eax, if_true);
    __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
           isolate()->factory()->heap_number_map());
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->string_symbol())) {
    __ JumpIfSmi(eax, if_false);
    __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, edx);
    __ j(above_equal, if_false);
    // Check for undetectable objects => false.
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    Split(zero, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->boolean_symbol())) {
    __ cmp(eax, isolate()->factory()->true_value());
    __ j(equal, if_true);
    __ cmp(eax, isolate()->factory()->false_value());
    Split(equal, if_true, if_false, fall_through);
  } else if (FLAG_harmony_typeof &&
             check->Equals(isolate()->heap()->null_symbol())) {
    __ cmp(eax, isolate()->factory()->null_value());
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->undefined_symbol())) {
    __ cmp(eax, isolate()->factory()->undefined_value());
    __ j(equal, if_true);
    __ JumpIfSmi(eax, if_false);
    // Check for undetectable objects => true.
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ movzx_b(ecx, FieldOperand(edx, Map::kBitFieldOffset));
    __ test(ecx, Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->function_symbol())) {
    __ JumpIfSmi(eax, if_false);
    STATIC_ASSERT(NUM_OF_CALLABLE_SPEC_OBJECT_TYPES == 2);
    __ CmpObjectType(eax, JS_FUNCTION_TYPE, edx);
    __ j(equal, if_true);
    __ CmpInstanceType(edx, JS_FUNCTION_PROXY_TYPE);
    Split(equal, if_true, if_false, fall_through);
  } else if (check->Equals(isolate()->heap()->object_symbol())) {
    __ JumpIfSmi(eax, if_false);
    if (!FLAG_harmony_typeof) {
      __ cmp(eax, isolate()->factory()->null_value());
      __ j(equal, if_true);
    }
    __ CmpObjectType(eax, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE, edx);
    __ j(below, if_false);
    __ CmpInstanceType(edx, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
    __ j(above, if_false);
    // Check for undetectable objects => false.
    __ test_b(FieldOperand(edx, Map::kBitFieldOffset),
              1 << Map::kIsUndetectable);
    Split(zero, if_true, if_false, fall_through);
  } else {
    if (if_false != fall_through) __ jmp(if_false);
  }
  context()->Plug(if_true, if_false);
}
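

// Comparison operations. 'in' and 'instanceof' call out to a builtin and a
// stub respectively; the equality and relational operators first try an
// inlined smi comparison guarded by a JumpPatchSite and then fall back to
// the CompareIC, which records type feedback for later optimization.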
void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  Comment cmnt(masm_, "[ CompareOperation");
  SetSourcePosition(expr->position());

  // First we try a fast inlined version of the compare when one of
  // the operands is a literal.
  if (TryLiteralCompare(expr)) return;

  // Always perform the comparison for its control flow.  Pack the result
  // into the expression's context after the comparison is performed.
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  Token::Value op = expr->op();
  VisitForStackValue(expr->left());
  switch (op) {
    case Token::IN:
      VisitForStackValue(expr->right());
      __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
      PrepareForBailoutBeforeSplit(expr, false, NULL, NULL);
      __ cmp(eax, isolate()->factory()->true_value());
      Split(equal, if_true, if_false, fall_through);
      break;
    case Token::INSTANCEOF: {
      VisitForStackValue(expr->right());
      InstanceofStub stub(InstanceofStub::kNoFlags);
      __ CallStub(&stub);
      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ test(eax, eax);
      // The stub returns 0 for true.
      Split(zero, if_true, if_false, fall_through);
      break;
    }
    default: {
      VisitForAccumulatorValue(expr->right());
      Condition cc = no_condition;
      switch (op) {
        case Token::EQ_STRICT:
        case Token::EQ:
          cc = equal;
          break;
        case Token::LT:
          cc = less;
          break;
        case Token::GT:
          cc = greater;
          break;
        case Token::LTE:
          cc = less_equal;
          break;
        case Token::GTE:
          cc = greater_equal;
          break;
        case Token::IN:
        case Token::INSTANCEOF:
        default:
          UNREACHABLE();
      }
      __ pop(edx);

      bool inline_smi_code = ShouldInlineSmiCase(op);
      JumpPatchSite patch_site(masm_);
      if (inline_smi_code) {
        Label slow_case;
        __ mov(ecx, edx);
        __ or_(ecx, eax);
        patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
        __ cmp(edx, eax);
        Split(cc, if_true, if_false, NULL);
        __ bind(&slow_case);
      }

      // Record position and call the compare IC.
      SetSourcePosition(expr->position());
      Handle<Code> ic = CompareIC::GetUninitialized(op);
      __ call(ic, RelocInfo::CODE_TARGET, expr->id());
      patch_site.EmitPatchInfo();

      PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
      __ test(eax, eax);
      Split(cc, if_true, if_false, fall_through);
    }
  }

  // Convert the result of the comparison into one expected for this
  // expression's context.
  context()->Plug(if_true, if_false);
}
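

// Fast path for comparisons against the null and undefined literals, e.g.
// 'x == null'. Strict equality matches only the given nil value; loose
// equality also matches the other nil value and undetectable objects.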
void FullCodeGenerator::EmitLiteralCompareNil(CompareOperation* expr,
                                              Expression* sub_expr,
                                              NilValue nil) {
  Label materialize_true, materialize_false;
  Label* if_true = NULL;
  Label* if_false = NULL;
  Label* fall_through = NULL;
  context()->PrepareTest(&materialize_true, &materialize_false,
                         &if_true, &if_false, &fall_through);

  VisitForAccumulatorValue(sub_expr);
  PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
  Handle<Object> nil_value = nil == kNullValue ?
      isolate()->factory()->null_value() :
      isolate()->factory()->undefined_value();
  __ cmp(eax, nil_value);
  if (expr->op() == Token::EQ_STRICT) {
    Split(equal, if_true, if_false, fall_through);
  } else {
    Handle<Object> other_nil_value = nil == kNullValue ?
        isolate()->factory()->undefined_value() :
        isolate()->factory()->null_value();
    __ j(equal, if_true);
    __ cmp(eax, other_nil_value);
    __ j(equal, if_true);
    __ JumpIfSmi(eax, if_false);
    // It can be an undetectable object.
    __ mov(edx, FieldOperand(eax, HeapObject::kMapOffset));
    __ movzx_b(edx, FieldOperand(edx, Map::kBitFieldOffset));
    __ test(edx, Immediate(1 << Map::kIsUndetectable));
    Split(not_zero, if_true, if_false, fall_through);
  }
  context()->Plug(if_true, if_false);
}


void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  context()->Plug(eax);
}


Register FullCodeGenerator::result_register() {
  return eax;
}


Register FullCodeGenerator::context_register() {
  return esi;
}


void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
  ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
  __ mov(Operand(ebp, frame_offset), value);
}


void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
  __ mov(dst, ContextOperand(esi, context_index));
}


void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
  Scope* declaration_scope = scope()->DeclarationScope();
  if (declaration_scope->is_global_scope()) {
    // Contexts nested in the global context have a canonical empty function
    // as their closure, not the anonymous closure containing the global
    // code.  Pass a smi sentinel and let the runtime look up the empty
    // function.
    __ push(Immediate(Smi::FromInt(0)));
  } else if (declaration_scope->is_eval_scope()) {
    // Contexts nested inside eval code have the same closure as the context
    // calling eval, not the anonymous closure containing the eval code.
    // Fetch it from the context.
    __ push(ContextOperand(esi, Context::CLOSURE_INDEX));
  } else {
    ASSERT(declaration_scope->is_function_scope());
    __ push(Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  }
}


// ----------------------------------------------------------------------------
// Non-local control flow support.
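
// When control enters a finally block the return address on top of the
// stack is "cooked": it is rewritten as a smi-encoded offset from the code
// object so that it stays valid if the GC moves the code. ExitFinallyBlock
// reverses the transformation before returning.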
void FullCodeGenerator::EnterFinallyBlock() {
  // Cook return address on top of stack (smi encoded Code* delta).
  ASSERT(!result_register().is(edx));
  __ pop(edx);
  __ sub(edx, Immediate(masm_->CodeObject()));
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
  STATIC_ASSERT(kSmiTag == 0);
  __ SmiTag(edx);
  __ push(edx);
  // Store result register while executing finally block.
  __ push(result_register());
}


void FullCodeGenerator::ExitFinallyBlock() {
  ASSERT(!result_register().is(edx));
  __ pop(result_register());
  // Uncook return address.
  __ pop(edx);
  __ SmiUntag(edx);
  __ add(edx, Immediate(masm_->CodeObject()));
  __ push(edx);
  __ ret(0);
}

#undef __

#define __ ACCESS_MASM(masm())
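
// Unwinds to the target handler when control leaves a try..finally via
// break, continue or return: the stack is dropped to the handler, the
// context is restored from the handler block, the try handler is popped
// and the finally code is entered as a subroutine.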
FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
    int* stack_depth,
    int* context_length) {
  // The macros used here must preserve the result register.

  // Because the handler block contains the context of the finally
  // code, we can restore it directly from there for the finally code
  // rather than iteratively unwinding contexts via their previous
  // links.
  __ Drop(*stack_depth);  // Down to the handler block.
  if (*context_length > 0) {
    // Restore the context to its dedicated register and the stack.
    __ mov(esi, Operand(esp, StackHandlerConstants::kContextOffset));
    __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
  }
  __ PopTryHandler();
  __ call(finally_entry_);

  *stack_depth = 0;
  *context_length = 0;
  return previous_;
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32